libavdevice/v4l2.c
/*
 * Video4Linux2 grab interface
 * Copyright (c) 2000,2001 Fabrice Bellard.
 * Copyright (c) 2006 Luca Abeni.
 *
 * Part of this file is based on the V4L2 video capture example
 * (http://v4l2spec.bytesex.org/v4l2spec/capture.c)
 *
 * Thanks to Michael Niedermayer for providing the mapping between
 * V4L2_PIX_FMT_* and PIX_FMT_*
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
#include "avformat.h"
#include <unistd.h>
#include <fcntl.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/time.h>
#include <asm/types.h>
#include <linux/videodev2.h>
#include <time.h>
#include <errno.h>   /* errno, EINTR, EAGAIN, EINVAL */
#include <string.h>  /* memset(), strerror() */
#include <strings.h> /* strcasecmp() */
#include <assert.h>  /* assert() */

static const int desired_video_buffers = 256;

enum io_method {
    io_read,
    io_mmap,
    io_userptr
};

struct video_data {
    int fd;
    int frame_format; /* V4L2_PIX_FMT_* */
    enum io_method io_method;
    int width, height;
    int frame_rate;
    int frame_rate_base;
    int frame_size;
    int top_field_first;

    int buffers;
    void **buf_start;
    unsigned int *buf_len;
};

struct buff_data {
    int index;
    int fd;
};

struct fmt_map {
    enum PixelFormat ff_fmt;
    int32_t v4l2_fmt;
};

static struct fmt_map fmt_conversion_table[] = {
    {
        .ff_fmt = PIX_FMT_YUV420P,
        .v4l2_fmt = V4L2_PIX_FMT_YUV420,
    },
    {
        .ff_fmt = PIX_FMT_YUV422P,
        .v4l2_fmt = V4L2_PIX_FMT_YUV422P,
    },
    {
        .ff_fmt = PIX_FMT_YUYV422,
        .v4l2_fmt = V4L2_PIX_FMT_YUYV,
    },
    {
        .ff_fmt = PIX_FMT_UYVY422,
        .v4l2_fmt = V4L2_PIX_FMT_UYVY,
    },
    {
        .ff_fmt = PIX_FMT_YUV411P,
        .v4l2_fmt = V4L2_PIX_FMT_YUV411P,
    },
    {
        .ff_fmt = PIX_FMT_YUV410P,
        .v4l2_fmt = V4L2_PIX_FMT_YUV410,
    },
    {
        .ff_fmt = PIX_FMT_BGR24,
        .v4l2_fmt = V4L2_PIX_FMT_BGR24,
    },
    {
        .ff_fmt = PIX_FMT_RGB24,
        .v4l2_fmt = V4L2_PIX_FMT_RGB24,
    },
    {
        .ff_fmt = PIX_FMT_RGB32,
        .v4l2_fmt = V4L2_PIX_FMT_BGR32,
    },
    {
        .ff_fmt = PIX_FMT_GRAY8,
        .v4l2_fmt = V4L2_PIX_FMT_GREY,
    },
};

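/* Open the device node given in ctx->filename, query the driver with
 * VIDIOC_QUERYCAP and make sure it is a V4L2 video capture device.
 * Returns the open file descriptor, or -1 on failure. */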
static int device_open(AVFormatContext *ctx, uint32_t *capabilities)
{
    struct v4l2_capability cap;
    int fd;
    int res;
    int flags = O_RDWR;

    if (ctx->flags & AVFMT_FLAG_NONBLOCK) {
        flags |= O_NONBLOCK;
    }
    fd = open(ctx->filename, flags, 0);
    if (fd < 0) {
        av_log(ctx, AV_LOG_ERROR, "Cannot open video device %s : %s\n",
               ctx->filename, strerror(errno));

        return -1;
    }

    res = ioctl(fd, VIDIOC_QUERYCAP, &cap);
    // ENOIOCTLCMD definition only available on __KERNEL__
    if (res < 0 && errno == 515) {
        av_log(ctx, AV_LOG_ERROR, "QUERYCAP not implemented, probably V4L device but not supporting V4L2\n");
        close(fd);

        return -1;
    }
    if (res < 0) {
        av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_QUERYCAP): %s\n",
               strerror(errno));
        close(fd);

        return -1;
    }
    if ((cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0) {
        av_log(ctx, AV_LOG_ERROR, "Not a video capture device\n");
        close(fd);

        return -1;
    }
    *capabilities = cap.capabilities;

    return fd;
}

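/* Negotiate frame size and pixel format with VIDIOC_S_FMT. The driver is
 * allowed to adjust the requested dimensions; the values it actually chose
 * are reported back through *width and *height. */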
static int device_init(AVFormatContext *ctx, int *width, int *height, int pix_fmt)
{
    struct video_data *s = ctx->priv_data;
    int fd = s->fd;
    struct v4l2_format fmt;
    int res;

    memset(&fmt, 0, sizeof(struct v4l2_format));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width = *width;
    fmt.fmt.pix.height = *height;
    fmt.fmt.pix.pixelformat = pix_fmt;
    fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
    res = ioctl(fd, VIDIOC_S_FMT, &fmt);
    if ((*width != fmt.fmt.pix.width) || (*height != fmt.fmt.pix.height)) {
        av_log(ctx, AV_LOG_INFO, "The V4L2 driver changed the video from %dx%d to %dx%d\n", *width, *height, fmt.fmt.pix.width, fmt.fmt.pix.height);
        *width = fmt.fmt.pix.width;
        *height = fmt.fmt.pix.height;
    }

    return res;
}

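/* Guess the field order from the current video standard: return 0
 * for NTSC or when the standard cannot be queried, 1 (top field first)
 * otherwise. */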
static int first_field(int fd)
{
    int res;
    v4l2_std_id std;

    res = ioctl(fd, VIDIOC_G_STD, &std);
    if (res < 0) {
        return 0;
    }
    if (std & V4L2_STD_NTSC) {
        return 0;
    }

    return 1;
}

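/* Map a libavcodec PIX_FMT_* value to the matching V4L2 pixel format,
 * or return 0 if fmt_conversion_table has no entry for it. */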
static uint32_t fmt_ff2v4l(enum PixelFormat pix_fmt)
{
    int i;

    for (i = 0; i < sizeof(fmt_conversion_table) / sizeof(struct fmt_map); i++) {
        if (fmt_conversion_table[i].ff_fmt == pix_fmt) {
            return fmt_conversion_table[i].v4l2_fmt;
        }
    }

    return 0;
}
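/* Reverse mapping: V4L2 pixel format to PIX_FMT_*, or -1 if unknown. */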
static enum PixelFormat fmt_v4l2ff(uint32_t pix_fmt)
{
    int i;

    for (i = 0; i < sizeof(fmt_conversion_table) / sizeof(struct fmt_map); i++) {
        if (fmt_conversion_table[i].v4l2_fmt == pix_fmt) {
            return fmt_conversion_table[i].ff_fmt;
        }
    }

    return -1;
}
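/* Set up memory-mapped I/O: ask the driver for up to desired_video_buffers
 * capture buffers (VIDIOC_REQBUFS; it may grant fewer), query each one with
 * VIDIOC_QUERYBUF and mmap() it into the process address space. */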
static int mmap_init(AVFormatContext *ctx)
{
    struct video_data *s = ctx->priv_data;
    struct v4l2_requestbuffers req;
    int i, res;

    memset(&req, 0, sizeof(struct v4l2_requestbuffers));
    req.count = desired_video_buffers;
    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;
    res = ioctl(s->fd, VIDIOC_REQBUFS, &req);
    if (res < 0) {
        if (errno == EINVAL) {
            av_log(ctx, AV_LOG_ERROR, "Device does not support mmap\n");
        } else {
            av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_REQBUFS)\n");
        }

        return -1;
    }

    if (req.count < 2) {
        av_log(ctx, AV_LOG_ERROR, "Insufficient buffer memory\n");

        return -1;
    }
    s->buffers = req.count;
    s->buf_start = av_malloc(sizeof(void *) * s->buffers);
    if (s->buf_start == NULL) {
        av_log(ctx, AV_LOG_ERROR, "Cannot allocate buffer pointers\n");

        return -1;
    }
    s->buf_len = av_malloc(sizeof(unsigned int) * s->buffers);
    if (s->buf_len == NULL) {
        av_log(ctx, AV_LOG_ERROR, "Cannot allocate buffer sizes\n");
        av_free(s->buf_start);

        return -1;
    }

    for (i = 0; i < req.count; i++) {
        struct v4l2_buffer buf;

        memset(&buf, 0, sizeof(struct v4l2_buffer));
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = i;
        res = ioctl(s->fd, VIDIOC_QUERYBUF, &buf);
        if (res < 0) {
            av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_QUERYBUF)\n");

            return -1;
        }

        s->buf_len[i] = buf.length;
        if (s->buf_len[i] < s->frame_size) {
            av_log(ctx, AV_LOG_ERROR, "Buffer len [%d] = %d != %d\n", i, s->buf_len[i], s->frame_size);

            return -1;
        }
        s->buf_start[i] = mmap(NULL, buf.length,
                               PROT_READ | PROT_WRITE, MAP_SHARED, s->fd, buf.m.offset);
        if (s->buf_start[i] == MAP_FAILED) {
            av_log(ctx, AV_LOG_ERROR, "mmap: %s\n", strerror(errno));

            return -1;
        }
    }

    return 0;
}

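/* read()-based capture is not implemented yet; returning -1 makes
 * v4l2_read_header() fail for devices without streaming support. */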
static int read_init(AVFormatContext *ctx)
{
    return -1;
}
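/* Packet destructor for mmap I/O: when the packet is freed, hand the
 * underlying buffer back to the driver with VIDIOC_QBUF so that it can
 * be filled again. */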
static void mmap_release_buffer(AVPacket *pkt)
{
    struct v4l2_buffer buf;
    int res, fd;
    struct buff_data *buf_descriptor = pkt->priv;

    memset(&buf, 0, sizeof(struct v4l2_buffer));
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    buf.index = buf_descriptor->index;
    fd = buf_descriptor->fd;
    av_free(buf_descriptor);

    res = ioctl(fd, VIDIOC_QBUF, &buf);
    if (res < 0) {
        av_log(NULL, AV_LOG_ERROR, "ioctl(VIDIOC_QBUF)\n");
    }
    pkt->data = NULL;
    pkt->size = 0;
}

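/* Dequeue one filled buffer with VIDIOC_DQBUF and return it without
 * copying: pkt->data points directly into the mmap()ed area, and the
 * buffer is re-queued only when the packet is destructed. */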
static int mmap_read_frame(AVFormatContext *ctx, AVPacket *pkt)
{
    struct video_data *s = ctx->priv_data;
    struct v4l2_buffer buf;
    struct buff_data *buf_descriptor;
    int res;

    memset(&buf, 0, sizeof(struct v4l2_buffer));
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;

    /* FIXME: Some special treatment might be needed in case of loss of signal... */
    while ((res = ioctl(s->fd, VIDIOC_DQBUF, &buf)) < 0 && (errno == EINTR));
    if (res < 0) {
        if (errno == EAGAIN) {
            pkt->size = 0;

            return AVERROR(EAGAIN);
        }
        av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_DQBUF): %s\n", strerror(errno));

        return -1;
    }
    assert(buf.index < s->buffers);
    if (buf.bytesused != s->frame_size) {
        av_log(ctx, AV_LOG_ERROR, "The v4l2 frame is %d bytes, but %d bytes are expected\n", buf.bytesused, s->frame_size);

        return -1;
    }

    /* Image is at s->buf_start[buf.index] */
    pkt->data = s->buf_start[buf.index];
    pkt->size = buf.bytesused;
    pkt->pts = buf.timestamp.tv_sec * INT64_C(1000000) + buf.timestamp.tv_usec;
    pkt->destruct = mmap_release_buffer;
    buf_descriptor = av_malloc(sizeof(struct buff_data));
    if (buf_descriptor == NULL) {
        /* Something went wrong... Since av_malloc() failed, we cannot even
         * allocate a buffer for memcopying into it
         */
        av_log(ctx, AV_LOG_ERROR, "Failed to allocate a buffer descriptor\n");
        res = ioctl(s->fd, VIDIOC_QBUF, &buf);

        return -1;
    }
    buf_descriptor->fd = s->fd;
    buf_descriptor->index = buf.index;
    pkt->priv = buf_descriptor;

    return s->buf_len[buf.index];
}
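/* read()-based frame grabbing, not implemented yet (see read_init()). */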
static int read_frame(AVFormatContext *ctx, AVPacket *pkt)
{
    return -1;
}
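/* Queue all the mmap()ed buffers and start capturing (VIDIOC_STREAMON). */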
static int mmap_start(AVFormatContext *ctx)
{
    struct video_data *s = ctx->priv_data;
    enum v4l2_buf_type type;
    int i, res;

    for (i = 0; i < s->buffers; i++) {
        struct v4l2_buffer buf;

        memset(&buf, 0, sizeof(struct v4l2_buffer));
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = i;

        res = ioctl(s->fd, VIDIOC_QBUF, &buf);
        if (res < 0) {
            av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_QBUF): %s\n", strerror(errno));

            return -1;
        }
    }

    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    res = ioctl(s->fd, VIDIOC_STREAMON, &type);
    if (res < 0) {
        av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_STREAMON): %s\n", strerror(errno));

        return -1;
    }

    return 0;
}

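/* Stop capturing (VIDIOC_STREAMOFF), unmap all buffers and release the
 * bookkeeping arrays. */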
static void mmap_close(struct video_data *s)
{
    enum v4l2_buf_type type;
    int i;

    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    /* We do not check for the result, because we could
     * not do anything about it anyway...
     */
    ioctl(s->fd, VIDIOC_STREAMOFF, &type);
    for (i = 0; i < s->buffers; i++) {
        munmap(s->buf_start[i], s->buf_len[i]);
    }
    av_free(s->buf_start);
    av_free(s->buf_len);
}

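/* Apply the TV input (ap->channel) and video standard (ap->standard)
 * requested by the caller, if any, using VIDIOC_S_INPUT and VIDIOC_S_STD. */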
static int v4l2_set_parameters(AVFormatContext *s1, AVFormatParameters *ap)
{
    struct video_data *s = s1->priv_data;
    struct v4l2_input input;
    struct v4l2_standard standard;
    int i;

    if (ap->channel >= 0) {
        /* set tv video input */
        memset(&input, 0, sizeof(input));
        input.index = ap->channel;
        if (ioctl(s->fd, VIDIOC_ENUMINPUT, &input) < 0) {
            av_log(s1, AV_LOG_ERROR, "The V4L2 driver ioctl enum input failed:\n");
            return AVERROR(EIO);
        }

        av_log(s1, AV_LOG_DEBUG, "The V4L2 driver set input_id: %d, input: %s\n",
               ap->channel, input.name);
        if (ioctl(s->fd, VIDIOC_S_INPUT, &input.index) < 0) {
            av_log(s1, AV_LOG_ERROR, "The V4L2 driver ioctl set input(%d) failed\n",
                   ap->channel);
            return AVERROR(EIO);
        }
    }

    if (ap->standard) {
        av_log(s1, AV_LOG_DEBUG, "The V4L2 driver set standard: %s\n",
               ap->standard);
        /* set tv standard */
        memset(&standard, 0, sizeof(standard));
        for (i = 0;; i++) {
            standard.index = i;
            if (ioctl(s->fd, VIDIOC_ENUMSTD, &standard) < 0) {
                av_log(s1, AV_LOG_ERROR, "The V4L2 driver ioctl set standard(%s) failed\n",
                       ap->standard);
                return AVERROR(EIO);
            }

            if (!strcasecmp(standard.name, ap->standard)) {
                break;
            }
        }

        av_log(s1, AV_LOG_DEBUG, "The V4L2 driver set standard: %s, id: %"PRIu64"\n",
               ap->standard, standard.id);
        if (ioctl(s->fd, VIDIOC_S_STD, &standard.id) < 0) {
            av_log(s1, AV_LOG_ERROR, "The V4L2 driver ioctl set standard(%s) failed\n",
                   ap->standard);
            return AVERROR(EIO);
        }
    }

    return 0;
}

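/* Demuxer read_header callback: validate the mandatory parameters (size and
 * time base), open and configure the device, pick a pixel format (scanning
 * fmt_conversion_table if the requested one is rejected), select mmap or
 * read() I/O depending on V4L2_CAP_STREAMING, and describe the resulting
 * raw video stream. */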
static int v4l2_read_header(AVFormatContext *s1, AVFormatParameters *ap)
{
    struct video_data *s = s1->priv_data;
    AVStream *st;
    int width, height;
    int res, frame_rate, frame_rate_base;
    uint32_t desired_format, capabilities;

    if (ap->width <= 0 || ap->height <= 0 || ap->time_base.den <= 0) {
        av_log(s1, AV_LOG_ERROR, "Missing/Wrong parameters\n");

        return -1;
    }

    width = ap->width;
    height = ap->height;
    frame_rate = ap->time_base.den;
    frame_rate_base = ap->time_base.num;

    if ((unsigned)width > 32767 || (unsigned)height > 32767) {
        av_log(s1, AV_LOG_ERROR, "Wrong size %dx%d\n", width, height);

        return -1;
    }

    st = av_new_stream(s1, 0);
    if (!st) {
        return AVERROR(ENOMEM);
    }
    av_set_pts_info(st, 64, 1, 1000000); /* 64 bits pts in us */

    s->width = width;
    s->height = height;
    s->frame_rate = frame_rate;
    s->frame_rate_base = frame_rate_base;

    capabilities = 0;
    s->fd = device_open(s1, &capabilities);
    if (s->fd < 0) {
        av_free(st);

        return AVERROR(EIO);
    }
    av_log(s1, AV_LOG_INFO, "[%d]Capabilities: %x\n", s->fd, capabilities);

    desired_format = fmt_ff2v4l(ap->pix_fmt);
    if (desired_format == 0 || (device_init(s1, &width, &height, desired_format) < 0)) {
        int i, done;

        done = 0; i = 0;
        while (!done) {
            desired_format = fmt_conversion_table[i].v4l2_fmt;
            if (device_init(s1, &width, &height, desired_format) < 0) {
                desired_format = 0;
                i++;
            } else {
                done = 1;
            }
            if (i == sizeof(fmt_conversion_table) / sizeof(struct fmt_map)) {
                done = 1;
            }
        }
    }
    if (desired_format == 0) {
        av_log(s1, AV_LOG_ERROR, "Cannot find a proper format.\n");
        close(s->fd);
        av_free(st);

        return AVERROR(EIO);
    }
    s->frame_format = desired_format;

    if (v4l2_set_parameters(s1, ap) < 0)
        return AVERROR(EIO);

    st->codec->pix_fmt = fmt_v4l2ff(desired_format);
    s->frame_size = avpicture_get_size(st->codec->pix_fmt, width, height);
    if (capabilities & V4L2_CAP_STREAMING) {
        s->io_method = io_mmap;
        res = mmap_init(s1);
        if (res == 0) {
            res = mmap_start(s1);
        }
    } else {
        s->io_method = io_read;
        res = read_init(s1);
    }
    if (res < 0) {
        close(s->fd);
        av_free(st);

        return AVERROR(EIO);
    }
    s->top_field_first = first_field(s->fd);

    st->codec->codec_type = CODEC_TYPE_VIDEO;
    st->codec->codec_id = CODEC_ID_RAWVIDEO;
    st->codec->width = width;
    st->codec->height = height;
    st->codec->time_base.den = frame_rate;
    st->codec->time_base.num = frame_rate_base;
    st->codec->bit_rate = s->frame_size * 1/av_q2d(st->codec->time_base) * 8;

    return 0;
}

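/* Demuxer read_packet callback: grab one frame with the I/O method chosen
 * at open time and tag it with the interlacing information detected there. */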
static int v4l2_read_packet(AVFormatContext *s1, AVPacket *pkt)
{
    struct video_data *s = s1->priv_data;
    int res;

    if (s->io_method == io_mmap) {
        av_init_packet(pkt);
        res = mmap_read_frame(s1, pkt);
    } else if (s->io_method == io_read) {
        if (av_new_packet(pkt, s->frame_size) < 0)
            return AVERROR(EIO);

        res = read_frame(s1, pkt);
    } else {
        return AVERROR(EIO);
    }
    if (res < 0) {
        return res;
    }

    if (s1->streams[0]->codec->coded_frame) {
        s1->streams[0]->codec->coded_frame->interlaced_frame = 1;
        s1->streams[0]->codec->coded_frame->top_field_first = s->top_field_first;
    }

    return s->frame_size;
}

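/* Demuxer read_close callback: tear down the mmap ring, if one was set up,
 * and close the device. */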
static int v4l2_read_close(AVFormatContext *s1)
{
    struct video_data *s = s1->priv_data;

    if (s->io_method == io_mmap) {
        mmap_close(s);
    }

    close(s->fd);
    return 0;
}

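/* Demuxer declaration. The leading fields are positional: name, long name,
 * priv_data size, read_probe (unused here), read_header, read_packet and
 * read_close. AVFMT_NOFILE means libavformat does not open ctx->filename
 * itself; this demuxer accesses the device directly. A typical invocation
 * would be, for example:
 *     ffmpeg -f video4linux2 -s 640x480 -r 25 -i /dev/video0 out.avi
 * (size and frame rate are required, see v4l2_read_header()). */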
AVInputFormat v4l2_demuxer = {
    "video4linux2",
    "video grab",
    sizeof(struct video_data),
    NULL,
    v4l2_read_header,
    v4l2_read_packet,
    v4l2_read_close,
    .flags = AVFMT_NOFILE,
};