FFmpeg coverage

Directory: ../../../ffmpeg/
File:      src/libavdevice/v4l2.c
Date:      2024-02-16 17:37:06

                Exec   Total   Coverage
Lines:             3     544       0.6%
Functions:         1      23       4.3%
Branches:          1     297       0.3%

Line Branch Exec Source
1 /*
2 * Copyright (c) 2000,2001 Fabrice Bellard
3 * Copyright (c) 2006 Luca Abeni
4 *
5 * This file is part of FFmpeg.
6 *
7 * FFmpeg is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Lesser General Public
9 * License as published by the Free Software Foundation; either
10 * version 2.1 of the License, or (at your option) any later version.
11 *
12 * FFmpeg is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Lesser General Public License for more details.
16 *
17 * You should have received a copy of the GNU Lesser General Public
18 * License along with FFmpeg; if not, write to the Free Software
19 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
20 */
21
22 /**
23 * @file
24 * Video4Linux2 grab interface
25 *
26 * Part of this file is based on the V4L2 video capture example
27 * (http://linuxtv.org/downloads/v4l-dvb-apis/capture-example.html)
28 *
29 * Thanks to Michael Niedermayer for providing the mapping between
30 * V4L2_PIX_FMT_* and AV_PIX_FMT_*
31 */
32
33 #include <stdatomic.h>
34
35 #include "libavutil/avassert.h"
36 #include "libavutil/avstring.h"
37 #include "libavutil/imgutils.h"
38 #include "libavutil/parseutils.h"
39 #include "libavutil/pixdesc.h"
40 #include "libavutil/time.h"
41 #include "libavcodec/avcodec.h"
42 #include "libavcodec/codec_desc.h"
43 #include "libavformat/demux.h"
44 #include "libavformat/internal.h"
45 #include "avdevice.h"
46 #include "timefilter.h"
47 #include "v4l2-common.h"
48 #include <dirent.h>
49
50 #if CONFIG_LIBV4L2
51 #include <libv4l2.h>
52 #endif
53
54 static const int desired_video_buffers = 256;
55
56 #define V4L_ALLFORMATS 3
57 #define V4L_RAWFORMATS 1
58 #define V4L_COMPFORMATS 2
59
60 /**
61 * Return timestamps to the user exactly as returned by the kernel
62 */
63 #define V4L_TS_DEFAULT 0
64 /**
65 * Autodetect the kind of timestamps returned by the kernel and convert to
66 * absolute (wall clock) timestamps.
67 */
68 #define V4L_TS_ABS 1
69 /**
70 * Assume kernel timestamps are from the monotonic clock and convert to
71 * absolute timestamps.
72 */
73 #define V4L_TS_MONO2ABS 2
74
75 /**
76 * Once the kind of timestamps returned by the kernel has been detected,
77 * the value of the timefilter (NULL or not) determines whether a conversion
78 * takes place.
79 */
80 #define V4L_TS_CONVERT_READY V4L_TS_DEFAULT
81
82 struct video_data {
83 AVClass *class;
84 int fd;
85 int pixelformat; /* V4L2_PIX_FMT_* */
86 int width, height;
87 int frame_size;
88 int interlaced;
89 int top_field_first;
90 int ts_mode;
91 TimeFilter *timefilter;
92 int64_t last_time_m;
93
94 int buffers;
95 atomic_int buffers_queued;
96 void **buf_start;
97 unsigned int *buf_len;
98 char *standard;
99 v4l2_std_id std_id;
100 int channel;
101 char *pixel_format; /**< Set by a private option. */
102 int list_format; /**< Set by a private option. */
103 int list_standard; /**< Set by a private option. */
104 char *framerate; /**< Set by a private option. */
105
106 int use_libv4l2;
107 int (*open_f)(const char *file, int oflag, ...);
108 int (*close_f)(int fd);
109 int (*dup_f)(int fd);
110 #ifdef __GLIBC__
111 int (*ioctl_f)(int fd, unsigned long int request, ...);
112 #else
113 int (*ioctl_f)(int fd, int request, ...);
114 #endif
115 ssize_t (*read_f)(int fd, void *buffer, size_t n);
116 void *(*mmap_f)(void *start, size_t length, int prot, int flags, int fd, int64_t offset);
117 int (*munmap_f)(void *_start, size_t length);
118 };
119
120 struct buff_data {
121 struct video_data *s;
122 int index;
123 };
124
125 static int device_open(AVFormatContext *ctx, const char* device_path)
126 {
127 struct video_data *s = ctx->priv_data;
128 struct v4l2_capability cap;
129 int fd;
130 int err;
131 int flags = O_RDWR;
132
133 #define SET_WRAPPERS(prefix) do { \
134 s->open_f = prefix ## open; \
135 s->close_f = prefix ## close; \
136 s->dup_f = prefix ## dup; \
137 s->ioctl_f = prefix ## ioctl; \
138 s->read_f = prefix ## read; \
139 s->mmap_f = prefix ## mmap; \
140 s->munmap_f = prefix ## munmap; \
141 } while (0)
142
143 if (s->use_libv4l2) {
144 #if CONFIG_LIBV4L2
145 SET_WRAPPERS(v4l2_);
146 #else
147 av_log(ctx, AV_LOG_ERROR, "libavdevice is not built with libv4l2 support.\n");
148 return AVERROR(EINVAL);
149 #endif
150 } else {
151 SET_WRAPPERS();
152 }
153
154 #define v4l2_open s->open_f
155 #define v4l2_close s->close_f
156 #define v4l2_dup s->dup_f
157 #define v4l2_ioctl s->ioctl_f
158 #define v4l2_read s->read_f
159 #define v4l2_mmap s->mmap_f
160 #define v4l2_munmap s->munmap_f
161
162 if (ctx->flags & AVFMT_FLAG_NONBLOCK) {
163 flags |= O_NONBLOCK;
164 }
165
166 fd = v4l2_open(device_path, flags, 0);
167 if (fd < 0) {
168 err = AVERROR(errno);
169 av_log(ctx, AV_LOG_ERROR, "Cannot open video device %s: %s\n",
170 device_path, av_err2str(err));
171 return err;
172 }
173
174 if (v4l2_ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) {
175 err = AVERROR(errno);
176 av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_QUERYCAP): %s\n",
177 av_err2str(err));
178 goto fail;
179 }
180
181 av_log(ctx, AV_LOG_VERBOSE, "fd:%d capabilities:%x\n",
182 fd, cap.capabilities);
183
184 if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
185 av_log(ctx, AV_LOG_ERROR, "Not a video capture device.\n");
186 err = AVERROR(ENODEV);
187 goto fail;
188 }
189
190 if (!(cap.capabilities & V4L2_CAP_STREAMING)) {
191 av_log(ctx, AV_LOG_ERROR,
192 "The device does not support the streaming I/O method.\n");
193 err = AVERROR(ENOSYS);
194 goto fail;
195 }
196
197 return fd;
198
199 fail:
200 v4l2_close(fd);
201 return err;
202 }
203
204 static int device_init(AVFormatContext *ctx, int *width, int *height,
205 uint32_t pixelformat)
206 {
207 struct video_data *s = ctx->priv_data;
208 struct v4l2_format fmt = { .type = V4L2_BUF_TYPE_VIDEO_CAPTURE };
209 int res = 0;
210
211 fmt.fmt.pix.width = *width;
212 fmt.fmt.pix.height = *height;
213 fmt.fmt.pix.pixelformat = pixelformat;
214 fmt.fmt.pix.field = V4L2_FIELD_ANY;
215
216 /* Some drivers will fail and return EINVAL when the pixelformat
217 is not supported (even if type field is valid and supported) */
218 if (v4l2_ioctl(s->fd, VIDIOC_S_FMT, &fmt) < 0)
219 res = AVERROR(errno);
220
221 if ((*width != fmt.fmt.pix.width) || (*height != fmt.fmt.pix.height)) {
222 av_log(ctx, AV_LOG_INFO,
223 "The V4L2 driver changed the video from %dx%d to %dx%d\n",
224 *width, *height, fmt.fmt.pix.width, fmt.fmt.pix.height);
225 *width = fmt.fmt.pix.width;
226 *height = fmt.fmt.pix.height;
227 }
228
229 if (pixelformat != fmt.fmt.pix.pixelformat) {
230 av_log(ctx, AV_LOG_DEBUG,
231 "The V4L2 driver changed the pixel format "
232 "from 0x%08X to 0x%08X\n",
233 pixelformat, fmt.fmt.pix.pixelformat);
234 res = AVERROR(EINVAL);
235 }
236
237 if (fmt.fmt.pix.field == V4L2_FIELD_INTERLACED) {
238 av_log(ctx, AV_LOG_DEBUG,
239 "The V4L2 driver is using the interlaced mode\n");
240 s->interlaced = 1;
241 }
242
243 return res;
244 }
245
246 static int first_field(const struct video_data *s)
247 {
248 int res;
249 v4l2_std_id std;
250
251 res = v4l2_ioctl(s->fd, VIDIOC_G_STD, &std);
252 if (res < 0)
253 return 0;
254 if (std & V4L2_STD_NTSC)
255 return 0;
256
257 return 1;
258 }
259
260 #if HAVE_STRUCT_V4L2_FRMIVALENUM_DISCRETE
261 static void list_framesizes(AVFormatContext *ctx, uint32_t pixelformat)
262 {
263 const struct video_data *s = ctx->priv_data;
264 struct v4l2_frmsizeenum vfse = { .pixel_format = pixelformat };
265
266 while(!v4l2_ioctl(s->fd, VIDIOC_ENUM_FRAMESIZES, &vfse)) {
267 switch (vfse.type) {
268 case V4L2_FRMSIZE_TYPE_DISCRETE:
269 av_log(ctx, AV_LOG_INFO, " %ux%u",
270 vfse.discrete.width, vfse.discrete.height);
271 break;
272 case V4L2_FRMSIZE_TYPE_CONTINUOUS:
273 case V4L2_FRMSIZE_TYPE_STEPWISE:
274 av_log(ctx, AV_LOG_INFO, " {%u-%u, %u}x{%u-%u, %u}",
275 vfse.stepwise.min_width,
276 vfse.stepwise.max_width,
277 vfse.stepwise.step_width,
278 vfse.stepwise.min_height,
279 vfse.stepwise.max_height,
280 vfse.stepwise.step_height);
281 }
282 vfse.index++;
283 }
284 }
285 #endif
286
287 static void list_formats(AVFormatContext *ctx, int type)
288 {
289 const struct video_data *s = ctx->priv_data;
290 struct v4l2_fmtdesc vfd = { .type = V4L2_BUF_TYPE_VIDEO_CAPTURE };
291
292 while(!v4l2_ioctl(s->fd, VIDIOC_ENUM_FMT, &vfd)) {
293 enum AVCodecID codec_id = ff_fmt_v4l2codec(vfd.pixelformat);
294 enum AVPixelFormat pix_fmt = ff_fmt_v4l2ff(vfd.pixelformat, codec_id);
295
296 vfd.index++;
297
298 if (!(vfd.flags & V4L2_FMT_FLAG_COMPRESSED) &&
299 type & V4L_RAWFORMATS) {
300 const char *fmt_name = av_get_pix_fmt_name(pix_fmt);
301 av_log(ctx, AV_LOG_INFO, "Raw : %11s : %20s :",
302 fmt_name ? fmt_name : "Unsupported",
303 vfd.description);
304 } else if (vfd.flags & V4L2_FMT_FLAG_COMPRESSED &&
305 type & V4L_COMPFORMATS) {
306 const AVCodecDescriptor *desc = avcodec_descriptor_get(codec_id);
307 av_log(ctx, AV_LOG_INFO, "Compressed: %11s : %20s :",
308 desc ? desc->name : "Unsupported",
309 vfd.description);
310 } else {
311 continue;
312 }
313
314 #ifdef V4L2_FMT_FLAG_EMULATED
315 if (vfd.flags & V4L2_FMT_FLAG_EMULATED)
316 av_log(ctx, AV_LOG_INFO, " Emulated :");
317 #endif
318 #if HAVE_STRUCT_V4L2_FRMIVALENUM_DISCRETE
319 list_framesizes(ctx, vfd.pixelformat);
320 #endif
321 av_log(ctx, AV_LOG_INFO, "\n");
322 }
323 }
324
325 static void list_standards(AVFormatContext *ctx)
326 {
327 int ret;
328 struct video_data *s = ctx->priv_data;
329 struct v4l2_standard standard;
330
331 if (s->std_id == 0)
332 return;
333
334 for (standard.index = 0; ; standard.index++) {
335 if (v4l2_ioctl(s->fd, VIDIOC_ENUMSTD, &standard) < 0) {
336 ret = AVERROR(errno);
337 if (ret == AVERROR(EINVAL)) {
338 break;
339 } else {
340 av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_ENUMSTD): %s\n", av_err2str(ret));
341 return;
342 }
343 }
344 av_log(ctx, AV_LOG_INFO, "%2d, %16"PRIx64", %s\n",
345 standard.index, (uint64_t)standard.id, standard.name);
346 }
347 }
348
349 static int mmap_init(AVFormatContext *ctx)
350 {
351 int i, res;
352 struct video_data *s = ctx->priv_data;
353 struct v4l2_requestbuffers req = {
354 .type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
355 .count = desired_video_buffers,
356 .memory = V4L2_MEMORY_MMAP
357 };
358
359 if (v4l2_ioctl(s->fd, VIDIOC_REQBUFS, &req) < 0) {
360 res = AVERROR(errno);
361 av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_REQBUFS): %s\n", av_err2str(res));
362 return res;
363 }
364
365 if (req.count < 2) {
366 av_log(ctx, AV_LOG_ERROR, "Insufficient buffer memory\n");
367 return AVERROR(ENOMEM);
368 }
369 s->buffers = req.count;
370 s->buf_start = av_malloc_array(s->buffers, sizeof(void *));
371 if (!s->buf_start) {
372 av_log(ctx, AV_LOG_ERROR, "Cannot allocate buffer pointers\n");
373 return AVERROR(ENOMEM);
374 }
375 s->buf_len = av_malloc_array(s->buffers, sizeof(unsigned int));
376 if (!s->buf_len) {
377 av_log(ctx, AV_LOG_ERROR, "Cannot allocate buffer sizes\n");
378 av_freep(&s->buf_start);
379 return AVERROR(ENOMEM);
380 }
381
382 for (i = 0; i < req.count; i++) {
383 struct v4l2_buffer buf = {
384 .type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
385 .index = i,
386 .memory = V4L2_MEMORY_MMAP
387 };
388 if (v4l2_ioctl(s->fd, VIDIOC_QUERYBUF, &buf) < 0) {
389 res = AVERROR(errno);
390 av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_QUERYBUF): %s\n", av_err2str(res));
391 return res;
392 }
393
394 s->buf_len[i] = buf.length;
395 if (s->frame_size > 0 && s->buf_len[i] < s->frame_size) {
396 av_log(ctx, AV_LOG_ERROR,
397 "buf_len[%d] = %d < expected frame size %d\n",
398 i, s->buf_len[i], s->frame_size);
399 return AVERROR(ENOMEM);
400 }
401 s->buf_start[i] = v4l2_mmap(NULL, buf.length,
402 PROT_READ | PROT_WRITE, MAP_SHARED,
403 s->fd, buf.m.offset);
404
405 if (s->buf_start[i] == MAP_FAILED) {
406 res = AVERROR(errno);
407 av_log(ctx, AV_LOG_ERROR, "mmap: %s\n", av_err2str(res));
408 return res;
409 }
410 }
411
412 return 0;
413 }
414
415 static int enqueue_buffer(struct video_data *s, struct v4l2_buffer *buf)
416 {
417 int res = 0;
418
419 if (v4l2_ioctl(s->fd, VIDIOC_QBUF, buf) < 0) {
420 res = AVERROR(errno);
421 av_log(NULL, AV_LOG_ERROR, "ioctl(VIDIOC_QBUF): %s\n", av_err2str(res));
422 } else {
423 atomic_fetch_add(&s->buffers_queued, 1);
424 }
425
426 return res;
427 }
428
429 static void mmap_release_buffer(void *opaque, uint8_t *data)
430 {
431 struct v4l2_buffer buf = { 0 };
432 struct buff_data *buf_descriptor = opaque;
433 struct video_data *s = buf_descriptor->s;
434
435 buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
436 buf.memory = V4L2_MEMORY_MMAP;
437 buf.index = buf_descriptor->index;
438 av_free(buf_descriptor);
439
440 enqueue_buffer(s, &buf);
441 }
442
443 #if HAVE_CLOCK_GETTIME && defined(CLOCK_MONOTONIC)
444 static int64_t av_gettime_monotonic(void)
445 {
446 return av_gettime_relative();
447 }
448 #endif
449
450 static int init_convert_timestamp(AVFormatContext *ctx, int64_t ts)
451 {
452 struct video_data *s = ctx->priv_data;
453 int64_t now;
454
455 now = av_gettime();
456 if (s->ts_mode == V4L_TS_ABS &&
457 ts <= now + 1 * AV_TIME_BASE && ts >= now - 10 * AV_TIME_BASE) {
458 av_log(ctx, AV_LOG_INFO, "Detected absolute timestamps\n");
459 s->ts_mode = V4L_TS_CONVERT_READY;
460 return 0;
461 }
462 #if HAVE_CLOCK_GETTIME && defined(CLOCK_MONOTONIC)
463 if (ctx->streams[0]->avg_frame_rate.num) {
464 now = av_gettime_monotonic();
465 if (s->ts_mode == V4L_TS_MONO2ABS ||
466 (ts <= now + 1 * AV_TIME_BASE && ts >= now - 10 * AV_TIME_BASE)) {
467 AVRational tb = {AV_TIME_BASE, 1};
468 int64_t period = av_rescale_q(1, tb, ctx->streams[0]->avg_frame_rate);
469 av_log(ctx, AV_LOG_INFO, "Detected monotonic timestamps, converting\n");
470 /* microseconds instead of seconds, MHz instead of Hz */
471 s->timefilter = ff_timefilter_new(1, period, 1.0E-6);
472 if (!s->timefilter)
473 return AVERROR(ENOMEM);
474 s->ts_mode = V4L_TS_CONVERT_READY;
475 return 0;
476 }
477 }
478 #endif
479 av_log(ctx, AV_LOG_ERROR, "Unknown timestamps\n");
480 return AVERROR(EIO);
481 }
482
483 static int convert_timestamp(AVFormatContext *ctx, int64_t *ts)
484 {
485 struct video_data *s = ctx->priv_data;
486
487 if (s->ts_mode) {
488 int r = init_convert_timestamp(ctx, *ts);
489 if (r < 0)
490 return r;
491 }
492 #if HAVE_CLOCK_GETTIME && defined(CLOCK_MONOTONIC)
493 if (s->timefilter) {
494 int64_t nowa = av_gettime();
495 int64_t nowm = av_gettime_monotonic();
496 ff_timefilter_update(s->timefilter, nowa, nowm - s->last_time_m);
497 s->last_time_m = nowm;
498 *ts = ff_timefilter_eval(s->timefilter, *ts - nowm);
499 }
500 #endif
501 return 0;
502 }
503
504 static int mmap_read_frame(AVFormatContext *ctx, AVPacket *pkt)
505 {
506 struct video_data *s = ctx->priv_data;
507 struct v4l2_buffer buf = {
508 .type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
509 .memory = V4L2_MEMORY_MMAP
510 };
511 struct timeval buf_ts;
512 int res;
513
514 pkt->size = 0;
515
516 /* FIXME: Some special treatment might be needed in case of loss of signal... */
517 while ((res = v4l2_ioctl(s->fd, VIDIOC_DQBUF, &buf)) < 0 && (errno == EINTR));
518 if (res < 0) {
519 if (errno == EAGAIN)
520 return AVERROR(EAGAIN);
521
522 res = AVERROR(errno);
523 av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_DQBUF): %s\n",
524 av_err2str(res));
525 return res;
526 }
527
528 buf_ts = buf.timestamp;
529
530 if (buf.index >= s->buffers) {
531 av_log(ctx, AV_LOG_ERROR, "Invalid buffer index received.\n");
532 return AVERROR(EINVAL);
533 }
534 atomic_fetch_add(&s->buffers_queued, -1);
535 // always keep at least one buffer queued
536 av_assert0(atomic_load(&s->buffers_queued) >= 1);
537
538 #ifdef V4L2_BUF_FLAG_ERROR
539 if (buf.flags & V4L2_BUF_FLAG_ERROR) {
540 av_log(ctx, AV_LOG_WARNING,
541 "Dequeued v4l2 buffer contains corrupted data (%d bytes).\n",
542 buf.bytesused);
543 buf.bytesused = 0;
544 } else
545 #endif
546 {
547 /* CPIA is a compressed format and we don't know the exact number of bytes
548 * used by a frame, so set it here as the driver announces it. */
549 if (ctx->video_codec_id == AV_CODEC_ID_CPIA)
550 s->frame_size = buf.bytesused;
551
552 if (s->frame_size > 0 && buf.bytesused != s->frame_size) {
553 av_log(ctx, AV_LOG_WARNING,
554 "Dequeued v4l2 buffer contains %d bytes, but %d were expected. Flags: 0x%08X.\n",
555 buf.bytesused, s->frame_size, buf.flags);
556 buf.bytesused = 0;
557 }
558 }
559
560 /* Image is at s->buf_start[buf.index] */
561 if (atomic_load(&s->buffers_queued) == FFMAX(s->buffers / 8, 1)) {
562 /* when we start getting low on queued buffers, fall back on copying data */
563 res = av_new_packet(pkt, buf.bytesused);
564 if (res < 0) {
565 av_log(ctx, AV_LOG_ERROR, "Error allocating a packet.\n");
566 enqueue_buffer(s, &buf);
567 return res;
568 }
569 memcpy(pkt->data, s->buf_start[buf.index], buf.bytesused);
570
571 res = enqueue_buffer(s, &buf);
572 if (res) {
573 av_packet_unref(pkt);
574 return res;
575 }
576 } else {
577 struct buff_data *buf_descriptor;
578
579 pkt->data = s->buf_start[buf.index];
580 pkt->size = buf.bytesused;
581
582 buf_descriptor = av_malloc(sizeof(struct buff_data));
583 if (!buf_descriptor) {
584 /* Something went wrong... Since av_malloc() failed, we cannot even
585 * allocate a buffer for memcpying into it
586 */
587 av_log(ctx, AV_LOG_ERROR, "Failed to allocate a buffer descriptor\n");
588 enqueue_buffer(s, &buf);
589
590 return AVERROR(ENOMEM);
591 }
592 buf_descriptor->index = buf.index;
593 buf_descriptor->s = s;
594
595 pkt->buf = av_buffer_create(pkt->data, pkt->size, mmap_release_buffer,
596 buf_descriptor, 0);
597 if (!pkt->buf) {
598 av_log(ctx, AV_LOG_ERROR, "Failed to create a buffer\n");
599 enqueue_buffer(s, &buf);
600 av_freep(&buf_descriptor);
601 return AVERROR(ENOMEM);
602 }
603 }
604 pkt->pts = buf_ts.tv_sec * INT64_C(1000000) + buf_ts.tv_usec;
605 convert_timestamp(ctx, &pkt->pts);
606
607 return pkt->size;
608 }
609
610 static int mmap_start(AVFormatContext *ctx)
611 {
612 struct video_data *s = ctx->priv_data;
613 enum v4l2_buf_type type;
614 int i, res;
615
616 for (i = 0; i < s->buffers; i++) {
617 struct v4l2_buffer buf = {
618 .type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
619 .index = i,
620 .memory = V4L2_MEMORY_MMAP
621 };
622
623 if (v4l2_ioctl(s->fd, VIDIOC_QBUF, &buf) < 0) {
624 res = AVERROR(errno);
625 av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_QBUF): %s\n",
626 av_err2str(res));
627 return res;
628 }
629 }
630 atomic_store(&s->buffers_queued, s->buffers);
631
632 type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
633 if (v4l2_ioctl(s->fd, VIDIOC_STREAMON, &type) < 0) {
634 res = AVERROR(errno);
635 av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_STREAMON): %s\n",
636 av_err2str(res));
637 return res;
638 }
639
640 return 0;
641 }
642
643 static void mmap_close(struct video_data *s)
644 {
645 enum v4l2_buf_type type;
646 int i;
647
648 type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
649 /* We do not check for the result, because we could
650 * not do anything about it anyway...
651 */
652 v4l2_ioctl(s->fd, VIDIOC_STREAMOFF, &type);
653 for (i = 0; i < s->buffers; i++) {
654 v4l2_munmap(s->buf_start[i], s->buf_len[i]);
655 }
656 av_freep(&s->buf_start);
657 av_freep(&s->buf_len);
658 }
659
660 static int v4l2_set_parameters(AVFormatContext *ctx)
661 {
662 struct video_data *s = ctx->priv_data;
663 struct v4l2_standard standard = { 0 };
664 struct v4l2_streamparm streamparm = { 0 };
665 struct v4l2_fract *tpf;
666 AVRational framerate_q = { 0 };
667 int i, ret;
668
669 if (s->framerate &&
670 (ret = av_parse_video_rate(&framerate_q, s->framerate)) < 0) {
671 av_log(ctx, AV_LOG_ERROR, "Could not parse framerate '%s'.\n",
672 s->framerate);
673 return ret;
674 }
675
676 if (s->standard) {
677 if (s->std_id) {
678 ret = 0;
679 av_log(ctx, AV_LOG_DEBUG, "Setting standard: %s\n", s->standard);
680 /* set tv standard */
681 for (i = 0; ; i++) {
682 standard.index = i;
683 if (v4l2_ioctl(s->fd, VIDIOC_ENUMSTD, &standard) < 0) {
684 ret = AVERROR(errno);
685 break;
686 }
687 if (!av_strcasecmp(standard.name, s->standard))
688 break;
689 }
690 if (ret < 0) {
691 av_log(ctx, AV_LOG_ERROR, "Unknown or unsupported standard '%s'\n", s->standard);
692 return ret;
693 }
694
695 if (v4l2_ioctl(s->fd, VIDIOC_S_STD, &standard.id) < 0) {
696 ret = AVERROR(errno);
697 av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_S_STD): %s\n", av_err2str(ret));
698 return ret;
699 }
700 } else {
701 av_log(ctx, AV_LOG_WARNING,
702 "This device does not support any standard\n");
703 }
704 }
705
706 /* get standard */
707 if (v4l2_ioctl(s->fd, VIDIOC_G_STD, &s->std_id) == 0) {
708 tpf = &standard.frameperiod;
709 for (i = 0; ; i++) {
710 standard.index = i;
711 if (v4l2_ioctl(s->fd, VIDIOC_ENUMSTD, &standard) < 0) {
712 ret = AVERROR(errno);
713 if (ret == AVERROR(EINVAL)
714 #ifdef ENODATA
715 || ret == AVERROR(ENODATA)
716 #endif
717 ) {
718 tpf = &streamparm.parm.capture.timeperframe;
719 break;
720 }
721 av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_ENUMSTD): %s\n", av_err2str(ret));
722 return ret;
723 }
724 if (standard.id == s->std_id) {
725 av_log(ctx, AV_LOG_DEBUG,
726 "Current standard: %s, id: %"PRIx64", frameperiod: %d/%d\n",
727 standard.name, (uint64_t)standard.id, tpf->numerator, tpf->denominator);
728 break;
729 }
730 }
731 } else {
732 tpf = &streamparm.parm.capture.timeperframe;
733 }
734
735 streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
736 if (v4l2_ioctl(s->fd, VIDIOC_G_PARM, &streamparm) < 0) {
737 ret = AVERROR(errno);
738 av_log(ctx, AV_LOG_WARNING, "ioctl(VIDIOC_G_PARM): %s\n", av_err2str(ret));
739 } else if (framerate_q.num && framerate_q.den) {
740 if (streamparm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) {
741 tpf = &streamparm.parm.capture.timeperframe;
742
743 av_log(ctx, AV_LOG_DEBUG, "Setting time per frame to %d/%d\n",
744 framerate_q.den, framerate_q.num);
745 tpf->numerator = framerate_q.den;
746 tpf->denominator = framerate_q.num;
747
748 if (v4l2_ioctl(s->fd, VIDIOC_S_PARM, &streamparm) < 0) {
749 ret = AVERROR(errno);
750 av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_S_PARM): %s\n",
751 av_err2str(ret));
752 return ret;
753 }
754
755 if (framerate_q.num != tpf->denominator ||
756 framerate_q.den != tpf->numerator) {
757 av_log(ctx, AV_LOG_INFO,
758 "The driver changed the time per frame from "
759 "%d/%d to %d/%d\n",
760 framerate_q.den, framerate_q.num,
761 tpf->numerator, tpf->denominator);
762 }
763 } else {
764 av_log(ctx, AV_LOG_WARNING,
765 "The driver does not permit changing the time per frame\n");
766 }
767 }
768 if (tpf->denominator > 0 && tpf->numerator > 0) {
769 ctx->streams[0]->avg_frame_rate.num = tpf->denominator;
770 ctx->streams[0]->avg_frame_rate.den = tpf->numerator;
771 ctx->streams[0]->r_frame_rate = ctx->streams[0]->avg_frame_rate;
772 } else
773 av_log(ctx, AV_LOG_WARNING, "Time per frame unknown\n");
774
775 return 0;
776 }
777
778 static int device_try_init(AVFormatContext *ctx,
779 enum AVPixelFormat pix_fmt,
780 int *width,
781 int *height,
782 uint32_t *desired_format,
783 enum AVCodecID *codec_id)
784 {
785 int ret, i;
786
787 *desired_format = ff_fmt_ff2v4l(pix_fmt, ctx->video_codec_id);
788
789 if (*desired_format) {
790 ret = device_init(ctx, width, height, *desired_format);
791 if (ret < 0) {
792 *desired_format = 0;
793 if (ret != AVERROR(EINVAL))
794 return ret;
795 }
796 }
797
798 if (!*desired_format) {
799 for (i = 0; ff_fmt_conversion_table[i].codec_id != AV_CODEC_ID_NONE; i++) {
800 if (ctx->video_codec_id == AV_CODEC_ID_NONE ||
801 ff_fmt_conversion_table[i].codec_id == ctx->video_codec_id) {
802 av_log(ctx, AV_LOG_DEBUG, "Trying to set codec:%s pix_fmt:%s\n",
803 avcodec_get_name(ff_fmt_conversion_table[i].codec_id),
804 (char *)av_x_if_null(av_get_pix_fmt_name(ff_fmt_conversion_table[i].ff_fmt), "none"));
805
806 *desired_format = ff_fmt_conversion_table[i].v4l2_fmt;
807 ret = device_init(ctx, width, height, *desired_format);
808 if (ret >= 0)
809 break;
810 else if (ret != AVERROR(EINVAL))
811 return ret;
812 *desired_format = 0;
813 }
814 }
815
816 if (*desired_format == 0) {
817 av_log(ctx, AV_LOG_ERROR, "Cannot find a proper format for "
818 "codec '%s' (id %d), pixel format '%s' (id %d)\n",
819 avcodec_get_name(ctx->video_codec_id), ctx->video_codec_id,
820 (char *)av_x_if_null(av_get_pix_fmt_name(pix_fmt), "none"), pix_fmt);
821 ret = AVERROR(EINVAL);
822 }
823 }
824
825 *codec_id = ff_fmt_v4l2codec(*desired_format);
826 if (*codec_id == AV_CODEC_ID_NONE)
827 av_assert0(ret == AVERROR(EINVAL));
828 return ret;
829 }
830
831 3643 static int v4l2_read_probe(const AVProbeData *p)
832 {
833 1/2 3643 if (av_strstart(p->filename, "/dev/video", NULL))
✗ Branch 1 not taken.
✓ Branch 2 taken 3643 times.
834 return AVPROBE_SCORE_MAX - 1;
835 3643 return 0;
836 }
837
838 static int v4l2_read_header(AVFormatContext *ctx)
839 {
840 struct video_data *s = ctx->priv_data;
841 AVStream *st;
842 int res = 0;
843 uint32_t desired_format;
844 enum AVCodecID codec_id = AV_CODEC_ID_NONE;
845 enum AVPixelFormat pix_fmt = AV_PIX_FMT_NONE;
846 struct v4l2_input input = { 0 };
847
848 st = avformat_new_stream(ctx, NULL);
849 if (!st)
850 return AVERROR(ENOMEM);
851
852 #if CONFIG_LIBV4L2
853 /* silence libv4l2 logging. if fopen() fails v4l2_log_file will be NULL
854 and errors will get sent to stderr */
855 if (s->use_libv4l2)
856 v4l2_log_file = fopen("/dev/null", "w");
857 #endif
858
859 s->fd = device_open(ctx, ctx->url);
860 if (s->fd < 0)
861 return s->fd;
862
863 if (s->channel != -1) {
864 /* set video input */
865 av_log(ctx, AV_LOG_DEBUG, "Selecting input_channel: %d\n", s->channel);
866 if (v4l2_ioctl(s->fd, VIDIOC_S_INPUT, &s->channel) < 0) {
867 res = AVERROR(errno);
868 av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_S_INPUT): %s\n", av_err2str(res));
869 goto fail;
870 }
871 } else {
872 /* get current video input */
873 if (v4l2_ioctl(s->fd, VIDIOC_G_INPUT, &s->channel) < 0) {
874 res = AVERROR(errno);
875 av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_G_INPUT): %s\n", av_err2str(res));
876 goto fail;
877 }
878 }
879
880 /* enum input */
881 input.index = s->channel;
882 if (v4l2_ioctl(s->fd, VIDIOC_ENUMINPUT, &input) < 0) {
883 res = AVERROR(errno);
884 av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_ENUMINPUT): %s\n", av_err2str(res));
885 goto fail;
886 }
887 s->std_id = input.std;
888 av_log(ctx, AV_LOG_DEBUG, "Current input_channel: %d, input_name: %s, input_std: %"PRIx64"\n",
889 s->channel, input.name, (uint64_t)input.std);
890
891 if (s->list_format) {
892 list_formats(ctx, s->list_format);
893 res = AVERROR_EXIT;
894 goto fail;
895 }
896
897 if (s->list_standard) {
898 list_standards(ctx);
899 res = AVERROR_EXIT;
900 goto fail;
901 }
902
903 avpriv_set_pts_info(st, 64, 1, 1000000); /* 64 bits pts in us */
904
905 if (s->pixel_format) {
906 const AVCodecDescriptor *desc = avcodec_descriptor_get_by_name(s->pixel_format);
907
908 if (desc)
909 ctx->video_codec_id = desc->id;
910
911 pix_fmt = av_get_pix_fmt(s->pixel_format);
912
913 if (pix_fmt == AV_PIX_FMT_NONE && !desc) {
914 av_log(ctx, AV_LOG_ERROR, "No such input format: %s.\n",
915 s->pixel_format);
916
917 res = AVERROR(EINVAL);
918 goto fail;
919 }
920 }
921
922 if (!s->width && !s->height) {
923 struct v4l2_format fmt = { .type = V4L2_BUF_TYPE_VIDEO_CAPTURE };
924
925 av_log(ctx, AV_LOG_VERBOSE,
926 "Querying the device for the current frame size\n");
927 if (v4l2_ioctl(s->fd, VIDIOC_G_FMT, &fmt) < 0) {
928 res = AVERROR(errno);
929 av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_G_FMT): %s\n",
930 av_err2str(res));
931 goto fail;
932 }
933
934 s->width = fmt.fmt.pix.width;
935 s->height = fmt.fmt.pix.height;
936 av_log(ctx, AV_LOG_VERBOSE,
937 "Setting frame size to %dx%d\n", s->width, s->height);
938 }
939
940 res = device_try_init(ctx, pix_fmt, &s->width, &s->height, &desired_format, &codec_id);
941 if (res < 0)
942 goto fail;
943
944 /* If no pixel_format was specified, the codec_id was not known up
945 * until now. Set video_codec_id in the context, as codec_id will
946 * not be available outside this function
947 */
948 if (codec_id != AV_CODEC_ID_NONE && ctx->video_codec_id == AV_CODEC_ID_NONE)
949 ctx->video_codec_id = codec_id;
950
951 if ((res = av_image_check_size(s->width, s->height, 0, ctx)) < 0)
952 goto fail;
953
954 s->pixelformat = desired_format;
955
956 if ((res = v4l2_set_parameters(ctx)) < 0)
957 goto fail;
958
959 st->codecpar->format = ff_fmt_v4l2ff(desired_format, codec_id);
960 if (st->codecpar->format != AV_PIX_FMT_NONE)
961 s->frame_size = av_image_get_buffer_size(st->codecpar->format,
962 s->width, s->height, 1);
963
964 if ((res = mmap_init(ctx)) ||
965 (res = mmap_start(ctx)) < 0)
966 goto fail;
967
968 s->top_field_first = first_field(s);
969
970 st->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
971 st->codecpar->codec_id = codec_id;
972 if (codec_id == AV_CODEC_ID_RAWVIDEO)
973 st->codecpar->codec_tag =
974 avcodec_pix_fmt_to_codec_tag(st->codecpar->format);
975 else if (codec_id == AV_CODEC_ID_H264) {
976 avpriv_stream_set_need_parsing(st, AVSTREAM_PARSE_FULL_ONCE);
977 }
978 if (desired_format == V4L2_PIX_FMT_YVU420)
979 st->codecpar->codec_tag = MKTAG('Y', 'V', '1', '2');
980 else if (desired_format == V4L2_PIX_FMT_YVU410)
981 st->codecpar->codec_tag = MKTAG('Y', 'V', 'U', '9');
982 st->codecpar->width = s->width;
983 st->codecpar->height = s->height;
984 if (st->avg_frame_rate.den)
985 st->codecpar->bit_rate = s->frame_size * av_q2d(st->avg_frame_rate) * 8;
986
987 return 0;
988
989 fail:
990 v4l2_close(s->fd);
991 return res;
992 }
993
994 static int v4l2_read_packet(AVFormatContext *ctx, AVPacket *pkt)
995 {
996 int res;
997
998 if ((res = mmap_read_frame(ctx, pkt)) < 0) {
999 return res;
1000 }
1001
1002 return pkt->size;
1003 }
1004
1005 static int v4l2_read_close(AVFormatContext *ctx)
1006 {
1007 struct video_data *s = ctx->priv_data;
1008
1009 if (atomic_load(&s->buffers_queued) != s->buffers)
1010 av_log(ctx, AV_LOG_WARNING, "Some buffers are still owned by the caller on "
1011 "close.\n");
1012
1013 mmap_close(s);
1014
1015 ff_timefilter_destroy(s->timefilter);
1016 v4l2_close(s->fd);
1017 return 0;
1018 }
1019
1020 static int v4l2_is_v4l_dev(const char *name)
1021 {
1022 return !strncmp(name, "video", 5) ||
1023 !strncmp(name, "radio", 5) ||
1024 !strncmp(name, "vbi", 3) ||
1025 !strncmp(name, "v4l-subdev", 10);
1026 }
1027
1028 static int v4l2_get_device_list(AVFormatContext *ctx, AVDeviceInfoList *device_list)
1029 {
1030 struct video_data *s = ctx->priv_data;
1031 DIR *dir;
1032 struct dirent *entry;
1033 int ret = 0;
1034
1035 if (!device_list)
1036 return AVERROR(EINVAL);
1037
1038 dir = opendir("/dev");
1039 if (!dir) {
1040 ret = AVERROR(errno);
1041 av_log(ctx, AV_LOG_ERROR, "Couldn't open the directory: %s\n", av_err2str(ret));
1042 return ret;
1043 }
1044 while ((entry = readdir(dir))) {
1045 AVDeviceInfo *device = NULL;
1046 struct v4l2_capability cap;
1047 int fd = -1, size;
1048 char device_name[256];
1049
1050 if (!v4l2_is_v4l_dev(entry->d_name))
1051 continue;
1052
1053 size = snprintf(device_name, sizeof(device_name), "/dev/%s", entry->d_name);
1054 if (size >= sizeof(device_name)) {
1055 av_log(ctx, AV_LOG_ERROR, "Device name too long.\n");
1056 ret = AVERROR(ENOSYS);
1057 break;
1058 }
1059
1060 if ((fd = device_open(ctx, device_name)) < 0)
1061 continue;
1062
1063 if (v4l2_ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) {
1064 ret = AVERROR(errno);
1065 av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_QUERYCAP): %s\n", av_err2str(ret));
1066 goto fail;
1067 }
1068
1069 device = av_mallocz(sizeof(AVDeviceInfo));
1070 if (!device) {
1071 ret = AVERROR(ENOMEM);
1072 goto fail;
1073 }
1074 device->device_name = av_strdup(device_name);
1075 device->device_description = av_strdup(cap.card);
1076 if (!device->device_name || !device->device_description) {
1077 ret = AVERROR(ENOMEM);
1078 goto fail;
1079 }
1080
1081 if ((ret = av_dynarray_add_nofree(&device_list->devices,
1082 &device_list->nb_devices, device)) < 0)
1083 goto fail;
1084
1085 v4l2_close(fd);
1086 continue;
1087
1088 fail:
1089 if (device) {
1090 av_freep(&device->device_name);
1091 av_freep(&device->device_description);
1092 av_freep(&device);
1093 }
1094 v4l2_close(fd);
1095 break;
1096 }
1097 closedir(dir);
1098 return ret;
1099 }
1100
1101 #define OFFSET(x) offsetof(struct video_data, x)
1102 #define DEC AV_OPT_FLAG_DECODING_PARAM
1103
1104 static const AVOption options[] = {
1105 { "standard", "set TV standard, used only by analog frame grabber", OFFSET(standard), AV_OPT_TYPE_STRING, {.str = NULL }, 0, 0, DEC },
1106 { "channel", "set TV channel, used only by frame grabber", OFFSET(channel), AV_OPT_TYPE_INT, {.i64 = -1 }, -1, INT_MAX, DEC },
1107 { "video_size", "set frame size", OFFSET(width), AV_OPT_TYPE_IMAGE_SIZE, {.str = NULL}, 0, 0, DEC },
1108 { "pixel_format", "set preferred pixel format", OFFSET(pixel_format), AV_OPT_TYPE_STRING, {.str = NULL}, 0, 0, DEC },
1109 { "input_format", "set preferred pixel format (for raw video) or codec name", OFFSET(pixel_format), AV_OPT_TYPE_STRING, {.str = NULL}, 0, 0, DEC },
1110 { "framerate", "set frame rate", OFFSET(framerate), AV_OPT_TYPE_STRING, {.str = NULL}, 0, 0, DEC },
1111
1112 { "list_formats", "list available formats and exit", OFFSET(list_format), AV_OPT_TYPE_INT, {.i64 = 0 }, 0, INT_MAX, DEC, .unit = "list_formats" },
1113 { "all", "show all available formats", OFFSET(list_format), AV_OPT_TYPE_CONST, {.i64 = V4L_ALLFORMATS }, 0, INT_MAX, DEC, .unit = "list_formats" },
1114 { "raw", "show only non-compressed formats", OFFSET(list_format), AV_OPT_TYPE_CONST, {.i64 = V4L_RAWFORMATS }, 0, INT_MAX, DEC, .unit = "list_formats" },
1115 { "compressed", "show only compressed formats", OFFSET(list_format), AV_OPT_TYPE_CONST, {.i64 = V4L_COMPFORMATS }, 0, INT_MAX, DEC, .unit = "list_formats" },
1116
1117 { "list_standards", "list supported standards and exit", OFFSET(list_standard), AV_OPT_TYPE_INT, {.i64 = 0 }, 0, 1, DEC, .unit = "list_standards" },
1118 { "all", "show all supported standards", OFFSET(list_standard), AV_OPT_TYPE_CONST, {.i64 = 1 }, 0, 0, DEC, .unit = "list_standards" },
1119
1120 { "timestamps", "set type of timestamps for grabbed frames", OFFSET(ts_mode), AV_OPT_TYPE_INT, {.i64 = 0 }, 0, 2, DEC, .unit = "timestamps" },
1121 { "ts", "set type of timestamps for grabbed frames", OFFSET(ts_mode), AV_OPT_TYPE_INT, {.i64 = 0 }, 0, 2, DEC, .unit = "timestamps" },
1122 { "default", "use timestamps from the kernel", OFFSET(ts_mode), AV_OPT_TYPE_CONST, {.i64 = V4L_TS_DEFAULT }, 0, 2, DEC, .unit = "timestamps" },
1123 { "abs", "use absolute timestamps (wall clock)", OFFSET(ts_mode), AV_OPT_TYPE_CONST, {.i64 = V4L_TS_ABS }, 0, 2, DEC, .unit = "timestamps" },
1124 { "mono2abs", "force conversion from monotonic to absolute timestamps", OFFSET(ts_mode), AV_OPT_TYPE_CONST, {.i64 = V4L_TS_MONO2ABS }, 0, 2, DEC, .unit = "timestamps" },
1125 { "use_libv4l2", "use libv4l2 (v4l-utils) conversion functions", OFFSET(use_libv4l2), AV_OPT_TYPE_BOOL, {.i64 = 0}, 0, 1, DEC },
1126 { NULL },
1127 };
1128
1129 static const AVClass v4l2_class = {
1130 .class_name = "V4L2 indev",
1131 .item_name = av_default_item_name,
1132 .option = options,
1133 .version = LIBAVUTIL_VERSION_INT,
1134 .category = AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT,
1135 };
1136
1137 const AVInputFormat ff_v4l2_demuxer = {
1138 .name = "video4linux2,v4l2",
1139 .long_name = NULL_IF_CONFIG_SMALL("Video4Linux2 device grab"),
1140 .priv_data_size = sizeof(struct video_data),
1141 .read_probe = v4l2_read_probe,
1142 .read_header = v4l2_read_header,
1143 .read_packet = v4l2_read_packet,
1144 .read_close = v4l2_read_close,
1145 .get_device_list = v4l2_get_device_list,
1146 .flags = AVFMT_NOFILE,
1147 .priv_class = &v4l2_class,
1148 };
1149
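
Usage note: the ff_v4l2_demuxer declared above is driven through the generic libavformat API, which is how v4l2_read_header(), v4l2_read_packet() and v4l2_read_close() are reached. A minimal sketch, assuming libavdevice is enabled and a capture device exists at /dev/video0 (both assumptions, not taken from this report):

    #include <libavdevice/avdevice.h>
    #include <libavformat/avformat.h>
    #include <libavutil/dict.h>

    int main(void)
    {
        const AVInputFormat *ifmt;
        AVFormatContext *ic = NULL;
        AVDictionary *opts = NULL;
        AVPacket *pkt = av_packet_alloc();
        int ret, i;

        avdevice_register_all();             /* registers ff_v4l2_demuxer */
        ifmt = av_find_input_format("v4l2"); /* matches "video4linux2,v4l2" */
        if (!ifmt || !pkt)
            return 1;

        /* option names correspond to the AVOption table in the listing */
        av_dict_set(&opts, "video_size", "640x480", 0);
        av_dict_set(&opts, "framerate", "25", 0);

        /* calls v4l2_read_header() via the demuxer's read_header callback */
        ret = avformat_open_input(&ic, "/dev/video0", ifmt, &opts);
        av_dict_free(&opts);
        if (ret < 0)
            return 1;

        /* each av_read_frame() ends up in v4l2_read_packet()/mmap_read_frame() */
        for (i = 0; i < 10 && av_read_frame(ic, pkt) >= 0; i++)
            av_packet_unref(pkt);

        avformat_close_input(&ic);           /* triggers v4l2_read_close() */
        av_packet_free(&pkt);
        return 0;
    }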