v4l2.c
/*
 * Copyright (c) 2000,2001 Fabrice Bellard
 * Copyright (c) 2006 Luca Abeni
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * Video4Linux2 grab interface
 *
 * Part of this file is based on the V4L2 video capture example
 * (http://linuxtv.org/downloads/v4l-dvb-apis/capture-example.html)
 *
 * Thanks to Michael Niedermayer for providing the mapping between
 * V4L2_PIX_FMT_* and AV_PIX_FMT_*
 */

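/*
 * Usage note: this grab interface is exposed through the "video4linux2,v4l2"
 * demuxer declared at the bottom of this file, so it is typically reached
 * either from the command line, e.g.
 *     ffmpeg -f v4l2 -i /dev/video0 out.mkv
 * or programmatically, by passing the device path as the URL to
 * avformat_open_input() together with av_find_input_format("v4l2").
 */
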
#include <stdatomic.h>

#include "libavutil/avassert.h"
#include "libavutil/avstring.h"
#include "v4l2-common.h"
#include <dirent.h>

#if CONFIG_LIBV4L2
#include <libv4l2.h>
#endif

static const int desired_video_buffers = 256;

#define V4L_ALLFORMATS  3
#define V4L_RAWFORMATS  1
#define V4L_COMPFORMATS 2

/**
 * Return timestamps to the user exactly as returned by the kernel
 */
#define V4L_TS_DEFAULT  0
/**
 * Autodetect the kind of timestamps returned by the kernel and convert to
 * absolute (wall clock) timestamps.
 */
#define V4L_TS_ABS      1
/**
 * Assume kernel timestamps are from the monotonic clock and convert to
 * absolute timestamps.
 */
#define V4L_TS_MONO2ABS 2

/**
 * Once the kind of timestamps returned by the kernel has been detected,
 * the value of the timefilter (NULL or not) determines whether a conversion
 * takes place.
 */
#define V4L_TS_CONVERT_READY V4L_TS_DEFAULT

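/*
 * The V4L_TS_* values above map to the "timestamps" (alias "ts") private
 * option defined at the bottom of this file; e.g. "-ts mono2abs" requests
 * conversion from monotonic kernel timestamps to absolute ones.
 */
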
struct video_data {
    AVClass *class;
    int fd;
    int pixelformat; /* V4L2_PIX_FMT_* */
    int width, height;
    int frame_size;
    int interlaced;
    int top_field_first;
    int ts_mode;
    TimeFilter *timefilter;
    int64_t last_time_m;

    int buffers;
    atomic_int buffers_queued;
    void **buf_start;
    unsigned int *buf_len;
    char *standard;
    v4l2_std_id std_id;
    int channel;
    char *pixel_format; /**< Set by a private option. */
    int list_format;    /**< Set by a private option. */
    int list_standard;  /**< Set by a private option. */
    char *framerate;    /**< Set by a private option. */

    int use_libv4l2;
    int (*open_f)(const char *file, int oflag, ...);
    int (*close_f)(int fd);
    int (*dup_f)(int fd);
#ifdef __GLIBC__
    int (*ioctl_f)(int fd, unsigned long int request, ...);
#else
    int (*ioctl_f)(int fd, int request, ...);
#endif
    ssize_t (*read_f)(int fd, void *buffer, size_t n);
    void *(*mmap_f)(void *start, size_t length, int prot, int flags, int fd, int64_t offset);
    int (*munmap_f)(void *_start, size_t length);
};

/* context for mmap_release_buffer(): identifies which mmap'ed buffer to re-queue */
struct buff_data {
    struct video_data *s;
    int index;
};

static int device_open(AVFormatContext *ctx, const char* device_path)
{
    struct video_data *s = ctx->priv_data;
    struct v4l2_capability cap;
    int fd;
    int err;
    int flags = O_RDWR;

#define SET_WRAPPERS(prefix) do {       \
    s->open_f   = prefix ## open;       \
    s->close_f  = prefix ## close;      \
    s->dup_f    = prefix ## dup;        \
    s->ioctl_f  = prefix ## ioctl;      \
    s->read_f   = prefix ## read;       \
    s->mmap_f   = prefix ## mmap;       \
    s->munmap_f = prefix ## munmap;     \
} while (0)

    if (s->use_libv4l2) {
#if CONFIG_LIBV4L2
        SET_WRAPPERS(v4l2_);
#else
        av_log(ctx, AV_LOG_ERROR, "libavdevice is not built with libv4l2 support.\n");
        return AVERROR(EINVAL);
#endif
    } else {
        SET_WRAPPERS();
    }

/* from here on, the raw I/O calls go through the wrappers selected above */
#define v4l2_open   s->open_f
#define v4l2_close  s->close_f
#define v4l2_dup    s->dup_f
#define v4l2_ioctl  s->ioctl_f
#define v4l2_read   s->read_f
#define v4l2_mmap   s->mmap_f
#define v4l2_munmap s->munmap_f

    if (ctx->flags & AVFMT_FLAG_NONBLOCK) {
        flags |= O_NONBLOCK;
    }

    fd = v4l2_open(device_path, flags, 0);
    if (fd < 0) {
        err = AVERROR(errno);
        av_log(ctx, AV_LOG_ERROR, "Cannot open video device %s: %s\n",
               device_path, av_err2str(err));
        return err;
    }

    if (v4l2_ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) {
        err = AVERROR(errno);
        av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_QUERYCAP): %s\n",
               av_err2str(err));
        goto fail;
    }

    av_log(ctx, AV_LOG_VERBOSE, "fd:%d capabilities:%x\n",
           fd, cap.capabilities);

    if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
        av_log(ctx, AV_LOG_ERROR, "Not a video capture device.\n");
        err = AVERROR(ENODEV);
        goto fail;
    }

    if (!(cap.capabilities & V4L2_CAP_STREAMING)) {
        av_log(ctx, AV_LOG_ERROR,
               "The device does not support the streaming I/O method.\n");
        err = AVERROR(ENOSYS);
        goto fail;
    }

    return fd;

fail:
    v4l2_close(fd);
    return err;
}

static int device_init(AVFormatContext *ctx, int *width, int *height,
                       uint32_t pixelformat)
{
    struct video_data *s = ctx->priv_data;
    struct v4l2_format fmt = { .type = V4L2_BUF_TYPE_VIDEO_CAPTURE };
    int res = 0;

    fmt.fmt.pix.width = *width;
    fmt.fmt.pix.height = *height;
    fmt.fmt.pix.pixelformat = pixelformat;
    fmt.fmt.pix.field = V4L2_FIELD_ANY;

    /* Some drivers will fail and return EINVAL when the pixelformat
       is not supported (even if type field is valid and supported) */
    if (v4l2_ioctl(s->fd, VIDIOC_S_FMT, &fmt) < 0)
        res = AVERROR(errno);

    if ((*width != fmt.fmt.pix.width) || (*height != fmt.fmt.pix.height)) {
        av_log(ctx, AV_LOG_INFO,
               "The V4L2 driver changed the video from %dx%d to %dx%d\n",
               *width, *height, fmt.fmt.pix.width, fmt.fmt.pix.height);
        *width = fmt.fmt.pix.width;
        *height = fmt.fmt.pix.height;
    }

    if (pixelformat != fmt.fmt.pix.pixelformat) {
        av_log(ctx, AV_LOG_DEBUG,
               "The V4L2 driver changed the pixel format "
               "from 0x%08X to 0x%08X\n",
               pixelformat, fmt.fmt.pix.pixelformat);
        res = AVERROR(EINVAL);
    }

    if (fmt.fmt.pix.field == V4L2_FIELD_INTERLACED) {
        av_log(ctx, AV_LOG_DEBUG,
               "The V4L2 driver is using the interlaced mode\n");
        s->interlaced = 1;
    }

    return res;
}

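/* Used to initialize s->top_field_first: NTSC material is normally bottom
 * field first, so return 0 for NTSC (or when the standard cannot be queried)
 * and 1 for everything else. */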
static int first_field(const struct video_data *s)
{
    int res;
    v4l2_std_id std;

    res = v4l2_ioctl(s->fd, VIDIOC_G_STD, &std);
    if (res < 0)
        return 0;
    if (std & V4L2_STD_NTSC)
        return 0;

    return 1;
}

#if HAVE_STRUCT_V4L2_FRMIVALENUM_DISCRETE
static void list_framesizes(AVFormatContext *ctx, uint32_t pixelformat)
{
    const struct video_data *s = ctx->priv_data;
    struct v4l2_frmsizeenum vfse = { .pixel_format = pixelformat };

    while(!v4l2_ioctl(s->fd, VIDIOC_ENUM_FRAMESIZES, &vfse)) {
        switch (vfse.type) {
        case V4L2_FRMSIZE_TYPE_DISCRETE:
            av_log(ctx, AV_LOG_INFO, " %ux%u",
                   vfse.discrete.width, vfse.discrete.height);
            break;
        case V4L2_FRMSIZE_TYPE_CONTINUOUS:
        case V4L2_FRMSIZE_TYPE_STEPWISE:
            av_log(ctx, AV_LOG_INFO, " {%u-%u, %u}x{%u-%u, %u}",
                   vfse.stepwise.min_width,
                   vfse.stepwise.max_width,
                   vfse.stepwise.step_width,
                   vfse.stepwise.min_height,
                   vfse.stepwise.max_height,
                   vfse.stepwise.step_height);
        }
        vfse.index++;
    }
}
#endif

static void list_formats(AVFormatContext *ctx, int type)
{
    const struct video_data *s = ctx->priv_data;
    struct v4l2_fmtdesc vfd = { .type = V4L2_BUF_TYPE_VIDEO_CAPTURE };

    while(!v4l2_ioctl(s->fd, VIDIOC_ENUM_FMT, &vfd)) {
        enum AVCodecID codec_id = ff_fmt_v4l2codec(vfd.pixelformat);
        enum AVPixelFormat pix_fmt = ff_fmt_v4l2ff(vfd.pixelformat, codec_id);

        vfd.index++;

        if (!(vfd.flags & V4L2_FMT_FLAG_COMPRESSED) &&
            type & V4L_RAWFORMATS) {
            const char *fmt_name = av_get_pix_fmt_name(pix_fmt);
            av_log(ctx, AV_LOG_INFO, "Raw       : %11s : %20s :",
                   fmt_name ? fmt_name : "Unsupported",
                   vfd.description);
        } else if (vfd.flags & V4L2_FMT_FLAG_COMPRESSED &&
                   type & V4L_COMPFORMATS) {
            const AVCodecDescriptor *desc = avcodec_descriptor_get(codec_id);
            av_log(ctx, AV_LOG_INFO, "Compressed: %11s : %20s :",
                   desc ? desc->name : "Unsupported",
                   vfd.description);
        } else {
            continue;
        }

#ifdef V4L2_FMT_FLAG_EMULATED
        if (vfd.flags & V4L2_FMT_FLAG_EMULATED)
            av_log(ctx, AV_LOG_INFO, " Emulated :");
#endif
#if HAVE_STRUCT_V4L2_FRMIVALENUM_DISCRETE
        list_framesizes(ctx, vfd.pixelformat);
#endif
        av_log(ctx, AV_LOG_INFO, "\n");
    }
}

static void list_standards(AVFormatContext *ctx)
{
    int ret;
    struct video_data *s = ctx->priv_data;
    struct v4l2_standard standard;

    if (s->std_id == 0)
        return;

    for (standard.index = 0; ; standard.index++) {
        if (v4l2_ioctl(s->fd, VIDIOC_ENUMSTD, &standard) < 0) {
            ret = AVERROR(errno);
            if (ret == AVERROR(EINVAL)) {
                break;
            } else {
                av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_ENUMSTD): %s\n", av_err2str(ret));
                return;
            }
        }
        av_log(ctx, AV_LOG_INFO, "%2d, %16"PRIx64", %s\n",
               standard.index, (uint64_t)standard.id, standard.name);
    }
}

static int mmap_init(AVFormatContext *ctx)
{
    int i, res;
    struct video_data *s = ctx->priv_data;
    struct v4l2_requestbuffers req = {
        .type   = V4L2_BUF_TYPE_VIDEO_CAPTURE,
        .count  = desired_video_buffers,
        .memory = V4L2_MEMORY_MMAP
    };

    if (v4l2_ioctl(s->fd, VIDIOC_REQBUFS, &req) < 0) {
        res = AVERROR(errno);
        av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_REQBUFS): %s\n", av_err2str(res));
        return res;
    }

    if (req.count < 2) {
        av_log(ctx, AV_LOG_ERROR, "Insufficient buffer memory\n");
        return AVERROR(ENOMEM);
    }
    s->buffers = req.count;
    s->buf_start = av_malloc_array(s->buffers, sizeof(void *));
    if (!s->buf_start) {
        av_log(ctx, AV_LOG_ERROR, "Cannot allocate buffer pointers\n");
        return AVERROR(ENOMEM);
    }
    s->buf_len = av_malloc_array(s->buffers, sizeof(unsigned int));
    if (!s->buf_len) {
        av_log(ctx, AV_LOG_ERROR, "Cannot allocate buffer sizes\n");
        av_freep(&s->buf_start);
        return AVERROR(ENOMEM);
    }

    for (i = 0; i < req.count; i++) {
        struct v4l2_buffer buf = {
            .type   = V4L2_BUF_TYPE_VIDEO_CAPTURE,
            .index  = i,
            .memory = V4L2_MEMORY_MMAP
        };
        if (v4l2_ioctl(s->fd, VIDIOC_QUERYBUF, &buf) < 0) {
            res = AVERROR(errno);
            av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_QUERYBUF): %s\n", av_err2str(res));
            return res;
        }

        s->buf_len[i] = buf.length;
        if (s->frame_size > 0 && s->buf_len[i] < s->frame_size) {
            av_log(ctx, AV_LOG_ERROR,
                   "buf_len[%d] = %d < expected frame size %d\n",
                   i, s->buf_len[i], s->frame_size);
            return AVERROR(ENOMEM);
        }
        s->buf_start[i] = v4l2_mmap(NULL, buf.length,
                                    PROT_READ | PROT_WRITE, MAP_SHARED,
                                    s->fd, buf.m.offset);

        if (s->buf_start[i] == MAP_FAILED) {
            res = AVERROR(errno);
            av_log(ctx, AV_LOG_ERROR, "mmap: %s\n", av_err2str(res));
            return res;
        }
    }

    return 0;
}

static int enqueue_buffer(struct video_data *s, struct v4l2_buffer *buf)
{
    int res = 0;

    if (v4l2_ioctl(s->fd, VIDIOC_QBUF, buf) < 0) {
        res = AVERROR(errno);
        av_log(NULL, AV_LOG_ERROR, "ioctl(VIDIOC_QBUF): %s\n", av_err2str(res));
    } else {
        atomic_fetch_add(&s->buffers_queued, 1);
    }

    return res;
}

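/* AVBuffer free callback registered by mmap_read_frame() via av_buffer_create():
 * once the last reference to a zero-copy packet is released, give the mmap'ed
 * buffer back to the driver by queueing it again. */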
static void mmap_release_buffer(void *opaque, uint8_t *data)
{
    struct v4l2_buffer buf = { 0 };
    struct buff_data *buf_descriptor = opaque;
    struct video_data *s = buf_descriptor->s;

    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    buf.index = buf_descriptor->index;
    av_free(buf_descriptor);

    enqueue_buffer(s, &buf);
}

#if HAVE_CLOCK_GETTIME && defined(CLOCK_MONOTONIC)
static int64_t av_gettime_monotonic(void)
{
    return av_gettime_relative();
}
#endif

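/* Figure out, from the first dequeued timestamp, how kernel timestamps relate
 * to the wall clock: if the value already lies within roughly [now - 10s,
 * now + 1s] it is taken as absolute; otherwise, if it matches the monotonic
 * clock (or mono2abs was requested), a DLL time filter is created to map
 * monotonic timestamps to absolute ones. */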
static int init_convert_timestamp(AVFormatContext *ctx, int64_t ts)
{
    struct video_data *s = ctx->priv_data;
    int64_t now;

    now = av_gettime();
    if (s->ts_mode == V4L_TS_ABS &&
        ts <= now + 1 * AV_TIME_BASE && ts >= now - 10 * AV_TIME_BASE) {
        av_log(ctx, AV_LOG_INFO, "Detected absolute timestamps\n");
        s->ts_mode = V4L_TS_CONVERT_READY;
        return 0;
    }
#if HAVE_CLOCK_GETTIME && defined(CLOCK_MONOTONIC)
    if (ctx->streams[0]->avg_frame_rate.num) {
        now = av_gettime_monotonic();
        if (s->ts_mode == V4L_TS_MONO2ABS ||
            (ts <= now + 1 * AV_TIME_BASE && ts >= now - 10 * AV_TIME_BASE)) {
            AVRational tb = {AV_TIME_BASE, 1};
            int64_t period = av_rescale_q(1, tb, ctx->streams[0]->avg_frame_rate);
            av_log(ctx, AV_LOG_INFO, "Detected monotonic timestamps, converting\n");
            /* microseconds instead of seconds, MHz instead of Hz */
            s->timefilter = ff_timefilter_new(1, period, 1.0E-6);
            if (!s->timefilter)
                return AVERROR(ENOMEM);
            s->ts_mode = V4L_TS_CONVERT_READY;
            return 0;
        }
    }
#endif
    av_log(ctx, AV_LOG_ERROR, "Unknown timestamps\n");
    return AVERROR(EIO);
}

static int convert_timestamp(AVFormatContext *ctx, int64_t *ts)
{
    struct video_data *s = ctx->priv_data;

    if (s->ts_mode) {
        int r = init_convert_timestamp(ctx, *ts);
        if (r < 0)
            return r;
    }
#if HAVE_CLOCK_GETTIME && defined(CLOCK_MONOTONIC)
    if (s->timefilter) {
        int64_t nowa = av_gettime();
        int64_t nowm = av_gettime_monotonic();
        ff_timefilter_update(s->timefilter, nowa, nowm - s->last_time_m);
        s->last_time_m = nowm;
        *ts = ff_timefilter_eval(s->timefilter, *ts - nowm);
    }
#endif
    return 0;
}

static int mmap_read_frame(AVFormatContext *ctx, AVPacket *pkt)
{
    struct video_data *s = ctx->priv_data;
    struct v4l2_buffer buf = {
        .type   = V4L2_BUF_TYPE_VIDEO_CAPTURE,
        .memory = V4L2_MEMORY_MMAP
    };
    struct timeval buf_ts;
    int res;

    pkt->size = 0;

    /* FIXME: Some special treatment might be needed in case of loss of signal... */
    while ((res = v4l2_ioctl(s->fd, VIDIOC_DQBUF, &buf)) < 0 && (errno == EINTR));
    if (res < 0) {
        if (errno == EAGAIN)
            return AVERROR(EAGAIN);

        res = AVERROR(errno);
        av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_DQBUF): %s\n",
               av_err2str(res));
        return res;
    }

    buf_ts = buf.timestamp;

    if (buf.index >= s->buffers) {
        av_log(ctx, AV_LOG_ERROR, "Invalid buffer index received.\n");
        return AVERROR(EINVAL);
    }
    atomic_fetch_add(&s->buffers_queued, -1);
    // always keep at least one buffer queued
    av_assert0(atomic_load(&s->buffers_queued) >= 1);

#ifdef V4L2_BUF_FLAG_ERROR
    if (buf.flags & V4L2_BUF_FLAG_ERROR) {
        av_log(ctx, AV_LOG_WARNING,
               "Dequeued v4l2 buffer contains corrupted data (%d bytes).\n",
               buf.bytesused);
        buf.bytesused = 0;
    } else
#endif
    {
        /* CPIA is a compressed format and we don't know the exact number of bytes
         * used by a frame, so set it here as the driver announces it. */
        if (ctx->video_codec_id == AV_CODEC_ID_CPIA)
            s->frame_size = buf.bytesused;

        if (s->frame_size > 0 && buf.bytesused != s->frame_size) {
            av_log(ctx, AV_LOG_WARNING,
                   "Dequeued v4l2 buffer contains %d bytes, but %d were expected. Flags: 0x%08X.\n",
                   buf.bytesused, s->frame_size, buf.flags);
            buf.bytesused = 0;
        }
    }

    /* Image is at s->buf_start[buf.index] */
    if (atomic_load(&s->buffers_queued) == FFMAX(s->buffers / 8, 1)) {
        /* when we start getting low on queued buffers, fall back on copying data */
        res = av_new_packet(pkt, buf.bytesused);
        if (res < 0) {
            av_log(ctx, AV_LOG_ERROR, "Error allocating a packet.\n");
            enqueue_buffer(s, &buf);
            return res;
        }
        memcpy(pkt->data, s->buf_start[buf.index], buf.bytesused);

        res = enqueue_buffer(s, &buf);
        if (res) {
            av_packet_unref(pkt);
            return res;
        }
    } else {
        struct buff_data *buf_descriptor;

        pkt->data = s->buf_start[buf.index];
        pkt->size = buf.bytesused;

        buf_descriptor = av_malloc(sizeof(struct buff_data));
        if (!buf_descriptor) {
            /* Something went wrong... Since av_malloc() failed, we cannot even
             * allocate a buffer for memcpying into it
             */
            av_log(ctx, AV_LOG_ERROR, "Failed to allocate a buffer descriptor\n");
            enqueue_buffer(s, &buf);

            return AVERROR(ENOMEM);
        }
        buf_descriptor->index = buf.index;
        buf_descriptor->s     = s;

        pkt->buf = av_buffer_create(pkt->data, pkt->size, mmap_release_buffer,
                                    buf_descriptor, 0);
        if (!pkt->buf) {
            av_log(ctx, AV_LOG_ERROR, "Failed to create a buffer\n");
            enqueue_buffer(s, &buf);
            av_freep(&buf_descriptor);
            return AVERROR(ENOMEM);
        }
    }
    pkt->pts = buf_ts.tv_sec * INT64_C(1000000) + buf_ts.tv_usec;
    convert_timestamp(ctx, &pkt->pts);

    return pkt->size;
}

static int mmap_start(AVFormatContext *ctx)
{
    struct video_data *s = ctx->priv_data;
    enum v4l2_buf_type type;
    int i, res;

    for (i = 0; i < s->buffers; i++) {
        struct v4l2_buffer buf = {
            .type   = V4L2_BUF_TYPE_VIDEO_CAPTURE,
            .index  = i,
            .memory = V4L2_MEMORY_MMAP
        };

        if (v4l2_ioctl(s->fd, VIDIOC_QBUF, &buf) < 0) {
            res = AVERROR(errno);
            av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_QBUF): %s\n",
                   av_err2str(res));
            return res;
        }
    }
    atomic_store(&s->buffers_queued, s->buffers);

    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (v4l2_ioctl(s->fd, VIDIOC_STREAMON, &type) < 0) {
        res = AVERROR(errno);
        av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_STREAMON): %s\n",
               av_err2str(res));
        return res;
    }

    return 0;
}

static void mmap_close(struct video_data *s)
{
    enum v4l2_buf_type type;
    int i;

    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    /* We do not check for the result, because we could
     * not do anything about it anyway...
     */
    v4l2_ioctl(s->fd, VIDIOC_STREAMOFF, &type);
    for (i = 0; i < s->buffers; i++) {
        v4l2_munmap(s->buf_start[i], s->buf_len[i]);
    }
    av_freep(&s->buf_start);
    av_freep(&s->buf_len);
}

static int v4l2_set_parameters(AVFormatContext *ctx)
{
    struct video_data *s = ctx->priv_data;
    struct v4l2_standard standard = { 0 };
    struct v4l2_streamparm streamparm = { 0 };
    struct v4l2_fract *tpf;
    AVRational framerate_q = { 0 };
    int i, ret;

    if (s->framerate &&
        (ret = av_parse_video_rate(&framerate_q, s->framerate)) < 0) {
        av_log(ctx, AV_LOG_ERROR, "Could not parse framerate '%s'.\n",
               s->framerate);
        return ret;
    }

    if (s->standard) {
        if (s->std_id) {
            ret = 0;
            av_log(ctx, AV_LOG_DEBUG, "Setting standard: %s\n", s->standard);
            /* set tv standard */
            for (i = 0; ; i++) {
                standard.index = i;
                if (v4l2_ioctl(s->fd, VIDIOC_ENUMSTD, &standard) < 0) {
                    ret = AVERROR(errno);
                    break;
                }
                if (!av_strcasecmp(standard.name, s->standard))
                    break;
            }
            if (ret < 0) {
                av_log(ctx, AV_LOG_ERROR, "Unknown or unsupported standard '%s'\n", s->standard);
                return ret;
            }

            if (v4l2_ioctl(s->fd, VIDIOC_S_STD, &standard.id) < 0) {
                ret = AVERROR(errno);
                av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_S_STD): %s\n", av_err2str(ret));
                return ret;
            }
        } else {
            av_log(ctx, AV_LOG_WARNING,
                   "This device does not support any standard\n");
        }
    }

    /* get standard */
    if (v4l2_ioctl(s->fd, VIDIOC_G_STD, &s->std_id) == 0) {
        tpf = &standard.frameperiod;
        for (i = 0; ; i++) {
            standard.index = i;
            if (v4l2_ioctl(s->fd, VIDIOC_ENUMSTD, &standard) < 0) {
                ret = AVERROR(errno);
                if (ret == AVERROR(EINVAL)
#ifdef ENODATA
                    || ret == AVERROR(ENODATA)
#endif
                ) {
                    tpf = &streamparm.parm.capture.timeperframe;
                    break;
                }
                av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_ENUMSTD): %s\n", av_err2str(ret));
                return ret;
            }
            if (standard.id == s->std_id) {
                av_log(ctx, AV_LOG_DEBUG,
                       "Current standard: %s, id: %"PRIx64", frameperiod: %d/%d\n",
                       standard.name, (uint64_t)standard.id, tpf->numerator, tpf->denominator);
                break;
            }
        }
    } else {
        tpf = &streamparm.parm.capture.timeperframe;
    }

    streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (v4l2_ioctl(s->fd, VIDIOC_G_PARM, &streamparm) < 0) {
        ret = AVERROR(errno);
        av_log(ctx, AV_LOG_WARNING, "ioctl(VIDIOC_G_PARM): %s\n", av_err2str(ret));
    } else if (framerate_q.num && framerate_q.den) {
        if (streamparm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) {
            tpf = &streamparm.parm.capture.timeperframe;

            av_log(ctx, AV_LOG_DEBUG, "Setting time per frame to %d/%d\n",
                   framerate_q.den, framerate_q.num);
            tpf->numerator   = framerate_q.den;
            tpf->denominator = framerate_q.num;

            if (v4l2_ioctl(s->fd, VIDIOC_S_PARM, &streamparm) < 0) {
                ret = AVERROR(errno);
                av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_S_PARM): %s\n",
                       av_err2str(ret));
                return ret;
            }

            if (framerate_q.num != tpf->denominator ||
                framerate_q.den != tpf->numerator) {
                av_log(ctx, AV_LOG_INFO,
                       "The driver changed the time per frame from "
                       "%d/%d to %d/%d\n",
                       framerate_q.den, framerate_q.num,
                       tpf->numerator, tpf->denominator);
            }
        } else {
            av_log(ctx, AV_LOG_WARNING,
                   "The driver does not permit changing the time per frame\n");
        }
    }
    if (tpf->denominator > 0 && tpf->numerator > 0) {
        ctx->streams[0]->avg_frame_rate.num = tpf->denominator;
        ctx->streams[0]->avg_frame_rate.den = tpf->numerator;
        ctx->streams[0]->r_frame_rate = ctx->streams[0]->avg_frame_rate;
    } else
        av_log(ctx, AV_LOG_WARNING, "Time per frame unknown\n");

    return 0;
}

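/* Negotiate the capture format: first try the V4L2 mapping of the requested
 * pixel format / codec; if the driver rejects it with EINVAL, walk
 * ff_fmt_conversion_table and try every entry compatible with the request
 * until the driver accepts one. */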
static int device_try_init(AVFormatContext *ctx,
                           enum AVPixelFormat pix_fmt,
                           int *width,
                           int *height,
                           uint32_t *desired_format,
                           enum AVCodecID *codec_id)
{
    int ret, i;

    *desired_format = ff_fmt_ff2v4l(pix_fmt, ctx->video_codec_id);

    if (*desired_format) {
        ret = device_init(ctx, width, height, *desired_format);
        if (ret < 0) {
            *desired_format = 0;
            if (ret != AVERROR(EINVAL))
                return ret;
        }
    }

    if (!*desired_format) {
        for (i = 0; ff_fmt_conversion_table[i].codec_id != AV_CODEC_ID_NONE; i++) {
            if (ff_fmt_conversion_table[i].codec_id == ctx->video_codec_id ||
                ff_fmt_conversion_table[i].ff_fmt == pix_fmt) {
                av_log(ctx, AV_LOG_DEBUG, "Trying to set codec:%s pix_fmt:%s\n",
                       avcodec_get_name(ff_fmt_conversion_table[i].codec_id),
                       (char *)av_x_if_null(av_get_pix_fmt_name(ff_fmt_conversion_table[i].ff_fmt), "none"));

                *desired_format = ff_fmt_conversion_table[i].v4l2_fmt;
                ret = device_init(ctx, width, height, *desired_format);
                if (ret >= 0)
                    break;
                else if (ret != AVERROR(EINVAL))
                    return ret;
                *desired_format = 0;
            }
        }

        if (*desired_format == 0) {
            av_log(ctx, AV_LOG_ERROR, "Cannot find a proper format for "
                   "codec '%s' (id %d), pixel format '%s' (id %d)\n",
                   avcodec_get_name(ctx->video_codec_id), ctx->video_codec_id,
                   (char *)av_x_if_null(av_get_pix_fmt_name(pix_fmt), "none"), pix_fmt);
            ret = AVERROR(EINVAL);
        }
    }

    *codec_id = ff_fmt_v4l2codec(*desired_format);
    if (*codec_id == AV_CODEC_ID_NONE)
        av_assert0(ret == AVERROR(EINVAL));
    return ret;
}

static int v4l2_read_probe(const AVProbeData *p)
{
    if (av_strstart(p->filename, "/dev/video", NULL))
        return AVPROBE_SCORE_MAX - 1;
    return 0;
}

static int v4l2_read_header(AVFormatContext *ctx)
{
    struct video_data *s = ctx->priv_data;
    AVStream *st;
    int res = 0;
    uint32_t desired_format;
    enum AVCodecID codec_id = AV_CODEC_ID_NONE;
    enum AVPixelFormat pix_fmt = AV_PIX_FMT_NONE;
    struct v4l2_input input = { 0 };

    st = avformat_new_stream(ctx, NULL);
    if (!st)
        return AVERROR(ENOMEM);

#if CONFIG_LIBV4L2
    /* silence libv4l2 logging. if fopen() fails v4l2_log_file will be NULL
       and errors will get sent to stderr */
    if (s->use_libv4l2)
        v4l2_log_file = fopen("/dev/null", "w");
#endif

    s->fd = device_open(ctx, ctx->url);
    if (s->fd < 0)
        return s->fd;

    if (s->channel != -1) {
        /* set video input */
        av_log(ctx, AV_LOG_DEBUG, "Selecting input_channel: %d\n", s->channel);
        if (v4l2_ioctl(s->fd, VIDIOC_S_INPUT, &s->channel) < 0) {
            res = AVERROR(errno);
            av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_S_INPUT): %s\n", av_err2str(res));
            goto fail;
        }
    } else {
        /* get current video input */
        if (v4l2_ioctl(s->fd, VIDIOC_G_INPUT, &s->channel) < 0) {
            res = AVERROR(errno);
            av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_G_INPUT): %s\n", av_err2str(res));
            goto fail;
        }
    }

    /* enum input */
    input.index = s->channel;
    if (v4l2_ioctl(s->fd, VIDIOC_ENUMINPUT, &input) < 0) {
        res = AVERROR(errno);
        av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_ENUMINPUT): %s\n", av_err2str(res));
        goto fail;
    }
    s->std_id = input.std;
    av_log(ctx, AV_LOG_DEBUG, "Current input_channel: %d, input_name: %s, input_std: %"PRIx64"\n",
           s->channel, input.name, (uint64_t)input.std);

    if (s->list_format) {
        list_formats(ctx, s->list_format);
        res = AVERROR_EXIT;
        goto fail;
    }

    if (s->list_standard) {
        list_standards(ctx);
        res = AVERROR_EXIT;
        goto fail;
    }

    avpriv_set_pts_info(st, 64, 1, 1000000); /* 64 bits pts in us */

    if (s->pixel_format) {
        const AVCodecDescriptor *desc = avcodec_descriptor_get_by_name(s->pixel_format);

        if (desc)
            ctx->video_codec_id = desc->id;

        pix_fmt = av_get_pix_fmt(s->pixel_format);

        if (pix_fmt == AV_PIX_FMT_NONE && !desc) {
            av_log(ctx, AV_LOG_ERROR, "No such input format: %s.\n",
                   s->pixel_format);

            res = AVERROR(EINVAL);
            goto fail;
        }
    }

    if (!s->width && !s->height) {
        struct v4l2_format fmt = { .type = V4L2_BUF_TYPE_VIDEO_CAPTURE };

        av_log(ctx, AV_LOG_VERBOSE,
               "Querying the device for the current frame size\n");
        if (v4l2_ioctl(s->fd, VIDIOC_G_FMT, &fmt) < 0) {
            res = AVERROR(errno);
            av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_G_FMT): %s\n",
                   av_err2str(res));
            goto fail;
        }

        s->width  = fmt.fmt.pix.width;
        s->height = fmt.fmt.pix.height;
        av_log(ctx, AV_LOG_VERBOSE,
               "Setting frame size to %dx%d\n", s->width, s->height);
    }

    res = device_try_init(ctx, pix_fmt, &s->width, &s->height, &desired_format, &codec_id);
    if (res < 0)
        goto fail;

    /* If no pixel_format was specified, the codec_id was not known up
     * until now. Set video_codec_id in the context, as codec_id will
     * not be available outside this function
     */
    if (codec_id != AV_CODEC_ID_NONE && ctx->video_codec_id == AV_CODEC_ID_NONE)
        ctx->video_codec_id = codec_id;

    if ((res = av_image_check_size(s->width, s->height, 0, ctx)) < 0)
        goto fail;

    s->pixelformat = desired_format;

    if ((res = v4l2_set_parameters(ctx)) < 0)
        goto fail;

    st->codecpar->format = ff_fmt_v4l2ff(desired_format, codec_id);
    if (st->codecpar->format != AV_PIX_FMT_NONE)
        s->frame_size = av_image_get_buffer_size(st->codecpar->format,
                                                 s->width, s->height, 1);

    if ((res = mmap_init(ctx)) ||
        (res = mmap_start(ctx)) < 0)
        goto fail;

    s->top_field_first = first_field(s);

    st->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
    st->codecpar->codec_id = codec_id;
    if (codec_id == AV_CODEC_ID_RAWVIDEO)
        st->codecpar->codec_tag =
            avcodec_pix_fmt_to_codec_tag(st->codecpar->format);
    else if (codec_id == AV_CODEC_ID_H264) {
        avpriv_stream_set_need_parsing(st, AVSTREAM_PARSE_FULL_ONCE);
    }
    if (desired_format == V4L2_PIX_FMT_YVU420)
        st->codecpar->codec_tag = MKTAG('Y', 'V', '1', '2');
    else if (desired_format == V4L2_PIX_FMT_YVU410)
        st->codecpar->codec_tag = MKTAG('Y', 'V', 'U', '9');
    st->codecpar->width = s->width;
    st->codecpar->height = s->height;
    if (st->avg_frame_rate.den)
        st->codecpar->bit_rate = s->frame_size * av_q2d(st->avg_frame_rate) * 8;

    return 0;

fail:
    v4l2_close(s->fd);
    return res;
}

static int v4l2_read_packet(AVFormatContext *ctx, AVPacket *pkt)
{
    int res;

    if ((res = mmap_read_frame(ctx, pkt)) < 0) {
        return res;
    }

    return pkt->size;
}

static int v4l2_read_close(AVFormatContext *ctx)
{
    struct video_data *s = ctx->priv_data;

    if (atomic_load(&s->buffers_queued) != s->buffers)
        av_log(ctx, AV_LOG_WARNING, "Some buffers are still owned by the caller on "
               "close.\n");

    mmap_close(s);

    v4l2_close(s->fd);
    return 0;
}

static int v4l2_is_v4l_dev(const char *name)
{
    return !strncmp(name, "video", 5) ||
           !strncmp(name, "radio", 5) ||
           !strncmp(name, "vbi", 3) ||
           !strncmp(name, "v4l-subdev", 10);
}

static int v4l2_get_device_list(AVFormatContext *ctx, AVDeviceInfoList *device_list)
{
    struct video_data *s = ctx->priv_data;
    DIR *dir;
    struct dirent *entry;
    int ret = 0;

    if (!device_list)
        return AVERROR(EINVAL);

    dir = opendir("/dev");
    if (!dir) {
        ret = AVERROR(errno);
        av_log(ctx, AV_LOG_ERROR, "Couldn't open the directory: %s\n", av_err2str(ret));
        return ret;
    }
    while ((entry = readdir(dir))) {
        AVDeviceInfo *device = NULL;
        struct v4l2_capability cap;
        int fd = -1, size;
        char device_name[256];

        if (!v4l2_is_v4l_dev(entry->d_name))
            continue;

        size = snprintf(device_name, sizeof(device_name), "/dev/%s", entry->d_name);
        if (size >= sizeof(device_name)) {
            av_log(ctx, AV_LOG_ERROR, "Device name too long.\n");
            ret = AVERROR(ENOSYS);
            break;
        }

        if ((fd = device_open(ctx, device_name)) < 0)
            continue;

        if (v4l2_ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) {
            ret = AVERROR(errno);
            av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_QUERYCAP): %s\n", av_err2str(ret));
            goto fail;
        }

        device = av_mallocz(sizeof(AVDeviceInfo));
        if (!device) {
            ret = AVERROR(ENOMEM);
            goto fail;
        }
        device->device_name = av_strdup(device_name);
        device->device_description = av_strdup(cap.card);
        if (!device->device_name || !device->device_description) {
            ret = AVERROR(ENOMEM);
            goto fail;
        }

        if ((ret = av_dynarray_add_nofree(&device_list->devices,
                                          &device_list->nb_devices, device)) < 0)
            goto fail;

        v4l2_close(fd);
        continue;

      fail:
        if (device) {
            av_freep(&device->device_name);
            av_freep(&device->device_description);
            av_freep(&device);
        }
        v4l2_close(fd);
        break;
    }
    closedir(dir);
    return ret;
}

#define OFFSET(x) offsetof(struct video_data, x)
#define DEC AV_OPT_FLAG_DECODING_PARAM

static const AVOption options[] = {
    { "standard",     "set TV standard, used only by analog frame grabber",       OFFSET(standard),     AV_OPT_TYPE_STRING, {.str = NULL },  0, 0,       DEC },
    { "channel",      "set TV channel, used only by frame grabber",               OFFSET(channel),      AV_OPT_TYPE_INT,    {.i64 = -1 },   -1, INT_MAX, DEC },
    { "video_size",   "set frame size",                                           OFFSET(width),        AV_OPT_TYPE_IMAGE_SIZE, {.str = NULL}, 0, 0,     DEC },
    { "pixel_format", "set preferred pixel format",                               OFFSET(pixel_format), AV_OPT_TYPE_STRING, {.str = NULL},   0, 0,       DEC },
    { "input_format", "set preferred pixel format (for raw video) or codec name", OFFSET(pixel_format), AV_OPT_TYPE_STRING, {.str = NULL},   0, 0,       DEC },
    { "framerate",    "set frame rate",                                           OFFSET(framerate),    AV_OPT_TYPE_STRING, {.str = NULL},   0, 0,       DEC },

    { "list_formats", "list available formats and exit",                          OFFSET(list_format),  AV_OPT_TYPE_INT,    {.i64 = 0 },              0, INT_MAX, DEC, "list_formats" },
    { "all",          "show all available formats",                               OFFSET(list_format),  AV_OPT_TYPE_CONST,  {.i64 = V4L_ALLFORMATS }, 0, INT_MAX, DEC, "list_formats" },
    { "raw",          "show only non-compressed formats",                         OFFSET(list_format),  AV_OPT_TYPE_CONST,  {.i64 = V4L_RAWFORMATS }, 0, INT_MAX, DEC, "list_formats" },
    { "compressed",   "show only compressed formats",                             OFFSET(list_format),  AV_OPT_TYPE_CONST,  {.i64 = V4L_COMPFORMATS }, 0, INT_MAX, DEC, "list_formats" },

    { "list_standards", "list supported standards and exit",                      OFFSET(list_standard), AV_OPT_TYPE_INT,   {.i64 = 0 }, 0, 1, DEC, "list_standards" },
    { "all",            "show all supported standards",                           OFFSET(list_standard), AV_OPT_TYPE_CONST, {.i64 = 1 }, 0, 0, DEC, "list_standards" },

    { "timestamps", "set type of timestamps for grabbed frames",                  OFFSET(ts_mode),      AV_OPT_TYPE_INT,    {.i64 = 0 },               0, 2, DEC, "timestamps" },
    { "ts",         "set type of timestamps for grabbed frames",                  OFFSET(ts_mode),      AV_OPT_TYPE_INT,    {.i64 = 0 },               0, 2, DEC, "timestamps" },
    { "default",    "use timestamps from the kernel",                             OFFSET(ts_mode),      AV_OPT_TYPE_CONST,  {.i64 = V4L_TS_DEFAULT },  0, 2, DEC, "timestamps" },
    { "abs",        "use absolute timestamps (wall clock)",                       OFFSET(ts_mode),      AV_OPT_TYPE_CONST,  {.i64 = V4L_TS_ABS },      0, 2, DEC, "timestamps" },
    { "mono2abs",   "force conversion from monotonic to absolute timestamps",     OFFSET(ts_mode),      AV_OPT_TYPE_CONST,  {.i64 = V4L_TS_MONO2ABS }, 0, 2, DEC, "timestamps" },
    { "use_libv4l2", "use libv4l2 (v4l-utils) conversion functions",              OFFSET(use_libv4l2),  AV_OPT_TYPE_BOOL,   {.i64 = 0}, 0, 1, DEC },
    { NULL },
};

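/*
 * For example, a typical capture invocation that exercises the options above is
 *     ffmpeg -f v4l2 -framerate 25 -video_size 640x480 -input_format mjpeg -i /dev/video0 out.mkv
 * the same options can also be passed programmatically through the options
 * dictionary of avformat_open_input().
 */
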
static const AVClass v4l2_class = {
    .class_name = "V4L2 indev",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
    .category   = AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT,
};

const AVInputFormat ff_v4l2_demuxer = {
    .name           = "video4linux2,v4l2",
    .long_name      = NULL_IF_CONFIG_SMALL("Video4Linux2 device grab"),
    .priv_data_size = sizeof(struct video_data),
    .read_probe     = v4l2_read_probe,
    .read_header    = v4l2_read_header,
    .read_packet    = v4l2_read_packet,
    .read_close     = v4l2_read_close,
    .get_device_list = v4l2_get_device_list,
    .flags          = AVFMT_NOFILE,
    .priv_class     = &v4l2_class,
};