v4l2_buffers.c
/*
 * V4L2 buffer helper functions.
 *
 * Copyright (C) 2017 Alexis Ballier <aballier@gentoo.org>
 * Copyright (C) 2017 Jorge Ramirez <jorge.ramirez-ortiz@linaro.org>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <linux/videodev2.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <unistd.h>
#include <fcntl.h>
#include <poll.h>
#include "libavcodec/avcodec.h"
#include "libavcodec/internal.h"
#include "v4l2_context.h"
#include "v4l2_buffers.h"
#include "v4l2_m2m.h"

#define USEC_PER_SEC 1000000

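/* Recover the owning V4L2m2mContext from a buffer: buf->context is either the
 * m2m context's output or its capture V4L2Context, so container_of() walks
 * back to the enclosing structure. */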
static inline V4L2m2mContext *buf_to_m2mctx(V4L2Buffer *buf)
{
    return V4L2_TYPE_IS_OUTPUT(buf->context->type) ?
        container_of(buf->context, V4L2m2mContext, output) :
        container_of(buf->context, V4L2m2mContext, capture);
}

static inline AVCodecContext *logger(V4L2Buffer *buf)
{
    return buf_to_m2mctx(buf)->avctx;
}

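/* The V4L2 API timestamps buffers with a struct timeval in microseconds, so
 * v4l2_set_pts()/v4l2_get_pts() rescale between the codec's time_base and a
 * { 1, USEC_PER_SEC } timebase; AV_NOPTS_VALUE is mapped to 0 on the way in. */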
static inline void v4l2_set_pts(V4L2Buffer *out, int64_t pts)
{
    V4L2m2mContext *s = buf_to_m2mctx(out);
    AVRational v4l2_timebase = { 1, USEC_PER_SEC };
    int64_t v4l2_pts;

    if (pts == AV_NOPTS_VALUE)
        pts = 0;

    /* convert pts to v4l2 timebase */
    v4l2_pts = av_rescale_q(pts, s->avctx->time_base, v4l2_timebase);
    out->buf.timestamp.tv_usec = v4l2_pts % USEC_PER_SEC;
    out->buf.timestamp.tv_sec = v4l2_pts / USEC_PER_SEC;
}

static inline uint64_t v4l2_get_pts(V4L2Buffer *avbuf)
{
    V4L2m2mContext *s = buf_to_m2mctx(avbuf);
    AVRational v4l2_timebase = { 1, USEC_PER_SEC };
    int64_t v4l2_pts;

    /* convert pts back to encoder timebase */
    v4l2_pts = (int64_t)avbuf->buf.timestamp.tv_sec * USEC_PER_SEC +
                        avbuf->buf.timestamp.tv_usec;

    return av_rescale_q(v4l2_pts, v4l2_timebase, s->avctx->time_base);
}

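/* The next four helpers translate the colorimetry reported by the driver
 * (colorspace, ycbcr_enc, quantization and xfer_func from the negotiated
 * format) into the corresponding FFmpeg enums, falling back to the
 * UNSPECIFIED value when there is no direct match. */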
static enum AVColorPrimaries v4l2_get_color_primaries(V4L2Buffer *buf)
{
    enum v4l2_ycbcr_encoding ycbcr;
    enum v4l2_colorspace cs;

    cs = V4L2_TYPE_IS_MULTIPLANAR(buf->buf.type) ?
        buf->context->format.fmt.pix_mp.colorspace :
        buf->context->format.fmt.pix.colorspace;

    ycbcr = V4L2_TYPE_IS_MULTIPLANAR(buf->buf.type) ?
        buf->context->format.fmt.pix_mp.ycbcr_enc:
        buf->context->format.fmt.pix.ycbcr_enc;

    switch(ycbcr) {
    case V4L2_YCBCR_ENC_XV709:
    case V4L2_YCBCR_ENC_709: return AVCOL_PRI_BT709;
    case V4L2_YCBCR_ENC_XV601:
    case V4L2_YCBCR_ENC_601: return AVCOL_PRI_BT470M;
    default:
        break;
    }

    switch(cs) {
    case V4L2_COLORSPACE_470_SYSTEM_BG: return AVCOL_PRI_BT470BG;
    case V4L2_COLORSPACE_SMPTE170M: return AVCOL_PRI_SMPTE170M;
    case V4L2_COLORSPACE_SMPTE240M: return AVCOL_PRI_SMPTE240M;
    case V4L2_COLORSPACE_BT2020: return AVCOL_PRI_BT2020;
    default:
        break;
    }

    return AVCOL_PRI_UNSPECIFIED;
}

static enum AVColorRange v4l2_get_color_range(V4L2Buffer *buf)
{
    enum v4l2_quantization qt;

    qt = V4L2_TYPE_IS_MULTIPLANAR(buf->buf.type) ?
        buf->context->format.fmt.pix_mp.quantization :
        buf->context->format.fmt.pix.quantization;

    switch (qt) {
    case V4L2_QUANTIZATION_LIM_RANGE: return AVCOL_RANGE_MPEG;
    case V4L2_QUANTIZATION_FULL_RANGE: return AVCOL_RANGE_JPEG;
    default:
        break;
    }

    return AVCOL_RANGE_UNSPECIFIED;
}

static enum AVColorSpace v4l2_get_color_space(V4L2Buffer *buf)
{
    enum v4l2_ycbcr_encoding ycbcr;
    enum v4l2_colorspace cs;

    cs = V4L2_TYPE_IS_MULTIPLANAR(buf->buf.type) ?
        buf->context->format.fmt.pix_mp.colorspace :
        buf->context->format.fmt.pix.colorspace;

    ycbcr = V4L2_TYPE_IS_MULTIPLANAR(buf->buf.type) ?
        buf->context->format.fmt.pix_mp.ycbcr_enc:
        buf->context->format.fmt.pix.ycbcr_enc;

    switch(cs) {
    case V4L2_COLORSPACE_SRGB: return AVCOL_SPC_RGB;
    case V4L2_COLORSPACE_REC709: return AVCOL_SPC_BT709;
    case V4L2_COLORSPACE_470_SYSTEM_M: return AVCOL_SPC_FCC;
    case V4L2_COLORSPACE_470_SYSTEM_BG: return AVCOL_SPC_BT470BG;
    case V4L2_COLORSPACE_SMPTE170M: return AVCOL_SPC_SMPTE170M;
    case V4L2_COLORSPACE_SMPTE240M: return AVCOL_SPC_SMPTE240M;
    case V4L2_COLORSPACE_BT2020:
        if (ycbcr == V4L2_YCBCR_ENC_BT2020_CONST_LUM)
            return AVCOL_SPC_BT2020_CL;
        else
            return AVCOL_SPC_BT2020_NCL;
    default:
        break;
    }

    return AVCOL_SPC_UNSPECIFIED;
}

static enum AVColorTransferCharacteristic v4l2_get_color_trc(V4L2Buffer *buf)
{
    enum v4l2_ycbcr_encoding ycbcr;
    enum v4l2_xfer_func xfer;
    enum v4l2_colorspace cs;

    cs = V4L2_TYPE_IS_MULTIPLANAR(buf->buf.type) ?
        buf->context->format.fmt.pix_mp.colorspace :
        buf->context->format.fmt.pix.colorspace;

    ycbcr = V4L2_TYPE_IS_MULTIPLANAR(buf->buf.type) ?
        buf->context->format.fmt.pix_mp.ycbcr_enc:
        buf->context->format.fmt.pix.ycbcr_enc;

    xfer = V4L2_TYPE_IS_MULTIPLANAR(buf->buf.type) ?
        buf->context->format.fmt.pix_mp.xfer_func:
        buf->context->format.fmt.pix.xfer_func;

    switch (xfer) {
    case V4L2_XFER_FUNC_709: return AVCOL_TRC_BT709;
    case V4L2_XFER_FUNC_SRGB: return AVCOL_TRC_IEC61966_2_1;
    default:
        break;
    }

    switch (cs) {
    case V4L2_COLORSPACE_470_SYSTEM_M: return AVCOL_TRC_GAMMA22;
    case V4L2_COLORSPACE_470_SYSTEM_BG: return AVCOL_TRC_GAMMA28;
    case V4L2_COLORSPACE_SMPTE170M: return AVCOL_TRC_SMPTE170M;
    case V4L2_COLORSPACE_SMPTE240M: return AVCOL_TRC_SMPTE240M;
    default:
        break;
    }

    switch (ycbcr) {
    case V4L2_YCBCR_ENC_XV709:
    case V4L2_YCBCR_ENC_XV601: return AVCOL_TRC_BT1361_ECG;
    default:
        break;
    }

    return AVCOL_TRC_UNSPECIFIED;
}

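/* AVBufferRef free callback for buffers handed to the user. When the last
 * per-plane reference is released, the buffer is either re-enqueued to the
 * driver (if streaming), simply marked available while draining, or, during a
 * re-init, used to signal the refsync semaphore once the global refcount
 * drops to zero; the context reference is then dropped. */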
static void v4l2_free_buffer(void *opaque, uint8_t *unused)
{
    V4L2Buffer* avbuf = opaque;
    V4L2m2mContext *s = buf_to_m2mctx(avbuf);

    if (atomic_fetch_sub(&avbuf->context_refcount, 1) == 1) {
        atomic_fetch_sub_explicit(&s->refcount, 1, memory_order_acq_rel);

        if (s->reinit) {
            if (!atomic_load(&s->refcount))
                sem_post(&s->refsync);
        } else {
            if (s->draining) {
                /* no need to queue more buffers to the driver */
                avbuf->status = V4L2BUF_AVAILABLE;
            }
            else if (avbuf->context->streamon)
                ff_v4l2_buffer_enqueue(avbuf);
        }

        av_buffer_unref(&avbuf->context_ref);
    }
}

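/* Wrap one mmap'ed plane of a V4L2 buffer in an AVBufferRef (no copy) with
 * v4l2_free_buffer() as its destructor, and take a reference on the owning
 * context so it stays alive while the data is in flight. */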
static int v4l2_buf_to_bufref(V4L2Buffer *in, int plane, AVBufferRef **buf)
{
    V4L2m2mContext *s = buf_to_m2mctx(in);

    if (plane >= in->num_planes)
        return AVERROR(EINVAL);

    /* even though most encoders return 0 in data_offset, encoding vp8 does require this value */
    *buf = av_buffer_create((char *)in->plane_info[plane].mm_addr + in->planes[plane].data_offset,
                            in->plane_info[plane].length, v4l2_free_buffer, in, 0);
    if (!*buf)
        return AVERROR(ENOMEM);

    if (in->context_ref)
        atomic_fetch_add(&in->context_refcount, 1);
    else {
        in->context_ref = av_buffer_ref(s->self_ref);
        if (!in->context_ref) {
            av_buffer_unref(buf);
            return AVERROR(ENOMEM);
        }
        in->context_refcount = 1;
    }

    in->status = V4L2BUF_RET_USER;
    atomic_fetch_add_explicit(&s->refcount, 1, memory_order_relaxed);

    return 0;
}

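/* Copy user data into one mmap'ed plane of an output buffer (clamped to the
 * plane size) and record bytesused/length in the single- or multi-planar
 * fields of the v4l2_buffer accordingly. */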
static int v4l2_bufref_to_buf(V4L2Buffer *out, int plane, const uint8_t* data, int size, AVBufferRef* bref)
{
    unsigned int bytesused, length;

    if (plane >= out->num_planes)
        return AVERROR(EINVAL);

    bytesused = FFMIN(size, out->plane_info[plane].length);
    length = out->plane_info[plane].length;

    memcpy(out->plane_info[plane].mm_addr, data, FFMIN(size, out->plane_info[plane].length));

    if (V4L2_TYPE_IS_MULTIPLANAR(out->buf.type)) {
        out->planes[plane].bytesused = bytesused;
        out->planes[plane].length = length;
    } else {
        out->buf.bytesused = bytesused;
        out->buf.length = length;
    }

    return 0;
}

/******************************************************************************
 *
 *              V4L2Buffer interface
 *
 ******************************************************************************/

int ff_v4l2_buffer_avframe_to_buf(const AVFrame *frame, V4L2Buffer *out)
{
    int i, ret;

    for(i = 0; i < out->num_planes; i++) {
        ret = v4l2_bufref_to_buf(out, i, frame->buf[i]->data, frame->buf[i]->size, frame->buf[i]);
        if (ret)
            return ret;
    }

    v4l2_set_pts(out, frame->pts);

    return 0;
}

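/* Export a dequeued capture buffer as an AVFrame: wrap each plane in an
 * AVBufferRef, patch up the single-plane NV12 layout, then fill in
 * colorimetry, pts and keyframe/error information from the v4l2_buffer
 * metadata. */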
int ff_v4l2_buffer_buf_to_avframe(AVFrame *frame, V4L2Buffer *avbuf)
{
    V4L2m2mContext *s = buf_to_m2mctx(avbuf);
    int i, ret;

    av_frame_unref(frame);

    /* 1. get references to the actual data */
    for (i = 0; i < avbuf->num_planes; i++) {
        ret = v4l2_buf_to_bufref(avbuf, i, &frame->buf[i]);
        if (ret)
            return ret;

        frame->linesize[i] = avbuf->plane_info[i].bytesperline;
        frame->data[i] = frame->buf[i]->data;
    }

    /* 1.1 fixup special cases */
    switch (avbuf->context->av_pix_fmt) {
    case AV_PIX_FMT_NV12:
        if (avbuf->num_planes > 1)
            break;
        frame->linesize[1] = avbuf->plane_info[0].bytesperline;
        frame->data[1] = frame->buf[0]->data + avbuf->plane_info[0].bytesperline * avbuf->context->format.fmt.pix_mp.height;
        break;
    default:
        break;
    }

    /* 2. get frame information */
    frame->key_frame = !!(avbuf->buf.flags & V4L2_BUF_FLAG_KEYFRAME);
    frame->format = avbuf->context->av_pix_fmt;
    frame->color_primaries = v4l2_get_color_primaries(avbuf);
    frame->colorspace = v4l2_get_color_space(avbuf);
    frame->color_range = v4l2_get_color_range(avbuf);
    frame->color_trc = v4l2_get_color_trc(avbuf);
    frame->pts = v4l2_get_pts(avbuf);

    /* these two values are updated also during re-init in v4l2_process_driver_event */
    frame->height = s->output.height;
    frame->width = s->output.width;

    /* 3. report errors upstream */
    if (avbuf->buf.flags & V4L2_BUF_FLAG_ERROR) {
        av_log(logger(avbuf), AV_LOG_ERROR, "%s: driver decode error\n", avbuf->context->name);
        frame->decode_error_flags |= FF_DECODE_ERROR_INVALID_BITSTREAM;
    }

    return 0;
}

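/* Export a dequeued capture buffer as an AVPacket: the first plane becomes
 * the packet payload (zero copy), keyframe and error flags are propagated,
 * and the timestamp is rescaled back to the codec timebase. */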
int ff_v4l2_buffer_buf_to_avpkt(AVPacket *pkt, V4L2Buffer *avbuf)
{
    int ret;

    av_packet_unref(pkt);
    ret = v4l2_buf_to_bufref(avbuf, 0, &pkt->buf);
    if (ret)
        return ret;

    pkt->size = V4L2_TYPE_IS_MULTIPLANAR(avbuf->buf.type) ? avbuf->buf.m.planes[0].bytesused : avbuf->buf.bytesused;
    pkt->data = pkt->buf->data;

    if (avbuf->buf.flags & V4L2_BUF_FLAG_KEYFRAME)
        pkt->flags |= AV_PKT_FLAG_KEY;

    if (avbuf->buf.flags & V4L2_BUF_FLAG_ERROR) {
        av_log(logger(avbuf), AV_LOG_ERROR, "%s driver encode error\n", avbuf->context->name);
        pkt->flags |= AV_PKT_FLAG_CORRUPT;
    }

    pkt->dts = pkt->pts = v4l2_get_pts(avbuf);

    return 0;
}

int ff_v4l2_buffer_avpkt_to_buf(const AVPacket *pkt, V4L2Buffer *out)
{
    int ret;

    ret = v4l2_bufref_to_buf(out, 0, pkt->data, pkt->size, pkt->buf);
    if (ret)
        return ret;

    v4l2_set_pts(out, pkt->pts);

    if (pkt->flags & AV_PKT_FLAG_KEY)
        out->flags = V4L2_BUF_FLAG_KEYFRAME;

    return 0;
}

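/* One-time setup of a driver buffer: VIDIOC_QUERYBUF to learn its layout,
 * mmap() of every plane into plane_info[], and, for capture buffers, an
 * immediate enqueue so the driver can start filling it. */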
int ff_v4l2_buffer_initialize(V4L2Buffer *avbuf, int index)
{
    V4L2Context *ctx = avbuf->context;
    int ret, i;

    avbuf->buf.memory = V4L2_MEMORY_MMAP;
    avbuf->buf.type = ctx->type;
    avbuf->buf.index = index;

    if (V4L2_TYPE_IS_MULTIPLANAR(ctx->type)) {
        avbuf->buf.length = VIDEO_MAX_PLANES;
        avbuf->buf.m.planes = avbuf->planes;
    }

    ret = ioctl(buf_to_m2mctx(avbuf)->fd, VIDIOC_QUERYBUF, &avbuf->buf);
    if (ret < 0)
        return AVERROR(errno);

    if (V4L2_TYPE_IS_MULTIPLANAR(ctx->type)) {
        avbuf->num_planes = 0;
        for (;;) {
            /* in MP, the V4L2 API states that buf.length means num_planes */
            if (avbuf->num_planes >= avbuf->buf.length)
                break;
            if (avbuf->buf.m.planes[avbuf->num_planes].length)
                avbuf->num_planes++;
        }
    } else
        avbuf->num_planes = 1;

    for (i = 0; i < avbuf->num_planes; i++) {

        avbuf->plane_info[i].bytesperline = V4L2_TYPE_IS_MULTIPLANAR(ctx->type) ?
            ctx->format.fmt.pix_mp.plane_fmt[i].bytesperline :
            ctx->format.fmt.pix.bytesperline;

        if (V4L2_TYPE_IS_MULTIPLANAR(ctx->type)) {
            avbuf->plane_info[i].length = avbuf->buf.m.planes[i].length;
            avbuf->plane_info[i].mm_addr = mmap(NULL, avbuf->buf.m.planes[i].length,
                                                PROT_READ | PROT_WRITE, MAP_SHARED,
                                                buf_to_m2mctx(avbuf)->fd, avbuf->buf.m.planes[i].m.mem_offset);
        } else {
            avbuf->plane_info[i].length = avbuf->buf.length;
            avbuf->plane_info[i].mm_addr = mmap(NULL, avbuf->buf.length,
                                                PROT_READ | PROT_WRITE, MAP_SHARED,
                                                buf_to_m2mctx(avbuf)->fd, avbuf->buf.m.offset);
        }

        if (avbuf->plane_info[i].mm_addr == MAP_FAILED)
            return AVERROR(ENOMEM);
    }

    avbuf->status = V4L2BUF_AVAILABLE;

    if (V4L2_TYPE_IS_OUTPUT(ctx->type))
        return 0;

    if (V4L2_TYPE_IS_MULTIPLANAR(ctx->type)) {
        avbuf->buf.m.planes = avbuf->planes;
        avbuf->buf.length = avbuf->num_planes;

    } else {
        avbuf->buf.bytesused = avbuf->planes[0].bytesused;
        avbuf->buf.length = avbuf->planes[0].length;
    }

    return ff_v4l2_buffer_enqueue(avbuf);
}

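/* Hand a buffer (back) to the driver with VIDIOC_QBUF and mark it as owned by
 * the driver until it is dequeued again. */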
int ff_v4l2_buffer_enqueue(V4L2Buffer *avbuf)
{
    int ret;

    avbuf->buf.flags = avbuf->flags;

    ret = ioctl(buf_to_m2mctx(avbuf)->fd, VIDIOC_QBUF, &avbuf->buf);
    if (ret < 0)
        return AVERROR(errno);

    avbuf->status = V4L2BUF_IN_DRIVER;

    return 0;
}
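
For reference, here is a minimal standalone sketch (not part of the file above) of the timestamp round-trip that v4l2_set_pts() and v4l2_get_pts() implement. It assumes a hypothetical 1/25 codec time_base and uses only av_rescale_q() from libavutil; building it against your libavutil installation (e.g. via pkg-config) is left as an assumption about your setup.

/* Rescale a pts from a 1/25 codec timebase into microseconds, split it into
 * the tv_sec/tv_usec pair a struct v4l2_buffer carries, and convert it back. */
#include <stdio.h>
#include <stdint.h>
#include <inttypes.h>
#include "libavutil/mathematics.h"
#include "libavutil/rational.h"

#define USEC_PER_SEC 1000000

int main(void)
{
    AVRational time_base     = { 1, 25 };          /* assumed codec timebase */
    AVRational v4l2_timebase = { 1, USEC_PER_SEC };
    int64_t pts = 100;                             /* 100 ticks = 4 seconds  */

    /* what v4l2_set_pts() stores in buf.timestamp */
    int64_t v4l2_pts = av_rescale_q(pts, time_base, v4l2_timebase);
    int64_t tv_sec  = v4l2_pts / USEC_PER_SEC;
    int64_t tv_usec = v4l2_pts % USEC_PER_SEC;

    /* what v4l2_get_pts() recovers from it */
    int64_t back = av_rescale_q(tv_sec * USEC_PER_SEC + tv_usec,
                                v4l2_timebase, time_base);

    printf("pts=%" PRId64 " -> %" PRId64 ".%06" PRId64 "s -> pts=%" PRId64 "\n",
           pts, tv_sec, tv_usec, back);
    return 0;
}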