FFmpeg
hwcontext.c
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
19 #include "config.h"
20 
21 #include "avassert.h"
22 #include "buffer.h"
23 #include "common.h"
24 #include "hwcontext.h"
25 #include "hwcontext_internal.h"
26 #include "imgutils.h"
27 #include "log.h"
28 #include "mem.h"
29 #include "pixdesc.h"
30 #include "pixfmt.h"
31 
32 static const HWContextType * const hw_table[] = {
33 #if CONFIG_CUDA
34  &ff_hwcontext_type_cuda,
35 #endif
36 #if CONFIG_D3D11VA
37  &ff_hwcontext_type_d3d11va,
38 #endif
39 #if CONFIG_LIBDRM
40  &ff_hwcontext_type_drm,
41 #endif
42 #if CONFIG_DXVA2
43  &ff_hwcontext_type_dxva2,
44 #endif
45 #if CONFIG_OPENCL
46  &ff_hwcontext_type_opencl,
47 #endif
48 #if CONFIG_QSV
49  &ff_hwcontext_type_qsv,
50 #endif
51 #if CONFIG_VAAPI
52  &ff_hwcontext_type_vaapi,
53 #endif
54 #if CONFIG_VDPAU
55  &ff_hwcontext_type_vdpau,
56 #endif
57 #if CONFIG_VIDEOTOOLBOX
58  &ff_hwcontext_type_videotoolbox,
59 #endif
60 #if CONFIG_MEDIACODEC
61  &ff_hwcontext_type_mediacodec,
62 #endif
63 #if CONFIG_VULKAN
64  &ff_hwcontext_type_vulkan,
65 #endif
66  NULL,
67 };
68 
69 static const char *const hw_type_names[] = {
70  [AV_HWDEVICE_TYPE_CUDA] = "cuda",
71  [AV_HWDEVICE_TYPE_DRM] = "drm",
72  [AV_HWDEVICE_TYPE_DXVA2] = "dxva2",
73  [AV_HWDEVICE_TYPE_D3D11VA] = "d3d11va",
74  [AV_HWDEVICE_TYPE_OPENCL] = "opencl",
75  [AV_HWDEVICE_TYPE_QSV] = "qsv",
76  [AV_HWDEVICE_TYPE_VAAPI] = "vaapi",
77  [AV_HWDEVICE_TYPE_VDPAU] = "vdpau",
78  [AV_HWDEVICE_TYPE_VIDEOTOOLBOX] = "videotoolbox",
79  [AV_HWDEVICE_TYPE_MEDIACODEC] = "mediacodec",
80  [AV_HWDEVICE_TYPE_VULKAN] = "vulkan",
81 };
82 
83 enum AVHWDeviceType av_hwdevice_find_type_by_name(const char *name)
84 {
85  int type;
86  for (type = 0; type < FF_ARRAY_ELEMS(hw_type_names); type++) {
87  if (hw_type_names[type] && !strcmp(hw_type_names[type], name))
88  return type;
89  }
90  return AV_HWDEVICE_TYPE_NONE;
91 }
92 
93 const char *av_hwdevice_get_type_name(enum AVHWDeviceType type)
94 {
95  if (type > AV_HWDEVICE_TYPE_NONE &&
96  type < FF_ARRAY_ELEMS(hw_type_names))
97  return hw_type_names[type];
98  else
99  return NULL;
100 }
101 
102 enum AVHWDeviceType av_hwdevice_iterate_types(enum AVHWDeviceType prev)
103 {
104  enum AVHWDeviceType next;
105  int i, set = 0;
106  for (i = 0; hw_table[i]; i++) {
107  if (prev != AV_HWDEVICE_TYPE_NONE && hw_table[i]->type <= prev)
108  continue;
109  if (!set || hw_table[i]->type < next) {
110  next = hw_table[i]->type;
111  set = 1;
112  }
113  }
114  return set ? next : AV_HWDEVICE_TYPE_NONE;
115 }
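/*
 * Illustrative usage sketch (not part of hwcontext.c): enumerating every
 * hardware device type compiled into this libavutil build with
 * av_hwdevice_iterate_types() and av_hwdevice_get_type_name().
 * Assumes <stdio.h> and <libavutil/hwcontext.h> are available.
 */
static void list_hwdevice_types_example(void)
{
    enum AVHWDeviceType type = AV_HWDEVICE_TYPE_NONE;
    while ((type = av_hwdevice_iterate_types(type)) != AV_HWDEVICE_TYPE_NONE)
        printf("available hwdevice type: %s\n", av_hwdevice_get_type_name(type));
}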
116 
117 static const AVClass hwdevice_ctx_class = {
118  .class_name = "AVHWDeviceContext",
119  .item_name = av_default_item_name,
120  .version = LIBAVUTIL_VERSION_INT,
121 };
122 
123 static void hwdevice_ctx_free(void *opaque, uint8_t *data)
124 {
125  AVHWDeviceContext *ctx = (AVHWDeviceContext*)data;
126 
127  /* uninit might still want to access the hw context and the user
128  * free() callback might destroy it, so uninit has to be called first */
129  if (ctx->internal->hw_type->device_uninit)
130  ctx->internal->hw_type->device_uninit(ctx);
131 
132  if (ctx->free)
133  ctx->free(ctx);
134 
135  av_buffer_unref(&ctx->internal->source_device);
136 
137  av_freep(&ctx->hwctx);
138  av_freep(&ctx->internal->priv);
139  av_freep(&ctx->internal);
140  av_freep(&ctx);
141 }
142 
143 AVBufferRef *av_hwdevice_ctx_alloc(enum AVHWDeviceType type)
144 {
145  AVHWDeviceContext *ctx;
146  AVBufferRef *buf;
147  const HWContextType *hw_type = NULL;
148  int i;
149 
150  for (i = 0; hw_table[i]; i++) {
151  if (hw_table[i]->type == type) {
152  hw_type = hw_table[i];
153  break;
154  }
155  }
156  if (!hw_type)
157  return NULL;
158 
159  ctx = av_mallocz(sizeof(*ctx));
160  if (!ctx)
161  return NULL;
162 
163  ctx->internal = av_mallocz(sizeof(*ctx->internal));
164  if (!ctx->internal)
165  goto fail;
166 
167  if (hw_type->device_priv_size) {
168  ctx->internal->priv = av_mallocz(hw_type->device_priv_size);
169  if (!ctx->internal->priv)
170  goto fail;
171  }
172 
173  if (hw_type->device_hwctx_size) {
174  ctx->hwctx = av_mallocz(hw_type->device_hwctx_size);
175  if (!ctx->hwctx)
176  goto fail;
177  }
178 
179  buf = av_buffer_create((uint8_t*)ctx, sizeof(*ctx),
180  hwdevice_ctx_free, NULL,
181  AV_BUFFER_FLAG_READONLY);
182  if (!buf)
183  goto fail;
184 
185  ctx->type = type;
186  ctx->av_class = &hwdevice_ctx_class;
187 
188  ctx->internal->hw_type = hw_type;
189 
190  return buf;
191 
192 fail:
193  if (ctx->internal)
194  av_freep(&ctx->internal->priv);
195  av_freep(&ctx->internal);
196  av_freep(&ctx->hwctx);
197  av_freep(&ctx);
198  return NULL;
199 }
200 
201 int av_hwdevice_ctx_init(AVBufferRef *ref)
202 {
203  AVHWDeviceContext *ctx = (AVHWDeviceContext*)ref->data;
204  int ret;
205 
206  if (ctx->internal->hw_type->device_init) {
207  ret = ctx->internal->hw_type->device_init(ctx);
208  if (ret < 0)
209  goto fail;
210  }
211 
212  return 0;
213 fail:
214  if (ctx->internal->hw_type->device_uninit)
215  ctx->internal->hw_type->device_uninit(ctx);
216  return ret;
217 }
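/*
 * Illustrative usage sketch (not part of hwcontext.c): allocating a device
 * context by hand and finalising it with av_hwdevice_ctx_init().  The VAAPI
 * type is only an example; on this path the caller is expected to fill in
 * the type-specific public hwctx (e.g. the VADisplay for VAAPI) before
 * calling init.  Most callers use av_hwdevice_ctx_create() instead.
 */
static int device_alloc_init_example(AVBufferRef **out)
{
    AVBufferRef *ref = av_hwdevice_ctx_alloc(AV_HWDEVICE_TYPE_VAAPI);
    int ret;

    if (!ref)
        return AVERROR(ENOMEM);

    /* ... populate ((AVHWDeviceContext*)ref->data)->hwctx here ... */

    ret = av_hwdevice_ctx_init(ref);
    if (ret < 0) {
        av_buffer_unref(&ref);
        return ret;
    }
    *out = ref;
    return 0;
}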
218 
219 static const AVClass hwframe_ctx_class = {
220  .class_name = "AVHWFramesContext",
221  .item_name = av_default_item_name,
222  .version = LIBAVUTIL_VERSION_INT,
223 };
224 
225 static void hwframe_ctx_free(void *opaque, uint8_t *data)
226 {
227  AVHWFramesContext *ctx = (AVHWFramesContext*)data;
228 
229  if (ctx->internal->pool_internal)
230  av_buffer_pool_uninit(&ctx->internal->pool_internal);
231 
232  if (ctx->internal->hw_type->frames_uninit)
233  ctx->internal->hw_type->frames_uninit(ctx);
234 
235  if (ctx->free)
236  ctx->free(ctx);
237 
238  av_buffer_unref(&ctx->internal->source_frames);
239 
240  av_buffer_unref(&ctx->device_ref);
241 
242  av_freep(&ctx->hwctx);
243  av_freep(&ctx->internal->priv);
244  av_freep(&ctx->internal);
245  av_freep(&ctx);
246 }
247 
248 AVBufferRef *av_hwframe_ctx_alloc(AVBufferRef *device_ref_in)
249 {
250  AVHWDeviceContext *device_ctx = (AVHWDeviceContext*)device_ref_in->data;
251  const HWContextType *hw_type = device_ctx->internal->hw_type;
252  AVHWFramesContext *ctx;
253  AVBufferRef *buf, *device_ref = NULL;
254 
255  ctx = av_mallocz(sizeof(*ctx));
256  if (!ctx)
257  return NULL;
258 
259  ctx->internal = av_mallocz(sizeof(*ctx->internal));
260  if (!ctx->internal)
261  goto fail;
262 
263  if (hw_type->frames_priv_size) {
264  ctx->internal->priv = av_mallocz(hw_type->frames_priv_size);
265  if (!ctx->internal->priv)
266  goto fail;
267  }
268 
269  if (hw_type->frames_hwctx_size) {
270  ctx->hwctx = av_mallocz(hw_type->frames_hwctx_size);
271  if (!ctx->hwctx)
272  goto fail;
273  }
274 
275  device_ref = av_buffer_ref(device_ref_in);
276  if (!device_ref)
277  goto fail;
278 
279  buf = av_buffer_create((uint8_t*)ctx, sizeof(*ctx),
280  hwframe_ctx_free, NULL,
281  AV_BUFFER_FLAG_READONLY);
282  if (!buf)
283  goto fail;
284 
285  ctx->av_class = &hwframe_ctx_class;
286  ctx->device_ref = device_ref;
287  ctx->device_ctx = device_ctx;
288  ctx->format = AV_PIX_FMT_NONE;
289  ctx->sw_format = AV_PIX_FMT_NONE;
290 
291  ctx->internal->hw_type = hw_type;
292 
293  return buf;
294 
295 fail:
296  if (device_ref)
297  av_buffer_unref(&device_ref);
298  if (ctx->internal)
299  av_freep(&ctx->internal->priv);
300  av_freep(&ctx->internal);
301  av_freep(&ctx->hwctx);
302  av_freep(&ctx);
303  return NULL;
304 }
305 
306 static int hwframe_pool_prealloc(AVBufferRef *ref)
307 {
308  AVHWFramesContext *ctx = (AVHWFramesContext*)ref->data;
309  AVFrame **frames;
310  int i, ret = 0;
311 
312  frames = av_calloc(ctx->initial_pool_size, sizeof(*frames));
313  if (!frames)
314  return AVERROR(ENOMEM);
315 
316  for (i = 0; i < ctx->initial_pool_size; i++) {
317  frames[i] = av_frame_alloc();
318  if (!frames[i])
319  goto fail;
320 
321  ret = av_hwframe_get_buffer(ref, frames[i], 0);
322  if (ret < 0)
323  goto fail;
324  }
325 
326 fail:
327  for (i = 0; i < ctx->initial_pool_size; i++)
328  av_frame_free(&frames[i]);
329  av_freep(&frames);
330 
331  return ret;
332 }
333 
334 int av_hwframe_ctx_init(AVBufferRef *ref)
335 {
336  AVHWFramesContext *ctx = (AVHWFramesContext*)ref->data;
337  const enum AVPixelFormat *pix_fmt;
338  int ret;
339 
340  if (ctx->internal->source_frames) {
341  /* A derived frame context is already initialised. */
342  return 0;
343  }
344 
345  /* validate the pixel format */
346  for (pix_fmt = ctx->internal->hw_type->pix_fmts; *pix_fmt != AV_PIX_FMT_NONE; pix_fmt++) {
347  if (*pix_fmt == ctx->format)
348  break;
349  }
350  if (*pix_fmt == AV_PIX_FMT_NONE) {
351  av_log(ctx, AV_LOG_ERROR,
352  "The hardware pixel format '%s' is not supported by the device type '%s'\n",
353  av_get_pix_fmt_name(ctx->format), ctx->internal->hw_type->name);
354  return AVERROR(ENOSYS);
355  }
356 
357  /* validate the dimensions */
358  ret = av_image_check_size(ctx->width, ctx->height, 0, ctx);
359  if (ret < 0)
360  return ret;
361 
362  /* format-specific init */
363  if (ctx->internal->hw_type->frames_init) {
364  ret = ctx->internal->hw_type->frames_init(ctx);
365  if (ret < 0)
366  goto fail;
367  }
368 
369  if (ctx->internal->pool_internal && !ctx->pool)
370  ctx->pool = ctx->internal->pool_internal;
371 
372  /* preallocate the frames in the pool, if requested */
373  if (ctx->initial_pool_size > 0) {
374  ret = hwframe_pool_prealloc(ref);
375  if (ret < 0)
376  goto fail;
377  }
378 
379  return 0;
380 fail:
381  if (ctx->internal->hw_type->frames_uninit)
382  ctx->internal->hw_type->frames_uninit(ctx);
383  return ret;
384 }
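/*
 * Illustrative usage sketch (not part of hwcontext.c): creating and
 * initialising a frames context on an already opened device.  The pixel
 * formats, dimensions and pool size are arbitrary example values; they must
 * match what the device type actually supports.
 */
static int frames_ctx_example(AVBufferRef *device_ref, AVBufferRef **out)
{
    AVBufferRef *frames_ref = av_hwframe_ctx_alloc(device_ref);
    AVHWFramesContext *frames;
    int ret;

    if (!frames_ref)
        return AVERROR(ENOMEM);

    frames            = (AVHWFramesContext*)frames_ref->data;
    frames->format    = AV_PIX_FMT_VAAPI; /* hardware pixel format (example) */
    frames->sw_format = AV_PIX_FMT_NV12;  /* underlying data layout (example) */
    frames->width     = 1920;
    frames->height    = 1080;
    frames->initial_pool_size = 4;        /* preallocate four surfaces */

    ret = av_hwframe_ctx_init(frames_ref);
    if (ret < 0) {
        av_buffer_unref(&frames_ref);
        return ret;
    }
    *out = frames_ref;
    return 0;
}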
385 
386 int av_hwframe_transfer_get_formats(AVBufferRef *hwframe_ref,
387  enum AVHWFrameTransferDirection dir,
388  enum AVPixelFormat **formats, int flags)
389 {
390  AVHWFramesContext *ctx = (AVHWFramesContext*)hwframe_ref->data;
391 
392  if (!ctx->internal->hw_type->transfer_get_formats)
393  return AVERROR(ENOSYS);
394 
395  return ctx->internal->hw_type->transfer_get_formats(ctx, dir, formats);
396 }
397 
398 static int transfer_data_alloc(AVFrame *dst, const AVFrame *src, int flags)
399 {
400  AVHWFramesContext *ctx = (AVHWFramesContext*)src->hw_frames_ctx->data;
401  AVFrame *frame_tmp;
402  int ret = 0;
403 
404  frame_tmp = av_frame_alloc();
405  if (!frame_tmp)
406  return AVERROR(ENOMEM);
407 
408  /* if the format is set, use that
409  * otherwise pick the first supported one */
410  if (dst->format >= 0) {
411  frame_tmp->format = dst->format;
412  } else {
413  enum AVPixelFormat *formats;
414 
415  ret = av_hwframe_transfer_get_formats(src->hw_frames_ctx,
416  AV_HWFRAME_TRANSFER_DIRECTION_FROM,
417  &formats, 0);
418  if (ret < 0)
419  goto fail;
420  frame_tmp->format = formats[0];
421  av_freep(&formats);
422  }
423  frame_tmp->width = ctx->width;
424  frame_tmp->height = ctx->height;
425 
426  ret = av_frame_get_buffer(frame_tmp, 0);
427  if (ret < 0)
428  goto fail;
429 
430  ret = av_hwframe_transfer_data(frame_tmp, src, flags);
431  if (ret < 0)
432  goto fail;
433 
434  frame_tmp->width = src->width;
435  frame_tmp->height = src->height;
436 
437  av_frame_move_ref(dst, frame_tmp);
438 
439 fail:
440  av_frame_free(&frame_tmp);
441  return ret;
442 }
443 
444 int av_hwframe_transfer_data(AVFrame *dst, const AVFrame *src, int flags)
445 {
446  AVHWFramesContext *ctx;
447  int ret;
448 
449  if (!dst->buf[0])
450  return transfer_data_alloc(dst, src, flags);
451 
452  /*
453  * Hardware -> Hardware Transfer.
454  * Unlike Software -> Hardware or Hardware -> Software, the transfer
455  * function could be provided by either the src or dst, depending on
456  * the specific combination of hardware.
457  */
458  if (src->hw_frames_ctx && dst->hw_frames_ctx) {
459  AVHWFramesContext *src_ctx =
460  (AVHWFramesContext*)src->hw_frames_ctx->data;
461  AVHWFramesContext *dst_ctx =
462  (AVHWFramesContext*)dst->hw_frames_ctx->data;
463 
464  if (src_ctx->internal->source_frames) {
465  av_log(src_ctx, AV_LOG_ERROR,
466  "A device with a derived frame context cannot be used as "
467  "the source of a HW -> HW transfer.");
468  return AVERROR(ENOSYS);
469  }
470 
471  if (dst_ctx->internal->source_frames) {
472  av_log(src_ctx, AV_LOG_ERROR,
473  "A device with a derived frame context cannot be used as "
474  "the destination of a HW -> HW transfer.");
475  return AVERROR(ENOSYS);
476  }
477 
478  ret = src_ctx->internal->hw_type->transfer_data_from(src_ctx, dst, src);
479  if (ret == AVERROR(ENOSYS))
480  ret = dst_ctx->internal->hw_type->transfer_data_to(dst_ctx, dst, src);
481  if (ret < 0)
482  return ret;
483  } else {
484  if (src->hw_frames_ctx) {
485  ctx = (AVHWFramesContext*)src->hw_frames_ctx->data;
486 
487  ret = ctx->internal->hw_type->transfer_data_from(ctx, dst, src);
488  if (ret < 0)
489  return ret;
490  } else if (dst->hw_frames_ctx) {
491  ctx = (AVHWFramesContext*)dst->hw_frames_ctx->data;
492 
493  ret = ctx->internal->hw_type->transfer_data_to(ctx, dst, src);
494  if (ret < 0)
495  return ret;
496  } else {
497  return AVERROR(ENOSYS);
498  }
499  }
500  return 0;
501 }
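/*
 * Illustrative usage sketch (not part of hwcontext.c): downloading a
 * hardware frame into a freshly allocated system-memory frame.  Leaving
 * sw_frame->format unset makes transfer_data_alloc() above pick the first
 * format reported for the FROM direction.
 */
static int download_frame_example(const AVFrame *hw_frame, AVFrame **out)
{
    AVFrame *sw_frame = av_frame_alloc();
    int ret;

    if (!sw_frame)
        return AVERROR(ENOMEM);

    ret = av_hwframe_transfer_data(sw_frame, hw_frame, 0);
    if (ret < 0) {
        av_frame_free(&sw_frame);
        return ret;
    }
    *out = sw_frame;
    return 0;
}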
502 
503 int av_hwframe_get_buffer(AVBufferRef *hwframe_ref, AVFrame *frame, int flags)
504 {
505  AVHWFramesContext *ctx = (AVHWFramesContext*)hwframe_ref->data;
506  int ret;
507 
508  if (ctx->internal->source_frames) {
509  // This is a derived frame context, so we allocate in the source
510  // and map the frame immediately.
511  AVFrame *src_frame;
512 
513  frame->format = ctx->format;
514  frame->hw_frames_ctx = av_buffer_ref(hwframe_ref);
515  if (!frame->hw_frames_ctx)
516  return AVERROR(ENOMEM);
517 
518  src_frame = av_frame_alloc();
519  if (!src_frame)
520  return AVERROR(ENOMEM);
521 
522  ret = av_hwframe_get_buffer(ctx->internal->source_frames,
523  src_frame, 0);
524  if (ret < 0) {
525  av_frame_free(&src_frame);
526  return ret;
527  }
528 
529  ret = av_hwframe_map(frame, src_frame,
530  ctx->internal->source_allocation_map_flags);
531  if (ret) {
532  av_log(ctx, AV_LOG_ERROR, "Failed to map frame into derived "
533  "frame context: %d.\n", ret);
534  av_frame_free(&src_frame);
535  return ret;
536  }
537 
538  // Free the source frame immediately - the mapped frame still
539  // contains a reference to it.
540  av_frame_free(&src_frame);
541 
542  return 0;
543  }
544 
545  if (!ctx->internal->hw_type->frames_get_buffer)
546  return AVERROR(ENOSYS);
547 
548  if (!ctx->pool)
549  return AVERROR(EINVAL);
550 
551  frame->hw_frames_ctx = av_buffer_ref(hwframe_ref);
552  if (!frame->hw_frames_ctx)
553  return AVERROR(ENOMEM);
554 
555  ret = ctx->internal->hw_type->frames_get_buffer(ctx, frame);
556  if (ret < 0) {
557  av_buffer_unref(&frame->hw_frames_ctx);
558  return ret;
559  }
560 
561  frame->extended_data = frame->data;
562 
563  return 0;
564 }
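/*
 * Illustrative usage sketch (not part of hwcontext.c): pulling one hardware
 * frame out of an initialised frames context.  The returned frame carries a
 * reference to the frames context in frame->hw_frames_ctx.
 */
static int get_hw_frame_example(AVBufferRef *frames_ref, AVFrame **out)
{
    AVFrame *frame = av_frame_alloc();
    int ret;

    if (!frame)
        return AVERROR(ENOMEM);

    ret = av_hwframe_get_buffer(frames_ref, frame, 0);
    if (ret < 0) {
        av_frame_free(&frame);
        return ret;
    }
    *out = frame;
    return 0;
}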
565 
566 void *av_hwdevice_hwconfig_alloc(AVBufferRef *ref)
567 {
568  AVHWDeviceContext *ctx = (AVHWDeviceContext*)ref->data;
569  const HWContextType *hw_type = ctx->internal->hw_type;
570 
571  if (hw_type->device_hwconfig_size == 0)
572  return NULL;
573 
574  return av_mallocz(hw_type->device_hwconfig_size);
575 }
576 
577 AVHWFramesConstraints *av_hwdevice_get_hwframe_constraints(AVBufferRef *ref,
578  const void *hwconfig)
579 {
580  AVHWDeviceContext *ctx = (AVHWDeviceContext*)ref->data;
581  const HWContextType *hw_type = ctx->internal->hw_type;
582  AVHWFramesConstraints *constraints;
583 
584  if (!hw_type->frames_get_constraints)
585  return NULL;
586 
587  constraints = av_mallocz(sizeof(*constraints));
588  if (!constraints)
589  return NULL;
590 
591  constraints->min_width = constraints->min_height = 0;
592  constraints->max_width = constraints->max_height = INT_MAX;
593 
594  if (hw_type->frames_get_constraints(ctx, hwconfig, constraints) >= 0) {
595  return constraints;
596  } else {
597  av_hwframe_constraints_free(&constraints);
598  return NULL;
599  }
600 }
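/*
 * Illustrative usage sketch (not part of hwcontext.c): querying the frame
 * constraints of a device with a NULL hwconfig (i.e. the generic
 * constraints) and printing the supported surface size range.
 * Assumes <stdio.h> is available.
 */
static void print_constraints_example(AVBufferRef *device_ref)
{
    AVHWFramesConstraints *cst =
        av_hwdevice_get_hwframe_constraints(device_ref, NULL);

    if (!cst)
        return;
    printf("surface size: %dx%d .. %dx%d\n",
           cst->min_width, cst->min_height, cst->max_width, cst->max_height);
    av_hwframe_constraints_free(&cst);
}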
601 
602 void av_hwframe_constraints_free(AVHWFramesConstraints **constraints)
603 {
604  if (*constraints) {
605  av_freep(&(*constraints)->valid_hw_formats);
606  av_freep(&(*constraints)->valid_sw_formats);
607  }
608  av_freep(constraints);
609 }
610 
611 int av_hwdevice_ctx_create(AVBufferRef **pdevice_ref, enum AVHWDeviceType type,
612  const char *device, AVDictionary *opts, int flags)
613 {
614  AVBufferRef *device_ref = NULL;
615  AVHWDeviceContext *device_ctx;
616  int ret = 0;
617 
618  device_ref = av_hwdevice_ctx_alloc(type);
619  if (!device_ref) {
620  ret = AVERROR(ENOMEM);
621  goto fail;
622  }
623  device_ctx = (AVHWDeviceContext*)device_ref->data;
624 
625  if (!device_ctx->internal->hw_type->device_create) {
626  ret = AVERROR(ENOSYS);
627  goto fail;
628  }
629 
630  ret = device_ctx->internal->hw_type->device_create(device_ctx, device,
631  opts, flags);
632  if (ret < 0)
633  goto fail;
634 
635  ret = av_hwdevice_ctx_init(device_ref);
636  if (ret < 0)
637  goto fail;
638 
639  *pdevice_ref = device_ref;
640  return 0;
641 fail:
642  av_buffer_unref(&device_ref);
643  *pdevice_ref = NULL;
644  return ret;
645 }
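/*
 * Illustrative usage sketch (not part of hwcontext.c): the usual one-call
 * way of opening a hardware device.  The device type is an example; passing
 * NULL as the device string asks the backend to pick a default device.
 */
static int open_device_example(AVBufferRef **out)
{
    return av_hwdevice_ctx_create(out, AV_HWDEVICE_TYPE_VAAPI,
                                  NULL /* default device */, NULL, 0);
}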
646 
647 int av_hwdevice_ctx_create_derived_opts(AVBufferRef **dst_ref_ptr,
648  enum AVHWDeviceType type,
649  AVBufferRef *src_ref,
650  AVDictionary *options, int flags)
651 {
652  AVBufferRef *dst_ref = NULL, *tmp_ref;
653  AVHWDeviceContext *dst_ctx, *tmp_ctx;
654  int ret = 0;
655 
656  tmp_ref = src_ref;
657  while (tmp_ref) {
658  tmp_ctx = (AVHWDeviceContext*)tmp_ref->data;
659  if (tmp_ctx->type == type) {
660  dst_ref = av_buffer_ref(tmp_ref);
661  if (!dst_ref) {
662  ret = AVERROR(ENOMEM);
663  goto fail;
664  }
665  goto done;
666  }
667  tmp_ref = tmp_ctx->internal->source_device;
668  }
669 
670  dst_ref = av_hwdevice_ctx_alloc(type);
671  if (!dst_ref) {
672  ret = AVERROR(ENOMEM);
673  goto fail;
674  }
675  dst_ctx = (AVHWDeviceContext*)dst_ref->data;
676 
677  tmp_ref = src_ref;
678  while (tmp_ref) {
679  tmp_ctx = (AVHWDeviceContext*)tmp_ref->data;
680  if (dst_ctx->internal->hw_type->device_derive) {
681  ret = dst_ctx->internal->hw_type->device_derive(dst_ctx,
682  tmp_ctx,
683  options,
684  flags);
685  if (ret == 0) {
686  dst_ctx->internal->source_device = av_buffer_ref(src_ref);
687  if (!dst_ctx->internal->source_device) {
688  ret = AVERROR(ENOMEM);
689  goto fail;
690  }
691  ret = av_hwdevice_ctx_init(dst_ref);
692  if (ret < 0)
693  goto fail;
694  goto done;
695  }
696  if (ret != AVERROR(ENOSYS))
697  goto fail;
698  }
699  tmp_ref = tmp_ctx->internal->source_device;
700  }
701 
702  ret = AVERROR(ENOSYS);
703  goto fail;
704 
705 done:
706  *dst_ref_ptr = dst_ref;
707  return 0;
708 
709 fail:
710  av_buffer_unref(&dst_ref);
711  *dst_ref_ptr = NULL;
712  return ret;
713 }
714 
715 int av_hwdevice_ctx_create_derived(AVBufferRef **dst_ref_ptr,
716  enum AVHWDeviceType type,
717  AVBufferRef *src_ref, int flags)
718 {
719  return av_hwdevice_ctx_create_derived_opts(dst_ref_ptr, type, src_ref,
720  NULL, flags);
721 }
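/*
 * Illustrative usage sketch (not part of hwcontext.c): deriving an OpenCL
 * device from an existing device (for instance a VAAPI one) so that frames
 * can later be mapped between the two APIs.  The type pair is an example;
 * derivation only works where a device_derive implementation exists.
 */
static int derive_device_example(AVBufferRef *src_device_ref, AVBufferRef **out)
{
    return av_hwdevice_ctx_create_derived(out, AV_HWDEVICE_TYPE_OPENCL,
                                          src_device_ref, 0);
}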
722 
723 static void ff_hwframe_unmap(void *opaque, uint8_t *data)
724 {
725  HWMapDescriptor *hwmap = (HWMapDescriptor*)data;
726  AVHWFramesContext *ctx = opaque;
727 
728  if (hwmap->unmap)
729  hwmap->unmap(ctx, hwmap);
730 
731  av_frame_free(&hwmap->source);
732 
733  av_buffer_unref(&hwmap->hw_frames_ctx);
734 
735  av_free(hwmap);
736 }
737 
738 int ff_hwframe_map_create(AVBufferRef *hwframe_ref,
739  AVFrame *dst, const AVFrame *src,
740  void (*unmap)(AVHWFramesContext *ctx,
741  HWMapDescriptor *hwmap),
742  void *priv)
743 {
744  AVHWFramesContext *ctx = (AVHWFramesContext*)hwframe_ref->data;
745  HWMapDescriptor *hwmap;
746  int ret;
747 
748  hwmap = av_mallocz(sizeof(*hwmap));
749  if (!hwmap) {
750  ret = AVERROR(ENOMEM);
751  goto fail;
752  }
753 
754  hwmap->source = av_frame_alloc();
755  if (!hwmap->source) {
756  ret = AVERROR(ENOMEM);
757  goto fail;
758  }
759  ret = av_frame_ref(hwmap->source, src);
760  if (ret < 0)
761  goto fail;
762 
763  hwmap->hw_frames_ctx = av_buffer_ref(hwframe_ref);
764  if (!hwmap->hw_frames_ctx) {
765  ret = AVERROR(ENOMEM);
766  goto fail;
767  }
768 
769  hwmap->unmap = unmap;
770  hwmap->priv = priv;
771 
772  dst->buf[0] = av_buffer_create((uint8_t*)hwmap, sizeof(*hwmap),
773  &ff_hwframe_unmap, ctx, 0);
774  if (!dst->buf[0]) {
775  ret = AVERROR(ENOMEM);
776  goto fail;
777  }
778 
779  return 0;
780 
781 fail:
782  if (hwmap) {
783  av_buffer_unref(&hwmap->hw_frames_ctx);
784  av_frame_free(&hwmap->source);
785  }
786  av_free(hwmap);
787  return ret;
788 }
789 
790 int av_hwframe_map(AVFrame *dst, const AVFrame *src, int flags)
791 {
792  AVBufferRef *orig_dst_frames = dst->hw_frames_ctx;
793  enum AVPixelFormat orig_dst_fmt = dst->format;
794  AVHWFramesContext *src_frames, *dst_frames;
795  HWMapDescriptor *hwmap;
796  int ret;
797 
798  if (src->hw_frames_ctx && dst->hw_frames_ctx) {
799  src_frames = (AVHWFramesContext*)src->hw_frames_ctx->data;
800  dst_frames = (AVHWFramesContext*)dst->hw_frames_ctx->data;
801 
802  if ((src_frames == dst_frames &&
803  src->format == dst_frames->sw_format &&
804  dst->format == dst_frames->format) ||
805  (src_frames->internal->source_frames &&
806  src_frames->internal->source_frames->data ==
807  (uint8_t*)dst_frames)) {
808  // This is an unmap operation. We don't need to directly
809  // do anything here other than fill in the original frame,
810  // because the real unmap will be invoked when the last
811  // reference to the mapped frame disappears.
812  if (!src->buf[0]) {
813  av_log(src_frames, AV_LOG_ERROR, "Invalid mapping "
814  "found when attempting unmap.\n");
815  return AVERROR(EINVAL);
816  }
817  hwmap = (HWMapDescriptor*)src->buf[0]->data;
818  av_frame_unref(dst);
819  return av_frame_ref(dst, hwmap->source);
820  }
821  }
822 
823  if (src->hw_frames_ctx) {
824  src_frames = (AVHWFramesContext*)src->hw_frames_ctx->data;
825 
826  if (src_frames->format == src->format &&
827  src_frames->internal->hw_type->map_from) {
828  ret = src_frames->internal->hw_type->map_from(src_frames,
829  dst, src, flags);
830  if (ret >= 0)
831  return ret;
832  else if (ret != AVERROR(ENOSYS))
833  goto fail;
834  }
835  }
836 
837  if (dst->hw_frames_ctx) {
838  dst_frames = (AVHWFramesContext*)dst->hw_frames_ctx->data;
839 
840  if (dst_frames->format == dst->format &&
841  dst_frames->internal->hw_type->map_to) {
842  ret = dst_frames->internal->hw_type->map_to(dst_frames,
843  dst, src, flags);
844  if (ret >= 0)
845  return ret;
846  else if (ret != AVERROR(ENOSYS))
847  goto fail;
848  }
849  }
850 
851  return AVERROR(ENOSYS);
852 
853 fail:
854  // if the caller provided dst frames context, it should be preserved
855  // by this function
856  av_assert0(orig_dst_frames == NULL ||
857  orig_dst_frames == dst->hw_frames_ctx);
858 
859  // preserve user-provided dst frame fields, but clean
860  // anything we might have set
861  dst->hw_frames_ctx = NULL;
862  av_frame_unref(dst);
863 
864  dst->hw_frames_ctx = orig_dst_frames;
865  dst->format = orig_dst_fmt;
866 
867  return ret;
868 }
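/*
 * Illustrative usage sketch (not part of hwcontext.c): mapping a hardware
 * frame into system memory for read access.  The software format is an
 * example value and must be one the backend can map to; unmapping happens
 * implicitly when the last reference to the mapped frame is released.
 */
static int map_frame_example(const AVFrame *hw_frame, AVFrame **out)
{
    AVFrame *mapped = av_frame_alloc();
    int ret;

    if (!mapped)
        return AVERROR(ENOMEM);

    mapped->format = AV_PIX_FMT_NV12; /* requested mapped format (example) */

    ret = av_hwframe_map(mapped, hw_frame, AV_HWFRAME_MAP_READ);
    if (ret < 0) {
        av_frame_free(&mapped);
        return ret;
    }
    *out = mapped;
    return 0;
}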
869 
870 int av_hwframe_ctx_create_derived(AVBufferRef **derived_frame_ctx,
871  enum AVPixelFormat format,
872  AVBufferRef *derived_device_ctx,
873  AVBufferRef *source_frame_ctx,
874  int flags)
875 {
876  AVBufferRef *dst_ref = NULL;
877  AVHWFramesContext *dst = NULL;
878  AVHWFramesContext *src = (AVHWFramesContext*)source_frame_ctx->data;
879  int ret;
880 
881  if (src->internal->source_frames) {
882  AVHWFramesContext *src_src =
883  (AVHWFramesContext*)src->internal->source_frames->data;
884  AVHWDeviceContext *dst_dev =
885  (AVHWDeviceContext*)derived_device_ctx->data;
886 
887  if (src_src->device_ctx == dst_dev) {
888  // This is actually an unmapping, so we just return a
889  // reference to the source frame context.
890  *derived_frame_ctx =
891  av_buffer_ref(src->internal->source_frames);
892  if (!*derived_frame_ctx) {
893  ret = AVERROR(ENOMEM);
894  goto fail;
895  }
896  return 0;
897  }
898  }
899 
900  dst_ref = av_hwframe_ctx_alloc(derived_device_ctx);
901  if (!dst_ref) {
902  ret = AVERROR(ENOMEM);
903  goto fail;
904  }
905 
906  dst = (AVHWFramesContext*)dst_ref->data;
907 
908  dst->format = format;
909  dst->sw_format = src->sw_format;
910  dst->width = src->width;
911  dst->height = src->height;
912 
913  dst->internal->source_frames = av_buffer_ref(source_frame_ctx);
914  if (!dst->internal->source_frames) {
915  ret = AVERROR(ENOMEM);
916  goto fail;
917  }
918 
919  dst->internal->source_allocation_map_flags =
920  flags & (AV_HWFRAME_MAP_READ |
921  AV_HWFRAME_MAP_WRITE |
922  AV_HWFRAME_MAP_OVERWRITE |
923  AV_HWFRAME_MAP_DIRECT);
924 
925  ret = AVERROR(ENOSYS);
926  if (src->internal->hw_type->frames_derive_from)
927  ret = src->internal->hw_type->frames_derive_from(dst, src, flags);
928  if (ret == AVERROR(ENOSYS) &&
929  dst->internal->hw_type->frames_derive_to)
930  ret = dst->internal->hw_type->frames_derive_to(dst, src, flags);
931  if (ret == AVERROR(ENOSYS))
932  ret = 0;
933  if (ret)
934  goto fail;
935 
936  *derived_frame_ctx = dst_ref;
937  return 0;
938 
939 fail:
940  if (dst)
941  av_buffer_unref(&dst->internal->source_frames);
942  av_buffer_unref(&dst_ref);
943  return ret;
944 }
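/*
 * Illustrative usage sketch (not part of hwcontext.c): wrapping an existing
 * frames context (say a VAAPI one) as an OpenCL frames context on a device
 * derived from it, so frames allocated there stay backed by the original
 * pool and can be mapped in both directions.  The format and flags are
 * example values.
 */
static int derive_frames_example(AVBufferRef *opencl_device_ref,
                                 AVBufferRef *vaapi_frames_ref,
                                 AVBufferRef **out)
{
    return av_hwframe_ctx_create_derived(out, AV_PIX_FMT_OPENCL,
                                         opencl_device_ref, vaapi_frames_ref,
                                         AV_HWFRAME_MAP_READ |
                                         AV_HWFRAME_MAP_WRITE);
}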
945 
946 int ff_hwframe_map_replace(AVFrame *dst, const AVFrame *src)
947 {
948  HWMapDescriptor *hwmap = (HWMapDescriptor*)dst->buf[0]->data;
949  av_frame_unref(hwmap->source);
950  return av_frame_ref(hwmap->source, src);
951 }