FFmpeg
hwcontext.c
/*
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "config.h"

#include "buffer.h"
#include "common.h"
#include "hwcontext.h"
#include "hwcontext_internal.h"
#include "imgutils.h"
#include "log.h"
#include "mem.h"
#include "pixdesc.h"
#include "pixfmt.h"

static const HWContextType * const hw_table[] = {
#if CONFIG_CUDA
    &ff_hwcontext_type_cuda,
#endif
#if CONFIG_D3D11VA
    &ff_hwcontext_type_d3d11va,
#endif
#if CONFIG_LIBDRM
    &ff_hwcontext_type_drm,
#endif
#if CONFIG_DXVA2
    &ff_hwcontext_type_dxva2,
#endif
#if CONFIG_OPENCL
    &ff_hwcontext_type_opencl,
#endif
#if CONFIG_QSV
    &ff_hwcontext_type_qsv,
#endif
#if CONFIG_VAAPI
    &ff_hwcontext_type_vaapi,
#endif
#if CONFIG_VDPAU
    &ff_hwcontext_type_vdpau,
#endif
#if CONFIG_VIDEOTOOLBOX
    &ff_hwcontext_type_videotoolbox,
#endif
#if CONFIG_MEDIACODEC
    &ff_hwcontext_type_mediacodec,
#endif
#if CONFIG_VULKAN
    &ff_hwcontext_type_vulkan,
#endif
    NULL,
};

static const char *const hw_type_names[] = {
    [AV_HWDEVICE_TYPE_CUDA]         = "cuda",
    [AV_HWDEVICE_TYPE_DRM]          = "drm",
    [AV_HWDEVICE_TYPE_DXVA2]        = "dxva2",
    [AV_HWDEVICE_TYPE_D3D11VA]      = "d3d11va",
    [AV_HWDEVICE_TYPE_OPENCL]       = "opencl",
    [AV_HWDEVICE_TYPE_QSV]          = "qsv",
    [AV_HWDEVICE_TYPE_VAAPI]        = "vaapi",
    [AV_HWDEVICE_TYPE_VDPAU]        = "vdpau",
    [AV_HWDEVICE_TYPE_VIDEOTOOLBOX] = "videotoolbox",
    [AV_HWDEVICE_TYPE_MEDIACODEC]   = "mediacodec",
    [AV_HWDEVICE_TYPE_VULKAN]       = "vulkan",
};

enum AVHWDeviceType av_hwdevice_find_type_by_name(const char *name)
{
    int type;
    for (type = 0; type < FF_ARRAY_ELEMS(hw_type_names); type++) {
        if (hw_type_names[type] && !strcmp(hw_type_names[type], name))
            return type;
    }
    return AV_HWDEVICE_TYPE_NONE;
}

const char *av_hwdevice_get_type_name(enum AVHWDeviceType type)
{
    if (type > AV_HWDEVICE_TYPE_NONE &&
        type < FF_ARRAY_ELEMS(hw_type_names))
        return hw_type_names[type];
    else
        return NULL;
}

enum AVHWDeviceType av_hwdevice_iterate_types(enum AVHWDeviceType prev)
{
    enum AVHWDeviceType next;
    int i, set = 0;
    for (i = 0; hw_table[i]; i++) {
        if (prev != AV_HWDEVICE_TYPE_NONE && hw_table[i]->type <= prev)
            continue;
        if (!set || hw_table[i]->type < next) {
            next = hw_table[i]->type;
            set  = 1;
        }
    }
    return set ? next : AV_HWDEVICE_TYPE_NONE;
}

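/*
 * [Editor's illustrative sketch - not part of the original hwcontext.c.]
 * Typical caller-side use of av_hwdevice_iterate_types() together with
 * av_hwdevice_get_type_name(): list every hardware device type compiled
 * into this build of libavutil. Guarded with "#if 0" so the listing itself
 * is unchanged; the function name is an assumption for illustration.
 */
#if 0
static void list_supported_hwdevice_types(void)
{
    enum AVHWDeviceType type = AV_HWDEVICE_TYPE_NONE;
    while ((type = av_hwdevice_iterate_types(type)) != AV_HWDEVICE_TYPE_NONE)
        av_log(NULL, AV_LOG_INFO, "%s\n", av_hwdevice_get_type_name(type));
}
#endif
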
static const AVClass hwdevice_ctx_class = {
    .class_name = "AVHWDeviceContext",
    .item_name  = av_default_item_name,
    .version    = LIBAVUTIL_VERSION_INT,
};

static void hwdevice_ctx_free(void *opaque, uint8_t *data)
{
    AVHWDeviceContext *ctx = (AVHWDeviceContext*)data;

    /* uninit might still want to access the hw context and the user
     * free() callback might destroy it, so uninit has to be called first */
    if (ctx->internal->hw_type->device_uninit)
        ctx->internal->hw_type->device_uninit(ctx);

    if (ctx->free)
        ctx->free(ctx);

    av_buffer_unref(&ctx->internal->source_device);

    av_freep(&ctx->hwctx);
    av_freep(&ctx->internal->priv);
    av_freep(&ctx->internal);
    av_freep(&ctx);
}

AVBufferRef *av_hwdevice_ctx_alloc(enum AVHWDeviceType type)
{
    AVHWDeviceContext *ctx;
    AVBufferRef *buf;
    const HWContextType *hw_type = NULL;
    int i;

    for (i = 0; hw_table[i]; i++) {
        if (hw_table[i]->type == type) {
            hw_type = hw_table[i];
            break;
        }
    }
    if (!hw_type)
        return NULL;

    ctx = av_mallocz(sizeof(*ctx));
    if (!ctx)
        return NULL;

    ctx->internal = av_mallocz(sizeof(*ctx->internal));
    if (!ctx->internal)
        goto fail;

    if (hw_type->device_priv_size) {
        ctx->internal->priv = av_mallocz(hw_type->device_priv_size);
        if (!ctx->internal->priv)
            goto fail;
    }

    if (hw_type->device_hwctx_size) {
        ctx->hwctx = av_mallocz(hw_type->device_hwctx_size);
        if (!ctx->hwctx)
            goto fail;
    }

    buf = av_buffer_create((uint8_t*)ctx, sizeof(*ctx),
                           hwdevice_ctx_free, NULL,
                           AV_BUFFER_FLAG_READONLY);
    if (!buf)
        goto fail;

    ctx->type     = type;
    ctx->av_class = &hwdevice_ctx_class;

    ctx->internal->hw_type = hw_type;

    return buf;

fail:
    if (ctx->internal)
        av_freep(&ctx->internal->priv);
    av_freep(&ctx->internal);
    av_freep(&ctx->hwctx);
    av_freep(&ctx);
    return NULL;
}

int av_hwdevice_ctx_init(AVBufferRef *ref)
{
    AVHWDeviceContext *ctx = (AVHWDeviceContext*)ref->data;
    int ret;

    if (ctx->internal->hw_type->device_init) {
        ret = ctx->internal->hw_type->device_init(ctx);
        if (ret < 0)
            goto fail;
    }

    return 0;
fail:
    if (ctx->internal->hw_type->device_uninit)
        ctx->internal->hw_type->device_uninit(ctx);
    return ret;
}

static const AVClass hwframe_ctx_class = {
    .class_name = "AVHWFramesContext",
    .item_name  = av_default_item_name,
    .version    = LIBAVUTIL_VERSION_INT,
};

static void hwframe_ctx_free(void *opaque, uint8_t *data)
{
    AVHWFramesContext *ctx = (AVHWFramesContext*)data;

    if (ctx->internal->pool_internal)
        av_buffer_pool_uninit(&ctx->internal->pool_internal);

    if (ctx->internal->hw_type->frames_uninit)
        ctx->internal->hw_type->frames_uninit(ctx);

    if (ctx->free)
        ctx->free(ctx);

    av_buffer_unref(&ctx->internal->source_frames);

    av_buffer_unref(&ctx->device_ref);

    av_freep(&ctx->hwctx);
    av_freep(&ctx->internal->priv);
    av_freep(&ctx->internal);
    av_freep(&ctx);
}

AVBufferRef *av_hwframe_ctx_alloc(AVBufferRef *device_ref_in)
{
    AVHWDeviceContext *device_ctx = (AVHWDeviceContext*)device_ref_in->data;
    const HWContextType  *hw_type = device_ctx->internal->hw_type;
    AVHWFramesContext *ctx;
    AVBufferRef *buf, *device_ref = NULL;

    ctx = av_mallocz(sizeof(*ctx));
    if (!ctx)
        return NULL;

    ctx->internal = av_mallocz(sizeof(*ctx->internal));
    if (!ctx->internal)
        goto fail;

    if (hw_type->frames_priv_size) {
        ctx->internal->priv = av_mallocz(hw_type->frames_priv_size);
        if (!ctx->internal->priv)
            goto fail;
    }

    if (hw_type->frames_hwctx_size) {
        ctx->hwctx = av_mallocz(hw_type->frames_hwctx_size);
        if (!ctx->hwctx)
            goto fail;
    }

    device_ref = av_buffer_ref(device_ref_in);
    if (!device_ref)
        goto fail;

    buf = av_buffer_create((uint8_t*)ctx, sizeof(*ctx),
                           hwframe_ctx_free, NULL,
                           AV_BUFFER_FLAG_READONLY);
    if (!buf)
        goto fail;

    ctx->av_class   = &hwframe_ctx_class;
    ctx->device_ref = device_ref;
    ctx->device_ctx = device_ctx;
    ctx->format     = AV_PIX_FMT_NONE;
    ctx->sw_format  = AV_PIX_FMT_NONE;

    ctx->internal->hw_type = hw_type;

    return buf;

fail:
    if (device_ref)
        av_buffer_unref(&device_ref);
    if (ctx->internal)
        av_freep(&ctx->internal->priv);
    av_freep(&ctx->internal);
    av_freep(&ctx->hwctx);
    av_freep(&ctx);
    return NULL;
}

static int hwframe_pool_prealloc(AVBufferRef *ref)
{
    AVHWFramesContext *ctx = (AVHWFramesContext*)ref->data;
    AVFrame **frames;
    int i, ret = 0;

    frames = av_calloc(ctx->initial_pool_size, sizeof(*frames));
    if (!frames)
        return AVERROR(ENOMEM);

    for (i = 0; i < ctx->initial_pool_size; i++) {
        frames[i] = av_frame_alloc();
        if (!frames[i])
            goto fail;

        ret = av_hwframe_get_buffer(ref, frames[i], 0);
        if (ret < 0)
            goto fail;
    }

fail:
    for (i = 0; i < ctx->initial_pool_size; i++)
        av_frame_free(&frames[i]);
    av_freep(&frames);

    return ret;
}

int av_hwframe_ctx_init(AVBufferRef *ref)
{
    AVHWFramesContext *ctx = (AVHWFramesContext*)ref->data;
    const enum AVPixelFormat *pix_fmt;
    int ret;

    if (ctx->internal->source_frames) {
        /* A derived frame context is already initialised. */
        return 0;
    }

    /* validate the pixel format */
    for (pix_fmt = ctx->internal->hw_type->pix_fmts; *pix_fmt != AV_PIX_FMT_NONE; pix_fmt++) {
        if (*pix_fmt == ctx->format)
            break;
    }
    if (*pix_fmt == AV_PIX_FMT_NONE) {
        av_log(ctx, AV_LOG_ERROR,
               "The hardware pixel format '%s' is not supported by the device type '%s'\n",
               av_get_pix_fmt_name(ctx->format), ctx->internal->hw_type->name);
        return AVERROR(ENOSYS);
    }

    /* validate the dimensions */
    ret = av_image_check_size(ctx->width, ctx->height, 0, ctx);
    if (ret < 0)
        return ret;

    /* format-specific init */
    if (ctx->internal->hw_type->frames_init) {
        ret = ctx->internal->hw_type->frames_init(ctx);
        if (ret < 0)
            goto fail;
    }

    if (ctx->internal->pool_internal && !ctx->pool)
        ctx->pool = ctx->internal->pool_internal;

    /* preallocate the frames in the pool, if requested */
    if (ctx->initial_pool_size > 0) {
        ret = hwframe_pool_prealloc(ref);
        if (ret < 0)
            goto fail;
    }

    return 0;
fail:
    if (ctx->internal->hw_type->frames_uninit)
        ctx->internal->hw_type->frames_uninit(ctx);
    return ret;
}

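/*
 * [Editor's illustrative sketch - not part of the original hwcontext.c.]
 * Minimal caller-side setup of a frames context on an existing device
 * reference, as expected by av_hwframe_ctx_alloc()/av_hwframe_ctx_init().
 * The pixel formats and dimensions below are placeholders; real callers use
 * the values required by their decoder, filter or encoder.
 */
#if 0
static AVBufferRef *example_alloc_hw_frames(AVBufferRef *device_ref)
{
    AVBufferRef *frames_ref = av_hwframe_ctx_alloc(device_ref);
    AVHWFramesContext *frames;

    if (!frames_ref)
        return NULL;

    frames = (AVHWFramesContext*)frames_ref->data;
    frames->format            = AV_PIX_FMT_VAAPI; /* hardware surface format */
    frames->sw_format         = AV_PIX_FMT_NV12;  /* underlying data layout   */
    frames->width             = 1920;
    frames->height            = 1080;
    frames->initial_pool_size = 20;               /* preallocate 20 surfaces  */

    if (av_hwframe_ctx_init(frames_ref) < 0)
        av_buffer_unref(&frames_ref);
    return frames_ref;
}
#endif
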
int av_hwframe_transfer_get_formats(AVBufferRef *hwframe_ref,
                                    enum AVHWFrameTransferDirection dir,
                                    enum AVPixelFormat **formats, int flags)
{
    AVHWFramesContext *ctx = (AVHWFramesContext*)hwframe_ref->data;

    if (!ctx->internal->hw_type->transfer_get_formats)
        return AVERROR(ENOSYS);

    return ctx->internal->hw_type->transfer_get_formats(ctx, dir, formats);
}

static int transfer_data_alloc(AVFrame *dst, const AVFrame *src, int flags)
{
    AVHWFramesContext *ctx = (AVHWFramesContext*)src->hw_frames_ctx->data;
    AVFrame *frame_tmp;
    int ret = 0;

    frame_tmp = av_frame_alloc();
    if (!frame_tmp)
        return AVERROR(ENOMEM);

    /* if the format is set, use that
     * otherwise pick the first supported one */
    if (dst->format >= 0) {
        frame_tmp->format = dst->format;
    } else {
        enum AVPixelFormat *formats;

        ret = av_hwframe_transfer_get_formats(src->hw_frames_ctx,
                                              AV_HWFRAME_TRANSFER_DIRECTION_FROM,
                                              &formats, 0);
        if (ret < 0)
            goto fail;
        frame_tmp->format = formats[0];
        av_freep(&formats);
    }
    frame_tmp->width  = ctx->width;
    frame_tmp->height = ctx->height;

    ret = av_frame_get_buffer(frame_tmp, 0);
    if (ret < 0)
        goto fail;

    ret = av_hwframe_transfer_data(frame_tmp, src, flags);
    if (ret < 0)
        goto fail;

    frame_tmp->width  = src->width;
    frame_tmp->height = src->height;

    av_frame_move_ref(dst, frame_tmp);

fail:
    av_frame_free(&frame_tmp);
    return ret;
}

int av_hwframe_transfer_data(AVFrame *dst, const AVFrame *src, int flags)
{
    AVHWFramesContext *ctx;
    int ret;

    if (!dst->buf[0])
        return transfer_data_alloc(dst, src, flags);

    /*
     * Hardware -> Hardware Transfer.
     * Unlike Software -> Hardware or Hardware -> Software, the transfer
     * function could be provided by either the src or dst, depending on
     * the specific combination of hardware.
     */
    if (src->hw_frames_ctx && dst->hw_frames_ctx) {
        AVHWFramesContext *src_ctx =
            (AVHWFramesContext*)src->hw_frames_ctx->data;
        AVHWFramesContext *dst_ctx =
            (AVHWFramesContext*)dst->hw_frames_ctx->data;

        if (src_ctx->internal->source_frames) {
            av_log(src_ctx, AV_LOG_ERROR,
                   "A device with a derived frame context cannot be used as "
                   "the source of a HW -> HW transfer.");
            return AVERROR(ENOSYS);
        }

        if (dst_ctx->internal->source_frames) {
            av_log(src_ctx, AV_LOG_ERROR,
                   "A device with a derived frame context cannot be used as "
                   "the destination of a HW -> HW transfer.");
            return AVERROR(ENOSYS);
        }

        ret = src_ctx->internal->hw_type->transfer_data_from(src_ctx, dst, src);
        if (ret == AVERROR(ENOSYS))
            ret = dst_ctx->internal->hw_type->transfer_data_to(dst_ctx, dst, src);
        if (ret < 0)
            return ret;
    } else {
        if (src->hw_frames_ctx) {
            ctx = (AVHWFramesContext*)src->hw_frames_ctx->data;

            ret = ctx->internal->hw_type->transfer_data_from(ctx, dst, src);
            if (ret < 0)
                return ret;
        } else if (dst->hw_frames_ctx) {
            ctx = (AVHWFramesContext*)dst->hw_frames_ctx->data;

            ret = ctx->internal->hw_type->transfer_data_to(ctx, dst, src);
            if (ret < 0)
                return ret;
        } else {
            return AVERROR(ENOSYS);
        }
    }
    return 0;
}

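/*
 * [Editor's illustrative sketch - not part of the original hwcontext.c.]
 * Downloading a hardware frame into system memory. Because sw_frame has no
 * buffers and its format is left unset (-1), av_hwframe_transfer_data()
 * takes the transfer_data_alloc() path above and picks the first format
 * reported by av_hwframe_transfer_get_formats().
 */
#if 0
static int example_download_frame(AVFrame **out, const AVFrame *hw_frame)
{
    AVFrame *sw_frame = av_frame_alloc();
    int ret;

    if (!sw_frame)
        return AVERROR(ENOMEM);

    ret = av_hwframe_transfer_data(sw_frame, hw_frame, 0);
    if (ret < 0) {
        av_frame_free(&sw_frame);
        return ret;
    }
    *out = sw_frame;
    return 0;
}
#endif
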
int av_hwframe_get_buffer(AVBufferRef *hwframe_ref, AVFrame *frame, int flags)
{
    AVHWFramesContext *ctx = (AVHWFramesContext*)hwframe_ref->data;
    int ret;

    if (ctx->internal->source_frames) {
        // This is a derived frame context, so we allocate in the source
        // and map the frame immediately.
        AVFrame *src_frame;

        frame->format = ctx->format;
        frame->hw_frames_ctx = av_buffer_ref(hwframe_ref);
        if (!frame->hw_frames_ctx)
            return AVERROR(ENOMEM);

        src_frame = av_frame_alloc();
        if (!src_frame)
            return AVERROR(ENOMEM);

        ret = av_hwframe_get_buffer(ctx->internal->source_frames,
                                    src_frame, 0);
        if (ret < 0) {
            av_frame_free(&src_frame);
            return ret;
        }

        ret = av_hwframe_map(frame, src_frame,
                             ctx->internal->source_allocation_map_flags);
        if (ret) {
            av_log(ctx, AV_LOG_ERROR, "Failed to map frame into derived "
                   "frame context: %d.\n", ret);
            av_frame_free(&src_frame);
            return ret;
        }

        // Free the source frame immediately - the mapped frame still
        // contains a reference to it.
        av_frame_free(&src_frame);

        return 0;
    }

    if (!ctx->internal->hw_type->frames_get_buffer)
        return AVERROR(ENOSYS);

    if (!ctx->pool)
        return AVERROR(EINVAL);

    frame->hw_frames_ctx = av_buffer_ref(hwframe_ref);
    if (!frame->hw_frames_ctx)
        return AVERROR(ENOMEM);

    ret = ctx->internal->hw_type->frames_get_buffer(ctx, frame);
    if (ret < 0) {
        av_buffer_unref(&frame->hw_frames_ctx);
        return ret;
    }

    frame->extended_data = frame->data;

    return 0;
}

void *av_hwdevice_hwconfig_alloc(AVBufferRef *ref)
{
    AVHWDeviceContext *ctx = (AVHWDeviceContext*)ref->data;
    const HWContextType *hw_type = ctx->internal->hw_type;

    if (hw_type->device_hwconfig_size == 0)
        return NULL;

    return av_mallocz(hw_type->device_hwconfig_size);
}

AVHWFramesConstraints *av_hwdevice_get_hwframe_constraints(AVBufferRef *ref,
                                                           const void *hwconfig)
{
    AVHWDeviceContext *ctx = (AVHWDeviceContext*)ref->data;
    const HWContextType *hw_type = ctx->internal->hw_type;
    AVHWFramesConstraints *constraints;

    if (!hw_type->frames_get_constraints)
        return NULL;

    constraints = av_mallocz(sizeof(*constraints));
    if (!constraints)
        return NULL;

    constraints->min_width = constraints->min_height = 0;
    constraints->max_width = constraints->max_height = INT_MAX;

    if (hw_type->frames_get_constraints(ctx, hwconfig, constraints) >= 0) {
        return constraints;
    } else {
        av_hwframe_constraints_free(&constraints);
        return NULL;
    }
}

void av_hwframe_constraints_free(AVHWFramesConstraints **constraints)
{
    if (*constraints) {
        av_freep(&(*constraints)->valid_hw_formats);
        av_freep(&(*constraints)->valid_sw_formats);
    }
    av_freep(constraints);
}

int av_hwdevice_ctx_create(AVBufferRef **pdevice_ref, enum AVHWDeviceType type,
                           const char *device, AVDictionary *opts, int flags)
{
    AVBufferRef *device_ref = NULL;
    AVHWDeviceContext *device_ctx;
    int ret = 0;

    device_ref = av_hwdevice_ctx_alloc(type);
    if (!device_ref) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }
    device_ctx = (AVHWDeviceContext*)device_ref->data;

    if (!device_ctx->internal->hw_type->device_create) {
        ret = AVERROR(ENOSYS);
        goto fail;
    }

    ret = device_ctx->internal->hw_type->device_create(device_ctx, device,
                                                       opts, flags);
    if (ret < 0)
        goto fail;

    ret = av_hwdevice_ctx_init(device_ref);
    if (ret < 0)
        goto fail;

    *pdevice_ref = device_ref;
    return 0;
fail:
    av_buffer_unref(&device_ref);
    *pdevice_ref = NULL;
    return ret;
}

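/*
 * [Editor's illustrative sketch - not part of the original hwcontext.c.]
 * Opening a device with av_hwdevice_ctx_create(). The device string is
 * backend-specific; a DRM render node is shown here for VAAPI purely as an
 * example, and passing NULL instead lets the backend pick a default device.
 */
#if 0
static AVBufferRef *example_open_vaapi_device(void)
{
    AVBufferRef *device_ref = NULL;
    int ret = av_hwdevice_ctx_create(&device_ref, AV_HWDEVICE_TYPE_VAAPI,
                                     "/dev/dri/renderD128", NULL, 0);
    return ret < 0 ? NULL : device_ref;
}
#endif
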
int av_hwdevice_ctx_create_derived_opts(AVBufferRef **dst_ref_ptr,
                                        enum AVHWDeviceType type,
                                        AVBufferRef *src_ref,
                                        AVDictionary *options, int flags)
{
    AVBufferRef *dst_ref = NULL, *tmp_ref;
    AVHWDeviceContext *dst_ctx, *tmp_ctx;
    int ret = 0;

    tmp_ref = src_ref;
    while (tmp_ref) {
        tmp_ctx = (AVHWDeviceContext*)tmp_ref->data;
        if (tmp_ctx->type == type) {
            dst_ref = av_buffer_ref(tmp_ref);
            if (!dst_ref) {
                ret = AVERROR(ENOMEM);
                goto fail;
            }
            goto done;
        }
        tmp_ref = tmp_ctx->internal->source_device;
    }

    dst_ref = av_hwdevice_ctx_alloc(type);
    if (!dst_ref) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }
    dst_ctx = (AVHWDeviceContext*)dst_ref->data;

    tmp_ref = src_ref;
    while (tmp_ref) {
        tmp_ctx = (AVHWDeviceContext*)tmp_ref->data;
        if (dst_ctx->internal->hw_type->device_derive) {
            ret = dst_ctx->internal->hw_type->device_derive(dst_ctx,
                                                            tmp_ctx,
                                                            options,
                                                            flags);
            if (ret == 0) {
                dst_ctx->internal->source_device = av_buffer_ref(src_ref);
                if (!dst_ctx->internal->source_device) {
                    ret = AVERROR(ENOMEM);
                    goto fail;
                }
                ret = av_hwdevice_ctx_init(dst_ref);
                if (ret < 0)
                    goto fail;
                goto done;
            }
            if (ret != AVERROR(ENOSYS))
                goto fail;
        }
        tmp_ref = tmp_ctx->internal->source_device;
    }

    ret = AVERROR(ENOSYS);
    goto fail;

done:
    *dst_ref_ptr = dst_ref;
    return 0;

fail:
    av_buffer_unref(&dst_ref);
    *dst_ref_ptr = NULL;
    return ret;
}

int av_hwdevice_ctx_create_derived(AVBufferRef **dst_ref_ptr,
                                   enum AVHWDeviceType type,
                                   AVBufferRef *src_ref, int flags)
{
    return av_hwdevice_ctx_create_derived_opts(dst_ref_ptr, type, src_ref,
                                               NULL, flags);
}

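/*
 * [Editor's illustrative sketch - not part of the original hwcontext.c.]
 * Deriving a device of another type from an existing one, for example an
 * OpenCL device that shares surfaces with a VAAPI device. If no backend
 * provides a device_derive path for the combination, the call fails with
 * ENOSYS, as the loop above shows.
 */
#if 0
static AVBufferRef *example_derive_opencl(AVBufferRef *vaapi_device_ref)
{
    AVBufferRef *opencl_ref = NULL;
    if (av_hwdevice_ctx_create_derived(&opencl_ref, AV_HWDEVICE_TYPE_OPENCL,
                                       vaapi_device_ref, 0) < 0)
        return NULL;
    return opencl_ref;
}
#endif
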
static void ff_hwframe_unmap(void *opaque, uint8_t *data)
{
    HWMapDescriptor *hwmap = (HWMapDescriptor*)data;
    AVHWFramesContext *ctx = opaque;

    if (hwmap->unmap)
        hwmap->unmap(ctx, hwmap);

    av_frame_free(&hwmap->source);

    av_buffer_unref(&hwmap->hw_frames_ctx);

    av_free(hwmap);
}

int ff_hwframe_map_create(AVBufferRef *hwframe_ref,
                          AVFrame *dst, const AVFrame *src,
                          void (*unmap)(AVHWFramesContext *ctx,
                                        HWMapDescriptor *hwmap),
                          void *priv)
{
    AVHWFramesContext *ctx = (AVHWFramesContext*)hwframe_ref->data;
    HWMapDescriptor *hwmap;
    int ret;

    hwmap = av_mallocz(sizeof(*hwmap));
    if (!hwmap) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }

    hwmap->source = av_frame_alloc();
    if (!hwmap->source) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }
    ret = av_frame_ref(hwmap->source, src);
    if (ret < 0)
        goto fail;

    hwmap->hw_frames_ctx = av_buffer_ref(hwframe_ref);
    if (!hwmap->hw_frames_ctx) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }

    hwmap->unmap = unmap;
    hwmap->priv  = priv;

    dst->buf[0] = av_buffer_create((uint8_t*)hwmap, sizeof(*hwmap),
                                   &ff_hwframe_unmap, ctx, 0);
    if (!dst->buf[0]) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }

    return 0;

fail:
    if (hwmap) {
        av_buffer_unref(&hwmap->hw_frames_ctx);
        av_frame_free(&hwmap->source);
    }
    av_free(hwmap);
    return ret;
}

int av_hwframe_map(AVFrame *dst, const AVFrame *src, int flags)
{
    AVHWFramesContext *src_frames, *dst_frames;
    HWMapDescriptor *hwmap;
    int ret;

    if (src->hw_frames_ctx && dst->hw_frames_ctx) {
        src_frames = (AVHWFramesContext*)src->hw_frames_ctx->data;
        dst_frames = (AVHWFramesContext*)dst->hw_frames_ctx->data;

        if ((src_frames == dst_frames &&
             src->format == dst_frames->sw_format &&
             dst->format == dst_frames->format) ||
            (src_frames->internal->source_frames &&
             src_frames->internal->source_frames->data ==
             (uint8_t*)dst_frames)) {
            // This is an unmap operation. We don't need to directly
            // do anything here other than fill in the original frame,
            // because the real unmap will be invoked when the last
            // reference to the mapped frame disappears.
            if (!src->buf[0]) {
                av_log(src_frames, AV_LOG_ERROR, "Invalid mapping "
                       "found when attempting unmap.\n");
                return AVERROR(EINVAL);
            }
            hwmap = (HWMapDescriptor*)src->buf[0]->data;
            av_frame_unref(dst);
            return av_frame_ref(dst, hwmap->source);
        }
    }

    if (src->hw_frames_ctx) {
        src_frames = (AVHWFramesContext*)src->hw_frames_ctx->data;

        if (src_frames->format == src->format &&
            src_frames->internal->hw_type->map_from) {
            ret = src_frames->internal->hw_type->map_from(src_frames,
                                                          dst, src, flags);
            if (ret != AVERROR(ENOSYS))
                return ret;
        }
    }

    if (dst->hw_frames_ctx) {
        dst_frames = (AVHWFramesContext*)dst->hw_frames_ctx->data;

        if (dst_frames->format == dst->format &&
            dst_frames->internal->hw_type->map_to) {
            ret = dst_frames->internal->hw_type->map_to(dst_frames,
                                                        dst, src, flags);
            if (ret != AVERROR(ENOSYS))
                return ret;
        }
    }

    return AVERROR(ENOSYS);
}

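/*
 * [Editor's illustrative sketch - not part of the original hwcontext.c.]
 * Mapping a hardware frame for CPU access. Setting the destination frame's
 * format selects the software format to map to (the individual backends
 * generally require it); the mapping is released when the last reference to
 * "mapped" is dropped, via ff_hwframe_unmap() above.
 */
#if 0
static int example_map_for_read(AVFrame *mapped, const AVFrame *hw_frame,
                                enum AVPixelFormat sw_format)
{
    mapped->format = sw_format;
    return av_hwframe_map(mapped, hw_frame, AV_HWFRAME_MAP_READ);
}
#endif
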
int av_hwframe_ctx_create_derived(AVBufferRef **derived_frame_ctx,
                                  enum AVPixelFormat format,
                                  AVBufferRef *derived_device_ctx,
                                  AVBufferRef *source_frame_ctx,
                                  int flags)
{
    AVBufferRef *dst_ref = NULL;
    AVHWFramesContext *dst = NULL;
    AVHWFramesContext *src = (AVHWFramesContext*)source_frame_ctx->data;
    int ret;

    if (src->internal->source_frames) {
        AVHWFramesContext *src_src =
            (AVHWFramesContext*)src->internal->source_frames->data;
        AVHWDeviceContext *dst_dev =
            (AVHWDeviceContext*)derived_device_ctx->data;

        if (src_src->device_ctx == dst_dev) {
            // This is actually an unmapping, so we just return a
            // reference to the source frame context.
            *derived_frame_ctx =
                av_buffer_ref(src->internal->source_frames);
            if (!*derived_frame_ctx) {
                ret = AVERROR(ENOMEM);
                goto fail;
            }
            return 0;
        }
    }

    dst_ref = av_hwframe_ctx_alloc(derived_device_ctx);
    if (!dst_ref) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }

    dst = (AVHWFramesContext*)dst_ref->data;

    dst->format    = format;
    dst->sw_format = src->sw_format;
    dst->width     = src->width;
    dst->height    = src->height;

    dst->internal->source_frames = av_buffer_ref(source_frame_ctx);
    if (!dst->internal->source_frames) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }

    dst->internal->source_allocation_map_flags =
        flags & (AV_HWFRAME_MAP_READ      |
                 AV_HWFRAME_MAP_WRITE     |
                 AV_HWFRAME_MAP_OVERWRITE |
                 AV_HWFRAME_MAP_DIRECT);

    ret = AVERROR(ENOSYS);
    if (src->internal->hw_type->frames_derive_from)
        ret = src->internal->hw_type->frames_derive_from(dst, src, flags);
    if (ret == AVERROR(ENOSYS) &&
        dst->internal->hw_type->frames_derive_to)
        ret = dst->internal->hw_type->frames_derive_to(dst, src, flags);
    if (ret == AVERROR(ENOSYS))
        ret = 0;
    if (ret)
        goto fail;

    *derived_frame_ctx = dst_ref;
    return 0;

fail:
    if (dst)
        av_buffer_unref(&dst->internal->source_frames);
    av_buffer_unref(&dst_ref);
    return ret;
}

int ff_hwframe_map_replace(AVFrame *dst, const AVFrame *src)
{
    HWMapDescriptor *hwmap = (HWMapDescriptor*)dst->buf[0]->data;
    av_frame_unref(hwmap->source);
    return av_frame_ref(hwmap->source, src);
}