FFmpeg
amfenc.c
Go to the documentation of this file.
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
19 #include "config.h"
20 
21 #include "libavutil/avassert.h"
22 #include "libavutil/imgutils.h"
23 #include "libavutil/hwcontext.h"
24 #if CONFIG_D3D11VA
26 #endif
27 #if CONFIG_DXVA2
28 #define COBJMACROS
30 #endif
31 #include "libavutil/mem.h"
32 #include "libavutil/pixdesc.h"
33 #include "libavutil/time.h"
34 
35 #include "amfenc.h"
36 #include "internal.h"
37 
38 #if CONFIG_D3D11VA
39 #include <d3d11.h>
40 #endif
41 
42 #ifdef _WIN32
43 #include "compat/w32dlfcn.h"
44 #else
45 #include <dlfcn.h>
46 #endif
47 
48 #define FFMPEG_AMF_WRITER_ID L"ffmpeg_amf"
49 
50 #define PTS_PROP L"PtsProp"
51 
55 #if CONFIG_D3D11VA
57 #endif
58 #if CONFIG_DXVA2
60 #endif
62 };
63 
64 typedef struct FormatMap {
66  enum AMF_SURFACE_FORMAT amf_format;
67 } FormatMap;
68 
69 static const FormatMap format_map[] =
70 {
71  { AV_PIX_FMT_NONE, AMF_SURFACE_UNKNOWN },
72  { AV_PIX_FMT_NV12, AMF_SURFACE_NV12 },
73  { AV_PIX_FMT_BGR0, AMF_SURFACE_BGRA },
74  { AV_PIX_FMT_RGB0, AMF_SURFACE_RGBA },
75  { AV_PIX_FMT_GRAY8, AMF_SURFACE_GRAY8 },
76  { AV_PIX_FMT_YUV420P, AMF_SURFACE_YUV420P },
77  { AV_PIX_FMT_YUYV422, AMF_SURFACE_YUY2 },
78 };
79 
80 static enum AMF_SURFACE_FORMAT amf_av_to_amf_format(enum AVPixelFormat fmt)
81 {
82  int i;
83  for (i = 0; i < amf_countof(format_map); i++) {
84  if (format_map[i].av_format == fmt) {
85  return format_map[i].amf_format;
86  }
87  }
88  return AMF_SURFACE_UNKNOWN;
89 }
90 
91 static void AMF_CDECL_CALL AMFTraceWriter_Write(AMFTraceWriter *pThis,
92  const wchar_t *scope, const wchar_t *message)
93 {
94  AmfTraceWriter *tracer = (AmfTraceWriter*)pThis;
95  av_log(tracer->avctx, AV_LOG_DEBUG, "%ls: %ls", scope, message); // \n is provided from AMF
96 }
97 
// AMF trace-writer flush callback. Intentionally a no-op: Write() hands
// each line straight to av_log(), so there is nothing buffered to flush.
static void AMF_CDECL_CALL AMFTraceWriter_Flush(AMFTraceWriter *pThis)
{
}
101 
// Vtable connecting the trace-writer callbacks above to the AMF runtime;
// registered in amf_init_context() via RegisterWriter().
static AMFTraceWriterVtbl tracer_vtbl =
{
    .Write = AMFTraceWriter_Write,
    .Flush = AMFTraceWriter_Flush,
};
107 
109 {
110  AmfContext *ctx = avctx->priv_data;
111  AMFInit_Fn init_fun;
112  AMFQueryVersion_Fn version_fun;
113  AMF_RESULT res;
114 
115  ctx->delayed_frame = av_frame_alloc();
116  if (!ctx->delayed_frame) {
117  return AVERROR(ENOMEM);
118  }
119  // hardcoded to current HW queue size - will realloc in timestamp_queue_enqueue() if too small
120  ctx->timestamp_list = av_fifo_alloc((avctx->max_b_frames + 16) * sizeof(int64_t));
121  if (!ctx->timestamp_list) {
122  return AVERROR(ENOMEM);
123  }
124  ctx->dts_delay = 0;
125 
126 
127  ctx->library = dlopen(AMF_DLL_NAMEA, RTLD_NOW | RTLD_LOCAL);
128  AMF_RETURN_IF_FALSE(ctx, ctx->library != NULL,
129  AVERROR_UNKNOWN, "DLL %s failed to open\n", AMF_DLL_NAMEA);
130 
131  init_fun = (AMFInit_Fn)dlsym(ctx->library, AMF_INIT_FUNCTION_NAME);
132  AMF_RETURN_IF_FALSE(ctx, init_fun != NULL, AVERROR_UNKNOWN, "DLL %s failed to find function %s\n", AMF_DLL_NAMEA, AMF_INIT_FUNCTION_NAME);
133 
134  version_fun = (AMFQueryVersion_Fn)dlsym(ctx->library, AMF_QUERY_VERSION_FUNCTION_NAME);
135  AMF_RETURN_IF_FALSE(ctx, version_fun != NULL, AVERROR_UNKNOWN, "DLL %s failed to find function %s\n", AMF_DLL_NAMEA, AMF_QUERY_VERSION_FUNCTION_NAME);
136 
137  res = version_fun(&ctx->version);
138  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "%s failed with error %d\n", AMF_QUERY_VERSION_FUNCTION_NAME, res);
139  res = init_fun(AMF_FULL_VERSION, &ctx->factory);
140  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "%s failed with error %d\n", AMF_INIT_FUNCTION_NAME, res);
141  res = ctx->factory->pVtbl->GetTrace(ctx->factory, &ctx->trace);
142  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "GetTrace() failed with error %d\n", res);
143  res = ctx->factory->pVtbl->GetDebug(ctx->factory, &ctx->debug);
144  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "GetDebug() failed with error %d\n", res);
145  return 0;
146 }
147 
#if CONFIG_D3D11VA
// Initialise the AMF context from an existing D3D11 device.
// Returns 0 on success, AVERROR(ENODEV) if AMF rejects the device.
static int amf_init_from_d3d11_device(AVCodecContext *avctx, AVD3D11VADeviceContext *hwctx)
{
    AmfContext *ctx = avctx->priv_data;
    AMF_RESULT  res = ctx->context->pVtbl->InitDX11(ctx->context, hwctx->device, AMF_DX11_1);

    if (res == AMF_OK)
        return 0;

    if (res == AMF_NOT_SUPPORTED)
        av_log(avctx, AV_LOG_ERROR, "AMF via D3D11 is not supported on the given device.\n");
    else
        av_log(avctx, AV_LOG_ERROR, "AMF failed to initialise on the given D3D11 device: %d.\n", res);
    return AVERROR(ENODEV);
}
#endif
166 
#if CONFIG_DXVA2
/**
 * Initialise the AMF context from a DXVA2 device manager: briefly lock the
 * managed D3D9 device to obtain its interface, then hand it to AMF.
 *
 * Fix: the failure branch of LockDevice must set ret; in this copy of the
 * file the assignment was missing, so ret was read uninitialized (UB).
 *
 * @return 0 on success, AVERROR_EXTERNAL / AVERROR(ENODEV) on failure.
 */
static int amf_init_from_dxva2_device(AVCodecContext *avctx, AVDXVA2DeviceContext *hwctx)
{
    AmfContext *ctx = avctx->priv_data;
    HANDLE device_handle;
    IDirect3DDevice9 *device;
    HRESULT hr;
    AMF_RESULT res;
    int ret;

    hr = IDirect3DDeviceManager9_OpenDeviceHandle(hwctx->devmgr, &device_handle);
    if (FAILED(hr)) {
        av_log(avctx, AV_LOG_ERROR, "Failed to open device handle for Direct3D9 device: %lx.\n", (unsigned long)hr);
        return AVERROR_EXTERNAL;
    }

    hr = IDirect3DDeviceManager9_LockDevice(hwctx->devmgr, device_handle, &device, FALSE);
    if (SUCCEEDED(hr)) {
        IDirect3DDeviceManager9_UnlockDevice(hwctx->devmgr, device_handle, FALSE);
        ret = 0;
    } else {
        av_log(avctx, AV_LOG_ERROR, "Failed to lock device handle for Direct3D9 device: %lx.\n", (unsigned long)hr);
        ret = AVERROR_EXTERNAL; // restored: ret must be set on the failure path
    }

    IDirect3DDeviceManager9_CloseDeviceHandle(hwctx->devmgr, device_handle);

    if (ret < 0)
        return ret;

    res = ctx->context->pVtbl->InitDX9(ctx->context, device);

    IDirect3DDevice9_Release(device);

    if (res != AMF_OK) {
        if (res == AMF_NOT_SUPPORTED)
            av_log(avctx, AV_LOG_ERROR, "AMF via D3D9 is not supported on the given device.\n");
        else
            av_log(avctx, AV_LOG_ERROR, "AMF failed to initialise on given D3D9 device: %d.\n", res);
        return AVERROR(ENODEV);
    }

    return 0;
}
#endif
212 
214 {
215  AmfContext *ctx = avctx->priv_data;
216  AMFContext1 *context1 = NULL;
217  AMF_RESULT res;
218  av_unused int ret;
219 
220  ctx->hwsurfaces_in_queue = 0;
221  ctx->hwsurfaces_in_queue_max = 16;
222 
223  // configure AMF logger
224  // the return of these functions indicates old state and do not affect behaviour
225  ctx->trace->pVtbl->EnableWriter(ctx->trace, AMF_TRACE_WRITER_DEBUG_OUTPUT, ctx->log_to_dbg != 0 );
226  if (ctx->log_to_dbg)
227  ctx->trace->pVtbl->SetWriterLevel(ctx->trace, AMF_TRACE_WRITER_DEBUG_OUTPUT, AMF_TRACE_TRACE);
228  ctx->trace->pVtbl->EnableWriter(ctx->trace, AMF_TRACE_WRITER_CONSOLE, 0);
229  ctx->trace->pVtbl->SetGlobalLevel(ctx->trace, AMF_TRACE_TRACE);
230 
231  // connect AMF logger to av_log
232  ctx->tracer.vtbl = &tracer_vtbl;
233  ctx->tracer.avctx = avctx;
234  ctx->trace->pVtbl->RegisterWriter(ctx->trace, FFMPEG_AMF_WRITER_ID,(AMFTraceWriter*)&ctx->tracer, 1);
235  ctx->trace->pVtbl->SetWriterLevel(ctx->trace, FFMPEG_AMF_WRITER_ID, AMF_TRACE_TRACE);
236 
237  res = ctx->factory->pVtbl->CreateContext(ctx->factory, &ctx->context);
238  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "CreateContext() failed with error %d\n", res);
239 
240  // If a device was passed to the encoder, try to initialise from that.
241  if (avctx->hw_frames_ctx) {
242  AVHWFramesContext *frames_ctx = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
243 
244  if (amf_av_to_amf_format(frames_ctx->sw_format) == AMF_SURFACE_UNKNOWN) {
245  av_log(avctx, AV_LOG_ERROR, "Format of input frames context (%s) is not supported by AMF.\n",
246  av_get_pix_fmt_name(frames_ctx->sw_format));
247  return AVERROR(EINVAL);
248  }
249 
250  switch (frames_ctx->device_ctx->type) {
251 #if CONFIG_D3D11VA
253  ret = amf_init_from_d3d11_device(avctx, frames_ctx->device_ctx->hwctx);
254  if (ret < 0)
255  return ret;
256  break;
257 #endif
258 #if CONFIG_DXVA2
260  ret = amf_init_from_dxva2_device(avctx, frames_ctx->device_ctx->hwctx);
261  if (ret < 0)
262  return ret;
263  break;
264 #endif
265  default:
266  av_log(avctx, AV_LOG_ERROR, "AMF initialisation from a %s frames context is not supported.\n",
268  return AVERROR(ENOSYS);
269  }
270 
271  ctx->hw_frames_ctx = av_buffer_ref(avctx->hw_frames_ctx);
272  if (!ctx->hw_frames_ctx)
273  return AVERROR(ENOMEM);
274 
275  if (frames_ctx->initial_pool_size > 0)
276  ctx->hwsurfaces_in_queue_max = frames_ctx->initial_pool_size - 1;
277 
278  } else if (avctx->hw_device_ctx) {
279  AVHWDeviceContext *device_ctx = (AVHWDeviceContext*)avctx->hw_device_ctx->data;
280 
281  switch (device_ctx->type) {
282 #if CONFIG_D3D11VA
284  ret = amf_init_from_d3d11_device(avctx, device_ctx->hwctx);
285  if (ret < 0)
286  return ret;
287  break;
288 #endif
289 #if CONFIG_DXVA2
291  ret = amf_init_from_dxva2_device(avctx, device_ctx->hwctx);
292  if (ret < 0)
293  return ret;
294  break;
295 #endif
296  default:
297  av_log(avctx, AV_LOG_ERROR, "AMF initialisation from a %s device is not supported.\n",
298  av_hwdevice_get_type_name(device_ctx->type));
299  return AVERROR(ENOSYS);
300  }
301 
302  ctx->hw_device_ctx = av_buffer_ref(avctx->hw_device_ctx);
303  if (!ctx->hw_device_ctx)
304  return AVERROR(ENOMEM);
305 
306  } else {
307  res = ctx->context->pVtbl->InitDX11(ctx->context, NULL, AMF_DX11_1);
308  if (res == AMF_OK) {
309  av_log(avctx, AV_LOG_VERBOSE, "AMF initialisation succeeded via D3D11.\n");
310  } else {
311  res = ctx->context->pVtbl->InitDX9(ctx->context, NULL);
312  if (res == AMF_OK) {
313  av_log(avctx, AV_LOG_VERBOSE, "AMF initialisation succeeded via D3D9.\n");
314  } else {
315  AMFGuid guid = IID_AMFContext1();
316  res = ctx->context->pVtbl->QueryInterface(ctx->context, &guid, (void**)&context1);
317  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "CreateContext1() failed with error %d\n", res);
318 
319  res = context1->pVtbl->InitVulkan(context1, NULL);
320  context1->pVtbl->Release(context1);
321  if (res != AMF_OK) {
322  if (res == AMF_NOT_SUPPORTED)
323  av_log(avctx, AV_LOG_ERROR, "AMF via Vulkan is not supported on the given device.\n");
324  else
325  av_log(avctx, AV_LOG_ERROR, "AMF failed to initialise on the given Vulkan device: %d.\n", res);
326  return AVERROR(ENOSYS);
327  }
328  av_log(avctx, AV_LOG_VERBOSE, "AMF initialisation succeeded via Vulkan.\n");
329  }
330  }
331  }
332  return 0;
333 }
334 
336 {
337  AmfContext *ctx = avctx->priv_data;
338  const wchar_t *codec_id = NULL;
339  AMF_RESULT res;
340  enum AVPixelFormat pix_fmt;
341 
342  switch (avctx->codec->id) {
343  case AV_CODEC_ID_H264:
344  codec_id = AMFVideoEncoderVCE_AVC;
345  break;
346  case AV_CODEC_ID_HEVC:
347  codec_id = AMFVideoEncoder_HEVC;
348  break;
349  default:
350  break;
351  }
352  AMF_RETURN_IF_FALSE(ctx, codec_id != NULL, AVERROR(EINVAL), "Codec %d is not supported\n", avctx->codec->id);
353 
354  if (ctx->hw_frames_ctx)
355  pix_fmt = ((AVHWFramesContext*)ctx->hw_frames_ctx->data)->sw_format;
356  else
357  pix_fmt = avctx->pix_fmt;
358 
359  ctx->format = amf_av_to_amf_format(pix_fmt);
360  AMF_RETURN_IF_FALSE(ctx, ctx->format != AMF_SURFACE_UNKNOWN, AVERROR(EINVAL),
361  "Format %s is not supported\n", av_get_pix_fmt_name(pix_fmt));
362 
363  res = ctx->factory->pVtbl->CreateComponent(ctx->factory, ctx->context, codec_id, &ctx->encoder);
364  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_ENCODER_NOT_FOUND, "CreateComponent(%ls) failed with error %d\n", codec_id, res);
365 
366  return 0;
367 }
368 
370 {
371  AmfContext *ctx = avctx->priv_data;
372 
373  if (ctx->delayed_surface) {
374  ctx->delayed_surface->pVtbl->Release(ctx->delayed_surface);
375  ctx->delayed_surface = NULL;
376  }
377 
378  if (ctx->encoder) {
379  ctx->encoder->pVtbl->Terminate(ctx->encoder);
380  ctx->encoder->pVtbl->Release(ctx->encoder);
381  ctx->encoder = NULL;
382  }
383 
384  if (ctx->context) {
385  ctx->context->pVtbl->Terminate(ctx->context);
386  ctx->context->pVtbl->Release(ctx->context);
387  ctx->context = NULL;
388  }
389  av_buffer_unref(&ctx->hw_device_ctx);
390  av_buffer_unref(&ctx->hw_frames_ctx);
391 
392  if (ctx->trace) {
393  ctx->trace->pVtbl->UnregisterWriter(ctx->trace, FFMPEG_AMF_WRITER_ID);
394  }
395  if (ctx->library) {
396  dlclose(ctx->library);
397  ctx->library = NULL;
398  }
399  ctx->trace = NULL;
400  ctx->debug = NULL;
401  ctx->factory = NULL;
402  ctx->version = 0;
403  ctx->delayed_drain = 0;
404  av_frame_free(&ctx->delayed_frame);
405  av_fifo_freep(&ctx->timestamp_list);
406 
407  return 0;
408 }
409 
410 static int amf_copy_surface(AVCodecContext *avctx, const AVFrame *frame,
411  AMFSurface* surface)
412 {
413  AMFPlane *plane;
414  uint8_t *dst_data[4];
415  int dst_linesize[4];
416  int planes;
417  int i;
418 
419  planes = surface->pVtbl->GetPlanesCount(surface);
420  av_assert0(planes < FF_ARRAY_ELEMS(dst_data));
421 
422  for (i = 0; i < planes; i++) {
423  plane = surface->pVtbl->GetPlaneAt(surface, i);
424  dst_data[i] = plane->pVtbl->GetNative(plane);
425  dst_linesize[i] = plane->pVtbl->GetHPitch(plane);
426  }
427  av_image_copy(dst_data, dst_linesize,
428  (const uint8_t**)frame->data, frame->linesize, frame->format,
429  avctx->width, avctx->height);
430 
431  return 0;
432 }
433 
434 static inline int timestamp_queue_enqueue(AVCodecContext *avctx, int64_t timestamp)
435 {
436  AmfContext *ctx = avctx->priv_data;
437  if (av_fifo_space(ctx->timestamp_list) < sizeof(timestamp)) {
438  if (av_fifo_grow(ctx->timestamp_list, sizeof(timestamp)) < 0) {
439  return AVERROR(ENOMEM);
440  }
441  }
442  av_fifo_generic_write(ctx->timestamp_list, &timestamp, sizeof(timestamp), NULL);
443  return 0;
444 }
445 
446 static int amf_copy_buffer(AVCodecContext *avctx, AVPacket *pkt, AMFBuffer *buffer)
447 {
448  AmfContext *ctx = avctx->priv_data;
449  int ret;
450  AMFVariantStruct var = {0};
451  int64_t timestamp = AV_NOPTS_VALUE;
452  int64_t size = buffer->pVtbl->GetSize(buffer);
453 
454  if ((ret = av_new_packet(pkt, size)) < 0) {
455  return ret;
456  }
457  memcpy(pkt->data, buffer->pVtbl->GetNative(buffer), size);
458 
459  switch (avctx->codec->id) {
460  case AV_CODEC_ID_H264:
461  buffer->pVtbl->GetProperty(buffer, AMF_VIDEO_ENCODER_OUTPUT_DATA_TYPE, &var);
462  if(var.int64Value == AMF_VIDEO_ENCODER_OUTPUT_DATA_TYPE_IDR) {
464  }
465  break;
466  case AV_CODEC_ID_HEVC:
467  buffer->pVtbl->GetProperty(buffer, AMF_VIDEO_ENCODER_HEVC_OUTPUT_DATA_TYPE, &var);
468  if (var.int64Value == AMF_VIDEO_ENCODER_HEVC_OUTPUT_DATA_TYPE_IDR) {
470  }
471  break;
472  default:
473  break;
474  }
475 
476  buffer->pVtbl->GetProperty(buffer, PTS_PROP, &var);
477 
478  pkt->pts = var.int64Value; // original pts
479 
480 
481  AMF_RETURN_IF_FALSE(ctx, av_fifo_size(ctx->timestamp_list) > 0, AVERROR_UNKNOWN, "timestamp_list is empty\n");
482 
483  av_fifo_generic_read(ctx->timestamp_list, &timestamp, sizeof(timestamp), NULL);
484 
485  // calc dts shift if max_b_frames > 0
486  if (avctx->max_b_frames > 0 && ctx->dts_delay == 0) {
487  int64_t timestamp_last = AV_NOPTS_VALUE;
488  AMF_RETURN_IF_FALSE(ctx, av_fifo_size(ctx->timestamp_list) > 0, AVERROR_UNKNOWN,
489  "timestamp_list is empty while max_b_frames = %d\n", avctx->max_b_frames);
491  ctx->timestamp_list,
492  &timestamp_last,
493  (av_fifo_size(ctx->timestamp_list) / sizeof(timestamp) - 1) * sizeof(timestamp_last),
494  sizeof(timestamp_last),
495  NULL);
496  if (timestamp < 0 || timestamp_last < AV_NOPTS_VALUE) {
497  return AVERROR(ERANGE);
498  }
499  ctx->dts_delay = timestamp_last - timestamp;
500  }
501  pkt->dts = timestamp - ctx->dts_delay;
502  return 0;
503 }
504 
505 // amfenc API implementation
507 {
508  int ret;
509 
510  if ((ret = amf_load_library(avctx)) == 0) {
511  if ((ret = amf_init_context(avctx)) == 0) {
512  if ((ret = amf_init_encoder(avctx)) == 0) {
513  return 0;
514  }
515  }
516  }
517  ff_amf_encode_close(avctx);
518  return ret;
519 }
520 
/**
 * Attach an AMFBuffer to a surface as an interface-valued property.
 *
 * The buffer is queried for its generic AMFInterface, which the variant
 * then holds; the local reference from QueryInterface is released right
 * after the assignment (presumably AMFVariantAssignInterface takes its
 * own reference — the Release below relies on that; confirm against the
 * AMF SDK if touching this).
 */
static AMF_RESULT amf_set_property_buffer(AMFSurface *object, const wchar_t *name, AMFBuffer *val)
{
    AMF_RESULT res;
    AMFVariantStruct var;
    res = AMFVariantInit(&var);
    if (res == AMF_OK) {
        AMFGuid guid_AMFInterface = IID_AMFInterface();
        AMFInterface *amf_interface;
        res = val->pVtbl->QueryInterface(val, &guid_AMFInterface, (void**)&amf_interface);

        if (res == AMF_OK) {
            res = AMFVariantAssignInterface(&var, amf_interface);
            amf_interface->pVtbl->Release(amf_interface); // drop the QueryInterface ref
        }
        if (res == AMF_OK) {
            res = object->pVtbl->SetProperty(object, name, var);
        }
        AMFVariantClear(&var); // also releases the variant's interface ref
    }
    return res;
}
542 
/**
 * Fetch an interface-valued property from an AMF object and query it back
 * to an AMFBuffer, stored in *val. Returns AMF_INVALID_DATA_TYPE when the
 * property exists but is not an interface. On success the caller owns the
 * reference returned in *val and must Release() it.
 */
static AMF_RESULT amf_get_property_buffer(AMFData *object, const wchar_t *name, AMFBuffer **val)
{
    AMF_RESULT res;
    AMFVariantStruct var;
    res = AMFVariantInit(&var);
    if (res == AMF_OK) {
        res = object->pVtbl->GetProperty(object, name, &var);
        if (res == AMF_OK) {
            if (var.type == AMF_VARIANT_INTERFACE) {
                AMFGuid guid_AMFBuffer = IID_AMFBuffer();
                AMFInterface *amf_interface = AMFVariantInterface(&var);
                // QueryInterface adds the reference handed back to the caller.
                res = amf_interface->pVtbl->QueryInterface(amf_interface, &guid_AMFBuffer, (void**)val);
            } else {
                res = AMF_INVALID_DATA_TYPE;
            }
        }
        AMFVariantClear(&var);
    }
    return res;
}
563 
564 static AMFBuffer *amf_create_buffer_with_frame_ref(const AVFrame *frame, AMFContext *context)
565 {
566  AVFrame *frame_ref;
567  AMFBuffer *frame_ref_storage_buffer = NULL;
568  AMF_RESULT res;
569 
570  res = context->pVtbl->AllocBuffer(context, AMF_MEMORY_HOST, sizeof(frame_ref), &frame_ref_storage_buffer);
571  if (res == AMF_OK) {
572  frame_ref = av_frame_clone(frame);
573  if (frame_ref) {
574  memcpy(frame_ref_storage_buffer->pVtbl->GetNative(frame_ref_storage_buffer), &frame_ref, sizeof(frame_ref));
575  } else {
576  frame_ref_storage_buffer->pVtbl->Release(frame_ref_storage_buffer);
577  frame_ref_storage_buffer = NULL;
578  }
579  }
580  return frame_ref_storage_buffer;
581 }
582 
583 static void amf_release_buffer_with_frame_ref(AMFBuffer *frame_ref_storage_buffer)
584 {
585  AVFrame *frame_ref;
586  memcpy(&frame_ref, frame_ref_storage_buffer->pVtbl->GetNative(frame_ref_storage_buffer), sizeof(frame_ref));
587  av_frame_free(&frame_ref);
588  frame_ref_storage_buffer->pVtbl->Release(frame_ref_storage_buffer);
589 }
590 
592 {
593  AmfContext *ctx = avctx->priv_data;
594  AMFSurface *surface;
595  AMF_RESULT res;
596  int ret;
597 
598  if (!ctx->encoder)
599  return AVERROR(EINVAL);
600 
601  if (!frame) { // submit drain
602  if (!ctx->eof) { // submit drain one time only
603  if (ctx->delayed_surface != NULL) {
604  ctx->delayed_drain = 1; // input queue is full: resubmit Drain() in ff_amf_receive_packet
605  } else if(!ctx->delayed_drain) {
606  res = ctx->encoder->pVtbl->Drain(ctx->encoder);
607  if (res == AMF_INPUT_FULL) {
608  ctx->delayed_drain = 1; // input queue is full: resubmit Drain() in ff_amf_receive_packet
609  } else {
610  if (res == AMF_OK) {
611  ctx->eof = 1; // drain started
612  }
613  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "Drain() failed with error %d\n", res);
614  }
615  }
616  } else{
617  return AVERROR_EOF;
618  }
619  } else { // submit frame
620  int hw_surface = 0;
621 
622  if (ctx->delayed_surface != NULL) {
623  return AVERROR(EAGAIN); // should not happen when called from ffmpeg, other clients may resubmit
624  }
625  // prepare surface from frame
626  switch (frame->format) {
627 #if CONFIG_D3D11VA
628  case AV_PIX_FMT_D3D11:
629  {
630  static const GUID AMFTextureArrayIndexGUID = { 0x28115527, 0xe7c3, 0x4b66, { 0x99, 0xd3, 0x4f, 0x2a, 0xe6, 0xb4, 0x7f, 0xaf } };
631  ID3D11Texture2D *texture = (ID3D11Texture2D*)frame->data[0]; // actual texture
632  int index = (intptr_t)frame->data[1]; // index is a slice in texture array is - set to tell AMF which slice to use
633 
634  av_assert0(frame->hw_frames_ctx && ctx->hw_frames_ctx &&
635  frame->hw_frames_ctx->data == ctx->hw_frames_ctx->data);
636 
637  texture->lpVtbl->SetPrivateData(texture, &AMFTextureArrayIndexGUID, sizeof(index), &index);
638 
639  res = ctx->context->pVtbl->CreateSurfaceFromDX11Native(ctx->context, texture, &surface, NULL); // wrap to AMF surface
640  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR(ENOMEM), "CreateSurfaceFromDX11Native() failed with error %d\n", res);
641 
642  hw_surface = 1;
643  }
644  break;
645 #endif
646 #if CONFIG_DXVA2
648  {
649  IDirect3DSurface9 *texture = (IDirect3DSurface9 *)frame->data[3]; // actual texture
650 
651  res = ctx->context->pVtbl->CreateSurfaceFromDX9Native(ctx->context, texture, &surface, NULL); // wrap to AMF surface
652  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR(ENOMEM), "CreateSurfaceFromDX9Native() failed with error %d\n", res);
653 
654  hw_surface = 1;
655  }
656  break;
657 #endif
658  default:
659  {
660  res = ctx->context->pVtbl->AllocSurface(ctx->context, AMF_MEMORY_HOST, ctx->format, avctx->width, avctx->height, &surface);
661  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR(ENOMEM), "AllocSurface() failed with error %d\n", res);
662  amf_copy_surface(avctx, frame, surface);
663  }
664  break;
665  }
666 
667  if (hw_surface) {
668  AMFBuffer *frame_ref_storage_buffer;
669 
670  // input HW surfaces can be vertically aligned by 16; tell AMF the real size
671  surface->pVtbl->SetCrop(surface, 0, 0, frame->width, frame->height);
672 
673  frame_ref_storage_buffer = amf_create_buffer_with_frame_ref(frame, ctx->context);
674  AMF_RETURN_IF_FALSE(ctx, frame_ref_storage_buffer != NULL, AVERROR(ENOMEM), "create_buffer_with_frame_ref() returned NULL\n");
675 
676  res = amf_set_property_buffer(surface, L"av_frame_ref", frame_ref_storage_buffer);
677  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "SetProperty failed for \"av_frame_ref\" with error %d\n", res);
678  ctx->hwsurfaces_in_queue++;
679  frame_ref_storage_buffer->pVtbl->Release(frame_ref_storage_buffer);
680  }
681 
682  surface->pVtbl->SetPts(surface, frame->pts);
683  AMF_ASSIGN_PROPERTY_INT64(res, surface, PTS_PROP, frame->pts);
684 
685  switch (avctx->codec->id) {
686  case AV_CODEC_ID_H264:
687  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_INSERT_AUD, !!ctx->aud);
688  break;
689  case AV_CODEC_ID_HEVC:
690  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_HEVC_INSERT_AUD, !!ctx->aud);
691  break;
692  default:
693  break;
694  }
695 
696 
697  // submit surface
698  res = ctx->encoder->pVtbl->SubmitInput(ctx->encoder, (AMFData*)surface);
699  if (res == AMF_INPUT_FULL) { // handle full queue
700  //store surface for later submission
701  ctx->delayed_surface = surface;
702  if (surface->pVtbl->GetMemoryType(surface) == AMF_MEMORY_DX11) {
703  av_frame_ref(ctx->delayed_frame, frame);
704  }
705  } else {
706  surface->pVtbl->Release(surface);
707  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "SubmitInput() failed with error %d\n", res);
708 
709  if ((ret = timestamp_queue_enqueue(avctx, frame->pts)) < 0) {
710  return ret;
711  }
712 
713  }
714  }
715  return 0;
716 }
718 {
719  int ret;
720  AMF_RESULT res;
721  AMF_RESULT res_query;
722  AmfContext *ctx = avctx->priv_data;
723  AMFData *data = NULL;
724  int block_and_wait;
725 
726  if (!ctx->encoder)
727  return AVERROR(EINVAL);
728 
729  do {
730  block_and_wait = 0;
731  // poll data
732  res_query = ctx->encoder->pVtbl->QueryOutput(ctx->encoder, &data);
733  if (data) {
734  // copy data to packet
735  AMFBuffer* buffer;
736  AMFGuid guid = IID_AMFBuffer();
737  data->pVtbl->QueryInterface(data, &guid, (void**)&buffer); // query for buffer interface
738  ret = amf_copy_buffer(avctx, avpkt, buffer);
739 
740  buffer->pVtbl->Release(buffer);
741 
742  if (data->pVtbl->HasProperty(data, L"av_frame_ref")) {
743  AMFBuffer *frame_ref_storage_buffer;
744  res = amf_get_property_buffer(data, L"av_frame_ref", &frame_ref_storage_buffer);
745  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "GetProperty failed for \"av_frame_ref\" with error %d\n", res);
746  amf_release_buffer_with_frame_ref(frame_ref_storage_buffer);
747  ctx->hwsurfaces_in_queue--;
748  }
749 
750  data->pVtbl->Release(data);
751 
752  AMF_RETURN_IF_FALSE(ctx, ret >= 0, ret, "amf_copy_buffer() failed with error %d\n", ret);
753 
754  if (ctx->delayed_surface != NULL) { // try to resubmit frame
755  res = ctx->encoder->pVtbl->SubmitInput(ctx->encoder, (AMFData*)ctx->delayed_surface);
756  if (res != AMF_INPUT_FULL) {
757  int64_t pts = ctx->delayed_surface->pVtbl->GetPts(ctx->delayed_surface);
758  ctx->delayed_surface->pVtbl->Release(ctx->delayed_surface);
759  ctx->delayed_surface = NULL;
760  av_frame_unref(ctx->delayed_frame);
761  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "Repeated SubmitInput() failed with error %d\n", res);
762 
763  if ((ret = timestamp_queue_enqueue(avctx, pts)) < 0) {
764  return ret;
765  }
766  } else {
767  av_log(avctx, AV_LOG_WARNING, "Data acquired but delayed frame submission got AMF_INPUT_FULL- should not happen\n");
768  }
769  } else if (ctx->delayed_drain) { // try to resubmit drain
770  res = ctx->encoder->pVtbl->Drain(ctx->encoder);
771  if (res != AMF_INPUT_FULL) {
772  ctx->delayed_drain = 0;
773  ctx->eof = 1; // drain started
774  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "Repeated Drain() failed with error %d\n", res);
775  } else {
776  av_log(avctx, AV_LOG_WARNING, "Data acquired but delayed drain submission got AMF_INPUT_FULL- should not happen\n");
777  }
778  }
779  } else if (ctx->delayed_surface != NULL || ctx->delayed_drain || (ctx->eof && res_query != AMF_EOF) || (ctx->hwsurfaces_in_queue >= ctx->hwsurfaces_in_queue_max)) {
780  block_and_wait = 1;
781  av_usleep(1000); // wait and poll again
782  }
783  } while (block_and_wait);
784 
785  if (res_query == AMF_EOF) {
786  ret = AVERROR_EOF;
787  } else if (data == NULL) {
788  ret = AVERROR(EAGAIN);
789  } else {
790  ret = 0;
791  }
792  return ret;
793 }
AVHWDeviceContext::hwctx
void * hwctx
The format-specific data, allocated and freed by libavutil along with this context.
Definition: hwcontext.h:92
AV_LOG_WARNING
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:182
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:64
name
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this just let it vf default minimum maximum flags name is the option name
Definition: writing_filters.txt:88
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
AMFTraceWriter_Write
static void AMF_CDECL_CALL AMFTraceWriter_Write(AMFTraceWriter *pThis, const wchar_t *scope, const wchar_t *message)
Definition: amfenc.c:91
av_fifo_generic_write
int av_fifo_generic_write(AVFifoBuffer *f, void *src, int size, int(*func)(void *, void *, int))
Feed data from a user-supplied callback to an AVFifoBuffer.
Definition: fifo.c:122
FFMPEG_AMF_WRITER_ID
#define FFMPEG_AMF_WRITER_ID
Definition: amfenc.c:48
message
Definition: api-threadmessage-test.c:46
AVERROR_EOF
#define AVERROR_EOF
End of file.
Definition: error.h:55
AVBufferRef::data
uint8_t * data
The data buffer.
Definition: buffer.h:89
av_fifo_grow
int av_fifo_grow(AVFifoBuffer *f, unsigned int size)
Enlarge an AVFifoBuffer.
Definition: fifo.c:107
av_unused
#define av_unused
Definition: attributes.h:131
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:203
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:300
pixdesc.h
internal.h
AVPacket::data
uint8_t * data
Definition: packet.h:355
data
const char data[16]
Definition: mxf.c:91
AVDXVA2DeviceContext::devmgr
IDirect3DDeviceManager9 * devmgr
Definition: hwcontext_dxva2.h:40
AV_LOG_VERBOSE
#define AV_LOG_VERBOSE
Detailed information.
Definition: log.h:192
amf_set_property_buffer
static AMF_RESULT amf_set_property_buffer(AMFSurface *object, const wchar_t *name, AMFBuffer *val)
Definition: amfenc.c:521
AMF_RETURN_IF_FALSE
#define AMF_RETURN_IF_FALSE(avctx, exp, ret_value,...)
Error handling helper.
Definition: amfenc.h:144
amf_copy_surface
static int amf_copy_surface(AVCodecContext *avctx, const AVFrame *frame, AMFSurface *surface)
Definition: amfenc.c:410
av_fifo_generic_read
int av_fifo_generic_read(AVFifoBuffer *f, void *dest, int buf_size, void(*func)(void *, void *, int))
Feed data from an AVFifoBuffer to a user-supplied callback.
Definition: fifo.c:213
AVERROR_UNKNOWN
#define AVERROR_UNKNOWN
Unknown error, typically from an external library.
Definition: error.h:71
AMFTraceWriter_Flush
static void AMF_CDECL_CALL AMFTraceWriter_Flush(AMFTraceWriter *pThis)
Definition: amfenc.c:98
timestamp_queue_enqueue
static int timestamp_queue_enqueue(AVCodecContext *avctx, int64_t timestamp)
Definition: amfenc.c:434
AV_PKT_FLAG_KEY
#define AV_PKT_FLAG_KEY
The packet contains a keyframe.
Definition: packet.h:388
FormatMap::amf_format
enum AMF_SURFACE_FORMAT amf_format
Definition: amfenc.c:66
AVCodecContext::codec
const struct AVCodec * codec
Definition: avcodec.h:535
ff_amf_encode_close
int av_cold ff_amf_encode_close(AVCodecContext *avctx)
Common encoder termination function.
Definition: amfenc.c:369
AV_HWDEVICE_TYPE_D3D11VA
@ AV_HWDEVICE_TYPE_D3D11VA
Definition: hwcontext.h:35
ff_amf_encode_init
int ff_amf_encode_init(AVCodecContext *avctx)
Common encoder initization function.
Definition: amfenc.c:506
val
static double val(void *priv, double ch)
Definition: aeval.c:76
pts
static int64_t pts
Definition: transcode_aac.c:647
AVHWDeviceContext
This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e.
Definition: hwcontext.h:61
av_frame_alloc
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
Definition: frame.c:190
avassert.h
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:176
av_fifo_space
int av_fifo_space(const AVFifoBuffer *f)
Return the amount of space in bytes in the AVFifoBuffer, that is the amount of data you can write int...
Definition: fifo.c:82
av_cold
#define av_cold
Definition: attributes.h:90
AV_PIX_FMT_DXVA2_VLD
@ AV_PIX_FMT_DXVA2_VLD
HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer.
Definition: pixfmt.h:137
av_new_packet
int av_new_packet(AVPacket *pkt, int size)
Allocate the payload of a packet and initialize its fields with default values.
Definition: avpacket.c:88
AVD3D11VADeviceContext::device
ID3D11Device * device
Device used for texture creation and access.
Definition: hwcontext_d3d11va.h:56
amf_av_to_amf_format
static enum AMF_SURFACE_FORMAT amf_av_to_amf_format(enum AVPixelFormat fmt)
Definition: amfenc.c:80
av_assert0
#define av_assert0(cond)
assert() equivalent, that is always enabled.
Definition: avassert.h:37
amf_init_encoder
static int amf_init_encoder(AVCodecContext *avctx)
Definition: amfenc.c:335
AV_LOG_DEBUG
#define AV_LOG_DEBUG
Stuff which is only useful for libav* developers.
Definition: log.h:197
ctx
AVFormatContext * ctx
Definition: movenc.c:48
av_frame_clone
AVFrame * av_frame_clone(const AVFrame *src)
Create a new frame that references the same data as src.
Definition: frame.c:541
pix_fmt
static enum AVPixelFormat pix_fmt
Definition: demuxing_decoding.c:40
av_hwdevice_get_type_name
const char * av_hwdevice_get_type_name(enum AVHWDeviceType type)
Get the string name of an AVHWDeviceType.
Definition: hwcontext.c:92
codec_id
enum AVCodecID codec_id
Definition: vaapi_decode.c:369
AV_PIX_FMT_YUV420P
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:66
av_usleep
int av_usleep(unsigned usec)
Sleep for a period of time.
Definition: time.c:84
AV_CODEC_ID_H264
@ AV_CODEC_ID_H264
Definition: codec_id.h:76
AmfTraceWriter::avctx
AVCodecContext * avctx
Definition: amfenc.h:39
if
if(ret)
Definition: filter_design.txt:179
context
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this just let it vf default minimum maximum flags name is the option keep it simple and lowercase description are in without and describe what they for example set the foo of the bar offset is the offset of the field in your context
Definition: writing_filters.txt:91
tracer_vtbl
static AMFTraceWriterVtbl tracer_vtbl
Definition: amfenc.c:102
NULL
#define NULL
Definition: coverity.c:32
AVHWFramesContext::sw_format
enum AVPixelFormat sw_format
The pixel format identifying the actual data layout of the hardware frames.
Definition: hwcontext.h:222
av_buffer_unref
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it.
Definition: buffer.c:125
AV_HWDEVICE_TYPE_DXVA2
@ AV_HWDEVICE_TYPE_DXVA2
Definition: hwcontext.h:32
AV_PIX_FMT_YUYV422
@ AV_PIX_FMT_YUYV422
packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr
Definition: pixfmt.h:67
ff_amf_receive_packet
int ff_amf_receive_packet(AVCodecContext *avctx, AVPacket *avpkt)
Definition: amfenc.c:717
amf_copy_buffer
static int amf_copy_buffer(AVCodecContext *avctx, AVPacket *pkt, AMFBuffer *buffer)
Definition: amfenc.c:446
AV_PIX_FMT_BGR0
@ AV_PIX_FMT_BGR0
packed BGR 8:8:8, 32bpp, BGRXBGRX... X=unused/undefined
Definition: pixfmt.h:240
time.h
PTS_PROP
#define PTS_PROP
Definition: amfenc.c:50
AV_PIX_FMT_GRAY8
@ AV_PIX_FMT_GRAY8
Y , 8bpp.
Definition: pixfmt.h:74
FormatMap::av_format
enum AVPixelFormat av_format
Definition: amfenc.c:65
index
int index
Definition: gxfenc.c:89
ff_amf_send_frame
int ff_amf_send_frame(AVCodecContext *avctx, const AVFrame *frame)
Ecoding one frame - common function for all AMF encoders.
Definition: amfenc.c:591
AmfTraceWriter
AMF trace writer callback class Used to capture all AMF logging.
Definition: amfenc.h:37
hwcontext_dxva2.h
av_frame_ref
int av_frame_ref(AVFrame *dst, const AVFrame *src)
Set up a new reference to the data described by the source frame.
Definition: frame.c:444
ff_amf_pix_fmts
enum AVPixelFormat ff_amf_pix_fmts[]
Supported formats.
Definition: amfenc.c:52
size
int size
Definition: twinvq_data.h:11134
AV_NOPTS_VALUE
#define AV_NOPTS_VALUE
Undefined timestamp value.
Definition: avutil.h:248
AVPacket::dts
int64_t dts
Decompression timestamp in AVStream->time_base units; the time at which the packet is decompressed.
Definition: packet.h:354
amf_create_buffer_with_frame_ref
static AMFBuffer * amf_create_buffer_with_frame_ref(const AVFrame *frame, AMFContext *context)
Definition: amfenc.c:564
AVERROR_EXTERNAL
#define AVERROR_EXTERNAL
Generic error in an external library.
Definition: error.h:57
AVPacket::flags
int flags
A combination of AV_PKT_FLAG values.
Definition: packet.h:361
AV_PIX_FMT_RGB0
@ AV_PIX_FMT_RGB0
packed RGB 8:8:8, 32bpp, RGBXRGBX... X=unused/undefined
Definition: pixfmt.h:238
AV_PIX_FMT_D3D11
@ AV_PIX_FMT_D3D11
Hardware surfaces for Direct3D11.
Definition: pixfmt.h:313
AVCodec::id
enum AVCodecID id
Definition: codec.h:204
planes
static const struct @315 planes[]
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:269
AVPacket::pts
int64_t pts
Presentation timestamp in AVStream->time_base units; the time at which the decompressed packet will b...
Definition: packet.h:348
AVDXVA2DeviceContext
This struct is allocated as AVHWDeviceContext.hwctx.
Definition: hwcontext_dxva2.h:39
amf_load_library
static int amf_load_library(AVCodecContext *avctx)
Definition: amfenc.c:108
AVD3D11VADeviceContext
This struct is allocated as AVHWDeviceContext.hwctx.
Definition: hwcontext_d3d11va.h:45
AV_CODEC_ID_HEVC
@ AV_CODEC_ID_HEVC
Definition: codec_id.h:223
uint8_t
uint8_t
Definition: audio_convert.c:194
av_frame_unref
void av_frame_unref(AVFrame *frame)
Unreference all the buffers referenced by frame and reset the frame fields.
Definition: frame.c:554
AVCodecContext::hw_device_ctx
AVBufferRef * hw_device_ctx
A reference to the AVHWDeviceContext describing the device which will be used by a hardware encoder/d...
Definition: avcodec.h:2278
AVCodecContext::height
int height
Definition: avcodec.h:699
AVCodecContext::pix_fmt
enum AVPixelFormat pix_fmt
Pixel format, see AV_PIX_FMT_xxx.
Definition: avcodec.h:736
AVCodecContext::hw_frames_ctx
AVBufferRef * hw_frames_ctx
A reference to the AVHWFramesContext describing the input (for encoding) or output (decoding) frames.
Definition: avcodec.h:2226
AVHWFramesContext
This struct describes a set or pool of "hardware" frames (i.e.
Definition: hwcontext.h:124
ret
ret
Definition: filter_design.txt:187
AVHWDeviceContext::type
enum AVHWDeviceType type
This field identifies the underlying API used for hardware access.
Definition: hwcontext.h:79
AV_PIX_FMT_NV12
@ AV_PIX_FMT_NV12
planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (firs...
Definition: pixfmt.h:89
frame
these buffered frames must be flushed immediately if a new input produces new the filter must not call request_frame to get more It must just process the frame or queue it The task of requesting more frames is left to the filter s request_frame method or the application If a filter has several the filter must be ready for frames arriving randomly on any input any filter with several inputs will most likely require some kind of queuing mechanism It is perfectly acceptable to have a limited queue and to drop frames when the inputs are too unbalanced request_frame For filters that do not use the this method is called when a frame is wanted on an output For a it should directly call filter_frame on the corresponding output For a if there are queued frames already one of these frames should be pushed If the filter should request a frame on one of its repeatedly until at least one frame has been pushed Return or at least make progress towards producing a frame
Definition: filter_design.txt:264
AVHWFramesContext::device_ctx
AVHWDeviceContext * device_ctx
The parent AVHWDeviceContext.
Definition: hwcontext.h:149
FormatMap
Definition: amfenc.c:64
FF_ARRAY_ELEMS
#define FF_ARRAY_ELEMS(a)
Definition: sinewin_tablegen_template.c:38
L
#define L(x)
Definition: vp56_arith.h:36
AVFormatContext::debug
int debug
Flags to enable debugging.
Definition: avformat.h:1618
AVCodecContext
main external API structure.
Definition: avcodec.h:526
av_image_copy
void av_image_copy(uint8_t *dst_data[4], int dst_linesizes[4], const uint8_t *src_data[4], const int src_linesizes[4], enum AVPixelFormat pix_fmt, int width, int height)
Copy image in src_data to dst_data.
Definition: imgutils.c:387
pkt
static AVPacket pkt
Definition: demuxing_decoding.c:54
buffer
the frame and frame reference mechanism is intended to as much as expensive copies of that data while still allowing the filters to produce correct results The data is stored in buffers represented by AVFrame structures Several references can point to the same frame buffer
Definition: filter_design.txt:49
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:65
amf_get_property_buffer
static AMF_RESULT amf_get_property_buffer(AMFData *object, const wchar_t *name, AMFBuffer **val)
Definition: amfenc.c:543
amfenc.h
av_buffer_ref
AVBufferRef * av_buffer_ref(AVBufferRef *buf)
Create a new reference to an AVBuffer.
Definition: buffer.c:93
AVHWFramesContext::initial_pool_size
int initial_pool_size
Initial size of the frame pool.
Definition: hwcontext.h:199
AVERROR_ENCODER_NOT_FOUND
#define AVERROR_ENCODER_NOT_FOUND
Encoder not found.
Definition: error.h:54
mem.h
AVCodecContext::max_b_frames
int max_b_frames
maximum number of B-frames between non-B-frames Note: The output will be delayed by max_b_frames+1 re...
Definition: avcodec.h:786
av_fifo_size
int av_fifo_size(const AVFifoBuffer *f)
Return the amount of data in bytes in the AVFifoBuffer, that is the amount of data you can read from ...
Definition: fifo.c:77
av_fifo_generic_peek_at
int av_fifo_generic_peek_at(AVFifoBuffer *f, void *dest, int offset, int buf_size, void(*func)(void *, void *, int))
Feed data at specific position from an AVFifoBuffer to a user-supplied callback.
Definition: fifo.c:151
av_fifo_freep
void av_fifo_freep(AVFifoBuffer **f)
Free an AVFifoBuffer and reset pointer to NULL.
Definition: fifo.c:63
format_map
static const FormatMap format_map[]
Definition: amfenc.c:69
AVPacket
This structure stores compressed data.
Definition: packet.h:332
AVCodecContext::priv_data
void * priv_data
Definition: avcodec.h:553
AVCodecContext::width
int width
picture width / height.
Definition: avcodec.h:699
av_fifo_alloc
AVFifoBuffer * av_fifo_alloc(unsigned int size)
Initialize an AVFifoBuffer.
Definition: fifo.c:43
imgutils.h
hwcontext.h
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:28
AmfContext
AMF encoder context.
Definition: amfenc.h:46
amf_release_buffer_with_frame_ref
static void amf_release_buffer_with_frame_ref(AMFBuffer *frame_ref_storage_buffer)
Definition: amfenc.c:583
hwcontext_d3d11va.h
w32dlfcn.h
av_get_pix_fmt_name
const char * av_get_pix_fmt_name(enum AVPixelFormat pix_fmt)
Return the short name for a pixel format, NULL in case pix_fmt is unknown.
Definition: pixdesc.c:2465
amf_init_context
static int amf_init_context(AVCodecContext *avctx)
Definition: amfenc.c:213