FFmpeg
amfenc.c
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
19 #include "config.h"
20 
21 #include "libavutil/avassert.h"
22 #include "libavutil/imgutils.h"
23 #include "libavutil/hwcontext.h"
24 #if CONFIG_D3D11VA
25 #include "libavutil/hwcontext_d3d11va.h"
26 #endif
27 #if CONFIG_DXVA2
28 #define COBJMACROS
29 #include "libavutil/hwcontext_dxva2.h"
30 #endif
31 #include "libavutil/mem.h"
32 #include "libavutil/pixdesc.h"
33 #include "libavutil/time.h"
34 
35 #include "amfenc.h"
36 #include "internal.h"
37 
38 #if CONFIG_D3D11VA
39 #include <d3d11.h>
40 #endif
41 
42 #ifdef _WIN32
43 #include "compat/w32dlfcn.h"
44 #else
45 #include <dlfcn.h>
46 #endif
47 
48 #define FFMPEG_AMF_WRITER_ID L"ffmpeg_amf"
49 
50 #define PTS_PROP L"PtsProp"
51 
52 const enum AVPixelFormat ff_amf_pix_fmts[] = {
53  AV_PIX_FMT_NV12,
54  AV_PIX_FMT_YUV420P,
55 #if CONFIG_D3D11VA
56  AV_PIX_FMT_D3D11,
57 #endif
58 #if CONFIG_DXVA2
59  AV_PIX_FMT_DXVA2_VLD,
60 #endif
61  AV_PIX_FMT_NONE
62 };
63 
64 typedef struct FormatMap {
65  enum AVPixelFormat av_format;
66  enum AMF_SURFACE_FORMAT amf_format;
67 } FormatMap;
68 
69 static const FormatMap format_map[] =
70 {
71  { AV_PIX_FMT_NONE, AMF_SURFACE_UNKNOWN },
72  { AV_PIX_FMT_NV12, AMF_SURFACE_NV12 },
73  { AV_PIX_FMT_BGR0, AMF_SURFACE_BGRA },
74  { AV_PIX_FMT_RGB0, AMF_SURFACE_RGBA },
75  { AV_PIX_FMT_GRAY8, AMF_SURFACE_GRAY8 },
76  { AV_PIX_FMT_YUV420P, AMF_SURFACE_YUV420P },
77  { AV_PIX_FMT_YUYV422, AMF_SURFACE_YUY2 },
78 };
79 
80 static enum AMF_SURFACE_FORMAT amf_av_to_amf_format(enum AVPixelFormat fmt)
81 {
82  int i;
83  for (i = 0; i < amf_countof(format_map); i++) {
84  if (format_map[i].av_format == fmt) {
85  return format_map[i].amf_format;
86  }
87  }
88  return AMF_SURFACE_UNKNOWN;
89 }
90 
91 static void AMF_CDECL_CALL AMFTraceWriter_Write(AMFTraceWriter *pThis,
92  const wchar_t *scope, const wchar_t *message)
93 {
94  AmfTraceWriter *tracer = (AmfTraceWriter*)pThis;
95  av_log(tracer->avctx, AV_LOG_DEBUG, "%ls: %ls", scope, message); // \n is provided from AMF
96 }
97 
98 static void AMF_CDECL_CALL AMFTraceWriter_Flush(AMFTraceWriter *pThis)
99 {
100 }
101 
102 static AMFTraceWriterVtbl tracer_vtbl =
103 {
104  .Write = AMFTraceWriter_Write,
105  .Flush = AMFTraceWriter_Flush,
106 };
107 
108 static int amf_load_library(AVCodecContext *avctx)
109 {
110  AmfContext *ctx = avctx->priv_data;
111  AMFInit_Fn init_fun;
112  AMFQueryVersion_Fn version_fun;
113  AMF_RESULT res;
114 
115  ctx->delayed_frame = av_frame_alloc();
116  if (!ctx->delayed_frame) {
117  return AVERROR(ENOMEM);
118  }
119  // hardcoded to current HW queue size - will realloc in timestamp_queue_enqueue() if too small
120  ctx->timestamp_list = av_fifo_alloc((avctx->max_b_frames + 16) * sizeof(int64_t));
121  if (!ctx->timestamp_list) {
122  return AVERROR(ENOMEM);
123  }
124  ctx->dts_delay = 0;
125 
126 
127  ctx->library = dlopen(AMF_DLL_NAMEA, RTLD_NOW | RTLD_LOCAL);
128  AMF_RETURN_IF_FALSE(ctx, ctx->library != NULL,
129  AVERROR_UNKNOWN, "DLL %s failed to open\n", AMF_DLL_NAMEA);
130 
131  init_fun = (AMFInit_Fn)dlsym(ctx->library, AMF_INIT_FUNCTION_NAME);
132  AMF_RETURN_IF_FALSE(ctx, init_fun != NULL, AVERROR_UNKNOWN, "DLL %s failed to find function %s\n", AMF_DLL_NAMEA, AMF_INIT_FUNCTION_NAME);
133 
134  version_fun = (AMFQueryVersion_Fn)dlsym(ctx->library, AMF_QUERY_VERSION_FUNCTION_NAME);
135  AMF_RETURN_IF_FALSE(ctx, version_fun != NULL, AVERROR_UNKNOWN, "DLL %s failed to find function %s\n", AMF_DLL_NAMEA, AMF_QUERY_VERSION_FUNCTION_NAME);
136 
137  res = version_fun(&ctx->version);
138  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "%s failed with error %d\n", AMF_QUERY_VERSION_FUNCTION_NAME, res);
139  res = init_fun(AMF_FULL_VERSION, &ctx->factory);
140  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "%s failed with error %d\n", AMF_INIT_FUNCTION_NAME, res);
141  res = ctx->factory->pVtbl->GetTrace(ctx->factory, &ctx->trace);
142  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "GetTrace() failed with error %d\n", res);
143  res = ctx->factory->pVtbl->GetDebug(ctx->factory, &ctx->debug);
144  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "GetDebug() failed with error %d\n", res);
145  return 0;
146 }
147 
148 #if CONFIG_D3D11VA
149 static int amf_init_from_d3d11_device(AVCodecContext *avctx, AVD3D11VADeviceContext *hwctx)
150 {
151  AmfContext *ctx = avctx->priv_data;
152  AMF_RESULT res;
153 
154  res = ctx->context->pVtbl->InitDX11(ctx->context, hwctx->device, AMF_DX11_1);
155  if (res != AMF_OK) {
156  if (res == AMF_NOT_SUPPORTED)
157  av_log(avctx, AV_LOG_ERROR, "AMF via D3D11 is not supported on the given device.\n");
158  else
159  av_log(avctx, AV_LOG_ERROR, "AMF failed to initialise on the given D3D11 device: %d.\n", res);
160  return AVERROR(ENODEV);
161  }
162 
163  return 0;
164 }
165 #endif
166 
167 #if CONFIG_DXVA2
168 static int amf_init_from_dxva2_device(AVCodecContext *avctx, AVDXVA2DeviceContext *hwctx)
169 {
170  AmfContext *ctx = avctx->priv_data;
171  HANDLE device_handle;
172  IDirect3DDevice9 *device;
173  HRESULT hr;
174  AMF_RESULT res;
175  int ret;
176 
177  hr = IDirect3DDeviceManager9_OpenDeviceHandle(hwctx->devmgr, &device_handle);
178  if (FAILED(hr)) {
179  av_log(avctx, AV_LOG_ERROR, "Failed to open device handle for Direct3D9 device: %lx.\n", (unsigned long)hr);
180  return AVERROR_EXTERNAL;
181  }
182 
183  hr = IDirect3DDeviceManager9_LockDevice(hwctx->devmgr, device_handle, &device, FALSE);
184  if (SUCCEEDED(hr)) {
185  IDirect3DDeviceManager9_UnlockDevice(hwctx->devmgr, device_handle, FALSE);
186  ret = 0;
187  } else {
188  av_log(avctx, AV_LOG_ERROR, "Failed to lock device handle for Direct3D9 device: %lx.\n", (unsigned long)hr);
189  ret = AVERROR_EXTERNAL;
190  }
191 
192  IDirect3DDeviceManager9_CloseDeviceHandle(hwctx->devmgr, device_handle);
193 
194  if (ret < 0)
195  return ret;
196 
197  res = ctx->context->pVtbl->InitDX9(ctx->context, device);
198 
199  IDirect3DDevice9_Release(device);
200 
201  if (res != AMF_OK) {
202  if (res == AMF_NOT_SUPPORTED)
203  av_log(avctx, AV_LOG_ERROR, "AMF via D3D9 is not supported on the given device.\n");
204  else
205  av_log(avctx, AV_LOG_ERROR, "AMF failed to initialise on given D3D9 device: %d.\n", res);
206  return AVERROR(ENODEV);
207  }
208 
209  return 0;
210 }
211 #endif
212 
213 static int amf_init_context(AVCodecContext *avctx)
214 {
215  AmfContext *ctx = avctx->priv_data;
216  AMF_RESULT res;
217  av_unused int ret;
218 
219  ctx->hwsurfaces_in_queue = 0;
220  ctx->hwsurfaces_in_queue_max = 16;
221 
222  // configure AMF logger
223  // the return values of these functions indicate the old state and do not affect behaviour
224  ctx->trace->pVtbl->EnableWriter(ctx->trace, AMF_TRACE_WRITER_DEBUG_OUTPUT, ctx->log_to_dbg != 0 );
225  if (ctx->log_to_dbg)
226  ctx->trace->pVtbl->SetWriterLevel(ctx->trace, AMF_TRACE_WRITER_DEBUG_OUTPUT, AMF_TRACE_TRACE);
227  ctx->trace->pVtbl->EnableWriter(ctx->trace, AMF_TRACE_WRITER_CONSOLE, 0);
228  ctx->trace->pVtbl->SetGlobalLevel(ctx->trace, AMF_TRACE_TRACE);
229 
230  // connect AMF logger to av_log
231  ctx->tracer.vtbl = &tracer_vtbl;
232  ctx->tracer.avctx = avctx;
233  ctx->trace->pVtbl->RegisterWriter(ctx->trace, FFMPEG_AMF_WRITER_ID,(AMFTraceWriter*)&ctx->tracer, 1);
234  ctx->trace->pVtbl->SetWriterLevel(ctx->trace, FFMPEG_AMF_WRITER_ID, AMF_TRACE_TRACE);
235 
236  res = ctx->factory->pVtbl->CreateContext(ctx->factory, &ctx->context);
237  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "CreateContext() failed with error %d\n", res);
238 
239  // If a device was passed to the encoder, try to initialise from that.
240  if (avctx->hw_frames_ctx) {
241  AVHWFramesContext *frames_ctx = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
242 
243  if (amf_av_to_amf_format(frames_ctx->sw_format) == AMF_SURFACE_UNKNOWN) {
244  av_log(avctx, AV_LOG_ERROR, "Format of input frames context (%s) is not supported by AMF.\n",
245  av_get_pix_fmt_name(frames_ctx->sw_format));
246  return AVERROR(EINVAL);
247  }
248 
249  switch (frames_ctx->device_ctx->type) {
250 #if CONFIG_D3D11VA
251  case AV_HWDEVICE_TYPE_D3D11VA:
252  ret = amf_init_from_d3d11_device(avctx, frames_ctx->device_ctx->hwctx);
253  if (ret < 0)
254  return ret;
255  break;
256 #endif
257 #if CONFIG_DXVA2
258  case AV_HWDEVICE_TYPE_DXVA2:
259  ret = amf_init_from_dxva2_device(avctx, frames_ctx->device_ctx->hwctx);
260  if (ret < 0)
261  return ret;
262  break;
263 #endif
264  default:
265  av_log(avctx, AV_LOG_ERROR, "AMF initialisation from a %s frames context is not supported.\n",
266  av_hwdevice_get_type_name(frames_ctx->device_ctx->type));
267  return AVERROR(ENOSYS);
268  }
269 
270  ctx->hw_frames_ctx = av_buffer_ref(avctx->hw_frames_ctx);
271  if (!ctx->hw_frames_ctx)
272  return AVERROR(ENOMEM);
273 
274  if (frames_ctx->initial_pool_size > 0)
275  ctx->hwsurfaces_in_queue_max = frames_ctx->initial_pool_size - 1;
276 
277  } else if (avctx->hw_device_ctx) {
278  AVHWDeviceContext *device_ctx = (AVHWDeviceContext*)avctx->hw_device_ctx->data;
279 
280  switch (device_ctx->type) {
281 #if CONFIG_D3D11VA
282  case AV_HWDEVICE_TYPE_D3D11VA:
283  ret = amf_init_from_d3d11_device(avctx, device_ctx->hwctx);
284  if (ret < 0)
285  return ret;
286  break;
287 #endif
288 #if CONFIG_DXVA2
289  case AV_HWDEVICE_TYPE_DXVA2:
290  ret = amf_init_from_dxva2_device(avctx, device_ctx->hwctx);
291  if (ret < 0)
292  return ret;
293  break;
294 #endif
295  default:
296  av_log(avctx, AV_LOG_ERROR, "AMF initialisation from a %s device is not supported.\n",
297  av_hwdevice_get_type_name(device_ctx->type));
298  return AVERROR(ENOSYS);
299  }
300 
301  ctx->hw_device_ctx = av_buffer_ref(avctx->hw_device_ctx);
302  if (!ctx->hw_device_ctx)
303  return AVERROR(ENOMEM);
304 
305  } else {
306  res = ctx->context->pVtbl->InitDX11(ctx->context, NULL, AMF_DX11_1);
307  if (res == AMF_OK) {
308  av_log(avctx, AV_LOG_VERBOSE, "AMF initialisation succeeded via D3D11.\n");
309  } else {
310  res = ctx->context->pVtbl->InitDX9(ctx->context, NULL);
311  if (res == AMF_OK) {
312  av_log(avctx, AV_LOG_VERBOSE, "AMF initialisation succeeded via D3D9.\n");
313  } else {
314  av_log(avctx, AV_LOG_ERROR, "AMF initialisation failed via D3D9: error %d.\n", res);
315  return AVERROR(ENOSYS);
316  }
317  }
318  }
319  return 0;
320 }
321 
322 static int amf_init_encoder(AVCodecContext *avctx)
323 {
324  AmfContext *ctx = avctx->priv_data;
325  const wchar_t *codec_id = NULL;
326  AMF_RESULT res;
327  enum AVPixelFormat pix_fmt;
328 
329  switch (avctx->codec->id) {
330  case AV_CODEC_ID_H264:
331  codec_id = AMFVideoEncoderVCE_AVC;
332  break;
333  case AV_CODEC_ID_HEVC:
334  codec_id = AMFVideoEncoder_HEVC;
335  break;
336  default:
337  break;
338  }
339  AMF_RETURN_IF_FALSE(ctx, codec_id != NULL, AVERROR(EINVAL), "Codec %d is not supported\n", avctx->codec->id);
340 
341  if (ctx->hw_frames_ctx)
342  pix_fmt = ((AVHWFramesContext*)ctx->hw_frames_ctx->data)->sw_format;
343  else
344  pix_fmt = avctx->pix_fmt;
345 
346  ctx->format = amf_av_to_amf_format(pix_fmt);
347  AMF_RETURN_IF_FALSE(ctx, ctx->format != AMF_SURFACE_UNKNOWN, AVERROR(EINVAL),
348  "Format %s is not supported\n", av_get_pix_fmt_name(pix_fmt));
349 
350  res = ctx->factory->pVtbl->CreateComponent(ctx->factory, ctx->context, codec_id, &ctx->encoder);
351  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_ENCODER_NOT_FOUND, "CreateComponent(%ls) failed with error %d\n", codec_id, res);
352 
353  return 0;
354 }
355 
356 int av_cold ff_amf_encode_close(AVCodecContext *avctx)
357 {
358  AmfContext *ctx = avctx->priv_data;
359 
360  if (ctx->delayed_surface) {
361  ctx->delayed_surface->pVtbl->Release(ctx->delayed_surface);
362  ctx->delayed_surface = NULL;
363  }
364 
365  if (ctx->encoder) {
366  ctx->encoder->pVtbl->Terminate(ctx->encoder);
367  ctx->encoder->pVtbl->Release(ctx->encoder);
368  ctx->encoder = NULL;
369  }
370 
371  if (ctx->context) {
372  ctx->context->pVtbl->Terminate(ctx->context);
373  ctx->context->pVtbl->Release(ctx->context);
374  ctx->context = NULL;
375  }
376  av_buffer_unref(&ctx->hw_device_ctx);
377  av_buffer_unref(&ctx->hw_frames_ctx);
378 
379  if (ctx->trace) {
380  ctx->trace->pVtbl->UnregisterWriter(ctx->trace, FFMPEG_AMF_WRITER_ID);
381  }
382  if (ctx->library) {
383  dlclose(ctx->library);
384  ctx->library = NULL;
385  }
386  ctx->trace = NULL;
387  ctx->debug = NULL;
388  ctx->factory = NULL;
389  ctx->version = 0;
390  ctx->delayed_drain = 0;
391  av_frame_free(&ctx->delayed_frame);
392  av_fifo_freep(&ctx->timestamp_list);
393 
394  return 0;
395 }
396 
397 static int amf_copy_surface(AVCodecContext *avctx, const AVFrame *frame,
398  AMFSurface* surface)
399 {
400  AMFPlane *plane;
401  uint8_t *dst_data[4];
402  int dst_linesize[4];
403  int planes;
404  int i;
405 
406  planes = surface->pVtbl->GetPlanesCount(surface);
407  av_assert0(planes < FF_ARRAY_ELEMS(dst_data));
408 
409  for (i = 0; i < planes; i++) {
410  plane = surface->pVtbl->GetPlaneAt(surface, i);
411  dst_data[i] = plane->pVtbl->GetNative(plane);
412  dst_linesize[i] = plane->pVtbl->GetHPitch(plane);
413  }
414  av_image_copy(dst_data, dst_linesize,
415  (const uint8_t**)frame->data, frame->linesize, frame->format,
416  avctx->width, avctx->height);
417 
418  return 0;
419 }
420 
421 static inline int timestamp_queue_enqueue(AVCodecContext *avctx, int64_t timestamp)
422 {
423  AmfContext *ctx = avctx->priv_data;
424  if (av_fifo_space(ctx->timestamp_list) < sizeof(timestamp)) {
425  if (av_fifo_grow(ctx->timestamp_list, sizeof(timestamp)) < 0) {
426  return AVERROR(ENOMEM);
427  }
428  }
429  av_fifo_generic_write(ctx->timestamp_list, &timestamp, sizeof(timestamp), NULL);
430  return 0;
431 }
432 
433 static int amf_copy_buffer(AVCodecContext *avctx, AVPacket *pkt, AMFBuffer *buffer)
434 {
435  AmfContext *ctx = avctx->priv_data;
436  int ret;
437  AMFVariantStruct var = {0};
438  int64_t timestamp = AV_NOPTS_VALUE;
439  int64_t size = buffer->pVtbl->GetSize(buffer);
440 
441  if ((ret = ff_alloc_packet2(avctx, pkt, size, 0)) < 0) {
442  return ret;
443  }
444  memcpy(pkt->data, buffer->pVtbl->GetNative(buffer), size);
445 
446  switch (avctx->codec->id) {
447  case AV_CODEC_ID_H264:
448  buffer->pVtbl->GetProperty(buffer, AMF_VIDEO_ENCODER_OUTPUT_DATA_TYPE, &var);
449  if(var.int64Value == AMF_VIDEO_ENCODER_OUTPUT_DATA_TYPE_IDR) {
450  pkt->flags = AV_PKT_FLAG_KEY;
451  }
452  break;
453  case AV_CODEC_ID_HEVC:
454  buffer->pVtbl->GetProperty(buffer, AMF_VIDEO_ENCODER_HEVC_OUTPUT_DATA_TYPE, &var);
455  if (var.int64Value == AMF_VIDEO_ENCODER_HEVC_OUTPUT_DATA_TYPE_IDR) {
456  pkt->flags = AV_PKT_FLAG_KEY;
457  }
458  break;
459  default:
460  break;
461  }
462 
463  buffer->pVtbl->GetProperty(buffer, PTS_PROP, &var);
464 
465  pkt->pts = var.int64Value; // original pts
466 
467 
468  AMF_RETURN_IF_FALSE(ctx, av_fifo_size(ctx->timestamp_list) > 0, AVERROR_UNKNOWN, "timestamp_list is empty\n");
469 
470  av_fifo_generic_read(ctx->timestamp_list, &timestamp, sizeof(timestamp), NULL);
471 
472  // calc dts shift if max_b_frames > 0
473  if (avctx->max_b_frames > 0 && ctx->dts_delay == 0) {
474  int64_t timestamp_last = AV_NOPTS_VALUE;
476  "timestamp_list is empty while max_b_frames = %d\n", avctx->max_b_frames);
478  ctx->timestamp_list,
479  &timestamp_last,
480  (av_fifo_size(ctx->timestamp_list) / sizeof(timestamp) - 1) * sizeof(timestamp_last),
481  sizeof(timestamp_last),
482  NULL);
483  if (timestamp < 0 || timestamp_last < AV_NOPTS_VALUE) {
484  return AVERROR(ERANGE);
485  }
486  ctx->dts_delay = timestamp_last - timestamp;
487  }
488  pkt->dts = timestamp - ctx->dts_delay;
489  return 0;
490 }
491 
492 // amfenc API implementation
493 int ff_amf_encode_init(AVCodecContext *avctx)
494 {
495  int ret;
496 
497  if ((ret = amf_load_library(avctx)) == 0) {
498  if ((ret = amf_init_context(avctx)) == 0) {
499  if ((ret = amf_init_encoder(avctx)) == 0) {
500  return 0;
501  }
502  }
503  }
504  ff_amf_encode_close(avctx);
505  return ret;
506 }
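
A hedged sketch (not part of amfenc.c) of how a caller typically drives the AMF encoders through the public libavcodec API once ff_amf_encode_init() has run via avcodec_open2(); sending a NULL frame at the end triggers the Drain() path in ff_amf_send_frame(). Frame setup and most error handling are trimmed.

#include <libavcodec/avcodec.h>

static int encode_all(AVCodecContext *avctx, AVFrame *frames[], int nb_frames)
{
    AVPacket *pkt = av_packet_alloc();
    int ret = 0;

    if (!pkt)
        return AVERROR(ENOMEM);

    for (int i = 0; i <= nb_frames && ret >= 0; i++) {
        /* i == nb_frames sends NULL, which starts draining the encoder */
        ret = avcodec_send_frame(avctx, i < nb_frames ? frames[i] : NULL);
        while (ret >= 0) {
            ret = avcodec_receive_packet(avctx, pkt);
            if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
                ret = 0;
                break;
            }
            if (ret >= 0) {
                /* ... write pkt somewhere ... */
                av_packet_unref(pkt);
            }
        }
    }
    av_packet_free(&pkt);
    return ret;
}
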
507 
508 static AMF_RESULT amf_set_property_buffer(AMFSurface *object, const wchar_t *name, AMFBuffer *val)
509 {
510  AMF_RESULT res;
511  AMFVariantStruct var;
512  res = AMFVariantInit(&var);
513  if (res == AMF_OK) {
514  AMFGuid guid_AMFInterface = IID_AMFInterface();
515  AMFInterface *amf_interface;
516  res = val->pVtbl->QueryInterface(val, &guid_AMFInterface, (void**)&amf_interface);
517 
518  if (res == AMF_OK) {
519  res = AMFVariantAssignInterface(&var, amf_interface);
520  amf_interface->pVtbl->Release(amf_interface);
521  }
522  if (res == AMF_OK) {
523  res = object->pVtbl->SetProperty(object, name, var);
524  }
525  AMFVariantClear(&var);
526  }
527  return res;
528 }
529 
530 static AMF_RESULT amf_get_property_buffer(AMFData *object, const wchar_t *name, AMFBuffer **val)
531 {
532  AMF_RESULT res;
533  AMFVariantStruct var;
534  res = AMFVariantInit(&var);
535  if (res == AMF_OK) {
536  res = object->pVtbl->GetProperty(object, name, &var);
537  if (res == AMF_OK) {
538  if (var.type == AMF_VARIANT_INTERFACE) {
539  AMFGuid guid_AMFBuffer = IID_AMFBuffer();
540  AMFInterface *amf_interface = AMFVariantInterface(&var);
541  res = amf_interface->pVtbl->QueryInterface(amf_interface, &guid_AMFBuffer, (void**)val);
542  } else {
543  res = AMF_INVALID_DATA_TYPE;
544  }
545  }
546  AMFVariantClear(&var);
547  }
548  return res;
549 }
550 
551 static AMFBuffer *amf_create_buffer_with_frame_ref(const AVFrame *frame, AMFContext *context)
552 {
553  AVFrame *frame_ref;
554  AMFBuffer *frame_ref_storage_buffer = NULL;
555  AMF_RESULT res;
556 
557  res = context->pVtbl->AllocBuffer(context, AMF_MEMORY_HOST, sizeof(frame_ref), &frame_ref_storage_buffer);
558  if (res == AMF_OK) {
559  frame_ref = av_frame_clone(frame);
560  if (frame_ref) {
561  memcpy(frame_ref_storage_buffer->pVtbl->GetNative(frame_ref_storage_buffer), &frame_ref, sizeof(frame_ref));
562  } else {
563  frame_ref_storage_buffer->pVtbl->Release(frame_ref_storage_buffer);
564  frame_ref_storage_buffer = NULL;
565  }
566  }
567  return frame_ref_storage_buffer;
568 }
569 
570 static void amf_release_buffer_with_frame_ref(AMFBuffer *frame_ref_storage_buffer)
571 {
572  AVFrame *frame_ref;
573  memcpy(&frame_ref, frame_ref_storage_buffer->pVtbl->GetNative(frame_ref_storage_buffer), sizeof(frame_ref));
574  av_frame_free(&frame_ref);
575  frame_ref_storage_buffer->pVtbl->Release(frame_ref_storage_buffer);
576 }
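
The two helpers above keep the source AVFrame, and with it the underlying hardware surface, alive while AMF still references it: a cloned AVFrame pointer is stored byte-for-byte inside a small host AMFBuffer that travels with the surface as the "av_frame_ref" property, then read back and freed in ff_amf_receive_packet(). A minimal stand-alone sketch of that pointer round-trip (hypothetical, not part of amfenc.c):

#include <stdio.h>
#include <string.h>

int main(void)
{
    int value = 42;                       /* stands in for the cloned AVFrame       */
    int *ref  = &value;                   /* stands in for the AVFrame * reference  */
    unsigned char storage[sizeof(ref)];   /* stands in for the AMFBuffer payload    */
    int *out = NULL;

    memcpy(storage, &ref, sizeof(ref));   /* amf_create_buffer_with_frame_ref()     */
    memcpy(&out, storage, sizeof(out));   /* amf_release_buffer_with_frame_ref()    */
    printf("%d\n", *out);                 /* prints 42 */
    return 0;
}
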
577 
578 int ff_amf_send_frame(AVCodecContext *avctx, const AVFrame *frame)
579 {
580  AmfContext *ctx = avctx->priv_data;
581  AMFSurface *surface;
582  AMF_RESULT res;
583  int ret;
584 
585  if (!ctx->encoder)
586  return AVERROR(EINVAL);
587 
588  if (!frame) { // submit drain
589  if (!ctx->eof) { // submit drain one time only
590  if (ctx->delayed_surface != NULL) {
591  ctx->delayed_drain = 1; // input queue is full: resubmit Drain() in ff_amf_receive_packet
592  } else if(!ctx->delayed_drain) {
593  res = ctx->encoder->pVtbl->Drain(ctx->encoder);
594  if (res == AMF_INPUT_FULL) {
595  ctx->delayed_drain = 1; // input queue is full: resubmit Drain() in ff_amf_receive_packet
596  } else {
597  if (res == AMF_OK) {
598  ctx->eof = 1; // drain started
599  }
600  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "Drain() failed with error %d\n", res);
601  }
602  }
603  } else{
604  return AVERROR_EOF;
605  }
606  } else { // submit frame
607  int hw_surface = 0;
608 
609  if (ctx->delayed_surface != NULL) {
610  return AVERROR(EAGAIN); // should not happen when called from ffmpeg, other clients may resubmit
611  }
612  // prepare surface from frame
613  switch (frame->format) {
614 #if CONFIG_D3D11VA
615  case AV_PIX_FMT_D3D11:
616  {
617  static const GUID AMFTextureArrayIndexGUID = { 0x28115527, 0xe7c3, 0x4b66, { 0x99, 0xd3, 0x4f, 0x2a, 0xe6, 0xb4, 0x7f, 0xaf } };
618  ID3D11Texture2D *texture = (ID3D11Texture2D*)frame->data[0]; // actual texture
619  int index = (intptr_t)frame->data[1]; // index of the slice in the texture array - tells AMF which slice to use
620 
621  av_assert0(frame->hw_frames_ctx && ctx->hw_frames_ctx &&
622  frame->hw_frames_ctx->data == ctx->hw_frames_ctx->data);
623 
624  texture->lpVtbl->SetPrivateData(texture, &AMFTextureArrayIndexGUID, sizeof(index), &index);
625 
626  res = ctx->context->pVtbl->CreateSurfaceFromDX11Native(ctx->context, texture, &surface, NULL); // wrap to AMF surface
627  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR(ENOMEM), "CreateSurfaceFromDX11Native() failed with error %d\n", res);
628 
629  hw_surface = 1;
630  }
631  break;
632 #endif
633 #if CONFIG_DXVA2
634  case AV_PIX_FMT_DXVA2_VLD:
635  {
636  IDirect3DSurface9 *texture = (IDirect3DSurface9 *)frame->data[3]; // actual texture
637 
638  res = ctx->context->pVtbl->CreateSurfaceFromDX9Native(ctx->context, texture, &surface, NULL); // wrap to AMF surface
639  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR(ENOMEM), "CreateSurfaceFromDX9Native() failed with error %d\n", res);
640 
641  hw_surface = 1;
642  }
643  break;
644 #endif
645  default:
646  {
647  res = ctx->context->pVtbl->AllocSurface(ctx->context, AMF_MEMORY_HOST, ctx->format, avctx->width, avctx->height, &surface);
648  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR(ENOMEM), "AllocSurface() failed with error %d\n", res);
649  amf_copy_surface(avctx, frame, surface);
650  }
651  break;
652  }
653 
654  if (hw_surface) {
655  AMFBuffer *frame_ref_storage_buffer;
656 
657  // input HW surfaces can be vertically aligned by 16; tell AMF the real size
658  surface->pVtbl->SetCrop(surface, 0, 0, frame->width, frame->height);
659 
660  frame_ref_storage_buffer = amf_create_buffer_with_frame_ref(frame, ctx->context);
661  AMF_RETURN_IF_FALSE(ctx, frame_ref_storage_buffer != NULL, AVERROR(ENOMEM), "create_buffer_with_frame_ref() returned NULL\n");
662 
663  res = amf_set_property_buffer(surface, L"av_frame_ref", frame_ref_storage_buffer);
664  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "SetProperty failed for \"av_frame_ref\" with error %d\n", res);
665  ctx->hwsurfaces_in_queue++;
666  frame_ref_storage_buffer->pVtbl->Release(frame_ref_storage_buffer);
667  }
668 
669  surface->pVtbl->SetPts(surface, frame->pts);
670  AMF_ASSIGN_PROPERTY_INT64(res, surface, PTS_PROP, frame->pts);
671 
672  switch (avctx->codec->id) {
673  case AV_CODEC_ID_H264:
674  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_INSERT_AUD, !!ctx->aud);
675  break;
676  case AV_CODEC_ID_HEVC:
677  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_HEVC_INSERT_AUD, !!ctx->aud);
678  break;
679  default:
680  break;
681  }
682 
683 
684  // submit surface
685  res = ctx->encoder->pVtbl->SubmitInput(ctx->encoder, (AMFData*)surface);
686  if (res == AMF_INPUT_FULL) { // handle full queue
687  // store surface for later submission
688  ctx->delayed_surface = surface;
689  if (surface->pVtbl->GetMemoryType(surface) == AMF_MEMORY_DX11) {
690  av_frame_ref(ctx->delayed_frame, frame);
691  }
692  } else {
693  surface->pVtbl->Release(surface);
694  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "SubmitInput() failed with error %d\n", res);
695 
696  if ((ret = timestamp_queue_enqueue(avctx, frame->pts)) < 0) {
697  return ret;
698  }
699 
700  }
701  }
702  return 0;
703 }
704 int ff_amf_receive_packet(AVCodecContext *avctx, AVPacket *avpkt)
705 {
706  int ret;
707  AMF_RESULT res;
708  AMF_RESULT res_query;
709  AmfContext *ctx = avctx->priv_data;
710  AMFData *data = NULL;
711  int block_and_wait;
712 
713  if (!ctx->encoder)
714  return AVERROR(EINVAL);
715 
716  do {
717  block_and_wait = 0;
718  // poll data
719  res_query = ctx->encoder->pVtbl->QueryOutput(ctx->encoder, &data);
720  if (data) {
721  // copy data to packet
722  AMFBuffer* buffer;
723  AMFGuid guid = IID_AMFBuffer();
724  data->pVtbl->QueryInterface(data, &guid, (void**)&buffer); // query for buffer interface
725  ret = amf_copy_buffer(avctx, avpkt, buffer);
726 
727  buffer->pVtbl->Release(buffer);
728 
729  if (data->pVtbl->HasProperty(data, L"av_frame_ref")) {
730  AMFBuffer *frame_ref_storage_buffer;
731  res = amf_get_property_buffer(data, L"av_frame_ref", &frame_ref_storage_buffer);
732  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "GetProperty failed for \"av_frame_ref\" with error %d\n", res);
733  amf_release_buffer_with_frame_ref(frame_ref_storage_buffer);
734  ctx->hwsurfaces_in_queue--;
735  }
736 
737  data->pVtbl->Release(data);
738 
739  AMF_RETURN_IF_FALSE(ctx, ret >= 0, ret, "amf_copy_buffer() failed with error %d\n", ret);
740 
741  if (ctx->delayed_surface != NULL) { // try to resubmit frame
742  res = ctx->encoder->pVtbl->SubmitInput(ctx->encoder, (AMFData*)ctx->delayed_surface);
743  if (res != AMF_INPUT_FULL) {
744  int64_t pts = ctx->delayed_surface->pVtbl->GetPts(ctx->delayed_surface);
745  ctx->delayed_surface->pVtbl->Release(ctx->delayed_surface);
746  ctx->delayed_surface = NULL;
747  av_frame_unref(ctx->delayed_frame);
748  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "Repeated SubmitInput() failed with error %d\n", res);
749 
750  if ((ret = timestamp_queue_enqueue(avctx, pts)) < 0) {
751  return ret;
752  }
753  } else {
754  av_log(avctx, AV_LOG_WARNING, "Data acquired but delayed frame submission got AMF_INPUT_FULL - should not happen\n");
755  }
756  } else if (ctx->delayed_drain) { // try to resubmit drain
757  res = ctx->encoder->pVtbl->Drain(ctx->encoder);
758  if (res != AMF_INPUT_FULL) {
759  ctx->delayed_drain = 0;
760  ctx->eof = 1; // drain started
761  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "Repeated Drain() failed with error %d\n", res);
762  } else {
763  av_log(avctx, AV_LOG_WARNING, "Data acquired but delayed drain submission got AMF_INPUT_FULL - should not happen\n");
764  }
765  }
766  } else if (ctx->delayed_surface != NULL || ctx->delayed_drain || (ctx->eof && res_query != AMF_EOF) || (ctx->hwsurfaces_in_queue >= ctx->hwsurfaces_in_queue_max)) {
767  block_and_wait = 1;
768  av_usleep(1000); // wait and poll again
769  }
770  } while (block_and_wait);
771 
772  if (res_query == AMF_EOF) {
773  ret = AVERROR_EOF;
774  } else if (data == NULL) {
775  ret = AVERROR(EAGAIN);
776  } else {
777  ret = 0;
778  }
779  return ret;
780 }
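
For context, the per-codec wrappers (amfenc_h264.c, amfenc_hevc.c) are what expose these ff_amf_* entry points as encoders. Below is a rough, hedged sketch of what that registration looks like for this FFmpeg generation; the field names, capability flags, and the internal send_frame/receive_packet callbacks are assumptions for illustration, and the real tables also install AVOption arrays, defaults, and codec-specific init functions that end up calling ff_amf_encode_init().

#include "libavcodec/avcodec.h"
#include "amfenc.h"

/* hypothetical sketch - the real table lives in amfenc_h264.c */
static int amf_encode_init_h264_sketch(AVCodecContext *avctx)
{
    /* the real init also sets AVC-specific AMF encoder properties */
    return ff_amf_encode_init(avctx);
}

AVCodec ff_h264_amf_encoder_sketch = {
    .name           = "h264_amf",
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_H264,
    .init           = amf_encode_init_h264_sketch,
    .send_frame     = ff_amf_send_frame,      /* assumed internal callback wiring */
    .receive_packet = ff_amf_receive_packet,  /* assumed internal callback wiring */
    .close          = ff_amf_encode_close,
    .priv_data_size = sizeof(AmfContext),
    .pix_fmts       = ff_amf_pix_fmts,
    .capabilities   = AV_CODEC_CAP_DELAY | AV_CODEC_CAP_HARDWARE,
};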
Definition: opengl_enc.c:103