qsvdec.c
/*
 * Intel MediaSDK QSV codec-independent code
 *
 * copyright (c) 2013 Luca Barbato
 * copyright (c) 2015 Anton Khirnov <anton@khirnov.net>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <string.h>
#include <sys/types.h>

#include <mfx/mfxvideo.h>

#include "libavutil/common.h"
#include "libavutil/hwcontext.h"
#include "libavutil/hwcontext_qsv.h"
#include "libavutil/mem.h"
#include "libavutil/log.h"
#include "libavutil/pixdesc.h"
#include "libavutil/pixfmt.h"
#include "libavutil/time.h"

#include "avcodec.h"
#include "internal.h"
#include "qsv.h"
#include "qsv_internal.h"
#include "qsvdec.h"

const AVCodecHWConfigInternal *ff_qsv_hw_configs[] = {
    &(const AVCodecHWConfigInternal) {
        .public = {
            .pix_fmt     = AV_PIX_FMT_QSV,
            .methods     = AV_CODEC_HW_CONFIG_METHOD_HW_FRAMES_CTX |
                           AV_CODEC_HW_CONFIG_METHOD_AD_HOC,
            .device_type = AV_HWDEVICE_TYPE_QSV,
        },
        .hwaccel = NULL,
    },
    NULL
};

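/* Pick the MFX session to decode with: an explicit user-supplied session,
 * a session created from the hardware frames or device context, or a
 * purely internal session, in that order of preference. */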
static int qsv_init_session(AVCodecContext *avctx, QSVContext *q, mfxSession session,
                            AVBufferRef *hw_frames_ref, AVBufferRef *hw_device_ref)
{
    int ret;

    if (session) {
        q->session = session;
    } else if (hw_frames_ref) {
        if (q->internal_session) {
            MFXClose(q->internal_session);
            q->internal_session = NULL;
        }
        av_buffer_unref(&q->frames_ctx.hw_frames_ctx);

        q->frames_ctx.hw_frames_ctx = av_buffer_ref(hw_frames_ref);
        if (!q->frames_ctx.hw_frames_ctx)
            return AVERROR(ENOMEM);

        ret = ff_qsv_init_session_frames(avctx, &q->internal_session,
                                         &q->frames_ctx, q->load_plugins,
                                         q->iopattern == MFX_IOPATTERN_OUT_OPAQUE_MEMORY);
        if (ret < 0) {
            av_buffer_unref(&q->frames_ctx.hw_frames_ctx);
            return ret;
        }

        q->session = q->internal_session;
    } else if (hw_device_ref) {
        if (q->internal_session) {
            MFXClose(q->internal_session);
            q->internal_session = NULL;
        }

        ret = ff_qsv_init_session_device(avctx, &q->internal_session,
                                         hw_device_ref, q->load_plugins);
        if (ret < 0)
            return ret;

        q->session = q->internal_session;
    } else {
        if (!q->internal_session) {
            ret = ff_qsv_init_internal_session(avctx, &q->internal_session,
                                               q->load_plugins);
            if (ret < 0)
                return ret;
        }

        q->session = q->internal_session;
    }

    /* make sure the decoder is uninitialized */
    MFXVideoDECODE_Close(q->session);

    return 0;
}

static inline unsigned int qsv_fifo_item_size(void)
{
    return sizeof(mfxSyncPoint*) + sizeof(QSVFrame*);
}

static inline unsigned int qsv_fifo_size(const AVFifoBuffer* fifo)
{
    return av_fifo_size(fifo) / qsv_fifo_item_size();
}

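/* (Re)initialize the MFX decoder: allocate the async FIFO, pick an IO
 * pattern, set up the session and fill mfxVideoParam from the current
 * codec parameters. */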
static int qsv_decode_init(AVCodecContext *avctx, QSVContext *q)
{
    const AVPixFmtDescriptor *desc;
    mfxSession session = NULL;
    int iopattern = 0;
    mfxVideoParam param = { 0 };
    int frame_width  = avctx->coded_width;
    int frame_height = avctx->coded_height;
    int ret;

    desc = av_pix_fmt_desc_get(avctx->sw_pix_fmt);
    if (!desc)
        return AVERROR_BUG;

    if (!q->async_fifo) {
        q->async_fifo = av_fifo_alloc(q->async_depth * qsv_fifo_item_size());
        if (!q->async_fifo)
            return AVERROR(ENOMEM);
    }

    if (avctx->pix_fmt == AV_PIX_FMT_QSV && avctx->hwaccel_context) {
        AVQSVContext *user_ctx = avctx->hwaccel_context;
        session           = user_ctx->session;
        iopattern         = user_ctx->iopattern;
        q->ext_buffers    = user_ctx->ext_buffers;
        q->nb_ext_buffers = user_ctx->nb_ext_buffers;
    }

    if (avctx->hw_frames_ctx) {
        AVHWFramesContext *frames_ctx    = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
        AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;

        if (!iopattern) {
            if (frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME)
                iopattern = MFX_IOPATTERN_OUT_OPAQUE_MEMORY;
            else if (frames_hwctx->frame_type & MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET)
                iopattern = MFX_IOPATTERN_OUT_VIDEO_MEMORY;
        }
    }

    if (!iopattern)
        iopattern = MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
    q->iopattern = iopattern;

    ret = qsv_init_session(avctx, q, session, avctx->hw_frames_ctx, avctx->hw_device_ctx);
    if (ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Error initializing an MFX session\n");
        return ret;
    }

    ret = ff_qsv_codec_id_to_mfx(avctx->codec_id);
    if (ret < 0)
        return ret;

    param.mfx.CodecId      = ret;
    param.mfx.CodecProfile = ff_qsv_profile_to_mfx(avctx->codec_id, avctx->profile);
    param.mfx.CodecLevel   = avctx->level == FF_LEVEL_UNKNOWN ? MFX_LEVEL_UNKNOWN : avctx->level;

    param.mfx.FrameInfo.BitDepthLuma   = desc->comp[0].depth;
    param.mfx.FrameInfo.BitDepthChroma = desc->comp[0].depth;
    param.mfx.FrameInfo.Shift          = desc->comp[0].depth > 8;
    param.mfx.FrameInfo.FourCC         = q->fourcc;
    param.mfx.FrameInfo.Width          = frame_width;
    param.mfx.FrameInfo.Height         = frame_height;
    param.mfx.FrameInfo.ChromaFormat   = MFX_CHROMAFORMAT_YUV420;

    switch (avctx->field_order) {
    case AV_FIELD_PROGRESSIVE:
        param.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_PROGRESSIVE;
        break;
    case AV_FIELD_TT:
        param.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_FIELD_TFF;
        break;
    case AV_FIELD_BB:
        param.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_FIELD_BFF;
        break;
    default:
        param.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_UNKNOWN;
        break;
    }

    param.IOPattern   = q->iopattern;
    param.AsyncDepth  = q->async_depth;
    param.ExtParam    = q->ext_buffers;
    param.NumExtParam = q->nb_ext_buffers;

    ret = MFXVideoDECODE_Init(q->session, &param);
    if (ret < 0)
        return ff_qsv_print_error(avctx, ret,
                                  "Error initializing the MFX video decoder");

    q->frame_info = param.mfx.FrameInfo;

    return 0;
}

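/* Attach an AVFrame buffer to a QSVFrame and fill the corresponding
 * mfxFrameSurface1, including the per-frame decoded-frame-info ext buffer. */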
static int alloc_frame(AVCodecContext *avctx, QSVContext *q, QSVFrame *frame)
{
    int ret;

    ret = ff_get_buffer(avctx, frame->frame, AV_GET_BUFFER_FLAG_REF);
    if (ret < 0)
        return ret;

    if (frame->frame->format == AV_PIX_FMT_QSV) {
        frame->surface = *(mfxFrameSurface1*)frame->frame->data[3];
    } else {
        frame->surface.Info = q->frame_info;

        frame->surface.Data.PitchLow = frame->frame->linesize[0];
        frame->surface.Data.Y        = frame->frame->data[0];
        frame->surface.Data.UV       = frame->frame->data[1];
    }

    if (q->frames_ctx.mids) {
        ret = ff_qsv_find_surface_idx(&q->frames_ctx, frame);
        if (ret < 0)
            return ret;

        frame->surface.Data.MemId = &q->frames_ctx.mids[ret];
    }
    frame->surface.Data.ExtParam    = &frame->ext_param;
    frame->surface.Data.NumExtParam = 1;
    frame->ext_param                = (mfxExtBuffer*)&frame->dec_info;
    frame->dec_info.Header.BufferId = MFX_EXTBUFF_DECODED_FRAME_INFO;
    frame->dec_info.Header.BufferSz = sizeof(frame->dec_info);

    frame->used = 1;

    return 0;
}

static void qsv_clear_unused_frames(QSVContext *q)
{
    QSVFrame *cur = q->work_frames;
    while (cur) {
        if (cur->used && !cur->surface.Data.Locked && !cur->queued) {
            cur->used = 0;
            av_frame_unref(cur->frame);
        }
        cur = cur->next;
    }
}

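/* Return a free input surface for DecodeFrameAsync, reusing an unused
 * entry from the work_frames list or appending a new one. */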
static int get_surface(AVCodecContext *avctx, QSVContext *q, mfxFrameSurface1 **surf)
{
    QSVFrame *frame, **last;
    int ret;

    qsv_clear_unused_frames(q);

    frame = q->work_frames;
    last  = &q->work_frames;
    while (frame) {
        if (!frame->used) {
            ret = alloc_frame(avctx, q, frame);
            if (ret < 0)
                return ret;
            *surf = &frame->surface;
            return 0;
        }

        last  = &frame->next;
        frame = frame->next;
    }

    frame = av_mallocz(sizeof(*frame));
    if (!frame)
        return AVERROR(ENOMEM);
    frame->frame = av_frame_alloc();
    if (!frame->frame) {
        av_freep(&frame);
        return AVERROR(ENOMEM);
    }
    *last = frame;

    ret = alloc_frame(avctx, q, frame);
    if (ret < 0)
        return ret;

    *surf = &frame->surface;

    return 0;
}

static QSVFrame *find_frame(QSVContext *q, mfxFrameSurface1 *surf)
{
    QSVFrame *cur = q->work_frames;
    while (cur) {
        if (surf == &cur->surface)
            return cur;
        cur = cur->next;
    }
    return NULL;
}

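/* Feed one packet to DecodeFrameAsync, queue the resulting surface and
 * sync point on async_fifo, and once the pipeline is async_depth deep
 * (or we are draining) sync and return the oldest decoded frame. */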
static int qsv_decode(AVCodecContext *avctx, QSVContext *q,
                      AVFrame *frame, int *got_frame,
                      AVPacket *avpkt)
{
    QSVFrame *out_frame;
    mfxFrameSurface1 *insurf;
    mfxFrameSurface1 *outsurf;
    mfxSyncPoint *sync;
    mfxBitstream bs = { { { 0 } } };
    int ret;

    if (avpkt->size) {
        bs.Data       = avpkt->data;
        bs.DataLength = avpkt->size;
        bs.MaxLength  = bs.DataLength;
        bs.TimeStamp  = avpkt->pts;
        if (avctx->field_order == AV_FIELD_PROGRESSIVE)
            bs.DataFlag |= MFX_BITSTREAM_COMPLETE_FRAME;
    }

    sync = av_mallocz(sizeof(*sync));
    if (!sync) {
        av_freep(&sync);
        return AVERROR(ENOMEM);
    }

    do {
        ret = get_surface(avctx, q, &insurf);
        if (ret < 0) {
            av_freep(&sync);
            return ret;
        }

        ret = MFXVideoDECODE_DecodeFrameAsync(q->session, avpkt->size ? &bs : NULL,
                                              insurf, &outsurf, sync);
        if (ret == MFX_WRN_DEVICE_BUSY)
            av_usleep(500);

    } while (ret == MFX_WRN_DEVICE_BUSY || ret == MFX_ERR_MORE_SURFACE);

    if (ret != MFX_ERR_NONE &&
        ret != MFX_ERR_MORE_DATA &&
        ret != MFX_WRN_VIDEO_PARAM_CHANGED &&
        ret != MFX_ERR_MORE_SURFACE) {
        av_freep(&sync);
        return ff_qsv_print_error(avctx, ret,
                                  "Error during QSV decoding.");
    }

    /* make sure we do not enter an infinite loop if the SDK
     * did not consume any data and did not return anything */
    if (!*sync && !bs.DataOffset) {
        bs.DataOffset = avpkt->size;
        ++q->zero_consume_run;
        if (q->zero_consume_run > 1)
            ff_qsv_print_warning(avctx, ret, "A decode call did not consume any data");
    } else {
        q->zero_consume_run = 0;
    }

    if (*sync) {
        QSVFrame *out_frame = find_frame(q, outsurf);

        if (!out_frame) {
            av_log(avctx, AV_LOG_ERROR,
                   "The returned surface does not correspond to any frame\n");
            av_freep(&sync);
            return AVERROR_BUG;
        }

        out_frame->queued = 1;
        av_fifo_generic_write(q->async_fifo, &out_frame, sizeof(out_frame), NULL);
        av_fifo_generic_write(q->async_fifo, &sync,      sizeof(sync),      NULL);
    } else {
        av_freep(&sync);
    }

    if ((qsv_fifo_size(q->async_fifo) >= q->async_depth) ||
        (!avpkt->size && av_fifo_size(q->async_fifo))) {
        AVFrame *src_frame;

        av_fifo_generic_read(q->async_fifo, &out_frame, sizeof(out_frame), NULL);
        av_fifo_generic_read(q->async_fifo, &sync,      sizeof(sync),      NULL);
        out_frame->queued = 0;

        if (avctx->pix_fmt != AV_PIX_FMT_QSV) {
            do {
                ret = MFXVideoCORE_SyncOperation(q->session, *sync, 1000);
            } while (ret == MFX_WRN_IN_EXECUTION);
        }

        av_freep(&sync);

        src_frame = out_frame->frame;

        ret = av_frame_ref(frame, src_frame);
        if (ret < 0)
            return ret;

        outsurf = &out_frame->surface;

#if FF_API_PKT_PTS
FF_DISABLE_DEPRECATION_WARNINGS
        frame->pkt_pts = outsurf->Data.TimeStamp;
FF_ENABLE_DEPRECATION_WARNINGS
#endif
        frame->pts = outsurf->Data.TimeStamp;

        frame->repeat_pict =
            outsurf->Info.PicStruct & MFX_PICSTRUCT_FRAME_TRIPLING ? 4 :
            outsurf->Info.PicStruct & MFX_PICSTRUCT_FRAME_DOUBLING ? 2 :
            outsurf->Info.PicStruct & MFX_PICSTRUCT_FIELD_REPEATED ? 1 : 0;
        frame->top_field_first =
            outsurf->Info.PicStruct & MFX_PICSTRUCT_FIELD_TFF;
        frame->interlaced_frame =
            !(outsurf->Info.PicStruct & MFX_PICSTRUCT_PROGRESSIVE);
        frame->pict_type = ff_qsv_map_pictype(out_frame->dec_info.FrameType);
        /* Treating an IDR frame as the key frame is only valid for H.264;
         * for HEVC, IRAP frames are key frames. */
        if (avctx->codec_id == AV_CODEC_ID_H264)
            frame->key_frame = !!(out_frame->dec_info.FrameType & MFX_FRAMETYPE_IDR);

        /* update the surface properties */
        if (avctx->pix_fmt == AV_PIX_FMT_QSV)
            ((mfxFrameSurface1*)frame->data[3])->Info = outsurf->Info;

        *got_frame = 1;
    }

    return bs.DataOffset;
}

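/* Close the MFX decoder, drain pending sync points, and free all frames,
 * the parser, the internal codec context and the frames context. */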
int ff_qsv_decode_close(QSVContext *q)
{
    QSVFrame *cur = q->work_frames;

    if (q->session)
        MFXVideoDECODE_Close(q->session);

    while (q->async_fifo && av_fifo_size(q->async_fifo)) {
        QSVFrame *out_frame;
        mfxSyncPoint *sync;

        av_fifo_generic_read(q->async_fifo, &out_frame, sizeof(out_frame), NULL);
        av_fifo_generic_read(q->async_fifo, &sync,      sizeof(sync),      NULL);

        av_freep(&sync);
    }

    while (cur) {
        q->work_frames = cur->next;
        av_frame_free(&cur->frame);
        av_freep(&cur);
        cur = q->work_frames;
    }

    av_fifo_free(q->async_fifo);
    q->async_fifo = NULL;

    av_parser_close(q->parser);
    avcodec_free_context(&q->avctx_internal);

    if (q->internal_session)
        MFXClose(q->internal_session);

    av_buffer_unref(&q->frames_ctx.hw_frames_ctx);
    av_buffer_unref(&q->frames_ctx.mids_buf);

    return 0;
}

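/* Top-level entry point: run the packet through the parser, reinitialize
 * the decoder when the stream parameters change, then decode. */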
int ff_qsv_process_data(AVCodecContext *avctx, QSVContext *q,
                        AVFrame *frame, int *got_frame, AVPacket *pkt)
{
    uint8_t *dummy_data;
    int dummy_size;
    int ret;
    const AVPixFmtDescriptor *desc;

    if (!q->avctx_internal) {
        q->avctx_internal = avcodec_alloc_context3(NULL);
        if (!q->avctx_internal)
            return AVERROR(ENOMEM);

        q->parser = av_parser_init(avctx->codec_id);
        if (!q->parser)
            return AVERROR(ENOMEM);

        q->parser->flags |= PARSER_FLAG_COMPLETE_FRAMES;
        q->orig_pix_fmt   = AV_PIX_FMT_NONE;
    }

    if (!pkt->size)
        return qsv_decode(avctx, q, frame, got_frame, pkt);

    /* we assume the packets are already split properly and want
     * just the codec parameters here */
    av_parser_parse2(q->parser, q->avctx_internal,
                     &dummy_data, &dummy_size,
                     pkt->data, pkt->size, pkt->pts, pkt->dts,
                     pkt->pos);

    avctx->field_order = q->parser->field_order;
    /* TODO: flush delayed frames on reinit */
    if (q->parser->format != q->orig_pix_fmt                                      ||
        FFALIGN(q->parser->coded_width,  16) != FFALIGN(avctx->coded_width,  16)  ||
        FFALIGN(q->parser->coded_height, 16) != FFALIGN(avctx->coded_height, 16)) {
        enum AVPixelFormat pix_fmts[3] = { AV_PIX_FMT_QSV,
                                           AV_PIX_FMT_NONE,
                                           AV_PIX_FMT_NONE };
        enum AVPixelFormat qsv_format;

        qsv_format = ff_qsv_map_pixfmt(q->parser->format, &q->fourcc);
        if (qsv_format < 0) {
            av_log(avctx, AV_LOG_ERROR,
                   "Decoding pixel format '%s' is not supported\n",
                   av_get_pix_fmt_name(q->parser->format));
            ret = AVERROR(ENOSYS);
            goto reinit_fail;
        }

        q->orig_pix_fmt     = q->parser->format;
        avctx->pix_fmt      = pix_fmts[1] = qsv_format;
        avctx->width        = q->parser->width;
        avctx->height       = q->parser->height;
        avctx->coded_width  = FFALIGN(q->parser->coded_width,  16);
        avctx->coded_height = FFALIGN(q->parser->coded_height, 16);
        avctx->level        = q->avctx_internal->level;
        avctx->profile      = q->avctx_internal->profile;

        ret = ff_get_format(avctx, pix_fmts);
        if (ret < 0)
            goto reinit_fail;

        avctx->pix_fmt = ret;

        desc = av_pix_fmt_desc_get(avctx->pix_fmt);
        if (!desc) {
            ret = AVERROR_BUG;
            goto reinit_fail;
        }

        if (desc->comp[0].depth > 8) {
            avctx->coded_width  = FFALIGN(q->parser->coded_width,  32);
            avctx->coded_height = FFALIGN(q->parser->coded_height, 32);
        }

        ret = qsv_decode_init(avctx, q);
        if (ret < 0)
            goto reinit_fail;
    }

    return qsv_decode(avctx, q, frame, got_frame, pkt);

reinit_fail:
    q->orig_pix_fmt = q->parser->format = avctx->pix_fmt = AV_PIX_FMT_NONE;
    return ret;
}

void ff_qsv_decode_flush(AVCodecContext *avctx, QSVContext *q)
{
    q->orig_pix_fmt = AV_PIX_FMT_NONE;
}