qsvdec.c
/*
 * Intel MediaSDK QSV codec-independent code
 *
 * copyright (c) 2013 Luca Barbato
 * copyright (c) 2015 Anton Khirnov <anton@khirnov.net>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <string.h>
#include <sys/types.h>

#include <mfx/mfxvideo.h>

#include "libavutil/common.h"
#include "libavutil/hwcontext.h"
#include "libavutil/hwcontext_qsv.h"
#include "libavutil/mem.h"
#include "libavutil/log.h"
#include "libavutil/pixdesc.h"
#include "libavutil/pixfmt.h"
#include "libavutil/time.h"

#include "avcodec.h"
#include "internal.h"
#include "qsv.h"
#include "qsv_internal.h"
#include "qsvdec.h"

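/*
 * Session selection in qsv_init_session() below, in order of preference:
 * a session supplied by the caller (AVQSVContext.session), an internal
 * session tied to avctx->hw_frames_ctx, one tied to avctx->hw_device_ctx,
 * and finally a plain internal session decoding to system memory. The
 * chosen session always has its decoder closed so it starts uninitialized.
 */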
static int qsv_init_session(AVCodecContext *avctx, QSVContext *q, mfxSession session,
                            AVBufferRef *hw_frames_ref, AVBufferRef *hw_device_ref)
{
    int ret;

    if (session) {
        q->session = session;
    } else if (hw_frames_ref) {
        if (q->internal_session) {
            MFXClose(q->internal_session);
            q->internal_session = NULL;
        }
        av_buffer_unref(&q->frames_ctx.hw_frames_ctx);

        q->frames_ctx.hw_frames_ctx = av_buffer_ref(hw_frames_ref);
        if (!q->frames_ctx.hw_frames_ctx)
            return AVERROR(ENOMEM);

        ret = ff_qsv_init_session_frames(avctx, &q->internal_session,
                                         &q->frames_ctx, q->load_plugins,
                                         q->iopattern == MFX_IOPATTERN_OUT_OPAQUE_MEMORY);
        if (ret < 0) {
            av_buffer_unref(&q->frames_ctx.hw_frames_ctx);
            return ret;
        }

        q->session = q->internal_session;
    } else if (hw_device_ref) {
        if (q->internal_session) {
            MFXClose(q->internal_session);
            q->internal_session = NULL;
        }

        ret = ff_qsv_init_session_device(avctx, &q->internal_session,
                                         hw_device_ref, q->load_plugins);
        if (ret < 0)
            return ret;

        q->session = q->internal_session;
    } else {
        if (!q->internal_session) {
            ret = ff_qsv_init_internal_session(avctx, &q->internal_session,
                                               q->load_plugins);
            if (ret < 0)
                return ret;
        }

        q->session = q->internal_session;
    }

    /* make sure the decoder is uninitialized */
    MFXVideoDECODE_Close(q->session);

    return 0;
}

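/*
 * qsv_decode_init() sizes the async fifo for async_depth + 1 in-flight
 * frames, each stored as an (mfxSyncPoint*, QSVFrame*) pair, derives the
 * IOPattern from the caller's AVQSVContext or hw_frames_ctx (falling back
 * to system memory), and fills a mfxVideoParam from the codec context
 * before calling MFXVideoDECODE_Init().
 */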
static int qsv_decode_init(AVCodecContext *avctx, QSVContext *q)
{
    const AVPixFmtDescriptor *desc;
    mfxSession session = NULL;
    int iopattern = 0;
    mfxVideoParam param = { 0 };
    int frame_width  = avctx->coded_width;
    int frame_height = avctx->coded_height;
    int ret;

    desc = av_pix_fmt_desc_get(avctx->sw_pix_fmt);
    if (!desc)
        return AVERROR_BUG;

    if (!q->async_fifo) {
        q->async_fifo = av_fifo_alloc((1 + q->async_depth) *
                                      (sizeof(mfxSyncPoint*) + sizeof(QSVFrame*)));
        if (!q->async_fifo)
            return AVERROR(ENOMEM);
    }

    if (avctx->pix_fmt == AV_PIX_FMT_QSV && avctx->hwaccel_context) {
        AVQSVContext *user_ctx = avctx->hwaccel_context;
        session           = user_ctx->session;
        iopattern         = user_ctx->iopattern;
        q->ext_buffers    = user_ctx->ext_buffers;
        q->nb_ext_buffers = user_ctx->nb_ext_buffers;
    }

    if (avctx->hw_frames_ctx) {
        AVHWFramesContext *frames_ctx     = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
        AVQSVFramesContext *frames_hwctx  = frames_ctx->hwctx;

        if (!iopattern) {
            if (frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME)
                iopattern = MFX_IOPATTERN_OUT_OPAQUE_MEMORY;
            else if (frames_hwctx->frame_type & MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET)
                iopattern = MFX_IOPATTERN_OUT_VIDEO_MEMORY;
        }

        frame_width  = frames_hwctx->surfaces[0].Info.Width;
        frame_height = frames_hwctx->surfaces[0].Info.Height;
    }

    if (!iopattern)
        iopattern = MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
    q->iopattern = iopattern;

    ret = qsv_init_session(avctx, q, session, avctx->hw_frames_ctx, avctx->hw_device_ctx);
    if (ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Error initializing an MFX session\n");
        return ret;
    }

    ret = ff_qsv_codec_id_to_mfx(avctx->codec_id);
    if (ret < 0)
        return ret;

    param.mfx.CodecId      = ret;
    param.mfx.CodecProfile = ff_qsv_profile_to_mfx(avctx->codec_id, avctx->profile);
    param.mfx.CodecLevel   = avctx->level == FF_LEVEL_UNKNOWN ? MFX_LEVEL_UNKNOWN : avctx->level;

    param.mfx.FrameInfo.BitDepthLuma   = desc->comp[0].depth;
    param.mfx.FrameInfo.BitDepthChroma = desc->comp[0].depth;
    param.mfx.FrameInfo.Shift          = desc->comp[0].depth > 8;
    param.mfx.FrameInfo.FourCC         = q->fourcc;
    param.mfx.FrameInfo.Width          = frame_width;
    param.mfx.FrameInfo.Height         = frame_height;
    param.mfx.FrameInfo.ChromaFormat   = MFX_CHROMAFORMAT_YUV420;

    switch (avctx->field_order) {
    case AV_FIELD_PROGRESSIVE:
        param.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_PROGRESSIVE;
        break;
    case AV_FIELD_TT:
        param.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_FIELD_TFF;
        break;
    case AV_FIELD_BB:
        param.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_FIELD_BFF;
        break;
    default:
        param.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_UNKNOWN;
        break;
    }

    param.IOPattern   = q->iopattern;
    param.AsyncDepth  = q->async_depth;
    param.ExtParam    = q->ext_buffers;
    param.NumExtParam = q->nb_ext_buffers;

    ret = MFXVideoDECODE_Init(q->session, &param);
    if (ret < 0)
        return ff_qsv_print_error(avctx, ret,
                                  "Error initializing the MFX video decoder");

    q->frame_info = param.mfx.FrameInfo;

    return 0;
}

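/*
 * Work-frame pool: alloc_frame() binds an AVFrame buffer to an
 * mfxFrameSurface1, qsv_clear_unused_frames() recycles entries the SDK no
 * longer has locked or queued, get_surface() reuses or grows the
 * q->work_frames list, and find_frame() maps an output surface back to its
 * QSVFrame.
 */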
static int alloc_frame(AVCodecContext *avctx, QSVContext *q, QSVFrame *frame)
{
    int ret;

    ret = ff_get_buffer(avctx, frame->frame, AV_GET_BUFFER_FLAG_REF);
    if (ret < 0)
        return ret;

    if (frame->frame->format == AV_PIX_FMT_QSV) {
        frame->surface = *(mfxFrameSurface1*)frame->frame->data[3];
    } else {
        frame->surface.Info = q->frame_info;

        frame->surface.Data.PitchLow = frame->frame->linesize[0];
        frame->surface.Data.Y        = frame->frame->data[0];
        frame->surface.Data.UV       = frame->frame->data[1];
    }

    if (q->frames_ctx.mids) {
        ret = ff_qsv_find_surface_idx(&q->frames_ctx, frame);
        if (ret < 0)
            return ret;

        frame->surface.Data.MemId = &q->frames_ctx.mids[ret];
    }

    frame->used = 1;

    return 0;
}

static void qsv_clear_unused_frames(QSVContext *q)
{
    QSVFrame *cur = q->work_frames;
    while (cur) {
        if (cur->used && !cur->surface.Data.Locked && !cur->queued) {
            cur->used = 0;
            av_frame_unref(cur->frame);
        }
        cur = cur->next;
    }
}

static int get_surface(AVCodecContext *avctx, QSVContext *q, mfxFrameSurface1 **surf)
{
    QSVFrame *frame, **last;
    int ret;

    qsv_clear_unused_frames(q);

    frame = q->work_frames;
    last  = &q->work_frames;
    while (frame) {
        if (!frame->used) {
            ret = alloc_frame(avctx, q, frame);
            if (ret < 0)
                return ret;
            *surf = &frame->surface;
            return 0;
        }

        last  = &frame->next;
        frame = frame->next;
    }

    frame = av_mallocz(sizeof(*frame));
    if (!frame)
        return AVERROR(ENOMEM);
    frame->frame = av_frame_alloc();
    if (!frame->frame) {
        av_freep(&frame);
        return AVERROR(ENOMEM);
    }
    *last = frame;

    ret = alloc_frame(avctx, q, frame);
    if (ret < 0)
        return ret;

    *surf = &frame->surface;

    return 0;
}

static QSVFrame *find_frame(QSVContext *q, mfxFrameSurface1 *surf)
{
    QSVFrame *cur = q->work_frames;
    while (cur) {
        if (surf == &cur->surface)
            return cur;
        cur = cur->next;
    }
    return NULL;
}

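/*
 * qsv_decode() feeds one bitstream chunk to MFXVideoDECODE_DecodeFrameAsync(),
 * retrying while the device is busy or more surfaces are needed, and queues
 * the resulting (frame, sync point) pair in async_fifo. A finished frame is
 * dequeued, synchronized (for non-AV_PIX_FMT_QSV output) and returned only
 * once the fifo is full or the caller is draining with an empty packet. The
 * return value is the number of consumed bytes (bs.DataOffset).
 */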
static int qsv_decode(AVCodecContext *avctx, QSVContext *q,
                      AVFrame *frame, int *got_frame,
                      AVPacket *avpkt)
{
    QSVFrame *out_frame;
    mfxFrameSurface1 *insurf;
    mfxFrameSurface1 *outsurf;
    mfxSyncPoint *sync;
    mfxBitstream bs = { { { 0 } } };
    int ret;

    if (avpkt->size) {
        bs.Data       = avpkt->data;
        bs.DataLength = avpkt->size;
        bs.MaxLength  = bs.DataLength;
        bs.TimeStamp  = avpkt->pts;
    }

    sync = av_mallocz(sizeof(*sync));
    if (!sync) {
        av_freep(&sync);
        return AVERROR(ENOMEM);
    }

    do {
        ret = get_surface(avctx, q, &insurf);
        if (ret < 0) {
            av_freep(&sync);
            return ret;
        }

        ret = MFXVideoDECODE_DecodeFrameAsync(q->session, avpkt->size ? &bs : NULL,
                                              insurf, &outsurf, sync);
        if (ret == MFX_WRN_DEVICE_BUSY)
            av_usleep(500);

    } while (ret == MFX_WRN_DEVICE_BUSY || ret == MFX_ERR_MORE_SURFACE);

    if (ret != MFX_ERR_NONE &&
        ret != MFX_ERR_MORE_DATA &&
        ret != MFX_WRN_VIDEO_PARAM_CHANGED &&
        ret != MFX_ERR_MORE_SURFACE) {
        av_freep(&sync);
        return ff_qsv_print_error(avctx, ret,
                                  "Error during QSV decoding.");
    }

    /* make sure we do not enter an infinite loop if the SDK
     * did not consume any data and did not return anything */
    if (!*sync && !bs.DataOffset) {
        bs.DataOffset = avpkt->size;
        ++q->zero_consume_run;
        if (q->zero_consume_run > 1)
            ff_qsv_print_warning(avctx, ret, "A decode call did not consume any data");
    } else {
        q->zero_consume_run = 0;
    }

    if (*sync) {
        QSVFrame *out_frame = find_frame(q, outsurf);

        if (!out_frame) {
            av_log(avctx, AV_LOG_ERROR,
                   "The returned surface does not correspond to any frame\n");
            av_freep(&sync);
            return AVERROR_BUG;
        }

        out_frame->queued = 1;
        av_fifo_generic_write(q->async_fifo, &out_frame, sizeof(out_frame), NULL);
        av_fifo_generic_write(q->async_fifo, &sync,      sizeof(sync),      NULL);
    } else {
        av_freep(&sync);
    }

    if (!av_fifo_space(q->async_fifo) ||
        (!avpkt->size && av_fifo_size(q->async_fifo))) {
        AVFrame *src_frame;

        av_fifo_generic_read(q->async_fifo, &out_frame, sizeof(out_frame), NULL);
        av_fifo_generic_read(q->async_fifo, &sync,      sizeof(sync),      NULL);
        out_frame->queued = 0;

        if (avctx->pix_fmt != AV_PIX_FMT_QSV) {
            do {
                ret = MFXVideoCORE_SyncOperation(q->session, *sync, 1000);
            } while (ret == MFX_WRN_IN_EXECUTION);
        }

        av_freep(&sync);

        src_frame = out_frame->frame;

        ret = av_frame_ref(frame, src_frame);
        if (ret < 0)
            return ret;

        outsurf = &out_frame->surface;

#if FF_API_PKT_PTS
FF_DISABLE_DEPRECATION_WARNINGS
        frame->pkt_pts = outsurf->Data.TimeStamp;
FF_ENABLE_DEPRECATION_WARNINGS
#endif
        frame->pts = outsurf->Data.TimeStamp;

        frame->repeat_pict =
            outsurf->Info.PicStruct & MFX_PICSTRUCT_FRAME_TRIPLING ? 4 :
            outsurf->Info.PicStruct & MFX_PICSTRUCT_FRAME_DOUBLING ? 2 :
            outsurf->Info.PicStruct & MFX_PICSTRUCT_FIELD_REPEATED ? 1 : 0;
        frame->top_field_first =
            outsurf->Info.PicStruct & MFX_PICSTRUCT_FIELD_TFF;
        frame->interlaced_frame =
            !(outsurf->Info.PicStruct & MFX_PICSTRUCT_PROGRESSIVE);

        /* update the surface properties */
        if (avctx->pix_fmt == AV_PIX_FMT_QSV)
            ((mfxFrameSurface1*)frame->data[3])->Info = outsurf->Info;

        *got_frame = 1;
    }

    return bs.DataOffset;
}

int ff_qsv_decode_close(QSVContext *q)
{
    QSVFrame *cur = q->work_frames;

    if (q->session)
        MFXVideoDECODE_Close(q->session);

    while (q->async_fifo && av_fifo_size(q->async_fifo)) {
        QSVFrame *out_frame;
        mfxSyncPoint *sync;

        av_fifo_generic_read(q->async_fifo, &out_frame, sizeof(out_frame), NULL);
        av_fifo_generic_read(q->async_fifo, &sync,      sizeof(sync),      NULL);

        av_freep(&sync);
    }

    while (cur) {
        q->work_frames = cur->next;
        av_frame_free(&cur->frame);
        av_freep(&cur);
        cur = q->work_frames;
    }

    av_fifo_free(q->async_fifo);
    q->async_fifo = NULL;

    av_parser_close(q->parser);
    avcodec_free_context(&q->avctx_internal);

    if (q->internal_session)
        MFXClose(q->internal_session);

    av_buffer_unref(&q->frames_ctx.hw_frames_ctx);
    av_buffer_unref(&q->frames_ctx.mids_buf);

    return 0;
}

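/*
 * ff_qsv_process_data() is the entry point used by the per-codec wrappers:
 * it lazily creates an internal parser and codec context, and whenever the
 * parsed pixel format or coded dimensions change it reinitializes the
 * decoder through ff_get_format() and qsv_decode_init() before handing the
 * packet to qsv_decode(). An empty packet goes straight to qsv_decode() to
 * drain delayed frames.
 */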
int ff_qsv_process_data(AVCodecContext *avctx, QSVContext *q,
                        AVFrame *frame, int *got_frame, AVPacket *pkt)
{
    uint8_t *dummy_data;
    int dummy_size;
    int ret;

    if (!q->avctx_internal) {
        q->avctx_internal = avcodec_alloc_context3(NULL);
        if (!q->avctx_internal)
            return AVERROR(ENOMEM);

        q->parser = av_parser_init(avctx->codec_id);
        if (!q->parser)
            return AVERROR(ENOMEM);

        q->parser->flags |= PARSER_FLAG_COMPLETE_FRAMES;
        q->orig_pix_fmt   = AV_PIX_FMT_NONE;
    }

    if (!pkt->size)
        return qsv_decode(avctx, q, frame, got_frame, pkt);

    /* we assume the packets are already split properly and want
     * just the codec parameters here */
    av_parser_parse2(q->parser, q->avctx_internal,
                     &dummy_data, &dummy_size,
                     pkt->data, pkt->size, pkt->pts, pkt->dts,
                     pkt->pos);

    /* TODO: flush delayed frames on reinit */
    if (q->parser->format       != q->orig_pix_fmt ||
        q->parser->coded_width  != avctx->coded_width ||
        q->parser->coded_height != avctx->coded_height) {
        enum AVPixelFormat pix_fmts[3] = { AV_PIX_FMT_QSV,
                                           AV_PIX_FMT_NONE,
                                           AV_PIX_FMT_NONE };
        enum AVPixelFormat qsv_format;

        qsv_format = ff_qsv_map_pixfmt(q->parser->format, &q->fourcc);
        if (qsv_format < 0) {
            av_log(avctx, AV_LOG_ERROR,
                   "Decoding pixel format '%s' is not supported\n",
                   av_get_pix_fmt_name(q->parser->format));
            ret = AVERROR(ENOSYS);
            goto reinit_fail;
        }

        q->orig_pix_fmt     = q->parser->format;
        avctx->pix_fmt      = pix_fmts[1] = qsv_format;
        avctx->width        = q->parser->width;
        avctx->height       = q->parser->height;
        avctx->coded_width  = q->parser->coded_width;
        avctx->coded_height = q->parser->coded_height;
        avctx->field_order  = q->parser->field_order;
        avctx->level        = q->avctx_internal->level;
        avctx->profile      = q->avctx_internal->profile;

        ret = ff_get_format(avctx, pix_fmts);
        if (ret < 0)
            goto reinit_fail;

        avctx->pix_fmt = ret;

        ret = qsv_decode_init(avctx, q);
        if (ret < 0)
            goto reinit_fail;
    }

    return qsv_decode(avctx, q, frame, got_frame, pkt);

reinit_fail:
    q->orig_pix_fmt = q->parser->format = avctx->pix_fmt = AV_PIX_FMT_NONE;
    return ret;
}

void ff_qsv_decode_flush(AVCodecContext *avctx, QSVContext *q)
{
    q->orig_pix_fmt = AV_PIX_FMT_NONE;
}
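
The functions above are not used directly by applications; they are driven by the per-codec QSV decoder wrappers (qsvdec_h2645.c, qsvdec_other.c). The following is a minimal sketch of such a caller, not the actual wrapper code: MyQSVDecContext, my_qsv_decode_frame and my_qsv_decode_close are hypothetical names, and the real wrappers additionally handle options, extradata and packet buffering.

/* Hypothetical private context embedding the shared QSVContext. */
typedef struct MyQSVDecContext {
    AVClass *class;
    QSVContext qsv;
} MyQSVDecContext;

/* Old-style decode callback: returns the number of bytes consumed,
 * as reported by ff_qsv_process_data(), or a negative error code. */
static int my_qsv_decode_frame(AVCodecContext *avctx, void *data,
                               int *got_frame, AVPacket *avpkt)
{
    MyQSVDecContext *s = avctx->priv_data;
    AVFrame *frame = data;

    /* An empty packet drains the frames delayed inside the MFX decoder. */
    return ff_qsv_process_data(avctx, &s->qsv, frame, got_frame, avpkt);
}

static av_cold int my_qsv_decode_close(AVCodecContext *avctx)
{
    MyQSVDecContext *s = avctx->priv_data;

    /* Releases the session, the work-frame pool and the async fifo. */
    ff_qsv_decode_close(&s->qsv);
    return 0;
}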