vdpau.c
/*
 * Video Decode and Presentation API for UNIX (VDPAU) is used for
 * HW decode acceleration for MPEG-1/2, MPEG-4 ASP, H.264 and VC-1.
 *
 * Copyright (c) 2008 NVIDIA
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <limits.h>

#include "avcodec.h"
#include "internal.h"
#include "h264dec.h"
#include "vc1.h"
#include "vdpau.h"
#include "vdpau_compat.h"
#include "vdpau_internal.h"

// XXX: at the time of adding this ifdefery, av_assert* wasn't used outside.
// When dropping it, make sure other av_assert* were not added since then.
#if FF_API_BUFS_VDPAU
#include "libavutil/avassert.h"
#endif

#if FF_API_VDPAU
#undef NDEBUG
#include <assert.h>
#endif

/**
 * @addtogroup VDPAU_Decoding
 *
 * @{
 */

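/* Map a VdpStatus returned by the driver to the nearest AVERROR code
 * (VDP_STATUS_OK maps to 0). */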
static int vdpau_error(VdpStatus status)
{
    switch (status) {
    case VDP_STATUS_OK:
        return 0;
    case VDP_STATUS_NO_IMPLEMENTATION:
        return AVERROR(ENOSYS);
    case VDP_STATUS_DISPLAY_PREEMPTED:
        return AVERROR(EIO);
    case VDP_STATUS_INVALID_HANDLE:
        return AVERROR(EBADF);
    case VDP_STATUS_INVALID_POINTER:
        return AVERROR(EFAULT);
    case VDP_STATUS_RESOURCES:
        return AVERROR(ENOBUFS);
    case VDP_STATUS_HANDLE_DEVICE_MISMATCH:
        return AVERROR(EXDEV);
    case VDP_STATUS_ERROR:
        return AVERROR(EIO);
    default:
        return AVERROR(EINVAL);
    }
}

AVVDPAUContext *av_alloc_vdpaucontext(void)
{
    return av_vdpau_alloc_context();
}

MAKE_ACCESSORS(AVVDPAUContext, vdpau_hwaccel, AVVDPAU_Render2, render2)

int av_vdpau_get_surface_parameters(AVCodecContext *avctx,
                                    VdpChromaType *type,
                                    uint32_t *width, uint32_t *height)
{
    VdpChromaType t;
    uint32_t w = avctx->coded_width;
    uint32_t h = avctx->coded_height;

    /* See <vdpau/vdpau.h> for per-type alignment constraints. */
    switch (avctx->sw_pix_fmt) {
    case AV_PIX_FMT_YUV420P:
    case AV_PIX_FMT_YUVJ420P:
        t = VDP_CHROMA_TYPE_420;
        w = (w + 1) & ~1;
        h = (h + 3) & ~3;
        break;
    case AV_PIX_FMT_YUV422P:
    case AV_PIX_FMT_YUVJ422P:
        t = VDP_CHROMA_TYPE_422;
        w = (w + 1) & ~1;
        h = (h + 1) & ~1;
        break;
    case AV_PIX_FMT_YUV444P:
    case AV_PIX_FMT_YUVJ444P:
        t = VDP_CHROMA_TYPE_444;
        h = (h + 1) & ~1;
        break;
    default:
        return AVERROR(ENOSYS);
    }

    if (type)
        *type = t;
    if (width)
        *width = w;
    if (height)
        *height = h;
    return 0;
}

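/* Common hwaccel init: pick up the device and callbacks either from the
 * (deprecated) user-supplied hwaccel_context or from hw_frames_ctx /
 * hw_device_ctx, verify surface and decoder capabilities for the requested
 * profile and level, then create the VdpDecoder. */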
int ff_vdpau_common_init(AVCodecContext *avctx, VdpDecoderProfile profile,
                         int level)
{
    VDPAUHWContext *hwctx = avctx->hwaccel_context;
    VDPAUContext *vdctx = avctx->internal->hwaccel_priv_data;
    VdpVideoSurfaceQueryCapabilities *surface_query_caps;
    VdpDecoderQueryCapabilities *decoder_query_caps;
    VdpDecoderCreate *create;
    VdpGetInformationString *info;
    const char *info_string;
    void *func;
    VdpStatus status;
    VdpBool supported;
    uint32_t max_level, max_mb, max_width, max_height;
    VdpChromaType type;
    uint32_t width;
    uint32_t height;

    vdctx->width  = UINT32_MAX;
    vdctx->height = UINT32_MAX;

    if (av_vdpau_get_surface_parameters(avctx, &type, &width, &height))
        return AVERROR(ENOSYS);

    if (hwctx) {
        hwctx->reset = 0;

        if (hwctx->context.decoder != VDP_INVALID_HANDLE) {
            vdctx->decoder = hwctx->context.decoder;
            vdctx->render  = hwctx->context.render;
            vdctx->device  = VDP_INVALID_HANDLE;
            return 0; /* Decoder created by user */
        }

        vdctx->device           = hwctx->device;
        vdctx->get_proc_address = hwctx->get_proc_address;

        if (hwctx->flags & AV_HWACCEL_FLAG_IGNORE_LEVEL)
            level = 0;

        if (!(hwctx->flags & AV_HWACCEL_FLAG_ALLOW_HIGH_DEPTH) &&
            type != VDP_CHROMA_TYPE_420)
            return AVERROR(ENOSYS);
    } else {
        AVHWFramesContext *frames_ctx = NULL;
        AVVDPAUDeviceContext *dev_ctx;

        // We assume the hw_frames_ctx always survives until ff_vdpau_common_uninit
        // is called. This holds true as the user is not allowed to touch
        // hw_device_ctx, or hw_frames_ctx after get_format (and ff_get_format
        // itself also uninits before unreffing hw_frames_ctx).
        if (avctx->hw_frames_ctx) {
            frames_ctx = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
        } else if (avctx->hw_device_ctx) {
            int ret;

            avctx->hw_frames_ctx = av_hwframe_ctx_alloc(avctx->hw_device_ctx);
            if (!avctx->hw_frames_ctx)
                return AVERROR(ENOMEM);

            frames_ctx            = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
            frames_ctx->format    = AV_PIX_FMT_VDPAU;
            frames_ctx->sw_format = avctx->sw_pix_fmt;
            frames_ctx->width     = avctx->coded_width;
            frames_ctx->height    = avctx->coded_height;

            ret = av_hwframe_ctx_init(avctx->hw_frames_ctx);
            if (ret < 0) {
                av_buffer_unref(&avctx->hw_frames_ctx);
                return ret;
            }
        }

        if (!frames_ctx) {
            av_log(avctx, AV_LOG_ERROR, "A hardware frames context is "
                   "required for VDPAU decoding.\n");
            return AVERROR(EINVAL);
        }

        dev_ctx = frames_ctx->device_ctx->hwctx;

        vdctx->device           = dev_ctx->device;
        vdctx->get_proc_address = dev_ctx->get_proc_address;

        if (avctx->hwaccel_flags & AV_HWACCEL_FLAG_IGNORE_LEVEL)
            level = 0;
    }

    if (level < 0)
        return AVERROR(ENOTSUP);

    status = vdctx->get_proc_address(vdctx->device,
                                     VDP_FUNC_ID_GET_INFORMATION_STRING,
                                     &func);
    if (status != VDP_STATUS_OK)
        return vdpau_error(status);
    else
        info = func;

    status = info(&info_string);
    if (status != VDP_STATUS_OK)
        return vdpau_error(status);
    if (avctx->codec_id == AV_CODEC_ID_HEVC && strncmp(info_string, "NVIDIA ", 7) == 0 &&
        !(avctx->hwaccel_flags & AV_HWACCEL_FLAG_ALLOW_PROFILE_MISMATCH)) {
        av_log(avctx, AV_LOG_VERBOSE, "HEVC with NVIDIA VDPAU drivers is buggy, skipping.\n");
        return AVERROR(ENOTSUP);
    }

    status = vdctx->get_proc_address(vdctx->device,
                                     VDP_FUNC_ID_VIDEO_SURFACE_QUERY_CAPABILITIES,
                                     &func);
    if (status != VDP_STATUS_OK)
        return vdpau_error(status);
    else
        surface_query_caps = func;

    status = surface_query_caps(vdctx->device, type, &supported,
                                &max_width, &max_height);
    if (status != VDP_STATUS_OK)
        return vdpau_error(status);
    if (supported != VDP_TRUE ||
        max_width < width || max_height < height)
        return AVERROR(ENOTSUP);

    status = vdctx->get_proc_address(vdctx->device,
                                     VDP_FUNC_ID_DECODER_QUERY_CAPABILITIES,
                                     &func);
    if (status != VDP_STATUS_OK)
        return vdpau_error(status);
    else
        decoder_query_caps = func;

    status = decoder_query_caps(vdctx->device, profile, &supported, &max_level,
                                &max_mb, &max_width, &max_height);
#ifdef VDP_DECODER_PROFILE_H264_CONSTRAINED_BASELINE
    if ((status != VDP_STATUS_OK || supported != VDP_TRUE) && profile == VDP_DECODER_PROFILE_H264_CONSTRAINED_BASELINE) {
        profile = VDP_DECODER_PROFILE_H264_MAIN;
        status = decoder_query_caps(vdctx->device, profile, &supported,
                                    &max_level, &max_mb,
                                    &max_width, &max_height);
    }
#endif
    if (status != VDP_STATUS_OK)
        return vdpau_error(status);

    if (supported != VDP_TRUE || max_level < level ||
        max_width < width || max_height < height)
        return AVERROR(ENOTSUP);

    status = vdctx->get_proc_address(vdctx->device, VDP_FUNC_ID_DECODER_CREATE,
                                     &func);
    if (status != VDP_STATUS_OK)
        return vdpau_error(status);
    else
        create = func;

    status = vdctx->get_proc_address(vdctx->device, VDP_FUNC_ID_DECODER_RENDER,
                                     &func);
    if (status != VDP_STATUS_OK)
        return vdpau_error(status);
    else
        vdctx->render = func;

    status = create(vdctx->device, profile, width, height, avctx->refs,
                    &vdctx->decoder);
    if (status == VDP_STATUS_OK) {
        vdctx->width  = avctx->coded_width;
        vdctx->height = avctx->coded_height;
    }

    return vdpau_error(status);
}

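/* Destroy the VdpDecoder created in ff_vdpau_common_init(), unless the
 * decoder handle is owned by the user or was never created. */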
int ff_vdpau_common_uninit(AVCodecContext *avctx)
{
    VDPAUContext *vdctx = avctx->internal->hwaccel_priv_data;
    VdpDecoderDestroy *destroy;
    void *func;
    VdpStatus status;

    if (vdctx->device == VDP_INVALID_HANDLE)
        return 0; /* Decoder created and destroyed by user */
    if (vdctx->width == UINT32_MAX && vdctx->height == UINT32_MAX)
        return 0;

    status = vdctx->get_proc_address(vdctx->device,
                                     VDP_FUNC_ID_DECODER_DESTROY, &func);
    if (status != VDP_STATUS_OK)
        return vdpau_error(status);
    else
        destroy = func;

    status = destroy(vdctx->decoder);
    return vdpau_error(status);
}

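/* Recreate the decoder when the coded dimensions changed or a reset was
 * requested through the user's hwaccel context. */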
static int ff_vdpau_common_reinit(AVCodecContext *avctx)
{
    VDPAUHWContext *hwctx = avctx->hwaccel_context;
    VDPAUContext *vdctx = avctx->internal->hwaccel_priv_data;

    if (vdctx->device == VDP_INVALID_HANDLE)
        return 0; /* Decoder created by user */
    if (avctx->coded_width == vdctx->width &&
        avctx->coded_height == vdctx->height && (!hwctx || !hwctx->reset))
        return 0;

    avctx->hwaccel->uninit(avctx);
    return avctx->hwaccel->init(avctx);
}

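/* Reset the per-picture bitstream buffer table at the start of a frame. */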
int ff_vdpau_common_start_frame(struct vdpau_picture_context *pic_ctx,
                                av_unused const uint8_t *buffer,
                                av_unused uint32_t size)
{
    pic_ctx->bitstream_buffers_allocated = 0;
    pic_ctx->bitstream_buffers_used      = 0;
    pic_ctx->bitstream_buffers           = NULL;
    return 0;
}

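/* Submit the accumulated bitstream buffers and picture parameters to the
 * driver (through the user's render2 callback if one is installed), then
 * release the buffer table. */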
int ff_vdpau_common_end_frame(AVCodecContext *avctx, AVFrame *frame,
                              struct vdpau_picture_context *pic_ctx)
{
    VDPAUContext *vdctx = avctx->internal->hwaccel_priv_data;
    AVVDPAUContext *hwctx = avctx->hwaccel_context;
    VdpVideoSurface surf = ff_vdpau_get_surface_id(frame);
    VdpStatus status;
    int val;

    val = ff_vdpau_common_reinit(avctx);
    if (val < 0)
        return val;

#if FF_API_BUFS_VDPAU
FF_DISABLE_DEPRECATION_WARNINGS
    if (hwctx) {
        av_assert0(sizeof(hwctx->info) <= sizeof(pic_ctx->info));
        memcpy(&hwctx->info, &pic_ctx->info, sizeof(hwctx->info));
        hwctx->bitstream_buffers           = pic_ctx->bitstream_buffers;
        hwctx->bitstream_buffers_allocated = pic_ctx->bitstream_buffers_allocated;
        hwctx->bitstream_buffers_used      = pic_ctx->bitstream_buffers_used;
    }
FF_ENABLE_DEPRECATION_WARNINGS
#endif

    if (hwctx && !hwctx->render && hwctx->render2) {
        status = hwctx->render2(avctx, frame, (void *)&pic_ctx->info,
                                pic_ctx->bitstream_buffers_used, pic_ctx->bitstream_buffers);
    } else
        status = vdctx->render(vdctx->decoder, surf, &pic_ctx->info,
                               pic_ctx->bitstream_buffers_used,
                               pic_ctx->bitstream_buffers);

    av_freep(&pic_ctx->bitstream_buffers);

#if FF_API_BUFS_VDPAU
FF_DISABLE_DEPRECATION_WARNINGS
    if (hwctx) {
        hwctx->bitstream_buffers           = NULL;
        hwctx->bitstream_buffers_used      = 0;
        hwctx->bitstream_buffers_allocated = 0;
    }
FF_ENABLE_DEPRECATION_WARNINGS
#endif

    return vdpau_error(status);
}

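/* Shared end_frame callback for the MPEG-1/2/4 and VC-1 hwaccels: render the
 * picture and signal the finished band via ff_mpeg_draw_horiz_band(). */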
#if CONFIG_MPEG1_VDPAU_HWACCEL || \
    CONFIG_MPEG2_VDPAU_HWACCEL || CONFIG_MPEG4_VDPAU_HWACCEL || \
    CONFIG_VC1_VDPAU_HWACCEL || CONFIG_WMV3_VDPAU_HWACCEL
int ff_vdpau_mpeg_end_frame(AVCodecContext *avctx)
{
    MpegEncContext *s = avctx->priv_data;
    Picture *pic = s->current_picture_ptr;
    struct vdpau_picture_context *pic_ctx = pic->hwaccel_picture_private;
    int val;

    val = ff_vdpau_common_end_frame(avctx, pic->f, pic_ctx);
    if (val < 0)
        return val;

    ff_mpeg_draw_horiz_band(s, 0, s->avctx->height);
    return 0;
}
#endif

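/* Append one buffer (pointer and size) to the per-picture VdpBitstreamBuffer
 * table, growing it with av_fast_realloc() as needed. */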
int ff_vdpau_add_buffer(struct vdpau_picture_context *pic_ctx,
                        const uint8_t *buf, uint32_t size)
{
    VdpBitstreamBuffer *buffers = pic_ctx->bitstream_buffers;

    buffers = av_fast_realloc(buffers, &pic_ctx->bitstream_buffers_allocated,
                              (pic_ctx->bitstream_buffers_used + 1) * sizeof(*buffers));
    if (!buffers)
        return AVERROR(ENOMEM);

    pic_ctx->bitstream_buffers = buffers;
    buffers += pic_ctx->bitstream_buffers_used++;

    buffers->struct_version  = VDP_BITSTREAM_BUFFER_VERSION;
    buffers->bitstream       = buf;
    buffers->bitstream_bytes = size;
    return 0;
}

/* Obsolete non-hwaccel VDPAU support below... */

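/* Legacy counterpart of ff_vdpau_add_buffer(), operating on the obsolete
 * vdpau_render_state stored in the frame's data[0]. */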
#if FF_API_VDPAU
void ff_vdpau_add_data_chunk(uint8_t *data, const uint8_t *buf, int buf_size)
{
    struct vdpau_render_state *render = (struct vdpau_render_state*)data;
    assert(render);

    render->bitstream_buffers = av_fast_realloc(
        render->bitstream_buffers,
        &render->bitstream_buffers_allocated,
        sizeof(*render->bitstream_buffers)*(render->bitstream_buffers_used + 1)
    );

    render->bitstream_buffers[render->bitstream_buffers_used].struct_version  = VDP_BITSTREAM_BUFFER_VERSION;
    render->bitstream_buffers[render->bitstream_buffers_used].bitstream       = buf;
    render->bitstream_buffers[render->bitstream_buffers_used].bitstream_bytes = buf_size;
    render->bitstream_buffers_used++;
}

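/* Legacy (non-hwaccel) H.264 support: fill the VdpPictureInfoH264 reference
 * frame list from the decoder's short-term and long-term reference pictures. */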
#if CONFIG_H264_VDPAU_DECODER
void ff_vdpau_h264_set_reference_frames(H264Context *h)
{
    struct vdpau_render_state *render, *render_ref;
    VdpReferenceFrameH264 *rf, *rf2;
    H264Picture *pic;
    int i, list, pic_frame_idx;

    render = (struct vdpau_render_state *)h->cur_pic_ptr->f->data[0];
    assert(render);

    rf = &render->info.h264.referenceFrames[0];
#define H264_RF_COUNT FF_ARRAY_ELEMS(render->info.h264.referenceFrames)

    for (list = 0; list < 2; ++list) {
        H264Picture **lp = list ? h->long_ref : h->short_ref;
        int ls = list ? 16 : h->short_ref_count;

        for (i = 0; i < ls; ++i) {
            pic = lp[i];
            if (!pic || !pic->reference)
                continue;
            pic_frame_idx = pic->long_ref ? pic->pic_id : pic->frame_num;

            render_ref = (struct vdpau_render_state *)pic->f->data[0];
            assert(render_ref);

            rf2 = &render->info.h264.referenceFrames[0];
            while (rf2 != rf) {
                if (
                    (rf2->surface == render_ref->surface)
                    && (rf2->is_long_term == pic->long_ref)
                    && (rf2->frame_idx == pic_frame_idx)
                )
                    break;
                ++rf2;
            }
            if (rf2 != rf) {
                rf2->top_is_reference    |= (pic->reference & PICT_TOP_FIELD)    ? VDP_TRUE : VDP_FALSE;
                rf2->bottom_is_reference |= (pic->reference & PICT_BOTTOM_FIELD) ? VDP_TRUE : VDP_FALSE;
                continue;
            }

            if (rf >= &render->info.h264.referenceFrames[H264_RF_COUNT])
                continue;

            rf->surface             = render_ref->surface;
            rf->is_long_term        = pic->long_ref;
            rf->top_is_reference    = (pic->reference & PICT_TOP_FIELD)    ? VDP_TRUE : VDP_FALSE;
            rf->bottom_is_reference = (pic->reference & PICT_BOTTOM_FIELD) ? VDP_TRUE : VDP_FALSE;
            rf->field_order_cnt[0]  = pic->field_poc[0];
            rf->field_order_cnt[1]  = pic->field_poc[1];
            rf->frame_idx           = pic_frame_idx;

            ++rf;
        }
    }

    for (; rf < &render->info.h264.referenceFrames[H264_RF_COUNT]; ++rf) {
        rf->surface             = VDP_INVALID_HANDLE;
        rf->is_long_term        = 0;
        rf->top_is_reference    = 0;
        rf->bottom_is_reference = 0;
        rf->field_order_cnt[0]  = 0;
        rf->field_order_cnt[1]  = 0;
        rf->frame_idx           = 0;
    }
}

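/* Legacy H.264 support: store the field order counts and frame_num of the
 * current picture. */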
void ff_vdpau_h264_picture_start(H264Context *h)
{
    struct vdpau_render_state *render;
    int i;

    render = (struct vdpau_render_state *)h->cur_pic_ptr->f->data[0];
    assert(render);

    for (i = 0; i < 2; ++i) {
        int foc = h->cur_pic_ptr->field_poc[i];
        if (foc == INT_MAX)
            foc = 0;
        render->info.h264.field_order_cnt[i] = foc;
    }

    render->info.h264.frame_num = h->poc.frame_num;
}

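/* Legacy H.264 support: copy the remaining SPS/PPS and slice fields into
 * VdpPictureInfoH264 and signal the finished picture through
 * ff_h264_draw_horiz_band(). */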
void ff_vdpau_h264_picture_complete(H264Context *h)
{
    struct vdpau_render_state *render;

    render = (struct vdpau_render_state *)h->cur_pic_ptr->f->data[0];
    assert(render);

    render->info.h264.slice_count = h->current_slice;
    if (render->info.h264.slice_count < 1)
        return;

    render->info.h264.is_reference = (h->cur_pic_ptr->reference & 3) ? VDP_TRUE : VDP_FALSE;
    render->info.h264.field_pic_flag = h->picture_structure != PICT_FRAME;
    render->info.h264.bottom_field_flag = h->picture_structure == PICT_BOTTOM_FIELD;
    render->info.h264.num_ref_frames = h->ps.sps->ref_frame_count;
    render->info.h264.mb_adaptive_frame_field_flag = h->ps.sps->mb_aff && !render->info.h264.field_pic_flag;
    render->info.h264.constrained_intra_pred_flag = h->ps.pps->constrained_intra_pred;
    render->info.h264.weighted_pred_flag = h->ps.pps->weighted_pred;
    render->info.h264.weighted_bipred_idc = h->ps.pps->weighted_bipred_idc;
    render->info.h264.frame_mbs_only_flag = h->ps.sps->frame_mbs_only_flag;
    render->info.h264.transform_8x8_mode_flag = h->ps.pps->transform_8x8_mode;
    render->info.h264.chroma_qp_index_offset = h->ps.pps->chroma_qp_index_offset[0];
    render->info.h264.second_chroma_qp_index_offset = h->ps.pps->chroma_qp_index_offset[1];
    render->info.h264.pic_init_qp_minus26 = h->ps.pps->init_qp - 26;
    render->info.h264.num_ref_idx_l0_active_minus1 = h->ps.pps->ref_count[0] - 1;
    render->info.h264.num_ref_idx_l1_active_minus1 = h->ps.pps->ref_count[1] - 1;
    render->info.h264.log2_max_frame_num_minus4 = h->ps.sps->log2_max_frame_num - 4;
    render->info.h264.pic_order_cnt_type = h->ps.sps->poc_type;
    render->info.h264.log2_max_pic_order_cnt_lsb_minus4 = h->ps.sps->poc_type ? 0 : h->ps.sps->log2_max_poc_lsb - 4;
    render->info.h264.delta_pic_order_always_zero_flag = h->ps.sps->delta_pic_order_always_zero_flag;
    render->info.h264.direct_8x8_inference_flag = h->ps.sps->direct_8x8_inference_flag;
    render->info.h264.entropy_coding_mode_flag = h->ps.pps->cabac;
    render->info.h264.pic_order_present_flag = h->ps.pps->pic_order_present;
    render->info.h264.deblocking_filter_control_present_flag = h->ps.pps->deblocking_filter_parameters_present;
    render->info.h264.redundant_pic_cnt_present_flag = h->ps.pps->redundant_pic_cnt_present;
    memcpy(render->info.h264.scaling_lists_4x4, h->ps.pps->scaling_matrix4, sizeof(render->info.h264.scaling_lists_4x4));
    memcpy(render->info.h264.scaling_lists_8x8[0], h->ps.pps->scaling_matrix8[0], sizeof(render->info.h264.scaling_lists_8x8[0]));
    memcpy(render->info.h264.scaling_lists_8x8[1], h->ps.pps->scaling_matrix8[3], sizeof(render->info.h264.scaling_lists_8x8[0]));

    ff_h264_draw_horiz_band(h, &h->slice_ctx[0], 0, h->avctx->height);
    render->bitstream_buffers_used = 0;
}
#endif /* CONFIG_H264_VDPAU_DECODER */

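/* Legacy MPEG-1/2 support: fill VdpPictureInfoMPEG1Or2, attach the bitstream
 * and pick the forward/backward reference surfaces. */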
#if CONFIG_MPEG_VDPAU_DECODER || CONFIG_MPEG1_VDPAU_DECODER
void ff_vdpau_mpeg_picture_complete(MpegEncContext *s, const uint8_t *buf,
                                    int buf_size, int slice_count)
{
    struct vdpau_render_state *render, *last, *next;
    int i;

    if (!s->current_picture_ptr) return;

    render = (struct vdpau_render_state *)s->current_picture_ptr->f->data[0];
    assert(render);

    /* fill VdpPictureInfoMPEG1Or2 struct */
    render->info.mpeg.picture_structure = s->picture_structure;
    render->info.mpeg.picture_coding_type = s->pict_type;
    render->info.mpeg.intra_dc_precision = s->intra_dc_precision;
    render->info.mpeg.frame_pred_frame_dct = s->frame_pred_frame_dct;
    render->info.mpeg.concealment_motion_vectors = s->concealment_motion_vectors;
    render->info.mpeg.intra_vlc_format = s->intra_vlc_format;
    render->info.mpeg.alternate_scan = s->alternate_scan;
    render->info.mpeg.q_scale_type = s->q_scale_type;
    render->info.mpeg.top_field_first = s->top_field_first;
    render->info.mpeg.full_pel_forward_vector = s->full_pel[0]; // MPEG-1 only. Set 0 for MPEG-2
    render->info.mpeg.full_pel_backward_vector = s->full_pel[1]; // MPEG-1 only. Set 0 for MPEG-2
    render->info.mpeg.f_code[0][0] = s->mpeg_f_code[0][0]; // For MPEG-1 fill both horiz. & vert.
    render->info.mpeg.f_code[0][1] = s->mpeg_f_code[0][1];
    render->info.mpeg.f_code[1][0] = s->mpeg_f_code[1][0];
    render->info.mpeg.f_code[1][1] = s->mpeg_f_code[1][1];
    for (i = 0; i < 64; ++i) {
        render->info.mpeg.intra_quantizer_matrix[i] = s->intra_matrix[i];
        render->info.mpeg.non_intra_quantizer_matrix[i] = s->inter_matrix[i];
    }

    render->info.mpeg.forward_reference = VDP_INVALID_HANDLE;
    render->info.mpeg.backward_reference = VDP_INVALID_HANDLE;

    switch(s->pict_type){
    case AV_PICTURE_TYPE_B:
        next = (struct vdpau_render_state *)s->next_picture.f->data[0];
        assert(next);
        render->info.mpeg.backward_reference = next->surface;
        // no return here, going to set forward prediction
    case AV_PICTURE_TYPE_P:
        last = (struct vdpau_render_state *)s->last_picture.f->data[0];
        if (!last) // FIXME: Does this test make sense?
            last = render; // predict second field from the first
        render->info.mpeg.forward_reference = last->surface;
    }

    ff_vdpau_add_data_chunk(s->current_picture_ptr->f->data[0], buf, buf_size);

    render->info.mpeg.slice_count = slice_count;

    if (slice_count)
        ff_mpeg_draw_horiz_band(s, 0, s->avctx->height);
    render->bitstream_buffers_used = 0;
}
#endif /* CONFIG_MPEG_VDPAU_DECODER || CONFIG_MPEG1_VDPAU_DECODER */

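/* Legacy VC-1/WMV3 support: fill VdpPictureInfoVC1 from the VC1Context and
 * set the reference surfaces. */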
#if CONFIG_VC1_VDPAU_DECODER
void ff_vdpau_vc1_decode_picture(MpegEncContext *s, const uint8_t *buf,
                                 int buf_size)
{
    VC1Context *v = s->avctx->priv_data;
    struct vdpau_render_state *render, *last, *next;

    render = (struct vdpau_render_state *)s->current_picture.f->data[0];
    assert(render);

    /* fill VdpPictureInfoVC1 struct */
    render->info.vc1.frame_coding_mode = v->fcm ? v->fcm + 1 : 0;
    render->info.vc1.postprocflag = v->postprocflag;
    render->info.vc1.pulldown = v->broadcast;
    render->info.vc1.interlace = v->interlace;
    render->info.vc1.tfcntrflag = v->tfcntrflag;
    render->info.vc1.finterpflag = v->finterpflag;
    render->info.vc1.psf = v->psf;
    render->info.vc1.dquant = v->dquant;
    render->info.vc1.panscan_flag = v->panscanflag;
    render->info.vc1.refdist_flag = v->refdist_flag;
    render->info.vc1.quantizer = v->quantizer_mode;
    render->info.vc1.extended_mv = v->extended_mv;
    render->info.vc1.extended_dmv = v->extended_dmv;
    render->info.vc1.overlap = v->overlap;
    render->info.vc1.vstransform = v->vstransform;
    render->info.vc1.loopfilter = v->s.loop_filter;
    render->info.vc1.fastuvmc = v->fastuvmc;
    render->info.vc1.range_mapy_flag = v->range_mapy_flag;
    render->info.vc1.range_mapy = v->range_mapy;
    render->info.vc1.range_mapuv_flag = v->range_mapuv_flag;
    render->info.vc1.range_mapuv = v->range_mapuv;
    /* Specific to simple/main profile only */
    render->info.vc1.multires = v->multires;
    render->info.vc1.syncmarker = v->resync_marker;
    render->info.vc1.rangered = v->rangered | (v->rangeredfrm << 1);
    render->info.vc1.maxbframes = v->s.max_b_frames;

    render->info.vc1.deblockEnable = v->postprocflag & 1;
    render->info.vc1.pquant = v->pq;

    render->info.vc1.forward_reference = VDP_INVALID_HANDLE;
    render->info.vc1.backward_reference = VDP_INVALID_HANDLE;

    if (v->bi_type)
        render->info.vc1.picture_type = 4;
    else
        render->info.vc1.picture_type = s->pict_type - 1 + s->pict_type / 3;

    switch(s->pict_type){
    case AV_PICTURE_TYPE_B:
        next = (struct vdpau_render_state *)s->next_picture.f->data[0];
        assert(next);
        render->info.vc1.backward_reference = next->surface;
        // no break here, going to set forward prediction
    case AV_PICTURE_TYPE_P:
        last = (struct vdpau_render_state *)s->last_picture.f->data[0];
        if (!last) // FIXME: Does this test make sense?
            last = render; // predict second field from the first
        render->info.vc1.forward_reference = last->surface;
    }

    ff_vdpau_add_data_chunk(s->current_picture_ptr->f->data[0], buf, buf_size);

    render->info.vc1.slice_count = 1;

    ff_mpeg_draw_horiz_band(s, 0, s->avctx->height);
    render->bitstream_buffers_used = 0;
}
#endif /* CONFIG_VC1_VDPAU_DECODER */

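/* Legacy MPEG-4 ASP support: fill VdpPictureInfoMPEG4Part2 and set the
 * reference surfaces. */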
#if CONFIG_MPEG4_VDPAU_DECODER
void ff_vdpau_mpeg4_decode_picture(Mpeg4DecContext *ctx, const uint8_t *buf,
                                   int buf_size)
{
    MpegEncContext *s = &ctx->m;
    struct vdpau_render_state *render, *last, *next;
    int i;

    if (!s->current_picture_ptr) return;

    render = (struct vdpau_render_state *)s->current_picture_ptr->f->data[0];
    assert(render);

    /* fill VdpPictureInfoMPEG4Part2 struct */
    render->info.mpeg4.trd[0] = s->pp_time;
    render->info.mpeg4.trb[0] = s->pb_time;
    render->info.mpeg4.trd[1] = s->pp_field_time >> 1;
    render->info.mpeg4.trb[1] = s->pb_field_time >> 1;
    render->info.mpeg4.vop_time_increment_resolution = s->avctx->time_base.den;
    render->info.mpeg4.vop_coding_type = 0;
    render->info.mpeg4.vop_fcode_forward = s->f_code;
    render->info.mpeg4.vop_fcode_backward = s->b_code;
    render->info.mpeg4.resync_marker_disable = !ctx->resync_marker;
    render->info.mpeg4.interlaced = !s->progressive_sequence;
    render->info.mpeg4.quant_type = s->mpeg_quant;
    render->info.mpeg4.quarter_sample = s->quarter_sample;
    render->info.mpeg4.short_video_header = s->avctx->codec->id == AV_CODEC_ID_H263;
    render->info.mpeg4.rounding_control = s->no_rounding;
    render->info.mpeg4.alternate_vertical_scan_flag = s->alternate_scan;
    render->info.mpeg4.top_field_first = s->top_field_first;
    for (i = 0; i < 64; ++i) {
        render->info.mpeg4.intra_quantizer_matrix[i] = s->intra_matrix[i];
        render->info.mpeg4.non_intra_quantizer_matrix[i] = s->inter_matrix[i];
    }
    render->info.mpeg4.forward_reference = VDP_INVALID_HANDLE;
    render->info.mpeg4.backward_reference = VDP_INVALID_HANDLE;

    switch (s->pict_type) {
    case AV_PICTURE_TYPE_B:
        next = (struct vdpau_render_state *)s->next_picture.f->data[0];
        assert(next);
        render->info.mpeg4.backward_reference = next->surface;
        render->info.mpeg4.vop_coding_type = 2;
        // no break here, going to set forward prediction
    case AV_PICTURE_TYPE_P:
        last = (struct vdpau_render_state *)s->last_picture.f->data[0];
        assert(last);
        render->info.mpeg4.forward_reference = last->surface;
    }

    ff_vdpau_add_data_chunk(s->current_picture_ptr->f->data[0], buf, buf_size);

    ff_mpeg_draw_horiz_band(s, 0, s->avctx->height);
    render->bitstream_buffers_used = 0;
}
#endif /* CONFIG_MPEG4_VDPAU_DECODER */
#endif /* FF_API_VDPAU */

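/* Deprecated helper mapping the codec id and profile of an AVCodecContext to
 * the corresponding VdpDecoderProfile. */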
#if FF_API_VDPAU_PROFILE
int av_vdpau_get_profile(AVCodecContext *avctx, VdpDecoderProfile *profile)
{
#define PROFILE(prof) \
do { \
    *profile = VDP_DECODER_PROFILE_##prof; \
    return 0; \
} while (0)

    switch (avctx->codec_id) {
    case AV_CODEC_ID_MPEG1VIDEO:               PROFILE(MPEG1);
    case AV_CODEC_ID_MPEG2VIDEO:
        switch (avctx->profile) {
        case FF_PROFILE_MPEG2_MAIN:            PROFILE(MPEG2_MAIN);
        case FF_PROFILE_MPEG2_SIMPLE:          PROFILE(MPEG2_SIMPLE);
        default:                               return AVERROR(EINVAL);
        }
    case AV_CODEC_ID_H263:                     PROFILE(MPEG4_PART2_ASP);
    case AV_CODEC_ID_MPEG4:
        switch (avctx->profile) {
        case FF_PROFILE_MPEG4_SIMPLE:          PROFILE(MPEG4_PART2_SP);
        case FF_PROFILE_MPEG4_ADVANCED_SIMPLE: PROFILE(MPEG4_PART2_ASP);
        default:                               return AVERROR(EINVAL);
        }
    case AV_CODEC_ID_H264:
        switch (avctx->profile & ~FF_PROFILE_H264_INTRA) {
        case FF_PROFILE_H264_BASELINE:         PROFILE(H264_BASELINE);
        case FF_PROFILE_H264_CONSTRAINED_BASELINE:
        case FF_PROFILE_H264_MAIN:             PROFILE(H264_MAIN);
        case FF_PROFILE_H264_HIGH:             PROFILE(H264_HIGH);
#ifdef VDP_DECODER_PROFILE_H264_EXTENDED
        case FF_PROFILE_H264_EXTENDED:         PROFILE(H264_EXTENDED);
#endif
        default:                               return AVERROR(EINVAL);
        }
    case AV_CODEC_ID_WMV3:
    case AV_CODEC_ID_VC1:
        switch (avctx->profile) {
        case FF_PROFILE_VC1_SIMPLE:            PROFILE(VC1_SIMPLE);
        case FF_PROFILE_VC1_MAIN:              PROFILE(VC1_MAIN);
        case FF_PROFILE_VC1_ADVANCED:          PROFILE(VC1_ADVANCED);
        default:                               return AVERROR(EINVAL);
        }
    }
    return AVERROR(EINVAL);
#undef PROFILE
}
#endif /* FF_API_VDPAU_PROFILE */

AVVDPAUContext *av_vdpau_alloc_context(void)
{
    return av_mallocz(sizeof(VDPAUHWContext));
}

int av_vdpau_bind_context(AVCodecContext *avctx, VdpDevice device,
                          VdpGetProcAddress *get_proc, unsigned flags)
{
    VDPAUHWContext *hwctx;

    if (flags & ~(AV_HWACCEL_FLAG_IGNORE_LEVEL|AV_HWACCEL_FLAG_ALLOW_HIGH_DEPTH))
        return AVERROR(EINVAL);

    if (av_reallocp(&avctx->hwaccel_context, sizeof(*hwctx)))
        return AVERROR(ENOMEM);

    hwctx = avctx->hwaccel_context;

    memset(hwctx, 0, sizeof(*hwctx));
    hwctx->context.decoder  = VDP_INVALID_HANDLE;
    hwctx->device           = device;
    hwctx->get_proc_address = get_proc;
    hwctx->flags            = flags;
    hwctx->reset            = 1;
    return 0;
}

/* @}*/
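
/*
 * Usage sketch (not part of the original file): one way an application might
 * wire the public API above into its get_format callback, assuming a
 * VdpDevice and VdpGetProcAddress pointer were already obtained, e.g. from
 * vdp_device_create_x11(). The names "dev" and "get_proc" are illustrative
 * placeholders, not symbols defined by libavcodec.
 *
 *     static enum AVPixelFormat get_format(AVCodecContext *avctx,
 *                                          const enum AVPixelFormat *fmts)
 *     {
 *         int i;
 *         for (i = 0; fmts[i] != AV_PIX_FMT_NONE; i++) {
 *             if (fmts[i] == AV_PIX_FMT_VDPAU &&
 *                 av_vdpau_bind_context(avctx, dev, get_proc, 0) == 0)
 *                 return AV_PIX_FMT_VDPAU;
 *         }
 *         return avcodec_default_get_format(avctx, fmts);
 *     }
 *
 * Decoded frames then carry the VdpVideoSurface in AVFrame.data[3].
 */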