vdpau.c
1 /*
2  * Video Decode and Presentation API for UNIX (VDPAU) is used for
3  * HW decode acceleration for MPEG-1/2, MPEG-4 ASP, H.264 and VC-1.
4  *
5  * Copyright (c) 2008 NVIDIA
6  *
7  * This file is part of FFmpeg.
8  *
9  * FFmpeg is free software; you can redistribute it and/or
10  * modify it under the terms of the GNU Lesser General Public
11  * License as published by the Free Software Foundation; either
12  * version 2.1 of the License, or (at your option) any later version.
13  *
14  * FFmpeg is distributed in the hope that it will be useful,
15  * but WITHOUT ANY WARRANTY; without even the implied warranty of
16  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
17  * Lesser General Public License for more details.
18  *
19  * You should have received a copy of the GNU Lesser General Public
20  * License along with FFmpeg; if not, write to the Free Software
21  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
22  */
23 
24 #include <limits.h>
25 #include "libavutil/avassert.h"
26 #include "avcodec.h"
27 #include "internal.h"
28 #include "h264.h"
29 #include "vc1.h"
30 
31 #undef NDEBUG
32 #include <assert.h>
33 
34 #include "vdpau.h"
35 #include "vdpau_compat.h"
36 #include "vdpau_internal.h"
37 
38 /**
39  * @addtogroup VDPAU_Decoding
40  *
41  * @{
42  */
43 
44 static int vdpau_error(VdpStatus status)
45 {
46  switch (status) {
47  case VDP_STATUS_OK:
48  return 0;
49  case VDP_STATUS_NO_IMPLEMENTATION:
50  return AVERROR(ENOSYS);
51  case VDP_STATUS_DISPLAY_PREEMPTED:
52  return AVERROR(EIO);
53  case VDP_STATUS_INVALID_HANDLE:
54  return AVERROR(EBADF);
55  case VDP_STATUS_INVALID_POINTER:
56  return AVERROR(EFAULT);
57  case VDP_STATUS_RESOURCES:
58  return AVERROR(ENOBUFS);
59  case VDP_STATUS_HANDLE_DEVICE_MISMATCH:
60  return AVERROR(EXDEV);
61  case VDP_STATUS_ERROR:
62  return AVERROR(EIO);
63  default:
64  return AVERROR(EINVAL);
65  }
66 }
67 
68 AVVDPAUContext *av_alloc_vdpaucontext(void)
69 {
70  return av_vdpau_alloc_context();
71 }
72 
73 MAKE_ACCESSORS(AVVDPAUContext, vdpau_hwaccel, AVVDPAU_Render2, render2)
74 
75 int av_vdpau_get_surface_parameters(AVCodecContext *avctx,
76  VdpChromaType *type,
77  uint32_t *width, uint32_t *height)
78 {
79  VdpChromaType t;
80  uint32_t w = avctx->coded_width;
81  uint32_t h = avctx->coded_height;
82 
83  /* See <vdpau/vdpau.h> for per-type alignment constraints. */
84  switch (avctx->sw_pix_fmt) {
85  case AV_PIX_FMT_YUV420P:
86  case AV_PIX_FMT_YUVJ420P:
87  t = VDP_CHROMA_TYPE_420;
88  w = (w + 1) & ~1;
89  h = (h + 3) & ~3;
90  break;
91  case AV_PIX_FMT_YUV422P:
92  case AV_PIX_FMT_YUVJ422P:
93  t = VDP_CHROMA_TYPE_422;
94  w = (w + 1) & ~1;
95  h = (h + 1) & ~1;
96  break;
97  case AV_PIX_FMT_YUV444P:
98  case AV_PIX_FMT_YUVJ444P:
99  t = VDP_CHROMA_TYPE_444;
100  h = (h + 1) & ~1;
101  break;
102  default:
103  return AVERROR(ENOSYS);
104  }
105 
106  if (type)
107  *type = t;
108  if (width)
109  *width = w;
110  if (height)
111  *height = h;
112  return 0;
113 }
114 
115 int ff_vdpau_common_init(AVCodecContext *avctx, VdpDecoderProfile profile,
116  int level)
117 {
118  VDPAUHWContext *hwctx = avctx->hwaccel_context;
119  VDPAUContext *vdctx = avctx->internal->hwaccel_priv_data;
120  VdpVideoSurfaceQueryCapabilities *surface_query_caps;
121  VdpDecoderQueryCapabilities *decoder_query_caps;
122  VdpDecoderCreate *create;
123  void *func;
124  VdpStatus status;
125  VdpBool supported;
126  uint32_t max_level, max_mb, max_width, max_height;
127  VdpChromaType type;
128  uint32_t width;
129  uint32_t height;
130 
131  vdctx->width = UINT32_MAX;
132  vdctx->height = UINT32_MAX;
133 
134  if (!hwctx) {
135  vdctx->device = VDP_INVALID_HANDLE;
136  av_log(avctx, AV_LOG_WARNING, "hwaccel_context has not been setup by the user application, cannot initialize\n");
137  return 0;
138  }
139 
140  if (hwctx->context.decoder != VDP_INVALID_HANDLE) {
141  vdctx->decoder = hwctx->context.decoder;
142  vdctx->render = hwctx->context.render;
143  vdctx->device = VDP_INVALID_HANDLE;
144  return 0; /* Decoder created by user */
145  }
146  hwctx->reset = 0;
147 
148  vdctx->device = hwctx->device;
149  vdctx->get_proc_address = hwctx->get_proc_address;
150 
151  if (hwctx->flags & AV_HWACCEL_FLAG_IGNORE_LEVEL)
152  level = 0;
153  else if (level < 0)
154  return AVERROR(ENOTSUP);
155 
156  if (av_vdpau_get_surface_parameters(avctx, &type, &width, &height))
157  return AVERROR(ENOSYS);
158 
159  if (!(hwctx->flags & AV_HWACCEL_FLAG_ALLOW_HIGH_DEPTH) &&
160  type != VDP_CHROMA_TYPE_420)
161  return AVERROR(ENOSYS);
162 
163  status = vdctx->get_proc_address(vdctx->device,
164  VDP_FUNC_ID_VIDEO_SURFACE_QUERY_CAPABILITIES,
165  &func);
166  if (status != VDP_STATUS_OK)
167  return vdpau_error(status);
168  else
169  surface_query_caps = func;
170 
171  status = surface_query_caps(vdctx->device, type, &supported,
172  &max_width, &max_height);
173  if (status != VDP_STATUS_OK)
174  return vdpau_error(status);
175  if (supported != VDP_TRUE ||
176  max_width < width || max_height < height)
177  return AVERROR(ENOTSUP);
178 
179  status = vdctx->get_proc_address(vdctx->device,
180  VDP_FUNC_ID_DECODER_QUERY_CAPABILITIES,
181  &func);
182  if (status != VDP_STATUS_OK)
183  return vdpau_error(status);
184  else
185  decoder_query_caps = func;
186 
187  status = decoder_query_caps(vdctx->device, profile, &supported, &max_level,
188  &max_mb, &max_width, &max_height);
189 #ifdef VDP_DECODER_PROFILE_H264_CONSTRAINED_BASELINE
190  if ((status != VDP_STATUS_OK || supported != VDP_TRUE) && profile == VDP_DECODER_PROFILE_H264_CONSTRAINED_BASELINE) {
191  profile = VDP_DECODER_PROFILE_H264_MAIN;
192  status = decoder_query_caps(vdctx->device, profile, &supported,
193  &max_level, &max_mb,
194  &max_width, &max_height);
195  }
196 #endif
197  if (status != VDP_STATUS_OK)
198  return vdpau_error(status);
199 
200  if (supported != VDP_TRUE || max_level < level ||
201  max_width < width || max_height < height)
202  return AVERROR(ENOTSUP);
203 
204  status = vdctx->get_proc_address(vdctx->device, VDP_FUNC_ID_DECODER_CREATE,
205  &func);
206  if (status != VDP_STATUS_OK)
207  return vdpau_error(status);
208  else
209  create = func;
210 
211  status = vdctx->get_proc_address(vdctx->device, VDP_FUNC_ID_DECODER_RENDER,
212  &func);
213  if (status != VDP_STATUS_OK)
214  return vdpau_error(status);
215  else
216  vdctx->render = func;
217 
218  status = create(vdctx->device, profile, width, height, avctx->refs,
219  &vdctx->decoder);
220  if (status == VDP_STATUS_OK) {
221  vdctx->width = avctx->coded_width;
222  vdctx->height = avctx->coded_height;
223  }
224 
225  return vdpau_error(status);
226 }
227 
228 int ff_vdpau_common_uninit(AVCodecContext *avctx)
229 {
230  VDPAUContext *vdctx = avctx->internal->hwaccel_priv_data;
231  VdpDecoderDestroy *destroy;
232  void *func;
233  VdpStatus status;
234 
235  if (vdctx->device == VDP_INVALID_HANDLE)
236  return 0; /* Decoder created and destroyed by user */
237  if (vdctx->width == UINT32_MAX && vdctx->height == UINT32_MAX)
238  return 0;
239 
240  status = vdctx->get_proc_address(vdctx->device,
241  VDP_FUNC_ID_DECODER_DESTROY, &func);
242  if (status != VDP_STATUS_OK)
243  return vdpau_error(status);
244  else
245  destroy = func;
246 
247  status = destroy(vdctx->decoder);
248  return vdpau_error(status);
249 }
250 
251 static int ff_vdpau_common_reinit(AVCodecContext *avctx)
252 {
253  VDPAUHWContext *hwctx = avctx->hwaccel_context;
254  VDPAUContext *vdctx = avctx->internal->hwaccel_priv_data;
255 
256  if (vdctx->device == VDP_INVALID_HANDLE)
257  return 0; /* Decoder created by user */
258  if (avctx->coded_width == vdctx->width &&
259  avctx->coded_height == vdctx->height && !hwctx->reset)
260  return 0;
261 
262  avctx->hwaccel->uninit(avctx);
263  return avctx->hwaccel->init(avctx);
264 }
265 
266 int ff_vdpau_common_start_frame(struct vdpau_picture_context *pic_ctx,
267  av_unused const uint8_t *buffer,
268  av_unused uint32_t size)
269 {
270  pic_ctx->bitstream_buffers_allocated = 0;
271  pic_ctx->bitstream_buffers_used = 0;
272  pic_ctx->bitstream_buffers = NULL;
273  return 0;
274 }
275 
276 int ff_vdpau_common_end_frame(AVCodecContext *avctx, AVFrame *frame,
277  struct vdpau_picture_context *pic_ctx)
278 {
279  VDPAUContext *vdctx = avctx->internal->hwaccel_priv_data;
280  AVVDPAUContext *hwctx = avctx->hwaccel_context;
281  VdpVideoSurface surf = ff_vdpau_get_surface_id(frame);
282  VdpStatus status;
283  int val;
284 
285  val = ff_vdpau_common_reinit(avctx);
286  if (val < 0)
287  return val;
288 
289 #if FF_API_BUFS_VDPAU
290 FF_DISABLE_DEPRECATION_WARNINGS
291  av_assert0(sizeof(hwctx->info) <= sizeof(pic_ctx->info));
292  memcpy(&hwctx->info, &pic_ctx->info, sizeof(hwctx->info));
293  hwctx->bitstream_buffers = pic_ctx->bitstream_buffers;
294  hwctx->bitstream_buffers_used = pic_ctx->bitstream_buffers_used;
295  hwctx->bitstream_buffers_allocated = pic_ctx->bitstream_buffers_allocated;
296 FF_ENABLE_DEPRECATION_WARNINGS
297 #endif
298 
299  if (!hwctx->render && hwctx->render2) {
300  status = hwctx->render2(avctx, frame, (void *)&pic_ctx->info,
301  pic_ctx->bitstream_buffers_used, pic_ctx->bitstream_buffers);
302  } else
303  status = vdctx->render(vdctx->decoder, surf, (void *)&pic_ctx->info,
304  pic_ctx->bitstream_buffers_used,
305  pic_ctx->bitstream_buffers);
306 
307  av_freep(&pic_ctx->bitstream_buffers);
308 
309 #if FF_API_BUFS_VDPAU
310 FF_DISABLE_DEPRECATION_WARNINGS
311  hwctx->bitstream_buffers = NULL;
312  hwctx->bitstream_buffers_used = 0;
313  hwctx->bitstream_buffers_allocated = 0;
314 FF_ENABLE_DEPRECATION_WARNINGS
315 #endif
316 
317  return vdpau_error(status);
318 }
319 
320 #if CONFIG_MPEG1_VDPAU_HWACCEL || \
321  CONFIG_MPEG2_VDPAU_HWACCEL || CONFIG_MPEG4_VDPAU_HWACCEL || \
322  CONFIG_VC1_VDPAU_HWACCEL || CONFIG_WMV3_VDPAU_HWACCEL
323 int ff_vdpau_mpeg_end_frame(AVCodecContext *avctx)
324 {
325  MpegEncContext *s = avctx->priv_data;
326  Picture *pic = s->current_picture_ptr;
327  struct vdpau_picture_context *pic_ctx = pic->hwaccel_picture_private;
328  int val;
329 
330  val = ff_vdpau_common_end_frame(avctx, pic->f, pic_ctx);
331  if (val < 0)
332  return val;
333 
334  ff_mpeg_draw_horiz_band(s, 0, s->avctx->height);
335  return 0;
336 }
337 #endif
338 
339 int ff_vdpau_add_buffer(struct vdpau_picture_context *pic_ctx,
340  const uint8_t *buf, uint32_t size)
341 {
342  VdpBitstreamBuffer *buffers = pic_ctx->bitstream_buffers;
343 
344  buffers = av_fast_realloc(buffers, &pic_ctx->bitstream_buffers_allocated,
345  (pic_ctx->bitstream_buffers_used + 1) * sizeof(*buffers));
346  if (!buffers)
347  return AVERROR(ENOMEM);
348 
349  pic_ctx->bitstream_buffers = buffers;
350  buffers += pic_ctx->bitstream_buffers_used++;
351 
352  buffers->struct_version = VDP_BITSTREAM_BUFFER_VERSION;
353  buffers->bitstream = buf;
354  buffers->bitstream_bytes = size;
355  return 0;
356 }
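/* Editor's note (illustration only, not part of vdpau.c): the hwaccel-based
 * decoders (vdpau_h264.c, vdpau_mpeg12.c, vdpau_vc1.c, vdpau_mpeg4.c) drive
 * the helpers above from their AVHWAccel callbacks: start_frame() resets the
 * per-picture buffer list via ff_vdpau_common_start_frame(), each
 * decode_slice() queues data with ff_vdpau_add_buffer(), and end_frame()
 * submits the whole picture with ff_vdpau_common_end_frame(). A hypothetical
 * decode_slice callback for an MPEG-style decoder could look like this:
 *
 *     static int example_decode_slice(AVCodecContext *avctx,
 *                                     const uint8_t *buffer, uint32_t size)
 *     {
 *         MpegEncContext *s = avctx->priv_data;
 *         struct vdpau_picture_context *pic_ctx =
 *             s->current_picture_ptr->hwaccel_picture_private;
 *
 *         return ff_vdpau_add_buffer(pic_ctx, buffer, size);
 *     }
 */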
357 
358 /* Obsolete non-hwaccel VDPAU support below... */
359 
360 #if FF_API_VDPAU
361 void ff_vdpau_add_data_chunk(uint8_t *data, const uint8_t *buf, int buf_size)
362 {
363  struct vdpau_render_state *render = (struct vdpau_render_state*)data;
364  assert(render);
365 
366  render->bitstream_buffers = av_fast_realloc(
367  render->bitstream_buffers,
368  &render->bitstream_buffers_allocated,
369  sizeof(*render->bitstream_buffers)*(render->bitstream_buffers_used + 1)
370  );
371 
372  render->bitstream_buffers[render->bitstream_buffers_used].struct_version = VDP_BITSTREAM_BUFFER_VERSION;
373  render->bitstream_buffers[render->bitstream_buffers_used].bitstream = buf;
374  render->bitstream_buffers[render->bitstream_buffers_used].bitstream_bytes = buf_size;
375  render->bitstream_buffers_used++;
376 }
377 
378 #if CONFIG_H264_VDPAU_DECODER
379 void ff_vdpau_h264_set_reference_frames(H264Context *h)
380 {
381  struct vdpau_render_state *render, *render_ref;
382  VdpReferenceFrameH264 *rf, *rf2;
383  H264Picture *pic;
384  int i, list, pic_frame_idx;
385 
386  render = (struct vdpau_render_state *)h->cur_pic_ptr->f->data[0];
387  assert(render);
388 
389  rf = &render->info.h264.referenceFrames[0];
390 #define H264_RF_COUNT FF_ARRAY_ELEMS(render->info.h264.referenceFrames)
391 
392  for (list = 0; list < 2; ++list) {
393  H264Picture **lp = list ? h->long_ref : h->short_ref;
394  int ls = list ? 16 : h->short_ref_count;
395 
396  for (i = 0; i < ls; ++i) {
397  pic = lp[i];
398  if (!pic || !pic->reference)
399  continue;
400  pic_frame_idx = pic->long_ref ? pic->pic_id : pic->frame_num;
401 
402  render_ref = (struct vdpau_render_state *)pic->f->data[0];
403  assert(render_ref);
404 
405  rf2 = &render->info.h264.referenceFrames[0];
406  while (rf2 != rf) {
407  if (
408  (rf2->surface == render_ref->surface)
409  && (rf2->is_long_term == pic->long_ref)
410  && (rf2->frame_idx == pic_frame_idx)
411  )
412  break;
413  ++rf2;
414  }
415  if (rf2 != rf) {
416  rf2->top_is_reference |= (pic->reference & PICT_TOP_FIELD) ? VDP_TRUE : VDP_FALSE;
417  rf2->bottom_is_reference |= (pic->reference & PICT_BOTTOM_FIELD) ? VDP_TRUE : VDP_FALSE;
418  continue;
419  }
420 
421  if (rf >= &render->info.h264.referenceFrames[H264_RF_COUNT])
422  continue;
423 
424  rf->surface = render_ref->surface;
425  rf->is_long_term = pic->long_ref;
426  rf->top_is_reference = (pic->reference & PICT_TOP_FIELD) ? VDP_TRUE : VDP_FALSE;
427  rf->bottom_is_reference = (pic->reference & PICT_BOTTOM_FIELD) ? VDP_TRUE : VDP_FALSE;
428  rf->field_order_cnt[0] = pic->field_poc[0];
429  rf->field_order_cnt[1] = pic->field_poc[1];
430  rf->frame_idx = pic_frame_idx;
431 
432  ++rf;
433  }
434  }
435 
436  for (; rf < &render->info.h264.referenceFrames[H264_RF_COUNT]; ++rf) {
437  rf->surface = VDP_INVALID_HANDLE;
438  rf->is_long_term = 0;
439  rf->top_is_reference = 0;
440  rf->bottom_is_reference = 0;
441  rf->field_order_cnt[0] = 0;
442  rf->field_order_cnt[1] = 0;
443  rf->frame_idx = 0;
444  }
445 }
446 
447 void ff_vdpau_h264_picture_start(H264Context *h)
448 {
449  struct vdpau_render_state *render;
450  int i;
451 
452  render = (struct vdpau_render_state *)h->cur_pic_ptr->f->data[0];
453  assert(render);
454 
455  for (i = 0; i < 2; ++i) {
456  int foc = h->cur_pic_ptr->field_poc[i];
457  if (foc == INT_MAX)
458  foc = 0;
459  render->info.h264.field_order_cnt[i] = foc;
460  }
461 
462  render->info.h264.frame_num = h->frame_num;
463 }
464 
465 void ff_vdpau_h264_picture_complete(H264Context *h)
466 {
467  struct vdpau_render_state *render;
468 
469  render = (struct vdpau_render_state *)h->cur_pic_ptr->f->data[0];
470  assert(render);
471 
472  render->info.h264.slice_count = h->current_slice;
473  if (render->info.h264.slice_count < 1)
474  return;
475 
476  render->info.h264.is_reference = (h->cur_pic_ptr->reference & 3) ? VDP_TRUE : VDP_FALSE;
477  render->info.h264.field_pic_flag = h->picture_structure != PICT_FRAME;
478  render->info.h264.bottom_field_flag = h->picture_structure == PICT_BOTTOM_FIELD;
479  render->info.h264.num_ref_frames = h->sps.ref_frame_count;
480  render->info.h264.mb_adaptive_frame_field_flag = h->sps.mb_aff && !render->info.h264.field_pic_flag;
481  render->info.h264.constrained_intra_pred_flag = h->pps.constrained_intra_pred;
482  render->info.h264.weighted_pred_flag = h->pps.weighted_pred;
483  render->info.h264.weighted_bipred_idc = h->pps.weighted_bipred_idc;
484  render->info.h264.frame_mbs_only_flag = h->sps.frame_mbs_only_flag;
485  render->info.h264.transform_8x8_mode_flag = h->pps.transform_8x8_mode;
486  render->info.h264.chroma_qp_index_offset = h->pps.chroma_qp_index_offset[0];
487  render->info.h264.second_chroma_qp_index_offset = h->pps.chroma_qp_index_offset[1];
488  render->info.h264.pic_init_qp_minus26 = h->pps.init_qp - 26;
489  render->info.h264.num_ref_idx_l0_active_minus1 = h->pps.ref_count[0] - 1;
490  render->info.h264.num_ref_idx_l1_active_minus1 = h->pps.ref_count[1] - 1;
491  render->info.h264.log2_max_frame_num_minus4 = h->sps.log2_max_frame_num - 4;
492  render->info.h264.pic_order_cnt_type = h->sps.poc_type;
493  render->info.h264.log2_max_pic_order_cnt_lsb_minus4 = h->sps.poc_type ? 0 : h->sps.log2_max_poc_lsb - 4;
494  render->info.h264.delta_pic_order_always_zero_flag = h->sps.delta_pic_order_always_zero_flag;
495  render->info.h264.direct_8x8_inference_flag = h->sps.direct_8x8_inference_flag;
496  render->info.h264.entropy_coding_mode_flag = h->pps.cabac;
497  render->info.h264.pic_order_present_flag = h->pps.pic_order_present;
498  render->info.h264.deblocking_filter_control_present_flag = h->pps.deblocking_filter_parameters_present;
499  render->info.h264.redundant_pic_cnt_present_flag = h->pps.redundant_pic_cnt_present;
500  memcpy(render->info.h264.scaling_lists_4x4, h->pps.scaling_matrix4, sizeof(render->info.h264.scaling_lists_4x4));
501  memcpy(render->info.h264.scaling_lists_8x8[0], h->pps.scaling_matrix8[0], sizeof(render->info.h264.scaling_lists_8x8[0]));
502  memcpy(render->info.h264.scaling_lists_8x8[1], h->pps.scaling_matrix8[3], sizeof(render->info.h264.scaling_lists_8x8[0]));
503 
504  ff_h264_draw_horiz_band(h, &h->slice_ctx[0], 0, h->avctx->height);
505  render->bitstream_buffers_used = 0;
506 }
507 #endif /* CONFIG_H264_VDPAU_DECODER */
508 
509 #if CONFIG_MPEG_VDPAU_DECODER || CONFIG_MPEG1_VDPAU_DECODER
510 void ff_vdpau_mpeg_picture_complete(MpegEncContext *s, const uint8_t *buf,
511  int buf_size, int slice_count)
512 {
513  struct vdpau_render_state *render, *last, *next;
514  int i;
515 
516  if (!s->current_picture_ptr) return;
517 
518  render = (struct vdpau_render_state *)s->current_picture_ptr->f->data[0];
519  assert(render);
520 
521  /* fill VdpPictureInfoMPEG1Or2 struct */
522  render->info.mpeg.picture_structure = s->picture_structure;
523  render->info.mpeg.picture_coding_type = s->pict_type;
524  render->info.mpeg.intra_dc_precision = s->intra_dc_precision;
525  render->info.mpeg.frame_pred_frame_dct = s->frame_pred_frame_dct;
526  render->info.mpeg.concealment_motion_vectors = s->concealment_motion_vectors;
527  render->info.mpeg.intra_vlc_format = s->intra_vlc_format;
528  render->info.mpeg.alternate_scan = s->alternate_scan;
529  render->info.mpeg.q_scale_type = s->q_scale_type;
530  render->info.mpeg.top_field_first = s->top_field_first;
531  render->info.mpeg.full_pel_forward_vector = s->full_pel[0]; // MPEG-1 only. Set 0 for MPEG-2
532  render->info.mpeg.full_pel_backward_vector = s->full_pel[1]; // MPEG-1 only. Set 0 for MPEG-2
533  render->info.mpeg.f_code[0][0] = s->mpeg_f_code[0][0]; // For MPEG-1 fill both horiz. & vert.
534  render->info.mpeg.f_code[0][1] = s->mpeg_f_code[0][1];
535  render->info.mpeg.f_code[1][0] = s->mpeg_f_code[1][0];
536  render->info.mpeg.f_code[1][1] = s->mpeg_f_code[1][1];
537  for (i = 0; i < 64; ++i) {
538  render->info.mpeg.intra_quantizer_matrix[i] = s->intra_matrix[i];
539  render->info.mpeg.non_intra_quantizer_matrix[i] = s->inter_matrix[i];
540  }
541 
542  render->info.mpeg.forward_reference = VDP_INVALID_HANDLE;
543  render->info.mpeg.backward_reference = VDP_INVALID_HANDLE;
544 
545  switch(s->pict_type){
546  case AV_PICTURE_TYPE_B:
547  next = (struct vdpau_render_state *)s->next_picture.f->data[0];
548  assert(next);
549  render->info.mpeg.backward_reference = next->surface;
550  // no return here, going to set forward prediction
551  case AV_PICTURE_TYPE_P:
552  last = (struct vdpau_render_state *)s->last_picture.f->data[0];
553  if (!last) // FIXME: Does this test make sense?
554  last = render; // predict second field from the first
555  render->info.mpeg.forward_reference = last->surface;
556  }
557 
558  ff_vdpau_add_data_chunk(s->current_picture_ptr->f->data[0], buf, buf_size);
559 
560  render->info.mpeg.slice_count = slice_count;
561 
562  if (slice_count)
563  ff_mpeg_draw_horiz_band(s, 0, s->avctx->height);
564  render->bitstream_buffers_used = 0;
565 }
566 #endif /* CONFIG_MPEG_VDPAU_DECODER || CONFIG_MPEG1_VDPAU_DECODER */
567 
568 #if CONFIG_VC1_VDPAU_DECODER
569 void ff_vdpau_vc1_decode_picture(MpegEncContext *s, const uint8_t *buf,
570  int buf_size)
571 {
572  VC1Context *v = s->avctx->priv_data;
573  struct vdpau_render_state *render, *last, *next;
574 
575  render = (struct vdpau_render_state *)s->current_picture.f->data[0];
576  assert(render);
577 
578  /* fill VdpPictureInfoVC1 struct */
579  render->info.vc1.frame_coding_mode = v->fcm ? v->fcm + 1 : 0;
580  render->info.vc1.postprocflag = v->postprocflag;
581  render->info.vc1.pulldown = v->broadcast;
582  render->info.vc1.interlace = v->interlace;
583  render->info.vc1.tfcntrflag = v->tfcntrflag;
584  render->info.vc1.finterpflag = v->finterpflag;
585  render->info.vc1.psf = v->psf;
586  render->info.vc1.dquant = v->dquant;
587  render->info.vc1.panscan_flag = v->panscanflag;
588  render->info.vc1.refdist_flag = v->refdist_flag;
589  render->info.vc1.quantizer = v->quantizer_mode;
590  render->info.vc1.extended_mv = v->extended_mv;
591  render->info.vc1.extended_dmv = v->extended_dmv;
592  render->info.vc1.overlap = v->overlap;
593  render->info.vc1.vstransform = v->vstransform;
594  render->info.vc1.loopfilter = v->s.loop_filter;
595  render->info.vc1.fastuvmc = v->fastuvmc;
596  render->info.vc1.range_mapy_flag = v->range_mapy_flag;
597  render->info.vc1.range_mapy = v->range_mapy;
598  render->info.vc1.range_mapuv_flag = v->range_mapuv_flag;
599  render->info.vc1.range_mapuv = v->range_mapuv;
600  /* Specific to simple/main profile only */
601  render->info.vc1.multires = v->multires;
602  render->info.vc1.syncmarker = v->resync_marker;
603  render->info.vc1.rangered = v->rangered | (v->rangeredfrm << 1);
604  render->info.vc1.maxbframes = v->s.max_b_frames;
605 
606  render->info.vc1.deblockEnable = v->postprocflag & 1;
607  render->info.vc1.pquant = v->pq;
608 
609  render->info.vc1.forward_reference = VDP_INVALID_HANDLE;
610  render->info.vc1.backward_reference = VDP_INVALID_HANDLE;
611 
612  if (v->bi_type)
613  render->info.vc1.picture_type = 4;
614  else
615  render->info.vc1.picture_type = s->pict_type - 1 + s->pict_type / 3;
616 
617  switch(s->pict_type){
618  case AV_PICTURE_TYPE_B:
619  next = (struct vdpau_render_state *)s->next_picture.f->data[0];
620  assert(next);
621  render->info.vc1.backward_reference = next->surface;
622  // no break here, going to set forward prediction
623  case AV_PICTURE_TYPE_P:
624  last = (struct vdpau_render_state *)s->last_picture.f->data[0];
625  if (!last) // FIXME: Does this test make sense?
626  last = render; // predict second field from the first
627  render->info.vc1.forward_reference = last->surface;
628  }
629 
630  ff_vdpau_add_data_chunk(s->current_picture_ptr->f->data[0], buf, buf_size);
631 
632  render->info.vc1.slice_count = 1;
633 
634  ff_mpeg_draw_horiz_band(s, 0, s->avctx->height);
635  render->bitstream_buffers_used = 0;
636 }
637 #endif /* CONFIG_VC1_VDPAU_DECODER */
638 
639 #if CONFIG_MPEG4_VDPAU_DECODER
640 void ff_vdpau_mpeg4_decode_picture(Mpeg4DecContext *ctx, const uint8_t *buf,
641  int buf_size)
642 {
643  MpegEncContext *s = &ctx->m;
644  struct vdpau_render_state *render, *last, *next;
645  int i;
646 
647  if (!s->current_picture_ptr) return;
648 
649  render = (struct vdpau_render_state *)s->current_picture_ptr->f->data[0];
650  assert(render);
651 
652  /* fill VdpPictureInfoMPEG4Part2 struct */
653  render->info.mpeg4.trd[0] = s->pp_time;
654  render->info.mpeg4.trb[0] = s->pb_time;
655  render->info.mpeg4.trd[1] = s->pp_field_time >> 1;
656  render->info.mpeg4.trb[1] = s->pb_field_time >> 1;
657  render->info.mpeg4.vop_time_increment_resolution = s->avctx->time_base.den;
658  render->info.mpeg4.vop_coding_type = 0;
659  render->info.mpeg4.vop_fcode_forward = s->f_code;
660  render->info.mpeg4.vop_fcode_backward = s->b_code;
661  render->info.mpeg4.resync_marker_disable = !ctx->resync_marker;
662  render->info.mpeg4.interlaced = !s->progressive_sequence;
663  render->info.mpeg4.quant_type = s->mpeg_quant;
664  render->info.mpeg4.quarter_sample = s->quarter_sample;
665  render->info.mpeg4.short_video_header = s->avctx->codec->id == AV_CODEC_ID_H263;
666  render->info.mpeg4.rounding_control = s->no_rounding;
667  render->info.mpeg4.alternate_vertical_scan_flag = s->alternate_scan;
668  render->info.mpeg4.top_field_first = s->top_field_first;
669  for (i = 0; i < 64; ++i) {
670  render->info.mpeg4.intra_quantizer_matrix[i] = s->intra_matrix[i];
671  render->info.mpeg4.non_intra_quantizer_matrix[i] = s->inter_matrix[i];
672  }
673  render->info.mpeg4.forward_reference = VDP_INVALID_HANDLE;
674  render->info.mpeg4.backward_reference = VDP_INVALID_HANDLE;
675 
676  switch (s->pict_type) {
677  case AV_PICTURE_TYPE_B:
678  next = (struct vdpau_render_state *)s->next_picture.f->data[0];
679  assert(next);
680  render->info.mpeg4.backward_reference = next->surface;
681  render->info.mpeg4.vop_coding_type = 2;
682  // no break here, going to set forward prediction
683  case AV_PICTURE_TYPE_P:
684  last = (struct vdpau_render_state *)s->last_picture.f->data[0];
685  assert(last);
686  render->info.mpeg4.forward_reference = last->surface;
687  }
688 
689  ff_vdpau_add_data_chunk(s->current_picture_ptr->f->data[0], buf, buf_size);
690 
691  ff_mpeg_draw_horiz_band(s, 0, s->avctx->height);
692  render->bitstream_buffers_used = 0;
693 }
694 #endif /* CONFIG_MPEG4_VDPAU_DECODER */
695 #endif /* FF_API_VDPAU */
696 
697 int av_vdpau_get_profile(AVCodecContext *avctx, VdpDecoderProfile *profile)
698 {
699 #define PROFILE(prof) \
700 do { \
701  *profile = VDP_DECODER_PROFILE_##prof; \
702  return 0; \
703 } while (0)
704 
705  switch (avctx->codec_id) {
706  case AV_CODEC_ID_MPEG1VIDEO: PROFILE(MPEG1);
707  case AV_CODEC_ID_MPEG2VIDEO:
708  switch (avctx->profile) {
709  case FF_PROFILE_MPEG2_MAIN: PROFILE(MPEG2_MAIN);
710  case FF_PROFILE_MPEG2_SIMPLE: PROFILE(MPEG2_SIMPLE);
711  default: return AVERROR(EINVAL);
712  }
713  case AV_CODEC_ID_H263: PROFILE(MPEG4_PART2_ASP);
714  case AV_CODEC_ID_MPEG4:
715  switch (avctx->profile) {
716  case FF_PROFILE_MPEG4_SIMPLE: PROFILE(MPEG4_PART2_SP);
717  case FF_PROFILE_MPEG4_ADVANCED_SIMPLE: PROFILE(MPEG4_PART2_ASP);
718  default: return AVERROR(EINVAL);
719  }
720  case AV_CODEC_ID_H264:
721  switch (avctx->profile & ~FF_PROFILE_H264_INTRA) {
722  case FF_PROFILE_H264_BASELINE: PROFILE(H264_BASELINE);
723  case FF_PROFILE_H264_CONSTRAINED_BASELINE:
724  case FF_PROFILE_H264_MAIN: PROFILE(H264_MAIN);
725  case FF_PROFILE_H264_HIGH: PROFILE(H264_HIGH);
726 #ifdef VDP_DECODER_PROFILE_H264_EXTENDED
727  case FF_PROFILE_H264_EXTENDED: PROFILE(H264_EXTENDED);
728 #endif
729  default: return AVERROR(EINVAL);
730  }
731  case AV_CODEC_ID_WMV3:
732  case AV_CODEC_ID_VC1:
733  switch (avctx->profile) {
734  case FF_PROFILE_VC1_SIMPLE: PROFILE(VC1_SIMPLE);
735  case FF_PROFILE_VC1_MAIN: PROFILE(VC1_MAIN);
736  case FF_PROFILE_VC1_ADVANCED: PROFILE(VC1_ADVANCED);
737  default: return AVERROR(EINVAL);
738  }
739  }
740  return AVERROR(EINVAL);
741 #undef PROFILE
742 }
743 
744 AVVDPAUContext *av_vdpau_alloc_context(void)
745 {
746  return av_mallocz(sizeof(AVVDPAUContext));
747 }
748 
749 int av_vdpau_bind_context(AVCodecContext *avctx, VdpDevice device,
750  VdpGetProcAddress *get_proc, unsigned flags)
751 {
752  VDPAUHWContext *hwctx;
753 
754  if (flags & ~(AV_HWACCEL_FLAG_IGNORE_LEVEL|AV_HWACCEL_FLAG_ALLOW_HIGH_DEPTH))
755  return AVERROR(EINVAL);
756 
757  if (av_reallocp(&avctx->hwaccel_context, sizeof(*hwctx)))
758  return AVERROR(ENOMEM);
759 
760  hwctx = avctx->hwaccel_context;
761 
762  memset(hwctx, 0, sizeof(*hwctx));
763  hwctx->context.decoder = VDP_INVALID_HANDLE;
764  hwctx->device = device;
765  hwctx->get_proc_address = get_proc;
766  hwctx->flags = flags;
767  hwctx->reset = 1;
768  return 0;
769 }
770 
771 /* @}*/
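For reference, a minimal application-side sketch (not part of vdpau.c): before decoding starts, a player obtains a VdpDevice, typically with vdp_device_create_x11() from <vdpau/vdpau_x11.h>, and hands it to libavcodec with av_vdpau_bind_context(), usually from its get_format callback; av_vdpau_get_profile() can be used beforehand to check that the stream maps to a VdpDecoderProfile at all. The callback name and the use of avctx->opaque to carry the X11 Display are assumptions made for this illustration.

#include <X11/Xlib.h>
#include <vdpau/vdpau_x11.h>
#include <libavcodec/avcodec.h>
#include <libavcodec/vdpau.h>

static enum AVPixelFormat example_get_format(AVCodecContext *avctx,
                                             const enum AVPixelFormat *fmts)
{
    Display *dpy = avctx->opaque;   /* the application stored its Display here */
    VdpDevice device;
    VdpGetProcAddress *get_proc;

    for (; *fmts != AV_PIX_FMT_NONE; fmts++) {
        if (*fmts != AV_PIX_FMT_VDPAU)
            continue;
        /* Create a VDPAU device and bind it to this codec context. */
        if (vdp_device_create_x11(dpy, DefaultScreen(dpy),
                                  &device, &get_proc) != VDP_STATUS_OK)
            break;
        if (av_vdpau_bind_context(avctx, device, get_proc, 0) < 0)
            break;
        return AV_PIX_FMT_VDPAU;    /* decoded frames carry VdpVideoSurface handles */
    }
    return fmts[0];                 /* fall back to the first (software) format */
}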