mpeg12dec.c
1 /*
2  * MPEG-1/2 decoder
3  * Copyright (c) 2000, 2001 Fabrice Bellard
4  * Copyright (c) 2002-2013 Michael Niedermayer <michaelni@gmx.at>
5  *
6  * This file is part of FFmpeg.
7  *
8  * FFmpeg is free software; you can redistribute it and/or
9  * modify it under the terms of the GNU Lesser General Public
10  * License as published by the Free Software Foundation; either
11  * version 2.1 of the License, or (at your option) any later version.
12  *
13  * FFmpeg is distributed in the hope that it will be useful,
14  * but WITHOUT ANY WARRANTY; without even the implied warranty of
15  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16  * Lesser General Public License for more details.
17  *
18  * You should have received a copy of the GNU Lesser General Public
19  * License along with FFmpeg; if not, write to the Free Software
20  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
21  */
22 
23 /**
24  * @file
25  * MPEG-1/2 decoder
26  */
27 
28 #include "config_components.h"
29 
30 #define UNCHECKED_BITSTREAM_READER 1
31 #include <inttypes.h>
32 
33 #include "libavutil/attributes.h"
34 #include "libavutil/imgutils.h"
35 #include "libavutil/internal.h"
36 #include "libavutil/mem_internal.h"
37 #include "libavutil/reverse.h"
38 #include "libavutil/stereo3d.h"
39 #include "libavutil/timecode.h"
40 
41 #include "avcodec.h"
42 #include "codec_internal.h"
43 #include "decode.h"
44 #include "error_resilience.h"
45 #include "hwconfig.h"
46 #include "idctdsp.h"
47 #include "internal.h"
48 #include "mpeg_er.h"
49 #include "mpeg12.h"
50 #include "mpeg12codecs.h"
51 #include "mpeg12data.h"
52 #include "mpeg12dec.h"
53 #include "mpegutils.h"
54 #include "mpegvideo.h"
55 #include "mpegvideodata.h"
56 #include "mpegvideodec.h"
57 #include "profiles.h"
58 #include "startcode.h"
59 #include "thread.h"
60 
61 #define A53_MAX_CC_COUNT 2000
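// Limit on the number of A/53 closed-caption constructs accepted from user
// data (the CC user-data parser itself is outside this excerpt).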
62 
63 typedef struct Mpeg1Context {
64  MpegEncContext mpeg_enc_ctx;
65  int mpeg_enc_ctx_allocated; /* true if decoding context allocated */
66  int repeat_field; /* true if we must repeat the field */
67  AVPanScan pan_scan; /* some temporary storage for the panscan */
68  AVStereo3D stereo3d;
69  int has_stereo3d;
70  AVBufferRef *a53_buf_ref;
71  uint8_t afd;
72  int has_afd;
73  int slice_count;
74  unsigned aspect_ratio_info;
75  AVRational save_aspect;
76  int save_width, save_height, save_progressive_seq;
77  int rc_buffer_size;
78  AVRational frame_rate_ext; /* MPEG-2 specific framerate modificator */
79  unsigned frame_rate_index;
80  int sync; /* Did we reach a sync point like a GOP/SEQ/KEYFrame? */
81  int closed_gop;
82  int tmpgexs;
83  int first_slice;
84  int extradata_decoded;
85  int64_t timecode_frame_start; /*< GOP timecode frame start number, in non drop frame format */
86 } Mpeg1Context;
87 
88 #define MB_TYPE_ZERO_MV 0x20000000
89 
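/* ptype2mb_type[] and btype2mb_type[] translate the macroblock_type VLC index
 * (the P- and B-picture macroblock_type codes of ISO/IEC 13818-2) into the
 * internal MB_TYPE_* flag sets used below; MB_TYPE_ZERO_MV is a decoder-local
 * flag marking P-picture macroblocks coded without a transmitted motion vector. */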
90 static const uint32_t ptype2mb_type[7] = {
91  MB_TYPE_INTRA,
92  MB_TYPE_L0 | MB_TYPE_CBP | MB_TYPE_ZERO_MV | MB_TYPE_16x16,
93  MB_TYPE_L0,
94  MB_TYPE_L0 | MB_TYPE_CBP,
95  MB_TYPE_QUANT | MB_TYPE_INTRA,
96  MB_TYPE_QUANT | MB_TYPE_L0 | MB_TYPE_CBP | MB_TYPE_ZERO_MV | MB_TYPE_16x16,
97  MB_TYPE_QUANT | MB_TYPE_L0 | MB_TYPE_CBP,
98 };
99 
100 static const uint32_t btype2mb_type[11] = {
101  MB_TYPE_INTRA,
102  MB_TYPE_L1,
103  MB_TYPE_L1 | MB_TYPE_CBP,
104  MB_TYPE_L0,
105  MB_TYPE_L0 | MB_TYPE_CBP,
106  MB_TYPE_L0L1,
107  MB_TYPE_L0L1 | MB_TYPE_CBP,
108  MB_TYPE_QUANT | MB_TYPE_INTRA,
109  MB_TYPE_QUANT | MB_TYPE_L1 | MB_TYPE_CBP,
110  MB_TYPE_QUANT | MB_TYPE_L0 | MB_TYPE_CBP,
111  MB_TYPE_QUANT | MB_TYPE_L0L1 | MB_TYPE_CBP,
112 };
113 
114 /* as H.263, but only 17 codes */
115 static int mpeg_decode_motion(MpegEncContext *s, int fcode, int pred)
116 {
117  int code, sign, val, shift;
118 
119  code = get_vlc2(&s->gb, ff_mv_vlc.table, MV_VLC_BITS, 2);
120  if (code == 0)
121  return pred;
122  if (code < 0)
123  return 0xffff;
124 
125  sign = get_bits1(&s->gb);
126  shift = fcode - 1;
127  val = code;
128  if (shift) {
129  val = (val - 1) << shift;
130  val |= get_bits(&s->gb, shift);
131  val++;
132  }
133  if (sign)
134  val = -val;
135  val += pred;
136 
137  /* modulo decoding */
138  return sign_extend(val, 5 + shift);
139 }
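/* With the given fcode, the decodable differential spans
 * [-(16 << shift), (16 << shift) - 1]; the sign_extend() above wraps
 * pred + diff back into that window ("modulo decoding"), e.g. for
 * fcode == 2 the returned value always lies in [-32, 31]. */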
140 
141 #define MAX_INDEX (64 - 1)
142 #define check_scantable_index(ctx, x) \
143  do { \
144  if ((x) > MAX_INDEX) { \
145  av_log(ctx->avctx, AV_LOG_ERROR, "ac-tex damaged at %d %d\n", \
146  ctx->mb_x, ctx->mb_y); \
147  return AVERROR_INVALIDDATA; \
148  } \
149  } while (0)
150 
151 static inline int mpeg1_decode_block_inter(MpegEncContext *s,
152  int16_t *block, int n)
153 {
154  int level, i, j, run;
155  uint8_t *const scantable = s->intra_scantable.permutated;
156  const uint16_t *quant_matrix = s->inter_matrix;
157  const int qscale = s->qscale;
158 
159  {
160  OPEN_READER(re, &s->gb);
161  i = -1;
162  // special case for first coefficient, no need to add second VLC table
163  UPDATE_CACHE(re, &s->gb);
164  if (((int32_t) GET_CACHE(re, &s->gb)) < 0) {
165  level = (3 * qscale * quant_matrix[0]) >> 5;
166  level = (level - 1) | 1;
167  if (GET_CACHE(re, &s->gb) & 0x40000000)
168  level = -level;
169  block[0] = level;
170  i++;
171  SKIP_BITS(re, &s->gb, 2);
172  if (((int32_t) GET_CACHE(re, &s->gb)) <= (int32_t) 0xBFFFFFFF)
173  goto end;
174  }
175  /* now quantify & encode AC coefficients */
176  for (;;) {
177  GET_RL_VLC(level, run, re, &s->gb, ff_mpeg1_rl_vlc,
178  TEX_VLC_BITS, 2, 0);
179 
180  if (level != 0) {
181  i += run;
182  if (i > MAX_INDEX)
183  break;
184  j = scantable[i];
185  level = ((level * 2 + 1) * qscale * quant_matrix[j]) >> 5;
186  level = (level - 1) | 1;
187  level = (level ^ SHOW_SBITS(re, &s->gb, 1)) -
188  SHOW_SBITS(re, &s->gb, 1);
189  SKIP_BITS(re, &s->gb, 1);
190  } else {
191  /* escape */
192  run = SHOW_UBITS(re, &s->gb, 6) + 1;
193  LAST_SKIP_BITS(re, &s->gb, 6);
194  UPDATE_CACHE(re, &s->gb);
195  level = SHOW_SBITS(re, &s->gb, 8);
196  SKIP_BITS(re, &s->gb, 8);
197  if (level == -128) {
198  level = SHOW_UBITS(re, &s->gb, 8) - 256;
199  SKIP_BITS(re, &s->gb, 8);
200  } else if (level == 0) {
201  level = SHOW_UBITS(re, &s->gb, 8);
202  SKIP_BITS(re, &s->gb, 8);
203  }
204  i += run;
205  if (i > MAX_INDEX)
206  break;
207  j = scantable[i];
208  if (level < 0) {
209  level = -level;
210  level = ((level * 2 + 1) * qscale * quant_matrix[j]) >> 5;
211  level = (level - 1) | 1;
212  level = -level;
213  } else {
214  level = ((level * 2 + 1) * qscale * quant_matrix[j]) >> 5;
215  level = (level - 1) | 1;
216  }
217  }
218 
219  block[j] = level;
220  if (((int32_t) GET_CACHE(re, &s->gb)) <= (int32_t) 0xBFFFFFFF)
221  break;
222  UPDATE_CACHE(re, &s->gb);
223  }
224 end:
225  LAST_SKIP_BITS(re, &s->gb, 2);
226  CLOSE_READER(re, &s->gb);
227  }
228 
229  check_scantable_index(s, i);
230 
231  s->block_last_index[n] = i;
232  return 0;
233 }
234 
235 /**
236  * Changing this would eat up any speed benefits it has.
237  * Do not use "fast" flag if you need the code to be robust.
238  */
239 static inline int mpeg1_fast_decode_block_inter(MpegEncContext *s,
240  int16_t *block, int n)
241 {
242  int level, i, j, run;
243  uint8_t *const scantable = s->intra_scantable.permutated;
244  const int qscale = s->qscale;
245 
246  {
247  OPEN_READER(re, &s->gb);
248  i = -1;
249  // Special case for first coefficient, no need to add second VLC table.
250  UPDATE_CACHE(re, &s->gb);
251  if (((int32_t) GET_CACHE(re, &s->gb)) < 0) {
252  level = (3 * qscale) >> 1;
253  level = (level - 1) | 1;
254  if (GET_CACHE(re, &s->gb) & 0x40000000)
255  level = -level;
256  block[0] = level;
257  i++;
258  SKIP_BITS(re, &s->gb, 2);
259  if (((int32_t) GET_CACHE(re, &s->gb)) <= (int32_t) 0xBFFFFFFF)
260  goto end;
261  }
262 
263  /* now quantify & encode AC coefficients */
264  for (;;) {
265  GET_RL_VLC(level, run, re, &s->gb, ff_mpeg1_rl_vlc,
266  TEX_VLC_BITS, 2, 0);
267 
268  if (level != 0) {
269  i += run;
270  if (i > MAX_INDEX)
271  break;
272  j = scantable[i];
273  level = ((level * 2 + 1) * qscale) >> 1;
274  level = (level - 1) | 1;
275  level = (level ^ SHOW_SBITS(re, &s->gb, 1)) -
276  SHOW_SBITS(re, &s->gb, 1);
277  SKIP_BITS(re, &s->gb, 1);
278  } else {
279  /* escape */
280  run = SHOW_UBITS(re, &s->gb, 6) + 1;
281  LAST_SKIP_BITS(re, &s->gb, 6);
282  UPDATE_CACHE(re, &s->gb);
283  level = SHOW_SBITS(re, &s->gb, 8);
284  SKIP_BITS(re, &s->gb, 8);
285  if (level == -128) {
286  level = SHOW_UBITS(re, &s->gb, 8) - 256;
287  SKIP_BITS(re, &s->gb, 8);
288  } else if (level == 0) {
289  level = SHOW_UBITS(re, &s->gb, 8);
290  SKIP_BITS(re, &s->gb, 8);
291  }
292  i += run;
293  if (i > MAX_INDEX)
294  break;
295  j = scantable[i];
296  if (level < 0) {
297  level = -level;
298  level = ((level * 2 + 1) * qscale) >> 1;
299  level = (level - 1) | 1;
300  level = -level;
301  } else {
302  level = ((level * 2 + 1) * qscale) >> 1;
303  level = (level - 1) | 1;
304  }
305  }
306 
307  block[j] = level;
308  if (((int32_t) GET_CACHE(re, &s->gb)) <= (int32_t) 0xBFFFFFFF)
309  break;
310  UPDATE_CACHE(re, &s->gb);
311  }
312 end:
313  LAST_SKIP_BITS(re, &s->gb, 2);
314  CLOSE_READER(re, &s->gb);
315  }
316 
317  check_scantable_index(s, i);
318 
319  s->block_last_index[n] = i;
320  return 0;
321 }
322 
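/* The MPEG-2 block decoders below track "mismatch": the standard requires the
 * sum of the dequantized coefficients to be made odd, so block[63] is XORed
 * with the running parity at the end of the block (mismatch control in
 * ISO/IEC 13818-2). */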
323 static inline int mpeg2_decode_block_non_intra(MpegEncContext *s,
324  int16_t *block, int n)
325 {
326  int level, i, j, run;
327  uint8_t *const scantable = s->intra_scantable.permutated;
328  const uint16_t *quant_matrix;
329  const int qscale = s->qscale;
330  int mismatch;
331 
332  mismatch = 1;
333 
334  {
335  OPEN_READER(re, &s->gb);
336  i = -1;
337  if (n < 4)
338  quant_matrix = s->inter_matrix;
339  else
340  quant_matrix = s->chroma_inter_matrix;
341 
342  // Special case for first coefficient, no need to add second VLC table.
343  UPDATE_CACHE(re, &s->gb);
344  if (((int32_t) GET_CACHE(re, &s->gb)) < 0) {
345  level = (3 * qscale * quant_matrix[0]) >> 5;
346  if (GET_CACHE(re, &s->gb) & 0x40000000)
347  level = -level;
348  block[0] = level;
349  mismatch ^= level;
350  i++;
351  SKIP_BITS(re, &s->gb, 2);
352  if (((int32_t) GET_CACHE(re, &s->gb)) <= (int32_t) 0xBFFFFFFF)
353  goto end;
354  }
355 
356  /* now quantify & encode AC coefficients */
357  for (;;) {
358  GET_RL_VLC(level, run, re, &s->gb, ff_mpeg1_rl_vlc,
359  TEX_VLC_BITS, 2, 0);
360 
361  if (level != 0) {
362  i += run;
363  if (i > MAX_INDEX)
364  break;
365  j = scantable[i];
366  level = ((level * 2 + 1) * qscale * quant_matrix[j]) >> 5;
367  level = (level ^ SHOW_SBITS(re, &s->gb, 1)) -
368  SHOW_SBITS(re, &s->gb, 1);
369  SKIP_BITS(re, &s->gb, 1);
370  } else {
371  /* escape */
372  run = SHOW_UBITS(re, &s->gb, 6) + 1;
373  LAST_SKIP_BITS(re, &s->gb, 6);
374  UPDATE_CACHE(re, &s->gb);
375  level = SHOW_SBITS(re, &s->gb, 12);
376  SKIP_BITS(re, &s->gb, 12);
377 
378  i += run;
379  if (i > MAX_INDEX)
380  break;
381  j = scantable[i];
382  if (level < 0) {
383  level = ((-level * 2 + 1) * qscale * quant_matrix[j]) >> 5;
384  level = -level;
385  } else {
386  level = ((level * 2 + 1) * qscale * quant_matrix[j]) >> 5;
387  }
388  }
389 
390  mismatch ^= level;
391  block[j] = level;
392  if (((int32_t) GET_CACHE(re, &s->gb)) <= (int32_t) 0xBFFFFFFF)
393  break;
394  UPDATE_CACHE(re, &s->gb);
395  }
396 end:
397  LAST_SKIP_BITS(re, &s->gb, 2);
398  CLOSE_READER(re, &s->gb);
399  }
400  block[63] ^= (mismatch & 1);
401 
402  check_scantable_index(s, i);
403 
404  s->block_last_index[n] = i;
405  return 0;
406 }
407 
408 /**
409  * Changing this would eat up any speed benefits it has.
410  * Do not use "fast" flag if you need the code to be robust.
411  */
412 static inline int mpeg2_fast_decode_block_non_intra(MpegEncContext *s,
413  int16_t *block, int n)
414 {
415  int level, i, j, run;
416  uint8_t *const scantable = s->intra_scantable.permutated;
417  const int qscale = s->qscale;
418  OPEN_READER(re, &s->gb);
419  i = -1;
420 
421  // special case for first coefficient, no need to add second VLC table
422  UPDATE_CACHE(re, &s->gb);
423  if (((int32_t) GET_CACHE(re, &s->gb)) < 0) {
424  level = (3 * qscale) >> 1;
425  if (GET_CACHE(re, &s->gb) & 0x40000000)
426  level = -level;
427  block[0] = level;
428  i++;
429  SKIP_BITS(re, &s->gb, 2);
430  if (((int32_t) GET_CACHE(re, &s->gb)) <= (int32_t) 0xBFFFFFFF)
431  goto end;
432  }
433 
434  /* now quantify & encode AC coefficients */
435  for (;;) {
436  GET_RL_VLC(level, run, re, &s->gb, ff_mpeg1_rl_vlc, TEX_VLC_BITS, 2, 0);
437 
438  if (level != 0) {
439  i += run;
440  if (i > MAX_INDEX)
441  break;
442  j = scantable[i];
443  level = ((level * 2 + 1) * qscale) >> 1;
444  level = (level ^ SHOW_SBITS(re, &s->gb, 1)) -
445  SHOW_SBITS(re, &s->gb, 1);
446  SKIP_BITS(re, &s->gb, 1);
447  } else {
448  /* escape */
449  run = SHOW_UBITS(re, &s->gb, 6) + 1;
450  LAST_SKIP_BITS(re, &s->gb, 6);
451  UPDATE_CACHE(re, &s->gb);
452  level = SHOW_SBITS(re, &s->gb, 12);
453  SKIP_BITS(re, &s->gb, 12);
454 
455  i += run;
456  if (i > MAX_INDEX)
457  break;
458  j = scantable[i];
459  if (level < 0) {
460  level = ((-level * 2 + 1) * qscale) >> 1;
461  level = -level;
462  } else {
463  level = ((level * 2 + 1) * qscale) >> 1;
464  }
465  }
466 
467  block[j] = level;
468  if (((int32_t) GET_CACHE(re, &s->gb)) <= (int32_t) 0xBFFFFFFF || i > 63)
469  break;
470 
471  UPDATE_CACHE(re, &s->gb);
472  }
473 end:
474  LAST_SKIP_BITS(re, &s->gb, 2);
475  CLOSE_READER(re, &s->gb);
476 
477  check_scantable_index(s, i);
478 
479  s->block_last_index[n] = i;
480  return 0;
481 }
482 
483 static inline int mpeg2_decode_block_intra(MpegEncContext *s,
484  int16_t *block, int n)
485 {
486  int level, dc, diff, i, j, run;
487  int component;
488  const RL_VLC_ELEM *rl_vlc;
489  uint8_t *const scantable = s->intra_scantable.permutated;
490  const uint16_t *quant_matrix;
491  const int qscale = s->qscale;
492  int mismatch;
493 
494  /* DC coefficient */
495  if (n < 4) {
496  quant_matrix = s->intra_matrix;
497  component = 0;
498  } else {
499  quant_matrix = s->chroma_intra_matrix;
500  component = (n & 1) + 1;
501  }
502  diff = decode_dc(&s->gb, component);
503  dc = s->last_dc[component];
504  dc += diff;
505  s->last_dc[component] = dc;
506  block[0] = dc * (1 << (3 - s->intra_dc_precision));
507  ff_tlog(s->avctx, "dc=%d\n", block[0]);
508  mismatch = block[0] ^ 1;
509  i = 0;
510  if (s->intra_vlc_format)
511  rl_vlc = ff_mpeg2_rl_vlc;
512  else
513  rl_vlc = ff_mpeg1_rl_vlc;
514 
515  {
516  OPEN_READER(re, &s->gb);
517  /* now quantify & encode AC coefficients */
518  for (;;) {
519  UPDATE_CACHE(re, &s->gb);
520  GET_RL_VLC(level, run, re, &s->gb, rl_vlc,
521  TEX_VLC_BITS, 2, 0);
522 
523  if (level == 127) {
524  break;
525  } else if (level != 0) {
526  i += run;
527  if (i > MAX_INDEX)
528  break;
529  j = scantable[i];
530  level = (level * qscale * quant_matrix[j]) >> 4;
531  level = (level ^ SHOW_SBITS(re, &s->gb, 1)) -
532  SHOW_SBITS(re, &s->gb, 1);
533  LAST_SKIP_BITS(re, &s->gb, 1);
534  } else {
535  /* escape */
536  run = SHOW_UBITS(re, &s->gb, 6) + 1;
537  SKIP_BITS(re, &s->gb, 6);
538  level = SHOW_SBITS(re, &s->gb, 12);
539  LAST_SKIP_BITS(re, &s->gb, 12);
540  i += run;
541  if (i > MAX_INDEX)
542  break;
543  j = scantable[i];
544  if (level < 0) {
545  level = (-level * qscale * quant_matrix[j]) >> 4;
546  level = -level;
547  } else {
548  level = (level * qscale * quant_matrix[j]) >> 4;
549  }
550  }
551 
552  mismatch ^= level;
553  block[j] = level;
554  }
555  CLOSE_READER(re, &s->gb);
556  }
557  block[63] ^= mismatch & 1;
558 
559  check_scantable_index(s, i);
560 
561  s->block_last_index[n] = i;
562  return 0;
563 }
564 
565 /**
566  * Changing this would eat up any speed benefits it has.
567  * Do not use "fast" flag if you need the code to be robust.
568  */
569 static inline int mpeg2_fast_decode_block_intra(MpegEncContext *s,
570  int16_t *block, int n)
571 {
572  int level, dc, diff, i, j, run;
573  int component;
574  const RL_VLC_ELEM *rl_vlc;
575  uint8_t *const scantable = s->intra_scantable.permutated;
576  const uint16_t *quant_matrix;
577  const int qscale = s->qscale;
578 
579  /* DC coefficient */
580  if (n < 4) {
581  quant_matrix = s->intra_matrix;
582  component = 0;
583  } else {
584  quant_matrix = s->chroma_intra_matrix;
585  component = (n & 1) + 1;
586  }
587  diff = decode_dc(&s->gb, component);
588  dc = s->last_dc[component];
589  dc += diff;
590  s->last_dc[component] = dc;
591  block[0] = dc * (1 << (3 - s->intra_dc_precision));
592  i = 0;
593  if (s->intra_vlc_format)
594  rl_vlc = ff_mpeg2_rl_vlc;
595  else
596  rl_vlc = ff_mpeg1_rl_vlc;
597 
598  {
599  OPEN_READER(re, &s->gb);
600  /* now quantify & encode AC coefficients */
601  for (;;) {
602  UPDATE_CACHE(re, &s->gb);
603  GET_RL_VLC(level, run, re, &s->gb, rl_vlc,
604  TEX_VLC_BITS, 2, 0);
605 
606  if (level >= 64 || i > 63) {
607  break;
608  } else if (level != 0) {
609  i += run;
610  j = scantable[i];
611  level = (level * qscale * quant_matrix[j]) >> 4;
612  level = (level ^ SHOW_SBITS(re, &s->gb, 1)) -
613  SHOW_SBITS(re, &s->gb, 1);
614  LAST_SKIP_BITS(re, &s->gb, 1);
615  } else {
616  /* escape */
617  run = SHOW_UBITS(re, &s->gb, 6) + 1;
618  SKIP_BITS(re, &s->gb, 6);
619  level = SHOW_SBITS(re, &s->gb, 12);
620  LAST_SKIP_BITS(re, &s->gb, 12);
621  i += run;
622  j = scantable[i];
623  if (level < 0) {
624  level = (-level * qscale * quant_matrix[j]) >> 4;
625  level = -level;
626  } else {
627  level = (level * qscale * quant_matrix[j]) >> 4;
628  }
629  }
630 
631  block[j] = level;
632  }
633  CLOSE_READER(re, &s->gb);
634  }
635 
636  check_scantable_index(s, i);
637 
638  s->block_last_index[n] = i;
639  return 0;
640 }
641 
642 /******************************************/
643 /* decoding */
644 
645 static inline int get_dmv(MpegEncContext *s)
646 {
647  if (get_bits1(&s->gb))
648  return 1 - (get_bits1(&s->gb) << 1);
649  else
650  return 0;
651 }
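/* get_dmv() reads the dual-prime differential: a '0' bit means 0, otherwise
 * a sign bit follows, giving +1 or -1 (the dmvector[] element of the MPEG-2
 * macroblock syntax). */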
652 
653 /* motion type (for MPEG-2) */
654 #define MT_FIELD 1
655 #define MT_FRAME 2
656 #define MT_16X8 2
657 #define MT_DMV 3
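/* MT_FRAME and MT_16X8 intentionally share code 2: motion_type 2 means a
 * frame-based vector in frame pictures but 16x8 motion compensation in field
 * pictures, which is why the MT_FRAME case below also handles the 16x8 case. */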
658 
659 static int mpeg_decode_mb(MpegEncContext *s, int16_t block[12][64])
660 {
661  int i, j, k, cbp, val, mb_type, motion_type;
662  const int mb_block_count = 4 + (1 << s->chroma_format);
663  int ret;
664 
665  ff_tlog(s->avctx, "decode_mb: x=%d y=%d\n", s->mb_x, s->mb_y);
666 
667  av_assert2(s->mb_skipped == 0);
668 
669  if (s->mb_skip_run-- != 0) {
670  if (s->pict_type == AV_PICTURE_TYPE_P) {
671  s->mb_skipped = 1;
672  s->current_picture.mb_type[s->mb_x + s->mb_y * s->mb_stride] =
673  MB_TYPE_SKIP | MB_TYPE_L0 | MB_TYPE_16x16;
674  } else {
675  int mb_type;
676 
677  if (s->mb_x)
678  mb_type = s->current_picture.mb_type[s->mb_x + s->mb_y * s->mb_stride - 1];
679  else
680  // FIXME not sure if this is allowed in MPEG at all
681  mb_type = s->current_picture.mb_type[s->mb_width + (s->mb_y - 1) * s->mb_stride - 1];
682  if (IS_INTRA(mb_type)) {
683  av_log(s->avctx, AV_LOG_ERROR, "skip with previntra\n");
684  return AVERROR_INVALIDDATA;
685  }
686  s->current_picture.mb_type[s->mb_x + s->mb_y * s->mb_stride] =
687  mb_type | MB_TYPE_SKIP;
688 
689  if ((s->mv[0][0][0] | s->mv[0][0][1] | s->mv[1][0][0] | s->mv[1][0][1]) == 0)
690  s->mb_skipped = 1;
691  }
692 
693  return 0;
694  }
695 
696  switch (s->pict_type) {
697  default:
698  case AV_PICTURE_TYPE_I:
699  if (get_bits1(&s->gb) == 0) {
700  if (get_bits1(&s->gb) == 0) {
701  av_log(s->avctx, AV_LOG_ERROR,
702  "Invalid mb type in I-frame at %d %d\n",
703  s->mb_x, s->mb_y);
704  return AVERROR_INVALIDDATA;
705  }
706  mb_type = MB_TYPE_QUANT | MB_TYPE_INTRA;
707  } else {
708  mb_type = MB_TYPE_INTRA;
709  }
710  break;
711  case AV_PICTURE_TYPE_P:
712  mb_type = get_vlc2(&s->gb, ff_mb_ptype_vlc.table, MB_PTYPE_VLC_BITS, 1);
713  if (mb_type < 0) {
714  av_log(s->avctx, AV_LOG_ERROR,
715  "Invalid mb type in P-frame at %d %d\n", s->mb_x, s->mb_y);
716  return AVERROR_INVALIDDATA;
717  }
718  mb_type = ptype2mb_type[mb_type];
719  break;
720  case AV_PICTURE_TYPE_B:
721  mb_type = get_vlc2(&s->gb, ff_mb_btype_vlc.table, MB_BTYPE_VLC_BITS, 1);
722  if (mb_type < 0) {
723  av_log(s->avctx, AV_LOG_ERROR,
724  "Invalid mb type in B-frame at %d %d\n", s->mb_x, s->mb_y);
725  return AVERROR_INVALIDDATA;
726  }
727  mb_type = btype2mb_type[mb_type];
728  break;
729  }
730  ff_tlog(s->avctx, "mb_type=%x\n", mb_type);
731 // motion_type = 0; /* avoid warning */
732  if (IS_INTRA(mb_type)) {
733  s->bdsp.clear_blocks(s->block[0]);
734 
735  if (!s->chroma_y_shift)
736  s->bdsp.clear_blocks(s->block[6]);
737 
738  /* compute DCT type */
739  // FIXME: add an interlaced_dct coded var?
740  if (s->picture_structure == PICT_FRAME &&
741  !s->frame_pred_frame_dct)
742  s->interlaced_dct = get_bits1(&s->gb);
743 
744  if (IS_QUANT(mb_type))
745  s->qscale = mpeg_get_qscale(s);
746 
747  if (s->concealment_motion_vectors) {
748  /* just parse them */
749  if (s->picture_structure != PICT_FRAME)
750  skip_bits1(&s->gb); /* field select */
751 
752  s->mv[0][0][0] =
753  s->last_mv[0][0][0] =
754  s->last_mv[0][1][0] = mpeg_decode_motion(s, s->mpeg_f_code[0][0],
755  s->last_mv[0][0][0]);
756  s->mv[0][0][1] =
757  s->last_mv[0][0][1] =
758  s->last_mv[0][1][1] = mpeg_decode_motion(s, s->mpeg_f_code[0][1],
759  s->last_mv[0][0][1]);
760 
761  check_marker(s->avctx, &s->gb, "after concealment_motion_vectors");
762  } else {
763  /* reset mv prediction */
764  memset(s->last_mv, 0, sizeof(s->last_mv));
765  }
766  s->mb_intra = 1;
767 
768  if (s->codec_id == AV_CODEC_ID_MPEG2VIDEO) {
769  if (s->avctx->flags2 & AV_CODEC_FLAG2_FAST) {
770  for (i = 0; i < 6; i++)
771  mpeg2_fast_decode_block_intra(s, *s->pblocks[i], i);
772  } else {
773  for (i = 0; i < mb_block_count; i++)
774  if ((ret = mpeg2_decode_block_intra(s, *s->pblocks[i], i)) < 0)
775  return ret;
776  }
777  } else {
778  for (i = 0; i < 6; i++) {
779  ret = ff_mpeg1_decode_block_intra(&s->gb,
780  s->intra_matrix,
781  s->intra_scantable.permutated,
782  s->last_dc, *s->pblocks[i],
783  i, s->qscale);
784  if (ret < 0) {
785  av_log(s->avctx, AV_LOG_ERROR, "ac-tex damaged at %d %d\n",
786  s->mb_x, s->mb_y);
787  return ret;
788  }
789 
790  s->block_last_index[i] = ret;
791  }
792  }
793  } else {
794  if (mb_type & MB_TYPE_ZERO_MV) {
795  av_assert2(mb_type & MB_TYPE_CBP);
796 
797  s->mv_dir = MV_DIR_FORWARD;
798  if (s->picture_structure == PICT_FRAME) {
799  if (s->picture_structure == PICT_FRAME
800  && !s->frame_pred_frame_dct)
801  s->interlaced_dct = get_bits1(&s->gb);
802  s->mv_type = MV_TYPE_16X16;
803  } else {
804  s->mv_type = MV_TYPE_FIELD;
805  mb_type |= MB_TYPE_INTERLACED;
806  s->field_select[0][0] = s->picture_structure - 1;
807  }
808 
809  if (IS_QUANT(mb_type))
810  s->qscale = mpeg_get_qscale(s);
811 
812  s->last_mv[0][0][0] = 0;
813  s->last_mv[0][0][1] = 0;
814  s->last_mv[0][1][0] = 0;
815  s->last_mv[0][1][1] = 0;
816  s->mv[0][0][0] = 0;
817  s->mv[0][0][1] = 0;
818  } else {
819  av_assert2(mb_type & MB_TYPE_L0L1);
820  // FIXME decide if MBs in field pictures are MB_TYPE_INTERLACED
821  /* get additional motion vector type */
822  if (s->picture_structure == PICT_FRAME && s->frame_pred_frame_dct) {
823  motion_type = MT_FRAME;
824  } else {
825  motion_type = get_bits(&s->gb, 2);
826  if (s->picture_structure == PICT_FRAME && HAS_CBP(mb_type))
827  s->interlaced_dct = get_bits1(&s->gb);
828  }
829 
830  if (IS_QUANT(mb_type))
831  s->qscale = mpeg_get_qscale(s);
832 
833  /* motion vectors */
834  s->mv_dir = (mb_type >> 13) & 3;
835  ff_tlog(s->avctx, "motion_type=%d\n", motion_type);
836  switch (motion_type) {
837  case MT_FRAME: /* or MT_16X8 */
838  if (s->picture_structure == PICT_FRAME) {
839  mb_type |= MB_TYPE_16x16;
840  s->mv_type = MV_TYPE_16X16;
841  for (i = 0; i < 2; i++) {
842  if (USES_LIST(mb_type, i)) {
843  /* MT_FRAME */
844  s->mv[i][0][0] =
845  s->last_mv[i][0][0] =
846  s->last_mv[i][1][0] =
847  mpeg_decode_motion(s, s->mpeg_f_code[i][0],
848  s->last_mv[i][0][0]);
849  s->mv[i][0][1] =
850  s->last_mv[i][0][1] =
851  s->last_mv[i][1][1] =
852  mpeg_decode_motion(s, s->mpeg_f_code[i][1],
853  s->last_mv[i][0][1]);
854  /* full_pel: only for MPEG-1 */
855  if (s->full_pel[i]) {
856  s->mv[i][0][0] *= 2;
857  s->mv[i][0][1] *= 2;
858  }
859  }
860  }
861  } else {
862  mb_type |= MB_TYPE_16x8 | MB_TYPE_INTERLACED;
863  s->mv_type = MV_TYPE_16X8;
864  for (i = 0; i < 2; i++) {
865  if (USES_LIST(mb_type, i)) {
866  /* MT_16X8 */
867  for (j = 0; j < 2; j++) {
868  s->field_select[i][j] = get_bits1(&s->gb);
869  for (k = 0; k < 2; k++) {
870  val = mpeg_decode_motion(s, s->mpeg_f_code[i][k],
871  s->last_mv[i][j][k]);
872  s->last_mv[i][j][k] = val;
873  s->mv[i][j][k] = val;
874  }
875  }
876  }
877  }
878  }
879  break;
880  case MT_FIELD:
881  s->mv_type = MV_TYPE_FIELD;
882  if (s->picture_structure == PICT_FRAME) {
883  mb_type |= MB_TYPE_16x8 | MB_TYPE_INTERLACED;
884  for (i = 0; i < 2; i++) {
885  if (USES_LIST(mb_type, i)) {
886  for (j = 0; j < 2; j++) {
887  s->field_select[i][j] = get_bits1(&s->gb);
888  val = mpeg_decode_motion(s, s->mpeg_f_code[i][0],
889  s->last_mv[i][j][0]);
890  s->last_mv[i][j][0] = val;
891  s->mv[i][j][0] = val;
892  ff_tlog(s->avctx, "fmx=%d\n", val);
893  val = mpeg_decode_motion(s, s->mpeg_f_code[i][1],
894  s->last_mv[i][j][1] >> 1);
895  s->last_mv[i][j][1] = 2 * val;
896  s->mv[i][j][1] = val;
897  ff_tlog(s->avctx, "fmy=%d\n", val);
898  }
899  }
900  }
901  } else {
902  av_assert0(!s->progressive_sequence);
903  mb_type |= MB_TYPE_16x16 | MB_TYPE_INTERLACED;
904  for (i = 0; i < 2; i++) {
905  if (USES_LIST(mb_type, i)) {
906  s->field_select[i][0] = get_bits1(&s->gb);
907  for (k = 0; k < 2; k++) {
908  val = mpeg_decode_motion(s, s->mpeg_f_code[i][k],
909  s->last_mv[i][0][k]);
910  s->last_mv[i][0][k] = val;
911  s->last_mv[i][1][k] = val;
912  s->mv[i][0][k] = val;
913  }
914  }
915  }
916  }
917  break;
918  case MT_DMV:
919  if (s->progressive_sequence){
920  av_log(s->avctx, AV_LOG_ERROR, "MT_DMV in progressive_sequence\n");
921  return AVERROR_INVALIDDATA;
922  }
923  s->mv_type = MV_TYPE_DMV;
924  for (i = 0; i < 2; i++) {
925  if (USES_LIST(mb_type, i)) {
926  int dmx, dmy, mx, my, m;
927  const int my_shift = s->picture_structure == PICT_FRAME;
928 
929  mx = mpeg_decode_motion(s, s->mpeg_f_code[i][0],
930  s->last_mv[i][0][0]);
931  s->last_mv[i][0][0] = mx;
932  s->last_mv[i][1][0] = mx;
933  dmx = get_dmv(s);
934  my = mpeg_decode_motion(s, s->mpeg_f_code[i][1],
935  s->last_mv[i][0][1] >> my_shift);
936  dmy = get_dmv(s);
937 
938 
939  s->last_mv[i][0][1] = my * (1 << my_shift);
940  s->last_mv[i][1][1] = my * (1 << my_shift);
941 
942  s->mv[i][0][0] = mx;
943  s->mv[i][0][1] = my;
944  s->mv[i][1][0] = mx; // not used
945  s->mv[i][1][1] = my; // not used
946 
947  if (s->picture_structure == PICT_FRAME) {
948  mb_type |= MB_TYPE_16x16 | MB_TYPE_INTERLACED;
949 
950  // m = 1 + 2 * s->top_field_first;
951  m = s->top_field_first ? 1 : 3;
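 // Dual prime in frame pictures: the opposite-parity prediction is the field
 // vector scaled by the temporal distance (1 or 3 field periods, depending on
 // field order), plus the differential and a +/-1 vertical correction for the
 // one-line offset between the two fields.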
952 
953  /* top -> top pred */
954  s->mv[i][2][0] = ((mx * m + (mx > 0)) >> 1) + dmx;
955  s->mv[i][2][1] = ((my * m + (my > 0)) >> 1) + dmy - 1;
956  m = 4 - m;
957  s->mv[i][3][0] = ((mx * m + (mx > 0)) >> 1) + dmx;
958  s->mv[i][3][1] = ((my * m + (my > 0)) >> 1) + dmy + 1;
959  } else {
960  mb_type |= MB_TYPE_16x16;
961 
962  s->mv[i][2][0] = ((mx + (mx > 0)) >> 1) + dmx;
963  s->mv[i][2][1] = ((my + (my > 0)) >> 1) + dmy;
964  if (s->picture_structure == PICT_TOP_FIELD)
965  s->mv[i][2][1]--;
966  else
967  s->mv[i][2][1]++;
968  }
969  }
970  }
971  break;
972  default:
973  av_log(s->avctx, AV_LOG_ERROR,
974  "00 motion_type at %d %d\n", s->mb_x, s->mb_y);
975  return AVERROR_INVALIDDATA;
976  }
977  }
978 
979  s->mb_intra = 0;
980  if (HAS_CBP(mb_type)) {
981  s->bdsp.clear_blocks(s->block[0]);
982 
983  cbp = get_vlc2(&s->gb, ff_mb_pat_vlc.table, MB_PAT_VLC_BITS, 1);
984  if (mb_block_count > 6) {
985  cbp *= 1 << mb_block_count - 6;
986  cbp |= get_bits(&s->gb, mb_block_count - 6);
987  s->bdsp.clear_blocks(s->block[6]);
988  }
989  if (cbp <= 0) {
990  av_log(s->avctx, AV_LOG_ERROR,
991  "invalid cbp %d at %d %d\n", cbp, s->mb_x, s->mb_y);
992  return AVERROR_INVALIDDATA;
993  }
994 
995  if (s->codec_id == AV_CODEC_ID_MPEG2VIDEO) {
996  if (s->avctx->flags2 & AV_CODEC_FLAG2_FAST) {
997  for (i = 0; i < 6; i++) {
998  if (cbp & 32)
999  mpeg2_fast_decode_block_non_intra(s, *s->pblocks[i], i);
1000  else
1001  s->block_last_index[i] = -1;
1002  cbp += cbp;
1003  }
1004  } else {
1005  cbp <<= 12 - mb_block_count;
1006 
1007  for (i = 0; i < mb_block_count; i++) {
1008  if (cbp & (1 << 11)) {
1009  if ((ret = mpeg2_decode_block_non_intra(s, *s->pblocks[i], i)) < 0)
1010  return ret;
1011  } else {
1012  s->block_last_index[i] = -1;
1013  }
1014  cbp += cbp;
1015  }
1016  }
1017  } else {
1018  if (s->avctx->flags2 & AV_CODEC_FLAG2_FAST) {
1019  for (i = 0; i < 6; i++) {
1020  if (cbp & 32)
1021  mpeg1_fast_decode_block_inter(s, *s->pblocks[i], i);
1022  else
1023  s->block_last_index[i] = -1;
1024  cbp += cbp;
1025  }
1026  } else {
1027  for (i = 0; i < 6; i++) {
1028  if (cbp & 32) {
1029  if ((ret = mpeg1_decode_block_inter(s, *s->pblocks[i], i)) < 0)
1030  return ret;
1031  } else {
1032  s->block_last_index[i] = -1;
1033  }
1034  cbp += cbp;
1035  }
1036  }
1037  }
1038  } else {
1039  for (i = 0; i < 12; i++)
1040  s->block_last_index[i] = -1;
1041  }
1042  }
1043 
1044  s->current_picture.mb_type[s->mb_x + s->mb_y * s->mb_stride] = mb_type;
1045 
1046  return 0;
1047 }
1048 
1049 static av_cold int mpeg_decode_init(AVCodecContext *avctx)
1050 {
1051  Mpeg1Context *s = avctx->priv_data;
1052  MpegEncContext *s2 = &s->mpeg_enc_ctx;
1053 
1054  if ( avctx->codec_tag != AV_RL32("VCR2")
1055  && avctx->codec_tag != AV_RL32("BW10"))
1056  avctx->coded_width = avctx->coded_height = 0; // do not trust dimensions from input
1057  ff_mpv_decode_init(s2, avctx);
1058 
1059  /* we need some permutation to store matrices,
1060  * until the decoder sets the real permutation. */
1061  ff_mpv_idct_init(s2);
1062  ff_mpeg12_init_vlcs();
1063 
1064  s2->chroma_format = 1;
1065  s->mpeg_enc_ctx_allocated = 0;
1066  s->repeat_field = 0;
1067  avctx->color_range = AVCOL_RANGE_MPEG;
1068  return 0;
1069 }
1070 
1071 #if HAVE_THREADS
1072 static int mpeg_decode_update_thread_context(AVCodecContext *avctx,
1073  const AVCodecContext *avctx_from)
1074 {
1075  Mpeg1Context *ctx = avctx->priv_data, *ctx_from = avctx_from->priv_data;
1076  MpegEncContext *s = &ctx->mpeg_enc_ctx, *s1 = &ctx_from->mpeg_enc_ctx;
1077  int err;
1078 
1079  if (avctx == avctx_from ||
1080  !ctx_from->mpeg_enc_ctx_allocated ||
1081  !s1->context_initialized)
1082  return 0;
1083 
1084  err = ff_mpeg_update_thread_context(avctx, avctx_from);
1085  if (err)
1086  return err;
1087 
1088  if (!ctx->mpeg_enc_ctx_allocated)
1089  memcpy(s + 1, s1 + 1, sizeof(Mpeg1Context) - sizeof(MpegEncContext));
1090 
1091  return 0;
1092 }
1093 #endif
1094 
1095 static void quant_matrix_rebuild(uint16_t *matrix, const uint8_t *old_perm,
1096  const uint8_t *new_perm)
1097 {
1098  uint16_t temp_matrix[64];
1099  int i;
1100 
1101  memcpy(temp_matrix, matrix, 64 * sizeof(uint16_t));
1102 
1103  for (i = 0; i < 64; i++)
1104  matrix[new_perm[i]] = temp_matrix[old_perm[i]];
1105 }
1106 
1107 static const enum AVPixelFormat mpeg1_hwaccel_pixfmt_list_420[] = {
1108 #if CONFIG_MPEG1_NVDEC_HWACCEL
1109  AV_PIX_FMT_CUDA,
1110 #endif
1111 #if CONFIG_MPEG1_VDPAU_HWACCEL
1112  AV_PIX_FMT_VDPAU,
1113 #endif
1114  AV_PIX_FMT_YUV420P,
1115  AV_PIX_FMT_NONE
1116 };
1117 
1118 static const enum AVPixelFormat mpeg2_hwaccel_pixfmt_list_420[] = {
1119 #if CONFIG_MPEG2_NVDEC_HWACCEL
1120  AV_PIX_FMT_CUDA,
1121 #endif
1122 #if CONFIG_MPEG2_VDPAU_HWACCEL
1123  AV_PIX_FMT_VDPAU,
1124 #endif
1125 #if CONFIG_MPEG2_DXVA2_HWACCEL
1126  AV_PIX_FMT_DXVA2_VLD,
1127 #endif
1128 #if CONFIG_MPEG2_D3D11VA_HWACCEL
1129  AV_PIX_FMT_D3D11VA_VLD,
1130  AV_PIX_FMT_D3D11,
1131 #endif
1132 #if CONFIG_MPEG2_VAAPI_HWACCEL
1133  AV_PIX_FMT_VAAPI,
1134 #endif
1135 #if CONFIG_MPEG2_VIDEOTOOLBOX_HWACCEL
1136  AV_PIX_FMT_VIDEOTOOLBOX,
1137 #endif
1138  AV_PIX_FMT_YUV420P,
1139  AV_PIX_FMT_NONE
1140 };
1141 
1142 static const enum AVPixelFormat mpeg12_pixfmt_list_422[] = {
1143  AV_PIX_FMT_YUV422P,
1144  AV_PIX_FMT_NONE
1145 };
1146 
1147 static const enum AVPixelFormat mpeg12_pixfmt_list_444[] = {
1148  AV_PIX_FMT_YUV444P,
1149  AV_PIX_FMT_NONE
1150 };
1151 
1152 static enum AVPixelFormat mpeg_get_pixelformat(AVCodecContext *avctx)
1153 {
1154  Mpeg1Context *s1 = avctx->priv_data;
1155  MpegEncContext *s = &s1->mpeg_enc_ctx;
1156  const enum AVPixelFormat *pix_fmts;
1157 
1158  if (CONFIG_GRAY && (avctx->flags & AV_CODEC_FLAG_GRAY))
1159  return AV_PIX_FMT_GRAY8;
1160 
1161  if (s->chroma_format < 2)
1162  pix_fmts = avctx->codec_id == AV_CODEC_ID_MPEG1VIDEO ?
1163  mpeg1_hwaccel_pixfmt_list_420 :
1164  mpeg2_hwaccel_pixfmt_list_420;
1165  else if (s->chroma_format == 2)
1166  pix_fmts = mpeg12_pixfmt_list_422;
1167  else
1168  pix_fmts = mpeg12_pixfmt_list_444;
1169 
1170  return ff_thread_get_format(avctx, pix_fmts);
1171 }
1172 
1173 /* Call this function when we know all parameters.
1174  * It may be called in different places for MPEG-1 and MPEG-2. */
1175 static int mpeg_decode_postinit(AVCodecContext *avctx)
1176 {
1177  Mpeg1Context *s1 = avctx->priv_data;
1178  MpegEncContext *s = &s1->mpeg_enc_ctx;
1179  uint8_t old_permutation[64];
1180  int ret;
1181 
1182  if (avctx->codec_id == AV_CODEC_ID_MPEG1VIDEO) {
1183  // MPEG-1 aspect
1184  AVRational aspect_inv = av_d2q(ff_mpeg1_aspect[s1->aspect_ratio_info], 255);
1185  avctx->sample_aspect_ratio = (AVRational) { aspect_inv.den, aspect_inv.num };
1186  } else { // MPEG-2
1187  // MPEG-2 aspect
1188  if (s1->aspect_ratio_info > 1) {
1189  AVRational dar =
1190  av_mul_q(av_div_q(ff_mpeg2_aspect[s1->aspect_ratio_info],
1191  (AVRational) { s1->pan_scan.width,
1192  s1->pan_scan.height }),
1193  (AVRational) { s->width, s->height });
1194 
1195  /* We ignore the spec here and guess a bit as reality does not
1196  * match the spec, see for example res_change_ffmpeg_aspect.ts
1197  * and sequence-display-aspect.mpg.
1198  * issue1613, 621, 562 */
1199  if ((s1->pan_scan.width == 0) || (s1->pan_scan.height == 0) ||
1200  (av_cmp_q(dar, (AVRational) { 4, 3 }) &&
1201  av_cmp_q(dar, (AVRational) { 16, 9 }))) {
1202  s->avctx->sample_aspect_ratio =
1203  av_div_q(ff_mpeg2_aspect[s1->aspect_ratio_info],
1204  (AVRational) { s->width, s->height });
1205  } else {
1206  s->avctx->sample_aspect_ratio =
1207  av_div_q(ff_mpeg2_aspect[s1->aspect_ratio_info],
1208  (AVRational) { s1->pan_scan.width, s1->pan_scan.height });
1209 // issue1613 4/3 16/9 -> 16/9
1210 // res_change_ffmpeg_aspect.ts 4/3 225/44 ->4/3
1211 // widescreen-issue562.mpg 4/3 16/9 -> 16/9
1212 // s->avctx->sample_aspect_ratio = av_mul_q(s->avctx->sample_aspect_ratio, (AVRational) {s->width, s->height});
1213  ff_dlog(avctx, "aspect A %d/%d\n",
1214  ff_mpeg2_aspect[s1->aspect_ratio_info].num,
1215  ff_mpeg2_aspect[s1->aspect_ratio_info].den);
1216  ff_dlog(avctx, "aspect B %d/%d\n", s->avctx->sample_aspect_ratio.num,
1217  s->avctx->sample_aspect_ratio.den);
1218  }
1219  } else {
1220  s->avctx->sample_aspect_ratio =
1221  ff_mpeg2_aspect[s1->aspect_ratio_info];
1222  }
1223  } // MPEG-2
1224 
1225  if (av_image_check_sar(s->width, s->height,
1226  avctx->sample_aspect_ratio) < 0) {
1227  av_log(avctx, AV_LOG_WARNING, "ignoring invalid SAR: %u/%u\n",
1228  avctx->sample_aspect_ratio.num,
1229  avctx->sample_aspect_ratio.den);
1230  avctx->sample_aspect_ratio = (AVRational){ 0, 1 };
1231  }
1232 
1233  if ((s1->mpeg_enc_ctx_allocated == 0) ||
1234  avctx->coded_width != s->width ||
1235  avctx->coded_height != s->height ||
1236  s1->save_width != s->width ||
1237  s1->save_height != s->height ||
1238  av_cmp_q(s1->save_aspect, s->avctx->sample_aspect_ratio) ||
1239  (s1->save_progressive_seq != s->progressive_sequence && FFALIGN(s->height, 16) != FFALIGN(s->height, 32)) ||
1240  0) {
1241  if (s1->mpeg_enc_ctx_allocated) {
1242 #if FF_API_FLAG_TRUNCATED
1243  ParseContext pc = s->parse_context;
1244  s->parse_context.buffer = 0;
1245  ff_mpv_common_end(s);
1246  s->parse_context = pc;
1247 #else
1248  ff_mpv_common_end(s);
1249 #endif
1250  s1->mpeg_enc_ctx_allocated = 0;
1251  }
1252 
1253  ret = ff_set_dimensions(avctx, s->width, s->height);
1254  if (ret < 0)
1255  return ret;
1256 
1257  if (avctx->codec_id == AV_CODEC_ID_MPEG2VIDEO && s->bit_rate) {
1258  avctx->rc_max_rate = s->bit_rate;
1259  } else if (avctx->codec_id == AV_CODEC_ID_MPEG1VIDEO && s->bit_rate &&
1260  (s->bit_rate != 0x3FFFF*400 || s->vbv_delay != 0xFFFF)) {
1261  avctx->bit_rate = s->bit_rate;
1262  }
1263  s1->save_aspect = s->avctx->sample_aspect_ratio;
1264  s1->save_width = s->width;
1265  s1->save_height = s->height;
1266  s1->save_progressive_seq = s->progressive_sequence;
1267 
1268  /* low_delay may be forced, in this case we will have B-frames
1269  * that behave like P-frames. */
1270  avctx->has_b_frames = !s->low_delay;
1271 
1272  if (avctx->codec_id == AV_CODEC_ID_MPEG1VIDEO) {
1273  // MPEG-1 fps
1274  avctx->framerate = ff_mpeg12_frame_rate_tab[s1->frame_rate_index];
1275  avctx->ticks_per_frame = 1;
1276 
1277  avctx->chroma_sample_location = AVCHROMA_LOC_CENTER;
1278  } else { // MPEG-2
1279  // MPEG-2 fps
1280  av_reduce(&s->avctx->framerate.num,
1281  &s->avctx->framerate.den,
1282  ff_mpeg12_frame_rate_tab[s1->frame_rate_index].num * s1->frame_rate_ext.num,
1283  ff_mpeg12_frame_rate_tab[s1->frame_rate_index].den * s1->frame_rate_ext.den,
1284  1 << 30);
1285  avctx->ticks_per_frame = 2;
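 // MPEG-2 uses ticks_per_frame = 2 because a coded frame may carry
 // repeat_first_field or field pictures, so timestamps advance in field ticks.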
1286 
1287  switch (s->chroma_format) {
1288  case 1: avctx->chroma_sample_location = AVCHROMA_LOC_LEFT; break;
1289  case 2:
1290  case 3: avctx->chroma_sample_location = AVCHROMA_LOC_TOPLEFT; break;
1291  default: av_assert0(0);
1292  }
1293  } // MPEG-2
1294 
1295  avctx->pix_fmt = mpeg_get_pixelformat(avctx);
1296 
1297  /* Quantization matrices may need reordering
1298  * if DCT permutation is changed. */
1299  memcpy(old_permutation, s->idsp.idct_permutation, 64 * sizeof(uint8_t));
1300 
1301  ff_mpv_idct_init(s);
1302  if ((ret = ff_mpv_common_init(s)) < 0)
1303  return ret;
1304 
1305  quant_matrix_rebuild(s->intra_matrix, old_permutation, s->idsp.idct_permutation);
1306  quant_matrix_rebuild(s->inter_matrix, old_permutation, s->idsp.idct_permutation);
1307  quant_matrix_rebuild(s->chroma_intra_matrix, old_permutation, s->idsp.idct_permutation);
1308  quant_matrix_rebuild(s->chroma_inter_matrix, old_permutation, s->idsp.idct_permutation);
1309 
1310  s1->mpeg_enc_ctx_allocated = 1;
1311  }
1312  return 0;
1313 }
1314 
1315 static int mpeg1_decode_picture(AVCodecContext *avctx, const uint8_t *buf,
1316  int buf_size)
1317 {
1318  Mpeg1Context *s1 = avctx->priv_data;
1319  MpegEncContext *s = &s1->mpeg_enc_ctx;
1320  int ref, f_code, vbv_delay, ret;
1321 
1322  ret = init_get_bits8(&s->gb, buf, buf_size);
1323  if (ret < 0)
1324  return ret;
1325 
1326  ref = get_bits(&s->gb, 10); /* temporal ref */
1327  s->pict_type = get_bits(&s->gb, 3);
1328  if (s->pict_type == 0 || s->pict_type > 3)
1329  return AVERROR_INVALIDDATA;
1330 
1331  vbv_delay = get_bits(&s->gb, 16);
1332  s->vbv_delay = vbv_delay;
1333  if (s->pict_type == AV_PICTURE_TYPE_P ||
1334  s->pict_type == AV_PICTURE_TYPE_B) {
1335  s->full_pel[0] = get_bits1(&s->gb);
1336  f_code = get_bits(&s->gb, 3);
1337  if (f_code == 0 && (avctx->err_recognition & (AV_EF_BITSTREAM|AV_EF_COMPLIANT)))
1338  return AVERROR_INVALIDDATA;
1339  f_code += !f_code;
1340  s->mpeg_f_code[0][0] = f_code;
1341  s->mpeg_f_code[0][1] = f_code;
1342  }
1343  if (s->pict_type == AV_PICTURE_TYPE_B) {
1344  s->full_pel[1] = get_bits1(&s->gb);
1345  f_code = get_bits(&s->gb, 3);
1346  if (f_code == 0 && (avctx->err_recognition & (AV_EF_BITSTREAM|AV_EF_COMPLIANT)))
1347  return AVERROR_INVALIDDATA;
1348  f_code += !f_code;
1349  s->mpeg_f_code[1][0] = f_code;
1350  s->mpeg_f_code[1][1] = f_code;
1351  }
1352  s->current_picture.f->pict_type = s->pict_type;
1353  s->current_picture.f->key_frame = s->pict_type == AV_PICTURE_TYPE_I;
1354 
1355  if (avctx->debug & FF_DEBUG_PICT_INFO)
1356  av_log(avctx, AV_LOG_DEBUG,
1357  "vbv_delay %d, ref %d type:%d\n", vbv_delay, ref, s->pict_type);
1358 
1359  s->y_dc_scale = 8;
1360  s->c_dc_scale = 8;
1361  return 0;
1362 }
1363 
1364 static void mpeg_decode_sequence_extension(Mpeg1Context *s1)
1365 {
1366  MpegEncContext *s = &s1->mpeg_enc_ctx;
1367  int horiz_size_ext, vert_size_ext;
1368  int bit_rate_ext;
1369  AVCPBProperties *cpb_props;
1370 
1371  skip_bits(&s->gb, 1); /* profile and level esc*/
1372  s->avctx->profile = get_bits(&s->gb, 3);
1373  s->avctx->level = get_bits(&s->gb, 4);
1374  s->progressive_sequence = get_bits1(&s->gb); /* progressive_sequence */
1375  s->chroma_format = get_bits(&s->gb, 2); /* chroma_format 1=420, 2=422, 3=444 */
1376 
1377  if (!s->chroma_format) {
1378  s->chroma_format = 1;
1379  av_log(s->avctx, AV_LOG_WARNING, "Chroma format invalid\n");
1380  }
1381 
1382  horiz_size_ext = get_bits(&s->gb, 2);
1383  vert_size_ext = get_bits(&s->gb, 2);
1384  s->width |= (horiz_size_ext << 12);
1385  s->height |= (vert_size_ext << 12);
1386  bit_rate_ext = get_bits(&s->gb, 12); /* XXX: handle it */
1387  s->bit_rate += (bit_rate_ext << 18) * 400LL;
1388  check_marker(s->avctx, &s->gb, "after bit rate extension");
1389  s1->rc_buffer_size += get_bits(&s->gb, 8) * 1024 * 16 << 10;
1390 
1391  s->low_delay = get_bits1(&s->gb);
1392  if (s->avctx->flags & AV_CODEC_FLAG_LOW_DELAY)
1393  s->low_delay = 1;
1394 
1395  s1->frame_rate_ext.num = get_bits(&s->gb, 2) + 1;
1396  s1->frame_rate_ext.den = get_bits(&s->gb, 5) + 1;
1397 
1398  ff_dlog(s->avctx, "sequence extension\n");
1399  s->codec_id = s->avctx->codec_id = AV_CODEC_ID_MPEG2VIDEO;
1400 
1401  if (cpb_props = ff_add_cpb_side_data(s->avctx)) {
1402  cpb_props->buffer_size = s1->rc_buffer_size;
1403  if (s->bit_rate != 0x3FFFF*400)
1404  cpb_props->max_bitrate = s->bit_rate;
1405  }
1406 
1407  if (s->avctx->debug & FF_DEBUG_PICT_INFO)
1408  av_log(s->avctx, AV_LOG_DEBUG,
1409  "profile: %d, level: %d ps: %d cf:%d vbv buffer: %d, bitrate:%"PRId64"\n",
1410  s->avctx->profile, s->avctx->level, s->progressive_sequence, s->chroma_format,
1411  s1->rc_buffer_size, s->bit_rate);
1412 }
1413 
1414 static void mpeg_decode_sequence_display_extension(Mpeg1Context *s1)
1415 {
1416  MpegEncContext *s = &s1->mpeg_enc_ctx;
1417  int color_description, w, h;
1418 
1419  skip_bits(&s->gb, 3); /* video format */
1420  color_description = get_bits1(&s->gb);
1421  if (color_description) {
1422  s->avctx->color_primaries = get_bits(&s->gb, 8);
1423  s->avctx->color_trc = get_bits(&s->gb, 8);
1424  s->avctx->colorspace = get_bits(&s->gb, 8);
1425  }
1426  w = get_bits(&s->gb, 14);
1427  skip_bits(&s->gb, 1); // marker
1428  h = get_bits(&s->gb, 14);
1429  // remaining 3 bits are zero padding
1430 
1431  s1->pan_scan.width = 16 * w;
1432  s1->pan_scan.height = 16 * h;
1433 
1434  if (s->avctx->debug & FF_DEBUG_PICT_INFO)
1435  av_log(s->avctx, AV_LOG_DEBUG, "sde w:%d, h:%d\n", w, h);
1436 }
1437 
1438 static void mpeg_decode_picture_display_extension(Mpeg1Context *s1)
1439 {
1440  MpegEncContext *s = &s1->mpeg_enc_ctx;
1441  int i, nofco;
1442 
1443  nofco = 1;
1444  if (s->progressive_sequence) {
1445  if (s->repeat_first_field) {
1446  nofco++;
1447  if (s->top_field_first)
1448  nofco++;
1449  }
1450  } else {
1451  if (s->picture_structure == PICT_FRAME) {
1452  nofco++;
1453  if (s->repeat_first_field)
1454  nofco++;
1455  }
1456  }
1457  for (i = 0; i < nofco; i++) {
1458  s1->pan_scan.position[i][0] = get_sbits(&s->gb, 16);
1459  skip_bits(&s->gb, 1); // marker
1460  s1->pan_scan.position[i][1] = get_sbits(&s->gb, 16);
1461  skip_bits(&s->gb, 1); // marker
1462  }
1463 
1464  if (s->avctx->debug & FF_DEBUG_PICT_INFO)
1465  av_log(s->avctx, AV_LOG_DEBUG,
1466  "pde (%"PRId16",%"PRId16") (%"PRId16",%"PRId16") (%"PRId16",%"PRId16")\n",
1467  s1->pan_scan.position[0][0], s1->pan_scan.position[0][1],
1468  s1->pan_scan.position[1][0], s1->pan_scan.position[1][1],
1469  s1->pan_scan.position[2][0], s1->pan_scan.position[2][1]);
1470 }
1471 
1472 static int load_matrix(MpegEncContext *s, uint16_t matrix0[64],
1473  uint16_t matrix1[64], int intra)
1474 {
1475  int i;
1476 
1477  for (i = 0; i < 64; i++) {
1478  int j = s->idsp.idct_permutation[ff_zigzag_direct[i]];
1479  int v = get_bits(&s->gb, 8);
1480  if (v == 0) {
1481  av_log(s->avctx, AV_LOG_ERROR, "matrix damaged\n");
1482  return AVERROR_INVALIDDATA;
1483  }
1484  if (intra && i == 0 && v != 8) {
1485  av_log(s->avctx, AV_LOG_DEBUG, "intra matrix specifies invalid DC quantizer %d, ignoring\n", v);
1486  v = 8; // needed by pink.mpg / issue1046
1487  }
1488  matrix0[j] = v;
1489  if (matrix1)
1490  matrix1[j] = v;
1491  }
1492  return 0;
1493 }
1494 
1495 static void mpeg_decode_quant_matrix_extension(MpegEncContext *s)
1496 {
1497  ff_dlog(s->avctx, "matrix extension\n");
1498 
1499  if (get_bits1(&s->gb))
1500  load_matrix(s, s->chroma_intra_matrix, s->intra_matrix, 1);
1501  if (get_bits1(&s->gb))
1502  load_matrix(s, s->chroma_inter_matrix, s->inter_matrix, 0);
1503  if (get_bits1(&s->gb))
1504  load_matrix(s, s->chroma_intra_matrix, NULL, 1);
1505  if (get_bits1(&s->gb))
1506  load_matrix(s, s->chroma_inter_matrix, NULL, 0);
1507 }
1508 
1509 static int mpeg_decode_picture_coding_extension(Mpeg1Context *s1)
1510 {
1511  MpegEncContext *s = &s1->mpeg_enc_ctx;
1512 
1513  s->full_pel[0] = s->full_pel[1] = 0;
1514  s->mpeg_f_code[0][0] = get_bits(&s->gb, 4);
1515  s->mpeg_f_code[0][1] = get_bits(&s->gb, 4);
1516  s->mpeg_f_code[1][0] = get_bits(&s->gb, 4);
1517  s->mpeg_f_code[1][1] = get_bits(&s->gb, 4);
1518  s->mpeg_f_code[0][0] += !s->mpeg_f_code[0][0];
1519  s->mpeg_f_code[0][1] += !s->mpeg_f_code[0][1];
1520  s->mpeg_f_code[1][0] += !s->mpeg_f_code[1][0];
1521  s->mpeg_f_code[1][1] += !s->mpeg_f_code[1][1];
1522  if (!s->pict_type && s1->mpeg_enc_ctx_allocated) {
1523  av_log(s->avctx, AV_LOG_ERROR, "Missing picture start code\n");
1524  if (s->avctx->err_recognition & AV_EF_EXPLODE)
1525  return AVERROR_INVALIDDATA;
1526  av_log(s->avctx, AV_LOG_WARNING, "Guessing pict_type from mpeg_f_code\n");
1527  if (s->mpeg_f_code[1][0] == 15 && s->mpeg_f_code[1][1] == 15) {
1528  if (s->mpeg_f_code[0][0] == 15 && s->mpeg_f_code[0][1] == 15)
1529  s->pict_type = AV_PICTURE_TYPE_I;
1530  else
1531  s->pict_type = AV_PICTURE_TYPE_P;
1532  } else
1533  s->pict_type = AV_PICTURE_TYPE_B;
1534  s->current_picture.f->pict_type = s->pict_type;
1535  s->current_picture.f->key_frame = s->pict_type == AV_PICTURE_TYPE_I;
1536  }
1537 
1538  s->intra_dc_precision = get_bits(&s->gb, 2);
1539  s->picture_structure = get_bits(&s->gb, 2);
1540  s->top_field_first = get_bits1(&s->gb);
1541  s->frame_pred_frame_dct = get_bits1(&s->gb);
1542  s->concealment_motion_vectors = get_bits1(&s->gb);
1543  s->q_scale_type = get_bits1(&s->gb);
1544  s->intra_vlc_format = get_bits1(&s->gb);
1545  s->alternate_scan = get_bits1(&s->gb);
1546  s->repeat_first_field = get_bits1(&s->gb);
1547  s->chroma_420_type = get_bits1(&s->gb);
1548  s->progressive_frame = get_bits1(&s->gb);
1549 
1550  if (s->alternate_scan) {
1551  ff_init_scantable(s->idsp.idct_permutation, &s->inter_scantable, ff_alternate_vertical_scan);
1552  ff_init_scantable(s->idsp.idct_permutation, &s->intra_scantable, ff_alternate_vertical_scan);
1553  } else {
1554  ff_init_scantable(s->idsp.idct_permutation, &s->inter_scantable, ff_zigzag_direct);
1555  ff_init_scantable(s->idsp.idct_permutation, &s->intra_scantable, ff_zigzag_direct);
1556  }
1557 
1558  /* composite display not parsed */
1559  ff_dlog(s->avctx, "intra_dc_precision=%d\n", s->intra_dc_precision);
1560  ff_dlog(s->avctx, "picture_structure=%d\n", s->picture_structure);
1561  ff_dlog(s->avctx, "top field first=%d\n", s->top_field_first);
1562  ff_dlog(s->avctx, "repeat first field=%d\n", s->repeat_first_field);
1563  ff_dlog(s->avctx, "conceal=%d\n", s->concealment_motion_vectors);
1564  ff_dlog(s->avctx, "intra_vlc_format=%d\n", s->intra_vlc_format);
1565  ff_dlog(s->avctx, "alternate_scan=%d\n", s->alternate_scan);
1566  ff_dlog(s->avctx, "frame_pred_frame_dct=%d\n", s->frame_pred_frame_dct);
1567  ff_dlog(s->avctx, "progressive_frame=%d\n", s->progressive_frame);
1568 
1569  return 0;
1570 }
1571 
1572 static int mpeg_field_start(MpegEncContext *s, const uint8_t *buf, int buf_size)
1573 {
1574  AVCodecContext *avctx = s->avctx;
1575  Mpeg1Context *s1 = (Mpeg1Context *) s;
1576  int ret;
1577 
1578  if (!(avctx->flags2 & AV_CODEC_FLAG2_CHUNKS)) {
1579  if (s->mb_width * s->mb_height * 11LL / (33 * 2 * 8) > buf_size)
1580  return AVERROR_INVALIDDATA;
1581  }
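 // Sanity check above: even an all-skipped picture needs on the order of
 // 11 bits per 33 macroblocks (one macroblock_escape code), so a buffer much
 // smaller than mb_count * 11 / (33 * 2 * 8) bytes cannot hold a full picture.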
1582 
1583  /* start frame decoding */
1584  if (s->first_field || s->picture_structure == PICT_FRAME) {
1585  AVFrameSideData *pan_scan;
1586 
1587  if ((ret = ff_mpv_frame_start(s, avctx)) < 0)
1588  return ret;
1589 
1590  ff_mpeg_er_frame_start(s);
1591 
1592  /* first check if we must repeat the frame */
1593  s->current_picture_ptr->f->repeat_pict = 0;
1594  if (s->repeat_first_field) {
1595  if (s->progressive_sequence) {
1596  if (s->top_field_first)
1597  s->current_picture_ptr->f->repeat_pict = 4;
1598  else
1599  s->current_picture_ptr->f->repeat_pict = 2;
1600  } else if (s->progressive_frame) {
1601  s->current_picture_ptr->f->repeat_pict = 1;
1602  }
1603  }
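 // repeat_pict counts extra fields: 4 and 2 mean the progressive frame is
 // displayed three or two frame times, 1 means one extra field (soft telecine).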
1604 
1605  pan_scan = av_frame_new_side_data(s->current_picture_ptr->f,
1606  AV_FRAME_DATA_PANSCAN,
1607  sizeof(s1->pan_scan));
1608  if (!pan_scan)
1609  return AVERROR(ENOMEM);
1610  memcpy(pan_scan->data, &s1->pan_scan, sizeof(s1->pan_scan));
1611 
1612  if (s1->a53_buf_ref) {
1613  AVFrameSideData *sd = av_frame_new_side_data_from_buf(
1614  s->current_picture_ptr->f, AV_FRAME_DATA_A53_CC,
1615  s1->a53_buf_ref);
1616  if (!sd)
1617  av_buffer_unref(&s1->a53_buf_ref);
1618  s1->a53_buf_ref = NULL;
1619  }
1620 
1621  if (s1->has_stereo3d) {
1622  AVStereo3D *stereo = av_stereo3d_create_side_data(s->current_picture_ptr->f);
1623  if (!stereo)
1624  return AVERROR(ENOMEM);
1625 
1626  *stereo = s1->stereo3d;
1627  s1->has_stereo3d = 0;
1628  }
1629 
1630  if (s1->has_afd) {
1631  AVFrameSideData *sd =
1632  av_frame_new_side_data(s->current_picture_ptr->f,
1633  AV_FRAME_DATA_AFD, 1);
1634  if (!sd)
1635  return AVERROR(ENOMEM);
1636 
1637  *sd->data = s1->afd;
1638  s1->has_afd = 0;
1639  }
1640 
1641  if (HAVE_THREADS && (avctx->active_thread_type & FF_THREAD_FRAME))
1642  ff_thread_finish_setup(avctx);
1643  } else { // second field
1644  int i;
1645 
1646  if (!s->current_picture_ptr) {
1647  av_log(s->avctx, AV_LOG_ERROR, "first field missing\n");
1648  return AVERROR_INVALIDDATA;
1649  }
1650 
1651  if (s->avctx->hwaccel) {
1652  if ((ret = s->avctx->hwaccel->end_frame(s->avctx)) < 0) {
1653  av_log(avctx, AV_LOG_ERROR,
1654  "hardware accelerator failed to decode first field\n");
1655  return ret;
1656  }
1657  }
1658 
1659  for (i = 0; i < 4; i++) {
1660  s->current_picture.f->data[i] = s->current_picture_ptr->f->data[i];
1661  if (s->picture_structure == PICT_BOTTOM_FIELD)
1662  s->current_picture.f->data[i] +=
1663  s->current_picture_ptr->f->linesize[i];
1664  }
1665  }
1666 
1667  if (avctx->hwaccel) {
1668  if ((ret = avctx->hwaccel->start_frame(avctx, buf, buf_size)) < 0)
1669  return ret;
1670  }
1671 
1672  return 0;
1673 }
1674 
1675 #define DECODE_SLICE_ERROR -1
1676 #define DECODE_SLICE_OK 0
1677 
1678 /**
1679  * Decode a slice.
1680  * MpegEncContext.mb_y must be set to the MB row from the startcode.
1681  * @return DECODE_SLICE_ERROR if the slice is damaged,
1682  * DECODE_SLICE_OK if this slice is OK
1683  */
1684 static int mpeg_decode_slice(MpegEncContext *s, int mb_y,
1685  const uint8_t **buf, int buf_size)
1686 {
1687  AVCodecContext *avctx = s->avctx;
1688  const int lowres = s->avctx->lowres;
1689  const int field_pic = s->picture_structure != PICT_FRAME;
1690  int ret;
1691 
1692  s->resync_mb_x =
1693  s->resync_mb_y = -1;
1694 
1695  av_assert0(mb_y < s->mb_height);
1696 
1697  ret = init_get_bits8(&s->gb, *buf, buf_size);
1698  if (ret < 0)
1699  return ret;
1700 
1701  if (s->codec_id != AV_CODEC_ID_MPEG1VIDEO && s->mb_height > 2800/16)
1702  skip_bits(&s->gb, 3);
1703 
1704  ff_mpeg1_clean_buffers(s);
1705  s->interlaced_dct = 0;
1706 
1707  s->qscale = mpeg_get_qscale(s);
1708 
1709  if (s->qscale == 0) {
1710  av_log(s->avctx, AV_LOG_ERROR, "qscale == 0\n");
1711  return AVERROR_INVALIDDATA;
1712  }
1713 
1714  /* extra slice info */
1715  if (skip_1stop_8data_bits(&s->gb) < 0)
1716  return AVERROR_INVALIDDATA;
1717 
1718  s->mb_x = 0;
1719 
1720  if (mb_y == 0 && s->codec_tag == AV_RL32("SLIF")) {
1721  skip_bits1(&s->gb);
1722  } else {
1723  while (get_bits_left(&s->gb) > 0) {
1724  int code = get_vlc2(&s->gb, ff_mbincr_vlc.table,
1725  MBINCR_VLC_BITS, 2);
1726  if (code < 0) {
1727  av_log(s->avctx, AV_LOG_ERROR, "first mb_incr damaged\n");
1728  return AVERROR_INVALIDDATA;
1729  }
1730  if (code >= 33) {
1731  if (code == 33)
1732  s->mb_x += 33;
1733  /* otherwise, stuffing, nothing to do */
1734  } else {
1735  s->mb_x += code;
1736  break;
1737  }
1738  }
1739  }
1740 
1741  if (s->mb_x >= (unsigned) s->mb_width) {
1742  av_log(s->avctx, AV_LOG_ERROR, "initial skip overflow\n");
1743  return AVERROR_INVALIDDATA;
1744  }
1745 
1746  if (avctx->hwaccel && avctx->hwaccel->decode_slice) {
1747  const uint8_t *buf_end, *buf_start = *buf - 4; /* include start_code */
1748  int start_code = -1;
1749  buf_end = avpriv_find_start_code(buf_start + 2, *buf + buf_size, &start_code);
1750  if (buf_end < *buf + buf_size)
1751  buf_end -= 4;
1752  s->mb_y = mb_y;
1753  if (avctx->hwaccel->decode_slice(avctx, buf_start, buf_end - buf_start) < 0)
1754  return DECODE_SLICE_ERROR;
1755  *buf = buf_end;
1756  return DECODE_SLICE_OK;
1757  }
1758 
1759  s->resync_mb_x = s->mb_x;
1760  s->resync_mb_y = s->mb_y = mb_y;
1761  s->mb_skip_run = 0;
1762  ff_init_block_index(s);
1763 
1764  if (s->mb_y == 0 && s->mb_x == 0 && (s->first_field || s->picture_structure == PICT_FRAME)) {
1765  if (s->avctx->debug & FF_DEBUG_PICT_INFO) {
1766  av_log(s->avctx, AV_LOG_DEBUG,
1767  "qp:%d fc:%2d%2d%2d%2d %c %s %s %s %s dc:%d pstruct:%d fdct:%d cmv:%d qtype:%d ivlc:%d rff:%d %s\n",
1768  s->qscale,
1769  s->mpeg_f_code[0][0], s->mpeg_f_code[0][1],
1770  s->mpeg_f_code[1][0], s->mpeg_f_code[1][1],
1771  s->pict_type == AV_PICTURE_TYPE_I ? 'I' :
1772  (s->pict_type == AV_PICTURE_TYPE_P ? 'P' :
1773  (s->pict_type == AV_PICTURE_TYPE_B ? 'B' : 'S')),
1774  s->progressive_sequence ? "ps" : "",
1775  s->progressive_frame ? "pf" : "",
1776  s->alternate_scan ? "alt" : "",
1777  s->top_field_first ? "top" : "",
1778  s->intra_dc_precision, s->picture_structure,
1779  s->frame_pred_frame_dct, s->concealment_motion_vectors,
1780  s->q_scale_type, s->intra_vlc_format,
1781  s->repeat_first_field, s->chroma_420_type ? "420" : "");
1782  }
1783  }
1784 
1785  for (;;) {
1786  if ((ret = mpeg_decode_mb(s, s->block)) < 0)
1787  return ret;
1788 
1789  // Note motion_val is normally NULL unless we want to extract the MVs.
1790  if (s->current_picture.motion_val[0]) {
1791  const int wrap = s->b8_stride;
1792  int xy = s->mb_x * 2 + s->mb_y * 2 * wrap;
1793  int b8_xy = 4 * (s->mb_x + s->mb_y * s->mb_stride);
1794  int motion_x, motion_y, dir, i;
1795 
1796  for (i = 0; i < 2; i++) {
1797  for (dir = 0; dir < 2; dir++) {
1798  if (s->mb_intra ||
1799  (dir == 1 && s->pict_type != AV_PICTURE_TYPE_B)) {
1800  motion_x = motion_y = 0;
1801  } else if (s->mv_type == MV_TYPE_16X16 ||
1802  (s->mv_type == MV_TYPE_FIELD && field_pic)) {
1803  motion_x = s->mv[dir][0][0];
1804  motion_y = s->mv[dir][0][1];
1805  } else { /* if ((s->mv_type == MV_TYPE_FIELD) || (s->mv_type == MV_TYPE_16X8)) */
1806  motion_x = s->mv[dir][i][0];
1807  motion_y = s->mv[dir][i][1];
1808  }
1809 
1810  s->current_picture.motion_val[dir][xy][0] = motion_x;
1811  s->current_picture.motion_val[dir][xy][1] = motion_y;
1812  s->current_picture.motion_val[dir][xy + 1][0] = motion_x;
1813  s->current_picture.motion_val[dir][xy + 1][1] = motion_y;
1814  s->current_picture.ref_index [dir][b8_xy] =
1815  s->current_picture.ref_index [dir][b8_xy + 1] = s->field_select[dir][i];
1816  av_assert2(s->field_select[dir][i] == 0 ||
1817  s->field_select[dir][i] == 1);
1818  }
1819  xy += wrap;
1820  b8_xy += 2;
1821  }
1822  }
1823 
1824  s->dest[0] += 16 >> lowres;
1825  s->dest[1] +=(16 >> lowres) >> s->chroma_x_shift;
1826  s->dest[2] +=(16 >> lowres) >> s->chroma_x_shift;
1827 
1828  ff_mpv_reconstruct_mb(s, s->block);
1829 
1830  if (++s->mb_x >= s->mb_width) {
1831  const int mb_size = 16 >> s->avctx->lowres;
1832  int left;
1833 
1834  ff_mpeg_draw_horiz_band(s, mb_size * (s->mb_y >> field_pic), mb_size);
1835  ff_mpv_report_decode_progress(s);
1836 
1837  s->mb_x = 0;
1838  s->mb_y += 1 << field_pic;
1839 
1840  if (s->mb_y >= s->mb_height) {
1841  int left = get_bits_left(&s->gb);
1842  int is_d10 = s->chroma_format == 2 &&
1843  s->pict_type == AV_PICTURE_TYPE_I &&
1844  avctx->profile == 0 && avctx->level == 5 &&
1845  s->intra_dc_precision == 2 &&
1846  s->q_scale_type == 1 && s->alternate_scan == 0 &&
1847  s->progressive_frame == 0
1848  /* vbv_delay == 0xBBB || 0xE10 */;
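 // D-10 (Sony IMX, typically carried in MXF) pads every picture to a fixed
 // size, so trailing bits after the last slice are expected there and must not
 // be treated as an error below; the KLV key prefix 0x060E2B is an extra hint.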
1849 
1850  if (left >= 32 && !is_d10) {
1851  GetBitContext gb = s->gb;
1852  align_get_bits(&gb);
1853  if (show_bits(&gb, 24) == 0x060E2B) {
1854  av_log(avctx, AV_LOG_DEBUG, "Invalid MXF data found in video stream\n");
1855  is_d10 = 1;
1856  }
1857  if (left > 32 && show_bits_long(&gb, 32) == 0x201) {
1858  av_log(avctx, AV_LOG_DEBUG, "skipping m704 alpha (unsupported)\n");
1859  goto eos;
1860  }
1861  }
1862 
1863  if (left < 0 ||
1864  (left && show_bits(&s->gb, FFMIN(left, 23)) && !is_d10) ||
1865  ((avctx->err_recognition & (AV_EF_BITSTREAM | AV_EF_AGGRESSIVE)) && left > 8)) {
1866  av_log(avctx, AV_LOG_ERROR, "end mismatch left=%d %0X at %d %d\n",
1867  left, left>0 ? show_bits(&s->gb, FFMIN(left, 23)) : 0, s->mb_x, s->mb_y);
1868  return AVERROR_INVALIDDATA;
1869  } else
1870  goto eos;
1871  }
1872  // There are some files out there which are missing the last slice
1873  // in cases where the slice is completely outside the visible
1874  // area, we detect this here instead of running into the end expecting
1875  // more data
1876  left = get_bits_left(&s->gb);
1877  if (s->mb_y >= ((s->height + 15) >> 4) &&
1878  !s->progressive_sequence &&
1879  left <= 25 &&
1880  left >= 0 &&
1881  s->mb_skip_run == -1 &&
1882  (!left || show_bits(&s->gb, left) == 0))
1883  goto eos;
1884 
1885  ff_init_block_index(s);
1886  }
1887 
1888  /* skip mb handling */
1889  if (s->mb_skip_run == -1) {
1890  /* read increment again */
1891  s->mb_skip_run = 0;
1892  for (;;) {
1893  int code = get_vlc2(&s->gb, ff_mbincr_vlc.table,
1894  MBINCR_VLC_BITS, 2);
1895  if (code < 0) {
1896  av_log(s->avctx, AV_LOG_ERROR, "mb incr damaged\n");
1897  return AVERROR_INVALIDDATA;
1898  }
1899  if (code >= 33) {
1900  if (code == 33) {
1901  s->mb_skip_run += 33;
1902  } else if (code == 35) {
1903  if (s->mb_skip_run != 0 || show_bits(&s->gb, 15) != 0) {
1904  av_log(s->avctx, AV_LOG_ERROR, "slice mismatch\n");
1905  return AVERROR_INVALIDDATA;
1906  }
1907  goto eos; /* end of slice */
1908  }
1909  /* otherwise, stuffing, nothing to do */
1910  } else {
1911  s->mb_skip_run += code;
1912  break;
1913  }
1914  }
1915  if (s->mb_skip_run) {
1916  int i;
1917  if (s->pict_type == AV_PICTURE_TYPE_I) {
1918  av_log(s->avctx, AV_LOG_ERROR,
1919  "skipped MB in I-frame at %d %d\n", s->mb_x, s->mb_y);
1920  return AVERROR_INVALIDDATA;
1921  }
1922 
1923  /* skip mb */
1924  s->mb_intra = 0;
1925  for (i = 0; i < 12; i++)
1926  s->block_last_index[i] = -1;
1927  if (s->picture_structure == PICT_FRAME)
1928  s->mv_type = MV_TYPE_16X16;
1929  else
1930  s->mv_type = MV_TYPE_FIELD;
1931  if (s->pict_type == AV_PICTURE_TYPE_P) {
1932  /* if P type, zero motion vector is implied */
1933  s->mv_dir = MV_DIR_FORWARD;
1934  s->mv[0][0][0] = s->mv[0][0][1] = 0;
1935  s->last_mv[0][0][0] = s->last_mv[0][0][1] = 0;
1936  s->last_mv[0][1][0] = s->last_mv[0][1][1] = 0;
1937  s->field_select[0][0] = (s->picture_structure - 1) & 1;
1938  } else {
1939  /* if B type, reuse previous vectors and directions */
1940  s->mv[0][0][0] = s->last_mv[0][0][0];
1941  s->mv[0][0][1] = s->last_mv[0][0][1];
1942  s->mv[1][0][0] = s->last_mv[1][0][0];
1943  s->mv[1][0][1] = s->last_mv[1][0][1];
1944  s->field_select[0][0] = (s->picture_structure - 1) & 1;
1945  s->field_select[1][0] = (s->picture_structure - 1) & 1;
1946  }
1947  }
1948  }
1949  }
1950 eos: // end of slice
1951  if (get_bits_left(&s->gb) < 0) {
1952  av_log(s, AV_LOG_ERROR, "overread %d\n", -get_bits_left(&s->gb));
1953  return AVERROR_INVALIDDATA;
1954  }
1955  *buf += (get_bits_count(&s->gb) - 1) / 8;
1956  ff_dlog(s, "Slice start:%d %d end:%d %d\n", s->resync_mb_x, s->resync_mb_y, s->mb_x, s->mb_y);
1957  return 0;
1958 }
1959 
1960 static int slice_decode_thread(AVCodecContext *c, void *arg)
1961 {
1962  MpegEncContext *s = *(void **) arg;
1963  const uint8_t *buf = s->gb.buffer;
1964  int mb_y = s->start_mb_y;
1965  const int field_pic = s->picture_structure != PICT_FRAME;
1966 
1967  s->er.error_count = (3 * (s->end_mb_y - s->start_mb_y) * s->mb_width) >> field_pic;
1968 
1969  for (;;) {
1970  uint32_t start_code;
1971  int ret;
1972 
1973  ret = mpeg_decode_slice(s, mb_y, &buf, s->gb.buffer_end - buf);
1974  emms_c();
1975  ff_dlog(c, "ret:%d resync:%d/%d mb:%d/%d ts:%d/%d ec:%d\n",
1976  ret, s->resync_mb_x, s->resync_mb_y, s->mb_x, s->mb_y,
1977  s->start_mb_y, s->end_mb_y, s->er.error_count);
1978  if (ret < 0) {
1979  if (c->err_recognition & AV_EF_EXPLODE)
1980  return ret;
1981  if (s->resync_mb_x >= 0 && s->resync_mb_y >= 0)
1982  ff_er_add_slice(&s->er, s->resync_mb_x, s->resync_mb_y,
1983  s->mb_x, s->mb_y,
1984                                 ER_AC_ERROR | ER_DC_ERROR | ER_MV_ERROR);
1985         } else {
1986  ff_er_add_slice(&s->er, s->resync_mb_x, s->resync_mb_y,
1987  s->mb_x - 1, s->mb_y,
1988                             ER_AC_END | ER_DC_END | ER_MV_END);
1989         }
1990 
1991  if (s->mb_y == s->end_mb_y)
1992  return 0;
1993 
1994  start_code = -1;
1995  buf = avpriv_find_start_code(buf, s->gb.buffer_end, &start_code);
1996  if (start_code < SLICE_MIN_START_CODE || start_code > SLICE_MAX_START_CODE)
1997  return AVERROR_INVALIDDATA;
1998         mb_y = start_code - SLICE_MIN_START_CODE;
1999         if (s->codec_id != AV_CODEC_ID_MPEG1VIDEO && s->mb_height > 2800/16)
2000  mb_y += (*buf&0xE0)<<2;
2001  mb_y <<= field_pic;
2002  if (s->picture_structure == PICT_BOTTOM_FIELD)
2003  mb_y++;
2004  if (mb_y >= s->end_mb_y)
2005  return AVERROR_INVALIDDATA;
2006  }
2007 }
2008 
2009 /**
2010  * Handle slice ends.
2011  * @return 1 if it seems to be the last slice
2012  */
2013 static int slice_end(AVCodecContext *avctx, AVFrame *pict)
2014 {
2015  Mpeg1Context *s1 = avctx->priv_data;
2016  MpegEncContext *s = &s1->mpeg_enc_ctx;
2017 
2018  if (!s1->mpeg_enc_ctx_allocated || !s->current_picture_ptr)
2019  return 0;
2020 
2021  if (s->avctx->hwaccel) {
2022  int ret = s->avctx->hwaccel->end_frame(s->avctx);
2023  if (ret < 0) {
2024  av_log(avctx, AV_LOG_ERROR,
2025  "hardware accelerator failed to decode picture\n");
2026  return ret;
2027  }
2028  }
2029 
2030  /* end of slice reached */
2031  if (/* s->mb_y << field_pic == s->mb_height && */ !s->first_field && !s1->first_slice) {
2032  /* end of image */
2033 
2034  ff_er_frame_end(&s->er);
2035 
2036         ff_mpv_frame_end(s);
2037 
2038  if (s->pict_type == AV_PICTURE_TYPE_B || s->low_delay) {
2039  int ret = av_frame_ref(pict, s->current_picture_ptr->f);
2040  if (ret < 0)
2041  return ret;
2042  ff_print_debug_info(s, s->current_picture_ptr, pict);
2043  ff_mpv_export_qp_table(s, pict, s->current_picture_ptr, FF_MPV_QSCALE_TYPE_MPEG2);
2044  } else {
2045  /* latency of 1 frame for I- and P-frames */
2046  if (s->last_picture_ptr) {
2047  int ret = av_frame_ref(pict, s->last_picture_ptr->f);
2048  if (ret < 0)
2049  return ret;
2050  ff_print_debug_info(s, s->last_picture_ptr, pict);
2051  ff_mpv_export_qp_table(s, pict, s->last_picture_ptr, FF_MPV_QSCALE_TYPE_MPEG2);
2052  }
2053  }
2054 
2055  return 1;
2056  } else {
2057  return 0;
2058  }
2059 }
2060 
2061 static int mpeg1_decode_sequence(AVCodecContext *avctx,
2062                                  const uint8_t *buf, int buf_size)
2063 {
2064  Mpeg1Context *s1 = avctx->priv_data;
2065  MpegEncContext *s = &s1->mpeg_enc_ctx;
2066  int width, height;
2067  int i, v, j;
2068 
2069  int ret = init_get_bits8(&s->gb, buf, buf_size);
2070  if (ret < 0)
2071  return ret;
2072 
2073  width = get_bits(&s->gb, 12);
2074  height = get_bits(&s->gb, 12);
2075  if (width == 0 || height == 0) {
2076  av_log(avctx, AV_LOG_WARNING,
2077  "Invalid horizontal or vertical size value.\n");
2078         if (avctx->err_recognition & (AV_EF_BITSTREAM | AV_EF_COMPLIANT))
2079             return AVERROR_INVALIDDATA;
2080  }
2081  s1->aspect_ratio_info = get_bits(&s->gb, 4);
2082  if (s1->aspect_ratio_info == 0) {
2083  av_log(avctx, AV_LOG_ERROR, "aspect ratio has forbidden 0 value\n");
2084         if (avctx->err_recognition & (AV_EF_BITSTREAM | AV_EF_COMPLIANT))
2085             return AVERROR_INVALIDDATA;
2086  }
2087  s1->frame_rate_index = get_bits(&s->gb, 4);
2088  if (s1->frame_rate_index == 0 || s1->frame_rate_index > 13) {
2089  av_log(avctx, AV_LOG_WARNING,
2090  "frame_rate_index %d is invalid\n", s1->frame_rate_index);
2091  s1->frame_rate_index = 1;
2092  }
2093  s->bit_rate = get_bits(&s->gb, 18) * 400LL;
2094  if (check_marker(s->avctx, &s->gb, "in sequence header") == 0) {
2095  return AVERROR_INVALIDDATA;
2096  }
2097 
2098  s1->rc_buffer_size = get_bits(&s->gb, 10) * 1024 * 16;
2099  skip_bits(&s->gb, 1);
2100 
2101  /* get matrix */
2102  if (get_bits1(&s->gb)) {
2103  load_matrix(s, s->chroma_intra_matrix, s->intra_matrix, 1);
2104  } else {
2105  for (i = 0; i < 64; i++) {
2106  j = s->idsp.idct_permutation[i];
2107             v = ff_mpeg1_default_intra_matrix[i];
2108             s->intra_matrix[j] = v;
2109  s->chroma_intra_matrix[j] = v;
2110  }
2111  }
2112  if (get_bits1(&s->gb)) {
2113  load_matrix(s, s->chroma_inter_matrix, s->inter_matrix, 0);
2114  } else {
2115  for (i = 0; i < 64; i++) {
2116  int j = s->idsp.idct_permutation[i];
2117             v = ff_mpeg1_default_non_intra_matrix[i];
2118             s->inter_matrix[j] = v;
2119  s->chroma_inter_matrix[j] = v;
2120  }
2121  }
2122 
2123  if (show_bits(&s->gb, 23) != 0) {
2124  av_log(s->avctx, AV_LOG_ERROR, "sequence header damaged\n");
2125  return AVERROR_INVALIDDATA;
2126  }
2127 
2128  s->width = width;
2129  s->height = height;
2130 
2131  /* We set MPEG-2 parameters so that it emulates MPEG-1. */
2132  s->progressive_sequence = 1;
2133  s->progressive_frame = 1;
2134  s->picture_structure = PICT_FRAME;
2135  s->first_field = 0;
2136  s->frame_pred_frame_dct = 1;
2137  s->chroma_format = 1;
2138  s->codec_id =
2139  s->avctx->codec_id = AV_CODEC_ID_MPEG1VIDEO;
2140  s->out_format = FMT_MPEG1;
2141  if (s->avctx->flags & AV_CODEC_FLAG_LOW_DELAY)
2142  s->low_delay = 1;
2143 
2144  if (s->avctx->debug & FF_DEBUG_PICT_INFO)
2145  av_log(s->avctx, AV_LOG_DEBUG, "vbv buffer: %d, bitrate:%"PRId64", aspect_ratio_info: %d \n",
2146  s1->rc_buffer_size, s->bit_rate, s1->aspect_ratio_info);
2147 
2148  return 0;
2149 }
2150 
2151 static int vcr2_init_sequence(AVCodecContext *avctx)
2152 {
2153  Mpeg1Context *s1 = avctx->priv_data;
2154  MpegEncContext *s = &s1->mpeg_enc_ctx;
2155  int i, v, ret;
2156 
2157  /* start new MPEG-1 context decoding */
2158  s->out_format = FMT_MPEG1;
2159  if (s1->mpeg_enc_ctx_allocated) {
2160         ff_mpv_common_end(s);
2161         s1->mpeg_enc_ctx_allocated = 0;
2162  }
2163  s->width = avctx->coded_width;
2164  s->height = avctx->coded_height;
2165  avctx->has_b_frames = 0; // true?
2166  s->low_delay = 1;
2167 
2168  avctx->pix_fmt = mpeg_get_pixelformat(avctx);
2169 
2170     ff_mpv_idct_init(s);
2171     if ((ret = ff_mpv_common_init(s)) < 0)
2172  return ret;
2173  s1->mpeg_enc_ctx_allocated = 1;
2174 
2175  for (i = 0; i < 64; i++) {
2176  int j = s->idsp.idct_permutation[i];
2177         v = ff_mpeg1_default_intra_matrix[i];
2178         s->intra_matrix[j] = v;
2179  s->chroma_intra_matrix[j] = v;
2180 
2181         v = ff_mpeg1_default_non_intra_matrix[i];
2182         s->inter_matrix[j] = v;
2183  s->chroma_inter_matrix[j] = v;
2184  }
2185 
2186  s->progressive_sequence = 1;
2187  s->progressive_frame = 1;
2188  s->picture_structure = PICT_FRAME;
2189  s->first_field = 0;
2190  s->frame_pred_frame_dct = 1;
2191  s->chroma_format = 1;
2192  if (s->codec_tag == AV_RL32("BW10")) {
2193  s->codec_id = s->avctx->codec_id = AV_CODEC_ID_MPEG1VIDEO;
2194  } else {
2195  s->codec_id = s->avctx->codec_id = AV_CODEC_ID_MPEG2VIDEO;
2196  }
2197  s1->save_width = s->width;
2198  s1->save_height = s->height;
2199  s1->save_progressive_seq = s->progressive_sequence;
2200  return 0;
2201 }
2202 
2203 static int mpeg_decode_a53_cc(AVCodecContext *avctx,
2204                               const uint8_t *p, int buf_size)
2205 {
2206  Mpeg1Context *s1 = avctx->priv_data;
2207 
2208  if (buf_size >= 6 &&
2209  p[0] == 'G' && p[1] == 'A' && p[2] == '9' && p[3] == '4' &&
2210  p[4] == 3 && (p[5] & 0x40)) {
2211  /* extract A53 Part 4 CC data */
2212  int cc_count = p[5] & 0x1f;
2213  if (cc_count > 0 && buf_size >= 7 + cc_count * 3) {
2214  int old_size = s1->a53_buf_ref ? s1->a53_buf_ref->size : 0;
2215  const uint64_t new_size = (old_size + cc_count
2216  * UINT64_C(3));
2217  int ret;
2218 
2219  if (new_size > 3*A53_MAX_CC_COUNT)
2220  return AVERROR(EINVAL);
2221 
2222  ret = av_buffer_realloc(&s1->a53_buf_ref, new_size);
2223  if (ret >= 0)
2224  memcpy(s1->a53_buf_ref->data + old_size, p + 7, cc_count * UINT64_C(3));
2225 
2226             avctx->properties |= FF_CODEC_PROPERTY_CLOSED_CAPTIONS;
2227         }
2228  return 1;
2229  } else if (buf_size >= 2 &&
2230  p[0] == 0x03 && (p[1]&0x7f) == 0x01) {
2231  /* extract SCTE-20 CC data */
2232  GetBitContext gb;
2233  int cc_count = 0;
2234  int i, ret;
2235 
2236  ret = init_get_bits8(&gb, p + 2, buf_size - 2);
2237  if (ret < 0)
2238  return ret;
2239  cc_count = get_bits(&gb, 5);
2240  if (cc_count > 0) {
2241  int old_size = s1->a53_buf_ref ? s1->a53_buf_ref->size : 0;
2242  const uint64_t new_size = (old_size + cc_count
2243  * UINT64_C(3));
2244  if (new_size > 3*A53_MAX_CC_COUNT)
2245  return AVERROR(EINVAL);
2246 
2247  ret = av_buffer_realloc(&s1->a53_buf_ref, new_size);
2248  if (ret >= 0) {
2249  uint8_t field, cc1, cc2;
2250  uint8_t *cap = s1->a53_buf_ref->data;
2251 
2252  memset(s1->a53_buf_ref->data + old_size, 0, cc_count * 3);
2253  for (i = 0; i < cc_count && get_bits_left(&gb) >= 26; i++) {
2254  skip_bits(&gb, 2); // priority
2255  field = get_bits(&gb, 2);
2256  skip_bits(&gb, 5); // line_offset
2257  cc1 = get_bits(&gb, 8);
2258  cc2 = get_bits(&gb, 8);
2259  skip_bits(&gb, 1); // marker
2260 
2261  if (!field) { // forbidden
2262  cap[0] = cap[1] = cap[2] = 0x00;
2263  } else {
2264  field = (field == 2 ? 1 : 0);
2265  if (!s1->mpeg_enc_ctx.top_field_first) field = !field;
2266  cap[0] = 0x04 | field;
2267  cap[1] = ff_reverse[cc1];
2268  cap[2] = ff_reverse[cc2];
2269  }
2270  cap += 3;
2271  }
2272  }
2273             avctx->properties |= FF_CODEC_PROPERTY_CLOSED_CAPTIONS;
2274         }
2275  return 1;
2276  } else if (buf_size >= 11 &&
2277  p[0] == 'C' && p[1] == 'C' && p[2] == 0x01 && p[3] == 0xf8) {
2278  /* extract DVD CC data
2279  *
2280  * uint32_t user_data_start_code 0x000001B2 (big endian)
2281  * uint16_t user_identifier 0x4343 "CC"
2282  * uint8_t user_data_type_code 0x01
2283  * uint8_t caption_block_size 0xF8
2284  * uint8_t
2285  * bit 7 caption_odd_field_first 1=odd field (CC1/CC2) first 0=even field (CC3/CC4) first
2286  * bit 6 caption_filler 0
2287  * bit 5:1 caption_block_count number of caption blocks (pairs of caption words = frames). Most DVDs use 15 per start of GOP.
2288  * bit 0 caption_extra_field_added 1=one additional caption word
2289  *
2290  * struct caption_field_block {
2291  * uint8_t
2292  * bit 7:1 caption_filler 0x7F (all 1s)
2293  * bit 0 caption_field_odd 1=odd field (this is CC1/CC2) 0=even field (this is CC3/CC4)
2294  * uint8_t caption_first_byte
2295  * uint8_t caption_second_byte
2296  * } caption_block[(caption_block_count * 2) + caption_extra_field_added];
2297  *
2298  * Some DVDs encode caption data for both fields with caption_field_odd=1. The only way to decode the fields
2299  * correctly is to start on the field indicated by caption_odd_field_first and count between odd/even fields.
2300  * Don't assume that the first caption word is the odd field. There do exist MPEG files in the wild that start
2301  * on the even field. There also exist DVDs in the wild that encode an odd field count and the
2302  * caption_extra_field_added/caption_odd_field_first bits change per packet to allow that. */
2303  int cc_count = 0;
2304  int i, ret;
2305  // There is a caption count field in the data, but it is often
2306  // incorrect. So count the number of captions present.
2307  for (i = 5; i + 6 <= buf_size && ((p[i] & 0xfe) == 0xfe); i += 6)
2308  cc_count++;
2309  // Transform the DVD format into A53 Part 4 format
2310  if (cc_count > 0) {
2311  int old_size = s1->a53_buf_ref ? s1->a53_buf_ref->size : 0;
2312  const uint64_t new_size = (old_size + cc_count
2313  * UINT64_C(6));
2314  if (new_size > 3*A53_MAX_CC_COUNT)
2315  return AVERROR(EINVAL);
2316 
2317  ret = av_buffer_realloc(&s1->a53_buf_ref, new_size);
2318  if (ret >= 0) {
2319  uint8_t field1 = !!(p[4] & 0x80);
2320  uint8_t *cap = s1->a53_buf_ref->data;
2321  p += 5;
2322  for (i = 0; i < cc_count; i++) {
2323  cap[0] = (p[0] == 0xff && field1) ? 0xfc : 0xfd;
2324  cap[1] = p[1];
2325  cap[2] = p[2];
2326  cap[3] = (p[3] == 0xff && !field1) ? 0xfc : 0xfd;
2327  cap[4] = p[4];
2328  cap[5] = p[5];
2329  cap += 6;
2330  p += 6;
2331  }
2332  }
2333             avctx->properties |= FF_CODEC_PROPERTY_CLOSED_CAPTIONS;
2334         }
2335  return 1;
2336  }
2337  return 0;
2338 }
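
The three code paths above only accumulate caption bytes in s1->a53_buf_ref; the buffer is attached to output frames elsewhere in this file as AV_FRAME_DATA_A53_CC side data. As a hedged illustration (not part of mpeg12dec.c; the helper name dump_a53_cc_side_data is hypothetical), an application consuming this decoder could read the exported caption triplets roughly like this:

/* Illustrative sketch only -- not part of mpeg12dec.c. Reads the A/53 Part 4
 * caption bytes that the decoder exports on decoded frames. */
#include <stdio.h>
#include <libavutil/frame.h>

static void dump_a53_cc_side_data(const AVFrame *frame)
{
    const AVFrameSideData *sd = av_frame_get_side_data(frame, AV_FRAME_DATA_A53_CC);
    if (!sd)
        return;
    /* The payload is a sequence of 3-byte cc_data() constructs:
     * one cc_valid/cc_type byte followed by the two caption bytes. */
    for (size_t i = 0; i + 3 <= sd->size; i += 3)
        printf("cc_type=%d bytes=%02x %02x\n",
               sd->data[i] & 3, sd->data[i + 1], sd->data[i + 2]);
}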
2339 
2340 static void mpeg_decode_user_data(AVCodecContext *avctx,
2341                                   const uint8_t *p, int buf_size)
2342 {
2343  Mpeg1Context *s = avctx->priv_data;
2344  const uint8_t *buf_end = p + buf_size;
2345  Mpeg1Context *s1 = avctx->priv_data;
2346 
2347 #if 0
2348  int i;
2349  for(i=0; !(!p[i-2] && !p[i-1] && p[i]==1) && i<buf_size; i++){
2350  av_log(avctx, AV_LOG_ERROR, "%c", p[i]);
2351  }
2352  av_log(avctx, AV_LOG_ERROR, "\n");
2353 #endif
2354 
2355  if (buf_size > 29){
2356  int i;
2357  for(i=0; i<20; i++)
2358  if (!memcmp(p+i, "\0TMPGEXS\0", 9)){
2359  s->tmpgexs= 1;
2360  }
2361  }
2362  /* we parse the DTG active format information */
2363  if (buf_end - p >= 5 &&
2364  p[0] == 'D' && p[1] == 'T' && p[2] == 'G' && p[3] == '1') {
2365  int flags = p[4];
2366  p += 5;
2367  if (flags & 0x80) {
2368  /* skip event id */
2369  p += 2;
2370  }
2371  if (flags & 0x40) {
2372  if (buf_end - p < 1)
2373  return;
2374  s1->has_afd = 1;
2375  s1->afd = p[0] & 0x0f;
2376  }
2377  } else if (buf_end - p >= 6 &&
2378  p[0] == 'J' && p[1] == 'P' && p[2] == '3' && p[3] == 'D' &&
2379  p[4] == 0x03) { // S3D_video_format_length
2380  // the 0x7F mask ignores the reserved_bit value
2381  const uint8_t S3D_video_format_type = p[5] & 0x7F;
2382 
2383  if (S3D_video_format_type == 0x03 ||
2384  S3D_video_format_type == 0x04 ||
2385  S3D_video_format_type == 0x08 ||
2386  S3D_video_format_type == 0x23) {
2387 
2388  s1->has_stereo3d = 1;
2389 
2390  switch (S3D_video_format_type) {
2391  case 0x03:
2392  s1->stereo3d.type = AV_STEREO3D_SIDEBYSIDE;
2393  break;
2394  case 0x04:
2395  s1->stereo3d.type = AV_STEREO3D_TOPBOTTOM;
2396  break;
2397  case 0x08:
2398  s1->stereo3d.type = AV_STEREO3D_2D;
2399  break;
2400  case 0x23:
2401  s1->stereo3d.type = AV_STEREO3D_SIDEBYSIDE_QUINCUNX;
2402  break;
2403  }
2404  }
2405  } else if (mpeg_decode_a53_cc(avctx, p, buf_size)) {
2406  return;
2407  }
2408 }
2409 
2410 static int mpeg_decode_gop(AVCodecContext *avctx,
2411                            const uint8_t *buf, int buf_size)
2412 {
2413  Mpeg1Context *s1 = avctx->priv_data;
2414  MpegEncContext *s = &s1->mpeg_enc_ctx;
2415  int broken_link;
2416  int64_t tc;
2417 
2418  int ret = init_get_bits8(&s->gb, buf, buf_size);
2419  if (ret < 0)
2420  return ret;
2421 
2422  tc = s1->timecode_frame_start = get_bits(&s->gb, 25);
2423 
2424  s1->closed_gop = get_bits1(&s->gb);
2425  /* broken_link indicates that after editing the
2426  * reference frames of the first B-Frames after GOP I-Frame
2427  * are missing (open gop) */
2428  broken_link = get_bits1(&s->gb);
2429 
2430  if (s->avctx->debug & FF_DEBUG_PICT_INFO) {
2431  char tcbuf[AV_TIMECODE_STR_SIZE];
2432         av_timecode_make_mpeg_tc_string(tcbuf, tc);
2433         av_log(s->avctx, AV_LOG_DEBUG,
2434  "GOP (%s) closed_gop=%d broken_link=%d\n",
2435  tcbuf, s1->closed_gop, broken_link);
2436  }
2437 
2438  return 0;
2439 }
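
For reference, the 25-bit time_code read above packs drop_frame_flag (1 bit), hours (5), minutes (6), a marker bit (1), seconds (6) and pictures (6). The following standalone sketch (illustrative only, not part of mpeg12dec.c; the function name print_gop_timecode is hypothetical) unpacks it the same way av_timecode_make_mpeg_tc_string() does:

/* Illustrative sketch only -- not part of mpeg12dec.c. Unpacks the 25-bit
 * GOP header time_code into its fixed-width fields. */
#include <stdio.h>
#include <stdint.h>

static void print_gop_timecode(uint32_t tc25)
{
    int drop    = (tc25 >> 24) & 1;     /* drop_frame_flag */
    int hours   = (tc25 >> 19) & 0x1f;  /* time_code_hours */
    int minutes = (tc25 >> 13) & 0x3f;  /* time_code_minutes */
    /* bit 12 is a marker bit and carries no timecode information */
    int seconds = (tc25 >>  6) & 0x3f;  /* time_code_seconds */
    int frames  =  tc25        & 0x3f;  /* time_code_pictures */

    printf("%02d:%02d:%02d%c%02d\n", hours, minutes, seconds,
           drop ? ';' : ':', frames);
}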
2440 
2441 static int decode_chunks(AVCodecContext *avctx, AVFrame *picture,
2442  int *got_output, const uint8_t *buf, int buf_size)
2443 {
2444  Mpeg1Context *s = avctx->priv_data;
2445  MpegEncContext *s2 = &s->mpeg_enc_ctx;
2446  const uint8_t *buf_ptr = buf;
2447  const uint8_t *buf_end = buf + buf_size;
2448  int ret, input_size;
2449  int last_code = 0, skip_frame = 0;
2450  int picture_start_code_seen = 0;
2451 
2452  for (;;) {
2453  /* find next start code */
2454  uint32_t start_code = -1;
2455  buf_ptr = avpriv_find_start_code(buf_ptr, buf_end, &start_code);
2456  if (start_code > 0x1ff) {
2457  if (!skip_frame) {
2458  if (HAVE_THREADS &&
2459  (avctx->active_thread_type & FF_THREAD_SLICE) &&
2460  !avctx->hwaccel) {
2461  int i;
2462  av_assert0(avctx->thread_count > 1);
2463 
2464  avctx->execute(avctx, slice_decode_thread,
2465  &s2->thread_context[0], NULL,
2466  s->slice_count, sizeof(void *));
2467  for (i = 0; i < s->slice_count; i++)
2468  s2->er.error_count += s2->thread_context[i]->er.error_count;
2469  }
2470 
2471  ret = slice_end(avctx, picture);
2472  if (ret < 0)
2473  return ret;
2474  else if (ret) {
2475  // FIXME: merge with the stuff in mpeg_decode_slice
2476  if (s2->last_picture_ptr || s2->low_delay || s2->pict_type == AV_PICTURE_TYPE_B)
2477  *got_output = 1;
2478  }
2479  }
2480  s2->pict_type = 0;
2481 
2482  if (avctx->err_recognition & AV_EF_EXPLODE && s2->er.error_count)
2483  return AVERROR_INVALIDDATA;
2484 
2485 #if FF_API_FLAG_TRUNCATED
2486  return FFMAX(0, buf_ptr - buf - s2->parse_context.last_index);
2487 #else
2488  return FFMAX(0, buf_ptr - buf);
2489 #endif
2490  }
2491 
2492  input_size = buf_end - buf_ptr;
2493 
2494  if (avctx->debug & FF_DEBUG_STARTCODE)
2495  av_log(avctx, AV_LOG_DEBUG, "%3"PRIX32" at %"PTRDIFF_SPECIFIER" left %d\n",
2496  start_code, buf_ptr - buf, input_size);
2497 
2498  /* prepare data for next start code */
2499  switch (start_code) {
2500  case SEQ_START_CODE:
2501  if (last_code == 0) {
2502  mpeg1_decode_sequence(avctx, buf_ptr, input_size);
2503  if (buf != avctx->extradata)
2504  s->sync = 1;
2505  } else {
2506  av_log(avctx, AV_LOG_ERROR,
2507  "ignoring SEQ_START_CODE after %X\n", last_code);
2508  if (avctx->err_recognition & AV_EF_EXPLODE)
2509  return AVERROR_INVALIDDATA;
2510  }
2511  break;
2512 
2513  case PICTURE_START_CODE:
2514  if (picture_start_code_seen && s2->picture_structure == PICT_FRAME) {
2515  /* If it's a frame picture, there can't be more than one picture header.
2516  Yet, it does happen and we need to handle it. */
2517  av_log(avctx, AV_LOG_WARNING, "ignoring extra picture following a frame-picture\n");
2518  break;
2519  }
2520  picture_start_code_seen = 1;
2521 
2522  if (s2->width <= 0 || s2->height <= 0) {
2523  av_log(avctx, AV_LOG_ERROR, "Invalid frame dimensions %dx%d.\n",
2524  s2->width, s2->height);
2525  return AVERROR_INVALIDDATA;
2526  }
2527 
2528  if (s->tmpgexs){
2529  s2->intra_dc_precision= 3;
2530  s2->intra_matrix[0]= 1;
2531  }
2532  if (HAVE_THREADS && (avctx->active_thread_type & FF_THREAD_SLICE) &&
2533  !avctx->hwaccel && s->slice_count) {
2534  int i;
2535 
2536  avctx->execute(avctx, slice_decode_thread,
2537  s2->thread_context, NULL,
2538  s->slice_count, sizeof(void *));
2539  for (i = 0; i < s->slice_count; i++)
2540  s2->er.error_count += s2->thread_context[i]->er.error_count;
2541  s->slice_count = 0;
2542  }
2543  if (last_code == 0 || last_code == SLICE_MIN_START_CODE) {
2544  ret = mpeg_decode_postinit(avctx);
2545  if (ret < 0) {
2546  av_log(avctx, AV_LOG_ERROR,
2547  "mpeg_decode_postinit() failure\n");
2548  return ret;
2549  }
2550 
2551  /* We have a complete image: we try to decompress it. */
2552  if (mpeg1_decode_picture(avctx, buf_ptr, input_size) < 0)
2553  s2->pict_type = 0;
2554  s->first_slice = 1;
2555  last_code = PICTURE_START_CODE;
2556  } else {
2557  av_log(avctx, AV_LOG_ERROR,
2558  "ignoring pic after %X\n", last_code);
2559  if (avctx->err_recognition & AV_EF_EXPLODE)
2560  return AVERROR_INVALIDDATA;
2561  }
2562  break;
2563  case EXT_START_CODE:
2564  ret = init_get_bits8(&s2->gb, buf_ptr, input_size);
2565  if (ret < 0)
2566  return ret;
2567 
2568  switch (get_bits(&s2->gb, 4)) {
2569  case 0x1:
2570  if (last_code == 0) {
2571                     mpeg_decode_sequence_extension(s);
2572                 } else {
2573  av_log(avctx, AV_LOG_ERROR,
2574  "ignoring seq ext after %X\n", last_code);
2575  if (avctx->err_recognition & AV_EF_EXPLODE)
2576  return AVERROR_INVALIDDATA;
2577  }
2578  break;
2579  case 0x2:
2580                 mpeg_decode_sequence_display_extension(s);
2581                 break;
2582  case 0x3:
2583                 mpeg_decode_quant_matrix_extension(s2);
2584                 break;
2585  case 0x7:
2586                 mpeg_decode_picture_display_extension(s);
2587                 break;
2588  case 0x8:
2589  if (last_code == PICTURE_START_CODE) {
2590                     ret = mpeg_decode_picture_coding_extension(s);
2591                     if (ret < 0)
2592  return ret;
2593  } else {
2594  av_log(avctx, AV_LOG_ERROR,
2595  "ignoring pic cod ext after %X\n", last_code);
2596  if (avctx->err_recognition & AV_EF_EXPLODE)
2597  return AVERROR_INVALIDDATA;
2598  }
2599  break;
2600  }
2601  break;
2602  case USER_START_CODE:
2603  mpeg_decode_user_data(avctx, buf_ptr, input_size);
2604  break;
2605  case GOP_START_CODE:
2606  if (last_code == 0) {
2607  s2->first_field = 0;
2608  ret = mpeg_decode_gop(avctx, buf_ptr, input_size);
2609  if (ret < 0)
2610  return ret;
2611  s->sync = 1;
2612  } else {
2613  av_log(avctx, AV_LOG_ERROR,
2614  "ignoring GOP_START_CODE after %X\n", last_code);
2615  if (avctx->err_recognition & AV_EF_EXPLODE)
2616  return AVERROR_INVALIDDATA;
2617  }
2618  break;
2619  default:
2620             if (start_code >= SLICE_MIN_START_CODE &&
2621                 start_code <= SLICE_MAX_START_CODE && last_code == PICTURE_START_CODE) {
2622  if (s2->progressive_sequence && !s2->progressive_frame) {
2623  s2->progressive_frame = 1;
2624  av_log(s2->avctx, AV_LOG_ERROR,
2625  "interlaced frame in progressive sequence, ignoring\n");
2626  }
2627 
2628  if (s2->picture_structure == 0 ||
2629  (s2->progressive_frame && s2->picture_structure != PICT_FRAME)) {
2630  av_log(s2->avctx, AV_LOG_ERROR,
2631  "picture_structure %d invalid, ignoring\n",
2632  s2->picture_structure);
2633  s2->picture_structure = PICT_FRAME;
2634  }
2635 
2636  if (s2->progressive_sequence && !s2->frame_pred_frame_dct)
2637  av_log(s2->avctx, AV_LOG_WARNING, "invalid frame_pred_frame_dct\n");
2638 
2639  if (s2->picture_structure == PICT_FRAME) {
2640  s2->first_field = 0;
2641  s2->v_edge_pos = 16 * s2->mb_height;
2642  } else {
2643  s2->first_field ^= 1;
2644  s2->v_edge_pos = 8 * s2->mb_height;
2645  memset(s2->mbskip_table, 0, s2->mb_stride * s2->mb_height);
2646  }
2647  }
2648             if (start_code >= SLICE_MIN_START_CODE &&
2649                 start_code <= SLICE_MAX_START_CODE && last_code != 0) {
2650  const int field_pic = s2->picture_structure != PICT_FRAME;
2651  int mb_y = start_code - SLICE_MIN_START_CODE;
2652  last_code = SLICE_MIN_START_CODE;
2653  if (s2->codec_id != AV_CODEC_ID_MPEG1VIDEO && s2->mb_height > 2800/16)
2654  mb_y += (*buf_ptr&0xE0)<<2;
2655 
2656  mb_y <<= field_pic;
2657  if (s2->picture_structure == PICT_BOTTOM_FIELD)
2658  mb_y++;
2659 
2660  if (buf_end - buf_ptr < 2) {
2661  av_log(s2->avctx, AV_LOG_ERROR, "slice too small\n");
2662  return AVERROR_INVALIDDATA;
2663  }
2664 
2665  if (mb_y >= s2->mb_height) {
2666  av_log(s2->avctx, AV_LOG_ERROR,
2667  "slice below image (%d >= %d)\n", mb_y, s2->mb_height);
2668  return AVERROR_INVALIDDATA;
2669  }
2670 
2671  if (!s2->last_picture_ptr) {
2672  /* Skip B-frames if we do not have reference frames and
2673  * GOP is not closed. */
2674  if (s2->pict_type == AV_PICTURE_TYPE_B) {
2675  if (!s->closed_gop) {
2676  skip_frame = 1;
2677  av_log(s2->avctx, AV_LOG_DEBUG,
2678  "Skipping B slice due to open GOP\n");
2679  break;
2680  }
2681  }
2682  }
2683  if (s2->pict_type == AV_PICTURE_TYPE_I || (s2->avctx->flags2 & AV_CODEC_FLAG2_SHOW_ALL))
2684  s->sync = 1;
2685  if (!s2->next_picture_ptr) {
2686  /* Skip P-frames if we do not have a reference frame or
2687  * we have an invalid header. */
2688  if (s2->pict_type == AV_PICTURE_TYPE_P && !s->sync) {
2689  skip_frame = 1;
2690  av_log(s2->avctx, AV_LOG_DEBUG,
2691  "Skipping P slice due to !sync\n");
2692  break;
2693  }
2694  }
2695  if ((avctx->skip_frame >= AVDISCARD_NONREF &&
2696  s2->pict_type == AV_PICTURE_TYPE_B) ||
2697  (avctx->skip_frame >= AVDISCARD_NONKEY &&
2698  s2->pict_type != AV_PICTURE_TYPE_I) ||
2699  avctx->skip_frame >= AVDISCARD_ALL) {
2700  skip_frame = 1;
2701  break;
2702  }
2703 
2704  if (!s->mpeg_enc_ctx_allocated)
2705  break;
2706 
2707  if (s2->codec_id == AV_CODEC_ID_MPEG2VIDEO) {
2708  if (mb_y < avctx->skip_top ||
2709  mb_y >= s2->mb_height - avctx->skip_bottom)
2710  break;
2711  }
2712 
2713  if (!s2->pict_type) {
2714  av_log(avctx, AV_LOG_ERROR, "Missing picture start code\n");
2715  if (avctx->err_recognition & AV_EF_EXPLODE)
2716  return AVERROR_INVALIDDATA;
2717  break;
2718  }
2719 
2720  if (s->first_slice) {
2721  skip_frame = 0;
2722  s->first_slice = 0;
2723  if ((ret = mpeg_field_start(s2, buf, buf_size)) < 0)
2724  return ret;
2725  }
2726  if (!s2->current_picture_ptr) {
2727  av_log(avctx, AV_LOG_ERROR,
2728  "current_picture not initialized\n");
2729  return AVERROR_INVALIDDATA;
2730  }
2731 
2732  if (HAVE_THREADS &&
2733  (avctx->active_thread_type & FF_THREAD_SLICE) &&
2734  !avctx->hwaccel) {
2735  int threshold = (s2->mb_height * s->slice_count +
2736  s2->slice_context_count / 2) /
2737  s2->slice_context_count;
2738  av_assert0(avctx->thread_count > 1);
2739  if (threshold <= mb_y) {
2740  MpegEncContext *thread_context = s2->thread_context[s->slice_count];
2741 
2742  thread_context->start_mb_y = mb_y;
2743  thread_context->end_mb_y = s2->mb_height;
2744  if (s->slice_count) {
2745  s2->thread_context[s->slice_count - 1]->end_mb_y = mb_y;
2746  ret = ff_update_duplicate_context(thread_context, s2);
2747  if (ret < 0)
2748  return ret;
2749  }
2750  ret = init_get_bits8(&thread_context->gb, buf_ptr, input_size);
2751  if (ret < 0)
2752  return ret;
2753  s->slice_count++;
2754  }
2755  buf_ptr += 2; // FIXME add minimum number of bytes per slice
2756  } else {
2757  ret = mpeg_decode_slice(s2, mb_y, &buf_ptr, input_size);
2758  emms_c();
2759 
2760  if (ret < 0) {
2761  if (avctx->err_recognition & AV_EF_EXPLODE)
2762  return ret;
2763  if (s2->resync_mb_x >= 0 && s2->resync_mb_y >= 0)
2764  ff_er_add_slice(&s2->er, s2->resync_mb_x,
2765  s2->resync_mb_y, s2->mb_x, s2->mb_y,
2766                                         ER_AC_ERROR | ER_DC_ERROR | ER_MV_ERROR);
2767                     } else {
2768  ff_er_add_slice(&s2->er, s2->resync_mb_x,
2769  s2->resync_mb_y, s2->mb_x - 1, s2->mb_y,
2770                                         ER_AC_END | ER_DC_END | ER_MV_END);
2771                     }
2772  }
2773  }
2774  break;
2775  }
2776  }
2777 }
2778 
2779 static int mpeg_decode_frame(AVCodecContext *avctx, AVFrame *picture,
2780  int *got_output, AVPacket *avpkt)
2781 {
2782  const uint8_t *buf = avpkt->data;
2783  int ret;
2784  int buf_size = avpkt->size;
2785  Mpeg1Context *s = avctx->priv_data;
2786  MpegEncContext *s2 = &s->mpeg_enc_ctx;
2787 
2788  if (buf_size == 0 || (buf_size == 4 && AV_RB32(buf) == SEQ_END_CODE)) {
2789  /* special case for last picture */
2790  if (s2->low_delay == 0 && s2->next_picture_ptr) {
2791  int ret = av_frame_ref(picture, s2->next_picture_ptr->f);
2792  if (ret < 0)
2793  return ret;
2794 
2795  s2->next_picture_ptr = NULL;
2796 
2797  *got_output = 1;
2798  }
2799  return buf_size;
2800  }
2801 
2802 #if FF_API_FLAG_TRUNCATED
2803  if (s2->avctx->flags & AV_CODEC_FLAG_TRUNCATED) {
2804  int next = ff_mpeg1_find_frame_end(&s2->parse_context, buf,
2805  buf_size, NULL);
2806 
2807  if (ff_combine_frame(&s2->parse_context, next,
2808  (const uint8_t **) &buf, &buf_size) < 0)
2809  return buf_size;
2810  }
2811 #endif
2812 
2813  if (s->mpeg_enc_ctx_allocated == 0 && ( s2->codec_tag == AV_RL32("VCR2")
2814  || s2->codec_tag == AV_RL32("BW10")
2815  ))
2816  vcr2_init_sequence(avctx);
2817 
2818  s->slice_count = 0;
2819 
2820  if (avctx->extradata && !s->extradata_decoded) {
2821  ret = decode_chunks(avctx, picture, got_output,
2822  avctx->extradata, avctx->extradata_size);
2823  if (*got_output) {
2824  av_log(avctx, AV_LOG_ERROR, "picture in extradata\n");
2825  av_frame_unref(picture);
2826  *got_output = 0;
2827  }
2828  s->extradata_decoded = 1;
2829  if (ret < 0 && (avctx->err_recognition & AV_EF_EXPLODE)) {
2830  s2->current_picture_ptr = NULL;
2831  return ret;
2832  }
2833  }
2834 
2835  ret = decode_chunks(avctx, picture, got_output, buf, buf_size);
2836  if (ret<0 || *got_output) {
2837  s2->current_picture_ptr = NULL;
2838 
2839  if (s->timecode_frame_start != -1 && *got_output) {
2840  char tcbuf[AV_TIMECODE_STR_SIZE];
2841  AVFrameSideData *tcside = av_frame_new_side_data(picture,
2842                                                              AV_FRAME_DATA_GOP_TIMECODE,
2843                                                              sizeof(int64_t));
2844  if (!tcside)
2845  return AVERROR(ENOMEM);
2846  memcpy(tcside->data, &s->timecode_frame_start, sizeof(int64_t));
2847 
2848  av_timecode_make_mpeg_tc_string(tcbuf, s->timecode_frame_start);
2849  av_dict_set(&picture->metadata, "timecode", tcbuf, 0);
2850 
2851  s->timecode_frame_start = -1;
2852  }
2853  }
2854 
2855  return ret;
2856 }
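
mpeg_decode_frame() is registered as the FF_CODEC_DECODE_CB callback of the decoders defined below, so from an application's point of view these decoders are driven through the generic send/receive API. A minimal, hedged sketch of such a caller follows (not part of mpeg12dec.c; decode_packet is a hypothetical helper and error handling is reduced to the essentials):

/* Illustrative sketch only -- not part of mpeg12dec.c. Feeds one packet to an
 * opened MPEG-1/2 decoder context and drains all frames it produces. */
#include <libavcodec/avcodec.h>

static int decode_packet(AVCodecContext *dec, AVPacket *pkt, AVFrame *frame)
{
    int ret = avcodec_send_packet(dec, pkt); /* pkt == NULL flushes the decoder */
    if (ret < 0)
        return ret;

    while ((ret = avcodec_receive_frame(dec, frame)) >= 0) {
        /* frame->metadata may carry the "timecode" entry set above; frame side
         * data may carry GOP timecode, A53 closed captions, AFD or stereo3d. */
        av_frame_unref(frame);
    }
    return (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) ? 0 : ret;
}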
2857 
2858 static void flush(AVCodecContext *avctx)
2859 {
2860  Mpeg1Context *s = avctx->priv_data;
2861 
2862  s->sync = 0;
2863  s->closed_gop = 0;
2864 
2865  av_buffer_unref(&s->a53_buf_ref);
2866  ff_mpeg_flush(avctx);
2867 }
2868 
2869 static av_cold int mpeg_decode_end(AVCodecContext *avctx)
2870 {
2871  Mpeg1Context *s = avctx->priv_data;
2872 
2873  if (s->mpeg_enc_ctx_allocated)
2874  ff_mpv_common_end(&s->mpeg_enc_ctx);
2875  av_buffer_unref(&s->a53_buf_ref);
2876  return 0;
2877 }
2878 
2879 const FFCodec ff_mpeg1video_decoder = {
2880     .p.name = "mpeg1video",
2881  CODEC_LONG_NAME("MPEG-1 video"),
2882  .p.type = AVMEDIA_TYPE_VIDEO,
2883  .p.id = AV_CODEC_ID_MPEG1VIDEO,
2884  .priv_data_size = sizeof(Mpeg1Context),
2885     .init = mpeg_decode_init,
2886     .close = mpeg_decode_end,
2887     FF_CODEC_DECODE_CB(mpeg_decode_frame),
2888     .p.capabilities = AV_CODEC_CAP_DRAW_HORIZ_BAND | AV_CODEC_CAP_DR1 |
2889 #if FF_API_FLAG_TRUNCATED
2890  AV_CODEC_CAP_TRUNCATED |
2891 #endif
2892                       AV_CODEC_CAP_DELAY | AV_CODEC_CAP_SLICE_THREADS,
2893     .caps_internal = FF_CODEC_CAP_SKIP_FRAME_FILL_PARAM,
2894  .flush = flush,
2895  .p.max_lowres = 3,
2896  UPDATE_THREAD_CONTEXT(mpeg_decode_update_thread_context),
2897  .hw_configs = (const AVCodecHWConfigInternal *const []) {
2898 #if CONFIG_MPEG1_NVDEC_HWACCEL
2899  HWACCEL_NVDEC(mpeg1),
2900 #endif
2901 #if CONFIG_MPEG1_VDPAU_HWACCEL
2902  HWACCEL_VDPAU(mpeg1),
2903 #endif
2904 #if CONFIG_MPEG1_VIDEOTOOLBOX_HWACCEL
2905  HWACCEL_VIDEOTOOLBOX(mpeg1),
2906 #endif
2907  NULL
2908  },
2909 };
2910 
2911 const FFCodec ff_mpeg2video_decoder = {
2912     .p.name = "mpeg2video",
2913  CODEC_LONG_NAME("MPEG-2 video"),
2914  .p.type = AVMEDIA_TYPE_VIDEO,
2915  .p.id = AV_CODEC_ID_MPEG2VIDEO,
2916  .priv_data_size = sizeof(Mpeg1Context),
2917     .init = mpeg_decode_init,
2918     .close = mpeg_decode_end,
2919     FF_CODEC_DECODE_CB(mpeg_decode_frame),
2920     .p.capabilities = AV_CODEC_CAP_DRAW_HORIZ_BAND | AV_CODEC_CAP_DR1 |
2921 #if FF_API_FLAG_TRUNCATED
2922  AV_CODEC_CAP_TRUNCATED |
2923 #endif
2924                       AV_CODEC_CAP_DELAY | AV_CODEC_CAP_SLICE_THREADS,
2925     .caps_internal = FF_CODEC_CAP_SKIP_FRAME_FILL_PARAM,
2926  .flush = flush,
2927  .p.max_lowres = 3,
2928     .p.profiles = NULL_IF_CONFIG_SMALL(ff_mpeg2_video_profiles),
2929     .hw_configs = (const AVCodecHWConfigInternal *const []) {
2930 #if CONFIG_MPEG2_DXVA2_HWACCEL
2931  HWACCEL_DXVA2(mpeg2),
2932 #endif
2933 #if CONFIG_MPEG2_D3D11VA_HWACCEL
2934  HWACCEL_D3D11VA(mpeg2),
2935 #endif
2936 #if CONFIG_MPEG2_D3D11VA2_HWACCEL
2937  HWACCEL_D3D11VA2(mpeg2),
2938 #endif
2939 #if CONFIG_MPEG2_NVDEC_HWACCEL
2940  HWACCEL_NVDEC(mpeg2),
2941 #endif
2942 #if CONFIG_MPEG2_VAAPI_HWACCEL
2943  HWACCEL_VAAPI(mpeg2),
2944 #endif
2945 #if CONFIG_MPEG2_VDPAU_HWACCEL
2946  HWACCEL_VDPAU(mpeg2),
2947 #endif
2948 #if CONFIG_MPEG2_VIDEOTOOLBOX_HWACCEL
2949  HWACCEL_VIDEOTOOLBOX(mpeg2),
2950 #endif
2951  NULL
2952  },
2953 };
2954 
2955 //legacy decoder
2956 const FFCodec ff_mpegvideo_decoder = {
2957     .p.name = "mpegvideo",
2958  CODEC_LONG_NAME("MPEG-1 video"),
2959  .p.type = AVMEDIA_TYPE_VIDEO,
2960  .p.id = AV_CODEC_ID_MPEG2VIDEO,
2961  .priv_data_size = sizeof(Mpeg1Context),
2962     .init = mpeg_decode_init,
2963     .close = mpeg_decode_end,
2964     FF_CODEC_DECODE_CB(mpeg_decode_frame),
2965     .p.capabilities = AV_CODEC_CAP_DRAW_HORIZ_BAND | AV_CODEC_CAP_DR1 |
2966 #if FF_API_FLAG_TRUNCATED
2967  AV_CODEC_CAP_TRUNCATED |
2968 #endif
2969                       AV_CODEC_CAP_DELAY | AV_CODEC_CAP_SLICE_THREADS,
2970     .caps_internal = FF_CODEC_CAP_SKIP_FRAME_FILL_PARAM,
2971  .flush = flush,
2972  .p.max_lowres = 3,
2973 };
2974 
2975 typedef struct IPUContext {
2976     MpegEncContext m;
2977 
2978  int flags;
2979  DECLARE_ALIGNED(32, int16_t, block)[6][64];
2980 } IPUContext;
2981 
2982 static int ipu_decode_frame(AVCodecContext *avctx, AVFrame *frame,
2983                             int *got_frame, AVPacket *avpkt)
2984 {
2985  IPUContext *s = avctx->priv_data;
2986  MpegEncContext *m = &s->m;
2987  GetBitContext *gb = &m->gb;
2988  int ret;
2989 
2990  ret = ff_get_buffer(avctx, frame, 0);
2991  if (ret < 0)
2992  return ret;
2993 
2994  ret = init_get_bits8(gb, avpkt->data, avpkt->size);
2995  if (ret < 0)
2996  return ret;
2997 
2998  s->flags = get_bits(gb, 8);
2999  m->intra_dc_precision = s->flags & 3;
3000  m->q_scale_type = !!(s->flags & 0x40);
3001  m->intra_vlc_format = !!(s->flags & 0x20);
3002  m->alternate_scan = !!(s->flags & 0x10);
3003 
3004  if (s->flags & 0x10) {
3007  } else {
3010  }
3011 
3012  m->last_dc[0] = m->last_dc[1] = m->last_dc[2] = 1 << (7 + (s->flags & 3));
3013  m->qscale = 1;
3014 
3015  for (int y = 0; y < avctx->height; y += 16) {
3016  int intraquant;
3017 
3018  for (int x = 0; x < avctx->width; x += 16) {
3019  if (x || y) {
3020  if (!get_bits1(gb))
3021  return AVERROR_INVALIDDATA;
3022  }
3023  if (get_bits1(gb)) {
3024  intraquant = 0;
3025  } else {
3026  if (!get_bits1(gb))
3027  return AVERROR_INVALIDDATA;
3028  intraquant = 1;
3029  }
3030 
3031  if (s->flags & 4)
3032  skip_bits1(gb);
3033 
3034  if (intraquant)
3035  m->qscale = mpeg_get_qscale(m);
3036 
3037  memset(s->block, 0, sizeof(s->block));
3038 
3039  for (int n = 0; n < 6; n++) {
3040  if (s->flags & 0x80) {
3041                     ret = ff_mpeg1_decode_block_intra(&m->gb,
3042                                                       m->intra_matrix,
3043                                                       m->intra_scantable.permutated,
3044                                                       m->last_dc, s->block[n],
3045  n, m->qscale);
3046  if (ret >= 0)
3047  m->block_last_index[n] = ret;
3048  } else {
3049  ret = mpeg2_decode_block_intra(m, s->block[n], n);
3050  }
3051 
3052  if (ret < 0)
3053  return ret;
3054  }
3055 
3056  m->idsp.idct_put(frame->data[0] + y * frame->linesize[0] + x,
3057  frame->linesize[0], s->block[0]);
3058  m->idsp.idct_put(frame->data[0] + y * frame->linesize[0] + x + 8,
3059  frame->linesize[0], s->block[1]);
3060  m->idsp.idct_put(frame->data[0] + (y + 8) * frame->linesize[0] + x,
3061  frame->linesize[0], s->block[2]);
3062  m->idsp.idct_put(frame->data[0] + (y + 8) * frame->linesize[0] + x + 8,
3063  frame->linesize[0], s->block[3]);
3064  m->idsp.idct_put(frame->data[1] + (y >> 1) * frame->linesize[1] + (x >> 1),
3065  frame->linesize[1], s->block[4]);
3066  m->idsp.idct_put(frame->data[2] + (y >> 1) * frame->linesize[2] + (x >> 1),
3067  frame->linesize[2], s->block[5]);
3068  }
3069  }
3070 
3071  align_get_bits(gb);
3072  if (get_bits_left(gb) != 32)
3073  return AVERROR_INVALIDDATA;
3074 
3075  frame->pict_type = AV_PICTURE_TYPE_I;
3076  frame->key_frame = 1;
3077  *got_frame = 1;
3078 
3079  return avpkt->size;
3080 }
3081 
3082 static av_cold int ipu_decode_init(AVCodecContext *avctx)
3083 {
3084  IPUContext *s = avctx->priv_data;
3085  MpegEncContext *m = &s->m;
3086 
3087  avctx->pix_fmt = AV_PIX_FMT_YUV420P;
3088 
3089  ff_mpv_decode_init(m, avctx);
3090  ff_mpv_idct_init(m);
3091     ff_mpeg12_init_vlcs();
3092 
3093  for (int i = 0; i < 64; i++) {
3094  int j = m->idsp.idct_permutation[i];
3095         int v = ff_mpeg1_default_intra_matrix[i];
3096         m->intra_matrix[j] = v;
3097  m->chroma_intra_matrix[j] = v;
3098  }
3099 
3100  for (int i = 0; i < 64; i++) {
3101  int j = m->idsp.idct_permutation[i];
3102         int v = ff_mpeg1_default_non_intra_matrix[i];
3103         m->inter_matrix[j] = v;
3104  m->chroma_inter_matrix[j] = v;
3105  }
3106 
3107  return 0;
3108 }
3109 
3110 static av_cold int ipu_decode_end(AVCodecContext *avctx)
3111 {
3112  IPUContext *s = avctx->priv_data;
3113 
3114  ff_mpv_common_end(&s->m);
3115 
3116  return 0;
3117 }
3118 
3119 const FFCodec ff_ipu_decoder = {
3120     .p.name = "ipu",
3121  CODEC_LONG_NAME("IPU Video"),
3122  .p.type = AVMEDIA_TYPE_VIDEO,
3123  .p.id = AV_CODEC_ID_IPU,
3124  .priv_data_size = sizeof(IPUContext),
3125  .init = ipu_decode_init,
3126     FF_CODEC_DECODE_CB(ipu_decode_frame),
3127     .close = ipu_decode_end,
3128  .p.capabilities = AV_CODEC_CAP_DR1,
3129  .caps_internal = FF_CODEC_CAP_INIT_CLEANUP,
3130 };
PICT_FRAME
#define PICT_FRAME
Definition: mpegutils.h:38
vcr2_init_sequence
static int vcr2_init_sequence(AVCodecContext *avctx)
Definition: mpeg12dec.c:2151
ff_mpv_common_init
av_cold int ff_mpv_common_init(MpegEncContext *s)
init common structure for both encoder and decoder.
Definition: mpegvideo.c:682
hwconfig.h
AVCodecContext::hwaccel
const struct AVHWAccel * hwaccel
Hardware accelerator in use.
Definition: avcodec.h:1369
MB_TYPE_L0
#define MB_TYPE_L0
Definition: mpegutils.h:60
MV_TYPE_16X16
#define MV_TYPE_16X16
1 vector for the whole mb
Definition: mpegvideo.h:265
Mpeg1Context::has_afd
int has_afd
Definition: mpeg12dec.c:72
AV_LOG_WARNING
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:186
AV_TIMECODE_STR_SIZE
#define AV_TIMECODE_STR_SIZE
Definition: timecode.h:33
AV_PIX_FMT_CUDA
@ AV_PIX_FMT_CUDA
HW acceleration through CUDA.
Definition: pixfmt.h:253
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:64
level
uint8_t level
Definition: svq3.c:204
AV_EF_EXPLODE
#define AV_EF_EXPLODE
abort decoding on minor error detection
Definition: defs.h:51
Mpeg1Context::a53_buf_ref
AVBufferRef * a53_buf_ref
Definition: mpeg12dec.c:70
ff_mpeg2_aspect
const AVRational ff_mpeg2_aspect[16]
Definition: mpeg12data.c:380
FF_CODEC_CAP_INIT_CLEANUP
#define FF_CODEC_CAP_INIT_CLEANUP
The codec allows calling the close function for deallocation even if the init function returned a fai...
Definition: codec_internal.h:42
show_bits_long
static unsigned int show_bits_long(GetBitContext *s, int n)
Show 0-32 bits.
Definition: get_bits.h:474
mpeg_decode_a53_cc
static int mpeg_decode_a53_cc(AVCodecContext *avctx, const uint8_t *p, int buf_size)
Definition: mpeg12dec.c:2203
get_bits_left
static int get_bits_left(GetBitContext *gb)
Definition: get_bits.h:664
FMT_MPEG1
@ FMT_MPEG1
Definition: mpegutils.h:117
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
ff_mpv_export_qp_table
int ff_mpv_export_qp_table(const MpegEncContext *s, AVFrame *f, const Picture *p, int qp_type)
Definition: mpegvideo_dec.c:502
AV_STEREO3D_SIDEBYSIDE_QUINCUNX
@ AV_STEREO3D_SIDEBYSIDE_QUINCUNX
Views are next to each other, but when upscaling apply a checkerboard pattern.
Definition: stereo3d.h:114
FF_MPV_QSCALE_TYPE_MPEG2
#define FF_MPV_QSCALE_TYPE_MPEG2
Definition: mpegvideodec.h:41
mem_internal.h
mpeg_decode_frame
static int mpeg_decode_frame(AVCodecContext *avctx, AVFrame *picture, int *got_output, AVPacket *avpkt)
Definition: mpeg12dec.c:2779
MpegEncContext::gb
GetBitContext gb
Definition: mpegvideo.h:433
AV_EF_COMPLIANT
#define AV_EF_COMPLIANT
consider all spec non compliances as errors
Definition: defs.h:55
SEQ_END_CODE
#define SEQ_END_CODE
Definition: mpeg12.h:28
av_frame_new_side_data
AVFrameSideData * av_frame_new_side_data(AVFrame *frame, enum AVFrameSideDataType type, size_t size)
Add a new side data to a frame.
Definition: frame.c:679
check_scantable_index
#define check_scantable_index(ctx, x)
Definition: mpeg12dec.c:142
AV_FRAME_DATA_A53_CC
@ AV_FRAME_DATA_A53_CC
ATSC A53 Part 4 Closed Captions.
Definition: frame.h:59
MT_FIELD
#define MT_FIELD
Definition: mpeg12dec.c:654
EXT_START_CODE
#define EXT_START_CODE
Definition: cavs.h:39
MV_TYPE_16X8
#define MV_TYPE_16X8
2 vectors, one per 16x8 block
Definition: mpegvideo.h:267
ff_mbincr_vlc
VLC ff_mbincr_vlc
Definition: mpeg12.c:123
av_div_q
AVRational av_div_q(AVRational b, AVRational c)
Divide one rational by another.
Definition: rational.c:88
matrix
Definition: vc1dsp.c:42
AVPanScan
Pan Scan area.
Definition: defs.h:97
AVCodecContext::err_recognition
int err_recognition
Error recognition; may misdetect some more or less valid parts as errors.
Definition: avcodec.h:1351
SLICE_MAX_START_CODE
#define SLICE_MAX_START_CODE
Definition: cavs.h:38
MB_TYPE_16x8
#define MB_TYPE_16x8
Definition: mpegutils.h:48
get_bits_count
static int get_bits_count(const GetBitContext *s)
Definition: get_bits.h:256
ipu_decode_init
static av_cold int ipu_decode_init(AVCodecContext *avctx)
Definition: mpeg12dec.c:3082
ff_update_duplicate_context
int ff_update_duplicate_context(MpegEncContext *dst, const MpegEncContext *src)
Definition: mpegvideo.c:490
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:330
start_code
static const uint8_t start_code[]
Definition: videotoolboxenc.c:198
ff_mpv_report_decode_progress
void ff_mpv_report_decode_progress(MpegEncContext *s)
Definition: mpegvideo_dec.c:569
w
uint8_t w
Definition: llviddspenc.c:38
HWACCEL_DXVA2
#define HWACCEL_DXVA2(codec)
Definition: hwconfig.h:67
ff_mpegvideo_decoder
const FFCodec ff_mpegvideo_decoder
Definition: mpeg12dec.c:2956
internal.h
AVPacket::data
uint8_t * data
Definition: packet.h:374
ipu_decode_end
static av_cold int ipu_decode_end(AVCodecContext *avctx)
Definition: mpeg12dec.c:3110
mpeg_decode_mb
static int mpeg_decode_mb(MpegEncContext *s, int16_t block[12][64])
Definition: mpeg12dec.c:659
Mpeg1Context::closed_gop
int closed_gop
Definition: mpeg12dec.c:81
mpeg2_decode_block_intra
static int mpeg2_decode_block_intra(MpegEncContext *s, int16_t *block, int n)
Definition: mpeg12dec.c:483
HWACCEL_D3D11VA2
#define HWACCEL_D3D11VA2(codec)
Definition: hwconfig.h:69
ff_reverse
const uint8_t ff_reverse[256]
Definition: reverse.c:23
MpegEncContext::last_dc
int last_dc[3]
last DC values for MPEG-1
Definition: mpegvideo.h:179
MB_TYPE_16x16
#define MB_TYPE_16x16
Definition: mpegutils.h:47
AV_PIX_FMT_D3D11VA_VLD
@ AV_PIX_FMT_D3D11VA_VLD
HW decoding through Direct3D11 via old API, Picture.data[3] contains a ID3D11VideoDecoderOutputView p...
Definition: pixfmt.h:247
FFCodec
Definition: codec_internal.h:119
mpeg2_fast_decode_block_intra
static int mpeg2_fast_decode_block_intra(MpegEncContext *s, int16_t *block, int n)
Changing this would eat up any speed benefits it has.
Definition: mpeg12dec.c:569
PICT_BOTTOM_FIELD
#define PICT_BOTTOM_FIELD
Definition: mpegutils.h:37
ff_er_add_slice
void ff_er_add_slice(ERContext *s, int startx, int starty, int endx, int endy, int status)
Add a slice.
Definition: error_resilience.c:822
ff_init_block_index
void ff_init_block_index(MpegEncContext *s)
Definition: mpegvideo.c:857
reverse.h
mpegvideo.h
MB_TYPE_L1
#define MB_TYPE_L1
Definition: mpegutils.h:61
UPDATE_CACHE
#define UPDATE_CACHE(name, gb)
Definition: get_bits.h:216
FFMAX
#define FFMAX(a, b)
Definition: macros.h:47
Mpeg1Context::first_slice
int first_slice
Definition: mpeg12dec.c:83
ER_DC_END
#define ER_DC_END
Definition: error_resilience.h:35
mpeg_decode_postinit
static int mpeg_decode_postinit(AVCodecContext *avctx)
Definition: mpeg12dec.c:1175
ff_add_cpb_side_data
AVCPBProperties * ff_add_cpb_side_data(AVCodecContext *avctx)
Add a CPB properties side data to an encoding context.
Definition: utils.c:1023
mpegutils.h
ff_set_dimensions
int ff_set_dimensions(AVCodecContext *s, int width, int height)
Check that the provided frame dimensions are valid and set them on the codec context.
Definition: utils.c:91
ER_MV_ERROR
#define ER_MV_ERROR
Definition: error_resilience.h:33
thread.h
ff_mb_pat_vlc
VLC ff_mb_pat_vlc
Definition: mpeg12.c:126
SEQ_START_CODE
#define SEQ_START_CODE
Definition: mpeg12.h:29
FF_DEBUG_PICT_INFO
#define FF_DEBUG_PICT_INFO
Definition: avcodec.h:1328
MV_TYPE_DMV
#define MV_TYPE_DMV
2 vectors, special mpeg2 Dual Prime Vectors
Definition: mpegvideo.h:269
GET_CACHE
#define GET_CACHE(name, gb)
Definition: get_bits.h:253
skip_bits
static void skip_bits(GetBitContext *s, int n)
Definition: get_bits.h:371
ff_mpeg2_rl_vlc
RL_VLC_ELEM ff_mpeg2_rl_vlc[674]
Definition: mpeg12.c:129
Mpeg1Context::save_aspect
AVRational save_aspect
Definition: mpeg12dec.c:75
MpegEncContext::intra_scantable
ScanTable intra_scantable
Definition: mpegvideo.h:84
AVCodecContext::framerate
AVRational framerate
Definition: avcodec.h:1735
AV_STEREO3D_SIDEBYSIDE
@ AV_STEREO3D_SIDEBYSIDE
Views are next to each other.
Definition: stereo3d.h:64
get_bits
static unsigned int get_bits(GetBitContext *s, int n)
Read 1-25 bits.
Definition: get_bits.h:325
MB_TYPE_ZERO_MV
#define MB_TYPE_ZERO_MV
Definition: mpeg12dec.c:88
MT_DMV
#define MT_DMV
Definition: mpeg12dec.c:657
ParseContext
Definition: parser.h:28
ff_mpv_reconstruct_mb
void ff_mpv_reconstruct_mb(MpegEncContext *s, int16_t block[12][64])
Definition: mpegvideo_dec.c:1010
FFCodec::p
AVCodec p
The public AVCodec.
Definition: codec_internal.h:123
decode_chunks
static int decode_chunks(AVCodecContext *avctx, AVFrame *picture, int *got_output, const uint8_t *buf, int buf_size)
Definition: mpeg12dec.c:2441
AVCodecContext::skip_frame
enum AVDiscard skip_frame
Skip decoding for selected frames.
Definition: avcodec.h:1698
mpeg_decode_quant_matrix_extension
static void mpeg_decode_quant_matrix_extension(MpegEncContext *s)
Definition: mpeg12dec.c:1495
AVCodecContext::thread_count
int thread_count
thread count is used to decide how many independent tasks should be passed to execute()
Definition: avcodec.h:1466
AV_STEREO3D_2D
@ AV_STEREO3D_2D
Video is not stereoscopic (and metadata has to be there).
Definition: stereo3d.h:52
wrap
#define wrap(func)
Definition: neontest.h:65
timecode.h
GetBitContext
Definition: get_bits.h:107
AV_EF_BITSTREAM
#define AV_EF_BITSTREAM
detect bitstream specification deviations
Definition: defs.h:49
USES_LIST
#define USES_LIST(a, list)
Definition: mpegutils.h:92
slice_decode_thread
static int slice_decode_thread(AVCodecContext *c, void *arg)
Definition: mpeg12dec.c:1960
AVCodecContext::flags
int flags
AV_CODEC_FLAG_*.
Definition: avcodec.h:478
IDCTDSPContext::idct_put
void(* idct_put)(uint8_t *dest, ptrdiff_t line_size, int16_t *block)
block -> idct -> clip to unsigned 8 bit -> dest.
Definition: idctdsp.h:63
MB_TYPE_CBP
#define MB_TYPE_CBP
Definition: mpegutils.h:64
val
static double val(void *priv, double ch)
Definition: aeval.c:77
Mpeg1Context::tmpgexs
int tmpgexs
Definition: mpeg12dec.c:82
HWACCEL_VDPAU
#define HWACCEL_VDPAU(codec)
Definition: hwconfig.h:75
AV_CODEC_FLAG_LOW_DELAY
#define AV_CODEC_FLAG_LOW_DELAY
Force low delay.
Definition: avcodec.h:280
mpeg12_pixfmt_list_444
static enum AVPixelFormat mpeg12_pixfmt_list_444[]
Definition: mpeg12dec.c:1147
AVCodecContext::coded_height
int coded_height
Definition: avcodec.h:586
ff_print_debug_info
void ff_print_debug_info(const MpegEncContext *s, const Picture *p, AVFrame *pict)
Definition: mpegvideo_dec.c:495
mpeg1_decode_sequence
static int mpeg1_decode_sequence(AVCodecContext *avctx, const uint8_t *buf, int buf_size)
Definition: mpeg12dec.c:2061
av_reduce
int av_reduce(int *dst_num, int *dst_den, int64_t num, int64_t den, int64_t max)
Reduce a fraction.
Definition: rational.c:35
HAS_CBP
#define HAS_CBP(a)
Definition: mpegutils.h:94
AVRational::num
int num
Numerator.
Definition: rational.h:59
GOP_START_CODE
#define GOP_START_CODE
Definition: mpeg12.h:30
IPUContext
Definition: mpeg12dec.c:2975
mpeg1_hwaccel_pixfmt_list_420
static enum AVPixelFormat mpeg1_hwaccel_pixfmt_list_420[]
Definition: mpeg12dec.c:1107
ff_mpv_common_end
void ff_mpv_common_end(MpegEncContext *s)
Definition: mpegvideo.c:788
mpeg12.h
mpegvideodec.h
ff_mpeg2video_decoder
const FFCodec ff_mpeg2video_decoder
Definition: mpeg12dec.c:2911
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:180
Mpeg1Context::frame_rate_index
unsigned frame_rate_index
Definition: mpeg12dec.c:79
ipu_decode_frame
static int ipu_decode_frame(AVCodecContext *avctx, AVFrame *frame, int *got_frame, AVPacket *avpkt)
Definition: mpeg12dec.c:2982
ER_DC_ERROR
#define ER_DC_ERROR
Definition: error_resilience.h:32
av_cold
#define av_cold
Definition: attributes.h:90
mpeg2_hwaccel_pixfmt_list_420
static enum AVPixelFormat mpeg2_hwaccel_pixfmt_list_420[]
Definition: mpeg12dec.c:1118
init_get_bits8
static int init_get_bits8(GetBitContext *s, const uint8_t *buffer, int byte_size)
Initialize GetBitContext.
Definition: get_bits.h:524
mpeg1_decode_picture
static int mpeg1_decode_picture(AVCodecContext *avctx, const uint8_t *buf, int buf_size)
Definition: mpeg12dec.c:1315
flush
static void flush(AVCodecContext *avctx)
Definition: mpeg12dec.c:2858
Mpeg1Context::save_progressive_seq
int save_progressive_seq
Definition: mpeg12dec.c:76
CLOSE_READER
#define CLOSE_READER(name, gb)
Definition: get_bits.h:187
AVCodecContext::extradata_size
int extradata_size
Definition: avcodec.h:500
AVCodecContext::has_b_frames
int has_b_frames
Size of the frame reordering buffer in the decoder.
Definition: avcodec.h:694
A53_MAX_CC_COUNT
#define A53_MAX_CC_COUNT
Definition: mpeg12dec.c:61
Mpeg1Context::repeat_field
int repeat_field
Definition: mpeg12dec.c:66
width
#define width
FF_CODEC_DECODE_CB
#define FF_CODEC_DECODE_CB(func)
Definition: codec_internal.h:298
stereo3d.h
AV_PIX_FMT_DXVA2_VLD
@ AV_PIX_FMT_DXVA2_VLD
HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer.
Definition: pixfmt.h:127
s
#define s(width, name)
Definition: cbs_vp9.c:256
ff_mpeg1_aspect
const float ff_mpeg1_aspect[16]
Definition: mpeg12data.c:359
AVCodecContext::ticks_per_frame
int ticks_per_frame
For some codecs, the time base is closer to the field rate than the frame rate.
Definition: avcodec.h:530
s1
#define s1
Definition: regdef.h:38
slice_end
static int slice_end(AVCodecContext *avctx, AVFrame *pict)
Handle slice ends.
Definition: mpeg12dec.c:2013
Mpeg1Context::mpeg_enc_ctx_allocated
int mpeg_enc_ctx_allocated
Definition: mpeg12dec.c:65
SHOW_SBITS
#define SHOW_SBITS(name, gb, num)
Definition: get_bits.h:250
init
int(* init)(AVBSFContext *ctx)
Definition: dts2pts_bsf.c:365
ff_mpeg_er_frame_start
void ff_mpeg_er_frame_start(MpegEncContext *s)
Definition: mpeg_er.c:47
av_assert0
#define av_assert0(cond)
assert() equivalent, that is always enabled.
Definition: avassert.h:37
Mpeg1Context::aspect_ratio_info
unsigned aspect_ratio_info
Definition: mpeg12dec.c:74
pix_fmts
static enum AVPixelFormat pix_fmts[]
Definition: libkvazaar.c:296
mpeg_decode_sequence_display_extension
static void mpeg_decode_sequence_display_extension(Mpeg1Context *s1)
Definition: mpeg12dec.c:1414
Mpeg1Context::pan_scan
AVPanScan pan_scan
Definition: mpeg12dec.c:67
get_sbits
static int get_sbits(GetBitContext *s, int n)
Definition: get_bits.h:310
AV_LOG_DEBUG
#define AV_LOG_DEBUG
Stuff which is only useful for libav* developers.
Definition: log.h:201
ctx
AVFormatContext * ctx
Definition: movenc.c:48
PICT_TOP_FIELD
#define PICT_TOP_FIELD
Definition: mpegutils.h:36
decode.h
mpeg12_pixfmt_list_422
static enum AVPixelFormat mpeg12_pixfmt_list_422[]
Definition: mpeg12dec.c:1142
SKIP_BITS
#define SKIP_BITS(name, gb, num)
Definition: get_bits.h:231
IS_INTRA
#define IS_INTRA(x, y)
field
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this field
Definition: writing_filters.txt:78
AV_PIX_FMT_YUV420P
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:66
AVCodecContext::rc_max_rate
int64_t rc_max_rate
maximum bitrate
Definition: avcodec.h:1223
AVCPBProperties
This structure describes the bitrate properties of an encoded bitstream.
Definition: defs.h:126
CODEC_LONG_NAME
#define CODEC_LONG_NAME(str)
Definition: codec_internal.h:264
AVCodecContext::codec_id
enum AVCodecID codec_id
Definition: avcodec.h:408
arg
const char * arg
Definition: jacosubdec.c:67
if
if(ret)
Definition: filter_design.txt:179
AVDISCARD_ALL
@ AVDISCARD_ALL
discard all
Definition: defs.h:76
Mpeg1Context::rc_buffer_size
int rc_buffer_size
Definition: mpeg12dec.c:77
MB_PTYPE_VLC_BITS
#define MB_PTYPE_VLC_BITS
Definition: mpeg12vlc.h:39
Mpeg1Context::save_width
int save_width
Definition: mpeg12dec.c:76
PTRDIFF_SPECIFIER
#define PTRDIFF_SPECIFIER
Definition: internal.h:149
NULL
#define NULL
Definition: coverity.c:32
run
uint8_t run
Definition: svq3.c:203
AVCodecContext::color_range
enum AVColorRange color_range
MPEG vs JPEG YUV range.
Definition: avcodec.h:982
av_buffer_unref
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it.
Definition: buffer.c:139
ER_AC_ERROR
#define ER_AC_ERROR
Definition: error_resilience.h:31
SLICE_MIN_START_CODE
#define SLICE_MIN_START_CODE
Definition: mpeg12.h:32
ff_mpv_idct_init
av_cold void ff_mpv_idct_init(MpegEncContext *s)
Definition: mpegvideo.c:342
Mpeg1Context::sync
int sync
Definition: mpeg12dec.c:80
AVCHROMA_LOC_LEFT
@ AVCHROMA_LOC_LEFT
MPEG-2/4 4:2:0, H.264 default for 4:2:0.
Definition: pixfmt.h:682
AVRational
Rational number (pair of numerator and denominator).
Definition: rational.h:58
AVCHROMA_LOC_TOPLEFT
@ AVCHROMA_LOC_TOPLEFT
ITU-R 601, SMPTE 274M 296M S314M(DV 4:1:1), mpeg2 4:2:2.
Definition: pixfmt.h:684
AVCodecContext::bit_rate
int64_t bit_rate
the average bitrate
Definition: avcodec.h:448
mpeg_decode_picture_display_extension
static void mpeg_decode_picture_display_extension(Mpeg1Context *s1)
Definition: mpeg12dec.c:1438
MpegEncContext::inter_matrix
uint16_t inter_matrix[64]
Definition: mpegvideo.h:301
AV_CODEC_FLAG2_FAST
#define AV_CODEC_FLAG2_FAST
Allow non spec compliant speedup tricks.
Definition: avcodec.h:303
AV_PICTURE_TYPE_I
@ AV_PICTURE_TYPE_I
Intra.
Definition: avutil.h:274
get_bits1
static unsigned int get_bits1(GetBitContext *s)
Definition: get_bits.h:378
profiles.h
AV_CODEC_FLAG_TRUNCATED
#define AV_CODEC_FLAG_TRUNCATED
Input bitstream might be truncated at a random location instead of only at frame boundaries.
Definition: avcodec.h:271
LAST_SKIP_BITS
#define LAST_SKIP_BITS(name, gb, num)
Definition: get_bits.h:237
MB_TYPE_QUANT
#define MB_TYPE_QUANT
Definition: mpegutils.h:63
avpriv_find_start_code
const uint8_t * avpriv_find_start_code(const uint8_t *p, const uint8_t *end, uint32_t *state)
lowres
static int lowres
Definition: ffplay.c:335
av_frame_new_side_data_from_buf
AVFrameSideData * av_frame_new_side_data_from_buf(AVFrame *frame, enum AVFrameSideDataType type, AVBufferRef *buf)
Add a new side data to a frame from an existing AVBufferRef.
Definition: frame.c:647
ff_mpeg1_rl_vlc
RL_VLC_ELEM ff_mpeg1_rl_vlc[680]
Definition: mpeg12.c:128
MB_BTYPE_VLC_BITS
#define MB_BTYPE_VLC_BITS
Definition: mpeg12vlc.h:40
UPDATE_THREAD_CONTEXT
#define UPDATE_THREAD_CONTEXT(func)
Definition: codec_internal.h:273
AV_PIX_FMT_GRAY8
@ AV_PIX_FMT_GRAY8
Y , 8bpp.
Definition: pixfmt.h:74
mpeg12codecs.h
get_vlc2
static av_always_inline int get_vlc2(GetBitContext *s, const VLCElem *table, int bits, int max_depth)
Parse a vlc code.
Definition: get_bits.h:631
AV_FRAME_DATA_AFD
@ AV_FRAME_DATA_AFD
Active Format Description data consisting of a single byte as specified in ETSI TS 101 154 using AVAc...
Definition: frame.h:90
AVCodecContext::level
int level
level
Definition: avcodec.h:1676
Mpeg1Context::save_height
int save_height
Definition: mpeg12dec.c:76
c
Undefined Behavior In the C some operations are like signed integer dereferencing freed accessing outside allocated Undefined Behavior must not occur in a C it is not safe even if the output of undefined operations is unused The unsafety may seem nit picking but Optimizing compilers have in fact optimized code on the assumption that no undefined Behavior occurs Optimizing code based on wrong assumptions can and has in some cases lead to effects beyond the output of computations The signed integer overflow problem in speed critical code Code which is highly optimized and works with signed integers sometimes has the problem that often the output of the computation does not c
Definition: undefined.txt:32
AV_CODEC_ID_MPEG1VIDEO
@ AV_CODEC_ID_MPEG1VIDEO
Definition: codec_id.h:53
MpegEncContext::idsp
IDCTDSPContext idsp
Definition: mpegvideo.h:221
ff_mb_ptype_vlc
VLC ff_mb_ptype_vlc
Definition: mpeg12.c:124
ff_dlog
#define ff_dlog(a,...)
Definition: tableprint_vlc.h:28
quant_matrix_rebuild
static void quant_matrix_rebuild(uint16_t *matrix, const uint8_t *old_perm, const uint8_t *new_perm)
Definition: mpeg12dec.c:1095
ff_mpeg1_find_frame_end
int ff_mpeg1_find_frame_end(ParseContext *pc, const uint8_t *buf, int buf_size, AVCodecParserContext *s)
Find the end of the current frame in the bitstream.
Definition: mpeg12.c:175
startcode.h
s2
#define s2
Definition: regdef.h:39
AVDISCARD_NONKEY
@ AVDISCARD_NONKEY
discard all frames except keyframes
Definition: defs.h:75
check_marker
static int check_marker(void *logctx, GetBitContext *s, const char *msg)
Definition: mpegvideodec.h:73
AVCodecContext::flags2
int flags2
AV_CODEC_FLAG2_*.
Definition: avcodec.h:485
ff_get_buffer
int ff_get_buffer(AVCodecContext *avctx, AVFrame *frame, int flags)
Get a buffer for a frame.
Definition: decode.c:1461
AV_CODEC_CAP_DR1
#define AV_CODEC_CAP_DR1
Codec uses get_buffer() or get_encode_buffer() for allocating buffers and supports custom allocators.
Definition: codec.h:52
AV_CODEC_FLAG_GRAY
#define AV_CODEC_FLAG_GRAY
Only decode/encode grayscale.
Definition: avcodec.h:259
AVPacket::size
int size
Definition: packet.h:375
dc
Intra DC Prediction block[y][x] dc[1] (Snow bitstream description)
Definition: snow.txt:400
NULL_IF_CONFIG_SMALL
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification.
Definition: internal.h:115
MpegEncContext::qscale
int qscale
QP.
Definition: mpegvideo.h:198
AV_CODEC_ID_IPU
@ AV_CODEC_ID_IPU
Definition: codec_id.h:309
AV_FRAME_DATA_PANSCAN
@ AV_FRAME_DATA_PANSCAN
The data is the AVPanScan struct defined in libavcodec.
Definition: frame.h:53
RL_VLC_ELEM
Definition: vlc.h:37
av_frame_ref
int av_frame_ref(AVFrame *dst, const AVFrame *src)
Set up a new reference to the data described by the source frame.
Definition: frame.c:353
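Illustration (not part of mpeg12dec.c): a minimal sketch of the usual av_frame_ref()/av_frame_unref() pairing; the helper name make_extra_ref() is hypothetical.

#include <libavutil/error.h>
#include <libavutil/frame.h>

/* Create a second reference to an already-filled frame, use it, release it. */
static int make_extra_ref(const AVFrame *src)
{
    AVFrame *dst = av_frame_alloc();
    int ret;
    if (!dst)
        return AVERROR(ENOMEM);
    ret = av_frame_ref(dst, src);   /* dst now shares src's data buffers */
    if (ret < 0) {
        av_frame_free(&dst);
        return ret;
    }
    /* ... read from dst ... */
    av_frame_unref(dst);            /* drop the extra reference */
    av_frame_free(&dst);
    return 0;
}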
MT_FRAME
#define MT_FRAME
Definition: mpeg12dec.c:655
codec_internal.h
shift
static int shift(int a, int b)
Definition: bonk.c:257
IPUContext::flags
int flags
Definition: mpeg12dec.c:2978
MpegEncContext::intra_matrix
uint16_t intra_matrix[64]
matrix transmitted in the bitstream
Definition: mpegvideo.h:299
ff_mpeg1_clean_buffers
void ff_mpeg1_clean_buffers(MpegEncContext *s)
Definition: mpeg12.c:106
ff_mpeg1video_decoder
const FFCodec ff_mpeg1video_decoder
Definition: mpeg12dec.c:2879
AV_RB32
Read a 32-bit value in big-endian byte order (byte-access macro).
Definition: bytestream.h:96
FF_CODEC_CAP_SKIP_FRAME_FILL_PARAM
#define FF_CODEC_CAP_SKIP_FRAME_FILL_PARAM
The decoder extracts and fills its parameters even if the frame is skipped due to the skip_frame setting.
Definition: codec_internal.h:54
AVFrameSideData::data
uint8_t * data
Definition: frame.h:238
MB_TYPE_SKIP
#define MB_TYPE_SKIP
Definition: mpegutils.h:55
FF_THREAD_SLICE
#define FF_THREAD_SLICE
Decode more than one part of a single frame at once.
Definition: avcodec.h:1478
ff_mpeg_draw_horiz_band
void ff_mpeg_draw_horiz_band(MpegEncContext *s, int y, int h)
Definition: mpegvideo_dec.c:532
PICTURE_START_CODE
#define PICTURE_START_CODE
Definition: mpeg12.h:31
USER_START_CODE
#define USER_START_CODE
Definition: cavs.h:40
AVCodecContext::skip_bottom
int skip_bottom
Number of macroblock rows at the bottom which are skipped.
Definition: avcodec.h:913
AVCodecHWConfigInternal
Definition: hwconfig.h:29
ff_mpeg1_default_intra_matrix
const uint16_t ff_mpeg1_default_intra_matrix[256]
Definition: mpeg12data.c:31
diff
static av_always_inline int diff(const struct color_info *a, const struct color_info *b, const int trans_thresh)
Definition: vf_paletteuse.c:162
ff_mpv_frame_start
int ff_mpv_frame_start(MpegEncContext *s, AVCodecContext *avctx)
generic function called after decoding the header and before a frame is decoded.
Definition: mpegvideo_dec.c:271
MB_TYPE_INTERLACED
#define MB_TYPE_INTERLACED
Definition: mpegutils.h:51
OPEN_READER
#define OPEN_READER(name, gb)
Definition: get_bits.h:176
ff_mpeg_flush
void ff_mpeg_flush(AVCodecContext *avctx)
Definition: mpegvideo_dec.c:540
height
#define height
Mpeg1Context::has_stereo3d
int has_stereo3d
Definition: mpeg12dec.c:69
mpeg_decode_init
static av_cold int mpeg_decode_init(AVCodecContext *avctx)
Definition: mpeg12dec.c:1049
AV_CODEC_CAP_SLICE_THREADS
#define AV_CODEC_CAP_SLICE_THREADS
Codec supports slice-based (or partition-based) multithreading.
Definition: codec.h:117
HWACCEL_D3D11VA
#define HWACCEL_D3D11VA(codec)
Definition: hwconfig.h:79
mpegvideodata.h
attributes.h
ff_mpeg1_decode_block_intra
int ff_mpeg1_decode_block_intra(GetBitContext *gb, const uint16_t *quant_matrix, const uint8_t *scantable, int last_dc[3], int16_t *block, int index, int qscale)
Definition: mpeg12.c:238
MV_TYPE_FIELD
#define MV_TYPE_FIELD
2 vectors, one per field
Definition: mpegvideo.h:268
skip_bits1
static void skip_bits1(GetBitContext *s)
Definition: get_bits.h:403
AV_PIX_FMT_D3D11
@ AV_PIX_FMT_D3D11
Hardware surfaces for Direct3D11.
Definition: pixfmt.h:333
HWACCEL_NVDEC
#define HWACCEL_NVDEC(codec)
Definition: hwconfig.h:71
AV_PIX_FMT_VAAPI
@ AV_PIX_FMT_VAAPI
Hardware acceleration through VA-API, data[3] contains a VASurfaceID.
Definition: pixfmt.h:119
ff_combine_frame
int ff_combine_frame(ParseContext *pc, int next, const uint8_t **buf, int *buf_size)
Combine the (truncated) bitstream to a complete frame.
Definition: parser.c:199
FF_THREAD_FRAME
#define FF_THREAD_FRAME
Decode more than one frame at once.
Definition: avcodec.h:1477
ff_mpeg2_video_profiles
const AVProfile ff_mpeg2_video_profiles[]
Definition: profiles.c:100
AV_PIX_FMT_VDPAU
@ AV_PIX_FMT_VDPAU
HW acceleration through VDPAU, Picture.data[3] contains a VdpVideoSurface.
Definition: pixfmt.h:187
DECLARE_ALIGNED
#define DECLARE_ALIGNED(n, t, v)
Definition: mem.h:116
MB_TYPE_L0L1
#define MB_TYPE_L0L1
Definition: mpegutils.h:62
AV_PIX_FMT_VIDEOTOOLBOX
@ AV_PIX_FMT_VIDEOTOOLBOX
hardware decoding through Videotoolbox
Definition: pixfmt.h:302
ff_init_scantable
av_cold void ff_init_scantable(const uint8_t *permutation, ScanTable *st, const uint8_t *src_scantable)
Definition: mpegvideo.c:321
MpegEncContext::block_last_index
int block_last_index[12]
last non zero coefficient in block
Definition: mpegvideo.h:75
av_assert2
#define av_assert2(cond)
assert() equivalent, that does lie in speed critical code.
Definition: avassert.h:64
MpegEncContext::chroma_inter_matrix
uint16_t chroma_inter_matrix[64]
Definition: mpegvideo.h:302
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:269
code
Definition: filter_design.txt:178
AV_CODEC_FLAG2_SHOW_ALL
#define AV_CODEC_FLAG2_SHOW_ALL
Show all frames before the first keyframe.
Definition: avcodec.h:331
AVCodecContext::properties
unsigned properties
Properties of the stream that gets decoded.
Definition: avcodec.h:1847
ff_alternate_vertical_scan
const uint8_t ff_alternate_vertical_scan[64]
Definition: mpegvideodata.c:63
btype2mb_type
static const uint32_t btype2mb_type[11]
Definition: mpeg12dec.c:100
AVCodecContext::extradata
uint8_t * extradata
some codecs need / can use extradata like Huffman tables.
Definition: avcodec.h:499
AVHWAccel::decode_slice
int(* decode_slice)(AVCodecContext *avctx, const uint8_t *buf, uint32_t buf_size)
Callback for each slice.
Definition: avcodec.h:2159
show_bits
static unsigned int show_bits(GetBitContext *s, int n)
Show 1-25 bits.
Definition: get_bits.h:361
internal.h
ff_mpv_decode_init
void ff_mpv_decode_init(MpegEncContext *s, AVCodecContext *avctx)
Initialize the given MpegEncContext for decoding.
Definition: mpegvideo_dec.c:42
AV_STEREO3D_TOPBOTTOM
@ AV_STEREO3D_TOPBOTTOM
Views are on top of each other.
Definition: stereo3d.h:76
AVCPBProperties::max_bitrate
int64_t max_bitrate
Maximum bitrate of the stream, in bits per second.
Definition: defs.h:131
IS_QUANT
#define IS_QUANT(a)
Definition: mpegutils.h:88
ff_mpeg12_init_vlcs
av_cold void ff_mpeg12_init_vlcs(void)
Definition: mpeg12.c:164
FF_DEBUG_STARTCODE
#define FF_DEBUG_STARTCODE
Definition: avcodec.h:1335
FFMIN
#define FFMIN(a, b)
Definition: macros.h:49
av_d2q
AVRational av_d2q(double d, int max)
Convert a double precision floating point number to a rational.
Definition: rational.c:106
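Illustration (not from this file): a sketch of av_d2q() approximating a floating-point value with a bounded rational; the chosen input value is arbitrary.

#include <stdio.h>
#include <libavutil/rational.h>

int main(void)
{
    /* Approximate 0.4166666... with numerator/denominator bounded by 100000. */
    AVRational r = av_d2q(0.4166666, 100000);
    printf("%d/%d\n", r.num, r.den);   /* expected to print a close rational such as 5/12 */
    return 0;
}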
MB_PAT_VLC_BITS
#define MB_PAT_VLC_BITS
Definition: mpeg12vlc.h:38
mpeg1_decode_block_inter
static int mpeg1_decode_block_inter(MpegEncContext *s, int16_t *block, int n)
Definition: mpeg12dec.c:151
av_frame_unref
void av_frame_unref(AVFrame *frame)
Unreference all the buffers referenced by frame and reset the frame fields.
Definition: frame.c:487
ptype2mb_type
static const uint32_t ptype2mb_type[7]
Definition: mpeg12dec.c:90
IPUContext::m
MpegEncContext m
Definition: mpeg12dec.c:2976
AVCodec::name
const char * name
Name of the codec implementation.
Definition: codec.h:211
MpegEncContext::intra_vlc_format
int intra_vlc_format
Definition: mpegvideo.h:452
AVCodecContext::chroma_sample_location
enum AVChromaLocation chroma_sample_location
This defines the location of chroma samples.
Definition: avcodec.h:989
MAX_INDEX
#define MAX_INDEX
Definition: mpeg12dec.c:141
AVCodecContext::height
int height
Definition: avcodec.h:571
AVCodecContext::pix_fmt
enum AVPixelFormat pix_fmt
Pixel format, see AV_PIX_FMT_xxx.
Definition: avcodec.h:608
AVCOL_RANGE_MPEG
@ AVCOL_RANGE_MPEG
Narrow or limited range content.
Definition: pixfmt.h:644
HWACCEL_VIDEOTOOLBOX
#define HWACCEL_VIDEOTOOLBOX(codec)
Definition: hwconfig.h:77
Mpeg1Context::stereo3d
AVStereo3D stereo3d
Definition: mpeg12dec.c:68
idctdsp.h
avcodec.h
av_cmp_q
static int av_cmp_q(AVRational a, AVRational b)
Compare two rationals.
Definition: rational.h:89
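Illustration (not from this file): a sketch of av_cmp_q() comparing two rationals exactly; the helper name is hypothetical.

#include <libavutil/rational.h>

/* Returns nonzero when fps is strictly greater than 25/1. */
static int faster_than_25fps(AVRational fps)
{
    return av_cmp_q(fps, (AVRational){ 25, 1 }) > 0;   /* av_cmp_q: -1, 0 or 1 */
}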
GET_RL_VLC
#define GET_RL_VLC(level, run, name, gb, table, bits, max_depth, need_update)
Definition: get_bits.h:585
ff_zigzag_direct
const uint8_t ff_zigzag_direct[64]
Definition: mathtables.c:98
ff_mpeg12_frame_rate_tab
const AVRational ff_mpeg12_frame_rate_tab[]
Definition: mpeg12framerate.c:24
mpeg_decode_gop
static int mpeg_decode_gop(AVCodecContext *avctx, const uint8_t *buf, int buf_size)
Definition: mpeg12dec.c:2410
ret
ret
Definition: filter_design.txt:187
AV_EF_AGGRESSIVE
#define AV_EF_AGGRESSIVE
consider things that a sane encoder/muxer should not do as an error
Definition: defs.h:56
pred
static const float pred[4]
Definition: siprdata.h:259
AV_FRAME_DATA_GOP_TIMECODE
@ AV_FRAME_DATA_GOP_TIMECODE
The GOP timecode in 25 bit timecode format.
Definition: frame.h:125
frame
Definition: filter_design.txt:264
ff_mpeg1_default_non_intra_matrix
const uint16_t ff_mpeg1_default_non_intra_matrix[64]
Definition: mpeg12data.c:42
AVCPBProperties::buffer_size
int64_t buffer_size
The size of the buffer to which the ratecontrol is applied, in bits.
Definition: defs.h:147
mpeg1_fast_decode_block_inter
static int mpeg1_fast_decode_block_inter(MpegEncContext *s, int16_t *block, int n)
Changing this would eat up any speed benefits it has.
Definition: mpeg12dec.c:239
align_get_bits
static const uint8_t * align_get_bits(GetBitContext *s)
Definition: get_bits.h:540
TEX_VLC_BITS
#define TEX_VLC_BITS
Definition: dvdec.c:146
ff_thread_finish_setup
Per-frame setup code that must run before the decode process starts should be completed first; call ff_thread_finish_setup() afterwards (see doc/multithreading.txt).
left
Motion vector prediction: the median of the scaled left, top and top-right vectors (Snow bitstream description)
Definition: snow.txt:386
mpeg_get_pixelformat
static enum AVPixelFormat mpeg_get_pixelformat(AVCodecContext *avctx)
Definition: mpeg12dec.c:1152
AV_CODEC_FLAG2_CHUNKS
#define AV_CODEC_FLAG2_CHUNKS
Input bitstream might be truncated at packet boundaries instead of only at frame boundaries.
Definition: avcodec.h:322
AV_RL32
Read a 32-bit value in little-endian byte order (byte-access macro).
Definition: bytestream.h:92
mpeg12data.h
mpeg2_fast_decode_block_non_intra
static int mpeg2_fast_decode_block_non_intra(MpegEncContext *s, int16_t *block, int n)
Changing this would eat up any speed benefits it has.
Definition: mpeg12dec.c:412
mpeg_field_start
static int mpeg_field_start(MpegEncContext *s, const uint8_t *buf, int buf_size)
Definition: mpeg12dec.c:1572
ff_mpeg_update_thread_context
int ff_mpeg_update_thread_context(AVCodecContext *dst, const AVCodecContext *src)
Definition: mpegvideo_dec.c:58
skip_1stop_8data_bits
static int skip_1stop_8data_bits(GetBitContext *gb)
Definition: get_bits.h:669
AVCodecContext
main external API structure.
Definition: avcodec.h:398
AVCodecContext::active_thread_type
int active_thread_type
Which multithreading methods are in use by the codec.
Definition: avcodec.h:1485
av_timecode_make_mpeg_tc_string
char * av_timecode_make_mpeg_tc_string(char *buf, uint32_t tc25bit)
Get the timecode string from the 25-bit timecode format (MPEG GOP format).
Definition: timecode.c:167
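Illustration (not from this file): a sketch of formatting a 25-bit MPEG GOP timecode; the value 0 is just a placeholder.

#include <stdio.h>
#include <libavutil/timecode.h>

int main(void)
{
    char buf[AV_TIMECODE_STR_SIZE];
    /* An all-zero 25-bit timecode should format as "00:00:00:00". */
    printf("%s\n", av_timecode_make_mpeg_tc_string(buf, 0));
    return 0;
}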
MpegEncContext::intra_dc_precision
int intra_dc_precision
Definition: mpegvideo.h:446
AVCodecContext::execute
int(* execute)(struct AVCodecContext *c, int(*func)(struct AVCodecContext *c2, void *arg), void *arg2, int *ret, int count, int size)
The codec may call this to execute several independent things.
Definition: avcodec.h:1517
SHOW_UBITS
#define SHOW_UBITS(name, gb, num)
Definition: get_bits.h:249
AV_PICTURE_TYPE_B
@ AV_PICTURE_TYPE_B
Bi-dir predicted.
Definition: avutil.h:276
mpeg12dec.h
AVCHROMA_LOC_CENTER
@ AVCHROMA_LOC_CENTER
MPEG-1 4:2:0, JPEG 4:2:0, H.263 4:2:0.
Definition: pixfmt.h:683
AVRational::den
int den
Denominator.
Definition: rational.h:60
error_resilience.h
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:65
AVCodecContext::profile
int profile
profile
Definition: avcodec.h:1550
AVFrame::metadata
AVDictionary * metadata
metadata.
Definition: frame.h:630
ff_mb_btype_vlc
VLC ff_mb_btype_vlc
Definition: mpeg12.c:125
sign_extend
static av_const int sign_extend(int val, unsigned bits)
Definition: mathops.h:133
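Illustration (not FFmpeg's implementation): a portable sketch of what sign_extend() computes, i.e. interpreting the low 'bits' bits of val as a two's-complement number.

/* Flip-and-subtract trick: works for 1 <= bits <= 31 (and 32 on typical targets). */
static int sign_extend_sketch(int val, unsigned bits)
{
    unsigned sign = 1u << (bits - 1);                   /* sign bit of the field */
    unsigned v    = (unsigned)val & ((sign << 1) - 1);  /* keep only 'bits' bits */
    return (int)((v ^ sign) - sign);
}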
ff_thread_get_format
FF_DISABLE_DEPRECATION_WARNINGS enum AVPixelFormat ff_thread_get_format(AVCodecContext *avctx, const enum AVPixelFormat *fmt)
Wrapper around get_format() for frame-multithreaded codecs.
Definition: pthread_frame.c:1054
ff_mpv_frame_end
void ff_mpv_frame_end(MpegEncContext *s)
Definition: mpegvideo_dec.c:487
ref
static int ref[MAX_W *MAX_W]
Definition: jpeg2000dwt.c:112
Mpeg1Context::slice_count
int slice_count
Definition: mpeg12dec.c:73
AV_CODEC_CAP_DELAY
#define AV_CODEC_CAP_DELAY
Encoder or decoder requires flushing with NULL input at the end in order to give the complete and correct output.
Definition: codec.h:82
VLC::table
VLCElem * table
Definition: vlc.h:33
FF_CODEC_PROPERTY_CLOSED_CAPTIONS
#define FF_CODEC_PROPERTY_CLOSED_CAPTIONS
Definition: avcodec.h:1849
av_mul_q
AVRational av_mul_q(AVRational b, AVRational c)
Multiply two rationals.
Definition: rational.c:80
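Illustration (not from this file): a sketch of av_mul_q() combining two rationals exactly, e.g. a base frame rate and a scale factor; the helper name is hypothetical.

#include <libavutil/rational.h>

static AVRational scaled_rate(AVRational base, AVRational scale)
{
    return av_mul_q(base, scale);   /* exact multiply, result is reduced */
}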
AV_PIX_FMT_YUV444P
@ AV_PIX_FMT_YUV444P
planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
Definition: pixfmt.h:71
av_buffer_realloc
int av_buffer_realloc(AVBufferRef **pbuf, size_t size)
Reallocate a given buffer.
Definition: buffer.c:183
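Illustration (not from this file): a sketch of growing a reusable AVBufferRef on demand; the helper name is hypothetical and assumes the size_t-typed AVBufferRef.size field of recent FFmpeg.

#include <libavutil/buffer.h>

/* Make sure *buf can hold at least 'needed' bytes; allocates if *buf is NULL. */
static int ensure_capacity(AVBufferRef **buf, size_t needed)
{
    if (*buf && (*buf)->size >= needed)
        return 0;
    return av_buffer_realloc(buf, needed);
}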
AVCodecContext::debug
int debug
debug
Definition: avcodec.h:1327
AVHWAccel::start_frame
int(* start_frame)(AVCodecContext *avctx, const uint8_t *buf, uint32_t buf_size)
Called at the beginning of each frame or field picture.
Definition: avcodec.h:2132
AVCodecContext::coded_width
int coded_width
Bitstream width / height, may be different from width/height e.g.
Definition: avcodec.h:586
get_dmv
static int get_dmv(MpegEncContext *s)
Definition: mpeg12dec.c:645
tc
#define tc
Definition: regdef.h:69
AV_PICTURE_TYPE_P
@ AV_PICTURE_TYPE_P
Predicted.
Definition: avutil.h:275
AVMEDIA_TYPE_VIDEO
@ AVMEDIA_TYPE_VIDEO
Definition: avutil.h:201
mpeg_decode_end
static av_cold int mpeg_decode_end(AVCodecContext *avctx)
Definition: mpeg12dec.c:2869
AV_PIX_FMT_YUV422P
@ AV_PIX_FMT_YUV422P
planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
Definition: pixfmt.h:70
AVBufferRef
A reference to a data buffer.
Definition: buffer.h:82
MpegEncContext::inter_scantable
ScanTable inter_scantable
if inter == intra then intra should be used to reduce the cache usage
Definition: mpegvideo.h:79
IDCTDSPContext::idct_permutation
uint8_t idct_permutation[64]
IDCT input permutation.
Definition: idctdsp.h:87
ff_ipu_decoder
const FFCodec ff_ipu_decoder
Definition: mpeg12dec.c:3119
av_stereo3d_create_side_data
AVStereo3D * av_stereo3d_create_side_data(AVFrame *frame)
Allocate a complete AVFrameSideData and add it to the frame.
Definition: stereo3d.c:34
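Illustration (not from this file): a sketch of attaching stereo 3D side data to a frame; the helper name is hypothetical.

#include <libavutil/error.h>
#include <libavutil/frame.h>
#include <libavutil/stereo3d.h>

static int tag_top_bottom(AVFrame *frame)
{
    AVStereo3D *s3d = av_stereo3d_create_side_data(frame);
    if (!s3d)
        return AVERROR(ENOMEM);
    s3d->type = AV_STEREO3D_TOPBOTTOM;   /* views stacked vertically */
    return 0;
}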
AVFrameSideData
Structure to hold side data for an AVFrame.
Definition: frame.h:236
ER_MV_END
#define ER_MV_END
Definition: error_resilience.h:36
ff_mv_vlc
VLC ff_mv_vlc
Definition: mpeg12.c:118
MpegEncContext::q_scale_type
int q_scale_type
Definition: mpegvideo.h:450
AVCodecContext::codec_tag
unsigned int codec_tag
fourcc (LSB first, so "ABCD" -> ('D'<<24) + ('C'<<16) + ('B'<<8) + 'A').
Definition: avcodec.h:423
Mpeg1Context::mpeg_enc_ctx
MpegEncContext mpeg_enc_ctx
Definition: mpeg12dec.c:64
ff_tlog
#define ff_tlog(ctx,...)
Definition: internal.h:162
FFALIGN
#define FFALIGN(x, a)
Definition: macros.h:78
MV_DIR_FORWARD
#define MV_DIR_FORWARD
Definition: mpegvideo.h:261
AVPacket
This structure stores compressed data.
Definition: packet.h:351
AVCodecContext::priv_data
void * priv_data
Definition: avcodec.h:425
ScanTable::permutated
uint8_t permutated[64]
Definition: mpegvideo.h:63
ff_er_frame_end
void ff_er_frame_end(ERContext *s)
Definition: error_resilience.c:892
av_dict_set
int av_dict_set(AVDictionary **pm, const char *key, const char *value, int flags)
Set the given entry in *pm, overwriting an existing entry.
Definition: dict.c:86
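Illustration (not from this file): a sketch of storing a key/value pair in an AVDictionary; the "timecode" key is used only as an example.

#include <libavutil/dict.h>

static int set_timecode_entry(AVDictionary **meta, const char *tc_string)
{
    /* av_dict_set() copies both key and value when flags == 0. */
    return av_dict_set(meta, "timecode", tc_string, 0);
}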
mpeg_get_qscale
static int mpeg_get_qscale(MpegEncContext *s)
Definition: mpegvideodec.h:64
mpeg_decode_sequence_extension
static void mpeg_decode_sequence_extension(Mpeg1Context *s1)
Definition: mpeg12dec.c:1364
HWACCEL_VAAPI
#define HWACCEL_VAAPI(codec)
Definition: hwconfig.h:73
mpeg_er.h
AVCodecContext::width
int width
picture width / height.
Definition: avcodec.h:571
int32_t
int32_t
Definition: audioconvert.c:56
imgutils.h
flags
#define flags(name, subs,...)
Definition: cbs_av1.c:561
AV_CODEC_CAP_DRAW_HORIZ_BAND
#define AV_CODEC_CAP_DRAW_HORIZ_BAND
Decoder can use draw_horiz_band callback.
Definition: codec.h:44
block
The exact code depends on how similar the blocks are and how related they are to the block
Definition: filter_design.txt:207
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:27
Mpeg1Context::frame_rate_ext
AVRational frame_rate_ext
Definition: mpeg12dec.c:78
mpeg_decode_motion
static int mpeg_decode_motion(MpegEncContext *s, int fcode, int pred)
Definition: mpeg12dec.c:115
AVERROR_INVALIDDATA
#define AVERROR_INVALIDDATA
Invalid data found when processing input.
Definition: error.h:61
IPUContext::block
int16_t block[6][64]
Definition: mpeg12dec.c:2979
mpeg_decode_user_data
static void mpeg_decode_user_data(AVCodecContext *avctx, const uint8_t *p, int buf_size)
Definition: mpeg12dec.c:2340
h
h
Definition: vp9dsp_template.c:2038
MpegEncContext::end_mb_y
int end_mb_y
end mb_y of this thread (so current thread should process start_mb_y <= row < end_mb_y)
Definition: mpegvideo.h:147
ER_AC_END
#define ER_AC_END
Definition: error_resilience.h:34
AVStereo3D
Stereo 3D type: this structure describes how two videos are packed within a single video surface,...
Definition: stereo3d.h:173
av_image_check_sar
int av_image_check_sar(unsigned int w, unsigned int h, AVRational sar)
Check if the given sample aspect ratio of an image is valid.
Definition: imgutils.c:323
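Illustration (not from this file): a sketch of validating a sample aspect ratio before using it; the helper name is hypothetical.

#include <libavutil/imgutils.h>

/* Nonzero when the SAR is acceptable for the given dimensions. */
static int sar_is_valid(unsigned w, unsigned h, AVRational sar)
{
    return av_image_check_sar(w, h, sar) >= 0;
}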
MV_VLC_BITS
#define MV_VLC_BITS
Definition: mpeg12vlc.h:34
Mpeg1Context::timecode_frame_start
int64_t timecode_frame_start
Definition: mpeg12dec.c:85
MpegEncContext::start_mb_y
int start_mb_y
start mb_y of this thread (so current thread should process start_mb_y <= row < end_mb_y)
Definition: mpegvideo.h:146
AVDISCARD_NONREF
@ AVDISCARD_NONREF
discard all non reference
Definition: defs.h:72
MpegEncContext::alternate_scan
int alternate_scan
Definition: mpegvideo.h:453
DECODE_SLICE_OK
#define DECODE_SLICE_OK
Definition: mpeg12dec.c:1676
AV_CODEC_ID_MPEG2VIDEO
@ AV_CODEC_ID_MPEG2VIDEO
preferred ID for MPEG-1/2 video decoding
Definition: codec_id.h:54
DECODE_SLICE_ERROR
#define DECODE_SLICE_ERROR
Definition: mpeg12dec.c:1675
MpegEncContext
MpegEncContext.
Definition: mpegvideo.h:70
load_matrix
static int load_matrix(MpegEncContext *s, uint16_t matrix0[64], uint16_t matrix1[64], int intra)
Definition: mpeg12dec.c:1472
AVCodecContext::sample_aspect_ratio
AVRational sample_aspect_ratio
sample aspect ratio (0 if unknown) That is the width of a pixel divided by the height of the pixel.
Definition: avcodec.h:768
decode_dc
static int decode_dc(GetBitContext *gb, int component)
Definition: mpeg12dec.h:28
Mpeg1Context::afd
uint8_t afd
Definition: mpeg12dec.c:71
Mpeg1Context
Definition: mpeg12dec.c:63
MpegEncContext::chroma_intra_matrix
uint16_t chroma_intra_matrix[64]
Definition: mpegvideo.h:300
mpeg_decode_picture_coding_extension
static int mpeg_decode_picture_coding_extension(Mpeg1Context *s1)
Definition: mpeg12dec.c:1509
Mpeg1Context::extradata_decoded
int extradata_decoded
Definition: mpeg12dec.c:84
mpeg2_decode_block_non_intra
static int mpeg2_decode_block_non_intra(MpegEncContext *s, int16_t *block, int n)
Definition: mpeg12dec.c:323
MB_TYPE_INTRA
#define MB_TYPE_INTRA
Definition: mpegutils.h:66
MBINCR_VLC_BITS
#define MBINCR_VLC_BITS
Definition: mpeg12vlc.h:37
mpeg_decode_slice
static int mpeg_decode_slice(MpegEncContext *s, int mb_y, const uint8_t **buf, int buf_size)
Decode a slice.
Definition: mpeg12dec.c:1684
re
float re
Definition: fft.c:79
rl_vlc
static VLC rl_vlc[2]
Definition: mobiclip.c:277