huffyuvdec.c
1 /*
2  * huffyuv decoder
3  *
4  * Copyright (c) 2002-2014 Michael Niedermayer <michaelni@gmx.at>
5  *
6  * see https://multimedia.cx/huffyuv.txt for a description of
7  * the algorithm used
8  *
9  * This file is part of FFmpeg.
10  *
11  * FFmpeg is free software; you can redistribute it and/or
12  * modify it under the terms of the GNU Lesser General Public
13  * License as published by the Free Software Foundation; either
14  * version 2.1 of the License, or (at your option) any later version.
15  *
16  * FFmpeg is distributed in the hope that it will be useful,
17  * but WITHOUT ANY WARRANTY; without even the implied warranty of
18  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
19  * Lesser General Public License for more details.
20  *
21  * You should have received a copy of the GNU Lesser General Public
22  * License along with FFmpeg; if not, write to the Free Software
23  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
24  *
25  * yuva, gray, 4:4:4, 4:1:1, 4:1:0 and >8 bit per sample support sponsored by NOA
26  */
27 
28 /**
29  * @file
30  * huffyuv decoder
31  */
32 
33 #define UNCHECKED_BITSTREAM_READER 1
34 
35 #include "config_components.h"
36 
37 #include "avcodec.h"
38 #include "bswapdsp.h"
39 #include "codec_internal.h"
40 #include "get_bits.h"
41 #include "huffyuv.h"
42 #include "huffyuvdsp.h"
43 #include "lossless_videodsp.h"
44 #include "thread.h"
45 #include "libavutil/imgutils.h"
46 #include "libavutil/pixdesc.h"
47 
48 #define VLC_BITS 12
49 
50 typedef struct HYuvDecContext {
51  GetBitContext gb;
52  Predictor predictor;
53  int interlaced;
54  int decorrelate;
55  int bitstream_bpp;
56  int version;
57  int yuy2; //use yuy2 instead of 422P
58  int bgr32; //use bgr32 instead of bgr24
59  int bps;
60  int n; // 1<<bps
61  int vlc_n; // number of vlc codes (FFMIN(1<<bps, MAX_VLC_N))
62  int alpha;
63  int chroma;
64  int yuv;
65  int chroma_h_shift;
66  int chroma_v_shift;
67  int flags;
68  int context;
69  int last_slice_end;
70 
71  uint8_t *temp[3];
72  uint16_t *temp16[3]; ///< identical to temp but 16bit type
73  uint8_t len[4][MAX_VLC_N];
74  uint32_t bits[4][MAX_VLC_N];
75  uint32_t pix_bgr_map[1<<VLC_BITS];
76  VLC vlc[8]; //Y,U,V,A,YY,YU,YV,AA
77  uint8_t *bitstream_buffer;
78  unsigned int bitstream_buffer_size;
79  BswapDSPContext bdsp;
80  HuffYUVDSPContext hdsp;
81  LLVidDSPContext llviddsp;
82 } HYuvDecContext;
83 
84 
85 #define classic_shift_luma_table_size 42
86 static const unsigned char classic_shift_luma[classic_shift_luma_table_size + AV_INPUT_BUFFER_PADDING_SIZE] = {
87  34, 36, 35, 69, 135, 232, 9, 16, 10, 24, 11, 23, 12, 16, 13, 10,
88  14, 8, 15, 8, 16, 8, 17, 20, 16, 10, 207, 206, 205, 236, 11, 8,
89  10, 21, 9, 23, 8, 8, 199, 70, 69, 68, 0,
90  0,0,0,0,0,0,0,0,
91 };
92 
93 #define classic_shift_chroma_table_size 59
94 static const unsigned char classic_shift_chroma[classic_shift_chroma_table_size + AV_INPUT_BUFFER_PADDING_SIZE] = {
95  66, 36, 37, 38, 39, 40, 41, 75, 76, 77, 110, 239, 144, 81, 82, 83,
96  84, 85, 118, 183, 56, 57, 88, 89, 56, 89, 154, 57, 58, 57, 26, 141,
97  57, 56, 58, 57, 58, 57, 184, 119, 214, 245, 116, 83, 82, 49, 80, 79,
98  78, 77, 44, 75, 41, 40, 39, 38, 37, 36, 34, 0,
99  0,0,0,0,0,0,0,0,
100 };
101 
102 static const unsigned char classic_add_luma[256] = {
103  3, 9, 5, 12, 10, 35, 32, 29, 27, 50, 48, 45, 44, 41, 39, 37,
104  73, 70, 68, 65, 64, 61, 58, 56, 53, 50, 49, 46, 44, 41, 38, 36,
105  68, 65, 63, 61, 58, 55, 53, 51, 48, 46, 45, 43, 41, 39, 38, 36,
106  35, 33, 32, 30, 29, 27, 26, 25, 48, 47, 46, 44, 43, 41, 40, 39,
107  37, 36, 35, 34, 32, 31, 30, 28, 27, 26, 24, 23, 22, 20, 19, 37,
108  35, 34, 33, 31, 30, 29, 27, 26, 24, 23, 21, 20, 18, 17, 15, 29,
109  27, 26, 24, 22, 21, 19, 17, 16, 14, 26, 25, 23, 21, 19, 18, 16,
110  15, 27, 25, 23, 21, 19, 17, 16, 14, 26, 25, 23, 21, 18, 17, 14,
111  12, 17, 19, 13, 4, 9, 2, 11, 1, 7, 8, 0, 16, 3, 14, 6,
112  12, 10, 5, 15, 18, 11, 10, 13, 15, 16, 19, 20, 22, 24, 27, 15,
113  18, 20, 22, 24, 26, 14, 17, 20, 22, 24, 27, 15, 18, 20, 23, 25,
114  28, 16, 19, 22, 25, 28, 32, 36, 21, 25, 29, 33, 38, 42, 45, 49,
115  28, 31, 34, 37, 40, 42, 44, 47, 49, 50, 52, 54, 56, 57, 59, 60,
116  62, 64, 66, 67, 69, 35, 37, 39, 40, 42, 43, 45, 47, 48, 51, 52,
117  54, 55, 57, 59, 60, 62, 63, 66, 67, 69, 71, 72, 38, 40, 42, 43,
118  46, 47, 49, 51, 26, 28, 30, 31, 33, 34, 18, 19, 11, 13, 7, 8,
119 };
120 
121 static const unsigned char classic_add_chroma[256] = {
122  3, 1, 2, 2, 2, 2, 3, 3, 7, 5, 7, 5, 8, 6, 11, 9,
123  7, 13, 11, 10, 9, 8, 7, 5, 9, 7, 6, 4, 7, 5, 8, 7,
124  11, 8, 13, 11, 19, 15, 22, 23, 20, 33, 32, 28, 27, 29, 51, 77,
125  43, 45, 76, 81, 46, 82, 75, 55, 56, 144, 58, 80, 60, 74, 147, 63,
126  143, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79,
127  80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 27, 30, 21, 22,
128  17, 14, 5, 6, 100, 54, 47, 50, 51, 53, 106, 107, 108, 109, 110, 111,
129  112, 113, 114, 115, 4, 117, 118, 92, 94, 121, 122, 3, 124, 103, 2, 1,
130  0, 129, 130, 131, 120, 119, 126, 125, 136, 137, 138, 139, 140, 141, 142, 134,
131  135, 132, 133, 104, 64, 101, 62, 57, 102, 95, 93, 59, 61, 28, 97, 96,
132  52, 49, 48, 29, 32, 25, 24, 46, 23, 98, 45, 44, 43, 20, 42, 41,
133  19, 18, 99, 40, 15, 39, 38, 16, 13, 12, 11, 37, 10, 9, 8, 36,
134  7, 128, 127, 105, 123, 116, 35, 34, 33, 145, 31, 79, 42, 146, 78, 26,
135  83, 48, 49, 50, 44, 47, 26, 31, 30, 18, 17, 19, 21, 24, 25, 13,
136  14, 16, 17, 18, 20, 21, 12, 14, 15, 9, 10, 6, 9, 6, 5, 8,
137  6, 12, 8, 10, 7, 9, 6, 4, 6, 2, 2, 3, 3, 3, 3, 2,
138 };
139 
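/* The per-plane code-length tables are run-length coded: each record is a
 * 3-bit repeat count followed by a 5-bit length value; a repeat count of 0
 * is escaped by an explicit 8-bit repeat count. */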
140 static int read_len_table(uint8_t *dst, GetBitContext *gb, int n)
141 {
142  int i, val, repeat;
143 
144  for (i = 0; i < n;) {
145  repeat = get_bits(gb, 3);
146  val = get_bits(gb, 5);
147  if (repeat == 0)
148  repeat = get_bits(gb, 8);
149  if (i + repeat > n || get_bits_left(gb) < 0) {
150  av_log(NULL, AV_LOG_ERROR, "Error reading huffman table\n");
151  return AVERROR_INVALIDDATA;
152  }
153  while (repeat--)
154  dst[i++] = val;
155  }
156  return 0;
157 }
158 
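/* Build joint VLC tables that decode two symbols per table lookup: a pair
 * of luma/chroma samples for the planar modes, or a whole (possibly
 * decorrelated) BGR triplet for packed RGB, whenever the combined code
 * fits into VLC_BITS. */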
159 static int generate_joint_tables(HYuvDecContext *s)
160 {
161  int ret;
162  uint16_t *symbols = av_mallocz(5 << VLC_BITS);
163  uint16_t *bits;
164  uint8_t *len;
165  if (!symbols)
166  return AVERROR(ENOMEM);
167  bits = symbols + (1 << VLC_BITS);
168  len = (uint8_t *)(bits + (1 << VLC_BITS));
169 
170  if (s->bitstream_bpp < 24 || s->version > 2) {
171  int p, i, y, u;
172  for (p = 0; p < 4; p++) {
173  int p0 = s->version > 2 ? p : 0;
174  for (i = y = 0; y < s->vlc_n; y++) {
175  int len0 = s->len[p0][y];
176  int limit = VLC_BITS - len0;
177  if (limit <= 0 || !len0)
178  continue;
179  if ((sign_extend(y, 8) & (s->vlc_n-1)) != y)
180  continue;
181  for (u = 0; u < s->vlc_n; u++) {
182  int len1 = s->len[p][u];
183  if (len1 > limit || !len1)
184  continue;
185  if ((sign_extend(u, 8) & (s->vlc_n-1)) != u)
186  continue;
187  av_assert0(i < (1 << VLC_BITS));
188  len[i] = len0 + len1;
189  bits[i] = (s->bits[p0][y] << len1) + s->bits[p][u];
190  symbols[i] = (y << 8) + (u & 0xFF);
191  i++;
192  }
193  }
194  ff_free_vlc(&s->vlc[4 + p]);
195  if ((ret = ff_init_vlc_sparse(&s->vlc[4 + p], VLC_BITS, i, len, 1, 1,
196  bits, 2, 2, symbols, 2, 2, 0)) < 0)
197  goto out;
198  }
199  } else {
200  uint8_t (*map)[4] = (uint8_t(*)[4]) s->pix_bgr_map;
201  int i, b, g, r, code;
202  int p0 = s->decorrelate;
203  int p1 = !s->decorrelate;
204  /* Restrict the range to +/-16 because that's pretty much guaranteed
205  * to cover all the combinations that fit in 11 bits total, and it
206  * does not matter if we miss a few rare codes. */
207  for (i = 0, g = -16; g < 16; g++) {
208  int len0 = s->len[p0][g & 255];
209  int limit0 = VLC_BITS - len0;
210  if (limit0 < 2 || !len0)
211  continue;
212  for (b = -16; b < 16; b++) {
213  int len1 = s->len[p1][b & 255];
214  int limit1 = limit0 - len1;
215  if (limit1 < 1 || !len1)
216  continue;
217  code = (s->bits[p0][g & 255] << len1) + s->bits[p1][b & 255];
218  for (r = -16; r < 16; r++) {
219  int len2 = s->len[2][r & 255];
220  if (len2 > limit1 || !len2)
221  continue;
222  av_assert0(i < (1 << VLC_BITS));
223  len[i] = len0 + len1 + len2;
224  bits[i] = (code << len2) + s->bits[2][r & 255];
225  if (s->decorrelate) {
226  map[i][G] = g;
227  map[i][B] = g + b;
228  map[i][R] = g + r;
229  } else {
230  map[i][B] = g;
231  map[i][G] = b;
232  map[i][R] = r;
233  }
234  i++;
235  }
236  }
237  }
238  ff_free_vlc(&s->vlc[4]);
239  if ((ret = init_vlc(&s->vlc[4], VLC_BITS, i, len, 1, 1,
240  bits, 2, 2, 0)) < 0)
241  goto out;
242  }
243  ret = 0;
244 out:
245  av_freep(&symbols);
246  return ret;
247 }
248 
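/* Parse the code-length tables from src (the extradata, or the start of
 * each packet when adaptive context is enabled), rebuild the code and VLC
 * tables, and return the number of bytes consumed. */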
249 static int read_huffman_tables(HYuvDecContext *s, const uint8_t *src, int length)
250 {
251  GetBitContext gb;
252  int i, ret;
253  int count = 3;
254 
255  if ((ret = init_get_bits(&gb, src, length * 8)) < 0)
256  return ret;
257 
258  if (s->version > 2)
259  count = 1 + s->alpha + 2*s->chroma;
260 
261  for (i = 0; i < count; i++) {
262  if ((ret = read_len_table(s->len[i], &gb, s->vlc_n)) < 0)
263  return ret;
264  if ((ret = ff_huffyuv_generate_bits_table(s->bits[i], s->len[i], s->vlc_n)) < 0)
265  return ret;
266  ff_free_vlc(&s->vlc[i]);
267  if ((ret = init_vlc(&s->vlc[i], VLC_BITS, s->vlc_n, s->len[i], 1, 1,
268  s->bits[i], 4, 4, 0)) < 0)
269  return ret;
270  }
271 
272  if ((ret = generate_joint_tables(s)) < 0)
273  return ret;
274 
275  return (get_bits_count(&gb) + 7) / 8;
276 }
277 
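/* Pre-v2 streams carry no tables in the bitstream: the code lengths come
 * from the built-in classic_shift_* tables and the code bits from
 * classic_add_*. */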
278 static int read_old_huffman_tables(HYuvDecContext *s)
279 {
280  GetBitContext gb;
281  int i, ret;
282 
283  init_get_bits(&gb, classic_shift_luma,
284  classic_shift_luma_table_size * 8);
285  if ((ret = read_len_table(s->len[0], &gb, 256)) < 0)
286  return ret;
287 
288  init_get_bits(&gb, classic_shift_chroma,
289  classic_shift_chroma_table_size * 8);
290  if ((ret = read_len_table(s->len[1], &gb, 256)) < 0)
291  return ret;
292 
293  for (i = 0; i < 256; i++)
294  s->bits[0][i] = classic_add_luma[i];
295  for (i = 0; i < 256; i++)
296  s->bits[1][i] = classic_add_chroma[i];
297 
298  if (s->bitstream_bpp >= 24) {
299  memcpy(s->bits[1], s->bits[0], 256 * sizeof(uint32_t));
300  memcpy(s->len[1], s->len[0], 256 * sizeof(uint8_t));
301  }
302  memcpy(s->bits[2], s->bits[1], 256 * sizeof(uint32_t));
303  memcpy(s->len[2], s->len[1], 256 * sizeof(uint8_t));
304 
305  for (i = 0; i < 4; i++) {
306  ff_free_vlc(&s->vlc[i]);
307  if ((ret = init_vlc(&s->vlc[i], VLC_BITS, 256, s->len[i], 1, 1,
308  s->bits[i], 4, 4, 0)) < 0)
309  return ret;
310  }
311 
312  if ((ret = generate_joint_tables(s)) < 0)
313  return ret;
314 
315  return 0;
316 }
317 
318 static av_cold int decode_end(AVCodecContext *avctx)
319 {
320  HYuvDecContext *s = avctx->priv_data;
321  int i;
322 
323  ff_huffyuv_common_end(s->temp, s->temp16);
324  av_freep(&s->bitstream_buffer);
325 
326  for (i = 0; i < 8; i++)
327  ff_free_vlc(&s->vlc[i]);
328 
329  return 0;
330 }
331 
332 static av_cold int decode_init(AVCodecContext *avctx)
333 {
334  HYuvDecContext *s = avctx->priv_data;
335  int ret;
336 
337  ret = av_image_check_size(avctx->width, avctx->height, 0, avctx);
338  if (ret < 0)
339  return ret;
340 
341  s->flags = avctx->flags;
342 
343  ff_bswapdsp_init(&s->bdsp);
344  ff_huffyuvdsp_init(&s->hdsp, avctx->pix_fmt);
345  ff_llviddsp_init(&s->llviddsp);
346  memset(s->vlc, 0, 4 * sizeof(VLC));
347 
348  s->interlaced = avctx->height > 288;
349  s->bgr32 = 1;
350 
351  if (avctx->extradata_size) {
352  if ((avctx->bits_per_coded_sample & 7) &&
353  avctx->bits_per_coded_sample != 12)
354  s->version = 1; // do such files exist at all?
355  else if (avctx->extradata_size > 3 && avctx->extradata[3] == 0)
356  s->version = 2;
357  else
358  s->version = 3;
359  } else
360  s->version = 0;
361 
362  s->bps = 8;
363  s->n = 1<<s->bps;
364  s->vlc_n = FFMIN(s->n, MAX_VLC_N);
365  s->chroma = 1;
366  if (s->version >= 2) {
367  int method, interlace;
368 
369  if (avctx->extradata_size < 4)
370  return AVERROR_INVALIDDATA;
371 
372  method = avctx->extradata[0];
373  s->decorrelate = method & 64 ? 1 : 0;
374  s->predictor = method & 63;
375  if (s->version == 2) {
376  s->bitstream_bpp = avctx->extradata[1];
377  if (s->bitstream_bpp == 0)
378  s->bitstream_bpp = avctx->bits_per_coded_sample & ~7;
379  } else {
380  s->bps = (avctx->extradata[1] >> 4) + 1;
381  s->n = 1<<s->bps;
382  s->vlc_n = FFMIN(s->n, MAX_VLC_N);
383  s->chroma_h_shift = avctx->extradata[1] & 3;
384  s->chroma_v_shift = (avctx->extradata[1] >> 2) & 3;
385  s->yuv = !!(avctx->extradata[2] & 1);
386  s->chroma= !!(avctx->extradata[2] & 3);
387  s->alpha = !!(avctx->extradata[2] & 4);
388  }
389  interlace = (avctx->extradata[2] & 0x30) >> 4;
390  s->interlaced = (interlace == 1) ? 1 : (interlace == 2) ? 0 : s->interlaced;
391  s->context = avctx->extradata[2] & 0x40 ? 1 : 0;
392 
393  if ((ret = read_huffman_tables(s, avctx->extradata + 4,
394  avctx->extradata_size - 4)) < 0)
395  return ret;
396  } else {
397  switch (avctx->bits_per_coded_sample & 7) {
398  case 1:
399  s->predictor = LEFT;
400  s->decorrelate = 0;
401  break;
402  case 2:
403  s->predictor = LEFT;
404  s->decorrelate = 1;
405  break;
406  case 3:
407  s->predictor = PLANE;
408  s->decorrelate = avctx->bits_per_coded_sample >= 24;
409  break;
410  case 4:
411  s->predictor = MEDIAN;
412  s->decorrelate = 0;
413  break;
414  default:
415  s->predictor = LEFT; // OLD
416  s->decorrelate = 0;
417  break;
418  }
419  s->bitstream_bpp = avctx->bits_per_coded_sample & ~7;
420  s->context = 0;
421 
422  if ((ret = read_old_huffman_tables(s)) < 0)
423  return ret;
424  }
425 
426  if (s->version <= 2) {
427  switch (s->bitstream_bpp) {
428  case 12:
429  avctx->pix_fmt = AV_PIX_FMT_YUV420P;
430  s->yuv = 1;
431  break;
432  case 16:
433  if (s->yuy2)
434  avctx->pix_fmt = AV_PIX_FMT_YUYV422;
435  else
436  avctx->pix_fmt = AV_PIX_FMT_YUV422P;
437  s->yuv = 1;
438  break;
439  case 24:
440  if (s->bgr32)
441  avctx->pix_fmt = AV_PIX_FMT_0RGB32;
442  else
443  avctx->pix_fmt = AV_PIX_FMT_BGR24;
444  break;
445  case 32:
446  av_assert0(s->bgr32);
447  avctx->pix_fmt = AV_PIX_FMT_RGB32;
448  s->alpha = 1;
449  break;
450  default:
451  return AVERROR_INVALIDDATA;
452  }
453  av_pix_fmt_get_chroma_sub_sample(avctx->pix_fmt,
454  &s->chroma_h_shift,
455  &s->chroma_v_shift);
456  } else {
457  switch ( (s->chroma<<10) | (s->yuv<<9) | (s->alpha<<8) | ((s->bps-1)<<4) | s->chroma_h_shift | (s->chroma_v_shift<<2)) {
458  case 0x070:
459  avctx->pix_fmt = AV_PIX_FMT_GRAY8;
460  break;
461  case 0x0F0:
462  avctx->pix_fmt = AV_PIX_FMT_GRAY16;
463  break;
464  case 0x470:
465  avctx->pix_fmt = AV_PIX_FMT_GBRP;
466  break;
467  case 0x480:
468  avctx->pix_fmt = AV_PIX_FMT_GBRP9;
469  break;
470  case 0x490:
471  avctx->pix_fmt = AV_PIX_FMT_GBRP10;
472  break;
473  case 0x4B0:
474  avctx->pix_fmt = AV_PIX_FMT_GBRP12;
475  break;
476  case 0x4D0:
477  avctx->pix_fmt = AV_PIX_FMT_GBRP14;
478  break;
479  case 0x4F0:
480  avctx->pix_fmt = AV_PIX_FMT_GBRP16;
481  break;
482  case 0x570:
483  avctx->pix_fmt = AV_PIX_FMT_GBRAP;
484  break;
485  case 0x670:
486  avctx->pix_fmt = AV_PIX_FMT_YUV444P;
487  break;
488  case 0x680:
489  avctx->pix_fmt = AV_PIX_FMT_YUV444P9;
490  break;
491  case 0x690:
492  avctx->pix_fmt = AV_PIX_FMT_YUV444P10;
493  break;
494  case 0x6B0:
495  avctx->pix_fmt = AV_PIX_FMT_YUV444P12;
496  break;
497  case 0x6D0:
498  avctx->pix_fmt = AV_PIX_FMT_YUV444P14;
499  break;
500  case 0x6F0:
501  avctx->pix_fmt = AV_PIX_FMT_YUV444P16;
502  break;
503  case 0x671:
504  avctx->pix_fmt = AV_PIX_FMT_YUV422P;
505  break;
506  case 0x681:
507  avctx->pix_fmt = AV_PIX_FMT_YUV422P9;
508  break;
509  case 0x691:
510  avctx->pix_fmt = AV_PIX_FMT_YUV422P10;
511  break;
512  case 0x6B1:
513  avctx->pix_fmt = AV_PIX_FMT_YUV422P12;
514  break;
515  case 0x6D1:
516  avctx->pix_fmt = AV_PIX_FMT_YUV422P14;
517  break;
518  case 0x6F1:
519  avctx->pix_fmt = AV_PIX_FMT_YUV422P16;
520  break;
521  case 0x672:
522  avctx->pix_fmt = AV_PIX_FMT_YUV411P;
523  break;
524  case 0x674:
525  avctx->pix_fmt = AV_PIX_FMT_YUV440P;
526  break;
527  case 0x675:
528  avctx->pix_fmt = AV_PIX_FMT_YUV420P;
529  break;
530  case 0x685:
531  avctx->pix_fmt = AV_PIX_FMT_YUV420P9;
532  break;
533  case 0x695:
534  avctx->pix_fmt = AV_PIX_FMT_YUV420P10;
535  break;
536  case 0x6B5:
537  avctx->pix_fmt = AV_PIX_FMT_YUV420P12;
538  break;
539  case 0x6D5:
540  avctx->pix_fmt = AV_PIX_FMT_YUV420P14;
541  break;
542  case 0x6F5:
543  avctx->pix_fmt = AV_PIX_FMT_YUV420P16;
544  break;
545  case 0x67A:
546  avctx->pix_fmt = AV_PIX_FMT_YUV410P;
547  break;
548  case 0x770:
549  avctx->pix_fmt = AV_PIX_FMT_YUVA444P;
550  break;
551  case 0x780:
552  avctx->pix_fmt = AV_PIX_FMT_YUVA444P9;
553  break;
554  case 0x790:
555  avctx->pix_fmt = AV_PIX_FMT_YUVA444P10;
556  break;
557  case 0x7F0:
558  avctx->pix_fmt = AV_PIX_FMT_YUVA444P16;
559  break;
560  case 0x771:
561  avctx->pix_fmt = AV_PIX_FMT_YUVA422P;
562  break;
563  case 0x781:
564  avctx->pix_fmt = AV_PIX_FMT_YUVA422P9;
565  break;
566  case 0x791:
567  avctx->pix_fmt = AV_PIX_FMT_YUVA422P10;
568  break;
569  case 0x7F1:
570  avctx->pix_fmt = AV_PIX_FMT_YUVA422P16;
571  break;
572  case 0x775:
573  avctx->pix_fmt = AV_PIX_FMT_YUVA420P;
574  break;
575  case 0x785:
576  avctx->pix_fmt = AV_PIX_FMT_YUVA420P9;
577  break;
578  case 0x795:
579  avctx->pix_fmt = AV_PIX_FMT_YUVA420P10;
580  break;
581  case 0x7F5:
582  avctx->pix_fmt = AV_PIX_FMT_YUVA420P16;
583  break;
584  default:
585  return AVERROR_INVALIDDATA;
586  }
587  }
588 
589  if ((avctx->pix_fmt == AV_PIX_FMT_YUV422P || avctx->pix_fmt == AV_PIX_FMT_YUV420P) && avctx->width & 1) {
590  av_log(avctx, AV_LOG_ERROR, "width must be even for this colorspace\n");
591  return AVERROR_INVALIDDATA;
592  }
593  if (s->predictor == MEDIAN && avctx->pix_fmt == AV_PIX_FMT_YUV422P &&
594  avctx->width % 4) {
595  av_log(avctx, AV_LOG_ERROR, "width must be a multiple of 4 "
596  "for this combination of colorspace and predictor type.\n");
597  return AVERROR_INVALIDDATA;
598  }
599 
600  if ((ret = ff_huffyuv_alloc_temp(s->temp, s->temp16, avctx->width)) < 0)
601  return ret;
602 
603  return 0;
604 }
605 
606 /** Subset of GET_VLC for use in hand-rolled VLC code */
607 #define VLC_INTERN(dst, table, gb, name, bits, max_depth) \
608  code = table[index].sym; \
609  n = table[index].len; \
610  if (max_depth > 1 && n < 0) { \
611  LAST_SKIP_BITS(name, gb, bits); \
612  UPDATE_CACHE(name, gb); \
613  \
614  nb_bits = -n; \
615  index = SHOW_UBITS(name, gb, nb_bits) + code; \
616  code = table[index].sym; \
617  n = table[index].len; \
618  if (max_depth > 2 && n < 0) { \
619  LAST_SKIP_BITS(name, gb, nb_bits); \
620  UPDATE_CACHE(name, gb); \
621  \
622  nb_bits = -n; \
623  index = SHOW_UBITS(name, gb, nb_bits) + code; \
624  code = table[index].sym; \
625  n = table[index].len; \
626  } \
627  } \
628  dst = code; \
629  LAST_SKIP_BITS(name, gb, n)
630 
631 
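/* Decode two symbols at once: try the joint table (dtable) first and fall
 * back to two single-symbol reads from table1/table2 when the pair is not
 * found in its first level. */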
632 #define GET_VLC_DUAL(dst0, dst1, name, gb, dtable, table1, table2, \
633  bits, max_depth, OP) \
634  do { \
635  unsigned int index = SHOW_UBITS(name, gb, bits); \
636  int code, n = dtable[index].len; \
637  \
638  if (n<=0) { \
639  int nb_bits; \
640  VLC_INTERN(dst0, table1, gb, name, bits, max_depth); \
641  \
642  UPDATE_CACHE(re, gb); \
643  index = SHOW_UBITS(name, gb, bits); \
644  VLC_INTERN(dst1, table2, gb, name, bits, max_depth); \
645  } else { \
646  code = dtable[index].sym; \
647  OP(dst0, dst1, code); \
648  LAST_SKIP_BITS(name, gb, n); \
649  } \
650  } while (0)
651 
652 #define OP8bits(dst0, dst1, code) dst0 = code>>8; dst1 = code
653 
654 #define READ_2PIX(dst0, dst1, plane1) \
655  UPDATE_CACHE(re, &s->gb); \
656  GET_VLC_DUAL(dst0, dst1, re, &s->gb, s->vlc[4+plane1].table, \
657  s->vlc[0].table, s->vlc[plane1].table, VLC_BITS, 3, OP8bits)
658 
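/* Decode one 4:2:2 row into s->temp[0..2]; when the bit buffer may run
 * out, the tail of the row is decoded with per-pair checks and any
 * remainder is zero-filled. */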
659 static void decode_422_bitstream(HYuvDecContext *s, int count)
660 {
661  int i, icount;
662  OPEN_READER(re, &s->gb);
663  count /= 2;
664 
665  icount = get_bits_left(&s->gb) / (32 * 4);
666  if (count >= icount) {
667  for (i = 0; i < icount; i++) {
668  READ_2PIX(s->temp[0][2 * i], s->temp[1][i], 1);
669  READ_2PIX(s->temp[0][2 * i + 1], s->temp[2][i], 2);
670  }
671  for (; i < count && BITS_LEFT(re, &s->gb) > 0; i++) {
672  READ_2PIX(s->temp[0][2 * i ], s->temp[1][i], 1);
673  if (BITS_LEFT(re, &s->gb) <= 0) break;
674  READ_2PIX(s->temp[0][2 * i + 1], s->temp[2][i], 2);
675  }
676  for (; i < count; i++)
677  s->temp[0][2 * i ] = s->temp[1][i] =
678  s->temp[0][2 * i + 1] = s->temp[2][i] = 0;
679  } else {
680  for (i = 0; i < count; i++) {
681  READ_2PIX(s->temp[0][2 * i], s->temp[1][i], 1);
682  READ_2PIX(s->temp[0][2 * i + 1], s->temp[2][i], 2);
683  }
684  }
685  CLOSE_READER(re, &s->gb);
686 }
687 
688 #define READ_2PIX_PLANE(dst0, dst1, plane, OP) \
689  UPDATE_CACHE(re, &s->gb); \
690  GET_VLC_DUAL(dst0, dst1, re, &s->gb, s->vlc[4+plane].table, \
691  s->vlc[plane].table, s->vlc[plane].table, VLC_BITS, 3, OP)
692 
693 #define OP14bits(dst0, dst1, code) dst0 = code>>8; dst1 = sign_extend(code, 8)
694 
695 /* TODO instead of restarting the read when the code isn't in the first level
696  * of the joint table, jump into the 2nd level of the individual table. */
697 #define READ_2PIX_PLANE16(dst0, dst1, plane){\
698  dst0 = get_vlc2(&s->gb, s->vlc[plane].table, VLC_BITS, 3)<<2;\
699  dst0 += get_bits(&s->gb, 2);\
700  dst1 = get_vlc2(&s->gb, s->vlc[plane].table, VLC_BITS, 3)<<2;\
701  dst1 += get_bits(&s->gb, 2);\
702 }
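/* Decode one row of a single plane into s->temp[0] (8-bit) or s->temp16[0]
 * (deeper formats), two samples per read; for more than 14 bits per sample
 * each value is a VLC for the high bits plus 2 raw low bits. */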
703 static void decode_plane_bitstream(HYuvDecContext *s, int width, int plane)
704 {
705  int i, count = width/2;
706 
707  if (s->bps <= 8) {
708  OPEN_READER(re, &s->gb);
709  if (count >= (get_bits_left(&s->gb)) / (32 * 2)) {
710  for (i = 0; i < count && BITS_LEFT(re, &s->gb) > 0; i++) {
711  READ_2PIX_PLANE(s->temp[0][2 * i], s->temp[0][2 * i + 1], plane, OP8bits);
712  }
713  } else {
714  for(i=0; i<count; i++){
715  READ_2PIX_PLANE(s->temp[0][2 * i], s->temp[0][2 * i + 1], plane, OP8bits);
716  }
717  }
718  if( width&1 && BITS_LEFT(re, &s->gb)>0 ) {
719  unsigned int index;
720  int nb_bits, code, n;
721  UPDATE_CACHE(re, &s->gb);
722  index = SHOW_UBITS(re, &s->gb, VLC_BITS);
723  VLC_INTERN(s->temp[0][width-1], s->vlc[plane].table,
724  &s->gb, re, VLC_BITS, 3);
725  }
726  CLOSE_READER(re, &s->gb);
727  } else if (s->bps <= 14) {
728  OPEN_READER(re, &s->gb);
729  if (count >= (get_bits_left(&s->gb)) / (32 * 2)) {
730  for (i = 0; i < count && BITS_LEFT(re, &s->gb) > 0; i++) {
731  READ_2PIX_PLANE(s->temp16[0][2 * i], s->temp16[0][2 * i + 1], plane, OP14bits);
732  }
733  } else {
734  for(i=0; i<count; i++){
735  READ_2PIX_PLANE(s->temp16[0][2 * i], s->temp16[0][2 * i + 1], plane, OP14bits);
736  }
737  }
738  if( width&1 && BITS_LEFT(re, &s->gb)>0 ) {
739  unsigned int index;
740  int nb_bits, code, n;
741  UPDATE_CACHE(re, &s->gb);
742  index = SHOW_UBITS(re, &s->gb, VLC_BITS);
743  VLC_INTERN(s->temp16[0][width-1], s->vlc[plane].table,
744  &s->gb, re, VLC_BITS, 3);
745  }
746  CLOSE_READER(re, &s->gb);
747  } else {
748  if (count >= (get_bits_left(&s->gb)) / (32 * 2)) {
749  for (i = 0; i < count && get_bits_left(&s->gb) > 0; i++) {
750  READ_2PIX_PLANE16(s->temp16[0][2 * i], s->temp16[0][2 * i + 1], plane);
751  }
752  } else {
753  for(i=0; i<count; i++){
754  READ_2PIX_PLANE16(s->temp16[0][2 * i], s->temp16[0][2 * i + 1], plane);
755  }
756  }
757  if( width&1 && get_bits_left(&s->gb)>0 ) {
758  int dst = get_vlc2(&s->gb, s->vlc[plane].table, VLC_BITS, 3)<<2;
759  s->temp16[0][width-1] = dst + get_bits(&s->gb, 2);
760  }
761  }
762 }
763 
764 static void decode_gray_bitstream(HYuvDecContext *s, int count)
765 {
766  int i;
767  OPEN_READER(re, &s->gb);
768  count /= 2;
769 
770  if (count >= (get_bits_left(&s->gb)) / (32 * 2)) {
771  for (i = 0; i < count && BITS_LEFT(re, &s->gb) > 0; i++) {
772  READ_2PIX(s->temp[0][2 * i], s->temp[0][2 * i + 1], 0);
773  }
774  } else {
775  for (i = 0; i < count; i++) {
776  READ_2PIX(s->temp[0][2 * i], s->temp[0][2 * i + 1], 0);
777  }
778  }
779  CLOSE_READER(re, &s->gb);
780 }
781 
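/* Decode count packed BGR(A) pixels: the joint table yields a whole
 * pre-mapped pixel per hit, otherwise the three channel codes are read
 * separately, adding G back to B and R in decorrelated mode. */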
782 static av_always_inline void decode_bgr_1(HYuvDecContext *s, int count,
783  int decorrelate, int alpha)
784 {
785  int i;
786  OPEN_READER(re, &s->gb);
787 
788  for (i = 0; i < count && BITS_LEFT(re, &s->gb) > 0; i++) {
789  unsigned int index;
790  int code, n, nb_bits;
791 
792  UPDATE_CACHE(re, &s->gb);
793  index = SHOW_UBITS(re, &s->gb, VLC_BITS);
794  n = s->vlc[4].table[index].len;
795 
796  if (n>0) {
797  code = s->vlc[4].table[index].sym;
798  *(uint32_t *) &s->temp[0][4 * i] = s->pix_bgr_map[code];
799  LAST_SKIP_BITS(re, &s->gb, n);
800  } else {
801  if (decorrelate) {
802  VLC_INTERN(s->temp[0][4 * i + G], s->vlc[1].table,
803  &s->gb, re, VLC_BITS, 3);
804 
805  UPDATE_CACHE(re, &s->gb);
806  index = SHOW_UBITS(re, &s->gb, VLC_BITS);
807  VLC_INTERN(code, s->vlc[0].table, &s->gb, re, VLC_BITS, 3);
808  s->temp[0][4 * i + B] = code + s->temp[0][4 * i + G];
809 
810  UPDATE_CACHE(re, &s->gb);
811  index = SHOW_UBITS(re, &s->gb, VLC_BITS);
812  VLC_INTERN(code, s->vlc[2].table, &s->gb, re, VLC_BITS, 3);
813  s->temp[0][4 * i + R] = code + s->temp[0][4 * i + G];
814  } else {
815  VLC_INTERN(s->temp[0][4 * i + B], s->vlc[0].table,
816  &s->gb, re, VLC_BITS, 3);
817 
818  UPDATE_CACHE(re, &s->gb);
819  index = SHOW_UBITS(re, &s->gb, VLC_BITS);
820  VLC_INTERN(s->temp[0][4 * i + G], s->vlc[1].table,
821  &s->gb, re, VLC_BITS, 3);
822 
823  UPDATE_CACHE(re, &s->gb);
824  index = SHOW_UBITS(re, &s->gb, VLC_BITS);
825  VLC_INTERN(s->temp[0][4 * i + R], s->vlc[2].table,
826  &s->gb, re, VLC_BITS, 3);
827  }
828  }
829  if (alpha) {
830  UPDATE_CACHE(re, &s->gb);
831  index = SHOW_UBITS(re, &s->gb, VLC_BITS);
832  VLC_INTERN(s->temp[0][4 * i + A], s->vlc[2].table,
833  &s->gb, re, VLC_BITS, 3);
834  } else
835  s->temp[0][4 * i + A] = 0;
836  }
837  CLOSE_READER(re, &s->gb);
838 }
839 
840 static void decode_bgr_bitstream(HYuvDecContext *s, int count)
841 {
842  if (s->decorrelate) {
843  if (s->bitstream_bpp == 24)
844  decode_bgr_1(s, count, 1, 0);
845  else
846  decode_bgr_1(s, count, 1, 1);
847  } else {
848  if (s->bitstream_bpp == 24)
849  decode_bgr_1(s, count, 0, 0);
850  else
851  decode_bgr_1(s, count, 0, 1);
852  }
853 }
854 
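/* Report the rows finished since the last call through draw_horiz_band(),
 * halving the chroma row offset for 4:2:0 (bitstream_bpp == 12). */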
855 static void draw_slice(HYuvDecContext *s, AVCodecContext *avctx, AVFrame *frame, int y)
856 {
857  int h, cy, i;
858  int offset[AV_NUM_DATA_POINTERS];
859 
860  if (!avctx->draw_horiz_band)
861  return;
862 
863  h = y - s->last_slice_end;
864  y -= h;
865 
866  if (s->bitstream_bpp == 12)
867  cy = y >> 1;
868  else
869  cy = y;
870 
871  offset[0] = frame->linesize[0] * y;
872  offset[1] = frame->linesize[1] * cy;
873  offset[2] = frame->linesize[2] * cy;
874  for (i = 3; i < AV_NUM_DATA_POINTERS; i++)
875  offset[i] = 0;
876  emms_c();
877 
878  avctx->draw_horiz_band(avctx, frame, offset, y, 3, h);
879 
880  s->last_slice_end = y + h;
881 }
882 
883 static int left_prediction(HYuvDecContext *s, uint8_t *dst, const uint8_t *src, int w, int acc)
884 {
885  if (s->bps <= 8) {
886  return s->llviddsp.add_left_pred(dst, src, w, acc);
887  } else {
888  return s->llviddsp.add_left_pred_int16(( uint16_t *)dst, (const uint16_t *)src, s->n-1, w, acc);
889  }
890 }
891 
892 static void add_bytes(HYuvDecContext *s, uint8_t *dst, uint8_t *src, int w)
893 {
894  if (s->bps <= 8) {
895  s->llviddsp.add_bytes(dst, src, w);
896  } else {
897  s->hdsp.add_int16((uint16_t*)dst, (const uint16_t*)src, s->n - 1, w);
898  }
899 }
900 
901 static void add_median_prediction(HYuvDecContext *s, uint8_t *dst, const uint8_t *src, const uint8_t *diff, int w, int *left, int *left_top)
902 {
903  if (s->bps <= 8) {
904  s->llviddsp.add_median_pred(dst, src, diff, w, left, left_top);
905  } else {
906  s->hdsp.add_hfyu_median_pred_int16((uint16_t *)dst, (const uint16_t *)src, (const uint16_t *)diff, s->n-1, w, left, left_top);
907  }
908 }
909 
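/* Decode one slice: v3+ streams are handled plane by plane, older YUV
 * streams interleave luma and chroma rows, and RGB streams are stored
 * bottom-up; left/plane/median prediction is undone as rows are produced. */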
910 static int decode_slice(AVCodecContext *avctx, AVFrame *p, int height,
911  int buf_size, int y_offset, int table_size)
912 {
913  HYuvDecContext *s = avctx->priv_data;
914  int fake_ystride, fake_ustride, fake_vstride;
915  const int width = avctx->width;
916  const int width2 = avctx->width >> 1;
917  int ret;
918 
919  if ((ret = init_get_bits8(&s->gb, s->bitstream_buffer + table_size, buf_size - table_size)) < 0)
920  return ret;
921 
922  fake_ystride = s->interlaced ? p->linesize[0] * 2 : p->linesize[0];
923  fake_ustride = s->interlaced ? p->linesize[1] * 2 : p->linesize[1];
924  fake_vstride = s->interlaced ? p->linesize[2] * 2 : p->linesize[2];
925 
926  if (s->version > 2) {
927  int plane;
928  for(plane = 0; plane < 1 + 2*s->chroma + s->alpha; plane++) {
929  int left, lefttop, y;
930  int w = width;
931  int h = height;
932  int fake_stride = fake_ystride;
933 
934  if (s->chroma && (plane == 1 || plane == 2)) {
935  w >>= s->chroma_h_shift;
936  h >>= s->chroma_v_shift;
937  fake_stride = plane == 1 ? fake_ustride : fake_vstride;
938  }
939 
940  switch (s->predictor) {
941  case LEFT:
942  case PLANE:
943  decode_plane_bitstream(s, w, plane);
944  left = left_prediction(s, p->data[plane], s->temp[0], w, 0);
945 
946  for (y = 1; y < h; y++) {
947  uint8_t *dst = p->data[plane] + p->linesize[plane]*y;
948 
949  decode_plane_bitstream(s, w, plane);
950  left = left_prediction(s, dst, s->temp[0], w, left);
951  if (s->predictor == PLANE) {
952  if (y > s->interlaced) {
953  add_bytes(s, dst, dst - fake_stride, w);
954  }
955  }
956  }
957 
958  break;
959  case MEDIAN:
960  decode_plane_bitstream(s, w, plane);
961  left= left_prediction(s, p->data[plane], s->temp[0], w, 0);
962 
963  y = 1;
964  if (y >= h)
965  break;
966 
967  /* second line is left predicted for interlaced case */
968  if (s->interlaced) {
969  decode_plane_bitstream(s, w, plane);
970  left = left_prediction(s, p->data[plane] + p->linesize[plane], s->temp[0], w, left);
971  y++;
972  if (y >= h)
973  break;
974  }
975 
976  lefttop = p->data[plane][0];
977  decode_plane_bitstream(s, w, plane);
978  add_median_prediction(s, p->data[plane] + fake_stride, p->data[plane], s->temp[0], w, &left, &lefttop);
979  y++;
980 
981  for (; y<h; y++) {
982  uint8_t *dst;
983 
984  decode_plane_bitstream(s, w, plane);
985 
986  dst = p->data[plane] + p->linesize[plane] * y;
987 
988  add_median_prediction(s, dst, dst - fake_stride, s->temp[0], w, &left, &lefttop);
989  }
990 
991  break;
992  }
993  }
994  draw_slice(s, avctx, p, height);
995  } else if (s->bitstream_bpp < 24) {
996  int y, cy;
997  int lefty, leftu, leftv;
998  int lefttopy, lefttopu, lefttopv;
999 
1000  if (s->yuy2) {
1001  p->data[0][3] = get_bits(&s->gb, 8);
1002  p->data[0][2] = get_bits(&s->gb, 8);
1003  p->data[0][1] = get_bits(&s->gb, 8);
1004  p->data[0][0] = get_bits(&s->gb, 8);
1005 
1006  av_log(avctx, AV_LOG_ERROR,
1007  "YUY2 output is not implemented yet\n");
1008  return AVERROR_PATCHWELCOME;
1009  } else {
1010  leftv =
1011  p->data[2][0 + y_offset * p->linesize[2]] = get_bits(&s->gb, 8);
1012  lefty =
1013  p->data[0][1 + y_offset * p->linesize[0]] = get_bits(&s->gb, 8);
1014  leftu =
1015  p->data[1][0 + y_offset * p->linesize[1]] = get_bits(&s->gb, 8);
1016  p->data[0][0 + y_offset * p->linesize[0]] = get_bits(&s->gb, 8);
1017 
1018  switch (s->predictor) {
1019  case LEFT:
1020  case PLANE:
1021  decode_422_bitstream(s, width);
1022  lefty = s->llviddsp.add_left_pred(p->data[0] + p->linesize[0] * y_offset + 2, s->temp[0],
1023  width - 2, lefty);
1024  if (!(s->flags & AV_CODEC_FLAG_GRAY)) {
1025  leftu = s->llviddsp.add_left_pred(p->data[1] + p->linesize[1] * y_offset + 1, s->temp[1], width2 - 1, leftu);
1026  leftv = s->llviddsp.add_left_pred(p->data[2] + p->linesize[2] * y_offset + 1, s->temp[2], width2 - 1, leftv);
1027  }
1028 
1029  for (cy = y = 1; y < height; y++, cy++) {
1030  uint8_t *ydst, *udst, *vdst;
1031 
1032  if (s->bitstream_bpp == 12) {
1033  decode_gray_bitstream(s, width);
1034 
1035  ydst = p->data[0] + p->linesize[0] * (y + y_offset);
1036 
1037  lefty = s->llviddsp.add_left_pred(ydst, s->temp[0],
1038  width, lefty);
1039  if (s->predictor == PLANE) {
1040  if (y > s->interlaced)
1041  s->llviddsp.add_bytes(ydst, ydst - fake_ystride, width);
1042  }
1043  y++;
1044  if (y >= height)
1045  break;
1046  }
1047 
1048  draw_slice(s, avctx, p, y);
1049 
1050  ydst = p->data[0] + p->linesize[0] * (y + y_offset);
1051  udst = p->data[1] + p->linesize[1] * (cy + y_offset);
1052  vdst = p->data[2] + p->linesize[2] * (cy + y_offset);
1053 
1054  decode_422_bitstream(s, width);
1055  lefty = s->llviddsp.add_left_pred(ydst, s->temp[0],
1056  width, lefty);
1057  if (!(s->flags & AV_CODEC_FLAG_GRAY)) {
1058  leftu = s->llviddsp.add_left_pred(udst, s->temp[1], width2, leftu);
1059  leftv = s->llviddsp.add_left_pred(vdst, s->temp[2], width2, leftv);
1060  }
1061  if (s->predictor == PLANE) {
1062  if (cy > s->interlaced) {
1063  s->llviddsp.add_bytes(ydst, ydst - fake_ystride, width);
1064  if (!(s->flags & AV_CODEC_FLAG_GRAY)) {
1065  s->llviddsp.add_bytes(udst, udst - fake_ustride, width2);
1066  s->llviddsp.add_bytes(vdst, vdst - fake_vstride, width2);
1067  }
1068  }
1069  }
1070  }
1071  draw_slice(s, avctx, p, height);
1072 
1073  break;
1074  case MEDIAN:
1075  /* first line except first 2 pixels is left predicted */
1076  decode_422_bitstream(s, width - 2);
1077  lefty = s->llviddsp.add_left_pred(p->data[0] + 2, s->temp[0],
1078  width - 2, lefty);
1079  if (!(s->flags & AV_CODEC_FLAG_GRAY)) {
1080  leftu = s->llviddsp.add_left_pred(p->data[1] + 1, s->temp[1], width2 - 1, leftu);
1081  leftv = s->llviddsp.add_left_pred(p->data[2] + 1, s->temp[2], width2 - 1, leftv);
1082  }
1083 
1084  cy = y = 1;
1085  if (y >= height)
1086  break;
1087 
1088  /* second line is left predicted for interlaced case */
1089  if (s->interlaced) {
1090  decode_422_bitstream(s, width);
1091  lefty = s->llviddsp.add_left_pred(p->data[0] + p->linesize[0],
1092  s->temp[0], width, lefty);
1093  if (!(s->flags & AV_CODEC_FLAG_GRAY)) {
1094  leftu = s->llviddsp.add_left_pred(p->data[1] + p->linesize[2], s->temp[1], width2, leftu);
1095  leftv = s->llviddsp.add_left_pred(p->data[2] + p->linesize[1], s->temp[2], width2, leftv);
1096  }
1097  y++;
1098  cy++;
1099  if (y >= height)
1100  break;
1101  }
1102 
1103  /* next 4 pixels are left predicted too */
1104  decode_422_bitstream(s, 4);
1105  lefty = s->llviddsp.add_left_pred(p->data[0] + fake_ystride,
1106  s->temp[0], 4, lefty);
1107  if (!(s->flags & AV_CODEC_FLAG_GRAY)) {
1108  leftu = s->llviddsp.add_left_pred(p->data[1] + fake_ustride, s->temp[1], 2, leftu);
1109  leftv = s->llviddsp.add_left_pred(p->data[2] + fake_vstride, s->temp[2], 2, leftv);
1110  }
1111 
1112  /* next line except the first 4 pixels is median predicted */
1113  lefttopy = p->data[0][3];
1114  decode_422_bitstream(s, width - 4);
1115  s->llviddsp.add_median_pred(p->data[0] + fake_ystride + 4,
1116  p->data[0] + 4, s->temp[0],
1117  width - 4, &lefty, &lefttopy);
1118  if (!(s->flags & AV_CODEC_FLAG_GRAY)) {
1119  lefttopu = p->data[1][1];
1120  lefttopv = p->data[2][1];
1121  s->llviddsp.add_median_pred(p->data[1] + fake_ustride + 2, p->data[1] + 2, s->temp[1], width2 - 2, &leftu, &lefttopu);
1122  s->llviddsp.add_median_pred(p->data[2] + fake_vstride + 2, p->data[2] + 2, s->temp[2], width2 - 2, &leftv, &lefttopv);
1123  }
1124  y++;
1125  cy++;
1126 
1127  for (; y < height; y++, cy++) {
1128  uint8_t *ydst, *udst, *vdst;
1129 
1130  if (s->bitstream_bpp == 12) {
1131  while (2 * cy > y) {
1132  decode_gray_bitstream(s, width);
1133  ydst = p->data[0] + p->linesize[0] * y;
1134  s->llviddsp.add_median_pred(ydst, ydst - fake_ystride,
1135  s->temp[0], width,
1136  &lefty, &lefttopy);
1137  y++;
1138  }
1139  if (y >= height)
1140  break;
1141  }
1142  draw_slice(s, avctx, p, y);
1143 
1144  decode_422_bitstream(s, width);
1145 
1146  ydst = p->data[0] + p->linesize[0] * y;
1147  udst = p->data[1] + p->linesize[1] * cy;
1148  vdst = p->data[2] + p->linesize[2] * cy;
1149 
1150  s->llviddsp.add_median_pred(ydst, ydst - fake_ystride,
1151  s->temp[0], width,
1152  &lefty, &lefttopy);
1153  if (!(s->flags & AV_CODEC_FLAG_GRAY)) {
1154  s->llviddsp.add_median_pred(udst, udst - fake_ustride, s->temp[1], width2, &leftu, &lefttopu);
1155  s->llviddsp.add_median_pred(vdst, vdst - fake_vstride, s->temp[2], width2, &leftv, &lefttopv);
1156  }
1157  }
1158 
1159  draw_slice(s, avctx, p, height);
1160  break;
1161  }
1162  }
1163  } else {
1164  int y;
1165  uint8_t left[4];
1166  const int last_line = (y_offset + height - 1) * p->linesize[0];
1167 
1168  if (s->bitstream_bpp == 32) {
1169  left[A] = p->data[0][last_line + A] = get_bits(&s->gb, 8);
1170  left[R] = p->data[0][last_line + R] = get_bits(&s->gb, 8);
1171  left[G] = p->data[0][last_line + G] = get_bits(&s->gb, 8);
1172  left[B] = p->data[0][last_line + B] = get_bits(&s->gb, 8);
1173  } else {
1174  left[R] = p->data[0][last_line + R] = get_bits(&s->gb, 8);
1175  left[G] = p->data[0][last_line + G] = get_bits(&s->gb, 8);
1176  left[B] = p->data[0][last_line + B] = get_bits(&s->gb, 8);
1177  left[A] = p->data[0][last_line + A] = 255;
1178  skip_bits(&s->gb, 8);
1179  }
1180 
1181  if (s->bgr32) {
1182  switch (s->predictor) {
1183  case LEFT:
1184  case PLANE:
1185  decode_bgr_bitstream(s, width - 1);
1186  s->hdsp.add_hfyu_left_pred_bgr32(p->data[0] + last_line + 4,
1187  s->temp[0], width - 1, left);
1188 
1189  for (y = height - 2; y >= 0; y--) { // Yes it is stored upside down.
1190  decode_bgr_bitstream(s, width);
1191 
1192  s->hdsp.add_hfyu_left_pred_bgr32(p->data[0] + p->linesize[0] * (y + y_offset),
1193  s->temp[0], width, left);
1194  if (s->predictor == PLANE) {
1195  if (s->bitstream_bpp != 32)
1196  left[A] = 0;
1197  if (y < height - 1 - s->interlaced) {
1198  s->llviddsp.add_bytes(p->data[0] + p->linesize[0] * (y + y_offset),
1199  p->data[0] + p->linesize[0] * (y + y_offset) +
1200  fake_ystride, 4 * width);
1201  }
1202  }
1203  }
1204  // just 1 large slice as this is not possible in reverse order
1205  draw_slice(s, avctx, p, height);
1206  break;
1207  default:
1208  av_log(avctx, AV_LOG_ERROR,
1209  "prediction type not supported!\n");
1210  }
1211  } else {
1212  av_log(avctx, AV_LOG_ERROR,
1213  "BGR24 output is not implemented yet\n");
1214  return AVERROR_PATCHWELCOME;
1215  }
1216  }
1217 
1218  return 0;
1219 }
1220 
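/* Byte-swap the packet into a padded buffer, re-read the Huffman tables if
 * adaptive context is enabled, then decode the frame as a single slice or,
 * for HYMT, as the list of slices appended at the end of the packet. */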
1221 static int decode_frame(AVCodecContext *avctx, AVFrame *p,
1222  int *got_frame, AVPacket *avpkt)
1223 {
1224  const uint8_t *buf = avpkt->data;
1225  int buf_size = avpkt->size;
1226  HYuvDecContext *s = avctx->priv_data;
1227  const int width = avctx->width;
1228  const int height = avctx->height;
1229  int slice, table_size = 0, ret, nb_slices;
1230  unsigned slices_info_offset;
1231  int slice_height;
1232 
1233  if (buf_size < (width * height + 7)/8)
1234  return AVERROR_INVALIDDATA;
1235 
1236  av_fast_padded_malloc(&s->bitstream_buffer,
1237  &s->bitstream_buffer_size,
1238  buf_size);
1239  if (!s->bitstream_buffer)
1240  return AVERROR(ENOMEM);
1241 
1242  s->bdsp.bswap_buf((uint32_t *) s->bitstream_buffer,
1243  (const uint32_t *) buf, buf_size / 4);
1244 
1245  if ((ret = ff_thread_get_buffer(avctx, p, 0)) < 0)
1246  return ret;
1247 
1248  if (s->context) {
1249  table_size = read_huffman_tables(s, s->bitstream_buffer, buf_size);
1250  if (table_size < 0)
1251  return table_size;
1252  }
1253 
1254  if ((unsigned) (buf_size - table_size) >= INT_MAX / 8)
1255  return AVERROR_INVALIDDATA;
1256 
1257  s->last_slice_end = 0;
1258 
1259  if (avctx->codec_id == AV_CODEC_ID_HYMT &&
1260  (buf_size > 32 && AV_RL32(avpkt->data + buf_size - 16) == 0)) {
1261  slices_info_offset = AV_RL32(avpkt->data + buf_size - 4);
1262  slice_height = AV_RL32(avpkt->data + buf_size - 8);
1263  nb_slices = AV_RL32(avpkt->data + buf_size - 12);
1264  if (nb_slices * 8LL + slices_info_offset > buf_size - 16 ||
1265  s->chroma_v_shift ||
1266  slice_height <= 0 || nb_slices * (uint64_t)slice_height > height)
1267  return AVERROR_INVALIDDATA;
1268  } else {
1269  slice_height = height;
1270  nb_slices = 1;
1271  }
1272 
1273  for (slice = 0; slice < nb_slices; slice++) {
1274  int y_offset, slice_offset, slice_size;
1275 
1276  if (nb_slices > 1) {
1277  slice_offset = AV_RL32(avpkt->data + slices_info_offset + slice * 8);
1278  slice_size = AV_RL32(avpkt->data + slices_info_offset + slice * 8 + 4);
1279 
1280  if (slice_offset < 0 || slice_size <= 0 || (slice_offset&3) ||
1281  slice_offset + (int64_t)slice_size > buf_size)
1282  return AVERROR_INVALIDDATA;
1283 
1284  y_offset = height - (slice + 1) * slice_height;
1285  s->bdsp.bswap_buf((uint32_t *)s->bitstream_buffer,
1286  (const uint32_t *)(buf + slice_offset), slice_size / 4);
1287  } else {
1288  y_offset = 0;
1289  slice_offset = 0;
1290  slice_size = buf_size;
1291  }
1292 
1293  ret = decode_slice(avctx, p, slice_height, slice_size, y_offset, table_size);
1294  emms_c();
1295  if (ret < 0)
1296  return ret;
1297  }
1298 
1299  *got_frame = 1;
1300 
1301  return (get_bits_count(&s->gb) + 31) / 32 * 4 + table_size;
1302 }
1303 
1304 const FFCodec ff_huffyuv_decoder = {
1305  .p.name = "huffyuv",
1306  CODEC_LONG_NAME("Huffyuv / HuffYUV"),
1307  .p.type = AVMEDIA_TYPE_VIDEO,
1308  .p.id = AV_CODEC_ID_HUFFYUV,
1309  .priv_data_size = sizeof(HYuvDecContext),
1310  .init = decode_init,
1311  .close = decode_end,
1312  FF_CODEC_DECODE_CB(decode_frame),
1313  .p.capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_DRAW_HORIZ_BAND |
1314  AV_CODEC_CAP_FRAME_THREADS,
1315  .caps_internal = FF_CODEC_CAP_INIT_CLEANUP,
1316 };
1317 
1318 #if CONFIG_FFVHUFF_DECODER
1319 const FFCodec ff_ffvhuff_decoder = {
1320  .p.name = "ffvhuff",
1321  CODEC_LONG_NAME("Huffyuv FFmpeg variant"),
1322  .p.type = AVMEDIA_TYPE_VIDEO,
1323  .p.id = AV_CODEC_ID_FFVHUFF,
1324  .priv_data_size = sizeof(HYuvDecContext),
1325  .init = decode_init,
1326  .close = decode_end,
1327  FF_CODEC_DECODE_CB(decode_frame),
1328  .p.capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_DRAW_HORIZ_BAND |
1329  AV_CODEC_CAP_FRAME_THREADS,
1330  .caps_internal = FF_CODEC_CAP_INIT_CLEANUP,
1331 };
1332 #endif /* CONFIG_FFVHUFF_DECODER */
1333 
1334 #if CONFIG_HYMT_DECODER
1335 const FFCodec ff_hymt_decoder = {
1336  .p.name = "hymt",
1337  CODEC_LONG_NAME("HuffYUV MT"),
1338  .p.type = AVMEDIA_TYPE_VIDEO,
1339  .p.id = AV_CODEC_ID_HYMT,
1340  .priv_data_size = sizeof(HYuvDecContext),
1341  .init = decode_init,
1342  .close = decode_end,
1343  FF_CODEC_DECODE_CB(decode_frame),
1344  .p.capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_DRAW_HORIZ_BAND |
1345  AV_CODEC_CAP_FRAME_THREADS,
1346  .caps_internal = FF_CODEC_CAP_INIT_CLEANUP,
1347 };
1348 #endif /* CONFIG_HYMT_DECODER */
HuffYUVDSPContext
Definition: huffyuvdsp.h:25
VLC_INTERN
#define VLC_INTERN(dst, table, gb, name, bits, max_depth)
Subset of GET_VLC for use in hand-roller VLC code.
Definition: huffyuvdec.c:607
AV_PIX_FMT_YUVA422P16
#define AV_PIX_FMT_YUVA422P16
Definition: pixfmt.h:502
A
#define A(x)
Definition: vpx_arith.h:28
decode_bgr_bitstream
static void decode_bgr_bitstream(HYuvDecContext *s, int count)
Definition: huffyuvdec.c:840
bswapdsp.h
read_old_huffman_tables
static int read_old_huffman_tables(HYuvDecContext *s)
Definition: huffyuvdec.c:278
decorrelate
static void decorrelate(SnowContext *s, SubBand *b, IDWTELEM *src, int stride, int inverse, int use_median)
Definition: snowenc.c:1337
generate_joint_tables
static int generate_joint_tables(HYuvDecContext *s)
Definition: huffyuvdec.c:159
VLC_BITS
#define VLC_BITS
Definition: huffyuvdec.c:48
FF_CODEC_CAP_INIT_CLEANUP
#define FF_CODEC_CAP_INIT_CLEANUP
The codec allows calling the close function for deallocation even if the init function returned a fai...
Definition: codec_internal.h:42
get_bits_left
static int get_bits_left(GetBitContext *gb)
Definition: get_bits.h:664
r
const char * r
Definition: vf_curves.c:126
acc
int acc
Definition: yuv2rgb.c:554
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
HYuvDecContext::alpha
int alpha
Definition: huffyuvdec.c:62
HYuvDecContext::context
int context
Definition: huffyuvdec.c:68
AV_CODEC_ID_HUFFYUV
@ AV_CODEC_ID_HUFFYUV
Definition: codec_id.h:77
out
FILE * out
Definition: movenc.c:54
decode_bgr_1
static av_always_inline void decode_bgr_1(HYuvDecContext *s, int count, int decorrelate, int alpha)
Definition: huffyuvdec.c:782
AV_CODEC_ID_HYMT
@ AV_CODEC_ID_HYMT
Definition: codec_id.h:295
u
#define u(width, name, range_min, range_max)
Definition: cbs_h2645.c:262
MAX_VLC_N
#define MAX_VLC_N
Definition: huffyuv.h:50
get_bits_count
static int get_bits_count(const GetBitContext *s)
Definition: get_bits.h:256
HYuvDecContext::bgr32
int bgr32
Definition: huffyuvdec.c:58
AV_PIX_FMT_YUVA422P9
#define AV_PIX_FMT_YUVA422P9
Definition: pixfmt.h:494
HYuvDecContext::bitstream_buffer_size
unsigned int bitstream_buffer_size
Definition: huffyuvdec.c:78
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:330
pixdesc.h
AV_PIX_FMT_YUVA420P16
#define AV_PIX_FMT_YUVA420P16
Definition: pixfmt.h:501
w
uint8_t w
Definition: llviddspenc.c:38
AVPacket::data
uint8_t * data
Definition: packet.h:374
huffyuvdsp.h
AV_PIX_FMT_YUVA420P10
#define AV_PIX_FMT_YUVA420P10
Definition: pixfmt.h:496
HYuvDecContext::n
int n
Definition: huffyuvdec.c:60
add_median_prediction
static void add_median_prediction(HYuvDecContext *s, uint8_t *dst, const uint8_t *src, const uint8_t *diff, int w, int *left, int *left_top)
Definition: huffyuvdec.c:901
init_vlc
#define init_vlc(vlc, nb_bits, nb_codes, bits, bits_wrap, bits_size, codes, codes_wrap, codes_size, flags)
Definition: vlc.h:43
b
#define b
Definition: input.c:41
READ_2PIX
#define READ_2PIX(dst0, dst1, plane1)
Definition: huffyuvdec.c:654
classic_add_luma
static const unsigned char classic_add_luma[256]
Definition: huffyuvdec.c:102
R
#define R
Definition: huffyuv.h:44
AV_PIX_FMT_YUV420P10
#define AV_PIX_FMT_YUV420P10
Definition: pixfmt.h:459
FFCodec
Definition: codec_internal.h:127
HYuvDecContext::vlc_n
int vlc_n
Definition: huffyuvdec.c:61
MEDIAN
@ MEDIAN
Definition: huffyuv.h:55
AV_PIX_FMT_BGR24
@ AV_PIX_FMT_BGR24
packed RGB 8:8:8, 24bpp, BGRBGR...
Definition: pixfmt.h:69
AV_PIX_FMT_YUV440P
@ AV_PIX_FMT_YUV440P
planar YUV 4:4:0 (1 Cr & Cb sample per 1x2 Y samples)
Definition: pixfmt.h:99
decode_init
static av_cold int decode_init(AVCodecContext *avctx)
Definition: huffyuvdec.c:332
UPDATE_CACHE
#define UPDATE_CACHE(name, gb)
Definition: get_bits.h:216
OP8bits
#define OP8bits(dst0, dst1, code)
Definition: huffyuvdec.c:652
OP14bits
#define OP14bits(dst0, dst1, code)
Definition: huffyuvdec.c:693
add_bytes
static void add_bytes(HYuvDecContext *s, uint8_t *dst, uint8_t *src, int w)
Definition: huffyuvdec.c:892
HYuvDecContext::version
int version
Definition: huffyuvdec.c:56
AV_PIX_FMT_YUVA422P10
#define AV_PIX_FMT_YUVA422P10
Definition: pixfmt.h:497
HYuvDecContext::bits
uint32_t bits[4][MAX_VLC_N]
Definition: huffyuvdec.c:74
init_get_bits
static int init_get_bits(GetBitContext *s, const uint8_t *buffer, int bit_size)
Initialize GetBitContext.
Definition: get_bits.h:493
thread.h
AVFrame::data
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:351
skip_bits
static void skip_bits(GetBitContext *s, int n)
Definition: get_bits.h:371
AV_PIX_FMT_YUVA420P9
#define AV_PIX_FMT_YUVA420P9
Definition: pixfmt.h:493
get_bits
static unsigned int get_bits(GetBitContext *s, int n)
Read 1-25 bits.
Definition: get_bits.h:325
FFCodec::p
AVCodec p
The public AVCodec.
Definition: codec_internal.h:131
AV_PIX_FMT_GBRP14
#define AV_PIX_FMT_GBRP14
Definition: pixfmt.h:477
AV_PIX_FMT_GBRAP
@ AV_PIX_FMT_GBRAP
planar GBRA 4:4:4:4 32bpp
Definition: pixfmt.h:205
HYuvDecContext::predictor
Predictor predictor
Definition: huffyuvdec.c:52
AV_PIX_FMT_GBRP10
#define AV_PIX_FMT_GBRP10
Definition: pixfmt.h:475
AV_PIX_FMT_YUVA444P16
#define AV_PIX_FMT_YUVA444P16
Definition: pixfmt.h:503
GetBitContext
Definition: get_bits.h:107
ff_thread_get_buffer
the pkt_dts and pkt_pts fields in AVFrame will work as usual Restrictions on codec whose streams don t reset across will not work because their bitstreams cannot be decoded in parallel *The contents of buffers must not be read before as well as code calling up to before the decode process starts Call have so the codec calls ff_thread_report set FF_CODEC_CAP_ALLOCATE_PROGRESS in AVCodec caps_internal and use ff_thread_get_buffer() to allocate frames. The frames must then be freed with ff_thread_release_buffer(). Otherwise decode directly into the user-supplied frames. Call ff_thread_report_progress() after some part of the current picture has decoded. A good place to put this is where draw_horiz_band() is called - add this if it isn 't called anywhere
AV_PIX_FMT_YUV422P9
#define AV_PIX_FMT_YUV422P9
Definition: pixfmt.h:457
classic_shift_chroma_table_size
#define classic_shift_chroma_table_size
Definition: huffyuvdec.c:93
ff_huffyuv_alloc_temp
av_cold int ff_huffyuv_alloc_temp(uint8_t *temp[3], uint16_t *temp16[3], int width)
Definition: huffyuv.c:63
AVCodecContext::flags
int flags
AV_CODEC_FLAG_*.
Definition: avcodec.h:506
decode_frame
static int decode_frame(AVCodecContext *avctx, AVFrame *p, int *got_frame, AVPacket *avpkt)
Definition: huffyuvdec.c:1221
val
static double val(void *priv, double ch)
Definition: aeval.c:77
av_pix_fmt_get_chroma_sub_sample
int av_pix_fmt_get_chroma_sub_sample(enum AVPixelFormat pix_fmt, int *h_shift, int *v_shift)
Utility function to access log2_chroma_w log2_chroma_h from the pixel format AVPixFmtDescriptor.
Definition: pixdesc.c:2916
classic_shift_luma
static const unsigned char classic_shift_luma[classic_shift_luma_table_size+AV_INPUT_BUFFER_PADDING_SIZE]
Definition: huffyuvdec.c:86
AV_PIX_FMT_GRAY16
#define AV_PIX_FMT_GRAY16
Definition: pixfmt.h:443
HYuvDecContext::chroma_h_shift
int chroma_h_shift
Definition: huffyuvdec.c:65
HYuvDecContext::temp16
uint16_t * temp16[3]
identical to temp but 16bit type
Definition: huffyuvdec.c:72
LLVidDSPContext
Definition: lossless_videodsp.h:28
AV_PIX_FMT_YUV444P10
#define AV_PIX_FMT_YUV444P10
Definition: pixfmt.h:462
HYuvDecContext::chroma
int chroma
Definition: huffyuvdec.c:63
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:180
av_cold
#define av_cold
Definition: attributes.h:90
decode_plane_bitstream
static void decode_plane_bitstream(HYuvDecContext *s, int width, int plane)
Definition: huffyuvdec.c:703
AV_PIX_FMT_YUV422P16
#define AV_PIX_FMT_YUV422P16
Definition: pixfmt.h:471
init_get_bits8
static int init_get_bits8(GetBitContext *s, const uint8_t *buffer, int byte_size)
Initialize GetBitContext.
Definition: get_bits.h:524
HYuvDecContext::yuy2
int yuy2
Definition: huffyuvdec.c:57
CLOSE_READER
#define CLOSE_READER(name, gb)
Definition: get_bits.h:187
AVCodecContext::extradata_size
int extradata_size
Definition: avcodec.h:528
width
#define width
FF_CODEC_DECODE_CB
#define FF_CODEC_DECODE_CB(func)
Definition: codec_internal.h:306
s
#define s(width, name)
Definition: cbs_vp9.c:256
AV_PIX_FMT_YUVA420P
@ AV_PIX_FMT_YUVA420P
planar YUV 4:2:0, 20bpp, (1 Cr & Cb sample per 2x2 Y & A samples)
Definition: pixfmt.h:101
AV_PIX_FMT_YUV444P16
#define AV_PIX_FMT_YUV444P16
Definition: pixfmt.h:472
g
const char * g
Definition: vf_curves.c:127
init
int(* init)(AVBSFContext *ctx)
Definition: dts2pts_bsf.c:365
bits
uint8_t bits
Definition: vp3data.h:128
av_assert0
#define av_assert0(cond)
assert() equivalent, that is always enabled.
Definition: avassert.h:37
B
#define B
Definition: huffyuv.h:42
AV_PIX_FMT_YUV420P9
#define AV_PIX_FMT_YUV420P9
Definition: pixfmt.h:456
AV_PIX_FMT_YUV420P16
#define AV_PIX_FMT_YUV420P16
Definition: pixfmt.h:470
get_bits.h
AV_CODEC_ID_FFVHUFF
@ AV_CODEC_ID_FFVHUFF
Definition: codec_id.h:119
AV_PIX_FMT_YUV420P
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:66
ff_huffyuv_common_end
av_cold void ff_huffyuv_common_end(uint8_t *temp[3], uint16_t *temp16[3])
Definition: huffyuv.c:76
read_huffman_tables
static int read_huffman_tables(HYuvDecContext *s, const uint8_t *src, int length)
Definition: huffyuvdec.c:249
CODEC_LONG_NAME
#define CODEC_LONG_NAME(str)
Definition: codec_internal.h:272
AVCodecContext::codec_id
enum AVCodecID codec_id
Definition: avcodec.h:436
HYuvDecContext::temp
uint8_t * temp[3]
Definition: huffyuvdec.c:71
if
if(ret)
Definition: filter_design.txt:179
AV_CODEC_CAP_FRAME_THREADS
#define AV_CODEC_CAP_FRAME_THREADS
Codec supports frame-level multithreading.
Definition: codec.h:107
HYuvDecContext::bitstream_bpp
int bitstream_bpp
Definition: huffyuvdec.c:55
AV_PIX_FMT_GBRP16
#define AV_PIX_FMT_GBRP16
Definition: pixfmt.h:478
ff_bswapdsp_init
av_cold void ff_bswapdsp_init(BswapDSPContext *c)
Definition: bswapdsp.c:49
NULL
#define NULL
Definition: coverity.c:32
AVERROR_PATCHWELCOME
#define AVERROR_PATCHWELCOME
Not yet implemented in FFmpeg, patches welcome.
Definition: error.h:64
AV_PIX_FMT_YUYV422
@ AV_PIX_FMT_YUYV422
packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr
Definition: pixfmt.h:67
HYuvDecContext::gb
GetBitContext gb
Definition: huffyuvdec.c:51
HYuvDecContext::vlc
VLC vlc[8]
Definition: huffyuvdec.c:76
LAST_SKIP_BITS
#define LAST_SKIP_BITS(name, gb, num)
Definition: get_bits.h:237
ff_huffyuv_generate_bits_table
int ff_huffyuv_generate_bits_table(uint32_t *dst, const uint8_t *len_table, int n)
Definition: huffyuv.c:40
AV_PIX_FMT_YUV422P10
#define AV_PIX_FMT_YUV422P10
Definition: pixfmt.h:460
HYuvDecContext::llviddsp
LLVidDSPContext llviddsp
Definition: huffyuvdec.c:81
AV_PIX_FMT_GRAY8
@ AV_PIX_FMT_GRAY8
Y , 8bpp.
Definition: pixfmt.h:74
READ_2PIX_PLANE16
#define READ_2PIX_PLANE16(dst0, dst1, plane)
Definition: huffyuvdec.c:697
HYuvDecContext
Definition: huffyuvdec.c:50
AV_PIX_FMT_GBRP9
#define AV_PIX_FMT_GBRP9
Definition: pixfmt.h:474
get_vlc2
static av_always_inline int get_vlc2(GetBitContext *s, const VLCElem *table, int bits, int max_depth)
Parse a vlc code.
Definition: get_bits.h:631
HYuvDecContext::flags
int flags
Definition: huffyuvdec.c:67
index
int index
Definition: gxfenc.c:89
READ_2PIX_PLANE
#define READ_2PIX_PLANE(dst0, dst1, plane, OP)
Definition: huffyuvdec.c:688
AV_CODEC_CAP_DR1
#define AV_CODEC_CAP_DR1
Codec uses get_buffer() or get_encode_buffer() for allocating buffers and supports custom allocators.
Definition: codec.h:52
AV_CODEC_FLAG_GRAY
#define AV_CODEC_FLAG_GRAY
Only decode/encode grayscale.
Definition: avcodec.h:301
AVPacket::size
int size
Definition: packet.h:375
codec_internal.h
for
for(k=2;k<=8;++k)
Definition: h264pred_template.c:425
AV_PIX_FMT_YUV422P12
#define AV_PIX_FMT_YUV422P12
Definition: pixfmt.h:464
AV_NUM_DATA_POINTERS
#define AV_NUM_DATA_POINTERS
Definition: frame.h:331
AV_PIX_FMT_YUV444P12
#define AV_PIX_FMT_YUV444P12
Definition: pixfmt.h:466
decode_end
static av_cold int decode_end(AVCodecContext *avctx)
Definition: huffyuvdec.c:318
LEFT
#define LEFT
Definition: cdgraphics.c:167
diff
static av_always_inline int diff(const struct color_info *a, const struct color_info *b, const int trans_thresh)
Definition: vf_paletteuse.c:162
ff_huffyuvdsp_init
av_cold void ff_huffyuvdsp_init(HuffYUVDSPContext *c, enum AVPixelFormat pix_fmt)
Definition: huffyuvdsp.c:84
OPEN_READER
#define OPEN_READER(name, gb)
Definition: get_bits.h:176
height
#define height
Predictor
Definition: ratecontrol.h:35
AV_PIX_FMT_RGB32
#define AV_PIX_FMT_RGB32
Definition: pixfmt.h:432
AV_PIX_FMT_YUVA444P
@ AV_PIX_FMT_YUVA444P
planar YUV 4:4:4 32bpp, (1 Cr & Cb sample per 1x1 Y & A samples)
Definition: pixfmt.h:167
classic_add_chroma
static const unsigned char classic_add_chroma[256]
Definition: huffyuvdec.c:121
AV_PIX_FMT_YUVA444P10
#define AV_PIX_FMT_YUVA444P10
Definition: pixfmt.h:498
offset
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this just let it vf offset
Definition: writing_filters.txt:86
decode_slice
static int decode_slice(AVCodecContext *avctx, AVFrame *p, int height, int buf_size, int y_offset, int table_size)
Definition: huffyuvdec.c:910
HYuvDecContext::interlaced
int interlaced
Definition: huffyuvdec.c:53
HYuvDecContext::bps
int bps
Definition: huffyuvdec.c:59
draw_slice
static void draw_slice(HYuvDecContext *s, AVCodecContext *avctx, AVFrame *frame, int y)
Definition: huffyuvdec.c:855
HYuvDecContext::chroma_v_shift
int chroma_v_shift
Definition: huffyuvdec.c:66
AVCodecContext::bits_per_coded_sample
int bits_per_coded_sample
bits per sample/pixel from the demuxer (needed for huffyuv).
Definition: avcodec.h:1480
interlaced
uint8_t interlaced
Definition: mxfenc.c:2046
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:269
code
and forward the test the status of outputs and forward it to the corresponding return FFERROR_NOT_READY If the filters stores internally one or a few frame for some it can consider them to be part of the FIFO and delay acknowledging a status change accordingly Example code
Definition: filter_design.txt:178
AVCodecContext::extradata
uint8_t * extradata
some codecs need / can use extradata like Huffman tables.
Definition: avcodec.h:527
ff_hymt_decoder
const FFCodec ff_hymt_decoder
ff_init_vlc_sparse
int ff_init_vlc_sparse(VLC *vlc, int nb_bits, int nb_codes, const void *bits, int bits_wrap, int bits_size, const void *codes, int codes_wrap, int codes_size, const void *symbols, int symbols_wrap, int symbols_size, int flags)
Definition: vlc.c:272
ff_huffyuv_decoder
const FFCodec ff_huffyuv_decoder
Definition: huffyuvdec.c:1304
AV_PIX_FMT_GBRP12
#define AV_PIX_FMT_GBRP12
Definition: pixfmt.h:476
HYuvDecContext::pix_bgr_map
uint32_t pix_bgr_map[1<< VLC_BITS]
Definition: huffyuvdec.c:75
av_fast_padded_malloc
void av_fast_padded_malloc(void *ptr, unsigned int *size, size_t min_size)
Same behaviour av_fast_malloc but the buffer has additional AV_INPUT_BUFFER_PADDING_SIZE at the end w...
Definition: utils.c:49
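In a decoder like this one, that is typically how the scratch bitstream buffer is grown before the packet is byte-swapped into it. A minimal sketch, assuming buf_size is the incoming packet size and s points at the decoder context shown in this file:

/* Sketch: (re)allocate the padded scratch buffer; the zeroed padding lets
 * the unchecked bitstream reader over-read safely. */
av_fast_padded_malloc(&s->bitstream_buffer, &s->bitstream_buffer_size,
                      buf_size);
if (!s->bitstream_buffer)
    return AVERROR(ENOMEM);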
av_always_inline
#define av_always_inline
Definition: attributes.h:49
FFMIN
#define FFMIN(a, b)
Definition: macros.h:49
av_mallocz
void * av_mallocz(size_t size)
Allocate a memory block with alignment suitable for all memory accesses (including vectors if availab...
Definition: mem.c:254
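For illustration, zero-initialised per-plane scratch rows could be set up with it. The helper name alloc_temp_rows and the 4 * width + 16 sizing are assumptions, chosen only to leave room for 16-bit samples:

/* Hypothetical helper: one zeroed scratch row per plane; temp16[] simply
 * aliases temp[] for the >8 bit code paths. */
static int alloc_temp_rows(HYuvDecContext *s, int width)
{
    for (int i = 0; i < 3; i++) {
        s->temp[i] = av_mallocz(4 * width + 16);
        if (!s->temp[i])
            return AVERROR(ENOMEM);
        s->temp16[i] = (uint16_t *)s->temp[i];
    }
    return 0;
}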
AVCodec::name
const char * name
Name of the codec implementation.
Definition: codec.h:191
len
int len
Definition: vorbis_enc_data.h:426
PLANE
@ PLANE
Definition: huffyuv.h:54
decode_gray_bitstream
static void decode_gray_bitstream(HYuvDecContext *s, int count)
Definition: huffyuvdec.c:764
AVCodecContext::height
int height
Definition: avcodec.h:598
AVCodecContext::pix_fmt
enum AVPixelFormat pix_fmt
Pixel format, see AV_PIX_FMT_xxx.
Definition: avcodec.h:635
AV_PIX_FMT_YUV444P9
#define AV_PIX_FMT_YUV444P9
Definition: pixfmt.h:458
ff_free_vlc
void ff_free_vlc(VLC *vlc)
Definition: vlc.c:375
avcodec.h
limit
static double limit(double x)
Definition: vf_pseudocolor.c:130
ret
ret
Definition: filter_design.txt:187
frame
Definition: filter_design.txt:264
AV_PIX_FMT_0RGB32
#define AV_PIX_FMT_0RGB32
Definition: pixfmt.h:436
classic_shift_chroma
static const unsigned char classic_shift_chroma[classic_shift_chroma_table_size+AV_INPUT_BUFFER_PADDING_SIZE]
Definition: huffyuvdec.c:94
AV_PIX_FMT_YUVA444P9
#define AV_PIX_FMT_YUVA444P9
Definition: pixfmt.h:495
AV_PIX_FMT_YUV420P12
#define AV_PIX_FMT_YUV420P12
Definition: pixfmt.h:463
AV_INPUT_BUFFER_PADDING_SIZE
#define AV_INPUT_BUFFER_PADDING_SIZE
Definition: defs.h:40
left
Definition: snow.txt:386
classic_shift_luma_table_size
#define classic_shift_luma_table_size
Definition: huffyuvdec.c:85
AV_RL32
uint64_t_TMPL AV_WL64 unsigned int_TMPL AV_RL32
Definition: bytestream.h:92
AVCodecContext::draw_horiz_band
void(* draw_horiz_band)(struct AVCodecContext *s, const AVFrame *src, int offset[AV_NUM_DATA_POINTERS], int y, int type, int height)
If non NULL, 'draw_horiz_band' is called by the libavcodec decoder to draw a horizontal band.
Definition: avcodec.h:660
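A decoder that advertises AV_CODEC_CAP_DRAW_HORIZ_BAND can hand finished bands to this callback as rows complete. The sketch below is an assumption-laden outline: it pretends chroma is not vertically subsampled and passes type 3 for a progressive-frame band of h rows starting at luma row y.

/* Sketch: tell the caller that rows [y, y + h) of "frame" are ready.
 * offset[] gives the byte offset of the band inside each plane. */
if (avctx->draw_horiz_band) {
    int offset[AV_NUM_DATA_POINTERS] = { 0 };
    offset[0] = frame->linesize[0] * y;
    offset[1] = frame->linesize[1] * y;   /* assumes no vertical subsampling */
    offset[2] = frame->linesize[2] * y;
    avctx->draw_horiz_band(avctx, frame, offset, y, 3, h);
}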
BITS_LEFT
#define BITS_LEFT(name, gb)
Definition: get_bits.h:229
AV_PIX_FMT_YUV422P14
#define AV_PIX_FMT_YUV422P14
Definition: pixfmt.h:468
left_prediction
static int left_prediction(HYuvDecContext *s, uint8_t *dst, const uint8_t *src, int w, int acc)
Definition: huffyuvdec.c:883
ff_llviddsp_init
void ff_llviddsp_init(LLVidDSPContext *c)
Definition: lossless_videodsp.c:113
AVCodecContext
main external API structure.
Definition: avcodec.h:426
HYuvDecContext::yuv
int yuv
Definition: huffyuvdec.c:64
SHOW_UBITS
#define SHOW_UBITS(name, gb, num)
Definition: get_bits.h:249
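Together with UPDATE_CACHE, LAST_SKIP_BITS and CLOSE_READER (also from get_bits.h), these macros form the cached bit-reader pattern used on the hot decoding paths. A hedged sketch of the usual sequence, assuming a local GetBitContext gb and that the looked-up code turns out to occupy len bits:

/* Sketch: open the cached reader, peek VLC_BITS bits, decide how many
 * were really used, then consume exactly that many and close. */
OPEN_READER(re, &gb);
UPDATE_CACHE(re, &gb);
code = SHOW_UBITS(re, &gb, VLC_BITS);
/* ... table lookup on "code" yields the symbol and its length "len" ... */
LAST_SKIP_BITS(re, &gb, len);
CLOSE_READER(re, &gb);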
HYuvDecContext::len
uint8_t len[4][MAX_VLC_N]
Definition: huffyuvdec.c:73
VLC
Definition: vlc.h:31
sign_extend
static av_const int sign_extend(int val, unsigned bits)
Definition: mathops.h:133
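A short usage example (values chosen purely for illustration):

/* sign_extend() reinterprets the low "bits" bits as two's complement. */
int a = sign_extend(0x3FF, 10);   /* all ten bits set  -> -1        */
int b = sign_extend(0x1FF, 10);   /* bit 9 (sign) clear -> stays 511 */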
G
#define G
Definition: huffyuv.h:43
AV_PIX_FMT_YUV444P
@ AV_PIX_FMT_YUV444P
planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
Definition: pixfmt.h:71
AV_PIX_FMT_GBRP
@ AV_PIX_FMT_GBRP
planar GBR 4:4:4 24bpp
Definition: pixfmt.h:158
AVMEDIA_TYPE_VIDEO
@ AVMEDIA_TYPE_VIDEO
Definition: avutil.h:201
AV_PIX_FMT_YUV422P
@ AV_PIX_FMT_YUV422P
planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
Definition: pixfmt.h:70
lossless_videodsp.h
map
const VDPAUPixFmtMap * map
Definition: hwcontext_vdpau.c:71
alpha
static const int16_t alpha[]
Definition: ilbcdata.h:55
AVPacket
This structure stores compressed data.
Definition: packet.h:351
AVCodecContext::priv_data
void * priv_data
Definition: avcodec.h:453
decode_422_bitstream
static void decode_422_bitstream(HYuvDecContext *s, int count)
Definition: huffyuvdec.c:659
HYuvDecContext::bdsp
BswapDSPContext bdsp
Definition: huffyuvdec.c:79
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:34
src
INIT_CLIP pixel * src
Definition: h264pred_template.c:418
AV_PIX_FMT_YUV411P
@ AV_PIX_FMT_YUV411P
planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples)
Definition: pixfmt.h:73
read_len_table
static int read_len_table(uint8_t *dst, GetBitContext *gb, int n)
Definition: huffyuvdec.c:140
AVCodecContext::width
int width
picture width / height.
Definition: avcodec.h:598
imgutils.h
AVFrame::linesize
int linesize[AV_NUM_DATA_POINTERS]
For video, a positive or negative value, which typically indicates the size in bytes of each pict...
Definition: frame.h:375
AV_CODEC_CAP_DRAW_HORIZ_BAND
#define AV_CODEC_CAP_DRAW_HORIZ_BAND
Decoder can use draw_horiz_band callback.
Definition: codec.h:44
AV_PIX_FMT_YUV410P
@ AV_PIX_FMT_YUV410P
planar YUV 4:1:0, 9bpp, (1 Cr & Cb sample per 4x4 Y samples)
Definition: pixfmt.h:72
ff_ffvhuff_decoder
const FFCodec ff_ffvhuff_decoder
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:27
AVERROR_INVALIDDATA
#define AVERROR_INVALIDDATA
Invalid data found when processing input.
Definition: error.h:61
BswapDSPContext
Definition: bswapdsp.h:24
h
h
Definition: vp9dsp_template.c:2038
AV_PIX_FMT_YUV444P14
#define AV_PIX_FMT_YUV444P14
Definition: pixfmt.h:469
av_image_check_size
int av_image_check_size(unsigned int w, unsigned int h, int log_offset, void *log_ctx)
Check if the given dimension of an image is valid, meaning that all bytes of the image can be address...
Definition: imgutils.c:318
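Decoders commonly run this check in their init function before trusting container-supplied dimensions. A minimal sketch, logging against avctx:

/* Sketch: reject dimensions whose image could not be addressed safely. */
int ret = av_image_check_size(avctx->width, avctx->height, 0, avctx);
if (ret < 0)
    return ret;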
huffyuv.h
HYuvDecContext::hdsp
HuffYUVDSPContext hdsp
Definition: huffyuvdec.c:80
HYuvDecContext::decorrelate
int decorrelate
Definition: huffyuvdec.c:54
AV_PIX_FMT_YUVA422P
@ AV_PIX_FMT_YUVA422P
planar YUV 4:2:2 24bpp, (1 Cr & Cb sample per 2x1 Y & A samples)
Definition: pixfmt.h:166
AV_PIX_FMT_YUV420P14
#define AV_PIX_FMT_YUV420P14
Definition: pixfmt.h:467
HYuvDecContext::last_slice_end
int last_slice_end
Definition: huffyuvdec.c:69
re
float re
Definition: fft.c:79
HYuvDecContext::bitstream_buffer
uint8_t * bitstream_buffer
Definition: huffyuvdec.c:77