huffyuvdec.c
1 /*
2  * huffyuv decoder
3  *
4  * Copyright (c) 2002-2014 Michael Niedermayer <michaelni@gmx.at>
5  *
6  * see https://multimedia.cx/huffyuv.txt for a description of
7  * the algorithm used
8  *
9  * This file is part of FFmpeg.
10  *
11  * FFmpeg is free software; you can redistribute it and/or
12  * modify it under the terms of the GNU Lesser General Public
13  * License as published by the Free Software Foundation; either
14  * version 2.1 of the License, or (at your option) any later version.
15  *
16  * FFmpeg is distributed in the hope that it will be useful,
17  * but WITHOUT ANY WARRANTY; without even the implied warranty of
18  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
19  * Lesser General Public License for more details.
20  *
21  * You should have received a copy of the GNU Lesser General Public
22  * License along with FFmpeg; if not, write to the Free Software
23  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
24  *
25  * yuva, gray, 4:4:4, 4:1:1, 4:1:0 and >8 bit per sample support sponsored by NOA
26  */
27 
28 /**
29  * @file
30  * huffyuv decoder
31  */
32 
33 #define UNCHECKED_BITSTREAM_READER 1
34 
35 #include "config_components.h"
36 
37 #include "avcodec.h"
38 #include "bswapdsp.h"
39 #include "bytestream.h"
40 #include "codec_internal.h"
41 #include "get_bits.h"
42 #include "huffyuv.h"
43 #include "huffyuvdsp.h"
44 #include "lossless_videodsp.h"
45 #include "thread.h"
46 #include "libavutil/emms.h"
47 #include "libavutil/imgutils.h"
48 #include "libavutil/mem.h"
49 #include "libavutil/pixdesc.h"
50 
51 #define VLC_BITS 12
52 
53 typedef struct HYuvDecContext {
54  GetBitContext gb;
55  Predictor predictor;
56  int interlaced;
57  int decorrelate;
58  int bitstream_bpp;
59  int version;
60  int yuy2; //use yuy2 instead of 422P
61  int bgr32; //use bgr32 instead of bgr24
62  int bps;
63  int n; // 1<<bps
64  int vlc_n; // number of vlc codes (FFMIN(1<<bps, MAX_VLC_N))
65  int alpha;
66  int chroma;
67  int yuv;
68  int chroma_h_shift;
69  int chroma_v_shift;
70  int flags;
71  int context;
72  int last_slice_end;
73 
74  union {
75  uint8_t *temp[3];
76  uint16_t *temp16[3];
77  };
78  uint8_t len[4][MAX_VLC_N];
79  uint32_t bits[4][MAX_VLC_N];
80  uint32_t pix_bgr_map[1<<VLC_BITS];
81  VLC vlc[8]; //Y,U,V,A,YY,YU,YV,AA
82  uint8_t *bitstream_buffer;
83  unsigned int bitstream_buffer_size;
84  BswapDSPContext bdsp;
85  HuffYUVDSPContext hdsp;
86  LLVidDSPContext llviddsp;
87 } HYuvDecContext;
88 
89 
90 static const uint8_t classic_shift_luma[] = {
91  34, 36, 35, 69, 135, 232, 9, 16, 10, 24, 11, 23, 12, 16, 13, 10,
92  14, 8, 15, 8, 16, 8, 17, 20, 16, 10, 207, 206, 205, 236, 11, 8,
93  10, 21, 9, 23, 8, 8, 199, 70, 69, 68,
94 };
95 
96 static const uint8_t classic_shift_chroma[] = {
97  66, 36, 37, 38, 39, 40, 41, 75, 76, 77, 110, 239, 144, 81, 82, 83,
98  84, 85, 118, 183, 56, 57, 88, 89, 56, 89, 154, 57, 58, 57, 26, 141,
99  57, 56, 58, 57, 58, 57, 184, 119, 214, 245, 116, 83, 82, 49, 80, 79,
100  78, 77, 44, 75, 41, 40, 39, 38, 37, 36, 34,
101 };
102 
103 static const unsigned char classic_add_luma[256] = {
104  3, 9, 5, 12, 10, 35, 32, 29, 27, 50, 48, 45, 44, 41, 39, 37,
105  73, 70, 68, 65, 64, 61, 58, 56, 53, 50, 49, 46, 44, 41, 38, 36,
106  68, 65, 63, 61, 58, 55, 53, 51, 48, 46, 45, 43, 41, 39, 38, 36,
107  35, 33, 32, 30, 29, 27, 26, 25, 48, 47, 46, 44, 43, 41, 40, 39,
108  37, 36, 35, 34, 32, 31, 30, 28, 27, 26, 24, 23, 22, 20, 19, 37,
109  35, 34, 33, 31, 30, 29, 27, 26, 24, 23, 21, 20, 18, 17, 15, 29,
110  27, 26, 24, 22, 21, 19, 17, 16, 14, 26, 25, 23, 21, 19, 18, 16,
111  15, 27, 25, 23, 21, 19, 17, 16, 14, 26, 25, 23, 21, 18, 17, 14,
112  12, 17, 19, 13, 4, 9, 2, 11, 1, 7, 8, 0, 16, 3, 14, 6,
113  12, 10, 5, 15, 18, 11, 10, 13, 15, 16, 19, 20, 22, 24, 27, 15,
114  18, 20, 22, 24, 26, 14, 17, 20, 22, 24, 27, 15, 18, 20, 23, 25,
115  28, 16, 19, 22, 25, 28, 32, 36, 21, 25, 29, 33, 38, 42, 45, 49,
116  28, 31, 34, 37, 40, 42, 44, 47, 49, 50, 52, 54, 56, 57, 59, 60,
117  62, 64, 66, 67, 69, 35, 37, 39, 40, 42, 43, 45, 47, 48, 51, 52,
118  54, 55, 57, 59, 60, 62, 63, 66, 67, 69, 71, 72, 38, 40, 42, 43,
119  46, 47, 49, 51, 26, 28, 30, 31, 33, 34, 18, 19, 11, 13, 7, 8,
120 };
121 
122 static const unsigned char classic_add_chroma[256] = {
123  3, 1, 2, 2, 2, 2, 3, 3, 7, 5, 7, 5, 8, 6, 11, 9,
124  7, 13, 11, 10, 9, 8, 7, 5, 9, 7, 6, 4, 7, 5, 8, 7,
125  11, 8, 13, 11, 19, 15, 22, 23, 20, 33, 32, 28, 27, 29, 51, 77,
126  43, 45, 76, 81, 46, 82, 75, 55, 56, 144, 58, 80, 60, 74, 147, 63,
127  143, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79,
128  80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 27, 30, 21, 22,
129  17, 14, 5, 6, 100, 54, 47, 50, 51, 53, 106, 107, 108, 109, 110, 111,
130  112, 113, 114, 115, 4, 117, 118, 92, 94, 121, 122, 3, 124, 103, 2, 1,
131  0, 129, 130, 131, 120, 119, 126, 125, 136, 137, 138, 139, 140, 141, 142, 134,
132  135, 132, 133, 104, 64, 101, 62, 57, 102, 95, 93, 59, 61, 28, 97, 96,
133  52, 49, 48, 29, 32, 25, 24, 46, 23, 98, 45, 44, 43, 20, 42, 41,
134  19, 18, 99, 40, 15, 39, 38, 16, 13, 12, 11, 37, 10, 9, 8, 36,
135  7, 128, 127, 105, 123, 116, 35, 34, 33, 145, 31, 79, 42, 146, 78, 26,
136  83, 48, 49, 50, 44, 47, 26, 31, 30, 18, 17, 19, 21, 24, 25, 13,
137  14, 16, 17, 18, 20, 21, 12, 14, 15, 9, 10, 6, 9, 6, 5, 8,
138  6, 12, 8, 10, 7, 9, 6, 4, 6, 2, 2, 3, 3, 3, 3, 2,
139 };
140 
141 static int read_len_table(uint8_t *dst, GetByteContext *gb, int n)
142 {
143  int i, val, repeat;
144 
145  for (i = 0; i < n;) {
146  if (bytestream2_get_bytes_left(gb) <= 0)
147  goto error;
148  repeat = bytestream2_peek_byteu(gb) >> 5;
149  val = bytestream2_get_byteu(gb) & 0x1F;
150  if (repeat == 0) {
151  if (bytestream2_get_bytes_left(gb) <= 0)
152  goto error;
153  repeat = bytestream2_get_byteu(gb);
154  }
155  if (i + repeat > n)
156  goto error;
157  while (repeat--)
158  dst[i++] = val;
159  }
160  return 0;
161 
162 error:
163  av_log(NULL, AV_LOG_ERROR, "Error reading huffman table\n");
164  return AVERROR_INVALIDDATA;
165 }
166 
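Note: each byte of a stored length table packs a 3-bit repeat count in its high bits and a 5-bit code length in its low bits; a repeat of zero means the real repeat count follows in the next byte. A minimal, hedged sketch of unpacking one byte (the helper name and the value 0x45 are purely illustrative):

static void unpack_len_byte_sketch(void)
{
    unsigned byte   = 0x45;        /* example stored byte                 */
    int      repeat = byte >> 5;   /* high 3 bits: 0x45 >> 5   == 2       */
    int      val    = byte & 0x1F; /* low 5 bits:  0x45 & 0x1F == 5       */
    /* read_len_table() above would emit the code length 5 twice here;
     * had repeat been 0, the next byte would carry the repeat count. */
    (void)repeat; (void)val;
}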
167 static int generate_joint_tables(HYuvDecContext *s)
168 {
169  int ret;
170  uint16_t *symbols = av_mallocz(5 << VLC_BITS);
171  uint16_t *bits;
172  uint8_t *len;
173  if (!symbols)
174  return AVERROR(ENOMEM);
175  bits = symbols + (1 << VLC_BITS);
176  len = (uint8_t *)(bits + (1 << VLC_BITS));
177 
178  if (s->bitstream_bpp < 24 || s->version > 2) {
179  int count = 1 + s->alpha + 2 * s->chroma;
180  int p, i, y, u;
181  for (p = 0; p < count; p++) {
182  int p0 = s->version > 2 ? p : 0;
183  for (i = y = 0; y < s->vlc_n; y++) {
184  int len0 = s->len[p0][y];
185  int limit = VLC_BITS - len0;
186  if (limit <= 0 || !len0)
187  continue;
188  if ((sign_extend(y, 8) & (s->vlc_n-1)) != y)
189  continue;
190  for (u = 0; u < s->vlc_n; u++) {
191  int len1 = s->len[p][u];
192  if (len1 > limit || !len1)
193  continue;
194  if ((sign_extend(u, 8) & (s->vlc_n-1)) != u)
195  continue;
196  av_assert0(i < (1 << VLC_BITS));
197  len[i] = len0 + len1;
198  bits[i] = (s->bits[p0][y] << len1) + s->bits[p][u];
199  symbols[i] = (y << 8) + (u & 0xFF);
200  i++;
201  }
202  }
203  ff_vlc_free(&s->vlc[4 + p]);
204  if ((ret = ff_vlc_init_sparse(&s->vlc[4 + p], VLC_BITS, i, len, 1, 1,
205  bits, 2, 2, symbols, 2, 2, 0)) < 0)
206  goto out;
207  }
208  } else {
209  uint8_t (*map)[4] = (uint8_t(*)[4]) s->pix_bgr_map;
210  int i, b, g, r, code;
211  int p0 = s->decorrelate;
212  int p1 = !s->decorrelate;
213  /* Restrict the range to +/-16 because that's pretty much guaranteed
214  * to cover all the combinations that fit in 11 bits total, and it
215  * does not matter if we miss a few rare codes. */
216  for (i = 0, g = -16; g < 16; g++) {
217  int len0 = s->len[p0][g & 255];
218  int limit0 = VLC_BITS - len0;
219  if (limit0 < 2 || !len0)
220  continue;
221  for (b = -16; b < 16; b++) {
222  int len1 = s->len[p1][b & 255];
223  int limit1 = limit0 - len1;
224  if (limit1 < 1 || !len1)
225  continue;
226  code = (s->bits[p0][g & 255] << len1) + s->bits[p1][b & 255];
227  for (r = -16; r < 16; r++) {
228  int len2 = s->len[2][r & 255];
229  if (len2 > limit1 || !len2)
230  continue;
231  av_assert0(i < (1 << VLC_BITS));
232  len[i] = len0 + len1 + len2;
233  bits[i] = (code << len2) + s->bits[2][r & 255];
234  if (s->decorrelate) {
235  map[i][G] = g;
236  map[i][B] = g + b;
237  map[i][R] = g + r;
238  } else {
239  map[i][B] = g;
240  map[i][G] = b;
241  map[i][R] = r;
242  }
243  i++;
244  }
245  }
246  }
247  ff_vlc_free(&s->vlc[4]);
248  if ((ret = vlc_init(&s->vlc[4], VLC_BITS, i, len, 1, 1,
249  bits, 2, 2, 0)) < 0)
250  goto out;
251  }
252  ret = 0;
253 out:
254  av_freep(&symbols);
255  return ret;
256 }
257 
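Note: the joint tables built above let a single VLC_BITS-wide lookup resolve two symbols at once (a Y/U or Y/V pair, or a decorrelated G/B/R triple in the RGB path) whenever the combined code fits; the packed symbol keeps the first value in its high byte and the second in its low byte. A hedged sketch of splitting such a packed entry (illustrative only, not part of the decoder):

static void split_joint_symbol_sketch(unsigned sym)
{
    int first  = sym >> 8;    /* e.g. the Y sample                    */
    int second = sym & 0xFF;  /* e.g. the U or V sample that follows  */
    /* OP8bits() further down performs the equivalent split whenever
     * the joint lookup succeeds. */
    (void)first; (void)second;
}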
258 static int read_huffman_tables(HYuvDecContext *s, const uint8_t *src, int length)
259 {
260  GetByteContext gb;
261  int i, ret;
262  int count = 3;
263 
264  bytestream2_init(&gb, src, length);
265 
266  if (s->version > 2)
267  count = 1 + s->alpha + 2*s->chroma;
268 
269  for (i = 0; i < count; i++) {
270  if ((ret = read_len_table(s->len[i], &gb, s->vlc_n)) < 0)
271  return ret;
272  if ((ret = ff_huffyuv_generate_bits_table(s->bits[i], s->len[i], s->vlc_n)) < 0)
273  return ret;
274  ff_vlc_free(&s->vlc[i]);
275  if ((ret = vlc_init(&s->vlc[i], VLC_BITS, s->vlc_n, s->len[i], 1, 1,
276  s->bits[i], 4, 4, 0)) < 0)
277  return ret;
278  }
279 
280  if ((ret = generate_joint_tables(s)) < 0)
281  return ret;
282 
283  return bytestream2_tell(&gb);
284 }
285 
286 static int read_old_huffman_tables(HYuvDecContext *s)
287 {
288  GetByteContext gb;
289  int i, ret;
290 
291  bytestream2_init(&gb, classic_shift_luma,
292  sizeof(classic_shift_luma));
293  ret = read_len_table(s->len[0], &gb, 256);
294  av_assert1(ret >= 0);
295 
296  bytestream2_init(&gb, classic_shift_chroma,
297  sizeof(classic_shift_chroma));
298  ret = read_len_table(s->len[1], &gb, 256);
299  av_assert1(ret >= 0);
300 
301  for (i = 0; i < 256; i++)
302  s->bits[0][i] = classic_add_luma[i];
303  for (i = 0; i < 256; i++)
304  s->bits[1][i] = classic_add_chroma[i];
305 
306  if (s->bitstream_bpp >= 24) {
307  memcpy(s->bits[1], s->bits[0], 256 * sizeof(uint32_t));
308  memcpy(s->len[1], s->len[0], 256 * sizeof(uint8_t));
309  }
310  memcpy(s->bits[2], s->bits[1], 256 * sizeof(uint32_t));
311  memcpy(s->len[2], s->len[1], 256 * sizeof(uint8_t));
312 
313  for (i = 0; i < 4; i++) {
314  ff_vlc_free(&s->vlc[i]);
315  if ((ret = vlc_init(&s->vlc[i], VLC_BITS, 256, s->len[i], 1, 1,
316  s->bits[i], 4, 4, 0)) < 0)
317  return ret;
318  }
319 
320  if ((ret = generate_joint_tables(s)) < 0)
321  return ret;
322 
323  return 0;
324 }
325 
326 static av_cold int decode_end(AVCodecContext *avctx)
327 {
328  HYuvDecContext *s = avctx->priv_data;
329  int i;
330 
331  for (int i = 0; i < 3; i++)
332  av_freep(&s->temp[i]);
333 
334  av_freep(&s->bitstream_buffer);
335 
336  for (i = 0; i < 8; i++)
337  ff_vlc_free(&s->vlc[i]);
338 
339  return 0;
340 }
341 
342 static av_cold int decode_init(AVCodecContext *avctx)
343 {
344  HYuvDecContext *s = avctx->priv_data;
345  int ret;
346 
347  ret = av_image_check_size(avctx->width, avctx->height, 0, avctx);
348  if (ret < 0)
349  return ret;
350 
351  s->flags = avctx->flags;
352 
353  ff_bswapdsp_init(&s->bdsp);
354  ff_huffyuvdsp_init(&s->hdsp, avctx->pix_fmt);
355  ff_llviddsp_init(&s->llviddsp);
356 
357  s->interlaced = avctx->height > 288;
358  s->bgr32 = 1;
359 
360  if (avctx->extradata_size) {
361  if ((avctx->bits_per_coded_sample & 7) &&
362  avctx->bits_per_coded_sample != 12)
363  s->version = 1; // do such files exist at all?
364  else if (avctx->extradata_size > 3 && avctx->extradata[3] == 0)
365  s->version = 2;
366  else
367  s->version = 3;
368  } else
369  s->version = 0;
370 
371  s->bps = 8;
372  s->n = 1<<s->bps;
373  s->vlc_n = FFMIN(s->n, MAX_VLC_N);
374  s->chroma = 1;
375  if (s->version >= 2) {
376  int method, interlace;
377 
378  if (avctx->extradata_size < 4)
379  return AVERROR_INVALIDDATA;
380 
381  method = avctx->extradata[0];
382  s->decorrelate = method & 64 ? 1 : 0;
383  s->predictor = method & 63;
384  if (s->version == 2) {
385  s->bitstream_bpp = avctx->extradata[1];
386  if (s->bitstream_bpp == 0)
387  s->bitstream_bpp = avctx->bits_per_coded_sample & ~7;
388  } else {
389  s->bps = (avctx->extradata[1] >> 4) + 1;
390  s->n = 1<<s->bps;
391  s->vlc_n = FFMIN(s->n, MAX_VLC_N);
392  s->chroma_h_shift = avctx->extradata[1] & 3;
393  s->chroma_v_shift = (avctx->extradata[1] >> 2) & 3;
394  s->yuv = !!(avctx->extradata[2] & 1);
395  s->chroma= !!(avctx->extradata[2] & 3);
396  s->alpha = !!(avctx->extradata[2] & 4);
397  }
398  interlace = (avctx->extradata[2] & 0x30) >> 4;
399  s->interlaced = (interlace == 1) ? 1 : (interlace == 2) ? 0 : s->interlaced;
400  s->context = avctx->extradata[2] & 0x40 ? 1 : 0;
401 
402  if ((ret = read_huffman_tables(s, avctx->extradata + 4,
403  avctx->extradata_size - 4)) < 0)
404  return ret;
405  } else {
406  switch (avctx->bits_per_coded_sample & 7) {
407  case 1:
408  s->predictor = LEFT;
409  s->decorrelate = 0;
410  break;
411  case 2:
412  s->predictor = LEFT;
413  s->decorrelate = 1;
414  break;
415  case 3:
416  s->predictor = PLANE;
417  s->decorrelate = avctx->bits_per_coded_sample >= 24;
418  break;
419  case 4:
420  s->predictor = MEDIAN;
421  s->decorrelate = 0;
422  break;
423  default:
424  s->predictor = LEFT; // OLD
425  s->decorrelate = 0;
426  break;
427  }
428  s->bitstream_bpp = avctx->bits_per_coded_sample & ~7;
429  s->context = 0;
430 
431  if ((ret = read_old_huffman_tables(s)) < 0)
432  return ret;
433  }
434 
435  if (s->version <= 2) {
436  switch (s->bitstream_bpp) {
437  case 12:
438  avctx->pix_fmt = AV_PIX_FMT_YUV420P;
439  s->yuv = 1;
440  break;
441  case 16:
442  if (s->yuy2)
443  avctx->pix_fmt = AV_PIX_FMT_YUYV422;
444  else
445  avctx->pix_fmt = AV_PIX_FMT_YUV422P;
446  s->yuv = 1;
447  break;
448  case 24:
449  if (s->bgr32)
450  avctx->pix_fmt = AV_PIX_FMT_0RGB32;
451  else
452  avctx->pix_fmt = AV_PIX_FMT_BGR24;
453  break;
454  case 32:
455  av_assert0(s->bgr32);
456  avctx->pix_fmt = AV_PIX_FMT_RGB32;
457  s->alpha = 1;
458  break;
459  default:
460  return AVERROR_INVALIDDATA;
461  }
462  av_pix_fmt_get_chroma_sub_sample(avctx->pix_fmt,
463  &s->chroma_h_shift,
464  &s->chroma_v_shift);
465  } else {
466  switch ( (s->chroma<<10) | (s->yuv<<9) | (s->alpha<<8) | ((s->bps-1)<<4) | s->chroma_h_shift | (s->chroma_v_shift<<2)) {
467  case 0x070:
468  avctx->pix_fmt = AV_PIX_FMT_GRAY8;
469  break;
470  case 0x0F0:
471  avctx->pix_fmt = AV_PIX_FMT_GRAY16;
472  break;
473  case 0x470:
474  avctx->pix_fmt = AV_PIX_FMT_GBRP;
475  break;
476  case 0x480:
477  avctx->pix_fmt = AV_PIX_FMT_GBRP9;
478  break;
479  case 0x490:
480  avctx->pix_fmt = AV_PIX_FMT_GBRP10;
481  break;
482  case 0x4B0:
483  avctx->pix_fmt = AV_PIX_FMT_GBRP12;
484  break;
485  case 0x4D0:
486  avctx->pix_fmt = AV_PIX_FMT_GBRP14;
487  break;
488  case 0x4F0:
489  avctx->pix_fmt = AV_PIX_FMT_GBRP16;
490  break;
491  case 0x570:
492  avctx->pix_fmt = AV_PIX_FMT_GBRAP;
493  break;
494  case 0x670:
495  avctx->pix_fmt = AV_PIX_FMT_YUV444P;
496  break;
497  case 0x680:
498  avctx->pix_fmt = AV_PIX_FMT_YUV444P9;
499  break;
500  case 0x690:
501  avctx->pix_fmt = AV_PIX_FMT_YUV444P10;
502  break;
503  case 0x6B0:
504  avctx->pix_fmt = AV_PIX_FMT_YUV444P12;
505  break;
506  case 0x6D0:
507  avctx->pix_fmt = AV_PIX_FMT_YUV444P14;
508  break;
509  case 0x6F0:
510  avctx->pix_fmt = AV_PIX_FMT_YUV444P16;
511  break;
512  case 0x671:
513  avctx->pix_fmt = AV_PIX_FMT_YUV422P;
514  break;
515  case 0x681:
516  avctx->pix_fmt = AV_PIX_FMT_YUV422P9;
517  break;
518  case 0x691:
519  avctx->pix_fmt = AV_PIX_FMT_YUV422P10;
520  break;
521  case 0x6B1:
522  avctx->pix_fmt = AV_PIX_FMT_YUV422P12;
523  break;
524  case 0x6D1:
525  avctx->pix_fmt = AV_PIX_FMT_YUV422P14;
526  break;
527  case 0x6F1:
528  avctx->pix_fmt = AV_PIX_FMT_YUV422P16;
529  break;
530  case 0x672:
531  avctx->pix_fmt = AV_PIX_FMT_YUV411P;
532  break;
533  case 0x674:
534  avctx->pix_fmt = AV_PIX_FMT_YUV440P;
535  break;
536  case 0x675:
537  avctx->pix_fmt = AV_PIX_FMT_YUV420P;
538  break;
539  case 0x685:
540  avctx->pix_fmt = AV_PIX_FMT_YUV420P9;
541  break;
542  case 0x695:
543  avctx->pix_fmt = AV_PIX_FMT_YUV420P10;
544  break;
545  case 0x6B5:
546  avctx->pix_fmt = AV_PIX_FMT_YUV420P12;
547  break;
548  case 0x6D5:
549  avctx->pix_fmt = AV_PIX_FMT_YUV420P14;
550  break;
551  case 0x6F5:
552  avctx->pix_fmt = AV_PIX_FMT_YUV420P16;
553  break;
554  case 0x67A:
555  avctx->pix_fmt = AV_PIX_FMT_YUV410P;
556  break;
557  case 0x770:
558  avctx->pix_fmt = AV_PIX_FMT_YUVA444P;
559  break;
560  case 0x780:
561  avctx->pix_fmt = AV_PIX_FMT_YUVA444P9;
562  break;
563  case 0x790:
564  avctx->pix_fmt = AV_PIX_FMT_YUVA444P10;
565  break;
566  case 0x7F0:
567  avctx->pix_fmt = AV_PIX_FMT_YUVA444P16;
568  break;
569  case 0x771:
570  avctx->pix_fmt = AV_PIX_FMT_YUVA422P;
571  break;
572  case 0x781:
573  avctx->pix_fmt = AV_PIX_FMT_YUVA422P9;
574  break;
575  case 0x791:
576  avctx->pix_fmt = AV_PIX_FMT_YUVA422P10;
577  break;
578  case 0x7F1:
579  avctx->pix_fmt = AV_PIX_FMT_YUVA422P16;
580  break;
581  case 0x775:
582  avctx->pix_fmt = AV_PIX_FMT_YUVA420P;
583  break;
584  case 0x785:
585  avctx->pix_fmt = AV_PIX_FMT_YUVA420P9;
586  break;
587  case 0x795:
588  avctx->pix_fmt = AV_PIX_FMT_YUVA420P10;
589  break;
590  case 0x7F5:
591  avctx->pix_fmt = AV_PIX_FMT_YUVA420P16;
592  break;
593  default:
594  return AVERROR_INVALIDDATA;
595  }
596  }
597 
598  if ((avctx->pix_fmt == AV_PIX_FMT_YUV422P || avctx->pix_fmt == AV_PIX_FMT_YUV420P) && avctx->width & 1) {
599  av_log(avctx, AV_LOG_ERROR, "width must be even for this colorspace\n");
600  return AVERROR_INVALIDDATA;
601  }
602  if (s->predictor == MEDIAN && avctx->pix_fmt == AV_PIX_FMT_YUV422P &&
603  avctx->width % 4) {
604  av_log(avctx, AV_LOG_ERROR, "width must be a multiple of 4 "
605  "for this combination of colorspace and predictor type.\n");
606  return AVERROR_INVALIDDATA;
607  }
608 
609  for (int i = 0; i < 3; i++) {
610  s->temp[i] = av_malloc(4 * avctx->width + 16);
611  if (!s->temp[i])
612  return AVERROR(ENOMEM);
613  }
614 
615  return 0;
616 }
617 
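Note: for version 3 streams the pixel format is selected by the bitfield chroma<<10 | yuv<<9 | alpha<<8 | (bps-1)<<4 | chroma_h_shift | chroma_v_shift<<2 built in decode_init() above. A hedged worked example (the helper name is made up): plain 8-bit 4:2:0 YUV without alpha yields 0x675, which the switch maps to AV_PIX_FMT_YUV420P.

static int hyuv3_format_key_sketch(void)
{
    /* chroma=1, yuv=1, alpha=0, bps=8, chroma_h_shift=1, chroma_v_shift=1 */
    return (1 << 10) | (1 << 9) | (0 << 8) | ((8 - 1) << 4) | 1 | (1 << 2);
    /* 0x400 + 0x200 + 0x070 + 0x001 + 0x004 == 0x675 */
}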
618 /** Subset of GET_VLC for use in hand-rolled VLC code */
619 #define VLC_INTERN(dst, table, gb, name, bits, max_depth) \
620  code = table[index].sym; \
621  n = table[index].len; \
622  if (max_depth > 1 && n < 0) { \
623  LAST_SKIP_BITS(name, gb, bits); \
624  UPDATE_CACHE(name, gb); \
625  \
626  nb_bits = -n; \
627  index = SHOW_UBITS(name, gb, nb_bits) + code; \
628  code = table[index].sym; \
629  n = table[index].len; \
630  if (max_depth > 2 && n < 0) { \
631  LAST_SKIP_BITS(name, gb, nb_bits); \
632  UPDATE_CACHE(name, gb); \
633  \
634  nb_bits = -n; \
635  index = SHOW_UBITS(name, gb, nb_bits) + code; \
636  code = table[index].sym; \
637  n = table[index].len; \
638  } \
639  } \
640  dst = code; \
641  LAST_SKIP_BITS(name, gb, n)
642 
643 
644 #define GET_VLC_DUAL(dst0, dst1, name, gb, dtable, table1, table2, \
645  bits, max_depth, OP) \
646  do { \
647  unsigned int index = SHOW_UBITS(name, gb, bits); \
648  int code, n = dtable[index].len; \
649  \
650  if (n<=0) { \
651  int nb_bits; \
652  VLC_INTERN(dst0, table1, gb, name, bits, max_depth); \
653  \
654  UPDATE_CACHE(re, gb); \
655  index = SHOW_UBITS(name, gb, bits); \
656  VLC_INTERN(dst1, table2, gb, name, bits, max_depth); \
657  } else { \
658  code = dtable[index].sym; \
659  OP(dst0, dst1, code); \
660  LAST_SKIP_BITS(name, gb, n); \
661  } \
662  } while (0)
663 
664 #define OP8bits(dst0, dst1, code) dst0 = code>>8; dst1 = code
665 
666 #define READ_2PIX(dst0, dst1, plane1) \
667  UPDATE_CACHE(re, &s->gb); \
668  GET_VLC_DUAL(dst0, dst1, re, &s->gb, s->vlc[4+plane1].table, \
669  s->vlc[0].table, s->vlc[plane1].table, VLC_BITS, 3, OP8bits)
670 
671 static void decode_422_bitstream(HYuvDecContext *s, int count)
672 {
673  int i, icount;
674  OPEN_READER(re, &s->gb);
675  count /= 2;
676 
677  icount = get_bits_left(&s->gb) / (32 * 4);
678  if (count >= icount) {
679  for (i = 0; i < icount; i++) {
680  READ_2PIX(s->temp[0][2 * i], s->temp[1][i], 1);
681  READ_2PIX(s->temp[0][2 * i + 1], s->temp[2][i], 2);
682  }
683  for (; i < count && BITS_LEFT(re, &s->gb) > 0; i++) {
684  READ_2PIX(s->temp[0][2 * i ], s->temp[1][i], 1);
685  if (BITS_LEFT(re, &s->gb) <= 0) break;
686  READ_2PIX(s->temp[0][2 * i + 1], s->temp[2][i], 2);
687  }
688  for (; i < count; i++)
689  s->temp[0][2 * i ] = s->temp[1][i] =
690  s->temp[0][2 * i + 1] = s->temp[2][i] = 0;
691  } else {
692  for (i = 0; i < count; i++) {
693  READ_2PIX(s->temp[0][2 * i], s->temp[1][i], 1);
694  READ_2PIX(s->temp[0][2 * i + 1], s->temp[2][i], 2);
695  }
696  }
697  CLOSE_READER(re, &s->gb);
698 }
699 
700 #define READ_2PIX_PLANE(dst0, dst1, plane, OP) \
701  UPDATE_CACHE(re, &s->gb); \
702  GET_VLC_DUAL(dst0, dst1, re, &s->gb, s->vlc[4+plane].table, \
703  s->vlc[plane].table, s->vlc[plane].table, VLC_BITS, 3, OP)
704 
705 #define OP14bits(dst0, dst1, code) dst0 = code>>8; dst1 = sign_extend(code, 8)
706 
707 /* TODO instead of restarting the read when the code isn't in the first level
708  * of the joint table, jump into the 2nd level of the individual table. */
709 #define READ_2PIX_PLANE16(dst0, dst1, plane){\
710  dst0 = get_vlc2(&s->gb, s->vlc[plane].table, VLC_BITS, 3)*4;\
711  dst0 += get_bits(&s->gb, 2);\
712  dst1 = get_vlc2(&s->gb, s->vlc[plane].table, VLC_BITS, 3)*4;\
713  dst1 += get_bits(&s->gb, 2);\
714 }
715 static void decode_plane_bitstream(HYuvDecContext *s, int width, int plane)
716 {
717  int i, count = width/2;
718 
719  if (s->bps <= 8) {
720  OPEN_READER(re, &s->gb);
721  if (count >= (get_bits_left(&s->gb)) / (32 * 2)) {
722  for (i = 0; i < count && BITS_LEFT(re, &s->gb) > 0; i++) {
723  READ_2PIX_PLANE(s->temp[0][2 * i], s->temp[0][2 * i + 1], plane, OP8bits);
724  }
725  } else {
726  for(i=0; i<count; i++){
727  READ_2PIX_PLANE(s->temp[0][2 * i], s->temp[0][2 * i + 1], plane, OP8bits);
728  }
729  }
730  if( width&1 && BITS_LEFT(re, &s->gb)>0 ) {
731  unsigned int index;
732  int nb_bits, code, n;
733  UPDATE_CACHE(re, &s->gb);
734  index = SHOW_UBITS(re, &s->gb, VLC_BITS);
735  VLC_INTERN(s->temp[0][width-1], s->vlc[plane].table,
736  &s->gb, re, VLC_BITS, 3);
737  }
738  CLOSE_READER(re, &s->gb);
739  } else if (s->bps <= 14) {
740  OPEN_READER(re, &s->gb);
741  if (count >= (get_bits_left(&s->gb)) / (32 * 2)) {
742  for (i = 0; i < count && BITS_LEFT(re, &s->gb) > 0; i++) {
743  READ_2PIX_PLANE(s->temp16[0][2 * i], s->temp16[0][2 * i + 1], plane, OP14bits);
744  }
745  } else {
746  for(i=0; i<count; i++){
747  READ_2PIX_PLANE(s->temp16[0][2 * i], s->temp16[0][2 * i + 1], plane, OP14bits);
748  }
749  }
750  if( width&1 && BITS_LEFT(re, &s->gb)>0 ) {
751  unsigned int index;
752  int nb_bits, code, n;
753  UPDATE_CACHE(re, &s->gb);
754  index = SHOW_UBITS(re, &s->gb, VLC_BITS);
755  VLC_INTERN(s->temp16[0][width-1], s->vlc[plane].table,
756  &s->gb, re, VLC_BITS, 3);
757  }
758  CLOSE_READER(re, &s->gb);
759  } else {
760  if (count >= (get_bits_left(&s->gb)) / (32 * 2)) {
761  for (i = 0; i < count && get_bits_left(&s->gb) > 0; i++) {
762  READ_2PIX_PLANE16(s->temp16[0][2 * i], s->temp16[0][2 * i + 1], plane);
763  }
764  } else {
765  for(i=0; i<count; i++){
766  READ_2PIX_PLANE16(s->temp16[0][2 * i], s->temp16[0][2 * i + 1], plane);
767  }
768  }
769  if( width&1 && get_bits_left(&s->gb)>0 ) {
770  int dst = (unsigned)get_vlc2(&s->gb, s->vlc[plane].table, VLC_BITS, 3)<<2;
771  s->temp16[0][width-1] = dst + get_bits(&s->gb, 2);
772  }
773  }
774 }
775 
776 static void decode_gray_bitstream(HYuvDecContext *s, int count)
777 {
778  int i;
779  OPEN_READER(re, &s->gb);
780  count /= 2;
781 
782  if (count >= (get_bits_left(&s->gb)) / (32 * 2)) {
783  for (i = 0; i < count && BITS_LEFT(re, &s->gb) > 0; i++) {
784  READ_2PIX(s->temp[0][2 * i], s->temp[0][2 * i + 1], 0);
785  }
786  for (; i < count; i++)
787  s->temp[0][2 * i] = s->temp[0][2 * i + 1] = 0;
788  } else {
789  for (i = 0; i < count; i++) {
790  READ_2PIX(s->temp[0][2 * i], s->temp[0][2 * i + 1], 0);
791  }
792  }
793  CLOSE_READER(re, &s->gb);
794 }
795 
796 static av_always_inline void decode_bgr_1(HYuvDecContext *s, int count,
797  int decorrelate, int alpha)
798 {
799  int i;
800  OPEN_READER(re, &s->gb);
801 
802  for (i = 0; i < count && BITS_LEFT(re, &s->gb) > 0; i++) {
803  unsigned int index;
804  int code, n, nb_bits;
805 
806  UPDATE_CACHE(re, &s->gb);
807  index = SHOW_UBITS(re, &s->gb, VLC_BITS);
808  n = s->vlc[4].table[index].len;
809 
810  if (n>0) {
811  code = s->vlc[4].table[index].sym;
812  *(uint32_t *) &s->temp[0][4 * i] = s->pix_bgr_map[code];
813  LAST_SKIP_BITS(re, &s->gb, n);
814  } else {
815  if (decorrelate) {
816  VLC_INTERN(s->temp[0][4 * i + G], s->vlc[1].table,
817  &s->gb, re, VLC_BITS, 3);
818 
819  UPDATE_CACHE(re, &s->gb);
820  index = SHOW_UBITS(re, &s->gb, VLC_BITS);
821  VLC_INTERN(code, s->vlc[0].table, &s->gb, re, VLC_BITS, 3);
822  s->temp[0][4 * i + B] = code + s->temp[0][4 * i + G];
823 
824  UPDATE_CACHE(re, &s->gb);
825  index = SHOW_UBITS(re, &s->gb, VLC_BITS);
826  VLC_INTERN(code, s->vlc[2].table, &s->gb, re, VLC_BITS, 3);
827  s->temp[0][4 * i + R] = code + s->temp[0][4 * i + G];
828  } else {
829  VLC_INTERN(s->temp[0][4 * i + B], s->vlc[0].table,
830  &s->gb, re, VLC_BITS, 3);
831 
832  UPDATE_CACHE(re, &s->gb);
833  index = SHOW_UBITS(re, &s->gb, VLC_BITS);
834  VLC_INTERN(s->temp[0][4 * i + G], s->vlc[1].table,
835  &s->gb, re, VLC_BITS, 3);
836 
837  UPDATE_CACHE(re, &s->gb);
838  index = SHOW_UBITS(re, &s->gb, VLC_BITS);
839  VLC_INTERN(s->temp[0][4 * i + R], s->vlc[2].table,
840  &s->gb, re, VLC_BITS, 3);
841  }
842  }
843  if (alpha) {
844  UPDATE_CACHE(re, &s->gb);
845  index = SHOW_UBITS(re, &s->gb, VLC_BITS);
846  VLC_INTERN(s->temp[0][4 * i + A], s->vlc[2].table,
847  &s->gb, re, VLC_BITS, 3);
848  } else
849  s->temp[0][4 * i + A] = 0;
850  }
851  CLOSE_READER(re, &s->gb);
852 }
853 
854 static void decode_bgr_bitstream(HYuvDecContext *s, int count)
855 {
856  if (s->decorrelate) {
857  if (s->bitstream_bpp == 24)
858  decode_bgr_1(s, count, 1, 0);
859  else
860  decode_bgr_1(s, count, 1, 1);
861  } else {
862  if (s->bitstream_bpp == 24)
863  decode_bgr_1(s, count, 0, 0);
864  else
865  decode_bgr_1(s, count, 0, 1);
866  }
867 }
868 
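Note: with decorrelate set, only G is coded directly; B and R are coded as differences from G and are reconstructed by adding G back modulo 256, which is what decode_bgr_1() above does when it adds temp[G] to the decoded B and R residuals. A hedged scalar sketch (illustrative names, assumes <stdint.h>):

static void undo_rgb_decorrelation_sketch(uint8_t g, uint8_t db, uint8_t dr,
                                          uint8_t *b, uint8_t *r)
{
    *b = (uint8_t)(g + db);  /* B was stored as B - G */
    *r = (uint8_t)(g + dr);  /* R was stored as R - G */
}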
869 static void draw_slice(HYuvDecContext *s, AVCodecContext *avctx, AVFrame *frame, int y)
870 {
871  int h, cy, i;
872  int offset[AV_NUM_DATA_POINTERS];
873 
874  if (!avctx->draw_horiz_band)
875  return;
876 
877  h = y - s->last_slice_end;
878  y -= h;
879 
880  if (s->bitstream_bpp == 12)
881  cy = y >> 1;
882  else
883  cy = y;
884 
885  offset[0] = frame->linesize[0] * y;
886  offset[1] = frame->linesize[1] * cy;
887  offset[2] = frame->linesize[2] * cy;
888  for (i = 3; i < AV_NUM_DATA_POINTERS; i++)
889  offset[i] = 0;
890  emms_c();
891 
892  avctx->draw_horiz_band(avctx, frame, offset, y, 3, h);
893 
894  s->last_slice_end = y + h;
895 }
896 
897 static int left_prediction(HYuvDecContext *s, uint8_t *dst, const uint8_t *src, int w, int acc)
898 {
899  if (s->bps <= 8) {
900  return s->llviddsp.add_left_pred(dst, src, w, acc);
901  } else {
902  return s->llviddsp.add_left_pred_int16(( uint16_t *)dst, (const uint16_t *)src, s->n-1, w, acc);
903  }
904 }
905 
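Note: left prediction reconstructs each sample as the decoded residual plus the previously reconstructed sample to its left; acc carries that running value from one call to the next. A hedged scalar sketch of the 8-bit case (illustration only; the real add_left_pred is a SIMD-optimized DSP routine, and this assumes <stdint.h>):

static int add_left_pred_scalar_sketch(uint8_t *dst, const uint8_t *src, int w, int acc)
{
    for (int i = 0; i < w; i++) {
        acc    = (acc + src[i]) & 0xFF;  /* residual + left neighbour, mod 256 */
        dst[i] = acc;
    }
    return acc;                          /* the left value for the next call */
}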
906 static void add_bytes(HYuvDecContext *s, uint8_t *dst, uint8_t *src, int w)
907 {
908  if (s->bps <= 8) {
909  s->llviddsp.add_bytes(dst, src, w);
910  } else {
911  s->hdsp.add_int16((uint16_t*)dst, (const uint16_t*)src, s->n - 1, w);
912  }
913 }
914 
915 static void add_median_prediction(HYuvDecContext *s, uint8_t *dst, const uint8_t *src, const uint8_t *diff, int w, int *left, int *left_top)
916 {
917  if (s->bps <= 8) {
918  s->llviddsp.add_median_pred(dst, src, diff, w, left, left_top);
919  } else {
920  s->hdsp.add_hfyu_median_pred_int16((uint16_t *)dst, (const uint16_t *)src, (const uint16_t *)diff, s->n-1, w, left, left_top);
921  }
922 }
923 
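Note: MEDIAN prediction predicts each sample as the median of its left neighbour, its top neighbour and left + top - topleft, then adds the decoded residual. A hedged scalar sketch of the 8-bit path (illustration only; the real work is done by the DSP add_median_pred routine, and this assumes <stdint.h>):

static int median3_sketch(int a, int b, int c)
{
    if (a > b) { int t = a; a = b; b = t; }  /* ensure a <= b */
    if (b > c)
        b = c;
    return a > b ? a : b;                    /* median of the three inputs */
}

static void add_median_pred_scalar_sketch(uint8_t *dst, const uint8_t *top,
                                          const uint8_t *diff, int w,
                                          int *left, int *left_top)
{
    int l = *left, lt = *left_top;
    for (int i = 0; i < w; i++) {
        int pred = median3_sketch(l, top[i], (l + top[i] - lt) & 0xFF);
        l      = (pred + diff[i]) & 0xFF;    /* prediction + residual, mod 256 */
        lt     = top[i];
        dst[i] = l;
    }
    *left = l; *left_top = lt;
}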
924 static int decode_slice(AVCodecContext *avctx, AVFrame *p, int height,
925  int buf_size, int y_offset, int table_size)
926 {
927  HYuvDecContext *s = avctx->priv_data;
928  int fake_ystride, fake_ustride, fake_vstride;
929  const int width = avctx->width;
930  const int width2 = avctx->width >> 1;
931  int ret;
932 
933  if ((ret = init_get_bits8(&s->gb, s->bitstream_buffer + table_size, buf_size - table_size)) < 0)
934  return ret;
935 
936  fake_ystride = s->interlaced ? p->linesize[0] * 2 : p->linesize[0];
937  fake_ustride = s->interlaced ? p->linesize[1] * 2 : p->linesize[1];
938  fake_vstride = s->interlaced ? p->linesize[2] * 2 : p->linesize[2];
939 
940  if (s->version > 2) {
941  int plane;
942  for(plane = 0; plane < 1 + 2*s->chroma + s->alpha; plane++) {
943  int left, lefttop, y;
944  int w = width;
945  int h = height;
946  int fake_stride = fake_ystride;
947 
948  if (s->chroma && (plane == 1 || plane == 2)) {
949  w >>= s->chroma_h_shift;
950  h >>= s->chroma_v_shift;
951  fake_stride = plane == 1 ? fake_ustride : fake_vstride;
952  }
953 
954  switch (s->predictor) {
955  case LEFT:
956  case PLANE:
957  decode_plane_bitstream(s, w, plane);
958  left = left_prediction(s, p->data[plane], s->temp[0], w, 0);
959 
960  for (y = 1; y < h; y++) {
961  uint8_t *dst = p->data[plane] + p->linesize[plane]*y;
962 
963  decode_plane_bitstream(s, w, plane);
964  left = left_prediction(s, dst, s->temp[0], w, left);
965  if (s->predictor == PLANE) {
966  if (y > s->interlaced) {
967  add_bytes(s, dst, dst - fake_stride, w);
968  }
969  }
970  }
971 
972  break;
973  case MEDIAN:
974  decode_plane_bitstream(s, w, plane);
975  left= left_prediction(s, p->data[plane], s->temp[0], w, 0);
976 
977  y = 1;
978  if (y >= h)
979  break;
980 
981  /* second line is left predicted for interlaced case */
982  if (s->interlaced) {
983  decode_plane_bitstream(s, w, plane);
984  left = left_prediction(s, p->data[plane] + p->linesize[plane], s->temp[0], w, left);
985  y++;
986  if (y >= h)
987  break;
988  }
989 
990  lefttop = p->data[plane][0];
991  decode_plane_bitstream(s, w, plane);
992  add_median_prediction(s, p->data[plane] + fake_stride, p->data[plane], s->temp[0], w, &left, &lefttop);
993  y++;
994 
995  for (; y<h; y++) {
996  uint8_t *dst;
997 
998  decode_plane_bitstream(s, w, plane);
999 
1000  dst = p->data[plane] + p->linesize[plane] * y;
1001 
1002  add_median_prediction(s, dst, dst - fake_stride, s->temp[0], w, &left, &lefttop);
1003  }
1004 
1005  break;
1006  }
1007  }
1008  draw_slice(s, avctx, p, height);
1009  } else if (s->bitstream_bpp < 24) {
1010  int y, cy;
1011  int lefty, leftu, leftv;
1012  int lefttopy, lefttopu, lefttopv;
1013 
1014  if (s->yuy2) {
1015  p->data[0][3] = get_bits(&s->gb, 8);
1016  p->data[0][2] = get_bits(&s->gb, 8);
1017  p->data[0][1] = get_bits(&s->gb, 8);
1018  p->data[0][0] = get_bits(&s->gb, 8);
1019 
1020  av_log(avctx, AV_LOG_ERROR,
1021  "YUY2 output is not implemented yet\n");
1022  return AVERROR_PATCHWELCOME;
1023  } else {
1024  leftv =
1025  p->data[2][0 + y_offset * p->linesize[2]] = get_bits(&s->gb, 8);
1026  lefty =
1027  p->data[0][1 + y_offset * p->linesize[0]] = get_bits(&s->gb, 8);
1028  leftu =
1029  p->data[1][0 + y_offset * p->linesize[1]] = get_bits(&s->gb, 8);
1030  p->data[0][0 + y_offset * p->linesize[0]] = get_bits(&s->gb, 8);
1031 
1032  switch (s->predictor) {
1033  case LEFT:
1034  case PLANE:
1035  decode_422_bitstream(s, width - 2);
1036  lefty = s->llviddsp.add_left_pred(p->data[0] + p->linesize[0] * y_offset + 2, s->temp[0],
1037  width - 2, lefty);
1038  if (!(s->flags & AV_CODEC_FLAG_GRAY)) {
1039  leftu = s->llviddsp.add_left_pred(p->data[1] + p->linesize[1] * y_offset + 1, s->temp[1], width2 - 1, leftu);
1040  leftv = s->llviddsp.add_left_pred(p->data[2] + p->linesize[2] * y_offset + 1, s->temp[2], width2 - 1, leftv);
1041  }
1042 
1043  for (cy = y = 1; y < height; y++, cy++) {
1044  uint8_t *ydst, *udst, *vdst;
1045 
1046  if (s->bitstream_bpp == 12) {
1047  decode_gray_bitstream(s, width);
1048 
1049  ydst = p->data[0] + p->linesize[0] * (y + y_offset);
1050 
1051  lefty = s->llviddsp.add_left_pred(ydst, s->temp[0],
1052  width, lefty);
1053  if (s->predictor == PLANE) {
1054  if (y > s->interlaced)
1055  s->llviddsp.add_bytes(ydst, ydst - fake_ystride, width);
1056  }
1057  y++;
1058  if (y >= height)
1059  break;
1060  }
1061 
1062  draw_slice(s, avctx, p, y);
1063 
1064  ydst = p->data[0] + p->linesize[0] * (y + y_offset);
1065  udst = p->data[1] + p->linesize[1] * (cy + y_offset);
1066  vdst = p->data[2] + p->linesize[2] * (cy + y_offset);
1067 
1068  decode_422_bitstream(s, width);
1069  lefty = s->llviddsp.add_left_pred(ydst, s->temp[0],
1070  width, lefty);
1071  if (!(s->flags & AV_CODEC_FLAG_GRAY)) {
1072  leftu = s->llviddsp.add_left_pred(udst, s->temp[1], width2, leftu);
1073  leftv = s->llviddsp.add_left_pred(vdst, s->temp[2], width2, leftv);
1074  }
1075  if (s->predictor == PLANE) {
1076  if (cy > s->interlaced) {
1077  s->llviddsp.add_bytes(ydst, ydst - fake_ystride, width);
1078  if (!(s->flags & AV_CODEC_FLAG_GRAY)) {
1079  s->llviddsp.add_bytes(udst, udst - fake_ustride, width2);
1080  s->llviddsp.add_bytes(vdst, vdst - fake_vstride, width2);
1081  }
1082  }
1083  }
1084  }
1085  draw_slice(s, avctx, p, height);
1086 
1087  break;
1088  case MEDIAN:
1089  /* first line except first 2 pixels is left predicted */
1090  decode_422_bitstream(s, width - 2);
1091  lefty = s->llviddsp.add_left_pred(p->data[0] + 2, s->temp[0],
1092  width - 2, lefty);
1093  if (!(s->flags & AV_CODEC_FLAG_GRAY)) {
1094  leftu = s->llviddsp.add_left_pred(p->data[1] + 1, s->temp[1], width2 - 1, leftu);
1095  leftv = s->llviddsp.add_left_pred(p->data[2] + 1, s->temp[2], width2 - 1, leftv);
1096  }
1097 
1098  cy = y = 1;
1099  if (y >= height)
1100  break;
1101 
1102  /* second line is left predicted for interlaced case */
1103  if (s->interlaced) {
1104  decode_422_bitstream(s, width);
1105  lefty = s->llviddsp.add_left_pred(p->data[0] + p->linesize[0],
1106  s->temp[0], width, lefty);
1107  if (!(s->flags & AV_CODEC_FLAG_GRAY)) {
1108  leftu = s->llviddsp.add_left_pred(p->data[1] + p->linesize[1], s->temp[1], width2, leftu);
1109  leftv = s->llviddsp.add_left_pred(p->data[2] + p->linesize[2], s->temp[2], width2, leftv);
1110  }
1111  y++;
1112  cy++;
1113  if (y >= height)
1114  break;
1115  }
1116 
1117  /* next 4 pixels are left predicted too */
1118  decode_422_bitstream(s, 4);
1119  lefty = s->llviddsp.add_left_pred(p->data[0] + fake_ystride,
1120  s->temp[0], 4, lefty);
1121  if (!(s->flags & AV_CODEC_FLAG_GRAY)) {
1122  leftu = s->llviddsp.add_left_pred(p->data[1] + fake_ustride, s->temp[1], 2, leftu);
1123  leftv = s->llviddsp.add_left_pred(p->data[2] + fake_vstride, s->temp[2], 2, leftv);
1124  }
1125 
1126  /* next line except the first 4 pixels is median predicted */
1127  lefttopy = p->data[0][3];
1128  decode_422_bitstream(s, width - 4);
1129  s->llviddsp.add_median_pred(p->data[0] + fake_ystride + 4,
1130  p->data[0] + 4, s->temp[0],
1131  width - 4, &lefty, &lefttopy);
1132  if (!(s->flags & AV_CODEC_FLAG_GRAY)) {
1133  lefttopu = p->data[1][1];
1134  lefttopv = p->data[2][1];
1135  s->llviddsp.add_median_pred(p->data[1] + fake_ustride + 2, p->data[1] + 2, s->temp[1], width2 - 2, &leftu, &lefttopu);
1136  s->llviddsp.add_median_pred(p->data[2] + fake_vstride + 2, p->data[2] + 2, s->temp[2], width2 - 2, &leftv, &lefttopv);
1137  }
1138  y++;
1139  cy++;
1140 
1141  for (; y < height; y++, cy++) {
1142  uint8_t *ydst, *udst, *vdst;
1143 
1144  if (s->bitstream_bpp == 12) {
1145  while (2 * cy > y) {
1146  decode_gray_bitstream(s, width);
1147  ydst = p->data[0] + p->linesize[0] * y;
1148  s->llviddsp.add_median_pred(ydst, ydst - fake_ystride,
1149  s->temp[0], width,
1150  &lefty, &lefttopy);
1151  y++;
1152  }
1153  if (y >= height)
1154  break;
1155  }
1156  draw_slice(s, avctx, p, y);
1157 
1158  decode_422_bitstream(s, width);
1159 
1160  ydst = p->data[0] + p->linesize[0] * y;
1161  udst = p->data[1] + p->linesize[1] * cy;
1162  vdst = p->data[2] + p->linesize[2] * cy;
1163 
1164  s->llviddsp.add_median_pred(ydst, ydst - fake_ystride,
1165  s->temp[0], width,
1166  &lefty, &lefttopy);
1167  if (!(s->flags & AV_CODEC_FLAG_GRAY)) {
1168  s->llviddsp.add_median_pred(udst, udst - fake_ustride, s->temp[1], width2, &leftu, &lefttopu);
1169  s->llviddsp.add_median_pred(vdst, vdst - fake_vstride, s->temp[2], width2, &leftv, &lefttopv);
1170  }
1171  }
1172 
1173  draw_slice(s, avctx, p, height);
1174  break;
1175  }
1176  }
1177  } else {
1178  int y;
1179  uint8_t left[4];
1180  const int last_line = (y_offset + height - 1) * p->linesize[0];
1181 
1182  if (s->bitstream_bpp == 32) {
1183  left[A] = p->data[0][last_line + A] = get_bits(&s->gb, 8);
1184  left[R] = p->data[0][last_line + R] = get_bits(&s->gb, 8);
1185  left[G] = p->data[0][last_line + G] = get_bits(&s->gb, 8);
1186  left[B] = p->data[0][last_line + B] = get_bits(&s->gb, 8);
1187  } else {
1188  left[R] = p->data[0][last_line + R] = get_bits(&s->gb, 8);
1189  left[G] = p->data[0][last_line + G] = get_bits(&s->gb, 8);
1190  left[B] = p->data[0][last_line + B] = get_bits(&s->gb, 8);
1191  left[A] = p->data[0][last_line + A] = 255;
1192  skip_bits(&s->gb, 8);
1193  }
1194 
1195  if (s->bgr32) {
1196  switch (s->predictor) {
1197  case LEFT:
1198  case PLANE:
1199  decode_bgr_bitstream(s, width - 1);
1200  s->hdsp.add_hfyu_left_pred_bgr32(p->data[0] + last_line + 4,
1201  s->temp[0], width - 1, left);
1202 
1203  for (y = height - 2; y >= 0; y--) { // Yes it is stored upside down.
1204  decode_bgr_bitstream(s, width);
1205 
1206  s->hdsp.add_hfyu_left_pred_bgr32(p->data[0] + p->linesize[0] * (y + y_offset),
1207  s->temp[0], width, left);
1208  if (s->predictor == PLANE) {
1209  if (s->bitstream_bpp != 32)
1210  left[A] = 0;
1211  if (y < height - 1 - s->interlaced) {
1212  s->llviddsp.add_bytes(p->data[0] + p->linesize[0] * (y + y_offset),
1213  p->data[0] + p->linesize[0] * (y + y_offset) +
1214  fake_ystride, 4 * width);
1215  }
1216  }
1217  }
1218  // just 1 large slice as this is not possible in reverse order
1219  draw_slice(s, avctx, p, height);
1220  break;
1221  default:
1222  av_log(avctx, AV_LOG_ERROR,
1223  "prediction type not supported!\n");
1224  }
1225  } else {
1226  av_log(avctx, AV_LOG_ERROR,
1227  "BGR24 output is not implemented yet\n");
1228  return AVERROR_PATCHWELCOME;
1229  }
1230  }
1231 
1232  return 0;
1233 }
1234 
1235 static int decode_frame(AVCodecContext *avctx, AVFrame *p,
1236  int *got_frame, AVPacket *avpkt)
1237 {
1238  const uint8_t *buf = avpkt->data;
1239  int buf_size = avpkt->size;
1240  HYuvDecContext *s = avctx->priv_data;
1241  const int width = avctx->width;
1242  const int height = avctx->height;
1243  int slice, table_size = 0, ret, nb_slices;
1244  unsigned slices_info_offset;
1245  int slice_height;
1246 
1247  if (buf_size < (width * height + 7)/8)
1248  return AVERROR_INVALIDDATA;
1249 
1250  av_fast_padded_malloc(&s->bitstream_buffer,
1251  &s->bitstream_buffer_size,
1252  buf_size);
1253  if (!s->bitstream_buffer)
1254  return AVERROR(ENOMEM);
1255 
1256  s->bdsp.bswap_buf((uint32_t *) s->bitstream_buffer,
1257  (const uint32_t *) buf, buf_size / 4);
1258 
1259  if ((ret = ff_thread_get_buffer(avctx, p, 0)) < 0)
1260  return ret;
1261 
1262  if (s->context) {
1263  table_size = read_huffman_tables(s, s->bitstream_buffer, buf_size);
1264  if (table_size < 0)
1265  return table_size;
1266  }
1267 
1268  if ((unsigned) (buf_size - table_size) >= INT_MAX / 8)
1269  return AVERROR_INVALIDDATA;
1270 
1271  s->last_slice_end = 0;
1272 
1273  if (avctx->codec_id == AV_CODEC_ID_HYMT &&
1274  (buf_size > 32 && AV_RL32(avpkt->data + buf_size - 16) == 0)) {
1275  slices_info_offset = AV_RL32(avpkt->data + buf_size - 4);
1276  slice_height = AV_RL32(avpkt->data + buf_size - 8);
1277  nb_slices = AV_RL32(avpkt->data + buf_size - 12);
1278  if (nb_slices * 8LL + slices_info_offset > buf_size - 16 ||
1279  s->chroma_v_shift ||
1280  slice_height <= 0 || nb_slices * (uint64_t)slice_height > height)
1281  return AVERROR_INVALIDDATA;
1282  } else {
1283  slice_height = height;
1284  nb_slices = 1;
1285  }
1286 
1287  for (slice = 0; slice < nb_slices; slice++) {
1288  int y_offset, slice_offset, slice_size;
1289 
1290  if (nb_slices > 1) {
1291  slice_offset = AV_RL32(avpkt->data + slices_info_offset + slice * 8);
1292  slice_size = AV_RL32(avpkt->data + slices_info_offset + slice * 8 + 4);
1293 
1294  if (slice_offset < 0 || slice_size <= 0 || (slice_offset&3) ||
1295  slice_offset + (int64_t)slice_size > buf_size)
1296  return AVERROR_INVALIDDATA;
1297 
1298  y_offset = height - (slice + 1) * slice_height;
1299  s->bdsp.bswap_buf((uint32_t *)s->bitstream_buffer,
1300  (const uint32_t *)(buf + slice_offset), slice_size / 4);
1301  } else {
1302  y_offset = 0;
1303  slice_offset = 0;
1304  slice_size = buf_size;
1305  }
1306 
1307  ret = decode_slice(avctx, p, slice_height, slice_size, y_offset, table_size);
1308  emms_c();
1309  if (ret < 0)
1310  return ret;
1311  }
1312 
1313  *got_frame = 1;
1314 
1315  return (get_bits_count(&s->gb) + 31) / 32 * 4 + table_size;
1316 }
1317 
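Note: for HYMT packets, decode_frame() above locates the per-slice data through a 16-byte trailer at the end of the packet, read field by field with AV_RL32() (all values little-endian 32-bit). A hedged sketch of that layout (the struct and its field names are illustrative; the real code never declares such a type):

struct hymt_packet_trailer_sketch {   /* starts at buf_size - 16           */
    uint32_t zero_marker;             /* must be 0                         */
    uint32_t nb_slices;               /* number of slices in the packet    */
    uint32_t slice_height;            /* height of each slice in lines     */
    uint32_t slices_info_offset;      /* offset of the per-slice           */
                                      /* (offset, size) pairs              */
};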
1318 const FFCodec ff_huffyuv_decoder = {
1319  .p.name = "huffyuv",
1320  CODEC_LONG_NAME("Huffyuv / HuffYUV"),
1321  .p.type = AVMEDIA_TYPE_VIDEO,
1322  .p.id = AV_CODEC_ID_HUFFYUV,
1323  .priv_data_size = sizeof(HYuvDecContext),
1324  .init = decode_init,
1325  .close = decode_end,
1326  FF_CODEC_DECODE_CB(decode_frame),
1327  .p.capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_DRAW_HORIZ_BAND |
1328  AV_CODEC_CAP_FRAME_THREADS,
1329  .caps_internal = FF_CODEC_CAP_INIT_CLEANUP,
1330 };
1331 
1332 #if CONFIG_FFVHUFF_DECODER
1333 const FFCodec ff_ffvhuff_decoder = {
1334  .p.name = "ffvhuff",
1335  CODEC_LONG_NAME("Huffyuv FFmpeg variant"),
1336  .p.type = AVMEDIA_TYPE_VIDEO,
1337  .p.id = AV_CODEC_ID_FFVHUFF,
1338  .priv_data_size = sizeof(HYuvDecContext),
1339  .init = decode_init,
1340  .close = decode_end,
1341  FF_CODEC_DECODE_CB(decode_frame),
1342  .p.capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_DRAW_HORIZ_BAND |
1343  AV_CODEC_CAP_FRAME_THREADS,
1344  .caps_internal = FF_CODEC_CAP_INIT_CLEANUP,
1345 };
1346 #endif /* CONFIG_FFVHUFF_DECODER */
1347 
1348 #if CONFIG_HYMT_DECODER
1349 const FFCodec ff_hymt_decoder = {
1350  .p.name = "hymt",
1351  CODEC_LONG_NAME("HuffYUV MT"),
1352  .p.type = AVMEDIA_TYPE_VIDEO,
1353  .p.id = AV_CODEC_ID_HYMT,
1354  .priv_data_size = sizeof(HYuvDecContext),
1355  .init = decode_init,
1356  .close = decode_end,
1357  FF_CODEC_DECODE_CB(decode_frame),
1358  .p.capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_DRAW_HORIZ_BAND |
1359  AV_CODEC_CAP_FRAME_THREADS,
1360  .caps_internal = FF_CODEC_CAP_INIT_CLEANUP,
1361 };
1362 #endif /* CONFIG_HYMT_DECODER */
read_len_table
static int read_len_table(uint8_t *dst, GetByteContext *gb, int n)
Definition: huffyuvdec.c:141
error
static void error(const char *err)
Definition: target_bsf_fuzzer.c:32
HuffYUVDSPContext
Definition: huffyuvdsp.h:25
VLC_INTERN
#define VLC_INTERN(dst, table, gb, name, bits, max_depth)
Subset of GET_VLC for use in hand-roller VLC code.
Definition: huffyuvdec.c:619
AV_PIX_FMT_YUVA422P16
#define AV_PIX_FMT_YUVA422P16
Definition: pixfmt.h:562
A
#define A(x)
Definition: vpx_arith.h:28
decode_bgr_bitstream
static void decode_bgr_bitstream(HYuvDecContext *s, int count)
Definition: huffyuvdec.c:854
bswapdsp.h
read_old_huffman_tables
static int read_old_huffman_tables(HYuvDecContext *s)
Definition: huffyuvdec.c:286
decorrelate
static void decorrelate(SnowContext *s, SubBand *b, IDWTELEM *src, int stride, int inverse, int use_median)
Definition: snowenc.c:1513
generate_joint_tables
static int generate_joint_tables(HYuvDecContext *s)
Definition: huffyuvdec.c:167
VLC_BITS
#define VLC_BITS
Definition: huffyuvdec.c:51
FF_CODEC_CAP_INIT_CLEANUP
#define FF_CODEC_CAP_INIT_CLEANUP
The codec allows calling the close function for deallocation even if the init function returned a fai...
Definition: codec_internal.h:43
get_bits_left
static int get_bits_left(GetBitContext *gb)
Definition: get_bits.h:695
r
const char * r
Definition: vf_curves.c:127
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
HYuvDecContext::alpha
int alpha
Definition: huffyuvdec.c:65
HYuvDecContext::context
int context
Definition: huffyuvdec.c:71
AV_CODEC_ID_HUFFYUV
@ AV_CODEC_ID_HUFFYUV
Definition: codec_id.h:77
out
FILE * out
Definition: movenc.c:55
decode_bgr_1
static av_always_inline void decode_bgr_1(HYuvDecContext *s, int count, int decorrelate, int alpha)
Definition: huffyuvdec.c:796
AV_CODEC_ID_HYMT
@ AV_CODEC_ID_HYMT
Definition: codec_id.h:296
GetByteContext
Definition: bytestream.h:33
u
#define u(width, name, range_min, range_max)
Definition: cbs_h2645.c:251
int64_t
long long int64_t
Definition: coverity.c:34
MAX_VLC_N
#define MAX_VLC_N
Definition: huffyuv.h:50
get_bits_count
static int get_bits_count(const GetBitContext *s)
Definition: get_bits.h:266
HYuvDecContext::bgr32
int bgr32
Definition: huffyuvdec.c:61
AV_PIX_FMT_YUVA422P9
#define AV_PIX_FMT_YUVA422P9
Definition: pixfmt.h:554
HYuvDecContext::bitstream_buffer_size
unsigned int bitstream_buffer_size
Definition: huffyuvdec.c:83
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:410
pixdesc.h
AV_PIX_FMT_YUVA420P16
#define AV_PIX_FMT_YUVA420P16
Definition: pixfmt.h:561
w
uint8_t w
Definition: llviddspenc.c:38
AVPacket::data
uint8_t * data
Definition: packet.h:539
huffyuvdsp.h
AV_PIX_FMT_YUVA420P10
#define AV_PIX_FMT_YUVA420P10
Definition: pixfmt.h:556
HYuvDecContext::n
int n
Definition: huffyuvdec.c:63
add_median_prediction
static void add_median_prediction(HYuvDecContext *s, uint8_t *dst, const uint8_t *src, const uint8_t *diff, int w, int *left, int *left_top)
Definition: huffyuvdec.c:915
b
#define b
Definition: input.c:41
READ_2PIX
#define READ_2PIX(dst0, dst1, plane1)
Definition: huffyuvdec.c:666
classic_add_luma
static const unsigned char classic_add_luma[256]
Definition: huffyuvdec.c:103
R
#define R
Definition: huffyuv.h:44
AV_PIX_FMT_YUV420P10
#define AV_PIX_FMT_YUV420P10
Definition: pixfmt.h:515
FFCodec
Definition: codec_internal.h:127
HYuvDecContext::vlc_n
int vlc_n
Definition: huffyuvdec.c:64
AV_PIX_FMT_BGR24
@ AV_PIX_FMT_BGR24
packed RGB 8:8:8, 24bpp, BGRBGR...
Definition: pixfmt.h:76
AV_PIX_FMT_YUV440P
@ AV_PIX_FMT_YUV440P
planar YUV 4:4:0 (1 Cr & Cb sample per 1x2 Y samples)
Definition: pixfmt.h:106
decode_init
static av_cold int decode_init(AVCodecContext *avctx)
Definition: huffyuvdec.c:342
UPDATE_CACHE
#define UPDATE_CACHE(name, gb)
Definition: get_bits.h:225
OP8bits
#define OP8bits(dst0, dst1, code)
Definition: huffyuvdec.c:664
OP14bits
#define OP14bits(dst0, dst1, code)
Definition: huffyuvdec.c:705
add_bytes
static void add_bytes(HYuvDecContext *s, uint8_t *dst, uint8_t *src, int w)
Definition: huffyuvdec.c:906
HYuvDecContext::version
int version
Definition: huffyuvdec.c:59
AV_PIX_FMT_YUVA422P10
#define AV_PIX_FMT_YUVA422P10
Definition: pixfmt.h:557
HYuvDecContext::bits
uint32_t bits[4][MAX_VLC_N]
Definition: huffyuvdec.c:79
thread.h
AVFrame::data
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:431
av_malloc
#define av_malloc(s)
Definition: tableprint_vlc.h:30
skip_bits
static void skip_bits(GetBitContext *s, int n)
Definition: get_bits.h:381
AV_PIX_FMT_YUVA420P9
#define AV_PIX_FMT_YUVA420P9
Definition: pixfmt.h:553
get_bits
static unsigned int get_bits(GetBitContext *s, int n)
Read 1-25 bits.
Definition: get_bits.h:335
FFCodec::p
AVCodec p
The public AVCodec.
Definition: codec_internal.h:131
AV_PIX_FMT_GBRP14
#define AV_PIX_FMT_GBRP14
Definition: pixfmt.h:533
AV_PIX_FMT_GBRAP
@ AV_PIX_FMT_GBRAP
planar GBRA 4:4:4:4 32bpp
Definition: pixfmt.h:212
HYuvDecContext::predictor
Predictor predictor
Definition: huffyuvdec.c:55
AV_PIX_FMT_GBRP10
#define AV_PIX_FMT_GBRP10
Definition: pixfmt.h:531
AV_PIX_FMT_YUVA444P16
#define AV_PIX_FMT_YUVA444P16
Definition: pixfmt.h:563
GetBitContext
Definition: get_bits.h:108
AV_PIX_FMT_YUV422P9
#define AV_PIX_FMT_YUV422P9
Definition: pixfmt.h:513
AVCodecContext::flags
int flags
AV_CODEC_FLAG_*.
Definition: avcodec.h:508
decode_frame
static int decode_frame(AVCodecContext *avctx, AVFrame *p, int *got_frame, AVPacket *avpkt)
Definition: huffyuvdec.c:1235
val
static double val(void *priv, double ch)
Definition: aeval.c:77
av_pix_fmt_get_chroma_sub_sample
int av_pix_fmt_get_chroma_sub_sample(enum AVPixelFormat pix_fmt, int *h_shift, int *v_shift)
Utility function to access log2_chroma_w log2_chroma_h from the pixel format AVPixFmtDescriptor.
Definition: pixdesc.c:3276
AV_PIX_FMT_GRAY16
#define AV_PIX_FMT_GRAY16
Definition: pixfmt.h:499
HYuvDecContext::chroma_h_shift
int chroma_h_shift
Definition: huffyuvdec.c:68
HYuvDecContext::temp16
uint16_t * temp16[3]
Definition: huffyuvdec.c:76
LLVidDSPContext
Definition: lossless_videodsp.h:28
AV_PIX_FMT_YUV444P10
#define AV_PIX_FMT_YUV444P10
Definition: pixfmt.h:518
HYuvDecContext::chroma
int chroma
Definition: huffyuvdec.c:66
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:209
av_cold
#define av_cold
Definition: attributes.h:90
decode_plane_bitstream
static void decode_plane_bitstream(HYuvDecContext *s, int width, int plane)
Definition: huffyuvdec.c:715
AV_PIX_FMT_YUV422P16
#define AV_PIX_FMT_YUV422P16
Definition: pixfmt.h:527
init_get_bits8
static int init_get_bits8(GetBitContext *s, const uint8_t *buffer, int byte_size)
Initialize GetBitContext.
Definition: get_bits.h:545
HYuvDecContext::yuy2
int yuy2
Definition: huffyuvdec.c:60
emms_c
#define emms_c()
Definition: emms.h:63
CLOSE_READER
#define CLOSE_READER(name, gb)
Definition: get_bits.h:188
AVCodecContext::extradata_size
int extradata_size
Definition: avcodec.h:538
FF_CODEC_DECODE_CB
#define FF_CODEC_DECODE_CB(func)
Definition: codec_internal.h:311
s
#define s(width, name)
Definition: cbs_vp9.c:198
AV_PIX_FMT_YUVA420P
@ AV_PIX_FMT_YUVA420P
planar YUV 4:2:0, 20bpp, (1 Cr & Cb sample per 2x2 Y & A samples)
Definition: pixfmt.h:108
AV_PIX_FMT_YUV444P16
#define AV_PIX_FMT_YUV444P16
Definition: pixfmt.h:528
g
const char * g
Definition: vf_curves.c:128
ff_thread_get_buffer
int ff_thread_get_buffer(AVCodecContext *avctx, AVFrame *f, int flags)
Wrapper around get_buffer() for frame-multithreaded codecs.
Definition: pthread_frame.c:1053
bits
uint8_t bits
Definition: vp3data.h:128
MEDIAN
#define MEDIAN(x)
av_assert0
#define av_assert0(cond)
assert() equivalent, that is always enabled.
Definition: avassert.h:40
vlc_init
#define vlc_init(vlc, nb_bits, nb_codes, bits, bits_wrap, bits_size, codes, codes_wrap, codes_size, flags)
Definition: vlc.h:62
B
#define B
Definition: huffyuv.h:42
AV_PIX_FMT_YUV420P9
#define AV_PIX_FMT_YUV420P9
Definition: pixfmt.h:512
AV_PIX_FMT_YUV420P16
#define AV_PIX_FMT_YUV420P16
Definition: pixfmt.h:526
get_bits.h
AV_CODEC_ID_FFVHUFF
@ AV_CODEC_ID_FFVHUFF
Definition: codec_id.h:119
AV_PIX_FMT_YUV420P
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:73
read_huffman_tables
static int read_huffman_tables(HYuvDecContext *s, const uint8_t *src, int length)
Definition: huffyuvdec.c:258
CODEC_LONG_NAME
#define CODEC_LONG_NAME(str)
Definition: codec_internal.h:296
AVCodecContext::codec_id
enum AVCodecID codec_id
Definition: avcodec.h:461
HYuvDecContext::temp
uint8_t * temp[3]
Definition: huffyuvdec.c:75
if
if(ret)
Definition: filter_design.txt:179
AV_CODEC_CAP_FRAME_THREADS
#define AV_CODEC_CAP_FRAME_THREADS
Codec supports frame-level multithreading.
Definition: codec.h:110
HYuvDecContext::bitstream_bpp
int bitstream_bpp
Definition: huffyuvdec.c:58
AV_PIX_FMT_GBRP16
#define AV_PIX_FMT_GBRP16
Definition: pixfmt.h:534
ff_bswapdsp_init
av_cold void ff_bswapdsp_init(BswapDSPContext *c)
Definition: bswapdsp.c:49
NULL
#define NULL
Definition: coverity.c:32
AVERROR_PATCHWELCOME
#define AVERROR_PATCHWELCOME
Not yet implemented in FFmpeg, patches welcome.
Definition: error.h:64
AV_PIX_FMT_YUYV422
@ AV_PIX_FMT_YUYV422
packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr
Definition: pixfmt.h:74
HYuvDecContext::gb
GetBitContext gb
Definition: huffyuvdec.c:54
HYuvDecContext::vlc
VLC vlc[8]
Definition: huffyuvdec.c:81
LAST_SKIP_BITS
#define LAST_SKIP_BITS(name, gb, num)
Definition: get_bits.h:247
classic_shift_chroma
static const uint8_t classic_shift_chroma[]
Definition: huffyuvdec.c:96
ff_huffyuv_generate_bits_table
int ff_huffyuv_generate_bits_table(uint32_t *dst, const uint8_t *len_table, int n)
Definition: huffyuv.c:40
AV_PIX_FMT_YUV422P10
#define AV_PIX_FMT_YUV422P10
Definition: pixfmt.h:516
HYuvDecContext::llviddsp
LLVidDSPContext llviddsp
Definition: huffyuvdec.c:86
AV_PIX_FMT_GRAY8
@ AV_PIX_FMT_GRAY8
Y , 8bpp.
Definition: pixfmt.h:81
READ_2PIX_PLANE16
#define READ_2PIX_PLANE16(dst0, dst1, plane)
Definition: huffyuvdec.c:709
HYuvDecContext
Definition: huffyuvdec.c:53
AV_PIX_FMT_GBRP9
#define AV_PIX_FMT_GBRP9
Definition: pixfmt.h:530
get_vlc2
static av_always_inline int get_vlc2(GetBitContext *s, const VLCElem *table, int bits, int max_depth)
Parse a vlc code.
Definition: get_bits.h:652
HYuvDecContext::flags
int flags
Definition: huffyuvdec.c:70
index
int index
Definition: gxfenc.c:90
READ_2PIX_PLANE
#define READ_2PIX_PLANE(dst0, dst1, plane, OP)
Definition: huffyuvdec.c:700
bytestream2_get_bytes_left
static av_always_inline int bytestream2_get_bytes_left(GetByteContext *g)
Definition: bytestream.h:158
bytestream2_tell
static av_always_inline int bytestream2_tell(GetByteContext *g)
Definition: bytestream.h:192
init
int(* init)(AVBSFContext *ctx)
Definition: dts2pts.c:368
AV_CODEC_CAP_DR1
#define AV_CODEC_CAP_DR1
Codec uses get_buffer() or get_encode_buffer() for allocating buffers and supports custom allocators.
Definition: codec.h:52
AV_CODEC_FLAG_GRAY
#define AV_CODEC_FLAG_GRAY
Only decode/encode grayscale.
Definition: avcodec.h:322
AVPacket::size
int size
Definition: packet.h:540
height
#define height
Definition: dsp.h:85
codec_internal.h
dst
uint8_t ptrdiff_t const uint8_t ptrdiff_t int intptr_t intptr_t int int16_t * dst
Definition: dsp.h:83
for
for(k=2;k<=8;++k)
Definition: h264pred_template.c:424
AV_PIX_FMT_YUV422P12
#define AV_PIX_FMT_YUV422P12
Definition: pixfmt.h:520
AV_NUM_DATA_POINTERS
#define AV_NUM_DATA_POINTERS
Definition: frame.h:411
AV_PIX_FMT_YUV444P12
#define AV_PIX_FMT_YUV444P12
Definition: pixfmt.h:522
decode_end
static av_cold int decode_end(AVCodecContext *avctx)
Definition: huffyuvdec.c:326
LEFT
#define LEFT
Definition: cdgraphics.c:171
diff
static av_always_inline int diff(const struct color_info *a, const struct color_info *b, const int trans_thresh)
Definition: vf_paletteuse.c:166
ff_huffyuvdsp_init
av_cold void ff_huffyuvdsp_init(HuffYUVDSPContext *c, enum AVPixelFormat pix_fmt)
Definition: huffyuvdsp.c:84
OPEN_READER
#define OPEN_READER(name, gb)
Definition: get_bits.h:177
Predictor
Definition: ratecontrol.h:33
AV_PIX_FMT_RGB32
#define AV_PIX_FMT_RGB32
Definition: pixfmt.h:488
AV_PIX_FMT_YUVA444P
@ AV_PIX_FMT_YUVA444P
planar YUV 4:4:4 32bpp, (1 Cr & Cb sample per 1x1 Y & A samples)
Definition: pixfmt.h:174
classic_add_chroma
static const unsigned char classic_add_chroma[256]
Definition: huffyuvdec.c:122
AV_PIX_FMT_YUVA444P10
#define AV_PIX_FMT_YUVA444P10
Definition: pixfmt.h:558
offset
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this just let it vf offset
Definition: writing_filters.txt:86
decode_slice
static int decode_slice(AVCodecContext *avctx, AVFrame *p, int height, int buf_size, int y_offset, int table_size)
Definition: huffyuvdec.c:924
ff_vlc_init_sparse
int ff_vlc_init_sparse(VLC *vlc, int nb_bits, int nb_codes, const void *bits, int bits_wrap, int bits_size, const void *codes, int codes_wrap, int codes_size, const void *symbols, int symbols_wrap, int symbols_size, int flags)
Build VLC decoding tables suitable for use with get_vlc2().
Definition: vlc.c:250
HYuvDecContext::interlaced
int interlaced
Definition: huffyuvdec.c:56
HYuvDecContext::bps
int bps
Definition: huffyuvdec.c:62
draw_slice
static void draw_slice(HYuvDecContext *s, AVCodecContext *avctx, AVFrame *frame, int y)
Definition: huffyuvdec.c:869
HYuvDecContext::chroma_v_shift
int chroma_v_shift
Definition: huffyuvdec.c:69
emms.h
AVCodecContext::bits_per_coded_sample
int bits_per_coded_sample
bits per sample/pixel from the demuxer (needed for huffyuv).
Definition: avcodec.h:1586
interlaced
uint8_t interlaced
Definition: mxfenc.c:2286
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:256
code
and forward the test the status of outputs and forward it to the corresponding return FFERROR_NOT_READY If the filters stores internally one or a few frame for some it can consider them to be part of the FIFO and delay acknowledging a status change accordingly Example code
Definition: filter_design.txt:178
AVCodecContext::extradata
uint8_t * extradata
Out-of-band global headers that may be used by some codecs.
Definition: avcodec.h:537
ff_hymt_decoder
const FFCodec ff_hymt_decoder
ff_huffyuv_decoder
const FFCodec ff_huffyuv_decoder
Definition: huffyuvdec.c:1318
AV_PIX_FMT_GBRP12
#define AV_PIX_FMT_GBRP12
Definition: pixfmt.h:532
HYuvDecContext::pix_bgr_map
uint32_t pix_bgr_map[1<< VLC_BITS]
Definition: huffyuvdec.c:80
av_assert1
#define av_assert1(cond)
assert() equivalent, that does not lie in speed critical code.
Definition: avassert.h:56
av_fast_padded_malloc
void av_fast_padded_malloc(void *ptr, unsigned int *size, size_t min_size)
Same behaviour as av_fast_malloc(), but the buffer has additional AV_INPUT_BUFFER_PADDING_SIZE at the end which will always be 0.
Definition: utils.c:52
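A minimal usage sketch, assuming the reusable buffer and size fields mirror HYuvDecContext::bitstream_buffer / bitstream_buffer_size listed on this page (the exact call site may differ):

    av_fast_padded_malloc(&s->bitstream_buffer, &s->bitstream_buffer_size, buf_size);
    if (!s->bitstream_buffer)
        return AVERROR(ENOMEM);   /* allocation failed */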
av_always_inline
#define av_always_inline
Definition: attributes.h:49
FFMIN
#define FFMIN(a, b)
Definition: macros.h:49
av_mallocz
void * av_mallocz(size_t size)
Allocate a memory block with alignment suitable for all memory accesses (including vectors if available on the CPU) and zero all the bytes of the block.
Definition: mem.c:256
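For illustration only, a typical zeroed allocation paired with av_freep(), which also clears the pointer; the member and size shown here are placeholders:

    s->temp[0] = av_mallocz(4 * avctx->width + 16);  /* zero-filled scratch row */
    if (!s->temp[0])
        return AVERROR(ENOMEM);
    /* ... */
    av_freep(&s->temp[0]);                           /* free and set to NULL */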
AVCodec::name
const char * name
Name of the codec implementation.
Definition: codec.h:194
len
int len
Definition: vorbis_enc_data.h:426
PLANE
@ PLANE
Definition: huffyuv.h:54
decode_gray_bitstream
static void decode_gray_bitstream(HYuvDecContext *s, int count)
Definition: huffyuvdec.c:776
AVCodecContext::height
int height
Definition: avcodec.h:632
AVCodecContext::pix_fmt
enum AVPixelFormat pix_fmt
Pixel format, see AV_PIX_FMT_xxx.
Definition: avcodec.h:671
AV_PIX_FMT_YUV444P9
#define AV_PIX_FMT_YUV444P9
Definition: pixfmt.h:514
avcodec.h
limit
static double limit(double x)
Definition: vf_pseudocolor.c:142
ff_vlc_free
void ff_vlc_free(VLC *vlc)
Definition: vlc.c:580
ret
ret
Definition: filter_design.txt:187
close
static void close(AVCodecParserContext *s)
Definition: ffv1_parser.c:73
frame
Definition: filter_design.txt:264
AV_PIX_FMT_0RGB32
#define AV_PIX_FMT_0RGB32
Definition: pixfmt.h:492
AV_PIX_FMT_YUVA444P9
#define AV_PIX_FMT_YUVA444P9
Definition: pixfmt.h:555
AV_PIX_FMT_YUV420P12
#define AV_PIX_FMT_YUV420P12
Definition: pixfmt.h:519
left
Definition: snow.txt:386
AV_RL32
uint64_t_TMPL AV_WL64 unsigned int_TMPL AV_RL32
Definition: bytestream.h:92
AVCodecContext::draw_horiz_band
void(* draw_horiz_band)(struct AVCodecContext *s, const AVFrame *src, int offset[AV_NUM_DATA_POINTERS], int y, int type, int height)
If non-NULL, 'draw_horiz_band' is called by the libavcodec decoder to draw a horizontal band.
Definition: avcodec.h:772
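A hedged sketch of how a decoder advertising AV_CODEC_CAP_DRAW_HORIZ_BAND might fire this callback once h finished rows starting at row y are available; the plane offsets are simplified to zero here, whereas a real slice drawer computes them per plane from the linesizes:

    if (avctx->draw_horiz_band) {
        int offset[AV_NUM_DATA_POINTERS] = { 0 };
        avctx->draw_horiz_band(avctx, frame, offset, y, 3 /* band type, illustrative */, h);
    }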
BITS_LEFT
#define BITS_LEFT(name, gb)
Definition: get_bits.h:239
AV_PIX_FMT_YUV422P14
#define AV_PIX_FMT_YUV422P14
Definition: pixfmt.h:524
left_prediction
static int left_prediction(HYuvDecContext *s, uint8_t *dst, const uint8_t *src, int w, int acc)
Definition: huffyuvdec.c:897
ff_llviddsp_init
void ff_llviddsp_init(LLVidDSPContext *c)
Definition: lossless_videodsp.c:113
AVCodecContext
main external API structure.
Definition: avcodec.h:451
HYuvDecContext::yuv
int yuv
Definition: huffyuvdec.c:67
SHOW_UBITS
#define SHOW_UBITS(name, gb, num)
Definition: get_bits.h:259
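Together with OPEN_READER and BITS_LEFT listed on this page, this macro belongs to the cached bitreader in get_bits.h; a hedged sketch of the usual open/peek/skip/close sequence, where re, gb and n are local placeholders:

    OPEN_READER(re, &gb);                           /* copy cache/index into locals  */
    UPDATE_CACHE(re, &gb);                          /* refill the bit cache          */
    unsigned peek = SHOW_UBITS(re, &gb, VLC_BITS);  /* look at bits, consume nothing */
    LAST_SKIP_BITS(re, &gb, n);                     /* drop the n bits actually used */
    CLOSE_READER(re, &gb);                          /* write the position back to gb */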
HYuvDecContext::len
uint8_t len[4][MAX_VLC_N]
Definition: huffyuvdec.c:78
VLC
Definition: vlc.h:36
sign_extend
static av_const int sign_extend(int val, unsigned bits)
Definition: mathops.h:131
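For example (values chosen here purely for illustration), a 12-bit two's-complement quantity is widened to a full int like this:

    int a = sign_extend(0xFFF, 12);  /* all twelve bits set  -> -1    */
    int b = sign_extend(0x800, 12);  /* sign bit only        -> -2048 */
    int c = sign_extend(0x7FF, 12);  /* largest positive     ->  2047 */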
G
#define G
Definition: huffyuv.h:43
AV_PIX_FMT_YUV444P
@ AV_PIX_FMT_YUV444P
planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
Definition: pixfmt.h:78
AV_PIX_FMT_GBRP
@ AV_PIX_FMT_GBRP
planar GBR 4:4:4 24bpp
Definition: pixfmt.h:165
AVMEDIA_TYPE_VIDEO
@ AVMEDIA_TYPE_VIDEO
Definition: avutil.h:201
AV_PIX_FMT_YUV422P
@ AV_PIX_FMT_YUV422P
planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
Definition: pixfmt.h:77
mem.h
lossless_videodsp.h
map
const VDPAUPixFmtMap * map
Definition: hwcontext_vdpau.c:71
alpha
static const int16_t alpha[]
Definition: ilbcdata.h:55
AVPacket
This structure stores compressed data.
Definition: packet.h:516
AVCodecContext::priv_data
void * priv_data
Definition: avcodec.h:478
decode_422_bitstream
static void decode_422_bitstream(HYuvDecContext *s, int count)
Definition: huffyuvdec.c:671
HYuvDecContext::bdsp
BswapDSPContext bdsp
Definition: huffyuvdec.c:84
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:34
AV_PIX_FMT_YUV411P
@ AV_PIX_FMT_YUV411P
planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples)
Definition: pixfmt.h:80
classic_shift_luma
static const uint8_t classic_shift_luma[]
Definition: huffyuvdec.c:90
AVCodecContext::width
int width
picture width / height.
Definition: avcodec.h:632
bytestream.h
imgutils.h
bytestream2_init
static av_always_inline void bytestream2_init(GetByteContext *g, const uint8_t *buf, int buf_size)
Definition: bytestream.h:137
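A short usage sketch, assuming an input AVPacket named avpkt; the bytestream2 readers clamp at the end of the buffer instead of overreading:

    GetByteContext gb;
    bytestream2_init(&gb, avpkt->data, avpkt->size);
    unsigned first     = bytestream2_get_byte(&gb);        /* read one byte       */
    int      remaining = bytestream2_get_bytes_left(&gb);  /* payload still ahead */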
AVFrame::linesize
int linesize[AV_NUM_DATA_POINTERS]
For video, a positive or negative value, which is typically indicating the size in bytes of each picture line.
Definition: frame.h:455
AV_CODEC_CAP_DRAW_HORIZ_BAND
#define AV_CODEC_CAP_DRAW_HORIZ_BAND
Decoder can use draw_horiz_band callback.
Definition: codec.h:44
AV_PIX_FMT_YUV410P
@ AV_PIX_FMT_YUV410P
planar YUV 4:1:0, 9bpp, (1 Cr & Cb sample per 4x4 Y samples)
Definition: pixfmt.h:79
ff_ffvhuff_decoder
const FFCodec ff_ffvhuff_decoder
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:27
AVERROR_INVALIDDATA
#define AVERROR_INVALIDDATA
Invalid data found when processing input.
Definition: error.h:61
BswapDSPContext
Definition: bswapdsp.h:24
h
h
Definition: vp9dsp_template.c:2070
AV_PIX_FMT_YUV444P14
#define AV_PIX_FMT_YUV444P14
Definition: pixfmt.h:525
av_image_check_size
int av_image_check_size(unsigned int w, unsigned int h, int log_offset, void *log_ctx)
Check if the given dimension of an image is valid, meaning that all bytes of the image can be addressed with a signed int.
Definition: imgutils.c:318
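A minimal sketch of the usual validation step early in decoder init, before any width-dependent buffers are sized:

    if ((ret = av_image_check_size(avctx->width, avctx->height, 0, avctx)) < 0)
        return ret;   /* dimensions cannot be safely addressed */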
huffyuv.h
width
#define width
Definition: dsp.h:85
HYuvDecContext::hdsp
HuffYUVDSPContext hdsp
Definition: huffyuvdec.c:85
HYuvDecContext::decorrelate
int decorrelate
Definition: huffyuvdec.c:57
src
#define src
Definition: vp8dsp.c:248
AV_PIX_FMT_YUVA422P
@ AV_PIX_FMT_YUVA422P
planar YUV 4:2:2 24bpp, (1 Cr & Cb sample per 2x1 Y & A samples)
Definition: pixfmt.h:173
AV_PIX_FMT_YUV420P14
#define AV_PIX_FMT_YUV420P14
Definition: pixfmt.h:523
HYuvDecContext::last_slice_end
int last_slice_end
Definition: huffyuvdec.c:72
HYuvDecContext::bitstream_buffer
uint8_t * bitstream_buffer
Definition: huffyuvdec.c:82