FFmpeg
ffv1dec.c
1 /*
2  * FFV1 decoder
3  *
4  * Copyright (c) 2003-2013 Michael Niedermayer <michaelni@gmx.at>
5  *
6  * This file is part of FFmpeg.
7  *
8  * FFmpeg is free software; you can redistribute it and/or
9  * modify it under the terms of the GNU Lesser General Public
10  * License as published by the Free Software Foundation; either
11  * version 2.1 of the License, or (at your option) any later version.
12  *
13  * FFmpeg is distributed in the hope that it will be useful,
14  * but WITHOUT ANY WARRANTY; without even the implied warranty of
15  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16  * Lesser General Public License for more details.
17  *
18  * You should have received a copy of the GNU Lesser General Public
19  * License along with FFmpeg; if not, write to the Free Software
20  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
21  */
22 
23 /**
24  * @file
25  * FF Video Codec 1 (a lossless codec) decoder
26  */
27 
28 #include "libavutil/avassert.h"
29 #include "libavutil/crc.h"
30 #include "libavutil/opt.h"
31 #include "libavutil/imgutils.h"
32 #include "libavutil/pixdesc.h"
33 #include "libavutil/timer.h"
34 #include "avcodec.h"
35 #include "internal.h"
36 #include "get_bits.h"
37 #include "rangecoder.h"
38 #include "golomb.h"
39 #include "mathops.h"
40 #include "ffv1.h"
41 
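/* Read one value from the range coder: a unary-coded exponent (states 1..10),
 * the mantissa bits (states 22..31) and, when is_signed is set, a sign bit
 * (states 11..21). */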
42 static av_always_inline int get_symbol_inline(RangeCoder *c, uint8_t *state,
43  int is_signed)
44 {
45  if (get_rac(c, state + 0))
46  return 0;
47  else {
48  int i, e, a;
49  e = 0;
50  while (get_rac(c, state + 1 + FFMIN(e, 9))) // 1..10
51  e++;
52 
53  a = 1;
54  for (i = e - 1; i >= 0; i--)
55  a += a + get_rac(c, state + 22 + FFMIN(i, 9)); // 22..31
56 
57  e = -(is_signed && get_rac(c, state + 11 + FFMIN(e, 10))); // 11..21
58  return (a ^ e) - e;
59  }
60 }
61 
62 static av_noinline int get_symbol(RangeCoder *c, uint8_t *state, int is_signed)
63 {
64  return get_symbol_inline(c, state, is_signed);
65 }
66 
67 static inline int get_vlc_symbol(GetBitContext *gb, VlcState *const state,
68  int bits)
69 {
70  int k, i, v, ret;
71 
72  i = state->count;
73  k = 0;
74  while (i < state->error_sum) { // FIXME: optimize
75  k++;
76  i += i;
77  }
78 
79  v = get_sr_golomb(gb, k, 12, bits);
80  av_dlog(NULL, "v:%d bias:%d error:%d drift:%d count:%d k:%d",
81  v, state->bias, state->error_sum, state->drift, state->count, k);
82 
83 #if 0 // JPEG LS
84  if (k == 0 && 2 * state->drift <= -state->count)
85  v ^= (-1);
86 #else
87  v ^= ((2 * state->drift + state->count) >> 31);
88 #endif
89 
90  ret = fold(v + state->bias, bits);
91 
92  update_vlc_state(state, v);
93 
94  return ret;
95 }
96 
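/* Decode one line of a plane into sample[1]. Uses the range coder when s->ac
 * is set, otherwise Golomb-Rice codes with a run mode for the zero context;
 * when slice_coding_mode is 1 each sample is read as raw bits through the
 * range coder. */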
97 static av_always_inline void decode_line(FFV1Context *s, int w,
98  int16_t *sample[2],
99  int plane_index, int bits)
100 {
101  PlaneContext *const p = &s->plane[plane_index];
102  RangeCoder *const c = &s->c;
103  int x;
104  int run_count = 0;
105  int run_mode = 0;
106  int run_index = s->run_index;
107 
108  if (s->slice_coding_mode == 1) {
109  int i;
110  for (x = 0; x < w; x++) {
111  int v = 0;
112  for (i=0; i<bits; i++) {
113  uint8_t state = 128;
114  v += v + get_rac(c, &state);
115  }
116  sample[1][x] = v;
117  }
118  return;
119  }
120 
121  for (x = 0; x < w; x++) {
122  int diff, context, sign;
123 
124  context = get_context(p, sample[1] + x, sample[0] + x, sample[1] + x);
125  if (context < 0) {
126  context = -context;
127  sign = 1;
128  } else
129  sign = 0;
130 
131  av_assert2(context < p->context_count);
132 
133  if (s->ac) {
134  diff = get_symbol_inline(c, p->state[context], 1);
135  } else {
136  if (context == 0 && run_mode == 0)
137  run_mode = 1;
138 
139  if (run_mode) {
140  if (run_count == 0 && run_mode == 1) {
141  if (get_bits1(&s->gb)) {
142  run_count = 1 << ff_log2_run[run_index];
143  if (x + run_count <= w)
144  run_index++;
145  } else {
146  if (ff_log2_run[run_index])
147  run_count = get_bits(&s->gb, ff_log2_run[run_index]);
148  else
149  run_count = 0;
150  if (run_index)
151  run_index--;
152  run_mode = 2;
153  }
154  }
155  run_count--;
156  if (run_count < 0) {
157  run_mode = 0;
158  run_count = 0;
159  diff = get_vlc_symbol(&s->gb, &p->vlc_state[context],
160  bits);
161  if (diff >= 0)
162  diff++;
163  } else
164  diff = 0;
165  } else
166  diff = get_vlc_symbol(&s->gb, &p->vlc_state[context], bits);
167 
168  av_dlog(s->avctx, "count:%d index:%d, mode:%d, x:%d pos:%d\n",
169  run_count, run_index, run_mode, x, get_bits_count(&s->gb));
170  }
171 
172  if (sign)
173  diff = -diff;
174 
175  sample[1][x] = (predict(sample[1] + x, sample[0] + x) + diff) &
176  ((1 << bits) - 1);
177  }
178  s->run_index = run_index;
179 }
180 
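/* Decode a full plane. Two sample rows are kept so the predictor can reference
 * the previous line; output is written as 8-bit or 16-bit samples depending on
 * bits_per_raw_sample and packed_at_lsb. */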
181 static void decode_plane(FFV1Context *s, uint8_t *src,
182  int w, int h, int stride, int plane_index)
183 {
184  int x, y;
185  int16_t *sample[2];
186  sample[0] = s->sample_buffer + 3;
187  sample[1] = s->sample_buffer + w + 6 + 3;
188 
189  s->run_index = 0;
190 
191  memset(s->sample_buffer, 0, 2 * (w + 6) * sizeof(*s->sample_buffer));
192 
193  for (y = 0; y < h; y++) {
194  int16_t *temp = sample[0]; // FIXME: try a normal buffer
195 
196  sample[0] = sample[1];
197  sample[1] = temp;
198 
199  sample[1][-1] = sample[0][0];
200  sample[0][w] = sample[0][w - 1];
201 
202 // { START_TIMER
203  if (s->avctx->bits_per_raw_sample <= 8) {
204  decode_line(s, w, sample, plane_index, 8);
205  for (x = 0; x < w; x++)
206  src[x + stride * y] = sample[1][x];
207  } else {
208  decode_line(s, w, sample, plane_index, s->avctx->bits_per_raw_sample);
209  if (s->packed_at_lsb) {
210  for (x = 0; x < w; x++) {
211  ((uint16_t*)(src + stride*y))[x] = sample[1][x];
212  }
213  } else {
214  for (x = 0; x < w; x++) {
215  ((uint16_t*)(src + stride*y))[x] = sample[1][x] << (16 - s->avctx->bits_per_raw_sample);
216  }
217  }
218  }
219 // STOP_TIMER("decode-line") }
220  }
221 }
222 
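/* Decode an RGB(A) slice: each row is decoded per channel in planar order,
 * then the reversible colour transform is undone (offset removal and g/b/r
 * reconstruction) before writing a packed 32-bit pixel or three 16-bit
 * planes. */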
223 static void decode_rgb_frame(FFV1Context *s, uint8_t *src[3], int w, int h, int stride[3])
224 {
225  int x, y, p;
226  int16_t *sample[4][2];
227  int lbd = s->avctx->bits_per_raw_sample <= 8;
228  int bits = s->avctx->bits_per_raw_sample > 0 ? s->avctx->bits_per_raw_sample : 8;
229  int offset = 1 << bits;
230 
231  for (x = 0; x < 4; x++) {
232  sample[x][0] = s->sample_buffer + x * 2 * (w + 6) + 3;
233  sample[x][1] = s->sample_buffer + (x * 2 + 1) * (w + 6) + 3;
234  }
235 
236  s->run_index = 0;
237 
238  memset(s->sample_buffer, 0, 8 * (w + 6) * sizeof(*s->sample_buffer));
239 
240  for (y = 0; y < h; y++) {
241  for (p = 0; p < 3 + s->transparency; p++) {
242  int16_t *temp = sample[p][0]; // FIXME: try a normal buffer
243 
244  sample[p][0] = sample[p][1];
245  sample[p][1] = temp;
246 
247  sample[p][1][-1]= sample[p][0][0 ];
248  sample[p][0][ w]= sample[p][0][w-1];
249  if (lbd && s->slice_coding_mode == 0)
250  decode_line(s, w, sample[p], (p + 1)/2, 9);
251  else
252  decode_line(s, w, sample[p], (p + 1)/2, bits + (s->slice_coding_mode != 1));
253  }
254  for (x = 0; x < w; x++) {
255  int g = sample[0][1][x];
256  int b = sample[1][1][x];
257  int r = sample[2][1][x];
258  int a = sample[3][1][x];
259 
260  if (s->slice_coding_mode != 1) {
261  b -= offset;
262  r -= offset;
263  g -= (b * s->slice_rct_by_coef + r * s->slice_rct_ry_coef) >> 2;
264  b += g;
265  r += g;
266  }
267 
268  if (lbd)
269  *((uint32_t*)(src[0] + x*4 + stride[0]*y)) = b + (g<<8) + (r<<16) + (a<<24);
270  else {
271  *((uint16_t*)(src[0] + x*2 + stride[0]*y)) = b;
272  *((uint16_t*)(src[1] + x*2 + stride[1]*y)) = g;
273  *((uint16_t*)(src[2] + x*2 + stride[2]*y)) = r;
274  }
275  }
276  }
277 }
278 
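/* Parse the per-slice header used since version 3: slice geometry in units of
 * the slice grid, per-plane quant table index, picture structure, sample
 * aspect ratio and, for version > 3, the slice coding mode and RCT
 * coefficients. */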
279 static int decode_slice_header(FFV1Context *f, FFV1Context *fs)
280 {
281  RangeCoder *c = &fs->c;
282  uint8_t state[CONTEXT_SIZE];
283  unsigned ps, i, context_count;
284  memset(state, 128, sizeof(state));
285 
286  av_assert0(f->version > 2);
287 
288  fs->slice_x = get_symbol(c, state, 0) * f->width ;
289  fs->slice_y = get_symbol(c, state, 0) * f->height;
290  fs->slice_width = (get_symbol(c, state, 0) + 1) * f->width + fs->slice_x;
291  fs->slice_height = (get_symbol(c, state, 0) + 1) * f->height + fs->slice_y;
292 
293  fs->slice_x /= f->num_h_slices;
294  fs->slice_y /= f->num_v_slices;
295  fs->slice_width = fs->slice_width /f->num_h_slices - fs->slice_x;
296  fs->slice_height = fs->slice_height/f->num_v_slices - fs->slice_y;
297  if ((unsigned)fs->slice_width > f->width || (unsigned)fs->slice_height > f->height)
298  return -1;
299  if ( (unsigned)fs->slice_x + (uint64_t)fs->slice_width > f->width
300  || (unsigned)fs->slice_y + (uint64_t)fs->slice_height > f->height)
301  return -1;
302 
303  for (i = 0; i < f->plane_count; i++) {
304  PlaneContext * const p = &fs->plane[i];
305  int idx = get_symbol(c, state, 0);
306  if (idx > (unsigned)f->quant_table_count) {
307  av_log(f->avctx, AV_LOG_ERROR, "quant_table_index out of range\n");
308  return -1;
309  }
310  p->quant_table_index = idx;
311  memcpy(p->quant_table, f->quant_tables[idx], sizeof(p->quant_table));
312  context_count = f->context_count[idx];
313 
314  if (p->context_count < context_count) {
315  av_freep(&p->state);
316  av_freep(&p->vlc_state);
317  }
318  p->context_count = context_count;
319  }
320 
321  ps = get_symbol(c, state, 0);
322  if (ps == 1) {
323  f->cur->interlaced_frame = 1;
324  f->cur->top_field_first = 1;
325  } else if (ps == 2) {
326  f->cur->interlaced_frame = 1;
327  f->cur->top_field_first = 0;
328  } else if (ps == 3) {
329  f->cur->interlaced_frame = 0;
330  }
331  f->cur->sample_aspect_ratio.num = get_symbol(c, state, 0);
332  f->cur->sample_aspect_ratio.den = get_symbol(c, state, 0);
333  if (fs->version > 3) {
334  fs->slice_reset_contexts = get_rac(c, state);
335  fs->slice_coding_mode = get_symbol(c, state, 0);
336  if (fs->slice_coding_mode != 1) {
337  fs->slice_rct_by_coef = get_symbol(c, state, 0);
338  fs->slice_rct_ry_coef = get_symbol(c, state, 0);
339  if ((uint64_t)fs->slice_rct_by_coef + (uint64_t)fs->slice_rct_ry_coef > 4) {
340  av_log(f->avctx, AV_LOG_ERROR, "slice_rct_y_coef out of range\n");
341  return AVERROR_INVALIDDATA;
342  }
343  }
344  }
345  return 0;
346 }
347 
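/* Slice worker run through avctx->execute(). For non-keyframes the context
 * state is copied from the co-located slice of the previous frame, then the
 * slice header is parsed (version > 2) and the planes or RGB data are decoded.
 * Reports per-slice progress for frame threading. */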
348 static int decode_slice(AVCodecContext *c, void *arg)
349 {
350  FFV1Context *fs = *(void **)arg;
351  FFV1Context *f = fs->avctx->priv_data;
352  int width, height, x, y, ret;
353  const int ps = av_pix_fmt_desc_get(c->pix_fmt)->comp[0].step_minus1 + 1;
354  AVFrame * const p = f->cur;
355  int i, si;
356 
357  for( si=0; fs != f->slice_context[si]; si ++)
358  ;
359 
360  if(f->fsrc && !p->key_frame)
361  ff_thread_await_progress(&f->last_picture, si, 0);
362 
363  if(f->fsrc && !p->key_frame) {
364  FFV1Context *fssrc = f->fsrc->slice_context[si];
365  FFV1Context *fsdst = f->slice_context[si];
366  av_assert1(fsdst->plane_count == fssrc->plane_count);
367  av_assert1(fsdst == fs);
368 
369  if (!p->key_frame)
370  fsdst->slice_damaged |= fssrc->slice_damaged;
371 
372  for (i = 0; i < f->plane_count; i++) {
373  PlaneContext *psrc = &fssrc->plane[i];
374  PlaneContext *pdst = &fsdst->plane[i];
375 
376  av_free(pdst->state);
377  av_free(pdst->vlc_state);
378  memcpy(pdst, psrc, sizeof(*pdst));
379  pdst->state = NULL;
380  pdst->vlc_state = NULL;
381 
382  if (fssrc->ac) {
383  pdst->state = av_malloc(CONTEXT_SIZE * psrc->context_count);
384  memcpy(pdst->state, psrc->state, CONTEXT_SIZE * psrc->context_count);
385  } else {
386  pdst->vlc_state = av_malloc(sizeof(*pdst->vlc_state) * psrc->context_count);
387  memcpy(pdst->vlc_state, psrc->vlc_state, sizeof(*pdst->vlc_state) * psrc->context_count);
388  }
389  }
390  }
391 
392  fs->slice_rct_by_coef = 1;
393  fs->slice_rct_ry_coef = 1;
394 
395  if (f->version > 2) {
396  if (ffv1_init_slice_state(f, fs) < 0)
397  return AVERROR(ENOMEM);
398  if (decode_slice_header(f, fs) < 0) {
399  fs->slice_damaged = 1;
400  return AVERROR_INVALIDDATA;
401  }
402  }
403  if ((ret = ffv1_init_slice_state(f, fs)) < 0)
404  return ret;
405  if (f->cur->key_frame || fs->slice_reset_contexts)
406  ffv1_clear_slice_state(f, fs);
407 
408  width = fs->slice_width;
409  height = fs->slice_height;
410  x = fs->slice_x;
411  y = fs->slice_y;
412 
413  if (!fs->ac) {
414  if (f->version == 3 && f->micro_version > 1 || f->version > 3)
415  get_rac(&fs->c, (uint8_t[]) { 129 });
416  fs->ac_byte_count = f->version > 2 || (!x && !y) ? fs->c.bytestream - fs->c.bytestream_start - 1 : 0;
417  init_get_bits(&fs->gb,
418  fs->c.bytestream_start + fs->ac_byte_count,
419  (fs->c.bytestream_end - fs->c.bytestream_start - fs->ac_byte_count) * 8);
420  }
421 
422  av_assert1(width && height);
423  if (f->colorspace == 0) {
424  const int chroma_width = FF_CEIL_RSHIFT(width, f->chroma_h_shift);
425  const int chroma_height = FF_CEIL_RSHIFT(height, f->chroma_v_shift);
426  const int cx = x >> f->chroma_h_shift;
427  const int cy = y >> f->chroma_v_shift;
428  decode_plane(fs, p->data[0] + ps*x + y*p->linesize[0], width, height, p->linesize[0], 0);
429 
430  if (f->chroma_planes) {
431  decode_plane(fs, p->data[1] + ps*cx+cy*p->linesize[1], chroma_width, chroma_height, p->linesize[1], 1);
432  decode_plane(fs, p->data[2] + ps*cx+cy*p->linesize[2], chroma_width, chroma_height, p->linesize[2], 1);
433  }
434  if (fs->transparency)
435  decode_plane(fs, p->data[3] + ps*x + y*p->linesize[3], width, height, p->linesize[3], 2);
436  } else {
437  uint8_t *planes[3] = { p->data[0] + ps * x + y * p->linesize[0],
438  p->data[1] + ps * x + y * p->linesize[1],
439  p->data[2] + ps * x + y * p->linesize[2] };
440  decode_rgb_frame(fs, planes, width, height, p->linesize);
441  }
442  if (fs->ac && f->version > 2) {
443  int v;
444  get_rac(&fs->c, (uint8_t[]) { 129 });
445  v = fs->c.bytestream_end - fs->c.bytestream - 2 - 5*f->ec;
446  if (v) {
447  av_log(f->avctx, AV_LOG_ERROR, "bytestream end mismatching by %d\n", v);
448  fs->slice_damaged = 1;
449  }
450  }
451 
452  emms_c();
453 
454  ff_thread_report_progress(&f->picture, si, 0);
455 
456  return 0;
457 }
458 
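/* Read one quantization table: run-length coded value lengths for indices
 * 0..127, mirrored with negated sign for 128..255. Returns the number of
 * distinct quantizer values (2 * v - 1). */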
459 static int read_quant_table(RangeCoder *c, int16_t *quant_table, int scale)
460 {
461  int v;
462  int i = 0;
463  uint8_t state[CONTEXT_SIZE];
464 
465  memset(state, 128, sizeof(state));
466 
467  for (v = 0; i < 128; v++) {
468  unsigned len = get_symbol(c, state, 0) + 1;
469 
470  if (len > 128 - i)
471  return AVERROR_INVALIDDATA;
472 
473  while (len--) {
474  quant_table[i] = scale * v;
475  i++;
476  }
477  }
478 
479  for (i = 1; i < 128; i++)
480  quant_table[256 - i] = -quant_table[i];
481  quant_table[128] = -quant_table[127];
482 
483  return 2 * v - 1;
484 }
485 
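/* Read the five quantization tables that form the context model and return
 * the resulting context count (or a negative error code). */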
486 static int read_quant_tables(RangeCoder *c,
487  int16_t quant_table[MAX_CONTEXT_INPUTS][256])
488 {
489  int i;
490  int context_count = 1;
491 
492  for (i = 0; i < 5; i++) {
493  context_count *= read_quant_table(c, quant_table[i], context_count);
494  if (context_count > 32768U) {
495  return AVERROR_INVALIDDATA;
496  }
497  }
498  return (context_count + 1) / 2;
499 }
500 
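/* Parse the global header stored in extradata (version >= 2): version, coder
 * type and state transition table, colorspace, bit depth, chroma subsampling,
 * slice grid, quantization tables, optional initial states, error-correction
 * flag and, for version > 2, a CRC over the extradata. */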
501 static int read_extra_header(FFV1Context *f)
502 {
503  RangeCoder *const c = &f->c;
504  uint8_t state[CONTEXT_SIZE];
505  int i, j, k, ret;
506  uint8_t state2[32][CONTEXT_SIZE];
507 
508  memset(state2, 128, sizeof(state2));
509  memset(state, 128, sizeof(state));
510 
511  ff_init_range_decoder(c, f->avctx->extradata, f->avctx->extradata_size);
512  ff_build_rac_states(c, 0.05 * (1LL << 32), 256 - 8);
513 
514  f->version = get_symbol(c, state, 0);
515  if (f->version < 2) {
516  av_log(f->avctx, AV_LOG_ERROR, "Invalid version in global header\n");
517  return AVERROR_INVALIDDATA;
518  }
519  if (f->version > 2) {
520  c->bytestream_end -= 4;
521  f->micro_version = get_symbol(c, state, 0);
522  }
523  f->ac = f->avctx->coder_type = get_symbol(c, state, 0);
524  if (f->ac > 1) {
525  for (i = 1; i < 256; i++)
526  f->state_transition[i] = get_symbol(c, state, 1) + c->one_state[i];
527  }
528 
529  f->colorspace = get_symbol(c, state, 0); //YUV cs type
530  f->avctx->bits_per_raw_sample = get_symbol(c, state, 0);
531  f->chroma_planes = get_rac(c, state);
532  f->chroma_h_shift = get_symbol(c, state, 0);
533  f->chroma_v_shift = get_symbol(c, state, 0);
534  f->transparency = get_rac(c, state);
535  f->plane_count = 1 + (f->chroma_planes || f->version<4) + f->transparency;
536  f->num_h_slices = 1 + get_symbol(c, state, 0);
537  f->num_v_slices = 1 + get_symbol(c, state, 0);
538 
539  if (f->num_h_slices > (unsigned)f->width || !f->num_h_slices ||
540  f->num_v_slices > (unsigned)f->height || !f->num_v_slices
541  ) {
542  av_log(f->avctx, AV_LOG_ERROR, "slice count invalid\n");
543  return AVERROR_INVALIDDATA;
544  }
545 
546  f->quant_table_count = get_symbol(c, state, 0);
547  if (f->quant_table_count > (unsigned)MAX_QUANT_TABLES)
548  return AVERROR_INVALIDDATA;
549 
550  for (i = 0; i < f->quant_table_count; i++) {
551  f->context_count[i] = read_quant_tables(c, f->quant_tables[i]);
552  if (f->context_count[i] < 0) {
553  av_log(f->avctx, AV_LOG_ERROR, "read_quant_table error\n");
554  return AVERROR_INVALIDDATA;
555  }
556  }
557  if ((ret = ffv1_allocate_initial_states(f)) < 0)
558  return ret;
559 
560  for (i = 0; i < f->quant_table_count; i++)
561  if (get_rac(c, state)) {
562  for (j = 0; j < f->context_count[i]; j++)
563  for (k = 0; k < CONTEXT_SIZE; k++) {
564  int pred = j ? f->initial_states[i][j - 1][k] : 128;
565  f->initial_states[i][j][k] =
566  (pred + get_symbol(c, state2[k], 1)) & 0xFF;
567  }
568  }
569 
570  if (f->version > 2) {
571  f->ec = get_symbol(c, state, 0);
572  if (f->micro_version > 2)
573  f->intra = get_symbol(c, state, 0);
574  }
575 
576  if (f->version > 2) {
577  unsigned v;
578  v = av_crc(av_crc_get_table(AV_CRC_32_IEEE), 0,
579  f->avctx->extradata, f->avctx->extradata_size);
580  if (v) {
581  av_log(f->avctx, AV_LOG_ERROR, "CRC mismatch %X!\n", v);
582  return AVERROR_INVALIDDATA;
583  }
584  }
585 
586  if (f->avctx->debug & FF_DEBUG_PICT_INFO)
587  av_log(f->avctx, AV_LOG_DEBUG,
588  "global: ver:%d.%d, coder:%d, colorspace: %d bpr:%d chroma:%d(%d:%d), alpha:%d slices:%dx%d qtabs:%d ec:%d intra:%d\n",
589  f->version, f->micro_version,
590  f->ac,
591  f->colorspace,
592  f->avctx->bits_per_raw_sample,
593  f->chroma_planes, f->chroma_h_shift, f->chroma_v_shift,
594  f->transparency,
595  f->num_h_slices, f->num_v_slices,
596  f->quant_table_count,
597  f->ec,
598  f->intra
599  );
600  return 0;
601 }
602 
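/* Parse the frame header at each keyframe. For version < 2 the global
 * parameters are repeated here; the pixel format is derived from colorspace,
 * bit depth, chroma subsampling and transparency, and for version >= 3 the
 * slice table is located from the trailing size fields. */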
603 static int read_header(FFV1Context *f)
604 {
605  uint8_t state[CONTEXT_SIZE];
606  int i, j, context_count = -1; //-1 to avoid warning
607  RangeCoder *const c = &f->slice_context[0]->c;
608 
609  memset(state, 128, sizeof(state));
610 
611  if (f->version < 2) {
612  int chroma_planes, chroma_h_shift, chroma_v_shift, transparency, colorspace, bits_per_raw_sample;
613  unsigned v= get_symbol(c, state, 0);
614  if (v >= 2) {
615  av_log(f->avctx, AV_LOG_ERROR, "invalid version %d in ver01 header\n", v);
616  return AVERROR_INVALIDDATA;
617  }
618  f->version = v;
619  f->ac = f->avctx->coder_type = get_symbol(c, state, 0);
620  if (f->ac > 1) {
621  for (i = 1; i < 256; i++)
622  f->state_transition[i] = get_symbol(c, state, 1) + c->one_state[i];
623  }
624 
625  colorspace = get_symbol(c, state, 0); //YUV cs type
626  bits_per_raw_sample = f->version > 0 ? get_symbol(c, state, 0) : f->avctx->bits_per_raw_sample;
627  chroma_planes = get_rac(c, state);
628  chroma_h_shift = get_symbol(c, state, 0);
629  chroma_v_shift = get_symbol(c, state, 0);
630  transparency = get_rac(c, state);
631 
632  if (f->plane_count) {
633  if ( colorspace != f->colorspace
634  || bits_per_raw_sample != f->avctx->bits_per_raw_sample
635  || chroma_planes != f->chroma_planes
636  || chroma_h_shift!= f->chroma_h_shift
637  || chroma_v_shift!= f->chroma_v_shift
638  || transparency != f->transparency) {
639  av_log(f->avctx, AV_LOG_ERROR, "Invalid change of global parameters\n");
640  return AVERROR_INVALIDDATA;
641  }
642  }
643 
644  f->colorspace = colorspace;
645  f->avctx->bits_per_raw_sample = bits_per_raw_sample;
646  f->chroma_planes = chroma_planes;
647  f->chroma_h_shift = chroma_h_shift;
648  f->chroma_v_shift = chroma_v_shift;
649  f->transparency = transparency;
650 
651  f->plane_count = 2 + f->transparency;
652  }
653 
654  if (f->colorspace == 0) {
655  if (f->avctx->skip_alpha) f->transparency = 0;
656  if (!f->transparency && !f->chroma_planes) {
657  if (f->avctx->bits_per_raw_sample <= 8)
658  f->avctx->pix_fmt = AV_PIX_FMT_GRAY8;
659  else
660  f->avctx->pix_fmt = AV_PIX_FMT_GRAY16;
661  } else if (f->avctx->bits_per_raw_sample<=8 && !f->transparency) {
662  switch(16 * f->chroma_h_shift + f->chroma_v_shift) {
663  case 0x00: f->avctx->pix_fmt = AV_PIX_FMT_YUV444P; break;
664  case 0x01: f->avctx->pix_fmt = AV_PIX_FMT_YUV440P; break;
665  case 0x10: f->avctx->pix_fmt = AV_PIX_FMT_YUV422P; break;
666  case 0x11: f->avctx->pix_fmt = AV_PIX_FMT_YUV420P; break;
667  case 0x20: f->avctx->pix_fmt = AV_PIX_FMT_YUV411P; break;
668  case 0x22: f->avctx->pix_fmt = AV_PIX_FMT_YUV410P; break;
669  }
670  } else if (f->avctx->bits_per_raw_sample <= 8 && f->transparency) {
671  switch(16*f->chroma_h_shift + f->chroma_v_shift) {
672  case 0x00: f->avctx->pix_fmt = AV_PIX_FMT_YUVA444P; break;
673  case 0x10: f->avctx->pix_fmt = AV_PIX_FMT_YUVA422P; break;
674  case 0x11: f->avctx->pix_fmt = AV_PIX_FMT_YUVA420P; break;
675  }
676  } else if (f->avctx->bits_per_raw_sample == 9 && !f->transparency) {
677  f->packed_at_lsb = 1;
678  switch(16 * f->chroma_h_shift + f->chroma_v_shift) {
679  case 0x00: f->avctx->pix_fmt = AV_PIX_FMT_YUV444P9; break;
680  case 0x10: f->avctx->pix_fmt = AV_PIX_FMT_YUV422P9; break;
681  case 0x11: f->avctx->pix_fmt = AV_PIX_FMT_YUV420P9; break;
682  }
683  } else if (f->avctx->bits_per_raw_sample == 9 && f->transparency) {
684  f->packed_at_lsb = 1;
685  switch(16 * f->chroma_h_shift + f->chroma_v_shift) {
686  case 0x00: f->avctx->pix_fmt = AV_PIX_FMT_YUVA444P9; break;
687  case 0x10: f->avctx->pix_fmt = AV_PIX_FMT_YUVA422P9; break;
688  case 0x11: f->avctx->pix_fmt = AV_PIX_FMT_YUVA420P9; break;
689  }
690  } else if (f->avctx->bits_per_raw_sample == 10 && !f->transparency) {
691  f->packed_at_lsb = 1;
692  switch(16 * f->chroma_h_shift + f->chroma_v_shift) {
693  case 0x00: f->avctx->pix_fmt = AV_PIX_FMT_YUV444P10; break;
694  case 0x10: f->avctx->pix_fmt = AV_PIX_FMT_YUV422P10; break;
695  case 0x11: f->avctx->pix_fmt = AV_PIX_FMT_YUV420P10; break;
696  }
697  } else if (f->avctx->bits_per_raw_sample == 10 && f->transparency) {
698  f->packed_at_lsb = 1;
699  switch(16 * f->chroma_h_shift + f->chroma_v_shift) {
700  case 0x00: f->avctx->pix_fmt = AV_PIX_FMT_YUVA444P10; break;
701  case 0x10: f->avctx->pix_fmt = AV_PIX_FMT_YUVA422P10; break;
702  case 0x11: f->avctx->pix_fmt = AV_PIX_FMT_YUVA420P10; break;
703  }
704  } else if (f->avctx->bits_per_raw_sample == 16 && !f->transparency){
705  switch(16 * f->chroma_h_shift + f->chroma_v_shift) {
706  case 0x00: f->avctx->pix_fmt = AV_PIX_FMT_YUV444P16; break;
707  case 0x10: f->avctx->pix_fmt = AV_PIX_FMT_YUV422P16; break;
708  case 0x11: f->avctx->pix_fmt = AV_PIX_FMT_YUV420P16; break;
709  }
710  } else if (f->avctx->bits_per_raw_sample == 16 && f->transparency){
711  switch(16 * f->chroma_h_shift + f->chroma_v_shift) {
712  case 0x00: f->avctx->pix_fmt = AV_PIX_FMT_YUVA444P16; break;
713  case 0x10: f->avctx->pix_fmt = AV_PIX_FMT_YUVA422P16; break;
714  case 0x11: f->avctx->pix_fmt = AV_PIX_FMT_YUVA420P16; break;
715  }
716  }
717  } else if (f->colorspace == 1) {
718  if (f->chroma_h_shift || f->chroma_v_shift) {
719  av_log(f->avctx, AV_LOG_ERROR,
720  "chroma subsampling not supported in this colorspace\n");
721  return AVERROR(ENOSYS);
722  }
723  if ( f->avctx->bits_per_raw_sample == 9)
724  f->avctx->pix_fmt = AV_PIX_FMT_GBRP9;
725  else if (f->avctx->bits_per_raw_sample == 10)
726  f->avctx->pix_fmt = AV_PIX_FMT_GBRP10;
727  else if (f->avctx->bits_per_raw_sample == 12)
728  f->avctx->pix_fmt = AV_PIX_FMT_GBRP12;
729  else if (f->avctx->bits_per_raw_sample == 14)
730  f->avctx->pix_fmt = AV_PIX_FMT_GBRP14;
731  else
732  if (f->transparency) f->avctx->pix_fmt = AV_PIX_FMT_RGB32;
733  else f->avctx->pix_fmt = AV_PIX_FMT_0RGB32;
734  } else {
735  av_log(f->avctx, AV_LOG_ERROR, "colorspace not supported\n");
736  return AVERROR(ENOSYS);
737  }
738  if (f->avctx->pix_fmt == AV_PIX_FMT_NONE) {
739  av_log(f->avctx, AV_LOG_ERROR, "format not supported\n");
740  return AVERROR(ENOSYS);
741  }
742 
743  av_dlog(f->avctx, "%d %d %d\n",
744  f->chroma_h_shift, f->chroma_v_shift, f->avctx->pix_fmt);
745  if (f->version < 2) {
746  context_count = read_quant_tables(c, f->quant_table);
747  if (context_count < 0) {
748  av_log(f->avctx, AV_LOG_ERROR, "read_quant_table error\n");
749  return AVERROR_INVALIDDATA;
750  }
751  } else if (f->version < 3) {
752  f->slice_count = get_symbol(c, state, 0);
753  } else {
754  const uint8_t *p = c->bytestream_end;
755  for (f->slice_count = 0;
756  f->slice_count < MAX_SLICES && 3 < p - c->bytestream_start;
757  f->slice_count++) {
758  int trailer = 3 + 5*!!f->ec;
759  int size = AV_RB24(p-trailer);
760  if (size + trailer > p - c->bytestream_start)
761  break;
762  p -= size + trailer;
763  }
764  }
765  if (f->slice_count > (unsigned)MAX_SLICES || f->slice_count <= 0) {
766  av_log(f->avctx, AV_LOG_ERROR, "slice count %d is invalid\n", f->slice_count);
767  return AVERROR_INVALIDDATA;
768  }
769 
770  for (j = 0; j < f->slice_count; j++) {
771  FFV1Context *fs = f->slice_context[j];
772  fs->ac = f->ac;
773  fs->packed_at_lsb = f->packed_at_lsb;
774 
775  fs->slice_damaged = 0;
776 
777  if (f->version == 2) {
778  fs->slice_x = get_symbol(c, state, 0) * f->width ;
779  fs->slice_y = get_symbol(c, state, 0) * f->height;
780  fs->slice_width = (get_symbol(c, state, 0) + 1) * f->width + fs->slice_x;
781  fs->slice_height = (get_symbol(c, state, 0) + 1) * f->height + fs->slice_y;
782 
783  fs->slice_x /= f->num_h_slices;
784  fs->slice_y /= f->num_v_slices;
785  fs->slice_width = fs->slice_width / f->num_h_slices - fs->slice_x;
786  fs->slice_height = fs->slice_height / f->num_v_slices - fs->slice_y;
787  if ((unsigned)fs->slice_width > f->width ||
788  (unsigned)fs->slice_height > f->height)
789  return AVERROR_INVALIDDATA;
790  if ( (unsigned)fs->slice_x + (uint64_t)fs->slice_width > f->width
791  || (unsigned)fs->slice_y + (uint64_t)fs->slice_height > f->height)
792  return AVERROR_INVALIDDATA;
793  }
794 
795  for (i = 0; i < f->plane_count; i++) {
796  PlaneContext *const p = &fs->plane[i];
797 
798  if (f->version == 2) {
799  int idx = get_symbol(c, state, 0);
800  if (idx > (unsigned)f->quant_table_count) {
801  av_log(f->avctx, AV_LOG_ERROR,
802  "quant_table_index out of range\n");
803  return AVERROR_INVALIDDATA;
804  }
805  p->quant_table_index = idx;
806  memcpy(p->quant_table, f->quant_tables[idx],
807  sizeof(p->quant_table));
808  context_count = f->context_count[idx];
809  } else {
810  memcpy(p->quant_table, f->quant_table, sizeof(p->quant_table));
811  }
812 
813  if (f->version <= 2) {
814  av_assert0(context_count >= 0);
815  if (p->context_count < context_count) {
816  av_freep(&p->state);
817  av_freep(&p->vlc_state);
818  }
819  p->context_count = context_count;
820  }
821  }
822  }
823  return 0;
824 }
825 
826 static av_cold int decode_init(AVCodecContext *avctx)
827 {
828  FFV1Context *f = avctx->priv_data;
829  int ret;
830 
831  if ((ret = ffv1_common_init(avctx)) < 0)
832  return ret;
833 
834  if (avctx->extradata && (ret = read_extra_header(f)) < 0)
835  return ret;
836 
837  if ((ret = ffv1_init_slice_contexts(f)) < 0)
838  return ret;
839 
840  avctx->internal->allocate_progress = 1;
841 
842  return 0;
843 }
844 
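/* Top-level decode: read the keyframe flag and header, set up one range coder
 * per slice by walking the trailing slice size fields (verifying CRCs when
 * error correction is enabled), run the slices via avctx->execute() and
 * conceal damaged slices by copying from the previous picture. */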
845 static int decode_frame(AVCodecContext *avctx, void *data, int *got_frame, AVPacket *avpkt)
846 {
847  const uint8_t *buf = avpkt->data;
848  int buf_size = avpkt->size;
849  FFV1Context *f = avctx->priv_data;
850  RangeCoder *const c = &f->slice_context[0]->c;
851  int i, ret;
852  uint8_t keystate = 128;
853  const uint8_t *buf_p;
854  AVFrame *p;
855 
856  if (f->last_picture.f)
857  ff_thread_release_buffer(avctx, &f->last_picture);
858  FFSWAP(ThreadFrame, f->picture, f->last_picture);
859 
860  f->cur = p = f->picture.f;
861 
862  if (f->version < 3 && avctx->field_order > AV_FIELD_PROGRESSIVE) {
863  /* we have interlaced material flagged in container */
864  p->interlaced_frame = 1;
865  if (avctx->field_order == AV_FIELD_TT || avctx->field_order == AV_FIELD_TB)
866  p->top_field_first = 1;
867  }
868 
869  f->avctx = avctx;
870  ff_init_range_decoder(c, buf, buf_size);
871  ff_build_rac_states(c, 0.05 * (1LL << 32), 256 - 8);
872 
873  p->pict_type = AV_PICTURE_TYPE_I; //FIXME I vs. P
874  if (get_rac(c, &keystate)) {
875  p->key_frame = 1;
876  f->key_frame_ok = 0;
877  if ((ret = read_header(f)) < 0)
878  return ret;
879  f->key_frame_ok = 1;
880  } else {
881  if (!f->key_frame_ok) {
882  av_log(avctx, AV_LOG_ERROR,
883  "Cannot decode non-keyframe without valid keyframe\n");
884  return AVERROR_INVALIDDATA;
885  }
886  p->key_frame = 0;
887  }
888 
889  if ((ret = ff_thread_get_buffer(avctx, &f->picture, AV_GET_BUFFER_FLAG_REF)) < 0)
890  return ret;
891 
892  if (avctx->debug & FF_DEBUG_PICT_INFO)
893  av_log(avctx, AV_LOG_DEBUG, "ver:%d keyframe:%d coder:%d ec:%d slices:%d bps:%d\n",
894  f->version, p->key_frame, f->ac, f->ec, f->slice_count, f->avctx->bits_per_raw_sample);
895 
896  ff_thread_finish_setup(avctx);
897 
898  buf_p = buf + buf_size;
899  for (i = f->slice_count - 1; i >= 0; i--) {
900  FFV1Context *fs = f->slice_context[i];
901  int trailer = 3 + 5*!!f->ec;
902  int v;
903 
904  if (i || f->version > 2) v = AV_RB24(buf_p-trailer) + trailer;
905  else v = buf_p - c->bytestream_start;
906  if (buf_p - c->bytestream_start < v) {
907  av_log(avctx, AV_LOG_ERROR, "Slice pointer chain broken\n");
908  return AVERROR_INVALIDDATA;
909  }
910  buf_p -= v;
911 
912  if (f->ec) {
913  unsigned crc = av_crc(av_crc_get_table(AV_CRC_32_IEEE), 0, buf_p, v);
914  if (crc) {
915  int64_t ts = avpkt->pts != AV_NOPTS_VALUE ? avpkt->pts : avpkt->dts;
916  av_log(f->avctx, AV_LOG_ERROR, "CRC mismatch %X!", crc);
917  if (ts != AV_NOPTS_VALUE && avctx->pkt_timebase.num) {
918  av_log(f->avctx, AV_LOG_ERROR, "at %f seconds\n", ts*av_q2d(avctx->pkt_timebase));
919  } else if (ts != AV_NOPTS_VALUE) {
920  av_log(f->avctx, AV_LOG_ERROR, "at %"PRId64"\n", ts);
921  } else {
922  av_log(f->avctx, AV_LOG_ERROR, "\n");
923  }
924  fs->slice_damaged = 1;
925  }
926  }
927 
928  if (i) {
929  ff_init_range_decoder(&fs->c, buf_p, v);
930  } else
931  fs->c.bytestream_end = (uint8_t *)(buf_p + v);
932 
933  fs->avctx = avctx;
934  fs->cur = p;
935  }
936 
937  avctx->execute(avctx,
938  decode_slice,
939  &f->slice_context[0],
940  NULL,
941  f->slice_count,
942  sizeof(void*));
943 
944  for (i = f->slice_count - 1; i >= 0; i--) {
945  FFV1Context *fs = f->slice_context[i];
946  int j;
947  if (fs->slice_damaged && f->last_picture.f->data[0]) {
948  const uint8_t *src[4];
949  uint8_t *dst[4];
950  ff_thread_await_progress(&f->last_picture, INT_MAX, 0);
951  for (j = 0; j < 4; j++) {
952  int sh = (j == 1 || j == 2) ? f->chroma_h_shift : 0;
953  int sv = (j == 1 || j == 2) ? f->chroma_v_shift : 0;
954  dst[j] = p->data[j] + p->linesize[j] *
955  (fs->slice_y >> sv) + (fs->slice_x >> sh);
956  src[j] = f->last_picture.f->data[j] + f->last_picture.f->linesize[j] *
957  (fs->slice_y >> sv) + (fs->slice_x >> sh);
958  }
959  av_image_copy(dst, p->linesize, (const uint8_t **)src,
960  f->last_picture.f->linesize,
961  avctx->pix_fmt,
962  fs->slice_width,
963  fs->slice_height);
964  }
965  }
966  ff_thread_report_progress(&f->picture, INT_MAX, 0);
967 
968  f->picture_number++;
969 
970  if (f->last_picture.f)
971  ff_thread_release_buffer(avctx, &f->last_picture);
972  f->cur = NULL;
973  if ((ret = av_frame_ref(data, f->picture.f)) < 0)
974  return ret;
975 
976  *got_frame = 1;
977 
978  return buf_size;
979 }
980 
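/* Per-thread copy constructor for frame threading: duplicates the initial
 * state tables and allocates fresh frames and slice contexts. */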
981 static int init_thread_copy(AVCodecContext *avctx)
982 {
983  FFV1Context *f = avctx->priv_data;
984  int i, ret;
985 
986  f->picture.f = NULL;
987  f->last_picture.f = NULL;
988  f->sample_buffer = NULL;
989  f->slice_count = 0;
990 
991  for (i = 0; i < f->quant_table_count; i++) {
992  av_assert0(f->version > 1);
993  f->initial_states[i] = av_memdup(f->initial_states[i],
994  f->context_count[i] * sizeof(*f->initial_states[i]));
995  }
996 
997  f->picture.f = av_frame_alloc();
998  f->last_picture.f = av_frame_alloc();
999 
1000  if ((ret = ffv1_init_slice_contexts(f)) < 0)
1001  return ret;
1002 
1003  return 0;
1004 }
1005 
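/* Copy global and per-slice parameters from the source decoding thread into a
 * destination slice context. */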
1006 static void copy_fields(FFV1Context *fsdst, FFV1Context *fssrc, FFV1Context *fsrc)
1007 {
1008  fsdst->version = fsrc->version;
1009  fsdst->micro_version = fsrc->micro_version;
1010  fsdst->chroma_planes = fsrc->chroma_planes;
1011  fsdst->chroma_h_shift = fsrc->chroma_h_shift;
1012  fsdst->chroma_v_shift = fsrc->chroma_v_shift;
1013  fsdst->transparency = fsrc->transparency;
1014  fsdst->plane_count = fsrc->plane_count;
1015  fsdst->ac = fsrc->ac;
1016  fsdst->colorspace = fsrc->colorspace;
1017 
1018  fsdst->ec = fsrc->ec;
1019  fsdst->intra = fsrc->intra;
1020  fsdst->slice_damaged = fssrc->slice_damaged;
1021  fsdst->key_frame_ok = fsrc->key_frame_ok;
1022 
1023  fsdst->bits_per_raw_sample = fsrc->bits_per_raw_sample;
1024  fsdst->packed_at_lsb = fsrc->packed_at_lsb;
1025  fsdst->slice_count = fsrc->slice_count;
1026  if (fsrc->version<3){
1027  fsdst->slice_x = fssrc->slice_x;
1028  fsdst->slice_y = fssrc->slice_y;
1029  fsdst->slice_width = fssrc->slice_width;
1030  fsdst->slice_height = fssrc->slice_height;
1031  }
1032 }
1033 
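/* Frame-threading context update: copies the global decoder state from the
 * source thread, refreshes the slice contexts and keeps a reference to the
 * source's picture for later error concealment. */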
1034 static int update_thread_context(AVCodecContext *dst, const AVCodecContext *src)
1035 {
1036  FFV1Context *fsrc = src->priv_data;
1037  FFV1Context *fdst = dst->priv_data;
1038  int i, ret;
1039 
1040  if (dst == src)
1041  return 0;
1042 
1043  {
1044  FFV1Context bak = *fdst;
1045  memcpy(fdst, fsrc, sizeof(*fdst));
1046  memcpy(fdst->initial_states, bak.initial_states, sizeof(fdst->initial_states));
1047  memcpy(fdst->slice_context, bak.slice_context , sizeof(fdst->slice_context));
1048  fdst->picture = bak.picture;
1049  fdst->last_picture = bak.last_picture;
1050  for (i = 0; i<fdst->num_h_slices * fdst->num_v_slices; i++) {
1051  FFV1Context *fssrc = fsrc->slice_context[i];
1052  FFV1Context *fsdst = fdst->slice_context[i];
1053  copy_fields(fsdst, fssrc, fsrc);
1054  }
1055  av_assert0(!fdst->plane[0].state);
1056  av_assert0(!fdst->sample_buffer);
1057  }
1058 
1059  av_assert1(fdst->slice_count == fsrc->slice_count);
1060 
1061 
1062  ff_thread_release_buffer(dst, &fdst->picture);
1063  if (fsrc->picture.f->data[0]) {
1064  if ((ret = ff_thread_ref_frame(&fdst->picture, &fsrc->picture)) < 0)
1065  return ret;
1066  }
1067 
1068  fdst->fsrc = fsrc;
1069 
1070  return 0;
1071 }
1072 
1073 AVCodec ff_ffv1_decoder = {
1074  .name = "ffv1",
1075  .long_name = NULL_IF_CONFIG_SMALL("FFmpeg video codec #1"),
1076  .type = AVMEDIA_TYPE_VIDEO,
1077  .id = AV_CODEC_ID_FFV1,
1078  .priv_data_size = sizeof(FFV1Context),
1079  .init = decode_init,
1080  .close = ffv1_close,
1081  .decode = decode_frame,
1082  .init_thread_copy = ONLY_IF_THREADS_ENABLED(init_thread_copy),
1083  .update_thread_context = ONLY_IF_THREADS_ENABLED(update_thread_context),
1084  .capabilities = CODEC_CAP_DR1 /*| CODEC_CAP_DRAW_HORIZ_BAND*/ |
1085  CODEC_CAP_FRAME_THREADS | CODEC_CAP_SLICE_THREADS,
1086 };