utvideodec.c
1 /*
2  * Ut Video decoder
3  * Copyright (c) 2011 Konstantin Shishkov
4  *
5  * This file is part of FFmpeg.
6  *
7  * FFmpeg is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * FFmpeg is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with FFmpeg; if not, write to the Free Software
19  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
20  */
21 
22 /**
23  * @file
24  * Ut Video decoder
25  */
26 
27 #include <inttypes.h>
28 #include <stdlib.h>
29 
30 #define UNCHECKED_BITSTREAM_READER 1
31 
32 #include "libavutil/intreadwrite.h"
33 #include "avcodec.h"
34 #include "bswapdsp.h"
35 #include "bytestream.h"
36 #include "get_bits.h"
37 #include "internal.h"
38 #include "thread.h"
39 #include "utvideo.h"
40 
41 static int build_huff10(const uint8_t *src, VLC *vlc, int *fsym)
42 {
43  int i;
44  HuffEntry he[1024];
45  int last;
46  uint32_t codes[1024];
47  uint8_t bits[1024];
48  uint16_t syms[1024];
49  uint32_t code;
50 
51  *fsym = -1;
52  for (i = 0; i < 1024; i++) {
53  he[i].sym = i;
54  he[i].len = *src++;
55  }
56  qsort(he, 1024, sizeof(*he), ff_ut10_huff_cmp_len);
57 
58  if (!he[0].len) {
59  *fsym = he[0].sym;
60  return 0;
61  }
62 
63  last = 1023;
64  while (he[last].len == 255 && last)
65  last--;
66 
67  if (he[last].len > 32) {
68  return -1;
69  }
70 
71  code = 1;
72  for (i = last; i >= 0; i--) {
73  codes[i] = code >> (32 - he[i].len);
74  bits[i] = he[i].len;
75  syms[i] = he[i].sym;
76  code += 0x80000000u >> (he[i].len - 1);
77  }
78 #define VLC_BITS 11
79  return ff_init_vlc_sparse(vlc, VLC_BITS, last + 1,
80  bits, sizeof(*bits), sizeof(*bits),
81  codes, sizeof(*codes), sizeof(*codes),
82  syms, sizeof(*syms), sizeof(*syms), 0);
83 }
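
The loop above assigns left-justified canonical codes from a table of code lengths, walking from the longest code to the shortest. A minimal standalone sketch of the same assignment (not part of utvideodec.c; the five-symbol length table is made up):

#include <stdio.h>
#include <stdint.h>

int main(void)
{
    /* made-up length table, already sorted by ascending code length */
    uint8_t  len[5] = { 1, 2, 3, 4, 4 };
    uint32_t codes[5];
    uint32_t code = 1;                     /* 32-bit left-justified accumulator, as above */
    int i;

    for (i = 4; i >= 0; i--) {             /* longest codes get the smallest values */
        codes[i] = code >> (32 - len[i]);  /* keep only the top len[i] bits */
        code    += 0x80000000u >> (len[i] - 1);
    }
    for (i = 0; i < 5; i++)
        printf("symbol %d: %d bits, code 0x%x\n", i, len[i], (unsigned)codes[i]);
    return 0;
}

With these lengths the result is the prefix code 1, 01, 001, 0001, 0000, which is what ff_init_vlc_sparse() is then fed together with the symbol values.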
84 
85 static int build_huff(const uint8_t *src, VLC *vlc, int *fsym)
86 {
87  int i;
88  HuffEntry he[256];
89  int last;
90  uint32_t codes[256];
91  uint8_t bits[256];
92  uint8_t syms[256];
93  uint32_t code;
94 
95  *fsym = -1;
96  for (i = 0; i < 256; i++) {
97  he[i].sym = i;
98  he[i].len = *src++;
99  }
100  qsort(he, 256, sizeof(*he), ff_ut_huff_cmp_len);
101 
102  if (!he[0].len) {
103  *fsym = he[0].sym;
104  return 0;
105  }
106 
107  last = 255;
108  while (he[last].len == 255 && last)
109  last--;
110 
111  if (he[last].len > 32)
112  return -1;
113 
114  code = 1;
115  for (i = last; i >= 0; i--) {
116  codes[i] = code >> (32 - he[i].len);
117  bits[i] = he[i].len;
118  syms[i] = he[i].sym;
119  code += 0x80000000u >> (he[i].len - 1);
120  }
121 
122  return ff_init_vlc_sparse(vlc, VLC_BITS, last + 1,
123  bits, sizeof(*bits), sizeof(*bits),
124  codes, sizeof(*codes), sizeof(*codes),
125  syms, sizeof(*syms), sizeof(*syms), 0);
126 }
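
Both builders first handle the degenerate case where the shortest code length is zero: the whole plane is then a run of a single symbol and no VLC table is built. A standalone sketch of that check (not FFmpeg API; find_fill_symbol is a made-up name):

#include <stdio.h>
#include <stdint.h>

static int find_fill_symbol(const uint8_t len[256])
{
    for (int i = 0; i < 256; i++)
        if (len[i] == 0)
            return i;   /* fill the whole plane with this symbol */
    return -1;          /* normal case: build the Huffman decoder */
}

int main(void)
{
    uint8_t len[256];
    for (int i = 0; i < 256; i++)
        len[i] = 255;   /* 255 marks a symbol that does not occur in the plane */
    len[0x80] = 0;      /* made-up table: the plane is solid mid-grey */
    printf("fill symbol: %d\n", find_fill_symbol(len));
    return 0;
}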
127 
128 static int decode_plane10(UtvideoContext *c, int plane_no,
129  uint16_t *dst, int step, ptrdiff_t stride,
130  int width, int height,
131  const uint8_t *src, const uint8_t *huff,
132  int use_pred)
133 {
134  int i, j, slice, pix, ret;
135  int sstart, send;
136  VLC vlc;
137  GetBitContext gb;
138  int prev, fsym;
139 
140  if ((ret = build_huff10(huff, &vlc, &fsym)) < 0) {
141  av_log(c->avctx, AV_LOG_ERROR, "Cannot build Huffman codes\n");
142  return ret;
143  }
144  if (fsym >= 0) { // build_huff reported a symbol to fill slices with
145  send = 0;
146  for (slice = 0; slice < c->slices; slice++) {
147  uint16_t *dest;
148 
149  sstart = send;
150  send = (height * (slice + 1) / c->slices);
151  dest = dst + sstart * stride;
152 
153  prev = 0x200;
154  for (j = sstart; j < send; j++) {
155  for (i = 0; i < width * step; i += step) {
156  pix = fsym;
157  if (use_pred) {
158  prev += pix;
159  prev &= 0x3FF;
160  pix = prev;
161  }
162  dest[i] = pix;
163  }
164  dest += stride;
165  }
166  }
167  return 0;
168  }
169 
170  send = 0;
171  for (slice = 0; slice < c->slices; slice++) {
172  uint16_t *dest;
173  int slice_data_start, slice_data_end, slice_size;
174 
175  sstart = send;
176  send = (height * (slice + 1) / c->slices);
177  dest = dst + sstart * stride;
178 
179  // slice offset and size validation was done earlier
180  slice_data_start = slice ? AV_RL32(src + slice * 4 - 4) : 0;
181  slice_data_end = AV_RL32(src + slice * 4);
182  slice_size = slice_data_end - slice_data_start;
183 
184  if (!slice_size) {
185  av_log(c->avctx, AV_LOG_ERROR, "Plane has more than one symbol "
186  "yet a slice has a length of zero.\n");
187  goto fail;
188  }
189 
190  memset(c->slice_bits + slice_size, 0, AV_INPUT_BUFFER_PADDING_SIZE);
191  c->bdsp.bswap_buf((uint32_t *) c->slice_bits,
192  (uint32_t *)(src + slice_data_start + c->slices * 4),
193  (slice_data_end - slice_data_start + 3) >> 2);
194  init_get_bits(&gb, c->slice_bits, slice_size * 8);
195 
196  prev = 0x200;
197  for (j = sstart; j < send; j++) {
198  int ws = width * step;
199  for (i = 0; i < ws; i += step) {
200  pix = get_vlc2(&gb, vlc.table, VLC_BITS, 3);
201  if (pix < 0) {
202  av_log(c->avctx, AV_LOG_ERROR, "Decoding error\n");
203  goto fail;
204  }
205  if (use_pred) {
206  prev += pix;
207  prev &= 0x3FF;
208  pix = prev;
209  }
210  dest[i] = pix;
211  }
212  dest += stride;
213  if (get_bits_left(&gb) < 0) {
214  av_log(c->avctx, AV_LOG_ERROR,
215  "Slice decoding ran out of bits\n");
216  goto fail;
217  }
218  }
219  if (get_bits_left(&gb) > 32)
220  av_log(c->avctx, AV_LOG_WARNING,
221  "%d bits left after decoding slice\n", get_bits_left(&gb));
222  }
223 
224  ff_free_vlc(&vlc);
225 
226  return 0;
227 fail:
228  ff_free_vlc(&vlc);
229  return AVERROR_INVALIDDATA;
230 }
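
decode_plane10() splits the plane into c->slices horizontal bands and finds each band's compressed bytes through the table of cumulative little-endian end offsets that precedes the bitstream data. A standalone sketch of that bookkeeping (made-up offsets, not FFmpeg API):

#include <stdio.h>
#include <stdint.h>

int main(void)
{
    int height = 1080, slices = 8;
    /* made-up cumulative end offsets, as they would be read with AV_RL32 */
    uint32_t off[8] = { 4000, 8100, 12000, 16400, 20000, 24500, 28300, 32000 };

    for (int s = 0; s < slices; s++) {
        int sstart = height *  s      / slices;   /* first row of the slice */
        int send   = height * (s + 1) / slices;   /* one past the last row  */
        uint32_t data_start = s ? off[s - 1] : 0; /* previous slice's end   */
        uint32_t data_end   = off[s];
        printf("slice %d: rows [%d,%d), bytes [%u,%u), size %u\n",
               s, sstart, send, (unsigned)data_start, (unsigned)data_end,
               (unsigned)(data_end - data_start));
    }
    return 0;
}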
231 
232 static int decode_plane(UtvideoContext *c, int plane_no,
233  uint8_t *dst, int step, ptrdiff_t stride,
234  int width, int height,
235  const uint8_t *src, int use_pred)
236 {
237  int i, j, slice, pix;
238  int sstart, send;
239  VLC vlc;
240  GetBitContext gb;
241  int prev, fsym;
242  const int cmask = c->interlaced ? ~(1 + 2 * (!plane_no && c->avctx->pix_fmt == AV_PIX_FMT_YUV420P)) : ~(!plane_no && c->avctx->pix_fmt == AV_PIX_FMT_YUV420P);
243 
244  if (build_huff(src, &vlc, &fsym)) {
245  av_log(c->avctx, AV_LOG_ERROR, "Cannot build Huffman codes\n");
246  return AVERROR_INVALIDDATA;
247  }
248  if (fsym >= 0) { // build_huff reported a symbol to fill slices with
249  send = 0;
250  for (slice = 0; slice < c->slices; slice++) {
251  uint8_t *dest;
252 
253  sstart = send;
254  send = (height * (slice + 1) / c->slices) & cmask;
255  dest = dst + sstart * stride;
256 
257  prev = 0x80;
258  for (j = sstart; j < send; j++) {
259  for (i = 0; i < width * step; i += step) {
260  pix = fsym;
261  if (use_pred) {
262  prev += pix;
263  pix = prev;
264  }
265  dest[i] = pix;
266  }
267  dest += stride;
268  }
269  }
270  return 0;
271  }
272 
273  src += 256;
274 
275  send = 0;
276  for (slice = 0; slice < c->slices; slice++) {
277  uint8_t *dest;
278  int slice_data_start, slice_data_end, slice_size;
279 
280  sstart = send;
281  send = (height * (slice + 1) / c->slices) & cmask;
282  dest = dst + sstart * stride;
283 
284  // slice offset and size validation was done earlier
285  slice_data_start = slice ? AV_RL32(src + slice * 4 - 4) : 0;
286  slice_data_end = AV_RL32(src + slice * 4);
287  slice_size = slice_data_end - slice_data_start;
288 
289  if (!slice_size) {
290  av_log(c->avctx, AV_LOG_ERROR, "Plane has more than one symbol "
291  "yet a slice has a length of zero.\n");
292  goto fail;
293  }
294 
295  memset(c->slice_bits + slice_size, 0, AV_INPUT_BUFFER_PADDING_SIZE);
296  c->bdsp.bswap_buf((uint32_t *) c->slice_bits,
297  (uint32_t *)(src + slice_data_start + c->slices * 4),
298  (slice_data_end - slice_data_start + 3) >> 2);
299  init_get_bits(&gb, c->slice_bits, slice_size * 8);
300 
301  prev = 0x80;
302  for (j = sstart; j < send; j++) {
303  int ws = width * step;
304  for (i = 0; i < ws; i += step) {
305  pix = get_vlc2(&gb, vlc.table, VLC_BITS, 3);
306  if (pix < 0) {
307  av_log(c->avctx, AV_LOG_ERROR, "Decoding error\n");
308  goto fail;
309  }
310  if (use_pred) {
311  prev += pix;
312  pix = prev;
313  }
314  dest[i] = pix;
315  }
316  if (get_bits_left(&gb) < 0) {
317  av_log(c->avctx, AV_LOG_ERROR,
318  "Slice decoding ran out of bits\n");
319  goto fail;
320  }
321  dest += stride;
322  }
323  if (get_bits_left(&gb) > 32)
324  av_log(c->avctx, AV_LOG_WARNING,
325  "%d bits left after decoding slice\n", get_bits_left(&gb));
326  }
327 
328  ff_free_vlc(&vlc);
329 
330  return 0;
331 fail:
332  ff_free_vlc(&vlc);
333  return AVERROR_INVALIDDATA;
334 }
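
The 8-bit path additionally rounds the slice row boundaries down with cmask so that interlaced line pairs stay together and, for the 4:2:0 luma plane, each boundary stays divisible by two chroma rows. A standalone sketch of that rounding (made-up dimensions):

#include <stdio.h>

int main(void)
{
    int height = 488, slices = 4;
    int interlaced = 1, is_420_luma = 1;   /* made-up stream properties */
    int cmask = interlaced ? ~(1 + 2 * is_420_luma) : ~is_420_luma;
    int send = 0;

    for (int s = 0; s < slices; s++) {
        int sstart = send;
        send = (height * (s + 1) / slices) & cmask;   /* round down to a multiple of 4 here */
        printf("slice %d: rows [%d,%d)\n", s, sstart, send);
    }
    return 0;
}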
335 
336 #undef A
337 #undef B
338 #undef C
339 
340 static void restore_median_planar(UtvideoContext *c, uint8_t *src, ptrdiff_t stride,
341  int width, int height, int slices, int rmode)
342 {
343  int i, j, slice;
344  int A, B, C;
345  uint8_t *bsrc;
346  int slice_start, slice_height;
347  const int cmask = ~rmode;
348 
349  for (slice = 0; slice < slices; slice++) {
350  slice_start = ((slice * height) / slices) & cmask;
351  slice_height = ((((slice + 1) * height) / slices) & cmask) -
352  slice_start;
353 
354  if (!slice_height)
355  continue;
356  bsrc = src + slice_start * stride;
357 
358  // first line - left neighbour prediction
359  bsrc[0] += 0x80;
360  c->llviddsp.add_left_pred(bsrc, bsrc, width, 0);
361  bsrc += stride;
362  if (slice_height <= 1)
363  continue;
364  // second line - first element has top prediction, the rest uses median
365  C = bsrc[-stride];
366  bsrc[0] += C;
367  A = bsrc[0];
368  for (i = 1; i < width; i++) {
369  B = bsrc[i - stride];
370  bsrc[i] += mid_pred(A, B, (uint8_t)(A + B - C));
371  C = B;
372  A = bsrc[i];
373  }
374  bsrc += stride;
375  // the rest of lines use continuous median prediction
376  for (j = 2; j < slice_height; j++) {
377  c->llviddsp.add_median_pred(bsrc, bsrc - stride,
378  bsrc, width, &A, &B);
379  bsrc += stride;
380  }
381  }
382 }
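
From the second line on, each pixel is predicted as the median of its left neighbour A, its top neighbour B and the gradient A + B - C (C being the top-left pixel), and the decoded residue is added to that prediction. A standalone worked example (median3 is a plain stand-in for FFmpeg's mid_pred macro):

#include <stdio.h>
#include <stdint.h>

static int median3(int a, int b, int c)
{
    if (a > b) { int t = a; a = b; b = t; }
    return c < a ? a : (c > b ? b : c);   /* clamp c into [a, b] */
}

int main(void)
{
    /* made-up neighbours and residue */
    int A = 100, B = 90, C = 120, residue = 3;
    int pred = median3(A, B, (uint8_t)(A + B - C));   /* A+B-C = 70, median(100,90,70) = 90 */

    printf("prediction %d, reconstructed %d\n", pred, (uint8_t)(pred + residue));
    return 0;
}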
383 
384 /* UtVideo interlaced mode treats every two lines as a single one,
385  * so restoring function should take care of possible padding between
386  * two parts of the same "line".
387  */
388 static void restore_median_planar_il(UtvideoContext *c, uint8_t *src, ptrdiff_t stride,
389  int width, int height, int slices, int rmode)
390 {
391  int i, j, slice;
392  int A, B, C;
393  uint8_t *bsrc;
394  int slice_start, slice_height;
395  const int cmask = ~(rmode ? 3 : 1);
396  const ptrdiff_t stride2 = stride << 1;
397 
398  for (slice = 0; slice < slices; slice++) {
399  slice_start = ((slice * height) / slices) & cmask;
400  slice_height = ((((slice + 1) * height) / slices) & cmask) -
401  slice_start;
402  slice_height >>= 1;
403  if (!slice_height)
404  continue;
405 
406  bsrc = src + slice_start * stride;
407 
408  // first line - left neighbour prediction
409  bsrc[0] += 0x80;
410  A = c->llviddsp.add_left_pred(bsrc, bsrc, width, 0);
411  c->llviddsp.add_left_pred(bsrc + stride, bsrc + stride, width, A);
412  bsrc += stride2;
413  if (slice_height <= 1)
414  continue;
415  // second line - first element has top prediction, the rest uses median
416  C = bsrc[-stride2];
417  bsrc[0] += C;
418  A = bsrc[0];
419  for (i = 1; i < width; i++) {
420  B = bsrc[i - stride2];
421  bsrc[i] += mid_pred(A, B, (uint8_t)(A + B - C));
422  C = B;
423  A = bsrc[i];
424  }
425  c->llviddsp.add_median_pred(bsrc + stride, bsrc - stride,
426  bsrc + stride, width, &A, &B);
427  bsrc += stride2;
428  // the rest of lines use continuous median prediction
429  for (j = 2; j < slice_height; j++) {
430  c->llviddsp.add_median_pred(bsrc, bsrc - stride2,
431  bsrc, width, &A, &B);
432  c->llviddsp.add_median_pred(bsrc + stride, bsrc - stride,
433  bsrc + stride, width, &A, &B);
434  bsrc += stride2;
435  }
436  }
437 }
438 
439 static void restore_gradient_planar(UtvideoContext *c, uint8_t *src, ptrdiff_t stride,
440  int width, int height, int slices, int rmode)
441 {
442  int i, j, slice;
443  int A, B, C;
444  uint8_t *bsrc;
445  int slice_start, slice_height;
446  const int cmask = ~rmode;
447 
448  for (slice = 0; slice < slices; slice++) {
449  slice_start = ((slice * height) / slices) & cmask;
450  slice_height = ((((slice + 1) * height) / slices) & cmask) -
451  slice_start;
452 
453  if (!slice_height)
454  continue;
455  bsrc = src + slice_start * stride;
456 
457  // first line - left neighbour prediction
458  bsrc[0] += 0x80;
459  c->llviddsp.add_left_pred(bsrc, bsrc, width, 0);
460  bsrc += stride;
461  if (slice_height <= 1)
462  continue;
463  for (j = 1; j < slice_height; j++) {
464  // second line - first element has top prediction, the rest uses gradient
465  bsrc[0] = (bsrc[0] + bsrc[-stride]) & 0xFF;
466  for (i = 1; i < width; i++) {
467  A = bsrc[i - stride];
468  B = bsrc[i - (stride + 1)];
469  C = bsrc[i - 1];
470  bsrc[i] = (A - B + C + bsrc[i]) & 0xFF;
471  }
472  bsrc += stride;
473  }
474  }
475 }
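
The gradient predictor reconstructs each pixel as A - B + C plus the stored residue, modulo 256, where A is the pixel above, B the pixel above-left and C the pixel to the left. A standalone worked example on a made-up two-row tile:

#include <stdio.h>
#include <stdint.h>

int main(void)
{
    uint8_t top[3] = { 100, 102, 104 };   /* previous line, already decoded          */
    uint8_t cur[3] = { 101, 1, 255 };     /* cur[0] decoded; cur[1..2] hold residues */

    for (int i = 1; i < 3; i++) {
        int A = top[i];       /* above      */
        int B = top[i - 1];   /* above-left */
        int C = cur[i - 1];   /* left       */
        cur[i] = (A - B + C + cur[i]) & 0xFF;
    }
    printf("reconstructed row: %d %d %d\n", cur[0], cur[1], cur[2]);   /* 101 104 105 */
    return 0;
}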
476 
477 static void restore_gradient_planar_il(UtvideoContext *c, uint8_t *src, ptrdiff_t stride,
478  int width, int height, int slices, int rmode)
479 {
480  int i, j, slice;
481  int A, B, C;
482  uint8_t *bsrc;
483  int slice_start, slice_height;
484  const int cmask = ~(rmode ? 3 : 1);
485  const ptrdiff_t stride2 = stride << 1;
486 
487  for (slice = 0; slice < slices; slice++) {
488  slice_start = ((slice * height) / slices) & cmask;
489  slice_height = ((((slice + 1) * height) / slices) & cmask) -
490  slice_start;
491  slice_height >>= 1;
492  if (!slice_height)
493  continue;
494 
495  bsrc = src + slice_start * stride;
496 
497  // first line - left neighbour prediction
498  bsrc[0] += 0x80;
499  A = c->llviddsp.add_left_pred(bsrc, bsrc, width, 0);
500  c->llviddsp.add_left_pred(bsrc + stride, bsrc + stride, width, A);
501  bsrc += stride2;
502  if (slice_height <= 1)
503  continue;
504  for (j = 1; j < slice_height; j++) {
505  // second line - first element has top prediction, the rest uses gradient
506  bsrc[0] = (bsrc[0] + bsrc[-stride2]) & 0xFF;
507  for (i = 1; i < width; i++) {
508  A = bsrc[i - stride2];
509  B = bsrc[i - (stride2 + 1)];
510  C = bsrc[i - 1];
511  bsrc[i] = (A - B + C + bsrc[i]) & 0xFF;
512  }
513  A = bsrc[-stride];
514  B = bsrc[-(1 + stride + stride - width)];
515  C = bsrc[width - 1];
516  bsrc[stride] = (A - B + C + bsrc[stride]) & 0xFF;
517  for (i = 1; i < width; i++) {
518  A = bsrc[i - stride];
519  B = bsrc[i - (1 + stride)];
520  C = bsrc[i - 1 + stride];
521  bsrc[i + stride] = (A - B + C + bsrc[i + stride]) & 0xFF;
522  }
523  bsrc += stride2;
524  }
525  }
526 }
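
In the interlaced variant the first pixel of a pair's second row takes its neighbours from "combined line" positions: A from the previous pair's second row, B from the end of the previous pair's first row, and C from the end of the current pair's first row, which is where the -(1 + stride + stride - width) offset comes from. A standalone check of those offsets with made-up numbers:

#include <stdio.h>
#include <stddef.h>

int main(void)
{
    ptrdiff_t stride = 1920;   /* made-up line size in bytes */
    int       width  = 1280;   /* visible pixels per line    */

    printf("A (top)      : bsrc[%td]\n", -stride);                        /* previous pair, 2nd row, x = 0         */
    printf("B (top-left) : bsrc[%td]\n", -(1 + stride + stride - width)); /* previous pair, 1st row, x = width - 1 */
    printf("C (left)     : bsrc[%d]\n",  width - 1);                      /* current pair, 1st row, x = width - 1  */
    return 0;
}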
527 
528 static int decode_frame(AVCodecContext *avctx, void *data, int *got_frame,
529  AVPacket *avpkt)
530 {
531  const uint8_t *buf = avpkt->data;
532  int buf_size = avpkt->size;
533  UtvideoContext *c = avctx->priv_data;
534  int i, j;
535  const uint8_t *plane_start[5];
536  int plane_size, max_slice_size = 0, slice_start, slice_end, slice_size;
537  int ret;
538  GetByteContext gb;
539  ThreadFrame frame = { .f = data };
540 
541  if ((ret = ff_thread_get_buffer(avctx, &frame, 0)) < 0)
542  return ret;
543 
544  /* parse plane structure to get frame flags and validate slice offsets */
545  bytestream2_init(&gb, buf, buf_size);
546  if (c->pro) {
547  if (bytestream2_get_bytes_left(&gb) < c->frame_info_size) {
548  av_log(avctx, AV_LOG_ERROR, "Not enough data for frame information\n");
549  return AVERROR_INVALIDDATA;
550  }
551  c->frame_info = bytestream2_get_le32u(&gb);
552  c->slices = ((c->frame_info >> 16) & 0xff) + 1;
553  for (i = 0; i < c->planes; i++) {
554  plane_start[i] = gb.buffer;
555  if (bytestream2_get_bytes_left(&gb) < 1024 + 4 * c->slices) {
556  av_log(avctx, AV_LOG_ERROR, "Insufficient data for a plane\n");
557  return AVERROR_INVALIDDATA;
558  }
559  slice_start = 0;
560  slice_end = 0;
561  for (j = 0; j < c->slices; j++) {
562  slice_end = bytestream2_get_le32u(&gb);
563  if (slice_end < 0 || slice_end < slice_start ||
564  bytestream2_get_bytes_left(&gb) < slice_end) {
565  av_log(avctx, AV_LOG_ERROR, "Incorrect slice size\n");
566  return AVERROR_INVALIDDATA;
567  }
568  slice_size = slice_end - slice_start;
569  slice_start = slice_end;
570  max_slice_size = FFMAX(max_slice_size, slice_size);
571  }
572  plane_size = slice_end;
573  bytestream2_skipu(&gb, plane_size);
574  bytestream2_skipu(&gb, 1024);
575  }
576  plane_start[c->planes] = gb.buffer;
577  } else {
578  for (i = 0; i < c->planes; i++) {
579  plane_start[i] = gb.buffer;
580  if (bytestream2_get_bytes_left(&gb) < 256 + 4 * c->slices) {
581  av_log(avctx, AV_LOG_ERROR, "Insufficient data for a plane\n");
582  return AVERROR_INVALIDDATA;
583  }
584  bytestream2_skipu(&gb, 256);
585  slice_start = 0;
586  slice_end = 0;
587  for (j = 0; j < c->slices; j++) {
588  slice_end = bytestream2_get_le32u(&gb);
589  if (slice_end < 0 || slice_end < slice_start ||
590  bytestream2_get_bytes_left(&gb) < slice_end) {
591  av_log(avctx, AV_LOG_ERROR, "Incorrect slice size\n");
592  return AVERROR_INVALIDDATA;
593  }
594  slice_size = slice_end - slice_start;
595  slice_start = slice_end;
596  max_slice_size = FFMAX(max_slice_size, slice_size);
597  }
598  plane_size = slice_end;
599  bytestream2_skipu(&gb, plane_size);
600  }
601  plane_start[c->planes] = gb.buffer;
602  if (bytestream2_get_bytes_left(&gb) < c->frame_info_size) {
603  av_log(avctx, AV_LOG_ERROR, "Not enough data for frame information\n");
604  return AVERROR_INVALIDDATA;
605  }
606  c->frame_info = bytestream2_get_le32u(&gb);
607  }
608  av_log(avctx, AV_LOG_DEBUG, "frame information flags %"PRIX32"\n",
609  c->frame_info);
610 
611  c->frame_pred = (c->frame_info >> 8) & 3;
612 
613  max_slice_size += 4*avctx->width;
614 
615  av_fast_malloc(&c->slice_bits, &c->slice_bits_size,
616  max_slice_size + AV_INPUT_BUFFER_PADDING_SIZE);
617 
618  if (!c->slice_bits) {
619  av_log(avctx, AV_LOG_ERROR, "Cannot allocate temporary buffer\n");
620  return AVERROR(ENOMEM);
621  }
622 
623  switch (c->avctx->pix_fmt) {
624  case AV_PIX_FMT_GBRP:
625  case AV_PIX_FMT_GBRAP:
626  for (i = 0; i < c->planes; i++) {
627  ret = decode_plane(c, i, frame.f->data[i], 1,
628  frame.f->linesize[i], avctx->width,
629  avctx->height, plane_start[i],
630  c->frame_pred == PRED_LEFT);
631  if (ret)
632  return ret;
633  if (c->frame_pred == PRED_MEDIAN) {
634  if (!c->interlaced) {
635  restore_median_planar(c, frame.f->data[i],
636  frame.f->linesize[i], avctx->width,
637  avctx->height, c->slices, 0);
638  } else {
639  restore_median_planar_il(c, frame.f->data[i],
640  frame.f->linesize[i],
641  avctx->width, avctx->height, c->slices,
642  0);
643  }
644  } else if (c->frame_pred == PRED_GRADIENT) {
645  if (!c->interlaced) {
646  restore_gradient_planar(c, frame.f->data[i],
647  frame.f->linesize[i], avctx->width,
648  avctx->height, c->slices, 0);
649  } else {
650  restore_gradient_planar_il(c, frame.f->data[i],
651  frame.f->linesize[i],
652  avctx->width, avctx->height, c->slices,
653  0);
654  }
655  }
656  }
657  c->utdsp.restore_rgb_planes(frame.f->data[2], frame.f->data[0], frame.f->data[1],
658  frame.f->linesize[2], frame.f->linesize[0], frame.f->linesize[1],
659  avctx->width, avctx->height);
660  break;
661  case AV_PIX_FMT_GBRAP10:
662  case AV_PIX_FMT_GBRP10:
663  for (i = 0; i < c->planes; i++) {
664  ret = decode_plane10(c, i, (uint16_t *)frame.f->data[i], 1,
665  frame.f->linesize[i] / 2, avctx->width,
666  avctx->height, plane_start[i],
667  plane_start[i + 1] - 1024,
668  c->frame_pred == PRED_LEFT);
669  if (ret)
670  return ret;
671  }
672  c->utdsp.restore_rgb_planes10((uint16_t *)frame.f->data[2], (uint16_t *)frame.f->data[0], (uint16_t *)frame.f->data[1],
673  frame.f->linesize[2] / 2, frame.f->linesize[0] / 2, frame.f->linesize[1] / 2,
674  avctx->width, avctx->height);
675  break;
676  case AV_PIX_FMT_YUV420P:
677  for (i = 0; i < 3; i++) {
678  ret = decode_plane(c, i, frame.f->data[i], 1, frame.f->linesize[i],
679  avctx->width >> !!i, avctx->height >> !!i,
680  plane_start[i], c->frame_pred == PRED_LEFT);
681  if (ret)
682  return ret;
683  if (c->frame_pred == PRED_MEDIAN) {
684  if (!c->interlaced) {
685  restore_median_planar(c, frame.f->data[i], frame.f->linesize[i],
686  avctx->width >> !!i, avctx->height >> !!i,
687  c->slices, !i);
688  } else {
689  restore_median_planar_il(c, frame.f->data[i], frame.f->linesize[i],
690  avctx->width >> !!i,
691  avctx->height >> !!i,
692  c->slices, !i);
693  }
694  } else if (c->frame_pred == PRED_GRADIENT) {
695  if (!c->interlaced) {
696  restore_gradient_planar(c, frame.f->data[i], frame.f->linesize[i],
697  avctx->width >> !!i, avctx->height >> !!i,
698  c->slices, !i);
699  } else {
700  restore_gradient_planar_il(c, frame.f->data[i], frame.f->linesize[i],
701  avctx->width >> !!i,
702  avctx->height >> !!i,
703  c->slices, !i);
704  }
705  }
706  }
707  break;
708  case AV_PIX_FMT_YUV422P:
709  for (i = 0; i < 3; i++) {
710  ret = decode_plane(c, i, frame.f->data[i], 1, frame.f->linesize[i],
711  avctx->width >> !!i, avctx->height,
712  plane_start[i], c->frame_pred == PRED_LEFT);
713  if (ret)
714  return ret;
715  if (c->frame_pred == PRED_MEDIAN) {
716  if (!c->interlaced) {
717  restore_median_planar(c, frame.f->data[i], frame.f->linesize[i],
718  avctx->width >> !!i, avctx->height,
719  c->slices, 0);
720  } else {
721  restore_median_planar_il(c, frame.f->data[i], frame.f->linesize[i],
722  avctx->width >> !!i, avctx->height,
723  c->slices, 0);
724  }
725  } else if (c->frame_pred == PRED_GRADIENT) {
726  if (!c->interlaced) {
727  restore_gradient_planar(c, frame.f->data[i], frame.f->linesize[i],
728  avctx->width >> !!i, avctx->height,
729  c->slices, 0);
730  } else {
731  restore_gradient_planar_il(c, frame.f->data[i], frame.f->linesize[i],
732  avctx->width >> !!i, avctx->height,
733  c->slices, 0);
734  }
735  }
736  }
737  break;
738  case AV_PIX_FMT_YUV444P:
739  for (i = 0; i < 3; i++) {
740  ret = decode_plane(c, i, frame.f->data[i], 1, frame.f->linesize[i],
741  avctx->width, avctx->height,
742  plane_start[i], c->frame_pred == PRED_LEFT);
743  if (ret)
744  return ret;
745  if (c->frame_pred == PRED_MEDIAN) {
746  if (!c->interlaced) {
747  restore_median_planar(c, frame.f->data[i], frame.f->linesize[i],
748  avctx->width, avctx->height,
749  c->slices, 0);
750  } else {
751  restore_median_planar_il(c, frame.f->data[i], frame.f->linesize[i],
752  avctx->width, avctx->height,
753  c->slices, 0);
754  }
755  } else if (c->frame_pred == PRED_GRADIENT) {
756  if (!c->interlaced) {
757  restore_gradient_planar(c, frame.f->data[i], frame.f->linesize[i],
758  avctx->width, avctx->height,
759  c->slices, 0);
760  } else {
761  restore_gradient_planar_il(c, frame.f->data[i], frame.f->linesize[i],
762  avctx->width, avctx->height,
763  c->slices, 0);
764  }
765  }
766  }
767  break;
768  case AV_PIX_FMT_YUV422P10:
769  for (i = 0; i < 3; i++) {
770  ret = decode_plane10(c, i, (uint16_t *)frame.f->data[i], 1, frame.f->linesize[i] / 2,
771  avctx->width >> !!i, avctx->height,
772  plane_start[i], plane_start[i + 1] - 1024, c->frame_pred == PRED_LEFT);
773  if (ret)
774  return ret;
775  }
776  break;
777  }
778 
779  frame.f->key_frame = 1;
780  frame.f->pict_type = AV_PICTURE_TYPE_I;
781  frame.f->interlaced_frame = !!c->interlaced;
782 
783  *got_frame = 1;
784 
785  /* always report that the buffer was completely consumed */
786  return buf_size;
787 }
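
The frame_info word packs the slice count into bits 16-23 (stored minus one) and the prediction mode into bits 8-9, and decode_frame() pulls both apart right after reading it. A standalone sketch with a made-up frame_info value (mode 3 is PRED_MEDIAN in utvideo.h's numbering):

#include <stdio.h>
#include <stdint.h>

int main(void)
{
    uint32_t frame_info = 0x00070300u;   /* made-up: 8 slices, median prediction */
    int slices     = ((frame_info >> 16) & 0xff) + 1;
    int frame_pred =  (frame_info >>  8) & 3;

    printf("slices %d, prediction mode %d\n", slices, frame_pred);
    return 0;
}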
788 
789 static av_cold int decode_init(AVCodecContext *avctx)
790 {
791  UtvideoContext * const c = avctx->priv_data;
792 
793  c->avctx = avctx;
794 
795  ff_utvideodsp_init(&c->utdsp);
796  ff_bswapdsp_init(&c->bdsp);
797  ff_llviddsp_init(&c->llviddsp);
798 
799  if (avctx->extradata_size >= 16) {
800  av_log(avctx, AV_LOG_DEBUG, "Encoder version %d.%d.%d.%d\n",
801  avctx->extradata[3], avctx->extradata[2],
802  avctx->extradata[1], avctx->extradata[0]);
803  av_log(avctx, AV_LOG_DEBUG, "Original format %"PRIX32"\n",
804  AV_RB32(avctx->extradata + 4));
805  c->frame_info_size = AV_RL32(avctx->extradata + 8);
806  c->flags = AV_RL32(avctx->extradata + 12);
807 
808  if (c->frame_info_size != 4)
809  avpriv_request_sample(avctx, "Frame info not 4 bytes");
810  av_log(avctx, AV_LOG_DEBUG, "Encoding parameters %08"PRIX32"\n", c->flags);
811  c->slices = (c->flags >> 24) + 1;
812  c->compression = c->flags & 1;
813  c->interlaced = c->flags & 0x800;
814  } else if (avctx->extradata_size == 8) {
815  av_log(avctx, AV_LOG_DEBUG, "Encoder version %d.%d.%d.%d\n",
816  avctx->extradata[3], avctx->extradata[2],
817  avctx->extradata[1], avctx->extradata[0]);
818  av_log(avctx, AV_LOG_DEBUG, "Original format %"PRIX32"\n",
819  AV_RB32(avctx->extradata + 4));
820  c->interlaced = 0;
821  c->pro = 1;
822  c->frame_info_size = 4;
823  } else {
824  av_log(avctx, AV_LOG_ERROR,
825  "Insufficient extradata size %d, should be at least 16\n",
826  avctx->extradata_size);
827  return AVERROR_INVALIDDATA;
828  }
829 
830  c->slice_bits_size = 0;
831 
832  switch (avctx->codec_tag) {
833  case MKTAG('U', 'L', 'R', 'G'):
834  c->planes = 3;
835  avctx->pix_fmt = AV_PIX_FMT_GBRP;
836  break;
837  case MKTAG('U', 'L', 'R', 'A'):
838  c->planes = 4;
839  avctx->pix_fmt = AV_PIX_FMT_GBRAP;
840  break;
841  case MKTAG('U', 'L', 'Y', '0'):
842  c->planes = 3;
843  avctx->pix_fmt = AV_PIX_FMT_YUV420P;
844  avctx->colorspace = AVCOL_SPC_BT470BG;
845  break;
846  case MKTAG('U', 'L', 'Y', '2'):
847  c->planes = 3;
848  avctx->pix_fmt = AV_PIX_FMT_YUV422P;
849  avctx->colorspace = AVCOL_SPC_BT470BG;
850  break;
851  case MKTAG('U', 'L', 'Y', '4'):
852  c->planes = 3;
853  avctx->pix_fmt = AV_PIX_FMT_YUV444P;
854  avctx->colorspace = AVCOL_SPC_BT470BG;
855  break;
856  case MKTAG('U', 'Q', 'Y', '2'):
857  c->planes = 3;
858  avctx->pix_fmt = AV_PIX_FMT_YUV422P10;
859  break;
860  case MKTAG('U', 'Q', 'R', 'G'):
861  c->planes = 3;
862  avctx->pix_fmt = AV_PIX_FMT_GBRP10;
863  break;
864  case MKTAG('U', 'Q', 'R', 'A'):
865  c->planes = 4;
866  avctx->pix_fmt = AV_PIX_FMT_GBRAP10;
867  break;
868  case MKTAG('U', 'L', 'H', '0'):
869  c->planes = 3;
870  avctx->pix_fmt = AV_PIX_FMT_YUV420P;
871  avctx->colorspace = AVCOL_SPC_BT709;
872  break;
873  case MKTAG('U', 'L', 'H', '2'):
874  c->planes = 3;
875  avctx->pix_fmt = AV_PIX_FMT_YUV422P;
876  avctx->colorspace = AVCOL_SPC_BT709;
877  break;
878  case MKTAG('U', 'L', 'H', '4'):
879  c->planes = 3;
880  avctx->pix_fmt = AV_PIX_FMT_YUV444P;
881  avctx->colorspace = AVCOL_SPC_BT709;
882  break;
883  default:
884  av_log(avctx, AV_LOG_ERROR, "Unknown Ut Video FOURCC provided (%08X)\n",
885  avctx->codec_tag);
886  return AVERROR_INVALIDDATA;
887  }
888 
889  return 0;
890 }
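
For the classic (non-Pro) variant decode_init() expects at least 16 bytes of extradata: encoder version, original fourcc, frame_info_size and a flags word carrying compression, interlacing and the slice count. A standalone sketch that unpacks a made-up extradata blob the same way:

#include <stdio.h>
#include <stdint.h>

int main(void)
{
    /* made-up 16-byte extradata blob */
    uint8_t ed[16] = { 0, 4, 13, 18,             /* version, printed as 18.13.4.0    */
                       'U', 'L', 'Y', '2',       /* original format fourcc (read BE) */
                       4, 0, 0, 0,               /* frame_info_size (LE), normally 4 */
                       0x01, 0x08, 0x00, 0x07 }; /* flags (LE)                       */
    uint32_t flags = ed[12] | ed[13] << 8 | ed[14] << 16 | (uint32_t)ed[15] << 24;

    printf("slices %u, compressed %u, interlaced %u\n",
           (unsigned)((flags >> 24) + 1),   /* bits 24-31: slice count minus one */
           (unsigned)(flags & 1),           /* bit 0: compression                */
           (unsigned)!!(flags & 0x800));    /* bit 11: interlaced                */
    return 0;
}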
891 
892 static av_cold int decode_end(AVCodecContext *avctx)
893 {
894  UtvideoContext * const c = avctx->priv_data;
895 
896  av_freep(&c->slice_bits);
897 
898  return 0;
899 }
900 
901 AVCodec ff_utvideo_decoder = {
902  .name = "utvideo",
903  .long_name = NULL_IF_CONFIG_SMALL("Ut Video"),
904  .type = AVMEDIA_TYPE_VIDEO,
905  .id = AV_CODEC_ID_UTVIDEO,
906  .priv_data_size = sizeof(UtvideoContext),
907  .init = decode_init,
908  .close = decode_end,
909  .decode = decode_frame,
910  .capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_FRAME_THREADS,
911  .caps_internal = FF_CODEC_CAP_INIT_THREADSAFE,
912 };
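
The decoder is reached through the normal libavcodec entry points; below is a minimal usage sketch under the assumption that the stream parameters (codec_tag, dimensions, extradata) are supplied by hand rather than copied from a demuxer's AVCodecParameters:

#include <string.h>
#include <stdint.h>
#include <libavcodec/avcodec.h>
#include <libavutil/mem.h>

/* Open a Ut Video decoder context by hand (sketch only). */
static AVCodecContext *open_utvideo(unsigned codec_tag, int width, int height,
                                    const uint8_t *extradata, int extradata_size)
{
    const AVCodec *codec = avcodec_find_decoder(AV_CODEC_ID_UTVIDEO);
    AVCodecContext *ctx  = codec ? avcodec_alloc_context3(codec) : NULL;

    if (!ctx)
        return NULL;
    ctx->codec_tag      = codec_tag;   /* e.g. MKTAG('U','L','Y','2') */
    ctx->width          = width;
    ctx->height         = height;
    ctx->extradata      = av_mallocz(extradata_size + AV_INPUT_BUFFER_PADDING_SIZE);
    ctx->extradata_size = extradata_size;
    if (!ctx->extradata) {
        avcodec_free_context(&ctx);
        return NULL;
    }
    memcpy(ctx->extradata, extradata, extradata_size);
    if (avcodec_open2(ctx, codec, NULL) < 0) {
        avcodec_free_context(&ctx);
        return NULL;
    }
    return ctx;
}

/* Decode one packet; Ut Video is intra-only, so each packet yields one frame. */
static int decode_utvideo_packet(AVCodecContext *ctx, AVPacket *pkt, AVFrame *out)
{
    int ret = avcodec_send_packet(ctx, pkt);
    if (ret < 0)
        return ret;
    return avcodec_receive_frame(ctx, out);
}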