interplayvideo.c
1 /*
2  * Interplay MVE Video Decoder
3  * Copyright (c) 2003 The FFmpeg Project
4  *
5  * This file is part of FFmpeg.
6  *
7  * FFmpeg is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * FFmpeg is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with FFmpeg; if not, write to the Free Software
19  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
20  */
21 
22 /**
23  * @file
24  * Interplay MVE Video Decoder by Mike Melanson (melanson@pcisys.net)
25  * For more information about the Interplay MVE format, visit:
26  * http://www.pcisys.net/~melanson/codecs/interplay-mve.txt
27  * This code is written in such a way that the identifiers match up
28  * with the encoding descriptions in the document.
29  *
30  * This decoder presently only supports a PAL8 output colorspace.
31  *
32  * An Interplay video frame consists of 2 parts: The decoding map and
33  * the video data. A demuxer must load these 2 parts together in a single
34  * buffer before sending it through the stream to this decoder.
35  */
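A minimal standalone sketch (hypothetical map bytes and image size) of how the decoding map described above is laid out: one 4-bit opcode per 8x8 block, two opcodes packed into each byte, low nibble first, matching the little-endian bitstream reader selected below with BITSTREAM_READER_LE.

    /* sketch_decoding_map.c -- illustrative only, not part of the decoder */
    #include <stdio.h>
    #include <stdint.h>

    int main(void)
    {
        /* hypothetical map for a 32x16 image: (32/8) * (16/8) = 8 blocks -> 4 bytes */
        const uint8_t decoding_map[4] = { 0x21, 0x43, 0x65, 0x87 };
        int width = 32, height = 16;
        int x, y, block = 0;

        for (y = 0; y < height; y += 8) {
            for (x = 0; x < width; x += 8, block++) {
                uint8_t byte   = decoding_map[block >> 1];
                uint8_t opcode = (block & 1) ? (byte >> 4) : (byte & 0x0F);
                printf("block (%2d,%2d): opcode 0x%X\n", x, y, opcode);
            }
        }
        return 0;
    }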
36 
37 #include <stdio.h>
38 #include <stdlib.h>
39 #include <string.h>
40 
41 #include "avcodec.h"
42 #include "bytestream.h"
43 #include "hpeldsp.h"
44 #define BITSTREAM_READER_LE
45 #include "get_bits.h"
46 #include "internal.h"
47 
48 #define PALETTE_COUNT 256
49 
50 typedef struct IpvideoContext {
51 
52  AVCodecContext *avctx;
53  HpelDSPContext hdsp;
54  AVFrame *second_last_frame;
55  AVFrame *last_frame;
56  const unsigned char *decoding_map;
57  int decoding_map_size;
58 
59  int is_16bpp;
60  GetByteContext stream_ptr, mv_ptr;
61  unsigned char *pixel_ptr;
62  int line_inc;
63  int stride;
64  int upper_motion_limit_offset;
65 
66  uint32_t pal[256];
67 } IpvideoContext;
68 
69 static int copy_from(IpvideoContext *s, AVFrame *src, AVFrame *dst, int delta_x, int delta_y)
70 {
71  int current_offset = s->pixel_ptr - dst->data[0];
72  int motion_offset = current_offset + delta_y * dst->linesize[0]
73  + delta_x * (1 + s->is_16bpp);
74  if (motion_offset < 0) {
75  av_log(s->avctx, AV_LOG_ERROR, "motion offset < 0 (%d)\n", motion_offset);
76  return AVERROR_INVALIDDATA;
77  } else if (motion_offset > s->upper_motion_limit_offset) {
78  av_log(s->avctx, AV_LOG_ERROR, "motion offset above limit (%d >= %d)\n",
79  motion_offset, s->upper_motion_limit_offset);
80  return AVERROR_INVALIDDATA;
81  }
82  if (!src->data[0]) {
83  av_log(s->avctx, AV_LOG_ERROR, "Invalid decode type, corrupted header?\n");
84  return AVERROR(EINVAL);
85  }
86  s->hdsp.put_pixels_tab[!s->is_16bpp][0](s->pixel_ptr, src->data[0] + motion_offset,
87  dst->linesize[0], 8);
88  return 0;
89 }
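A minimal standalone sketch, with made-up frame dimensions and motion values, of the bounds check in copy_from(): the source block must start at a non-negative offset and must leave room for a full 8x8 block, which is where the (height - 8) * linesize + (width - 8) * bytes_per_pixel limit computed in ipvideo_decode_opcodes() comes from.

    /* sketch_copy_from_bounds.c -- illustrative only */
    #include <stdio.h>

    int main(void)
    {
        int linesize = 320, width = 320, height = 200, bpp = 1; /* PAL8 case */
        int upper_limit = (height - 8) * linesize + (width - 8) * bpp;

        int current = 100 * linesize + 64 * bpp; /* destination block at (64, 100) */
        int dx = -3, dy = -5;                    /* hypothetical motion vector */
        int motion = current + dy * linesize + dx * bpp;

        printf("motion offset %d, valid range [0, %d]\n", motion, upper_limit);
        return 0;
    }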
90 
91 static int ipvideo_decode_block_opcode_0x0(IpvideoContext *s, AVFrame *frame)
92 {
93  return copy_from(s, s->last_frame, frame, 0, 0);
94 }
95 
96 static int ipvideo_decode_block_opcode_0x1(IpvideoContext *s, AVFrame *frame)
97 {
98  return copy_from(s, s->second_last_frame, frame, 0, 0);
99 }
100 
101 static int ipvideo_decode_block_opcode_0x2(IpvideoContext *s, AVFrame *frame)
102 {
103  unsigned char B;
104  int x, y;
105 
106  /* copy block from 2 frames ago using a motion vector; need 1 more byte */
107  if (!s->is_16bpp) {
108  B = bytestream2_get_byte(&s->stream_ptr);
109  } else {
110  B = bytestream2_get_byte(&s->mv_ptr);
111  }
112 
113  if (B < 56) {
114  x = 8 + (B % 7);
115  y = B / 7;
116  } else {
117  x = -14 + ((B - 56) % 29);
118  y = 8 + ((B - 56) / 29);
119  }
120 
121  ff_tlog(s->avctx, "motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
122  return copy_from(s, s->second_last_frame, frame, x, y);
123 }
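A standalone sketch of the motion-byte mapping used by opcode 0x2 (and mirrored with negated signs by opcode 0x3 below): values below 56 encode small vectors to the right and below, larger values switch to an extended range. The sample values are arbitrary.

    /* sketch_motion_byte.c -- illustrative only */
    #include <stdio.h>

    static void decode_motion_0x2(unsigned char B, int *x, int *y)
    {
        if (B < 56) {                    /* small forward vectors */
            *x = 8 + (B % 7);
            *y = B / 7;
        } else {                         /* extended range */
            *x = -14 + ((B - 56) % 29);
            *y =   8 + ((B - 56) / 29);
        }
    }

    int main(void)
    {
        int x, y;
        decode_motion_0x2(0, &x, &y);    /* -> (8, 0) */
        printf("B =   0 -> (%d, %d)\n", x, y);
        decode_motion_0x2(100, &x, &y);  /* -> (1, 9) */
        printf("B = 100 -> (%d, %d)\n", x, y);
        return 0;
    }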
124 
125 static int ipvideo_decode_block_opcode_0x3(IpvideoContext *s, AVFrame *frame)
126 {
127  unsigned char B;
128  int x, y;
129 
130  /* copy 8x8 block from current frame from an up/left block */
131 
132  /* need 1 more byte for motion */
133  if (!s->is_16bpp) {
134  B = bytestream2_get_byte(&s->stream_ptr);
135  } else {
136  B = bytestream2_get_byte(&s->mv_ptr);
137  }
138 
139  if (B < 56) {
140  x = -(8 + (B % 7));
141  y = -(B / 7);
142  } else {
143  x = -(-14 + ((B - 56) % 29));
144  y = -( 8 + ((B - 56) / 29));
145  }
146 
147  ff_tlog(s->avctx, "motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
148  return copy_from(s, frame, frame, x, y);
149 }
150 
151 static int ipvideo_decode_block_opcode_0x4(IpvideoContext *s, AVFrame *frame)
152 {
153  int x, y;
154  unsigned char B, BL, BH;
155 
156  /* copy a block from the previous frame; need 1 more byte */
157  if (!s->is_16bpp) {
158  B = bytestream2_get_byte(&s->stream_ptr);
159  } else {
160  B = bytestream2_get_byte(&s->mv_ptr);
161  }
162 
163  BL = B & 0x0F;
164  BH = (B >> 4) & 0x0F;
165  x = -8 + BL;
166  y = -8 + BH;
167 
168  ff_tlog(s->avctx, "motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
169  return copy_from(s, s->last_frame, frame, x, y);
170 }
171 
172 static int ipvideo_decode_block_opcode_0x5(IpvideoContext *s, AVFrame *frame)
173 {
174  signed char x, y;
175 
176  /* copy a block from the previous frame using an expanded range;
177  * need 2 more bytes */
178  x = bytestream2_get_byte(&s->stream_ptr);
179  y = bytestream2_get_byte(&s->stream_ptr);
180 
181  ff_tlog(s->avctx, "motion bytes = %d, %d\n", x, y);
182  return copy_from(s, s->last_frame, frame, x, y);
183 }
184 
185 static int ipvideo_decode_block_opcode_0x6(IpvideoContext *s, AVFrame *frame)
186 {
187  /* mystery opcode? skip multiple blocks? */
188  av_log(s->avctx, AV_LOG_ERROR, "Help! Mystery opcode 0x6 seen\n");
189 
190  /* report success */
191  return 0;
192 }
193 
194 static int ipvideo_decode_block_opcode_0x7(IpvideoContext *s, AVFrame *frame)
195 {
196  int x, y;
197  unsigned char P[2];
198  unsigned int flags;
199 
200  if (bytestream2_get_bytes_left(&s->stream_ptr) < 4) {
201  av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0x7\n");
202  return AVERROR_INVALIDDATA;
203  }
204 
205  /* 2-color encoding */
206  P[0] = bytestream2_get_byte(&s->stream_ptr);
207  P[1] = bytestream2_get_byte(&s->stream_ptr);
208 
209  if (P[0] <= P[1]) {
210 
211  /* need 8 more bytes from the stream */
212  for (y = 0; y < 8; y++) {
213  flags = bytestream2_get_byte(&s->stream_ptr) | 0x100;
214  for (; flags != 1; flags >>= 1)
215  *s->pixel_ptr++ = P[flags & 1];
216  s->pixel_ptr += s->line_inc;
217  }
218 
219  } else {
220 
221  /* need 2 more bytes from the stream */
222  flags = bytestream2_get_le16(&s->stream_ptr);
223  for (y = 0; y < 8; y += 2) {
224  for (x = 0; x < 8; x += 2, flags >>= 1) {
225  s->pixel_ptr[x ] =
226  s->pixel_ptr[x + 1 ] =
227  s->pixel_ptr[x + s->stride] =
228  s->pixel_ptr[x + 1 + s->stride] = P[flags & 1];
229  }
230  s->pixel_ptr += s->stride * 2;
231  }
232  }
233 
234  /* report success */
235  return 0;
236 }
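The per-pixel branch above relies on a small sentinel trick: OR-ing 0x100 onto the 8 flag bits makes the loop stop after exactly eight shifts without a separate counter. A standalone sketch with an arbitrary flag byte and palette indices:

    /* sketch_sentinel_flags.c -- illustrative only */
    #include <stdio.h>

    int main(void)
    {
        unsigned char P[2]  = { 10, 200 };     /* two hypothetical palette indices */
        unsigned int  flags = 0xB4 | 0x100;    /* 10110100 plus sentinel bit */
        unsigned char row[8];
        int i = 0;

        for (; flags != 1; flags >>= 1)
            row[i++] = P[flags & 1];           /* bit 0 selects P[0] or P[1] */

        for (i = 0; i < 8; i++)
            printf("%d ", row[i]);             /* prints the row, LSB first */
        printf("\n");
        return 0;
    }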
237 
238 static int ipvideo_decode_block_opcode_0x8(IpvideoContext *s, AVFrame *frame)
239 {
240  int x, y;
241  unsigned char P[4];
242  unsigned int flags = 0;
243 
244  if (bytestream2_get_bytes_left(&s->stream_ptr) < 12) {
245  av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0x8\n");
246  return AVERROR_INVALIDDATA;
247  }
248 
249  /* 2-color encoding for each 4x4 quadrant, or 2-color encoding on
250  * either top and bottom or left and right halves */
251  P[0] = bytestream2_get_byte(&s->stream_ptr);
252  P[1] = bytestream2_get_byte(&s->stream_ptr);
253 
254  if (P[0] <= P[1]) {
255  for (y = 0; y < 16; y++) {
256  // new values for each 4x4 block
257  if (!(y & 3)) {
258  if (y) {
259  P[0] = bytestream2_get_byte(&s->stream_ptr);
260  P[1] = bytestream2_get_byte(&s->stream_ptr);
261  }
262  flags = bytestream2_get_le16(&s->stream_ptr);
263  }
264 
265  for (x = 0; x < 4; x++, flags >>= 1)
266  *s->pixel_ptr++ = P[flags & 1];
267  s->pixel_ptr += s->stride - 4;
268  // switch to right half
269  if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
270  }
271 
272  } else {
273  flags = bytestream2_get_le32(&s->stream_ptr);
274  P[2] = bytestream2_get_byte(&s->stream_ptr);
275  P[3] = bytestream2_get_byte(&s->stream_ptr);
276 
277  if (P[2] <= P[3]) {
278 
279  /* vertical split; left & right halves are 2-color encoded */
280 
281  for (y = 0; y < 16; y++) {
282  for (x = 0; x < 4; x++, flags >>= 1)
283  *s->pixel_ptr++ = P[flags & 1];
284  s->pixel_ptr += s->stride - 4;
285  // switch to right half
286  if (y == 7) {
287  s->pixel_ptr -= 8 * s->stride - 4;
288  P[0] = P[2];
289  P[1] = P[3];
290  flags = bytestream2_get_le32(&s->stream_ptr);
291  }
292  }
293 
294  } else {
295 
296  /* horizontal split; top & bottom halves are 2-color encoded */
297 
298  for (y = 0; y < 8; y++) {
299  if (y == 4) {
300  P[0] = P[2];
301  P[1] = P[3];
302  flags = bytestream2_get_le32(&s->stream_ptr);
303  }
304 
305  for (x = 0; x < 8; x++, flags >>= 1)
306  *s->pixel_ptr++ = P[flags & 1];
307  s->pixel_ptr += s->line_inc;
308  }
309  }
310  }
311 
312  /* report success */
313  return 0;
314 }
315 
316 static int ipvideo_decode_block_opcode_0x9(IpvideoContext *s, AVFrame *frame)
317 {
318  int x, y;
319  unsigned char P[4];
320 
321  if (bytestream2_get_bytes_left(&s->stream_ptr) < 8) {
322  av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0x9\n");
323  return AVERROR_INVALIDDATA;
324  }
325 
326  /* 4-color encoding */
327  bytestream2_get_buffer(&s->stream_ptr, P, 4);
328 
329  if (P[0] <= P[1]) {
330  if (P[2] <= P[3]) {
331 
332  /* 1 of 4 colors for each pixel, need 16 more bytes */
333  for (y = 0; y < 8; y++) {
334  /* get the next set of 8 2-bit flags */
335  int flags = bytestream2_get_le16(&s->stream_ptr);
336  for (x = 0; x < 8; x++, flags >>= 2)
337  *s->pixel_ptr++ = P[flags & 0x03];
338  s->pixel_ptr += s->line_inc;
339  }
340 
341  } else {
342  uint32_t flags;
343 
344  /* 1 of 4 colors for each 2x2 block, need 4 more bytes */
345  flags = bytestream2_get_le32(&s->stream_ptr);
346 
347  for (y = 0; y < 8; y += 2) {
348  for (x = 0; x < 8; x += 2, flags >>= 2) {
349  s->pixel_ptr[x ] =
350  s->pixel_ptr[x + 1 ] =
351  s->pixel_ptr[x + s->stride] =
352  s->pixel_ptr[x + 1 + s->stride] = P[flags & 0x03];
353  }
354  s->pixel_ptr += s->stride * 2;
355  }
356 
357  }
358  } else {
359  uint64_t flags;
360 
361  /* 1 of 4 colors for each 2x1 or 1x2 block, need 8 more bytes */
362  flags = bytestream2_get_le64(&s->stream_ptr);
363  if (P[2] <= P[3]) {
364  for (y = 0; y < 8; y++) {
365  for (x = 0; x < 8; x += 2, flags >>= 2) {
366  s->pixel_ptr[x ] =
367  s->pixel_ptr[x + 1] = P[flags & 0x03];
368  }
369  s->pixel_ptr += s->stride;
370  }
371  } else {
372  for (y = 0; y < 8; y += 2) {
373  for (x = 0; x < 8; x++, flags >>= 2) {
374  s->pixel_ptr[x ] =
375  s->pixel_ptr[x + s->stride] = P[flags & 0x03];
376  }
377  s->pixel_ptr += s->stride * 2;
378  }
379  }
380  }
381 
382  /* report success */
383  return 0;
384 }
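A standalone sketch of the 2-bit flag decoding that the 4-color modes above use: each pair of bits, taken least-significant first, selects one of the four colors in P[]. The values are arbitrary and only four pixels are shown.

    /* sketch_2bit_flags.c -- illustrative only */
    #include <stdio.h>
    #include <stdint.h>

    int main(void)
    {
        unsigned char P[4]  = { 1, 2, 3, 4 };  /* hypothetical palette indices */
        uint16_t      flags = 0xE4;            /* 11 10 01 00 -> selects P[0..3] in turn */
        int x;

        for (x = 0; x < 4; x++, flags >>= 2)
            printf("pixel %d -> color %d\n", x, P[flags & 0x03]);
        return 0;
    }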
385 
386 static int ipvideo_decode_block_opcode_0xA(IpvideoContext *s, AVFrame *frame)
387 {
388  int x, y;
389  unsigned char P[8];
390  int flags = 0;
391 
392  if (bytestream2_get_bytes_left(&s->stream_ptr) < 16) {
393  av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0xA\n");
394  return AVERROR_INVALIDDATA;
395  }
396 
397  bytestream2_get_buffer(&s->stream_ptr, P, 4);
398 
399  /* 4-color encoding for each 4x4 quadrant, or 4-color encoding on
400  * either top and bottom or left and right halves */
401  if (P[0] <= P[1]) {
402 
403  /* 4-color encoding for each quadrant; need 32 bytes */
404  for (y = 0; y < 16; y++) {
405  // new values for each 4x4 block
406  if (!(y & 3)) {
407  if (y) bytestream2_get_buffer(&s->stream_ptr, P, 4);
408  flags = bytestream2_get_le32(&s->stream_ptr);
409  }
410 
411  for (x = 0; x < 4; x++, flags >>= 2)
412  *s->pixel_ptr++ = P[flags & 0x03];
413 
414  s->pixel_ptr += s->stride - 4;
415  // switch to right half
416  if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
417  }
418 
419  } else {
420  // vertical split?
421  int vert;
422  uint64_t flags = bytestream2_get_le64(&s->stream_ptr);
423 
424  bytestream2_get_buffer(&s->stream_ptr, P + 4, 4);
425  vert = P[4] <= P[5];
426 
427  /* 4-color encoding for either left and right or top and bottom
428  * halves */
429 
430  for (y = 0; y < 16; y++) {
431  for (x = 0; x < 4; x++, flags >>= 2)
432  *s->pixel_ptr++ = P[flags & 0x03];
433 
434  if (vert) {
435  s->pixel_ptr += s->stride - 4;
436  // switch to right half
437  if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
438  } else if (y & 1) s->pixel_ptr += s->line_inc;
439 
440  // load values for second half
441  if (y == 7) {
442  memcpy(P, P + 4, 4);
443  flags = bytestream2_get_le64(&s->stream_ptr);
444  }
445  }
446  }
447 
448  /* report success */
449  return 0;
450 }
451 
452 static int ipvideo_decode_block_opcode_0xB(IpvideoContext *s, AVFrame *frame)
453 {
454  int y;
455 
456  /* 64-color encoding (each pixel in block is a different color) */
457  for (y = 0; y < 8; y++) {
458  bytestream2_get_buffer(&s->stream_ptr, s->pixel_ptr, 8);
459  s->pixel_ptr += s->stride;
460  }
461 
462  /* report success */
463  return 0;
464 }
465 
466 static int ipvideo_decode_block_opcode_0xC(IpvideoContext *s, AVFrame *frame)
467 {
468  int x, y;
469 
470  /* 16-color block encoding: each 2x2 block is a different color */
471  for (y = 0; y < 8; y += 2) {
472  for (x = 0; x < 8; x += 2) {
473  s->pixel_ptr[x ] =
474  s->pixel_ptr[x + 1 ] =
475  s->pixel_ptr[x + s->stride] =
476  s->pixel_ptr[x + 1 + s->stride] = bytestream2_get_byte(&s->stream_ptr);
477  }
478  s->pixel_ptr += s->stride * 2;
479  }
480 
481  /* report success */
482  return 0;
483 }
484 
485 static int ipvideo_decode_block_opcode_0xD(IpvideoContext *s, AVFrame *frame)
486 {
487  int y;
488  unsigned char P[2];
489 
490  if (bytestream2_get_bytes_left(&s->stream_ptr) < 4) {
491  av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0xD\n");
492  return AVERROR_INVALIDDATA;
493  }
494 
495  /* 4-color block encoding: each 4x4 block is a different color */
496  for (y = 0; y < 8; y++) {
497  if (!(y & 3)) {
498  P[0] = bytestream2_get_byte(&s->stream_ptr);
499  P[1] = bytestream2_get_byte(&s->stream_ptr);
500  }
501  memset(s->pixel_ptr, P[0], 4);
502  memset(s->pixel_ptr + 4, P[1], 4);
503  s->pixel_ptr += s->stride;
504  }
505 
506  /* report success */
507  return 0;
508 }
509 
510 static int ipvideo_decode_block_opcode_0xE(IpvideoContext *s, AVFrame *frame)
511 {
512  int y;
513  unsigned char pix;
514 
515  /* 1-color encoding: the whole block is 1 solid color */
516  pix = bytestream2_get_byte(&s->stream_ptr);
517 
518  for (y = 0; y < 8; y++) {
519  memset(s->pixel_ptr, pix, 8);
520  s->pixel_ptr += s->stride;
521  }
522 
523  /* report success */
524  return 0;
525 }
526 
527 static int ipvideo_decode_block_opcode_0xF(IpvideoContext *s, AVFrame *frame)
528 {
529  int x, y;
530  unsigned char sample[2];
531 
532  /* dithered encoding */
533  sample[0] = bytestream2_get_byte(&s->stream_ptr);
534  sample[1] = bytestream2_get_byte(&s->stream_ptr);
535 
536  for (y = 0; y < 8; y++) {
537  for (x = 0; x < 8; x += 2) {
538  *s->pixel_ptr++ = sample[ y & 1 ];
539  *s->pixel_ptr++ = sample[!(y & 1)];
540  }
541  s->pixel_ptr += s->line_inc;
542  }
543 
544  /* report success */
545  return 0;
546 }
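A standalone sketch of the checkerboard that opcode 0xF produces: even rows start with sample[0], odd rows with sample[1]. The two sample values are arbitrary.

    /* sketch_dither.c -- illustrative only */
    #include <stdio.h>

    int main(void)
    {
        unsigned char sample[2] = { 'A', 'B' };  /* hypothetical palette indices */
        int x, y;

        for (y = 0; y < 8; y++) {
            for (x = 0; x < 8; x += 2) {
                putchar(sample[ y & 1 ]);
                putchar(sample[!(y & 1)]);
            }
            putchar('\n');                       /* ABABABAB / BABABABA / ... */
        }
        return 0;
    }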
547 
548 static int ipvideo_decode_block_opcode_0x6_16(IpvideoContext *s, AVFrame *frame)
549 {
550  signed char x, y;
551 
552  /* copy a block from the second last frame using an expanded range */
553  x = bytestream2_get_byte(&s->stream_ptr);
554  y = bytestream2_get_byte(&s->stream_ptr);
555 
556  ff_tlog(s->avctx, "motion bytes = %d, %d\n", x, y);
557  return copy_from(s, s->second_last_frame, frame, x, y);
558 }
559 
560 static int ipvideo_decode_block_opcode_0x7_16(IpvideoContext *s, AVFrame *frame)
561 {
562  int x, y;
563  uint16_t P[2];
564  unsigned int flags;
565  uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
566 
567  /* 2-color encoding */
568  P[0] = bytestream2_get_le16(&s->stream_ptr);
569  P[1] = bytestream2_get_le16(&s->stream_ptr);
570 
571  if (!(P[0] & 0x8000)) {
572 
573  for (y = 0; y < 8; y++) {
574  flags = bytestream2_get_byte(&s->stream_ptr) | 0x100;
575  for (; flags != 1; flags >>= 1)
576  *pixel_ptr++ = P[flags & 1];
577  pixel_ptr += s->line_inc;
578  }
579 
580  } else {
581 
582  flags = bytestream2_get_le16(&s->stream_ptr);
583  for (y = 0; y < 8; y += 2) {
584  for (x = 0; x < 8; x += 2, flags >>= 1) {
585  pixel_ptr[x ] =
586  pixel_ptr[x + 1 ] =
587  pixel_ptr[x + s->stride] =
588  pixel_ptr[x + 1 + s->stride] = P[flags & 1];
589  }
590  pixel_ptr += s->stride * 2;
591  }
592  }
593 
594  return 0;
595 }
596 
597 static int ipvideo_decode_block_opcode_0x8_16(IpvideoContext *s, AVFrame *frame)
598 {
599  int x, y;
600  uint16_t P[4];
601  unsigned int flags = 0;
602  uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
603 
604  /* 2-color encoding for each 4x4 quadrant, or 2-color encoding on
605  * either top and bottom or left and right halves */
606  P[0] = bytestream2_get_le16(&s->stream_ptr);
607  P[1] = bytestream2_get_le16(&s->stream_ptr);
608 
609  if (!(P[0] & 0x8000)) {
610 
611  for (y = 0; y < 16; y++) {
612  // new values for each 4x4 block
613  if (!(y & 3)) {
614  if (y) {
615  P[0] = bytestream2_get_le16(&s->stream_ptr);
616  P[1] = bytestream2_get_le16(&s->stream_ptr);
617  }
618  flags = bytestream2_get_le16(&s->stream_ptr);
619  }
620 
621  for (x = 0; x < 4; x++, flags >>= 1)
622  *pixel_ptr++ = P[flags & 1];
623  pixel_ptr += s->stride - 4;
624  // switch to right half
625  if (y == 7) pixel_ptr -= 8 * s->stride - 4;
626  }
627 
628  } else {
629 
630  flags = bytestream2_get_le32(&s->stream_ptr);
631  P[2] = bytestream2_get_le16(&s->stream_ptr);
632  P[3] = bytestream2_get_le16(&s->stream_ptr);
633 
634  if (!(P[2] & 0x8000)) {
635 
636  /* vertical split; left & right halves are 2-color encoded */
637 
638  for (y = 0; y < 16; y++) {
639  for (x = 0; x < 4; x++, flags >>= 1)
640  *pixel_ptr++ = P[flags & 1];
641  pixel_ptr += s->stride - 4;
642  // switch to right half
643  if (y == 7) {
644  pixel_ptr -= 8 * s->stride - 4;
645  P[0] = P[2];
646  P[1] = P[3];
647  flags = bytestream2_get_le32(&s->stream_ptr);
648  }
649  }
650 
651  } else {
652 
653  /* horizontal split; top & bottom halves are 2-color encoded */
654 
655  for (y = 0; y < 8; y++) {
656  if (y == 4) {
657  P[0] = P[2];
658  P[1] = P[3];
659  flags = bytestream2_get_le32(&s->stream_ptr);
660  }
661 
662  for (x = 0; x < 8; x++, flags >>= 1)
663  *pixel_ptr++ = P[flags & 1];
664  pixel_ptr += s->line_inc;
665  }
666  }
667  }
668 
669  /* report success */
670  return 0;
671 }
672 
673 static int ipvideo_decode_block_opcode_0x9_16(IpvideoContext *s, AVFrame *frame)
674 {
675  int x, y;
676  uint16_t P[4];
677  uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
678 
679  /* 4-color encoding */
680  for (x = 0; x < 4; x++)
681  P[x] = bytestream2_get_le16(&s->stream_ptr);
682 
683  if (!(P[0] & 0x8000)) {
684  if (!(P[2] & 0x8000)) {
685 
686  /* 1 of 4 colors for each pixel */
687  for (y = 0; y < 8; y++) {
688  /* get the next set of 8 2-bit flags */
689  int flags = bytestream2_get_le16(&s->stream_ptr);
690  for (x = 0; x < 8; x++, flags >>= 2)
691  *pixel_ptr++ = P[flags & 0x03];
692  pixel_ptr += s->line_inc;
693  }
694 
695  } else {
696  uint32_t flags;
697 
698  /* 1 of 4 colors for each 2x2 block */
699  flags = bytestream2_get_le32(&s->stream_ptr);
700 
701  for (y = 0; y < 8; y += 2) {
702  for (x = 0; x < 8; x += 2, flags >>= 2) {
703  pixel_ptr[x ] =
704  pixel_ptr[x + 1 ] =
705  pixel_ptr[x + s->stride] =
706  pixel_ptr[x + 1 + s->stride] = P[flags & 0x03];
707  }
708  pixel_ptr += s->stride * 2;
709  }
710 
711  }
712  } else {
713  uint64_t flags;
714 
715  /* 1 of 4 colors for each 2x1 or 1x2 block */
716  flags = bytestream2_get_le64(&s->stream_ptr);
717  if (!(P[2] & 0x8000)) {
718  for (y = 0; y < 8; y++) {
719  for (x = 0; x < 8; x += 2, flags >>= 2) {
720  pixel_ptr[x ] =
721  pixel_ptr[x + 1] = P[flags & 0x03];
722  }
723  pixel_ptr += s->stride;
724  }
725  } else {
726  for (y = 0; y < 8; y += 2) {
727  for (x = 0; x < 8; x++, flags >>= 2) {
728  pixel_ptr[x ] =
729  pixel_ptr[x + s->stride] = P[flags & 0x03];
730  }
731  pixel_ptr += s->stride * 2;
732  }
733  }
734  }
735 
736  /* report success */
737  return 0;
738 }
739 
740 static int ipvideo_decode_block_opcode_0xA_16(IpvideoContext *s, AVFrame *frame)
741 {
742  int x, y;
743  uint16_t P[8];
744  int flags = 0;
745  uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
746 
747  for (x = 0; x < 4; x++)
748  P[x] = bytestream2_get_le16(&s->stream_ptr);
749 
750  /* 4-color encoding for each 4x4 quadrant, or 4-color encoding on
751  * either top and bottom or left and right halves */
752  if (!(P[0] & 0x8000)) {
753 
754  /* 4-color encoding for each quadrant */
755  for (y = 0; y < 16; y++) {
756  // new values for each 4x4 block
757  if (!(y & 3)) {
758  if (y)
759  for (x = 0; x < 4; x++)
760  P[x] = bytestream2_get_le16(&s->stream_ptr);
761  flags = bytestream2_get_le32(&s->stream_ptr);
762  }
763 
764  for (x = 0; x < 4; x++, flags >>= 2)
765  *pixel_ptr++ = P[flags & 0x03];
766 
767  pixel_ptr += s->stride - 4;
768  // switch to right half
769  if (y == 7) pixel_ptr -= 8 * s->stride - 4;
770  }
771 
772  } else {
773  // vertical split?
774  int vert;
775  uint64_t flags = bytestream2_get_le64(&s->stream_ptr);
776 
777  for (x = 4; x < 8; x++)
778  P[x] = bytestream2_get_le16(&s->stream_ptr);
779  vert = !(P[4] & 0x8000);
780 
781  /* 4-color encoding for either left and right or top and bottom
782  * halves */
783 
784  for (y = 0; y < 16; y++) {
785  for (x = 0; x < 4; x++, flags >>= 2)
786  *pixel_ptr++ = P[flags & 0x03];
787 
788  if (vert) {
789  pixel_ptr += s->stride - 4;
790  // switch to right half
791  if (y == 7) pixel_ptr -= 8 * s->stride - 4;
792  } else if (y & 1) pixel_ptr += s->line_inc;
793 
794  // load values for second half
795  if (y == 7) {
796  memcpy(P, P + 4, 8);
797  flags = bytestream2_get_le64(&s->stream_ptr);
798  }
799  }
800  }
801 
802  /* report success */
803  return 0;
804 }
805 
806 static int ipvideo_decode_block_opcode_0xB_16(IpvideoContext *s, AVFrame *frame)
807 {
808  int x, y;
809  uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
810 
811  /* 64-color encoding (each pixel in block is a different color) */
812  for (y = 0; y < 8; y++) {
813  for (x = 0; x < 8; x++)
814  pixel_ptr[x] = bytestream2_get_le16(&s->stream_ptr);
815  pixel_ptr += s->stride;
816  }
817 
818  /* report success */
819  return 0;
820 }
821 
822 static int ipvideo_decode_block_opcode_0xC_16(IpvideoContext *s, AVFrame *frame)
823 {
824  int x, y;
825  uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
826 
827  /* 16-color block encoding: each 2x2 block is a different color */
828  for (y = 0; y < 8; y += 2) {
829  for (x = 0; x < 8; x += 2) {
830  pixel_ptr[x ] =
831  pixel_ptr[x + 1 ] =
832  pixel_ptr[x + s->stride] =
833  pixel_ptr[x + 1 + s->stride] = bytestream2_get_le16(&s->stream_ptr);
834  }
835  pixel_ptr += s->stride * 2;
836  }
837 
838  /* report success */
839  return 0;
840 }
841 
842 static int ipvideo_decode_block_opcode_0xD_16(IpvideoContext *s, AVFrame *frame)
843 {
844  int x, y;
845  uint16_t P[2];
846  uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
847 
848  /* 4-color block encoding: each 4x4 block is a different color */
849  for (y = 0; y < 8; y++) {
850  if (!(y & 3)) {
851  P[0] = bytestream2_get_le16(&s->stream_ptr);
852  P[1] = bytestream2_get_le16(&s->stream_ptr);
853  }
854  for (x = 0; x < 8; x++)
855  pixel_ptr[x] = P[x >> 2];
856  pixel_ptr += s->stride;
857  }
858 
859  /* report success */
860  return 0;
861 }
862 
863 static int ipvideo_decode_block_opcode_0xE_16(IpvideoContext *s, AVFrame *frame)
864 {
865  int x, y;
866  uint16_t pix;
867  uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
868 
869  /* 1-color encoding: the whole block is 1 solid color */
870  pix = bytestream2_get_le16(&s->stream_ptr);
871 
872  for (y = 0; y < 8; y++) {
873  for (x = 0; x < 8; x++)
874  pixel_ptr[x] = pix;
875  pixel_ptr += s->stride;
876  }
877 
878  /* report success */
879  return 0;
880 }
881 
882 static int (* const ipvideo_decode_block[])(IpvideoContext *s, AVFrame *frame) = {
883  ipvideo_decode_block_opcode_0x0, ipvideo_decode_block_opcode_0x1,
884  ipvideo_decode_block_opcode_0x2, ipvideo_decode_block_opcode_0x3,
885  ipvideo_decode_block_opcode_0x4, ipvideo_decode_block_opcode_0x5,
886  ipvideo_decode_block_opcode_0x6, ipvideo_decode_block_opcode_0x7,
887  ipvideo_decode_block_opcode_0x8, ipvideo_decode_block_opcode_0x9,
888  ipvideo_decode_block_opcode_0xA, ipvideo_decode_block_opcode_0xB,
889  ipvideo_decode_block_opcode_0xC, ipvideo_decode_block_opcode_0xD,
890  ipvideo_decode_block_opcode_0xE, ipvideo_decode_block_opcode_0xF,
891 };
892 
893 static int (* const ipvideo_decode_block16[])(IpvideoContext *s, AVFrame *frame) = {
894  ipvideo_decode_block_opcode_0x0,    ipvideo_decode_block_opcode_0x1,
895  ipvideo_decode_block_opcode_0x2,    ipvideo_decode_block_opcode_0x3,
896  ipvideo_decode_block_opcode_0x4,    ipvideo_decode_block_opcode_0x5,
897  ipvideo_decode_block_opcode_0x6_16, ipvideo_decode_block_opcode_0x7_16,
898  ipvideo_decode_block_opcode_0x8_16, ipvideo_decode_block_opcode_0x9_16,
899  ipvideo_decode_block_opcode_0xA_16, ipvideo_decode_block_opcode_0xB_16,
900  ipvideo_decode_block_opcode_0xC_16, ipvideo_decode_block_opcode_0xD_16,
901  ipvideo_decode_block_opcode_0xE_16, ipvideo_decode_block_opcode_0xF,
902 };
903 
904 static void ipvideo_decode_opcodes(IpvideoContext *s, AVFrame *frame)
905 {
906  int x, y;
907  unsigned char opcode;
908  int ret;
909  GetBitContext gb;
910 
911  bytestream2_skip(&s->stream_ptr, 14); /* data starts 14 bytes in */
912  if (!s->is_16bpp) {
913  /* this is PAL8, so make the palette available */
914  memcpy(frame->data[1], s->pal, AVPALETTE_SIZE);
915 
916  s->stride = frame->linesize[0];
917  } else {
918  s->stride = frame->linesize[0] >> 1;
919  s->mv_ptr = s->stream_ptr;
920  bytestream2_skip(&s->mv_ptr, bytestream2_get_le16(&s->stream_ptr));
921  }
922  s->line_inc = s->stride - 8;
923  s->upper_motion_limit_offset = (s->avctx->height - 8) * frame->linesize[0]
924  + (s->avctx->width - 8) * (1 + s->is_16bpp);
925 
926  init_get_bits(&gb, s->decoding_map, s->decoding_map_size * 8);
927  for (y = 0; y < s->avctx->height; y += 8) {
928  for (x = 0; x < s->avctx->width; x += 8) {
929  opcode = get_bits(&gb, 4);
930 
931  ff_tlog(s->avctx,
932  " block @ (%3d, %3d): encoding 0x%X, data ptr offset %d\n",
933  x, y, opcode, bytestream2_tell(&s->stream_ptr));
934 
935  if (!s->is_16bpp) {
936  s->pixel_ptr = frame->data[0] + x
937  + y*frame->linesize[0];
938  ret = ipvideo_decode_block[opcode](s, frame);
939  } else {
940  s->pixel_ptr = frame->data[0] + x*2
941  + y*frame->linesize[0];
942  ret = ipvideo_decode_block16[opcode](s, frame);
943  }
944  if (ret != 0) {
945  av_log(s->avctx, AV_LOG_ERROR, "decode problem on frame %d, @ block (%d, %d)\n",
946  s->avctx->frame_number, x, y);
947  return;
948  }
949  }
950  }
951  if (bytestream2_get_bytes_left(&s->stream_ptr) > 1) {
952  av_log(s->avctx, AV_LOG_DEBUG,
953  "decode finished with %d bytes left over\n",
954  bytestream2_get_bytes_left(&s->stream_ptr));
955  }
956 }
957 
958 static av_cold int ipvideo_decode_init(AVCodecContext *avctx)
959 {
960  IpvideoContext *s = avctx->priv_data;
961 
962  s->avctx = avctx;
963 
964  s->is_16bpp = avctx->bits_per_coded_sample == 16;
965  avctx->pix_fmt = s->is_16bpp ? AV_PIX_FMT_RGB555 : AV_PIX_FMT_PAL8;
966 
967  ff_hpeldsp_init(&s->hdsp, avctx->flags);
968 
969  s->last_frame = av_frame_alloc();
970  s->second_last_frame = av_frame_alloc();
971  if (!s->last_frame || !s->second_last_frame) {
972  av_frame_free(&s->last_frame);
973  av_frame_free(&s->second_last_frame);
974  return AVERROR(ENOMEM);
975  }
976 
977  return 0;
978 }
979 
980 static int ipvideo_decode_frame(AVCodecContext *avctx,
981  void *data, int *got_frame,
982  AVPacket *avpkt)
983 {
984  const uint8_t *buf = avpkt->data;
985  int buf_size = avpkt->size;
986  IpvideoContext *s = avctx->priv_data;
987  AVFrame *frame = data;
988  int ret;
989 
990  /* decoding map contains 4 bits of information per 8x8 block */
991  s->decoding_map_size = avctx->width * avctx->height / (8 * 8 * 2);
992 
993  /* compressed buffer needs to be large enough to at least hold an entire
994  * decoding map */
995  if (buf_size < s->decoding_map_size)
996  return buf_size;
997 
998  if (av_packet_get_side_data(avpkt, AV_PKT_DATA_PARAM_CHANGE, NULL)) {
999  av_frame_unref(s->last_frame);
1000  av_frame_unref(s->second_last_frame);
1001  }
1002 
1003  s->decoding_map = buf;
1004  bytestream2_init(&s->stream_ptr, buf + s->decoding_map_size,
1005  buf_size - s->decoding_map_size);
1006 
1007  if ((ret = ff_get_buffer(avctx, frame, AV_GET_BUFFER_FLAG_REF)) < 0)
1008  return ret;
1009 
1010  if (!s->is_16bpp) {
1011  const uint8_t *pal = av_packet_get_side_data(avpkt, AV_PKT_DATA_PALETTE, NULL);
1012  if (pal) {
1013  frame->palette_has_changed = 1;
1014  memcpy(s->pal, pal, AVPALETTE_SIZE);
1015  }
1016  }
1017 
1018  ipvideo_decode_opcodes(s, frame);
1019 
1020  *got_frame = 1;
1021 
1022  /* shuffle frames */
1023  av_frame_unref(s->second_last_frame);
1024  FFSWAP(AVFrame*, s->second_last_frame, s->last_frame);
1025  if ((ret = av_frame_ref(s->last_frame, frame)) < 0)
1026  return ret;
1027 
1028  /* report that the buffer was completely consumed */
1029  return buf_size;
1030 }
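A quick standalone check of the decoding-map sizing rule used above, one 4-bit opcode per 8x8 block, i.e. width * height / (8 * 8 * 2) bytes. The 320x200 frame size is just a typical Interplay MVE resolution, not something read from the stream.

    /* sketch_map_size.c -- illustrative only */
    #include <stdio.h>

    int main(void)
    {
        int width = 320, height = 200;              /* hypothetical frame size */
        int map_size = width * height / (8 * 8 * 2);

        printf("decoding map for %dx%d: %d bytes\n", width, height, map_size); /* 500 */
        return 0;
    }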
1031 
1032 static av_cold int ipvideo_decode_end(AVCodecContext *avctx)
1033 {
1034  IpvideoContext *s = avctx->priv_data;
1035 
1036  av_frame_free(&s->last_frame);
1037  av_frame_free(&s->second_last_frame);
1038 
1039  return 0;
1040 }
1041 
1042 AVCodec ff_interplay_video_decoder = {
1043  .name = "interplayvideo",
1044  .long_name = NULL_IF_CONFIG_SMALL("Interplay MVE video"),
1045  .type = AVMEDIA_TYPE_VIDEO,
1046  .id = AV_CODEC_ID_INTERPLAY_VIDEO,
1047  .priv_data_size = sizeof(IpvideoContext),
1048  .init = ipvideo_decode_init,
1049  .close = ipvideo_decode_end,
1050  .decode = ipvideo_decode_frame,
1051  .capabilities = CODEC_CAP_DR1 | CODEC_CAP_PARAM_CHANGE,
1052 };