interplayvideo.c
1 /*
2  * Interplay MVE Video Decoder
3  * Copyright (C) 2003 The FFmpeg project
4  *
5  * This file is part of FFmpeg.
6  *
7  * FFmpeg is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * FFmpeg is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with FFmpeg; if not, write to the Free Software
19  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
20  */
21 
22 /**
23  * @file
24  * Interplay MVE Video Decoder by Mike Melanson (melanson@pcisys.net)
25  * For more information about the Interplay MVE format, visit:
26  * http://www.pcisys.net/~melanson/codecs/interplay-mve.txt
27  * This code is written in such a way that the identifiers match up
28  * with the encoding descriptions in the document.
29  *
30  * This decoder presently only supports PAL8 and RGB555 output colorspaces.
31  *
32  * An Interplay video frame consists of 2 parts: The decoding map and
33  * the video data. A demuxer must load these 2 parts together in a single
34  * buffer before sending it through the stream to this decoder.
35  */
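/*
 * Packet layout, as parsed by ipvideo_decode_frame() below: each
 * demuxer-assembled packet begins with an 8-byte header followed by the
 * video data and the map(s):
 *   byte  0    frame format (0x06, 0x10 or 0x11)
 *   byte  1    send_buffer flag (non-zero: this frame is meant to be shown)
 *   bytes 2-3  video_data_size   (little-endian)
 *   bytes 4-5  decoding_map_size (little-endian)
 *   bytes 6-7  skip_map_size     (little-endian)
 * For format 0x11 the decoding map follows the video data and holds one
 * 4-bit opcode per 8x8 block, read LSB-first (hence BITSTREAM_READER_LE).
 */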
36 
37 #include <stdio.h>
38 #include <stdlib.h>
39 #include <string.h>
40 
41 #include "libavutil/intreadwrite.h"
42 
43 #define BITSTREAM_READER_LE
44 #include "avcodec.h"
45 #include "bytestream.h"
46 #include "get_bits.h"
47 #include "hpeldsp.h"
48 #include "internal.h"
49 
50 #define PALETTE_COUNT 256
51 
52 typedef struct IpvideoContext {
53 
54  AVCodecContext *avctx;
55  HpelDSPContext hdsp;
56  AVFrame *second_last_frame;
57  AVFrame *last_frame;
58 
59  /* For format 0x10 */
60  AVFrame *cur_decode_frame;
61  AVFrame *prev_decode_frame;
62 
63  const unsigned char *decoding_map;
64  int decoding_map_size;
65  const unsigned char *skip_map;
66  int skip_map_size;
67 
68  int is_16bpp;
69  GetByteContext stream_ptr, mv_ptr;
70  unsigned char *pixel_ptr;
71  int line_inc;
72  int stride;
73  int upper_motion_limit_offset;
74 
75  uint32_t pal[256];
76 } IpvideoContext;
77 
78 static int copy_from(IpvideoContext *s, AVFrame *src, AVFrame *dst, int delta_x, int delta_y)
79 {
80  int current_offset = s->pixel_ptr - dst->data[0];
81  int motion_offset = current_offset + delta_y * dst->linesize[0]
82  + delta_x * (1 + s->is_16bpp);
83  if (motion_offset < 0) {
84  av_log(s->avctx, AV_LOG_ERROR, "motion offset < 0 (%d)\n", motion_offset);
85  return AVERROR_INVALIDDATA;
86  } else if (motion_offset > s->upper_motion_limit_offset) {
87  av_log(s->avctx, AV_LOG_ERROR, "motion offset above limit (%d >= %d)\n",
88  motion_offset, s->upper_motion_limit_offset);
89  return AVERROR_INVALIDDATA;
90  }
91  if (!src->data[0]) {
92  av_log(s->avctx, AV_LOG_ERROR, "Invalid decode type, corrupted header?\n");
93  return AVERROR(EINVAL);
94  }
95  s->hdsp.put_pixels_tab[!s->is_16bpp][0](s->pixel_ptr, src->data[0] + motion_offset,
96  dst->linesize[0], 8);
97  return 0;
98 }
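/*
 * A worked example of the bounds check above, assuming an 8 bpp frame with
 * linesize 640: for the block at (x, y) = (16, 8), current_offset is
 * 8*640 + 16 = 5136; a motion vector of (delta_x, delta_y) = (4, -2) gives
 * motion_offset = 5136 - 2*640 + 4 = 3860, which must fall inside
 * [0, upper_motion_limit_offset] so that the 8x8-pixel copy cannot read
 * outside the reference frame.
 */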
99 
100 static int ipvideo_decode_block_opcode_0x0(IpvideoContext *s, AVFrame *frame)
101 {
102  return copy_from(s, s->last_frame, frame, 0, 0);
103 }
104 
105 static int ipvideo_decode_block_opcode_0x1(IpvideoContext *s, AVFrame *frame)
106 {
107  return copy_from(s, s->second_last_frame, frame, 0, 0);
108 }
109 
110 static int ipvideo_decode_block_opcode_0x2(IpvideoContext *s, AVFrame *frame)
111 {
112  unsigned char B;
113  int x, y;
114 
115  /* copy block from 2 frames ago using a motion vector; need 1 more byte */
116  if (!s->is_16bpp) {
117  B = bytestream2_get_byte(&s->stream_ptr);
118  } else {
119  B = bytestream2_get_byte(&s->mv_ptr);
120  }
121 
122  if (B < 56) {
123  x = 8 + (B % 7);
124  y = B / 7;
125  } else {
126  x = -14 + ((B - 56) % 29);
127  y = 8 + ((B - 56) / 29);
128  }
129 
130  ff_tlog(s->avctx, "motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
131  return copy_from(s, s->second_last_frame, frame, x, y);
132 }
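/*
 * The single motion byte B packs both vector components: for example
 * B = 16 (< 56) decodes to x = 8 + (16 % 7) = 10, y = 16 / 7 = 2, i.e.
 * copy from 10 pixels right and 2 down in the frame from two frames ago;
 * B >= 56 selects the extended range that also covers negative x offsets.
 */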
133 
134 static int ipvideo_decode_block_opcode_0x3(IpvideoContext *s, AVFrame *frame)
135 {
136  unsigned char B;
137  int x, y;
138 
139  /* copy 8x8 block from current frame from an up/left block */
140 
141  /* need 1 more byte for motion */
142  if (!s->is_16bpp) {
143  B = bytestream2_get_byte(&s->stream_ptr);
144  } else {
145  B = bytestream2_get_byte(&s->mv_ptr);
146  }
147 
148  if (B < 56) {
149  x = -(8 + (B % 7));
150  y = -(B / 7);
151  } else {
152  x = -(-14 + ((B - 56) % 29));
153  y = -( 8 + ((B - 56) / 29));
154  }
155 
156  ff_tlog(s->avctx, "motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
157  return copy_from(s, frame, frame, x, y);
158 }
159 
160 static int ipvideo_decode_block_opcode_0x4(IpvideoContext *s, AVFrame *frame)
161 {
162  int x, y;
163  unsigned char B, BL, BH;
164 
165  /* copy a block from the previous frame; need 1 more byte */
166  if (!s->is_16bpp) {
167  B = bytestream2_get_byte(&s->stream_ptr);
168  } else {
169  B = bytestream2_get_byte(&s->mv_ptr);
170  }
171 
172  BL = B & 0x0F;
173  BH = (B >> 4) & 0x0F;
174  x = -8 + BL;
175  y = -8 + BH;
176 
177  ff_tlog(s->avctx, "motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
178  return copy_from(s, s->last_frame, frame, x, y);
179 }
180 
181 static int ipvideo_decode_block_opcode_0x5(IpvideoContext *s, AVFrame *frame)
182 {
183  signed char x, y;
184 
185  /* copy a block from the previous frame using an expanded range;
186  * need 2 more bytes */
187  x = bytestream2_get_byte(&s->stream_ptr);
188  y = bytestream2_get_byte(&s->stream_ptr);
189 
190  ff_tlog(s->avctx, "motion bytes = %d, %d\n", x, y);
191  return copy_from(s, s->last_frame, frame, x, y);
192 }
193 
194 static int ipvideo_decode_block_opcode_0x6(IpvideoContext *s, AVFrame *frame)
195 {
196  /* mystery opcode? skip multiple blocks? */
197  av_log(s->avctx, AV_LOG_ERROR, "Help! Mystery opcode 0x6 seen\n");
198 
199  /* report success */
200  return 0;
201 }
202 
203 static int ipvideo_decode_block_opcode_0x7(IpvideoContext *s, AVFrame *frame)
204 {
205  int x, y;
206  unsigned char P[2];
207  unsigned int flags;
208 
209  if (bytestream2_get_bytes_left(&s->stream_ptr) < 4) {
210  av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0x7\n");
211  return AVERROR_INVALIDDATA;
212  }
213 
214  /* 2-color encoding */
215  P[0] = bytestream2_get_byte(&s->stream_ptr);
216  P[1] = bytestream2_get_byte(&s->stream_ptr);
217 
218  if (P[0] <= P[1]) {
219 
220  /* need 8 more bytes from the stream */
221  for (y = 0; y < 8; y++) {
222  flags = bytestream2_get_byte(&s->stream_ptr) | 0x100;
223  for (; flags != 1; flags >>= 1)
224  *s->pixel_ptr++ = P[flags & 1];
225  s->pixel_ptr += s->line_inc;
226  }
227 
228  } else {
229 
230  /* need 2 more bytes from the stream */
231  flags = bytestream2_get_le16(&s->stream_ptr);
232  for (y = 0; y < 8; y += 2) {
233  for (x = 0; x < 8; x += 2, flags >>= 1) {
234  s->pixel_ptr[x ] =
235  s->pixel_ptr[x + 1 ] =
236  s->pixel_ptr[x + s->stride] =
237  s->pixel_ptr[x + 1 + s->stride] = P[flags & 1];
238  }
239  s->pixel_ptr += s->stride * 2;
240  }
241  }
242 
243  /* report success */
244  return 0;
245 }
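/*
 * In the P[0] <= P[1] branch above, OR-ing each flag byte with 0x100 plants
 * a sentinel bit so the inner loop runs exactly eight times without a
 * separate column counter. A flag byte of 0xA5 (binary 10100101, consumed
 * LSB first) therefore expands to the row
 * P[1] P[0] P[1] P[0] P[0] P[1] P[0] P[1].
 */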
246 
247 static int ipvideo_decode_block_opcode_0x8(IpvideoContext *s, AVFrame *frame)
248 {
249  int x, y;
250  unsigned char P[4];
251  unsigned int flags = 0;
252 
253  if (bytestream2_get_bytes_left(&s->stream_ptr) < 12) {
254  av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0x8\n");
255  return AVERROR_INVALIDDATA;
256  }
257 
258  /* 2-color encoding for each 4x4 quadrant, or 2-color encoding on
259  * either top and bottom or left and right halves */
260  P[0] = bytestream2_get_byte(&s->stream_ptr);
261  P[1] = bytestream2_get_byte(&s->stream_ptr);
262 
263  if (P[0] <= P[1]) {
264  for (y = 0; y < 16; y++) {
265  // new values for each 4x4 block
266  if (!(y & 3)) {
267  if (y) {
268  P[0] = bytestream2_get_byte(&s->stream_ptr);
269  P[1] = bytestream2_get_byte(&s->stream_ptr);
270  }
271  flags = bytestream2_get_le16(&s->stream_ptr);
272  }
273 
274  for (x = 0; x < 4; x++, flags >>= 1)
275  *s->pixel_ptr++ = P[flags & 1];
276  s->pixel_ptr += s->stride - 4;
277  // switch to right half
278  if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
279  }
280 
281  } else {
282  flags = bytestream2_get_le32(&s->stream_ptr);
283  P[2] = bytestream2_get_byte(&s->stream_ptr);
284  P[3] = bytestream2_get_byte(&s->stream_ptr);
285 
286  if (P[2] <= P[3]) {
287 
288  /* vertical split; left & right halves are 2-color encoded */
289 
290  for (y = 0; y < 16; y++) {
291  for (x = 0; x < 4; x++, flags >>= 1)
292  *s->pixel_ptr++ = P[flags & 1];
293  s->pixel_ptr += s->stride - 4;
294  // switch to right half
295  if (y == 7) {
296  s->pixel_ptr -= 8 * s->stride - 4;
297  P[0] = P[2];
298  P[1] = P[3];
299  flags = bytestream2_get_le32(&s->stream_ptr);
300  }
301  }
302 
303  } else {
304 
305  /* horizontal split; top & bottom halves are 2-color encoded */
306 
307  for (y = 0; y < 8; y++) {
308  if (y == 4) {
309  P[0] = P[2];
310  P[1] = P[3];
311  flags = bytestream2_get_le32(&s->stream_ptr);
312  }
313 
314  for (x = 0; x < 8; x++, flags >>= 1)
315  *s->pixel_ptr++ = P[flags & 1];
316  s->pixel_ptr += s->line_inc;
317  }
318  }
319  }
320 
321  /* report success */
322  return 0;
323 }
324 
325 static int ipvideo_decode_block_opcode_0x9(IpvideoContext *s, AVFrame *frame)
326 {
327  int x, y;
328  unsigned char P[4];
329 
330  if (bytestream2_get_bytes_left(&s->stream_ptr) < 8) {
331  av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0x9\n");
332  return AVERROR_INVALIDDATA;
333  }
334 
335  /* 4-color encoding */
336  bytestream2_get_buffer(&s->stream_ptr, P, 4);
337 
338  if (P[0] <= P[1]) {
339  if (P[2] <= P[3]) {
340 
341  /* 1 of 4 colors for each pixel, need 16 more bytes */
342  for (y = 0; y < 8; y++) {
343  /* get the next set of 8 2-bit flags */
344  int flags = bytestream2_get_le16(&s->stream_ptr);
345  for (x = 0; x < 8; x++, flags >>= 2)
346  *s->pixel_ptr++ = P[flags & 0x03];
347  s->pixel_ptr += s->line_inc;
348  }
349 
350  } else {
351  uint32_t flags;
352 
353  /* 1 of 4 colors for each 2x2 block, need 4 more bytes */
354  flags = bytestream2_get_le32(&s->stream_ptr);
355 
356  for (y = 0; y < 8; y += 2) {
357  for (x = 0; x < 8; x += 2, flags >>= 2) {
358  s->pixel_ptr[x ] =
359  s->pixel_ptr[x + 1 ] =
360  s->pixel_ptr[x + s->stride] =
361  s->pixel_ptr[x + 1 + s->stride] = P[flags & 0x03];
362  }
363  s->pixel_ptr += s->stride * 2;
364  }
365 
366  }
367  } else {
368  uint64_t flags;
369 
370  /* 1 of 4 colors for each 2x1 or 1x2 block, need 8 more bytes */
371  flags = bytestream2_get_le64(&s->stream_ptr);
372  if (P[2] <= P[3]) {
373  for (y = 0; y < 8; y++) {
374  for (x = 0; x < 8; x += 2, flags >>= 2) {
375  s->pixel_ptr[x ] =
376  s->pixel_ptr[x + 1] = P[flags & 0x03];
377  }
378  s->pixel_ptr += s->stride;
379  }
380  } else {
381  for (y = 0; y < 8; y += 2) {
382  for (x = 0; x < 8; x++, flags >>= 2) {
383  s->pixel_ptr[x ] =
384  s->pixel_ptr[x + s->stride] = P[flags & 0x03];
385  }
386  s->pixel_ptr += s->stride * 2;
387  }
388  }
389  }
390 
391  /* report success */
392  return 0;
393 }
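/*
 * Summary of the branches above: the relative ordering of the four colours
 * selects the sub-mode. P[0] <= P[1] with P[2] <= P[3] is full 2 bits per
 * pixel (16 flag bytes); P[0] <= P[1] with P[2] > P[3] is one colour per
 * 2x2 block (4 flag bytes); P[0] > P[1] uses 8 flag bytes for either 2x1
 * or 1x2 blocks, depending on whether P[2] <= P[3].
 */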
394 
395 static int ipvideo_decode_block_opcode_0xA(IpvideoContext *s, AVFrame *frame)
396 {
397  int x, y;
398  unsigned char P[8];
399  int flags = 0;
400 
401  if (bytestream2_get_bytes_left(&s->stream_ptr) < 16) {
402  av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0xA\n");
403  return AVERROR_INVALIDDATA;
404  }
405 
406  bytestream2_get_buffer(&s->stream_ptr, P, 4);
407 
408  /* 4-color encoding for each 4x4 quadrant, or 4-color encoding on
409  * either top and bottom or left and right halves */
410  if (P[0] <= P[1]) {
411 
412  /* 4-color encoding for each quadrant; need 32 bytes */
413  for (y = 0; y < 16; y++) {
414  // new values for each 4x4 block
415  if (!(y & 3)) {
416  if (y) bytestream2_get_buffer(&s->stream_ptr, P, 4);
417  flags = bytestream2_get_le32(&s->stream_ptr);
418  }
419 
420  for (x = 0; x < 4; x++, flags >>= 2)
421  *s->pixel_ptr++ = P[flags & 0x03];
422 
423  s->pixel_ptr += s->stride - 4;
424  // switch to right half
425  if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
426  }
427 
428  } else {
429  // vertical split?
430  int vert;
431  uint64_t flags = bytestream2_get_le64(&s->stream_ptr);
432 
433  bytestream2_get_buffer(&s->stream_ptr, P + 4, 4);
434  vert = P[4] <= P[5];
435 
436  /* 4-color encoding for either left and right or top and bottom
437  * halves */
438 
439  for (y = 0; y < 16; y++) {
440  for (x = 0; x < 4; x++, flags >>= 2)
441  *s->pixel_ptr++ = P[flags & 0x03];
442 
443  if (vert) {
444  s->pixel_ptr += s->stride - 4;
445  // switch to right half
446  if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
447  } else if (y & 1) s->pixel_ptr += s->line_inc;
448 
449  // load values for second half
450  if (y == 7) {
451  memcpy(P, P + 4, 4);
452  flags = bytestream2_get_le64(&s->stream_ptr);
453  }
454  }
455  }
456 
457  /* report success */
458  return 0;
459 }
460 
461 static int ipvideo_decode_block_opcode_0xB(IpvideoContext *s, AVFrame *frame)
462 {
463  int y;
464 
465  /* 64-color encoding (each pixel in block is a different color) */
466  for (y = 0; y < 8; y++) {
467  bytestream2_get_buffer(&s->stream_ptr, s->pixel_ptr, 8);
468  s->pixel_ptr += s->stride;
469  }
470 
471  /* report success */
472  return 0;
473 }
474 
475 static int ipvideo_decode_block_opcode_0xC(IpvideoContext *s, AVFrame *frame)
476 {
477  int x, y;
478 
479  /* 16-color block encoding: each 2x2 block is a different color */
480  for (y = 0; y < 8; y += 2) {
481  for (x = 0; x < 8; x += 2) {
482  s->pixel_ptr[x ] =
483  s->pixel_ptr[x + 1 ] =
484  s->pixel_ptr[x + s->stride] =
485  s->pixel_ptr[x + 1 + s->stride] = bytestream2_get_byte(&s->stream_ptr);
486  }
487  s->pixel_ptr += s->stride * 2;
488  }
489 
490  /* report success */
491  return 0;
492 }
493 
494 static int ipvideo_decode_block_opcode_0xD(IpvideoContext *s, AVFrame *frame)
495 {
496  int y;
497  unsigned char P[2];
498 
499  if (bytestream2_get_bytes_left(&s->stream_ptr) < 4) {
500  av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0xD\n");
501  return AVERROR_INVALIDDATA;
502  }
503 
504  /* 4-color block encoding: each 4x4 block is a different color */
505  for (y = 0; y < 8; y++) {
506  if (!(y & 3)) {
507  P[0] = bytestream2_get_byte(&s->stream_ptr);
508  P[1] = bytestream2_get_byte(&s->stream_ptr);
509  }
510  memset(s->pixel_ptr, P[0], 4);
511  memset(s->pixel_ptr + 4, P[1], 4);
512  s->pixel_ptr += s->stride;
513  }
514 
515  /* report success */
516  return 0;
517 }
518 
519 static int ipvideo_decode_block_opcode_0xE(IpvideoContext *s, AVFrame *frame)
520 {
521  int y;
522  unsigned char pix;
523 
524  /* 1-color encoding: the whole block is 1 solid color */
525  pix = bytestream2_get_byte(&s->stream_ptr);
526 
527  for (y = 0; y < 8; y++) {
528  memset(s->pixel_ptr, pix, 8);
529  s->pixel_ptr += s->stride;
530  }
531 
532  /* report success */
533  return 0;
534 }
535 
536 static int ipvideo_decode_block_opcode_0xF(IpvideoContext *s, AVFrame *frame)
537 {
538  int x, y;
539  unsigned char sample[2];
540 
541  /* dithered encoding */
542  sample[0] = bytestream2_get_byte(&s->stream_ptr);
543  sample[1] = bytestream2_get_byte(&s->stream_ptr);
544 
545  for (y = 0; y < 8; y++) {
546  for (x = 0; x < 8; x += 2) {
547  *s->pixel_ptr++ = sample[ y & 1 ];
548  *s->pixel_ptr++ = sample[!(y & 1)];
549  }
550  s->pixel_ptr += s->line_inc;
551  }
552 
553  /* report success */
554  return 0;
555 }
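/*
 * Opcode 0xF paints a two-colour checkerboard: even rows alternate
 * sample[0] sample[1] ..., odd rows start with sample[1], which produces
 * the "dithered" pattern.
 */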
556 
557 static int ipvideo_decode_block_opcode_0x6_16(IpvideoContext *s, AVFrame *frame)
558 {
559  signed char x, y;
560 
561  /* copy a block from the second last frame using an expanded range */
562  x = bytestream2_get_byte(&s->stream_ptr);
563  y = bytestream2_get_byte(&s->stream_ptr);
564 
565  ff_tlog(s->avctx, "motion bytes = %d, %d\n", x, y);
566  return copy_from(s, s->second_last_frame, frame, x, y);
567 }
568 
569 static int ipvideo_decode_block_opcode_0x7_16(IpvideoContext *s, AVFrame *frame)
570 {
571  int x, y;
572  uint16_t P[2];
573  unsigned int flags;
574  uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
575 
576  /* 2-color encoding */
577  P[0] = bytestream2_get_le16(&s->stream_ptr);
578  P[1] = bytestream2_get_le16(&s->stream_ptr);
579 
580  if (!(P[0] & 0x8000)) {
581 
582  for (y = 0; y < 8; y++) {
583  flags = bytestream2_get_byte(&s->stream_ptr) | 0x100;
584  for (; flags != 1; flags >>= 1)
585  *pixel_ptr++ = P[flags & 1];
586  pixel_ptr += s->line_inc;
587  }
588 
589  } else {
590 
591  flags = bytestream2_get_le16(&s->stream_ptr);
592  for (y = 0; y < 8; y += 2) {
593  for (x = 0; x < 8; x += 2, flags >>= 1) {
594  pixel_ptr[x ] =
595  pixel_ptr[x + 1 ] =
596  pixel_ptr[x + s->stride] =
597  pixel_ptr[x + 1 + s->stride] = P[flags & 1];
598  }
599  pixel_ptr += s->stride * 2;
600  }
601  }
602 
603  return 0;
604 }
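/*
 * The 16 bpp variants cannot use the P[0] <= P[1] ordering to pick a
 * sub-mode, since any RGB555 value is a legal colour; RGB555 only occupies
 * the low 15 bits, so bit 15 of the first colour word serves as the mode
 * flag instead, mirroring the 8 bpp logic above.
 */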
605 
606 static int ipvideo_decode_block_opcode_0x8_16(IpvideoContext *s, AVFrame *frame)
607 {
608  int x, y;
609  uint16_t P[4];
610  unsigned int flags = 0;
611  uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
612 
613  /* 2-color encoding for each 4x4 quadrant, or 2-color encoding on
614  * either top and bottom or left and right halves */
615  P[0] = bytestream2_get_le16(&s->stream_ptr);
616  P[1] = bytestream2_get_le16(&s->stream_ptr);
617 
618  if (!(P[0] & 0x8000)) {
619 
620  for (y = 0; y < 16; y++) {
621  // new values for each 4x4 block
622  if (!(y & 3)) {
623  if (y) {
624  P[0] = bytestream2_get_le16(&s->stream_ptr);
625  P[1] = bytestream2_get_le16(&s->stream_ptr);
626  }
627  flags = bytestream2_get_le16(&s->stream_ptr);
628  }
629 
630  for (x = 0; x < 4; x++, flags >>= 1)
631  *pixel_ptr++ = P[flags & 1];
632  pixel_ptr += s->stride - 4;
633  // switch to right half
634  if (y == 7) pixel_ptr -= 8 * s->stride - 4;
635  }
636 
637  } else {
638 
639  flags = bytestream2_get_le32(&s->stream_ptr);
640  P[2] = bytestream2_get_le16(&s->stream_ptr);
641  P[3] = bytestream2_get_le16(&s->stream_ptr);
642 
643  if (!(P[2] & 0x8000)) {
644 
645  /* vertical split; left & right halves are 2-color encoded */
646 
647  for (y = 0; y < 16; y++) {
648  for (x = 0; x < 4; x++, flags >>= 1)
649  *pixel_ptr++ = P[flags & 1];
650  pixel_ptr += s->stride - 4;
651  // switch to right half
652  if (y == 7) {
653  pixel_ptr -= 8 * s->stride - 4;
654  P[0] = P[2];
655  P[1] = P[3];
656  flags = bytestream2_get_le32(&s->stream_ptr);
657  }
658  }
659 
660  } else {
661 
662  /* horizontal split; top & bottom halves are 2-color encoded */
663 
664  for (y = 0; y < 8; y++) {
665  if (y == 4) {
666  P[0] = P[2];
667  P[1] = P[3];
668  flags = bytestream2_get_le32(&s->stream_ptr);
669  }
670 
671  for (x = 0; x < 8; x++, flags >>= 1)
672  *pixel_ptr++ = P[flags & 1];
673  pixel_ptr += s->line_inc;
674  }
675  }
676  }
677 
678  /* report success */
679  return 0;
680 }
681 
682 static int ipvideo_decode_block_opcode_0x9_16(IpvideoContext *s, AVFrame *frame)
683 {
684  int x, y;
685  uint16_t P[4];
686  uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
687 
688  /* 4-color encoding */
689  for (x = 0; x < 4; x++)
690  P[x] = bytestream2_get_le16(&s->stream_ptr);
691 
692  if (!(P[0] & 0x8000)) {
693  if (!(P[2] & 0x8000)) {
694 
695  /* 1 of 4 colors for each pixel */
696  for (y = 0; y < 8; y++) {
697  /* get the next set of 8 2-bit flags */
698  int flags = bytestream2_get_le16(&s->stream_ptr);
699  for (x = 0; x < 8; x++, flags >>= 2)
700  *pixel_ptr++ = P[flags & 0x03];
701  pixel_ptr += s->line_inc;
702  }
703 
704  } else {
705  uint32_t flags;
706 
707  /* 1 of 4 colors for each 2x2 block */
708  flags = bytestream2_get_le32(&s->stream_ptr);
709 
710  for (y = 0; y < 8; y += 2) {
711  for (x = 0; x < 8; x += 2, flags >>= 2) {
712  pixel_ptr[x ] =
713  pixel_ptr[x + 1 ] =
714  pixel_ptr[x + s->stride] =
715  pixel_ptr[x + 1 + s->stride] = P[flags & 0x03];
716  }
717  pixel_ptr += s->stride * 2;
718  }
719 
720  }
721  } else {
722  uint64_t flags;
723 
724  /* 1 of 4 colors for each 2x1 or 1x2 block */
725  flags = bytestream2_get_le64(&s->stream_ptr);
726  if (!(P[2] & 0x8000)) {
727  for (y = 0; y < 8; y++) {
728  for (x = 0; x < 8; x += 2, flags >>= 2) {
729  pixel_ptr[x ] =
730  pixel_ptr[x + 1] = P[flags & 0x03];
731  }
732  pixel_ptr += s->stride;
733  }
734  } else {
735  for (y = 0; y < 8; y += 2) {
736  for (x = 0; x < 8; x++, flags >>= 2) {
737  pixel_ptr[x ] =
738  pixel_ptr[x + s->stride] = P[flags & 0x03];
739  }
740  pixel_ptr += s->stride * 2;
741  }
742  }
743  }
744 
745  /* report success */
746  return 0;
747 }
748 
749 static int ipvideo_decode_block_opcode_0xA_16(IpvideoContext *s, AVFrame *frame)
750 {
751  int x, y;
752  uint16_t P[8];
753  int flags = 0;
754  uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
755 
756  for (x = 0; x < 4; x++)
757  P[x] = bytestream2_get_le16(&s->stream_ptr);
758 
759  /* 4-color encoding for each 4x4 quadrant, or 4-color encoding on
760  * either top and bottom or left and right halves */
761  if (!(P[0] & 0x8000)) {
762 
763  /* 4-color encoding for each quadrant */
764  for (y = 0; y < 16; y++) {
765  // new values for each 4x4 block
766  if (!(y & 3)) {
767  if (y)
768  for (x = 0; x < 4; x++)
769  P[x] = bytestream2_get_le16(&s->stream_ptr);
770  flags = bytestream2_get_le32(&s->stream_ptr);
771  }
772 
773  for (x = 0; x < 4; x++, flags >>= 2)
774  *pixel_ptr++ = P[flags & 0x03];
775 
776  pixel_ptr += s->stride - 4;
777  // switch to right half
778  if (y == 7) pixel_ptr -= 8 * s->stride - 4;
779  }
780 
781  } else {
782  // vertical split?
783  int vert;
784  uint64_t flags = bytestream2_get_le64(&s->stream_ptr);
785 
786  for (x = 4; x < 8; x++)
787  P[x] = bytestream2_get_le16(&s->stream_ptr);
788  vert = !(P[4] & 0x8000);
789 
790  /* 4-color encoding for either left and right or top and bottom
791  * halves */
792 
793  for (y = 0; y < 16; y++) {
794  for (x = 0; x < 4; x++, flags >>= 2)
795  *pixel_ptr++ = P[flags & 0x03];
796 
797  if (vert) {
798  pixel_ptr += s->stride - 4;
799  // switch to right half
800  if (y == 7) pixel_ptr -= 8 * s->stride - 4;
801  } else if (y & 1) pixel_ptr += s->line_inc;
802 
803  // load values for second half
804  if (y == 7) {
805  memcpy(P, P + 4, 8);
806  flags = bytestream2_get_le64(&s->stream_ptr);
807  }
808  }
809  }
810 
811  /* report success */
812  return 0;
813 }
814 
815 static int ipvideo_decode_block_opcode_0xB_16(IpvideoContext *s, AVFrame *frame)
816 {
817  int x, y;
818  uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
819 
820  /* 64-color encoding (each pixel in block is a different color) */
821  for (y = 0; y < 8; y++) {
822  for (x = 0; x < 8; x++)
823  pixel_ptr[x] = bytestream2_get_le16(&s->stream_ptr);
824  pixel_ptr += s->stride;
825  }
826 
827  /* report success */
828  return 0;
829 }
830 
831 static int ipvideo_decode_block_opcode_0xC_16(IpvideoContext *s, AVFrame *frame)
832 {
833  int x, y;
834  uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
835 
836  /* 16-color block encoding: each 2x2 block is a different color */
837  for (y = 0; y < 8; y += 2) {
838  for (x = 0; x < 8; x += 2) {
839  pixel_ptr[x ] =
840  pixel_ptr[x + 1 ] =
841  pixel_ptr[x + s->stride] =
842  pixel_ptr[x + 1 + s->stride] = bytestream2_get_le16(&s->stream_ptr);
843  }
844  pixel_ptr += s->stride * 2;
845  }
846 
847  /* report success */
848  return 0;
849 }
850 
851 static int ipvideo_decode_block_opcode_0xD_16(IpvideoContext *s, AVFrame *frame)
852 {
853  int x, y;
854  uint16_t P[2];
855  uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
856 
857  /* 4-color block encoding: each 4x4 block is a different color */
858  for (y = 0; y < 8; y++) {
859  if (!(y & 3)) {
860  P[0] = bytestream2_get_le16(&s->stream_ptr);
861  P[1] = bytestream2_get_le16(&s->stream_ptr);
862  }
863  for (x = 0; x < 8; x++)
864  pixel_ptr[x] = P[x >> 2];
865  pixel_ptr += s->stride;
866  }
867 
868  /* report success */
869  return 0;
870 }
871 
872 static int ipvideo_decode_block_opcode_0xE_16(IpvideoContext *s, AVFrame *frame)
873 {
874  int x, y;
875  uint16_t pix;
876  uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
877 
878  /* 1-color encoding: the whole block is 1 solid color */
879  pix = bytestream2_get_le16(&s->stream_ptr);
880 
881  for (y = 0; y < 8; y++) {
882  for (x = 0; x < 8; x++)
883  pixel_ptr[x] = pix;
884  pixel_ptr += s->stride;
885  }
886 
887  /* report success */
888  return 0;
889 }
890 
891 static int (* const ipvideo_decode_block[])(IpvideoContext *s, AVFrame *frame) = {
892  ipvideo_decode_block_opcode_0x0, ipvideo_decode_block_opcode_0x1,
893  ipvideo_decode_block_opcode_0x2, ipvideo_decode_block_opcode_0x3,
894  ipvideo_decode_block_opcode_0x4, ipvideo_decode_block_opcode_0x5,
895  ipvideo_decode_block_opcode_0x6, ipvideo_decode_block_opcode_0x7,
896  ipvideo_decode_block_opcode_0x8, ipvideo_decode_block_opcode_0x9,
897  ipvideo_decode_block_opcode_0xA, ipvideo_decode_block_opcode_0xB,
898  ipvideo_decode_block_opcode_0xC, ipvideo_decode_block_opcode_0xD,
899  ipvideo_decode_block_opcode_0xE, ipvideo_decode_block_opcode_0xF,
900 };
901 
902 static int (* const ipvideo_decode_block16[])(IpvideoContext *s, AVFrame *frame) = {
903  ipvideo_decode_block_opcode_0x0, ipvideo_decode_block_opcode_0x1,
904  ipvideo_decode_block_opcode_0x2, ipvideo_decode_block_opcode_0x3,
905  ipvideo_decode_block_opcode_0x4, ipvideo_decode_block_opcode_0x5,
906  ipvideo_decode_block_opcode_0x6_16, ipvideo_decode_block_opcode_0x7_16,
907  ipvideo_decode_block_opcode_0x8_16, ipvideo_decode_block_opcode_0x9_16,
908  ipvideo_decode_block_opcode_0xA_16, ipvideo_decode_block_opcode_0xB_16,
909  ipvideo_decode_block_opcode_0xC_16, ipvideo_decode_block_opcode_0xD_16,
910  ipvideo_decode_block_opcode_0xE_16, ipvideo_decode_block_opcode_0x1,
911 };
912 
913 static void ipvideo_format_06_firstpass(IpvideoContext *s, AVFrame *frame, int16_t opcode)
914 {
915  int line;
916 
917  if (!opcode) {
918  for (line = 0; line < 8; ++line) {
919  bytestream2_get_buffer(&s->stream_ptr, s->pixel_ptr, 8);
920  s->pixel_ptr += s->stride;
921  }
922  } else {
923  /* Don't try to copy second_last_frame data on the first frames */
924  if (s->avctx->frame_number > 2)
925  copy_from(s, s->second_last_frame, frame, 0, 0);
926  }
927 }
928 
929 static void ipvideo_format_06_secondpass(IpvideoContext *s, AVFrame *frame, int16_t opcode)
930 {
931  int off_x, off_y;
932 
933  if (opcode < 0) {
934  off_x = ((uint16_t)opcode - 0xC000) % frame->linesize[0];
935  off_y = ((uint16_t)opcode - 0xC000) / frame->linesize[0];
936  copy_from(s, s->last_frame, frame, off_x, off_y);
937  } else if (opcode > 0) {
938  off_x = ((uint16_t)opcode - 0x4000) % frame->linesize[0];
939  off_y = ((uint16_t)opcode - 0x4000) / frame->linesize[0];
940  copy_from(s, frame, frame, off_x, off_y);
941  }
942 }
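/*
 * Format 0x06 second-pass opcodes are signed 16-bit block references: a
 * negative opcode copies from the previous frame at offset
 * (uint16_t)opcode - 0xC000, a positive one copies from already-decoded
 * pixels of the current frame at offset opcode - 0x4000, with the offset
 * split into (off_x, off_y) by the frame's linesize. For example, with
 * linesize 640, opcode 0x4285 yields offset 0x285 = 645, i.e.
 * (off_x, off_y) = (5, 1).
 */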
943 
944 static void (* const ipvideo_format_06_passes[])(IpvideoContext *s, AVFrame *frame, int16_t op) = {
945  ipvideo_format_06_firstpass, ipvideo_format_06_secondpass,
946 };
947 
948 static void ipvideo_decode_format_06_opcodes(IpvideoContext *s, AVFrame *frame)
949 {
950  int pass, x, y;
951  int16_t opcode;
952  GetByteContext decoding_map_ptr;
953 
954  /* this is PAL8, so make the palette available */
955  memcpy(frame->data[1], s->pal, AVPALETTE_SIZE);
956  s->stride = frame->linesize[0];
957 
958  s->line_inc = s->stride - 8;
959  s->upper_motion_limit_offset = (s->avctx->height - 8) * frame->linesize[0]
960  + (s->avctx->width - 8) * (1 + s->is_16bpp);
961 
962  bytestream2_init(&decoding_map_ptr, s->decoding_map, s->decoding_map_size);
963 
964  for (pass = 0; pass < 2; ++pass) {
965  bytestream2_seek(&decoding_map_ptr, 0, SEEK_SET);
966  for (y = 0; y < s->avctx->height; y += 8) {
967  for (x = 0; x < s->avctx->width; x += 8) {
968  opcode = bytestream2_get_le16(&decoding_map_ptr);
969 
970  ff_tlog(s->avctx,
971  " block @ (%3d, %3d): opcode 0x%X, data ptr offset %d\n",
972  x, y, opcode, bytestream2_tell(&s->stream_ptr));
973 
974  s->pixel_ptr = frame->data[0] + x + y * frame->linesize[0];
975  ipvideo_format_06_passes[pass](s, frame, opcode);
976  }
977  }
978  }
979 
980  if (bytestream2_get_bytes_left(&s->stream_ptr) > 1) {
981  av_log(s->avctx, AV_LOG_DEBUG,
982  "decode finished with %d bytes left over\n",
983  bytestream2_get_bytes_left(&s->stream_ptr));
984  }
985 }
986 
987 static void ipvideo_format_10_firstpass(IpvideoContext *s, AVFrame *frame, int16_t opcode)
988 {
989  int line;
990 
991  if (!opcode) {
992  for (line = 0; line < 8; ++line) {
993  bytestream2_get_buffer(&s->stream_ptr, s->pixel_ptr, 8);
994  s->pixel_ptr += s->stride;
995  }
996  }
997 }
998 
999 static void ipvideo_format_10_secondpass(IpvideoContext *s, AVFrame *frame, int16_t opcode)
1000 {
1001  int off_x, off_y;
1002 
1003  if (opcode < 0) {
1004  off_x = ((uint16_t)opcode - 0xC000) % s->cur_decode_frame->linesize[0];
1005  off_y = ((uint16_t)opcode - 0xC000) / s->cur_decode_frame->linesize[0];
1006  copy_from(s, s->prev_decode_frame, s->cur_decode_frame, off_x, off_y);
1007  } else if (opcode > 0) {
1008  off_x = ((uint16_t)opcode - 0x4000) % s->cur_decode_frame->linesize[0];
1009  off_y = ((uint16_t)opcode - 0x4000) / s->cur_decode_frame->linesize[0];
1010  copy_from(s, s->cur_decode_frame, s->cur_decode_frame, off_x, off_y);
1011  }
1012 }
1013 
1014 static void (* const ipvideo_format_10_passes[])(IpvideoContext *s, AVFrame *frame, int16_t op) = {
1015  ipvideo_format_10_firstpass, ipvideo_format_10_secondpass,
1016 };
1017 
1018 static void ipvideo_decode_format_10_opcodes(IpvideoContext *s, AVFrame *frame)
1019 {
1020  int pass, x, y, changed_block;
1021  int16_t opcode, skip;
1022  GetByteContext decoding_map_ptr;
1023  GetByteContext skip_map_ptr;
1024 
1025  bytestream2_skip(&s->stream_ptr, 14); /* data starts 14 bytes in */
1026 
1027  /* this is PAL8, so make the palette available */
1028  memcpy(frame->data[1], s->pal, AVPALETTE_SIZE);
1029  s->stride = frame->linesize[0];
1030 
1031  s->line_inc = s->stride - 8;
1032  s->upper_motion_limit_offset = (s->avctx->height - 8) * frame->linesize[0]
1033  + (s->avctx->width - 8) * (1 + s->is_16bpp);
1034 
1035  bytestream2_init(&decoding_map_ptr, s->decoding_map, s->decoding_map_size);
1036  bytestream2_init(&skip_map_ptr, s->skip_map, s->skip_map_size);
1037 
1038  for (pass = 0; pass < 2; ++pass) {
1039  bytestream2_seek(&decoding_map_ptr, 0, SEEK_SET);
1040  bytestream2_seek(&skip_map_ptr, 0, SEEK_SET);
1041  skip = bytestream2_get_le16(&skip_map_ptr);
1042 
1043  for (y = 0; y < s->avctx->height; y += 8) {
1044  for (x = 0; x < s->avctx->width; x += 8) {
1045  s->pixel_ptr = s->cur_decode_frame->data[0] + x + y * s->cur_decode_frame->linesize[0];
1046 
1047  while (skip <= 0) {
1048  if (skip != -0x8000 && skip) {
1049  opcode = bytestream2_get_le16(&decoding_map_ptr);
1050  ipvideo_format_10_passes[pass](s, frame, opcode);
1051  break;
1052  }
1053  if (bytestream2_get_bytes_left(&skip_map_ptr) < 2)
1054  return;
1055  skip = bytestream2_get_le16(&skip_map_ptr);
1056  }
1057  skip *= 2;
1058  }
1059  }
1060  }
1061 
1062  bytestream2_seek(&skip_map_ptr, 0, SEEK_SET);
1063  skip = bytestream2_get_le16(&skip_map_ptr);
1064  for (y = 0; y < s->avctx->height; y += 8) {
1065  for (x = 0; x < s->avctx->width; x += 8) {
1066  changed_block = 0;
1067  s->pixel_ptr = frame->data[0] + x + y*frame->linesize[0];
1068 
1069  while (skip <= 0) {
1070  if (skip != -0x8000 && skip) {
1071  changed_block = 1;
1072  break;
1073  }
1074  if (bytestream2_get_bytes_left(&skip_map_ptr) < 2)
1075  return;
1076  skip = bytestream2_get_le16(&skip_map_ptr);
1077  }
1078 
1079  if (changed_block) {
1080  copy_from(s, s->cur_decode_frame, frame, 0, 0);
1081  } else {
1082  /* Don't try to copy last_frame data on the first frame */
1083  if (s->avctx->frame_number)
1084  copy_from(s, s->last_frame, frame, 0, 0);
1085  }
1086  skip *= 2;
1087  }
1088  }
1089 
1090  FFSWAP(AVFrame*, s->prev_decode_frame, s->cur_decode_frame);
1091 
1092  if (bytestream2_get_bytes_left(&s->stream_ptr) > 1) {
1093  av_log(s->avctx, AV_LOG_DEBUG,
1094  "decode finished with %d bytes left over\n",
1095  bytestream2_get_bytes_left(&s->stream_ptr));
1096  }
1097 }
1098 
1099 static void ipvideo_decode_format_11_opcodes(IpvideoContext *s, AVFrame *frame)
1100 {
1101  int x, y;
1102  unsigned char opcode;
1103  int ret;
1104  GetBitContext gb;
1105 
1106  bytestream2_skip(&s->stream_ptr, 14); /* data starts 14 bytes in */
1107  if (!s->is_16bpp) {
1108  /* this is PAL8, so make the palette available */
1109  memcpy(frame->data[1], s->pal, AVPALETTE_SIZE);
1110 
1111  s->stride = frame->linesize[0];
1112  } else {
1113  s->stride = frame->linesize[0] >> 1;
1114  s->mv_ptr = s->stream_ptr;
1115  bytestream2_skip(&s->mv_ptr, bytestream2_get_le16(&s->stream_ptr));
1116  }
1117  s->line_inc = s->stride - 8;
1118  s->upper_motion_limit_offset = (s->avctx->height - 8) * frame->linesize[0]
1119  + (s->avctx->width - 8) * (1 + s->is_16bpp);
1120 
1121  init_get_bits(&gb, s->decoding_map, s->decoding_map_size * 8);
1122  for (y = 0; y < s->avctx->height; y += 8) {
1123  for (x = 0; x < s->avctx->width; x += 8) {
1124  if (get_bits_left(&gb) < 4)
1125  return;
1126  opcode = get_bits(&gb, 4);
1127 
1128  ff_tlog(s->avctx,
1129  " block @ (%3d, %3d): encoding 0x%X, data ptr offset %d\n",
1130  x, y, opcode, bytestream2_tell(&s->stream_ptr));
1131 
1132  if (!s->is_16bpp) {
1133  s->pixel_ptr = frame->data[0] + x
1134  + y*frame->linesize[0];
1135  ret = ipvideo_decode_block[opcode](s, frame);
1136  } else {
1137  s->pixel_ptr = frame->data[0] + x*2
1138  + y*frame->linesize[0];
1139  ret = ipvideo_decode_block16[opcode](s, frame);
1140  }
1141  if (ret != 0) {
1142  av_log(s->avctx, AV_LOG_ERROR, "decode problem on frame %d, @ block (%d, %d)\n",
1143  s->avctx->frame_number, x, y);
1144  return;
1145  }
1146  }
1147  }
1148  if (bytestream2_get_bytes_left(&s->stream_ptr) > 1) {
1149  av_log(s->avctx, AV_LOG_DEBUG,
1150  "decode finished with %d bytes left over\n",
1151  bytestream2_get_bytes_left(&s->stream_ptr));
1152  }
1153 }
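/*
 * For format 0x11 the decoding map supplies one 4-bit opcode per 8x8 block,
 * walked left to right, top to bottom. Each opcode indexes
 * ipvideo_decode_block[] (or ipvideo_decode_block16[] for RGB555) and the
 * selected handler consumes its own parameters from stream_ptr (motion
 * bytes come from mv_ptr in the 16 bpp case).
 */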
1154 
1155 static av_cold int ipvideo_decode_init(AVCodecContext *avctx)
1156 {
1157  IpvideoContext *s = avctx->priv_data;
1158  int ret;
1159 
1160  s->avctx = avctx;
1161 
1162  s->is_16bpp = avctx->bits_per_coded_sample == 16;
1163  avctx->pix_fmt = s->is_16bpp ? AV_PIX_FMT_RGB555 : AV_PIX_FMT_PAL8;
1164 
1165  ff_hpeldsp_init(&s->hdsp, avctx->flags);
1166 
1167  s->last_frame = av_frame_alloc();
1168  s->second_last_frame = av_frame_alloc();
1169  s->cur_decode_frame = av_frame_alloc();
1170  s->prev_decode_frame = av_frame_alloc();
1171  if (!s->last_frame || !s->second_last_frame ||
1172  !s->cur_decode_frame || !s->prev_decode_frame) {
1173  ret = AVERROR(ENOMEM);
1174  goto error;
1175  }
1176 
1177  s->cur_decode_frame->width = avctx->width;
1178  s->prev_decode_frame->width = avctx->width;
1179  s->cur_decode_frame->height = avctx->height;
1180  s->prev_decode_frame->height = avctx->height;
1181  s->cur_decode_frame->format = avctx->pix_fmt;
1182  s->prev_decode_frame->format = avctx->pix_fmt;
1183 
1184  ret = ff_get_buffer(avctx, s->cur_decode_frame, 0);
1185  if (ret < 0)
1186  goto error;
1187 
1188  ret = ff_get_buffer(avctx, s->prev_decode_frame, 0);
1189  if (ret < 0)
1190  goto error;
1191 
1192  return 0;
1193 error:
1194  av_frame_free(&s->last_frame);
1195  av_frame_free(&s->second_last_frame);
1196  av_frame_free(&s->cur_decode_frame);
1197  av_frame_free(&s->prev_decode_frame);
1198  return ret;
1199 }
1200 
1201 static int ipvideo_decode_frame(AVCodecContext *avctx,
1202  void *data, int *got_frame,
1203  AVPacket *avpkt)
1204 {
1205  const uint8_t *buf = avpkt->data;
1206  int buf_size = avpkt->size;
1207  IpvideoContext *s = avctx->priv_data;
1208  AVFrame *frame = data;
1209  int ret;
1210  int send_buffer;
1211  int frame_format;
1212  int video_data_size;
1213 
1214  if (av_packet_get_side_data(avpkt, AV_PKT_DATA_PARAM_CHANGE, NULL)) {
1215  av_frame_unref(s->last_frame);
1216  av_frame_unref(s->second_last_frame);
1217  av_frame_unref(s->cur_decode_frame);
1218  av_frame_unref(s->prev_decode_frame);
1219  }
1220 
1221  if (!s->cur_decode_frame->data[0]) {
1222  ret = ff_get_buffer(avctx, s->cur_decode_frame, 0);
1223  if (ret < 0)
1224  return ret;
1225 
1226  ret = ff_get_buffer(avctx, s->prev_decode_frame, 0);
1227  if (ret < 0) {
1228  av_frame_unref(s->cur_decode_frame);
1229  return ret;
1230  }
1231  }
1232 
1233  if (buf_size < 8)
1234  return AVERROR_INVALIDDATA;
1235 
1236  frame_format = AV_RL8(buf);
1237  send_buffer = AV_RL8(buf + 1);
1238  video_data_size = AV_RL16(buf + 2);
1239  s->decoding_map_size = AV_RL16(buf + 4);
1240  s->skip_map_size = AV_RL16(buf + 6);
1241 
1242  switch(frame_format) {
1243  case 0x06:
1244  if (s->decoding_map_size) {
1245  av_log(avctx, AV_LOG_ERROR, "Decoding map for format 0x06\n");
1246  return AVERROR_INVALIDDATA;
1247  }
1248 
1249  if (s->skip_map_size) {
1250  av_log(avctx, AV_LOG_ERROR, "Skip map for format 0x06\n");
1251  return AVERROR_INVALIDDATA;
1252  }
1253 
1254  if (s->is_16bpp) {
1255  av_log(avctx, AV_LOG_ERROR, "Video format 0x06 does not support 16bpp movies\n");
1256  return AVERROR_INVALIDDATA;
1257  }
1258 
1259  /* Decoding map for 0x06 frame format is at the top of pixeldata */
1260  s->decoding_map_size = ((s->avctx->width / 8) * (s->avctx->height / 8)) * 2;
1261  s->decoding_map = buf + 8 + 14; /* 14 bits of op data */
1262  video_data_size -= s->decoding_map_size + 14;
1263  if (video_data_size <= 0)
1264  return AVERROR_INVALIDDATA;
1265 
1266  if (buf_size < 8 + s->decoding_map_size + 14 + video_data_size)
1267  return AVERROR_INVALIDDATA;
1268 
1269  bytestream2_init(&s->stream_ptr, buf + 8 + s->decoding_map_size + 14, video_data_size);
1270 
1271  break;
1272 
1273  case 0x10:
1274  if (! s->decoding_map_size) {
1275  av_log(avctx, AV_LOG_ERROR, "Empty decoding map for format 0x10\n");
1276  return AVERROR_INVALIDDATA;
1277  }
1278 
1279  if (! s->skip_map_size) {
1280  av_log(avctx, AV_LOG_ERROR, "Empty skip map for format 0x10\n");
1281  return AVERROR_INVALIDDATA;
1282  }
1283 
1284  if (s->is_16bpp) {
1285  av_log(avctx, AV_LOG_ERROR, "Video format 0x10 does not support 16bpp movies\n");
1286  return AVERROR_INVALIDDATA;
1287  }
1288 
1289  if (buf_size < 8 + video_data_size + s->decoding_map_size + s->skip_map_size)
1290  return AVERROR_INVALIDDATA;
1291 
1292  bytestream2_init(&s->stream_ptr, buf + 8, video_data_size);
1293  s->decoding_map = buf + 8 + video_data_size;
1294  s->skip_map = buf + 8 + video_data_size + s->decoding_map_size;
1295 
1296  break;
1297 
1298  case 0x11:
1299  if (! s->decoding_map_size) {
1300  av_log(avctx, AV_LOG_ERROR, "Empty decoding map for format 0x11\n");
1301  return AVERROR_INVALIDDATA;
1302  }
1303 
1304  if (s->skip_map_size) {
1305  av_log(avctx, AV_LOG_ERROR, "Skip map for format 0x11\n");
1306  return AVERROR_INVALIDDATA;
1307  }
1308 
1309  if (buf_size < 8 + video_data_size + s->decoding_map_size)
1310  return AVERROR_INVALIDDATA;
1311 
1312  bytestream2_init(&s->stream_ptr, buf + 8, video_data_size);
1313  s->decoding_map = buf + 8 + video_data_size;
1314 
1315  break;
1316 
1317  default:
1318  av_log(avctx, AV_LOG_ERROR, "Frame type 0x%02X unsupported\n", frame_format);
1319  }
1320 
1321  /* ensure we can't overread the packet */
1322  if (buf_size < 8 + s->decoding_map_size + video_data_size + s->skip_map_size) {
1323  av_log(avctx, AV_LOG_ERROR, "Invalid IP packet size\n");
1324  return AVERROR_INVALIDDATA;
1325  }
1326 
1327  if ((ret = ff_get_buffer(avctx, frame, AV_GET_BUFFER_FLAG_REF)) < 0)
1328  return ret;
1329 
1330  if (!s->is_16bpp) {
1331  int size;
1332  const uint8_t *pal = av_packet_get_side_data(avpkt, AV_PKT_DATA_PALETTE, &size);
1333  if (pal && size == AVPALETTE_SIZE) {
1334  frame->palette_has_changed = 1;
1335  memcpy(s->pal, pal, AVPALETTE_SIZE);
1336  } else if (pal) {
1337  av_log(avctx, AV_LOG_ERROR, "Palette size %d is wrong\n", size);
1338  }
1339  }
1340 
1341  switch(frame_format) {
1342  case 0x06:
1343  ipvideo_decode_format_06_opcodes(s, frame);
1344  break;
1345  case 0x10:
1346  ipvideo_decode_format_10_opcodes(s, frame);
1347  break;
1348  case 0x11:
1349  ipvideo_decode_format_11_opcodes(s, frame);
1350  break;
1351  }
1352 
1353  *got_frame = send_buffer;
1354 
1355  /* shuffle frames */
1356  av_frame_unref(s->second_last_frame);
1357  FFSWAP(AVFrame*, s->second_last_frame, s->last_frame);
1358  if ((ret = av_frame_ref(s->last_frame, frame)) < 0)
1359  return ret;
1360 
1361  /* report that the buffer was completely consumed */
1362  return buf_size;
1363 }
1364 
1365 static av_cold int ipvideo_decode_end(AVCodecContext *avctx)
1366 {
1367  IpvideoContext *s = avctx->priv_data;
1368 
1369  av_frame_free(&s->last_frame);
1370  av_frame_free(&s->second_last_frame);
1371  av_frame_free(&s->cur_decode_frame);
1372  av_frame_free(&s->prev_decode_frame);
1373 
1374  return 0;
1375 }
1376 
1377 AVCodec ff_interplay_video_decoder = {
1378  .name = "interplayvideo",
1379  .long_name = NULL_IF_CONFIG_SMALL("Interplay MVE video"),
1380  .type = AVMEDIA_TYPE_VIDEO,
1381  .id = AV_CODEC_ID_INTERPLAY_VIDEO,
1382  .priv_data_size = sizeof(IpvideoContext),
1383  .init = ipvideo_decode_init,
1384  .close = ipvideo_decode_end,
1385  .decode = ipvideo_decode_frame,
1386  .capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_PARAM_CHANGE,
1387 };
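/*
 * A minimal usage sketch, not part of interplayvideo.c itself: feeding an
 * MVE file through the public libavformat/libavcodec API reaches this
 * decoder via AV_CODEC_ID_INTERPLAY_VIDEO. The helper name decode_mve and
 * the minimal error handling are illustrative only.
 */
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>

static int decode_mve(const char *filename)
{
    AVFormatContext *fmt = NULL;
    AVCodecContext *dec = NULL;
    AVPacket *pkt = av_packet_alloc();
    AVFrame *frm = av_frame_alloc();
    int stream_idx, ret;

    if (!pkt || !frm)
        return AVERROR(ENOMEM);
    if ((ret = avformat_open_input(&fmt, filename, NULL, NULL)) < 0 ||
        (ret = avformat_find_stream_info(fmt, NULL)) < 0)
        goto end;

    /* the MVE demuxer tags the video stream as AV_CODEC_ID_INTERPLAY_VIDEO */
    ret = av_find_best_stream(fmt, AVMEDIA_TYPE_VIDEO, -1, -1, NULL, 0);
    if (ret < 0)
        goto end;
    stream_idx = ret;

    dec = avcodec_alloc_context3(avcodec_find_decoder(AV_CODEC_ID_INTERPLAY_VIDEO));
    if (!dec) { ret = AVERROR(ENOMEM); goto end; }
    avcodec_parameters_to_context(dec, fmt->streams[stream_idx]->codecpar);
    if ((ret = avcodec_open2(dec, NULL, NULL)) < 0)
        goto end;

    while (av_read_frame(fmt, pkt) >= 0) {
        if (pkt->stream_index == stream_idx &&
            avcodec_send_packet(dec, pkt) >= 0) {
            /* packets sent with send_buffer == 0 produce no output frame */
            while (avcodec_receive_frame(dec, frm) >= 0) {
                /* frm->data[0] now holds PAL8 or RGB555 pixels,
                 * frm->data[1] the palette in the PAL8 case */
                av_frame_unref(frm);
            }
        }
        av_packet_unref(pkt);
    }
    ret = 0;

end:
    avcodec_free_context(&dec);
    avformat_close_input(&fmt);
    av_packet_free(&pkt);
    av_frame_free(&frm);
    return ret;
}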