FFmpeg
interplayvideo.c
Go to the documentation of this file.
1 /*
2  * Interplay MVE Video Decoder
3  * Copyright (C) 2003 The FFmpeg project
4  *
5  * This file is part of FFmpeg.
6  *
7  * FFmpeg is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * FFmpeg is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with FFmpeg; if not, write to the Free Software
19  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
20  */
21 
22 /**
23  * @file
24  * Interplay MVE Video Decoder by Mike Melanson (melanson@pcisys.net)
25  * For more information about the Interplay MVE format, visit:
26  * http://www.pcisys.net/~melanson/codecs/interplay-mve.txt
27  * This code is written in such a way that the identifiers match up
28  * with the encoding descriptions in the document.
29  *
30  * This decoder presently only supports a PAL8 output colorspace.
31  *
32  * An Interplay video frame consists of 2 parts: The decoding map and
33  * the video data. A demuxer must load these 2 parts together in a single
34  * buffer before sending it through the stream to this decoder.
35  */
36 
37 #include <stdio.h>
38 #include <stdlib.h>
39 #include <string.h>
40 
41 #include "libavutil/intreadwrite.h"
42 
43 #define BITSTREAM_READER_LE
44 #include "avcodec.h"
45 #include "bytestream.h"
46 #include "codec_internal.h"
47 #include "decode.h"
48 #include "get_bits.h"
49 #include "hpeldsp.h"
50 #include "internal.h"
51 
52 #define PALETTE_COUNT 256
53 
54 typedef struct IpvideoContext {
55 
60 
61  /* For format 0x10 */
64 
65  const unsigned char *decoding_map;
67  const unsigned char *skip_map;
69 
70  int is_16bpp;
72  unsigned char *pixel_ptr;
73  int line_inc;
74  int stride;
76 
77  uint32_t pal[256];
79 
/**
 * Copy one 8x8 (or 8x8x16bpp) block from @p src into @p dst at the current
 * s->pixel_ptr position, displaced by (delta_x, delta_y) blocks/pixels.
 *
 * The source position is allowed to wrap horizontally: when the displaced x
 * runs off the right edge it borrows a row (and vice versa off the left
 * edge), matching the format's treatment of the frame as one linear surface.
 *
 * @return 0 on success, AVERROR_INVALIDDATA / AVERROR(EINVAL) on a motion
 *         vector that leaves the valid area or on a missing reference frame.
 */
static int copy_from(IpvideoContext *s, AVFrame *src, AVFrame *dst, int delta_x, int delta_y)
{
    int width = dst->width;
    /* recover the current block's (x, y) from the raw byte offset of pixel_ptr */
    int current_offset = s->pixel_ptr - dst->data[0];
    int x = (current_offset % dst->linesize[0]) / (1 + s->is_16bpp);
    int y = current_offset / dst->linesize[0];
    /* horizontal wrap: (dx >= width) borrows +1 row, (dx < 0) borrows -1 row */
    int dx = delta_x + x - ((delta_x + x >= width) - (delta_x + x < 0)) * width;
    int dy = delta_y + y + (delta_x + x >= width) - (delta_x + x < 0);
    int motion_offset = dy * src->linesize[0] + dx * (1 + s->is_16bpp);

    if (motion_offset < 0) {
        av_log(s->avctx, AV_LOG_ERROR, "motion offset < 0 (%d)\n", motion_offset);
        return AVERROR_INVALIDDATA;
    } else if (motion_offset > s->upper_motion_limit_offset) {
        av_log(s->avctx, AV_LOG_ERROR, "motion offset above limit (%d >= %d)\n",
               motion_offset, s->upper_motion_limit_offset);
        return AVERROR_INVALIDDATA;
    }
    /* reference frame may be missing when the stream header was corrupted */
    if (!src->data[0]) {
        av_log(s->avctx, AV_LOG_ERROR, "Invalid decode type, corrupted header?\n");
        return AVERROR(EINVAL);
    }
    /* hpeldsp index 1 copies 8 bytes/row (PAL8), index 0 copies 16 (RGB555) */
    s->hdsp.put_pixels_tab[!s->is_16bpp][0](s->pixel_ptr, src->data[0] + motion_offset,
                                            dst->linesize[0], 8);
    return 0;
}
106 
static int ipvideo_decode_block_opcode_0x0(IpvideoContext *s, AVFrame *frame)
{
    /* opcode 0x0: block is unchanged relative to the previous frame */
    return copy_from(s, s->last_frame, frame, 0, 0);
}
111 
static int ipvideo_decode_block_opcode_0x1(IpvideoContext *s, AVFrame *frame)
{
    /* opcode 0x1: block is unchanged relative to the frame before last */
    return copy_from(s, s->second_last_frame, frame, 0, 0);
}
116 
static int ipvideo_decode_block_opcode_0x2(IpvideoContext *s, AVFrame *frame)
{
    unsigned char B;
    int x, y;

    /* copy block from 2 frames ago using a motion vector; need 1 more byte */
    if (!s->is_16bpp) {
        B = bytestream2_get_byte(&s->stream_ptr);
    } else {
        /* 16bpp streams carry the motion bytes in a separate table */
        B = bytestream2_get_byte(&s->mv_ptr);
    }

    /* unpack the motion byte per the MVE spec: codes < 56 give a short
     * down/right vector, the rest an extended-range vector */
    if (B < 56) {
        x = 8 + (B % 7);
        y = B / 7;
    } else {
        x = -14 + ((B - 56) % 29);
        y =   8 + ((B - 56) / 29);
    }

    ff_tlog(s->avctx, "motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
    return copy_from(s, s->second_last_frame, frame, x, y);
}
140 
static int ipvideo_decode_block_opcode_0x3(IpvideoContext *s, AVFrame *frame)
{
    unsigned char B;
    int x, y;

    /* copy 8x8 block from current frame from an up/left block */

    /* need 1 more byte for motion */
    if (!s->is_16bpp) {
        B = bytestream2_get_byte(&s->stream_ptr);
    } else {
        /* 16bpp streams carry the motion bytes in a separate table */
        B = bytestream2_get_byte(&s->mv_ptr);
    }

    /* same packing as opcode 0x2, but negated: the source lies up/left */
    if (B < 56) {
        x = -(8 + (B % 7));
        y = -(B / 7);
    } else {
        x = -(-14 + ((B - 56) % 29));
        y = -(  8 + ((B - 56) / 29));
    }

    ff_tlog(s->avctx, "motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
    /* source is the frame currently being decoded */
    return copy_from(s, frame, frame, x, y);
}
166 
static int ipvideo_decode_block_opcode_0x4(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char B, BL, BH;

    /* copy a block from the previous frame; need 1 more byte */
    if (!s->is_16bpp) {
        B = bytestream2_get_byte(&s->stream_ptr);
    } else {
        /* 16bpp streams carry the motion bytes in a separate table */
        B = bytestream2_get_byte(&s->mv_ptr);
    }

    /* low nibble = x, high nibble = y, each biased by -8 (range -8..7) */
    BL = B & 0x0F;
    BH = (B >> 4) & 0x0F;
    x = -8 + BL;
    y = -8 + BH;

    ff_tlog(s->avctx, "motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
    return copy_from(s, s->last_frame, frame, x, y);
}
187 
static int ipvideo_decode_block_opcode_0x5(IpvideoContext *s, AVFrame *frame)
{
    /* signed char: each byte is a full-range signed component (-128..127) */
    signed char x, y;

    /* copy a block from the previous frame using an expanded range;
     * need 2 more bytes */
    x = bytestream2_get_byte(&s->stream_ptr);
    y = bytestream2_get_byte(&s->stream_ptr);

    ff_tlog(s->avctx, "motion bytes = %d, %d\n", x, y);
    return copy_from(s, s->last_frame, frame, x, y);
}
200 
static int ipvideo_decode_block_opcode_0x6(IpvideoContext *s, AVFrame *frame)
{
    /* mystery opcode? skip multiple blocks? Not documented in the format
     * description; log it so affected samples can be investigated. */
    av_log(s->avctx, AV_LOG_ERROR, "Help! Mystery opcode 0x6 seen\n");

    /* report success */
    return 0;
}
209 
static int ipvideo_decode_block_opcode_0x7(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char P[2];
    unsigned int flags;

    if (bytestream2_get_bytes_left(&s->stream_ptr) < 4) {
        av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0x7\n");
        return AVERROR_INVALIDDATA;
    }

    /* 2-color encoding; the ordering of P[0]/P[1] selects the sub-mode */
    P[0] = bytestream2_get_byte(&s->stream_ptr);
    P[1] = bytestream2_get_byte(&s->stream_ptr);

    if (P[0] <= P[1]) {

        /* per-pixel mode: one flag bit per pixel; the OR-ed 0x100 acts as a
         * sentinel so the inner loop stops after exactly 8 bits.
         * need 8 more bytes from the stream */
        for (y = 0; y < 8; y++) {
            flags = bytestream2_get_byte(&s->stream_ptr) | 0x100;
            for (; flags != 1; flags >>= 1)
                *s->pixel_ptr++ = P[flags & 1];
            s->pixel_ptr += s->line_inc;
        }

    } else {

        /* 2x2 block mode: one flag bit per 2x2 quad;
         * need 2 more bytes from the stream */
        flags = bytestream2_get_le16(&s->stream_ptr);
        for (y = 0; y < 8; y += 2) {
            for (x = 0; x < 8; x += 2, flags >>= 1) {
                s->pixel_ptr[x                ] =
                s->pixel_ptr[x + 1            ] =
                s->pixel_ptr[x +     s->stride] =
                s->pixel_ptr[x + 1 + s->stride] = P[flags & 1];
            }
            s->pixel_ptr += s->stride * 2;
        }
    }

    /* report success */
    return 0;
}
253 
static int ipvideo_decode_block_opcode_0x8(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char P[4];
    unsigned int flags = 0;

    if (bytestream2_get_bytes_left(&s->stream_ptr) < 12) {
        av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0x8\n");
        return AVERROR_INVALIDDATA;
    }

    /* 2-color encoding for each 4x4 quadrant, or 2-color encoding on
     * either top and bottom or left and right halves */
    P[0] = bytestream2_get_byte(&s->stream_ptr);
    P[1] = bytestream2_get_byte(&s->stream_ptr);

    if (P[0] <= P[1]) {
        /* quadrant mode: iterate the four 4x4 quadrants column-wise,
         * 4 rows at a time (left-top, left-bottom, right-top, right-bottom) */
        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                if (y) {
                    P[0] = bytestream2_get_byte(&s->stream_ptr);
                    P[1] = bytestream2_get_byte(&s->stream_ptr);
                }
                flags = bytestream2_get_le16(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 1)
                *s->pixel_ptr++ = P[flags & 1];
            s->pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
        }

    } else {
        flags = bytestream2_get_le32(&s->stream_ptr);
        P[2] = bytestream2_get_byte(&s->stream_ptr);
        P[3] = bytestream2_get_byte(&s->stream_ptr);

        if (P[2] <= P[3]) {

            /* vertical split; left & right halves are 2-color encoded */

            for (y = 0; y < 16; y++) {
                for (x = 0; x < 4; x++, flags >>= 1)
                    *s->pixel_ptr++ = P[flags & 1];
                s->pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) {
                    s->pixel_ptr -= 8 * s->stride - 4;
                    P[0] = P[2];
                    P[1] = P[3];
                    flags = bytestream2_get_le32(&s->stream_ptr);
                }
            }

        } else {

            /* horizontal split; top & bottom halves are 2-color encoded */

            for (y = 0; y < 8; y++) {
                if (y == 4) {
                    P[0] = P[2];
                    P[1] = P[3];
                    flags = bytestream2_get_le32(&s->stream_ptr);
                }

                for (x = 0; x < 8; x++, flags >>= 1)
                    *s->pixel_ptr++ = P[flags & 1];
                s->pixel_ptr += s->line_inc;
            }
        }
    }

    /* report success */
    return 0;
}
331 
static int ipvideo_decode_block_opcode_0x9(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char P[4];

    if (bytestream2_get_bytes_left(&s->stream_ptr) < 8) {
        av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0x9\n");
        return AVERROR_INVALIDDATA;
    }

    /* 4-color encoding; the orderings of P[0]/P[1] and P[2]/P[3] pick one
     * of four sub-modes (pixel, 2x2, 2x1, 1x2 granularity) */
    bytestream2_get_buffer(&s->stream_ptr, P, 4);

    if (P[0] <= P[1]) {
        if (P[2] <= P[3]) {

            /* 1 of 4 colors for each pixel, need 16 more bytes */
            for (y = 0; y < 8; y++) {
                /* get the next set of 8 2-bit flags */
                int flags = bytestream2_get_le16(&s->stream_ptr);
                for (x = 0; x < 8; x++, flags >>= 2)
                    *s->pixel_ptr++ = P[flags & 0x03];
                s->pixel_ptr += s->line_inc;
            }

        } else {
            uint32_t flags;

            /* 1 of 4 colors for each 2x2 block, need 4 more bytes */
            flags = bytestream2_get_le32(&s->stream_ptr);

            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    s->pixel_ptr[x                ] =
                    s->pixel_ptr[x + 1            ] =
                    s->pixel_ptr[x +     s->stride] =
                    s->pixel_ptr[x + 1 + s->stride] = P[flags & 0x03];
                }
                s->pixel_ptr += s->stride * 2;
            }

        }
    } else {
        uint64_t flags;

        /* 1 of 4 colors for each 2x1 or 1x2 block, need 8 more bytes */
        flags = bytestream2_get_le64(&s->stream_ptr);
        if (P[2] <= P[3]) {
            /* 2x1 (horizontal pair) granularity */
            for (y = 0; y < 8; y++) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    s->pixel_ptr[x    ] =
                    s->pixel_ptr[x + 1] = P[flags & 0x03];
                }
                s->pixel_ptr += s->stride;
            }
        } else {
            /* 1x2 (vertical pair) granularity */
            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x++, flags >>= 2) {
                    s->pixel_ptr[x            ] =
                    s->pixel_ptr[x + s->stride] = P[flags & 0x03];
                }
                s->pixel_ptr += s->stride * 2;
            }
        }
    }

    /* report success */
    return 0;
}
401 
static int ipvideo_decode_block_opcode_0xA(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char P[8];
    int flags = 0;

    if (bytestream2_get_bytes_left(&s->stream_ptr) < 16) {
        av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0xA\n");
        return AVERROR_INVALIDDATA;
    }

    bytestream2_get_buffer(&s->stream_ptr, P, 4);

    /* 4-color encoding for each 4x4 quadrant, or 4-color encoding on
     * either top and bottom or left and right halves */
    if (P[0] <= P[1]) {

        /* 4-color encoding for each quadrant; need 32 bytes */
        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                if (y) bytestream2_get_buffer(&s->stream_ptr, P, 4);
                flags = bytestream2_get_le32(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 2)
                *s->pixel_ptr++ = P[flags & 0x03];

            s->pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
        }

    } else {
        // vertical split?
        int vert;
        /* wider flags variable deliberately shadows the outer one here */
        uint64_t flags = bytestream2_get_le64(&s->stream_ptr);

        bytestream2_get_buffer(&s->stream_ptr, P + 4, 4);
        vert = P[4] <= P[5];

        /* 4-color encoding for either left and right or top and bottom
         * halves */

        for (y = 0; y < 16; y++) {
            for (x = 0; x < 4; x++, flags >>= 2)
                *s->pixel_ptr++ = P[flags & 0x03];

            if (vert) {
                s->pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
            } else if (y & 1) s->pixel_ptr += s->line_inc;

            // load values for second half
            if (y == 7) {
                memcpy(P, P + 4, 4);
                flags = bytestream2_get_le64(&s->stream_ptr);
            }
        }
    }

    /* report success */
    return 0;
}
467 
469 {
470  int y;
471 
472  /* 64-color encoding (each pixel in block is a different color) */
473  for (y = 0; y < 8; y++) {
474  bytestream2_get_buffer(&s->stream_ptr, s->pixel_ptr, 8);
475  s->pixel_ptr += s->stride;
476  }
477 
478  /* report success */
479  return 0;
480 }
481 
483 {
484  int x, y;
485 
486  /* 16-color block encoding: each 2x2 block is a different color */
487  for (y = 0; y < 8; y += 2) {
488  for (x = 0; x < 8; x += 2) {
489  s->pixel_ptr[x ] =
490  s->pixel_ptr[x + 1 ] =
491  s->pixel_ptr[x + s->stride] =
492  s->pixel_ptr[x + 1 + s->stride] = bytestream2_get_byte(&s->stream_ptr);
493  }
494  s->pixel_ptr += s->stride * 2;
495  }
496 
497  /* report success */
498  return 0;
499 }
500 
static int ipvideo_decode_block_opcode_0xD(IpvideoContext *s, AVFrame *frame)
{
    int y;
    unsigned char P[2];

    if (bytestream2_get_bytes_left(&s->stream_ptr) < 4) {
        av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0xD\n");
        return AVERROR_INVALIDDATA;
    }

    /* 4-color block encoding: each 4x4 block is a different color;
     * a new color pair is fetched every 4 rows */
    for (y = 0; y < 8; y++) {
        if (!(y & 3)) {
            P[0] = bytestream2_get_byte(&s->stream_ptr);
            P[1] = bytestream2_get_byte(&s->stream_ptr);
        }
        /* left 4 pixels get P[0], right 4 get P[1] */
        memset(s->pixel_ptr,     P[0], 4);
        memset(s->pixel_ptr + 4, P[1], 4);
        s->pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}
525 
527 {
528  int y;
529  unsigned char pix;
530 
531  /* 1-color encoding: the whole block is 1 solid color */
532  pix = bytestream2_get_byte(&s->stream_ptr);
533 
534  for (y = 0; y < 8; y++) {
535  memset(s->pixel_ptr, pix, 8);
536  s->pixel_ptr += s->stride;
537  }
538 
539  /* report success */
540  return 0;
541 }
542 
static int ipvideo_decode_block_opcode_0xF(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char sample[2];

    /* dithered encoding: two colors arranged in a checkerboard */
    sample[0] = bytestream2_get_byte(&s->stream_ptr);
    sample[1] = bytestream2_get_byte(&s->stream_ptr);

    for (y = 0; y < 8; y++) {
        for (x = 0; x < 8; x += 2) {
            /* even rows start with sample[0], odd rows with sample[1] */
            *s->pixel_ptr++ = sample[  y & 1 ];
            *s->pixel_ptr++ = sample[!(y & 1)];
        }
        s->pixel_ptr += s->line_inc;
    }

    /* report success */
    return 0;
}
563 
static int ipvideo_decode_block_opcode_0x6_16(IpvideoContext *s, AVFrame *frame)
{
    /* signed char: each byte is a full-range signed component (-128..127) */
    signed char x, y;

    /* copy a block from the second last frame using an expanded range */
    x = bytestream2_get_byte(&s->stream_ptr);
    y = bytestream2_get_byte(&s->stream_ptr);

    ff_tlog(s->avctx, "motion bytes = %d, %d\n", x, y);
    return copy_from(s, s->second_last_frame, frame, x, y);
}
575 
static int ipvideo_decode_block_opcode_0x7_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t P[2];
    unsigned int flags;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 2-color encoding; in 16bpp mode the high bit of P[0] selects the
     * sub-mode instead of the P[0] <= P[1] comparison */
    P[0] = bytestream2_get_le16(&s->stream_ptr);
    P[1] = bytestream2_get_le16(&s->stream_ptr);

    if (!(P[0] & 0x8000)) {

        /* per-pixel mode; 0x100 is a sentinel that ends each 8-bit row */
        for (y = 0; y < 8; y++) {
            flags = bytestream2_get_byte(&s->stream_ptr) | 0x100;
            for (; flags != 1; flags >>= 1)
                *pixel_ptr++ = P[flags & 1];
            pixel_ptr += s->line_inc;
        }

    } else {

        /* 2x2 block mode: one flag bit per quad */
        flags = bytestream2_get_le16(&s->stream_ptr);
        for (y = 0; y < 8; y += 2) {
            for (x = 0; x < 8; x += 2, flags >>= 1) {
                pixel_ptr[x                ] =
                pixel_ptr[x + 1            ] =
                pixel_ptr[x +     s->stride] =
                pixel_ptr[x + 1 + s->stride] = P[flags & 1];
            }
            pixel_ptr += s->stride * 2;
        }
    }

    return 0;
}
612 
static int ipvideo_decode_block_opcode_0x8_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t P[4];
    unsigned int flags = 0;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 2-color encoding for each 4x4 quadrant, or 2-color encoding on
     * either top and bottom or left and right halves; sub-mode is chosen
     * by the high bit of P[0] / P[2] (16bpp variant of opcode 0x8) */
    P[0] = bytestream2_get_le16(&s->stream_ptr);
    P[1] = bytestream2_get_le16(&s->stream_ptr);

    if (!(P[0] & 0x8000)) {
        /* quadrant mode: four 4x4 quadrants, column-wise */
        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                if (y) {
                    P[0] = bytestream2_get_le16(&s->stream_ptr);
                    P[1] = bytestream2_get_le16(&s->stream_ptr);
                }
                flags = bytestream2_get_le16(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 1)
                *pixel_ptr++ = P[flags & 1];
            pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) pixel_ptr -= 8 * s->stride - 4;
        }

    } else {

        flags = bytestream2_get_le32(&s->stream_ptr);
        P[2] = bytestream2_get_le16(&s->stream_ptr);
        P[3] = bytestream2_get_le16(&s->stream_ptr);

        if (!(P[2] & 0x8000)) {

            /* vertical split; left & right halves are 2-color encoded */

            for (y = 0; y < 16; y++) {
                for (x = 0; x < 4; x++, flags >>= 1)
                    *pixel_ptr++ = P[flags & 1];
                pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) {
                    pixel_ptr -= 8 * s->stride - 4;
                    P[0] = P[2];
                    P[1] = P[3];
                    flags = bytestream2_get_le32(&s->stream_ptr);
                }
            }

        } else {

            /* horizontal split; top & bottom halves are 2-color encoded */

            for (y = 0; y < 8; y++) {
                if (y == 4) {
                    P[0] = P[2];
                    P[1] = P[3];
                    flags = bytestream2_get_le32(&s->stream_ptr);
                }

                for (x = 0; x < 8; x++, flags >>= 1)
                    *pixel_ptr++ = P[flags & 1];
                pixel_ptr += s->line_inc;
            }
        }
    }

    /* report success */
    return 0;
}
688 
static int ipvideo_decode_block_opcode_0x9_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t P[4];
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 4-color encoding; the high bits of P[0] / P[2] select one of four
     * sub-modes (16bpp variant of opcode 0x9) */
    for (x = 0; x < 4; x++)
        P[x] = bytestream2_get_le16(&s->stream_ptr);

    if (!(P[0] & 0x8000)) {
        if (!(P[2] & 0x8000)) {

            /* 1 of 4 colors for each pixel */
            for (y = 0; y < 8; y++) {
                /* get the next set of 8 2-bit flags */
                int flags = bytestream2_get_le16(&s->stream_ptr);
                for (x = 0; x < 8; x++, flags >>= 2)
                    *pixel_ptr++ = P[flags & 0x03];
                pixel_ptr += s->line_inc;
            }

        } else {
            uint32_t flags;

            /* 1 of 4 colors for each 2x2 block */
            flags = bytestream2_get_le32(&s->stream_ptr);

            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    pixel_ptr[x                ] =
                    pixel_ptr[x + 1            ] =
                    pixel_ptr[x +     s->stride] =
                    pixel_ptr[x + 1 + s->stride] = P[flags & 0x03];
                }
                pixel_ptr += s->stride * 2;
            }

        }
    } else {
        uint64_t flags;

        /* 1 of 4 colors for each 2x1 or 1x2 block */
        flags = bytestream2_get_le64(&s->stream_ptr);
        if (!(P[2] & 0x8000)) {
            /* 2x1 (horizontal pair) granularity */
            for (y = 0; y < 8; y++) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    pixel_ptr[x    ] =
                    pixel_ptr[x + 1] = P[flags & 0x03];
                }
                pixel_ptr += s->stride;
            }
        } else {
            /* 1x2 (vertical pair) granularity */
            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x++, flags >>= 2) {
                    pixel_ptr[x            ] =
                    pixel_ptr[x + s->stride] = P[flags & 0x03];
                }
                pixel_ptr += s->stride * 2;
            }
        }
    }

    /* report success */
    return 0;
}
755 
static int ipvideo_decode_block_opcode_0xA_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t P[8];
    int flags = 0;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    for (x = 0; x < 4; x++)
        P[x] = bytestream2_get_le16(&s->stream_ptr);

    /* 4-color encoding for each 4x4 quadrant, or 4-color encoding on
     * either top and bottom or left and right halves; sub-mode is chosen
     * by the high bit of P[0] (16bpp variant of opcode 0xA) */
    if (!(P[0] & 0x8000)) {

        /* 4-color encoding for each quadrant */
        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                if (y)
                    for (x = 0; x < 4; x++)
                        P[x] = bytestream2_get_le16(&s->stream_ptr);
                flags = bytestream2_get_le32(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 2)
                *pixel_ptr++ = P[flags & 0x03];

            pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) pixel_ptr -= 8 * s->stride - 4;
        }

    } else {
        // vertical split?
        int vert;
        /* wider flags variable deliberately shadows the outer one here */
        uint64_t flags = bytestream2_get_le64(&s->stream_ptr);

        for (x = 4; x < 8; x++)
            P[x] = bytestream2_get_le16(&s->stream_ptr);
        vert = !(P[4] & 0x8000);

        /* 4-color encoding for either left and right or top and bottom
         * halves */

        for (y = 0; y < 16; y++) {
            for (x = 0; x < 4; x++, flags >>= 2)
                *pixel_ptr++ = P[flags & 0x03];

            if (vert) {
                pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) pixel_ptr -= 8 * s->stride - 4;
            } else if (y & 1) pixel_ptr += s->line_inc;

            // load values for second half
            if (y == 7) {
                memcpy(P, P + 4, 8);
                flags = bytestream2_get_le64(&s->stream_ptr);
            }
        }
    }

    /* report success */
    return 0;
}
821 
823 {
824  int x, y;
825  uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
826 
827  /* 64-color encoding (each pixel in block is a different color) */
828  for (y = 0; y < 8; y++) {
829  for (x = 0; x < 8; x++)
830  pixel_ptr[x] = bytestream2_get_le16(&s->stream_ptr);
831  pixel_ptr += s->stride;
832  }
833 
834  /* report success */
835  return 0;
836 }
837 
static int ipvideo_decode_block_opcode_0xC_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 16-color block encoding: each 2x2 block is a different color */
    for (y = 0; y < 8; y += 2) {
        for (x = 0; x < 8; x += 2) {
            pixel_ptr[x                ] =
            pixel_ptr[x + 1            ] =
            pixel_ptr[x +     s->stride] =
            pixel_ptr[x + 1 + s->stride] = bytestream2_get_le16(&s->stream_ptr);
        }
        pixel_ptr += s->stride * 2;
    }

    /* report success */
    return 0;
}
857 
static int ipvideo_decode_block_opcode_0xD_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t P[2];
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 4-color block encoding: each 4x4 block is a different color;
     * a new color pair is fetched every 4 rows */
    for (y = 0; y < 8; y++) {
        if (!(y & 3)) {
            P[0] = bytestream2_get_le16(&s->stream_ptr);
            P[1] = bytestream2_get_le16(&s->stream_ptr);
        }
        /* left half gets P[0], right half P[1] */
        for (x = 0; x < 8; x++)
            pixel_ptr[x] = P[x >> 2];
        pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}
878 
880 {
881  int x, y;
882  uint16_t pix;
883  uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
884 
885  /* 1-color encoding: the whole block is 1 solid color */
886  pix = bytestream2_get_le16(&s->stream_ptr);
887 
888  for (y = 0; y < 8; y++) {
889  for (x = 0; x < 8; x++)
890  pixel_ptr[x] = pix;
891  pixel_ptr += s->stride;
892  }
893 
894  /* report success */
895  return 0;
896 }
897 
907 };
908 
918 };
919 
static void ipvideo_format_06_firstpass(IpvideoContext *s, AVFrame *frame, int16_t opcode)
{
    int line;

    if (!opcode) {
        /* opcode 0: 64 literal pixels follow in the stream */
        for (line = 0; line < 8; ++line) {
            bytestream2_get_buffer(&s->stream_ptr, s->pixel_ptr, 8);
            s->pixel_ptr += s->stride;
        }
    } else {
        /* Don't try to copy second_last_frame data on the first frames */
        if (s->avctx->frame_number > 2)
            copy_from(s, s->second_last_frame, frame, 0, 0);
    }
}
935 
static void ipvideo_format_06_secondpass(IpvideoContext *s, AVFrame *frame, int16_t opcode)
{
    int off_x, off_y;

    /* opcode sign selects the reference: negative = previous frame
     * (biased by 0xC000), positive = current frame (biased by 0x4000),
     * zero = leave the block as the first pass produced it */
    if (opcode < 0) {
        off_x = ((uint16_t)opcode - 0xC000) % frame->width;
        off_y = ((uint16_t)opcode - 0xC000) / frame->width;
        copy_from(s, s->last_frame, frame, off_x, off_y);
    } else if (opcode > 0) {
        off_x = ((uint16_t)opcode - 0x4000) % frame->width;
        off_y = ((uint16_t)opcode - 0x4000) / frame->width;
        copy_from(s, frame, frame, off_x, off_y);
    }
}
950 
951 static void (* const ipvideo_format_06_passes[])(IpvideoContext *s, AVFrame *frame, int16_t op) = {
953 };
954 
static void ipvideo_decode_format_06_opcodes(IpvideoContext *s, AVFrame *frame)
{
    int pass, x, y;
    int16_t opcode;
    GetByteContext decoding_map_ptr;

    /* this is PAL8, so make the palette available */
    memcpy(frame->data[1], s->pal, AVPALETTE_SIZE);
    s->stride = frame->linesize[0];

    s->line_inc = s->stride - 8;
    s->upper_motion_limit_offset = (s->avctx->height - 8) * frame->linesize[0]
                                 + (s->avctx->width - 8) * (1 + s->is_16bpp);

    bytestream2_init(&decoding_map_ptr, s->decoding_map, s->decoding_map_size);

    /* two passes over the same opcode map, dispatching to the per-pass
     * handlers in ipvideo_format_06_passes[] */
    for (pass = 0; pass < 2; ++pass) {
        bytestream2_seek(&decoding_map_ptr, 0, SEEK_SET);
        for (y = 0; y < s->avctx->height; y += 8) {
            for (x = 0; x < s->avctx->width; x += 8) {
                opcode = bytestream2_get_le16(&decoding_map_ptr);

                ff_tlog(s->avctx,
                        "  block @ (%3d, %3d): opcode 0x%X, data ptr offset %d\n",
                        x, y, opcode, bytestream2_tell(&s->stream_ptr));

                s->pixel_ptr = frame->data[0] + x + y * frame->linesize[0];
                /* NOTE(review): the extraction dropped one line here; the
                 * dispatch through the pass table is restored — confirm
                 * against the upstream source. */
                ipvideo_format_06_passes[pass](s, frame, opcode);
            }
        }
    }

    if (bytestream2_get_bytes_left(&s->stream_ptr) > 1) {
        av_log(s->avctx, AV_LOG_DEBUG,
               "decode finished with %d bytes left over\n",
               bytestream2_get_bytes_left(&s->stream_ptr));
    }
}
993 
static void ipvideo_format_10_firstpass(IpvideoContext *s, AVFrame *frame, int16_t opcode)
{
    int line;

    /* opcode 0: 64 literal pixels follow in the stream; any other opcode
     * leaves the block untouched during the first pass */
    if (!opcode) {
        for (line = 0; line < 8; ++line) {
            bytestream2_get_buffer(&s->stream_ptr, s->pixel_ptr, 8);
            s->pixel_ptr += s->stride;
        }
    }
}
1005 
static void ipvideo_format_10_secondpass(IpvideoContext *s, AVFrame *frame, int16_t opcode)
{
    int off_x, off_y;

    /* opcode sign selects the reference: negative = previous decode frame
     * (biased by 0xC000), positive = current decode frame (biased by 0x4000) */
    if (opcode < 0) {
        off_x = ((uint16_t)opcode - 0xC000) % s->cur_decode_frame->width;
        off_y = ((uint16_t)opcode - 0xC000) / s->cur_decode_frame->width;
        copy_from(s, s->prev_decode_frame, s->cur_decode_frame, off_x, off_y);
    } else if (opcode > 0) {
        off_x = ((uint16_t)opcode - 0x4000) % s->cur_decode_frame->width;
        off_y = ((uint16_t)opcode - 0x4000) / s->cur_decode_frame->width;
        copy_from(s, s->cur_decode_frame, s->cur_decode_frame, off_x, off_y);
    }
}
1020 
1021 static void (* const ipvideo_format_10_passes[])(IpvideoContext *s, AVFrame *frame, int16_t op) = {
1023 };
1024 
static void ipvideo_decode_format_10_opcodes(IpvideoContext *s, AVFrame *frame)
{
    int pass, x, y, changed_block;
    int16_t opcode, skip;
    GetByteContext decoding_map_ptr;
    GetByteContext skip_map_ptr;

    bytestream2_skip(&s->stream_ptr, 14); /* data starts 14 bytes in */

    /* this is PAL8, so make the palette available */
    memcpy(frame->data[1], s->pal, AVPALETTE_SIZE);
    s->stride = frame->linesize[0];

    s->line_inc = s->stride - 8;
    s->upper_motion_limit_offset = (s->avctx->height - 8) * frame->linesize[0]
                                 + (s->avctx->width - 8) * (1 + s->is_16bpp);

    bytestream2_init(&decoding_map_ptr, s->decoding_map, s->decoding_map_size);
    bytestream2_init(&skip_map_ptr, s->skip_map, s->skip_map_size);

    /* two decode passes into cur_decode_frame; the skip map is a run-length
     * list telling which blocks actually carry an opcode */
    for (pass = 0; pass < 2; ++pass) {
        bytestream2_seek(&decoding_map_ptr, 0, SEEK_SET);
        bytestream2_seek(&skip_map_ptr, 0, SEEK_SET);
        skip = bytestream2_get_le16(&skip_map_ptr);

        for (y = 0; y < s->avctx->height; y += 8) {
            for (x = 0; x < s->avctx->width; x += 8) {
                s->pixel_ptr = s->cur_decode_frame->data[0] + x + y * s->cur_decode_frame->linesize[0];

                while (skip <= 0) {
                    /* -0x8000 and 0 are run markers, not block opcodes */
                    if (skip != -0x8000 && skip) {
                        opcode = bytestream2_get_le16(&decoding_map_ptr);
                        ipvideo_format_10_passes[pass](s, frame, opcode);
                        break;
                    }
                    if (bytestream2_get_bytes_left(&skip_map_ptr) < 2)
                        return;
                    skip = bytestream2_get_le16(&skip_map_ptr);
                }
                skip *= 2;
            }
        }
    }

    /* third walk over the skip map: blit changed blocks from the decode
     * frame into the output, keep unchanged ones from the last frame */
    bytestream2_seek(&skip_map_ptr, 0, SEEK_SET);
    skip = bytestream2_get_le16(&skip_map_ptr);
    for (y = 0; y < s->avctx->height; y += 8) {
        for (x = 0; x < s->avctx->width; x += 8) {
            changed_block = 0;
            s->pixel_ptr = frame->data[0] + x + y*frame->linesize[0];

            while (skip <= 0) {
                if (skip != -0x8000 && skip) {
                    changed_block = 1;
                    break;
                }
                if (bytestream2_get_bytes_left(&skip_map_ptr) < 2)
                    return;
                skip = bytestream2_get_le16(&skip_map_ptr);
            }

            if (changed_block) {
                copy_from(s, s->cur_decode_frame, frame, 0, 0);
            } else {
                /* Don't try to copy last_frame data on the first frame */
                if (s->avctx->frame_number)
                    copy_from(s, s->last_frame, frame, 0, 0);
            }
            skip *= 2;
        }
    }

    FFSWAP(AVFrame*, s->prev_decode_frame, s->cur_decode_frame);

    if (bytestream2_get_bytes_left(&s->stream_ptr) > 1) {
        av_log(s->avctx, AV_LOG_DEBUG,
               "decode finished with %d bytes left over\n",
               bytestream2_get_bytes_left(&s->stream_ptr));
    }
}
1105 
static void ipvideo_decode_format_11_opcodes(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char opcode;
    int ret;
    GetBitContext gb;

    bytestream2_skip(&s->stream_ptr, 14); /* data starts 14 bytes in */
    if (!s->is_16bpp) {
        /* this is PAL8, so make the palette available */
        memcpy(frame->data[1], s->pal, AVPALETTE_SIZE);

        s->stride = frame->linesize[0];
    } else {
        /* stride is in 16-bit pixels for the RGB555 path */
        s->stride = frame->linesize[0] >> 1;
        /* motion bytes live in a separate table whose size prefixes it */
        s->mv_ptr = s->stream_ptr;
        bytestream2_skip(&s->mv_ptr, bytestream2_get_le16(&s->stream_ptr));
    }
    s->line_inc = s->stride - 8;
    s->upper_motion_limit_offset = (s->avctx->height - 8) * frame->linesize[0]
                                 + (s->avctx->width - 8) * (1 + s->is_16bpp);

    /* the decoding map holds one 4-bit opcode per 8x8 block */
    init_get_bits(&gb, s->decoding_map, s->decoding_map_size * 8);
    for (y = 0; y < s->avctx->height; y += 8) {
        for (x = 0; x < s->avctx->width; x += 8) {
            if (get_bits_left(&gb) < 4)
                return;
            opcode = get_bits(&gb, 4);

            ff_tlog(s->avctx,
                    "  block @ (%3d, %3d): encoding 0x%X, data ptr offset %d\n",
                    x, y, opcode, bytestream2_tell(&s->stream_ptr));

            if (!s->is_16bpp) {
                s->pixel_ptr = frame->data[0] + x
                              + y*frame->linesize[0];
                ret = ipvideo_decode_block[opcode](s, frame);
            } else {
                s->pixel_ptr = frame->data[0] + x*2
                              + y*frame->linesize[0];
                ret = ipvideo_decode_block16[opcode](s, frame);
            }
            if (ret != 0) {
                av_log(s->avctx, AV_LOG_ERROR, "decode problem on frame %d, @ block (%d, %d)\n",
                       s->avctx->frame_number, x, y);
                return;
            }
        }
    }
    if (bytestream2_get_bytes_left(&s->stream_ptr) > 1) {
        av_log(s->avctx, AV_LOG_DEBUG,
               "decode finished with %d bytes left over\n",
               bytestream2_get_bytes_left(&s->stream_ptr));
    }
}
1161 
static av_cold int ipvideo_decode_init(AVCodecContext *avctx)
{
    IpvideoContext *s = avctx->priv_data;

    s->avctx = avctx;

    /* 16 bits per coded sample selects the RGB555 path, else PAL8 */
    s->is_16bpp = avctx->bits_per_coded_sample == 16;
    avctx->pix_fmt = s->is_16bpp ? AV_PIX_FMT_RGB555 : AV_PIX_FMT_PAL8;

    ff_hpeldsp_init(&s->hdsp, avctx->flags);

    /* reference frames: two display references plus the two working
     * frames used by the format 0x10 double-buffered decode */
    s->last_frame        = av_frame_alloc();
    s->second_last_frame = av_frame_alloc();
    s->cur_decode_frame  = av_frame_alloc();
    s->prev_decode_frame = av_frame_alloc();
    if (!s->last_frame || !s->second_last_frame ||
        !s->cur_decode_frame || !s->prev_decode_frame) {
        /* NOTE(review): frames allocated before a failure are presumably
         * released by the codec's close callback — confirm the codec entry
         * sets the init-cleanup capability. */
        return AVERROR(ENOMEM);
    }

    s->cur_decode_frame->width   = avctx->width;
    s->prev_decode_frame->width  = avctx->width;
    s->cur_decode_frame->height  = avctx->height;
    s->prev_decode_frame->height = avctx->height;
    s->cur_decode_frame->format  = avctx->pix_fmt;
    s->prev_decode_frame->format = avctx->pix_fmt;

    return 0;
}
1191 
1193  int *got_frame, AVPacket *avpkt)
1194 {
1195  const uint8_t *buf = avpkt->data;
1196  int buf_size = avpkt->size;
1197  IpvideoContext *s = avctx->priv_data;
1198  int ret;
1199  int send_buffer;
1200  int frame_format;
1201  int video_data_size;
1202 
1204  av_frame_unref(s->last_frame);
1205  av_frame_unref(s->second_last_frame);
1206  av_frame_unref(s->cur_decode_frame);
1207  av_frame_unref(s->prev_decode_frame);
1208  }
1209 
1210  if (!s->cur_decode_frame->data[0]) {
1211  ret = ff_get_buffer(avctx, s->cur_decode_frame, 0);
1212  if (ret < 0)
1213  return ret;
1214 
1215  ret = ff_get_buffer(avctx, s->prev_decode_frame, 0);
1216  if (ret < 0) {
1217  av_frame_unref(s->cur_decode_frame);
1218  return ret;
1219  }
1220  }
1221 
1222  if (buf_size < 8)
1223  return AVERROR_INVALIDDATA;
1224 
1225  frame_format = AV_RL8(buf);
1226  send_buffer = AV_RL8(buf + 1);
1227  video_data_size = AV_RL16(buf + 2);
1228  s->decoding_map_size = AV_RL16(buf + 4);
1229  s->skip_map_size = AV_RL16(buf + 6);
1230 
1231  switch (frame_format) {
1232  case 0x06:
1233  if (s->decoding_map_size) {
1234  av_log(avctx, AV_LOG_ERROR, "Decoding map for format 0x06\n");
1235  return AVERROR_INVALIDDATA;
1236  }
1237 
1238  if (s->skip_map_size) {
1239  av_log(avctx, AV_LOG_ERROR, "Skip map for format 0x06\n");
1240  return AVERROR_INVALIDDATA;
1241  }
1242 
1243  if (s->is_16bpp) {
1244  av_log(avctx, AV_LOG_ERROR, "Video format 0x06 does not support 16bpp movies\n");
1245  return AVERROR_INVALIDDATA;
1246  }
1247 
1248  /* Decoding map for 0x06 frame format is at the top of pixeldata */
1249  s->decoding_map_size = ((s->avctx->width / 8) * (s->avctx->height / 8)) * 2;
1250  s->decoding_map = buf + 8 + 14; /* 14 bits of op data */
1251  video_data_size -= s->decoding_map_size + 14;
1252  if (video_data_size <= 0 || s->decoding_map_size == 0)
1253  return AVERROR_INVALIDDATA;
1254 
1255  if (buf_size < 8 + s->decoding_map_size + 14 + video_data_size)
1256  return AVERROR_INVALIDDATA;
1257 
1258  bytestream2_init(&s->stream_ptr, buf + 8 + s->decoding_map_size + 14, video_data_size);
1259 
1260  break;
1261 
1262  case 0x10:
1263  if (! s->decoding_map_size) {
1264  av_log(avctx, AV_LOG_ERROR, "Empty decoding map for format 0x10\n");
1265  return AVERROR_INVALIDDATA;
1266  }
1267 
1268  if (! s->skip_map_size) {
1269  av_log(avctx, AV_LOG_ERROR, "Empty skip map for format 0x10\n");
1270  return AVERROR_INVALIDDATA;
1271  }
1272 
1273  if (s->is_16bpp) {
1274  av_log(avctx, AV_LOG_ERROR, "Video format 0x10 does not support 16bpp movies\n");
1275  return AVERROR_INVALIDDATA;
1276  }
1277 
1278  if (buf_size < 8 + video_data_size + s->decoding_map_size + s->skip_map_size)
1279  return AVERROR_INVALIDDATA;
1280 
1281  bytestream2_init(&s->stream_ptr, buf + 8, video_data_size);
1282  s->decoding_map = buf + 8 + video_data_size;
1283  s->skip_map = buf + 8 + video_data_size + s->decoding_map_size;
1284 
1285  break;
1286 
1287  case 0x11:
1288  if (! s->decoding_map_size) {
1289  av_log(avctx, AV_LOG_ERROR, "Empty decoding map for format 0x11\n");
1290  return AVERROR_INVALIDDATA;
1291  }
1292 
1293  if (s->skip_map_size) {
1294  av_log(avctx, AV_LOG_ERROR, "Skip map for format 0x11\n");
1295  return AVERROR_INVALIDDATA;
1296  }
1297 
1298  if (buf_size < 8 + video_data_size + s->decoding_map_size)
1299  return AVERROR_INVALIDDATA;
1300 
1301  bytestream2_init(&s->stream_ptr, buf + 8, video_data_size);
1302  s->decoding_map = buf + 8 + video_data_size;
1303 
1304  break;
1305 
1306  default:
1307  av_log(avctx, AV_LOG_ERROR, "Frame type 0x%02X unsupported\n", frame_format);
1308  }
1309 
1310  /* ensure we can't overread the packet */
1311  if (buf_size < 8 + s->decoding_map_size + video_data_size + s->skip_map_size) {
1312  av_log(avctx, AV_LOG_ERROR, "Invalid IP packet size\n");
1313  return AVERROR_INVALIDDATA;
1314  }
1315 
1316  if ((ret = ff_get_buffer(avctx, frame, AV_GET_BUFFER_FLAG_REF)) < 0)
1317  return ret;
1318 
1319  if (!s->is_16bpp) {
1320  frame->palette_has_changed = ff_copy_palette(s->pal, avpkt, avctx);
1321  }
1322 
1323  switch (frame_format) {
1324  case 0x06:
1326  break;
1327  case 0x10:
1329  break;
1330  case 0x11:
1332  break;
1333  }
1334 
1335  *got_frame = send_buffer;
1336 
1337  /* shuffle frames */
1338  av_frame_unref(s->second_last_frame);
1339  FFSWAP(AVFrame*, s->second_last_frame, s->last_frame);
1340  if ((ret = av_frame_ref(s->last_frame, frame)) < 0)
1341  return ret;
1342 
1343  /* report that the buffer was completely consumed */
1344  return buf_size;
1345 }
1346 
1348 {
1349  IpvideoContext *s = avctx->priv_data;
1350 
1351  av_frame_free(&s->last_frame);
1352  av_frame_free(&s->second_last_frame);
1353  av_frame_free(&s->cur_decode_frame);
1354  av_frame_free(&s->prev_decode_frame);
1355 
1356  return 0;
1357 }
1358 
1360  .p.name = "interplayvideo",
1361  .p.long_name = NULL_IF_CONFIG_SMALL("Interplay MVE video"),
1362  .p.type = AVMEDIA_TYPE_VIDEO,
1364  .priv_data_size = sizeof(IpvideoContext),
1366  .close = ipvideo_decode_end,
1368  .p.capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_PARAM_CHANGE,
1370 };
IpvideoContext::decoding_map
const unsigned char * decoding_map
Definition: interplayvideo.c:65
ipvideo_decode_init
static av_cold int ipvideo_decode_init(AVCodecContext *avctx)
Definition: interplayvideo.c:1162
IpvideoContext::is_16bpp
int is_16bpp
Definition: interplayvideo.c:70
ipvideo_decode_block_opcode_0xB
static int ipvideo_decode_block_opcode_0xB(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:468
FF_CODEC_CAP_INIT_CLEANUP
#define FF_CODEC_CAP_INIT_CLEANUP
The codec allows calling the close function for deallocation even if the init function returned a fai...
Definition: codec_internal.h:39
ipvideo_decode_block_opcode_0x3
static int ipvideo_decode_block_opcode_0x3(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:141
IpvideoContext::skip_map_size
int skip_map_size
Definition: interplayvideo.c:68
get_bits_left
static int get_bits_left(GetBitContext *gb)
Definition: get_bits.h:850
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
ipvideo_format_10_passes
static void(*const ipvideo_format_10_passes[])(IpvideoContext *s, AVFrame *frame, int16_t op)
Definition: interplayvideo.c:1021
GetByteContext
Definition: bytestream.h:33
AV_PKT_DATA_PARAM_CHANGE
@ AV_PKT_DATA_PARAM_CHANGE
An AV_PKT_DATA_PARAM_CHANGE side data packet is laid out as follows:
Definition: packet.h:73
ipvideo_decode_block_opcode_0x2
static int ipvideo_decode_block_opcode_0x2(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:117
ipvideo_decode_block_opcode_0xC
static int ipvideo_decode_block_opcode_0xC(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:482
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:111
ipvideo_decode_block_opcode_0x9
static int ipvideo_decode_block_opcode_0x9(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:332
bytestream2_seek
static av_always_inline int bytestream2_seek(GetByteContext *g, int offset, int whence)
Definition: bytestream.h:212
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:325
AVFrame::width
int width
Definition: frame.h:397
internal.h
AVPacket::data
uint8_t * data
Definition: packet.h:374
IpvideoContext::mv_ptr
GetByteContext mv_ptr
Definition: interplayvideo.c:71
IpvideoContext::cur_decode_frame
AVFrame * cur_decode_frame
Definition: interplayvideo.c:62
FFCodec
Definition: codec_internal.h:112
ipvideo_format_10_secondpass
static void ipvideo_format_10_secondpass(IpvideoContext *s, AVFrame *frame, int16_t opcode)
Definition: interplayvideo.c:1006
AV_RL8
#define AV_RL8(x)
Definition: intreadwrite.h:398
init_get_bits
static int init_get_bits(GetBitContext *s, const uint8_t *buffer, int bit_size)
Initialize GetBitContext.
Definition: get_bits.h:660
AVFrame::data
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:346
init
static int init
Definition: av_tx.c:47
ipvideo_format_06_passes
static void(*const ipvideo_format_06_passes[])(IpvideoContext *s, AVFrame *frame, int16_t op)
Definition: interplayvideo.c:951
ipvideo_decode_block
static int(*const ipvideo_decode_block[])(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:898
ipvideo_decode_block_opcode_0x7_16
static int ipvideo_decode_block_opcode_0x7_16(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:576
bytestream2_skip
static av_always_inline void bytestream2_skip(GetByteContext *g, unsigned int size)
Definition: bytestream.h:168
get_bits
static unsigned int get_bits(GetBitContext *s, int n)
Read 1-25 bits.
Definition: get_bits.h:380
FFCodec::p
AVCodec p
The public AVCodec.
Definition: codec_internal.h:116
GetBitContext
Definition: get_bits.h:62
BL
#define BL(type, name)
Definition: vf_shear.c:161
AVCodecContext::flags
int flags
AV_CODEC_FLAG_*.
Definition: avcodec.h:469
ipvideo_decode_format_10_opcodes
static void ipvideo_decode_format_10_opcodes(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:1025
av_frame_alloc
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
Definition: frame.c:99
IpvideoContext::prev_decode_frame
AVFrame * prev_decode_frame
Definition: interplayvideo.c:63
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:180
av_cold
#define av_cold
Definition: attributes.h:90
ipvideo_decode_block_opcode_0x8_16
static int ipvideo_decode_block_opcode_0x8_16(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:613
ipvideo_decode_block_opcode_0x4
static int ipvideo_decode_block_opcode_0x4(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:167
width
#define width
FF_CODEC_DECODE_CB
#define FF_CODEC_DECODE_CB(func)
Definition: codec_internal.h:254
intreadwrite.h
s
#define s(width, name)
Definition: cbs_vp9.c:256
IpvideoContext::skip_map
const unsigned char * skip_map
Definition: interplayvideo.c:67
ipvideo_decode_frame
static int ipvideo_decode_frame(AVCodecContext *avctx, AVFrame *frame, int *got_frame, AVPacket *avpkt)
Definition: interplayvideo.c:1192
AV_GET_BUFFER_FLAG_REF
#define AV_GET_BUFFER_FLAG_REF
The decoder will keep a reference to the frame and may reuse it later.
Definition: avcodec.h:367
op
static int op(uint8_t **dst, const uint8_t *dst_end, GetByteContext *gb, int pixel, int count, int *x, int width, int linesize)
Perform decode operation.
Definition: anm.c:76
AV_LOG_DEBUG
#define AV_LOG_DEBUG
Stuff which is only useful for libav* developers.
Definition: log.h:201
decode.h
get_bits.h
AV_RL16
uint64_t_TMPL AV_WL64 unsigned int_TMPL AV_WL32 unsigned int_TMPL AV_WL24 unsigned int_TMPL AV_RL16
Definition: bytestream.h:94
IpvideoContext::decoding_map_size
int decoding_map_size
Definition: interplayvideo.c:66
IpvideoContext::upper_motion_limit_offset
int upper_motion_limit_offset
Definition: interplayvideo.c:75
ipvideo_decode_block_opcode_0x6
static int ipvideo_decode_block_opcode_0x6(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:201
ipvideo_decode_block_opcode_0x9_16
static int ipvideo_decode_block_opcode_0x9_16(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:689
IpvideoContext::stream_ptr
GetByteContext stream_ptr
Definition: interplayvideo.c:71
pass
#define pass
Definition: fft_template.c:608
ff_hpeldsp_init
av_cold void ff_hpeldsp_init(HpelDSPContext *c, int flags)
Definition: hpeldsp.c:338
ipvideo_decode_block_opcode_0xA
static int ipvideo_decode_block_opcode_0xA(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:402
ipvideo_decode_block_opcode_0x1
static int ipvideo_decode_block_opcode_0x1(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:112
if
if(ret)
Definition: filter_design.txt:179
ipvideo_format_10_firstpass
static void ipvideo_format_10_firstpass(IpvideoContext *s, AVFrame *frame, int16_t opcode)
Definition: interplayvideo.c:994
NULL
#define NULL
Definition: coverity.c:32
AV_CODEC_ID_INTERPLAY_VIDEO
@ AV_CODEC_ID_INTERPLAY_VIDEO
Definition: codec_id.h:89
IpvideoContext::stride
int stride
Definition: interplayvideo.c:74
ipvideo_decode_block_opcode_0x6_16
static int ipvideo_decode_block_opcode_0x6_16(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:564
AVPALETTE_SIZE
#define AVPALETTE_SIZE
Definition: pixfmt.h:32
ipvideo_decode_format_11_opcodes
static void ipvideo_decode_format_11_opcodes(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:1106
bytestream2_get_buffer
static av_always_inline unsigned int bytestream2_get_buffer(GetByteContext *g, uint8_t *dst, unsigned int size)
Definition: bytestream.h:267
ipvideo_format_06_firstpass
static void ipvideo_format_06_firstpass(IpvideoContext *s, AVFrame *frame, int16_t opcode)
Definition: interplayvideo.c:920
ipvideo_decode_block_opcode_0xE
static int ipvideo_decode_block_opcode_0xE(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:526
bytestream2_get_bytes_left
static av_always_inline int bytestream2_get_bytes_left(GetByteContext *g)
Definition: bytestream.h:158
bytestream2_tell
static av_always_inline int bytestream2_tell(GetByteContext *g)
Definition: bytestream.h:192
ipvideo_decode_end
static av_cold int ipvideo_decode_end(AVCodecContext *avctx)
Definition: interplayvideo.c:1347
IpvideoContext::second_last_frame
AVFrame * second_last_frame
Definition: interplayvideo.c:58
IpvideoContext::hdsp
HpelDSPContext hdsp
Definition: interplayvideo.c:57
HpelDSPContext
Half-pel DSP context.
Definition: hpeldsp.h:45
ff_get_buffer
int ff_get_buffer(AVCodecContext *avctx, AVFrame *frame, int flags)
Get a buffer for a frame.
Definition: decode.c:1403
AV_CODEC_CAP_DR1
#define AV_CODEC_CAP_DR1
Codec uses get_buffer() or get_encode_buffer() for allocating buffers and supports custom allocators.
Definition: codec.h:52
ipvideo_decode_block_opcode_0x0
static int ipvideo_decode_block_opcode_0x0(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:107
ipvideo_decode_block_opcode_0x8
static int ipvideo_decode_block_opcode_0x8(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:254
AVPacket::size
int size
Definition: packet.h:375
NULL_IF_CONFIG_SMALL
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification.
Definition: internal.h:117
av_frame_ref
int av_frame_ref(AVFrame *dst, const AVFrame *src)
Set up a new reference to the data described by the source frame.
Definition: frame.c:343
codec_internal.h
P
#define P
for
for(k=2;k<=8;++k)
Definition: h264pred_template.c:425
sample
#define sample
Definition: flacdsp_template.c:44
ipvideo_format_06_secondpass
static void ipvideo_format_06_secondpass(IpvideoContext *s, AVFrame *frame, int16_t opcode)
Definition: interplayvideo.c:936
line
Definition: graph2dot.c:48
ipvideo_decode_block_opcode_0x5
static int ipvideo_decode_block_opcode_0x5(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:188
ipvideo_decode_block_opcode_0xB_16
static int ipvideo_decode_block_opcode_0xB_16(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:822
AVCodecContext::bits_per_coded_sample
int bits_per_coded_sample
bits per sample/pixel from the demuxer (needed for huffyuv).
Definition: avcodec.h:1441
copy_from
static int copy_from(IpvideoContext *s, AVFrame *src, AVFrame *dst, int delta_x, int delta_y)
Definition: interplayvideo.c:80
av_packet_get_side_data
uint8_t * av_packet_get_side_data(const AVPacket *pkt, enum AVPacketSideDataType type, size_t *size)
Get side information from packet.
Definition: avpacket.c:251
ipvideo_decode_block_opcode_0xC_16
static int ipvideo_decode_block_opcode_0xC_16(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:838
AV_PIX_FMT_RGB555
#define AV_PIX_FMT_RGB555
Definition: pixfmt.h:394
FF_CODEC_CAP_INIT_THREADSAFE
#define FF_CODEC_CAP_INIT_THREADSAFE
The codec does not modify any global variables in the init function, allowing to call the init functi...
Definition: codec_internal.h:31
av_frame_unref
void av_frame_unref(AVFrame *frame)
Unreference all the buffers referenced by frame and reset the frame fields.
Definition: frame.c:477
AVCodec::name
const char * name
Name of the codec implementation.
Definition: codec.h:203
IpvideoContext::avctx
AVCodecContext * avctx
Definition: interplayvideo.c:56
AVCodecContext::height
int height
Definition: avcodec.h:562
AVCodecContext::pix_fmt
enum AVPixelFormat pix_fmt
Pixel format, see AV_PIX_FMT_xxx.
Definition: avcodec.h:599
ipvideo_decode_block_opcode_0xF
static int ipvideo_decode_block_opcode_0xF(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:543
avcodec.h
AV_PIX_FMT_PAL8
@ AV_PIX_FMT_PAL8
8 bits with AV_PIX_FMT_RGB32 palette
Definition: pixfmt.h:77
ret
ret
Definition: filter_design.txt:187
FFSWAP
#define FFSWAP(type, a, b)
Definition: macros.h:52
frame
these buffered frames must be flushed immediately if a new input produces new the filter must not call request_frame to get more It must just process the frame or queue it The task of requesting more frames is left to the filter s request_frame method or the application If a filter has several the filter must be ready for frames arriving randomly on any input any filter with several inputs will most likely require some kind of queuing mechanism It is perfectly acceptable to have a limited queue and to drop frames when the inputs are too unbalanced request_frame For filters that do not use the this method is called when a frame is wanted on an output For a it should directly call filter_frame on the corresponding output For a if there are queued frames already one of these frames should be pushed If the filter should request a frame on one of its repeatedly until at least one frame has been pushed Return or at least make progress towards producing a frame
Definition: filter_design.txt:264
B
#define B
Definition: huffyuvdsp.h:32
AVCodecContext
main external API structure.
Definition: avcodec.h:389
ipvideo_decode_block_opcode_0x7
static int ipvideo_decode_block_opcode_0x7(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:210
ipvideo_decode_format_06_opcodes
static void ipvideo_decode_format_06_opcodes(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:955
IpvideoContext::pal
uint32_t pal[256]
Definition: interplayvideo.c:77
AV_CODEC_CAP_PARAM_CHANGE
#define AV_CODEC_CAP_PARAM_CHANGE
Codec supports changed parameters at any point.
Definition: codec.h:121
AVMEDIA_TYPE_VIDEO
@ AVMEDIA_TYPE_VIDEO
Definition: avutil.h:201
ff_interplay_video_decoder
const FFCodec ff_interplay_video_decoder
Definition: interplayvideo.c:1359
ipvideo_decode_block_opcode_0xE_16
static int ipvideo_decode_block_opcode_0xE_16(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:879
IpvideoContext::last_frame
AVFrame * last_frame
Definition: interplayvideo.c:59
ff_tlog
#define ff_tlog(ctx,...)
Definition: internal.h:207
AVPacket
This structure stores compressed data.
Definition: packet.h:351
AVCodecContext::priv_data
void * priv_data
Definition: avcodec.h:416
src
INIT_CLIP pixel * src
Definition: h264pred_template.c:418
ipvideo_decode_block16
static int(*const ipvideo_decode_block16[])(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:909
AVCodecContext::width
int width
picture width / height.
Definition: avcodec.h:562
bytestream.h
hpeldsp.h
bytestream2_init
static av_always_inline void bytestream2_init(GetByteContext *g, const uint8_t *buf, int buf_size)
Definition: bytestream.h:137
flags
#define flags(name, subs,...)
Definition: cbs_av1.c:561
AVFrame::linesize
int linesize[AV_NUM_DATA_POINTERS]
For video, a positive or negative value, which is typically indicating the size in bytes of each pict...
Definition: frame.h:370
ipvideo_decode_block_opcode_0xD
static int ipvideo_decode_block_opcode_0xD(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:501
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:27
AVERROR_INVALIDDATA
#define AVERROR_INVALIDDATA
Invalid data found when processing input.
Definition: error.h:61
IpvideoContext::line_inc
int line_inc
Definition: interplayvideo.c:73
IpvideoContext::pixel_ptr
unsigned char * pixel_ptr
Definition: interplayvideo.c:72
ff_copy_palette
int ff_copy_palette(void *dst, const AVPacket *src, void *logctx)
Check whether the side-data of src contains a palette of size AVPALETTE_SIZE; if so,...
Definition: decode.c:1624
ipvideo_decode_block_opcode_0xD_16
static int ipvideo_decode_block_opcode_0xD_16(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:858
IpvideoContext
Definition: interplayvideo.c:54
int
int
Definition: ffmpeg_filter.c:153
line
The official guide to swscale for confused that consecutive non overlapping rectangles of slice_bottom special converter These generally are unscaled converters of common like for each output line the vertical scaler pulls lines from a ring buffer When the ring buffer does not contain the wanted line
Definition: swscale.txt:40
ipvideo_decode_block_opcode_0xA_16
static int ipvideo_decode_block_opcode_0xA_16(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:756