#define PALETTE_COUNT 256

#define CHECK_STREAM_PTR(n) \
    if ((stream_ptr + n) > s->size) { \
        av_log(s->avctx, AV_LOG_ERROR, \
               " MS Video-1 warning: stream_ptr out of bounds (%d >= %d)\n", \
               stream_ptr + n, s->size); \
        return; \
    }

typedef struct Msvideo1Context {
    AVCodecContext *avctx;
    AVFrame *frame;
    const unsigned char *buf;   /* compressed frame data */
    int size;                   /* size of the compressed frame */
} Msvideo1Context;
static void msvideo1_decode_8bit(Msvideo1Context *s)
{
    int block_ptr, pixel_ptr;
    int total_blocks;
    int pixel_x, pixel_y;          /* pixel width and height iterators */
    int block_x, block_y;          /* block width and height iterators */
    int blocks_wide, blocks_high;  /* width and height in 4x4 blocks */
    int block_inc;
    int row_dec;

    /* decoding parameters */
    int stream_ptr;
    unsigned char byte_a, byte_b;
    unsigned short flags;
    int skip_blocks;
    unsigned char colors[8];
    unsigned char *pixels = s->frame->data[0];
    int stride = s->frame->linesize[0];

    stream_ptr   = 0;
    skip_blocks  = 0;
    blocks_wide  = s->avctx->width  / 4;
    blocks_high  = s->avctx->height / 4;
    total_blocks = blocks_wide * blocks_high;
    block_inc    = 4;
    row_dec      = stride + 4;

    /* blocks are stored bottom-to-top, so start at the last pixel row of
     * the bottom block row and work upward through the frame */
    for (block_y = blocks_high; block_y > 0; block_y--) {
        block_ptr = ((block_y * 4) - 1) * stride;
        for (block_x = blocks_wide; block_x > 0; block_x--) {
            /* check if this block should be skipped */
            if (skip_blocks) {
                block_ptr += block_inc;
                skip_blocks--;
                total_blocks--;
                continue;
            }

            pixel_ptr = block_ptr;

            /* get the next two bytes in the encoded data stream */
            CHECK_STREAM_PTR(2);
            byte_a = s->buf[stream_ptr++];
            byte_b = s->buf[stream_ptr++];
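            /* byte_b selects the block coding: values below 0x80 give a
             * 2-color block (byte_a/byte_b form a 16-bit pixel flags word),
             * 0x84-0x87 introduce a run of skipped blocks, 0x90 and above
             * give an 8-color block, and the remaining values give a
             * 1-color block */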
            /* check if the decode is finished */
            if ((byte_a == 0) && (byte_b == 0) && (total_blocks == 0))
                return;
            else if ((byte_b & 0xFC) == 0x84) {
                /* skip code, but don't count the current block */
                skip_blocks = ((byte_b - 0x84) << 8) + byte_a - 1;
            } else if (byte_b < 0x80) {
                /* 2-color encoding */
                flags = (byte_b << 8) | byte_a;

                CHECK_STREAM_PTR(2);
                colors[0] = s->buf[stream_ptr++];
                colors[1] = s->buf[stream_ptr++];

                /* each flag bit selects one of the two colors for one pixel
                 * of the 4x4 block, LSB first */
                for (pixel_y = 0; pixel_y < 4; pixel_y++) {
                    for (pixel_x = 0; pixel_x < 4; pixel_x++, flags >>= 1)
                        pixels[pixel_ptr++] = colors[(flags & 0x1) ^ 1];
                    pixel_ptr -= row_dec;
                }
            } else if (byte_b >= 0x90) {
                /* 8-color encoding: one color pair per 2x2 quadrant */
                flags = (byte_b << 8) | byte_a;

                CHECK_STREAM_PTR(8);
                memcpy(colors, &s->buf[stream_ptr], 8);
                stream_ptr += 8;

                for (pixel_y = 0; pixel_y < 4; pixel_y++) {
                    for (pixel_x = 0; pixel_x < 4; pixel_x++, flags >>= 1)
                        pixels[pixel_ptr++] =
                            colors[((pixel_y & 0x2) << 1) +
                                   (pixel_x & 0x2) + ((flags & 0x1) ^ 1)];
                    pixel_ptr -= row_dec;
                }
            } else {
                /* otherwise, it's a 1-color block */
                colors[0] = byte_a;

                for (pixel_y = 0; pixel_y < 4; pixel_y++) {
                    for (pixel_x = 0; pixel_x < 4; pixel_x++)
                        pixels[pixel_ptr++] = colors[0];
                    pixel_ptr -= row_dec;
                }
            }

            block_ptr += block_inc;
            total_blocks--;
        }
    }
}
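/* Note on the 8-color mode above: ((pixel_y & 0x2) << 1) + (pixel_x & 0x2)
 * maps each 2x2 quadrant of the 4x4 block onto its own color pair
 * (colors[0..1], [2..3], [4..5] or [6..7]), and the low flag bit picks one
 * color of that pair; e.g. pixel_x = 3, pixel_y = 2 with a set flag bit
 * reads colors[4 + 2 + 0] = colors[6]. */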

static void msvideo1_decode_16bit(Msvideo1Context *s)
{
    int block_ptr, pixel_ptr;
    int total_blocks;
    int pixel_x, pixel_y;          /* pixel width and height iterators */
    int block_x, block_y;          /* block width and height iterators */
    int blocks_wide, blocks_high;  /* width and height in 4x4 blocks */
    int block_inc;
    int row_dec;

    /* decoding parameters */
    int stream_ptr;
    unsigned char byte_a, byte_b;
    unsigned short flags;
    int skip_blocks;
    unsigned short colors[8];
    unsigned short *pixels = (unsigned short *)s->frame->data[0];
    int stride = s->frame->linesize[0] / 2;   /* line size in 16-bit pixels */

    stream_ptr   = 0;
    skip_blocks  = 0;
    blocks_wide  = s->avctx->width  / 4;
    blocks_high  = s->avctx->height / 4;
    total_blocks = blocks_wide * blocks_high;
    block_inc    = 4;
    row_dec      = stride + 4;

    for (block_y = blocks_high; block_y > 0; block_y--) {
        block_ptr = ((block_y * 4) - 1) * stride;
        for (block_x = blocks_wide; block_x > 0; block_x--) {
            /* check if this block should be skipped */
            if (skip_blocks) {
                block_ptr += block_inc;
                skip_blocks--;
                total_blocks--;
                continue;
            }

            pixel_ptr = block_ptr;

            /* get the next two bytes in the encoded data stream */
            CHECK_STREAM_PTR(2);
            byte_a = s->buf[stream_ptr++];
            byte_b = s->buf[stream_ptr++];
            /* check if the decode is finished */
            if ((byte_a == 0) && (byte_b == 0) && (total_blocks == 0)) {
                return;
            } else if ((byte_b & 0xFC) == 0x84) {
                /* skip code, but don't count the current block */
                skip_blocks = ((byte_b - 0x84) << 8) + byte_a - 1;
            } else if (byte_b < 0x80) {
                /* 2- or 8-color encoding modes */
                flags = (byte_b << 8) | byte_a;

                CHECK_STREAM_PTR(4);
                colors[0] = AV_RL16(&s->buf[stream_ptr]);
                stream_ptr += 2;
                colors[1] = AV_RL16(&s->buf[stream_ptr]);
                stream_ptr += 2;

                if (colors[0] & 0x8000) {
                    /* 8-color encoding: six more 16-bit colors follow */
                    CHECK_STREAM_PTR(12);
                    colors[2] = AV_RL16(&s->buf[stream_ptr]);
                    colors[3] = AV_RL16(&s->buf[stream_ptr + 2]);
                    colors[4] = AV_RL16(&s->buf[stream_ptr + 4]);
                    colors[5] = AV_RL16(&s->buf[stream_ptr + 6]);
                    colors[6] = AV_RL16(&s->buf[stream_ptr + 8]);
                    colors[7] = AV_RL16(&s->buf[stream_ptr + 10]);
                    stream_ptr += 12;

                    for (pixel_y = 0; pixel_y < 4; pixel_y++) {
                        for (pixel_x = 0; pixel_x < 4; pixel_x++, flags >>= 1)
                            pixels[pixel_ptr++] =
                                colors[((pixel_y & 0x2) << 1) +
                                       (pixel_x & 0x2) + ((flags & 0x1) ^ 1)];
                        pixel_ptr -= row_dec;
                    }
                } else {
                    /* 2-color encoding */
                    for (pixel_y = 0; pixel_y < 4; pixel_y++) {
                        for (pixel_x = 0; pixel_x < 4; pixel_x++, flags >>= 1)
                            pixels[pixel_ptr++] = colors[(flags & 0x1) ^ 1];
                        pixel_ptr -= row_dec;
                    }
                }
            } else {
                /* otherwise, it's a 1-color block */
                colors[0] = (byte_b << 8) | byte_a;

                for (pixel_y = 0; pixel_y < 4; pixel_y++) {
                    for (pixel_x = 0; pixel_x < 4; pixel_x++)
                        pixels[pixel_ptr++] = colors[0];
                    pixel_ptr -= row_dec;
                }
            }

            block_ptr += block_inc;
            total_blocks--;
        }
    }
}
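/* Note: the 16-bit path decodes to RGB555, so bit 15 of a color word carries
 * no color information; the bitstream sets it on colors[0] to signal the
 * 8-color mode handled above. */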

static int msvideo1_decode_frame(AVCodecContext *avctx,
                                 void *data, int *got_frame,
                                 AVPacket *avpkt)
{
    const uint8_t *buf = avpkt->data;
    int buf_size       = avpkt->size;
    Msvideo1Context *s = avctx->priv_data;

    s->buf  = buf;
    s->size = buf_size;

    /* discard the packet if it is smaller than the minimum possible
     * encoding of a frame at these dimensions */
    if (buf_size < (avctx->width / 4) * (avctx->height / 4) / 512) {
        return AVERROR_INVALIDDATA;
    }
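    /* Worked example of the size check above: a 320x240 stream has
     * (320/4) * (240/4) = 4800 blocks, and 4800 / 512 = 9, so packets shorter
     * than 9 bytes are rejected; 512 blocks per byte roughly matches a frame
     * built entirely from 2-byte skip codes, each covering up to 1023 blocks. */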