FFmpeg
v210dec.c
1 /*
2  * V210 decoder
3  *
4  * Copyright (C) 2009 Michael Niedermayer <michaelni@gmx.at>
5  * Copyright (c) 2009 Baptiste Coudurier <baptiste dot coudurier at gmail dot com>
6  *
7  * This file is part of FFmpeg.
8  *
9  * FFmpeg is free software; you can redistribute it and/or
10  * modify it under the terms of the GNU Lesser General Public
11  * License as published by the Free Software Foundation; either
12  * version 2.1 of the License, or (at your option) any later version.
13  *
14  * FFmpeg is distributed in the hope that it will be useful,
15  * but WITHOUT ANY WARRANTY; without even the implied warranty of
16  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
17  * Lesser General Public License for more details.
18  *
19  * You should have received a copy of the GNU Lesser General Public
20  * License along with FFmpeg; if not, write to the Free Software
21  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
22  */
23 
24 #include "avcodec.h"
25 #include "codec_internal.h"
26 #include "v210dec.h"
27 #include "v210dec_init.h"
28 #include "libavutil/bswap.h"
29 #include "libavutil/imgutils.h"
30 #include "libavutil/internal.h"
31 #include "libavutil/intreadwrite.h"
32 #include "thread.h"
33 
34 typedef struct ThreadData {
35  AVFrame *frame;
36  const uint8_t *buf;
37  int stride;
38 } ThreadData;
39 
40 static av_cold int decode_init(AVCodecContext *avctx)
41 {
42  V210DecContext *s = avctx->priv_data;
43 
44     avctx->pix_fmt = AV_PIX_FMT_YUV422P10;
45     avctx->bits_per_raw_sample = 10;
46 
47  s->thread_count = av_clip(avctx->thread_count, 1, avctx->height/4);
48     s->aligned_input = 0;
49     ff_v210dec_init(s);
50 
51  return 0;
52 }
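A note on the av_clip() call above: it caps the number of slice-threading jobs at height/4 (and never lets it drop below one), so each job gets a reasonable band of rows. The standalone snippet below is only an illustration, not part of the file; the heights and requested thread counts are made up.

    #include <stdio.h>

    /* Same clamp as av_clip(thread_count, 1, height / 4) in decode_init(). */
    static int clamp(int v, int lo, int hi) { return v < lo ? lo : v > hi ? hi : v; }

    int main(void)
    {
        printf("%d\n", clamp(16, 1, 1080 / 4)); /* 1080 rows, 16 threads requested -> 16 */
        printf("%d\n", clamp(16, 1,   32 / 4)); /* 32 rows,   16 threads requested -> 8  */
        return 0;
    }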
53 
54 static void decode_row(const uint32_t *src, uint16_t *y, uint16_t *u, uint16_t *v, const int width,
55  void (*unpack_frame)(const uint32_t *src, uint16_t *y, uint16_t *u, uint16_t *v, int width))
56 {
57  uint32_t val;
58  int w = (FFMAX(0, width - 12) / 12) * 12;
59 
60  unpack_frame(src, y, u, v, w);
61 
62  y += w;
63  u += w >> 1;
64  v += w >> 1;
65  src += (w << 1) / 3;
66 
67  while (w < width - 5) {
68  READ_PIXELS(u, y, v);
69  READ_PIXELS(y, u, y);
70  READ_PIXELS(v, y, u);
71  READ_PIXELS(y, v, y);
72  w += 6;
73  }
74 
75  if (w++ < width) {
76  READ_PIXELS(u, y, v);
77 
78  if (w++ < width) {
79  val = av_le2ne32(*src++);
80  *y++ = val & 0x3FF;
81 
82  if (w++ < width) {
83  *u++ = (val >> 10) & 0x3FF;
84  *y++ = (val >> 20) & 0x3FF;
85  val = av_le2ne32(*src++);
86  *v++ = val & 0x3FF;
87 
88  if (w++ < width) {
89  *y++ = (val >> 10) & 0x3FF;
90 
91  if (w++ < width) {
92  *u++ = (val >> 20) & 0x3FF;
93  val = av_le2ne32(*src++);
94  *y++ = val & 0x3FF;
95  *v++ = (val >> 10) & 0x3FF;
96 
97  if (w++ < width)
98  *y++ = (val >> 20) & 0x3FF;
99  }
100  }
101  }
102  }
103  }
104 }
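In the packing handled by decode_row() and READ_PIXELS, each 32-bit little-endian word carries three 10-bit components (bits 0-9, 10-19, 20-29), and a group of four words covers six 4:2:2 pixels in the order U Y V, Y U Y, V Y U, Y V Y. The standalone sketch below is illustrative only (the helper names are invented); it unpacks one such 16-byte group, and with every field set to 512 it prints mid-range values for all three planes.

    #include <stdint.h>
    #include <stdio.h>

    static uint32_t rd_le32(const uint8_t *p)
    {
        return (uint32_t)p[0] | (uint32_t)p[1] << 8 |
               (uint32_t)p[2] << 16 | (uint32_t)p[3] << 24;
    }

    /* Unpack one 16-byte v210 group into 6 luma and 3+3 chroma samples,
     * mirroring the READ_PIXELS ordering used in decode_row() above. */
    static void unpack_v210_group(const uint8_t *src, uint16_t y[6],
                                  uint16_t u[3], uint16_t v[3])
    {
        uint32_t w0 = rd_le32(src +  0); /* U0 Y0 V0 */
        uint32_t w1 = rd_le32(src +  4); /* Y1 U1 Y2 */
        uint32_t w2 = rd_le32(src +  8); /* V1 Y3 U2 */
        uint32_t w3 = rd_le32(src + 12); /* Y4 V2 Y5 */

        u[0] = w0 & 0x3FF; y[0] = (w0 >> 10) & 0x3FF; v[0] = (w0 >> 20) & 0x3FF;
        y[1] = w1 & 0x3FF; u[1] = (w1 >> 10) & 0x3FF; y[2] = (w1 >> 20) & 0x3FF;
        v[1] = w2 & 0x3FF; y[3] = (w2 >> 10) & 0x3FF; u[2] = (w2 >> 20) & 0x3FF;
        y[4] = w3 & 0x3FF; v[2] = (w3 >> 10) & 0x3FF; y[5] = (w3 >> 20) & 0x3FF;
    }

    int main(void)
    {
        /* Four words of 0x20080200: all 10-bit fields equal 512. */
        const uint8_t group[16] = { 0x00, 0x02, 0x08, 0x20, 0x00, 0x02, 0x08, 0x20,
                                    0x00, 0x02, 0x08, 0x20, 0x00, 0x02, 0x08, 0x20 };
        uint16_t y[6], u[3], v[3];
        unpack_v210_group(group, y, u, v);
        printf("Y0=%d U0=%d V0=%d\n", y[0], u[0], v[0]);  /* 512 512 512 */
        return 0;
    }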
105 
106 static int v210_decode_slice(AVCodecContext *avctx, void *arg, int jobnr, int threadnr)
107 {
108  V210DecContext *s = avctx->priv_data;
109  ThreadData *td = arg;
110  AVFrame *frame = td->frame;
111  int stride = td->stride;
112  int slice_start = (avctx->height * jobnr) / s->thread_count;
113  int slice_end = (avctx->height * (jobnr+1)) / s->thread_count;
114  const uint8_t *psrc = td->buf + stride * slice_start;
115     uint16_t *py = (uint16_t*)frame->data[0] + slice_start * frame->linesize[0] / 2;
116     uint16_t *pu = (uint16_t*)frame->data[1] + slice_start * frame->linesize[1] / 2;
117     uint16_t *pv = (uint16_t*)frame->data[2] + slice_start * frame->linesize[2] / 2;
118 
119  for (int h = slice_start; h < slice_end; h++) {
120  decode_row((const uint32_t *)psrc, py, pu, pv, avctx->width, s->unpack_frame);
121  psrc += stride;
122  py += frame->linesize[0] / 2;
123  pu += frame->linesize[1] / 2;
124  pv += frame->linesize[2] / 2;
125  }
126 
127  return 0;
128 }
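The slice bounds above divide the picture rows as evenly as integer arithmetic allows, one contiguous band of rows per job. A minimal standalone illustration, with arbitrarily chosen values:

    #include <stdio.h>

    int main(void)
    {
        int height = 1080, thread_count = 4;   /* hypothetical example values */
        for (int jobnr = 0; jobnr < thread_count; jobnr++) {
            int slice_start = (height * jobnr)       / thread_count;
            int slice_end   = (height * (jobnr + 1)) / thread_count;
            printf("job %d: rows [%d, %d)\n", jobnr, slice_start, slice_end);
        }
        return 0;   /* prints [0,270) [270,540) [540,810) [810,1080) */
    }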
129 
130 static int v210_stride(int width, int align) {
131  int aligned_width = ((width + align - 1) / align) * align;
132  return aligned_width * 8 / 3;
133 }
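v210_stride() rounds the width up to a multiple of `align` pixels and converts pixels to bytes at 16 bytes per 6 pixels (a factor of 8/3). A standalone worked example, re-declaring the same helper purely for illustration:

    #include <stdio.h>

    static int v210_stride(int width, int align)
    {
        int aligned_width = ((width + align - 1) / align) * align;
        return aligned_width * 8 / 3;              /* 6 pixels -> 16 bytes */
    }

    int main(void)
    {
        printf("%d\n", v210_stride(1920, 48));     /* already a multiple of 48: 5120 bytes */
        printf("%d\n", v210_stride(1280, 48));     /* rounds up to 1296:        3456 bytes */
        return 0;
    }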
134 
135 static int decode_frame(AVCodecContext *avctx, AVFrame *pic,
136  int *got_frame, AVPacket *avpkt)
137 {
138  V210DecContext *s = avctx->priv_data;
139  ThreadData td;
140  int ret, stride, aligned_input;
141  const uint8_t *psrc = avpkt->data;
142 
143  if (s->custom_stride)
144  stride = s->custom_stride > 0 ? s->custom_stride : 0;
145  else {
146  stride = v210_stride(avctx->width, 48);
147  if (avpkt->size < stride * avctx->height) {
148  int align;
149  for (align = 24; align >= 6; align >>= 1) {
150  int small_stride = v210_stride(avctx->width, align);
151  if (avpkt->size == small_stride * avctx->height) {
152  stride = small_stride;
153  if (!s->stride_warning_shown)
154  av_log(avctx, AV_LOG_WARNING, "Broken v210 with too small padding (%d byte) detected\n", align * 8 / 3);
155  s->stride_warning_shown = 1;
156  break;
157  }
158  }
159  if (align < 6 && avctx->codec_tag == MKTAG('b', 'x', 'y', '2'))
160  stride = 0;
161  }
162  }
163 
164  if (stride == 0 && ((avctx->width & 1) || (int64_t)avctx->width * avctx->height > INT_MAX / 6)) {
165  av_log(avctx, AV_LOG_ERROR, "Strideless v210 is not supported for size %dx%d\n", avctx->width, avctx->height);
166  return AVERROR_INVALIDDATA;
167  }
168 
169  if (stride > 0 && avpkt->size < (int64_t)stride * avctx->height ||
170  stride == 0 && avpkt->size < v210_stride(avctx->width * avctx->height, 6)) {
171  av_log(avctx, AV_LOG_ERROR, "packet too small\n");
172  return AVERROR_INVALIDDATA;
173  }
174  if ( avctx->codec_tag == MKTAG('C', '2', '1', '0')
175  && avpkt->size > 64
176  && AV_RN32(psrc) == AV_RN32("INFO")
177  && avpkt->size - 64 >= stride * avctx->height)
178  psrc += 64;
179 
180  aligned_input = !((uintptr_t)psrc & 0x1f) && !(stride & 0x1f);
181  if (aligned_input != s->aligned_input) {
182  s->aligned_input = aligned_input;
183         ff_v210dec_init(s);
184     }
185 
186  if ((ret = ff_thread_get_buffer(avctx, pic, 0)) < 0)
187  return ret;
188 
189     pic->pict_type = AV_PICTURE_TYPE_I;
190     pic->flags |= AV_FRAME_FLAG_KEY;
191 
192  if (stride) {
193  td.stride = stride;
194  td.buf = psrc;
195  td.frame = pic;
196  avctx->execute2(avctx, v210_decode_slice, &td, NULL, s->thread_count);
197  } else {
198  uint8_t *pointers[4];
199  int linesizes[4];
200  int ret = av_image_alloc(pointers, linesizes, avctx->width, avctx->height, avctx->pix_fmt, 1);
201  if (ret < 0)
202  return ret;
203  decode_row((const uint32_t *)psrc, (uint16_t *)pointers[0], (uint16_t *)pointers[1], (uint16_t *)pointers[2], avctx->width * avctx->height, s->unpack_frame);
204  av_image_copy2(pic->data, pic->linesize, pointers, linesizes,
205  avctx->pix_fmt, avctx->width, avctx->height);
206  av_freep(&pointers[0]);
207  }
208 
209     if (avctx->field_order > AV_FIELD_PROGRESSIVE) {
210         /* we have interlaced material flagged in container */
211         pic->flags |= AV_FRAME_FLAG_INTERLACED;
212         if (avctx->field_order == AV_FIELD_TT || avctx->field_order == AV_FIELD_TB)
213             pic->flags |= AV_FRAME_FLAG_TOP_FIELD_FIRST;
214     }
215 
216  *got_frame = 1;
217 
218  return avpkt->size;
219 }
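For context, here is a minimal sketch of driving this decoder through the public libavcodec API; error paths are trimmed and the wrapper function name is invented for the example. Width and height must be supplied by the caller, since raw v210 packets carry no header.

    #include <libavcodec/avcodec.h>

    static int decode_one_v210_frame(const uint8_t *buf, int size,
                                     int width, int height, AVFrame *out)
    {
        const AVCodec *codec = avcodec_find_decoder(AV_CODEC_ID_V210);
        AVCodecContext *ctx  = avcodec_alloc_context3(codec);
        AVPacket *pkt        = av_packet_alloc();
        int ret;

        ctx->width  = width;
        ctx->height = height;
        if ((ret = avcodec_open2(ctx, codec, NULL)) < 0)
            goto end;

        pkt->data = (uint8_t *)buf;   /* packet borrows the caller's buffer */
        pkt->size = size;
        if ((ret = avcodec_send_packet(ctx, pkt)) < 0)
            goto end;
        ret = avcodec_receive_frame(ctx, out);  /* out->format: AV_PIX_FMT_YUV422P10 */

    end:
        av_packet_free(&pkt);
        avcodec_free_context(&ctx);
        return ret;
    }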
220 
221 #define V210DEC_FLAGS AV_OPT_FLAG_DECODING_PARAM | AV_OPT_FLAG_VIDEO_PARAM
222 static const AVOption v210dec_options[] = {
223  {"custom_stride", "Custom V210 stride", offsetof(V210DecContext, custom_stride), AV_OPT_TYPE_INT,
224  {.i64 = 0}, -1, INT_MAX, V210DEC_FLAGS},
225  {NULL}
226 };
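The single private option above can be set through an options dictionary at open time. A hedged sketch follows; the wrapper name and stride value are illustrative, and only public libavcodec/libavutil calls are used.

    #include <libavcodec/avcodec.h>
    #include <libavutil/dict.h>

    static int open_v210_with_stride(AVCodecContext *ctx, int stride_bytes)
    {
        const AVCodec *codec = avcodec_find_decoder(AV_CODEC_ID_V210);
        AVDictionary *opts   = NULL;
        int ret;

        /* A positive value forces that byte stride; -1 selects strideless mode. */
        av_dict_set_int(&opts, "custom_stride", stride_bytes, 0);
        ret = avcodec_open2(ctx, codec, &opts);
        av_dict_free(&opts);   /* anything left in opts was not recognized */
        return ret;
    }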
227 
228 static const AVClass v210dec_class = {
229  .class_name = "V210 Decoder",
230  .item_name = av_default_item_name,
231  .option = v210dec_options,
232  .version = LIBAVUTIL_VERSION_INT,
233 };
234 
235 const FFCodec ff_v210_decoder = {
236     .p.name = "v210",
237     CODEC_LONG_NAME("Uncompressed 4:2:2 10-bit"),
238     .p.type = AVMEDIA_TYPE_VIDEO,
239     .p.id = AV_CODEC_ID_V210,
240     .priv_data_size = sizeof(V210DecContext),
241     .init = decode_init,
242     FF_CODEC_DECODE_CB(decode_frame),
243     .p.capabilities = AV_CODEC_CAP_DR1 |
244                       AV_CODEC_CAP_SLICE_THREADS |
245                       AV_CODEC_CAP_FRAME_THREADS,
246     .p.priv_class = &v210dec_class,
247 };