25 #include <binder/ProcessState.h>
26 #include <media/stagefright/MetaData.h>
27 #include <media/stagefright/MediaBufferGroup.h>
28 #include <media/stagefright/MediaDebug.h>
29 #include <media/stagefright/MediaDefs.h>
30 #include <media/stagefright/OMXClient.h>
31 #include <media/stagefright/OMXCodec.h>
32 #include <utils/List.h>
42 #define OMX_QCOM_COLOR_FormatYVU420SemiPlanar 0x7FA30C00
44 using namespace android;
78 std::map<int64_t, TimeStamp> *
ts_map;
95 buf_group.add_buffer(
new MediaBuffer(
frame_size));
111 const MediaSource::ReadOptions *
options) {
115 if (
s->thread_exited)
116 return ERROR_END_OF_STREAM;
119 while (
s->in_queue->empty())
122 frame = *
s->in_queue->begin();
126 ret = buf_group.acquire_buffer(buffer);
128 memcpy((*buffer)->data(), frame->
buffer, frame->
size);
129 (*buffer)->set_range(0, frame->
size);
130 (*buffer)->meta_data()->clear();
131 (*buffer)->meta_data()->setInt32(kKeyIsSyncFrame,frame->
key);
132 (*buffer)->meta_data()->setInt64(kKeyTime, frame->
time);
139 s->in_queue->erase(
s->in_queue->begin());
165 int64_t out_frame_index = 0;
178 if (frame->
status == OK) {
179 sp<MetaData> outFormat = (*s->
decoder)->getFormat();
180 outFormat->findInt32(kKeyWidth , &w);
181 outFormat->findInt32(kKeyHeight, &h);
199 (w & 15 || h & 15)) {
200 if (((w + 15)&~15) * ((h + 15)&~15) * 3/2 == buffer->range_length()) {
215 src_data[0] = (
uint8_t*)buffer->data();
216 src_data[1] = src_data[0] + src_linesize[0] *
h;
217 src_data[2] = src_data[1] + src_linesize[1] * -(-h>>pix_desc->log2_chroma_h);
219 src_data, src_linesize,
222 buffer->meta_data()->findInt64(kKeyTime, &out_frame_index);
223 if (out_frame_index && s->
ts_map->count(out_frame_index) > 0) {
226 s->
ts_map->erase(out_frame_index);
229 }
else if (frame->
status == INFO_FORMAT_CHANGED) {
259 sp<MetaData> meta, outFormat;
287 meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
288 meta->setInt32(kKeyWidth, avctx->
width);
289 meta->setInt32(kKeyHeight, avctx->
height);
292 android::ProcessState::self()->startThreadPool();
294 s->
source =
new sp<MediaSource>();
298 s->
ts_map =
new std::map<int64_t, TimeStamp>;
299 s->
client =
new OMXClient;
307 if (s->
client->connect() != OK) {
313 s->
decoder =
new sp<MediaSource>();
316 OMXCodec::kClientNeedsFramebuffer);
317 if ((*s->
decoder)->start() != OK) {
324 outFormat = (*s->
decoder)->getFormat();
325 outFormat->findInt32(kKeyColorFormat, &colorFormat);
327 colorFormat == OMX_COLOR_FormatYUV420SemiPlanar)
329 else if (colorFormat == OMX_COLOR_FormatYCbYCr)
331 else if (colorFormat == OMX_COLOR_FormatCbYCrY)
362 int orig_size = avpkt->
size;
372 if (avpkt && avpkt->
data) {
393 if (!
frame->buffer) {
401 frame->size = orig_size;
403 memcpy(
frame->buffer, ptr, orig_size);
411 frame->status = ERROR_END_OF_STREAM;
448 ret_frame =
frame->vframe;
449 status =
frame->status;
452 if (status == ERROR_END_OF_STREAM)
567 "libstagefright_h264",
packed YUV 4:2:2, 16bpp, Cb Y0 Cr Y1
int av_image_get_linesize(enum AVPixelFormat pix_fmt, int width, int plane)
Compute the size of an image line with format pix_fmt and width width for the plane plane.
static av_always_inline int pthread_mutex_destroy(pthread_mutex_t *mutex)
const AVPixFmtDescriptor * av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt)
This structure describes decoded (raw) audio or video data.
static av_always_inline int pthread_cond_wait(pthread_cond_t *cond, pthread_mutex_t *mutex)
ptrdiff_t const GLvoid * data
sp< MediaSource > * decoder
sp< MetaData > source_meta
virtual status_t start(MetaData *params)
enum AVPixelFormat pix_fmt
Pixel format, see AV_PIX_FMT_xxx.
static av_always_inline int pthread_cond_destroy(pthread_cond_t *cond)
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
#define OMX_QCOM_COLOR_FormatYVU420SemiPlanar
int64_t pts
Presentation timestamp in time_base units (time when frame should be shown to user).
uint8_t * extradata
some codecs need / can use extradata like Huffman tables.
MediaBufferGroup buf_group
static int push_frame(AVFilterContext *ctx, unsigned in_no, AVFrame *buf)
static av_cold int Stagefright_init(AVCodecContext *avctx)
static av_always_inline int pthread_cond_signal(pthread_cond_t *cond)
CustomSource(AVCodecContext *avctx, sp< MetaData > meta)
const OptionDef options[]
#define AV_PKT_FLAG_KEY
The packet contains a keyframe.
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
#define CODEC_CAP_DELAY
Encoder or decoder requires flushing with NULL input at the end in order to give the complete and correct output.
const char * decoder_component
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification.
void av_bitstream_filter_close(AVBitStreamFilterContext *bsf)
Release bitstream filter context.
void * decode_thread(void *arg)
static int Stagefright_decode_frame(AVCodecContext *avctx, void *data, int *got_frame, AVPacket *avpkt)
Libavcodec external API header.
void av_image_copy(uint8_t *dst_data[4], int dst_linesizes[4], const uint8_t *src_data[4], const int src_linesizes[4], enum AVPixelFormat pix_fmt, int width, int height)
Copy image in src_data to dst_data.
int flags
A combination of AV_PKT_FLAG values.
#define FF_INPUT_BUFFER_PADDING_SIZE
Required number of additionally allocated bytes at the end of the input bitstream for decoding.
as above, but U and V bytes are swapped
volatile sig_atomic_t thread_started
int width
picture width / height.
AVBitStreamFilterContext * av_bitstream_filter_init(const char *name)
Create and initialize a bitstream filter context given a bitstream filter name.
static av_always_inline int pthread_join(pthread_t thread, void **value_ptr)
static av_always_inline int pthread_mutex_init(pthread_mutex_t *mutex, const pthread_mutexattr_t *attr)
AVBitStreamFilterContext * bsfc
int64_t reordered_opaque
opaque 64bit number (generally a PTS) that will be reordered and output in AVFrame.reordered_opaque
virtual status_t read(MediaBuffer **buffer, const MediaSource::ReadOptions *options)
volatile sig_atomic_t stop_decode
static av_always_inline int pthread_create(pthread_t *thread, const pthread_attr_t *attr, void *(*start_routine)(void *), void *arg)
int av_bitstream_filter_filter(AVBitStreamFilterContext *bsfc, AVCodecContext *avctx, const char *args, uint8_t **poutbuf, int *poutbuf_size, const uint8_t *buf, int buf_size, int keyframe)
Filter bitstream.
char * av_strdup(const char *s)
Duplicate the string s.
int linesize[AV_NUM_DATA_POINTERS]
For video, size in bytes of each picture line.
Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes of an image.
main external API structure.
int ff_get_buffer(AVCodecContext *avctx, AVFrame *frame, int flags)
Get a buffer for a frame.
packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr
int64_t reordered_opaque
reordered opaque 64bit (generally an integer or a double precision float PTS but can be anything)...
pthread_mutex_t out_mutex
pthread_t decode_thread_id
static int64_t pts
Global timestamp for the audio frames.
List< Frame * > * out_queue
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
List< Frame * > * in_queue
std::map< int64_t, TimeStamp > * ts_map
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
common internal api header.
static av_always_inline int pthread_cond_init(pthread_cond_t *cond, const pthread_condattr_t *attr)
AVCodec ff_libstagefright_h264_decoder
virtual sp< MetaData > getFormat()
static av_always_inline int pthread_mutex_unlock(pthread_mutex_t *mutex)
static av_cold int Stagefright_close(AVCodecContext *avctx)
volatile sig_atomic_t thread_exited
static av_always_inline int pthread_mutex_lock(pthread_mutex_t *mutex)
This structure stores compressed data.
#define AV_GET_BUFFER_FLAG_REF
The decoder will keep a reference to the frame and may reuse it later.
void * av_mallocz(size_t size)
Allocate a block of size bytes with alignment suitable for all memory accesses (including vectors if available on the CPU).
int64_t pts
Presentation timestamp in AVStream->time_base units; the time at which the decompressed packet will be presented to the user.
sp< MediaSource > * source