30 #include <OMX_Component.h> 51 s.nLowPart = value & 0xffffffff;
52 s.nHighPart = value >> 32;
57 return (((int64_t)value.nHighPart) << 32) | value.nLowPart;
60 #define to_omx_ticks(x) (x) 61 #define from_omx_ticks(x) (x) 64 #define INIT_STRUCT(x) do { \ 65 x.nSize = sizeof(x); \ 66 x.nVersion = s->version; \ 68 #define CHECK(x) do { \ 69 if (x != OMX_ErrorNone) { \ 70 av_log(avctx, AV_LOG_ERROR, \ 71 "err %x (%d) on line %d\n", x, x, __LINE__); \ 72 return AVERROR_UNKNOWN; \ 82 OMX_ERRORTYPE (*
ptr_GetHandle)(OMX_HANDLETYPE*, OMX_STRING, OMX_PTR, OMX_CALLBACKTYPE*);
92 snprintf(buf,
sizeof(buf),
"%s%s", prefix ? prefix :
"", symbol);
93 return dlsym(handle, buf);
97 const char *libname,
const char *prefix,
101 s->
lib2 = dlopen(libname2, RTLD_NOW | RTLD_GLOBAL);
114 s->
lib = dlopen(libname, RTLD_NOW | RTLD_GLOBAL);
142 static const char *
const libnames[] = {
144 "/opt/vc/lib/libopenmaxil.so",
"/opt/vc/lib/libbcm_host.so",
146 "libOMX_Core.so",
NULL,
147 "libOmxCore.so",
NULL,
151 const char*
const* nameptr;
155 omx_context =
av_mallocz(
sizeof(*omx_context));
159 ret =
omx_try_load(omx_context, logctx, libname, prefix, NULL);
165 for (nameptr = libnames; *nameptr; nameptr += 2)
166 if (!(ret =
omx_try_load(omx_context, logctx, nameptr[0], prefix, nameptr[1])))
185 dlclose(omx_context->
lib);
197 char component_name[OMX_MAX_STRINGNAME_SIZE];
233 int* array_size, OMX_BUFFERHEADERTYPE **
array,
234 OMX_BUFFERHEADERTYPE *
buffer)
237 array[(*array_size)++] =
buffer;
243 int* array_size, OMX_BUFFERHEADERTYPE **
array,
246 OMX_BUFFERHEADERTYPE *
buffer;
252 if (*array_size > 0) {
255 memmove(&array[0], &array[1], (*array_size) *
sizeof(OMX_BUFFERHEADERTYPE*));
263 static OMX_ERRORTYPE
event_handler(OMX_HANDLETYPE component, OMX_PTR app_data, OMX_EVENTTYPE event,
264 OMX_U32 data1, OMX_U32 data2, OMX_PTR event_data)
278 case OMX_EventCmdComplete:
279 if (data1 == OMX_CommandStateSet) {
285 }
else if (data1 == OMX_CommandPortDisable) {
287 }
else if (data1 == OMX_CommandPortEnable) {
291 (uint32_t) data1, (uint32_t) data2);
294 case OMX_EventPortSettingsChanged:
299 event, (uint32_t) data1, (uint32_t) data2);
302 return OMX_ErrorNone;
306 OMX_BUFFERHEADERTYPE *
buffer)
310 if (buffer->pAppPrivate) {
311 if (buffer->pOutputPortPrivate)
315 buffer->pAppPrivate =
NULL;
320 return OMX_ErrorNone;
324 OMX_BUFFERHEADERTYPE *
buffer)
329 return OMX_ErrorNone;
339 const char *role,
char *
str,
int str_size)
347 av_strlcpy(str,
"OMX.broadcom.video_encode", str_size);
359 for (i = 0; i < num; i++) {
360 components[
i] =
av_mallocz(OMX_MAX_STRINGNAME_SIZE);
361 if (!components[i]) {
369 for (i = 0; i < num; i++)
379 while (s->
state != state && s->
error == OMX_ErrorNone)
381 if (s->
error != OMX_ErrorNone)
390 OMX_PARAM_COMPONENTROLETYPE role_params = { 0 };
391 OMX_PORT_PARAM_TYPE video_port_params = { 0 };
392 OMX_PARAM_PORTDEFINITIONTYPE in_port_params = { 0 }, out_port_params = { 0 };
393 OMX_VIDEO_PARAM_PORTFORMATTYPE video_port_format = { 0 };
394 OMX_VIDEO_PARAM_BITRATETYPE vid_param_bitrate = { 0 };
398 s->
version.s.nVersionMajor = 1;
399 s->
version.s.nVersionMinor = 1;
403 if (err != OMX_ErrorNone) {
410 av_strlcpy(role_params.cRole, role,
sizeof(role_params.cRole));
412 OMX_SetParameter(s->
handle, OMX_IndexParamStandardComponentRole, &role_params);
415 err = OMX_GetParameter(s->
handle, OMX_IndexParamVideoInit, &video_port_params);
419 for (i = 0; i < video_port_params.nPorts; i++) {
420 int port = video_port_params.nStartPortNumber +
i;
421 OMX_PARAM_PORTDEFINITIONTYPE port_params = { 0 };
423 port_params.nPortIndex = port;
424 err = OMX_GetParameter(s->
handle, OMX_IndexParamPortDefinition, &port_params);
425 if (err != OMX_ErrorNone) {
429 if (port_params.eDir == OMX_DirInput && s->
in_port < 0) {
430 in_port_params = port_params;
432 }
else if (port_params.eDir == OMX_DirOutput && s->
out_port < 0) {
433 out_port_params = port_params;
445 video_port_format.nIndex =
i;
446 video_port_format.nPortIndex = s->
in_port;
447 if (OMX_GetParameter(s->
handle, OMX_IndexParamVideoPortFormat, &video_port_format) != OMX_ErrorNone)
449 if (video_port_format.eColorFormat == OMX_COLOR_FormatYUV420Planar ||
450 video_port_format.eColorFormat == OMX_COLOR_FormatYUV420PackedPlanar) {
456 av_log(avctx,
AV_LOG_ERROR,
"No supported pixel formats (%d formats available)\n", i);
460 in_port_params.bEnabled = OMX_TRUE;
461 in_port_params.bPopulated = OMX_FALSE;
462 in_port_params.eDomain = OMX_PortDomainVideo;
464 in_port_params.format.video.pNativeRender =
NULL;
465 in_port_params.format.video.bFlagErrorConcealment = OMX_FALSE;
466 in_port_params.format.video.eColorFormat = s->
color_format;
471 in_port_params.format.video.nStride = s->
stride;
472 in_port_params.format.video.nSliceHeight = s->
plane_size;
473 in_port_params.format.video.nFrameWidth = avctx->
width;
474 in_port_params.format.video.nFrameHeight = avctx->
height;
480 err = OMX_SetParameter(s->
handle, OMX_IndexParamPortDefinition, &in_port_params);
482 err = OMX_GetParameter(s->
handle, OMX_IndexParamPortDefinition, &in_port_params);
484 s->
stride = in_port_params.format.video.nStride;
485 s->
plane_size = in_port_params.format.video.nSliceHeight;
488 err = OMX_GetParameter(s->
handle, OMX_IndexParamPortDefinition, &out_port_params);
489 out_port_params.bEnabled = OMX_TRUE;
490 out_port_params.bPopulated = OMX_FALSE;
491 out_port_params.eDomain = OMX_PortDomainVideo;
492 out_port_params.format.video.pNativeRender =
NULL;
493 out_port_params.format.video.nFrameWidth = avctx->
width;
494 out_port_params.format.video.nFrameHeight = avctx->
height;
495 out_port_params.format.video.nStride = 0;
496 out_port_params.format.video.nSliceHeight = 0;
497 out_port_params.format.video.nBitrate = avctx->
bit_rate;
498 out_port_params.format.video.xFramerate = in_port_params.format.video.xFramerate;
499 out_port_params.format.video.bFlagErrorConcealment = OMX_FALSE;
501 out_port_params.format.video.eCompressionFormat = OMX_VIDEO_CodingMPEG4;
503 out_port_params.format.video.eCompressionFormat = OMX_VIDEO_CodingAVC;
505 err = OMX_SetParameter(s->
handle, OMX_IndexParamPortDefinition, &out_port_params);
507 err = OMX_GetParameter(s->
handle, OMX_IndexParamPortDefinition, &out_port_params);
512 vid_param_bitrate.nPortIndex = s->
out_port;
513 vid_param_bitrate.eControlRate = OMX_Video_ControlRateVariable;
514 vid_param_bitrate.nTargetBitrate = avctx->
bit_rate;
515 err = OMX_SetParameter(s->
handle, OMX_IndexParamVideoBitrate, &vid_param_bitrate);
516 if (err != OMX_ErrorNone)
520 OMX_VIDEO_PARAM_AVCTYPE avc = { 0 };
523 err = OMX_GetParameter(s->
handle, OMX_IndexParamVideoAvc, &avc);
529 avc.eProfile = OMX_VIDEO_AVCProfileBaseline;
532 avc.eProfile = OMX_VIDEO_AVCProfileMain;
535 avc.eProfile = OMX_VIDEO_AVCProfileHigh;
540 err = OMX_SetParameter(s->
handle, OMX_IndexParamVideoAvc, &avc);
544 err = OMX_SendCommand(s->
handle, OMX_CommandStateSet, OMX_StateIdle,
NULL);
558 if (err == OMX_ErrorNone)
572 err = OMX_SendCommand(s->
handle, OMX_CommandStateSet, OMX_StateExecuting,
NULL);
581 if (err != OMX_ErrorNone) {
595 executing = s->
state == OMX_StateExecuting;
599 OMX_SendCommand(s->
handle, OMX_CommandStateSet, OMX_StateIdle,
NULL);
601 OMX_SendCommand(s->
handle, OMX_CommandStateSet, OMX_StateLoaded,
NULL);
606 buffer->pBuffer =
NULL;
644 OMX_BUFFERHEADERTYPE *
buffer;
659 s->
state = OMX_StateLoaded;
660 s->
error = OMX_ErrorNone;
664 role =
"video_encoder.mpeg4";
667 role =
"video_encoder.avc";
685 if (buffer->nFlags & OMX_BUFFERFLAG_CODECCONFIG) {
694 err = OMX_FillThisBuffer(s->
handle, buffer);
695 if (err != OMX_ErrorNone) {
706 int nals[32] = { 0 };
736 OMX_BUFFERHEADERTYPE*
buffer;
753 if (frame->
linesize[0] == src_linesize[0] &&
754 frame->
linesize[1] == src_linesize[1] &&
755 frame->
linesize[2] == src_linesize[2] &&
756 frame->
data[1] == src[1] &&
757 frame->
data[2] == src[2]) {
767 buffer->pAppPrivate = local;
768 buffer->pOutputPortPrivate =
NULL;
769 buffer->pBuffer = local->
data[0];
777 if (image_buffer_size >= 0)
784 buffer->pAppPrivate = buf;
786 buffer->pOutputPortPrivate = (
void*) 1;
787 buffer->pBuffer = buf;
797 buffer->nFlags = OMX_BUFFERFLAG_ENDOFFRAME;
804 OMX_CONFIG_BOOLEANTYPE config = {0, };
806 config.bEnabled = OMX_TRUE;
807 err = OMX_SetConfig(s->
handle, OMX_IndexConfigBrcmVideoRequestIFrame, &config);
808 if (err != OMX_ErrorNone) {
812 OMX_CONFIG_INTRAREFRESHVOPTYPE config = {0, };
815 config.IntraRefreshVOP = OMX_TRUE;
816 err = OMX_SetConfig(s->
handle, OMX_IndexConfigVideoIntraVOPRefresh, &config);
817 if (err != OMX_ErrorNone) {
822 err = OMX_EmptyThisBuffer(s->
handle, buffer);
823 if (err != OMX_ErrorNone) {
832 buffer->nFilledLen = 0;
833 buffer->nFlags = OMX_BUFFERFLAG_EOS;
834 buffer->pAppPrivate = buffer->pOutputPortPrivate =
NULL;
835 err = OMX_EmptyThisBuffer(s->
handle, buffer);
836 if (err != OMX_ErrorNone) {
844 while (!*got_packet && ret == 0 && !s->
got_eos) {
850 !frame || had_partial);
854 if (buffer->nFlags & OMX_BUFFERFLAG_EOS)
866 if (!(buffer->nFlags & OMX_BUFFERFLAG_ENDOFFRAME) || !pkt->
data) {
876 if (buffer->nFlags & OMX_BUFFERFLAG_ENDOFFRAME) {
896 memcpy(pkt->
data + s->
output_buf_size, buffer->pBuffer + buffer->nOffset, buffer->nFilledLen);
900 if (buffer->nFlags & OMX_BUFFERFLAG_ENDOFFRAME) {
906 if (buffer->nFlags & OMX_BUFFERFLAG_SYNCFRAME)
912 err = OMX_FillThisBuffer(s->
handle, buffer);
913 if (err != OMX_ErrorNone) {
930 #define OFFSET(x) offsetof(OMXCodecContext, x) 931 #define VDE AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_DECODING_PARAM | AV_OPT_FLAG_ENCODING_PARAM 932 #define VE AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_ENCODING_PARAM 936 {
"zerocopy",
"Try to avoid copying input frames if possible",
OFFSET(input_zerocopy),
AV_OPT_TYPE_INT, { .i64 = CONFIG_OMX_RPI }, 0, 1,
VE },
966 .priv_class = &omx_mpeg4enc_class,
987 .priv_class = &omx_h264enc_class,
#define FF_CODEC_CAP_INIT_CLEANUP
The codec allows calling the close function for deallocation even if the init function returned a failure.
#define FF_PROFILE_H264_MAIN
#define AVERROR_ENCODER_NOT_FOUND
Encoder not found.
const struct AVCodec * codec
static av_always_inline int pthread_mutex_destroy(pthread_mutex_t *mutex)
OMX_ERRORTYPE(* ptr_Deinit)(void)
This structure describes decoded (raw) audio or video data.
#define pthread_mutex_lock(a)
static av_always_inline int pthread_cond_wait(pthread_cond_t *cond, pthread_mutex_t *mutex)
static av_cold int find_component(OMXContext *omx_context, void *logctx, const char *role, char *str, int str_size)
OMX_ERRORTYPE(* ptr_FreeHandle)(OMX_HANDLETYPE)
#define AV_LOG_WARNING
Something somehow does not look correct.
int64_t bit_rate
the average bitrate
#define LIBAVUTIL_VERSION_INT
static av_cold int init(AVCodecContext *avctx)
OMX_BUFFERHEADERTYPE ** in_buffer_headers
int av_image_fill_arrays(uint8_t *dst_data[4], int dst_linesize[4], const uint8_t *src, enum AVPixelFormat pix_fmt, int width, int height, int align)
Setup the data pointers and linesizes based on the specified image parameters and the provided array...
const char * av_default_item_name(void *ptr)
Return the context name.
enum AVPixelFormat pix_fmt
Pixel format, see AV_PIX_FMT_xxx.
Convenience header that includes libavutil's core.
static OMX_ERRORTYPE fill_buffer_done(OMX_HANDLETYPE component, OMX_PTR app_data, OMX_BUFFERHEADERTYPE *buffer)
static av_cold int omx_encode_end(AVCodecContext *avctx)
static OMX_BUFFERHEADERTYPE * get_buffer(pthread_mutex_t *mutex, pthread_cond_t *cond, int *array_size, OMX_BUFFERHEADERTYPE **array, int wait)
void * av_mallocz(size_t size)
Allocate a memory block with alignment suitable for all memory accesses (including vectors if available on the CPU) and zero all the bytes of the block.
static av_always_inline int pthread_cond_destroy(pthread_cond_t *cond)
static OMX_ERRORTYPE empty_buffer_done(OMX_HANDLETYPE component, OMX_PTR app_data, OMX_BUFFERHEADERTYPE *buffer)
AVRational time_base
This is the fundamental unit of time (in seconds) in terms of which frame timestamps are represented...
static enum AVPixelFormat omx_encoder_pix_fmts[]
const char * class_name
The name of the class; usually it is the same name as the context structure type to which the AVClass...
#define AV_CODEC_CAP_DELAY
Encoder or decoder requires flushing with NULL input at the end in order to give the complete and correct output.
int ff_alloc_packet2(AVCodecContext *avctx, AVPacket *avpkt, int64_t size, int64_t min_size)
Check AVPacket size and/or allocate data.
#define FF_PROFILE_H264_BASELINE
OMX_ERRORTYPE(* ptr_GetHandle)(OMX_HANDLETYPE *, OMX_STRING, OMX_PTR, OMX_CALLBACKTYPE *)
#define FF_CODEC_CAP_INIT_THREADSAFE
The codec does not modify any global variables in the init function, allowing to call the init function without locking any global mutexes.
pthread_cond_t input_cond
OMX_BUFFERHEADERTYPE ** done_out_buffers
GLsizei GLboolean const GLfloat * value
int64_t pts
Presentation timestamp in time_base units (time when frame should be shown to user).
int av_packet_from_data(AVPacket *pkt, uint8_t *data, int size)
Initialize a reference-counted packet from av_malloc()ed data.
uint8_t * extradata
some codecs need / can use extradata like Huffman tables.
OMX_ERRORTYPE(* ptr_GetComponentsOfRole)(OMX_STRING, OMX_U32 *, OMX_U8 **)
#define AV_LOG_VERBOSE
Detailed information.
#define AV_PKT_FLAG_KEY
The packet contains a keyframe.
int64_t av_rescale_q(int64_t a, AVRational bq, AVRational cq)
Rescale a 64-bit integer by 2 rational numbers.
H.264 common definitions.
int av_image_get_buffer_size(enum AVPixelFormat pix_fmt, int width, int height, int align)
Return the size in bytes of the amount of data required to store an image with the given parameters...
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
pthread_cond_t state_cond
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification. ...
static av_cold int wait_for_state(OMXCodecContext *s, OMX_STATETYPE state)
int flags
AV_CODEC_FLAG_*.
#define FF_PROFILE_H264_HIGH
pthread_mutex_t output_mutex
const char * name
Name of the codec implementation.
OMX_COLOR_FORMATTYPE color_format
static void append_buffer(pthread_mutex_t *mutex, pthread_cond_t *cond, int *array_size, OMX_BUFFERHEADERTYPE **array, OMX_BUFFERHEADERTYPE *buffer)
AVCodec ff_mpeg4_omx_encoder
size_t av_strlcpy(char *dst, const char *src, size_t size)
Copy the string src to dst, but no more than size - 1 bytes, and null-terminate dst.
void av_image_copy(uint8_t *dst_data[4], int dst_linesizes[4], const uint8_t *src_data[4], const int src_linesizes[4], enum AVPixelFormat pix_fmt, int width, int height)
Copy image in src_data to dst_data.
static int omx_encode_frame(AVCodecContext *avctx, AVPacket *pkt, const AVFrame *frame, int *got_packet)
int flags
A combination of AV_PKT_FLAG values.
static av_cold void * dlsym_prefixed(void *handle, const char *symbol, const char *prefix)
OMX_BUFFERHEADERTYPE ** out_buffer_headers
enum AVPictureType pict_type
Picture type of the frame.
OMX_BUFFERHEADERTYPE ** free_in_buffers
int width
picture width / height.
typedef void(APIENTRY *FF_PFNGLACTIVETEXTUREPROC)(GLenum texture)
#define FF_PROFILE_UNKNOWN
static av_cold int omx_encode_init(AVCodecContext *avctx)
static av_always_inline int pthread_mutex_init(pthread_mutex_t *mutex, const pthread_mutexattr_t *attr)
these buffered frames must be flushed immediately if a new input produces new the filter must not call request_frame to get more It must just process the frame or queue it The task of requesting more frames is left to the filter s request_frame method or the application If a filter has several the filter must be ready for frames arriving randomly on any input any filter with several inputs will most likely require some kind of queuing mechanism It is perfectly acceptable to have a limited queue and to drop frames when the inputs are too unbalanced request_frame For filters that do not use the this method is called when a frame is wanted on an output For a it should directly call filter_frame on the corresponding output For a if there are queued frames already one of these frames should be pushed If the filter should request a frame on one of its repeatedly until at least one frame has been pushed Return or at least make progress towards producing a frame
static const AVOption options[]
pthread_cond_t output_cond
static av_cold int omx_try_load(OMXContext *s, void *logctx, const char *libname, const char *prefix, const char *libname2)
#define pthread_mutex_unlock(a)
OMX_ERRORTYPE(* ptr_Init)(void)
AVFrame * av_frame_clone(const AVFrame *src)
Create a new frame that references the same data as src.
static const AVClass omx_mpeg4enc_class
#define AV_LOG_INFO
Standard information.
int av_reallocp(void *ptr, size_t size)
Allocate, reallocate, or free a block of memory through a pointer to a pointer.
Libavcodec external API header.
#define AV_TIME_BASE_Q
Internal time base represented as fractional value.
int linesize[AV_NUM_DATA_POINTERS]
For video, size in bytes of each picture line.
main external API structure.
OMX_ERRORTYPE(* ptr_ComponentNameEnum)(OMX_STRING, OMX_U32, OMX_U32)
Describe the class of an AVClass context structure.
char component_name[OMX_MAX_STRINGNAME_SIZE]
static av_cold void omx_deinit(OMXContext *omx_context)
static av_cold int omx_component_init(AVCodecContext *avctx, const char *role)
static enum AVPixelFormat pix_fmts[]
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
#define AV_CODEC_FLAG_GLOBAL_HEADER
Place global headers in extradata instead of every keyframe.
static const OMX_CALLBACKTYPE callbacks
static OMX_ERRORTYPE event_handler(OMX_HANDLETYPE component, OMX_PTR app_data, OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2, OMX_PTR event_data)
int gop_size
the number of pictures in a group of pictures, or 0 for intra_only
int(* cond)(enum AVPixelFormat pix_fmt)
int av_strstart(const char *str, const char *pfx, const char **ptr)
Return non-zero if pfx is a prefix of str.
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
common internal api header.
common internal and external API header
static av_always_inline int pthread_cond_init(pthread_cond_t *cond, const pthread_condattr_t *attr)
pthread_mutex_t input_mutex
#define AVERROR_UNKNOWN
Unknown error, typically from an external library.
#define AV_INPUT_BUFFER_PADDING_SIZE
Required number of additionally allocated bytes at the end of the input bitstream for decoding...
pthread_mutex_t state_mutex
#define from_omx_ticks(x)
static av_always_inline int pthread_cond_broadcast(pthread_cond_t *cond)
static const AVClass omx_h264enc_class
int64_t dts
Decompression timestamp in AVStream->time_base units; the time at which the packet is decompressed...
static int array[MAX_W *MAX_W]
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later.That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another.Frame references ownership and permissions
static av_cold void cleanup(OMXCodecContext *s)
AVPixelFormat
Pixel format.
This structure stores compressed data.
AVCodec ff_h264_omx_encoder
int64_t pts
Presentation timestamp in AVStream->time_base units; the time at which the decompressed packet will be presented to the user.
static av_cold OMXContext * omx_init(void *logctx, const char *libname, const char *prefix)
OMX_ERRORTYPE(* ptr_GetRolesOfComponent)(OMX_STRING, OMX_U32 *, OMX_U8 **)
void * av_mallocz_array(size_t nmemb, size_t size)