#ifndef AVUTIL_MEM_INTERNAL_H
#define AVUTIL_MEM_INTERNAL_H

#include "config.h"

#include <stdint.h>
#include <string.h>

#include "attributes.h"
#include "avassert.h"
#include "macros.h"
#include "mem.h"

#if !FF_API_DECLARE_ALIGNED
/* DECLARE_ALIGNED(n,t,v): declare an n-byte aligned variable.
 * DECLARE_ASM_ALIGNED / DECLARE_ASM_CONST: aligned (constant) variables
 * appropriate for use in inline assembly code. */

#if defined(__INTEL_COMPILER) && __INTEL_COMPILER < 1110 || defined(__SUNPRO_C)
    #define DECLARE_ALIGNED(n,t,v)      t __attribute__ ((aligned (n))) v
    #define DECLARE_ASM_ALIGNED(n,t,v)  t __attribute__ ((aligned (n))) v
    #define DECLARE_ASM_CONST(n,t,v)    const t __attribute__ ((aligned (n))) v
#elif defined(__DJGPP__)
    #define DECLARE_ALIGNED(n,t,v)      t __attribute__ ((aligned (FFMIN(n, 16)))) v
    #define DECLARE_ASM_ALIGNED(n,t,v)  t av_used __attribute__ ((aligned (FFMIN(n, 16)))) v
    #define DECLARE_ASM_CONST(n,t,v)    static const t av_used __attribute__ ((aligned (FFMIN(n, 16)))) v
#elif defined(__GNUC__) || defined(__clang__)
    #define DECLARE_ALIGNED(n,t,v)      t __attribute__ ((aligned (n))) v
    #define DECLARE_ASM_ALIGNED(n,t,v)  t av_used __attribute__ ((aligned (n))) v
    #define DECLARE_ASM_CONST(n,t,v)    static const t av_used __attribute__ ((aligned (n))) v
#elif defined(_MSC_VER)
    #define DECLARE_ALIGNED(n,t,v)      __declspec(align(n)) t v
    #define DECLARE_ASM_ALIGNED(n,t,v)  __declspec(align(n)) t v
    #define DECLARE_ASM_CONST(n,t,v)    __declspec(align(n)) static const t v
#else
    #define DECLARE_ALIGNED(n,t,v)      t v
    #define DECLARE_ASM_ALIGNED(n,t,v)  t v
    #define DECLARE_ASM_CONST(n,t,v)    static const t v
#endif
#endif

// Some broken preprocessors need a second expansion
// to be forced to tokenize __VA_ARGS__
#define E1(x) x

#define LOCAL_ALIGNED_A(a, t, v, s, o, ...)             \
    uint8_t la_##v[sizeof(t s o) + (a)];                \
    t (*v) o = (void *)FFALIGN((uintptr_t)la_##v, a)

#define LOCAL_ALIGNED_D(a, t, v, s, o, ...)             \
    DECLARE_ALIGNED(a, t, la_##v) s o;                  \
    t (*v) o = la_##v

#define LOCAL_ALIGNED(a, t, v, ...) LOCAL_ALIGNED_##a(t, v, __VA_ARGS__)

#if HAVE_LOCAL_ALIGNED
#   define LOCAL_ALIGNED_4(t, v, ...) E1(LOCAL_ALIGNED_D(4, t, v, __VA_ARGS__,,))
#else
#   define LOCAL_ALIGNED_4(t, v, ...) E1(LOCAL_ALIGNED_A(4, t, v, __VA_ARGS__,,))
#endif

#if HAVE_LOCAL_ALIGNED
#   define LOCAL_ALIGNED_8(t, v, ...) E1(LOCAL_ALIGNED_D(8, t, v, __VA_ARGS__,,))
#else
#   define LOCAL_ALIGNED_8(t, v, ...) E1(LOCAL_ALIGNED_A(8, t, v, __VA_ARGS__,,))
#endif

#if HAVE_LOCAL_ALIGNED
#   define LOCAL_ALIGNED_16(t, v, ...) E1(LOCAL_ALIGNED_D(16, t, v, __VA_ARGS__,,))
#else
#   define LOCAL_ALIGNED_16(t, v, ...) E1(LOCAL_ALIGNED_A(16, t, v, __VA_ARGS__,,))
#endif

#if HAVE_LOCAL_ALIGNED
#   define LOCAL_ALIGNED_32(t, v, ...) E1(LOCAL_ALIGNED_D(32, t, v, __VA_ARGS__,,))
#else
#   define LOCAL_ALIGNED_32(t, v, ...) E1(LOCAL_ALIGNED_A(32, t, v, __VA_ARGS__,,))
#endif

static inline int ff_fast_malloc(void *ptr, unsigned int *size, size_t min_size, int zero_realloc)
{
    void *val;

    memcpy(&val, ptr, sizeof(val));
    if (min_size <= *size) {
        av_assert0(val || !min_size);
        return 0;
    }
    min_size = FFMAX(min_size + min_size / 16 + 32, min_size);
    av_freep(ptr);
    val = zero_realloc ? av_mallocz(min_size) : av_malloc(min_size);
    memcpy(ptr, &val, sizeof(val));
    if (!val)
        min_size = 0;
    *size = min_size;
    return 1;
}

#endif /* AVUTIL_MEM_INTERNAL_H */
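/*
 * Usage sketch (illustration only, not part of the original header; the
 * names example_weights and example_clear_block are hypothetical).
 * DECLARE_ASM_CONST aligns a constant table so SIMD loads on it are safe;
 * LOCAL_ALIGNED_16 yields an aligned scratch array on the stack, falling
 * back to an over-allocated byte buffer plus an aligned pointer when the
 * compiler cannot align stack variables (HAVE_LOCAL_ALIGNED unset).
 */
DECLARE_ASM_CONST(16, uint64_t, example_weights)[2] = { 0x0101010101010101ULL,
                                                        0x0202020202020202ULL };

static void example_clear_block(void)
{
    LOCAL_ALIGNED_16(int16_t, block, [64]); /* 16-byte aligned int16_t[64] */
    memset(block, 0, 64 * sizeof(*block));
    /* block may now be passed to asm/SIMD code expecting 16-byte alignment */
}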
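/*
 * Usage sketch for ff_fast_malloc() (illustration only; ExampleCtx and
 * example_ensure_scratch are hypothetical). The buffer pointer and its
 * recorded size must always travel together. Unlike realloc(), a growing
 * call frees the old block and allocates a fresh one, so previous contents
 * are NOT preserved; pass zero_realloc = 1 to get a zeroed block instead.
 */
typedef struct ExampleCtx {
    uint8_t     *scratch;
    unsigned int scratch_size;
} ExampleCtx;

static int example_ensure_scratch(ExampleCtx *c, size_t needed)
{
    /* Overallocates to needed + needed/16 + 32 bytes, so a run of slowly
     * growing requests triggers only a logarithmic number of reallocations. */
    ff_fast_malloc(&c->scratch, &c->scratch_size, needed, 0);
    /* On allocation failure the pointer is NULL and the size reset to 0;
     * real FFmpeg code would return AVERROR(ENOMEM) here. */
    return (c->scratch || !needed) ? 0 : -1;
}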