#define BUF_SIZE 256 /* assumed value; the listing omits this define */
#define MAX_CHANNELS 2

#define randomize_buffers()                             \
    do {                                                \
        int i;                                          \
        for (i = 0; i < BUF_SIZE*MAX_CHANNELS; i++) {   \
            int32_t r = sign_extend(rnd(), 24);         \
            ref_buf[i] = r;                             \
            new_buf[i] = r;                             \
        }                                               \
    } while (0)

static void check_decorrelate_stereo(void)
{
    /* ... */
    if (check_func(c.decorrelate_stereo, "alac_decorrelate_stereo")) {
        int len = (rnd() & 0xFF) + 1;
        /* ... randomize_buffers(), run call_ref()/call_new() on
         * identical inputs, compare the outputs, bench_new() */
    }

    report("decorrelate_stereo");
}
#undef randomize_buffers
#define randomize_buffers()                             \
    do {                                                \
        int i, j;                                       \
        for (i = 0; i < BUF_SIZE; i++) {                \
            for (j = 0; j < ch; j++) {                  \
                int32_t r = sign_extend(rnd(), 24);     \
                /* ... same value into ref and new */   \
            }                                           \
        }                                               \
    } while (0)

static void check_append_extra_bits(void)
{
    static const char * const channels[2] = { "mono", "stereo" };
    int ch;
    /* ... */
    for (ch = 1; ch <= 2; ch++) {
        int len = (rnd() & 0xFF) + 1;
        /* ... check_func() per channel layout, compare and bench */
    }

    report("append_extra_bits");
}
av_cold void ff_alacdsp_init(ALACDSPContext *c)
void checkasm_check_alacdsp(void)
static void check_decorrelate_stereo(void)
static void check_append_extra_bits(void)
#define check_func(func, ...)
#define declare_func(ret, ...)
#define randomize_buffers()
#define LOCAL_ALIGNED_16(t, v, ...)
Undefined Behavior

In the C language some operations are undefined, like signed integer overflow, dereferencing freed pointers, or accessing memory outside the allocated space. Undefined Behavior must not occur in a C program; it is not safe even if the output of the undefined operations is unused. The unsafety may seem like nit-picking, but optimizing compilers have in fact optimized code on the assumption that no Undefined Behavior occurs. Optimizing code based on wrong assumptions can and has in some cases led to effects beyond the output of the computations.

The signed integer overflow problem in speed-critical code

Code which is highly optimized and works with signed integers sometimes has the problem that often the output of the computation does not care whether the intermediate values wrap around, yet in C any signed overflow along the way is Undefined Behavior.
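As a concrete illustration of the usual workaround, a hedged sketch (the function names are invented for the example): perform the intermediate arithmetic in unsigned types, where wraparound is defined, and convert back at the end.

    #include <stdint.h>

    /* Overflow of signed addition is undefined behavior, so the
     * compiler may assume it never happens when optimizing. */
    static int32_t sum_signed(int32_t a, int32_t b)
    {
        return a + b;               /* UB if the sum overflows */
    }

    /* Unsigned arithmetic wraps modulo 2^32 by definition; doing the
     * addition in uint32_t and converting back gives two's-complement
     * wraparound without undefined behavior. */
    static int32_t sum_wrapped(int32_t a, int32_t b)
    {
        return (int32_t)((uint32_t)a + (uint32_t)b);
    }

The conversion back to int32_t is implementation-defined rather than undefined, and on two's-complement targets it yields exactly the wrapped value the optimized code expects.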