Go to the documentation of this file.
29 #define randomize_buf(buf, size) \
31 for (int i = 0; i < size; i++) \
32 buf[i] = (uint8_t)rnd(); \
42 const uint8_t *
src2,
int w);
49 const int size[] = {15, 2043, 4096};
73 const uint8_t *top,
int w,
int bpp);
75 const int bpps[] = {3, 4, 6, 8};
78 if (
check_func(
c->add_paeth_prediction,
"add_paeth_prediction_%d", bpp)) {
80 uint8_t *dst0 = &dst0_buf[bpp];
81 uint8_t *dst1 = &dst1_buf[bpp];
82 uint8_t *top = &top_buf[bpp];
86 memcpy(dst1_buf, dst0_buf,
BUF_SIZE);
96 for (
int j =
w - 3; j <
w; j++)
115 report(
"add_paeth_prediction");
#define declare_func_emms(cpu_flags, ret,...)
#define check_func(func,...)
#define FF_ARRAY_ELEMS(a)
#define LOCAL_ALIGNED_16(t, v,...)
Undefined Behavior: in the C language, some operations are undefined — for example signed integer overflow, dereferencing freed pointers, or accessing memory outside of an allocated buffer. Undefined Behavior must not occur in a C program; it is not safe even if the output of the undefined operations is unused. This unsafety may seem like nit-picking, but optimizing compilers have in fact optimized code on the assumption that no Undefined Behavior occurs, and optimizing code based on wrong assumptions can — and in some cases has — led to effects beyond the output of the computations. The signed integer overflow problem in speed-critical code: code which is highly optimized and works with signed integers sometimes has the problem that often the output of the computation does not c…
uint8_t ptrdiff_t const uint8_t ptrdiff_t int intptr_t intptr_t int int16_t * dst
av_cold void ff_pngdsp_init(PNGDSPContext *dsp)
#define i(width, name, range_min, range_max)
void checkasm_check_png(void)
static void check_add_paeth_prediction(const PNGDSPContext *c)
#define AV_CPU_FLAG_MMX
standard MMX
#define randomize_buf(buf, size)
#define checkasm_check(prefix,...)
static void check_add_bytes_l2(const PNGDSPContext *c)