void ff_put_pixels16_y2_no_rnd_armv6(uint8_t *, const uint8_t *, ptrdiff_t, int)
void ff_avg_pixels8_armv6(uint8_t *, const uint8_t *, ptrdiff_t, int)
void ff_put_pixels16_x2_armv6(uint8_t *, const uint8_t *, ptrdiff_t, int)
void ff_put_pixels8_x2_armv6(uint8_t *, const uint8_t *, ptrdiff_t, int)
void ff_put_pixels16_armv6(uint8_t *, const uint8_t *, ptrdiff_t, int)
void ff_put_pixels16_y2_armv6(uint8_t *, const uint8_t *, ptrdiff_t, int)
void ff_put_pixels16_x2_no_rnd_armv6(uint8_t *, const uint8_t *, ptrdiff_t, int)
void ff_put_pixels8_armv6(uint8_t *, const uint8_t *, ptrdiff_t, int)
Undefined Behavior: in the C language some operations are undefined, like signed integer overflow, dereferencing freed pointers, or accessing outside allocated space. Undefined Behavior must not occur in a C program; it is not safe even if the output of the undefined operations is unused. The unsafety may seem like nit-picking, but optimizing compilers have in fact optimized code on the assumption that no Undefined Behavior occurs. Optimizing code based on wrong assumptions can, and in some cases has, led to effects beyond the output of computations.

The signed integer overflow problem in speed-critical code: code which is highly optimized and works with signed integers sometimes has the problem that often the output of the computation does not c…
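To make the signed-overflow point concrete, here is a minimal, hypothetical C sketch; the names scale_bad and scale_wrap are invented for illustration and do not come from the file documented here. The first function invokes Undefined Behavior when the product overflows; the second performs the multiplication in an unsigned type, where wraparound modulo 2^32 is well defined, and converts back only at the end.

#include <stdint.h>
#include <stdio.h>

/* Hypothetical example: signed overflow is Undefined Behavior, so the
 * compiler may assume it never happens and optimize on that assumption. */
static int32_t scale_bad(int32_t sample, int32_t gain)
{
    return sample * gain;   /* undefined if the product exceeds INT32_MAX */
}

/* A common remedy in speed-critical code: do the arithmetic in an unsigned
 * type, where wraparound modulo 2^32 is well defined.  The conversion back
 * to int32_t is implementation-defined (not undefined) and gives the
 * expected result on two's-complement targets. */
static int32_t scale_wrap(int32_t sample, int32_t gain)
{
    return (int32_t)((uint32_t)sample * (uint32_t)gain);
}

int main(void)
{
    /* 1<<20 * 1<<12 wraps to 0 in the unsigned computation instead of
     * triggering Undefined Behavior. */
    printf("%d %d\n", (int)scale_bad(1 << 10, 1 << 10),
                      (int)scale_wrap(1 << 20, 1 << 12));
    return 0;
}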
av_cold void ff_hpeldsp_init_armv6(HpelDSPContext *c, int flags)
void ff_put_pixels8_y2_armv6(uint8_t *, const uint8_t *, ptrdiff_t, int)
void ff_put_pixels8_y2_no_rnd_armv6(uint8_t *, const uint8_t *, ptrdiff_t, int)
void ff_avg_pixels16_armv6(uint8_t *, const uint8_t *, ptrdiff_t, int)
#define flags(name, subs,...)
void ff_put_pixels8_x2_no_rnd_armv6(uint8_t *, const uint8_t *, ptrdiff_t, int)
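For orientation, the following is a hedged sketch of how an initialization function with the signature of ff_hpeldsp_init_armv6() above typically installs these ARMv6 routines into the HpelDSPContext function-pointer tables. The function name hpeldsp_init_armv6_sketch, the exact table indices, and the set of assignments are assumptions made for illustration, and the prototypes listed above are assumed to already be in scope; the authoritative version is libavcodec/arm/hpeldsp_init_armv6.c in the FFmpeg tree.

#include "libavutil/attributes.h"
#include "libavcodec/hpeldsp.h"

/* Sketch convention (an assumption, not copied from the real file):
 * first index 0 = 16-pixel-wide blocks, 1 = 8-pixel-wide blocks;
 * second index 0 = plain copy/average, 1 = x2 (horizontal half-pel),
 * 2 = y2 (vertical half-pel).  The flags argument is accepted for API
 * symmetry and is not used in this sketch. */
av_cold void hpeldsp_init_armv6_sketch(HpelDSPContext *c, int flags)
{
    c->put_pixels_tab[0][0]        = ff_put_pixels16_armv6;
    c->put_pixels_tab[0][1]        = ff_put_pixels16_x2_armv6;
    c->put_pixels_tab[0][2]        = ff_put_pixels16_y2_armv6;
    c->put_pixels_tab[1][0]        = ff_put_pixels8_armv6;
    c->put_pixels_tab[1][1]        = ff_put_pixels8_x2_armv6;
    c->put_pixels_tab[1][2]        = ff_put_pixels8_y2_armv6;

    c->put_no_rnd_pixels_tab[0][1] = ff_put_pixels16_x2_no_rnd_armv6;
    c->put_no_rnd_pixels_tab[0][2] = ff_put_pixels16_y2_no_rnd_armv6;
    c->put_no_rnd_pixels_tab[1][1] = ff_put_pixels8_x2_no_rnd_armv6;
    c->put_no_rnd_pixels_tab[1][2] = ff_put_pixels8_y2_no_rnd_armv6;

    c->avg_pixels_tab[0][0]        = ff_avg_pixels16_armv6;
    c->avg_pixels_tab[1][0]        = ff_avg_pixels8_armv6;
}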