This adds a hand-optimized assembly version for get_cabac much like the existing one, but it also works when the table offsets have to be RIP-relative. Compared to the non-RIP-relative version this adds 2 lea instructions and needs one extra register.

There is a surprisingly large performance improvement over the C version just in get_cabac (more so than the generated assembly seems to suggest): I measured get_cabac roughly 40% faster on a K8. Overall the difference is not that big, though; I measured roughly 5% on a test clip on a K8 and a Core2. Hopefully it still compiles on 32-bit x86...

v2: incorporated feedback from Loren Merritt to avoid rip-relative movs for every table, and got rid of the unnecessary @GOTPCREL.
v3: apply similar fixes to the decode_significance functions, and use the same macro arguments for the non-PIC case.
v4: prettify inline asm arguments, add a non-fast-cmov version (as I expect the C code to be faster otherwise, since both cmov and sbb suck hard on a Prescott, and one can't even construct the mask with a 64-bit shift as that is just as terrible - it's quite difficult to find usable instructions on that chip...). This is tested to work, but not on a P4; in theory it _should_ be fast there.

Signed-off-by: Michael Niedermayer <michaelni@gmx.at>
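To make the RIP-relative trick above concrete, here is a minimal, self-contained sketch (not part of the patch): a local label "1:" serves as an anchor whose address is captured once with lea, and every table access is then written as table-1b(anchor, index), so the assembler only emits a link-time-constant displacement instead of an absolute relocation. The table, function and test driver are made up for illustration; x86-64, ELF symbol naming and GCC-style inline asm are assumed.

/* rip_anchor_demo.c - illustrative only, not part of the patch */
#include <stddef.h>
#include <stdint.h>

const uint8_t demo_table[256] = { [0] = 7, [255] = 42 };   /* placeholder data */

static unsigned demo_lookup(size_t idx)
{
    unsigned val;
    void    *anchor;
    __asm__ volatile(
        "1:                                \n\t"  /* local label used as RIP anchor    */
        "lea    1b(%%rip), %1              \n\t"  /* capture the anchor's address once */
        "movzbl demo_table-1b(%1, %2), %0  \n\t"  /* demo_table[idx] via the anchor    */
        : "=&r"(val), "=&r"(anchor)
        : "r"(idx));
    return val;
}

int main(void)
{
    return demo_lookup(255) == 42 ? 0 : 1;        /* exit status 0 on success */
}

The patch itself uses the same pattern, except that the anchor is captured in a separate asm statement and then passed into BRANCHLESS_GET_CABAC as the extra "rip" operand via RIP_ARG.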
@@ -1659,7 +1659,7 @@ decode_cabac_residual_internal(H264Context *h, DCTELEM *block,
index[coeff_count++] = last;\
}
const uint8_t *sig_off = significant_coeff_flag_offset_8x8[MB_FIELD];
#if ARCH_X86 && HAVE_7REGS && !defined(BROKEN_RELOCATIONS)
#if ARCH_X86 && HAVE_7REGS
coeff_count= decode_significance_8x8_x86(CC, significant_coeff_ctx_base, index,
last_coeff_ctx_base, sig_off);
} else {
@@ -24,8 +24,71 @@
#include "libavcodec/cabac.h"
#include "libavutil/attributes.h"
#include "libavutil/x86_cpu.h"
#include "libavutil/internal.h"
#include "config.h"
#ifdef BROKEN_RELOCATIONS
#define RIP_ARG , "r"(rip)
#if HAVE_FAST_CMOV
#define BRANCHLESS_GET_CABAC_UPDATE(ret, retq, low, range, tmp) \
"cmp "low" , "tmp" \n\t"\
"cmova %%ecx , "range" \n\t"\
"sbb %%rcx , %%rcx \n\t"\
"and %%ecx , "tmp" \n\t"\
"xor %%rcx , "retq" \n\t"\
"sub "tmp" , "low" \n\t"
#else /* HAVE_FAST_CMOV */
#define BRANCHLESS_GET_CABAC_UPDATE(ret, retq, low, range, tmp) \
/* P4 Prescott has crappy cmov,sbb,64bit shift so avoid them */ \
"sub "low" , "tmp" \n\t"\
"sar $31 , "tmp" \n\t"\
"sub %%ecx , "range" \n\t"\
"and "tmp" , "range" \n\t"\
"add %%ecx , "range" \n\t"\
"shl $17 , %%ecx \n\t"\
"and "tmp" , %%ecx \n\t"\
"sub %%ecx , "low" \n\t"\
"xor "tmp" , "ret" \n\t"\
"movslq "ret" , "retq" \n\t"
#endif /* HAVE_FAST_CMOV */
#define BRANCHLESS_GET_CABAC(ret, retq, statep, low, lowword, range, rangeq, tmp, tmpbyte, byte, end, rip) \
"movzbl "statep" , "ret" \n\t"\
"mov "range" , "tmp" \n\t"\
"and $0xC0 , "range" \n\t"\
"lea ("ret", "range", 2), %%ecx \n\t"\
"movzbl ff_h264_lps_range-1b("rip", %%rcx), "range" \n\t"\
"sub "range" , "tmp" \n\t"\
"mov "tmp" , %%ecx \n\t"\
"shl $17 , "tmp" \n\t"\
BRANCHLESS_GET_CABAC_UPDATE(ret, retq, low, range, tmp) \
"movzbl ff_h264_norm_shift-1b("rip", "rangeq"), %%ecx \n\t"\
"shl %%cl , "range" \n\t"\
"movzbl ff_h264_mlps_state-1b+128("rip", "retq"), "tmp" \n\t"\
"shl %%cl , "low" \n\t"\
"mov "tmpbyte" , "statep" \n\t"\
"test "lowword" , "lowword" \n\t"\
" jnz 2f \n\t"\
"mov "byte" , %%"REG_c" \n\t"\
"add"OPSIZE" $2 , "byte" \n\t"\
"movzwl (%%"REG_c") , "tmp" \n\t"\
"lea -1("low") , %%ecx \n\t"\
"xor "low" , %%ecx \n\t"\
"shr $15 , %%ecx \n\t"\
"bswap "tmp" \n\t"\
"shr $15 , "tmp" \n\t"\
"movzbl ff_h264_norm_shift-1b("rip", %%rcx), %%ecx \n\t"\
"sub $0xFFFF , "tmp" \n\t"\
"neg %%ecx \n\t"\
"add $7 , %%ecx \n\t"\
"shl %%cl , "tmp" \n\t"\
"add "tmp" , "low" \n\t"\
"2: \n\t"
#else /* BROKEN_RELOCATIONS */
#define RIP_ARG
#if HAVE_FAST_CMOV
#define BRANCHLESS_GET_CABAC_UPDATE(ret, statep, low, range, tmp)\
"mov "tmp" , %%ecx \n\t"\
@@ -51,7 +114,7 @@
"xor "tmp" , "ret" \n\t"
#endif /* HAVE_FAST_CMOV */
#define BRANCHLESS_GET_CABAC(ret, statep, low, lowword, range, tmp, tmpbyte, byte, end) \
#define BRANCHLESS_GET_CABAC(ret, retq, statep, low, lowword, range, rangeq, tmp, tmpbyte, byte, end, rip) \
"movzbl "statep" , "ret" \n\t"\
"mov "range" , "tmp" \n\t"\
"and $0xC0 , "range" \n\t"\
@@ -81,28 +144,39 @@
"add "tmp" , "low" \n\t"\
"2: \n\t"
#endif /* BROKEN_RELOCATIONS */
#if HAVE_7REGS && !defined(BROKEN_RELOCATIONS) && !(defined(__i386) && defined(__clang__) && (__clang_major__<2 || (__clang_major__==2 && __clang_minor__<10)))\
&& !(defined(__i386) && !defined(__clang__) && defined(__llvm__) && __GNUC__==4 && __GNUC_MINOR__==2 && __GNUC_PATCHLEVEL__<=1)
#if HAVE_7REGS && !(defined(__i386) && defined(__clang__) && (__clang_major__<2 || (__clang_major__==2 && __clang_minor__<10)))\
&& !(defined(__i386) && !defined(__clang__) && defined(__llvm__) && __GNUC__==4 && __GNUC_MINOR__==2 && __GNUC_PATCHLEVEL__<=1)
#define get_cabac_inline get_cabac_inline_x86
static av_always_inline int get_cabac_inline_x86(CABACContext *c,
uint8_t *const state)
{
int bit, tmp;
#ifdef BROKEN_RELOCATIONS
int *rip;
__asm__ volatile(
"1: \n\t"
"lea 1b(%%rip), %0 \n\t"
: "=&r"(rip)
);
#endif
__asm__ volatile(
BRANCHLESS_GET_CABAC("%0", "(%4)", "%1", "%w1",
"%2", "%3", "%b3",
"%a6(%5)", "%a7(%5)")
BRANCHLESS_GET_CABAC("%0", "%q0", "(%4)", "%1", "%w1",
"%2", "%q2", "%3", "%b3",
"%a6(%5)", "%a7(%5)", "%8")
: "=&r"(bit), "+&r"(c->low), "+&r"(c->range), "=&q"(tmp)
: "r"(state), "r"(c),
"i"(offsetof(CABACContext, bytestream)),
"i"(offsetof(CABACContext, bytestream_end))
"i"(offsetof(CABACContext, bytestream_end)) RIP_ARG
: "%"REG_c, "memory"
);
return bit & 1;
}
#endif /* HAVE_7REGS && !defined(BROKEN_RELOCATIONS) */
#endif /* HAVE_7REGS */
#define get_cabac_bypass_sign get_cabac_bypass_sign_x86
static av_always_inline int get_cabac_bypass_sign_x86(CABACContext *c, int val)
@@ -36,7 +36,7 @@
//FIXME use some macros to avoid duplicating get_cabac (cannot be done yet
//as that would make optimization work hard)
#if HAVE_7REGS && !defined(BROKEN_RELOCATIONS)
#if HAVE_7REGS
static int decode_significance_x86(CABACContext *c, int max_coeff,
uint8_t *significant_coeff_ctx_base,
int *index, x86_reg last_off){
@@ -45,20 +45,31 @@ static int decode_significance_x86(CABACContext *c, int max_coeff,
int minusindex= 4-(intptr_t)index;
int bit;
x86_reg coeff_count;
#ifdef BROKEN_RELOCATIONS
int *rip;
__asm__ volatile(
"1: \n\t"
"lea 1b(%%rip), %0 \n\t"
: "=&r"(rip)
);
#endif
__asm__ volatile(
"3: \n\t"
BRANCHLESS_GET_CABAC("%4", "(%1)", "%3", "%w3",
"%5", "%k0", "%b0",
"%a11(%6)", "%a12(%6)")
BRANCHLESS_GET_CABAC("%4", "%q4", "(%1)", "%3", "%w3",
"%5", "%q5", "%k0", "%b0",
"%a11(%6)", "%a12(%6)", "%13")
"test $1, %4 \n\t"
" jz 4f \n\t"
"add %10, %1 \n\t"
BRANCHLESS_GET_CABAC("%4", "(%1)", "%3", "%w3",
"%5", "%k0", "%b0",
"%a11(%6)", "%a12(%6)")
BRANCHLESS_GET_CABAC("%4", "%q4", "(%1)", "%3", "%w3",
"%5", "%q5", "%k0", "%b0",
"%a11(%6)", "%a12(%6)", "%13")
"sub %10, %1 \n\t"
"mov %2, %0 \n\t"
@@ -86,7 +97,7 @@ static int decode_significance_x86(CABACContext *c, int max_coeff,
"+&r"(c->low), "=&r"(bit), "+&r"(c->range)
: "r"(c), "m"(minusstart), "m"(end), "m"(minusindex), "m"(last_off),
"i"(offsetof(CABACContext, bytestream)),
"i"(offsetof(CABACContext, bytestream_end))
"i"(offsetof(CABACContext, bytestream_end)) RIP_ARG
: "%"REG_c, "memory"
);
return coeff_count;
@@ -100,6 +111,17 @@ static int decode_significance_8x8_x86(CABACContext *c,
x86_reg coeff_count;
x86_reg last=0;
x86_reg state;
#ifdef BROKEN_RELOCATIONS
int *rip;
__asm__ volatile(
"1: \n\t"
"lea 1b(%%rip), %0 \n\t"
: "=&r"(rip)
);
#endif
__asm__ volatile(
"mov %1, %6 \n\t"
"3: \n\t"
@@ -108,20 +130,24 @@ static int decode_significance_8x8_x86(CABACContext *c,
"movzbl (%0, %6), %k6 \n\t"
"add %9, %6 \n\t"
BRANCHLESS_GET_CABAC("%4", "(%6)", "%3", "%w3",
"%5", "%k0", "%b0",
"%a12(%7)", "%a13(%7)")
BRANCHLESS_GET_CABAC("%4", "%q4", "(%6)", "%3", "%w3",
"%5", "%q5", "%k0", "%b0",
"%a12(%7)", "%a13(%7)", "%14")
"mov %1, %k6 \n\t"
"test $1, %4 \n\t"
" jz 4f \n\t"
"movzbl "MANGLE(last_coeff_flag_offset_8x8)"(%k6), %k6\n\t"
#ifdef BROKEN_RELOCATIONS
"movzbl last_coeff_flag_offset_8x8-1b(%14, %q6), %k6\n\t"
#else
"movzbl last_coeff_flag_offset_8x8(%k6), %k6\n\t"
#endif
"add %11, %6 \n\t"
BRANCHLESS_GET_CABAC("%4", "(%6)", "%3", "%w3",
"%5", "%k0", "%b0",
"%a12(%7)", "%a13(%7)")
BRANCHLESS_GET_CABAC("%4", "%q4", "(%6)", "%3", "%w3",
"%5", "%q5", "%k0", "%b0",
"%a12(%7)", "%a13(%7)", "%14")
"mov %2, %0 \n\t"
"mov %1, %k6 \n\t"
@@ -147,7 +173,7 @@ static int decode_significance_8x8_x86(CABACContext *c,
: "r"(c), "m"(minusindex), "m"(significant_coeff_ctx_base),
"m"(sig_off), "m"(last_coeff_ctx_base),
"i"(offsetof(CABACContext, bytestream)),
"i"(offsetof(CABACContext, bytestream_end))
"i"(offsetof(CABACContext, bytestream_end)) RIP_ARG
: "%"REG_c, "memory"
);
return coeff_count;
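For reference, a hedged C rendition of what the cmov-free BRANCHLESS_GET_CABAC_UPDATE variant above computes: the LPS/MPS decision mask is built from a sign bit with sar rather than cmov/sbb, which are slow on the Prescott P4. The standalone function and names are illustrative only; the renormalisation and ff_h264_mlps_state lookup that follow in BRANCHLESS_GET_CABAC are omitted, and CABAC_BITS == 16 (hence the shift by 17) is assumed.

#include <stdint.h>

/* Sketch only - mirrors the mask-based update of the non-HAVE_FAST_CMOV
 * macro, it is not a drop-in replacement for anything in the patch. */
static inline int cabac_update_branchless(uint32_t *low, uint32_t *range,
                                          uint32_t range_lps, int state_idx)
{
    uint32_t range_mps = *range - range_lps;      /* range left if the MPS is decoded */
    int32_t  lps_mask  = (int32_t)((range_mps << 17) - *low) >> 31;
                                                  /* -1 on the LPS path, 0 on MPS     */
    *low  -= (range_mps << 17) & lps_mask;        /* skip the MPS sub-interval on LPS */
    *range = range_mps + ((range_lps - range_mps) & lps_mask);  /* pick matching range */
    return state_idx ^ lps_mask;                  /* index is bit-flipped on the LPS path */
}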