
Revert "h264: assembly version of get_cabac for x86_64 with PIC (v4)"

This broke compilation on darwin; revert until a better solution is found.

This reverts commit a812b599b5.
Michael Niedermayer 2012-04-21 02:09:27 +02:00
parent e9b1d5ae5e
commit 9849515214
3 changed files with 25 additions and 125 deletions
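For context, the reverted patch made the x86_64 inline-asm CABAC reader position-independent by taking the address of a local label with a RIP-relative lea and then addressing the lookup tables as symbol-minus-label displacements from that base (the extra "rip" macro argument and the ff_h264_lps_range-1b(...) operands that the diff below deletes). The following is a minimal, hypothetical sketch of that addressing pattern, not FFmpeg code; demo_table and load_entry are illustrative names. It is expected to build with GCC or Clang on x86_64 ELF targets, while Mach-O assemblers cannot express the cross-section symbol difference, which is the darwin breakage this revert works around.

#include <stdint.h>
#include <stdio.h>

/* Stand-in for a lookup table such as ff_h264_lps_range. */
const uint8_t demo_table[4] = { 10, 20, 30, 40 };

static int load_entry(intptr_t idx)
{
    int val;
    void *base;
    __asm__ volatile(
        /* Take the address of a local label; valid in position-independent code. */
        "1:                                   \n\t"
        "lea    1b(%%rip), %1                 \n\t"
        /* Index the table relative to that label; on ELF the assembler turns
         * demo_table-1b into a PC-relative relocation, the step that fails
         * with Mach-O in the reverted code. */
        "movzbl demo_table-1b(%1, %2), %0     \n\t"
        : "=&r"(val), "=&r"(base)
        : "r"(idx));
    return val;
}

int main(void)
{
    printf("%d\n", load_entry(2)); /* prints 30 */
    return 0;
}

The removed FFmpeg code computes the base once per function in a separate asm block (the small "lea 1b(%%rip)" statements deleted below) and passes it into BRANCHLESS_GET_CABAC as the extra rip operand, rather than recomputing it inside the macro as this sketch does.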

libavcodec/h264_cabac.c

@@ -1659,7 +1659,7 @@ decode_cabac_residual_internal(H264Context *h, DCTELEM *block,
index[coeff_count++] = last;\
}
const uint8_t *sig_off = significant_coeff_flag_offset_8x8[MB_FIELD];
#if ARCH_X86 && HAVE_7REGS
#if ARCH_X86 && HAVE_7REGS && !defined(BROKEN_RELOCATIONS)
coeff_count= decode_significance_8x8_x86(CC, significant_coeff_ctx_base, index,
last_coeff_ctx_base, sig_off);
} else {

libavcodec/x86/cabac.h

@@ -24,71 +24,8 @@
#include "libavcodec/cabac.h"
#include "libavutil/attributes.h"
#include "libavutil/x86_cpu.h"
#include "libavutil/internal.h"
#include "config.h"
#ifdef BROKEN_RELOCATIONS
#define RIP_ARG , "r"(rip)
#if HAVE_FAST_CMOV
#define BRANCHLESS_GET_CABAC_UPDATE(ret, retq, low, range, tmp) \
"cmp "low" , "tmp" \n\t"\
"cmova %%ecx , "range" \n\t"\
"sbb %%rcx , %%rcx \n\t"\
"and %%ecx , "tmp" \n\t"\
"xor %%rcx , "retq" \n\t"\
"sub "tmp" , "low" \n\t"
#else /* HAVE_FAST_CMOV */
#define BRANCHLESS_GET_CABAC_UPDATE(ret, retq, low, range, tmp) \
/* P4 Prescott has crappy cmov,sbb,64bit shift so avoid them */ \
"sub "low" , "tmp" \n\t"\
"sar $31 , "tmp" \n\t"\
"sub %%ecx , "range" \n\t"\
"and "tmp" , "range" \n\t"\
"add %%ecx , "range" \n\t"\
"shl $17 , %%ecx \n\t"\
"and "tmp" , %%ecx \n\t"\
"sub %%ecx , "low" \n\t"\
"xor "tmp" , "ret" \n\t"\
"movslq "ret" , "retq" \n\t"
#endif /* HAVE_FAST_CMOV */
#define BRANCHLESS_GET_CABAC(ret, retq, statep, low, lowword, range, rangeq, tmp, tmpbyte, byte, end, rip) \
"movzbl "statep" , "ret" \n\t"\
"mov "range" , "tmp" \n\t"\
"and $0xC0 , "range" \n\t"\
"lea ("ret", "range", 2), %%ecx \n\t"\
"movzbl ff_h264_lps_range-1b("rip", %%rcx), "range" \n\t"\
"sub "range" , "tmp" \n\t"\
"mov "tmp" , %%ecx \n\t"\
"shl $17 , "tmp" \n\t"\
BRANCHLESS_GET_CABAC_UPDATE(ret, retq, low, range, tmp) \
"movzbl ff_h264_norm_shift-1b("rip", "rangeq"), %%ecx \n\t"\
"shl %%cl , "range" \n\t"\
"movzbl ff_h264_mlps_state-1b+128("rip", "retq"), "tmp" \n\t"\
"shl %%cl , "low" \n\t"\
"mov "tmpbyte" , "statep" \n\t"\
"test "lowword" , "lowword" \n\t"\
" jnz 2f \n\t"\
"mov "byte" , %%"REG_c" \n\t"\
"add"OPSIZE" $2 , "byte" \n\t"\
"movzwl (%%"REG_c") , "tmp" \n\t"\
"lea -1("low") , %%ecx \n\t"\
"xor "low" , %%ecx \n\t"\
"shr $15 , %%ecx \n\t"\
"bswap "tmp" \n\t"\
"shr $15 , "tmp" \n\t"\
"movzbl ff_h264_norm_shift-1b("rip", %%rcx), %%ecx \n\t"\
"sub $0xFFFF , "tmp" \n\t"\
"neg %%ecx \n\t"\
"add $7 , %%ecx \n\t"\
"shl %%cl , "tmp" \n\t"\
"add "tmp" , "low" \n\t"\
"2: \n\t"
#else /* BROKEN_RELOCATIONS */
#define RIP_ARG
#if HAVE_FAST_CMOV
#define BRANCHLESS_GET_CABAC_UPDATE(ret, statep, low, range, tmp)\
"mov "tmp" , %%ecx \n\t"\
@@ -114,7 +51,7 @@
"xor "tmp" , "ret" \n\t"
#endif /* HAVE_FAST_CMOV */
#define BRANCHLESS_GET_CABAC(ret, retq, statep, low, lowword, range, rangeq, tmp, tmpbyte, byte, end, rip) \
#define BRANCHLESS_GET_CABAC(ret, statep, low, lowword, range, tmp, tmpbyte, byte, end) \
"movzbl "statep" , "ret" \n\t"\
"mov "range" , "tmp" \n\t"\
"and $0xC0 , "range" \n\t"\
@@ -144,39 +81,28 @@
"add "tmp" , "low" \n\t"\
"2: \n\t"
#endif /* BROKEN_RELOCATIONS */
#if HAVE_7REGS && !(defined(__i386) && defined(__clang__) && (__clang_major__<2 || (__clang_major__==2 && __clang_minor__<10)))\
&& !(defined(__i386) && !defined(__clang__) && defined(__llvm__) && __GNUC__==4 && __GNUC_MINOR__==2 && __GNUC_PATCHLEVEL__<=1)
#if HAVE_7REGS && !defined(BROKEN_RELOCATIONS) && !(defined(__i386) && defined(__clang__) && (__clang_major__<2 || (__clang_major__==2 && __clang_minor__<10)))\
&& !(defined(__i386) && !defined(__clang__) && defined(__llvm__) && __GNUC__==4 && __GNUC_MINOR__==2 && __GNUC_PATCHLEVEL__<=1)
#define get_cabac_inline get_cabac_inline_x86
static av_always_inline int get_cabac_inline_x86(CABACContext *c,
uint8_t *const state)
{
int bit, tmp;
#ifdef BROKEN_RELOCATIONS
int *rip;
__asm__ volatile(
"1: \n\t"
"lea 1b(%%rip), %0 \n\t"
: "=&r"(rip)
);
#endif
__asm__ volatile(
BRANCHLESS_GET_CABAC("%0", "%q0", "(%4)", "%1", "%w1",
"%2", "%q2", "%3", "%b3",
"%a6(%5)", "%a7(%5)", "%8")
BRANCHLESS_GET_CABAC("%0", "(%4)", "%1", "%w1",
"%2", "%3", "%b3",
"%a6(%5)", "%a7(%5)")
: "=&r"(bit), "+&r"(c->low), "+&r"(c->range), "=&q"(tmp)
: "r"(state), "r"(c),
"i"(offsetof(CABACContext, bytestream)),
"i"(offsetof(CABACContext, bytestream_end)) RIP_ARG
"i"(offsetof(CABACContext, bytestream_end))
: "%"REG_c, "memory"
);
return bit & 1;
}
#endif /* HAVE_7REGS */
#endif /* HAVE_7REGS && !defined(BROKEN_RELOCATIONS) */
#define get_cabac_bypass_sign get_cabac_bypass_sign_x86
static av_always_inline int get_cabac_bypass_sign_x86(CABACContext *c, int val)

libavcodec/x86/h264_i386.h

@@ -36,7 +36,7 @@
//FIXME use some macros to avoid duplicating get_cabac (cannot be done yet
//as that would make optimization work hard)
#if HAVE_7REGS
#if HAVE_7REGS && !defined(BROKEN_RELOCATIONS)
static int decode_significance_x86(CABACContext *c, int max_coeff,
uint8_t *significant_coeff_ctx_base,
int *index, x86_reg last_off){
@@ -45,31 +45,20 @@ static int decode_significance_x86(CABACContext *c, int max_coeff,
int minusindex= 4-(intptr_t)index;
int bit;
x86_reg coeff_count;
#ifdef BROKEN_RELOCATIONS
int *rip;
__asm__ volatile(
"1: \n\t"
"lea 1b(%%rip), %0 \n\t"
: "=&r"(rip)
);
#endif
__asm__ volatile(
"3: \n\t"
BRANCHLESS_GET_CABAC("%4", "%q4", "(%1)", "%3", "%w3",
"%5", "%q5", "%k0", "%b0",
"%a11(%6)", "%a12(%6)", "%13")
BRANCHLESS_GET_CABAC("%4", "(%1)", "%3", "%w3",
"%5", "%k0", "%b0",
"%a11(%6)", "%a12(%6)")
"test $1, %4 \n\t"
" jz 4f \n\t"
"add %10, %1 \n\t"
BRANCHLESS_GET_CABAC("%4", "%q4", "(%1)", "%3", "%w3",
"%5", "%q5", "%k0", "%b0",
"%a11(%6)", "%a12(%6)", "%13")
BRANCHLESS_GET_CABAC("%4", "(%1)", "%3", "%w3",
"%5", "%k0", "%b0",
"%a11(%6)", "%a12(%6)")
"sub %10, %1 \n\t"
"mov %2, %0 \n\t"
@@ -97,7 +86,7 @@ static int decode_significance_x86(CABACContext *c, int max_coeff,
"+&r"(c->low), "=&r"(bit), "+&r"(c->range)
: "r"(c), "m"(minusstart), "m"(end), "m"(minusindex), "m"(last_off),
"i"(offsetof(CABACContext, bytestream)),
"i"(offsetof(CABACContext, bytestream_end)) RIP_ARG
"i"(offsetof(CABACContext, bytestream_end))
: "%"REG_c, "memory"
);
return coeff_count;
@@ -111,17 +100,6 @@ static int decode_significance_8x8_x86(CABACContext *c,
x86_reg coeff_count;
x86_reg last=0;
x86_reg state;
#ifdef BROKEN_RELOCATIONS
int *rip;
__asm__ volatile(
"1: \n\t"
"lea 1b(%%rip), %0 \n\t"
: "=&r"(rip)
);
#endif
__asm__ volatile(
"mov %1, %6 \n\t"
"3: \n\t"
@@ -130,24 +108,20 @@ static int decode_significance_8x8_x86(CABACContext *c,
"movzbl (%0, %6), %k6 \n\t"
"add %9, %6 \n\t"
BRANCHLESS_GET_CABAC("%4", "%q4", "(%6)", "%3", "%w3",
"%5", "%q5", "%k0", "%b0",
"%a12(%7)", "%a13(%7)", "%14")
BRANCHLESS_GET_CABAC("%4", "(%6)", "%3", "%w3",
"%5", "%k0", "%b0",
"%a12(%7)", "%a13(%7)")
"mov %1, %k6 \n\t"
"test $1, %4 \n\t"
" jz 4f \n\t"
#ifdef BROKEN_RELOCATIONS
"movzbl last_coeff_flag_offset_8x8-1b(%14, %q6), %k6\n\t"
#else
"movzbl last_coeff_flag_offset_8x8(%k6), %k6\n\t"
#endif
"movzbl "MANGLE(last_coeff_flag_offset_8x8)"(%k6), %k6\n\t"
"add %11, %6 \n\t"
BRANCHLESS_GET_CABAC("%4", "%q4", "(%6)", "%3", "%w3",
"%5", "%q5", "%k0", "%b0",
"%a12(%7)", "%a13(%7)", "%14")
BRANCHLESS_GET_CABAC("%4", "(%6)", "%3", "%w3",
"%5", "%k0", "%b0",
"%a12(%7)", "%a13(%7)")
"mov %2, %0 \n\t"
"mov %1, %k6 \n\t"
@@ -173,7 +147,7 @@ static int decode_significance_8x8_x86(CABACContext *c,
: "r"(c), "m"(minusindex), "m"(significant_coeff_ctx_base),
"m"(sig_off), "m"(last_coeff_ctx_base),
"i"(offsetof(CABACContext, bytestream)),
"i"(offsetof(CABACContext, bytestream_end)) RIP_ARG
"i"(offsetof(CABACContext, bytestream_end))
: "%"REG_c, "memory"
);
return coeff_count;