Searched refs:VPX_ARCH_X86_64 (Results 1 – 25 of 49) sorted by relevance
46  #if VPX_ARCH_X86_64
62  #if VPX_ARCH_X86_64
82  #if VPX_ARCH_X86_64
259 #if VPX_ARCH_X86_64  in x86_readtsc()
277 #if VPX_ARCH_X86_64  in x86_readtsc64()
299 #if VPX_ARCH_X86_64  in x86_readtscp()
325 #if VPX_ARCH_X86_64
350 #elif VPX_ARCH_X86_64
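The x86_readtsc() family above gates its timestamp-counter helpers on VPX_ARCH_X86_64. A minimal sketch of that pattern, assuming the usual vpx_ports/x86.h approach (GCC-style inline asm where available, MSVC intrinsics otherwise; the exact libvpx bodies may differ):

    #if defined(_MSC_VER)
    #include <intrin.h> /* __rdtsc() */
    #endif

    static unsigned int x86_readtsc_sketch(void) {
    #if defined(__GNUC__)
      unsigned int tsc;
      __asm__ __volatile__("rdtsc\n\t" : "=a"(tsc) : : "edx");
      return tsc;
    #elif defined(_MSC_VER)
    #if VPX_ARCH_X86_64
      /* 64-bit MSVC has no inline assembly; use the intrinsic. */
      return (unsigned int)__rdtsc();
    #else
      unsigned int tsc;
      __asm {
        rdtsc
        mov tsc, eax
      }
      return tsc;
    #endif
    #else
      return 0; /* toolchains not covered by this sketch */
    #endif
    }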
24 ifeq ($(VPX_ARCH_X86_64),yes)
29 ifeq ($(VPX_ARCH_X86_64),yes)
33 ifeq ($(VPX_ARCH_X86)$(VPX_ARCH_X86_64),yes)
20 #if (VPX_ARCH_X86 || VPX_ARCH_X86_64) && HAVE_MMX
15 #if VPX_ARCH_X86 || VPX_ARCH_X86_64
29 #if VPX_ARCH_X86 || VPX_ARCH_X86_64
42 #if VPX_ARCH_X86 || VPX_ARCH_X86_64  in main()
31  #if defined(_WIN64) && VPX_ARCH_X86_64
143 #if VPX_ARCH_X86 || VPX_ARCH_X86_64
25  #if HAVE_SSE2 && VPX_ARCH_X86_64
71  #if VPX_ARCH_X86_64  in vp8_loop_filter_bh_sse2()
104 #if VPX_ARCH_X86_64  in vp8_loop_filter_bv_sse2()
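The vp8_loop_filter_bh_sse2()/vp8_loop_filter_bv_sse2() hits suggest a 64-bit-only fast path inside the SSE2 loop filter, since x86-64 adds xmm8-xmm15 and can keep a whole macroblock's rows in registers. A plausible shape for that dispatch, with hypothetical helper names and types (only vp8_loop_filter_bh_sse2 itself appears in the matches):

    typedef struct {
      const unsigned char *blim, *lim, *hev_thr;
    } lf_info_sketch;

    /* Hypothetical per-edge SSE2 routine (stand-in for the real asm). */
    void filter_horizontal_edge_sse2(unsigned char *s, int stride,
                                     const lf_info_sketch *lfi);
    #if VPX_ARCH_X86_64
    /* Hypothetical combined routine, viable with xmm8-xmm15 available. */
    void filter_bh_y_sse2(unsigned char *y, int stride,
                          const lf_info_sketch *lfi);
    #endif

    static void loop_filter_bh_sketch(unsigned char *y, int y_stride,
                                      const lf_info_sketch *lfi) {
    #if VPX_ARCH_X86_64
      /* One routine filters all three inner edges without spilling. */
      filter_bh_y_sse2(y, y_stride, lfi);
    #else
      /* 32-bit builds have only xmm0-xmm7: one inner edge per call. */
      filter_horizontal_edge_sse2(y + 4 * y_stride, y_stride, lfi);
      filter_horizontal_edge_sse2(y + 8 * y_stride, y_stride, lfi);
      filter_horizontal_edge_sse2(y + 12 * y_stride, y_stride, lfi);
    #endif
    }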
15 #elif VPX_ARCH_X86 || VPX_ARCH_X86_64
95 #elif VPX_ARCH_X86 || VPX_ARCH_X86_64  in vp8_machine_specific_config()
117 #if VPX_ARCH_X86_64  in vp9_diamond_search_sad_avx()
141 #if VPX_ARCH_X86_64  in vp9_diamond_search_sad_avx()
178 #if VPX_ARCH_X86_64 // sizeof(intptr_t) == 8  in vp9_diamond_search_sad_avx()
297 #if VPX_ARCH_X86_64  in vp9_diamond_search_sad_avx()
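The "sizeof(intptr_t) == 8" annotation explains why vp9_diamond_search_sad_avx() branches on VPX_ARCH_X86_64: candidate addresses are formed in SIMD lanes, and a pointer needs a 64-bit lane on x86-64 but only a 32-bit lane on 32-bit x86. An illustrative sketch (the names are not libvpx's):

    #include <emmintrin.h> /* SSE2 */
    #include <stdint.h>

    static void compute_two_candidates(const uint8_t *base, int32_t off0,
                                       int32_t off1, const uint8_t *out[2]) {
    #if VPX_ARCH_X86_64 /* sizeof(intptr_t) == 8 */
      __m128i v_base = _mm_set1_epi64x((int64_t)(intptr_t)base);
      __m128i v_off = _mm_set_epi64x(off1, off0);
      /* Two full 64-bit pointers occupy the whole register. */
      _mm_storeu_si128((__m128i *)(void *)out,
                       _mm_add_epi64(v_base, v_off));
    #else /* sizeof(intptr_t) == 4 */
      __m128i v_base = _mm_set1_epi32((int32_t)(intptr_t)base);
      __m128i v_off = _mm_set_epi32(0, 0, off1, off0);
      /* Two 32-bit pointers fit in the low 64 bits. */
      _mm_storel_epi64((__m128i *)(void *)out,
                       _mm_add_epi32(v_base, v_off));
    #endif
    }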
61  %if VPX_ARCH_X86_64
108 %if VPX_ARCH_X86_64
48   %if VPX_ARCH_X86_64
57   %if VPX_ARCH_X86_64
168  %if VPX_ARCH_X86_64 == 0
263  %elif VPX_ARCH_X86_64 ; memory
284  %if VPX_ARCH_X86_64 == 0
321  %if VPX_ARCH_X86_64
610  %elif VPX_ARCH_X86_64 ; *nix x64 ;=============================================
951  %if VPX_ARCH_X86_64 || cpuflag(sse2)
1008 %if VPX_ARCH_X86_64
1029 %if VPX_ARCH_X86_64
94  %if VPX_ARCH_X86_64
271 %if VPX_ARCH_X86_64
275 %if VPX_ARCH_X86_64 && mmsize == 16
498 %if VPX_ARCH_X86_64
502 %if VPX_ARCH_X86_64 && mmsize == 16
620 %if VPX_ARCH_X86_64
624 %if VPX_ARCH_X86_64 && mmsize == 16
719 %if VPX_ARCH_X86_64
723 %if VPX_ARCH_X86_64 && mmsize == 16
846 %if VPX_ARCH_X86_64
[all …]
29  ; TODO(slavarnway): using xmm registers for these on VPX_ARCH_X86_64 +
51  %if VPX_ARCH_X86_64
71  %if VPX_ARCH_X86_64
82  %if VPX_ARCH_X86_64
342 %if VPX_ARCH_X86_64 && X86_SUBPIX_VFILTER_PREFER_SLOW_CELERON
364 %if VPX_ARCH_X86_64
470 ; VPX_ARCH_X86_64
570 %endif ; VPX_ARCH_X86_64
586 %if VPX_ARCH_X86_64
657 ; VPX_ARCH_X86_64
[all …]
67  %if VPX_ARCH_X86_64
200 %if VPX_ARCH_X86_64
280 %if VPX_ARCH_X86_64
361 %if VPX_ARCH_X86_64
442 %if VPX_ARCH_X86_64
117 %if VPX_ARCH_X86_64
678 %if VPX_ARCH_X86_64
682 %if VPX_ARCH_X86_64 && %1 > 4
978 %if VPX_ARCH_X86_64
982 %if VPX_ARCH_X86_64 && %1 > 4
72 VP8_COMMON_SRCS-$(VPX_ARCH_X86)$(VPX_ARCH_X86_64) += common/x86/vp8_asm_stubs.c
73 VP8_COMMON_SRCS-$(VPX_ARCH_X86)$(VPX_ARCH_X86_64) += common/x86/loopfilter_x86.c
95 ifeq ($(VPX_ARCH_X86_64),yes)
54 #if VPX_ARCH_X86 || VPX_ARCH_X86_64
4 %define VPX_ARCH_X86_64 0
16 #define VPX_ARCH_X86_64 0 macro
4 %define VPX_ARCH_X86_64 1
16 #define VPX_ARCH_X86_64 1 macro
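The two pairs above come from the generated configuration files: vpx_config.asm and vpx_config.h are emitted together so that %if VPX_ARCH_X86_64 in assembly and #if VPX_ARCH_X86_64 in C always agree. A hypothetical compile-time sanity check (not part of libvpx) illustrating what the C side relies on:

    #include "vpx_config.h" /* generated; defines VPX_ARCH_X86_64 to 0 or 1 */

    #if VPX_ARCH_X86_64
    /* Fails to compile if the generated value disagrees with the target ABI. */
    typedef char vpx_arch_x86_64_check[sizeof(void *) == 8 ? 1 : -1];
    #endif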
65 #if VPX_ARCH_X86 || VPX_ARCH_X86_64
90  DSP_SRCS-$(VPX_ARCH_X86)$(VPX_ARCH_X86_64) += x86/convolve.h
208 ifeq ($(VPX_ARCH_X86_64),yes)
319 ifeq ($(VPX_ARCH_X86_64),yes)
387 ifeq ($(VPX_ARCH_X86_64),yes)