Searched refs:ebx (Results 1 – 25 of 103) sorted by relevance

/arch/x86/math-emu/
reg_norm.S
27 pushl %ebx
29 movl PARAM1,%ebx
31 movl SIGH(%ebx),%edx
32 movl SIGL(%ebx),%eax
43 subw $32,EXP(%ebx) /* This can cause an underflow */
52 subw %cx,EXP(%ebx) /* This can cause an underflow */
54 movl %edx,SIGH(%ebx)
55 movl %eax,SIGL(%ebx)
58 cmpw EXP_OVER,EXP(%ebx)
61 cmpw EXP_UNDER,EXP(%ebx)
[all …]
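
A note on what these reg_norm.S lines do: the significand is shifted left until its top bit is set, and the shift count is subtracted from the exponent, which is why the two subw lines carry the "can cause an underflow" comments. A minimal C sketch of that normalization, assuming a hypothetical fpu_reg layout with 32-bit sigl/sigh significand words and a 16-bit exponent (names here are illustrative, not the kernel's):

#include <stdint.h>

struct fpu_reg {
	uint32_t sigl, sigh;	/* low/high significand words */
	int16_t exp;		/* exponent, adjusted with subw above */
};

static void reg_norm_sketch(struct fpu_reg *r)
{
	uint64_t sig = ((uint64_t)r->sigh << 32) | r->sigl;

	if (sig == 0)
		return;			/* zero cannot be normalized */
	while (!(sig & (1ULL << 63))) {
		sig <<= 1;
		r->exp--;		/* this can underflow, as noted */
	}
	r->sigh = sig >> 32;
	r->sigl = (uint32_t)sig;
}

(The asm takes a shortcut: when the high word is zero it moves the low word up and subtracts 32 from the exponent in one step.)
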
wm_shrx.S
45 pushl %ebx
46 movl (%esi),%ebx /* lsl */
49 shrd %cl,%ebx,%eax
50 shrd %cl,%edx,%ebx
52 movl %ebx,(%esi)
54 popl %ebx
117 pushl %ebx
132 xorl %ebx,%ebx
133 shrd %cl,%eax,%ebx
136 orl %ebx,%ebx /* test these 32 bits */
[all …]
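
The shrd instruction that dominates wm_shrx.S shifts its destination right and fills the vacated high bits from a second register, so a quantity spread across several 32-bit registers can be shifted as a unit. One such step, sketched in C (assuming 0 < cnt < 32, which the asm guarantees by masking the count in %cl):

#include <stdint.h>

static void shrd_pair(uint32_t *hi, uint32_t *lo, unsigned int cnt)
{
	/* shrd %cl,%hi,%lo: low word takes the bits falling out of hi */
	*lo = (*lo >> cnt) | (*hi << (32 - cnt));
	*hi >>= cnt;
}
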
round_Xsig.S
28 pushl %ebx /* Reserve some space */
29 pushl %ebx
35 movl 4(%esi),%ebx
44 movl %ebx,%edx
45 movl %eax,%ebx
55 shld %cl,%ebx,%edx
56 shld %cl,%eax,%ebx
63 addl $1,%ebx
72 movl %ebx,4(%esi)
78 popl %ebx
[all …]
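
round_Xsig.S first normalizes with shld, then rounds the 96-bit extended significand down to 64 bits; the addl $1,%ebx line is the round-up increment into the kept low word. A sketch of that carry-propagating step, assuming msw:midw are the kept words and lsw the discarded one (round-to-nearest tie handling omitted):

#include <stdint.h>

static void round_up_sketch(uint32_t *msw, uint32_t *midw, uint32_t lsw)
{
	if (lsw & 0x80000000) {		/* top discarded bit set */
		if (++*midw == 0)	/* carry out of the low word */
			++*msw;		/* overflow here means renormalize */
	}
}
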
reg_u_sub.S
40 pushl %ebx
64 movl SIGL(%edi),%ebx /* register ls word */
82 shrd %cl,%ebx,%edx
83 shrd %cl,%eax,%ebx
96 orl %ebx,%ebx
102 movl %eax,%ebx
107 movl %ebx,%edx
108 movl %eax,%ebx
118 orl %ebx,%ebx
137 orl %ebx,%ebx
[all …]
reg_u_add.S
39 pushl %ebx
50 movl SIGL(%esi),%ebx
60 movl SIGL(%edi),%ebx
82 shrd %cl,%ebx,%edx
83 shrd %cl,%eax,%ebx
96 orl %ebx,%ebx
102 movl %eax,%ebx
107 movl %ebx,%edx
108 movl %eax,%ebx
117 orl %ebx,%ebx
[all …]
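
reg_u_sub.S and reg_u_add.S share a shape: align the smaller operand with shrd (lines 82-83 in both), then combine the significands word by word, letting the carry or borrow ripple through with adcl/sbbl. The core pattern for a two-word add, as a C sketch:

#include <stdint.h>

static void add_pair(uint32_t *hi, uint32_t *lo,
		     uint32_t add_hi, uint32_t add_lo)
{
	uint32_t old = *lo;

	*lo += add_lo;
	*hi += add_hi + (*lo < old);	/* adcl folds the carry in */
}
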
reg_u_div.S
86 pushl %ebx
89 movl REGB,%ebx
93 movswl EXP(%ebx),%eax
111 testl $0x80000000, SIGH(%ebx) /* Divisor */
116 cmpl $0,SIGL(%ebx)
120 movl SIGH(%ebx),%ecx /* The divisor */
197 cmpl SIGH(%ebx),%edx /* Test for imminent overflow */
201 cmpl SIGL(%ebx),%eax
208 subl SIGL(%ebx),%eax
209 sbbl SIGH(%ebx),%edx /* Prevent the overflow */
[all …]
div_Xsig.S
86 pushl %ebx
89 movl PARAM2,%ebx /* pointer to denom */
92 testl $0x80000000, XsigH(%ebx) /* Divisor */
136 movl XsigH(%ebx),%ecx
152 mull XsigH(%ebx) /* mul by the ms dw of the denom */
158 mull XsigL(%ebx) /* now mul the ls dw of the denom */
172 movl XsigL(%ebx),%eax
173 movl XsigH(%ebx),%edx
191 cmpl XsigH(%ebx),%edx
195 cmpl XsigL(%ebx),%eax
[all …]
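
The "Test for imminent overflow" comparisons in reg_u_div.S and div_Xsig.S guard the divl instruction, which faults when the quotient of edx:eax by a 32-bit divisor does not fit in 32 bits (that is, when edx >= divisor); the subl/sbbl pair subtracts the denominator once beforehand so the division is safe. A simplified sketch (the FPU operands are normalized, so a single subtraction suffices; a general routine would have to loop):

#include <stdint.h>

static uint32_t div_step_sketch(uint64_t num, uint32_t den,
				int *pre_subtracted)
{
	*pre_subtracted = (uint32_t)(num >> 32) >= den;
	if (*pre_subtracted)			/* divl would fault */
		num -= (uint64_t)den << 32;	/* the subl/sbbl fix-up */
	return (uint32_t)(num / den);
}

The caller folds the pre-subtracted den << 32 back into the quotient, just as the asm does after the sbbl.
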
reg_round.S
116 pushl %ebx
120 movl SIGL(%edi),%ebx
127 pushl %ebx /* adjust the stack pointer */
203 orl %ebx,%ecx
218 orl %ebx,%ebx
231 xorl %ebx,%ebx
239 orl %ebx,%ecx
245 xorl %ebx,%ebx
281 movl %ebx,%ecx
289 movl %ebx,%ecx
[all …]
reg_u_mul.S
56 pushl %ebx
69 xorl %ebx,%ebx
79 adcl %edx,%ebx
85 adcl %edx,%ebx
90 addl %eax,%ebx
117 rcll $1,%ebx
138 pop %ebx
142 popl %ebx
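
reg_u_mul.S builds a 64x64 to 128-bit product out of four 32x32 mull partial products, accumulating the middle words with addl/adcl (the xorl %ebx,%ebx / adcl %edx,%ebx lines zero an accumulator and fold carries into it). The same accumulation in portable C, as a sketch:

#include <stdint.h>

static void mul64_sketch(uint32_t a_hi, uint32_t a_lo,
			 uint32_t b_hi, uint32_t b_lo, uint32_t out[4])
{
	uint64_t p0 = (uint64_t)a_lo * b_lo;
	uint64_t p1 = (uint64_t)a_lo * b_hi;
	uint64_t p2 = (uint64_t)a_hi * b_lo;
	uint64_t p3 = (uint64_t)a_hi * b_hi;
	uint64_t mid = (p0 >> 32) + (uint32_t)p1 + (uint32_t)p2;

	out[0] = (uint32_t)p0;			/* ls word */
	out[1] = (uint32_t)mid;
	p3 += (p1 >> 32) + (p2 >> 32) + (mid >> 32);
	out[2] = (uint32_t)p3;
	out[3] = (uint32_t)(p3 >> 32);		/* ms word */
}
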
wm_sqrt.S
85 pushl %ebx
348 movl %edi,%ebx
369 movl %edx,%ebx /* 2nd ls word of square */
374 addl %eax,%ebx
375 addl %eax,%ebx
378 cmp $0xffffffb0,%ebx
381 cmp $0x00000050,%ebx
390 or %ebx,%ebx
395 or %ebx,%edx
424 movl %edx,%ebx /* 2nd ls word of square */
[all …]
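
wm_sqrt.S estimates the root, then multiplies the estimate by itself (the "2nd ls word of square" lines) and checks the squared error against a small tolerance (the $0xffffffb0 / $0x00000050 comparisons) before applying a last-bit correction. The estimate-and-refine idea itself, reduced to a plain Newton integer square root as an illustration:

#include <stdint.h>

static uint32_t isqrt_sketch(uint64_t n)
{
	uint64_t x = n, y = (x + 1) / 2;

	while (y < x) {			/* Newton: x' = (x + n/x) / 2 */
		x = y;
		y = (x + n / x) / 2;
	}
	return (uint32_t)x;
}

(The kernel version works on the 64-bit significand with extra guard bits; this sketch only shows the iteration.)
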
shr_Xsig.S
34 pushl %ebx
36 movl 4(%esi),%ebx /* midl */
38 shrd %cl,%ebx,%eax
39 shrd %cl,%edx,%ebx
42 movl %ebx,4(%esi)
44 popl %ebx
/arch/x86/lib/
checksum_32.S
56 pushl_cfi %ebx
57 CFI_REL_OFFSET ebx, 0
69 movzbl (%esi), %ebx
70 adcl %ebx, %eax
89 1: movl (%esi), %ebx
90 adcl %ebx, %eax
91 movl 4(%esi), %ebx
92 adcl %ebx, %eax
93 movl 8(%esi), %ebx
94 adcl %ebx, %eax
[all …]
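
This checksum_32.S loop is the RFC 1071 ones'-complement Internet checksum: each word is folded into a running sum with adcl, so a carry out of bit 31 wraps back into bit 0. (pushl_cfi and CFI_REL_OFFSET are just pushl plus DWARF unwind annotations for the saved ebx.) A hypothetical C equivalent for word-aligned data:

#include <stddef.h>
#include <stdint.h>

static uint32_t csum_words_sketch(const uint32_t *p, size_t nwords,
				  uint32_t sum)
{
	while (nwords--) {
		uint32_t old = sum;

		sum += *p++;
		sum += (sum < old);	/* adcl: wrap the carry back in */
	}
	return sum;	/* caller folds to 16 bits and complements */
}
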
atomic64_cx8_32.S
27 movl %ebx, %eax
71 SAVE ebx
81 movl %eax, %ebx
83 \ins\()l %esi, %ebx
90 movl %ebx, %eax
94 RESTORE ebx
107 SAVE ebx
111 movl %eax, %ebx
113 \ins\()l $1, %ebx
120 movl %ebx, %eax
[all …]
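
atomic64_cx8_32.S gives 32-bit x86 its 64-bit atomics via cmpxchg8b, which compares edx:eax with the 8-byte memory operand and, on match, stores ecx:ebx; the SAVE ebx / movl %eax,%ebx / \ins\()l %esi,%ebx lines compute the new low word in ebx before each attempt. The same retry loop, sketched with GCC's __atomic builtins (which lower to cmpxchg8b on this target; the function name is illustrative):

#include <stdint.h>

static int64_t atomic64_add_return_sketch(int64_t i, int64_t *v)
{
	int64_t old = __atomic_load_n(v, __ATOMIC_RELAXED);
	int64_t val;

	do {
		val = old + i;	/* computed in ecx:ebx in the asm */
		/* on failure, old is refreshed from *v and we retry */
	} while (!__atomic_compare_exchange_n(v, &old, val, 0,
					      __ATOMIC_SEQ_CST,
					      __ATOMIC_RELAXED));
	return val;
}
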
msr-reg.S
24 movl 12(%rdi), %ebx
34 movl %ebx, 12(%r10)
56 pushl_cfi %ebx
64 movl 12(%eax), %ebx
78 movl %ebx, 12(%eax)
86 popl_cfi %ebx
/arch/x86/um/
checksum_32.S
54 pushl %ebx
73 1: movl (%esi), %ebx
74 adcl %ebx, %eax
75 movl 4(%esi), %ebx
76 adcl %ebx, %eax
77 movl 8(%esi), %ebx
78 adcl %ebx, %eax
79 movl 12(%esi), %ebx
80 adcl %ebx, %eax
81 movl 16(%esi), %ebx
[all …]
stub_32.S
31 pop %ebx
42 pop %ebx
43 cmp %ebx, %eax
/arch/x86/kernel/cpu/
vmware.c
40 #define VMWARE_PORT(cmd, eax, ebx, ecx, edx) \ argument
42 "=a"(eax), "=c"(ecx), "=d"(edx), "=b"(ebx) : \
50 uint32_t eax, ebx, ecx, edx; in __vmware_platform() local
51 VMWARE_PORT(GETVERSION, eax, ebx, ecx, edx); in __vmware_platform()
52 return eax != (uint32_t)-1 && ebx == VMWARE_HYPERVISOR_MAGIC; in __vmware_platform()
58 uint32_t eax, ebx, ecx, edx; in vmware_get_tsc_khz() local
60 VMWARE_PORT(GETHZ, eax, ebx, ecx, edx); in vmware_get_tsc_khz()
62 tsc_hz = eax | (((uint64_t)ebx) << 32); in vmware_get_tsc_khz()
80 uint32_t eax, ebx, ecx, edx; in vmware_platform_setup() local
82 VMWARE_PORT(GETHZ, eax, ebx, ecx, edx); in vmware_platform_setup()
[all …]
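
The VMWARE_PORT macro is an inl from VMware's "backdoor" I/O port with a magic value in eax and a command in ecx; inside a VMware guest the hypervisor intercepts the access and answers through the registers, with ebx echoing the magic on success (line 52). A sketch of the GETVERSION probe with the constants the driver defines (this must run inside a VMware guest with port I/O access; elsewhere the inl faults or returns all-ones):

#include <stdint.h>

#define VMWARE_HYPERVISOR_MAGIC		0x564D5868	/* "VMXh" */
#define VMWARE_HYPERVISOR_PORT		0x5658
#define VMWARE_PORT_CMD_GETVERSION	10

static int vmware_platform_sketch(void)
{
	uint32_t eax, ebx, ecx, edx;

	asm volatile("inl (%%dx)"
		     : "=a"(eax), "=c"(ecx), "=d"(edx), "=b"(ebx)
		     : "0"(VMWARE_HYPERVISOR_MAGIC),
		       "1"(VMWARE_PORT_CMD_GETVERSION),
		       "2"(VMWARE_HYPERVISOR_PORT), "3"(~0u)
		     : "memory");
	return eax != (uint32_t)-1 && ebx == VMWARE_HYPERVISOR_MAGIC;
}
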
topology.c
22 #define LEVEL_MAX_SIBLINGS(ebx) ((ebx) & 0xffff) argument
32 unsigned int eax, ebx, ecx, edx, sub_index; in detect_extended_topology() local
40 cpuid_count(0xb, SMT_LEVEL, &eax, &ebx, &ecx, &edx); in detect_extended_topology()
45 if (ebx == 0 || (LEAFB_SUBTYPE(ecx) != SMT_TYPE)) in detect_extended_topology()
58 core_level_siblings = smp_num_siblings = LEVEL_MAX_SIBLINGS(ebx); in detect_extended_topology()
63 cpuid_count(0xb, sub_index, &eax, &ebx, &ecx, &edx); in detect_extended_topology()
69 core_level_siblings = LEVEL_MAX_SIBLINGS(ebx); in detect_extended_topology()
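
CPUID leaf 0xb (extended topology) reports, per sub-leaf, the number of logical processors at that topology level in ebx[15:0], which is exactly what LEVEL_MAX_SIBLINGS() masks out. Reading the SMT sub-leaf directly, as a sketch (the kernel's cpuid_count() issues the same ecx-qualified cpuid):

#include <stdint.h>

static unsigned int smt_siblings_sketch(void)
{
	uint32_t eax, ebx, ecx, edx;

	asm volatile("cpuid"
		     : "=a"(eax), "=b"(ebx), "=c"(ecx), "=d"(edx)
		     : "0"(0xb), "2"(0));	/* leaf 0xb, sub-leaf 0 */
	return ebx & 0xffff;			/* LEVEL_MAX_SIBLINGS */
}
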
/arch/x86/boot/compressed/
head_32.S
165 movl %ebp, %ebx
168 addl %eax, %ebx
170 andl %eax, %ebx
171 cmpl $LOAD_PHYSICAL_ADDR, %ebx
174 movl $LOAD_PHYSICAL_ADDR, %ebx
178 addl $z_extract_offset, %ebx
181 leal boot_stack_end(%ebx), %esp
193 leal (_bss-4)(%ebx), %edi
204 leal relocated(%ebx), %eax
215 leal _bss(%ebx), %edi
[all …]
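
The movl/addl/andl sequence in head_32.S rounds the address the bootloader placed us at (ebp) up to the kernel's alignment, then falls back to LOAD_PHYSICAL_ADDR if the result is below that floor; ebx ends up as the base the kernel is decompressed to. The same arithmetic as a C sketch, assuming a power-of-two alignment:

static unsigned long choose_output_sketch(unsigned long ebp,
					  unsigned long align,
					  unsigned long load_phys)
{
	unsigned long ebx = (ebp + align - 1) & ~(align - 1);

	if (ebx < load_phys)	/* cmpl $LOAD_PHYSICAL_ADDR, %ebx */
		ebx = load_phys;
	return ebx;	/* z_extract_offset is added on top of this */
}
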
head_64.S
99 movl %ebp, %ebx
102 addl %eax, %ebx
104 andl %eax, %ebx
105 cmpl $LOAD_PHYSICAL_ADDR, %ebx
108 movl $LOAD_PHYSICAL_ADDR, %ebx
112 addl $z_extract_offset, %ebx
132 leal pgtable(%ebx), %edi
138 leal pgtable + 0(%ebx), %edi
143 leal pgtable + 0x1000(%ebx), %edi
153 leal pgtable + 0x2000(%ebx), %edi
[all …]
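
head_64.S repeats the same alignment dance and then uses ebx as the base of the early identity-mapped page tables (pgtable + 0x0/0x1000/0x2000: one level-4 entry, the level-3 entries, then level-2 entries mapping 2M pages; the boot code fills enough of them to cover the first 4G). A sketch of the level-2 fill loop, with 0x183 (present | writable | PSE | global) as the boot code's 2M-page flags:

#include <stdint.h>

#define PMD_FLAGS_2M	0x183ULL	/* present + write + PSE + global */

static void fill_pmds_sketch(uint64_t *pmd, unsigned int entries)
{
	for (unsigned int i = 0; i < entries; i++)
		pmd[i] = (uint64_t)i * 0x200000 + PMD_FLAGS_2M; /* 2M steps */
}
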
/arch/x86/kernel/
relocate_kernel_32.S
44 pushl %ebx
61 movl 20+4(%esp), %ebx /* page_list */
78 movl %ebx, CP_PA_BACKUP_PAGES_MAP(%edi)
138 pushl %ebx
159 xorl %ebx, %ebx
176 popl %ebx
177 subl $(1b - relocate_kernel), %ebx
178 movl CP_VA_CONTROL_PAGE(%ebx), %edi
179 lea PAGE_SIZE(%ebx), %esp
180 movl CP_PA_SWAP_PAGE(%ebx), %eax
[all …]
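
The popl %ebx / subl $(1b - relocate_kernel), %ebx pair (lines 176-177) is the call/pop position-independence trick: a preceding call pushed the address of label 1, popping it gives its runtime address, and subtracting the label's link-time offset yields the base the code was copied to, off which CP_VA_CONTROL_PAGE and friends are then indexed. The trick in isolation, as an inline-asm sketch:

static void *current_ip_sketch(void)
{
	void *ip;

	/* call pushes the next instruction's address; pop reads it
	 * back: how 32-bit code with no rip-relative addressing
	 * discovers where it is actually running. */
	asm volatile("call 1f\n\t"
		     "1: pop %0"
		     : "=r"(ip));
	return ip;
}
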
/arch/x86/crypto/
salsa20-i586-asm_32.S
17 # ebx_stack = ebx
18 movl %ebx,84(%esp)
32 movl 16(%esp,%eax),%ebx
34 sub $0,%ebx
106 cmp $64,%ebx
114 mov %ebx,%ecx
127 movl %ebx,76(%esp)
135 movl 176(%esp),%ebx
143 movl %ebx,112(%esp)
151 movl 192(%esp),%ebx
[all …]
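
The ebx_stack comment gives away the structure of salsa20-i586-asm_32.S: the sixteen 32-bit state words rotate through the four general registers, spilling to stack slots named *_stack. The arithmetic being scheduled is the Salsa20 quarter-round (add, rotate, xor), rendered here as a plain C sketch:

#include <stdint.h>

static uint32_t rotl32(uint32_t v, int c)
{
	return (v << c) | (v >> (32 - c));
}

static void salsa20_quarterround(uint32_t *a, uint32_t *b,
				 uint32_t *c, uint32_t *d)
{
	*b ^= rotl32(*a + *d, 7);
	*c ^= rotl32(*b + *a, 9);
	*d ^= rotl32(*c + *b, 13);
	*a ^= rotl32(*d + *c, 18);
}
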
/arch/x86/kernel/cpu/microcode/
core_early.c
34 #define CPUID_IS(a, b, c, ebx, ecx, edx) \ argument
35 (!((ebx ^ (a))|(edx ^ (b))|(ecx ^ (c))))
49 u32 ebx, ecx = 0, edx; in x86_vendor() local
51 native_cpuid(&eax, &ebx, &ecx, &edx); in x86_vendor()
53 if (CPUID_IS(CPUID_INTEL1, CPUID_INTEL2, CPUID_INTEL3, ebx, ecx, edx)) in x86_vendor()
56 if (CPUID_IS(CPUID_AMD1, CPUID_AMD2, CPUID_AMD3, ebx, ecx, edx)) in x86_vendor()
65 u32 ebx, ecx = 0, edx; in x86_family() local
68 native_cpuid(&eax, &ebx, &ecx, &edx); in x86_family()
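
CPUID leaf 0 returns the CPU vendor string in ebx, edx, ecx, in that order; CPUID_IS() above simply compares those three words against the packed constants for "GenuineIntel" or "AuthenticAMD" with xor/or rather than a string compare. Reassembling the string instead, as a sketch:

#include <stdint.h>
#include <string.h>

static void cpu_vendor_sketch(char out[13])
{
	uint32_t eax = 0, ebx, ecx, edx;

	asm volatile("cpuid"
		     : "+a"(eax), "=b"(ebx), "=c"(ecx), "=d"(edx));
	memcpy(out + 0, &ebx, 4);	/* "Genu" / "Auth" */
	memcpy(out + 4, &edx, 4);	/* "ineI" / "enti" */
	memcpy(out + 8, &ecx, 4);	/* "ntel" / "cAMD" */
	out[12] = '\0';
}
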
/arch/x86/boot/
pmjump.S
29 xorl %ebx, %ebx
31 shll $4, %ebx
32 addl %ebx, 2f
60 addl %ebx, %esp
69 xorl %ebx, %ebx
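
pmjump.S computes the real-mode code segment's linear base with shll $4 (a real-mode address is segment * 16 + offset) and patches it into the 32-bit far-jump target before switching to protected mode. The addressing rule, as a C sketch:

#include <stdint.h>

static uint32_t realmode_linear_sketch(uint16_t seg, uint16_t off)
{
	return ((uint32_t)seg << 4) + off;	/* the shll $4 above */
}
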
/arch/x86/kvm/
cpuid.h
21 void kvm_cpuid(struct kvm_vcpu *vcpu, u32 *eax, u32 *ebx, u32 *ecx, u32 *edx);
40 return best && (best->ebx & bit(X86_FEATURE_TSC_ADJUST)); in guest_cpuid_has_tsc_adjust()
48 return best && (best->ebx & bit(X86_FEATURE_SMEP)); in guest_cpuid_has_smep()
56 return best && (best->ebx & bit(X86_FEATURE_SMAP)); in guest_cpuid_has_smap()
64 return best && (best->ebx & bit(X86_FEATURE_FSGSBASE)); in guest_cpuid_has_fsgsbase()
96 return best && best->ebx == X86EMUL_CPUID_VENDOR_AuthenticAMD_ebx; in guest_cpuid_is_amd()
112 return best && (best->ebx & bit(X86_FEATURE_RTM)); in guest_cpuid_has_rtm()
120 return best && (best->ebx & bit(X86_FEATURE_MPX)); in guest_cpuid_has_mpx()
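
Each guest_cpuid_has_*() helper above tests one feature bit in the ebx word of a cached CPUID leaf; TSC_ADJUST, SMEP, SMAP, RTM, FSGSBASE and MPX all live in leaf 7, sub-leaf 0, ebx. A stand-alone rendering of the same test, with a hypothetical stand-in for the cached entry type:

#include <stdint.h>

struct cpuid_entry_sketch {
	uint32_t eax, ebx, ecx, edx;
};

static int ebx_feature_sketch(const struct cpuid_entry_sketch *best,
			      unsigned int bitnum)
{
	/* mirrors "best && (best->ebx & bit(X86_FEATURE_...))" */
	return best && (best->ebx & (1u << bitnum));
}
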
