/arch/x86/kernel/ |
D | efi_stub_32.S |
    39  movl $1f, %edx
    40  subl $__PAGE_OFFSET, %edx
    41  jmp *%edx
    50  popl %edx
    51  movl %edx, saved_return_addr
    55  movl $2f, %edx
    56  subl $__PAGE_OFFSET, %edx
    57  pushl %edx
    62  movl %cr0, %edx
    63  andl $0x7fffffff, %edx
    [all …]
|
D | verify_cpu_64.S |
    59  cmpl $0x69746e65,%edx
    68  andl $REQUIRED_MASK0,%edx
    69  xorl $REQUIRED_MASK0,%edx
    79  andl $REQUIRED_MASK1,%edx
    80  xorl $REQUIRED_MASK1,%edx
    86  andl $SSE_MASK,%edx
    87  cmpl $SSE_MASK,%edx
|
D | entry_32.S |
    127  pushl %edx; \
    129  CFI_REL_OFFSET edx, 0;\
    136  movl $(__USER_DS), %edx; \
    137  movl %edx, %ds; \
    138  movl %edx, %es; \
    139  movl $(__KERNEL_PERCPU), %edx; \
    140  movl %edx, %fs
    149  popl %edx; \
    151  CFI_RESTORE edx;\
    216  CFI_OFFSET edx, PT_EDX-PT_OLDESP;\
    [all …]
|
/arch/x86/math-emu/ |
D | wm_shrx.S |
    47  movl 4(%esi),%edx   /* msl */
    50  shrd %cl,%edx,%ebx
    51  shr %cl,%edx
    53  movl %edx,4(%esi)
    65  movl 4(%esi),%edx   /* msl */
    66  shrd %cl,%edx,%eax
    67  shr %cl,%edx
    68  movl %edx,(%esi)
    81  xorl %edx,%edx
    82  movl %edx,(%esi)
    [all …]
|
D | round_Xsig.S |
    34  movl 8(%esi),%edx
    40  orl %edx,%edx   /* ms bits */
    44  movl %ebx,%edx
    51  bsrl %edx,%ecx   /* get the required shift in %ecx */
    55  shld %cl,%ebx,%edx
    64  adcl $0,%edx
    67  movl $0x80000000,%edx
    71  movl %edx,8(%esi)
    94  movl 8(%esi),%edx
    100  orl %edx,%edx   /* ms bits */
    [all …]
|
D | reg_u_div.S |
    92  movswl EXP(%esi),%edx
    94  subl %eax,%edx
    95  addl EXP_BIAS,%edx
    98  cmpl EXP_WAY_UNDER,%edx
    102  movl EXP_WAY_UNDER,%edx
    121  movl SIGH(%esi),%edx   /* Dividend */
    124  cmpl %ecx,%edx
    128  subl %ecx,%edx   /* Prevent the overflow */
    190  movl SIGH(%esi),%edx
    197  cmpl SIGH(%ebx),%edx   /* Test for imminent overflow */
    [all …]
|
D | reg_u_sub.S |
    67  movl PARAM6,%edx
    70  xorl %edx,%edx   /* register extension */
    82  shrd %cl,%ebx,%edx
    94  shrd %cl,%eax,%edx
    99  orl $1,%edx   /* record the fact in the extension */
    107  movl %ebx,%edx
    117  movl %eax,%edx
    121  orl $1,%edx
    129  movl %eax,%edx
    130  rcrl %edx
    [all …]
|
D | div_Xsig.S |
    129  movl FPU_accum_3,%edx
    142  mov %edx,%eax
    155  sbbl %edx,FPU_accum_3
    161  sbbl %edx,FPU_accum_2
    173  movl XsigH(%ebx),%edx
    175  sbbl %edx,FPU_accum_2
    187  movl FPU_accum_2,%edx   /* get the reduced num */
    191  cmpl XsigH(%ebx),%edx
    202  sbbl XsigH(%ebx),%edx
    203  movl %edx,FPU_accum_2
    [all …]
|
D | mul_Xsig.S |
    42  movl %edx,-12(%ebp)
    47  adcl %edx,-8(%ebp)
    53  adcl %edx,-4(%ebp)
    82  movl %edx,-12(%ebp)
    86  addl %edx,-12(%ebp)
    93  adcl %edx,-8(%ebp)
    99  adcl %edx,-8(%ebp)
    105  adcl %edx,-4(%ebp)
    135  movl %edx,-12(%ebp)
    139  addl %edx,-12(%ebp)
    [all …]
|
D | shr_Xsig.S |
    37  movl 8(%esi),%edx   /* msl */
    39  shrd %cl,%edx,%ebx
    40  shr %cl,%edx
    43  movl %edx,8(%esi)
    55  movl 8(%esi),%edx   /* msl */
    56  shrd %cl,%edx,%eax
    57  shr %cl,%edx
    59  movl %edx,4(%esi)
    72  xorl %edx,%edx
    74  movl %edx,4(%esi)
    [all …]
|
D | reg_round.S |
    121  movl PARAM2,%edx
    204  orl %edx,%ecx
    221  orl %edx,%edx
    240  orl %edx,%ecx
    283  orl %edx,%ecx
    297  orl %edx,%edx
    315  orl %edx,%ecx
    348  orl %edx,%edx
    356  orl %edx,%edx
    361  cmpl $0x80000000,%edx
    [all …]
|
D | wm_sqrt.S |
    91  xorl %edx,%edx
    100  rcrl $1,%edx
    108  movl %edx,FPU_fsqrt_arg_0
    115  shll %edx   /* max result was 7fff... */
    116  testl $0x80000000,%edx   /* but min was 3fff... */
    119  movl $0x80000000,%edx   /* round up */
    122  movl %edx,%esi   /* Our first guess */
    139  movl %ecx,%edx   /* msw of the arg / 2 */
    144  movl %ecx,%edx
    149  movl %ecx,%edx
    [all …]
|
D | reg_norm.S |
    31  movl SIGH(%ebx),%edx
    34  orl %edx,%edx   /* ms bits */
    41  movl %eax,%edx
    47  bsrl %edx,%ecx   /* get the required shift in %ecx */
    50  shld %cl,%eax,%edx
    54  movl %edx,SIGH(%ebx)
    108  movl SIGH(%ebx),%edx
    111  orl %edx,%edx   /* ms bits */
    118  movl %eax,%edx
    124  bsrl %edx,%ecx   /* get the required shift in %ecx */
    [all …]
|
D | reg_u_add.S |
    45  movl %ecx,%edx
    54  movl PARAM7,%edx
    67  xorl %edx,%edx   /* clear the extension */
    82  shrd %cl,%ebx,%edx
    94  shrd %cl,%eax,%edx
    99  orl $1,%edx   /* record the fact in the extension */
    107  movl %ebx,%edx
    116  movl %eax,%edx
    120  orl $1,%edx
    124  movl $1,%edx   /* The shifted nr always at least one '1' */
    [all …]
|
/arch/um/sys-i386/ |
D | setjmp.S |
    23  movl %eax,%edx
    25  movl 4(%esp),%edx
    29  movl %ebx,(%edx)
    30  movl %esp,4(%edx)   # Post-return %esp!
    32  movl %ebp,8(%edx)
    33  movl %esi,12(%edx)
    34  movl %edi,16(%edx)
    35  movl %ecx,20(%edx)   # Return address
    46  xchgl %eax,%edx
    48  movl 4(%esp),%edx   # jmp_ptr address
    [all …]
|
D | checksum.S |
    68  movl %ecx, %edx
    92  2: movl %edx, %ecx
    93  andl $0x1c, %edx
    95  shrl $2, %edx   # This clears CF
    98  dec %edx
    131  movl %ecx, %edx
    198  movl %edx, %ecx
    280  SRC( movl 4(%esi), %edx )
    283  adcl %edx, %eax
    284  DST( movl %edx, 4(%edi) )
    [all …]
|
/arch/x86/kernel/cpu/ |
D | vmware.c |
    35  #define VMWARE_PORT(cmd, eax, ebx, ecx, edx) \  argument
    37  "=a"(eax), "=c"(ecx), "=d"(edx), "=b"(ebx) : \
    45  uint32_t eax, ebx, ecx, edx;  in __vmware_platform() local
    46  VMWARE_PORT(GETVERSION, eax, ebx, ecx, edx);  in __vmware_platform()
    53  uint32_t eax, ebx, ecx, edx;  in __vmware_get_tsc_khz() local
    55  VMWARE_PORT(GETHZ, eax, ebx, ecx, edx);  in __vmware_get_tsc_khz()
    73  unsigned int eax, ebx, ecx, edx;  in vmware_platform() local
    76  cpuid(CPUID_VMWARE_INFO_LEAF, &eax, &ebx, &ecx, &edx);  in vmware_platform()
    79  memcpy(hyper_vendor_id + 8, &edx, 4);  in vmware_platform()
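The cpuid()/memcpy() lines in vmware_platform() assemble the hypervisor vendor string from the ebx, ecx and edx words of the VMware CPUID leaf. A minimal user-space sketch of that pattern follows; the 0x40000000 leaf value (assumed to be CPUID_VMWARE_INFO_LEAF) and the helper name are assumptions, not taken from vmware.c.

    #include <stdint.h>
    #include <string.h>

    /* Hypothetical helper: read the hypervisor vendor signature the way
     * vmware_platform() does. CPUID leaf 0x40000000 returns the signature
     * split across ebx:ecx:edx; the edx word supplies bytes 8..11. */
    static void read_hypervisor_id(char id[13])
    {
            uint32_t eax, ebx, ecx, edx;

            asm volatile("cpuid"
                         : "=a"(eax), "=b"(ebx), "=c"(ecx), "=d"(edx)
                         : "0"(0x40000000));
            memcpy(id + 0, &ebx, 4);
            memcpy(id + 4, &ecx, 4);
            memcpy(id + 8, &edx, 4);   /* same role as hyper_vendor_id + 8 */
            id[12] = '\0';
    }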
|
/arch/x86/crypto/ |
D | salsa20-i586-asm_32.S |
    25  movl 4(%esp,%eax),%edx
    38  movl 0(%edx),%eax
    40  movl 4(%edx),%ecx
    42  movl 8(%edx),%ebp
    46  movl 12(%edx),%eax
    50  movl 16(%edx),%ecx
    54  movl 20(%edx),%ebp
    58  movl 24(%edx),%eax
    62  movl 28(%edx),%ecx
    66  movl 32(%edx),%ebp
    [all …]
|
/arch/x86/power/ |
D | hibernate_asm_32.S |
    34  movl restore_pblist, %edx
    38  testl %edx, %edx
    41  movl pbe_address(%edx), %esi
    42  movl pbe_orig_address(%edx), %edi
    48  movl pbe_next(%edx), %edx
    60  movl %ecx, %edx
    61  andl $~(X86_CR4_PGE), %edx
    62  movl %edx, %cr4;  # turn off PGE
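The loop above keeps the current list element in %edx: it walks restore_pblist and copies each saved page back to its original address before touching CR4. Roughly the same walk in C, assuming the struct pbe fields that the pbe_address/pbe_orig_address/pbe_next offsets refer to; this is a sketch only, since the assembly runs on a temporary page table that C code cannot replicate.

    #include <string.h>

    #define PAGE_SIZE 4096          /* assumption for the sketch */

    struct pbe {
            void *address;          /* address of the saved copy */
            void *orig_address;     /* where the page has to go back to */
            struct pbe *next;
    };

    static void restore_image_pages(struct pbe *restore_pblist)
    {
            struct pbe *p;

            for (p = restore_pblist; p != NULL; p = p->next)
                    memcpy(p->orig_address, p->address, PAGE_SIZE);
    }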
|
/arch/x86/lib/ |
D | copy_user_64.S |
    46  subl %ecx,%edx
    55  103: addl %ecx,%edx   /* ecx is zerorest also */
    109  movl %edx,%ecx
    114  movl %edx,%eax
    135  cmpl $8,%edx
    138  movl %edx,%ecx
    139  andl $63,%edx
    162  17: movl %edx,%ecx
    163  andl $7,%edx
    172  20: andl %edx,%edx
    [all …]
|
D | thunk_32.S |
    13  pushl %edx; \
    15  popl %edx; \
    22  pushl %edx; \
    24  popl %edx; \
    35  pushl %edx
    39  popl %edx
|
D | copy_user_nocache_64.S |
    26  subl %ecx,%edx
    35  103: addl %ecx,%edx   /* ecx is zerorest also */
    53  cmpl $8,%edx
    56  movl %edx,%ecx
    57  andl $63,%edx
    80  17: movl %edx,%ecx
    81  andl $7,%edx
    90  20: andl %edx,%edx
    92  movl %edx,%ecx
    105  addl %ecx,%edx
    [all …]
|
D | checksum_32.S |
    86  movl %ecx, %edx
    110  2: movl %edx, %ecx
    111  andl $0x1c, %edx
    113  shrl $2, %edx   # This clears CF
    116  dec %edx
    164  movl %ecx, %edx
    246  movl %edx, %ecx
    344  SRC( movl 4(%esi), %edx )
    347  adcl %edx, %eax
    348  DST( movl %edx, 4(%edi) )
    [all …]
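The adcl chain visible above is the core of csum_partial(): 32-bit words are accumulated with end-around carry and folded down to 16 bits later. A simplified C sketch of that accumulation follows; the function name is illustrative, alignment handling and odd tail bytes are omitted, and the final one's-complement inversion done by csum_fold() is left to the caller.

    #include <stdint.h>
    #include <stddef.h>

    /* Ones'-complement sum over 32-bit words, mirroring the adcl carry
     * chain in checksum_32.S (a 64-bit accumulator absorbs the carries). */
    static uint16_t csum32_sketch(const uint32_t *buf, size_t nwords)
    {
            uint64_t sum = 0;

            while (nwords--)
                    sum += *buf++;
            sum = (sum & 0xffffffff) + (sum >> 32);   /* fold 64 -> 32 bits */
            sum = (sum & 0xffff) + (sum >> 16);       /* fold 32 -> 16 bits */
            sum = (sum & 0xffff) + (sum >> 16);       /* absorb last carry  */
            return (uint16_t)sum;
    }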
|
/arch/x86/include/asm/ |
D | xcr.h |
    29  u32 eax, edx;  in xgetbv() local
    32  : "=a" (eax), "=d" (edx)  in xgetbv()
    34  return eax + ((u64)edx << 32);  in xgetbv()
    40  u32 edx = value >> 32;  in xsetbv() local
    43  : : "a" (eax), "d" (edx), "c" (index));  in xsetbv()
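Both helpers in xcr.h split a 64-bit extended control register across %edx:%eax, with eax holding the low half. A user-space sketch of the xgetbv() side, reading XCR0 (index 0), assuming a toolchain that accepts the xgetbv mnemonic; the function name is illustrative.

    #include <stdint.h>

    /* Read the XCR selected by %ecx; the CPU returns it in %edx:%eax,
     * matching the "=a"/"=d" outputs used in xcr.h. */
    static inline uint64_t xgetbv_sketch(uint32_t index)
    {
            uint32_t eax, edx;

            asm volatile("xgetbv" : "=a"(eax), "=d"(edx) : "c"(index));
            return eax | ((uint64_t)edx << 32);
    }

    /* Usage: uint64_t xcr0 = xgetbv_sketch(0); bit 1 set means SSE state
     * is enabled for XSAVE. */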
|
/arch/x86/boot/ |
D | cpucheck.c |
    204  u32 eax, edx;  in check_cpu() local
    206  asm("rdmsr" : "=a" (eax), "=d" (edx) : "c" (ecx));  in check_cpu()
    208  asm("wrmsr" : : "a" (eax), "d" (edx), "c" (ecx));  in check_cpu()
    219  u32 eax, edx;  in check_cpu() local
    221  asm("rdmsr" : "=a" (eax), "=d" (edx) : "c" (ecx));  in check_cpu()
    223  asm("wrmsr" : : "a" (eax), "d" (edx), "c" (ecx));  in check_cpu()
    231  u32 eax, edx;  in check_cpu() local
    234  asm("rdmsr" : "=a" (eax), "=d" (edx) : "c" (ecx));  in check_cpu()
    235  asm("wrmsr" : : "a" (~0), "d" (edx), "c" (ecx));  in check_cpu()
    239  asm("wrmsr" : : "a" (eax), "d" (edx), "c" (ecx));  in check_cpu()
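check_cpu() uses the same edx:eax split for MSR access: rdmsr returns the 64-bit MSR value in %edx:%eax and wrmsr takes it back the same way, with %ecx selecting the register. A hedged sketch of wrapping that pattern; the names are illustrative and the code only runs in a context privileged to execute rdmsr/wrmsr.

    #include <stdint.h>

    /* Combine the %edx:%eax halves into one 64-bit value, as check_cpu()
     * effectively does around its rdmsr/wrmsr asm statements. */
    static inline uint64_t rdmsr_sketch(uint32_t msr)
    {
            uint32_t eax, edx;

            asm volatile("rdmsr" : "=a"(eax), "=d"(edx) : "c"(msr));
            return ((uint64_t)edx << 32) | eax;
    }

    static inline void wrmsr_sketch(uint32_t msr, uint64_t val)
    {
            asm volatile("wrmsr"
                         : : "a"((uint32_t)val),
                             "d"((uint32_t)(val >> 32)),
                             "c"(msr));
    }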
|