/arch/x86/lib/

memmove_64.S
    36: cmp %rdi, %rsi
    38: mov %rsi, %r8
    63: movq 0*8(%rsi), %r11
    64: movq 1*8(%rsi), %r10
    65: movq 2*8(%rsi), %r9
    66: movq 3*8(%rsi), %r8
    67: leaq 4*8(%rsi), %rsi
    83: movq -8(%rsi, %rdx), %r11
    97: movq (%rsi), %r11
    99: leaq -8(%rsi, %rdx), %rsi
    [all …]
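The memmove_64.S hits are the two-direction copy: %rdi holds the destination, %rsi the source, and the cmp %rdi, %rsi at line 36 picks either a forward copy (the unrolled loads at lines 63-67) or a backward one starting at -8(%rsi, %rdx), so overlapping buffers are handled safely. A minimal sketch of that direction check, with a made-up symbol name and plain rep movsb in place of the kernel's unrolled loops:

    # Hedged sketch only: overlap_safe_copy is a made-up symbol, not the
    # kernel's memmove. SysV ABI: %rdi = dst, %rsi = src, %rdx = length.
            .text
            .globl  overlap_safe_copy
    overlap_safe_copy:
            movq    %rdi, %rax              # memmove-style: return dst
            movq    %rdx, %rcx              # rep movsb takes its count in %rcx
            cmpq    %rdi, %rsi              # compare src with dst to pick a direction
            jae     1f                      # src >= dst: forward copy cannot clobber src
            leaq    -1(%rsi, %rdx), %rsi    # otherwise start from the last byte...
            leaq    -1(%rdi, %rdx), %rdi
            std                             # ...and let movsb walk downwards
            rep movsb
            cld                             # restore the ABI-required DF = 0
            ret
    1:
            rep movsb                       # plain forward byte copy
            ret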
copy_page_64.S
    35: movq (%rsi), %rax
    36: movq 8 (%rsi), %rbx
    37: movq 16 (%rsi), %rdx
    38: movq 24 (%rsi), %r8
    39: movq 32 (%rsi), %r9
    40: movq 40 (%rsi), %r10
    41: movq 48 (%rsi), %r11
    42: movq 56 (%rsi), %r12
    44: prefetcht0 5*64(%rsi)
    55: leaq 64 (%rsi), %rsi
    [all …]
memcpy_64.S
    77: movq 0*8(%rsi), %r8
    78: movq 1*8(%rsi), %r9
    79: movq 2*8(%rsi), %r10
    80: movq 3*8(%rsi), %r11
    81: leaq 4*8(%rsi), %rsi
    96: addq %rdx, %rsi
    106: movq -1*8(%rsi), %r8
    107: movq -2*8(%rsi), %r9
    108: movq -3*8(%rsi), %r10
    109: movq -4*8(%rsi), %r11
    [all …]
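The memcpy_64.S lines 77-81 are the body of the unrolled forward loop: four quadwords go from %rsi into %r8-%r11 and %rsi advances by 32 bytes, while the negative offsets at lines 106-109 belong to a tail that works backwards from %rsi + %rdx. A sketch of the same 4x8-byte unrolling, with a made-up symbol and the simplifying assumption that the length is a multiple of 32 bytes:

    # Sketch of the 4x8-byte unrolling, not the kernel routine itself.
    # copy_4q_blocks is a made-up symbol; %rdi = dst, %rsi = src,
    # %rdx = length, assumed to be a multiple of 32.
            .text
            .globl  copy_4q_blocks
    copy_4q_blocks:
            shrq    $5, %rdx                # number of 32-byte blocks
            jz      2f
    1:
            movq    0*8(%rsi), %r8          # load four quadwords...
            movq    1*8(%rsi), %r9
            movq    2*8(%rsi), %r10
            movq    3*8(%rsi), %r11
            leaq    4*8(%rsi), %rsi         # ...advance the source pointer...
            movq    %r8, 0*8(%rdi)          # ...and store them to the destination
            movq    %r9, 1*8(%rdi)
            movq    %r10, 2*8(%rdi)
            movq    %r11, 3*8(%rdi)
            leaq    4*8(%rdi), %rdi
            decq    %rdx
            jnz     1b
    2:
            ret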
copy_user_nocache_64.S
    27: 100: movb (%rsi),%al
    29: incq %rsi
    60: 1: movq (%rsi),%r8
    61: 2: movq 1*8(%rsi),%r9
    62: 3: movq 2*8(%rsi),%r10
    63: 4: movq 3*8(%rsi),%r11
    68: 9: movq 4*8(%rsi),%r8
    69: 10: movq 5*8(%rsi),%r9
    70: 11: movq 6*8(%rsi),%r10
    71: 12: movq 7*8(%rsi),%r11
    [all …]
csum-copy_64.S
    129: movq %rbx, (%rsi)
    131: movq %r8, 8(%rsi)
    133: movq %r11, 16(%rsi)
    135: movq %rdx, 24(%rsi)
    138: movq %r10, 32(%rsi)
    140: movq %rbp, 40(%rsi)
    142: movq %r14, 48(%rsi)
    144: movq %r13, 56(%rsi)
    149: leaq 64(%rsi), %rsi
    170: movq %rbx, (%rsi)
    [all …]
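csum-copy_64.S computes a checksum while copying, which is why these hits are stores to (%rsi): the destination is in %rsi and the running sum is built from the data as it moves. A rough sketch of that accumulate-while-copying idea; the symbol and the quadword-count interface are assumptions, and the 16-bit fold and the fault handling of the real routine are left out:

    # Sketch of accumulate-while-copying; name and interface are assumptions.
    # %rdi = src, %rsi = dst, %rdx = number of quadwords; a 64-bit sum is
    # returned in %rax (the real routine folds it to an internet checksum).
            .text
            .globl  copy_and_sum_quads
    copy_and_sum_quads:
            xorq    %rax, %rax              # running sum, and CF cleared
            testq   %rdx, %rdx
            jz      2f
    1:
            movq    (%rdi), %rcx            # load a quadword from the source
            adcq    %rcx, %rax              # add it plus the carry from the last add
            movq    %rcx, (%rsi)            # store the same quadword to the destination
            leaq    8(%rdi), %rdi           # lea and dec leave CF untouched,
            leaq    8(%rsi), %rsi           # so the carry survives the loop control
            decq    %rdx
            jnz     1b
            adcq    $0, %rax                # fold the final carry back in
    2:
            ret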
copy_user_64.S
    54: 100: movb (%rsi),%al
    56: incq %rsi
    93: movq %rsi,%rcx
    142: 1: movq (%rsi),%r8
    143: 2: movq 1*8(%rsi),%r9
    144: 3: movq 2*8(%rsi),%r10
    145: 4: movq 3*8(%rsi),%r11
    150: 9: movq 4*8(%rsi),%r8
    151: 10: movq 5*8(%rsi),%r9
    152: 11: movq 6*8(%rsi),%r10
    [all …]
cmpxchg16b_emu.S
    46: cmpq SEG_PREFIX(%rsi), %rax
    48: cmpq SEG_PREFIX 8(%rsi), %rdx
    51: movq %rbx, SEG_PREFIX(%rsi)
    52: movq %rcx, SEG_PREFIX 8(%rsi)
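cmpxchg16b_emu.S is the fallback for CPUs without the cmpxchg16b instruction: the two quadwords at the %rsi-addressed location are compared against %rax:%rdx, and only if both match are %rbx:%rcx written back (SEG_PREFIX is a segment override in the real file). A stripped-down sketch of that compare-then-store shape:

    # Sketch of the compare-then-store shape only; cmpxchg16b_sketch is a
    # made-up symbol. It drops the segment prefix, reports success in %eax
    # rather than in ZF, and does nothing to make the sequence atomic.
    # %rsi = pointer, %rax:%rdx = expected value, %rbx:%rcx = new value.
            .text
            .globl  cmpxchg16b_sketch
    cmpxchg16b_sketch:
            cmpq    (%rsi), %rax            # low quadword still the expected one?
            jne     1f
            cmpq    8(%rsi), %rdx           # high quadword too?
            jne     1f
            movq    %rbx, (%rsi)            # both matched: store the new 128-bit value
            movq    %rcx, 8(%rsi)
            movl    $1, %eax                # success
            ret
    1:
            xorl    %eax, %eax              # mismatch: leave memory untouched
            ret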
rwsem.S
    68: pushq_cfi %rsi; CFI_REL_OFFSET rsi, 0; \
    81: popq_cfi %rsi; CFI_RESTORE rsi; \
/arch/x86/kernel/

relocate_kernel_64.S
    61: movq PTR(VA_CONTROL_PAGE)(%rsi), %r11
    78: movq PTR(PA_CONTROL_PAGE)(%rsi), %r8
    81: movq PTR(PA_TABLE_PAGE)(%rsi), %r9
    84: movq PTR(PA_SWAP_PAGE)(%rsi), %r10
    158: xorq %rsi, %rsi
    216: xorq %rsi, %rsi
    242: movq %rcx, %rsi /* For ever source page do a copy */
    243: andq $0xfffffffffffff000, %rsi
    246: movq %rsi, %rax
    253: movq %rdx, %rsi
    [all …]
entry_64.S
    82: movq 8(%rbp), %rsi
    122: movq 8(%rbp), %rsi
    142: movq 0x38(%rsp), %rsi
    144: subq $MCOUNT_INSN_SIZE, %rsi
    279: CFI_REL_OFFSET rsi, RSI+\offset-ARGOFFSET
    307: movq_cfi rsi, RSI-RBP
    320: movq %rsp, %rsi
    323: testl $3, CS-RBP(%rsi)
    334: CFI_DEF_CFA_REGISTER rsi
    337: pushq %rsi
    [all …]
/arch/x86/crypto/

salsa20-x86_64-asm_64.S
    13: mov %rsi,%rsi
    92: leaq 192(%rsp),%rsi
    99: movq %rsi,144(%rsp)
    109: movq 64(%rsp),%rsi
    111: mov %rsi,%rcx
    113: shr $32,%rsi
    249: xor %rbp,%rsi
    251: lea (%r15,%rsi),%rbp
    257: lea (%rsi,%r8),%rbp
    273: lea (%rsi,%rdx),%rbp
    [all …]
ghash-clmulni-intel_asm.S
    101: movups (%rsi), SHASH
    122: movups (%rsi), IN1
    127: add $16, %rsi
    142: movups (%rsi), %xmm0
blowfish-x86_64-asm_64.S
    35: #define RIO %rsi
    58: #define RT1 %rsi
    132: movq %rsi, %r10;
    171: movq %rsi, %r10;
    320: movq %rsi, %r11;
    368: movq %rsi, %r11;
camellia-x86_64-asm_64.S
    52: #define RIO %rsi
    75: #define RT0 %rsi
    204: movq %rsi, RDST;
    255: movq %rsi, RDST;
    443: movq %rsi, RDST;
    496: movq %rsi, RDST;
/arch/x86/platform/efi/

efi_stub_64.S
    33: mov 0x8(%rsp), %rsi; \
    34: mov %rsi, %cr0; \
    49: mov %rsi, %rcx
    59: mov %rsi, %rcx
    70: mov %rsi, %rcx
    82: mov %rsi, %rcx
    95: mov %rsi, %rcx
    111: mov %rsi, %rcx
/arch/x86/boot/compressed/

head_64.S
    211: mov %rdx, %rsi
    213: movq %rax,%rsi
    225: subq BP_pref_address(%rsi), %rax
    260: movl BP_kernel_alignment(%rsi), %eax
    283: pushq %rsi
    284: leaq (_bss-8)(%rip), %rsi
    291: popq %rsi
    328: pushq %rsi /* Save the real mode argument */
    329: movq %rsi, %rdi /* real mode address */
    330: leaq boot_heap(%rip), %rsi /* malloc area for uncompression */
    [all …]
/arch/x86/um/

setjmp_64.S
    24: pop %rsi # Return address, and adjust the stack
    28: push %rsi # Make the call/return stack happy
    34: movq %rsi,56(%rdi) # Return address
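In setjmp_64.S, %rsi is only scratch: the return address is popped into it, recorded at offset 56 of the jump buffer, and pushed back so the call/return stack stays balanced. A toy setjmp along the same lines; the symbol and every offset except 56 are assumptions here:

    # Toy setjmp for illustration only, not the UML symbol.
    # %rdi points at a buffer of eight quadwords.
            .text
            .globl  toy_setjmp
    toy_setjmp:
            pop     %rsi                    # return address; %rsp is now the post-return value
            xorl    %eax, %eax              # a direct call reports 0
            movq    %rbx, (%rdi)            # callee-saved registers...
            movq    %rsp, 8(%rdi)           # ...plus the adjusted stack pointer
            movq    %rbp, 16(%rdi)
            movq    %r12, 24(%rdi)
            movq    %r13, 32(%rdi)
            movq    %r14, 40(%rdi)
            movq    %r15, 48(%rdi)
            movq    %rsi, 56(%rdi)          # return address at offset 56, as in the hit above
            push    %rsi                    # rebalance the call/return stack
            ret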
stub_64.S
    47: pop %rsi
/arch/x86/include/asm/

ftrace.h
    12: movq %rsi, 24(%rsp)
    22: movq 24(%rsp), %rsi
calling.h
    92: movq_cfi rsi, 7*8
    136: movq_cfi_restore 7*8, rsi
    152: movq \offset+56(%rsp), %rsi
/arch/x86/kernel/acpi/

wakeup_64.S
    31: movq saved_rsi, %rsi
    49: movq %rsi, pt_regs_si(%rax)
    71: movq %rsi, saved_rsi
    94: movq pt_regs_si(%rax), %rsi
/arch/x86/net/

bpf_jit.S
    36: mov (SKBDATA,%rsi),%eax
    53: movzwl (SKBDATA,%rsi),%eax
    68: movzbl (SKBDATA,%rsi),%eax
    87: movzbl (SKBDATA,%rsi),%ebx
/arch/x86/xen/

xen-asm.S
    125: push %rsi
    137: pop %rsi
xen-head.S
    23: mov %rsi,xen_start_info
/arch/x86/power/

hibernate_asm_64.S
    29: movq %rsi, pt_regs_si(%rax)
    86: movq pbe_address(%rdx), %rsi
    126: movq pt_regs_si(%rax), %rsi