/arch/x86/kernel/ |
D | mcount_64.S |
     81  pushq \added+8*2(%rsp)
     83  movq %rsp, %rbp
     85  pushq \added+8*3(%rsp)
     88  pushq \added+8(%rsp)
     91  movq %rsp, %rbp
     97  subq $(MCOUNT_REG_SIZE - MCOUNT_FRAME_SIZE), %rsp
     98  movq %rax, RAX(%rsp)
     99  movq %rcx, RCX(%rsp)
    100  movq %rdx, RDX(%rsp)
    101  movq %rsi, RSI(%rsp)
    [all …]
|
D | head_64.S |
    221  movq stack_start(%rip), %rsp
    304  movq stack_start(%rip),%rsp
    331  # 104(%rsp) %rflags
    332  # 96(%rsp) %cs
    333  # 88(%rsp) %rip
    334  # 80(%rsp) error code
    340  pushq $i          # 72(%rsp) Vector number
    354  cmpl $2,(%rsp)    # X86_TRAP_NMI
    361  pushq %rax        # 64(%rsp)
    362  pushq %rcx        # 56(%rsp)
    [all …]
|
D | relocate_kernel_64.S |
     62  movq %rsp, RSP(%r11)
     95  lea PAGE_SIZE(%r8), %rsp
    174  leaq PAGE_SIZE(%r10), %rsp
    178  movq 0(%rsp), %rbp
    187  lea PAGE_SIZE(%r8), %rsp
    194  movq RSP(%r8), %rsp
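
The relocate_kernel matches show whole-stack swaps: the old %rsp is stashed (line 62) and restored at the end (line 194), and `lea PAGE_SIZE(%r8), %rsp` points the stack at the *end* of a freshly prepared page, because the stack grows downward. A minimal sketch of that idiom, assuming a 4 KiB page whose base arrives in %r8 and an illustrative scratch word named saved_rsp:

        .text
        .globl  switch_stack_sketch
switch_stack_sketch:
        movq    %rsp, saved_rsp(%rip)   /* remember the current stack      */
        leaq    4096(%r8), %rsp         /* %r8 = page base; stacks grow    */
                                        /* down, so start at base + 4 KiB  */
        /* ... work that must run on the private stack ... */
        movq    saved_rsp(%rip), %rsp   /* switch back in one move         */
        ret

        .data
saved_rsp:      .quad   0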
|
/arch/x86/platform/efi/ |
D | efi_stub_64.S |
     16  mov %rsp, %rax; \
     17  subq $0x70, %rsp; \
     18  and $~0xf, %rsp; \
     19  mov %rax, (%rsp); \
     22  mov %rax, 0x8(%rsp); \
     23  movaps %xmm0, 0x60(%rsp); \
     24  movaps %xmm1, 0x50(%rsp); \
     25  movaps %xmm2, 0x40(%rsp); \
     26  movaps %xmm3, 0x30(%rsp); \
     27  movaps %xmm4, 0x20(%rsp); \
    [all …]
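
Lines 16-19 are the realign-and-remember prologue used before calling firmware that insists on 16-byte stack alignment: copy the unaligned %rsp aside, carve out scratch space, round the pointer down, and store the original value at the frame base so a single load undoes everything. A sketch of the same idiom (the 0x70 frame size and 0x60 slot are illustrative):

        .text
        .globl  aligned_call_sketch
aligned_call_sketch:
        mov     %rsp, %rax              /* keep the unaligned stack pointer  */
        subq    $0x70, %rsp             /* scratch room for saved state      */
        and     $~0xf, %rsp             /* round down to 16-byte alignment   */
        mov     %rax, (%rsp)            /* old %rsp lives at the frame base  */
        movaps  %xmm0, 0x60(%rsp)       /* movaps requires aligned addresses */
        /* ... call the alignment-sensitive ABI here ... */
        movaps  0x60(%rsp), %xmm0       /* restore caller's SSE state        */
        mov     (%rsp), %rsp            /* one load undoes both sub and and  */
        ret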
|
D | efi_thunk_64.S |
     34  movq %rsp, efi_saved_sp(%rip)
     35  movq efi_scratch+25(%rip), %rsp
     49  subq $16, %rsp
     52  movl %ebx, 8(%rsp)
     58  movq efi_saved_sp(%rip), %rsp
     77  subq $32, %rsp
     78  movl %esi, 0x0(%rsp)
     79  movl %edx, 0x4(%rsp)
     80  movl %ecx, 0x8(%rsp)
     82  movl %esi, 0xc(%rsp)
    [all …]
|
/arch/x86/entry/ |
D | calling.h |
     94  addq $-(15*8+\addskip), %rsp
     99  movq %r11, 6*8+\offset(%rsp)
    102  movq %r10, 7*8+\offset(%rsp)
    103  movq %r9, 8*8+\offset(%rsp)
    104  movq %r8, 9*8+\offset(%rsp)
    107  movq %rax, 10*8+\offset(%rsp)
    110  movq %rcx, 11*8+\offset(%rsp)
    112  movq %rdx, 12*8+\offset(%rsp)
    113  movq %rsi, 13*8+\offset(%rsp)
    114  movq %rdi, 14*8+\offset(%rsp)
    [all …]
|
D | entry_64.S |
     60  btl $9, EFLAGS(%rsp)    /* interrupts off? */
     93  btl $9, EFLAGS(%rsp)    /* interrupts off? */
    148  movq %rsp, PER_CPU_VAR(rsp_scratch)
    149  movq PER_CPU_VAR(cpu_current_top_of_stack), %rsp
    175  sub $(6*8), %rsp    /* pt_regs->bp, bx, r12-15 not saved */
    177  testl $_TIF_WORK_SYSCALL_ENTRY, ASM_THREAD_INFO(TI_flags, %rsp, SIZEOF_PTREGS)
    197  movq %rax, RAX(%rsp)
    218  testl $_TIF_ALLWORK_MASK, ASM_THREAD_INFO(TI_flags, %rsp, SIZEOF_PTREGS)
    223  movq RIP(%rsp), %rcx
    224  movq EFLAGS(%rsp), %r11
    [all …]
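
Lines 148-149 are the SYSCALL stack handoff: the CPU enters the kernel still running on the user stack, so the very first moves park the user %rsp in a per-CPU scratch slot and load the kernel stack top. A simplified sketch follows; the real code uses %gs-relative PER_CPU_VAR slots, and the plain %rip-relative symbols here (user_rsp_scratch, kernel_stack_top) are stand-ins for illustration only:

        .text
syscall_entry_sketch:
        movq    %rsp, user_rsp_scratch(%rip)    /* park the user stack ptr  */
        movq    kernel_stack_top(%rip), %rsp    /* switch to kernel stack   */
        pushq   user_rsp_scratch(%rip)          /* record it as pt_regs->sp */
        /* ... build the rest of the register frame ... */

        .data
user_rsp_scratch:       .quad   0
kernel_stack_top:       .quad   0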
|
D | entry_64_compat.S |
     56  movq PER_CPU_VAR(cpu_current_top_of_stack), %rsp
     78  orl $X86_EFLAGS_IF, (%rsp)    /* Fix saved flags */
    124  testl $X86_EFLAGS_NT, EFLAGS(%rsp)
    134  movq %rsp, %rdi
    180  movq PER_CPU_VAR(cpu_current_top_of_stack), %rsp
    224  movq %rsp, %rdi
    234  movq RBX(%rsp), %rbx        /* pt_regs->rbx */
    235  movq RBP(%rsp), %rbp        /* pt_regs->rbp */
    236  movq EFLAGS(%rsp), %r11     /* pt_regs->flags (in r11) */
    237  movq RIP(%rsp), %rcx        /* pt_regs->ip (in rcx) */
    [all …]
|
/arch/x86/xen/ |
D | xen-asm_64.S |
     26  mov 8+0(%rsp), %rcx
     27  mov 8+8(%rsp), %r11
     57  movq %rsp, PER_CPU_VAR(rsp_scratch)
     58  movq PER_CPU_VAR(cpu_current_top_of_stack), %rsp
     76  movq %rsp, PER_CPU_VAR(rsp_scratch)
     77  movq PER_CPU_VAR(cpu_current_top_of_stack), %rsp
    109  mov 0*8(%rsp), %rcx
    110  mov 1*8(%rsp), %r11
    111  mov 5*8(%rsp), %rsp
    138  lea 16(%rsp), %rsp    /* strip %rcx, %r11 */
|
/arch/x86/crypto/ |
D | chacha20-avx2-x86_64.S |
     43  mov %rsp, %r8
     44  and $~31, %rsp
     45  sub $0x80, %rsp
     65  vmovdqa %ymm0,0x00(%rsp)
     66  vmovdqa %ymm1,0x20(%rsp)
     67  vmovdqa %ymm2,0x40(%rsp)
     68  vmovdqa %ymm3,0x60(%rsp)
     81  vpaddd 0x00(%rsp),%ymm4,%ymm0
     82  vmovdqa %ymm0,0x00(%rsp)
     86  vpaddd 0x20(%rsp),%ymm5,%ymm0
    [all …]
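
Lines 43-45 exist because vmovdqa faults on addresses that are not 32-byte aligned: the original %rsp is saved in %r8, rounded down with `and $~31`, and 0x80 bytes are carved out (a multiple of 32, so the frame base stays aligned). The ssse3 file further down does sub-then-and with $~63 for the same effect. A sketch of the idiom:

        .text
        .globl  avx2_scratch_sketch
avx2_scratch_sketch:
        mov     %rsp, %r8               /* save the unaligned stack pointer */
        and     $~31, %rsp              /* round down to a 32-byte boundary */
        sub     $0x80, %rsp             /* 0x80 is a multiple of 32, so the */
                                        /* frame base stays aligned         */
        vmovdqa %ymm0, 0x00(%rsp)       /* aligned stores are now safe      */
        vmovdqa 0x00(%rsp), %ymm0
        mov     %r8, %rsp               /* restore in a single move         */
        ret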
|
D | salsa20-x86_64-asm_64.S |
      5  mov %rsp,%r11
      8  sub %r11,%rsp
     26  movq %r11,0(%rsp)
     28  movq %r12,8(%rsp)
     30  movq %r13,16(%rsp)
     32  movq %r14,24(%rsp)
     34  movq %r15,32(%rsp)
     36  movq %rbx,40(%rsp)
     38  movq %rbp,48(%rsp)
     56  movq %rcx,56(%rsp)
    [all …]
|
D | chacha20-ssse3-x86_64.S |
    160  mov %rsp,%r11
    161  sub $0x80,%rsp
    162  and $~63,%rsp
    190  movdqa %xmm0,0x00(%rsp)
    191  movdqa %xmm1,0x10(%rsp)
    192  movdqa %xmm2,0x20(%rsp)
    193  movdqa %xmm3,0x30(%rsp)
    206  movdqa 0x00(%rsp),%xmm0
    208  movdqa %xmm0,0x00(%rsp)
    212  movdqa 0x10(%rsp),%xmm0
    [all …]
|
D | sha1_ni_asm.S |
     99  mov %rsp, RSPSAVE
    100  sub $FRAME_SIZE, %rsp
    101  and $~0xF, %rsp
    117  movdqa E0, (0*16)(%rsp)
    118  movdqa ABCD, (1*16)(%rsp)
    277  sha1nexte (0*16)(%rsp), E0
    278  paddd (1*16)(%rsp), ABCD
    291  mov RSPSAVE, %rsp
|
D | sha512-avx2-asm.S |
    188  add frame_XFER(%rsp),h          # h = k + w + h # --
    250  add 1*8+frame_XFER(%rsp), h     # h = k + w + h # --
    307  add 2*8+frame_XFER(%rsp), h     # h = k + w + h # --
    365  add 3*8+frame_XFER(%rsp), h     # h = k + w + h # --
    426  add frame_XFER(%rsp), h         # h = k + w + h # --
    464  add 8*1+frame_XFER(%rsp), h     # h = k + w + h # --
    502  add 8*2+frame_XFER(%rsp), h     # h = k + w + h # --
    540  add 8*3+frame_XFER(%rsp), h     # h = k + w + h # --
    573  mov %rsp, %rax
    574  sub $frame_size, %rsp
    [all …]
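
The repeated `add n*8+frame_XFER(%rsp), h` lines show the message schedule being staged on the stack: a vector add precomputes k + w for several rounds at once, the result is parked in a frame slot, and each scalar round folds one 8-byte entry into the working variable h. A fragment sketching that flow under illustrative names (frame_XFER set to 0, round-constant table in %rsi, working variables in %r8/%r9, and a frame assumed already allocated):

        .set    frame_XFER, 0                   /* staging-slot offset (illustrative) */

        vpaddq  (%rsi), %ymm0, %ymm1            /* k + w for four rounds at once      */
        vmovdqa %ymm1, frame_XFER(%rsp)         /* stage the sums in the stack frame  */
        add     frame_XFER(%rsp), %r8           /* round 0: h = h + k + w             */
        add     1*8+frame_XFER(%rsp), %r9       /* round 1 consumes the next qword    */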
|
/arch/x86/lib/ |
D | csum-copy_64.S |
     53  subq $7*8, %rsp
     54  movq %rbx, 2*8(%rsp)
     55  movq %r12, 3*8(%rsp)
     56  movq %r14, 4*8(%rsp)
     57  movq %r13, 5*8(%rsp)
     58  movq %r15, 6*8(%rsp)
     60  movq %r8, (%rsp)
     61  movq %r9, 1*8(%rsp)
    202  movq 2*8(%rsp), %rbx
    203  movq 3*8(%rsp), %r12
    [all …]
|
D | copy_page_64.S |
     22  subq $2*8, %rsp
     23  movq %rbx, (%rsp)
     24  movq %r12, 1*8(%rsp)
     82  movq (%rsp), %rbx
     83  movq 1*8(%rsp), %r12
     84  addq $2*8, %rsp
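
copy_page_64.S is the textbook callee-saved prologue/epilogue: one subq opens the frame, explicit movq stores spill the registers the routine will clobber, and mirrored loads plus one addq close it. A sketch of the complete shape that lines 22-24 and 82-84 bracket (the body is a placeholder):

        .text
        .globl  spill_sketch
spill_sketch:
        subq    $2*8, %rsp              /* room for two callee-saved regs */
        movq    %rbx, (%rsp)
        movq    %r12, 1*8(%rsp)
        /* ... body is now free to clobber %rbx and %r12 ... */
        movq    (%rsp), %rbx
        movq    1*8(%rsp), %r12
        addq    $2*8, %rsp
        ret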
|
/arch/x86/crypto/sha-mb/ |
D | sha1_mb_mgr_submit_avx2.S |
    105  mov %rsp, %r10
    106  sub $STACK_SPACE, %rsp
    107  and $~31, %rsp
    109  mov %rbx, (%rsp)
    110  mov %r10, 8*2(%rsp)    #save old rsp
    111  mov %rbp, 8*3(%rsp)
    112  mov %r12, 8*4(%rsp)
    113  mov %r13, 8*5(%rsp)
    114  mov %r14, 8*6(%rsp)
    115  mov %r15, 8*7(%rsp)
    [all …]
|
D | sha1_mb_mgr_flush_avx2.S |
    116  mov %rsp, %r10
    117  sub $STACK_SPACE, %rsp
    118  and $~31, %rsp
    119  mov %rbx, _GPR_SAVE(%rsp)
    120  mov %r10, _GPR_SAVE+8*1(%rsp)    #save rsp
    121  mov %rbp, _GPR_SAVE+8*3(%rsp)
    122  mov %r12, _GPR_SAVE+8*4(%rsp)
    123  mov %r13, _GPR_SAVE+8*5(%rsp)
    124  mov %r14, _GPR_SAVE+8*6(%rsp)
    125  mov %r15, _GPR_SAVE+8*7(%rsp)
    [all …]
|
D | sha1_x8_avx2.S |
    168  vpaddd \memW*32(%rsp), \regE, \regE
    179  vmovdqu offset(%rsp), W14
    182  vpxor offset(%rsp), W16, W16
    184  vpxor offset(%rsp), W16, W16
    192  vmovdqu \regF, offset(%rsp)
    301  #save rsp
    302  mov %rsp, RSP_SAVE
    303  sub $FRAMESZ, %rsp
    305  #align rsp to 32 Bytes
    306  and $~0x1F, %rsp
    [all …]
|
/arch/x86/um/ |
D | stub_64.S |
      8  mov %rbx, %rsp
      9  add $0x10, %rsp
     12  mov 0x0(%rsp), %rax
     21  mov %rsp, 8(%rbx)
     24  add %rax, %rsp
|
D | setjmp_64.S |
     10  # %rsp (post-return)
     27  movq %rsp,8(%rdi)    # Post-return %rsp!
     46  movq 8(%rdi),%rsp
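
The `# Post-return %rsp!` comment is the subtle point here: what gets saved is the value %rsp will hold after the final ret pops the return address, so the matching longjmp can load it directly. One way to capture that value is to pop the return address first; a sketch of that trick, with an illustrative jump-buffer layout (slot 0 = resume %rip, slot 1 = post-return %rsp), not the file's actual layout:

        .text
        .globl  setjmp_sketch
setjmp_sketch:
        pop     %r11                    /* return address off; %rsp is now */
                                        /* exactly the post-return value   */
        movq    %r11, 0(%rdi)           /* save resume %rip                */
        movq    %rsp, 8(%rdi)           /* save post-return %rsp           */
        xorl    %eax, %eax              /* a direct call returns 0         */
        jmp     *%r11                   /* return without re-pushing       */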
|
/arch/x86/boot/compressed/ |
D | efi_thunk_64.S |
     29  subq $8, %rsp
     31  movl %eax, 4(%rsp)
     33  movl %eax, (%rsp)
     46  subq $32, %rsp
     47  movl %esi, 0x0(%rsp)
     48  movl %edx, 0x4(%rsp)
     49  movl %ecx, 0x8(%rsp)
     51  movl %esi, 0xc(%rsp)
     53  movl %esi, 0x10(%rsp)
     73  1: addq $32, %rsp
    [all …]
|
/arch/x86/kernel/acpi/ |
D | wakeup_64.S |
     27  movq saved_rsp, %rsp
     42  subq $8, %rsp
     47  movq %rsp, pt_regs_sp(%rax)
     67  movq %rsp, saved_rsp
     73  addq $8, %rsp
     94  movq pt_regs_sp(%rax), %rsp
    115  movq %rsp, %rdi
    120  addq $8, %rsp
|
/arch/s390/pci/ |
D | pci_clp.c |
     19  static inline void zpci_err_clp(unsigned int rsp, int rc)    in zpci_err_clp() argument
     22  unsigned int rsp;                                             in zpci_err_clp() member
     24  } __packed data = {rsp, rc};                                  in zpci_err_clp()
     94  if (!rc && rrb->response.hdr.rsp == CLP_RC_OK)                in clp_query_pci_fngrp()
     98  zpci_err_clp(rrb->response.hdr.rsp, rc);                      in clp_query_pci_fngrp()
    147  if (!rc && rrb->response.hdr.rsp == CLP_RC_OK) {              in clp_query_pci_fn()
    155  zpci_err_clp(rrb->response.hdr.rsp, rc);                      in clp_query_pci_fn()
    218  if (rrb->response.hdr.rsp == CLP_RC_SETPCIFN_BUSY) {          in clp_set_pci_fn()
    224  } while (rrb->response.hdr.rsp == CLP_RC_SETPCIFN_BUSY);      in clp_set_pci_fn()
    226  if (!rc && rrb->response.hdr.rsp == CLP_RC_OK)                in clp_set_pci_fn()
    [all …]
|
/arch/x86/purgatory/ |
D | entry64.S |
     32  leaq stack_init(%rip), %rsp
     46  movq rsp(%rip), %rsp
     67  rsp: .quad 0x0    label
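
The purgatory entry shows two ways position-independent code gets a stack: line 32 points %rsp at an in-image region via a %rip-relative lea, and line 46 loads it from a data word (the `rsp:` label at line 67) that the loader patches before jumping in. A sketch of the same layout, with illustrative names and sizes:

        .text
entry_sketch:
        leaq    stack_top(%rip), %rsp   /* default: in-image stack, found   */
                                        /* %rip-relative, so no fixed load  */
                                        /* address is assumed               */
        /* ... early work runs on the built-in stack ... */
        movq    new_rsp(%rip), %rsp     /* hand-off: stack the loader       */
                                        /* patched into the data word       */

        .data
new_rsp:        .quad   0               /* filled in before entry           */
        .balign 16
        .skip   4096                    /* the in-image stack region        */
stack_top: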
|