/arch/x86/kernel/

ftrace_64.S
    71: pushq \added+8*2(%rsp)
    73: movq %rsp, %rbp
    75: pushq \added+8*3(%rsp)
    77: movq %rsp, %rbp
    83: subq $(MCOUNT_REG_SIZE - MCOUNT_FRAME_SIZE), %rsp
    84: movq %rax, RAX(%rsp)
    85: movq %rcx, RCX(%rsp)
    86: movq %rdx, RDX(%rsp)
    87: movq %rsi, RSI(%rsp)
    88: movq %rdi, RDI(%rsp)
    [all …]

head_64.S
    74: leaq (__end_init_task - SIZEOF_PTREGS)(%rip), %rsp
   166: movq initial_stack(%rip), %rsp
   255: movq initial_stack(%rip), %rsp
   285: pushq $i # 72(%rsp) Vector number
   305: movq 8(%rsp), %rsi /* RSI = vector number */
   306: movq %rdi, 8(%rsp) /* pt_regs->di = RDI */
   330: movq %rsp,%rdi /* RDI = pt_regs; RSI is already trapnr */

relocate_kernel_64.S
    61: movq %rsp, RSP(%r11)
   100: lea PAGE_SIZE(%r8), %rsp
   194: leaq PAGE_SIZE(%r10), %rsp
   198: movq 0(%rsp), %rbp
   207: lea PAGE_SIZE(%r8), %rsp
   214: movq RSP(%r8), %rsp
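The ftrace_64.S hits (lines 83-88) show the pt_regs-style save that several of these files share: carve a frame off %rsp, then spill each GPR at a fixed, named offset. A minimal sketch of the idiom; the OFF_* constants and the function name are illustrative, not the kernel's pt_regs layout:

        # Sketch only: OFF_RAX/OFF_RCX and FRAME are made up,
        # not the kernel's pt_regs offsets.
        .set    OFF_RAX, 0
        .set    OFF_RCX, 8
        .set    FRAME, 16

        .text
        .globl  save_regs_demo
save_regs_demo:
        subq    $FRAME, %rsp            # carve the register frame
        movq    %rax, OFF_RAX(%rsp)     # spill at fixed, named offsets
        movq    %rcx, OFF_RCX(%rsp)
        # ... work that clobbers %rax/%rcx ...
        movq    OFF_RAX(%rsp), %rax     # reload by name, not push order
        movq    OFF_RCX(%rsp), %rcx
        addq    $FRAME, %rsp            # release the frame
        ret

Fixed offsets let later code address saved registers by name rather than by push order; head_64.S line 330 relies on exactly that when it passes the frame on as a pt_regs pointer.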
/arch/x86/platform/efi/

efi_stub_64.S
    17: mov %rsp, %rax; \
    18: subq $0x70, %rsp; \
    19: and $~0xf, %rsp; \
    20: mov %rax, (%rsp); \
    23: mov %rax, 0x8(%rsp); \
    24: movaps %xmm0, 0x60(%rsp); \
    25: movaps %xmm1, 0x50(%rsp); \
    26: movaps %xmm2, 0x40(%rsp); \
    27: movaps %xmm3, 0x30(%rsp); \
    28: movaps %xmm4, 0x20(%rsp); \
    [all …]

efi_thunk_64.S
    35: movq %rsp, efi_saved_sp(%rip)
    36: movq efi_scratch(%rip), %rsp
    50: subq $16, %rsp
    53: movl %ebx, 8(%rsp)
    59: movq efi_saved_sp(%rip), %rsp
    78: subq $32, %rsp
    79: movl %esi, 0x0(%rsp)
    80: movl %edx, 0x4(%rsp)
    81: movl %ecx, 0x8(%rsp)
    83: movl %esi, 0xc(%rsp)
    [all …]
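efi_stub_64.S lines 17-20 are the classic save/align idiom: remember the incoming %rsp in a scratch register, reserve space, mask the low bits, and stash the old value inside the new frame so the epilogue is a single load. A minimal, self-contained sketch (the 0x70 frame size comes from the hit above; the function name is illustrative):

        .text
        .globl  aligned_frame_demo
aligned_frame_demo:
        mov     %rsp, %rax          # remember the incoming stack pointer
        subq    $0x70, %rsp         # reserve scratch space
        and     $~0xf, %rsp         # force 16-byte alignment (movaps faults otherwise)
        mov     %rax, (%rsp)        # stash the old %rsp inside the new frame
        movaps  %xmm0, 0x60(%rsp)   # aligned SSE spills are now safe
        movaps  0x60(%rsp), %xmm0
        mov     (%rsp), %rsp        # one load restores the caller's stack
        ret

The mask can only move %rsp down, so the original value must be captured before it is lost; storing it at (%rsp) keeps the restore path trivial.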
/arch/x86/entry/

entry_64.S
    66: TRACE_IRQS_FLAGS EFLAGS(%rsp)
    95: btl $9, EFLAGS(%rsp) /* interrupts off? */
   155: movq %rsp, PER_CPU_VAR(cpu_tss_rw + TSS_sp2)
   156: SWITCH_TO_KERNEL_CR3 scratch_reg=%rsp
   157: movq PER_CPU_VAR(cpu_current_top_of_stack), %rsp
   174: movq %rsp, %rsi
   188: movq RCX(%rsp), %rcx
   189: movq RIP(%rsp), %r11
   217: cmpq $__USER_CS, CS(%rsp) /* CS must match SYSRET */
   220: movq R11(%rsp), %r11
   [all …]

entry_64_compat.S
    55: SWITCH_TO_KERNEL_CR3 scratch_reg=%rsp
    57: movq PER_CPU_VAR(cpu_current_top_of_stack), %rsp
    79: orl $X86_EFLAGS_IF, (%rsp) /* Fix saved flags */
   131: testl $X86_EFLAGS_NT|X86_EFLAGS_AC|X86_EFLAGS_TF, EFLAGS(%rsp)
   141: movq %rsp, %rdi
   210: SWITCH_TO_KERNEL_CR3 scratch_reg=%rsp
   213: movq PER_CPU_VAR(cpu_current_top_of_stack), %rsp
   261: movq %rsp, %rdi
   278: movq RBX(%rsp), %rbx /* pt_regs->rbx */
   279: movq RBP(%rsp), %rbp /* pt_regs->rbp */
   [all …]
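entry_64.S lines 155-157 show the syscall-entry stack switch: the user %rsp is parked in per-CPU storage, and %rsp is then reloaded with the top of the task's kernel stack before anything is pushed. A rough user-space sketch of the same motion, assuming plain symbols (user_rsp_save, kstack) in place of PER_CPU_VAR storage and omitting the CR3 switch entirely:

        # user_rsp_save and kstack are illustrative stand-ins for the
        # kernel's per-CPU storage; no paging switch is modeled.
        .data
user_rsp_save:  .quad   0
        .balign 16
kstack: .skip   512
kstack_top:

        .text
        .globl  stack_switch_demo
stack_switch_demo:
        movq    %rsp, user_rsp_save(%rip)   # park the old stack pointer...
        leaq    kstack_top(%rip), %rsp      # ...then load the known-good one
        pushq   user_rsp_save(%rip)         # pushes now land on the new stack
        popq    %rax
        movq    user_rsp_save(%rip), %rsp   # switch back before returning
        ret

Nothing may touch the stack between the two movqs: until %rsp points at a trusted stack, there is no usable stack.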
/arch/x86/xen/

xen-asm_64.S
    99: movq %rsp, PER_CPU_VAR(cpu_tss_rw + TSS_sp2)
   100: movq PER_CPU_VAR(cpu_current_top_of_stack), %rsp
   136: movq $__USER_DS, 4*8(%rsp)
   137: movq $__USER_CS, 1*8(%rsp)
   154: movq $__USER32_DS, 4*8(%rsp)
   155: movq $__USER32_CS, 1*8(%rsp)
   162: mov 0*8(%rsp), %rcx
   163: mov 1*8(%rsp), %r11
   164: mov 5*8(%rsp), %rsp
   172: lea 16(%rsp), %rsp /* strip %rcx, %r11 */
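xen-asm_64.S lines 136-137 and 154-155 rewrite the SS and CS slots of an IRET frame in place, which only makes sense given the hardware layout of that frame: RIP at 0*8(%rsp), CS at 1*8, RFLAGS at 2*8, RSP at 3*8, SS at 4*8. A small sketch that builds such a frame and returns through it (iretq is not privileged; in user space it simply pops all five slots):

        .text
        .globl  iret_frame_demo
iret_frame_demo:
        movq    %rsp, %rdi          # value for the saved-RSP slot
        movq    %ss, %rax
        pushq   %rax                # 4*8(%rsp): SS  (the slot patched at lines 136/154)
        pushq   %rdi                # 3*8(%rsp): RSP
        pushfq                      # 2*8(%rsp): RFLAGS
        movq    %cs, %rax
        pushq   %rax                # 1*8(%rsp): CS  (patched at lines 137/155)
        leaq    1f(%rip), %rax
        pushq   %rax                # 0*8(%rsp): RIP
        iretq                       # consumes all five slots in one go
1:      ret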
/arch/x86/lib/

csum-copy_64.S
    57: subq $7*8, %rsp
    58: movq %rbx, 2*8(%rsp)
    59: movq %r12, 3*8(%rsp)
    60: movq %r14, 4*8(%rsp)
    61: movq %r13, 5*8(%rsp)
    62: movq %r15, 6*8(%rsp)
    64: movq %r8, (%rsp)
    65: movq %r9, 1*8(%rsp)
   206: movq 2*8(%rsp), %rbx
   207: movq 3*8(%rsp), %r12
   [all …]

copy_page_64.S
    25: subq $2*8, %rsp
    26: movq %rbx, (%rsp)
    27: movq %r12, 1*8(%rsp)
    85: movq (%rsp), %rbx
    86: movq 1*8(%rsp), %r12
    87: addq $2*8, %rsp
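copy_page_64.S (lines 25-27 and 85-87) is the whole callee-saved spill pattern in miniature: one subq reserves the slots, each preserved register is stored at an 8-byte multiple, and the epilogue mirrors the prologue. A sketch of exactly that shape (function name illustrative):

        .text
        .globl  spill_demo
spill_demo:
        subq    $2*8, %rsp          # room for two callee-saved registers
        movq    %rbx, 0*8(%rsp)     # the SysV ABI says these must survive the call
        movq    %r12, 1*8(%rsp)
        # ... body free to use %rbx/%r12 as scratch ...
        movq    0*8(%rsp), %rbx     # mirror-image epilogue
        movq    1*8(%rsp), %r12
        addq    $2*8, %rsp
        ret

csum-copy_64.S does the same with seven slots; the n*8(%rsp) style keeps each slot's index visible instead of burying it in a precomputed constant.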
/arch/x86/crypto/

chacha-ssse3-x86_64.S
   180: lea 8(%rsp),%r10
   181: sub $0x10,%rsp
   182: and $~31,%rsp
   185: mov %rsp,%rdi
   189: pxor 0x00(%rsp),%xmm0
   190: movdqa %xmm0,0x00(%rsp)
   192: mov %rsp,%rsi
   197: lea -8(%r10),%rsp
   240: lea 8(%rsp),%r10
   241: sub $0x80,%rsp
   [all …]

chacha-avx2-x86_64.S
   207: lea 8(%rsp),%r10
   208: sub $0x10,%rsp
   209: and $~31,%rsp
   212: mov %rsp,%rdi
   216: vpxor 0x00(%rsp),%xmm7,%xmm7
   217: vmovdqa %xmm7,0x00(%rsp)
   219: mov %rsp,%rsi
   224: lea -8(%r10),%rsp
   512: lea 8(%rsp),%r10
   513: sub $0x10,%rsp
   [all …]

sha1_ni_asm.S
    99: mov %rsp, RSPSAVE
   100: sub $FRAME_SIZE, %rsp
   101: and $~0xF, %rsp
   117: movdqa E0, (0*16)(%rsp)
   118: movdqa ABCD, (1*16)(%rsp)
   277: sha1nexte (0*16)(%rsp), E0
   278: paddd (1*16)(%rsp), ABCD
   291: mov RSPSAVE, %rsp

sha512-avx2-asm.S
   189: add frame_XFER(%rsp),h      # h = k + w + h # --
   251: add 1*8+frame_XFER(%rsp), h # h = k + w + h # --
   308: add 2*8+frame_XFER(%rsp), h # h = k + w + h # --
   366: add 3*8+frame_XFER(%rsp), h # h = k + w + h # --
   427: add frame_XFER(%rsp), h     # h = k + w + h # --
   465: add 8*1+frame_XFER(%rsp), h # h = k + w + h # --
   503: add 8*2+frame_XFER(%rsp), h # h = k + w + h # --
   541: add 8*3+frame_XFER(%rsp), h # h = k + w + h # --
   575: mov %rsp, %rax
   576: sub $frame_size, %rsp
   [all …]

sha512-ssse3-asm.S
   100: #define W_t(i) 8*i+frame_W(%rsp)
   103: #define WK_2(i) 8*((i%2))+frame_WK(%rsp)
   287: mov %rsp, %rax
   288: sub $frame_size, %rsp
   289: and $~(0x20 - 1), %rsp
   290: mov %rax, frame_RSPSAVE(%rsp)
   293: mov %rbx, frame_GPRSAVE(%rsp)
   294: mov %r12, frame_GPRSAVE +8*1(%rsp)
   295: mov %r13, frame_GPRSAVE +8*2(%rsp)
   296: mov %r14, frame_GPRSAVE +8*3(%rsp)
   [all …]

sha512-avx-asm.S
   103: #define W_t(i) 8*i+frame_W(%rsp)
   106: #define WK_2(i) 8*((i%2))+frame_WK(%rsp)
   286: mov %rsp, %rax
   287: sub $frame_size, %rsp
   288: and $~(0x20 - 1), %rsp
   289: mov %rax, frame_RSPSAVE(%rsp)
   292: mov %rbx, frame_GPRSAVE(%rsp)
   293: mov %r12, frame_GPRSAVE +8*1(%rsp)
   294: mov %r13, frame_GPRSAVE +8*2(%rsp)
   295: mov %r14, frame_GPRSAVE +8*3(%rsp)
   [all …]

sha256-avx2-asm.S
   163: addl \disp(%rsp, SRND), h   # h = k + w + h # --
   211: addl offset(%rsp, SRND), h  # h = k + w + h # --
   262: addl offset(%rsp, SRND), h  # h = k + w + h # --
   312: addl offset(%rsp, SRND), h  # h = k + w + h # --
   378: addl \disp(%rsp, SRND), h   # h = k + w + h # --
   417: addl offset(%rsp, SRND), h  # h = k + w + h # --
   457: addl offset(%rsp, SRND), h  # h = k + w + h # --
   497: addl offset(%rsp, SRND), h  # h = k + w + h # --
   537: mov %rsp, %rax
   538: subq $STACK_SIZE, %rsp
   [all …]
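The ChaCha files open with a different alignment trick: lea 8(%rsp),%r10 captures the caller's stack pointer (entry %rsp plus the return-address slot) before the sub/and, so a single lea -8(%r10),%rsp undoes both at the end. The SHA files reach the same place by saving the old %rsp into the frame itself (frame_RSPSAVE) or a register alias (RSPSAVE). A sketch of the %r10 variant, assuming an AVX2-capable CPU and a 32-byte spill:

        .text
        .globl  align32_demo
align32_demo:
        lea     8(%rsp), %r10       # %r10 = caller's %rsp (entry %rsp + return slot)
        sub     $0x10, %rsp         # scratch space, as in the ChaCha prologues
        and     $~31, %rsp          # 32-byte alignment for vmovdqa
        vmovdqa %ymm0, (%rsp)       # aligned AVX2 spill
        vmovdqa (%rsp), %ymm0
        lea     -8(%r10), %rsp      # one lea undoes both the sub and the and
        ret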
/arch/x86/boot/compressed/

efi_thunk_64.S
    30: subq $8, %rsp
    32: movl %eax, 4(%rsp)
    34: movl %eax, (%rsp)
    47: subq $32, %rsp
    48: movl %esi, 0x0(%rsp)
    49: movl %edx, 0x4(%rsp)
    50: movl %ecx, 0x8(%rsp)
    52: movl %esi, 0xc(%rsp)
    54: movl %esi, 0x10(%rsp)
    74: 1: addq $32, %rsp
    [all …]
/arch/x86/um/

stub_64.S
     9: mov %rbx, %rsp
    10: add $0x10, %rsp
    13: mov 0x0(%rsp), %rax
    22: mov %rsp, 8(%rbx)
    25: add %rax, %rsp

setjmp_64.S
    11: # %rsp (post-return)
    28: movq %rsp,8(%rdi) # Post-return %rsp!
    47: movq 8(%rdi),%rsp
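setjmp_64.S saves the "post-return" %rsp (line 28): the return address is popped first, so the stored value is what %rsp will be after setjmp returns, which is exactly what longjmp must restore (line 47). A stripped-down sketch of just the %rsp/%rip handling, using a hypothetical two-slot jump buffer and omitting the callee-saved registers a real setjmp must also preserve:

        # Hypothetical two-slot jump buffer: 0(%rdi)=rip, 8(%rdi)=rsp.
        .text
        .globl  setjmp_demo
setjmp_demo:
        pop     %rsi                # return address; %rsp now holds its post-return value
        movq    %rsi, (%rdi)        # save the resume rip
        movq    %rsp, 8(%rdi)       # save post-return %rsp, as at line 28
        push    %rsi                # rebalance the call/return stack
        xorl    %eax, %eax          # direct path returns 0
        ret

        .globl  longjmp_demo
longjmp_demo:
        movl    %esi, %eax          # value setjmp_demo appears to return
        movq    8(%rdi), %rsp       # restore the saved stack, as at line 47
        jmp     *(%rdi)             # resume after the original call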
/arch/x86/kernel/acpi/

wakeup_64.S
    34: movq saved_rsp, %rsp
    47: subq $8, %rsp
    52: movq %rsp, pt_regs_sp(%rax)
    72: movq %rsp, saved_rsp
    78: addq $8, %rsp
    99: movq pt_regs_sp(%rax), %rsp
   120: movq %rsp, %rdi
   125: addq $8, %rsp
/arch/x86/purgatory/

entry64.S
    30: leaq stack_init(%rip), %rsp
    44: movq rsp(%rip), %rsp
    65: rsp: .quad 0x0    (label)
/arch/s390/pci/

pci_clp.c  (here "rsp" is the CLP response-code field, not the x86 stack pointer)
    35: static inline void zpci_err_clp(unsigned int rsp, int rc)    (in zpci_err_clp(), argument)
    38: unsigned int rsp;    (in zpci_err_clp(), member)
    40: } __packed data = {rsp, rc};    (in zpci_err_clp())
   132: if (!rc && rrb->response.hdr.rsp == CLP_RC_OK)    (in clp_query_pci_fngrp())
   136: zpci_err_clp(rrb->response.hdr.rsp, rc);    (in clp_query_pci_fngrp())
   193: if (!rc && rrb->response.hdr.rsp == CLP_RC_OK) {    (in clp_query_pci_fn())
   200: zpci_err_clp(rrb->response.hdr.rsp, rc);    (in clp_query_pci_fn())
   266: if (rrb->response.hdr.rsp == CLP_RC_SETPCIFN_BUSY) {    (in clp_set_pci_fn())
   272: } while (rrb->response.hdr.rsp == CLP_RC_SETPCIFN_BUSY);    (in clp_set_pci_fn())
   274: if (rc || rrb->response.hdr.rsp != CLP_RC_OK) {    (in clp_set_pci_fn())
   [all …]
/arch/x86/include/asm/

unwind_hints.h
    48: .macro UNWIND_HINT_REGS base=%rsp offset=0 indirect=0 extra=1 iret=0
    49: .if \base == %rsp
    81: .macro UNWIND_HINT_IRET_REGS base=%rsp offset=0
/arch/x86/mm/

mem_encrypt_boot.S
    34: movq %rsp, %rbp /* RBP now has original stack pointer */
    38: leaq PAGE_SIZE(%rax), %rsp /* Set new stack pointer */
    65: movq %rbp, %rsp /* Restore original stack pointer */
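mem_encrypt_boot.S parks the live stack pointer in a register rather than in memory (line 34), points %rsp at a freshly prepared page (line 38), and restores with one movq (line 65); handy when the code is about to re-encrypt the very memory the old stack lives in. A user-space sketch of the same switch, with a static scratch page standing in for the kernel's work area:

        .data
        .balign 4096
tmp_stack:      .skip   4096        # illustrative scratch page

        .text
        .globl  temp_stack_demo
temp_stack_demo:
        pushq   %rbp                        # %rbp is callee-saved, so keep it intact
        movq    %rsp, %rbp                  # park the live stack pointer in a register
        leaq    tmp_stack+4096(%rip), %rsp  # stacks grow down: start at the page's end
        pushq   $0                          # the new stack is immediately usable
        popq    %rcx
        movq    %rbp, %rsp                  # one movq restores the original stack
        popq    %rbp
        ret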