/arch/x86/kernel/ |
D | ftrace_64.S |
      68  pushq \added+8*2(%rsp)
      70  movq %rsp, %rbp
      72  pushq \added+8*3(%rsp)
      74  movq %rsp, %rbp
      80  subq $(FRAME_SIZE), %rsp
      81  movq %rax, RAX(%rsp)
      82  movq %rcx, RCX(%rsp)
      83  movq %rdx, RDX(%rsp)
      84  movq %rsi, RSI(%rsp)
      85  movq %rdi, RDI(%rsp)
      [all …]
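The pattern here is a fixed-layout register-save frame: %rsp is lowered once by a frame size and each register lands at a named offset. A minimal standalone sketch, with made-up FRAME_SIZE and offset constants standing in for the kernel's pt_regs layout:

    	.set FRAME_SIZE, 3*8       # assumption: three slots, not the real pt_regs size
    	.set RAX_OFF, 0*8          # hypothetical offsets in the style of RAX(%rsp)
    	.set RCX_OFF, 1*8
    	.set RDX_OFF, 2*8
    	.text
    	.globl save_frame_demo
    save_frame_demo:
    	subq $FRAME_SIZE, %rsp     # carve the save area in one adjustment
    	movq %rax, RAX_OFF(%rsp)   # each register gets a fixed, named slot
    	movq %rcx, RCX_OFF(%rsp)
    	movq %rdx, RDX_OFF(%rsp)
    	# ... tracing work would run here without losing caller state ...
    	movq RAX_OFF(%rsp), %rax
    	movq RCX_OFF(%rsp), %rcx
    	movq RDX_OFF(%rsp), %rdx
    	addq $FRAME_SIZE, %rsp     # drop the frame before returning
    	ret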
|
D | head_64.S |
      65  leaq (__end_init_task - FRAME_SIZE)(%rip), %rsp
     217  movq initial_stack(%rip), %rsp
     298  movq initial_stack(%rip), %rsp
     321  movq %rsp, %rdi
     322  movq ORIG_RAX(%rsp), %rsi
     331  addq $8, %rsp
     364  pushq $i # 72(%rsp) Vector number
     385  movq 8(%rsp), %rsi /* RSI = vector number */
     386  movq %rdi, 8(%rsp) /* pt_regs->di = RDI */
     402  movq %rsp,%rdi /* RDI = pt_regs; RSI is already trapnr */
     [all …]
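Two idioms recur above: computing an early stack top RIP-relatively from a symbol, and later loading %rsp from a patchable variable (initial_stack). A sketch of both, with illustrative names (everything below is an assumption, not the kernel's layout):

    	.data
    initial_stack_demo: .quad 0        # would be filled in before this runs
    	.bss
    	.balign 16
    early_stack:
    	.skip 4096                     # one page of boot stack
    early_stack_top:
    	.text
    	.globl boot_entry_demo
    boot_entry_demo:
    	leaq early_stack_top(%rip), %rsp      # position-independent stack setup
    	# ... early initialization runs on the boot stack ...
    	movq initial_stack_demo(%rip), %rsp   # later: adopt the real task stack
    	hlt                                   # entry code never returns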
|
D | sev_verify_cbit.S |
      80  xorq %rsp, %rsp
      81  subq $0x1000, %rsp
|
D | relocate_kernel_64.S |
      64  movq %rsp, RSP(%r11)
     103  lea PAGE_SIZE(%r8), %rsp
     203  leaq PAGE_SIZE(%r10), %rsp
     208  movq 0(%rsp), %rbp
     214  lea PAGE_SIZE(%r8), %rsp
     225  movq RSP(%r8), %rsp
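lea PAGE_SIZE(%r8), %rsp turns a spare page into a stack: the stack grows down, so the page's top is the starting %rsp. A sketch, assuming a 4096-byte page whose base arrives in %rdi:

    	.set PAGE_SIZE, 4096       # assumption
    	.text
    	.globl scratch_stack_demo
    scratch_stack_demo:            # %rdi = base of a writable scratch page
    	movq %rsp, %rax            # remember the original stack
    	leaq PAGE_SIZE(%rdi), %rsp # downward-growing stack starts at the page top
    	pushq %rax                 # park the old %rsp on the new stack
    	# ... work that must not touch the old stack runs here ...
    	popq %rsp                  # one pop switches straight back
    	ret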
|
/arch/x86/entry/ |
D | entry_64.S |
      92  movq %rsp, PER_CPU_VAR(cpu_tss_rw + TSS_sp2)
      93  SWITCH_TO_KERNEL_CR3 scratch_reg=%rsp
      94  movq PER_CPU_VAR(cpu_current_top_of_stack), %rsp
     110  movq %rsp, %rdi
     130  movq RCX(%rsp), %rcx
     131  movq RIP(%rsp), %r11
     159  cmpq $__USER_CS, CS(%rsp) /* CS must match SYSRET */
     162  movq R11(%rsp), %r11
     163  cmpq %r11, EFLAGS(%rsp) /* R11 == RFLAGS */
     189  cmpq $__USER_DS, SS(%rsp) /* SS must match SYSRET */
     [all …]
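Lines 130-163 are the opportunistic-SYSRET test: SYSRET hard-wires RIP := RCX and RFLAGS := R11, so the kernel may only take the fast path when the saved user RCX/R11 already equal the saved RIP/RFLAGS. A callable sketch of that comparison over a hypothetical frame layout (the offsets are assumptions, not the kernel's):

    	.set RIP_OFF, 0*8          # hypothetical saved-frame offsets
    	.set RCX_OFF, 1*8
    	.set EFLAGS_OFF, 2*8
    	.set R11_OFF, 3*8
    	.text
    	.globl sysret_ok_demo
    sysret_ok_demo:                # %rdi = pointer to a saved-register frame
    	movq RCX_OFF(%rdi), %rcx
    	cmpq %rcx, RIP_OFF(%rdi)   # SYSRET would set RIP from RCX
    	jne  1f
    	movq R11_OFF(%rdi), %r11
    	cmpq %r11, EFLAGS_OFF(%rdi) # SYSRET would set RFLAGS from R11
    	jne  1f
    	movl $1, %eax              # 1: fast SYSRET path is safe
    	ret
    1:	xorl %eax, %eax            # 0: must fall back to IRET
    	ret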
|
D | entry_64_compat.S |
      60  movq PER_CPU_VAR(cpu_current_top_of_stack), %rsp
     139  testl $X86_EFLAGS_NT|X86_EFLAGS_AC|X86_EFLAGS_TF, EFLAGS(%rsp)
     143  movq %rsp, %rdi
     213  SWITCH_TO_KERNEL_CR3 scratch_reg=%rsp
     216  movq PER_CPU_VAR(cpu_current_top_of_stack), %rsp
     263  movq %rsp, %rdi
     279  movq RBX(%rsp), %rbx /* pt_regs->rbx */
     280  movq RBP(%rsp), %rbp /* pt_regs->rbp */
     281  movq EFLAGS(%rsp), %r11 /* pt_regs->flags (in r11) */
     282  movq RIP(%rsp), %rcx /* pt_regs->ip (in rcx) */
     [all …]
|
/arch/x86/platform/efi/ |
D | efi_thunk_64.S |
      36  movq %rsp, %rax
      37  movq efi_mixed_mode_stack_pa(%rip), %rsp
      51  subq $28, %rsp
      52  movl %ebx, 0x0(%rsp) /* return address */
      53  movl %esi, 0x4(%rsp)
      54  movl %edx, 0x8(%rsp)
      55  movl %ecx, 0xc(%rsp)
      56  movl %r8d, 0x10(%rsp)
      57  movl %r9d, 0x14(%rsp)
      64  1: movq 24(%rsp), %rsp
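The thunk parks the 64-bit %rsp, switches to a stack the 32-bit firmware can address, and spills the arguments as 4-byte slots. A compressed sketch with illustrative names (efi_mixed_mode_stack_pa is real; low_stack_demo below is not):

    	.data
    low_stack_demo: .quad 0        # would hold the top of a below-4G stack
    	.text
    	.globl mixed_call_demo
    mixed_call_demo:
    	movq %rsp, %rax                   # keep the 64-bit stack pointer
    	movq low_stack_demo(%rip), %rsp   # switch to the 32-bit-reachable stack
    	subq $24, %rsp
    	movl %esi, 0x0(%rsp)       # a 32-bit callee reads dword-sized slots
    	movl %edx, 0x4(%rsp)
    	movl %ecx, 0x8(%rsp)
    	movq %rax, 16(%rsp)        # old %rsp kept above the argument area
    	# ... far call into 32-bit code would happen here ...
    	movq 16(%rsp), %rsp        # back on the 64-bit stack
    	ret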
|
D | efi_stub_64.S |
      15  movq %rsp, %rbp
      16  and $~0xf, %rsp
      18  subq $48, %rsp
      19  mov %r9, 32(%rsp)
      20  mov %rax, 40(%rsp)
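This is the SysV-to-Microsoft-x64 call setup in full: the old %rsp survives in %rbp, the stack is forced to 16-byte alignment, and 48 bytes cover the mandatory 32-byte shadow space plus the 5th and 6th arguments at 32(%rsp) and 40(%rsp). A sketch, assuming the target function pointer arrives in %rdi:

    	.text
    	.globl ms_abi_call_demo
    ms_abi_call_demo:              # %rdi = MS-ABI function to call (assumption)
    	pushq %rbp
    	movq  %rsp, %rbp           # old stack recoverable through %rbp
    	andq  $~0xf, %rsp          # MS ABI demands 16-byte alignment at the call
    	subq  $48, %rsp            # 32-byte shadow space + two stack arguments
    	movq  %r9, 32(%rsp)        # 5th MS-ABI argument
    	movq  %rax, 40(%rsp)       # 6th MS-ABI argument (as in the stub above)
    	call  *%rdi                # first four args would go in %rcx/%rdx/%r8/%r9
    	movq  %rbp, %rsp           # one move undoes both the sub and the align
    	popq  %rbp
    	ret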
|
/arch/x86/crypto/ |
D | chacha-ssse3-x86_64.S |
     180  lea 8(%rsp),%r10
     181  sub $0x10,%rsp
     182  and $~31,%rsp
     185  mov %rsp,%rdi
     189  pxor 0x00(%rsp),%xmm0
     190  movdqa %xmm0,0x00(%rsp)
     192  mov %rsp,%rsi
     197  lea -8(%r10),%rsp
     240  lea 8(%rsp),%r10
     241  sub $0x80,%rsp
     [all …]
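lea 8(%rsp),%r10 records where the caller's frame begins (skipping the return address), so after the and $~31 over-alignment a single lea -8(%r10),%rsp restores the entry state exactly. A minimal AVX-flavored sketch of the idiom:

    	.text
    	.globl simd_align_demo
    simd_align_demo:
    	lea  8(%rsp), %r10         # caller's %rsp, before the call pushed a return address
    	sub  $0x20, %rsp           # reserve one 32-byte spill slot
    	and  $~31, %rsp            # round down to a 32-byte boundary
    	vmovdqa %ymm0, 0x00(%rsp)  # aligned stores are now safe
    	vmovdqa 0x00(%rsp), %ymm0
    	lea  -8(%r10), %rsp        # exactly the %rsp we entered with
    	ret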
|
D | chacha-avx2-x86_64.S |
     207  lea 8(%rsp),%r10
     208  sub $0x10,%rsp
     209  and $~31,%rsp
     212  mov %rsp,%rdi
     216  vpxor 0x00(%rsp),%xmm7,%xmm7
     217  vmovdqa %xmm7,0x00(%rsp)
     219  mov %rsp,%rsi
     224  lea -8(%r10),%rsp
     512  lea 8(%rsp),%r10
     513  sub $0x10,%rsp
     [all …]
|
D | sha1_ni_asm.S |
      98  mov %rsp, %rbp
      99  sub $FRAME_SIZE, %rsp
     100  and $~0xF, %rsp
     116  movdqa E0, (0*16)(%rsp)
     117  movdqa ABCD, (1*16)(%rsp)
     276  sha1nexte (0*16)(%rsp), E0
     277  paddd (1*16)(%rsp), ABCD
     290  mov %rbp, %rsp
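Here %rbp snapshots %rsp before the alignment, so mov %rbp, %rsp at line 290 unwinds both the sub and the and in one step, while 16-byte-aligned movdqa slots hold the running digest. A sketch with an assumed two-slot frame:

    	.set FRAME_DEMO, 2*16      # assumption: two 16-byte save slots
    	.text
    	.globl aligned_frame_demo
    aligned_frame_demo:
    	pushq %rbp
    	movq  %rsp, %rbp           # snapshot before realigning
    	subq  $FRAME_DEMO, %rsp
    	andq  $~0xf, %rsp          # movdqa faults on unaligned addresses
    	movdqa %xmm0, (0*16)(%rsp) # save state in aligned slots
    	movdqa %xmm1, (1*16)(%rsp)
    	# ... rounds would clobber %xmm0/%xmm1 here ...
    	paddd (1*16)(%rsp), %xmm1  # fold the saved state back in
    	movq  %rbp, %rsp           # undo sub + and together
    	popq  %rbp
    	ret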
|
D | poly1305-x86_64-cryptogams.pl |
     355  mov 0(%rsp),$ctx
     362  mov 8(%rsp),%r15
     364  mov 16(%rsp),%r14
     366  mov 24(%rsp),%r13
     368  mov 32(%rsp),%r12
     370  mov 40(%rsp),%rbx
     372  lea 48(%rsp),%rsp
     439  mov %rsp,%rbp
     624  mov %rsp,%rbp
     758  mov %rsp,%rbp
     [all …]
|
D | sha256-avx2-asm.S |
     160  addl \disp(%rsp, SRND), h # h = k + w + h # --
     208  addl offset(%rsp, SRND), h # h = k + w + h # --
     259  addl offset(%rsp, SRND), h # h = k + w + h # --
     309  addl offset(%rsp, SRND), h # h = k + w + h # --
     375  addl \disp(%rsp, SRND), h # h = k + w + h # --
     414  addl offset(%rsp, SRND), h # h = k + w + h # --
     454  addl offset(%rsp, SRND), h # h = k + w + h # --
     494  addl offset(%rsp, SRND), h # h = k + w + h # --
     535  mov %rsp, %rbp
     537  subq $STACK_SIZE, %rsp
     [all …]
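addl \disp(%rsp, SRND), h walks a message-schedule table that lives on the stack, using a round-counter register as the index. The addressing mode in isolation, over a toy table:

    	.text
    	.globl stack_table_demo
    stack_table_demo:
    	subq $64, %rsp             # eight qword scratch slots
    	xorq %rcx, %rcx            # running byte offset, like SRND above
    1:	movq %rcx, (%rsp, %rcx)    # base + register index into the table
    	addq $8, %rcx
    	cmpq $64, %rcx
    	jne  1b
    	movq 16(%rsp), %rax        # displacement + base also works: slot 2
    	addq $64, %rsp
    	ret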
|
/arch/x86/um/ |
D | stub_64.S |
       8  mov %rsp, %rbx
       9  add $0x10, %rsp
      12  mov 0x0(%rsp), %rax
      21  mov %rsp, 8(%rbx)
      24  add %rax, %rsp
|
D | setjmp_64.S |
      11  # %rsp (post-return)
      28  movq %rsp,8(%rdi) # Post-return %rsp!
      47  movq 8(%rdi),%rsp
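The comment on line 11 is the subtle part: the saved value is the post-return %rsp, i.e. the entry %rsp plus the 8 bytes of return address, so longjmp can restore the stack and jump as if setjmp had just returned. A stripped-down sketch (a real jmp_buf must also capture the callee-saved registers, omitted here):

    	.text
    	.globl setjmp_demo
    setjmp_demo:                   # %rdi = 16-byte buffer (toy jmp_buf)
    	movq (%rsp), %rax          # return address
    	movq %rax, 0(%rdi)
    	leaq 8(%rsp), %rax         # post-return %rsp!
    	movq %rax, 8(%rdi)
    	xorl %eax, %eax            # direct path returns 0
    	ret
    	.globl longjmp_demo
    longjmp_demo:                  # %rdi = buffer, %esi = value to return
    	movq 8(%rdi), %rsp         # stack exactly as after setjmp returned
    	movl %esi, %eax
    	jmp  *0(%rdi)              # resume at the saved return address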
|
/arch/x86/lib/ |
D | csum-copy_64.S |
      40  subq $5*8, %rsp
      41  movq %rbx, 0*8(%rsp)
      42  movq %r12, 1*8(%rsp)
      43  movq %r14, 2*8(%rsp)
      44  movq %r13, 3*8(%rsp)
      45  movq %r15, 4*8(%rsp)
     198  movq 0*8(%rsp), %rbx
     199  movq 1*8(%rsp), %r12
     200  movq 2*8(%rsp), %r14
     201  movq 3*8(%rsp), %r13
     [all …]
|
D | copy_page_64.S |
      25  subq $2*8, %rsp
      26  movq %rbx, (%rsp)
      27  movq %r12, 1*8(%rsp)
      85  movq (%rsp), %rbx
      86  movq 1*8(%rsp), %r12
      87  addq $2*8, %rsp
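The discipline shown in both csum-copy_64.S and copy_page_64.S, reduced to its core: %rbx and %r12 are callee-saved in the SysV ABI, so any routine that uses them spills them on entry and restores them from the matching slots on exit:

    	.text
    	.globl callee_saved_demo
    callee_saved_demo:
    	subq $2*8, %rsp            # two save slots
    	movq %rbx, 0*8(%rsp)
    	movq %r12, 1*8(%rsp)
    	# ... %rbx and %r12 are free scratch registers here ...
    	movq 0*8(%rsp), %rbx
    	movq 1*8(%rsp), %r12
    	addq $2*8, %rsp
    	ret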
|
/arch/x86/entry/vdso/ |
D | vsgx.S |
      33  mov %rsp, %rbp
      83  .cfi_def_cfa %rsp, 8
     105  mov %rsp, %rcx
     111  mov %rsp, %rbx
     118  and $-0x10, %rsp
     136  lea 0x10(%rsp, %rbx), %rsp
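.cfi_def_cfa %rsp, 8 and friends keep the DWARF unwinder in sync while %rsp moves; once the stack is realigned with and $-0x10, an %rsp-relative CFA would be wrong, so the CFA is re-anchored on a stable register. A sketch of that bookkeeping:

    	.text
    	.globl cfi_demo
    cfi_demo:
    	.cfi_startproc
    	pushq %rbp
    	.cfi_adjust_cfa_offset 8   # CFA moved with the push
    	.cfi_rel_offset %rbp, 0    # and %rbp is now findable on the stack
    	movq  %rsp, %rbp
    	.cfi_def_cfa_register %rbp # anchor the CFA to %rbp...
    	andq  $-0x10, %rsp         # ...so this realignment stays unwindable
    	movq  %rbp, %rsp
    	.cfi_def_cfa_register %rsp # back to %rsp-relative tracking
    	popq  %rbp
    	.cfi_adjust_cfa_offset -8
    	.cfi_restore %rbp
    	ret
    	.cfi_endproc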
|
/arch/x86/boot/compressed/ |
D | efi_thunk_64.S |
      41  subq $64, %rsp
      42  movl %esi, 0x0(%rsp)
      43  movl %edx, 0x4(%rsp)
      44  movl %ecx, 0x8(%rsp)
      45  movl %r8d, 0xc(%rsp)
      46  movl %r9d, 0x10(%rsp)
      48  leaq 0x14(%rsp), %rbx
      74  1: addq $64, %rsp
|
D | idt_handlers_64.S |
      40  movq %rsp, %rdi
      42  movq ORIG_RAX(%rsp), %rsi
      63  addq $8, %rsp
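The handler glue above turns the hardware exception frame into a C call: %rdi points at the saved registers, %rsi carries the error code, and addq $8, %rsp discards the error-code slot the CPU pushed before the iret. A sketch with an assumed slot offset and a hypothetical C handler:

    	.set ERRCODE_OFF, 15*8     # assumption, standing in for ORIG_RAX
    	.text
    idt_glue_demo:
    	movq %rsp, %rdi              # arg 1: pointer to the saved-register frame
    	movq ERRCODE_OFF(%rsp), %rsi # arg 2: the error code
    	call handle_exception_demo   # hypothetical C handler
    	# ... the saved registers would be popped here ...
    	addq $8, %rsp                # drop the hardware-pushed error code
    	iretq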
|
/arch/x86/xen/ |
D | xen-asm.S |
     210  addq $8, %rsp /* skip regs->orig_ax */
     240  movq $__USER_DS, 4*8(%rsp)
     241  movq $__USER_CS, 1*8(%rsp)
     259  movq $__USER32_DS, 4*8(%rsp)
     260  movq $__USER32_CS, 1*8(%rsp)
     280  movq $__USER32_DS, 4*8(%rsp)
     281  movq $__USER32_CS, 1*8(%rsp)
     291  lea 16(%rsp), %rsp /* strip %rcx, %r11 */
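The 4*8 and 1*8 slots being rewritten are the SS and CS fields of the five-qword IRETQ frame (RIP, CS, RFLAGS, RSP, SS from the bottom up). Building such a frame by hand makes the offsets visible; the selector values below are illustrative, not Xen's:

    	.text
    	.globl iret_frame_demo
    iret_frame_demo:
    	movq  %rsp, %rax
    	pushq $0x2b                # SS     -> 4*8(%rsp) once the frame is complete
    	pushq %rax                 # RSP    -> 3*8(%rsp)
    	pushfq                     # RFLAGS -> 2*8(%rsp)
    	pushq $0x33                # CS     -> 1*8(%rsp)
    	leaq  1f(%rip), %rcx
    	pushq %rcx                 # RIP    -> 0*8(%rsp)
    	iretq                      # consumes all five slots at once
    1:	ret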
|
/arch/x86/kernel/acpi/ |
D | wakeup_64.S |
      36  movq saved_rsp, %rsp
      50  subq $8, %rsp
      55  movq %rsp, pt_regs_sp(%rax)
      75  movq %rsp, saved_rsp
      81  addq $8, %rsp
     102  movq pt_regs_sp(%rax), %rsp
     123  movq %rsp, %rdi
     128  addq $8, %rsp
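Suspend and resume bracket the firmware sleep with a matched pair: %rsp is written to saved_rsp before sleeping and reloaded afterwards, so execution resumes on the original stack. A minimal sketch of the pairing:

    	.data
    saved_rsp_demo: .quad 0
    	.text
    	.globl suspend_demo
    suspend_demo:
    	movq %rsp, saved_rsp_demo(%rip)   # (%rsp) is our return address here
    	# ... hand control to firmware; resume_demo runs on wakeup ...
    	ret
    	.globl resume_demo
    resume_demo:
    	movq saved_rsp_demo(%rip), %rsp   # original stack back
    	# ... the remaining registers would be restored here ...
    	ret                               # returns to suspend_demo's caller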
|
/arch/x86/purgatory/ |
D | entry64.S |
      30  leaq stack_init(%rip), %rsp
      44  movq rsp(%rip), %rsp
      66  rsp: .quad 0x0 label
|
/arch/x86/include/asm/ |
D | unwind_hints.h |
      18  .macro UNWIND_HINT_REGS base=%rsp offset=0 indirect=0 extra=1 partial=0
      19  .if \base == %rsp
      51  .macro UNWIND_HINT_IRET_REGS base=%rsp offset=0
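Line 19's .if \base == %rsp is GAS comparing register operands inside a macro, which lets one macro emit different hints depending on which register carries the frame. A toy macro in the same style:

    	.macro frame_src_demo base=%rsp
    	.if \base == %rsp
    	movq %rsp, %rax            # base is the live stack pointer
    	.else
    	movq \base, %rax           # base is some other frame register
    	.endif
    	.endm

    	.text
    	.globl macro_demo
    macro_demo:
    	frame_src_demo             # default: %rsp
    	frame_src_demo base=%rbp
    	ret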
|
/arch/s390/pci/ |
D | pci_clp.c |
      37  static inline void zpci_err_clp(unsigned int rsp, int rc) in zpci_err_clp() argument
      40  unsigned int rsp; in zpci_err_clp() member
      42  } __packed data = {rsp, rc}; in zpci_err_clp()
     135  if (!rc && rrb->response.hdr.rsp == CLP_RC_OK) in clp_query_pci_fngrp()
     139  zpci_err_clp(rrb->response.hdr.rsp, rc); in clp_query_pci_fngrp()
     202  if (!rc && rrb->response.hdr.rsp == CLP_RC_OK) { in clp_query_pci_fn()
     209  zpci_err_clp(rrb->response.hdr.rsp, rc); in clp_query_pci_fn()
     247  if (rrb->response.hdr.rsp == CLP_RC_SETPCIFN_BUSY) { in clp_set_pci_fn()
     253  } while (rrb->response.hdr.rsp == CLP_RC_SETPCIFN_BUSY); in clp_set_pci_fn()
     255  if (!rc && rrb->response.hdr.rsp == CLP_RC_OK) { in clp_set_pci_fn()
     [all …]
|