Lines matching refs: %rsp (x86-64 kernel entry code, arch/x86/entry/entry_64.S)
66 TRACE_IRQS_FLAGS EFLAGS(%rsp)
95 btl $9, EFLAGS(%rsp) /* interrupts off? */
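Both flag tests in this listing (here and at line 670) probe bit 9 of the saved RFLAGS image, which is the interrupt-enable flag (IF). A minimal userspace sketch of the same bit test, assuming only the architectural pushfq behavior (X86_EFLAGS_IF matches the kernel's name for this constant):

    #include <stdint.h>
    #include <stdio.h>

    #define X86_EFLAGS_IF (1UL << 9)   /* bit 9 of RFLAGS: interrupts enabled */

    int main(void)
    {
        uint64_t flags;

        /* pushfq/popq copies the live RFLAGS into a register; user mode
         * always sees IF set. btl $9, EFLAGS(%rsp) performs the same
         * bit test on the flags image saved on the kernel stack. */
        __asm__ volatile ("pushfq; popq %0" : "=r"(flags));
        printf("IF is %s\n", (flags & X86_EFLAGS_IF) ? "set" : "clear");
        return 0;
    }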
155 movq %rsp, PER_CPU_VAR(cpu_tss_rw + TSS_sp2)
156 SWITCH_TO_KERNEL_CR3 scratch_reg=%rsp
157 movq PER_CPU_VAR(cpu_current_top_of_stack), %rsp
174 movq %rsp, %rsi
184 movq RCX(%rsp), %rcx
185 movq RIP(%rsp), %r11
213 cmpq $__USER_CS, CS(%rsp) /* CS must match SYSRET */
216 movq R11(%rsp), %r11
217 cmpq %r11, EFLAGS(%rsp) /* R11 == RFLAGS */
243 cmpq $__USER_DS, SS(%rsp) /* SS must match SYSRET */
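The symbolic offsets used above (RCX, RIP, R11, EFLAGS, CS, SS) index into struct pt_regs, the register frame the entry code builds at the top of the kernel stack. The cluster at 184-243 decides whether the fast SYSRET exit is safe: SYSRET reloads RIP from %rcx and RFLAGS from %r11 and assumes the SYSCALL flat segments, so it may only be used when the saved frame already matches that shape. A C sketch of the test, assuming a struct that mirrors the kernel's x86-64 pt_regs layout (can_use_sysret is an invented helper; the real path also checks that RIP is canonical and that RF/TF are clear):

    #include <stdbool.h>
    #include <stdint.h>

    /* Mirrors the x86-64 struct pt_regs; RCX(%rsp), RIP(%rsp), etc. are
     * byte offsets into this frame, generated at build time. */
    struct pt_regs {
        uint64_t r15, r14, r13, r12, bp, bx, r11, r10, r9, r8;
        uint64_t ax, cx, dx, si, di;
        uint64_t orig_ax;                /* syscall number, or -1 */
        uint64_t ip, cs, flags, sp, ss;  /* hardware iret frame */
    };

    #define __USER_CS 0x33   /* selector values from the kernel GDT */
    #define __USER_DS 0x2b

    static bool can_use_sysret(const struct pt_regs *regs)
    {
        return regs->cx == regs->ip &&     /* RCX must equal saved RIP */
               regs->r11 == regs->flags && /* R11 must equal RFLAGS    */
               regs->cs == __USER_CS &&    /* CS must match SYSRET     */
               regs->ss == __USER_DS;      /* SS must match SYSRET     */
    }

    int main(void)
    {
        struct pt_regs regs = {
            .cx = 0x401000, .ip = 0x401000,
            .r11 = 0x202, .flags = 0x202,   /* IF set, nothing else */
            .cs = __USER_CS, .ss = __USER_DS,
        };
        return can_use_sysret(&regs) ? 0 : 1;
    }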
259 movq %rsp, %rdi
260 movq PER_CPU_VAR(cpu_tss_rw + TSS_sp0), %rsp
274 popq %rsp
296 movq %rsp, TASK_threadsp(%rdi)
297 movq TASK_threadsp(%rsi), %rsp
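The pair at 296-297 is the core of the context switch: the outgoing task's %rsp is stored into its task_struct (the TASK_threadsp slot) and the incoming task's saved %rsp is loaded, after which returning simply resumes the other task where it left off. As a loose userspace analogy only, POSIX swapcontext does the same save-one-stack-pointer, load-another dance (all names below are invented for the demo):

    #include <stdio.h>
    #include <ucontext.h>

    static ucontext_t main_ctx, task_ctx;

    static void task(void)
    {
        puts("running on the other stack");
        /* Save this context (including its SP), restore main's: the role
         * played by movq %rsp, TASK_threadsp(%rdi) and
         * movq TASK_threadsp(%rsi), %rsp above. */
        swapcontext(&task_ctx, &main_ctx);
    }

    int main(void)
    {
        static char stack[64 * 1024];

        getcontext(&task_ctx);
        task_ctx.uc_stack.ss_sp = stack;
        task_ctx.uc_stack.ss_size = sizeof(stack);
        task_ctx.uc_link = &main_ctx;
        makecontext(&task_ctx, task, 0);

        swapcontext(&main_ctx, &task_ctx);   /* switch away... */
        puts("back on the original stack");  /* ...and switched back */
        return 0;
    }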
343 movq %rsp, %rdi
358 movq $0, RAX(%rsp)
418 leaq 8(%rsp), \old_rsp
420 movq %rsp, \old_rsp
446 movq PER_CPU_VAR(hardirq_stack_ptr), %rsp
455 cmpq -8(%rsp), \old_rsp
484 popq %rsp
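Lines 418-484 belong to the ENTER_IRQ_STACK / LEAVE_IRQ_STACK macros: stash the old %rsp, point %rsp at the per-CPU hardirq stack, and push the old value onto the new stack so that leaving is a single popq %rsp; the cmpq -8(%rsp) line guards the nested case, where the CPU is already on the irq stack and %rsp must not be rewound to the top. A toy C model of that discipline, with an array standing in for the per-CPU stack (all names invented):

    #include <assert.h>
    #include <stdint.h>
    #include <stdio.h>

    #define IRQ_STACK_WORDS 512
    static uint64_t irq_stack[IRQ_STACK_WORDS];
    static uint64_t *sp;   /* stands in for %rsp */

    static void enter_irq_stack(void)
    {
        uint64_t *old_sp = sp;                 /* movq %rsp, \old_rsp */
        uint64_t *top = &irq_stack[IRQ_STACK_WORDS];

        /* Nested entry? Then stay put instead of rewinding to the top
         * and clobbering the outer handler's frames. */
        if (!(sp >= irq_stack && sp < top))
            sp = top;             /* movq hardirq_stack_ptr, %rsp */
        *--sp = (uint64_t)(uintptr_t)old_sp;   /* pushq \old_rsp */
    }

    static void leave_irq_stack(void)
    {
        sp = (uint64_t *)(uintptr_t)*sp;       /* popq %rsp */
    }

    int main(void)
    {
        static uint64_t task_stack[64];
        uint64_t *before = sp = &task_stack[64];

        enter_irq_stack();
        assert(sp >= irq_stack && sp < &irq_stack[IRQ_STACK_WORDS]);
        leave_irq_stack();
        assert(sp == before);
        puts("enter/leave balanced");
        return 0;
    }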
519 testb $3, CS-ORIG_RAX+8(%rsp)
533 movq %rsp, %rdi
534 movq PER_CPU_VAR(cpu_current_top_of_stack), %rsp
559 testb $3, CS+8(%rsp)
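Every `testb $3, CS...(%rsp)` in this listing (the CS-ORIG_RAX and CS+8 forms only differ in how far %rsp currently sits from the saved frame) reads the low two bits of the saved CS selector: the privilege level of the interrupted context. Nonzero means the CPU arrived from ring 3, so the entry code must switch off the user-visible stack. The same bit test in C, using the architectural fact that a selector's low two bits are its RPL:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        uint16_t cs;

        /* Read the live CS selector; from user code this shows CPL 3.
         * testb $3, CS(%rsp) applies the identical mask to the CS value
         * saved in the interrupt frame. */
        __asm__ ("movw %%cs, %0" : "=r"(cs));
        printf("CPL = %u (3 means user mode)\n", cs & 3);
        return 0;
    }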
593 addq $-0x80, (%rsp) /* Adjust vector to [-256, -1] range */
604 addq $-0x80, (%rsp) /* Adjust vector to [-256, -1] range */
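The duplicated `addq $-0x80, (%rsp)` finishes an encoding trick from the interrupt stubs: each stub pushes ~vector + 0x80, which always fits in a signed byte so every stub can use the short two-byte push, and the common path then subtracts 0x80 to leave ~vector, a value in [-256, -1] that can never collide with a syscall number stored in the same orig_ax slot. The arithmetic, verified in C:

    #include <assert.h>
    #include <stdio.h>

    int main(void)
    {
        for (int vector = 0; vector < 256; vector++) {
            int pushed = ~vector + 0x80;              /* what the stub pushes */
            assert(pushed >= -128 && pushed <= 127);  /* signed-byte immediate */

            int adjusted = pushed - 0x80;             /* addq $-0x80, (%rsp) */
            assert(adjusted == ~vector);
            assert(adjusted >= -256 && adjusted <= -1);
        }
        puts("all 256 vectors land in [-256, -1]");
        return 0;
    }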
615 testb $3, CS(%rsp)
620 mov %rsp,%rdi
627 testb $3, CS(%rsp)
638 movq %rsp, %rdi
639 movq PER_CPU_VAR(cpu_tss_rw + TSS_sp0), %rsp
670 btl $9, EFLAGS(%rsp) /* were interrupts off? */
685 testb $3, CS(%rsp)
691 addq $8, %rsp /* skip regs->orig_ax */
705 testb $4, (SS-RIP)(%rsp)
748 movq (1*8)(%rsp), %rax /* user RIP */
750 movq (2*8)(%rsp), %rax /* user CS */
752 movq (3*8)(%rsp), %rax /* user RFLAGS */
754 movq (5*8)(%rsp), %rax /* user SS */
756 movq (4*8)(%rsp), %rax /* user RSP */
776 movq %rax, %rsp
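Lines 748-776 implement the ESPFIX64 exit: the hardware iret frame is copied slot by slot to a special per-CPU area before the final `movq %rax, %rsp` points the stack at the copy (SS at 5*8 is read before RSP at 4*8 only because of instruction ordering; the memory layout is unchanged). The offsets start at 1*8 because user RAX was pushed below the frame. The architectural frame layout itself, pinned down with offsetof:

    #include <assert.h>
    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>

    /* The frame the CPU pushes for a 64-bit interrupt/exception and
     * consumes again at iretq, lowest address first. */
    struct iret_frame {
        uint64_t rip;
        uint64_t cs;
        uint64_t rflags;
        uint64_t rsp;
        uint64_t ss;
    };

    int main(void)
    {
        assert(offsetof(struct iret_frame, rip)    == 0 * 8);
        assert(offsetof(struct iret_frame, cs)     == 1 * 8);
        assert(offsetof(struct iret_frame, rflags) == 2 * 8);
        assert(offsetof(struct iret_frame, rsp)    == 3 * 8);
        assert(offsetof(struct iret_frame, ss)     == 4 * 8);
        puts("iret frame: RIP, CS, RFLAGS, RSP, SS");
        return 0;
    }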
895 testb $3, CS(%rsp)
901 movq %rsp, %rdi /* pt_regs pointer */
904 movq ORIG_RAX(%rsp), %rsi /* get error code */
905 movq $-1, ORIG_RAX(%rsp) /* no syscall to restart */
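Lines 895-905 marshal arguments for the C-level exception handler under the SysV AMD64 calling convention: %rdi (first argument) receives the pt_regs pointer and %rsi (second argument) the hardware error code that was parked in the orig_ax slot, which is then overwritten with -1 so later signal code cannot mistake the frame for an interrupted syscall and try to restart it. A sketch of the C-side shape this calls into (do_fault and its error-code value are illustrative, not a specific kernel handler):

    #include <stdio.h>

    struct pt_regs;   /* layout as sketched earlier in this listing */

    /* First arg arrives in %rdi, second in %rsi, matching
     * movq %rsp, %rdi / movq ORIG_RAX(%rsp), %rsi above. */
    static void do_fault(struct pt_regs *regs, unsigned long error_code)
    {
        (void)regs;
        printf("fault, error code %#lx\n", error_code);
    }

    int main(void)
    {
        do_fault(NULL, 0x2);   /* e.g. bit 1 of a #PF code = write access */
        return 0;
    }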
991 testb $3, CS-ORIG_RAX(%rsp) /* If coming from userspace, switch stacks */
1000 testb $3, CS-ORIG_RAX(%rsp)
1003 pushq 5*8(%rsp)
1054 TRACE_IRQS_FLAGS (%rsp)
1080 mov %rsp, %rbp
1111 movq %rdi, %rsp /* we don't return, adjust the stack frame */
1140 cmpw %cx, 0x10(%rsp)
1143 cmpw %cx, 0x18(%rsp)
1146 cmpw %cx, 0x20(%rsp)
1149 cmpw %cx, 0x28(%rsp)
1152 movq (%rsp), %rcx
1153 movq 8(%rsp), %r11
1154 addq $0x30, %rsp
1159 movq (%rsp), %rcx
1160 movq 8(%rsp), %r11
1161 addq $0x30, %rsp
1292 testb $3, CS+8(%rsp)
1307 movq %rsp, %rdi /* arg0 = pt_regs pointer */
1309 movq %rax, %rsp /* switch stack */
1327 cmpq %rcx, RIP+8(%rsp)
1330 cmpq %rax, RIP+8(%rsp)
1332 cmpq $.Lgs_change, RIP+8(%rsp)
1347 movq %rcx, RIP+8(%rsp)
1363 mov %rsp, %rdi
1365 mov %rax, %rsp
1373 testb $3, CS(%rsp)
1432 testb $3, CS-RIP+8(%rsp)
1450 movq %rsp, %rdx
1451 movq PER_CPU_VAR(cpu_current_top_of_stack), %rsp
1469 movq %rsp, %rdi
1533 cmpq 8(%rsp), %rdx
1536 cmpq 8(%rsp), %rdx
1545 cmpl $1, -8(%rsp)
1560 lea 6*8(%rsp), %rdx
1562 cmpq %rdx, 4*8(%rsp)
1567 cmpq %rdx, 4*8(%rsp)
1573 testb $(X86_EFLAGS_DF >> 8), (3*8 + 1)(%rsp)
1583 subq $8, %rsp
1584 leaq -10*8(%rsp), %rdx
1592 addq $(6*8), %rsp
1602 movq (%rsp), %rdx
1608 subq $(5*8), %rsp
1612 pushq 11*8(%rsp)
1624 pushq %rsp /* RSP (minus 8 because of the previous push) */
1625 addq $8, (%rsp) /* Fix up RSP */
1650 movq $1, 10*8(%rsp) /* Set "NMI executing". */
1657 addq $(10*8), %rsp
1659 pushq -6*8(%rsp)
1661 subq $(5*8), %rsp
1682 movq %rsp, %rdi
1700 addq $6*8, %rsp
1712 movq $0, 5*8(%rsp) /* clear "NMI executing" */
1741 leaq -PTREGS_SIZE(%rax), %rsp
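The long run from 1533 to 1712 is the nested-NMI machinery: decide whether this NMI interrupted an earlier NMI (via the "NMI executing" variable and by testing whether the interrupted RSP lies within the current NMI stack), keep duplicate copies of the iret frame so a nested NMI cannot clobber the one being returned through, and clear the flag on exit. A C model of just the range test behind the two `cmpq %rdx, 4*8(%rsp)` lines (the names and the stack-size constant are stand-ins):

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define NMI_STACK_SIZE 4096   /* stand-in for the real exception-stack size */

    /* prev_rsp is the saved RSP slot of the iret frame (4*8(%rsp));
     * nmi_stack_top is what lea 6*8(%rsp), %rdx computes. If prev_rsp
     * lies inside (top - size, top], this NMI arrived while an earlier
     * NMI was still running on the same stack. */
    static bool interrupted_on_nmi_stack(uint64_t prev_rsp, uint64_t nmi_stack_top)
    {
        return prev_rsp <= nmi_stack_top &&
               prev_rsp >  nmi_stack_top - NMI_STACK_SIZE;
    }

    int main(void)
    {
        uint64_t top = 0xffffc90000004000ull;   /* made-up stack top */

        printf("inside:  %d\n", interrupted_on_nmi_stack(top - 512, top));  /* 1 */
        printf("outside: %d\n", interrupted_on_nmi_stack(top + 512, top));  /* 0 */
        return 0;
    }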