Lines matching refs: rsp
95 movq RIP(%rsp), %rdi
99 movq SS+16(%rsp), %rsi
141 movq %r15, R15(%rsp)
142 movq %r14, R14(%rsp)
143 movq %r13, R13(%rsp)
144 movq %r12, R12(%rsp)
145 movq %r11, R11(%rsp)
146 movq %r10, R10(%rsp)
147 movq %rbp, RBP(%rsp)
148 movq %rbx, RBX(%rsp)
150 movq SS(%rsp), %rcx
151 movq %rcx, EFLAGS(%rsp)
154 movq %rcx, SS(%rsp)
156 movq %rcx, CS(%rsp)
158 leaq SS+16(%rsp), %rcx
159 movq %rcx, RSP(%rsp)
162 leaq (%rsp), %rcx
168 movq EFLAGS(%rsp), %rax
169 movq %rax, SS(%rsp)
172 movq RIP(%rsp), %rax
173 movq %rax, SS+8(%rsp)
176 movq R15(%rsp), %r15
177 movq R14(%rsp), %r14
178 movq R13(%rsp), %r13
179 movq R12(%rsp), %r12
180 movq R10(%rsp), %r10
181 movq RBP(%rsp), %rbp
182 movq RBX(%rsp), %rbx
221 movq RIP(%rsp), %rdi
223 movq SS+16(%rsp), %rsi
243 leaq SS+16(%rsp), %rdi
249 movq RIP(%rsp), %rsi
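The references around lines 141-182 look like a register-saving ftrace trampoline: the remaining general-purpose registers are spilled into a pt_regs-shaped frame (141-148), the flags that an earlier pushfq left in the SS slot are copied into the EFLAGS slot and the CS/SS/RSP slots are filled in (150-159), and 168-182 later reload the registers so the tracing callback may have modified them. The R15 ... SS symbols are byte offsets into that frame. A minimal user-space sketch, assuming the standard x86-64 pt_regs field order; the struct below is an illustrative mirror, not the kernel header:

    #include <stdio.h>
    #include <stddef.h>

    /* Illustrative mirror of the x86-64 pt_regs layout; the R15 ... SS
     * offsets used around lines 141-182 index slots laid out like this. */
    struct pt_regs_mirror {
        unsigned long r15, r14, r13, r12, rbp, rbx, r11, r10;
        unsigned long r9, r8, rax, rcx, rdx, rsi, rdi;
        unsigned long orig_rax, rip, cs, eflags, rsp, ss;
    };

    int main(void)
    {
        printf("R15=%zu RBX=%zu RIP=%zu EFLAGS=%zu RSP=%zu SS=%zu\n",
               offsetof(struct pt_regs_mirror, r15),
               offsetof(struct pt_regs_mirror, rbx),
               offsetof(struct pt_regs_mirror, rip),
               offsetof(struct pt_regs_mirror, eflags),
               offsetof(struct pt_regs_mirror, rsp),
               offsetof(struct pt_regs_mirror, ss));
        return 0;
    }

Running it prints the slot offsets (R15 at 0, RBX at 40, RIP at 128, EFLAGS at 144, RSP at 152, SS at 160), which is what the R15()/EFLAGS()/SS() symbols in the assembly stand for.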
260 subq $24, %rsp
263 movq %rax, (%rsp)
264 movq %rdx, 8(%rsp)
270 movq 8(%rsp), %rdx
271 movq (%rsp), %rax
272 addq $24, %rsp
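Lines 260-272 have the shape of a function-graph return trampoline: reserve scratch space, preserve the %rax/%rdx return-value pair across a call into the tracer, then continue at whatever original return address the tracer hands back. A toy C model of that park-the-return-address idea; every name in it is invented for illustration:

    #include <stdio.h>

    typedef void (*continuation_fn)(long);

    /* Stand-in for the original caller that the traced function should
     * have returned to. */
    static void real_continuation(long retval)
    {
        printf("caller resumes with retval %ld\n", retval);
    }

    /* Stand-in for the tracer call made between the save and restore of
     * the return-value registers (lines 263-264 and 270-271). */
    static continuation_fn tracer_exit_hook(void)
    {
        printf("tracer exit hook runs here\n");
        return real_continuation;
    }

    static void return_trampoline(long rax_like)
    {
        long saved = rax_like;                    /* movq %rax, (%rsp)    */
        continuation_fn cont = tracer_exit_hook(); /* call into the tracer */
        cont(saved);                              /* resume the real caller */
    }

    int main(void)
    {
        return_trampoline(42);
        return 0;
    }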
291 bt $9,EFLAGS-\offset(%rsp) /* interrupts off? */
324 bt $9,EFLAGS-\offset(%rsp) /* interrupts off? */
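Both `bt $9,EFLAGS-\offset(%rsp)` lines test bit 9 of the saved flags image, which is the interrupt-enable flag (IF), to learn whether the interrupted context had interrupts on before invoking the irq-flag tracing hooks. The same predicate in C:

    #include <stdio.h>

    #define X86_EFLAGS_IF (1UL << 9)   /* EFLAGS bit 9: interrupt enable */

    /* Same test the `bt $9` lines perform on the saved EFLAGS slot. */
    static int irqs_were_enabled(unsigned long saved_eflags)
    {
        return (saved_eflags & X86_EFLAGS_IF) != 0;
    }

    int main(void)
    {
        printf("%d\n", irqs_were_enabled(0x246));   /* IF set   -> 1 */
        printf("%d\n", irqs_were_enabled(0x046));   /* IF clear -> 0 */
        return 0;
    }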
347 movq \tmp,RSP+\offset(%rsp)
348 movq $__USER_DS,SS+\offset(%rsp)
349 movq $__USER_CS,CS+\offset(%rsp)
350 movq $-1,RCX+\offset(%rsp)
351 movq R11+\offset(%rsp),\tmp /* get eflags */
352 movq \tmp,EFLAGS+\offset(%rsp)
356 movq RSP+\offset(%rsp),\tmp
358 movq EFLAGS+\offset(%rsp),\tmp
359 movq \tmp,R11+\offset(%rsp)
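Lines 347-359 are the two halves of the top-of-stack fixup around the SYSCALL fast path. SYSCALL pushes no frame at all: it leaves the return RIP in %rcx and RFLAGS in %r11, so on entry the missing RSP/SS/CS/EFLAGS slots of pt_regs are synthesized (347-352, taking the user stack pointer from the previously saved old_rsp), and on the way out the saved EFLAGS is copied back into the R11 slot (358-359) while the RSP slot goes back to old_rsp (356). A rough C rendering of the entry-side half; the struct, field names, and selector values are illustrative only:

    #include <stdio.h>

    #define USER_CS_ILLUSTRATIVE 0x33   /* user 64-bit code segment selector */
    #define USER_DS_ILLUSTRATIVE 0x2b   /* user data/stack segment selector  */

    struct frame { unsigned long rcx, r11, eflags, rsp, cs, ss; };

    /* Rough equivalent of the fixup at lines 347-352. */
    static void fixup_top_of_stack(struct frame *f, unsigned long saved_user_rsp)
    {
        f->rsp    = saved_user_rsp;        /* SYSCALL never saved user RSP     */
        f->ss     = USER_DS_ILLUSTRATIVE;  /* movq $__USER_DS,SS(...)          */
        f->cs     = USER_CS_ILLUSTRATIVE;  /* movq $__USER_CS,CS(...)          */
        f->rcx    = -1UL;                  /* user rcx was clobbered by SYSCALL */
        f->eflags = f->r11;                /* SYSCALL stashed RFLAGS in %r11   */
    }

    int main(void)
    {
        struct frame f = { .r11 = 0x246 };
        fixup_top_of_stack(&f, 0x7fffffffe000UL);
        printf("eflags=%#lx rsp=%#lx cs=%#lx ss=%#lx rcx=%ld\n",
               f.eflags, f.rsp, f.cs, f.ss, (long)f.rcx);
        return 0;
    }

The -1 in the RCX slot records that the user's %rcx value is unrecoverable, since the CPU overwrote it with the return address.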
368 CFI_REL_OFFSET rsp,0
379 addq $8*6, %rsp
390 CFI_DEF_CFA rsp,8+\offset
402 CFI_REL_OFFSET rsp, RSP+\offset-RIP
464 movq %rsp, %rsi
466 leaq -RBP(%rsp),%rdi /* arg1 for handler */
477 cmovzq PER_CPU_VAR(irq_stack_ptr),%rsp
493 movq 5*8+16(%rsp), %r11 /* save return address */
500 movq %r11, 8(%rsp) /* return address */
557 testl $3, CS-ARGOFFSET(%rsp) # from kernel_thread?
567 subq $REST_SKIP, %rsp # leave space for volatiles
571 movl $0, RAX(%rsp)
612 CFI_DEF_CFA rsp,KERNEL_STACK_OFFSET
623 movq %rsp,PER_CPU_VAR(old_rsp)
624 movq PER_CPU_VAR(kernel_stack),%rsp
631 movq %rax,ORIG_RAX-ARGOFFSET(%rsp)
632 movq %rcx,RIP-ARGOFFSET(%rsp)
634 testl $_TIF_WORK_SYSCALL_ENTRY,TI_flags+THREAD_INFO(%rsp,RIP-ARGOFFSET)
646 movq %rax,RAX-ARGOFFSET(%rsp)
658 movl TI_flags+THREAD_INFO(%rsp,RIP-ARGOFFSET),%edx
666 movq RIP-ARGOFFSET(%rsp),%rcx
670 movq PER_CPU_VAR(old_rsp), %rsp
703 movq $-ENOSYS,RAX-ARGOFFSET(%rsp)
729 movq RAX-ARGOFFSET(%rsp),%rsi /* second arg, syscall return value */
741 testl $(_TIF_WORK_SYSCALL_ENTRY & ~_TIF_SYSCALL_AUDIT),TI_flags+THREAD_INFO(%rsp,RIP-ARGOFFSET)
745 movq $-ENOSYS,RAX(%rsp) /* ptrace can change this for a bad syscall */
747 movq %rsp,%rdi
765 movq %rax,RAX-ARGOFFSET(%rsp)
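The cluster at lines 612-765 reads as the 64-bit SYSCALL entry: the user stack pointer is parked in per-CPU old_rsp and the per-CPU kernel_stack is loaded (623-624), the syscall number goes into ORIG_RAX and the return RIP (which SYSCALL left in %rcx) into the RIP slot (631-632), the thread-info work flags decide between the fast path and tracesys (634, 741), the return value is written to the RAX slot (646, 765), and old_rsp is reloaded into %rsp before sysret (670). A compressed sketch of the flags decision; the bit values are made up for the example, the real _TIF_* masks live in the x86 thread_info header:

    #include <stdio.h>

    #define TIF_SYSCALL_TRACE  (1u << 0)   /* illustrative values only */
    #define TIF_SYSCALL_AUDIT  (1u << 1)
    #define WORK_SYSCALL_ENTRY (TIF_SYSCALL_TRACE | TIF_SYSCALL_AUDIT)

    /* Mirrors the tests at lines 634 and 741: any pending entry work
     * diverts the syscall from the fast path. */
    static const char *syscall_entry_path(unsigned int ti_flags)
    {
        if (ti_flags & WORK_SYSCALL_ENTRY)
            return "tracesys (slow path)";
        return "fast path";
    }

    int main(void)
    {
        printf("%s\n", syscall_entry_path(0));
        printf("%s\n", syscall_entry_path(TIF_SYSCALL_TRACE));
        return 0;
    }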
811 leaq 8(%rsp),%rdi # &ptregs -> arg1
820 movq %rsp,%rdi # &ptregs -> arg1
880 addq $8, %rsp
885 movq %rax,RAX(%rsp)
897 addq $8, %rsp
902 movq %rax,RAX(%rsp) # fixme, this could be done at the higher layer
911 addq $8, %rsp
916 movq %rax,RAX(%rsp) # fixme, this could be done at the higher layer
924 addq $8, %rsp
930 movq %rax,RAX(%rsp)
988 subq $ORIG_RAX-RBP, %rsp
1006 addq $-0x80,(%rsp) /* Adjust vector to [-256,-1] range */
1017 leaq ARGOFFSET-RBP(%rsi), %rsp
1018 CFI_DEF_CFA_REGISTER rsp
1023 testl $3,CS-ARGOFFSET(%rsp)
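Lines 988-1023 span the common interrupt entry and the start of its exit path: space for the partial register frame is reserved (988), the vector number on top of the stack is normalized (1006), the pre-interrupt stack pointer is re-established on the way out (1017-1018), and CS is checked to pick the user-mode or kernel-mode return path (1023). The normalization works because the per-vector stubs elsewhere in this file push ~vector + 0x80, which always fits in a signed byte; adding -0x80 here yields ~vector, a value in [-256, -1] that the C handler flips back with another ~. A quick check of that arithmetic:

    #include <stdio.h>

    /* Verifies the encoding that line 1006 undoes: the stub pushes
     * ~vector + 0x80, the common code adds -0x80 to get ~vector, and
     * one more ~ recovers the original vector number. */
    int main(void)
    {
        for (int vector = 0; vector < 256; vector += 51) {
            long pushed   = ~vector + 0x80;   /* what the stub pushes  */
            long adjusted = pushed - 0x80;    /* addq $-0x80,(%rsp)    */
            printf("vector %3d  pushed %4ld  adjusted %4ld  recovered %3ld\n",
                   vector, pushed, adjusted, ~adjusted);
        }
        return 0;
    }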
1067 testb $4,(SS-RIP)(%rsp)
1083 movq (2*8)(%rsp),%rax /* RIP */
1085 movq (3*8)(%rsp),%rax /* CS */
1087 movq (4*8)(%rsp),%rax /* RFLAGS */
1089 movq (6*8)(%rsp),%rax /* SS */
1091 movq (5*8)(%rsp),%rax /* RSP */
1097 movq %rax,%rsp
1142 movq $-1,ORIG_RAX(%rsp)
1144 movq %rsp,%rdi # &pt_regs
1160 bt $9,EFLAGS-ARGOFFSET(%rsp) /* interrupts off? */
1242 subq $ORIG_RAX-R15, %rsp
1246 movq %rsp,%rdi /* pt_regs pointer */
1260 subq $ORIG_RAX-R15, %rsp
1264 movq %rsp,%rdi /* pt_regs pointer */
1279 subq $ORIG_RAX-R15, %rsp
1283 movq %rsp,%rdi /* pt_regs pointer */
1298 subq $ORIG_RAX-R15, %rsp
1302 movq %rsp,%rdi /* pt_regs pointer */
1303 movq ORIG_RAX(%rsp),%rsi /* get error code */
1304 movq $-1,ORIG_RAX(%rsp) /* no syscall to restart */
1317 subq $ORIG_RAX-R15, %rsp
1322 movq %rsp,%rdi /* pt_regs pointer */
1323 movq ORIG_RAX(%rsp),%rsi /* get error code */
1324 movq $-1,ORIG_RAX(%rsp) /* no syscall to restart */
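Lines 1242-1324 repeat the exception-entry boilerplate: reserve the full pt_regs area (subq $ORIG_RAX-R15), pass the pt_regs pointer to the C handler in %rdi, pass the hardware error code that was stored in ORIG_RAX in %rsi where the vector has one (1303, 1323), and overwrite ORIG_RAX with -1 so later signal delivery does not treat the frame as an interrupted system call and attempt a restart (1304, 1324). A toy version of just that last convention; the field name is borrowed from pt_regs and the logic is simplified:

    #include <stdio.h>

    /* Toy version of the "no syscall to restart" marker written by lines
     * 1304 and 1324: a non-negative orig_rax means "this frame entered
     * via a system call". Illustrative only. */
    struct frame { long orig_rax; };

    static const char *restart_decision(const struct frame *f)
    {
        return f->orig_rax >= 0 ? "syscall frame: restart is possible"
                                : "exception frame: no syscall to restart";
    }

    int main(void)
    {
        struct frame sys  = { .orig_rax = 0 /* e.g. read(2) */ };
        struct frame trap = { .orig_rax = -1 };
        printf("%s\n", restart_decision(&sys));
        printf("%s\n", restart_decision(&trap));
        return 0;
    }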
1377 mov %rsp,%rbp
1380 cmove PER_CPU_VAR(irq_stack_ptr),%rsp
1385 CFI_DEF_CFA_REGISTER rsp
1414 movq %rdi, %rsp # we don't return, adjust the stack frame
1541 testl $3,CS(%rsp)
1557 movq %rsp,%rdi /* &pt_regs */
1559 movq %rax,%rsp /* switch stack for scheduling */
1566 movq %rsp,%rdi /* arg1: &pt_regs */
1606 testl $3,CS+8(%rsp)
1624 cmpq %rcx,RIP+8(%rsp)
1627 cmpq %rax,RIP+8(%rsp)
1629 cmpq $gs_change,RIP+8(%rsp)
1635 movq %rcx,RIP+8(%rsp)
1716 cmpl $__KERNEL_CS, 16(%rsp)
1723 cmpl $1, -8(%rsp)
1733 lea 6*8(%rsp), %rdx
1734 test_in_nmi rdx, 4*8(%rsp), nested_nmi, first_nmi
1744 cmpq 8(%rsp), %rdx
1747 cmpq 8(%rsp), %rdx
1752 leaq -1*8(%rsp), %rdx
1753 movq %rdx, %rsp
1755 leaq -10*8(%rsp), %rdx
1763 addq $(6*8), %rsp
1811 movq (%rsp), %rdx
1820 subq $(5*8), %rsp
1825 pushq_cfi 11*8(%rsp)
1846 movq $1, 10*8(%rsp)
1849 addq $(10*8), %rsp
1852 pushq_cfi -6*8(%rsp)
1854 subq $(5*8), %rsp
1864 subq $ORIG_RAX-R15, %rsp
1888 movq %rsp,%rdi
1908 movq $0, 5*8(%rsp)
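Everything from line 1716 on is the NMI entry with its nested-NMI bookkeeping: the handler keeps an "NMI executing" word plus spare copies of the interrupt-return frame on its own stack (1733-1763, 1811-1854); an NMI that nests before the first one has returned only arranges for the handler to be repeated and bails out, the word is (re)asserted at 1846, and it is cleared again at 1908 on the way out. A toy model of the latch-and-repeat logic; the real code achieves this by rewriting saved IRET frames, not with booleans:

    #include <stdio.h>
    #include <stdbool.h>

    static bool nmi_executing;    /* the word set at 1846, cleared at 1908 */
    static bool repeat_pending;   /* "run the handler again" request       */

    static void nmi(bool simulate_nested)
    {
        if (nmi_executing) {          /* nested NMI: just ask for a re-run */
            repeat_pending = true;
            return;
        }
        nmi_executing = true;
        do {
            repeat_pending = false;
            printf("NMI handler runs\n");
            if (simulate_nested) {
                simulate_nested = false;
                nmi(false);           /* a second NMI lands mid-handler */
            }
        } while (repeat_pending);
        nmi_executing = false;
    }

    int main(void)
    {
        nmi(true);   /* handler runs twice: once, plus one repeat */
        return 0;
    }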