Lines Matching refs:esp

72 addl $(4 + \pop), %esp
98 add $\pop, %esp
103 99: movl $0, (%esp)
110 98: mov PT_GS(%esp), %gs
114 99: movl $0, PT_GS(%esp)
124 movl \reg, PT_GS(%esp)
148 testl $USER_SEGMENT_RPL_MASK, PT_CS(%esp)
188 andl $0x0000ffff, 4*4(%esp)
191 testl $X86_EFLAGS_VM, 5*4(%esp)
194 testl $USER_SEGMENT_RPL_MASK, 4*4(%esp)
197 orl $CS_FROM_KERNEL, 4*4(%esp)
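
Lines 188-197 decide whether the frame being fixed up belongs to the
kernel: the high half of the saved CS slot is cleared, vm86 frames and
frames whose CS carries user RPL bits are left alone, and only a genuine
kernel frame gets the CS_FROM_KERNEL marker. Sketched in C below; the
USER_SEGMENT_RPL_MASK value and the marker bit are assumptions, while the
VM flag really is bit 17 of EFLAGS:

    #define X86_EFLAGS_VM          (1u << 17)  /* virtual-8086 mode flag     */
    #define USER_SEGMENT_RPL_MASK  0x3u        /* assumed: selector RPL bits */
    #define CS_FROM_KERNEL         (1u << 29)  /* placeholder marker bit     */

    static inline void classify_frame(unsigned int *pt_cs, unsigned int eflags)
    {
        *pt_cs &= 0x0000ffff;                  /* andl $0x0000ffff, 4*4(%esp)   */
        if (eflags & X86_EFLAGS_VM)            /* vm86: treat as a user frame   */
            return;
        if (*pt_cs & USER_SEGMENT_RPL_MASK)    /* user RPL -> user frame        */
            return;
        *pt_cs |= CS_FROM_KERNEL;              /* orl $CS_FROM_KERNEL, 4*4(%esp) */
    }
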
232 pushl %esp # sp (points at ss)
233 addl $7*4, (%esp) # point sp back at the previous context
234 pushl 7*4(%esp) # flags
235 pushl 7*4(%esp) # cs
236 pushl 7*4(%esp) # ip
237 pushl 7*4(%esp) # orig_eax
238 pushl 7*4(%esp) # gs / function
239 pushl 7*4(%esp) # fs
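
Lines 232-239 rebuild a full frame, including the ss/sp pair that a
same-privilege trap does not push, by re-copying the existing words on top
of a synthesized ss/sp. The fixed 7*4(%esp) source offset works because an
x86 push computes its memory source address before moving the stack
pointer, so each push reaches one word further up the original frame. A
small C model of that arithmetic (slot names follow the comments in the
listing; the values and the 0x68 selector are arbitrary):

    #include <stdio.h>

    enum { FS, GS, ORIG_EAX, IP, CS, FLAGS, NSLOTS };   /* low -> high address */
    static const char *name[NSLOTS] =
        { "fs", "gs", "orig_eax", "ip", "cs", "flags" };

    int main(void)
    {
        unsigned long stack[32];
        int sp = 32;                            /* empty, grows downward      */

        for (int i = NSLOTS - 1; i >= 0; i--)   /* original trap frame        */
            stack[--sp] = 0x1000 + i;

        stack[--sp] = 0x68;                     /* pushl %ss                  */
        sp--;                                   /* pushl %esp: saves the old  */
        stack[sp] = (unsigned long)(sp + 1);    /* top (word-index units)     */
        stack[sp] += 7;                         /* addl $7*4, (%esp)          */

        for (int i = 0; i < 6; i++) {           /* six pushl 7*4(%esp) copies */
            unsigned long v = stack[sp + 7];    /* source: old top + 7 words  */
            stack[--sp] = v;
        }

        for (int i = 0; i < 6; i++)             /* the copy mirrors the frame */
            printf("%-8s = %#lx\n", name[i], stack[sp + i]);
        return 0;
    }

The same copy pattern, with a 4*4(%esp) offset, appears again at lines
1298-1304.
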
251 testl $CS_FROM_KERNEL, 1*4(%esp)
261 movl 5*4(%esp), %eax # (modified) regs->sp
263 movl 4*4(%esp), %ecx # flags
266 movl 3*4(%esp), %ecx # cs
270 movl 2*4(%esp), %ecx # ip
273 movl 1*4(%esp), %ecx # eax
277 lea -4*4(%eax), %esp
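
Lines 251-277 undo that fixup on the way out: the (possibly modified)
regs->sp is fetched, the saved flags, cs, ip and a scratch copy of eax are
written to the four words just below it (those stores do not reference
%esp, so they do not appear in this listing), and %esp is finally pointed
at that block so the return runs from the original stack location. A
hedged C sketch of the layout this implies; the store order is inferred
from what a popl plus iret needs to find, not read from the listing:

    /* regs_sp is the saved regs->sp from 5*4(%esp); returns the new %esp. */
    static unsigned long *build_iret_frame(unsigned long *regs_sp,
                                           unsigned long eflags,
                                           unsigned long cs,
                                           unsigned long ip,
                                           unsigned long eax)
    {
        regs_sp[-1] = eflags;            /* highest of the four words     */
        regs_sp[-2] = cs;
        regs_sp[-3] = ip;
        regs_sp[-4] = eax;               /* popped back first on exit     */
        return regs_sp - 4;              /* lea -4*4(%eax), %esp          */
    }
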
354 4: movl $0, (%esp)
356 5: movl $0, (%esp)
358 6: movl $0, (%esp)
396 movl PT_EFLAGS(%esp), %eax # mix EFLAGS, SS and CS
402 movb PT_OLDSS(%esp), %ah
403 movb PT_CS(%esp), %al
419 mov %esp, %edx /* load kernel esp */
420 mov PT_OLDESP(%esp), %eax /* load userspace esp */
434 lss (%esp), %esp /* switch to espfix segment */
464 subl %esp, %ecx /* ecx = (end of entry_stack) - esp */
469 movl %esp, %esi
481 movl PT_EFLAGS(%esp), %ecx # mix EFLAGS and CS
482 movb PT_CS(%esp), %cl
485 movl PT_CS(%esp), %ecx
511 movl %edi, %esp
564 orl $CS_FROM_ENTRY_STACK, PT_CS(%esp)
572 orl $CS_FROM_USER_CR3, PT_CS(%esp)
604 testl $(X86_EFLAGS_VM), PT_EFLAGS(%esp)
616 movl %esp, %esi
631 movl %ebx, %esp
650 testl $CS_FROM_ENTRY_STACK, PT_CS(%esp)
656 andl $(~CS_FROM_ENTRY_STACK), PT_CS(%esp)
659 movl %esp, %esi
682 movl %ebx, %esp
688 testl $CS_FROM_USER_CR3, PT_CS(%esp)
692 andl $(~CS_FROM_USER_CR3), PT_CS(%esp)
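
Lines 564, 572, 650-656 and 688-692 show a small marker scheme: since CS
is only a 16-bit selector, the high bits of its 32-bit save slot are free
(line 188 masks the slot down to those 16 bits), so the entry code sets
flags there (CS_FROM_ENTRY_STACK, CS_FROM_USER_CR3) and the exit code
tests and clears them again. A minimal C sketch of the idea; the bit
positions below are placeholders, not the kernel's definitions:

    #include <stdbool.h>

    #define CS_FROM_ENTRY_STACK (1u << 31)   /* placeholder bit */
    #define CS_FROM_USER_CR3    (1u << 30)   /* placeholder bit */

    /* orl $FLAG, PT_CS(%esp): record a fact about this entry. */
    static inline void mark_cs(unsigned int *pt_cs, unsigned int flag)
    {
        *pt_cs |= flag;
    }

    /* testl $FLAG / andl $(~FLAG), PT_CS(%esp): consume it on exit. */
    static inline bool test_and_clear_cs(unsigned int *pt_cs, unsigned int flag)
    {
        bool was_set = (*pt_cs & flag) != 0;
        *pt_cs &= ~flag;
        return was_set;
    }
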
728 movl %esp, %eax
729 movl PT_ORIG_EAX(%esp), %edx /* get the vector from stack */
730 movl $-1, PT_ORIG_EAX(%esp) /* no syscall to restart */
777 movl %esp, TASK_threadsp(%eax)
778 movl TASK_threadsp(%edx), %esp
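
Lines 777-778 are the core of the context switch: the outgoing task's
kernel stack pointer is saved into its task structure and the incoming
task's is loaded in its place. A conceptual C sketch of the data movement
only; the structure, field and argument names are stand-ins for whatever
TASK_threadsp resolves to, and a real switch of course changes the stack
being executed on:

    struct task_stub {
        unsigned long threadsp;              /* saved kernel stack pointer */
    };

    static inline void switch_stack_pointers(struct task_stub *prev,
                                             struct task_stub *next,
                                             unsigned long *esp)
    {
        prev->threadsp = *esp;               /* movl %esp, TASK_threadsp(%eax) */
        *esp = next->threadsp;               /* movl TASK_threadsp(%edx), %esp */
    }
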
842 movl %esp, %eax
854 movl $0, PT_EAX(%esp)
915 movl TSS_entry2task_stack(%esp), %esp
945 testl $X86_EFLAGS_NT|X86_EFLAGS_AC|X86_EFLAGS_TF, PT_EFLAGS(%esp)
949 movl %esp, %eax
968 movl PT_EFLAGS(%esp), %edi
969 movl PT_EAX(%esp), %esi
974 movl PT_EIP(%esp), %edx /* pt_regs->ip */
975 movl PT_OLDESP(%esp), %ecx /* pt_regs->sp */
976 1: mov PT_FS(%esp), %fs
980 addl $2*4, %esp /* skip pt_regs->cx and pt_regs->dx */
986 movl %eax, %esp
996 btrl $X86_EFLAGS_IF_BIT, (%esp)
1009 2: movl $0, PT_FS(%esp)
1056 movl %esp, %eax
1118 subl $2*4, %esp
1119 sgdt (%esp)
1120 movl 2(%esp), %ecx /* GDT address */
1128 addl $2*4, %esp
1130 addl %esp, %eax /* the adjusted stack pointer */
1133 lss (%esp), %esp /* switch to the normal stack segment */
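
Lines 1118-1133 recover the normal kernel stack while still running on the
16-bit espfix stack segment: sgdt drops its 6-byte pseudo-descriptor into
scratch space reserved on the stack, the GDT base is read from offset 2 of
that blob, and once the adjusted stack pointer has been computed the code
far-loads SS:ESP with lss from a far pointer built on the stack. Two small
C structs as a sketch of the two in-memory layouts involved:

    #include <stdint.h>

    /* What "sgdt (%esp)" stores: a 16-bit limit followed by the 32-bit
     * linear base, which is why the base is read with "movl 2(%esp)". */
    struct gdt_ptr {
        uint16_t limit;
        uint32_t base;
    } __attribute__((packed));

    /* What "lss (%esp), %esp" consumes: a 32-bit stack offset followed by
     * a 16-bit stack-segment selector (an m16:32 far pointer). */
    struct stack_farptr {
        uint32_t offset;     /* becomes %esp */
        uint16_t selector;   /* becomes %ss  */
    } __attribute__((packed));

The same lss far load ends the espfix return path at line 434 and the NMI
unwind at line 1330.
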
1157 movl PT_GS(%esp), %edi # get the function address
1162 movl PT_ORIG_EAX(%esp), %edx # get the error code
1163 movl $-1, PT_ORIG_EAX(%esp) # no syscall to restart
1165 movl %esp, %eax # pt_regs pointer
1170 movl PT_EFLAGS(%esp), %eax # mix EFLAGS and CS
1171 movb PT_CS(%esp), %al
1177 movl PT_CS(%esp), %eax
1189 movl %esp, %eax
1259 movl %esp, %eax # pt_regs pointer
1277 movl %esp, %ebx
1278 movl PER_CPU_VAR(cpu_current_top_of_stack), %esp
1280 movl %ebx, %esp
1284 testl $CS_FROM_ESPFIX, PT_CS(%esp)
1298 pushl %esp
1299 addl $4, (%esp)
1302 pushl 4*4(%esp) # flags
1303 pushl 4*4(%esp) # cs
1304 pushl 4*4(%esp) # ip
1312 xorl $(CS_FROM_ESPFIX | CS_FROM_KERNEL), PT_CS(%esp)
1315 movl %esp, %eax # pt_regs pointer
1330 lss (1+5+6)*4(%esp), %esp # back to espfix stack
1341 leal -TOP_OF_KERNEL_STACK_PADDING-PTREGS_SIZE(%esi), %esp
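
The final line parks %esp at the pt_regs area that sits just below the
optional padding at the top of a task's kernel stack; %esi appears to hold
that stack top, loaded by a neighbouring instruction that does not
reference %esp and so is not part of this listing. The arithmetic as a
one-line C sketch, with the two size constants taken as given:

    /* leal -TOP_OF_KERNEL_STACK_PADDING-PTREGS_SIZE(%esi), %esp */
    static inline unsigned long pt_regs_sp(unsigned long stack_top,
                                           unsigned long padding,     /* TOP_OF_KERNEL_STACK_PADDING */
                                           unsigned long ptregs_size) /* PTREGS_SIZE */
    {
        return stack_top - padding - ptregs_size;
    }
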