/* SPDX-License-Identifier: GPL-2.0-only */

/*
 * For dropping from long mode to protected mode.
 *
 * For reference see "AMD64 Architecture Programmer's Manual Volume 2",
 * Document 24593-Rev. 3.31-July 2019 Chapter 5.3
 *
 * Clobbers: rax, rbx, rcx, rdx
 */
.code64

#include <cpu/x86/msr.h>
#include <cpu/x86/cr.h>
#if defined(__RAMSTAGE__)
#include <arch/ram_segs.h>
#define CODE_SEG RAM_CODE_SEG
#define DATA_SEG RAM_DATA_SEG
#else
#include <arch/rom_segs.h>
#define CODE_SEG ROM_CODE_SEG
#define DATA_SEG ROM_DATA_SEG
#endif
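
/*
 * Note: CODE_SEG and DATA_SEG are assumed here to name flat 32-bit code
 * and data selectors in the GDT of the current stage (the RAM-based GDT
 * in ramstage, the ROM GDT otherwise); those descriptors must already
 * be set up before this code runs.
 */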

drop_longmode:
#if !ENV_CACHE_AS_RAM
	/* Ensure the cache is clean. Skipped while cache-as-RAM is
	 * active, since WBINVD would flush out the CAR contents. */
	wbinvd
#endif
	/* Set 32-bit code segment and ss */
	mov	$CODE_SEG, %rcx
	/* SetCodeSelector32 will drop us to protected mode on return */
	call	SetCodeSelector32

	/* SetCodeSelector32 returns here in 32-bit mode; jump over its body */
.code32
	jmp	__longmode_compatibility

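/*
 * SetCodeSelector32 hand-builds an interrupt-return frame: iretq pops
 * RIP, CS, RFLAGS, RSP and SS in that order, so the values are pushed
 * in reverse below. Loading a 32-bit code selector via iretq is what
 * switches execution to 32-bit compatibility mode.
 */
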
.align 8
.code64
SetCodeSelector32:
	# pop the return address from the stack
	pop	%rbx

	# save rsp because we need to push it after ss
	mov	%rsp, %rdx

	# use iret to jump to a 32-bit offset in a new code segment
	# iret will pop cs:rip, flags, then ss:rsp
	mov	%ss, %ax	# need to push ss, but 'push %ss' is not
	push	%rax		# valid in 64-bit mode, so go through rax
	push	%rdx		# the rsp to load
	pushfq			# push rflags
	push	%rcx		# rcx holds the code segment selector from the caller
	push	%rbx		# push the IP for the next instruction

	# the iretq will behave like ret, with the new cs/ss values loaded
	iretq

.align 4
.code32
__longmode_compatibility:
	/* Running in 32-bit compatibility mode */

	/* Use flat data segment */
	movl	$DATA_SEG, %eax
	movl	%eax, %ds
	movl	%eax, %es
	movl	%eax, %ss
	movl	%eax, %fs

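	/*
	 * Per the AMD manual chapter referenced above, long mode is left
	 * in this order: disable paging first (CR0.PG = 0), then clear
	 * EFER.LME; only afterwards may PAE be disabled and CR3 reloaded.
	 */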
	/* Disable paging. */
	movl	%cr0, %eax
	andl	$(~CR0_PG), %eax
	movl	%eax, %cr0

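	/*
	 * EFER is accessed through rdmsr/wrmsr in edx:eax; LME sits in
	 * the low half, so only eax is modified below and the high half
	 * read into edx is written back unchanged.
	 */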
	/* Disable long mode. */
	movl	$(IA32_EFER), %ecx
	rdmsr
	andl	$(~EFER_LME), %eax
	wrmsr

	/* Disable PAE. */
	movl	%cr4, %eax
	andl	$(~CR4_PAE), %eax
	movl	%eax, %cr4

	/* Clear page table register */
	xor	%eax, %eax
	movl	%eax, %cr3

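	/*
	 * With paging off, CR3 is ignored until paging is re-enabled;
	 * clearing it just avoids leaving a stale pointer to the
	 * long-mode page tables behind.
	 */
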
__longmode_exit:
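
/*
 * Usage sketch (hypothetical, not part of the original file): a 64-bit
 * stage would jump here with interrupts disabled and fall through
 * __longmode_exit into whatever 32-bit code follows, e.g.:
 *
 *	cli
 *	jmp	drop_longmode
 */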