;------------------------------------------------------------------------------ ;
; Copyright (c) 2016, Intel Corporation. All rights reserved.<BR>
; This program and the accompanying materials
; are licensed and made available under the terms and conditions of the BSD License
; which accompanies this distribution. The full text of the license may be found at
; http://opensource.org/licenses/bsd-license.php.
;
; THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
; WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
;
; Module Name:
;
;   SmmInit.nasm
;
; Abstract:
;
;   Functions for relocating SMBASE's for all processors
;
;-------------------------------------------------------------------------------

extern ASM_PFX(SmmInitHandler)
extern ASM_PFX(mRebasedFlag)
extern ASM_PFX(mSmmRelocationOriginalAddress)

global ASM_PFX(gSmmCr3)
global ASM_PFX(gSmmCr4)
global ASM_PFX(gSmmCr0)
global ASM_PFX(gSmmJmpAddr)
global ASM_PFX(gSmmInitStack)
global ASM_PFX(gcSmiInitGdtr)
global ASM_PFX(gcSmmInitSize)
global ASM_PFX(gcSmmInitTemplate)
global ASM_PFX(mRebasedFlagAddr32)
global ASM_PFX(mSmmRelocationOriginalAddressPtr32)

    DEFAULT REL
    SECTION .text

;
; GDTR image loaded by SmmStartup below.  The limit/base fields are zero here;
; presumably they are filled in at runtime before the init code runs -- the
; patch site is outside this file, TODO confirm.
;
ASM_PFX(gcSmiInitGdtr):
            DW      0
            DQ      0

;
; SmmStartup -- entry code executed on the first SMI while relocating SMBASE.
;
; Although this file is assembled as 64-bit code, execution arrives here from
; the 16-bit stub gcSmmInitTemplate (below), i.e. while the CPU is still in
; the 16-bit SMM startup environment.  The hand-emitted DB prefix bytes make
; the early instructions decode as intended in 16-bit mode; the code enables
; paging/long mode and far-jumps to @LongMode.
;
; The DD/DQ "data" items interleaved below (gSmmCr3, gSmmCr4, gSmmCr0,
; gSmmJmpAddr, gSmmInitStack) are actually the immediate operands of the
; preceding DB-encoded opcodes; they are patched at runtime with the real
; CR3/CR4/CR0 values, far-jump target, and stack top (patching code is
; outside this file).
;
global ASM_PFX(SmmStartup)
ASM_PFX(SmmStartup):
    DB      0x66, 0xb8                  ; mov eax, imm32 (imm32 = gSmmCr3 below)
ASM_PFX(gSmmCr3): DD 0
    mov     cr3, rax                    ; executes as "mov cr3, eax" in 16-bit mode
    DB      0x66, 0x2e                  ; 0x66 = 32-bit operand size, 0x2e = CS override
    lgdt    [ebp + (ASM_PFX(gcSmiInitGdtr) - ASM_PFX(SmmStartup))]
                                        ; ebp = flat address of SmmStartup, set by
                                        ; gcSmmInitTemplate before jumping here
    DB      0x66, 0xb8                  ; mov eax, imm32 (imm32 = gSmmCr4 below)
ASM_PFX(gSmmCr4): DD 0
    or      ah, 2                       ; enable XMM registers access
                                        ; (sets bit 9 of eax = CR4.OSFXSR)
    mov     cr4, rax
    DB      0x66                        ; 32-bit operand size for the mov below
    mov     ecx, 0xc0000080             ; IA32_EFER MSR
    rdmsr
    or      ah, 1                       ; set LME bit (enable long mode on paging)
    wrmsr
    DB      0x66, 0xb8                  ; mov eax, imm32 (imm32 = gSmmCr0 below)
ASM_PFX(gSmmCr0): DD 0
    mov     cr0, rax                    ; enable protected mode & paging
    DB      0x66, 0xea                  ; far jmp to long mode (jmp ptr16:32)
ASM_PFX(gSmmJmpAddr): DQ @LongMode      ; low 32 bits = offset of @LongMode; the
                                        ; 16-bit code selector half is presumably
                                        ; patched at runtime -- TODO confirm
@LongMode:                              ; long-mode starts here
    DB      0x48, 0xbc                  ; mov rsp, imm64 (imm64 = gSmmInitStack below)
ASM_PFX(gSmmInitStack): DQ 0
    and     sp, 0xfff0                  ; make sure RSP is 16-byte aligned
                                        ; (16-bit op touches only SP, which is all
                                        ; that alignment requires)
    ;
    ; According to the X64 calling convention, XMM0~5 are volatile; we need to
    ; save them before calling the C function and restore them afterwards so
    ; the interrupted context is not corrupted on RSM.
    ;
    sub     rsp, 0x60
    movdqa  [rsp], xmm0
    movdqa  [rsp + 0x10], xmm1
    movdqa  [rsp + 0x20], xmm2
    movdqa  [rsp + 0x30], xmm3
    movdqa  [rsp + 0x40], xmm4
    movdqa  [rsp + 0x50], xmm5

    add     rsp, -0x20                  ; 32-byte shadow space required by the
                                        ; MS x64 calling convention
    call    ASM_PFX(SmmInitHandler)
    add     rsp, 0x20

    ;
    ; Restore XMM0~5 after calling C-function.
    ;
    movdqa  xmm0, [rsp]
    movdqa  xmm1, [rsp + 0x10]
    movdqa  xmm2, [rsp + 0x20]
    movdqa  xmm3, [rsp + 0x30]
    movdqa  xmm4, [rsp + 0x40]
    movdqa  xmm5, [rsp + 0x50]

    rsm                                 ; resume interrupted context (stack need not
                                        ; be unwound; RSM reloads the saved state)

BITS 16
;
; gcSmmInitTemplate -- 16-bit stub placed at the SMM entry point
; (SMBASE + 0x8000, as reflected in the cs-relative displacement below).
; It loads the flat address of SmmStartup from the @L1 slot and jumps there.
;
ASM_PFX(gcSmmInitTemplate):
    mov ebp, [cs:@L1 - ASM_PFX(gcSmmInitTemplate) + 0x8000]
    sub ebp, 0x30000                    ; NOTE(review): presumably rebases the
                                        ; address for the staging SMBASE (0x30000)
                                        ; -- confirm against the relocation code
    jmp ebp
@L1:
    DQ ASM_PFX(SmmStartup)

; Number of bytes in the init template above (copied into SMRAM by the caller).
ASM_PFX(gcSmmInitSize): DW $ - ASM_PFX(gcSmmInitTemplate)

BITS 64
;
; SmmRelocationSemaphoreComplete -- 64-bit tail of the relocation SMI handler:
; signals completion by writing 1 through the pointer stored at mRebasedFlag,
; then jumps to the original SMI handler address stored at
; mSmmRelocationOriginalAddress.
;
global ASM_PFX(SmmRelocationSemaphoreComplete)
ASM_PFX(SmmRelocationSemaphoreComplete):
    push    rax                         ; preserve rax across the flag write
    mov     rax, [ASM_PFX(mRebasedFlag)]
    mov     byte [rax], 1
    pop     rax
    jmp     [ASM_PFX(mSmmRelocationOriginalAddress)]

;
; Semaphore code running in 32-bit mode
;
; Hand-encoded because the two 32-bit displacements (mRebasedFlagAddr32 and
; mSmmRelocationOriginalAddressPtr32) are placeholders patched at runtime by
; code outside this file.
;
global ASM_PFX(SmmRelocationSemaphoreComplete32)
ASM_PFX(SmmRelocationSemaphoreComplete32):
    ;
    ; mov byte ptr [], 1
    ;
    db      0xc6, 0x5
ASM_PFX(mRebasedFlagAddr32): dd 0
    db      1
    ;
    ; jmp dword ptr []
    ;
    db      0xff, 0x25
ASM_PFX(mSmmRelocationOriginalAddressPtr32): dd 0