/* SPDX-License-Identifier: GPL-2.0-only */

#ifndef __ARM_ARM64_ASM_H
#define __ARM_ARM64_ASM_H

/*
 * Symbol-definition helpers for hand-written AArch64 assembly (.S files).
 * Each multi-line macro expands to a single logical line, so consecutive
 * assembler directives are separated with ';'.
 */

/* Mark 'name' as a function symbol and close it (emits its .size via END). */
#define ENDPROC(name) \
	.type name, %function; \
	END(name)

/*
 * Open a global symbol 'name' in its own .text.name section ("ax",
 * %progbits = allocatable + executable), aligned to 2^bits bytes.
 * NOTE(review): on this target .align takes a power-of-two exponent,
 * so bits=2 means 4-byte alignment. Leaves 'name:' as the last token
 * so the caller's code follows the label directly.
 */
#define ENTRY_WITH_ALIGN(name, bits) \
	.section .text.name, "ax", %progbits; \
	.global name; \
	.align bits; \
	name:

/* Default entry point: 2^2 = 4-byte (A64 instruction) alignment. */
#define ENTRY(name) ENTRY_WITH_ALIGN(name, 2)

/* Record the symbol's size as "current location minus start of 'name'". */
#define END(name) \
	.size name, .-name

/*
 * Certain SoCs have an alignment requirement for the CPU reset vector.
 * Align to a 64 byte typical cacheline for now.
 */
#define CPU_RESET_ENTRY(name) ENTRY_WITH_ALIGN(name, 6)

/* Like ENTRY, but marks the symbol weak so another definition may override it. */
#define ENTRY_WEAK(name) \
	ENTRY(name) \
	.weak name \

/*
 * CURRENT_EL(reg) pastes the exception-level suffix for the EL this image
 * is configured to run at onto a system-register base name, e.g.
 * CURRENT_EL(sctlr) -> sctlr_el3 when CONFIG_ARM64_CURRENT_EL == 3.
 */
#if CONFIG_ARM64_CURRENT_EL == 1
#define CURRENT_EL(reg) reg##_el1
#elif CONFIG_ARM64_CURRENT_EL == 2
#define CURRENT_EL(reg) reg##_el2
#elif CONFIG_ARM64_CURRENT_EL == 3
#define CURRENT_EL(reg) reg##_el3
#else
#error "Invalid setting for CONFIG_ARM64_CURRENT_EL!"
#endif

#endif /* __ARM_ARM64_ASM_H */