/arch/parisc/include/asm/ |
D | cache.h |
    43 ALTERNATIVE(ALT_COND_NO_SMP, INSN_PxTLB) \
    46 ALTERNATIVE(ALT_COND_NO_SMP, INSN_PxTLB) \
    47 ALTERNATIVE(ALT_COND_NO_SPLIT_TLB, INSN_NOP) \
    50 ALTERNATIVE(ALT_COND_NO_SMP, INSN_PxTLB) \
    54 ALTERNATIVE(ALT_COND_NO_DCACHE, INSN_NOP) \
    55 ALTERNATIVE(ALT_COND_NO_IOC_FDC, INSN_NOP) \
    58 ALTERNATIVE(ALT_COND_NO_DCACHE, INSN_NOP) \
    59 ALTERNATIVE(ALT_COND_NO_IOC_FDC, INSN_NOP) :::"memory")
|
D | alternative.h |
    36 #define ALTERNATIVE(cond, replacement) "!0:" \ macro
    45 #define ALTERNATIVE(from, to, cond, replacement)\ macro
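The two-argument form (line 36) is a string pasted into inline asm: it tags the instruction emitted just before it so the boot-time patcher can rewrite that instruction when the condition holds. The four-argument from/to form (line 45) patches a whole labelled range in assembly sources such as pacache.S further down. A minimal sketch of the first form, assuming kernel context (it mirrors the pdtlb() wrapper the cache.h hits above sit inside):

	#define pdtlb_sketch(addr)					\
		asm volatile("pdtlb 0(%%sr1,%0)"			\
			     /* on !SMP, patched to the local purge */	\
			     ALTERNATIVE(ALT_COND_NO_SMP, INSN_PxTLB)	\
			     : : "r" (addr) : "memory")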
|
/arch/x86/include/asm/ |
D | smap.h |
    25 ALTERNATIVE "", __ASM_CLAC, X86_FEATURE_SMAP
    28 ALTERNATIVE "", __ASM_STAC, X86_FEATURE_SMAP
    58 ALTERNATIVE("", "pushf; pop %0; " __ASM_CLAC "\n\t", in smap_save()
    68 ALTERNATIVE("", "push %0; popf\n\t", in smap_restore()
    75 ALTERNATIVE("", __ASM_CLAC, X86_FEATURE_SMAP)
    77 ALTERNATIVE("", __ASM_STAC, X86_FEATURE_SMAP)
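Lines 75 and 77 back the C-level clac()/stac() helpers via the alternative() wrapper from <asm/alternative.h>; a minimal sketch, assuming kernel context:

	static __always_inline void clac_sketch(void)
	{
		/* nothing is emitted on CPUs without SMAP; CLAC is
		 * patched in where X86_FEATURE_SMAP is set (the wrapper's
		 * "memory" clobber orders it against user accesses) */
		alternative("", __ASM_CLAC, X86_FEATURE_SMAP);
	}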
|
D | barrier.h |
    15 #define mb() asm volatile(ALTERNATIVE("lock; addl $0,-4(%%esp)", "mfence", \
    17 #define rmb() asm volatile(ALTERNATIVE("lock; addl $0,-4(%%esp)", "lfence", \
    19 #define wmb() asm volatile(ALTERNATIVE("lock; addl $0,-4(%%esp)", "sfence", \
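These are the 32-bit definitions; the feature argument is truncated above (in mainline it is X86_FEATURE_XMM2). The idiom: a locked stack access is the barrier every CPU can execute, and SSE2 parts are patched at boot to the dedicated fence. Sketch of mb():

	/* full barrier: locked RMW on pre-SSE2 CPUs, MFENCE otherwise */
	#define mb_sketch() asm volatile(				\
		ALTERNATIVE("lock; addl $0,-4(%%esp)", "mfence",	\
			    X86_FEATURE_XMM2) ::: "memory")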
|
D | arch_hweight.h |
    19 asm (ALTERNATIVE("call __sw_hweight32", "popcntl %1, %0", X86_FEATURE_POPCNT) in __arch_hweight32()
    47 asm (ALTERNATIVE("call __sw_hweight64", "popcntq %1, %0", X86_FEATURE_POPCNT) in __arch_hweight64()
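A sketch of __arch_hweight32() as listed above, assuming x86-64: the real code hides its register constraints behind REG_IN/REG_OUT macros; "D" in and "a" out shown here match the 64-bit expansion and the software fallback's calling convention.

	static __always_inline unsigned int hweight32_sketch(unsigned int w)
	{
		unsigned int res;

		/* software popcount by default; patched to a single
		 * POPCNTL once X86_FEATURE_POPCNT is detected */
		asm(ALTERNATIVE("call __sw_hweight32", "popcntl %1, %0",
				X86_FEATURE_POPCNT)
		    : "=a" (res) : "D" (w));
		return res;
	}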
|
D | alternative.h |
    161 #define ALTERNATIVE(oldinstr, newinstr, feature) \ macro
    212 asm_inline volatile (ALTERNATIVE(oldinstr, newinstr, feature) : : : "memory")
    229 asm_inline volatile (ALTERNATIVE(oldinstr, newinstr, feature) \
    248 asm_inline volatile (ALTERNATIVE(oldinstr, newinstr, feature) \
    253 asm_inline volatile (ALTERNATIVE("call %P[old]", "call %P[new]", feature) \
    328 .macro ALTERNATIVE oldinstr, newinstr, feature
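Line 212 is the alternative() wrapper most C callers use; line 253 swaps one call target for another, so both callees must share a calling convention. A hedged sketch of the call-patching form, assuming kernel context: flush_slow/flush_fast are hypothetical names, the feature bit is arbitrary, and the clobber list covers the x86-64 caller-saved registers.

	extern void flush_slow(void);	/* hypothetical baseline  */
	extern void flush_fast(void);	/* hypothetical optimized */

	static inline void flush_sketch(void)
	{
		asm_inline volatile(ALTERNATIVE("call %P[old]", "call %P[new]",
						X86_FEATURE_XSAVEOPT)
				    : : [old] "i" (flush_slow),
					[new] "i" (flush_fast)
				    : "rax", "rcx", "rdx", "rsi", "rdi",
				      "r8", "r9", "r10", "r11", "memory");
	}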
|
D | irqflags.h | 145 #define SWAPGS ALTERNATIVE "swapgs", "", X86_FEATURE_XENPV
|
/arch/arm64/include/asm/ |
D | irqflags.h |
    35 asm volatile(ALTERNATIVE( in arch_local_irq_enable()
    54 asm volatile(ALTERNATIVE( in arch_local_irq_disable()
    70 asm volatile(ALTERNATIVE( in arch_local_save_flags()
    85 asm volatile(ALTERNATIVE( in arch_irqs_disabled_flags()
    122 asm volatile(ALTERNATIVE( in arch_local_irq_restore()
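All five helpers share one shape: without pseudo-NMI support the DAIF bits are toggled directly, and CPUs with ARM64_HAS_IRQ_PRIO_MASKING are patched to go through the GIC priority mask instead. A sketch of the enable path, assuming kernel context (recent kernels unmask IRQ and FIQ together):

	static inline void irq_enable_sketch(void)
	{
		asm volatile(ALTERNATIVE(
			"msr	daifclr, #3",		/* unmask IRQ+FIQ   */
			__msr_s(SYS_ICC_PMR_EL1, "%0"),	/* or raise the PMR */
			ARM64_HAS_IRQ_PRIO_MASKING)
			:
			: "r" ((unsigned long) GIC_PRIO_IRQON)
			: "memory");
	}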
|
D | io.h |
    52 asm volatile(ALTERNATIVE("ldrb %w0, [%1]", in __raw_readb()
    64 asm volatile(ALTERNATIVE("ldrh %w0, [%1]", in __raw_readw()
    75 asm volatile(ALTERNATIVE("ldr %w0, [%1]", in __raw_readl()
    86 asm volatile(ALTERNATIVE("ldr %0, [%1]", in __raw_readq()
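Each accessor is a plain load by default, patched to a load-acquire on SoCs whose erratum requires ordered device reads. Sketch of the 32-bit read, assuming kernel context (the capability name matches the mainline workaround):

	static __always_inline u32 raw_readl_sketch(const volatile void __iomem *addr)
	{
		u32 val;

		asm volatile(ALTERNATIVE("ldr %w0, [%1]",
					 "ldapr %w0, [%1]",
					 ARM64_WORKAROUND_DEVICE_LOAD_ACQUIRE)
			     : "=r" (val) : "r" (addr));
		return val;
	}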
|
D | mte-kasan.h |
    55 asm volatile(ALTERNATIVE("nop", SET_PSTATE_TCO(0), in mte_disable_tco()
    61 asm volatile(ALTERNATIVE("nop", SET_PSTATE_TCO(1), in mte_enable_tco()
|
D | uaccess.h |
    153 asm(ALTERNATIVE("nop", SET_PSTATE_PAN(0), ARM64_HAS_PAN, in __uaccess_disable_hw_pan()
    159 asm(ALTERNATIVE("nop", SET_PSTATE_PAN(1), ARM64_HAS_PAN, in __uaccess_enable_hw_pan()
    182 asm volatile(ALTERNATIVE("nop", SET_PSTATE_TCO(0), in __uaccess_disable_tco()
    188 asm volatile(ALTERNATIVE("nop", SET_PSTATE_TCO(1), in __uaccess_enable_tco()
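The mte-kasan.h and uaccess.h entries share one idiom: a NOP that feature detection patches into an MSR touching PSTATE. The PAN pair also uses the variadic fourth argument from alternative-macros.h below as a Kconfig gate. Sketch, assuming kernel context:

	static inline void uaccess_enable_pan_sketch(void)
	{
		/* NOP without PAN; SET_PSTATE_PAN(1) once ARM64_HAS_PAN
		 * is detected, compiled in only under CONFIG_ARM64_PAN */
		asm(ALTERNATIVE("nop", SET_PSTATE_PAN(1), ARM64_HAS_PAN,
				CONFIG_ARM64_PAN));
	}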
|
D | lse.h | 37 ALTERNATIVE(llsc, __LSE_PREAMBLE lse, ARM64_HAS_LSE_ATOMICS)
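ARM64_LSE_ATOMIC_INSN picks between an LL/SC sequence and an LSE atomic (the preamble enables the extension for the assembler). A hedged sketch of an atomic add built on it; the NOP padding keeps both alternatives the same length, which the arm64 patcher requires:

	static inline void atomic_add_sketch(int i, atomic_t *v)
	{
		unsigned long tmp;
		int result;

		asm volatile(ARM64_LSE_ATOMIC_INSN(
		/* LL/SC: exclusive-load/store loop */
		"1:	ldxr	%w0, %2\n"
		"	add	%w0, %w0, %w3\n"
		"	stxr	%w1, %w0, %2\n"
		"	cbnz	%w1, 1b",
		/* LSE: one instruction, padded to match */
		"	stadd	%w3, %2\n"
		"	nop\n"
		"	nop\n"
		"	nop")
		: "=&r" (result), "=&r" (tmp), "+Q" (v->counter)
		: "r" (i));
	}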
|
D | alternative-macros.h |
    208 #define ALTERNATIVE(oldinstr, newinstr, ...) \ macro
    232 #define ALTERNATIVE(oldinstr, newinstr, feature, ...) \
|
D | rwonce.h | 17 ALTERNATIVE( \
|
D | kvm_hyp.h |
    25 asm volatile(ALTERNATIVE(__mrs_s("%0", r##nvh), \
    35 asm volatile(ALTERNATIVE(__msr_s(r##nvh, "%x0"), \
|
/arch/x86/entry/ |
D | calling.h |
    161 ALTERNATIVE "", "SET_NOFLUSH_BIT \reg", X86_FEATURE_PCID
    167 ALTERNATIVE "jmp .Lend_\@", "", X86_FEATURE_PTI
    178 ALTERNATIVE "jmp .Lend_\@", "", X86_FEATURE_PTI
    181 ALTERNATIVE "jmp .Lwrcr3_\@", "", X86_FEATURE_PCID
    218 ALTERNATIVE "jmp .Ldone_\@", "", X86_FEATURE_PTI
    236 ALTERNATIVE "jmp .Lend_\@", "", X86_FEATURE_PTI
    238 ALTERNATIVE "jmp .Lwrcr3_\@", "", X86_FEATURE_PCID
    300 ALTERNATIVE "jmp .Lend_\@", "", X86_FEATURE_KERNEL_IBRS
    329 ALTERNATIVE "jmp .Lend_\@", "", X86_FEATURE_KERNEL_IBRS
    357 ALTERNATIVE "", "lfence", X86_FEATURE_FENCE_SWAPGS_USER
    [all …]
|
D | entry_64.S |
    127 ALTERNATIVE "", "jmp swapgs_restore_regs_and_return_to_usermode", \
    148 ALTERNATIVE "shl $(64 - 48), %rcx; sar $(64 - 48), %rcx", \
    344 ALTERNATIVE "call error_entry; movq %rax, %rsp", \
    606 ALTERNATIVE "", "jmp xenpv_restore_regs_and_return_to_usermode", X86_FEATURE_XENPV
    767 2: ALTERNATIVE "", "mfence", X86_BUG_SWAPGS_FENCE
    784 ALTERNATIVE "", "ZAP_GS", X86_BUG_NULL_SEG
    911 ALTERNATIVE "jmp .Lparanoid_entry_checkgs", "", X86_FEATURE_FSGSBASE
    997 ALTERNATIVE "jmp .Lparanoid_exit_checkgs", "", X86_FEATURE_FSGSBASE
    1434 ALTERNATIVE "jmp nmi_no_fsgsbase", "", X86_FEATURE_FSGSBASE
|
D | entry_64_compat.S |
    146 ALTERNATIVE "testl %eax, %eax; jz swapgs_restore_regs_and_return_to_usermode", \
    266 ALTERNATIVE "testl %eax, %eax; jz swapgs_restore_regs_and_return_to_usermode", \
    376 ALTERNATIVE "", "jmp .Lint80_keep_stack", X86_FEATURE_XENPV
|
/arch/riscv/include/asm/ |
D | errata_list.h |
    20 ALTERNATIVE(__stringify(RISCV_PTR do_trap_insn_fault), \
    26 ALTERNATIVE(__stringify(RISCV_PTR do_page_fault), \
    33 asm(ALTERNATIVE("sfence.vma %0", "sfence.vma", SIFIVE_VENDOR_ID, \
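The RISC-V form takes five arguments: old code, new code, vendor ID, erratum ID, and the Kconfig gate. Line 33 is the SiFive CIP-1200 workaround, where address-ranged sfence.vma is unreliable, so affected parts are patched to a full fence; roughly (kernel context assumed):

	#define flush_tlb_page_sketch(addr)				\
		asm(ALTERNATIVE("sfence.vma %0", "sfence.vma",		\
				SIFIVE_VENDOR_ID,			\
				ERRATA_SIFIVE_CIP_1200,			\
				CONFIG_ERRATA_SIFIVE_CIP_1200)		\
		    : : "r" (addr) : "memory")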
|
/arch/s390/include/asm/ |
D | alternative.h |
    109 #define ALTERNATIVE(oldinstr, altinstr, facility) \ macro
    142 asm_inline volatile(ALTERNATIVE(oldinstr, altinstr, facility) : : : "memory")
    150 asm_inline volatile (ALTERNATIVE(oldinstr, newinstr, feature) \
    155 asm_inline volatile(ALTERNATIVE(oldinstr, altinstr, facility) \
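On s390 the third argument is a facility number probed from the machine's facility list rather than a feature enum (the entry.S hits below use facilities 82 and 40 the same way). A sketch patterned on the spinlock code, which emits nothing by default and patches in a hardware hint once facility 49 is installed; the .insn operands encoding "niai 4" are an assumption borrowed from that code:

	static inline void spin_hint_sketch(void)
	{
		/* no-op unless facility 49 is present; then patched to a
		 * NIAI 4 next-instruction-access-intent hint */
		asm_inline volatile(ALTERNATIVE("", ".insn rre,0xb2fa0000,4,0",
						49)
				    : : : "memory");
	}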
|
/arch/parisc/kernel/ |
D | pacache.S |
    106 ALTERNATIVE(88b, fitdone, ALT_COND_NO_SPLIT_TLB, INSN_NOP)
    244 89: ALTERNATIVE(88b, 89b, ALT_COND_NO_ICACHE, INSN_NOP)
    306 89: ALTERNATIVE(88b, 89b, ALT_COND_NO_DCACHE, INSN_NOP)
    570 ALTERNATIVE(0b, 0b+4, ALT_COND_NO_SMP, INSN_PxTLB)
    571 ALTERNATIVE(1b, 1b+4, ALT_COND_NO_SMP, INSN_PxTLB)
    709 ALTERNATIVE(0b, 0b+4, ALT_COND_NO_SMP, INSN_PxTLB)
    785 ALTERNATIVE(0b, 0b+4, ALT_COND_NO_SMP, INSN_PxTLB)
    817 89: ALTERNATIVE(88b, 89b, ALT_COND_NO_DCACHE, INSN_NOP)
    844 ALTERNATIVE(0b, 0b+4, ALT_COND_NO_SMP, INSN_PxTLB)
    876 89: ALTERNATIVE(88b, 89b, ALT_COND_NO_DCACHE, INSN_NOP)
    [all …]
|
/arch/s390/kernel/ |
D | entry.S |
    86 ALTERNATIVE ".insn s,0xb2050000,\savearea", \
    113 ALTERNATIVE "", ".long 0xb2e8c000", 82
    117 ALTERNATIVE "", ".long 0xb2e8d000", 82
    121 ALTERNATIVE "TSTMSK \tif_ptr,\tif_mask; jz .+8; .long 0xb2e8d000", \
    127 ALTERNATIVE "jz .+8; .long 0xb2e8c000", \
    219 ALTERNATIVE "", ".insn s,0xb2800000,_LPP_OFFSET", 40
|
/arch/x86/entry/vdso/vdso32/ |
D | system_call.S | 60 ALTERNATIVE "", SYSENTER_SEQUENCE, X86_FEATURE_SEP
|
/arch/x86/lib/ |
D | getuser.S |
    38 #define ASM_BARRIER_NOSPEC ALTERNATIVE "", "lfence", X86_FEATURE_LFENCE_RDTSC
    42 ALTERNATIVE __stringify(mov $((1 << 47) - 4096 - (n)),%rdx), \
|
D | memmove_64.S |
    42 ALTERNATIVE "cmp $0x20, %rdx; jb 1f", "", X86_FEATURE_FSRM
    43 ALTERNATIVE "", "jmp .Lmemmove_erms", X86_FEATURE_ERMS
|