
Searched full:alternative (Results 1 – 25 of 1072) sorted by relevance


/kernel/linux/linux-5.10/arch/x86/include/asm/
smap.h:14 #include <asm/alternative.h>
25 ALTERNATIVE "", __ASM_CLAC, X86_FEATURE_SMAP
28 ALTERNATIVE "", __ASM_STAC, X86_FEATURE_SMAP
43 /* Note: a barrier is implicit in alternative() */ in clac()
44 alternative("", __ASM_CLAC, X86_FEATURE_SMAP); in clac()
49 /* Note: a barrier is implicit in alternative() */ in stac()
50 alternative("", __ASM_STAC, X86_FEATURE_SMAP); in stac()
58 ALTERNATIVE("jmp 1f", "", X86_FEATURE_SMAP) in smap_save()
69 ALTERNATIVE("jmp 1f", "", X86_FEATURE_SMAP) in smap_restore()
77 ALTERNATIVE("", __ASM_CLAC, X86_FEATURE_SMAP)
[all …]
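
The smap.h hits above define clac() and stac() as boot-patched CLAC/STAC instructions: empty on CPUs without X86_FEATURE_SMAP, real instructions otherwise. They are rarely called directly; they back the user-access-window helpers. A rough sketch of that surrounding pattern, assuming the v5.10-era generic uaccess API (user_access_begin()/unsafe_put_user()/user_access_end()) rather than anything shown in this file:

```c
#include <linux/errno.h>
#include <linux/types.h>
#include <linux/uaccess.h>	/* user_access_begin(), unsafe_put_user(), ... */

/*
 * Sketch only: write one word to user space inside an explicit access
 * window.  On x86, user_access_begin() boils down to access_ok() plus
 * stac(), and user_access_end() to clac(); both patch away to NOPs on
 * CPUs without X86_FEATURE_SMAP.
 */
static int poke_user_word(u32 __user *uptr, u32 val)
{
	if (!user_access_begin(uptr, sizeof(*uptr)))
		return -EFAULT;
	unsafe_put_user(val, uptr, efault);	/* raw store; faults jump to efault */
	user_access_end();
	return 0;

efault:
	user_access_end();
	return -EFAULT;
}
```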
alternative.h:17 * Alternative inline assembly for SMP.
27 * The SMP alternative tables can be kept after boot and contain both
71 * Debug flag that can be tested to see whether alternative
128 * Pad the second replacement alternative with additional NOPs if it is
129 * additionally longer than the first replacement alternative.
160 /* alternative assembly primitive: */
161 #define ALTERNATIVE(oldinstr, newinstr, feature) \
200 * Alternative instructions for different CPU types or capabilities.
211 #define alternative(oldinstr, newinstr, feature) \
212 asm_inline volatile (ALTERNATIVE(oldinstr, newinstr, feature) : : : "memory")
[all …]
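
These hits are the x86 patching primitive itself: ALTERNATIVE() emits the original instruction together with an entry in the .altinstructions table, and alternative() wraps the result in an asm volatile with a "memory" clobber, which is why the smap.h comments above note that a barrier is implicit. A minimal sketch of a caller, mirroring the barrier_nospec() definition in the barrier.h hits below (the function name is illustrative):

```c
#include <asm/alternative.h>
#include <asm/cpufeatures.h>

/*
 * Sketch only: emit nothing by default, but have an LFENCE patched in at
 * boot on CPUs with the feature bit set.
 */
static inline void speculation_fence(void)
{
	alternative("", "lfence", X86_FEATURE_LFENCE_RDTSC);
}
```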
barrier.h:5 #include <asm/alternative.h>
15 #define mb() asm volatile(ALTERNATIVE("lock; addl $0,-4(%%esp)", "mfence", \
17 #define rmb() asm volatile(ALTERNATIVE("lock; addl $0,-4(%%esp)", "lfence", \
19 #define wmb() asm volatile(ALTERNATIVE("lock; addl $0,-4(%%esp)", "sfence", \
52 #define barrier_nospec() alternative("", "lfence", X86_FEATURE_LFENCE_RDTSC)
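
Both kernel versions patch these barriers per CPU feature: a locked add on pre-SSE2 parts, MFENCE/LFENCE/SFENCE otherwise. Callers never see that choice; they just use the macros. A hedged sketch of the usual producer-side pattern (the demo_buf structure is hypothetical):

```c
#include <linux/compiler.h>	/* WRITE_ONCE() */
#include <linux/types.h>
#include <asm/barrier.h>	/* wmb() */

struct demo_buf {		/* hypothetical buffer polled by a device or another CPU */
	u32 data;
	u32 ready;
};

/*
 * Sketch only: order the payload store before the ready flag.  The caller
 * just writes wmb(); whether that is an SFENCE or a locked add was decided
 * once at boot by the ALTERNATIVE machinery above.
 */
static void demo_publish(struct demo_buf *buf, u32 value)
{
	buf->data = value;
	wmb();
	WRITE_ONCE(buf->ready, 1);
}
```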
/kernel/linux/linux-4.19/arch/x86/include/asm/
smap.h:26 #include <asm/alternative-asm.h>
31 ALTERNATIVE "", __stringify(__ASM_CLAC), X86_FEATURE_SMAP
34 ALTERNATIVE "", __stringify(__ASM_STAC), X86_FEATURE_SMAP
45 #include <asm/alternative.h>
51 /* Note: a barrier is implicit in alternative() */ in clac()
52 alternative("", __stringify(__ASM_CLAC), X86_FEATURE_SMAP); in clac()
57 /* Note: a barrier is implicit in alternative() */ in stac()
58 alternative("", __stringify(__ASM_STAC), X86_FEATURE_SMAP); in stac()
63 ALTERNATIVE("", __stringify(__ASM_CLAC), X86_FEATURE_SMAP)
65 ALTERNATIVE("", __stringify(__ASM_STAC), X86_FEATURE_SMAP)
alternative.h:13 * Alternative inline assembly for SMP.
23 * The SMP alternative tables can be kept after boot and contain both
58 * Debug flag that can be tested to see whether alternative
115 * Pad the second replacement alternative with additional NOPs if it is
116 * additionally longer than the first replacement alternative.
135 /* alternative assembly primitive: */
136 #define ALTERNATIVE(oldinstr, newinstr, feature) \
157 * Alternative instructions for different CPU types or capabilities.
168 #define alternative(oldinstr, newinstr, feature) \
169 asm volatile (ALTERNATIVE(oldinstr, newinstr, feature) : : : "memory")
[all …]
barrier.h:5 #include <asm/alternative.h>
15 #define mb() asm volatile(ALTERNATIVE("lock; addl $0,-4(%%esp)", "mfence", \
17 #define rmb() asm volatile(ALTERNATIVE("lock; addl $0,-4(%%esp)", "lfence", \
19 #define wmb() asm volatile(ALTERNATIVE("lock; addl $0,-4(%%esp)", "sfence", \
nospec-branch.h:8 #include <asm/alternative.h>
9 #include <asm/alternative-asm.h>
60 * This should be used immediately before a retpoline alternative. It tells
86 * Do not use these directly; they only exist to make the ALTERNATIVE
146 ALTERNATIVE "jmp .Lskip_rsb_\@", \
263 ALTERNATIVE("jmp 910f", in vmexit_fill_RSB()
275 asm volatile(ALTERNATIVE("", "wrmsr", %c[feature]) in alternative_msr_write()
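
nospec-branch.h uses the same primitive both from assembler macros (RSB filling, retpolines) and from C: alternative_msr_write() only executes the WRMSR when the given feature flag is set, with the MSR index and value preloaded into ECX and EAX:EDX. A hedged sketch of a typical caller; the MSR/feature pairing (IBPB via MSR_IA32_PRED_CMD) is an assumption about usage, not copied from the hits above:

```c
#include <asm/cpufeatures.h>
#include <asm/msr-index.h>	/* MSR_IA32_PRED_CMD, PRED_CMD_IBPB */
#include <asm/nospec-branch.h>	/* alternative_msr_write() */

/*
 * Sketch only: request an indirect-branch prediction barrier.  The WRMSR is
 * only present on kernels/CPUs where X86_FEATURE_USE_IBPB is set; otherwise
 * the patched-in code is empty.
 */
static inline void demo_ibpb(void)
{
	alternative_msr_write(MSR_IA32_PRED_CMD, PRED_CMD_IBPB,
			      X86_FEATURE_USE_IBPB);
}
```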
/kernel/linux/linux-5.10/tools/testing/selftests/net/
altnames.sh:21 check_err $? "Failed to add short alternative name"
24 check_err $? "Failed to do link show with short alternative name"
27 check_err $? "Failed to get short alternative name from link show JSON"
30 check_err $? "Got unexpected short alternative name from link show JSON"
36 check_err $? "Failed to add long alternative name"
39 check_err $? "Failed to do link show with long alternative name"
42 check_err $? "Failed to get long alternative name from link show JSON"
45 check_err $? "Got unexpected long alternative name from link show JSON"
48 check_err $? "Failed to delete short alternative name"
51 check_fail $? "Unexpected success while trying to do link show with deleted short alternative name"
/kernel/linux/linux-5.10/arch/parisc/include/asm/
cache.h:9 #include <asm/alternative.h>
43 ALTERNATIVE(ALT_COND_NO_SMP, INSN_PxTLB) \
46 ALTERNATIVE(ALT_COND_NO_SMP, INSN_PxTLB) \
47 ALTERNATIVE(ALT_COND_NO_SPLIT_TLB, INSN_NOP) \
50 ALTERNATIVE(ALT_COND_NO_SMP, INSN_PxTLB) \
54 ALTERNATIVE(ALT_COND_NO_DCACHE, INSN_NOP) \
55 ALTERNATIVE(ALT_COND_NO_IOC_FDC, INSN_NOP) \
58 ALTERNATIVE(ALT_COND_NO_DCACHE, INSN_NOP) \
59 ALTERNATIVE(ALT_COND_NO_IOC_FDC, INSN_NOP) :::"memory")
alternative.h:35 /* Alternative SMP implementation. */
36 #define ALTERNATIVE(cond, replacement) "!0:" \
45 #define ALTERNATIVE(from, to, cond, replacement)\
/kernel/linux/linux-4.19/arch/arm64/include/asm/
alternative.h:53 * alternative assembly primitive:
129 * Alternative sequences
143 * alternative sequence it is defined in (branches into an
144 * alternative sequence are not fixed up).
148 * Begin an alternative code sequence.
177 * Provide the other half of the alternative code sequence.
190 * Complete an alternative code sequence.
202 * Callback-based alternative epilogue
209 * Provides a trivial alternative or default sequence consisting solely
289 * Usage: asm(ALTERNATIVE(oldinstr, newinstr, feature));
[all …]
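
The arm64 header spells out the usage form directly: asm(ALTERNATIVE(oldinstr, newinstr, feature)), with the instruction rewritten at boot once the corresponding CPU capability is established. A minimal sketch that mirrors the set_fs() hits in uaccess.h below, using the ARM64_HAS_UAO capability and SET_PSTATE_UAO() helper that appear there:

```c
#include <asm/alternative.h>
#include <asm/cpucaps.h>	/* ARM64_HAS_UAO */
#include <asm/sysreg.h>		/* SET_PSTATE_UAO() */

/*
 * Sketch only: a NOP by default, rewritten at boot to set PSTATE.UAO on
 * CPUs that advertise the UAO capability, exactly the form documented above.
 */
static inline void demo_enable_uao(void)
{
	asm(ALTERNATIVE("nop", SET_PSTATE_UAO(1), ARM64_HAS_UAO));
}
```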
uaccess.h:21 #include <asm/alternative.h>
60 asm(ALTERNATIVE("nop", SET_PSTATE_UAO(1), ARM64_HAS_UAO)); in set_fs()
62 asm(ALTERNATIVE("nop", SET_PSTATE_UAO(0), ARM64_HAS_UAO, in set_fs()
190 asm(ALTERNATIVE("nop", SET_PSTATE_PAN(0), ARM64_HAS_PAN, in __uaccess_disable_hw_pan()
196 asm(ALTERNATIVE("nop", SET_PSTATE_PAN(1), ARM64_HAS_PAN, in __uaccess_enable_hw_pan()
203 asm(ALTERNATIVE("nop", SET_PSTATE_PAN(1), alt, \
210 asm(ALTERNATIVE("nop", SET_PSTATE_PAN(0), alt, \
267 "1:"ALTERNATIVE(instr " " reg "1, [%2]\n", \
338 "1:"ALTERNATIVE(instr " " reg "1, [%2]\n", \
kvm_hyp.h:30 asm volatile(ALTERNATIVE("mrs %0, " __stringify(r##nvh),\
40 asm volatile(ALTERNATIVE("msr " __stringify(r##nvh) ", %x0",\
54 asm volatile(ALTERNATIVE("mrs %0, " __stringify(r##_EL2),\
64 asm volatile(ALTERNATIVE("msr " __stringify(r##_EL2) ", %x0",\
116 asm volatile(ALTERNATIVE("nop \n", \
/kernel/linux/linux-5.10/arch/arm64/include/asm/
alternative.h:53 * alternative assembly primitive:
129 * Alternative sequences
143 * alternative sequence it is defined in (branches into an
144 * alternative sequence are not fixed up).
148 * Begin an alternative code sequence.
177 * Provide the other half of the alternative code sequence.
190 * Complete an alternative code sequence.
202 * Callback-based alternative epilogue
209 * Provides a trivial alternative or default sequence consisting solely
289 * Usage: asm(ALTERNATIVE(oldinstr, newinstr, feature));
[all …]
irqflags.h:8 #include <asm/alternative.h>
37 asm volatile(ALTERNATIVE( in arch_local_irq_enable()
56 asm volatile(ALTERNATIVE( in arch_local_irq_disable()
72 asm volatile(ALTERNATIVE( in arch_local_save_flags()
87 asm volatile(ALTERNATIVE( in arch_irqs_disabled_flags()
124 asm volatile(ALTERNATIVE( in arch_local_irq_restore()
uaccess.h:10 #include <asm/alternative.h>
47 asm(ALTERNATIVE("nop", SET_PSTATE_UAO(1), ARM64_HAS_UAO)); in set_fs()
49 asm(ALTERNATIVE("nop", SET_PSTATE_UAO(0), ARM64_HAS_UAO, in set_fs()
179 asm(ALTERNATIVE("nop", SET_PSTATE_PAN(0), ARM64_HAS_PAN, in __uaccess_disable_hw_pan()
185 asm(ALTERNATIVE("nop", SET_PSTATE_PAN(1), ARM64_HAS_PAN, in __uaccess_enable_hw_pan()
192 asm(ALTERNATIVE("nop", SET_PSTATE_PAN(1), alt, \
199 asm(ALTERNATIVE("nop", SET_PSTATE_PAN(0), alt, \
258 "1:"ALTERNATIVE(instr " " reg "1, [%2]\n", \
323 "1:"ALTERNATIVE(instr " " reg "1, [%2]\n", \
/kernel/linux/linux-5.10/arch/s390/include/asm/
Dalternative.h36 * | alternative instr 1 |
39 * | alternative instr 2 | padding
46 * | alternative instr |
108 /* alternative assembly primitive: */
109 #define ALTERNATIVE(oldinstr, altinstr, facility) \
130 * Alternative instructions for different CPU types or capabilities.
141 #define alternative(oldinstr, altinstr, facility) \
142 asm_inline volatile(ALTERNATIVE(oldinstr, altinstr, facility) : : : "memory")
/kernel/linux/linux-4.19/arch/s390/include/asm/
Dalternative.h36 * | alternative instr 1 |
39 * | alternative instr 2 | padding
46 * | alternative instr |
108 /* alternative assembly primitive: */
109 #define ALTERNATIVE(oldinstr, altinstr, facility) \
130 * Alternative instructions for different CPU types or capabilities.
141 #define alternative(oldinstr, altinstr, facility) \
142 asm volatile(ALTERNATIVE(oldinstr, altinstr, facility) : : : "memory")
/kernel/linux/linux-5.10/arch/x86/um/asm/
Dbarrier.h5 #include <asm/alternative.h>
14 #define mb() alternative("lock; addl $0,0(%%esp)", "mfence", X86_FEATURE_XMM2)
15 #define rmb() alternative("lock; addl $0,0(%%esp)", "lfence", X86_FEATURE_XMM2)
16 #define wmb() alternative("lock; addl $0,0(%%esp)", "sfence", X86_FEATURE_XMM)
/kernel/linux/linux-5.10/arch/x86/entry/
Dcalling.h188 ALTERNATIVE "", "SET_NOFLUSH_BIT \reg", X86_FEATURE_PCID
194 ALTERNATIVE "jmp .Lend_\@", "", X86_FEATURE_PTI
205 ALTERNATIVE "jmp .Lend_\@", "", X86_FEATURE_PTI
208 ALTERNATIVE "jmp .Lwrcr3_\@", "", X86_FEATURE_PCID
245 ALTERNATIVE "jmp .Ldone_\@", "", X86_FEATURE_PTI
263 ALTERNATIVE "jmp .Lend_\@", "", X86_FEATURE_PTI
265 ALTERNATIVE "jmp .Lwrcr3_\@", "", X86_FEATURE_PCID
327 ALTERNATIVE "jmp .Lend_\@", "", X86_FEATURE_KERNEL_IBRS
356 ALTERNATIVE "jmp .Lend_\@", "", X86_FEATURE_KERNEL_IBRS
384 ALTERNATIVE "", "lfence", X86_FEATURE_FENCE_SWAPGS_USER
[all …]
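
In the entry code the assembler form of the macro works as a boot-time static branch: ALTERNATIVE "jmp .Lend_\@", "", X86_FEATURE_PTI makes the default path jump over the block, and the jump is patched out when the feature is present so the block only runs on affected configurations. The smap_save() hit near the top uses the same idiom from C. A hedged sketch of that C form (the nop stands in for real feature-only work):

```c
#include <asm/alternative.h>
#include <asm/cpufeatures.h>

/*
 * Sketch only: skip the bracketed instructions unless X86_FEATURE_PTI is
 * set; when the feature is present the "jmp 1f" is patched away at boot and
 * the block executes.
 */
static inline void demo_pti_only_work(void)
{
	asm volatile(ALTERNATIVE("jmp 1f", "", X86_FEATURE_PTI)
		     "nop\n"
		     "1:\n"
		     : : : "memory");
}
```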
/kernel/linux/linux-4.19/arch/x86/entry/
calling.h:194 ALTERNATIVE "", "SET_NOFLUSH_BIT \reg", X86_FEATURE_PCID
200 ALTERNATIVE "jmp .Lend_\@", "", X86_FEATURE_PTI
211 ALTERNATIVE "jmp .Lend_\@", "", X86_FEATURE_PTI
214 ALTERNATIVE "jmp .Lwrcr3_\@", "", X86_FEATURE_PCID
251 ALTERNATIVE "jmp .Ldone_\@", "", X86_FEATURE_PTI
269 ALTERNATIVE "jmp .Lend_\@", "", X86_FEATURE_PTI
271 ALTERNATIVE "jmp .Lwrcr3_\@", "", X86_FEATURE_PCID
330 ALTERNATIVE "", "lfence", X86_FEATURE_FENCE_SWAPGS_USER
333 ALTERNATIVE "", "lfence", X86_FEATURE_FENCE_SWAPGS_KERNEL
/kernel/linux/linux-4.19/arch/x86/um/asm/
Dbarrier.h21 #define mb() alternative("lock; addl $0,0(%%esp)", "mfence", X86_FEATURE_XMM2)
22 #define rmb() alternative("lock; addl $0,0(%%esp)", "lfence", X86_FEATURE_XMM2)
23 #define wmb() alternative("lock; addl $0,0(%%esp)", "sfence", X86_FEATURE_XMM)
/kernel/linux/linux-5.10/arch/parisc/kernel/
pacache.S:26 #include <asm/alternative.h>
106 ALTERNATIVE(88b, fitdone, ALT_COND_NO_SPLIT_TLB, INSN_NOP)
244 89: ALTERNATIVE(88b, 89b, ALT_COND_NO_ICACHE, INSN_NOP)
306 89: ALTERNATIVE(88b, 89b, ALT_COND_NO_DCACHE, INSN_NOP)
570 ALTERNATIVE(0b, 0b+4, ALT_COND_NO_SMP, INSN_PxTLB)
571 ALTERNATIVE(1b, 1b+4, ALT_COND_NO_SMP, INSN_PxTLB)
709 ALTERNATIVE(0b, 0b+4, ALT_COND_NO_SMP, INSN_PxTLB)
785 ALTERNATIVE(0b, 0b+4, ALT_COND_NO_SMP, INSN_PxTLB)
817 89: ALTERNATIVE(88b, 89b, ALT_COND_NO_DCACHE, INSN_NOP)
844 ALTERNATIVE(0b, 0b+4, ALT_COND_NO_SMP, INSN_PxTLB)
[all …]
/kernel/linux/linux-5.10/arch/s390/kernel/
Dalternative.c3 #include <asm/alternative.h>
71 * alternative code can overwrite previously scanned alternative code. in __apply_alternatives()
/kernel/linux/linux-4.19/arch/s390/kernel/
alternative.c:3 #include <asm/alternative.h>
71 * alternative code can overwrite previously scanned alternative code. in __apply_alternatives()
