| /kernel/linux/linux-6.6/tools/perf/arch/x86/tests/ |
| D | insn-x86-dat-src.c | matches shown are in main():
      21  asm volatile("rdtsc"); /* Start here */
      25  asm volatile("vcvtph2ps %xmm3,%ymm5");
      31  asm volatile("cmovno %rax,%rbx");
      32  asm volatile("cmovno 0x12345678(%rax),%rcx");
      33  asm volatile("cmovno 0x12345678(%rax),%cx");
      35  asm volatile("cmove %rax,%rbx");
      36  asm volatile("cmove 0x12345678(%rax),%rcx");
      37  asm volatile("cmove 0x12345678(%rax),%cx");
      39  asm volatile("seto 0x12345678(%rax)");
      40  asm volatile("setno 0x12345678(%rax)");
      [all …]
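These asm volatile statements exist only to emit known instruction encodings: the test source is compiled, disassembled, and used to build the expected-output tables for perf's x86 instruction-decoder test, so the instructions' results are deliberately discarded. For contrast, a minimal sketch (not from this file) of how rdtsc is normally wrapped with real output constraints:

```c
#include <stdint.h>

/* Illustrative helper, not a kernel symbol: read the time-stamp counter.
 * EDX:EAX receive the 64-bit counter; "volatile" keeps the read from
 * being reordered or dropped by the optimizer. */
static inline uint64_t rdtsc(void)
{
	uint32_t lo, hi;

	asm volatile("rdtsc" : "=a" (lo), "=d" (hi));
	return ((uint64_t)hi << 32) | lo;
}
```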
|
| /kernel/linux/linux-5.10/tools/perf/arch/x86/tests/ |
| D | insn-x86-dat-src.c | matches shown are in main():
      21  asm volatile("rdtsc"); /* Start here */
      25  asm volatile("vcvtph2ps %xmm3,%ymm5");
      31  asm volatile("cmovno %rax,%rbx");
      32  asm volatile("cmovno 0x12345678(%rax),%rcx");
      33  asm volatile("cmovno 0x12345678(%rax),%cx");
      35  asm volatile("cmove %rax,%rbx");
      36  asm volatile("cmove 0x12345678(%rax),%rcx");
      37  asm volatile("cmove 0x12345678(%rax),%cx");
      39  asm volatile("seto 0x12345678(%rax)");
      40  asm volatile("setno 0x12345678(%rax)");
      [all …]
|
| /kernel/linux/linux-5.10/lib/raid6/ |
| D | sse2.c | matches shown are in raid6_sse21_gen_syndrome():
      48  asm volatile("movdqa %0,%%xmm0" : : "m" (raid6_sse_constants.x1d[0]));
      49  asm volatile("pxor %xmm5,%xmm5"); /* Zero temp */
      52  asm volatile("prefetchnta %0" : : "m" (dptr[z0][d]));
      53  asm volatile("movdqa %0,%%xmm2" : : "m" (dptr[z0][d])); /* P[0] */
      54  asm volatile("prefetchnta %0" : : "m" (dptr[z0-1][d]));
      55  asm volatile("movdqa %xmm2,%xmm4"); /* Q[0] */
      56  asm volatile("movdqa %0,%%xmm6" : : "m" (dptr[z0-1][d]));
      58  asm volatile("prefetchnta %0" : : "m" (dptr[z][d]));
      59  asm volatile("pcmpgtb %xmm4,%xmm5");
      60  asm volatile("paddb %xmm4,%xmm4");
      [all …]
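Every raid6 gen_syndrome variant in this directory shares the same inner step: multiply the running Q value by 2 in GF(2^8) modulo the RAID-6 polynomial 0x11d. The pcmpgtb/paddb/pand/pxor quartet above is that multiply, 16 bytes at a time: pcmpgtb against an all-zero register turns each byte's sign bit into a 0xff/0x00 mask, paddb doubles each byte, and pand/pxor conditionally fold in the 0x1d reduction constant. A scalar sketch of the same arithmetic (gf2_mul2 is an illustrative name, not a kernel symbol):

```c
#include <stdint.h>

/* Multiply one byte by 2 in GF(2^8) mod x^8 + x^4 + x^3 + x^2 + 1 (0x11d). */
static inline uint8_t gf2_mul2(uint8_t x)
{
	uint8_t mask = (x & 0x80) ? 0x1d : 0x00;  /* pcmpgtb + pand */

	return (uint8_t)(x << 1) ^ mask;          /* paddb + pxor   */
}
```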
|
| D | avx2.c | matches shown are in raid6_avx21_gen_syndrome():
      46  asm volatile("vmovdqa %0,%%ymm0" : : "m" (raid6_avx2_constants.x1d[0]));
      47  asm volatile("vpxor %ymm3,%ymm3,%ymm3"); /* Zero temp */
      50  asm volatile("prefetchnta %0" : : "m" (dptr[z0][d]));
      51  asm volatile("vmovdqa %0,%%ymm2" : : "m" (dptr[z0][d])); /* P[0] */
      52  asm volatile("prefetchnta %0" : : "m" (dptr[z0-1][d]));
      53  asm volatile("vmovdqa %ymm2,%ymm4"); /* Q[0] */
      54  asm volatile("vmovdqa %0,%%ymm6" : : "m" (dptr[z0-1][d]));
      56  asm volatile("prefetchnta %0" : : "m" (dptr[z][d]));
      57  asm volatile("vpcmpgtb %ymm4,%ymm3,%ymm5");
      58  asm volatile("vpaddb %ymm4,%ymm4,%ymm4");
      [all …]
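The AVX2 variant is the same algorithm with non-destructive three-operand VEX forms (note vpcmpgtb writing a third register instead of clobbering a source), at 32 bytes per step. A hedged intrinsics rendering of the mask-and-double step; mul2_32bytes() is illustrative, not a kernel symbol:

```c
#include <immintrin.h>

/* Multiply 32 bytes by 2 in GF(2^8) mod 0x11d, mirroring the
 * vpcmpgtb/vpaddb/vpand/vpxor sequence above. */
static inline __m256i mul2_32bytes(__m256i v)
{
	const __m256i zero = _mm256_setzero_si256();
	const __m256i poly = _mm256_set1_epi8(0x1d);
	__m256i mask, dbl;

	mask = _mm256_cmpgt_epi8(zero, v);  /* vpcmpgtb: 0xff where sign bit set */
	dbl  = _mm256_add_epi8(v, v);       /* vpaddb: byte-wise left shift by 1 */
	mask = _mm256_and_si256(mask, poly);/* vpand with the 0x1d constant      */
	return _mm256_xor_si256(dbl, mask); /* vpxor folds in the reduction      */
}
```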
|
| D | recov_ssse3.c | matches shown are in raid6_2data_recov_ssse3():
      54  asm volatile("movdqa %0,%%xmm7" : : "m" (x0f[0]));
      57  asm volatile("movdqa %0,%%xmm6" : : "m" (qmul[0]));
      58  asm volatile("movdqa %0,%%xmm14" : : "m" (pbmul[0]));
      59  asm volatile("movdqa %0,%%xmm15" : : "m" (pbmul[16]));
      67  asm volatile("movdqa %0,%%xmm1" : : "m" (q[0]));
      68  asm volatile("movdqa %0,%%xmm9" : : "m" (q[16]));
      69  asm volatile("movdqa %0,%%xmm0" : : "m" (p[0]));
      70  asm volatile("movdqa %0,%%xmm8" : : "m" (p[16]));
      71  asm volatile("pxor %0,%%xmm1" : : "m" (dq[0]));
      72  asm volatile("pxor %0,%%xmm9" : : "m" (dq[16]));
      [all …]
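The recovery path solves for two lost data blocks from P and Q, which requires general GF(2^8) multiplications by fixed coefficients; those are the qmul and pbmul tables loaded above. With SSSE3 each multiply is done by pshufb over two 16-entry tables, one indexed by the low nibble of each byte and one by the high nibble, XORed together. A scalar sketch of that table scheme, assuming the 16+16 byte layout the loads imply:

```c
#include <stdint.h>

/* One GF(2^8) multiply via nibble tables: tbl[0..15] covers the low
 * nibble, tbl[16..31] the high nibble.  "tbl" stands in for qmul or
 * pbmul; the name and signature are illustrative. */
static inline uint8_t gf_mul_tbl(const uint8_t tbl[32], uint8_t x)
{
	return tbl[x & 0x0f] ^ tbl[16 + (x >> 4)];
}
```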
|
| D | recov_loongarch_simd.c | matches shown are in raid6_2data_recov_lsx():
      69  asm volatile("vld $vr20, %0" : : "m" (qmul[0]));
      70  asm volatile("vld $vr21, %0" : : "m" (qmul[16]));
      71  asm volatile("vld $vr22, %0" : : "m" (pbmul[0]));
      72  asm volatile("vld $vr23, %0" : : "m" (pbmul[16]));
      76  asm volatile("vld $vr4, %0" : : "m" (q[0]));
      77  asm volatile("vld $vr5, %0" : : "m" (q[16]));
      78  asm volatile("vld $vr6, %0" : : "m" (q[32]));
      79  asm volatile("vld $vr7, %0" : : "m" (q[48]));
      81  asm volatile("vld $vr8, %0" : : "m" (dq[0]));
      82  asm volatile("vld $vr9, %0" : : "m" (dq[16]));
      [all …]
|
| D | recov_avx2.c | matches shown are in raid6_2data_recov_avx2():
      53  asm volatile("vpbroadcastb %0, %%ymm7" : : "m" (x0f));
      57  asm volatile("vmovdqa %0, %%ymm1" : : "m" (q[0]));
      58  asm volatile("vmovdqa %0, %%ymm9" : : "m" (q[32]));
      59  asm volatile("vmovdqa %0, %%ymm0" : : "m" (p[0]));
      60  asm volatile("vmovdqa %0, %%ymm8" : : "m" (p[32]));
      61  asm volatile("vpxor %0, %%ymm1, %%ymm1" : : "m" (dq[0]));
      62  asm volatile("vpxor %0, %%ymm9, %%ymm9" : : "m" (dq[32]));
      63  asm volatile("vpxor %0, %%ymm0, %%ymm0" : : "m" (dp[0]));
      64  asm volatile("vpxor %0, %%ymm8, %%ymm8" : : "m" (dp[32]));
      73  asm volatile("vbroadcasti128 %0, %%ymm4" : : "m" (qmul[0]));
      [all …]
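vpshufb shuffles within each 128-bit lane independently, so before the AVX2 recovery loop can reuse the 16-byte nibble tables it must replicate them into both lanes; that is what the vbroadcasti128 load above does (and vpbroadcastb similarly splats the single 0x0f mask byte). A hedged intrinsics sketch of the idiom; gf_lookup_lo() is illustrative, not a kernel symbol:

```c
#include <immintrin.h>
#include <stdint.h>

/* Look up the low nibble of 32 bytes in a 16-entry GF table. */
static inline __m256i gf_lookup_lo(const uint8_t tbl16[16], __m256i x)
{
	/* Replicate the 16-byte table into both 128-bit lanes
	 * (the vbroadcasti128 step). */
	__m256i tbl = _mm256_broadcastsi128_si256(
			_mm_loadu_si128((const __m128i *)tbl16));
	__m256i lo  = _mm256_and_si256(x, _mm256_set1_epi8(0x0f));

	return _mm256_shuffle_epi8(tbl, lo);  /* vpshufb, per lane */
}
```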
|
| D | loongarch_simd.c | matches shown are in raid6_lsx_gen_syndrome():
      54  asm volatile("vld $vr0, %0" : : "m"(dptr[z0][d+0*NSIZE]));
      55  asm volatile("vld $vr1, %0" : : "m"(dptr[z0][d+1*NSIZE]));
      56  asm volatile("vld $vr2, %0" : : "m"(dptr[z0][d+2*NSIZE]));
      57  asm volatile("vld $vr3, %0" : : "m"(dptr[z0][d+3*NSIZE]));
      58  asm volatile("vori.b $vr4, $vr0, 0");
      59  asm volatile("vori.b $vr5, $vr1, 0");
      60  asm volatile("vori.b $vr6, $vr2, 0");
      61  asm volatile("vori.b $vr7, $vr3, 0");
      64  asm volatile("vld $vr8, %0" : : "m"(dptr[z][d+0*NSIZE]));
      65  asm volatile("vld $vr9, %0" : : "m"(dptr[z][d+1*NSIZE]));
      [all …]
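LSX has no dedicated vector register-to-register move, so the vori.b lines above copy a register by OR-ing it with immediate 0 (here initializing the P accumulators from the first data block); the rest of the loop mirrors the x86 variants. A minimal illustration of the idiom, compilable only for LoongArch targets with LSX:

```c
/* LSX move idiom: OR with immediate 0 copies $vr0 into $vr4. */
static inline void lsx_copy_vr0_to_vr4(void)
{
	asm volatile("vori.b $vr4, $vr0, 0");
}
```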
|
| D | sse1.c | matches shown are in raid6_sse11_gen_syndrome():
      52  asm volatile("movq %0,%%mm0" : : "m" (raid6_mmx_constants.x1d));
      53  asm volatile("pxor %mm5,%mm5"); /* Zero temp */
      56  asm volatile("prefetchnta %0" : : "m" (dptr[z0][d]));
      57  asm volatile("movq %0,%%mm2" : : "m" (dptr[z0][d])); /* P[0] */
      58  asm volatile("prefetchnta %0" : : "m" (dptr[z0-1][d]));
      59  asm volatile("movq %mm2,%mm4"); /* Q[0] */
      60  asm volatile("movq %0,%%mm6" : : "m" (dptr[z0-1][d]));
      62  asm volatile("prefetchnta %0" : : "m" (dptr[z][d]));
      63  asm volatile("pcmpgtb %mm4,%mm5");
      64  asm volatile("paddb %mm4,%mm4");
      [all …]
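sse1.c differs from the plain MMX version below mainly by adding prefetchnta: syndrome generation streams every data block exactly once, so the non-temporal hint asks the CPU to fetch ahead without displacing the cache's resident working set. The portable spelling of that hint is a sketch worth noting (on x86, locality hint 0 compiles to prefetchnta):

```c
/* Illustrative wrapper: prefetch for a single streaming pass.
 * Arguments: address, 0 = prefetch for read, 0 = no temporal locality. */
static inline void stream_prefetch(const void *p)
{
	__builtin_prefetch(p, 0, 0);
}
```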
|
| D | mmx.c | matches shown are in raid6_mmx1_gen_syndrome():
      47  asm volatile("movq %0,%%mm0" : : "m" (raid6_mmx_constants.x1d));
      48  asm volatile("pxor %mm5,%mm5"); /* Zero temp */
      51  asm volatile("movq %0,%%mm2" : : "m" (dptr[z0][d])); /* P[0] */
      52  asm volatile("movq %mm2,%mm4"); /* Q[0] */
      54  asm volatile("movq %0,%%mm6" : : "m" (dptr[z][d]));
      55  asm volatile("pcmpgtb %mm4,%mm5");
      56  asm volatile("paddb %mm4,%mm4");
      57  asm volatile("pand %mm0,%mm5");
      58  asm volatile("pxor %mm5,%mm4");
      59  asm volatile("pxor %mm5,%mm5");
      [all …]
|
| /kernel/linux/linux-6.6/lib/raid6/ |
| D | sse2.c | matches shown are in raid6_sse21_gen_syndrome():
      48  asm volatile("movdqa %0,%%xmm0" : : "m" (raid6_sse_constants.x1d[0]));
      49  asm volatile("pxor %xmm5,%xmm5"); /* Zero temp */
      52  asm volatile("prefetchnta %0" : : "m" (dptr[z0][d]));
      53  asm volatile("movdqa %0,%%xmm2" : : "m" (dptr[z0][d])); /* P[0] */
      54  asm volatile("prefetchnta %0" : : "m" (dptr[z0-1][d]));
      55  asm volatile("movdqa %xmm2,%xmm4"); /* Q[0] */
      56  asm volatile("movdqa %0,%%xmm6" : : "m" (dptr[z0-1][d]));
      58  asm volatile("prefetchnta %0" : : "m" (dptr[z][d]));
      59  asm volatile("pcmpgtb %xmm4,%xmm5");
      60  asm volatile("paddb %xmm4,%xmm4");
      [all …]
|
| D | avx2.c | matches shown are in raid6_avx21_gen_syndrome():
      46  asm volatile("vmovdqa %0,%%ymm0" : : "m" (raid6_avx2_constants.x1d[0]));
      47  asm volatile("vpxor %ymm3,%ymm3,%ymm3"); /* Zero temp */
      50  asm volatile("prefetchnta %0" : : "m" (dptr[z0][d]));
      51  asm volatile("vmovdqa %0,%%ymm2" : : "m" (dptr[z0][d])); /* P[0] */
      52  asm volatile("prefetchnta %0" : : "m" (dptr[z0-1][d]));
      53  asm volatile("vmovdqa %ymm2,%ymm4"); /* Q[0] */
      54  asm volatile("vmovdqa %0,%%ymm6" : : "m" (dptr[z0-1][d]));
      56  asm volatile("prefetchnta %0" : : "m" (dptr[z][d]));
      57  asm volatile("vpcmpgtb %ymm4,%ymm3,%ymm5");
      58  asm volatile("vpaddb %ymm4,%ymm4,%ymm4");
      [all …]
|
| D | recov_ssse3.c | matches shown are in raid6_2data_recov_ssse3():
      54  asm volatile("movdqa %0,%%xmm7" : : "m" (x0f[0]));
      57  asm volatile("movdqa %0,%%xmm6" : : "m" (qmul[0]));
      58  asm volatile("movdqa %0,%%xmm14" : : "m" (pbmul[0]));
      59  asm volatile("movdqa %0,%%xmm15" : : "m" (pbmul[16]));
      67  asm volatile("movdqa %0,%%xmm1" : : "m" (q[0]));
      68  asm volatile("movdqa %0,%%xmm9" : : "m" (q[16]));
      69  asm volatile("movdqa %0,%%xmm0" : : "m" (p[0]));
      70  asm volatile("movdqa %0,%%xmm8" : : "m" (p[16]));
      71  asm volatile("pxor %0,%%xmm1" : : "m" (dq[0]));
      72  asm volatile("pxor %0,%%xmm9" : : "m" (dq[16]));
      [all …]
|
| D | recov_loongarch_simd.c | matches shown are in raid6_2data_recov_lsx():
      69  asm volatile("vld $vr20, %0" : : "m" (qmul[0]));
      70  asm volatile("vld $vr21, %0" : : "m" (qmul[16]));
      71  asm volatile("vld $vr22, %0" : : "m" (pbmul[0]));
      72  asm volatile("vld $vr23, %0" : : "m" (pbmul[16]));
      76  asm volatile("vld $vr4, %0" : : "m" (q[0]));
      77  asm volatile("vld $vr5, %0" : : "m" (q[16]));
      78  asm volatile("vld $vr6, %0" : : "m" (q[32]));
      79  asm volatile("vld $vr7, %0" : : "m" (q[48]));
      81  asm volatile("vld $vr8, %0" : : "m" (dq[0]));
      82  asm volatile("vld $vr9, %0" : : "m" (dq[16]));
      [all …]
|
| D | recov_avx2.c | matches shown are in raid6_2data_recov_avx2():
      53  asm volatile("vpbroadcastb %0, %%ymm7" : : "m" (x0f));
      57  asm volatile("vmovdqa %0, %%ymm1" : : "m" (q[0]));
      58  asm volatile("vmovdqa %0, %%ymm9" : : "m" (q[32]));
      59  asm volatile("vmovdqa %0, %%ymm0" : : "m" (p[0]));
      60  asm volatile("vmovdqa %0, %%ymm8" : : "m" (p[32]));
      61  asm volatile("vpxor %0, %%ymm1, %%ymm1" : : "m" (dq[0]));
      62  asm volatile("vpxor %0, %%ymm9, %%ymm9" : : "m" (dq[32]));
      63  asm volatile("vpxor %0, %%ymm0, %%ymm0" : : "m" (dp[0]));
      64  asm volatile("vpxor %0, %%ymm8, %%ymm8" : : "m" (dp[32]));
      73  asm volatile("vbroadcasti128 %0, %%ymm4" : : "m" (qmul[0]));
      [all …]
|
| D | loongarch_simd.c | matches shown are in raid6_lsx_gen_syndrome():
      54  asm volatile("vld $vr0, %0" : : "m"(dptr[z0][d+0*NSIZE]));
      55  asm volatile("vld $vr1, %0" : : "m"(dptr[z0][d+1*NSIZE]));
      56  asm volatile("vld $vr2, %0" : : "m"(dptr[z0][d+2*NSIZE]));
      57  asm volatile("vld $vr3, %0" : : "m"(dptr[z0][d+3*NSIZE]));
      58  asm volatile("vori.b $vr4, $vr0, 0");
      59  asm volatile("vori.b $vr5, $vr1, 0");
      60  asm volatile("vori.b $vr6, $vr2, 0");
      61  asm volatile("vori.b $vr7, $vr3, 0");
      64  asm volatile("vld $vr8, %0" : : "m"(dptr[z][d+0*NSIZE]));
      65  asm volatile("vld $vr9, %0" : : "m"(dptr[z][d+1*NSIZE]));
      [all …]
|
| D | sse1.c | matches shown are in raid6_sse11_gen_syndrome():
      52  asm volatile("movq %0,%%mm0" : : "m" (raid6_mmx_constants.x1d));
      53  asm volatile("pxor %mm5,%mm5"); /* Zero temp */
      56  asm volatile("prefetchnta %0" : : "m" (dptr[z0][d]));
      57  asm volatile("movq %0,%%mm2" : : "m" (dptr[z0][d])); /* P[0] */
      58  asm volatile("prefetchnta %0" : : "m" (dptr[z0-1][d]));
      59  asm volatile("movq %mm2,%mm4"); /* Q[0] */
      60  asm volatile("movq %0,%%mm6" : : "m" (dptr[z0-1][d]));
      62  asm volatile("prefetchnta %0" : : "m" (dptr[z][d]));
      63  asm volatile("pcmpgtb %mm4,%mm5");
      64  asm volatile("paddb %mm4,%mm4");
      [all …]
|
| D | mmx.c | matches shown are in raid6_mmx1_gen_syndrome():
      47  asm volatile("movq %0,%%mm0" : : "m" (raid6_mmx_constants.x1d));
      48  asm volatile("pxor %mm5,%mm5"); /* Zero temp */
      51  asm volatile("movq %0,%%mm2" : : "m" (dptr[z0][d])); /* P[0] */
      52  asm volatile("movq %mm2,%mm4"); /* Q[0] */
      54  asm volatile("movq %0,%%mm6" : : "m" (dptr[z][d]));
      55  asm volatile("pcmpgtb %mm4,%mm5");
      56  asm volatile("paddb %mm4,%mm4");
      57  asm volatile("pand %mm0,%mm5");
      58  asm volatile("pxor %mm5,%mm4");
      59  asm volatile("pxor %mm5,%mm5");
      [all …]
|
| /kernel/linux/linux-6.6/arch/ia64/include/uapi/asm/ |
| D | gcc_intrin.h |
      13  /* define this macro to get some asm stmts included in 'c' files */
      18  #define ia64_barrier()	asm volatile ("":::"memory")
      20  #define ia64_stop()	asm volatile (";;"::)
      22  #define ia64_invala_gr(regnum)	asm volatile ("invala.e r%0" :: "i"(regnum))
      24  #define ia64_invala_fr(regnum)	asm volatile ("invala.e f%0" :: "i"(regnum))
      26  #define ia64_flushrs()	asm volatile ("flushrs;;":::"memory")
      28  #define ia64_loadrs()	asm volatile ("loadrs;;":::"memory")
      38  asm volatile ("mov psr.l=%0" :: "r"(val) : "memory"); \
      41  asm volatile ("mov ar%0=%1" :: \
      46  asm volatile ("mov cr%0=%1" :: \
      [all …]
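ia64_barrier() above is the canonical compiler barrier: an empty asm with a "memory" clobber emits no instruction, but prevents the compiler from caching memory values in registers or reordering memory accesses across that point. The same construct arch-neutrally (this is also how the kernel's generic barrier() is defined):

```c
/* Compiler barrier: no code is emitted, but the "memory" clobber forces
 * the compiler to assume all memory may have changed, so loads and
 * stores cannot be moved across it. */
#define compiler_barrier()	asm volatile("" ::: "memory")
```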
|
| /kernel/linux/linux-5.10/arch/ia64/include/uapi/asm/ |
| D | gcc_intrin.h |
      13  /* define this macro to get some asm stmts included in 'c' files */
      18  #define ia64_barrier()	asm volatile ("":::"memory")
      20  #define ia64_stop()	asm volatile (";;"::)
      22  #define ia64_invala_gr(regnum)	asm volatile ("invala.e r%0" :: "i"(regnum))
      24  #define ia64_invala_fr(regnum)	asm volatile ("invala.e f%0" :: "i"(regnum))
      26  #define ia64_flushrs()	asm volatile ("flushrs;;":::"memory")
      28  #define ia64_loadrs()	asm volatile ("loadrs;;":::"memory")
      38  asm volatile ("mov psr.l=%0" :: "r"(val) : "memory"); \
      41  asm volatile ("mov ar%0=%1" :: \
      46  asm volatile ("mov cr%0=%1" :: \
      [all …]
|
| /kernel/linux/linux-5.10/tools/perf/ |
| D | check-headers.sh |
      30  arch/x86/include/asm/disabled-features.h
      31  arch/x86/include/asm/required-features.h
      32  arch/x86/include/asm/cpufeatures.h
      33  arch/x86/include/asm/inat_types.h
      34  arch/x86/include/asm/emulate_prefix.h
      35  arch/x86/include/asm/irq_vectors.h
      36  arch/x86/include/asm/msr-index.h
      37  arch/x86/include/uapi/asm/prctl.h
      40  arch/arm/include/uapi/asm/perf_regs.h
      41  arch/arm64/include/uapi/asm/perf_regs.h
      [all …]
|
| /kernel/linux/linux-6.6/tools/perf/ |
| D | check-headers.sh | 36 "arch/x86/include/asm/disabled-features.h" 37 "arch/x86/include/asm/required-features.h" 38 "arch/x86/include/asm/cpufeatures.h" 39 "arch/x86/include/asm/inat_types.h" 40 "arch/x86/include/asm/emulate_prefix.h" 41 "arch/x86/include/asm/irq_vectors.h" 42 "arch/x86/include/asm/msr-index.h" 43 "arch/x86/include/uapi/asm/prctl.h" 46 "arch/arm/include/uapi/asm/perf_regs.h" 47 "arch/arm64/include/uapi/asm/perf_regs.h" [all …]
|
| /kernel/linux/linux-5.10/arch/s390/include/asm/ |
| D | kvm_para.h |
      24  #include <uapi/asm/kvm_para.h>
      25  #include <asm/diag.h>
      29  register unsigned long __nr asm("1") = nr;   in __kvm_hypercall0()
      30  register long __rc asm("2");                 in __kvm_hypercall0()
      32  asm volatile ("diag 2,4,0x500\n"             in __kvm_hypercall0()
      45  register unsigned long __nr asm("1") = nr;   in __kvm_hypercall1()
      46  register unsigned long __p1 asm("2") = p1;   in __kvm_hypercall1()
      47  register long __rc asm("2");                 in __kvm_hypercall1()
      49  asm volatile ("diag 2,4,0x500\n"             in __kvm_hypercall1()
      63  register unsigned long __nr asm("1") = nr;   in __kvm_hypercall2()
      [all …]
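On s390, KVM hypercalls are issued with the diag 0x500 instruction, and the calling convention is expressed through GCC explicit register variables: asm("1") pins the hypercall number into r1, parameters go into r2 upward, and the return code comes back in r2. A hedged reconstruction of the zero-argument case from the matches above; the operand lists beyond what the search hits show are assumptions:

```c
/* Sketch of __kvm_hypercall0(): r1 = hypercall number, diag 0x500 traps
 * to the hypervisor, result returned in r2. */
static inline long __kvm_hypercall0(unsigned long nr)
{
	register unsigned long __nr asm("1") = nr;
	register long __rc asm("2");

	asm volatile ("diag 2,4,0x500\n"
		      : "=d" (__rc) : "d" (__nr) : "memory", "cc");
	return __rc;
}
```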
|
| /kernel/linux/linux-6.6/arch/s390/kernel/ |
| D | fpu.c | asm matches shown are in __kernel_fpu_begin():
      11  #include <asm/fpu/types.h>
      12  #include <asm/fpu/api.h>
      13  #include <asm/vx-insn.h>
      25  asm volatile("stfpc %0" : "=Q" (state->fpc));
      30  asm volatile("std 0,%0" : "=Q" (state->fprs[0]));
      31  asm volatile("std 1,%0" : "=Q" (state->fprs[1]));
      32  asm volatile("std 2,%0" : "=Q" (state->fprs[2]));
      33  asm volatile("std 3,%0" : "=Q" (state->fprs[3]));
      34  asm volatile("std 4,%0" : "=Q" (state->fprs[4]));
      35  asm volatile("std 5,%0" : "=Q" (state->fprs[5]));
      [all …]
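"Q" here is the s390 memory constraint for a base-register-plus-short-displacement operand with no index register, which is the addressing form std (store floating-point double) can encode; "=Q" additionally marks the slot as write-only output. A minimal hedged sketch of the save/restore pairing for one register, with illustrative names:

```c
/* Save and restore s390 floating-point register 0 to/from a memory slot.
 * The "Q" constraint keeps the operand in a form std/ld can address. */
static inline void save_fpr0(unsigned long long *slot)
{
	asm volatile("std 0,%0" : "=Q" (*slot));
}

static inline void restore_fpr0(const unsigned long long *slot)
{
	asm volatile("ld 0,%0" : : "Q" (*slot));
}
```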
|
| /kernel/linux/linux-6.6/arch/x86/kvm/ |
| D | fpu.h | asm matches shown are in _kvm_read_sse_reg():
      6   #include <asm/fpu/api.h>
      21  case 0: asm("movdqa %%xmm0, %0" : "=m"(*data)); break;
      22  case 1: asm("movdqa %%xmm1, %0" : "=m"(*data)); break;
      23  case 2: asm("movdqa %%xmm2, %0" : "=m"(*data)); break;
      24  case 3: asm("movdqa %%xmm3, %0" : "=m"(*data)); break;
      25  case 4: asm("movdqa %%xmm4, %0" : "=m"(*data)); break;
      26  case 5: asm("movdqa %%xmm5, %0" : "=m"(*data)); break;
      27  case 6: asm("movdqa %%xmm6, %0" : "=m"(*data)); break;
      28  case 7: asm("movdqa %%xmm7, %0" : "=m"(*data)); break;
      30  case 8: asm("movdqa %%xmm8, %0" : "=m"(*data)); break;
      [all …]
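The register number in movdqa is part of the instruction encoding, not a runtime operand, so inline asm cannot index XMM registers with a variable; the emulator therefore switches to one hard-coded statement per register. A two-case hedged sketch of the same shape (the real helper covers xmm0..xmm15; sse128_t mirrors KVM's 128-bit container, and the 16-byte alignment is required because movdqa faults on unaligned memory):

```c
#include <stdint.h>

typedef struct {
	uint64_t lo, hi;
} __attribute__((aligned(16))) sse128_t;

/* Copy the selected XMM register into *data; each case hard-codes one
 * register name because asm cannot pick registers at runtime. */
static inline void read_sse_reg(int reg, sse128_t *data)
{
	switch (reg) {
	case 0: asm("movdqa %%xmm0, %0" : "=m" (*data)); break;
	case 1: asm("movdqa %%xmm1, %0" : "=m" (*data)); break;
	default: break;	/* full version continues through xmm15 */
	}
}
```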
|