| /kernel/linux/linux-6.6/tools/perf/arch/x86/tests/ |
| D | insn-x86-dat-src.c |
|   21 asm volatile("rdtsc"); /* Start here */ in main()
|   25 asm volatile("vcvtph2ps %xmm3,%ymm5"); in main()
|   31 asm volatile("cmovno %rax,%rbx"); in main()
|   32 asm volatile("cmovno 0x12345678(%rax),%rcx"); in main()
|   33 asm volatile("cmovno 0x12345678(%rax),%cx"); in main()
|   35 asm volatile("cmove %rax,%rbx"); in main()
|   36 asm volatile("cmove 0x12345678(%rax),%rcx"); in main()
|   37 asm volatile("cmove 0x12345678(%rax),%cx"); in main()
|   39 asm volatile("seto 0x12345678(%rax)"); in main()
|   40 asm volatile("setno 0x12345678(%rax)"); in main()
|   [all …]
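
Each statement in this perf test source exists only to make the compiler emit one known x86 encoding; the resulting object code is what the instruction-decoder test checks against disassembler output, and the program itself is never meant to run. A minimal sketch of that pattern (not the actual test harness):

    /* Sketch of the insn-x86-dat-src.c pattern: the file is compiled for
     * 64-bit x86 so the assembler emits one known encoding per statement;
     * it is not intended to be executed.  "volatile" keeps the compiler
     * from discarding statements that have no visible effect. */
    int main(void)
    {
            asm volatile("rdtsc");                  /* fixed two-byte opcode     */
            asm volatile("cmove %rax,%rbx");        /* conditional move, reg,reg */
            asm volatile("seto 0x12345678(%rax)");  /* setcc with a disp32 memop */
            return 0;
    }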
|
| /kernel/linux/linux-5.10/tools/perf/arch/x86/tests/ |
| D | insn-x86-dat-src.c |
|   21 asm volatile("rdtsc"); /* Start here */ in main()
|   25 asm volatile("vcvtph2ps %xmm3,%ymm5"); in main()
|   31 asm volatile("cmovno %rax,%rbx"); in main()
|   32 asm volatile("cmovno 0x12345678(%rax),%rcx"); in main()
|   33 asm volatile("cmovno 0x12345678(%rax),%cx"); in main()
|   35 asm volatile("cmove %rax,%rbx"); in main()
|   36 asm volatile("cmove 0x12345678(%rax),%rcx"); in main()
|   37 asm volatile("cmove 0x12345678(%rax),%cx"); in main()
|   39 asm volatile("seto 0x12345678(%rax)"); in main()
|   40 asm volatile("setno 0x12345678(%rax)"); in main()
|   [all …]
|
| /kernel/linux/linux-5.10/drivers/video/fbdev/kyro/ |
| D | STG4000Reg.h |
|   76 volatile u32 Thread0Enable; /* 0x0000 */
|   77 volatile u32 Thread1Enable; /* 0x0004 */
|   78 volatile u32 Thread0Recover; /* 0x0008 */
|   79 volatile u32 Thread1Recover; /* 0x000C */
|   80 volatile u32 Thread0Step; /* 0x0010 */
|   81 volatile u32 Thread1Step; /* 0x0014 */
|   82 volatile u32 VideoInStatus; /* 0x0018 */
|   83 volatile u32 Core2InSignStart; /* 0x001C */
|   84 volatile u32 Core1ResetVector; /* 0x0020 */
|   85 volatile u32 Core1ROMOffset; /* 0x0024 */
|   [all …]
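
STG4000Reg.h models the Kyro register file as a struct of volatile u32 fields whose declaration order matches the hardware offsets in the trailing comments. A hedged sketch of how such a volatile-struct overlay is used; the names and the register meanings below are illustrative, not taken from the kyro driver:

    #include <linux/types.h>

    /* Hypothetical register block laid out in the STG4000Reg.h style: each
     * field is volatile so every access really reaches the device, and the
     * field order must match the hardware offsets noted in the comments. */
    struct demo_regs {
            volatile u32 enable;   /* 0x0000 */
            volatile u32 status;   /* 0x0004 */
    };

    /* Overlay the struct on an already-mapped register window and poke a
     * field directly.  Newer drivers usually prefer readl()/writel() with
     * explicit offsets, but the volatile-struct overlay is the pattern
     * this header relies on. */
    static void demo_enable(struct demo_regs *regs)
    {
            regs->enable = 1;
            (void)regs->status;    /* volatile read: not optimised away */
    }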
|
| /kernel/linux/linux-6.6/drivers/video/fbdev/kyro/ |
| D | STG4000Reg.h |
|   76 volatile u32 Thread0Enable; /* 0x0000 */
|   77 volatile u32 Thread1Enable; /* 0x0004 */
|   78 volatile u32 Thread0Recover; /* 0x0008 */
|   79 volatile u32 Thread1Recover; /* 0x000C */
|   80 volatile u32 Thread0Step; /* 0x0010 */
|   81 volatile u32 Thread1Step; /* 0x0014 */
|   82 volatile u32 VideoInStatus; /* 0x0018 */
|   83 volatile u32 Core2InSignStart; /* 0x001C */
|   84 volatile u32 Core1ResetVector; /* 0x0020 */
|   85 volatile u32 Core1ROMOffset; /* 0x0024 */
|   [all …]
|
| /kernel/linux/linux-5.10/lib/raid6/ |
| D | sse2.c |
|   48 asm volatile("movdqa %0,%%xmm0" : : "m" (raid6_sse_constants.x1d[0])); in raid6_sse21_gen_syndrome()
|   49 asm volatile("pxor %xmm5,%xmm5"); /* Zero temp */ in raid6_sse21_gen_syndrome()
|   52 asm volatile("prefetchnta %0" : : "m" (dptr[z0][d])); in raid6_sse21_gen_syndrome()
|   53 asm volatile("movdqa %0,%%xmm2" : : "m" (dptr[z0][d])); /* P[0] */ in raid6_sse21_gen_syndrome()
|   54 asm volatile("prefetchnta %0" : : "m" (dptr[z0-1][d])); in raid6_sse21_gen_syndrome()
|   55 asm volatile("movdqa %xmm2,%xmm4"); /* Q[0] */ in raid6_sse21_gen_syndrome()
|   56 asm volatile("movdqa %0,%%xmm6" : : "m" (dptr[z0-1][d])); in raid6_sse21_gen_syndrome()
|   58 asm volatile("prefetchnta %0" : : "m" (dptr[z][d])); in raid6_sse21_gen_syndrome()
|   59 asm volatile("pcmpgtb %xmm4,%xmm5"); in raid6_sse21_gen_syndrome()
|   60 asm volatile("paddb %xmm4,%xmm4"); in raid6_sse21_gen_syndrome()
|   [all …]
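
Per 16-byte lane, the SSE2 kernel computes the standard RAID-6 recurrence: P is the XOR of all data blocks, and Q is rebuilt by multiplying the running value by 2 in GF(2^8) before XORing in the next block; the pcmpgtb/paddb pair above is the bytewise form of that multiply (top-bit mask plus shift), with the 0x1d reduction constant preloaded into %xmm0. A plain-C sketch of the same loop, with the SIMD, unrolling and prefetching stripped out:

    #include <stddef.h>
    #include <stdint.h>

    /* Multiply a byte by 2 in GF(2^8) with the RAID-6 polynomial 0x1d.
     * This is what the SIMD code does 16 (or 32) bytes at a time with
     * pcmpgtb (build the mask of bytes whose top bit is set), paddb
     * (shift left by one) and pxor against the 0x1d constant. */
    static inline uint8_t gf2_mul2(uint8_t v)
    {
            return (uint8_t)((v << 1) ^ ((v & 0x80) ? 0x1d : 0));
    }

    /* Scalar equivalent of the gen_syndrome kernels (sketch only): dptr[0]
     * .. dptr[disks-3] are the data blocks, dptr[disks-2] is P and
     * dptr[disks-1] is Q, each "bytes" long. */
    static void gen_syndrome_scalar(int disks, size_t bytes, uint8_t **dptr)
    {
            uint8_t *p = dptr[disks - 2], *q = dptr[disks - 1];
            int z0 = disks - 3;

            for (size_t d = 0; d < bytes; d++) {
                    uint8_t wp = dptr[z0][d], wq = wp;

                    for (int z = z0 - 1; z >= 0; z--) {
                            wp ^= dptr[z][d];
                            wq = gf2_mul2(wq) ^ dptr[z][d];
                    }
                    p[d] = wp;
                    q[d] = wq;
            }
    }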
|
| D | avx2.c |
|   46 asm volatile("vmovdqa %0,%%ymm0" : : "m" (raid6_avx2_constants.x1d[0])); in raid6_avx21_gen_syndrome()
|   47 asm volatile("vpxor %ymm3,%ymm3,%ymm3"); /* Zero temp */ in raid6_avx21_gen_syndrome()
|   50 asm volatile("prefetchnta %0" : : "m" (dptr[z0][d])); in raid6_avx21_gen_syndrome()
|   51 asm volatile("vmovdqa %0,%%ymm2" : : "m" (dptr[z0][d]));/* P[0] */ in raid6_avx21_gen_syndrome()
|   52 asm volatile("prefetchnta %0" : : "m" (dptr[z0-1][d])); in raid6_avx21_gen_syndrome()
|   53 asm volatile("vmovdqa %ymm2,%ymm4");/* Q[0] */ in raid6_avx21_gen_syndrome()
|   54 asm volatile("vmovdqa %0,%%ymm6" : : "m" (dptr[z0-1][d])); in raid6_avx21_gen_syndrome()
|   56 asm volatile("prefetchnta %0" : : "m" (dptr[z][d])); in raid6_avx21_gen_syndrome()
|   57 asm volatile("vpcmpgtb %ymm4,%ymm3,%ymm5"); in raid6_avx21_gen_syndrome()
|   58 asm volatile("vpaddb %ymm4,%ymm4,%ymm4"); in raid6_avx21_gen_syndrome()
|   [all …]
|
| D | recov_ssse3.c |
|   54 asm volatile("movdqa %0,%%xmm7" : : "m" (x0f[0])); in raid6_2data_recov_ssse3()
|   57 asm volatile("movdqa %0,%%xmm6" : : "m" (qmul[0])); in raid6_2data_recov_ssse3()
|   58 asm volatile("movdqa %0,%%xmm14" : : "m" (pbmul[0])); in raid6_2data_recov_ssse3()
|   59 asm volatile("movdqa %0,%%xmm15" : : "m" (pbmul[16])); in raid6_2data_recov_ssse3()
|   67 asm volatile("movdqa %0,%%xmm1" : : "m" (q[0])); in raid6_2data_recov_ssse3()
|   68 asm volatile("movdqa %0,%%xmm9" : : "m" (q[16])); in raid6_2data_recov_ssse3()
|   69 asm volatile("movdqa %0,%%xmm0" : : "m" (p[0])); in raid6_2data_recov_ssse3()
|   70 asm volatile("movdqa %0,%%xmm8" : : "m" (p[16])); in raid6_2data_recov_ssse3()
|   71 asm volatile("pxor %0,%%xmm1" : : "m" (dq[0])); in raid6_2data_recov_ssse3()
|   72 asm volatile("pxor %0,%%xmm9" : : "m" (dq[16])); in raid6_2data_recov_ssse3()
|   [all …]
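
The recovery kernels above load a 0x0f nibble mask plus two 16-byte halves of each multiplication table (qmul[0]/qmul[16], pbmul[0]/pbmul[16]) so that pshufb can perform a GF(2^8) multiply-by-constant as two table lookups. A scalar sketch of that nibble-split multiply, assuming the table layout those paired loads suggest (low-nibble products first, high-nibble products second); this is an illustration, not the kernel's table definition:

    #include <stdint.h>

    /* Nibble-split GF(2^8) multiply by a fixed constant: tbl is assumed to
     * hold 32 bytes, tbl[0..15] = constant * (v & 0x0f) and
     * tbl[16..31] = constant * (v & 0xf0), so the full product is the XOR
     * of one lookup from each half.  The SIMD code does 16 of these at
     * once with pand (the 0x0f mask), psrlw/pand (the high nibble) and
     * pshufb (the table lookup). */
    static inline uint8_t gf_mul_by_table(const uint8_t *tbl, uint8_t v)
    {
            return tbl[v & 0x0f] ^ tbl[16 + (v >> 4)];
    }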
|
| D | recov_loongarch_simd.c |
|   69 asm volatile("vld $vr20, %0" : : "m" (qmul[0])); in raid6_2data_recov_lsx()
|   70 asm volatile("vld $vr21, %0" : : "m" (qmul[16])); in raid6_2data_recov_lsx()
|   71 asm volatile("vld $vr22, %0" : : "m" (pbmul[0])); in raid6_2data_recov_lsx()
|   72 asm volatile("vld $vr23, %0" : : "m" (pbmul[16])); in raid6_2data_recov_lsx()
|   76 asm volatile("vld $vr4, %0" : : "m" (q[0])); in raid6_2data_recov_lsx()
|   77 asm volatile("vld $vr5, %0" : : "m" (q[16])); in raid6_2data_recov_lsx()
|   78 asm volatile("vld $vr6, %0" : : "m" (q[32])); in raid6_2data_recov_lsx()
|   79 asm volatile("vld $vr7, %0" : : "m" (q[48])); in raid6_2data_recov_lsx()
|   81 asm volatile("vld $vr8, %0" : : "m" (dq[0])); in raid6_2data_recov_lsx()
|   82 asm volatile("vld $vr9, %0" : : "m" (dq[16])); in raid6_2data_recov_lsx()
|   [all …]
|
| D | recov_avx2.c |
|   53 asm volatile("vpbroadcastb %0, %%ymm7" : : "m" (x0f)); in raid6_2data_recov_avx2()
|   57 asm volatile("vmovdqa %0, %%ymm1" : : "m" (q[0])); in raid6_2data_recov_avx2()
|   58 asm volatile("vmovdqa %0, %%ymm9" : : "m" (q[32])); in raid6_2data_recov_avx2()
|   59 asm volatile("vmovdqa %0, %%ymm0" : : "m" (p[0])); in raid6_2data_recov_avx2()
|   60 asm volatile("vmovdqa %0, %%ymm8" : : "m" (p[32])); in raid6_2data_recov_avx2()
|   61 asm volatile("vpxor %0, %%ymm1, %%ymm1" : : "m" (dq[0])); in raid6_2data_recov_avx2()
|   62 asm volatile("vpxor %0, %%ymm9, %%ymm9" : : "m" (dq[32])); in raid6_2data_recov_avx2()
|   63 asm volatile("vpxor %0, %%ymm0, %%ymm0" : : "m" (dp[0])); in raid6_2data_recov_avx2()
|   64 asm volatile("vpxor %0, %%ymm8, %%ymm8" : : "m" (dp[32])); in raid6_2data_recov_avx2()
|   73 asm volatile("vbroadcasti128 %0, %%ymm4" : : "m" (qmul[0])); in raid6_2data_recov_avx2()
|   [all …]
|
| D | loongarch_simd.c |
|   54 asm volatile("vld $vr0, %0" : : "m"(dptr[z0][d+0*NSIZE])); in raid6_lsx_gen_syndrome()
|   55 asm volatile("vld $vr1, %0" : : "m"(dptr[z0][d+1*NSIZE])); in raid6_lsx_gen_syndrome()
|   56 asm volatile("vld $vr2, %0" : : "m"(dptr[z0][d+2*NSIZE])); in raid6_lsx_gen_syndrome()
|   57 asm volatile("vld $vr3, %0" : : "m"(dptr[z0][d+3*NSIZE])); in raid6_lsx_gen_syndrome()
|   58 asm volatile("vori.b $vr4, $vr0, 0"); in raid6_lsx_gen_syndrome()
|   59 asm volatile("vori.b $vr5, $vr1, 0"); in raid6_lsx_gen_syndrome()
|   60 asm volatile("vori.b $vr6, $vr2, 0"); in raid6_lsx_gen_syndrome()
|   61 asm volatile("vori.b $vr7, $vr3, 0"); in raid6_lsx_gen_syndrome()
|   64 asm volatile("vld $vr8, %0" : : "m"(dptr[z][d+0*NSIZE])); in raid6_lsx_gen_syndrome()
|   65 asm volatile("vld $vr9, %0" : : "m"(dptr[z][d+1*NSIZE])); in raid6_lsx_gen_syndrome()
|   [all …]
|
| D | sse1.c |
|   52 asm volatile("movq %0,%%mm0" : : "m" (raid6_mmx_constants.x1d)); in raid6_sse11_gen_syndrome()
|   53 asm volatile("pxor %mm5,%mm5"); /* Zero temp */ in raid6_sse11_gen_syndrome()
|   56 asm volatile("prefetchnta %0" : : "m" (dptr[z0][d])); in raid6_sse11_gen_syndrome()
|   57 asm volatile("movq %0,%%mm2" : : "m" (dptr[z0][d])); /* P[0] */ in raid6_sse11_gen_syndrome()
|   58 asm volatile("prefetchnta %0" : : "m" (dptr[z0-1][d])); in raid6_sse11_gen_syndrome()
|   59 asm volatile("movq %mm2,%mm4"); /* Q[0] */ in raid6_sse11_gen_syndrome()
|   60 asm volatile("movq %0,%%mm6" : : "m" (dptr[z0-1][d])); in raid6_sse11_gen_syndrome()
|   62 asm volatile("prefetchnta %0" : : "m" (dptr[z][d])); in raid6_sse11_gen_syndrome()
|   63 asm volatile("pcmpgtb %mm4,%mm5"); in raid6_sse11_gen_syndrome()
|   64 asm volatile("paddb %mm4,%mm4"); in raid6_sse11_gen_syndrome()
|   [all …]
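
All of these RAID-6 kernels use the same inline-asm idiom: one instruction per asm volatile() statement, memory operands passed via "m" constraints so the compiler supplies the addressing mode, and no output operands, which is why volatile is needed to keep the statements alive and in order. A minimal, self-contained illustration of the idiom (GCC/Clang extended asm on x86; not kernel code):

    #include <stdint.h>

    /* The "m" constraint hands the operand's address straight to the
     * instruction, and the array cast tells the compiler how much memory
     * the asm reads so that range is up to date before it runs. */
    static inline void prefetch_block(const void *p)
    {
            asm volatile("prefetchnta %0" : : "m" (*(const uint8_t (*)[64])p));
    }

    /* XOR a 16-byte block into %xmm0, mirroring the kernel style: the SIMD
     * register is not in a clobber list, so (as in the kernel, which brackets
     * this with kernel_fpu_begin()/kernel_fpu_end()) the caller must own the
     * SIMD state for the duration. */
    static inline void xor_into_xmm0(const void *p)
    {
            asm volatile("pxor %0,%%xmm0" : : "m" (*(const uint8_t (*)[16])p));
    }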
|
| /kernel/linux/linux-6.6/lib/raid6/ |
| D | sse2.c |
|   48 asm volatile("movdqa %0,%%xmm0" : : "m" (raid6_sse_constants.x1d[0])); in raid6_sse21_gen_syndrome()
|   49 asm volatile("pxor %xmm5,%xmm5"); /* Zero temp */ in raid6_sse21_gen_syndrome()
|   52 asm volatile("prefetchnta %0" : : "m" (dptr[z0][d])); in raid6_sse21_gen_syndrome()
|   53 asm volatile("movdqa %0,%%xmm2" : : "m" (dptr[z0][d])); /* P[0] */ in raid6_sse21_gen_syndrome()
|   54 asm volatile("prefetchnta %0" : : "m" (dptr[z0-1][d])); in raid6_sse21_gen_syndrome()
|   55 asm volatile("movdqa %xmm2,%xmm4"); /* Q[0] */ in raid6_sse21_gen_syndrome()
|   56 asm volatile("movdqa %0,%%xmm6" : : "m" (dptr[z0-1][d])); in raid6_sse21_gen_syndrome()
|   58 asm volatile("prefetchnta %0" : : "m" (dptr[z][d])); in raid6_sse21_gen_syndrome()
|   59 asm volatile("pcmpgtb %xmm4,%xmm5"); in raid6_sse21_gen_syndrome()
|   60 asm volatile("paddb %xmm4,%xmm4"); in raid6_sse21_gen_syndrome()
|   [all …]
|
| D | avx2.c |
|   46 asm volatile("vmovdqa %0,%%ymm0" : : "m" (raid6_avx2_constants.x1d[0])); in raid6_avx21_gen_syndrome()
|   47 asm volatile("vpxor %ymm3,%ymm3,%ymm3"); /* Zero temp */ in raid6_avx21_gen_syndrome()
|   50 asm volatile("prefetchnta %0" : : "m" (dptr[z0][d])); in raid6_avx21_gen_syndrome()
|   51 asm volatile("vmovdqa %0,%%ymm2" : : "m" (dptr[z0][d]));/* P[0] */ in raid6_avx21_gen_syndrome()
|   52 asm volatile("prefetchnta %0" : : "m" (dptr[z0-1][d])); in raid6_avx21_gen_syndrome()
|   53 asm volatile("vmovdqa %ymm2,%ymm4");/* Q[0] */ in raid6_avx21_gen_syndrome()
|   54 asm volatile("vmovdqa %0,%%ymm6" : : "m" (dptr[z0-1][d])); in raid6_avx21_gen_syndrome()
|   56 asm volatile("prefetchnta %0" : : "m" (dptr[z][d])); in raid6_avx21_gen_syndrome()
|   57 asm volatile("vpcmpgtb %ymm4,%ymm3,%ymm5"); in raid6_avx21_gen_syndrome()
|   58 asm volatile("vpaddb %ymm4,%ymm4,%ymm4"); in raid6_avx21_gen_syndrome()
|   [all …]
|
| D | recov_ssse3.c |
|   54 asm volatile("movdqa %0,%%xmm7" : : "m" (x0f[0])); in raid6_2data_recov_ssse3()
|   57 asm volatile("movdqa %0,%%xmm6" : : "m" (qmul[0])); in raid6_2data_recov_ssse3()
|   58 asm volatile("movdqa %0,%%xmm14" : : "m" (pbmul[0])); in raid6_2data_recov_ssse3()
|   59 asm volatile("movdqa %0,%%xmm15" : : "m" (pbmul[16])); in raid6_2data_recov_ssse3()
|   67 asm volatile("movdqa %0,%%xmm1" : : "m" (q[0])); in raid6_2data_recov_ssse3()
|   68 asm volatile("movdqa %0,%%xmm9" : : "m" (q[16])); in raid6_2data_recov_ssse3()
|   69 asm volatile("movdqa %0,%%xmm0" : : "m" (p[0])); in raid6_2data_recov_ssse3()
|   70 asm volatile("movdqa %0,%%xmm8" : : "m" (p[16])); in raid6_2data_recov_ssse3()
|   71 asm volatile("pxor %0,%%xmm1" : : "m" (dq[0])); in raid6_2data_recov_ssse3()
|   72 asm volatile("pxor %0,%%xmm9" : : "m" (dq[16])); in raid6_2data_recov_ssse3()
|   [all …]
|
| D | recov_loongarch_simd.c |
|   69 asm volatile("vld $vr20, %0" : : "m" (qmul[0])); in raid6_2data_recov_lsx()
|   70 asm volatile("vld $vr21, %0" : : "m" (qmul[16])); in raid6_2data_recov_lsx()
|   71 asm volatile("vld $vr22, %0" : : "m" (pbmul[0])); in raid6_2data_recov_lsx()
|   72 asm volatile("vld $vr23, %0" : : "m" (pbmul[16])); in raid6_2data_recov_lsx()
|   76 asm volatile("vld $vr4, %0" : : "m" (q[0])); in raid6_2data_recov_lsx()
|   77 asm volatile("vld $vr5, %0" : : "m" (q[16])); in raid6_2data_recov_lsx()
|   78 asm volatile("vld $vr6, %0" : : "m" (q[32])); in raid6_2data_recov_lsx()
|   79 asm volatile("vld $vr7, %0" : : "m" (q[48])); in raid6_2data_recov_lsx()
|   81 asm volatile("vld $vr8, %0" : : "m" (dq[0])); in raid6_2data_recov_lsx()
|   82 asm volatile("vld $vr9, %0" : : "m" (dq[16])); in raid6_2data_recov_lsx()
|   [all …]
|
| D | recov_avx2.c |
|   53 asm volatile("vpbroadcastb %0, %%ymm7" : : "m" (x0f)); in raid6_2data_recov_avx2()
|   57 asm volatile("vmovdqa %0, %%ymm1" : : "m" (q[0])); in raid6_2data_recov_avx2()
|   58 asm volatile("vmovdqa %0, %%ymm9" : : "m" (q[32])); in raid6_2data_recov_avx2()
|   59 asm volatile("vmovdqa %0, %%ymm0" : : "m" (p[0])); in raid6_2data_recov_avx2()
|   60 asm volatile("vmovdqa %0, %%ymm8" : : "m" (p[32])); in raid6_2data_recov_avx2()
|   61 asm volatile("vpxor %0, %%ymm1, %%ymm1" : : "m" (dq[0])); in raid6_2data_recov_avx2()
|   62 asm volatile("vpxor %0, %%ymm9, %%ymm9" : : "m" (dq[32])); in raid6_2data_recov_avx2()
|   63 asm volatile("vpxor %0, %%ymm0, %%ymm0" : : "m" (dp[0])); in raid6_2data_recov_avx2()
|   64 asm volatile("vpxor %0, %%ymm8, %%ymm8" : : "m" (dp[32])); in raid6_2data_recov_avx2()
|   73 asm volatile("vbroadcasti128 %0, %%ymm4" : : "m" (qmul[0])); in raid6_2data_recov_avx2()
|   [all …]
|
| D | loongarch_simd.c |
|   54 asm volatile("vld $vr0, %0" : : "m"(dptr[z0][d+0*NSIZE])); in raid6_lsx_gen_syndrome()
|   55 asm volatile("vld $vr1, %0" : : "m"(dptr[z0][d+1*NSIZE])); in raid6_lsx_gen_syndrome()
|   56 asm volatile("vld $vr2, %0" : : "m"(dptr[z0][d+2*NSIZE])); in raid6_lsx_gen_syndrome()
|   57 asm volatile("vld $vr3, %0" : : "m"(dptr[z0][d+3*NSIZE])); in raid6_lsx_gen_syndrome()
|   58 asm volatile("vori.b $vr4, $vr0, 0"); in raid6_lsx_gen_syndrome()
|   59 asm volatile("vori.b $vr5, $vr1, 0"); in raid6_lsx_gen_syndrome()
|   60 asm volatile("vori.b $vr6, $vr2, 0"); in raid6_lsx_gen_syndrome()
|   61 asm volatile("vori.b $vr7, $vr3, 0"); in raid6_lsx_gen_syndrome()
|   64 asm volatile("vld $vr8, %0" : : "m"(dptr[z][d+0*NSIZE])); in raid6_lsx_gen_syndrome()
|   65 asm volatile("vld $vr9, %0" : : "m"(dptr[z][d+1*NSIZE])); in raid6_lsx_gen_syndrome()
|   [all …]
|
| D | sse1.c |
|   52 asm volatile("movq %0,%%mm0" : : "m" (raid6_mmx_constants.x1d)); in raid6_sse11_gen_syndrome()
|   53 asm volatile("pxor %mm5,%mm5"); /* Zero temp */ in raid6_sse11_gen_syndrome()
|   56 asm volatile("prefetchnta %0" : : "m" (dptr[z0][d])); in raid6_sse11_gen_syndrome()
|   57 asm volatile("movq %0,%%mm2" : : "m" (dptr[z0][d])); /* P[0] */ in raid6_sse11_gen_syndrome()
|   58 asm volatile("prefetchnta %0" : : "m" (dptr[z0-1][d])); in raid6_sse11_gen_syndrome()
|   59 asm volatile("movq %mm2,%mm4"); /* Q[0] */ in raid6_sse11_gen_syndrome()
|   60 asm volatile("movq %0,%%mm6" : : "m" (dptr[z0-1][d])); in raid6_sse11_gen_syndrome()
|   62 asm volatile("prefetchnta %0" : : "m" (dptr[z][d])); in raid6_sse11_gen_syndrome()
|   63 asm volatile("pcmpgtb %mm4,%mm5"); in raid6_sse11_gen_syndrome()
|   64 asm volatile("paddb %mm4,%mm4"); in raid6_sse11_gen_syndrome()
|   [all …]
|
| /kernel/linux/linux-5.10/arch/arm/mach-iop32x/ |
| D | iop3xx.h |
|   56 #define IOP3XX_ATUVID (volatile u16 *)IOP3XX_REG_ADDR(0x0100)
|   57 #define IOP3XX_ATUDID (volatile u16 *)IOP3XX_REG_ADDR(0x0102)
|   58 #define IOP3XX_ATUCMD (volatile u16 *)IOP3XX_REG_ADDR(0x0104)
|   59 #define IOP3XX_ATUSR (volatile u16 *)IOP3XX_REG_ADDR(0x0106)
|   60 #define IOP3XX_ATURID (volatile u8 *)IOP3XX_REG_ADDR(0x0108)
|   61 #define IOP3XX_ATUCCR (volatile u32 *)IOP3XX_REG_ADDR(0x0109)
|   62 #define IOP3XX_ATUCLSR (volatile u8 *)IOP3XX_REG_ADDR(0x010c)
|   63 #define IOP3XX_ATULT (volatile u8 *)IOP3XX_REG_ADDR(0x010d)
|   64 #define IOP3XX_ATUHTR (volatile u8 *)IOP3XX_REG_ADDR(0x010e)
|   65 #define IOP3XX_ATUBIST (volatile u8 *)IOP3XX_REG_ADDR(0x010f)
|   [all …]
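
Each IOP3XX_ATU* macro expands to a casted register address, so the access itself happens at the dereference, and the volatile qualifier on the pointed-to type keeps every dereference a real bus transaction. A hedged usage sketch; the include path and the written value are assumptions, not taken from the iop32x code:

    #include "iop3xx.h"   /* assumed local include within mach-iop32x */

    static u16 atu_vendor_id(void)
    {
            /* The macro is "(volatile u16 *)address", i.e. a pointer, so
             * dereferencing it performs the 16-bit register read and the
             * compiler may not cache or drop it. */
            return *IOP3XX_ATUVID;
    }

    static void atu_enable(void)
    {
            /* Illustrative value only: PCI COMMAND memory-space and
             * bus-master bits; not taken from the driver itself. */
            *IOP3XX_ATUCMD = 0x0006;
    }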
|
| /kernel/liteos_a/arch/arm/arm/include/ |
| D | arm.h |
|   42 __asm__ volatile("mrc p15, 0, %0, c1,c0,0" : "=r"(val)); in OsArmReadSctlr()
|   48 __asm__ volatile("mcr p15, 0, %0, c1,c0,0" ::"r"(val)); in OsArmWriteSctlr()
|   49 __asm__ volatile("isb" ::: "memory"); in OsArmWriteSctlr()
|   55 __asm__ volatile("mrc p15, 0, %0, c1,c0,1" : "=r"(val)); in OsArmReadActlr()
|   61 __asm__ volatile("mcr p15, 0, %0, c1,c0,1" ::"r"(val)); in OsArmWriteActlr()
|   62 __asm__ volatile("isb" ::: "memory"); in OsArmWriteActlr()
|   68 __asm__ volatile("mrc p15, 0, %0, c1,c0,2" : "=r"(val)); in OsArmReadCpacr()
|   74 __asm__ volatile("mcr p15, 0, %0, c1,c0,2" ::"r"(val)); in OsArmWriteCpacr()
|   75 __asm__ volatile("isb" ::: "memory"); in OsArmWriteCpacr()
|   81 __asm__ volatile("mrc p15, 0, %0, c2,c0,0" : "=r"(val)); in OsArmReadTtbr()
|   [all …]
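
These LiteOS-A accessors wrap CP15 traffic: mrc copies a coprocessor register into a general-purpose register through an output constraint, mcr writes one back through an input constraint, and the trailing isb keeps later instructions from running against stale system-control state. A hedged read-modify-write sketch built on them (assumption: the accessors take and return a 32-bit value, as the "=r"(val) / "r"(val) constraints imply); the helper and the bit constant below are illustrative and not part of arm.h:

    #include "arm.h"   /* assumed local include for the Os* accessors */

    /* SCTLR bit 2 is the ARMv7-A data/unified cache enable; the constant is
     * architectural, but this helper is not in arm.h. */
    #define DEMO_SCTLR_C  (1U << 2)

    static inline void DemoEnableDcache(void)
    {
            unsigned int sctlr = OsArmReadSctlr();  /* mrc p15, 0, <r>, c1, c0, 0 */

            sctlr |= DEMO_SCTLR_C;
            OsArmWriteSctlr(sctlr);                 /* mcr followed by isb, per arm.h */
    }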
|
| /kernel/linux/linux-5.10/arch/mips/include/asm/txx9/ |
| D | tx3927.h |
|   26 volatile unsigned long cr[8];
|   27 volatile unsigned long tr[3];
|   28 volatile unsigned long cmd;
|   29 volatile unsigned long smrs[2];
|   33 volatile unsigned long cr[8];
|   38 volatile unsigned long cha;
|   39 volatile unsigned long sar;
|   40 volatile unsigned long dar;
|   41 volatile unsigned long cntr;
|   42 volatile unsigned long sair;
|   [all …]
|
| /kernel/linux/linux-5.10/arch/m68k/include/asm/ |
| D | mvme147hw.h |
|   23 #define m147_rtc ((MK48T02 * volatile)0xfffe07f8)
|   27 volatile u_long dma_tadr;
|   28 volatile u_long dma_dadr;
|   29 volatile u_long dma_bcr;
|   30 volatile u_long dma_hr;
|   31 volatile u_short t1_preload;
|   32 volatile u_short t1_count;
|   33 volatile u_short t2_preload;
|   34 volatile u_short t2_count;
|   35 volatile u_char t1_int_cntrl;
|   [all …]
|
| /kernel/linux/linux-6.6/arch/m68k/include/asm/ |
| D | mvme147hw.h |
|   23 #define m147_rtc ((MK48T02 * volatile)0xfffe07f8)
|   27 volatile u_long dma_tadr;
|   28 volatile u_long dma_dadr;
|   29 volatile u_long dma_bcr;
|   30 volatile u_long dma_hr;
|   31 volatile u_short t1_preload;
|   32 volatile u_short t1_count;
|   33 volatile u_short t2_preload;
|   34 volatile u_short t2_count;
|   35 volatile u_char t1_int_cntrl;
|   [all …]
|
| /kernel/linux/linux-6.6/arch/mips/include/asm/ip32/ |
| D | mace.h |
|   24 volatile unsigned int error_addr;
|   25 volatile unsigned int error;
|   49 volatile unsigned int control;
|   70 volatile unsigned int rev;
|   72 volatile unsigned int config_addr;
|   74 volatile unsigned char b[4];
|   75 volatile unsigned short w[2];
|   76 volatile unsigned int l;
|   98 volatile u64 mac_ctrl;
|   99 volatile unsigned long int_stat;
|   [all …]
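
Lines 74-76 are the usual mixed-width overlay: the same 32-bit register window is exposed as four bytes, two halfwords, or one word, each view volatile so the access really happens at the width the caller chose. A hedged sketch of the pattern with illustrative names (not copied from mace.h):

    /* Illustrative overlay: all three members alias the same 32-bit
     * register, and volatile keeps each access at exactly the width
     * written in the source. */
    union demo_config_data {
            volatile unsigned char  b[4];   /* byte accesses     */
            volatile unsigned short w[2];   /* halfword accesses */
            volatile unsigned int   l;      /* full-word access  */
    };

    /* Example: pick the view that matches the requested access size. */
    static unsigned int demo_read(union demo_config_data *d, int size, int off)
    {
            switch (size) {
            case 1:  return d->b[off];
            case 2:  return d->w[off >> 1];
            default: return d->l;
            }
    }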
|
| /kernel/linux/linux-5.10/arch/mips/include/asm/ip32/ |
| D | mace.h |
|   24 volatile unsigned int error_addr;
|   25 volatile unsigned int error;
|   49 volatile unsigned int control;
|   70 volatile unsigned int rev;
|   72 volatile unsigned int config_addr;
|   74 volatile unsigned char b[4];
|   75 volatile unsigned short w[2];
|   76 volatile unsigned int l;
|   98 volatile u64 mac_ctrl;
|   99 volatile unsigned long int_stat;
|   [all …]
|