/arch/xtensa/boot/boot-redboot/ |
D | bootstrap.S |
    94  mov.n a8, a4
    99  s32i a10, a8, 0
    100 s32i a11, a8, 4
    103 s32i a10, a8, 8
    104 s32i a11, a8, 12
    105 addi a8, a8, 16
    107 blt a8, a5, 1b
    141 l32i a8, a2, 0
    142 s32i a8, a6, 0
    169 add a8, a0, a4
    [all …]
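The bootstrap.S hits at 99-107 form a 16-bytes-per-iteration store loop: two registers are written four times before the pointer advances, the usual shape of a fill/clear loop. A rough C sketch of the same loop, assuming word-aligned dst/end bounds (all names here are hypothetical):

    #include <stdint.h>

    /* 16-byte-per-iteration fill, mirroring the s32i/addi/blt hits;
     * v0 and v1 stand in for a10 and a11. */
    static void fill16(uint32_t *dst, const uint32_t *end,
                       uint32_t v0, uint32_t v1)
    {
        do {
            dst[0] = v0;     /* s32i a10, a8, 0  */
            dst[1] = v1;     /* s32i a11, a8, 4  */
            dst[2] = v0;     /* s32i a10, a8, 8  */
            dst[3] = v1;     /* s32i a11, a8, 12 */
            dst += 4;        /* addi a8, a8, 16  */
        } while (dst < end); /* blt a8, a5, 1b   */
    }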
|
/arch/xtensa/lib/ |
D | checksum.S |
    145 l8ui a8, a2, 3 /* bits 0.. 8 */
    149 slli a8, a8, 24
    153 or a7, a7, a8
    248 SRC( l32i a8, a2, 4 )
    250 DST( s32i a8, a3, 4 )
    252 ONES_ADD(a5, a8)
    254 SRC( l32i a8, a2, 12 )
    256 DST( s32i a8, a3, 12 )
    258 ONES_ADD(a5, a8)
    260 SRC( l32i a8, a2, 20 )
    [all …]
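ONES_ADD in the checksum.S hits is the ones'-complement accumulate step of the Internet checksum (RFC 1071): add the word, then fold any carry out of bit 31 back into bit 0. A minimal C sketch of that step (hypothetical helper name):

    #include <stdint.h>

    /* Add one 32-bit word into the running ones'-complement sum. */
    static uint32_t ones_add(uint32_t sum, uint32_t word)
    {
        sum += word;
        if (sum < word)  /* addition wrapped, so a carry fell out */
            sum += 1;    /* end-around carry */
        return sum;
    }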
|
D | memcopy.S |
    140 movi a8, 3 # if source is not aligned,
    141 _bany a3, a8, .Lsrcunaligned # then use shifting copy
    150 slli a8, a7, 4
    151 add a8, a8, a3 # a8 = end of last 16B source chunk
    165 bne a3, a8, .Loop1 # continue loop if a3:src != a8:src_end
    218 and a11, a3, a8 # save unalignment offset for below
    231 l32i a8, a3, 8
    235 src_b a7, a7, a8
    238 src_b a8, a8, a9
    239 s32i a8, a5, 8
    [all …]
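The .Lsrcunaligned path named at hit 141 is a classic shifting copy: the source is read with aligned word loads, and adjacent words are spliced together (src_b shifts by the unalignment offset saved at hit 218). A little-endian C sketch, valid only for offsets 1-3 since offset 0 takes the word-copy path; names are hypothetical:

    #include <stddef.h>
    #include <stdint.h>

    /* Copy nwords words to aligned dst from unaligned src by splicing
     * pairs of aligned source words. Like the assembly, this may read
     * up to one aligned word past the final source byte. */
    static void shifting_copy(uint32_t *dst, const uint8_t *src,
                              size_t nwords)
    {
        size_t off = (uintptr_t)src & 3;  /* unalignment offset, 1..3 */
        const uint32_t *s = (const uint32_t *)(src - off);
        uint32_t lo = *s++;

        while (nwords--) {
            uint32_t hi = *s++;
            /* the moral equivalent of src_b a8, a8, a9 */
            *dst++ = (lo >> (8 * off)) | (hi << (32 - 8 * off));
            lo = hi;
        }
    }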
|
D | usercopy.S |
    90  movi a8, 3 # if source is also aligned,
    91  bnone a3, a8, .Laligned # then use word copy
    160 slli a8, a7, 4
    161 add a8, a8, a3 # a8 = end of last 16B source chunk
    175 blt a3, a8, .Loop1
    218 and a10, a3, a8 # save unalignment offset for below
    230 EX(l32i, a8, a3, 8, l_fixup)
    234 ALIGN( a7, a7, a8)
    237 ALIGN( a8, a8, a9)
    238 EX(s32i, a8, a5, 8, s_fixup)
    [all …]
|
D | strnlen_user.S |
    47  # a8/ mask3
    63  movi a8, MASK3 # mask for byte 3
    85  bnone a9, a8, .Lz3 # if byte 3 is zero
    137 bany a9, a8, .Laligned # if byte 3 (of word, not string) is nonzero
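The MASK3/bnone pairing above tests one byte of a loaded word at a time: MASKn selects byte n, and bnone branches when the masked bits are all zero, i.e. when that byte is NUL. In C terms (the real MASKn values are endian-dependent; this sketch assumes little-endian byte numbering):

    #include <stdint.h>

    #define MASK3 0xff000000u  /* byte 3 of a little-endian word */

    /* Nonzero when byte 3 of w is NUL, mirroring
     * "bnone a9, a8, .Lz3" with a8 = MASK3. */
    static int byte3_is_zero(uint32_t w)
    {
        return (w & MASK3) == 0;
    }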
|
D | strncpy_user.S |
    50  # a8/ mask3
    68  movi a8, MASK3 # mask for byte 3
    126 bnone a9, a8, .Lz3 # if byte 3 is zero
    139 bnone a9, a8, .Lz1 # if byte 3 is zero
|
/arch/xtensa/mm/ |
D | misc.S |
    64  l32i a8, a3, 0
    66  s32i a8, a2, 0
    69  l32i a8, a3, 8
    71  s32i a8, a2, 8
    74  l32i a8, a3, 16
    76  s32i a8, a2, 16
    79  l32i a8, a3, 24
    81  s32i a8, a2, 24
    188 addi a8, a3, 1 # way1
    190 wdtlb a7, a8
    [all …]
|
/arch/xtensa/kernel/ |
D | align.S |
    172 s32i a8, a2, PT_AREG8
    185 rsr a8, excvaddr # load unaligned memory address
    236 and a3, a3, a8 # align memory address
    238 __ssa8 a8
    330 l32i a8, a2, PT_AREG8
    375 and a4, a4, a8 # align memory address
    384 __ssa8r a8
    385 __src_b a8, a5, a6 # lo-mask F..F0..0 (BE) 0..0F..F (LE)
    392 and a5, a5, a8 # mask
    393 __sh a8, a3 # shift value
    [all …]
|
D | entry.S |
    156  s32i a8, a1, PT_AREG8
    649  l32i a8, a1, PT_AREG8
    874  _bbci.l a8, 30, 8f
    1105 s32i a8, a2, PT_AREG8
    1166 s32e a8, a4, -32
    1781 l32i a8, a2, PT_AREG4
    1826 mov a8, a8
    1855 save_xtregs_user a4 a6 a8 a9 a12 a13 THREAD_XTREGS_USER
    1902 load_xtregs_user a5 a6 a8 a9 a12 a13 THREAD_XTREGS_USER
|
D | vectors.S |
    753 s32e a8, a0, -32
    770 l32e a8, a11, -32
|
/arch/c6x/include/asm/ |
D | syscall.h |
    72  *args++ = regs->a8; in syscall_get_arguments()
    110 regs->a8 = *args++; in syscall_set_arguments()
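On C6x, syscall arguments travel in the argument registers a4, b4, a6, b6, a8, b8, which is why syscall_get_arguments() copies regs->a8 out as the fifth argument. A reduced sketch of the copy-out direction, with pt_regs trimmed to the relevant fields and no offset/count handling:

    /* Stand-in for the C6x pt_regs; only argument registers shown. */
    struct pt_regs {
        unsigned long a4, b4, a6, b6, a8, b8;
    };

    /* Copy all six syscall arguments out in ABI order. */
    static void get_all_args(const struct pt_regs *regs,
                             unsigned long *args)
    {
        *args++ = regs->a4;
        *args++ = regs->b4;
        *args++ = regs->a6;
        *args++ = regs->b6;
        *args++ = regs->a8;
        *args++ = regs->b8;
    }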
|
/arch/arm/boot/dts/ |
D | sh7372.dtsi | 21 compatible = "arm,cortex-a8";
|
D | imx53.dtsi |
    51  compatible = "arm,cortex-a8";
    760 compatible = "arm,cortex-a8-pmu";
|
D | omap3.dtsi |
    35  compatible = "arm,cortex-a8";
    47  compatible = "arm,cortex-a8-pmu";
|
D | am33xx.dtsi |
    44  compatible = "arm,cortex-a8";
    70  compatible = "arm,cortex-a8-pmu";
|
/arch/tile/kernel/ |
D | hvglue_trace.c |
    159 #define __HV_DECL8(t8, a8, ...) t8 a8, __HV_DECL7(__VA_ARGS__) argument
    168 #define __HV_PASS8(t8, a8, ...) a8, __HV_PASS7(__VA_ARGS__) argument
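__HV_DECL8 and __HV_PASS8 are one rung of a pair-consuming macro ladder: each level peels one (type, name) pair off the argument list, emits either a parameter declaration (DECL) or just the name (PASS), and recurses one level down. A three-level sketch of the same pattern with hypothetical macro names:

    /* Each level consumes one (type, name) pair and recurses. */
    #define MY_DECL1(t1, a1)      t1 a1
    #define MY_DECL2(t2, a2, ...) t2 a2, MY_DECL1(__VA_ARGS__)
    #define MY_DECL3(t3, a3, ...) t3 a3, MY_DECL2(__VA_ARGS__)

    /* MY_DECL3(int, x, long, y, char, z) expands to
     *     int x, long y, char z
     * which is exactly a parameter list: */
    void example(MY_DECL3(int, x, long, y, char, z));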
|
/arch/c6x/kernel/ |
D | signal.c |
    45  COPY(sp); COPY(a4); COPY(b4); COPY(a6); COPY(b6); COPY(a8); COPY(b8); in restore_sigcontext()
    110 COPY(sp); COPY(a4); COPY(b4); COPY(a6); COPY(b6); COPY(a8); COPY(b8); in setup_sigcontext()
|
D | asm-offsets.c | 57 OFFSET(REGS_A8, pt_regs, a8); in foo()
|
D | traps.c | 45 pr_err("A8: %08lx B8: %08lx\n", regs->a8, regs->b8); in show_regs()
|
/arch/c6x/include/uapi/asm/ |
D | ptrace.h | 154 REG_PAIR(a9, a8);
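REG_PAIR(a9, a8) lays out one C6x register pair in the uapi ptrace struct; a macro is needed because the two halves of a pair swap memory order between big- and little-endian builds. A sketch of the likely shape (the exact field order and guard macro are assumptions to check against the real header):

    /* Odd register first on big-endian, even first on little-endian,
     * so the pair reads as one consistent 64-bit unit either way. */
    #ifdef _BIG_ENDIAN
    #define REG_PAIR(odd, even) unsigned long odd; unsigned long even
    #else
    #define REG_PAIR(odd, even) unsigned long even; unsigned long odd
    #endif

    struct example_regs {
        REG_PAIR(a9, a8);
    };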
|
/arch/powerpc/boot/dts/ |
D | mpc8349emitxgp.dts | 102 dma@82a8 {
|
D | asp834x-redboot.dts | 121 dma@82a8 {
|
D | sbc8349.dts | 98 dma@82a8 {
|
D | mpc832x_rdb.dts | 103 dma@82a8 {
|
D | mpc8349emitx.dts | 155 dma@82a8 {
|