/arch/riscv/lib/ |
D | memset.S |
    13  move t0, a0 /* Preserve return value */
    23  addi a3, t0, SZREG-1
    25  beq a3, t0, 2f /* Skip if already aligned */
    27  sub a4, a3, t0
    29  sb a1, 0(t0)
    30  addi t0, t0, 1
    31  bltu t0, a3, 1b
    48  add a3, t0, a4
    56  sub t0, t0, a4
    67  REG_S a1, 0(t0)
    [all …]
|
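The memset.S hits above are the RISC-V head-alignment path: the destination in a0 is preserved in t0 as the return value, bytes are stored one at a time until t0 reaches an SZREG boundary, and the fill then continues a register width at a time (REG_S). A rough C sketch of that shape, not the kernel's implementation:

#include <stddef.h>
#include <stdint.h>

/* Sketch of an align-then-fill memset: byte stores until the pointer is
 * word-aligned, word stores for the bulk, byte stores for the tail. */
void *memset_sketch(void *s, int c, size_t n)
{
    unsigned char *p = s;              /* working pointer; s is kept as the return value */
    unsigned char b = (unsigned char)c;

    /* Head: single bytes until p is aligned to the word size. */
    while (n && ((uintptr_t)p & (sizeof(unsigned long) - 1))) {
        *p++ = b;
        n--;
    }

    /* Body: broadcast the byte across a word and store whole words. */
    unsigned long pattern = b;
    for (unsigned i = 1; i < sizeof(unsigned long); i++)
        pattern = (pattern << 8) | b;
    while (n >= sizeof(unsigned long)) {
        *(unsigned long *)p = pattern;
        p += sizeof(unsigned long);
        n -= sizeof(unsigned long);
    }

    /* Tail: finish any remaining bytes. */
    while (n--)
        *p++ = b;

    return s;
}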
/arch/mips/include/asm/mach-loongson64/ |
D | kernel-entry-init.h |
    23  mfc0 t0, CP0_CONFIG3
    24  or t0, (0x1 << 7)
    25  mtc0 t0, CP0_CONFIG3
    27  mfc0 t0, CP0_PAGEGRAIN
    28  or t0, (0x1 << 29)
    29  mtc0 t0, CP0_PAGEGRAIN
    31  mfc0 t0, CP0_PRID
    33  andi t1, t0, PRID_IMP_MASK
    38  andi t0, (PRID_IMP_MASK | PRID_REV_MASK)
    39  slti t0, t0, (PRID_IMP_LOONGSON_64C | PRID_REV_LOONGSON3A_R2_0)
    [all …]
|
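The Loongson64 entry code sets a CP0 Config3 bit and a PageGrain bit, then reads CP0_PRID and compares the masked implementation/revision field against Loongson-3A R2.0 to decide whether the extra setup applies. A hedged C rendering of just that revision gate (mask and ID values here are illustrative; the real definitions live in asm/cpu.h):

#include <stdint.h>
#include <stdbool.h>

/* Illustrative values, not necessarily the kernel's. */
#define PRID_IMP_MASK              0xff00
#define PRID_REV_MASK              0x00ff
#define PRID_IMP_LOONGSON_64C      0x6300
#define PRID_REV_LOONGSON3A_R2_0   0x0008

/* Sketch of the gate in the snippet: is this core older than Loongson-3A R2.0? */
static bool older_than_3a_r2_sketch(uint32_t prid)
{
    return (prid & (PRID_IMP_MASK | PRID_REV_MASK)) <
           (PRID_IMP_LOONGSON_64C | PRID_REV_LOONGSON3A_R2_0);
}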
/arch/mips/power/ |
D | hibernate_asm.S |
    15  PTR_LA t0, saved_regs
    16  PTR_S ra, PT_R31(t0)
    17  PTR_S sp, PT_R29(t0)
    18  PTR_S fp, PT_R30(t0)
    19  PTR_S gp, PT_R28(t0)
    20  PTR_S s0, PT_R16(t0)
    21  PTR_S s1, PT_R17(t0)
    22  PTR_S s2, PT_R18(t0)
    23  PTR_S s3, PT_R19(t0)
    24  PTR_S s4, PT_R20(t0)
    [all …]
|
/arch/mips/kernel/ |
D | cps-vec.S |
    117  li t0, CAUSEF_IV
    118  mtc0 t0, CP0_CAUSE
    121  li t0, ST0_CU1 | ST0_CU0 | ST0_BEV | STATUS_BITDEPS
    122  mtc0 t0, CP0_STATUS
    135  li t0, 0xff
    136  sw t0, GCR_CL_COHERENCE_OFS(v1)
    140  1: mfc0 t0, CP0_CONFIG
    141  ori t0, 0x7
    142  xori t0, 0x7
    143  or t0, t0, s0
    [all …]
|
D | bmips_5xxx_init.S |
    33  and t0, kva, t2 ; \
    36  9: cache op, 0(t0) ; \
    37  bne t0, t1, 9b ; \
    38  addu t0, linesize ; \
    123  move t0, a0
    150  move a0, t0
    178  move a0, t0
    216  move t0, a0
    242  move a0, t0
    269  move a0, t0
    [all …]
|
D | octeon_switch.S |
    34  dmfc0 t0, $11,7 /* CvmMemCtl */
    35  bbit0 t0, 6, 3f /* Is user access enabled? */
    39  andi t0, 0x3f
    41  sll t0, 7-LONGLOG-1
    48  subu t0, 1 /* Decrement loop var */
    53  bnez t0, 2b /* Loop until we've copied it all */
    58  dmfc0 t0, $11,7 /* CvmMemCtl */
    59  xori t0, t0, 0x40 /* Bit 6 is CVMSEG user enable */
    60  dmtc0 t0, $11,7 /* CvmMemCtl */
    77  PTR_ADDU t0, $28, _THREAD_SIZE - 32
    [all …]
|
D | entry.S |
    43  LONG_L t0, PT_STATUS(sp) # returning to kernel mode?
    44  andi t0, t0, KU_USER
    45  beqz t0, resume_kernel
    52  andi t0, a2, _TIF_WORK_MASK # (ignoring syscall_trace)
    53  bnez t0, work_pending
    59  lw t0, TI_PRE_COUNT($28)
    60  bnez t0, restore_all
    61  LONG_L t0, TI_FLAGS($28)
    62  andi t1, t0, _TIF_NEED_RESCHED
    64  LONG_L t0, PT_STATUS(sp) # Interrupts off?
    [all …]
|
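entry.S's hits are the common return path: PT_STATUS is tested for KU_USER to pick the user or kernel resume path, pending work flags send the user path to work_pending, and a non-zero preempt count (or, at line 64, disabled interrupts) blocks kernel preemption. A hypothetical C rendering of that decision flow, with names and flag values invented for illustration:

/* Illustrative flag values only. */
#define KU_USER            0x10
#define TIF_WORK_MASK      0x0007
#define TIF_NEED_RESCHED   0x0002

struct regs   { unsigned long status; };
struct thread { unsigned long flags; int preempt_count; };

/* Sketch of the resume logic suggested by the snippet above; the real
 * code also requires interrupts to have been enabled before preempting. */
void resume_sketch(struct regs *regs, struct thread *ti,
                   void (*work_pending)(void), void (*schedule)(void),
                   void (*restore_all)(void))
{
    if (regs->status & KU_USER) {
        /* Returning to user space: handle signals/rescheduling first. */
        if (ti->flags & TIF_WORK_MASK)
            work_pending();
    } else {
        /* Returning to kernel: only preempt when the count is zero. */
        if (ti->preempt_count == 0 && (ti->flags & TIF_NEED_RESCHED))
            schedule();
    }
    restore_all();
}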
D | scall64-o32.S |
    36  dsubu t0, v0, __NR_O32_Linux # check syscall number
    37  sltiu t0, t0, __NR_O32_Linux_syscalls
    40  beqz t0, not_o32_scall
    65  ld t0, PT_R29(sp) # get old user stack pointer
    66  daddu t1, t0, 32
    69  load_a4: lw a4, 16(t0) # argument #5 from usp
    70  load_a5: lw a5, 20(t0) # argument #6 from usp
    71  load_a6: lw a6, 24(t0) # argument #7 from usp
    72  load_a7: lw a7, 28(t0) # argument #8 from usp
    83  LONG_L t0, TI_FLAGS($28) # syscall tracing enabled?
    [all …]
|
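scall64-o32.S bounds-checks the syscall number against the o32 range and, for calls taking more than four arguments, fetches arguments five through eight from the old user stack pointer at offsets 16..28. A hedged sketch of that idea (constants are illustrative; the real load_a4..load_a7 loads are protected by exception fixups, omitted here):

#include <stdint.h>

#define NR_O32_BASE      4000   /* illustrative stand-in for __NR_O32_Linux */
#define NR_O32_SYSCALLS   400   /* illustrative count, not the real one */

/* Sketch: decide whether a number is an o32 syscall and where the extra
 * arguments would come from on the user stack. */
int fetch_extra_args_sketch(unsigned long nr, const uint32_t *user_sp,
                            uint32_t args[4])
{
    if (nr - NR_O32_BASE >= NR_O32_SYSCALLS)
        return -1;                  /* not an o32 call ("beqz t0, not_o32_scall") */

    /* Arguments 5..8 live at usp+16, +20, +24, +28 in the o32 ABI. */
    args[0] = user_sp[4];
    args[1] = user_sp[5];
    args[2] = user_sp[6];
    args[3] = user_sp[7];
    return 0;
}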
D | scall64-n32.S |
    35  dsubu t0, v0, __NR_N32_Linux # check syscall number
    36  sltiu t0, t0, __NR_N32_Linux_syscalls
    43  beqz t0, not_n32_scall
    48  LONG_L t0, TI_FLAGS($28) # syscall tracing enabled?
    49  and t0, t1, t0
    50  bnez t0, n32_syscall_trace_entry
    53  dsll t0, v0, 3 # offset into table
    54  ld t2, (sysn32_call_table - (__NR_N32_Linux * 8))(t0)
    58  li t0, -EMAXERRNO - 1 # error?
    59  sltu t0, t0, v0
    [all …]
|
D | scall64-n64.S |
    51  LONG_L t0, TI_FLAGS($28) # syscall tracing enabled?
    52  and t0, t1, t0
    53  bnez t0, syscall_trace_entry
    57  sltiu t0, t2, __NR_64_Linux_syscalls
    58  beqz t0, illegal_syscall
    60  dsll t0, t2, 3 # offset into table
    62  daddu t0, t2, t0
    63  ld t2, (t0) # syscall routine
    68  li t0, -EMAXERRNO - 1 # error?
    69  sltu t0, t0, v0
    [all …]
|
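The n32 and n64 entries show the classic dispatch: range-check the number, scale it by eight to index the syscall table, call through the pointer, then test the return value against the top errno window (the sltu against -EMAXERRNO - 1). A hedged C sketch of that pattern, with an illustrative table and bound:

#include <stdbool.h>

#define EMAXERRNO 1133      /* illustrative bound, not necessarily the kernel's */

typedef long (*syscall_fn)(long, long, long, long, long, long);

/* Sketch of table dispatch plus the "is this an errno?" range test. */
long dispatch_sketch(syscall_fn table[], unsigned long nr_syscalls,
                     unsigned long nr, long a0, long a1, long a2,
                     long a3, long a4, long a5, bool *is_error)
{
    if (nr >= nr_syscalls)
        return -1;                              /* illegal_syscall */

    long ret = table[nr](a0, a1, a2, a3, a4, a5);

    /* Returns in [-EMAXERRNO, -1] are errno values; flag them for the caller. */
    *is_error = (unsigned long)ret > (unsigned long)(-EMAXERRNO - 1);
    return ret;
}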
D | head.S |
    37  mfc0 t0, CP0_STATUS
    38  or t0, ST0_KERNEL_CUMASK|\set|0x1f|\clr
    39  xor t0, 0x1f|\clr
    40  mtc0 t0, CP0_STATUS
    92  PTR_LA t0, 0f
    93  jr t0
    107  lw t0, (t2)
    108  beq t0, t1, dtb_found
    123  PTR_LA t0, __bss_start # clear .bss
    124  LONG_S zero, (t0)
    [all …]
|
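head.S's last two hits are the .bss clearing loop: starting at __bss_start, zero is stored word by word up to the end of the section. A minimal C sketch of that loop, using the linker-provided bounds:

/* Linker-script symbols; extern declarations only. */
extern unsigned long __bss_start[], __bss_stop[];

/* Sketch of the .bss clearing loop: store zero word by word from
 * __bss_start up to __bss_stop. */
static void clear_bss_sketch(void)
{
    for (unsigned long *p = __bss_start; p < __bss_stop; p++)
        *p = 0;
}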
/arch/mips/include/asm/mach-ip27/ |
D | kernel-entry-init.h |
    34  dli t0, 0xffffffffc0000000
    35  dmtc0 t0, CP0_ENTRYHI
    36  li t0, 0x1c000 # Offset of text into node memory
    39  or t1, t1, t0 # Physical load address of kernel text
    40  or t2, t2, t0 # Physical load address of kernel data
    45  li t0, ((PAGE_GLOBAL | PAGE_VALID | CACHE_CACHABLE_COW) >> 6)
    46  or t0, t0, t1
    47  mtc0 t0, CP0_ENTRYLO0 # physaddr, VG, cach exlwr
    48  li t0, ((PAGE_GLOBAL | PAGE_VALID | PAGE_DIRTY | CACHE_CACHABLE_COW) >> 6)
    49  or t0, t0, t2
    [all …]
|
/arch/mips/netlogic/common/ |
D | reset.S |
    59  li t0, LSU_DEFEATURE
    60  mfcr t1, t0
    64  mtcr t1, t0
    66  li t0, ICU_DEFEATURE
    67  mfcr t1, t0
    69  mtcr t1, t0
    71  li t0, SCHED_DEFEATURE
    73  mtcr t1, t0
    82  mfc0 t0, CP0_PAGEMASK, 1
    84  or t0, t1
    [all …]
|
D | smpboot.S |
    61  li t0, CKSEG1ADDR(RESET_VEC_PHYS)
    65  daddu t2, t0
    89  PTR_LA t0, nlm_early_init_secondary
    90  jalr t0
    93  PTR_LA t0, smp_bootstrap
    94  jr t0
    105  mfc0 t0, $15, 1 /* read ebase */
    106  andi t0, 0x1f /* t0 has the processor_id() */
    107  andi t2, t0, 0x3 /* thread num */
    108  sll t0, 2 /* offset in cpu array */
    [all …]
|
/arch/arm/crypto/ |
D | sha512-armv4.pl |
    73  $t0="r9";
    97  mov $t0,$Elo,lsr#14
    101  eor $t0,$t0,$Ehi,lsl#18
    105  eor $t0,$t0,$Elo,lsr#18
    107  eor $t0,$t0,$Ehi,lsl#14
    109  eor $t0,$t0,$Ehi,lsr#9
    111  eor $t0,$t0,$Elo,lsl#23
    113  adds $Tlo,$Tlo,$t0
    114  ldr $t0,[sp,#$Foff+0] @ f.lo
    122  eor $t0,$t0,$t2
    [all …]
|
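sha512-armv4.pl computes the SHA-512 big Sigma1 of e on a 32-bit core, so each 64-bit rotate is assembled from paired shifts of the low and high halves: lsr#14/lsl#18, lsr#18/lsl#14 and lsr#9/lsl#23 correspond to rotations by 14, 18 and 41. With native 64-bit operations the same quantity is simply:

#include <stdint.h>

static inline uint64_t rotr64(uint64_t x, unsigned n)
{
    return (x >> n) | (x << (64 - n));
}

/* SHA-512 big Sigma1, the value the paired 32-bit shifts above build up. */
static inline uint64_t Sigma1(uint64_t e)
{
    return rotr64(e, 14) ^ rotr64(e, 18) ^ rotr64(e, 41);
}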
/arch/mips/alchemy/common/ |
D | sleeper.S |
    56  lw t0, 0(t1)
    57  jalr t0
    93  la t0, 1f
    95  cache 0x14, 0(t0)
    96  cache 0x14, 32(t0)
    97  cache 0x14, 64(t0)
    98  cache 0x14, 96(t0)
    119  la t0, 1f
    121  cache 0x14, 0(t0)
    122  cache 0x14, 32(t0)
    [all …]
|
/arch/mips/include/asm/mach-malta/ |
D | kernel-entry-init.h |
    52  li t0, ((MIPS_SEGCFG_MK << MIPS_SEGCFG_AM_SHIFT) | \
    58  or t0, t2
    59  mtc0 t0, CP0_SEGCTL0
    62  li t0, ((MIPS_SEGCFG_MUSUK << MIPS_SEGCFG_AM_SHIFT) | \
    69  ins t0, t1, 16, 3
    70  mtc0 t0, CP0_SEGCTL1
    73  li t0, ((MIPS_SEGCFG_MUSUK << MIPS_SEGCFG_AM_SHIFT) | \
    79  or t0, t2
    80  mtc0 t0, CP0_SEGCTL2
    83  mfc0 t0, $16, 5
    [all …]
|
/arch/csky/abiv2/ |
D | strcmp.S |
    18  ldw t0, (a3, 0)
    21  cmpne t0, t1
    24  tstnbz t0
    28  ldw t0, (a3, 4)
    30  cmpne t0, t1
    32  tstnbz t0
    35  ldw t0, (a3, 8)
    37  cmpne t0, t1
    39  tstnbz t0
    42  ldw t0, (a3, 12)
    [all …]
|
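strcmp.S compares the two strings a word at a time: each pair of aligned words is tested for inequality (cmpne) and for a contained zero byte (tstnbz), and only when one of those fires does it drop to a byte loop. A hedged C sketch of that structure, using a portable has-zero-byte test in place of tstnbz:

#include <stdint.h>

/* Portable "does this 32-bit word contain a zero byte?" test. */
static inline int has_zero_byte(uint32_t w)
{
    return ((w - 0x01010101u) & ~w & 0x80808080u) != 0;
}

/* Sketch: compare word by word while the words are equal and free of a
 * terminating NUL, then settle the result byte by byte.  Assumes both
 * strings are 4-byte aligned, as the assembly's fast path does. */
int strcmp_sketch(const char *s1, const char *s2)
{
    const uint32_t *w1 = (const uint32_t *)s1;
    const uint32_t *w2 = (const uint32_t *)s2;

    while (*w1 == *w2 && !has_zero_byte(*w1)) {
        w1++;
        w2++;
    }

    const unsigned char *p1 = (const unsigned char *)w1;
    const unsigned char *p2 = (const unsigned char *)w2;
    while (*p1 && *p1 == *p2) {
        p1++;
        p2++;
    }
    return *p1 - *p2;
}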
D | strcpy.S |
    10  andi t0, a1, 3
    11  bnez t0, 11f
    87  xtrb0 t0, a2
    88  st.b t0, (a3)
    89  bez t0, 10f
    90  xtrb1 t0, a2
    91  st.b t0, (a3, 1)
    92  bez t0, 10f
    93  xtrb2 t0, a2
    94  st.b t0, (a3, 2)
    [all …]
|
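strcpy.S checks the source alignment up front (andi t0, a1, 3) and, once the word loop reaches the word holding the terminator, stores its bytes one at a time (xtrb0..xtrb3 plus st.b) until the NUL. A rough C sketch of that word-then-bytes copy, with the alignment handling simplified:

#include <stdint.h>

/* Portable "word contains a zero byte" test. */
static inline int word_has_zero(uint32_t w)
{
    return ((w - 0x01010101u) & ~w & 0x80808080u) != 0;
}

/* Sketch of a word-at-a-time strcpy: copy whole words until one holds
 * the terminator, then finish that last word byte by byte (the role the
 * xtrb/st.b sequence plays above).  Assumes both pointers are 4-byte
 * aligned, the fast-path precondition. */
char *strcpy_sketch(char *dst, const char *src)
{
    char *ret = dst;
    const uint32_t *ws = (const uint32_t *)src;
    uint32_t *wd = (uint32_t *)dst;

    while (!word_has_zero(*ws))
        *wd++ = *ws++;

    const char *s = (const char *)ws;
    char *d = (char *)wd;
    while ((*d++ = *s++) != '\0')
        ;
    return ret;
}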
/arch/mips/net/ |
D | bpf_jit_asm.S |
    49  slti t0, offset, 0; \
    50  bgtz t0, bpf_slow_path_##TYPE##_neg; \
    56  slt t0, $r_s0, offset; \
    57  bgtz t0, bpf_slow_path_##TYPE; \
    70  wsbh t0, $r_A
    71  rotr $r_A, t0, 16
    73  sll t0, $r_A, 24
    76  or t0, t0, t1
    79  or t0, t0, t2
    81  or $r_A, t0, t1
    [all …]
|
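bpf_jit_asm.S's later hits convert a just-loaded packet word to host order: wsbh followed by a 16-bit rotate is the MIPS R2 idiom for a full 32-bit byte swap, and the sll/or sequence builds the same swap from shifts and masks on older cores. The C equivalent of that shift-and-mask fallback:

#include <stdint.h>

/* 32-bit byte swap composed from shifts and masks, the pre-R2 fallback
 * that the sll/or sequence above implements. */
static inline uint32_t swab32_sketch(uint32_t x)
{
    return (x << 24) |
           ((x & 0x0000ff00u) << 8) |
           ((x & 0x00ff0000u) >> 8) |
           (x >> 24);
}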
/arch/mips/cavium-octeon/ |
D | octeon-memcpy.S |
    105  #undef t0
    109  #define t0 $8 macro
    185  EXC( LOAD t0, UNIT(0)(src), l_exc)
    190  EXC( STORE t0, UNIT(0)(dst), s_exc_p16u)
    194  EXC( LOAD t0, UNIT(4)(src), l_exc_copy)
    198  EXC( STORE t0, UNIT(4)(dst), s_exc_p12u)
    204  EXC( LOAD t0, UNIT(-8)(src), l_exc_copy_rewind16)
    208  EXC( STORE t0, UNIT(-8)(dst), s_exc_p8u)
    212  EXC( LOAD t0, UNIT(-4)(src), l_exc_copy_rewind16)
    216  EXC( STORE t0, UNIT(-4)(dst), s_exc_p4u)
    [all …]
|
/arch/ia64/lib/ |
D | memcpy.S |
    32  # define t0 r18 macro
    54  or t0=in0,in1
    57  or t0=t0,in2
    78  and t0=0x7,t0
    81  cmp.ne p6,p0=t0,r0
    185  and t0=-8,src // t0 = src & ~7
    188  ld8 t0=[t0] // t0 = 1st source word
    198  shr.u t0=t0,t2
    204  or t0=t0,t1
    208  (p3) st1 [dst]=t0,1
    [all …]
|
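memcpy.S ORs its destination, source and length together and tests the low three bits: only when all three are multiples of eight does it take the straightforward 8-byte-word loop, otherwise it falls through to the shift-and-merge path visible from line 185 onward. The dispatch test is cheap to express in C (a sketch, not the ia64 routine itself):

#include <stddef.h>
#include <stdint.h>

/* Sketch of the fast-path test: one OR of dst, src and len, then a
 * single mask, decides whether an 8-byte-at-a-time copy is safe. */
static void *memcpy_sketch(void *dst, const void *src, size_t len)
{
    if ((((uintptr_t)dst | (uintptr_t)src | len) & 0x7) == 0) {
        uint64_t *d = dst;
        const uint64_t *s = src;
        for (size_t i = 0; i < len / 8; i++)
            d[i] = s[i];
    } else {
        unsigned char *d = dst;
        const unsigned char *s = src;
        for (size_t i = 0; i < len; i++)
            d[i] = s[i];
    }
    return dst;
}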
/arch/mips/lib/ |
D | csum_partial.S |
    25  #undef t0
    29  #define t0 $8 macro
    121  lbu t0, (src)
    124  sll t0, t0, 8
    126  ADDC(sum, t0)
    134  lhu t0, (src)
    136  ADDC(sum, t0)
    148  LOAD32 t0, 0x00(src)
    150  ADDC(sum, t0)
    159  ld t0, 0x00(src)
    [all …]
|
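csum_partial.S folds leading odd bytes and halfwords into the sum, then accumulates 32-bit words (or doublewords on 64-bit kernels) with the carry wrapped back in; that wrap is what the ADDC() macro does. A plain C sketch of the accumulate-with-carry core:

#include <stdint.h>
#include <stddef.h>

/* Add a 32-bit value into a running one's-complement sum, folding the
 * carry back in - the role of the ADDC() macro in the assembly. */
static inline uint32_t addc(uint32_t sum, uint32_t v)
{
    sum += v;
    if (sum < v)            /* carry out of bit 31 */
        sum += 1;
    return sum;
}

/* Sketch of csum_partial over 4-byte-aligned data whose length is a
 * multiple of 4; the real routine also handles odd bytes, halfwords and
 * misalignment before reaching the word loop. */
uint32_t csum_partial_sketch(const void *buf, size_t len, uint32_t sum)
{
    const uint32_t *p = buf;
    for (size_t i = 0; i < len / 4; i++)
        sum = addc(sum, p[i]);
    return sum;
}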
/arch/alpha/lib/ |
D | strchr.S |
    23  ldq_u t0, 0(a0) # .. e1 : load first quadword
    29  cmpbge zero, t0, t2 # .. e1 : bits set iff byte == zero
    35  xor t0, a1, t1 # .. e1 : make bytes == c zero
    37  or t2, t3, t0 # e1 : bits set iff char match or zero match
    38  andnot t0, t4, t0 # e0 : clear garbage bits
    39  bne t0, $found # .. e1 (zdb)
    41  $loop: ldq t0, 8(v0) # e0 :
    44  xor t0, a1, t1 # .. e1 (ev5 data stall)
    45  cmpbge zero, t0, t2 # e0 : bits set iff byte == 0
    47  or t2, t3, t0 # e0 :
    [all …]
|
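strchr.S scans a quadword at a time: cmpbge against zero flags NUL bytes, the XOR with the broadcast character followed by the same test flags matching bytes, and the OR of the two masks ends the loop (with andnot discarding garbage bits from the partial first word). A portable C sketch of the same word-at-a-time scan, assuming an 8-byte-aligned start:

#include <stdint.h>
#include <stddef.h>

#define ONES  0x0101010101010101ull
#define HIGHS 0x8080808080808080ull

/* Non-zero iff some byte of w is zero. */
static inline uint64_t zero_bytes(uint64_t w)
{
    return (w - ONES) & ~w & HIGHS;
}

/* Sketch of a quadword-at-a-time strchr: stop on the first word that
 * holds either a NUL or the wanted character, then resolve it bytewise. */
char *strchr_sketch(const char *s, int c)
{
    uint64_t pattern = (uint8_t)c * ONES;   /* broadcast c to every byte */
    const uint64_t *w = (const uint64_t *)s;

    while (!zero_bytes(*w) && !zero_bytes(*w ^ pattern))
        w++;

    const char *p = (const char *)w;
    for (;; p++) {
        if (*p == (char)c)
            return (char *)p;
        if (*p == '\0')
            return NULL;
    }
}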
D | stxncpy.S |
    59  mskql t0, a1, t0 # e0 : assemble the first output word
    61  or t0, t3, t0 # e0 :
    69  stq_u t0, 0(a0) # e0 :
    71  ldq_u t0, 0(a1) # e0 :
    74  cmpbge zero, t0, t8 # .. e1 (stall)
    100  zapnot t0, t8, t0 # e0 : clear src bytes > null
    102  or t0, t1, t0 # e1 :
    104  1: stq_u t0, 0(a0) # e0 :
    123  and a0, 7, t0 # .. e1 : find dest misalignment
    125  addq a2, t0, a2 # .. e1 : bias count by dest misalignment
    [all …]
|