/arch/xtensa/lib/ |
D | strnlen_user.S |
     82  bnone a9, a5, .Lz0 # if byte 0 is zero
     83  bnone a9, a6, .Lz1 # if byte 1 is zero
     84  bnone a9, a7, .Lz2 # if byte 2 is zero
     85  bnone a9, a8, .Lz3 # if byte 3 is zero
     96  bnone a9, a5, .Lz0 # if byte 0 is zero
     97  bnone a9, a6, .Lz1 # if byte 1 is zero
    110  # the zero byte in order to include the NULL terminator in the count.
    112  .Lz3: # byte 3 is zero
    113  addi a4, a4, 3 # point to zero byte
    114  .Lz0: # byte 0 is zero
    [all …]
|
D | strncpy_user.S |
     64  beqz a4, .Lret # if len is zero
     81  beqz a9, .Lret # if byte 0 is zero
     84  beqz a4, .Lret # if len is zero
     91  beqz a9, .Lret # if byte 0 is zero
     94  beqz a4, .Lret # if len is zero
     98  beqz a9, .Lret # if byte 0 is zero
    122  bnone a9, a5, .Lz0 # if byte 0 is zero
    123  bnone a9, a6, .Lz1 # if byte 1 is zero
    124  bnone a9, a7, .Lz2 # if byte 2 is zero
    126  bnone a9, a8, .Lz3 # if byte 3 is zero
    [all …]
|
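The xtensa routines above check each byte of a loaded word against zero with per-byte masks: bnone branches when the word has no bits set under the mask held in a5..a8. Below is a minimal C sketch of that per-byte test, assuming little-endian byte order; zero_byte_index() is a hypothetical helper, not a kernel function.

#include <stdint.h>

/*
 * Sketch only: return the index of the first zero byte in a 32-bit word,
 * or -1 if there is none.  This mirrors the bnone a9, a5/a6/a7/a8 tests
 * above, with a5..a8 assumed to hold one-byte masks.
 */
static inline int zero_byte_index(uint32_t word)
{
	static const uint32_t mask[4] = {
		0x000000ffu, 0x0000ff00u, 0x00ff0000u, 0xff000000u
	};
	int i;

	for (i = 0; i < 4; i++)
		if ((word & mask[i]) == 0)	/* "bnone": no bits set in this byte */
			return i;		/* this byte is the NUL terminator */
	return -1;				/* no zero byte in this word */
}
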
/arch/arm/mach-zynq/ |
D | platsmp.c |
     46  static u8 __iomem *zero;  in zynq_cpun_start() local
     53  zero = ioremap(0, trampoline_code_size);  in zynq_cpun_start()
     54  if (!zero) {  in zynq_cpun_start()
     59  zero = (__force u8 __iomem *)PAGE_OFFSET;  in zynq_cpun_start()
     68  memcpy((__force void *)zero, &zynq_secondary_trampoline,  in zynq_cpun_start()
     70  writel(address, zero + trampoline_size);  in zynq_cpun_start()
     77  iounmap(zero);  in zynq_cpun_start()
|
/arch/nios2/kernel/ |
D | insnemu.S |
    127  stw zero, 0(sp) /* Save zero on stack to avoid special case for r0. */
    224  bne r7, zero, multiply
    274  bne r7, zero, unsigned_division
    282  bge r3,zero,dividend_is_nonnegative
    283  sub r3, zero, r3 /* -r3 */
    285  bge r5, zero, divisor_is_nonnegative
    286  sub r5, zero, r5 /* -r5 */
    316  cmplt r7, r3, zero /* r7 = MSB of r3 */
    337  bne r14, zero, divide_loop
    352  bge r17, zero, quotient_is_nonnegative
    [all …]
|
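The nios2 emulation above handles signed division by stripping the signs first (sub rX, zero, rX negates a negative operand), running an unsigned shift-and-subtract loop, and fixing up the sign of the quotient at the end. A rough C sketch of that flow; the helper names are illustrative and divide-by-zero is not handled here.

#include <stdint.h>

/* plain restoring (shift-and-subtract) division, as in divide_loop above;
 * behaviour is left undefined here for d == 0 */
static uint32_t udiv32(uint32_t n, uint32_t d)
{
	uint32_t q = 0, r = 0;
	int i;

	for (i = 31; i >= 0; i--) {
		r = (r << 1) | ((n >> i) & 1);
		if (r >= d) {
			r -= d;
			q |= 1u << i;
		}
	}
	return q;
}

static int32_t sdiv32(int32_t dividend, int32_t divisor)
{
	/* negate negative operands, as "sub r3, zero, r3" does above */
	uint32_t n = dividend < 0 ? 0u - (uint32_t)dividend : (uint32_t)dividend;
	uint32_t d = divisor  < 0 ? 0u - (uint32_t)divisor  : (uint32_t)divisor;
	uint32_t q = udiv32(n, d);

	/* the quotient is negative iff exactly one input was negative */
	uint32_t uq = (dividend < 0) != (divisor < 0) ? 0u - q : q;

	return (int32_t)uq;
}
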
/arch/tile/mm/ |
D | migrate_32.S |
    120  move r0, zero /* cache_pa */
    121  move r1, zero
    124  auli r2, zero, ha16(HV_FLUSH_EVICT_L2) /* cache_control */
    128  move r4, zero /* tlb_va */
    129  move r5, zero /* tlb_length */
    132  move r6, zero /* tlb_pgsize */
    133  move r7, zero /* tlb_cpumask */
    136  move r8, zero /* asids */
    137  move r9, zero /* asidcount */
|
D | migrate_64.S |
    105  move r0, zero /* cache_pa */
    114  move r3, zero /* tlb_va */
    117  move r4, zero /* tlb_length */
    118  move r5, zero /* tlb_pgsize */
    121  move r6, zero /* tlb_cpumask */
    122  move r7, zero /* asids */
    125  move r8, zero /* asidcount */
|
/arch/alpha/lib/ |
D | strrchr.S |
     21  zapnot a1, 1, a1 # e0 : zero extend our test character
     22  mov zero, t6 # .. e1 : t6 is last match aligned addr
     24  mov zero, t8 # .. e1 : t8 is last match byte compare mask
     32  cmpbge zero, t0, t1 # .. e1 : bits set iff byte == zero
     35  xor t0, a1, t2 # e0 : make bytes == c zero
     36  cmpbge zero, t4, t4 # .. e1 : bits set iff byte is garbage
     37  cmpbge zero, t2, t3 # e0 : bits set iff byte == c
     49  cmpbge zero, t0, t1 # .. e1 : bits set iff byte == zero
     50  cmpbge zero, t2, t3 # e0 : bits set iff byte == c
     84  mov zero, v0 # e0 :
|
D | strchr.S |
     21  zapnot a1, 1, a1 # e0 : zero extend the search character
     28  cmpbge zero, t0, t2 # .. e1 : bits set iff byte == zero
     32  cmpbge zero, t4, t4 # .. e1 : bits set iff byte is garbage
     34  xor t0, a1, t1 # .. e1 : make bytes == c zero
     35  cmpbge zero, t1, t3 # e0 : bits set iff byte == c
     36  or t2, t3, t0 # e1 : bits set iff char match or zero match
     44  cmpbge zero, t0, t2 # e0 : bits set iff byte == 0
     45  cmpbge zero, t1, t3 # .. e1 : bits set iff byte == c
     67  mov zero, v0 # e0 :
|
D | ev67-strrchr.S |
     39  mov zero, t6 # E : t6 is last match aligned addr
     42  mov zero, t8 # E : t8 is last match byte compare mask
     54  cmpbge zero, t0, t1 # E : bits set iff byte == zero
     56  xor t0, a1, t2 # E : make bytes == c zero
     57  cmpbge zero, t4, t4 # E : bits set iff byte is garbage
     59  cmpbge zero, t2, t3 # E : bits set iff byte == c
     76  cmpbge zero, t0, t1 # E : bits set iff byte == zero
     77  cmpbge zero, t2, t3 # E : bits set iff byte == c
    102  subq t5, t2, t5 # E : Normalize leading zero count
|
D | ev67-strchr.S |
     48  cmpbge zero, t0, t2 # E : bits set iff byte == zero
     49  cmpbge zero, t4, t4 # E : bits set iff byte is garbage
     53  xor t0, a1, t1 # E : make bytes == c zero
     54  cmpbge zero, t1, t3 # E : bits set iff byte == c
     55  or t2, t3, t0 # E : bits set iff char match or zero match
     71  cmpbge zero, t0, t2 # E : bits set iff byte == 0
     73  cmpbge zero, t1, t3 # E : bits set iff byte == c
|
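In the Alpha scanners above, cmpbge zero, x, m sets one bit in m for every byte of x that equals 0x00 (0 >= byte holds, unsigned, only for a zero byte), and XOR-ing the word with the search character replicated into every byte lane turns "byte == c" into "byte == 0". A portable C sketch of those two masks follows; byte_zero_mask() and repeat_byte() are illustrative stand-ins, not wrappers for the real instruction.

#include <stdint.h>

/* portable stand-in for "cmpbge zero, x, m": one result bit per zero byte */
static inline unsigned byte_zero_mask(uint64_t x)
{
	unsigned mask = 0;
	int i;

	for (i = 0; i < 8; i++)
		if (((x >> (8 * i)) & 0xff) == 0)
			mask |= 1u << i;
	return mask;
}

/* replicate the search character into all eight byte lanes */
static inline uint64_t repeat_byte(uint8_t c)
{
	return 0x0101010101010101ull * c;
}

/*
 * For one 64-bit word w of the string: bits in t1 mark NUL bytes, bits in
 * t3 mark bytes equal to the search character c, mirroring t1/t3 above.
 */
static inline void strchr_word_masks(uint64_t w, uint8_t c,
				     unsigned *t1, unsigned *t3)
{
	*t1 = byte_zero_mask(w);
	*t3 = byte_zero_mask(w ^ repeat_byte(c));
}
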
D | stxncpy.S |
     54  lda t2, -1 # e1 : build a mask against false zero
     59  cmpbge zero, t2, t8 # .. e1 : bits set iff null found
     73  cmpbge zero, t0, t8 # .. e1 (stall)
    128  addq zero, 1, t10 # .. e1 :
    164  or t0, t6, t6 # e1 : mask original data for zero test
    165  cmpbge zero, t6, t8 # e0 :
    174  cmpbge zero, t2, t8 # e0 : find nulls in second partial
    187  cmpbge zero, t2, t8 # .. e1 :
    216  cmpbge zero, t2, t8 # e1 : test new word for eos (stall)
    232  cmpbge zero, t0, t8 # e0 : is the null in this first bit?
    [all …]
|
D | stxcpy.S |
     46  lda t2, -1 # e1 : build a mask against false zero
     51  cmpbge zero, t2, t8 # .. e1 : bits set iff null found
     64  cmpbge zero, t1, t8 # e0 (stall)
    142  cmpbge zero, t6, t8 # .. e1 :
    148  cmpbge zero, t2, t8 # e0 : testing eos
    159  cmpbge zero, t2, t8 # .. e1 :
    186  cmpbge zero, t2, t8 # e0 : test new word for eos
    201  cmpbge zero, t1, t8 # e0 : is the null in this first bit?
    208  cmpbge zero, t1, t8 # .. e1 :
    243  mov zero, t0 # .. e1 :
    [all …]
|
D | ev6-stxcpy.S |
     57  lda t2, -1 # E : build a mask against false zero
     63  cmpbge zero, t2, t8 # E : bits set iff null found
     80  cmpbge zero, t1, t8 # E : (3 cycle stall)
    165  cmpbge zero, t6, t8 # E : (stall)
    171  cmpbge zero, t2, t8 # E : testing eos (stall)
    183  cmpbge zero, t2, t8 # E : (stall for t2)
    212  cmpbge zero, t2, t8 # E : test new word for eos
    228  cmpbge zero, t1, t8 # E : is the null in this first bit? (stall)
    235  cmpbge zero, t1, t8 # E : (stall)
    271  mov zero, t0 # E :
    [all …]
|
D | ev6-stxncpy.S |
     65  lda t2, -1 # E : build a mask against false zero
     71  cmpbge zero, t2, t8 # E : bits set iff null found
     96  cmpbge zero, t0, t8 # E :
    161  addq zero, 1, t10 # E :
    204  or t0, t6, t6 # E : mask original data for zero test (stall)
    206  cmpbge zero, t6, t8 # E :
    216  cmpbge zero, t2, t8 # E : find nulls in second partial
    229  cmpbge zero, t2, t8 # E :
    260  cmpbge zero, t2, t8 # E : test new word for eos
    276  cmpbge zero, t0, t8 # E : is the null in this first bit? (stall)
    [all …]
|
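The "mask against false zero" lines in the stxcpy/stxncpy variants above handle an unaligned source: the first word is loaded from the preceding aligned address, so the bytes sitting before the start of the string are forced to 0xff before the per-byte zero test, and stray 0x00 garbage cannot be mistaken for the terminator. A little-endian C sketch of that masking step; the helper name is hypothetical.

#include <stdint.h>

/*
 * Sketch: "misalign" is the source address modulo 8.  Bytes 0..misalign-1
 * of the aligned load precede the real string start, so they are forced
 * nonzero before looking for the NUL terminator (little-endian assumed).
 */
static inline uint64_t guard_false_zero(uint64_t first_word, unsigned misalign)
{
	uint64_t junk_mask = misalign ? ~0ull >> (8 * (8 - misalign)) : 0;

	return first_word | junk_mask;	/* garbage bytes become 0xff */
}
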
/arch/arm/include/asm/ |
D | tlbflush.h |
    324  const int zero = 0;  in __local_flush_tlb_all() local
    327  tlb_op(TLB_V4_U_FULL | TLB_V6_U_FULL, "c8, c7, 0", zero);  in __local_flush_tlb_all()
    328  tlb_op(TLB_V4_D_FULL | TLB_V6_D_FULL, "c8, c6, 0", zero);  in __local_flush_tlb_all()
    329  tlb_op(TLB_V4_I_FULL | TLB_V6_I_FULL, "c8, c5, 0", zero);  in __local_flush_tlb_all()
    334  const int zero = 0;  in local_flush_tlb_all() local
    341  tlb_op(TLB_V7_UIS_FULL, "c8, c7, 0", zero);  in local_flush_tlb_all()
    351  const int zero = 0;  in __flush_tlb_all() local
    358  tlb_op(TLB_V7_UIS_FULL, "c8, c3, 0", zero);  in __flush_tlb_all()
    368  const int zero = 0;  in __local_flush_tlb_mm() local
    374  tlb_op(TLB_V4_U_FULL, "c8, c7, 0", zero);  in __local_flush_tlb_mm()
    [all …]
|
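Each tlb_op(..., "c8, ...", zero) line above boils down, when the corresponding TLB type is selected, to a CP15 write whose source operand is the zero variable; for the invalidate-all operations the value is ignored, but the instruction still needs a register. A minimal illustrative sketch of that underlying instruction, not the kernel's tlb_op() macro:

/* sketch only: invalidate the entire unified TLB on a classic ARM core */
static inline void local_flush_tlb_all_sketch(void)
{
	const int zero = 0;

	/* CP15 c8, c7, 0 - the source register is required but its value is ignored */
	asm volatile("mcr p15, 0, %0, c8, c7, 0" : : "r" (zero) : "cc");
}
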
/arch/x86/lib/ |
D | csum-partial_64.c |
     57  unsigned long zero;  in do_csum() local
     68  zero = 0;  in do_csum()
     81  : [src] "r" (buff), [zero] "r" (zero),  in do_csum()
     94  "r" (zero), "0" (result));  in do_csum()
|
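In do_csum() above, zero is handed to the inline asm as a register operand so the add-with-carry chain can fold the final carry back into the running sum. A simplified, portable C sketch of the same carry-folding idea, restricted to whole 8-byte words; this is an illustration, not the kernel's optimized checksum routine.

#include <stdint.h>
#include <stddef.h>
#include <string.h>

/* sum len bytes (len assumed to be a multiple of 8) with end-around carry,
 * then fold the 64-bit result down to 16 bits - illustration only */
static uint16_t csum_fold_sketch(const uint8_t *buf, size_t len)
{
	uint64_t sum = 0;

	while (len >= 8) {
		uint64_t w;

		memcpy(&w, buf, 8);
		sum += w;
		if (sum < w)	/* carry out of the 64-bit add ... */
			sum++;	/* ... folded back in, like adc with the zero operand */
		buf += 8;
		len -= 8;
	}

	/* fold 64 -> 32 -> 16 bits, re-adding the carries each time */
	sum = (sum & 0xffffffffu) + (sum >> 32);
	sum = (sum & 0xffffffffu) + (sum >> 32);
	sum = (sum & 0xffffu) + (sum >> 16);
	sum = (sum & 0xffffu) + (sum >> 16);
	return (uint16_t)sum;
}
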
/arch/arc/kernel/ |
D | fpu.c |
     38  const unsigned int zero = 0;  in fpu_save_restore() local
     45  : "r" (zero), "r" (*(readfrom + 1)), "r" (*(readfrom))  in fpu_save_restore()
     53  : "r" (zero), "r" (*(readfrom + 3)), "r" (*(readfrom + 2))  in fpu_save_restore()
|
/arch/m68k/fpsp040/ |
D | do_func.S |
      8  | tbldo.sa. Cases of zero, infinity and NaN are handled in
    101  | Load a signed zero to fp0 and set inex2/ainex
    110  bsr ld_mzero |if neg, load neg zero, return here
    113  | Load a signed zero to fp0; do not set inex2/ainex
    118  bne ld_mzero |if neg, load neg zero
    119  bra ld_pzero |load positive zero
    169  fbeq t_dz2 |if = -1, divide by zero exception
    183  | Test for 1.0 as an input argument, returning +zero. Also check
    256  .long smod_oper | 00,01 norm,zero = nan with operr
    259  .long smod_zro | 01,00 zero,norm = +-zero
    [all …]
|
D | tbldo.S |
     59  .long szero |$01-1 fint zero
     68  .long szero |$02-1 fsinh zero
     77  .long szero |$03-1 fintrz zero
    104  .long szero |$06-1 flognp1 zero
    122  .long szero |$08-1 fetoxm1 zero
    131  .long szero |$09-1 ftanh zero
    140  .long szero |$0a-1 fatan zero
    158  .long szero |$0c-1 fasin zero
    167  .long szero |$0d-1 fatanh zero
    176  .long szero |$0e-1 fsin zero
    [all …]
|
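do_func.S and tbldo.S above form a dispatch table: the class of each operand (normal, zero, infinity, NaN, denormal) selects a handler, so the zero and NaN special cases never reach the generic math code. A hedged C sketch of that table-driven dispatch; the enum and handler names below are illustrative only, not the fpsp040 labels.

#include <stdio.h>

enum fp_class { FP_NORM, FP_ZERO, FP_INF, FP_NAN, FP_DENORM, FP_NCLASS };

typedef void (*fp_handler)(void);

static void op_generic(void)  { puts("norm,norm -> run the real algorithm"); }
static void op_operr(void)    { puts("norm,zero -> NaN, set operand error"); }
static void op_ret_zero(void) { puts("zero,norm -> return signed zero"); }
static void op_prop_nan(void) { puts("NaN input -> propagate the NaN"); }

/* one handler per (source class, destination class) pair, as in tbldo.S */
static const fp_handler dispatch[FP_NCLASS][FP_NCLASS] = {
	[FP_NORM][FP_NORM] = op_generic,
	[FP_NORM][FP_ZERO] = op_operr,
	[FP_ZERO][FP_NORM] = op_ret_zero,
	[FP_NAN][FP_NORM]  = op_prop_nan,
	/* remaining entries omitted from this sketch */
};

int main(void)
{
	dispatch[FP_NORM][FP_ZERO]();	/* the "00,01 norm,zero" case above */
	return 0;
}
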
/arch/unicore32/include/asm/ |
D | tlbflush.h |
     68  const int zero = 0;  in local_flush_tlb_all() local
     72  : : "r" (zero) : "cc");  in local_flush_tlb_all()
     77  const int zero = 0;  in local_flush_tlb_mm() local
     82  : : "r" (zero) : "cc");  in local_flush_tlb_mm()
|
/arch/mips/kernel/ |
D | relocate_kernel.S |
     37  beq s3, zero, 1f
     44  beq s3, zero, 1f
     51  beq s3, zero, 1f
     56  beq s3, zero, process_entry
     67  beq s6, zero, process_entry
     83  LONG_S zero,(t0)
    128  bne s0, zero,1b
|
/arch/mips/alchemy/common/ |
D | sleeper.S |
     77  sw zero, 0x0078(t3) /* sys_slppwr */
     79  sw zero, 0x007c(t3) /* sys_sleep */
    106  sw zero, 0x001c(a0) /* Precharge */
    108  sw zero, 0x0020(a0) /* Auto Refresh */
    110  sw zero, 0x0030(a0) /* Sleep */
    132  sw zero, 0x08c0(a0) /* Precharge */
    134  sw zero, 0x08d0(a0) /* Self Refresh */
    141  beq t2, zero, 2b
    178  sw zero, 0x868(a0) /* mem_sdportcfga */
    194  sw zero, 0x08c8(a0) /* mem_sdautoref */
|
/arch/m68k/math-emu/ |
D | fp_util.S |
     99  clr.l %d1 | sign defaults to zero
    101  jeq fp_l2e_zero | is source zero?
    114  | source is zero
    135  jeq fp_s2e_small | zero / denormal?
    149  | exponent is zero, so explizit bit is already zero too
    173  jeq fp_d2e_small | zero / denormal?
    194  | exponent is zero, so explizit bit is already zero too
|
/arch/tile/kernel/ |
D | relocate_kernel_32.S |
     90  move r0, zero /* cache_pa */
     91  move r1, zero
     94  auli r2, zero, ha16(HV_FLUSH_EVICT_L2) /* cache_control */
     98  move r4, zero /* tlb_va */
     99  move r5, zero /* tlb_length */
    102  move r6, zero /* tlb_pgsize */
    103  move r7, zero /* tlb_cpumask */
    106  move r8, zero /* asids */
    110  move r9, zero /* asidcount */
|
/arch/mips/net/ |
D | bpf_jit_asm.S |
     84  move $r_ret, zero
    105  move $r_ret, zero
    116  move $r_ret, zero
    149  LONG_ADDIU a3, zero, SIZE; \
    157  INT_S zero, (4 * SZREG)($r_sp); \
    168  move $r_ret, zero; \
    256  move $r_ret, zero; \
    284  addiu $r_ret, zero, 1
|