/arch/powerpc/kernel/
  cpu_setup_6xx.S
    194  cror 4*cr0+eq,4*cr0+eq,4*cr1+eq
    195  cror 4*cr0+eq,4*cr0+eq,4*cr2+eq
    344  cror 4*cr1+eq,4*cr1+eq,4*cr2+eq
    346  cror 4*cr0+eq,4*cr0+eq,4*cr3+eq
    347  cror 4*cr0+eq,4*cr0+eq,4*cr4+eq
    348  cror 4*cr0+eq,4*cr0+eq,4*cr1+eq
    349  cror 4*cr0+eq,4*cr0+eq,4*cr5+eq
    350  cror 4*cr0+eq,4*cr0+eq,4*cr7+eq
    415  cror 4*cr1+eq,4*cr1+eq,4*cr2+eq
    417  cror 4*cr0+eq,4*cr0+eq,4*cr3+eq
    [all …]

/arch/hexagon/lib/
  memset.S
    42   p0 = cmp.eq(r2, #0)
    72   p1 = cmp.eq(r2, #1)
    85   p1 = cmp.eq(r2, #2)
    98   p1 = cmp.eq(r2, #4)
    111  p1 = cmp.eq(r3, #1)
    127  p1 = cmp.eq(r2, #8)
    138  p1 = cmp.eq(r2, #4)
    149  p1 = cmp.eq(r2, #2)
    193  p1 = cmp.eq(r2, #1)
    209  p0 = cmp.eq(r2, #2)
    [all …]
  memcpy.S
    199  p2 = cmp.eq(len, #0); /* =0 */
    202  p1 = cmp.eq(ptr_in, ptr_out); /* attempt to overwrite self */
    275  p1 = cmp.eq(prolog, #0);
    281  nokernel = cmp.eq(kernel,#0);
    290  p2 = cmp.eq(kernel, #1); /* skip ovr if kernel == 0 */
    360  nokernel = cmp.eq(kernel, #0); /* after adjustment, recheck */
    381  p3 = cmp.eq(kernel, rest);
    450  noepilog = cmp.eq(epilog,#0);
    457  p3 = cmp.eq(epilogdws, #0);
    469  p3 = cmp.eq(kernel, #0);

/arch/ia64/lib/
  strlen.S
    102  cmp.eq p6,p0=r0,r0 // sets p6 to true for cmp.and
    117  cmp.eq.and p6,p0=8,val1 // p6 = p6 and val1==8
    118  cmp.eq.and p6,p0=8,val2 // p6 = p6 and mask==8
    128  cmp.eq p8,p9=8,val1 // p6 = val1 had zero (disambiguate)
    135  cmp.eq.and p7,p0=8,val1 // val1==8?
    172  cmp.eq p0,p6=r0,r0 // nullify first ld8 in loop
    182  cmp.eq p6,p0=8,val1 // val1==8 ?
  strlen_user.S
    104  cmp.eq p6,p0=r0,r0 // sets p6 (required because of // cmp.and)
    120  cmp.eq.and p6,p0=8,val1 // p6 = p6 and val1==8
    121  cmp.eq.and p6,p0=8,val2 // p6 = p6 and mask==8
    131  cmp.eq p8,p9=8,val1 // p6 = val1 had zero (disambiguate)
    138  cmp.eq.and p7,p0=8,val1 // val1==8?
    169  cmp.eq p0,p6=r0,r0 // nullify first ld8 in loop
    179  cmp.eq p6,p0=8,val1 // val1==8 ?

/arch/arm64/lib/
  strncmp.S
    98   ccmp endloop, #0, #0, eq
    99   b.eq .Lloop_aligned
    106  b.eq .Lnot_limit
    177  b.eq .Ltinycmp
    181  b.eq .Lstart_align /*the last bytes are equal....*/
    191  b.eq .Lrecal_offset
    206  ccmp endloop, #0, #0, eq /*has_null is ZERO: no null byte*/
    228  csinv endloop, diff, xzr, eq
    240  ccmp endloop, #0, #0, eq /*has_null is ZERO: no null byte*/
    241  b.eq .Lloopcmp_proc
    [all …]
  memcmp.S
    88   b.eq .Lnot_limit
    157  b.eq .Ltinycmp
    160  b.eq .Lstart_align
    170  b.eq .Lrecal_offset
    244  b.eq .Lret0
    252  b.eq .Ltiny8proc
  memset.S
    87   b.eq .Laligned
    106  b.eq 3f
    108  b.eq 1f
    184  b.eq 2f /* Already aligned. */
  strchr.S
    40   csel x0, x0, xzr, eq
  copy_template.S
    61   b.eq .LSrcAligned
    98   b.eq .Ltiny15
    100  b.eq 1f

/arch/arm64/kernel/
  entry.S
    170  b.eq 1f // TTBR0 access already disabled
    257  b.eq 3f
    472  b.eq el1_da
    474  b.eq el1_ia
    476  b.eq el1_undef
    478  b.eq el1_sp_pc
    480  b.eq el1_sp_pc
    482  b.eq el1_undef
    528  cinc x24, x24, eq // set bit '0'
    597  b.eq el0_svc
    [all …]
  entry32.S
    46   csel w1, w3, w1, eq
    53   csel w1, w3, w1, eq

/arch/hexagon/mm/
  strnlen_user.S
    52   P0 = cmp.eq(mod8,#0);
    63   P0 = cmp.eq(tmp1,#0);
    70   P0 = cmp.eq(mod8,#0);
    84   P0 = vcmpb.eq(dbuf,dcmp);
    96   P0 = cmp.eq(tmp1,#32);

/arch/ia64/hp/sim/boot/
  boot_head.S
    71   static: cmp.eq p6,p7=PAL_PTCE_INFO,r28
    79   1: cmp.eq p6,p7=PAL_FREQ_RATIOS,r28
    86   1: cmp.eq p6,p7=PAL_RSE_INFO,r28
    93   1: cmp.eq p6,p7=PAL_CACHE_FLUSH,r28 /* PAL_CACHE_FLUSH */
    111  1: cmp.eq p6,p7=PAL_PERF_MON_INFO,r28
    147  1: cmp.eq p6,p7=PAL_VM_SUMMARY,r28
    156  1: cmp.eq p6,p7=PAL_MEM_ATTRIB,r28

/arch/arc/lib/
  strlen.S
    24   mov.eq r7,r4
    27   or.eq r12,r12,r1
    41   or.eq r12,r12,r1
    60   mov.eq r1,r12
    72   mov.eq r2,r6

/arch/powerpc/kernel/vdso64/
  gettimeofday.S
    66   cror cr0*4+eq,cr0*4+eq,cr1*4+eq
    145  cror cr0*4+eq,cr0*4+eq,cr1*4+eq

/arch/powerpc/kernel/vdso32/
  gettimeofday.S
    77   cror cr0*4+eq,cr0*4+eq,cr1*4+eq
    160  cror cr0*4+eq,cr0*4+eq,cr1*4+eq

/arch/powerpc/mm/
  slb_low.S
    191  crset 4*cr7+eq
    217  crnot 4*cr0+eq,4*cr0+eq
    294  crclr 4*cr0+eq /* set result to "success" */
    313  crclr 4*cr0+eq /* set result to "success" */

/arch/ia64/kernel/
  ivt.S
    134  cmp.eq p6,p7=5,r17 // is IFA pointing into to region 5?
    148  cmp.eq p7,p6=0,r21 // unused address bits all zeroes?
    157  (p7) cmp.eq p6,p7=r17,r0 // was pgd_present(*pgd) == NULL?
    164  (p7) cmp.eq.or.andcm p6,p7=r29,r0 // was pud_present(*pud) == NULL?
    173  (p7) cmp.eq.or.andcm p6,p7=r20,r0 // was pmd_present(*pmd) == NULL?
    413  (p9) cmp.eq.or.andcm p6,p7=IA64_ISR_CODE_LFETCH,r22 // check isr.code field
    461  cmp.eq p6,p7=5,r17 // is faulting address in region 5?
    478  cmp.eq p7,p6=0,r21 // unused address bits all zeroes?
    487  (p7) cmp.eq p6,p7=r17,r0 // was pgd_present(*pgd) == NULL?
    494  (p7) cmp.eq.or.andcm p6,p7=r17,r0 // was pud_present(*pud) == NULL?
    [all …]

/arch/arm64/mm/
  cache.S
    129  b.eq 1f
    133  b.eq 2f
    193  b.eq __dma_inv_range

/arch/tile/lib/
  spinlock_32.c
    249  u32 val, eq, mask; in arch_write_unlock() local
    264  eq = __insn_seqb(val, val << (WR_CURR_SHIFT - WR_NEXT_SHIFT)); in arch_write_unlock()
    265  val = __insn_mz(eq & mask, val); in arch_write_unlock()

/arch/arm64/include/asm/
  asm-uaccess.h
    10   csel \dst, \dst, \addr, eq

/arch/sh/kernel/
  head_32.S
    174  cmp/eq r0, r8 /* Check for valid __MEMORY_START mappings */
    178  cmp/eq r9, r10
    283  cmp/eq r0, r10
    302  cmp/eq #0, r0 ! skip clear if set to zero

/arch/sh/lib/
  udivsi3_i4i-Os.S
    51   cmp/eq r5,r0
    121  cmp/eq r5,r0
    130  cmp/eq r5,r0
  memchr.S
    19   cmp/eq r1,r5