Home
last modified time | relevance | path

Searched refs: eq (Results 1 – 25 of 85) sorted by relevance

Pages: 1 2 3 4

/arch/powerpc/kernel/
Dcpu_setup_6xx.S194 cror 4*cr0+eq,4*cr0+eq,4*cr1+eq
195 cror 4*cr0+eq,4*cr0+eq,4*cr2+eq
344 cror 4*cr1+eq,4*cr1+eq,4*cr2+eq
346 cror 4*cr0+eq,4*cr0+eq,4*cr3+eq
347 cror 4*cr0+eq,4*cr0+eq,4*cr4+eq
348 cror 4*cr0+eq,4*cr0+eq,4*cr1+eq
349 cror 4*cr0+eq,4*cr0+eq,4*cr5+eq
350 cror 4*cr0+eq,4*cr0+eq,4*cr7+eq
415 cror 4*cr1+eq,4*cr1+eq,4*cr2+eq
417 cror 4*cr0+eq,4*cr0+eq,4*cr3+eq
[all …]
/arch/hexagon/lib/
Dmemset.S42 p0 = cmp.eq(r2, #0)
72 p1 = cmp.eq(r2, #1)
85 p1 = cmp.eq(r2, #2)
98 p1 = cmp.eq(r2, #4)
111 p1 = cmp.eq(r3, #1)
127 p1 = cmp.eq(r2, #8)
138 p1 = cmp.eq(r2, #4)
149 p1 = cmp.eq(r2, #2)
193 p1 = cmp.eq(r2, #1)
209 p0 = cmp.eq(r2, #2)
[all …]
Dmemcpy.S199 p2 = cmp.eq(len, #0); /* =0 */
202 p1 = cmp.eq(ptr_in, ptr_out); /* attempt to overwrite self */
275 p1 = cmp.eq(prolog, #0);
281 nokernel = cmp.eq(kernel,#0);
290 p2 = cmp.eq(kernel, #1); /* skip ovr if kernel == 0 */
360 nokernel = cmp.eq(kernel, #0); /* after adjustment, recheck */
381 p3 = cmp.eq(kernel, rest);
450 noepilog = cmp.eq(epilog,#0);
457 p3 = cmp.eq(epilogdws, #0);
469 p3 = cmp.eq(kernel, #0);
/arch/ia64/lib/
Dstrlen.S102 cmp.eq p6,p0=r0,r0 // sets p6 to true for cmp.and
117 cmp.eq.and p6,p0=8,val1 // p6 = p6 and val1==8
118 cmp.eq.and p6,p0=8,val2 // p6 = p6 and mask==8
128 cmp.eq p8,p9=8,val1 // p6 = val1 had zero (disambiguate)
135 cmp.eq.and p7,p0=8,val1// val1==8?
172 cmp.eq p0,p6=r0,r0 // nullify first ld8 in loop
182 cmp.eq p6,p0=8,val1 // val1==8 ?
Dstrlen_user.S104 cmp.eq p6,p0=r0,r0 // sets p6 (required because of // cmp.and)
120 cmp.eq.and p6,p0=8,val1 // p6 = p6 and val1==8
121 cmp.eq.and p6,p0=8,val2 // p6 = p6 and mask==8
131 cmp.eq p8,p9=8,val1 // p6 = val1 had zero (disambiguate)
138 cmp.eq.and p7,p0=8,val1// val1==8?
169 cmp.eq p0,p6=r0,r0 // nullify first ld8 in loop
179 cmp.eq p6,p0=8,val1 // val1==8 ?
/arch/arm64/lib/
Dstrncmp.S98 ccmp endloop, #0, #0, eq
99 b.eq .Lloop_aligned
106 b.eq .Lnot_limit
177 b.eq .Ltinycmp
181 b.eq .Lstart_align /*the last bytes are equal....*/
191 b.eq .Lrecal_offset
206 ccmp endloop, #0, #0, eq /*has_null is ZERO: no null byte*/
228 csinv endloop, diff, xzr, eq
240 ccmp endloop, #0, #0, eq /*has_null is ZERO: no null byte*/
241 b.eq .Lloopcmp_proc
[all …]
Dmemcmp.S88 b.eq .Lnot_limit
157 b.eq .Ltinycmp
160 b.eq .Lstart_align
170 b.eq .Lrecal_offset
244 b.eq .Lret0
252 b.eq .Ltiny8proc
Dmemset.S87 b.eq .Laligned
106 b.eq 3f
108 b.eq 1f
184 b.eq 2f /* Already aligned. */
Dstrchr.S40 csel x0, x0, xzr, eq
Dcopy_template.S61 b.eq .LSrcAligned
98 b.eq .Ltiny15
100 b.eq 1f
/arch/arm64/kernel/
Dentry.S170 b.eq 1f // TTBR0 access already disabled
257 b.eq 3f
472 b.eq el1_da
474 b.eq el1_ia
476 b.eq el1_undef
478 b.eq el1_sp_pc
480 b.eq el1_sp_pc
482 b.eq el1_undef
528 cinc x24, x24, eq // set bit '0'
597 b.eq el0_svc
[all …]
Dentry32.S46 csel w1, w3, w1, eq
53 csel w1, w3, w1, eq
/arch/hexagon/mm/
Dstrnlen_user.S52 P0 = cmp.eq(mod8,#0);
63 P0 = cmp.eq(tmp1,#0);
70 P0 = cmp.eq(mod8,#0);
84 P0 = vcmpb.eq(dbuf,dcmp);
96 P0 = cmp.eq(tmp1,#32);
/arch/ia64/hp/sim/boot/
Dboot_head.S71 static: cmp.eq p6,p7=PAL_PTCE_INFO,r28
79 1: cmp.eq p6,p7=PAL_FREQ_RATIOS,r28
86 1: cmp.eq p6,p7=PAL_RSE_INFO,r28
93 1: cmp.eq p6,p7=PAL_CACHE_FLUSH,r28 /* PAL_CACHE_FLUSH */
111 1: cmp.eq p6,p7=PAL_PERF_MON_INFO,r28
147 1: cmp.eq p6,p7=PAL_VM_SUMMARY,r28
156 1: cmp.eq p6,p7=PAL_MEM_ATTRIB,r28
/arch/arc/lib/
Dstrlen.S24 mov.eq r7,r4
27 or.eq r12,r12,r1
41 or.eq r12,r12,r1
60 mov.eq r1,r12
72 mov.eq r2,r6
/arch/powerpc/kernel/vdso64/
Dgettimeofday.S66 cror cr0*4+eq,cr0*4+eq,cr1*4+eq
145 cror cr0*4+eq,cr0*4+eq,cr1*4+eq
/arch/powerpc/kernel/vdso32/
Dgettimeofday.S77 cror cr0*4+eq,cr0*4+eq,cr1*4+eq
160 cror cr0*4+eq,cr0*4+eq,cr1*4+eq
/arch/powerpc/mm/
Dslb_low.S191 crset 4*cr7+eq
217 crnot 4*cr0+eq,4*cr0+eq
294 crclr 4*cr0+eq /* set result to "success" */
313 crclr 4*cr0+eq /* set result to "success" */
/arch/ia64/kernel/
Divt.S134 cmp.eq p6,p7=5,r17 // is IFA pointing into to region 5?
148 cmp.eq p7,p6=0,r21 // unused address bits all zeroes?
157 (p7) cmp.eq p6,p7=r17,r0 // was pgd_present(*pgd) == NULL?
164 (p7) cmp.eq.or.andcm p6,p7=r29,r0 // was pud_present(*pud) == NULL?
173 (p7) cmp.eq.or.andcm p6,p7=r20,r0 // was pmd_present(*pmd) == NULL?
413 (p9) cmp.eq.or.andcm p6,p7=IA64_ISR_CODE_LFETCH,r22 // check isr.code field
461 cmp.eq p6,p7=5,r17 // is faulting address in region 5?
478 cmp.eq p7,p6=0,r21 // unused address bits all zeroes?
487 (p7) cmp.eq p6,p7=r17,r0 // was pgd_present(*pgd) == NULL?
494 (p7) cmp.eq.or.andcm p6,p7=r17,r0 // was pud_present(*pud) == NULL?
[all …]
/arch/arm64/mm/
Dcache.S129 b.eq 1f
133 b.eq 2f
193 b.eq __dma_inv_range
/arch/tile/lib/
Dspinlock_32.c249 u32 val, eq, mask; in arch_write_unlock() local
264 eq = __insn_seqb(val, val << (WR_CURR_SHIFT - WR_NEXT_SHIFT)); in arch_write_unlock()
265 val = __insn_mz(eq & mask, val); in arch_write_unlock()
/arch/arm64/include/asm/
Dasm-uaccess.h10 csel \dst, \dst, \addr, eq
/arch/sh/kernel/
Dhead_32.S174 cmp/eq r0, r8 /* Check for valid __MEMORY_START mappings */
178 cmp/eq r9, r10
283 cmp/eq r0, r10
302 cmp/eq #0, r0 ! skip clear if set to zero
/arch/sh/lib/
Dudivsi3_i4i-Os.S51 cmp/eq r5,r0
121 cmp/eq r5,r0
130 cmp/eq r5,r0
Dmemchr.S19 cmp/eq r1,r5

Pages: 1 2 3 4