/arch/powerpc/kernel/vdso32/ |
D | getcpu.S |
    23  cmpwi cr1,r4,0
    30  beqlr cr1
    39  cmpwi cr1, r4, 0
    45  beqlr cr1
|
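The vdso32 hits above show the shape that recurs through most of the PowerPC entries below: a compare deposits its result in condition-register field 1 (cr1) instead of the default field 0, and a later branch or conditional return tests that field, leaving cr0 free for another test. A minimal sketch of the pattern, not taken from any file listed here; the routine name and the use of r3/r4 are invented, and the bare cr1/rN names are assumed to come from ppc_asm.h-style defines (see the "#define cr1 1" hit under /arch/powerpc/boot/ below):

        .text
        .globl  skip_if_zero
    skip_if_zero:
        cmpwi   cr1, r4, 0      /* compare r4 with 0; result goes to CR field 1 */
        beqlr   cr1             /* early return if cr1 says "equal" (r4 == 0)   */
        addi    r3, r3, 1       /* otherwise do some work before returning      */
        blr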
/arch/powerpc/lib/ |
D | memcmp_32.S |
    29  cmplwi cr1, r3, 2
    30  blt- cr1, 4f
    35  beqlr cr1
|
D | memcmp_64.S |
    48  cmpwi cr1,r3,0; \
    102  cmpdi cr1,r5,0
    115  beq cr1,.Lzero
    287  cmpld cr1,rC,rD
    301  bne cr1,.LcmpCD
    312  cmpld cr1,rC,rD
    328  bne cr1,.LcmpCD
    336  cmpld cr1,rC,rD
    345  bne cr1,.LcmpCD
    361  cmpld cr1,rC,rD
    [all …]
|
D | memcpy_64.S |
    42  cmpldi cr1,r5,16
    46  blt cr1,.Lshort_copy
    75  blt cr1,3f
    162  cmpwi cr1,r5,8
    165  ble cr1,6f
    188  cmpldi cr1,r5,16
|
D | mem_64.S |
    25  cmplw cr1,r5,r0
    37  cmplw cr1,r5,r0 /* do we get that far? */
    41  blt cr1,8f
|
D | copyuser_64.S |
    57  cmpldi cr1,r5,16
    70  blt cr1,.Lshort_copy
    92  blt cr1,.Ldo_tail /* if < 16 bytes to copy */
    94  cmpdi cr1,r0,0
    106  beq cr1,72f
    227  6: cmpwi cr1,r5,8
    231  ble cr1,7f
    270  cmpldi cr1,r5,16
|
D | string_64.S |
    86  cmpdi cr1,r4,512
    88  bgt cr1,.Llong_clear
|
/arch/powerpc/kernel/ |
D | cpu_setup_6xx.S |
    189  cmpwi cr1,r10,9
    191  cror 4*cr0+eq,4*cr0+eq,4*cr1+eq
    334  cmplwi cr1,r3,0x000c /* 7400 */
    342  cror 4*cr1+eq,4*cr1+eq,4*cr2+eq
    346  cror 4*cr0+eq,4*cr0+eq,4*cr1+eq
    355  beq cr1,1f
    405  cmplwi cr1,r3,0x000c /* 7400 */
    413  cror 4*cr1+eq,4*cr1+eq,4*cr2+eq
    417  cror 4*cr0+eq,4*cr0+eq,4*cr1+eq
    436  1: beq cr1,2f
|
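The cror hits in cpu_setup_6xx.S above combine several compares: each cmplwi deposits its result in its own CR field, and cror ORs the EQ bits together so a single branch (or conditional return) can test "any of them matched". A minimal sketch of the idiom, not taken from the file; the two candidate values and the function name are invented, with the same assumption about symbolic register/CR names:

        .text
        .globl  is_one_of_two
    is_one_of_two:
        cmplwi  cr0, r3, 0x000c                 /* first candidate  -> CR field 0 */
        cmplwi  cr1, r3, 0x800c                 /* second candidate -> CR field 1 */
        cror    4*cr0+eq, 4*cr0+eq, 4*cr1+eq    /* cr0.eq |= cr1.eq               */
        li      r3, 1                           /* assume a match ...             */
        beqlr   cr0                             /* ... and return 1 if either hit */
        li      r3, 0                           /* no match: return 0             */
        blr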
D | head_8xx.S |
    331  cmpwi cr1, r11, RPN_PATTERN
    332  beq- cr1, FixupDAR /* must be a buggy dcbX, icbi insn. */
    358  cmplwi cr1, r11, (.Ldtlbie - PAGE_OFFSET)@l
    360  cror 4*cr1+eq, 4*cr1+eq, 4*cr7+eq
    361  bne cr1, 1f
    406  cmpli cr1, r11, PAGE_OFFSET@h
    408  blt+ cr1, 3f
    430  cmpwi cr1, r10, 2028 /* Is dcbz? */
    431  beq+ cr1, 142f
    432  cmpwi cr1, r10, 940 /* Is dcbi? */
    [all …]
|
D | reloc_64.S |
    57  cmpdi cr1,r8,0
    59  beq cr1,.Lout
|
D | entry_64.S |
    195  cmpd cr1,r6,r9 /* or is new ESID the same as current ESID? */
    196  cror eq,4*cr1+eq,eq
|
/arch/powerpc/kernel/vdso64/ |
D | getcpu.S |
    22  cmpdi cr1,r4,0
    27  1: beq cr1,2f
|
/arch/s390/lib/ |
D | uaccess.c |
    22  unsigned long cr1, cr7; in debug_user_asce() local
    24  __ctl_store(cr1, 1, 1); in debug_user_asce()
    26  if (cr1 == S390_lowcore.kernel_asce && cr7 == S390_lowcore.user_asce) in debug_user_asce()
    31  exit ? "exit" : "entry", cr1, cr7, in debug_user_asce()
|
/arch/arm/mach-sa1100/ |
D | ssp.c |
    161  ssp->cr1 = Ser4SSCR1; in ssp_save_state()
    177  Ser4SSCR1 = ssp->cr1; in ssp_restore_state()
|
/arch/arm/include/asm/hardware/ |
D | ssp.h | 12 unsigned int cr1; member
|
/arch/powerpc/boot/ |
D | string.S |
    157  cmpw cr1,r0,r5
    160  ble cr1,3b
    203  5: cmpw cr1,r0,r5
    206  ble cr1,3b
|
D | ppc_asm.h | 14 #define cr1 1 macro
|
D | crt0.S |
    171  cmpdi cr1,r8,0
    173  beq cr1,3f
|
/arch/powerpc/mm/book3s32/ |
D | hash_low.S |
    488  cmpwi cr1,r6,1
    491  ble cr1,19f
    576  8: ble cr1,9f /* if all ptes checked */
    581  cmpwi cr1,r6,1
    584  bgt cr1,81b
|
/arch/powerpc/platforms/cell/ |
D | cbe_thermal.c |
    302  union spe_reg cr1; in init_default_values() local
    332  cr1.val = 0x0404040404040404ull; in init_default_values()
    357  out_be64(&pmd_regs->tm_cr1.val, cr1.val); in init_default_values()
|
/arch/arm/include/asm/ |
D | vfp.h | 14 #define FPSCR cr1
|
/arch/s390/kernel/ |
D | head64.S | 46 .quad 0 # cr1: primary space segment table
|
/arch/powerpc/mm/nohash/ |
D | tlb_low_64e.S |
    309  crclr cr1*4+eq /* set cr1.eq = 0 for non-recursive */
    317  2: cmpd cr1,r15,r10 /* recursive lock due to mcheck/crit/etc? */
    318  beq cr1,3b /* unlock will happen if cr1.eq = 0 */
    445  beq cr1,1f /* no unlock if lock was recursively grabbed */
    503  cmpldi cr1,r15,8 /* Check for vmalloc region */
    504  beq+ cr1,tlb_miss_common_e6500
|
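In tlb_low_64e.S above, cr1.eq is not only a compare result: crclr and the later cmpd use it as a one-bit flag ("was the lock grabbed recursively?") that survives across the miss handler and is tested much later with beq cr1. A minimal sketch of a CR bit used as a flag this way, not taken from the file; the labels, r10/r11 and the stand-in "unlock work" are invented, with the same assumption about symbolic CR names:

        .text
        .globl  cr1_flag_sketch
    cr1_flag_sketch:
        crclr   4*cr1+eq            /* clear the flag: "not recursive"         */
        cmpd    cr0, r10, r11       /* some unrelated condition ...            */
        bne     cr0, 1f
        crset   4*cr1+eq            /* ... may set the flag                    */
    1:
        /* much later, one branch tests the saved flag without a new compare */
        beq     cr1, 2f             /* flag set: skip the unlock path          */
        li      r3, 0               /* flag clear: stand-in for unlock work    */
    2:
        blr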
/arch/s390/include/asm/ |
D | ptrace.h | 99 unsigned long cr1; member
|
/arch/powerpc/kvm/ |
D | book3s_segment.S |
    215  cmpwi cr1, r0, 0
    391  beq cr1, 1f
|