/arch/powerpc/lib/ |
D | mem_64.S |
    23  mr r6,r3
    28  stb r4,0(r6)
    29  addi r6,r6,1
    31  sth r4,0(r6)
    32  addi r6,r6,2
    34  stw r4,0(r6)
    35  addi r6,r6,4
    40  4: std r4,0(r6)
    41  std r4,8(r6)
    42  std r4,16(r6)
    [all …]
|
D | checksum_64.S |
    55  rlwimi r5,r6,16,0,15 /* put proto in upper half of len */
    80  srdi. r6,r4,3 /* less than 8 bytes? */
    89  rldicl. r6,r3,64-1,64-2 /* r6 = (r3 & 0x3) >> 1 */
    93  sub r6,r7,r6
    94  mtctr r6
    97  lhz r6,0(r3) /* align to doubleword */
    100  adde r0,r0,r6
    109  srdi. r6,r4,7
    112  srdi r6,r4,6
    113  subi r6,r6,1
    [all …]
|
D | checksum_32.S |
    48  rlwimi r5,r6,16,0,15 /* put proto in upper half of len */
    68  srwi. r6,r4,2
    76  srwi. r6,r4,2 /* # words to do */
    78  1: mtctr r6
    107  addic r0,r6,0
    110  srwi. r6,r5,2
    114  81: lhz r6,4(r3) /* do 2 bytes to get aligned */
    117  91: sth r6,4(r4)
    119  addc r0,r0,r6
    120  srwi. r6,r5,2 /* # words to do */
    [all …]
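
Both checksum files compute the Internet checksum: a 16-bit ones'-complement sum accumulated a word (or doubleword) at a time with carry-propagating adds (the addc/adde hits above), then folded at the end; the "put proto in upper half of len" hit is part of the pseudo-header fold. A minimal C sketch of the same arithmetic, assuming big-endian byte order and a hypothetical helper name:

    #include <stddef.h>
    #include <stdint.h>

    /* Sketch of the 16-bit ones'-complement (Internet) checksum that
     * checksum_32.S/checksum_64.S build out of addc/adde chains. */
    static uint16_t csum_sketch(const uint8_t *buf, size_t len)
    {
        uint32_t sum = 0;

        while (len > 1) {                   /* sum 16-bit words */
            sum += ((uint32_t)buf[0] << 8) | buf[1];
            buf += 2;
            len -= 2;
        }
        if (len)                            /* trailing odd byte */
            sum += (uint32_t)buf[0] << 8;
        while (sum >> 16)                   /* fold carries back in */
            sum = (sum & 0xffff) + (sum >> 16);
        return (uint16_t)~sum;
    }

The srdi./srwi. hits are loop-count setup: the assembly sums eight (or four) bytes per iteration rather than two, which is the point of the hand-written versions.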
|
D | copy_32.S |
    21  stw r7,4(r6); \
    22  stw r8,8(r6); \
    23  stw r9,12(r6); \
    24  stwu r10,16(r6)
    36  stw r7,4(r6); \
    38  stw r8,8(r6); \
    40  stw r9,12(r6); \
    42  stwu r10,16(r6)
    80  addi r6,r3,-4
    83  stwu r4,4(r6)
    [all …]
|
D | string.S |
    34  addi r6,r3,-1
    38  stbu r0,1(r6)
    44  2: stbu r0,1(r6) /* clear it out if so */
    100  addi r6,r3,-1
    102  1: lbzu r3,1(r6)
    123  addi r6,r3,-4
    129  11: stwu r5,4(r6)
    132  andi. r0,r6,3
    134  subf r6,r0,r6
    139  1: stwu r5,4(r6)
    [all …]
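
The addi r6,r3,-4 / stwu r5,4(r6) pairs in string.S and copy_32.S are the classic align-then-store-words fill loop (stwu stores and advances the pointer in one instruction). A C sketch of that shape, with a hypothetical helper name:

    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    /* Align to a word boundary, fill a word per iteration (cf. the
     * stwu loops above), then mop up the tail bytes. */
    static void *memset_sketch(void *s, int c, size_t n)
    {
        uint8_t *p = s;
        uint32_t w = (uint8_t)c * 0x01010101u;  /* replicate fill byte */

        while (((uintptr_t)p & 3) && n) {       /* cf. "andi. r0,r6,3" */
            *p++ = (uint8_t)c;
            n--;
        }
        for (; n >= 4; n -= 4) {
            memcpy(p, &w, 4);                   /* one aligned word store */
            p += 4;
        }
        while (n--)
            *p++ = (uint8_t)c;
        return s;
    }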
|
/arch/powerpc/kernel/ |
D | fsl_booke_entry_mapping.S |
    4  invstr: mflr r6 /* Make it accessible */
    11  tlbsx 0,r6 /* search MSR[IS], SPID=PID0 */
    25  tlbsx 0,r6 /* search MSR[IS], SPID=PID1 */
    33  tlbsx 0,r6 /* Fall through, we had to match */
    47  li r6,0 /* Set Entry counter to 0 */
    49  rlwimi r7,r6,16,4,15 /* Setup MAS0 = TLBSEL | ESEL(r6) */
    54  cmpw r3,r6
    59  skpinv: addi r6,r6,1 /* Increment */
    60  cmpw r6,r9 /* Are we done? */
    64  li r6,0x04
    [all …]
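
Here r6 is the entry counter of an invalidation loop: every TLB entry is invalidated except the one the code is currently running from (cmpw r3,r6 detects it, skpinv: addi r6,r6,1 steps past). The control flow in C, with hypothetical names standing in for the MAS-register programming:

    #include <stdio.h>

    #define TLB1_ENTRIES 16     /* assumed; real code reads the TLB config */

    static void tlb_invalidate_entry(int esel)  /* stand-in for MAS0/tlbwe */
    {
        printf("invalidate TLB1[%d]\n", esel);
    }

    static void invalidate_all_but(int current)
    {
        for (int esel = 0; esel < TLB1_ENTRIES; esel++) {
            if (esel == current)    /* cf. "cmpw r3,r6" */
                continue;           /* keep our own mapping */
            tlb_invalidate_entry(esel);
        }
    }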
|
/arch/arm/mm/ |
D | abort-lv4t.S |
    61  and r6, r8, r7
    63  add r6, r6, r9, lsr #1
    65  add r6, r6, r9, lsr #2
    67  add r6, r6, r9, lsr #3
    68  add r6, r6, r6, lsr #8
    69  add r6, r6, r6, lsr #4
    70  and r6, r6, #15 @ r6 = no. of registers to transfer.
    74  subne r7, r7, r6, lsl #2 @ Undo increment
    75  addeq r7, r7, r6, lsl #2 @ Undo decrement
    85  andne r6, r8, #0xf00 @ { immediate high nibble
    [all …]
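
This sequence counts the registers named in an LDM/STM opcode's 16-bit register list ("r6 = no. of registers to transfer") so the faulted base register can be rewound by the right amount (the subne/addeq hits undo the increment or decrement). The shift-and-add chain is a bit-population count; a standard C formulation of the same count, as a sketch:

    #include <stdint.h>

    /* Count the set bits of the low 16 opcode bits: the LDM/STM
     * register list, one bit per register. */
    static unsigned ldm_reg_count(uint32_t insn)
    {
        uint32_t m = insn & 0xffff;
        m = (m & 0x5555) + ((m >> 1) & 0x5555); /* bit pairs */
        m = (m & 0x3333) + ((m >> 2) & 0x3333); /* nibbles */
        m = (m & 0x0f0f) + ((m >> 4) & 0x0f0f); /* bytes */
        return (m + (m >> 8)) & 0x1f;           /* 0..16 registers */
    }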
|
/arch/score/lib/ |
D | string.S |
    32  cmpi.c r6, 0
    33  mv r9, r6
    38  2: lb r6, [r5]
    39  cmp.c r6, r8
    46  3: lbu r6, [r5, 1]+
    47  4: sb r6, [r4, 1]+
    77  0: lb r6, [r4]
    79  cmp.c r6, r7
    85  1: lb r6, [r4,1]+
    86  cmpi.c r6, 0
    [all …]
|
/arch/arm/lib/ |
D | io-readsb.S |
    32  .Linsb_aligned: stmfd sp!, {r4 - r6, lr}
    41  ldrb r6, [r0]
    46  orr r3, r3, r6, put_byte_3
    47  ldrb r6, [r0]
    52  orr r4, r4, r6, put_byte_2
    53  ldrb r6, [r0]
    58  orr r5, r5, r6, put_byte_1
    59  ldrb r6, [r0]
    64  mov r6, r6, put_byte_0
    65  orr r6, r6, ip, put_byte_1
    [all …]
|
/arch/hexagon/lib/ |
D | memset.S |
    40  r6 = #8 define
    48  r9 = sub(r6, r7) /* bytes until double alignment */
    70  r6 = #1 define
    83  r6 = #2 define
    96  r6 = #4 define
    116  r6 = #8 define
    136  r6 = #4 define
    171  r6 = r0 define
    196  r6 = add(r0, #1) define
    203  p0 = tstbit(r6,#1)
    [all …]
|
/arch/powerpc/platforms/83xx/ |
D | suspend-asm.S |
    67  lwz r6, 4(r4)
    70  stw r6, SS_MEMSAVE+4(r3)
    73  mfspr r6, SPRN_HID1
    77  stw r6, SS_HID+4(r3)
    82  mfspr r6, SPRN_IBCR
    89  stw r6, SS_IBCR(r3)
    96  mfspr r6, SPRN_SPRG2
    102  stw r6, SS_SPRG+8(r3)
    108  mfspr r6, SPRN_DBAT1U
    113  stw r6, SS_DBAT+0x08(r3)
    [all …]
|
/arch/powerpc/mm/ |
D | hash_low_32.S |
    58  11: lwz r6,0(r8)
    59  cmpwi 0,r6,0
    61  10: lwarx r6,0,r8
    62  cmpwi 0,r6,0
    118  lwarx r6,0,r8 /* get linux-style pte, flag word */
    119  andc. r5,r3,r6 /* check access & ~permission */
    125  or r5,r0,r6 /* set accessed/dirty bits */
    128  subf r10,r6,r8 /* create false data dependency */
    130  lwzx r10,r6,r10 /* Get upper PTE word */
    212  addis r6,r7,mmu_hash_lock@ha
    [all …]
|
/arch/sh/lib/ |
D | memset.S |
    16  tst r6,r6
    18  add r6,r4
    20  cmp/gt r6,r0
    26  sub r0,r6
    38  mov r6,r0
    40  shlr r0 ! r0 = r6 >> 3
    48  and r0,r6
    49  tst r6,r6
    53  dt r6
|
D | checksum.S |
    65  addc r0, r6 ! t=0 from previous tst
    66  mov r6, r0
    67  shll8 r6
    70  or r0, r6
    85  addc r0, r6
    87  add #1, r6
    153  addc r0, r6
    161  addc r0, r6
    163  addc r0, r6
    169  mov r6, r0
    [all …]
|
D | memset-sh4.S |
    17  add r6,r4
    18  cmp/gt r6,r0
    24  sub r0,r6
    38  cmp/gt r6,r0 ! (MT) 64 > len => slow loop
    41  mov r6,r0
    57  add #-4, r6
    60  mov r6,r2
    73  add #-0x20, r6
    82  cmp/ge r0, r6
    85  mov r6,r0
    [all …]
|
/arch/m32r/lib/ |
D | strlen.S |
    17  mv r6, r0 || ldi r2, #0
    22  ld r0, @r6+
    28  ld r1, @r6+ || not r4, r0
    32  ld r0, @r6+ || not r4, r1
    43  ldb r1, @r6 || addi r6, #1
    48  ldi r0, #4 || addi r6, #-8
    51  ldb r1, @r6 || addi r6, #1
    63  mv r6, r0
    69  ld r0, @r6+
    75  ld r1, @r6+
    [all …]
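
strlen.S scans a word (four bytes) per ld, and the "not r4, r0" hits feed a zero-byte test on each loaded word. A C sketch of the word-at-a-time idea; whether m32r uses exactly this form of the classic zero-byte trick is an assumption:

    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    static size_t strlen_sketch(const char *s)
    {
        const char *p = s;

        while ((uintptr_t)p & 3) {      /* byte-scan until aligned */
            if (!*p)
                return (size_t)(p - s);
            p++;
        }
        for (;;) {
            uint32_t w;

            memcpy(&w, p, 4);           /* one aligned 4-byte load */
            /* nonzero iff some byte of w is zero */
            if ((w - 0x01010101u) & ~w & 0x80808080u)
                break;
            p += 4;
        }
        while (*p)                      /* pin down the exact byte */
            p++;
        return (size_t)(p - s);
    }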
|
/arch/powerpc/boot/ |
D | string.S |
    30  addi r6,r3,-1
    34  stbu r0,1(r6)
    101  addi r6,r3,-4
    104  stwu r4,4(r6)
    106  andi. r0,r6,3
    108  subf r6,r0,r6
    112  1: stwu r4,4(r6)
    118  addi r6,r6,3
    119  8: stbu r4,1(r6)
    132  addi r6,r3,-4
    [all …]
|
D | div64.S |
    20  lwz r5,0(r3) # get the dividend into r5/r6
    21  lwz r6,4(r3)
    37  andc r11,r6,r10 # ever be too large, only too small)
    46  subfc r6,r10,r6 # take the product from the divisor,
    50  3: cmplw r6,r4
    52  divwu r0,r6,r4 # perform the remaining 32-bit division
    55  subf r6,r10,r6
    58  mr r3,r6 # return the remainder in r3
    77  subfic r6,r5,32
    80  slw r6,r3,r6 # t1 = count > 31 ? 0 : MSW << (32-count)
    [all …]
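
Per its comments, div64.S divides the 64-bit dividend loaded from *r3 by a 32-bit divisor and returns the remainder in r3, built out of 32-bit divwu steps because 32-bit PowerPC has no 64-by-32 divide instruction. A semantic sketch (in-place quotient follows the usual div64 contract; the helper name is hypothetical):

    #include <stdint.h>

    /* Divide *dividend in place, return the remainder
     * (cf. "mr r3,r6  # return the remainder in r3"). */
    static uint32_t div64_sketch(uint64_t *dividend, uint32_t divisor)
    {
        uint32_t rem = (uint32_t)(*dividend % divisor);

        *dividend /= divisor;
        return rem;
    }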
|
/arch/microblaze/kernel/ |
D | hw_exception_handler.S |
    93  lwi r6, r1, PT_R6; \
    314  swi r6, r1, PT_R6
    343  addk r6, r5, r5; /* << 1 */
    344  addk r6, r6, r6; /* << 2 */
    351  lwi r5, r6, TOPHYS(exception_debug_table)
    353  swi r5, r6, TOPHYS(exception_debug_table)
    357  lwi r6, r6, TOPHYS(_MB_HW_ExceptionVectorTable)
    358  bra r6
    365  mfs r6, rmsr;
    367  swi r6, r1, 0; /* RMSR_OFFSET */
    [all …]
|
/arch/powerpc/kernel/vdso64/ |
D | cacheflush.S |
    38  andc r6,r11,r5 /* round low to line bdy */
    39  subf r8,r6,r4 /* compute length */
    46  1: dcbst 0,r6
    47  add r6,r6,r7
    55  andc r6,r11,r5 /* round low to line bdy */
    56  subf r8,r6,r4 /* compute length */
    63  2: icbi 0,r6
    64  add r6,r6,r7
|
/arch/powerpc/kernel/vdso32/ |
D | cacheflush.S |
    38  andc r6,r11,r5 /* round low to line bdy */
    39  subf r8,r6,r4 /* compute length */
    46  1: dcbst 0,r6
    47  add r6,r6,r7
    55  andc r6,r11,r5 /* round low to line bdy */
    56  subf r8,r6,r4 /* compute length */
    63  2: icbi 0,r6
    64  add r6,r6,r7
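
The vdso32 and vdso64 copies are line-for-line identical: round the start address down to a cache-line boundary (andc), derive the length, then walk the range one line per iteration, issuing dcbst for the data side and icbi for the instruction side. The loop shape in C, with a placeholder line size and flush primitive:

    #include <stdint.h>

    enum { LINE_SIZE = 128 };   /* assumed; real code loads it at runtime */

    static void flush_line(uintptr_t p)
    {
        (void)p;                /* dcbst or icbi would be issued here */
    }

    static void flush_range_sketch(uintptr_t start, uintptr_t end)
    {
        uintptr_t p = start & ~(uintptr_t)(LINE_SIZE - 1);  /* cf. andc */

        for (; p < end; p += LINE_SIZE)     /* cf. "add r6,r6,r7" */
            flush_line(p);
    }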
|
/arch/microblaze/lib/ |
D | mulsi3.S |
    18  beqi r6, result_is_zero /* multiply by zero */
    20  xor r4, r5, r6 /* get the sign of the result */
    23  bgei r6, r6_pos
    24  rsubi r6, r6, 0 /* make r6 positive */
    30  srl r6, r6
    33  bneid r6, l2
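
mulsi3.S multiplies in software, as the comments spell out: take the result sign from an XOR of the operands, make both positive, then run a shift-and-add loop driven by srl r6, r6. The same algorithm in C, as a sketch:

    #include <stdint.h>

    static int32_t mulsi3_sketch(int32_t a, int32_t b)
    {
        int neg = (a ^ b) < 0;          /* cf. "xor r4, r5, r6" */
        uint32_t ua = a < 0 ? -(uint32_t)a : (uint32_t)a;
        uint32_t ub = b < 0 ? -(uint32_t)b : (uint32_t)b;
        uint32_t r = 0;

        while (ub) {                    /* cf. the "srl r6, r6" loop */
            if (ub & 1)                 /* add for each set multiplier bit */
                r += ua;
            ua <<= 1;
            ub >>= 1;
        }
        return neg ? -(int32_t)r : (int32_t)r;
    }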
|
/arch/openrisc/kernel/ |
D | head.S |
    59  #define EMERGENCY_PRINT_STORE_GPR6 l.sw 0x28(r0),r6
    60  #define EMERGENCY_PRINT_LOAD_GPR6 l.lwz r6,0x28(r0)
    90  #define EXCEPTION_STORE_GPR6 l.sw 0x74(r0),r6
    91  #define EXCEPTION_LOAD_GPR6 l.lwz r6,0x74(r0)
    459  CLEAR_GPR(r6)
    526  LOAD_SYMBOL_2_GPR(r6,SPR_ITLBMR_BASE(0))
    530  l.mtspr r6,r0,0x0
    533  l.addi r6,r6,1
    595  CLEAR_GPR(r6)
    645  l.mfspr r6,r0,SPR_SR
    [all …]
|
/arch/arm/kernel/ |
D | head-common.S |
    52  ldr r6, =OF_DT_MAGIC @ is it a DTB?
    53  cmp r5, r6
    60  ldr r6, =ATAG_CORE
    61  cmp r5, r6
    83  ldmia r3!, {r4, r5, r6, r7}
    85  1: cmpne r5, r6
    91  1: cmp r6, r7
    92  strcc fp, [r6],#4
    95  ARM( ldmia r3, {r4, r5, r6, r7, sp})
    96  THUMB( ldmia r3, {r4, r5, r6, r7} )
    [all …]
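
The first hits probe what the bootloader handed over: load a known magic constant and compare it against the first word at the tags pointer, first for a device tree blob, then for an ATAG list. A sketch of the DTB half of that check; the flattened-device-tree magic is 0xd00dfeed in big-endian byte order, so a raw little-endian load sees the byte-swapped value (the helper name is hypothetical):

    #include <stdint.h>

    #define FDT_MAGIC_BE 0xd00dfeedu    /* FDT magic as stored (big-endian) */
    #define FDT_MAGIC_LE 0xedfe0dd0u    /* same bytes read little-endian */

    static int looks_like_dtb(uint32_t first_word)
    {
        return first_word == FDT_MAGIC_BE || first_word == FDT_MAGIC_LE;
    }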
|
/arch/powerpc/kvm/ |
D | book3s_hv_rmhandlers.S |
    67  li r6,MSR_IR | MSR_DR
    68  andc r6,r10,r6
    71  mtsrr1 r6
    108  li r6, XICS_QIRR
    112  stbcix r0, r5, r6 /* clear it */
    159  lwz r6, VCPU_PMC + 8(r4)
    169  mtspr SPRN_PMC3, r6
    179  ld r6, VCPU_MMCR + 16(r4)
    181  mtspr SPRN_MMCRA, r6
    204  ld r6, VCPU_SPRG1(r4)
    [all …]
|