/arch/powerpc/platforms/52xx/
  mpc52xx_sleep.S
     21  lwz r8, 0x14(r6) /* intr->main_mask */
     22  ori r8, r8, 0x1
     23  xori r8, r8, 0x1
     24  stw r8, 0x14(r6)
     28  li r8, 0x1
     29  stw r8, 0x40(r6) /* intr->main_emulate */
     69  lwz r8, 0x4(r4) /* sdram->ctrl */
     71  oris r8, r8, 0x8000 /* mode_en */
     72  stw r8, 0x4(r4)
     75  ori r8, r8, 0x0002 /* soft_pre */
     [all …]

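The ori/xori pair at lines 22-23 is a read-modify-write idiom: set bit 0 to a known 1, then flip it back to 0, clearing the bit without an and-with-complement instruction. A minimal C sketch of the same sequence; the function name and MMIO pointer are hypothetical, only the 0x14 offset and field name come from the fragment:

    #include <stdint.h>

    /* Clear bit 0 of intr->main_mask the way the asm does: OR it to a
     * known 1, then XOR it back to 0. */
    static inline void clear_main_mask_bit0(volatile uint32_t *main_mask)
    {
        uint32_t v = *main_mask;   /* lwz  r8, 0x14(r6) */
        v |= 0x1;                  /* ori  r8, r8, 0x1  */
        v ^= 0x1;                  /* xori r8, r8, 0x1  */
        *main_mask = v;            /* stw  r8, 0x14(r6) */
    }
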
/arch/powerpc/lib/
  strlen_32.S
     55  2: subf r8, r7, r9
     56  and. r8, r8, r6
     58  andc. r8, r8, r9
     60  andc r8, r9, r6
     62  subfe r8, r6, r8
     63  nor r8, r8, r9
     64  cntlzw r8, r8
     66  srwi r8, r8, 3
     67  add r3, r3, r8
     72  orc r8, r8, r8
     [all …]

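This is the word-at-a-time NUL scan: with r7 = 0x01010101 and r6 = 0x80808080, (word - r7) & ~word & r6 is nonzero exactly when the word contains a zero byte, and the cntlzw/srwi pair turns the detection bits into a byte index. A C sketch of the detection step, assuming an aligned big-endian source; the asm's subfe/nor/orc carry games, which pin down the exact byte without a rescan, are replaced here by a plain byte loop:

    #include <stddef.h>
    #include <stdint.h>

    #define ONES  0x01010101u   /* r7 */
    #define HIGHS 0x80808080u   /* r6 */

    /* Word-at-a-time strlen sketch: test four bytes per load; on a hit,
     * fall back to a byte scan to locate the NUL. */
    static size_t strlen_sketch(const char *s)   /* assumed 4-byte aligned */
    {
        const uint32_t *p = (const uint32_t *)s;
        size_t len = 0;

        for (;;) {
            uint32_t w = *p++;
            if ((w - ONES) & ~w & HIGHS) {       /* some byte of w is zero */
                const unsigned char *b = (const unsigned char *)(p - 1);
                while (*b != 0) {
                    b++;
                    len++;
                }
                return len;
            }
            len += 4;
        }
    }
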
  copypage_64.S
     31  srd r8,r5,r11
     33  mtctr r8
     41  srdi r8,r5,7 /* page is copied in 128 byte strides */
     42  addi r8,r8,-1 /* one stride copied outside loop */
     44  mtctr r8
     49  ldu r8,24(r4)
     55  std r8,32(r3)
     65  ld r8,64(r4)
     71  std r8,96(r3)
     81  ldu r8,128(r4)
     [all …]

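The copy runs in 128-byte strides: srdi computes the stride count for mtctr, one stride is peeled off outside the loop, and each iteration issues a run of ld/std pairs with update-form addressing. A C sketch of the stride structure, without the software-pipelined peeling, assuming a 4 KiB page:

    #include <stdint.h>

    #define PAGE_SIZE 4096
    #define STRIDE    128              /* bytes per mtctr-counted iteration */

    /* Copy a page in 128-byte strides, 16 doublewords per stride. */
    static void copy_page_sketch(uint64_t *dst, const uint64_t *src)
    {
        for (unsigned long n = PAGE_SIZE / STRIDE; n > 0; n--) { /* srdi; mtctr */
            for (int i = 0; i < STRIDE / 8; i++)                 /* ldu/std runs */
                *dst++ = *src++;
        }
    }
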
/arch/powerpc/mm/book3s32/
  hash_low.S
     52  lis r8, (mmu_hash_lock - PAGE_OFFSET)@h
     53  ori r8, r8, (mmu_hash_lock - PAGE_OFFSET)@l
     56  11: lwz r6,0(r8)
     59  10: lwarx r6,0,r8
     62  stwcx. r0,0,r8
     69  mfspr r8,SPRN_SPRG_THREAD /* current task's THREAD (phys) */
     71  lwz r5,PGDIR(r8) /* virt page-table root */
     79  lwz r8,0(r5) /* get pmd entry */
     80  rlwinm. r8,r8,0,0,19 /* extract address of pte page */
     82  rlwinm r8,r4,13,19,29 /* Compute pgdir/pmd offset */
     [all …]

  nohash_low.S
     16  lwz r8,TASK_CPU(r2)
     17  oris r8,r8,11
     29  stwcx. r8,0,r9
     48  lwz r8,TASK_CPU(r2)
     49  oris r8,r8,10
     61  stwcx. r8,0,r9

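Both book3s32 fragments are the classic lwarx/stwcx. reservation loop: spin on a plain load until the lock word reads 0, then try to store an owner value atomically (nohash_low.S builds it from the CPU number with oris, i.e. cpu | (n << 16)). A C11 sketch of the same structure; the function name is hypothetical:

    #include <stdatomic.h>

    /* Take a hash-table lock: wait for 0, then CAS in a tagged owner value. */
    static void hash_lock_sketch(atomic_uint *lock, unsigned int cpu)
    {
        unsigned int tag = cpu | (11u << 16);   /* oris r8,r8,11 */

        for (;;) {
            unsigned int expected = 0;
            if (atomic_compare_exchange_weak_explicit(lock, &expected, tag,
                    memory_order_acquire, memory_order_relaxed))
                return;                          /* stwcx. succeeded */
            while (atomic_load_explicit(lock, memory_order_relaxed) != 0)
                ;                                /* 11: lwz r6,0(r8) spin */
        }
    }
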
/arch/arm/lib/
  memset.S
     38  stmfd sp!, {r8, lr}
     41  UNWIND( .save {r8, lr} )
     42  mov r8, r1
     46  stmiage ip!, {r1, r3, r8, lr} @ 64 bytes at a time.
     47  stmiage ip!, {r1, r3, r8, lr}
     48  stmiage ip!, {r1, r3, r8, lr}
     49  stmiage ip!, {r1, r3, r8, lr}
     51  ldmfdeq sp!, {r8, pc} @ Now <64 bytes to go.
     56  stmiane ip!, {r1, r3, r8, lr}
     57  stmiane ip!, {r1, r3, r8, lr}
     [all …]

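The fill pattern is replicated into four registers (r1, r3, r8, lr) so each stmia stores 16 bytes, and four back-to-back stmias clear 64 bytes per pass. A C sketch of the wide-store structure, assuming an aligned destination; the tail path is omitted:

    #include <stddef.h>
    #include <stdint.h>

    /* Fill 64 bytes per outer pass with word stores; the real routine
     * finishes the remainder byte by byte. */
    static void memset_sketch(uint32_t *p, uint32_t pattern, size_t bytes)
    {
        while (bytes >= 64) {                 /* four stmias: 64 bytes/pass */
            for (int i = 0; i < 16; i++)
                p[i] = pattern;
            p += 16;
            bytes -= 64;
        }
    }
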
/arch/powerpc/kernel/vdso32/
  cacheflush.S
     45  subf r8,r6,r4 /* compute length */
     46  add r8,r8,r5 /* ensure we get enough */
     49  srw. r8,r8,r9 /* compute line count */
     51  srwi. r8, r8, L1_CACHE_SHIFT
     56  mtctr r8
     72  subf r8,r6,r4 /* compute length */
     73  add r8,r8,r5
     75  srw. r8,r8,r9 /* compute line count */
     79  mtctr r8

/arch/powerpc/kernel/vdso64/
  cacheflush.S
     38  subf r8,r6,r4 /* compute length */
     39  add r8,r8,r5 /* ensure we get enough */
     41  srd. r8,r8,r9 /* compute line count */
     44  mtctr r8
     55  subf r8,r6,r4 /* compute length */
     56  add r8,r8,r5
     58  srd. r8,r8,r9 /* compute line count */
     61  mtctr r8

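The 32-bit and 64-bit vDSO flush routines above compute the loop count the same way: subtract the line-aligned start from the end, pad the length so a partial trailing line is still counted, and shift right by the cache-line shift to get the mtctr value. A C sketch of that arithmetic, with an assumed 128-byte line size:

    #include <stdint.h>

    #define L1_CACHE_SHIFT 7                      /* assumed: 128-byte lines */
    #define L1_CACHE_BYTES (1u << L1_CACHE_SHIFT)

    /* Number of cache lines covering [start, end). */
    static unsigned long cache_line_count(uintptr_t start, uintptr_t end)
    {
        uintptr_t aligned = start & ~(uintptr_t)(L1_CACHE_BYTES - 1);
        uintptr_t len = (end - aligned) + (L1_CACHE_BYTES - 1); /* enough */
        return len >> L1_CACHE_SHIFT;                           /* srw./srd. */
    }
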
/arch/powerpc/kexec/
  relocate_32.S
     162  1: mflr r8
     163  addi r8, r8, (2f-1b) /* Find the target offset */
     166  mtspr SPRN_SRR0, r8
     182  li r8, 0 /* PageNumber */
     186  rotlwi r3, r8, 28 /* Create EPN (bits 0-3) */
     195  addi r8, r8, 1 /* Increment PN */
     197  cmpwi r8, 8 /* Are we done ? */
     206  1: mflr r8
     207  and r8, r8, r11 /* Get our offset within page */
     208  addi r8, r8, (2f-1b)
     [all …]

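The `1: mflr r8` idiom (after a bl to the label) captures the code's own runtime address, so adding the assemble-time distance `(2f-1b)` yields the runtime address of label 2 wherever the code was copied; masking with r11 keeps just the offset within the current page. A C sketch of the arithmetic only, since the label capture itself has no C equivalent:

    #include <stddef.h>
    #include <stdint.h>

    #define PAGE_SIZE 4096u   /* assumed page size */

    /* Given where "1:" turned out to live at run time and the link-time
     * distance from "1:" to "2:", find the relocated address of "2:". */
    static uintptr_t relocated_label2(uintptr_t runtime_label1,
                                      ptrdiff_t dist_2f_minus_1b)
    {
        return runtime_label1 + dist_2f_minus_1b;  /* addi r8, r8, (2f-1b) */
    }

    static uintptr_t offset_within_page(uintptr_t addr)
    {
        return addr & (PAGE_SIZE - 1);             /* and r8, r8, r11 */
    }
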
/arch/sh/kernel/cpu/sh2a/
  entry.S
     71  mov r2,r8 ! r8 = previous stack top
     74  mov.l @r8+,r2
     75  mov.l @r8+,r0
     76  mov.l @r8+,r1
     84  mov r2,r8 ! r8 = previous stack top
     87  mov.l @r8+,r2 ! old R2
     88  mov.l @r8+,r0 ! old R0
     89  mov.l @r8+,r1 ! old R1
     90  mov.l @r8+,r10 ! old PC
     91  mov.l @r8+,r11 ! old SR
     [all …]

/arch/ia64/lib/
  ip_fast_csum.S
     33  #define ret0 r8
     119  add r8=r16,r17
     122  add r8=r8,r18
     124  add r8=r8,r19
     127  add r8=r8,r15
     129  shr.u r10=r8,32 // now fold sum into short
     130  zxt4 r11=r8
     132  add r8=r10,r11
     134  shr.u r10=r8,16 // yeah, keep it rolling
     135  zxt2 r11=r8
     [all …]

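The checksum is accumulated with wide adds and then folded: shift the high half down (shr.u), zero-extend the low half (zxt4/zxt2), add, and repeat until only 16 bits remain. A C sketch for the 20-byte (ihl == 5) fast path:

    #include <stdint.h>

    /* One's-complement IP header checksum: sum 32-bit words into a wide
     * accumulator, fold down to 16 bits, and complement. */
    static uint16_t ip_fast_csum_sketch(const void *iph)  /* 20-byte header */
    {
        const uint32_t *w = iph;
        uint64_t sum = (uint64_t)w[0] + w[1] + w[2] + w[3] + w[4];

        sum = (sum >> 32) + (uint32_t)sum;     /* shr.u r10=r8,32; zxt4; add */
        sum = (sum >> 32) + (uint32_t)sum;
        sum = (sum >> 16) + (sum & 0xffff);    /* shr.u r10=r8,16; zxt2; add */
        sum = (sum >> 16) + (sum & 0xffff);
        return (uint16_t)~sum;
    }
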
  strncpy_from_user.S
     25  mov r8=0
     34  EX(.Lexit, ld1 r8=[in1],1)
     36  EX(.Lexit, st1 [in0]=r8,1)
     37  cmp.ne p6,p7=r8,r0
     42  (p6) mov r8=in2 // buffer filled up---return buffer length
     43  (p7) sub r8=in1,r9,1 // return string length (excluding NUL character)

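Here r8 doubles as the copied byte and the return value: bytes are copied one at a time until a NUL or the limit, and the routine returns either the buffer length (filled up) or the string length excluding the NUL. A C sketch of the return-value logic, minus the fault handling the EX() macro provides:

    /* Copy a NUL-terminated string from "from", at most max bytes. */
    static long strncpy_from_user_sketch(char *to, const char *from, long max)
    {
        for (long i = 0; i < max; i++) {
            char c = from[i];          /* EX(ld1): may fault in the real code */
            to[i] = c;                 /* EX(st1) */
            if (c == '\0')
                return i;              /* string length, excluding NUL */
        }
        return max;                    /* buffer filled up */
    }
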
/arch/microblaze/lib/
  fastcopy.S
     103  andi r8, r6, 0xfffffffc /* as = s & ~3 */
     105  lwi r11, r8, 0 /* h = *(as + 0) */
     115  lwi r12, r8, 4 /* v = *(as + 4) */
     120  lwi r12, r8, 8 /* v = *(as + 8) */
     125  lwi r12, r8, 12 /* v = *(as + 12) */
     130  lwi r12, r8, 16 /* v = *(as + 16) */
     135  lwi r12, r8, 20 /* v = *(as + 20) */
     140  lwi r12, r8, 24 /* v = *(as + 24) */
     145  lwi r12, r8, 28 /* v = *(as + 28) */
     150  lwi r12, r8, 32 /* v = *(as + 32) */
     [all …]

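For a misaligned source the copy rounds the pointer down (as = s & ~3), then builds every destination word from two aligned loads, h and v, shifted and OR-ed together; csky's memcpy.S further down does the same with its GET_AFTER_BITS macro. A C sketch for one specific case, a big-endian machine with the source one byte past alignment:

    #include <stddef.h>
    #include <stdint.h>

    /* Word copy with source offset 1 on big-endian: each output word is
     * the low 3 bytes of h followed by the top byte of v.  The first
     * load reads the aligned word containing s. */
    static void copy_src_off1_sketch(uint32_t *d, const uint8_t *s, size_t words)
    {
        const uint32_t *as = (const uint32_t *)((uintptr_t)s & ~(uintptr_t)3);
        uint32_t h = *as++;                    /* h = *(as + 0) */

        while (words--) {
            uint32_t v = *as++;                /* v = *(as + 4), ... */
            *d++ = (h << 8) | (v >> 24);       /* shift-combine */
            h = v;
        }
    }
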
/arch/arm/mm/
  abort-lv4t.S
     29  ldr r8, [r4] @ read arm instruction
     31  tst r8, #1 << 20 @ L = 1 -> write?
     33  and r7, r8, #15 << 24
     58  mov r1, r8
     62  tst r8, #1 << 21 @ check writeback bit
     67  and r6, r8, r7
     68  and r9, r8, r7, lsl #1
     70  and r9, r8, r7, lsl #2
     72  and r9, r8, r7, lsl #3
     77  and r9, r8, #15 << 16 @ Extract 'n' from instruction
     [all …]

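The abort fixup reads the faulting instruction into r8 and picks it apart with masks: single-bit tst for the load and writeback flags, four-bit and for register fields. A C sketch of the same field extraction, using the standard ARM load/store encoding:

    #include <stdint.h>

    /* Decode the fields the fixup tests on a faulting load/store. */
    static void decode_fault_insn(uint32_t insn)
    {
        unsigned int load      = (insn >> 20) & 1;    /* tst r8, #1 << 20 */
        unsigned int writeback = (insn >> 21) & 1;    /* tst r8, #1 << 21 */
        unsigned int rn        = (insn >> 16) & 15;   /* and r9, r8, #15 << 16 */

        (void)load; (void)writeback; (void)rn;        /* used by fixup paths */
    }
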
/arch/sh/kernel/
  relocate_kernel.S
     32  mov.l r8, @-r15
     45  stc sr, r8
     46  or r9, r8
     47  ldc r8, sr
     60  stc sr, r8
     61  and r9, r8
     62  ldc r8, sr
     92  stc sr, r8
     93  and r9, r8
     94  ldc r8, sr
     [all …]

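Each stc/or/ldc or stc/and/ldc triple is a read-modify-write of the SH status register: copy SR into r8, set or clear bits with the mask in r9, and load it back. A C sketch of the pattern; sh_read_sr and sh_write_sr are hypothetical stand-ins for the stc/ldc instructions:

    #include <stdint.h>

    extern uint32_t sh_read_sr(void);        /* stands in for: stc sr, r8 */
    extern void sh_write_sr(uint32_t sr);    /* stands in for: ldc r8, sr */

    static void sr_set_bits(uint32_t bits)
    {
        sh_write_sr(sh_read_sr() | bits);       /* stc; or r9, r8; ldc */
    }

    /* Note the asm's r9 already holds the complemented mask to keep. */
    static void sr_clear_bits(uint32_t keep_mask)
    {
        sh_write_sr(sh_read_sr() & keep_mask);  /* stc; and r9, r8; ldc */
    }
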
/arch/ia64/include/asm/
  futex.h
     11  register unsigned long r8 __asm ("r8") = 0; \
     17  : "+r" (r8), "=r" (oldval) \
     20  ret = r8; \
     25  register unsigned long r8 __asm ("r8") = 0; \
     38  : "+r" (r8), "=r" (val), "=&r" (oldval), \
     42  if (unlikely (r8)) \
     45  ret = r8; \
     92  register unsigned long r8 __asm ("r8") = 0;  in futex_atomic_cmpxchg_inatomic()
     100  : "+r" (r8), "=&r" (prev)  in futex_atomic_cmpxchg_inatomic()
     105  return r8;  in futex_atomic_cmpxchg_inatomic()

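Here r8 is not an asm-file register but a C variable pinned to r8 with GCC's explicit-register syntax, because ia64 fault fixups report status in r8; the "+r" operand ties it into the inline asm, and the C code reads it back as the error code. A stripped-down sketch of the pattern, with an empty asm body standing in for the real ld/st/cmpxchg sequences:

    #include <errno.h>

    /* Run an asm body whose fault fixup writes a status into r8, then
     * turn that status into a C return value. */
    static long r8_status_sketch(void)
    {
        register unsigned long r8 __asm__("r8") = 0;

        __asm__ __volatile__("" : "+r"(r8) : : "memory");

        return r8 ? -EFAULT : 0;    /* ret = r8; */
    }
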
/arch/powerpc/boot/
  crt0.S
     71  9: lwz r8,0(r12) /* get tag */
     72  cmpwi r8,0
     74  cmpwi r8,RELA
     78  11: addis r8,r8,(-RELACOUNT)@ha
     79  cmpwi r8,RELACOUNT@l
     110  lwz r8,p_etext-p_base(r10)
     114  cmplw cr0,r9,r8
     121  lwz r8,p_end-p_base(r10)
     125  cmplw cr0,r9,r8
     129  lwz r8,p_pstack-p_base(r10)
     [all …]

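The boot wrapper relocates itself: it scans the dynamic section for the RELA and RELACOUNT tags, then patches that many R_PPC_RELATIVE entries by the load offset. A C sketch of the patch loop the tag scan leads to:

    #include <stdint.h>

    typedef struct {
        uint32_t r_offset;   /* link-time address of the word to patch */
        uint32_t r_info;
        int32_t  r_addend;
    } Elf32_Rela;

    /* Apply R_PPC_RELATIVE relocations after moving by "delta" bytes. */
    static void apply_relas(const Elf32_Rela *rela, uint32_t count,
                            uint32_t delta)
    {
        for (uint32_t i = 0; i < count; i++) {
            uint32_t *where = (uint32_t *)(rela[i].r_offset + delta);
            *where = (uint32_t)rela[i].r_addend + delta;
        }
    }
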
/arch/powerpc/kvm/
  book3s_hv_interrupts.S
     67  ori r8, r9, LPCR_HDICE
     68  mtspr SPRN_LPCR, r8
     70  mfspr r8,SPRN_DEC
     72  extsw r8,r8
     73  mtspr SPRN_HDEC,r8
     74  add r8,r8,r7
     75  std r8,HSTATE_DECEXP(r13)
     112  mfspr r8, SPRN_MMCR2
     138  std r8, HSTATE_MMCR2(r13)
     153  mfspr r8, SPRN_PMC5
     [all …]

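Lines 70-75 hand the decrementer over to the hypervisor decrementer and record its absolute expiry: the 32-bit DEC value is sign-extended (extsw) and added to the current timebase (held in r7). A C sketch of that bookkeeping:

    #include <stdint.h>

    /* Record when the saved decrementer fires, in timebase ticks. */
    static uint64_t dec_expiry_sketch(uint32_t dec_spr, uint64_t timebase_now)
    {
        int64_t dec = (int32_t)dec_spr;   /* extsw r8,r8 */
        return timebase_now + dec;        /* add r8,r8,r7; std ...DECEXP */
    }
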
  book3s_64_slb.S
     43  li r8, 0
     44  stb r8, 3(r11)
     120  li r8, SLB_NUM_BOLTED
     121  stb r8, 3(r11)
     127  li r8, SLBSHADOW_SAVEAREA
     131  LDX_BE r10, r11, r8
     137  addi r8, r8, SHADOW_SLB_ENTRY_LEN

/arch/powerpc/crypto/
  aes-spe-keys.S
     32  xor r8,r8,r8; \
     79  LOAD_KEY(r8,r4,12)
     83  stw r8,12(r3)
     88  mr r14,r8 /* apply LS_BOX to 4th temp */
     95  xor r8,r8,r7
     99  stw r8,12(r3)
     121  LOAD_KEY(r8,r4,12)
     127  stw r8,12(r3)
     141  xor r8,r8,r7
     142  xor r9,r9,r8
     [all …]

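r8 holds the 4th word of the round key, and the xor chain (r8 ^= r7, r9 ^= r8, ...) is the AES key schedule: word 0 absorbs the transformed last word, then each word absorbs its predecessor. A C sketch of one expansion step; ls_box is a hypothetical stand-in for the RotWord/SubWord/Rcon transform the LS_BOX macro applies:

    #include <stdint.h>

    extern uint32_t ls_box(uint32_t w, unsigned int round);   /* hypothetical */

    /* Expand one AES-128 round key in place. */
    static void expand_round_key(uint32_t k[4], unsigned int round)
    {
        k[0] ^= ls_box(k[3], round);   /* apply LS_BOX to 4th temp */
        k[1] ^= k[0];                  /* xor chain: each word absorbs */
        k[2] ^= k[1];                  /* its predecessor */
        k[3] ^= k[2];                  /* xor r8,r8,r7 / xor r9,r9,r8 */
    }
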
/arch/csky/abiv1/
  memcpy.S
     39  stw r8, (sp, 0)
     43  ldw r8, (r3, 8)
     47  stw r8, (r7, 8)
     54  ldw r8, (sp, 0)
     158  stw r8, (sp, 0)
     165  ldw r8, (r3, 8)
     179  mov r10, r8
     180  GET_AFTER_BITS r8 24
     181  or r8, r1
     190  stw r8, (r7, 8)
     [all …]

/arch/arm/mach-omap1/
  ams-delta-fiq-handler.S
     103  mov r8, #2 @ reset FIQ agreement
     104  str r8, [r12, #IRQ_CONTROL_REG_OFFSET]
     109  mov r8, #1
     110  orr r8, r11, r8, lsl r10 @ mask spurious interrupt
     111  str r8, [r12, #IRQ_MIR_REG_OFFSET]
     143  ldr r8, [r12, #OMAP1510_GPIO_DATA_INPUT] @ fetch GPIO input
     149  ands r8, r8, #KEYBRD_DATA_MASK @ check start bit - detected?
     152  @ r8 contains KEYBRD_DATA_MASK, use it
     153  str r8, [r9, #BUF_STATE] @ enter data processing state
     168  @ r8 still contains GPIO input bits
     [all …]

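Masking the spurious interrupt is a one-bit update: shift 1 left by the interrupt number (r10), OR it into the saved mask (r11), and write the result to the mask-interrupt register. A C sketch; mir_reg is a hypothetical pointer to the MIR register:

    #include <stdint.h>

    /* Mask IRQ "irq" by setting its bit in the mask-interrupt register. */
    static uint32_t mask_irq_sketch(volatile uint32_t *mir_reg,
                                    uint32_t saved_mask, unsigned int irq)
    {
        uint32_t m = saved_mask | (1u << irq);   /* orr r8, r11, r8, lsl r10 */
        *mir_reg = m;                            /* str r8, [r12, #IRQ_MIR...] */
        return m;
    }
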
/arch/x86/crypto/
  aegis128-aesni-asm.S
     78  mov LEN, %r8
     79  and $0x1, %r8
     82  mov LEN, %r8
     83  and $0x1E, %r8
     84  add SRC, %r8
     85  mov (%r8), %r9b
     88  mov LEN, %r8
     89  and $0x2, %r8
     92  mov LEN, %r8
     93  and $0x1C, %r8
     [all …]

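This loads a partial (sub-16-byte) block without a byte loop: each set bit of the length selects one power-of-two chunk, and masking the length (len & 0x1E, len & 0x1C, ...) gives the offset where that chunk starts. A C sketch of the same chunking; the destination handling is simplified to a zeroed buffer:

    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    /* Gather a tail of len (< 16) bytes into a zeroed 16-byte block,
     * one power-of-two chunk per set length bit. */
    static void load_partial_sketch(uint8_t dst[16], const uint8_t *src,
                                    size_t len)
    {
        memset(dst, 0, 16);
        if (len & 1)                                   /* and $0x1, %r8 */
            dst[len & 0x1E] = src[len & 0x1E];         /* byte at len & 0x1E */
        if (len & 2)
            memcpy(dst + (len & 0x1C), src + (len & 0x1C), 2);
        if (len & 4)
            memcpy(dst + (len & 0x18), src + (len & 0x18), 4);
        if (len & 8)
            memcpy(dst + (len & 0x10), src + (len & 0x10), 8);
    }
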
/arch/x86/kernel/
  relocate_kernel_64.S
     80  movq %r8, %r12
     86  movq PTR(PA_CONTROL_PAGE)(%rsi), %r8
     103  lea PAGE_SIZE(%r8), %rsp
     106  addq $(identity_mapped - relocate_kernel), %r8
     107  pushq %r8
     209  leaq relocate_kernel(%rip), %r8
     210  movq CP_PA_SWAP_PAGE(%r8), %r10
     211  movq CP_PA_BACKUP_PAGES_MAP(%r8), %rdi
     212  movq CP_PA_TABLE_PAGE(%r8), %rax
     214  lea PAGE_SIZE(%r8), %rsp
     [all …]

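r8 carries the physical address of the kexec control page: the stack is parked one page above it, and the jump into the identity-mapped copy adds the label offset to the page base and branches with pushq/ret. A C sketch of the address arithmetic only (the push/ret branch has no C equivalent); the externs model the link-time labels:

    #include <stdint.h>

    #define PAGE_SIZE 4096u

    extern char relocate_kernel[], identity_mapped[];  /* link-time labels */

    /* Where the copied code's identity_mapped entry lives, given the
     * control page's physical address. */
    static uintptr_t identity_mapped_pa(uintptr_t control_page_pa)
    {
        return control_page_pa +
               (uintptr_t)(identity_mapped - relocate_kernel); /* addq $(...) */
    }

    static uintptr_t control_stack_top(uintptr_t control_page_pa)
    {
        return control_page_pa + PAGE_SIZE;  /* lea PAGE_SIZE(%r8), %rsp */
    }
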
/arch/arm/crypto/
  chacha-scalar-core.S
     37  X8_X10 .req r8 // shared by x8 and x10
     180  __ldrd r8, r9, sp, 32
     182  add X0, X0, r8
     186  _le32_bswap_4x X0, X1, X2, X3, r8
     187  ldmia r12!, {r8-r11}
     188  eor X0, X0, r8
     195  __ldrd r8, r9, sp, 48
     197  add X4, r8, X4, ror #brot
     202  _le32_bswap_4x X4, X5, X6, X7, r8
     211  __ldrd r8, r9, sp, 32
     [all …]

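The fragment is ChaCha's finalization: the initial state words, reloaded from the stack, are added back into the permuted state, byte-swapped to little-endian where needed (_le32_bswap_4x), and eor-ed with the input to produce output (ldmia/eor). A C sketch of that step, leaving out the rotated-register storage trick (ror #brot):

    #include <stdint.h>

    /* Add back the initial state, serialize little-endian, xor with input. */
    static void chacha_final_xor(const uint32_t x[16], const uint32_t init[16],
                                 const uint8_t *in, uint8_t *out)
    {
        for (int i = 0; i < 16; i++) {
            uint32_t w = x[i] + init[i];        /* add X0, X0, r8 */
            for (int b = 0; b < 4; b++)         /* little-endian keystream */
                out[4 * i + b] = in[4 * i + b] ^ (uint8_t)(w >> (8 * b));
        }
    }
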