/arch/sh/kernel/head_64.S:
    179: movi MMUIR_END, r22
    183: bne r21, r22, tr1
    188: movi MMUDR_END, r22
    192: bne r21, r22, tr1
    196: movi MMUIR_TEXT_L, r22   /* PTEL first */
    197: add.l r22, r63, r22      /* Sign extend */
    198: putcfg r21, 1, r22       /* Set MMUIR[0].PTEL */
    199: movi MMUIR_TEXT_H, r22   /* PTEH last */
    200: add.l r22, r63, r22      /* Sign extend */
    201: putcfg r21, 0, r22       /* Set MMUIR[0].PTEH */
    [all …]

/arch/sh/boot/compressed/head_64.S:
    66: movi ITLB_LAST_VAR_UNRESTRICTED+TLB_STEP, r22
    69: bne r21, r22, tr1
    74: movi DTLB_LAST_VAR_UNRESTRICTED+TLB_STEP, r22
    77: bne r21, r22, tr1
    81: movi MMUIR_TEXT_L, r22    /* PTEL first */
    82: putcfg r21, 1, r22        /* Set MMUIR[0].PTEL */
    83: movi MMUIR_TEXT_H, r22    /* PTEH last */
    84: putcfg r21, 0, r22        /* Set MMUIR[0].PTEH */
    88: movi MMUDR_CACHED_L, r22  /* PTEL first */
    89: putcfg r21, 1, r22        /* Set MMUDR[0].PTEL */
    [all …]

/arch/ia64/lib/flush.S:
    34: sub r22=in1,r0,1    // last byte address
    37: shr.u r22=r22,r20   // (last byte address) / (stride size)
    40: sub r8=r22,r23      // number of strides - 1
    85: add r22=in1,in0
    88: sub r22=r22,r0,1    // last byte address
    91: shr.u r22=r22,r20   // (last byte address) / (stride size)
    94: sub r8=r22,r23      // number of strides - 1

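The flush.S hits above are the loop-sizing arithmetic of the ia64 cache-flush routines: r22 becomes the last byte's address (end - 1, or start + len - 1 in the second routine), both endpoints are shifted right by the log2 stride size, and the difference gives the stride count minus one. A minimal C sketch of the same computation; the function and parameter names are mine, not the kernel's:

    #include <stdint.h>

    /* How many cache strides does [start, start + len) touch?
     * stride_shift is log2 of the stride size, which the real code
     * reads from a precomputed per-cache variable. */
    static uint64_t strides_to_flush(uint64_t start, uint64_t len,
                                     unsigned stride_shift)
    {
        uint64_t last = start + len - 1;   /* last byte address  */
        return (last >> stride_shift)      /* last stride index  */
             - (start >> stride_shift)     /* first stride index */
             + 1;
    }
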
/arch/ia64/lib/ip_fast_csum.S:
    48: ld4 r22=[in0],8
    53: add r22=r22,r23
    55: add r20=r20,r22
    104: ld4 r22=[in0],4
    112: add r17=r22,r23

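ip_fast_csum.S accumulates the IPv4 header through 32-bit loads (the ld4/add pairs above) and then folds the carries back into 16 bits. The same one's-complement checksum written portably; this sketches the contract, not the hand-scheduled Itanium code:

    #include <stdint.h>

    /* RFC 1071 checksum over an IPv4 header of ihl 32-bit words:
     * sum the halfwords, fold the carries back in, invert. */
    static uint16_t ip_header_checksum(const void *iph, unsigned ihl)
    {
        const uint16_t *p = iph;
        uint32_t sum = 0;

        for (unsigned i = 0; i < ihl * 2; i++)  /* 2 halfwords per word */
            sum += p[i];
        while (sum >> 16)                       /* fold carry bits */
            sum = (sum & 0xffff) + (sum >> 16);
        return (uint16_t)~sum;
    }
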
/arch/sh/lib64/memcpy.S:
    148: sub r2, r7, r22
    156: ldx.q r22, r6, r0
    167: ldx.q r22, r36, r63
    168: alloco r22, 32
    169: addi r22, 32, r22
    170: ldx.q r22, r19, r23
    171: sthi.q r22, -25, r0
    172: ldx.q r22, r20, r24
    173: ldx.q r22, r21, r25
    174: stlo.q r22, -32, r0
    [all …]

/arch/sh/lib64/copy_user_memcpy.S:
    163: sub r2, r7, r22
    171: ldx.q r22, r6, r0
    182: ! ldx.q r22, r36, r63 ! TAKum03020
    183: alloco r22, 32
    185: addi r22, 32, r22
    186: ldx.q r22, r19, r23
    187: sthi.q r22, -25, r0
    188: ldx.q r22, r20, r24
    189: ldx.q r22, r21, r25
    190: stlo.q r22, -32, r0
    [all …]

/arch/sh/lib64/udivsi3.S:
    12: addz.l r5,r63,r22
    13: nsb r22,r0
    14: shlld r22,r0,r25
    36: mulu.l r18,r22,r20
    45: mulu.l r19,r22,r20
    52: sub r25,r22,r25
    54: mulu.l r19,r22,r20

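udivsi3.S normalizes the divisor with nsb/shlld and then refines a reciprocal with the mulu.l steps shown; that multiply-based scheme is hard to show briefly, so here is the plain shift-subtract division it replaces, to pin down what the routine must compute (a reference sketch, not the SH-5 algorithm):

    #include <stdint.h>

    /* Reference semantics for a software __udivsi3: restoring long
     * division, one quotient bit per step.  The real routine reaches
     * the same quotient via a normalized reciprocal estimate. */
    static uint32_t soft_udivsi3(uint32_t n, uint32_t d)
    {
        uint32_t q = 0, r = 0;

        for (int i = 31; i >= 0; i--) {
            r = (r << 1) | ((n >> i) & 1);  /* bring down next bit */
            if (r >= d) {                   /* divisor fits: emit a 1 */
                r -= d;
                q |= UINT32_C(1) << i;
            }
        }
        return q;    /* d == 0 is undefined, as in the real helper */
    }
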
/arch/sh/lib64/udivdi3.S:
    7: nsb r4,r22
    8: shlld r3,r22,r6
    14: sub r63,r22,r20 // r63 == 64 % 64
    25: addi r22,-31,r0
    93: addi r22,32,r0

/arch/sh/lib64/copy_page.S:
    62: addi r60, 0x80, r22
    71: ldx.q r2, r22, r63 ! prefetch 4 lines hence

/arch/parisc/kernel/pacache.S:
    68: LDREG ITLB_SID_COUNT(%r1), %r22
    91: addib,COND(<=),n -1, %r22, fitdone    /* Outer loop count decr */
    102: addib,COND(>) -1, %r22, fitoneloop   /* Outer loop count decr */
    112: LDREG DTLB_SID_COUNT(%r1), %r22
    135: addib,COND(<=),n -1, %r22, fdtdone   /* Outer loop count decr */
    146: addib,COND(>) -1, %r22, fdtoneloop   /* Outer loop count decr */
    199: rsm PSW_SM_I, %r22    /* No mmgt ops during loop */
    243: mtsm %r22             /* restore I-bit */
    260: rsm PSW_SM_I, %r22    /* No mmgt ops during loop */
    305: mtsm %r22             /* restore I-bit */
    [all …]

/arch/parisc/kernel/sys_parisc32.c:
    20: int r22, int r21, int r20)    (argument in sys32_unimplemented())

/arch/ia64/kernel/ivt.S:
    126: shr.u r22=r21,3
    134: (p8) shr r22=r22,r27
    138: shr.u r18=r22,PGDIR_SHIFT  // get bottom portion of pgd index bit
    153: shr.u r28=r22,PUD_SHIFT    // shift pud index into position
    155: shr.u r18=r22,PMD_SHIFT    // shift pmd index into position
    164: shr.u r18=r22,PMD_SHIFT    // shift pmd index into position
    174: shr.u r19=r22,PAGE_SHIFT   // shift pte index into position
    183: MOV_FROM_IHA(r22)          // get the VHPT address that caused the TLB miss
    191: MOV_TO_IFA(r22, r24)
    239: (p6) ptc.l r22,r27         // purge PTE page translation
    [all …]

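The ivt.S hits are the TLB-miss handler peeling page-table indices out of the faulting virtual address held in r22: one right shift per level (PGDIR, PUD, PMD, PAGE), each result masked down to index its table. Schematically, with made-up shift widths rather than ia64's real configuration:

    #include <stdint.h>

    /* Illustrative 3-level index extraction in the style of the
     * ivt.S math.  These shift values are assumptions for the
     * sketch; ia64 derives its own from the configured page size. */
    enum {
        EX_PAGE_SHIFT = 14,                  /* assume 16 KiB pages */
        EX_PMD_SHIFT  = EX_PAGE_SHIFT + 11,  /* 2048 entries/table  */
        EX_PGD_SHIFT  = EX_PMD_SHIFT + 11,
        EX_IDX_MASK   = (1 << 11) - 1,
    };

    static unsigned pgd_idx(uint64_t va) { return (va >> EX_PGD_SHIFT)  & EX_IDX_MASK; }
    static unsigned pmd_idx(uint64_t va) { return (va >> EX_PMD_SHIFT)  & EX_IDX_MASK; }
    static unsigned pte_idx(uint64_t va) { return (va >> EX_PAGE_SHIFT) & EX_IDX_MASK; }
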
/arch/ia64/kernel/minstate.h:
    73: (pUStk) addl r22=IA64_RBS_OFFSET,r1;       /* compute base of RBS */ \
    76: (pUStk) lfetch.fault.excl.nt1 [r22]; \
    80: (pUStk) mov ar.bspstore=r22;               /* switch to kernel RBS */ \
    108: (pUStk) sub r18=r18,r22;                  /* r18=RSE.ndirty*8 */ \
    178: .mem.offset 0,0; st8.spill [r2]=r22,16; \
    217: (pUStk) sub r16=r18,r22; \
    242: mov ar.bspstore=r22 \

/arch/ia64/kernel/mca_drv_asm.S:
    27: addl r22=IA64_RBS_OFFSET,r2
    29: mov ar.bspstore=r22

/arch/ia64/kernel/fsys.S:
    201: add r22 = IA64_GTOD_WALL_TIME_OFFSET,r20        // wall_time
    207: (p15) add r22 = IA64_GTOD_MONO_TIME_OFFSET,r20  // monotonic_time
    239: ld8 r9 = [r22],IA64_TIME_SN_SPEC_SNSEC_OFFSET   // sec
    241: ld8 r8 = [r22],-IA64_TIME_SN_SPEC_SNSEC_OFFSET  // snsec
    478: addl r22=IA64_RBS_OFFSET,r2     // A compute base of RBS
    482: lfetch.fault.excl.nt1 [r22]     // M0|1 prefetch register backing-store
    497: mov ar.bspstore=r22             // M2 (6 cyc) switch to kernel RBS
    522: sub r22=r19,r18                 // stime before leave kernel
    527: add r20=r20,r22                 // sum stime
    538: SSM_PSR_I(p0, p6, r22)          // M2 we're on kernel stacks now, reenable irqs

/arch/ia64/kernel/entry.S:
    178: adds r22=IA64_TASK_THREAD_KSP_OFFSET,r13
    184: st8 [r22]=sp                    // save kernel stack pointer of old task
    293: mov r22=b1
    308: st8 [r14]=r22,SW(B4)-SW(B1)     // save b1
    381: ld8 r22=[r3],16                 // restore b1
    419: mov b1=r22
    697: MOV_FROM_ITC(pUStk, p9, r22, r19)  // fetch time at leave
    741: mov r22=r0                      // A clear r22
    749: MOV_FROM_PSR(pKStk, r22, r21)   // M2 read PSR now that interrupts are disabled
    786: st8 [r14]=r22                   // M save time at leave
    [all …]

/arch/parisc/include/asm/unistd.h:
    86: #define K_LOAD_ARGS_5(r26,r25,r24,r23,r22) \    (macro argument)
    87:         register unsigned long __r22 __asm__("r22") = (unsigned long)(r22); \
    89: #define K_LOAD_ARGS_6(r26,r25,r24,r23,r22,r21) \    (macro argument)
    91:         K_LOAD_ARGS_5(r26,r25,r24,r23,r22)

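K_LOAD_ARGS_5 pins the fifth syscall argument to r22 with a GCC register-asm variable, the standard way to satisfy a fixed-register calling convention from C. A self-contained sketch of the pattern; the wrapper name is mine, and the ble/r20/r28 details follow the usual PA-RISC Linux syscall convention as I recall it, so treat them as assumptions:

    /* Bind each argument to the ABI register the kernel reads (the
     * fifth lands in r22), then enter the kernel through the gateway
     * page.  The real wrappers declare more clobbers than shown. */
    static long pa_syscall5(long nr, long a, long b, long c, long d, long e)
    {
        register unsigned long r26 __asm__("r26") = a;
        register unsigned long r25 __asm__("r25") = b;
        register unsigned long r24 __asm__("r24") = c;
        register unsigned long r23 __asm__("r23") = d;
        register unsigned long r22 __asm__("r22") = e;   /* arg 5 */
        register unsigned long r20 __asm__("r20") = nr;  /* syscall number */
        register unsigned long r28 __asm__("r28");       /* return value */

        __asm__ volatile(
            "ble 0x100(%%sr2, %%r0)\n\t"
            "nop"
            : "=r"(r28)
            : "r"(r26), "r"(r25), "r"(r24), "r"(r23), "r"(r22), "r"(r20)
            : "r1", "r2", "r31", "memory");
        return (long)r28;
    }
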
/arch/microblaze/lib/uaccess_old.S:
    111: 5: lwi r22, r6, 0x0010 + offset; \
    119: 13: swi r22, r5, 0x0010 + offset; \
    199: swi r22, r1, 24
    222: lwi r22, r1, 24
    242: lwi r22, r1, 24

/arch/powerpc/kernel/idle_book3s.S:
    63: std r22,-8*9(r1)
    108: ld r22,-8*9(r1)
    165: std r22,-8*9(r1)

/arch/csky/abiv2/memcpy.S:
    44: ldw r22, (r1, 8)
    48: stw r22, (r0, 8)

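In memcpy.S (and the memmove.S entry just below), r22 is simply the third word of a four-word unrolled move: a load at offset 8 followed by a store at offset 8. The shape of that loop in C, with the real routine's alignment entry path left out:

    #include <stddef.h>
    #include <stdint.h>

    /* Sketch of the unrolled word copy behind the csky hits: four
     * 32-bit loads, then four stores, 16 bytes per iteration (r22
     * carries the offset-8 word).  Assumes word-aligned pointers. */
    static void *word_copy16(void *dst, const void *src, size_t n)
    {
        uint32_t *d = dst;
        const uint32_t *s = src;

        while (n >= 16) {
            uint32_t w0 = s[0], w1 = s[1], w2 = s[2], w3 = s[3];
            d[0] = w0; d[1] = w1; d[2] = w2; d[3] = w3;
            d += 4; s += 4; n -= 16;
        }
        for (size_t i = 0; i < n; i++)   /* byte tail */
            ((uint8_t *)d)[i] = ((const uint8_t *)s)[i];
        return dst;
    }
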
/arch/csky/abiv2/memmove.S:
    50: ldw r22, (r1, 8)
    54: stw r22, (r0, 8)

/arch/powerpc/lib/memcpy_mcsafe_64.S:
    26: ld r22,STK_REG(R22)(r1)
    98: std r22,STK_REG(R22)(r1)
    123: err2; ld r22,120(r4)
    140: err2; std r22,120(r3)
    180: ld r22,STK_REG(R22)(r1)

/arch/arc/include/asm/unwind.h:
    36: unsigned long r22;    (struct member)
    95: PTREGS_INFO(r22), \

/arch/alpha/include/uapi/asm/ptrace.h:
    33: unsigned long r22;    (struct member)

/arch/arc/include/uapi/asm/ptrace.h:
    45: unsigned long r25, r24, r23, r22, r21, r20;    (struct member)