Home
last modified time | relevance | path

Searched refs: r22 (Results 1 – 25 of 108) sorted by relevance

Pages: 1 2 3 4 5

/arch/sh/kernel/
Dhead_64.S179 movi MMUIR_END, r22
183 bne r21, r22, tr1
188 movi MMUDR_END, r22
192 bne r21, r22, tr1
196 movi MMUIR_TEXT_L, r22 /* PTEL first */
197 add.l r22, r63, r22 /* Sign extend */
198 putcfg r21, 1, r22 /* Set MMUIR[0].PTEL */
199 movi MMUIR_TEXT_H, r22 /* PTEH last */
200 add.l r22, r63, r22 /* Sign extend */
201 putcfg r21, 0, r22 /* Set MMUIR[0].PTEH */
[all …]
/arch/sh/boot/compressed/
Dhead_64.S66 movi ITLB_LAST_VAR_UNRESTRICTED+TLB_STEP, r22
69 bne r21, r22, tr1
74 movi DTLB_LAST_VAR_UNRESTRICTED+TLB_STEP, r22
77 bne r21, r22, tr1
81 movi MMUIR_TEXT_L, r22 /* PTEL first */
82 putcfg r21, 1, r22 /* Set MMUIR[0].PTEL */
83 movi MMUIR_TEXT_H, r22 /* PTEH last */
84 putcfg r21, 0, r22 /* Set MMUIR[0].PTEH */
88 movi MMUDR_CACHED_L, r22 /* PTEL first */
89 putcfg r21, 1, r22 /* Set MMUDR[0].PTEL */
[all …]
/arch/ia64/lib/
Dflush.S34 sub r22=in1,r0,1 // last byte address
37 shr.u r22=r22,r20 // (last byte address) / (stride size)
40 sub r8=r22,r23 // number of strides - 1
85 add r22=in1,in0
88 sub r22=r22,r0,1 // last byte address
91 shr.u r22=r22,r20 // (last byte address) / (stride size)
94 sub r8=r22,r23 // number of strides - 1
Dip_fast_csum.S48 ld4 r22=[in0],8
53 add r22=r22,r23
55 add r20=r20,r22
104 ld4 r22=[in0],4
112 add r17=r22,r23
/arch/sh/lib64/
Dmemcpy.S148 sub r2, r7, r22
156 ldx.q r22, r6, r0
167 ldx.q r22, r36, r63
168 alloco r22, 32
169 addi r22, 32, r22
170 ldx.q r22, r19, r23
171 sthi.q r22, -25, r0
172 ldx.q r22, r20, r24
173 ldx.q r22, r21, r25
174 stlo.q r22, -32, r0
[all …]
Dcopy_user_memcpy.S163 sub r2, r7, r22
171 ldx.q r22, r6, r0
182 ! ldx.q r22, r36, r63 ! TAKum03020
183 alloco r22, 32
185 addi r22, 32, r22
186 ldx.q r22, r19, r23
187 sthi.q r22, -25, r0
188 ldx.q r22, r20, r24
189 ldx.q r22, r21, r25
190 stlo.q r22, -32, r0
[all …]
Dudivsi3.S12 addz.l r5,r63,r22
13 nsb r22,r0
14 shlld r22,r0,r25
36 mulu.l r18,r22,r20
45 mulu.l r19,r22,r20
52 sub r25,r22,r25
54 mulu.l r19,r22,r20
Dudivdi3.S7 nsb r4,r22
8 shlld r3,r22,r6
14 sub r63,r22,r20 // r63 == 64 % 64
25 addi r22,-31,r0
93 addi r22,32,r0
Dcopy_page.S62 addi r60, 0x80, r22
71 ldx.q r2, r22, r63 ! prefetch 4 lines hence
/arch/parisc/kernel/
Dpacache.S68 LDREG ITLB_SID_COUNT(%r1), %r22
91 addib,COND(<=),n -1, %r22, fitdone /* Outer loop count decr */
102 addib,COND(>) -1, %r22, fitoneloop /* Outer loop count decr */
112 LDREG DTLB_SID_COUNT(%r1), %r22
135 addib,COND(<=),n -1, %r22,fdtdone /* Outer loop count decr */
146 addib,COND(>) -1, %r22, fdtoneloop /* Outer loop count decr */
199 rsm PSW_SM_I, %r22 /* No mmgt ops during loop*/
243 mtsm %r22 /* restore I-bit */
260 rsm PSW_SM_I, %r22 /* No mmgt ops during loop*/
305 mtsm %r22 /* restore I-bit */
[all …]
Dsys_parisc32.c20 int r22, int r21, int r20) in sys32_unimplemented() argument
/arch/ia64/kernel/
Divt.S126 shr.u r22=r21,3
134 (p8) shr r22=r22,r27
138 shr.u r18=r22,PGDIR_SHIFT // get bottom portion of pgd index bit
153 shr.u r28=r22,PUD_SHIFT // shift pud index into position
155 shr.u r18=r22,PMD_SHIFT // shift pmd index into position
164 shr.u r18=r22,PMD_SHIFT // shift pmd index into position
174 shr.u r19=r22,PAGE_SHIFT // shift pte index into position
183 MOV_FROM_IHA(r22) // get the VHPT address that caused the TLB miss
191 MOV_TO_IFA(r22, r24)
239 (p6) ptc.l r22,r27 // purge PTE page translation
[all …]
Dminstate.h73 (pUStk) addl r22=IA64_RBS_OFFSET,r1; /* compute base of RBS */ \
76 (pUStk) lfetch.fault.excl.nt1 [r22]; \
80 (pUStk) mov ar.bspstore=r22; /* switch to kernel RBS */ \
108 (pUStk) sub r18=r18,r22; /* r18=RSE.ndirty*8 */ \
178 .mem.offset 0,0; st8.spill [r2]=r22,16; \
217 (pUStk) sub r16=r18,r22; \
242 mov ar.bspstore=r22 \
Dmca_drv_asm.S27 addl r22=IA64_RBS_OFFSET,r2
29 mov ar.bspstore=r22
Dfsys.S201 add r22 = IA64_GTOD_WALL_TIME_OFFSET,r20 // wall_time
207 (p15) add r22 = IA64_GTOD_MONO_TIME_OFFSET,r20 // monotonic_time
239 ld8 r9 = [r22],IA64_TIME_SN_SPEC_SNSEC_OFFSET // sec
241 ld8 r8 = [r22],-IA64_TIME_SN_SPEC_SNSEC_OFFSET // snsec
478 addl r22=IA64_RBS_OFFSET,r2 // A compute base of RBS
482 lfetch.fault.excl.nt1 [r22] // M0|1 prefetch register backing-store
497 mov ar.bspstore=r22 // M2 (6 cyc) switch to kernel RBS
522 sub r22=r19,r18 // stime before leave kernel
527 add r20=r20,r22 // sum stime
538 SSM_PSR_I(p0, p6, r22) // M2 we're on kernel stacks now, reenable irqs
Dentry.S178 adds r22=IA64_TASK_THREAD_KSP_OFFSET,r13
184 st8 [r22]=sp // save kernel stack pointer of old task
293 mov r22=b1
308 st8 [r14]=r22,SW(B4)-SW(B1) // save b1
381 ld8 r22=[r3],16 // restore b1
419 mov b1=r22
697 MOV_FROM_ITC(pUStk, p9, r22, r19) // fetch time at leave
741 mov r22=r0 // A clear r22
749 MOV_FROM_PSR(pKStk, r22, r21) // M2 read PSR now that interrupts are disabled
786 st8 [r14]=r22 // M save time at leave
[all …]
/arch/parisc/include/asm/
Dunistd.h86 #define K_LOAD_ARGS_5(r26,r25,r24,r23,r22) \ argument
87 register unsigned long __r22 __asm__("r22") = (unsigned long)(r22); \
89 #define K_LOAD_ARGS_6(r26,r25,r24,r23,r22,r21) \ argument
91 K_LOAD_ARGS_5(r26,r25,r24,r23,r22)
/arch/microblaze/lib/
Duaccess_old.S111 5: lwi r22, r6, 0x0010 + offset; \
119 13: swi r22, r5, 0x0010 + offset; \
199 swi r22, r1, 24
222 lwi r22, r1, 24
242 lwi r22, r1, 24
/arch/powerpc/kernel/
Didle_book3s.S63 std r22,-8*9(r1)
108 ld r22,-8*9(r1)
165 std r22,-8*9(r1)
/arch/csky/abiv2/
Dmemcpy.S44 ldw r22, (r1, 8)
48 stw r22, (r0, 8)
Dmemmove.S50 ldw r22, (r1, 8)
54 stw r22, (r0, 8)
/arch/powerpc/lib/
Dmemcpy_mcsafe_64.S26 ld r22,STK_REG(R22)(r1)
98 std r22,STK_REG(R22)(r1)
123 err2; ld r22,120(r4)
140 err2; std r22,120(r3)
180 ld r22,STK_REG(R22)(r1)
/arch/arc/include/asm/
Dunwind.h36 unsigned long r22; member
95 PTREGS_INFO(r22), \
/arch/alpha/include/uapi/asm/
Dptrace.h33 unsigned long r22; member
/arch/arc/include/uapi/asm/
Dptrace.h45 unsigned long r25, r24, r23, r22, r21, r20; member

Pages: 1 2 3 4 5