Home
last modified time | relevance | path

Searched refs:r20 (Results 1 – 25 of 105) sorted by relevance

Pages: 1 2 3 4 5

/arch/ia64/lib/
ip_fast_csum.S:42 (p7) ld4 r20=[in0],8
50 add r20=r20,r21
53 add r20=r20,r22
55 add r20=r20,r24
57 shr.u ret0=r20,16 // now need to add the carry
58 zxt2 r20=r20
60 add r20=ret0,r20
62 shr.u ret0=r20,16 // add carry again
63 zxt2 r20=r20
65 add r20=ret0,r20
[all …]
flush.S:31 ld8 r20=[r3] // r20: stride shift
34 shr.u r23=in0,r20 // start / (stride size)
35 shr.u r22=r22,r20 // (last byte address) / (stride size)
36 shl r21=r21,r20 // r21: stride size of the i-cache(s)
39 shl r24=r23,r20 // r24: addresses for "fc.i" =
84 ld8 r20=[r3] // r20: stride shift
87 shr.u r23=in0,r20 // start / (stride size)
88 shr.u r22=r22,r20 // (last byte address) / (stride size)
89 shl r21=r21,r20 // r21: stride size of the i-cache(s)
92 shl r24=r23,r20 // r24: addresses for "fc" =
/arch/ia64/xen/
xenivt.S:40 movl r20=XSI_PSR_I_ADDR
42 ld8 r20=[r20]
44 adds r20=-1,r20 // vcpu_info->evtchn_upcall_pending
46 ld1 r20=[r20]
48 cmp.ne p6,p0=r20,r0 // if there are pending events,
/arch/sh/lib64/
udivsi3.S:15 movi 0xffffffffffffbb0c,r20 /* shift count eqiv 76 */
16 sub r20,r25,r21
21 sub r20,r0,r0
35 mulu.l r18,r22,r20
38 sub r4,r20,r25
44 mulu.l r19,r22,r20
47 sub.l r25,r20,r25
53 mulu.l r19,r22,r20
57 cmpgt r25,r20,r25
sdivsi3.S:13 gettr tr0,r20
20 ldx.ub r20, r21, r19 /* u0.8 */
24 ldx.w r20, r21, r21 /* s2.14 */
udivdi3.S:13 sub r63,r22,r20 // r63 == 64 % 64
16 addi r20,32,r9
45 addi r20,30-22,r0
50 addi r20,30,r0
/arch/parisc/kernel/
syscall.S:150 STREG %r20, TASK_PT_GR20(%r1) /* Syscall number */
201 comiclr,>> __NR_Linux_syscalls, %r20, %r0
204 LDREGX %r20(%r19), %r19
212 comb,= %r2,%r20,.Lrt_sigreturn
301 copy %ret0,%r20
317 comiclr,>>= __NR_Linux_syscalls, %r20, %r0
320 LDREGX %r20(%r19), %r19
328 comb,= %r2,%r20,.Ltrace_rt_sigreturn
437 depdi 0, 31, 32, %r20
441 comiclr,>> __NR_lws_entries, %r20, %r0
[all …]
pacache.S:80 LDREG ITLB_SID_BASE(%r1), %r20
93 mtsp %r20, %sr1
94 add %r21, %r20, %r20 /* increment space */
108 mtsp %r20, %sr1
117 add %r21, %r20, %r20 /* increment space */
123 LDREG DTLB_SID_BASE(%r1), %r20
136 mtsp %r20, %sr1
137 add %r21, %r20, %r20 /* increment space */
151 mtsp %r20, %sr1
160 add %r21, %r20, %r20 /* increment space */
[all …]
/arch/tile/kernel/
intvec_64.S:431 push_reg r20, r52
456 mfspr r20, SPR_SYSTEM_SAVE_K_0
461 bfextu r20, r20, 0, LOG2_THREAD_SIZE-1
464 shl3add r20, r20, r21
465 ld tp, r20
501 shli r20, r1, 5
503 moveli r20, INT_SWINT_1 << 5
508 add r20, r20, r21
509 jalr r20
524 IRQ_DISABLE_ALL(r20)
[all …]
intvec_32.S:442 push_reg r20, r52
466 mfspr r20, SPR_SYSTEM_SAVE_K_0
471 mm r20, r20, zero, 0, LOG2_THREAD_SIZE-1
473 s2a r20, r20, r21
474 lw tp, r20
512 shli r20, r1, 5
514 moveli r20, INT_SWINT_1 << 5
516 addli r20, r20, lo16(intvec_feedback)
517 auli r20, r20, ha16(intvec_feedback)
518 jalr r20
[all …]
/arch/ia64/kvm/
optvfault.S:32 mov r20 = r31; \
48 mov r31 = r20
133 addl r20=@gprel(asm_mov_to_reg),gp
136 adds r30=kvm_resume_to_guest-asm_mov_to_reg,r20
137 shladd r17=r17,4,r20
160 addl r20=@gprel(asm_mov_to_reg),gp
163 shladd r17=r17,4,r20
165 adds r30=kvm_resume_to_guest-asm_mov_to_reg,r20
181 addl r20=@gprel(asm_mov_from_reg),gp
183 adds r30=kvm_asm_mov_from_rr_back_1-asm_mov_from_reg,r20
[all …]
vmm_ivt.S:158 movl r20=IA64_GRANULE_SHIFT<<2
160 mov cr.itir=r20
181 movl r20=IA64_GRANULE_SHIFT<<2
183 mov cr.itir=r20
277 mov r20=r1 /* A */
329 .mem.offset 0,0; st8.spill [r16]=r20,16 /* save original r1 */
370 .mem.offset 0,0; st8.spill [r2]=r20,16
968 ld8.fill r20=[r2],16
1038 ld8 r20=[r16],16 // ar.fpsr
1052 mov ar.fpsr=r20
[all …]
/arch/powerpc/kvm/
book3s_rmhandlers.S:197 PPC_STL r20, _NIP(r1); \
198 mfmsr r20; \
200 andc r3,r20,r3; /* Disable DR,EE */ \
205 mtmsr r20; /* Enable DR,EE */ \
207 PPC_LL r20, _NIP(r1)
book3s_interrupts.S:48 PPC_LL r20, VCPU_GPR(r20)(vcpu); \
140 PPC_STL r20, VCPU_GPR(r20)(r7)
booke_interrupts.S:143 stw r20, VCPU_GPR(r20)(r4)
227 lwz r20, VCPU_GPR(r20)(r4)
265 stw r20, VCPU_GPR(r20)(r4)
285 lwz r20, HOST_NV_GPR(r20)(r1)
330 stw r20, HOST_NV_GPR(r20)(r1)
350 lwz r20, VCPU_GPR(r20)(r4)
/arch/ia64/kernel/
fsys.S:243 movl r20 = fsyscall_gtod_data // load fsyscall gettimeofday data address
247 add r22 = IA64_GTOD_WALL_TIME_OFFSET,r20 // wall_time
248 add r21 = IA64_CLKSRC_MMIO_OFFSET,r20
253 (p15) add r22 = IA64_GTOD_MONO_TIME_OFFSET,r20 // monotonic_time
255 add r26 = IA64_CLKSRC_CYCLE_LAST_OFFSET,r20 // clksrc_cycle_last
261 ld4.acq r28 = [r20] // gtod_lock.sequence, Must take first
266 add r24 = IA64_CLKSRC_MULT_OFFSET,r20
268 add r23 = IA64_CLKSRC_SHIFT_OFFSET,r20
269 add r14 = IA64_CLKSRC_MASK_OFFSET,r20
314 ld4 r10 = [r20] // gtod_lock.sequence
[all …]
mca_asm.S:71 ld4 r20=[r2] // r20=ptce_count[1]
75 adds r20=-1,r20
82 mov ar.lc=r20
200 mov r20=IA64_TR_PALCODE
202 itr.i itr[r20]=r18
213 movl r20=PAGE_KERNEL
215 add r16=r20,r16
220 mov r20=IA64_TR_CURRENT_STACK
222 itr.d dtr[r20]=r16
829 ld8 r20=[temp2],16 // prev_task
[all …]
efi_stub.S:74 mov loc6=r20
79 mov r20=loc6
esi_stub.S:84 mov loc6=r20 // old sp
89 mov r20=loc6 // save virtual mode sp
ivt.S:170 (p7) ld8 r20=[r17] // get *pmd (may be 0)
173 (p7) cmp.eq.or.andcm p6,p7=r20,r0 // was pmd_present(*pmd) == NULL?
174 dep r21=r19,r20,3,(PAGE_SHIFT-3) // r21=pte_offset(pmd,addr)
183 dep r23=0,r20,0,PAGE_SHIFT // clear low bits to get page address
230 cmp.ne.or.andcm p6,p7=r26,r20 // did *pmd change
280 mov r20=PAGE_SHIFT<<2 // setup page size for purge
284 (p7) ptc.l r16,r20
324 mov r20=PAGE_SHIFT<<2 // setup page size for purge
328 (p7) ptc.l r16,r20
379 MOV_FROM_ISR(r20)
[all …]
/arch/microblaze/kernel/
mcount.S:36 swi r20, r1, 68; \
67 lwi r20, r1, 68; \
128 lwi r20, r0, ftrace_trace_function;
130 cmpu r5, r20, r6; /* ftrace_trace_function != ftrace_stub */
143 brald r15, r20; /* MS: jump to ftrace handler */
/arch/parisc/hpux/
gate.S:31 ldw -56(%r30), %r20 ;! 6th argument
65 STREG %r20, TASK_PT_GR20(%r1) /* 6th argument */
87 stw %r20, -56(%r30) ;! 6th argument
/arch/microblaze/lib/
uaccess_old.S:110 3: lwi r20, r6, 0x0008 + offset; \
118 11: swi r20, r5, 0x0008 + offset; \
198 swi r20, r1, 16
221 lwi r20, r1, 16
241 lwi r20, r1, 16
/arch/ia64/include/asm/xen/
minstate.h:5 MOV_FROM_ITC(pUStk, p6, r20, r2);
39 mov r20=r1; /* A */ \
122 .mem.offset 0,0; st8.spill [r16]=r20,16; /* save original r1 */ \
/arch/powerpc/boot/
ppc_asm.h:49 #define r20 20 macro

Pages: 1 2 3 4 5