
Searched refs: r25 (Results 1 – 25 of 63), sorted by relevance


/arch/sh/lib64/
udivsi3.S
13 shlld r22,r0,r25
14 shlri r25,48,r25
16 sub r20,r25,r21
20 mmulfx.w r25,r19,r19
36 mmacnfx.wl r25,r19,r21
38 sub r4,r20,r25
40 mulu.l r25,r21,r19
47 sub.l r25,r20,r25
49 mulu.l r25,r21,r19
50 addz.l r25,r63,r25
[all …]
sdivsi3.S
12 shlld r5, r1, r25 /* normalize; [-2 ..1, 1..2) in s2.62 */
13 shari r25, 58, r21 /* extract 5(6) bit index (s2.4 with hole -1..1) */
17 shari r25, 32, r25 /* normalize to s2.30 */
19 muls.l r25, r19, r19 /* s2.38 */
27 muls.l r25, r21, r18 /* s2.58 */
32 muls.l r18, r25, r0 /* s2.60 */
33 muls.l r18, r4, r25 /* s32.30 */
37 shari r25, 63, r0
42 add r21, r25, r21
udivdi3.S
70 shlrd r2,r9,r25
71 shlri r25,32,r8
86 sub r25,r5,r25
89 shlri r25,22,r21
97 sub r25,r5,r25
98 bgtu/u r7,r25,tr0 // no_lo_adj
100 sub r25,r7,r25
102 mextr4 r2,r25,r2
/arch/parisc/kernel/
pacache.S
291 ldd 0(%r25), %r19
294 ldw 64(%r25), %r0 /* prefetch 1 cacheline ahead */
295 ldw 128(%r25), %r0 /* prefetch 2 */
297 1: ldd 8(%r25), %r20
298 ldw 192(%r25), %r0 /* prefetch 3 */
299 ldw 256(%r25), %r0 /* prefetch 4 */
301 ldd 16(%r25), %r21
302 ldd 24(%r25), %r22
306 ldd 32(%r25), %r19
307 ldd 40(%r25), %r20
[all …]
entry.S
752 STREG %r25, PT_GR25(%r1)
756 ldi 1, %r25 /* stack_start, signals kernel thread */
814 STREG %r25, PT_GR25(%r16)
846 LDREG TASK_PT_KPC(%r25), %r2
849 LDREG TASK_PT_KSP(%r25), %r30
850 LDREG TASK_THREAD_INFO(%r25), %r25
852 mtctl %r25,%cr30
956 copy %r0, %r25 /* long in_syscall = 0 */
1152 ldo PT_FR0(%r29), %r25
1153 save_fp %r25
[all …]
syscall.S
106 depdi 0, 31, 32, %r25
156 STREG %r25, TASK_PT_GR25(%r1) /* 2nd argument */
219 comib,<> 0,%r25,.Lin_syscall
304 LDREG TASK_PT_GR25(%r1), %r25
349 comib,<> 0,%r25,.Ltrace_in_syscall
486 depdi 0, 31, 32, %r25
561 sub,<> %r28, %r25, %r0
/arch/parisc/lib/
lusercopy.S
78 1: ldbs,ma 1(%sr1,%r25),%r1
83 2: ldbs,ma 1(%sr1,%r25),%r1
115 comib,=,n 0,%r25,$lclu_done
118 addib,<> -1,%r25,$lclu_loop
123 copy %r25,%r28
129 ldo 1(%r25),%r25
150 comib,= 0,%r25,$lslen_nzero
156 addib,<> -1,%r25,$lslen_loop
/arch/ia64/kernel/
head.S
212 mov r25=pr;;
278 SAL_TO_OS_BOOT_HANDOFF_STATE_SAVE(r16,r17,r25)
1225 tpa r25=in0
1247 RESTORE_REGION_REGS(r25, r17,r18,r19);;
1248 RESTORE_REG(b0, r25, r17);;
1249 RESTORE_REG(b1, r25, r17);;
1250 RESTORE_REG(b2, r25, r17);;
1251 RESTORE_REG(b3, r25, r17);;
1252 RESTORE_REG(b4, r25, r17);;
1253 RESTORE_REG(b5, r25, r17);;
[all …]
ivt.S
116 MOV_FROM_ITIR(r25)
127 extr.u r26=r25,2,6
132 (p8) dep r25=r18,r25,2,6
193 MOV_TO_ITIR(p8, r25, r24) // change to default page-size for VHPT
203 ITC_D(p7, r24, r25)
225 ld8 r25=[r21] // read *pte again
239 (p7) cmp.ne.or.andcm p6,p7=r25,r18 // did *pte change
392 THASH(p8, r17, r16, r25)
394 MOV_TO_IHA(p8, r17, r25)
400 mov r25=PERCPU_PAGE_SHIFT << 2
[all …]
minstate.h
51 mov r25=ar.unat; /* M */ \
113 st8 [r16]=r25,16; /* save ar.unat */ \
182 .mem.offset 8,0; st8.spill [r3]=r25,16; \
205 adds r25=PT(B7)-PT(F11),r3; \
208 st8 [r25]=r19,16; /* b7 */ \
211 st8 [r25]=r10; /* ar.ssd */ \
entry.S
197 movl r25=init_task
204 cmp.eq p7,p6=r25,in0
226 RSM_PSR_IC(r25) // interrupts (psr.i) are already disabled here
227 movl r25=PAGE_KERNEL
230 or r23=r25,r20 // construct PA | page properties
231 mov r25=IA64_GRANULE_SHIFT<<2
233 MOV_TO_ITIR(p0, r25, r8)
236 mov r25=IA64_TR_CURRENT_STACK
239 itr.d dtr[r25]=r23 // wire in new mapping...
324 mov r25=b4
[all …]
/arch/ia64/kvm/
kvm_minstate.h
56 add r25 = VMM_VPD_BASE_OFFSET, r21; \
63 ld8 r25 = [r25]; /* read vpd base */ \
104 mov r25 = ar.unat; /* M */ \
145 st8 [r16] = r25,16; /* save ar.unat */ \
223 .mem.offset 8,0; st8.spill [r3] = r25,16; \
237 adds r25 = PT(B7)-PT(F7),r3; \
240 st8 [r25] = r19,16; /* b7 */ \
245 st8 [r25] = r10; /* ar.ssd */ \
optvfault.S
30 mov r19 = r25; \
41 mov r25 =r16; \
46 mov r25 = r19; \
110 ld8 r26=[r25]
125 extr.u r17=r25,6,7
149 extr.u r16=r25,20,7
150 extr.u r17=r25,6,7
180 extr.u r16=r25,20,7
181 extr.u r17=r25,13,7
257 extr.u r26=r25,6,21
[all …]
vmm_ivt.S
278 mov r25=ar.unat /* M */
316 st8 [r16]=r25,16 /* save ar.unat */
379 .mem.offset 8,0; st8.spill [r3]=r25,16
402 adds r25=PT(B7)-PT(F11),r3
405 st8 [r25]=r19,16 /* b7 */
408 st8 [r25]=r10 /* ar.ssd */
637 st8 [r17] = r25
889 st8 [r17] = r25
983 ld8.fill r25=[r2],16
1019 ld8 r25=[r17],16 // load ar.unat
[all …]
/arch/powerpc/kvm/
booke_interrupts.S
145 stw r25, VCPU_GPR(r25)(r4)
223 lwz r25, VCPU_GPR(r25)(r4)
253 stw r25, VCPU_GPR(r25)(r4)
273 lwz r25, HOST_NV_GPR(r25)(r1)
314 stw r25, HOST_NV_GPR(r25)(r1)
334 lwz r25, VCPU_GPR(r25)(r4)
/arch/parisc/hpux/
wrappers.S
94 LDREG PT_GR30(%r1),%r25
95 mtctl %r25,%cr29
158 STREG %r25,PT_GR25(%r1)
176 LDREG TASK_PT_GR25(%r1),%r25
/arch/parisc/include/asm/
unistd.h
889 #define K_LOAD_ARGS_2(r26,r25) \ argument
890 register unsigned long __r25 __asm__("r25") = (unsigned long)(r25); \
892 #define K_LOAD_ARGS_3(r26,r25,r24) \ argument
894 K_LOAD_ARGS_2(r26,r25)
895 #define K_LOAD_ARGS_4(r26,r25,r24,r23) \ argument
897 K_LOAD_ARGS_3(r26,r25,r24)
898 #define K_LOAD_ARGS_5(r26,r25,r24,r23,r22) \ argument
900 K_LOAD_ARGS_4(r26,r25,r24,r23)
901 #define K_LOAD_ARGS_6(r26,r25,r24,r23,r22,r21) \ argument
903 K_LOAD_ARGS_5(r26,r25,r24,r23,r22)
asmregs.h
27 arg1: .reg %r25
71 r25: .reg %r25
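
The unistd.h and asmregs.h hits above show the same fact from two sides: on PA-RISC, r25 carries the second syscall argument (asmregs.h aliases it as arg1), and the K_LOAD_ARGS_* macros pin each C value to its register before the kernel is entered. A minimal sketch of that pinning pattern, assuming a hypothetical helper name and an empty consuming asm (the real macros feed the registers into the kernel's inline syscall sequence):

/* Sketch only, not the kernel's macro: bind two syscall arguments to their
 * PA-RISC ABI registers, mirroring K_LOAD_ARGS_2(r26,r25) above. */
static inline void load_args_2_sketch(unsigned long arg0, unsigned long arg1)
{
	/* GCC local register variables force each value into a named register. */
	register unsigned long __r26 __asm__("r26") = arg0;  /* 1st argument */
	register unsigned long __r25 __asm__("r25") = arg1;  /* 2nd argument */

	/* The registers must be listed as asm inputs or the compiler may drop
	 * the bindings; the real code lists them on the syscall asm itself. */
	__asm__ volatile("" : : "r" (__r26), "r" (__r25));
}
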
/arch/powerpc/mm/
hash_low_64.S
362 std r25,STK_REG(r25)(r1)
405 rldicl r25,r3,64-12,60
485 addi r5,r25,36 /* Check actual HPTE_SUB bit, this */
495 add r5,r5,r25
523 add r5,r5,r25
590 subfic r5,r25,27 /* Must match bit position in */
594 sldi r5,r25,2
606 ld r25,STK_REG(r25)(r1)
621 sldi r5,r25,2
/arch/powerpc/boot/
ppc_asm.h
54 #define r25 25 macro
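
The boot wrapper's ppc_asm.h defines GPR mnemonics as bare numbers, so assembly written with names like r25 assembles against the raw register number. A small sketch of the effect, kept in C inline assembly to stay self-contained (the stringify helpers, label, and stw/blr sequence are illustrative, not code from the tree):

/* Sketch: with the numeric register macros, the stw line below preprocesses
 * to "stw 25, 8(1)" before the assembler ever sees it. */
#define r1  1
#define r25 25   /* as in the ppc_asm.h hit above */

#define STR1(x) #x
#define STR(x)  STR1(x)

__asm__(".globl spill_r25_sketch\n"
        "spill_r25_sketch:\n"
        "\tstw " STR(r25) ", 8(" STR(r1) ")\n"
        "\tblr\n");
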
/arch/alpha/include/asm/
ptrace.h
35 unsigned long r25; member
/arch/powerpc/kernel/
misc.S
84 PPC_STL r25,16*SZL(r3)
110 PPC_LL r25,16*SZL(r3)
swsusp_asm64.S
98 SAVE_REGISTER(r25)
204 RESTORE_REGISTER(r25)
/arch/ia64/lib/
ip_fast_csum.S
106 ld4 r25=[in1],4
113 add r18=r24,r25
/arch/ia64/include/asm/xen/
minstate.h
31 mov r25=ar.unat; /* M */ \
97 st8 [r16]=r25,16; /* save ar.unat */ \
