/arch/parisc/kernel/ |
D | pacache.S |
     90  copy %arg0, %r28                    /* Init base addr */
     99  pitlbe 0(%sr1, %r28)
    100  pitlbe,m %arg1(%sr1, %r28)          /* Last pitlbe and addr adjust */
    104  movb,tr %arg0, %r28, fitmanyloop    /* Re-init base addr */
    109  copy %arg0, %r28                    /* init base addr */
    114  pitlbe,m %arg1(%sr1, %r28)          /* pitlbe for one loop */
    133  copy %arg0, %r28                    /* Init base addr */
    142  pdtlbe 0(%sr1, %r28)
    143  pdtlbe,m %arg1(%sr1, %r28)          /* Last pdtlbe and addr adjust */
    147  movb,tr %arg0, %r28, fdtmanyloop    /* Re-init base addr */
    [all …]
|
D | real2.S |
    130  load32 PA(save_cr_space), %r28
    131  PUSH_CR(%cr24, %r28)
    132  PUSH_CR(%cr25, %r28)
    133  PUSH_CR(%cr26, %r28)
    134  PUSH_CR(%cr27, %r28)
    135  PUSH_CR(%cr28, %r28)
    136  PUSH_CR(%cr29, %r28)
    137  PUSH_CR(%cr30, %r28)
    138  PUSH_CR(%cr31, %r28)
    139  PUSH_CR(%cr15, %r28)
    [all …]
|
D | syscall.S |
    159  STREG %r28, TASK_PT_GR28(%r1)       /* return value 0 */
    160  STREG %r28, TASK_PT_ORIG_R28(%r1)   /* return value 0 (saved for signals) */
    233  ldo -ENOSYS(%r0),%r28               /* set errno */
    340  STREG %r28,TASK_PT_GR28(%r1)        /* save return value now */
    343  LDREG TASK_PT_GR28(%r1), %r28       /* Restore return val. */
    415  ldo R%lws_table(%r1), %r28          /* Scratch use of r28 */
    416  LDREGX %r20(%sr2,r28), %r21         /* Scratch use of r21 */
    494  ldo R%lws_lock_start(%r20), %r28
    503  add %r20, %r28, %r20
    513  ldw 4(%sr2,%r20), %r28              /* Load thread register */
    [all …]
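Note: on PA-RISC, %r28 is the syscall return-value register, which is why it is saved to TASK_PT_GR28 and preloaded with -ENOSYS above. A minimal C sketch of that convention (dispatch, table, and nr_syscalls are illustrative names, not the kernel's):

    #include <errno.h>

    typedef long (*syscall_fn)(long, long, long, long, long, long);

    long dispatch(unsigned long nr, long a0, long a1, long a2,
                  long a3, long a4, long a5,
                  syscall_fn *table, unsigned long nr_syscalls)
    {
        long ret = -ENOSYS;                 /* like "ldo -ENOSYS(%r0),%r28" */
        if (nr < nr_syscalls)
            ret = table[nr](a0, a1, a2, a3, a4, a5);
        return ret;                         /* ends up in %r28 on parisc */
    }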
|
/arch/powerpc/mm/ |
D | hash_low_64.S |
     82  std r28,STK_REG(r28)(r1)
    133  xor r28,r5,r0
    139  rldic r28,r5,25,25                  /* (vsid << 25) & 0x7fffffffff */
    142  xor r28,r28,r5
    144  xor r28,r28,r0                      /* hash */
    194  and r0,r28,r27
    217  andc r0,r27,r28
    237  mr r5,r28
    266  ld r28,STK_REG(r28)(r1)
    281  mr r5,r28
    [all …]
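Note: the xor/rldic sequence builds the hashed-page-table bucket number in r28. A rough C sketch of the idea, assuming 4K pages and ignoring the exact 1T-segment folding (hpt_hash and the constants below are illustrative):

    #include <stdint.h>

    static uint64_t hpt_hash(uint64_t vsid, uint64_t ea, uint64_t htab_hash_mask)
    {
        uint64_t pagenum = (ea >> 12) & 0xffffUL;   /* page index within segment */
        return (vsid ^ pagenum) & htab_hash_mask;   /* the "xor ... hash" lines */
    }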
|
/arch/ia64/hp/sim/boot/ |
D | boot_head.S |
     49  mov r28=in0
     63  cmp.gtu p6,p7=r9,r28                /* r28 <= 255? */
     68  cmp.gtu p6,p7=r9,r28
     71  static: cmp.eq p6,p7=PAL_PTCE_INFO,r28
     79  1: cmp.eq p6,p7=PAL_FREQ_RATIOS,r28
     86  1: cmp.eq p6,p7=PAL_RSE_INFO,r28
     93  1: cmp.eq p6,p7=PAL_CACHE_FLUSH,r28 /* PAL_CACHE_FLUSH */
    111  1: cmp.eq p6,p7=PAL_PERF_MON_INFO,r28
    147  1: cmp.eq p6,p7=PAL_VM_SUMMARY,r28
    156  1: cmp.eq p6,p7=PAL_MEM_ATTRIB,r28
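Note: this cmp.eq chain is the simulator dispatching on the PAL procedure index held in r28. In C it would read as a switch; the enum values below are illustrative placeholders, not authoritative:

    enum { PAL_CACHE_FLUSH = 1, PAL_PTCE_INFO = 6, PAL_FREQ_RATIOS = 14 };

    long sim_pal_call(unsigned long index)    /* index arrives in r28 */
    {
        switch (index) {
        case PAL_PTCE_INFO:   /* report purge-TC info */  break;
        case PAL_FREQ_RATIOS: /* report clock ratios */   break;
        case PAL_CACHE_FLUSH: /* flush the caches */      break;
        default:              return -1;                  /* unimplemented */
        }
        return 0;
    }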
|
/arch/ia64/kvm/ |
D | optvfault.S |
    202  mov r28=r19
    244  (p7) mov rr[r28]=r19
    260  extr.u r28=r25,36,1
    266  dep r26=r28,r26,23,1
    269  movl r28=IA64_PSR_IC+IA64_PSR_I+IA64_PSR_DT+IA64_PSR_SI
    275  or r28=r27,r28
    279  and r20=r20,r28
    303  dep.z r28=4,61,3
    307  mov rr[r28]=r27
    324  extr.u r28=r25,36,1
    [all …]
|
D | kvm_minstate.h |
     34  mov.m r28 = ar.rnat; \
    150  st8 [r17] = r28,16; /* save ar.rnat */ \
    228  .mem.offset 0,0; st8.spill [r2] = r28,16; \
|
D | vmm_ivt.S |
     280  mov r28=cr.iip    /* M */
     309  st8 [r16]=r28,16  /* save cr.iip */
     384  .mem.offset 0,0; st8.spill [r2]=r28,16
     943  ld8 r28=[r2],8    // load b6
     981  mov b6=r28
     988  ld8.fill r28=[r3],16
    1016  ld8 r28=[r17],16  // load cr.iip
    1053  mov cr.iip=r28
    1139  ld8.fill r28=[r2],16
    1161  ld8 r28=[r17],16  // load ar.unat
    [all …]
|
/arch/parisc/hpux/ |
D | wrappers.S |
    118  comb,>>=,n %r28,%r1,fork_exit       /* just let the syscall exit handle it */
    119  or,= %r28,%r0,%r0
    137  copy %r0,%r28
    172  comb,>>= %r28,%r1,exec_error
    198  comb,>>= %r28,%r1,pipe_exit         /* let syscall exit handle it */
    203  LDREG -56(%r30),%r28
    236  comb,<< %r28,%r1,no_error
    238  subi 0,%r28,%r28
|
D | gate.S |
     73  STREG %r28, TASK_PT_GR28(%r1)       /* return value 0 */
     74  STREG %r28, TASK_PT_ORIG_R28(%r1)   /* return value 0 (saved for signals) */
    103  ldo -ENOSYS(%r0),%r28
|
/arch/parisc/lib/ |
D | lusercopy.S |
     85  sub %r23,%r24,%r28
     94  ldi -EFAULT,%r28
    123  copy %r25,%r28
    160  sub %r26,%r24,%r28
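Note: these routines report their result in %r28: either the number of bytes left uncopied or -EFAULT. A minimal C sketch of that return convention (the function and its parameters are illustrative):

    #include <errno.h>
    #include <stddef.h>

    long usercopy_result(size_t len, size_t copied, int faulted)
    {
        if (faulted)
            return -EFAULT;     /* "ldi -EFAULT,%r28" */
        return len - copied;    /* "sub %r23,%r24,%r28": bytes left */
    }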
|
/arch/powerpc/kvm/ |
D | booke_interrupts.S |
    148  stw r28, VCPU_GPR(r28)(r4)
    226  lwz r28, VCPU_GPR(r28)(r4)
    256  stw r28, VCPU_GPR(r28)(r4)
    276  lwz r28, HOST_NV_GPR(r28)(r1)
    317  stw r28, HOST_NV_GPR(r28)(r1)
    337  lwz r28, VCPU_GPR(r28)(r4)
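Note: VCPU_GPR(r28) is an asm-offsets-style offset into the vcpu's saved-register area, so the stw/lwz pairs are the assembly form of a plain array access. A C sketch, with an illustrative struct layout:

    struct vcpu_sketch { unsigned long gpr[32]; };

    static void save_gpr28(struct vcpu_sketch *vcpu, unsigned long val)
    {
        vcpu->gpr[28] = val;        /* stw r28, VCPU_GPR(r28)(r4) */
    }

    static unsigned long load_gpr28(const struct vcpu_sketch *vcpu)
    {
        return vcpu->gpr[28];       /* lwz r28, VCPU_GPR(r28)(r4) */
    }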
|
/arch/ia64/kernel/ |
D | entry.h |
    66  movl r28=1f; \
    70  mov.ret.sptk b7=r28,1f; \
    76  movl r28=1f; \
    79  mov.ret.sptk b7=r28,1f; \
|
D | pal.S |
     60  mov r28 = in0
    103  mov r28 = in0   // Index MUST be copied to r28
    150  mov r28 = in0   // copy procedure index
    209  mov r28 = in0   // copy procedure index
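Note: "Index MUST be copied to r28" reflects the IA-64 PAL calling convention: the procedure index is passed in GR28 (arguments in GR29-GR31) and results come back in r8-r11. A hedged C sketch of the return shape only; treat the details as an approximation of the kernel's ia64_pal_retval:

    struct pal_retval_sketch {
        long status;                /* r8 */
        unsigned long v0, v1, v2;   /* r9-r11 */
    };

    static struct pal_retval_sketch pal_call_sketch(unsigned long index)
    {
        struct pal_retval_sketch r = { -1, 0, 0, 0 };
        (void)index;    /* real code: "mov r28 = in0", then branch to PAL */
        return r;
    }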
|
D | ivt.S |
    152  shr.u r28=r22,PUD_SHIFT             // shift pud index into position
    161  dep r28=r28,r17,3,(PAGE_SHIFT-3)    // r28=pud_offset(pgd,addr)
    164  (p7) ld8 r29=[r28]                  // get *pud (may be 0)
    228  ld8 r19=[r28]                       // read *pud again
    547  mov r28=ar.ccv                      // save ar.ccv
    574  mov ar.ccv=r28
    613  mov r28=ar.ccv                      // save ar.ccv
    640  mov ar.ccv=r28
    668  mov r28=ar.ccv                      // save ar.ccv
    693  mov ar.ccv=r28
    [all …]
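Note: the shr.u/dep pair is the TLB-miss handler's software page-table walk; "r28=pud_offset(pgd,addr)" is the address arithmetic below. A C sketch with illustrative shift/size constants (the real values depend on the configured page size):

    #include <stdint.h>

    #define PUD_SHIFT_SK    33UL            /* placeholder */
    #define PTRS_PER_PUD_SK (1UL << 11)     /* placeholder */

    static uint64_t *pud_offset_sketch(uint64_t pgd_entry, uint64_t addr)
    {
        uint64_t index = (addr >> PUD_SHIFT_SK) & (PTRS_PER_PUD_SK - 1);
        uint64_t base  = pgd_entry & ~0xfffUL;      /* pud page base */
        return (uint64_t *)base + index;            /* 8-byte entries */
    }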
|
D | minstate.h |
     54  MOV_FROM_IIP(r28); /* M */ \
    105  st8 [r16]=r28,16; /* save cr.iip */ \
    187  .mem.offset 0,0; st8.spill [r2]=r28,16; \
|
/arch/ia64/lib/ |
D | memcpy_mck.S |
     56  #define t13 r28
     74  and r28=0x7,in0
     84  and r28=0x7,in0
     93  cmp.ne p13,p0=0,r28     // check dest alignment
     96  sub r30=8,r28           // for .align_dest
    338  shl r28=r30, LOOP_SIZE  // jmp_table thread
    341  add r29=r29,r28         // jmp_table thread
    357  (p6) shr.u r28=r37,r25
    362  (p6) or r21=r28,r27
    486  cmp.eq p6,p7=r28,r29
    [all …]
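Note: "and r28=0x7,in0" and "sub r30=8,r28" size the prologue that copies byte-by-byte until the destination is 8-byte aligned. The same computation as a small C sketch:

    #include <stddef.h>
    #include <stdint.h>

    static size_t head_bytes(const void *dst)
    {
        size_t mis = (uintptr_t)dst & 0x7;  /* and r28=0x7,in0 */
        return mis ? 8 - mis : 0;           /* sub r30=8,r28 */
    }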
|
/arch/powerpc/platforms/iseries/ |
D | exception.S |
    72  sldi r28,r24,3  /* get current_set[cpu#] */
    73  ldx r3,r3,r28
    95  sldi r28,r24,3  /* get current_set[cpu#] */
    96  ldx r3,r3,r28
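Note: the sldi/ldx pair indexes an array of 8-byte pointers by CPU number. In C (names illustrative):

    void *pick_current(void **current_set, unsigned long cpu)
    {
        return current_set[cpu];    /* base + (cpu << 3) */
    }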
|
/arch/powerpc/boot/ |
D | ppc_asm.h | 57 #define r28 28 macro
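Note: the boot wrapper defines bare register names as plain numbers so that assembly written with "r28" still assembles where the toolchain expects numeric operands; after preprocessing, an instruction such as the (illustrative) one below uses only digits:

    #define r28 28
    /* stw r28,0(r1)  becomes  stw 28,0(1) */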
|
/arch/parisc/include/asm/ |
D | asmregs.h |
    30  ret0: .reg %r28
    74  r28: .reg %r28
|
/arch/alpha/include/asm/ |
D | ptrace.h | 38 unsigned long r28; member
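Note: this is the slot for r28 in the saved-register frame; C code reaches the register through the struct member. A minimal sketch (the struct here is illustrative, not Alpha's full pt_regs):

    struct pt_regs_sketch { unsigned long r28; /* ... other registers ... */ };

    static unsigned long get_r28(const struct pt_regs_sketch *regs)
    {
        return regs->r28;   /* the "unsigned long r28" member above */
    }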
|
/arch/powerpc/kernel/ |
D | misc.S |
     87  PPC_STL r28,19*SZL(r3)
    113  PPC_LL r28,19*SZL(r3)
|
D | swsusp_asm64.S |
    101  SAVE_REGISTER(r28)
    207  RESTORE_REGISTER(r28)
|
/arch/ia64/include/asm/xen/ |
D | minstate.h |
    33  MOV_FROM_IIP(r28); /* M */ \
    89  st8 [r16]=r28,16; /* save cr.iip */ \
|
/arch/ia64/xen/ |
D | xensetup.S | 70 (isBP) movl r28=XSI_BASE;;
|