/arch/sh/kernel/

  relocate_kernel.S
      24  mov.l r15, @-r0
      25  mov r0, r15
      26  mov.l r14, @-r15
      27  mov.l r13, @-r15
      28  mov.l r12, @-r15
      29  mov.l r11, @-r15
      30  mov.l r10, @-r15
      31  mov.l r9, @-r15
      32  mov.l r8, @-r15
      35  sts.l macl, @-r15
      [all …]

  entry-common.S
      81  mov.l @(r0,r15), r0 ! get status register
      282  mov.l @(OFF_R5,r15), r5 ! fn
      284  mov.l @(OFF_R4,r15), r4 ! arg
      303  add r15, r10
      380  mov.l @(OFF_R4,r15), r4 ! arg0
      381  mov.l @(OFF_R5,r15), r5
      382  mov.l @(OFF_R6,r15), r6
      383  mov.l @(OFF_R7,r15), r7 ! arg3
      384  mov.l @(OFF_R3,r15), r3 ! syscall_nr
      391  mov.l r0, @(OFF_R0,r15) ! Return value

/arch/powerpc/mm/nohash/

  tlb_low_64e.S
      62  std r15,EX_TLB_R15(r12)
      82  ld r15,EX_TLB_R15(r12)
      115  srdi r15,r16,60 /* get region */
      121  cmpwi r15,0 /* user vs kernel check */
      146  rldicl r15,r16,64-PGDIR_SHIFT+3,64-PGD_INDEX_SIZE-3
      148  clrrdi r15,r15,3
      151  ldx r14,r14,r15 /* grab pgd entry */
      153  rldicl r15,r16,64-PUD_SHIFT+3,64-PUD_INDEX_SIZE-3
      154  clrrdi r15,r15,3
      157  ldx r14,r14,r15 /* grab pud entry */
      [all …]
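
The rldicl/clrrdi pairs in the tlb_low_64e.S hits pull a page-table index out of the faulting address and scale it to an 8-byte table offset before the ldx. A minimal C sketch of that arithmetic is below; the shift and index-size values are illustrative assumptions, not the kernel's actual configuration, and the function name is mine.

    #include <stdint.h>

    /* Illustrative values only; the kernel derives these from its config. */
    #define PGDIR_SHIFT     39
    #define PGD_INDEX_SIZE  9

    /* The rldicl places PGD_INDEX_SIZE address bits (starting at PGDIR_SHIFT)
     * three bits up, and the following clrrdi clears the leftover low bits;
     * the net effect is the PGD index scaled to an 8-byte offset. */
    static uint64_t pgd_offset_bytes(uint64_t addr)
    {
        uint64_t idx = (addr >> PGDIR_SHIFT) & ((1ULL << PGD_INDEX_SIZE) - 1);

        return idx << 3;
    }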

/arch/sh/include/asm/

  entry-macros.S
      30  mov r15, \ti
      38  mov.l r0, @-r15
      39  mov.l r1, @-r15
      40  mov.l r2, @-r15
      41  mov.l r3, @-r15
      42  mov.l r4, @-r15
      43  mov.l r5, @-r15
      44  mov.l r6, @-r15
      45  mov.l r7, @-r15
      51  mov.l @r15+, r7
      [all …]

/arch/sh/kernel/cpu/sh2/

  entry.S
      55  mov.l @(5*4,r15),r3 ! previous SR
      59  mov.l r3,@(5*4,r15) ! update SR
      75  mov r15,r2 ! r2 = user stack top
      76  mov r0,r15 ! switch kernel stack
      77  mov.l r1,@-r15 ! TRA
      78  sts.l macl, @-r15
      79  sts.l mach, @-r15
      80  stc.l gbr, @-r15
      82  mov.l r0,@-r15 ! original SR
      83  sts.l pr,@-r15
      [all …]

/arch/sh/kernel/cpu/sh2a/

  entry.S
      47  bst.b #6,@(4*4,r15) !set cpu mode to SR.MD
      56  mov r15,r2 ! r2 = user stack top
      57  mov r0,r15 ! switch kernel stack
      58  mov.l r1,@-r15 ! TRA
      59  sts.l macl, @-r15
      60  sts.l mach, @-r15
      61  stc.l gbr, @-r15
      63  mov.l r0,@-r15 ! original SR
      64  sts.l pr,@-r15
      66  mov.l r0,@-r15 ! original PC
      [all …]

/arch/s390/kernel/

  entry.S
      52  tml %r15,THREAD_SIZE - CONFIG_STACK_GUARD
      60  lgr %r14,%r15
      146  lg %r9,__SF_SIE_CONTROL(%r15) # get control block pointer
      181  stmg %r6,%r15,__SF_GPRS(%r15) # store gprs of prev task
      185  stg %r15,__THREAD_ksp(%r1,%r2) # store kernel stack of prev
      186  lg %r15,0(%r4,%r3) # start of kernel stack of next
      187  agr %r15,%r5 # end of kernel stack of next
      189  stg %r15,__LC_KERNEL_STACK # store end of kernel stack
      190  lg %r15,__THREAD_ksp(%r1,%r3) # load kernel stack of next
      193  lmg %r6,%r15,__SF_GPRS(%r15) # load gprs of next task
      [all …]

  earlypgm.S
      11  stmg %r8,%r15,__LC_SAVE_AREA_SYNC
      12  aghi %r15,-(STACK_FRAME_OVERHEAD+__PT_SIZE)
      13  la %r11,STACK_FRAME_OVERHEAD(%r15)
      14  xc __SF_BACKCHAIN(8,%r15),__SF_BACKCHAIN(%r15)
      20  mvc __LC_RETURN_PSW(16),STACK_FRAME_OVERHEAD+__PT_PSW(%r15)
      21  lmg %r0,%r15,STACK_FRAME_OVERHEAD+__PT_R0(%r15)

/arch/sh/kernel/cpu/shmobile/

  sleep.S
      53  sts.l pr, @-r15
      54  mov.l r14, @-r15
      55  mov.l r13, @-r15
      56  mov.l r12, @-r15
      57  mov.l r11, @-r15
      58  mov.l r10, @-r15
      59  mov.l r9, @-r15
      60  mov.l r8, @-r15
      88  mov.l r15, @(SH_SLEEP_SP, r5)
      89  mov r5, r15
      [all …]

/arch/sh/lib/

  mcount.S
      13  mov.l r4, @-r15; \
      14  mov.l r5, @-r15; \
      15  mov.l r6, @-r15; \
      16  mov.l r7, @-r15; \
      17  sts.l pr, @-r15; \
      19  mov.l @(20,r15),r4; \
      23  lds.l @r15+, pr; \
      24  mov.l @r15+, r7; \
      25  mov.l @r15+, r6; \
      26  mov.l @r15+, r5; \
      [all …]

  udivsi3_i4i.S
      54  mov.l r4,@-r15
      56  mov.l r1,@-r15
      67  mov.l r4,@-r15
      70  mov.l r5,@-r15
      81  mov.l @r15+,r5
      85  mov.l @r15+,r4
      93  mov.l @r15+,r5
      95  mov.l @r15+,r4
      108  mov.l r4,@-r15
      110  mov.l r1,@-r15
      [all …]

  udivsi3_i4i-Os.S
      28  mov.l r4,@-r15
      35  mov.l r5,@-r15
      52  mov.l @r15+,r5
      54  mov.l @r15+,r4
      73  mov.l r5,@-r15
      81  mov.l @r15+,r5
      82  mov.l @r15+,r4
      92  mov.l r4,@-r15
      94  mov.l r5,@-r15
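
The udivsi3_i4i hits above are the stack saves and restores (via r15, the SH stack pointer) around software unsigned-divide helpers. As a rough orientation for what such a helper computes, here is a plain bit-at-a-time long division in C; it is not the tuned SH assembly, the name is mine, and division by zero is not handled.

    #include <stdint.h>

    /* Hypothetical reference version of an unsigned 32/32-bit divide helper. */
    static uint32_t udivsi3_sketch(uint32_t n, uint32_t d)
    {
        uint32_t q = 0, r = 0;
        int i;

        for (i = 31; i >= 0; i--) {
            r = (r << 1) | ((n >> i) & 1);   /* bring down the next dividend bit */
            if (r >= d) {
                r -= d;
                q |= 1u << i;                /* this quotient bit is 1 */
            }
        }
        return q;
    }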

/arch/sh/kernel/cpu/sh3/

  entry.S
      138  mov r15, r4
      153  mov r15, r4
      179  mov r15, r4 ! regs
      194  mov k4, r15
      208  ! - restore r8, r9, r10, r11, r12, r13, r14, r15 from the stack
      218  mov.l @r15+, r0
      219  mov.l @r15+, r1
      220  mov.l @r15+, r2
      221  mov.l @r15+, r3
      222  mov.l @r15+, r4
      [all …]

  swsusp.S
      24  mov.l 1f, r15
      69  mov.l @r15+, r0
      70  mov.l @r15+, r1
      71  mov.l @r15+, r2
      72  mov.l @r15+, r3
      73  mov.l @r15+, r4
      74  mov.l @r15+, r5
      75  mov.l @r15+, r6
      76  mov.l @r15+, r7
      95  mov r15, r2 ! save sp in r2
      [all …]

/arch/microblaze/kernel/

  mcount.S
      82  rtsd r15, 8;
      89  rtsd r15, 8
      93  swi r15, r1, 0;
      113  addik r6, r15, 0; /* MS: load current function addr */
      114  bralid r15, prepare_ftrace_return;
      136  addik r5, r15, -4; /* MS: load current function addr */
      138  brald r15, r20; /* MS: jump to ftrace handler */
      141  lwi r15, r1, 0;
      144  rtsd r15, 8; /* MS: jump back */
      152  swi r15, r1, 0;
      [all …]

  entry.S
      193  swi r15, r1, PT_R15; /* Save LP */ \
      227  lwi r15, r1, PT_R15; /* restore LP */ \
      309  rtsd r15, 8
      327  rtsd r15, 8
      384  brlid r15, do_syscall_trace_enter
      424  addi r15, r0, ret_from_trap-8
      449  brlid r15, do_syscall_trace_leave
      460  bralid r15, schedule; /* Call scheduler */
      470  bralid r15, do_notify_resume; /* Handle any signals */
      504  bralid r15, schedule_tail; /* ...which is schedule_tail's arg */
      [all …]

/arch/openrisc/lib/

  memset.S
      36  l.slli r15, r13, 16 // r13 = 000c, r15 = 0c00
      37  l.or r13, r13, r15 // r13 = 0c0c, r15 = 0c00
      38  l.slli r15, r13, 8 // r13 = 0c0c, r15 = c0c0
      39  l.or r13, r13, r15 // r13 = cccc, r15 = c0c0
      47  l.andi r15, r3, 0x3
      50  l.sfeqi r15, 0
      58  l.sfeqi r15, 3
      66  l.sfeqi r15, 2
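
The first four memset.S hits replicate the fill byte across a 32-bit word with two shift-and-or steps, so the inner loop can store a word at a time. A minimal C sketch of the same byte-splat trick follows; the function name is mine, and the commented values assume a fill byte of 0x0c as in the listing's comments.

    #include <stdint.h>

    /* Splat one byte across a 32-bit word, mirroring the l.slli/l.or pairs:
     * shift by 16 and or, then shift by 8 and or. */
    static uint32_t splat_byte(uint8_t c)
    {
        uint32_t w = c;      /* 0x0000000c                 */

        w |= w << 16;        /* 0x000c000c                 */
        w |= w << 8;         /* 0x0c0c0c0c, ready to store */
        return w;
    }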

/arch/ia64/lib/

  ip_fast_csum.S
      40  add r15=4,in0 // second source pointer
      45  (p7) ld4 r21=[r15],8
      49  ld4 r23=[r15],8
      106  dep r15=in3,in2,32,16
      110  mux1 r15=r15,@rev
      117  shr.u r15=r15,16
      125  add r15=r15,in4
      127  add r8=r8,r15
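
ip_fast_csum.S computes the IPv4 header checksum; the hits show r15 used as a second source pointer so two 4-byte loads can run per iteration. For orientation only, a portable and unoptimized C sketch of the same end result (the function name is mine, not the kernel's):

    #include <stdint.h>

    /* One's-complement sum over an IPv4 header of ihl 32-bit words,
     * folded to 16 bits and complemented. Not the kernel's ia64 code. */
    static uint16_t ip_fast_csum_sketch(const void *iph, unsigned int ihl)
    {
        const uint16_t *p = iph;
        uint32_t sum = 0;
        unsigned int i;

        for (i = 0; i < ihl * 2; i++)    /* ihl counts 32-bit words */
            sum += p[i];

        while (sum >> 16)                /* end-around carry fold */
            sum = (sum & 0xffff) + (sum >> 16);

        return (uint16_t)~sum;
    }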

/arch/powerpc/crypto/

  sha1-powerpc-asm.S
      44  add r0,RE(t),r15; \
      57  add r0,RE(t),r15; \
      71  add r0,RE(t),r15; \
      81  add r0,RE(t),r15; \
      100  add r0,RE(t),r15; \
      142  lis r15,0x5a82 /* K0-19 */
      143  ori r15,r15,0x7999
      150  lis r15,0x6ed9 /* K20-39 */
      151  ori r15,r15,0xeba1
      154  lis r15,0x8f1b /* K40-59 */
      [all …]
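
Here r15 holds the current SHA-1 round constant, built with a lis/ori pair: lis loads the upper 16 bits, ori merges the lower 16. A small C illustration of that two-halves construction, checked against the two constants shown in full above (the helper name is mine):

    #include <assert.h>
    #include <stdint.h>

    /* "lis rX,hi" followed by "ori rX,rX,lo" assembles a 32-bit constant. */
    static uint32_t lis_ori(uint16_t hi, uint16_t lo)
    {
        return ((uint32_t)hi << 16) | lo;
    }

    int main(void)
    {
        assert(lis_ori(0x5a82, 0x7999) == 0x5a827999);   /* K0-19  */
        assert(lis_ori(0x6ed9, 0xeba1) == 0x6ed9eba1);   /* K20-39 */
        return 0;
    }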

/arch/ia64/kernel/

  gate.S
      125  ld8 r15=[base1] // get address of new RBS base (or NULL)
      128  cmp.ne p1,p0=r15,r0 // do we need to switch rbs? (note: pr is saved by kernel)
      167  ld8 r15=[base0] // fetch sc_ar_bsp
      170  cmp.ne p1,p0=r14,r15 // do we need to restore the rbs?
      191  mov r15=__NR_rt_sigreturn
      205  mov ar.bspstore=r15 // switch over to new register backing store area
      215  sub r15=r16,r15
      220  shl r15=r15,16
      222  st8 [r14]=r15 // save sc_loadrs
      251  extr.u r18=r15,3,6 // r18 <- rse_slot_num(bsp0)
      [all …]

/arch/powerpc/include/asm/

  exception-64e.h
      98  std r15,EX_TLB_R15(r12); \
      99  mfspr r15,SPRN_SRR1; \
      105  std r15,EX_TLB_SRR1(r12); \
      128  ld r15,EX_TLB_SRR0(r12); \
      135  mtspr SPRN_SRR0,r15; \
      136  ld r15,EX_TLB_R15(r12); \

/arch/microblaze/lib/

  mulsi3.S
      16  .frame r1, 0, r15
      37  rtsd r15, 8
      40  rtsd r15, 8
      43  rtsd r15, 8
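
In mulsi3.S, r15 is MicroBlaze's link register: each "rtsd r15, 8" returns from the software multiply helper. As a reminder of what such a helper computes, a schematic shift-and-add multiply in C (my naming, and not the assembly's actual control flow):

    #include <stdint.h>

    /* Schematic 32 x 32 -> 32-bit multiply by repeated shift and add. */
    static uint32_t mulsi3_sketch(uint32_t a, uint32_t b)
    {
        uint32_t result = 0;

        while (b) {
            if (b & 1)
                result += a;    /* add the shifted multiplicand */
            a <<= 1;
            b >>= 1;
        }
        return result;
    }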

/arch/s390/kernel/vdso64/

  vdso_user_wrapper.S
      23  aghi %r15,-WRAPPER_FRAME_SIZE
      26  stg %r14,STACK_FRAME_OVERHEAD(%r15)
      29  lg %r14,STACK_FRAME_OVERHEAD(%r15)
      31  aghi %r15,WRAPPER_FRAME_SIZE

/arch/x86/include/asm/shared/

  tdx.h
      43  u64 r15;    [member]
      58  static inline u64 _tdx_hypercall(u64 fn, u64 r12, u64 r13, u64 r14, u64 r15)    [argument in _tdx_hypercall()]
      66  .r15 = r15,    [in _tdx_hypercall()]
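
These hits show the role of r15 here: the hypercall arguments are gathered into a struct whose fields are named after guest registers, and the wrapper copies its r15 parameter into the .r15 field before making the actual call. A stripped-down sketch of that shape only; the struct name, helper names, and the no-op "hypercall" are mine, not the kernel's definitions.

    #include <stdint.h>

    struct guest_regs {              /* stand-in for the kernel's struct */
        uint64_t r12;
        uint64_t r13;
        uint64_t r14;
        uint64_t r15;
    };

    /* Placeholder for the assembly helper that would move these values
     * into the real registers and issue the hypercall instruction. */
    static uint64_t do_hypercall(uint64_t fn, struct guest_regs *args)
    {
        (void)args;
        return fn;                   /* no-op in this sketch */
    }

    static uint64_t hypercall_sketch(uint64_t fn, uint64_t r12, uint64_t r13,
                                     uint64_t r14, uint64_t r15)
    {
        struct guest_regs args = {
            .r12 = r12,
            .r13 = r13,
            .r14 = r14,
            .r15 = r15,              /* mirrors ".r15 = r15," in the hits */
        };

        return do_hypercall(fn, &args);
    }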

/arch/powerpc/lib/

  checksum_64.S
      64  std r15,STK_REG(R15)(r1)
      86  ld r15,48(r3)
      98  adde r0,r0,r15
      113  ld r15,48(r3)
      121  adde r0,r0,r15
      125  ld r15,STK_REG(R15)(r1)
      257  std r15,STK_REG(R15)(r1)
      279  source; ld r15,48(r3)
      296  dest; std r15,48(r4)
      300  adde r0,r0,r15
      [all …]
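
The "adde r0,r0,r15" hits belong to an unrolled checksum loop: each 64-bit word is added into r0 together with the carry from the previous addition. A rough C sketch of the same carry-folding accumulation; the helper name is mine, and __builtin_add_overflow is a GCC/Clang builtin rather than anything in the kernel source.

    #include <stdint.h>

    /* Add one 64-bit word into a running sum, folding the carry-out back
     * in (similar in effect to the adde chain plus a final carry fold). */
    static uint64_t csum_add64(uint64_t sum, uint64_t word)
    {
        uint64_t res;
        int carry = __builtin_add_overflow(sum, word, &res);

        return res + carry;    /* cannot overflow a second time */
    }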