/arch/sh/kernel/

  relocate_kernel.S
      26  mov.l r15, @-r0
      27  mov r0, r15
      28  mov.l r14, @-r15
      29  mov.l r13, @-r15
      30  mov.l r12, @-r15
      31  mov.l r11, @-r15
      32  mov.l r10, @-r15
      33  mov.l r9, @-r15
      34  mov.l r8, @-r15
      37  sts.l macl, @-r15
      [all …]

  entry-common.S
      85  mov.l @(r0,r15), r0 ! get status register
      200  mov.l @(OFF_R4,r15), r4 ! arg0
      201  mov.l @(OFF_R5,r15), r5
      202  mov.l @(OFF_R6,r15), r6
      203  mov.l @(OFF_R7,r15), r7 ! arg3
      204  mov.l @(OFF_R3,r15), r3 ! syscall_nr
      211  mov.l r0, @(OFF_R0,r15) ! Return value
      215  mov.l @(r0,r15), r0 ! get status register
      239  mov.l r0, @(OFF_R0,r15) ! Return value
      314  mov.l @(OFF_R5,r15), r5 ! fn
      [all …]

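Note on the SuperH hits above: r15 is the SH ABI stack pointer. In
relocate_kernel.S the "mov.l Rn, @-r15" stores are register pushes, and in
entry-common.S the saved user registers are read back out of the pt_regs
frame on the kernel stack with r15-relative displacement addressing,
@(disp,r15); OFF_R0..OFF_R7 are the kernel's offsets into that frame. A
minimal sketch of the two load forms involved, in SH-4 GNU as syntax
(illustrative only, not kernel code; the symbol name and the offset 8 are
made up):

      .text
      .global demo_frame_access
      demo_frame_access:
      mov.l   @(8,r15), r4    ! load the word 8 bytes above the stack pointer
      mov     #0, r0          ! r0 = 0, index for the r0-indexed form
      mov.l   @(r0,r15), r1   ! load the word at address r15 + r0
      rts                     ! return through pr
       nop                    ! delay slot of rts
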
/arch/sh/include/asm/

  entry-macros.S
      30  mov r15, \ti
      38  mov.l r0, @-r15
      39  mov.l r1, @-r15
      40  mov.l r2, @-r15
      41  mov.l r3, @-r15
      42  mov.l r4, @-r15
      43  mov.l r5, @-r15
      44  mov.l r6, @-r15
      45  mov.l r7, @-r15
      51  mov.l @r15+, r7
      [all …]

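The entry-macros.S hits are the same stack-pointer idiom packaged as
save/restore macros: a "mov.l Rn, @-r15" pre-decrement store pushes a
register, and a "mov.l @r15+, Rn" (or "lds.l @r15+, ...") post-increment
load pops it again. A minimal, self-contained sketch in SH-4 GNU as syntax
(illustrative only, not kernel code; the symbol name is made up):

      .text
      .global demo_push_pop
      demo_push_pop:
      mov.l   r14, @-r15      ! push r14 (pre-decrement the stack pointer)
      sts.l   pr, @-r15       ! push the procedure return register
      ! ... function body would go here ...
      lds.l   @r15+, pr       ! pop pr (post-increment the stack pointer)
      mov.l   @r15+, r14      ! pop r14
      rts                     ! return through pr
       nop                    ! delay slot of rts
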
/arch/powerpc/mm/

  tlb_low_64e.S
      69  std r15,EX_TLB_R15(r12)
      94  ld r15,EX_TLB_R15(r12)
      128  srdi r15,r16,60 /* get region */
      134  cmpwi r15,0 /* user vs kernel check */
      153  rldicl r15,r16,64-PGDIR_SHIFT+3,64-PGD_INDEX_SIZE-3
      155  clrrdi r15,r15,3
      163  ldx r14,r14,r15 /* grab pgd entry */
      166  ldx r14,r14,r15 /* grab pgd entry */
      170  rldicl r15,r16,64-PUD_SHIFT+3,64-PUD_INDEX_SIZE-3
      171  clrrdi r15,r15,3
      [all …]

/arch/s390/kernel/

  mcount.S
      43  lgr %r1,%r15
      48  aghi %r15,-TRACED_FUNC_FRAME_SIZE
      49  stg %r1,__SF_BACKCHAIN(%r15)
      50  stg %r0,(__SF_GPRS+8*8)(%r15)
      51  stg %r15,(__SF_GPRS+9*8)(%r15)
      53  aghi %r15,-STACK_FRAME_SIZE
      54  stg %r1,(STACK_PTREGS_GPRS+15*8)(%r15)
      56  stg %r1,__SF_BACKCHAIN(%r15)
      57  stg %r0,(STACK_PTREGS_PSW+8)(%r15)
      58  stmg %r2,%r14,(STACK_PTREGS_GPRS+2*8)(%r15)
      [all …]

  swsusp.S
      32  stmg %r6,%r15,__SF_GPRS(%r15)
      33  lgr %r1,%r15
      34  aghi %r15,-STACK_FRAME_OVERHEAD
      35  stg %r1,__SF_BACKCHAIN(%r15)
      41  stnsm __SF_EMPTY(%r15),0xfb
      44  stpx __SF_EMPTY(%r15)
      47  llgf %r10,__SF_EMPTY(%r15)
      56  mvc 0x318(4,%r1),__SF_EMPTY(%r15) /* move prefix to lowcore */
      59  stmg %r0,%r15,0x280(%r1) /* store general registers */
      62  stck __SF_EMPTY(%r15) /* store clock */
      [all …]

  entry.S
      85  tml %r15,\stacksize - CONFIG_STACK_GUARD
      103  slgr %r14,%r15
      107  aghi %r15,-(STACK_FRAME_OVERHEAD + __PT_SIZE)
      109  1: UPDATE_VTIME %r14,%r15,\timer
      111  2: lg %r15,__LC_ASYNC_STACK # load async stack
      112  3: la %r11,STACK_FRAME_OVERHEAD(%r15)
      254  stmg %r6,%r15,__SF_GPRS(%r15) # store gprs of prev task
      258  stg %r15,__THREAD_ksp(%r1) # store kernel stack of prev
      261  lgr %r15,%r5
      262  aghi %r15,STACK_INIT # end of kernel stack of next
      [all …]

  base.S
      21  0: lg %r15,__LC_PANIC_STACK # load panic stack
      22  aghi %r15,-STACK_FRAME_OVERHEAD
      29  lmg %r0,%r15,__LC_GPREGS_SAVE_AREA-4095(%r1)
      40  stmg %r0,%r15,__LC_SAVE_AREA_ASYNC
      42  0: aghi %r15,-STACK_FRAME_OVERHEAD
      48  1: lmg %r0,%r15,__LC_SAVE_AREA_ASYNC
      60  stmg %r0,%r15,__LC_SAVE_AREA_SYNC
      62  0: aghi %r15,-STACK_FRAME_OVERHEAD
      68  lmg %r0,%r15,__LC_SAVE_AREA_SYNC

/arch/sh/kernel/cpu/sh2/

  entry.S
      58  mov.l @(5*4,r15),r3 ! previous SR
      62  mov.l r3,@(5*4,r15) ! update SR
      78  mov r15,r2 ! r2 = user stack top
      79  mov r0,r15 ! switch kernel stack
      80  mov.l r1,@-r15 ! TRA
      81  sts.l macl, @-r15
      82  sts.l mach, @-r15
      83  stc.l gbr, @-r15
      85  mov.l r0,@-r15 ! original SR
      86  sts.l pr,@-r15
      [all …]

/arch/sh/kernel/cpu/sh2a/

  entry.S
      50  bst.b #6,@(4*4,r15) !set cpu mode to SR.MD
      59  mov r15,r2 ! r2 = user stack top
      60  mov r0,r15 ! switch kernel stack
      61  mov.l r1,@-r15 ! TRA
      62  sts.l macl, @-r15
      63  sts.l mach, @-r15
      64  stc.l gbr, @-r15
      66  mov.l r0,@-r15 ! original SR
      67  sts.l pr,@-r15
      69  mov.l r0,@-r15 ! original PC
      [all …]

/arch/sh/kernel/cpu/shmobile/

  sleep.S
      56  sts.l pr, @-r15
      57  mov.l r14, @-r15
      58  mov.l r13, @-r15
      59  mov.l r12, @-r15
      60  mov.l r11, @-r15
      61  mov.l r10, @-r15
      62  mov.l r9, @-r15
      63  mov.l r8, @-r15
      91  mov.l r15, @(SH_SLEEP_SP, r5)
      92  mov r5, r15
      [all …]

/arch/sh/lib/

  mcount.S
      16  mov.l r4, @-r15; \
      17  mov.l r5, @-r15; \
      18  mov.l r6, @-r15; \
      19  mov.l r7, @-r15; \
      20  sts.l pr, @-r15; \
      22  mov.l @(20,r15),r4; \
      26  lds.l @r15+, pr; \
      27  mov.l @r15+, r7; \
      28  mov.l @r15+, r6; \
      29  mov.l @r15+, r5; \
      [all …]

  udivsi3_i4i.S
      75  mov.l r4,@-r15
      77  mov.l r1,@-r15
      88  mov.l r4,@-r15
      91  mov.l r5,@-r15
      102  mov.l @r15+,r5
      106  mov.l @r15+,r4
      114  mov.l @r15+,r5
      116  mov.l @r15+,r4
      129  mov.l r4,@-r15
      131  mov.l r1,@-r15
      [all …]

  udivsi3_i4i-Os.S
      49  mov.l r4,@-r15
      56  mov.l r5,@-r15
      73  mov.l @r15+,r5
      75  mov.l @r15+,r4
      94  mov.l r5,@-r15
      102  mov.l @r15+,r5
      103  mov.l @r15+,r4
      113  mov.l r4,@-r15
      115  mov.l r5,@-r15

/arch/sh/kernel/cpu/sh3/

  entry.S
      142  mov r15, r4
      157  mov r15, r4
      183  mov r15, r4 ! regs
      198  mov k4, r15
      212  ! - restore r8, r9, r10, r11, r12, r13, r14, r15 from the stack
      222  mov.l @r15+, r0
      223  mov.l @r15+, r1
      224  mov.l @r15+, r2
      225  mov.l @r15+, r3
      226  mov.l @r15+, r4
      [all …]

  swsusp.S
      27  mov.l 1f, r15
      72  mov.l @r15+, r0
      73  mov.l @r15+, r1
      74  mov.l @r15+, r2
      75  mov.l @r15+, r3
      76  mov.l @r15+, r4
      77  mov.l @r15+, r5
      78  mov.l @r15+, r6
      79  mov.l @r15+, r7
      98  mov r15, r2 ! save sp in r2
      [all …]

/arch/s390/kernel/vdso32/

  gettimeofday.S
      22  ahi %r15,-16
      33  stcke 0(%r15) /* Store TOD clock */
      35  s %r0,1(%r15)
      36  sl %r1,5(%r15)
      48  15: a %r0,1(%r15) /* add TOD timestamp */
      49  al %r1,5(%r15)
      53  16: lm %r0,%r1,1(%r15) /* load TOD timestamp */
      59  st %r0,0(%r15)
      65  4: al %r0,0(%r15)
      70  5: mvc 0(4,%r15),__VDSO_XTIME_SEC+4(%r5)
      [all …]

  clock_gettime.S
      22  ahi %r15,-16
      38  stcke 0(%r15) /* Store TOD clock */
      39  lm %r0,%r1,1(%r15)
      74  ahi %r15,16
      101  stcke 0(%r15) /* Store TOD clock */
      103  s %r0,1(%r15) /* no - ts_steering_end */
      104  sl %r1,5(%r15)
      116  23: a %r0,1(%r15) /* add TOD timestamp */
      117  al %r1,5(%r15)
      121  24: lm %r0,%r1,1(%r15) /* load TOD timestamp */
      [all …]

/arch/powerpc/kernel/

  idle_book3s.S
      157  3: lwz r15,0(r14)
      158  andis. r15,r15,PNV_CORE_IDLE_LOCK_BIT@h
      161  lwarx r15,0,r14
      162  andis. r9,r15,PNV_CORE_IDLE_LOCK_BIT@h
      278  lwarx r15,0,r14
      280  andis. r9,r15,PNV_CORE_IDLE_LOCK_BIT@h
      283  add r15,r15,r5 /* Add if winkle */
      284  andc r15,r15,r7 /* Clear thread bit */
      286  andi. r9,r15,PNV_CORE_IDLE_THREAD_BITS
      300  stwcx. r15,0,r14
      [all …]

/arch/microblaze/kernel/

  mcount.S
      82  rtsd r15, 8;
      89  rtsd r15, 8
      93  swi r15, r1, 0;
      113  addik r6, r15, 0; /* MS: load current function addr */
      114  bralid r15, prepare_ftrace_return;
      136  addik r5, r15, -4; /* MS: load current function addr */
      138  brald r15, r20; /* MS: jump to ftrace handler */
      141  lwi r15, r1, 0;
      144  rtsd r15, 8; /* MS: jump back */
      152  swi r15, r1, 0;
      [all …]

/arch/openrisc/lib/

  memset.S
      40  l.slli r15, r13, 16 // r13 = 000c, r15 = 0c00
      41  l.or r13, r13, r15 // r13 = 0c0c, r15 = 0c00
      42  l.slli r15, r13, 8 // r13 = 0c0c, r15 = c0c0
      43  l.or r13, r13, r15 // r13 = cccc, r15 = c0c0
      51  l.andi r15, r3, 0x3
      54  l.sfeqi r15, 0
      62  l.sfeqi r15, 3
      70  l.sfeqi r15, 2

/arch/ia64/lib/

  ip_fast_csum.S
      40  add r15=4,in0 // second source pointer
      45  (p7) ld4 r21=[r15],8
      49  ld4 r23=[r15],8
      106  dep r15=in3,in2,32,16
      110  mux1 r15=r15,@rev
      117  shr.u r15=r15,16
      125  add r15=r15,in4
      127  add r8=r8,r15

/arch/powerpc/crypto/

  sha1-powerpc-asm.S
      43  add r0,RE(t),r15; \
      56  add r0,RE(t),r15; \
      70  add r0,RE(t),r15; \
      80  add r0,RE(t),r15; \
      99  add r0,RE(t),r15; \
      142  lis r15,0x5a82 /* K0-19 */
      143  ori r15,r15,0x7999
      150  lis r15,0x6ed9 /* K20-39 */
      151  ori r15,r15,0xeba1
      154  lis r15,0x8f1b /* K40-59 */
      [all …]

/arch/s390/net/

  bpf_jit.S
      55  clg %r3,STK_OFF_HLEN(%r15); /* Offset + SIZE > hlen? */ \
      63  la %r4,STK_OFF_TMP(%r15); /* Arg3 = temp buffer */ \
      66  LOAD %r14,STK_OFF_TMP(%r15); /* Load from temp buffer */ \
      84  clg %r3,STK_OFF_HLEN(%r15) # Offset >= hlen?
      92  la %r4,STK_OFF_TMP(%r15) # Arg3 = pointer to temp buffer
      95  llgc %r14,STK_OFF_TMP(%r15) # Load result from temp buffer
      119  ltgr %r15,%r15 # Set condition code

/arch/ia64/kernel/

  gate.S
      125  ld8 r15=[base1] // get address of new RBS base (or NULL)
      128  cmp.ne p1,p0=r15,r0 // do we need to switch rbs? (note: pr is saved by kernel)
      167  ld8 r15=[base0] // fetch sc_ar_bsp
      170  cmp.ne p1,p0=r14,r15 // do we need to restore the rbs?
      191  mov r15=__NR_rt_sigreturn
      205  mov ar.bspstore=r15 // switch over to new register backing store area
      215  sub r15=r16,r15
      220  shl r15=r15,16
      222  st8 [r14]=r15 // save sc_loadrs
      251  extr.u r18=r15,3,6 // r18 <- rse_slot_num(bsp0)
      [all …]