
Searched refs:tmp2 (Results 1 – 25 of 67) sorted by relevance

/arch/sparc/include/asm/
head_64.h:40 #define BRANCH_IF_CHEETAH_BASE(tmp1,tmp2,label) \ argument
42 sethi %hi(__CHEETAH_ID), %tmp2; \
44 or %tmp2, %lo(__CHEETAH_ID), %tmp2;\
45 cmp %tmp1, %tmp2; \
49 #define BRANCH_IF_JALAPENO(tmp1,tmp2,label) \ argument
51 sethi %hi(__JALAPENO_ID), %tmp2; \
53 or %tmp2, %lo(__JALAPENO_ID), %tmp2;\
54 cmp %tmp1, %tmp2; \
58 #define BRANCH_IF_CHEETAH_PLUS_OR_FOLLOWON(tmp1,tmp2,label) \ argument
60 srlx %tmp1, (32 + 16), %tmp2; \
[all …]
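
The BRANCH_IF_* macros above identify the CPU by comparing a version register against a known implementation ID. A 32-bit constant does not fit in one SPARC instruction, so it is assembled in two steps: sethi %hi() loads the upper 22 bits into tmp2 and or %lo() fills in the lower 10. A minimal C sketch of that split (the __CHEETAH_ID value here is illustrative, not the real ID):

    #include <assert.h>
    #include <stdint.h>

    #define __CHEETAH_ID 0x003e0014u          /* illustrative value only */

    int main(void)
    {
        uint32_t tmp2;

        tmp2  = __CHEETAH_ID & ~0x3ffu;       /* sethi %hi(): upper 22 bits */
        tmp2 |= __CHEETAH_ID &  0x3ffu;       /* or %lo():    lower 10 bits */
        assert(tmp2 == __CHEETAH_ID);         /* the halves reassemble      */
        return 0;
    }
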
spinlock_64.h:73 unsigned long tmp1, tmp2; in arch_spin_lock_flags() local
88 : "=&r" (tmp1), "=&r" (tmp2) in arch_spin_lock_flags()
97 unsigned long tmp1, tmp2; in arch_read_lock() local
113 : "=&r" (tmp1), "=&r" (tmp2) in arch_read_lock()
120 int tmp1, tmp2; in arch_read_trylock() local
132 : "=&r" (tmp1), "=&r" (tmp2) in arch_read_trylock()
141 unsigned long tmp1, tmp2; in arch_read_unlock() local
150 : "=&r" (tmp1), "=&r" (tmp2) in arch_read_unlock()
157 unsigned long mask, tmp1, tmp2; in arch_write_lock() local
175 : "=&r" (tmp1), "=&r" (tmp2) in arch_write_lock()
[all …]
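
In these sparc64 lock routines tmp1 and tmp2 are scratch registers for the hand-written lock loops; the "=&r" (earlyclobber) output constraints stop GCC from allocating them on top of an input operand. A C11 sketch of the behaviour the basic spin-lock loop implements, not the kernel's code:

    #include <stdatomic.h>

    typedef struct { atomic_flag locked; } spinlock_t;

    static void spin_lock(spinlock_t *lock)
    {
        /* ldstub-style loop: atomically set the flag, spin while it
         * was already set by another CPU. */
        while (atomic_flag_test_and_set_explicit(&lock->locked,
                                                 memory_order_acquire))
            ;                                  /* busy-wait */
    }

    static void spin_unlock(spinlock_t *lock)
    {
        atomic_flag_clear_explicit(&lock->locked, memory_order_release);
    }
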
/arch/arm64/lib/
strcmp.S:58 tmp2 .req x8 label
81 orr tmp2, data1, #REP8_7f
83 bic has_nul, tmp1, tmp2 /* Non-zero if NUL terminator. */
100 mov tmp2, #~0
102 CPU_BE( lsl tmp2, tmp2, tmp1 ) /* Shift (tmp1 & 63). */
104 CPU_LE( lsr tmp2, tmp2, tmp1 ) /* Shift (tmp1 & 63). */
106 orr data1, data1, tmp2
107 orr data2, data2, tmp2
118 and tmp2, src2, #7
119 neg tmp2, tmp2
[all …]
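
Lines 81-83 of strcmp.S are the standard SWAR zero-byte test used throughout these arm64 string routines: with tmp1 = word - REP8_01 (computed just above the quoted lines) and tmp2 = word | REP8_7f, the bic (AND NOT) result is non-zero exactly when some byte of the word is NUL. A C sketch of the test:

    #include <stdint.h>

    #define REP8_01 0x0101010101010101ULL
    #define REP8_7f 0x7f7f7f7f7f7f7f7fULL

    /* Non-zero iff some byte of v is 0x00; mirrors
     * "bic has_nul, tmp1, tmp2". */
    static inline uint64_t has_nul_byte(uint64_t v)
    {
        uint64_t tmp1 = v - REP8_01;  /* MSB set in each byte that was
                                       * 0x00 (or that borrowed)       */
        uint64_t tmp2 = v | REP8_7f;  /* ~tmp2: MSB kept only in bytes
                                       * whose MSB in v was clear      */
        return tmp1 & ~tmp2;          /* bic: non-zero iff a NUL byte  */
    }
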
strlen.S:49 tmp2 .req x8 label
79 orr tmp2, data1, #REP8_7f
82 bic has_nul1, tmp1, tmp2
101 CPU_BE( orr tmp2, data2, #REP8_7f )
102 CPU_BE( bic has_nul2, tmp1, tmp2 )
115 mov tmp2, #~0
117 CPU_BE( lsl tmp2, tmp2, tmp1 ) /* Shift (tmp1 & 63). */
119 CPU_LE( lsr tmp2, tmp2, tmp1 ) /* Shift (tmp1 & 63). */
121 orr data1, data1, tmp2
122 orr data2a, data2, tmp2
strncmp.S:59 tmp2 .req x9 label
94 orr tmp2, data1, #REP8_7f
97 bics has_nul, tmp1, tmp2 /* Non-zero if NUL terminator. */
135 mov tmp2, #~0
138 CPU_BE( lsl tmp2, tmp2, tmp3 ) /* Shift (tmp1 & 63). */
140 CPU_LE( lsr tmp2, tmp2, tmp3 ) /* Shift (tmp1 & 63). */
147 orr data1, data1, tmp2
148 orr data2, data2, tmp2
162 and tmp2, src2, #7
163 neg tmp2, tmp2
[all …]
strnlen.S:51 tmp2 .req x9 label
87 orr tmp2, data1, #REP8_7f
90 bic has_nul1, tmp1, tmp2
119 CPU_BE( orr tmp2, data2, #REP8_7f )
120 CPU_BE( bic has_nul2, tmp1, tmp2 )
153 mov tmp2, #~0
155 CPU_BE( lsl tmp2, tmp2, tmp4 ) /* Shift (tmp1 & 63). */
157 CPU_LE( lsr tmp2, tmp2, tmp4 ) /* Shift (tmp1 & 63). */
161 orr data1, data1, tmp2
162 orr data2a, data2, tmp2
memcmp.S:55 tmp2 .req x9 label
126 mov tmp2, #~0
128 CPU_BE( lsl tmp2, tmp2, tmp1 ) /* Big-endian. Early bytes are at MSB */
130 CPU_LE( lsr tmp2, tmp2, tmp1 )
132 orr data1, data1, tmp2
133 orr data2, data2, tmp2
144 and tmp2, src2, #7
145 neg tmp2, tmp2
146 add tmp2, tmp2, #8 /* valid length in the first 8 bytes of src2 */
147 subs tmp3, tmp1, tmp2
[all …]
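
Lines 126-133 of memcmp.S handle a misaligned start: after an aligned load that drags in unwanted leading bytes, an all-ones mask is shifted (lsr on little-endian, lsl on big-endian, where the "early" bytes sit at the other end of the word) and ORed into both data words, so the junk bytes always compare equal. A little-endian C sketch; the assembly keeps the shift count negated, so lsr by (tmp1 & 63) is effectively 64 minus the offset in bits:

    #include <stdint.h>

    /* Force the first `junk` leading bytes (1..7) of a little-endian
     * word to 0xff so they can never produce a mismatch; mirrors
     * "mov tmp2, #~0; lsr tmp2, tmp2, tmp1; orr data1, data1, tmp2". */
    static inline uint64_t mask_leading_bytes_le(uint64_t word, unsigned junk)
    {
        uint64_t tmp2 = ~0ULL >> (64 - 8 * junk); /* ones in low junk*8 bits */
        return word | tmp2;
    }
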
memset.S:45 tmp2 .req x4 label
85 neg tmp2, dst
86 ands tmp2, tmp2, #15
95 sub count, count, tmp2
96 add dst, dst, tmp2
182 neg tmp2, dst
183 ands tmp2, tmp2, zva_bits_x
186 sub tmp1, count, tmp2
202 subs tmp2, tmp2, #64
207 add dst, dst, tmp2
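
Lines 85-96 of memset.S compute the number of bytes needed to reach a 16-byte boundary: negating the destination and ANDing with 15 yields (16 - dst mod 16) mod 16 in two instructions, and count/dst are then adjusted by that amount. The same idiom in C:

    #include <stddef.h>
    #include <stdint.h>

    /* Bytes to round p up to a 16-byte boundary; mirrors
     * "neg tmp2, dst; ands tmp2, tmp2, #15". */
    static inline size_t bytes_to_align16(const void *p)
    {
        return (size_t)(-(uintptr_t)p & 15);
    }
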
copy_template.S:41 tmp2 .req x4 label
59 neg tmp2, src
60 ands tmp2, tmp2, #15 /* Bytes to reach alignment. */
62 sub count, count, tmp2
69 tbz tmp2, #0, 1f
73 tbz tmp2, #1, 2f
77 tbz tmp2, #2, 3f
81 tbz tmp2, #3, .LSrcAligned
memmove.S:45 tmp2 .req x4 label
74 ands tmp2, src, #15 /* Bytes to reach alignment. */
76 sub count, count, tmp2
82 tbz tmp2, #0, 1f
86 tbz tmp2, #1, 2f
90 tbz tmp2, #2, 3f
94 tbz tmp2, #3, .LSrcAligned
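
The tbz ladders in memmove.S and copy_template.S above peel off the misaligned head one power of two at a time: bits 0-3 of the byte count select a 1-, 2-, 4- and 8-byte copy in turn. A C sketch of the same dispatch (forward copy only; names are mine):

    #include <stddef.h>
    #include <string.h>

    /* Copy the 0..15 head bytes needed to reach alignment, testing one
     * bit of `head` per step, as the tbz ladder does. */
    static void copy_head(unsigned char **d, const unsigned char **s,
                          size_t head)
    {
        if (head & 1) { memcpy(*d, *s, 1); *d += 1; *s += 1; }
        if (head & 2) { memcpy(*d, *s, 2); *d += 2; *s += 2; }
        if (head & 4) { memcpy(*d, *s, 4); *d += 4; *s += 4; }
        if (head & 8) { memcpy(*d, *s, 8); *d += 8; *s += 8; }
    }
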
/arch/m68k/lib/
checksum.c:44 unsigned long tmp1, tmp2; in csum_partial() local
122 "=&d" (tmp1), "=&d" (tmp2) in csum_partial()
144 unsigned long tmp1, tmp2; in csum_partial_copy_from_user() local
314 "=&d" (tmp1), "=d" (tmp2) in csum_partial_copy_from_user()
318 *csum_err = tmp2; in csum_partial_copy_from_user()
333 unsigned long tmp1, tmp2; in csum_partial_copy_nocheck() local
420 "=&d" (tmp1), "=&d" (tmp2) in csum_partial_copy_nocheck()
/arch/s390/lib/
uaccess.c:24 unsigned long tmp1, tmp2; in copy_from_user_mvcos() local
55 : "+a" (size), "+a" (ptr), "+a" (x), "+a" (tmp1), "=a" (tmp2) in copy_from_user_mvcos()
63 unsigned long tmp1, tmp2; in copy_from_user_mvcp() local
100 : "+a" (size), "+a" (ptr), "+a" (x), "+a" (tmp1), "=a" (tmp2) in copy_from_user_mvcp()
117 unsigned long tmp1, tmp2; in copy_to_user_mvcos() local
138 : "+a" (size), "+a" (ptr), "+a" (x), "+a" (tmp1), "=a" (tmp2) in copy_to_user_mvcos()
146 unsigned long tmp1, tmp2; in copy_to_user_mvcs() local
173 : "+a" (size), "+a" (ptr), "+a" (x), "+a" (tmp1), "=a" (tmp2) in copy_to_user_mvcs()
190 unsigned long tmp1, tmp2; in copy_in_user_mvcos() local
204 : "+a" (size), "+a" (to), "+a" (from), "+a" (tmp1), "=a" (tmp2) in copy_in_user_mvcos()
[all …]
/arch/arm/include/asm/
tls.h:9 .macro switch_tls_none, base, tp, tpuser, tmp1, tmp2
12 .macro switch_tls_v6k, base, tp, tpuser, tmp1, tmp2
13 mrc p15, 0, \tmp2, c13, c0, 2 @ get the user r/w register
16 str \tmp2, [\base, #TI_TP_VALUE + 4] @ save it
19 .macro switch_tls_v6, base, tp, tpuser, tmp1, tmp2
22 mov \tmp2, #0xffff0fff
24 streq \tp, [\tmp2, #-15] @ set TLS value at 0xffff0ff0
25 mrcne p15, 0, \tmp2, c13, c0, 2 @ get the user r/w register
28 strne \tmp2, [\base, #TI_TP_VALUE + 4] @ save it
31 .macro switch_tls_software, base, tp, tpuser, tmp1, tmp2
/arch/arm64/include/asm/
spinlock.h:254 unsigned int tmp, tmp2; in arch_read_lock() local
275 : "=&r" (tmp), "=&r" (tmp2), "+Q" (rw->lock) in arch_read_lock()
282 unsigned int tmp, tmp2; in arch_read_unlock() local
295 : "=&r" (tmp), "=&r" (tmp2), "+Q" (rw->lock) in arch_read_unlock()
302 unsigned int tmp, tmp2; in arch_read_trylock() local
321 : "=&r" (tmp), "=&r" (tmp2), "+Q" (rw->lock) in arch_read_trylock()
325 return !tmp2; in arch_read_trylock()
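
In arch_read_trylock, tmp2 ends up holding the store-exclusive status flag: stxr writes 0 on success, so "return !tmp2" reports whether the reader count was claimed. A C11 sketch of the same reader-trylock logic, assuming the usual convention that a negative counter means write-locked:

    #include <stdatomic.h>
    #include <stdbool.h>

    typedef struct { atomic_int lock; } rwlock_t; /* < 0: write-locked */

    static bool read_trylock(rwlock_t *rw)
    {
        int old = atomic_load_explicit(&rw->lock, memory_order_relaxed);

        if (old < 0)                 /* a writer holds the lock */
            return false;
        /* compare_exchange plays the ldaxr/stxr role; its boolean
         * result is the "!tmp2" status test. */
        return atomic_compare_exchange_strong_explicit(
                &rw->lock, &old, old + 1,
                memory_order_acquire, memory_order_relaxed);
    }
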
uaccess.h:468 .macro __uaccess_ttbr0_enable, tmp1, tmp2
471 mrs \tmp2, ttbr1_el1
472 extr \tmp2, \tmp2, \tmp1, #48
473 ror \tmp2, \tmp2, #16
474 msr ttbr1_el1, \tmp2 // set the active ASID
480 .macro uaccess_ttbr0_disable, tmp1, tmp2
482 save_and_disable_irq \tmp2 // avoid preemption
484 restore_irq \tmp2
488 .macro uaccess_ttbr0_enable, tmp1, tmp2, tmp3
491 __uaccess_ttbr0_enable \tmp1, \tmp2
[all …]
assembler.h:320 .macro dcache_by_line_op op, domain, kaddr, size, tmp1, tmp2
321 dcache_line_size \tmp1, \tmp2
323 sub \tmp2, \tmp1, #1
324 bic \kaddr, \kaddr, \tmp2
433 .macro cpu_midr_match model, rv_min, rv_max, res, tmp1, tmp2, tmp3
436 mov_q \tmp2, MIDR_CPU_PART_MASK
437 and \tmp3, \res, \tmp2 // Extract model
439 mov_q \tmp2, \model
440 cmp \tmp3, \tmp2
451 mov_q \tmp2, \rv_max
[all …]
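
cpu_midr_match extracts the implementer/part-number field from a MIDR value, compares it with the wanted model, then range-checks the revision/variant field against rv_min and rv_max. A hedged C sketch of that check; MIDR_CPU_PART_MASK appears in the excerpt, but the mask values and field layout below are assumptions:

    #include <stdbool.h>
    #include <stdint.h>

    #define MIDR_CPU_PART_MASK 0xff00fff0u /* implementer|part (assumed) */
    #define MIDR_REV_MASK      0x00f0000fu /* variant|revision (assumed) */

    static bool cpu_midr_match(uint32_t midr, uint32_t model,
                               uint32_t rv_min, uint32_t rv_max)
    {
        if ((midr & MIDR_CPU_PART_MASK) != model)  /* wrong CPU model  */
            return false;
        uint32_t rv = midr & MIDR_REV_MASK;        /* variant/revision */
        return rv >= rv_min && rv <= rv_max;       /* affected range?  */
    }
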
/arch/arm/mach-tegra/
sleep.h:92 .macro check_cpu_part_num part_num, tmp1, tmp2
95 mov32 \tmp2, \part_num
96 cmp \tmp1, \tmp2
100 .macro exit_smp, tmp1, tmp2
106 check_cpu_part_num 0xc09, \tmp1, \tmp2
110 moveq \tmp2, #0xf
111 moveq \tmp2, \tmp2, lsl \tmp1
113 streq \tmp2, [\tmp1] @ invalidate SCU tags for CPU
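
exit_smp takes the calling CPU out of coherency; on a Cortex-A9 the final store invalidates that CPU's tag ways in the Snoop Control Unit, writing 0xf shifted into the CPU's 4-bit field. A hedged C sketch; the SCU register offset and the cpu*4 shift are assumptions based on the A9 SCU layout, since the excerpt omits the address setup:

    #include <stdint.h>

    #define SCU_INVALIDATE_ALL 0x0c       /* assumed register offset */

    /* Invalidate one CPU's SCU tag ways: 4 way bits per CPU. */
    static void scu_invalidate_cpu(volatile uint8_t *scu_base, unsigned cpu)
    {
        uint32_t tmp2 = 0xfu << (cpu * 4);
        *(volatile uint32_t *)(scu_base + SCU_INVALIDATE_ALL) = tmp2;
    }
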
/arch/tile/lib/
memcpy_64.c:99 op_t tmp0 = 0, tmp1 = 0, tmp2, tmp3; in memcpy() local
118 tmp2 = LD8(src8++); in memcpy()
127 tmp2 = LD8(src8++); in memcpy()
139 tmp2 = LD8(src8++); in memcpy()
144 tmp1 = __insn_dblalign(tmp1, tmp2, srci); in memcpy()
148 tmp2 = __insn_dblalign(tmp2, tmp3, srci); in memcpy()
149 ST8(dst8++, tmp2); in memcpy()
184 tmp2 = LD8(src8++); in memcpy()
193 tmp1 = __insn_dblalign(tmp1, tmp2, srci); in memcpy()
194 tmp2 = __insn_dblalign(tmp2, tmp3, srci); in memcpy()
[all …]
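
In the tile memcpy, tmp1/tmp2/tmp3 form a sliding window of source words and __insn_dblalign merges two adjacent words into one aligned destination word when src and dst disagree on alignment. The exact dblalign semantics are tile-specific; a generic little-endian funnel shift doing the same job:

    #include <stdint.h>

    /* Merge two adjacent little-endian words into the aligned word
     * that starts `off` bytes (1..7) into `a`. */
    static inline uint64_t dblalign_le(uint64_t a, uint64_t b, unsigned off)
    {
        unsigned sh = 8 * off;
        return (a >> sh) | (b << (64 - sh));
    }
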
/arch/blackfin/include/asm/
timex.h:18 unsigned long tmp, tmp2; in get_cycles() local
19 __asm__ __volatile__("%0 = cycles; %1 = cycles2;" : "=d"(tmp), "=d"(tmp2)); in get_cycles()
20 return tmp | ((cycles_t)tmp2 << 32); in get_cycles()
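
get_cycles builds a 64-bit timestamp from two 32-bit halves; the single back-to-back read pair is safe here presumably because Blackfin latches CYCLES2 when CYCLES is read. Without such a latch, a portable version must re-read the high half in case the low word wrapped between reads. A sketch with hypothetical register accessors:

    #include <stdint.h>

    extern uint32_t read_cycles_lo(void);  /* hypothetical accessors */
    extern uint32_t read_cycles_hi(void);

    static uint64_t get_cycles64(void)
    {
        uint32_t hi, lo, hi2;

        do {                        /* retry if the low word wrapped */
            hi  = read_cycles_hi();
            lo  = read_cycles_lo();
            hi2 = read_cycles_hi();
        } while (hi != hi2);

        return lo | ((uint64_t)hi << 32);  /* same merge as line 20 */
    }
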
/arch/ia64/lib/
do_csum.S:105 #define tmp2 r27 macro
148 adds tmp2=-1,tmp1 // last-1
152 and last=-8,tmp2 // address of word containing last byte
161 shl tmp2=firstoff,3 // number of bits
168 shl hmask=hmask,tmp2 // build head mask, mask off [0,first1off[
263 shr.u tmp2=result1[0],32
265 add result1[0]=tmp1,tmp2
268 shr.u tmp2=result1[0],16
270 add result1[0]=tmp1,tmp2
273 shr.u tmp2=result1[0],16
[all …]
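
Lines 263-273 fold the wide ones-complement sum down for the IP checksum: add the top 32 bits into the bottom 32, then fold the 16-bit halves, absorbing the carry out of each step with a second add. The same fold in C:

    #include <stdint.h>

    /* Fold a 64-bit ones-complement sum to 16 bits. */
    static uint16_t csum_fold64(uint64_t sum)
    {
        sum = (sum >> 32) + (sum & 0xffffffffULL); /* 64 -> 33 bits  */
        sum = (sum >> 32) + (sum & 0xffffffffULL); /* absorb carry   */
        sum = (sum >> 16) + (sum & 0xffffULL);     /* 32 -> 17 bits  */
        sum = (sum >> 16) + (sum & 0xffffULL);     /* absorb carry   */
        return (uint16_t)sum;
    }
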
/arch/alpha/kernel/
traps.c:463 long error, tmp1, tmp2, tmp3, tmp4; in do_entUna() local
490 : "=r"(error), "=&r"(tmp1), "=&r"(tmp2) in do_entUna()
494 una_reg(reg) = tmp1|tmp2; in do_entUna()
510 : "=r"(error), "=&r"(tmp1), "=&r"(tmp2) in do_entUna()
514 una_reg(reg) = (int)(tmp1|tmp2); in do_entUna()
530 : "=r"(error), "=&r"(tmp1), "=&r"(tmp2) in do_entUna()
534 una_reg(reg) = tmp1|tmp2; in do_entUna()
563 : "=r"(error), "=&r"(tmp1), "=&r"(tmp2), in do_entUna()
593 : "=r"(error), "=&r"(tmp1), "=&r"(tmp2), in do_entUna()
623 : "=r"(error), "=&r"(tmp1), "=&r"(tmp2), in do_entUna()
[all …]
/arch/m32r/include/asm/
spinlock.h:43 unsigned long tmp1, tmp2; in arch_spin_trylock() local
62 : "=&r" (oldval), "=&r" (tmp1), "=&r" (tmp2) in arch_spin_trylock()
205 unsigned long tmp0, tmp1, tmp2; in arch_write_lock() local
247 : "=&r" (tmp0), "=&r" (tmp1), "=&r" (tmp2) in arch_write_lock()
280 unsigned long tmp0, tmp1, tmp2; in arch_write_unlock() local
293 : "=&r" (tmp0), "=&r" (tmp1), "=&r" (tmp2) in arch_write_unlock()
/arch/mips/mm/
sc-ip22.c:102 unsigned long addr, tmp1, tmp2; in indy_sc_enable() local
128 : "=r" (tmp1), "=r" (tmp2), "=r" (addr)); in indy_sc_enable()
133 unsigned long tmp1, tmp2, tmp3; in indy_sc_disable() local
158 : "=r" (tmp1), "=r" (tmp2), "=r" (tmp3)); in indy_sc_disable()
/arch/arm/mach-davinci/
mux.c:67 unsigned tmp1, tmp2; in davinci_cfg_reg() local
76 tmp2 = (cfg->mode << cfg->mask_offset); in davinci_cfg_reg()
77 reg |= tmp2; in davinci_cfg_reg()
79 if (tmp1 != tmp2) in davinci_cfg_reg()
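
davinci_cfg_reg is a read-modify-write of one pin-mux field: tmp1 captures the field's old bits (using cfg->mask, just above the quoted lines), tmp2 is the requested mode shifted into place, and the final tmp1 != tmp2 comparison reports whether the pin setup actually changed. A C sketch of the pattern; names and the return convention are illustrative:

    #include <stdbool.h>
    #include <stdint.h>

    /* Update one pin-mux field; true if the field value changed. */
    static bool cfg_reg_update(volatile uint32_t *reg, uint32_t mask,
                               uint32_t mode, unsigned offset)
    {
        uint32_t tmp1 = *reg & (mask << offset);      /* old field bits */
        uint32_t tmp2 = mode << offset;               /* requested bits */

        *reg = (*reg & ~(mask << offset)) | tmp2;     /* install mode   */
        return tmp1 != tmp2;                          /* did it change? */
    }
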
/arch/arm/kernel/
debug.S:28 .macro addruart_current, rx, tmp1, tmp2 argument
29 addruart \tmp1, \tmp2, \rx
33 movne \rx, \tmp2
37 .macro addruart_current, rx, tmp1, tmp2 argument
38 addruart \rx, \tmp1, \tmp2
