
Searched refs: p1 (Results 1 – 25 of 73), sorted by relevance


/arch/s390/include/asm/
kvm_para.h:43 static inline long __kvm_hypercall1(unsigned long nr, unsigned long p1) in __kvm_hypercall1() argument
46 register unsigned long __p1 asm("2") = p1; in __kvm_hypercall1()
54 static inline long kvm_hypercall1(unsigned long nr, unsigned long p1) in kvm_hypercall1() argument
57 return __kvm_hypercall1(nr, p1); in kvm_hypercall1()
60 static inline long __kvm_hypercall2(unsigned long nr, unsigned long p1, in __kvm_hypercall2() argument
64 register unsigned long __p1 asm("2") = p1; in __kvm_hypercall2()
74 static inline long kvm_hypercall2(unsigned long nr, unsigned long p1, in kvm_hypercall2() argument
78 return __kvm_hypercall2(nr, p1, p2); in kvm_hypercall2()
81 static inline long __kvm_hypercall3(unsigned long nr, unsigned long p1, in __kvm_hypercall3() argument
85 register unsigned long __p1 asm("2") = p1; in __kvm_hypercall3()
[all …]
cmpxchg.h:36 #define __cmpxchg_double(p1, p2, o1, o2, n1, n2) \ argument
38 register __typeof__(*(p1)) __old1 asm("2") = (o1); \
40 register __typeof__(*(p1)) __new1 asm("4") = (n1); \
49 [ptr] "Q" (*(p1)), "Q" (*(p2)) \
54 #define cmpxchg_double(p1, p2, o1, o2, n1, n2) \ argument
56 __typeof__(p1) __p1 = (p1); \
58 BUILD_BUG_ON(sizeof(*(p1)) != sizeof(long)); \
/arch/sparc/include/asm/
xor_32.h:16 sparc_2(unsigned long bytes, unsigned long *p1, unsigned long *p2) in sparc_2() argument
43 : "r" (p1), "r" (p2) in sparc_2()
47 p1 += 8; in sparc_2()
53 sparc_3(unsigned long bytes, unsigned long *p1, unsigned long *p2, in sparc_3() argument
93 : "r" (p1), "r" (p2), "r" (p3) in sparc_3()
97 p1 += 8; in sparc_3()
104 sparc_4(unsigned long bytes, unsigned long *p1, unsigned long *p2, in sparc_4() argument
156 : "r" (p1), "r" (p2), "r" (p3), "r" (p4) in sparc_4()
160 p1 += 8; in sparc_4()
168 sparc_5(unsigned long bytes, unsigned long *p1, unsigned long *p2, in sparc_5() argument
[all …]
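
Most of the p1 hits in this listing are the arch-optimized xor_*_2/3/4/5() routines (sparc above; x86 SSE/AVX/MMX, arm, s390 and arm64 NEON below), which all implement the same kernel XOR-block primitive: fold one or more source buffers into the first buffer, a machine word at a time. As a point of reference, a minimal portable C sketch of what the two-source form computes (illustrative only, not a snippet from the tree; the generic kernel fallback lives in include/asm-generic/xor.h):

static void xor_generic_2(unsigned long bytes,
                          unsigned long *p1, unsigned long *p2)
{
        /* bytes is assumed to be a multiple of sizeof(unsigned long) */
        unsigned long lines = bytes / sizeof(unsigned long);

        while (lines--)
                *p1++ ^= *p2++;         /* p1 is both source and destination */
}
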
/arch/x86/include/asm/
xor_avx.h:29 static void xor_avx_2(unsigned long bytes, unsigned long *p0, unsigned long *p1) in xor_avx_2() argument
39 asm volatile("vmovdqa %0, %%ymm" #reg : : "m" (p1[i / sizeof(*p1)])); \ in xor_avx_2()
49 p1 = (unsigned long *)((uintptr_t)p1 + 512); in xor_avx_2()
55 static void xor_avx_3(unsigned long bytes, unsigned long *p0, unsigned long *p1, in xor_avx_3() argument
68 "m" (p1[i / sizeof(*p1)])); \ in xor_avx_3()
78 p1 = (unsigned long *)((uintptr_t)p1 + 512); in xor_avx_3()
85 static void xor_avx_4(unsigned long bytes, unsigned long *p0, unsigned long *p1, in xor_avx_4() argument
100 "m" (p1[i / sizeof(*p1)])); \ in xor_avx_4()
110 p1 = (unsigned long *)((uintptr_t)p1 + 512); in xor_avx_4()
118 static void xor_avx_5(unsigned long bytes, unsigned long *p0, unsigned long *p1, in xor_avx_5() argument
[all …]
kvm_para.h:42 static inline long kvm_hypercall1(unsigned int nr, unsigned long p1) in kvm_hypercall1() argument
47 : "a"(nr), "b"(p1) in kvm_hypercall1()
52 static inline long kvm_hypercall2(unsigned int nr, unsigned long p1, in kvm_hypercall2() argument
58 : "a"(nr), "b"(p1), "c"(p2) in kvm_hypercall2()
63 static inline long kvm_hypercall3(unsigned int nr, unsigned long p1, in kvm_hypercall3() argument
69 : "a"(nr), "b"(p1), "c"(p2), "d"(p3) in kvm_hypercall3()
74 static inline long kvm_hypercall4(unsigned int nr, unsigned long p1, in kvm_hypercall4() argument
81 : "a"(nr), "b"(p1), "c"(p2), "d"(p3), "S"(p4) in kvm_hypercall4()
xor.h:60 xor_sse_2(unsigned long bytes, unsigned long *p1, unsigned long *p2) in xor_sse_2() argument
103 [p1] "+r" (p1), [p2] "+r" (p2) in xor_sse_2()
111 xor_sse_2_pf64(unsigned long bytes, unsigned long *p1, unsigned long *p2) in xor_sse_2_pf64() argument
137 [p1] "+r" (p1), [p2] "+r" (p2) in xor_sse_2_pf64()
145 xor_sse_3(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_sse_3() argument
196 [p1] "+r" (p1), [p2] "+r" (p2), [p3] "+r" (p3) in xor_sse_3()
204 xor_sse_3_pf64(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_sse_3_pf64() argument
233 [p1] "+r" (p1), [p2] "+r" (p2), [p3] "+r" (p3) in xor_sse_3_pf64()
241 xor_sse_4(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_sse_4() argument
298 : [cnt] "+r" (lines), [p1] "+r" (p1), in xor_sse_4()
[all …]
xor_32.h:24 xor_pII_mmx_2(unsigned long bytes, unsigned long *p1, unsigned long *p2) in xor_pII_mmx_2() argument
59 "+r" (p1), "+r" (p2) in xor_pII_mmx_2()
67 xor_pII_mmx_3(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_pII_mmx_3() argument
108 "+r" (p1), "+r" (p2), "+r" (p3) in xor_pII_mmx_3()
116 xor_pII_mmx_4(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_pII_mmx_4() argument
162 "+r" (p1), "+r" (p2), "+r" (p3), "+r" (p4) in xor_pII_mmx_4()
171 xor_pII_mmx_5(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_pII_mmx_5() argument
230 "+r" (p1), "+r" (p2), "+r" (p3) in xor_pII_mmx_5()
251 xor_p5_mmx_2(unsigned long bytes, unsigned long *p1, unsigned long *p2) in xor_p5_mmx_2() argument
290 "+r" (p1), "+r" (p2) in xor_p5_mmx_2()
[all …]
cmpxchg.h:236 #define __cmpxchg_double(pfx, p1, p2, o1, o2, n1, n2) \ argument
239 __typeof__(*(p1)) __old1 = (o1), __new1 = (n1); \
241 BUILD_BUG_ON(sizeof(*(p1)) != sizeof(long)); \
243 VM_BUG_ON((unsigned long)(p1) % (2 * sizeof(long))); \
244 VM_BUG_ON((unsigned long)((p1) + 1) != (unsigned long)(p2)); \
248 "+m" (*(p1)), "+m" (*(p2)), \
255 #define arch_cmpxchg_double(p1, p2, o1, o2, n1, n2) \ argument
256 __cmpxchg_double(LOCK_PREFIX, p1, p2, o1, o2, n1, n2)
258 #define arch_cmpxchg_double_local(p1, p2, o1, o2, n1, n2) \ argument
259 __cmpxchg_double(, p1, p2, o1, o2, n1, n2)
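
The VM_BUG_ON()s quoted above spell out the contract of arch_cmpxchg_double(): p1 and p2 must be two adjacent longs aligned to 2*sizeof(long), so the pair can be swapped in a single double-word compare-and-exchange, and the macro reports whether both old values matched. A hypothetical caller, sketched in kernel-style C (the pair struct and try_update_pair() are not from the tree):

struct pair {
        unsigned long lo;
        unsigned long hi;
} __aligned(2 * sizeof(unsigned long));

static bool try_update_pair(struct pair *p,
                            unsigned long old_lo, unsigned long old_hi,
                            unsigned long new_lo, unsigned long new_hi)
{
        /* true only if both words still held the old values and were replaced */
        return arch_cmpxchg_double(&p->lo, &p->hi,
                                   old_lo, old_hi,
                                   new_lo, new_hi);
}
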
/arch/arm/include/asm/
xor.h:47 xor_arm4regs_2(unsigned long bytes, unsigned long *p1, unsigned long *p2) in xor_arm4regs_2() argument
60 GET_BLOCK_4(p1); in xor_arm4regs_2()
62 PUT_BLOCK_4(p1); in xor_arm4regs_2()
67 xor_arm4regs_3(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_arm4regs_3() argument
81 GET_BLOCK_4(p1); in xor_arm4regs_3()
84 PUT_BLOCK_4(p1); in xor_arm4regs_3()
89 xor_arm4regs_4(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_arm4regs_4() argument
99 GET_BLOCK_2(p1); in xor_arm4regs_4()
103 PUT_BLOCK_2(p1); in xor_arm4regs_4()
108 xor_arm4regs_5(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_arm4regs_5() argument
[all …]
/arch/nds32/kernel/
ex-scall.S:22 addi $p1, $r0, #THREAD_CPU_CONTEXT
23 smw.bi $r6, [$p1], $r14, #0xb ! push r6~r14, fp, lp, sp
56 andi $p1, $p0, #_TIF_WORK_SYSCALL_ENTRY ! are we tracing syscalls?
57 bnez $p1, __sys_trace
61 addi $p1, $r7, #-__NR_syscalls ! syscall number of syscall instruction is guarded by assembler
62 bgez $p1, _SCNO_EXCEED ! call sys_* routine
64 slli $p1, $r7, #2
65 add $p1, tbl, $p1
66 lwi $p1, [$p1]
67 jr $p1 ! no return
[all …]
ex-entry.S:68 movi $p1, #0x0
69 cmovz $fp, $p1, $p0
108 andi $p1, $p0, #NDS32_VECTOR_mskNONEXCEPTION
109 bnez $p1, 1f
112 sethi $p1, hi20(exception_handlers)
113 ori $p1, $p1, lo12(exception_handlers)
114 lw $p1, [$p1+$p0<<2]
126 jr $p1
129 addi $p1, $p0, #-NDS32_VECTOR_offEXCEPTION
130 bnez $p1, 2f
[all …]
ex-exit.S:109 andi $p1, $r1, #_TIF_WORK_MASK
110 bnez $p1, fast_work_pending
120 andi $p1, $r1, #_TIF_NEED_RESCHED
121 bnez $p1, work_resched
123 andi $p1, $r1, #_TIF_SIGPENDING|#_TIF_NOTIFY_RESUME|#_TIF_NOTIFY_SIGNAL
124 beqz $p1, no_work_pending
143 andi $p1, $r1, #_TIF_WORK_MASK
144 bnez $p1, work_pending ! handle work_resched, sig_pend
168 andi $p1, $t0, #_TIF_NEED_RESCHED
169 beqz $p1, no_work_pending
[all …]
/arch/s390/lib/
xor.c:14 static void xor_xc_2(unsigned long bytes, unsigned long *p1, unsigned long *p2) in xor_xc_2() argument
31 : : "d" (bytes), "a" (p1), "a" (p2) in xor_xc_2()
35 static void xor_xc_3(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_xc_3() argument
57 : "+d" (bytes), "+a" (p1), "+a" (p2), "+a" (p3) in xor_xc_3()
61 static void xor_xc_4(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_xc_4() argument
87 : "+d" (bytes), "+a" (p1), "+a" (p2), "+a" (p3), "+a" (p4) in xor_xc_4()
91 static void xor_xc_5(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_xc_5() argument
124 : "+d" (bytes), "+a" (p1), "+a" (p2), "+a" (p3), "+a" (p4), in xor_xc_5()
/arch/arm64/include/asm/
xor.h:19 xor_neon_2(unsigned long bytes, unsigned long *p1, unsigned long *p2) in xor_neon_2() argument
22 xor_block_inner_neon.do_2(bytes, p1, p2); in xor_neon_2()
27 xor_neon_3(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_neon_3() argument
31 xor_block_inner_neon.do_3(bytes, p1, p2, p3); in xor_neon_3()
36 xor_neon_4(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_neon_4() argument
40 xor_block_inner_neon.do_4(bytes, p1, p2, p3, p4); in xor_neon_4()
45 xor_neon_5(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_neon_5() argument
49 xor_block_inner_neon.do_5(bytes, p1, p2, p3, p4, p5); in xor_neon_5()
/arch/hexagon/lib/
memset.S:30 p1 = cmp.gtu(r2, #7) define
42 if p1 jump 2f /* skip byte loop */
59 p1 = cmp.eq(r2, #1) define
65 if p1 jumpr r31
72 p1 = cmp.eq(r2, #2) define
78 if p1 jumpr r31
85 p1 = cmp.eq(r2, #4) define
92 if p1 jumpr r31
98 p1 = cmp.eq(r3, #1) define
114 p1 = cmp.eq(r2, #8) define
[all …]
divsi3.S:11 p1 = cmp.gt(r1,#-1) define
15 p3 = xor(p0,p1)
23 p1 = cmp.gtu(r3,r4) define
27 p0 = or(p0,p1)
memcpy_likely_aligned.S:28 p1 = cmp.gtu(r2,#40) define
31 if (p1.new) r15:14 = memd(r1+#40)
39 if (p1) memd(r0+#40) = r15:14
/arch/nds32/lib/
clear_user.S:19 xor $p1, $p1, $p1 ! Use $p1=0 to clear mem
24 USER( smw.bim,$p1, [$r0], $p1) ! Clear the word
29 USER( sbi.bi, $p1, [$r0], #1) ! Clear the byte
memmove.S:21 srli $p1, $r2, #2 ! $p1 is how many words to copy
31 beqz $p1, byte_cpy ! When n is less than a word
35 addi $p1, $p1, #-1 ! How many words left to copy
37 bnez $p1, word_cpy ! If remained words > 0
46 beqz $p1, reverse_byte_cpy ! When n is less than a word
50 addi $p1, $p1, #-1 ! How many words left to copy
52 bnez $p1, reverse_word_cpy ! If remained words > 0
memset.S:10 srli $p1, $r2, 2 ! $p1 is how many words to copy
12 beqz $p1, byte_set ! When n is less than a word
21 addi $p1, $p1, #-1 ! How many words left to copy
23 bnez $p1, word_set ! Still words to set, continue looping
/arch/hexagon/mm/
copy_user_template.S:25 p1 = cmp.gtu(bytes,#15) define
33 if (!p1) jump .Lsmall
58 p1 = cmp.gtu(bytes,#7) define
62 if (!p1) jump .Lsmall
84 p1 = cmp.gtu(bytes,#3) define
88 if (!p1) jump .Lsmall
139 if (!p1) jump .Lsmall
151 if (!p1) jump .Lsmall
163 if (!p1) jump .Lsmall
/arch/arm/mm/
fault-armv.c:217 static int __init check_writebuffer(unsigned long *p1, unsigned long *p2) in check_writebuffer() argument
223 *p1 = one; in check_writebuffer()
227 val = *p1; in check_writebuffer()
243 unsigned long *p1, *p2; in check_writebuffer_bugs() local
247 p1 = vmap(&page, 1, VM_IOREMAP, prot); in check_writebuffer_bugs()
250 if (p1 && p2) { in check_writebuffer_bugs()
251 v = check_writebuffer(p1, p2); in check_writebuffer_bugs()
257 vunmap(p1); in check_writebuffer_bugs()
/arch/arm64/lib/
xor-neon.c:13 void xor_arm64_neon_2(unsigned long bytes, unsigned long *p1, in xor_arm64_neon_2() argument
16 uint64_t *dp1 = (uint64_t *)p1; in xor_arm64_neon_2()
40 void xor_arm64_neon_3(unsigned long bytes, unsigned long *p1, in xor_arm64_neon_3() argument
43 uint64_t *dp1 = (uint64_t *)p1; in xor_arm64_neon_3()
75 void xor_arm64_neon_4(unsigned long bytes, unsigned long *p1, in xor_arm64_neon_4() argument
78 uint64_t *dp1 = (uint64_t *)p1; in xor_arm64_neon_4()
118 void xor_arm64_neon_5(unsigned long bytes, unsigned long *p1, in xor_arm64_neon_5() argument
122 uint64_t *dp1 = (uint64_t *)p1; in xor_arm64_neon_5()
/arch/arm64/boot/dts/xilinx/
zynqmp-zc1232-revA.dts:46 ceva,p1-cominit-params = /bits/ 8 <0x18 0x40 0x18 0x28>;
47 ceva,p1-comwake-params = /bits/ 8 <0x06 0x14 0x08 0x0E>;
48 ceva,p1-burst-params = /bits/ 8 <0x13 0x08 0x4A 0x06>;
49 ceva,p1-retry-params = /bits/ 16 <0x96A4 0x3FFC>;
/arch/powerpc/include/asm/
epapr_hcalls.h:529 static inline long epapr_hypercall1(unsigned int nr, unsigned long p1) in epapr_hypercall1() argument
534 in[0] = p1; in epapr_hypercall1()
538 static inline long epapr_hypercall2(unsigned int nr, unsigned long p1, in epapr_hypercall2() argument
544 in[0] = p1; in epapr_hypercall2()
549 static inline long epapr_hypercall3(unsigned int nr, unsigned long p1, in epapr_hypercall3() argument
555 in[0] = p1; in epapr_hypercall3()
561 static inline long epapr_hypercall4(unsigned int nr, unsigned long p1, in epapr_hypercall4() argument
568 in[0] = p1; in epapr_hypercall4()
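
Unlike the x86 and s390 wrappers, the ePAPR helpers above do not hand p1 to inline asm directly: each one packs its arguments into an in[8] array and defers to a common epapr_hypercall(in, out, nr) routine defined in the same header. A hedged reconstruction of the one-argument wrapper, filling in the lines elided above along those lines:

static inline long epapr_hypercall1(unsigned int nr, unsigned long p1)
{
        unsigned long in[8] = {0};      /* hypercall argument words */
        unsigned long out[8];           /* hypercall return words */

        in[0] = p1;
        return epapr_hypercall(in, out, nr);
}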
