/arch/blackfin/mach-bf561/atomic.S
  108: [--sp] = p1;
  115: GET_CPUID(p1, r7);
  122: p1.l = _barrier_mask;
  123: p1.h = _barrier_mask;
  124: r7 = [p1];
  131: [p1] = r7;
  136: p1 = [sp++];  (define)
  147: [--sp] = p1;
  154: GET_CPUID(p1, r7);
  161: p1.l = _barrier_mask;
  [all …]

/arch/blackfin/mach-bf561/secondary.S
  79: p1.l = _initial_pda_coreb;
  80: p1.h = _initial_pda_coreb;
  92: [p1 + PDA_INIT_DF_RETX] = r0;
  93: [p1 + PDA_INIT_DF_DCPLB] = r1;
  94: [p1 + PDA_INIT_DF_ICPLB] = r2;
  95: [p1 + PDA_INIT_DF_SEQSTAT] = r3;
  97: [p1 + PDA_INIT_RETX] = r4;
  111: p1.l = _coreb_start;
  112: p1.h = _coreb_start;
  113: [p5 + (EVT15 - COREMMR_BASE)] = p1;

/arch/s390/include/asm/kvm_para.h
  48: static inline long __kvm_hypercall1(unsigned long nr, unsigned long p1)  (in __kvm_hypercall1(), argument)
  51: register unsigned long __p1 asm("2") = p1;  (in __kvm_hypercall1())
  59: static inline long kvm_hypercall1(unsigned long nr, unsigned long p1)  (in kvm_hypercall1(), argument)
  62: return __kvm_hypercall1(nr, p1);  (in kvm_hypercall1())
  65: static inline long __kvm_hypercall2(unsigned long nr, unsigned long p1,  (in __kvm_hypercall2(), argument)
  69: register unsigned long __p1 asm("2") = p1;  (in __kvm_hypercall2())
  79: static inline long kvm_hypercall2(unsigned long nr, unsigned long p1,  (in kvm_hypercall2(), argument)
  83: return __kvm_hypercall2(nr, p1, p2);  (in kvm_hypercall2())
  86: static inline long __kvm_hypercall3(unsigned long nr, unsigned long p1,  (in __kvm_hypercall3(), argument)
  90: register unsigned long __p1 asm("2") = p1;  (in __kvm_hypercall3())
  [all …]

/arch/s390/include/asm/cmpxchg.h
  35: #define __cmpxchg_double(p1, p2, o1, o2, n1, n2) \  (argument)
  37: register __typeof__(*(p1)) __old1 asm("2") = (o1); \
  39: register __typeof__(*(p1)) __new1 asm("4") = (n1); \
  48: [ptr] "Q" (*(p1)), "Q" (*(p2)) \
  53: #define cmpxchg_double(p1, p2, o1, o2, n1, n2) \  (argument)
  55: __typeof__(p1) __p1 = (p1); \
  57: BUILD_BUG_ON(sizeof(*(p1)) != sizeof(long)); \

/arch/sparc/include/asm/xor_32.h
  24: sparc_2(unsigned long bytes, unsigned long *p1, unsigned long *p2)  (in sparc_2(), argument)
  51: : "r" (p1), "r" (p2)  (in sparc_2())
  55: p1 += 8;  (in sparc_2())
  61: sparc_3(unsigned long bytes, unsigned long *p1, unsigned long *p2,  (in sparc_3(), argument)
  101: : "r" (p1), "r" (p2), "r" (p3)  (in sparc_3())
  105: p1 += 8;  (in sparc_3())
  112: sparc_4(unsigned long bytes, unsigned long *p1, unsigned long *p2,  (in sparc_4(), argument)
  164: : "r" (p1), "r" (p2), "r" (p3), "r" (p4)  (in sparc_4())
  168: p1 += 8;  (in sparc_4())
  176: sparc_5(unsigned long bytes, unsigned long *p1, unsigned long *p2,  (in sparc_5(), argument)
  [all …]

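All of the xor entries in this listing (sparc xor_32.h above, the x86 xor_avx.h/xor.h/xor_32.h, arm xor.h and s390 xor.c below) implement the same interface: the first pointer argument is the destination block and every following pointer is a source that gets XORed into it, `bytes` at a time; only the unrolling and the register set (SPARC ldd/std, MMX, SSE, AVX, the s390 XC instruction) differ, and the RAID/XOR core benchmarks the candidates at boot to pick the fastest. For reference, a minimal portable sketch of the two-operand case could look like this (plain C, no unrolling; the function name is mine, not the kernel's):

    /*
     * Sketch of the 2-source case: XOR `bytes` bytes of p2 into p1 one
     * machine word at a time.  The routines listed here do the same job
     * unrolled over wide registers, 32 to 512 bytes per iteration.
     */
    static void xor_generic_2(unsigned long bytes, unsigned long *p1,
                              const unsigned long *p2)
    {
            long lines = bytes / sizeof(unsigned long);

            while (lines--)
                    *p1++ ^= *p2++;
    }
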
/arch/x86/include/asm/xor_avx.h
  35: static void xor_avx_2(unsigned long bytes, unsigned long *p0, unsigned long *p1)  (in xor_avx_2(), argument)
  45: asm volatile("vmovdqa %0, %%ymm" #reg : : "m" (p1[i / sizeof(*p1)])); \  (in xor_avx_2())
  55: p1 = (unsigned long *)((uintptr_t)p1 + 512);  (in xor_avx_2())
  61: static void xor_avx_3(unsigned long bytes, unsigned long *p0, unsigned long *p1,  (in xor_avx_3(), argument)
  74: "m" (p1[i / sizeof(*p1)])); \  (in xor_avx_3())
  84: p1 = (unsigned long *)((uintptr_t)p1 + 512);  (in xor_avx_3())
  91: static void xor_avx_4(unsigned long bytes, unsigned long *p0, unsigned long *p1,  (in xor_avx_4(), argument)
  106: "m" (p1[i / sizeof(*p1)])); \  (in xor_avx_4())
  116: p1 = (unsigned long *)((uintptr_t)p1 + 512);  (in xor_avx_4())
  124: static void xor_avx_5(unsigned long bytes, unsigned long *p0, unsigned long *p1,  (in xor_avx_5(), argument)
  [all …]

/arch/x86/include/asm/kvm_para.h
  43: static inline long kvm_hypercall1(unsigned int nr, unsigned long p1)  (in kvm_hypercall1(), argument)
  48: : "a"(nr), "b"(p1)  (in kvm_hypercall1())
  53: static inline long kvm_hypercall2(unsigned int nr, unsigned long p1,  (in kvm_hypercall2(), argument)
  59: : "a"(nr), "b"(p1), "c"(p2)  (in kvm_hypercall2())
  64: static inline long kvm_hypercall3(unsigned int nr, unsigned long p1,  (in kvm_hypercall3(), argument)
  70: : "a"(nr), "b"(p1), "c"(p2), "d"(p3)  (in kvm_hypercall3())
  75: static inline long kvm_hypercall4(unsigned int nr, unsigned long p1,  (in kvm_hypercall4(), argument)
  82: : "a"(nr), "b"(p1), "c"(p2), "d"(p3), "S"(p4)  (in kvm_hypercall4())

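The constraint lists above spell out the guest-side convention for x86 KVM hypercalls: the call number goes in EAX/RAX, the parameters in EBX, ECX, EDX and ESI in order, and the result comes back in EAX/RAX. A compile-only sketch of the one-argument case, written for an Intel (VMX) guest so the trap instruction is a literal vmcall; the real header picks vmcall or vmmcall at runtime, and the helper name here is made up:

    /*
     * Sketch: issue hypercall `nr` with one argument from inside a KVM
     * guest.  nr -> RAX, p1 -> RBX, result <- RAX.  Running this outside
     * a guest simply faults.
     */
    static inline long demo_kvm_hypercall1(unsigned int nr, unsigned long p1)
    {
            long ret;

            asm volatile("vmcall"
                         : "=a" (ret)
                         : "a" (nr), "b" (p1)
                         : "memory");
            return ret;
    }
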
/arch/x86/include/asm/xor.h
  71: xor_sse_2(unsigned long bytes, unsigned long *p1, unsigned long *p2)  (in xor_sse_2(), argument)
  114: [p1] "+r" (p1), [p2] "+r" (p2)  (in xor_sse_2())
  122: xor_sse_2_pf64(unsigned long bytes, unsigned long *p1, unsigned long *p2)  (in xor_sse_2_pf64(), argument)
  148: [p1] "+r" (p1), [p2] "+r" (p2)  (in xor_sse_2_pf64())
  156: xor_sse_3(unsigned long bytes, unsigned long *p1, unsigned long *p2,  (in xor_sse_3(), argument)
  207: [p1] "+r" (p1), [p2] "+r" (p2), [p3] "+r" (p3)  (in xor_sse_3())
  215: xor_sse_3_pf64(unsigned long bytes, unsigned long *p1, unsigned long *p2,  (in xor_sse_3_pf64(), argument)
  244: [p1] "+r" (p1), [p2] "+r" (p2), [p3] "+r" (p3)  (in xor_sse_3_pf64())
  252: xor_sse_4(unsigned long bytes, unsigned long *p1, unsigned long *p2,  (in xor_sse_4(), argument)
  309: : [cnt] "+r" (lines), [p1] "+r" (p1),  (in xor_sse_4())
  [all …]

/arch/x86/include/asm/xor_32.h
  32: xor_pII_mmx_2(unsigned long bytes, unsigned long *p1, unsigned long *p2)  (in xor_pII_mmx_2(), argument)
  67: "+r" (p1), "+r" (p2)  (in xor_pII_mmx_2())
  75: xor_pII_mmx_3(unsigned long bytes, unsigned long *p1, unsigned long *p2,  (in xor_pII_mmx_3(), argument)
  116: "+r" (p1), "+r" (p2), "+r" (p3)  (in xor_pII_mmx_3())
  124: xor_pII_mmx_4(unsigned long bytes, unsigned long *p1, unsigned long *p2,  (in xor_pII_mmx_4(), argument)
  170: "+r" (p1), "+r" (p2), "+r" (p3), "+r" (p4)  (in xor_pII_mmx_4())
  179: xor_pII_mmx_5(unsigned long bytes, unsigned long *p1, unsigned long *p2,  (in xor_pII_mmx_5(), argument)
  238: "+r" (p1), "+r" (p2), "+r" (p3)  (in xor_pII_mmx_5())
  259: xor_p5_mmx_2(unsigned long bytes, unsigned long *p1, unsigned long *p2)  (in xor_p5_mmx_2(), argument)
  298: "+r" (p1), "+r" (p2)  (in xor_p5_mmx_2())
  [all …]

/arch/x86/include/asm/cmpxchg.h
  165: #define __cmpxchg_double(pfx, p1, p2, o1, o2, n1, n2) \  (argument)
  168: __typeof__(*(p1)) __old1 = (o1), __new1 = (n1); \
  170: BUILD_BUG_ON(sizeof(*(p1)) != sizeof(long)); \
  172: VM_BUG_ON((unsigned long)(p1) % (2 * sizeof(long))); \
  173: VM_BUG_ON((unsigned long)((p1) + 1) != (unsigned long)(p2)); \
  176: "+m" (*(p1)), "+m" (*(p2)) \
  182: #define cmpxchg_double(p1, p2, o1, o2, n1, n2) \  (argument)
  183: __cmpxchg_double(LOCK_PREFIX, p1, p2, o1, o2, n1, n2)
  185: #define cmpxchg_double_local(p1, p2, o1, o2, n1, n2) \  (argument)
  186: __cmpxchg_double(, p1, p2, o1, o2, n1, n2)

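Both cmpxchg.h entries (s390 earlier, x86 here) build the same primitive: atomically compare-and-swap a pair of adjacent machine words, which is why the macros check that *(p1) is exactly long-sized, that p1 is aligned to two longs, and that p2 is the word immediately after p1 (CMPXCHG16B/CMPXCHG8B needs that layout on x86, the compare-double-and-swap instruction needs it on s390). The intended semantics, written out as a plain, non-atomic C model (illustrative only, not the kernel macro):

    /*
     * Non-atomic model of a successful cmpxchg_double(): both old values
     * must match before either new value is stored.  The real macros do
     * all of this as a single atomic instruction and return nonzero on
     * success.
     */
    static int cmpxchg_double_model(unsigned long *p1, unsigned long *p2,
                                    unsigned long o1, unsigned long o2,
                                    unsigned long n1, unsigned long n2)
    {
            if (*p1 == o1 && *p2 == o2) {
                    *p1 = n1;
                    *p2 = n2;
                    return 1;
            }
            return 0;
    }
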
/arch/arm/include/asm/xor.h
  50: xor_arm4regs_2(unsigned long bytes, unsigned long *p1, unsigned long *p2)  (in xor_arm4regs_2(), argument)
  63: GET_BLOCK_4(p1);  (in xor_arm4regs_2())
  65: PUT_BLOCK_4(p1);  (in xor_arm4regs_2())
  70: xor_arm4regs_3(unsigned long bytes, unsigned long *p1, unsigned long *p2,  (in xor_arm4regs_3(), argument)
  84: GET_BLOCK_4(p1);  (in xor_arm4regs_3())
  87: PUT_BLOCK_4(p1);  (in xor_arm4regs_3())
  92: xor_arm4regs_4(unsigned long bytes, unsigned long *p1, unsigned long *p2,  (in xor_arm4regs_4(), argument)
  102: GET_BLOCK_2(p1);  (in xor_arm4regs_4())
  106: PUT_BLOCK_2(p1);  (in xor_arm4regs_4())
  111: xor_arm4regs_5(unsigned long bytes, unsigned long *p1, unsigned long *p2,  (in xor_arm4regs_5(), argument)
  [all …]

/arch/s390/lib/xor.c
  12: static void xor_xc_2(unsigned long bytes, unsigned long *p1, unsigned long *p2)  (in xor_xc_2(), argument)
  29: : : "d" (bytes), "a" (p1), "a" (p2)  (in xor_xc_2())
  33: static void xor_xc_3(unsigned long bytes, unsigned long *p1, unsigned long *p2,  (in xor_xc_3(), argument)
  55: : "+d" (bytes), "+a" (p1), "+a" (p2), "+a" (p3)  (in xor_xc_3())
  59: static void xor_xc_4(unsigned long bytes, unsigned long *p1, unsigned long *p2,  (in xor_xc_4(), argument)
  85: : "+d" (bytes), "+a" (p1), "+a" (p2), "+a" (p3), "+a" (p4)  (in xor_xc_4())
  89: static void xor_xc_5(unsigned long bytes, unsigned long *p1, unsigned long *p2,  (in xor_xc_5(), argument)
  122: : "+d" (bytes), "+a" (p1), "+a" (p2), "+a" (p3), "+a" (p4),  (in xor_xc_5())

/arch/hexagon/lib/memset.S
  43: p1 = cmp.gtu(r2, #7)  (define)
  55: if p1 jump 2f /* skip byte loop */
  72: p1 = cmp.eq(r2, #1)  (define)
  78: if p1 jumpr r31
  85: p1 = cmp.eq(r2, #2)  (define)
  91: if p1 jumpr r31
  98: p1 = cmp.eq(r2, #4)  (define)
  105: if p1 jumpr r31
  111: p1 = cmp.eq(r3, #1)  (define)
  127: p1 = cmp.eq(r2, #8)  (define)
  [all …]

/arch/hexagon/lib/memcpy.S
  166: #define nokernel p1 /* no 32byte multiple block in the transfer */
  202: p1 = cmp.eq(ptr_in, ptr_out); /* attempt to overwrite self */  (define)
  205: p1 = or(p2, p1);  (define)
  213: if(p1) jumpr r31; /* =0 */
  275: p1 = cmp.eq(prolog, #0);  (define)
  276: if(!p1.new) prolog = add(prolog, #1);
  471: p1 = cmp.gt(over, #0);  (define)
  474: if(p1) kernel= #0;

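The predicate setups in memset.S (length greater than 7, length equal to 1, 2, 4 or 8) are the usual size dispatch of an optimized memset: store tiny requests with one access of exactly the right width, otherwise fill bytes until the pointer is aligned, do wide aligned stores, and mop up the tail in bytes. A plain C sketch of that strategy follows; it is illustrative, not a transcription of the Hexagon code:

    #include <stddef.h>
    #include <stdint.h>

    /*
     * Sketch: byte head until the destination is word aligned, aligned
     * word stores in the middle, byte tail at the end.  The assembly
     * above guards each size class with a predicate (p1).
     */
    static void *memset_sketch(void *dst, int c, size_t n)
    {
            unsigned char *d = dst;
            unsigned char b = (unsigned char)c;
            uint32_t word = 0x01010101u * b;    /* byte replicated 4x */

            while (n && ((uintptr_t)d & 3)) {   /* align head */
                    *d++ = b;
                    n--;
            }
            while (n >= 4) {                    /* aligned word fill */
                    *(uint32_t *)d = word;
                    d += 4;
                    n -= 4;
            }
            while (n--)                         /* byte tail */
                    *d++ = b;

            return dst;
    }
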
/arch/blackfin/mach-common/head.S
  23: p1 = r1;  (define)
  26: 1: [p1++] = r0;
  88: p1.l = _initial_pda;
  89: p1.h = _initial_pda;
  101: [p1 + PDA_INIT_DF_RETX] = r0;
  102: [p1 + PDA_INIT_DF_DCPLB] = r1;
  103: [p1 + PDA_INIT_DF_ICPLB] = r2;
  104: [p1 + PDA_INIT_DF_SEQSTAT] = r3;
  106: [p1 + PDA_INIT_RETX] = r4;
  175: p1.l = _real_start;
  [all …]

/arch/blackfin/mach-common/entry.S
  718: p1 = r1;  (define)
  736: sp = [p1+(TASK_THREAD+THREAD_KSP)];
  739: p0 = [p1+(TASK_THREAD+THREAD_USP)];
  743: p0 = [p1+(TASK_THREAD+THREAD_PC)];
  815: p1.l = _schedule_and_signal;
  816: p1.h = _schedule_and_signal;
  817: [p0] = p1;
  848: p1.l = _evt_up_evt14;
  849: p1.h = _evt_up_evt14;
  850: [p0] = p1;
  [all …]

/arch/blackfin/mach-common/cache.S
  91: p1 = [p0];  (define)
  92: jump (p1);

/arch/hexagon/mm/copy_user_template.S
  38: p1 = cmp.gtu(bytes,#15)  (define)
  46: if (!p1) jump .Lsmall
  71: p1 = cmp.gtu(bytes,#7)  (define)
  75: if (!p1) jump .Lsmall
  97: p1 = cmp.gtu(bytes,#3)  (define)
  101: if (!p1) jump .Lsmall
  152: if (!p1) jump .Lsmall
  164: if (!p1) jump .Lsmall
  176: if (!p1) jump .Lsmall

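copy_user_template.S shows the mirror image of the memset dispatch above: the thresholds (more than 15, 7 or 3 bytes) decide whether a wide transfer is worth setting up, and anything that fails the test drops to the .Lsmall path for short copies. A rough C rendering of that selection; it is illustrative and additionally assumes the common trick of OR-ing both pointers to test their combined alignment:

    #include <stddef.h>
    #include <stdint.h>

    /*
     * Sketch: choose the widest access that both pointers are aligned
     * for and that the length justifies, then finish with a byte loop.
     */
    static void copy_sketch(void *dst, const void *src, size_t bytes)
    {
            uintptr_t align = (uintptr_t)dst | (uintptr_t)src;
            unsigned char *d = dst;
            const unsigned char *s = src;

            if (bytes > 15 && !(align & 7)) {        /* 8-byte chunks */
                    for (; bytes >= 8; bytes -= 8, d += 8, s += 8)
                            *(uint64_t *)d = *(const uint64_t *)s;
            } else if (bytes > 7 && !(align & 3)) {  /* 4-byte chunks */
                    for (; bytes >= 4; bytes -= 4, d += 4, s += 4)
                            *(uint32_t *)d = *(const uint32_t *)s;
            }
            while (bytes--)                          /* byte tail */
                    *d++ = *s++;
    }
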
/arch/arm/mm/fault-armv.c
  216: static int __init check_writebuffer(unsigned long *p1, unsigned long *p2)  (in check_writebuffer(), argument)
  222: *p1 = one;  (in check_writebuffer())
  226: val = *p1;  (in check_writebuffer())
  242: unsigned long *p1, *p2;  (in check_writebuffer_bugs(), local)
  246: p1 = vmap(&page, 1, VM_IOREMAP, prot);  (in check_writebuffer_bugs())
  249: if (p1 && p2) {  (in check_writebuffer_bugs())
  250: v = check_writebuffer(p1, p2);  (in check_writebuffer_bugs())
  256: vunmap(p1);  (in check_writebuffer_bugs())

/arch/blackfin/kernel/entry.S
  49: p1 = [sp++];  (define)
  51: cc = p1 == 0;
  54: call (p1);

/arch/blackfin/mach-bf609/dpm.S
  60: p1.h = _evt_evt11;
  61: p1.l = _evt_evt11;
  63: [p0] = p1;
  78: p1.h = _dummy_deepsleep;
  79: p1.l = _dummy_deepsleep;
  81: [p0] = p1;

/arch/powerpc/include/asm/epapr_hcalls.h
  529: static inline long epapr_hypercall1(unsigned int nr, unsigned long p1)  (in epapr_hypercall1(), argument)
  534: in[0] = p1;  (in epapr_hypercall1())
  538: static inline long epapr_hypercall2(unsigned int nr, unsigned long p1,  (in epapr_hypercall2(), argument)
  544: in[0] = p1;  (in epapr_hypercall2())
  549: static inline long epapr_hypercall3(unsigned int nr, unsigned long p1,  (in epapr_hypercall3(), argument)
  555: in[0] = p1;  (in epapr_hypercall3())
  561: static inline long epapr_hypercall4(unsigned int nr, unsigned long p1,  (in epapr_hypercall4(), argument)
  568: in[0] = p1;  (in epapr_hypercall4())

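Unlike the x86 wrappers earlier in this listing, the ePAPR wrappers do not bind parameters to registers themselves: each one fills the first slots of an `in[8]` array (the repeated `in[0] = p1` lines) and hands the arrays to a lower-level epapr_hypercall(in, out, nr) helper, which I am assuming lives in the same header and performs the actual register setup and hypervisor trap. A sketch of the two-argument wrapper under that assumption (the _sketch name is mine):

    /* Assumed: the low-level primitive from the same header. */
    unsigned long epapr_hypercall(unsigned long *in, unsigned long *out,
                                  unsigned long nr);

    /*
     * Sketch: marshal two parameters through the in[]/out[] arrays and
     * let the assumed lower-level helper issue the trap.
     */
    static inline long epapr_hypercall2_sketch(unsigned int nr,
                                               unsigned long p1,
                                               unsigned long p2)
    {
            unsigned long in[8];
            unsigned long out[8];

            in[0] = p1;
            in[1] = p2;
            return epapr_hypercall(in, out, nr);
    }
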
/arch/powerpc/kvm/trace_pr.h
  123: TP_PROTO(const char *type, struct kvm_vcpu *vcpu, unsigned long long p1,
  125: TP_ARGS(type, vcpu, p1, p2),
  129: __field( unsigned long long, p1 )
  136: __entry->p1 = p1;
  142: __entry->count, __entry->type, __entry->p1, __entry->p2)

/arch/x86/kernel/cpu/microcode/intel_lib.c
  33: static inline bool cpu_signatures_match(unsigned int s1, unsigned int p1,  (in cpu_signatures_match(), argument)
  40: if (!p1 && !p2)  (in cpu_signatures_match())
  44: return p1 & p2;  (in cpu_signatures_match())

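In cpu_signatures_match() the p1/p2 parameters are processor-flag (platform) masks rather than pointers: a (signature, pf) pair from a microcode header matches the CPU when the signatures are equal and the flag masks are either both zero or share at least one bit. A hedged reconstruction of the whole helper; the `s1 != s2` early return is inferred from context, since the listing only shows three of its lines:

    #include <stdbool.h>

    /* Reconstruction for illustration; see the three lines quoted above. */
    static inline bool cpu_signatures_match(unsigned int s1, unsigned int p1,
                                            unsigned int s2, unsigned int p2)
    {
            if (s1 != s2)
                    return false;

            /* Neither side carries platform flags: signature alone decides. */
            if (!p1 && !p2)
                    return true;

            /* Otherwise the platform-flag masks must intersect. */
            return p1 & p2;
    }
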
/arch/blackfin/include/asm/elf.h
  51: #define ELF_PLAT_INIT(_r) _r->p1 = 0
  57: _regs->p1 = _interp_map_addr; \