
Searched refs:ptr1 (Results 1 – 3 of 3) sorted by relevance

/arch/ia64/lib/
memset.S
32 #define ptr1 r28    (macro)
76 mov ptr1 = dest
88 (p_unalgn) add ptr1 = (MIN1+1), ptr2 // after alignment
133 mov ptr9 = ptr1 // used for prefetching
141 add ptr2 = 8, ptr1 // start of stores (beyond prefetch stores)
142 add ptr1 = tmp, ptr1 // first address beyond total range
186 cmp.lt p_scr, p0 = ptr9, ptr1 // do we need more prefetching?
203 mov ptr9 = ptr1 // used for prefetching
211 add ptr2 = 16, ptr1 // start of stores (beyond prefetch stores)
212 add ptr1 = tmp, ptr1 // first address beyond total range
[all …]
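The comments in these hits describe a split-pointer layout: ptr2 walks the stores, ptr1 is set to the first address beyond the total range, and ptr9 runs ahead issuing prefetches until it reaches ptr1 ("do we need more prefetching?"). The following is a minimal C sketch of that pattern, for illustration only; it is not the ia64 assembly above and it assumes an 8-byte-aligned destination and a length that is a multiple of 8.

#include <stddef.h>
#include <stdint.h>

/* Sketch of the pointer roles seen in memset.S: ptr2 = store pointer,
 * ptr1 = first address beyond the range, ptr9 = prefetch pointer. */
static void memset8_with_prefetch(void *dest, unsigned char c, size_t len)
{
        uint64_t pattern = 0x0101010101010101ULL * c;
        uint64_t *ptr2 = dest;                                 /* start of stores */
        uint64_t *ptr1 = (uint64_t *)((char *)dest + len);     /* first address beyond total range */
        uint64_t *ptr9 = ptr2;                                 /* used for prefetching */

        while (ptr2 < ptr1) {
                if (ptr9 < ptr1) {                             /* do we need more prefetching? */
                        __builtin_prefetch(ptr9, 1);           /* prefetch for write */
                        ptr9 += 16;                            /* advance one 128-byte line */
                }
                *ptr2++ = pattern;
        }
}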
/arch/arm64/include/asm/
cmpxchg.h
205 #define __cmpxchg_double_check(ptr1, ptr2) \    (argument)
207 if (sizeof(*(ptr1)) != 8) \
209 VM_BUG_ON((unsigned long *)(ptr2) - (unsigned long *)(ptr1) != 1); \
212 #define arch_cmpxchg_double(ptr1, ptr2, o1, o2, n1, n2) \    (argument)
215 __cmpxchg_double_check(ptr1, ptr2); \
218 ptr1); \
222 #define arch_cmpxchg_double_local(ptr1, ptr2, o1, o2, n1, n2) \    (argument)
225 __cmpxchg_double_check(ptr1, ptr2); \
228 ptr1); \
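__cmpxchg_double_check() above only admits a pair of 8-byte words where ptr2 sits exactly one unsigned long after ptr1. The following is a hedged sketch of a caller; the struct, its field names, and the helper are hypothetical, and it assumes the cmpxchg_double() wrapper present in this tree, which returns nonzero when both words matched and were replaced.

#include <linux/atomic.h>
#include <linux/types.h>

/* Two adjacent 8-byte words, 16-byte aligned, as the check above requires. */
struct tagged_head {
        void          *head;   /* passed through ptr1 */
        unsigned long  tag;    /* passed through ptr2, must be the next word */
} __aligned(16);

/* Hypothetical helper: install a new head and bump the tag atomically. */
static inline bool tagged_head_replace(struct tagged_head *th,
                                       void *old_head, unsigned long old_tag,
                                       void *new_head)
{
        return cmpxchg_double(&th->head, &th->tag,
                              old_head, old_tag,
                              new_head, old_tag + 1);
}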
percpu.h
144 #define this_cpu_cmpxchg_double_8(ptr1, ptr2, o1, o2, n1, n2) \    (argument)
148 __ret = cmpxchg_double_local( raw_cpu_ptr(&(ptr1)), \
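this_cpu_cmpxchg_double_8() is the arm64 backend that the generic this_cpu_cmpxchg_double() operation resolves to for a pair of 8-byte per-CPU words. A minimal sketch of such a caller follows; the per-CPU variable and the wrapper are hypothetical, and it assumes a tree like this one where the double-word per-CPU cmpxchg family exists and returns true on success.

#include <linux/percpu.h>

/* Hypothetical pair of adjacent per-CPU words, kept cacheline/16-byte aligned. */
struct cpu_pair {
        unsigned long first;
        unsigned long second;
};
static DEFINE_PER_CPU_ALIGNED(struct cpu_pair, demo_pair);

/* Hypothetical helper: replace both words of this CPU's pair atomically
 * (with respect to this CPU) if they still hold the expected values. */
static bool demo_pair_update(unsigned long old1, unsigned long old2,
                             unsigned long new1, unsigned long new2)
{
        return this_cpu_cmpxchg_double(demo_pair.first, demo_pair.second,
                                       old1, old2, new1, new2);
}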