#ifndef _ASM_IA64_CMPXCHG_H
#define _ASM_IA64_CMPXCHG_H

/*
 * Compare/Exchange, forked from asm/intrinsics.h
 * which was:
 *
 *	Copyright (C) 2002-2003 Hewlett-Packard Co
 *	David Mosberger-Tang <davidm@hpl.hp.com>
 */

#ifndef __ASSEMBLY__

#include <linux/types.h>
/* include compiler specific intrinsics */
#include <asm/ia64regs.h>
#ifdef __INTEL_COMPILER
# include <asm/intel_intrin.h>
#else
# include <asm/gcc_intrin.h>
#endif

/*
 * This function doesn't exist, so you'll get a linker error if
 * something tries to do an invalid xchg().
 */
extern void ia64_xchg_called_with_bad_pointer(void);

#define __xchg(x, ptr, size)						\
({									\
	unsigned long __xchg_result;					\
									\
	switch (size) {							\
	case 1:								\
		__xchg_result = ia64_xchg1((__u8 *)ptr, x);		\
		break;							\
									\
	case 2:								\
		__xchg_result = ia64_xchg2((__u16 *)ptr, x);		\
		break;							\
									\
	case 4:								\
		__xchg_result = ia64_xchg4((__u32 *)ptr, x);		\
		break;							\
									\
	case 8:								\
		__xchg_result = ia64_xchg8((__u64 *)ptr, x);		\
		break;							\
	default:							\
		ia64_xchg_called_with_bad_pointer();			\
	}								\
	__xchg_result;							\
})

#define xchg(ptr, x)							\
((__typeof__(*(ptr))) __xchg((unsigned long) (x), (ptr), sizeof(*(ptr))))

/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */

/*
 * This function doesn't exist, so you'll get a linker error
 * if something tries to do an invalid cmpxchg().
 */
extern long ia64_cmpxchg_called_with_bad_pointer(void);

#define ia64_cmpxchg(sem, ptr, old, new, size)				\
({									\
	__u64 _o_, _r_;							\
									\
	switch (size) {							\
	case 1:								\
		_o_ = (__u8) (long) (old);				\
		break;							\
	case 2:								\
		_o_ = (__u16) (long) (old);				\
		break;							\
	case 4:								\
		_o_ = (__u32) (long) (old);				\
		break;							\
	case 8:								\
		_o_ = (__u64) (long) (old);				\
		break;							\
	default:							\
		break;							\
	}								\
	switch (size) {							\
	case 1:								\
		_r_ = ia64_cmpxchg1_##sem((__u8 *) ptr, new, _o_);	\
		break;							\
									\
	case 2:								\
		_r_ = ia64_cmpxchg2_##sem((__u16 *) ptr, new, _o_);	\
		break;							\
									\
	case 4:								\
		_r_ = ia64_cmpxchg4_##sem((__u32 *) ptr, new, _o_);	\
		break;							\
									\
	case 8:								\
		_r_ = ia64_cmpxchg8_##sem((__u64 *) ptr, new, _o_);	\
		break;							\
									\
	default:							\
		_r_ = ia64_cmpxchg_called_with_bad_pointer();		\
		break;							\
	}								\
	(__typeof__(old)) _r_;						\
})

#define cmpxchg_acq(ptr, o, n)	\
	ia64_cmpxchg(acq, (ptr), (o), (n), sizeof(*(ptr)))
#define cmpxchg_rel(ptr, o, n)	\
	ia64_cmpxchg(rel, (ptr), (o), (n), sizeof(*(ptr)))

/*
 * Worse still - early processor implementations actually just ignored
 * the acquire/release semantics and did a full fence all the time.
 * Unfortunately, this meant a lot of badly written code that used .acq
 * when it really wanted .rel became legacy out in the wild - so when
 * we made a cpu that strictly did the .acq or .rel ... all that code
 * started breaking - so we had to back-pedal and keep the "legacy"
 * behavior of a full fence :-(
 */

/* for compatibility with other platforms: */
#define cmpxchg(ptr, o, n)	cmpxchg_acq((ptr), (o), (n))
#define cmpxchg64(ptr, o, n)	cmpxchg_acq((ptr), (o), (n))

#define cmpxchg_local		cmpxchg
#define cmpxchg64_local		cmpxchg64

#ifdef CONFIG_IA64_DEBUG_CMPXCHG
# define CMPXCHG_BUGCHECK_DECL	int _cmpxchg_bugcheck_count = 128;
# define CMPXCHG_BUGCHECK(v)						\
do {									\
	if (_cmpxchg_bugcheck_count-- <= 0) {				\
		void *ip;						\
		extern int printk(const char *fmt, ...);		\
		ip = (void *) ia64_getreg(_IA64_REG_IP);		\
		printk("CMPXCHG_BUGCHECK: stuck at %p on word %p\n", ip, (v));\
		break;							\
	}								\
} while (0)
#else /* !CONFIG_IA64_DEBUG_CMPXCHG */
# define CMPXCHG_BUGCHECK_DECL
# define CMPXCHG_BUGCHECK(v)
#endif /* !CONFIG_IA64_DEBUG_CMPXCHG */

#endif /* !__ASSEMBLY__ */

#endif /* _ASM_IA64_CMPXCHG_H */
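
/*
 * Usage sketch (illustrative only, never compiled): how a caller might
 * use xchg() and the cmpxchg() wrappers defined above.  The variable
 * "owner", the value "prev", and the function example() are hypothetical
 * names invented for this sketch; only xchg(), cmpxchg_acq() and
 * cmpxchg_rel() come from this header.
 */
#if 0	/* example only */
static __u32 owner;

static void example(void)
{
	__u32 prev;

	/* Unconditional atomic swap: prev receives the old contents. */
	prev = xchg(&owner, 0);

	/*
	 * Conditional update with acquire semantics: store prev + 1 only
	 * if owner still contains 0 (the value xchg() just wrote).  As
	 * the comment in this header explains, success is indicated by
	 * the return value comparing equal to the "old" argument.
	 */
	if (cmpxchg_acq(&owner, 0, prev + 1) == 0) {
		/* ... owner is ours; do work ... */

		/* Matching conditional update with release semantics. */
		cmpxchg_rel(&owner, prev + 1, 0);
	}
}
#endif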