/* Atomic functions: similar to pycore_atomic.h, but don't need
   to declare variables as atomic.

   Py_ssize_t type:

   * value = _Py_atomic_size_get(&var)
   * _Py_atomic_size_set(&var, value)

   Use sequentially-consistent ordering (__ATOMIC_SEQ_CST memory order):
   enforce total ordering with all other atomic functions.
*/
12 #ifndef Py_ATOMIC_FUNC_H
13 #define Py_ATOMIC_FUNC_H
14 #ifdef __cplusplus
15 extern "C" {
16 #endif
17 
18 #ifndef Py_BUILD_CORE
19 #  error "this header requires Py_BUILD_CORE define"
20 #endif
21 
22 #if defined(_MSC_VER)
23 #  include <intrin.h>             // _InterlockedExchange()
24 #endif
25 
26 
27 // Use builtin atomic operations in GCC >= 4.7 and clang
28 #ifdef HAVE_BUILTIN_ATOMIC
29 
/* Atomically read *var with sequentially-consistent ordering
   (__ATOMIC_SEQ_CST: totally ordered with all other atomic operations). */
static inline Py_ssize_t _Py_atomic_size_get(Py_ssize_t *var)
{
    return __atomic_load_n(var, __ATOMIC_SEQ_CST);
}
34 
/* Atomically write value into *var with sequentially-consistent ordering
   (__ATOMIC_SEQ_CST: totally ordered with all other atomic operations). */
static inline void _Py_atomic_size_set(Py_ssize_t *var, Py_ssize_t value)
{
    __atomic_store_n(var, value, __ATOMIC_SEQ_CST);
}
39 
40 #elif defined(_MSC_VER)
41 
/* Atomically read *var (MSVC implementation).

   MSVC has no direct atomic-load intrinsic, so emulate a
   sequentially-consistent load with a compare-exchange loop: CAS with
   identical comparand and exchange value (old, old) leaves *var unchanged
   while acting as a full-barrier atomic read.  The loop retries until the
   value read by the plain volatile load matches what the CAS observed,
   i.e. until we have a consistent snapshot. */
static inline Py_ssize_t _Py_atomic_size_get(Py_ssize_t *var)
{
#if SIZEOF_VOID_P == 8
    /* 64-bit build: Py_ssize_t must match __int64 for the intrinsic. */
    Py_BUILD_ASSERT(sizeof(__int64) == sizeof(*var));
    volatile __int64 *volatile_var = (volatile __int64 *)var;
    __int64 old;
    do {
        old = *volatile_var;
    } while(_InterlockedCompareExchange64(volatile_var, old, old) != old);
#else
    /* 32-bit build: Py_ssize_t must match long for the intrinsic. */
    Py_BUILD_ASSERT(sizeof(long) == sizeof(*var));
    volatile long *volatile_var = (volatile long *)var;
    long old;
    do {
        old = *volatile_var;
    } while(_InterlockedCompareExchange(volatile_var, old, old) != old);
#endif
    return old;
}
61 
62 static inline void _Py_atomic_size_set(Py_ssize_t *var, Py_ssize_t value)
63 {
64 #if SIZEOF_VOID_P == 8
65     Py_BUILD_ASSERT(sizeof(__int64) == sizeof(*var));
66     volatile __int64 *volatile_var = (volatile __int64 *)var;
67     _InterlockedExchange64(volatile_var, value);
68 #else
69     Py_BUILD_ASSERT(sizeof(long) == sizeof(*var));
70     volatile long *volatile_var = (volatile long *)var;
71     _InterlockedExchange(volatile_var, value);
72 #endif
73 }
74 
75 #else
76 // Fallback implementation using volatile
77 
78 static inline Py_ssize_t _Py_atomic_size_get(Py_ssize_t *var)
79 {
80     volatile Py_ssize_t *volatile_var = (volatile Py_ssize_t *)var;
81     return *volatile_var;
82 }
83 
84 static inline void _Py_atomic_size_set(Py_ssize_t *var, Py_ssize_t value)
85 {
86     volatile Py_ssize_t *volatile_var = (volatile Py_ssize_t *)var;
87     *volatile_var = value;
88 }
89 #endif
90 
91 #ifdef __cplusplus
92 }
93 #endif
94 #endif  /* Py_ATOMIC_FUNC_H */
95