#ifndef Py_INTERNAL_CRITICAL_SECTION_H
#define Py_INTERNAL_CRITICAL_SECTION_H

#ifndef Py_BUILD_CORE
#  error "this header requires Py_BUILD_CORE define"
#endif

#include "pycore_lock.h"        // PyMutex
#include "pycore_pystate.h"     // _PyThreadState_GET()
#include <stdint.h>

#ifdef __cplusplus
extern "C" {
#endif

// Tagged pointers to critical sections use the two least significant bits to
// mark whether the pointed-to critical section is inactive and whether it is
// a PyCriticalSection2 object.
#define _Py_CRITICAL_SECTION_INACTIVE       0x1
#define _Py_CRITICAL_SECTION_TWO_MUTEXES    0x2
#define _Py_CRITICAL_SECTION_MASK           0x3

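// For example, a tag read from tstate->critical_section can be decoded as
// follows (a sketch; this mirrors what _PyCriticalSection_AssertHeld()
// does below):
//
//     uintptr_t tag = tstate->critical_section;
//     PyCriticalSection *cs =
//         (PyCriticalSection *)(tag & ~_Py_CRITICAL_SECTION_MASK);
//     int inactive = (tag & _Py_CRITICAL_SECTION_INACTIVE) != 0;
//     int two_mutexes = (tag & _Py_CRITICAL_SECTION_TWO_MUTEXES) != 0;
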
#ifdef Py_GIL_DISABLED
# define Py_BEGIN_CRITICAL_SECTION_MUT(mutex)                           \
    {                                                                   \
        PyCriticalSection _py_cs;                                       \
        _PyCriticalSection_BeginMutex(&_py_cs, mutex)

# define Py_BEGIN_CRITICAL_SECTION2_MUT(m1, m2)                         \
    {                                                                   \
        PyCriticalSection2 _py_cs2;                                     \
        _PyCriticalSection2_BeginMutex(&_py_cs2, m1, m2)

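// A sketch of typical usage of the mutex-based begin macro. MyObject, its
// `mutex` field, and set_value() are hypothetical; the matching
// Py_END_CRITICAL_SECTION() comes from the public critical section API:
//
//     static int
//     set_value(MyObject *self, PyObject *value)
//     {
//         Py_BEGIN_CRITICAL_SECTION_MUT(&self->mutex);
//         Py_XSETREF(self->value, Py_NewRef(value));
//         Py_END_CRITICAL_SECTION();
//         return 0;
//     }
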
// Specialized version of critical section locking to safely use
// PySequence_Fast APIs without the GIL. For performance, the argument *to*
// PySequence_Fast() is provided to the macro, not the *result* of
// PySequence_Fast(), which would require an extra test to determine if the
// lock must be acquired.
# define Py_BEGIN_CRITICAL_SECTION_SEQUENCE_FAST(original)              \
    {                                                                   \
        PyObject *_orig_seq = _PyObject_CAST(original);                 \
        const bool _should_lock_cs = PyList_CheckExact(_orig_seq);      \
        PyCriticalSection _cs;                                          \
        if (_should_lock_cs) {                                          \
            _PyCriticalSection_Begin(&_cs, _orig_seq);                  \
        }

# define Py_END_CRITICAL_SECTION_SEQUENCE_FAST()                        \
        if (_should_lock_cs) {                                          \
            PyCriticalSection_End(&_cs);                                \
        }                                                               \
    }

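// A sketch of typical usage (process_item() is hypothetical and error
// handling is elided). The critical section is taken only when `seq` is
// exactly a list, i.e. the case where PySequence_Fast() returns the
// object itself:
//
//     Py_BEGIN_CRITICAL_SECTION_SEQUENCE_FAST(seq);
//     PyObject *fast = PySequence_Fast(seq, "expected a sequence");
//     Py_ssize_t n = PySequence_Fast_GET_SIZE(fast);
//     for (Py_ssize_t i = 0; i < n; i++) {
//         process_item(PySequence_Fast_GET_ITEM(fast, i));
//     }
//     Py_DECREF(fast);
//     Py_END_CRITICAL_SECTION_SEQUENCE_FAST();
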
// Asserts that the mutex is locked.  The mutex must be held by the
// top-most critical section, otherwise there's the possibility
// that the mutex would be swapped out in some code paths.
#define _Py_CRITICAL_SECTION_ASSERT_MUTEX_LOCKED(mutex) \
    _PyCriticalSection_AssertHeld(mutex)

// Asserts that the mutex for the given object is locked. The mutex must
// be held by the top-most critical section, otherwise there's the
// possibility that the mutex would be swapped out in some code paths.
#ifdef Py_DEBUG

# define _Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED(op)                           \
    if (Py_REFCNT(op) != 1) {                                                    \
        _Py_CRITICAL_SECTION_ASSERT_MUTEX_LOCKED(&_PyObject_CAST(op)->ob_mutex); \
    }

#else   /* Py_DEBUG */

# define _Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED(op)

#endif  /* Py_DEBUG */

#else  /* !Py_GIL_DISABLED */
// The critical section APIs are no-ops with the GIL.
# define Py_BEGIN_CRITICAL_SECTION_MUT(mut) {
# define Py_BEGIN_CRITICAL_SECTION2_MUT(m1, m2) {
# define Py_BEGIN_CRITICAL_SECTION_SEQUENCE_FAST(original) {
# define Py_END_CRITICAL_SECTION_SEQUENCE_FAST() }
# define _Py_CRITICAL_SECTION_ASSERT_MUTEX_LOCKED(mutex)
# define _Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED(op)
#endif  /* !Py_GIL_DISABLED */

// Resumes the top-most critical section.
PyAPI_FUNC(void)
_PyCriticalSection_Resume(PyThreadState *tstate);

// (private) slow path for locking the mutex
PyAPI_FUNC(void)
_PyCriticalSection_BeginSlow(PyCriticalSection *c, PyMutex *m);

PyAPI_FUNC(void)
_PyCriticalSection2_BeginSlow(PyCriticalSection2 *c, PyMutex *m1, PyMutex *m2,
                              int is_m1_locked);

PyAPI_FUNC(void)
_PyCriticalSection_SuspendAll(PyThreadState *tstate);

#ifdef Py_GIL_DISABLED

static inline int
_PyCriticalSection_IsActive(uintptr_t tag)
{
    return tag != 0 && (tag & _Py_CRITICAL_SECTION_INACTIVE) == 0;
}

static inline void
_PyCriticalSection_BeginMutex(PyCriticalSection *c, PyMutex *m)
{
    if (PyMutex_LockFast(&m->_bits)) {
        PyThreadState *tstate = _PyThreadState_GET();
        c->_cs_mutex = m;
        c->_cs_prev = tstate->critical_section;
        tstate->critical_section = (uintptr_t)c;
    }
    else {
        _PyCriticalSection_BeginSlow(c, m);
    }
}

static inline void
_PyCriticalSection_Begin(PyCriticalSection *c, PyObject *op)
{
    _PyCriticalSection_BeginMutex(c, &op->ob_mutex);
}
#define PyCriticalSection_Begin _PyCriticalSection_Begin

// Removes the top-most critical section from the thread's stack of critical
// sections. If the new top-most critical section is inactive, then it is
// resumed.
static inline void
_PyCriticalSection_Pop(PyCriticalSection *c)
{
    PyThreadState *tstate = _PyThreadState_GET();
    uintptr_t prev = c->_cs_prev;
    tstate->critical_section = prev;

    if ((prev & _Py_CRITICAL_SECTION_INACTIVE) != 0) {
        _PyCriticalSection_Resume(tstate);
    }
}

static inline void
_PyCriticalSection_End(PyCriticalSection *c)
{
    PyMutex_Unlock(c->_cs_mutex);
    _PyCriticalSection_Pop(c);
}
#define PyCriticalSection_End _PyCriticalSection_End

static inline void
_PyCriticalSection2_BeginMutex(PyCriticalSection2 *c, PyMutex *m1, PyMutex *m2)
{
    if (m1 == m2) {
        // If the two mutex arguments are the same, treat this as a critical
        // section with a single mutex.
        c->_cs_mutex2 = NULL;
        _PyCriticalSection_BeginMutex(&c->_cs_base, m1);
        return;
    }

    if ((uintptr_t)m2 < (uintptr_t)m1) {
        // Sort the mutexes so that the lower address is locked first.
        // The exact order does not matter, but we need to acquire the mutexes
        // in a consistent order to avoid lock ordering deadlocks.
        PyMutex *tmp = m1;
        m1 = m2;
        m2 = tmp;
    }

    if (PyMutex_LockFast(&m1->_bits)) {
        if (PyMutex_LockFast(&m2->_bits)) {
            PyThreadState *tstate = _PyThreadState_GET();
            c->_cs_base._cs_mutex = m1;
            c->_cs_mutex2 = m2;
            c->_cs_base._cs_prev = tstate->critical_section;

            uintptr_t p = (uintptr_t)c | _Py_CRITICAL_SECTION_TWO_MUTEXES;
            tstate->critical_section = p;
        }
        else {
            _PyCriticalSection2_BeginSlow(c, m1, m2, 1);
        }
    }
    else {
        _PyCriticalSection2_BeginSlow(c, m1, m2, 0);
    }
}

static inline void
_PyCriticalSection2_Begin(PyCriticalSection2 *c, PyObject *a, PyObject *b)
{
    _PyCriticalSection2_BeginMutex(c, &a->ob_mutex, &b->ob_mutex);
}
#define PyCriticalSection2_Begin _PyCriticalSection2_Begin

static inline void
_PyCriticalSection2_End(PyCriticalSection2 *c)
{
    if (c->_cs_mutex2) {
        PyMutex_Unlock(c->_cs_mutex2);
    }
    PyMutex_Unlock(c->_cs_base._cs_mutex);
    _PyCriticalSection_Pop(&c->_cs_base);
}
#define PyCriticalSection2_End _PyCriticalSection2_End
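
// A sketch: locking two objects at once, e.g. to read or mutate both
// safely in the free-threaded build (`a` and `b` are assumed to be
// PyObject pointers; call sites typically use the public
// Py_BEGIN_CRITICAL_SECTION2 macros rather than these calls directly):
//
//     PyCriticalSection2 cs;
//     PyCriticalSection2_Begin(&cs, a, b);
//     /* ... read or mutate both objects ... */
//     PyCriticalSection2_End(&cs);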

static inline void
_PyCriticalSection_AssertHeld(PyMutex *mutex)
{
#ifdef Py_DEBUG
    PyThreadState *tstate = _PyThreadState_GET();
    uintptr_t prev = tstate->critical_section;
    if (prev & _Py_CRITICAL_SECTION_TWO_MUTEXES) {
        PyCriticalSection2 *cs = (PyCriticalSection2 *)(prev & ~_Py_CRITICAL_SECTION_MASK);
        assert(cs != NULL && (cs->_cs_base._cs_mutex == mutex || cs->_cs_mutex2 == mutex));
    }
    else {
        PyCriticalSection *cs = (PyCriticalSection *)(tstate->critical_section & ~_Py_CRITICAL_SECTION_MASK);
        assert(cs != NULL && cs->_cs_mutex == mutex);
    }

#endif
}

#endif /* Py_GIL_DISABLED */

#ifdef __cplusplus
}
#endif
#endif /* !Py_INTERNAL_CRITICAL_SECTION_H */