#ifndef BOOST_THREAD_DETAIL_INTERLOCKED_READ_WIN32_HPP
#define BOOST_THREAD_DETAIL_INTERLOCKED_READ_WIN32_HPP

//  interlocked_read_win32.hpp
//
//  (C) Copyright 2005-8 Anthony Williams
//  (C) Copyright 2012 Vicente J. Botet Escriba
//  (C) Copyright 2017 Andrey Semashev
//
//  Distributed under the Boost Software License, Version 1.0. (See
//  accompanying file LICENSE_1_0.txt or copy at
//  http://www.boost.org/LICENSE_1_0.txt)

#include <boost/detail/interlocked.hpp>
#include <boost/thread/detail/config.hpp>

#include <boost/config/abi_prefix.hpp>

// Define compiler barriers
#if defined(__INTEL_COMPILER)
#define BOOST_THREAD_DETAIL_COMPILER_BARRIER() __memory_barrier()
#elif defined(__clang__)
#define BOOST_THREAD_DETAIL_COMPILER_BARRIER() __atomic_signal_fence(__ATOMIC_SEQ_CST)
#elif defined(_MSC_VER) && !defined(_WIN32_WCE)
extern "C" void _ReadWriteBarrier(void);
#pragma intrinsic(_ReadWriteBarrier)
#define BOOST_THREAD_DETAIL_COMPILER_BARRIER() _ReadWriteBarrier()
#endif

#ifndef BOOST_THREAD_DETAIL_COMPILER_BARRIER
#define BOOST_THREAD_DETAIL_COMPILER_BARRIER()
#endif
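// Note: BOOST_THREAD_DETAIL_COMPILER_BARRIER only constrains the compiler; any CPU-level
// ordering that is required is provided by the architecture-specific code below.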

#if defined(_MSC_VER) && (defined(_M_IX86) || defined(_M_X64))

// Since VS2005 volatile reads always have acquire semantics and volatile writes always have
// release semantics. VS2012 adds a compiler switch (/volatile:iso) that can change this
// behavior to the standard one. On x86, though, the compiler generates a single instruction
// for the load/store, which is enough synchronization as far as the hardware is concerned.
// To prevent the compiler from reordering code around the load/store we add compiler barriers.

namespace boost
{
    namespace detail
    {
        inline long interlocked_read_acquire(long volatile* x) BOOST_NOEXCEPT
        {
            long const res=*x;
            BOOST_THREAD_DETAIL_COMPILER_BARRIER();
            return res;
        }
        inline void* interlocked_read_acquire(void* volatile* x) BOOST_NOEXCEPT
        {
            void* const res=*x;
            BOOST_THREAD_DETAIL_COMPILER_BARRIER();
            return res;
        }

        inline void interlocked_write_release(long volatile* x,long value) BOOST_NOEXCEPT
        {
            BOOST_THREAD_DETAIL_COMPILER_BARRIER();
            *x=value;
        }
        inline void interlocked_write_release(void* volatile* x,void* value) BOOST_NOEXCEPT
        {
            BOOST_THREAD_DETAIL_COMPILER_BARRIER();
            *x=value;
        }
    }
}

#elif defined(_MSC_VER) && _MSC_VER >= 1700 && (defined(_M_ARM) || defined(_M_ARM64))

#include <intrin.h>

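// On ARM and ARM64 plain loads and stores do not carry acquire/release semantics, so an
// explicit "dmb ish" is issued: after the load for acquire, before the store for release.
// The __iso_volatile_* intrinsics perform the raw access regardless of the /volatile: mode.
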
namespace boost
{
    namespace detail
    {
        inline long interlocked_read_acquire(long volatile* x) BOOST_NOEXCEPT
        {
            long const res=__iso_volatile_load32((const volatile __int32*)x);
            BOOST_THREAD_DETAIL_COMPILER_BARRIER();
            __dmb(0xB); // _ARM_BARRIER_ISH, see armintr.h from MSVC 11 and later
            BOOST_THREAD_DETAIL_COMPILER_BARRIER();
            return res;
        }
        inline void* interlocked_read_acquire(void* volatile* x) BOOST_NOEXCEPT
        {
            void* const res=
#if defined(_M_ARM64)
                (void*)__iso_volatile_load64((const volatile __int64*)x);
#else
                (void*)__iso_volatile_load32((const volatile __int32*)x);
#endif
            BOOST_THREAD_DETAIL_COMPILER_BARRIER();
            __dmb(0xB); // _ARM_BARRIER_ISH, see armintr.h from MSVC 11 and later
            BOOST_THREAD_DETAIL_COMPILER_BARRIER();
            return res;
        }

        inline void interlocked_write_release(long volatile* x,long value) BOOST_NOEXCEPT
        {
            BOOST_THREAD_DETAIL_COMPILER_BARRIER();
            __dmb(0xB); // _ARM_BARRIER_ISH, see armintr.h from MSVC 11 and later
            BOOST_THREAD_DETAIL_COMPILER_BARRIER();
            __iso_volatile_store32((volatile __int32*)x, (__int32)value);
        }
        inline void interlocked_write_release(void* volatile* x,void* value) BOOST_NOEXCEPT
        {
            BOOST_THREAD_DETAIL_COMPILER_BARRIER();
            __dmb(0xB); // _ARM_BARRIER_ISH, see armintr.h from MSVC 11 and later
            BOOST_THREAD_DETAIL_COMPILER_BARRIER();
#if defined(_M_ARM64)
            __iso_volatile_store64((volatile __int64*)x, (__int64)value);
#else
            __iso_volatile_store32((volatile __int32*)x, (__int32)value);
#endif
        }
    }
}

#elif defined(__GNUC__) && (((__GNUC__ * 100 + __GNUC_MINOR__) >= 407) || (defined(__clang__) && (__clang_major__ * 100 + __clang_minor__) >= 302))

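// GCC 4.7+ and Clang 3.2+ provide the __atomic builtins, which generate the correct acquire
// load and release store for the target architecture directly.
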
namespace boost
{
    namespace detail
    {
        inline long interlocked_read_acquire(long volatile* x) BOOST_NOEXCEPT
        {
            return __atomic_load_n((long*)x, __ATOMIC_ACQUIRE);
        }
        inline void* interlocked_read_acquire(void* volatile* x) BOOST_NOEXCEPT
        {
            return __atomic_load_n((void**)x, __ATOMIC_ACQUIRE);
        }

        inline void interlocked_write_release(long volatile* x,long value) BOOST_NOEXCEPT
        {
            __atomic_store_n((long*)x, value, __ATOMIC_RELEASE);
        }
        inline void interlocked_write_release(void* volatile* x,void* value) BOOST_NOEXCEPT
        {
            __atomic_store_n((void**)x, value, __ATOMIC_RELEASE);
        }
    }
}

#elif defined(__GNUC__) && (defined(__i386__) || defined(__x86_64__))

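// On x86 and x86-64 ordinary loads already have acquire semantics and ordinary stores have
// release semantics, so a plain mov is sufficient; the "memory" clobber acts as a compiler
// barrier that prevents reordering around the access.
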
namespace boost
{
    namespace detail
    {
        inline long interlocked_read_acquire(long volatile* x) BOOST_NOEXCEPT
        {
            long res;
            __asm__ __volatile__ ("movl %1, %0" : "=r" (res) : "m" (*x) : "memory");
            return res;
        }
        inline void* interlocked_read_acquire(void* volatile* x) BOOST_NOEXCEPT
        {
            void* res;
#if defined(__x86_64__)
            __asm__ __volatile__ ("movq %1, %0" : "=r" (res) : "m" (*x) : "memory");
#else
            __asm__ __volatile__ ("movl %1, %0" : "=r" (res) : "m" (*x) : "memory");
#endif
            return res;
        }

        inline void interlocked_write_release(long volatile* x,long value) BOOST_NOEXCEPT
        {
            __asm__ __volatile__ ("movl %1, %0" : "=m" (*x) : "r" (value) : "memory");
        }
        inline void interlocked_write_release(void* volatile* x,void* value) BOOST_NOEXCEPT
        {
#if defined(__x86_64__)
            __asm__ __volatile__ ("movq %1, %0" : "=m" (*x) : "r" (value) : "memory");
#else
            __asm__ __volatile__ ("movl %1, %0" : "=m" (*x) : "r" (value) : "memory");
#endif
        }
    }
}

#else

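// Fallback for other compilers: the BOOST_INTERLOCKED_* operations imply full memory
// barriers, so a no-op compare-exchange implements an acquire read and an exchange
// implements a release write (both stronger than strictly required).
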
namespace boost
{
    namespace detail
    {
        inline long interlocked_read_acquire(long volatile* x) BOOST_NOEXCEPT
        {
            return BOOST_INTERLOCKED_COMPARE_EXCHANGE((long*)x,0,0);
        }
        inline void* interlocked_read_acquire(void* volatile* x) BOOST_NOEXCEPT
        {
            return BOOST_INTERLOCKED_COMPARE_EXCHANGE_POINTER((void**)x,0,0);
        }
        inline void interlocked_write_release(long volatile* x,long value) BOOST_NOEXCEPT
        {
            BOOST_INTERLOCKED_EXCHANGE((long*)x,value);
        }
        inline void interlocked_write_release(void* volatile* x,void* value) BOOST_NOEXCEPT
        {
            BOOST_INTERLOCKED_EXCHANGE_POINTER((void**)x,value);
        }
    }
}

#endif

#include <boost/config/abi_suffix.hpp>

#endif