/*
 * Distributed under the Boost Software License, Version 1.0.
 * (See accompanying file LICENSE_1_0.txt or copy at
 * http://www.boost.org/LICENSE_1_0.txt)
 *
 * Copyright (c) 2009 Helge Bahmann
 * Copyright (c) 2012 Tim Blechmann
 * Copyright (c) 2014 Andrey Semashev
 */
/*!
 * \file   atomic/detail/ops_msvc_arm.hpp
 *
 * This header contains implementation of the \c operations template.
 */

#ifndef BOOST_ATOMIC_DETAIL_OPS_MSVC_ARM_HPP_INCLUDED_
#define BOOST_ATOMIC_DETAIL_OPS_MSVC_ARM_HPP_INCLUDED_

#include <intrin.h>
#include <cstddef>
#include <boost/memory_order.hpp>
#include <boost/atomic/detail/config.hpp>
#include <boost/atomic/detail/interlocked.hpp>
#include <boost/atomic/detail/storage_type.hpp>
#include <boost/atomic/detail/operations_fwd.hpp>
#include <boost/atomic/detail/type_traits/make_signed.hpp>
#include <boost/atomic/capabilities.hpp>
#include <boost/atomic/detail/ops_msvc_common.hpp>

#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif

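// Note: the __iso_volatile_* intrinsics compile to plain (non-serializing) volatile
// loads and stores with no implied memory barriers; any required ordering is added
// separately via explicit fences below.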
#define BOOST_ATOMIC_DETAIL_ARM_LOAD8(p) __iso_volatile_load8((const volatile __int8*)(p))
#define BOOST_ATOMIC_DETAIL_ARM_LOAD16(p) __iso_volatile_load16((const volatile __int16*)(p))
#define BOOST_ATOMIC_DETAIL_ARM_LOAD32(p) __iso_volatile_load32((const volatile __int32*)(p))
#define BOOST_ATOMIC_DETAIL_ARM_LOAD64(p) __iso_volatile_load64((const volatile __int64*)(p))
#define BOOST_ATOMIC_DETAIL_ARM_STORE8(p, v) __iso_volatile_store8((volatile __int8*)(p), (__int8)(v))
#define BOOST_ATOMIC_DETAIL_ARM_STORE16(p, v) __iso_volatile_store16((volatile __int16*)(p), (__int16)(v))
#define BOOST_ATOMIC_DETAIL_ARM_STORE32(p, v) __iso_volatile_store32((volatile __int32*)(p), (__int32)(v))
#define BOOST_ATOMIC_DETAIL_ARM_STORE64(p, v) __iso_volatile_store64((volatile __int64*)(p), (__int64)(v))

namespace boost {
namespace atomics {
namespace detail {

// A note about memory_order_consume. Technically, this architecture allows the memory
// barrier after a consume load to be omitted, since it guarantees data dependency ordering.
// However, some compiler optimizations may break seemingly valid code that relies on data
// dependency tracking by injecting bogus branches to aid out-of-order execution.
// This may happen not only in Boost.Atomic code but also in the user's code, which we have
// no control over. See this thread: http://lists.boost.org/Archives/boost/2014/06/213890.php.
// For this reason we promote memory_order_consume to memory_order_acquire.
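// As a result, a memory_order_consume load in the load() implementations below issues
// the same trailing DMB as a memory_order_acquire load (see fence_after_load()).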

struct msvc_arm_operations_base
{
    static BOOST_CONSTEXPR_OR_CONST bool full_cas_based = false;
    static BOOST_CONSTEXPR_OR_CONST bool is_always_lock_free = true;

    static BOOST_FORCEINLINE void hardware_full_fence() BOOST_NOEXCEPT
    {
        __dmb(0xB); // _ARM_BARRIER_ISH, see armintr.h from MSVC 11 and later
    }

    static BOOST_FORCEINLINE void fence_before_store(memory_order order) BOOST_NOEXCEPT
    {
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();

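        // Boost's memory_order constants are bit patterns (see boost/memory_order.hpp), and
        // the release bit is also set in memory_order_acq_rel and memory_order_seq_cst, so
        // this single test fences every releasing store.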
        if ((static_cast< unsigned int >(order) & static_cast< unsigned int >(memory_order_release)) != 0u)
            hardware_full_fence();

        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
    }

    static BOOST_FORCEINLINE void fence_after_store(memory_order order) BOOST_NOEXCEPT
    {
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();

        if (order == memory_order_seq_cst)
            hardware_full_fence();

        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
    }

    static BOOST_FORCEINLINE void fence_after_load(memory_order order) BOOST_NOEXCEPT
    {
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();

        if ((static_cast< unsigned int >(order) & (static_cast< unsigned int >(memory_order_consume) | static_cast< unsigned int >(memory_order_acquire))) != 0u)
            hardware_full_fence();

        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
    }

    static BOOST_FORCEINLINE BOOST_CONSTEXPR memory_order cas_common_order(memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        // Combine order flags together and promote memory_order_consume to memory_order_acquire
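        // Worked example, using the bit-pattern values from boost/memory_order.hpp
        // (consume = 1, acquire = 2, release = 4): success_order == memory_order_release
        // and failure_order == memory_order_consume combine to (4 | 1) == 5, then
        // (5 & ~1u) | ((5 & 1u) << 1u) == 6 == memory_order_acq_rel.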
        return static_cast< memory_order >(((static_cast< unsigned int >(failure_order) | static_cast< unsigned int >(success_order)) & ~static_cast< unsigned int >(memory_order_consume))
            | (((static_cast< unsigned int >(failure_order) | static_cast< unsigned int >(success_order)) & static_cast< unsigned int >(memory_order_consume)) << 1u));
    }
};

template< std::size_t Size, bool Signed, typename Derived >
struct msvc_arm_operations :
    public msvc_arm_operations_base
{
    typedef typename make_storage_type< Size >::type storage_type;
    typedef typename make_storage_type< Size >::aligned aligned_storage_type;

    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = Size;
    static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;

    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
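        // Subtraction is implemented as fetch_add of the negated value; routing the
        // negation through the signed type avoids compiler warnings about applying
        // unary minus to an unsigned operand.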
        typedef typename boost::atomics::detail::make_signed< storage_type >::type signed_storage_type;
        return Derived::fetch_add(storage, static_cast< storage_type >(-static_cast< signed_storage_type >(v)), order);
    }

    static BOOST_FORCEINLINE bool compare_exchange_weak(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        return Derived::compare_exchange_strong(storage, expected, desired, success_order, failure_order);
    }

    static BOOST_FORCEINLINE bool test_and_set(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
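        // Returns true if the flag was already set, i.e. the previous value was non-zero.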
        return !!Derived::exchange(storage, (storage_type)1, order);
    }

    static BOOST_FORCEINLINE void clear(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        Derived::store(storage, (storage_type)0, order);
    }
};

template< bool Signed >
struct operations< 1u, Signed > :
    public msvc_arm_operations< 1u, Signed, operations< 1u, Signed > >
{
    typedef msvc_arm_operations< 1u, Signed, operations< 1u, Signed > > base_type;
    typedef typename base_type::storage_type storage_type;

    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
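        // A seq_cst store expands to DMB; plain store; DMB: fence_before_store() emits the
        // leading barrier for releasing orders, fence_after_store() the trailing one.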
        base_type::fence_before_store(order);
        BOOST_ATOMIC_DETAIL_ARM_STORE8(&storage, v);
        base_type::fence_after_store(order);
    }

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
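        // Plain load followed by a DMB for consume/acquire/seq_cst; relaxed loads need no barrier.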
        storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD8(&storage);
        base_type::fence_after_load(order);
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
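        // Dispatch to the _RELAXED/_ACQUIRE/_RELEASE interlocked variants where a weaker
        // barrier suffices; acq_rel and seq_cst fall through to the fully fenced default.
        // The same pattern is used by all read-modify-write operations in this header.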
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
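        // The interlocked intrinsic returns the value observed in storage; comparing it
        // against the saved expected value tells whether the exchange actually happened.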
        storage_type previous = expected, old_val;

        switch (cas_common_order(success_order, failure_order))
        {
        case memory_order_relaxed:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8_RELAXED(&storage, desired, previous));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8_ACQUIRE(&storage, desired, previous));
            break;
        case memory_order_release:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8_RELEASE(&storage, desired, previous));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8(&storage, desired, previous));
            break;
        }
        expected = old_val;

        return (previous == old_val);
    }

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8(&storage, v));
            break;
        }
        return v;
    }
};

template< bool Signed >
struct operations< 2u, Signed > :
    public msvc_arm_operations< 2u, Signed, operations< 2u, Signed > >
{
    typedef msvc_arm_operations< 2u, Signed, operations< 2u, Signed > > base_type;
    typedef typename base_type::storage_type storage_type;

    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fence_before_store(order);
        BOOST_ATOMIC_DETAIL_ARM_STORE16(&storage, v);
        base_type::fence_after_store(order);
    }

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD16(&storage);
        base_type::fence_after_load(order);
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected, old_val;

        switch (cas_common_order(success_order, failure_order))
        {
        case memory_order_relaxed:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16_RELAXED(&storage, desired, previous));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16_ACQUIRE(&storage, desired, previous));
            break;
        case memory_order_release:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16_RELEASE(&storage, desired, previous));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16(&storage, desired, previous));
            break;
        }
        expected = old_val;

        return (previous == old_val);
    }

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16(&storage, v));
            break;
        }
        return v;
    }
};

template< bool Signed >
struct operations< 4u, Signed > :
    public msvc_arm_operations< 4u, Signed, operations< 4u, Signed > >
{
    typedef msvc_arm_operations< 4u, Signed, operations< 4u, Signed > > base_type;
    typedef typename base_type::storage_type storage_type;

    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fence_before_store(order);
        BOOST_ATOMIC_DETAIL_ARM_STORE32(&storage, v);
        base_type::fence_after_store(order);
    }

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD32(&storage);
        base_type::fence_after_load(order);
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected, old_val;

        switch (cas_common_order(success_order, failure_order))
        {
        case memory_order_relaxed:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE_RELAXED(&storage, desired, previous));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE_ACQUIRE(&storage, desired, previous));
            break;
        case memory_order_release:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE_RELEASE(&storage, desired, previous));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE(&storage, desired, previous));
            break;
        }
        expected = old_val;

        return (previous == old_val);
    }

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR(&storage, v));
            break;
        }
        return v;
    }
};

template< bool Signed >
struct operations< 8u, Signed > :
    public msvc_arm_operations< 8u, Signed, operations< 8u, Signed > >
{
    typedef msvc_arm_operations< 8u, Signed, operations< 8u, Signed > > base_type;
    typedef typename base_type::storage_type storage_type;

    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fence_before_store(order);
        BOOST_ATOMIC_DETAIL_ARM_STORE64(&storage, v);
        base_type::fence_after_store(order);
    }

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD64(&storage);
        base_type::fence_after_load(order);
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected, old_val;

        switch (cas_common_order(success_order, failure_order))
        {
        case memory_order_relaxed:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64_RELAXED(&storage, desired, previous));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64_ACQUIRE(&storage, desired, previous));
            break;
        case memory_order_release:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64_RELEASE(&storage, desired, previous));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64(&storage, desired, previous));
            break;
        }
        expected = old_val;

        return (previous == old_val);
    }

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64(&storage, v));
            break;
        }
        return v;
    }
};


BOOST_FORCEINLINE void thread_fence(memory_order order) BOOST_NOEXCEPT
{
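    // On this target every non-relaxed fence collapses to the same full DMB ISH barrier.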
    BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
    if (order != memory_order_relaxed)
        msvc_arm_operations_base::hardware_full_fence();
    BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
}

BOOST_FORCEINLINE void signal_fence(memory_order order) BOOST_NOEXCEPT
{
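    // A signal handler runs on the same core as the interrupted thread, so only compiler
    // reordering needs to be prevented; no hardware barrier is required.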
    if (order != memory_order_relaxed)
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
}

} // namespace detail
} // namespace atomics
} // namespace boost

#undef BOOST_ATOMIC_DETAIL_ARM_LOAD8
#undef BOOST_ATOMIC_DETAIL_ARM_LOAD16
#undef BOOST_ATOMIC_DETAIL_ARM_LOAD32
#undef BOOST_ATOMIC_DETAIL_ARM_LOAD64
#undef BOOST_ATOMIC_DETAIL_ARM_STORE8
#undef BOOST_ATOMIC_DETAIL_ARM_STORE16
#undef BOOST_ATOMIC_DETAIL_ARM_STORE32
#undef BOOST_ATOMIC_DETAIL_ARM_STORE64

#endif // BOOST_ATOMIC_DETAIL_OPS_MSVC_ARM_HPP_INCLUDED_