• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
// Copyright (c) 2008 Google Inc. All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//    * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//    * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//    * Neither the name of Google Inc. nor the name Chromium Embedded
// Framework nor the names of its contributors may be used to endorse
// or promote products derived from this software without specific prior
// written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// Do not include this header file directly. Use base/cef_atomicops.h
// instead.

33 #ifndef CEF_INCLUDE_BASE_INTERNAL_CEF_ATOMICOPS_ARM64_MSVC_H_
34 #define CEF_INCLUDE_BASE_INTERNAL_CEF_ATOMICOPS_ARM64_MSVC_H_
35 
36 #include <windows.h>
37 
38 #include <intrin.h>
39 
40 #include "include/base/cef_macros.h"
41 
42 namespace base {
43 namespace subtle {
44 
// Atomically: if (*ptr == old_value) store new_value into *ptr. Returns the
// value *ptr held before the operation (equal to old_value on success).
// Implemented with the MSVC interlocked intrinsic, which the compiler
// documents as implying a full barrier.
inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32* ptr,
                                         Atomic32 old_value,
                                         Atomic32 new_value) {
  LONG result = _InterlockedCompareExchange(
      reinterpret_cast<volatile LONG*>(ptr), static_cast<LONG>(new_value),
      static_cast<LONG>(old_value));
  return static_cast<Atomic32>(result);
}
53 
// Atomically stores new_value into *ptr and returns the previous value.
// The interlocked exchange also acts as a barrier in this implementation.
inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32* ptr,
                                         Atomic32 new_value) {
  LONG result = _InterlockedExchange(reinterpret_cast<volatile LONG*>(ptr),
                                     static_cast<LONG>(new_value));
  return static_cast<Atomic32>(result);
}
60 
// Atomically adds |increment| to *ptr and returns the new (post-increment)
// value.
inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32* ptr,
                                        Atomic32 increment) {
  // _InterlockedExchangeAdd returns the PRE-increment value, so |increment|
  // is added again to yield the post-increment result.
  return _InterlockedExchangeAdd(reinterpret_cast<volatile LONG*>(ptr),
                                 static_cast<LONG>(increment)) +
         increment;
}
67 
// No weaker-than-barrier form is available through the interlocked
// intrinsics, so this simply forwards to the full-barrier increment.
inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32* ptr,
                                          Atomic32 increment) {
  return Barrier_AtomicIncrement(ptr, increment);
}
72 
73 #if !(defined(_MSC_VER) && _MSC_VER >= 1400)
74 #error "We require at least vs2005 for MemoryBarrier"
75 #endif
76 
// Acquire variant of compare-and-swap. Forwarding to the plain version is
// sufficient here because it is implemented with an interlocked intrinsic.
inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr,
                                       Atomic32 old_value,
                                       Atomic32 new_value) {
  return NoBarrier_CompareAndSwap(ptr, old_value, new_value);
}
82 
// Release variant of compare-and-swap. Forwarding to the plain version is
// sufficient here because it is implemented with an interlocked intrinsic.
inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr,
                                       Atomic32 old_value,
                                       Atomic32 new_value) {
  return NoBarrier_CompareAndSwap(ptr, old_value, new_value);
}
88 
// Plain (unordered) store: a single volatile write, no explicit barrier.
inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) {
  *ptr = value;
}
92 
Acquire_Store(volatile Atomic32 * ptr,Atomic32 value)93 inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) {
94   NoBarrier_AtomicExchange(ptr, value);
95   // acts as a barrier in this implementation
96 }
97 
// Store with release semantics.
inline void Release_Store(volatile Atomic32* ptr, Atomic32 value) {
  // NOTE(review): this relies on a volatile store having release semantics,
  // which MSVC only guarantees under /volatile:ms (not /volatile:iso, the
  // default for ARM targets) -- confirm the build flags, otherwise an
  // explicit barrier may be needed before the store.
  *ptr = value;
}
102 
// Plain (unordered) load: a single volatile read, no explicit barrier.
inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) {
  return *ptr;
}
106 
// Load with acquire semantics.
inline Atomic32 Acquire_Load(volatile const Atomic32* ptr) {
  // NOTE(review): as with Release_Store(), acquire ordering here depends on
  // MSVC's /volatile:ms treatment of volatile loads -- verify for ARM64
  // builds, where the compiler defaults to /volatile:iso.
  Atomic32 value = *ptr;
  return value;
}
111 
// Load preceded by a full hardware barrier.
inline Atomic32 Release_Load(volatile const Atomic32* ptr) {
  MemoryBarrier();
  return *ptr;
}
116 
117 #if defined(_WIN64)
118 
119 // 64-bit low-level operations on 64-bit platform.
120 
121 COMPILE_ASSERT(sizeof(Atomic64) == sizeof(PVOID), atomic_word_is_atomic);
122 
// 64-bit compare-and-swap via the pointer-sized interlocked API. Returns
// the value *ptr held before the operation (equal to old_value on success).
inline Atomic64 NoBarrier_CompareAndSwap(volatile Atomic64* ptr,
                                         Atomic64 old_value,
                                         Atomic64 new_value) {
  PVOID result = InterlockedCompareExchangePointer(
      reinterpret_cast<volatile PVOID*>(ptr),
      reinterpret_cast<PVOID>(new_value), reinterpret_cast<PVOID>(old_value));
  return reinterpret_cast<Atomic64>(result);
}
131 
// Atomically stores new_value into *ptr and returns the previous value,
// using the pointer-sized interlocked exchange.
inline Atomic64 NoBarrier_AtomicExchange(volatile Atomic64* ptr,
                                         Atomic64 new_value) {
  PVOID result =
      InterlockedExchangePointer(reinterpret_cast<volatile PVOID*>(ptr),
                                 reinterpret_cast<PVOID>(new_value));
  return reinterpret_cast<Atomic64>(result);
}
139 
// Atomically adds |increment| to *ptr and returns the new (post-increment)
// value.
inline Atomic64 Barrier_AtomicIncrement(volatile Atomic64* ptr,
                                        Atomic64 increment) {
  // InterlockedExchangeAdd64 returns the PRE-increment value, so |increment|
  // is added again to yield the post-increment result.
  return InterlockedExchangeAdd64(reinterpret_cast<volatile LONGLONG*>(ptr),
                                  static_cast<LONGLONG>(increment)) +
         increment;
}
146 
// No weaker-than-barrier form is available through the interlocked
// intrinsics, so this simply forwards to the full-barrier increment.
inline Atomic64 NoBarrier_AtomicIncrement(volatile Atomic64* ptr,
                                          Atomic64 increment) {
  return Barrier_AtomicIncrement(ptr, increment);
}
151 
// Plain (unordered) store: a single volatile write, no explicit barrier.
inline void NoBarrier_Store(volatile Atomic64* ptr, Atomic64 value) {
  *ptr = value;
}
155 
Acquire_Store(volatile Atomic64 * ptr,Atomic64 value)156 inline void Acquire_Store(volatile Atomic64* ptr, Atomic64 value) {
157   NoBarrier_AtomicExchange(ptr, value);
158   // acts as a barrier in this implementation
159 }
160 
// Store with release semantics.
inline void Release_Store(volatile Atomic64* ptr, Atomic64 value) {
  // NOTE(review): this relies on a volatile store having release semantics,
  // which MSVC only guarantees under /volatile:ms (not /volatile:iso, the
  // default for ARM targets) -- confirm the build flags, otherwise an
  // explicit barrier may be needed before the store.
  *ptr = value;
}
164 
// Plain (unordered) load: a single volatile read, no explicit barrier.
inline Atomic64 NoBarrier_Load(volatile const Atomic64* ptr) {
  return *ptr;
}
168 
// Load with acquire semantics.
inline Atomic64 Acquire_Load(volatile const Atomic64* ptr) {
  // NOTE(review): as with Release_Store(), acquire ordering here depends on
  // MSVC's /volatile:ms treatment of volatile loads -- verify for ARM64
  // builds, where the compiler defaults to /volatile:iso.
  Atomic64 value = *ptr;
  return value;
}
173 
// Load preceded by a full hardware barrier.
inline Atomic64 Release_Load(volatile const Atomic64* ptr) {
  MemoryBarrier();
  return *ptr;
}
178 
// Acquire variant of the 64-bit compare-and-swap. Forwarding to the plain
// version is sufficient because it uses an interlocked intrinsic.
inline Atomic64 Acquire_CompareAndSwap(volatile Atomic64* ptr,
                                       Atomic64 old_value,
                                       Atomic64 new_value) {
  return NoBarrier_CompareAndSwap(ptr, old_value, new_value);
}
184 
// Release variant of the 64-bit compare-and-swap. Forwarding to the plain
// version is sufficient because it uses an interlocked intrinsic.
inline Atomic64 Release_CompareAndSwap(volatile Atomic64* ptr,
                                       Atomic64 old_value,
                                       Atomic64 new_value) {
  return NoBarrier_CompareAndSwap(ptr, old_value, new_value);
}
190 
191 #endif  // defined(_WIN64)
192 
193 }  // namespace base::subtle
194 }  // namespace base
195 
196 #endif  // CEF_INCLUDE_BASE_INTERNAL_CEF_ATOMICOPS_ARM64_MSVC_H_
197 
198