// SPDX-License-Identifier: Apache-2.0 OR MIT

/*
128-bit atomic implementation without inline assembly.

Adapted from https://github.com/rust-lang/rust/blob/1.80.0/library/core/src/sync/atomic.rs.

Note: This module is currently only enabled on Miri and ThreadSanitizer,
which do not support inline assembly.

This uses `core::arch::x86_64::cmpxchg16b` on x86_64 and
`core::intrinsics::atomic_*` on aarch64, powerpc64, and s390x.

See the README.md in this directory for a performance comparison with the
inline-assembly implementation.

Note:
- On x86_64, this requires Rust 1.70+; other targets require a nightly compiler.
- On powerpc64, this requires LLVM 15+ and pwr8+ (quadword-atomics LLVM target feature):
  https://github.com/llvm/llvm-project/commit/549e118e93c666914a1045fde38a2cac33e1e445
- On s390x, old LLVM (pre-18) generates libcalls for operations other than load/store/cmpxchg:
  https://github.com/llvm/llvm-project/commit/c568927f3e2e7d9804ea74ecbf11c16c014ddcbc
- On big-endian aarch64, LLVM (as of 17) generates broken code (wrong result in the
  stress test); it may be fine under cfg(miri)/cfg(sanitize), though.
- On powerpc64, LLVM (as of 17) doesn't support 128-bit atomic min/max:
  https://github.com/llvm/llvm-project/issues/68390
- On powerpc64le, LLVM (as of 17) generates broken code (wrong result from fetch_add).
*/

include!("macros.rs");

#[allow(dead_code)] // we only use compare_exchange.
#[cfg(target_arch = "x86_64")]
#[cfg(not(target_feature = "cmpxchg16b"))]
#[path = "../fallback/outline_atomics.rs"]
mod fallback;

#[cfg(target_arch = "x86_64")]
#[cfg(not(target_feature = "cmpxchg16b"))]
#[path = "../detect/x86_64.rs"]
mod detect;

use core::sync::atomic::Ordering;
#[cfg(not(target_arch = "x86_64"))]
use core::{
    intrinsics,
    sync::atomic::Ordering::{AcqRel, Acquire, Relaxed, Release, SeqCst},
};

// https://github.com/rust-lang/rust/blob/1.80.0/library/core/src/sync/atomic.rs#L3267
#[cfg(target_arch = "x86_64")]
#[inline]
fn strongest_failure_ordering(order: Ordering) -> Ordering {
    match order {
        Ordering::Release | Ordering::Relaxed => Ordering::Relaxed,
        Ordering::SeqCst => Ordering::SeqCst,
        Ordering::Acquire | Ordering::AcqRel => Ordering::Acquire,
        _ => unreachable!(),
    }
}

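// On x86_64 there is no 128-bit atomic load instruction in the baseline ISA, so
// the load below is implemented as a CMPXCHG16B with old == new == 0: if the
// current value is 0 the CAS stores 0 (a no-op), otherwise it fails and returns
// the current value. Either way, the returned value is the loaded value. Note
// that this makes the "load" a read-modify-write, so it requires writable memory.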
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
unsafe fn atomic_load(src: *mut u128, order: Ordering) -> u128 {
    #[cfg(target_arch = "x86_64")]
    // SAFETY: the caller must uphold the safety contract.
    unsafe {
        let fail_order = strongest_failure_ordering(order);
        match atomic_compare_exchange(src, 0, 0, order, fail_order) {
            Ok(v) | Err(v) => v,
        }
    }
    #[cfg(not(target_arch = "x86_64"))]
    // SAFETY: the caller must uphold the safety contract.
    unsafe {
        match order {
            Acquire => intrinsics::atomic_load_acquire(src),
            Relaxed => intrinsics::atomic_load_relaxed(src),
            SeqCst => intrinsics::atomic_load_seqcst(src),
            _ => unreachable!(),
        }
    }
}

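// On x86_64 there is likewise no 128-bit atomic store instruction, so the store
// below is implemented as an atomic swap whose previous value is discarded.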
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
unsafe fn atomic_store(dst: *mut u128, val: u128, order: Ordering) {
    #[cfg(target_arch = "x86_64")]
    // SAFETY: the caller must uphold the safety contract.
    unsafe {
        atomic_swap(dst, val, order);
    }
    #[cfg(not(target_arch = "x86_64"))]
    // SAFETY: the caller must uphold the safety contract.
    unsafe {
        match order {
            Release => intrinsics::atomic_store_release(dst, val),
            Relaxed => intrinsics::atomic_store_relaxed(dst, val),
            SeqCst => intrinsics::atomic_store_seqcst(dst, val),
            _ => unreachable!(),
        }
    }
}

#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
unsafe fn atomic_compare_exchange(
    dst: *mut u128,
    old: u128,
    new: u128,
    success: Ordering,
    failure: Ordering,
) -> Result<u128, u128> {
    #[cfg(target_arch = "x86_64")]
    let (val, ok) = {
        #[target_feature(enable = "cmpxchg16b")]
        #[cfg_attr(target_feature = "cmpxchg16b", inline)]
        #[cfg_attr(not(target_feature = "cmpxchg16b"), inline(never))]
        unsafe fn cmpxchg16b(
            dst: *mut u128,
            old: u128,
            new: u128,
            success: Ordering,
            failure: Ordering,
        ) -> (u128, bool) {
            debug_assert!(dst as usize % 16 == 0);
            #[cfg(not(target_feature = "cmpxchg16b"))]
            {
                debug_assert!(detect::detect().has_cmpxchg16b());
            }
            // SAFETY: the caller must guarantee that `dst` is valid for both writes and
            // reads, 16-byte aligned (required by CMPXCHG16B), that there are no
            // concurrent non-atomic operations, and that the CPU supports CMPXCHG16B.
            let prev = unsafe { core::arch::x86_64::cmpxchg16b(dst, old, new, success, failure) };
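            // The intrinsic returns only the previous value and does not expose
            // ZF, so success is recovered by comparing the previous value to
            // `old` (CMPXCHG16B stores iff that comparison succeeds).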
            (prev, prev == old)
        }
        #[cfg(target_feature = "cmpxchg16b")]
        // SAFETY: the caller must guarantee that `dst` is valid for both writes and
        // reads, 16-byte aligned, that there are no concurrent non-atomic operations,
        // and cfg guarantees that CMPXCHG16B is available at compile-time.
        unsafe {
            cmpxchg16b(dst, old, new, success, failure)
        }
        #[cfg(not(target_feature = "cmpxchg16b"))]
        // SAFETY: the caller must guarantee that `dst` is valid for both writes and
        // reads, 16-byte aligned, and that there are no different kinds of concurrent accesses.
        unsafe {
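            // `ifunc!` (defined in macros.rs) dispatches at runtime between the
            // native `cmpxchg16b` and the CAS fallback, based on CPU feature
            // detection.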
            ifunc!(unsafe fn(
                dst: *mut u128, old: u128, new: u128, success: Ordering, failure: Ordering
            ) -> (u128, bool) {
                if detect::detect().has_cmpxchg16b() {
                    cmpxchg16b
                } else {
                    fallback::atomic_compare_exchange
                }
            })
        }
    };
    #[cfg(not(target_arch = "x86_64"))]
    // SAFETY: the caller must uphold the safety contract.
    let (val, ok) = unsafe {
        match (success, failure) {
            (Relaxed, Relaxed) => intrinsics::atomic_cxchg_relaxed_relaxed(dst, old, new),
            (Relaxed, Acquire) => intrinsics::atomic_cxchg_relaxed_acquire(dst, old, new),
            (Relaxed, SeqCst) => intrinsics::atomic_cxchg_relaxed_seqcst(dst, old, new),
            (Acquire, Relaxed) => intrinsics::atomic_cxchg_acquire_relaxed(dst, old, new),
            (Acquire, Acquire) => intrinsics::atomic_cxchg_acquire_acquire(dst, old, new),
            (Acquire, SeqCst) => intrinsics::atomic_cxchg_acquire_seqcst(dst, old, new),
            (Release, Relaxed) => intrinsics::atomic_cxchg_release_relaxed(dst, old, new),
            (Release, Acquire) => intrinsics::atomic_cxchg_release_acquire(dst, old, new),
            (Release, SeqCst) => intrinsics::atomic_cxchg_release_seqcst(dst, old, new),
            (AcqRel, Relaxed) => intrinsics::atomic_cxchg_acqrel_relaxed(dst, old, new),
            (AcqRel, Acquire) => intrinsics::atomic_cxchg_acqrel_acquire(dst, old, new),
            (AcqRel, SeqCst) => intrinsics::atomic_cxchg_acqrel_seqcst(dst, old, new),
            (SeqCst, Relaxed) => intrinsics::atomic_cxchg_seqcst_relaxed(dst, old, new),
            (SeqCst, Acquire) => intrinsics::atomic_cxchg_seqcst_acquire(dst, old, new),
            (SeqCst, SeqCst) => intrinsics::atomic_cxchg_seqcst_seqcst(dst, old, new),
            _ => unreachable!(),
        }
    };
    if ok {
        Ok(val)
    } else {
        Err(val)
    }
}

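// x86_64's CMPXCHG16B cannot fail spuriously, so the strong compare_exchange is
// also used as the weak variant there.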
#[cfg(target_arch = "x86_64")]
use atomic_compare_exchange as atomic_compare_exchange_weak;
#[cfg(not(target_arch = "x86_64"))]
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
unsafe fn atomic_compare_exchange_weak(
    dst: *mut u128,
    old: u128,
    new: u128,
    success: Ordering,
    failure: Ordering,
) -> Result<u128, u128> {
    // SAFETY: the caller must uphold the safety contract.
    let (val, ok) = unsafe {
        match (success, failure) {
            (Relaxed, Relaxed) => intrinsics::atomic_cxchgweak_relaxed_relaxed(dst, old, new),
            (Relaxed, Acquire) => intrinsics::atomic_cxchgweak_relaxed_acquire(dst, old, new),
            (Relaxed, SeqCst) => intrinsics::atomic_cxchgweak_relaxed_seqcst(dst, old, new),
            (Acquire, Relaxed) => intrinsics::atomic_cxchgweak_acquire_relaxed(dst, old, new),
            (Acquire, Acquire) => intrinsics::atomic_cxchgweak_acquire_acquire(dst, old, new),
            (Acquire, SeqCst) => intrinsics::atomic_cxchgweak_acquire_seqcst(dst, old, new),
            (Release, Relaxed) => intrinsics::atomic_cxchgweak_release_relaxed(dst, old, new),
            (Release, Acquire) => intrinsics::atomic_cxchgweak_release_acquire(dst, old, new),
            (Release, SeqCst) => intrinsics::atomic_cxchgweak_release_seqcst(dst, old, new),
            (AcqRel, Relaxed) => intrinsics::atomic_cxchgweak_acqrel_relaxed(dst, old, new),
            (AcqRel, Acquire) => intrinsics::atomic_cxchgweak_acqrel_acquire(dst, old, new),
            (AcqRel, SeqCst) => intrinsics::atomic_cxchgweak_acqrel_seqcst(dst, old, new),
            (SeqCst, Relaxed) => intrinsics::atomic_cxchgweak_seqcst_relaxed(dst, old, new),
            (SeqCst, Acquire) => intrinsics::atomic_cxchgweak_seqcst_acquire(dst, old, new),
            (SeqCst, SeqCst) => intrinsics::atomic_cxchgweak_seqcst_seqcst(dst, old, new),
            _ => unreachable!(),
        }
    };
    if ok {
        Ok(val)
    } else {
        Err(val)
    }
}

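// Generic CAS-loop helper: RMW operations that are not (or cannot be) lowered to
// a native atomic instruction are built on top of this, e.g. atomic_neg below is
// atomic_update(dst, order, u128::wrapping_neg).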
#[inline(always)]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
unsafe fn atomic_update<F>(dst: *mut u128, order: Ordering, mut f: F) -> u128
where
    F: FnMut(u128) -> u128,
{
    // SAFETY: the caller must uphold the safety contract.
    unsafe {
        // This is a private function and all instances of `f` only operate on the value
        // loaded, so there is no need to synchronize the first load/failed CAS.
        let mut prev = atomic_load(dst, Ordering::Relaxed);
        loop {
            let next = f(prev);
            match atomic_compare_exchange_weak(dst, prev, next, order, Ordering::Relaxed) {
                Ok(x) => return x,
                Err(x) => prev = x,
            }
        }
    }
}

// - On x86_64, we use core::arch::x86_64::cmpxchg16b instead of core::intrinsics.
// - On s390x, old LLVM (pre-18) generates libcalls for operations other than
//   load/store/cmpxchg (see also the module-level comment).
#[cfg(any(target_arch = "x86_64", all(target_arch = "s390x", portable_atomic_pre_llvm_18)))]
atomic_rmw_by_atomic_update!();
// On powerpc64, LLVM doesn't support 128-bit atomic min/max (see also the module-level comment).
#[cfg(target_arch = "powerpc64")]
atomic_rmw_by_atomic_update!(cmp);
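// (`atomic_rmw_by_atomic_update!` is defined in macros.rs; it implements the RMW
// operations as `atomic_update` CAS loops, and the `cmp` variant covers only the
// min/max family.)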

#[cfg(not(any(target_arch = "x86_64", all(target_arch = "s390x", portable_atomic_pre_llvm_18))))]
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
unsafe fn atomic_swap(dst: *mut u128, val: u128, order: Ordering) -> u128 {
    // SAFETY: the caller must uphold the safety contract.
    unsafe {
        match order {
            Acquire => intrinsics::atomic_xchg_acquire(dst, val),
            Release => intrinsics::atomic_xchg_release(dst, val),
            AcqRel => intrinsics::atomic_xchg_acqrel(dst, val),
            Relaxed => intrinsics::atomic_xchg_relaxed(dst, val),
            SeqCst => intrinsics::atomic_xchg_seqcst(dst, val),
            _ => unreachable!(),
        }
    }
}

#[cfg(not(any(target_arch = "x86_64", all(target_arch = "s390x", portable_atomic_pre_llvm_18))))]
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
unsafe fn atomic_add(dst: *mut u128, val: u128, order: Ordering) -> u128 {
    // SAFETY: the caller must uphold the safety contract.
    unsafe {
        match order {
            Acquire => intrinsics::atomic_xadd_acquire(dst, val),
            Release => intrinsics::atomic_xadd_release(dst, val),
            AcqRel => intrinsics::atomic_xadd_acqrel(dst, val),
            Relaxed => intrinsics::atomic_xadd_relaxed(dst, val),
            SeqCst => intrinsics::atomic_xadd_seqcst(dst, val),
            _ => unreachable!(),
        }
    }
}

#[cfg(not(any(target_arch = "x86_64", all(target_arch = "s390x", portable_atomic_pre_llvm_18))))]
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
unsafe fn atomic_sub(dst: *mut u128, val: u128, order: Ordering) -> u128 {
    // SAFETY: the caller must uphold the safety contract.
    unsafe {
        match order {
            Acquire => intrinsics::atomic_xsub_acquire(dst, val),
            Release => intrinsics::atomic_xsub_release(dst, val),
            AcqRel => intrinsics::atomic_xsub_acqrel(dst, val),
            Relaxed => intrinsics::atomic_xsub_relaxed(dst, val),
            SeqCst => intrinsics::atomic_xsub_seqcst(dst, val),
            _ => unreachable!(),
        }
    }
}

#[cfg(not(any(target_arch = "x86_64", all(target_arch = "s390x", portable_atomic_pre_llvm_18))))]
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
unsafe fn atomic_and(dst: *mut u128, val: u128, order: Ordering) -> u128 {
    // SAFETY: the caller must uphold the safety contract.
    unsafe {
        match order {
            Acquire => intrinsics::atomic_and_acquire(dst, val),
            Release => intrinsics::atomic_and_release(dst, val),
            AcqRel => intrinsics::atomic_and_acqrel(dst, val),
            Relaxed => intrinsics::atomic_and_relaxed(dst, val),
            SeqCst => intrinsics::atomic_and_seqcst(dst, val),
            _ => unreachable!(),
        }
    }
}

#[cfg(not(any(target_arch = "x86_64", all(target_arch = "s390x", portable_atomic_pre_llvm_18))))]
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
unsafe fn atomic_nand(dst: *mut u128, val: u128, order: Ordering) -> u128 {
    // SAFETY: the caller must uphold the safety contract.
    unsafe {
        match order {
            Acquire => intrinsics::atomic_nand_acquire(dst, val),
            Release => intrinsics::atomic_nand_release(dst, val),
            AcqRel => intrinsics::atomic_nand_acqrel(dst, val),
            Relaxed => intrinsics::atomic_nand_relaxed(dst, val),
            SeqCst => intrinsics::atomic_nand_seqcst(dst, val),
            _ => unreachable!(),
        }
    }
}

#[cfg(not(any(target_arch = "x86_64", all(target_arch = "s390x", portable_atomic_pre_llvm_18))))]
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
unsafe fn atomic_or(dst: *mut u128, val: u128, order: Ordering) -> u128 {
    // SAFETY: the caller must uphold the safety contract.
    unsafe {
        match order {
            Acquire => intrinsics::atomic_or_acquire(dst, val),
            Release => intrinsics::atomic_or_release(dst, val),
            AcqRel => intrinsics::atomic_or_acqrel(dst, val),
            Relaxed => intrinsics::atomic_or_relaxed(dst, val),
            SeqCst => intrinsics::atomic_or_seqcst(dst, val),
            _ => unreachable!(),
        }
    }
}

#[cfg(not(any(target_arch = "x86_64", all(target_arch = "s390x", portable_atomic_pre_llvm_18))))]
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
unsafe fn atomic_xor(dst: *mut u128, val: u128, order: Ordering) -> u128 {
    // SAFETY: the caller must uphold the safety contract.
    unsafe {
        match order {
            Acquire => intrinsics::atomic_xor_acquire(dst, val),
            Release => intrinsics::atomic_xor_release(dst, val),
            AcqRel => intrinsics::atomic_xor_acqrel(dst, val),
            Relaxed => intrinsics::atomic_xor_relaxed(dst, val),
            SeqCst => intrinsics::atomic_xor_seqcst(dst, val),
            _ => unreachable!(),
        }
    }
}

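// 128-bit signed min/max. The intrinsics operate on i128, so `dst` is cast and
// `val` is reinterpreted; `val as i128` is a lossless bitwise reinterpretation
// (hence the allows for cast_possible_wrap/cast_sign_loss below).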
#[cfg(not(any(
    target_arch = "x86_64",
    target_arch = "powerpc64",
    all(target_arch = "s390x", portable_atomic_pre_llvm_18),
)))]
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
unsafe fn atomic_max(dst: *mut u128, val: u128, order: Ordering) -> i128 {
    #[allow(clippy::cast_possible_wrap, clippy::cast_sign_loss)]
    // SAFETY: the caller must uphold the safety contract.
    unsafe {
        match order {
            Acquire => intrinsics::atomic_max_acquire(dst.cast::<i128>(), val as i128),
            Release => intrinsics::atomic_max_release(dst.cast::<i128>(), val as i128),
            AcqRel => intrinsics::atomic_max_acqrel(dst.cast::<i128>(), val as i128),
            Relaxed => intrinsics::atomic_max_relaxed(dst.cast::<i128>(), val as i128),
            SeqCst => intrinsics::atomic_max_seqcst(dst.cast::<i128>(), val as i128),
            _ => unreachable!(),
        }
    }
}

#[cfg(not(any(
    target_arch = "x86_64",
    target_arch = "powerpc64",
    all(target_arch = "s390x", portable_atomic_pre_llvm_18),
)))]
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
unsafe fn atomic_min(dst: *mut u128, val: u128, order: Ordering) -> i128 {
    #[allow(clippy::cast_possible_wrap, clippy::cast_sign_loss)]
    // SAFETY: the caller must uphold the safety contract.
    unsafe {
        match order {
            Acquire => intrinsics::atomic_min_acquire(dst.cast::<i128>(), val as i128),
            Release => intrinsics::atomic_min_release(dst.cast::<i128>(), val as i128),
            AcqRel => intrinsics::atomic_min_acqrel(dst.cast::<i128>(), val as i128),
            Relaxed => intrinsics::atomic_min_relaxed(dst.cast::<i128>(), val as i128),
            SeqCst => intrinsics::atomic_min_seqcst(dst.cast::<i128>(), val as i128),
            _ => unreachable!(),
        }
    }
}

#[cfg(not(any(
    target_arch = "x86_64",
    target_arch = "powerpc64",
    all(target_arch = "s390x", portable_atomic_pre_llvm_18),
)))]
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
unsafe fn atomic_umax(dst: *mut u128, val: u128, order: Ordering) -> u128 {
    // SAFETY: the caller must uphold the safety contract.
    unsafe {
        match order {
            Acquire => intrinsics::atomic_umax_acquire(dst, val),
            Release => intrinsics::atomic_umax_release(dst, val),
            AcqRel => intrinsics::atomic_umax_acqrel(dst, val),
            Relaxed => intrinsics::atomic_umax_relaxed(dst, val),
            SeqCst => intrinsics::atomic_umax_seqcst(dst, val),
            _ => unreachable!(),
        }
    }
}

#[cfg(not(any(
    target_arch = "x86_64",
    target_arch = "powerpc64",
    all(target_arch = "s390x", portable_atomic_pre_llvm_18),
)))]
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
unsafe fn atomic_umin(dst: *mut u128, val: u128, order: Ordering) -> u128 {
    // SAFETY: the caller must uphold the safety contract.
    unsafe {
        match order {
            Acquire => intrinsics::atomic_umin_acquire(dst, val),
            Release => intrinsics::atomic_umin_release(dst, val),
            AcqRel => intrinsics::atomic_umin_acqrel(dst, val),
            Relaxed => intrinsics::atomic_umin_relaxed(dst, val),
            SeqCst => intrinsics::atomic_umin_seqcst(dst, val),
            _ => unreachable!(),
        }
    }
}

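// !x == x ^ !0, so NOT maps to a single atomic XOR.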
#[cfg(not(any(target_arch = "x86_64", all(target_arch = "s390x", portable_atomic_pre_llvm_18))))]
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
unsafe fn atomic_not(dst: *mut u128, order: Ordering) -> u128 {
    // SAFETY: the caller must uphold the safety contract.
    unsafe { atomic_xor(dst, !0, order) }
}

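// There is no atomic negation intrinsic, so wrapping negation is implemented as
// a CAS loop via atomic_update.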
#[cfg(not(any(target_arch = "x86_64", all(target_arch = "s390x", portable_atomic_pre_llvm_18))))]
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
unsafe fn atomic_neg(dst: *mut u128, order: Ordering) -> u128 {
    // SAFETY: the caller must uphold the safety contract.
    unsafe { atomic_update(dst, order, u128::wrapping_neg) }
}

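// Lock-freedom: on the non-x86_64 targets supported here, the required 128-bit
// atomic support is a compile-time requirement (see the module-level comment),
// so the operations are always lock-free. On x86_64 it depends on CMPXCHG16B,
// which is either guaranteed at compile time or detected at runtime.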
#[cfg(not(target_arch = "x86_64"))]
#[inline]
const fn is_lock_free() -> bool {
    IS_ALWAYS_LOCK_FREE
}
#[cfg(not(target_arch = "x86_64"))]
const IS_ALWAYS_LOCK_FREE: bool = true;

#[cfg(target_arch = "x86_64")]
#[inline]
fn is_lock_free() -> bool {
    #[cfg(target_feature = "cmpxchg16b")]
    {
        // CMPXCHG16B is available at compile-time.
        true
    }
    #[cfg(not(target_feature = "cmpxchg16b"))]
    {
        detect::detect().has_cmpxchg16b()
    }
}
#[cfg(target_arch = "x86_64")]
const IS_ALWAYS_LOCK_FREE: bool = cfg!(target_feature = "cmpxchg16b");

atomic128!(AtomicI128, i128, atomic_max, atomic_min);
atomic128!(AtomicU128, u128, atomic_umax, atomic_umin);

#[cfg(test)]
mod tests {
    use super::*;

    test_atomic_int!(i128);
    test_atomic_int!(u128);

    // load/store/swap implementation is not affected by signedness, so it is
    // enough to test only unsigned types.
    stress_test!(u128);
}