//===-- tsan_interface_atomic.h ---------------------------------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer (TSan), a race detector.
//
//===----------------------------------------------------------------------===//
#ifndef TSAN_INTERFACE_ATOMIC_H
#define TSAN_INTERFACE_ATOMIC_H

#ifdef __cplusplus
extern "C" {
#endif

typedef char  __tsan_atomic8;
typedef short __tsan_atomic16;  // NOLINT
typedef int   __tsan_atomic32;
typedef long  __tsan_atomic64;  // NOLINT

typedef enum {
  __tsan_memory_order_relaxed = 1 << 0,
  __tsan_memory_order_consume = 1 << 1,
  __tsan_memory_order_acquire = 1 << 2,
  __tsan_memory_order_release = 1 << 3,
  __tsan_memory_order_acq_rel = 1 << 4,
  __tsan_memory_order_seq_cst = 1 << 5,
} __tsan_memory_order;
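// The orderings above mirror the C11/C++11 memory orders (relaxed, consume,
// acquire, release, acq_rel, seq_cst). Each value is a distinct bit flag,
// presumably so an implementation can test a set of allowed orderings with a
// single mask rather than a switch.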

__tsan_atomic8 __tsan_atomic8_load(const volatile __tsan_atomic8 *a,
    __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_load(const volatile __tsan_atomic16 *a,
    __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_load(const volatile __tsan_atomic32 *a,
    __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_load(const volatile __tsan_atomic64 *a,
    __tsan_memory_order mo);

void __tsan_atomic8_store(volatile __tsan_atomic8 *a, __tsan_atomic8 v,
    __tsan_memory_order mo);
void __tsan_atomic16_store(volatile __tsan_atomic16 *a, __tsan_atomic16 v,
    __tsan_memory_order mo);
void __tsan_atomic32_store(volatile __tsan_atomic32 *a, __tsan_atomic32 v,
    __tsan_memory_order mo);
void __tsan_atomic64_store(volatile __tsan_atomic64 *a, __tsan_atomic64 v,
    __tsan_memory_order mo);
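// Illustrative use of the load/store entry points (a sketch only; "flag" is a
// hypothetical variable, and application code would normally write C11/C++11
// atomics and let ThreadSanitizer's instrumentation emit these calls):
//
//   __tsan_atomic32 flag = 0;
//   // Publishing thread:
//   __tsan_atomic32_store(&flag, 1, __tsan_memory_order_release);
//   // Consuming thread:
//   if (__tsan_atomic32_load(&flag, __tsan_memory_order_acquire) == 1) {
//     // Data written before the release store is visible here.
//   }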

__tsan_atomic8 __tsan_atomic8_exchange(volatile __tsan_atomic8 *a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_exchange(volatile __tsan_atomic16 *a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_exchange(volatile __tsan_atomic32 *a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_exchange(volatile __tsan_atomic64 *a,
    __tsan_atomic64 v, __tsan_memory_order mo);

__tsan_atomic8 __tsan_atomic8_fetch_add(volatile __tsan_atomic8 *a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_add(volatile __tsan_atomic16 *a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_add(volatile __tsan_atomic32 *a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_add(volatile __tsan_atomic64 *a,
    __tsan_atomic64 v, __tsan_memory_order mo);

__tsan_atomic8 __tsan_atomic8_fetch_and(volatile __tsan_atomic8 *a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_and(volatile __tsan_atomic16 *a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_and(volatile __tsan_atomic32 *a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_and(volatile __tsan_atomic64 *a,
    __tsan_atomic64 v, __tsan_memory_order mo);

__tsan_atomic8 __tsan_atomic8_fetch_or(volatile __tsan_atomic8 *a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_or(volatile __tsan_atomic16 *a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_or(volatile __tsan_atomic32 *a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_or(volatile __tsan_atomic64 *a,
    __tsan_atomic64 v, __tsan_memory_order mo);

__tsan_atomic8 __tsan_atomic8_fetch_xor(volatile __tsan_atomic8 *a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_xor(volatile __tsan_atomic16 *a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_xor(volatile __tsan_atomic32 *a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_xor(volatile __tsan_atomic64 *a,
    __tsan_atomic64 v, __tsan_memory_order mo);
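// Each __tsan_atomicN_fetch_* function above is an atomic read-modify-write
// that returns the value the object held before the operation, by analogy to
// C11 atomic_fetch_*_explicit. A minimal sketch ("counter" is hypothetical):
//
//   __tsan_atomic64 counter = 0;
//   __tsan_atomic64 old =
//       __tsan_atomic64_fetch_add(&counter, 1, __tsan_memory_order_relaxed);
//   // old == 0, counter == 1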

int __tsan_atomic8_compare_exchange_weak(volatile __tsan_atomic8 *a,
    __tsan_atomic8 *c, __tsan_atomic8 v, __tsan_memory_order mo);
int __tsan_atomic16_compare_exchange_weak(volatile __tsan_atomic16 *a,
    __tsan_atomic16 *c, __tsan_atomic16 v, __tsan_memory_order mo);
int __tsan_atomic32_compare_exchange_weak(volatile __tsan_atomic32 *a,
    __tsan_atomic32 *c, __tsan_atomic32 v, __tsan_memory_order mo);
int __tsan_atomic64_compare_exchange_weak(volatile __tsan_atomic64 *a,
    __tsan_atomic64 *c, __tsan_atomic64 v, __tsan_memory_order mo);

int __tsan_atomic8_compare_exchange_strong(volatile __tsan_atomic8 *a,
    __tsan_atomic8 *c, __tsan_atomic8 v, __tsan_memory_order mo);
int __tsan_atomic16_compare_exchange_strong(volatile __tsan_atomic16 *a,
    __tsan_atomic16 *c, __tsan_atomic16 v, __tsan_memory_order mo);
int __tsan_atomic32_compare_exchange_strong(volatile __tsan_atomic32 *a,
    __tsan_atomic32 *c, __tsan_atomic32 v, __tsan_memory_order mo);
int __tsan_atomic64_compare_exchange_strong(volatile __tsan_atomic64 *a,
    __tsan_atomic64 *c, __tsan_atomic64 v, __tsan_memory_order mo);
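// By analogy to C11 atomic_compare_exchange_(weak|strong): *c holds the
// expected value and v the desired value, and a non-zero return indicates the
// exchange succeeded. As with the standard operations, the _weak variants are
// presumably permitted to fail spuriously and are intended for retry loops,
// while the _strong variants fail only on a genuine value mismatch.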

void __tsan_atomic_thread_fence(__tsan_memory_order mo);
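// By analogy to C11 atomic_thread_fence: a memory fence with ordering mo that
// is not tied to any particular atomic object.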

#ifdef __cplusplus
}  // extern "C"
#endif

#endif  // #ifndef TSAN_INTERFACE_ATOMIC_H