//===-- tsan_sync.cc ------------------------------------------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer (TSan), a race detector.
//
//===----------------------------------------------------------------------===//
#include "sanitizer_common/sanitizer_placement_new.h"
#include "tsan_sync.h"
#include "tsan_rtl.h"
#include "tsan_mman.h"

namespace __tsan {

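// SyncVar holds the per-address state of a single synchronization object:
// its mutex, owner thread, vector clocks and creation stack.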
SyncVar::SyncVar(uptr addr)
  : mtx(MutexTypeSyncVar, StatMtxSyncVar)
  , addr(addr)
  , owner_tid(kInvalidTid)
  , last_lock()
  , recursion()
  , is_rw()
  , is_recursive()
  , is_broken()
  , is_linker_init() {
}

SyncTab::Part::Part()
  : mtx(MutexTypeSyncTab, StatMtxSyncTab)
  , val() {
}

SyncTab::SyncTab() {
}

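// Frees every SyncVar still linked into the table parts.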
SyncTab::~SyncTab() {
  for (int i = 0; i < kPartCount; i++) {
    while (tab_[i].val) {
      SyncVar *tmp = tab_[i].val;
      tab_[i].val = tmp->next;
      DestroyAndFree(tmp);
    }
  }
}

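// Returns the SyncVar for |addr|, creating it on first use, with its mutex
// acquired for writing or reading depending on |write_lock|. Heap addresses
// owned by the primary allocator are kept on their MBlock's list; all other
// addresses go through the hashed table part, first under an optimistic
// read lock and, if the object has to be inserted, under a write lock.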
SyncVar* SyncTab::GetAndLock(ThreadState *thr, uptr pc,
                             uptr addr, bool write_lock) {
#ifndef TSAN_GO
  if (PrimaryAllocator::PointerIsMine((void*)addr)) {
    MBlock *b = user_mblock(thr, (void*)addr);
    Lock l(&b->mtx);
    SyncVar *res = 0;
    for (res = b->head; res; res = res->next) {
      if (res->addr == addr)
        break;
    }
    if (res == 0) {
      StatInc(thr, StatSyncCreated);
      void *mem = internal_alloc(MBlockSync, sizeof(SyncVar));
      res = new(mem) SyncVar(addr);
      res->creation_stack.ObtainCurrent(thr, pc);
      res->next = b->head;
      b->head = res;
    }
    if (write_lock)
      res->mtx.Lock();
    else
      res->mtx.ReadLock();
    return res;
  }
#endif

  Part *p = &tab_[PartIdx(addr)];
  {
    ReadLock l(&p->mtx);
    for (SyncVar *res = p->val; res; res = res->next) {
      if (res->addr == addr) {
        if (write_lock)
          res->mtx.Lock();
        else
          res->mtx.ReadLock();
        return res;
      }
    }
  }
  {
    Lock l(&p->mtx);
    SyncVar *res = p->val;
    for (; res; res = res->next) {
      if (res->addr == addr)
        break;
    }
    if (res == 0) {
      StatInc(thr, StatSyncCreated);
      void *mem = internal_alloc(MBlockSync, sizeof(SyncVar));
      res = new(mem) SyncVar(addr);
#ifndef TSAN_GO
      res->creation_stack.ObtainCurrent(thr, pc);
#endif
      res->next = p->val;
      p->val = res;
    }
    if (write_lock)
      res->mtx.Lock();
    else
      res->mtx.ReadLock();
    return res;
  }
}

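// Unlinks and returns the SyncVar for |addr|, or 0 if there is none or it is
// linker-initialized (those are never removed). The Lock/Unlock pair below
// drains any thread still holding the SyncVar's mutex before the caller
// reclaims the memory.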
SyncVar* SyncTab::GetAndRemove(ThreadState *thr, uptr pc, uptr addr) {
#ifndef TSAN_GO
  if (PrimaryAllocator::PointerIsMine((void*)addr)) {
    MBlock *b = user_mblock(thr, (void*)addr);
    SyncVar *res = 0;
    {
      Lock l(&b->mtx);
      SyncVar **prev = &b->head;
      res = *prev;
      while (res) {
        if (res->addr == addr) {
          if (res->is_linker_init)
            return 0;
          *prev = res->next;
          break;
        }
        prev = &res->next;
        res = *prev;
      }
    }
    if (res) {
      StatInc(thr, StatSyncDestroyed);
      res->mtx.Lock();
      res->mtx.Unlock();
    }
    return res;
  }
#endif

  Part *p = &tab_[PartIdx(addr)];
  SyncVar *res = 0;
  {
    Lock l(&p->mtx);
    SyncVar **prev = &p->val;
    res = *prev;
    while (res) {
      if (res->addr == addr) {
        if (res->is_linker_init)
          return 0;
        *prev = res->next;
        break;
      }
      prev = &res->next;
      res = *prev;
    }
  }
  if (res) {
    StatInc(thr, StatSyncDestroyed);
    res->mtx.Lock();
    res->mtx.Unlock();
  }
  return res;
}

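// Approximate number of bytes attributed to this SyncVar, including its
// vector clocks and creation stack.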
uptr SyncVar::GetMemoryConsumption() {
  return sizeof(*this)
      + clock.size() * sizeof(u64)
      + read_clock.size() * sizeof(u64)
      + creation_stack.Size() * sizeof(uptr);
}

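// Sums the memory consumed by all SyncVars in the table and counts them
// into *nsync.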
uptr SyncTab::GetMemoryConsumption(uptr *nsync) {
  uptr mem = 0;
  for (int i = 0; i < kPartCount; i++) {
    Part *p = &tab_[i];
    Lock l(&p->mtx);
    for (SyncVar *s = p->val; s; s = s->next) {
      *nsync += 1;
      mem += s->GetMemoryConsumption();
    }
  }
  return mem;
}

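// Maps an address to one of kPartCount table parts; the low 3 bits are
// dropped so that addresses within the same 8-byte granule share a part.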
int SyncTab::PartIdx(uptr addr) {
  return (addr >> 3) % kPartCount;
}

StackTrace::StackTrace()
  : n_()
  , s_()
  , c_() {
}

StackTrace::StackTrace(uptr *buf, uptr cnt)
  : n_()
  , s_(buf)
  , c_(cnt) {
  CHECK_NE(buf, 0);
  CHECK_NE(cnt, 0);
}

StackTrace::~StackTrace() {
  Reset();
}

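// Frees the internally allocated buffer (if any) and marks the trace empty.
// A caller-provided buffer (c_ != 0) is never freed here.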
void StackTrace::Reset() {
  if (s_ && !c_) {
    CHECK_NE(n_, 0);
    internal_free(s_);
    s_ = 0;
  }
  n_ = 0;
}

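// Copies |cnt| PCs from |pcs|. With a caller-provided buffer the count must
// fit into it; otherwise a buffer of exactly |cnt| entries is allocated.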
219
Init(const uptr * pcs,uptr cnt)220 void StackTrace::Init(const uptr *pcs, uptr cnt) {
221 Reset();
222 if (cnt == 0)
223 return;
224 if (c_) {
225 CHECK_NE(s_, 0);
226 CHECK_LE(cnt, c_);
227 } else {
228 s_ = (uptr*)internal_alloc(MBlockStackTrace, cnt * sizeof(s_[0]));
229 }
230 n_ = cnt;
231 internal_memcpy(s_, pcs, cnt * sizeof(s_[0]));
232 }
233
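// Captures the current shadow stack of |thr|, optionally appending |toppc|
// as the top frame. With a caller-provided buffer the oldest frames are
// dropped if the stack does not fit; otherwise a buffer of the exact
// required size is allocated.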
void StackTrace::ObtainCurrent(ThreadState *thr, uptr toppc) {
  Reset();
  n_ = thr->shadow_stack_pos - thr->shadow_stack;
  if (n_ + !!toppc == 0)
    return;
  uptr start = 0;
  if (c_) {
    CHECK_NE(s_, 0);
    if (n_ + !!toppc > c_) {
      start = n_ - c_ + !!toppc;
      n_ = c_ - !!toppc;
    }
  } else {
    s_ = (uptr*)internal_alloc(MBlockStackTrace,
                               (n_ + !!toppc) * sizeof(s_[0]));
  }
  for (uptr i = 0; i < n_; i++)
    s_[i] = thr->shadow_stack[start + i];
  if (toppc) {
    s_[n_] = toppc;
    n_++;
  }
}

void StackTrace::CopyFrom(const StackTrace& other) {
  Reset();
  Init(other.Begin(), other.Size());
}

bool StackTrace::IsEmpty() const {
  return n_ == 0;
}

uptr StackTrace::Size() const {
  return n_;
}

uptr StackTrace::Get(uptr i) const {
  CHECK_LT(i, n_);
  return s_[i];
}

const uptr *StackTrace::Begin() const {
  return s_;
}

}  // namespace __tsan