//===--- CaptureTracking.cpp - Determine whether a pointer is captured ----===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file contains routines that help determine which pointers are captured.
// A pointer value is captured if the function makes a copy of any part of the
// pointer that outlives the call. Not being captured means, more or less, that
// the pointer is only dereferenced and not stored in a global. Returning part
// of the pointer as the function return value may or may not count as capturing
// the pointer, depending on the context.
//
//===----------------------------------------------------------------------===//
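
// Purely for illustration (a hypothetical C-level sketch, not used by the
// analysis below): the first function here captures its pointer argument,
// while the second only dereferences it and does not:
//
//   int *G;
//   void f(int *P) { G = P; }    // a copy of P outlives the call: captured
//   int  g(int *P) { return *P; } // P is only read through: not captured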

#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/CFG.h"
#include "llvm/Analysis/CaptureTracking.h"
#include "llvm/Analysis/OrderedBasicBlock.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"

using namespace llvm;

CaptureTracker::~CaptureTracker() {}

bool CaptureTracker::shouldExplore(const Use *U) { return true; }

namespace {
  struct SimpleCaptureTracker : public CaptureTracker {
    explicit SimpleCaptureTracker(bool ReturnCaptures)
      : ReturnCaptures(ReturnCaptures), Captured(false) {}

    void tooManyUses() override { Captured = true; }

    bool captured(const Use *U) override {
      if (isa<ReturnInst>(U->getUser()) && !ReturnCaptures)
        return false;

      Captured = true;
      return true;
    }

    bool ReturnCaptures;

    bool Captured;
  };

  /// Only find pointer captures which happen before the given instruction. Uses
  /// the dominator tree to determine whether one instruction is before another.
  /// Only support the case where the Value is defined in the same basic block
  /// as the given instruction and the use.
  struct CapturesBefore : public CaptureTracker {

    CapturesBefore(bool ReturnCaptures, const Instruction *I, DominatorTree *DT,
                   bool IncludeI, OrderedBasicBlock *IC)
      : OrderedBB(IC), BeforeHere(I), DT(DT),
        ReturnCaptures(ReturnCaptures), IncludeI(IncludeI), Captured(false) {}

    void tooManyUses() override { Captured = true; }

    bool isSafeToPrune(Instruction *I) {
      BasicBlock *BB = I->getParent();
      // We explore this usage only if the usage can reach "BeforeHere".
      // If use is not reachable from entry, there is no need to explore.
      if (BeforeHere != I && !DT->isReachableFromEntry(BB))
        return true;

      // Compute the case where both instructions are inside the same basic
      // block. Since instructions in the same BB as BeforeHere are numbered in
      // 'OrderedBB', avoid using 'dominates' and 'isPotentiallyReachable'
      // which are very expensive for large basic blocks.
      if (BB == BeforeHere->getParent()) {
        // 'I' dominates 'BeforeHere' => not safe to prune.
        //
        // The value defined by an invoke dominates an instruction only
        // if it dominates every instruction in UseBB. A PHI is dominated only
        // if the instruction dominates every possible use in the UseBB. Since
        // UseBB == BB, avoid pruning.
        if (isa<InvokeInst>(BeforeHere) || isa<PHINode>(I) || I == BeforeHere)
          return false;
        if (!OrderedBB->dominates(BeforeHere, I))
          return false;

        // 'BeforeHere' comes before 'I', so it's safe to prune if we also
        // guarantee that 'I' never reaches 'BeforeHere' through a back-edge or
        // by its successors, i.e., prune if:
        //
        // (1) BB is the entry block or has no successors.
        // (2) There's no path coming back through BB successors.
        if (BB == &BB->getParent()->getEntryBlock() ||
            !BB->getTerminator()->getNumSuccessors())
          return true;

        SmallVector<BasicBlock*, 32> Worklist;
        Worklist.append(succ_begin(BB), succ_end(BB));
        return !isPotentiallyReachableFromMany(Worklist, BB, DT);
      }

      // If the value is defined in the same basic block as use and BeforeHere,
      // there is no need to explore the use if BeforeHere dominates use.
      // Check whether there is a path from I to BeforeHere.
      if (BeforeHere != I && DT->dominates(BeforeHere, I) &&
          !isPotentiallyReachable(I, BeforeHere, DT))
        return true;

      return false;
    }

    bool shouldExplore(const Use *U) override {
      Instruction *I = cast<Instruction>(U->getUser());

      if (BeforeHere == I && !IncludeI)
        return false;

      if (isSafeToPrune(I))
        return false;

      return true;
    }

    bool captured(const Use *U) override {
      if (isa<ReturnInst>(U->getUser()) && !ReturnCaptures)
        return false;

      if (!shouldExplore(U))
        return false;

      Captured = true;
      return true;
    }

    OrderedBasicBlock *OrderedBB;
    const Instruction *BeforeHere;
    DominatorTree *DT;

    bool ReturnCaptures;
    bool IncludeI;

    bool Captured;
  };
}

/// PointerMayBeCaptured - Return true if this pointer value may be captured
/// by the enclosing function (which is required to exist). This routine can
/// be expensive, so consider caching the results. The boolean ReturnCaptures
/// specifies whether returning the value (or part of it) from the function
/// counts as capturing it or not. The boolean StoreCaptures specifies whether
/// storing the value (or part of it) into memory anywhere automatically
/// counts as capturing it or not.
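///
/// For illustration only, a hypothetical caller (not part of this file) might
/// use it along these lines, where 'AI' names some local allocation of
/// interest:
///
///   if (!PointerMayBeCaptured(AI, /*ReturnCaptures=*/true,
///                             /*StoreCaptures=*/true))
///     ; // no copy of AI outlives the enclosing function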
bool llvm::PointerMayBeCaptured(const Value *V,
                                bool ReturnCaptures, bool StoreCaptures) {
  assert(!isa<GlobalValue>(V) &&
         "It doesn't make sense to ask whether a global is captured.");

  // TODO: If StoreCaptures is not true, we could do fancy analysis
  // to determine whether this store is not actually an escape point.
  // In that case, BasicAliasAnalysis should be updated as well to
  // take advantage of this.
  (void)StoreCaptures;

  SimpleCaptureTracker SCT(ReturnCaptures);
  PointerMayBeCaptured(V, &SCT);
  return SCT.Captured;
}

/// PointerMayBeCapturedBefore - Return true if this pointer value may be
/// captured by the enclosing function (which is required to exist). If a
/// DominatorTree is provided, only captures which happen before the given
/// instruction are considered. This routine can be expensive, so consider
/// caching the results. The boolean ReturnCaptures specifies whether
/// returning the value (or part of it) from the function counts as capturing
/// it or not. The boolean StoreCaptures specifies whether storing the value
/// (or part of it) into memory anywhere automatically counts as capturing it
/// or not. An ordered basic block \p OBB can be used in order to speed up
/// queries about relative order among instructions in the same basic block.
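///
/// As a sketch only (hypothetical caller, not part of this file), a pass
/// holding a DominatorTree 'DT' might ask whether 'Ptr' can be observed as
/// captured by code executing before some load 'LI':
///
///   if (!PointerMayBeCapturedBefore(Ptr, /*ReturnCaptures=*/false,
///                                   /*StoreCaptures=*/true, LI, DT,
///                                   /*IncludeI=*/false, /*OBB=*/nullptr))
///     ; // no capture happens before LI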
bool llvm::PointerMayBeCapturedBefore(const Value *V, bool ReturnCaptures,
                                      bool StoreCaptures, const Instruction *I,
                                      DominatorTree *DT, bool IncludeI,
                                      OrderedBasicBlock *OBB) {
  assert(!isa<GlobalValue>(V) &&
         "It doesn't make sense to ask whether a global is captured.");
  bool UseNewOBB = OBB == nullptr;

  if (!DT)
    return PointerMayBeCaptured(V, ReturnCaptures, StoreCaptures);
  if (UseNewOBB)
    OBB = new OrderedBasicBlock(I->getParent());

  // TODO: See comment in PointerMayBeCaptured regarding what could be done
  // with StoreCaptures.

  CapturesBefore CB(ReturnCaptures, I, DT, IncludeI, OBB);
  PointerMayBeCaptured(V, &CB);

  if (UseNewOBB)
    delete OBB;
  return CB.Captured;
}

/// TODO: Write a new FunctionPass AliasAnalysis so that it can keep
/// a cache. Then we can move the code from BasicAliasAnalysis into
/// that path, and remove this threshold.
static int const Threshold = 20;

void llvm::PointerMayBeCaptured(const Value *V, CaptureTracker *Tracker) {
  assert(V->getType()->isPointerTy() && "Capture is for pointers only!");
  SmallVector<const Use *, Threshold> Worklist;
  SmallSet<const Use *, Threshold> Visited;
  int Count = 0;

  for (const Use &U : V->uses()) {
    // If there are lots of uses, conservatively say that the value
    // is captured to avoid taking too much compile time.
    if (Count++ >= Threshold)
      return Tracker->tooManyUses();

    if (!Tracker->shouldExplore(&U)) continue;
    Visited.insert(&U);
    Worklist.push_back(&U);
  }

  while (!Worklist.empty()) {
    const Use *U = Worklist.pop_back_val();
    Instruction *I = cast<Instruction>(U->getUser());
    V = U->get();

    switch (I->getOpcode()) {
    case Instruction::Call:
    case Instruction::Invoke: {
      CallSite CS(I);
      // Not captured if the callee is readonly, doesn't return a copy through
      // its return value and doesn't unwind (a readonly function can leak bits
      // by throwing an exception or not depending on the input value).
      if (CS.onlyReadsMemory() && CS.doesNotThrow() && I->getType()->isVoidTy())
        break;

      // Volatile operations effectively capture the memory location that they
      // load and store to.
      if (auto *MI = dyn_cast<MemIntrinsic>(I))
        if (MI->isVolatile())
          if (Tracker->captured(U))
            return;

      // Not captured if only passed via 'nocapture' arguments. Note that
      // calling a function pointer does not in itself cause the pointer to
      // be captured. This is a subtle point considering that (for example)
      // the callee might return its own address. It is analogous to saying
      // that loading a value from a pointer does not cause the pointer to be
      // captured, even though the loaded value might be the pointer itself
      // (think of self-referential objects).
      CallSite::data_operand_iterator B =
        CS.data_operands_begin(), E = CS.data_operands_end();
      for (CallSite::data_operand_iterator A = B; A != E; ++A)
        if (A->get() == V && !CS.doesNotCapture(A - B))
          // The parameter is not marked 'nocapture' - captured.
          if (Tracker->captured(U))
            return;
      break;
    }
    case Instruction::Load:
      // Volatile loads make the address observable.
      if (cast<LoadInst>(I)->isVolatile())
        if (Tracker->captured(U))
          return;
      break;
    case Instruction::VAArg:
      // "va-arg" from a pointer does not cause it to be captured.
      break;
    case Instruction::Store:
      // Stored the pointer - conservatively assume it may be captured.
      // Volatile stores make the address observable.
      if (V == I->getOperand(0) || cast<StoreInst>(I)->isVolatile())
        if (Tracker->captured(U))
          return;
      break;
    case Instruction::AtomicRMW: {
      // atomicrmw conceptually includes both a load and store from
      // the same location.
      // As with a store, the location being accessed is not captured,
      // but the value being stored is.
      // Volatile stores make the address observable.
      auto *ARMWI = cast<AtomicRMWInst>(I);
      if (ARMWI->getValOperand() == V || ARMWI->isVolatile())
        if (Tracker->captured(U))
          return;
      break;
    }
    case Instruction::AtomicCmpXchg: {
      // cmpxchg conceptually includes both a load and store from
      // the same location.
      // As with a store, the location being accessed is not captured,
      // but the value being stored is.
      // Volatile stores make the address observable.
      auto *ACXI = cast<AtomicCmpXchgInst>(I);
      if (ACXI->getCompareOperand() == V || ACXI->getNewValOperand() == V ||
          ACXI->isVolatile())
        if (Tracker->captured(U))
          return;
      break;
    }
    case Instruction::BitCast:
    case Instruction::GetElementPtr:
    case Instruction::PHI:
    case Instruction::Select:
    case Instruction::AddrSpaceCast:
      // The original value is not captured via this if the new value isn't.
      Count = 0;
      for (Use &UU : I->uses()) {
        // If there are lots of uses, conservatively say that the value
        // is captured to avoid taking too much compile time.
        if (Count++ >= Threshold)
          return Tracker->tooManyUses();

        if (Visited.insert(&UU).second)
          if (Tracker->shouldExplore(&UU))
            Worklist.push_back(&UU);
      }
      break;
    case Instruction::ICmp: {
      // Don't count comparisons of a no-alias return value against null as
      // captures. This allows us to ignore comparisons of malloc results
      // with null, for example.
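      // As an illustration (hypothetical C, not tied to any particular IR):
      //   p = malloc(n); if (p == 0) return;
      // the null check alone does not capture 'p'.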
      if (ConstantPointerNull *CPN =
              dyn_cast<ConstantPointerNull>(I->getOperand(1)))
        if (CPN->getType()->getAddressSpace() == 0)
          if (isNoAliasCall(V->stripPointerCasts()))
            break;
      // Comparison against a value stored in a global variable. Given that the
      // pointer does not escape, its value cannot be guessed and stored
      // separately in a global variable.
      unsigned OtherIndex = (I->getOperand(0) == V) ? 1 : 0;
      auto *LI = dyn_cast<LoadInst>(I->getOperand(OtherIndex));
      if (LI && isa<GlobalVariable>(LI->getPointerOperand()))
        break;
      // Otherwise, be conservative. There are crazy ways to capture pointers
      // using comparisons.
      if (Tracker->captured(U))
        return;
      break;
    }
    default:
      // Something else - be conservative and say it is captured.
      if (Tracker->captured(U))
        return;
      break;
    }
  }

  // All uses examined.
}