//===-- CGCleanup.h - Classes for cleanups IR generation --------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// These classes support the generation of LLVM IR for cleanups.
//
//===----------------------------------------------------------------------===//

#ifndef CLANG_CODEGEN_CGCLEANUP_H
#define CLANG_CODEGEN_CGCLEANUP_H

/// EHScopeStack is defined in CodeGenFunction.h, but its
/// implementation is in this file and in CGCleanup.cpp.
#include "CodeGenFunction.h"

namespace llvm {
  class Value;
  class BasicBlock;
}

namespace clang {
namespace CodeGen {

/// A protected scope for zero-cost EH handling.
class EHScope {
  llvm::BasicBlock *CachedLandingPad;

  unsigned K : 2;

protected:
  enum { BitsRemaining = 30 };

public:
  enum Kind { Cleanup, Catch, Terminate, Filter };

  EHScope(Kind K) : CachedLandingPad(0), K(K) {}

  Kind getKind() const { return static_cast<Kind>(K); }

  llvm::BasicBlock *getCachedLandingPad() const {
    return CachedLandingPad;
  }

  void setCachedLandingPad(llvm::BasicBlock *Block) {
    CachedLandingPad = Block;
  }
};

/// A scope which attempts to handle some, possibly all, types of
/// exceptions.
///
/// Objective C @finally blocks are represented using a cleanup scope
/// after the catch scope.
class EHCatchScope : public EHScope {
  unsigned NumHandlers : BitsRemaining;

  // In effect, we have a flexible array member
  //   Handler Handlers[0];
  // But that's only standard in C99, not C++, so we have to do
  // annoying pointer arithmetic instead.

public:
  struct Handler {
    /// A type info value, or null (C++ null, not an LLVM null pointer)
    /// for a catch-all.
    llvm::Value *Type;

    /// The catch handler for this type.
    llvm::BasicBlock *Block;

    /// The unwind destination index for this handler.
    unsigned Index;
  };

private:
  friend class EHScopeStack;

  Handler *getHandlers() {
    return reinterpret_cast<Handler*>(this+1);
  }

  const Handler *getHandlers() const {
    return reinterpret_cast<const Handler*>(this+1);
  }

public:
  static size_t getSizeForNumHandlers(unsigned N) {
    return sizeof(EHCatchScope) + N * sizeof(Handler);
  }

  EHCatchScope(unsigned NumHandlers)
    : EHScope(Catch), NumHandlers(NumHandlers) {
  }

  unsigned getNumHandlers() const {
    return NumHandlers;
  }

  void setCatchAllHandler(unsigned I, llvm::BasicBlock *Block) {
    setHandler(I, /*catchall*/ 0, Block);
  }

  void setHandler(unsigned I, llvm::Value *Type, llvm::BasicBlock *Block) {
    assert(I < getNumHandlers());
    getHandlers()[I].Type = Type;
    getHandlers()[I].Block = Block;
  }

  const Handler &getHandler(unsigned I) const {
    assert(I < getNumHandlers());
    return getHandlers()[I];
  }

  typedef const Handler *iterator;
  iterator begin() const { return getHandlers(); }
  iterator end() const { return getHandlers() + getNumHandlers(); }

  static bool classof(const EHScope *Scope) {
    return Scope->getKind() == Catch;
  }
};
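
// A hedged sketch of how a catch scope is typically populated.  The push/pop
// helpers (pushCatch, popCatch) live on EHScopeStack in CodeGenFunction.h;
// treat the exact spelling below as an assumption of this sketch rather than
// part of this header.
//
//   // for:  try { ... } catch (A &a) { ... } catch (...) { ... }
//   EHCatchScope *CatchScope = EHStack.pushCatch(/*NumHandlers=*/2);
//   CatchScope->setHandler(0, TypeInfoForA, CatchABlock);
//   CatchScope->setCatchAllHandler(1, CatchAllBlock);
//   // ... emit the try body, then tear the scope down with popCatch() ...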

/// A cleanup scope which generates the cleanup blocks lazily.
class EHCleanupScope : public EHScope {
  /// Whether this cleanup needs to be run along normal edges.
  bool IsNormalCleanup : 1;

  /// Whether this cleanup needs to be run along exception edges.
  bool IsEHCleanup : 1;

  /// Whether this cleanup is currently active.
  bool IsActive : 1;

  /// Whether the normal cleanup should test the activation flag.
  bool TestFlagInNormalCleanup : 1;

  /// Whether the EH cleanup should test the activation flag.
  bool TestFlagInEHCleanup : 1;

  /// The amount of extra storage needed by the Cleanup.
  /// Always a multiple of the scope-stack alignment.
  unsigned CleanupSize : 12;

  /// The number of fixups required by enclosing scopes (not including
  /// this one).  If this is the top cleanup scope, all the fixups
  /// from this index onwards belong to this scope.
  unsigned FixupDepth : BitsRemaining - 17; // currently 13

  /// The nearest normal cleanup scope enclosing this one.
  EHScopeStack::stable_iterator EnclosingNormal;

  /// The nearest EH cleanup scope enclosing this one.
  EHScopeStack::stable_iterator EnclosingEH;

  /// The dual entry/exit block along the normal edge.  This is lazily
  /// created if needed before the cleanup is popped.
  llvm::BasicBlock *NormalBlock;

  /// The dual entry/exit block along the EH edge.  This is lazily
  /// created if needed before the cleanup is popped.
  llvm::BasicBlock *EHBlock;

  /// An optional i1 variable indicating whether this cleanup has been
  /// activated yet.
  llvm::AllocaInst *ActiveFlag;

  /// Extra information required for cleanups that have resolved
  /// branches through them.  This has to be allocated on the side
  /// because everything on the cleanup stack has to be trivially
  /// movable.
  struct ExtInfo {
    /// The destinations of normal branch-afters and branch-throughs.
    llvm::SmallPtrSet<llvm::BasicBlock*, 4> Branches;

    /// Normal branch-afters.
    llvm::SmallVector<std::pair<llvm::BasicBlock*,llvm::ConstantInt*>, 4>
      BranchAfters;

    /// The destinations of EH branch-afters and branch-throughs.
    /// TODO: optimize for the extremely common case of a single
    /// branch-through.
    llvm::SmallPtrSet<llvm::BasicBlock*, 4> EHBranches;

    /// EH branch-afters.
    llvm::SmallVector<std::pair<llvm::BasicBlock*,llvm::ConstantInt*>, 4>
      EHBranchAfters;
  };
  mutable struct ExtInfo *ExtInfo;

  struct ExtInfo &getExtInfo() {
    if (!ExtInfo) ExtInfo = new struct ExtInfo();
    return *ExtInfo;
  }

  const struct ExtInfo &getExtInfo() const {
    if (!ExtInfo) ExtInfo = new struct ExtInfo();
    return *ExtInfo;
  }

public:
  /// Gets the size required for a lazy cleanup scope with the given
  /// cleanup-data requirements.
  static size_t getSizeForCleanupSize(size_t Size) {
    return sizeof(EHCleanupScope) + Size;
  }

  size_t getAllocatedSize() const {
    return sizeof(EHCleanupScope) + CleanupSize;
  }

  EHCleanupScope(bool IsNormal, bool IsEH, bool IsActive,
                 unsigned CleanupSize, unsigned FixupDepth,
                 EHScopeStack::stable_iterator EnclosingNormal,
                 EHScopeStack::stable_iterator EnclosingEH)
    : EHScope(EHScope::Cleanup),
      IsNormalCleanup(IsNormal), IsEHCleanup(IsEH), IsActive(IsActive),
      TestFlagInNormalCleanup(false), TestFlagInEHCleanup(false),
      CleanupSize(CleanupSize), FixupDepth(FixupDepth),
      EnclosingNormal(EnclosingNormal), EnclosingEH(EnclosingEH),
      NormalBlock(0), EHBlock(0), ActiveFlag(0), ExtInfo(0)
  {
    assert(this->CleanupSize == CleanupSize && "cleanup size overflow");
  }

  ~EHCleanupScope() {
    delete ExtInfo;
  }

  bool isNormalCleanup() const { return IsNormalCleanup; }
  llvm::BasicBlock *getNormalBlock() const { return NormalBlock; }
  void setNormalBlock(llvm::BasicBlock *BB) { NormalBlock = BB; }

  bool isEHCleanup() const { return IsEHCleanup; }
  llvm::BasicBlock *getEHBlock() const { return EHBlock; }
  void setEHBlock(llvm::BasicBlock *BB) { EHBlock = BB; }

  bool isActive() const { return IsActive; }
  void setActive(bool A) { IsActive = A; }

  llvm::AllocaInst *getActiveFlag() const { return ActiveFlag; }
  void setActiveFlag(llvm::AllocaInst *Var) { ActiveFlag = Var; }

  void setTestFlagInNormalCleanup() { TestFlagInNormalCleanup = true; }
  bool shouldTestFlagInNormalCleanup() const { return TestFlagInNormalCleanup; }

  void setTestFlagInEHCleanup() { TestFlagInEHCleanup = true; }
  bool shouldTestFlagInEHCleanup() const { return TestFlagInEHCleanup; }

  unsigned getFixupDepth() const { return FixupDepth; }
  EHScopeStack::stable_iterator getEnclosingNormalCleanup() const {
    return EnclosingNormal;
  }
  EHScopeStack::stable_iterator getEnclosingEHCleanup() const {
    return EnclosingEH;
  }

  size_t getCleanupSize() const { return CleanupSize; }
  void *getCleanupBuffer() { return this + 1; }

  EHScopeStack::Cleanup *getCleanup() {
    return reinterpret_cast<EHScopeStack::Cleanup*>(getCleanupBuffer());
  }

  /// True if this cleanup scope has any branch-afters or branch-throughs.
  bool hasBranches() const { return ExtInfo && !ExtInfo->Branches.empty(); }

  /// Add a branch-after to this cleanup scope.  A branch-after is a
  /// branch from a point protected by this (normal) cleanup to a
  /// point in the normal cleanup scope immediately containing it.
  /// For example,
  ///   for (;;) { A a; break; }
  /// contains a branch-after.
  ///
  /// Branch-afters each have their own destination out of the
  /// cleanup, guaranteed distinct from anything else threaded through
  /// it.  Therefore branch-afters usually force a switch after the
  /// cleanup.
  void addBranchAfter(llvm::ConstantInt *Index,
                      llvm::BasicBlock *Block) {
    struct ExtInfo &ExtInfo = getExtInfo();
    if (ExtInfo.Branches.insert(Block))
      ExtInfo.BranchAfters.push_back(std::make_pair(Block, Index));
  }

  /// Return the number of unique branch-afters on this scope.
  unsigned getNumBranchAfters() const {
    return ExtInfo ? ExtInfo->BranchAfters.size() : 0;
  }

  llvm::BasicBlock *getBranchAfterBlock(unsigned I) const {
    assert(I < getNumBranchAfters());
    return ExtInfo->BranchAfters[I].first;
  }

  llvm::ConstantInt *getBranchAfterIndex(unsigned I) const {
    assert(I < getNumBranchAfters());
    return ExtInfo->BranchAfters[I].second;
  }

  /// Add a branch-through to this cleanup scope.  A branch-through is
  /// a branch from a scope protected by this (normal) cleanup to an
  /// enclosing scope other than the immediately-enclosing normal
  /// cleanup scope.
  ///
  /// In the following example, the branch through B's scope is a
  /// branch-through, while the branch through A's scope is a
  /// branch-after:
  ///   for (;;) { A a; B b; break; }
  ///
  /// All branch-throughs have a common destination out of the
  /// cleanup, one possibly shared with the fall-through.  Therefore
  /// branch-throughs usually don't force a switch after the cleanup.
  ///
  /// \return true if the branch-through was new to this scope
  bool addBranchThrough(llvm::BasicBlock *Block) {
    return getExtInfo().Branches.insert(Block);
  }

  /// Determines if this cleanup scope has any branch-throughs.
  bool hasBranchThroughs() const {
    if (!ExtInfo) return false;
    return (ExtInfo->BranchAfters.size() != ExtInfo->Branches.size());
  }

  // Same stuff, only for EH branches instead of normal branches.
  // It's quite possible that we could find a better representation
  // for this.

  bool hasEHBranches() const { return ExtInfo && !ExtInfo->EHBranches.empty(); }
  void addEHBranchAfter(llvm::ConstantInt *Index,
                        llvm::BasicBlock *Block) {
    struct ExtInfo &ExtInfo = getExtInfo();
    if (ExtInfo.EHBranches.insert(Block))
      ExtInfo.EHBranchAfters.push_back(std::make_pair(Block, Index));
  }

  unsigned getNumEHBranchAfters() const {
    return ExtInfo ? ExtInfo->EHBranchAfters.size() : 0;
  }

  llvm::BasicBlock *getEHBranchAfterBlock(unsigned I) const {
    assert(I < getNumEHBranchAfters());
    return ExtInfo->EHBranchAfters[I].first;
  }

  llvm::ConstantInt *getEHBranchAfterIndex(unsigned I) const {
    assert(I < getNumEHBranchAfters());
    return ExtInfo->EHBranchAfters[I].second;
  }

  bool addEHBranchThrough(llvm::BasicBlock *Block) {
    return getExtInfo().EHBranches.insert(Block);
  }

  bool hasEHBranchThroughs() const {
    if (!ExtInfo) return false;
    return (ExtInfo->EHBranchAfters.size() != ExtInfo->EHBranches.size());
  }

  static bool classof(const EHScope *Scope) {
    return (Scope->getKind() == Cleanup);
  }
};
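
// A hedged usage sketch, not part of the original header: cleanup scopes are
// normally created through EHScopeStack::pushCleanup and a Cleanup subclass,
// both declared in CodeGenFunction.h.  CallMyDtor is a hypothetical cleanup,
// and the exact Emit signature and NormalAndEHCleanup kind are assumptions
// about that interface in this revision.
//
//   struct CallMyDtor : EHScopeStack::Cleanup {     // hypothetical
//     llvm::Value *Addr;
//     CallMyDtor(llvm::Value *Addr) : Addr(Addr) {}
//     void Emit(CodeGenFunction &CGF, bool IsForEH) {
//       // emit the destructor call on Addr here
//     }
//   };
//   ...
//   CGF.EHStack.pushCleanup<CallMyDtor>(NormalAndEHCleanup, Addr);
//
// The cleanup object itself is stored in the trailing CleanupSize bytes after
// the EHCleanupScope (see getCleanupBuffer above), which is why it must be
// trivially movable.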

/// An exceptions scope which filters exceptions thrown through it.
/// Only exceptions matching the filter types will be permitted to be
/// thrown.
///
/// This is used to implement C++ exception specifications.
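///
/// For example, a function with the exception specification
/// "throw(A, B)" is emitted under a filter scope whose two filter
/// values are the type infos for A and B.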
class EHFilterScope : public EHScope {
  unsigned NumFilters : BitsRemaining;

  // Essentially ends in a flexible array member:
  //   llvm::Value *FilterTypes[0];

  llvm::Value **getFilters() {
    return reinterpret_cast<llvm::Value**>(this+1);
  }

  llvm::Value * const *getFilters() const {
    return reinterpret_cast<llvm::Value* const *>(this+1);
  }

public:
  EHFilterScope(unsigned NumFilters) :
    EHScope(Filter), NumFilters(NumFilters) {}

  static size_t getSizeForNumFilters(unsigned NumFilters) {
    return sizeof(EHFilterScope) + NumFilters * sizeof(llvm::Value*);
  }

  unsigned getNumFilters() const { return NumFilters; }

  void setFilter(unsigned I, llvm::Value *FilterValue) {
    assert(I < getNumFilters());
    getFilters()[I] = FilterValue;
  }

  llvm::Value *getFilter(unsigned I) const {
    assert(I < getNumFilters());
    return getFilters()[I];
  }

  static bool classof(const EHScope *Scope) {
    return Scope->getKind() == Filter;
  }
};

/// An exceptions scope which calls std::terminate if any exception
/// reaches it.
class EHTerminateScope : public EHScope {
  unsigned DestIndex : BitsRemaining;
public:
  EHTerminateScope(unsigned Index) : EHScope(Terminate), DestIndex(Index) {}
  static size_t getSize() { return sizeof(EHTerminateScope); }

  unsigned getDestIndex() const { return DestIndex; }

  static bool classof(const EHScope *Scope) {
    return Scope->getKind() == Terminate;
  }
};

/// A non-stable pointer into the scope stack.
class EHScopeStack::iterator {
  char *Ptr;

  friend class EHScopeStack;
  explicit iterator(char *Ptr) : Ptr(Ptr) {}

public:
  iterator() : Ptr(0) {}

  EHScope *get() const {
    return reinterpret_cast<EHScope*>(Ptr);
  }

  EHScope *operator->() const { return get(); }
  EHScope &operator*() const { return *get(); }

  iterator &operator++() {
    switch (get()->getKind()) {
    case EHScope::Catch:
      Ptr += EHCatchScope::getSizeForNumHandlers(
          static_cast<const EHCatchScope*>(get())->getNumHandlers());
      break;

    case EHScope::Filter:
      Ptr += EHFilterScope::getSizeForNumFilters(
          static_cast<const EHFilterScope*>(get())->getNumFilters());
      break;

    case EHScope::Cleanup:
      Ptr += static_cast<const EHCleanupScope*>(get())
        ->getAllocatedSize();
      break;

    case EHScope::Terminate:
      Ptr += EHTerminateScope::getSize();
      break;
    }

    return *this;
  }

  iterator next() {
    iterator copy = *this;
    ++copy;
    return copy;
  }

  iterator operator++(int) {
    iterator copy = *this;
    operator++();
    return copy;
  }

  bool encloses(iterator other) const { return Ptr >= other.Ptr; }
  bool strictlyEncloses(iterator other) const { return Ptr > other.Ptr; }

  bool operator==(iterator other) const { return Ptr == other.Ptr; }
  bool operator!=(iterator other) const { return Ptr != other.Ptr; }
};

inline EHScopeStack::iterator EHScopeStack::begin() const {
  return iterator(StartOfData);
}

inline EHScopeStack::iterator EHScopeStack::end() const {
  return iterator(EndOfBuffer);
}
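
// A hedged iteration sketch: because every scope records its own size,
// operator++ above can step over each entry, walking the stack from the
// innermost scope (begin) to the outermost (end), e.g.:
//
//   for (EHScopeStack::iterator I = EHStack.begin(), E = EHStack.end();
//        I != E; ++I)
//     if (isa<EHCleanupScope>(*I))
//       ...;  // inspect the cleanup scope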

inline void EHScopeStack::popCatch() {
  assert(!empty() && "popping exception stack when empty");

  assert(isa<EHCatchScope>(*begin()));
  StartOfData += EHCatchScope::getSizeForNumHandlers(
                          cast<EHCatchScope>(*begin()).getNumHandlers());

  if (empty()) NextEHDestIndex = FirstEHDestIndex;

  assert(CatchDepth > 0 && "mismatched catch/terminate push/pop");
  CatchDepth--;
}

inline void EHScopeStack::popTerminate() {
  assert(!empty() && "popping exception stack when empty");

  assert(isa<EHTerminateScope>(*begin()));
  StartOfData += EHTerminateScope::getSize();

  if (empty()) NextEHDestIndex = FirstEHDestIndex;

  assert(CatchDepth > 0 && "mismatched catch/terminate push/pop");
  CatchDepth--;
}

inline EHScopeStack::iterator EHScopeStack::find(stable_iterator sp) const {
  assert(sp.isValid() && "finding invalid savepoint");
  assert(sp.Size <= stable_begin().Size && "finding savepoint after pop");
  return iterator(EndOfBuffer - sp.Size);
}

inline EHScopeStack::stable_iterator
EHScopeStack::stabilize(iterator ir) const {
  assert(StartOfData <= ir.Ptr && ir.Ptr <= EndOfBuffer);
  return stable_iterator(EndOfBuffer - ir.Ptr);
}
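
// Note that a stable_iterator is stored as an offset from the end of the
// buffer (EndOfBuffer - Ptr), so it stays valid across later pushes and
// buffer reallocations, unlike the raw pointer held by iterator above;
// find() converts it back into an iterator on demand.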

inline EHScopeStack::stable_iterator
EHScopeStack::getInnermostActiveNormalCleanup() const {
  for (EHScopeStack::stable_iterator
         I = getInnermostNormalCleanup(), E = stable_end(); I != E; ) {
    EHCleanupScope &S = cast<EHCleanupScope>(*find(I));
    if (S.isActive()) return I;
    I = S.getEnclosingNormalCleanup();
  }
  return stable_end();
}

inline EHScopeStack::stable_iterator
EHScopeStack::getInnermostActiveEHCleanup() const {
  for (EHScopeStack::stable_iterator
         I = getInnermostEHCleanup(), E = stable_end(); I != E; ) {
    EHCleanupScope &S = cast<EHCleanupScope>(*find(I));
    if (S.isActive()) return I;
    I = S.getEnclosingEHCleanup();
  }
  return stable_end();
}

}
}

#endif