//===- subzero/src/IceGlobalContext.h - Global context defs -----*- C++ -*-===//
//
//                        The Subzero Code Generator
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
///
/// \file
/// \brief Declares aspects of the compilation that persist across multiple
/// functions.
///
//===----------------------------------------------------------------------===//

#ifndef SUBZERO_SRC_ICEGLOBALCONTEXT_H
#define SUBZERO_SRC_ICEGLOBALCONTEXT_H

#include "IceDefs.h"
#include "IceClFlags.h"
#include "IceInstrumentation.h"
#include "IceIntrinsics.h"
#include "IceRNG.h"
#include "IceStringPool.h"
#include "IceSwitchLowering.h"
#include "IceTargetLowering.def"
#include "IceThreading.h"
#include "IceTimerTree.h"
#include "IceTypes.h"
#include "IceUtils.h"

#include <array>
#include <atomic>
#include <cassert>
#include <functional>
#include <memory>
#include <mutex>
#include <thread>
#include <type_traits>
#include <utility>
#include <vector>

namespace Ice {

class ConstantPool;
class EmitterWorkItem;
class FuncSigType;
class Instrumentation;

// Runtime helper function IDs

enum class RuntimeHelper {
#define X(Tag, Name) H_##Tag,
  RUNTIME_HELPER_FUNCTIONS_TABLE
#undef X
      H_Num
};
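
// Illustrative note (not code from this file): RUNTIME_HELPER_FUNCTIONS_TABLE
// in IceTargetLowering.def is an X-macro list of (Tag, Name) pairs, so a
// hypothetical entry X(example_helper, "example_helper") would expand to the
// enumerator H_example_helper above.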

/// OptWorkItem is a simple wrapper used to pass parse information for a
/// function block to a translator thread.
class OptWorkItem {
  OptWorkItem(const OptWorkItem &) = delete;
  OptWorkItem &operator=(const OptWorkItem &) = delete;

public:
  // Get the Cfg for the function to translate.
  virtual std::unique_ptr<Cfg> getParsedCfg() = 0;
  virtual ~OptWorkItem() = default;

protected:
  OptWorkItem() = default;
};
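
// Illustrative sketch (hypothetical, not part of this header): a minimal
// OptWorkItem that already owns a fully constructed Cfg. The name
// CfgOptWorkItem is made up; real work items may instead defer Cfg
// construction until getParsedCfg() is called.
//
//   class CfgOptWorkItem : public OptWorkItem {
//   public:
//     explicit CfgOptWorkItem(std::unique_ptr<Cfg> Func)
//         : Func(std::move(Func)) {}
//     std::unique_ptr<Cfg> getParsedCfg() override { return std::move(Func); }
//
//   private:
//     std::unique_ptr<Cfg> Func;
//   };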

class GlobalContext {
  GlobalContext() = delete;
  GlobalContext(const GlobalContext &) = delete;
  GlobalContext &operator=(const GlobalContext &) = delete;

  /// CodeStats collects rudimentary statistics during translation.
  class CodeStats {
    CodeStats(const CodeStats &) = delete;
    CodeStats &operator=(const CodeStats &) = default;
#define CODESTATS_TABLE                                                        \
  /* dump string, enum value */                                                \
  X("Inst Count  ", InstCount)                                                 \
  X("Regs Saved  ", RegsSaved)                                                 \
  X("Frame Bytes ", FrameByte)                                                 \
  X("Spills      ", NumSpills)                                                 \
  X("Fills       ", NumFills)                                                  \
  X("R/P Imms    ", NumRPImms)
    //#define X(str, tag)

  public:
    enum CSTag {
#define X(str, tag) CS_##tag,
      CODESTATS_TABLE
#undef X
          CS_NUM
    };
    CodeStats() { reset(); }
    void reset() { Stats.fill(0); }
    void update(CSTag Tag, uint32_t Count = 1) {
      assert(Tag < Stats.size());
      Stats[Tag] += Count;
    }
    void add(const CodeStats &Other) {
      for (uint32_t i = 0; i < Stats.size(); ++i)
        Stats[i] += Other.Stats[i];
    }
    /// Dumps the stats for the given Cfg.  If Func==nullptr, it identifies it
    /// as the "final" cumulative stats instead of as a specific function's
    /// name.
    void dump(const Cfg *Func, GlobalContext *Ctx);

  private:
    std::array<uint32_t, CS_NUM> Stats;
  };
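
  // Illustrative sketch (hypothetical usage, not code from this file): how the
  // CODESTATS_TABLE enumerators and update()/add() fit together.
  //
  //   CodeStats PerFunction;
  //   PerFunction.update(CS_InstCount, 12); // 12 instructions emitted
  //   PerFunction.update(CS_NumSpills);     // one spill
  //   Cumulative.add(PerFunction);          // fold into the cumulative stats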

  /// TimerList is a vector of TimerStack objects, with extra methods
  /// to initialize and merge these vectors.
  class TimerList : public std::vector<TimerStack> {
    TimerList(const TimerList &) = delete;
    TimerList &operator=(const TimerList &) = delete;

  public:
    TimerList() = default;
    /// initInto() initializes a target list of timers based on the
    /// current list.  In particular, it creates the same number of
    /// timers, in the same order, with the same names, but initially
    /// empty of timing data.
    void initInto(TimerList &Dest) const {
      if (!BuildDefs::timers())
        return;
      Dest.clear();
      for (const TimerStack &Stack : *this) {
        Dest.push_back(TimerStack(Stack.getName()));
      }
    }
    void mergeFrom(TimerList &Src) {
      if (!BuildDefs::timers())
        return;
      assert(size() == Src.size());
      size_type i = 0;
      for (TimerStack &Stack : *this) {
        assert(Stack.getName() == Src[i].getName());
        Stack.mergeFrom(Src[i]);
        ++i;
      }
    }
  };
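
  // Illustrative sketch (hypothetical usage, not code from this file): the
  // global timer list seeds each per-thread copy, and the copies are folded
  // back once the worker threads finish.
  //
  //   TimerList PerThread;
  //   GlobalTimers.initInto(PerThread); // same stacks, empty timing data
  //   // ... thread does timed work ...
  //   GlobalTimers.mergeFrom(PerThread);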

  /// ThreadContext contains thread-local data.  This data can be
  /// combined/reduced as needed after all threads complete.
  class ThreadContext {
    ThreadContext(const ThreadContext &) = delete;
    ThreadContext &operator=(const ThreadContext &) = delete;

  public:
    ThreadContext() = default;
    CodeStats StatsFunction;
    CodeStats StatsCumulative;
    TimerList Timers;
  };

public:
  /// The dump stream is a log stream, the emit stream is where code is
  /// emitted, and the error stream is strictly for logging errors.
  GlobalContext(Ostream *OsDump, Ostream *OsEmit, Ostream *OsError,
                ELFStreamer *ELFStreamer);
  ~GlobalContext();

  void dumpStrings();
  ///
  /// The dump, error, and emit streams need to be used by only one
  /// thread at a time.  This is done by exclusively reserving the
  /// streams via lockStr() and unlockStr().  The OstreamLocker class
  /// can be used to conveniently manage this.
  ///
  /// The model is that a thread grabs the stream lock, then does an
  /// arbitrary amount of work during which far-away callees may grab
  /// the stream and do something with it, and finally the thread
  /// releases the stream lock.  This allows large chunks of output to
  /// be dumped or emitted without risking interleaving from multiple
  /// threads.
  void lockStr() { StrLock.lock(); }
  void unlockStr() { StrLock.unlock(); }
  Ostream &getStrDump() { return *StrDump; }
  Ostream &getStrError() { return *StrError; }
  Ostream &getStrEmit() { return *StrEmit; }
  void setStrEmit(Ostream &NewStrEmit) { StrEmit = &NewStrEmit; }
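
  // Illustrative sketch (hypothetical usage, not code from this file): a
  // thread reserves the streams for a whole chunk of output, typically via the
  // RAII OstreamLocker helper declared at the end of this header.
  //
  //   {
  //     OstreamLocker L(Ctx);
  //     Ctx->getStrDump() << "starting dump\n";
  //     // ... far-away callees may also write to Ctx->getStrDump() ...
  //   } // stream lock released here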

  LockedPtr<ErrorCode> getErrorStatus() {
    return LockedPtr<ErrorCode>(&ErrorStatus, &ErrorStatusLock);
  }

  /// \name Manage Constants.
  /// @{
  // getConstant*() functions are not const because they might add something to
  // the constant pool.
  Constant *getConstantInt(Type Ty, int64_t Value);
  Constant *getConstantInt1(int8_t ConstantInt1) {
    ConstantInt1 &= INT8_C(1);
    switch (ConstantInt1) {
    case 0:
      return getConstantZero(IceType_i1);
    case 1:
      return ConstantTrue;
    default:
      assert(false && "getConstantInt1 not on true/false");
      return getConstantInt1Internal(ConstantInt1);
    }
  }
  Constant *getConstantInt8(int8_t ConstantInt8) {
    switch (ConstantInt8) {
    case 0:
      return getConstantZero(IceType_i8);
    default:
      return getConstantInt8Internal(ConstantInt8);
    }
  }
  Constant *getConstantInt16(int16_t ConstantInt16) {
    switch (ConstantInt16) {
    case 0:
      return getConstantZero(IceType_i16);
    default:
      return getConstantInt16Internal(ConstantInt16);
    }
  }
  Constant *getConstantInt32(int32_t ConstantInt32) {
    switch (ConstantInt32) {
    case 0:
      return getConstantZero(IceType_i32);
    default:
      return getConstantInt32Internal(ConstantInt32);
    }
  }
  Constant *getConstantInt64(int64_t ConstantInt64) {
    switch (ConstantInt64) {
    case 0:
      return getConstantZero(IceType_i64);
    default:
      return getConstantInt64Internal(ConstantInt64);
    }
  }
  Constant *getConstantFloat(float Value);
  Constant *getConstantDouble(double Value);
  /// Returns a symbolic constant.
  Constant *getConstantSymWithEmitString(const RelocOffsetT Offset,
                                         const RelocOffsetArray &OffsetExpr,
                                         GlobalString Name,
                                         const std::string &EmitString);
  Constant *getConstantSym(RelocOffsetT Offset, GlobalString Name);
  Constant *getConstantExternSym(GlobalString Name);
  /// Returns an undef.
  Constant *getConstantUndef(Type Ty);
  /// Returns a zero value.
  Constant *getConstantZero(Type Ty);
  /// getConstantPool() returns a copy of the constant pool for constants of a
  /// given type.
  ConstantList getConstantPool(Type Ty);
  /// Returns a copy of the list of external symbols.
  ConstantList getConstantExternSyms();
  /// @}
  Constant *getRuntimeHelperFunc(RuntimeHelper FuncID) const {
    assert(FuncID < RuntimeHelper::H_Num);
    Constant *Result = RuntimeHelperFunc[static_cast<size_t>(FuncID)];
    assert(Result != nullptr && "No such runtime helper function");
    return Result;
  }
  GlobalString getGlobalString(const std::string &Name);

  /// Return a locked pointer to the registered jump tables.
  JumpTableDataList getJumpTables();
  /// Adds JumpTable to the list of known jump tables, for a posteriori
  /// emission.
  void addJumpTableData(JumpTableData JumpTable);

  /// Allocate data of type T using the global allocator. We allow entities
  /// allocated from this global allocator to be either trivially or
  /// non-trivially destructible. We optimize the case when T is trivially
  /// destructible by not registering a destructor. Destructors will be invoked
  /// during GlobalContext destruction in the reverse object creation order.
  template <typename T>
  typename std::enable_if<std::is_trivially_destructible<T>::value, T>::type *
  allocate() {
    return getAllocator()->Allocate<T>();
  }

  template <typename T>
  typename std::enable_if<!std::is_trivially_destructible<T>::value, T>::type *
  allocate() {
    T *Ret = getAllocator()->Allocate<T>();
    getDestructors()->emplace_back([Ret]() { Ret->~T(); });
    return Ret;
  }
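
  // Illustrative sketch (hypothetical usage, not code from this file): both
  // overloads hand out arena memory, but only the non-trivially-destructible
  // overload registers a destructor to run when the GlobalContext is
  // destroyed.
  //
  //   uint64_t *Plain = Ctx->allocate<uint64_t>();   // no destructor recorded
  //   SomeNonTrivialT *Obj = Ctx->allocate<SomeNonTrivialT>(); // recorded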

  const Intrinsics &getIntrinsicsInfo() const { return IntrinsicsInfo; }

  ELFObjectWriter *getObjectWriter() const { return ObjectWriter.get(); }

  /// Reset stats at the beginning of a function.
  void resetStats();
  void dumpStats(const Cfg *Func = nullptr);
  void statsUpdateEmitted(uint32_t InstCount);
  void statsUpdateRegistersSaved(uint32_t Num);
  void statsUpdateFrameBytes(uint32_t Bytes);
  void statsUpdateSpills();
  void statsUpdateFills();

  /// Number of Randomized or Pooled Immediates
  void statsUpdateRPImms();

  /// These are predefined TimerStackIdT values.
  enum TimerStackKind { TSK_Default = 0, TSK_Funcs, TSK_Num };

  /// newTimerStackID() creates a new TimerStack in the global space. It does
  /// not affect any TimerStack objects in TLS.
  TimerStackIdT newTimerStackID(const std::string &Name);
  /// dumpTimers() dumps the global timer data.  This assumes all the
  /// thread-local copies of timer data have been merged into the global timer
  /// data.
  void dumpTimers(TimerStackIdT StackID = TSK_Default,
                  bool DumpCumulative = true);
  void dumpLocalTimers(const std::string &TimerNameOverride,
                       TimerStackIdT StackID = TSK_Default,
                       bool DumpCumulative = true);
  /// The following methods affect only the calling thread's TLS timer data.
  TimerIdT getTimerID(TimerStackIdT StackID, const std::string &Name);
  void pushTimer(TimerIdT ID, TimerStackIdT StackID);
  void popTimer(TimerIdT ID, TimerStackIdT StackID);
  void resetTimer(TimerStackIdT StackID);
  std::string getTimerName(TimerStackIdT StackID);
  void setTimerName(TimerStackIdT StackID, const std::string &NewName);

  /// This is the first work item sequence number that the parser produces, and
  /// correspondingly the first sequence number that the emitter thread will
  /// wait for. Start numbering at 1 to leave room for a sentinel, in case e.g.
  /// we wish to inject items with a special sequence number that may be
  /// executed out of order.
  static constexpr uint32_t getFirstSequenceNumber() { return 1; }
  /// Adds a newly parsed and constructed function to the Cfg work queue.
  /// Notifies any idle workers that a new function is available for
  /// translating. May block if the work queue is too large, in order to control
  /// memory footprint.
  void optQueueBlockingPush(std::unique_ptr<OptWorkItem> Item);
  /// Takes a Cfg from the work queue for translating. May block if the work
  /// queue is currently empty. Returns nullptr if there is no more work - the
  /// queue is empty and either end() has been called or the Sequential flag was
  /// set.
  std::unique_ptr<OptWorkItem> optQueueBlockingPop();
  /// Notifies that no more work will be added to the work queue.
  void optQueueNotifyEnd() { OptQ.notifyEnd(); }
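
  // Illustrative sketch (hypothetical usage, not code from this file) of the
  // producer/consumer protocol around the Cfg work queue:
  //
  //   // Parser thread (producer):
  //   Ctx->optQueueBlockingPush(std::move(Item)); // may block if queue is full
  //   // ... after the last function has been parsed:
  //   Ctx->optQueueNotifyEnd();
  //
  //   // Translation worker (consumer):
  //   while (std::unique_ptr<OptWorkItem> Item = Ctx->optQueueBlockingPop())
  //     translate(Item->getParsedCfg()); // nullptr signals "no more work"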

  /// Emit file header for output file.
  void emitFileHeader();

  void lowerConstants();

  void lowerJumpTables();

  /// Emit target specific read-only data sections if any. E.g., for MIPS this
  /// generates a .MIPS.abiflags section.
  void emitTargetRODataSections();

  void emitQueueBlockingPush(std::unique_ptr<EmitterWorkItem> Item);
  std::unique_ptr<EmitterWorkItem> emitQueueBlockingPop();
  void emitQueueNotifyEnd() { EmitQ.notifyEnd(); }

  void initParserThread();
  void startWorkerThreads();

  void waitForWorkerThreads();

  /// Sets the instrumentation object to use.
  void setInstrumentation(std::unique_ptr<Instrumentation> Instr) {
    if (!BuildDefs::minimal())
      Instrumentor = std::move(Instr);
  }

  void instrumentFunc(Cfg *Func) {
    if (!BuildDefs::minimal() && Instrumentor)
      Instrumentor->instrumentFunc(Func);
  }

  /// Translation thread startup routine.
  void translateFunctionsWrapper(ThreadContext *MyTLS);
  /// Translate functions from the Cfg queue until the queue is empty.
  void translateFunctions();

  /// Emitter thread startup routine.
  void emitterWrapper(ThreadContext *MyTLS);
  /// Emit functions and global initializers from the emitter queue until the
  /// queue is empty.
  void emitItems();

  /// Uses DataLowering to lower Globals. Side effects:
  ///  - discards the initializer list for the global variable in Globals.
  ///  - clears the Globals array.
  void lowerGlobals(const std::string &SectionSuffix);

  /// Lowers the profile information.
  void lowerProfileData();

  void dumpConstantLookupCounts();

  /// DisposeGlobalVariablesAfterLowering controls whether the memory used by
  /// GlobalVariables can be reclaimed right after they have been lowered.
  /// @{
  bool getDisposeGlobalVariablesAfterLowering() const {
    return DisposeGlobalVariablesAfterLowering;
  }

  void setDisposeGlobalVariablesAfterLowering(bool Value) {
    DisposeGlobalVariablesAfterLowering = Value;
  }
  /// @}

  LockedPtr<StringPool> getStrings() const {
    return LockedPtr<StringPool>(Strings.get(), &StringsLock);
  }

  LockedPtr<VariableDeclarationList> getGlobals() {
    return LockedPtr<VariableDeclarationList>(&Globals, &InitAllocLock);
  }

  /// Number of function blocks that can be queued before waiting for
  /// translation threads to consume.
  static constexpr size_t MaxOptQSize = 1 << 16;

private:
  // Try to ensure mutexes are allocated on separate cache lines.

  // Destructors collaborate with Allocator
  ICE_CACHELINE_BOUNDARY;
  // Managed by getAllocator()
  mutable GlobalLockType AllocLock;
  ArenaAllocator Allocator;

  ICE_CACHELINE_BOUNDARY;
  // Managed by getInitializerAllocator()
  mutable GlobalLockType InitAllocLock;
  VariableDeclarationList Globals;

  ICE_CACHELINE_BOUNDARY;
  // Managed by getDestructors()
  using DestructorArray = std::vector<std::function<void()>>;
  mutable GlobalLockType DestructorsLock;
  DestructorArray Destructors;

  ICE_CACHELINE_BOUNDARY;
  // Managed by getStrings()
  mutable GlobalLockType StringsLock;
  std::unique_ptr<StringPool> Strings;

  ICE_CACHELINE_BOUNDARY;
  // Managed by getConstPool()
  mutable GlobalLockType ConstPoolLock;
  std::unique_ptr<ConstantPool> ConstPool;

  ICE_CACHELINE_BOUNDARY;
  // Managed by getJumpTableList()
  mutable GlobalLockType JumpTablesLock;
  JumpTableDataList JumpTableList;

  ICE_CACHELINE_BOUNDARY;
  // Managed by getErrorStatus()
  mutable GlobalLockType ErrorStatusLock;
  ErrorCode ErrorStatus;

  ICE_CACHELINE_BOUNDARY;
  // Managed by getStatsCumulative()
  mutable GlobalLockType StatsLock;
  CodeStats StatsCumulative;

  ICE_CACHELINE_BOUNDARY;
  // Managed by getTimers()
  mutable GlobalLockType TimerLock;
  TimerList Timers;

  ICE_CACHELINE_BOUNDARY;
  /// StrLock is a global lock on the dump and emit output streams.
  using StrLockType = std::mutex;
  StrLockType StrLock;
  Ostream *StrDump;  /// Stream for dumping / diagnostics
  Ostream *StrEmit;  /// Stream for code emission
  Ostream *StrError; /// Stream for logging errors.

  // True if waitForWorkerThreads() has been called.
  std::atomic_bool WaitForWorkerThreadsCalled;

  ICE_CACHELINE_BOUNDARY;

  Intrinsics IntrinsicsInfo;
  // TODO(jpp): move to EmitterContext.
  std::unique_ptr<ELFObjectWriter> ObjectWriter;
  // Value defining when to wake up the main parse thread.
  const size_t OptQWakeupSize;
  BoundedProducerConsumerQueue<OptWorkItem, MaxOptQSize> OptQ;
  BoundedProducerConsumerQueue<EmitterWorkItem> EmitQ;
  // DataLowering is only ever used by a single thread at a time (either in
  // emitItems(), or in IceCompiler::run before the compilation is over.)
  // TODO(jpp): move to EmitterContext.
  std::unique_ptr<TargetDataLowering> DataLowering;
  /// If !HasSeenCode, Subzero will accumulate all Globals (which are "true"
  /// program global variables) until the first code WorkItem is seen.
  // TODO(jpp): move to EmitterContext.
  bool HasSeenCode = false;
  // If Instrumentor is not empty then it will be used to instrument globals and
  // CFGs.
  std::unique_ptr<Instrumentation> Instrumentor = nullptr;
  // TODO(jpp): move to EmitterContext.
  VariableDeclaration *ProfileBlockInfoVarDecl = nullptr;
  std::vector<VariableDeclaration *> ProfileBlockInfos;
  /// Indicates if global variable declarations can be disposed of right after
  /// lowering.
  bool DisposeGlobalVariablesAfterLowering = true;
  Constant *ConstZeroForType[IceType_NUM];
  Constant *ConstantTrue;
  // Holds the constants representing each runtime helper function.
  Constant *RuntimeHelperFunc[static_cast<size_t>(RuntimeHelper::H_Num)];

  Constant *getConstantZeroInternal(Type Ty);
  Constant *getConstantIntInternal(Type Ty, int64_t Value);
  Constant *getConstantInt1Internal(int8_t ConstantInt1);
  Constant *getConstantInt8Internal(int8_t ConstantInt8);
  Constant *getConstantInt16Internal(int16_t ConstantInt16);
  Constant *getConstantInt32Internal(int32_t ConstantInt32);
  Constant *getConstantInt64Internal(int64_t ConstantInt64);
  LockedPtr<ArenaAllocator> getAllocator() {
    return LockedPtr<ArenaAllocator>(&Allocator, &AllocLock);
  }
  LockedPtr<VariableDeclarationList> getInitializerAllocator() {
    return LockedPtr<VariableDeclarationList>(&Globals, &InitAllocLock);
  }
  LockedPtr<ConstantPool> getConstPool() {
    return LockedPtr<ConstantPool>(ConstPool.get(), &ConstPoolLock);
  }
  LockedPtr<JumpTableDataList> getJumpTableList() {
    return LockedPtr<JumpTableDataList>(&JumpTableList, &JumpTablesLock);
  }
  LockedPtr<CodeStats> getStatsCumulative() {
    return LockedPtr<CodeStats>(&StatsCumulative, &StatsLock);
  }
  LockedPtr<TimerList> getTimers() {
    return LockedPtr<TimerList>(&Timers, &TimerLock);
  }
  LockedPtr<DestructorArray> getDestructors() {
    return LockedPtr<DestructorArray>(&Destructors, &DestructorsLock);
  }

  void accumulateGlobals(std::unique_ptr<VariableDeclarationList> Globls) {
    LockedPtr<VariableDeclarationList> _(&Globals, &InitAllocLock);
    if (Globls != nullptr) {
      Globals.merge(Globls.get());
      if (!BuildDefs::minimal() && Instrumentor != nullptr)
        Instrumentor->setHasSeenGlobals();
    }
  }

  void lowerGlobalsIfNoCodeHasBeenSeen() {
    if (HasSeenCode)
      return;
    constexpr char NoSuffix[] = "";
    lowerGlobals(NoSuffix);
    HasSeenCode = true;
  }

  void saveBlockInfoPtrs();

  llvm::SmallVector<ThreadContext *, 128> AllThreadContexts;
  llvm::SmallVector<std::thread, 128> TranslationThreads;
  llvm::SmallVector<std::thread, 128> EmitterThreads;
  // Each thread has its own TLS pointer which is also held in
  // AllThreadContexts.
  ICE_TLS_DECLARE_FIELD(ThreadContext *, TLS);

public:
  static void TlsInit();
};

/// Helper class to push and pop a timer marker. The constructor pushes a
/// marker, and the destructor pops it. This is for convenient timing of regions
/// of code.
class TimerMarker {
  TimerMarker() = delete;
  TimerMarker(const TimerMarker &) = delete;
  TimerMarker &operator=(const TimerMarker &) = delete;

public:
  TimerMarker(TimerIdT ID, GlobalContext *Ctx,
              TimerStackIdT StackID = GlobalContext::TSK_Default)
      : ID(ID), Ctx(Ctx), StackID(StackID) {
    if (BuildDefs::timers())
      push();
  }
  TimerMarker(TimerIdT ID, const Cfg *Func,
              TimerStackIdT StackID = GlobalContext::TSK_Default)
      : ID(ID), Ctx(nullptr), StackID(StackID) {
    // Ctx gets set at the beginning of pushCfg().
    if (BuildDefs::timers())
      pushCfg(Func);
  }
  TimerMarker(GlobalContext *Ctx, const std::string &FuncName)
      : ID(getTimerIdFromFuncName(Ctx, FuncName)), Ctx(Ctx),
        StackID(GlobalContext::TSK_Funcs) {
    if (BuildDefs::timers())
      push();
  }

  ~TimerMarker() {
    if (BuildDefs::timers() && Active)
      Ctx->popTimer(ID, StackID);
  }

private:
  void push();
  void pushCfg(const Cfg *Func);
  static TimerIdT getTimerIdFromFuncName(GlobalContext *Ctx,
                                         const std::string &FuncName);
  const TimerIdT ID;
  GlobalContext *Ctx;
  const TimerStackIdT StackID;
  bool Active = false;
};
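
// Illustrative sketch (hypothetical usage, not code from this file): timing a
// region of code with the RAII push/pop behavior described above. The timer
// name "example" is made up.
//
//   void doSomeWork(GlobalContext *Ctx) {
//     TimerMarker T(Ctx->getTimerID(GlobalContext::TSK_Default, "example"),
//                   Ctx);
//     // ... timed work; the destructor pops the timer on scope exit ...
//   }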

/// Helper class for locking the streams and then automatically unlocking them.
class OstreamLocker {
private:
  OstreamLocker() = delete;
  OstreamLocker(const OstreamLocker &) = delete;
  OstreamLocker &operator=(const OstreamLocker &) = delete;

public:
  explicit OstreamLocker(GlobalContext *Ctx) : Ctx(Ctx) { Ctx->lockStr(); }
  ~OstreamLocker() { Ctx->unlockStr(); }

private:
  GlobalContext *const Ctx;
};

} // end of namespace Ice

#endif // SUBZERO_SRC_ICEGLOBALCONTEXT_H