/*
 * Copyright 2014 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrTRecorder_DEFINED
#define GrTRecorder_DEFINED

#include "include/gpu/GrTypes.h"
#include "include/private/SkTLogic.h"
#include "src/core/SkArenaAlloc.h"
/**
 * Records a list of items with a common base type, optional associated data, and
 * permanent memory addresses. It supports forward iteration.
 *
 * This class allocates space for the stored items and associated data in a SkArenaAlloc.
 * There is an overhead of 1 pointer for each stored item.
 *
 * Upon reset or delete, the items are destructed in the same order they were received,
 * not reverse (stack) order.
 *
 * @param TBase Common base type of items in the list. It is assumed that the items are
 *              trivially destructible or that TBase has a virtual destructor as ~TBase()
 *              is called to destroy the items.
 */
29 template <typename TBase> class GrTRecorder {
30 private:
31 template <bool IsConst> class IterImpl;
32
33 public:
34 using iterator = IterImpl<false>;
35 using const_iterator = IterImpl<true>;
36
37 /**
38 * Create a recorder.
39 *
40 * @param initialSizeInBytes The amount of memory reserved by the recorder initially,
41 and after calls to reset().
42 */
GrTRecorder(size_t initialSizeInBytes)43 explicit GrTRecorder(size_t initialSizeInBytes) : fArena(initialSizeInBytes) {}
44 GrTRecorder(const GrTRecorder&) = delete;
45 GrTRecorder& operator=(const GrTRecorder&) = delete;
46
~GrTRecorder()47 ~GrTRecorder() { this->reset(); }
48
empty()49 bool empty() { return !SkToBool(fTail); }
50
51 /** The last item. Must not be empty. */
back()52 TBase& back() {
53 SkASSERT(!this->empty());
54 return *fTail->get();
55 }
56
57 /** Forward mutable iteration */
begin()58 iterator begin() { return iterator(fHead); }
end()59 iterator end() { return iterator(nullptr); }
60
61 /** Forward const iteration */
begin()62 const_iterator begin() const { return const_iterator(fHead); }
end()63 const_iterator end() const { return const_iterator(nullptr); }
64
65 /** Destruct all items in the list and reset to empty. Frees memory allocated from arena. */
66 void reset();
67
68 /**
69 * Emplace a new TItem (which derives from TBase) in the recorder. This requires equivalence
70 * between reinterpret_cast<TBase*> and static_cast<TBase*> when operating on TItem*.
71 * Multiple inheritance may make this not true. It is runtime asserted.
72 */
emplace(Args &&...args)73 template <typename TItem, typename... Args> TItem& emplace(Args&&... args) {
74 return this->emplaceWithData<TItem, Args...>(0, std::forward<Args>(args)...);
75 }
76
77 /**
78 * Emplace a new TItem (which derives from TBase) in the recorder with extra data space. The
79 * extra data immediately follows the stored item with no extra alignment. E.g.,
80 * void* extraData = &recorder->emplaceWithData<Subclass>(dataSize, ...) + 1;
81 *
82 * This requires equivalence between reinterpret_cast<TBase*> and static_cast<TBase*> when
83 * operating on TItem*. Multiple inheritance may make this not true. It is runtime asserted.
84 */
85 template <typename TItem, typename... Args>
86 SK_WHEN((std::is_base_of<TBase, TItem>::value), TItem&)
87 emplaceWithData(size_t extraDataSize, Args... args);
88
89 private:
90 struct Header {
91 Header* fNext = nullptr;
92 // We always store the T immediately after the header (and ensure proper alignment). See
93 // emplaceWithData() implementation.
getHeader94 TBase* get() const { return reinterpret_cast<TBase*>(const_cast<Header*>(this) + 1); }
95 };
96
97 SkArenaAlloc fArena;
98 Header* fHead = nullptr;
99 Header* fTail = nullptr;
100 };
101
102 ////////////////////////////////////////////////////////////////////////////////
103
104 template <typename TBase>
105 template <typename TItem, typename... Args>
106 inline SK_WHEN((std::is_base_of<TBase, TItem>::value), TItem&)
emplaceWithData(size_t extraDataSize,Args...args)107 GrTRecorder<TBase>::emplaceWithData(size_t extraDataSize, Args... args) {
108 static constexpr size_t kTAlign = alignof(TItem);
109 static constexpr size_t kHeaderAlign = alignof(Header);
110 static constexpr size_t kAllocAlign = kTAlign > kHeaderAlign ? kTAlign : kHeaderAlign;
111 static constexpr size_t kTItemOffset = GrSizeAlignUp(sizeof(Header), kAllocAlign);
112 // We're assuming if we back up from kItemOffset by sizeof(Header) we will still be aligned.
113 GR_STATIC_ASSERT(sizeof(Header) % alignof(Header) == 0);
114 const size_t totalSize = kTItemOffset + sizeof(TItem) + extraDataSize;
115 auto alloc = reinterpret_cast<char*>(fArena.makeBytesAlignedTo(totalSize, kAllocAlign));
116 Header* header = new (alloc + kTItemOffset - sizeof(Header)) Header();
117 if (fTail) {
118 fTail->fNext = header;
119 }
120 fTail = header;
121 if (!fHead) {
122 fHead = header;
123 }
124 auto* item = new (alloc + kTItemOffset) TItem(std::forward<Args>(args)...);
125 // We require that we can reinterpret_cast between TBase* and TItem*. Could not figure out how
126 // to statically assert this. See proposal for std::is_initial_base_of here:
127 // http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2016/p0466r0.pdf
128 SkASSERT(reinterpret_cast<uintptr_t>(item) ==
129 reinterpret_cast<uintptr_t>(static_cast<TBase*>(item)));
130 return *item;
131 }
132
reset()133 template <typename TBase> inline void GrTRecorder<TBase>::reset() {
134 for (auto& i : *this) {
135 i.~TBase();
136 }
137 GR_STATIC_ASSERT(std::is_trivially_destructible<Header>::value);
138 fHead = fTail = nullptr;
139 fArena.reset();
140 }
141
142 /**
143 * Iterates through a recorder front-to-back, const or not.
144 */
145 template <typename TBase> template <bool IsConst> class GrTRecorder<TBase>::IterImpl {
146 private:
147 using T = typename std::conditional<IsConst, const TBase, TBase>::type;
148
149 public:
150 IterImpl() = default;
151
152 IterImpl operator++() {
153 fCurr = fCurr->fNext;
154 return *this;
155 }
156
157 IterImpl operator++(int) {
158 auto old = fCurr;
159 fCurr = fCurr->fNext;
160 return {old};
161 }
162
163 T& operator*() const { return *fCurr->get(); }
164 T* operator->() const { return fCurr->get(); }
165
166 bool operator==(const IterImpl& that) const { return fCurr == that.fCurr; }
167 bool operator!=(const IterImpl& that) const { return !(*this == that); }
168
169 private:
IterImpl(Header * curr)170 IterImpl(Header* curr) : fCurr(curr) {}
171 Header* fCurr = nullptr;
172
173 friend class GrTRecorder<TBase>; // To construct from Header.
174 };
175
#endif