/**
 * Copyright (c) 2021-2024 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef PANDA_MEM_GC_G1_REM_SET_INL_H
#define PANDA_MEM_GC_G1_REM_SET_INL_H

#include "runtime/mem/rem_set.h"
#include "runtime/mem/region_space-inl.h"
#include "runtime/mem/region_allocator.h"

namespace ark::mem {

template <typename LockConfigT>
RemSet<LockConfigT>::~RemSet()
{
    Clear();
}

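// Record a reference slot located at fromObjAddr + offset: the corresponding bit is set in the bitmap
// keyed by the DEFAULT_REGION_SIZE-aligned base address of the slot.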
template <typename LockConfigT>
template <bool NEED_LOCK>
void RemSet<LockConfigT>::AddRef(const ObjectHeader *fromObjAddr, size_t offset)
{
    ASSERT(fromObjAddr != nullptr);
    auto ref = ToUintPtr(fromObjAddr) + offset;
    auto bitmapBeginAddr = ref & ~DEFAULT_REGION_MASK;
    os::memory::LockHolder<LockConfigT, NEED_LOCK> lock(remSetLock_);
    // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
    bitmaps_[bitmapBeginAddr].Set(GetIdxInBitmap(ref, bitmapBeginAddr));
}

template <typename LockConfigT>
template <bool NEED_LOCK>
void RemSet<LockConfigT>::AddRef(const void *fromAddr)
{
    ASSERT(fromAddr != nullptr);
    auto ref = ToUintPtr(fromAddr);
    auto bitmapBeginAddr = ref & ~DEFAULT_REGION_MASK;
    os::memory::LockHolder<LockConfigT, NEED_LOCK> lock(remSetLock_);
    // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
    bitmaps_[bitmapBeginAddr].Set(GetIdxInBitmap(ref, bitmapBeginAddr));
}

template <typename LockConfigT>
void RemSet<LockConfigT>::Clear()
{
    os::memory::LockHolder lock(remSetLock_);
    bitmaps_.clear();
    refRegions_.clear();
}

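// Unlink invalidRegion from the remembered-set graph: every region it references drops the slots
// recorded in invalidRegion, and every region that references into it removes invalidRegion from its
// ref-region list.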
/* static */
template <typename LockConfigT>
template <bool NEED_LOCK>
void RemSet<LockConfigT>::InvalidateRegion(Region *invalidRegion)
{
    RemSet<> *invalidRemset = invalidRegion->GetRemSet();
    os::memory::LockHolder<LockConfigT, NEED_LOCK> lock(invalidRemset->remSetLock_);

    for (Region *refReg : invalidRemset->refRegions_) {
        refReg->GetRemSet()->RemoveFromRegion<NEED_LOCK>(invalidRegion);
    }

    for (auto entry : invalidRemset->bitmaps_) {
        auto bitmapBeginAddr = entry.first;
        auto *fromRegion = AddrToRegion(ToVoidPtr(bitmapBeginAddr));
        fromRegion->GetRemSet()->RemoveRefRegion<NEED_LOCK>(invalidRegion);
    }
}

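// Drop only the outgoing references of invalidRegion: the regions it references forget the slots
// recorded in invalidRegion and its own ref-region list is cleared; incoming references are kept.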
/* static */
template <typename LockConfigT>
template <bool NEED_LOCK>
void RemSet<LockConfigT>::InvalidateRefsFromRegion(Region *invalidRegion)
{
    RemSet<> *invalidRemset = invalidRegion->GetRemSet();
    os::memory::LockHolder<LockConfigT, NEED_LOCK> lock(invalidRemset->remSetLock_);
    for (Region *refReg : invalidRemset->refRegions_) {
        refReg->GetRemSet()->RemoveFromRegion<NEED_LOCK>(invalidRegion);
    }
    invalidRemset->refRegions_.clear();
}

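// Merge another RemSet into this one: the bits of its per-region bitmaps are added into the local
// bitmaps.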
template <typename LockConfigT>
void RemSet<LockConfigT>::Merge(RemSet<> *other)
{
    for (auto &[bitmap_begin_addr, bitmap] : other->bitmaps_) {
        bitmaps_[bitmap_begin_addr].AddBits(bitmap);
    }
}

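// Collect the set of regions that contain at least one recorded reference slot, i.e. the regions the
// recorded references come from.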
template <typename LockConfigT>
PandaUnorderedSet<Region *> RemSet<LockConfigT>::GetDirtyRegions()
{
    PandaUnorderedSet<Region *> regions;
    for (auto &[bitmap_begin_addr, _] : bitmaps_) {
        auto *region = AddrToRegion(ToVoidPtr(bitmap_begin_addr));
        regions.insert(region);
    }
    return regions;
}

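// Record the reference stored at objAddr + offset which points to valueAddr: the slot address goes
// into the RemSet of the target (to) region, and the target region is added to the ref-region list of
// the source (from) region.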
/* static */
template <typename LockConfigT>
template <bool NEED_LOCK>
void RemSet<LockConfigT>::AddRefWithAddr(const ObjectHeader *objAddr, size_t offset, const ObjectHeader *valueAddr)
{
    auto *fromRegion = ObjectToRegion(objAddr);
    auto *toRegion = ObjectToRegion(valueAddr);
    // TSAN reports a potential data race here when we get the region or its RemSet, because there is
    // no explicit synchronization between these events. In reality a race is impossible: a region must
    // already have been created by the allocator in the mutator thread before any write from/to it can
    // happen, and only then do the writes occur.
    TSAN_ANNOTATE_IGNORE_WRITES_BEGIN();
    ASSERT(fromRegion != nullptr);
    ASSERT(fromRegion->GetRemSet() != nullptr);
    ASSERT(toRegion != nullptr);
    ASSERT_PRINT(toRegion->GetRemSet() != nullptr,
                 "region " << toRegion << ", obj " << objAddr << ", value " << valueAddr);

    toRegion->GetRemSet()->AddRef<NEED_LOCK>(objAddr, offset);
    fromRegion->GetRemSet()->AddRefRegion<NEED_LOCK>(toRegion);
    TSAN_ANNOTATE_IGNORE_WRITES_END();
}

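// Variant for the case when the RemSet of the source region is already known: the slot at fromAddr is
// recorded in the RemSet of the region containing valueAddr, and that region is added to fromRemset's
// ref-region list.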
template <typename LockConfigT>
template <bool NEED_LOCK>
void RemSet<LockConfigT>::AddRefWithAddr(RemSet<> *fromRemset, const void *fromAddr, const ObjectHeader *valueAddr)
{
    ASSERT(AddrToRegion(fromAddr)->GetRemSet() == fromRemset);
    auto *toRegion = ObjectToRegion(valueAddr);
    // TSAN reports a potential data race here when we get the region or its RemSet, because there is
    // no explicit synchronization between these events. In reality a race is impossible: a region must
    // already have been created by the allocator in the mutator thread before any write from/to it can
    // happen, and only then do the writes occur.
    TSAN_ANNOTATE_IGNORE_WRITES_BEGIN();
    ASSERT(toRegion != nullptr);
    ASSERT_PRINT(toRegion->GetRemSet() != nullptr,
                 "region " << toRegion << ", from addr " << fromAddr << ", value " << valueAddr);

    toRegion->GetRemSet()->AddRef<NEED_LOCK>(fromAddr);
    fromRemset->AddRefRegion<NEED_LOCK>(toRegion);
    TSAN_ANNOTATE_IGNORE_WRITES_END();
}

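// For every source region that passes regionPred, invoke the visitor on each marked memory range of
// the corresponding bitmap.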
template <typename LockConfigT>
template <typename RegionPred, typename MemVisitor>
inline void RemSet<LockConfigT>::Iterate(const RegionPred &regionPred, const MemVisitor &visitor) const
{
    for (auto &[bitmapBeginAddr, bitmap] : bitmaps_) {
        auto *region = AddrToRegion(ToVoidPtr(bitmapBeginAddr));
        if (regionPred(region)) {
            MemRange bitmapRange(bitmapBeginAddr, bitmapBeginAddr + DEFAULT_REGION_SIZE);
            bitmap.Iterate(bitmapRange, [region, visitor](const MemRange &range) { visitor(region, range); });
        }
    }
}

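// Visit every live object that holds a recorded reference into this RemSet's region, using the live
// bitmap of the source region to resolve the marked ranges to objects.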
template <typename LockConfigT>
template <typename Visitor>
inline void RemSet<LockConfigT>::IterateOverObjects(const Visitor &visitor) const
{
    auto regionPred = []([[maybe_unused]] Region *region) { return true; };
    Iterate(regionPred, [visitor](Region *region, const MemRange &range) {
        region->GetLiveBitmap()->IterateOverMarkedChunkInRange(
            ToVoidPtr(range.GetStartAddress()), ToVoidPtr(range.GetEndAddress()),
            [visitor](void *mem) { visitor(static_cast<ObjectHeader *>(mem)); });
    });
}

template <typename LockConfigT>
template <bool NEED_LOCK>
PandaUnorderedSet<Region *> *RemSet<LockConfigT>::GetRefRegions()
{
    os::memory::LockHolder<LockConfigT, NEED_LOCK> lock(remSetLock_);
    return &refRegions_;
}

template <typename LockConfigT>
template <bool NEED_LOCK>
void RemSet<LockConfigT>::AddRefRegion(Region *region)
{
    os::memory::LockHolder<LockConfigT, NEED_LOCK> lock(remSetLock_);
    refRegions_.insert(region);
}

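// Erase all bitmaps that record slots located in the given region.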
template <typename LockConfigT>
template <bool NEED_LOCK>
void RemSet<LockConfigT>::RemoveFromRegion(Region *region)
{
    os::memory::LockHolder<LockConfigT, NEED_LOCK> lock(remSetLock_);
    for (auto bitmapBeginAddr = ToUintPtr(region); bitmapBeginAddr < region->End();
         bitmapBeginAddr += DEFAULT_REGION_SIZE) {
        bitmaps_.erase(bitmapBeginAddr);
    }
}

template <typename LockConfigT>
template <bool NEED_LOCK>
void RemSet<LockConfigT>::RemoveRefRegion(Region *region)
{
    os::memory::LockHolder<LockConfigT, NEED_LOCK> lock(remSetLock_);
    refRegions_.erase(region);
}

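// Map an address inside the region starting at bitmapBeginAddr to its bit index: each bit covers
// DEFAULT_REGION_SIZE / Bitmap::GetNumBits() bytes of the region.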
template <typename LockConfigT>
size_t RemSet<LockConfigT>::GetIdxInBitmap(uintptr_t addr, uintptr_t bitmapBeginAddr)
{
    static constexpr size_t MEM_SIZE = DEFAULT_REGION_SIZE / Bitmap::GetNumBits();
    ASSERT(bitmapBeginAddr <= addr);
    ASSERT(addr < bitmapBeginAddr + DEFAULT_REGION_SIZE);
    return (addr - bitmapBeginAddr) / MEM_SIZE;
}

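// Print the recorded ranges prefixed by the type of their source region (H for IS_LARGE_OBJECT, NM for
// IS_NONMOVABLE, T for IS_OLD, Y otherwise), followed by the list of regions this region references.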
template <typename LockConfigT>
void RemSet<LockConfigT>::Dump(std::ostream &out)
{
    os::memory::LockHolder lock(remSetLock_);
    auto pred = []([[maybe_unused]] Region *region) { return true; };
    Iterate(pred, [&out](Region *region, const MemRange &range) {
        if (region->HasFlag(RegionFlag::IS_LARGE_OBJECT)) {
            out << " H";
        } else if (region->HasFlag(RegionFlag::IS_NONMOVABLE)) {
            out << " NM";
        } else if (region->HasFlag(RegionFlag::IS_OLD)) {
            out << " T";
        } else {
            out << " Y";
        }
        out << "[" << ToVoidPtr(range.GetStartAddress()) << "-" << ToVoidPtr(range.GetEndAddress()) << "]";
    });
    out << " To:";
    for (auto reg : refRegions_) {
        out << " " << *reg;
    }
    out << std::dec;
}

template <typename LockConfigT>
template <typename Visitor>
void RemSet<LockConfigT>::VisitBitmaps(const Visitor &visitor) const
{
    for (auto &[bitmapBeginAddr, bitmap] : bitmaps_) {
        visitor(bitmapBeginAddr, bitmap);
    }
}

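// Accumulate the RemSet bitmaps of all regions in cont (filtered by regionPred) into the global bitmap
// table and then iterate the merged bits with the visitor.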
template <typename RegionContainer, typename RegionPred, typename MemVisitor>
void GlobalRemSet::ProcessRemSets(const RegionContainer &cont, const RegionPred &regionPred, const MemVisitor &visitor)
{
    for (auto *region : cont) {
        FillBitmap(*region->GetRemSet(), regionPred);
    }
    IterateOverBits(visitor);
}

template <typename RegionPred>
void GlobalRemSet::FillBitmap(const RemSet<> &remSet, const RegionPred &regionPred)
{
    remSet.VisitBitmaps([this, &regionPred](uintptr_t beginAddr, const RemSet<>::Bitmap &bitmap) {
        auto *region = AddrToRegion(ToVoidPtr(beginAddr));
        if (regionPred(region)) {
            bitmaps_[beginAddr].AddBits(bitmap);
        }
    });
}

template <typename MemVisitor>
void GlobalRemSet::IterateOverBits(const MemVisitor &visitor) const
{
    for (auto &[bitmapBeginAddr, bitmap] : bitmaps_) {
        auto *region = AddrToRegion(ToVoidPtr(bitmapBeginAddr));
        MemRange bitmapRange(bitmapBeginAddr, bitmapBeginAddr + DEFAULT_REGION_SIZE);
        bitmap.Iterate(bitmapRange, [region, visitor](const MemRange &range) { visitor(region, range); });
    }
}

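// Visit the chunks of the given range that are not already marked in the global RemSet. Returns true
// only if every chunk was unmarked and successfully processed by the visitor.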
template <typename MemVisitor>
bool GlobalRemSet::IterateOverUniqueRange(Region *region, MemRange range, const MemVisitor &visitor)
{
    auto addr = range.GetStartAddress();
    auto bitmapBeginAddr = addr & ~DEFAULT_REGION_MASK;
    auto bitmapIt = bitmaps_.find(bitmapBeginAddr);
    if (bitmapIt == bitmaps_.cend()) {
        return visitor(region, range);
    }

    auto &bitmap = bitmapIt->second;
    auto endAddr = range.GetEndAddress() + 1U;
    static constexpr size_t MEM_SIZE = DEFAULT_REGION_SIZE / RemSet<>::Bitmap::GetNumBits();
    ASSERT(((endAddr - addr) % MEM_SIZE) == 0);
    bool allRefsProcessed = true;
    for (; addr != endAddr; addr += MEM_SIZE) {
        auto isMarked = bitmap.Check(RemSet<>::GetIdxInBitmap(addr, bitmapBeginAddr));
        if (isMarked) {
            allRefsProcessed = false;
            continue;
        }
        if (!visitor(region, MemRange(addr, addr + MEM_SIZE))) {
            allRefsProcessed = false;
        }
    }
    return allRefsProcessed;
}

}  // namespace ark::mem

#endif  // PANDA_MEM_GC_G1_REM_SET_INL_H