1 /*
2 * Copyright (c) 2025 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #ifndef COMMON_COMPONENTS_OBJECTS_STRING_TABLE_HASHTRIEMAP_INL_H
17 #define COMMON_COMPONENTS_OBJECTS_STRING_TABLE_HASHTRIEMAP_INL_H
18
19 #include "common_components/log/log.h"
20 #include "common_interfaces/objects/readonly_handle.h"
21 #include "common_interfaces/objects/base_string.h"
22 #include "common_components/objects/string_table/hashtriemap.h"
23 #include "common_components/objects/string_table/integer_cache.h"
24
25 namespace common {
// Expand resolves an insertion collision: oldEntry already occupies the slot the
// new entry hashes to at the current level. If the two full hashes are identical,
// the new entry takes over the slot and chains the old entry on its overflow list;
// otherwise a chain of Indirect nodes is built, one per level, until the hashes
// diverge and each entry gets its own child slot.
// Returns the node to store into the parent slot (newEntry, or the top of the new
// Indirect subtree).
// NOTE(review): `parent` is unused in this implementation — confirm whether it is
// kept for interface symmetry.
template <typename Mutex, typename ThreadHolder, TrieMapConfig::SlotBarrier SlotBarrier>
template <bool IsLock>
typename HashTrieMap<Mutex, ThreadHolder, SlotBarrier>::Node* HashTrieMap<Mutex, ThreadHolder, SlotBarrier>::Expand(
    Entry* oldEntry, Entry* newEntry, uint32_t oldHash, uint32_t newHash, uint32_t hashShift, Indirect* parent)
{
    // Check for hash conflicts.
    if (oldHash == newHash) {
        // Store the old entry in the overflow list of the new entry, and then store
        // the new entry.
        newEntry->Overflow().store(oldEntry, std::memory_order_release);
        return newEntry;
    }

    // must add an indirect node. may need to add more than one.
    Indirect* newIndirect = new Indirect();
    Indirect* top = newIndirect;

    while (true) {
#ifndef NDEBUG
        if (hashShift == TrieMapConfig::TOTAL_HASH_BITS) {
            if constexpr (IsLock) {
                // Release the table mutex before the FATAL abort so we never die
                // while still holding it.
                GetMutex().Unlock();
            }
            LOG_COMMON(FATAL) << "StringTable: ran out of hash bits while inserting";
            UNREACHABLE();
        }
#endif

        hashShift += TrieMapConfig::N_CHILDREN_LOG2; // hashShift is the level at which the parent
        // is located. Need to go deeper.
        uint32_t oldIdx = (oldHash >> hashShift) & TrieMapConfig::N_CHILDREN_MASK;
        uint32_t newIdx = (newHash >> hashShift) & TrieMapConfig::N_CHILDREN_MASK;
        if (oldIdx != newIdx) {
            // The hashes diverge at this level: place both entries and stop.
            newIndirect->GetChild(oldIdx).store(oldEntry, std::memory_order_release);
            newIndirect->GetChild(newIdx).store(newEntry, std::memory_order_release);
            break;
        }
        // Still colliding at this level: add one more Indirect and descend.
        Indirect* nextIndirect = new Indirect();

        newIndirect->GetChild(oldIdx).store(nextIndirect, std::memory_order_release);
        newIndirect = nextIndirect;
    }
    return top;
}
72
73 // Load returns the value of the key stored in the mapping, or nullptr value if not
74 template <typename Mutex, typename ThreadHolder, TrieMapConfig::SlotBarrier SlotBarrier>
75 template <bool IsCheck, typename ReadBarrier>
Load(ReadBarrier && readBarrier,const uint32_t key,BaseString * value)76 BaseString* HashTrieMap<Mutex, ThreadHolder, SlotBarrier>::Load(ReadBarrier&& readBarrier, const uint32_t key,
77 BaseString* value)
78 {
79 uint32_t hash = key;
80 Indirect* current = GetRootAndProcessHash(hash);
81
82 for (uint32_t hashShift = 0; hashShift < TrieMapConfig::TOTAL_HASH_BITS; hashShift +=
83 TrieMapConfig::N_CHILDREN_LOG2) {
84 size_t index = (hash >> hashShift) & TrieMapConfig::N_CHILDREN_MASK;
85
86 std::atomic<Node*>* slot = ¤t->GetChild(index);
87 Node* node = slot->load(std::memory_order_acquire);
88 if (node == nullptr) {
89 return nullptr;
90 }
91 if (!node->IsEntry()) {
92 current = node->AsIndirect();
93 continue;
94 }
95 for (Entry* currentEntry = node->AsEntry(); currentEntry != nullptr;
96 currentEntry = currentEntry->Overflow().load(std::memory_order_acquire)) {
97 auto oldValue = currentEntry->Value<SlotBarrier>();
98 bool valuesEqual = false;
99 if (!IsNull(oldValue) && BaseString::StringsAreEqual(std::forward<ReadBarrier>(readBarrier), oldValue,
100 value)) {
101 valuesEqual = true;
102 }
103 if constexpr (IsCheck) {
104 if (oldValue->GetMixHashcode() != value->GetMixHashcode()) {
105 continue;
106 }
107 if (!valuesEqual) {
108 return oldValue;
109 }
110 } else if (valuesEqual) {
111 return oldValue;
112 }
113 }
114 return nullptr;
115 }
116
117 LOG_COMMON(FATAL) << "StringTable: ran out of hash bits while iterating";
118 UNREACHABLE();
119 }
120
// LoadOrStore returns the existing value of the key, if it exists.
// Otherwise, call the callback function to create a value,
// store the value, and return the value
//
// IsLock: when true, the table mutex is taken (LockWithThreadState) before the
// final double-check/insert and released on every exit path; when false the
// caller must provide the necessary exclusion.
// loaderCallback lazily creates the string and is invoked at most once;
// equalsCallback decides whether an existing live string matches.
template <typename Mutex, typename ThreadHolder, TrieMapConfig::SlotBarrier SlotBarrier>
template <bool IsLock, typename LoaderCallback, typename EqualsCallback>
BaseString* HashTrieMap<Mutex, ThreadHolder, SlotBarrier>::LoadOrStore(ThreadHolder* holder, const uint32_t key,
                                                                       LoaderCallback loaderCallback,
                                                                       EqualsCallback equalsCallback)
{
    HashTrieMapInUseScope mapInUse(this);
    uint32_t hash = key;
    uint32_t hashShift = 0;
    std::atomic<Node*>* slot = nullptr;
    Node* node = nullptr;
    [[maybe_unused]] bool haveInsertPoint = false;
    ReadOnlyHandle<BaseString> str;
    bool isStrCreated = false; // Flag to track whether an object has been created
    Indirect* current = GetRootAndProcessHash(hash);
    while (true) {
        haveInsertPoint = false;
        // find the key or insert the candidate position.
        // This scan runs without the lock; the result is re-validated below.
        for (; hashShift < TrieMapConfig::TOTAL_HASH_BITS; hashShift += TrieMapConfig::N_CHILDREN_LOG2) {
            size_t index = (hash >> hashShift) & TrieMapConfig::N_CHILDREN_MASK;
            slot = &current->GetChild(index);
            node = slot->load(std::memory_order_acquire);
            if (node == nullptr) {
                // Empty slot: candidate insertion point.
                haveInsertPoint = true;
                break;
            }
            // Entry, Search in overflow
            if (!node->IsEntry()) {
                // Indirect, Next level Continue to search
                current = node->AsIndirect();
                continue;
            }
            for (Entry* currentEntry = node->AsEntry(); currentEntry != nullptr;
                 currentEntry = currentEntry->Overflow().load(std::memory_order_acquire)) {
                auto oldValue = currentEntry->Value<SlotBarrier>();
                if (IsNull(oldValue)) {
                    continue;
                }
                if (std::invoke(std::forward<EqualsCallback>(equalsCallback), oldValue)) {
#if ECMASCRIPT_ENABLE_TRACE_STRING_TABLE
                    TraceFindSuccessDepth(hashShift);
#endif
                    // Lock-free fast path: an equal live string already exists.
                    return oldValue;
                }
            }
            // Entry chain exhausted without a match: this slot is the insertion point.
            haveInsertPoint = true;
            break;
        }
#ifndef NDEBUG
        if (!haveInsertPoint) {
            LOG_COMMON(FATAL) << "StringTable: ran out of hash bits while iterating";
            UNREACHABLE();
        }
#endif
        // invoke the callback to create str (only once, even if we loop again)
        if (!isStrCreated) {
            str = std::invoke(std::forward<LoaderCallback>(loaderCallback));
            isStrCreated = true; // Tag str created
        }
        // lock and double-check
        if constexpr (IsLock) {
            GetMutex().LockWithThreadState(holder);
        }

        ASSERT(slot != nullptr);
        node = slot->load(std::memory_order_acquire);
        if (node == nullptr || node->IsEntry()) {
            // see is still real, so can continue to insert.
            break;
        }
        // The slot turned into an Indirect while we were unlocked: release the
        // lock and resume the scan one level deeper.
        if constexpr (IsLock) {
            GetMutex().Unlock();
        }
        current = node->AsIndirect();
        hashShift += TrieMapConfig::N_CHILDREN_LOG2;
    }

#if ECMASCRIPT_ENABLE_TRACE_STRING_TABLE
    TraceFindFail();
#endif
    // From here on (when IsLock) the mutex is held; re-scan the entry chain in
    // case another thread inserted the same string before we locked.
    Entry* oldEntry = nullptr;
    uint32_t oldHash = key;
    if (node != nullptr) {
        oldEntry = node->AsEntry();
        for (Entry* currentEntry = oldEntry; currentEntry;
             currentEntry = currentEntry->Overflow().load(std::memory_order_acquire)) {
            auto oldValue = currentEntry->Value<SlotBarrier>();
            if (IsNull(oldValue)) {
                continue;
            }
            if (oldValue->GetMixHashcode() != key) {
                // Different hash sharing this slot; remember it for Expand below.
                oldHash = oldValue->GetMixHashcode();
                continue;
            }
            if (std::invoke(std::forward<EqualsCallback>(equalsCallback), oldValue)) {
                if constexpr (IsLock) {
                    GetMutex().Unlock();
                }
                return oldValue;
            }
        }
    }

    BaseString* value = *str;
    ASSERT(value != nullptr);
    value->SetIsInternString();
    IntegerCache::InitIntegerCache(value);
    Entry* newEntry = new Entry(value);
    oldEntry = PruneHead(oldEntry);
    if (oldEntry == nullptr) {
        // The simple case: Create a new entry and store it.
        slot->store(newEntry, std::memory_order_release);
    } else {
        // Expand an existing entry to one or more new nodes.
        // Release the node, which will make both oldEntry and newEntry visible
        auto expandedNode = Expand<IsLock>(oldEntry, newEntry,
                                           oldHash >> TrieMapConfig::ROOT_BIT, hash, hashShift, current);
        slot->store(expandedNode, std::memory_order_release);
    }
    if constexpr (IsLock) {
        GetMutex().Unlock();
    }
    return value;
}
248
// LoadOrStorForJit need to lock the object before creating the object.
// returns the existing value of the key, if it exists.
// Otherwise, call the callback function to create a value,
// store the value, and return the value
//
// Unlike LoadOrStore, loaderCallback is invoked UNDER the table mutex (JIT
// requirement), and it is invoked once per retry round rather than being cached.
template <typename Mutex, typename ThreadHolder, TrieMapConfig::SlotBarrier SlotBarrier>
template <typename LoaderCallback, typename EqualsCallback>
BaseString* HashTrieMap<Mutex, ThreadHolder, SlotBarrier>::LoadOrStoreForJit(ThreadHolder* holder, const uint32_t key,
                                                                             LoaderCallback loaderCallback,
                                                                             EqualsCallback equalsCallback)
{
    HashTrieMapInUseScope mapInUse(this);
    uint32_t hash = key;
    uint32_t hashShift = 0;
    std::atomic<Node*>* slot = nullptr;
    Node* node = nullptr;
    [[maybe_unused]] bool haveInsertPoint = false;
    BaseString* value = nullptr;
    Indirect* current = GetRootAndProcessHash(hash);
    while (true) {
        haveInsertPoint = false;
        // find the key or insert the candidate position.
        // This scan runs without the lock; the result is re-validated below.
        for (; hashShift < TrieMapConfig::TOTAL_HASH_BITS; hashShift += TrieMapConfig::N_CHILDREN_LOG2) {
            size_t index = (hash >> hashShift) & TrieMapConfig::N_CHILDREN_MASK;
            slot = &current->GetChild(index);
            node = slot->load(std::memory_order_acquire);
            if (node == nullptr) {
                haveInsertPoint = true;
                break;
            }
            // Entry, Search in overflow
            if (!node->IsEntry()) {
                // Indirect, Next level Continue to search
                current = node->AsIndirect();
                continue;
            }
            for (Entry* currentEntry = node->AsEntry(); currentEntry != nullptr;
                 currentEntry = currentEntry->Overflow().load(std::memory_order_acquire)) {
                auto oldValue = currentEntry->Value<SlotBarrier>();
                if (IsNull(oldValue)) {
                    continue;
                }
                if (std::invoke(std::forward<EqualsCallback>(equalsCallback), oldValue)) {
                    // Lock-free fast path: an equal live string already exists.
                    return oldValue;
                }
            }
            haveInsertPoint = true;
            break;
        }
#ifndef NDEBUG
        if (!haveInsertPoint) {
            LOG_COMMON(FATAL) << "StringTable: ran out of hash bits while iterating";
            UNREACHABLE();
        }
#endif
        // Jit need to lock the object before creating the object
        GetMutex().LockWithThreadState(holder);
        // invoke the callback to create str
        value = std::invoke(std::forward<LoaderCallback>(loaderCallback));
        ASSERT(slot != nullptr);
        node = slot->load(std::memory_order_acquire);
        if (node == nullptr || node->IsEntry()) {
            // see is still real, so can continue to insert.
            break;
        }

        // The slot turned into an Indirect while we were unlocked: release the
        // lock and resume the scan one level deeper.
        GetMutex().Unlock();
        current = node->AsIndirect();
        hashShift += TrieMapConfig::N_CHILDREN_LOG2;
    }

    // The mutex is held from here on; re-scan the entry chain in case another
    // thread inserted the same string before we locked.
    Entry* oldEntry = nullptr;
    uint32_t oldHash = key;
    if (node != nullptr) {
        oldEntry = node->AsEntry();
        for (Entry* currentEntry = oldEntry; currentEntry;
             currentEntry = currentEntry->Overflow().load(std::memory_order_acquire)) {
            auto oldValue = currentEntry->Value<SlotBarrier>();
            if (IsNull(oldValue)) {
                continue;
            }
            if (oldValue->GetMixHashcode() != key) {
                // Different hash sharing this slot; remember it for Expand below.
                oldHash = oldValue->GetMixHashcode();
                continue;
            }
            if (std::invoke(std::forward<EqualsCallback>(equalsCallback), oldValue)) {
                GetMutex().Unlock();
                return oldValue;
            }
        }
    }

    ASSERT(value != nullptr);
    value->SetIsInternString();
    IntegerCache::InitIntegerCache(value);
    Entry* newEntry = new Entry(value);
    oldEntry = PruneHead(oldEntry);
    if (oldEntry == nullptr) {
        // The simple case: Create a new entry and store it.
        slot->store(newEntry, std::memory_order_release);
    } else {
        // Expand an existing entry to one or more new nodes.
        // Release the node, which will make both oldEntry and newEntry visible
        auto expandedNode = Expand<true>(oldEntry, newEntry,
                                         oldHash >> TrieMapConfig::ROOT_BIT, hash, hashShift, current);
        slot->store(expandedNode, std::memory_order_release);
    }
    GetMutex().Unlock();
    return value;
}
358
// Based on the loadResult, try the store first
// StoreOrLoad returns the existing value of the key, store the value, and return the value
//
// loadResult carries the resume state (current Indirect, slot, hashShift) from a
// previous Load miss, so the traversal does not restart from the root.
template <typename Mutex, typename ThreadHolder, TrieMapConfig::SlotBarrier SlotBarrier>
template <typename LoaderCallback, typename EqualsCallback>
BaseString* HashTrieMap<Mutex, ThreadHolder, SlotBarrier>::StoreOrLoad(ThreadHolder* holder, const uint32_t key,
                                                                       HashTrieMapLoadResult loadResult,
                                                                       LoaderCallback loaderCallback,
                                                                       EqualsCallback equalsCallback)
{
    HashTrieMapInUseScope mapInUse(this);
    uint32_t hash = key;
    ProcessHash(hash);
    uint32_t hashShift = loadResult.hashShift;
    std::atomic<Node*>* slot = loadResult.slot;
    Node* node = nullptr;
    [[maybe_unused]] bool haveInsertPoint = true;
    Indirect* current = loadResult.current;
    ReadOnlyHandle<BaseString> str = std::invoke(std::forward<LoaderCallback>(loaderCallback));
    // lock and double-check
    GetMutex().LockWithThreadState(holder);
    node = slot->load(std::memory_order_acquire);
    if (node != nullptr && !node->IsEntry()) {
        // The remembered slot turned into an Indirect while unlocked: drop the
        // lock and resume the (lock-free) scan from one level deeper.
        GetMutex().Unlock();
        current = node->AsIndirect();
        hashShift += TrieMapConfig::N_CHILDREN_LOG2;
        while (true) {
            haveInsertPoint = false;
            // find the key or insert the candidate position.
            for (; hashShift < TrieMapConfig::TOTAL_HASH_BITS; hashShift += TrieMapConfig::N_CHILDREN_LOG2) {
                size_t index = (hash >> hashShift) & TrieMapConfig::N_CHILDREN_MASK;
                slot = &current->GetChild(index);
                node = slot->load(std::memory_order_acquire);
                if (node == nullptr) {
                    haveInsertPoint = true;
                    break;
                }
                // Entry, Search in overflow
                if (node->IsEntry()) {
                    for (Entry* currentEntry = node->AsEntry(); currentEntry != nullptr;
                         currentEntry = currentEntry->Overflow().load(std::memory_order_acquire)) {
                        auto oldValue = currentEntry->Value<SlotBarrier>();
                        if (!IsNull(oldValue) && std::invoke(std::forward<EqualsCallback>(equalsCallback),
                                                             oldValue)) {
                            // Found without holding the lock (it was released above).
                            return oldValue;
                        }
                    }
                    haveInsertPoint = true;
                    break;
                }
                // Indirect, Next level Continue to search
                current = node->AsIndirect();
            }
#ifndef NDEBUG
            if (!haveInsertPoint) {
                LOG_COMMON(FATAL) << "StringTable: ran out of hash bits while iterating";
                UNREACHABLE();
            }
#endif
            // lock and double-check
            GetMutex().LockWithThreadState(holder);
            node = slot->load(std::memory_order_acquire);
            if (node == nullptr || node->IsEntry()) {
                // see is still real, so can continue to insert.
                break;
            }
            GetMutex().Unlock();
            current = node->AsIndirect();
            hashShift += TrieMapConfig::N_CHILDREN_LOG2;
        }
    }
    // The mutex is held from here on; re-scan the entry chain in case another
    // thread inserted the same string before we locked.
    Entry* oldEntry = nullptr;
    uint32_t oldHash = key;
    if (node != nullptr) {
        oldEntry = node->AsEntry();
        for (Entry* currentEntry = oldEntry; currentEntry != nullptr;
             currentEntry = currentEntry->Overflow().load(std::memory_order_acquire)) {
            auto oldValue = currentEntry->Value<SlotBarrier>();
            if (IsNull(oldValue)) {
                continue;
            }
            if (oldValue->GetMixHashcode() != key) {
                // Different hash sharing this slot; remember it for Expand below.
                oldHash = oldValue->GetMixHashcode();
                continue;
            }
            if (std::invoke(std::forward<EqualsCallback>(equalsCallback), oldValue)) {
                GetMutex().Unlock();
                return oldValue;
            }
        }
    }

    BaseString* value = *str;
    ASSERT(value != nullptr);
    value->SetIsInternString();
    IntegerCache::InitIntegerCache(value);
    Entry* newEntry = new Entry(value);
    oldEntry = PruneHead(oldEntry);
    if (oldEntry == nullptr) {
        // The simple case: Create a new entry and store it.
        slot->store(newEntry, std::memory_order_release);
    } else {
        // Expand an existing entry to one or more new nodes.
        // Release the node, which will make both oldEntry and newEntry visible
        auto expandedNode = Expand<true>(oldEntry, newEntry,
                                         oldHash >> TrieMapConfig::ROOT_BIT, hash, hashShift, current);
        slot->store(expandedNode, std::memory_order_release);
    }

    GetMutex().Unlock();
    return value;
}
470
471 // Load returns the value of the key stored in the mapping, or HashTrieMapLoadResult for StoreOrLoad
472 template <typename Mutex, typename ThreadHolder, TrieMapConfig::SlotBarrier SlotBarrier>
473 template <typename ReadBarrier>
Load(ReadBarrier && readBarrier,const uint32_t key,BaseString * value)474 HashTrieMapLoadResult HashTrieMap<Mutex, ThreadHolder, SlotBarrier>::Load(ReadBarrier&& readBarrier,
475 const uint32_t key, BaseString* value)
476 {
477 uint32_t hash = key;
478 Indirect* current = GetRootAndProcessHash(hash);
479 for (uint32_t hashShift = 0; hashShift < TrieMapConfig::TOTAL_HASH_BITS; hashShift +=
480 TrieMapConfig::N_CHILDREN_LOG2) {
481 size_t index = (hash >> hashShift) & TrieMapConfig::N_CHILDREN_MASK;
482
483 std::atomic<Node*>* slot = ¤t->GetChild(index);
484 Node* node = slot->load(std::memory_order_acquire);
485 if (node == nullptr) {
486 return {nullptr, current, hashShift, slot};
487 }
488 if (node->IsEntry()) {
489 for (Entry* currentEntry = node->AsEntry(); currentEntry != nullptr;
490 currentEntry = currentEntry->Overflow().load(std::memory_order_acquire)) {
491 auto oldValue = currentEntry->Value<SlotBarrier>();
492 if (IsNull(oldValue)) {
493 continue;
494 }
495 if (BaseString::StringsAreEqual(std::forward<ReadBarrier>(readBarrier), oldValue, value)) {
496 return {oldValue, nullptr, hashShift, nullptr};
497 }
498 }
499 return {nullptr, current, hashShift, slot};
500 }
501 current = node->AsIndirect();
502 }
503
504 LOG_COMMON(FATAL) << "StringTable: ran out of hash bits while iterating";
505 UNREACHABLE();
506 }
507
// Load returns the value of the key stored in the mapping, or HashTrieMapLoadResult for StoreOrLoad
// This overload compares against the utf8 byte range string[offset, offset + utf8Len)
// rather than against another BaseString.
template <typename Mutex, typename ThreadHolder, TrieMapConfig::SlotBarrier SlotBarrier>
template <typename ReadBarrier>
HashTrieMapLoadResult HashTrieMap<Mutex, ThreadHolder, SlotBarrier>::Load(ReadBarrier&& readBarrier, const uint32_t key,
                                                                          const ReadOnlyHandle<BaseString>& string,
                                                                          uint32_t offset, uint32_t utf8Len)
{
    uint32_t hash = key;
    Indirect* current = GetRootAndProcessHash(hash);
    const uint8_t* utf8Data = string->GetDataUtf8() + offset;
    for (uint32_t hashShift = 0; hashShift < TrieMapConfig::TOTAL_HASH_BITS; hashShift +=
        TrieMapConfig::N_CHILDREN_LOG2) {
        size_t index = (hash >> hashShift) & TrieMapConfig::N_CHILDREN_MASK;

        std::atomic<Node*>* slot = &current->GetChild(index);
        Node* node = slot->load(std::memory_order_acquire);
        if (node == nullptr) {
            // Empty slot: report it as the insertion point for StoreOrLoad.
            return {nullptr, current, hashShift, slot};
        }
        if (!node->IsEntry()) {
            // Indirect node: descend one level.
            current = node->AsIndirect();
            continue;
        }
        // Entry node: scan it and its overflow chain for a matching live string.
        for (Entry* currentEntry = node->AsEntry(); currentEntry != nullptr;
             currentEntry = currentEntry->Overflow().load(std::memory_order_acquire)) {
            auto oldValue = currentEntry->Value<SlotBarrier>();
            if (IsNull(oldValue)) {
                continue;
            }
            // NOTE(review): the trailing `true` flag presumably means "can be
            // compressed" — confirm against the BaseString API.
            if (BaseString::StringIsEqualUint8Data(std::forward<ReadBarrier>(readBarrier), oldValue, utf8Data, utf8Len,
                                                   true)) {
                return {oldValue, nullptr, hashShift, nullptr};
            }
        }
        // No match in this chain: the chain's slot is the insertion point.
        return {nullptr, current, hashShift, slot};
    }

    LOG_COMMON(FATAL) << "StringTable: ran out of hash bits while iterating";
    UNREACHABLE();
}
548
// Based on the loadResult, try the store first
// StoreOrLoad returns the existing value of the key, store the value, and return the value
//
// threadState: when true the mutex is acquired with LockWithThreadState(holder)
// (cooperating with the thread-state machinery); otherwise with a plain Lock().
// `str` already holds the string to intern; equality is checked directly with
// BaseString::StringsAreEqual instead of a callback.
template <typename Mutex, typename ThreadHolder, TrieMapConfig::SlotBarrier SlotBarrier>
template <bool threadState, typename ReadBarrier, typename HandleType>
BaseString* HashTrieMap<Mutex, ThreadHolder, SlotBarrier>::StoreOrLoad(ThreadHolder* holder, ReadBarrier&& readBarrier,
                                                                       const uint32_t key,
                                                                       HashTrieMapLoadResult loadResult,
                                                                       HandleType str)
{
    HashTrieMapInUseScope mapInUse(this);
    uint32_t hash = key;
    ProcessHash(hash);
    uint32_t hashShift = loadResult.hashShift;
    std::atomic<Node*>* slot = loadResult.slot;
    Node* node = nullptr;
    [[maybe_unused]] bool haveInsertPoint = true;
    Indirect* current = loadResult.current;
    if constexpr (threadState) {
        GetMutex().LockWithThreadState(holder);
    } else {
        GetMutex().Lock();
    }
    node = slot->load(std::memory_order_acquire);
    if (node != nullptr && !node->IsEntry()) {
        // The remembered slot turned into an Indirect while unlocked: drop the
        // lock and resume the (lock-free) scan from one level deeper.
        GetMutex().Unlock();
        current = node->AsIndirect();
        hashShift += TrieMapConfig::N_CHILDREN_LOG2;
        while (true) {
            haveInsertPoint = false;
            for (; hashShift < TrieMapConfig::TOTAL_HASH_BITS; hashShift += TrieMapConfig::N_CHILDREN_LOG2) {
                size_t index = (hash >> hashShift) & TrieMapConfig::N_CHILDREN_MASK;
                slot = &current->GetChild(index);
                node = slot->load(std::memory_order_acquire);
                if (node == nullptr) {
                    haveInsertPoint = true;
                    break;
                }
                // Entry, Search in overflow
                if (!node->IsEntry()) {
                    // Indirect, Next level Continue to search
                    current = node->AsIndirect();
                    continue;
                }
                for (Entry* currentEntry = node->AsEntry(); currentEntry != nullptr;
                     currentEntry = currentEntry->Overflow().load(std::memory_order_acquire)) {
                    BaseString* oldValue = currentEntry->Value<SlotBarrier>();
                    if (IsNull(oldValue)) {
                        continue;
                    }
                    if (BaseString::StringsAreEqual(std::forward<ReadBarrier>(readBarrier), oldValue, *str)) {
                        // Found without holding the lock (it was released above).
                        return oldValue;
                    }
                }
                haveInsertPoint = true;
                break;
            }
#ifndef NDEBUG
            if (!haveInsertPoint) {
                LOG_COMMON(FATAL) << "StringTable: ran out of hash bits while iterating";
                UNREACHABLE();
            }
#endif
            // lock and double-check
            if constexpr (threadState) {
                GetMutex().LockWithThreadState(holder);
            } else {
                GetMutex().Lock();
            }
            node = slot->load(std::memory_order_acquire);
            if (node == nullptr || node->IsEntry()) {
                // see is still real, so can continue to insert.
                break;
            }
            GetMutex().Unlock();
            current = node->AsIndirect();
            hashShift += TrieMapConfig::N_CHILDREN_LOG2;
        }
    }

    // The mutex is held from here on; re-scan the entry chain in case another
    // thread inserted the same string before we locked.
    Entry* oldEntry = nullptr;
    uint32_t oldHash = key;
    if (node != nullptr) {
        oldEntry = node->AsEntry();
        for (Entry* currentEntry = oldEntry; currentEntry != nullptr;
             currentEntry = currentEntry->Overflow().load(std::memory_order_acquire)) {
            BaseString* oldValue = currentEntry->Value<SlotBarrier>();
            if (IsNull(oldValue)) {
                continue;
            }
            if (oldValue->GetMixHashcode() != key) {
                // Different hash sharing this slot; remember it for Expand below.
                oldHash = oldValue->GetMixHashcode();
                continue;
            }
            if (BaseString::StringsAreEqual(std::forward<ReadBarrier>(readBarrier), oldValue, *str)) {
                GetMutex().Unlock();
                return oldValue;
            }
        }
    }

    BaseString* value = *str;
    ASSERT(value != nullptr);
    value->SetIsInternString();
    IntegerCache::InitIntegerCache(value);
    Entry* newEntry = new Entry(value);
    oldEntry = PruneHead(oldEntry);
    if (oldEntry == nullptr) {
        // The simple case: Create a new entry and store it.
        slot->store(newEntry, std::memory_order_release);
    } else {
        // Expand an existing entry to one or more new nodes.
        // Release the node, which will make both oldEntry and newEntry visible
        auto expandedNode = Expand<true>(oldEntry, newEntry,
                                         oldHash >> TrieMapConfig::ROOT_BIT, hash, hashShift, current);
        slot->store(expandedNode, std::memory_order_release);
    }
    GetMutex().Unlock();
    return value;
}
668
669
670 template <typename Mutex, typename ThreadHolder, TrieMapConfig::SlotBarrier SlotBarrier>
CheckWeakRef(const WeakRootVisitor & visitor,Entry * entry)671 bool HashTrieMap<Mutex, ThreadHolder, SlotBarrier>::CheckWeakRef(const WeakRootVisitor& visitor, Entry* entry)
672 {
673 panda::ecmascript::TaggedObject* object = reinterpret_cast<panda::ecmascript::TaggedObject*>(entry->Value<
674 SlotBarrier>());
675 auto fwd = visitor(object);
676 if (fwd == nullptr) {
677 LOG_COMMON(VERBOSE) << "StringTable: delete string " << std::hex << object;
678 return true;
679 } else if (fwd != object) {
680 entry->SetValue(reinterpret_cast<BaseString*>(fwd));
681 LOG_COMMON(VERBOSE) << "StringTable: forward " << std::hex << object << " -> " << fwd;
682 }
683 return false;
684 }
685
686 template <typename Mutex, typename ThreadHolder, TrieMapConfig::SlotBarrier SlotBarrier>
687 template <typename ReadBarrier>
CheckValidity(ReadBarrier && readBarrier,BaseString * value,bool & isValid)688 bool HashTrieMap<Mutex, ThreadHolder, SlotBarrier>::CheckValidity(ReadBarrier&& readBarrier, BaseString* value,
689 bool& isValid)
690 {
691 if (!value->NotTreeString()) {
692 isValid = false;
693 return false;
694 }
695 uint32_t hashcode = value->GetHashcode(std::forward<ReadBarrier>(readBarrier));
696 if (this->template Load<true>(std::forward<ReadBarrier>(readBarrier), hashcode, value) != nullptr) {
697 isValid = false;
698 return false;
699 }
700 return true;
701 }
702
703 template<typename Mutex, typename ThreadHolder, TrieMapConfig::SlotBarrier SlotBarrier>
Iter(Indirect * node,std::function<bool (Node *)> & iter)704 bool HashTrieMap<Mutex, ThreadHolder, SlotBarrier>::Iter(Indirect *node, std::function<bool(Node *)> &iter)
705 {
706 if (node == nullptr) {
707 return true;
708 }
709 if (!iter(node)) {
710 return false;
711 }
712 for (std::atomic<uint64_t> &temp: node->children_) {
713 auto &child = reinterpret_cast<std::atomic<HashTrieMapNode *> &>(temp);
714 Node *childNode = child.load(std::memory_order_acquire);
715 if (childNode == nullptr)
716 continue;
717
718 if (!(childNode->IsEntry())) {
719 // Recursive traversal of indirect nodes
720 Iter(childNode->AsIndirect(), iter);
721 continue;
722 }
723
724 for (Entry *e = childNode->AsEntry(); e != nullptr; e = e->Overflow().load(std::memory_order_acquire)) {
725 if (!iter(e)) {
726 return false;
727 }
728 }
729 }
730 return true;
731 }
732
733 template <typename Mutex, typename ThreadHolder, TrieMapConfig::SlotBarrier SlotBarrier>
CheckWeakRef(const WeakRefFieldVisitor & visitor,HashTrieMap::Entry * entry)734 bool HashTrieMap<Mutex, ThreadHolder, SlotBarrier>::CheckWeakRef(const WeakRefFieldVisitor& visitor,
735 HashTrieMap::Entry* entry)
736 {
737 // RefField only support 64-bit value, so could not cirectly pass `Entry::Value` to WeakRefFieldVisitor
738 // int 32-bit machine
739 if (SlotBarrier == TrieMapConfig::NeedSlotBarrier) {
740 uint64_t str = static_cast<uint64_t>(reinterpret_cast<uintptr_t>(entry->Value<SlotBarrier>()));
741 bool isAlive = visitor(reinterpret_cast<RefField<>&>(str));
742 entry->SetValue(reinterpret_cast<BaseString*>(static_cast<uintptr_t>(str)));
743 return isAlive;
744 }
745 uint64_t str = static_cast<uint64_t>(reinterpret_cast<uintptr_t>(entry->Value<SlotBarrier>()));
746 bool isAlive = visitor(reinterpret_cast<RefField<>&>(str));
747 if (!isAlive) {
748 return true;
749 }
750 entry->SetValue(reinterpret_cast<BaseString*>(static_cast<uintptr_t>(str)));
751 return false;
752 }
753
754 template <typename Mutex, typename ThreadHolder, TrieMapConfig::SlotBarrier SlotBarrier>
755 template <TrieMapConfig::SlotBarrier Barrier, std::enable_if_t<Barrier == TrieMapConfig::NeedSlotBarrier, int>>
ClearNodeFromGC(Indirect * parent,int index,const WeakRefFieldVisitor & visitor,std::vector<Entry * > & waitDeleteEntries)756 bool HashTrieMap<Mutex, ThreadHolder, SlotBarrier>::ClearNodeFromGC(Indirect* parent, int index,
757 const WeakRefFieldVisitor& visitor,
758 std::vector<Entry*>& waitDeleteEntries)
759 {
760 // load sub-nodes
761 Node* child = parent->GetChild(index).load(std::memory_order_relaxed);
762 if (child == nullptr)
763 return true;
764
765 if (child->IsEntry()) {
766 // Processing the overflow linked list
767 for (Entry *prev = nullptr, *current = child->AsEntry(); current != nullptr; current = current->
768 Overflow().load(std::memory_order_acquire)) {
769 if (!CheckWeakRef(visitor, current) && prev != nullptr) {
770 prev->Overflow().store(current->Overflow().load(std::memory_order_acquire), std::memory_order_release);
771 waitDeleteEntries.push_back(current);
772 } else {
773 prev = current;
774 }
775 }
776 return false;
777 } else {
778 // Recursive processing of the Indirect node
779 Indirect* indirect = child->AsIndirect();
780 uint32_t cleanCount = 0;
781 for (uint32_t i = 0; i < TrieMapConfig::INDIRECT_SIZE; ++i) {
782 if (ClearNodeFromGC(indirect, i, visitor, waitDeleteEntries)) {
783 cleanCount += 1;
784 }
785 }
786 return false;
787 }
788 }
789
// GC sweep for one child slot of `parent` (NoSlotBarrier mode, WeakRefFieldVisitor).
// Dead entries are deleted immediately (no concurrent readers in this mode), dead
// head entries are replaced by their overflow successor, and Indirect nodes whose
// children all became empty are deleted. Returns true when the slot ends up empty.
template <typename Mutex, typename ThreadHolder, TrieMapConfig::SlotBarrier SlotBarrier>
template <TrieMapConfig::SlotBarrier Barrier, std::enable_if_t<Barrier == TrieMapConfig::NoSlotBarrier, int>>
bool HashTrieMap<Mutex, ThreadHolder, SlotBarrier>::ClearNodeFromGC(Indirect* parent, int index,
                                                                    const WeakRefFieldVisitor& visitor)
{
    // load sub-nodes
    Node* child = parent->GetChild(index).load(std::memory_order_relaxed);
    if (child == nullptr) {
        return true;
    }
    if (child->IsEntry()) {
        Entry* entry = child->AsEntry();
        // Processing the overflow linked list (head handled separately below).
        // `next` is captured before any unlink so iteration survives deletion.
        for (Entry *prev = nullptr, *current = entry->Overflow().load(std::memory_order_relaxed);
             current != nullptr;) {
            Entry* next = current->Overflow().load(std::memory_order_relaxed);
            if (CheckWeakRef(visitor, current)) {
                // Remove and remove a node from a linked list
                if (prev) {
                    prev->Overflow().store(next, std::memory_order_relaxed);
                } else {
                    entry->Overflow().store(next, std::memory_order_relaxed);
                }
                delete current;
            } else {
                prev = current;
            }
            current = next;
        }
        // processing entry node (the head of the chain)
        if (CheckWeakRef(visitor, entry)) {
            Entry* e = entry->Overflow().load(std::memory_order_relaxed);
            if (e == nullptr) {
                // Delete the empty Entry node and update the parent reference
                delete entry;
                parent->GetChild(index).store(nullptr, std::memory_order_relaxed);
                return true;
            }
            // Delete the Entry node and update the parent reference
            delete entry;
            parent->GetChild(index).store(e, std::memory_order_relaxed);
        }
        return false;
    } else {
        // Recursive processing of the Indirect node
        Indirect* indirect = child->AsIndirect();
        uint32_t cleanCount = 0;
        for (uint32_t i = 0; i < TrieMapConfig::INDIRECT_SIZE; ++i) {
            if (ClearNodeFromGC(indirect, i, visitor)) {
                cleanCount += 1;
            }
        }
        // Check whether the indirect node is empty
        if (cleanCount == TrieMapConfig::INDIRECT_SIZE) {
            // Remove the empty Indirect and update the parent reference
            delete indirect;
            parent->GetChild(index).store(nullptr, std::memory_order_relaxed);
            return true;
        }
        return false;
    }
}
852
// GC sweep for one child slot of `parent` (NoSlotBarrier mode, WeakRootVisitor).
// Same shape as the WeakRefFieldVisitor overload, except that empty Indirect
// nodes are only pruned when `inuseCount_ == 0` — NOTE(review): presumably "no
// HashTrieMapInUseScope is active", so no concurrent traversal can observe the
// freed subtree; confirm against the inuseCount_ declaration.
template <typename Mutex, typename ThreadHolder, TrieMapConfig::SlotBarrier SlotBarrier>
template <TrieMapConfig::SlotBarrier Barrier, std::enable_if_t<Barrier == TrieMapConfig::NoSlotBarrier, int>>
bool HashTrieMap<Mutex, ThreadHolder, SlotBarrier>::ClearNodeFromGC(Indirect* parent, int index,
                                                                    const WeakRootVisitor& visitor)
{
    // load sub-nodes
    Node* child = parent->GetChild(index).load(std::memory_order_relaxed);
    if (child == nullptr) {
        return true;
    }

    if (child->IsEntry()) {
        Entry* entry = child->AsEntry();
        // Processing the overflow linked list (head handled separately below).
        // `next` is captured before any unlink so iteration survives deletion.
        for (Entry *prev = nullptr, *current = entry->Overflow().load(std::memory_order_relaxed); current != nullptr;) {
            Entry* next = current->Overflow().load(std::memory_order_relaxed);
            if (CheckWeakRef(visitor, current)) {
                // Remove and remove a node from a linked list
                if (prev != nullptr) {
                    prev->Overflow().store(next, std::memory_order_relaxed);
                } else {
                    entry->Overflow().store(next, std::memory_order_relaxed);
                }
                delete current;
            } else {
                prev = current;
            }
            current = next;
        }
        // processing entry node (the head of the chain)
        if (CheckWeakRef(visitor, entry)) {
            Entry* e = entry->Overflow().load(std::memory_order_relaxed);
            if (e == nullptr) {
                // Delete the empty Entry node and update the parent reference
                delete entry;
                parent->GetChild(index).store(nullptr, std::memory_order_relaxed);
                return true;
            }
            // Delete the Entry node and update the parent reference
            delete entry;
            parent->GetChild(index).store(e, std::memory_order_relaxed);
        }
        return false;
    } else {
        // Recursive processing of the Indirect node
        Indirect* indirect = child->AsIndirect();
        uint32_t cleanCount = 0;
        for (uint32_t i = 0; i < TrieMapConfig::INDIRECT_SIZE; ++i) {
            if (ClearNodeFromGC(indirect, i, visitor)) {
                cleanCount += 1;
            }
        }
        // Check whether the indirect node is empty
        if (cleanCount == TrieMapConfig::INDIRECT_SIZE && inuseCount_ == 0) {
            // Remove the empty Indirect and update the parent reference
            delete indirect;
            parent->GetChild(index).store(nullptr, std::memory_order_relaxed);
            return true;
        }
        return false;
    }
}
914 }
915 #endif //COMMON_COMPONENTS_OBJECTS_STRING_TABLE_HASHTRIEMAP_INL_H
916