1 // Copyright 2009 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/handles/global-handles.h"
6
7 #include <algorithm>
8 #include <cstdint>
9 #include <map>
10
11 #include "src/api/api-inl.h"
12 #include "src/base/compiler-specific.h"
13 #include "src/execution/vm-state-inl.h"
14 #include "src/heap/embedder-tracing.h"
15 #include "src/heap/heap-write-barrier-inl.h"
16 #include "src/init/v8.h"
17 #include "src/logging/counters.h"
18 #include "src/objects/objects-inl.h"
19 #include "src/objects/slots.h"
20 #include "src/objects/visitors.h"
21 #include "src/sanitizer/asan.h"
22 #include "src/tasks/cancelable-task.h"
23 #include "src/tasks/task-utils.h"
24 #include "src/utils/utils.h"
25
26 namespace v8 {
27 namespace internal {
28
namespace {

// Specifies whether V8 expects the holder memory of a global handle to be live
// or dead.
enum class HandleHolder { kLive, kDead };

// Number of nodes per NodeBlock. Chosen so that a node's index fits into the
// uint8_t index_ field of NodeBase.
constexpr size_t kBlockSize = 256;

}  // namespace
38
// A fixed-size block of kBlockSize nodes. Blocks are linked into two
// intrusive lists: the singly-linked list of all blocks (next_) and the
// doubly-linked list of blocks containing at least one used node
// (next_used_/prev_used_).
template <class _NodeType>
class GlobalHandles::NodeBlock final {
 public:
  using BlockType = NodeBlock<_NodeType>;
  using NodeType = _NodeType;

  // Recovers the containing block from a node pointer. Relies on nodes_
  // being the first member (offset 0) and on each node storing its index.
  V8_INLINE static const NodeBlock* From(const NodeType* node);
  V8_INLINE static NodeBlock* From(NodeType* node);

  NodeBlock(GlobalHandles* global_handles,
            GlobalHandles::NodeSpace<NodeType>* space,
            NodeBlock* next) V8_NOEXCEPT : next_(next),
                                           global_handles_(global_handles),
                                           space_(space) {}

  NodeType* at(size_t index) { return &nodes_[index]; }
  const NodeType* at(size_t index) const { return &nodes_[index]; }
  GlobalHandles::NodeSpace<NodeType>* space() const { return space_; }
  GlobalHandles* global_handles() const { return global_handles_; }

  // Return true when the block transitions between "no used nodes" and
  // "some used nodes", i.e. when it must be added to or removed from the
  // used-blocks list.
  V8_INLINE bool IncreaseUsage();
  V8_INLINE bool DecreaseUsage();

  // Add/remove this block to/from the doubly-linked used-blocks list
  // anchored at *top.
  V8_INLINE void ListAdd(NodeBlock** top);
  V8_INLINE void ListRemove(NodeBlock** top);

  NodeBlock* next() const { return next_; }
  NodeBlock* next_used() const { return next_used_; }

 private:
  // Must stay the first member so From() can cast the first node's address
  // to the block's address.
  NodeType nodes_[kBlockSize];
  NodeBlock* const next_;
  GlobalHandles* const global_handles_;
  GlobalHandles::NodeSpace<NodeType>* const space_;
  NodeBlock* next_used_ = nullptr;
  NodeBlock* prev_used_ = nullptr;
  uint32_t used_nodes_ = 0;

  DISALLOW_COPY_AND_ASSIGN(NodeBlock);
};
79
80 template <class NodeType>
81 const GlobalHandles::NodeBlock<NodeType>*
From(const NodeType * node)82 GlobalHandles::NodeBlock<NodeType>::From(const NodeType* node) {
83 const NodeType* firstNode = node - node->index();
84 const BlockType* block = reinterpret_cast<const BlockType*>(firstNode);
85 DCHECK_EQ(node, block->at(node->index()));
86 return block;
87 }
88
89 template <class NodeType>
From(NodeType * node)90 GlobalHandles::NodeBlock<NodeType>* GlobalHandles::NodeBlock<NodeType>::From(
91 NodeType* node) {
92 NodeType* firstNode = node - node->index();
93 BlockType* block = reinterpret_cast<BlockType*>(firstNode);
94 DCHECK_EQ(node, block->at(node->index()));
95 return block;
96 }
97
98 template <class NodeType>
IncreaseUsage()99 bool GlobalHandles::NodeBlock<NodeType>::IncreaseUsage() {
100 DCHECK_LT(used_nodes_, kBlockSize);
101 return used_nodes_++ == 0;
102 }
103
104 template <class NodeType>
ListAdd(BlockType ** top)105 void GlobalHandles::NodeBlock<NodeType>::ListAdd(BlockType** top) {
106 BlockType* old_top = *top;
107 *top = this;
108 next_used_ = old_top;
109 prev_used_ = nullptr;
110 if (old_top != nullptr) {
111 old_top->prev_used_ = this;
112 }
113 }
114
115 template <class NodeType>
DecreaseUsage()116 bool GlobalHandles::NodeBlock<NodeType>::DecreaseUsage() {
117 DCHECK_GT(used_nodes_, 0);
118 return --used_nodes_ == 0;
119 }
120
121 template <class NodeType>
ListRemove(BlockType ** top)122 void GlobalHandles::NodeBlock<NodeType>::ListRemove(BlockType** top) {
123 if (next_used_ != nullptr) next_used_->prev_used_ = prev_used_;
124 if (prev_used_ != nullptr) prev_used_->next_used_ = next_used_;
125 if (this == *top) {
126 *top = next_used_;
127 }
128 }
129
// Forward iterator over all nodes of the used blocks of a NodeSpace.
// Iterates node by node within a block, then advances via next_used(); the
// end iterator is represented by a nullptr block.
template <class BlockType>
class GlobalHandles::NodeIterator final {
 public:
  using NodeType = typename BlockType::NodeType;

  // Iterator traits.
  using iterator_category = std::forward_iterator_tag;
  using difference_type = std::ptrdiff_t;
  using value_type = NodeType*;
  using reference = value_type;
  using pointer = value_type*;

  explicit NodeIterator(BlockType* block) V8_NOEXCEPT : block_(block) {}
  NodeIterator(NodeIterator&& other) V8_NOEXCEPT : block_(other.block_),
                                                   index_(other.index_) {}

  // Comparing only block_ is sufficient here: iteration compares against
  // the end iterator (nullptr block), and index_ is reset to 0 whenever the
  // iterator crosses into a new block.
  bool operator==(const NodeIterator& other) const {
    return block_ == other.block_;
  }
  bool operator!=(const NodeIterator& other) const {
    return block_ != other.block_;
  }

  NodeIterator& operator++() {
    if (++index_ < kBlockSize) return *this;
    // End of block reached; move on to the next used block (or nullptr).
    index_ = 0;
    block_ = block_->next_used();
    return *this;
  }

  NodeType* operator*() { return block_->at(index_); }
  NodeType* operator->() { return block_->at(index_); }

 private:
  BlockType* block_ = nullptr;
  size_t index_ = 0;

  DISALLOW_COPY_AND_ASSIGN(NodeIterator);
};
169
// Storage space for nodes of one node type. Blocks are allocated on demand
// and never shrunk; freed nodes are kept on an intrusive free list threaded
// through the nodes themselves.
template <class NodeType>
class GlobalHandles::NodeSpace final {
 public:
  using BlockType = NodeBlock<NodeType>;
  using iterator = NodeIterator<BlockType>;

  static NodeSpace* From(NodeType* node);
  // Returns |node| to the free list of the space owning its block.
  static void Release(NodeType* node);

  explicit NodeSpace(GlobalHandles* global_handles) V8_NOEXCEPT
      : global_handles_(global_handles) {}
  ~NodeSpace();

  // Hands out a free node initialized to point at |object|, allocating a new
  // block if the free list is empty.
  V8_INLINE NodeType* Acquire(Object object);

  // Iteration only visits blocks that contain at least one used node.
  iterator begin() { return iterator(first_used_block_); }
  iterator end() { return iterator(nullptr); }

  // Total memory reserved by all allocated blocks, used or not.
  size_t TotalSize() const { return blocks_ * sizeof(NodeType) * kBlockSize; }
  size_t handles_count() const { return handles_count_; }

 private:
  // Pushes every node of a freshly allocated block onto the free list.
  void PutNodesOnFreeList(BlockType* block);
  V8_INLINE void Free(NodeType* node);

  GlobalHandles* const global_handles_;
  BlockType* first_block_ = nullptr;       // All allocated blocks.
  BlockType* first_used_block_ = nullptr;  // Blocks with used nodes.
  NodeType* first_free_ = nullptr;         // Head of the free-node list.
  size_t blocks_ = 0;
  size_t handles_count_ = 0;
};
202
203 template <class NodeType>
~NodeSpace()204 GlobalHandles::NodeSpace<NodeType>::~NodeSpace() {
205 auto* block = first_block_;
206 while (block != nullptr) {
207 auto* tmp = block->next();
208 delete block;
209 block = tmp;
210 }
211 }
212
213 template <class NodeType>
Acquire(Object object)214 NodeType* GlobalHandles::NodeSpace<NodeType>::Acquire(Object object) {
215 if (first_free_ == nullptr) {
216 first_block_ = new BlockType(global_handles_, this, first_block_);
217 blocks_++;
218 PutNodesOnFreeList(first_block_);
219 }
220 DCHECK_NOT_NULL(first_free_);
221 NodeType* node = first_free_;
222 first_free_ = first_free_->next_free();
223 node->Acquire(object);
224 BlockType* block = BlockType::From(node);
225 if (block->IncreaseUsage()) {
226 block->ListAdd(&first_used_block_);
227 }
228 global_handles_->isolate()->counters()->global_handles()->Increment();
229 handles_count_++;
230 DCHECK(node->IsInUse());
231 return node;
232 }
233
234 template <class NodeType>
PutNodesOnFreeList(BlockType * block)235 void GlobalHandles::NodeSpace<NodeType>::PutNodesOnFreeList(BlockType* block) {
236 for (int32_t i = kBlockSize - 1; i >= 0; --i) {
237 NodeType* node = block->at(i);
238 const uint8_t index = static_cast<uint8_t>(i);
239 DCHECK_EQ(i, index);
240 node->set_index(index);
241 node->Free(first_free_);
242 first_free_ = node;
243 }
244 }
245
246 template <class NodeType>
Release(NodeType * node)247 void GlobalHandles::NodeSpace<NodeType>::Release(NodeType* node) {
248 BlockType* block = BlockType::From(node);
249 block->space()->Free(node);
250 }
251
252 template <class NodeType>
Free(NodeType * node)253 void GlobalHandles::NodeSpace<NodeType>::Free(NodeType* node) {
254 node->Release(first_free_);
255 first_free_ = node;
256 BlockType* block = BlockType::From(node);
257 if (block->DecreaseUsage()) {
258 block->ListRemove(&first_used_block_);
259 }
260 global_handles_->isolate()->counters()->global_handles()->Decrement();
261 handles_count_--;
262 }
263
// CRTP base class for the internal node representations of regular and
// traced global handles. |Child| provides MarkAsFree/MarkAsUsed/IsInUse and
// the ClearImplFields/CheckImplFieldsAreCleared hooks.
template <class Child>
class NodeBase {
 public:
  // A node is stored directly at the handle location, so a location pointer
  // can be reinterpreted as the node itself.
  static const Child* FromLocation(const Address* location) {
    return reinterpret_cast<const Child*>(location);
  }

  static Child* FromLocation(Address* location) {
    return reinterpret_cast<Child*>(location);
  }

  NodeBase() {
    // These offsets are part of the contract with the embedder API (see
    // Internals); assert they hold for this layout.
    DCHECK_EQ(offsetof(NodeBase, object_), 0);
    DCHECK_EQ(offsetof(NodeBase, class_id_), Internals::kNodeClassIdOffset);
    DCHECK_EQ(offsetof(NodeBase, flags_), Internals::kNodeFlagsOffset);
  }

#ifdef ENABLE_HANDLE_ZAPPING
  ~NodeBase() {
    ClearFields();
    data_.next_free = nullptr;
    index_ = 0;
  }
#endif

  // Puts this node on |free_list|, clearing and zapping its fields.
  void Free(Child* free_list) {
    ClearFields();
    AsChild()->MarkAsFree();
    data_.next_free = free_list;
  }

  // Takes this node off the free list and points it at |object|.
  void Acquire(Object object) {
    DCHECK(!AsChild()->IsInUse());
    CheckFieldsAreCleared();
    object_ = object.ptr();
    AsChild()->MarkAsUsed();
    data_.parameter = nullptr;
    DCHECK(AsChild()->IsInUse());
  }

  // Releases an in-use node back onto |free_list|.
  void Release(Child* free_list) {
    DCHECK(AsChild()->IsInUse());
    Free(free_list);
    DCHECK(!AsChild()->IsInUse());
  }

  Object object() const { return Object(object_); }
  FullObjectSlot location() { return FullObjectSlot(&object_); }
  Handle<Object> handle() { return Handle<Object>(&object_); }

  // Index of this node inside its containing block.
  uint8_t index() const { return index_; }
  void set_index(uint8_t value) { index_ = value; }

  uint16_t wrapper_class_id() const { return class_id_; }
  bool has_wrapper_class_id() const {
    return class_id_ != v8::HeapProfiler::kPersistentHandleNoClassId;
  }

  // Accessors for next free node in the free list.
  Child* next_free() {
    DCHECK(!AsChild()->IsInUse());
    return data_.next_free;
  }

  void set_parameter(void* parameter) {
    DCHECK(AsChild()->IsInUse());
    data_.parameter = parameter;
  }
  void* parameter() const {
    DCHECK(AsChild()->IsInUse());
    return data_.parameter;
  }

 protected:
  Child* AsChild() { return reinterpret_cast<Child*>(this); }
  const Child* AsChild() const { return reinterpret_cast<const Child*>(this); }

  void ClearFields() {
    // Zap the values for eager trapping.
    object_ = kGlobalHandleZapValue;
    class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId;
    AsChild()->ClearImplFields();
  }

  void CheckFieldsAreCleared() {
    DCHECK_EQ(kGlobalHandleZapValue, object_);
    DCHECK_EQ(v8::HeapProfiler::kPersistentHandleNoClassId, class_id_);
    AsChild()->CheckImplFieldsAreCleared();
  }

  // Storage for object pointer.
  //
  // Placed first to avoid offset computation. The stored data is equivalent to
  // an Object. It is stored as a plain Address for convenience (smallest number
  // of casts), and because it is a private implementation detail: the public
  // interface provides type safety.
  Address object_;

  // Class id set by the embedder.
  uint16_t class_id_;

  // Index in the containing handle block.
  uint8_t index_;

  uint8_t flags_;

  // The meaning of this field depends on node state:
  // - Node in free list: Stores next free node pointer.
  // - Otherwise, specific to the node implementation.
  union {
    Child* next_free;
    void* parameter;
  } data_;
};
378
379 namespace {
380
ExtractInternalFields(JSObject jsobject,void ** embedder_fields,int len)381 void ExtractInternalFields(JSObject jsobject, void** embedder_fields, int len) {
382 int field_count = jsobject.GetEmbedderFieldCount();
383 IsolateRoot isolate = GetIsolateForPtrCompr(jsobject);
384 for (int i = 0; i < len; ++i) {
385 if (field_count == i) break;
386 void* pointer;
387 if (EmbedderDataSlot(jsobject, i).ToAlignedPointer(isolate, &pointer)) {
388 embedder_fields[i] = pointer;
389 }
390 }
391 }
392
393 } // namespace
394
// Internal node representation for regular (non-traced) global handles.
class GlobalHandles::Node final : public NodeBase<GlobalHandles::Node> {
 public:
  // State transition diagram:
  // FREE -> NORMAL <-> WEAK -> PENDING -> NEAR_DEATH -> { NORMAL, WEAK, FREE }
  enum State {
    FREE = 0,
    NORMAL,      // Normal global handle.
    WEAK,        // Flagged as weak but not yet finalized.
    PENDING,     // Has been recognized as only reachable by weak handles.
    NEAR_DEATH,  // Callback has informed the handle is near death.
    NUMBER_OF_NODE_STATES
  };

  Node() {
    // The state encoding is part of the contract with the embedder API
    // (see Internals); assert the bit layouts agree.
    STATIC_ASSERT(static_cast<int>(NodeState::kMask) ==
                  Internals::kNodeStateMask);
    STATIC_ASSERT(WEAK == Internals::kNodeStateIsWeakValue);
    STATIC_ASSERT(PENDING == Internals::kNodeStateIsPendingValue);
    set_in_young_list(false);
  }

  void Zap() {
    DCHECK(IsInUse());
    // Zap the values for eager trapping.
    object_ = kGlobalHandleZapValue;
  }

  // Label set via AnnotateStrongRetainer(); only valid while the node is
  // NORMAL, since the parameter slot is reused in other states.
  const char* label() const {
    return state() == NORMAL ? reinterpret_cast<char*>(data_.parameter)
                             : nullptr;
  }

  // State and flag accessors.

  State state() const { return NodeState::decode(flags_); }
  void set_state(State state) { flags_ = NodeState::update(flags_, state); }

  bool is_in_young_list() const { return IsInYoungList::decode(flags_); }
  void set_in_young_list(bool v) { flags_ = IsInYoungList::update(flags_, v); }

  WeaknessType weakness_type() const {
    return NodeWeaknessType::decode(flags_);
  }
  void set_weakness_type(WeaknessType weakness_type) {
    flags_ = NodeWeaknessType::update(flags_, weakness_type);
  }

  bool IsWeak() const { return state() == WEAK; }

  bool IsInUse() const { return state() != FREE; }

  bool IsPhantomCallback() const {
    return weakness_type() == PHANTOM_WEAK ||
           weakness_type() == PHANTOM_WEAK_2_EMBEDDER_FIELDS;
  }

  bool IsPhantomResetHandle() const {
    return weakness_type() == PHANTOM_WEAK_RESET_HANDLE;
  }

  bool IsFinalizerHandle() const { return weakness_type() == FINALIZER_WEAK; }

  bool IsPendingPhantomCallback() const {
    return state() == PENDING && IsPhantomCallback();
  }

  bool IsPendingPhantomResetHandle() const {
    return state() == PENDING && IsPhantomResetHandle();
  }

  bool IsPendingFinalizer() const {
    return state() == PENDING && weakness_type() == FINALIZER_WEAK;
  }

  bool IsPending() const { return state() == PENDING; }

  // A node retains its object unless it is free, or near death without a
  // finalizer-type weakness.
  bool IsRetainer() const {
    return state() != FREE &&
           !(state() == NEAR_DEATH && weakness_type() != FINALIZER_WEAK);
  }

  bool IsStrongRetainer() const { return state() == NORMAL; }

  bool IsWeakRetainer() const {
    return state() == WEAK || state() == PENDING ||
           (state() == NEAR_DEATH && weakness_type() == FINALIZER_WEAK);
  }

  void MarkPending() {
    DCHECK(state() == WEAK);
    set_state(PENDING);
  }

  bool has_callback() const { return weak_callback_ != nullptr; }

  // Accessors for next free node in the free list.
  Node* next_free() {
    DCHECK_EQ(FREE, state());
    return data_.next_free;
  }

  // Makes the handle weak with a phantom/finalizer callback of the given
  // type; |parameter| is handed back to the callback.
  void MakeWeak(void* parameter,
                WeakCallbackInfo<void>::Callback phantom_callback,
                v8::WeakCallbackType type) {
    DCHECK_NOT_NULL(phantom_callback);
    DCHECK(IsInUse());
    CHECK_NE(object_, kGlobalHandleZapValue);
    set_state(WEAK);
    switch (type) {
      case v8::WeakCallbackType::kParameter:
        set_weakness_type(PHANTOM_WEAK);
        break;
      case v8::WeakCallbackType::kInternalFields:
        set_weakness_type(PHANTOM_WEAK_2_EMBEDDER_FIELDS);
        break;
      case v8::WeakCallbackType::kFinalizer:
        set_weakness_type(FINALIZER_WEAK);
        break;
    }
    set_parameter(parameter);
    weak_callback_ = phantom_callback;
  }

  // Makes the handle weak such that *location_addr is reset to nullptr when
  // the object dies, without invoking any callback.
  void MakeWeak(Address** location_addr) {
    DCHECK(IsInUse());
    CHECK_NE(object_, kGlobalHandleZapValue);
    set_state(WEAK);
    set_weakness_type(PHANTOM_WEAK_RESET_HANDLE);
    set_parameter(location_addr);
    weak_callback_ = nullptr;
  }

  // Turns the handle strong again and returns the previously set parameter.
  void* ClearWeakness() {
    DCHECK(IsInUse());
    void* p = parameter();
    set_state(NORMAL);
    set_parameter(nullptr);
    return p;
  }

  void AnnotateStrongRetainer(const char* label) {
    DCHECK_EQ(state(), NORMAL);
    data_.parameter = const_cast<char*>(label);
  }

  // Collects the pending phantom callback (optionally extracting up to two
  // embedder fields), zaps the handle location, and transitions the node to
  // NEAR_DEATH.
  void CollectPhantomCallbackData(
      std::vector<std::pair<Node*, PendingPhantomCallback>>*
          pending_phantom_callbacks) {
    DCHECK(weakness_type() == PHANTOM_WEAK ||
           weakness_type() == PHANTOM_WEAK_2_EMBEDDER_FIELDS);
    DCHECK(state() == PENDING);
    DCHECK_NOT_NULL(weak_callback_);

    void* embedder_fields[v8::kEmbedderFieldsInWeakCallback] = {nullptr,
                                                                nullptr};
    if (weakness_type() != PHANTOM_WEAK && object().IsJSObject()) {
      ExtractInternalFields(JSObject::cast(object()), embedder_fields,
                            v8::kEmbedderFieldsInWeakCallback);
    }

    // Zap with something dangerous.
    location().store(Object(0xCA11));

    pending_phantom_callbacks->push_back(std::make_pair(
        this,
        PendingPhantomCallback(weak_callback_, parameter(), embedder_fields)));
    DCHECK(IsInUse());
    set_state(NEAR_DEATH);
  }

  // Resets the embedder-owned handle to nullptr and returns the node to its
  // space. Regular nodes only support live holder memory.
  void ResetPhantomHandle(HandleHolder handle_holder) {
    DCHECK_EQ(HandleHolder::kLive, handle_holder);
    DCHECK_EQ(PHANTOM_WEAK_RESET_HANDLE, weakness_type());
    DCHECK_EQ(PENDING, state());
    DCHECK_NULL(weak_callback_);
    Address** handle = reinterpret_cast<Address**>(parameter());
    *handle = nullptr;
    NodeSpace<Node>::Release(this);
  }

  // Invokes the finalizer callback for a pending finalizer node.
  void PostGarbageCollectionProcessing(Isolate* isolate) {
    // This method invokes a finalizer. Updating the method name would require
    // adjusting CFI blocklist as weak_callback_ is invoked on the wrong type.
    CHECK(IsPendingFinalizer());
    set_state(NEAR_DEATH);
    // Check that we are not passing a finalized external string to
    // the callback.
    DCHECK(!object().IsExternalOneByteString() ||
           ExternalOneByteString::cast(object()).resource() != nullptr);
    DCHECK(!object().IsExternalTwoByteString() ||
           ExternalTwoByteString::cast(object()).resource() != nullptr);
    // Leaving V8.
    VMState<EXTERNAL> vmstate(isolate);
    HandleScope handle_scope(isolate);
    void* embedder_fields[v8::kEmbedderFieldsInWeakCallback] = {nullptr,
                                                                nullptr};
    v8::WeakCallbackInfo<void> data(reinterpret_cast<v8::Isolate*>(isolate),
                                    parameter(), embedder_fields, nullptr);
    weak_callback_(data);
    // For finalizers the handle must have either been reset or made strong.
    // Both cases reset the state.
    CHECK_NE(NEAR_DEATH, state());
  }

  void MarkAsFree() { set_state(FREE); }
  void MarkAsUsed() { set_state(NORMAL); }

  GlobalHandles* global_handles() {
    return NodeBlock<Node>::From(this)->global_handles();
  }

 private:
  // Fields that are not used for managing node memory.
  void ClearImplFields() { weak_callback_ = nullptr; }

  void CheckImplFieldsAreCleared() { DCHECK_EQ(nullptr, weak_callback_); }

  // Bit layout of flags_: node State, in-young-list flag, and weakness type.
  using NodeState = base::BitField8<State, 0, 3>;
  using IsInYoungList = NodeState::Next<bool, 1>;
  using NodeWeaknessType = IsInYoungList::Next<WeaknessType, 2>;

  // Handle specific callback - might be a weak reference in disguise.
  WeakCallbackInfo<void>::Callback weak_callback_;

  friend class NodeBase<Node>;

  DISALLOW_COPY_AND_ASSIGN(Node);
};
625
// Internal node representation for traced handles (TracedReference and
// friends), which can also live on the stack (see OnStackTracedNodeSpace).
class GlobalHandles::TracedNode final
    : public NodeBase<GlobalHandles::TracedNode> {
 public:
  TracedNode() { set_in_young_list(false); }

  // Copy and move ctors are used when constructing a TracedNode when recording
  // a node for on-stack data structures. (Older compilers may refer to copy
  // instead of move ctor.)
  TracedNode(TracedNode&& other) V8_NOEXCEPT = default;
  TracedNode(const TracedNode& other) V8_NOEXCEPT = default;

  enum State { FREE = 0, NORMAL, NEAR_DEATH };

  State state() const { return NodeState::decode(flags_); }
  void set_state(State state) { flags_ = NodeState::update(flags_, state); }

  void MarkAsFree() { set_state(FREE); }
  void MarkAsUsed() { set_state(NORMAL); }
  bool IsInUse() const { return state() != FREE; }
  bool IsRetainer() const { return state() == NORMAL; }
  // A phantom-reset handle is one without a finalization callback.
  bool IsPhantomResetHandle() const { return callback_ == nullptr; }

  bool is_in_young_list() const { return IsInYoungList::decode(flags_); }
  void set_in_young_list(bool v) { flags_ = IsInYoungList::update(flags_, v); }

  bool is_root() const { return IsRoot::decode(flags_); }
  void set_root(bool v) { flags_ = IsRoot::update(flags_, v); }

  bool has_destructor() const { return HasDestructor::decode(flags_); }
  void set_has_destructor(bool v) { flags_ = HasDestructor::update(flags_, v); }

  bool markbit() const { return Markbit::decode(flags_); }
  void clear_markbit() { flags_ = Markbit::update(flags_, false); }
  void set_markbit() { flags_ = Markbit::update(flags_, true); }

  bool is_on_stack() const { return IsOnStack::decode(flags_); }
  void set_is_on_stack(bool v) { flags_ = IsOnStack::update(flags_, v); }

  void SetFinalizationCallback(void* parameter,
                               WeakCallbackInfo<void>::Callback callback) {
    set_parameter(parameter);
    callback_ = callback;
  }
  bool HasFinalizationCallback() const { return callback_ != nullptr; }

  void CopyObjectReference(const TracedNode& other) { object_ = other.object_; }

  // Collects the pending finalization callback (extracting up to two
  // embedder fields), zaps the handle location, and transitions the node to
  // NEAR_DEATH.
  void CollectPhantomCallbackData(
      std::vector<std::pair<TracedNode*, PendingPhantomCallback>>*
          pending_phantom_callbacks) {
    DCHECK(IsInUse());
    DCHECK_NOT_NULL(callback_);

    void* embedder_fields[v8::kEmbedderFieldsInWeakCallback] = {nullptr,
                                                                nullptr};
    ExtractInternalFields(JSObject::cast(object()), embedder_fields,
                          v8::kEmbedderFieldsInWeakCallback);

    // Zap with something dangerous.
    location().store(Object(0xCA11));

    pending_phantom_callbacks->push_back(std::make_pair(
        this, PendingPhantomCallback(callback_, parameter(), embedder_fields)));
    set_state(NEAR_DEATH);
  }

  // Resets the embedder-owned handle (only if its holder memory is still
  // live) and returns the node to its space.
  void ResetPhantomHandle(HandleHolder handle_holder) {
    DCHECK(IsInUse());
    if (handle_holder == HandleHolder::kLive) {
      Address** handle = reinterpret_cast<Address**>(data_.parameter);
      *handle = nullptr;
    }
    NodeSpace<TracedNode>::Release(this);
    DCHECK(!IsInUse());
  }

  static void Verify(GlobalHandles* global_handles, const Address* const* slot);

 protected:
  // Bit layout of flags_.
  using NodeState = base::BitField8<State, 0, 2>;
  using IsInYoungList = NodeState::Next<bool, 1>;
  using IsRoot = IsInYoungList::Next<bool, 1>;
  using HasDestructor = IsRoot::Next<bool, 1>;
  using Markbit = HasDestructor::Next<bool, 1>;
  using IsOnStack = Markbit::Next<bool, 1>;

  void ClearImplFields() {
    set_root(true);
    // Nodes are black allocated for simplicity.
    set_markbit();
    callback_ = nullptr;
    set_is_on_stack(false);
    set_has_destructor(false);
  }

  void CheckImplFieldsAreCleared() const {
    DCHECK(is_root());
    DCHECK(markbit());
    DCHECK_NULL(callback_);
  }

  WeakCallbackInfo<void>::Callback callback_;

  friend class NodeBase<GlobalHandles::TracedNode>;
};
731
732 // Space to keep track of on-stack handles (e.g. TracedReference). Such
733 // references are treated as root for any V8 garbage collection. The data
// structure is self-healing and pessimistically filters outdated entries on
735 // insertion and iteration.
736 //
737 // Design doc: http://bit.ly/on-stack-traced-reference
class GlobalHandles::OnStackTracedNodeSpace final {
 public:
  static GlobalHandles* GetGlobalHandles(const TracedNode* on_stack_node) {
    DCHECK(on_stack_node->is_on_stack());
    // On-stack nodes are stored inside NodeEntry, which keeps the
    // GlobalHandles back pointer directly after the node (see NodeEntry).
    return reinterpret_cast<const NodeEntry*>(on_stack_node)->global_handles;
  }

  explicit OnStackTracedNodeSpace(GlobalHandles* global_handles)
      : global_handles_(global_handles) {}

  // Records the highest stack address that may contain handles. Must be set
  // before any on-stack nodes are registered.
  void SetStackStart(void* stack_start) {
    CHECK(on_stack_nodes_.empty());
    stack_start_ = base::Stack::GetRealStackAddressForSlot(stack_start);
  }

  // Returns true if |slot| lies within the tracked stack region (or, with
  // ASAN, within a fake stack frame).
  V8_INLINE bool IsOnStack(uintptr_t slot) const;

  void Iterate(RootVisitor* v);
  TracedNode* Acquire(Object value, uintptr_t address);
  void CleanupBelowCurrentStackPosition();
  void NotifyEmptyEmbedderStack();

  size_t NumberOfHandlesForTesting() const { return on_stack_nodes_.size(); }

 private:
  struct NodeEntry {
    TracedNode node;
    // Used to find back to GlobalHandles from a Node on copy. Needs to follow
    // node.
    GlobalHandles* global_handles;
  };

  // Keeps track of registered handles. The data structure is cleaned on
  // iteration and when adding new references using the current stack address.
  // Cleaning is based on current stack address and the key of the map which is
  // slightly different for ASAN configs -- see below.
#ifdef V8_USE_ADDRESS_SANITIZER
  // Mapping from stack slots or real stack frames to the corresponding nodes.
  // In case a reference is part of a fake frame, we map it to the real stack
  // frame base instead of the actual stack slot. The list keeps all nodes for
  // a particular real frame.
  std::map<uintptr_t, std::list<NodeEntry>> on_stack_nodes_;
#else   // !V8_USE_ADDRESS_SANITIZER
  // Mapping from stack slots to the corresponding nodes. We don't expect
  // aliasing with overlapping lifetimes of nodes.
  std::map<uintptr_t, NodeEntry> on_stack_nodes_;
#endif  // !V8_USE_ADDRESS_SANITIZER

  uintptr_t stack_start_ = 0;
  GlobalHandles* global_handles_ = nullptr;
  // Number of Acquire() calls; drives periodic conservative cleanup.
  size_t acquire_count_ = 0;
};
790
bool GlobalHandles::OnStackTracedNodeSpace::IsOnStack(uintptr_t slot) const {
#ifdef V8_USE_ADDRESS_SANITIZER
  // With ASAN the slot may live in a fake stack frame rather than the real
  // stack; treat those as on-stack as well.
  if (__asan_addr_is_in_fake_stack(__asan_get_current_fake_stack(),
                                   reinterpret_cast<void*>(slot), nullptr,
                                   nullptr)) {
    return true;
  }
#endif  // V8_USE_ADDRESS_SANITIZER
  // A slot is on stack if it lies between the current stack position
  // (exclusive) and the recorded stack start (inclusive).
  return stack_start_ >= slot && slot > base::Stack::GetCurrentStackPosition();
}
801
// The embedder signaled that its stack is empty, so no on-stack node can
// still be referenced; drop them all.
void GlobalHandles::OnStackTracedNodeSpace::NotifyEmptyEmbedderStack() {
  on_stack_nodes_.clear();
}
805
// Visits all retaining on-stack nodes as roots.
void GlobalHandles::OnStackTracedNodeSpace::Iterate(RootVisitor* v) {
#ifdef V8_USE_ADDRESS_SANITIZER
  // With ASAN each map entry keeps a list of nodes for one real stack frame.
  for (auto& pair : on_stack_nodes_) {
    for (auto& node_entry : pair.second) {
      TracedNode& node = node_entry.node;
      if (node.IsRetainer()) {
        v->VisitRootPointer(Root::kGlobalHandles, "on-stack TracedReference",
                            node.location());
      }
    }
  }
#else   // !V8_USE_ADDRESS_SANITIZER
  // Handles have been cleaned from the GC entry point which is higher up the
  // stack.
  for (auto& pair : on_stack_nodes_) {
    TracedNode& node = pair.second.node;
    if (node.IsRetainer()) {
      v->VisitRootPointer(Root::kGlobalHandles, "on-stack TracedReference",
                          node.location());
    }
  }
#endif  // !V8_USE_ADDRESS_SANITIZER
}
829
// Registers a new on-stack node for |value| stored at stack address |slot|.
GlobalHandles::TracedNode* GlobalHandles::OnStackTracedNodeSpace::Acquire(
    Object value, uintptr_t slot) {
  // Periodically (every 2^8 acquisitions) purge entries that lie below the
  // current stack position, as those frames have been popped.
  constexpr size_t kAcquireCleanupThresholdLog2 = 8;
  constexpr size_t kAcquireCleanupThresholdMask =
      (size_t{1} << kAcquireCleanupThresholdLog2) - 1;
  DCHECK(IsOnStack(slot));
  if (((acquire_count_++) & kAcquireCleanupThresholdMask) == 0) {
    CleanupBelowCurrentStackPosition();
  }
  NodeEntry entry;
  entry.node.Free(nullptr);
  entry.global_handles = global_handles_;
#ifdef V8_USE_ADDRESS_SANITIZER
  // Fake-frame slots are keyed by the real frame address; several nodes may
  // share that key, hence the list per entry.
  auto pair = on_stack_nodes_.insert(
      {base::Stack::GetRealStackAddressForSlot(slot), {}});
  pair.first->second.push_back(std::move(entry));
  TracedNode* result = &(pair.first->second.back().node);
#else   // !V8_USE_ADDRESS_SANITIZER
  auto pair = on_stack_nodes_.insert(
      {base::Stack::GetRealStackAddressForSlot(slot), std::move(entry)});
  if (!pair.second) {
    // Insertion failed because there already was an entry present for that
    // stack address. This can happen because cleanup is conservative in which
    // stack limits it used. Reusing the entry is fine as there's no aliasing of
    // different references with the same stack slot.
    pair.first->second.node.Free(nullptr);
  }
  TracedNode* result = &(pair.first->second.node);
#endif  // !V8_USE_ADDRESS_SANITIZER
  result->Acquire(value);
  result->set_is_on_stack(true);
  return result;
}
863
CleanupBelowCurrentStackPosition()864 void GlobalHandles::OnStackTracedNodeSpace::CleanupBelowCurrentStackPosition() {
865 if (on_stack_nodes_.empty()) return;
866 const auto it =
867 on_stack_nodes_.upper_bound(base::Stack::GetCurrentStackPosition());
868 on_stack_nodes_.erase(on_stack_nodes_.begin(), it);
869 }
870
871 // static
// Debug-only consistency check for a traced handle slot: the node must be in
// use, its parameter/destructor combination must be consistent, and its
// on-stack and young-list bookkeeping must match reality.
void GlobalHandles::TracedNode::Verify(GlobalHandles* global_handles,
                                       const Address* const* slot) {
#ifdef DEBUG
  const TracedNode* node = FromLocation(*slot);
  DCHECK(node->IsInUse());
  // Only handles with a destructor carry a back pointer in |parameter|.
  DCHECK_IMPLIES(!node->has_destructor(), nullptr == node->parameter());
  DCHECK_IMPLIES(node->has_destructor() && !node->HasFinalizationCallback(),
                 node->parameter());
  bool slot_on_stack = global_handles->on_stack_nodes_->IsOnStack(
      reinterpret_cast<uintptr_t>(slot));
  DCHECK_EQ(slot_on_stack, node->is_on_stack());
  if (!node->is_on_stack()) {
    // On-heap nodes have separate lists for young generation processing.
    bool is_young_gen_object = ObjectInYoungGeneration(node->object());
    DCHECK_IMPLIES(is_young_gen_object, node->is_in_young_list());
  }
  bool in_young_list =
      std::find(global_handles->traced_young_nodes_.begin(),
                global_handles->traced_young_nodes_.end(),
                node) != global_handles->traced_young_nodes_.end();
  DCHECK_EQ(in_young_list, node->is_in_young_list());
#endif  // DEBUG
}
895
// Forwards to the on-stack node space; removes entries for returned frames.
void GlobalHandles::CleanupOnStackReferencesBelowCurrentStackPosition() {
  on_stack_nodes_->CleanupBelowCurrentStackPosition();
}
899
// Test-only accessor for the number of currently tracked on-stack handles.
size_t GlobalHandles::NumberOfOnStackHandlesForTesting() {
  return on_stack_nodes_->NumberOfHandlesForTesting();
}
903
// Total memory allocated for regular and traced node blocks.
size_t GlobalHandles::TotalSize() const {
  return regular_nodes_->TotalSize() + traced_nodes_->TotalSize();
}
907
// Memory occupied by handles that are actually in use (handle count times
// per-node size, for both node kinds).
size_t GlobalHandles::UsedSize() const {
  return regular_nodes_->handles_count() * sizeof(Node) +
         traced_nodes_->handles_count() * sizeof(TracedNode);
}
912
// Number of in-use handles across regular and traced node spaces.
size_t GlobalHandles::handles_count() const {
  return regular_nodes_->handles_count() + traced_nodes_->handles_count();
}
916
// Records the start of the stack; used to decide whether a slot is on-stack.
void GlobalHandles::SetStackStart(void* stack_start) {
  on_stack_nodes_->SetStackStart(stack_start);
}
920
// Signals that the embedder's stack holds no references to V8 objects.
void GlobalHandles::NotifyEmptyEmbedderStack() {
  on_stack_nodes_->NotifyEmptyEmbedderStack();
}
924
// Sets up the three node spaces: regular handles, traced on-heap handles, and
// traced on-stack handles.
GlobalHandles::GlobalHandles(Isolate* isolate)
    : isolate_(isolate),
      regular_nodes_(new NodeSpace<GlobalHandles::Node>(this)),
      traced_nodes_(new NodeSpace<GlobalHandles::TracedNode>(this)),
      on_stack_nodes_(new OnStackTracedNodeSpace(this)) {}
930
// Regular nodes are released explicitly first; the remaining spaces are torn
// down implicitly through member destruction.
GlobalHandles::~GlobalHandles() { regular_nodes_.reset(nullptr); }
932
Create(Object value)933 Handle<Object> GlobalHandles::Create(Object value) {
934 GlobalHandles::Node* result = regular_nodes_->Acquire(value);
935 if (ObjectInYoungGeneration(value) && !result->is_in_young_list()) {
936 young_nodes_.push_back(result);
937 result->set_in_young_list(true);
938 }
939 return result->handle();
940 }
941
// Convenience overload: wraps the raw address into an Object and delegates.
Handle<Object> GlobalHandles::Create(Address value) {
  return Create(Object(value));
}
945
CreateTraced(Object value,Address * slot,bool has_destructor)946 Handle<Object> GlobalHandles::CreateTraced(Object value, Address* slot,
947 bool has_destructor) {
948 return CreateTraced(
949 value, slot, has_destructor,
950 on_stack_nodes_->IsOnStack(reinterpret_cast<uintptr_t>(slot)));
951 }
952
CreateTraced(Object value,Address * slot,bool has_destructor,bool is_on_stack)953 Handle<Object> GlobalHandles::CreateTraced(Object value, Address* slot,
954 bool has_destructor,
955 bool is_on_stack) {
956 GlobalHandles::TracedNode* result;
957 if (is_on_stack) {
958 result = on_stack_nodes_->Acquire(value, reinterpret_cast<uintptr_t>(slot));
959 } else {
960 result = traced_nodes_->Acquire(value);
961 if (ObjectInYoungGeneration(value) && !result->is_in_young_list()) {
962 traced_young_nodes_.push_back(result);
963 result->set_in_young_list(true);
964 }
965 }
966 result->set_has_destructor(has_destructor);
967 result->set_parameter(has_destructor ? slot : nullptr);
968 return result->handle();
969 }
970
// Convenience overload: wraps the raw address into an Object and delegates.
Handle<Object> GlobalHandles::CreateTraced(Address value, Address* slot,
                                           bool has_destructor) {
  return CreateTraced(Object(value), slot, has_destructor);
}
975
// Creates a fresh regular handle pointing at the same object as |location|.
// The owning GlobalHandles instance is recovered from the source node.
Handle<Object> GlobalHandles::CopyGlobal(Address* location) {
  DCHECK_NOT_NULL(location);
  GlobalHandles* global_handles =
      Node::FromLocation(location)->global_handles();
#ifdef VERIFY_HEAP
  if (i::FLAG_verify_heap) {
    Object(*location).ObjectVerify(global_handles->isolate());
  }
#endif  // VERIFY_HEAP
  return global_handles->Create(*location);
}
987
988 namespace {
SetSlotThreadSafe(Address ** slot,Address * val)989 void SetSlotThreadSafe(Address** slot, Address* val) {
990 reinterpret_cast<std::atomic<Address*>*>(slot)->store(
991 val, std::memory_order_relaxed);
992 }
993 } // namespace
994
995 // static
// Copies the traced handle at |from| into the empty slot |to|, creating a new
// traced node with the same destructor behavior.
void GlobalHandles::CopyTracedGlobal(const Address* const* from, Address** to) {
  DCHECK_NOT_NULL(*from);
  DCHECK_NULL(*to);
  const TracedNode* node = TracedNode::FromLocation(*from);
  // Copying a traced handle with finalization callback is prohibited because
  // the callback may require knowing about multiple copies of the traced
  // handle.
  CHECK_WITH_MSG(!node->HasFinalizationCallback(),
                 "Copying of references is not supported when "
                 "SetFinalizationCallback is set.");

  GlobalHandles* global_handles =
      GlobalHandles::From(const_cast<TracedNode*>(node));
  Handle<Object> o = global_handles->CreateTraced(
      node->object(), reinterpret_cast<Address*>(to), node->has_destructor());
  // Publish the new location atomically so concurrent readers see either the
  // old value (nullptr) or the fully created handle.
  SetSlotThreadSafe(to, o.location());
  TracedNode::Verify(global_handles, from);
  TracedNode::Verify(global_handles, to);
#ifdef VERIFY_HEAP
  if (i::FLAG_verify_heap) {
    Object(**to).ObjectVerify(global_handles->isolate());
  }
#endif  // VERIFY_HEAP
}
1020
// Fixes up internal bookkeeping after a regular handle's storage moved from
// |from| to |to|. The node itself is shared; only back references change.
void GlobalHandles::MoveGlobal(Address** from, Address** to) {
  DCHECK_NOT_NULL(*from);
  DCHECK_NOT_NULL(*to);
  DCHECK_EQ(*from, *to);
  Node* node = Node::FromLocation(*from);
  if (node->IsWeak() && node->IsPhantomResetHandle()) {
    // Phantom-reset handles store the slot to clear in |parameter|; it must
    // now point at the new location.
    node->set_parameter(to);
  }

  // - Strong handles do not require fixups.
  // - Weak handles with finalizers and callbacks are too general to fix up. For
  //   those the callers need to ensure consistency.
}
1034
// Moves the traced handle from slot |from| to slot |to|, handling every
// combination of on-stack and on-heap source/destination. |from| is always
// cleared on return.
void GlobalHandles::MoveTracedGlobal(Address** from, Address** to) {
  // Fast path for moving from an empty reference.
  if (!*from) {
    DestroyTraced(*to);
    SetSlotThreadSafe(to, nullptr);
    return;
  }

  // Determining whether from or to are on stack.
  TracedNode* from_node = TracedNode::FromLocation(*from);
  DCHECK(from_node->IsInUse());
  TracedNode* to_node = TracedNode::FromLocation(*to);
  GlobalHandles* global_handles = nullptr;
#ifdef DEBUG
  global_handles = GlobalHandles::From(from_node);
#endif  // DEBUG
  bool from_on_stack = from_node->is_on_stack();
  bool to_on_stack = false;
  if (!to_node) {
    // Figure out whether stack or heap to allow fast path for heap->heap move.
    global_handles = GlobalHandles::From(from_node);
    to_on_stack = global_handles->on_stack_nodes_->IsOnStack(
        reinterpret_cast<uintptr_t>(to));
  } else {
    to_on_stack = to_node->is_on_stack();
  }

  // Moving a traced handle with finalization callback is prohibited because
  // the callback may require knowing about multiple copies of the traced
  // handle.
  CHECK_WITH_MSG(!from_node->HasFinalizationCallback(),
                 "Moving of references is not supported when "
                 "SetFinalizationCallback is set.");
  // Types in v8.h ensure that we only copy/move handles that have the same
  // destructor behavior.
  DCHECK_IMPLIES(to_node,
                 to_node->has_destructor() == from_node->has_destructor());

  // Moving.
  if (from_on_stack || to_on_stack) {
    // Move involving a stack slot: the node cannot simply be reused, so a new
    // node is created for |to| and the source node is destroyed.
    if (!to_node) {
      DCHECK(global_handles);
      Handle<Object> o = global_handles->CreateTraced(
          from_node->object(), reinterpret_cast<Address*>(to),
          from_node->has_destructor(), to_on_stack);
      SetSlotThreadSafe(to, o.location());
      to_node = TracedNode::FromLocation(*to);
      DCHECK(to_node->markbit());
    } else {
      DCHECK(to_node->IsInUse());
      to_node->CopyObjectReference(*from_node);
      // The destination node may need to join young-generation tracking if it
      // now points at a young object.
      if (!to_node->is_on_stack() && !to_node->is_in_young_list() &&
          ObjectInYoungGeneration(to_node->object())) {
        global_handles = GlobalHandles::From(from_node);
        global_handles->traced_young_nodes_.push_back(to_node);
        to_node->set_in_young_list(true);
      }
    }
    DestroyTraced(*from);
    SetSlotThreadSafe(from, nullptr);
  } else {
    // Pure heap move: reuse the source node and just repoint the slots.
    DestroyTraced(*to);
    SetSlotThreadSafe(to, *from);
    to_node = from_node;
    DCHECK_NOT_NULL(*from);
    DCHECK_NOT_NULL(*to);
    DCHECK_EQ(*from, *to);
    // Fixup back reference for destructor.
    if (to_node->has_destructor()) {
      to_node->set_parameter(to);
    }
    SetSlotThreadSafe(from, nullptr);
  }
  TracedNode::Verify(global_handles, to);
}
1112
1113 // static
From(const TracedNode * node)1114 GlobalHandles* GlobalHandles::From(const TracedNode* node) {
1115 return node->is_on_stack()
1116 ? OnStackTracedNodeSpace::GetGlobalHandles(node)
1117 : NodeBlock<TracedNode>::From(node)->global_handles();
1118 }
1119
// Sets the mark bit on the traced node at |location| (liveness marking for
// traced handles).
void GlobalHandles::MarkTraced(Address* location) {
  TracedNode* node = TracedNode::FromLocation(location);
  node->set_markbit();
  DCHECK(node->IsInUse());
}
1125
Destroy(Address * location)1126 void GlobalHandles::Destroy(Address* location) {
1127 if (location != nullptr) {
1128 NodeSpace<Node>::Release(Node::FromLocation(location));
1129 }
1130 }
1131
DestroyTraced(Address * location)1132 void GlobalHandles::DestroyTraced(Address* location) {
1133 if (location != nullptr) {
1134 TracedNode* node = TracedNode::FromLocation(location);
1135 if (node->is_on_stack()) {
1136 node->Release(nullptr);
1137 } else {
1138 NodeSpace<TracedNode>::Release(node);
1139 }
1140 }
1141 }
1142
// Attaches a finalization callback (with its parameter) to the traced node at
// |location|.
void GlobalHandles::SetFinalizationCallbackForTraced(
    Address* location, void* parameter,
    WeakCallbackInfo<void>::Callback callback) {
  TracedNode::FromLocation(location)->SetFinalizationCallback(parameter,
                                                              callback);
}
1149
1150 using GenericCallback = v8::WeakCallbackInfo<void>::Callback;
1151
// Turns the handle at |location| weak with a phantom/finalizer callback of
// the given |type|.
void GlobalHandles::MakeWeak(Address* location, void* parameter,
                             GenericCallback phantom_callback,
                             v8::WeakCallbackType type) {
  Node::FromLocation(location)->MakeWeak(parameter, phantom_callback, type);
}
1157
// Turns the handle weak in phantom-reset mode: the slot itself is cleared
// when the target dies.
void GlobalHandles::MakeWeak(Address** location_addr) {
  Node::FromLocation(*location_addr)->MakeWeak(location_addr);
}
1161
// Makes the handle strong again; returns the previously stored parameter.
void* GlobalHandles::ClearWeakness(Address* location) {
  return Node::FromLocation(location)->ClearWeakness();
}
1165
// Attaches a debugging label to a strong handle for heap-snapshot reporting.
void GlobalHandles::AnnotateStrongRetainer(Address* location,
                                           const char* label) {
  Node::FromLocation(location)->AnnotateStrongRetainer(label);
}
1170
// Returns whether the handle at |location| is currently weak.
bool GlobalHandles::IsWeak(Address* location) {
  return Node::FromLocation(location)->IsWeak();
}
1174
1175 DISABLE_CFI_PERF
IterateWeakRootsForFinalizers(RootVisitor * v)1176 void GlobalHandles::IterateWeakRootsForFinalizers(RootVisitor* v) {
1177 for (Node* node : *regular_nodes_) {
1178 if (node->IsWeakRetainer() && node->state() == Node::PENDING) {
1179 DCHECK(!node->IsPhantomCallback());
1180 DCHECK(!node->IsPhantomResetHandle());
1181 // Finalizers need to survive.
1182 v->VisitRootPointer(Root::kGlobalHandles, node->label(),
1183 node->location());
1184 }
1185 }
1186 }
1187
DISABLE_CFI_PERF
// Resets (or collects callbacks for) all phantom handles whose targets are
// dead, for both regular and traced handles. Also consumes traced mark bits.
void GlobalHandles::IterateWeakRootsForPhantomHandles(
    WeakSlotCallbackWithHeap should_reset_handle) {
  for (Node* node : *regular_nodes_) {
    if (node->IsWeakRetainer() &&
        should_reset_handle(isolate()->heap(), node->location())) {
      if (node->IsPhantomResetHandle()) {
        node->MarkPending();
        node->ResetPhantomHandle(HandleHolder::kLive);
        ++number_of_phantom_handle_resets_;
      } else if (node->IsPhantomCallback()) {
        node->MarkPending();
        node->CollectPhantomCallbackData(&regular_pending_phantom_callbacks_);
      }
    }
  }
  for (TracedNode* node : *traced_nodes_) {
    if (!node->IsInUse()) continue;
    // Detect unreachable nodes first.
    if (!node->markbit() && node->IsPhantomResetHandle() &&
        !node->has_destructor()) {
      // The handle is unreachable and has neither a callback nor a destructor
      // associated with it, so it can be cleared even if the target V8 object
      // is alive. (Handles that do have a destructor or callback may be
      // accessed through them, which is why those are not cleared eagerly.)
      node->ResetPhantomHandle(HandleHolder::kDead);
      ++number_of_phantom_handle_resets_;
      continue;
    } else if (node->markbit()) {
      // Clear the markbit for the next GC.
      node->clear_markbit();
    }
    DCHECK(node->IsInUse());
    // Detect nodes with unreachable target objects.
    if (should_reset_handle(isolate()->heap(), node->location())) {
      // If the node allows eager resetting, then reset it here. Otherwise,
      // collect its callback that will reset it.
      if (node->IsPhantomResetHandle()) {
        node->ResetPhantomHandle(node->has_destructor() ? HandleHolder::kLive
                                                        : HandleHolder::kDead);
        ++number_of_phantom_handle_resets_;
      } else {
        node->CollectPhantomCallbackData(&traced_pending_phantom_callbacks_);
      }
    }
  }
}
1235
IterateWeakRootsIdentifyFinalizers(WeakSlotCallbackWithHeap should_reset_handle)1236 void GlobalHandles::IterateWeakRootsIdentifyFinalizers(
1237 WeakSlotCallbackWithHeap should_reset_handle) {
1238 for (Node* node : *regular_nodes_) {
1239 if (node->IsWeak() &&
1240 should_reset_handle(isolate()->heap(), node->location())) {
1241 if (node->IsFinalizerHandle()) {
1242 node->MarkPending();
1243 }
1244 }
1245 }
1246 }
1247
// Asks the embedder whether unmodified young traced handles should still be
// treated as roots for a non-tracing (young-generation) GC.
void GlobalHandles::IdentifyWeakUnmodifiedObjects(
    WeakSlotCallback is_unmodified) {
  if (!FLAG_reclaim_unmodified_wrappers) return;

  LocalEmbedderHeapTracer* const tracer =
      isolate()->heap()->local_embedder_heap_tracer();
  for (TracedNode* node : traced_young_nodes_) {
    if (node->IsInUse()) {
      DCHECK(node->is_root());
      if (is_unmodified(node->location())) {
        v8::Value* value = ToApi<v8::Value>(node->handle());
        // The embedder API distinguishes TracedGlobal (has destructor) from
        // TracedReference; reinterpret the handle accordingly.
        if (node->has_destructor()) {
          node->set_root(tracer->IsRootForNonTracingGC(
              *reinterpret_cast<v8::TracedGlobal<v8::Value>*>(&value)));
        } else {
          node->set_root(tracer->IsRootForNonTracingGC(
              *reinterpret_cast<v8::TracedReference<v8::Value>*>(&value)));
        }
      }
    }
  }
}
1270
IterateYoungStrongAndDependentRoots(RootVisitor * v)1271 void GlobalHandles::IterateYoungStrongAndDependentRoots(RootVisitor* v) {
1272 for (Node* node : young_nodes_) {
1273 if (node->IsStrongRetainer()) {
1274 v->VisitRootPointer(Root::kGlobalHandles, node->label(),
1275 node->location());
1276 }
1277 }
1278 for (TracedNode* node : traced_young_nodes_) {
1279 if (node->IsInUse() && node->is_root()) {
1280 v->VisitRootPointer(Root::kGlobalHandles, nullptr, node->location());
1281 }
1282 }
1283 }
1284
MarkYoungWeakDeadObjectsPending(WeakSlotCallbackWithHeap is_dead)1285 void GlobalHandles::MarkYoungWeakDeadObjectsPending(
1286 WeakSlotCallbackWithHeap is_dead) {
1287 for (Node* node : young_nodes_) {
1288 DCHECK(node->is_in_young_list());
1289 if (node->IsWeak() && is_dead(isolate_->heap(), node->location())) {
1290 if (!node->IsPhantomCallback() && !node->IsPhantomResetHandle()) {
1291 node->MarkPending();
1292 }
1293 }
1294 }
1295 }
1296
IterateYoungWeakDeadObjectsForFinalizers(RootVisitor * v)1297 void GlobalHandles::IterateYoungWeakDeadObjectsForFinalizers(RootVisitor* v) {
1298 for (Node* node : young_nodes_) {
1299 DCHECK(node->is_in_young_list());
1300 if (node->IsWeakRetainer() && (node->state() == Node::PENDING)) {
1301 DCHECK(!node->IsPhantomCallback());
1302 DCHECK(!node->IsPhantomResetHandle());
1303 // Finalizers need to survive.
1304 v->VisitRootPointer(Root::kGlobalHandles, node->label(),
1305 node->location());
1306 }
1307 }
1308 }
1309
// Young-generation phantom processing: resets or collects callbacks for young
// handles with dead targets, and visits survivors as roots.
void GlobalHandles::IterateYoungWeakObjectsForPhantomHandles(
    RootVisitor* v, WeakSlotCallbackWithHeap should_reset_handle) {
  for (Node* node : young_nodes_) {
    DCHECK(node->is_in_young_list());
    if (node->IsWeakRetainer() && (node->state() != Node::PENDING)) {
      if (should_reset_handle(isolate_->heap(), node->location())) {
        DCHECK(node->IsPhantomResetHandle() || node->IsPhantomCallback());
        if (node->IsPhantomResetHandle()) {
          node->MarkPending();
          node->ResetPhantomHandle(HandleHolder::kLive);
          ++number_of_phantom_handle_resets_;
        } else if (node->IsPhantomCallback()) {
          node->MarkPending();
          node->CollectPhantomCallbackData(&regular_pending_phantom_callbacks_);
        } else {
          UNREACHABLE();
        }
      } else {
        // Node survived and needs to be visited.
        v->VisitRootPointer(Root::kGlobalHandles, node->label(),
                            node->location());
      }
    }
  }

  if (!FLAG_reclaim_unmodified_wrappers) return;

  LocalEmbedderHeapTracer* const tracer =
      isolate()->heap()->local_embedder_heap_tracer();
  for (TracedNode* node : traced_young_nodes_) {
    if (!node->IsInUse()) continue;

    DCHECK_IMPLIES(node->is_root(),
                   !should_reset_handle(isolate_->heap(), node->location()));
    if (should_reset_handle(isolate_->heap(), node->location())) {
      if (node->IsPhantomResetHandle()) {
        if (node->has_destructor()) {
          // For handles with destructor it is guaranteed that the embedder
          // memory is still alive as the destructor would have otherwise
          // removed the memory.
          node->ResetPhantomHandle(HandleHolder::kLive);
        } else {
          // Without a destructor, resetting is delegated to the embedder
          // tracer, which clears the reference through the public API.
          v8::Value* value = ToApi<v8::Value>(node->handle());
          tracer->ResetHandleInNonTracingGC(
              *reinterpret_cast<v8::TracedReference<v8::Value>*>(&value));
          DCHECK(!node->IsInUse());
        }

        ++number_of_phantom_handle_resets_;
      } else {
        node->CollectPhantomCallbackData(&traced_pending_phantom_callbacks_);
      }
    } else {
      // Survivor: promote back to root status and visit it.
      if (!node->is_root()) {
        node->set_root(true);
        v->VisitRootPointer(Root::kGlobalHandles, nullptr, node->location());
      }
    }
  }
}
1370
// Task entry point for asynchronously scheduled second-pass callbacks; wraps
// the invocation in the weak-callback GC prologue/epilogue notifications.
void GlobalHandles::InvokeSecondPassPhantomCallbacksFromTask() {
  DCHECK(second_pass_callbacks_task_posted_);
  second_pass_callbacks_task_posted_ = false;
  Heap::DevToolsTraceEventScope devtools_trace_event_scope(
      isolate()->heap(), "MajorGC", "invoke weak phantom callbacks");
  TRACE_EVENT0("v8", "V8.GCPhantomHandleProcessingCallback");
  isolate()->heap()->CallGCPrologueCallbacks(
      GCType::kGCTypeProcessWeakCallbacks, kNoGCCallbackFlags);
  InvokeSecondPassPhantomCallbacks();
  isolate()->heap()->CallGCEpilogueCallbacks(
      GCType::kGCTypeProcessWeakCallbacks, kNoGCCallbackFlags);
}
1383
// Drains and invokes queued second-pass phantom callbacks. Reentrancy-safe:
// nested GCs triggered by a callback do not restart the drain.
void GlobalHandles::InvokeSecondPassPhantomCallbacks() {
  // The callbacks may execute JS, which in turn may lead to another GC run.
  // If we are already processing the callbacks, we do not want to start over
  // from within the inner GC. Newly added callbacks will always be run by the
  // outermost GC run only.
  if (running_second_pass_callbacks_) return;
  running_second_pass_callbacks_ = true;

  AllowJavascriptExecution allow_js(isolate());
  // Pop before invoking so that a callback adding new entries cannot make the
  // current entry run twice.
  while (!second_pass_callbacks_.empty()) {
    auto callback = second_pass_callbacks_.back();
    second_pass_callbacks_.pop_back();
    callback.Invoke(isolate(), PendingPhantomCallback::kSecondPass);
  }
  running_second_pass_callbacks_ = false;
}
1400
PostScavengeProcessing(unsigned post_processing_count)1401 size_t GlobalHandles::PostScavengeProcessing(unsigned post_processing_count) {
1402 size_t freed_nodes = 0;
1403 for (Node* node : young_nodes_) {
1404 // Filter free nodes.
1405 if (!node->IsRetainer()) continue;
1406
1407 if (node->IsPending()) {
1408 DCHECK(node->has_callback());
1409 DCHECK(node->IsPendingFinalizer());
1410 node->PostGarbageCollectionProcessing(isolate_);
1411 }
1412 if (InRecursiveGC(post_processing_count)) return freed_nodes;
1413
1414 if (!node->IsRetainer()) freed_nodes++;
1415 }
1416 return freed_nodes;
1417 }
1418
PostMarkSweepProcessing(unsigned post_processing_count)1419 size_t GlobalHandles::PostMarkSweepProcessing(unsigned post_processing_count) {
1420 size_t freed_nodes = 0;
1421 for (Node* node : *regular_nodes_) {
1422 // Filter free nodes.
1423 if (!node->IsRetainer()) continue;
1424
1425 if (node->IsPending()) {
1426 DCHECK(node->has_callback());
1427 DCHECK(node->IsPendingFinalizer());
1428 node->PostGarbageCollectionProcessing(isolate_);
1429 }
1430 if (InRecursiveGC(post_processing_count)) return freed_nodes;
1431
1432 if (!node->IsRetainer()) freed_nodes++;
1433 }
1434 return freed_nodes;
1435 }
1436
// Compacts |node_list| in place: keeps nodes that are still in use and still
// young, drops the rest, and updates each node's young-list flag and the
// heap's promotion/copy/death counters accordingly.
template <typename T>
void GlobalHandles::UpdateAndCompactListOfYoungNode(
    std::vector<T*>* node_list) {
  size_t last = 0;
  for (T* node : *node_list) {
    DCHECK(node->is_in_young_list());
    if (node->IsInUse()) {
      if (ObjectInYoungGeneration(node->object())) {
        // Still young: keep it, compacting towards the front.
        (*node_list)[last++] = node;
        isolate_->heap()->IncrementNodesCopiedInNewSpace();
      } else {
        // Promoted to old generation: drop from the young list.
        node->set_in_young_list(false);
        isolate_->heap()->IncrementNodesPromoted();
      }
    } else {
      // Node was freed.
      node->set_in_young_list(false);
      isolate_->heap()->IncrementNodesDiedInNewSpace();
    }
  }
  DCHECK_LE(last, node_list->size());
  node_list->resize(last);
  node_list->shrink_to_fit();
}
1460
// Compacts both young-node lists (regular and traced) after a GC.
void GlobalHandles::UpdateListOfYoungNodes() {
  UpdateAndCompactListOfYoungNode(&young_nodes_);
  UpdateAndCompactListOfYoungNode(&traced_young_nodes_);
}
1465
// Invokes all queued first-pass phantom callbacks for one node kind and
// queues any requested second-pass callbacks. Returns the number of freed
// nodes.
template <typename T>
size_t GlobalHandles::InvokeFirstPassWeakCallbacks(
    std::vector<std::pair<T*, PendingPhantomCallback>>* pending) {
  size_t freed_nodes = 0;
  // Swap out the queue so callbacks queueing new work do not interfere with
  // this iteration.
  std::vector<std::pair<T*, PendingPhantomCallback>> pending_phantom_callbacks;
  pending_phantom_callbacks.swap(*pending);
  {
    // The initial pass callbacks must simply clear the nodes.
    for (auto& pair : pending_phantom_callbacks) {
      T* node = pair.first;
      DCHECK_EQ(T::NEAR_DEATH, node->state());
      pair.second.Invoke(isolate(), PendingPhantomCallback::kFirstPass);

      // Transition to second pass. It is required that the first pass callback
      // resets the handle using |v8::PersistentBase::Reset|. Also see comments
      // on |v8::WeakCallbackInfo|.
      CHECK_WITH_MSG(T::FREE == node->state(),
                     "Handle not reset in first callback. See comments on "
                     "|v8::WeakCallbackInfo|.");

      if (pair.second.callback()) second_pass_callbacks_.push_back(pair.second);
      freed_nodes++;
    }
  }
  return freed_nodes;
}
1492
// Runs first-pass callbacks for both regular and traced handles; returns the
// combined number of freed nodes.
size_t GlobalHandles::InvokeFirstPassWeakCallbacks() {
  return InvokeFirstPassWeakCallbacks(&regular_pending_phantom_callbacks_) +
         InvokeFirstPassWeakCallbacks(&traced_pending_phantom_callbacks_);
}
1497
// Either runs second-pass callbacks immediately (synchronous/predictable
// modes) or posts a foreground task to run them later.
void GlobalHandles::InvokeOrScheduleSecondPassPhantomCallbacks(
    bool synchronous_second_pass) {
  if (!second_pass_callbacks_.empty()) {
    if (FLAG_optimize_for_size || FLAG_predictable || synchronous_second_pass) {
      Heap::DevToolsTraceEventScope devtools_trace_event_scope(
          isolate()->heap(), "MajorGC", "invoke weak phantom callbacks");
      isolate()->heap()->CallGCPrologueCallbacks(
          GCType::kGCTypeProcessWeakCallbacks, kNoGCCallbackFlags);
      InvokeSecondPassPhantomCallbacks();
      isolate()->heap()->CallGCEpilogueCallbacks(
          GCType::kGCTypeProcessWeakCallbacks, kNoGCCallbackFlags);
    } else if (!second_pass_callbacks_task_posted_) {
      // At most one task is in flight at a time; it drains the whole queue.
      second_pass_callbacks_task_posted_ = true;
      auto taskrunner = V8::GetCurrentPlatform()->GetForegroundTaskRunner(
          reinterpret_cast<v8::Isolate*>(isolate()));
      taskrunner->PostTask(MakeCancelableTask(
          isolate(), [this] { InvokeSecondPassPhantomCallbacksFromTask(); }));
    }
  }
}
1518
Invoke(Isolate * isolate,InvocationType type)1519 void GlobalHandles::PendingPhantomCallback::Invoke(Isolate* isolate,
1520 InvocationType type) {
1521 Data::Callback* callback_addr = nullptr;
1522 if (type == kFirstPass) {
1523 callback_addr = &callback_;
1524 }
1525 Data data(reinterpret_cast<v8::Isolate*>(isolate), parameter_,
1526 embedder_fields_, callback_addr);
1527 Data::Callback callback = callback_;
1528 callback_ = nullptr;
1529 callback(data);
1530 }
1531
// True if another GC's post-processing started since |gc_processing_counter|
// was sampled, i.e. the caller is iterating stale state.
bool GlobalHandles::InRecursiveGC(unsigned gc_processing_counter) {
  return gc_processing_counter != post_gc_processing_count_;
}
1535
// Top-level post-GC hook: invokes/schedules phantom callbacks, runs pending
// finalizers for the collected generation, and compacts the young lists.
// Returns the number of freed nodes; bails out early on recursive GCs.
size_t GlobalHandles::PostGarbageCollectionProcessing(
    GarbageCollector collector, const v8::GCCallbackFlags gc_callback_flags) {
  // Process weak global handle callbacks. This must be done after the
  // GC is completely done, because the callbacks may invoke arbitrary
  // API functions.
  DCHECK_EQ(Heap::NOT_IN_GC, isolate_->heap()->gc_state());
  const unsigned post_processing_count = ++post_gc_processing_count_;
  size_t freed_nodes = 0;
  bool synchronous_second_pass =
      isolate_->heap()->IsTearingDown() ||
      (gc_callback_flags &
       (kGCCallbackFlagForced | kGCCallbackFlagCollectAllAvailableGarbage |
        kGCCallbackFlagSynchronousPhantomCallbackProcessing)) != 0;
  InvokeOrScheduleSecondPassPhantomCallbacks(synchronous_second_pass);
  if (InRecursiveGC(post_processing_count)) return freed_nodes;

  freed_nodes += Heap::IsYoungGenerationCollector(collector)
                     ? PostScavengeProcessing(post_processing_count)
                     : PostMarkSweepProcessing(post_processing_count);
  if (InRecursiveGC(post_processing_count)) return freed_nodes;

  UpdateListOfYoungNodes();
  return freed_nodes;
}
1560
IterateStrongRoots(RootVisitor * v)1561 void GlobalHandles::IterateStrongRoots(RootVisitor* v) {
1562 for (Node* node : *regular_nodes_) {
1563 if (node->IsStrongRetainer()) {
1564 v->VisitRootPointer(Root::kGlobalHandles, node->label(),
1565 node->location());
1566 }
1567 }
1568 }
1569
// Visits all on-stack traced handles as strong roots.
void GlobalHandles::IterateStrongStackRoots(RootVisitor* v) {
  on_stack_nodes_->Iterate(v);
}
1573
IterateWeakRoots(RootVisitor * v)1574 void GlobalHandles::IterateWeakRoots(RootVisitor* v) {
1575 for (Node* node : *regular_nodes_) {
1576 if (node->IsWeak()) {
1577 v->VisitRootPointer(Root::kGlobalHandles, node->label(),
1578 node->location());
1579 }
1580 }
1581 for (TracedNode* node : *traced_nodes_) {
1582 if (node->IsInUse()) {
1583 v->VisitRootPointer(Root::kGlobalHandles, nullptr, node->location());
1584 }
1585 }
1586 }
1587
1588 DISABLE_CFI_PERF
IterateAllRoots(RootVisitor * v)1589 void GlobalHandles::IterateAllRoots(RootVisitor* v) {
1590 for (Node* node : *regular_nodes_) {
1591 if (node->IsRetainer()) {
1592 v->VisitRootPointer(Root::kGlobalHandles, node->label(),
1593 node->location());
1594 }
1595 }
1596 for (TracedNode* node : *traced_nodes_) {
1597 if (node->IsRetainer()) {
1598 v->VisitRootPointer(Root::kGlobalHandles, nullptr, node->location());
1599 }
1600 }
1601 on_stack_nodes_->Iterate(v);
1602 }
1603
1604 DISABLE_CFI_PERF
IterateAllYoungRoots(RootVisitor * v)1605 void GlobalHandles::IterateAllYoungRoots(RootVisitor* v) {
1606 for (Node* node : young_nodes_) {
1607 if (node->IsRetainer()) {
1608 v->VisitRootPointer(Root::kGlobalHandles, node->label(),
1609 node->location());
1610 }
1611 }
1612 for (TracedNode* node : traced_young_nodes_) {
1613 if (node->IsRetainer()) {
1614 v->VisitRootPointer(Root::kGlobalHandles, nullptr, node->location());
1615 }
1616 }
1617 on_stack_nodes_->Iterate(v);
1618 }
1619
DISABLE_CFI_PERF
// Presents a node to the embedder's persistent-handle visitor, reinterpreting
// the location as a v8::Persistent as the public API expects.
void GlobalHandles::ApplyPersistentHandleVisitor(
    v8::PersistentHandleVisitor* visitor, GlobalHandles::Node* node) {
  v8::Value* value = ToApi<v8::Value>(node->handle());
  visitor->VisitPersistentHandle(
      reinterpret_cast<v8::Persistent<v8::Value>*>(&value),
      node->wrapper_class_id());
}
1628
1629 DISABLE_CFI_PERF
IterateAllRootsWithClassIds(v8::PersistentHandleVisitor * visitor)1630 void GlobalHandles::IterateAllRootsWithClassIds(
1631 v8::PersistentHandleVisitor* visitor) {
1632 for (Node* node : *regular_nodes_) {
1633 if (node->IsRetainer() && node->has_wrapper_class_id()) {
1634 ApplyPersistentHandleVisitor(visitor, node);
1635 }
1636 }
1637 }
1638
1639 DISABLE_CFI_PERF
IterateTracedNodes(v8::EmbedderHeapTracer::TracedGlobalHandleVisitor * visitor)1640 void GlobalHandles::IterateTracedNodes(
1641 v8::EmbedderHeapTracer::TracedGlobalHandleVisitor* visitor) {
1642 for (TracedNode* node : *traced_nodes_) {
1643 if (node->IsInUse()) {
1644 v8::Value* value = ToApi<v8::Value>(node->handle());
1645 if (node->has_destructor()) {
1646 visitor->VisitTracedGlobalHandle(
1647 *reinterpret_cast<v8::TracedGlobal<v8::Value>*>(&value));
1648 } else {
1649 visitor->VisitTracedReference(
1650 *reinterpret_cast<v8::TracedReference<v8::Value>*>(&value));
1651 }
1652 }
1653 }
1654 }
1655
1656 DISABLE_CFI_PERF
IterateAllYoungRootsWithClassIds(v8::PersistentHandleVisitor * visitor)1657 void GlobalHandles::IterateAllYoungRootsWithClassIds(
1658 v8::PersistentHandleVisitor* visitor) {
1659 for (Node* node : young_nodes_) {
1660 if (node->IsRetainer() && node->has_wrapper_class_id()) {
1661 ApplyPersistentHandleVisitor(visitor, node);
1662 }
1663 }
1664 }
1665
1666 DISABLE_CFI_PERF
IterateYoungWeakRootsWithClassIds(v8::PersistentHandleVisitor * visitor)1667 void GlobalHandles::IterateYoungWeakRootsWithClassIds(
1668 v8::PersistentHandleVisitor* visitor) {
1669 for (Node* node : young_nodes_) {
1670 if (node->has_wrapper_class_id() && node->IsWeak()) {
1671 ApplyPersistentHandleVisitor(visitor, node);
1672 }
1673 }
1674 }
1675
RecordStats(HeapStats * stats)1676 void GlobalHandles::RecordStats(HeapStats* stats) {
1677 *stats->global_handle_count = 0;
1678 *stats->weak_global_handle_count = 0;
1679 *stats->pending_global_handle_count = 0;
1680 *stats->near_death_global_handle_count = 0;
1681 *stats->free_global_handle_count = 0;
1682 for (Node* node : *regular_nodes_) {
1683 *stats->global_handle_count += 1;
1684 if (node->state() == Node::WEAK) {
1685 *stats->weak_global_handle_count += 1;
1686 } else if (node->state() == Node::PENDING) {
1687 *stats->pending_global_handle_count += 1;
1688 } else if (node->state() == Node::NEAR_DEATH) {
1689 *stats->near_death_global_handle_count += 1;
1690 } else if (node->state() == Node::FREE) {
1691 *stats->free_global_handle_count += 1;
1692 }
1693 }
1694 }
1695
1696 #ifdef DEBUG
1697
PrintStats()1698 void GlobalHandles::PrintStats() {
1699 int total = 0;
1700 int weak = 0;
1701 int pending = 0;
1702 int near_death = 0;
1703 int destroyed = 0;
1704
1705 for (Node* node : *regular_nodes_) {
1706 total++;
1707 if (node->state() == Node::WEAK) weak++;
1708 if (node->state() == Node::PENDING) pending++;
1709 if (node->state() == Node::NEAR_DEATH) near_death++;
1710 if (node->state() == Node::FREE) destroyed++;
1711 }
1712
1713 PrintF("Global Handle Statistics:\n");
1714 PrintF(" allocated memory = %zuB\n", total * sizeof(Node));
1715 PrintF(" # weak = %d\n", weak);
1716 PrintF(" # pending = %d\n", pending);
1717 PrintF(" # near_death = %d\n", near_death);
1718 PrintF(" # free = %d\n", destroyed);
1719 PrintF(" # total = %d\n", total);
1720 }
1721
Print()1722 void GlobalHandles::Print() {
1723 PrintF("Global handles:\n");
1724 for (Node* node : *regular_nodes_) {
1725 PrintF(" handle %p to %p%s\n", node->location().ToVoidPtr(),
1726 reinterpret_cast<void*>(node->object().ptr()),
1727 node->IsWeak() ? " (weak)" : "");
1728 }
1729 }
1730
1731 #endif
1732
~EternalHandles()1733 EternalHandles::~EternalHandles() {
1734 for (Address* block : blocks_) delete[] block;
1735 }
1736
IterateAllRoots(RootVisitor * visitor)1737 void EternalHandles::IterateAllRoots(RootVisitor* visitor) {
1738 int limit = size_;
1739 for (Address* block : blocks_) {
1740 DCHECK_GT(limit, 0);
1741 visitor->VisitRootPointers(Root::kEternalHandles, nullptr,
1742 FullObjectSlot(block),
1743 FullObjectSlot(block + Min(limit, kSize)));
1744 limit -= kSize;
1745 }
1746 }
1747
IterateYoungRoots(RootVisitor * visitor)1748 void EternalHandles::IterateYoungRoots(RootVisitor* visitor) {
1749 for (int index : young_node_indices_) {
1750 visitor->VisitRootPointer(Root::kEternalHandles, nullptr,
1751 FullObjectSlot(GetLocation(index)));
1752 }
1753 }
1754
PostGarbageCollectionProcessing()1755 void EternalHandles::PostGarbageCollectionProcessing() {
1756 size_t last = 0;
1757 for (int index : young_node_indices_) {
1758 if (ObjectInYoungGeneration(Object(*GetLocation(index)))) {
1759 young_node_indices_[last++] = index;
1760 }
1761 }
1762 DCHECK_LE(last, young_node_indices_.size());
1763 young_node_indices_.resize(last);
1764 }
1765
// Allocates a new eternal-handle slot for |object| and writes its index to
// |*index| (which must arrive as kInvalidIndex). For the null Object this is
// a no-op and the caller's index stays kInvalidIndex.
void EternalHandles::Create(Isolate* isolate, Object object, int* index) {
  DCHECK_EQ(kInvalidIndex, *index);
  if (object == Object()) return;
  // the_hole marks unused slots in a block; it must never be stored itself.
  Object the_hole = ReadOnlyRoots(isolate).the_hole_value();
  DCHECK_NE(the_hole, object);
  int block = size_ >> kShift;  // block that holds slot number size_
  int offset = size_ & kMask;   // position of that slot within the block
  // Need to resize.
  if (offset == 0) {
    // Fresh blocks are fully initialized with the_hole so the DCHECK below
    // can verify the target slot is still unused.
    Address* next_block = new Address[kSize];
    MemsetPointer(FullObjectSlot(next_block), the_hole, kSize);
    blocks_.push_back(next_block);
  }
  DCHECK_EQ(the_hole.ptr(), blocks_[block][offset]);
  blocks_[block][offset] = object.ptr();
  // Record young-generation objects so minor GC processing can visit and
  // later prune them (see IterateYoungRoots / PostGarbageCollectionProcessing).
  if (ObjectInYoungGeneration(object)) {
    young_node_indices_.push_back(size_);
  }
  // Hand out the slot number, then bump the high-water mark.
  *index = size_++;
}
1786
1787 } // namespace internal
1788 } // namespace v8
1789