1 // Copyright 2009 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/handles/global-handles.h"
6 
7 #include <algorithm>
8 #include <atomic>
9 #include <climits>
10 #include <cstdint>
11 #include <map>
12 
13 #include "include/v8-traced-handle.h"
14 #include "src/api/api-inl.h"
15 #include "src/base/bits.h"
16 #include "src/base/compiler-specific.h"
17 #include "src/base/sanitizer/asan.h"
18 #include "src/common/allow-deprecated.h"
19 #include "src/common/globals.h"
20 #include "src/execution/vm-state-inl.h"
21 #include "src/heap/base/stack.h"
22 #include "src/heap/embedder-tracing.h"
23 #include "src/heap/heap-inl.h"
24 #include "src/heap/heap-write-barrier-inl.h"
25 #include "src/heap/heap-write-barrier.h"
26 #include "src/init/v8.h"
27 #include "src/logging/counters.h"
28 #include "src/objects/objects-inl.h"
29 #include "src/objects/slots.h"
30 #include "src/objects/visitors.h"
31 #include "src/tasks/cancelable-task.h"
32 #include "src/tasks/task-utils.h"
33 #include "src/utils/utils.h"
34 
35 namespace v8 {
36 namespace internal {
37 
38 namespace {
39 
40 constexpr size_t kBlockSize = 256;
41 
42 }  // namespace
43 
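// A NodeBlock is a fixed-size chunk of kBlockSize nodes. Blocks are chained
// into a singly-linked list of all blocks (next_) and, while at least one of
// their nodes is in use, into a doubly-linked list of used blocks
// (next_used_/prev_used_). Mark bits used during concurrent marking are kept
// per block in mark_bits_ rather than in the nodes themselves.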
44 template <class _NodeType>
45 class GlobalHandles::NodeBlock final {
46  public:
47   using BlockType = NodeBlock<_NodeType>;
48   using NodeType = _NodeType;
49 
50   V8_INLINE static const NodeBlock* From(const NodeType* node);
51   V8_INLINE static NodeBlock* From(NodeType* node);
52 
53   NodeBlock(GlobalHandles* global_handles,
54             GlobalHandles::NodeSpace<NodeType>* space,
55             NodeBlock* next) V8_NOEXCEPT : next_(next),
56                                            global_handles_(global_handles),
57                                            space_(space) {}
58 
59   NodeBlock(const NodeBlock&) = delete;
60   NodeBlock& operator=(const NodeBlock&) = delete;
61 
62   NodeType* at(size_t index) { return &nodes_[index]; }
63   const NodeType* at(size_t index) const { return &nodes_[index]; }
64   GlobalHandles::NodeSpace<NodeType>* space() const { return space_; }
65   GlobalHandles* global_handles() const { return global_handles_; }
66 
67   V8_INLINE bool IncreaseUsage();
68   V8_INLINE bool DecreaseUsage();
69 
70   V8_INLINE void ListAdd(NodeBlock** top);
71   V8_INLINE void ListRemove(NodeBlock** top);
72 
73   NodeBlock* next() const { return next_; }
74   NodeBlock* next_used() const { return next_used_; }
75 
76   void set_markbit(size_t index) {
77     const auto [cell, bit] = CellAndBit(index);
78     reinterpret_cast<std::atomic<CellType>&>(mark_bits_[cell])
79         .fetch_or(CellType{1} << bit, std::memory_order_relaxed);
80   }
81 
82   void clear_markbit(size_t index) {
83     const auto [cell, bit] = CellAndBit(index);
84     mark_bits_[cell] &= ~(CellType{1} << bit);
85   }
86 
87   bool markbit(size_t index) const {
88     const auto [cell, bit] = CellAndBit(index);
89     return mark_bits_[cell] & CellType{1} << bit;
90   }
91 
92  private:
93   using CellType = uint32_t;
94 
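  // Maps a node index to its mark bit: each 32-bit cell covers 32 node
  // indices. For example, index 37 lands in cell 1 (37 >> 5) at bit 5 (37 & 31).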
95   std::tuple<CellType, CellType> CellAndBit(size_t index) const {
96     static constexpr CellType kMarkBitCellSizeLog2 = 5;
97     static_assert(base::bits::IsPowerOfTwo(kBlockSize),
98                   "Block size must be power of two.");
99     static_assert(
100         sizeof(CellType) * CHAR_BIT == (CellType{1} << kMarkBitCellSizeLog2),
101         "Markbit CellType not matching defined log2 size.");
102     static constexpr CellType kCellMask =
103         (CellType{1} << kMarkBitCellSizeLog2) - 1;
104     return {static_cast<CellType>(index >> kMarkBitCellSizeLog2),
105             index & kCellMask};
106   }
107 
108   NodeType nodes_[kBlockSize];
109   NodeBlock* const next_;
110   GlobalHandles* const global_handles_;
111   GlobalHandles::NodeSpace<NodeType>* const space_;
112   NodeBlock* next_used_ = nullptr;
113   NodeBlock* prev_used_ = nullptr;
114   uint32_t used_nodes_ = 0;
115   CellType mark_bits_[kBlockSize / (sizeof(CellType) * CHAR_BIT)] = {0};
116 };
117 
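// Recovering the owning block from a node: nodes_ is the first member of
// NodeBlock, so subtracting a node's index from its address yields the block's
// first node, which aliases the block itself.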
118 template <class NodeType>
119 const GlobalHandles::NodeBlock<NodeType>*
120 GlobalHandles::NodeBlock<NodeType>::From(const NodeType* node) {
121   const NodeType* firstNode = node - node->index();
122   const BlockType* block = reinterpret_cast<const BlockType*>(firstNode);
123   DCHECK_EQ(node, block->at(node->index()));
124   return block;
125 }
126 
127 template <class NodeType>
128 GlobalHandles::NodeBlock<NodeType>* GlobalHandles::NodeBlock<NodeType>::From(
129     NodeType* node) {
130   NodeType* firstNode = node - node->index();
131   BlockType* block = reinterpret_cast<BlockType*>(firstNode);
132   DCHECK_EQ(node, block->at(node->index()));
133   return block;
134 }
135 
136 template <class NodeType>
137 bool GlobalHandles::NodeBlock<NodeType>::IncreaseUsage() {
138   DCHECK_LT(used_nodes_, kBlockSize);
139   return used_nodes_++ == 0;
140 }
141 
142 template <class NodeType>
143 void GlobalHandles::NodeBlock<NodeType>::ListAdd(BlockType** top) {
144   BlockType* old_top = *top;
145   *top = this;
146   next_used_ = old_top;
147   prev_used_ = nullptr;
148   if (old_top != nullptr) {
149     old_top->prev_used_ = this;
150   }
151 }
152 
153 template <class NodeType>
154 bool GlobalHandles::NodeBlock<NodeType>::DecreaseUsage() {
155   DCHECK_GT(used_nodes_, 0);
156   return --used_nodes_ == 0;
157 }
158 
159 template <class NodeType>
160 void GlobalHandles::NodeBlock<NodeType>::ListRemove(BlockType** top) {
161   if (next_used_ != nullptr) next_used_->prev_used_ = prev_used_;
162   if (prev_used_ != nullptr) prev_used_->next_used_ = next_used_;
163   if (this == *top) {
164     *top = next_used_;
165   }
166 }
167 
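// Forward iterator over all node slots of all blocks in the "used" list. Note
// that it visits every slot of a used block, including currently free nodes;
// callers filter on the node state where required.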
168 template <class BlockType>
169 class GlobalHandles::NodeIterator final {
170  public:
171   using NodeType = typename BlockType::NodeType;
172 
173   // Iterator traits.
174   using iterator_category = std::forward_iterator_tag;
175   using difference_type = std::ptrdiff_t;
176   using value_type = NodeType*;
177   using reference = value_type;
178   using pointer = value_type*;
179 
180   explicit NodeIterator(BlockType* block) V8_NOEXCEPT : block_(block) {}
181   NodeIterator(NodeIterator&& other) V8_NOEXCEPT : block_(other.block_),
182                                                    index_(other.index_) {}
183 
184   NodeIterator(const NodeIterator&) = delete;
185   NodeIterator& operator=(const NodeIterator&) = delete;
186 
187   bool operator==(const NodeIterator& other) const {
188     return block_ == other.block_;
189   }
190   bool operator!=(const NodeIterator& other) const {
191     return block_ != other.block_;
192   }
193 
194   NodeIterator& operator++() {
195     if (++index_ < kBlockSize) return *this;
196     index_ = 0;
197     block_ = block_->next_used();
198     return *this;
199   }
200 
201   NodeType* operator*() { return block_->at(index_); }
202   NodeType* operator->() { return block_->at(index_); }
203 
204  private:
205   BlockType* block_ = nullptr;
206   size_t index_ = 0;
207 };
208 
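// A NodeSpace owns the blocks for one node type. Allocation is free-list
// based: when the free list is empty a new block is allocated and all of its
// nodes are pushed onto the list, so Acquire() and Free() are O(1).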
209 template <class NodeType>
210 class GlobalHandles::NodeSpace final {
211  public:
212   using BlockType = NodeBlock<NodeType>;
213   using iterator = NodeIterator<BlockType>;
214 
215   static NodeSpace* From(NodeType* node);
216   static void Release(NodeType* node);
217 
218   explicit NodeSpace(GlobalHandles* global_handles) V8_NOEXCEPT
219       : global_handles_(global_handles) {}
220   ~NodeSpace();
221 
222   V8_INLINE NodeType* Acquire(Object object);
223 
224   iterator begin() { return iterator(first_used_block_); }
225   iterator end() { return iterator(nullptr); }
226 
227   size_t TotalSize() const { return blocks_ * sizeof(NodeType) * kBlockSize; }
228   size_t handles_count() const { return handles_count_; }
229 
230  private:
231   void PutNodesOnFreeList(BlockType* block);
232   V8_INLINE void Free(NodeType* node);
233 
234   GlobalHandles* const global_handles_;
235   BlockType* first_block_ = nullptr;
236   BlockType* first_used_block_ = nullptr;
237   NodeType* first_free_ = nullptr;
238   size_t blocks_ = 0;
239   size_t handles_count_ = 0;
240 };
241 
242 template <class NodeType>
243 GlobalHandles::NodeSpace<NodeType>::~NodeSpace() {
244   auto* block = first_block_;
245   while (block != nullptr) {
246     auto* tmp = block->next();
247     delete block;
248     block = tmp;
249   }
250 }
251 
252 template <class NodeType>
253 NodeType* GlobalHandles::NodeSpace<NodeType>::Acquire(Object object) {
254   if (first_free_ == nullptr) {
255     first_block_ = new BlockType(global_handles_, this, first_block_);
256     blocks_++;
257     PutNodesOnFreeList(first_block_);
258   }
259   DCHECK_NOT_NULL(first_free_);
260   NodeType* node = first_free_;
261   first_free_ = first_free_->next_free();
262   node->Acquire(object);
263   BlockType* block = BlockType::From(node);
264   if (block->IncreaseUsage()) {
265     block->ListAdd(&first_used_block_);
266   }
267   global_handles_->isolate()->counters()->global_handles()->Increment();
268   handles_count_++;
269   DCHECK(node->IsInUse());
270   return node;
271 }
272 
273 template <class NodeType>
274 void GlobalHandles::NodeSpace<NodeType>::PutNodesOnFreeList(BlockType* block) {
275   for (int32_t i = kBlockSize - 1; i >= 0; --i) {
276     NodeType* node = block->at(i);
277     const uint8_t index = static_cast<uint8_t>(i);
278     DCHECK_EQ(i, index);
279     node->set_index(index);
280     node->Free(first_free_);
281     first_free_ = node;
282   }
283 }
284 
285 template <class NodeType>
286 void GlobalHandles::NodeSpace<NodeType>::Release(NodeType* node) {
287   BlockType* block = BlockType::From(node);
288   block->space()->Free(node);
289 }
290 
291 template <class NodeType>
292 void GlobalHandles::NodeSpace<NodeType>::Free(NodeType* node) {
293   node->Release(first_free_);
294   first_free_ = node;
295   BlockType* block = BlockType::From(node);
296   if (block->DecreaseUsage()) {
297     block->ListRemove(&first_used_block_);
298   }
299   global_handles_->isolate()->counters()->global_handles()->Decrement();
300   handles_count_--;
301 }
302 
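// CRTP base class for the concrete node types below. The field order
// (object_, class_id_, flags_) is asserted in the constructor to match the
// offsets that the Internals helpers in the public headers expect.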
303 template <class Child>
304 class NodeBase {
305  public:
306   static const Child* FromLocation(const Address* location) {
307     return reinterpret_cast<const Child*>(location);
308   }
309 
310   static Child* FromLocation(Address* location) {
311     return reinterpret_cast<Child*>(location);
312   }
313 
314   NodeBase() {
315     DCHECK_EQ(offsetof(NodeBase, object_), 0);
316     DCHECK_EQ(offsetof(NodeBase, class_id_), Internals::kNodeClassIdOffset);
317     DCHECK_EQ(offsetof(NodeBase, flags_), Internals::kNodeFlagsOffset);
318   }
319 
320 #ifdef ENABLE_HANDLE_ZAPPING
321   ~NodeBase() {
322     ClearFields();
323     data_.next_free = nullptr;
324     index_ = 0;
325   }
326 #endif
327 
328   void Free(Child* free_list) {
329     ClearFields();
330     AsChild()->MarkAsFree();
331     data_.next_free = free_list;
332   }
333 
334   void Acquire(Object object) {
335     DCHECK(!AsChild()->IsInUse());
336     CheckFieldsAreCleared();
337     reinterpret_cast<std::atomic<Address>*>(&object_)->store(
338         object.ptr(), std::memory_order_relaxed);
339     AsChild()->MarkAsUsed();
340     data_.parameter = nullptr;
341     DCHECK(AsChild()->IsInUse());
342   }
343 
344   void Release(Child* free_list) {
345     DCHECK(AsChild()->IsInUse());
346     Free(free_list);
347     DCHECK(!AsChild()->IsInUse());
348   }
349 
350   Object object() const { return Object(object_); }
351   FullObjectSlot location() { return FullObjectSlot(&object_); }
352   Handle<Object> handle() { return Handle<Object>(&object_); }
353 
354   uint8_t index() const { return index_; }
355   void set_index(uint8_t value) { index_ = value; }
356 
357   uint16_t wrapper_class_id() const { return class_id_; }
358   bool has_wrapper_class_id() const {
359     return class_id_ != v8::HeapProfiler::kPersistentHandleNoClassId;
360   }
361 
362   // Accessors for next free node in the free list.
363   Child* next_free() {
364     DCHECK(!AsChild()->IsInUse());
365     return data_.next_free;
366   }
367 
368   void set_parameter(void* parameter) {
369     DCHECK(AsChild()->IsInUse());
370     data_.parameter = parameter;
371   }
372   void* parameter() const {
373     DCHECK(AsChild()->IsInUse());
374     return data_.parameter;
375   }
376 
377  protected:
378   Child* AsChild() { return reinterpret_cast<Child*>(this); }
379   const Child* AsChild() const { return reinterpret_cast<const Child*>(this); }
380 
381   void ClearFields() {
382     // Zap the values for eager trapping.
383     object_ = kGlobalHandleZapValue;
384     class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId;
385     AsChild()->ClearImplFields();
386   }
387 
388   void CheckFieldsAreCleared() {
389     DCHECK_EQ(kGlobalHandleZapValue, object_);
390     DCHECK_EQ(v8::HeapProfiler::kPersistentHandleNoClassId, class_id_);
391     AsChild()->CheckImplFieldsAreCleared();
392   }
393 
394   // Storage for object pointer.
395   //
396   // Placed first to avoid offset computation. The stored data is equivalent to
397   // an Object. It is stored as a plain Address for convenience (smallest number
398   // of casts), and because it is a private implementation detail: the public
399   // interface provides type safety.
400   Address object_;
401 
402   // Class id set by the embedder.
403   uint16_t class_id_;
404 
405   // Index in the containing handle block.
406   uint8_t index_;
407 
408   uint8_t flags_;
409 
410   // The meaning of this field depends on node state:
411   // - Node in free list: Stores next free node pointer.
412   // - Otherwise, specific to the node implementation.
413   union {
414     Child* next_free;
415     void* parameter;
416   } data_;
417 };
418 
419 namespace {
420 
421 void ExtractInternalFields(JSObject jsobject, void** embedder_fields, int len) {
422   int field_count = jsobject.GetEmbedderFieldCount();
423   Isolate* isolate = GetIsolateForSandbox(jsobject);
424   for (int i = 0; i < len; ++i) {
425     if (field_count == i) break;
426     void* pointer;
427     if (EmbedderDataSlot(jsobject, i).ToAlignedPointer(isolate, &pointer)) {
428       embedder_fields[i] = pointer;
429     }
430   }
431 }
432 
433 }  // namespace
434 
435 class GlobalHandles::Node final : public NodeBase<GlobalHandles::Node> {
436  public:
437   // State transition diagram:
438   // FREE -> NORMAL <-> WEAK -> PENDING -> NEAR_DEATH -> { NORMAL, WEAK, FREE }
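  // In terms of the methods below: Acquire() moves FREE -> NORMAL, MakeWeak()
  // moves NORMAL -> WEAK, MarkPending() moves WEAK -> PENDING, and the
  // phantom/finalizer processing moves PENDING -> NEAR_DEATH before the node
  // is either resurrected or released back to FREE.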
439   enum State {
440     FREE = 0,
441     NORMAL,      // Normal global handle.
442     WEAK,        // Flagged as weak but not yet finalized.
443     PENDING,     // Has been recognized as only reachable by weak handles.
444     NEAR_DEATH,  // Callback has informed the handle is near death.
445     NUMBER_OF_NODE_STATES
446   };
447 
448   Node() {
449     STATIC_ASSERT(static_cast<int>(NodeState::kMask) ==
450                   Internals::kNodeStateMask);
451     STATIC_ASSERT(WEAK == Internals::kNodeStateIsWeakValue);
452     STATIC_ASSERT(PENDING == Internals::kNodeStateIsPendingValue);
453     set_in_young_list(false);
454   }
455 
456   Node(const Node&) = delete;
457   Node& operator=(const Node&) = delete;
458 
459   const char* label() const {
460     return state() == NORMAL ? reinterpret_cast<char*>(data_.parameter)
461                              : nullptr;
462   }
463 
464   // State and flag accessors.
465 
466   State state() const { return NodeState::decode(flags_); }
467   void set_state(State state) { flags_ = NodeState::update(flags_, state); }
468 
469   bool is_in_young_list() const { return IsInYoungList::decode(flags_); }
470   void set_in_young_list(bool v) { flags_ = IsInYoungList::update(flags_, v); }
471 
472   WeaknessType weakness_type() const {
473     return NodeWeaknessType::decode(flags_);
474   }
475   void set_weakness_type(WeaknessType weakness_type) {
476     flags_ = NodeWeaknessType::update(flags_, weakness_type);
477   }
478 
479   bool IsWeak() const { return state() == WEAK; }
480 
481   bool IsInUse() const { return state() != FREE; }
482 
483   bool IsPhantomCallback() const {
484     return weakness_type() == PHANTOM_WEAK ||
485            weakness_type() == PHANTOM_WEAK_2_EMBEDDER_FIELDS;
486   }
487 
488   bool IsPhantomResetHandle() const {
489     return weakness_type() == PHANTOM_WEAK_RESET_HANDLE;
490   }
491 
492   bool IsFinalizerHandle() const { return weakness_type() == FINALIZER_WEAK; }
493 
494   bool IsPendingPhantomCallback() const {
495     return state() == PENDING && IsPhantomCallback();
496   }
497 
498   bool IsPendingPhantomResetHandle() const {
499     return state() == PENDING && IsPhantomResetHandle();
500   }
501 
502   bool IsPendingFinalizer() const {
503     return state() == PENDING && weakness_type() == FINALIZER_WEAK;
504   }
505 
506   bool IsPending() const { return state() == PENDING; }
507 
508   bool IsRetainer() const {
509     return state() != FREE &&
510            !(state() == NEAR_DEATH && weakness_type() != FINALIZER_WEAK);
511   }
512 
513   bool IsStrongRetainer() const { return state() == NORMAL; }
514 
515   bool IsWeakRetainer() const {
516     return state() == WEAK || state() == PENDING ||
517            (state() == NEAR_DEATH && weakness_type() == FINALIZER_WEAK);
518   }
519 
520   void MarkPending() {
521     DCHECK(state() == WEAK);
522     set_state(PENDING);
523   }
524 
525   bool has_callback() const { return weak_callback_ != nullptr; }
526 
527   // Accessors for next free node in the free list.
528   Node* next_free() {
529     DCHECK_EQ(FREE, state());
530     return data_.next_free;
531   }
532 
533   void MakeWeak(void* parameter,
534                 WeakCallbackInfo<void>::Callback phantom_callback,
535                 v8::WeakCallbackType type) {
536     DCHECK_NOT_NULL(phantom_callback);
537     DCHECK(IsInUse());
538     CHECK_NE(object_, kGlobalHandleZapValue);
539     set_state(WEAK);
540     switch (type) {
541       case v8::WeakCallbackType::kParameter:
542         set_weakness_type(PHANTOM_WEAK);
543         break;
544       case v8::WeakCallbackType::kInternalFields:
545         set_weakness_type(PHANTOM_WEAK_2_EMBEDDER_FIELDS);
546         break;
547         START_ALLOW_USE_DEPRECATED()
548       case v8::WeakCallbackType::kFinalizer:
549         set_weakness_type(FINALIZER_WEAK);
550         break;
551         END_ALLOW_USE_DEPRECATED()
552     }
553     set_parameter(parameter);
554     weak_callback_ = phantom_callback;
555   }
556 
557   void MakeWeak(Address** location_addr) {
558     DCHECK(IsInUse());
559     CHECK_NE(object_, kGlobalHandleZapValue);
560     set_state(WEAK);
561     set_weakness_type(PHANTOM_WEAK_RESET_HANDLE);
562     set_parameter(location_addr);
563     weak_callback_ = nullptr;
564   }
565 
566   void* ClearWeakness() {
567     DCHECK(IsInUse());
568     void* p = parameter();
569     set_state(NORMAL);
570     set_parameter(nullptr);
571     return p;
572   }
573 
574   void AnnotateStrongRetainer(const char* label) {
575     DCHECK_EQ(state(), NORMAL);
576     data_.parameter = const_cast<char*>(label);
577   }
578 
579   void CollectPhantomCallbackData(
580       std::vector<std::pair<Node*, PendingPhantomCallback>>*
581           pending_phantom_callbacks) {
582     DCHECK(weakness_type() == PHANTOM_WEAK ||
583            weakness_type() == PHANTOM_WEAK_2_EMBEDDER_FIELDS);
584     DCHECK(state() == PENDING);
585     DCHECK_NOT_NULL(weak_callback_);
586 
587     void* embedder_fields[v8::kEmbedderFieldsInWeakCallback] = {nullptr,
588                                                                 nullptr};
589     if (weakness_type() != PHANTOM_WEAK && object().IsJSObject()) {
590       ExtractInternalFields(JSObject::cast(object()), embedder_fields,
591                             v8::kEmbedderFieldsInWeakCallback);
592     }
593 
594     // Zap with something dangerous.
595     location().store(Object(0xCA11));
596 
597     pending_phantom_callbacks->push_back(std::make_pair(
598         this,
599         PendingPhantomCallback(weak_callback_, parameter(), embedder_fields)));
600     DCHECK(IsInUse());
601     set_state(NEAR_DEATH);
602   }
603 
604   void ResetPhantomHandle() {
605     DCHECK_EQ(PHANTOM_WEAK_RESET_HANDLE, weakness_type());
606     DCHECK_EQ(PENDING, state());
607     DCHECK_NULL(weak_callback_);
608     Address** handle = reinterpret_cast<Address**>(parameter());
609     *handle = nullptr;
610     NodeSpace<Node>::Release(this);
611   }
612 
613   void PostGarbageCollectionProcessing(Isolate* isolate) {
614     // This method invokes a finalizer. Updating the method name would require
615     // adjusting CFI blocklist as weak_callback_ is invoked on the wrong type.
616     CHECK(IsPendingFinalizer());
617     set_state(NEAR_DEATH);
618     // Check that we are not passing a finalized external string to
619     // the callback.
620     DCHECK(!object().IsExternalOneByteString() ||
621            ExternalOneByteString::cast(object()).resource() != nullptr);
622     DCHECK(!object().IsExternalTwoByteString() ||
623            ExternalTwoByteString::cast(object()).resource() != nullptr);
624     // Leaving V8.
625     VMState<EXTERNAL> vmstate(isolate);
626     HandleScope handle_scope(isolate);
627     void* embedder_fields[v8::kEmbedderFieldsInWeakCallback] = {nullptr,
628                                                                 nullptr};
629     v8::WeakCallbackInfo<void> data(reinterpret_cast<v8::Isolate*>(isolate),
630                                     parameter(), embedder_fields, nullptr);
631     weak_callback_(data);
632     // For finalizers the handle must have either been reset or made strong.
633     // Both cases reset the state.
634     CHECK_NE(NEAR_DEATH, state());
635   }
636 
637   void MarkAsFree() { set_state(FREE); }
638   void MarkAsUsed() { set_state(NORMAL); }
639 
640   GlobalHandles* global_handles() {
641     return NodeBlock<Node>::From(this)->global_handles();
642   }
643 
644  private:
645   // Fields that are not used for managing node memory.
646   void ClearImplFields() { weak_callback_ = nullptr; }
647 
648   void CheckImplFieldsAreCleared() { DCHECK_EQ(nullptr, weak_callback_); }
649 
650   // This bit field stores the node State together with the in_young_list flag
651   // and the node's WeaknessType.
652   using NodeState = base::BitField8<State, 0, 3>;
653   using IsInYoungList = NodeState::Next<bool, 1>;
654   using NodeWeaknessType = IsInYoungList::Next<WeaknessType, 2>;
655 
656   // Handle specific callback - might be a weak reference in disguise.
657   WeakCallbackInfo<void>::Callback weak_callback_;
658 
659   friend class NodeBase<Node>;
660 };
661 
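// Node type backing v8::TracedReference and related traced handles. In
// addition to the state bits it tracks whether the node is considered a root
// for non-tracing GCs (is_root), whether it lives in the on-stack space
// (is_on_stack), and a per-block mark bit used by concurrent marking.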
662 class GlobalHandles::TracedNode final
663     : public NodeBase<GlobalHandles::TracedNode> {
664  public:
665   TracedNode() { set_in_young_list(false); }
666 
667   // Copy and move ctors are used when a TracedNode is recorded for on-stack
668   // data structures. (Older compilers may select the copy instead of the move
669   // ctor.)
670   TracedNode(TracedNode&& other) V8_NOEXCEPT = default;
671   TracedNode(const TracedNode& other) V8_NOEXCEPT = default;
672 
673   enum State { FREE = 0, NORMAL, NEAR_DEATH };
674 
675   State state() const { return NodeState::decode(flags_); }
676   void set_state(State state) { flags_ = NodeState::update(flags_, state); }
677 
678   void MarkAsFree() { set_state(FREE); }
679   void MarkAsUsed() { set_state(NORMAL); }
680   bool IsInUse() const { return state() != FREE; }
681   bool IsRetainer() const { return state() == NORMAL; }
682 
683   bool is_in_young_list() const { return IsInYoungList::decode(flags_); }
684   void set_in_young_list(bool v) { flags_ = IsInYoungList::update(flags_, v); }
685 
686   bool is_root() const { return IsRoot::decode(flags_); }
687   void set_root(bool v) { flags_ = IsRoot::update(flags_, v); }
688 
689   void set_markbit() {
690     NodeBlock<TracedNode>::From(this)->set_markbit(index());
691   }
692 
693   bool markbit() const {
694     return NodeBlock<TracedNode>::From(this)->markbit(index());
695   }
696   void clear_markbit() {
697     NodeBlock<TracedNode>::From(this)->clear_markbit(index());
698   }
699 
700   bool is_on_stack() const { return IsOnStack::decode(flags_); }
701   void set_is_on_stack(bool v) { flags_ = IsOnStack::update(flags_, v); }
702 
703   void clear_object() {
704     reinterpret_cast<std::atomic<Address>*>(&object_)->store(
705         kNullAddress, std::memory_order_relaxed);
706   }
707 
708   void CopyObjectReference(const TracedNode& other) {
709     reinterpret_cast<std::atomic<Address>*>(&object_)->store(
710         other.object_, std::memory_order_relaxed);
711   }
712 
713   void ResetPhantomHandle() {
714     DCHECK(IsInUse());
715     NodeSpace<TracedNode>::Release(this);
716     DCHECK(!IsInUse());
717   }
718 
719   static void Verify(GlobalHandles* global_handles, const Address* const* slot);
720 
721  protected:
722   // Various state is managed in a bit field. Mark bits are used concurrently
723   // and held externally in a NodeBlock.
724   using NodeState = base::BitField8<State, 0, 2>;
725   using IsInYoungList = NodeState::Next<bool, 1>;
726   using IsRoot = IsInYoungList::Next<bool, 1>;
727   using IsOnStack = IsRoot::Next<bool, 1>;
728   void ClearImplFields() {
729     set_root(true);
730     set_is_on_stack(false);
731   }
732 
733   void CheckImplFieldsAreCleared() const { DCHECK(is_root()); }
734 
735   friend class NodeBase<GlobalHandles::TracedNode>;
736 };
737 
738 // Space to keep track of on-stack handles (e.g. TracedReference). Such
739 // references are treated as root for any V8 garbage collection. The data
740 // structure is self-healing and pessimistically filters outdated entries on
741 // insertion and iteration.
742 //
743 // Design doc: http://bit.ly/on-stack-traced-reference
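// Entries are keyed by the stack address of the slot; Acquire() triggers a
// cleanup pass every 256 insertions, and CleanupBelowCurrentStackPosition()
// drops all entries below the current stack pointer.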
744 class GlobalHandles::OnStackTracedNodeSpace final {
745  public:
746   static GlobalHandles* GetGlobalHandles(const TracedNode* on_stack_node) {
747     DCHECK(on_stack_node->is_on_stack());
748     return reinterpret_cast<const NodeEntry*>(on_stack_node)->global_handles;
749   }
750 
751   explicit OnStackTracedNodeSpace(GlobalHandles* global_handles)
752       : global_handles_(global_handles) {}
753 
754   void SetStackStart(void* stack_start) {
755     CHECK(on_stack_nodes_.empty());
756     stack_.SetStackStart(base::Stack::GetRealStackAddressForSlot(stack_start));
757   }
758 
759   V8_INLINE bool IsOnStack(uintptr_t slot) const;
760 
761   void Iterate(RootVisitor* v);
762   TracedNode* Acquire(Object value, uintptr_t address);
763   void CleanupBelowCurrentStackPosition();
764   void NotifyEmptyEmbedderStack();
765 
766   size_t NumberOfHandlesForTesting() const { return on_stack_nodes_.size(); }
767 
768  private:
769   struct NodeEntry {
770     TracedNode node;
771     // Used to get back to the owning GlobalHandles from a Node when copying.
772     // Must be laid out directly after the node.
773     GlobalHandles* global_handles;
774   };
775 
776   // Keeps track of registered handles. The data structure is cleaned on
777   // iteration and when adding new references using the current stack address.
778   // Cleaning is based on current stack address and the key of the map which is
779   // slightly different for ASAN configs -- see below.
780 #ifdef V8_USE_ADDRESS_SANITIZER
781   // Mapping from stack slots or real stack frames to the corresponding nodes.
782   // In case a reference is part of a fake frame, we map it to the real stack
783   // frame base instead of the actual stack slot. The list keeps all nodes for
784   // a particular real frame.
785   std::map<uintptr_t, std::list<NodeEntry>> on_stack_nodes_;
786 #else   // !V8_USE_ADDRESS_SANITIZER
787   // Mapping from stack slots to the corresponding nodes. We don't expect
788   // aliasing with overlapping lifetimes of nodes.
789   std::map<uintptr_t, NodeEntry> on_stack_nodes_;
790 #endif  // !V8_USE_ADDRESS_SANITIZER
791 
792   ::heap::base::Stack stack_;
793   GlobalHandles* global_handles_ = nullptr;
794   size_t acquire_count_ = 0;
795 };
796 
797 bool GlobalHandles::OnStackTracedNodeSpace::IsOnStack(uintptr_t slot) const {
798   // By the time this function is called, the stack start may not be set (i.e.
799   // SetStackStart() was not called). In that case, assume the slot is not on
800   // stack.
801   if (!stack_.stack_start()) return false;
802   return stack_.IsOnStack(reinterpret_cast<void*>(slot));
803 }
804 
805 void GlobalHandles::OnStackTracedNodeSpace::NotifyEmptyEmbedderStack() {
806   on_stack_nodes_.clear();
807 }
808 
809 void GlobalHandles::OnStackTracedNodeSpace::Iterate(RootVisitor* v) {
810 #ifdef V8_USE_ADDRESS_SANITIZER
811   for (auto& pair : on_stack_nodes_) {
812     for (auto& node_entry : pair.second) {
813       TracedNode& node = node_entry.node;
814       if (node.IsRetainer()) {
815         v->VisitRootPointer(Root::kGlobalHandles, "on-stack TracedReference",
816                             node.location());
817       }
818     }
819   }
820 #else   // !V8_USE_ADDRESS_SANITIZER
821   // Handles have been cleaned from the GC entry point which is higher up the
822   // stack.
823   for (auto& pair : on_stack_nodes_) {
824     TracedNode& node = pair.second.node;
825     if (node.IsRetainer()) {
826       v->VisitRootPointer(Root::kGlobalHandles, "on-stack TracedReference",
827                           node.location());
828     }
829   }
830 #endif  // !V8_USE_ADDRESS_SANITIZER
831 }
832 
833 GlobalHandles::TracedNode* GlobalHandles::OnStackTracedNodeSpace::Acquire(
834     Object value, uintptr_t slot) {
835   constexpr size_t kAcquireCleanupThresholdLog2 = 8;
836   constexpr size_t kAcquireCleanupThresholdMask =
837       (size_t{1} << kAcquireCleanupThresholdLog2) - 1;
838   DCHECK(IsOnStack(slot));
839   if (((acquire_count_++) & kAcquireCleanupThresholdMask) == 0) {
840     CleanupBelowCurrentStackPosition();
841   }
842   NodeEntry entry;
843   entry.node.Free(nullptr);
844   entry.global_handles = global_handles_;
845 #ifdef V8_USE_ADDRESS_SANITIZER
846   auto pair = on_stack_nodes_.insert(
847       {base::Stack::GetRealStackAddressForSlot(slot), {}});
848   pair.first->second.push_back(std::move(entry));
849   TracedNode* result = &(pair.first->second.back().node);
850 #else   // !V8_USE_ADDRESS_SANITIZER
851   auto pair = on_stack_nodes_.insert(
852       {base::Stack::GetRealStackAddressForSlot(slot), std::move(entry)});
853   if (!pair.second) {
854     // Insertion failed because there already was an entry present for that
855     // stack address. This can happen because cleanup is conservative about the
856     // stack limits it uses. Reusing the entry is fine as there's no aliasing of
857     // different references with the same stack slot.
858     pair.first->second.node.Free(nullptr);
859   }
860   TracedNode* result = &(pair.first->second.node);
861 #endif  // !V8_USE_ADDRESS_SANITIZER
862   result->Acquire(value);
863   result->set_is_on_stack(true);
864   return result;
865 }
866 
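// Erases all entries whose recorded stack address is at or below the current
// stack pointer. On the supported platforms the stack grows towards lower
// addresses, so such slots belong to frames that have already returned.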
867 void GlobalHandles::OnStackTracedNodeSpace::CleanupBelowCurrentStackPosition() {
868   if (on_stack_nodes_.empty()) return;
869   const uintptr_t stack_ptr = reinterpret_cast<uintptr_t>(
870       ::heap::base::Stack::GetCurrentStackPointerForLocalVariables());
871   const auto it = on_stack_nodes_.upper_bound(stack_ptr);
872   on_stack_nodes_.erase(on_stack_nodes_.begin(), it);
873 }
874 
875 // static
876 void GlobalHandles::EnableMarkingBarrier(Isolate* isolate) {
877   auto* global_handles = isolate->global_handles();
878   DCHECK(!global_handles->is_marking_);
879   global_handles->is_marking_ = true;
880 }
881 
882 // static
883 void GlobalHandles::DisableMarkingBarrier(Isolate* isolate) {
884   auto* global_handles = isolate->global_handles();
885   DCHECK(global_handles->is_marking_);
886   global_handles->is_marking_ = false;
887 }
888 
889 // static
890 void GlobalHandles::TracedNode::Verify(GlobalHandles* global_handles,
891                                        const Address* const* slot) {
892 #ifdef DEBUG
893   const TracedNode* node = FromLocation(*slot);
894   DCHECK(node->IsInUse());
895   bool slot_on_stack = global_handles->on_stack_nodes_->IsOnStack(
896       reinterpret_cast<uintptr_t>(slot));
897   DCHECK_EQ(slot_on_stack, node->is_on_stack());
898   if (!node->is_on_stack()) {
899     // On-heap nodes have separate lists for young generation processing.
900     bool is_young_gen_object = ObjectInYoungGeneration(node->object());
901     DCHECK_IMPLIES(is_young_gen_object, node->is_in_young_list());
902   }
903   bool in_young_list =
904       std::find(global_handles->traced_young_nodes_.begin(),
905                 global_handles->traced_young_nodes_.end(),
906                 node) != global_handles->traced_young_nodes_.end();
907   DCHECK_EQ(in_young_list, node->is_in_young_list());
908 #endif  // DEBUG
909 }
910 
911 void GlobalHandles::CleanupOnStackReferencesBelowCurrentStackPosition() {
912   on_stack_nodes_->CleanupBelowCurrentStackPosition();
913 }
914 
915 size_t GlobalHandles::NumberOfOnStackHandlesForTesting() {
916   return on_stack_nodes_->NumberOfHandlesForTesting();
917 }
918 
919 size_t GlobalHandles::TotalSize() const {
920   return regular_nodes_->TotalSize() + traced_nodes_->TotalSize();
921 }
922 
923 size_t GlobalHandles::UsedSize() const {
924   return regular_nodes_->handles_count() * sizeof(Node) +
925          traced_nodes_->handles_count() * sizeof(TracedNode);
926 }
927 
928 size_t GlobalHandles::handles_count() const {
929   return regular_nodes_->handles_count() + traced_nodes_->handles_count();
930 }
931 
932 void GlobalHandles::SetStackStart(void* stack_start) {
933   on_stack_nodes_->SetStackStart(stack_start);
934 }
935 
936 void GlobalHandles::NotifyEmptyEmbedderStack() {
937   on_stack_nodes_->NotifyEmptyEmbedderStack();
938 }
939 
940 GlobalHandles::GlobalHandles(Isolate* isolate)
941     : isolate_(isolate),
942       regular_nodes_(new NodeSpace<GlobalHandles::Node>(this)),
943       traced_nodes_(new NodeSpace<GlobalHandles::TracedNode>(this)),
944       on_stack_nodes_(new OnStackTracedNodeSpace(this)) {}
945 
946 GlobalHandles::~GlobalHandles() { regular_nodes_.reset(nullptr); }
947 
948 Handle<Object> GlobalHandles::Create(Object value) {
949   GlobalHandles::Node* result = regular_nodes_->Acquire(value);
950   if (ObjectInYoungGeneration(value) && !result->is_in_young_list()) {
951     young_nodes_.push_back(result);
952     result->set_in_young_list(true);
953   }
954   return result->handle();
955 }
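// How a handle typically reaches this code (sketch only; the actual entry
// points are the api.cc wrappers around the public handle classes, and the
// names below are illustrative):
//
//   v8::Global<v8::Object> g(isolate, obj);      // -> GlobalHandles::Create
//   g.SetWeak(&data, OnGone,
//             v8::WeakCallbackType::kParameter); // -> GlobalHandles::MakeWeak
//   g.Reset();                                   // -> GlobalHandles::Destroy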
956 
957 Handle<Object> GlobalHandles::Create(Address value) {
958   return Create(Object(value));
959 }
960 
961 Handle<Object> GlobalHandles::CreateTraced(Object value, Address* slot,
962                                            GlobalHandleStoreMode store_mode) {
963   return CreateTraced(
964       value, slot, store_mode,
965       on_stack_nodes_->IsOnStack(reinterpret_cast<uintptr_t>(slot)));
966 }
967 
968 Handle<Object> GlobalHandles::CreateTraced(Object value, Address* slot,
969                                            GlobalHandleStoreMode store_mode,
970                                            bool is_on_stack) {
971   GlobalHandles::TracedNode* result;
972   if (is_on_stack) {
973     result = on_stack_nodes_->Acquire(value, reinterpret_cast<uintptr_t>(slot));
974   } else {
975     result = traced_nodes_->Acquire(value);
976     if (ObjectInYoungGeneration(value) && !result->is_in_young_list()) {
977       traced_young_nodes_.push_back(result);
978       result->set_in_young_list(true);
979     }
980     // Nodes are black allocated for simplicity.
981     result->set_markbit();
982     if (store_mode != GlobalHandleStoreMode::kInitializingStore) {
983       WriteBarrier::MarkingFromGlobalHandle(value);
984     }
985   }
986   result->set_parameter(nullptr);
987   return result->handle();
988 }
989 
990 Handle<Object> GlobalHandles::CreateTraced(Address value, Address* slot,
991                                            GlobalHandleStoreMode store_mode) {
992   return CreateTraced(Object(value), slot, store_mode);
993 }
994 
995 Handle<Object> GlobalHandles::CopyGlobal(Address* location) {
996   DCHECK_NOT_NULL(location);
997   GlobalHandles* global_handles =
998       Node::FromLocation(location)->global_handles();
999 #ifdef VERIFY_HEAP
1000   if (i::FLAG_verify_heap) {
1001     Object(*location).ObjectVerify(global_handles->isolate());
1002   }
1003 #endif  // VERIFY_HEAP
1004   return global_handles->Create(*location);
1005 }
1006 
1007 namespace {
1008 void SetSlotThreadSafe(Address** slot, Address* val) {
1009   reinterpret_cast<std::atomic<Address*>*>(slot)->store(
1010       val, std::memory_order_relaxed);
1011 }
1012 }  // namespace
1013 
1014 // static
1015 void GlobalHandles::CopyTracedReference(const Address* const* from,
1016                                         Address** to) {
1017   DCHECK_NOT_NULL(*from);
1018   DCHECK_NULL(*to);
1019   const TracedNode* node = TracedNode::FromLocation(*from);
1020   GlobalHandles* global_handles =
1021       GlobalHandles::From(const_cast<TracedNode*>(node));
1022   Handle<Object> o = global_handles->CreateTraced(
1023       node->object(), reinterpret_cast<Address*>(to),
1024       GlobalHandleStoreMode::kAssigningStore);
1025   SetSlotThreadSafe(to, o.location());
1026   TracedNode::Verify(global_handles, from);
1027   TracedNode::Verify(global_handles, to);
1028 #ifdef VERIFY_HEAP
1029   if (i::FLAG_verify_heap) {
1030     Object(**to).ObjectVerify(global_handles->isolate());
1031   }
1032 #endif  // VERIFY_HEAP
1033 }
1034 
1035 void GlobalHandles::MoveGlobal(Address** from, Address** to) {
1036   DCHECK_NOT_NULL(*from);
1037   DCHECK_NOT_NULL(*to);
1038   DCHECK_EQ(*from, *to);
1039   Node* node = Node::FromLocation(*from);
1040   if (node->IsWeak() && node->IsPhantomResetHandle()) {
1041     node->set_parameter(to);
1042   }
1043 
1044   // - Strong handles do not require fixups.
1045   // - Weak handles with finalizers and callbacks are too general to fix up. For
1046   //   those the callers need to ensure consistency.
1047 }
1048 
1049 void GlobalHandles::MoveTracedReference(Address** from, Address** to) {
1050   // Fast path for moving from an empty reference.
1051   if (!*from) {
1052     DestroyTracedReference(*to);
1053     SetSlotThreadSafe(to, nullptr);
1054     return;
1055   }
1056 
1057   // Determine whether from or to is on the stack.
1058   TracedNode* from_node = TracedNode::FromLocation(*from);
1059   DCHECK(from_node->IsInUse());
1060   TracedNode* to_node = TracedNode::FromLocation(*to);
1061   GlobalHandles* global_handles = nullptr;
1062 #ifdef DEBUG
1063   global_handles = GlobalHandles::From(from_node);
1064 #endif  // DEBUG
1065   bool from_on_stack = from_node->is_on_stack();
1066   bool to_on_stack = false;
1067   if (!to_node) {
1068     // Figure out whether stack or heap to allow fast path for heap->heap move.
1069     global_handles = GlobalHandles::From(from_node);
1070     to_on_stack = global_handles->on_stack_nodes_->IsOnStack(
1071         reinterpret_cast<uintptr_t>(to));
1072   } else {
1073     to_on_stack = to_node->is_on_stack();
1074   }
1075 
1076   // Moving.
1077   if (from_on_stack || to_on_stack) {
1078     // Move involving a stack slot.
1079     if (!to_node) {
1080       DCHECK(global_handles);
1081       Handle<Object> o = global_handles->CreateTraced(
1082           from_node->object(), reinterpret_cast<Address*>(to),
1083           GlobalHandleStoreMode::kAssigningStore, to_on_stack);
1084       SetSlotThreadSafe(to, o.location());
1085       to_node = TracedNode::FromLocation(*to);
1086       DCHECK_IMPLIES(!to_node->is_on_stack(), to_node->markbit());
1087     } else {
1088       DCHECK(to_node->IsInUse());
1089       to_node->CopyObjectReference(*from_node);
1090       if (!to_node->is_on_stack() && !to_node->is_in_young_list() &&
1091           ObjectInYoungGeneration(to_node->object())) {
1092         global_handles = GlobalHandles::From(from_node);
1093         global_handles->traced_young_nodes_.push_back(to_node);
1094         to_node->set_in_young_list(true);
1095       }
1096       if (!to_on_stack) {
1097         WriteBarrier::MarkingFromGlobalHandle(to_node->object());
1098       }
1099     }
1100     DestroyTracedReference(*from);
1101     SetSlotThreadSafe(from, nullptr);
1102   } else {
1103     // Pure heap move.
1104     DestroyTracedReference(*to);
1105     SetSlotThreadSafe(to, *from);
1106     to_node = from_node;
1107     DCHECK_NOT_NULL(*from);
1108     DCHECK_NOT_NULL(*to);
1109     DCHECK_EQ(*from, *to);
1110     WriteBarrier::MarkingFromGlobalHandle(to_node->object());
1111     SetSlotThreadSafe(from, nullptr);
1112   }
1113   TracedNode::Verify(global_handles, to);
1114 }
1115 
1116 // static
1117 GlobalHandles* GlobalHandles::From(const TracedNode* node) {
1118   return node->is_on_stack()
1119              ? OnStackTracedNodeSpace::GetGlobalHandles(node)
1120              : NodeBlock<TracedNode>::From(node)->global_handles();
1121 }
1122 
1123 void GlobalHandles::MarkTraced(Address* location) {
1124   TracedNode* node = TracedNode::FromLocation(location);
1125   DCHECK(node->IsInUse());
1126   if (node->is_on_stack()) return;
1127   node->set_markbit();
1128 }
1129 
1130 void GlobalHandles::Destroy(Address* location) {
1131   if (location != nullptr) {
1132     NodeSpace<Node>::Release(Node::FromLocation(location));
1133   }
1134 }
1135 
1136 // static
1137 void GlobalHandles::DestroyTracedReference(Address* location) {
1138   if (location != nullptr) {
1139     TracedNode* node = TracedNode::FromLocation(location);
1140     if (node->is_on_stack()) {
1141       node->Release(nullptr);
1142       return;
1143     }
1144     DCHECK(!node->is_on_stack());
1145 
1146     auto* global_handles = GlobalHandles::From(node);
1147     // When marking is off the handle may be freed immediately. Note that this
1148     // also includes the case of invoking the first pass callbacks during the
1149     // atomic pause which requires releasing a node fully.
1150     if (!global_handles->is_marking_) {
1151       NodeSpace<TracedNode>::Release(node);
1152       return;
1153     }
1154 
1155     // Incremental marking is on. This also covers the scavenge case which
1156     // prohibits eagerly reclaiming nodes when marking is on during a scavenge.
1157     //
1158     // On-heap traced nodes are released in the atomic pause in
1159     // `IterateWeakRootsForPhantomHandles()` when they are discovered as not
1160     // marked.
1161     //
1162     // Eagerly clear out the object here to avoid needlessly marking it from
1163     // this point on. Also clear out callback and backreference for the version
1164     // with callbacks to avoid calling into possibly dead memory later.
1165     //
1166     // In the case this happens during incremental marking, the node may
1167     // still be spuriously marked as live and is then only reclaimed on the
1168     // next cycle.
1169     node->clear_object();
1170     node->set_parameter(nullptr);
1171   }
1172 }
1173 
1174 using GenericCallback = v8::WeakCallbackInfo<void>::Callback;
1175 
1176 void GlobalHandles::MakeWeak(Address* location, void* parameter,
1177                              GenericCallback phantom_callback,
1178                              v8::WeakCallbackType type) {
1179   Node::FromLocation(location)->MakeWeak(parameter, phantom_callback, type);
1180 }
1181 
1182 void GlobalHandles::MakeWeak(Address** location_addr) {
1183   Node::FromLocation(*location_addr)->MakeWeak(location_addr);
1184 }
1185 
1186 void* GlobalHandles::ClearWeakness(Address* location) {
1187   return Node::FromLocation(location)->ClearWeakness();
1188 }
1189 
1190 void GlobalHandles::AnnotateStrongRetainer(Address* location,
1191                                            const char* label) {
1192   Node::FromLocation(location)->AnnotateStrongRetainer(label);
1193 }
1194 
1195 bool GlobalHandles::IsWeak(Address* location) {
1196   return Node::FromLocation(location)->IsWeak();
1197 }
1198 
1199 DISABLE_CFI_PERF
1200 void GlobalHandles::IterateWeakRootsForFinalizers(RootVisitor* v) {
1201   for (Node* node : *regular_nodes_) {
1202     if (node->IsWeakRetainer() && node->state() == Node::PENDING) {
1203       DCHECK(!node->IsPhantomCallback());
1204       DCHECK(!node->IsPhantomResetHandle());
1205       // Finalizers need to survive.
1206       v->VisitRootPointer(Root::kGlobalHandles, node->label(),
1207                           node->location());
1208     }
1209   }
1210 }
1211 
1212 DISABLE_CFI_PERF
1213 void GlobalHandles::IterateWeakRootsForPhantomHandles(
1214     WeakSlotCallbackWithHeap should_reset_handle) {
1215   for (Node* node : *regular_nodes_) {
1216     if (node->IsWeakRetainer() &&
1217         should_reset_handle(isolate()->heap(), node->location())) {
1218       if (node->IsPhantomResetHandle()) {
1219         node->MarkPending();
1220         node->ResetPhantomHandle();
1221         ++number_of_phantom_handle_resets_;
1222       } else if (node->IsPhantomCallback()) {
1223         node->MarkPending();
1224         node->CollectPhantomCallbackData(&regular_pending_phantom_callbacks_);
1225       }
1226     }
1227   }
1228   for (TracedNode* node : *traced_nodes_) {
1229     if (!node->IsInUse()) continue;
1230     // Detect unreachable nodes first.
1231     if (!node->markbit()) {
1232       // The handle itself is unreachable. We can clear it even if the target V8
1233       // object is alive.
1234       node->ResetPhantomHandle();
1235       ++number_of_phantom_handle_resets_;
1236       continue;
1237     }
1238     // Clear the markbit for the next GC.
1239     node->clear_markbit();
1240     DCHECK(node->IsInUse());
1241     // Detect nodes with unreachable target objects.
1242     if (should_reset_handle(isolate()->heap(), node->location())) {
1243       node->ResetPhantomHandle();
1244       ++number_of_phantom_handle_resets_;
1245     }
1246   }
1247 }
1248 
1249 void GlobalHandles::IterateWeakRootsIdentifyFinalizers(
1250     WeakSlotCallbackWithHeap should_reset_handle) {
1251   for (Node* node : *regular_nodes_) {
1252     if (node->IsWeak() &&
1253         should_reset_handle(isolate()->heap(), node->location())) {
1254       if (node->IsFinalizerHandle()) {
1255         node->MarkPending();
1256       }
1257     }
1258   }
1259 }
1260 
1261 void GlobalHandles::IdentifyWeakUnmodifiedObjects(
1262     WeakSlotCallback is_unmodified) {
1263   if (!FLAG_reclaim_unmodified_wrappers) return;
1264 
1265   // Treat all objects as roots during incremental marking to avoid corrupting
1266   // marking worklists.
1267   if (isolate()->heap()->incremental_marking()->IsMarking()) return;
1268 
1269   auto* const handler = isolate()->heap()->GetEmbedderRootsHandler();
1270   for (TracedNode* node : traced_young_nodes_) {
1271     if (node->IsInUse()) {
1272       DCHECK(node->is_root());
1273       if (is_unmodified(node->location())) {
1274         v8::Value* value = ToApi<v8::Value>(node->handle());
1275         node->set_root(handler->IsRoot(
1276             *reinterpret_cast<v8::TracedReference<v8::Value>*>(&value)));
1277       }
1278     }
1279   }
1280 }
1281 
1282 void GlobalHandles::IterateYoungStrongAndDependentRoots(RootVisitor* v) {
1283   for (Node* node : young_nodes_) {
1284     if (node->IsStrongRetainer()) {
1285       v->VisitRootPointer(Root::kGlobalHandles, node->label(),
1286                           node->location());
1287     }
1288   }
1289   for (TracedNode* node : traced_young_nodes_) {
1290     if (node->IsInUse() && node->is_root()) {
1291       v->VisitRootPointer(Root::kGlobalHandles, nullptr, node->location());
1292     }
1293   }
1294 }
1295 
1296 void GlobalHandles::MarkYoungWeakDeadObjectsPending(
1297     WeakSlotCallbackWithHeap is_dead) {
1298   for (Node* node : young_nodes_) {
1299     DCHECK(node->is_in_young_list());
1300     if (node->IsWeak() && is_dead(isolate_->heap(), node->location())) {
1301       if (!node->IsPhantomCallback() && !node->IsPhantomResetHandle()) {
1302         node->MarkPending();
1303       }
1304     }
1305   }
1306 }
1307 
1308 void GlobalHandles::IterateYoungWeakDeadObjectsForFinalizers(RootVisitor* v) {
1309   for (Node* node : young_nodes_) {
1310     DCHECK(node->is_in_young_list());
1311     if (node->IsWeakRetainer() && (node->state() == Node::PENDING)) {
1312       DCHECK(!node->IsPhantomCallback());
1313       DCHECK(!node->IsPhantomResetHandle());
1314       // Finalizers need to survive.
1315       v->VisitRootPointer(Root::kGlobalHandles, node->label(),
1316                           node->location());
1317     }
1318   }
1319 }
1320 
1321 void GlobalHandles::IterateYoungWeakObjectsForPhantomHandles(
1322     RootVisitor* v, WeakSlotCallbackWithHeap should_reset_handle) {
1323   for (Node* node : young_nodes_) {
1324     DCHECK(node->is_in_young_list());
1325     if (node->IsWeakRetainer() && (node->state() != Node::PENDING)) {
1326       if (should_reset_handle(isolate_->heap(), node->location())) {
1327         DCHECK(node->IsPhantomResetHandle() || node->IsPhantomCallback());
1328         if (node->IsPhantomResetHandle()) {
1329           node->MarkPending();
1330           node->ResetPhantomHandle();
1331           ++number_of_phantom_handle_resets_;
1332         } else if (node->IsPhantomCallback()) {
1333           node->MarkPending();
1334           node->CollectPhantomCallbackData(&regular_pending_phantom_callbacks_);
1335         } else {
1336           UNREACHABLE();
1337         }
1338       } else {
1339         // Node survived and needs to be visited.
1340         v->VisitRootPointer(Root::kGlobalHandles, node->label(),
1341                             node->location());
1342       }
1343     }
1344   }
1345 
1346   if (!FLAG_reclaim_unmodified_wrappers) return;
1347 
1348   auto* const handler = isolate()->heap()->GetEmbedderRootsHandler();
1349   for (TracedNode* node : traced_young_nodes_) {
1350     if (!node->IsInUse()) continue;
1351 
1352     DCHECK_IMPLIES(node->is_root(),
1353                    !should_reset_handle(isolate_->heap(), node->location()));
1354     if (should_reset_handle(isolate_->heap(), node->location())) {
1355       v8::Value* value = ToApi<v8::Value>(node->handle());
1356       handler->ResetRoot(
1357           *reinterpret_cast<v8::TracedReference<v8::Value>*>(&value));
1358       // We cannot check whether a node is in use here as the reset behavior
1359       // depends on whether incremental marking is running when reclaiming
1360       // young objects.
1361       ++number_of_phantom_handle_resets_;
1362     } else {
1363       if (!node->is_root()) {
1364         node->set_root(true);
1365         v->VisitRootPointer(Root::kGlobalHandles, nullptr, node->location());
1366       }
1367     }
1368   }
1369 }
1370 
1371 void GlobalHandles::InvokeSecondPassPhantomCallbacksFromTask() {
1372   DCHECK(second_pass_callbacks_task_posted_);
1373   second_pass_callbacks_task_posted_ = false;
1374   Heap::DevToolsTraceEventScope devtools_trace_event_scope(
1375       isolate()->heap(), "MajorGC", "invoke weak phantom callbacks");
1376   TRACE_EVENT0("v8", "V8.GCPhantomHandleProcessingCallback");
1377   isolate()->heap()->CallGCPrologueCallbacks(
1378       GCType::kGCTypeProcessWeakCallbacks, kNoGCCallbackFlags);
1379   InvokeSecondPassPhantomCallbacks();
1380   isolate()->heap()->CallGCEpilogueCallbacks(
1381       GCType::kGCTypeProcessWeakCallbacks, kNoGCCallbackFlags);
1382 }
1383 
1384 void GlobalHandles::InvokeSecondPassPhantomCallbacks() {
1385   // The callbacks may execute JS, which in turn may lead to another GC run.
1386   // If we are already processing the callbacks, we do not want to start over
1387   // from within the inner GC. Newly added callbacks will always be run by the
1388   // outermost GC run only.
1389   if (running_second_pass_callbacks_) return;
1390   running_second_pass_callbacks_ = true;
1391 
1392   AllowJavascriptExecution allow_js(isolate());
1393   while (!second_pass_callbacks_.empty()) {
1394     auto callback = second_pass_callbacks_.back();
1395     second_pass_callbacks_.pop_back();
1396     callback.Invoke(isolate(), PendingPhantomCallback::kSecondPass);
1397   }
1398   running_second_pass_callbacks_ = false;
1399 }
1400 
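// Runs pending finalizers for young nodes after a scavenge. Bails out early
// if a finalizer triggered a nested GC (the processing counter changed) and
// returns the number of nodes that are no longer retained.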
1401 size_t GlobalHandles::PostScavengeProcessing(unsigned post_processing_count) {
1402   size_t freed_nodes = 0;
1403   for (Node* node : young_nodes_) {
1404     // Filter free nodes.
1405     if (!node->IsRetainer()) continue;
1406 
1407     if (node->IsPending()) {
1408       DCHECK(node->has_callback());
1409       DCHECK(node->IsPendingFinalizer());
1410       node->PostGarbageCollectionProcessing(isolate_);
1411     }
1412     if (InRecursiveGC(post_processing_count)) return freed_nodes;
1413 
1414     if (!node->IsRetainer()) freed_nodes++;
1415   }
1416   return freed_nodes;
1417 }
1418 
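// Same as PostScavengeProcessing, but walks all regular nodes after a full
// mark-sweep collection.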
1419 size_t GlobalHandles::PostMarkSweepProcessing(unsigned post_processing_count) {
1420   size_t freed_nodes = 0;
1421   for (Node* node : *regular_nodes_) {
1422     // Filter free nodes.
1423     if (!node->IsRetainer()) continue;
1424 
1425     if (node->IsPending()) {
1426       DCHECK(node->has_callback());
1427       DCHECK(node->IsPendingFinalizer());
1428       node->PostGarbageCollectionProcessing(isolate_);
1429     }
1430     if (InRecursiveGC(post_processing_count)) return freed_nodes;
1431 
1432     if (!node->IsRetainer()) freed_nodes++;
1433   }
1434   return freed_nodes;
1435 }
1436 
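// Compacts a list of young nodes in place: nodes that are still in use and
// whose objects remain in the young generation are kept; promoted or dead
// nodes are dropped from the list and marked as no longer young. Heap
// counters are updated accordingly.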
1437 template <typename T>
1438 void GlobalHandles::UpdateAndCompactListOfYoungNode(
1439     std::vector<T*>* node_list) {
1440   size_t last = 0;
1441   for (T* node : *node_list) {
1442     DCHECK(node->is_in_young_list());
1443     if (node->IsInUse()) {
1444       if (ObjectInYoungGeneration(node->object())) {
1445         (*node_list)[last++] = node;
1446         isolate_->heap()->IncrementNodesCopiedInNewSpace();
1447       } else {
1448         node->set_in_young_list(false);
1449         isolate_->heap()->IncrementNodesPromoted();
1450       }
1451     } else {
1452       node->set_in_young_list(false);
1453       isolate_->heap()->IncrementNodesDiedInNewSpace();
1454     }
1455   }
1456   DCHECK_LE(last, node_list->size());
1457   node_list->resize(last);
1458   node_list->shrink_to_fit();
1459 }
1460 
1461 void GlobalHandles::UpdateListOfYoungNodes() {
1462   UpdateAndCompactListOfYoungNode(&young_nodes_);
1463   UpdateAndCompactListOfYoungNode(&traced_young_nodes_);
1464 }
1465 
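// Invokes all queued first-pass phantom callbacks. The first-pass callback is
// required to reset the handle (enforced by the CHECK below); callbacks that
// request a second pass are queued on |second_pass_callbacks_|.
//
// Illustrative embedder-side sketch (hypothetical names, not part of this
// file): a weak callback registered with v8::WeakCallbackType::kParameter
// resets its handle in the first pass and may schedule a second pass:
//
//   void FirstPass(const v8::WeakCallbackInfo<Wrapper>& info) {
//     Wrapper* wrapper = info.GetParameter();
//     wrapper->handle.Reset();                 // Mandatory in the first pass.
//     info.SetSecondPassCallback(SecondPass);  // Optional second pass.
//   }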
1466 template <typename T>
1467 size_t GlobalHandles::InvokeFirstPassWeakCallbacks(
1468     std::vector<std::pair<T*, PendingPhantomCallback>>* pending) {
1469   size_t freed_nodes = 0;
1470   std::vector<std::pair<T*, PendingPhantomCallback>> pending_phantom_callbacks;
1471   pending_phantom_callbacks.swap(*pending);
1472   {
1473     // The initial pass callbacks must simply clear the nodes.
1474     for (auto& pair : pending_phantom_callbacks) {
1475       T* node = pair.first;
1476       DCHECK_EQ(T::NEAR_DEATH, node->state());
1477       pair.second.Invoke(isolate(), PendingPhantomCallback::kFirstPass);
1478 
1479       // Transition to second pass. It is required that the first pass callback
1480       // resets the handle using |v8::PersistentBase::Reset|. Also see comments
1481       // on |v8::WeakCallbackInfo|.
1482       CHECK_WITH_MSG(T::FREE == node->state(),
1483                      "Handle not reset in first callback. See comments on "
1484                      "|v8::WeakCallbackInfo|.");
1485 
1486       if (pair.second.callback()) second_pass_callbacks_.push_back(pair.second);
1487       freed_nodes++;
1488     }
1489   }
1490   return freed_nodes;
1491 }
1492 
1493 size_t GlobalHandles::InvokeFirstPassWeakCallbacks() {
1494   return InvokeFirstPassWeakCallbacks(&regular_pending_phantom_callbacks_) +
1495          InvokeFirstPassWeakCallbacks(&traced_pending_phantom_callbacks_);
1496 }
1497 
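// Either runs second-pass phantom callbacks immediately (when requested, or
// under --optimize-for-size / --predictable) or posts a cancelable foreground
// task that will run them later via
// InvokeSecondPassPhantomCallbacksFromTask().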
1498 void GlobalHandles::InvokeOrScheduleSecondPassPhantomCallbacks(
1499     bool synchronous_second_pass) {
1500   if (!second_pass_callbacks_.empty()) {
1501     if (FLAG_optimize_for_size || FLAG_predictable || synchronous_second_pass) {
1502       Heap::DevToolsTraceEventScope devtools_trace_event_scope(
1503           isolate()->heap(), "MajorGC", "invoke weak phantom callbacks");
1504       isolate()->heap()->CallGCPrologueCallbacks(
1505           GCType::kGCTypeProcessWeakCallbacks, kNoGCCallbackFlags);
1506       InvokeSecondPassPhantomCallbacks();
1507       isolate()->heap()->CallGCEpilogueCallbacks(
1508           GCType::kGCTypeProcessWeakCallbacks, kNoGCCallbackFlags);
1509     } else if (!second_pass_callbacks_task_posted_) {
1510       second_pass_callbacks_task_posted_ = true;
1511       auto taskrunner = V8::GetCurrentPlatform()->GetForegroundTaskRunner(
1512           reinterpret_cast<v8::Isolate*>(isolate()));
1513       taskrunner->PostTask(MakeCancelableTask(
1514           isolate(), [this] { InvokeSecondPassPhantomCallbacksFromTask(); }));
1515     }
1516   }
1517 }
1518 
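// Invokes a single phantom callback. Only the first pass exposes the callback
// slot to the embedder (via the Data argument, so SetSecondPassCallback can
// write into it); the slot is cleared before the call, so after the first
// pass it holds either the second-pass callback or nullptr.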
1519 void GlobalHandles::PendingPhantomCallback::Invoke(Isolate* isolate,
1520                                                    InvocationType type) {
1521   Data::Callback* callback_addr = nullptr;
1522   if (type == kFirstPass) {
1523     callback_addr = &callback_;
1524   }
1525   Data data(reinterpret_cast<v8::Isolate*>(isolate), parameter_,
1526             embedder_fields_, callback_addr);
1527   Data::Callback callback = callback_;
1528   callback_ = nullptr;
1529   callback(data);
1530 }
1531 
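// Returns true if another round of post-GC processing started while the
// caller was running, i.e. the global counter has moved past the snapshot the
// caller took.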
1532 bool GlobalHandles::InRecursiveGC(unsigned gc_processing_counter) {
1533   return gc_processing_counter != post_gc_processing_count_;
1534 }
1535 
1536 size_t GlobalHandles::PostGarbageCollectionProcessing(
1537     GarbageCollector collector, const v8::GCCallbackFlags gc_callback_flags) {
1538   // Process weak global handle callbacks. This must be done after the
1539   // GC is completely done, because the callbacks may invoke arbitrary
1540   // API functions.
1541   DCHECK_EQ(Heap::NOT_IN_GC, isolate_->heap()->gc_state());
1542   const unsigned post_processing_count = ++post_gc_processing_count_;
1543   size_t freed_nodes = 0;
1544   bool synchronous_second_pass =
1545       isolate_->heap()->IsTearingDown() ||
1546       (gc_callback_flags &
1547        (kGCCallbackFlagForced | kGCCallbackFlagCollectAllAvailableGarbage |
1548         kGCCallbackFlagSynchronousPhantomCallbackProcessing)) != 0;
1549   InvokeOrScheduleSecondPassPhantomCallbacks(synchronous_second_pass);
1550   if (InRecursiveGC(post_processing_count)) return freed_nodes;
1551 
1552   freed_nodes += Heap::IsYoungGenerationCollector(collector)
1553                      ? PostScavengeProcessing(post_processing_count)
1554                      : PostMarkSweepProcessing(post_processing_count);
1555   if (InRecursiveGC(post_processing_count)) return freed_nodes;
1556 
1557   UpdateListOfYoungNodes();
1558   return freed_nodes;
1559 }
1560 
1561 void GlobalHandles::IterateStrongRoots(RootVisitor* v) {
1562   for (Node* node : *regular_nodes_) {
1563     if (node->IsStrongRetainer()) {
1564       v->VisitRootPointer(Root::kGlobalHandles, node->label(),
1565                           node->location());
1566     }
1567   }
1568 }
1569 
1570 void GlobalHandles::IterateStrongStackRoots(RootVisitor* v) {
1571   on_stack_nodes_->Iterate(v);
1572 }
1573 
1574 void GlobalHandles::IterateWeakRoots(RootVisitor* v) {
1575   for (Node* node : *regular_nodes_) {
1576     if (node->IsWeak()) {
1577       v->VisitRootPointer(Root::kGlobalHandles, node->label(),
1578                           node->location());
1579     }
1580   }
1581   for (TracedNode* node : *traced_nodes_) {
1582     if (node->IsInUse()) {
1583       v->VisitRootPointer(Root::kGlobalHandles, nullptr, node->location());
1584     }
1585   }
1586 }
1587 
1588 DISABLE_CFI_PERF
1589 void GlobalHandles::IterateAllRoots(RootVisitor* v) {
1590   for (Node* node : *regular_nodes_) {
1591     if (node->IsRetainer()) {
1592       v->VisitRootPointer(Root::kGlobalHandles, node->label(),
1593                           node->location());
1594     }
1595   }
1596   for (TracedNode* node : *traced_nodes_) {
1597     if (node->IsRetainer()) {
1598       v->VisitRootPointer(Root::kGlobalHandles, nullptr, node->location());
1599     }
1600   }
1601   on_stack_nodes_->Iterate(v);
1602 }
1603 
1604 DISABLE_CFI_PERF
1605 void GlobalHandles::IterateAllYoungRoots(RootVisitor* v) {
1606   for (Node* node : young_nodes_) {
1607     if (node->IsRetainer()) {
1608       v->VisitRootPointer(Root::kGlobalHandles, node->label(),
1609                           node->location());
1610     }
1611   }
1612   for (TracedNode* node : traced_young_nodes_) {
1613     if (node->IsRetainer()) {
1614       v->VisitRootPointer(Root::kGlobalHandles, nullptr, node->location());
1615     }
1616   }
1617   on_stack_nodes_->Iterate(v);
1618 }
1619 
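// Adapts a Node's handle into the v8::Persistent<v8::Value> view expected by
// v8::PersistentHandleVisitor and forwards it together with the node's
// wrapper class id.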
1620 DISABLE_CFI_PERF
1621 void GlobalHandles::ApplyPersistentHandleVisitor(
1622     v8::PersistentHandleVisitor* visitor, GlobalHandles::Node* node) {
1623   v8::Value* value = ToApi<v8::Value>(node->handle());
1624   visitor->VisitPersistentHandle(
1625       reinterpret_cast<v8::Persistent<v8::Value>*>(&value),
1626       node->wrapper_class_id());
1627 }
1628 
1629 DISABLE_CFI_PERF
1630 void GlobalHandles::IterateAllRootsWithClassIds(
1631     v8::PersistentHandleVisitor* visitor) {
1632   for (Node* node : *regular_nodes_) {
1633     if (node->IsRetainer() && node->has_wrapper_class_id()) {
1634       ApplyPersistentHandleVisitor(visitor, node);
1635     }
1636   }
1637 }
1638 
1639 DISABLE_CFI_PERF
1640 void GlobalHandles::IterateTracedNodes(
1641     v8::EmbedderHeapTracer::TracedGlobalHandleVisitor* visitor) {
1642   for (TracedNode* node : *traced_nodes_) {
1643     if (node->IsInUse()) {
1644       v8::Value* value = ToApi<v8::Value>(node->handle());
1645       visitor->VisitTracedReference(
1646           *reinterpret_cast<v8::TracedReference<v8::Value>*>(&value));
1647     }
1648   }
1649 }
1650 
1651 DISABLE_CFI_PERF
1652 void GlobalHandles::IterateAllYoungRootsWithClassIds(
1653     v8::PersistentHandleVisitor* visitor) {
1654   for (Node* node : young_nodes_) {
1655     if (node->IsRetainer() && node->has_wrapper_class_id()) {
1656       ApplyPersistentHandleVisitor(visitor, node);
1657     }
1658   }
1659 }
1660 
1661 DISABLE_CFI_PERF
1662 void GlobalHandles::IterateYoungWeakRootsWithClassIds(
1663     v8::PersistentHandleVisitor* visitor) {
1664   for (Node* node : young_nodes_) {
1665     if (node->has_wrapper_class_id() && node->IsWeak()) {
1666       ApplyPersistentHandleVisitor(visitor, node);
1667     }
1668   }
1669 }
1670 
1671 void GlobalHandles::RecordStats(HeapStats* stats) {
1672   *stats->global_handle_count = 0;
1673   *stats->weak_global_handle_count = 0;
1674   *stats->pending_global_handle_count = 0;
1675   *stats->near_death_global_handle_count = 0;
1676   *stats->free_global_handle_count = 0;
1677   for (Node* node : *regular_nodes_) {
1678     *stats->global_handle_count += 1;
1679     if (node->state() == Node::WEAK) {
1680       *stats->weak_global_handle_count += 1;
1681     } else if (node->state() == Node::PENDING) {
1682       *stats->pending_global_handle_count += 1;
1683     } else if (node->state() == Node::NEAR_DEATH) {
1684       *stats->near_death_global_handle_count += 1;
1685     } else if (node->state() == Node::FREE) {
1686       *stats->free_global_handle_count += 1;
1687     }
1688   }
1689 }
1690 
1691 #ifdef DEBUG
1692 
1693 void GlobalHandles::PrintStats() {
1694   int total = 0;
1695   int weak = 0;
1696   int pending = 0;
1697   int near_death = 0;
1698   int destroyed = 0;
1699 
1700   for (Node* node : *regular_nodes_) {
1701     total++;
1702     if (node->state() == Node::WEAK) weak++;
1703     if (node->state() == Node::PENDING) pending++;
1704     if (node->state() == Node::NEAR_DEATH) near_death++;
1705     if (node->state() == Node::FREE) destroyed++;
1706   }
1707 
1708   PrintF("Global Handle Statistics:\n");
1709   PrintF("  allocated memory = %zuB\n", total * sizeof(Node));
1710   PrintF("  # weak       = %d\n", weak);
1711   PrintF("  # pending    = %d\n", pending);
1712   PrintF("  # near_death = %d\n", near_death);
1713   PrintF("  # free       = %d\n", destroyed);
1714   PrintF("  # total      = %d\n", total);
1715 }
1716 
1717 void GlobalHandles::Print() {
1718   PrintF("Global handles:\n");
1719   for (Node* node : *regular_nodes_) {
1720     PrintF("  handle %p to %p%s\n", node->location().ToVoidPtr(),
1721            reinterpret_cast<void*>(node->object().ptr()),
1722            node->IsWeak() ? " (weak)" : "");
1723   }
1724 }
1725 
1726 #endif  // DEBUG
1727 
1728 EternalHandles::~EternalHandles() {
1729   for (Address* block : blocks_) delete[] block;
1730 }
1731 
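// Visits every live eternal handle. Each full block contributes kSize slots;
// the last block contributes only the slots in use, tracked by |limit|, which
// starts at size_ and is reduced by kSize per block.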
1732 void EternalHandles::IterateAllRoots(RootVisitor* visitor) {
1733   int limit = size_;
1734   for (Address* block : blocks_) {
1735     DCHECK_GT(limit, 0);
1736     visitor->VisitRootPointers(
1737         Root::kEternalHandles, nullptr, FullObjectSlot(block),
1738         FullObjectSlot(block + std::min({limit, kSize})));
1739     limit -= kSize;
1740   }
1741 }
1742 
1743 void EternalHandles::IterateYoungRoots(RootVisitor* visitor) {
1744   for (int index : young_node_indices_) {
1745     visitor->VisitRootPointer(Root::kEternalHandles, nullptr,
1746                               FullObjectSlot(GetLocation(index)));
1747   }
1748 }
1749 
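// Compacts young_node_indices_ after a GC, keeping only the indices whose
// objects are still in the young generation.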
1750 void EternalHandles::PostGarbageCollectionProcessing() {
1751   size_t last = 0;
1752   for (int index : young_node_indices_) {
1753     if (ObjectInYoungGeneration(Object(*GetLocation(index)))) {
1754       young_node_indices_[last++] = index;
1755     }
1756   }
1757   DCHECK_LE(last, young_node_indices_.size());
1758   young_node_indices_.resize(last);
1759 }
1760 
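// Stores |object| in the next free eternal handle slot and reports its index
// through |index|. Slots live in fixed-size blocks: the block is
// size_ >> kShift and the offset within it is size_ & kMask, so a fresh block
// is appended whenever the offset wraps around to zero. (Illustrative only,
// assuming kShift == 8 / kSize == 256: index 600 would map to blocks_[2][88].)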
1761 void EternalHandles::Create(Isolate* isolate, Object object, int* index) {
1762   DCHECK_EQ(kInvalidIndex, *index);
1763   if (object == Object()) return;
1764   Object the_hole = ReadOnlyRoots(isolate).the_hole_value();
1765   DCHECK_NE(the_hole, object);
1766   int block = size_ >> kShift;
1767   int offset = size_ & kMask;
1768   // Need to resize.
1769   if (offset == 0) {
1770     Address* next_block = new Address[kSize];
1771     MemsetPointer(FullObjectSlot(next_block), the_hole, kSize);
1772     blocks_.push_back(next_block);
1773   }
1774   DCHECK_EQ(the_hole.ptr(), blocks_[block][offset]);
1775   blocks_[block][offset] = object.ptr();
1776   if (ObjectInYoungGeneration(object)) {
1777     young_node_indices_.push_back(size_);
1778   }
1779   *index = size_++;
1780 }
1781 
1782 }  // namespace internal
1783 }  // namespace v8
1784