1 // Copyright 2009 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/global-handles.h"
6
7 #include "src/api-inl.h"
8 #include "src/cancelable-task.h"
9 #include "src/objects-inl.h"
10 #include "src/v8.h"
11 #include "src/visitors.h"
12 #include "src/vm-state-inl.h"
13
14 namespace v8 {
15 namespace internal {
16
class GlobalHandles::Node {
 public:
  // State transition diagram:
  // FREE -> NORMAL <-> WEAK -> PENDING -> NEAR_DEATH -> { NORMAL, WEAK, FREE }
  enum State {
    FREE = 0,
    NORMAL,      // Normal global handle.
    WEAK,        // Flagged as weak but not yet finalized.
    PENDING,     // Has been recognized as only reachable by weak handles.
    NEAR_DEATH,  // Callback has informed the handle is near death.
    NUMBER_OF_NODE_STATES
  };

  // Maps handle location (slot) to the containing node. Relies on object_
  // being the first field of Node (checked by the DCHECK), so the slot
  // address is also the node address.
  static Node* FromLocation(Object** location) {
    DCHECK_EQ(offsetof(Node, object_), 0);
    return reinterpret_cast<Node*>(location);
  }

  Node() {
    // The public API reads these fields directly by offset (see the
    // Internals constants), so the layout must match them exactly.
    DCHECK_EQ(offsetof(Node, class_id_), Internals::kNodeClassIdOffset);
    DCHECK_EQ(offsetof(Node, flags_), Internals::kNodeFlagsOffset);
    STATIC_ASSERT(static_cast<int>(NodeState::kMask) ==
                  Internals::kNodeStateMask);
    STATIC_ASSERT(WEAK == Internals::kNodeStateIsWeakValue);
    STATIC_ASSERT(PENDING == Internals::kNodeStateIsPendingValue);
    STATIC_ASSERT(NEAR_DEATH == Internals::kNodeStateIsNearDeathValue);
    STATIC_ASSERT(static_cast<int>(IsIndependent::kShift) ==
                  Internals::kNodeIsIndependentShift);
    STATIC_ASSERT(static_cast<int>(IsActive::kShift) ==
                  Internals::kNodeIsActiveShift);
  }

#ifdef ENABLE_HANDLE_ZAPPING
  ~Node() {
    // TODO(1428): if it's a weak handle we should have invoked its callback.
    // Zap the values for eager trapping.
    object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue);
    class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId;
    index_ = 0;
    set_independent(false);
    set_active(false);
    set_in_new_space_list(false);
    data_.next_free = nullptr;
    weak_callback_ = nullptr;
  }
#endif

  // Puts this node at the head of the free list rooted at *first_free.
  // Called once per node when its block is allocated.
  void Initialize(int index, Node** first_free) {
    object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue);
    index_ = static_cast<uint8_t>(index);
    DCHECK(static_cast<int>(index_) == index);
    set_state(FREE);
    set_in_new_space_list(false);
    data_.next_free = *first_free;
    *first_free = this;
  }

  // Transitions FREE -> NORMAL, stores the object, and bumps the block's
  // use count (which also updates global handle counters).
  void Acquire(Object* object) {
    DCHECK(state() == FREE);
    object_ = object;
    class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId;
    set_independent(false);
    set_active(false);
    set_state(NORMAL);
    data_.parameter = nullptr;
    weak_callback_ = nullptr;
    IncreaseBlockUses();
  }

  void Zap() {
    DCHECK(IsInUse());
    // Zap the values for eager trapping.
    object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue);
  }

  // Transitions back to FREE; DecreaseBlockUses() returns the node to the
  // global free list and updates counters.
  void Release() {
    DCHECK(IsInUse());
    set_state(FREE);
    // Zap the values for eager trapping.
    object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue);
    class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId;
    set_independent(false);
    set_active(false);
    weak_callback_ = nullptr;
    DecreaseBlockUses();
  }

  // Object slot accessors.
  Object* object() const { return object_; }
  Object** location() { return &object_; }
  // A label is only stored while the node is NORMAL (see the data_ union).
  const char* label() { return state() == NORMAL ? data_.label : nullptr; }
  Handle<Object> handle() { return Handle<Object>(location()); }

  // Wrapper class ID accessors.
  bool has_wrapper_class_id() const {
    return class_id_ != v8::HeapProfiler::kPersistentHandleNoClassId;
  }

  uint16_t wrapper_class_id() const { return class_id_; }

  // State and flag accessors.

  State state() const {
    return NodeState::decode(flags_);
  }
  void set_state(State state) {
    flags_ = NodeState::update(flags_, state);
  }

  bool is_independent() { return IsIndependent::decode(flags_); }
  void set_independent(bool v) { flags_ = IsIndependent::update(flags_, v); }

  bool is_active() {
    return IsActive::decode(flags_);
  }
  void set_active(bool v) {
    flags_ = IsActive::update(flags_, v);
  }

  bool is_in_new_space_list() {
    return IsInNewSpaceList::decode(flags_);
  }
  void set_in_new_space_list(bool v) {
    flags_ = IsInNewSpaceList::update(flags_, v);
  }

  WeaknessType weakness_type() const {
    return NodeWeaknessType::decode(flags_);
  }
  void set_weakness_type(WeaknessType weakness_type) {
    flags_ = NodeWeaknessType::update(flags_, weakness_type);
  }

  bool IsNearDeath() const {
    // Check for PENDING to ensure correct answer when processing callbacks.
    return state() == PENDING || state() == NEAR_DEATH;
  }

  bool IsWeak() const { return state() == WEAK; }

  bool IsInUse() const { return state() != FREE; }

  bool IsPhantomCallback() const {
    return weakness_type() == PHANTOM_WEAK ||
           weakness_type() == PHANTOM_WEAK_2_EMBEDDER_FIELDS;
  }

  bool IsPhantomResetHandle() const {
    return weakness_type() == PHANTOM_WEAK_RESET_HANDLE;
  }

  bool IsPendingPhantomCallback() const {
    return state() == PENDING && IsPhantomCallback();
  }

  bool IsPendingPhantomResetHandle() const {
    return state() == PENDING && IsPhantomResetHandle();
  }

  // True unless the node is FREE, or NEAR_DEATH with a non-finalizer
  // weakness type.
  bool IsRetainer() const {
    return state() != FREE &&
           !(state() == NEAR_DEATH && weakness_type() != FINALIZER_WEAK);
  }

  bool IsStrongRetainer() const { return state() == NORMAL; }

  bool IsWeakRetainer() const {
    return state() == WEAK || state() == PENDING ||
           (state() == NEAR_DEATH && weakness_type() == FINALIZER_WEAK);
  }

  void MarkPending() {
    DCHECK(state() == WEAK);
    set_state(PENDING);
  }

  // Callback parameter accessors.
  void set_parameter(void* parameter) {
    DCHECK(IsInUse());
    data_.parameter = parameter;
  }
  void* parameter() const {
    DCHECK(IsInUse());
    return data_.parameter;
  }

  // Accessors for next free node in the free list.
  Node* next_free() {
    DCHECK(state() == FREE);
    return data_.next_free;
  }
  void set_next_free(Node* value) {
    DCHECK(state() == FREE);
    data_.next_free = value;
  }

  // Makes the node weak with a callback. The internal weakness type is
  // derived from the v8::WeakCallbackType requested by the embedder.
  void MakeWeak(void* parameter,
                WeakCallbackInfo<void>::Callback phantom_callback,
                v8::WeakCallbackType type) {
    DCHECK_NOT_NULL(phantom_callback);
    DCHECK(IsInUse());
    CHECK_NE(object_, reinterpret_cast<Object*>(kGlobalHandleZapValue));
    set_state(WEAK);
    switch (type) {
      case v8::WeakCallbackType::kParameter:
        set_weakness_type(PHANTOM_WEAK);
        break;
      case v8::WeakCallbackType::kInternalFields:
        set_weakness_type(PHANTOM_WEAK_2_EMBEDDER_FIELDS);
        break;
      case v8::WeakCallbackType::kFinalizer:
        set_weakness_type(FINALIZER_WEAK);
        break;
    }
    set_parameter(parameter);
    weak_callback_ = phantom_callback;
  }

  // Makes the node weak without a callback: when the object dies, the
  // embedder's handle slot at *location_addr is reset to null (see
  // ResetPhantomHandle).
  void MakeWeak(Object*** location_addr) {
    DCHECK(IsInUse());
    CHECK_NE(object_, reinterpret_cast<Object*>(kGlobalHandleZapValue));
    set_state(WEAK);
    set_weakness_type(PHANTOM_WEAK_RESET_HANDLE);
    set_parameter(location_addr);
    weak_callback_ = nullptr;
  }

  // Reverts to a strong handle; returns the previously stored parameter.
  void* ClearWeakness() {
    DCHECK(IsInUse());
    void* p = parameter();
    set_state(NORMAL);
    set_parameter(nullptr);
    return p;
  }

  void AnnotateStrongRetainer(const char* label) {
    DCHECK_EQ(state(), NORMAL);
    data_.label = label;
  }

  // Harvests everything the first-pass phantom callback needs, zaps the
  // slot, and transitions PENDING -> NEAR_DEATH. The callback itself runs
  // later via PendingPhantomCallback::Invoke.
  void CollectPhantomCallbackData(
      std::vector<PendingPhantomCallback>* pending_phantom_callbacks) {
    DCHECK(weakness_type() == PHANTOM_WEAK ||
           weakness_type() == PHANTOM_WEAK_2_EMBEDDER_FIELDS);
    DCHECK(state() == PENDING);
    DCHECK_NOT_NULL(weak_callback_);

    void* embedder_fields[v8::kEmbedderFieldsInWeakCallback] = {nullptr,
                                                                nullptr};
    if (weakness_type() != PHANTOM_WEAK && object()->IsJSObject()) {
      auto jsobject = JSObject::cast(object());
      int field_count = jsobject->GetEmbedderFieldCount();
      for (int i = 0; i < v8::kEmbedderFieldsInWeakCallback; ++i) {
        if (field_count == i) break;
        auto field = jsobject->GetEmbedderField(i);
        // Only Smi-valued embedder fields are passed to the callback.
        if (field->IsSmi()) embedder_fields[i] = field;
      }
    }

    // Zap with something dangerous.
    *location() = reinterpret_cast<Object*>(0x6057CA11);

    pending_phantom_callbacks->push_back(PendingPhantomCallback(
        this, weak_callback_, parameter(), embedder_fields));
    DCHECK(IsInUse());
    set_state(NEAR_DEATH);
  }

  // For PHANTOM_WEAK_RESET_HANDLE nodes: null out the embedder's handle
  // slot and free the node immediately — there is no callback to run.
  void ResetPhantomHandle() {
    DCHECK(weakness_type() == PHANTOM_WEAK_RESET_HANDLE);
    DCHECK(state() == PENDING);
    DCHECK_NULL(weak_callback_);
    Object*** handle = reinterpret_cast<Object***>(parameter());
    *handle = nullptr;
    Release();
  }

  // Runs the finalizer callback for a dying (PENDING) node, if any.
  // Returns true iff a callback was invoked; the callback must revive or
  // reset the handle (enforced by the CHECK at the end).
  bool PostGarbageCollectionProcessing(Isolate* isolate) {
    // Handles only weak handles (not phantom) that are dying.
    if (state() != Node::PENDING) return false;
    if (weak_callback_ == nullptr) {
      Release();
      return false;
    }
    set_state(NEAR_DEATH);

    // Check that we are not passing a finalized external string to
    // the callback.
    DCHECK(!object_->IsExternalOneByteString() ||
           ExternalOneByteString::cast(object_)->resource() != nullptr);
    DCHECK(!object_->IsExternalTwoByteString() ||
           ExternalTwoByteString::cast(object_)->resource() != nullptr);
    if (weakness_type() != FINALIZER_WEAK) {
      return false;
    }

    // Leaving V8.
    VMState<EXTERNAL> vmstate(isolate);
    HandleScope handle_scope(isolate);
    void* embedder_fields[v8::kEmbedderFieldsInWeakCallback] = {nullptr,
                                                                nullptr};
    v8::WeakCallbackInfo<void> data(reinterpret_cast<v8::Isolate*>(isolate),
                                    parameter(), embedder_fields, nullptr);
    weak_callback_(data);

    // Absence of explicit cleanup or revival of weak handle
    // in most of the cases would lead to memory leak.
    CHECK(state() != NEAR_DEATH);
    return true;
  }

  inline GlobalHandles* GetGlobalHandles();

 private:
  inline NodeBlock* FindBlock();
  inline void IncreaseBlockUses();
  inline void DecreaseBlockUses();

  // Storage for object pointer.
  // Placed first to avoid offset computation.
  Object* object_;

  // Next word stores class_id, index, state, and independent.
  // Note: the most aligned fields should go first.

  // Wrapper class ID.
  uint16_t class_id_;

  // Index in the containing handle block.
  uint8_t index_;

  // This stores three flags (independent, partially_dependent and
  // in_new_space_list) and a State.
  class NodeState : public BitField<State, 0, 3> {};
  class IsIndependent : public BitField<bool, 3, 1> {};
  // The following two fields are mutually exclusive
  class IsActive : public BitField<bool, 4, 1> {};
  class IsInNewSpaceList : public BitField<bool, 5, 1> {};
  class NodeWeaknessType : public BitField<WeaknessType, 6, 2> {};

  uint8_t flags_;

  // Handle specific callback - might be a weak reference in disguise.
  WeakCallbackInfo<void>::Callback weak_callback_;

  // The meaning of this field depends on node state:
  // state == FREE: it stores the next free node pointer.
  // state == NORMAL: it stores the strong retainer label.
  // otherwise: it stores the parameter for the weak callback.
  union {
    Node* next_free;
    const char* label;
    void* parameter;
  } data_;

  DISALLOW_COPY_AND_ASSIGN(Node);
};
376
377
378 class GlobalHandles::NodeBlock {
379 public:
380 static const int kSize = 256;
381
NodeBlock(GlobalHandles * global_handles,NodeBlock * next)382 explicit NodeBlock(GlobalHandles* global_handles, NodeBlock* next)
383 : next_(next),
384 used_nodes_(0),
385 next_used_(nullptr),
386 prev_used_(nullptr),
387 global_handles_(global_handles) {}
388
PutNodesOnFreeList(Node ** first_free)389 void PutNodesOnFreeList(Node** first_free) {
390 for (int i = kSize - 1; i >= 0; --i) {
391 nodes_[i].Initialize(i, first_free);
392 }
393 }
394
node_at(int index)395 Node* node_at(int index) {
396 DCHECK(0 <= index && index < kSize);
397 return &nodes_[index];
398 }
399
IncreaseUses()400 void IncreaseUses() {
401 DCHECK_LT(used_nodes_, kSize);
402 if (used_nodes_++ == 0) {
403 NodeBlock* old_first = global_handles_->first_used_block_;
404 global_handles_->first_used_block_ = this;
405 next_used_ = old_first;
406 prev_used_ = nullptr;
407 if (old_first == nullptr) return;
408 old_first->prev_used_ = this;
409 }
410 }
411
DecreaseUses()412 void DecreaseUses() {
413 DCHECK_GT(used_nodes_, 0);
414 if (--used_nodes_ == 0) {
415 if (next_used_ != nullptr) next_used_->prev_used_ = prev_used_;
416 if (prev_used_ != nullptr) prev_used_->next_used_ = next_used_;
417 if (this == global_handles_->first_used_block_) {
418 global_handles_->first_used_block_ = next_used_;
419 }
420 }
421 }
422
global_handles()423 GlobalHandles* global_handles() { return global_handles_; }
424
425 // Next block in the list of all blocks.
next() const426 NodeBlock* next() const { return next_; }
427
428 // Next/previous block in the list of blocks with used nodes.
next_used() const429 NodeBlock* next_used() const { return next_used_; }
prev_used() const430 NodeBlock* prev_used() const { return prev_used_; }
431
432 private:
433 Node nodes_[kSize];
434 NodeBlock* const next_;
435 int used_nodes_;
436 NodeBlock* next_used_;
437 NodeBlock* prev_used_;
438 GlobalHandles* global_handles_;
439 };
440
441
GetGlobalHandles()442 GlobalHandles* GlobalHandles::Node::GetGlobalHandles() {
443 return FindBlock()->global_handles();
444 }
445
446
// Recovers the NodeBlock containing this node by subtracting the node's
// index (times sizeof(Node)) from its own address. Relies on nodes_ being
// the first member of NodeBlock; verified by the DCHECK below.
GlobalHandles::NodeBlock* GlobalHandles::Node::FindBlock() {
  intptr_t ptr = reinterpret_cast<intptr_t>(this);
  ptr = ptr - index_ * sizeof(Node);
  NodeBlock* block = reinterpret_cast<NodeBlock*>(ptr);
  DCHECK(block->node_at(index_) == this);
  return block;
}
454
455
// Called from Acquire(): bumps the block's use count (which may link the
// block onto the used list), the isolate-level counter, and the global
// handle total.
void GlobalHandles::Node::IncreaseBlockUses() {
  NodeBlock* node_block = FindBlock();
  node_block->IncreaseUses();
  GlobalHandles* global_handles = node_block->global_handles();
  global_handles->isolate()->counters()->global_handles()->Increment();
  global_handles->number_of_global_handles_++;
}
463
464
// Called from Release(): pushes this node onto the global free list first,
// then drops the block's use count (which may unlink the block from the
// used list) and updates the counters.
void GlobalHandles::Node::DecreaseBlockUses() {
  NodeBlock* node_block = FindBlock();
  GlobalHandles* global_handles = node_block->global_handles();
  data_.next_free = global_handles->first_free_;
  global_handles->first_free_ = this;
  node_block->DecreaseUses();
  global_handles->isolate()->counters()->global_handles()->Decrement();
  global_handles->number_of_global_handles_--;
}
474
475
476 class GlobalHandles::NodeIterator {
477 public:
NodeIterator(GlobalHandles * global_handles)478 explicit NodeIterator(GlobalHandles* global_handles)
479 : block_(global_handles->first_used_block_),
480 index_(0) {}
481
done() const482 bool done() const { return block_ == nullptr; }
483
node() const484 Node* node() const {
485 DCHECK(!done());
486 return block_->node_at(index_);
487 }
488
Advance()489 void Advance() {
490 DCHECK(!done());
491 if (++index_ < NodeBlock::kSize) return;
492 index_ = 0;
493 block_ = block_->next_used();
494 }
495
496 private:
497 NodeBlock* block_;
498 int index_;
499
500 DISALLOW_COPY_AND_ASSIGN(NodeIterator);
501 };
502
// Foreground task that runs the queued second-pass phantom callbacks some
// time after the GC that collected them (see
// DispatchPendingPhantomCallbacks).
class GlobalHandles::PendingPhantomCallbacksSecondPassTask
    : public v8::internal::CancelableTask {
 public:
  // NOTE(review): assumes |global_handles| outlives the posted task —
  // confirm with the posting site in DispatchPendingPhantomCallbacks.
  PendingPhantomCallbacksSecondPassTask(GlobalHandles* global_handles,
                                        Isolate* isolate)
      : CancelableTask(isolate), global_handles_(global_handles) {}

  void RunInternal() override {
    global_handles_->InvokeSecondPassPhantomCallbacksFromTask();
  }

 private:
  GlobalHandles* global_handles_;  // Not owned.
  DISALLOW_COPY_AND_ASSIGN(PendingPhantomCallbacksSecondPassTask);
};
518
// One GlobalHandles instance per isolate. All lists start empty; node
// blocks are allocated lazily in Create().
GlobalHandles::GlobalHandles(Isolate* isolate)
    : isolate_(isolate),
      number_of_global_handles_(0),
      first_block_(nullptr),
      first_used_block_(nullptr),
      first_free_(nullptr),
      post_gc_processing_count_(0),
      number_of_phantom_handle_resets_(0) {}
527
~GlobalHandles()528 GlobalHandles::~GlobalHandles() {
529 NodeBlock* block = first_block_;
530 while (block != nullptr) {
531 NodeBlock* tmp = block->next();
532 delete block;
533 block = tmp;
534 }
535 first_block_ = nullptr;
536 }
537
538
// Allocates a global handle pointing at |value|. Grows the pool by one
// NodeBlock when the free list is empty; handles to new-space objects are
// additionally tracked in new_space_nodes_.
Handle<Object> GlobalHandles::Create(Object* value) {
  if (first_free_ == nullptr) {
    // Out of free nodes: prepend a fresh block and thread its nodes onto
    // the free list.
    first_block_ = new NodeBlock(this, first_block_);
    first_block_->PutNodesOnFreeList(&first_free_);
  }
  DCHECK_NOT_NULL(first_free_);
  // Take the first node in the free list.
  Node* result = first_free_;
  first_free_ = result->next_free();
  result->Acquire(value);
  if (Heap::InNewSpace(value) && !result->is_in_new_space_list()) {
    new_space_nodes_.push_back(result);
    result->set_in_new_space_list(true);
  }
  return result->handle();
}
555
556
// Creates a fresh global handle to the same object, in the GlobalHandles
// instance that owns the node behind |location|.
Handle<Object> GlobalHandles::CopyGlobal(Object** location) {
  DCHECK_NOT_NULL(location);
  GlobalHandles* global_handles =
      Node::FromLocation(location)->GetGlobalHandles();
#ifdef VERIFY_HEAP
  if (i::FLAG_verify_heap) {
    (*location)->ObjectVerify(global_handles->isolate());
  }
#endif  // VERIFY_HEAP
  return global_handles->Create(*location);
}
568
569
Destroy(Object ** location)570 void GlobalHandles::Destroy(Object** location) {
571 if (location != nullptr) Node::FromLocation(location)->Release();
572 }
573
574
575 typedef v8::WeakCallbackInfo<void>::Callback GenericCallback;
576
577
MakeWeak(Object ** location,void * parameter,GenericCallback phantom_callback,v8::WeakCallbackType type)578 void GlobalHandles::MakeWeak(Object** location, void* parameter,
579 GenericCallback phantom_callback,
580 v8::WeakCallbackType type) {
581 Node::FromLocation(location)->MakeWeak(parameter, phantom_callback, type);
582 }
583
MakeWeak(Object *** location_addr)584 void GlobalHandles::MakeWeak(Object*** location_addr) {
585 Node::FromLocation(*location_addr)->MakeWeak(location_addr);
586 }
587
ClearWeakness(Object ** location)588 void* GlobalHandles::ClearWeakness(Object** location) {
589 return Node::FromLocation(location)->ClearWeakness();
590 }
591
AnnotateStrongRetainer(Object ** location,const char * label)592 void GlobalHandles::AnnotateStrongRetainer(Object** location,
593 const char* label) {
594 Node::FromLocation(location)->AnnotateStrongRetainer(label);
595 }
596
IsNearDeath(Object ** location)597 bool GlobalHandles::IsNearDeath(Object** location) {
598 return Node::FromLocation(location)->IsNearDeath();
599 }
600
601
IsWeak(Object ** location)602 bool GlobalHandles::IsWeak(Object** location) {
603 return Node::FromLocation(location)->IsWeak();
604 }
605
606 DISABLE_CFI_PERF
IterateWeakRootsForFinalizers(RootVisitor * v)607 void GlobalHandles::IterateWeakRootsForFinalizers(RootVisitor* v) {
608 for (NodeIterator it(this); !it.done(); it.Advance()) {
609 Node* node = it.node();
610 if (node->IsWeakRetainer() && node->state() == Node::PENDING) {
611 DCHECK(!node->IsPhantomCallback());
612 DCHECK(!node->IsPhantomResetHandle());
613 // Finalizers need to survive.
614 v->VisitRootPointer(Root::kGlobalHandles, node->label(),
615 node->location());
616 }
617 }
618 }
619
DISABLE_CFI_PERF
// For each weak retainer whose target the GC decided to clear
// (should_reset_handle returns true): reset the embedder's slot directly
// for PHANTOM_WEAK_RESET_HANDLE nodes, or queue the first-pass phantom
// callback. Nodes with other weakness types are left untouched here.
void GlobalHandles::IterateWeakRootsForPhantomHandles(
    WeakSlotCallbackWithHeap should_reset_handle) {
  for (NodeIterator it(this); !it.done(); it.Advance()) {
    Node* node = it.node();
    if (node->IsWeakRetainer() &&
        should_reset_handle(isolate()->heap(), node->location())) {
      if (node->IsPhantomResetHandle()) {
        // MarkPending() first: ResetPhantomHandle() asserts state PENDING.
        node->MarkPending();
        node->ResetPhantomHandle();
        ++number_of_phantom_handle_resets_;
      } else if (node->IsPhantomCallback()) {
        node->MarkPending();
        node->CollectPhantomCallbackData(&pending_phantom_callbacks_);
      }
    }
  }
}
638
IdentifyWeakHandles(WeakSlotCallbackWithHeap should_reset_handle)639 void GlobalHandles::IdentifyWeakHandles(
640 WeakSlotCallbackWithHeap should_reset_handle) {
641 for (NodeIterator it(this); !it.done(); it.Advance()) {
642 Node* node = it.node();
643 if (node->IsWeak() &&
644 should_reset_handle(isolate()->heap(), node->location())) {
645 if (!node->IsPhantomCallback() && !node->IsPhantomResetHandle()) {
646 node->MarkPending();
647 }
648 }
649 }
650 }
651
IterateNewSpaceStrongAndDependentRoots(RootVisitor * v)652 void GlobalHandles::IterateNewSpaceStrongAndDependentRoots(RootVisitor* v) {
653 for (Node* node : new_space_nodes_) {
654 if (node->IsStrongRetainer() ||
655 (node->IsWeakRetainer() && !node->is_independent() &&
656 node->is_active())) {
657 v->VisitRootPointer(Root::kGlobalHandles, node->label(),
658 node->location());
659 }
660 }
661 }
662
IterateNewSpaceStrongAndDependentRootsAndIdentifyUnmodified(RootVisitor * v,size_t start,size_t end)663 void GlobalHandles::IterateNewSpaceStrongAndDependentRootsAndIdentifyUnmodified(
664 RootVisitor* v, size_t start, size_t end) {
665 for (size_t i = start; i < end; ++i) {
666 Node* node = new_space_nodes_[i];
667 if (node->IsWeak() && !JSObject::IsUnmodifiedApiObject(node->location())) {
668 node->set_active(true);
669 }
670 if (node->IsStrongRetainer() ||
671 (node->IsWeakRetainer() && !node->is_independent() &&
672 node->is_active())) {
673 v->VisitRootPointer(Root::kGlobalHandles, node->label(),
674 node->location());
675 }
676 }
677 }
678
IdentifyWeakUnmodifiedObjects(WeakSlotCallback is_unmodified)679 void GlobalHandles::IdentifyWeakUnmodifiedObjects(
680 WeakSlotCallback is_unmodified) {
681 for (Node* node : new_space_nodes_) {
682 if (node->IsWeak() && !is_unmodified(node->location())) {
683 node->set_active(true);
684 }
685 }
686 }
687
MarkNewSpaceWeakUnmodifiedObjectsPending(WeakSlotCallbackWithHeap is_dead)688 void GlobalHandles::MarkNewSpaceWeakUnmodifiedObjectsPending(
689 WeakSlotCallbackWithHeap is_dead) {
690 for (Node* node : new_space_nodes_) {
691 DCHECK(node->is_in_new_space_list());
692 if ((node->is_independent() || !node->is_active()) && node->IsWeak() &&
693 is_dead(isolate_->heap(), node->location())) {
694 if (!node->IsPhantomCallback() && !node->IsPhantomResetHandle()) {
695 node->MarkPending();
696 }
697 }
698 }
699 }
700
IterateNewSpaceWeakUnmodifiedRootsForFinalizers(RootVisitor * v)701 void GlobalHandles::IterateNewSpaceWeakUnmodifiedRootsForFinalizers(
702 RootVisitor* v) {
703 for (Node* node : new_space_nodes_) {
704 DCHECK(node->is_in_new_space_list());
705 if ((node->is_independent() || !node->is_active()) &&
706 node->IsWeakRetainer() && (node->state() == Node::PENDING)) {
707 DCHECK(!node->IsPhantomCallback());
708 DCHECK(!node->IsPhantomResetHandle());
709 // Finalizers need to survive.
710 v->VisitRootPointer(Root::kGlobalHandles, node->label(),
711 node->location());
712 }
713 }
714 }
715
// For eligible new-space phantom nodes (independent or inactive, weak
// retainer, not already PENDING): if the GC says the target should be
// cleared, reset the slot or queue the first-pass callback; otherwise the
// node survived and must be visited as a root.
void GlobalHandles::IterateNewSpaceWeakUnmodifiedRootsForPhantomHandles(
    RootVisitor* v, WeakSlotCallbackWithHeap should_reset_handle) {
  for (Node* node : new_space_nodes_) {
    DCHECK(node->is_in_new_space_list());
    if ((node->is_independent() || !node->is_active()) &&
        node->IsWeakRetainer() && (node->state() != Node::PENDING)) {
      DCHECK(node->IsPhantomResetHandle() || node->IsPhantomCallback());
      if (should_reset_handle(isolate_->heap(), node->location())) {
        if (node->IsPhantomResetHandle()) {
          // MarkPending() first: ResetPhantomHandle() asserts PENDING.
          node->MarkPending();
          node->ResetPhantomHandle();
          ++number_of_phantom_handle_resets_;

        } else if (node->IsPhantomCallback()) {
          node->MarkPending();
          node->CollectPhantomCallbackData(&pending_phantom_callbacks_);
        } else {
          UNREACHABLE();
        }
      } else {
        // Node survived and needs to be visited.
        v->VisitRootPointer(Root::kGlobalHandles, node->label(),
                            node->location());
      }
    }
  }
}
743
// Entry point of PendingPhantomCallbacksSecondPassTask. Clears the
// "task posted" flag before running anything, so a new task may be posted
// while these callbacks run. The callbacks are bracketed by the embedder's
// GC prologue/epilogue callbacks for weak-callback processing.
void GlobalHandles::InvokeSecondPassPhantomCallbacksFromTask() {
  DCHECK(second_pass_callbacks_task_posted_);
  second_pass_callbacks_task_posted_ = false;
  TRACE_EVENT0("v8", "V8.GCPhantomHandleProcessingCallback");
  isolate()->heap()->CallGCPrologueCallbacks(
      GCType::kGCTypeProcessWeakCallbacks, kNoGCCallbackFlags);
  InvokeSecondPassPhantomCallbacks();
  isolate()->heap()->CallGCEpilogueCallbacks(
      GCType::kGCTypeProcessWeakCallbacks, kNoGCCallbackFlags);
}
754
// Drains second_pass_callbacks_ LIFO. The emptiness check is re-evaluated
// on every iteration, so entries appended while a callback runs are
// drained as well. All entries must have finished their first pass
// (node() is null).
void GlobalHandles::InvokeSecondPassPhantomCallbacks() {
  while (!second_pass_callbacks_.empty()) {
    auto callback = second_pass_callbacks_.back();
    second_pass_callbacks_.pop_back();
    DCHECK_NULL(callback.node());
    // Fire second pass callback
    callback.Invoke(isolate());
  }
}
764
// Runs finalizer processing for new-space handles after a scavenge.
// Returns the number of nodes freed. Bails out early if a weak callback
// triggers a nested GC, detected via post_gc_processing_count_.
int GlobalHandles::PostScavengeProcessing(
    const int initial_post_gc_processing_count) {
  int freed_nodes = 0;
  for (Node* node : new_space_nodes_) {
    DCHECK(node->is_in_new_space_list());
    if (!node->IsRetainer()) {
      // Free nodes do not have weak callbacks. Do not use them to compute
      // the freed_nodes.
      continue;
    }
    // Skip dependent or unmodified handles. Their weak callbacks might
    // expect to be called between two global garbage collection callbacks
    // which are not called for minor collections.
    if (!node->is_independent() && (node->is_active())) {
      node->set_active(false);
      continue;
    }
    node->set_active(false);

    if (node->PostGarbageCollectionProcessing(isolate_)) {
      if (initial_post_gc_processing_count != post_gc_processing_count_) {
        // Weak callback triggered another GC and another round of
        // PostGarbageCollection processing. The current node might
        // have been deleted in that round, so we need to bail out (or
        // restart the processing).
        return freed_nodes;
      }
    }
    // The callback (or Release() inside PostGarbageCollectionProcessing)
    // may have freed the node; count it if so.
    if (!node->IsRetainer()) {
      freed_nodes++;
    }
  }
  return freed_nodes;
}
800
801
// Runs finalizer processing over all used blocks after a full GC.
// Returns the number of nodes freed; bails out early on a nested GC
// (same protocol as PostScavengeProcessing).
int GlobalHandles::PostMarkSweepProcessing(
    const int initial_post_gc_processing_count) {
  int freed_nodes = 0;
  for (NodeIterator it(this); !it.done(); it.Advance()) {
    if (!it.node()->IsRetainer()) {
      // Free nodes do not have weak callbacks. Do not use them to compute
      // the freed_nodes.
      continue;
    }
    it.node()->set_active(false);
    if (it.node()->PostGarbageCollectionProcessing(isolate_)) {
      if (initial_post_gc_processing_count != post_gc_processing_count_) {
        // See the comment above.
        return freed_nodes;
      }
    }
    if (!it.node()->IsRetainer()) {
      freed_nodes++;
    }
  }
  return freed_nodes;
}
824
825
// Compacts new_space_nodes_ in place: keeps retainers whose object is
// still in new space, drops (and un-flags) promoted or dead nodes, and
// updates the heap's survival statistics.
void GlobalHandles::UpdateListOfNewSpaceNodes() {
  size_t last = 0;  // Write cursor for surviving entries.
  for (Node* node : new_space_nodes_) {
    DCHECK(node->is_in_new_space_list());
    if (node->IsRetainer()) {
      if (Heap::InNewSpace(node->object())) {
        new_space_nodes_[last++] = node;
        isolate_->heap()->IncrementNodesCopiedInNewSpace();
      } else {
        // Object was promoted out of new space; stop tracking the node.
        node->set_in_new_space_list(false);
        isolate_->heap()->IncrementNodesPromoted();
      }
    } else {
      node->set_in_new_space_list(false);
      isolate_->heap()->IncrementNodesDiedInNewSpace();
    }
  }
  DCHECK_LE(last, new_space_nodes_.size());
  new_space_nodes_.resize(last);
  new_space_nodes_.shrink_to_fit();
}
847
848
// Runs the first pass of all queued phantom callbacks and arranges for the
// second pass to run either inline or from a posted foreground task.
// Returns the number of nodes freed by the first pass.
int GlobalHandles::DispatchPendingPhantomCallbacks(
    bool synchronous_second_pass) {
  int freed_nodes = 0;
  // Protect against callback modifying pending_phantom_callbacks_.
  std::vector<PendingPhantomCallback> pending_phantom_callbacks;
  pending_phantom_callbacks.swap(pending_phantom_callbacks_);
  {
    // The initial pass callbacks must simply clear the nodes.
    for (auto callback : pending_phantom_callbacks) {
      // Skip callbacks that have already been processed once.
      if (callback.node() == nullptr) continue;
      callback.Invoke(isolate());
      // Invoke() lets the first-pass callback register a second-pass
      // callback by writing through the Data object; keep it if it did.
      if (callback.callback()) second_pass_callbacks_.push_back(callback);
      freed_nodes++;
    }
  }
  if (!second_pass_callbacks_.empty()) {
    if (FLAG_optimize_for_size || FLAG_predictable || synchronous_second_pass) {
      // Run the second pass inline, bracketed by the embedder's GC
      // prologue/epilogue callbacks for weak-callback processing.
      isolate()->heap()->CallGCPrologueCallbacks(
          GCType::kGCTypeProcessWeakCallbacks, kNoGCCallbackFlags);
      InvokeSecondPassPhantomCallbacks();
      isolate()->heap()->CallGCEpilogueCallbacks(
          GCType::kGCTypeProcessWeakCallbacks, kNoGCCallbackFlags);
    } else if (!second_pass_callbacks_task_posted_) {
      // Defer to a foreground task; never post more than one at a time.
      second_pass_callbacks_task_posted_ = true;
      auto task = new PendingPhantomCallbacksSecondPassTask(this, isolate());
      V8::GetCurrentPlatform()->CallOnForegroundThread(
          reinterpret_cast<v8::Isolate*>(isolate()), task);
    }
  }
  return freed_nodes;
}
881
882
// Runs one phantom callback. On the first pass (node_ != nullptr) the
// callback may register a second-pass callback through the Data object,
// and must reset the handle (enforced by the CHECK below).
void GlobalHandles::PendingPhantomCallback::Invoke(Isolate* isolate) {
  Data::Callback* callback_addr = nullptr;
  if (node_ != nullptr) {
    // Initialize for first pass callback.
    DCHECK(node_->state() == Node::NEAR_DEATH);
    callback_addr = &callback_;
  }
  Data data(reinterpret_cast<v8::Isolate*>(isolate), parameter_,
            embedder_fields_, callback_addr);
  Data::Callback callback = callback_;
  // Clear callback_ before invoking: a first-pass callback that wants a
  // second pass writes its new callback through callback_addr, which
  // points back at this field.
  callback_ = nullptr;
  callback(data);
  if (node_ != nullptr) {
    // Transition to second pass. It is required that the first pass callback
    // resets the handle using |v8::PersistentBase::Reset|. Also see comments
    // on |v8::WeakCallbackInfo|.
    CHECK_WITH_MSG(Node::FREE == node_->state(),
                   "Handle not reset in first callback. See comments on "
                   "|v8::WeakCallbackInfo|.");
    node_ = nullptr;
  }
}
905
906
PostGarbageCollectionProcessing(GarbageCollector collector,const v8::GCCallbackFlags gc_callback_flags)907 int GlobalHandles::PostGarbageCollectionProcessing(
908 GarbageCollector collector, const v8::GCCallbackFlags gc_callback_flags) {
909 // Process weak global handle callbacks. This must be done after the
910 // GC is completely done, because the callbacks may invoke arbitrary
911 // API functions.
912 DCHECK(isolate_->heap()->gc_state() == Heap::NOT_IN_GC);
913 const int initial_post_gc_processing_count = ++post_gc_processing_count_;
914 int freed_nodes = 0;
915 bool synchronous_second_pass =
916 (gc_callback_flags &
917 (kGCCallbackFlagForced | kGCCallbackFlagCollectAllAvailableGarbage |
918 kGCCallbackFlagSynchronousPhantomCallbackProcessing)) != 0;
919 freed_nodes += DispatchPendingPhantomCallbacks(synchronous_second_pass);
920 if (initial_post_gc_processing_count != post_gc_processing_count_) {
921 // If the callbacks caused a nested GC, then return. See comment in
922 // PostScavengeProcessing.
923 return freed_nodes;
924 }
925 if (Heap::IsYoungGenerationCollector(collector)) {
926 freed_nodes += PostScavengeProcessing(initial_post_gc_processing_count);
927 } else {
928 freed_nodes += PostMarkSweepProcessing(initial_post_gc_processing_count);
929 }
930 if (initial_post_gc_processing_count != post_gc_processing_count_) {
931 // If the callbacks caused a nested GC, then return. See comment in
932 // PostScavengeProcessing.
933 return freed_nodes;
934 }
935 if (initial_post_gc_processing_count == post_gc_processing_count_) {
936 UpdateListOfNewSpaceNodes();
937 }
938 return freed_nodes;
939 }
940
IterateStrongRoots(RootVisitor * v)941 void GlobalHandles::IterateStrongRoots(RootVisitor* v) {
942 for (NodeIterator it(this); !it.done(); it.Advance()) {
943 if (it.node()->IsStrongRetainer()) {
944 v->VisitRootPointer(Root::kGlobalHandles, it.node()->label(),
945 it.node()->location());
946 }
947 }
948 }
949
IterateWeakRoots(RootVisitor * v)950 void GlobalHandles::IterateWeakRoots(RootVisitor* v) {
951 for (NodeIterator it(this); !it.done(); it.Advance()) {
952 if (it.node()->IsWeak()) {
953 v->VisitRootPointer(Root::kGlobalHandles, it.node()->label(),
954 it.node()->location());
955 }
956 }
957 }
958
959 DISABLE_CFI_PERF
IterateAllRoots(RootVisitor * v)960 void GlobalHandles::IterateAllRoots(RootVisitor* v) {
961 for (NodeIterator it(this); !it.done(); it.Advance()) {
962 if (it.node()->IsRetainer()) {
963 v->VisitRootPointer(Root::kGlobalHandles, it.node()->label(),
964 it.node()->location());
965 }
966 }
967 }
968
969 DISABLE_CFI_PERF
IterateAllNewSpaceRoots(RootVisitor * v)970 void GlobalHandles::IterateAllNewSpaceRoots(RootVisitor* v) {
971 for (Node* node : new_space_nodes_) {
972 if (node->IsRetainer()) {
973 v->VisitRootPointer(Root::kGlobalHandles, node->label(),
974 node->location());
975 }
976 }
977 }
978
979 DISABLE_CFI_PERF
IterateNewSpaceRoots(RootVisitor * v,size_t start,size_t end)980 void GlobalHandles::IterateNewSpaceRoots(RootVisitor* v, size_t start,
981 size_t end) {
982 for (size_t i = start; i < end; ++i) {
983 Node* node = new_space_nodes_[i];
984 if (node->IsRetainer()) {
985 v->VisitRootPointer(Root::kGlobalHandles, node->label(),
986 node->location());
987 }
988 }
989 }
990
DISABLE_CFI_PERF
// Presents |node| to an embedder-supplied visitor as a v8::Persistent.
// NOTE(review): the cast of |&value| (a stack slot holding the handle's
// location) to v8::Persistent<v8::Value>* relies on Persistent being
// layout-compatible with a bare slot pointer — assumed, not shown here.
void GlobalHandles::ApplyPersistentHandleVisitor(
    v8::PersistentHandleVisitor* visitor, GlobalHandles::Node* node) {
  v8::Value* value = ToApi<v8::Value>(Handle<Object>(node->location()));
  visitor->VisitPersistentHandle(
      reinterpret_cast<v8::Persistent<v8::Value>*>(&value),
      node->wrapper_class_id());
}
999
1000 DISABLE_CFI_PERF
IterateAllRootsWithClassIds(v8::PersistentHandleVisitor * visitor)1001 void GlobalHandles::IterateAllRootsWithClassIds(
1002 v8::PersistentHandleVisitor* visitor) {
1003 for (NodeIterator it(this); !it.done(); it.Advance()) {
1004 if (it.node()->IsRetainer() && it.node()->has_wrapper_class_id()) {
1005 ApplyPersistentHandleVisitor(visitor, it.node());
1006 }
1007 }
1008 }
1009
1010
1011 DISABLE_CFI_PERF
IterateAllRootsInNewSpaceWithClassIds(v8::PersistentHandleVisitor * visitor)1012 void GlobalHandles::IterateAllRootsInNewSpaceWithClassIds(
1013 v8::PersistentHandleVisitor* visitor) {
1014 for (Node* node : new_space_nodes_) {
1015 if (node->IsRetainer() && node->has_wrapper_class_id()) {
1016 ApplyPersistentHandleVisitor(visitor, node);
1017 }
1018 }
1019 }
1020
1021
1022 DISABLE_CFI_PERF
IterateWeakRootsInNewSpaceWithClassIds(v8::PersistentHandleVisitor * visitor)1023 void GlobalHandles::IterateWeakRootsInNewSpaceWithClassIds(
1024 v8::PersistentHandleVisitor* visitor) {
1025 for (Node* node : new_space_nodes_) {
1026 if (node->has_wrapper_class_id() && node->IsWeak()) {
1027 ApplyPersistentHandleVisitor(visitor, node);
1028 }
1029 }
1030 }
1031
RecordStats(HeapStats * stats)1032 void GlobalHandles::RecordStats(HeapStats* stats) {
1033 *stats->global_handle_count = 0;
1034 *stats->weak_global_handle_count = 0;
1035 *stats->pending_global_handle_count = 0;
1036 *stats->near_death_global_handle_count = 0;
1037 *stats->free_global_handle_count = 0;
1038 for (NodeIterator it(this); !it.done(); it.Advance()) {
1039 *stats->global_handle_count += 1;
1040 if (it.node()->state() == Node::WEAK) {
1041 *stats->weak_global_handle_count += 1;
1042 } else if (it.node()->state() == Node::PENDING) {
1043 *stats->pending_global_handle_count += 1;
1044 } else if (it.node()->state() == Node::NEAR_DEATH) {
1045 *stats->near_death_global_handle_count += 1;
1046 } else if (it.node()->state() == Node::FREE) {
1047 *stats->free_global_handle_count += 1;
1048 }
1049 }
1050 }
1051
1052 #ifdef DEBUG
1053
PrintStats()1054 void GlobalHandles::PrintStats() {
1055 int total = 0;
1056 int weak = 0;
1057 int pending = 0;
1058 int near_death = 0;
1059 int destroyed = 0;
1060
1061 for (NodeIterator it(this); !it.done(); it.Advance()) {
1062 total++;
1063 if (it.node()->state() == Node::WEAK) weak++;
1064 if (it.node()->state() == Node::PENDING) pending++;
1065 if (it.node()->state() == Node::NEAR_DEATH) near_death++;
1066 if (it.node()->state() == Node::FREE) destroyed++;
1067 }
1068
1069 PrintF("Global Handle Statistics:\n");
1070 PrintF(" allocated memory = %" PRIuS "B\n", total * sizeof(Node));
1071 PrintF(" # weak = %d\n", weak);
1072 PrintF(" # pending = %d\n", pending);
1073 PrintF(" # near_death = %d\n", near_death);
1074 PrintF(" # free = %d\n", destroyed);
1075 PrintF(" # total = %d\n", total);
1076 }
1077
1078
Print()1079 void GlobalHandles::Print() {
1080 PrintF("Global handles:\n");
1081 for (NodeIterator it(this); !it.done(); it.Advance()) {
1082 PrintF(" handle %p to %p%s\n",
1083 reinterpret_cast<void*>(it.node()->location()),
1084 reinterpret_cast<void*>(it.node()->object()),
1085 it.node()->IsWeak() ? " (weak)" : "");
1086 }
1087 }
1088
1089 #endif
1090
// Intentionally empty: GlobalHandles has no teardown work to perform here.
void GlobalHandles::TearDown() {}
1092
EternalHandles()1093 EternalHandles::EternalHandles() : size_(0) {
1094 for (unsigned i = 0; i < arraysize(singleton_handles_); i++) {
1095 singleton_handles_[i] = kInvalidIndex;
1096 }
1097 }
1098
1099
~EternalHandles()1100 EternalHandles::~EternalHandles() {
1101 for (Object** block : blocks_) delete[] block;
1102 }
1103
IterateAllRoots(RootVisitor * visitor)1104 void EternalHandles::IterateAllRoots(RootVisitor* visitor) {
1105 int limit = size_;
1106 for (Object** block : blocks_) {
1107 DCHECK_GT(limit, 0);
1108 visitor->VisitRootPointers(Root::kEternalHandles, nullptr, block,
1109 block + Min(limit, kSize));
1110 limit -= kSize;
1111 }
1112 }
1113
IterateNewSpaceRoots(RootVisitor * visitor)1114 void EternalHandles::IterateNewSpaceRoots(RootVisitor* visitor) {
1115 for (int index : new_space_indices_) {
1116 visitor->VisitRootPointer(Root::kEternalHandles, nullptr,
1117 GetLocation(index));
1118 }
1119 }
1120
PostGarbageCollectionProcessing()1121 void EternalHandles::PostGarbageCollectionProcessing() {
1122 size_t last = 0;
1123 for (int index : new_space_indices_) {
1124 if (Heap::InNewSpace(*GetLocation(index))) {
1125 new_space_indices_[last++] = index;
1126 }
1127 }
1128 DCHECK_LE(last, new_space_indices_.size());
1129 new_space_indices_.resize(last);
1130 }
1131
1132
// Allocates a new eternal handle for |object| and stores its index in
// |*index| (which must still be kInvalidIndex). A nullptr object is a no-op.
// Handles live in fixed-size blocks of kSize slots; size_ encodes the next
// free slot as (block << kShift) | offset, with unused slots holding
// the_hole as a sentinel.
void EternalHandles::Create(Isolate* isolate, Object* object, int* index) {
  DCHECK_EQ(kInvalidIndex, *index);
  if (object == nullptr) return;
  Object* the_hole = ReadOnlyRoots(isolate).the_hole_value();
  DCHECK_NE(the_hole, object);
  int block = size_ >> kShift;
  int offset = size_ & kMask;
  // Offset 0 means the previous block is full (or none exists yet):
  // allocate a fresh block, pre-filled with the hole sentinel.
  if (offset == 0) {
    Object** next_block = new Object*[kSize];
    MemsetPointer(next_block, the_hole, kSize);
    blocks_.push_back(next_block);
  }
  DCHECK_EQ(the_hole, blocks_[block][offset]);
  blocks_[block][offset] = object;
  // New-space objects must be tracked so the GC can update/prune them in
  // PostGarbageCollectionProcessing.
  if (Heap::InNewSpace(object)) {
    new_space_indices_.push_back(size_);
  }
  *index = size_++;
}
1153
1154
1155 } // namespace internal
1156 } // namespace v8
1157