// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/array-buffer-tracker.h"

#include <vector>

#include "src/heap/array-buffer-collector.h"
#include "src/heap/array-buffer-tracker-inl.h"
#include "src/heap/heap.h"
#include "src/heap/spaces.h"

namespace v8 {
namespace internal {

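// A tracker may only die once all of its entries have been processed, i.e.
// every tracked backing store has been freed or re-registered with another
// page's tracker.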
LocalArrayBufferTracker::~LocalArrayBufferTracker() {
  CHECK(array_buffers_.empty());
}

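// Runs |callback| over every tracked buffer on this page. Depending on the
// callback's result an entry is kept as-is, re-registered with the tracker of
// the page the buffer was moved to, or queued for freeing by the collector.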
template <typename Callback>
void LocalArrayBufferTracker::Process(Callback callback) {
  std::vector<JSArrayBuffer::Allocation> backing_stores_to_free;
  TrackingData kept_array_buffers;

  JSArrayBuffer* new_buffer = nullptr;
  JSArrayBuffer* old_buffer = nullptr;
  size_t freed_memory = 0;
  size_t moved_memory = 0;
  for (TrackingData::iterator it = array_buffers_.begin();
       it != array_buffers_.end(); ++it) {
    old_buffer = it->first;
    Page* old_page = Page::FromAddress(old_buffer->address());
    const CallbackResult result = callback(old_buffer, &new_buffer);
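    // Dispatch on the callback's verdict for this buffer.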
    if (result == kKeepEntry) {
      kept_array_buffers.insert(*it);
    } else if (result == kUpdateEntry) {
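      // The buffer was moved to another page; hand its entry over to that
      // page's tracker, allocating the tracker on demand.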
      DCHECK_NOT_NULL(new_buffer);
      Page* target_page = Page::FromAddress(new_buffer->address());
      {
        base::LockGuard<base::Mutex> guard(target_page->mutex());
        LocalArrayBufferTracker* tracker = target_page->local_tracker();
        if (tracker == nullptr) {
          target_page->AllocateLocalTracker();
          tracker = target_page->local_tracker();
        }
        DCHECK_NOT_NULL(tracker);
        const size_t length = it->second.length;
        // We should decrement before adding to avoid potential overflows in
        // the external memory counters.
        DCHECK_EQ(it->first->is_wasm_memory(), it->second.is_wasm_memory);
        old_page->DecrementExternalBackingStoreBytes(
            ExternalBackingStoreType::kArrayBuffer, length);
        tracker->Add(new_buffer, length);
      }
      moved_memory += it->second.length;

    } else if (result == kRemoveEntry) {
      const size_t length = it->second.length;
      freed_memory += length;
      // We pass backing_store() and stored length to the collector for freeing
      // the backing store. Wasm allocations will go through their own tracker
      // based on the backing store.
      backing_stores_to_free.push_back(it->second);
      old_page->DecrementExternalBackingStoreBytes(
          ExternalBackingStoreType::kArrayBuffer, length);
    } else {
      UNREACHABLE();
    }
  }
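  // Report the bytes freed by this pass to the heap's external memory
  // accounting.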
  if (moved_memory || freed_memory) {
    // TODO(wez): Remove backing-store from external memory accounting.
    page_->heap()->update_external_memory_concurrently_freed(
        static_cast<intptr_t>(freed_memory));
  }

  array_buffers_.swap(kept_array_buffers);

  // Pass the backing stores that need to be freed to the main thread for later
  // distribution.
  page_->heap()->array_buffer_collector()->AddGarbageAllocations(
      std::move(backing_stores_to_free));
}

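// Called during a scavenge: every live buffer in from-space has a forwarding
// address, so processing with kUpdateForwardedRemoveOthers must leave each
// from-space tracker empty.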
void ArrayBufferTracker::PrepareToFreeDeadInNewSpace(Heap* heap) {
  DCHECK_EQ(heap->gc_state(), Heap::HeapState::SCAVENGE);
  for (Page* page :
       PageRange(heap->new_space()->from_space().first_page(), nullptr)) {
    bool empty = ProcessBuffers(page, kUpdateForwardedRemoveOthers);
    CHECK(empty);
  }
}

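// Unconditionally frees every backing store tracked on |page| and releases
// the page's tracker if it ends up empty.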
void ArrayBufferTracker::FreeAll(Page* page) {
  LocalArrayBufferTracker* tracker = page->local_tracker();
  if (tracker == nullptr) return;
  tracker->Free([](JSArrayBuffer* buffer) { return true; });
  if (tracker->IsEmpty()) {
    page->ReleaseLocalTracker();
  }
}

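// Updates the tracker of |page| after evacuation: forwarded buffers are
// re-registered at their new location, the rest are kept or freed depending
// on |mode|. Returns true if the tracker is empty afterwards.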
bool ArrayBufferTracker::ProcessBuffers(Page* page, ProcessingMode mode) {
  LocalArrayBufferTracker* tracker = page->local_tracker();
  if (tracker == nullptr) return true;

  DCHECK(page->SweepingDone());
  tracker->Process(
      [mode](JSArrayBuffer* old_buffer, JSArrayBuffer** new_buffer) {
        MapWord map_word = old_buffer->map_word();
        if (map_word.IsForwardingAddress()) {
          *new_buffer = JSArrayBuffer::cast(map_word.ToForwardingAddress());
          return LocalArrayBufferTracker::kUpdateEntry;
        }
        return mode == kUpdateForwardedKeepOthers
                   ? LocalArrayBufferTracker::kKeepEntry
                   : LocalArrayBufferTracker::kRemoveEntry;
      });
  return tracker->IsEmpty();
}

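// Checks, under the page mutex, whether |buffer| is registered with the
// tracker of the page it lives on.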
bool ArrayBufferTracker::IsTracked(JSArrayBuffer* buffer) {
  Page* page = Page::FromAddress(buffer->address());
  {
    base::LockGuard<base::Mutex> guard(page->mutex());
    LocalArrayBufferTracker* tracker = page->local_tracker();
    if (tracker == nullptr) return false;
    return tracker->IsTracked(buffer);
  }
}

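// Releases all remaining tracked backing stores when the heap is torn down.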
void ArrayBufferTracker::TearDown(Heap* heap) {
  // ArrayBuffers can only be found in NEW_SPACE and OLD_SPACE.
  for (Page* p : *heap->old_space()) {
    FreeAll(p);
  }
  NewSpace* new_space = heap->new_space();
  if (new_space->to_space().is_committed()) {
    for (Page* p : new_space->to_space()) {
      FreeAll(p);
    }
  }
#ifdef DEBUG
  if (new_space->from_space().is_committed()) {
    for (Page* p : new_space->from_space()) {
      DCHECK(!p->contains_array_buffers());
    }
  }
#endif  // DEBUG
}

}  // namespace internal
}  // namespace v8