// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/concurrent-allocator.h"

#include "src/common/globals.h"
#include "src/execution/isolate.h"
#include "src/handles/persistent-handles.h"
#include "src/heap/concurrent-allocator-inl.h"
#include "src/heap/local-heap-inl.h"
#include "src/heap/local-heap.h"
#include "src/heap/marking.h"
#include "src/heap/memory-chunk.h"

namespace v8 {
namespace internal {

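// Stress task for concurrent allocation: repeatedly performs small, medium,
// and large old-space allocations from a background thread. Each allocation
// is immediately turned into a filler object so the heap stays iterable, and
// Safepoint() is called between allocations so the background thread can
// stop for GC when requested.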
void StressConcurrentAllocatorTask::RunInternal() {
  Heap* heap = isolate_->heap();
  LocalHeap local_heap(heap, ThreadKind::kBackground);
  UnparkedScope unparked_scope(&local_heap);

  const int kNumIterations = 2000;
  const int kSmallObjectSize = 10 * kTaggedSize;
  const int kMediumObjectSize = 8 * KB;
  const int kLargeObjectSize =
      static_cast<int>(MemoryChunk::kPageSize -
                       MemoryChunkLayout::ObjectStartOffsetInDataPage());

  for (int i = 0; i < kNumIterations; i++) {
    // Isolate tear-down has started; stop allocating.
    if (heap->gc_state() == Heap::TEAR_DOWN) return;

    Address address = local_heap.AllocateRawOrFail(
        kSmallObjectSize, AllocationType::kOld, AllocationOrigin::kRuntime,
        AllocationAlignment::kWordAligned);
    heap->CreateFillerObjectAtBackground(
        address, kSmallObjectSize, ClearFreedMemoryMode::kDontClearFreedMemory);
    local_heap.Safepoint();

    address = local_heap.AllocateRawOrFail(
        kMediumObjectSize, AllocationType::kOld, AllocationOrigin::kRuntime,
        AllocationAlignment::kWordAligned);
    heap->CreateFillerObjectAtBackground(
        address, kMediumObjectSize,
        ClearFreedMemoryMode::kDontClearFreedMemory);
    local_heap.Safepoint();

    address = local_heap.AllocateRawOrFail(
        kLargeObjectSize, AllocationType::kOld, AllocationOrigin::kRuntime,
        AllocationAlignment::kWordAligned);
    heap->CreateFillerObjectAtBackground(
        address, kLargeObjectSize, ClearFreedMemoryMode::kDontClearFreedMemory);
    local_heap.Safepoint();
  }

  // Reschedule the task so stress allocation keeps running.
  Schedule(isolate_);
}

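// Schedules a new stress allocation task on a worker thread after a short
// delay. Only valid with local heaps and concurrent allocation enabled.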
// static
void StressConcurrentAllocatorTask::Schedule(Isolate* isolate) {
  CHECK(FLAG_local_heaps && FLAG_concurrent_allocation);
  auto task = std::make_unique<StressConcurrentAllocatorTask>(isolate);
  const double kDelayInSeconds = 0.1;
  V8::GetCurrentPlatform()->CallDelayedOnWorkerThread(std::move(task),
                                                      kDelayInSeconds);
}

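// Gives up the current LAB. The unused portion is converted into a filler
// object so that the heap stays iterable.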
void ConcurrentAllocator::FreeLinearAllocationArea() {
  lab_.CloseAndMakeIterable();
}

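// Makes the unused portion of the current LAB iterable without giving the
// LAB up; allocation may continue in it afterwards.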
void ConcurrentAllocator::MakeLinearAllocationAreaIterable() {
  lab_.MakeIterable();
}

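// While black allocation is active, the unused part of the LAB is marked
// black so that objects later allocated from it are already considered live
// by the concurrent marker.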
void ConcurrentAllocator::MarkLinearAllocationAreaBlack() {
  Address top = lab_.top();
  Address limit = lab_.limit();

  if (top != kNullAddress && top != limit) {
    Page::FromAllocationAreaAddress(top)->CreateBlackAreaBackground(top, limit);
  }
}

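// Counterpart of MarkLinearAllocationAreaBlack: removes the black area
// covering the unused part of the LAB when black allocation ends.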
void ConcurrentAllocator::UnmarkLinearAllocationArea() {
  Address top = lab_.top();
  Address limit = lab_.limit();

  if (top != kNullAddress && top != limit) {
    Page::FromAllocationAreaAddress(top)->DestroyBlackAreaBackground(top,
                                                                     limit);
  }
}

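// Slow path of allocation: the current LAB is exhausted. Refill it and retry;
// the retry is expected to succeed (see the DCHECK below) since a fresh LAB
// is large enough for any object allocated through the LAB path.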
AllocationResult ConcurrentAllocator::AllocateInLabSlow(
    int object_size, AllocationAlignment alignment, AllocationOrigin origin) {
  if (!EnsureLab(origin)) {
    return AllocationResult::Retry(OLD_SPACE);
  }

  AllocationResult allocation = lab_.AllocateRawAligned(object_size, alignment);
  DCHECK(!allocation.IsRetry());

  return allocation;
}

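// Requests a new LAB between kLabSize and kMaxLabSize bytes from the space.
// If black allocation is active, the fresh area is blackened up front. The
// new LAB is then merged with the remainder of the old one if the two happen
// to be contiguous; otherwise the old LAB is closed and made iterable.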
bool ConcurrentAllocator::EnsureLab(AllocationOrigin origin) {
  auto result = space_->RawRefillLabBackground(
      local_heap_, kLabSize, kMaxLabSize, kWordAligned, origin);

  if (!result) return false;

  if (local_heap_->heap()->incremental_marking()->black_allocation()) {
    Address top = result->first;
    Address limit = top + result->second;
    Page::FromAllocationAreaAddress(top)->CreateBlackAreaBackground(top, limit);
  }

  HeapObject object = HeapObject::FromAddress(result->first);
  LocalAllocationBuffer saved_lab = std::move(lab_);
  lab_ = LocalAllocationBuffer::FromResult(
      local_heap_->heap(), AllocationResult(object), result->second);
  DCHECK(lab_.IsValid());
  if (!lab_.TryMerge(&saved_lab)) {
    saved_lab.CloseAndMakeIterable();
  }
  return true;
}

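// Objects that do not go through the LAB are requested from the space
// directly, using the exact object size as both minimum and maximum refill
// size. Such objects are marked black individually during black allocation.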
AllocationResult ConcurrentAllocator::AllocateOutsideLab(
    int object_size, AllocationAlignment alignment, AllocationOrigin origin) {
  auto result = space_->RawRefillLabBackground(local_heap_, object_size,
                                               object_size, alignment, origin);
  if (!result) return AllocationResult::Retry(OLD_SPACE);

  HeapObject object = HeapObject::FromAddress(result->first);

  if (local_heap_->heap()->incremental_marking()->black_allocation()) {
    local_heap_->heap()->incremental_marking()->MarkBlackBackground(
        object, object_size);
  }

  return AllocationResult(object);
}

}  // namespace internal
}  // namespace v8