// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/heap-allocator.h"

#include "src/base/logging.h"
#include "src/common/globals.h"
#include "src/execution/isolate.h"
#include "src/heap/heap-allocator-inl.h"
#include "src/heap/heap-inl.h"
#include "src/logging/counters.h"

namespace v8 {
namespace internal {

class Heap;

HeapAllocator::HeapAllocator(Heap* heap) : heap_(heap) {}

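// Caches the heap's space pointers and resolves the concrete allocators used
// for map and shared-heap allocations.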
void HeapAllocator::Setup() {
  for (int i = FIRST_SPACE; i <= LAST_SPACE; ++i) {
    spaces_[i] = heap_->space(i);
  }

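  // If no dedicated map space exists (spaces_[MAP_SPACE] is null), maps are
  // allocated in the old space instead.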
  space_for_maps_ = spaces_[MAP_SPACE]
                        ? static_cast<PagedSpace*>(spaces_[MAP_SPACE])
                        : static_cast<PagedSpace*>(spaces_[OLD_SPACE]);

  shared_old_allocator_ = heap_->shared_old_allocator_.get();
  shared_map_allocator_ = heap_->shared_map_allocator_
                              ? heap_->shared_map_allocator_.get()
                              : shared_old_allocator_;
}

void HeapAllocator::SetReadOnlySpace(ReadOnlySpace* read_only_space) {
  read_only_space_ = read_only_space;
}

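// Dispatches a large-object allocation to the large-object space matching the
// allocation type. Only young, old, and code allocations support large
// objects; the remaining types must never reach this path.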
AllocationResult HeapAllocator::AllocateRawLargeInternal(
    int size_in_bytes, AllocationType allocation, AllocationOrigin origin,
    AllocationAlignment alignment) {
  DCHECK_GT(size_in_bytes, heap_->MaxRegularHeapObjectSize(allocation));
  switch (allocation) {
    case AllocationType::kYoung:
      return new_lo_space()->AllocateRaw(size_in_bytes);
    case AllocationType::kOld:
      return lo_space()->AllocateRaw(size_in_bytes);
    case AllocationType::kCode:
      return code_lo_space()->AllocateRaw(size_in_bytes);
    case AllocationType::kMap:
    case AllocationType::kReadOnly:
    case AllocationType::kSharedMap:
    case AllocationType::kSharedOld:
      UNREACHABLE();
  }
}

namespace {

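// Translates a failing allocation's type into the space argument passed to
// Heap::CollectGarbage(). Shared and read-only allocations are handled
// elsewhere and must never reach this function.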
constexpr AllocationSpace AllocationTypeToGCSpace(AllocationType type) {
  switch (type) {
    case AllocationType::kYoung:
      return NEW_SPACE;
    case AllocationType::kOld:
    case AllocationType::kCode:
    case AllocationType::kMap:
      // OLD_SPACE indicates full GC.
      return OLD_SPACE;
    case AllocationType::kReadOnly:
    case AllocationType::kSharedMap:
    case AllocationType::kSharedOld:
      UNREACHABLE();
  }
}

}  // namespace

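// Performs the initial allocation attempt plus up to two GC-and-retry rounds.
// Returns the last (possibly failing) result; callers decide how to escalate.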
AllocationResult HeapAllocator::AllocateRawWithLightRetrySlowPath(
    int size, AllocationType allocation, AllocationOrigin origin,
    AllocationAlignment alignment) {
  AllocationResult result = AllocateRaw(size, allocation, origin, alignment);
  if (!result.IsFailure()) {
    return result;
  }

  // Perform up to two GCs, retrying the allocation after each, before
  // returning failure.
  for (int i = 0; i < 2; i++) {
    if (IsSharedAllocationType(allocation)) {
      heap_->CollectSharedGarbage(GarbageCollectionReason::kAllocationFailure);
    } else {
      heap_->CollectGarbage(AllocationTypeToGCSpace(allocation),
                            GarbageCollectionReason::kAllocationFailure);
    }
    result = AllocateRaw(size, allocation, origin, alignment);
    if (!result.IsFailure()) {
      return result;
    }
  }
  return result;
}

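// Last-resort path: after the light retries fail, performs a last-resort
// collection and retries once more with allocation limits lifted
// (AlwaysAllocateScope). Terminates the process with an OOM error if the
// allocation still fails.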
AllocationResult HeapAllocator::AllocateRawWithRetryOrFailSlowPath(
    int size, AllocationType allocation, AllocationOrigin origin,
    AllocationAlignment alignment) {
  AllocationResult result =
      AllocateRawWithLightRetrySlowPath(size, allocation, origin, alignment);
  if (!result.IsFailure()) return result;

  heap_->isolate()->counters()->gc_last_resort_from_handles()->Increment();
  if (IsSharedAllocationType(allocation)) {
    heap_->CollectSharedGarbage(GarbageCollectionReason::kLastResort);

    // We need always_allocate() to be true both on the client and the shared
    // isolate. It is used in both code paths.
    AlwaysAllocateScope shared_scope(
        heap_->isolate()->shared_isolate()->heap());
    AlwaysAllocateScope client_scope(heap_);
    result = AllocateRaw(size, allocation, origin, alignment);
  } else {
    heap_->CollectAllAvailableGarbage(GarbageCollectionReason::kLastResort);

    AlwaysAllocateScope scope(heap_);
    result = AllocateRaw(size, allocation, origin, alignment);
  }

  if (!result.IsFailure()) {
    return result;
  }

  v8::internal::V8::FatalProcessOutOfMemory(heap_->isolate(),
                                            "CALL_AND_RETRY_LAST", true);
}

#ifdef DEBUG

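// Debug-only bookkeeping: bumps the counters tracking allocations since the
// last full and the last young GC.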
void HeapAllocator::IncrementObjectCounters() {
  heap_->isolate()->counters()->objs_since_last_full()->Increment();
  heap_->isolate()->counters()->objs_since_last_young()->Increment();
}

#endif  // DEBUG

#ifdef V8_ENABLE_ALLOCATION_TIMEOUT

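// Sets the remaining number of allocations before a GC is artificially
// triggered.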
void HeapAllocator::SetAllocationTimeout(int allocation_timeout) {
  allocation_timeout_ = allocation_timeout;
}

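// Recomputes the allocation timeout, either from the fuzzer RNG (when
// FLAG_random_gc_interval is set) or from the fixed FLAG_gc_interval.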
void HeapAllocator::UpdateAllocationTimeout() {
  if (FLAG_random_gc_interval > 0) {
    const int new_timeout = allocation_timeout_ <= 0
                                ? heap_->isolate()->fuzzer_rng()->NextInt(
                                      FLAG_random_gc_interval + 1)
                                : allocation_timeout_;
    // Reset the allocation timeout, but make sure to allow at least a few
    // allocations after a collection. The reason for this is that we have a
    // lot of allocation sequences and we assume that a garbage collection
    // will allow the subsequent allocation attempts to go through.
    constexpr int kFewAllocationsHeadroom = 6;
    allocation_timeout_ = std::max(kFewAllocationsHeadroom, new_timeout);
  } else if (FLAG_gc_interval >= 0) {
    allocation_timeout_ = FLAG_gc_interval;
  }
}

#endif  // V8_ENABLE_ALLOCATION_TIMEOUT

}  // namespace internal
}  // namespace v8