// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
4
5 #ifndef V8_HEAP_LOCAL_ALLOCATOR_INL_H_
6 #define V8_HEAP_LOCAL_ALLOCATOR_INL_H_
7
8 #include "src/heap/local-allocator.h"
9
10 #include "src/heap/spaces-inl.h"
11
12 namespace v8 {
13 namespace internal {
14
Allocate(AllocationSpace space,int object_size,AllocationOrigin origin,AllocationAlignment alignment)15 AllocationResult EvacuationAllocator::Allocate(AllocationSpace space,
16 int object_size,
17 AllocationOrigin origin,
18 AllocationAlignment alignment) {
19 switch (space) {
20 case NEW_SPACE:
21 return AllocateInNewSpace(object_size, origin, alignment);
22 case OLD_SPACE:
23 return compaction_spaces_.Get(OLD_SPACE)->AllocateRaw(object_size,
24 alignment, origin);
25 case CODE_SPACE:
26 return compaction_spaces_.Get(CODE_SPACE)
27 ->AllocateRaw(object_size, alignment, origin);
28 default:
29 UNREACHABLE();
30 }
31 }
32
FreeLast(AllocationSpace space,HeapObject object,int object_size)33 void EvacuationAllocator::FreeLast(AllocationSpace space, HeapObject object,
34 int object_size) {
35 switch (space) {
36 case NEW_SPACE:
37 FreeLastInNewSpace(object, object_size);
38 return;
39 case OLD_SPACE:
40 FreeLastInOldSpace(object, object_size);
41 return;
42 default:
43 // Only new and old space supported.
44 UNREACHABLE();
45 }
46 }
47
FreeLastInNewSpace(HeapObject object,int object_size)48 void EvacuationAllocator::FreeLastInNewSpace(HeapObject object,
49 int object_size) {
50 if (!new_space_lab_.TryFreeLast(object, object_size)) {
51 // We couldn't free the last object so we have to write a proper filler.
52 heap_->CreateFillerObjectAt(object.address(), object_size,
53 ClearRecordedSlots::kNo);
54 }
55 }
56
FreeLastInOldSpace(HeapObject object,int object_size)57 void EvacuationAllocator::FreeLastInOldSpace(HeapObject object,
58 int object_size) {
59 if (!compaction_spaces_.Get(OLD_SPACE)->TryFreeLast(object, object_size)) {
60 // We couldn't free the last object so we have to write a proper filler.
61 heap_->CreateFillerObjectAt(object.address(), object_size,
62 ClearRecordedSlots::kNo);
63 }
64 }
65
AllocateInLAB(int object_size,AllocationAlignment alignment)66 AllocationResult EvacuationAllocator::AllocateInLAB(
67 int object_size, AllocationAlignment alignment) {
68 AllocationResult allocation;
69 if (!new_space_lab_.IsValid() && !NewLocalAllocationBuffer()) {
70 return AllocationResult::Retry(OLD_SPACE);
71 }
72 allocation = new_space_lab_.AllocateRawAligned(object_size, alignment);
73 if (allocation.IsRetry()) {
74 if (!NewLocalAllocationBuffer()) {
75 return AllocationResult::Retry(OLD_SPACE);
76 } else {
77 allocation = new_space_lab_.AllocateRawAligned(object_size, alignment);
78 CHECK(!allocation.IsRetry());
79 }
80 }
81 return allocation;
82 }
83
NewLocalAllocationBuffer()84 bool EvacuationAllocator::NewLocalAllocationBuffer() {
85 if (lab_allocation_will_fail_) return false;
86 AllocationResult result =
87 new_space_->AllocateRawSynchronized(kLabSize, kWordAligned);
88 if (result.IsRetry()) {
89 lab_allocation_will_fail_ = true;
90 return false;
91 }
92 LocalAllocationBuffer saved_lab = std::move(new_space_lab_);
93 new_space_lab_ = LocalAllocationBuffer::FromResult(heap_, result, kLabSize);
94 DCHECK(new_space_lab_.IsValid());
95 if (!new_space_lab_.TryMerge(&saved_lab)) {
96 saved_lab.CloseAndMakeIterable();
97 }
98 return true;
99 }
100
AllocateInNewSpace(int object_size,AllocationOrigin origin,AllocationAlignment alignment)101 AllocationResult EvacuationAllocator::AllocateInNewSpace(
102 int object_size, AllocationOrigin origin, AllocationAlignment alignment) {
103 if (object_size > kMaxLabObjectSize) {
104 return new_space_->AllocateRawSynchronized(object_size, alignment, origin);
105 }
106 return AllocateInLAB(object_size, alignment);
107 }
108
109 } // namespace internal
110 } // namespace v8
111
112 #endif // V8_HEAP_LOCAL_ALLOCATOR_INL_H_
113