// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_NEW_SPACES_INL_H_
#define V8_HEAP_NEW_SPACES_INL_H_

#include "src/common/globals.h"
#include "src/heap/heap.h"
#include "src/heap/new-spaces.h"
#include "src/heap/spaces-inl.h"
#include "src/objects/tagged-impl.h"
#include "src/sanitizer/msan.h"

namespace v8 {
namespace internal {

// -----------------------------------------------------------------------------
// SemiSpace

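// Returns whether |o| lives on a regular (non-large) page whose
// from-/to-page flag matches this semispace's identity.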
bool SemiSpace::Contains(HeapObject o) const {
  BasicMemoryChunk* memory_chunk = BasicMemoryChunk::FromHeapObject(o);
  if (memory_chunk->IsLargePage()) return false;
  return id_ == kToSpace ? memory_chunk->IsToPage()
                         : memory_chunk->IsFromPage();
}

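// Tagged-value overload: smis are immediates and never reside in a
// semispace.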
bool SemiSpace::Contains(Object o) const {
  return o.IsHeapObject() && Contains(HeapObject::cast(o));
}

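// Linear walk over the page list; slower than Contains() but does not
// rely on page flags.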
bool SemiSpace::ContainsSlow(Address a) const {
  for (const Page* p : *this) {
    if (p == BasicMemoryChunk::FromAddress(a)) return true;
  }
  return false;
}

// --------------------------------------------------------------------------
// NewSpace

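// Tagged-value overload: only heap objects can live in the new space.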
bool NewSpace::Contains(Object o) const {
  return o.IsHeapObject() && Contains(HeapObject::cast(o));
}

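// Fast check based on the owning chunk's new-space page flags.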
bool NewSpace::Contains(HeapObject o) const {
  return BasicMemoryChunk::FromHeapObject(o)->InNewSpace();
}

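// Slow check that walks the pages of both semispaces.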
bool NewSpace::ContainsSlow(Address a) const {
  return from_space_.ContainsSlow(a) || to_space_.ContainsSlow(a);
}

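// Page-walking check restricted to the to-space semispace.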
bool NewSpace::ToSpaceContainsSlow(Address a) const {
  return to_space_.ContainsSlow(a);
}

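// Flag-based checks against a single semispace.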
bool NewSpace::ToSpaceContains(Object o) const { return to_space_.Contains(o); }
bool NewSpace::FromSpaceContains(Object o) const {
  return from_space_.Contains(o);
}

// -----------------------------------------------------------------------------
// SemiSpaceObjectIterator

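// Advances to the next live object, hopping to the following page's
// area when the cursor reaches a page boundary and skipping free-space
// and filler objects. Returns an empty HeapObject once |limit_| is
// reached.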
HeapObject SemiSpaceObjectIterator::Next() {
  while (current_ != limit_) {
    if (Page::IsAlignedToPageSize(current_)) {
      Page* page = Page::FromAllocationAreaAddress(current_);
      page = page->next_page();
      DCHECK(page);
      current_ = page->area_start();
      if (current_ == limit_) return HeapObject();
    }
    HeapObject object = HeapObject::FromAddress(current_);
    current_ += object.Size();
    if (!object.IsFreeSpaceOrFiller()) {
      return object;
    }
  }
  return HeapObject();
}

// -----------------------------------------------------------------------------
// NewSpace

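// Allocation fast path: attempts a bump-pointer allocation in the
// current linear allocation area and defers to the out-of-line
// AllocateRawSlow() only when the fast path signals a retry.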
AllocationResult NewSpace::AllocateRaw(int size_in_bytes,
                                       AllocationAlignment alignment,
                                       AllocationOrigin origin) {
#if DEBUG
  VerifyTop();
#endif

  AllocationResult result;

  if (alignment != kWordAligned) {
    result = AllocateFastAligned(size_in_bytes, nullptr, alignment, origin);
  } else {
    result = AllocateFastUnaligned(size_in_bytes, origin);
  }

  if (!result.IsRetry()) {
    return result;
  } else {
    return AllocateRawSlow(size_in_bytes, alignment, origin);
  }
}

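// Bump-pointer allocation with no alignment padding. Signals a retry
// when fewer than |size_in_bytes| bytes remain in the linear allocation
// area.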
AllocationResult NewSpace::AllocateFastUnaligned(int size_in_bytes,
                                                 AllocationOrigin origin) {
  Address top = allocation_info_.top();
  if (allocation_info_.limit() < top + size_in_bytes) {
    return AllocationResult::Retry();
  }

  HeapObject obj = HeapObject::FromAddress(top);
  allocation_info_.set_top(top + size_in_bytes);
  DCHECK_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);

  MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj.address(), size_in_bytes);

  if (FLAG_trace_allocations_origins) {
    UpdateAllocationOrigins(origin);
  }

  return obj;
}

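// Bump-pointer allocation that pads |top| up to |alignment|. Any
// padding is turned into a filler object preceding the returned object,
// and the padded size is reported through |result_aligned_size_in_bytes|
// when requested.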
AllocationResult NewSpace::AllocateFastAligned(
    int size_in_bytes, int* result_aligned_size_in_bytes,
    AllocationAlignment alignment, AllocationOrigin origin) {
  Address top = allocation_info_.top();
  int filler_size = Heap::GetFillToAlign(top, alignment);
  int aligned_size_in_bytes = size_in_bytes + filler_size;

  if (allocation_info_.limit() - top <
      static_cast<uintptr_t>(aligned_size_in_bytes)) {
    return AllocationResult::Retry();
  }

  HeapObject obj = HeapObject::FromAddress(top);
  allocation_info_.set_top(top + aligned_size_in_bytes);
  if (result_aligned_size_in_bytes)
    *result_aligned_size_in_bytes = aligned_size_in_bytes;
  DCHECK_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);

  if (filler_size > 0) {
    obj = Heap::PrecedeWithFiller(ReadOnlyRoots(heap()), obj, filler_size);
  }

  MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj.address(), size_in_bytes);

  if (FLAG_trace_allocations_origins) {
    UpdateAllocationOrigins(origin);
  }

  return obj;
}

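// Takes the space mutex around AllocateRaw() so that concurrent callers
// do not race on the allocation info.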
V8_WARN_UNUSED_RESULT inline AllocationResult NewSpace::AllocateRawSynchronized(
    int size_in_bytes, AllocationAlignment alignment, AllocationOrigin origin) {
  base::MutexGuard guard(&mutex_);
  return AllocateRaw(size_in_bytes, alignment, origin);
}

}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_NEW_SPACES_INL_H_