// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/interpreter/constant-array-builder.h"

#include <cmath>
#include <functional>
#include <set>

#include "src/ast/ast-value-factory.h"
#include "src/ast/ast.h"
#include "src/ast/scopes.h"
#include "src/base/functional.h"
#include "src/isolate.h"
#include "src/objects-inl.h"

namespace v8 {
namespace internal {
namespace interpreter {

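// A ConstantArraySlice is one contiguous region of the eventual constant
// pool, addressable by bytecodes of a single operand width. It records its
// absolute start index in the pool, a fixed capacity, and how many slots
// are currently reserved (promised to a caller but not yet filled in).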
ConstantArrayBuilder::ConstantArraySlice::ConstantArraySlice(
    Zone* zone, size_t start_index, size_t capacity, OperandSize operand_size)
    : start_index_(start_index),
      capacity_(capacity),
      reserved_(0),
      operand_size_(operand_size),
      constants_(zone) {}

void ConstantArrayBuilder::ConstantArraySlice::Reserve() {
  DCHECK_GT(available(), 0u);
  reserved_++;
  DCHECK_LE(reserved_, capacity() - constants_.size());
}

void ConstantArrayBuilder::ConstantArraySlice::Unreserve() {
  DCHECK_GT(reserved_, 0u);
  reserved_--;
}

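// Appends |count| copies of |entry| and returns the absolute, builder-wide
// index of the first copy (slice-relative offset plus start_index()). Bulk
// allocation (count > 1) exists for jump tables, which need a contiguous
// run of indices.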
size_t ConstantArrayBuilder::ConstantArraySlice::Allocate(
    ConstantArrayBuilder::Entry entry, size_t count) {
  DCHECK_GE(available(), count);
  size_t index = constants_.size();
  DCHECK_LT(index, capacity());
  for (size_t i = 0; i < count; ++i) {
    constants_.push_back(entry);
  }
  return index + start_index();
}

ConstantArrayBuilder::Entry& ConstantArrayBuilder::ConstantArraySlice::At(
    size_t index) {
  DCHECK_GE(index, start_index());
  DCHECK_LT(index, start_index() + size());
  return constants_[index - start_index()];
}

const ConstantArrayBuilder::Entry& ConstantArrayBuilder::ConstantArraySlice::At(
    size_t index) const {
  DCHECK_GE(index, start_index());
  DCHECK_LT(index, start_index() + size());
  return constants_[index - start_index()];
}

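// Debug-only check run while the final array is assembled: entries must be
// unique within a slice. Uniqueness is judged on each Entry's raw payload
// (Smi pointer, double value, string pointer, and so on). Jump table slots
// are deliberately skipped, since they must stay contiguous and so may
// legitimately repeat a value.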
#if DEBUG
void ConstantArrayBuilder::ConstantArraySlice::CheckAllElementsAreUnique(
    Isolate* isolate) const {
  std::set<Smi*> smis;
  std::set<double> heap_numbers;
  std::set<const AstRawString*> strings;
  std::set<const char*> bigints;
  std::set<const Scope*> scopes;
  std::set<Object*> deferred_objects;
  for (const Entry& entry : constants_) {
    bool duplicate = false;
    switch (entry.tag_) {
      case Entry::Tag::kSmi:
        duplicate = !smis.insert(entry.smi_).second;
        break;
      case Entry::Tag::kHeapNumber:
        duplicate = !heap_numbers.insert(entry.heap_number_).second;
        break;
      case Entry::Tag::kRawString:
        duplicate = !strings.insert(entry.raw_string_).second;
        break;
      case Entry::Tag::kBigInt:
        duplicate = !bigints.insert(entry.bigint_.c_str()).second;
        break;
      case Entry::Tag::kScope:
        duplicate = !scopes.insert(entry.scope_).second;
        break;
      case Entry::Tag::kHandle:
        duplicate = !deferred_objects.insert(*entry.handle_).second;
        break;
      case Entry::Tag::kDeferred:
        UNREACHABLE();  // Should be kHandle at this point.
      case Entry::Tag::kJumpTableSmi:
      case Entry::Tag::kUninitializedJumpTableSmi:
        // TODO(leszeks): Ignore jump tables because they have to be
        // contiguous, so they can contain duplicates.
        break;
#define CASE_TAG(NAME, ...) case Entry::Tag::k##NAME:
        SINGLETON_CONSTANT_ENTRY_TYPES(CASE_TAG)
#undef CASE_TAG
        // Singletons are non-duplicated by definition.
        break;
    }
    if (duplicate) {
      std::ostringstream os;
      os << "Duplicate constant found: " << Brief(*entry.ToHandle(isolate))
         << std::endl;
      // Print all the entries in the slice to help debug duplicates.
      size_t i = start_index();
      for (const Entry& prev_entry : constants_) {
        os << i++ << ": " << Brief(*prev_entry.ToHandle(isolate)) << std::endl;
      }
      FATAL("%s", os.str().c_str());
    }
  }
}
#endif

STATIC_CONST_MEMBER_DEFINITION const size_t ConstantArrayBuilder::k8BitCapacity;
STATIC_CONST_MEMBER_DEFINITION const size_t
    ConstantArrayBuilder::k16BitCapacity;
STATIC_CONST_MEMBER_DEFINITION const size_t
    ConstantArrayBuilder::k32BitCapacity;

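// The pool is split across three slices, one per operand width, so that an
// index allocated for a narrow operand is guaranteed to fit in it. From the
// constructor arguments below:
//
//   idx_slice_[0] starts at 0                              (kByte operands)
//   idx_slice_[1] starts at k8BitCapacity                  (kShort operands)
//   idx_slice_[2] starts at k8BitCapacity + k16BitCapacity (kQuad operands)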
ConstantArrayBuilder::ConstantArrayBuilder(Zone* zone)
    : constants_map_(16, base::KeyEqualityMatcher<intptr_t>(),
                     ZoneAllocationPolicy(zone)),
      smi_map_(zone),
      smi_pairs_(zone),
      heap_number_map_(zone),
#define INIT_SINGLETON_ENTRY_FIELD(NAME, LOWER_NAME) LOWER_NAME##_(-1),
      SINGLETON_CONSTANT_ENTRY_TYPES(INIT_SINGLETON_ENTRY_FIELD)
#undef INIT_SINGLETON_ENTRY_FIELD
      zone_(zone) {
  idx_slice_[0] =
      new (zone) ConstantArraySlice(zone, 0, k8BitCapacity, OperandSize::kByte);
  idx_slice_[1] = new (zone) ConstantArraySlice(
      zone, k8BitCapacity, k16BitCapacity, OperandSize::kShort);
  idx_slice_[2] = new (zone) ConstantArraySlice(
      zone, k8BitCapacity + k16BitCapacity, k32BitCapacity, OperandSize::kQuad);
}

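// Returns the length the final constant pool will have: the end index of
// the highest non-empty slice. Unused capacity in a lower slice still
// counts, because it becomes a run of holes in the fixed array.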
size_t ConstantArrayBuilder::size() const {
  size_t i = arraysize(idx_slice_);
  while (i > 0) {
    ConstantArraySlice* slice = idx_slice_[--i];
    if (slice->size() > 0) {
      return slice->start_index() + slice->size();
    }
  }
  return idx_slice_[0]->size();
}

ConstantArrayBuilder::ConstantArraySlice* ConstantArrayBuilder::IndexToSlice(
    size_t index) const {
  for (ConstantArraySlice* slice : idx_slice_) {
    if (index <= slice->max_index()) {
      return slice;
    }
  }
  UNREACHABLE();
}

MaybeHandle<Object> ConstantArrayBuilder::At(size_t index,
                                             Isolate* isolate) const {
  const ConstantArraySlice* slice = IndexToSlice(index);
  DCHECK_LT(index, slice->capacity());
  if (index < slice->start_index() + slice->size()) {
    const Entry& entry = slice->At(index);
    if (!entry.IsDeferred()) return entry.ToHandle(isolate);
  }
  return MaybeHandle<Object>();
}

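// Flattens the slices into one FixedArray. Unused slots left behind by
// reservations become holes (hence NewFixedArrayWithHoles), and the early
// break below stops us from padding past the end of the array when the last
// non-empty slice is not full.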
Handle<FixedArray> ConstantArrayBuilder::ToFixedArray(Isolate* isolate) {
  Handle<FixedArray> fixed_array = isolate->factory()->NewFixedArrayWithHoles(
      static_cast<int>(size()), PretenureFlag::TENURED);
  int array_index = 0;
  for (const ConstantArraySlice* slice : idx_slice_) {
    DCHECK_EQ(slice->reserved(), 0);
    DCHECK(array_index == 0 ||
           base::bits::IsPowerOfTwo(static_cast<uint32_t>(array_index)));
#if DEBUG
    // Different slices might contain the same element due to reservations, but
    // all elements within a slice should be unique.
    slice->CheckAllElementsAreUnique(isolate);
#endif
    // Copy objects from slice into array.
    for (size_t i = 0; i < slice->size(); ++i) {
      Handle<Object> value =
          slice->At(slice->start_index() + i).ToHandle(isolate);
      fixed_array->set(array_index++, *value);
    }
    // Leave holes where reservations led to unused slots.
    size_t padding = slice->capacity() - slice->size();
    if (static_cast<size_t>(fixed_array->length() - array_index) <= padding) {
      break;
    }
    array_index += padding;
  }
  DCHECK_GE(array_index, fixed_array->length());
  return fixed_array;
}

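// The Insert overloads deduplicate before allocating: Smis and heap numbers
// each get a dedicated map, while raw strings, bigints and scopes share
// constants_map_, keyed on the pointer value of their zone-allocated
// payload. Illustrative only (hypothetical |builder| and |name|), repeated
// insertion returns the original index:
//
//   size_t a = builder.Insert(name);  // allocates a new slot
//   size_t b = builder.Insert(name);  // finds it again, so b == a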
size_t ConstantArrayBuilder::Insert(Smi* smi) {
  auto entry = smi_map_.find(smi);
  if (entry == smi_map_.end()) {
    return AllocateReservedEntry(smi);
  }
  return entry->second;
}

size_t ConstantArrayBuilder::Insert(double number) {
  if (std::isnan(number)) return InsertNaN();
  auto entry = heap_number_map_.find(number);
  if (entry == heap_number_map_.end()) {
    index_t index = static_cast<index_t>(AllocateIndex(Entry(number)));
    heap_number_map_[number] = index;
    return index;
  }
  return entry->second;
}

size_t ConstantArrayBuilder::Insert(const AstRawString* raw_string) {
  return constants_map_
      .LookupOrInsert(reinterpret_cast<intptr_t>(raw_string),
                      raw_string->Hash(),
                      [&]() { return AllocateIndex(Entry(raw_string)); },
                      ZoneAllocationPolicy(zone_))
      ->value;
}

size_t ConstantArrayBuilder::Insert(AstBigInt bigint) {
  return constants_map_
      .LookupOrInsert(reinterpret_cast<intptr_t>(bigint.c_str()),
                      static_cast<uint32_t>(base::hash_value(bigint.c_str())),
                      [&]() { return AllocateIndex(Entry(bigint)); },
                      ZoneAllocationPolicy(zone_))
      ->value;
}

size_t ConstantArrayBuilder::Insert(const Scope* scope) {
  return constants_map_
      .LookupOrInsert(reinterpret_cast<intptr_t>(scope),
                      static_cast<uint32_t>(base::hash_value(scope)),
                      [&]() { return AllocateIndex(Entry(scope)); },
                      ZoneAllocationPolicy(zone_))
      ->value;
}

#define INSERT_ENTRY(NAME, LOWER_NAME)              \
  size_t ConstantArrayBuilder::Insert##NAME() {     \
    if (LOWER_NAME##_ < 0) {                        \
      LOWER_NAME##_ = AllocateIndex(Entry::NAME()); \
    }                                               \
    return LOWER_NAME##_;                           \
  }
SINGLETON_CONSTANT_ENTRY_TYPES(INSERT_ENTRY)
#undef INSERT_ENTRY

ConstantArrayBuilder::index_t ConstantArrayBuilder::AllocateIndex(
    ConstantArrayBuilder::Entry entry) {
  return AllocateIndexArray(entry, 1);
}

ConstantArrayBuilder::index_t ConstantArrayBuilder::AllocateIndexArray(
    ConstantArrayBuilder::Entry entry, size_t count) {
  for (size_t i = 0; i < arraysize(idx_slice_); ++i) {
    if (idx_slice_[i]->available() >= count) {
      return static_cast<index_t>(idx_slice_[i]->Allocate(entry, count));
    }
  }
  UNREACHABLE();
}

ConstantArrayBuilder::ConstantArraySlice*
ConstantArrayBuilder::OperandSizeToSlice(OperandSize operand_size) const {
  ConstantArraySlice* slice = nullptr;
  switch (operand_size) {
    case OperandSize::kNone:
      UNREACHABLE();
      break;
    case OperandSize::kByte:
      slice = idx_slice_[0];
      break;
    case OperandSize::kShort:
      slice = idx_slice_[1];
      break;
    case OperandSize::kQuad:
      slice = idx_slice_[2];
      break;
  }
  DCHECK(slice->operand_size() == operand_size);
  return slice;
}

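// Deferred entries and jump tables hand out indices before their values are
// known: InsertDeferred() allocates a slot to be filled later through
// SetDeferredAt(), while InsertJumpTable() claims |size| contiguous slots
// whose Smi targets are patched in afterwards via SetJumpTableSmi().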
size_t ConstantArrayBuilder::InsertDeferred() {
  return AllocateIndex(Entry::Deferred());
}

size_t ConstantArrayBuilder::InsertJumpTable(size_t size) {
  return AllocateIndexArray(Entry::UninitializedJumpTableSmi(), size);
}

void ConstantArrayBuilder::SetDeferredAt(size_t index, Handle<Object> object) {
  ConstantArraySlice* slice = IndexToSlice(index);
  return slice->At(index).SetDeferred(object);
}

void ConstantArrayBuilder::SetJumpTableSmi(size_t index, Smi* smi) {
  ConstantArraySlice* slice = IndexToSlice(index);
  // Allow others to reuse these Smis, but insert using emplace to avoid
  // overwriting existing values in the Smi map (which may have a smaller
  // operand size).
  smi_map_.emplace(smi, static_cast<index_t>(index));
  return slice->At(index).SetJumpTableSmi(smi);
}

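// Reservation protocol, for when an operand's width must be fixed before
// its Smi value is known (e.g. while a bytecode is being emitted). A sketch
// of the expected call pattern, with a hypothetical |builder|:
//
//   OperandSize size = builder.CreateReservedEntry();
//   // ... compute the Smi value ...
//   size_t index = builder.CommitReservedEntry(size, value);
//   // or, if the entry turns out to be unnecessary:
//   builder.DiscardReservedEntry(size);
//
// CommitReservedEntry() reuses an existing index for the same Smi when that
// index fits the reserved width, and otherwise duplicates the entry at an
// index the reserved operand size can still address.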
OperandSize ConstantArrayBuilder::CreateReservedEntry() {
  for (size_t i = 0; i < arraysize(idx_slice_); ++i) {
    if (idx_slice_[i]->available() > 0) {
      idx_slice_[i]->Reserve();
      return idx_slice_[i]->operand_size();
    }
  }
  UNREACHABLE();
}

ConstantArrayBuilder::index_t ConstantArrayBuilder::AllocateReservedEntry(
    Smi* value) {
  index_t index = static_cast<index_t>(AllocateIndex(Entry(value)));
  smi_map_[value] = index;
  return index;
}

size_t ConstantArrayBuilder::CommitReservedEntry(OperandSize operand_size,
                                                 Smi* value) {
  DiscardReservedEntry(operand_size);
  size_t index;
  auto entry = smi_map_.find(value);
  if (entry == smi_map_.end()) {
    index = AllocateReservedEntry(value);
  } else {
    ConstantArraySlice* slice = OperandSizeToSlice(operand_size);
    index = entry->second;
    if (index > slice->max_index()) {
      // The object is already in the constant array, but may have an
      // index too big for the reserved operand_size. So, duplicate
      // entry with the smaller operand size.
      index = AllocateReservedEntry(value);
    }
    DCHECK_LE(index, slice->max_index());
  }
  return index;
}

void ConstantArrayBuilder::DiscardReservedEntry(OperandSize operand_size) {
  OperandSizeToSlice(operand_size)->Unreserve();
}

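// Materializes an Entry as a heap object when the final array is built. By
// this point every kDeferred entry must have been resolved to a kHandle via
// SetDeferredAt(); uninitialized jump table slots fall back to the hole
// value.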
Handle<Object> ConstantArrayBuilder::Entry::ToHandle(Isolate* isolate) const {
  switch (tag_) {
    case Tag::kDeferred:
      // We shouldn't have any deferred entries by now.
      UNREACHABLE();
    case Tag::kHandle:
      return handle_;
    case Tag::kSmi:
    case Tag::kJumpTableSmi:
      return handle(smi_, isolate);
    case Tag::kUninitializedJumpTableSmi:
      // TODO(leszeks): There's probably a better value we could use here.
      return isolate->factory()->the_hole_value();
    case Tag::kRawString:
      return raw_string_->string();
    case Tag::kHeapNumber:
      return isolate->factory()->NewNumber(heap_number_, TENURED);
    case Tag::kBigInt:
      // This should never fail: the parser will never create a BigInt
      // literal that cannot be allocated.
      return BigIntLiteral(isolate, bigint_.c_str()).ToHandleChecked();
    case Tag::kScope:
      return scope_->scope_info();
#define ENTRY_LOOKUP(Name, name) \
  case Tag::k##Name:             \
    return isolate->factory()->name();
      SINGLETON_CONSTANT_ENTRY_TYPES(ENTRY_LOOKUP);
#undef ENTRY_LOOKUP
  }
  UNREACHABLE();
}

}  // namespace interpreter
}  // namespace internal
}  // namespace v8