// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_MAP_INL_H_
#define V8_OBJECTS_MAP_INL_H_

#include "src/heap/heap-write-barrier-inl.h"
#include "src/objects/api-callbacks-inl.h"
#include "src/objects/cell-inl.h"
#include "src/objects/descriptor-array-inl.h"
#include "src/objects/field-type.h"
#include "src/objects/instance-type-inl.h"
#include "src/objects/js-function-inl.h"
#include "src/objects/layout-descriptor-inl.h"
#include "src/objects/map.h"
#include "src/objects/objects-inl.h"
#include "src/objects/property.h"
#include "src/objects/prototype-info-inl.h"
#include "src/objects/shared-function-info.h"
#include "src/objects/templates-inl.h"
#include "src/objects/transitions-inl.h"
#include "src/objects/transitions.h"
#include "src/wasm/wasm-objects-inl.h"

// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"

namespace v8 {
namespace internal {

#include "torque-generated/src/objects/map-tq-inl.inc"

OBJECT_CONSTRUCTORS_IMPL(Map, HeapObject)
CAST_ACCESSOR(Map)

RELAXED_ACCESSORS(Map, instance_descriptors, DescriptorArray,
                  kInstanceDescriptorsOffset)
RELEASE_ACQUIRE_ACCESSORS(Map, instance_descriptors, DescriptorArray,
                          kInstanceDescriptorsOffset)

// A freshly allocated layout descriptor can be set on an existing map.
// We need to use release-store and acquire-load accessor pairs to ensure
// that the concurrent marking thread observes initializing stores of the
// layout descriptor.
RELEASE_ACQUIRE_ACCESSORS_CHECKED(Map, layout_descriptor, LayoutDescriptor,
                                  kLayoutDescriptorOffset,
                                  FLAG_unbox_double_fields)
SYNCHRONIZED_WEAK_ACCESSORS(Map, raw_transitions,
                            kTransitionsOrPrototypeInfoOffset)

ACCESSORS_CHECKED2(Map, prototype, HeapObject, kPrototypeOffset, true,
                   value.IsNull() || value.IsJSReceiver())

ACCESSORS_CHECKED(Map, prototype_info, Object,
                  kTransitionsOrPrototypeInfoOffset, this->is_prototype_map())

// |bit_field| fields.
// Concurrent access to |has_prototype_slot| and |has_non_instance_prototype|
// is explicitly allowlisted here. The former is never modified after the map
// is set up, but it is read by the concurrent marker when pointer compression
// is enabled. The latter bit can be modified on live objects.
BIT_FIELD_ACCESSORS(Map, relaxed_bit_field, has_non_instance_prototype,
                    Map::Bits1::HasNonInstancePrototypeBit)
BIT_FIELD_ACCESSORS(Map, bit_field, is_callable, Map::Bits1::IsCallableBit)
BIT_FIELD_ACCESSORS(Map, bit_field, has_named_interceptor,
                    Map::Bits1::HasNamedInterceptorBit)
BIT_FIELD_ACCESSORS(Map, bit_field, has_indexed_interceptor,
                    Map::Bits1::HasIndexedInterceptorBit)
BIT_FIELD_ACCESSORS(Map, bit_field, is_undetectable,
                    Map::Bits1::IsUndetectableBit)
BIT_FIELD_ACCESSORS(Map, bit_field, is_access_check_needed,
                    Map::Bits1::IsAccessCheckNeededBit)
BIT_FIELD_ACCESSORS(Map, bit_field, is_constructor,
                    Map::Bits1::IsConstructorBit)
BIT_FIELD_ACCESSORS(Map, relaxed_bit_field, has_prototype_slot,
                    Map::Bits1::HasPrototypeSlotBit)

// |bit_field2| fields.
BIT_FIELD_ACCESSORS(Map, bit_field2, new_target_is_base,
                    Map::Bits2::NewTargetIsBaseBit)
BIT_FIELD_ACCESSORS(Map, bit_field2, is_immutable_proto,
                    Map::Bits2::IsImmutablePrototypeBit)

// |bit_field3| fields.
BIT_FIELD_ACCESSORS(Map, bit_field3, owns_descriptors,
                    Map::Bits3::OwnsDescriptorsBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, is_deprecated, Map::Bits3::IsDeprecatedBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, is_in_retained_map_list,
                    Map::Bits3::IsInRetainedMapListBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, is_prototype_map,
                    Map::Bits3::IsPrototypeMapBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, is_migration_target,
                    Map::Bits3::IsMigrationTargetBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, is_extensible, Map::Bits3::IsExtensibleBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, may_have_interesting_symbols,
                    Map::Bits3::MayHaveInterestingSymbolsBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, construction_counter,
                    Map::Bits3::ConstructionCounterBits)

DEF_GETTER(Map, GetNamedInterceptor, InterceptorInfo) {
  DCHECK(has_named_interceptor());
  FunctionTemplateInfo info = GetFunctionTemplateInfo(isolate);
  return InterceptorInfo::cast(info.GetNamedPropertyHandler(isolate));
}

DEF_GETTER(Map, GetIndexedInterceptor, InterceptorInfo) {
  DCHECK(has_indexed_interceptor());
  FunctionTemplateInfo info = GetFunctionTemplateInfo(isolate);
  return InterceptorInfo::cast(info.GetIndexedPropertyHandler(isolate));
}

bool Map::IsMostGeneralFieldType(Representation representation,
                                 FieldType field_type) {
  return !representation.IsHeapObject() || field_type.IsAny();
}

bool Map::CanHaveFastTransitionableElementsKind(InstanceType instance_type) {
  return instance_type == JS_ARRAY_TYPE ||
         instance_type == JS_PRIMITIVE_WRAPPER_TYPE ||
         instance_type == JS_ARGUMENTS_OBJECT_TYPE;
}

bool Map::CanHaveFastTransitionableElementsKind() const {
  return CanHaveFastTransitionableElementsKind(instance_type());
}

bool Map::IsDetached(Isolate* isolate) const {
  if (is_prototype_map()) return true;
  return instance_type() == JS_OBJECT_TYPE && NumberOfOwnDescriptors() > 0 &&
         GetBackPointer().IsUndefined(isolate);
}
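
// Informal reading of IsDetached() above: a map counts as detached when it is
// a prototype map, or when it is a plain JS_OBJECT_TYPE map that already has
// own descriptors but no back pointer, i.e. it is presumably no longer part of
// a transition tree rooted at an initial map.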

// static
void Map::GeneralizeIfCanHaveTransitionableFastElementsKind(
    Isolate* isolate, InstanceType instance_type,
    Representation* representation, Handle<FieldType>* field_type) {
  if (CanHaveFastTransitionableElementsKind(instance_type)) {
    // We don't support propagation of field generalization through elements
    // kind transitions because they are inserted into the transition tree
    // before field transitions. In order to avoid complexity of handling
    // such a case we ensure that all maps with transitionable elements kinds
    // have the most general field representation and type.
    *field_type = FieldType::Any(isolate);
    *representation = Representation::Tagged();
  }
}

Handle<Map> Map::Normalize(Isolate* isolate, Handle<Map> fast_map,
                           PropertyNormalizationMode mode, const char* reason) {
  return Normalize(isolate, fast_map, fast_map->elements_kind(), mode, reason);
}

bool Map::EquivalentToForNormalization(const Map other,
                                       PropertyNormalizationMode mode) const {
  return EquivalentToForNormalization(other, elements_kind(), mode);
}

bool Map::IsUnboxedDoubleField(FieldIndex index) const {
  IsolateRoot isolate = GetIsolateForPtrCompr(*this);
  return IsUnboxedDoubleField(isolate, index);
}

bool Map::IsUnboxedDoubleField(IsolateRoot isolate, FieldIndex index) const {
  if (!FLAG_unbox_double_fields) return false;
  if (!index.is_inobject()) return false;
  return !layout_descriptor(isolate, kAcquireLoad)
              .IsTagged(index.property_index());
}

bool Map::TooManyFastProperties(StoreOrigin store_origin) const {
  if (UnusedPropertyFields() != 0) return false;
  if (is_prototype_map()) return false;
  if (store_origin == StoreOrigin::kNamed) {
    int limit = std::max({kMaxFastProperties, GetInObjectProperties()});
    FieldCounts counts = GetFieldCounts();
    // Only count mutable fields so that objects with large numbers of
    // constant functions do not go to dictionary mode. That would be bad
    // because such objects have often been used as modules.
    int external = counts.mutable_count() - GetInObjectProperties();
    return external > limit || counts.GetTotal() > kMaxNumberOfDescriptors;
  } else {
    int limit = std::max({kFastPropertiesSoftLimit, GetInObjectProperties()});
    int external = NumberOfFields() - GetInObjectProperties();
    return external > limit;
  }
}
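
// Informal reading of the check above: named stores tolerate up to
// std::max(kMaxFastProperties, <in-object slot count>) mutable out-of-object
// fields before the object is considered "too fat" for fast mode, while other
// store origins use the softer kFastPropertiesSoftLimit; in both cases only
// fields that spill out of the object count against the limit.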

PropertyDetails Map::GetLastDescriptorDetails(Isolate* isolate) const {
  return instance_descriptors(isolate, kRelaxedLoad).GetDetails(LastAdded());
}

InternalIndex Map::LastAdded() const {
  int number_of_own_descriptors = NumberOfOwnDescriptors();
  DCHECK_GT(number_of_own_descriptors, 0);
  return InternalIndex(number_of_own_descriptors - 1);
}

int Map::NumberOfOwnDescriptors() const {
  return Bits3::NumberOfOwnDescriptorsBits::decode(bit_field3());
}

void Map::SetNumberOfOwnDescriptors(int number) {
  DCHECK_LE(number, instance_descriptors(kRelaxedLoad).number_of_descriptors());
  CHECK_LE(static_cast<unsigned>(number),
           static_cast<unsigned>(kMaxNumberOfDescriptors));
  set_bit_field3(
      Bits3::NumberOfOwnDescriptorsBits::update(bit_field3(), number));
}

InternalIndex::Range Map::IterateOwnDescriptors() const {
  return InternalIndex::Range(NumberOfOwnDescriptors());
}

int Map::EnumLength() const {
  return Bits3::EnumLengthBits::decode(bit_field3());
}

void Map::SetEnumLength(int length) {
  if (length != kInvalidEnumCacheSentinel) {
    DCHECK_LE(length, NumberOfOwnDescriptors());
    CHECK_LE(static_cast<unsigned>(length),
             static_cast<unsigned>(kMaxNumberOfDescriptors));
  }
  set_bit_field3(Bits3::EnumLengthBits::update(bit_field3(), length));
}

FixedArrayBase Map::GetInitialElements() const {
  FixedArrayBase result;
  if (has_fast_elements() || has_fast_string_wrapper_elements() ||
      has_any_nonextensible_elements()) {
    result = GetReadOnlyRoots().empty_fixed_array();
  } else if (has_typed_array_elements()) {
    result = GetReadOnlyRoots().empty_byte_array();
  } else if (has_dictionary_elements()) {
    result = GetReadOnlyRoots().empty_slow_element_dictionary();
  } else {
    UNREACHABLE();
  }
  DCHECK(!ObjectInYoungGeneration(result));
  return result;
}

VisitorId Map::visitor_id() const {
  return static_cast<VisitorId>(
      RELAXED_READ_BYTE_FIELD(*this, kVisitorIdOffset));
}

void Map::set_visitor_id(VisitorId id) {
  CHECK_LT(static_cast<unsigned>(id), 256);
  RELAXED_WRITE_BYTE_FIELD(*this, kVisitorIdOffset, static_cast<byte>(id));
}

int Map::instance_size_in_words() const {
  return RELAXED_READ_BYTE_FIELD(*this, kInstanceSizeInWordsOffset);
}

void Map::set_instance_size_in_words(int value) {
  RELAXED_WRITE_BYTE_FIELD(*this, kInstanceSizeInWordsOffset,
                           static_cast<byte>(value));
}

int Map::instance_size() const {
  return instance_size_in_words() << kTaggedSizeLog2;
}

void Map::set_instance_size(int value) {
  CHECK(IsAligned(value, kTaggedSize));
  value >>= kTaggedSizeLog2;
  CHECK_LT(static_cast<unsigned>(value), 256);
  set_instance_size_in_words(value);
}
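
// Rough example (assuming an 8-byte tagged size, i.e. no pointer compression):
// an instance size of 56 bytes is stored above as 56 >> kTaggedSizeLog2 == 7
// words, which keeps the value within the single byte reserved for it.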

int Map::inobject_properties_start_or_constructor_function_index() const {
  return RELAXED_READ_BYTE_FIELD(
      *this, kInObjectPropertiesStartOrConstructorFunctionIndexOffset);
}

void Map::set_inobject_properties_start_or_constructor_function_index(
    int value) {
  CHECK_LT(static_cast<unsigned>(value), 256);
  RELAXED_WRITE_BYTE_FIELD(
      *this, kInObjectPropertiesStartOrConstructorFunctionIndexOffset,
      static_cast<byte>(value));
}

int Map::GetInObjectPropertiesStartInWords() const {
  DCHECK(IsJSObjectMap());
  return inobject_properties_start_or_constructor_function_index();
}

void Map::SetInObjectPropertiesStartInWords(int value) {
  CHECK(IsJSObjectMap());
  set_inobject_properties_start_or_constructor_function_index(value);
}

int Map::GetInObjectProperties() const {
  DCHECK(IsJSObjectMap());
  return instance_size_in_words() - GetInObjectPropertiesStartInWords();
}

int Map::GetConstructorFunctionIndex() const {
  DCHECK(IsPrimitiveMap());
  return inobject_properties_start_or_constructor_function_index();
}

void Map::SetConstructorFunctionIndex(int value) {
  CHECK(IsPrimitiveMap());
  set_inobject_properties_start_or_constructor_function_index(value);
}

int Map::GetInObjectPropertyOffset(int index) const {
  return (GetInObjectPropertiesStartInWords() + index) * kTaggedSize;
}
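
// Worked example (illustrative numbers only): if the in-object properties
// start at word 3 and kTaggedSize is 8, then property index 2 lives at byte
// offset (3 + 2) * 8 == 40 from the start of the object; with pointer
// compression kTaggedSize is 4 and the offset halves accordingly.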

Handle<Map> Map::AddMissingTransitionsForTesting(
    Isolate* isolate, Handle<Map> split_map,
    Handle<DescriptorArray> descriptors,
    Handle<LayoutDescriptor> full_layout_descriptor) {
  return AddMissingTransitions(isolate, split_map, descriptors,
                               full_layout_descriptor);
}

InstanceType Map::instance_type() const {
  return static_cast<InstanceType>(ReadField<uint16_t>(kInstanceTypeOffset));
}

void Map::set_instance_type(InstanceType value) {
  WriteField<uint16_t>(kInstanceTypeOffset, value);
}

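// The "used or unused instance size in words" byte has a dual encoding
// (informal summary of the accessors below): a value >= JSObject::kFieldsAdded
// is the used instance size in words, so the in-object slack is
// instance_size_in_words() - value; a smaller value means all in-object slots
// are used and the byte instead records the slack left in the out-of-object
// property array.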
int Map::UnusedPropertyFields() const {
  int value = used_or_unused_instance_size_in_words();
  DCHECK_IMPLIES(!IsJSObjectMap(), value == 0);
  int unused;
  if (value >= JSObject::kFieldsAdded) {
    unused = instance_size_in_words() - value;
  } else {
    // For out of object properties "used_or_unused_instance_size_in_words"
    // byte encodes the slack in the property array.
    unused = value;
  }
  return unused;
}

int Map::UnusedInObjectProperties() const {
  // Like Map::UnusedPropertyFields(), but returns 0 for out of object
  // properties.
  int value = used_or_unused_instance_size_in_words();
  DCHECK_IMPLIES(!IsJSObjectMap(), value == 0);
  if (value >= JSObject::kFieldsAdded) {
    return instance_size_in_words() - value;
  }
  return 0;
}

int Map::used_or_unused_instance_size_in_words() const {
  return RELAXED_READ_BYTE_FIELD(*this, kUsedOrUnusedInstanceSizeInWordsOffset);
}

void Map::set_used_or_unused_instance_size_in_words(int value) {
  CHECK_LE(static_cast<unsigned>(value), 255);
  RELAXED_WRITE_BYTE_FIELD(*this, kUsedOrUnusedInstanceSizeInWordsOffset,
                           static_cast<byte>(value));
}

int Map::UsedInstanceSize() const {
  int words = used_or_unused_instance_size_in_words();
  if (words < JSObject::kFieldsAdded) {
    // All in-object properties are used and this value tracks the slack in
    // the property array.
    return instance_size();
  }
  return words * kTaggedSize;
}

void Map::SetInObjectUnusedPropertyFields(int value) {
  STATIC_ASSERT(JSObject::kFieldsAdded == JSObject::kHeaderSize / kTaggedSize);
  if (!IsJSObjectMap()) {
    CHECK_EQ(0, value);
    set_used_or_unused_instance_size_in_words(0);
    DCHECK_EQ(0, UnusedPropertyFields());
    return;
  }
  CHECK_LE(0, value);
  DCHECK_LE(value, GetInObjectProperties());
  int used_inobject_properties = GetInObjectProperties() - value;
  set_used_or_unused_instance_size_in_words(
      GetInObjectPropertyOffset(used_inobject_properties) / kTaggedSize);
  DCHECK_EQ(value, UnusedPropertyFields());
}

void Map::SetOutOfObjectUnusedPropertyFields(int value) {
  STATIC_ASSERT(JSObject::kFieldsAdded == JSObject::kHeaderSize / kTaggedSize);
  CHECK_LT(static_cast<unsigned>(value), JSObject::kFieldsAdded);
  // For out of object properties the "used_or_unused_instance_size_in_words"
  // byte encodes the slack in the property array.
  set_used_or_unused_instance_size_in_words(value);
  DCHECK_EQ(value, UnusedPropertyFields());
}

void Map::CopyUnusedPropertyFields(Map map) {
  set_used_or_unused_instance_size_in_words(
      map.used_or_unused_instance_size_in_words());
  DCHECK_EQ(UnusedPropertyFields(), map.UnusedPropertyFields());
}

void Map::CopyUnusedPropertyFieldsAdjustedForInstanceSize(Map map) {
  int value = map.used_or_unused_instance_size_in_words();
  if (value >= JSObject::kFieldsAdded) {
    // Unused in-object fields. Adjust the offset from the object's start
    // so it matches the distance to the object's end.
    value += instance_size_in_words() - map.instance_size_in_words();
  }
  set_used_or_unused_instance_size_in_words(value);
  DCHECK_EQ(UnusedPropertyFields(), map.UnusedPropertyFields());
}

void Map::AccountAddedPropertyField() {
  // Update used instance size and unused property fields number.
  STATIC_ASSERT(JSObject::kFieldsAdded == JSObject::kHeaderSize / kTaggedSize);
#ifdef DEBUG
  int new_unused = UnusedPropertyFields() - 1;
  if (new_unused < 0) new_unused += JSObject::kFieldsAdded;
#endif
  int value = used_or_unused_instance_size_in_words();
  if (value >= JSObject::kFieldsAdded) {
    if (value == instance_size_in_words()) {
      AccountAddedOutOfObjectPropertyField(0);
    } else {
      // The property is added in-object, so simply increment the counter.
      set_used_or_unused_instance_size_in_words(value + 1);
    }
  } else {
    AccountAddedOutOfObjectPropertyField(value);
  }
  DCHECK_EQ(new_unused, UnusedPropertyFields());
}

void Map::AccountAddedOutOfObjectPropertyField(int unused_in_property_array) {
  unused_in_property_array--;
  if (unused_in_property_array < 0) {
    unused_in_property_array += JSObject::kFieldsAdded;
  }
  CHECK_LT(static_cast<unsigned>(unused_in_property_array),
           JSObject::kFieldsAdded);
  set_used_or_unused_instance_size_in_words(unused_in_property_array);
  DCHECK_EQ(unused_in_property_array, UnusedPropertyFields());
}
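
// Example of the wrap-around above: adding a field when the property array has
// no slack left takes unused_in_property_array from 0 to -1 and then back to
// JSObject::kFieldsAdded - 1, reflecting that the backing store is about to
// grow by kFieldsAdded slots of which all but one remain unused after the
// store.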

byte Map::bit_field() const { return ReadField<byte>(kBitFieldOffset); }

void Map::set_bit_field(byte value) {
  WriteField<byte>(kBitFieldOffset, value);
}

byte Map::relaxed_bit_field() const {
  return RELAXED_READ_BYTE_FIELD(*this, kBitFieldOffset);
}

void Map::set_relaxed_bit_field(byte value) {
  RELAXED_WRITE_BYTE_FIELD(*this, kBitFieldOffset, value);
}

byte Map::bit_field2() const { return ReadField<byte>(kBitField2Offset); }

void Map::set_bit_field2(byte value) {
  WriteField<byte>(kBitField2Offset, value);
}

bool Map::is_abandoned_prototype_map() const {
  return is_prototype_map() && !owns_descriptors();
}

bool Map::should_be_fast_prototype_map() const {
  if (!prototype_info().IsPrototypeInfo()) return false;
  return PrototypeInfo::cast(prototype_info()).should_be_fast_map();
}

void Map::set_elements_kind(ElementsKind elements_kind) {
  CHECK_LT(static_cast<int>(elements_kind), kElementsKindCount);
  set_bit_field2(
      Map::Bits2::ElementsKindBits::update(bit_field2(), elements_kind));
}

ElementsKind Map::elements_kind() const {
  return Map::Bits2::ElementsKindBits::decode(bit_field2());
}

bool Map::has_fast_smi_elements() const {
  return IsSmiElementsKind(elements_kind());
}

bool Map::has_fast_object_elements() const {
  return IsObjectElementsKind(elements_kind());
}

bool Map::has_fast_smi_or_object_elements() const {
  return IsSmiOrObjectElementsKind(elements_kind());
}

bool Map::has_fast_double_elements() const {
  return IsDoubleElementsKind(elements_kind());
}

bool Map::has_fast_elements() const {
  return IsFastElementsKind(elements_kind());
}

bool Map::has_sloppy_arguments_elements() const {
  return IsSloppyArgumentsElementsKind(elements_kind());
}

bool Map::has_fast_sloppy_arguments_elements() const {
  return elements_kind() == FAST_SLOPPY_ARGUMENTS_ELEMENTS;
}

bool Map::has_fast_string_wrapper_elements() const {
  return elements_kind() == FAST_STRING_WRAPPER_ELEMENTS;
}

bool Map::has_typed_array_elements() const {
  return IsTypedArrayElementsKind(elements_kind());
}

bool Map::has_dictionary_elements() const {
  return IsDictionaryElementsKind(elements_kind());
}

bool Map::has_any_nonextensible_elements() const {
  return IsAnyNonextensibleElementsKind(elements_kind());
}

bool Map::has_nonextensible_elements() const {
  return IsNonextensibleElementsKind(elements_kind());
}

bool Map::has_sealed_elements() const {
  return IsSealedElementsKind(elements_kind());
}

bool Map::has_frozen_elements() const {
  return IsFrozenElementsKind(elements_kind());
}

void Map::set_is_dictionary_map(bool value) {
  uint32_t new_bit_field3 =
      Bits3::IsDictionaryMapBit::update(bit_field3(), value);
  new_bit_field3 = Bits3::IsUnstableBit::update(new_bit_field3, value);
  set_bit_field3(new_bit_field3);
}

bool Map::is_dictionary_map() const {
  return Bits3::IsDictionaryMapBit::decode(bit_field3());
}

void Map::mark_unstable() {
  set_bit_field3(Bits3::IsUnstableBit::update(bit_field3(), true));
}

bool Map::is_stable() const {
  return !Bits3::IsUnstableBit::decode(bit_field3());
}

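// Informally: a map can become deprecated only if one of its own descriptors
// could still be generalized later (a representation that may widen, an
// unboxed double, or a kData property stored in the descriptor rather than in
// a field), since field generalization is what replaces a map with a newer
// version and deprecates the old one.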
bool Map::CanBeDeprecated() const {
  for (InternalIndex i : IterateOwnDescriptors()) {
    PropertyDetails details = instance_descriptors(kRelaxedLoad).GetDetails(i);
    if (details.representation().IsNone()) return true;
    if (details.representation().IsSmi()) return true;
    if (details.representation().IsDouble() && FLAG_unbox_double_fields)
      return true;
    if (details.representation().IsHeapObject()) return true;
    if (details.kind() == kData && details.location() == kDescriptor) {
      return true;
    }
  }
  return false;
}

void Map::NotifyLeafMapLayoutChange(Isolate* isolate) {
  if (is_stable()) {
    mark_unstable();
    dependent_code().DeoptimizeDependentCodeGroup(
        DependentCode::kPrototypeCheckGroup);
  }
}

bool Map::CanTransition() const {
  // Only JSObject and subtypes have map transitions and back pointers.
  return InstanceTypeChecker::IsJSObject(instance_type());
}

#define DEF_TESTER(Type, ...)                              \
  bool Map::Is##Type##Map() const {                        \
    return InstanceTypeChecker::Is##Type(instance_type()); \
  }
INSTANCE_TYPE_CHECKERS(DEF_TESTER)
#undef DEF_TESTER

bool Map::IsBooleanMap() const {
  return *this == GetReadOnlyRoots().boolean_map();
}

bool Map::IsNullOrUndefinedMap() const {
  return *this == GetReadOnlyRoots().null_map() ||
         *this == GetReadOnlyRoots().undefined_map();
}

bool Map::IsPrimitiveMap() const {
  return instance_type() <= LAST_PRIMITIVE_HEAP_OBJECT_TYPE;
}

LayoutDescriptor Map::layout_descriptor_gc_safe() const {
  DCHECK(FLAG_unbox_double_fields);
  // The loaded value can be dereferenced on background thread to load the
  // bitmap. We need acquire load in order to ensure that the bitmap
  // initializing stores are also visible to the background thread.
  Object layout_desc =
      TaggedField<Object, kLayoutDescriptorOffset>::Acquire_Load(*this);
  return LayoutDescriptor::cast_gc_safe(layout_desc);
}

bool Map::HasFastPointerLayout() const {
  DCHECK(FLAG_unbox_double_fields);
  // The loaded value is used for SMI check only and is not dereferenced,
  // so relaxed load is safe.
  Object layout_desc =
      TaggedField<Object, kLayoutDescriptorOffset>::Relaxed_Load(*this);
  return LayoutDescriptor::IsFastPointerLayout(layout_desc);
}

void Map::UpdateDescriptors(Isolate* isolate, DescriptorArray descriptors,
                            LayoutDescriptor layout_desc,
                            int number_of_own_descriptors) {
  SetInstanceDescriptors(isolate, descriptors, number_of_own_descriptors);
  if (FLAG_unbox_double_fields) {
    if (layout_descriptor(kAcquireLoad).IsSlowLayout()) {
      set_layout_descriptor(layout_desc, kReleaseStore);
    }
#ifdef VERIFY_HEAP
    // TODO(ishell): remove these checks from VERIFY_HEAP mode.
    if (FLAG_verify_heap) {
      CHECK(layout_descriptor(kAcquireLoad).IsConsistentWithMap(*this));
      CHECK_EQ(Map::GetVisitorId(*this), visitor_id());
    }
#else
    SLOW_DCHECK(layout_descriptor(kAcquireLoad).IsConsistentWithMap(*this));
    DCHECK(visitor_id() == Map::GetVisitorId(*this));
#endif
  }
}

void Map::InitializeDescriptors(Isolate* isolate, DescriptorArray descriptors,
                                LayoutDescriptor layout_desc) {
  SetInstanceDescriptors(isolate, descriptors,
                         descriptors.number_of_descriptors());

  if (FLAG_unbox_double_fields) {
    set_layout_descriptor(layout_desc, kReleaseStore);
#ifdef VERIFY_HEAP
    // TODO(ishell): remove these checks from VERIFY_HEAP mode.
    if (FLAG_verify_heap) {
      CHECK(layout_descriptor(kAcquireLoad).IsConsistentWithMap(*this));
    }
#else
    SLOW_DCHECK(layout_descriptor(kAcquireLoad).IsConsistentWithMap(*this));
#endif
    set_visitor_id(Map::GetVisitorId(*this));
  }
}

void Map::set_bit_field3(uint32_t bits) {
  RELAXED_WRITE_UINT32_FIELD(*this, kBitField3Offset, bits);
}

uint32_t Map::bit_field3() const {
  return RELAXED_READ_UINT32_FIELD(*this, kBitField3Offset);
}

void Map::clear_padding() {
  if (FIELD_SIZE(kOptionalPaddingOffset) == 0) return;
  DCHECK_EQ(4, FIELD_SIZE(kOptionalPaddingOffset));
  memset(reinterpret_cast<void*>(address() + kOptionalPaddingOffset), 0,
         FIELD_SIZE(kOptionalPaddingOffset));
}

LayoutDescriptor Map::GetLayoutDescriptor() const {
  return FLAG_unbox_double_fields ? layout_descriptor(kAcquireLoad)
                                  : LayoutDescriptor::FastPointerLayout();
}

void Map::AppendDescriptor(Isolate* isolate, Descriptor* desc) {
  DescriptorArray descriptors = instance_descriptors(kRelaxedLoad);
  int number_of_own_descriptors = NumberOfOwnDescriptors();
  DCHECK(descriptors.number_of_descriptors() == number_of_own_descriptors);
  {
    // The following two operations need to happen before the marking write
    // barrier.
    descriptors.Append(desc);
    SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);
#ifndef V8_DISABLE_WRITE_BARRIERS
    WriteBarrier::Marking(descriptors, number_of_own_descriptors + 1);
#endif
  }
  // Properly mark the map if the {desc} is an "interesting symbol".
  if (desc->GetKey()->IsInterestingSymbol()) {
    set_may_have_interesting_symbols(true);
  }
  PropertyDetails details = desc->GetDetails();
  if (details.location() == kField) {
    DCHECK_GT(UnusedPropertyFields(), 0);
    AccountAddedPropertyField();
  }

  // This function does not support appending double field descriptors and
  // it should never try to (otherwise, layout descriptor must be updated too).
#ifdef DEBUG
  DCHECK(details.location() != kField || !details.representation().IsDouble());
#endif
}

DEF_GETTER(Map, GetBackPointer, HeapObject) {
  Object object = constructor_or_backpointer(isolate);
  // This is the equivalent of IsMap() but avoids reading the instance type so
  // it can be used concurrently without acquire load.
  if (object.IsHeapObject() && HeapObject::cast(object).map(isolate) ==
                                   GetReadOnlyRoots(isolate).meta_map()) {
    return Map::cast(object);
  }
  // Can't use ReadOnlyRoots(isolate) as this isolate could be produced by
  // i::GetIsolateForPtrCompr(HeapObject).
  return GetReadOnlyRoots(isolate).undefined_value();
}

void Map::SetBackPointer(HeapObject value, WriteBarrierMode mode) {
  CHECK_GE(instance_type(), FIRST_JS_RECEIVER_TYPE);
  CHECK(value.IsMap());
  CHECK(GetBackPointer().IsUndefined());
  CHECK_IMPLIES(value.IsMap(), Map::cast(value).GetConstructor() ==
                                   constructor_or_backpointer());
  set_constructor_or_backpointer(value, mode);
}

// static
Map Map::ElementsTransitionMap(Isolate* isolate) {
  DisallowHeapAllocation no_gc;
  return TransitionsAccessor(isolate, *this, &no_gc)
      .SearchSpecial(ReadOnlyRoots(isolate).elements_transition_symbol());
}

ACCESSORS(Map, dependent_code, DependentCode, kDependentCodeOffset)
ACCESSORS(Map, prototype_validity_cell, Object, kPrototypeValidityCellOffset)
ACCESSORS_CHECKED2(Map, constructor_or_backpointer, Object,
                   kConstructorOrBackPointerOrNativeContextOffset,
                   !IsContextMap(), value.IsNull() || !IsContextMap())
ACCESSORS_CHECKED(Map, native_context, NativeContext,
                  kConstructorOrBackPointerOrNativeContextOffset,
                  IsContextMap())
ACCESSORS_CHECKED(Map, wasm_type_info, WasmTypeInfo,
                  kConstructorOrBackPointerOrNativeContextOffset,
                  IsWasmStructMap() || IsWasmArrayMap())

bool Map::IsPrototypeValidityCellValid() const {
  Object validity_cell = prototype_validity_cell();
  Object value = validity_cell.IsSmi() ? Smi::cast(validity_cell)
                                       : Cell::cast(validity_cell).value();
  return value == Smi::FromInt(Map::kPrototypeChainValid);
}

DEF_GETTER(Map, GetConstructor, Object) {
  Object maybe_constructor = constructor_or_backpointer(isolate);
  // Follow any back pointers.
  while (maybe_constructor.IsMap(isolate)) {
    maybe_constructor =
        Map::cast(maybe_constructor).constructor_or_backpointer(isolate);
  }
  return maybe_constructor;
}

Object Map::TryGetConstructor(Isolate* isolate, int max_steps) {
  Object maybe_constructor = constructor_or_backpointer(isolate);
  // Follow any back pointers.
  while (maybe_constructor.IsMap(isolate)) {
    if (max_steps-- == 0) return Smi::FromInt(0);
    maybe_constructor =
        Map::cast(maybe_constructor).constructor_or_backpointer(isolate);
  }
  return maybe_constructor;
}

DEF_GETTER(Map, GetFunctionTemplateInfo, FunctionTemplateInfo) {
  Object constructor = GetConstructor(isolate);
  if (constructor.IsJSFunction(isolate)) {
    // TODO(ishell): IsApiFunction(isolate) and get_api_func_data(isolate)
    DCHECK(JSFunction::cast(constructor).shared(isolate).IsApiFunction());
    return JSFunction::cast(constructor).shared(isolate).get_api_func_data();
  }
  DCHECK(constructor.IsFunctionTemplateInfo(isolate));
  return FunctionTemplateInfo::cast(constructor);
}

void Map::SetConstructor(Object constructor, WriteBarrierMode mode) {
  // Never overwrite a back pointer with a constructor.
  CHECK(!constructor_or_backpointer().IsMap());
  set_constructor_or_backpointer(constructor, mode);
}

Handle<Map> Map::CopyInitialMap(Isolate* isolate, Handle<Map> map) {
  return CopyInitialMap(isolate, map, map->instance_size(),
                        map->GetInObjectProperties(),
                        map->UnusedPropertyFields());
}

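// In-object slack tracking, roughly: an initial map starts with a non-zero
// construction counter that is decremented once per constructed instance (see
// InobjectSlackTrackingStep below); when it reaches kSlackTrackingCounterEnd
// the unused in-object slack is shrunk away, and kNoSlackTracking means the
// mechanism is inactive for this map.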
bool Map::IsInobjectSlackTrackingInProgress() const {
  return construction_counter() != Map::kNoSlackTracking;
}

void Map::InobjectSlackTrackingStep(Isolate* isolate) {
  // Slack tracking should only be performed on an initial map.
  DCHECK(GetBackPointer().IsUndefined());
  if (!IsInobjectSlackTrackingInProgress()) return;
  int counter = construction_counter();
  set_construction_counter(counter - 1);
  if (counter == kSlackTrackingCounterEnd) {
    CompleteInobjectSlackTracking(isolate);
  }
}

int Map::SlackForArraySize(int old_size, int size_limit) {
  const int max_slack = size_limit - old_size;
  CHECK_LE(0, max_slack);
  if (old_size < 4) {
    DCHECK_LE(1, max_slack);
    return 1;
  }
  return std::min(max_slack, old_size / 4);
}
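
// For example: old_size == 8 and size_limit == 20 gives max_slack == 12, so
// the function returns std::min(12, 8 / 4) == 2; a tiny array (old_size < 4)
// always gets exactly one slot of slack.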

int Map::InstanceSizeFromSlack(int slack) const {
  return instance_size() - slack * kTaggedSize;
}

OBJECT_CONSTRUCTORS_IMPL(NormalizedMapCache, WeakFixedArray)
CAST_ACCESSOR(NormalizedMapCache)
NEVER_READ_ONLY_SPACE_IMPL(NormalizedMapCache)

int NormalizedMapCache::GetIndex(Handle<Map> map) {
  return map->Hash() % NormalizedMapCache::kEntries;
}

DEF_GETTER(HeapObject, IsNormalizedMapCache, bool) {
  if (!IsWeakFixedArray(isolate)) return false;
  if (WeakFixedArray::cast(*this).length() != NormalizedMapCache::kEntries) {
    return false;
  }
  return true;
}

}  // namespace internal
}  // namespace v8

#include "src/objects/object-macros-undef.h"

#endif  // V8_OBJECTS_MAP_INL_H_