1 // Copyright 2019 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/objects/map.h"
6
7 #include "src/execution/frames.h"
8 #include "src/execution/isolate.h"
9 #include "src/handles/handles-inl.h"
10 #include "src/handles/maybe-handles.h"
11 #include "src/heap/heap-write-barrier-inl.h"
12 #include "src/init/bootstrapper.h"
13 #include "src/logging/counters-inl.h"
14 #include "src/logging/log.h"
15 #include "src/objects/arguments-inl.h"
16 #include "src/objects/descriptor-array.h"
17 #include "src/objects/elements-kind.h"
18 #include "src/objects/field-type.h"
19 #include "src/objects/js-objects.h"
20 #include "src/objects/layout-descriptor.h"
21 #include "src/objects/map-updater.h"
22 #include "src/objects/maybe-object.h"
23 #include "src/objects/oddball.h"
24 #include "src/objects/property.h"
25 #include "src/objects/transitions-inl.h"
26 #include "src/roots/roots.h"
27 #include "src/utils/ostreams.h"
28 #include "src/zone/zone-containers.h"
29 #include "torque-generated/field-offsets.h"
30
31 namespace v8 {
32 namespace internal {
33
GetPrototypeChainRootMap(Isolate * isolate) const34 Map Map::GetPrototypeChainRootMap(Isolate* isolate) const {
35 DisallowHeapAllocation no_alloc;
36 if (IsJSReceiverMap()) {
37 return *this;
38 }
39 int constructor_function_index = GetConstructorFunctionIndex();
40 if (constructor_function_index != Map::kNoConstructorFunctionIndex) {
41 Context native_context = isolate->context().native_context();
42 JSFunction constructor_function =
43 JSFunction::cast(native_context.get(constructor_function_index));
44 return constructor_function.initial_map();
45 }
46 return ReadOnlyRoots(isolate).null_value().map();
47 }
48
49 // static
GetConstructorFunction(Handle<Map> map,Handle<Context> native_context)50 MaybeHandle<JSFunction> Map::GetConstructorFunction(
51 Handle<Map> map, Handle<Context> native_context) {
52 if (map->IsPrimitiveMap()) {
53 int const constructor_function_index = map->GetConstructorFunctionIndex();
54 if (constructor_function_index != kNoConstructorFunctionIndex) {
55 return handle(
56 JSFunction::cast(native_context->get(constructor_function_index)),
57 native_context->GetIsolate());
58 }
59 }
60 return MaybeHandle<JSFunction>();
61 }
62
// Writes a human-readable trace line to |file| describing a property
// reconfiguration on this map (map-tracing diagnostics only).
void Map::PrintReconfiguration(Isolate* isolate, FILE* file,
                               InternalIndex modify_index, PropertyKind kind,
                               PropertyAttributes attributes) {
  OFStream os(file);
  os << "[reconfiguring]";
  Name name = instance_descriptors(kRelaxedLoad).GetKey(modify_index);
  if (name.IsString()) {
    String::cast(name).PrintOn(file);
  } else {
    // Symbols have no printable payload; print the raw pointer instead.
    os << "{symbol " << reinterpret_cast<void*>(name.ptr()) << "}";
  }
  os << ": " << (kind == kData ? "kData" : "ACCESSORS") << ", attrs: ";
  os << attributes << " [";
  // Append the top JavaScript stack frame for context.
  JavaScriptFrame::PrintTop(isolate, file, false, true);
  os << "]\n";
}
79
// Returns the canonical read-only-roots map for the given struct-like or
// Torque-defined instance |type|. Hits UNREACHABLE() for any type that has
// no dedicated roots map.
Map Map::GetInstanceTypeMap(ReadOnlyRoots roots, InstanceType type) {
  Map map;
  switch (type) {
#define MAKE_CASE(TYPE, Name, name) \
  case TYPE:                        \
    map = roots.name##_map();       \
    break;
    STRUCT_LIST(MAKE_CASE)
#undef MAKE_CASE
#define MAKE_CASE(TYPE, Name, name) \
  case TYPE:                        \
    map = roots.name##_map();       \
    break;
    TORQUE_DEFINED_INSTANCE_TYPE_LIST(MAKE_CASE)
#undef MAKE_CASE
    default:
      UNREACHABLE();
  }
  return map;
}
100
// Maps an instance type to the VisitorId that selects the GC's body
// descriptor / object visitor for objects with |map|. Every heap instance
// type must be covered here; unknown types hit UNREACHABLE().
VisitorId Map::GetVisitorId(Map map) {
  STATIC_ASSERT(kVisitorIdCount <= 256);

  const int instance_type = map.instance_type();

  // Strings are dispatched on representation and encoding bits rather than
  // on the full instance type.
  if (instance_type < FIRST_NONSTRING_TYPE) {
    switch (instance_type & kStringRepresentationMask) {
      case kSeqStringTag:
        if ((instance_type & kStringEncodingMask) == kOneByteStringTag) {
          return kVisitSeqOneByteString;
        } else {
          return kVisitSeqTwoByteString;
        }

      case kConsStringTag:
        if (IsShortcutCandidate(instance_type)) {
          return kVisitShortcutCandidate;
        } else {
          return kVisitConsString;
        }

      case kSlicedStringTag:
        return kVisitSlicedString;

      case kExternalStringTag:
        // External string payloads live off-heap; treat as raw data.
        return kVisitDataObject;

      case kThinStringTag:
        return kVisitThinString;
    }
    UNREACHABLE();
  }

  switch (instance_type) {
    case BYTE_ARRAY_TYPE:
      return kVisitByteArray;

    case BYTECODE_ARRAY_TYPE:
      return kVisitBytecodeArray;

    case FREE_SPACE_TYPE:
      return kVisitFreeSpace;

    case EMBEDDER_DATA_ARRAY_TYPE:
      return kVisitEmbedderDataArray;

    case OBJECT_BOILERPLATE_DESCRIPTION_TYPE:
    case CLOSURE_FEEDBACK_CELL_ARRAY_TYPE:
    case HASH_TABLE_TYPE:
    case ORDERED_HASH_MAP_TYPE:
    case ORDERED_HASH_SET_TYPE:
    case ORDERED_NAME_DICTIONARY_TYPE:
    case NAME_DICTIONARY_TYPE:
    case GLOBAL_DICTIONARY_TYPE:
    case NUMBER_DICTIONARY_TYPE:
    case SIMPLE_NUMBER_DICTIONARY_TYPE:
    case SCOPE_INFO_TYPE:
    case SCRIPT_CONTEXT_TABLE_TYPE:
      // All of these share FixedArray layout for visiting purposes.
      return kVisitFixedArray;

    case AWAIT_CONTEXT_TYPE:
    case BLOCK_CONTEXT_TYPE:
    case CATCH_CONTEXT_TYPE:
    case DEBUG_EVALUATE_CONTEXT_TYPE:
    case EVAL_CONTEXT_TYPE:
    case FUNCTION_CONTEXT_TYPE:
    case MODULE_CONTEXT_TYPE:
    case SCRIPT_CONTEXT_TYPE:
    case WITH_CONTEXT_TYPE:
      return kVisitContext;

    case NATIVE_CONTEXT_TYPE:
      return kVisitNativeContext;

    case EPHEMERON_HASH_TABLE_TYPE:
      return kVisitEphemeronHashTable;

    case FIXED_DOUBLE_ARRAY_TYPE:
      return kVisitFixedDoubleArray;

    case PROPERTY_ARRAY_TYPE:
      return kVisitPropertyArray;

    case FEEDBACK_CELL_TYPE:
      return kVisitFeedbackCell;

    case FEEDBACK_METADATA_TYPE:
      return kVisitFeedbackMetadata;

    case MAP_TYPE:
      return kVisitMap;

    case CODE_TYPE:
      return kVisitCode;

    case CELL_TYPE:
      return kVisitCell;

    case PROPERTY_CELL_TYPE:
      return kVisitPropertyCell;

    case TRANSITION_ARRAY_TYPE:
      return kVisitTransitionArray;

    case JS_WEAK_MAP_TYPE:
    case JS_WEAK_SET_TYPE:
      return kVisitJSWeakCollection;

    case CALL_HANDLER_INFO_TYPE:
      return kVisitStruct;

    case SHARED_FUNCTION_INFO_TYPE:
      return kVisitSharedFunctionInfo;

    case JS_PROXY_TYPE:
      return kVisitStruct;

    case SYMBOL_TYPE:
      return kVisitSymbol;

    case JS_ARRAY_BUFFER_TYPE:
      return kVisitJSArrayBuffer;

    case JS_DATA_VIEW_TYPE:
      return kVisitJSDataView;

    case JS_FUNCTION_TYPE:
      return kVisitJSFunction;

    case JS_TYPED_ARRAY_TYPE:
      return kVisitJSTypedArray;

    case SMALL_ORDERED_HASH_MAP_TYPE:
      return kVisitSmallOrderedHashMap;

    case SMALL_ORDERED_HASH_SET_TYPE:
      return kVisitSmallOrderedHashSet;

    case SMALL_ORDERED_NAME_DICTIONARY_TYPE:
      return kVisitSmallOrderedNameDictionary;

    case CODE_DATA_CONTAINER_TYPE:
      return kVisitCodeDataContainer;

    case WASM_INSTANCE_OBJECT_TYPE:
      return kVisitWasmInstanceObject;

    case PREPARSE_DATA_TYPE:
      return kVisitPreparseData;

    case UNCOMPILED_DATA_WITHOUT_PREPARSE_DATA_TYPE:
      return kVisitUncompiledDataWithoutPreparseData;

    case UNCOMPILED_DATA_WITH_PREPARSE_DATA_TYPE:
      return kVisitUncompiledDataWithPreparseData;

    case COVERAGE_INFO_TYPE:
      return kVisitCoverageInfo;

    // Ordinary JS objects: the visitor choice depends on whether the object
    // may contain raw (non-tagged) data words.
    case JS_OBJECT_TYPE:
    case JS_ERROR_TYPE:
    case JS_ARGUMENTS_OBJECT_TYPE:
    case JS_ASYNC_FROM_SYNC_ITERATOR_TYPE:
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
    case JS_GENERATOR_OBJECT_TYPE:
    case JS_ASYNC_FUNCTION_OBJECT_TYPE:
    case JS_ASYNC_GENERATOR_OBJECT_TYPE:
    case JS_MODULE_NAMESPACE_TYPE:
    case JS_PRIMITIVE_WRAPPER_TYPE:
    case JS_DATE_TYPE:
    case JS_ARRAY_ITERATOR_TYPE:
    case JS_ARRAY_TYPE:
    case JS_MESSAGE_OBJECT_TYPE:
    case JS_SET_TYPE:
    case JS_MAP_TYPE:
    case JS_SET_KEY_VALUE_ITERATOR_TYPE:
    case JS_SET_VALUE_ITERATOR_TYPE:
    case JS_MAP_KEY_ITERATOR_TYPE:
    case JS_MAP_KEY_VALUE_ITERATOR_TYPE:
    case JS_MAP_VALUE_ITERATOR_TYPE:
    case JS_STRING_ITERATOR_TYPE:
    case JS_PROMISE_TYPE:
    case JS_REG_EXP_TYPE:
    case JS_REG_EXP_STRING_ITERATOR_TYPE:
    case JS_FINALIZATION_REGISTRY_TYPE:
#ifdef V8_INTL_SUPPORT
    case JS_V8_BREAK_ITERATOR_TYPE:
    case JS_COLLATOR_TYPE:
    case JS_DATE_TIME_FORMAT_TYPE:
    case JS_DISPLAY_NAMES_TYPE:
    case JS_LIST_FORMAT_TYPE:
    case JS_LOCALE_TYPE:
    case JS_NUMBER_FORMAT_TYPE:
    case JS_PLURAL_RULES_TYPE:
    case JS_RELATIVE_TIME_FORMAT_TYPE:
    case JS_SEGMENT_ITERATOR_TYPE:
    case JS_SEGMENTER_TYPE:
    case JS_SEGMENTS_TYPE:
#endif  // V8_INTL_SUPPORT
    case WASM_EXCEPTION_OBJECT_TYPE:
    case WASM_GLOBAL_OBJECT_TYPE:
    case WASM_MEMORY_OBJECT_TYPE:
    case WASM_MODULE_OBJECT_TYPE:
    case WASM_TABLE_OBJECT_TYPE:
    case JS_BOUND_FUNCTION_TYPE: {
      // Unboxed doubles and (with pointer compression) embedder fields are
      // raw words that the fast visitor must not treat as tagged pointers.
      const bool has_raw_data_fields =
          (FLAG_unbox_double_fields && !map.HasFastPointerLayout()) ||
          (COMPRESS_POINTERS_BOOL && JSObject::GetEmbedderFieldCount(map) > 0);
      return has_raw_data_fields ? kVisitJSObject : kVisitJSObjectFast;
    }
    case JS_API_OBJECT_TYPE:
    case JS_GLOBAL_PROXY_TYPE:
    case JS_GLOBAL_OBJECT_TYPE:
    case JS_SPECIAL_API_OBJECT_TYPE:
      return kVisitJSApiObject;

    case JS_WEAK_REF_TYPE:
      return kVisitJSWeakRef;

    case WEAK_CELL_TYPE:
      return kVisitWeakCell;

    case FILLER_TYPE:
    case FOREIGN_TYPE:
    case HEAP_NUMBER_TYPE:
      return kVisitDataObject;

    case BIGINT_TYPE:
      return kVisitBigInt;

    case ALLOCATION_SITE_TYPE:
      return kVisitAllocationSite;

    // Structs share kVisitStruct, except for a few with dedicated visitors.
#define MAKE_STRUCT_CASE(TYPE, Name, name) case TYPE:
      STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
      if (instance_type == PROTOTYPE_INFO_TYPE) {
        return kVisitPrototypeInfo;
      }
      if (instance_type == WASM_CAPI_FUNCTION_DATA_TYPE) {
        return kVisitWasmCapiFunctionData;
      }
      if (instance_type == WASM_INDIRECT_FUNCTION_TABLE_TYPE) {
        return kVisitWasmIndirectFunctionTable;
      }
      return kVisitStruct;

    case LOAD_HANDLER_TYPE:
    case STORE_HANDLER_TYPE:
      return kVisitDataHandler;

    case SOURCE_TEXT_MODULE_TYPE:
      return kVisitSourceTextModule;
    case SYNTHETIC_MODULE_TYPE:
      return kVisitSyntheticModule;

    case WASM_ARRAY_TYPE:
      return kVisitWasmArray;
    case WASM_STRUCT_TYPE:
      return kVisitWasmStruct;
    case WASM_TYPE_INFO_TYPE:
      return kVisitWasmTypeInfo;

#define MAKE_TQ_CASE(TYPE, Name) \
  case TYPE:                     \
    return kVisit##Name;
      TORQUE_INSTANCE_TYPE_TO_BODY_DESCRIPTOR_LIST(MAKE_TQ_CASE)
#undef MAKE_TQ_CASE

    default:
      UNREACHABLE();
  }
}
374
// Writes a human-readable trace line to |file| describing a field
// generalization (representation / constness / field-type widening); used
// when tracing generalizations.
void Map::PrintGeneralization(
    Isolate* isolate, FILE* file, const char* reason,
    InternalIndex modify_index, int split, int descriptors,
    bool descriptor_to_field, Representation old_representation,
    Representation new_representation, PropertyConstness old_constness,
    PropertyConstness new_constness, MaybeHandle<FieldType> old_field_type,
    MaybeHandle<Object> old_value, MaybeHandle<FieldType> new_field_type,
    MaybeHandle<Object> new_value) {
  OFStream os(file);
  os << "[generalizing]";
  Name name = instance_descriptors(kRelaxedLoad).GetKey(modify_index);
  if (name.IsString()) {
    String::cast(name).PrintOn(file);
  } else {
    // Symbols have no printable payload; print the raw pointer instead.
    os << "{symbol " << reinterpret_cast<void*>(name.ptr()) << "}";
  }
  os << ":";
  if (descriptor_to_field) {
    os << "c";
  } else {
    os << old_representation.Mnemonic() << "{";
    // For each side, either a field type or a concrete value is supplied.
    if (old_field_type.is_null()) {
      os << Brief(*(old_value.ToHandleChecked()));
    } else {
      old_field_type.ToHandleChecked()->PrintTo(os);
    }
    os << ";" << old_constness << "}";
  }
  os << "->" << new_representation.Mnemonic() << "{";
  if (new_field_type.is_null()) {
    os << Brief(*(new_value.ToHandleChecked()));
  } else {
    new_field_type.ToHandleChecked()->PrintTo(os);
  }
  os << ";" << new_constness << "} (";
  if (strlen(reason) > 0) {
    os << reason;
  } else {
    // No explicit reason: report how many maps were involved in the split.
    os << "+" << (descriptors - split) << " maps";
  }
  os << ") [";
  JavaScriptFrame::PrintTop(isolate, file, false, true);
  os << "]\n";
}
419
420 // static
WrapFieldType(Isolate * isolate,Handle<FieldType> type)421 MaybeObjectHandle Map::WrapFieldType(Isolate* isolate, Handle<FieldType> type) {
422 if (type->IsClass()) {
423 return MaybeObjectHandle::Weak(type->AsClass(), isolate);
424 }
425 return MaybeObjectHandle(type);
426 }
427
428 // static
UnwrapFieldType(MaybeObject wrapped_type)429 FieldType Map::UnwrapFieldType(MaybeObject wrapped_type) {
430 if (wrapped_type->IsCleared()) {
431 return FieldType::None();
432 }
433 HeapObject heap_object;
434 if (wrapped_type->GetHeapObjectIfWeak(&heap_object)) {
435 return FieldType::cast(heap_object);
436 }
437 return wrapped_type->cast<FieldType>();
438 }
439
// Creates a copy of |map| extended by a new data field |name| with the given
// type / representation / attributes. Returns an empty MaybeHandle when the
// descriptor array is already at capacity. The caller must ensure the name
// is not already present (checked below in debug builds).
MaybeHandle<Map> Map::CopyWithField(Isolate* isolate, Handle<Map> map,
                                    Handle<Name> name, Handle<FieldType> type,
                                    PropertyAttributes attributes,
                                    PropertyConstness constness,
                                    Representation representation,
                                    TransitionFlag flag) {
  DCHECK(map->instance_descriptors(kRelaxedLoad)
             .Search(*name, map->NumberOfOwnDescriptors())
             .is_not_found());

  // Ensure the descriptor array does not get too big.
  if (map->NumberOfOwnDescriptors() >= kMaxNumberOfDescriptors) {
    return MaybeHandle<Map>();
  }

  // Compute the new index for new field.
  int index = map->NextFreePropertyIndex();

  if (map->instance_type() == JS_CONTEXT_EXTENSION_OBJECT_TYPE) {
    // Context extension objects always get fully generic fields.
    constness = PropertyConstness::kMutable;
    representation = Representation::Tagged();
    type = FieldType::Any(isolate);
  } else {
    Map::GeneralizeIfCanHaveTransitionableFastElementsKind(
        isolate, map->instance_type(), &representation, &type);
  }

  // Class field types are stored as weak references (see WrapFieldType).
  MaybeObjectHandle wrapped_type = WrapFieldType(isolate, type);

  Descriptor d = Descriptor::DataField(name, index, attributes, constness,
                                       representation, wrapped_type);
  Handle<Map> new_map = Map::CopyAddDescriptor(isolate, map, &d, flag);
  new_map->AccountAddedPropertyField();
  return new_map;
}
475
CopyWithConstant(Isolate * isolate,Handle<Map> map,Handle<Name> name,Handle<Object> constant,PropertyAttributes attributes,TransitionFlag flag)476 MaybeHandle<Map> Map::CopyWithConstant(Isolate* isolate, Handle<Map> map,
477 Handle<Name> name,
478 Handle<Object> constant,
479 PropertyAttributes attributes,
480 TransitionFlag flag) {
481 // Ensure the descriptor array does not get too big.
482 if (map->NumberOfOwnDescriptors() >= kMaxNumberOfDescriptors) {
483 return MaybeHandle<Map>();
484 }
485
486 Representation representation = constant->OptimalRepresentation(isolate);
487 Handle<FieldType> type = constant->OptimalType(isolate, representation);
488 return CopyWithField(isolate, map, name, type, attributes,
489 PropertyConstness::kConst, representation, flag);
490 }
491
TransitionRemovesTaggedField(Map target) const492 bool Map::TransitionRemovesTaggedField(Map target) const {
493 int inobject = NumberOfFields();
494 int target_inobject = target.NumberOfFields();
495 for (int i = target_inobject; i < inobject; i++) {
496 FieldIndex index = FieldIndex::ForPropertyIndex(*this, i);
497 if (!IsUnboxedDoubleField(index)) return true;
498 }
499 return false;
500 }
501
TransitionChangesTaggedFieldToUntaggedField(Map target) const502 bool Map::TransitionChangesTaggedFieldToUntaggedField(Map target) const {
503 int inobject = NumberOfFields();
504 int target_inobject = target.NumberOfFields();
505 int limit = std::min(inobject, target_inobject);
506 for (int i = 0; i < limit; i++) {
507 FieldIndex index = FieldIndex::ForPropertyIndex(target, i);
508 if (!IsUnboxedDoubleField(index) && target.IsUnboxedDoubleField(index)) {
509 return true;
510 }
511 }
512 return false;
513 }
514
TransitionRequiresSynchronizationWithGC(Map target) const515 bool Map::TransitionRequiresSynchronizationWithGC(Map target) const {
516 return TransitionRemovesTaggedField(target) ||
517 TransitionChangesTaggedFieldToUntaggedField(target);
518 }
519
InstancesNeedRewriting(Map target) const520 bool Map::InstancesNeedRewriting(Map target) const {
521 int target_number_of_fields = target.NumberOfFields();
522 int target_inobject = target.GetInObjectProperties();
523 int target_unused = target.UnusedPropertyFields();
524 int old_number_of_fields;
525
526 return InstancesNeedRewriting(target, target_number_of_fields,
527 target_inobject, target_unused,
528 &old_number_of_fields);
529 }
530
// Decides whether existing instances must be rewritten (as opposed to just
// switching their map pointer) when migrating to |target|.
// |*old_number_of_fields| is always filled in for the caller.
bool Map::InstancesNeedRewriting(Map target, int target_number_of_fields,
                                 int target_inobject, int target_unused,
                                 int* old_number_of_fields) const {
  // If fields were added (or removed), rewrite the instance.
  *old_number_of_fields = NumberOfFields();
  DCHECK(target_number_of_fields >= *old_number_of_fields);
  if (target_number_of_fields != *old_number_of_fields) return true;

  // If smi descriptors were replaced by double descriptors, rewrite.
  DescriptorArray old_desc = instance_descriptors(kRelaxedLoad);
  DescriptorArray new_desc = target.instance_descriptors(kRelaxedLoad);
  for (InternalIndex i : IterateOwnDescriptors()) {
    if (new_desc.GetDetails(i).representation().IsDouble() !=
        old_desc.GetDetails(i).representation().IsDouble()) {
      return true;
    }
  }

  // If no fields were added, and no inobject properties were removed, setting
  // the map is sufficient.
  if (target_inobject == GetInObjectProperties()) return false;
  // In-object slack tracking may have reduced the object size of the new map.
  // In that case, succeed if all existing fields were inobject, and they still
  // fit within the new inobject size.
  DCHECK(target_inobject < GetInObjectProperties());
  if (target_number_of_fields <= target_inobject) {
    DCHECK(target_number_of_fields + target_unused == target_inobject);
    return false;
  }
  // Otherwise, properties will need to be moved to the backing store.
  return true;
}
563
NumberOfFields() const564 int Map::NumberOfFields() const {
565 DescriptorArray descriptors = instance_descriptors(kRelaxedLoad);
566 int result = 0;
567 for (InternalIndex i : IterateOwnDescriptors()) {
568 if (descriptors.GetDetails(i).location() == kField) result++;
569 }
570 return result;
571 }
572
GetFieldCounts() const573 Map::FieldCounts Map::GetFieldCounts() const {
574 DescriptorArray descriptors = instance_descriptors(kRelaxedLoad);
575 int mutable_count = 0;
576 int const_count = 0;
577 for (InternalIndex i : IterateOwnDescriptors()) {
578 PropertyDetails details = descriptors.GetDetails(i);
579 if (details.location() == kField) {
580 switch (details.constness()) {
581 case PropertyConstness::kMutable:
582 mutable_count++;
583 break;
584 case PropertyConstness::kConst:
585 const_count++;
586 break;
587 }
588 }
589 }
590 return FieldCounts(mutable_count, const_count);
591 }
592
HasOutOfObjectProperties() const593 bool Map::HasOutOfObjectProperties() const {
594 return GetInObjectProperties() < NumberOfFields();
595 }
596
// Recursively marks this map and every map reachable through its transition
// tree as deprecated, and deoptimizes code depending on the transitions.
void Map::DeprecateTransitionTree(Isolate* isolate) {
  if (is_deprecated()) return;  // Subtree already handled.
  DisallowHeapAllocation no_gc;
  TransitionsAccessor transitions(isolate, *this, &no_gc);
  int num_transitions = transitions.NumberOfTransitions();
  // Depth-first over all transition targets.
  for (int i = 0; i < num_transitions; ++i) {
    transitions.GetTarget(i).DeprecateTransitionTree(isolate);
  }
  DCHECK(!constructor_or_backpointer().IsFunctionTemplateInfo());
  DCHECK(CanBeDeprecated());
  set_is_deprecated(true);
  if (FLAG_trace_maps) {
    LOG(isolate, MapEvent("Deprecate", handle(*this, isolate), Handle<Map>()));
  }
  // Code specialized on transitions out of this map is now invalid.
  dependent_code().DeoptimizeDependentCodeGroup(
      DependentCode::kTransitionGroup);
  NotifyLeafMapLayoutChange(isolate);
}
615
// Installs |new_descriptors| over the current instance_descriptors to ensure
// proper sharing of descriptor arrays.
void Map::ReplaceDescriptors(Isolate* isolate, DescriptorArray new_descriptors,
                             LayoutDescriptor new_layout_descriptor) {
  // Don't overwrite the empty descriptor array or initial map's descriptors.
  if (NumberOfOwnDescriptors() == 0 ||
      GetBackPointer(isolate).IsUndefined(isolate)) {
    return;
  }

  DescriptorArray to_replace = instance_descriptors(kRelaxedLoad);
  // Replace descriptors by new_descriptors in all maps that share it. The old
  // descriptors will not be trimmed in the mark-compactor, we need to mark
  // all its elements.
  Map current = *this;
#ifndef V8_DISABLE_WRITE_BARRIERS
  WriteBarrier::Marking(to_replace, to_replace.number_of_descriptors());
#endif
  // Walk the back-pointer chain, rewriting every map that still shares the
  // old descriptor array.
  while (current.instance_descriptors(isolate, kRelaxedLoad) == to_replace) {
    Object next = current.GetBackPointer(isolate);
    if (next.IsUndefined(isolate)) break;  // Stop overwriting at initial map.
    current.SetEnumLength(kInvalidEnumCacheSentinel);
    current.UpdateDescriptors(isolate, new_descriptors, new_layout_descriptor,
                              current.NumberOfOwnDescriptors());
    current = Map::cast(next);
  }
  set_owns_descriptors(false);
}
644
FindRootMap(Isolate * isolate) const645 Map Map::FindRootMap(Isolate* isolate) const {
646 Map result = *this;
647 while (true) {
648 Object back = result.GetBackPointer(isolate);
649 if (back.IsUndefined(isolate)) {
650 // Initial map must not contain descriptors in the descriptors array
651 // that do not belong to the map.
652 DCHECK_LE(
653 result.NumberOfOwnDescriptors(),
654 result.instance_descriptors(kRelaxedLoad).number_of_descriptors());
655 return result;
656 }
657 result = Map::cast(back);
658 }
659 }
660
// Walks the back-pointer chain to find the map that introduced the field at
// |descriptor|: the first map whose parent no longer owns that descriptor.
Map Map::FindFieldOwner(Isolate* isolate, InternalIndex descriptor) const {
  DisallowHeapAllocation no_allocation;
  // The caller must pass a descriptor that is stored as a field.
  DCHECK_EQ(kField, instance_descriptors(isolate, kRelaxedLoad)
                        .GetDetails(descriptor)
                        .location());
  Map result = *this;
  while (true) {
    Object back = result.GetBackPointer(isolate);
    if (back.IsUndefined(isolate)) break;  // Reached the root map.
    const Map parent = Map::cast(back);
    // Parent doesn't own this descriptor, so |result| introduced it.
    if (parent.NumberOfOwnDescriptors() <= descriptor.as_int()) break;
    result = parent;
  }
  return result;
}
676
// Updates the field type entry for |descriptor| in-place on this map and on
// every map in its transition tree that still shares the same (possibly
// shared) descriptor array entry, via a breadth-first walk.
void Map::UpdateFieldType(Isolate* isolate, InternalIndex descriptor,
                          Handle<Name> name, PropertyConstness new_constness,
                          Representation new_representation,
                          const MaybeObjectHandle& new_wrapped_type) {
  DCHECK(new_wrapped_type->IsSmi() || new_wrapped_type->IsWeak());
  // We store raw pointers in the queue, so no allocations are allowed.
  DisallowHeapAllocation no_allocation;
  PropertyDetails details =
      instance_descriptors(kRelaxedLoad).GetDetails(descriptor);
  if (details.location() != kField) return;
  DCHECK_EQ(kData, details.kind());

  // Changing constness on a prototype map invalidates prototype chain
  // validity cells.
  if (new_constness != details.constness() && is_prototype_map()) {
    JSObject::InvalidatePrototypeChains(*this);
  }

  Zone zone(isolate->allocator(), ZONE_NAME);
  ZoneQueue<Map> backlog(&zone);
  backlog.push(*this);

  while (!backlog.empty()) {
    Map current = backlog.front();
    backlog.pop();

    // Enqueue all transition targets so the whole subtree is visited.
    TransitionsAccessor transitions(isolate, current, &no_allocation);
    int num_transitions = transitions.NumberOfTransitions();
    for (int i = 0; i < num_transitions; ++i) {
      Map target = transitions.GetTarget(i);
      backlog.push(target);
    }
    DescriptorArray descriptors = current.instance_descriptors(kRelaxedLoad);
    PropertyDetails details = descriptors.GetDetails(descriptor);

    // It is allowed to change representation here only from None
    // to something or from Smi or HeapObject to Tagged.
    DCHECK(details.representation().Equals(new_representation) ||
           details.representation().CanBeInPlaceChangedTo(new_representation));

    // Skip if already updated the shared descriptor.
    if (new_constness != details.constness() ||
        !new_representation.Equals(details.representation()) ||
        descriptors.GetFieldType(descriptor) != *new_wrapped_type.object()) {
      Descriptor d = Descriptor::DataField(
          name, descriptors.GetFieldIndex(descriptor), details.attributes(),
          new_constness, new_representation, new_wrapped_type);
      descriptors.Replace(descriptor, &d);
    }
  }
}
726
FieldTypeIsCleared(Representation rep,FieldType type)727 bool FieldTypeIsCleared(Representation rep, FieldType type) {
728 return type.IsNone() && rep.IsHeapObject();
729 }
730
731 // static
GeneralizeFieldType(Representation rep1,Handle<FieldType> type1,Representation rep2,Handle<FieldType> type2,Isolate * isolate)732 Handle<FieldType> Map::GeneralizeFieldType(Representation rep1,
733 Handle<FieldType> type1,
734 Representation rep2,
735 Handle<FieldType> type2,
736 Isolate* isolate) {
737 // Cleared field types need special treatment. They represent lost knowledge,
738 // so we must be conservative, so their generalization with any other type
739 // is "Any".
740 if (FieldTypeIsCleared(rep1, *type1) || FieldTypeIsCleared(rep2, *type2)) {
741 return FieldType::Any(isolate);
742 }
743 if (type1->NowIs(type2)) return type2;
744 if (type2->NowIs(type1)) return type1;
745 return FieldType::Any(isolate);
746 }
747
748 // static
// Generalizes the constness / representation / field type of the descriptor
// at |modify_index| on |map|'s field-owner map, deoptimizing any code that
// depended on the old constness, type, or representation.
void Map::GeneralizeField(Isolate* isolate, Handle<Map> map,
                          InternalIndex modify_index,
                          PropertyConstness new_constness,
                          Representation new_representation,
                          Handle<FieldType> new_field_type) {
  // Check if we actually need to generalize the field type at all.
  Handle<DescriptorArray> old_descriptors(
      map->instance_descriptors(kRelaxedLoad), isolate);
  PropertyDetails old_details = old_descriptors->GetDetails(modify_index);
  PropertyConstness old_constness = old_details.constness();
  Representation old_representation = old_details.representation();
  Handle<FieldType> old_field_type(old_descriptors->GetFieldType(modify_index),
                                   isolate);

  // Return if the current map is general enough to hold requested constness and
  // representation/field type.
  if (IsGeneralizableTo(new_constness, old_constness) &&
      old_representation.Equals(new_representation) &&
      !FieldTypeIsCleared(new_representation, *new_field_type) &&
      // Checking old_field_type for being cleared is not necessary because
      // the NowIs check below would fail anyway in that case.
      new_field_type->NowIs(old_field_type)) {
    DCHECK(GeneralizeFieldType(old_representation, old_field_type,
                               new_representation, new_field_type, isolate)
               ->NowIs(old_field_type));
    return;
  }

  // Determine the field owner.
  Handle<Map> field_owner(map->FindFieldOwner(isolate, modify_index), isolate);
  Handle<DescriptorArray> descriptors(
      field_owner->instance_descriptors(kRelaxedLoad), isolate);
  DCHECK_EQ(*old_field_type, descriptors->GetFieldType(modify_index));

  // Join old and requested types/constness, then update the owner (and its
  // whole transition subtree) in place.
  new_field_type =
      Map::GeneralizeFieldType(old_representation, old_field_type,
                               new_representation, new_field_type, isolate);

  new_constness = GeneralizeConstness(old_constness, new_constness);

  PropertyDetails details = descriptors->GetDetails(modify_index);
  Handle<Name> name(descriptors->GetKey(modify_index), isolate);

  MaybeObjectHandle wrapped_type(WrapFieldType(isolate, new_field_type));
  field_owner->UpdateFieldType(isolate, modify_index, name, new_constness,
                               new_representation, wrapped_type);

  // Deoptimize code compiled against the old constness/type/representation.
  if (new_constness != old_constness) {
    field_owner->dependent_code().DeoptimizeDependentCodeGroup(
        DependentCode::kFieldConstGroup);
  }

  if (!new_field_type->Equals(*old_field_type)) {
    field_owner->dependent_code().DeoptimizeDependentCodeGroup(
        DependentCode::kFieldTypeGroup);
  }

  if (!new_representation.Equals(old_representation)) {
    field_owner->dependent_code().DeoptimizeDependentCodeGroup(
        DependentCode::kFieldRepresentationGroup);
  }

  if (FLAG_trace_generalization) {
    map->PrintGeneralization(
        isolate, stdout, "field type generalization", modify_index,
        map->NumberOfOwnDescriptors(), map->NumberOfOwnDescriptors(), false,
        details.representation(),
        descriptors->GetDetails(modify_index).representation(), old_constness,
        new_constness, old_field_type, MaybeHandle<Object>(), new_field_type,
        MaybeHandle<Object>());
  }
}
821
822 // TODO(ishell): remove.
823 // static
// Reconfigures descriptor |modify_index| of |map| to a const data field with
// the given attributes / representation / field type, delegating the heavy
// lifting to MapUpdater.
Handle<Map> Map::ReconfigureProperty(Isolate* isolate, Handle<Map> map,
                                     InternalIndex modify_index,
                                     PropertyKind new_kind,
                                     PropertyAttributes new_attributes,
                                     Representation new_representation,
                                     Handle<FieldType> new_field_type) {
  DCHECK_EQ(kData, new_kind);  // Only kData case is supported.
  MapUpdater mu(isolate, map);
  return mu.ReconfigureToDataField(modify_index, new_attributes,
                                   PropertyConstness::kConst,
                                   new_representation, new_field_type);
}
836
837 // TODO(ishell): remove.
838 // static
// Returns a map equivalent to |map| but with |new_elements_kind|, delegating
// to MapUpdater.
Handle<Map> Map::ReconfigureElementsKind(Isolate* isolate, Handle<Map> map,
                                         ElementsKind new_elements_kind) {
  MapUpdater mu(isolate, map);
  return mu.ReconfigureElementsKind(new_elements_kind);
}
844
845 namespace {
846
// Follows cached migration-target links from |old_map| to a non-deprecated
// map that instances can migrate to. Returns an empty Map when no usable
// target is cached or when validation fails (callers then fall back to the
// slow map-update path).
Map SearchMigrationTarget(Isolate* isolate, Map old_map) {
  DisallowHeapAllocation no_allocation;
  DisallowDeoptimization no_deoptimization(isolate);

  Map target = old_map;
  do {
    target = TransitionsAccessor(isolate, target, &no_allocation)
                 .GetMigrationTarget();
  } while (!target.is_null() && target.is_deprecated());
  if (target.is_null()) return Map();

  // TODO(ishell): if this validation ever become a bottleneck consider adding a
  // bit to the Map telling whether it contains fields whose field types may be
  // cleared.
  // TODO(ishell): revisit handling of cleared field types in
  // TryReplayPropertyTransitions() and consider checking the target map's field
  // types instead of old_map's types.
  // Go to slow map updating if the old_map has fast properties with cleared
  // field types.
  DescriptorArray old_descriptors = old_map.instance_descriptors(kRelaxedLoad);
  for (InternalIndex i : old_map.IterateOwnDescriptors()) {
    PropertyDetails old_details = old_descriptors.GetDetails(i);
    if (old_details.location() == kField && old_details.kind() == kData) {
      FieldType old_type = old_descriptors.GetFieldType(i);
      if (FieldTypeIsCleared(old_details.representation(), old_type)) {
        return Map();
      }
    }
  }

  // The cached target must agree with what the slow path would compute.
  SLOW_DCHECK(Map::TryUpdateSlow(isolate, old_map) == target);
  return target;
}
880 } // namespace
881
882 // TODO(ishell): Move TryUpdate() and friends to MapUpdater
883 // static
TryUpdate(Isolate * isolate,Handle<Map> old_map)884 MaybeHandle<Map> Map::TryUpdate(Isolate* isolate, Handle<Map> old_map) {
885 DisallowHeapAllocation no_allocation;
886 DisallowDeoptimization no_deoptimization(isolate);
887
888 if (!old_map->is_deprecated()) return old_map;
889
890 if (FLAG_fast_map_update) {
891 Map target_map = SearchMigrationTarget(isolate, *old_map);
892 if (!target_map.is_null()) {
893 return handle(target_map, isolate);
894 }
895 }
896
897 Map new_map = TryUpdateSlow(isolate, *old_map);
898 if (new_map.is_null()) return MaybeHandle<Map>();
899 if (FLAG_fast_map_update) {
900 TransitionsAccessor(isolate, *old_map, &no_allocation)
901 .SetMigrationTarget(new_map);
902 }
903 return handle(new_map, isolate);
904 }
905
906 namespace {
907
908 struct IntegrityLevelTransitionInfo {
IntegrityLevelTransitionInfov8::internal::__anone8c70dfb0211::IntegrityLevelTransitionInfo909 explicit IntegrityLevelTransitionInfo(Map map)
910 : integrity_level_source_map(map) {}
911
912 bool has_integrity_level_transition = false;
913 PropertyAttributes integrity_level = NONE;
914 Map integrity_level_source_map;
915 Symbol integrity_level_symbol;
916 };
917
// Walks |map|'s back-pointer chain and checks whether the transitions leading
// to |map| are exclusively integrity level transitions. On success the
// returned info has |has_integrity_level_transition| set and records the
// transition symbol, the implied attributes, and the extensible source map
// the transitions started from; otherwise the info is returned unmodified.
IntegrityLevelTransitionInfo DetectIntegrityLevelTransitions(
    Map map, Isolate* isolate, DisallowHeapAllocation* no_allocation) {
  IntegrityLevelTransitionInfo info(map);

  // Figure out the most restrictive integrity level transition (it should
  // be the last one in the transition tree).
  DCHECK(!map.is_extensible());
  Map previous = Map::cast(map.GetBackPointer(isolate));
  TransitionsAccessor last_transitions(isolate, previous, no_allocation);
  if (!last_transitions.HasIntegrityLevelTransitionTo(
          map, &(info.integrity_level_symbol), &(info.integrity_level))) {
    // The last transition was not integrity level transition - just bail out.
    // This can happen in the following cases:
    // - there are private symbol transitions following the integrity level
    // transitions (see crbug.com/v8/8854).
    // - there is a getter added in addition to an existing setter (or a setter
    // in addition to an existing getter).
    return info;
  }

  Map source_map = previous;
  // Now walk up the back pointer chain and skip all integrity level
  // transitions. If we encounter any non-integrity level transition interleaved
  // with integrity level transitions, just bail out.
  while (!source_map.is_extensible()) {
    previous = Map::cast(source_map.GetBackPointer(isolate));
    TransitionsAccessor transitions(isolate, previous, no_allocation);
    if (!transitions.HasIntegrityLevelTransitionTo(source_map)) {
      return info;
    }
    source_map = previous;
  }

  // Integrity-level transitions never change number of descriptors.
  CHECK_EQ(map.NumberOfOwnDescriptors(), source_map.NumberOfOwnDescriptors());

  info.has_integrity_level_transition = true;
  info.integrity_level_source_map = source_map;
  return info;
}
958
959 } // namespace
960
// Slow path of map updating: re-derives an up-to-date map equivalent to
// |old_map| by replaying its property (and, if needed, elements kind and
// integrity level) transitions starting from the root map. Returns an empty
// Map when no such up-to-date map can be found.
Map Map::TryUpdateSlow(Isolate* isolate, Map old_map) {
  DisallowHeapAllocation no_allocation;
  DisallowDeoptimization no_deoptimization(isolate);

  // Check the state of the root map.
  Map root_map = old_map.FindRootMap(isolate);
  if (root_map.is_deprecated()) {
    // A deprecated root map means the constructor's initial map was replaced
    // by a dictionary map; use that, provided elements kinds agree.
    JSFunction constructor = JSFunction::cast(root_map.GetConstructor());
    DCHECK(constructor.has_initial_map());
    DCHECK(constructor.initial_map().is_dictionary_map());
    if (constructor.initial_map().elements_kind() != old_map.elements_kind()) {
      return Map();
    }
    return constructor.initial_map();
  }
  if (!old_map.EquivalentToForTransition(root_map)) return Map();

  ElementsKind from_kind = root_map.elements_kind();
  ElementsKind to_kind = old_map.elements_kind();

  IntegrityLevelTransitionInfo info(old_map);
  if (root_map.is_extensible() != old_map.is_extensible()) {
    DCHECK(!old_map.is_extensible());
    DCHECK(root_map.is_extensible());
    info = DetectIntegrityLevelTransitions(old_map, isolate, &no_allocation);
    // Bail out if there were some private symbol transitions mixed up
    // with the integrity level transitions.
    if (!info.has_integrity_level_transition) return Map();
    // Make sure to replay the original elements kind transitions, before
    // the integrity level transition sets the elements to dictionary mode.
    DCHECK(to_kind == DICTIONARY_ELEMENTS ||
           to_kind == SLOW_STRING_WRAPPER_ELEMENTS ||
           IsTypedArrayElementsKind(to_kind) ||
           IsAnyHoleyNonextensibleElementsKind(to_kind));
    to_kind = info.integrity_level_source_map.elements_kind();
  }
  if (from_kind != to_kind) {
    // Try to follow existing elements kind transitions.
    root_map = root_map.LookupElementsTransitionMap(isolate, to_kind);
    if (root_map.is_null()) return Map();
    // From here on, use the map with correct elements kind as root map.
  }

  // Replay the transitions as they were before the integrity level transition.
  Map result = root_map.TryReplayPropertyTransitions(
      isolate, info.integrity_level_source_map);
  if (result.is_null()) return Map();

  if (info.has_integrity_level_transition) {
    // Now replay the integrity level transition.
    result = TransitionsAccessor(isolate, result, &no_allocation)
                 .SearchSpecial(info.integrity_level_symbol);
  }

  // The replayed map, if any, must agree with the original on elements kind
  // and instance type.
  DCHECK_IMPLIES(!result.is_null(),
                 old_map.elements_kind() == result.elements_kind());
  DCHECK_IMPLIES(!result.is_null(),
                 old_map.instance_type() == result.instance_type());
  return result;
}
1021
// Starting from |*this| (which must share a root with |old_map|), follows the
// existing transition tree descriptor-by-descriptor to find a map with the
// same properties as |old_map| but up-to-date representations/field types.
// Returns an empty Map when a matching transition is missing or when the old
// details are incompatible with (not generalizable to) the new ones.
Map Map::TryReplayPropertyTransitions(Isolate* isolate, Map old_map) {
  DisallowHeapAllocation no_allocation;
  DisallowDeoptimization no_deoptimization(isolate);

  int root_nof = NumberOfOwnDescriptors();

  int old_nof = old_map.NumberOfOwnDescriptors();
  DescriptorArray old_descriptors = old_map.instance_descriptors(kRelaxedLoad);

  Map new_map = *this;
  // Replay each of old_map's descriptors beyond those already present on the
  // root by searching for the corresponding transition.
  for (InternalIndex i : InternalIndex::Range(root_nof, old_nof)) {
    PropertyDetails old_details = old_descriptors.GetDetails(i);
    Map transition =
        TransitionsAccessor(isolate, new_map, &no_allocation)
            .SearchTransition(old_descriptors.GetKey(i), old_details.kind(),
                              old_details.attributes());
    if (transition.is_null()) return Map();
    new_map = transition;
    DescriptorArray new_descriptors =
        new_map.instance_descriptors(kRelaxedLoad);

    PropertyDetails new_details = new_descriptors.GetDetails(i);
    DCHECK_EQ(old_details.kind(), new_details.kind());
    DCHECK_EQ(old_details.attributes(), new_details.attributes());
    // The old constness/representation must fit into the new one; otherwise
    // this map cannot represent the old property.
    if (!IsGeneralizableTo(old_details.constness(), new_details.constness())) {
      return Map();
    }
    DCHECK(IsGeneralizableTo(old_details.location(), new_details.location()));
    if (!old_details.representation().fits_into(new_details.representation())) {
      return Map();
    }
    if (new_details.location() == kField) {
      if (new_details.kind() == kData) {
        FieldType new_type = new_descriptors.GetFieldType(i);
        // Cleared field types need special treatment. They represent lost
        // knowledge, so we must first generalize the new_type to "Any".
        if (FieldTypeIsCleared(new_details.representation(), new_type)) {
          return Map();
        }
        DCHECK_EQ(kData, old_details.kind());
        DCHECK_EQ(kField, old_details.location());
        FieldType old_type = old_descriptors.GetFieldType(i);
        if (FieldTypeIsCleared(old_details.representation(), old_type) ||
            !old_type.NowIs(new_type)) {
          return Map();
        }
      } else {
        // kAccessor-in-field descriptors are not expected here.
        DCHECK_EQ(kAccessor, new_details.kind());
#ifdef DEBUG
        FieldType new_type = new_descriptors.GetFieldType(i);
        DCHECK(new_type.IsAny());
#endif
        UNREACHABLE();
      }
    } else {
      // kDescriptor location: values must match exactly.
      DCHECK_EQ(kDescriptor, new_details.location());
      if (old_details.location() == kField ||
          old_descriptors.GetStrongValue(i) !=
              new_descriptors.GetStrongValue(i)) {
        return Map();
      }
    }
  }
  // The found map must not carry extra descriptors beyond old_map's.
  if (new_map.NumberOfOwnDescriptors() != old_nof) return Map();
  return new_map;
}
1088
1089 // static
Update(Isolate * isolate,Handle<Map> map)1090 Handle<Map> Map::Update(Isolate* isolate, Handle<Map> map) {
1091 if (!map->is_deprecated()) return map;
1092 if (FLAG_fast_map_update) {
1093 Map target_map = SearchMigrationTarget(isolate, *map);
1094 if (!target_map.is_null()) {
1095 return handle(target_map, isolate);
1096 }
1097 }
1098 MapUpdater mu(isolate, map);
1099 return mu.Update();
1100 }
1101
// Ensures |map|'s (owned) descriptor array has at least |slack| unused slots,
// copying the array if necessary and installing the copy on every map in the
// back-pointer chain that shares the same array.
void Map::EnsureDescriptorSlack(Isolate* isolate, Handle<Map> map, int slack) {
  // Only supports adding slack to owned descriptors.
  DCHECK(map->owns_descriptors());

  Handle<DescriptorArray> descriptors(map->instance_descriptors(kRelaxedLoad),
                                      isolate);
  int old_size = map->NumberOfOwnDescriptors();
  // Existing slack already suffices: nothing to do.
  if (slack <= descriptors->number_of_slack_descriptors()) return;

  Handle<DescriptorArray> new_descriptors =
      DescriptorArray::CopyUpTo(isolate, descriptors, old_size, slack);

  DisallowHeapAllocation no_allocation;
  // The descriptors are still the same, so keep the layout descriptor.
  LayoutDescriptor layout_descriptor = map->GetLayoutDescriptor();

  if (old_size == 0) {
    // With no descriptors yet, only this map needs the new (empty) array.
    map->UpdateDescriptors(isolate, *new_descriptors, layout_descriptor,
                           map->NumberOfOwnDescriptors());
    return;
  }

  // If the source descriptors had an enum cache we copy it. This ensures
  // that the maps to which we push the new descriptor array back can rely
  // on a cache always being available once it is set. If the map has more
  // enumerated descriptors than available in the original cache, the cache
  // will be lazily replaced by the extended cache when needed.
  new_descriptors->CopyEnumCacheFrom(*descriptors);

  // Replace descriptors by new_descriptors in all maps that share it. The old
  // descriptors will not be trimmed in the mark-compactor, we need to mark
  // all its elements.
#ifndef V8_DISABLE_WRITE_BARRIERS
  WriteBarrier::Marking(*descriptors, descriptors->number_of_descriptors());
#endif

  // Walk the back-pointer chain and update every map still pointing at the
  // old descriptor array, stopping at the initial map (undefined back
  // pointer) or at the first map with a different array.
  Map current = *map;
  while (current.instance_descriptors(kRelaxedLoad) == *descriptors) {
    Object next = current.GetBackPointer();
    if (next.IsUndefined(isolate)) break;  // Stop overwriting at initial map.
    current.UpdateDescriptors(isolate, *new_descriptors, layout_descriptor,
                              current.NumberOfOwnDescriptors());
    current = Map::cast(next);
  }
  map->UpdateDescriptors(isolate, *new_descriptors, layout_descriptor,
                         map->NumberOfOwnDescriptors());
}
1149
1150 // static
GetObjectCreateMap(Isolate * isolate,Handle<HeapObject> prototype)1151 Handle<Map> Map::GetObjectCreateMap(Isolate* isolate,
1152 Handle<HeapObject> prototype) {
1153 Handle<Map> map(isolate->native_context()->object_function().initial_map(),
1154 isolate);
1155 if (map->prototype() == *prototype) return map;
1156 if (prototype->IsNull(isolate)) {
1157 return isolate->slow_object_with_null_prototype_map();
1158 }
1159 if (prototype->IsJSObject()) {
1160 Handle<JSObject> js_prototype = Handle<JSObject>::cast(prototype);
1161 if (!js_prototype->map().is_prototype_map()) {
1162 JSObject::OptimizeAsPrototype(js_prototype);
1163 }
1164 Handle<PrototypeInfo> info =
1165 Map::GetOrCreatePrototypeInfo(js_prototype, isolate);
1166 // TODO(verwaest): Use inobject slack tracking for this map.
1167 if (info->HasObjectCreateMap()) {
1168 map = handle(info->ObjectCreateMap(), isolate);
1169 } else {
1170 map = Map::CopyInitialMap(isolate, map);
1171 Map::SetPrototype(isolate, map, prototype);
1172 PrototypeInfo::SetObjectCreateMap(info, map);
1173 }
1174 return map;
1175 }
1176
1177 return Map::TransitionToPrototype(isolate, map, prototype);
1178 }
1179
1180 // static
TryGetObjectCreateMap(Isolate * isolate,Handle<HeapObject> prototype)1181 MaybeHandle<Map> Map::TryGetObjectCreateMap(Isolate* isolate,
1182 Handle<HeapObject> prototype) {
1183 Handle<Map> map(isolate->native_context()->object_function().initial_map(),
1184 isolate);
1185 if (map->prototype() == *prototype) return map;
1186 if (prototype->IsNull(isolate)) {
1187 return isolate->slow_object_with_null_prototype_map();
1188 }
1189 if (!prototype->IsJSObject()) return MaybeHandle<Map>();
1190 Handle<JSObject> js_prototype = Handle<JSObject>::cast(prototype);
1191 if (!js_prototype->map().is_prototype_map()) return MaybeHandle<Map>();
1192 Handle<PrototypeInfo> info =
1193 Map::GetOrCreatePrototypeInfo(js_prototype, isolate);
1194 if (!info->HasObjectCreateMap()) return MaybeHandle<Map>();
1195 return handle(info->ObjectCreateMap(), isolate);
1196 }
1197
ContainsMap(MapHandles const & maps,Map map)1198 static bool ContainsMap(MapHandles const& maps, Map map) {
1199 DCHECK(!map.is_null());
1200 for (Handle<Map> current : maps) {
1201 if (!current.is_null() && *current == map) return true;
1202 }
1203 return false;
1204 }
1205
HasElementsKind(MapHandles const & maps,ElementsKind elements_kind)1206 static bool HasElementsKind(MapHandles const& maps,
1207 ElementsKind elements_kind) {
1208 for (Handle<Map> current : maps) {
1209 if (!current.is_null() && current->elements_kind() == elements_kind)
1210 return true;
1211 }
1212 return false;
1213 }
1214
// Searches |candidates| for a map that this map could reach via an elements
// kind transition (plus replayed property transitions) without requiring
// instance rewriting. Returns an empty Map when none is found. Packedness may
// only be relaxed (packed -> holey), never re-tightened.
Map Map::FindElementsKindTransitionedMap(Isolate* isolate,
                                         MapHandles const& candidates) {
  DisallowHeapAllocation no_allocation;
  DisallowDeoptimization no_deoptimization(isolate);

  if (IsDetached(isolate)) return Map();

  ElementsKind kind = elements_kind();
  bool packed = IsFastPackedElementsKind(kind);

  Map transition;
  if (IsTransitionableFastElementsKind(kind)) {
    // Check the state of the root map.
    Map root_map = FindRootMap(isolate);
    if (!EquivalentToForElementsKindTransition(root_map)) return Map();
    root_map = root_map.LookupElementsTransitionMap(isolate, kind);
    DCHECK(!root_map.is_null());
    // Starting from the next existing elements kind transition try to
    // replay the property transitions that does not involve instance rewriting
    // (ElementsTransitionAndStoreStub does not support that).
    for (root_map = root_map.ElementsTransitionMap(isolate);
         !root_map.is_null() && root_map.has_fast_elements();
         root_map = root_map.ElementsTransitionMap(isolate)) {
      // If root_map's elements kind doesn't match any of the elements kind in
      // the candidates there is no need to do any additional work.
      if (!HasElementsKind(candidates, root_map.elements_kind())) continue;
      Map current = root_map.TryReplayPropertyTransitions(isolate, *this);
      if (current.is_null()) continue;
      if (InstancesNeedRewriting(current)) continue;

      // Keep the most general acceptable candidate: later iterations may
      // replace |transition| with a more general (holey) one.
      if (ContainsMap(candidates, current) &&
          (packed || !IsFastPackedElementsKind(current.elements_kind()))) {
        transition = current;
        packed = packed && IsFastPackedElementsKind(current.elements_kind());
      }
    }
  }
  return transition;
}
1254
FindClosestElementsTransition(Isolate * isolate,Map map,ElementsKind to_kind)1255 static Map FindClosestElementsTransition(Isolate* isolate, Map map,
1256 ElementsKind to_kind) {
1257 // Ensure we are requested to search elements kind transition "near the root".
1258 DCHECK_EQ(map.FindRootMap(isolate).NumberOfOwnDescriptors(),
1259 map.NumberOfOwnDescriptors());
1260 Map current_map = map;
1261
1262 ElementsKind kind = map.elements_kind();
1263 while (kind != to_kind) {
1264 Map next_map = current_map.ElementsTransitionMap(isolate);
1265 if (next_map.is_null()) return current_map;
1266 kind = next_map.elements_kind();
1267 current_map = next_map;
1268 }
1269
1270 DCHECK_EQ(to_kind, current_map.elements_kind());
1271 return current_map;
1272 }
1273
LookupElementsTransitionMap(Isolate * isolate,ElementsKind to_kind)1274 Map Map::LookupElementsTransitionMap(Isolate* isolate, ElementsKind to_kind) {
1275 Map to_map = FindClosestElementsTransition(isolate, *this, to_kind);
1276 if (to_map.elements_kind() == to_kind) return to_map;
1277 return Map();
1278 }
1279
IsMapInArrayPrototypeChain(Isolate * isolate) const1280 bool Map::IsMapInArrayPrototypeChain(Isolate* isolate) const {
1281 if (isolate->initial_array_prototype()->map() == *this) {
1282 return true;
1283 }
1284
1285 if (isolate->initial_object_prototype()->map() == *this) {
1286 return true;
1287 }
1288
1289 return false;
1290 }
1291
// Returns a map equivalent to |map| but with elements kind |to_kind|, reusing
// well-known context maps (aliased arguments, initial JSArray maps) and
// existing transitions where possible before falling back to copying or to
// MapUpdater-based reconfiguration.
Handle<Map> Map::TransitionElementsTo(Isolate* isolate, Handle<Map> map,
                                      ElementsKind to_kind) {
  ElementsKind from_kind = map->elements_kind();
  // Already at the requested kind: nothing to do.
  if (from_kind == to_kind) return map;

  Context native_context = isolate->context().native_context();
  if (from_kind == FAST_SLOPPY_ARGUMENTS_ELEMENTS) {
    // Canonical fast aliased arguments map transitions only to its slow twin.
    if (*map == native_context.fast_aliased_arguments_map()) {
      DCHECK_EQ(SLOW_SLOPPY_ARGUMENTS_ELEMENTS, to_kind);
      return handle(native_context.slow_aliased_arguments_map(), isolate);
    }
  } else if (from_kind == SLOW_SLOPPY_ARGUMENTS_ELEMENTS) {
    // ... and vice versa.
    if (*map == native_context.slow_aliased_arguments_map()) {
      DCHECK_EQ(FAST_SLOPPY_ARGUMENTS_ELEMENTS, to_kind);
      return handle(native_context.fast_aliased_arguments_map(), isolate);
    }
  } else if (IsFastElementsKind(from_kind) && IsFastElementsKind(to_kind)) {
    // Reuse map transitions for JSArrays.
    DisallowHeapAllocation no_gc;
    if (native_context.GetInitialJSArrayMap(from_kind) == *map) {
      Object maybe_transitioned_map =
          native_context.get(Context::ArrayMapIndex(to_kind));
      if (maybe_transitioned_map.IsMap()) {
        return handle(Map::cast(maybe_transitioned_map), isolate);
      }
    }
  }

  DCHECK(!map->IsUndefined(isolate));
  // Check if we can go back in the elements kind transition chain.
  if (IsHoleyElementsKind(from_kind) &&
      to_kind == GetPackedElementsKind(from_kind) &&
      map->GetBackPointer().IsMap() &&
      Map::cast(map->GetBackPointer()).elements_kind() == to_kind) {
    return handle(Map::cast(map->GetBackPointer()), isolate);
  }

  bool allow_store_transition = IsTransitionElementsKind(from_kind);
  // Only store fast element maps in ascending generality.
  if (IsFastElementsKind(to_kind)) {
    allow_store_transition =
        allow_store_transition && IsTransitionableFastElementsKind(from_kind) &&
        IsMoreGeneralElementsKindTransition(from_kind, to_kind);
  }

  if (!allow_store_transition) {
    // Create a standalone copy without recording a transition.
    return Map::CopyAsElementsKind(isolate, map, to_kind, OMIT_TRANSITION);
  }

  return Map::ReconfigureElementsKind(isolate, map, to_kind);
}
1343
// Creates the chain of elements kind transition maps from |map| up to
// |to_kind|, stepping through the intermediate fast kinds one at a time, and
// returns the final map. Detached maps get copies without recorded
// transitions.
static Handle<Map> AddMissingElementsTransitions(Isolate* isolate,
                                                 Handle<Map> map,
                                                 ElementsKind to_kind) {
  DCHECK(IsTransitionElementsKind(map->elements_kind()));

  Handle<Map> current_map = map;

  ElementsKind kind = map->elements_kind();
  TransitionFlag flag;
  if (map->IsDetached(isolate)) {
    // Detached maps must not record transitions.
    flag = OMIT_TRANSITION;
  } else {
    flag = INSERT_TRANSITION;
    if (IsFastElementsKind(kind)) {
      // Step through the intermediate fast kinds until |to_kind| or a
      // terminal kind is reached.
      while (kind != to_kind && !IsTerminalElementsKind(kind)) {
        kind = GetNextTransitionElementsKind(kind);
        current_map = Map::CopyAsElementsKind(isolate, current_map, kind, flag);
      }
    }
  }

  // In case we are exiting the fast elements kind system, just add the map in
  // the end.
  if (kind != to_kind) {
    current_map = Map::CopyAsElementsKind(isolate, current_map, to_kind, flag);
  }

  DCHECK(current_map->elements_kind() == to_kind);
  return current_map;
}
1374
1375 // static
AsElementsKind(Isolate * isolate,Handle<Map> map,ElementsKind kind)1376 Handle<Map> Map::AsElementsKind(Isolate* isolate, Handle<Map> map,
1377 ElementsKind kind) {
1378 Handle<Map> closest_map(FindClosestElementsTransition(isolate, *map, kind),
1379 isolate);
1380
1381 if (closest_map->elements_kind() == kind) {
1382 return closest_map;
1383 }
1384
1385 return AddMissingElementsTransitions(isolate, closest_map, kind);
1386 }
1387
NumberOfEnumerableProperties() const1388 int Map::NumberOfEnumerableProperties() const {
1389 int result = 0;
1390 DescriptorArray descs = instance_descriptors(kRelaxedLoad);
1391 for (InternalIndex i : IterateOwnDescriptors()) {
1392 if ((descs.GetDetails(i).attributes() & ONLY_ENUMERABLE) == 0 &&
1393 !descs.GetKey(i).FilterKey(ENUMERABLE_STRINGS)) {
1394 result++;
1395 }
1396 }
1397 return result;
1398 }
1399
NextFreePropertyIndex() const1400 int Map::NextFreePropertyIndex() const {
1401 int number_of_own_descriptors = NumberOfOwnDescriptors();
1402 DescriptorArray descs = instance_descriptors(kRelaxedLoad);
1403 // Search properties backwards to find the last field.
1404 for (int i = number_of_own_descriptors - 1; i >= 0; --i) {
1405 PropertyDetails details = descs.GetDetails(InternalIndex(i));
1406 if (details.location() == kField) {
1407 return details.field_index() + details.field_width_in_words();
1408 }
1409 }
1410 return 0;
1411 }
1412
OnlyHasSimpleProperties() const1413 bool Map::OnlyHasSimpleProperties() const {
1414 // Wrapped string elements aren't explicitly stored in the elements backing
1415 // store, but are loaded indirectly from the underlying string.
1416 return !IsStringWrapperElementsKind(elements_kind()) &&
1417 !IsSpecialReceiverMap() && !is_dictionary_map();
1418 }
1419
// Conservatively answers whether any object on this map's prototype chain may
// have read-only (or otherwise slow/special) elements. False positives are
// possible by design; false negatives are not.
bool Map::MayHaveReadOnlyElementsInPrototypeChain(Isolate* isolate) {
  for (PrototypeIterator iter(isolate, *this); !iter.IsAtEnd();
       iter.Advance()) {
    // Be conservative, don't look into any JSReceivers that may have custom
    // elements. For example, into JSProxies, String wrappers (which have have
    // non-configurable, non-writable elements), API objects, etc.
    if (iter.GetCurrent().map().IsCustomElementsReceiverMap()) return true;

    JSObject current = iter.GetCurrent<JSObject>();
    ElementsKind elements_kind = current.GetElementsKind(isolate);
    // Frozen elements are read-only by definition.
    if (IsFrozenElementsKind(elements_kind)) return true;

    // Dictionary elements may individually be marked non-writable.
    if (IsDictionaryElementsKind(elements_kind) &&
        current.element_dictionary(isolate).requires_slow_elements()) {
      return true;
    }

    // Slow arguments objects store their elements in a nested dictionary.
    if (IsSlowArgumentsElementsKind(elements_kind)) {
      SloppyArgumentsElements elements =
          SloppyArgumentsElements::cast(current.elements(isolate));
      Object arguments = elements.arguments();
      if (NumberDictionary::cast(arguments).requires_slow_elements()) {
        return true;
      }
    }
  }

  return false;
}
1449
// Allocates a fresh map copying |map|'s instance type, prototype, constructor
// and bit fields, with the given instance size and in-object property count.
// Selected bit_field3 bits are reset so the copy starts out owning its (empty)
// descriptors and is neither deprecated nor in the retained-map list.
Handle<Map> Map::RawCopy(Isolate* isolate, Handle<Map> map, int instance_size,
                         int inobject_properties) {
  Handle<Map> result = isolate->factory()->NewMap(
      map->instance_type(), instance_size, TERMINAL_FAST_ELEMENTS_KIND,
      inobject_properties);
  Handle<HeapObject> prototype(map->prototype(), isolate);
  Map::SetPrototype(isolate, result, prototype);
  result->set_constructor_or_backpointer(map->GetConstructor());
  result->set_relaxed_bit_field(map->bit_field());
  result->set_bit_field2(map->bit_field2());
  // Copy bit_field3 but reset the bits that must not carry over to a copy.
  int new_bit_field3 = map->bit_field3();
  new_bit_field3 = Bits3::OwnsDescriptorsBit::update(new_bit_field3, true);
  new_bit_field3 = Bits3::NumberOfOwnDescriptorsBits::update(new_bit_field3, 0);
  new_bit_field3 =
      Bits3::EnumLengthBits::update(new_bit_field3, kInvalidEnumCacheSentinel);
  new_bit_field3 = Bits3::IsDeprecatedBit::update(new_bit_field3, false);
  new_bit_field3 = Bits3::IsInRetainedMapListBit::update(new_bit_field3, false);
  // Dictionary maps keep their unstable bit; fast maps start out stable.
  if (!map->is_dictionary_map()) {
    new_bit_field3 = Bits3::IsUnstableBit::update(new_bit_field3, false);
  }
  result->set_bit_field3(new_bit_field3);
  result->clear_padding();
  return result;
}
1474
// Returns a dictionary-mode (normalized) version of |fast_map| with the given
// elements kind, served from the per-native-context NormalizedMapCache when
// possible. |reason| is only used for --trace-maps logging.
Handle<Map> Map::Normalize(Isolate* isolate, Handle<Map> fast_map,
                           ElementsKind new_elements_kind,
                           PropertyNormalizationMode mode, const char* reason) {
  DCHECK(!fast_map->is_dictionary_map());

  Handle<Object> maybe_cache(isolate->native_context()->normalized_map_cache(),
                             isolate);
  // Prototype maps are not cached; the cache slot may also be undefined.
  bool use_cache =
      !fast_map->is_prototype_map() && !maybe_cache->IsUndefined(isolate);
  Handle<NormalizedMapCache> cache;
  if (use_cache) cache = Handle<NormalizedMapCache>::cast(maybe_cache);

  Handle<Map> new_map;
  if (use_cache &&
      cache->Get(fast_map, new_elements_kind, mode).ToHandle(&new_map)) {
    // Cache hit: optionally verify the cached map matches a freshly built one.
#ifdef VERIFY_HEAP
    if (FLAG_verify_heap) new_map->DictionaryMapVerify(isolate);
#endif
#ifdef ENABLE_SLOW_DCHECKS
    if (FLAG_enable_slow_asserts) {
      // The cached map should match newly created normalized map bit-by-bit,
      // except for the code cache, which can contain some ICs which can be
      // applied to the shared map, dependent code and weak cell cache.
      Handle<Map> fresh = Map::CopyNormalized(isolate, fast_map, mode);
      fresh->set_elements_kind(new_elements_kind);

      STATIC_ASSERT(Map::kPrototypeValidityCellOffset ==
                    Map::kDependentCodeOffset + kTaggedSize);
      // Compare the raw bytes before bit_field3 ...
      DCHECK_EQ(0, memcmp(reinterpret_cast<void*>(fresh->address()),
                          reinterpret_cast<void*>(new_map->address()),
                          Map::kBitField3Offset));
      // The IsInRetainedMapListBit might be different if the {new_map}
      // that we got from the {cache} was already embedded into optimized
      // code somewhere.
      // The IsMigrationTargetBit might be different if the {new_map} from
      // {cache} has already been marked as a migration target.
      constexpr int ignored_bit_field3_bits =
          Bits3::IsInRetainedMapListBit::kMask |
          Bits3::IsMigrationTargetBit::kMask;
      // ... then bit_field3 itself, masking the bits that may legitimately
      // differ ...
      DCHECK_EQ(fresh->bit_field3() & ~ignored_bit_field3_bits,
                new_map->bit_field3() & ~ignored_bit_field3_bits);
      // ... then the bytes between bit_field3 and the dependent code field.
      int offset = Map::kBitField3Offset + kInt32Size;
      DCHECK_EQ(0, memcmp(reinterpret_cast<void*>(fresh->address() + offset),
                          reinterpret_cast<void*>(new_map->address() + offset),
                          Map::kDependentCodeOffset - offset));
      offset = Map::kPrototypeValidityCellOffset + kTaggedSize;
      if (new_map->is_prototype_map()) {
        // For prototype maps, the PrototypeInfo is not copied.
        STATIC_ASSERT(Map::kTransitionsOrPrototypeInfoOffset ==
                      Map::kPrototypeValidityCellOffset + kTaggedSize);
        offset = kTransitionsOrPrototypeInfoOffset + kTaggedSize;
        DCHECK_EQ(fresh->raw_transitions(),
                  MaybeObject::FromObject(Smi::zero()));
      }
      // Finally compare the remaining tail of the map.
      DCHECK_EQ(0, memcmp(reinterpret_cast<void*>(fresh->address() + offset),
                          reinterpret_cast<void*>(new_map->address() + offset),
                          Map::kSize - offset));
    }
#endif
  } else {
    // Cache miss (or cache unusable): build the normalized map and cache it.
    new_map = Map::CopyNormalized(isolate, fast_map, mode);
    new_map->set_elements_kind(new_elements_kind);
    if (use_cache) {
      cache->Set(fast_map, new_map);
      isolate->counters()->maps_normalized()->Increment();
    }
  }
  if (FLAG_trace_maps) {
    LOG(isolate, MapEvent("Normalize", fast_map, new_map, reason));
  }
  fast_map->NotifyLeafMapLayoutChange(isolate);
  return new_map;
}
1548
CopyNormalized(Isolate * isolate,Handle<Map> map,PropertyNormalizationMode mode)1549 Handle<Map> Map::CopyNormalized(Isolate* isolate, Handle<Map> map,
1550 PropertyNormalizationMode mode) {
1551 int new_instance_size = map->instance_size();
1552 if (mode == CLEAR_INOBJECT_PROPERTIES) {
1553 new_instance_size -= map->GetInObjectProperties() * kTaggedSize;
1554 }
1555
1556 Handle<Map> result = RawCopy(
1557 isolate, map, new_instance_size,
1558 mode == CLEAR_INOBJECT_PROPERTIES ? 0 : map->GetInObjectProperties());
1559 // Clear the unused_property_fields explicitly as this field should not
1560 // be accessed for normalized maps.
1561 result->SetInObjectUnusedPropertyFields(0);
1562 result->set_is_dictionary_map(true);
1563 result->set_is_migration_target(false);
1564 result->set_may_have_interesting_symbols(true);
1565 result->set_construction_counter(kNoSlackTracking);
1566
1567 #ifdef VERIFY_HEAP
1568 if (FLAG_verify_heap) result->DictionaryMapVerify(isolate);
1569 #endif
1570
1571 return result;
1572 }
1573
1574 // Return an immutable prototype exotic object version of the input map.
1575 // Never even try to cache it in the transition tree, as it is intended
1576 // for the global object and its prototype chain, and excluding it saves
1577 // memory on the map transition tree.
1578
1579 // static
TransitionToImmutableProto(Isolate * isolate,Handle<Map> map)1580 Handle<Map> Map::TransitionToImmutableProto(Isolate* isolate, Handle<Map> map) {
1581 Handle<Map> new_map = Map::Copy(isolate, map, "ImmutablePrototype");
1582 new_map->set_is_immutable_proto(true);
1583 return new_map;
1584 }
1585
1586 namespace {
// Debug-only validation that |map| is a function's initial map (or one of the
// known exceptions) and that its descriptor array contains exactly its own
// descriptors. No-op in release builds except for the final DCHECK_EQ.
void EnsureInitialMap(Isolate* isolate, Handle<Map> map) {
#ifdef DEBUG
  Object maybe_constructor = map->GetConstructor();
  DCHECK((maybe_constructor.IsJSFunction() &&
          *map == JSFunction::cast(maybe_constructor).initial_map()) ||
         // Below are the exceptions to the check above.
         // Strict function maps have Function as a constructor but the
         // Function's initial map is a sloppy function map.
         *map == *isolate->strict_function_map() ||
         *map == *isolate->strict_function_with_name_map() ||
         // Same holds for GeneratorFunction and its initial map.
         *map == *isolate->generator_function_map() ||
         *map == *isolate->generator_function_with_name_map() ||
         *map == *isolate->generator_function_with_home_object_map() ||
         *map == *isolate->generator_function_with_name_and_home_object_map() ||
         // AsyncFunction has Null as a constructor.
         *map == *isolate->async_function_map() ||
         *map == *isolate->async_function_with_name_map() ||
         *map == *isolate->async_function_with_home_object_map() ||
         *map == *isolate->async_function_with_name_and_home_object_map());
#endif
  // Initial maps must not contain descriptors in the descriptors array
  // that do not belong to the map.
  DCHECK_EQ(map->NumberOfOwnDescriptors(),
            map->instance_descriptors(kRelaxedLoad).number_of_descriptors());
}
1613 } // namespace
1614
1615 // static
// Normalized copy of a function's initial map; |map| is validated as an
// initial map in debug builds before delegating to CopyNormalized().
Handle<Map> Map::CopyInitialMapNormalized(Isolate* isolate, Handle<Map> map,
                                          PropertyNormalizationMode mode) {
  EnsureInitialMap(isolate, map);
  return CopyNormalized(isolate, map, mode);
}
1621
1622 // static
// Copies a function's initial map with the given size/field parameters. The
// copy shares |map|'s descriptor array without taking ownership of it.
Handle<Map> Map::CopyInitialMap(Isolate* isolate, Handle<Map> map,
                                int instance_size, int inobject_properties,
                                int unused_property_fields) {
  EnsureInitialMap(isolate, map);

  Handle<Map> result =
      RawCopy(isolate, map, instance_size, inobject_properties);

  // Please note instance_type and instance_size are set when allocated.
  result->SetInObjectUnusedPropertyFields(unused_property_fields);

  int number_of_own_descriptors = map->NumberOfOwnDescriptors();
  if (number_of_own_descriptors > 0) {
    // The copy will use the same descriptors array without ownership.
    DescriptorArray descriptors = map->instance_descriptors(kRelaxedLoad);
    result->set_owns_descriptors(false);
    result->UpdateDescriptors(isolate, descriptors, map->GetLayoutDescriptor(),
                              number_of_own_descriptors);

    // Field accounting must be consistent on the copy.
    DCHECK_EQ(result->NumberOfFields(),
              result->GetInObjectProperties() - result->UnusedPropertyFields());
  }

  return result;
}
1648
CopyDropDescriptors(Isolate * isolate,Handle<Map> map)1649 Handle<Map> Map::CopyDropDescriptors(Isolate* isolate, Handle<Map> map) {
1650 Handle<Map> result =
1651 RawCopy(isolate, map, map->instance_size(),
1652 map->IsJSObjectMap() ? map->GetInObjectProperties() : 0);
1653
1654 // Please note instance_type and instance_size are set when allocated.
1655 if (map->IsJSObjectMap()) {
1656 result->CopyUnusedPropertyFields(*map);
1657 }
1658 map->NotifyLeafMapLayoutChange(isolate);
1659 return result;
1660 }
1661
// Copies |map| and appends |descriptor| to the shared descriptor array,
// growing the array first if it has no slack. The child map is connected to
// |map| with a simple property transition.
Handle<Map> Map::ShareDescriptor(Isolate* isolate, Handle<Map> map,
                                 Handle<DescriptorArray> descriptors,
                                 Descriptor* descriptor) {
  // Sanity check. This path is only to be taken if the map owns its descriptor
  // array, implying that its NumberOfOwnDescriptors equals the number of
  // descriptors in the descriptor array.
  DCHECK_EQ(map->NumberOfOwnDescriptors(),
            map->instance_descriptors(kRelaxedLoad).number_of_descriptors());

  Handle<Map> result = CopyDropDescriptors(isolate, map);
  Handle<Name> name = descriptor->GetKey();

  // Properly mark the {result} if the {name} is an "interesting symbol".
  if (name->IsInterestingSymbol()) {
    result->set_may_have_interesting_symbols(true);
  }

  // Ensure there's space for the new descriptor in the shared descriptor array.
  if (descriptors->number_of_slack_descriptors() == 0) {
    int old_size = descriptors->number_of_descriptors();
    if (old_size == 0) {
      descriptors = DescriptorArray::Allocate(isolate, 0, 1);
    } else {
      int slack = SlackForArraySize(old_size, kMaxNumberOfDescriptors);
      EnsureDescriptorSlack(isolate, map, slack);
      // EnsureDescriptorSlack may have installed a new descriptor array on
      // |map|; re-read it.
      descriptors = handle(map->instance_descriptors(kRelaxedLoad), isolate);
    }
  }

  Handle<LayoutDescriptor> layout_descriptor =
      FLAG_unbox_double_fields
          ? LayoutDescriptor::ShareAppend(isolate, map,
                                          descriptor->GetDetails())
          : handle(LayoutDescriptor::FastPointerLayout(), isolate);

  {
    // Appending to the descriptor array and installing it on the new map must
    // happen without a GC in between.
    DisallowHeapAllocation no_gc;
    descriptors->Append(descriptor);
    result->InitializeDescriptors(isolate, *descriptors, *layout_descriptor);
  }

  DCHECK(result->NumberOfOwnDescriptors() == map->NumberOfOwnDescriptors() + 1);
  ConnectTransition(isolate, map, result, name, SIMPLE_PROPERTY_TRANSITION);

  return result;
}
1708
// Records |child| as a transition from |parent| under |name| (detached maps
// only log the transition instead of inserting it into the transition tree).
void Map::ConnectTransition(Isolate* isolate, Handle<Map> parent,
                            Handle<Map> child, Handle<Name> name,
                            SimpleTransitionFlag flag) {
  DCHECK_IMPLIES(name->IsInterestingSymbol(),
                 child->may_have_interesting_symbols());
  DCHECK_IMPLIES(parent->may_have_interesting_symbols(),
                 child->may_have_interesting_symbols());
  if (!parent->GetBackPointer().IsUndefined(isolate)) {
    // Non-initial maps lose exclusive ownership of their descriptor array
    // once they gain a transition, since descendants share the array.
    parent->set_owns_descriptors(false);
  } else if (!parent->IsDetached(isolate)) {
    // |parent| is initial map and it must not contain descriptors in the
    // descriptors array that do not belong to the map.
    DCHECK_EQ(
        parent->NumberOfOwnDescriptors(),
        parent->instance_descriptors(kRelaxedLoad).number_of_descriptors());
  }
  if (parent->IsDetached(isolate)) {
    // Detached parents never get entries in the transition tree; only trace.
    DCHECK(child->IsDetached(isolate));
    if (FLAG_trace_maps) {
      LOG(isolate, MapEvent("Transition", parent, child, "prototype", name));
    }
  } else {
    TransitionsAccessor(isolate, parent).Insert(name, child, flag);
    if (FLAG_trace_maps) {
      LOG(isolate, MapEvent("Transition", parent, child, "", name));
    }
  }
}
1737
// Copies |map| and installs |descriptors|/|layout_descriptor| on the copy.
// When |flag| requests it and the transition tree has room, the copy is
// connected to |map| under |maybe_name|; otherwise the copy stays
// free-floating and all its fields are generalized.
Handle<Map> Map::CopyReplaceDescriptors(
    Isolate* isolate, Handle<Map> map, Handle<DescriptorArray> descriptors,
    Handle<LayoutDescriptor> layout_descriptor, TransitionFlag flag,
    MaybeHandle<Name> maybe_name, const char* reason,
    SimpleTransitionFlag simple_flag) {
  DCHECK(descriptors->IsSortedNoDuplicates());

  Handle<Map> result = CopyDropDescriptors(isolate, map);

  // Properly mark the {result} if the {name} is an "interesting symbol".
  Handle<Name> name;
  if (maybe_name.ToHandle(&name) && name->IsInterestingSymbol()) {
    result->set_may_have_interesting_symbols(true);
  }

  if (map->is_prototype_map()) {
    // Prototype maps never get transitions inserted for them here.
    result->InitializeDescriptors(isolate, *descriptors, *layout_descriptor);
  } else {
    if (flag == INSERT_TRANSITION &&
        TransitionsAccessor(isolate, map).CanHaveMoreTransitions()) {
      result->InitializeDescriptors(isolate, *descriptors, *layout_descriptor);

      DCHECK(!maybe_name.is_null());
      ConnectTransition(isolate, map, result, name, simple_flag);
    } else {
      // Free-floating copy: generalize the fields so the descriptors do not
      // constrain future stores.
      descriptors->GeneralizeAllFields();
      result->InitializeDescriptors(isolate, *descriptors,
                                    LayoutDescriptor::FastPointerLayout());
    }
  }
  if (FLAG_trace_maps &&
      // Mirror conditions above that did not call ConnectTransition().
      (map->IsDetached(isolate) ||
       !(flag == INSERT_TRANSITION &&
         TransitionsAccessor(isolate, map).CanHaveMoreTransitions()))) {
    LOG(isolate, MapEvent("ReplaceDescriptors", map, result, reason,
                          maybe_name.is_null() ? Handle<HeapObject>() : name));
  }
  return result;
}
1778
// Creates transition tree starting from |split_map| and adding all descriptors
// starting from descriptor with index |split_map|.NumberOfOwnDescriptors().
// The way it is done is tricky because of GC and special descriptors
// marking logic.
Handle<Map> Map::AddMissingTransitions(
    Isolate* isolate, Handle<Map> split_map,
    Handle<DescriptorArray> descriptors,
    Handle<LayoutDescriptor> full_layout_descriptor) {
  DCHECK(descriptors->IsSortedNoDuplicates());
  int split_nof = split_map->NumberOfOwnDescriptors();
  int nof_descriptors = descriptors->number_of_descriptors();
  // There must be at least one descriptor to add.
  DCHECK_LT(split_nof, nof_descriptors);

  // Start with creating last map which will own full descriptors array.
  // This is necessary to guarantee that GC will mark the whole descriptor
  // array if any of the allocations happening below fail.
  // Number of unused properties is temporarily incorrect and the layout
  // descriptor could unnecessarily be in slow mode but we will fix after
  // all the other intermediate maps are created.
  // Also the last map might have interesting symbols, we temporarily set
  // the flag and clear it right before the descriptors are installed. This
  // makes heap verification happy and ensures the flag ends up accurate.
  Handle<Map> last_map = CopyDropDescriptors(isolate, split_map);
  last_map->InitializeDescriptors(isolate, *descriptors,
                                  *full_layout_descriptor);
  last_map->SetInObjectUnusedPropertyFields(0);
  last_map->set_may_have_interesting_symbols(true);

  // During creation of intermediate maps we violate descriptors sharing
  // invariant since the last map is not yet connected to the transition tree
  // we create here. But it is safe because GC never trims map's descriptors
  // if there are no dead transitions from that map and this is exactly the
  // case for all the intermediate maps we create here.
  Handle<Map> map = split_map;
  for (InternalIndex i : InternalIndex::Range(split_nof, nof_descriptors - 1)) {
    Handle<Map> new_map = CopyDropDescriptors(isolate, map);
    InstallDescriptors(isolate, map, new_map, i, descriptors,
                       full_layout_descriptor);

    map = new_map;
  }
  map->NotifyLeafMapLayoutChange(isolate);
  // Clear the temporary flag (see above) before the final install computes
  // the accurate value.
  last_map->set_may_have_interesting_symbols(false);
  InstallDescriptors(isolate, map, last_map, InternalIndex(nof_descriptors - 1),
                     descriptors, full_layout_descriptor);
  return last_map;
}
1826
// Since this method is used to rewrite an existing transition tree, it can
// always insert transitions without checking.
// Installs |descriptors| on |child| up to and including |new_descriptor|,
// updates the child's field accounting and layout descriptor, and connects
// |child| to |parent| with a simple property transition.
void Map::InstallDescriptors(Isolate* isolate, Handle<Map> parent,
                             Handle<Map> child, InternalIndex new_descriptor,
                             Handle<DescriptorArray> descriptors,
                             Handle<LayoutDescriptor> full_layout_descriptor) {
  DCHECK(descriptors->IsSortedNoDuplicates());

  child->SetInstanceDescriptors(isolate, *descriptors,
                                new_descriptor.as_int() + 1);
  child->CopyUnusedPropertyFields(*parent);
  PropertyDetails details = descriptors->GetDetails(new_descriptor);
  if (details.location() == kField) {
    // The newly installed descriptor consumes one property field.
    child->AccountAddedPropertyField();
  }

  if (FLAG_unbox_double_fields) {
    Handle<LayoutDescriptor> layout_descriptor =
        LayoutDescriptor::AppendIfFastOrUseFull(isolate, parent, details,
                                                full_layout_descriptor);
    child->set_layout_descriptor(*layout_descriptor, kReleaseStore);
#ifdef VERIFY_HEAP
    // TODO(ishell): remove these checks from VERIFY_HEAP mode.
    if (FLAG_verify_heap) {
      CHECK(child->layout_descriptor(kAcquireLoad).IsConsistentWithMap(*child));
    }
#else
    SLOW_DCHECK(
        child->layout_descriptor(kAcquireLoad).IsConsistentWithMap(*child));
#endif
    // The layout descriptor affects how the object is visited by the GC.
    child->set_visitor_id(Map::GetVisitorId(*child));
  }

  Handle<Name> name = handle(descriptors->GetKey(new_descriptor), isolate);
  if (parent->may_have_interesting_symbols() || name->IsInterestingSymbol()) {
    child->set_may_have_interesting_symbols(true);
  }
  ConnectTransition(isolate, parent, child, name, SIMPLE_PROPERTY_TRANSITION);
}
1866
// Copies |map| with a new elements |kind|. When |flag| allows it and no
// elements transition exists yet, the copy is connected to |map| via the
// elements transition symbol; otherwise a free-floating copy is returned.
Handle<Map> Map::CopyAsElementsKind(Isolate* isolate, Handle<Map> map,
                                    ElementsKind kind, TransitionFlag flag) {
  // Only certain objects are allowed to have non-terminal fast transitional
  // elements kinds.
  DCHECK(map->IsJSObjectMap());
  DCHECK_IMPLIES(
      !map->CanHaveFastTransitionableElementsKind(),
      IsDictionaryElementsKind(kind) || IsTerminalElementsKind(kind));

  Map maybe_elements_transition_map;
  if (flag == INSERT_TRANSITION) {
    // Ensure we are requested to add elements kind transition "near the root".
    DCHECK_EQ(map->FindRootMap(isolate).NumberOfOwnDescriptors(),
              map->NumberOfOwnDescriptors());

    maybe_elements_transition_map = map->ElementsTransitionMap(isolate);
    // An existing elements transition is only tolerated if both it and the
    // requested kind are DICTIONARY_ELEMENTS.
    DCHECK(
        maybe_elements_transition_map.is_null() ||
        (maybe_elements_transition_map.elements_kind() == DICTIONARY_ELEMENTS &&
         kind == DICTIONARY_ELEMENTS));
    DCHECK(!IsFastElementsKind(kind) ||
           IsMoreGeneralElementsKindTransition(map->elements_kind(), kind));
    DCHECK(kind != map->elements_kind());
  }

  bool insert_transition =
      flag == INSERT_TRANSITION &&
      TransitionsAccessor(isolate, map).CanHaveMoreTransitions() &&
      maybe_elements_transition_map.is_null();

  if (insert_transition) {
    Handle<Map> new_map = CopyForElementsTransition(isolate, map);
    new_map->set_elements_kind(kind);

    Handle<Name> name = isolate->factory()->elements_transition_symbol();
    ConnectTransition(isolate, map, new_map, name, SPECIAL_TRANSITION);
    return new_map;
  }

  // Create a new free-floating map only if we are not allowed to store it.
  Handle<Map> new_map = Copy(isolate, map, "CopyAsElementsKind");
  new_map->set_elements_kind(kind);
  return new_map;
}
1911
// Returns the function map matching |shared_info|'s language mode, derived
// from |initial_map|. Sloppy mode returns |initial_map| itself; strict mode
// results are cached as special transitions on |initial_map|.
Handle<Map> Map::AsLanguageMode(Isolate* isolate, Handle<Map> initial_map,
                                Handle<SharedFunctionInfo> shared_info) {
  DCHECK_EQ(JS_FUNCTION_TYPE, initial_map->instance_type());
  // Initial map for sloppy mode function is stored in the function
  // constructor. Initial maps for strict mode are cached as special transitions
  // using |strict_function_transition_symbol| as a key.
  if (is_sloppy(shared_info->language_mode())) return initial_map;

  Handle<Map> function_map(Map::cast(isolate->native_context()->get(
                               shared_info->function_map_index())),
                           isolate);

  // Besides sloppy, only strict mode remains to be handled below.
  STATIC_ASSERT(LanguageModeSize == 2);
  DCHECK_EQ(LanguageMode::kStrict, shared_info->language_mode());
  Handle<Symbol> transition_symbol =
      isolate->factory()->strict_function_transition_symbol();
  // Fast path: reuse a previously cached strict-mode map.
  Map maybe_transition = TransitionsAccessor(isolate, initial_map)
                             .SearchSpecial(*transition_symbol);
  if (!maybe_transition.is_null()) {
    return handle(maybe_transition, isolate);
  }
  initial_map->NotifyLeafMapLayoutChange(isolate);

  // Create new map taking descriptors from the |function_map| and all
  // the other details from the |initial_map|.
  Handle<Map> map =
      Map::CopyInitialMap(isolate, function_map, initial_map->instance_size(),
                          initial_map->GetInObjectProperties(),
                          initial_map->UnusedPropertyFields());
  map->SetConstructor(initial_map->GetConstructor());
  map->set_prototype(initial_map->prototype());
  map->set_construction_counter(initial_map->construction_counter());

  // Cache the new map as a special transition for future lookups.
  if (TransitionsAccessor(isolate, initial_map).CanHaveMoreTransitions()) {
    Map::ConnectTransition(isolate, initial_map, map, transition_symbol,
                           SPECIAL_TRANSITION);
  }
  return map;
}
1951
// Copies |map| for an elements-kind transition. If |map| owns its descriptor
// array, the array is shared with the copy and ownership is transferred;
// otherwise the descriptors are copied so the new map has its own array.
Handle<Map> Map::CopyForElementsTransition(Isolate* isolate, Handle<Map> map) {
  DCHECK(!map->IsDetached(isolate));
  Handle<Map> new_map = CopyDropDescriptors(isolate, map);

  if (map->owns_descriptors()) {
    // In case the map owned its own descriptors, share the descriptors and
    // transfer ownership to the new map.
    // The properties did not change, so reuse descriptors.
    map->set_owns_descriptors(false);
    new_map->InitializeDescriptors(isolate,
                                   map->instance_descriptors(kRelaxedLoad),
                                   map->GetLayoutDescriptor());
  } else {
    // In case the map did not own its own descriptors, a split is forced by
    // copying the map; creating a new descriptor array cell.
    Handle<DescriptorArray> descriptors(map->instance_descriptors(kRelaxedLoad),
                                        isolate);
    int number_of_own_descriptors = map->NumberOfOwnDescriptors();
    Handle<DescriptorArray> new_descriptors = DescriptorArray::CopyUpTo(
        isolate, descriptors, number_of_own_descriptors);
    Handle<LayoutDescriptor> new_layout_descriptor(map->GetLayoutDescriptor(),
                                                   isolate);
    new_map->InitializeDescriptors(isolate, *new_descriptors,
                                   *new_layout_descriptor);
  }
  return new_map;
}
1979
Copy(Isolate * isolate,Handle<Map> map,const char * reason)1980 Handle<Map> Map::Copy(Isolate* isolate, Handle<Map> map, const char* reason) {
1981 Handle<DescriptorArray> descriptors(map->instance_descriptors(kRelaxedLoad),
1982 isolate);
1983 int number_of_own_descriptors = map->NumberOfOwnDescriptors();
1984 Handle<DescriptorArray> new_descriptors = DescriptorArray::CopyUpTo(
1985 isolate, descriptors, number_of_own_descriptors);
1986 Handle<LayoutDescriptor> new_layout_descriptor(map->GetLayoutDescriptor(),
1987 isolate);
1988 return CopyReplaceDescriptors(
1989 isolate, map, new_descriptors, new_layout_descriptor, OMIT_TRANSITION,
1990 MaybeHandle<Name>(), reason, SPECIAL_TRANSITION);
1991 }
1992
Create(Isolate * isolate,int inobject_properties)1993 Handle<Map> Map::Create(Isolate* isolate, int inobject_properties) {
1994 Handle<Map> copy =
1995 Copy(isolate, handle(isolate->object_function()->initial_map(), isolate),
1996 "MapCreate");
1997
1998 // Check that we do not overflow the instance size when adding the extra
1999 // inobject properties. If the instance size overflows, we allocate as many
2000 // properties as we can as inobject properties.
2001 if (inobject_properties > JSObject::kMaxInObjectProperties) {
2002 inobject_properties = JSObject::kMaxInObjectProperties;
2003 }
2004
2005 int new_instance_size =
2006 JSObject::kHeaderSize + kTaggedSize * inobject_properties;
2007
2008 // Adjust the map with the extra inobject properties.
2009 copy->set_instance_size(new_instance_size);
2010 copy->SetInObjectPropertiesStartInWords(JSObject::kHeaderSize / kTaggedSize);
2011 DCHECK_EQ(copy->GetInObjectProperties(), inobject_properties);
2012 copy->SetInObjectUnusedPropertyFields(inobject_properties);
2013 copy->set_visitor_id(Map::GetVisitorId(*copy));
2014 return copy;
2015 }
2016
// Copies |map| for Object.preventExtensions/seal/freeze: adds
// |attrs_to_add| to all descriptors, marks the copy non-extensible, and
// switches the elements kind to the matching non-extensible/sealed/frozen
// (or dictionary) kind.
Handle<Map> Map::CopyForPreventExtensions(
    Isolate* isolate, Handle<Map> map, PropertyAttributes attrs_to_add,
    Handle<Symbol> transition_marker, const char* reason,
    bool old_map_is_dictionary_elements_kind) {
  int num_descriptors = map->NumberOfOwnDescriptors();
  Handle<DescriptorArray> new_desc = DescriptorArray::CopyUpToAddAttributes(
      isolate, handle(map->instance_descriptors(kRelaxedLoad), isolate),
      num_descriptors, attrs_to_add);
  Handle<LayoutDescriptor> new_layout_descriptor(map->GetLayoutDescriptor(),
                                                 isolate);
  // Do not track transitions during bootstrapping.
  TransitionFlag flag =
      isolate->bootstrapper()->IsActive() ? OMIT_TRANSITION : INSERT_TRANSITION;
  Handle<Map> new_map = CopyReplaceDescriptors(
      isolate, map, new_desc, new_layout_descriptor, flag, transition_marker,
      reason, SPECIAL_TRANSITION);
  new_map->set_is_extensible(false);
  // Typed arrays keep their elements kind; everything else moves to a
  // slower/stricter kind below.
  if (!IsTypedArrayElementsKind(map->elements_kind())) {
    // Default fallback when no special sealed/frozen kind applies.
    ElementsKind new_kind = IsStringWrapperElementsKind(map->elements_kind())
                                ? SLOW_STRING_WRAPPER_ELEMENTS
                                : DICTIONARY_ELEMENTS;
    if (FLAG_enable_sealed_frozen_elements_kind &&
        !old_map_is_dictionary_elements_kind) {
      // Map the current fast kind to its nonextensible/sealed/frozen
      // counterpart depending on the attributes being added.
      switch (map->elements_kind()) {
        case PACKED_ELEMENTS:
          if (attrs_to_add == SEALED) {
            new_kind = PACKED_SEALED_ELEMENTS;
          } else if (attrs_to_add == FROZEN) {
            new_kind = PACKED_FROZEN_ELEMENTS;
          } else {
            new_kind = PACKED_NONEXTENSIBLE_ELEMENTS;
          }
          break;
        case PACKED_NONEXTENSIBLE_ELEMENTS:
          if (attrs_to_add == SEALED) {
            new_kind = PACKED_SEALED_ELEMENTS;
          } else if (attrs_to_add == FROZEN) {
            new_kind = PACKED_FROZEN_ELEMENTS;
          }
          break;
        case PACKED_SEALED_ELEMENTS:
          if (attrs_to_add == FROZEN) {
            new_kind = PACKED_FROZEN_ELEMENTS;
          }
          break;
        case HOLEY_ELEMENTS:
          if (attrs_to_add == SEALED) {
            new_kind = HOLEY_SEALED_ELEMENTS;
          } else if (attrs_to_add == FROZEN) {
            new_kind = HOLEY_FROZEN_ELEMENTS;
          } else {
            new_kind = HOLEY_NONEXTENSIBLE_ELEMENTS;
          }
          break;
        case HOLEY_NONEXTENSIBLE_ELEMENTS:
          if (attrs_to_add == SEALED) {
            new_kind = HOLEY_SEALED_ELEMENTS;
          } else if (attrs_to_add == FROZEN) {
            new_kind = HOLEY_FROZEN_ELEMENTS;
          }
          break;
        case HOLEY_SEALED_ELEMENTS:
          if (attrs_to_add == FROZEN) {
            new_kind = HOLEY_FROZEN_ELEMENTS;
          }
          break;
        default:
          break;
      }
    }
    new_map->set_elements_kind(new_kind);
  }
  return new_map;
}
2091
2092 namespace {
2093
CanHoldValue(DescriptorArray descriptors,InternalIndex descriptor,PropertyConstness constness,Object value)2094 bool CanHoldValue(DescriptorArray descriptors, InternalIndex descriptor,
2095 PropertyConstness constness, Object value) {
2096 PropertyDetails details = descriptors.GetDetails(descriptor);
2097 if (details.location() == kField) {
2098 if (details.kind() == kData) {
2099 return IsGeneralizableTo(constness, details.constness()) &&
2100 value.FitsRepresentation(details.representation()) &&
2101 descriptors.GetFieldType(descriptor).NowContains(value);
2102 } else {
2103 DCHECK_EQ(kAccessor, details.kind());
2104 return false;
2105 }
2106
2107 } else {
2108 DCHECK_EQ(kDescriptor, details.location());
2109 DCHECK_EQ(PropertyConstness::kConst, details.constness());
2110 DCHECK_EQ(kAccessor, details.kind());
2111 return false;
2112 }
2113 UNREACHABLE();
2114 }
2115
UpdateDescriptorForValue(Isolate * isolate,Handle<Map> map,InternalIndex descriptor,PropertyConstness constness,Handle<Object> value)2116 Handle<Map> UpdateDescriptorForValue(Isolate* isolate, Handle<Map> map,
2117 InternalIndex descriptor,
2118 PropertyConstness constness,
2119 Handle<Object> value) {
2120 if (CanHoldValue(map->instance_descriptors(kRelaxedLoad), descriptor,
2121 constness, *value)) {
2122 return map;
2123 }
2124
2125 PropertyAttributes attributes = map->instance_descriptors(kRelaxedLoad)
2126 .GetDetails(descriptor)
2127 .attributes();
2128 Representation representation = value->OptimalRepresentation(isolate);
2129 Handle<FieldType> type = value->OptimalType(isolate, representation);
2130
2131 MapUpdater mu(isolate, map);
2132 return mu.ReconfigureToDataField(descriptor, attributes, constness,
2133 representation, type);
2134 }
2135
2136 } // namespace
2137
// static
// Returns a map (possibly |map| itself) whose |descriptor| can hold |value|
// with the given |constness|, migrating to the newest map version first.
Handle<Map> Map::PrepareForDataProperty(Isolate* isolate, Handle<Map> map,
                                        InternalIndex descriptor,
                                        PropertyConstness constness,
                                        Handle<Object> value) {
  // Update to the newest map before storing the property.
  map = Update(isolate, map);
  // Dictionaries can store any property value.
  DCHECK(!map->is_dictionary_map());
  return UpdateDescriptorForValue(isolate, map, descriptor, constness, value);
}
2149
// Returns the map to use after adding data property |name| with |value| and
// |attributes| to an object with map |map|: an existing transition target if
// one matches, a new fast map with an added field, or a normalized
// (dictionary) map when fast properties run out.
Handle<Map> Map::TransitionToDataProperty(Isolate* isolate, Handle<Map> map,
                                          Handle<Name> name,
                                          Handle<Object> value,
                                          PropertyAttributes attributes,
                                          PropertyConstness constness,
                                          StoreOrigin store_origin) {
  RuntimeCallTimerScope stats_scope(
      isolate,
      map->IsDetached(isolate)
          ? RuntimeCallCounterId::kPrototypeMap_TransitionToDataProperty
          : RuntimeCallCounterId::kMap_TransitionToDataProperty);

  DCHECK(name->IsUniqueName());
  DCHECK(!map->is_dictionary_map());

  // Migrate to the newest map before storing the property.
  map = Update(isolate, map);

  // Fast path: reuse an existing transition for this name/kind/attributes.
  Map maybe_transition = TransitionsAccessor(isolate, map)
                             .SearchTransition(*name, kData, attributes);
  if (!maybe_transition.is_null()) {
    Handle<Map> transition(maybe_transition, isolate);
    InternalIndex descriptor = transition->LastAdded();

    DCHECK_EQ(attributes, transition->instance_descriptors(kRelaxedLoad)
                              .GetDetails(descriptor)
                              .attributes());

    return UpdateDescriptorForValue(isolate, transition, descriptor, constness,
                                    value);
  }

  // Do not track transitions during bootstrapping.
  TransitionFlag flag =
      isolate->bootstrapper()->IsActive() ? OMIT_TRANSITION : INSERT_TRANSITION;
  MaybeHandle<Map> maybe_map;
  if (!map->TooManyFastProperties(store_origin)) {
    Representation representation = value->OptimalRepresentation(isolate);
    Handle<FieldType> type = value->OptimalType(isolate, representation);
    maybe_map = Map::CopyWithField(isolate, map, name, type, attributes,
                                   constness, representation, flag);
  }

  Handle<Map> result;
  if (!maybe_map.ToHandle(&result)) {
    // Could not stay fast; normalize to dictionary mode.
    const char* reason = "TooManyFastProperties";
#if V8_TRACE_MAPS
    // Include the property name in the trace reason when map tracing is on.
    std::unique_ptr<ScopedVector<char>> buffer;
    if (FLAG_trace_maps) {
      ScopedVector<char> name_buffer(100);
      name->NameShortPrint(name_buffer);
      buffer.reset(new ScopedVector<char>(128));
      SNPrintF(*buffer, "TooManyFastProperties %s", name_buffer.begin());
      reason = buffer->begin();
    }
#endif
    Handle<Object> maybe_constructor(map->GetConstructor(), isolate);
    if (FLAG_feedback_normalization && map->new_target_is_base() &&
        maybe_constructor->IsJSFunction() &&
        !JSFunction::cast(*maybe_constructor).shared().native()) {
      // Feedback normalization: normalize the constructor's initial map so
      // future instances start out dictionary-mode.
      Handle<JSFunction> constructor =
          Handle<JSFunction>::cast(maybe_constructor);
      DCHECK_NE(*constructor,
                constructor->context().native_context().object_function());
      Handle<Map> initial_map(constructor->initial_map(), isolate);
      result = Map::Normalize(isolate, initial_map, CLEAR_INOBJECT_PROPERTIES,
                              reason);
      initial_map->DeprecateTransitionTree(isolate);
      Handle<HeapObject> prototype(result->prototype(), isolate);
      JSFunction::SetInitialMap(constructor, result, prototype);

      // Deoptimize all code that embeds the previous initial map.
      initial_map->dependent_code().DeoptimizeDependentCodeGroup(
          DependentCode::kInitialMapChangedGroup);
      if (!result->EquivalentToForNormalization(*map,
                                                CLEAR_INOBJECT_PROPERTIES)) {
        // The normalized initial map does not fit this object; normalize the
        // object's own map instead.
        result =
            Map::Normalize(isolate, map, CLEAR_INOBJECT_PROPERTIES, reason);
      }
    } else {
      result = Map::Normalize(isolate, map, CLEAR_INOBJECT_PROPERTIES, reason);
    }
  }

  return result;
}
2236
ReconfigureExistingProperty(Isolate * isolate,Handle<Map> map,InternalIndex descriptor,PropertyKind kind,PropertyAttributes attributes,PropertyConstness constness)2237 Handle<Map> Map::ReconfigureExistingProperty(Isolate* isolate, Handle<Map> map,
2238 InternalIndex descriptor,
2239 PropertyKind kind,
2240 PropertyAttributes attributes,
2241 PropertyConstness constness) {
2242 // Dictionaries have to be reconfigured in-place.
2243 DCHECK(!map->is_dictionary_map());
2244
2245 if (!map->GetBackPointer().IsMap()) {
2246 // There is no benefit from reconstructing transition tree for maps without
2247 // back pointers, normalize and try to hit the map cache instead.
2248 return Map::Normalize(isolate, map, CLEAR_INOBJECT_PROPERTIES,
2249 "Normalize_AttributesMismatchProtoMap");
2250 }
2251
2252 if (FLAG_trace_generalization) {
2253 map->PrintReconfiguration(isolate, stdout, descriptor, kind, attributes);
2254 }
2255
2256 MapUpdater mu(isolate, map);
2257 DCHECK_EQ(kData, kind); // Only kData case is supported so far.
2258 Handle<Map> new_map = mu.ReconfigureToDataField(
2259 descriptor, attributes, constness, Representation::None(),
2260 FieldType::None(isolate));
2261 return new_map;
2262 }
2263
// Returns the map to use after defining accessor property |name| with the
// given getter/setter and |attributes|: an existing transition target, the
// map itself when the pair already matches, a new map with an inserted
// accessor descriptor, or a normalized map for all incompatible cases.
Handle<Map> Map::TransitionToAccessorProperty(Isolate* isolate, Handle<Map> map,
                                              Handle<Name> name,
                                              InternalIndex descriptor,
                                              Handle<Object> getter,
                                              Handle<Object> setter,
                                              PropertyAttributes attributes) {
  RuntimeCallTimerScope stats_scope(
      isolate,
      map->IsDetached(isolate)
          ? RuntimeCallCounterId::kPrototypeMap_TransitionToAccessorProperty
          : RuntimeCallCounterId::kMap_TransitionToAccessorProperty);

  // At least one of the accessors needs to be a new value.
  DCHECK(!getter->IsNull(isolate) || !setter->IsNull(isolate));
  DCHECK(name->IsUniqueName());

  // Migrate to the newest map before transitioning to the new property.
  map = Update(isolate, map);

  // Dictionary maps can always have additional data properties.
  if (map->is_dictionary_map()) return map;

  // Prototype maps keep their in-object properties on normalization.
  PropertyNormalizationMode mode = map->is_prototype_map()
                                       ? KEEP_INOBJECT_PROPERTIES
                                       : CLEAR_INOBJECT_PROPERTIES;

  // Fast path: reuse an existing accessor transition when the stored pair
  // matches the requested getter/setter exactly.
  Map maybe_transition = TransitionsAccessor(isolate, map)
                             .SearchTransition(*name, kAccessor, attributes);
  if (!maybe_transition.is_null()) {
    Handle<Map> transition(maybe_transition, isolate);
    DescriptorArray descriptors =
        transition->instance_descriptors(kRelaxedLoad);
    // NOTE: shadows the |descriptor| parameter; refers to the transition's
    // last added descriptor.
    InternalIndex descriptor = transition->LastAdded();
    DCHECK(descriptors.GetKey(descriptor).Equals(*name));

    DCHECK_EQ(kAccessor, descriptors.GetDetails(descriptor).kind());
    DCHECK_EQ(attributes, descriptors.GetDetails(descriptor).attributes());

    Handle<Object> maybe_pair(descriptors.GetStrongValue(descriptor), isolate);
    if (!maybe_pair->IsAccessorPair()) {
      return Map::Normalize(isolate, map, mode,
                            "TransitionToAccessorFromNonPair");
    }

    Handle<AccessorPair> pair = Handle<AccessorPair>::cast(maybe_pair);
    if (!pair->Equals(*getter, *setter)) {
      return Map::Normalize(isolate, map, mode,
                            "TransitionToDifferentAccessor");
    }

    return transition;
  }

  Handle<AccessorPair> pair;
  DescriptorArray old_descriptors = map->instance_descriptors(kRelaxedLoad);
  if (descriptor.is_found()) {
    // Redefining an existing property: only the last-added accessor with
    // matching attributes can be extended in place.
    if (descriptor != map->LastAdded()) {
      return Map::Normalize(isolate, map, mode, "AccessorsOverwritingNonLast");
    }
    PropertyDetails old_details = old_descriptors.GetDetails(descriptor);
    if (old_details.kind() != kAccessor) {
      return Map::Normalize(isolate, map, mode,
                            "AccessorsOverwritingNonAccessors");
    }

    if (old_details.attributes() != attributes) {
      return Map::Normalize(isolate, map, mode, "AccessorsWithAttributes");
    }

    Handle<Object> maybe_pair(old_descriptors.GetStrongValue(descriptor),
                              isolate);
    if (!maybe_pair->IsAccessorPair()) {
      return Map::Normalize(isolate, map, mode, "AccessorsOverwritingNonPair");
    }

    Handle<AccessorPair> current_pair = Handle<AccessorPair>::cast(maybe_pair);
    if (current_pair->Equals(*getter, *setter)) return map;

    // Replacing a non-null accessor component with a different non-null one
    // forces normalization.
    bool overwriting_accessor = false;
    if (!getter->IsNull(isolate) &&
        !current_pair->get(ACCESSOR_GETTER).IsNull(isolate) &&
        current_pair->get(ACCESSOR_GETTER) != *getter) {
      overwriting_accessor = true;
    }
    if (!setter->IsNull(isolate) &&
        !current_pair->get(ACCESSOR_SETTER).IsNull(isolate) &&
        current_pair->get(ACCESSOR_SETTER) != *setter) {
      overwriting_accessor = true;
    }
    if (overwriting_accessor) {
      return Map::Normalize(isolate, map, mode,
                            "AccessorsOverwritingAccessors");
    }

    pair = AccessorPair::Copy(isolate, Handle<AccessorPair>::cast(maybe_pair));
  } else if (map->NumberOfOwnDescriptors() >= kMaxNumberOfDescriptors ||
             map->TooManyFastProperties(StoreOrigin::kNamed)) {
    return Map::Normalize(isolate, map, CLEAR_INOBJECT_PROPERTIES,
                          "TooManyAccessors");
  } else {
    pair = isolate->factory()->NewAccessorPair();
  }

  pair->SetComponents(*getter, *setter);

  // Do not track transitions during bootstrapping.
  TransitionFlag flag =
      isolate->bootstrapper()->IsActive() ? OMIT_TRANSITION : INSERT_TRANSITION;
  Descriptor d = Descriptor::AccessorConstant(name, pair, attributes);
  return Map::CopyInsertDescriptor(isolate, map, &d, flag);
}
2375
// Copies |map| and appends |descriptor| as a new last descriptor. When the
// map owns its descriptor array, has a back pointer (i.e. is not an initial
// map) and can still record transitions, the descriptor array itself can be
// shared with the new map instead of being copied.
Handle<Map> Map::CopyAddDescriptor(Isolate* isolate, Handle<Map> map,
                                   Descriptor* descriptor,
                                   TransitionFlag flag) {
  Handle<DescriptorArray> descriptors(map->instance_descriptors(kRelaxedLoad),
                                      isolate);

  // Share descriptors only if map owns descriptors and it is not an initial
  // map (initial maps are recognized by an undefined back pointer).
  if (flag == INSERT_TRANSITION && map->owns_descriptors() &&
      !map->GetBackPointer().IsUndefined(isolate) &&
      TransitionsAccessor(isolate, map).CanHaveMoreTransitions()) {
    return ShareDescriptor(isolate, map, descriptors, descriptor);
  }

  // Otherwise copy the first |nof| descriptors with one extra slot and append
  // the new descriptor into that slot.
  int nof = map->NumberOfOwnDescriptors();
  Handle<DescriptorArray> new_descriptors =
      DescriptorArray::CopyUpTo(isolate, descriptors, nof, 1);
  new_descriptors->Append(descriptor);

  // A real layout descriptor is only needed when double fields are unboxed
  // in-object; otherwise the shared fast-pointer layout suffices.
  Handle<LayoutDescriptor> new_layout_descriptor =
      FLAG_unbox_double_fields
          ? LayoutDescriptor::New(isolate, map, new_descriptors, nof + 1)
          : handle(LayoutDescriptor::FastPointerLayout(), isolate);

  return CopyReplaceDescriptors(
      isolate, map, new_descriptors, new_layout_descriptor, flag,
      descriptor->GetKey(), "CopyAddDescriptor", SIMPLE_PROPERTY_TRANSITION);
}
2403
CopyInsertDescriptor(Isolate * isolate,Handle<Map> map,Descriptor * descriptor,TransitionFlag flag)2404 Handle<Map> Map::CopyInsertDescriptor(Isolate* isolate, Handle<Map> map,
2405 Descriptor* descriptor,
2406 TransitionFlag flag) {
2407 Handle<DescriptorArray> old_descriptors(
2408 map->instance_descriptors(kRelaxedLoad), isolate);
2409
2410 // We replace the key if it is already present.
2411 InternalIndex index =
2412 old_descriptors->SearchWithCache(isolate, *descriptor->GetKey(), *map);
2413 if (index.is_found()) {
2414 return CopyReplaceDescriptor(isolate, map, old_descriptors, descriptor,
2415 index, flag);
2416 }
2417 return CopyAddDescriptor(isolate, map, descriptor, flag);
2418 }
2419
// Copies |map| with |descriptor| substituted for the existing descriptor at
// |insertion_index|. Only non-field (constant/accessor) descriptors may be
// replaced this way.
Handle<Map> Map::CopyReplaceDescriptor(Isolate* isolate, Handle<Map> map,
                                       Handle<DescriptorArray> descriptors,
                                       Descriptor* descriptor,
                                       InternalIndex insertion_index,
                                       TransitionFlag flag) {
  Handle<Name> key = descriptor->GetKey();
  DCHECK_EQ(*key, descriptors->GetKey(insertion_index));
  // This function does not support replacing property fields as
  // that would break property field counters.
  DCHECK_NE(kField, descriptor->GetDetails().location());
  DCHECK_NE(kField, descriptors->GetDetails(insertion_index).location());

  Handle<DescriptorArray> new_descriptors = DescriptorArray::CopyUpTo(
      isolate, descriptors, map->NumberOfOwnDescriptors());

  new_descriptors->Replace(insertion_index, descriptor);
  Handle<LayoutDescriptor> new_layout_descriptor = LayoutDescriptor::New(
      isolate, map, new_descriptors, new_descriptors->number_of_descriptors());

  // Replacing the last descriptor qualifies as a "simple" transition;
  // replacing any earlier one does not.
  SimpleTransitionFlag simple_flag =
      (insertion_index.as_int() == descriptors->number_of_descriptors() - 1)
          ? SIMPLE_PROPERTY_TRANSITION
          : PROPERTY_TRANSITION;
  return CopyReplaceDescriptors(isolate, map, new_descriptors,
                                new_layout_descriptor, flag, key,
                                "CopyReplaceDescriptor", simple_flag);
}
2447
Hash()2448 int Map::Hash() {
2449 // For performance reasons we only hash the 3 most variable fields of a map:
2450 // constructor, prototype and bit_field2. For predictability reasons we
2451 // use objects' offsets in respective pages for hashing instead of raw
2452 // addresses.
2453
2454 // Shift away the tag.
2455 int hash = ObjectAddressForHashing(GetConstructor().ptr()) >> 2;
2456
2457 // XOR-ing the prototype and constructor directly yields too many zero bits
2458 // when the two pointers are close (which is fairly common).
2459 // To avoid this we shift the prototype bits relatively to the constructor.
2460 hash ^= ObjectAddressForHashing(prototype().ptr()) << (32 - kPageSizeBits);
2461
2462 return hash ^ (hash >> 16) ^ bit_field2();
2463 }
2464
2465 namespace {
2466
CheckEquivalent(const Map first,const Map second)2467 bool CheckEquivalent(const Map first, const Map second) {
2468 return first.GetConstructor() == second.GetConstructor() &&
2469 first.prototype() == second.prototype() &&
2470 first.instance_type() == second.instance_type() &&
2471 first.bit_field() == second.bit_field() &&
2472 first.is_extensible() == second.is_extensible() &&
2473 first.new_target_is_base() == second.new_target_is_base();
2474 }
2475
2476 } // namespace
2477
// Checks whether |other| could legitimately be reached from this map via a
// transition, i.e. the two maps agree on everything a transition is not
// allowed to change.
bool Map::EquivalentToForTransition(const Map other) const {
  CHECK_EQ(GetConstructor(), other.GetConstructor());
  CHECK_EQ(instance_type(), other.instance_type());

  if (bit_field() != other.bit_field()) return false;
  if (new_target_is_base() != other.new_target_is_base()) return false;
  if (prototype() != other.prototype()) return false;
  if (instance_type() == JS_FUNCTION_TYPE) {
    // JSFunctions require more checks to ensure that sloppy function is
    // not equivalent to strict function: compare the descriptors the two
    // maps have in common.
    int nof =
        std::min(NumberOfOwnDescriptors(), other.NumberOfOwnDescriptors());
    return instance_descriptors(kRelaxedLoad)
        .IsEqualUpTo(other.instance_descriptors(kRelaxedLoad), nof);
  }
  return true;
}
2495
// Like EquivalentToForTransition, but additionally asserts (in debug builds
// only) that no own field descriptor could still be generalized in-place,
// which would make an elements-kind transition unsafe.
bool Map::EquivalentToForElementsKindTransition(const Map other) const {
  if (!EquivalentToForTransition(other)) return false;
#ifdef DEBUG
  // Ensure that we don't try to generate elements kind transitions from maps
  // with fields that may be generalized in-place. This must already be handled
  // during addition of a new field.
  DescriptorArray descriptors = instance_descriptors(kRelaxedLoad);
  for (InternalIndex i : IterateOwnDescriptors()) {
    PropertyDetails details = descriptors.GetDetails(i);
    if (details.location() == kField) {
      DCHECK(IsMostGeneralFieldType(details.representation(),
                                    descriptors.GetFieldType(i)));
    }
  }
#endif
  return true;
}
2513
// Checks whether the normalized map |other| can serve as the result of
// normalizing this map with the given |elements_kind| and |mode|.
bool Map::EquivalentToForNormalization(const Map other,
                                       ElementsKind elements_kind,
                                       PropertyNormalizationMode mode) const {
  // In CLEAR_INOBJECT_PROPERTIES mode the in-object properties are dropped,
  // so the expected count is zero.
  int properties =
      mode == CLEAR_INOBJECT_PROPERTIES ? 0 : other.GetInObjectProperties();
  // Make sure the elements_kind bits are in bit_field2.
  DCHECK_EQ(this->elements_kind(),
            Map::Bits2::ElementsKindBits::decode(bit_field2()));
  // Compare bit_field2 as if |other| already had the requested elements kind.
  int adjusted_other_bit_field2 =
      Map::Bits2::ElementsKindBits::update(other.bit_field2(), elements_kind);
  return CheckEquivalent(*this, other) &&
         bit_field2() == adjusted_other_bit_field2 &&
         GetInObjectProperties() == properties &&
         JSObject::GetEmbedderFieldCount(*this) ==
             JSObject::GetEmbedderFieldCount(other);
}
2530
GetMinInobjectSlack(Map map,void * data)2531 static void GetMinInobjectSlack(Map map, void* data) {
2532 int slack = map.UnusedPropertyFields();
2533 if (*reinterpret_cast<int*>(data) > slack) {
2534 *reinterpret_cast<int*>(data) = slack;
2535 }
2536 }
2537
// Returns the smallest number of unused in-object property fields across this
// initial map and its entire transition tree. That minimum is the amount by
// which every instance can safely be shrunk.
int Map::ComputeMinObjectSlack(Isolate* isolate) {
  DisallowHeapAllocation no_gc;
  // Has to be an initial map.
  DCHECK(GetBackPointer().IsUndefined(isolate));

  // Seed with this map's own slack, then take the minimum over the tree.
  int slack = UnusedPropertyFields();
  TransitionsAccessor transitions(isolate, *this, &no_gc);
  transitions.TraverseTransitionTree(&GetMinInobjectSlack, &slack);
  return slack;
}
2548
// Transition-tree traversal callback: shrinks |map|'s instance size by the
// slack passed via |data| and ends slack tracking for it. Debug builds assert
// that the visitor id and unused-field count stay consistent afterwards.
static void ShrinkInstanceSize(Map map, void* data) {
  int slack = *reinterpret_cast<int*>(data);
  DCHECK_GE(slack, 0);
#ifdef DEBUG
  int old_visitor_id = Map::GetVisitorId(map);
  int new_unused = map.UnusedPropertyFields() - slack;
#endif
  map.set_instance_size(map.InstanceSizeFromSlack(slack));
  map.set_construction_counter(Map::kNoSlackTracking);
  DCHECK_EQ(old_visitor_id, Map::GetVisitorId(map));
  DCHECK_EQ(new_unused, map.UnusedPropertyFields());
}
2561
// Transition-tree traversal callback: marks |map| as no longer tracking
// in-object slack. |data| is unused.
static void StopSlackTracking(Map map, void* data) {
  map.set_construction_counter(Map::kNoSlackTracking);
}
2565
// Finishes in-object slack tracking for this initial map: shrinks every map
// in the transition tree by the minimal slack found, or just stops tracking
// when there is nothing to reclaim.
void Map::CompleteInobjectSlackTracking(Isolate* isolate) {
  DisallowHeapAllocation no_gc;
  // Has to be an initial map.
  DCHECK(GetBackPointer().IsUndefined(isolate));

  int slack = ComputeMinObjectSlack(isolate);
  TransitionsAccessor transitions(isolate, *this, &no_gc);
  if (slack != 0) {
    // Resize the initial map and all maps in its transition tree.
    transitions.TraverseTransitionTree(&ShrinkInstanceSize, &slack);
  } else {
    transitions.TraverseTransitionTree(&StopSlackTracking, nullptr);
  }
}
2580
// Installs |descriptors| on this map and records how many of them belong to
// the map itself. The marking write barrier is notified (unless barriers are
// compiled out) so the marker sees the newly referenced descriptors.
void Map::SetInstanceDescriptors(Isolate* isolate, DescriptorArray descriptors,
                                 int number_of_own_descriptors) {
  set_instance_descriptors(descriptors, kReleaseStore);
  SetNumberOfOwnDescriptors(number_of_own_descriptors);
#ifndef V8_DISABLE_WRITE_BARRIERS
  WriteBarrier::Marking(descriptors, number_of_own_descriptors);
#endif
}
2589
2590 // static
GetOrCreatePrototypeInfo(Handle<JSObject> prototype,Isolate * isolate)2591 Handle<PrototypeInfo> Map::GetOrCreatePrototypeInfo(Handle<JSObject> prototype,
2592 Isolate* isolate) {
2593 Object maybe_proto_info = prototype->map().prototype_info();
2594 if (maybe_proto_info.IsPrototypeInfo()) {
2595 return handle(PrototypeInfo::cast(maybe_proto_info), isolate);
2596 }
2597 Handle<PrototypeInfo> proto_info = isolate->factory()->NewPrototypeInfo();
2598 prototype->map().set_prototype_info(*proto_info);
2599 return proto_info;
2600 }
2601
2602 // static
GetOrCreatePrototypeInfo(Handle<Map> prototype_map,Isolate * isolate)2603 Handle<PrototypeInfo> Map::GetOrCreatePrototypeInfo(Handle<Map> prototype_map,
2604 Isolate* isolate) {
2605 Object maybe_proto_info = prototype_map->prototype_info();
2606 if (maybe_proto_info.IsPrototypeInfo()) {
2607 return handle(PrototypeInfo::cast(maybe_proto_info), isolate);
2608 }
2609 Handle<PrototypeInfo> proto_info = isolate->factory()->NewPrototypeInfo();
2610 prototype_map->set_prototype_info(*proto_info);
2611 return proto_info;
2612 }
2613
2614 // static
SetShouldBeFastPrototypeMap(Handle<Map> map,bool value,Isolate * isolate)2615 void Map::SetShouldBeFastPrototypeMap(Handle<Map> map, bool value,
2616 Isolate* isolate) {
2617 if (value == false && !map->prototype_info().IsPrototypeInfo()) {
2618 // "False" is the implicit default value, so there's nothing to do.
2619 return;
2620 }
2621 GetOrCreatePrototypeInfo(map, isolate)->set_should_be_fast_map(value);
2622 }
2623
2624 // static
// Returns a value guarding the validity of |map|'s prototype chain: an
// existing still-valid Cell, a freshly allocated Cell, or the
// kPrototypeChainValid sentinel Smi when the chain root's prototype is not a
// JSObject (and thus never needs invalidation).
Handle<Object> Map::GetOrCreatePrototypeChainValidityCell(Handle<Map> map,
                                                          Isolate* isolate) {
  Handle<Object> maybe_prototype;
  if (map->IsJSGlobalObjectMap()) {
    DCHECK(map->is_prototype_map());
    // Global object is prototype of a global proxy and therefore we can
    // use its validity cell for guarding global object's prototype change.
    maybe_prototype = isolate->global_object();
  } else {
    maybe_prototype =
        handle(map->GetPrototypeChainRootMap(isolate).prototype(), isolate);
  }
  if (!maybe_prototype->IsJSObject()) {
    // Non-JSObject prototypes (e.g. null) get a constant "valid" marker.
    return handle(Smi::FromInt(Map::kPrototypeChainValid), isolate);
  }
  Handle<JSObject> prototype = Handle<JSObject>::cast(maybe_prototype);
  // Ensure the prototype is registered with its own prototypes so its cell
  // will be invalidated when necessary.
  JSObject::LazyRegisterPrototypeUser(handle(prototype->map(), isolate),
                                      isolate);

  Object maybe_cell = prototype->map().prototype_validity_cell();
  // Return existing cell if it's still valid.
  if (maybe_cell.IsCell()) {
    Handle<Cell> cell(Cell::cast(maybe_cell), isolate);
    if (cell->value() == Smi::FromInt(Map::kPrototypeChainValid)) {
      return cell;
    }
  }
  // Otherwise create a new cell.
  Handle<Cell> cell = isolate->factory()->NewCell(
      handle(Smi::FromInt(Map::kPrototypeChainValid), isolate));
  prototype->map().set_prototype_validity_cell(*cell);
  return cell;
}
2660
2661 // static
IsPrototypeChainInvalidated(Map map)2662 bool Map::IsPrototypeChainInvalidated(Map map) {
2663 DCHECK(map.is_prototype_map());
2664 Object maybe_cell = map.prototype_validity_cell();
2665 if (maybe_cell.IsCell()) {
2666 Cell cell = Cell::cast(maybe_cell);
2667 return cell.value() != Smi::FromInt(Map::kPrototypeChainValid);
2668 }
2669 return true;
2670 }
2671
2672 // static
// Installs |prototype| on |map|. JSObject prototypes are first put into their
// optimized for-use-as-prototype state; otherwise only null and JSProxy are
// permitted.
void Map::SetPrototype(Isolate* isolate, Handle<Map> map,
                       Handle<HeapObject> prototype,
                       bool enable_prototype_setup_mode) {
  RuntimeCallTimerScope stats_scope(isolate,
                                    RuntimeCallCounterId::kMap_SetPrototype);

  if (prototype->IsJSObject()) {
    Handle<JSObject> prototype_jsobj = Handle<JSObject>::cast(prototype);
    JSObject::OptimizeAsPrototype(prototype_jsobj, enable_prototype_setup_mode);
  } else {
    DCHECK(prototype->IsNull(isolate) || prototype->IsJSProxy());
  }

  // NOTE(review): null is stored without a write barrier — presumably safe
  // because it is an immortal, immovable root; confirm against heap layout.
  WriteBarrierMode wb_mode =
      prototype->IsNull(isolate) ? SKIP_WRITE_BARRIER : UPDATE_WRITE_BARRIER;
  map->set_prototype(*prototype, wb_mode);
}
2690
// Begins in-object slack tracking for this map, provided tracking is not
// already in progress and there is slack to potentially reclaim.
void Map::StartInobjectSlackTracking() {
  DCHECK(!IsInobjectSlackTrackingInProgress());
  if (UnusedPropertyFields() == 0) return;
  set_construction_counter(Map::kSlackTrackingCounterStart);
}
2696
// Returns a map like |map| but with the given |prototype|: a cached prototype
// transition is reused when one exists; otherwise |map| is copied, the copy
// is recorded as a prototype transition, and the prototype is installed on it.
Handle<Map> Map::TransitionToPrototype(Isolate* isolate, Handle<Map> map,
                                       Handle<HeapObject> prototype) {
  Handle<Map> new_map =
      TransitionsAccessor(isolate, map).GetPrototypeTransition(prototype);
  if (new_map.is_null()) {
    new_map = Copy(isolate, map, "TransitionToPrototype");
    TransitionsAccessor(isolate, map)
        .PutPrototypeTransition(prototype, new_map);
    Map::SetPrototype(isolate, new_map, prototype);
  }
  return new_map;
}
2709
New(Isolate * isolate)2710 Handle<NormalizedMapCache> NormalizedMapCache::New(Isolate* isolate) {
2711 Handle<WeakFixedArray> array(
2712 isolate->factory()->NewWeakFixedArray(kEntries, AllocationType::kOld));
2713 return Handle<NormalizedMapCache>::cast(array);
2714 }
2715
// Looks up a previously normalized map equivalent to |fast_map| under the
// given |elements_kind| and |mode|. Returns an empty handle when the cache
// slot is empty, its weak reference was cleared, or the cached map is not
// equivalent.
MaybeHandle<Map> NormalizedMapCache::Get(Handle<Map> fast_map,
                                         ElementsKind elements_kind,
                                         PropertyNormalizationMode mode) {
  DisallowHeapAllocation no_gc;
  MaybeObject value = WeakFixedArray::Get(GetIndex(fast_map));
  HeapObject heap_object;
  // Entries are weak references; anything else in the slot is a cache miss.
  if (!value->GetHeapObjectIfWeak(&heap_object)) {
    return MaybeHandle<Map>();
  }

  Map normalized_map = Map::cast(heap_object);
  if (!normalized_map.EquivalentToForNormalization(*fast_map, elements_kind,
                                                   mode)) {
    return MaybeHandle<Map>();
  }
  return handle(normalized_map, GetIsolate());
}
2733
// Caches |normalized_map| (as a weak reference) in the hash-based slot for
// |fast_map|. Only dictionary-mode maps may be stored.
void NormalizedMapCache::Set(Handle<Map> fast_map, Handle<Map> normalized_map) {
  DisallowHeapAllocation no_gc;
  DCHECK(normalized_map->is_dictionary_map());
  WeakFixedArray::Set(GetIndex(fast_map),
                      HeapObjectReference::Weak(*normalized_map));
}
2740
2741 } // namespace internal
2742 } // namespace v8
2743