• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright 2019 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/objects/map.h"
6 
7 #include "src/execution/frames.h"
8 #include "src/execution/isolate.h"
9 #include "src/handles/handles-inl.h"
10 #include "src/handles/maybe-handles.h"
11 #include "src/heap/heap-write-barrier-inl.h"
12 #include "src/init/bootstrapper.h"
13 #include "src/logging/log.h"
14 #include "src/logging/runtime-call-stats-scope.h"
15 #include "src/objects/arguments-inl.h"
16 #include "src/objects/descriptor-array.h"
17 #include "src/objects/elements-kind.h"
18 #include "src/objects/field-type.h"
19 #include "src/objects/js-objects.h"
20 #include "src/objects/map-updater.h"
21 #include "src/objects/maybe-object.h"
22 #include "src/objects/oddball.h"
23 #include "src/objects/property.h"
24 #include "src/objects/transitions-inl.h"
25 #include "src/roots/roots.h"
26 #include "src/utils/ostreams.h"
27 #include "src/zone/zone-containers.h"
28 
29 namespace v8 {
30 namespace internal {
31 
GetPrototypeChainRootMap(Isolate * isolate) const32 Map Map::GetPrototypeChainRootMap(Isolate* isolate) const {
33   DisallowGarbageCollection no_alloc;
34   if (IsJSReceiverMap()) {
35     return *this;
36   }
37   int constructor_function_index = GetConstructorFunctionIndex();
38   if (constructor_function_index != Map::kNoConstructorFunctionIndex) {
39     Context native_context = isolate->context().native_context();
40     JSFunction constructor_function =
41         JSFunction::cast(native_context.get(constructor_function_index));
42     return constructor_function.initial_map();
43   }
44   return ReadOnlyRoots(isolate).null_value().map();
45 }
46 
47 // static
GetConstructorFunction(Map map,Context native_context)48 base::Optional<JSFunction> Map::GetConstructorFunction(Map map,
49                                                        Context native_context) {
50   DisallowGarbageCollection no_gc;
51   if (map.IsPrimitiveMap()) {
52     int const constructor_function_index = map.GetConstructorFunctionIndex();
53     if (constructor_function_index != kNoConstructorFunctionIndex) {
54       return JSFunction::cast(native_context.get(constructor_function_index));
55     }
56   }
57   return {};
58 }
59 
GetInstanceTypeMap(ReadOnlyRoots roots,InstanceType type)60 Map Map::GetInstanceTypeMap(ReadOnlyRoots roots, InstanceType type) {
61   Map map;
62   switch (type) {
63 #define MAKE_CASE(TYPE, Name, name) \
64   case TYPE:                        \
65     map = roots.name##_map();       \
66     break;
67     STRUCT_LIST(MAKE_CASE)
68 #undef MAKE_CASE
69 #define MAKE_CASE(TYPE, Name, name) \
70   case TYPE:                        \
71     map = roots.name##_map();       \
72     break;
73     TORQUE_DEFINED_INSTANCE_TYPE_LIST(MAKE_CASE)
74 #undef MAKE_CASE
75     default:
76       UNREACHABLE();
77   }
78   return map;
79 }
80 
// Maps an instance type to the VisitorId the heap visitors use to select the
// body descriptor when iterating an object's fields. The result must fit in
// one byte (see the STATIC_ASSERT below) because it is stored in the map.
VisitorId Map::GetVisitorId(Map map) {
  STATIC_ASSERT(kVisitorIdCount <= 256);

  const int instance_type = map.instance_type();

  // Strings dispatch on representation (seq/cons/sliced/external/thin) and,
  // for sequential strings, on encoding.
  if (instance_type < FIRST_NONSTRING_TYPE) {
    switch (instance_type & kStringRepresentationMask) {
      case kSeqStringTag:
        if ((instance_type & kStringEncodingMask) == kOneByteStringTag) {
          return kVisitSeqOneByteString;
        } else {
          return kVisitSeqTwoByteString;
        }

      case kConsStringTag:
        if (IsShortcutCandidate(instance_type)) {
          return kVisitShortcutCandidate;
        } else {
          return kVisitConsString;
        }

      case kSlicedStringTag:
        return kVisitSlicedString;

      case kExternalStringTag:
        return kVisitDataObject;

      case kThinStringTag:
        return kVisitThinString;
    }
    UNREACHABLE();
  }

  // JSApiObjects span a range of instance types, so they are checked up front
  // rather than enumerated in the switch below.
  if (InstanceTypeChecker::IsJSApiObject(map.instance_type())) {
    return kVisitJSApiObject;
  }

  switch (instance_type) {
    case BYTE_ARRAY_TYPE:
      return kVisitByteArray;

    case BYTECODE_ARRAY_TYPE:
      return kVisitBytecodeArray;

    case FREE_SPACE_TYPE:
      return kVisitFreeSpace;

    case EMBEDDER_DATA_ARRAY_TYPE:
      return kVisitEmbedderDataArray;

    // All FixedArray-shaped collections share the generic fixed-array visitor.
    case OBJECT_BOILERPLATE_DESCRIPTION_TYPE:
    case NAME_TO_INDEX_HASH_TABLE_TYPE:
    case REGISTERED_SYMBOL_TABLE_TYPE:
    case CLOSURE_FEEDBACK_CELL_ARRAY_TYPE:
    case HASH_TABLE_TYPE:
    case ORDERED_HASH_MAP_TYPE:
    case ORDERED_HASH_SET_TYPE:
    case ORDERED_NAME_DICTIONARY_TYPE:
    case NAME_DICTIONARY_TYPE:
    case GLOBAL_DICTIONARY_TYPE:
    case NUMBER_DICTIONARY_TYPE:
    case SIMPLE_NUMBER_DICTIONARY_TYPE:
    case SCRIPT_CONTEXT_TABLE_TYPE:
      return kVisitFixedArray;

    case AWAIT_CONTEXT_TYPE:
    case BLOCK_CONTEXT_TYPE:
    case CATCH_CONTEXT_TYPE:
    case DEBUG_EVALUATE_CONTEXT_TYPE:
    case EVAL_CONTEXT_TYPE:
    case FUNCTION_CONTEXT_TYPE:
    case MODULE_CONTEXT_TYPE:
    case SCRIPT_CONTEXT_TYPE:
    case WITH_CONTEXT_TYPE:
      return kVisitContext;

    case NATIVE_CONTEXT_TYPE:
      return kVisitNativeContext;

    case EPHEMERON_HASH_TABLE_TYPE:
      return kVisitEphemeronHashTable;

    case FIXED_DOUBLE_ARRAY_TYPE:
      return kVisitFixedDoubleArray;

    case PROPERTY_ARRAY_TYPE:
      return kVisitPropertyArray;

    case FEEDBACK_CELL_TYPE:
      return kVisitFeedbackCell;

    case FEEDBACK_METADATA_TYPE:
      return kVisitFeedbackMetadata;

    case ODDBALL_TYPE:
      return kVisitOddball;

    case MAP_TYPE:
      return kVisitMap;

    case CODE_TYPE:
      return kVisitCode;

    case CELL_TYPE:
      return kVisitCell;

    case PROPERTY_CELL_TYPE:
      return kVisitPropertyCell;

    case TRANSITION_ARRAY_TYPE:
      return kVisitTransitionArray;

    case JS_WEAK_MAP_TYPE:
    case JS_WEAK_SET_TYPE:
      return kVisitJSWeakCollection;

    case CALL_HANDLER_INFO_TYPE:
      return kVisitStruct;

    case JS_PROXY_TYPE:
      return kVisitStruct;

    case SYMBOL_TYPE:
      return kVisitSymbol;

    case JS_ARRAY_BUFFER_TYPE:
      return kVisitJSArrayBuffer;

    case JS_DATA_VIEW_TYPE:
      return kVisitJSDataView;

    case JS_EXTERNAL_OBJECT_TYPE:
      return kVisitJSExternalObject;

    // All function maps, including the per-type typed-array constructors,
    // share the JSFunction visitor.
    case JS_FUNCTION_TYPE:
    case JS_CLASS_CONSTRUCTOR_TYPE:
    case JS_PROMISE_CONSTRUCTOR_TYPE:
    case JS_REG_EXP_CONSTRUCTOR_TYPE:
    case JS_ARRAY_CONSTRUCTOR_TYPE:
#define TYPED_ARRAY_CONSTRUCTORS_SWITCH(Type, type, TYPE, Ctype) \
  case TYPE##_TYPED_ARRAY_CONSTRUCTOR_TYPE:
      TYPED_ARRAYS(TYPED_ARRAY_CONSTRUCTORS_SWITCH)
#undef TYPED_ARRAY_CONSTRUCTORS_SWITCH
      return kVisitJSFunction;

    case JS_TYPED_ARRAY_TYPE:
      return kVisitJSTypedArray;

    case SMALL_ORDERED_HASH_MAP_TYPE:
      return kVisitSmallOrderedHashMap;

    case SMALL_ORDERED_HASH_SET_TYPE:
      return kVisitSmallOrderedHashSet;

    case SMALL_ORDERED_NAME_DICTIONARY_TYPE:
      return kVisitSmallOrderedNameDictionary;

    case SWISS_NAME_DICTIONARY_TYPE:
      return kVisitSwissNameDictionary;

    case CODE_DATA_CONTAINER_TYPE:
      return kVisitCodeDataContainer;

    case PREPARSE_DATA_TYPE:
      return kVisitPreparseData;

    case COVERAGE_INFO_TYPE:
      return kVisitCoverageInfo;

    // Plain JSObject-shaped instances: the visitor choice depends only on
    // whether the object may contain raw (non-tagged) embedder data fields.
    case JS_ARGUMENTS_OBJECT_TYPE:
    case JS_ARRAY_ITERATOR_PROTOTYPE_TYPE:
    case JS_ARRAY_ITERATOR_TYPE:
    case JS_ARRAY_TYPE:
    case JS_ASYNC_FROM_SYNC_ITERATOR_TYPE:
    case JS_ASYNC_FUNCTION_OBJECT_TYPE:
    case JS_ASYNC_GENERATOR_OBJECT_TYPE:
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
    case JS_DATE_TYPE:
    case JS_ERROR_TYPE:
    case JS_GENERATOR_OBJECT_TYPE:
    case JS_ITERATOR_PROTOTYPE_TYPE:
    case JS_MAP_ITERATOR_PROTOTYPE_TYPE:
    case JS_MAP_KEY_ITERATOR_TYPE:
    case JS_MAP_KEY_VALUE_ITERATOR_TYPE:
    case JS_MAP_TYPE:
    case JS_MAP_VALUE_ITERATOR_TYPE:
    case JS_MESSAGE_OBJECT_TYPE:
    case JS_MODULE_NAMESPACE_TYPE:
    case JS_OBJECT_PROTOTYPE_TYPE:
    case JS_OBJECT_TYPE:
    case JS_PRIMITIVE_WRAPPER_TYPE:
    case JS_PROMISE_PROTOTYPE_TYPE:
    case JS_PROMISE_TYPE:
    case JS_REG_EXP_PROTOTYPE_TYPE:
    case JS_REG_EXP_STRING_ITERATOR_TYPE:
    case JS_REG_EXP_TYPE:
    case JS_SET_ITERATOR_PROTOTYPE_TYPE:
    case JS_SET_KEY_VALUE_ITERATOR_TYPE:
    case JS_SET_PROTOTYPE_TYPE:
    case JS_SET_TYPE:
    case JS_SET_VALUE_ITERATOR_TYPE:
    case JS_SHADOW_REALM_TYPE:
    case JS_SHARED_STRUCT_TYPE:
    case JS_STRING_ITERATOR_PROTOTYPE_TYPE:
    case JS_STRING_ITERATOR_TYPE:
    case JS_TEMPORAL_CALENDAR_TYPE:
    case JS_TEMPORAL_DURATION_TYPE:
    case JS_TEMPORAL_INSTANT_TYPE:
    case JS_TEMPORAL_PLAIN_DATE_TYPE:
    case JS_TEMPORAL_PLAIN_DATE_TIME_TYPE:
    case JS_TEMPORAL_PLAIN_MONTH_DAY_TYPE:
    case JS_TEMPORAL_PLAIN_TIME_TYPE:
    case JS_TEMPORAL_PLAIN_YEAR_MONTH_TYPE:
    case JS_TEMPORAL_TIME_ZONE_TYPE:
    case JS_TEMPORAL_ZONED_DATE_TIME_TYPE:
    case JS_TYPED_ARRAY_PROTOTYPE_TYPE:
#ifdef V8_INTL_SUPPORT
    case JS_V8_BREAK_ITERATOR_TYPE:
    case JS_COLLATOR_TYPE:
    case JS_DATE_TIME_FORMAT_TYPE:
    case JS_DISPLAY_NAMES_TYPE:
    case JS_LIST_FORMAT_TYPE:
    case JS_LOCALE_TYPE:
    case JS_NUMBER_FORMAT_TYPE:
    case JS_PLURAL_RULES_TYPE:
    case JS_RELATIVE_TIME_FORMAT_TYPE:
    case JS_SEGMENT_ITERATOR_TYPE:
    case JS_SEGMENTER_TYPE:
    case JS_SEGMENTS_TYPE:
#endif  // V8_INTL_SUPPORT
#if V8_ENABLE_WEBASSEMBLY
    case WASM_TAG_OBJECT_TYPE:
    case WASM_GLOBAL_OBJECT_TYPE:
    case WASM_MEMORY_OBJECT_TYPE:
    case WASM_MODULE_OBJECT_TYPE:
    case WASM_TABLE_OBJECT_TYPE:
    case WASM_VALUE_OBJECT_TYPE:
#endif  // V8_ENABLE_WEBASSEMBLY
    case JS_BOUND_FUNCTION_TYPE:
    case JS_WRAPPED_FUNCTION_TYPE: {
      const bool has_raw_data_fields =
          COMPRESS_POINTERS_BOOL && JSObject::GetEmbedderFieldCount(map) > 0;
      return has_raw_data_fields ? kVisitJSObject : kVisitJSObjectFast;
    }
    case JS_API_OBJECT_TYPE:
    case JS_GLOBAL_PROXY_TYPE:
    case JS_GLOBAL_OBJECT_TYPE:
    case JS_SPECIAL_API_OBJECT_TYPE:
      return kVisitJSApiObject;

    case JS_WEAK_REF_TYPE:
      return kVisitJSWeakRef;

    case WEAK_CELL_TYPE:
      return kVisitWeakCell;

    case JS_FINALIZATION_REGISTRY_TYPE:
      return kVisitJSFinalizationRegistry;

    case FILLER_TYPE:
    case FOREIGN_TYPE:
    case HEAP_NUMBER_TYPE:
      return kVisitDataObject;

    case BIGINT_TYPE:
      return kVisitBigInt;

    case ALLOCATION_SITE_TYPE:
      return kVisitAllocationSite;

    // Structs default to the generic struct visitor, except for the few
    // struct types singled out below.
#define MAKE_STRUCT_CASE(TYPE, Name, name) case TYPE:
      STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
      if (instance_type == PROMISE_ON_STACK_TYPE) {
        return kVisitPromiseOnStack;
      }
      if (instance_type == PROTOTYPE_INFO_TYPE) {
        return kVisitPrototypeInfo;
      }
#if V8_ENABLE_WEBASSEMBLY
      if (instance_type == WASM_INDIRECT_FUNCTION_TABLE_TYPE) {
        return kVisitWasmIndirectFunctionTable;
      }
#endif  // V8_ENABLE_WEBASSEMBLY
      return kVisitStruct;

    case LOAD_HANDLER_TYPE:
    case STORE_HANDLER_TYPE:
      return kVisitDataHandler;

    case SOURCE_TEXT_MODULE_TYPE:
      return kVisitSourceTextModule;
    case SYNTHETIC_MODULE_TYPE:
      return kVisitSyntheticModule;

#if V8_ENABLE_WEBASSEMBLY
    case WASM_INSTANCE_OBJECT_TYPE:
      return kVisitWasmInstanceObject;
    case WASM_ARRAY_TYPE:
      return kVisitWasmArray;
    case WASM_STRUCT_TYPE:
      return kVisitWasmStruct;
    case WASM_TYPE_INFO_TYPE:
      return kVisitWasmTypeInfo;
    case WASM_INTERNAL_FUNCTION_TYPE:
      return kVisitWasmInternalFunction;
    case WASM_JS_FUNCTION_DATA_TYPE:
      return kVisitWasmJSFunctionData;
    case WASM_ON_FULFILLED_DATA_TYPE:
      return kVisitWasmOnFulfilledData;
    case WASM_API_FUNCTION_REF_TYPE:
      return kVisitWasmApiFunctionRef;
    case WASM_EXPORTED_FUNCTION_DATA_TYPE:
      return kVisitWasmExportedFunctionData;
    case WASM_CAPI_FUNCTION_DATA_TYPE:
      return kVisitWasmCapiFunctionData;
    case WASM_SUSPENDER_OBJECT_TYPE:
      return kVisitWasmSuspenderObject;
#endif  // V8_ENABLE_WEBASSEMBLY

#define MAKE_TQ_CASE(TYPE, Name) \
  case TYPE:                     \
    return kVisit##Name;
      TORQUE_INSTANCE_TYPE_TO_BODY_DESCRIPTOR_LIST(MAKE_TQ_CASE)
#undef MAKE_TQ_CASE

    default:
      UNREACHABLE();
  }
}
411 
412 // static
WrapFieldType(Isolate * isolate,Handle<FieldType> type)413 MaybeObjectHandle Map::WrapFieldType(Isolate* isolate, Handle<FieldType> type) {
414   if (type->IsClass()) {
415     return MaybeObjectHandle::Weak(type->AsClass(), isolate);
416   }
417   return MaybeObjectHandle(type);
418 }
419 
420 // static
UnwrapFieldType(MaybeObject wrapped_type)421 FieldType Map::UnwrapFieldType(MaybeObject wrapped_type) {
422   if (wrapped_type->IsCleared()) {
423     return FieldType::None();
424   }
425   HeapObject heap_object;
426   if (wrapped_type->GetHeapObjectIfWeak(&heap_object)) {
427     return FieldType::cast(heap_object);
428   }
429   return wrapped_type->cast<FieldType>();
430 }
431 
CopyWithField(Isolate * isolate,Handle<Map> map,Handle<Name> name,Handle<FieldType> type,PropertyAttributes attributes,PropertyConstness constness,Representation representation,TransitionFlag flag)432 MaybeHandle<Map> Map::CopyWithField(Isolate* isolate, Handle<Map> map,
433                                     Handle<Name> name, Handle<FieldType> type,
434                                     PropertyAttributes attributes,
435                                     PropertyConstness constness,
436                                     Representation representation,
437                                     TransitionFlag flag) {
438   DCHECK(map->instance_descriptors(isolate)
439              .Search(*name, map->NumberOfOwnDescriptors())
440              .is_not_found());
441 
442   // Ensure the descriptor array does not get too big.
443   if (map->NumberOfOwnDescriptors() >= kMaxNumberOfDescriptors) {
444     return MaybeHandle<Map>();
445   }
446 
447   // Compute the new index for new field.
448   int index = map->NextFreePropertyIndex();
449 
450   if (map->instance_type() == JS_CONTEXT_EXTENSION_OBJECT_TYPE) {
451     constness = PropertyConstness::kMutable;
452     representation = Representation::Tagged();
453     type = FieldType::Any(isolate);
454   } else {
455     Map::GeneralizeIfCanHaveTransitionableFastElementsKind(
456         isolate, map->instance_type(), &representation, &type);
457   }
458 
459   MaybeObjectHandle wrapped_type = WrapFieldType(isolate, type);
460 
461   Descriptor d = Descriptor::DataField(name, index, attributes, constness,
462                                        representation, wrapped_type);
463   Handle<Map> new_map = Map::CopyAddDescriptor(isolate, map, &d, flag);
464   new_map->AccountAddedPropertyField();
465   return new_map;
466 }
467 
CopyWithConstant(Isolate * isolate,Handle<Map> map,Handle<Name> name,Handle<Object> constant,PropertyAttributes attributes,TransitionFlag flag)468 MaybeHandle<Map> Map::CopyWithConstant(Isolate* isolate, Handle<Map> map,
469                                        Handle<Name> name,
470                                        Handle<Object> constant,
471                                        PropertyAttributes attributes,
472                                        TransitionFlag flag) {
473   // Ensure the descriptor array does not get too big.
474   if (map->NumberOfOwnDescriptors() >= kMaxNumberOfDescriptors) {
475     return MaybeHandle<Map>();
476   }
477 
478   Representation representation = constant->OptimalRepresentation(isolate);
479   Handle<FieldType> type = constant->OptimalType(isolate, representation);
480   return CopyWithField(isolate, map, name, type, attributes,
481                        PropertyConstness::kConst, representation, flag);
482 }
483 
InstancesNeedRewriting(Map target,ConcurrencyMode cmode) const484 bool Map::InstancesNeedRewriting(Map target, ConcurrencyMode cmode) const {
485   int target_number_of_fields = target.NumberOfFields(cmode);
486   int target_inobject = target.GetInObjectProperties();
487   int target_unused = target.UnusedPropertyFields();
488   int old_number_of_fields;
489 
490   return InstancesNeedRewriting(target, target_number_of_fields,
491                                 target_inobject, target_unused,
492                                 &old_number_of_fields, cmode);
493 }
494 
InstancesNeedRewriting(Map target,int target_number_of_fields,int target_inobject,int target_unused,int * old_number_of_fields,ConcurrencyMode cmode) const495 bool Map::InstancesNeedRewriting(Map target, int target_number_of_fields,
496                                  int target_inobject, int target_unused,
497                                  int* old_number_of_fields,
498                                  ConcurrencyMode cmode) const {
499   // If fields were added (or removed), rewrite the instance.
500   *old_number_of_fields = NumberOfFields(cmode);
501   DCHECK(target_number_of_fields >= *old_number_of_fields);
502   if (target_number_of_fields != *old_number_of_fields) return true;
503 
504   // If smi descriptors were replaced by double descriptors, rewrite.
505   DescriptorArray old_desc = IsConcurrent(cmode)
506                                  ? instance_descriptors(kAcquireLoad)
507                                  : instance_descriptors();
508   DescriptorArray new_desc = IsConcurrent(cmode)
509                                  ? target.instance_descriptors(kAcquireLoad)
510                                  : target.instance_descriptors();
511   for (InternalIndex i : IterateOwnDescriptors()) {
512     if (new_desc.GetDetails(i).representation().IsDouble() !=
513         old_desc.GetDetails(i).representation().IsDouble()) {
514       return true;
515     }
516   }
517 
518   // If no fields were added, and no inobject properties were removed, setting
519   // the map is sufficient.
520   if (target_inobject == GetInObjectProperties()) return false;
521   // In-object slack tracking may have reduced the object size of the new map.
522   // In that case, succeed if all existing fields were inobject, and they still
523   // fit within the new inobject size.
524   DCHECK(target_inobject < GetInObjectProperties());
525   if (target_number_of_fields <= target_inobject) {
526     DCHECK(target_number_of_fields + target_unused == target_inobject);
527     return false;
528   }
529   // Otherwise, properties will need to be moved to the backing store.
530   return true;
531 }
532 
NumberOfFields(ConcurrencyMode cmode) const533 int Map::NumberOfFields(ConcurrencyMode cmode) const {
534   DescriptorArray descriptors = IsConcurrent(cmode)
535                                     ? instance_descriptors(kAcquireLoad)
536                                     : instance_descriptors();
537   int result = 0;
538   for (InternalIndex i : IterateOwnDescriptors()) {
539     if (descriptors.GetDetails(i).location() == PropertyLocation::kField)
540       result++;
541   }
542   return result;
543 }
544 
GetFieldCounts() const545 Map::FieldCounts Map::GetFieldCounts() const {
546   DescriptorArray descriptors = instance_descriptors();
547   int mutable_count = 0;
548   int const_count = 0;
549   for (InternalIndex i : IterateOwnDescriptors()) {
550     PropertyDetails details = descriptors.GetDetails(i);
551     if (details.location() == PropertyLocation::kField) {
552       switch (details.constness()) {
553         case PropertyConstness::kMutable:
554           mutable_count++;
555           break;
556         case PropertyConstness::kConst:
557           const_count++;
558           break;
559       }
560     }
561   }
562   return FieldCounts(mutable_count, const_count);
563 }
564 
HasOutOfObjectProperties() const565 bool Map::HasOutOfObjectProperties() const {
566   return GetInObjectProperties() <
567          NumberOfFields(ConcurrencyMode::kSynchronous);
568 }
569 
// Recursively marks this map and every map reachable through its transition
// tree as deprecated, deoptimizing code that depends on the transitions.
// Idempotent: returns immediately if the map is already deprecated.
void Map::DeprecateTransitionTree(Isolate* isolate) {
  if (is_deprecated()) return;
  TransitionsAccessor transitions(isolate, *this);
  int num_transitions = transitions.NumberOfTransitions();
  // Depth-first over all transition targets before marking this map itself.
  for (int i = 0; i < num_transitions; ++i) {
    transitions.GetTarget(i).DeprecateTransitionTree(isolate);
  }
  DCHECK(!constructor_or_back_pointer().IsFunctionTemplateInfo());
  DCHECK(CanBeDeprecated());
  set_is_deprecated(true);
  if (FLAG_log_maps) {
    LOG(isolate, MapEvent("Deprecate", handle(*this, isolate), Handle<Map>()));
  }
  // Invalidate optimized code that assumed this map's transitions were stable.
  dependent_code().DeoptimizeDependentCodeGroup(
      isolate, DependentCode::kTransitionGroup);
  NotifyLeafMapLayoutChange(isolate);
}
587 
// Installs |new_descriptors| over the current instance_descriptors to ensure
// proper sharing of descriptor arrays. Walks the back-pointer chain and
// rewrites every map that shares the old descriptor array.
void Map::ReplaceDescriptors(Isolate* isolate,
                             DescriptorArray new_descriptors) {
  // Don't overwrite the empty descriptor array or initial map's descriptors.
  if (NumberOfOwnDescriptors() == 0 ||
      GetBackPointer(isolate).IsUndefined(isolate)) {
    return;
  }

  DescriptorArray to_replace = instance_descriptors(isolate);
  // Replace descriptors by new_descriptors in all maps that share it. The old
  // descriptors will not be trimmed in the mark-compactor, we need to mark
  // all its elements.
  Map current = *this;
#ifndef V8_DISABLE_WRITE_BARRIERS
  // Manual marking barrier: keep the old array's elements alive for the GC
  // even though the maps stop referencing it below.
  WriteBarrier::Marking(to_replace, to_replace.number_of_descriptors());
#endif
  while (current.instance_descriptors(isolate) == to_replace) {
    Object next = current.GetBackPointer(isolate);
    if (next.IsUndefined(isolate)) break;  // Stop overwriting at initial map.
    // Enum caches of the old array are not valid for the new one.
    current.SetEnumLength(kInvalidEnumCacheSentinel);
    current.UpdateDescriptors(isolate, new_descriptors,
                              current.NumberOfOwnDescriptors());
    current = Map::cast(next);
  }
  set_owns_descriptors(false);
}
616 
FindRootMap(Isolate * isolate) const617 Map Map::FindRootMap(Isolate* isolate) const {
618   DisallowGarbageCollection no_gc;
619   Map result = *this;
620   PtrComprCageBase cage_base(isolate);
621   while (true) {
622     Object back = result.GetBackPointer(cage_base);
623     if (back.IsUndefined(isolate)) {
624       // Initial map must not contain descriptors in the descriptors array
625       // that do not belong to the map.
626       DCHECK_LE(result.NumberOfOwnDescriptors(),
627                 result.instance_descriptors(cage_base, kRelaxedLoad)
628                     .number_of_descriptors());
629       return result;
630     }
631     result = Map::cast(back);
632   }
633 }
634 
FindFieldOwner(Isolate * isolate,InternalIndex descriptor) const635 Map Map::FindFieldOwner(Isolate* isolate, InternalIndex descriptor) const {
636   DisallowGarbageCollection no_gc;
637   DCHECK_EQ(PropertyLocation::kField,
638             instance_descriptors(isolate, kRelaxedLoad)
639                 .GetDetails(descriptor)
640                 .location());
641   Map result = *this;
642   while (true) {
643     Object back = result.GetBackPointer(isolate);
644     if (back.IsUndefined(isolate)) break;
645     const Map parent = Map::cast(back);
646     if (parent.NumberOfOwnDescriptors() <= descriptor.as_int()) break;
647     result = parent;
648   }
649   return result;
650 }
651 
namespace {

// Follows the migration-target links recorded by TransitionsAccessor to find
// a non-deprecated replacement for |old_map|. Returns an empty Map when no
// usable target exists or when the fast path cannot be trusted (cleared field
// types).
Map SearchMigrationTarget(Isolate* isolate, Map old_map) {
  DisallowGarbageCollection no_gc;

  // Chase migration-target links until a live (non-deprecated) map is found.
  Map target = old_map;
  do {
    target = TransitionsAccessor(isolate, target).GetMigrationTarget();
  } while (!target.is_null() && target.is_deprecated());
  if (target.is_null()) return Map();

  // TODO(ishell): if this validation ever become a bottleneck consider adding a
  // bit to the Map telling whether it contains fields whose field types may be
  // cleared.
  // TODO(ishell): revisit handling of cleared field types in
  // TryReplayPropertyTransitions() and consider checking the target map's field
  // types instead of old_map's types.
  // Go to slow map updating if the old_map has fast properties with cleared
  // field types.
  DescriptorArray old_descriptors = old_map.instance_descriptors(isolate);
  for (InternalIndex i : old_map.IterateOwnDescriptors()) {
    PropertyDetails old_details = old_descriptors.GetDetails(i);
    if (old_details.location() == PropertyLocation::kField &&
        old_details.kind() == PropertyKind::kData) {
      FieldType old_type = old_descriptors.GetFieldType(i);
      if (Map::FieldTypeIsCleared(old_details.representation(), old_type)) {
        return Map();
      }
    }
  }

  // The fast path must agree with the full (slow) update computation.
  SLOW_DCHECK(MapUpdater::TryUpdateNoLock(
                  isolate, old_map, ConcurrencyMode::kSynchronous) == target);
  return target;
}
}  // namespace
688 
689 // static
TryUpdate(Isolate * isolate,Handle<Map> old_map)690 MaybeHandle<Map> Map::TryUpdate(Isolate* isolate, Handle<Map> old_map) {
691   DisallowGarbageCollection no_gc;
692   DisallowDeoptimization no_deoptimization(isolate);
693 
694   if (!old_map->is_deprecated()) return old_map;
695 
696   if (FLAG_fast_map_update) {
697     Map target_map = SearchMigrationTarget(isolate, *old_map);
698     if (!target_map.is_null()) {
699       return handle(target_map, isolate);
700     }
701   }
702 
703   base::Optional<Map> new_map = MapUpdater::TryUpdateNoLock(
704       isolate, *old_map, ConcurrencyMode::kSynchronous);
705   if (!new_map.has_value()) return MaybeHandle<Map>();
706   if (FLAG_fast_map_update) {
707     TransitionsAccessor::SetMigrationTarget(isolate, old_map, new_map.value());
708   }
709   return handle(new_map.value(), isolate);
710 }
711 
// Starting from this (root) map, replays old_map's property transitions one
// descriptor at a time. Returns the resulting map, or an empty Map if any
// transition is missing or any property would need generalization (in which
// case the caller must fall back to the full MapUpdater).
Map Map::TryReplayPropertyTransitions(Isolate* isolate, Map old_map,
                                      ConcurrencyMode cmode) {
  DisallowGarbageCollection no_gc;

  const int root_nof = NumberOfOwnDescriptors();
  const int old_nof = old_map.NumberOfOwnDescriptors();
  // TODO(jgruber,chromium:1239009): The main thread should use non-atomic
  // reads, but this currently leads to odd behavior (see the linked bug).
  // Investigate and fix this properly. Also below and in called functions.
  DescriptorArray old_descriptors =
      old_map.instance_descriptors(isolate, kAcquireLoad);

  Map new_map = *this;
  // Replay each descriptor past the root map's own descriptors.
  for (InternalIndex i : InternalIndex::Range(root_nof, old_nof)) {
    PropertyDetails old_details = old_descriptors.GetDetails(i);
    Map transition =
        TransitionsAccessor(isolate, new_map, IsConcurrent(cmode))
            .SearchTransition(old_descriptors.GetKey(i), old_details.kind(),
                              old_details.attributes());
    // No matching transition: the replay cannot proceed.
    if (transition.is_null()) return Map();
    new_map = transition;
    DescriptorArray new_descriptors =
        new_map.instance_descriptors(isolate, kAcquireLoad);

    PropertyDetails new_details = new_descriptors.GetDetails(i);
    DCHECK_EQ(old_details.kind(), new_details.kind());
    DCHECK_EQ(old_details.attributes(), new_details.attributes());
    // The existing property must be expressible under the new details without
    // generalizing constness, location, or representation.
    if (!IsGeneralizableTo(old_details.constness(), new_details.constness())) {
      return Map();
    }
    DCHECK(IsGeneralizableTo(old_details.location(), new_details.location()));
    if (!old_details.representation().fits_into(new_details.representation())) {
      return Map();
    }
    if (new_details.location() == PropertyLocation::kField) {
      if (new_details.kind() == PropertyKind::kData) {
        FieldType new_type = new_descriptors.GetFieldType(i);
        // Cleared field types need special treatment. They represent lost
        // knowledge, so we must first generalize the new_type to "Any".
        if (FieldTypeIsCleared(new_details.representation(), new_type)) {
          return Map();
        }
        DCHECK_EQ(PropertyKind::kData, old_details.kind());
        DCHECK_EQ(PropertyLocation::kField, old_details.location());
        FieldType old_type = old_descriptors.GetFieldType(i);
        // The old field type must still be a subtype of the new one.
        if (FieldTypeIsCleared(old_details.representation(), old_type) ||
            !old_type.NowIs(new_type)) {
          return Map();
        }
      } else {
        // Accessors are never stored in fields.
        DCHECK_EQ(PropertyKind::kAccessor, new_details.kind());
#ifdef DEBUG
        FieldType new_type = new_descriptors.GetFieldType(i);
        DCHECK(new_type.IsAny());
#endif
        UNREACHABLE();
      }
    } else {
      DCHECK_EQ(PropertyLocation::kDescriptor, new_details.location());
      // Descriptor-located values must match exactly.
      if (old_details.location() == PropertyLocation::kField ||
          old_descriptors.GetStrongValue(i) !=
              new_descriptors.GetStrongValue(i)) {
        return Map();
      }
    }
  }
  // The replayed map must cover exactly the old map's descriptors.
  if (new_map.NumberOfOwnDescriptors() != old_nof) return Map();
  return new_map;
}
781 
782 // static
Update(Isolate * isolate,Handle<Map> map)783 Handle<Map> Map::Update(Isolate* isolate, Handle<Map> map) {
784   if (!map->is_deprecated()) return map;
785   if (FLAG_fast_map_update) {
786     Map target_map = SearchMigrationTarget(isolate, *map);
787     if (!target_map.is_null()) {
788       return handle(target_map, isolate);
789     }
790   }
791   MapUpdater mu(isolate, map);
792   return mu.Update();
793 }
794 
// Grows |map|'s descriptor array so it has at least |slack| free descriptor
// slots, propagating the new array to all maps that shared the old one.
void Map::EnsureDescriptorSlack(Isolate* isolate, Handle<Map> map, int slack) {
  // Only supports adding slack to owned descriptors.
  DCHECK(map->owns_descriptors());

  Handle<DescriptorArray> descriptors(map->instance_descriptors(isolate),
                                      isolate);
  int old_size = map->NumberOfOwnDescriptors();
  // Nothing to do if there is already enough slack.
  if (slack <= descriptors->number_of_slack_descriptors()) return;

  Handle<DescriptorArray> new_descriptors =
      DescriptorArray::CopyUpTo(isolate, descriptors, old_size, slack);

  DisallowGarbageCollection no_gc;
  if (old_size == 0) {
    // No descriptors yet: no sharing possible, just install the new array.
    map->UpdateDescriptors(isolate, *new_descriptors,
                           map->NumberOfOwnDescriptors());
    return;
  }

  // If the source descriptors had an enum cache we copy it. This ensures
  // that the maps to which we push the new descriptor array back can rely
  // on a cache always being available once it is set. If the map has more
  // enumerated descriptors than available in the original cache, the cache
  // will be lazily replaced by the extended cache when needed.
  new_descriptors->CopyEnumCacheFrom(*descriptors);

  // Replace descriptors by new_descriptors in all maps that share it. The old
  // descriptors will not be trimmed in the mark-compactor, we need to mark
  // all its elements.
#ifndef V8_DISABLE_WRITE_BARRIERS
  WriteBarrier::Marking(*descriptors, descriptors->number_of_descriptors());
#endif

  // Update the descriptors from {map} (inclusive) until the initial map
  // (exclusive). In the case that {map} is the initial map, update it.
  map->UpdateDescriptors(isolate, *new_descriptors,
                         map->NumberOfOwnDescriptors());
  Object next = map->GetBackPointer();
  if (next.IsUndefined(isolate)) return;

  // Walk the back-pointer chain, rewriting every map that still references
  // the old (shared) descriptor array.
  Map current = Map::cast(next);
  while (current.instance_descriptors(isolate) == *descriptors) {
    next = current.GetBackPointer();
    if (next.IsUndefined(isolate)) break;
    current.UpdateDescriptors(isolate, *new_descriptors,
                              current.NumberOfOwnDescriptors());
    current = Map::cast(next);
  }
}
844 
// static
// Returns the map for an object created with the given {prototype} (cf. the
// "ObjectCreate" naming): the Object function's initial map when the
// prototype already matches, the dedicated slow map for a null prototype, a
// map cached on the prototype's PrototypeInfo for JSObject prototypes, and a
// prototype transition otherwise.
Handle<Map> Map::GetObjectCreateMap(Isolate* isolate,
                                    Handle<HeapObject> prototype) {
  Handle<Map> map(isolate->native_context()->object_function().initial_map(),
                  isolate);
  if (map->prototype() == *prototype) return map;
  if (prototype->IsNull(isolate)) {
    return isolate->slow_object_with_null_prototype_map();
  }
  if (prototype->IsJSObject()) {
    Handle<JSObject> js_prototype = Handle<JSObject>::cast(prototype);
    if (!js_prototype->map().is_prototype_map()) {
      JSObject::OptimizeAsPrototype(js_prototype);
    }
    Handle<PrototypeInfo> info =
        Map::GetOrCreatePrototypeInfo(js_prototype, isolate);
    // TODO(verwaest): Use inobject slack tracking for this map.
    if (info->HasObjectCreateMap()) {
      // Reuse the map cached on the prototype's PrototypeInfo.
      map = handle(info->ObjectCreateMap(), isolate);
    } else {
      // Create a fresh copy of the initial map with the new prototype and
      // cache it on the PrototypeInfo for subsequent calls.
      map = Map::CopyInitialMap(isolate, map);
      Map::SetPrototype(isolate, map, prototype);
      PrototypeInfo::SetObjectCreateMap(info, map);
    }
    return map;
  }

  return Map::TransitionToPrototype(isolate, map, prototype);
}
874 
ContainsMap(MapHandles const & maps,Map map)875 static bool ContainsMap(MapHandles const& maps, Map map) {
876   DCHECK(!map.is_null());
877   for (Handle<Map> current : maps) {
878     if (!current.is_null() && *current == map) return true;
879   }
880   return false;
881 }
882 
HasElementsKind(MapHandles const & maps,ElementsKind elements_kind)883 static bool HasElementsKind(MapHandles const& maps,
884                             ElementsKind elements_kind) {
885   for (Handle<Map> current : maps) {
886     if (!current.is_null() && current->elements_kind() == elements_kind)
887       return true;
888   }
889   return false;
890 }
891 
// Searches {candidates} for a map that this map could be transitioned to via
// an elements kind change: walking the root map's elements kind transitions
// and replaying this map's property transitions on top of each. Packed
// candidates are preferred when this map is packed. Returns a null map when
// no suitable candidate exists.
Map Map::FindElementsKindTransitionedMap(Isolate* isolate,
                                         MapHandles const& candidates,
                                         ConcurrencyMode cmode) {
  DisallowGarbageCollection no_gc;

  if (IsDetached(isolate)) return Map();

  ElementsKind kind = elements_kind();
  bool is_packed = IsFastPackedElementsKind(kind);

  Map transition;
  if (IsTransitionableFastElementsKind(kind)) {
    // Check the state of the root map.
    Map root_map = FindRootMap(isolate);
    if (!EquivalentToForElementsKindTransition(root_map, cmode)) return Map();
    root_map = root_map.LookupElementsTransitionMap(isolate, kind, cmode);
    DCHECK(!root_map.is_null());
    // Starting from the next existing elements kind transition try to
    // replay the property transitions that does not involve instance rewriting
    // (ElementsTransitionAndStoreStub does not support that).
    for (root_map = root_map.ElementsTransitionMap(isolate, cmode);
         !root_map.is_null() && root_map.has_fast_elements();
         root_map = root_map.ElementsTransitionMap(isolate, cmode)) {
      // If root_map's elements kind doesn't match any of the elements kind in
      // the candidates there is no need to do any additional work.
      if (!HasElementsKind(candidates, root_map.elements_kind())) continue;
      Map current =
          root_map.TryReplayPropertyTransitions(isolate, *this, cmode);
      if (current.is_null()) continue;
      if (InstancesNeedRewriting(current, cmode)) continue;

      const bool current_is_packed =
          IsFastPackedElementsKind(current.elements_kind());
      // Accept the candidate, but never trade a packed map for a holey one.
      if (ContainsMap(candidates, current) &&
          (is_packed || !current_is_packed)) {
        transition = current;
        is_packed = is_packed && current_is_packed;
      }
    }
  }
  return transition;
}
934 
FindClosestElementsTransition(Isolate * isolate,Map map,ElementsKind to_kind,ConcurrencyMode cmode)935 static Map FindClosestElementsTransition(Isolate* isolate, Map map,
936                                          ElementsKind to_kind,
937                                          ConcurrencyMode cmode) {
938   DisallowGarbageCollection no_gc;
939   // Ensure we are requested to search elements kind transition "near the root".
940   DCHECK_EQ(map.FindRootMap(isolate).NumberOfOwnDescriptors(),
941             map.NumberOfOwnDescriptors());
942   Map current_map = map;
943 
944   ElementsKind kind = map.elements_kind();
945   while (kind != to_kind) {
946     Map next_map = current_map.ElementsTransitionMap(isolate, cmode);
947     if (next_map.is_null()) return current_map;
948     kind = next_map.elements_kind();
949     current_map = next_map;
950   }
951 
952   DCHECK_EQ(to_kind, current_map.elements_kind());
953   return current_map;
954 }
955 
LookupElementsTransitionMap(Isolate * isolate,ElementsKind to_kind,ConcurrencyMode cmode)956 Map Map::LookupElementsTransitionMap(Isolate* isolate, ElementsKind to_kind,
957                                      ConcurrencyMode cmode) {
958   Map to_map = FindClosestElementsTransition(isolate, *this, to_kind, cmode);
959   if (to_map.elements_kind() == to_kind) return to_map;
960   return Map();
961 }
962 
IsMapInArrayPrototypeChain(Isolate * isolate) const963 bool Map::IsMapInArrayPrototypeChain(Isolate* isolate) const {
964   if (isolate->initial_array_prototype()->map() == *this) {
965     return true;
966   }
967 
968   if (isolate->initial_object_prototype()->map() == *this) {
969     return true;
970   }
971 
972   return false;
973 }
974 
// Returns a map equivalent to {map} but with elements kind {to_kind}. Fast
// paths handle the native context's sloppy-arguments map pair, its cached
// initial JSArray maps, and stepping back to the packed variant of a holey
// kind via the back pointer; otherwise the map is either copied without a
// transition or reconfigured through the MapUpdater.
Handle<Map> Map::TransitionElementsTo(Isolate* isolate, Handle<Map> map,
                                      ElementsKind to_kind) {
  ElementsKind from_kind = map->elements_kind();
  if (from_kind == to_kind) return map;

  Context native_context = isolate->context().native_context();
  if (from_kind == FAST_SLOPPY_ARGUMENTS_ELEMENTS) {
    // The fast/slow aliased arguments maps form a fixed pair stored on the
    // native context.
    if (*map == native_context.fast_aliased_arguments_map()) {
      DCHECK_EQ(SLOW_SLOPPY_ARGUMENTS_ELEMENTS, to_kind);
      return handle(native_context.slow_aliased_arguments_map(), isolate);
    }
  } else if (from_kind == SLOW_SLOPPY_ARGUMENTS_ELEMENTS) {
    if (*map == native_context.slow_aliased_arguments_map()) {
      DCHECK_EQ(FAST_SLOPPY_ARGUMENTS_ELEMENTS, to_kind);
      return handle(native_context.fast_aliased_arguments_map(), isolate);
    }
  } else if (IsFastElementsKind(from_kind) && IsFastElementsKind(to_kind)) {
    // Reuse map transitions for JSArrays.
    DisallowGarbageCollection no_gc;
    if (native_context.GetInitialJSArrayMap(from_kind) == *map) {
      Object maybe_transitioned_map =
          native_context.get(Context::ArrayMapIndex(to_kind));
      if (maybe_transitioned_map.IsMap()) {
        return handle(Map::cast(maybe_transitioned_map), isolate);
      }
    }
  }

  DCHECK(!map->IsUndefined(isolate));
  // Check if we can go back in the elements kind transition chain.
  if (IsHoleyElementsKind(from_kind) &&
      to_kind == GetPackedElementsKind(from_kind) &&
      map->GetBackPointer().IsMap() &&
      Map::cast(map->GetBackPointer()).elements_kind() == to_kind) {
    return handle(Map::cast(map->GetBackPointer()), isolate);
  }

  bool allow_store_transition = IsTransitionElementsKind(from_kind);
  // Only store fast element maps in ascending generality.
  if (IsFastElementsKind(to_kind)) {
    allow_store_transition =
        allow_store_transition && IsTransitionableFastElementsKind(from_kind) &&
        IsMoreGeneralElementsKindTransition(from_kind, to_kind);
  }

  if (!allow_store_transition) {
    // Produce an unconnected copy rather than recording a transition.
    return Map::CopyAsElementsKind(isolate, map, to_kind, OMIT_TRANSITION);
  }

  return MapUpdater{isolate, map}.ReconfigureElementsKind(to_kind);
}
1026 
// Copies {map} repeatedly, stepping through the elements kind transition
// sequence until {to_kind} is reached. Transitions are recorded in the
// transition tree unless {map} is detached.
static Handle<Map> AddMissingElementsTransitions(Isolate* isolate,
                                                 Handle<Map> map,
                                                 ElementsKind to_kind) {
  DCHECK(IsTransitionElementsKind(map->elements_kind()));

  Handle<Map> current_map = map;

  ElementsKind kind = map->elements_kind();
  TransitionFlag flag;
  if (map->IsDetached(isolate)) {
    // Detached maps are not part of the transition tree; do not record
    // transitions for them.
    flag = OMIT_TRANSITION;
  } else {
    flag = INSERT_TRANSITION;
    if (IsFastElementsKind(kind)) {
      // Walk the fast elements kinds one transition step at a time.
      while (kind != to_kind && !IsTerminalElementsKind(kind)) {
        kind = GetNextTransitionElementsKind(kind);
        current_map = Map::CopyAsElementsKind(isolate, current_map, kind, flag);
      }
    }
  }

  // In case we are exiting the fast elements kind system, just add the map in
  // the end.
  if (kind != to_kind) {
    current_map = Map::CopyAsElementsKind(isolate, current_map, to_kind, flag);
  }

  DCHECK(current_map->elements_kind() == to_kind);
  return current_map;
}
1057 
1058 // static
TryAsElementsKind(Isolate * isolate,Handle<Map> map,ElementsKind kind,ConcurrencyMode cmode)1059 base::Optional<Map> Map::TryAsElementsKind(Isolate* isolate, Handle<Map> map,
1060                                            ElementsKind kind,
1061                                            ConcurrencyMode cmode) {
1062   Map closest_map = FindClosestElementsTransition(isolate, *map, kind, cmode);
1063   if (closest_map.elements_kind() != kind) return {};
1064   return closest_map;
1065 }
1066 
1067 // static
AsElementsKind(Isolate * isolate,Handle<Map> map,ElementsKind kind)1068 Handle<Map> Map::AsElementsKind(Isolate* isolate, Handle<Map> map,
1069                                 ElementsKind kind) {
1070   Handle<Map> closest_map(
1071       FindClosestElementsTransition(isolate, *map, kind,
1072                                     ConcurrencyMode::kSynchronous),
1073       isolate);
1074 
1075   if (closest_map->elements_kind() == kind) {
1076     return closest_map;
1077   }
1078 
1079   return AddMissingElementsTransitions(isolate, closest_map, kind);
1080 }
1081 
NumberOfEnumerableProperties() const1082 int Map::NumberOfEnumerableProperties() const {
1083   int result = 0;
1084   DescriptorArray descs = instance_descriptors(kRelaxedLoad);
1085   for (InternalIndex i : IterateOwnDescriptors()) {
1086     if ((descs.GetDetails(i).attributes() & ONLY_ENUMERABLE) == 0 &&
1087         !descs.GetKey(i).FilterKey(ENUMERABLE_STRINGS)) {
1088       result++;
1089     }
1090   }
1091   return result;
1092 }
1093 
NextFreePropertyIndex() const1094 int Map::NextFreePropertyIndex() const {
1095   int number_of_own_descriptors = NumberOfOwnDescriptors();
1096   DescriptorArray descs = instance_descriptors(kRelaxedLoad);
1097   // Search properties backwards to find the last field.
1098   for (int i = number_of_own_descriptors - 1; i >= 0; --i) {
1099     PropertyDetails details = descs.GetDetails(InternalIndex(i));
1100     if (details.location() == PropertyLocation::kField) {
1101       return details.field_index() + details.field_width_in_words();
1102     }
1103   }
1104   return 0;
1105 }
1106 
OnlyHasSimpleProperties() const1107 bool Map::OnlyHasSimpleProperties() const {
1108   // Wrapped string elements aren't explicitly stored in the elements backing
1109   // store, but are loaded indirectly from the underlying string.
1110   return !IsStringWrapperElementsKind(elements_kind()) &&
1111          !IsSpecialReceiverMap() && !is_dictionary_map();
1112 }
1113 
// Conservatively answers whether any object on this map's prototype chain may
// have read-only or otherwise non-fast elements: custom-elements receivers,
// frozen elements kinds, or (slow-arguments) element dictionaries that
// require slow elements.
bool Map::MayHaveReadOnlyElementsInPrototypeChain(Isolate* isolate) {
  for (PrototypeIterator iter(isolate, *this); !iter.IsAtEnd();
       iter.Advance()) {
    // Be conservative, don't look into any JSReceivers that may have custom
    // elements. For example, into JSProxies, String wrappers (which have have
    // non-configurable, non-writable elements), API objects, etc.
    if (iter.GetCurrent().map().IsCustomElementsReceiverMap()) return true;

    JSObject current = iter.GetCurrent<JSObject>();
    ElementsKind elements_kind = current.GetElementsKind(isolate);
    if (IsFrozenElementsKind(elements_kind)) return true;

    if (IsDictionaryElementsKind(elements_kind) &&
        current.element_dictionary(isolate).requires_slow_elements()) {
      return true;
    }

    if (IsSlowArgumentsElementsKind(elements_kind)) {
      // For sloppy arguments, the actual elements live in the dictionary
      // stored in the SloppyArgumentsElements' arguments slot.
      SloppyArgumentsElements elements =
          SloppyArgumentsElements::cast(current.elements(isolate));
      Object arguments = elements.arguments();
      if (NumberDictionary::cast(arguments).requires_slow_elements()) {
        return true;
      }
    }
  }

  return false;
}
1143 
// Allocates a fresh map that copies {src_handle}'s instance type, bit fields,
// constructor and prototype, using the given instance size and in-object
// property count. Descriptor-related state is reset on the copy: it owns
// (zero) descriptors, has an invalid enum cache, and is not deprecated.
Handle<Map> Map::RawCopy(Isolate* isolate, Handle<Map> src_handle,
                         int instance_size, int inobject_properties) {
  Handle<Map> result = isolate->factory()->NewMap(
      src_handle->instance_type(), instance_size, TERMINAL_FAST_ELEMENTS_KIND,
      inobject_properties);
  // We have to set the bitfields before any potential GCs could happen because
  // heap verification might fail otherwise.
  {
    DisallowGarbageCollection no_gc;
    Map src = *src_handle;
    Map raw = *result;
    raw.set_constructor_or_back_pointer(src.GetConstructor());
    raw.set_bit_field(src.bit_field());
    raw.set_bit_field2(src.bit_field2());
    // bit_field3 is copied too, but with the descriptor/deprecation-related
    // bits reset for the fresh map.
    int new_bit_field3 = src.bit_field3();
    new_bit_field3 = Bits3::OwnsDescriptorsBit::update(new_bit_field3, true);
    new_bit_field3 =
        Bits3::NumberOfOwnDescriptorsBits::update(new_bit_field3, 0);
    new_bit_field3 = Bits3::EnumLengthBits::update(new_bit_field3,
                                                   kInvalidEnumCacheSentinel);
    new_bit_field3 = Bits3::IsDeprecatedBit::update(new_bit_field3, false);
    new_bit_field3 =
        Bits3::IsInRetainedMapListBit::update(new_bit_field3, false);
    if (!src.is_dictionary_map()) {
      new_bit_field3 = Bits3::IsUnstableBit::update(new_bit_field3, false);
    }
    // Same as bit_field comment above.
    raw.set_bit_field3(new_bit_field3);
    raw.clear_padding();
  }
  Handle<HeapObject> prototype(src_handle->prototype(), isolate);
  Map::SetPrototype(isolate, result, prototype);
  return result;
}
1178 
// Returns a dictionary-mode (normalized) version of {fast_map} with the given
// elements kind. Results for non-prototype maps are cached in the native
// context's NormalizedMapCache. With slow asserts enabled, a cache hit is
// verified to be bit-identical to a freshly normalized map except for a few
// known-divergent fields.
Handle<Map> Map::Normalize(Isolate* isolate, Handle<Map> fast_map,
                           ElementsKind new_elements_kind,
                           PropertyNormalizationMode mode, const char* reason) {
  DCHECK(!fast_map->is_dictionary_map());

  Handle<Object> maybe_cache(isolate->native_context()->normalized_map_cache(),
                             isolate);
  // Prototype maps are never cached.
  bool use_cache =
      !fast_map->is_prototype_map() && !maybe_cache->IsUndefined(isolate);
  Handle<NormalizedMapCache> cache;
  if (use_cache) cache = Handle<NormalizedMapCache>::cast(maybe_cache);

  Handle<Map> new_map;
  if (use_cache &&
      cache->Get(fast_map, new_elements_kind, mode).ToHandle(&new_map)) {
    // Cache hit.
#ifdef VERIFY_HEAP
    if (FLAG_verify_heap) new_map->DictionaryMapVerify(isolate);
#endif
#ifdef ENABLE_SLOW_DCHECKS
    if (FLAG_enable_slow_asserts) {
      // The cached map should match newly created normalized map bit-by-bit,
      // except for the code cache, which can contain some ICs which can be
      // applied to the shared map, dependent code and weak cell cache.
      Handle<Map> fresh = Map::CopyNormalized(isolate, fast_map, mode);
      fresh->set_elements_kind(new_elements_kind);

      STATIC_ASSERT(Map::kPrototypeValidityCellOffset ==
                    Map::kDependentCodeOffset + kTaggedSize);
      // Compare everything up to bit_field3 ...
      DCHECK_EQ(0, memcmp(reinterpret_cast<void*>(fresh->address()),
                          reinterpret_cast<void*>(new_map->address()),
                          Map::kBitField3Offset));
      // The IsInRetainedMapListBit might be different if the {new_map}
      // that we got from the {cache} was already embedded into optimized
      // code somewhere.
      // The IsMigrationTargetBit might be different if the {new_map} from
      // {cache} has already been marked as a migration target.
      constexpr int ignored_bit_field3_bits =
          Bits3::IsInRetainedMapListBit::kMask |
          Bits3::IsMigrationTargetBit::kMask;
      // ... then bit_field3 modulo the ignored bits ...
      DCHECK_EQ(fresh->bit_field3() & ~ignored_bit_field3_bits,
                new_map->bit_field3() & ~ignored_bit_field3_bits);
      // ... then the rest of the map, skipping the dependent code and
      // prototype validity cell fields (and PrototypeInfo for prototype maps).
      int offset = Map::kBitField3Offset + kInt32Size;
      DCHECK_EQ(0, memcmp(reinterpret_cast<void*>(fresh->address() + offset),
                          reinterpret_cast<void*>(new_map->address() + offset),
                          Map::kDependentCodeOffset - offset));
      offset = Map::kPrototypeValidityCellOffset + kTaggedSize;
      if (new_map->is_prototype_map()) {
        // For prototype maps, the PrototypeInfo is not copied.
        STATIC_ASSERT(Map::kTransitionsOrPrototypeInfoOffset ==
                      Map::kPrototypeValidityCellOffset + kTaggedSize);
        offset = kTransitionsOrPrototypeInfoOffset + kTaggedSize;
        DCHECK_EQ(fresh->raw_transitions(),
                  MaybeObject::FromObject(Smi::zero()));
      }
      DCHECK_EQ(0, memcmp(reinterpret_cast<void*>(fresh->address() + offset),
                          reinterpret_cast<void*>(new_map->address() + offset),
                          Map::kSize - offset));
    }
#endif
    if (FLAG_log_maps) {
      LOG(isolate, MapEvent("NormalizeCached", fast_map, new_map, reason));
    }
  } else {
    // Cache miss (or cache unusable): create a fresh normalized map.
    new_map = Map::CopyNormalized(isolate, fast_map, mode);
    new_map->set_elements_kind(new_elements_kind);
    if (use_cache) {
      cache->Set(fast_map, new_map);
      isolate->counters()->maps_normalized()->Increment();
    }
    if (FLAG_log_maps) {
      LOG(isolate, MapEvent("Normalize", fast_map, new_map, reason));
    }
  }
  fast_map->NotifyLeafMapLayoutChange(isolate);
  return new_map;
}
1255 
// Copies {map} into dictionary (slow properties) mode. With
// CLEAR_INOBJECT_PROPERTIES the in-object property slots are dropped and the
// instance size is shrunk accordingly.
Handle<Map> Map::CopyNormalized(Isolate* isolate, Handle<Map> map,
                                PropertyNormalizationMode mode) {
  int new_instance_size = map->instance_size();
  if (mode == CLEAR_INOBJECT_PROPERTIES) {
    new_instance_size -= map->GetInObjectProperties() * kTaggedSize;
  }

  Handle<Map> result = RawCopy(
      isolate, map, new_instance_size,
      mode == CLEAR_INOBJECT_PROPERTIES ? 0 : map->GetInObjectProperties());
  {
    DisallowGarbageCollection no_gc;
    Map raw = *result;
    // Clear the unused_property_fields explicitly as this field should not
    // be accessed for normalized maps.
    raw.SetInObjectUnusedPropertyFields(0);
    raw.set_is_dictionary_map(true);
    raw.set_is_migration_target(false);
    raw.set_may_have_interesting_symbols(true);
    raw.set_construction_counter(kNoSlackTracking);
  }

#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) result->DictionaryMapVerify(isolate);
#endif

  return result;
}
1284 
1285 // Return an immutable prototype exotic object version of the input map.
1286 // Never even try to cache it in the transition tree, as it is intended
1287 // for the global object and its prototype chain, and excluding it saves
1288 // memory on the map transition tree.
1289 
1290 // static
TransitionToImmutableProto(Isolate * isolate,Handle<Map> map)1291 Handle<Map> Map::TransitionToImmutableProto(Isolate* isolate, Handle<Map> map) {
1292   Handle<Map> new_map = Map::Copy(isolate, map, "ImmutablePrototype");
1293   new_map->set_is_immutable_proto(true);
1294   return new_map;
1295 }
1296 
namespace {
// Debug-only sanity check that {map} is an initial map: either its
// constructor function's initial_map(), or one of the well-known function
// maps that are exceptions to that rule; additionally its descriptor array
// must contain exactly its own descriptors.
void EnsureInitialMap(Isolate* isolate, Handle<Map> map) {
#ifdef DEBUG
  Object maybe_constructor = map->GetConstructor();
  DCHECK((maybe_constructor.IsJSFunction() &&
          *map == JSFunction::cast(maybe_constructor).initial_map()) ||
         // Below are the exceptions to the check above.
         // Strict function maps have Function as a constructor but the
         // Function's initial map is a sloppy function map.
         *map == *isolate->strict_function_map() ||
         *map == *isolate->strict_function_with_name_map() ||
         // Same holds for GeneratorFunction and its initial map.
         *map == *isolate->generator_function_map() ||
         *map == *isolate->generator_function_with_name_map() ||
         // AsyncFunction has Null as a constructor.
         *map == *isolate->async_function_map() ||
         *map == *isolate->async_function_with_name_map());
#endif
  // Initial maps must not contain descriptors in the descriptors array
  // that do not belong to the map.
  DCHECK_EQ(map->NumberOfOwnDescriptors(),
            map->instance_descriptors(isolate).number_of_descriptors());
}
}  // namespace
1321 
// static
// Normalizes an initial map (see CopyNormalized); debug-checks first that
// {map} really is an initial map.
Handle<Map> Map::CopyInitialMapNormalized(Isolate* isolate, Handle<Map> map,
                                          PropertyNormalizationMode mode) {
  EnsureInitialMap(isolate, map);
  return CopyNormalized(isolate, map, mode);
}
1328 
// static
// Copies an initial map with the given size parameters. When the source has
// own descriptors, the copy shares the same descriptor array without taking
// ownership of it.
Handle<Map> Map::CopyInitialMap(Isolate* isolate, Handle<Map> map,
                                int instance_size, int inobject_properties,
                                int unused_property_fields) {
  EnsureInitialMap(isolate, map);

  Handle<Map> result =
      RawCopy(isolate, map, instance_size, inobject_properties);

  // Please note instance_type and instance_size are set when allocated.
  result->SetInObjectUnusedPropertyFields(unused_property_fields);

  int number_of_own_descriptors = map->NumberOfOwnDescriptors();
  if (number_of_own_descriptors > 0) {
    // The copy will use the same descriptors array without ownership.
    DescriptorArray descriptors = map->instance_descriptors(isolate);
    result->set_owns_descriptors(false);
    result->UpdateDescriptors(isolate, descriptors, number_of_own_descriptors);

    DCHECK_EQ(result->NumberOfFields(ConcurrencyMode::kSynchronous),
              result->GetInObjectProperties() - result->UnusedPropertyFields());
  }

  return result;
}
1354 
CopyDropDescriptors(Isolate * isolate,Handle<Map> map)1355 Handle<Map> Map::CopyDropDescriptors(Isolate* isolate, Handle<Map> map) {
1356   Handle<Map> result =
1357       RawCopy(isolate, map, map->instance_size(),
1358               map->IsJSObjectMap() ? map->GetInObjectProperties() : 0);
1359 
1360   // Please note instance_type and instance_size are set when allocated.
1361   if (map->IsJSObjectMap()) {
1362     result->CopyUnusedPropertyFields(*map);
1363   }
1364   map->NotifyLeafMapLayoutChange(isolate);
1365   return result;
1366 }
1367 
// Extends {map} with {descriptor} by appending it to {map}'s owned (shared)
// descriptor array and creating a child map connected via a simple property
// transition.
Handle<Map> Map::ShareDescriptor(Isolate* isolate, Handle<Map> map,
                                 Handle<DescriptorArray> descriptors,
                                 Descriptor* descriptor) {
  // Sanity check. This path is only to be taken if the map owns its descriptor
  // array, implying that its NumberOfOwnDescriptors equals the number of
  // descriptors in the descriptor array.
  DCHECK_EQ(map->NumberOfOwnDescriptors(),
            map->instance_descriptors(isolate).number_of_descriptors());

  Handle<Map> result = CopyDropDescriptors(isolate, map);
  Handle<Name> name = descriptor->GetKey();

  // Properly mark the {result} if the {name} is an "interesting symbol".
  if (name->IsInterestingSymbol()) {
    result->set_may_have_interesting_symbols(true);
  }

  // Ensure there's space for the new descriptor in the shared descriptor array.
  if (descriptors->number_of_slack_descriptors() == 0) {
    int old_size = descriptors->number_of_descriptors();
    if (old_size == 0) {
      descriptors = DescriptorArray::Allocate(isolate, 0, 1);
    } else {
      int slack = SlackForArraySize(old_size, kMaxNumberOfDescriptors);
      EnsureDescriptorSlack(isolate, map, slack);
      // EnsureDescriptorSlack may have installed a new descriptor array on
      // {map}; re-load it.
      descriptors = handle(map->instance_descriptors(isolate), isolate);
    }
  }

  {
    DisallowGarbageCollection no_gc;
    descriptors->Append(descriptor);
    result->InitializeDescriptors(isolate, *descriptors);
  }

  DCHECK(result->NumberOfOwnDescriptors() == map->NumberOfOwnDescriptors() + 1);
  ConnectTransition(isolate, map, result, name, SIMPLE_PROPERTY_TRANSITION);

  return result;
}
1408 
// Registers {child} as a transition from {parent} under {name}. A parent that
// already has a back pointer gives up descriptor ownership; detached parents
// are only logged and not inserted into the transition tree.
void Map::ConnectTransition(Isolate* isolate, Handle<Map> parent,
                            Handle<Map> child, Handle<Name> name,
                            SimpleTransitionFlag flag) {
  DCHECK_IMPLIES(name->IsInterestingSymbol(),
                 child->may_have_interesting_symbols());
  DCHECK_IMPLIES(parent->may_have_interesting_symbols(),
                 child->may_have_interesting_symbols());
  if (!parent->GetBackPointer().IsUndefined(isolate)) {
    // {parent} is itself a transition target, so it no longer exclusively
    // owns its descriptor array.
    parent->set_owns_descriptors(false);
  } else if (!parent->IsDetached(isolate)) {
    // |parent| is initial map and it must not contain descriptors in the
    // descriptors array that do not belong to the map.
    DCHECK_EQ(parent->NumberOfOwnDescriptors(),
              parent->instance_descriptors(isolate).number_of_descriptors());
  }
  if (parent->IsDetached(isolate)) {
    DCHECK(child->IsDetached(isolate));
    if (FLAG_log_maps) {
      LOG(isolate, MapEvent("Transition", parent, child, "prototype", name));
    }
  } else {
    TransitionsAccessor::Insert(isolate, parent, name, child, flag);
    if (FLAG_log_maps) {
      LOG(isolate, MapEvent("Transition", parent, child, "", name));
    }
  }
}
1436 
// Copies {map} and installs {descriptors} on the copy. When requested
// (INSERT_TRANSITION) and the transition tree has room, the copy is connected
// to {map} via a transition under {name}; otherwise all fields in
// {descriptors} are generalized before installation.
Handle<Map> Map::CopyReplaceDescriptors(Isolate* isolate, Handle<Map> map,
                                        Handle<DescriptorArray> descriptors,
                                        TransitionFlag flag,
                                        MaybeHandle<Name> maybe_name,
                                        const char* reason,
                                        SimpleTransitionFlag simple_flag) {
  DCHECK(descriptors->IsSortedNoDuplicates());

  Handle<Map> result = CopyDropDescriptors(isolate, map);
  bool is_connected = false;

  // Properly mark the {result} if the {name} is an "interesting symbol".
  Handle<Name> name;
  if (maybe_name.ToHandle(&name) && name->IsInterestingSymbol()) {
    result->set_may_have_interesting_symbols(true);
  }

  if (map->is_prototype_map()) {
    result->InitializeDescriptors(isolate, *descriptors);
  } else {
    if (flag == INSERT_TRANSITION &&
        TransitionsAccessor::CanHaveMoreTransitions(isolate, map)) {
      result->InitializeDescriptors(isolate, *descriptors);

      DCHECK(!maybe_name.is_null());
      ConnectTransition(isolate, map, result, name, simple_flag);
      is_connected = true;
    } else {
      descriptors->GeneralizeAllFields();
      result->InitializeDescriptors(isolate, *descriptors);
    }
  }
  // Connected copies were already logged by ConnectTransition; only log the
  // unconnected ones here.
  if (FLAG_log_maps && !is_connected) {
    LOG(isolate, MapEvent("ReplaceDescriptors", map, result, reason,
                          maybe_name.is_null() ? Handle<HeapObject>() : name));
  }
  return result;
}
1475 
// Creates transition tree starting from |split_map| and adding all descriptors
// starting from descriptor with index |split_map|.NumberOfOwnDescriptors().
// The way how it is done is tricky because of GC and special descriptors
// marking logic.
Handle<Map> Map::AddMissingTransitions(Isolate* isolate, Handle<Map> split_map,
                                       Handle<DescriptorArray> descriptors) {
  DCHECK(descriptors->IsSortedNoDuplicates());
  int split_nof = split_map->NumberOfOwnDescriptors();
  int nof_descriptors = descriptors->number_of_descriptors();
  DCHECK_LT(split_nof, nof_descriptors);

  // Start with creating last map which will own full descriptors array.
  // This is necessary to guarantee that GC will mark the whole descriptor
  // array if any of the allocations happening below fail.
  // Number of unused properties is temporarily incorrect and the layout
  // descriptor could unnecessarily be in slow mode but we will fix after
  // all the other intermediate maps are created.
  // Also the last map might have interesting symbols, we temporarily set
  // the flag and clear it right before the descriptors are installed. This
  // makes heap verification happy and ensures the flag ends up accurate.
  Handle<Map> last_map = CopyDropDescriptors(isolate, split_map);
  last_map->InitializeDescriptors(isolate, *descriptors);
  last_map->SetInObjectUnusedPropertyFields(0);
  last_map->set_may_have_interesting_symbols(true);

  // During creation of intermediate maps we violate descriptors sharing
  // invariant since the last map is not yet connected to the transition tree
  // we create here. But it is safe because GC never trims map's descriptors
  // if there are no dead transitions from that map and this is exactly the
  // case for all the intermediate maps we create here.
  Handle<Map> map = split_map;
  for (InternalIndex i : InternalIndex::Range(split_nof, nof_descriptors - 1)) {
    Handle<Map> new_map = CopyDropDescriptors(isolate, map);
    InstallDescriptors(isolate, map, new_map, i, descriptors);

    map = new_map;
  }
  map->NotifyLeafMapLayoutChange(isolate);
  last_map->set_may_have_interesting_symbols(false);
  // Install the final descriptor; {last_map} becomes the end of the newly
  // created transition chain.
  InstallDescriptors(isolate, map, last_map, InternalIndex(nof_descriptors - 1),
                     descriptors);
  return last_map;
}
1519 
1520 // Since this method is used to rewrite an existing transition tree, it can
1521 // always insert transitions without checking.
void Map::InstallDescriptors(Isolate* isolate, Handle<Map> parent,
                             Handle<Map> child, InternalIndex new_descriptor,
                             Handle<DescriptorArray> descriptors) {
  DCHECK(descriptors->IsSortedNoDuplicates());

  // The child owns descriptors up to and including |new_descriptor|.
  child->SetInstanceDescriptors(isolate, *descriptors,
                                new_descriptor.as_int() + 1);
  child->CopyUnusedPropertyFields(*parent);
  PropertyDetails details = descriptors->GetDetails(new_descriptor);
  if (details.location() == PropertyLocation::kField) {
    // The new descriptor consumes one property field slot.
    child->AccountAddedPropertyField();
  }

  Handle<Name> name = handle(descriptors->GetKey(new_descriptor), isolate);
  // Propagate the "may have interesting symbols" bit from the parent and
  // the name being added.
  if (parent->may_have_interesting_symbols() || name->IsInterestingSymbol()) {
    child->set_may_have_interesting_symbols(true);
  }
  ConnectTransition(isolate, parent, child, name, SIMPLE_PROPERTY_TRANSITION);
}
1541 
// Returns a copy of |map| with elements kind |kind|. With INSERT_TRANSITION
// the copy is connected to |map| via an elements-kind transition when
// possible; otherwise a free-floating copy is returned.
Handle<Map> Map::CopyAsElementsKind(Isolate* isolate, Handle<Map> map,
                                    ElementsKind kind, TransitionFlag flag) {
  // Only certain objects are allowed to have non-terminal fast transitional
  // elements kinds.
  DCHECK(map->IsJSObjectMap());
  DCHECK_IMPLIES(
      !map->CanHaveFastTransitionableElementsKind(),
      IsDictionaryElementsKind(kind) || IsTerminalElementsKind(kind));

  Map maybe_elements_transition_map;
  if (flag == INSERT_TRANSITION) {
    // Ensure we are requested to add elements kind transition "near the root".
    DCHECK_EQ(map->FindRootMap(isolate).NumberOfOwnDescriptors(),
              map->NumberOfOwnDescriptors());

    maybe_elements_transition_map =
        map->ElementsTransitionMap(isolate, ConcurrencyMode::kSynchronous);
    // A pre-existing elements-kind transition is only tolerated when both it
    // and the requested kind are DICTIONARY_ELEMENTS.
    DCHECK(
        maybe_elements_transition_map.is_null() ||
        (maybe_elements_transition_map.elements_kind() == DICTIONARY_ELEMENTS &&
         kind == DICTIONARY_ELEMENTS));
    DCHECK(!IsFastElementsKind(kind) ||
           IsMoreGeneralElementsKindTransition(map->elements_kind(), kind));
    DCHECK(kind != map->elements_kind());
  }

  // Insert a transition only when requested, allowed, and not already present.
  bool insert_transition =
      flag == INSERT_TRANSITION &&
      TransitionsAccessor::CanHaveMoreTransitions(isolate, map) &&
      maybe_elements_transition_map.is_null();

  if (insert_transition) {
    Handle<Map> new_map = CopyForElementsTransition(isolate, map);
    new_map->set_elements_kind(kind);

    Handle<Name> name = isolate->factory()->elements_transition_symbol();
    ConnectTransition(isolate, map, new_map, name, SPECIAL_TRANSITION);
    return new_map;
  }

  // Create a new free-floating map only if we are not allowed to store it.
  Handle<Map> new_map = Copy(isolate, map, "CopyAsElementsKind");
  new_map->set_elements_kind(kind);
  return new_map;
}
1587 
// Returns the initial map appropriate for the language mode of
// |shared_info|: |initial_map| itself for sloppy mode, otherwise a
// strict-mode map cached as a special transition on |initial_map|.
Handle<Map> Map::AsLanguageMode(Isolate* isolate, Handle<Map> initial_map,
                                Handle<SharedFunctionInfo> shared_info) {
  DCHECK(InstanceTypeChecker::IsJSFunction(initial_map->instance_type()));
  // Initial map for sloppy mode function is stored in the function
  // constructor. Initial maps for strict mode are cached as special transitions
  // using |strict_function_transition_symbol| as a key.
  if (is_sloppy(shared_info->language_mode())) return initial_map;

  Handle<Map> function_map(Map::cast(isolate->native_context()->get(
                               shared_info->function_map_index())),
                           isolate);

  STATIC_ASSERT(LanguageModeSize == 2);
  DCHECK_EQ(LanguageMode::kStrict, shared_info->language_mode());
  Handle<Symbol> transition_symbol =
      isolate->factory()->strict_function_transition_symbol();
  MaybeHandle<Map> maybe_transition = TransitionsAccessor::SearchSpecial(
      isolate, initial_map, *transition_symbol);
  // Reuse a previously created strict-mode map if one is cached.
  if (!maybe_transition.is_null()) {
    return maybe_transition.ToHandleChecked();
  }
  initial_map->NotifyLeafMapLayoutChange(isolate);

  // Create new map taking descriptors from the |function_map| and all
  // the other details from the |initial_map|.
  Handle<Map> map =
      Map::CopyInitialMap(isolate, function_map, initial_map->instance_size(),
                          initial_map->GetInObjectProperties(),
                          initial_map->UnusedPropertyFields());
  map->SetConstructor(initial_map->GetConstructor());
  map->set_prototype(initial_map->prototype());
  map->set_construction_counter(initial_map->construction_counter());

  // Cache the new map on |initial_map| so later lookups find it above.
  if (TransitionsAccessor::CanHaveMoreTransitions(isolate, initial_map)) {
    Map::ConnectTransition(isolate, initial_map, map, transition_symbol,
                           SPECIAL_TRANSITION);
  }
  return map;
}
1627 
// Copies |map| for use as the target of an elements-kind transition,
// sharing or duplicating the descriptor array depending on ownership.
Handle<Map> Map::CopyForElementsTransition(Isolate* isolate, Handle<Map> map) {
  DCHECK(!map->IsDetached(isolate));
  Handle<Map> new_map = CopyDropDescriptors(isolate, map);

  if (map->owns_descriptors()) {
    // In case the map owned its own descriptors, share the descriptors and
    // transfer ownership to the new map.
    // The properties did not change, so reuse descriptors.
    map->set_owns_descriptors(false);
    new_map->InitializeDescriptors(isolate, map->instance_descriptors(isolate));
  } else {
    // In case the map did not own its own descriptors, a split is forced by
    // copying the map; creating a new descriptor array cell.
    Handle<DescriptorArray> descriptors(map->instance_descriptors(isolate),
                                        isolate);
    int number_of_own_descriptors = map->NumberOfOwnDescriptors();
    Handle<DescriptorArray> new_descriptors = DescriptorArray::CopyUpTo(
        isolate, descriptors, number_of_own_descriptors);
    new_map->InitializeDescriptors(isolate, *new_descriptors);
  }
  return new_map;
}
1650 
Copy(Isolate * isolate,Handle<Map> map,const char * reason)1651 Handle<Map> Map::Copy(Isolate* isolate, Handle<Map> map, const char* reason) {
1652   Handle<DescriptorArray> descriptors(map->instance_descriptors(isolate),
1653                                       isolate);
1654   int number_of_own_descriptors = map->NumberOfOwnDescriptors();
1655   Handle<DescriptorArray> new_descriptors = DescriptorArray::CopyUpTo(
1656       isolate, descriptors, number_of_own_descriptors);
1657   return CopyReplaceDescriptors(isolate, map, new_descriptors, OMIT_TRANSITION,
1658                                 MaybeHandle<Name>(), reason,
1659                                 SPECIAL_TRANSITION);
1660 }
1661 
// Creates a fresh map derived from the Object function's initial map with
// room for |inobject_properties| in-object properties (clamped to the
// maximum allowed).
Handle<Map> Map::Create(Isolate* isolate, int inobject_properties) {
  Handle<Map> copy_handle =
      Copy(isolate, handle(isolate->object_function()->initial_map(), isolate),
           "MapCreate");
  // The raw |copy| below must not be held across an allocation.
  DisallowGarbageCollection no_gc;
  Map copy = *copy_handle;

  // Check that we do not overflow the instance size when adding the extra
  // inobject properties. If the instance size overflows, we allocate as many
  // properties as we can as inobject properties.
  if (inobject_properties > JSObject::kMaxInObjectProperties) {
    inobject_properties = JSObject::kMaxInObjectProperties;
  }

  int new_instance_size =
      JSObject::kHeaderSize + kTaggedSize * inobject_properties;

  // Adjust the map with the extra inobject properties.
  copy.set_instance_size(new_instance_size);
  copy.SetInObjectPropertiesStartInWords(JSObject::kHeaderSize / kTaggedSize);
  DCHECK_EQ(copy.GetInObjectProperties(), inobject_properties);
  copy.SetInObjectUnusedPropertyFields(inobject_properties);
  // The instance layout changed above, so recompute the visitor id.
  copy.set_visitor_id(Map::GetVisitorId(copy));

  return copy_handle;
}
1688 
// Copies |map| for a preventExtensions/seal/freeze operation: adds
// |attrs_to_add| to all own descriptors, marks the result non-extensible,
// and switches to an appropriate non-extensible/sealed/frozen/dictionary
// elements kind.
Handle<Map> Map::CopyForPreventExtensions(
    Isolate* isolate, Handle<Map> map, PropertyAttributes attrs_to_add,
    Handle<Symbol> transition_marker, const char* reason,
    bool old_map_is_dictionary_elements_kind) {
  int num_descriptors = map->NumberOfOwnDescriptors();
  Handle<DescriptorArray> new_desc = DescriptorArray::CopyUpToAddAttributes(
      isolate, handle(map->instance_descriptors(isolate), isolate),
      num_descriptors, attrs_to_add);
  // Do not track transitions during bootstrapping.
  TransitionFlag flag =
      isolate->bootstrapper()->IsActive() ? OMIT_TRANSITION : INSERT_TRANSITION;
  Handle<Map> new_map =
      CopyReplaceDescriptors(isolate, map, new_desc, flag, transition_marker,
                             reason, SPECIAL_TRANSITION);
  new_map->set_is_extensible(false);
  if (!IsTypedArrayOrRabGsabTypedArrayElementsKind(map->elements_kind())) {
    // Default to a slow elements kind; string wrappers keep their special
    // wrapper kind in slow form.
    ElementsKind new_kind = IsStringWrapperElementsKind(map->elements_kind())
                                ? SLOW_STRING_WRAPPER_ELEMENTS
                                : DICTIONARY_ELEMENTS;
    // When enabled, prefer the dedicated sealed/frozen/non-extensible fast
    // elements kinds over falling back to dictionary elements.
    if (FLAG_enable_sealed_frozen_elements_kind &&
        !old_map_is_dictionary_elements_kind) {
      switch (map->elements_kind()) {
        case PACKED_ELEMENTS:
          if (attrs_to_add == SEALED) {
            new_kind = PACKED_SEALED_ELEMENTS;
          } else if (attrs_to_add == FROZEN) {
            new_kind = PACKED_FROZEN_ELEMENTS;
          } else {
            new_kind = PACKED_NONEXTENSIBLE_ELEMENTS;
          }
          break;
        case PACKED_NONEXTENSIBLE_ELEMENTS:
          if (attrs_to_add == SEALED) {
            new_kind = PACKED_SEALED_ELEMENTS;
          } else if (attrs_to_add == FROZEN) {
            new_kind = PACKED_FROZEN_ELEMENTS;
          }
          break;
        case PACKED_SEALED_ELEMENTS:
          if (attrs_to_add == FROZEN) {
            new_kind = PACKED_FROZEN_ELEMENTS;
          }
          break;
        case HOLEY_ELEMENTS:
          if (attrs_to_add == SEALED) {
            new_kind = HOLEY_SEALED_ELEMENTS;
          } else if (attrs_to_add == FROZEN) {
            new_kind = HOLEY_FROZEN_ELEMENTS;
          } else {
            new_kind = HOLEY_NONEXTENSIBLE_ELEMENTS;
          }
          break;
        case HOLEY_NONEXTENSIBLE_ELEMENTS:
          if (attrs_to_add == SEALED) {
            new_kind = HOLEY_SEALED_ELEMENTS;
          } else if (attrs_to_add == FROZEN) {
            new_kind = HOLEY_FROZEN_ELEMENTS;
          }
          break;
        case HOLEY_SEALED_ELEMENTS:
          if (attrs_to_add == FROZEN) {
            new_kind = HOLEY_FROZEN_ELEMENTS;
          }
          break;
        default:
          break;
      }
    }
    new_map->set_elements_kind(new_kind);
  }
  return new_map;
}
1761 
1762 namespace {
1763 
CanHoldValue(DescriptorArray descriptors,InternalIndex descriptor,PropertyConstness constness,Object value)1764 bool CanHoldValue(DescriptorArray descriptors, InternalIndex descriptor,
1765                   PropertyConstness constness, Object value) {
1766   PropertyDetails details = descriptors.GetDetails(descriptor);
1767   if (details.location() == PropertyLocation::kField) {
1768     if (details.kind() == PropertyKind::kData) {
1769       return IsGeneralizableTo(constness, details.constness()) &&
1770              value.FitsRepresentation(details.representation()) &&
1771              descriptors.GetFieldType(descriptor).NowContains(value);
1772     } else {
1773       DCHECK_EQ(PropertyKind::kAccessor, details.kind());
1774       return false;
1775     }
1776 
1777   } else {
1778     DCHECK_EQ(PropertyLocation::kDescriptor, details.location());
1779     DCHECK_EQ(PropertyConstness::kConst, details.constness());
1780     DCHECK_EQ(PropertyKind::kAccessor, details.kind());
1781     return false;
1782   }
1783   UNREACHABLE();
1784 }
1785 
// Returns |map| unchanged if the descriptor can already hold |value|;
// otherwise reconfigures the descriptor to a data field that can.
Handle<Map> UpdateDescriptorForValue(Isolate* isolate, Handle<Map> map,
                                     InternalIndex descriptor,
                                     PropertyConstness constness,
                                     Handle<Object> value) {
  if (CanHoldValue(map->instance_descriptors(isolate), descriptor, constness,
                   *value)) {
    return map;
  }

  PropertyAttributes attributes =
      map->instance_descriptors(isolate).GetDetails(descriptor).attributes();
  Representation representation = value->OptimalRepresentation(isolate);
  Handle<FieldType> type = value->OptimalType(isolate, representation);

  // Generalize the field via the MapUpdater so it can store |value|.
  MapUpdater mu(isolate, map);
  return mu.ReconfigureToDataField(descriptor, attributes, constness,
                                   representation, type);
}
1804 
1805 }  // namespace
1806 
1807 // static
Handle<Map> Map::PrepareForDataProperty(Isolate* isolate, Handle<Map> map,
                                        InternalIndex descriptor,
                                        PropertyConstness constness,
                                        Handle<Object> value) {
  // Update to the newest map before storing the property.
  map = Update(isolate, map);
  // Dictionaries can store any property value.
  DCHECK(!map->is_dictionary_map());
  // Generalize the descriptor if it cannot hold |value| as-is.
  return UpdateDescriptorForValue(isolate, map, descriptor, constness, value);
}
1818 
// Returns a map for |map| extended with data property |name|. Follows an
// existing transition when one matches; otherwise adds a new field, or
// normalizes to a dictionary map when fast properties are exhausted.
Handle<Map> Map::TransitionToDataProperty(Isolate* isolate, Handle<Map> map,
                                          Handle<Name> name,
                                          Handle<Object> value,
                                          PropertyAttributes attributes,
                                          PropertyConstness constness,
                                          StoreOrigin store_origin) {
  RCS_SCOPE(isolate,
            map->IsDetached(isolate)
                ? RuntimeCallCounterId::kPrototypeMap_TransitionToDataProperty
                : RuntimeCallCounterId::kMap_TransitionToDataProperty);

  DCHECK(name->IsUniqueName());
  DCHECK(!map->is_dictionary_map());

  // Migrate to the newest map before storing the property.
  map = Update(isolate, map);

  // Reuse an existing transition for this name/kind/attributes if present.
  MaybeHandle<Map> maybe_transition = TransitionsAccessor::SearchTransition(
      isolate, map, *name, PropertyKind::kData, attributes);
  Handle<Map> transition;
  if (maybe_transition.ToHandle(&transition)) {
    InternalIndex descriptor = transition->LastAdded();

    DCHECK_EQ(attributes, transition->instance_descriptors(isolate)
                              .GetDetails(descriptor)
                              .attributes());

    return UpdateDescriptorForValue(isolate, transition, descriptor, constness,
                                    value);
  }

  // Do not track transitions during bootstrapping.
  TransitionFlag flag =
      isolate->bootstrapper()->IsActive() ? OMIT_TRANSITION : INSERT_TRANSITION;
  MaybeHandle<Map> maybe_map;
  if (!map->TooManyFastProperties(store_origin)) {
    Representation representation = value->OptimalRepresentation(isolate);
    Handle<FieldType> type = value->OptimalType(isolate, representation);
    maybe_map = Map::CopyWithField(isolate, map, name, type, attributes,
                                   constness, representation, flag);
  }

  // Adding a fast property failed (or was not attempted); normalize instead.
  Handle<Map> result;
  if (!maybe_map.ToHandle(&result)) {
    const char* reason = "TooManyFastProperties";
#if V8_TRACE_MAPS
    // When map logging is on, include the property name in the reason.
    std::unique_ptr<base::ScopedVector<char>> buffer;
    if (FLAG_log_maps) {
      base::ScopedVector<char> name_buffer(100);
      name->NameShortPrint(name_buffer);
      buffer.reset(new base::ScopedVector<char>(128));
      SNPrintF(*buffer, "TooManyFastProperties %s", name_buffer.begin());
      reason = buffer->begin();
    }
#endif
    Handle<Object> maybe_constructor(map->GetConstructor(), isolate);
    if (FLAG_feedback_normalization && map->new_target_is_base() &&
        maybe_constructor->IsJSFunction() &&
        !JSFunction::cast(*maybe_constructor).shared().native()) {
      // Feedback normalization: replace the constructor's initial map with a
      // normalized one and deprecate the old transition tree.
      Handle<JSFunction> constructor =
          Handle<JSFunction>::cast(maybe_constructor);
      DCHECK_NE(*constructor, constructor->native_context().object_function());
      Handle<Map> initial_map(constructor->initial_map(), isolate);
      result = Map::Normalize(isolate, initial_map, CLEAR_INOBJECT_PROPERTIES,
                              reason);
      initial_map->DeprecateTransitionTree(isolate);
      Handle<HeapObject> prototype(result->prototype(), isolate);
      JSFunction::SetInitialMap(isolate, constructor, result, prototype);

      // Deoptimize all code that embeds the previous initial map.
      initial_map->dependent_code().DeoptimizeDependentCodeGroup(
          isolate, DependentCode::kInitialMapChangedGroup);
      // If the normalized initial map is not usable for |map|, normalize
      // |map| itself instead.
      if (!result->EquivalentToForNormalization(*map,
                                                CLEAR_INOBJECT_PROPERTIES)) {
        result =
            Map::Normalize(isolate, map, CLEAR_INOBJECT_PROPERTIES, reason);
      }
    } else {
      result = Map::Normalize(isolate, map, CLEAR_INOBJECT_PROPERTIES, reason);
    }
  }

  return result;
}
1903 
// Returns a map for |map| with accessor property |name| installed (getter
// and/or setter). Reuses a matching transition when possible; falls back to
// normalizing the map whenever the fast representation cannot express the
// requested change.
Handle<Map> Map::TransitionToAccessorProperty(Isolate* isolate, Handle<Map> map,
                                              Handle<Name> name,
                                              InternalIndex descriptor,
                                              Handle<Object> getter,
                                              Handle<Object> setter,
                                              PropertyAttributes attributes) {
  RCS_SCOPE(
      isolate,
      map->IsDetached(isolate)
          ? RuntimeCallCounterId::kPrototypeMap_TransitionToAccessorProperty
          : RuntimeCallCounterId::kMap_TransitionToAccessorProperty);

  // At least one of the accessors needs to be a new value.
  DCHECK(!getter->IsNull(isolate) || !setter->IsNull(isolate));
  DCHECK(name->IsUniqueName());

  // Migrate to the newest map before transitioning to the new property.
  map = Update(isolate, map);

  // Dictionary maps can always have additional data properties.
  if (map->is_dictionary_map()) return map;

  // Prototype maps keep their in-object properties on normalization.
  PropertyNormalizationMode mode = map->is_prototype_map()
                                       ? KEEP_INOBJECT_PROPERTIES
                                       : CLEAR_INOBJECT_PROPERTIES;

  // Reuse an existing accessor transition if it matches exactly.
  MaybeHandle<Map> maybe_transition = TransitionsAccessor::SearchTransition(
      isolate, map, *name, PropertyKind::kAccessor, attributes);
  Handle<Map> transition;
  if (maybe_transition.ToHandle(&transition)) {
    DescriptorArray descriptors = transition->instance_descriptors(isolate);
    InternalIndex last_descriptor = transition->LastAdded();
    DCHECK(descriptors.GetKey(last_descriptor).Equals(*name));

    DCHECK_EQ(PropertyKind::kAccessor,
              descriptors.GetDetails(last_descriptor).kind());
    DCHECK_EQ(attributes, descriptors.GetDetails(last_descriptor).attributes());

    Handle<Object> maybe_pair(descriptors.GetStrongValue(last_descriptor),
                              isolate);
    if (!maybe_pair->IsAccessorPair()) {
      return Map::Normalize(isolate, map, mode,
                            "TransitionToAccessorFromNonPair");
    }

    Handle<AccessorPair> pair = Handle<AccessorPair>::cast(maybe_pair);
    if (!pair->Equals(*getter, *setter)) {
      return Map::Normalize(isolate, map, mode,
                            "TransitionToDifferentAccessor");
    }

    return transition;
  }

  Handle<AccessorPair> pair;
  DescriptorArray old_descriptors = map->instance_descriptors(isolate);
  if (descriptor.is_found()) {
    // The property already exists; it can only be updated in place if it is
    // the most recently added accessor with the same attributes.
    if (descriptor != map->LastAdded()) {
      return Map::Normalize(isolate, map, mode, "AccessorsOverwritingNonLast");
    }
    PropertyDetails old_details = old_descriptors.GetDetails(descriptor);
    if (old_details.kind() != PropertyKind::kAccessor) {
      return Map::Normalize(isolate, map, mode,
                            "AccessorsOverwritingNonAccessors");
    }

    if (old_details.attributes() != attributes) {
      return Map::Normalize(isolate, map, mode, "AccessorsWithAttributes");
    }

    Handle<Object> maybe_pair(old_descriptors.GetStrongValue(descriptor),
                              isolate);
    if (!maybe_pair->IsAccessorPair()) {
      return Map::Normalize(isolate, map, mode, "AccessorsOverwritingNonPair");
    }

    Handle<AccessorPair> current_pair = Handle<AccessorPair>::cast(maybe_pair);
    if (current_pair->Equals(*getter, *setter)) return map;

    // Replacing a previously installed non-null getter or setter with a
    // different one forces normalization.
    bool overwriting_accessor = false;
    if (!getter->IsNull(isolate) &&
        !current_pair->get(ACCESSOR_GETTER).IsNull(isolate) &&
        current_pair->get(ACCESSOR_GETTER) != *getter) {
      overwriting_accessor = true;
    }
    if (!setter->IsNull(isolate) &&
        !current_pair->get(ACCESSOR_SETTER).IsNull(isolate) &&
        current_pair->get(ACCESSOR_SETTER) != *setter) {
      overwriting_accessor = true;
    }
    if (overwriting_accessor) {
      return Map::Normalize(isolate, map, mode,
                            "AccessorsOverwritingAccessors");
    }

    pair = AccessorPair::Copy(isolate, Handle<AccessorPair>::cast(maybe_pair));
  } else if (map->NumberOfOwnDescriptors() >= kMaxNumberOfDescriptors ||
             map->TooManyFastProperties(StoreOrigin::kNamed)) {
    return Map::Normalize(isolate, map, CLEAR_INOBJECT_PROPERTIES,
                          "TooManyAccessors");
  } else {
    pair = isolate->factory()->NewAccessorPair();
  }

  pair->SetComponents(*getter, *setter);

  // Do not track transitions during bootstrapping.
  TransitionFlag flag =
      isolate->bootstrapper()->IsActive() ? OMIT_TRANSITION : INSERT_TRANSITION;
  Descriptor d = Descriptor::AccessorConstant(name, pair, attributes);
  return Map::CopyInsertDescriptor(isolate, map, &d, flag);
}
2016 
// Copies |map| and appends |descriptor|, sharing the descriptor array with
// the new map when ownership rules permit.
Handle<Map> Map::CopyAddDescriptor(Isolate* isolate, Handle<Map> map,
                                   Descriptor* descriptor,
                                   TransitionFlag flag) {
  Handle<DescriptorArray> descriptors(map->instance_descriptors(isolate),
                                      isolate);

  // Share descriptors only if map owns descriptors and it is not an initial
  // map.
  if (flag == INSERT_TRANSITION && map->owns_descriptors() &&
      !map->GetBackPointer().IsUndefined(isolate) &&
      TransitionsAccessor::CanHaveMoreTransitions(isolate, map)) {
    return ShareDescriptor(isolate, map, descriptors, descriptor);
  }

  // Otherwise copy the owned descriptors with one extra slot and append.
  int nof = map->NumberOfOwnDescriptors();
  Handle<DescriptorArray> new_descriptors =
      DescriptorArray::CopyUpTo(isolate, descriptors, nof, 1);
  new_descriptors->Append(descriptor);

  return CopyReplaceDescriptors(isolate, map, new_descriptors, flag,
                                descriptor->GetKey(), "CopyAddDescriptor",
                                SIMPLE_PROPERTY_TRANSITION);
}
2039 
CopyInsertDescriptor(Isolate * isolate,Handle<Map> map,Descriptor * descriptor,TransitionFlag flag)2040 Handle<Map> Map::CopyInsertDescriptor(Isolate* isolate, Handle<Map> map,
2041                                       Descriptor* descriptor,
2042                                       TransitionFlag flag) {
2043   Handle<DescriptorArray> old_descriptors(map->instance_descriptors(isolate),
2044                                           isolate);
2045 
2046   // We replace the key if it is already present.
2047   InternalIndex index =
2048       old_descriptors->SearchWithCache(isolate, *descriptor->GetKey(), *map);
2049   if (index.is_found()) {
2050     return CopyReplaceDescriptor(isolate, map, old_descriptors, descriptor,
2051                                  index, flag);
2052   }
2053   return CopyAddDescriptor(isolate, map, descriptor, flag);
2054 }
2055 
// Copies |map|, replacing the descriptor at |insertion_index| with
// |descriptor| (which must carry the same key).
Handle<Map> Map::CopyReplaceDescriptor(Isolate* isolate, Handle<Map> map,
                                       Handle<DescriptorArray> descriptors,
                                       Descriptor* descriptor,
                                       InternalIndex insertion_index,
                                       TransitionFlag flag) {
  Handle<Name> key = descriptor->GetKey();
  DCHECK_EQ(*key, descriptors->GetKey(insertion_index));
  // This function does not support replacing property fields as
  // that would break property field counters.
  DCHECK_NE(PropertyLocation::kField, descriptor->GetDetails().location());
  DCHECK_NE(PropertyLocation::kField,
            descriptors->GetDetails(insertion_index).location());

  Handle<DescriptorArray> new_descriptors = DescriptorArray::CopyUpTo(
      isolate, descriptors, map->NumberOfOwnDescriptors());

  new_descriptors->Replace(insertion_index, descriptor);

  // Only replacing the last descriptor counts as a simple transition.
  SimpleTransitionFlag simple_flag =
      (insertion_index.as_int() == descriptors->number_of_descriptors() - 1)
          ? SIMPLE_PROPERTY_TRANSITION
          : PROPERTY_TRANSITION;
  return CopyReplaceDescriptors(isolate, map, new_descriptors, flag, key,
                                "CopyReplaceDescriptor", simple_flag);
}
2081 
Hash()2082 int Map::Hash() {
2083   // For performance reasons we only hash the 2 most variable fields of a map:
2084   // prototype and bit_field2.
2085 
2086   HeapObject prototype = this->prototype();
2087   int prototype_hash;
2088 
2089   if (prototype.IsNull()) {
2090     // No identity hash for null, so just pick a random number.
2091     prototype_hash = 1;
2092   } else {
2093     JSReceiver receiver = JSReceiver::cast(prototype);
2094     Isolate* isolate = GetIsolateFromWritableObject(receiver);
2095     prototype_hash = receiver.GetOrCreateIdentityHash(isolate).value();
2096   }
2097 
2098   return prototype_hash ^ bit_field2();
2099 }
2100 
2101 namespace {
2102 
CheckEquivalent(const Map first,const Map second)2103 bool CheckEquivalent(const Map first, const Map second) {
2104   return first.GetConstructor() == second.GetConstructor() &&
2105          first.prototype() == second.prototype() &&
2106          first.instance_type() == second.instance_type() &&
2107          first.bit_field() == second.bit_field() &&
2108          first.is_extensible() == second.is_extensible() &&
2109          first.new_target_is_base() == second.new_target_is_base();
2110 }
2111 
2112 }  // namespace
2113 
// Checks whether this map and |other| agree on everything that must match
// across a property transition (bit_field, new.target-is-base, prototype;
// plus a descriptor prefix comparison for JSFunction maps).
bool Map::EquivalentToForTransition(const Map other,
                                    ConcurrencyMode cmode) const {
  CHECK_EQ(GetConstructor(), other.GetConstructor());
  CHECK_EQ(instance_type(), other.instance_type());

  if (bit_field() != other.bit_field()) return false;
  if (new_target_is_base() != other.new_target_is_base()) return false;
  if (prototype() != other.prototype()) return false;
  if (InstanceTypeChecker::IsJSFunction(instance_type())) {
    // JSFunctions require more checks to ensure that sloppy function is
    // not equivalent to strict function.
    int nof =
        std::min(NumberOfOwnDescriptors(), other.NumberOfOwnDescriptors());
    // Use acquire loads for the descriptor arrays when running concurrently.
    DescriptorArray this_descriptors = IsConcurrent(cmode)
                                           ? instance_descriptors(kAcquireLoad)
                                           : instance_descriptors();
    DescriptorArray that_descriptors =
        IsConcurrent(cmode) ? other.instance_descriptors(kAcquireLoad)
                            : other.instance_descriptors();
    return this_descriptors.IsEqualUpTo(that_descriptors, nof);
  }
  return true;
}
2137 
// Returns true when |other| may be reached from this map via an elements-kind
// transition: the maps must be transition-equivalent, and (debug-only check)
// no field of this map may still be generalizable in place.
bool Map::EquivalentToForElementsKindTransition(const Map other,
                                                ConcurrencyMode cmode) const {
  if (!EquivalentToForTransition(other, cmode)) return false;
#ifdef DEBUG
  // Ensure that we don't try to generate elements kind transitions from maps
  // with fields that may be generalized in-place. This must already be handled
  // during addition of a new field.
  DescriptorArray descriptors = IsConcurrent(cmode)
                                    ? instance_descriptors(kAcquireLoad)
                                    : instance_descriptors();
  for (InternalIndex i : IterateOwnDescriptors()) {
    PropertyDetails details = descriptors.GetDetails(i);
    if (details.location() == PropertyLocation::kField) {
      // Every field must already carry the most general field type for its
      // representation; otherwise a later in-place generalization could
      // invalidate this transition.
      DCHECK(IsMostGeneralFieldType(details.representation(),
                                    descriptors.GetFieldType(i)));
    }
  }
#endif
  return true;
}
2158 
EquivalentToForNormalization(const Map other,ElementsKind elements_kind,PropertyNormalizationMode mode) const2159 bool Map::EquivalentToForNormalization(const Map other,
2160                                        ElementsKind elements_kind,
2161                                        PropertyNormalizationMode mode) const {
2162   int properties =
2163       mode == CLEAR_INOBJECT_PROPERTIES ? 0 : other.GetInObjectProperties();
2164   // Make sure the elements_kind bits are in bit_field2.
2165   DCHECK_EQ(this->elements_kind(),
2166             Map::Bits2::ElementsKindBits::decode(bit_field2()));
2167   int adjusted_other_bit_field2 =
2168       Map::Bits2::ElementsKindBits::update(other.bit_field2(), elements_kind);
2169   return CheckEquivalent(*this, other) &&
2170          bit_field2() == adjusted_other_bit_field2 &&
2171          GetInObjectProperties() == properties &&
2172          JSObject::GetEmbedderFieldCount(*this) ==
2173              JSObject::GetEmbedderFieldCount(other);
2174 }
2175 
ComputeMinObjectSlack(Isolate * isolate)2176 int Map::ComputeMinObjectSlack(Isolate* isolate) {
2177   // Has to be an initial map.
2178   DCHECK(GetBackPointer().IsUndefined(isolate));
2179 
2180   int slack = UnusedPropertyFields();
2181   TransitionsAccessor transitions(isolate, *this);
2182   TransitionsAccessor::TraverseCallback callback = [&](Map map) {
2183     slack = std::min(slack, map.UnusedPropertyFields());
2184   };
2185   transitions.TraverseTransitionTree(callback);
2186   return slack;
2187 }
2188 
// Installs |descriptors| as this map's descriptor array and records how many
// of its entries this map owns. The release store pairs with the acquire
// loads used by concurrent readers of instance_descriptors().
void Map::SetInstanceDescriptors(Isolate* isolate, DescriptorArray descriptors,
                                 int number_of_own_descriptors) {
  set_instance_descriptors(descriptors, kReleaseStore);
  SetNumberOfOwnDescriptors(number_of_own_descriptors);
#ifndef V8_DISABLE_WRITE_BARRIERS
  // Inform the concurrent marker about the descriptors now owned by this map.
  WriteBarrier::Marking(descriptors, number_of_own_descriptors);
#endif
}
2197 
2198 // static
GetOrCreatePrototypeInfo(Handle<JSObject> prototype,Isolate * isolate)2199 Handle<PrototypeInfo> Map::GetOrCreatePrototypeInfo(Handle<JSObject> prototype,
2200                                                     Isolate* isolate) {
2201   Object maybe_proto_info = prototype->map().prototype_info();
2202   if (maybe_proto_info.IsPrototypeInfo()) {
2203     return handle(PrototypeInfo::cast(maybe_proto_info), isolate);
2204   }
2205   Handle<PrototypeInfo> proto_info = isolate->factory()->NewPrototypeInfo();
2206   prototype->map().set_prototype_info(*proto_info, kReleaseStore);
2207   return proto_info;
2208 }
2209 
2210 // static
GetOrCreatePrototypeInfo(Handle<Map> prototype_map,Isolate * isolate)2211 Handle<PrototypeInfo> Map::GetOrCreatePrototypeInfo(Handle<Map> prototype_map,
2212                                                     Isolate* isolate) {
2213   Object maybe_proto_info = prototype_map->prototype_info();
2214   if (maybe_proto_info.IsPrototypeInfo()) {
2215     return handle(PrototypeInfo::cast(maybe_proto_info), isolate);
2216   }
2217   Handle<PrototypeInfo> proto_info = isolate->factory()->NewPrototypeInfo();
2218   prototype_map->set_prototype_info(*proto_info, kReleaseStore);
2219   return proto_info;
2220 }
2221 
2222 // static
SetShouldBeFastPrototypeMap(Handle<Map> map,bool value,Isolate * isolate)2223 void Map::SetShouldBeFastPrototypeMap(Handle<Map> map, bool value,
2224                                       Isolate* isolate) {
2225   if (value == false && !map->prototype_info().IsPrototypeInfo()) {
2226     // "False" is the implicit default value, so there's nothing to do.
2227     return;
2228   }
2229   GetOrCreatePrototypeInfo(map, isolate)->set_should_be_fast_map(value);
2230 }
2231 
// static
// Returns the cell guarding the validity of |map|'s prototype chain, creating
// a fresh one when no cell exists yet or the existing one was invalidated.
// When the relevant prototype is not a JSObject there is nothing to guard, so
// a plain kPrototypeChainValid Smi is returned instead of a cell.
Handle<Object> Map::GetOrCreatePrototypeChainValidityCell(Handle<Map> map,
                                                          Isolate* isolate) {
  Handle<Object> maybe_prototype;
  if (map->IsJSGlobalObjectMap()) {
    DCHECK(map->is_prototype_map());
    // Global object is prototype of a global proxy and therefore we can
    // use its validity cell for guarding global object's prototype change.
    maybe_prototype = isolate->global_object();
  } else {
    // Guard the prototype of the root map of this map's prototype chain.
    maybe_prototype =
        handle(map->GetPrototypeChainRootMap(isolate).prototype(), isolate);
  }
  if (!maybe_prototype->IsJSObject()) {
    return handle(Smi::FromInt(Map::kPrototypeChainValid), isolate);
  }
  Handle<JSObject> prototype = Handle<JSObject>::cast(maybe_prototype);
  // Ensure the prototype is registered with its own prototypes so its cell
  // will be invalidated when necessary.
  JSObject::LazyRegisterPrototypeUser(handle(prototype->map(), isolate),
                                      isolate);

  Object maybe_cell = prototype->map().prototype_validity_cell();
  // Return existing cell if it's still valid.
  if (maybe_cell.IsCell()) {
    Handle<Cell> cell(Cell::cast(maybe_cell), isolate);
    if (cell->value() == Smi::FromInt(Map::kPrototypeChainValid)) {
      return cell;
    }
  }
  // Otherwise create a new cell.
  Handle<Cell> cell = isolate->factory()->NewCell(
      handle(Smi::FromInt(Map::kPrototypeChainValid), isolate));
  prototype->map().set_prototype_validity_cell(*cell);
  return cell;
}
2268 
2269 // static
IsPrototypeChainInvalidated(Map map)2270 bool Map::IsPrototypeChainInvalidated(Map map) {
2271   DCHECK(map.is_prototype_map());
2272   Object maybe_cell = map.prototype_validity_cell();
2273   if (maybe_cell.IsCell()) {
2274     Cell cell = Cell::cast(maybe_cell);
2275     return cell.value() != Smi::FromInt(Map::kPrototypeChainValid);
2276   }
2277   return true;
2278 }
2279 
2280 // static
SetPrototype(Isolate * isolate,Handle<Map> map,Handle<HeapObject> prototype,bool enable_prototype_setup_mode)2281 void Map::SetPrototype(Isolate* isolate, Handle<Map> map,
2282                        Handle<HeapObject> prototype,
2283                        bool enable_prototype_setup_mode) {
2284   RCS_SCOPE(isolate, RuntimeCallCounterId::kMap_SetPrototype);
2285 
2286   if (prototype->IsJSObject()) {
2287     Handle<JSObject> prototype_jsobj = Handle<JSObject>::cast(prototype);
2288     JSObject::OptimizeAsPrototype(prototype_jsobj, enable_prototype_setup_mode);
2289   } else {
2290     DCHECK(prototype->IsNull(isolate) || prototype->IsJSProxy());
2291   }
2292 
2293   WriteBarrierMode wb_mode =
2294       prototype->IsNull(isolate) ? SKIP_WRITE_BARRIER : UPDATE_WRITE_BARRIER;
2295   map->set_prototype(*prototype, wb_mode);
2296 }
2297 
// Begins in-object slack tracking by arming the construction counter.
// Tracking is skipped when there are no unused property fields to reclaim.
void Map::StartInobjectSlackTracking() {
  DCHECK(!IsInobjectSlackTrackingInProgress());
  if (UnusedPropertyFields() == 0) return;
  set_construction_counter(Map::kSlackTrackingCounterStart);
}
2303 
TransitionToPrototype(Isolate * isolate,Handle<Map> map,Handle<HeapObject> prototype)2304 Handle<Map> Map::TransitionToPrototype(Isolate* isolate, Handle<Map> map,
2305                                        Handle<HeapObject> prototype) {
2306   Handle<Map> new_map =
2307       TransitionsAccessor::GetPrototypeTransition(isolate, map, prototype);
2308   if (new_map.is_null()) {
2309     new_map = Copy(isolate, map, "TransitionToPrototype");
2310     TransitionsAccessor::PutPrototypeTransition(isolate, map, prototype,
2311                                                 new_map);
2312     Map::SetPrototype(isolate, new_map, prototype);
2313   }
2314   return new_map;
2315 }
2316 
New(Isolate * isolate)2317 Handle<NormalizedMapCache> NormalizedMapCache::New(Isolate* isolate) {
2318   Handle<WeakFixedArray> array(
2319       isolate->factory()->NewWeakFixedArray(kEntries, AllocationType::kOld));
2320   return Handle<NormalizedMapCache>::cast(array);
2321 }
2322 
Get(Handle<Map> fast_map,ElementsKind elements_kind,PropertyNormalizationMode mode)2323 MaybeHandle<Map> NormalizedMapCache::Get(Handle<Map> fast_map,
2324                                          ElementsKind elements_kind,
2325                                          PropertyNormalizationMode mode) {
2326   DisallowGarbageCollection no_gc;
2327   MaybeObject value = WeakFixedArray::Get(GetIndex(fast_map));
2328   HeapObject heap_object;
2329   if (!value->GetHeapObjectIfWeak(&heap_object)) {
2330     return MaybeHandle<Map>();
2331   }
2332 
2333   Map normalized_map = Map::cast(heap_object);
2334   if (!normalized_map.EquivalentToForNormalization(*fast_map, elements_kind,
2335                                                    mode)) {
2336     return MaybeHandle<Map>();
2337   }
2338   return handle(normalized_map, GetIsolate());
2339 }
2340 
// Caches |normalized_map| as the normalization of |fast_map|. The entry is
// stored as a weak reference, so the cache does not keep the map alive.
void NormalizedMapCache::Set(Handle<Map> fast_map, Handle<Map> normalized_map) {
  DisallowGarbageCollection no_gc;
  // Only dictionary-mode (normalized) maps belong in this cache.
  DCHECK(normalized_map->is_dictionary_map());
  WeakFixedArray::Set(GetIndex(fast_map),
                      HeapObjectReference::Weak(*normalized_map));
}
2347 
2348 }  // namespace internal
2349 }  // namespace v8
2350