// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// Review notes:
//
// - The use of macros in these inline functions may seem superfluous
//   but it is absolutely needed to make sure gcc generates optimal
//   code. gcc is not happy when attempting to inline too deep.
//

#ifndef V8_OBJECTS_INL_H_
#define V8_OBJECTS_INL_H_

#include "src/objects.h"

#include "src/base/atomicops.h"
#include "src/base/bits.h"
#include "src/base/tsan.h"
#include "src/builtins/builtins.h"
#include "src/contexts-inl.h"
#include "src/conversions-inl.h"
#include "src/feedback-vector-inl.h"
#include "src/field-index-inl.h"
#include "src/handles-inl.h"
#include "src/heap/factory.h"
#include "src/heap/heap-inl.h"
#include "src/isolate-inl.h"
#include "src/keys.h"
#include "src/layout-descriptor-inl.h"
#include "src/lookup-cache-inl.h"
#include "src/lookup-inl.h"
#include "src/maybe-handles-inl.h"
#include "src/objects/bigint.h"
#include "src/objects/descriptor-array.h"
#include "src/objects/js-proxy-inl.h"
#include "src/objects/literal-objects.h"
#include "src/objects/maybe-object-inl.h"
#include "src/objects/regexp-match-info.h"
#include "src/objects/scope-info.h"
#include "src/objects/template-objects.h"
#include "src/objects/templates.h"
#include "src/property-details.h"
#include "src/property.h"
#include "src/prototype-inl.h"
#include "src/roots-inl.h"
#include "src/transitions-inl.h"
#include "src/v8memory.h"

// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"

namespace v8 {
namespace internal {

PropertyDetails::PropertyDetails(Smi* smi) {
  value_ = smi->value();
}


Smi* PropertyDetails::AsSmi() const {
  // Ensure the upper 2 bits have the same value by sign extending it. This is
  // necessary to be able to use the 31st bit of the property details.
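  // Shifting left by one and arithmetically shifting back copies bit 30 into
  // bit 31, so the value survives the 31-bit Smi payload even when bit 30 of
  // the details is in use.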
  int value = value_ << 1;
  return Smi::FromInt(value >> 1);
}


int PropertyDetails::field_width_in_words() const {
  DCHECK_EQ(location(), kField);
  if (!FLAG_unbox_double_fields) return 1;
  if (kDoubleSize == kPointerSize) return 1;
  return representation().IsDouble() ? kDoubleSize / kPointerSize : 1;
}

namespace InstanceTypeChecker {

// Define type checkers for classes with single instance type.
INSTANCE_TYPE_CHECKERS_SINGLE(INSTANCE_TYPE_CHECKER);

#define TYPED_ARRAY_INSTANCE_TYPE_CHECKER(Type, type, TYPE, ctype) \
  INSTANCE_TYPE_CHECKER(Fixed##Type##Array, FIXED_##TYPE##_ARRAY_TYPE)
TYPED_ARRAYS(TYPED_ARRAY_INSTANCE_TYPE_CHECKER)
#undef TYPED_ARRAY_INSTANCE_TYPE_CHECKER

#define STRUCT_INSTANCE_TYPE_CHECKER(NAME, Name, name) \
  INSTANCE_TYPE_CHECKER(Name, NAME##_TYPE)
STRUCT_LIST(STRUCT_INSTANCE_TYPE_CHECKER)
#undef STRUCT_INSTANCE_TYPE_CHECKER

// Define type checkers for classes with ranges of instance types.
#define INSTANCE_TYPE_CHECKER_RANGE(type, first_instance_type, \
                                    last_instance_type)        \
  V8_INLINE bool Is##type(InstanceType instance_type) {        \
    return instance_type >= first_instance_type &&             \
           instance_type <= last_instance_type;                \
  }
INSTANCE_TYPE_CHECKERS_RANGE(INSTANCE_TYPE_CHECKER_RANGE);
#undef INSTANCE_TYPE_CHECKER_RANGE

V8_INLINE bool IsFixedArrayBase(InstanceType instance_type) {
  return IsFixedArray(instance_type) || IsFixedDoubleArray(instance_type) ||
         IsFixedTypedArrayBase(instance_type);
}

V8_INLINE bool IsHeapObject(InstanceType instance_type) { return true; }

V8_INLINE bool IsInternalizedString(InstanceType instance_type) {
  STATIC_ASSERT(kNotInternalizedTag != 0);
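  // A single mask-and-compare checks both that this is a string and that the
  // not-internalized bit is clear.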
  return (instance_type & (kIsNotStringMask | kIsNotInternalizedMask)) ==
         (kStringTag | kInternalizedTag);
}

V8_INLINE bool IsJSObject(InstanceType instance_type) {
  STATIC_ASSERT(LAST_TYPE == LAST_JS_OBJECT_TYPE);
  return instance_type >= FIRST_JS_OBJECT_TYPE;
}

}  // namespace InstanceTypeChecker

// TODO(v8:7786): For instance types that have a single map instance on the
// roots, and when that map is embedded in the binary, compare against the map
// pointer rather than looking up the instance type.
INSTANCE_TYPE_CHECKERS(TYPE_CHECKER);

#define TYPED_ARRAY_TYPE_CHECKER(Type, type, TYPE, ctype) \
  TYPE_CHECKER(Fixed##Type##Array)
TYPED_ARRAYS(TYPED_ARRAY_TYPE_CHECKER)
#undef TYPED_ARRAY_TYPE_CHECKER

bool HeapObject::IsUncompiledData() const {
  return IsUncompiledDataWithoutPreParsedScope() ||
         IsUncompiledDataWithPreParsedScope();
}

bool HeapObject::IsSloppyArgumentsElements() const {
  return IsFixedArrayExact();
}

bool HeapObject::IsJSSloppyArgumentsObject() const {
  return IsJSArgumentsObject();
}

bool HeapObject::IsJSGeneratorObject() const {
  return map()->instance_type() == JS_GENERATOR_OBJECT_TYPE ||
         IsJSAsyncGeneratorObject();
}

bool HeapObject::IsDataHandler() const {
  return IsLoadHandler() || IsStoreHandler();
}

bool HeapObject::IsClassBoilerplate() const { return IsFixedArrayExact(); }

bool HeapObject::IsExternal(Isolate* isolate) const {
  return map()->FindRootMap(isolate) == isolate->heap()->external_map();
}

#define IS_TYPE_FUNCTION_DEF(type_)                               \
  bool Object::Is##type_() const {                                \
    return IsHeapObject() && HeapObject::cast(this)->Is##type_(); \
  }
HEAP_OBJECT_TYPE_LIST(IS_TYPE_FUNCTION_DEF)
#undef IS_TYPE_FUNCTION_DEF

#define IS_TYPE_FUNCTION_DEF(Type, Value)                        \
  bool Object::Is##Type(Isolate* isolate) const {                \
    return Is##Type(ReadOnlyRoots(isolate->heap()));             \
  }                                                              \
  bool Object::Is##Type(ReadOnlyRoots roots) const {             \
    return this == roots.Value();                                \
  }                                                              \
  bool Object::Is##Type() const {                                \
    return IsHeapObject() && HeapObject::cast(this)->Is##Type(); \
  }                                                              \
  bool HeapObject::Is##Type(Isolate* isolate) const {            \
    return Object::Is##Type(isolate);                            \
  }                                                              \
  bool HeapObject::Is##Type(ReadOnlyRoots roots) const {         \
    return Object::Is##Type(roots);                              \
  }                                                              \
  bool HeapObject::Is##Type() const { return Is##Type(GetReadOnlyRoots()); }
ODDBALL_LIST(IS_TYPE_FUNCTION_DEF)
#undef IS_TYPE_FUNCTION_DEF

bool Object::IsNullOrUndefined(Isolate* isolate) const {
  return IsNullOrUndefined(ReadOnlyRoots(isolate));
}

bool Object::IsNullOrUndefined(ReadOnlyRoots roots) const {
  return IsNull(roots) || IsUndefined(roots);
}

bool Object::IsNullOrUndefined() const {
  return IsHeapObject() && HeapObject::cast(this)->IsNullOrUndefined();
}

bool HeapObject::IsNullOrUndefined(Isolate* isolate) const {
  return Object::IsNullOrUndefined(isolate);
}

bool HeapObject::IsNullOrUndefined(ReadOnlyRoots roots) const {
  return Object::IsNullOrUndefined(roots);
}

bool HeapObject::IsNullOrUndefined() const {
  return IsNullOrUndefined(GetReadOnlyRoots());
}

bool HeapObject::IsUniqueName() const {
  return IsInternalizedString() || IsSymbol();
}

bool HeapObject::IsFunction() const {
  STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
  return map()->instance_type() >= FIRST_FUNCTION_TYPE;
}

bool HeapObject::IsCallable() const { return map()->is_callable(); }

bool HeapObject::IsConstructor() const { return map()->is_constructor(); }

bool HeapObject::IsModuleInfo() const {
  return map() == GetReadOnlyRoots().module_info_map();
}

bool HeapObject::IsTemplateInfo() const {
  return IsObjectTemplateInfo() || IsFunctionTemplateInfo();
}

bool HeapObject::IsConsString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsCons();
}

bool HeapObject::IsThinString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsThin();
}

bool HeapObject::IsSlicedString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSliced();
}

bool HeapObject::IsSeqString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential();
}

bool HeapObject::IsSeqOneByteString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential() &&
         String::cast(this)->IsOneByteRepresentation();
}

bool HeapObject::IsSeqTwoByteString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential() &&
         String::cast(this)->IsTwoByteRepresentation();
}

bool HeapObject::IsExternalString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal();
}

bool HeapObject::IsExternalOneByteString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal() &&
         String::cast(this)->IsOneByteRepresentation();
}

bool HeapObject::IsExternalTwoByteString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal() &&
         String::cast(this)->IsTwoByteRepresentation();
}

bool Object::IsNumber() const { return IsSmi() || IsHeapNumber(); }

bool Object::IsNumeric() const { return IsNumber() || IsBigInt(); }

bool HeapObject::IsFiller() const {
  InstanceType instance_type = map()->instance_type();
  return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
}

bool HeapObject::IsJSReceiver() const {
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  return map()->instance_type() >= FIRST_JS_RECEIVER_TYPE;
}

bool HeapObject::IsJSProxy() const { return map()->IsJSProxyMap(); }

bool HeapObject::IsJSWeakCollection() const {
  return IsJSWeakMap() || IsJSWeakSet();
}

bool HeapObject::IsJSCollection() const { return IsJSMap() || IsJSSet(); }

bool HeapObject::IsPromiseReactionJobTask() const {
  return IsPromiseFulfillReactionJobTask() || IsPromiseRejectReactionJobTask();
}

bool HeapObject::IsEnumCache() const { return IsTuple2(); }

bool HeapObject::IsFrameArray() const { return IsFixedArrayExact(); }

bool HeapObject::IsArrayList() const {
  return map() == GetReadOnlyRoots().array_list_map() ||
         this == GetReadOnlyRoots().empty_fixed_array();
}

bool HeapObject::IsRegExpMatchInfo() const { return IsFixedArrayExact(); }

bool Object::IsLayoutDescriptor() const { return IsSmi() || IsByteArray(); }

bool HeapObject::IsDeoptimizationData() const {
  // Must be a fixed array.
  if (!IsFixedArrayExact()) return false;

  // There's no sure way to detect the difference between a fixed array and
  // a deoptimization data array. Since this is used for asserts we can
  // check that the length is zero or else the fixed size plus a multiple of
  // the entry size.
  int length = FixedArray::cast(this)->length();
  if (length == 0) return true;

  length -= DeoptimizationData::kFirstDeoptEntryIndex;
  return length >= 0 && length % DeoptimizationData::kDeoptEntrySize == 0;
}

bool HeapObject::IsHandlerTable() const {
  if (!IsFixedArrayExact()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a handler table array.
  return true;
}

bool HeapObject::IsTemplateList() const {
  if (!IsFixedArrayExact()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a template list.
  if (FixedArray::cast(this)->length() < 1) return false;
  return true;
}

bool HeapObject::IsDependentCode() const {
  if (!IsWeakFixedArray()) return false;
  // There's actually no way to see the difference between a weak fixed array
  // and a dependent codes array.
  return true;
}

bool HeapObject::IsAbstractCode() const {
  return IsBytecodeArray() || IsCode();
}

bool HeapObject::IsStringWrapper() const {
  return IsJSValue() && JSValue::cast(this)->value()->IsString();
}

bool HeapObject::IsBooleanWrapper() const {
  return IsJSValue() && JSValue::cast(this)->value()->IsBoolean();
}

bool HeapObject::IsScriptWrapper() const {
  return IsJSValue() && JSValue::cast(this)->value()->IsScript();
}

bool HeapObject::IsNumberWrapper() const {
  return IsJSValue() && JSValue::cast(this)->value()->IsNumber();
}

bool HeapObject::IsBigIntWrapper() const {
  return IsJSValue() && JSValue::cast(this)->value()->IsBigInt();
}

bool HeapObject::IsSymbolWrapper() const {
  return IsJSValue() && JSValue::cast(this)->value()->IsSymbol();
}

bool HeapObject::IsBoolean() const {
  return IsOddball() &&
         ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
}

bool HeapObject::IsJSArrayBufferView() const {
  return IsJSDataView() || IsJSTypedArray();
}

bool HeapObject::IsStringSet() const { return IsHashTable(); }

bool HeapObject::IsObjectHashSet() const { return IsHashTable(); }

bool HeapObject::IsNormalizedMapCache() const {
  return NormalizedMapCache::IsNormalizedMapCache(this);
}

bool HeapObject::IsCompilationCacheTable() const { return IsHashTable(); }

bool HeapObject::IsMapCache() const { return IsHashTable(); }

bool HeapObject::IsObjectHashTable() const { return IsHashTable(); }

bool Object::IsSmallOrderedHashTable() const {
  return IsSmallOrderedHashSet() || IsSmallOrderedHashMap();
}

bool Object::IsPrimitive() const {
  return IsSmi() || HeapObject::cast(this)->map()->IsPrimitiveMap();
}

// static
Maybe<bool> Object::IsArray(Handle<Object> object) {
  if (object->IsSmi()) return Just(false);
  Handle<HeapObject> heap_object = Handle<HeapObject>::cast(object);
  if (heap_object->IsJSArray()) return Just(true);
  if (!heap_object->IsJSProxy()) return Just(false);
  return JSProxy::IsArray(Handle<JSProxy>::cast(object));
}

bool HeapObject::IsUndetectable() const { return map()->is_undetectable(); }

bool HeapObject::IsAccessCheckNeeded() const {
  if (IsJSGlobalProxy()) {
    const JSGlobalProxy* proxy = JSGlobalProxy::cast(this);
    JSGlobalObject* global = proxy->GetIsolate()->context()->global_object();
    return proxy->IsDetachedFrom(global);
  }
  return map()->is_access_check_needed();
}

bool HeapObject::IsStruct() const {
  switch (map()->instance_type()) {
#define MAKE_STRUCT_CASE(NAME, Name, name) \
  case NAME##_TYPE:                        \
    return true;
    STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    default:
      return false;
  }
}

#define MAKE_STRUCT_PREDICATE(NAME, Name, name)                  \
  bool Object::Is##Name() const {                                \
    return IsHeapObject() && HeapObject::cast(this)->Is##Name(); \
  }                                                              \
  TYPE_CHECKER(Name)
STRUCT_LIST(MAKE_STRUCT_PREDICATE)
#undef MAKE_STRUCT_PREDICATE

double Object::Number() const {
  DCHECK(IsNumber());
  return IsSmi()
             ? static_cast<double>(reinterpret_cast<const Smi*>(this)->value())
             : reinterpret_cast<const HeapNumber*>(this)->value();
}

bool Object::IsNaN() const {
  return this->IsHeapNumber() && std::isnan(HeapNumber::cast(this)->value());
}

bool Object::IsMinusZero() const {
  return this->IsHeapNumber() &&
         i::IsMinusZero(HeapNumber::cast(this)->value());
}

// ------------------------------------
// Cast operations

CAST_ACCESSOR(AccessorPair)
CAST_ACCESSOR(AllocationMemento)
CAST_ACCESSOR(AllocationSite)
CAST_ACCESSOR(AsyncGeneratorRequest)
CAST_ACCESSOR(BigInt)
CAST_ACCESSOR(ObjectBoilerplateDescription)
CAST_ACCESSOR(Cell)
CAST_ACCESSOR(ArrayBoilerplateDescription)
CAST_ACCESSOR(DataHandler)
CAST_ACCESSOR(DescriptorArray)
CAST_ACCESSOR(EphemeronHashTable)
CAST_ACCESSOR(EnumCache)
CAST_ACCESSOR(FeedbackCell)
CAST_ACCESSOR(Foreign)
CAST_ACCESSOR(GlobalDictionary)
CAST_ACCESSOR(HeapObject)
CAST_ACCESSOR(JSAsyncFromSyncIterator)
CAST_ACCESSOR(JSBoundFunction)
CAST_ACCESSOR(JSDataView)
CAST_ACCESSOR(JSDate)
CAST_ACCESSOR(JSFunction)
CAST_ACCESSOR(JSGlobalObject)
CAST_ACCESSOR(JSGlobalProxy)
CAST_ACCESSOR(JSMessageObject)
CAST_ACCESSOR(JSObject)
CAST_ACCESSOR(JSReceiver)
CAST_ACCESSOR(JSStringIterator)
CAST_ACCESSOR(JSValue)
CAST_ACCESSOR(HeapNumber)
CAST_ACCESSOR(LayoutDescriptor)
CAST_ACCESSOR(MutableHeapNumber)
CAST_ACCESSOR(NameDictionary)
CAST_ACCESSOR(NormalizedMapCache)
CAST_ACCESSOR(NumberDictionary)
CAST_ACCESSOR(Object)
CAST_ACCESSOR(ObjectHashSet)
CAST_ACCESSOR(ObjectHashTable)
CAST_ACCESSOR(Oddball)
CAST_ACCESSOR(OrderedHashMap)
CAST_ACCESSOR(OrderedHashSet)
CAST_ACCESSOR(PropertyArray)
CAST_ACCESSOR(PropertyCell)
CAST_ACCESSOR(RegExpMatchInfo)
CAST_ACCESSOR(ScopeInfo)
CAST_ACCESSOR(SimpleNumberDictionary)
CAST_ACCESSOR(SmallOrderedHashMap)
CAST_ACCESSOR(SmallOrderedHashSet)
CAST_ACCESSOR(Smi)
CAST_ACCESSOR(SourcePositionTableWithFrameCache)
CAST_ACCESSOR(StackFrameInfo)
CAST_ACCESSOR(StringSet)
CAST_ACCESSOR(StringTable)
CAST_ACCESSOR(Struct)
CAST_ACCESSOR(TemplateObjectDescription)
CAST_ACCESSOR(Tuple2)
CAST_ACCESSOR(Tuple3)

bool Object::HasValidElements() {
  // Dictionary is covered under FixedArray.
  return IsFixedArray() || IsFixedDoubleArray() || IsFixedTypedArrayBase();
}

bool Object::KeyEquals(Object* second) {
  Object* first = this;
  if (second->IsNumber()) {
    if (first->IsNumber()) return first->Number() == second->Number();
    Object* temp = first;
    first = second;
    second = temp;
  }
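  // Any Number key is now in |first|; compare it as an array index against
  // the Name in |second|.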
  if (first->IsNumber()) {
    DCHECK_LE(0, first->Number());
    uint32_t expected = static_cast<uint32_t>(first->Number());
    uint32_t index;
    return Name::cast(second)->AsArrayIndex(&index) && index == expected;
  }
  return Name::cast(first)->Equals(Name::cast(second));
}

bool Object::FilterKey(PropertyFilter filter) {
  DCHECK(!IsPropertyCell());
  if (IsSymbol()) {
    if (filter & SKIP_SYMBOLS) return true;
    if (Symbol::cast(this)->is_private()) return true;
  } else {
    if (filter & SKIP_STRINGS) return true;
  }
  return false;
}

Handle<Object> Object::NewStorageFor(Isolate* isolate, Handle<Object> object,
                                     Representation representation) {
  if (!representation.IsDouble()) return object;
  auto result = isolate->factory()->NewMutableHeapNumberWithHoleNaN();
  if (object->IsUninitialized(isolate)) {
    result->set_value_as_bits(kHoleNanInt64);
  } else if (object->IsMutableHeapNumber()) {
    // Ensure that all bits of the double value are preserved.
    result->set_value_as_bits(
        MutableHeapNumber::cast(*object)->value_as_bits());
  } else {
    result->set_value(object->Number());
  }
  return result;
}

Handle<Object> Object::WrapForRead(Isolate* isolate, Handle<Object> object,
                                   Representation representation) {
  DCHECK(!object->IsUninitialized(isolate));
  if (!representation.IsDouble()) {
    DCHECK(object->FitsRepresentation(representation));
    return object;
  }
  return isolate->factory()->NewHeapNumber(
      MutableHeapNumber::cast(*object)->value());
}

Representation Object::OptimalRepresentation() {
  if (!FLAG_track_fields) return Representation::Tagged();
  if (IsSmi()) {
    return Representation::Smi();
  } else if (FLAG_track_double_fields && IsHeapNumber()) {
    return Representation::Double();
  } else if (FLAG_track_computed_fields && IsUninitialized()) {
    return Representation::None();
  } else if (FLAG_track_heap_object_fields) {
    DCHECK(IsHeapObject());
    return Representation::HeapObject();
  } else {
    return Representation::Tagged();
  }
}


ElementsKind Object::OptimalElementsKind() {
  if (IsSmi()) return PACKED_SMI_ELEMENTS;
  if (IsNumber()) return PACKED_DOUBLE_ELEMENTS;
  return PACKED_ELEMENTS;
}


bool Object::FitsRepresentation(Representation representation) {
  if (FLAG_track_fields && representation.IsSmi()) {
    return IsSmi();
  } else if (FLAG_track_double_fields && representation.IsDouble()) {
    return IsMutableHeapNumber() || IsNumber();
  } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
    return IsHeapObject();
  } else if (FLAG_track_fields && representation.IsNone()) {
    return false;
  }
  return true;
}

bool Object::ToUint32(uint32_t* value) const {
  if (IsSmi()) {
    int num = Smi::ToInt(this);
    if (num < 0) return false;
    *value = static_cast<uint32_t>(num);
    return true;
  }
  if (IsHeapNumber()) {
    double num = HeapNumber::cast(this)->value();
    return DoubleToUint32IfEqualToSelf(num, value);
  }
  return false;
}

// static
MaybeHandle<JSReceiver> Object::ToObject(Isolate* isolate,
                                         Handle<Object> object,
                                         const char* method_name) {
  if (object->IsJSReceiver()) return Handle<JSReceiver>::cast(object);
  return ToObject(isolate, object, isolate->native_context(), method_name);
}


// static
MaybeHandle<Name> Object::ToName(Isolate* isolate, Handle<Object> input) {
  if (input->IsName()) return Handle<Name>::cast(input);
  return ConvertToName(isolate, input);
}

// static
MaybeHandle<Object> Object::ToPropertyKey(Isolate* isolate,
                                          Handle<Object> value) {
  if (value->IsSmi() || HeapObject::cast(*value)->IsName()) return value;
  return ConvertToPropertyKey(isolate, value);
}

// static
MaybeHandle<Object> Object::ToPrimitive(Handle<Object> input,
                                        ToPrimitiveHint hint) {
  if (input->IsPrimitive()) return input;
  return JSReceiver::ToPrimitive(Handle<JSReceiver>::cast(input), hint);
}

// static
MaybeHandle<Object> Object::ToNumber(Isolate* isolate, Handle<Object> input) {
  if (input->IsNumber()) return input;  // Shortcut.
  return ConvertToNumberOrNumeric(isolate, input, Conversion::kToNumber);
}

// static
MaybeHandle<Object> Object::ToNumeric(Isolate* isolate, Handle<Object> input) {
  if (input->IsNumber() || input->IsBigInt()) return input;  // Shortcut.
  return ConvertToNumberOrNumeric(isolate, input, Conversion::kToNumeric);
}

// static
MaybeHandle<Object> Object::ToInteger(Isolate* isolate, Handle<Object> input) {
  if (input->IsSmi()) return input;
  return ConvertToInteger(isolate, input);
}

// static
MaybeHandle<Object> Object::ToInt32(Isolate* isolate, Handle<Object> input) {
  if (input->IsSmi()) return input;
  return ConvertToInt32(isolate, input);
}

// static
MaybeHandle<Object> Object::ToUint32(Isolate* isolate, Handle<Object> input) {
  if (input->IsSmi()) return handle(Smi::cast(*input)->ToUint32Smi(), isolate);
  return ConvertToUint32(isolate, input);
}

// static
MaybeHandle<String> Object::ToString(Isolate* isolate, Handle<Object> input) {
  if (input->IsString()) return Handle<String>::cast(input);
  return ConvertToString(isolate, input);
}

// static
MaybeHandle<Object> Object::ToLength(Isolate* isolate, Handle<Object> input) {
  if (input->IsSmi()) {
    int value = std::max(Smi::ToInt(*input), 0);
    return handle(Smi::FromInt(value), isolate);
  }
  return ConvertToLength(isolate, input);
}

// static
MaybeHandle<Object> Object::ToIndex(Isolate* isolate, Handle<Object> input,
                                    MessageTemplate::Template error_index) {
  if (input->IsSmi() && Smi::ToInt(*input) >= 0) return input;
  return ConvertToIndex(isolate, input, error_index);
}

MaybeHandle<Object> Object::GetProperty(Isolate* isolate, Handle<Object> object,
                                        Handle<Name> name) {
  LookupIterator it(isolate, object, name);
  if (!it.IsFound()) return it.factory()->undefined_value();
  return GetProperty(&it);
}

MaybeHandle<Object> JSReceiver::GetProperty(Isolate* isolate,
                                            Handle<JSReceiver> receiver,
                                            Handle<Name> name) {
  LookupIterator it(isolate, receiver, name, receiver);
  if (!it.IsFound()) return it.factory()->undefined_value();
  return Object::GetProperty(&it);
}

MaybeHandle<Object> Object::GetElement(Isolate* isolate, Handle<Object> object,
                                       uint32_t index) {
  LookupIterator it(isolate, object, index);
  if (!it.IsFound()) return it.factory()->undefined_value();
  return GetProperty(&it);
}

MaybeHandle<Object> JSReceiver::GetElement(Isolate* isolate,
                                           Handle<JSReceiver> receiver,
                                           uint32_t index) {
  LookupIterator it(isolate, receiver, index, receiver);
  if (!it.IsFound()) return it.factory()->undefined_value();
  return Object::GetProperty(&it);
}

Handle<Object> JSReceiver::GetDataProperty(Handle<JSReceiver> object,
                                           Handle<Name> name) {
  LookupIterator it(object, name, object,
                    LookupIterator::PROTOTYPE_CHAIN_SKIP_INTERCEPTOR);
  if (!it.IsFound()) return it.factory()->undefined_value();
  return GetDataProperty(&it);
}

MaybeHandle<Object> Object::SetElement(Isolate* isolate, Handle<Object> object,
                                       uint32_t index, Handle<Object> value,
                                       LanguageMode language_mode) {
  LookupIterator it(isolate, object, index);
  MAYBE_RETURN_NULL(
      SetProperty(&it, value, language_mode, MAY_BE_STORE_FROM_KEYED));
  return value;
}

MaybeHandle<Object> JSReceiver::GetPrototype(Isolate* isolate,
                                             Handle<JSReceiver> receiver) {
  // We don't expect access checks to be needed on JSProxy objects.
  DCHECK(!receiver->IsAccessCheckNeeded() || receiver->IsJSObject());
  PrototypeIterator iter(isolate, receiver, kStartAtReceiver,
                         PrototypeIterator::END_AT_NON_HIDDEN);
  do {
    if (!iter.AdvanceFollowingProxies()) return MaybeHandle<Object>();
  } while (!iter.IsAtEnd());
  return PrototypeIterator::GetCurrent(iter);
}

MaybeHandle<Object> JSReceiver::GetProperty(Isolate* isolate,
                                            Handle<JSReceiver> receiver,
                                            const char* name) {
  Handle<String> str = isolate->factory()->InternalizeUtf8String(name);
  return GetProperty(isolate, receiver, str);
}

// static
V8_WARN_UNUSED_RESULT MaybeHandle<FixedArray> JSReceiver::OwnPropertyKeys(
    Handle<JSReceiver> object) {
  return KeyAccumulator::GetKeys(object, KeyCollectionMode::kOwnOnly,
                                 ALL_PROPERTIES,
                                 GetKeysConversion::kConvertToString);
}

bool JSObject::PrototypeHasNoElements(Isolate* isolate, JSObject* object) {
  DisallowHeapAllocation no_gc;
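  // Walk up the prototype chain; bail out as soon as a prototype requires
  // custom element handling or has any elements of its own.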
  HeapObject* prototype = HeapObject::cast(object->map()->prototype());
  ReadOnlyRoots roots(isolate);
  HeapObject* null = roots.null_value();
  HeapObject* empty_fixed_array = roots.empty_fixed_array();
  HeapObject* empty_slow_element_dictionary =
      roots.empty_slow_element_dictionary();
  while (prototype != null) {
    Map* map = prototype->map();
    if (map->IsCustomElementsReceiverMap()) return false;
    HeapObject* elements = JSObject::cast(prototype)->elements();
    if (elements != empty_fixed_array &&
        elements != empty_slow_element_dictionary) {
      return false;
    }
    prototype = HeapObject::cast(map->prototype());
  }
  return true;
}

Object** HeapObject::RawField(const HeapObject* obj, int byte_offset) {
  return reinterpret_cast<Object**>(FIELD_ADDR(obj, byte_offset));
}

MaybeObject** HeapObject::RawMaybeWeakField(HeapObject* obj, int byte_offset) {
  return reinterpret_cast<MaybeObject**>(FIELD_ADDR(obj, byte_offset));
}

int Smi::ToInt(const Object* object) { return Smi::cast(object)->value(); }

MapWord MapWord::FromMap(const Map* map) {
  return MapWord(reinterpret_cast<uintptr_t>(map));
}

Map* MapWord::ToMap() const { return reinterpret_cast<Map*>(value_); }

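// A forwarding address is stored in the map word with the heap object tag
// stripped (see FromForwardingAddress below), so it can be told apart from a
// real Map pointer by its Smi tag.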
bool MapWord::IsForwardingAddress() const {
  return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
}


MapWord MapWord::FromForwardingAddress(HeapObject* object) {
  Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
  return MapWord(static_cast<uintptr_t>(raw));
}


HeapObject* MapWord::ToForwardingAddress() {
  DCHECK(IsForwardingAddress());
  return HeapObject::FromAddress(static_cast<Address>(value_));
}


#ifdef VERIFY_HEAP
void HeapObject::VerifyObjectField(Isolate* isolate, int offset) {
  VerifyPointer(isolate, READ_FIELD(this, offset));
}

void HeapObject::VerifyMaybeObjectField(Isolate* isolate, int offset) {
  MaybeObject::VerifyMaybeObjectPointer(isolate, READ_WEAK_FIELD(this, offset));
}

void HeapObject::VerifySmiField(int offset) {
  CHECK(READ_FIELD(this, offset)->IsSmi());
}
#endif

ReadOnlyRoots HeapObject::GetReadOnlyRoots() const {
  // TODO(v8:7464): When RO_SPACE is embedded, this will access a global
  // variable instead.
  return ReadOnlyRoots(MemoryChunk::FromHeapObject(this)->heap());
}

Heap* NeverReadOnlySpaceObject::GetHeap() const {
  MemoryChunk* chunk =
      MemoryChunk::FromAddress(reinterpret_cast<Address>(this));
  // Make sure we are not accessing an object in RO space.
  SLOW_DCHECK(chunk->owner()->identity() != RO_SPACE);
  Heap* heap = chunk->heap();
  SLOW_DCHECK(heap != nullptr);
  return heap;
}

Isolate* NeverReadOnlySpaceObject::GetIsolate() const {
  return GetHeap()->isolate();
}

Map* HeapObject::map() const {
  return map_word().ToMap();
}


void HeapObject::set_map(Map* value) {
  if (value != nullptr) {
#ifdef VERIFY_HEAP
    Heap::FromWritableHeapObject(this)->VerifyObjectLayoutChange(this, value);
#endif
  }
  set_map_word(MapWord::FromMap(value));
  if (value != nullptr) {
    // TODO(1600) We are passing nullptr as a slot because maps can never be
    // on an evacuation candidate.
    MarkingBarrier(this, nullptr, value);
  }
}

Map* HeapObject::synchronized_map() const {
  return synchronized_map_word().ToMap();
}


void HeapObject::synchronized_set_map(Map* value) {
  if (value != nullptr) {
#ifdef VERIFY_HEAP
    Heap::FromWritableHeapObject(this)->VerifyObjectLayoutChange(this, value);
#endif
  }
  synchronized_set_map_word(MapWord::FromMap(value));
  if (value != nullptr) {
    // TODO(1600) We are passing nullptr as a slot because maps can never be
    // on an evacuation candidate.
    MarkingBarrier(this, nullptr, value);
  }
}


// Unsafe accessor omitting write barrier.
void HeapObject::set_map_no_write_barrier(Map* value) {
  if (value != nullptr) {
#ifdef VERIFY_HEAP
    Heap::FromWritableHeapObject(this)->VerifyObjectLayoutChange(this, value);
#endif
  }
  set_map_word(MapWord::FromMap(value));
}

void HeapObject::set_map_after_allocation(Map* value, WriteBarrierMode mode) {
  set_map_word(MapWord::FromMap(value));
  if (mode != SKIP_WRITE_BARRIER) {
    DCHECK_NOT_NULL(value);
    // TODO(1600) We are passing nullptr as a slot because maps can never be
    // on an evacuation candidate.
    MarkingBarrier(this, nullptr, value);
  }
}

HeapObject** HeapObject::map_slot() {
  return reinterpret_cast<HeapObject**>(FIELD_ADDR(this, kMapOffset));
}

MapWord HeapObject::map_word() const {
  return MapWord(
      reinterpret_cast<uintptr_t>(RELAXED_READ_FIELD(this, kMapOffset)));
}


void HeapObject::set_map_word(MapWord map_word) {
  RELAXED_WRITE_FIELD(this, kMapOffset,
                      reinterpret_cast<Object*>(map_word.value_));
}


MapWord HeapObject::synchronized_map_word() const {
  return MapWord(
      reinterpret_cast<uintptr_t>(ACQUIRE_READ_FIELD(this, kMapOffset)));
}


void HeapObject::synchronized_set_map_word(MapWord map_word) {
  RELEASE_WRITE_FIELD(
      this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
}

int HeapObject::Size() const { return SizeFromMap(map()); }

double HeapNumberBase::value() const {
  return READ_DOUBLE_FIELD(this, kValueOffset);
}

void HeapNumberBase::set_value(double value) {
  WRITE_DOUBLE_FIELD(this, kValueOffset, value);
}

uint64_t HeapNumberBase::value_as_bits() const {
  return READ_UINT64_FIELD(this, kValueOffset);
}

void HeapNumberBase::set_value_as_bits(uint64_t bits) {
  WRITE_UINT64_FIELD(this, kValueOffset, bits);
}

int HeapNumberBase::get_exponent() {
  return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
          kExponentShift) - kExponentBias;
}

int HeapNumberBase::get_sign() {
  return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
}

ACCESSORS(JSReceiver, raw_properties_or_hash, Object, kPropertiesOrHashOffset)

FixedArrayBase* JSObject::elements() const {
  Object* array = READ_FIELD(this, kElementsOffset);
  return static_cast<FixedArrayBase*>(array);
}

bool AllocationSite::HasWeakNext() const {
  return map() == GetReadOnlyRoots().allocation_site_map();
}

void AllocationSite::Initialize() {
  set_transition_info_or_boilerplate(Smi::kZero);
  SetElementsKind(GetInitialFastElementsKind());
  set_nested_site(Smi::kZero);
  set_pretenure_data(0);
  set_pretenure_create_count(0);
  set_dependent_code(
      DependentCode::cast(GetReadOnlyRoots().empty_weak_fixed_array()),
      SKIP_WRITE_BARRIER);
}

bool AllocationSite::IsZombie() const {
  return pretenure_decision() == kZombie;
}

bool AllocationSite::IsMaybeTenure() const {
  return pretenure_decision() == kMaybeTenure;
}

bool AllocationSite::PretenuringDecisionMade() const {
  return pretenure_decision() != kUndecided;
}


void AllocationSite::MarkZombie() {
  DCHECK(!IsZombie());
  Initialize();
  set_pretenure_decision(kZombie);
}

ElementsKind AllocationSite::GetElementsKind() const {
  return ElementsKindBits::decode(transition_info());
}


void AllocationSite::SetElementsKind(ElementsKind kind) {
  set_transition_info(ElementsKindBits::update(transition_info(), kind));
}

bool AllocationSite::CanInlineCall() const {
  return DoNotInlineBit::decode(transition_info()) == 0;
}


void AllocationSite::SetDoNotInlineCall() {
  set_transition_info(DoNotInlineBit::update(transition_info(), true));
}

bool AllocationSite::PointsToLiteral() const {
  Object* raw_value = transition_info_or_boilerplate();
  DCHECK_EQ(!raw_value->IsSmi(),
            raw_value->IsJSArray() || raw_value->IsJSObject());
  return !raw_value->IsSmi();
}


// Heuristic: We only need to create allocation site info if the boilerplate
// elements kind is the initial elements kind.
bool AllocationSite::ShouldTrack(ElementsKind boilerplate_elements_kind) {
  return IsSmiElementsKind(boilerplate_elements_kind);
}

inline bool AllocationSite::CanTrack(InstanceType type) {
  if (FLAG_allocation_site_pretenuring) {
    // TurboFan doesn't care at all about String pretenuring feedback,
    // so don't bother even trying to track that.
    return type == JS_ARRAY_TYPE || type == JS_OBJECT_TYPE;
  }
  return type == JS_ARRAY_TYPE;
}

AllocationSite::PretenureDecision AllocationSite::pretenure_decision() const {
  return PretenureDecisionBits::decode(pretenure_data());
}

void AllocationSite::set_pretenure_decision(PretenureDecision decision) {
  int32_t value = pretenure_data();
  set_pretenure_data(PretenureDecisionBits::update(value, decision));
}

bool AllocationSite::deopt_dependent_code() const {
  return DeoptDependentCodeBit::decode(pretenure_data());
}

void AllocationSite::set_deopt_dependent_code(bool deopt) {
  int32_t value = pretenure_data();
  set_pretenure_data(DeoptDependentCodeBit::update(value, deopt));
}

int AllocationSite::memento_found_count() const {
  return MementoFoundCountBits::decode(pretenure_data());
}

inline void AllocationSite::set_memento_found_count(int count) {
  int32_t value = pretenure_data();
  // Verify that we can count more mementos than we can possibly find in one
  // new space collection.
  DCHECK((GetHeap()->MaxSemiSpaceSize() /
          (Heap::kMinObjectSizeInWords * kPointerSize +
           AllocationMemento::kSize)) < MementoFoundCountBits::kMax);
  DCHECK_LT(count, MementoFoundCountBits::kMax);
  set_pretenure_data(MementoFoundCountBits::update(value, count));
}

int AllocationSite::memento_create_count() const {
  return pretenure_create_count();
}

void AllocationSite::set_memento_create_count(int count) {
  set_pretenure_create_count(count);
}

bool AllocationSite::IncrementMementoFoundCount(int increment) {
  if (IsZombie()) return false;

  int value = memento_found_count();
  set_memento_found_count(value + increment);
  return memento_found_count() >= kPretenureMinimumCreated;
}


inline void AllocationSite::IncrementMementoCreateCount() {
  DCHECK(FLAG_allocation_site_pretenuring);
  int value = memento_create_count();
  set_memento_create_count(value + 1);
}

bool AllocationMemento::IsValid() const {
  return allocation_site()->IsAllocationSite() &&
         !AllocationSite::cast(allocation_site())->IsZombie();
}

AllocationSite* AllocationMemento::GetAllocationSite() const {
  DCHECK(IsValid());
  return AllocationSite::cast(allocation_site());
}

Address AllocationMemento::GetAllocationSiteUnchecked() const {
  return reinterpret_cast<Address>(allocation_site());
}

void JSObject::EnsureCanContainHeapObjectElements(Handle<JSObject> object) {
  JSObject::ValidateElements(*object);
  ElementsKind elements_kind = object->map()->elements_kind();
  if (!IsObjectElementsKind(elements_kind)) {
    if (IsHoleyElementsKind(elements_kind)) {
      TransitionElementsKind(object, HOLEY_ELEMENTS);
    } else {
      TransitionElementsKind(object, PACKED_ELEMENTS);
    }
  }
}


void JSObject::EnsureCanContainElements(Handle<JSObject> object,
                                        Object** objects,
                                        uint32_t count,
                                        EnsureElementsMode mode) {
  ElementsKind current_kind = object->GetElementsKind();
  ElementsKind target_kind = current_kind;
  {
    DisallowHeapAllocation no_allocation;
    DCHECK(mode != ALLOW_COPIED_DOUBLE_ELEMENTS);
    bool is_holey = IsHoleyElementsKind(current_kind);
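    // HOLEY_ELEMENTS can already hold any value, so no transition is needed.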
1173 if (current_kind == HOLEY_ELEMENTS) return;
1174 Object* the_hole = object->GetReadOnlyRoots().the_hole_value();
1175 for (uint32_t i = 0; i < count; ++i) {
1176 Object* current = *objects++;
1177 if (current == the_hole) {
1178 is_holey = true;
1179 target_kind = GetHoleyElementsKind(target_kind);
1180 } else if (!current->IsSmi()) {
1181 if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS && current->IsNumber()) {
1182 if (IsSmiElementsKind(target_kind)) {
1183 if (is_holey) {
1184 target_kind = HOLEY_DOUBLE_ELEMENTS;
1185 } else {
1186 target_kind = PACKED_DOUBLE_ELEMENTS;
1187 }
1188 }
1189 } else if (is_holey) {
1190 target_kind = HOLEY_ELEMENTS;
1191 break;
1192 } else {
1193 target_kind = PACKED_ELEMENTS;
1194 }
1195 }
1196 }
1197 }
1198 if (target_kind != current_kind) {
1199 TransitionElementsKind(object, target_kind);
1200 }
1201 }
1202
1203
EnsureCanContainElements(Handle<JSObject> object,Handle<FixedArrayBase> elements,uint32_t length,EnsureElementsMode mode)1204 void JSObject::EnsureCanContainElements(Handle<JSObject> object,
1205 Handle<FixedArrayBase> elements,
1206 uint32_t length,
1207 EnsureElementsMode mode) {
1208 ReadOnlyRoots roots = object->GetReadOnlyRoots();
1209 if (elements->map() != roots.fixed_double_array_map()) {
1210 DCHECK(elements->map() == roots.fixed_array_map() ||
1211 elements->map() == roots.fixed_cow_array_map());
1212 if (mode == ALLOW_COPIED_DOUBLE_ELEMENTS) {
1213 mode = DONT_ALLOW_DOUBLE_ELEMENTS;
1214 }
1215 Object** objects =
1216 Handle<FixedArray>::cast(elements)->GetFirstElementAddress();
1217 EnsureCanContainElements(object, objects, length, mode);
1218 return;
1219 }
1220
1221 DCHECK(mode == ALLOW_COPIED_DOUBLE_ELEMENTS);
1222 if (object->GetElementsKind() == HOLEY_SMI_ELEMENTS) {
1223 TransitionElementsKind(object, HOLEY_DOUBLE_ELEMENTS);
1224 } else if (object->GetElementsKind() == PACKED_SMI_ELEMENTS) {
1225 Handle<FixedDoubleArray> double_array =
1226 Handle<FixedDoubleArray>::cast(elements);
1227 for (uint32_t i = 0; i < length; ++i) {
1228 if (double_array->is_the_hole(i)) {
1229 TransitionElementsKind(object, HOLEY_DOUBLE_ELEMENTS);
1230 return;
1231 }
1232 }
1233 TransitionElementsKind(object, PACKED_DOUBLE_ELEMENTS);
1234 }
1235 }
1236
1237
SetMapAndElements(Handle<JSObject> object,Handle<Map> new_map,Handle<FixedArrayBase> value)1238 void JSObject::SetMapAndElements(Handle<JSObject> object,
1239 Handle<Map> new_map,
1240 Handle<FixedArrayBase> value) {
1241 JSObject::MigrateToMap(object, new_map);
1242 DCHECK((object->map()->has_fast_smi_or_object_elements() ||
1243 (*value == object->GetReadOnlyRoots().empty_fixed_array()) ||
1244 object->map()->has_fast_string_wrapper_elements()) ==
1245 (value->map() == object->GetReadOnlyRoots().fixed_array_map() ||
1246 value->map() == object->GetReadOnlyRoots().fixed_cow_array_map()));
1247 DCHECK((*value == object->GetReadOnlyRoots().empty_fixed_array()) ||
1248 (object->map()->has_fast_double_elements() ==
1249 value->IsFixedDoubleArray()));
1250 object->set_elements(*value);
1251 }
1252
1253
set_elements(FixedArrayBase * value,WriteBarrierMode mode)1254 void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
1255 WRITE_FIELD(this, kElementsOffset, value);
1256 CONDITIONAL_WRITE_BARRIER(this, kElementsOffset, value, mode);
1257 }
1258
1259
initialize_elements()1260 void JSObject::initialize_elements() {
1261 FixedArrayBase* elements = map()->GetInitialElements();
1262 WRITE_FIELD(this, kElementsOffset, elements);
1263 }
1264
1265
GetIndexedInterceptor()1266 InterceptorInfo* JSObject::GetIndexedInterceptor() {
1267 return map()->GetIndexedInterceptor();
1268 }
1269
GetNamedInterceptor()1270 InterceptorInfo* JSObject::GetNamedInterceptor() {
1271 return map()->GetNamedInterceptor();
1272 }
1273
to_number_raw()1274 double Oddball::to_number_raw() const {
1275 return READ_DOUBLE_FIELD(this, kToNumberRawOffset);
1276 }
1277
set_to_number_raw(double value)1278 void Oddball::set_to_number_raw(double value) {
1279 WRITE_DOUBLE_FIELD(this, kToNumberRawOffset, value);
1280 }
1281
set_to_number_raw_as_bits(uint64_t bits)1282 void Oddball::set_to_number_raw_as_bits(uint64_t bits) {
1283 WRITE_UINT64_FIELD(this, kToNumberRawOffset, bits);
1284 }
1285
ACCESSORS(Oddball,to_string,String,kToStringOffset)1286 ACCESSORS(Oddball, to_string, String, kToStringOffset)
1287 ACCESSORS(Oddball, to_number, Object, kToNumberOffset)
1288 ACCESSORS(Oddball, type_of, String, kTypeOfOffset)
1289
1290 byte Oddball::kind() const { return Smi::ToInt(READ_FIELD(this, kKindOffset)); }
1291
set_kind(byte value)1292 void Oddball::set_kind(byte value) {
1293 WRITE_FIELD(this, kKindOffset, Smi::FromInt(value));
1294 }
1295
1296
1297 // static
ToNumber(Isolate * isolate,Handle<Oddball> input)1298 Handle<Object> Oddball::ToNumber(Isolate* isolate, Handle<Oddball> input) {
1299 return handle(input->to_number(), isolate);
1300 }
1301
1302
ACCESSORS(Cell,value,Object,kValueOffset)1303 ACCESSORS(Cell, value, Object, kValueOffset)
1304 ACCESSORS(FeedbackCell, value, HeapObject, kValueOffset)
1305 ACCESSORS(PropertyCell, dependent_code, DependentCode, kDependentCodeOffset)
1306 ACCESSORS(PropertyCell, name, Name, kNameOffset)
1307 ACCESSORS(PropertyCell, value, Object, kValueOffset)
1308 ACCESSORS(PropertyCell, property_details_raw, Object, kDetailsOffset)
1309
1310 PropertyDetails PropertyCell::property_details() const {
1311 return PropertyDetails(Smi::cast(property_details_raw()));
1312 }
1313
1314
set_property_details(PropertyDetails details)1315 void PropertyCell::set_property_details(PropertyDetails details) {
1316 set_property_details_raw(details.AsSmi());
1317 }
1318
GetHeaderSize()1319 int JSObject::GetHeaderSize() const { return GetHeaderSize(map()); }
1320
GetHeaderSize(const Map * map)1321 int JSObject::GetHeaderSize(const Map* map) {
1322 // Check for the most common kind of JavaScript object before
1323 // falling into the generic switch. This speeds up the internal
1324 // field operations considerably on average.
1325 InstanceType instance_type = map->instance_type();
1326 return instance_type == JS_OBJECT_TYPE
1327 ? JSObject::kHeaderSize
1328 : GetHeaderSize(instance_type, map->has_prototype_slot());
1329 }
1330
IsSpecialReceiverInstanceType(InstanceType instance_type)1331 inline bool IsSpecialReceiverInstanceType(InstanceType instance_type) {
1332 return instance_type <= LAST_SPECIAL_RECEIVER_TYPE;
1333 }
1334
1335 // This should be in objects/map-inl.h, but can't, because of a cyclic
1336 // dependency.
IsSpecialReceiverMap()1337 bool Map::IsSpecialReceiverMap() const {
1338 bool result = IsSpecialReceiverInstanceType(instance_type());
1339 DCHECK_IMPLIES(!result,
1340 !has_named_interceptor() && !is_access_check_needed());
1341 return result;
1342 }
1343
IsCustomElementsReceiverInstanceType(InstanceType instance_type)1344 inline bool IsCustomElementsReceiverInstanceType(InstanceType instance_type) {
1345 return instance_type <= LAST_CUSTOM_ELEMENTS_RECEIVER;
1346 }
1347
1348 // This should be in objects/map-inl.h, but can't, because of a cyclic
1349 // dependency.
IsCustomElementsReceiverMap()1350 bool Map::IsCustomElementsReceiverMap() const {
1351 return IsCustomElementsReceiverInstanceType(instance_type());
1352 }
1353
1354 // static
GetEmbedderFieldCount(const Map * map)1355 int JSObject::GetEmbedderFieldCount(const Map* map) {
1356 int instance_size = map->instance_size();
1357 if (instance_size == kVariableSizeSentinel) return 0;
1358 return ((instance_size - GetHeaderSize(map)) >> kPointerSizeLog2) -
1359 map->GetInObjectProperties();
1360 }
1361
GetEmbedderFieldCount()1362 int JSObject::GetEmbedderFieldCount() const {
1363 return GetEmbedderFieldCount(map());
1364 }
1365
GetEmbedderFieldOffset(int index)1366 int JSObject::GetEmbedderFieldOffset(int index) {
1367 DCHECK(index < GetEmbedderFieldCount() && index >= 0);
1368 return GetHeaderSize() + (kPointerSize * index);
1369 }
1370
GetEmbedderField(int index)1371 Object* JSObject::GetEmbedderField(int index) {
1372 DCHECK(index < GetEmbedderFieldCount() && index >= 0);
1373 // Internal objects do follow immediately after the header, whereas in-object
1374 // properties are at the end of the object. Therefore there is no need
1375 // to adjust the index here.
1376 return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));
1377 }
1378
SetEmbedderField(int index,Object * value)1379 void JSObject::SetEmbedderField(int index, Object* value) {
1380 DCHECK(index < GetEmbedderFieldCount() && index >= 0);
1381 // Internal objects do follow immediately after the header, whereas in-object
1382 // properties are at the end of the object. Therefore there is no need
1383 // to adjust the index here.
1384 int offset = GetHeaderSize() + (kPointerSize * index);
1385 WRITE_FIELD(this, offset, value);
1386 WRITE_BARRIER(this, offset, value);
1387 }
1388
SetEmbedderField(int index,Smi * value)1389 void JSObject::SetEmbedderField(int index, Smi* value) {
1390 DCHECK(index < GetEmbedderFieldCount() && index >= 0);
1391 // Internal objects do follow immediately after the header, whereas in-object
1392 // properties are at the end of the object. Therefore there is no need
1393 // to adjust the index here.
1394 int offset = GetHeaderSize() + (kPointerSize * index);
1395 WRITE_FIELD(this, offset, value);
1396 }
1397
1398
IsUnboxedDoubleField(FieldIndex index)1399 bool JSObject::IsUnboxedDoubleField(FieldIndex index) {
1400 if (!FLAG_unbox_double_fields) return false;
1401 return map()->IsUnboxedDoubleField(index);
1402 }
1403
1404 // Access fast-case object properties at index. The use of these routines
1405 // is needed to correctly distinguish between properties stored in-object and
1406 // properties stored in the properties array.
RawFastPropertyAt(FieldIndex index)1407 Object* JSObject::RawFastPropertyAt(FieldIndex index) {
1408 DCHECK(!IsUnboxedDoubleField(index));
1409 if (index.is_inobject()) {
1410 return READ_FIELD(this, index.offset());
1411 } else {
1412 return property_array()->get(index.outobject_array_index());
1413 }
1414 }
1415
1416
RawFastDoublePropertyAt(FieldIndex index)1417 double JSObject::RawFastDoublePropertyAt(FieldIndex index) {
1418 DCHECK(IsUnboxedDoubleField(index));
1419 return READ_DOUBLE_FIELD(this, index.offset());
1420 }
1421
RawFastDoublePropertyAsBitsAt(FieldIndex index)1422 uint64_t JSObject::RawFastDoublePropertyAsBitsAt(FieldIndex index) {
1423 DCHECK(IsUnboxedDoubleField(index));
1424 return READ_UINT64_FIELD(this, index.offset());
1425 }
1426
RawFastPropertyAtPut(FieldIndex index,Object * value)1427 void JSObject::RawFastPropertyAtPut(FieldIndex index, Object* value) {
1428 if (index.is_inobject()) {
1429 int offset = index.offset();
1430 WRITE_FIELD(this, offset, value);
1431 WRITE_BARRIER(this, offset, value);
1432 } else {
1433 property_array()->set(index.outobject_array_index(), value);
1434 }
1435 }
1436
RawFastDoublePropertyAsBitsAtPut(FieldIndex index,uint64_t bits)1437 void JSObject::RawFastDoublePropertyAsBitsAtPut(FieldIndex index,
1438 uint64_t bits) {
1439 // Double unboxing is enabled only on 64-bit platforms.
1440 DCHECK_EQ(kDoubleSize, kPointerSize);
1441 Address field_addr = FIELD_ADDR(this, index.offset());
1442 base::Relaxed_Store(reinterpret_cast<base::AtomicWord*>(field_addr),
1443 static_cast<base::AtomicWord>(bits));
1444 }
1445
FastPropertyAtPut(FieldIndex index,Object * value)1446 void JSObject::FastPropertyAtPut(FieldIndex index, Object* value) {
1447 if (IsUnboxedDoubleField(index)) {
1448 DCHECK(value->IsMutableHeapNumber());
1449 // Ensure that all bits of the double value are preserved.
1450 RawFastDoublePropertyAsBitsAtPut(
1451 index, MutableHeapNumber::cast(value)->value_as_bits());
1452 } else {
1453 RawFastPropertyAtPut(index, value);
1454 }
1455 }
1456
WriteToField(int descriptor,PropertyDetails details,Object * value)1457 void JSObject::WriteToField(int descriptor, PropertyDetails details,
1458 Object* value) {
1459 DCHECK_EQ(kField, details.location());
1460 DCHECK_EQ(kData, details.kind());
1461 DisallowHeapAllocation no_gc;
1462 FieldIndex index = FieldIndex::ForDescriptor(map(), descriptor);
1463 if (details.representation().IsDouble()) {
1464 // Nothing more to be done.
1465 if (value->IsUninitialized()) {
1466 return;
1467 }
1468 // Manipulating the signaling NaN used for the hole and uninitialized
1469 // double field sentinel in C++, e.g. with bit_cast or value()/set_value(),
1470 // will change its value on ia32 (the x87 stack is used to return values
1471 // and stores to the stack silently clear the signalling bit).
1472 uint64_t bits;
1473 if (value->IsSmi()) {
1474 bits = bit_cast<uint64_t>(static_cast<double>(Smi::ToInt(value)));
1475 } else {
1476 DCHECK(value->IsHeapNumber());
1477 bits = HeapNumber::cast(value)->value_as_bits();
1478 }
1479 if (IsUnboxedDoubleField(index)) {
1480 RawFastDoublePropertyAsBitsAtPut(index, bits);
1481 } else {
1482 auto box = MutableHeapNumber::cast(RawFastPropertyAt(index));
1483 box->set_value_as_bits(bits);
1484 }
1485 } else {
1486 RawFastPropertyAtPut(index, value);
1487 }
1488 }
1489
1490 int JSObject::GetInObjectPropertyOffset(int index) {
1491 return map()->GetInObjectPropertyOffset(index);
1492 }
1493
1494
1495 Object* JSObject::InObjectPropertyAt(int index) {
1496 int offset = GetInObjectPropertyOffset(index);
1497 return READ_FIELD(this, offset);
1498 }
1499
1500
1501 Object* JSObject::InObjectPropertyAtPut(int index,
1502 Object* value,
1503 WriteBarrierMode mode) {
1504 // Adjust for the number of properties stored in the object.
1505 int offset = GetInObjectPropertyOffset(index);
1506 WRITE_FIELD(this, offset, value);
1507 CONDITIONAL_WRITE_BARRIER(this, offset, value, mode);
1508 return value;
1509 }
1510
1511
1512 void JSObject::InitializeBody(Map* map, int start_offset,
1513 Object* pre_allocated_value,
1514 Object* filler_value) {
1515 DCHECK(!filler_value->IsHeapObject() || !Heap::InNewSpace(filler_value));
1516 DCHECK(!pre_allocated_value->IsHeapObject() ||
1517 !Heap::InNewSpace(pre_allocated_value));
1518 int size = map->instance_size();
1519 int offset = start_offset;
1520 if (filler_value != pre_allocated_value) {
1521 int end_of_pre_allocated_offset =
1522 size - (map->UnusedPropertyFields() * kPointerSize);
1523 DCHECK_LE(kHeaderSize, end_of_pre_allocated_offset);
1524 while (offset < end_of_pre_allocated_offset) {
1525 WRITE_FIELD(this, offset, pre_allocated_value);
1526 offset += kPointerSize;
1527 }
1528 }
1529 while (offset < size) {
1530 WRITE_FIELD(this, offset, filler_value);
1531 offset += kPointerSize;
1532 }
1533 }
1534
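// Fills every payload field of a freshly allocated Struct with undefined.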
1535 void Struct::InitializeBody(int object_size) {
1536 Object* value = GetReadOnlyRoots().undefined_value();
1537 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
1538 WRITE_FIELD(this, offset, value);
1539 }
1540 }
1541
1542 bool Object::ToArrayLength(uint32_t* index) const {
1543 return Object::ToUint32(index);
1544 }
1545
1546 bool Object::ToArrayIndex(uint32_t* index) const {
1547 return Object::ToUint32(index) && *index != kMaxUInt32;
1548 }
1549
1550
1551 void Object::VerifyApiCallResultType() {
1552 #if DEBUG
1553 if (IsSmi()) return;
1554 DCHECK(IsHeapObject());
1555 if (!(IsString() || IsSymbol() || IsJSReceiver() || IsHeapNumber() ||
1556 IsBigInt() || IsUndefined() || IsTrue() || IsFalse() || IsNull())) {
1557 FATAL("API call returned invalid object");
1558 }
1559 #endif // DEBUG
1560 }
1561
1562 Object* PropertyArray::get(int index) const {
1563 DCHECK_GE(index, 0);
1564 DCHECK_LT(index, this->length());
1565 return RELAXED_READ_FIELD(this, kHeaderSize + index * kPointerSize);
1566 }
1567
1568 void PropertyArray::set(int index, Object* value) {
1569 DCHECK(IsPropertyArray());
1570 DCHECK_GE(index, 0);
1571 DCHECK_LT(index, this->length());
1572 int offset = kHeaderSize + index * kPointerSize;
1573 RELAXED_WRITE_FIELD(this, offset, value);
1574 WRITE_BARRIER(this, offset, value);
1575 }
1576
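// RegExpMatchInfo is a FixedArray with a small fixed header (number of
// capture registers, last subject and last input) followed by the capture
// registers themselves; the accessors below read and write those slots.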
1577 int RegExpMatchInfo::NumberOfCaptureRegisters() {
1578 DCHECK_GE(length(), kLastMatchOverhead);
1579 Object* obj = get(kNumberOfCapturesIndex);
1580 return Smi::ToInt(obj);
1581 }
1582
1583 void RegExpMatchInfo::SetNumberOfCaptureRegisters(int value) {
1584 DCHECK_GE(length(), kLastMatchOverhead);
1585 set(kNumberOfCapturesIndex, Smi::FromInt(value));
1586 }
1587
1588 String* RegExpMatchInfo::LastSubject() {
1589 DCHECK_GE(length(), kLastMatchOverhead);
1590 Object* obj = get(kLastSubjectIndex);
1591 return String::cast(obj);
1592 }
1593
1594 void RegExpMatchInfo::SetLastSubject(String* value) {
1595 DCHECK_GE(length(), kLastMatchOverhead);
1596 set(kLastSubjectIndex, value);
1597 }
1598
1599 Object* RegExpMatchInfo::LastInput() {
1600 DCHECK_GE(length(), kLastMatchOverhead);
1601 return get(kLastInputIndex);
1602 }
1603
1604 void RegExpMatchInfo::SetLastInput(Object* value) {
1605 DCHECK_GE(length(), kLastMatchOverhead);
1606 set(kLastInputIndex, value);
1607 }
1608
1609 int RegExpMatchInfo::Capture(int i) {
1610 DCHECK_LT(i, NumberOfCaptureRegisters());
1611 Object* obj = get(kFirstCaptureIndex + i);
1612 return Smi::ToInt(obj);
1613 }
1614
1615 void RegExpMatchInfo::SetCapture(int i, int value) {
1616 DCHECK_LT(i, NumberOfCaptureRegisters());
1617 set(kFirstCaptureIndex + i, Smi::FromInt(value));
1618 }
1619
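// Callers holding a DisallowHeapAllocation scope may skip the write barrier
// for new-space objects, but only while incremental marking is not running.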
1620 WriteBarrierMode HeapObject::GetWriteBarrierMode(
1621 const DisallowHeapAllocation& promise) {
1622 Heap* heap = Heap::FromWritableHeapObject(this);
1623 if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
1624 if (Heap::InNewSpace(this)) return SKIP_WRITE_BARRIER;
1625 return UPDATE_WRITE_BARRIER;
1626 }
1627
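// On 32-bit hosts, objects containing raw float64 data need 8-byte-aware
// placement: double arrays are double aligned, while HeapNumbers are placed
// unaligned so that their value field ends up on an 8-byte boundary.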
1628 AllocationAlignment HeapObject::RequiredAlignment(Map* map) {
1629 #ifdef V8_HOST_ARCH_32_BIT
1630 int instance_type = map->instance_type();
1631 if (instance_type == FIXED_FLOAT64_ARRAY_TYPE ||
1632 instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
1633 return kDoubleAligned;
1634 }
1635 if (instance_type == HEAP_NUMBER_TYPE) return kDoubleUnaligned;
1636 #endif // V8_HOST_ARCH_32_BIT
1637 return kWordAligned;
1638 }
1639
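// Hash-seed-dependent containers have to be rehashed when the seed changes,
// e.g. after deserializing a snapshot; empty or single-element containers can
// keep their current layout.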
1640 bool HeapObject::NeedsRehashing() const {
1641 switch (map()->instance_type()) {
1642 case DESCRIPTOR_ARRAY_TYPE:
1643 return DescriptorArray::cast(this)->number_of_descriptors() > 1;
1644 case TRANSITION_ARRAY_TYPE:
1645 return TransitionArray::cast(this)->number_of_entries() > 1;
1646 case ORDERED_HASH_MAP_TYPE:
1647 return OrderedHashMap::cast(this)->NumberOfElements() > 0;
1648 case ORDERED_HASH_SET_TYPE:
1649 return OrderedHashSet::cast(this)->NumberOfElements() > 0;
1650 case NAME_DICTIONARY_TYPE:
1651 case GLOBAL_DICTIONARY_TYPE:
1652 case NUMBER_DICTIONARY_TYPE:
1653 case SIMPLE_NUMBER_DICTIONARY_TYPE:
1654 case STRING_TABLE_TYPE:
1655 case HASH_TABLE_TYPE:
1656 case SMALL_ORDERED_HASH_MAP_TYPE:
1657 case SMALL_ORDERED_HASH_SET_TYPE:
1658 return true;
1659 default:
1660 return false;
1661 }
1662 }
1663
1664 Address HeapObject::GetFieldAddress(int field_offset) const {
1665 return FIELD_ADDR(this, field_offset);
1666 }
1667
1668 void PropertyArray::set(int index, Object* value, WriteBarrierMode mode) {
1669 DCHECK_GE(index, 0);
1670 DCHECK_LT(index, this->length());
1671 int offset = kHeaderSize + index * kPointerSize;
1672 RELAXED_WRITE_FIELD(this, offset, value);
1673 CONDITIONAL_WRITE_BARRIER(this, offset, value, mode);
1674 }
1675
1676 Object** PropertyArray::data_start() {
1677 return HeapObject::RawField(this, kHeaderSize);
1678 }
1679
1680 ACCESSORS(EnumCache, keys, FixedArray, kKeysOffset)
1681 ACCESSORS(EnumCache, indices, FixedArray, kIndicesOffset)
1682
1683 int DescriptorArray::number_of_descriptors() const {
1684 return Smi::ToInt(get(kDescriptorLengthIndex)->ToSmi());
1685 }
1686
1687 int DescriptorArray::number_of_descriptors_storage() const {
1688 return (length() - kFirstIndex) / kEntrySize;
1689 }
1690
1691 int DescriptorArray::NumberOfSlackDescriptors() const {
1692 return number_of_descriptors_storage() - number_of_descriptors();
1693 }
1694
1695
1696 void DescriptorArray::SetNumberOfDescriptors(int number_of_descriptors) {
1697 set(kDescriptorLengthIndex,
1698 MaybeObject::FromObject(Smi::FromInt(number_of_descriptors)));
1699 }
1700
1701 inline int DescriptorArray::number_of_entries() const {
1702 return number_of_descriptors();
1703 }
1704
1705 void DescriptorArray::CopyEnumCacheFrom(DescriptorArray* array) {
1706 set(kEnumCacheIndex, array->get(kEnumCacheIndex));
1707 }
1708
1709 EnumCache* DescriptorArray::GetEnumCache() {
1710 return EnumCache::cast(get(kEnumCacheIndex)->ToStrongHeapObject());
1711 }
1712
1713 // Perform a binary search in a fixed array.
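// Entries are ordered by hash (via the sorted-key indices); equal hashes are
// then scanned linearly. With search_mode == VALID_ENTRIES only the first
// |valid_entries| descriptors count as a match.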
1714 template <SearchMode search_mode, typename T>
1715 int BinarySearch(T* array, Name* name, int valid_entries,
1716 int* out_insertion_index) {
1717 DCHECK(search_mode == ALL_ENTRIES || out_insertion_index == nullptr);
1718 int low = 0;
1719 int high = array->number_of_entries() - 1;
1720 uint32_t hash = name->hash_field();
1721 int limit = high;
1722
1723 DCHECK(low <= high);
1724
1725 while (low != high) {
1726 int mid = low + (high - low) / 2;
1727 Name* mid_name = array->GetSortedKey(mid);
1728 uint32_t mid_hash = mid_name->hash_field();
1729
1730 if (mid_hash >= hash) {
1731 high = mid;
1732 } else {
1733 low = mid + 1;
1734 }
1735 }
1736
1737 for (; low <= limit; ++low) {
1738 int sort_index = array->GetSortedKeyIndex(low);
1739 Name* entry = array->GetKey(sort_index);
1740 uint32_t current_hash = entry->hash_field();
1741 if (current_hash != hash) {
1742 if (search_mode == ALL_ENTRIES && out_insertion_index != nullptr) {
1743 *out_insertion_index = sort_index + (current_hash > hash ? 0 : 1);
1744 }
1745 return T::kNotFound;
1746 }
1747 if (entry == name) {
1748 if (search_mode == ALL_ENTRIES || sort_index < valid_entries) {
1749 return sort_index;
1750 }
1751 return T::kNotFound;
1752 }
1753 }
1754
1755 if (search_mode == ALL_ENTRIES && out_insertion_index != nullptr) {
1756 *out_insertion_index = limit + 1;
1757 }
1758 return T::kNotFound;
1759 }
1760
1761
1762 // Perform a linear search in this fixed array. |valid_entries| is the number
1763 // of entry indices that are valid.
1764 template <SearchMode search_mode, typename T>
1765 int LinearSearch(T* array, Name* name, int valid_entries,
1766 int* out_insertion_index) {
1767 if (search_mode == ALL_ENTRIES && out_insertion_index != nullptr) {
1768 uint32_t hash = name->hash_field();
1769 int len = array->number_of_entries();
1770 for (int number = 0; number < len; number++) {
1771 int sorted_index = array->GetSortedKeyIndex(number);
1772 Name* entry = array->GetKey(sorted_index);
1773 uint32_t current_hash = entry->hash_field();
1774 if (current_hash > hash) {
1775 *out_insertion_index = sorted_index;
1776 return T::kNotFound;
1777 }
1778 if (entry == name) return sorted_index;
1779 }
1780 *out_insertion_index = len;
1781 return T::kNotFound;
1782 } else {
1783 DCHECK_LE(valid_entries, array->number_of_entries());
1784 DCHECK_NULL(out_insertion_index); // Not supported here.
1785 for (int number = 0; number < valid_entries; number++) {
1786 if (array->GetKey(number) == name) return number;
1787 }
1788 return T::kNotFound;
1789 }
1790 }
1791
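// Dispatches to a linear scan for small arrays and to binary search otherwise.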
1792 template <SearchMode search_mode, typename T>
1793 int Search(T* array, Name* name, int valid_entries, int* out_insertion_index) {
1794 SLOW_DCHECK(array->IsSortedNoDuplicates());
1795
1796 if (valid_entries == 0) {
1797 if (search_mode == ALL_ENTRIES && out_insertion_index != nullptr) {
1798 *out_insertion_index = 0;
1799 }
1800 return T::kNotFound;
1801 }
1802
1803 // Fast case: do linear search for small arrays.
1804 const int kMaxElementsForLinearSearch = 8;
1805 if (valid_entries <= kMaxElementsForLinearSearch) {
1806 return LinearSearch<search_mode>(array, name, valid_entries,
1807 out_insertion_index);
1808 }
1809
1810 // Slow case: perform binary search.
1811 return BinarySearch<search_mode>(array, name, valid_entries,
1812 out_insertion_index);
1813 }
1814
1815
1816 int DescriptorArray::Search(Name* name, int valid_descriptors) {
1817 DCHECK(name->IsUniqueName());
1818 return internal::Search<VALID_ENTRIES>(this, name, valid_descriptors,
1819 nullptr);
1820 }
1821
1822 int DescriptorArray::Search(Name* name, Map* map) {
1823 DCHECK(name->IsUniqueName());
1824 int number_of_own_descriptors = map->NumberOfOwnDescriptors();
1825 if (number_of_own_descriptors == 0) return kNotFound;
1826 return Search(name, number_of_own_descriptors);
1827 }
1828
1829 int DescriptorArray::SearchWithCache(Isolate* isolate, Name* name, Map* map) {
1830 DCHECK(name->IsUniqueName());
1831 int number_of_own_descriptors = map->NumberOfOwnDescriptors();
1832 if (number_of_own_descriptors == 0) return kNotFound;
1833
1834 DescriptorLookupCache* cache = isolate->descriptor_lookup_cache();
1835 int number = cache->Lookup(map, name);
1836
1837 if (number == DescriptorLookupCache::kAbsent) {
1838 number = Search(name, number_of_own_descriptors);
1839 cache->Update(map, name, number);
1840 }
1841
1842 return number;
1843 }
1844
1845
1846 Object** DescriptorArray::GetKeySlot(int descriptor_number) {
1847 DCHECK(descriptor_number < number_of_descriptors());
1848 DCHECK((*RawFieldOfElementAt(ToKeyIndex(descriptor_number)))->IsObject());
1849 return reinterpret_cast<Object**>(
1850 RawFieldOfElementAt(ToKeyIndex(descriptor_number)));
1851 }
1852
1853 MaybeObject** DescriptorArray::GetDescriptorStartSlot(int descriptor_number) {
1854 return reinterpret_cast<MaybeObject**>(GetKeySlot(descriptor_number));
1855 }
1856
1857 MaybeObject** DescriptorArray::GetDescriptorEndSlot(int descriptor_number) {
1858 return GetValueSlot(descriptor_number - 1) + 1;
1859 }
1860
1861
1862 Name* DescriptorArray::GetKey(int descriptor_number) {
1863 DCHECK(descriptor_number < number_of_descriptors());
1864 return Name::cast(get(ToKeyIndex(descriptor_number))->ToStrongHeapObject());
1865 }
1866
1867
1868 int DescriptorArray::GetSortedKeyIndex(int descriptor_number) {
1869 return GetDetails(descriptor_number).pointer();
1870 }
1871
1872
1873 Name* DescriptorArray::GetSortedKey(int descriptor_number) {
1874 return GetKey(GetSortedKeyIndex(descriptor_number));
1875 }
1876
1877
1878 void DescriptorArray::SetSortedKey(int descriptor_index, int pointer) {
1879 PropertyDetails details = GetDetails(descriptor_index);
1880 set(ToDetailsIndex(descriptor_index),
1881 MaybeObject::FromObject(details.set_pointer(pointer).AsSmi()));
1882 }
1883
1884 MaybeObject** DescriptorArray::GetValueSlot(int descriptor_number) {
1885 DCHECK(descriptor_number < number_of_descriptors());
1886 return RawFieldOfElementAt(ToValueIndex(descriptor_number));
1887 }
1888
1889
1890 int DescriptorArray::GetValueOffset(int descriptor_number) {
1891 return OffsetOfElementAt(ToValueIndex(descriptor_number));
1892 }
1893
1894 Object* DescriptorArray::GetStrongValue(int descriptor_number) {
1895 DCHECK(descriptor_number < number_of_descriptors());
1896 return get(ToValueIndex(descriptor_number))->ToObject();
1897 }
1898
1899
1900 void DescriptorArray::SetValue(int descriptor_index, Object* value) {
1901 set(ToValueIndex(descriptor_index), MaybeObject::FromObject(value));
1902 }
1903
1904 MaybeObject* DescriptorArray::GetValue(int descriptor_number) {
1905 DCHECK_LT(descriptor_number, number_of_descriptors());
1906 return get(ToValueIndex(descriptor_number));
1907 }
1908
1909 PropertyDetails DescriptorArray::GetDetails(int descriptor_number) {
1910 DCHECK(descriptor_number < number_of_descriptors());
1911 MaybeObject* details = get(ToDetailsIndex(descriptor_number));
1912 return PropertyDetails(details->ToSmi());
1913 }
1914
1915 int DescriptorArray::GetFieldIndex(int descriptor_number) {
1916 DCHECK_EQ(GetDetails(descriptor_number).location(), kField);
1917 return GetDetails(descriptor_number).field_index();
1918 }
1919
1920 FieldType* DescriptorArray::GetFieldType(int descriptor_number) {
1921 DCHECK_EQ(GetDetails(descriptor_number).location(), kField);
1922 MaybeObject* wrapped_type = GetValue(descriptor_number);
1923 return Map::UnwrapFieldType(wrapped_type);
1924 }
1925
1926 void DescriptorArray::Set(int descriptor_number, Name* key, MaybeObject* value,
1927 PropertyDetails details) {
1928 // Range check.
1929 DCHECK(descriptor_number < number_of_descriptors());
1930 set(ToKeyIndex(descriptor_number), MaybeObject::FromObject(key));
1931 set(ToValueIndex(descriptor_number), value);
1932 set(ToDetailsIndex(descriptor_number),
1933 MaybeObject::FromObject(details.AsSmi()));
1934 }
1935
1936 void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
1937 Name* key = *desc->GetKey();
1938 MaybeObject* value = *desc->GetValue();
1939 Set(descriptor_number, key, value, desc->GetDetails());
1940 }
1941
1942
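// Appends |desc| as the last descriptor and then restores the hash-sorted
// order of the sorted-key indices with a single insertion-sort pass.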
1943 void DescriptorArray::Append(Descriptor* desc) {
1944 DisallowHeapAllocation no_gc;
1945 int descriptor_number = number_of_descriptors();
1946 SetNumberOfDescriptors(descriptor_number + 1);
1947 Set(descriptor_number, desc);
1948
1949 uint32_t hash = desc->GetKey()->Hash();
1950
1951 int insertion;
1952
1953 for (insertion = descriptor_number; insertion > 0; --insertion) {
1954 Name* key = GetSortedKey(insertion - 1);
1955 if (key->Hash() <= hash) break;
1956 SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
1957 }
1958
1959 SetSortedKey(insertion, descriptor_number);
1960 }
1961
1962
1963 void DescriptorArray::SwapSortedKeys(int first, int second) {
1964 int first_key = GetSortedKeyIndex(first);
1965 SetSortedKey(first, GetSortedKeyIndex(second));
1966 SetSortedKey(second, first_key);
1967 }
1968
1969 MaybeObject* DescriptorArray::get(int index) const {
1970 return WeakFixedArray::Get(index);
1971 }
1972
1973 void DescriptorArray::set(int index, MaybeObject* value) {
1974 WeakFixedArray::Set(index, value);
1975 }
1976
1977 bool StringSetShape::IsMatch(String* key, Object* value) {
1978 DCHECK(value->IsString());
1979 return key->Equals(String::cast(value));
1980 }
1981
1982 uint32_t StringSetShape::Hash(Isolate* isolate, String* key) {
1983 return key->Hash();
1984 }
1985
1986 uint32_t StringSetShape::HashForObject(Isolate* isolate, Object* object) {
1987 return String::cast(object)->Hash();
1988 }
1989
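// A StringTableKey carries the full hash field of the string; the hash value
// itself is recovered by shifting out the low flag bits (Name::kHashShift).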
1990 StringTableKey::StringTableKey(uint32_t hash_field)
1991 : HashTableKey(hash_field >> Name::kHashShift), hash_field_(hash_field) {}
1992
1993 void StringTableKey::set_hash_field(uint32_t hash_field) {
1994 hash_field_ = hash_field;
1995 set_hash(hash_field >> Name::kHashShift);
1996 }
1997
1998 Handle<Object> StringTableShape::AsHandle(Isolate* isolate,
1999 StringTableKey* key) {
2000 return key->AsHandle(isolate);
2001 }
2002
2003 uint32_t StringTableShape::HashForObject(Isolate* isolate, Object* object) {
2004 return String::cast(object)->Hash();
2005 }
2006
2007 int StringTableShape::GetMapRootIndex() {
2008 return Heap::kStringTableMapRootIndex;
2009 }
2010
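// The dictionary entry at kMaxNumberKeyIndex packs the largest element index
// seen so far together with a low "requires slow elements" flag bit.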
2011 bool NumberDictionary::requires_slow_elements() {
2012 Object* max_index_object = get(kMaxNumberKeyIndex);
2013 if (!max_index_object->IsSmi()) return false;
2014 return 0 != (Smi::ToInt(max_index_object) & kRequiresSlowElementsMask);
2015 }
2016
2017 uint32_t NumberDictionary::max_number_key() {
2018 DCHECK(!requires_slow_elements());
2019 Object* max_index_object = get(kMaxNumberKeyIndex);
2020 if (!max_index_object->IsSmi()) return 0;
2021 uint32_t value = static_cast<uint32_t>(Smi::ToInt(max_index_object));
2022 return value >> kRequiresSlowElementsTagSize;
2023 }
2024
2025 void NumberDictionary::set_requires_slow_elements() {
2026 set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
2027 }
2028
2029 DEFINE_DEOPT_ELEMENT_ACCESSORS(TranslationByteArray, ByteArray)
2030 DEFINE_DEOPT_ELEMENT_ACCESSORS(InlinedFunctionCount, Smi)
2031 DEFINE_DEOPT_ELEMENT_ACCESSORS(LiteralArray, FixedArray)
2032 DEFINE_DEOPT_ELEMENT_ACCESSORS(OsrBytecodeOffset, Smi)
2033 DEFINE_DEOPT_ELEMENT_ACCESSORS(OsrPcOffset, Smi)
2034 DEFINE_DEOPT_ELEMENT_ACCESSORS(OptimizationId, Smi)
2035 DEFINE_DEOPT_ELEMENT_ACCESSORS(InliningPositions, PodArray<InliningPosition>)
2036
2037 DEFINE_DEOPT_ENTRY_ACCESSORS(BytecodeOffsetRaw, Smi)
2038 DEFINE_DEOPT_ENTRY_ACCESSORS(TranslationIndex, Smi)
2039 DEFINE_DEOPT_ENTRY_ACCESSORS(Pc, Smi)
2040
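// PropertyArray packs its length and the owning object's identity hash into a
// single Smi field; LengthField and HashField extract the two components.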
2041 int PropertyArray::length() const {
2042 Object* value_obj = READ_FIELD(this, kLengthAndHashOffset);
2043 int value = Smi::ToInt(value_obj);
2044 return LengthField::decode(value);
2045 }
2046
2047 void PropertyArray::initialize_length(int len) {
2048 SLOW_DCHECK(len >= 0);
2049 SLOW_DCHECK(len < LengthField::kMax);
2050 WRITE_FIELD(this, kLengthAndHashOffset, Smi::FromInt(len));
2051 }
2052
2053 int PropertyArray::synchronized_length() const {
2054 Object* value_obj = ACQUIRE_READ_FIELD(this, kLengthAndHashOffset);
2055 int value = Smi::ToInt(value_obj);
2056 return LengthField::decode(value);
2057 }
2058
2059 int PropertyArray::Hash() const {
2060 Object* value_obj = READ_FIELD(this, kLengthAndHashOffset);
2061 int value = Smi::ToInt(value_obj);
2062 return HashField::decode(value);
2063 }
2064
2065 void PropertyArray::SetHash(int hash) {
2066 Object* value_obj = READ_FIELD(this, kLengthAndHashOffset);
2067 int value = Smi::ToInt(value_obj);
2068 value = HashField::update(value, hash);
2069 WRITE_FIELD(this, kLengthAndHashOffset, Smi::FromInt(value));
2070 }
2071
2072 SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
2073 RELAXED_SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
2074
2075
2076 int FreeSpace::Size() { return size(); }
2077
2078
2079 FreeSpace* FreeSpace::next() {
2080 DCHECK(map() == Heap::FromWritableHeapObject(this)->root(
2081 Heap::kFreeSpaceMapRootIndex) ||
2082 (!Heap::FromWritableHeapObject(this)->deserialization_complete() &&
2083 map() == nullptr));
2084 DCHECK_LE(kNextOffset + kPointerSize, relaxed_read_size());
2085 return reinterpret_cast<FreeSpace*>(Memory<Address>(address() + kNextOffset));
2086 }
2087
2088
2089 void FreeSpace::set_next(FreeSpace* next) {
2090 DCHECK(map() == Heap::FromWritableHeapObject(this)->root(
2091 Heap::kFreeSpaceMapRootIndex) ||
2092 (!Heap::FromWritableHeapObject(this)->deserialization_complete() &&
2093 map() == nullptr));
2094 DCHECK_LE(kNextOffset + kPointerSize, relaxed_read_size());
2095 base::Relaxed_Store(
2096 reinterpret_cast<base::AtomicWord*>(address() + kNextOffset),
2097 reinterpret_cast<base::AtomicWord>(next));
2098 }
2099
2100
2101 FreeSpace* FreeSpace::cast(HeapObject* o) {
2102 SLOW_DCHECK(!Heap::FromWritableHeapObject(o)->deserialization_complete() ||
2103 o->IsFreeSpace());
2104 return reinterpret_cast<FreeSpace*>(o);
2105 }
2106
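// Returns the object size in bytes. Fixed-size maps answer directly from the
// instance size; variable-size objects derive it from a length field, which
// is read with synchronized accessors where it can change concurrently.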
2107 int HeapObject::SizeFromMap(Map* map) const {
2108 int instance_size = map->instance_size();
2109 if (instance_size != kVariableSizeSentinel) return instance_size;
2110 // Only inline the most frequent cases.
2111 InstanceType instance_type = map->instance_type();
2112 if (instance_type >= FIRST_FIXED_ARRAY_TYPE &&
2113 instance_type <= LAST_FIXED_ARRAY_TYPE) {
2114 return FixedArray::SizeFor(
2115 reinterpret_cast<const FixedArray*>(this)->synchronized_length());
2116 }
2117 if (instance_type == ONE_BYTE_STRING_TYPE ||
2118 instance_type == ONE_BYTE_INTERNALIZED_STRING_TYPE) {
2119 // Strings may get concurrently truncated, hence we have to read their
2120 // length with a synchronized access.
2121 return SeqOneByteString::SizeFor(
2122 reinterpret_cast<const SeqOneByteString*>(this)->synchronized_length());
2123 }
2124 if (instance_type == BYTE_ARRAY_TYPE) {
2125 return ByteArray::SizeFor(
2126 reinterpret_cast<const ByteArray*>(this)->synchronized_length());
2127 }
2128 if (instance_type == BYTECODE_ARRAY_TYPE) {
2129 return BytecodeArray::SizeFor(
2130 reinterpret_cast<const BytecodeArray*>(this)->synchronized_length());
2131 }
2132 if (instance_type == FREE_SPACE_TYPE) {
2133 return reinterpret_cast<const FreeSpace*>(this)->relaxed_read_size();
2134 }
2135 if (instance_type == STRING_TYPE ||
2136 instance_type == INTERNALIZED_STRING_TYPE) {
2137 // Strings may get concurrently truncated, hence we have to read their
2138 // length with a synchronized access.
2139 return SeqTwoByteString::SizeFor(
2140 reinterpret_cast<const SeqTwoByteString*>(this)->synchronized_length());
2141 }
2142 if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
2143 return FixedDoubleArray::SizeFor(
2144 reinterpret_cast<const FixedDoubleArray*>(this)->synchronized_length());
2145 }
2146 if (instance_type == FEEDBACK_METADATA_TYPE) {
2147 return FeedbackMetadata::SizeFor(
2148 reinterpret_cast<const FeedbackMetadata*>(this)
2149 ->synchronized_slot_count());
2150 }
2151 if (instance_type >= FIRST_WEAK_FIXED_ARRAY_TYPE &&
2152 instance_type <= LAST_WEAK_FIXED_ARRAY_TYPE) {
2153 return WeakFixedArray::SizeFor(
2154 reinterpret_cast<const WeakFixedArray*>(this)->synchronized_length());
2155 }
2156 if (instance_type == WEAK_ARRAY_LIST_TYPE) {
2157 return WeakArrayList::SizeForCapacity(
2158 reinterpret_cast<const WeakArrayList*>(this)->synchronized_capacity());
2159 }
2160 if (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
2161 instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE) {
2162 return reinterpret_cast<const FixedTypedArrayBase*>(this)->TypedArraySize(
2163 instance_type);
2164 }
2165 if (instance_type == SMALL_ORDERED_HASH_SET_TYPE) {
2166 return SmallOrderedHashSet::SizeFor(
2167 reinterpret_cast<const SmallOrderedHashSet*>(this)->Capacity());
2168 }
2169 if (instance_type == PROPERTY_ARRAY_TYPE) {
2170 return PropertyArray::SizeFor(
2171 reinterpret_cast<const PropertyArray*>(this)->synchronized_length());
2172 }
2173 if (instance_type == SMALL_ORDERED_HASH_MAP_TYPE) {
2174 return SmallOrderedHashMap::SizeFor(
2175 reinterpret_cast<const SmallOrderedHashMap*>(this)->Capacity());
2176 }
2177 if (instance_type == FEEDBACK_VECTOR_TYPE) {
2178 return FeedbackVector::SizeFor(
2179 reinterpret_cast<const FeedbackVector*>(this)->length());
2180 }
2181 if (instance_type == BIGINT_TYPE) {
2182 return BigInt::SizeFor(reinterpret_cast<const BigInt*>(this)->length());
2183 }
2184 if (instance_type == PRE_PARSED_SCOPE_DATA_TYPE) {
2185 return PreParsedScopeData::SizeFor(
2186 reinterpret_cast<const PreParsedScopeData*>(this)->length());
2187 }
2188 DCHECK(instance_type == CODE_TYPE);
2189 return reinterpret_cast<const Code*>(this)->CodeSize();
2190 }
2191
2192 Object* JSBoundFunction::raw_bound_target_function() const {
2193 return READ_FIELD(this, kBoundTargetFunctionOffset);
2194 }
2195
2196 ACCESSORS(JSBoundFunction, bound_target_function, JSReceiver,
2197 kBoundTargetFunctionOffset)
2198 ACCESSORS(JSBoundFunction, bound_this, Object, kBoundThisOffset)
2199 ACCESSORS(JSBoundFunction, bound_arguments, FixedArray, kBoundArgumentsOffset)
2200
2201 ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
2202 ACCESSORS(JSFunction, feedback_cell, FeedbackCell, kFeedbackCellOffset)
2203
2204 ACCESSORS(JSGlobalObject, native_context, Context, kNativeContextOffset)
2205 ACCESSORS(JSGlobalObject, global_proxy, JSObject, kGlobalProxyOffset)
2206
2207 ACCESSORS(JSGlobalProxy, native_context, Object, kNativeContextOffset)
2208
2209 ACCESSORS(AsyncGeneratorRequest, next, Object, kNextOffset)
2210 SMI_ACCESSORS(AsyncGeneratorRequest, resume_mode, kResumeModeOffset)
2211 ACCESSORS(AsyncGeneratorRequest, value, Object, kValueOffset)
2212 ACCESSORS(AsyncGeneratorRequest, promise, Object, kPromiseOffset)
2213
2214 ACCESSORS(Tuple2, value1, Object, kValue1Offset)
2215 ACCESSORS(Tuple2, value2, Object, kValue2Offset)
2216 ACCESSORS(Tuple3, value3, Object, kValue3Offset)
2217
2218 ACCESSORS(TemplateObjectDescription, raw_strings, FixedArray, kRawStringsOffset)
2219 ACCESSORS(TemplateObjectDescription, cooked_strings, FixedArray,
2220 kCookedStringsOffset)
2221
2222 ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
2223 ACCESSORS(AccessorPair, setter, Object, kSetterOffset)
2224
2225 ACCESSORS(AllocationSite, transition_info_or_boilerplate, Object,
2226 kTransitionInfoOrBoilerplateOffset)
2227
2228 JSObject* AllocationSite::boilerplate() const {
2229 DCHECK(PointsToLiteral());
2230 return JSObject::cast(transition_info_or_boilerplate());
2231 }
2232
2233 void AllocationSite::set_boilerplate(JSObject* object, WriteBarrierMode mode) {
2234 set_transition_info_or_boilerplate(object, mode);
2235 }
2236
2237 int AllocationSite::transition_info() const {
2238 DCHECK(!PointsToLiteral());
2239 return Smi::cast(transition_info_or_boilerplate())->value();
2240 }
2241
2242 void AllocationSite::set_transition_info(int value) {
2243 DCHECK(!PointsToLiteral());
2244 set_transition_info_or_boilerplate(Smi::FromInt(value), SKIP_WRITE_BARRIER);
2245 }
2246
2247 ACCESSORS(AllocationSite, nested_site, Object, kNestedSiteOffset)
2248 INT32_ACCESSORS(AllocationSite, pretenure_data, kPretenureDataOffset)
2249 INT32_ACCESSORS(AllocationSite, pretenure_create_count,
2250 kPretenureCreateCountOffset)
2251 ACCESSORS(AllocationSite, dependent_code, DependentCode,
2252 kDependentCodeOffset)
2253 ACCESSORS_CHECKED(AllocationSite, weak_next, Object, kWeakNextOffset,
2254 HasWeakNext())
2255 ACCESSORS(AllocationMemento, allocation_site, Object, kAllocationSiteOffset)
2256
2257 SMI_ACCESSORS(StackFrameInfo, line_number, kLineNumberIndex)
2258 SMI_ACCESSORS(StackFrameInfo, column_number, kColumnNumberIndex)
2259 SMI_ACCESSORS(StackFrameInfo, script_id, kScriptIdIndex)
2260 ACCESSORS(StackFrameInfo, script_name, Object, kScriptNameIndex)
2261 ACCESSORS(StackFrameInfo, script_name_or_source_url, Object,
2262 kScriptNameOrSourceUrlIndex)
2263 ACCESSORS(StackFrameInfo, function_name, Object, kFunctionNameIndex)
2264 SMI_ACCESSORS(StackFrameInfo, flag, kFlagIndex)
2265 BOOL_ACCESSORS(StackFrameInfo, flag, is_eval, kIsEvalBit)
2266 BOOL_ACCESSORS(StackFrameInfo, flag, is_constructor, kIsConstructorBit)
2267 BOOL_ACCESSORS(StackFrameInfo, flag, is_wasm, kIsWasmBit)
2268 SMI_ACCESSORS(StackFrameInfo, id, kIdIndex)
2269
2270 ACCESSORS(SourcePositionTableWithFrameCache, source_position_table, ByteArray,
2271 kSourcePositionTableIndex)
2272 ACCESSORS(SourcePositionTableWithFrameCache, stack_frame_cache,
2273 SimpleNumberDictionary, kStackFrameCacheIndex)
2274
2275
2276 FeedbackVector* JSFunction::feedback_vector() const {
2277 DCHECK(has_feedback_vector());
2278 return FeedbackVector::cast(feedback_cell()->value());
2279 }
2280
2281 // Code objects that are marked for deoptimization are not considered to be
2282 // optimized. This is because the JSFunction might have been already
2283 // deoptimized but its code() still needs to be unlinked, which will happen on
2284 // its next activation.
2285 // TODO(jupvfranco): rename this function. Maybe RunOptimizedCode,
2286 // or IsValidOptimizedCode.
2287 bool JSFunction::IsOptimized() {
2288 return code()->kind() == Code::OPTIMIZED_FUNCTION &&
2289 !code()->marked_for_deoptimization();
2290 }
2291
2292 bool JSFunction::HasOptimizedCode() {
2293 return IsOptimized() ||
2294 (has_feedback_vector() && feedback_vector()->has_optimized_code() &&
2295 !feedback_vector()->optimized_code()->marked_for_deoptimization());
2296 }
2297
2298 bool JSFunction::HasOptimizationMarker() {
2299 return has_feedback_vector() && feedback_vector()->has_optimization_marker();
2300 }
2301
2302 void JSFunction::ClearOptimizationMarker() {
2303 DCHECK(has_feedback_vector());
2304 feedback_vector()->ClearOptimizationMarker();
2305 }
2306
2307 // Optimized code that is marked for deoptimization tiers back down to the
2308 // interpreter on its next activation, and it no longer counts as IsOptimized.
2309 bool JSFunction::IsInterpreted() {
2310 return code()->is_interpreter_trampoline_builtin() ||
2311 (code()->kind() == Code::OPTIMIZED_FUNCTION &&
2312 code()->marked_for_deoptimization());
2313 }
2314
2315 bool JSFunction::ChecksOptimizationMarker() {
2316 return code()->checks_optimization_marker();
2317 }
2318
2319 bool JSFunction::IsMarkedForOptimization() {
2320 return has_feedback_vector() && feedback_vector()->optimization_marker() ==
2321 OptimizationMarker::kCompileOptimized;
2322 }
2323
2324
2325 bool JSFunction::IsMarkedForConcurrentOptimization() {
2326 return has_feedback_vector() &&
2327 feedback_vector()->optimization_marker() ==
2328 OptimizationMarker::kCompileOptimizedConcurrent;
2329 }
2330
2331
2332 bool JSFunction::IsInOptimizationQueue() {
2333 return has_feedback_vector() && feedback_vector()->optimization_marker() ==
2334 OptimizationMarker::kInOptimizationQueue;
2335 }
2336
2337
2338 void JSFunction::CompleteInobjectSlackTrackingIfActive() {
2339 if (!has_prototype_slot()) return;
2340 if (has_initial_map() && initial_map()->IsInobjectSlackTrackingInProgress()) {
2341 initial_map()->CompleteInobjectSlackTracking(GetIsolate());
2342 }
2343 }
2344
2345 AbstractCode* JSFunction::abstract_code() {
2346 if (IsInterpreted()) {
2347 return AbstractCode::cast(shared()->GetBytecodeArray());
2348 } else {
2349 return AbstractCode::cast(code());
2350 }
2351 }
2352
2353 Code* JSFunction::code() { return Code::cast(READ_FIELD(this, kCodeOffset)); }
2354
2355 void JSFunction::set_code(Code* value) {
2356 DCHECK(!Heap::InNewSpace(value));
2357 WRITE_FIELD(this, kCodeOffset, value);
2358 MarkingBarrier(this, HeapObject::RawField(this, kCodeOffset), value);
2359 }
2360
2361
2362 void JSFunction::set_code_no_write_barrier(Code* value) {
2363 DCHECK(!Heap::InNewSpace(value));
2364 WRITE_FIELD(this, kCodeOffset, value);
2365 }
2366
2367 void JSFunction::ClearOptimizedCodeSlot(const char* reason) {
2368 if (has_feedback_vector() && feedback_vector()->has_optimized_code()) {
2369 if (FLAG_trace_opt) {
2370 PrintF("[evicting entry from optimizing code feedback slot (%s) for ",
2371 reason);
2372 ShortPrint();
2373 PrintF("]\n");
2374 }
2375 feedback_vector()->ClearOptimizedCode();
2376 }
2377 }
2378
2379 void JSFunction::SetOptimizationMarker(OptimizationMarker marker) {
2380 DCHECK(has_feedback_vector());
2381 DCHECK(ChecksOptimizationMarker());
2382 DCHECK(!HasOptimizedCode());
2383
2384 feedback_vector()->SetOptimizationMarker(marker);
2385 }
2386
2387 bool JSFunction::has_feedback_vector() const {
2388 return !feedback_cell()->value()->IsUndefined();
2389 }
2390
2391 Context* JSFunction::context() {
2392 return Context::cast(READ_FIELD(this, kContextOffset));
2393 }
2394
2395 bool JSFunction::has_context() const {
2396 return READ_FIELD(this, kContextOffset)->IsContext();
2397 }
2398
2399 JSGlobalProxy* JSFunction::global_proxy() { return context()->global_proxy(); }
2400
2401 Context* JSFunction::native_context() { return context()->native_context(); }
2402
2403
2404 void JSFunction::set_context(Object* value) {
2405 DCHECK(value->IsUndefined() || value->IsContext());
2406 WRITE_FIELD(this, kContextOffset, value);
2407 WRITE_BARRIER(this, kContextOffset, value);
2408 }
2409
2410 ACCESSORS_CHECKED(JSFunction, prototype_or_initial_map, Object,
2411 kPrototypeOrInitialMapOffset, map()->has_prototype_slot())
2412
2413 bool JSFunction::has_prototype_slot() const {
2414 return map()->has_prototype_slot();
2415 }
2416
2417 Map* JSFunction::initial_map() {
2418 return Map::cast(prototype_or_initial_map());
2419 }
2420
2421
2422 bool JSFunction::has_initial_map() {
2423 DCHECK(has_prototype_slot());
2424 return prototype_or_initial_map()->IsMap();
2425 }
2426
2427
2428 bool JSFunction::has_instance_prototype() {
2429 DCHECK(has_prototype_slot());
2430 return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
2431 }
2432
2433 bool JSFunction::has_prototype() {
2434 DCHECK(has_prototype_slot());
2435 return map()->has_non_instance_prototype() || has_instance_prototype();
2436 }
2437
2438 bool JSFunction::has_prototype_property() {
2439 return (has_prototype_slot() && IsConstructor()) ||
2440 IsGeneratorFunction(shared()->kind());
2441 }
2442
2443 bool JSFunction::PrototypeRequiresRuntimeLookup() {
2444 return !has_prototype_property() || map()->has_non_instance_prototype();
2445 }
2446
2447 Object* JSFunction::instance_prototype() {
2448 DCHECK(has_instance_prototype());
2449 if (has_initial_map()) return initial_map()->prototype();
2450 // When there is no initial map and the prototype is a JSReceiver, the
2451 // initial map field is used for the prototype field.
2452 return prototype_or_initial_map();
2453 }
2454
2455
2456 Object* JSFunction::prototype() {
2457 DCHECK(has_prototype());
2458 // If the function's prototype property has been set to a non-JSReceiver
2459 // value, that value is stored in the constructor field of the map.
2460 if (map()->has_non_instance_prototype()) {
2461 Object* prototype = map()->GetConstructor();
2462 // The map must have a prototype in that field, not a back pointer.
2463 DCHECK(!prototype->IsMap());
2464 DCHECK(!prototype->IsFunctionTemplateInfo());
2465 return prototype;
2466 }
2467 return instance_prototype();
2468 }
2469
2470
2471 bool JSFunction::is_compiled() {
2472 return code()->builtin_index() != Builtins::kCompileLazy;
2473 }
2474
2475 // static
2476 bool Foreign::IsNormalized(Object* value) {
2477 if (value == Smi::kZero) return true;
2478 return Foreign::cast(value)->foreign_address() != kNullAddress;
2479 }
2480
2481 Address Foreign::foreign_address() {
2482 return AddressFrom<Address>(READ_INTPTR_FIELD(this, kForeignAddressOffset));
2483 }
2484
2485 void Foreign::set_foreign_address(Address value) {
2486 WRITE_INTPTR_FIELD(this, kForeignAddressOffset, OffsetFrom(value));
2487 }
2488
2489 template <class Derived>
2490 void SmallOrderedHashTable<Derived>::SetDataEntry(int entry, int relative_index,
2491 Object* value) {
2492 Address entry_offset = GetDataEntryOffset(entry, relative_index);
2493 RELAXED_WRITE_FIELD(this, entry_offset, value);
2494 WRITE_BARRIER(this, static_cast<int>(entry_offset), value);
2495 }
2496
2497 ACCESSORS(JSValue, value, Object, kValueOffset)
2498
2499
2500 ACCESSORS(JSDate, value, Object, kValueOffset)
2501 ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset)
2502 ACCESSORS(JSDate, year, Object, kYearOffset)
2503 ACCESSORS(JSDate, month, Object, kMonthOffset)
2504 ACCESSORS(JSDate, day, Object, kDayOffset)
2505 ACCESSORS(JSDate, weekday, Object, kWeekdayOffset)
2506 ACCESSORS(JSDate, hour, Object, kHourOffset)
2507 ACCESSORS(JSDate, min, Object, kMinOffset)
2508 ACCESSORS(JSDate, sec, Object, kSecOffset)
2509
2510
2511 SMI_ACCESSORS(JSMessageObject, type, kTypeOffset)
2512 ACCESSORS(JSMessageObject, argument, Object, kArgumentsOffset)
2513 ACCESSORS(JSMessageObject, script, Script, kScriptOffset)
2514 ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
2515 SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
2516 SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)
2517 SMI_ACCESSORS(JSMessageObject, error_level, kErrorLevelOffset)
2518
2519 ElementsKind JSObject::GetElementsKind() const {
2520 ElementsKind kind = map()->elements_kind();
2521 #if VERIFY_HEAP && DEBUG
2522 FixedArrayBase* fixed_array =
2523 reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));
2524
2525 // If a GC was caused while constructing this object, the elements
2526 // pointer may point to a one pointer filler map.
2527 if (ElementsAreSafeToExamine()) {
2528 Map* map = fixed_array->map();
2529 if (IsSmiOrObjectElementsKind(kind)) {
2530 DCHECK(map == GetReadOnlyRoots().fixed_array_map() ||
2531 map == GetReadOnlyRoots().fixed_cow_array_map());
2532 } else if (IsDoubleElementsKind(kind)) {
2533 DCHECK(fixed_array->IsFixedDoubleArray() ||
2534 fixed_array == GetReadOnlyRoots().empty_fixed_array());
2535 } else if (kind == DICTIONARY_ELEMENTS) {
2536 DCHECK(fixed_array->IsFixedArray());
2537 DCHECK(fixed_array->IsDictionary());
2538 } else {
2539 DCHECK(kind > DICTIONARY_ELEMENTS);
2540 }
2541 DCHECK(!IsSloppyArgumentsElementsKind(kind) ||
2542 (elements()->IsFixedArray() && elements()->length() >= 2));
2543 }
2544 #endif
2545 return kind;
2546 }
2547
2548 bool JSObject::HasObjectElements() {
2549 return IsObjectElementsKind(GetElementsKind());
2550 }
2551
2552 bool JSObject::HasSmiElements() { return IsSmiElementsKind(GetElementsKind()); }
2553
2554 bool JSObject::HasSmiOrObjectElements() {
2555 return IsSmiOrObjectElementsKind(GetElementsKind());
2556 }
2557
2558 bool JSObject::HasDoubleElements() {
2559 return IsDoubleElementsKind(GetElementsKind());
2560 }
2561
2562 bool JSObject::HasHoleyElements() {
2563 return IsHoleyElementsKind(GetElementsKind());
2564 }
2565
2566
2567 bool JSObject::HasFastElements() {
2568 return IsFastElementsKind(GetElementsKind());
2569 }
2570
2571 bool JSObject::HasFastPackedElements() {
2572 return IsFastPackedElementsKind(GetElementsKind());
2573 }
2574
2575 bool JSObject::HasDictionaryElements() {
2576 return GetElementsKind() == DICTIONARY_ELEMENTS;
2577 }
2578
2579
2580 bool JSObject::HasFastArgumentsElements() {
2581 return GetElementsKind() == FAST_SLOPPY_ARGUMENTS_ELEMENTS;
2582 }
2583
2584
2585 bool JSObject::HasSlowArgumentsElements() {
2586 return GetElementsKind() == SLOW_SLOPPY_ARGUMENTS_ELEMENTS;
2587 }
2588
2589
2590 bool JSObject::HasSloppyArgumentsElements() {
2591 return IsSloppyArgumentsElementsKind(GetElementsKind());
2592 }
2593
2594 bool JSObject::HasStringWrapperElements() {
2595 return IsStringWrapperElementsKind(GetElementsKind());
2596 }
2597
2598 bool JSObject::HasFastStringWrapperElements() {
2599 return GetElementsKind() == FAST_STRING_WRAPPER_ELEMENTS;
2600 }
2601
2602 bool JSObject::HasSlowStringWrapperElements() {
2603 return GetElementsKind() == SLOW_STRING_WRAPPER_ELEMENTS;
2604 }
2605
2606 bool JSObject::HasFixedTypedArrayElements() {
2607 DCHECK_NOT_NULL(elements());
2608 return map()->has_fixed_typed_array_elements();
2609 }
2610
2611 #define FIXED_TYPED_ELEMENTS_CHECK(Type, type, TYPE, ctype) \
2612 bool JSObject::HasFixed##Type##Elements() { \
2613 HeapObject* array = elements(); \
2614 DCHECK_NOT_NULL(array); \
2615 if (!array->IsHeapObject()) return false; \
2616 return array->map()->instance_type() == FIXED_##TYPE##_ARRAY_TYPE; \
2617 }
2618
2619 TYPED_ARRAYS(FIXED_TYPED_ELEMENTS_CHECK)
2620
2621 #undef FIXED_TYPED_ELEMENTS_CHECK
2622
2623
2624 bool JSObject::HasNamedInterceptor() {
2625 return map()->has_named_interceptor();
2626 }
2627
2628
2629 bool JSObject::HasIndexedInterceptor() {
2630 return map()->has_indexed_interceptor();
2631 }
2632
2633 void JSGlobalObject::set_global_dictionary(GlobalDictionary* dictionary) {
2634 DCHECK(IsJSGlobalObject());
2635 set_raw_properties_or_hash(dictionary);
2636 }
2637
2638 GlobalDictionary* JSGlobalObject::global_dictionary() {
2639 DCHECK(!HasFastProperties());
2640 DCHECK(IsJSGlobalObject());
2641 return GlobalDictionary::cast(raw_properties_or_hash());
2642 }
2643
2644 NumberDictionary* JSObject::element_dictionary() {
2645 DCHECK(HasDictionaryElements() || HasSlowStringWrapperElements());
2646 return NumberDictionary::cast(elements());
2647 }
2648
2649 // static
2650 Maybe<bool> Object::GreaterThan(Isolate* isolate, Handle<Object> x,
2651 Handle<Object> y) {
2652 Maybe<ComparisonResult> result = Compare(isolate, x, y);
2653 if (result.IsJust()) {
2654 switch (result.FromJust()) {
2655 case ComparisonResult::kGreaterThan:
2656 return Just(true);
2657 case ComparisonResult::kLessThan:
2658 case ComparisonResult::kEqual:
2659 case ComparisonResult::kUndefined:
2660 return Just(false);
2661 }
2662 }
2663 return Nothing<bool>();
2664 }
2665
2666
2667 // static
2668 Maybe<bool> Object::GreaterThanOrEqual(Isolate* isolate, Handle<Object> x,
2669 Handle<Object> y) {
2670 Maybe<ComparisonResult> result = Compare(isolate, x, y);
2671 if (result.IsJust()) {
2672 switch (result.FromJust()) {
2673 case ComparisonResult::kEqual:
2674 case ComparisonResult::kGreaterThan:
2675 return Just(true);
2676 case ComparisonResult::kLessThan:
2677 case ComparisonResult::kUndefined:
2678 return Just(false);
2679 }
2680 }
2681 return Nothing<bool>();
2682 }
2683
2684
2685 // static
2686 Maybe<bool> Object::LessThan(Isolate* isolate, Handle<Object> x,
2687 Handle<Object> y) {
2688 Maybe<ComparisonResult> result = Compare(isolate, x, y);
2689 if (result.IsJust()) {
2690 switch (result.FromJust()) {
2691 case ComparisonResult::kLessThan:
2692 return Just(true);
2693 case ComparisonResult::kEqual:
2694 case ComparisonResult::kGreaterThan:
2695 case ComparisonResult::kUndefined:
2696 return Just(false);
2697 }
2698 }
2699 return Nothing<bool>();
2700 }
2701
2702
2703 // static
2704 Maybe<bool> Object::LessThanOrEqual(Isolate* isolate, Handle<Object> x,
2705 Handle<Object> y) {
2706 Maybe<ComparisonResult> result = Compare(isolate, x, y);
2707 if (result.IsJust()) {
2708 switch (result.FromJust()) {
2709 case ComparisonResult::kEqual:
2710 case ComparisonResult::kLessThan:
2711 return Just(true);
2712 case ComparisonResult::kGreaterThan:
2713 case ComparisonResult::kUndefined:
2714 return Just(false);
2715 }
2716 }
2717 return Nothing<bool>();
2718 }
2719
2720 MaybeHandle<Object> Object::GetPropertyOrElement(Isolate* isolate,
2721 Handle<Object> object,
2722 Handle<Name> name) {
2723 LookupIterator it = LookupIterator::PropertyOrElement(isolate, object, name);
2724 return GetProperty(&it);
2725 }
2726
2727 MaybeHandle<Object> Object::SetPropertyOrElement(Isolate* isolate,
2728 Handle<Object> object,
2729 Handle<Name> name,
2730 Handle<Object> value,
2731 LanguageMode language_mode,
2732 StoreFromKeyed store_mode) {
2733 LookupIterator it = LookupIterator::PropertyOrElement(isolate, object, name);
2734 MAYBE_RETURN_NULL(SetProperty(&it, value, language_mode, store_mode));
2735 return value;
2736 }
2737
2738 MaybeHandle<Object> Object::GetPropertyOrElement(Handle<Object> receiver,
2739 Handle<Name> name,
2740 Handle<JSReceiver> holder) {
2741 LookupIterator it = LookupIterator::PropertyOrElement(holder->GetIsolate(),
2742 receiver, name, holder);
2743 return GetProperty(&it);
2744 }
2745
2746
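// Installs the canonical empty backing store matching the map's property mode
// (empty property dictionary vs. empty fixed array); both live outside new
// space, so plain field writes without a write barrier are sufficient here.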
2747 void JSReceiver::initialize_properties() {
2748 Heap* heap = GetHeap();
2749 ReadOnlyRoots roots(heap);
2750 DCHECK(!Heap::InNewSpace(roots.empty_fixed_array()));
2751 DCHECK(!Heap::InNewSpace(heap->empty_property_dictionary()));
2752 if (map()->is_dictionary_map()) {
2753 WRITE_FIELD(this, kPropertiesOrHashOffset,
2754 heap->empty_property_dictionary());
2755 } else {
2756 WRITE_FIELD(this, kPropertiesOrHashOffset, roots.empty_fixed_array());
2757 }
2758 }
2759
2760 bool JSReceiver::HasFastProperties() const {
2761 DCHECK(
2762 raw_properties_or_hash()->IsSmi() ||
2763 (raw_properties_or_hash()->IsDictionary() == map()->is_dictionary_map()));
2764 return !map()->is_dictionary_map();
2765 }
2766
2767 NameDictionary* JSReceiver::property_dictionary() const {
2768 DCHECK(!IsJSGlobalObject());
2769 DCHECK(!HasFastProperties());
2770
2771 Object* prop = raw_properties_or_hash();
2772 if (prop->IsSmi()) {
2773 return GetHeap()->empty_property_dictionary();
2774 }
2775
2776 return NameDictionary::cast(prop);
2777 }
2778
2779 // TODO(gsathya): Pass isolate directly to this function and access
2780 // the heap from this.
2781 PropertyArray* JSReceiver::property_array() const {
2782 DCHECK(HasFastProperties());
2783
2784 Object* prop = raw_properties_or_hash();
2785 if (prop->IsSmi() || prop == GetReadOnlyRoots().empty_fixed_array()) {
2786 return GetReadOnlyRoots().empty_property_array();
2787 }
2788
2789 return PropertyArray::cast(prop);
2790 }
2791
2792 Maybe<bool> JSReceiver::HasProperty(Handle<JSReceiver> object,
2793 Handle<Name> name) {
2794 LookupIterator it = LookupIterator::PropertyOrElement(object->GetIsolate(),
2795 object, name, object);
2796 return HasProperty(&it);
2797 }
2798
2799
2800 Maybe<bool> JSReceiver::HasOwnProperty(Handle<JSReceiver> object,
2801 uint32_t index) {
2802 if (object->IsJSModuleNamespace()) return Just(false);
2803
2804 if (object->IsJSObject()) { // Shortcut.
2805 LookupIterator it(object->GetIsolate(), object, index, object,
2806 LookupIterator::OWN);
2807 return HasProperty(&it);
2808 }
2809
2810 Maybe<PropertyAttributes> attributes =
2811 JSReceiver::GetOwnPropertyAttributes(object, index);
2812 MAYBE_RETURN(attributes, Nothing<bool>());
2813 return Just(attributes.FromJust() != ABSENT);
2814 }
2815
2816 Maybe<PropertyAttributes> JSReceiver::GetPropertyAttributes(
2817 Handle<JSReceiver> object, Handle<Name> name) {
2818 LookupIterator it = LookupIterator::PropertyOrElement(object->GetIsolate(),
2819 object, name, object);
2820 return GetPropertyAttributes(&it);
2821 }
2822
2823
2824 Maybe<PropertyAttributes> JSReceiver::GetOwnPropertyAttributes(
2825 Handle<JSReceiver> object, Handle<Name> name) {
2826 LookupIterator it = LookupIterator::PropertyOrElement(
2827 object->GetIsolate(), object, name, object, LookupIterator::OWN);
2828 return GetPropertyAttributes(&it);
2829 }
2830
2831 Maybe<PropertyAttributes> JSReceiver::GetOwnPropertyAttributes(
2832 Handle<JSReceiver> object, uint32_t index) {
2833 LookupIterator it(object->GetIsolate(), object, index, object,
2834 LookupIterator::OWN);
2835 return GetPropertyAttributes(&it);
2836 }
2837
2838 Maybe<bool> JSReceiver::HasElement(Handle<JSReceiver> object, uint32_t index) {
2839 LookupIterator it(object->GetIsolate(), object, index, object);
2840 return HasProperty(&it);
2841 }
2842
2843
2844 Maybe<PropertyAttributes> JSReceiver::GetElementAttributes(
2845 Handle<JSReceiver> object, uint32_t index) {
2846 Isolate* isolate = object->GetIsolate();
2847 LookupIterator it(isolate, object, index, object);
2848 return GetPropertyAttributes(&it);
2849 }
2850
2851
2852 Maybe<PropertyAttributes> JSReceiver::GetOwnElementAttributes(
2853 Handle<JSReceiver> object, uint32_t index) {
2854 Isolate* isolate = object->GetIsolate();
2855 LookupIterator it(isolate, object, index, object, LookupIterator::OWN);
2856 return GetPropertyAttributes(&it);
2857 }
2858
2859
2860 bool JSGlobalObject::IsDetached() {
2861 return JSGlobalProxy::cast(global_proxy())->IsDetachedFrom(this);
2862 }
2863
2864
2865 bool JSGlobalProxy::IsDetachedFrom(JSGlobalObject* global) const {
2866 const PrototypeIterator iter(this->GetIsolate(),
2867 const_cast<JSGlobalProxy*>(this));
2868 return iter.GetCurrent() != global;
2869 }
2870
2871 inline int JSGlobalProxy::SizeWithEmbedderFields(int embedder_field_count) {
2872 DCHECK_GE(embedder_field_count, 0);
2873 return kSize + embedder_field_count * kPointerSize;
2874 }
2875
2876 Object* AccessorPair::get(AccessorComponent component) {
2877 return component == ACCESSOR_GETTER ? getter() : setter();
2878 }
2879
2880
2881 void AccessorPair::set(AccessorComponent component, Object* value) {
2882 if (component == ACCESSOR_GETTER) {
2883 set_getter(value);
2884 } else {
2885 set_setter(value);
2886 }
2887 }
2888
2889
2890 void AccessorPair::SetComponents(Object* getter, Object* setter) {
2891 if (!getter->IsNull()) set_getter(getter);
2892 if (!setter->IsNull()) set_setter(setter);
2893 }
2894
2895 bool AccessorPair::Equals(AccessorPair* pair) {
2896 return (this == pair) || pair->Equals(getter(), setter());
2897 }
2898
2899
2900 bool AccessorPair::Equals(Object* getter_value, Object* setter_value) {
2901 return (getter() == getter_value) && (setter() == setter_value);
2902 }
2903
2904
2905 bool AccessorPair::ContainsAccessor() {
2906 return IsJSAccessor(getter()) || IsJSAccessor(setter());
2907 }
2908
2909
2910 bool AccessorPair::IsJSAccessor(Object* obj) {
2911 return obj->IsCallable() || obj->IsUndefined();
2912 }
2913
2914 template <typename Derived, typename Shape>
2915 void Dictionary<Derived, Shape>::ClearEntry(Isolate* isolate, int entry) {
2916 Object* the_hole = this->GetReadOnlyRoots().the_hole_value();
2917 PropertyDetails details = PropertyDetails::Empty();
2918 Derived::cast(this)->SetEntry(isolate, entry, the_hole, the_hole, details);
2919 }
2920
2921 template <typename Derived, typename Shape>
2922 void Dictionary<Derived, Shape>::SetEntry(Isolate* isolate, int entry,
2923 Object* key, Object* value,
2924 PropertyDetails details) {
2925 DCHECK(Dictionary::kEntrySize == 2 || Dictionary::kEntrySize == 3);
2926 DCHECK(!key->IsName() || details.dictionary_index() > 0);
2927 int index = DerivedHashTable::EntryToIndex(entry);
2928 DisallowHeapAllocation no_gc;
2929 WriteBarrierMode mode = this->GetWriteBarrierMode(no_gc);
2930 this->set(index + Derived::kEntryKeyIndex, key, mode);
2931 this->set(index + Derived::kEntryValueIndex, value, mode);
2932 if (Shape::kHasDetails) DetailsAtPut(isolate, entry, details);
2933 }
2934
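// Global dictionary keys are stored as PropertyCells; Unwrap extracts the
// Name held by the cell.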
Object* GlobalDictionaryShape::Unwrap(Object* object) {
  return PropertyCell::cast(object)->name();
}

int GlobalDictionaryShape::GetMapRootIndex() {
  return Heap::kGlobalDictionaryMapRootIndex;
}

Name* NameDictionary::NameAt(int entry) { return Name::cast(KeyAt(entry)); }

int NameDictionaryShape::GetMapRootIndex() {
  return Heap::kNameDictionaryMapRootIndex;
}

PropertyCell* GlobalDictionary::CellAt(int entry) {
  DCHECK(KeyAt(entry)->IsPropertyCell());
  return PropertyCell::cast(KeyAt(entry));
}

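// A global dictionary entry is live as long as its key slot has not been
// cleared to undefined; it is a usable key only if, in addition, the cell's
// value is not the hole.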
bool GlobalDictionaryShape::IsLive(ReadOnlyRoots roots, Object* k) {
  DCHECK_NE(roots.the_hole_value(), k);
  return k != roots.undefined_value();
}

bool GlobalDictionaryShape::IsKey(ReadOnlyRoots roots, Object* k) {
  return IsLive(roots, k) && !PropertyCell::cast(k)->value()->IsTheHole(roots);
}

Name* GlobalDictionary::NameAt(int entry) { return CellAt(entry)->name(); }
Object* GlobalDictionary::ValueAt(int entry) { return CellAt(entry)->value(); }

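// In the global dictionary the key is the Name owned by the PropertyCell, so
// only the cell itself (passed as |value|) is written into the key slot.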
void GlobalDictionary::SetEntry(Isolate* isolate, int entry, Object* key,
                                Object* value, PropertyDetails details) {
  DCHECK_EQ(key, PropertyCell::cast(value)->name());
  set(EntryToIndex(entry) + kEntryKeyIndex, value);
  DetailsAtPut(isolate, entry, details);
}

void GlobalDictionary::ValueAtPut(int entry, Object* value) {
  set(EntryToIndex(entry), value);
}

bool NumberDictionaryBaseShape::IsMatch(uint32_t key, Object* other) {
  DCHECK(other->IsNumber());
  return key == static_cast<uint32_t>(other->Number());
}

uint32_t NumberDictionaryBaseShape::Hash(Isolate* isolate, uint32_t key) {
  return ComputeIntegerHash(key, isolate->heap()->HashSeed());
}

uint32_t NumberDictionaryBaseShape::HashForObject(Isolate* isolate,
                                                  Object* other) {
  DCHECK(other->IsNumber());
  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()),
                            isolate->heap()->HashSeed());
}

Handle<Object> NumberDictionaryBaseShape::AsHandle(Isolate* isolate,
                                                   uint32_t key) {
  return isolate->factory()->NewNumberFromUint(key);
}

int NumberDictionaryShape::GetMapRootIndex() {
  return Heap::kNumberDictionaryMapRootIndex;
}

int SimpleNumberDictionaryShape::GetMapRootIndex() {
  return Heap::kSimpleNumberDictionaryMapRootIndex;
}

bool NameDictionaryShape::IsMatch(Handle<Name> key, Object* other) {
  DCHECK(other->IsTheHole() || Name::cast(other)->IsUniqueName());
  DCHECK(key->IsUniqueName());
  return *key == other;
}

uint32_t NameDictionaryShape::Hash(Isolate* isolate, Handle<Name> key) {
  return key->Hash();
}

uint32_t NameDictionaryShape::HashForObject(Isolate* isolate, Object* other) {
  return Name::cast(other)->Hash();
}

bool GlobalDictionaryShape::IsMatch(Handle<Name> key, Object* other) {
  DCHECK(PropertyCell::cast(other)->name()->IsUniqueName());
  return *key == PropertyCell::cast(other)->name();
}

uint32_t GlobalDictionaryShape::HashForObject(Isolate* isolate, Object* other) {
  return PropertyCell::cast(other)->name()->Hash();
}

Handle<Object> NameDictionaryShape::AsHandle(Isolate* isolate,
                                             Handle<Name> key) {
  DCHECK(key->IsUniqueName());
  return key;
}


template <typename Dictionary>
PropertyDetails GlobalDictionaryShape::DetailsAt(Dictionary* dict, int entry) {
  DCHECK_LE(0, entry);  // Not found is -1, which is not caught by get().
  return dict->CellAt(entry)->property_details();
}

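// Property details for globals live on the PropertyCell itself. Toggling the
// read-only bit deoptimizes any code that depends on the cell.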
template <typename Dictionary>
void GlobalDictionaryShape::DetailsAtPut(Isolate* isolate, Dictionary* dict,
                                         int entry, PropertyDetails value) {
  DCHECK_LE(0, entry);  // Not found is -1, which is not caught by get().
  PropertyCell* cell = dict->CellAt(entry);
  if (cell->property_details().IsReadOnly() != value.IsReadOnly()) {
    cell->dependent_code()->DeoptimizeDependentCodeGroup(
        isolate, DependentCode::kPropertyCellChangedGroup);
  }
  cell->set_property_details(value);
}

bool ObjectHashTableShape::IsMatch(Handle<Object> key, Object* other) {
  return key->SameValue(other);
}

uint32_t ObjectHashTableShape::Hash(Isolate* isolate, Handle<Object> key) {
  return Smi::ToInt(key->GetHash());
}

uint32_t ObjectHashTableShape::HashForObject(Isolate* isolate, Object* other) {
  return Smi::ToInt(other->GetHash());
}

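// Computes a Smi hash for primitives (Smi, HeapNumber, Name, Oddball,
// BigInt). JSReceivers are returned unchanged so that GetHash() can fall
// back to the identity hash.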
// static
Object* Object::GetSimpleHash(Object* object) {
  DisallowHeapAllocation no_gc;
  if (object->IsSmi()) {
    uint32_t hash = ComputeIntegerHash(Smi::ToInt(object));
    return Smi::FromInt(hash & Smi::kMaxValue);
  }
  if (object->IsHeapNumber()) {
    double num = HeapNumber::cast(object)->value();
    if (std::isnan(num)) return Smi::FromInt(Smi::kMaxValue);
    // Use ComputeIntegerHash for all values in Signed32 range, including -0,
    // which is considered equal to 0 because collections use SameValueZero.
    uint32_t hash;
    // Check range before conversion to avoid undefined behavior.
    if (num >= kMinInt && num <= kMaxInt && FastI2D(FastD2I(num)) == num) {
      hash = ComputeIntegerHash(FastD2I(num));
    } else {
      hash = ComputeLongHash(double_to_uint64(num));
    }
    return Smi::FromInt(hash & Smi::kMaxValue);
  }
  if (object->IsName()) {
    uint32_t hash = Name::cast(object)->Hash();
    return Smi::FromInt(hash);
  }
  if (object->IsOddball()) {
    uint32_t hash = Oddball::cast(object)->to_string()->Hash();
    return Smi::FromInt(hash);
  }
  if (object->IsBigInt()) {
    uint32_t hash = BigInt::cast(object)->Hash();
    return Smi::FromInt(hash & Smi::kMaxValue);
  }
  DCHECK(object->IsJSReceiver());
  return object;
}

Object* Object::GetHash() {
  DisallowHeapAllocation no_gc;
  Object* hash = GetSimpleHash(this);
  if (hash->IsSmi()) return hash;

  DCHECK(IsJSReceiver());
  JSReceiver* receiver = JSReceiver::cast(this);
  Isolate* isolate = receiver->GetIsolate();
  return receiver->GetIdentityHash(isolate);
}

Handle<Object> ObjectHashTableShape::AsHandle(Handle<Object> key) {
  return key;
}

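// Relocatables form a per-isolate, LIFO list of stack-allocated objects; the
// destructor restores the previous top and checks strict nesting.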
Relocatable::Relocatable(Isolate* isolate) {
  isolate_ = isolate;
  prev_ = isolate->relocatable_top();
  isolate->set_relocatable_top(this);
}


Relocatable::~Relocatable() {
  DCHECK_EQ(isolate_->relocatable_top(), this);
  isolate_->set_relocatable_top(prev_);
}


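// Reads the key the iterator currently points at; the entry must be live, so
// the key is never the hole.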
template<class Derived, class TableType>
Object* OrderedHashTableIterator<Derived, TableType>::CurrentKey() {
  TableType* table(TableType::cast(this->table()));
  int index = Smi::ToInt(this->index());
  Object* key = table->KeyAt(index);
  DCHECK(!key->IsTheHole());
  return key;
}

// Predictably converts HeapObject* or Address to uint32 by calculating
// offset of the address in respective MemoryChunk.
static inline uint32_t ObjectAddressForHashing(void* object) {
  uint32_t value = static_cast<uint32_t>(reinterpret_cast<uintptr_t>(object));
  return value & MemoryChunk::kAlignmentMask;
}

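// Packs an index/key and a value into a fresh two-element PACKED_ELEMENTS
// JSArray; the stores into the just-allocated backing store skip the write
// barrier.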
static inline Handle<Object> MakeEntryPair(Isolate* isolate, uint32_t index,
                                           Handle<Object> value) {
  Handle<Object> key = isolate->factory()->Uint32ToString(index);
  Handle<FixedArray> entry_storage =
      isolate->factory()->NewUninitializedFixedArray(2);
  {
    entry_storage->set(0, *key, SKIP_WRITE_BARRIER);
    entry_storage->set(1, *value, SKIP_WRITE_BARRIER);
  }
  return isolate->factory()->NewJSArrayWithElements(entry_storage,
                                                    PACKED_ELEMENTS, 2);
}

static inline Handle<Object> MakeEntryPair(Isolate* isolate, Handle<Object> key,
                                           Handle<Object> value) {
  Handle<FixedArray> entry_storage =
      isolate->factory()->NewUninitializedFixedArray(2);
  {
    entry_storage->set(0, *key, SKIP_WRITE_BARRIER);
    entry_storage->set(1, *value, SKIP_WRITE_BARRIER);
  }
  return isolate->factory()->NewJSArrayWithElements(entry_storage,
                                                    PACKED_ELEMENTS, 2);
}

ACCESSORS(JSIteratorResult, value, Object, kValueOffset)
ACCESSORS(JSIteratorResult, done, Object, kDoneOffset)

ACCESSORS(JSAsyncFromSyncIterator, sync_iterator, JSReceiver,
          kSyncIteratorOffset)
ACCESSORS(JSAsyncFromSyncIterator, next, Object, kNextOffset)

ACCESSORS(JSStringIterator, string, String, kStringOffset)
SMI_ACCESSORS(JSStringIterator, index, kNextIndexOffset)

bool ScopeInfo::IsAsmModule() const { return AsmModuleField::decode(Flags()); }

bool ScopeInfo::HasSimpleParameters() const {
  return HasSimpleParametersField::decode(Flags());
}

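// Defines a Smi-backed setter and getter for each numeric ScopeInfo field;
// the getter returns 0 for the empty (zero-length) ScopeInfo.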
#define FIELD_ACCESSORS(name)                                                  \
  void ScopeInfo::Set##name(int value) { set(k##name, Smi::FromInt(value)); } \
  int ScopeInfo::name() const {                                               \
    if (length() > 0) {                                                       \
      return Smi::ToInt(get(k##name));                                        \
    } else {                                                                  \
      return 0;                                                               \
    }                                                                         \
  }
FOR_EACH_SCOPE_INFO_NUMERIC_FIELD(FIELD_ACCESSORS)
#undef FIELD_ACCESSORS

FreshlyAllocatedBigInt* FreshlyAllocatedBigInt::cast(Object* object) {
  SLOW_DCHECK(object->IsBigInt());
  return reinterpret_cast<FreshlyAllocatedBigInt*>(object);
}

}  // namespace internal
}  // namespace v8

#include "src/objects/object-macros-undef.h"

#endif  // V8_OBJECTS_INL_H_