• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 //
5 // Review notes:
6 //
7 // - The use of macros in these inline functions may seem superfluous
8 // but it is absolutely needed to make sure gcc generates optimal
9 // code. gcc is not happy when attempting to inline too deep.
10 //
11 
12 #ifndef V8_OBJECTS_OBJECTS_INL_H_
13 #define V8_OBJECTS_OBJECTS_INL_H_
14 
15 #include "src/base/bits.h"
16 #include "src/base/memory.h"
17 #include "src/base/numbers/double.h"
18 #include "src/builtins/builtins.h"
19 #include "src/common/globals.h"
20 #include "src/common/ptr-compr-inl.h"
21 #include "src/handles/handles-inl.h"
22 #include "src/heap/factory.h"
23 #include "src/heap/heap-write-barrier-inl.h"
24 #include "src/heap/read-only-heap-inl.h"
25 #include "src/numbers/conversions-inl.h"
26 #include "src/objects/bigint.h"
27 #include "src/objects/heap-number-inl.h"
28 #include "src/objects/heap-object.h"
29 #include "src/objects/js-proxy-inl.h"  // TODO(jkummerow): Drop.
30 #include "src/objects/keys.h"
31 #include "src/objects/literal-objects.h"
32 #include "src/objects/lookup-inl.h"  // TODO(jkummerow): Drop.
33 #include "src/objects/objects.h"
34 #include "src/objects/oddball-inl.h"
35 #include "src/objects/property-details.h"
36 #include "src/objects/property.h"
37 #include "src/objects/regexp-match-info-inl.h"
38 #include "src/objects/shared-function-info.h"
39 #include "src/objects/slots-inl.h"
40 #include "src/objects/smi-inl.h"
41 #include "src/objects/tagged-field-inl.h"
42 #include "src/objects/tagged-impl-inl.h"
43 #include "src/objects/tagged-index.h"
44 #include "src/objects/templates.h"
45 #include "src/sandbox/external-pointer-inl.h"
46 #include "src/sandbox/sandboxed-pointer-inl.h"
47 
48 // Has to be the last include (doesn't have include guards):
49 #include "src/objects/object-macros.h"
50 
51 namespace v8 {
52 namespace internal {
53 
// Reconstructs PropertyDetails from its Smi encoding (inverse of AsSmi()).
PropertyDetails::PropertyDetails(Smi smi) { value_ = smi.value(); }
55 
// Encodes these property details as a Smi. Shifting left then arithmetically
// right by one collapses the top two bits into a sign extension, so bit 31 of
// the details survives the 31-bit Smi payload and round-trips through the
// PropertyDetails(Smi) constructor above.
Smi PropertyDetails::AsSmi() const {
  // Ensure the upper 2 bits have the same value by sign extending it. This is
  // necessary to be able to use the 31st bit of the property details.
  int value = value_ << 1;
  return Smi::FromInt(value >> 1);
}
62 
// Width (in words) of an in-object property field. Only meaningful for
// properties stored at PropertyLocation::kField; the width is always one word.
int PropertyDetails::field_width_in_words() const {
  DCHECK_EQ(location(), PropertyLocation::kField);
  return 1;
}
67 
// ClassBoilerplate has no dedicated instance type, so this only checks for an
// exact FixedArray — a weak, assert-style predicate, not a precise type test.
DEF_GETTER(HeapObject, IsClassBoilerplate, bool) {
  return IsFixedArrayExact(cage_base);
}
71 
// True if this object is a Smi whose payload is also within TaggedIndex range.
bool Object::IsTaggedIndex() const {
  return IsSmi() && TaggedIndex::IsValid(TaggedIndex(ptr()).value());
}
75 
// Smis are never heap-allocated and so never in the shared heap; heap objects
// delegate to the HeapObject overloads defined further down in this file.
bool Object::InSharedHeap() const {
  return IsHeapObject() && HeapObject::cast(*this).InSharedHeap();
}

bool Object::InSharedWritableHeap() const {
  return IsHeapObject() && HeapObject::cast(*this).InSharedWritableHeap();
}
83 
// For every heap-object type, define Object::Is<Type>() — with and without an
// explicit cage base — that first rules out Smis, then dispatches to the
// matching HeapObject predicate. Also instantiated for a few types that are
// not part of HEAP_OBJECT_TYPE_LIST.
#define IS_TYPE_FUNCTION_DEF(type_)                                        \
  bool Object::Is##type_() const {                                         \
    return IsHeapObject() && HeapObject::cast(*this).Is##type_();          \
  }                                                                        \
  bool Object::Is##type_(PtrComprCageBase cage_base) const {               \
    return IsHeapObject() && HeapObject::cast(*this).Is##type_(cage_base); \
  }
HEAP_OBJECT_TYPE_LIST(IS_TYPE_FUNCTION_DEF)
IS_TYPE_FUNCTION_DEF(HashTableBase)
IS_TYPE_FUNCTION_DEF(SmallOrderedHashTable)
IS_TYPE_FUNCTION_DEF(CodeT)
#undef IS_TYPE_FUNCTION_DEF
96 
// For each oddball (Null, Undefined, ...) define identity predicates on both
// Object and HeapObject. The ReadOnlyRoots overload performs the actual root
// comparison; all other overloads reduce to it.
#define IS_TYPE_FUNCTION_DEF(Type, Value)                        \
  bool Object::Is##Type(Isolate* isolate) const {                \
    return Is##Type(ReadOnlyRoots(isolate));                     \
  }                                                              \
  bool Object::Is##Type(LocalIsolate* isolate) const {           \
    return Is##Type(ReadOnlyRoots(isolate));                     \
  }                                                              \
  bool Object::Is##Type(ReadOnlyRoots roots) const {             \
    return *this == roots.Value();                               \
  }                                                              \
  bool Object::Is##Type() const {                                \
    return IsHeapObject() && HeapObject::cast(*this).Is##Type(); \
  }                                                              \
  bool HeapObject::Is##Type(Isolate* isolate) const {            \
    return Object::Is##Type(isolate);                            \
  }                                                              \
  bool HeapObject::Is##Type(LocalIsolate* isolate) const {       \
    return Object::Is##Type(isolate);                            \
  }                                                              \
  bool HeapObject::Is##Type(ReadOnlyRoots roots) const {         \
    return Object::Is##Type(roots);                              \
  }                                                              \
  bool HeapObject::Is##Type() const { return Is##Type(GetReadOnlyRoots()); }
ODDBALL_LIST(IS_TYPE_FUNCTION_DEF)
#undef IS_TYPE_FUNCTION_DEF
122 
// Combined oddball checks: true for either the null or the undefined root.
bool Object::IsNullOrUndefined(Isolate* isolate) const {
  return IsNullOrUndefined(ReadOnlyRoots(isolate));
}

bool Object::IsNullOrUndefined(ReadOnlyRoots roots) const {
  return IsNull(roots) || IsUndefined(roots);
}

bool Object::IsNullOrUndefined() const {
  return IsHeapObject() && HeapObject::cast(*this).IsNullOrUndefined();
}

// True iff this object is the Smi 0.
bool Object::IsZero() const { return *this == Smi::zero(); }
136 
// Symbol visibility predicates, split on the symbol's is_private flag.
bool Object::IsPublicSymbol() const {
  return IsSymbol() && !Symbol::cast(*this).is_private();
}
bool Object::IsPrivateSymbol() const {
  return IsSymbol() && Symbol::cast(*this).is_private();
}

// True iff this is the sentinel marking a SharedFunctionInfo with no shared
// name.
bool Object::IsNoSharedNameSentinel() const {
  return *this == SharedFunctionInfo::kNoSharedNameSentinel;
}
147 
// Relaxed atomic load of a non-floating-point arithmetic/enum field at the
// given byte offset. The value is loaded via the matching-width atomic type
// and cast back to T.
template <class T,
          typename std::enable_if<(std::is_arithmetic<T>::value ||
                                   std::is_enum<T>::value) &&
                                      !std::is_floating_point<T>::value,
                                  int>::type>
T Object::Relaxed_ReadField(size_t offset) const {
  // Pointer compression causes types larger than kTaggedSize to be
  // unaligned. Atomic loads must be aligned.
  DCHECK_IMPLIES(COMPRESS_POINTERS_BOOL, sizeof(T) <= kTaggedSize);
  using AtomicT = typename base::AtomicTypeFromByteWidth<sizeof(T)>::type;
  return static_cast<T>(base::AsAtomicImpl<AtomicT>::Relaxed_Load(
      reinterpret_cast<AtomicT*>(field_address(offset))));
}
161 
// Relaxed atomic store of a non-floating-point arithmetic/enum field at the
// given byte offset; mirror image of Relaxed_ReadField above.
template <class T,
          typename std::enable_if<(std::is_arithmetic<T>::value ||
                                   std::is_enum<T>::value) &&
                                      !std::is_floating_point<T>::value,
                                  int>::type>
void Object::Relaxed_WriteField(size_t offset, T value) {
  // Pointer compression causes types larger than kTaggedSize to be
  // unaligned. Atomic stores must be aligned.
  DCHECK_IMPLIES(COMPRESS_POINTERS_BOOL, sizeof(T) <= kTaggedSize);
  using AtomicT = typename base::AtomicTypeFromByteWidth<sizeof(T)>::type;
  base::AsAtomicImpl<AtomicT>::Relaxed_Store(
      reinterpret_cast<AtomicT*>(field_address(offset)),
      static_cast<AtomicT>(value));
}
176 
// A heap object is in the shared heap if it is a read-only object while the
// read-only heap is shared, or if its chunk is in the shared writable space.
bool HeapObject::InSharedHeap() const {
  if (IsReadOnlyHeapObject(*this)) return V8_SHARED_RO_HEAP_BOOL;
  return InSharedWritableHeap();
}

bool HeapObject::InSharedWritableHeap() const {
  return BasicMemoryChunk::FromHeapObject(*this)->InSharedHeap();
}
185 
// HeapObject variants of IsNullOrUndefined; all reduce to the ReadOnlyRoots
// comparison implemented on Object.
bool HeapObject::IsNullOrUndefined(Isolate* isolate) const {
  return IsNullOrUndefined(ReadOnlyRoots(isolate));
}

bool HeapObject::IsNullOrUndefined(ReadOnlyRoots roots) const {
  return Object::IsNullOrUndefined(roots);
}

bool HeapObject::IsNullOrUndefined() const {
  return IsNullOrUndefined(GetReadOnlyRoots());
}
197 
// CodeT resolves to CodeDataContainer when the external code space is enabled,
// and to Code otherwise.
DEF_GETTER(HeapObject, IsCodeT, bool) {
  return V8_EXTERNAL_CODE_SPACE_BOOL ? IsCodeDataContainer(cage_base)
                                     : IsCode(cage_base);
}

// Unique names are internalized strings and symbols.
DEF_GETTER(HeapObject, IsUniqueName, bool) {
  return IsInternalizedString(cage_base) || IsSymbol(cage_base);
}

DEF_GETTER(HeapObject, IsFunction, bool) {
  return IsJSFunctionOrBoundFunctionOrWrappedFunction();
}

// Callability/constructability are bits on the object's map.
DEF_GETTER(HeapObject, IsCallable, bool) {
  return map(cage_base).is_callable();
}

DEF_GETTER(HeapObject, IsCallableJSProxy, bool) {
  return IsCallable(cage_base) && IsJSProxy(cage_base);
}

DEF_GETTER(HeapObject, IsCallableApiObject, bool) {
  InstanceType type = map(cage_base).instance_type();
  return IsCallable(cage_base) &&
         (type == JS_API_OBJECT_TYPE || type == JS_SPECIAL_API_OBJECT_TYPE);
}

DEF_GETTER(HeapObject, IsNonNullForeign, bool) {
  return IsForeign(cage_base) &&
         Foreign::cast(*this).foreign_address() != kNullAddress;
}

DEF_GETTER(HeapObject, IsConstructor, bool) {
  return map(cage_base).is_constructor();
}

// SourceTextModuleInfo is identified by its dedicated map rather than an
// instance type.
DEF_GETTER(HeapObject, IsSourceTextModuleInfo, bool) {
  return map(cage_base) == GetReadOnlyRoots(cage_base).module_info_map();
}
237 
// String-representation predicates: each first checks the object is a String,
// then inspects its StringShape (derived from the map) and, where relevant,
// its one-byte/two-byte encoding.
DEF_GETTER(HeapObject, IsConsString, bool) {
  if (!IsString(cage_base)) return false;
  return StringShape(String::cast(*this).map(cage_base)).IsCons();
}

DEF_GETTER(HeapObject, IsThinString, bool) {
  if (!IsString(cage_base)) return false;
  return StringShape(String::cast(*this).map(cage_base)).IsThin();
}

DEF_GETTER(HeapObject, IsSlicedString, bool) {
  if (!IsString(cage_base)) return false;
  return StringShape(String::cast(*this).map(cage_base)).IsSliced();
}

DEF_GETTER(HeapObject, IsSeqString, bool) {
  if (!IsString(cage_base)) return false;
  return StringShape(String::cast(*this).map(cage_base)).IsSequential();
}

DEF_GETTER(HeapObject, IsSeqOneByteString, bool) {
  if (!IsString(cage_base)) return false;
  return StringShape(String::cast(*this).map(cage_base)).IsSequential() &&
         String::cast(*this).IsOneByteRepresentation(cage_base);
}

DEF_GETTER(HeapObject, IsSeqTwoByteString, bool) {
  if (!IsString(cage_base)) return false;
  return StringShape(String::cast(*this).map(cage_base)).IsSequential() &&
         String::cast(*this).IsTwoByteRepresentation(cage_base);
}

DEF_GETTER(HeapObject, IsExternalOneByteString, bool) {
  if (!IsString(cage_base)) return false;
  return StringShape(String::cast(*this).map(cage_base)).IsExternal() &&
         String::cast(*this).IsOneByteRepresentation(cage_base);
}

DEF_GETTER(HeapObject, IsExternalTwoByteString, bool) {
  if (!IsString(cage_base)) return false;
  return StringShape(String::cast(*this).map(cage_base)).IsExternal() &&
         String::cast(*this).IsTwoByteRepresentation(cage_base);
}
281 
// A Number is a Smi or a HeapNumber. The no-argument overload computes the
// cage base itself for the HeapObject case.
bool Object::IsNumber() const {
  if (IsSmi()) return true;
  HeapObject this_heap_object = HeapObject::cast(*this);
  PtrComprCageBase cage_base = GetPtrComprCageBase(this_heap_object);
  return this_heap_object.IsHeapNumber(cage_base);
}

bool Object::IsNumber(PtrComprCageBase cage_base) const {
  return IsSmi() || IsHeapNumber(cage_base);
}

// A Numeric additionally admits BigInts.
bool Object::IsNumeric() const {
  if (IsSmi()) return true;
  HeapObject this_heap_object = HeapObject::cast(*this);
  PtrComprCageBase cage_base = GetPtrComprCageBase(this_heap_object);
  return this_heap_object.IsHeapNumber(cage_base) ||
         this_heap_object.IsBigInt(cage_base);
}

bool Object::IsNumeric(PtrComprCageBase cage_base) const {
  return IsNumber(cage_base) || IsBigInt(cage_base);
}
304 
// The following types have no dedicated instance type; they are recognized by
// their map or by being (weak) fixed arrays, so these checks are approximate.
DEF_GETTER(HeapObject, IsArrayList, bool) {
  return map(cage_base) ==
         GetReadOnlyRoots(cage_base).unchecked_array_list_map();
}

DEF_GETTER(HeapObject, IsRegExpMatchInfo, bool) {
  return IsFixedArrayExact(cage_base);
}

DEF_GETTER(HeapObject, IsDeoptimizationData, bool) {
  // Must be a fixed array.
  if (!IsFixedArrayExact(cage_base)) return false;

  // There's no sure way to detect the difference between a fixed array and
  // a deoptimization data array.  Since this is used for asserts we can
  // check that the length is zero or else the fixed size plus a multiple of
  // the entry size.
  int length = FixedArray::cast(*this).length();
  if (length == 0) return true;

  length -= DeoptimizationData::kFirstDeoptEntryIndex;
  return length >= 0 && length % DeoptimizationData::kDeoptEntrySize == 0;
}

DEF_GETTER(HeapObject, IsHandlerTable, bool) {
  return IsFixedArrayExact(cage_base);
}

// A template list is a non-empty fixed array.
DEF_GETTER(HeapObject, IsTemplateList, bool) {
  if (!IsFixedArrayExact(cage_base)) return false;
  if (FixedArray::cast(*this).length() < 1) return false;
  return true;
}

DEF_GETTER(HeapObject, IsDependentCode, bool) {
  return IsWeakArrayList(cage_base);
}

DEF_GETTER(HeapObject, IsOSROptimizedCodeCache, bool) {
  return IsWeakFixedArray(cage_base);
}
346 
// AbstractCode is either a BytecodeArray or a Code object. The no-argument
// overload computes the cage base via the slow path.
bool HeapObject::IsAbstractCode() const {
  // TODO(v8:11880): Either make AbstractCode be ByteArray|CodeT or
  // ensure this version is not called for hot code.
  PtrComprCageBase cage_base = GetPtrComprCageBaseSlow(*this);
  return HeapObject::IsAbstractCode(cage_base);
}
bool HeapObject::IsAbstractCode(PtrComprCageBase cage_base) const {
  return IsBytecodeArray(cage_base) || IsCode(cage_base);
}
356 
// Primitive-wrapper predicates: a JSPrimitiveWrapper whose boxed value has the
// corresponding primitive type.
DEF_GETTER(HeapObject, IsStringWrapper, bool) {
  return IsJSPrimitiveWrapper(cage_base) &&
         JSPrimitiveWrapper::cast(*this).value().IsString(cage_base);
}

DEF_GETTER(HeapObject, IsBooleanWrapper, bool) {
  return IsJSPrimitiveWrapper(cage_base) &&
         JSPrimitiveWrapper::cast(*this).value().IsBoolean(cage_base);
}

DEF_GETTER(HeapObject, IsScriptWrapper, bool) {
  return IsJSPrimitiveWrapper(cage_base) &&
         JSPrimitiveWrapper::cast(*this).value().IsScript(cage_base);
}

DEF_GETTER(HeapObject, IsNumberWrapper, bool) {
  return IsJSPrimitiveWrapper(cage_base) &&
         JSPrimitiveWrapper::cast(*this).value().IsNumber(cage_base);
}

DEF_GETTER(HeapObject, IsBigIntWrapper, bool) {
  return IsJSPrimitiveWrapper(cage_base) &&
         JSPrimitiveWrapper::cast(*this).value().IsBigInt(cage_base);
}

DEF_GETTER(HeapObject, IsSymbolWrapper, bool) {
  return IsJSPrimitiveWrapper(cage_base) &&
         JSPrimitiveWrapper::cast(*this).value().IsSymbol(cage_base);
}

// The hash-table subtypes below share the HashTable layout and cannot be
// distinguished further; these checks are approximate.
DEF_GETTER(HeapObject, IsStringSet, bool) { return IsHashTable(cage_base); }

DEF_GETTER(HeapObject, IsObjectHashSet, bool) { return IsHashTable(cage_base); }

DEF_GETTER(HeapObject, IsCompilationCacheTable, bool) {
  return IsHashTable(cage_base);
}

DEF_GETTER(HeapObject, IsMapCache, bool) { return IsHashTable(cage_base); }

DEF_GETTER(HeapObject, IsObjectHashTable, bool) {
  return IsHashTable(cage_base);
}

DEF_GETTER(HeapObject, IsHashTableBase, bool) { return IsHashTable(cage_base); }

#if V8_ENABLE_WEBASSEMBLY
DEF_GETTER(HeapObject, IsWasmExceptionPackage, bool) {
  // It is not possible to check for the existence of certain properties on the
  // underlying {JSReceiver} here because that requires calling handlified code.
  return IsJSReceiver(cage_base);
}
#endif  // V8_ENABLE_WEBASSEMBLY
410 
// A primitive is a Smi or a heap object whose map is flagged as primitive.
bool Object::IsPrimitive() const {
  if (IsSmi()) return true;
  HeapObject this_heap_object = HeapObject::cast(*this);
  PtrComprCageBase cage_base = GetPtrComprCageBase(this_heap_object);
  return this_heap_object.map(cage_base).IsPrimitiveMap();
}

bool Object::IsPrimitive(PtrComprCageBase cage_base) const {
  return IsSmi() || HeapObject::cast(*this).map(cage_base).IsPrimitiveMap();
}
421 
// static
// ES "IsArray" on a handle: true for JSArray, delegated to JSProxy::IsArray
// for proxies (which can fail, hence Maybe), false otherwise.
Maybe<bool> Object::IsArray(Handle<Object> object) {
  if (object->IsSmi()) return Just(false);
  Handle<HeapObject> heap_object = Handle<HeapObject>::cast(object);
  if (heap_object->IsJSArray()) return Just(true);
  if (!heap_object->IsJSProxy()) return Just(false);
  return JSProxy::IsArray(Handle<JSProxy>::cast(object));
}
430 
DEF_GETTER(HeapObject, IsUndetectable, bool) {
  return map(cage_base).is_undetectable();
}

// A global proxy needs an access check only while detached from its global
// object; everything else consults the map's access-check bit.
DEF_GETTER(HeapObject, IsAccessCheckNeeded, bool) {
  if (IsJSGlobalProxy(cage_base)) {
    const JSGlobalProxy proxy = JSGlobalProxy::cast(*this);
    JSGlobalObject global = proxy.GetIsolate()->context().global_object();
    return proxy.IsDetachedFrom(global);
  }
  return map(cage_base).is_access_check_needed();
}
443 
// For each struct type, define Object::Is<Name>() overloads that rule out
// Smis and forward to the HeapObject predicate.
#define MAKE_STRUCT_PREDICATE(NAME, Name, name)                           \
  bool Object::Is##Name() const {                                         \
    return IsHeapObject() && HeapObject::cast(*this).Is##Name();          \
  }                                                                       \
  bool Object::Is##Name(PtrComprCageBase cage_base) const {               \
    return IsHeapObject() && HeapObject::cast(*this).Is##Name(cage_base); \
  }
STRUCT_LIST(MAKE_STRUCT_PREDICATE)
#undef MAKE_STRUCT_PREDICATE
453 
// Numeric value of a Number object: the Smi payload widened to double, or the
// HeapNumber's stored double. Callers must ensure IsNumber() holds.
double Object::Number() const {
  DCHECK(IsNumber());
  return IsSmi() ? static_cast<double>(Smi(this->ptr()).value())
                 : HeapNumber::unchecked_cast(*this).value();
}
459 
460 // static
SameNumberValue(double value1,double value2)461 bool Object::SameNumberValue(double value1, double value2) {
462   // SameNumberValue(NaN, NaN) is true.
463   if (value1 != value2) {
464     return std::isnan(value1) && std::isnan(value2);
465   }
466   // SameNumberValue(0.0, -0.0) is false.
467   return (std::signbit(value1) == std::signbit(value2));
468 }
469 
// Only HeapNumbers can hold NaN or -0.0; Smis are excluded by construction.
bool Object::IsNaN() const {
  return this->IsHeapNumber() && std::isnan(HeapNumber::cast(*this).value());
}

bool Object::IsMinusZero() const {
  return this->IsHeapNumber() &&
         i::IsMinusZero(HeapNumber::cast(*this).value());
}
478 
// Constructor and cast boilerplate for the BigInt hierarchy.
OBJECT_CONSTRUCTORS_IMPL(BigIntBase, PrimitiveHeapObject)
OBJECT_CONSTRUCTORS_IMPL(BigInt, BigIntBase)
OBJECT_CONSTRUCTORS_IMPL(FreshlyAllocatedBigInt, BigIntBase)

// ------------------------------------
// Cast operations

CAST_ACCESSOR(BigIntBase)
CAST_ACCESSOR(BigInt)
488 
// True if this object is a valid elements backing store.
bool Object::HasValidElements() {
  // Dictionary is covered under FixedArray. ByteArray is used
  // for the JSTypedArray backing stores.
  return IsFixedArray() || IsFixedDoubleArray() || IsByteArray();
}
494 
FilterKey(PropertyFilter filter)495 bool Object::FilterKey(PropertyFilter filter) {
496   DCHECK(!IsPropertyCell());
497   if (filter == PRIVATE_NAMES_ONLY) {
498     if (!IsSymbol()) return true;
499     return !Symbol::cast(*this).is_private_name();
500   } else if (IsSymbol()) {
501     if (filter & SKIP_SYMBOLS) return true;
502 
503     if (Symbol::cast(*this).is_private()) return true;
504   } else {
505     if (filter & SKIP_STRINGS) return true;
506   }
507   return false;
508 }
509 
OptimalRepresentation(PtrComprCageBase cage_base)510 Representation Object::OptimalRepresentation(PtrComprCageBase cage_base) const {
511   if (IsSmi()) {
512     return Representation::Smi();
513   }
514   HeapObject heap_object = HeapObject::cast(*this);
515   if (heap_object.IsHeapNumber(cage_base)) {
516     return Representation::Double();
517   } else if (heap_object.IsUninitialized(
518                  heap_object.GetReadOnlyRoots(cage_base))) {
519     return Representation::None();
520   }
521   return Representation::HeapObject();
522 }
523 
// Most specific packed elements kind able to hold this value.
ElementsKind Object::OptimalElementsKind(PtrComprCageBase cage_base) const {
  if (IsSmi()) return PACKED_SMI_ELEMENTS;
  if (IsNumber(cage_base)) return PACKED_DOUBLE_ELEMENTS;
  return PACKED_ELEMENTS;
}
529 
FitsRepresentation(Representation representation,bool allow_coercion)530 bool Object::FitsRepresentation(Representation representation,
531                                 bool allow_coercion) const {
532   if (representation.IsSmi()) {
533     return IsSmi();
534   } else if (representation.IsDouble()) {
535     return allow_coercion ? IsNumber() : IsHeapNumber();
536   } else if (representation.IsHeapObject()) {
537     return IsHeapObject();
538   } else if (representation.IsNone()) {
539     return false;
540   }
541   return true;
542 }
543 
ToUint32(uint32_t * value)544 bool Object::ToUint32(uint32_t* value) const {
545   if (IsSmi()) {
546     int num = Smi::ToInt(*this);
547     if (num < 0) return false;
548     *value = static_cast<uint32_t>(num);
549     return true;
550   }
551   if (IsHeapNumber()) {
552     double num = HeapNumber::cast(*this).value();
553     return DoubleToUint32IfEqualToSelf(num, value);
554   }
555   return false;
556 }
557 
// The following To* conversions are inline fast paths: when the input already
// has the target type (or a trivially convertible one) they return it
// directly; otherwise they defer to the out-of-line Convert* implementations.

// static
MaybeHandle<JSReceiver> Object::ToObject(Isolate* isolate,
                                         Handle<Object> object,
                                         const char* method_name) {
  if (object->IsJSReceiver()) return Handle<JSReceiver>::cast(object);
  return ToObjectImpl(isolate, object, method_name);
}

// static
MaybeHandle<Name> Object::ToName(Isolate* isolate, Handle<Object> input) {
  if (input->IsName()) return Handle<Name>::cast(input);
  return ConvertToName(isolate, input);
}

// static
MaybeHandle<Object> Object::ToPropertyKey(Isolate* isolate,
                                          Handle<Object> value) {
  if (value->IsSmi() || HeapObject::cast(*value).IsName()) return value;
  return ConvertToPropertyKey(isolate, value);
}

// static
MaybeHandle<Object> Object::ToPrimitive(Isolate* isolate, Handle<Object> input,
                                        ToPrimitiveHint hint) {
  if (input->IsPrimitive()) return input;
  return JSReceiver::ToPrimitive(isolate, Handle<JSReceiver>::cast(input),
                                 hint);
}

// static
MaybeHandle<Object> Object::ToNumber(Isolate* isolate, Handle<Object> input) {
  if (input->IsNumber()) return input;  // Shortcut.
  return ConvertToNumberOrNumeric(isolate, input, Conversion::kToNumber);
}

// static
MaybeHandle<Object> Object::ToNumeric(Isolate* isolate, Handle<Object> input) {
  if (input->IsNumber() || input->IsBigInt()) return input;  // Shortcut.
  return ConvertToNumberOrNumeric(isolate, input, Conversion::kToNumeric);
}

// static
MaybeHandle<Object> Object::ToInteger(Isolate* isolate, Handle<Object> input) {
  if (input->IsSmi()) return input;
  return ConvertToInteger(isolate, input);
}

// static
MaybeHandle<Object> Object::ToInt32(Isolate* isolate, Handle<Object> input) {
  if (input->IsSmi()) return input;
  return ConvertToInt32(isolate, input);
}

// static
MaybeHandle<Object> Object::ToUint32(Isolate* isolate, Handle<Object> input) {
  // A negative Smi is reinterpreted via ToUint32Smi rather than converted.
  if (input->IsSmi()) return handle(Smi::cast(*input).ToUint32Smi(), isolate);
  return ConvertToUint32(isolate, input);
}

// static
MaybeHandle<String> Object::ToString(Isolate* isolate, Handle<Object> input) {
  if (input->IsString()) return Handle<String>::cast(input);
  return ConvertToString(isolate, input);
}

// static
MaybeHandle<Object> Object::ToLength(Isolate* isolate, Handle<Object> input) {
  // Smi fast path: lengths are clamped below at zero.
  if (input->IsSmi()) {
    int value = std::max(Smi::ToInt(*input), 0);
    return handle(Smi::FromInt(value), isolate);
  }
  return ConvertToLength(isolate, input);
}

// static
MaybeHandle<Object> Object::ToIndex(Isolate* isolate, Handle<Object> input,
                                    MessageTemplate error_index) {
  if (input->IsSmi() && Smi::ToInt(*input) >= 0) return input;
  return ConvertToIndex(isolate, input, error_index);
}
638 
// Named-property read; undefined if the lookup finds nothing.
MaybeHandle<Object> Object::GetProperty(Isolate* isolate, Handle<Object> object,
                                        Handle<Name> name) {
  LookupIterator it(isolate, object, name);
  if (!it.IsFound()) return it.factory()->undefined_value();
  return GetProperty(&it);
}

// Indexed-element read; undefined if the lookup finds nothing.
MaybeHandle<Object> Object::GetElement(Isolate* isolate, Handle<Object> object,
                                       uint32_t index) {
  LookupIterator it(isolate, object, index);
  if (!it.IsFound()) return it.factory()->undefined_value();
  return GetProperty(&it);
}

// Indexed-element write; returns the stored value on success, or nothing if
// SetProperty signals failure (per should_throw).
MaybeHandle<Object> Object::SetElement(Isolate* isolate, Handle<Object> object,
                                       uint32_t index, Handle<Object> value,
                                       ShouldThrow should_throw) {
  LookupIterator it(isolate, object, index);
  MAYBE_RETURN_NULL(
      SetProperty(&it, value, StoreOrigin::kMaybeKeyed, Just(should_throw)));
  return value;
}
661 
// Thin wrappers around the sandboxed/external pointer primitives, translating
// a field byte offset on this object into an absolute field address.
Address Object::ReadSandboxedPointerField(size_t offset,
                                          PtrComprCageBase cage_base) const {
  return i::ReadSandboxedPointerField(field_address(offset), cage_base);
}

void Object::WriteSandboxedPointerField(size_t offset,
                                        PtrComprCageBase cage_base,
                                        Address value) {
  i::WriteSandboxedPointerField(field_address(offset), cage_base, value);
}

void Object::WriteSandboxedPointerField(size_t offset, Isolate* isolate,
                                        Address value) {
  i::WriteSandboxedPointerField(field_address(offset),
                                PtrComprCageBase(isolate), value);
}

void Object::InitExternalPointerField(size_t offset, Isolate* isolate,
                                      ExternalPointerTag tag) {
  i::InitExternalPointerField(field_address(offset), isolate, tag);
}

void Object::InitExternalPointerField(size_t offset, Isolate* isolate,
                                      Address value, ExternalPointerTag tag) {
  i::InitExternalPointerField(field_address(offset), isolate, value, tag);
}

Address Object::ReadExternalPointerField(size_t offset, Isolate* isolate,
                                         ExternalPointerTag tag) const {
  return i::ReadExternalPointerField(field_address(offset), isolate, tag);
}

void Object::WriteExternalPointerField(size_t offset, Isolate* isolate,
                                       Address value, ExternalPointerTag tag) {
  i::WriteExternalPointerField(field_address(offset), isolate, value, tag);
}
698 
RawField(int byte_offset)699 ObjectSlot HeapObject::RawField(int byte_offset) const {
700   return ObjectSlot(field_address(byte_offset));
701 }
702 
// Returns a slot addressing a tagged field at |byte_offset| that may hold a
// weak reference.
MaybeObjectSlot HeapObject::RawMaybeWeakField(int byte_offset) const {
  return MaybeObjectSlot(field_address(byte_offset));
}
706 
// Returns a slot addressing a field at |byte_offset| that holds a Code object
// (relevant when the external code space is enabled).
CodeObjectSlot HeapObject::RawCodeField(int byte_offset) const {
  return CodeObjectSlot(field_address(byte_offset));
}
710 
// Reads the raw (untranslated) external pointer representation stored at
// |byte_offset|.
ExternalPointer_t HeapObject::RawExternalPointerField(int byte_offset) const {
  return ReadRawExternalPointerField(field_address(byte_offset));
}
714 
// Wraps a Map pointer as a map word, applying map packing when enabled.
// The input must not already be packed (checked below).
MapWord MapWord::FromMap(const Map map) {
  DCHECK(map.is_null() || !MapWord::IsPacked(map.ptr()));
#ifdef V8_MAP_PACKING
  return MapWord(Pack(map.ptr()));
#else
  return MapWord(map.ptr());
#endif
}
723 
// Inverse of FromMap: decodes the stored value back into a Map, unpacking
// when map packing is enabled. Uses unchecked_cast because a map word may be
// read during GC when type checks are not safe.
Map MapWord::ToMap() const {
#ifdef V8_MAP_PACKING
  return Map::unchecked_cast(Object(Unpack(value_)));
#else
  return Map::unchecked_cast(Object(value_));
#endif
}
731 
// True if this map word encodes a forwarding pointer (installed by the GC
// when the object has been moved), distinguished via the forwarding tag bits.
bool MapWord::IsForwardingAddress() const {
  return (value_ & kForwardingTagMask) == kForwardingTag;
}
735 
// Encodes |object|'s new location as a forwarding map word; stripping the
// heap-object tag yields the forwarding-tagged representation.
MapWord MapWord::FromForwardingAddress(HeapObject object) {
  return MapWord(object.ptr() - kHeapObjectTag);
}
739 
// Decodes the forwarding pointer stored in this map word back into the moved
// object's new location. Only valid when IsForwardingAddress() holds.
HeapObject MapWord::ToForwardingAddress() {
  DCHECK(IsForwardingAddress());
  HeapObject obj = HeapObject::FromAddress(value_);
  // For objects allocated outside of the main pointer compression cage the
  // variant with explicit cage base must be used.
  DCHECK_IMPLIES(V8_EXTERNAL_CODE_SPACE_BOOL, !IsCodeSpaceObject(obj));
  return obj;
}
748 
// Variant of ToForwardingAddress for objects living outside the main pointer
// compression cage (e.g. the external code space): re-bases the compressed
// payload of the map word onto |host_cage_base|.
HeapObject MapWord::ToForwardingAddress(PtrComprCageBase host_cage_base) {
  DCHECK(IsForwardingAddress());
  if (V8_EXTERNAL_CODE_SPACE_BOOL) {
    // Recompress value_ using proper host_cage_base since the map word
    // has the upper 32 bits that correspond to the main cage base value.
    Address value =
        DecompressTaggedPointer(host_cage_base, CompressTagged(value_));
    return HeapObject::FromAddress(value);
  }
  return HeapObject::FromAddress(value_);
}
760 
761 #ifdef VERIFY_HEAP
// Heap verifier: checks that the tagged field at |offset| holds a valid
// pointer. The STATIC_ASSERT documents that with pointer compression a
// tagged field is exactly 32 bits wide.
void HeapObject::VerifyObjectField(Isolate* isolate, int offset) {
  VerifyPointer(isolate, TaggedField<Object>::load(isolate, *this, offset));
  STATIC_ASSERT(!COMPRESS_POINTERS_BOOL || kTaggedSize == kInt32Size);
}
766 
// Heap verifier: like VerifyObjectField, but for fields that may hold weak
// references (MaybeObject).
void HeapObject::VerifyMaybeObjectField(Isolate* isolate, int offset) {
  MaybeObject::VerifyMaybeObjectPointer(
      isolate, TaggedField<MaybeObject>::load(isolate, *this, offset));
  STATIC_ASSERT(!COMPRESS_POINTERS_BOOL || kTaggedSize == kInt32Size);
}
772 
// Heap verifier: checks that the tagged field at |offset| holds a Smi.
void HeapObject::VerifySmiField(int offset) {
  CHECK(TaggedField<Object>::load(*this, offset).IsSmi());
  STATIC_ASSERT(!COMPRESS_POINTERS_BOOL || kTaggedSize == kInt32Size);
}
777 
778 #endif
779 
// Returns the read-only roots table reachable from this object's heap.
ReadOnlyRoots HeapObject::GetReadOnlyRoots() const {
  return ReadOnlyHeap::GetReadOnlyRoots(*this);
}
783 
// Cage-base-aware variant: when each isolate has its own compression cage,
// the isolate (and thus its roots) can be recovered directly from the cage
// base address; otherwise falls back to the generic lookup above.
ReadOnlyRoots HeapObject::GetReadOnlyRoots(PtrComprCageBase cage_base) const {
#ifdef V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE
  DCHECK_NE(cage_base.address(), 0);
  return ReadOnlyRoots(Isolate::FromRootAddress(cage_base.address()));
#else
  return GetReadOnlyRoots();
#endif
}
792 
// Relaxed-load accessor for the object's map, with the cage base computed
// automatically from this object's address.
Map HeapObject::map() const {
  // This method is never used for objects located in code space (Code and
  // free space fillers) and thus it is fine to use auto-computed cage base
  // value.
  DCHECK_IMPLIES(V8_EXTERNAL_CODE_SPACE_BOOL, !IsCodeSpaceObject(*this));
  PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
  return HeapObject::map(cage_base);
}
// Relaxed-load accessor for the object's map with an explicit cage base.
Map HeapObject::map(PtrComprCageBase cage_base) const {
  return map_word(cage_base, kRelaxedLoad).ToMap();
}
804 
// Default map setter: relaxed store, write barrier emitted, verified as a
// potential object-layout change.
void HeapObject::set_map(Map value) {
  set_map<EmitWriteBarrier::kYes>(value, kRelaxedStore,
                                  VerificationMode::kPotentialLayoutChange);
}
809 
// Release-store variant of set_map; write barrier emitted, verified as a
// potential object-layout change.
void HeapObject::set_map(Map value, ReleaseStoreTag tag) {
  set_map<EmitWriteBarrier::kYes>(value, kReleaseStore,
                                  VerificationMode::kPotentialLayoutChange);
}
814 
// Map setter for transitions that are known not to change object layout
// (kSafeMapTransition); relaxed store with write barrier.
void HeapObject::set_map_safe_transition(Map value) {
  set_map<EmitWriteBarrier::kYes>(value, kRelaxedStore,
                                  VerificationMode::kSafeMapTransition);
}
819 
// Release-store variant of set_map_safe_transition.
void HeapObject::set_map_safe_transition(Map value, ReleaseStoreTag tag) {
  set_map<EmitWriteBarrier::kYes>(value, kReleaseStore,
                                  VerificationMode::kSafeMapTransition);
}
824 
825 // Unsafe accessor omitting write barrier.
// Unsafe accessor omitting write barrier.
// Caller must guarantee no barrier is required (checked via SLOW_DCHECK in
// the templated set_map).
void HeapObject::set_map_no_write_barrier(Map value, RelaxedStoreTag tag) {
  set_map<EmitWriteBarrier::kNo>(value, kRelaxedStore,
                                 VerificationMode::kPotentialLayoutChange);
}
830 
// Release-store variant of set_map_no_write_barrier.
void HeapObject::set_map_no_write_barrier(Map value, ReleaseStoreTag tag) {
  set_map<EmitWriteBarrier::kNo>(value, kReleaseStore,
                                 VerificationMode::kPotentialLayoutChange);
}
835 
// Central map-store implementation backing all public set_map* wrappers.
// Performs (in order): debug-only sanity checks, optional heap verification,
// the actual map-word store with the requested memory order, and finally the
// marking write barrier (unless explicitly suppressed). The store must
// precede the barrier.
template <HeapObject::EmitWriteBarrier emit_write_barrier, typename MemoryOrder>
void HeapObject::set_map(Map value, MemoryOrder order, VerificationMode mode) {
#if V8_ENABLE_WEBASSEMBLY
  // In {WasmGraphBuilder::SetMap} and {WasmGraphBuilder::LoadMap}, we treat
  // maps as immutable. Therefore we are not allowed to mutate them here.
  DCHECK(!value.IsWasmStructMap() && !value.IsWasmArrayMap());
#endif
  // Object layout changes are currently not supported on background threads.
  // This method might change object layout and therefore can't be used on
  // background threads.
  DCHECK_IMPLIES(mode != VerificationMode::kSafeMapTransition,
                 !LocalHeap::Current());
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap && !value.is_null()) {
    Heap* heap = GetHeapFromWritableObject(*this);
    if (mode == VerificationMode::kSafeMapTransition) {
      heap->VerifySafeMapTransition(*this, value);
    } else {
      DCHECK_EQ(mode, VerificationMode::kPotentialLayoutChange);
      heap->VerifyObjectLayoutChange(*this, value);
    }
  }
#endif
  set_map_word(MapWord::FromMap(value), order);
#ifndef V8_DISABLE_WRITE_BARRIERS
  if (!value.is_null()) {
    if (emit_write_barrier == EmitWriteBarrier::kYes) {
      WriteBarrier::Marking(*this, map_slot(), value);
    } else {
      DCHECK_EQ(emit_write_barrier, EmitWriteBarrier::kNo);
      SLOW_DCHECK(!WriteBarrier::IsRequired(*this, value));
    }
  }
#endif
}
871 
// Installs the map on a freshly allocated object. Skips the verification
// performed by set_map (the object has no previous layout); the caller
// chooses via |mode| whether the marking barrier is needed.
void HeapObject::set_map_after_allocation(Map value, WriteBarrierMode mode) {
  MapWord mapword = MapWord::FromMap(value);
  set_map_word(mapword, kRelaxedStore);
#ifndef V8_DISABLE_WRITE_BARRIERS
  if (mode != SKIP_WRITE_BARRIER) {
    DCHECK(!value.is_null());
    WriteBarrier::Marking(*this, map_slot(), value);
  } else {
    SLOW_DCHECK(!WriteBarrier::IsRequired(*this, value));
  }
#endif
}
884 
// Acquire-load variant of map(); the DEF_ACQUIRE_GETTER macro supplies the
// signature (including the cage_base parameter used below).
DEF_ACQUIRE_GETTER(HeapObject, map, Map) {
  return map_word(cage_base, kAcquireLoad).ToMap();
}
888 
// Returns a slot addressing this object's map field.
ObjectSlot HeapObject::map_slot() const {
  return ObjectSlot(MapField::address(*this));
}
892 
// Relaxed load of the raw map word, with auto-computed cage base.
MapWord HeapObject::map_word(RelaxedLoadTag tag) const {
  // This method is never used for objects located in code space (Code and
  // free space fillers) and thus it is fine to use auto-computed cage base
  // value.
  DCHECK_IMPLIES(V8_EXTERNAL_CODE_SPACE_BOOL, !IsCodeSpaceObject(*this));
  PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
  return HeapObject::map_word(cage_base, tag);
}
// Relaxed load of the raw map word with an explicit cage base.
MapWord HeapObject::map_word(PtrComprCageBase cage_base,
                             RelaxedLoadTag tag) const {
  return MapField::Relaxed_Load_Map_Word(cage_base, *this);
}
905 
// Relaxed store of a raw map word (no write barrier; callers handle that).
void HeapObject::set_map_word(MapWord map_word, RelaxedStoreTag) {
  MapField::Relaxed_Store_Map_Word(*this, map_word);
}
909 
// Acquire load of the raw map word, with auto-computed cage base.
MapWord HeapObject::map_word(AcquireLoadTag tag) const {
  // This method is never used for objects located in code space (Code and
  // free space fillers) and thus it is fine to use auto-computed cage base
  // value.
  DCHECK_IMPLIES(V8_EXTERNAL_CODE_SPACE_BOOL, !IsCodeSpaceObject(*this));
  PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
  return HeapObject::map_word(cage_base, tag);
}
// Acquire load of the raw map word with an explicit cage base. The value is
// returned without unpacking (see MapField::Acquire_Load_No_Unpack).
MapWord HeapObject::map_word(PtrComprCageBase cage_base,
                             AcquireLoadTag tag) const {
  return MapField::Acquire_Load_No_Unpack(cage_base, *this);
}
922 
// Release store of a raw map word (no write barrier; callers handle that).
void HeapObject::set_map_word(MapWord map_word, ReleaseStoreTag) {
  MapField::Release_Store_Map_Word(*this, map_word);
}
926 
// Atomically replaces the map word with |new_map_word| iff it currently
// equals |old_map_word| (release ordering). Returns true on success.
bool HeapObject::release_compare_and_swap_map_word(MapWord old_map_word,
                                                   MapWord new_map_word) {
  Tagged_t result =
      MapField::Release_CompareAndSwap(*this, old_map_word, new_map_word);
  // CAS reports the previously stored (compressed) value; success means it
  // matched the expected old map word.
  return result == static_cast<Tagged_t>(old_map_word.ptr());
}
933 
934 // TODO(v8:11880): consider dropping parameterless version.
// TODO(v8:11880): consider dropping parameterless version.
// Object size in bytes, with auto-computed cage base.
int HeapObject::Size() const {
  DCHECK_IMPLIES(V8_EXTERNAL_CODE_SPACE_BOOL, !IsCodeSpaceObject(*this));
  PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
  return HeapObject::Size(cage_base);
}
// Object size in bytes, derived from the map (see SizeFromMap).
int HeapObject::Size(PtrComprCageBase cage_base) const {
  return SizeFromMap(map(cage_base));
}
943 
// Instance types up to LAST_SPECIAL_RECEIVER_TYPE need non-standard property
// handling (cf. the interceptor/access-check DCHECK in
// Map::IsSpecialReceiverMap below).
inline bool IsSpecialReceiverInstanceType(InstanceType instance_type) {
  return instance_type <= LAST_SPECIAL_RECEIVER_TYPE;
}
947 
948 // This should be in objects/map-inl.h, but can't, because of a cyclic
949 // dependency.
// True if objects with this map need special receiver treatment. The DCHECK
// asserts the invariant that maps with named interceptors or access checks
// are always classified as special.
bool Map::IsSpecialReceiverMap() const {
  bool result = IsSpecialReceiverInstanceType(instance_type());
  DCHECK_IMPLIES(!result,
                 !has_named_interceptor() && !is_access_check_needed());
  return result;
}
956 
// Instance types up to LAST_CUSTOM_ELEMENTS_RECEIVER have non-standard
// element accessors.
inline bool IsCustomElementsReceiverInstanceType(InstanceType instance_type) {
  return instance_type <= LAST_CUSTOM_ELEMENTS_RECEIVER;
}
960 
961 // This should be in objects/map-inl.h, but can't, because of a cyclic
962 // dependency.
// True if objects with this map have custom (non-standard) element handling.
bool Map::IsCustomElementsReceiverMap() const {
  return IsCustomElementsReceiverInstanceType(instance_type());
}
966 
// A valid JS array length is any uint32; delegates to ToUint32.
bool Object::ToArrayLength(uint32_t* index) const {
  return Object::ToUint32(index);
}
970 
// A valid array index is a uint32 strictly less than 2^32 - 1 (kMaxUInt32 is
// the maximum array *length*, not a valid index).
bool Object::ToArrayIndex(uint32_t* index) const {
  return Object::ToUint32(index) && *index != kMaxUInt32;
}
974 
// Converts this object to a non-negative integer index if it is a Smi or a
// HeapNumber holding an exact integer representable in [0, min(2^53 - 1,
// SIZE_MAX - 1)]. Returns false (leaving *index untouched) otherwise.
bool Object::ToIntegerIndex(size_t* index) const {
  if (IsSmi()) {
    int num = Smi::ToInt(*this);
    if (num < 0) return false;
    *index = static_cast<size_t>(num);
    return true;
  }
  if (IsHeapNumber()) {
    double num = HeapNumber::cast(*this).value();
    if (!(num >= 0)) return false;  // Negation to catch NaNs.
    constexpr double max =
        std::min(kMaxSafeInteger,
                 // The maximum size_t is reserved as "invalid" sentinel.
                 static_cast<double>(std::numeric_limits<size_t>::max() - 1));
    if (num > max) return false;
    size_t result = static_cast<size_t>(num);
    if (num != result) return false;  // Conversion lost fractional precision.
    *index = result;
    return true;
  }
  return false;
}
997 
// Returns the write barrier mode appropriate for this object. Requires a
// DisallowGarbageCollection witness since the answer is only valid while the
// object cannot move.
WriteBarrierMode HeapObject::GetWriteBarrierMode(
    const DisallowGarbageCollection& promise) {
  return GetWriteBarrierModeForObject(*this, &promise);
}
1002 
1003 // static
RequiredAlignment(Map map)1004 AllocationAlignment HeapObject::RequiredAlignment(Map map) {
1005   // TODO(v8:4153): We should think about requiring double alignment
1006   // in general for ByteArray, since they are used as backing store for typed
1007   // arrays now.
1008   // TODO(ishell, v8:8875): Consider using aligned allocations for BigInt.
1009   if (USE_ALLOCATION_ALIGNMENT_BOOL) {
1010     int instance_type = map.instance_type();
1011     if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) return kDoubleAligned;
1012     if (instance_type == HEAP_NUMBER_TYPE) return kDoubleUnaligned;
1013   }
1014   return kTaggedAligned;
1015 }
1016 
// Public accessor for the raw address of the field at |field_offset|.
Address HeapObject::GetFieldAddress(int field_offset) const {
  return field_address(field_offset);
}
1020 
1021 // static
GreaterThan(Isolate * isolate,Handle<Object> x,Handle<Object> y)1022 Maybe<bool> Object::GreaterThan(Isolate* isolate, Handle<Object> x,
1023                                 Handle<Object> y) {
1024   Maybe<ComparisonResult> result = Compare(isolate, x, y);
1025   if (result.IsJust()) {
1026     switch (result.FromJust()) {
1027       case ComparisonResult::kGreaterThan:
1028         return Just(true);
1029       case ComparisonResult::kLessThan:
1030       case ComparisonResult::kEqual:
1031       case ComparisonResult::kUndefined:
1032         return Just(false);
1033     }
1034   }
1035   return Nothing<bool>();
1036 }
1037 
1038 // static
GreaterThanOrEqual(Isolate * isolate,Handle<Object> x,Handle<Object> y)1039 Maybe<bool> Object::GreaterThanOrEqual(Isolate* isolate, Handle<Object> x,
1040                                        Handle<Object> y) {
1041   Maybe<ComparisonResult> result = Compare(isolate, x, y);
1042   if (result.IsJust()) {
1043     switch (result.FromJust()) {
1044       case ComparisonResult::kEqual:
1045       case ComparisonResult::kGreaterThan:
1046         return Just(true);
1047       case ComparisonResult::kLessThan:
1048       case ComparisonResult::kUndefined:
1049         return Just(false);
1050     }
1051   }
1052   return Nothing<bool>();
1053 }
1054 
1055 // static
LessThan(Isolate * isolate,Handle<Object> x,Handle<Object> y)1056 Maybe<bool> Object::LessThan(Isolate* isolate, Handle<Object> x,
1057                              Handle<Object> y) {
1058   Maybe<ComparisonResult> result = Compare(isolate, x, y);
1059   if (result.IsJust()) {
1060     switch (result.FromJust()) {
1061       case ComparisonResult::kLessThan:
1062         return Just(true);
1063       case ComparisonResult::kEqual:
1064       case ComparisonResult::kGreaterThan:
1065       case ComparisonResult::kUndefined:
1066         return Just(false);
1067     }
1068   }
1069   return Nothing<bool>();
1070 }
1071 
1072 // static
LessThanOrEqual(Isolate * isolate,Handle<Object> x,Handle<Object> y)1073 Maybe<bool> Object::LessThanOrEqual(Isolate* isolate, Handle<Object> x,
1074                                     Handle<Object> y) {
1075   Maybe<ComparisonResult> result = Compare(isolate, x, y);
1076   if (result.IsJust()) {
1077     switch (result.FromJust()) {
1078       case ComparisonResult::kEqual:
1079       case ComparisonResult::kLessThan:
1080         return Just(true);
1081       case ComparisonResult::kGreaterThan:
1082       case ComparisonResult::kUndefined:
1083         return Just(false);
1084     }
1085   }
1086   return Nothing<bool>();
1087 }
1088 
// Looks up |name| on |object|, treating names that parse as indices as
// element accesses (PropertyKey handles that distinction).
MaybeHandle<Object> Object::GetPropertyOrElement(Isolate* isolate,
                                                 Handle<Object> object,
                                                 Handle<Name> name) {
  PropertyKey key(isolate, name);
  LookupIterator it(isolate, object, key);
  return GetProperty(&it);
}
1096 
// Stores |value| at |name| on |object| (element store if |name| is an index).
// Returns the stored value on success, or an empty handle if SetProperty
// failed (MAYBE_RETURN_NULL propagates the failure).
MaybeHandle<Object> Object::SetPropertyOrElement(
    Isolate* isolate, Handle<Object> object, Handle<Name> name,
    Handle<Object> value, Maybe<ShouldThrow> should_throw,
    StoreOrigin store_origin) {
  PropertyKey key(isolate, name);
  LookupIterator it(isolate, object, key);
  MAYBE_RETURN_NULL(SetProperty(&it, value, store_origin, should_throw));
  return value;
}
1106 
// Variant that starts the lookup at an explicit |holder| while keeping
// |receiver| as the receiver seen by accessors.
MaybeHandle<Object> Object::GetPropertyOrElement(Handle<Object> receiver,
                                                 Handle<Name> name,
                                                 Handle<JSReceiver> holder) {
  Isolate* isolate = holder->GetIsolate();
  PropertyKey key(isolate, name);
  LookupIterator it(isolate, receiver, key, holder);
  return GetProperty(&it);
}
1115 
1116 // static
// Computes a hash for primitives and a few hashable heap types, returned as
// a Smi in [0, Smi::kMaxValue]. For JSReceivers (which need an identity
// hash) the object itself is returned so the caller can fall back to
// GetIdentityHash (see Object::GetHash below).
Object Object::GetSimpleHash(Object object) {
  DisallowGarbageCollection no_gc;
  if (object.IsSmi()) {
    uint32_t hash = ComputeUnseededHash(Smi::ToInt(object));
    return Smi::FromInt(hash & Smi::kMaxValue);
  }
  auto instance_type = HeapObject::cast(object).map().instance_type();
  if (InstanceTypeChecker::IsHeapNumber(instance_type)) {
    double num = HeapNumber::cast(object).value();
    // All NaNs hash alike (they compare equal under SameValueZero).
    if (std::isnan(num)) return Smi::FromInt(Smi::kMaxValue);
    // Use ComputeUnseededHash for all values in Signed32 range, including -0,
    // which is considered equal to 0 because collections use SameValueZero.
    uint32_t hash;
    // Check range before conversion to avoid undefined behavior.
    if (num >= kMinInt && num <= kMaxInt && FastI2D(FastD2I(num)) == num) {
      hash = ComputeUnseededHash(FastD2I(num));
    } else {
      hash = ComputeLongHash(base::double_to_uint64(num));
    }
    return Smi::FromInt(hash & Smi::kMaxValue);
  } else if (InstanceTypeChecker::IsName(instance_type)) {
    uint32_t hash = Name::cast(object).EnsureHash();
    return Smi::FromInt(hash);
  } else if (InstanceTypeChecker::IsOddball(instance_type)) {
    // Oddballs (undefined, null, booleans, ...) hash via their string repr.
    uint32_t hash = Oddball::cast(object).to_string().EnsureHash();
    return Smi::FromInt(hash);
  } else if (InstanceTypeChecker::IsBigInt(instance_type)) {
    uint32_t hash = BigInt::cast(object).Hash();
    return Smi::FromInt(hash & Smi::kMaxValue);
  } else if (InstanceTypeChecker::IsSharedFunctionInfo(instance_type)) {
    uint32_t hash = SharedFunctionInfo::cast(object).Hash();
    return Smi::FromInt(hash & Smi::kMaxValue);
  }
  DCHECK(object.IsJSReceiver());
  return object;
}
1153 
// Returns this object's hash as a Smi: GetSimpleHash for primitives, the
// identity hash for JSReceivers (GetSimpleHash returns the receiver itself
// in that case, signalling the fallback).
Object Object::GetHash() {
  DisallowGarbageCollection no_gc;
  Object hash = GetSimpleHash(*this);
  if (hash.IsSmi()) return hash;

  DCHECK(IsJSReceiver());
  JSReceiver receiver = JSReceiver::cast(*this);
  return receiver.GetIdentityHash();
}
1163 
// Returns whether this value already lives in (or counts as part of) the
// shared heap and may thus be referenced across isolates without copying.
bool Object::IsShared() const {
  // This logic should be kept in sync with fast paths in
  // CodeStubAssembler::SharedValueBarrier.

  // Smis are trivially shared.
  if (IsSmi()) return true;

  HeapObject object = HeapObject::cast(*this);

  // RO objects are shared when the RO space is shared.
  if (IsReadOnlyHeapObject(object)) {
    return ReadOnlyHeap::IsReadOnlySpaceShared();
  }

  // Check if this object is already shared.
  switch (object.map().instance_type()) {
    case SHARED_STRING_TYPE:
    case SHARED_ONE_BYTE_STRING_TYPE:
    case JS_SHARED_STRUCT_TYPE:
      DCHECK(object.InSharedHeap());
      return true;
    case INTERNALIZED_STRING_TYPE:
    case ONE_BYTE_INTERNALIZED_STRING_TYPE:
      // Internalized strings live in the shared heap only when the shared
      // string table is enabled.
      if (FLAG_shared_string_table) {
        DCHECK(object.InSharedHeap());
        return true;
      }
      return false;
    case HEAP_NUMBER_TYPE:
      return object.InSharedWritableHeap();
    default:
      return false;
  }
}
1198 
1199 // static
// Returns a shared version of |value|: the value itself if already shared,
// otherwise the result of ShareSlow (which may throw or fail depending on
// |throw_if_cannot_be_shared|).
MaybeHandle<Object> Object::Share(Isolate* isolate, Handle<Object> value,
                                  ShouldThrow throw_if_cannot_be_shared) {
  // Sharing values requires the RO space be shared.
  DCHECK(ReadOnlyHeap::IsReadOnlySpaceShared());
  if (value->IsShared()) return value;
  return ShareSlow(isolate, Handle<HeapObject>::cast(value),
                   throw_if_cannot_be_shared);
}
1208 
1209 // https://tc39.es/ecma262/#sec-canbeheldweakly
CanBeHeldWeakly()1210 bool Object::CanBeHeldWeakly() const {
1211   if (IsJSReceiver()) {
1212     // TODO(v8:12547) Shared structs and arrays should only be able to point
1213     // to shared values in weak collections. For now, disallow them as weak
1214     // collection keys.
1215     if (FLAG_harmony_struct) {
1216       return !IsJSSharedStruct();
1217     }
1218     return true;
1219   }
1220   return IsSymbol() && !Symbol::cast(*this).is_in_public_symbol_table();
1221 }
1222 
// Keys are stored as-is in object hash tables; no conversion needed.
Handle<Object> ObjectHashTableShape::AsHandle(Handle<Object> key) {
  return key;
}
1226 
// Pushes this object onto the isolate's LIFO stack of relocatable objects
// (popped again in the destructor).
Relocatable::Relocatable(Isolate* isolate) {
  isolate_ = isolate;
  prev_ = isolate->relocatable_top();
  isolate->set_relocatable_top(this);
}
1232 
// Pops this object from the isolate's relocatable stack; destruction must be
// strictly LIFO (checked below).
Relocatable::~Relocatable() {
  DCHECK_EQ(isolate_->relocatable_top(), this);
  isolate_->set_relocatable_top(prev_);
}
1237 
1238 // Predictably converts HeapObject or Address to uint32 by calculating
1239 // offset of the address in respective MemoryChunk.
// Predictably converts HeapObject or Address to uint32 by calculating
// offset of the address in respective MemoryChunk.
static inline uint32_t ObjectAddressForHashing(Address object) {
  // Truncate to 32 bits, then keep only the within-page offset so the result
  // is stable regardless of where the page was mapped.
  uint32_t value = static_cast<uint32_t>(object);
  return value & kPageAlignmentMask;
}
1244 
MakeEntryPair(Isolate * isolate,size_t index,Handle<Object> value)1245 static inline Handle<Object> MakeEntryPair(Isolate* isolate, size_t index,
1246                                            Handle<Object> value) {
1247   Handle<Object> key = isolate->factory()->SizeToString(index);
1248   Handle<FixedArray> entry_storage = isolate->factory()->NewFixedArray(2);
1249   {
1250     entry_storage->set(0, *key, SKIP_WRITE_BARRIER);
1251     entry_storage->set(1, *value, SKIP_WRITE_BARRIER);
1252   }
1253   return isolate->factory()->NewJSArrayWithElements(entry_storage,
1254                                                     PACKED_ELEMENTS, 2);
1255 }
1256 
MakeEntryPair(Isolate * isolate,Handle<Object> key,Handle<Object> value)1257 static inline Handle<Object> MakeEntryPair(Isolate* isolate, Handle<Object> key,
1258                                            Handle<Object> value) {
1259   Handle<FixedArray> entry_storage = isolate->factory()->NewFixedArray(2);
1260   {
1261     entry_storage->set(0, *key, SKIP_WRITE_BARRIER);
1262     entry_storage->set(1, *value, SKIP_WRITE_BARRIER);
1263   }
1264   return isolate->factory()->NewJSArrayWithElements(entry_storage,
1265                                                     PACKED_ELEMENTS, 2);
1266 }
1267 
// Downcast for BigInts that are still under construction; only a slow DCHECK
// because full type checks may not yet be valid on the fresh object.
FreshlyAllocatedBigInt FreshlyAllocatedBigInt::cast(Object object) {
  SLOW_DCHECK(object.IsBigInt());
  return FreshlyAllocatedBigInt(object.ptr());
}
1272 
1273 }  // namespace internal
1274 }  // namespace v8
1275 
1276 #include "src/objects/object-macros-undef.h"
1277 
1278 #endif  // V8_OBJECTS_OBJECTS_INL_H_
1279