1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 //
5 // Review notes:
6 //
7 // - The use of macros in these inline functions may seem superfluous
8 // but it is absolutely needed to make sure gcc generates optimal
9 // code. gcc is not happy when attempting to inline too deep.
10 //
11
12 #ifndef V8_OBJECTS_INL_H_
13 #define V8_OBJECTS_INL_H_
14
15 #include "src/base/atomicops.h"
16 #include "src/base/bits.h"
17 #include "src/builtins/builtins.h"
18 #include "src/contexts-inl.h"
19 #include "src/conversions-inl.h"
20 #include "src/factory.h"
21 #include "src/feedback-vector-inl.h"
22 #include "src/field-index-inl.h"
23 #include "src/field-type.h"
24 #include "src/handles-inl.h"
25 #include "src/heap/heap-inl.h"
26 #include "src/heap/heap.h"
27 #include "src/isolate-inl.h"
28 #include "src/isolate.h"
29 #include "src/keys.h"
30 #include "src/layout-descriptor-inl.h"
31 #include "src/lookup-cache-inl.h"
32 #include "src/lookup.h"
33 #include "src/objects.h"
34 #include "src/objects/literal-objects.h"
35 #include "src/objects/module-info.h"
36 #include "src/objects/regexp-match-info.h"
37 #include "src/objects/scope-info.h"
38 #include "src/property.h"
39 #include "src/prototype.h"
40 #include "src/transitions-inl.h"
41 #include "src/v8memory.h"
42
43 namespace v8 {
44 namespace internal {
45
// Reconstructs property details from their Smi-encoded form (the inverse of
// AsSmi() below).
PropertyDetails::PropertyDetails(Smi* smi) {
  value_ = smi->value();
}
49
50
// Encodes the property details as a Smi.
Smi* PropertyDetails::AsSmi() const {
  // Ensure the upper 2 bits have the same value by sign extending it. This is
  // necessary to be able to use the 31st bit of the property details.
  int value = value_ << 1;
  return Smi::FromInt(value >> 1);
}
57
58
field_width_in_words()59 int PropertyDetails::field_width_in_words() const {
60 DCHECK(location() == kField);
61 if (!FLAG_unbox_double_fields) return 1;
62 if (kDoubleSize == kPointerSize) return 1;
63 return representation().IsDouble() ? kDoubleSize / kPointerSize : 1;
64 }
65
// Accessors for a raw int field stored at a fixed byte offset.
#define INT_ACCESSORS(holder, name, offset)                                   \
  int holder::name() const { return READ_INT_FIELD(this, offset); }           \
  void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }

// Tagged-pointer accessors with independently DCHECKed pre-conditions for
// getter and setter; the setter emits a (mode-conditional) write barrier.
#define ACCESSORS_CHECKED2(holder, name, type, offset, get_condition, \
                           set_condition)                             \
  type* holder::name() const {                                        \
    DCHECK(get_condition);                                            \
    return type::cast(READ_FIELD(this, offset));                      \
  }                                                                   \
  void holder::set_##name(type* value, WriteBarrierMode mode) {       \
    DCHECK(set_condition);                                            \
    WRITE_FIELD(this, offset, value);                                 \
    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);  \
  }

// Same pre-condition for both getter and setter.
#define ACCESSORS_CHECKED(holder, name, type, offset, condition) \
  ACCESSORS_CHECKED2(holder, name, type, offset, condition, condition)

// Unconditional tagged-pointer accessors.
#define ACCESSORS(holder, name, type, offset) \
  ACCESSORS_CHECKED(holder, name, type, offset, true)

// Getter that returns a Smi as an int and writes an int as a Smi.
#define SMI_ACCESSORS_CHECKED(holder, name, offset, condition) \
  int holder::name() const {                                   \
    DCHECK(condition);                                         \
    Object* value = READ_FIELD(this, offset);                  \
    return Smi::cast(value)->value();                          \
  }                                                            \
  void holder::set_##name(int value) {                         \
    DCHECK(condition);                                         \
    WRITE_FIELD(this, offset, Smi::FromInt(value));            \
  }

#define SMI_ACCESSORS(holder, name, offset) \
  SMI_ACCESSORS_CHECKED(holder, name, offset, true)

// Smi-as-int accessors with acquire-load / release-store semantics, for
// fields that may be accessed concurrently from other threads.
#define SYNCHRONIZED_SMI_ACCESSORS(holder, name, offset)    \
  int holder::synchronized_##name() const {                 \
    Object* value = ACQUIRE_READ_FIELD(this, offset);       \
    return Smi::cast(value)->value();                       \
  }                                                         \
  void holder::synchronized_set_##name(int value) {         \
    RELEASE_WRITE_FIELD(this, offset, Smi::FromInt(value)); \
  }

// Smi-as-int accessors with relaxed (no-barrier) atomic accesses.
#define NOBARRIER_SMI_ACCESSORS(holder, name, offset)         \
  int holder::nobarrier_##name() const {                      \
    Object* value = NOBARRIER_READ_FIELD(this, offset);       \
    return Smi::cast(value)->value();                         \
  }                                                           \
  void holder::nobarrier_set_##name(int value) {              \
    NOBARRIER_WRITE_FIELD(this, offset, Smi::FromInt(value)); \
  }

// Read-only view of a single bit inside an int-valued field.
// Fix: the original definition carried a stray trailing line-continuation
// after the closing brace, silently extending the macro over the following
// blank line; it has been removed (expansion is unchanged).
#define BOOL_GETTER(holder, field, name, offset) \
  bool holder::name() const {                    \
    return BooleanBit::get(field(), offset);     \
  }

// Read/write accessors for a single bit inside an int-valued field.
#define BOOL_ACCESSORS(holder, field, name, offset)       \
  bool holder::name() const {                             \
    return BooleanBit::get(field(), offset);              \
  }                                                       \
  void holder::set_##name(bool value) {                   \
    set_##field(BooleanBit::set(field(), offset, value)); \
  }

// Defines HeapObject::Is##type() as an exact instance-type comparison.
#define TYPE_CHECKER(type, instancetype)           \
  bool HeapObject::Is##type() const {              \
    return map()->instance_type() == instancetype; \
  }
138
// Exact instance-type predicates, one per concrete type. Note that
// WeakFixedArray deliberately shares FIXED_ARRAY_TYPE with FixedArray.
TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE)
TYPE_CHECKER(BytecodeArray, BYTECODE_ARRAY_TYPE)
TYPE_CHECKER(Cell, CELL_TYPE)
TYPE_CHECKER(Code, CODE_TYPE)
TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)
TYPE_CHECKER(Foreign, FOREIGN_TYPE)
TYPE_CHECKER(FreeSpace, FREE_SPACE_TYPE)
TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE)
TYPE_CHECKER(JSArgumentsObject, JS_ARGUMENTS_TYPE)
TYPE_CHECKER(JSArray, JS_ARRAY_TYPE)
TYPE_CHECKER(JSArrayBuffer, JS_ARRAY_BUFFER_TYPE)
TYPE_CHECKER(JSBoundFunction, JS_BOUND_FUNCTION_TYPE)
TYPE_CHECKER(JSContextExtensionObject, JS_CONTEXT_EXTENSION_OBJECT_TYPE)
TYPE_CHECKER(JSDataView, JS_DATA_VIEW_TYPE)
TYPE_CHECKER(JSDate, JS_DATE_TYPE)
TYPE_CHECKER(JSError, JS_ERROR_TYPE)
TYPE_CHECKER(JSFunction, JS_FUNCTION_TYPE)
TYPE_CHECKER(JSGeneratorObject, JS_GENERATOR_OBJECT_TYPE)
TYPE_CHECKER(JSGlobalObject, JS_GLOBAL_OBJECT_TYPE)
TYPE_CHECKER(JSMap, JS_MAP_TYPE)
TYPE_CHECKER(JSMapIterator, JS_MAP_ITERATOR_TYPE)
TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)
TYPE_CHECKER(JSModuleNamespace, JS_MODULE_NAMESPACE_TYPE)
TYPE_CHECKER(JSPromiseCapability, JS_PROMISE_CAPABILITY_TYPE)
TYPE_CHECKER(JSPromise, JS_PROMISE_TYPE)
TYPE_CHECKER(JSRegExp, JS_REGEXP_TYPE)
TYPE_CHECKER(JSSet, JS_SET_TYPE)
TYPE_CHECKER(JSSetIterator, JS_SET_ITERATOR_TYPE)
TYPE_CHECKER(JSAsyncFromSyncIterator, JS_ASYNC_FROM_SYNC_ITERATOR_TYPE)
TYPE_CHECKER(JSStringIterator, JS_STRING_ITERATOR_TYPE)
TYPE_CHECKER(JSTypedArray, JS_TYPED_ARRAY_TYPE)
TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE)
TYPE_CHECKER(JSWeakSet, JS_WEAK_SET_TYPE)
TYPE_CHECKER(Map, MAP_TYPE)
TYPE_CHECKER(MutableHeapNumber, MUTABLE_HEAP_NUMBER_TYPE)
TYPE_CHECKER(Oddball, ODDBALL_TYPE)
TYPE_CHECKER(PropertyCell, PROPERTY_CELL_TYPE)
TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
TYPE_CHECKER(Symbol, SYMBOL_TYPE)
TYPE_CHECKER(TransitionArray, TRANSITION_ARRAY_TYPE)
TYPE_CHECKER(WeakCell, WEAK_CELL_TYPE)
TYPE_CHECKER(WeakFixedArray, FIXED_ARRAY_TYPE)

// One Fixed##Type##Array checker per typed-array element kind.
#define TYPED_ARRAY_TYPE_CHECKER(Type, type, TYPE, ctype, size) \
  TYPE_CHECKER(Fixed##Type##Array, FIXED_##TYPE##_ARRAY_TYPE)
TYPED_ARRAYS(TYPED_ARRAY_TYPE_CHECKER)
#undef TYPED_ARRAY_TYPE_CHECKER

#undef TYPE_CHECKER
189
190 bool HeapObject::IsFixedArrayBase() const {
191 return IsFixedArray() || IsFixedDoubleArray() || IsFixedTypedArrayBase();
192 }
193
IsFixedArray()194 bool HeapObject::IsFixedArray() const {
195 InstanceType instance_type = map()->instance_type();
196 return instance_type == FIXED_ARRAY_TYPE ||
197 instance_type == TRANSITION_ARRAY_TYPE;
198 }
199
// BoilerplateDescriptions are plain fixed arrays; they cannot be told apart
// by instance type.
bool HeapObject::IsBoilerplateDescription() const { return IsFixedArray(); }

// External objects are not extensible, so the map check is enough.
bool HeapObject::IsExternal() const {
  return map() == GetHeap()->external_map();
}
206
// Object::Is##type_() forwards to the HeapObject predicate after ruling out
// Smis.
#define IS_TYPE_FUNCTION_DEF(type_)                               \
  bool Object::Is##type_() const {                                \
    return IsHeapObject() && HeapObject::cast(this)->Is##type_(); \
  }
HEAP_OBJECT_TYPE_LIST(IS_TYPE_FUNCTION_DEF)
#undef IS_TYPE_FUNCTION_DEF

// Oddball identity checks (IsUndefined, IsNull, ...) compare against the
// isolate's canonical oddball value.
#define IS_TYPE_FUNCTION_DEF(Type, Value)            \
  bool Object::Is##Type(Isolate* isolate) const {    \
    return this == isolate->heap()->Value();         \
  }                                                  \
  bool HeapObject::Is##Type(Isolate* isolate) const { \
    return this == isolate->heap()->Value();         \
  }
ODDBALL_LIST(IS_TYPE_FUNCTION_DEF)
#undef IS_TYPE_FUNCTION_DEF
223
224 bool Object::IsNullOrUndefined(Isolate* isolate) const {
225 Heap* heap = isolate->heap();
226 return this == heap->null_value() || this == heap->undefined_value();
227 }
228
IsNullOrUndefined(Isolate * isolate)229 bool HeapObject::IsNullOrUndefined(Isolate* isolate) const {
230 Heap* heap = isolate->heap();
231 return this == heap->null_value() || this == heap->undefined_value();
232 }
233
IsString()234 bool HeapObject::IsString() const {
235 return map()->instance_type() < FIRST_NONSTRING_TYPE;
236 }
237
IsName()238 bool HeapObject::IsName() const {
239 return map()->instance_type() <= LAST_NAME_TYPE;
240 }
241
IsUniqueName()242 bool HeapObject::IsUniqueName() const {
243 return IsInternalizedString() || IsSymbol();
244 }
245
// A name is unique unless it is a non-internalized string: symbols always
// pass; strings pass only when the internalized bit is set.
bool Name::IsUniqueName() const {
  uint32_t type = map()->instance_type();
  return (type & (kIsNotStringMask | kIsNotInternalizedMask)) !=
         (kStringTag | kNotInternalizedTag);
}
251
// Function types sit at the very end of the instance-type range, so a single
// lower-bound comparison suffices.
bool HeapObject::IsFunction() const {
  STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
  return map()->instance_type() >= FIRST_FUNCTION_TYPE;
}

// Callable/constructor-ness is a bit on the map, not an instance type.
bool HeapObject::IsCallable() const { return map()->is_callable(); }

bool HeapObject::IsConstructor() const { return map()->is_constructor(); }

// Covers both API template-info flavors.
bool HeapObject::IsTemplateInfo() const {
  return IsObjectTemplateInfo() || IsFunctionTemplateInfo();
}
264
// Internalized strings carry the string tag with the "not internalized" bit
// cleared.
bool HeapObject::IsInternalizedString() const {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kNotInternalizedTag != 0);
  return (type & (kIsNotStringMask | kIsNotInternalizedMask)) ==
         (kStringTag | kInternalizedTag);
}
271
IsConsString()272 bool HeapObject::IsConsString() const {
273 if (!IsString()) return false;
274 return StringShape(String::cast(this)).IsCons();
275 }
276
IsThinString()277 bool HeapObject::IsThinString() const {
278 if (!IsString()) return false;
279 return StringShape(String::cast(this)).IsThin();
280 }
281
IsSlicedString()282 bool HeapObject::IsSlicedString() const {
283 if (!IsString()) return false;
284 return StringShape(String::cast(this)).IsSliced();
285 }
286
IsSeqString()287 bool HeapObject::IsSeqString() const {
288 if (!IsString()) return false;
289 return StringShape(String::cast(this)).IsSequential();
290 }
291
IsSeqOneByteString()292 bool HeapObject::IsSeqOneByteString() const {
293 if (!IsString()) return false;
294 return StringShape(String::cast(this)).IsSequential() &&
295 String::cast(this)->IsOneByteRepresentation();
296 }
297
IsSeqTwoByteString()298 bool HeapObject::IsSeqTwoByteString() const {
299 if (!IsString()) return false;
300 return StringShape(String::cast(this)).IsSequential() &&
301 String::cast(this)->IsTwoByteRepresentation();
302 }
303
IsExternalString()304 bool HeapObject::IsExternalString() const {
305 if (!IsString()) return false;
306 return StringShape(String::cast(this)).IsExternal();
307 }
308
IsExternalOneByteString()309 bool HeapObject::IsExternalOneByteString() const {
310 if (!IsString()) return false;
311 return StringShape(String::cast(this)).IsExternal() &&
312 String::cast(this)->IsOneByteRepresentation();
313 }
314
IsExternalTwoByteString()315 bool HeapObject::IsExternalTwoByteString() const {
316 if (!IsString()) return false;
317 return StringShape(String::cast(this)).IsExternal() &&
318 String::cast(this)->IsTwoByteRepresentation();
319 }
320
IsNumber()321 bool Object::IsNumber() const { return IsSmi() || IsHeapNumber(); }
322
IsFiller()323 bool HeapObject::IsFiller() const {
324 InstanceType instance_type = map()->instance_type();
325 return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
326 }
327
IsFixedTypedArrayBase()328 bool HeapObject::IsFixedTypedArrayBase() const {
329 InstanceType instance_type = map()->instance_type();
330 return (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
331 instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE);
332 }
333
// JSReceivers (objects and proxies) occupy the top of the instance-type
// range, so a single lower-bound comparison suffices.
bool HeapObject::IsJSReceiver() const {
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  return map()->instance_type() >= FIRST_JS_RECEIVER_TYPE;
}

bool HeapObject::IsJSObject() const {
  STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
  return map()->IsJSObjectMap();
}

bool HeapObject::IsJSProxy() const { return map()->IsJSProxyMap(); }
345
IsJSArrayIterator()346 bool HeapObject::IsJSArrayIterator() const {
347 InstanceType instance_type = map()->instance_type();
348 return (instance_type >= FIRST_ARRAY_ITERATOR_TYPE &&
349 instance_type <= LAST_ARRAY_ITERATOR_TYPE);
350 }
351
IsJSWeakCollection()352 bool HeapObject::IsJSWeakCollection() const {
353 return IsJSWeakMap() || IsJSWeakSet();
354 }
355
IsJSCollection()356 bool HeapObject::IsJSCollection() const { return IsJSMap() || IsJSSet(); }
357
// The following kinds are all laid out as plain fixed arrays and cannot be
// distinguished by instance type.
bool HeapObject::IsDescriptorArray() const { return IsFixedArray(); }

bool HeapObject::IsFrameArray() const { return IsFixedArray(); }

bool HeapObject::IsArrayList() const { return IsFixedArray(); }

bool HeapObject::IsRegExpMatchInfo() const { return IsFixedArray(); }

// Layout descriptors are either Smi-encoded (fast case) or backed by a
// fixed typed array (slow case).
bool Object::IsLayoutDescriptor() const {
  return IsSmi() || IsFixedTypedArrayBase();
}

// Feedback vectors have a dedicated map.
bool HeapObject::IsFeedbackVector() const {
  return map() == GetHeap()->feedback_vector_map();
}

bool HeapObject::IsFeedbackMetadata() const { return IsFixedArray(); }
375
// Heuristic check used by asserts: a plausible DeoptimizationInputData is a
// fixed array whose length fits the header-plus-entries layout.
bool HeapObject::IsDeoptimizationInputData() const {
  // Must be a fixed array.
  if (!IsFixedArray()) return false;

  // There's no sure way to detect the difference between a fixed array and
  // a deoptimization data array. Since this is used for asserts we can
  // check that the length is zero or else the fixed size plus a multiple of
  // the entry size.
  int length = FixedArray::cast(this)->length();
  if (length == 0) return true;

  length -= DeoptimizationInputData::kFirstDeoptEntryIndex;
  return length >= 0 && length % DeoptimizationInputData::kDeoptEntrySize == 0;
}
390
IsDeoptimizationOutputData()391 bool HeapObject::IsDeoptimizationOutputData() const {
392 if (!IsFixedArray()) return false;
393 // There's actually no way to see the difference between a fixed array and
394 // a deoptimization data array. Since this is used for asserts we can check
395 // that the length is plausible though.
396 if (FixedArray::cast(this)->length() % 2 != 0) return false;
397 return true;
398 }
399
IsHandlerTable()400 bool HeapObject::IsHandlerTable() const {
401 if (!IsFixedArray()) return false;
402 // There's actually no way to see the difference between a fixed array and
403 // a handler table array.
404 return true;
405 }
406
IsTemplateList()407 bool HeapObject::IsTemplateList() const {
408 if (!IsFixedArray()) return false;
409 // There's actually no way to see the difference between a fixed array and
410 // a template list.
411 if (FixedArray::cast(this)->length() < 1) return false;
412 return true;
413 }
414
IsDependentCode()415 bool HeapObject::IsDependentCode() const {
416 if (!IsFixedArray()) return false;
417 // There's actually no way to see the difference between a fixed array and
418 // a dependent codes array.
419 return true;
420 }
421
// Contexts of every kind are recognized by their (fixed set of) maps.
bool HeapObject::IsContext() const {
  Map* map = this->map();
  Heap* heap = GetHeap();
  return (
      map == heap->function_context_map() || map == heap->catch_context_map() ||
      map == heap->with_context_map() || map == heap->native_context_map() ||
      map == heap->block_context_map() || map == heap->module_context_map() ||
      map == heap->eval_context_map() || map == heap->script_context_map() ||
      map == heap->debug_evaluate_context_map());
}
432
// Map-identity checks for object kinds that own a dedicated map.
bool HeapObject::IsNativeContext() const {
  return map() == GetHeap()->native_context_map();
}

bool HeapObject::IsScriptContextTable() const {
  return map() == GetHeap()->script_context_table_map();
}

bool HeapObject::IsScopeInfo() const {
  return map() == GetHeap()->scope_info_map();
}

bool HeapObject::IsModuleInfo() const {
  return map() == GetHeap()->module_info_map();
}
448
// Specialization of the generic Is<T> dispatcher.
template <>
inline bool Is<JSFunction>(Object* obj) {
  return obj->IsJSFunction();
}

// AbstractCode is either interpreter bytecode or machine code.
bool HeapObject::IsAbstractCode() const {
  return IsBytecodeArray() || IsCode();
}
457
// A string wrapper is a JSValue whose boxed primitive is a string.
bool HeapObject::IsStringWrapper() const {
  return IsJSValue() && JSValue::cast(this)->value()->IsString();
}

// Booleans are the oddballs whose kind has no "not boolean" bits set.
bool HeapObject::IsBoolean() const {
  return IsOddball() &&
         ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
}

// Array-buffer views: data views and typed arrays.
bool HeapObject::IsJSArrayBufferView() const {
  return IsJSDataView() || IsJSTypedArray();
}

// Specialization of the generic Is<T> dispatcher.
template <>
inline bool Is<JSArray>(Object* obj) {
  return obj->IsJSArray();
}
475
// Generic hash tables all share one map.
bool HeapObject::IsHashTable() const {
  return map() == GetHeap()->hash_table_map();
}

bool HeapObject::IsWeakHashTable() const { return IsHashTable(); }

// Every hash table except the string table counts as a dictionary.
bool HeapObject::IsDictionary() const {
  return IsHashTable() && this != GetHeap()->string_table();
}

bool Object::IsNameDictionary() const { return IsDictionary(); }

bool Object::IsGlobalDictionary() const { return IsDictionary(); }

bool Object::IsSeededNumberDictionary() const { return IsDictionary(); }

// Unseeded number dictionaries have their own dedicated map.
bool HeapObject::IsUnseededNumberDictionary() const {
  return map() == GetHeap()->unseeded_number_dictionary_map();
}

bool HeapObject::IsStringTable() const { return IsHashTable(); }

bool HeapObject::IsStringSet() const { return IsHashTable(); }

bool HeapObject::IsObjectHashSet() const { return IsHashTable(); }

// Normalized map caches are recognized structurally; see
// NormalizedMapCache::IsNormalizedMapCache below.
bool HeapObject::IsNormalizedMapCache() const {
  return NormalizedMapCache::IsNormalizedMapCache(this);
}
505
GetIndex(Handle<Map> map)506 int NormalizedMapCache::GetIndex(Handle<Map> map) {
507 return map->Hash() % NormalizedMapCache::kEntries;
508 }
509
// Structural check: a normalized map cache is a fixed array with exactly
// kEntries slots. Under VERIFY_HEAP the candidate is additionally verified.
bool NormalizedMapCache::IsNormalizedMapCache(const HeapObject* obj) {
  if (!obj->IsFixedArray()) return false;
  if (FixedArray::cast(obj)->length() != NormalizedMapCache::kEntries) {
    return false;
  }
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    reinterpret_cast<NormalizedMapCache*>(const_cast<HeapObject*>(obj))
        ->NormalizedMapCacheVerify();
  }
#endif
  return true;
}
523
// More hash-table flavors that share the generic hash-table map.
bool HeapObject::IsCompilationCacheTable() const { return IsHashTable(); }

bool HeapObject::IsCodeCacheHashTable() const { return IsHashTable(); }

bool HeapObject::IsMapCache() const { return IsHashTable(); }

bool HeapObject::IsObjectHashTable() const { return IsHashTable(); }

// Ordered hash tables have a dedicated map.
bool HeapObject::IsOrderedHashTable() const {
  return map() == GetHeap()->ordered_hash_table_map();
}

bool Object::IsOrderedHashSet() const { return IsOrderedHashTable(); }

bool Object::IsOrderedHashMap() const { return IsOrderedHashTable(); }

// Primitives: Smis plus heap objects with a primitive map.
bool Object::IsPrimitive() const {
  return IsSmi() || HeapObject::cast(this)->map()->IsPrimitiveMap();
}
543
// Global proxies are also expected to carry the access-check bit on their
// map; the DCHECK enforces that invariant.
bool HeapObject::IsJSGlobalProxy() const {
  bool result = map()->instance_type() == JS_GLOBAL_PROXY_TYPE;
  DCHECK(!result || map()->is_access_check_needed());
  return result;
}
549
bool HeapObject::IsUndetectable() const { return map()->is_undetectable(); }

// For global proxies the answer depends on whether the proxy is detached
// from the current global object, not just on the map bit.
bool HeapObject::IsAccessCheckNeeded() const {
  if (IsJSGlobalProxy()) {
    const JSGlobalProxy* proxy = JSGlobalProxy::cast(this);
    JSGlobalObject* global = proxy->GetIsolate()->context()->global_object();
    return proxy->IsDetachedFrom(global);
  }
  return map()->is_access_check_needed();
}
560
// True for any of the Struct subtypes enumerated in STRUCT_LIST.
bool HeapObject::IsStruct() const {
  switch (map()->instance_type()) {
#define MAKE_STRUCT_CASE(NAME, Name, name) \
  case NAME##_TYPE:                        \
    return true;
    STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    default:
      return false;
  }
}

// Per-struct predicates on Object (Smi-safe) and on HeapObject.
#define MAKE_STRUCT_PREDICATE(NAME, Name, name)                  \
  bool Object::Is##Name() const {                                \
    return IsHeapObject() && HeapObject::cast(this)->Is##Name(); \
  }                                                              \
  bool HeapObject::Is##Name() const {                            \
    return map()->instance_type() == NAME##_TYPE;                \
  }
STRUCT_LIST(MAKE_STRUCT_PREDICATE)
#undef MAKE_STRUCT_PREDICATE
582
// Numeric value of a Smi or HeapNumber; the caller must ensure IsNumber().
double Object::Number() const {
  DCHECK(IsNumber());
  return IsSmi()
             ? static_cast<double>(reinterpret_cast<const Smi*>(this)->value())
             : reinterpret_cast<const HeapNumber*>(this)->value();
}

// Smis can never be NaN, so only HeapNumbers need checking.
bool Object::IsNaN() const {
  return this->IsHeapNumber() && std::isnan(HeapNumber::cast(this)->value());
}

// -0.0 only exists boxed; a Smi zero is +0.
bool Object::IsMinusZero() const {
  return this->IsHeapNumber() &&
         i::IsMinusZero(HeapNumber::cast(this)->value());
}
598
// ------------------------------------
// Cast operations

// Defines type::cast(Object*) (plus a const overload) as a checked
// reinterpret_cast; the type check is a SLOW_DCHECK, i.e. debug-only.
#define CAST_ACCESSOR(type)                       \
  type* type::cast(Object* object) {              \
    SLOW_DCHECK(object->Is##type());              \
    return reinterpret_cast<type*>(object);       \
  }                                               \
  const type* type::cast(const Object* object) {  \
    SLOW_DCHECK(object->Is##type());              \
    return reinterpret_cast<const type*>(object); \
  }

CAST_ACCESSOR(AbstractCode)
CAST_ACCESSOR(ArrayList)
CAST_ACCESSOR(BoilerplateDescription)
CAST_ACCESSOR(ByteArray)
CAST_ACCESSOR(BytecodeArray)
CAST_ACCESSOR(Cell)
CAST_ACCESSOR(Code)
CAST_ACCESSOR(CodeCacheHashTable)
CAST_ACCESSOR(CompilationCacheTable)
CAST_ACCESSOR(ConsString)
CAST_ACCESSOR(DeoptimizationInputData)
CAST_ACCESSOR(DeoptimizationOutputData)
CAST_ACCESSOR(DependentCode)
CAST_ACCESSOR(DescriptorArray)
CAST_ACCESSOR(ExternalOneByteString)
CAST_ACCESSOR(ExternalString)
CAST_ACCESSOR(ExternalTwoByteString)
CAST_ACCESSOR(FixedArray)
CAST_ACCESSOR(FixedArrayBase)
CAST_ACCESSOR(FixedDoubleArray)
CAST_ACCESSOR(FixedTypedArrayBase)
CAST_ACCESSOR(Foreign)
CAST_ACCESSOR(FrameArray)
CAST_ACCESSOR(GlobalDictionary)
CAST_ACCESSOR(HandlerTable)
CAST_ACCESSOR(HeapObject)
CAST_ACCESSOR(JSArray)
CAST_ACCESSOR(JSArrayBuffer)
CAST_ACCESSOR(JSArrayBufferView)
CAST_ACCESSOR(JSBoundFunction)
CAST_ACCESSOR(JSDataView)
CAST_ACCESSOR(JSDate)
CAST_ACCESSOR(JSFunction)
CAST_ACCESSOR(JSGeneratorObject)
CAST_ACCESSOR(JSGlobalObject)
CAST_ACCESSOR(JSGlobalProxy)
CAST_ACCESSOR(JSMap)
CAST_ACCESSOR(JSMapIterator)
CAST_ACCESSOR(JSMessageObject)
CAST_ACCESSOR(JSModuleNamespace)
CAST_ACCESSOR(JSObject)
CAST_ACCESSOR(JSProxy)
CAST_ACCESSOR(JSReceiver)
CAST_ACCESSOR(JSRegExp)
CAST_ACCESSOR(JSPromiseCapability)
CAST_ACCESSOR(JSPromise)
CAST_ACCESSOR(JSSet)
CAST_ACCESSOR(JSSetIterator)
CAST_ACCESSOR(JSAsyncFromSyncIterator)
CAST_ACCESSOR(JSStringIterator)
CAST_ACCESSOR(JSArrayIterator)
CAST_ACCESSOR(JSTypedArray)
CAST_ACCESSOR(JSValue)
CAST_ACCESSOR(JSWeakCollection)
CAST_ACCESSOR(JSWeakMap)
CAST_ACCESSOR(JSWeakSet)
CAST_ACCESSOR(LayoutDescriptor)
CAST_ACCESSOR(Map)
CAST_ACCESSOR(ModuleInfo)
CAST_ACCESSOR(Name)
CAST_ACCESSOR(NameDictionary)
CAST_ACCESSOR(NormalizedMapCache)
CAST_ACCESSOR(Object)
CAST_ACCESSOR(ObjectHashTable)
CAST_ACCESSOR(ObjectHashSet)
CAST_ACCESSOR(Oddball)
CAST_ACCESSOR(OrderedHashMap)
CAST_ACCESSOR(OrderedHashSet)
CAST_ACCESSOR(PropertyCell)
CAST_ACCESSOR(TemplateList)
CAST_ACCESSOR(RegExpMatchInfo)
CAST_ACCESSOR(ScopeInfo)
CAST_ACCESSOR(SeededNumberDictionary)
CAST_ACCESSOR(SeqOneByteString)
CAST_ACCESSOR(SeqString)
CAST_ACCESSOR(SeqTwoByteString)
CAST_ACCESSOR(SharedFunctionInfo)
CAST_ACCESSOR(SlicedString)
CAST_ACCESSOR(Smi)
CAST_ACCESSOR(String)
CAST_ACCESSOR(StringSet)
CAST_ACCESSOR(StringTable)
CAST_ACCESSOR(Struct)
CAST_ACCESSOR(Symbol)
CAST_ACCESSOR(TemplateInfo)
CAST_ACCESSOR(ThinString)
CAST_ACCESSOR(UnseededNumberDictionary)
CAST_ACCESSOR(WeakCell)
CAST_ACCESSOR(WeakFixedArray)
CAST_ACCESSOR(WeakHashTable)

// One cast accessor per Struct subtype.
#define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
STRUCT_LIST(MAKE_STRUCT_CAST)
#undef MAKE_STRUCT_CAST

#undef CAST_ACCESSOR
708
709 bool Object::HasValidElements() {
710 // Dictionary is covered under FixedArray.
711 return IsFixedArray() || IsFixedDoubleArray() || IsFixedTypedArrayBase();
712 }
713
// Compares two property keys (names or array-index numbers). A number key
// matches a name only if the name parses to that same array index.
bool Object::KeyEquals(Object* second) {
  Object* first = this;
  if (second->IsNumber()) {
    if (first->IsNumber()) return first->Number() == second->Number();
    // Normalize so that a number key, if any, ends up in |first|.
    Object* temp = first;
    first = second;
    second = temp;
  }
  if (first->IsNumber()) {
    DCHECK_LE(0, first->Number());
    uint32_t expected = static_cast<uint32_t>(first->Number());
    uint32_t index;
    return Name::cast(second)->AsArrayIndex(&index) && index == expected;
  }
  return Name::cast(first)->Equals(Name::cast(second));
}
730
FilterKey(PropertyFilter filter)731 bool Object::FilterKey(PropertyFilter filter) {
732 if (IsSymbol()) {
733 if (filter & SKIP_SYMBOLS) return true;
734 if (Symbol::cast(this)->is_private()) return true;
735 } else {
736 if (filter & SKIP_STRINGS) return true;
737 }
738 return false;
739 }
740
// Allocates fresh field storage for |object| under |representation|: double
// fields get a new mutable HeapNumber (hole-NaN when uninitialized), all
// other representations store the value as-is.
Handle<Object> Object::NewStorageFor(Isolate* isolate, Handle<Object> object,
                                     Representation representation) {
  if (!representation.IsDouble()) return object;
  Handle<HeapNumber> result = isolate->factory()->NewHeapNumber(MUTABLE);
  if (object->IsUninitialized(isolate)) {
    result->set_value_as_bits(kHoleNanInt64);
  } else if (object->IsMutableHeapNumber()) {
    // Ensure that all bits of the double value are preserved.
    result->set_value_as_bits(HeapNumber::cast(*object)->value_as_bits());
  } else {
    result->set_value(object->Number());
  }
  return result;
}
755
// Wraps a double field value read from an object into a fresh (immutable)
// HeapNumber; non-double representations pass through unchanged.
Handle<Object> Object::WrapForRead(Isolate* isolate, Handle<Object> object,
                                   Representation representation) {
  DCHECK(!object->IsUninitialized(isolate));
  if (!representation.IsDouble()) {
    DCHECK(object->FitsRepresentation(representation));
    return object;
  }
  return isolate->factory()->NewHeapNumber(HeapNumber::cast(*object)->value());
}
765
// StringShape caches a string's instance-type bits so that repeated
// representation checks avoid reloading the map.
StringShape::StringShape(const String* str)
    : type_(str->map()->instance_type()) {
  set_valid();
  DCHECK((type_ & kIsNotStringMask) == kStringTag);
}

StringShape::StringShape(Map* map) : type_(map->instance_type()) {
  set_valid();
  DCHECK((type_ & kIsNotStringMask) == kStringTag);
}

StringShape::StringShape(InstanceType t) : type_(static_cast<uint32_t>(t)) {
  set_valid();
  DCHECK((type_ & kIsNotStringMask) == kStringTag);
}
781
// String tag with the "not internalized" bit cleared.
bool StringShape::IsInternalized() {
  DCHECK(valid());
  STATIC_ASSERT(kNotInternalizedTag != 0);
  return (type_ & (kIsNotStringMask | kIsNotInternalizedMask)) ==
         (kStringTag | kInternalizedTag);
}
788
// Encoding of this string object itself; a cons/sliced wrapper may differ
// from the string it points at (see the *Underneath variants below).
bool String::IsOneByteRepresentation() const {
  uint32_t type = map()->instance_type();
  return (type & kStringEncodingMask) == kOneByteStringTag;
}

bool String::IsTwoByteRepresentation() const {
  uint32_t type = map()->instance_type();
  return (type & kStringEncodingMask) == kTwoByteStringTag;
}
798
// Encoding of the actual character storage. Requires a flat string, so at
// most one level of cons/slice indirection has to be followed.
bool String::IsOneByteRepresentationUnderneath() {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kIsIndirectStringTag != 0);
  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
  DCHECK(IsFlat());
  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kOneByteStringTag:
      return true;
    case kTwoByteStringTag:
      return false;
    default:  // Cons or sliced string.  Need to go deeper.
      return GetUnderlying()->IsOneByteRepresentation();
  }
}

bool String::IsTwoByteRepresentationUnderneath() {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kIsIndirectStringTag != 0);
  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
  DCHECK(IsFlat());
  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kOneByteStringTag:
      return false;
    case kTwoByteStringTag:
      return true;
    default:  // Cons or sliced string.  Need to go deeper.
      return GetUnderlying()->IsTwoByteRepresentation();
  }
}
828
// True when the string is known to contain only one-byte characters, either
// by representation or via the one-byte-data hint bit.
bool String::HasOnlyOneByteChars() {
  uint32_t type = map()->instance_type();
  return (type & kOneByteDataHintMask) == kOneByteDataHintTag ||
         IsOneByteRepresentation();
}
834
IsCons()835 bool StringShape::IsCons() {
836 return (type_ & kStringRepresentationMask) == kConsStringTag;
837 }
838
IsThin()839 bool StringShape::IsThin() {
840 return (type_ & kStringRepresentationMask) == kThinStringTag;
841 }
842
IsSliced()843 bool StringShape::IsSliced() {
844 return (type_ & kStringRepresentationMask) == kSlicedStringTag;
845 }
846
IsIndirect()847 bool StringShape::IsIndirect() {
848 return (type_ & kIsIndirectStringMask) == kIsIndirectStringTag;
849 }
850
IsExternal()851 bool StringShape::IsExternal() {
852 return (type_ & kStringRepresentationMask) == kExternalStringTag;
853 }
854
IsSequential()855 bool StringShape::IsSequential() {
856 return (type_ & kStringRepresentationMask) == kSeqStringTag;
857 }
858
// Low bits: the representation (seq/cons/external/sliced/thin).
StringRepresentationTag StringShape::representation_tag() {
  uint32_t tag = (type_ & kStringRepresentationMask);
  return static_cast<StringRepresentationTag>(tag);
}

// Encoding bit: one-byte vs. two-byte.
uint32_t StringShape::encoding_tag() { return type_ & kStringEncodingMask; }

// Representation and encoding bits combined.
uint32_t StringShape::full_representation_tag() {
  return (type_ & (kStringRepresentationMask | kStringEncodingMask));
}
869
// The public-API constants in include/v8.h must stay in sync with the
// internal tag layout.
STATIC_ASSERT((kStringRepresentationMask | kStringEncodingMask) ==
              Internals::kFullStringRepresentationMask);

STATIC_ASSERT(static_cast<uint32_t>(kStringEncodingMask) ==
              Internals::kStringEncodingMask);

// Combined representation + encoding predicates.
bool StringShape::IsSequentialOneByte() {
  return full_representation_tag() == (kSeqStringTag | kOneByteStringTag);
}

bool StringShape::IsSequentialTwoByte() {
  return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);
}

bool StringShape::IsExternalOneByte() {
  return full_representation_tag() == (kExternalStringTag | kOneByteStringTag);
}

STATIC_ASSERT((kExternalStringTag | kOneByteStringTag) ==
              Internals::kExternalOneByteRepresentationTag);

STATIC_ASSERT(v8::String::ONE_BYTE_ENCODING == kOneByteStringTag);

bool StringShape::IsExternalTwoByte() {
  return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
}

STATIC_ASSERT((kExternalStringTag | kTwoByteStringTag) ==
              Internals::kExternalTwoByteRepresentationTag);

STATIC_ASSERT(v8::String::TWO_BYTE_ENCODING == kTwoByteStringTag);
901
// Reads the character at |index|, dispatching on the encoding recorded at
// construction time.
uc32 FlatStringReader::Get(int index) {
  if (is_one_byte_) {
    return Get<uint8_t>(index);
  } else {
    return Get<uc16>(index);
  }
}

// Typed read from the flat character buffer. Char must match the reader's
// encoding (checked by the DCHECK_EQ below).
template <typename Char>
Char FlatStringReader::Get(int index) {
  DCHECK_EQ(is_one_byte_, sizeof(Char) == 1);
  // NOTE(review): the upper bound is `index <= length_`, not `<` — this
  // permits reading one slot past the last character; confirm that callers
  // depend on this before tightening it.
  DCHECK(0 <= index && index <= length_);
  if (sizeof(Char) == 1) {
    return static_cast<Char>(static_cast<const uint8_t*>(start_)[index]);
  } else {
    return static_cast<Char>(static_cast<const uc16*>(start_)[index]);
  }
}
920
// The *Shape::AsHandle helpers below all delegate to the key object itself;
// the HashTableKey knows how to materialize a heap object for insertion.
Handle<Object> StringTableShape::AsHandle(Isolate* isolate, HashTableKey* key) {
  return key->AsHandle(isolate);
}

Handle<Object> CompilationCacheShape::AsHandle(Isolate* isolate,
                                               HashTableKey* key) {
  return key->AsHandle(isolate);
}

Handle<Object> CodeCacheHashTableShape::AsHandle(Isolate* isolate,
                                                 HashTableKey* key) {
  return key->AsHandle(isolate);
}
934
// Hash-table key wrapping a raw character vector; Char is uint8_t or uc16
// (see the OneByteStringKey / TwoByteStringKey subclasses).
template <typename Char>
class SequentialStringKey : public HashTableKey {
 public:
  explicit SequentialStringKey(Vector<const Char> string, uint32_t seed)
      : string_(string), hash_field_(0), seed_(seed) {}

  // Computes the hash of the character vector and caches the full hash
  // field in hash_field_.
  uint32_t Hash() override {
    hash_field_ = StringHasher::HashSequentialString<Char>(
        string_.start(), string_.length(), seed_);

    uint32_t result = hash_field_ >> String::kHashShift;
    DCHECK(result != 0);  // Ensure that the hash value of 0 is never computed.
    return result;
  }

  // Existing table entries are Strings; reuse their stored hash.
  uint32_t HashForObject(Object* other) override {
    return String::cast(other)->Hash();
  }

  Vector<const Char> string_;
  uint32_t hash_field_;  // Full hash field; 0 means not yet computed.
  uint32_t seed_;        // Hash seed supplied by the caller.
};
958
// Key over a one-byte (Latin-1) character vector.
class OneByteStringKey : public SequentialStringKey<uint8_t> {
 public:
  // NOTE(review): unlike TwoByteStringKey, this constructor is not marked
  // explicit — likely an oversight; confirm no caller relies on implicit
  // construction before changing it.
  OneByteStringKey(Vector<const uint8_t> str, uint32_t seed)
      : SequentialStringKey<uint8_t>(str, seed) {}

  bool IsMatch(Object* string) override {
    return String::cast(string)->IsOneByteEqualTo(string_);
  }

  Handle<Object> AsHandle(Isolate* isolate) override;
};
970
971 class SeqOneByteSubStringKey : public HashTableKey {
972 public:
SeqOneByteSubStringKey(Handle<SeqOneByteString> string,int from,int length)973 SeqOneByteSubStringKey(Handle<SeqOneByteString> string, int from, int length)
974 : string_(string), from_(from), length_(length) {
975 DCHECK(string_->IsSeqOneByteString());
976 }
977
Hash()978 uint32_t Hash() override {
979 DCHECK(length_ >= 0);
980 DCHECK(from_ + length_ <= string_->length());
981 const uint8_t* chars = string_->GetChars() + from_;
982 hash_field_ = StringHasher::HashSequentialString(
983 chars, length_, string_->GetHeap()->HashSeed());
984 uint32_t result = hash_field_ >> String::kHashShift;
985 DCHECK(result != 0); // Ensure that the hash value of 0 is never computed.
986 return result;
987 }
988
HashForObject(Object * other)989 uint32_t HashForObject(Object* other) override {
990 return String::cast(other)->Hash();
991 }
992
993 bool IsMatch(Object* string) override;
994 Handle<Object> AsHandle(Isolate* isolate) override;
995
996 private:
997 Handle<SeqOneByteString> string_;
998 int from_;
999 int length_;
1000 uint32_t hash_field_;
1001 };
1002
// Key over a two-byte (UTF-16 code unit) character vector.
class TwoByteStringKey : public SequentialStringKey<uc16> {
 public:
  explicit TwoByteStringKey(Vector<const uc16> str, uint32_t seed)
      : SequentialStringKey<uc16>(str, seed) {}

  bool IsMatch(Object* string) override {
    return String::cast(string)->IsTwoByteEqualTo(string_);
  }

  Handle<Object> AsHandle(Isolate* isolate) override;
};
1014
// Utf8StringKey carries a vector of chars as key.
class Utf8StringKey : public HashTableKey {
 public:
  explicit Utf8StringKey(Vector<const char> string, uint32_t seed)
      : string_(string), hash_field_(0), seed_(seed) {}

  bool IsMatch(Object* string) override {
    return String::cast(string)->IsUtf8EqualTo(string_);
  }

  // Lazily computes the hash; hash_field_ == 0 is the "not yet computed"
  // sentinel. Also fills chars_ with the decoded character count.
  uint32_t Hash() override {
    if (hash_field_ != 0) return hash_field_ >> String::kHashShift;
    hash_field_ = StringHasher::ComputeUtf8Hash(string_, seed_, &chars_);
    uint32_t result = hash_field_ >> String::kHashShift;
    DCHECK(result != 0);  // Ensure that the hash value of 0 is never computed.
    return result;
  }

  uint32_t HashForObject(Object* other) override {
    return String::cast(other)->Hash();
  }

  // Materializes the key as an internalized string; computes the hash first
  // if needed so the new string gets a valid hash field.
  Handle<Object> AsHandle(Isolate* isolate) override {
    if (hash_field_ == 0) Hash();
    return isolate->factory()->NewInternalizedStringFromUtf8(string_, chars_,
                                                             hash_field_);
  }

  Vector<const char> string_;
  uint32_t hash_field_;  // Cached full hash field; 0 = unset.
  int chars_;  // Caches the number of characters when computing the hash code.
  uint32_t seed_;
};
1048
// Picks the most specific field representation for this value, gated on the
// field-tracking flags. Falls back to Tagged whenever the relevant tracking
// flag is off.
Representation Object::OptimalRepresentation() {
  if (!FLAG_track_fields) return Representation::Tagged();
  if (IsSmi()) {
    return Representation::Smi();
  } else if (FLAG_track_double_fields && IsHeapNumber()) {
    return Representation::Double();
  } else if (FLAG_track_computed_fields &&
             IsUninitialized(HeapObject::cast(this)->GetIsolate())) {
    // Uninitialized sentinel: no value observed yet.
    return Representation::None();
  } else if (FLAG_track_heap_object_fields) {
    DCHECK(IsHeapObject());
    return Representation::HeapObject();
  } else {
    return Representation::Tagged();
  }
}
1065
1066
OptimalElementsKind()1067 ElementsKind Object::OptimalElementsKind() {
1068 if (IsSmi()) return FAST_SMI_ELEMENTS;
1069 if (IsNumber()) return FAST_DOUBLE_ELEMENTS;
1070 return FAST_ELEMENTS;
1071 }
1072
1073
// True if this value can be stored in a field of the given representation
// without a representation change. Note the asymmetry with
// OptimalRepresentation: a Double field accepts any number (mutable heap
// numbers included), and a None field accepts nothing.
bool Object::FitsRepresentation(Representation representation) {
  if (FLAG_track_fields && representation.IsSmi()) {
    return IsSmi();
  } else if (FLAG_track_double_fields && representation.IsDouble()) {
    return IsMutableHeapNumber() || IsNumber();
  } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
    return IsHeapObject();
  } else if (FLAG_track_fields && representation.IsNone()) {
    return false;
  }
  // Tagged (or tracking disabled): everything fits.
  return true;
}
1086
// Extracts an unsigned 32-bit value when this object holds exactly such a
// number. Fails for negative Smis and for heap numbers whose double value
// does not convert losslessly (see DoubleToUint32IfEqualToSelf).
bool Object::ToUint32(uint32_t* value) {
  if (IsSmi()) {
    int num = Smi::cast(this)->value();
    if (num < 0) return false;
    *value = static_cast<uint32_t>(num);
    return true;
  }
  if (IsHeapNumber()) {
    double num = HeapNumber::cast(this)->value();
    return DoubleToUint32IfEqualToSelf(num, value);
  }
  return false;
}
1100
// The Object::To* helpers below correspond to the ECMAScript abstract
// conversion operations. Each has an inline fast path for values that need
// no conversion and defers to an out-of-line ConvertTo* slow path otherwise.

// static
MaybeHandle<JSReceiver> Object::ToObject(Isolate* isolate,
                                         Handle<Object> object) {
  if (object->IsJSReceiver()) return Handle<JSReceiver>::cast(object);
  // Slow path wraps primitives using the current native context.
  return ToObject(isolate, object, isolate->native_context());
}


// static
MaybeHandle<Name> Object::ToName(Isolate* isolate, Handle<Object> input) {
  if (input->IsName()) return Handle<Name>::cast(input);
  return ConvertToName(isolate, input);
}

// static
MaybeHandle<Object> Object::ToPropertyKey(Isolate* isolate,
                                          Handle<Object> value) {
  // Smis (array indices) and Names are already valid property keys.
  if (value->IsSmi() || HeapObject::cast(*value)->IsName()) return value;
  return ConvertToPropertyKey(isolate, value);
}

// static
MaybeHandle<Object> Object::ToPrimitive(Handle<Object> input,
                                        ToPrimitiveHint hint) {
  if (input->IsPrimitive()) return input;
  return JSReceiver::ToPrimitive(Handle<JSReceiver>::cast(input), hint);
}

// static
MaybeHandle<Object> Object::ToNumber(Handle<Object> input) {
  if (input->IsNumber()) return input;
  return ConvertToNumber(HeapObject::cast(*input)->GetIsolate(), input);
}

// static
MaybeHandle<Object> Object::ToInteger(Isolate* isolate, Handle<Object> input) {
  if (input->IsSmi()) return input;
  return ConvertToInteger(isolate, input);
}

// static
MaybeHandle<Object> Object::ToInt32(Isolate* isolate, Handle<Object> input) {
  if (input->IsSmi()) return input;
  return ConvertToInt32(isolate, input);
}

// static
MaybeHandle<Object> Object::ToUint32(Isolate* isolate, Handle<Object> input) {
  // Smi fast path goes through Smi::ToUint32Smi.
  if (input->IsSmi()) return handle(Smi::cast(*input)->ToUint32Smi(), isolate);
  return ConvertToUint32(isolate, input);
}

// static
MaybeHandle<String> Object::ToString(Isolate* isolate, Handle<Object> input) {
  if (input->IsString()) return Handle<String>::cast(input);
  return ConvertToString(isolate, input);
}

// static
MaybeHandle<Object> Object::ToLength(Isolate* isolate, Handle<Object> input) {
  // Fast path: negative Smis clamp to 0; non-negative Smis pass through.
  if (input->IsSmi()) {
    int value = std::max(Smi::cast(*input)->value(), 0);
    return handle(Smi::FromInt(value), isolate);
  }
  return ConvertToLength(isolate, input);
}

// static
MaybeHandle<Object> Object::ToIndex(Isolate* isolate, Handle<Object> input,
                                    MessageTemplate::Template error_index) {
  // error_index selects the message thrown for an invalid index.
  if (input->IsSmi() && Smi::cast(*input)->value() >= 0) return input;
  return ConvertToIndex(isolate, input, error_index);
}
1174
HasSpecificClassOf(String * name)1175 bool Object::HasSpecificClassOf(String* name) {
1176 return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
1177 }
1178
// The wrappers below build a LookupIterator, return undefined when the
// property is absent, and otherwise delegate to the generic accessors.
MaybeHandle<Object> Object::GetProperty(Handle<Object> object,
                                        Handle<Name> name) {
  LookupIterator it(object, name);
  if (!it.IsFound()) return it.factory()->undefined_value();
  return GetProperty(&it);
}

MaybeHandle<Object> JSReceiver::GetProperty(Handle<JSReceiver> receiver,
                                            Handle<Name> name) {
  // |receiver| is passed twice: as lookup start and as the receiver.
  LookupIterator it(receiver, name, receiver);
  if (!it.IsFound()) return it.factory()->undefined_value();
  return Object::GetProperty(&it);
}

MaybeHandle<Object> Object::GetElement(Isolate* isolate, Handle<Object> object,
                                       uint32_t index) {
  LookupIterator it(isolate, object, index);
  if (!it.IsFound()) return it.factory()->undefined_value();
  return GetProperty(&it);
}

MaybeHandle<Object> JSReceiver::GetElement(Isolate* isolate,
                                           Handle<JSReceiver> receiver,
                                           uint32_t index) {
  LookupIterator it(isolate, receiver, index, receiver);
  if (!it.IsFound()) return it.factory()->undefined_value();
  return Object::GetProperty(&it);
}

// Like GetProperty, but the lookup skips interceptors on the prototype
// chain (PROTOTYPE_CHAIN_SKIP_INTERCEPTOR); returns a plain Handle because
// this path cannot throw.
Handle<Object> JSReceiver::GetDataProperty(Handle<JSReceiver> object,
                                           Handle<Name> name) {
  LookupIterator it(object, name, object,
                    LookupIterator::PROTOTYPE_CHAIN_SKIP_INTERCEPTOR);
  if (!it.IsFound()) return it.factory()->undefined_value();
  return GetDataProperty(&it);
}

MaybeHandle<Object> Object::SetElement(Isolate* isolate, Handle<Object> object,
                                       uint32_t index, Handle<Object> value,
                                       LanguageMode language_mode) {
  LookupIterator it(isolate, object, index);
  // MAYBE_RETURN_NULL propagates failure (pending exception) from
  // SetProperty; on success the stored value itself is returned.
  MAYBE_RETURN_NULL(
      SetProperty(&it, value, language_mode, MAY_BE_STORE_FROM_KEYED));
  return value;
}
1224
// Walks to the receiver's (non-hidden) prototype, following proxies. An
// empty MaybeHandle signals that advancing failed — presumably a thrown
// proxy trap; confirm against PrototypeIterator.
MaybeHandle<Object> JSReceiver::GetPrototype(Isolate* isolate,
                                             Handle<JSReceiver> receiver) {
  // We don't expect access checks to be needed on JSProxy objects.
  DCHECK(!receiver->IsAccessCheckNeeded() || receiver->IsJSObject());
  PrototypeIterator iter(isolate, receiver, kStartAtReceiver,
                         PrototypeIterator::END_AT_NON_HIDDEN);
  do {
    if (!iter.AdvanceFollowingProxies()) return MaybeHandle<Object>();
  } while (!iter.IsAtEnd());
  return PrototypeIterator::GetCurrent(iter);
}

// Convenience overload: internalizes the C string and looks it up by name.
MaybeHandle<Object> JSReceiver::GetProperty(Isolate* isolate,
                                            Handle<JSReceiver> receiver,
                                            const char* name) {
  Handle<String> str = isolate->factory()->InternalizeUtf8String(name);
  return GetProperty(receiver, str);
}

// static
// Collects all own property keys (ALL_PROPERTIES) with indices converted
// to strings.
MUST_USE_RESULT MaybeHandle<FixedArray> JSReceiver::OwnPropertyKeys(
    Handle<JSReceiver> object) {
  return KeyAccumulator::GetKeys(object, KeyCollectionMode::kOwnOnly,
                                 ALL_PROPERTIES,
                                 GetKeysConversion::kConvertToString);
}
1251
// True when no object on |object|'s prototype chain can contribute
// elements: every prototype must be above the custom-elements-receiver
// range and have an empty elements backing store.
bool JSObject::PrototypeHasNoElements(Isolate* isolate, JSObject* object) {
  // Raw pointers are held across the walk, so allocation must not happen.
  DisallowHeapAllocation no_gc;
  HeapObject* prototype = HeapObject::cast(object->map()->prototype());
  HeapObject* null = isolate->heap()->null_value();
  HeapObject* empty = isolate->heap()->empty_fixed_array();
  while (prototype != null) {
    Map* map = prototype->map();
    // Types up to LAST_CUSTOM_ELEMENTS_RECEIVER may handle element access
    // specially, so be conservative and bail out.
    if (map->instance_type() <= LAST_CUSTOM_ELEMENTS_RECEIVER) return false;
    if (JSObject::cast(prototype)->elements() != empty) return false;
    prototype = HeapObject::cast(map->prototype());
  }
  return true;
}
1265
// Raw field access helpers. FIELD_ADDR converts a tagged HeapObject pointer
// plus a field offset into an untagged byte address.
#define FIELD_ADDR(p, offset) \
  (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)

#define FIELD_ADDR_CONST(p, offset) \
  (reinterpret_cast<const byte*>(p) + offset - kHeapObjectTag)

#define READ_FIELD(p, offset) \
  (*reinterpret_cast<Object* const*>(FIELD_ADDR_CONST(p, offset)))

// Atomic variants: ACQUIRE/RELEASE pair up (used by the synchronized map
// accessors below); NOBARRIER is a relaxed atomic access.
#define ACQUIRE_READ_FIELD(p, offset) \
  reinterpret_cast<Object*>(base::Acquire_Load( \
      reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR_CONST(p, offset))))

#define NOBARRIER_READ_FIELD(p, offset) \
  reinterpret_cast<Object*>(base::NoBarrier_Load( \
      reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR_CONST(p, offset))))

#define WRITE_FIELD(p, offset, value) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)

// NOTE(review): RELEASE_WRITE_FIELD, NOBARRIER_WRITE_FIELD and
// NOBARRIER_WRITE_BYTE_FIELD end with a semicolon inside the macro body,
// so they are unsafe in unbraced if/else arms and `MACRO(...);` expands to
// a double semicolon. Left as-is since call sites may omit their own ';'.
#define RELEASE_WRITE_FIELD(p, offset, value) \
  base::Release_Store( \
      reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
      reinterpret_cast<base::AtomicWord>(value));

#define NOBARRIER_WRITE_FIELD(p, offset, value) \
  base::NoBarrier_Store( \
      reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
      reinterpret_cast<base::AtomicWord>(value));

// Write-barrier helpers: notify the incremental marker and the heap's
// remembered set about stores of heap pointers.
#define WRITE_BARRIER(heap, object, offset, value) \
  heap->incremental_marking()->RecordWrite( \
      object, HeapObject::RawField(object, offset), value); \
  heap->RecordWrite(object, offset, value);

#define FIXED_ARRAY_ELEMENTS_WRITE_BARRIER(heap, array, start, length) \
  do { \
    heap->RecordFixedArrayElements(array, start, length); \
    heap->incremental_marking()->IterateBlackObject(array); \
  } while (false)

#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode) \
  if (mode != SKIP_WRITE_BARRIER) { \
    if (mode == UPDATE_WRITE_BARRIER) { \
      heap->incremental_marking()->RecordWrite( \
          object, HeapObject::RawField(object, offset), value); \
    } \
    heap->RecordWrite(object, offset, value); \
  }

// Typed raw accessors for untagged (non-Object) fields.
#define READ_DOUBLE_FIELD(p, offset) \
  ReadDoubleValue(FIELD_ADDR_CONST(p, offset))

#define WRITE_DOUBLE_FIELD(p, offset, value) \
  WriteDoubleValue(FIELD_ADDR(p, offset), value)

#define READ_INT_FIELD(p, offset) \
  (*reinterpret_cast<const int*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INT_FIELD(p, offset, value) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)

#define READ_INTPTR_FIELD(p, offset) \
  (*reinterpret_cast<const intptr_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INTPTR_FIELD(p, offset, value) \
  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_UINT8_FIELD(p, offset) \
  (*reinterpret_cast<const uint8_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_UINT8_FIELD(p, offset, value) \
  (*reinterpret_cast<uint8_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT8_FIELD(p, offset) \
  (*reinterpret_cast<const int8_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INT8_FIELD(p, offset, value) \
  (*reinterpret_cast<int8_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_UINT16_FIELD(p, offset) \
  (*reinterpret_cast<const uint16_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_UINT16_FIELD(p, offset, value) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT16_FIELD(p, offset) \
  (*reinterpret_cast<const int16_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INT16_FIELD(p, offset, value) \
  (*reinterpret_cast<int16_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_UINT32_FIELD(p, offset) \
  (*reinterpret_cast<const uint32_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_UINT32_FIELD(p, offset, value) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT32_FIELD(p, offset) \
  (*reinterpret_cast<const int32_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INT32_FIELD(p, offset, value) \
  (*reinterpret_cast<int32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_FLOAT_FIELD(p, offset) \
  (*reinterpret_cast<const float*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_FLOAT_FIELD(p, offset, value) \
  (*reinterpret_cast<float*>(FIELD_ADDR(p, offset)) = value)

#define READ_UINT64_FIELD(p, offset) \
  (*reinterpret_cast<const uint64_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_UINT64_FIELD(p, offset, value) \
  (*reinterpret_cast<uint64_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT64_FIELD(p, offset) \
  (*reinterpret_cast<const int64_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INT64_FIELD(p, offset, value) \
  (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_BYTE_FIELD(p, offset) \
  (*reinterpret_cast<const byte*>(FIELD_ADDR_CONST(p, offset)))

#define NOBARRIER_READ_BYTE_FIELD(p, offset) \
  static_cast<byte>(base::NoBarrier_Load( \
      reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset))))

#define WRITE_BYTE_FIELD(p, offset, value) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)

#define NOBARRIER_WRITE_BYTE_FIELD(p, offset, value) \
  base::NoBarrier_Store( \
      reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset)), \
      static_cast<base::Atomic8>(value));
1402
// Returns the address of a tagged field as an Object** slot.
Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
  return reinterpret_cast<Object**>(FIELD_ADDR(obj, byte_offset));
}


// A MapWord either holds the object's map pointer or, during GC, a
// forwarding address to the object's new location.
MapWord MapWord::FromMap(const Map* map) {
  return MapWord(reinterpret_cast<uintptr_t>(map));
}


Map* MapWord::ToMap() {
  return reinterpret_cast<Map*>(value_);
}

// Forwarding addresses are Smi-tagged (see FromForwardingAddress), which
// distinguishes them from heap-object-tagged map pointers.
bool MapWord::IsForwardingAddress() const {
  return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
}


MapWord MapWord::FromForwardingAddress(HeapObject* object) {
  // Stripping the heap-object tag leaves a Smi-tagged word.
  Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
  return MapWord(reinterpret_cast<uintptr_t>(raw));
}


HeapObject* MapWord::ToForwardingAddress() {
  DCHECK(IsForwardingAddress());
  return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
}
1432
1433
#ifdef VERIFY_HEAP
// Debug-build checks that a field holds a valid pointer / a Smi.
void HeapObject::VerifyObjectField(int offset) {
  VerifyPointer(READ_FIELD(this, offset));
}

void HeapObject::VerifySmiField(int offset) {
  CHECK(READ_FIELD(this, offset)->IsSmi());
}
#endif
1443
1444
// Derives the owning Heap from the object's address: the enclosing
// MemoryChunk's header records its heap.
Heap* HeapObject::GetHeap() const {
  Heap* heap = MemoryChunk::FromAddress(
                   reinterpret_cast<Address>(const_cast<HeapObject*>(this)))
                   ->heap();
  SLOW_DCHECK(heap != NULL);
  return heap;
}


Isolate* HeapObject::GetIsolate() const {
  return GetHeap()->isolate();
}
1457
1458
Map* HeapObject::map() const {
#ifdef DEBUG
  // Clear mark potentially added by PathTracer.
  uintptr_t raw_value =
      map_word().ToRawValue() & ~static_cast<uintptr_t>(PathTracer::kMarkTag);
  return MapWord::FromRawValue(raw_value).ToMap();
#else
  return map_word().ToMap();
#endif
}


// Installs a new map and performs the incremental-marking write barrier
// for the map pointer.
void HeapObject::set_map(Map* value) {
  set_map_word(MapWord::FromMap(value));
  if (value != nullptr) {
    // TODO(1600) We are passing NULL as a slot because maps can never be on
    // evacuation candidate.
    value->GetHeap()->incremental_marking()->RecordWrite(this, nullptr, value);
#ifdef VERIFY_HEAP
    value->GetHeap()->VerifyObjectLayoutChange(this, value);
#endif
  }
}
1482
1483
// The synchronized_* accessors below go through acquire/release atomics
// (ACQUIRE_READ_FIELD / RELEASE_WRITE_FIELD) for map accesses that may race
// with other threads; the plain accessors use relaxed (NOBARRIER) atomics.
Map* HeapObject::synchronized_map() {
  return synchronized_map_word().ToMap();
}


void HeapObject::synchronized_set_map(Map* value) {
  synchronized_set_map_word(MapWord::FromMap(value));
  if (value != nullptr) {
    // TODO(1600) We are passing NULL as a slot because maps can never be on
    // evacuation candidate.
    value->GetHeap()->incremental_marking()->RecordWrite(this, nullptr, value);
#ifdef VERIFY_HEAP
    value->GetHeap()->VerifyObjectLayoutChange(this, value);
#endif
  }
}


// Release-store of the map without the marking write barrier.
void HeapObject::synchronized_set_map_no_write_barrier(Map* value) {
  synchronized_set_map_word(MapWord::FromMap(value));
}


// Unsafe accessor omitting write barrier.
void HeapObject::set_map_no_write_barrier(Map* value) {
  set_map_word(MapWord::FromMap(value));
}


MapWord HeapObject::map_word() const {
  return MapWord(
      reinterpret_cast<uintptr_t>(NOBARRIER_READ_FIELD(this, kMapOffset)));
}


void HeapObject::set_map_word(MapWord map_word) {
  NOBARRIER_WRITE_FIELD(
      this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
}


MapWord HeapObject::synchronized_map_word() const {
  return MapWord(
      reinterpret_cast<uintptr_t>(ACQUIRE_READ_FIELD(this, kMapOffset)));
}


void HeapObject::synchronized_set_map_word(MapWord map_word) {
  RELEASE_WRITE_FIELD(
      this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
}


// Object size in bytes, computed from the map.
int HeapObject::Size() {
  return SizeFromMap(map());
}
1540
1541
double HeapNumber::value() const {
  return READ_DOUBLE_FIELD(this, kValueOffset);
}


void HeapNumber::set_value(double value) {
  WRITE_DOUBLE_FIELD(this, kValueOffset, value);
}

// Raw 64-bit pattern of the stored double (bit-exact, no conversion).
uint64_t HeapNumber::value_as_bits() const {
  return READ_UINT64_FIELD(this, kValueOffset);
}

void HeapNumber::set_value_as_bits(uint64_t bits) {
  WRITE_UINT64_FIELD(this, kValueOffset, bits);
}

// Unbiased exponent extracted from the word at kExponentOffset.
int HeapNumber::get_exponent() {
  return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
          kExponentShift) - kExponentBias;
}


// Masks out the sign bit; non-zero iff the sign bit is set (not 0/1).
int HeapNumber::get_sign() {
  return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
}
1568
// Accessor pair for the named-properties backing store of a JSReceiver.
ACCESSORS(JSReceiver, properties, FixedArray, kPropertiesOffset)


Object** FixedArray::GetFirstElementAddress() {
  return reinterpret_cast<Object**>(FIELD_ADDR(this, OffsetOfElementAt(0)));
}


// Linear scan: true when every element is either a Smi or the hole value.
bool FixedArray::ContainsOnlySmisOrHoles() {
  Object* the_hole = GetHeap()->the_hole_value();
  Object** current = GetFirstElementAddress();
  for (int i = 0; i < length(); ++i) {
    Object* candidate = *current++;
    if (!candidate->IsSmi() && candidate != the_hole) return false;
  }
  return true;
}


FixedArrayBase* JSObject::elements() const {
  Object* array = READ_FIELD(this, kElementsOffset);
  return static_cast<FixedArrayBase*>(array);
}
1592
1593
// Resets the site to a pristine state: zeroed transition info with the
// initial fast elements kind, no nested site, and cleared pretenuring
// counters.
void AllocationSite::Initialize() {
  set_transition_info(Smi::kZero);
  SetElementsKind(GetInitialFastElementsKind());
  set_nested_site(Smi::kZero);
  set_pretenure_data(0);
  set_pretenure_create_count(0);
  set_dependent_code(DependentCode::cast(GetHeap()->empty_fixed_array()),
                     SKIP_WRITE_BARRIER);
}


bool AllocationSite::IsZombie() { return pretenure_decision() == kZombie; }


bool AllocationSite::IsMaybeTenure() {
  return pretenure_decision() == kMaybeTenure;
}


bool AllocationSite::PretenuringDecisionMade() {
  return pretenure_decision() != kUndecided;
}


// A zombie site is fully reset, keeping only the kZombie decision marker.
void AllocationSite::MarkZombie() {
  DCHECK(!IsZombie());
  Initialize();
  set_pretenure_decision(kZombie);
}


// transition_info stores an ElementsKind (plus flag bits) as a Smi when the
// site tracks a constructed array; see SitePointsToLiteral for the
// boilerplate case.
ElementsKind AllocationSite::GetElementsKind() {
  DCHECK(!SitePointsToLiteral());
  int value = Smi::cast(transition_info())->value();
  return ElementsKindBits::decode(value);
}


void AllocationSite::SetElementsKind(ElementsKind kind) {
  int value = Smi::cast(transition_info())->value();
  set_transition_info(Smi::FromInt(ElementsKindBits::update(value, kind)),
                      SKIP_WRITE_BARRIER);
}


bool AllocationSite::CanInlineCall() {
  int value = Smi::cast(transition_info())->value();
  return DoNotInlineBit::decode(value) == 0;
}


// One-way switch: once set, CanInlineCall() stays false.
void AllocationSite::SetDoNotInlineCall() {
  int value = Smi::cast(transition_info())->value();
  set_transition_info(Smi::FromInt(DoNotInlineBit::update(value, true)),
                      SKIP_WRITE_BARRIER);
}


bool AllocationSite::SitePointsToLiteral() {
  // If transition_info is a smi, then it represents an ElementsKind
  // for a constructed array. Otherwise, it must be a boilerplate
  // for an object or array literal.
  return transition_info()->IsJSArray() || transition_info()->IsJSObject();
}
1658
1659
// Heuristic: We only need to create allocation site info if the boilerplate
// elements kind is the initial elements kind.
AllocationSiteMode AllocationSite::GetMode(
    ElementsKind boilerplate_elements_kind) {
  if (IsFastSmiElementsKind(boilerplate_elements_kind)) {
    return TRACK_ALLOCATION_SITE;
  }

  return DONT_TRACK_ALLOCATION_SITE;
}

// Which instance types are worth tracking with an allocation site.
inline bool AllocationSite::CanTrack(InstanceType type) {
  if (FLAG_turbo) {
    // TurboFan doesn't care at all about String pretenuring feedback,
    // so don't bother even trying to track that.
    return type == JS_ARRAY_TYPE || type == JS_OBJECT_TYPE;
  }
  if (FLAG_allocation_site_pretenuring) {
    // type < FIRST_NONSTRING_TYPE additionally covers all string types.
    return type == JS_ARRAY_TYPE ||
           type == JS_OBJECT_TYPE ||
           type < FIRST_NONSTRING_TYPE;
  }
  return type == JS_ARRAY_TYPE;
}
1684
1685
// pretenure_data is a packed bit field; the accessors below decode/update
// its PretenureDecisionBits, DeoptDependentCodeBit and
// MementoFoundCountBits components.
AllocationSite::PretenureDecision AllocationSite::pretenure_decision() {
  int value = pretenure_data();
  return PretenureDecisionBits::decode(value);
}


void AllocationSite::set_pretenure_decision(PretenureDecision decision) {
  int value = pretenure_data();
  set_pretenure_data(PretenureDecisionBits::update(value, decision));
}


bool AllocationSite::deopt_dependent_code() {
  int value = pretenure_data();
  return DeoptDependentCodeBit::decode(value);
}


void AllocationSite::set_deopt_dependent_code(bool deopt) {
  int value = pretenure_data();
  set_pretenure_data(DeoptDependentCodeBit::update(value, deopt));
}


int AllocationSite::memento_found_count() {
  int value = pretenure_data();
  return MementoFoundCountBits::decode(value);
}


inline void AllocationSite::set_memento_found_count(int count) {
  int value = pretenure_data();
  // Verify that we can count more mementos than we can possibly find in one
  // new space collection.
  DCHECK((GetHeap()->MaxSemiSpaceSize() /
          (Heap::kMinObjectSizeInWords * kPointerSize +
           AllocationMemento::kSize)) < MementoFoundCountBits::kMax);
  DCHECK(count < MementoFoundCountBits::kMax);
  set_pretenure_data(MementoFoundCountBits::update(value, count));
}


// Unlike the found count, the create count is a separate field rather than
// packed into pretenure_data.
int AllocationSite::memento_create_count() { return pretenure_create_count(); }


void AllocationSite::set_memento_create_count(int count) {
  set_pretenure_create_count(count);
}


// Adds |increment| to the found count (no-op on zombie sites) and reports
// whether the threshold for considering pretenuring has been reached.
bool AllocationSite::IncrementMementoFoundCount(int increment) {
  if (IsZombie()) return false;

  int value = memento_found_count();
  set_memento_found_count(value + increment);
  return memento_found_count() >= kPretenureMinimumCreated;
}


inline void AllocationSite::IncrementMementoCreateCount() {
  DCHECK(FLAG_allocation_site_pretenuring);
  int value = memento_create_count();
  set_memento_create_count(value + 1);
}
1750
1751
// Advances the pretenuring state machine given the survival |ratio| of
// mementos (found/created) for this GC cycle. Returns true iff the decision
// transitioned to kTenure, in which case dependent code must be deopted.
inline bool AllocationSite::MakePretenureDecision(
    PretenureDecision current_decision,
    double ratio,
    bool maximum_size_scavenge) {
  // Here we just allow state transitions from undecided or maybe tenure
  // to don't tenure, maybe tenure, or tenure.
  if ((current_decision == kUndecided || current_decision == kMaybeTenure)) {
    if (ratio >= kPretenureRatio) {
      // We just transition into tenure state when the semi-space was at
      // maximum capacity.
      if (maximum_size_scavenge) {
        set_deopt_dependent_code(true);
        set_pretenure_decision(kTenure);
        // Currently we just need to deopt when we make a state transition to
        // tenure.
        return true;
      }
      set_pretenure_decision(kMaybeTenure);
    } else {
      set_pretenure_decision(kDontTenure);
    }
  }
  return false;
}
1776
1777
// Consumes the memento statistics gathered during the last scavenge, possibly
// updating the pretenuring decision, and resets the per-cycle counters.
// Returns true when the caller must deoptimize code dependent on this site.
inline bool AllocationSite::DigestPretenuringFeedback(
    bool maximum_size_scavenge) {
  bool deopt = false;
  int create_count = memento_create_count();
  int found_count = memento_found_count();
  bool minimum_mementos_created = create_count >= kPretenureMinimumCreated;
  // The ratio is also computed (for printing) when tracing is on, even if
  // too few mementos were created to act on it.
  double ratio =
      minimum_mementos_created || FLAG_trace_pretenuring_statistics ?
          static_cast<double>(found_count) / create_count : 0.0;
  PretenureDecision current_decision = pretenure_decision();

  if (minimum_mementos_created) {
    deopt = MakePretenureDecision(
        current_decision, ratio, maximum_size_scavenge);
  }

  if (FLAG_trace_pretenuring_statistics) {
    PrintIsolate(GetIsolate(),
                 "pretenuring: AllocationSite(%p): (created, found, ratio) "
                 "(%d, %d, %f) %s => %s\n",
                 static_cast<void*>(this), create_count, found_count, ratio,
                 PretenureDecisionName(current_decision),
                 PretenureDecisionName(pretenure_decision()));
  }

  // Clear feedback calculation fields until the next gc.
  set_memento_found_count(0);
  set_memento_create_count(0);
  return deopt;
}
1808
1809
// A memento is valid when its site slot actually holds an AllocationSite
// that has not been zombified by the GC.
bool AllocationMemento::IsValid() {
  return allocation_site()->IsAllocationSite() &&
         !AllocationSite::cast(allocation_site())->IsZombie();
}


AllocationSite* AllocationMemento::GetAllocationSite() {
  DCHECK(IsValid());
  return AllocationSite::cast(allocation_site());
}

// Raw, unchecked variant: returns the slot contents as an address without
// validating that it is a live AllocationSite.
Address AllocationMemento::GetAllocationSiteUnchecked() {
  return reinterpret_cast<Address>(allocation_site());
}
1824
// Widens |object|'s elements kind so arbitrary heap objects can be stored:
// any non-object fast kind (smi/double) is transitioned to the matching
// FAST_ELEMENTS kind, preserving holeyness.
void JSObject::EnsureCanContainHeapObjectElements(Handle<JSObject> object) {
  JSObject::ValidateElements(object);
  ElementsKind elements_kind = object->map()->elements_kind();
  if (!IsFastObjectElementsKind(elements_kind)) {
    if (IsFastHoleyElementsKind(elements_kind)) {
      TransitionElementsKind(object, FAST_HOLEY_ELEMENTS);
    } else {
      TransitionElementsKind(object, FAST_ELEMENTS);
    }
  }
}
1836
1837
// Scans |count| candidate element values and transitions |object|'s
// elements kind to the most general kind required to hold them all
// (smi -> double -> object, holey when a hole is present).
void JSObject::EnsureCanContainElements(Handle<JSObject> object,
                                        Object** objects,
                                        uint32_t count,
                                        EnsureElementsMode mode) {
  ElementsKind current_kind = object->GetElementsKind();
  ElementsKind target_kind = current_kind;
  {
    DisallowHeapAllocation no_allocation;
    DCHECK(mode != ALLOW_COPIED_DOUBLE_ELEMENTS);
    bool is_holey = IsFastHoleyElementsKind(current_kind);
    // FAST_HOLEY_ELEMENTS is already the most general fast kind.
    if (current_kind == FAST_HOLEY_ELEMENTS) return;
    Object* the_hole = object->GetHeap()->the_hole_value();
    for (uint32_t i = 0; i < count; ++i) {
      Object* current = *objects++;
      if (current == the_hole) {
        is_holey = true;
        target_kind = GetHoleyElementsKind(target_kind);
      } else if (!current->IsSmi()) {
        if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS && current->IsNumber()) {
          if (IsFastSmiElementsKind(target_kind)) {
            if (is_holey) {
              target_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
            } else {
              target_kind = FAST_DOUBLE_ELEMENTS;
            }
          }
        } else if (is_holey) {
          // A holey object kind is terminal: nothing more general exists.
          target_kind = FAST_HOLEY_ELEMENTS;
          break;
        } else {
          target_kind = FAST_ELEMENTS;
        }
      }
    }
  }
  if (target_kind != current_kind) {
    TransitionElementsKind(object, target_kind);
  }
}
1877
1878
// Variant taking a whole backing store. Tagged arrays are delegated to the
// element-wise overload; double arrays force a transition to a double
// elements kind (holey when a hole is present in the copied range).
void JSObject::EnsureCanContainElements(Handle<JSObject> object,
                                        Handle<FixedArrayBase> elements,
                                        uint32_t length,
                                        EnsureElementsMode mode) {
  Heap* heap = object->GetHeap();
  if (elements->map() != heap->fixed_double_array_map()) {
    DCHECK(elements->map() == heap->fixed_array_map() ||
           elements->map() == heap->fixed_cow_array_map());
    // A tagged backing store cannot legitimately contain raw doubles.
    if (mode == ALLOW_COPIED_DOUBLE_ELEMENTS) {
      mode = DONT_ALLOW_DOUBLE_ELEMENTS;
    }
    Object** objects =
        Handle<FixedArray>::cast(elements)->GetFirstElementAddress();
    EnsureCanContainElements(object, objects, length, mode);
    return;
  }

  DCHECK(mode == ALLOW_COPIED_DOUBLE_ELEMENTS);
  if (object->GetElementsKind() == FAST_HOLEY_SMI_ELEMENTS) {
    TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
  } else if (object->GetElementsKind() == FAST_SMI_ELEMENTS) {
    Handle<FixedDoubleArray> double_array =
        Handle<FixedDoubleArray>::cast(elements);
    for (uint32_t i = 0; i < length; ++i) {
      if (double_array->is_the_hole(i)) {
        TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
        return;
      }
    }
    TransitionElementsKind(object, FAST_DOUBLE_ELEMENTS);
  }
}
1911
1912
// Installs |new_map| and then the |value| backing store, asserting that the
// map's declared elements kind is consistent with the backing store's type
// (tagged vs. double), modulo the always-allowed empty_fixed_array.
void JSObject::SetMapAndElements(Handle<JSObject> object,
                                 Handle<Map> new_map,
                                 Handle<FixedArrayBase> value) {
  JSObject::MigrateToMap(object, new_map);
  DCHECK((object->map()->has_fast_smi_or_object_elements() ||
          (*value == object->GetHeap()->empty_fixed_array()) ||
          object->map()->has_fast_string_wrapper_elements()) ==
         (value->map() == object->GetHeap()->fixed_array_map() ||
          value->map() == object->GetHeap()->fixed_cow_array_map()));
  DCHECK((*value == object->GetHeap()->empty_fixed_array()) ||
         (object->map()->has_fast_double_elements() ==
          value->IsFixedDoubleArray()));
  object->set_elements(*value);
}
1927
1928
// Writes the elements pointer with a (conditional) write barrier.
void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kElementsOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode);
}


// Installs the map's canonical initial backing store. No write barrier:
// initial elements are immortal immovable root objects.
void JSObject::initialize_elements() {
  FixedArrayBase* elements = map()->GetInitialElements();
  WRITE_FIELD(this, kElementsOffset, elements);
}
1939
1940
// Interceptor lookups simply delegate to the object's map.
InterceptorInfo* JSObject::GetIndexedInterceptor() {
  return map()->GetIndexedInterceptor();
}

InterceptorInfo* JSObject::GetNamedInterceptor() {
  return map()->GetNamedInterceptor();
}
1948
// Interceptors are only installed through the API, so the constructor must
// be an API function; the handler lives on its shared function data.
InterceptorInfo* Map::GetNamedInterceptor() {
  DCHECK(has_named_interceptor());
  JSFunction* constructor = JSFunction::cast(GetConstructor());
  DCHECK(constructor->shared()->IsApiFunction());
  return InterceptorInfo::cast(
      constructor->shared()->get_api_func_data()->named_property_handler());
}

InterceptorInfo* Map::GetIndexedInterceptor() {
  DCHECK(has_indexed_interceptor());
  JSFunction* constructor = JSFunction::cast(GetConstructor());
  DCHECK(constructor->shared()->IsApiFunction());
  return InterceptorInfo::cast(
      constructor->shared()->get_api_func_data()->indexed_property_handler());
}
1964
// Raw double used by ToNumber fast paths (e.g. NaN for undefined).
double Oddball::to_number_raw() const {
  return READ_DOUBLE_FIELD(this, kToNumberRawOffset);
}

void Oddball::set_to_number_raw(double value) {
  WRITE_DOUBLE_FIELD(this, kToNumberRawOffset, value);
}

ACCESSORS(Oddball, to_string, String, kToStringOffset)
ACCESSORS(Oddball, to_number, Object, kToNumberOffset)
ACCESSORS(Oddball, type_of, String, kTypeOfOffset)


// The oddball kind (undefined/null/true/false/...) is stored as a Smi.
byte Oddball::kind() const {
  return Smi::cast(READ_FIELD(this, kKindOffset))->value();
}


void Oddball::set_kind(byte value) {
  WRITE_FIELD(this, kKindOffset, Smi::FromInt(value));
}


// static
Handle<Object> Oddball::ToNumber(Handle<Oddball> input) {
  return handle(input->to_number(), input->GetIsolate());
}
1992
1993
ACCESSORS(Cell, value, Object, kValueOffset)
ACCESSORS(PropertyCell, dependent_code, DependentCode, kDependentCodeOffset)
ACCESSORS(PropertyCell, property_details_raw, Object, kDetailsOffset)
ACCESSORS(PropertyCell, value, Object, kValueOffset)


// Property details are stored Smi-encoded in the details slot.
PropertyDetails PropertyCell::property_details() {
  return PropertyDetails(Smi::cast(property_details_raw()));
}


void PropertyCell::set_property_details(PropertyDetails details) {
  set_property_details_raw(details.AsSmi());
}
2008
2009
Object* WeakCell::value() const { return READ_FIELD(this, kValueOffset); }


// A cleared cell holds Smi zero in its value slot; no barrier is needed
// for storing a Smi.
void WeakCell::clear() {
  // Either the garbage collector is clearing the cell or we are simply
  // initializing the root empty weak cell.
  DCHECK(GetHeap()->gc_state() == Heap::MARK_COMPACT ||
         this == GetHeap()->empty_weak_cell());
  WRITE_FIELD(this, kValueOffset, Smi::kZero);
}


void WeakCell::initialize(HeapObject* val) {
  WRITE_FIELD(this, kValueOffset, val);
  // We just have to execute the generational barrier here because we never
  // mark through a weak cell and collect evacuation candidates when we process
  // all weak cells.
  WriteBarrierMode mode = ObjectMarking::IsBlack(this)
                              ? UPDATE_WRITE_BARRIER
                              : UPDATE_WEAK_WRITE_BARRIER;
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kValueOffset, val, mode);
}

bool WeakCell::cleared() const { return value() == Smi::kZero; }

// Weak cells are chained through the next slot while the GC processes them.
Object* WeakCell::next() const { return READ_FIELD(this, kNextOffset); }


void WeakCell::set_next(Object* val, WriteBarrierMode mode) {
  WRITE_FIELD(this, kNextOffset, val);
  if (mode == UPDATE_WRITE_BARRIER) {
    WRITE_BARRIER(GetHeap(), this, kNextOffset, val);
  }
}


// The hole marks "not on any list"; caller passes it in to avoid a heap
// root lookup here.
void WeakCell::clear_next(Object* the_hole_value) {
  DCHECK_EQ(GetHeap()->the_hole_value(), the_hole_value);
  set_next(the_hole_value, SKIP_WRITE_BARRIER);
}

bool WeakCell::next_cleared() { return next()->IsTheHole(GetIsolate()); }
2052
int JSObject::GetHeaderSize() { return GetHeaderSize(map()->instance_type()); }


// Maps an instance type to the fixed header size of the corresponding
// JSObject subclass; internal fields start right after this header.
int JSObject::GetHeaderSize(InstanceType type) {
  // Check for the most common kind of JavaScript object before
  // falling into the generic switch. This speeds up the internal
  // field operations considerably on average.
  if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
  switch (type) {
    case JS_API_OBJECT_TYPE:
    case JS_SPECIAL_API_OBJECT_TYPE:
      return JSObject::kHeaderSize;
    case JS_GENERATOR_OBJECT_TYPE:
      return JSGeneratorObject::kSize;
    case JS_GLOBAL_PROXY_TYPE:
      return JSGlobalProxy::kSize;
    case JS_GLOBAL_OBJECT_TYPE:
      return JSGlobalObject::kSize;
    case JS_BOUND_FUNCTION_TYPE:
      return JSBoundFunction::kSize;
    case JS_FUNCTION_TYPE:
      return JSFunction::kSize;
    case JS_VALUE_TYPE:
      return JSValue::kSize;
    case JS_DATE_TYPE:
      return JSDate::kSize;
    case JS_ARRAY_TYPE:
      return JSArray::kSize;
    case JS_ARRAY_BUFFER_TYPE:
      return JSArrayBuffer::kSize;
    case JS_TYPED_ARRAY_TYPE:
      return JSTypedArray::kSize;
    case JS_DATA_VIEW_TYPE:
      return JSDataView::kSize;
    case JS_SET_TYPE:
      return JSSet::kSize;
    case JS_MAP_TYPE:
      return JSMap::kSize;
    case JS_SET_ITERATOR_TYPE:
      return JSSetIterator::kSize;
    case JS_MAP_ITERATOR_TYPE:
      return JSMapIterator::kSize;
    case JS_WEAK_MAP_TYPE:
      return JSWeakMap::kSize;
    case JS_WEAK_SET_TYPE:
      return JSWeakSet::kSize;
    case JS_PROMISE_CAPABILITY_TYPE:
      return JSPromiseCapability::kSize;
    case JS_PROMISE_TYPE:
      return JSPromise::kSize;
    case JS_REGEXP_TYPE:
      return JSRegExp::kSize;
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
      return JSObject::kHeaderSize;
    case JS_MESSAGE_OBJECT_TYPE:
      return JSMessageObject::kSize;
    case JS_ARGUMENTS_TYPE:
      return JSArgumentsObject::kHeaderSize;
    case JS_ERROR_TYPE:
      return JSObject::kHeaderSize;
    case JS_STRING_ITERATOR_TYPE:
      return JSStringIterator::kSize;
    case JS_MODULE_NAMESPACE_TYPE:
      return JSModuleNamespace::kHeaderSize;
    default:
      // Array iterators occupy a contiguous range of instance types.
      if (type >= FIRST_ARRAY_ITERATOR_TYPE &&
          type <= LAST_ARRAY_ITERATOR_TYPE) {
        return JSArrayIterator::kSize;
      }
      UNREACHABLE();
      return 0;
  }
}
2126
IsSpecialReceiverInstanceType(InstanceType instance_type)2127 inline bool IsSpecialReceiverInstanceType(InstanceType instance_type) {
2128 return instance_type <= LAST_SPECIAL_RECEIVER_TYPE;
2129 }
2130
// Number of embedder internal fields: whatever instance space remains after
// the type's header and the in-object properties, in pointer-sized slots.
int JSObject::GetInternalFieldCount(Map* map) {
  int instance_size = map->instance_size();
  // Variable-sized objects have no internal fields.
  if (instance_size == kVariableSizeSentinel) return 0;
  InstanceType instance_type = map->instance_type();
  return ((instance_size - GetHeaderSize(instance_type)) >> kPointerSizeLog2) -
         map->GetInObjectProperties();
}


int JSObject::GetInternalFieldCount() { return GetInternalFieldCount(map()); }


// Byte offset of internal field |index|; fields follow the header directly.
int JSObject::GetInternalFieldOffset(int index) {
  DCHECK(index < GetInternalFieldCount() && index >= 0);
  return GetHeaderSize() + (kPointerSize * index);
}
2147
2148
Object* JSObject::GetInternalField(int index) {
  DCHECK(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));
}


void JSObject::SetInternalField(int index, Object* value) {
  DCHECK(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  int offset = GetHeaderSize() + (kPointerSize * index);
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(GetHeap(), this, offset, value);
}


// Smi overload: a Smi is never a heap pointer, so no write barrier.
void JSObject::SetInternalField(int index, Smi* value) {
  DCHECK(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  int offset = GetHeaderSize() + (kPointerSize * index);
  WRITE_FIELD(this, offset, value);
}
2177
2178
// An unboxed double field stores a raw 64-bit double in place of a tagged
// pointer; only possible with the unbox-double-fields flag.
bool JSObject::IsUnboxedDoubleField(FieldIndex index) {
  if (!FLAG_unbox_double_fields) return false;
  return map()->IsUnboxedDoubleField(index);
}


bool Map::IsUnboxedDoubleField(FieldIndex index) {
  if (!FLAG_unbox_double_fields) return false;
  // Only regular in-object fields can be unboxed; the layout descriptor
  // records which of them are untagged.
  if (index.is_hidden_field() || !index.is_inobject()) return false;
  return !layout_descriptor()->IsTagged(index.property_index());
}
2190
2191
2192 // Access fast-case object properties at index. The use of these routines
2193 // is needed to correctly distinguish between properties stored in-object and
2194 // properties stored in the properties array.
// Reads a tagged fast property, either in-object or from the external
// properties backing store.
Object* JSObject::RawFastPropertyAt(FieldIndex index) {
  DCHECK(!IsUnboxedDoubleField(index));
  if (index.is_inobject()) {
    return READ_FIELD(this, index.offset());
  } else {
    return properties()->get(index.outobject_array_index());
  }
}


// Reads an unboxed double field as a double value.
double JSObject::RawFastDoublePropertyAt(FieldIndex index) {
  DCHECK(IsUnboxedDoubleField(index));
  return READ_DOUBLE_FIELD(this, index.offset());
}

// Reads an unboxed double field as its raw bit pattern; preserves
// signaling-NaN payloads that a double round-trip could corrupt.
uint64_t JSObject::RawFastDoublePropertyAsBitsAt(FieldIndex index) {
  DCHECK(IsUnboxedDoubleField(index));
  return READ_UINT64_FIELD(this, index.offset());
}

void JSObject::RawFastPropertyAtPut(FieldIndex index, Object* value) {
  if (index.is_inobject()) {
    int offset = index.offset();
    WRITE_FIELD(this, offset, value);
    WRITE_BARRIER(GetHeap(), this, offset, value);
  } else {
    properties()->set(index.outobject_array_index(), value);
  }
}

void JSObject::RawFastDoublePropertyAsBitsAtPut(FieldIndex index,
                                                uint64_t bits) {
  WRITE_UINT64_FIELD(this, index.offset(), bits);
}

// Generic fast-property store that dispatches on whether the field is an
// unboxed double or a tagged slot.
void JSObject::FastPropertyAtPut(FieldIndex index, Object* value) {
  if (IsUnboxedDoubleField(index)) {
    DCHECK(value->IsMutableHeapNumber());
    // Ensure that all bits of the double value are preserved.
    RawFastDoublePropertyAsBitsAtPut(index,
                                     HeapNumber::cast(value)->value_as_bits());
  } else {
    RawFastPropertyAtPut(index, value);
  }
}
2240
// Stores |value| into the data field described by |descriptor|, honoring the
// field's representation: double-represented fields are written bitwise
// (unboxed slot or mutable HeapNumber box), everything else as a tagged slot.
void JSObject::WriteToField(int descriptor, PropertyDetails details,
                            Object* value) {
  DCHECK_EQ(kField, details.location());
  DCHECK_EQ(kData, details.kind());
  DisallowHeapAllocation no_gc;
  FieldIndex index = FieldIndex::ForDescriptor(map(), descriptor);
  if (details.representation().IsDouble()) {
    // Nothing more to be done.
    if (value->IsUninitialized(this->GetIsolate())) {
      return;
    }
    // Manipulating the signaling NaN used for the hole and uninitialized
    // double field sentinel in C++, e.g. with bit_cast or value()/set_value(),
    // will change its value on ia32 (the x87 stack is used to return values
    // and stores to the stack silently clear the signalling bit).
    uint64_t bits;
    if (value->IsSmi()) {
      bits = bit_cast<uint64_t>(static_cast<double>(Smi::cast(value)->value()));
    } else {
      DCHECK(value->IsHeapNumber());
      bits = HeapNumber::cast(value)->value_as_bits();
    }
    if (IsUnboxedDoubleField(index)) {
      RawFastDoublePropertyAsBitsAtPut(index, bits);
    } else {
      // Boxed double fields reuse their existing mutable HeapNumber box.
      HeapNumber* box = HeapNumber::cast(RawFastPropertyAt(index));
      DCHECK(box->IsMutableHeapNumber());
      box->set_value_as_bits(bits);
    }
  } else {
    RawFastPropertyAtPut(index, value);
  }
}
2274
int JSObject::GetInObjectPropertyOffset(int index) {
  return map()->GetInObjectPropertyOffset(index);
}


// Reads in-object property |index| (in-object properties are stored at the
// end of the instance, after header and internal fields).
Object* JSObject::InObjectPropertyAt(int index) {
  int offset = GetInObjectPropertyOffset(index);
  return READ_FIELD(this, offset);
}


// Writes in-object property |index| with a conditional write barrier and
// returns the stored value.
Object* JSObject::InObjectPropertyAtPut(int index,
                                        Object* value,
                                        WriteBarrierMode mode) {
  // Adjust for the number of properties stored in the object.
  int offset = GetInObjectPropertyOffset(index);
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
  return value;
}
2295
2296
// Fills the object body from |start_offset| to the end of the instance:
// pre-allocated (used) property slots get |pre_allocated_value|, the unused
// tail gets |filler_value|. Both values must be old-space or immediates so
// the barrier-free WRITE_FIELD stores are safe.
void JSObject::InitializeBody(Map* map, int start_offset,
                              Object* pre_allocated_value,
                              Object* filler_value) {
  DCHECK(!filler_value->IsHeapObject() ||
         !GetHeap()->InNewSpace(filler_value));
  DCHECK(!pre_allocated_value->IsHeapObject() ||
         !GetHeap()->InNewSpace(pre_allocated_value));
  int size = map->instance_size();
  int offset = start_offset;
  if (filler_value != pre_allocated_value) {
    int end_of_pre_allocated_offset =
        size - (map->unused_property_fields() * kPointerSize);
    DCHECK_LE(kHeaderSize, end_of_pre_allocated_offset);
    while (offset < end_of_pre_allocated_offset) {
      WRITE_FIELD(this, offset, pre_allocated_value);
      offset += kPointerSize;
    }
  }
  while (offset < size) {
    WRITE_FIELD(this, offset, filler_value);
    offset += kPointerSize;
  }
}
2320
2321
// Heuristic deciding when an object should leave fast (descriptor-based)
// properties for dictionary mode: only once all in-object slots are used,
// and the out-of-object property count exceeds a store-mode-dependent limit.
bool Map::TooManyFastProperties(StoreFromKeyed store_mode) {
  if (unused_property_fields() != 0) return false;
  // Prototypes stay fast regardless of property count.
  if (is_prototype_map()) return false;
  // Keyed stores suggest dictionary-like usage, so use a much lower limit.
  int minimum = store_mode == CERTAINLY_NOT_STORE_FROM_KEYED ? 128 : 12;
  int limit = Max(minimum, GetInObjectProperties());
  int external = NumberOfFields() - GetInObjectProperties();
  return external > limit;
}
2330
2331
// Initializes every field of a freshly allocated Struct to undefined.
// No write barrier needed: undefined is an immortal immovable root.
void Struct::InitializeBody(int object_size) {
  Object* value = GetHeap()->undefined_value();
  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
    WRITE_FIELD(this, offset, value);
  }
}
2338
ToArrayLength(uint32_t * index)2339 bool Object::ToArrayLength(uint32_t* index) { return Object::ToUint32(index); }
2340
2341
ToArrayIndex(uint32_t * index)2342 bool Object::ToArrayIndex(uint32_t* index) {
2343 return Object::ToUint32(index) && *index != kMaxUInt32;
2344 }
2345
2346
// Debug-only check that a value returned from an embedder API callback is a
// legal JavaScript value (primitive, receiver, or oddball); compiled away in
// release builds.
void Object::VerifyApiCallResultType() {
#if DEBUG
  if (IsSmi()) return;
  DCHECK(IsHeapObject());
  Isolate* isolate = HeapObject::cast(this)->GetIsolate();
  if (!(IsString() || IsSymbol() || IsJSReceiver() || IsHeapNumber() ||
        IsUndefined(isolate) || IsTrue(isolate) || IsFalse(isolate) ||
        IsNull(isolate))) {
    FATAL("API call returned invalid object");
  }
#endif  // DEBUG
}
2359
2360
Object* FixedArray::get(int index) const {
  SLOW_DCHECK(index >= 0 && index < this->length());
  return NOBARRIER_READ_FIELD(this, kHeaderSize + index * kPointerSize);
}

// Handle-returning variant for callers that may allocate afterwards.
Handle<Object> FixedArray::get(FixedArray* array, int index, Isolate* isolate) {
  return handle(array->get(index), isolate);
}

// Typed read that treats undefined as "absent".
template <class T>
MaybeHandle<T> FixedArray::GetValue(Isolate* isolate, int index) const {
  Object* obj = get(index);
  if (obj->IsUndefined(isolate)) return MaybeHandle<T>();
  return Handle<T>(T::cast(obj), isolate);
}

// Typed read that asserts the slot is populated.
template <class T>
Handle<T> FixedArray::GetValueChecked(Isolate* isolate, int index) const {
  Object* obj = get(index);
  CHECK(!obj->IsUndefined(isolate));
  return Handle<T>(T::cast(obj), isolate);
}
bool FixedArray::is_the_hole(Isolate* isolate, int index) {
  return get(index)->IsTheHole(isolate);
}
2386
// Smi store: no write barrier needed since a Smi is not a heap pointer.
// COW arrays must never be mutated in place.
void FixedArray::set(int index, Smi* value) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map());
  DCHECK(index >= 0 && index < this->length());
  DCHECK(reinterpret_cast<Object*>(value)->IsSmi());
  int offset = kHeaderSize + index * kPointerSize;
  NOBARRIER_WRITE_FIELD(this, offset, value);
}


// General store with an unconditional write barrier.
void FixedArray::set(int index, Object* value) {
  DCHECK_NE(GetHeap()->fixed_cow_array_map(), map());
  DCHECK(IsFixedArray());
  DCHECK_GE(index, 0);
  DCHECK_LT(index, this->length());
  int offset = kHeaderSize + index * kPointerSize;
  NOBARRIER_WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(GetHeap(), this, offset, value);
}
2405
2406
// Reads element |index| as a double; must not be the hole.
double FixedDoubleArray::get_scalar(int index) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  DCHECK(index >= 0 && index < this->length());
  DCHECK(!is_the_hole(index));
  return READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
}


// Raw 64-bit read; safe for hole slots (used to compare against
// kHoleNanInt64).
uint64_t FixedDoubleArray::get_representation(int index) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  DCHECK(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kDoubleSize;
  return READ_UINT64_FIELD(this, offset);
}

// Handle-returning read: holes materialize as the_hole_value, numbers as
// freshly allocated HeapNumbers.
Handle<Object> FixedDoubleArray::get(FixedDoubleArray* array, int index,
                                     Isolate* isolate) {
  if (array->is_the_hole(index)) {
    return isolate->factory()->the_hole_value();
  } else {
    return isolate->factory()->NewNumber(array->get_scalar(index));
  }
}
2432
2433
// Stores |value|, canonicalizing any NaN to the quiet NaN so that no stored
// bit pattern can collide with the hole NaN sentinel.
void FixedDoubleArray::set(int index, double value) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  int offset = kHeaderSize + index * kDoubleSize;
  if (std::isnan(value)) {
    WRITE_DOUBLE_FIELD(this, offset, std::numeric_limits<double>::quiet_NaN());
  } else {
    WRITE_DOUBLE_FIELD(this, offset, value);
  }
  DCHECK(!is_the_hole(index));
}

// Isolate-taking overload for API symmetry with FixedArray; the isolate is
// not needed here.
void FixedDoubleArray::set_the_hole(Isolate* isolate, int index) {
  set_the_hole(index);
}
2449
// Marks element |index| as a hole by storing the hole-NaN bit pattern.
void FixedDoubleArray::set_the_hole(int index) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  int offset = kHeaderSize + index * kDoubleSize;
  WRITE_UINT64_FIELD(this, offset, kHoleNanInt64);
}

bool FixedDoubleArray::is_the_hole(Isolate* isolate, int index) {
  return is_the_hole(index);
}

// Holes are detected by exact bit comparison, which is why set() above must
// canonicalize NaNs.
bool FixedDoubleArray::is_the_hole(int index) {
  return get_representation(index) == kHoleNanInt64;
}


double* FixedDoubleArray::data_start() {
  return reinterpret_cast<double*>(FIELD_ADDR(this, kHeaderSize));
}


// Marks the half-open range [from, to) as holes.
void FixedDoubleArray::FillWithHoles(int from, int to) {
  for (int i = from; i < to; i++) {
    set_the_hole(i);
  }
}
2476
2477
// A WeakFixedArray is a FixedArray whose payload entries are WeakCells (or
// Smis for empty slots), offset by kFirstIndex bookkeeping slots.
Object* WeakFixedArray::Get(int index) const {
  Object* raw = FixedArray::cast(this)->get(index + kFirstIndex);
  // A Smi marks an empty slot and is returned as-is.
  if (raw->IsSmi()) return raw;
  DCHECK(raw->IsWeakCell());
  return WeakCell::cast(raw)->value();
}


bool WeakFixedArray::IsEmptySlot(int index) const {
  DCHECK(index < Length());
  return Get(index)->IsSmi();
}


// Clearing stores Smi zero, the empty-slot marker.
void WeakFixedArray::Clear(int index) {
  FixedArray::cast(this)->set(index + kFirstIndex, Smi::kZero);
}


// Payload length, excluding the bookkeeping slots.
int WeakFixedArray::Length() const {
  return FixedArray::cast(this)->length() - kFirstIndex;
}


int WeakFixedArray::last_used_index() const {
  return Smi::cast(FixedArray::cast(this)->get(kLastUsedIndexIndex))->value();
}


void WeakFixedArray::set_last_used_index(int index) {
  FixedArray::cast(this)->set(kLastUsedIndexIndex, Smi::FromInt(index));
}
2510
2511
// Returns the next non-empty element cast to T, or NULL when exhausted.
// The iterator asserts that the underlying list is not mutated while
// iterating (via the cached last_used_index).
template <class T>
T* WeakFixedArray::Iterator::Next() {
  if (list_ != NULL) {
    // Assert that list did not change during iteration.
    DCHECK_EQ(last_used_index_, list_->last_used_index());
    while (index_ < list_->Length()) {
      Object* item = list_->Get(index_++);
      if (item != Empty()) return T::cast(item);
    }
    // Exhausted: drop the list so subsequent calls return NULL cheaply.
    list_ = NULL;
  }
  return NULL;
}
2525
2526
// ArrayList is a FixedArray with a length slot at kLengthIndex and payload
// starting at kFirstIndex; capacity is the FixedArray length.
int ArrayList::Length() {
  // The canonical empty ArrayList is a zero-length FixedArray.
  if (FixedArray::cast(this)->length() == 0) return 0;
  return Smi::cast(FixedArray::cast(this)->get(kLengthIndex))->value();
}


void ArrayList::SetLength(int length) {
  return FixedArray::cast(this)->set(kLengthIndex, Smi::FromInt(length));
}


Object* ArrayList::Get(int index) {
  return FixedArray::cast(this)->get(kFirstIndex + index);
}


// Raw slot address of payload element |index|.
Object** ArrayList::Slot(int index) {
  return data_start() + kFirstIndex + index;
}

void ArrayList::Set(int index, Object* obj, WriteBarrierMode mode) {
  FixedArray::cast(this)->set(kFirstIndex + index, obj, mode);
}


// Clears a slot to undefined; caller passes the value in so no root lookup
// (and no write barrier) is needed.
void ArrayList::Clear(int index, Object* undefined) {
  DCHECK(undefined->IsUndefined(GetIsolate()));
  FixedArray::cast(this)
      ->set(kFirstIndex + index, undefined, SKIP_WRITE_BARRIER);
}
2557
// RegExpMatchInfo layout: fixed bookkeeping slots (capture-register count,
// last subject, last input) followed by the capture registers themselves.
int RegExpMatchInfo::NumberOfCaptureRegisters() {
  DCHECK_GE(length(), kLastMatchOverhead);
  Object* obj = get(kNumberOfCapturesIndex);
  return Smi::cast(obj)->value();
}

void RegExpMatchInfo::SetNumberOfCaptureRegisters(int value) {
  DCHECK_GE(length(), kLastMatchOverhead);
  set(kNumberOfCapturesIndex, Smi::FromInt(value));
}

// Subject string of the most recent match.
String* RegExpMatchInfo::LastSubject() {
  DCHECK_GE(length(), kLastMatchOverhead);
  Object* obj = get(kLastSubjectIndex);
  return String::cast(obj);
}

void RegExpMatchInfo::SetLastSubject(String* value) {
  DCHECK_GE(length(), kLastMatchOverhead);
  set(kLastSubjectIndex, value);
}

Object* RegExpMatchInfo::LastInput() {
  DCHECK_GE(length(), kLastMatchOverhead);
  return get(kLastInputIndex);
}

void RegExpMatchInfo::SetLastInput(Object* value) {
  DCHECK_GE(length(), kLastMatchOverhead);
  set(kLastInputIndex, value);
}

// Capture register |i| (start/end offsets are stored as consecutive
// registers).
int RegExpMatchInfo::Capture(int i) {
  DCHECK_LT(i, NumberOfCaptureRegisters());
  Object* obj = get(kFirstCaptureIndex + i);
  return Smi::cast(obj)->value();
}

void RegExpMatchInfo::SetCapture(int i, int value) {
  DCHECK_LT(i, NumberOfCaptureRegisters());
  set(kFirstCaptureIndex + i, Smi::FromInt(value));
}
2600
// Returns the cheapest write-barrier mode that is still safe for writes into
// this object. The |promise| token statically guarantees no allocation (and
// hence no object movement) happens while the returned mode is used.
WriteBarrierMode HeapObject::GetWriteBarrierMode(
    const DisallowHeapAllocation& promise) {
  Heap* heap = GetHeap();
  // The marking check must come first: while incremental marking is active,
  // the barrier is required even for new-space objects.
  if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
  if (heap->InNewSpace(this)) return SKIP_WRITE_BARRIER;
  return UPDATE_WRITE_BARRIER;
}
2608
2609
// Returns the allocation alignment this object's type requires. Only 32-bit
// hosts need anything beyond word alignment: double-typed backing stores must
// be 8-byte aligned (unless empty), and heap numbers want the opposite parity
// so the payload lands aligned.
AllocationAlignment HeapObject::RequiredAlignment() {
#ifdef V8_HOST_ARCH_32_BIT
  if ((IsFixedFloat64Array() || IsFixedDoubleArray()) &&
      FixedArrayBase::cast(this)->length() != 0) {
    return kDoubleAligned;
  }
  if (IsHeapNumber()) return kDoubleUnaligned;
#endif  // V8_HOST_ARCH_32_BIT
  return kWordAligned;
}
2620
2621
// Stores |value| at |index| using the given write-barrier |mode|.
// Writing into a copy-on-write array is forbidden (first DCHECK).
void FixedArray::set(int index,
                     Object* value,
                     WriteBarrierMode mode) {
  DCHECK_NE(map(), GetHeap()->fixed_cow_array_map());
  DCHECK_GE(index, 0);
  DCHECK_LT(index, this->length());
  int offset = kHeaderSize + index * kPointerSize;
  NOBARRIER_WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
}
2632
2633
// Stores |value| without any write barrier. Safe only because the DCHECK
// verifies |value| is not in new space, so no old-to-new reference can be
// created by this write.
void FixedArray::NoWriteBarrierSet(FixedArray* array,
                                   int index,
                                   Object* value) {
  DCHECK_NE(array->map(), array->GetHeap()->fixed_cow_array_map());
  DCHECK_GE(index, 0);
  DCHECK_LT(index, array->length());
  DCHECK(!array->GetHeap()->InNewSpace(value));
  NOBARRIER_WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
}
2643
// Sentinel writers. All of these go through NoWriteBarrierSet: the sentinel
// values are heap roots, which pass the not-in-new-space check there.
void FixedArray::set_undefined(int index) {
  set_undefined(GetIsolate(), index);
}

void FixedArray::set_undefined(Isolate* isolate, int index) {
  FixedArray::NoWriteBarrierSet(this, index,
                                isolate->heap()->undefined_value());
}

void FixedArray::set_null(int index) { set_null(GetIsolate(), index); }

void FixedArray::set_null(Isolate* isolate, int index) {
  FixedArray::NoWriteBarrierSet(this, index, isolate->heap()->null_value());
}

void FixedArray::set_the_hole(int index) { set_the_hole(GetIsolate(), index); }

void FixedArray::set_the_hole(Isolate* isolate, int index) {
  FixedArray::NoWriteBarrierSet(this, index, isolate->heap()->the_hole_value());
}

// Fills the half-open range [from, to) with the hole; the isolate is looked
// up once outside the loop.
void FixedArray::FillWithHoles(int from, int to) {
  Isolate* isolate = GetIsolate();
  for (int i = from; i < to; i++) {
    set_the_hole(isolate, i);
  }
}
2671
2672
// Address of the first element slot (just past the header).
Object** FixedArray::data_start() {
  return HeapObject::RawField(this, kHeaderSize);
}

// Address of the slot holding element |index|.
Object** FixedArray::RawFieldOfElementAt(int index) {
  return HeapObject::RawField(this, OffsetOfElementAt(index));
}
2681
// Generates a typed getter/setter pair for each per-frame field of
// FrameArray. Frames are stored as runs of kElementsPerFrame slots after the
// kFirstIndex header; each field lives at its k<name>Offset within the run.
#define DEFINE_FRAME_ARRAY_ACCESSORS(name, type)                              \
  type* FrameArray::name(int frame_ix) const {                                \
    Object* obj =                                                             \
        get(kFirstIndex + frame_ix * kElementsPerFrame + k##name##Offset);    \
    return type::cast(obj);                                                   \
  }                                                                           \
                                                                              \
  void FrameArray::Set##name(int frame_ix, type* value) {                     \
    set(kFirstIndex + frame_ix * kElementsPerFrame + k##name##Offset, value); \
  }
FRAME_ARRAY_FIELD_LIST(DEFINE_FRAME_ARRAY_ACCESSORS)
#undef DEFINE_FRAME_ARRAY_ACCESSORS
2694
// True if the frame at |frame_ix| has the wasm flag bit set.
bool FrameArray::IsWasmFrame(int frame_ix) const {
  const int flags = Flags(frame_ix)->value();
  return (flags & kIsWasmFrame) != 0;
}

// True if the frame at |frame_ix| has the asm.js-to-wasm flag bit set.
bool FrameArray::IsAsmJsWasmFrame(int frame_ix) const {
  const int flags = Flags(frame_ix)->value();
  return (flags & kIsAsmJsWasmFrame) != 0;
}

// Number of frames recorded in this array; never negative.
int FrameArray::FrameCount() const {
  const int frame_count = Smi::cast(get(kFrameCountIndex))->value();
  DCHECK_LE(0, frame_count);
  return frame_count;
}
2710
// A descriptor array shorter than kFirstIndex holds no descriptors; only the
// canonical empty_descriptor_array is allowed to be in that state.
bool DescriptorArray::IsEmpty() {
  DCHECK(length() >= kFirstIndex ||
         this == GetHeap()->empty_descriptor_array());
  return length() < kFirstIndex;
}

// Number of descriptors actually stored (<= storage capacity).
int DescriptorArray::number_of_descriptors() {
  DCHECK(length() >= kFirstIndex || IsEmpty());
  int len = length();
  return len == 0 ? 0 : Smi::cast(get(kDescriptorLengthIndex))->value();
}

// Number of descriptor slots available, derived from the array length.
int DescriptorArray::number_of_descriptors_storage() {
  int len = length();
  return len == 0 ? 0 : (len - kFirstIndex) / kEntrySize;
}

// Unused descriptor slots at the end of the array.
int DescriptorArray::NumberOfSlackDescriptors() {
  return number_of_descriptors_storage() - number_of_descriptors();
}

void DescriptorArray::SetNumberOfDescriptors(int number_of_descriptors) {
  WRITE_FIELD(
      this, kDescriptorLengthOffset, Smi::FromInt(number_of_descriptors));
}

// Alias used by the generic Search/LinearSearch/BinarySearch routines.
inline int DescriptorArray::number_of_entries() {
  return number_of_descriptors();
}
2745
2746
// The kEnumCacheIndex slot holds either a Smi (no cache) or a "bridge"
// FixedArray whose slots hold the enum cache and, optionally, the enum
// indices cache.
bool DescriptorArray::HasEnumCache() {
  return !IsEmpty() && !get(kEnumCacheIndex)->IsSmi();
}

// Shares the source array's enum-cache slot (bridge and all).
void DescriptorArray::CopyEnumCacheFrom(DescriptorArray* array) {
  set(kEnumCacheIndex, array->get(kEnumCacheIndex));
}

FixedArray* DescriptorArray::GetEnumCache() {
  DCHECK(HasEnumCache());
  FixedArray* bridge = FixedArray::cast(get(kEnumCacheIndex));
  return FixedArray::cast(bridge->get(kEnumCacheBridgeCacheIndex));
}

// True if the bridge exists and its indices slot holds a cache (non-Smi).
bool DescriptorArray::HasEnumIndicesCache() {
  if (IsEmpty()) return false;
  Object* object = get(kEnumCacheIndex);
  if (object->IsSmi()) return false;
  FixedArray* bridge = FixedArray::cast(object);
  return !bridge->get(kEnumCacheBridgeIndicesCacheIndex)->IsSmi();
}

FixedArray* DescriptorArray::GetEnumIndicesCache() {
  DCHECK(HasEnumIndicesCache());
  FixedArray* bridge = FixedArray::cast(get(kEnumCacheIndex));
  return FixedArray::cast(bridge->get(kEnumCacheBridgeIndicesCacheIndex));
}

// Raw address of the enum-cache slot.
Object** DescriptorArray::GetEnumCacheSlot() {
  DCHECK(HasEnumCache());
  return HeapObject::RawField(reinterpret_cast<HeapObject*>(this),
                              kEnumCacheOffset);
}
2785
2786 // Perform a binary search in a fixed array.
2787 template <SearchMode search_mode, typename T>
BinarySearch(T * array,Name * name,int valid_entries,int * out_insertion_index)2788 int BinarySearch(T* array, Name* name, int valid_entries,
2789 int* out_insertion_index) {
2790 DCHECK(search_mode == ALL_ENTRIES || out_insertion_index == NULL);
2791 int low = 0;
2792 int high = array->number_of_entries() - 1;
2793 uint32_t hash = name->hash_field();
2794 int limit = high;
2795
2796 DCHECK(low <= high);
2797
2798 while (low != high) {
2799 int mid = low + (high - low) / 2;
2800 Name* mid_name = array->GetSortedKey(mid);
2801 uint32_t mid_hash = mid_name->hash_field();
2802
2803 if (mid_hash >= hash) {
2804 high = mid;
2805 } else {
2806 low = mid + 1;
2807 }
2808 }
2809
2810 for (; low <= limit; ++low) {
2811 int sort_index = array->GetSortedKeyIndex(low);
2812 Name* entry = array->GetKey(sort_index);
2813 uint32_t current_hash = entry->hash_field();
2814 if (current_hash != hash) {
2815 if (search_mode == ALL_ENTRIES && out_insertion_index != nullptr) {
2816 *out_insertion_index = sort_index + (current_hash > hash ? 0 : 1);
2817 }
2818 return T::kNotFound;
2819 }
2820 if (entry == name) {
2821 if (search_mode == ALL_ENTRIES || sort_index < valid_entries) {
2822 return sort_index;
2823 }
2824 return T::kNotFound;
2825 }
2826 }
2827
2828 if (search_mode == ALL_ENTRIES && out_insertion_index != nullptr) {
2829 *out_insertion_index = limit + 1;
2830 }
2831 return T::kNotFound;
2832 }
2833
2834
// Perform a linear search in this fixed array. len is the number of entry
// indices that are valid.
// Two modes: with an insertion index requested, the scan walks the entries in
// sorted-hash order so the insertion point can be reported; otherwise it
// walks the first |valid_entries| entries in storage order.
template <SearchMode search_mode, typename T>
int LinearSearch(T* array, Name* name, int valid_entries,
                 int* out_insertion_index) {
  if (search_mode == ALL_ENTRIES && out_insertion_index != nullptr) {
    uint32_t hash = name->hash_field();
    int len = array->number_of_entries();
    for (int number = 0; number < len; number++) {
      int sorted_index = array->GetSortedKeyIndex(number);
      Name* entry = array->GetKey(sorted_index);
      uint32_t current_hash = entry->hash_field();
      // Entries are hash-ordered: passing |hash| means |name| is absent.
      if (current_hash > hash) {
        *out_insertion_index = sorted_index;
        return T::kNotFound;
      }
      if (entry == name) return sorted_index;
    }
    *out_insertion_index = len;
    return T::kNotFound;
  } else {
    DCHECK_LE(valid_entries, array->number_of_entries());
    DCHECK_NULL(out_insertion_index);  // Not supported here.
    for (int number = 0; number < valid_entries; number++) {
      if (array->GetKey(number) == name) return number;
    }
    return T::kNotFound;
  }
}
2864
2865
// Entry point for descriptor/transition lookup: handles the empty case, then
// dispatches to LinearSearch for small arrays and BinarySearch otherwise.
template <SearchMode search_mode, typename T>
int Search(T* array, Name* name, int valid_entries, int* out_insertion_index) {
  SLOW_DCHECK(array->IsSortedNoDuplicates());

  if (valid_entries == 0) {
    if (search_mode == ALL_ENTRIES && out_insertion_index != nullptr) {
      *out_insertion_index = 0;
    }
    return T::kNotFound;
  }

  // Fast case: do linear search for small arrays.
  const int kMaxElementsForLinearSearch = 8;
  if (valid_entries <= kMaxElementsForLinearSearch) {
    return LinearSearch<search_mode>(array, name, valid_entries,
                                     out_insertion_index);
  }

  // Slow case: perform binary search.
  return BinarySearch<search_mode>(array, name, valid_entries,
                                   out_insertion_index);
}
2888
2889
Search(Name * name,int valid_descriptors)2890 int DescriptorArray::Search(Name* name, int valid_descriptors) {
2891 DCHECK(name->IsUniqueName());
2892 return internal::Search<VALID_ENTRIES>(this, name, valid_descriptors, NULL);
2893 }
2894
SearchWithCache(Isolate * isolate,Name * name,Map * map)2895 int DescriptorArray::SearchWithCache(Isolate* isolate, Name* name, Map* map) {
2896 DCHECK(name->IsUniqueName());
2897 int number_of_own_descriptors = map->NumberOfOwnDescriptors();
2898 if (number_of_own_descriptors == 0) return kNotFound;
2899
2900 DescriptorLookupCache* cache = isolate->descriptor_lookup_cache();
2901 int number = cache->Lookup(map, name);
2902
2903 if (number == DescriptorLookupCache::kAbsent) {
2904 number = Search(name, number_of_own_descriptors);
2905 cache->Update(map, name, number);
2906 }
2907
2908 return number;
2909 }
2910
// Details of the most recently added own descriptor.
PropertyDetails Map::GetLastDescriptorDetails() {
  return instance_descriptors()->GetDetails(LastAdded());
}

// Index of the most recently added own descriptor; requires at least one.
int Map::LastAdded() {
  int number_of_own_descriptors = NumberOfOwnDescriptors();
  DCHECK(number_of_own_descriptors > 0);
  return number_of_own_descriptors - 1;
}

int Map::NumberOfOwnDescriptors() {
  return NumberOfOwnDescriptorsBits::decode(bit_field3());
}

void Map::SetNumberOfOwnDescriptors(int number) {
  DCHECK(number <= instance_descriptors()->number_of_descriptors());
  set_bit_field3(NumberOfOwnDescriptorsBits::update(bit_field3(), number));
}

// Cached for-in enumeration length, or kInvalidEnumCacheSentinel.
int Map::EnumLength() { return EnumLengthBits::decode(bit_field3()); }

void Map::SetEnumLength(int length) {
  if (length != kInvalidEnumCacheSentinel) {
    DCHECK(length >= 0);
    DCHECK(length == 0 || instance_descriptors()->HasEnumCache());
    DCHECK(length <= NumberOfOwnDescriptors());
  }
  set_bit_field3(EnumLengthBits::update(bit_field3(), length));
}

// Returns the canonical empty backing store matching this map's elements
// kind. The DCHECK enforces the result is not in new space.
FixedArrayBase* Map::GetInitialElements() {
  FixedArrayBase* result = nullptr;
  if (has_fast_elements() || has_fast_string_wrapper_elements()) {
    result = GetHeap()->empty_fixed_array();
  } else if (has_fast_sloppy_arguments_elements()) {
    result = GetHeap()->empty_sloppy_arguments_elements();
  } else if (has_fixed_typed_array_elements()) {
    result = GetHeap()->EmptyFixedTypedArrayForMap(this);
  } else {
    UNREACHABLE();
  }
  DCHECK(!GetHeap()->InNewSpace(result));
  return result;
}
2961
// Each descriptor occupies kEntrySize consecutive slots; key, value, and
// details indices are computed by ToKeyIndex / ToValueIndex / ToDetailsIndex.
Object** DescriptorArray::GetKeySlot(int descriptor_number) {
  DCHECK(descriptor_number < number_of_descriptors());
  return RawFieldOfElementAt(ToKeyIndex(descriptor_number));
}

// First slot of descriptor |descriptor_number| (its key slot).
Object** DescriptorArray::GetDescriptorStartSlot(int descriptor_number) {
  return GetKeySlot(descriptor_number);
}

// One past the last slot of descriptor |descriptor_number - 1|.
Object** DescriptorArray::GetDescriptorEndSlot(int descriptor_number) {
  return GetValueSlot(descriptor_number - 1) + 1;
}

Name* DescriptorArray::GetKey(int descriptor_number) {
  DCHECK(descriptor_number < number_of_descriptors());
  return Name::cast(get(ToKeyIndex(descriptor_number)));
}

// The sorted-order pointer is packed into the details word.
int DescriptorArray::GetSortedKeyIndex(int descriptor_number) {
  return GetDetails(descriptor_number).pointer();
}

// Key of the descriptor at sorted position |descriptor_number|.
Name* DescriptorArray::GetSortedKey(int descriptor_number) {
  return GetKey(GetSortedKeyIndex(descriptor_number));
}

// Rewrites only the pointer field of the details word.
void DescriptorArray::SetSortedKey(int descriptor_index, int pointer) {
  PropertyDetails details = GetDetails(descriptor_index);
  set(ToDetailsIndex(descriptor_index), details.set_pointer(pointer).AsSmi());
}

Object** DescriptorArray::GetValueSlot(int descriptor_number) {
  DCHECK(descriptor_number < number_of_descriptors());
  return RawFieldOfElementAt(ToValueIndex(descriptor_number));
}

int DescriptorArray::GetValueOffset(int descriptor_number) {
  return OffsetOfElementAt(ToValueIndex(descriptor_number));
}

Object* DescriptorArray::GetValue(int descriptor_number) {
  DCHECK(descriptor_number < number_of_descriptors());
  return get(ToValueIndex(descriptor_number));
}

void DescriptorArray::SetValue(int descriptor_index, Object* value) {
  set(ToValueIndex(descriptor_index), value);
}

// Decodes the Smi-encoded details word of descriptor |descriptor_number|.
PropertyDetails DescriptorArray::GetDetails(int descriptor_number) {
  DCHECK(descriptor_number < number_of_descriptors());
  Object* details = get(ToDetailsIndex(descriptor_number));
  return PropertyDetails(Smi::cast(details));
}
3027
// In-object field index of a field descriptor (location() must be kField).
int DescriptorArray::GetFieldIndex(int descriptor_number) {
  DCHECK(GetDetails(descriptor_number).location() == kField);
  return GetDetails(descriptor_number).field_index();
}

// Field type of a field descriptor; the value slot stores a wrapped type.
FieldType* DescriptorArray::GetFieldType(int descriptor_number) {
  DCHECK(GetDetails(descriptor_number).location() == kField);
  Object* wrapped_type = GetValue(descriptor_number);
  return Map::UnwrapFieldType(wrapped_type);
}

// Copies descriptor |descriptor_number| into |desc|, handlifying the key
// and value.
void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
  desc->Init(handle(GetKey(descriptor_number), GetIsolate()),
             handle(GetValue(descriptor_number), GetIsolate()),
             GetDetails(descriptor_number));
}

// Writes all three slots of descriptor |descriptor_number|.
void DescriptorArray::Set(int descriptor_number, Name* key, Object* value,
                          PropertyDetails details) {
  // Range check.
  DCHECK(descriptor_number < number_of_descriptors());
  set(ToKeyIndex(descriptor_number), key);
  set(ToValueIndex(descriptor_number), value);
  set(ToDetailsIndex(descriptor_number), details.AsSmi());
}

void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
  Name* key = *desc->GetKey();
  Object* value = *desc->GetValue();
  Set(descriptor_number, key, value, desc->GetDetails());
}
3059
3060
// Appends |desc| as the last descriptor and inserts it into the hash-sorted
// order with one insertion-sort pass from the end. Runs under
// DisallowHeapAllocation, so the caller must already have ensured capacity.
void DescriptorArray::Append(Descriptor* desc) {
  DisallowHeapAllocation no_gc;
  int descriptor_number = number_of_descriptors();
  SetNumberOfDescriptors(descriptor_number + 1);
  Set(descriptor_number, desc);

  uint32_t hash = desc->GetKey()->Hash();

  int insertion;

  // Shift sorted keys with strictly larger hashes one position right.
  for (insertion = descriptor_number; insertion > 0; --insertion) {
    Name* key = GetSortedKey(insertion - 1);
    if (key->Hash() <= hash) break;
    SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
  }

  SetSortedKey(insertion, descriptor_number);
}
3079
3080
SwapSortedKeys(int first,int second)3081 void DescriptorArray::SwapSortedKeys(int first, int second) {
3082 int first_key = GetSortedKeyIndex(first);
3083 SetSortedKey(first, GetSortedKeyIndex(second));
3084 SetSortedKey(second, first_key);
3085 }
3086
3087
// Live entry count.
int HashTableBase::NumberOfElements() {
  return Smi::cast(get(kNumberOfElementsIndex))->value();
}

// Count of deleted (hole) entries still occupying slots.
int HashTableBase::NumberOfDeletedElements() {
  return Smi::cast(get(kNumberOfDeletedElementsIndex))->value();
}

// Total number of buckets, as sized by ComputeCapacity().
int HashTableBase::Capacity() {
  return Smi::cast(get(kCapacityIndex))->value();
}

void HashTableBase::ElementAdded() {
  SetNumberOfElements(NumberOfElements() + 1);
}

// A removal both decrements the live count and records a deleted entry.
void HashTableBase::ElementRemoved() {
  SetNumberOfElements(NumberOfElements() - 1);
  SetNumberOfDeletedElements(NumberOfDeletedElements() + 1);
}

void HashTableBase::ElementsRemoved(int n) {
  SetNumberOfElements(NumberOfElements() - n);
  SetNumberOfDeletedElements(NumberOfDeletedElements() + n);
}

// static
// Power-of-two capacity keeping load factor at or below 50%, but never less
// than kMinCapacity.
int HashTableBase::ComputeCapacity(int at_least_space_for) {
  int capacity = base::bits::RoundUpToPowerOfTwo32(at_least_space_for * 2);
  return Max(capacity, kMinCapacity);
}
3125
// True if |k| is a real key, i.e. neither the hole nor undefined sentinel.
// This variant compares against the roots directly.
bool HashTableBase::IsKey(Isolate* isolate, Object* k) {
  Heap* heap = isolate->heap();
  return k != heap->the_hole_value() && k != heap->undefined_value();
}

// Variant that looks up the isolate itself from this table.
bool HashTableBase::IsKey(Object* k) {
  Isolate* isolate = this->GetIsolate();
  return !k->IsTheHole(isolate) && !k->IsUndefined(isolate);
}

void HashTableBase::SetNumberOfElements(int nof) {
  set(kNumberOfElementsIndex, Smi::FromInt(nof));
}

void HashTableBase::SetNumberOfDeletedElements(int nod) {
  set(kNumberOfDeletedElementsIndex, Smi::FromInt(nod));
}
3145
// Default map for hash tables built on this shape.
template <typename Key>
Map* BaseShape<Key>::GetMap(Isolate* isolate) {
  return isolate->heap()->hash_table_map();
}
3150
// Convenience overload that looks up the isolate from the table.
template <typename Derived, typename Shape, typename Key>
int HashTable<Derived, Shape, Key>::FindEntry(Key key) {
  return FindEntry(GetIsolate(), key);
}

// Convenience overload that computes the hash via the shape.
template<typename Derived, typename Shape, typename Key>
int HashTable<Derived, Shape, Key>::FindEntry(Isolate* isolate, Key key) {
  return FindEntry(isolate, key, HashTable::Hash(key));
}

// Find entry for key otherwise return kNotFound.
// Open-addressing probe loop: an undefined slot terminates the chain, hole
// slots (deletions) are skipped, and live slots are matched via
// Shape::IsMatch.
template <typename Derived, typename Shape, typename Key>
int HashTable<Derived, Shape, Key>::FindEntry(Isolate* isolate, Key key,
                                              int32_t hash) {
  uint32_t capacity = Capacity();
  uint32_t entry = FirstProbe(hash, capacity);
  uint32_t count = 1;
  // EnsureCapacity will guarantee the hash table is never full.
  Object* undefined = isolate->heap()->undefined_value();
  Object* the_hole = isolate->heap()->the_hole_value();
  while (true) {
    Object* element = KeyAt(entry);
    // Empty entry. Uses raw unchecked accessors because it is called by the
    // string table during bootstrapping.
    if (element == undefined) break;
    if (element != the_hole && Shape::IsMatch(key, element)) return entry;
    entry = NextProbe(entry, count++, capacity);
  }
  return kNotFound;
}
3182
// Membership tests expressed in terms of FindEntry.
template <typename Derived, typename Shape, typename Key>
bool HashTable<Derived, Shape, Key>::Has(Key key) {
  return FindEntry(key) != kNotFound;
}

template <typename Derived, typename Shape, typename Key>
bool HashTable<Derived, Shape, Key>::Has(Isolate* isolate, Key key) {
  return FindEntry(isolate, key) != kNotFound;
}

// Membership test with a caller-supplied hash.
bool ObjectHashSet::Has(Isolate* isolate, Handle<Object> key, int32_t hash) {
  return FindEntry(isolate, key, hash) != kNotFound;
}
3196
Has(Isolate * isolate,Handle<Object> key)3197 bool ObjectHashSet::Has(Isolate* isolate, Handle<Object> key) {
3198 Object* hash = key->GetHash();
3199 if (!hash->IsSmi()) return false;
3200 return FindEntry(isolate, key, Smi::cast(hash)->value()) != kNotFound;
3201 }
3202
// A stored value matches |key| iff it is a string with equal contents.
bool StringSetShape::IsMatch(String* key, Object* value) {
  return value->IsString() && key->Equals(String::cast(value));
}

uint32_t StringSetShape::Hash(String* key) { return key->Hash(); }

// Hash of an already-stored object; non-strings hash to 0.
uint32_t StringSetShape::HashForObject(String* key, Object* object) {
  return object->IsString() ? String::cast(object)->Hash() : 0;
}
3212
// The kMaxNumberKeyIndex slot packs the maximum numeric key (shifted left by
// kRequiresSlowElementsTagSize) together with a tag bit that forces slow
// elements.
bool SeededNumberDictionary::requires_slow_elements() {
  Object* max_index_object = get(kMaxNumberKeyIndex);
  if (!max_index_object->IsSmi()) return false;
  return 0 !=
         (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
}

// Largest numeric key recorded; only meaningful while slow elements are not
// required. Returns 0 if the slot has not been initialized with a Smi.
uint32_t SeededNumberDictionary::max_number_key() {
  DCHECK(!requires_slow_elements());
  Object* max_index_object = get(kMaxNumberKeyIndex);
  if (!max_index_object->IsSmi()) return 0;
  uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
  return value >> kRequiresSlowElementsTagSize;
}

// Sets the tag bit (and clears the recorded maximum key).
void SeededNumberDictionary::set_requires_slow_elements() {
  set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
}
3233
3234
// PodArray<T> is a ByteArray reinterpreted as an array of POD T values.
template <class T>
PodArray<T>* PodArray<T>::cast(Object* object) {
  SLOW_DCHECK(object->IsByteArray());
  return reinterpret_cast<PodArray<T>*>(object);
}
template <class T>
const PodArray<T>* PodArray<T>::cast(const Object* object) {
  SLOW_DCHECK(object->IsByteArray());
  return reinterpret_cast<const PodArray<T>*>(object);
}

// static
// Allocates backing storage for |length| T values.
template <class T>
Handle<PodArray<T>> PodArray<T>::New(Isolate* isolate, int length,
                                     PretenureFlag pretenure) {
  return Handle<PodArray<T>>::cast(
      isolate->factory()->NewByteArray(length * sizeof(T), pretenure));
}
3253
// static
template <class Traits>
STATIC_CONST_MEMBER_DEFINITION const InstanceType
    FixedTypedArray<Traits>::kInstanceType;

// Checked downcast: the object's instance type must match the Traits'
// instance type.
template <class Traits>
FixedTypedArray<Traits>* FixedTypedArray<Traits>::cast(Object* object) {
  SLOW_DCHECK(object->IsHeapObject() &&
              HeapObject::cast(object)->map()->instance_type() ==
                  Traits::kInstanceType);
  return reinterpret_cast<FixedTypedArray<Traits>*>(object);
}

// Const overload of the checked downcast above.
template <class Traits>
const FixedTypedArray<Traits>*
FixedTypedArray<Traits>::cast(const Object* object) {
  SLOW_DCHECK(object->IsHeapObject() &&
              HeapObject::cast(object)->map()->instance_type() ==
                  Traits::kInstanceType);
  return reinterpret_cast<FixedTypedArray<Traits>*>(object);
}
3277
3278
// Typed getter/setter pairs for the fixed header elements of
// DeoptimizationInputData, each stored at its k<name>Index slot.
#define DEFINE_DEOPT_ELEMENT_ACCESSORS(name, type) \
  type* DeoptimizationInputData::name() {          \
    return type::cast(get(k##name##Index));        \
  }                                                \
  void DeoptimizationInputData::Set##name(type* value) { \
    set(k##name##Index, value);                          \
  }

DEFINE_DEOPT_ELEMENT_ACCESSORS(TranslationByteArray, ByteArray)
DEFINE_DEOPT_ELEMENT_ACCESSORS(InlinedFunctionCount, Smi)
DEFINE_DEOPT_ELEMENT_ACCESSORS(LiteralArray, FixedArray)
DEFINE_DEOPT_ELEMENT_ACCESSORS(OsrAstId, Smi)
DEFINE_DEOPT_ELEMENT_ACCESSORS(OsrPcOffset, Smi)
DEFINE_DEOPT_ELEMENT_ACCESSORS(OptimizationId, Smi)
DEFINE_DEOPT_ELEMENT_ACCESSORS(SharedFunctionInfo, Object)
DEFINE_DEOPT_ELEMENT_ACCESSORS(WeakCellCache, Object)
DEFINE_DEOPT_ELEMENT_ACCESSORS(InliningPositions, PodArray<InliningPosition>)

#undef DEFINE_DEOPT_ELEMENT_ACCESSORS
3298
3299
// Typed getter/setter pairs for per-entry elements, addressed relative to
// IndexForEntry(i) plus the field's k<name>Offset.
#define DEFINE_DEOPT_ENTRY_ACCESSORS(name, type)                \
  type* DeoptimizationInputData::name(int i) {                  \
    return type::cast(get(IndexForEntry(i) + k##name##Offset)); \
  }                                                             \
  void DeoptimizationInputData::Set##name(int i, type* value) { \
    set(IndexForEntry(i) + k##name##Offset, value);             \
  }

DEFINE_DEOPT_ENTRY_ACCESSORS(AstIdRaw, Smi)
DEFINE_DEOPT_ENTRY_ACCESSORS(TranslationIndex, Smi)
DEFINE_DEOPT_ENTRY_ACCESSORS(ArgumentsStackHeight, Smi)
DEFINE_DEOPT_ENTRY_ACCESSORS(Pc, Smi)

#undef DEFINE_DEOPT_ENTRY_ACCESSORS
3314
3315
// BailoutId wrapper around the raw Smi ast id of entry |i|.
BailoutId DeoptimizationInputData::AstId(int i) {
  return BailoutId(AstIdRaw(i)->value());
}

void DeoptimizationInputData::SetAstId(int i, BailoutId value) {
  SetAstIdRaw(i, Smi::FromInt(value.ToInt()));
}

// Number of deopt entries stored after the fixed header.
int DeoptimizationInputData::DeoptCount() {
  return (length() - kFirstDeoptEntryIndex) / kDeoptEntrySize;
}
3329
3330
// DeoptimizationOutputData stores flat (ast id, pc-and-state) pairs.
int DeoptimizationOutputData::DeoptPoints() { return length() / 2; }

BailoutId DeoptimizationOutputData::AstId(int index) {
  return BailoutId(Smi::cast(get(index * 2))->value());
}

void DeoptimizationOutputData::SetAstId(int index, BailoutId id) {
  set(index * 2, Smi::FromInt(id.ToInt()));
}

Smi* DeoptimizationOutputData::PcAndState(int index) {
  return Smi::cast(get(1 + index * 2));
}

void DeoptimizationOutputData::SetPcAndState(int index, Smi* offset) {
  set(1 + index * 2, offset);
}
3352
// Range-based handler entries occupy kRangeEntrySize slots each; the handler
// slot additionally packs a catch prediction via HandlerPredictionField.
int HandlerTable::GetRangeStart(int index) const {
  return Smi::cast(get(index * kRangeEntrySize + kRangeStartIndex))->value();
}

int HandlerTable::GetRangeEnd(int index) const {
  return Smi::cast(get(index * kRangeEntrySize + kRangeEndIndex))->value();
}

// Handler offset only; the prediction bits are masked off by the decode.
int HandlerTable::GetRangeHandler(int index) const {
  return HandlerOffsetField::decode(
      Smi::cast(get(index * kRangeEntrySize + kRangeHandlerIndex))->value());
}

int HandlerTable::GetRangeData(int index) const {
  return Smi::cast(get(index * kRangeEntrySize + kRangeDataIndex))->value();
}

void HandlerTable::SetRangeStart(int index, int value) {
  set(index * kRangeEntrySize + kRangeStartIndex, Smi::FromInt(value));
}

void HandlerTable::SetRangeEnd(int index, int value) {
  set(index * kRangeEntrySize + kRangeEndIndex, Smi::FromInt(value));
}

// Packs the handler offset and catch prediction into one Smi.
void HandlerTable::SetRangeHandler(int index, int offset,
                                   CatchPrediction prediction) {
  int value = HandlerOffsetField::encode(offset) |
              HandlerPredictionField::encode(prediction);
  set(index * kRangeEntrySize + kRangeHandlerIndex, Smi::FromInt(value));
}

void HandlerTable::SetRangeData(int index, int value) {
  set(index * kRangeEntrySize + kRangeDataIndex, Smi::FromInt(value));
}

// Return-based entries occupy kReturnEntrySize slots each.
void HandlerTable::SetReturnOffset(int index, int value) {
  set(index * kReturnEntrySize + kReturnOffsetIndex, Smi::FromInt(value));
}

void HandlerTable::SetReturnHandler(int index, int offset) {
  int value = HandlerOffsetField::encode(offset);
  set(index * kReturnEntrySize + kReturnHandlerIndex, Smi::FromInt(value));
}

int HandlerTable::NumberOfRangeEntries() const {
  return length() / kRangeEntrySize;
}
3404
// Checked downcasts from Object to HashTable (mutable and const variants).
template <typename Derived, typename Shape, typename Key>
HashTable<Derived, Shape, Key>*
HashTable<Derived, Shape, Key>::cast(Object* obj) {
  SLOW_DCHECK(obj->IsHashTable());
  return reinterpret_cast<HashTable*>(obj);
}

template <typename Derived, typename Shape, typename Key>
const HashTable<Derived, Shape, Key>*
HashTable<Derived, Shape, Key>::cast(const Object* obj) {
  SLOW_DCHECK(obj->IsHashTable());
  return reinterpret_cast<const HashTable*>(obj);
}
3419
3420
// Macro-generated Smi field accessors, including synchronized and no-barrier
// variants of the same fields.
SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
SYNCHRONIZED_SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)

SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
NOBARRIER_SMI_ACCESSORS(FreeSpace, size, kSizeOffset)

SMI_ACCESSORS(String, length, kLengthOffset)
SYNCHRONIZED_SMI_ACCESSORS(String, length, kLengthOffset)
3429
3430
int FreeSpace::Size() { return size(); }

// Next free-space block in the free list, read raw from kNextOffset. The
// DCHECKs allow a null map only while deserialization is still in progress.
FreeSpace* FreeSpace::next() {
  DCHECK(map() == GetHeap()->root(Heap::kFreeSpaceMapRootIndex) ||
         (!GetHeap()->deserialization_complete() && map() == NULL));
  DCHECK_LE(kNextOffset + kPointerSize, nobarrier_size());
  return reinterpret_cast<FreeSpace*>(
      Memory::Address_at(address() + kNextOffset));
}

// Stores the next pointer with an atomic no-barrier write.
void FreeSpace::set_next(FreeSpace* next) {
  DCHECK(map() == GetHeap()->root(Heap::kFreeSpaceMapRootIndex) ||
         (!GetHeap()->deserialization_complete() && map() == NULL));
  DCHECK_LE(kNextOffset + kPointerSize, nobarrier_size());
  base::NoBarrier_Store(
      reinterpret_cast<base::AtomicWord*>(address() + kNextOffset),
      reinterpret_cast<base::AtomicWord>(next));
}

// Downcast that is only fully checkable after deserialization completes.
FreeSpace* FreeSpace::cast(HeapObject* o) {
  SLOW_DCHECK(!o->GetHeap()->deserialization_complete() || o->IsFreeSpace());
  return reinterpret_cast<FreeSpace*>(o);
}
3457
3458
// Raw 32-bit hash field (hash value plus flag bits).
uint32_t Name::hash_field() {
  return READ_UINT32_FIELD(this, kHashFieldOffset);
}


// Writes the hash field. On 64-bit hosts the hash shares a pointer-sized
// slot; the other (endian-dependent) half is zeroed so the full slot has a
// deterministic value.
void Name::set_hash_field(uint32_t value) {
  WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
#if V8_HOST_ARCH_64_BIT
#if V8_TARGET_LITTLE_ENDIAN
  WRITE_UINT32_FIELD(this, kHashFieldSlot + kIntSize, 0);
#else
  WRITE_UINT32_FIELD(this, kHashFieldSlot, 0);
#endif
#endif
}
3474
3475
Equals(Name * other)3476 bool Name::Equals(Name* other) {
3477 if (other == this) return true;
3478 if ((this->IsInternalizedString() && other->IsInternalizedString()) ||
3479 this->IsSymbol() || other->IsSymbol()) {
3480 return false;
3481 }
3482 return String::cast(this)->SlowEquals(String::cast(other));
3483 }
3484
3485
Equals(Handle<Name> one,Handle<Name> two)3486 bool Name::Equals(Handle<Name> one, Handle<Name> two) {
3487 if (one.is_identical_to(two)) return true;
3488 if ((one->IsInternalizedString() && two->IsInternalizedString()) ||
3489 one->IsSymbol() || two->IsSymbol()) {
3490 return false;
3491 }
3492 return String::SlowEquals(Handle<String>::cast(one),
3493 Handle<String>::cast(two));
3494 }
3495
3496
// [name]: the symbol's name/description object.
ACCESSORS(Symbol, name, Object, kNameOffset)
// [flags]: packed boolean bits, expanded by the BOOL_ACCESSORS below.
SMI_ACCESSORS(Symbol, flags, kFlagsOffset)
BOOL_ACCESSORS(Symbol, flags, is_private, kPrivateBit)
BOOL_ACCESSORS(Symbol, flags, is_well_known_symbol, kWellKnownSymbolBit)
BOOL_ACCESSORS(Symbol, flags, is_public, kPublicBit)
3502
3503 bool String::Equals(String* other) {
3504 if (other == this) return true;
3505 if (this->IsInternalizedString() && other->IsInternalizedString()) {
3506 return false;
3507 }
3508 return SlowEquals(other);
3509 }
3510
3511
Equals(Handle<String> one,Handle<String> two)3512 bool String::Equals(Handle<String> one, Handle<String> two) {
3513 if (one.is_identical_to(two)) return true;
3514 if (one->IsInternalizedString() && two->IsInternalizedString()) {
3515 return false;
3516 }
3517 return SlowEquals(one, two);
3518 }
3519
3520
// Returns a flat (non-cons) string with the same contents as |string|.
// A cons string that is already flat is unwrapped to its first part; other
// cons strings go through SlowFlatten. The unwrapped result may be a thin
// string, which is then followed to its actual target (asserted below not
// to be a cons string).
Handle<String> String::Flatten(Handle<String> string, PretenureFlag pretenure) {
  if (string->IsConsString()) {
    Handle<ConsString> cons = Handle<ConsString>::cast(string);
    if (cons->IsFlat()) {
      string = handle(cons->first());
    } else {
      return SlowFlatten(cons, pretenure);
    }
  }
  if (string->IsThinString()) {
    string = handle(Handle<ThinString>::cast(string)->actual());
    DCHECK(!string->IsConsString());
  }
  return string;
}
3536
3537
// Reads the character at |index|, dispatching on the concrete string
// representation (sequential/cons/external/sliced/thin, one/two byte).
uint16_t String::Get(int index) {
  DCHECK(index >= 0 && index < length());
  switch (StringShape(this).full_representation_tag()) {
    case kSeqStringTag | kOneByteStringTag:
      return SeqOneByteString::cast(this)->SeqOneByteStringGet(index);
    case kSeqStringTag | kTwoByteStringTag:
      return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
    case kConsStringTag | kOneByteStringTag:
    case kConsStringTag | kTwoByteStringTag:
      return ConsString::cast(this)->ConsStringGet(index);
    case kExternalStringTag | kOneByteStringTag:
      return ExternalOneByteString::cast(this)->ExternalOneByteStringGet(index);
    case kExternalStringTag | kTwoByteStringTag:
      return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
    case kSlicedStringTag | kOneByteStringTag:
    case kSlicedStringTag | kTwoByteStringTag:
      return SlicedString::cast(this)->SlicedStringGet(index);
    case kThinStringTag | kOneByteStringTag:
    case kThinStringTag | kTwoByteStringTag:
      return ThinString::cast(this)->ThinStringGet(index);
    default:
      break;
  }

  // All valid representation tags are handled above.
  UNREACHABLE();
  return 0;
}
3565
3566
Set(int index,uint16_t value)3567 void String::Set(int index, uint16_t value) {
3568 DCHECK(index >= 0 && index < length());
3569 DCHECK(StringShape(this).IsSequential());
3570
3571 return this->IsOneByteRepresentation()
3572 ? SeqOneByteString::cast(this)->SeqOneByteStringSet(index, value)
3573 : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
3574 }
3575
3576
IsFlat()3577 bool String::IsFlat() {
3578 if (!StringShape(this).IsCons()) return true;
3579 return ConsString::cast(this)->second()->length() == 0;
3580 }
3581
3582
// Returns the string an indirect (cons/sliced/thin) string points at. All
// three representations store that reference at the same field offset,
// which the static asserts pin down.
String* String::GetUnderlying() {
  // Giving direct access to underlying string only makes sense if the
  // wrapping string is already flattened.
  DCHECK(this->IsFlat());
  DCHECK(StringShape(this).IsIndirect());
  STATIC_ASSERT(ConsString::kFirstOffset == SlicedString::kParentOffset);
  STATIC_ASSERT(ConsString::kFirstOffset == ThinString::kActualOffset);
  const int kUnderlyingOffset = SlicedString::kParentOffset;
  return String::cast(READ_FIELD(this, kUnderlyingOffset));
}
3593
3594
// Walks |string| starting at |offset| and hands the flat (sequential or
// external) character data to |visitor|. Sliced and thin strings are looked
// through; returns the first cons string encountered, or NULL when the
// content was flat and has been fully visited.
template<class Visitor>
ConsString* String::VisitFlat(Visitor* visitor,
                              String* string,
                              const int offset) {
  // slice_offset accumulates the offsets of any sliced-string parents.
  int slice_offset = offset;
  const int length = string->length();
  DCHECK(offset <= length);
  while (true) {
    int32_t type = string->map()->instance_type();
    switch (type & (kStringRepresentationMask | kStringEncodingMask)) {
      case kSeqStringTag | kOneByteStringTag:
        visitor->VisitOneByteString(
            SeqOneByteString::cast(string)->GetChars() + slice_offset,
            length - offset);
        return NULL;

      case kSeqStringTag | kTwoByteStringTag:
        visitor->VisitTwoByteString(
            SeqTwoByteString::cast(string)->GetChars() + slice_offset,
            length - offset);
        return NULL;

      case kExternalStringTag | kOneByteStringTag:
        visitor->VisitOneByteString(
            ExternalOneByteString::cast(string)->GetChars() + slice_offset,
            length - offset);
        return NULL;

      case kExternalStringTag | kTwoByteStringTag:
        visitor->VisitTwoByteString(
            ExternalTwoByteString::cast(string)->GetChars() + slice_offset,
            length - offset);
        return NULL;

      case kSlicedStringTag | kOneByteStringTag:
      case kSlicedStringTag | kTwoByteStringTag: {
        // Redirect to the parent, shifted by the slice's start offset.
        SlicedString* slicedString = SlicedString::cast(string);
        slice_offset += slicedString->offset();
        string = slicedString->parent();
        continue;
      }

      case kConsStringTag | kOneByteStringTag:
      case kConsStringTag | kTwoByteStringTag:
        // Not flat; the caller must iterate the cons tree itself.
        return ConsString::cast(string);

      case kThinStringTag | kOneByteStringTag:
      case kThinStringTag | kTwoByteStringTag:
        // Follow the forwarding pointer.
        string = ThinString::cast(string)->actual();
        continue;

      default:
        UNREACHABLE();
        return NULL;
    }
  }
}
3652
3653
// Flat one-byte view of this (already flat) string's contents.
template <>
inline Vector<const uint8_t> String::GetCharVector() {
  String::FlatContent flat = GetFlatContent();
  DCHECK(flat.IsOneByte());
  return flat.ToOneByteVector();
}


// Flat two-byte view of this (already flat) string's contents.
template <>
inline Vector<const uc16> String::GetCharVector() {
  String::FlatContent flat = GetFlatContent();
  DCHECK(flat.IsTwoByte());
  return flat.ToUC16Vector();
}
3668
ToValidIndex(Object * number)3669 uint32_t String::ToValidIndex(Object* number) {
3670 uint32_t index = PositiveNumberToUint32(number);
3671 uint32_t length_value = static_cast<uint32_t>(length());
3672 if (index > length_value) return length_value;
3673 return index;
3674 }
3675
// Character read; the one-byte payload starts right after the header.
uint16_t SeqOneByteString::SeqOneByteStringGet(int index) {
  DCHECK(index >= 0 && index < length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
}


// Character write; |value| must fit in one byte.
void SeqOneByteString::SeqOneByteStringSet(int index, uint16_t value) {
  DCHECK(index >= 0 && index < length() && value <= kMaxOneByteCharCode);
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
                   static_cast<byte>(value));
}


// Address of the first character.
Address SeqOneByteString::GetCharsAddress() {
  return FIELD_ADDR(this, kHeaderSize);
}


// Typed pointer to the one-byte character payload.
uint8_t* SeqOneByteString::GetChars() {
  return reinterpret_cast<uint8_t*>(GetCharsAddress());
}
3697
3698
// Address of the first character of the two-byte payload.
Address SeqTwoByteString::GetCharsAddress() {
  return FIELD_ADDR(this, kHeaderSize);
}
3702
3703
GetChars()3704 uc16* SeqTwoByteString::GetChars() {
3705 return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
3706 }
3707
3708
// Character read; two-byte elements at kShortSize granularity.
uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) {
  DCHECK(index >= 0 && index < length());
  return READ_UINT16_FIELD(this, kHeaderSize + index * kShortSize);
}


// Character write.
void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) {
  DCHECK(index >= 0 && index < length());
  WRITE_UINT16_FIELD(this, kHeaderSize + index * kShortSize, value);
}


// Object-size helpers; the instance_type parameter is unused here since the
// size is fully determined by the stored length.
int SeqTwoByteString::SeqTwoByteStringSize(InstanceType instance_type) {
  return SizeFor(length());
}


int SeqOneByteString::SeqOneByteStringSize(InstanceType instance_type) {
  return SizeFor(length());
}
3729
3730
// The string this slice is a view into; always sequential or external,
// as enforced by set_parent below.
String* SlicedString::parent() {
  return String::cast(READ_FIELD(this, kParentOffset));
}


void SlicedString::set_parent(String* parent, WriteBarrierMode mode) {
  DCHECK(parent->IsSeqString() || parent->IsExternalString());
  WRITE_FIELD(this, kParentOffset, parent);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kParentOffset, parent, mode);
}


// [offset]: start position of the slice within the parent string.
SMI_ACCESSORS(SlicedString, offset, kOffsetOffset)
3744
3745
// First (left) part of the cons pair.
String* ConsString::first() {
  return String::cast(READ_FIELD(this, kFirstOffset));
}


// Like first(), but without the String cast check.
Object* ConsString::unchecked_first() {
  return READ_FIELD(this, kFirstOffset);
}


void ConsString::set_first(String* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kFirstOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, value, mode);
}


// Second (right) part of the cons pair; the empty string when flat.
String* ConsString::second() {
  return String::cast(READ_FIELD(this, kSecondOffset));
}


Object* ConsString::unchecked_second() {
  return READ_FIELD(this, kSecondOffset);
}


void ConsString::set_second(String* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kSecondOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, value, mode);
}

// [actual]: the string a ThinString forwards to.
ACCESSORS(ThinString, actual, String, kActualOffset);
3778
// Whether this is a "short" external string, encoded in the instance type.
bool ExternalString::is_short() {
  InstanceType type = map()->instance_type();
  return (type & kShortExternalStringMask) == kShortExternalStringTag;
}
3783
3784
// Embedder-supplied resource backing this external string.
const ExternalOneByteString::Resource* ExternalOneByteString::resource() {
  return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
}


// Re-caches the resource's character-data pointer in the object. Short
// external strings skip this (presumably the cache field is absent in the
// short layout — confirm against the object layout definitions).
void ExternalOneByteString::update_data_cache() {
  if (is_short()) return;
  const char** data_field =
      reinterpret_cast<const char**>(FIELD_ADDR(this, kResourceDataOffset));
  *data_field = resource()->data();
}


// Installs the resource pointer and refreshes the data cache when non-NULL.
void ExternalOneByteString::set_resource(
    const ExternalOneByteString::Resource* resource) {
  DCHECK(IsAligned(reinterpret_cast<intptr_t>(resource), kPointerSize));
  *reinterpret_cast<const Resource**>(
      FIELD_ADDR(this, kResourceOffset)) = resource;
  if (resource != NULL) update_data_cache();
}


const uint8_t* ExternalOneByteString::GetChars() {
  return reinterpret_cast<const uint8_t*>(resource()->data());
}


uint16_t ExternalOneByteString::ExternalOneByteStringGet(int index) {
  DCHECK(index >= 0 && index < length());
  return GetChars()[index];
}
3816
3817
// Embedder-supplied resource backing this external string.
const ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
  return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
}


// Re-caches the resource's character-data pointer; skipped for short
// external strings (mirrors ExternalOneByteString::update_data_cache).
void ExternalTwoByteString::update_data_cache() {
  if (is_short()) return;
  const uint16_t** data_field =
      reinterpret_cast<const uint16_t**>(FIELD_ADDR(this, kResourceDataOffset));
  *data_field = resource()->data();
}
3829
3830
set_resource(const ExternalTwoByteString::Resource * resource)3831 void ExternalTwoByteString::set_resource(
3832 const ExternalTwoByteString::Resource* resource) {
3833 *reinterpret_cast<const Resource**>(
3834 FIELD_ADDR(this, kResourceOffset)) = resource;
3835 if (resource != NULL) update_data_cache();
3836 }
3837
3838
// Pointer to the external two-byte character data.
const uint16_t* ExternalTwoByteString::GetChars() {
  return resource()->data();
}


uint16_t ExternalTwoByteString::ExternalTwoByteStringGet(int index) {
  DCHECK(index >= 0 && index < length());
  return GetChars()[index];
}


// Pointer into the character data beginning at |start|.
const uint16_t* ExternalTwoByteString::ExternalTwoByteStringGetData(
    unsigned start) {
  return GetChars() + start;
}
3854
3855
// Maps a depth to its slot in the fixed-size ring buffer of frames.
int ConsStringIterator::OffsetForDepth(int depth) { return depth & kDepthMask; }


// Pushes |string| as a new frame (descending into a left child).
void ConsStringIterator::PushLeft(ConsString* string) {
  frames_[depth_++ & kDepthMask] = string;
}


// Replaces the current top frame (descending into a right child) without
// growing the depth.
void ConsStringIterator::PushRight(ConsString* string) {
  // Inplace update.
  frames_[(depth_-1) & kDepthMask] = string;
}


// Records the deepest point reached, so Pop() can be bounds-checked.
void ConsStringIterator::AdjustMaximumDepth() {
  if (depth_ > maximum_depth_) maximum_depth_ = depth_;
}


void ConsStringIterator::Pop() {
  DCHECK(depth_ > 0);
  DCHECK(depth_ <= maximum_depth_);
  depth_--;
}
3880
3881
// Returns the next character and advances, refilling the buffer from the
// cons-string iterator when the current flat segment is exhausted.
uint16_t StringCharacterStream::GetNext() {
  DCHECK(buffer8_ != NULL && end_ != NULL);
  // Advance cursor if needed.
  if (buffer8_ == end_) HasMore();
  DCHECK(buffer8_ < end_);
  return is_one_byte_ ? *buffer8_++ : *buffer16_++;
}


StringCharacterStream::StringCharacterStream(String* string, int offset)
    : is_one_byte_(false) {
  Reset(string, offset);
}


// Re-targets the stream at |string| starting from |offset|. VisitFlat
// either consumes a flat prefix directly (via the Visit* callbacks below)
// or hands back a cons string, which the iterator then walks segment by
// segment.
void StringCharacterStream::Reset(String* string, int offset) {
  buffer8_ = NULL;
  end_ = NULL;
  ConsString* cons_string = String::VisitFlat(this, string, offset);
  iter_.Reset(cons_string, offset);
  if (cons_string != NULL) {
    string = iter_.Next(&offset);
    if (string != NULL) String::VisitFlat(this, string, offset);
  }
}


// True if another character is available; pulls the next flat segment from
// the iterator when the current one is finished.
bool StringCharacterStream::HasMore() {
  if (buffer8_ != end_) return true;
  int offset;
  String* string = iter_.Next(&offset);
  DCHECK_EQ(offset, 0);
  if (string == NULL) return false;
  String::VisitFlat(this, string);
  DCHECK(buffer8_ != end_);
  return true;
}


// VisitFlat callback: window over a one-byte segment.
void StringCharacterStream::VisitOneByteString(
    const uint8_t* chars, int length) {
  is_one_byte_ = true;
  buffer8_ = chars;
  end_ = chars + length;
}


// VisitFlat callback: window over a two-byte segment. end_ is kept in byte
// units so it remains comparable with the one-byte cursor.
void StringCharacterStream::VisitTwoByteString(
    const uint16_t* chars, int length) {
  is_one_byte_ = false;
  buffer16_ = chars;
  end_ = reinterpret_cast<const uint8_t*>(chars + length);
}
3935
3936
// Allocated size in bytes, rounded up to pointer alignment.
int ByteArray::Size() { return RoundUp(length() + kHeaderSize, kPointerSize); }

byte ByteArray::get(int index) {
  DCHECK(index >= 0 && index < this->length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
}

void ByteArray::set(int index, byte value) {
  DCHECK(index >= 0 && index < this->length());
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
}

// Bulk-copies |length| bytes from |buffer| into the array at |index|.
// The kMaxInt guard keeps index + length from overflowing int.
void ByteArray::copy_in(int index, const byte* buffer, int length) {
  DCHECK(index >= 0 && length >= 0 && length <= kMaxInt - index &&
         index + length <= this->length());
  byte* dst_addr = FIELD_ADDR(this, kHeaderSize + index * kCharSize);
  memcpy(dst_addr, buffer, length);
}

// Bulk-copies |length| bytes out of the array at |index| into |buffer|.
void ByteArray::copy_out(int index, byte* buffer, int length) {
  DCHECK(index >= 0 && length >= 0 && length <= kMaxInt - index &&
         index + length <= this->length());
  const byte* src_addr = FIELD_ADDR(this, kHeaderSize + index * kCharSize);
  memcpy(buffer, src_addr, length);
}

// Int-granularity accessors; |index| counts ints, not bytes.
int ByteArray::get_int(int index) {
  DCHECK(index >= 0 && index < this->length() / kIntSize);
  return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
}

void ByteArray::set_int(int index, int value) {
  DCHECK(index >= 0 && index < this->length() / kIntSize);
  WRITE_INT_FIELD(this, kHeaderSize + index * kIntSize, value);
}

// Recovers the tagged ByteArray pointer from the address of its payload.
ByteArray* ByteArray::FromDataStartAddress(Address address) {
  DCHECK_TAG_ALIGNED(address);
  return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
}


int ByteArray::ByteArraySize() { return SizeFor(this->length()); }


// Untagged address of the first payload byte.
Address ByteArray::GetDataStartAddress() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
}
3985
3986
// Reads one byte of bytecode at |index|.
byte BytecodeArray::get(int index) {
  DCHECK(index >= 0 && index < this->length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
}


// Writes one byte of bytecode at |index|.
void BytecodeArray::set(int index, byte value) {
  DCHECK(index >= 0 && index < this->length());
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
}


// Sets the interpreter frame size in bytes; must be a non-negative multiple
// of the pointer size.
void BytecodeArray::set_frame_size(int frame_size) {
  DCHECK_GE(frame_size, 0);
  DCHECK(IsAligned(frame_size, static_cast<unsigned>(kPointerSize)));
  WRITE_INT_FIELD(this, kFrameSizeOffset, frame_size);
}


// Frame size in bytes.
int BytecodeArray::frame_size() const {
  return READ_INT_FIELD(this, kFrameSizeOffset);
}


// Number of interpreter registers implied by the frame size.
int BytecodeArray::register_count() const {
  return frame_size() / kPointerSize;
}


void BytecodeArray::set_parameter_count(int number_of_parameters) {
  DCHECK_GE(number_of_parameters, 0);
  // Parameter count is stored as the size on stack of the parameters to allow
  // it to be used directly by generated code.
  WRITE_INT_FIELD(this, kParameterSizeOffset,
                  (number_of_parameters << kPointerSizeLog2));
}
4023
// Remaining interrupt budget for this bytecode array.
int BytecodeArray::interrupt_budget() const {
  return READ_INT_FIELD(this, kInterruptBudgetOffset);
}

void BytecodeArray::set_interrupt_budget(int interrupt_budget) {
  DCHECK_GE(interrupt_budget, 0);
  WRITE_INT_FIELD(this, kInterruptBudgetOffset, interrupt_budget);
}

// Loop nesting level used for on-stack replacement; stored as one byte.
int BytecodeArray::osr_loop_nesting_level() const {
  return READ_INT8_FIELD(this, kOSRNestingLevelOffset);
}

void BytecodeArray::set_osr_loop_nesting_level(int depth) {
  DCHECK(0 <= depth && depth <= AbstractCode::kMaxLoopNestingMarker);
  STATIC_ASSERT(AbstractCode::kMaxLoopNestingMarker < kMaxInt8);
  WRITE_INT8_FIELD(this, kOSRNestingLevelOffset, depth);
}

// Bytecode age, clamped to [kFirstBytecodeAge, kLastBytecodeAge]; one byte.
BytecodeArray::Age BytecodeArray::bytecode_age() const {
  return static_cast<Age>(READ_INT8_FIELD(this, kBytecodeAgeOffset));
}

void BytecodeArray::set_bytecode_age(BytecodeArray::Age age) {
  DCHECK_GE(age, kFirstBytecodeAge);
  DCHECK_LE(age, kLastBytecodeAge);
  STATIC_ASSERT(kLastBytecodeAge <= kMaxInt8);
  WRITE_INT8_FIELD(this, kBytecodeAgeOffset, static_cast<int8_t>(age));
}

int BytecodeArray::parameter_count() const {
  // Parameter count is stored as the size on stack of the parameters to allow
  // it to be used directly by generated code.
  return READ_INT_FIELD(this, kParameterSizeOffset) >> kPointerSizeLog2;
}
4059
// Heap references to the bytecode's side tables.
ACCESSORS(BytecodeArray, constant_pool, FixedArray, kConstantPoolOffset)
ACCESSORS(BytecodeArray, handler_table, FixedArray, kHandlerTableOffset)
ACCESSORS(BytecodeArray, source_position_table, ByteArray,
          kSourcePositionTableOffset)
4064
// Untagged address of the first bytecode (payload follows the header).
Address BytecodeArray::GetFirstBytecodeAddress() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
}


int BytecodeArray::BytecodeArraySize() { return SizeFor(this->length()); }
4071
SizeIncludingMetadata()4072 int BytecodeArray::SizeIncludingMetadata() {
4073 int size = BytecodeArraySize();
4074 size += constant_pool()->Size();
4075 size += handler_table()->Size();
4076 size += source_position_table()->Size();
4077 return size;
4078 }
4079
// [base_pointer]: tagged component added to external_pointer() in DataPtr().
// NOTE(review): DataSize() treats Smi::kZero here as "no on-heap data" —
// presumably off-heap backing stores; confirm against allocation sites.
ACCESSORS(FixedTypedArrayBase, base_pointer, Object, kBasePointerOffset)


// Raw untagged component of the data pointer, stored as an intptr field.
void* FixedTypedArrayBase::external_pointer() const {
  intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
  return reinterpret_cast<void*>(ptr);
}


// Stores the raw pointer component. |mode| is unused: the field holds an
// untagged address, so no write barrier is emitted.
void FixedTypedArrayBase::set_external_pointer(void* value,
                                               WriteBarrierMode mode) {
  intptr_t ptr = reinterpret_cast<intptr_t>(value);
  WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
}


// Effective element storage address: base_pointer() + external_pointer().
void* FixedTypedArrayBase::DataPtr() {
  return reinterpret_cast<void*>(
      reinterpret_cast<intptr_t>(base_pointer()) +
      reinterpret_cast<intptr_t>(external_pointer()));
}
4101
4102
// Per-element byte width for a fixed typed array instance type, generated
// from the TYPED_ARRAYS list.
int FixedTypedArrayBase::ElementSize(InstanceType type) {
  int element_size;
  switch (type) {
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
  case FIXED_##TYPE##_ARRAY_TYPE:                       \
    element_size = size;                                \
    break;

    TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
    default:
      UNREACHABLE();
      return 0;
  }
  return element_size;
}
4119
4120
// Payload size in bytes; zero when base_pointer() is Smi::kZero.
int FixedTypedArrayBase::DataSize(InstanceType type) {
  if (base_pointer() == Smi::kZero) return 0;
  return length() * ElementSize(type);
}


int FixedTypedArrayBase::DataSize() {
  return DataSize(map()->instance_type());
}


// Object size: header plus data, pointer-aligned.
int FixedTypedArrayBase::size() {
  return OBJECT_POINTER_ALIGN(kDataOffset + DataSize());
}


int FixedTypedArrayBase::TypedArraySize(InstanceType type) {
  return OBJECT_POINTER_ALIGN(kDataOffset + DataSize(type));
}


// Size an instance of |type| with |length| elements would occupy.
int FixedTypedArrayBase::TypedArraySize(InstanceType type, int length) {
  return OBJECT_POINTER_ALIGN(kDataOffset + length * ElementSize(type));
}
4145
4146
// Default element values (used by FixedTypedArray<T>::SetValue when storing
// undefined): zero for integral element types, NaN for floating point.
uint8_t Uint8ArrayTraits::defaultValue() { return 0; }


uint8_t Uint8ClampedArrayTraits::defaultValue() { return 0; }


int8_t Int8ArrayTraits::defaultValue() { return 0; }


uint16_t Uint16ArrayTraits::defaultValue() { return 0; }


int16_t Int16ArrayTraits::defaultValue() { return 0; }


uint32_t Uint32ArrayTraits::defaultValue() { return 0; }


int32_t Int32ArrayTraits::defaultValue() { return 0; }


float Float32ArrayTraits::defaultValue() {
  return std::numeric_limits<float>::quiet_NaN();
}


double Float64ArrayTraits::defaultValue() {
  return std::numeric_limits<double>::quiet_NaN();
}
4176
4177
// Unboxed element read at |index|.
template <class Traits>
typename Traits::ElementType FixedTypedArray<Traits>::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  ElementType* ptr = reinterpret_cast<ElementType*>(DataPtr());
  return ptr[index];
}


// Unboxed element write at |index|.
template <class Traits>
void FixedTypedArray<Traits>::set(int index, ElementType value) {
  DCHECK((index >= 0) && (index < this->length()));
  ElementType* ptr = reinterpret_cast<ElementType*>(DataPtr());
  ptr[index] = value;
}


// Generic int -> element conversion: a plain narrowing cast. Specialized
// for Uint8Clamped, which clamps instead of truncating.
template <class Traits>
typename Traits::ElementType FixedTypedArray<Traits>::from_int(int value) {
  return static_cast<ElementType>(value);
}
4198
4199
4200 template <> inline
from_int(int value)4201 uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_int(int value) {
4202 if (value < 0) return 0;
4203 if (value > 0xFF) return 0xFF;
4204 return static_cast<uint8_t>(value);
4205 }
4206
4207
// Generic double -> element conversion: integral element types go through
// DoubleToInt32 truncation before the narrowing cast.
template <class Traits>
typename Traits::ElementType FixedTypedArray<Traits>::from_double(
    double value) {
  return static_cast<ElementType>(DoubleToInt32(value));
}


// Uint8Clamped: clamp to [0, 255], rounding with lrint. The !(value > 0)
// test maps both NaN and non-positive values to zero.
template<> inline
uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_double(double value) {
  // Handle NaNs and less than zero values which clamp to zero.
  if (!(value > 0)) return 0;
  if (value > 0xFF) return 0xFF;
  return static_cast<uint8_t>(lrint(value));
}


// Float32: plain narrowing conversion.
template<> inline
float FixedTypedArray<Float32ArrayTraits>::from_double(double value) {
  return static_cast<float>(value);
}


// Float64: identity.
template<> inline
double FixedTypedArray<Float64ArrayTraits>::from_double(double value) {
  return value;
}
4234
// Boxes element |index| as a JS value via the traits' ToHandle.
template <class Traits>
Handle<Object> FixedTypedArray<Traits>::get(FixedTypedArray<Traits>* array,
                                            int index) {
  return Traits::ToHandle(array->GetIsolate(), array->get_scalar(index));
}


// Stores a JS number (Smi or HeapNumber) at |index|, converting it to the
// element type; undefined stores the traits' default value.
template <class Traits>
void FixedTypedArray<Traits>::SetValue(uint32_t index, Object* value) {
  ElementType cast_value = Traits::defaultValue();
  if (value->IsSmi()) {
    int int_value = Smi::cast(value)->value();
    cast_value = from_int(int_value);
  } else if (value->IsHeapNumber()) {
    double double_value = HeapNumber::cast(value)->value();
    cast_value = from_double(double_value);
  } else {
    // Clamp undefined to the default value. All other types have been
    // converted to a number type further up in the call chain.
    DCHECK(value->IsUndefined(GetIsolate()));
  }
  set(index, cast_value);
}
4258
4259
// ToHandle implementations: element types that always fit in a Smi are
// boxed directly; 32-bit and floating-point values go through the factory,
// which may allocate a heap number.
Handle<Object> Uint8ArrayTraits::ToHandle(Isolate* isolate, uint8_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}


Handle<Object> Uint8ClampedArrayTraits::ToHandle(Isolate* isolate,
                                                 uint8_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}


Handle<Object> Int8ArrayTraits::ToHandle(Isolate* isolate, int8_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}


Handle<Object> Uint16ArrayTraits::ToHandle(Isolate* isolate, uint16_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}


Handle<Object> Int16ArrayTraits::ToHandle(Isolate* isolate, int16_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}


Handle<Object> Uint32ArrayTraits::ToHandle(Isolate* isolate, uint32_t scalar) {
  return isolate->factory()->NewNumberFromUint(scalar);
}


Handle<Object> Int32ArrayTraits::ToHandle(Isolate* isolate, int32_t scalar) {
  return isolate->factory()->NewNumberFromInt(scalar);
}


Handle<Object> Float32ArrayTraits::ToHandle(Isolate* isolate, float scalar) {
  return isolate->factory()->NewNumber(scalar);
}


Handle<Object> Float64ArrayTraits::ToHandle(Isolate* isolate, double scalar) {
  return isolate->factory()->NewNumber(scalar);
}
4304
4305
// Visitor id for the static heap visitor; stored as one byte in the map.
int Map::visitor_id() {
  return READ_BYTE_FIELD(this, kVisitorIdOffset);
}


void Map::set_visitor_id(int id) {
  DCHECK(0 <= id && id < 256);
  WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
}


// Instance size in bytes; stored in words in a single byte and scaled here.
// Read without a barrier (NOBARRIER variant).
int Map::instance_size() {
  return NOBARRIER_READ_BYTE_FIELD(
      this, kInstanceSizeOffset) << kPointerSizeLog2;
}


// Overloaded byte field: in-object property count for JSObject maps, or the
// constructor function index for primitive maps (see the typed accessors).
int Map::inobject_properties_or_constructor_function_index() {
  return READ_BYTE_FIELD(this,
                         kInObjectPropertiesOrConstructorFunctionIndexOffset);
}


void Map::set_inobject_properties_or_constructor_function_index(int value) {
  DCHECK(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this, kInObjectPropertiesOrConstructorFunctionIndexOffset,
                   static_cast<byte>(value));
}
4334
4335
// Typed views of the overloaded byte field, guarded by the map kind.
int Map::GetInObjectProperties() {
  DCHECK(IsJSObjectMap());
  return inobject_properties_or_constructor_function_index();
}


void Map::SetInObjectProperties(int value) {
  DCHECK(IsJSObjectMap());
  set_inobject_properties_or_constructor_function_index(value);
}


int Map::GetConstructorFunctionIndex() {
  DCHECK(IsPrimitiveMap());
  return inobject_properties_or_constructor_function_index();
}


void Map::SetConstructorFunctionIndex(int value) {
  DCHECK(IsPrimitiveMap());
  set_inobject_properties_or_constructor_function_index(value);
}
4358
4359
// Byte offset of in-object property |index|; the property area sits at the
// end of the instance, so offsets are computed back from instance_size().
int Map::GetInObjectPropertyOffset(int index) {
  // Adjust for the number of properties stored in the object.
  index -= GetInObjectProperties();
  DCHECK(index <= 0);
  return instance_size() + (index * kPointerSize);
}
4366
4367
// Test-only thin wrapper exposing the (presumably private) AddMissingTransitions.
Handle<Map> Map::AddMissingTransitionsForTesting(
    Handle<Map> split_map, Handle<DescriptorArray> descriptors,
    Handle<LayoutDescriptor> full_layout_descriptor) {
  return AddMissingTransitions(split_map, descriptors, full_layout_descriptor);
}
4373
4374
SizeFromMap(Map * map)4375 int HeapObject::SizeFromMap(Map* map) {
4376 int instance_size = map->instance_size();
4377 if (instance_size != kVariableSizeSentinel) return instance_size;
4378 // Only inline the most frequent cases.
4379 InstanceType instance_type = map->instance_type();
4380 if (instance_type == FIXED_ARRAY_TYPE ||
4381 instance_type == TRANSITION_ARRAY_TYPE) {
4382 return FixedArray::SizeFor(
4383 reinterpret_cast<FixedArray*>(this)->synchronized_length());
4384 }
4385 if (instance_type == ONE_BYTE_STRING_TYPE ||
4386 instance_type == ONE_BYTE_INTERNALIZED_STRING_TYPE) {
4387 // Strings may get concurrently truncated, hence we have to access its
4388 // length synchronized.
4389 return SeqOneByteString::SizeFor(
4390 reinterpret_cast<SeqOneByteString*>(this)->synchronized_length());
4391 }
4392 if (instance_type == BYTE_ARRAY_TYPE) {
4393 return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
4394 }
4395 if (instance_type == BYTECODE_ARRAY_TYPE) {
4396 return reinterpret_cast<BytecodeArray*>(this)->BytecodeArraySize();
4397 }
4398 if (instance_type == FREE_SPACE_TYPE) {
4399 return reinterpret_cast<FreeSpace*>(this)->nobarrier_size();
4400 }
4401 if (instance_type == STRING_TYPE ||
4402 instance_type == INTERNALIZED_STRING_TYPE) {
4403 // Strings may get concurrently truncated, hence we have to access its
4404 // length synchronized.
4405 return SeqTwoByteString::SizeFor(
4406 reinterpret_cast<SeqTwoByteString*>(this)->synchronized_length());
4407 }
4408 if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
4409 return FixedDoubleArray::SizeFor(
4410 reinterpret_cast<FixedDoubleArray*>(this)->length());
4411 }
4412 if (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
4413 instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE) {
4414 return reinterpret_cast<FixedTypedArrayBase*>(
4415 this)->TypedArraySize(instance_type);
4416 }
4417 DCHECK(instance_type == CODE_TYPE);
4418 return reinterpret_cast<Code*>(this)->CodeSize();
4419 }
4420
4421
// Stores the instance size. The size must be pointer-size aligned; it is
// stored as a word count so it fits in a single byte.
void Map::set_instance_size(int value) {
  DCHECK_EQ(0, value & (kPointerSize - 1));
  value >>= kPointerSizeLog2;
  DCHECK(0 <= value && value < 256);
  NOBARRIER_WRITE_BYTE_FIELD(
      this, kInstanceSizeOffset, static_cast<byte>(value));
}


// Zeroes the currently-unused byte in the map layout.
void Map::clear_unused() { WRITE_BYTE_FIELD(this, kUnusedOffset, 0); }


InstanceType Map::instance_type() {
  return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
}


void Map::set_instance_type(InstanceType value) {
  WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
}


int Map::unused_property_fields() {
  return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
}


// The count is stored in one byte, so larger values are clamped to 255.
void Map::set_unused_property_fields(int value) {
  WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
}
4452
4453
// Raw accessors for the two one-byte flag fields on the map. The individual
// flag accessors below are built on top of these.
byte Map::bit_field() const { return READ_BYTE_FIELD(this, kBitFieldOffset); }


void Map::set_bit_field(byte value) {
  WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
}


byte Map::bit_field2() const { return READ_BYTE_FIELD(this, kBitField2Offset); }


void Map::set_bit_field2(byte value) {
  WRITE_BYTE_FIELD(this, kBitField2Offset, value);
}
4468
4469
set_non_instance_prototype(bool value)4470 void Map::set_non_instance_prototype(bool value) {
4471 if (value) {
4472 set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
4473 } else {
4474 set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
4475 }
4476 }
4477
4478
// Tests the kHasNonInstancePrototype flag in bit_field.
bool Map::has_non_instance_prototype() {
  return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
}


// Sets or clears the kIsConstructor flag in bit_field.
void Map::set_is_constructor(bool value) {
  if (value) {
    set_bit_field(bit_field() | (1 << kIsConstructor));
  } else {
    set_bit_field(bit_field() & ~(1 << kIsConstructor));
  }
}


bool Map::is_constructor() const {
  return ((1 << kIsConstructor) & bit_field()) != 0;
}

// The hidden-prototype flag lives in bit_field3, not bit_field.
void Map::set_has_hidden_prototype(bool value) {
  set_bit_field3(HasHiddenPrototype::update(bit_field3(), value));
}

bool Map::has_hidden_prototype() const {
  return HasHiddenPrototype::decode(bit_field3());
}


// The interceptor and undetectable flags below are one-way: there are
// setters but no clearers.
void Map::set_has_indexed_interceptor() {
  set_bit_field(bit_field() | (1 << kHasIndexedInterceptor));
}


bool Map::has_indexed_interceptor() {
  return ((1 << kHasIndexedInterceptor) & bit_field()) != 0;
}


void Map::set_is_undetectable() {
  set_bit_field(bit_field() | (1 << kIsUndetectable));
}


bool Map::is_undetectable() {
  return ((1 << kIsUndetectable) & bit_field()) != 0;
}


void Map::set_has_named_interceptor() {
  set_bit_field(bit_field() | (1 << kHasNamedInterceptor));
}


bool Map::has_named_interceptor() {
  return ((1 << kHasNamedInterceptor) & bit_field()) != 0;
}
4534
4535
set_is_access_check_needed(bool access_check_needed)4536 void Map::set_is_access_check_needed(bool access_check_needed) {
4537 if (access_check_needed) {
4538 set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
4539 } else {
4540 set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
4541 }
4542 }
4543
4544
bool Map::is_access_check_needed() {
  return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
}


// Sets or clears the kIsExtensible flag in bit_field2.
void Map::set_is_extensible(bool value) {
  if (value) {
    set_bit_field2(bit_field2() | (1 << kIsExtensible));
  } else {
    set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
  }
}

bool Map::is_extensible() {
  return ((1 << kIsExtensible) & bit_field2()) != 0;
}


void Map::set_is_prototype_map(bool value) {
  set_bit_field2(IsPrototypeMapBits::update(bit_field2(), value));
}

bool Map::is_prototype_map() const {
  return IsPrototypeMapBits::decode(bit_field2());
}

// Delegates to the PrototypeInfo, if one is present; maps without a
// PrototypeInfo answer false.
bool Map::should_be_fast_prototype_map() const {
  if (!prototype_info()->IsPrototypeInfo()) return false;
  return PrototypeInfo::cast(prototype_info())->should_be_fast_map();
}
4575
// Stores the elements kind in its bit_field2 slot and verifies via the
// trailing DCHECK that the round trip through the bitfield is lossless.
void Map::set_elements_kind(ElementsKind elements_kind) {
  DCHECK(static_cast<int>(elements_kind) < kElementsKindCount);
  DCHECK(kElementsKindCount <= (1 << Map::ElementsKindBits::kSize));
  set_bit_field2(Map::ElementsKindBits::update(bit_field2(), elements_kind));
  DCHECK(this->elements_kind() == elements_kind);
}


ElementsKind Map::elements_kind() {
  return Map::ElementsKindBits::decode(bit_field2());
}
4587
4588
// Convenience predicates over elements_kind(); each forwards to the
// corresponding ElementsKind classifier or compares against a single kind.
bool Map::has_fast_smi_elements() {
  return IsFastSmiElementsKind(elements_kind());
}

bool Map::has_fast_object_elements() {
  return IsFastObjectElementsKind(elements_kind());
}

bool Map::has_fast_smi_or_object_elements() {
  return IsFastSmiOrObjectElementsKind(elements_kind());
}

bool Map::has_fast_double_elements() {
  return IsFastDoubleElementsKind(elements_kind());
}

bool Map::has_fast_elements() { return IsFastElementsKind(elements_kind()); }

bool Map::has_sloppy_arguments_elements() {
  return IsSloppyArgumentsElements(elements_kind());
}

bool Map::has_fast_sloppy_arguments_elements() {
  return elements_kind() == FAST_SLOPPY_ARGUMENTS_ELEMENTS;
}

bool Map::has_fast_string_wrapper_elements() {
  return elements_kind() == FAST_STRING_WRAPPER_ELEMENTS;
}

bool Map::has_fixed_typed_array_elements() {
  return IsFixedTypedArrayElementsKind(elements_kind());
}

bool Map::has_dictionary_elements() {
  return IsDictionaryElementsKind(elements_kind());
}
4626
4627
// Marking a map as a dictionary map also copies the same value into the
// IsUnstable bit, so dictionary maps are simultaneously marked unstable.
void Map::set_dictionary_map(bool value) {
  uint32_t new_bit_field3 = DictionaryMap::update(bit_field3(), value);
  new_bit_field3 = IsUnstable::update(new_bit_field3, value);
  set_bit_field3(new_bit_field3);
}


bool Map::is_dictionary_map() {
  return DictionaryMap::decode(bit_field3());
}
4638
4639
// Reads the raw flags word of a Code object.
Code::Flags Code::flags() {
  return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
}
4643
4644
// Accessors for the remaining bit_field3-based flags. Most follow the
// BitField::update / BitField::decode pattern.
void Map::set_owns_descriptors(bool owns_descriptors) {
  set_bit_field3(OwnsDescriptors::update(bit_field3(), owns_descriptors));
}


bool Map::owns_descriptors() {
  return OwnsDescriptors::decode(bit_field3());
}


// One-way setter: there is no way to clear the callable bit here.
void Map::set_is_callable() { set_bit_field(bit_field() | (1 << kIsCallable)); }


bool Map::is_callable() const {
  return ((1 << kIsCallable) & bit_field()) != 0;
}


// One-way setter: deprecation cannot be undone through this interface.
void Map::deprecate() {
  set_bit_field3(Deprecated::update(bit_field3(), true));
}


bool Map::is_deprecated() {
  return Deprecated::decode(bit_field3());
}


void Map::set_migration_target(bool value) {
  set_bit_field3(IsMigrationTarget::update(bit_field3(), value));
}


bool Map::is_migration_target() {
  return IsMigrationTarget::decode(bit_field3());
}

void Map::set_immutable_proto(bool value) {
  set_bit_field3(ImmutablePrototype::update(bit_field3(), value));
}

bool Map::is_immutable_proto() {
  return ImmutablePrototype::decode(bit_field3());
}

void Map::set_new_target_is_base(bool value) {
  set_bit_field3(NewTargetIsBase::update(bit_field3(), value));
}


bool Map::new_target_is_base() { return NewTargetIsBase::decode(bit_field3()); }


void Map::set_construction_counter(int value) {
  set_bit_field3(ConstructionCounter::update(bit_field3(), value));
}


int Map::construction_counter() {
  return ConstructionCounter::decode(bit_field3());
}


// One-way setter: stability is lost permanently for this map.
void Map::mark_unstable() {
  set_bit_field3(IsUnstable::update(bit_field3(), true));
}


// Note the inversion: the stored bit is "unstable".
bool Map::is_stable() {
  return !IsUnstable::decode(bit_field3());
}


bool Map::has_code_cache() {
  // Code caches are always fixed arrays. The empty fixed array is used as a
  // sentinel for an absent code cache.
  return code_cache()->length() != 0;
}
4722 }
4723
4724
CanBeDeprecated()4725 bool Map::CanBeDeprecated() {
4726 int descriptor = LastAdded();
4727 for (int i = 0; i <= descriptor; i++) {
4728 PropertyDetails details = instance_descriptors()->GetDetails(i);
4729 if (details.representation().IsNone()) return true;
4730 if (details.representation().IsSmi()) return true;
4731 if (details.representation().IsDouble()) return true;
4732 if (details.representation().IsHeapObject()) return true;
4733 if (details.kind() == kData && details.location() == kDescriptor) {
4734 return true;
4735 }
4736 }
4737 return false;
4738 }
4739
4740
// Marks a stable map unstable and deoptimizes dependent code in the
// prototype-check group. A no-op on maps that are already unstable.
void Map::NotifyLeafMapLayoutChange() {
  if (is_stable()) {
    mark_unstable();
    dependent_code()->DeoptimizeDependentCodeGroup(
        GetIsolate(),
        DependentCode::kPrototypeCheckGroup);
  }
}


bool Map::CanTransition() {
  // Only JSObject and subtypes have map transitions and back pointers.
  STATIC_ASSERT(LAST_TYPE == LAST_JS_OBJECT_TYPE);
  return instance_type() >= FIRST_JS_OBJECT_TYPE;
}
4756
4757
// Instance-type classification predicates. The STATIC_ASSERTs pin the
// instance-type range layout these range comparisons rely on.
bool Map::IsBooleanMap() { return this == GetHeap()->boolean_map(); }
bool Map::IsPrimitiveMap() {
  STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE);
  return instance_type() <= LAST_PRIMITIVE_TYPE;
}
bool Map::IsJSReceiverMap() {
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  return instance_type() >= FIRST_JS_RECEIVER_TYPE;
}
bool Map::IsJSObjectMap() {
  STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
  return instance_type() >= FIRST_JS_OBJECT_TYPE;
}
bool Map::IsJSArrayMap() { return instance_type() == JS_ARRAY_TYPE; }
bool Map::IsJSFunctionMap() { return instance_type() == JS_FUNCTION_TYPE; }
bool Map::IsStringMap() { return instance_type() < FIRST_NONSTRING_TYPE; }
bool Map::IsJSProxyMap() { return instance_type() == JS_PROXY_TYPE; }
bool Map::IsJSGlobalProxyMap() {
  return instance_type() == JS_GLOBAL_PROXY_TYPE;
}
bool Map::IsJSGlobalObjectMap() {
  return instance_type() == JS_GLOBAL_OBJECT_TYPE;
}
bool Map::IsJSTypedArrayMap() { return instance_type() == JS_TYPED_ARRAY_TYPE; }
bool Map::IsJSDataViewMap() { return instance_type() == JS_DATA_VIEW_TYPE; }

// The DCHECK documents the invariant that non-special receivers have
// neither a named interceptor nor access checks.
bool Map::IsSpecialReceiverMap() {
  bool result = IsSpecialReceiverInstanceType(instance_type());
  DCHECK_IMPLIES(!result,
                 !has_named_interceptor() && !is_access_check_needed());
  return result;
}
4790
// Map checks can be omitted only for stable maps, and only when the
// corresponding flag is enabled.
bool Map::CanOmitMapChecks() {
  return is_stable() && FLAG_omit_map_checks_for_leaf_maps;
}
4794
4795
// DependentCode stores its fields in fixed-array slots: a next link, a Smi
// flags word (count + group), and the dependent code objects starting at
// kCodesStartIndex.
DependentCode* DependentCode::next_link() {
  return DependentCode::cast(get(kNextLinkIndex));
}


void DependentCode::set_next_link(DependentCode* next) {
  set(kNextLinkIndex, next);
}


int DependentCode::flags() { return Smi::cast(get(kFlagsIndex))->value(); }


void DependentCode::set_flags(int flags) {
  set(kFlagsIndex, Smi::FromInt(flags));
}


// Count and group are bitfields packed into the flags Smi.
int DependentCode::count() { return CountField::decode(flags()); }

void DependentCode::set_count(int value) {
  set_flags(CountField::update(flags(), value));
}


DependentCode::DependencyGroup DependentCode::group() {
  return static_cast<DependencyGroup>(GroupField::decode(flags()));
}


void DependentCode::set_group(DependentCode::DependencyGroup group) {
  set_flags(GroupField::update(flags(), static_cast<int>(group)));
}


void DependentCode::set_object_at(int i, Object* object) {
  set(kCodesStartIndex + i, object);
}


Object* DependentCode::object_at(int i) {
  return get(kCodesStartIndex + i);
}


// Clearing a slot stores undefined rather than shrinking the array.
void DependentCode::clear_at(int i) {
  set_undefined(kCodesStartIndex + i);
}


void DependentCode::copy(int from, int to) {
  set(kCodesStartIndex + to, get(kCodesStartIndex + from));
}
4849
4850
// Stores the raw flags word; the STATIC_ASSERT guards that every code kind
// fits in the KindField bits.
void Code::set_flags(Code::Flags flags) {
  STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
  WRITE_INT_FIELD(this, kFlagsOffset, flags);
}


// The code kind is a bitfield inside the flags word.
Code::Kind Code::kind() {
  return ExtractKindFromFlags(flags());
}

// True for stubs, handlers, and every IC kind in IC_KIND_LIST.
bool Code::IsCodeStubOrIC() {
  switch (kind()) {
    case STUB:
    case HANDLER:
#define CASE_KIND(kind) case kind:
      IC_KIND_LIST(CASE_KIND)
#undef CASE_KIND
      return true;
    default:
      return false;
  }
}

// Extra IC state is only meaningful for the stub kinds checked in the
// DCHECK below.
ExtraICState Code::extra_ic_state() {
  DCHECK(is_binary_op_stub() || is_compare_ic_stub() ||
         is_to_boolean_ic_stub() || is_debug_stub());
  return ExtractExtraICStateFromFlags(flags());
}
4879
4880
// For initialization.
// Raw writers that bypass the bitfield accessors; used to set the whole
// word at once when a Code object is created.
void Code::set_raw_kind_specific_flags1(int value) {
  WRITE_INT_FIELD(this, kKindSpecificFlags1Offset, value);
}


void Code::set_raw_kind_specific_flags2(int value) {
  WRITE_INT_FIELD(this, kKindSpecificFlags2Offset, value);
}


// Decodes the crankshafted bit from KindSpecificFlags2.
inline bool Code::is_crankshafted() {
  return IsCrankshaftedField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
}


// Crankshafted code that is not an optimized function is a hydrogen stub.
inline bool Code::is_hydrogen_stub() {
  return is_crankshafted() && kind() != OPTIMIZED_FUNCTION;
}

// Identity comparison against the three interpreter trampoline builtins.
inline bool Code::is_interpreter_trampoline_builtin() {
  Builtins* builtins = GetIsolate()->builtins();
  return this == *builtins->InterpreterEntryTrampoline() ||
         this == *builtins->InterpreterEnterBytecodeAdvance() ||
         this == *builtins->InterpreterEnterBytecodeDispatch();
}

// The unwinding-info bit lives in the main flags word, not in the
// kind-specific flags.
inline bool Code::has_unwinding_info() const {
  return HasUnwindingInfoField::decode(READ_UINT32_FIELD(this, kFlagsOffset));
}

inline void Code::set_has_unwinding_info(bool state) {
  uint32_t previous = READ_UINT32_FIELD(this, kFlagsOffset);
  uint32_t updated_value = HasUnwindingInfoField::update(previous, state);
  WRITE_UINT32_FIELD(this, kFlagsOffset, updated_value);
}

inline void Code::set_is_crankshafted(bool value) {
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = IsCrankshaftedField::update(previous, value);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}


inline bool Code::is_turbofanned() {
  return IsTurbofannedField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}


inline void Code::set_is_turbofanned(bool value) {
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = IsTurbofannedField::update(previous, value);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}
4937
4938
// The following flags are kind-restricted (see the DCHECKs): the weak-object
// flag applies to optimized functions only, the rest to builtins only.
inline bool Code::can_have_weak_objects() {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  return CanHaveWeakObjectsField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}


inline void Code::set_can_have_weak_objects(bool value) {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = CanHaveWeakObjectsField::update(previous, value);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}

inline bool Code::is_construct_stub() {
  DCHECK(kind() == BUILTIN);
  return IsConstructStubField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}

inline void Code::set_is_construct_stub(bool value) {
  DCHECK(kind() == BUILTIN);
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = IsConstructStubField::update(previous, value);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}

inline bool Code::is_promise_rejection() {
  DCHECK(kind() == BUILTIN);
  return IsPromiseRejectionField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}

inline void Code::set_is_promise_rejection(bool value) {
  DCHECK(kind() == BUILTIN);
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = IsPromiseRejectionField::update(previous, value);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}

inline bool Code::is_exception_caught() {
  DCHECK(kind() == BUILTIN);
  return IsExceptionCaughtField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}

inline void Code::set_is_exception_caught(bool value) {
  DCHECK(kind() == BUILTIN);
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = IsExceptionCaughtField::update(previous, value);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}
4991
// Accessors for the full-codegen flags word (kFullCodeFlags). All of these
// are only valid for FUNCTION code, as the DCHECKs enforce.
bool Code::has_deoptimization_support() {
  DCHECK_EQ(FUNCTION, kind());
  unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
}


void Code::set_has_deoptimization_support(bool value) {
  DCHECK_EQ(FUNCTION, kind());
  unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
  WRITE_UINT32_FIELD(this, kFullCodeFlags, flags);
}


bool Code::has_debug_break_slots() {
  DCHECK_EQ(FUNCTION, kind());
  unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
}


void Code::set_has_debug_break_slots(bool value) {
  DCHECK_EQ(FUNCTION, kind());
  unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
  WRITE_UINT32_FIELD(this, kFullCodeFlags, flags);
}


bool Code::has_reloc_info_for_serialization() {
  DCHECK_EQ(FUNCTION, kind());
  unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsHasRelocInfoForSerialization::decode(flags);
}


void Code::set_has_reloc_info_for_serialization(bool value) {
  DCHECK_EQ(FUNCTION, kind());
  unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsHasRelocInfoForSerialization::update(flags, value);
  WRITE_UINT32_FIELD(this, kFullCodeFlags, flags);
}
5035
5036
// OSR loop-nesting level, stored in KindSpecificFlags2; FUNCTION code only.
int Code::allow_osr_at_loop_nesting_level() {
  DCHECK_EQ(FUNCTION, kind());
  int fields = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  return AllowOSRAtLoopNestingLevelField::decode(fields);
}


void Code::set_allow_osr_at_loop_nesting_level(int level) {
  DCHECK_EQ(FUNCTION, kind());
  DCHECK(level >= 0 && level <= AbstractCode::kMaxLoopNestingMarker);
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = AllowOSRAtLoopNestingLevelField::update(previous, level);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}


int Code::profiler_ticks() {
  DCHECK_EQ(FUNCTION, kind());
  return ProfilerTicksField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}


// Unlike the reader above, the writer silently ignores non-FUNCTION code
// instead of asserting.
void Code::set_profiler_ticks(int ticks) {
  if (kind() == FUNCTION) {
    unsigned previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
    unsigned updated = ProfilerTicksField::update(previous, ticks);
    WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
  }
}

int Code::builtin_index() { return READ_INT_FIELD(this, kBuiltinIndexOffset); }

void Code::set_builtin_index(int index) {
  WRITE_INT_FIELD(this, kBuiltinIndexOffset, index);
}
5073
5074
// Number of stack slots, recorded only for crankshafted code.
unsigned Code::stack_slots() {
  DCHECK(is_crankshafted());
  return StackSlotsField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}
5080
5081
set_stack_slots(unsigned slots)5082 void Code::set_stack_slots(unsigned slots) {
5083 CHECK(slots <= (1 << kStackSlotsBitCount));
5084 DCHECK(is_crankshafted());
5085 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
5086 int updated = StackSlotsField::update(previous, slots);
5087 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
5088 }
5089
5090
// Offset of the safepoint table, recorded only for crankshafted code.
unsigned Code::safepoint_table_offset() {
  DCHECK(is_crankshafted());
  return SafepointTableOffsetField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
}
5096
5097
set_safepoint_table_offset(unsigned offset)5098 void Code::set_safepoint_table_offset(unsigned offset) {
5099 CHECK(offset <= (1 << kSafepointTableOffsetBitCount));
5100 DCHECK(is_crankshafted());
5101 DCHECK(IsAligned(offset, static_cast<unsigned>(kIntSize)));
5102 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
5103 int updated = SafepointTableOffsetField::update(previous, offset);
5104 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
5105 }
5106
5107
// The back-edge table offset is stored in pointer-size units and scaled
// back to bytes on read; FUNCTION code only.
unsigned Code::back_edge_table_offset() {
  DCHECK_EQ(FUNCTION, kind());
  return BackEdgeTableOffsetField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset)) << kPointerSizeLog2;
}


void Code::set_back_edge_table_offset(unsigned offset) {
  DCHECK_EQ(FUNCTION, kind());
  DCHECK(IsAligned(offset, static_cast<unsigned>(kPointerSize)));
  offset = offset >> kPointerSizeLog2;
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = BackEdgeTableOffsetField::update(previous, offset);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}


// A nonzero OSR nesting level is used as the "back edges patched" signal.
bool Code::back_edges_patched_for_osr() {
  DCHECK_EQ(FUNCTION, kind());
  return allow_osr_at_loop_nesting_level() > 0;
}


// ToBoolean IC state is the extra IC state truncated to 16 bits.
uint16_t Code::to_boolean_state() { return extra_ic_state(); }
5132
5133
// Deoptimization mark, valid only on optimized-function code.
bool Code::marked_for_deoptimization() {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  return MarkedForDeoptimizationField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}


// Setting the mark additionally requires that deoptimization is allowed in
// the current isolate (second DCHECK).
void Code::set_marked_for_deoptimization(bool flag) {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  DCHECK(!flag || AllowDeoptimization::IsAllowed(GetIsolate()));
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = MarkedForDeoptimizationField::update(previous, flag);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}
5148
5149
// True for exactly the kinds enumerated in IC_KIND_LIST.
bool Code::is_inline_cache_stub() {
  Kind kind = this->kind();
  switch (kind) {
#define CASE(name) case name: return true;
    IC_KIND_LIST(CASE)
#undef CASE
    default: return false;
  }
}
5159
is_debug_stub()5160 bool Code::is_debug_stub() {
5161 if (kind() != BUILTIN) return false;
5162 switch (builtin_index()) {
5163 #define CASE_DEBUG_BUILTIN(name) case Builtins::k##name:
5164 BUILTIN_LIST_DBG(CASE_DEBUG_BUILTIN)
5165 #undef CASE_DEBUG_BUILTIN
5166 return true;
5167 default:
5168 return false;
5169 }
5170 return false;
5171 }
// Single-kind predicates over Code::kind().
bool Code::is_handler() { return kind() == HANDLER; }
bool Code::is_stub() { return kind() == STUB; }
bool Code::is_binary_op_stub() { return kind() == BINARY_OP_IC; }
bool Code::is_compare_ic_stub() { return kind() == COMPARE_IC; }
bool Code::is_to_boolean_ic_stub() { return kind() == TO_BOOLEAN_IC; }
bool Code::is_optimized_code() { return kind() == OPTIMIZED_FUNCTION; }
bool Code::is_wasm_code() { return kind() == WASM_FUNCTION; }
5179
// Returns the address of the embedded constant pool, or NULL when the
// feature is disabled or the recorded offset falls outside the instructions.
Address Code::constant_pool() {
  Address constant_pool = NULL;
  if (FLAG_enable_embedded_constant_pool) {
    int offset = constant_pool_offset();
    if (offset < instruction_size()) {
      constant_pool = FIELD_ADDR(this, kHeaderSize + offset);
    }
  }
  return constant_pool;
}
5190
// Packs kind, extra IC state and cache holder into a single flags word.
Code::Flags Code::ComputeFlags(Kind kind, ExtraICState extra_ic_state,
                               CacheHolderFlag holder) {
  // Compute the bit mask.
  unsigned int bits = KindField::encode(kind) |
                      ExtraICStateField::encode(extra_ic_state) |
                      CacheHolderField::encode(holder);
  return static_cast<Flags>(bits);
}

// Handlers encode their own kind in the extra-IC-state slot of the flags.
Code::Flags Code::ComputeHandlerFlags(Kind handler_kind,
                                      CacheHolderFlag holder) {
  return ComputeFlags(Code::HANDLER, handler_kind, holder);
}


Code::Kind Code::ExtractKindFromFlags(Flags flags) {
  return KindField::decode(flags);
}


ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
  return ExtraICStateField::decode(flags);
}


CacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) {
  return CacheHolderField::decode(flags);
}

// Clears only the cache-holder bits, leaving kind and IC state intact.
Code::Flags Code::RemoveHolderFromFlags(Flags flags) {
  int bits = flags & ~CacheHolderField::kMask;
  return static_cast<Flags>(bits);
}
5224
5225
// Maps an instruction-start address back to its Code object by subtracting
// the header size.
Code* Code::GetCodeFromTargetAddress(Address address) {
  HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
  // GetCodeFromTargetAddress might be called when marking objects during mark
  // sweep. reinterpret_cast is therefore used instead of the more appropriate
  // Code::cast. Code::cast does not work when the object's map is
  // marked.
  Code* result = reinterpret_cast<Code*>(code);
  return result;
}


// Reads a code entry address from memory and maps it back to its heap object.
Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
  return HeapObject::
      FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
}
5241
5242
// Only optimized code with the weak-objects flag set may embed weak objects.
bool Code::CanContainWeakObjects() {
  return is_optimized_code() && can_have_weak_objects();
}


bool Code::IsWeakObject(Object* object) {
  return (CanContainWeakObjects() && IsWeakObjectInOptimizedCode(object));
}


// Maps that can transition, and JSReceivers/Contexts (possibly reached
// through a Cell or PropertyCell indirection) are treated as weak.
bool Code::IsWeakObjectInOptimizedCode(Object* object) {
  if (object->IsMap()) {
    return Map::cast(object)->CanTransition();
  }
  if (object->IsCell()) {
    object = Cell::cast(object)->value();
  } else if (object->IsPropertyCell()) {
    object = PropertyCell::cast(object)->value();
  }
  if (object->IsJSReceiver() || object->IsContext()) {
    return true;
  }
  return false;
}
5267
5268
// Size in bytes of the executable payload: machine-code size for a Code
// object, bytecode length for a BytecodeArray.
int AbstractCode::instruction_size() {
  if (IsCode()) {
    return GetCode()->instruction_size();
  } else {
    return GetBytecodeArray()->length();
  }
}

// Returns the source position table of the underlying Code/BytecodeArray.
ByteArray* AbstractCode::source_position_table() {
  if (IsCode()) {
    return GetCode()->source_position_table();
  } else {
    return GetBytecodeArray()->source_position_table();
  }
}

// Installs a new source position table on the underlying Code/BytecodeArray.
void AbstractCode::set_source_position_table(ByteArray* source_position_table) {
  if (IsCode()) {
    GetCode()->set_source_position_table(source_position_table);
  } else {
    GetBytecodeArray()->set_source_position_table(source_position_table);
  }
}

// Object size including attached metadata, delegated to the concrete type.
int AbstractCode::SizeIncludingMetadata() {
  if (IsCode()) {
    return GetCode()->SizeIncludingMetadata();
  } else {
    return GetBytecodeArray()->SizeIncludingMetadata();
  }
}
// Size of the executable portion of the object.
int AbstractCode::ExecutableSize() {
  if (IsCode()) {
    return GetCode()->ExecutableSize();
  } else {
    return GetBytecodeArray()->BytecodeArraySize();
  }
}
5307
// Address of the first instruction (Code) or first bytecode (BytecodeArray).
Address AbstractCode::instruction_start() {
  if (IsCode()) {
    return GetCode()->instruction_start();
  } else {
    return GetBytecodeArray()->GetFirstBytecodeAddress();
  }
}

// One-past-the-end address of the instructions/bytecodes.
Address AbstractCode::instruction_end() {
  if (IsCode()) {
    return GetCode()->instruction_end();
  } else {
    return GetBytecodeArray()->GetFirstBytecodeAddress() +
           GetBytecodeArray()->length();
  }
}

// True if |inner_pointer| points into this object (end-inclusive).
bool AbstractCode::contains(byte* inner_pointer) {
  return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
}

// The Code object's kind, or INTERPRETED_FUNCTION for bytecode. The
// static_assert guarantees Code kinds map 1:1 onto AbstractCode kinds.
AbstractCode::Kind AbstractCode::kind() {
  if (IsCode()) {
    STATIC_ASSERT(AbstractCode::FUNCTION ==
                  static_cast<AbstractCode::Kind>(Code::FUNCTION));
    return static_cast<AbstractCode::Kind>(GetCode()->kind());
  } else {
    return INTERPRETED_FUNCTION;
  }
}

// Downcast helpers to the two concrete representations.
Code* AbstractCode::GetCode() { return Code::cast(this); }

BytecodeArray* AbstractCode::GetBytecodeArray() {
  return BytecodeArray::cast(this);
}
5344
// Reads the map's prototype slot.
Object* Map::prototype() const {
  return READ_FIELD(this, kPrototypeOffset);
}


// Writes the prototype slot; only null or a JSReceiver is a valid prototype.
void Map::set_prototype(Object* value, WriteBarrierMode mode) {
  DCHECK(value->IsNull(GetIsolate()) || value->IsJSReceiver());
  WRITE_FIELD(this, kPrototypeOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
}


// Reads the layout descriptor via the GC-safe cast, usable while the heap is
// being marked.
LayoutDescriptor* Map::layout_descriptor_gc_safe() {
  Object* layout_desc = READ_FIELD(this, kLayoutDescriptorOffset);
  return LayoutDescriptor::cast_gc_safe(layout_desc);
}


// True if every field of the map's objects is a tagged pointer (fast layout).
bool Map::HasFastPointerLayout() const {
  Object* layout_desc = READ_FIELD(this, kLayoutDescriptorOffset);
  return LayoutDescriptor::IsFastPointerLayout(layout_desc);
}
5367
5368
// Replaces the descriptor array and, when double fields are unboxed, also the
// layout descriptor (only if the current one is in slow mode). Verifies
// map/layout-descriptor consistency in heap-verify or slow-dcheck builds.
void Map::UpdateDescriptors(DescriptorArray* descriptors,
                            LayoutDescriptor* layout_desc) {
  set_instance_descriptors(descriptors);
  if (FLAG_unbox_double_fields) {
    if (layout_descriptor()->IsSlowLayout()) {
      set_layout_descriptor(layout_desc);
    }
#ifdef VERIFY_HEAP
    // TODO(ishell): remove these checks from VERIFY_HEAP mode.
    if (FLAG_verify_heap) {
      CHECK(layout_descriptor()->IsConsistentWithMap(this));
      CHECK(visitor_id() == Heap::GetStaticVisitorIdForMap(this));
    }
#else
    SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(this));
    DCHECK(visitor_id() == Heap::GetStaticVisitorIdForMap(this));
#endif
  }
}
5388
5389
// Installs a fresh descriptor array, marking all its descriptors as owned by
// this map. Unlike UpdateDescriptors, the layout descriptor is set
// unconditionally and the visitor id is recomputed for the new layout.
void Map::InitializeDescriptors(DescriptorArray* descriptors,
                                LayoutDescriptor* layout_desc) {
  int len = descriptors->number_of_descriptors();
  set_instance_descriptors(descriptors);
  SetNumberOfOwnDescriptors(len);

  if (FLAG_unbox_double_fields) {
    set_layout_descriptor(layout_desc);
#ifdef VERIFY_HEAP
    // TODO(ishell): remove these checks from VERIFY_HEAP mode.
    if (FLAG_verify_heap) {
      CHECK(layout_descriptor()->IsConsistentWithMap(this));
    }
#else
    SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(this));
#endif
    set_visitor_id(Heap::GetStaticVisitorIdForMap(this));
  }
}
5409
5410
ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset)
ACCESSORS(Map, layout_descriptor, LayoutDescriptor, kLayoutDescriptorOffset)

// bit_field3 is a 32-bit field inside a pointer-sized slot; on 64-bit targets
// the unused upper half is zeroed so the whole slot has a defined value.
void Map::set_bit_field3(uint32_t bits) {
  if (kInt32Size != kPointerSize) {
    WRITE_UINT32_FIELD(this, kBitField3Offset + kInt32Size, 0);
  }
  WRITE_UINT32_FIELD(this, kBitField3Offset, bits);
}


uint32_t Map::bit_field3() const {
  return READ_UINT32_FIELD(this, kBitField3Offset);
}
5425
5426
// Returns the real layout descriptor when double-field unboxing is on;
// otherwise the shared all-pointers fast layout.
LayoutDescriptor* Map::GetLayoutDescriptor() {
  return FLAG_unbox_double_fields ? layout_descriptor()
                                  : LayoutDescriptor::FastPointerLayout();
}


// Appends |desc| to the descriptor array and claims ownership of it by
// bumping the own-descriptor count.
void Map::AppendDescriptor(Descriptor* desc) {
  DescriptorArray* descriptors = instance_descriptors();
  int number_of_own_descriptors = NumberOfOwnDescriptors();
  DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors);
  descriptors->Append(desc);
  SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);

// This function does not support appending double field descriptors and
// it should never try to (otherwise, layout descriptor must be updated too).
#ifdef DEBUG
  PropertyDetails details = desc->GetDetails();
  CHECK(details.location() != kField || !details.representation().IsDouble());
#endif
}
5447
5448
GetBackPointer()5449 Object* Map::GetBackPointer() {
5450 Object* object = constructor_or_backpointer();
5451 if (object->IsMap()) {
5452 return object;
5453 }
5454 return GetIsolate()->heap()->undefined_value();
5455 }
5456
5457
// Looks up the elements-kind transition target via the special
// elements_transition_symbol key in the transition array.
Map* Map::ElementsTransitionMap() {
  return TransitionArray::SearchSpecial(
      this, GetHeap()->elements_transition_symbol());
}


// Shared slot: transition array for regular maps, PrototypeInfo for
// prototype maps (see prototype_info below).
ACCESSORS(Map, raw_transitions, Object, kTransitionsOrPrototypeInfoOffset)
5465
5466
// Prototype maps reuse the transitions slot to store their PrototypeInfo.
Object* Map::prototype_info() const {
  DCHECK(is_prototype_map());
  return READ_FIELD(this, Map::kTransitionsOrPrototypeInfoOffset);
}


void Map::set_prototype_info(Object* value, WriteBarrierMode mode) {
  DCHECK(is_prototype_map());
  WRITE_FIELD(this, Map::kTransitionsOrPrototypeInfoOffset, value);
  CONDITIONAL_WRITE_BARRIER(
      GetHeap(), this, Map::kTransitionsOrPrototypeInfoOffset, value, mode);
}


// Installs a back pointer; only legal while the slot still holds the
// constructor (i.e. the back pointer is currently undefined), which keeps
// GetConstructor() reachable through the new parent map.
void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
  DCHECK(instance_type() >= FIRST_JS_RECEIVER_TYPE);
  DCHECK(value->IsMap());
  DCHECK(GetBackPointer()->IsUndefined(GetIsolate()));
  DCHECK(!value->IsMap() ||
         Map::cast(value)->GetConstructor() == constructor_or_backpointer());
  set_constructor_or_backpointer(value, mode);
}
5489
ACCESSORS(Map, code_cache, FixedArray, kCodeCacheOffset)
ACCESSORS(Map, dependent_code, DependentCode, kDependentCodeOffset)
ACCESSORS(Map, weak_cell_cache, Object, kWeakCellCacheOffset)
// Holds either the constructor (at the root of a transition tree) or the
// back pointer to the parent map.
ACCESSORS(Map, constructor_or_backpointer, Object,
          kConstructorOrBackPointerOffset)


// Walks back pointers until reaching the map that actually stores the
// constructor in the shared slot.
Object* Map::GetConstructor() const {
  Object* maybe_constructor = constructor_or_backpointer();
  // Follow any back pointers.
  while (maybe_constructor->IsMap()) {
    maybe_constructor =
        Map::cast(maybe_constructor)->constructor_or_backpointer();
  }
  return maybe_constructor;
}


void Map::SetConstructor(Object* constructor, WriteBarrierMode mode) {
  // Never overwrite a back pointer with a constructor.
  DCHECK(!constructor_or_backpointer()->IsMap());
  set_constructor_or_backpointer(constructor, mode);
}


// Copies an initial map, preserving instance size and property layout.
Handle<Map> Map::CopyInitialMap(Handle<Map> map) {
  return CopyInitialMap(map, map->instance_size(), map->GetInObjectProperties(),
                        map->unused_property_fields());
}
5519
5520
// JSBoundFunction: the [[BoundTargetFunction]], [[BoundThis]] and
// [[BoundArguments]] internal slots.
ACCESSORS(JSBoundFunction, bound_target_function, JSReceiver,
          kBoundTargetFunctionOffset)
ACCESSORS(JSBoundFunction, bound_this, Object, kBoundThisOffset)
ACCESSORS(JSBoundFunction, bound_arguments, FixedArray, kBoundArgumentsOffset)

ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
ACCESSORS(JSFunction, feedback_vector_cell, Cell, kFeedbackVectorOffset)
ACCESSORS(JSFunction, next_function_link, Object, kNextFunctionLinkOffset)

ACCESSORS(JSGlobalObject, native_context, Context, kNativeContextOffset)
ACCESSORS(JSGlobalObject, global_proxy, JSObject, kGlobalProxyOffset)

ACCESSORS(JSGlobalProxy, native_context, Object, kNativeContextOffset)
ACCESSORS(JSGlobalProxy, hash, Object, kHashOffset)

// AccessorInfo: metadata for native (C++) property accessors.
ACCESSORS(AccessorInfo, name, Object, kNameOffset)
SMI_ACCESSORS(AccessorInfo, flag, kFlagOffset)
ACCESSORS(AccessorInfo, expected_receiver_type, Object,
          kExpectedReceiverTypeOffset)

ACCESSORS(AccessorInfo, getter, Object, kGetterOffset)
ACCESSORS(AccessorInfo, setter, Object, kSetterOffset)
ACCESSORS(AccessorInfo, js_getter, Object, kJsGetterOffset)
ACCESSORS(AccessorInfo, data, Object, kDataOffset)
5545
5546 ACCESSORS(PromiseResolveThenableJobInfo, thenable, JSReceiver, kThenableOffset)
5547 ACCESSORS(PromiseResolveThenableJobInfo, then, JSReceiver, kThenOffset)
5548 ACCESSORS(PromiseResolveThenableJobInfo, resolve, JSFunction, kResolveOffset)
5549 ACCESSORS(PromiseResolveThenableJobInfo, reject, JSFunction, kRejectOffset)
5550 ACCESSORS(PromiseResolveThenableJobInfo, context, Context, kContextOffset);
5551
5552 ACCESSORS(PromiseReactionJobInfo, value, Object, kValueOffset);
5553 ACCESSORS(PromiseReactionJobInfo, tasks, Object, kTasksOffset);
5554 ACCESSORS(PromiseReactionJobInfo, deferred_promise, Object,
5555 kDeferredPromiseOffset);
5556 ACCESSORS(PromiseReactionJobInfo, deferred_on_resolve, Object,
5557 kDeferredOnResolveOffset);
5558 ACCESSORS(PromiseReactionJobInfo, deferred_on_reject, Object,
5559 kDeferredOnRejectOffset);
5560 ACCESSORS(PromiseReactionJobInfo, context, Context, kContextOffset);
5561
// Returns the cached Object.create map; callers must have checked
// HasObjectCreateMap() since this dereferences the weak cell unconditionally.
Map* PrototypeInfo::ObjectCreateMap() {
  return Map::cast(WeakCell::cast(object_create_map())->value());
}

// static
// Caches |map| for Object.create, held weakly via the map's weak cell.
void PrototypeInfo::SetObjectCreateMap(Handle<PrototypeInfo> info,
                                       Handle<Map> map) {
  Handle<WeakCell> cell = Map::WeakCellForMap(map);
  info->set_object_create_map(*cell);
}

// True if a cached Object.create map exists and has not been GC-cleared.
bool PrototypeInfo::HasObjectCreateMap() {
  Object* cache = object_create_map();
  return cache->IsWeakCell() && !WeakCell::cast(cache)->cleared();
}
5577
// A template counts as instantiated once a SharedFunctionInfo has been
// created for it.
bool FunctionTemplateInfo::instantiated() {
  return shared_function_info()->IsSharedFunctionInfo();
}

// Returns the parent template, or nullptr at the root of the chain.
FunctionTemplateInfo* FunctionTemplateInfo::GetParent(Isolate* isolate) {
  Object* parent = parent_template();
  return parent->IsUndefined(isolate) ? nullptr
                                      : FunctionTemplateInfo::cast(parent);
}
5587
GetParent(Isolate * isolate)5588 ObjectTemplateInfo* ObjectTemplateInfo::GetParent(Isolate* isolate) {
5589 Object* maybe_ctor = constructor();
5590 if (maybe_ctor->IsUndefined(isolate)) return nullptr;
5591 FunctionTemplateInfo* constructor = FunctionTemplateInfo::cast(maybe_ctor);
5592 while (true) {
5593 constructor = constructor->GetParent(isolate);
5594 if (constructor == nullptr) return nullptr;
5595 Object* maybe_obj = constructor->instance_template();
5596 if (!maybe_obj->IsUndefined(isolate)) {
5597 return ObjectTemplateInfo::cast(maybe_obj);
5598 }
5599 }
5600 return nullptr;
5601 }
5602
ACCESSORS(PrototypeInfo, weak_cell, Object, kWeakCellOffset)
ACCESSORS(PrototypeInfo, prototype_users, Object, kPrototypeUsersOffset)
ACCESSORS(PrototypeInfo, object_create_map, Object, kObjectCreateMap)
SMI_ACCESSORS(PrototypeInfo, registry_slot, kRegistrySlotOffset)
ACCESSORS(PrototypeInfo, validity_cell, Object, kValidityCellOffset)
SMI_ACCESSORS(PrototypeInfo, bit_field, kBitFieldOffset)
BOOL_ACCESSORS(PrototypeInfo, bit_field, should_be_fast_map, kShouldBeFastBit)

// Generic 2/3-value tuples; Tuple3 extends Tuple2 with a third slot.
ACCESSORS(Tuple2, value1, Object, kValue1Offset)
ACCESSORS(Tuple2, value2, Object, kValue2Offset)
ACCESSORS(Tuple3, value3, Object, kValue3Offset)

ACCESSORS(ContextExtension, scope_info, ScopeInfo, kScopeInfoOffset)
ACCESSORS(ContextExtension, extension, Object, kExtensionOffset)

ACCESSORS(JSModuleNamespace, module, Module, kModuleOffset)

// Module: the code slot's type doubles as the module's lifecycle state (see
// evaluated()/instantiated() below).
ACCESSORS(Module, code, Object, kCodeOffset)
ACCESSORS(Module, exports, ObjectHashTable, kExportsOffset)
ACCESSORS(Module, regular_exports, FixedArray, kRegularExportsOffset)
ACCESSORS(Module, regular_imports, FixedArray, kRegularImportsOffset)
ACCESSORS(Module, module_namespace, HeapObject, kModuleNamespaceOffset)
ACCESSORS(Module, requested_modules, FixedArray, kRequestedModulesOffset)
SMI_ACCESSORS(Module, hash, kHashOffset)
5631
// The code() slot encodes the module's state by its type:
//   SharedFunctionInfo -> not yet instantiated,
//   JSFunction         -> instantiated,
//   ModuleInfo         -> evaluated.
bool Module::evaluated() const { return code()->IsModuleInfo(); }

// Transitions instantiated -> evaluated by replacing the JSFunction in the
// code slot with its ModuleInfo.
void Module::set_evaluated() {
  DCHECK(instantiated());
  DCHECK(!evaluated());
  return set_code(
      JSFunction::cast(code())->shared()->scope_info()->ModuleDescriptorInfo());
}

bool Module::instantiated() const { return !code()->IsSharedFunctionInfo(); }
5642
info()5643 ModuleInfo* Module::info() const {
5644 if (evaluated()) return ModuleInfo::cast(code());
5645 ScopeInfo* scope_info = instantiated()
5646 ? JSFunction::cast(code())->shared()->scope_info()
5647 : SharedFunctionInfo::cast(code())->scope_info();
5648 return scope_info->ModuleDescriptorInfo();
5649 }
5650
ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
ACCESSORS(AccessorPair, setter, Object, kSetterOffset)

ACCESSORS(AccessCheckInfo, callback, Object, kCallbackOffset)
ACCESSORS(AccessCheckInfo, named_interceptor, Object, kNamedInterceptorOffset)
ACCESSORS(AccessCheckInfo, indexed_interceptor, Object,
          kIndexedInterceptorOffset)
ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)

// InterceptorInfo: the embedder-supplied property interceptor callbacks.
ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
ACCESSORS(InterceptorInfo, descriptor, Object, kDescriptorOffset)
ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
ACCESSORS(InterceptorInfo, definer, Object, kDefinerOffset)
ACCESSORS(InterceptorInfo, data, Object, kDataOffset)
SMI_ACCESSORS(InterceptorInfo, flags, kFlagsOffset)
BOOL_ACCESSORS(InterceptorInfo, flags, can_intercept_symbols,
               kCanInterceptSymbolsBit)
BOOL_ACCESSORS(InterceptorInfo, flags, all_can_read, kAllCanReadBit)
BOOL_ACCESSORS(InterceptorInfo, flags, non_masking, kNonMasking)

ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)
ACCESSORS(CallHandlerInfo, fast_handler, Object, kFastHandlerOffset)

// TemplateInfo: common base state for function and object templates.
ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
ACCESSORS(TemplateInfo, serial_number, Object, kSerialNumberOffset)
SMI_ACCESSORS(TemplateInfo, number_of_properties, kNumberOfProperties)
ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
ACCESSORS(TemplateInfo, property_accessors, Object, kPropertyAccessorsOffset)

ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
          kPrototypeTemplateOffset)
ACCESSORS(FunctionTemplateInfo, prototype_provider_template, Object,
          kPrototypeProviderTemplateOffset)

ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
          kNamedPropertyHandlerOffset)
ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
          kIndexedPropertyHandlerOffset)
ACCESSORS(FunctionTemplateInfo, instance_template, Object,
          kInstanceTemplateOffset)
ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
          kInstanceCallHandlerOffset)
ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
          kAccessCheckInfoOffset)
ACCESSORS(FunctionTemplateInfo, shared_function_info, Object,
          kSharedFunctionInfoOffset)
ACCESSORS(FunctionTemplateInfo, cached_property_name, Object,
          kCachedPropertyNameOffset)

SMI_ACCESSORS(FunctionTemplateInfo, flag, kFlagOffset)

ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
ACCESSORS(ObjectTemplateInfo, data, Object, kDataOffset)
5712
// The data() slot is a Smi that packs both the internal field count and the
// immutable-prototype bit (InternalFieldCount / IsImmutablePrototype fields).
int ObjectTemplateInfo::internal_field_count() const {
  Object* value = data();
  DCHECK(value->IsSmi());
  return InternalFieldCount::decode(Smi::cast(value)->value());
}

// Rewrites only the internal-field-count bits of the packed data() Smi.
void ObjectTemplateInfo::set_internal_field_count(int count) {
  return set_data(Smi::FromInt(
      InternalFieldCount::update(Smi::cast(data())->value(), count)));
}

bool ObjectTemplateInfo::immutable_proto() const {
  Object* value = data();
  DCHECK(value->IsSmi());
  return IsImmutablePrototype::decode(Smi::cast(value)->value());
}

// Rewrites only the immutable-prototype bit of the packed data() Smi.
void ObjectTemplateInfo::set_immutable_proto(bool immutable) {
  return set_data(Smi::FromInt(
      IsImmutablePrototype::update(Smi::cast(data())->value(), immutable)));
}
5734
length()5735 int TemplateList::length() const {
5736 return Smi::cast(FixedArray::cast(this)->get(kLengthIndex))->value();
5737 }
5738
get(int index)5739 Object* TemplateList::get(int index) const {
5740 return FixedArray::cast(this)->get(kFirstElementIndex + index);
5741 }
5742
set(int index,Object * value)5743 void TemplateList::set(int index, Object* value) {
5744 FixedArray::cast(this)->set(kFirstElementIndex + index, value);
5745 }
5746
ACCESSORS(AllocationSite, transition_info, Object, kTransitionInfoOffset)
ACCESSORS(AllocationSite, nested_site, Object, kNestedSiteOffset)
SMI_ACCESSORS(AllocationSite, pretenure_data, kPretenureDataOffset)
SMI_ACCESSORS(AllocationSite, pretenure_create_count,
              kPretenureCreateCountOffset)
ACCESSORS(AllocationSite, dependent_code, DependentCode,
          kDependentCodeOffset)
ACCESSORS(AllocationSite, weak_next, Object, kWeakNextOffset)
ACCESSORS(AllocationMemento, allocation_site, Object, kAllocationSiteOffset)

ACCESSORS(Script, source, Object, kSourceOffset)
ACCESSORS(Script, name, Object, kNameOffset)
SMI_ACCESSORS(Script, id, kIdOffset)
SMI_ACCESSORS(Script, line_offset, kLineOffsetOffset)
SMI_ACCESSORS(Script, column_offset, kColumnOffsetOffset)
ACCESSORS(Script, context_data, Object, kContextOffset)
ACCESSORS(Script, wrapper, HeapObject, kWrapperOffset)
SMI_ACCESSORS(Script, type, kTypeOffset)
ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
// eval_from_shared and wasm_compiled_module share kEvalFromSharedOffset; the
// _CHECKED guards keep the two uses exclusive via the script type.
ACCESSORS_CHECKED(Script, eval_from_shared, Object, kEvalFromSharedOffset,
                  this->type() != TYPE_WASM)
SMI_ACCESSORS_CHECKED(Script, eval_from_position, kEvalFromPositionOffset,
                      this->type() != TYPE_WASM)
ACCESSORS(Script, shared_function_infos, FixedArray, kSharedFunctionInfosOffset)
SMI_ACCESSORS(Script, flags, kFlagsOffset)
ACCESSORS(Script, source_url, Object, kSourceUrlOffset)
ACCESSORS(Script, source_mapping_url, Object, kSourceMappingUrlOffset)
ACCESSORS_CHECKED(Script, wasm_compiled_module, Object, kEvalFromSharedOffset,
                  this->type() == TYPE_WASM)
5776
// Compilation type is a single bit in the flags Smi.
Script::CompilationType Script::compilation_type() {
  return BooleanBit::get(flags(), kCompilationTypeBit) ?
      COMPILATION_TYPE_EVAL : COMPILATION_TYPE_HOST;
}
void Script::set_compilation_type(CompilationType type) {
  set_flags(BooleanBit::set(flags(), kCompilationTypeBit,
      type == COMPILATION_TYPE_EVAL));
}
// Compilation state is likewise a single bit in flags.
Script::CompilationState Script::compilation_state() {
  return BooleanBit::get(flags(), kCompilationStateBit) ?
      COMPILATION_STATE_COMPILED : COMPILATION_STATE_INITIAL;
}
void Script::set_compilation_state(CompilationState state) {
  set_flags(BooleanBit::set(flags(), kCompilationStateBit,
      state == COMPILATION_STATE_COMPILED));
}
// Origin options occupy a multi-bit field of flags at kOriginOptionsShift.
ScriptOriginOptions Script::origin_options() {
  return ScriptOriginOptions((flags() & kOriginOptionsMask) >>
                             kOriginOptionsShift);
}
void Script::set_origin_options(ScriptOriginOptions origin_options) {
  DCHECK(!(origin_options.Flags() & ~((1 << kOriginOptionsSize) - 1)));
  set_flags((flags() & ~kOriginOptionsMask) |
            (origin_options.Flags() << kOriginOptionsShift));
}
5802
5803
ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
SMI_ACCESSORS(DebugInfo, debugger_hints, kDebuggerHintsIndex)
ACCESSORS(DebugInfo, debug_bytecode_array, Object, kDebugBytecodeArrayIndex)
ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)

// True if a debug copy of the bytecode (with breakpoints applied) exists.
bool DebugInfo::HasDebugBytecodeArray() {
  return debug_bytecode_array()->IsBytecodeArray();
}

// True if the function has full (unoptimized) code suitable for debugging;
// such code must have been compiled with debug break slots.
bool DebugInfo::HasDebugCode() {
  Code* code = shared()->code();
  bool has = code->kind() == Code::FUNCTION;
  DCHECK(!has || code->has_debug_break_slots());
  return has;
}

// The pristine bytecode (no breakpoints); valid only with a debug copy.
BytecodeArray* DebugInfo::OriginalBytecodeArray() {
  DCHECK(HasDebugBytecodeArray());
  return shared()->bytecode_array();
}

// The debug copy of the bytecode; valid only when HasDebugBytecodeArray().
BytecodeArray* DebugInfo::DebugBytecodeArray() {
  DCHECK(HasDebugBytecodeArray());
  return BytecodeArray::cast(debug_bytecode_array());
}

// The debuggable full code; valid only when HasDebugCode().
Code* DebugInfo::DebugCode() {
  DCHECK(HasDebugCode());
  return shared()->code();
}
5834
SMI_ACCESSORS(BreakPointInfo, source_position, kSourcePositionIndex)
ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)

ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
ACCESSORS(SharedFunctionInfo, optimized_code_map, FixedArray,
          kOptimizedCodeMapOffset)
ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
ACCESSORS(SharedFunctionInfo, feedback_metadata, FeedbackMetadata,
          kFeedbackMetadataOffset)
SMI_ACCESSORS(SharedFunctionInfo, function_literal_id, kFunctionLiteralIdOffset)
#if TRACE_MAPS
SMI_ACCESSORS(SharedFunctionInfo, unique_id, kUniqueIdOffset)
#endif
ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
          kInstanceClassNameOffset)
ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
ACCESSORS(SharedFunctionInfo, function_identifier, Object,
          kFunctionIdentifierOffset)

// FunctionTemplateInfo boolean properties, packed into the flag Smi.
SMI_ACCESSORS(FunctionTemplateInfo, length, kLengthOffset)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
               kHiddenPrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
               kNeedsAccessCheckBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype,
               kReadOnlyPrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, remove_prototype,
               kRemovePrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, do_not_cache,
               kDoNotCacheBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, accept_any_receiver,
               kAcceptAnyReceiver)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_named_expression,
               kIsNamedExpressionBit)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
               kIsTopLevelBit)
#if V8_HOST_ARCH_32_BIT
// On 32-bit hosts these counters fit in ordinary Smi fields.
SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
SMI_ACCESSORS(SharedFunctionInfo, internal_formal_parameter_count,
              kFormalParameterCountOffset)
SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
              kExpectedNofPropertiesOffset)
SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
              kStartPositionAndTypeOffset)
SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
              kFunctionTokenPositionOffset)
SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
              kCompilerHintsOffset)
SMI_ACCESSORS(SharedFunctionInfo, opt_count_and_bailout_reason,
              kOptCountAndBailoutReasonOffset)
SMI_ACCESSORS(SharedFunctionInfo, counters, kCountersOffset)
SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
SMI_ACCESSORS(SharedFunctionInfo, profiler_ticks, kProfilerTicksOffset)

#else

// On 64-bit hosts two 32-bit ints are packed into one pointer-sized slot.
// The int in the low half ("LO") carries the Smi tag; which half is low
// depends on endianness.
#if V8_TARGET_LITTLE_ENDIAN
#define PSEUDO_SMI_LO_ALIGN 0
#define PSEUDO_SMI_HI_ALIGN kIntSize
#else
#define PSEUDO_SMI_LO_ALIGN kIntSize
#define PSEUDO_SMI_HI_ALIGN 0
#endif

// The low ("tagged") half stores value << 1 with the heap-object tag bit
// clear, so the whole slot still scans as a valid Smi for the GC.
#define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset)                          \
  STATIC_ASSERT(holder::offset % kPointerSize == PSEUDO_SMI_LO_ALIGN);         \
  int holder::name() const {                                                   \
    int value = READ_INT_FIELD(this, offset);                                  \
    DCHECK(kHeapObjectTag == 1);                                               \
    DCHECK((value & kHeapObjectTag) == 0);                                     \
    return value >> 1;                                                         \
  }                                                                            \
  void holder::set_##name(int value) {                                         \
    DCHECK(kHeapObjectTag == 1);                                               \
    DCHECK((value & 0xC0000000) == 0xC0000000 || (value & 0xC0000000) == 0x0); \
    WRITE_INT_FIELD(this, offset, (value << 1) & ~kHeapObjectTag);             \
  }

// The high half is a plain untagged int.
#define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset) \
  STATIC_ASSERT(holder::offset % kPointerSize == PSEUDO_SMI_HI_ALIGN); \
  INT_ACCESSORS(holder, name, offset)


PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, internal_formal_parameter_count,
                        kFormalParameterCountOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        expected_nof_properties,
                        kExpectedNofPropertiesOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        start_position_and_type,
                        kStartPositionAndTypeOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        function_token_position,
                        kFunctionTokenPositionOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        compiler_hints,
                        kCompilerHintsOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        opt_count_and_bailout_reason,
                        kOptCountAndBailoutReasonOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, counters, kCountersOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        ast_node_count,
                        kAstNodeCountOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        profiler_ticks,
                        kProfilerTicksOffset)

#endif
5956
5957 AbstractCode* SharedFunctionInfo::abstract_code() {
5958 if (HasBytecodeArray()) {
5959 return AbstractCode::cast(bytecode_array());
5960 } else {
5961 return AbstractCode::cast(code());
5962 }
5963 }
5964
// SharedFunctionInfo boolean properties packed into the compiler_hints field.
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, allows_lazy_compilation,
               kAllowLazyCompilation)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, uses_arguments,
               kUsesArguments)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, has_duplicate_parameters,
               kHasDuplicateParameters)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, asm_function, kIsAsmFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_declaration,
               kIsDeclaration)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, marked_for_tier_up,
               kMarkedForTierUp)

BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, needs_home_object,
               kNeedsHomeObject)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, force_inline, kForceInline)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, must_use_ignition_turbo,
               kMustUseIgnitionTurbo)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_flush, kDontFlush)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_asm_wasm_broken,
               kIsAsmWasmBroken)

// Read-only here; the writer below keeps the setter logic in one place.
BOOL_GETTER(SharedFunctionInfo, compiler_hints, optimization_disabled,
            kOptimizationDisabled)
5989
5990 void SharedFunctionInfo::set_optimization_disabled(bool disable) {
5991 set_compiler_hints(BooleanBit::set(compiler_hints(),
5992 kOptimizationDisabled,
5993 disable));
5994 }
5995
language_mode()5996 LanguageMode SharedFunctionInfo::language_mode() {
5997 STATIC_ASSERT(LANGUAGE_END == 2);
5998 return construct_language_mode(
5999 BooleanBit::get(compiler_hints(), kStrictModeFunction));
6000 }
6001
set_language_mode(LanguageMode language_mode)6002 void SharedFunctionInfo::set_language_mode(LanguageMode language_mode) {
6003 STATIC_ASSERT(LANGUAGE_END == 2);
6004 // We only allow language mode transitions that set the same language mode
6005 // again or go up in the chain:
6006 DCHECK(is_sloppy(this->language_mode()) || is_strict(language_mode));
6007 int hints = compiler_hints();
6008 hints = BooleanBit::set(hints, kStrictModeFunction, is_strict(language_mode));
6009 set_compiler_hints(hints);
6010 }
6011
// The function kind (normal, arrow, generator, ...) is packed into a
// multi-bit field of compiler_hints.
FunctionKind SharedFunctionInfo::kind() const {
  return FunctionKindBits::decode(compiler_hints());
}

void SharedFunctionInfo::set_kind(FunctionKind kind) {
  DCHECK(IsValidFunctionKind(kind));
  int hints = compiler_hints();
  hints = FunctionKindBits::update(hints, kind);
  set_compiler_hints(hints);
}
6022
// Boolean flags stored as single bits inside the debugger_hints value
// (which itself lives either in a DebugInfo object or directly in the
// debug_info slot as a Smi; see debugger_hints()/set_debugger_hints below).
BOOL_ACCESSORS(SharedFunctionInfo, debugger_hints,
               name_should_print_as_anonymous, kNameShouldPrintAsAnonymous)
BOOL_ACCESSORS(SharedFunctionInfo, debugger_hints, is_anonymous_expression,
               kIsAnonymousExpression)
BOOL_ACCESSORS(SharedFunctionInfo, debugger_hints, deserialized, kDeserialized)
BOOL_ACCESSORS(SharedFunctionInfo, debugger_hints, has_no_side_effect,
               kHasNoSideEffect)
BOOL_ACCESSORS(SharedFunctionInfo, debugger_hints, computed_has_no_side_effect,
               kComputedHasNoSideEffect)
BOOL_ACCESSORS(SharedFunctionInfo, debugger_hints, debug_is_blackboxed,
               kDebugIsBlackboxed)
BOOL_ACCESSORS(SharedFunctionInfo, debugger_hints, computed_debug_is_blackboxed,
               kComputedDebugIsBlackboxed)
6036
6037 bool Script::HasValidSource() {
6038 Object* src = this->source();
6039 if (!src->IsString()) return true;
6040 String* src_str = String::cast(src);
6041 if (!StringShape(src_str).IsExternal()) return true;
6042 if (src_str->IsOneByteRepresentation()) {
6043 return ExternalOneByteString::cast(src)->resource() != NULL;
6044 } else if (src_str->IsTwoByteRepresentation()) {
6045 return ExternalTwoByteString::cast(src)->resource() != NULL;
6046 }
6047 return true;
6048 }
6049
6050
// Marks the function as not participating in argument adaptation by storing
// the sentinel parameter count. Only valid for builtins and stubs.
void SharedFunctionInfo::DontAdaptArguments() {
  DCHECK(code()->kind() == Code::BUILTIN || code()->kind() == Code::STUB);
  set_internal_formal_parameter_count(kDontAdaptArgumentsSentinel);
}


// The start position shares a field with type bits; shift them away.
int SharedFunctionInfo::start_position() const {
  return start_position_and_type() >> kStartPositionShift;
}


// Writes the start position while preserving the low type bits of the
// combined field.
void SharedFunctionInfo::set_start_position(int start_position) {
  set_start_position_and_type((start_position << kStartPositionShift)
      | (start_position_and_type() & ~kStartPositionMask));
}
6066
6067
// Raw read of the code slot.
Code* SharedFunctionInfo::code() const {
  return Code::cast(READ_FIELD(this, kCodeOffset));
}


void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
  // Optimized code is never installed here; it goes through the
  // optimized-code paths instead.
  DCHECK(value->kind() != Code::OPTIMIZED_FUNCTION);
  // If the SharedFunctionInfo has bytecode we should never mark it for lazy
  // compile, since the bytecode is never flushed.
  DCHECK(value != GetIsolate()->builtins()->builtin(Builtins::kCompileLazy) ||
         !HasBytecodeArray());
  WRITE_FIELD(this, kCodeOffset, value);
  CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);
}
6082
6083
// Swaps in new code, first evicting this function from the code flusher if
// it was enqueued as a flushing candidate (indicated by a non-null
// gc_metadata field on the current code).
void SharedFunctionInfo::ReplaceCode(Code* value) {
  // If the GC metadata field is already used then the function was
  // enqueued as a code flushing candidate and we remove it now.
  if (code()->gc_metadata() != NULL) {
    CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher();
    flusher->EvictCandidate(this);
  }

  DCHECK(code()->gc_metadata() == NULL && value->gc_metadata() == NULL);
#ifdef DEBUG
  Code::VerifyRecompiledCode(code(), value);
#endif  // DEBUG

  set_code(value);
}
6099
// True when the installed code is an interpreter trampoline builtin, i.e.
// the function runs on Ignition.
bool SharedFunctionInfo::IsInterpreted() const {
  return code()->is_interpreter_trampoline_builtin();
}

// True when the installed code is full-codegen output (Code::FUNCTION).
bool SharedFunctionInfo::HasBaselineCode() const {
  return code()->kind() == Code::FUNCTION;
}

// Raw read of the scope-info slot.
ScopeInfo* SharedFunctionInfo::scope_info() const {
  return reinterpret_cast<ScopeInfo*>(READ_FIELD(this, kScopeInfoOffset));
}
6111
6112
set_scope_info(ScopeInfo * value,WriteBarrierMode mode)6113 void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
6114 WriteBarrierMode mode) {
6115 WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
6116 CONDITIONAL_WRITE_BARRIER(GetHeap(),
6117 this,
6118 kScopeInfoOffset,
6119 reinterpret_cast<Object*>(value),
6120 mode);
6121 }
6122
// Scope info of the enclosing function, used for lazily parsed functions.
ACCESSORS(SharedFunctionInfo, outer_scope_info, HeapObject,
          kOuterScopeInfoOffset)
6125
// Compiled means anything other than the lazy-compile stub. The DCHECKs
// assert that the optimizing/baseline markers are never installed on a
// SharedFunctionInfo (they only appear on JSFunctions).
bool SharedFunctionInfo::is_compiled() const {
  Builtins* builtins = GetIsolate()->builtins();
  DCHECK(code() != builtins->builtin(Builtins::kCompileOptimizedConcurrent));
  DCHECK(code() != builtins->builtin(Builtins::kCompileOptimized));
  DCHECK(code() != builtins->builtin(Builtins::kCompileBaseline));
  return code() != builtins->builtin(Builtins::kCompileLazy);
}


// Delegates to the scope info; "simple" parameters here is whatever
// ScopeInfo::HasSimpleParameters reports.
bool SharedFunctionInfo::has_simple_parameters() {
  return scope_info()->HasSimpleParameters();
}
6138
// The debug_info slot holds either a Smi (the raw debugger hints) or a
// DebugInfo struct once the debugger has attached.
bool SharedFunctionInfo::HasDebugInfo() const {
  bool has_debug_info = !debug_info()->IsSmi();
  DCHECK_EQ(debug_info()->IsStruct(), has_debug_info);
  DCHECK(!has_debug_info || HasDebugCode());
  return has_debug_info;
}

DebugInfo* SharedFunctionInfo::GetDebugInfo() const {
  DCHECK(HasDebugInfo());
  return DebugInfo::cast(debug_info());
}

// Debuggable code is either baseline code with debug break slots, or any
// bytecode (the interpreter always supports debugging).
bool SharedFunctionInfo::HasDebugCode() const {
  if (HasBaselineCode()) return code()->has_debug_break_slots();
  return HasBytecodeArray();
}

// Reads the hints from the DebugInfo when present, otherwise decodes the
// Smi stored directly in the debug_info slot.
int SharedFunctionInfo::debugger_hints() const {
  if (HasDebugInfo()) return GetDebugInfo()->debugger_hints();
  return Smi::cast(debug_info())->value();
}
6160
set_debugger_hints(int value)6161 void SharedFunctionInfo::set_debugger_hints(int value) {
6162 if (HasDebugInfo()) {
6163 GetDebugInfo()->set_debugger_hints(value);
6164 } else {
6165 set_debug_info(Smi::FromInt(value));
6166 }
6167 }
6168
// The function_data slot is a union keyed by type: FunctionTemplateInfo for
// API functions, BytecodeArray for interpreted code, FixedArray for
// asm/wasm data, undefined otherwise.
bool SharedFunctionInfo::IsApiFunction() {
  return function_data()->IsFunctionTemplateInfo();
}


FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
  DCHECK(IsApiFunction());
  return FunctionTemplateInfo::cast(function_data());
}

void SharedFunctionInfo::set_api_func_data(FunctionTemplateInfo* data) {
  // May only be set once; the slot must still be undefined.
  DCHECK(function_data()->IsUndefined(GetIsolate()));
  set_function_data(data);
}
6183
// Bytecode occupies the function_data union slot when present.
bool SharedFunctionInfo::HasBytecodeArray() const {
  return function_data()->IsBytecodeArray();
}

BytecodeArray* SharedFunctionInfo::bytecode_array() const {
  DCHECK(HasBytecodeArray());
  return BytecodeArray::cast(function_data());
}

void SharedFunctionInfo::set_bytecode_array(BytecodeArray* bytecode) {
  // The slot must not already hold other data.
  DCHECK(function_data()->IsUndefined(GetIsolate()));
  set_function_data(bytecode);
}

// Resets the slot back to undefined (idempotent).
void SharedFunctionInfo::ClearBytecodeArray() {
  DCHECK(function_data()->IsUndefined(GetIsolate()) || HasBytecodeArray());
  set_function_data(GetHeap()->undefined_value());
}
6202
// asm.js/wasm translation data occupies the function_data union slot as a
// FixedArray when present.
bool SharedFunctionInfo::HasAsmWasmData() const {
  return function_data()->IsFixedArray();
}

FixedArray* SharedFunctionInfo::asm_wasm_data() const {
  DCHECK(HasAsmWasmData());
  return FixedArray::cast(function_data());
}

void SharedFunctionInfo::set_asm_wasm_data(FixedArray* data) {
  DCHECK(function_data()->IsUndefined(GetIsolate()) || HasAsmWasmData());
  set_function_data(data);
}

// Resets the slot back to undefined (idempotent).
void SharedFunctionInfo::ClearAsmWasmData() {
  DCHECK(function_data()->IsUndefined(GetIsolate()) || HasAsmWasmData());
  set_function_data(GetHeap()->undefined_value());
}
6221
// The function_identifier slot is a union: a Smi encodes a
// BuiltinFunctionId, a String is the inferred name.
bool SharedFunctionInfo::HasBuiltinFunctionId() {
  return function_identifier()->IsSmi();
}

BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
  DCHECK(HasBuiltinFunctionId());
  return static_cast<BuiltinFunctionId>(
      Smi::cast(function_identifier())->value());
}

void SharedFunctionInfo::set_builtin_function_id(BuiltinFunctionId id) {
  set_function_identifier(Smi::FromInt(id));
}
6235
// An inferred name is stored as a String in the function_identifier slot.
bool SharedFunctionInfo::HasInferredName() {
  return function_identifier()->IsString();
}
6239
inferred_name()6240 String* SharedFunctionInfo::inferred_name() {
6241 if (HasInferredName()) {
6242 return String::cast(function_identifier());
6243 }
6244 Isolate* isolate = GetIsolate();
6245 DCHECK(function_identifier()->IsUndefined(isolate) || HasBuiltinFunctionId());
6246 return isolate->heap()->empty_string();
6247 }
6248
// May only overwrite undefined or a previous inferred name, never a
// builtin-function id.
void SharedFunctionInfo::set_inferred_name(String* inferred_name) {
  DCHECK(function_identifier()->IsUndefined(GetIsolate()) || HasInferredName());
  set_function_identifier(inferred_name);
}
6253
// The counters field packs several small counters via BitField encodings:
// IC age, deopt count and opt-reenable tries.
int SharedFunctionInfo::ic_age() {
  return ICAgeBits::decode(counters());
}


void SharedFunctionInfo::set_ic_age(int ic_age) {
  set_counters(ICAgeBits::update(counters(), ic_age));
}


int SharedFunctionInfo::deopt_count() {
  return DeoptCountBits::decode(counters());
}


void SharedFunctionInfo::set_deopt_count(int deopt_count) {
  set_counters(DeoptCountBits::update(counters(), deopt_count));
}


// Increments the deopt count, wrapping around at the bit-field maximum
// rather than overflowing into neighbouring fields.
void SharedFunctionInfo::increment_deopt_count() {
  int value = counters();
  int deopt_count = DeoptCountBits::decode(value);
  deopt_count = (deopt_count + 1) & DeoptCountBits::kMax;
  set_counters(DeoptCountBits::update(value, deopt_count));
}


int SharedFunctionInfo::opt_reenable_tries() {
  return OptReenableTriesBits::decode(counters());
}


void SharedFunctionInfo::set_opt_reenable_tries(int tries) {
  set_counters(OptReenableTriesBits::update(counters(), tries));
}
6290
6291
// The opt_count_and_bailout_reason field packs the optimization count and
// the reason optimization was disabled, via BitField encodings.
int SharedFunctionInfo::opt_count() {
  return OptCountBits::decode(opt_count_and_bailout_reason());
}


void SharedFunctionInfo::set_opt_count(int opt_count) {
  set_opt_count_and_bailout_reason(
      OptCountBits::update(opt_count_and_bailout_reason(), opt_count));
}


BailoutReason SharedFunctionInfo::disable_optimization_reason() {
  return static_cast<BailoutReason>(
      DisabledOptimizationReasonBits::decode(opt_count_and_bailout_reason()));
}
6307
6308
// Only full-codegen (Code::FUNCTION) output can carry deopt support.
bool SharedFunctionInfo::has_deoptimization_support() {
  Code* code = this->code();
  return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
}
6313
6314
// Exponential-backoff re-enable: counts attempts (wrapping at the bit-field
// maximum) and re-enables optimization only when the previous try count is
// a power of two >= 16, so retries become progressively rarer.
void SharedFunctionInfo::TryReenableOptimization() {
  int tries = opt_reenable_tries();
  set_opt_reenable_tries((tries + 1) & OptReenableTriesBits::kMax);
  // We reenable optimization whenever the number of tries is a large
  // enough power of 2.
  if (tries >= 16 && (((tries - 1) & tries) == 0)) {
    set_optimization_disabled(false);
    set_opt_count(0);
    set_deopt_count(0);
  }
}
6326
6327
// Records why optimization was disabled in the packed
// opt_count_and_bailout_reason field.
void SharedFunctionInfo::set_disable_optimization_reason(BailoutReason reason) {
  set_opt_count_and_bailout_reason(DisabledOptimizationReasonBits::update(
      opt_count_and_bailout_reason(), reason));
}
6332
IsUserJavaScript()6333 bool SharedFunctionInfo::IsUserJavaScript() {
6334 Object* script_obj = script();
6335 if (script_obj->IsUndefined(GetIsolate())) return false;
6336 Script* script = Script::cast(script_obj);
6337 return static_cast<Script::Type>(script->type()) == Script::TYPE_NORMAL;
6338 }
6339
// Only user JavaScript that is not asm/wasm can be debugged.
bool SharedFunctionInfo::IsSubjectToDebugging() {
  return IsUserJavaScript() && !HasAsmWasmData();
}

// The empty fixed array is the canonical "cleared" sentinel for the
// optimized code map.
bool SharedFunctionInfo::OptimizedCodeMapIsCleared() const {
  return optimized_code_map() == GetHeap()->empty_fixed_array();
}
6347
// The feedback vector is held indirectly through a cell; callers must
// ensure it exists (see has_feedback_vector()).
FeedbackVector* JSFunction::feedback_vector() const {
  DCHECK(feedback_vector_cell()->value()->IsFeedbackVector());
  return FeedbackVector::cast(feedback_vector_cell()->value());
}
6352
// State predicates derived from the currently installed code object.
bool JSFunction::IsOptimized() {
  return code()->kind() == Code::OPTIMIZED_FUNCTION;
}

bool JSFunction::IsInterpreted() {
  return code()->is_interpreter_trampoline_builtin();
}

// "Marked" states compare against the marker builtins installed while a
// compile is pending.
bool JSFunction::IsMarkedForBaseline() {
  return code() ==
         GetIsolate()->builtins()->builtin(Builtins::kCompileBaseline);
}
6365
// Pending-compilation markers: each is a dedicated builtin installed in the
// code slot until the corresponding compile completes.
bool JSFunction::IsMarkedForOptimization() {
  return code() == GetIsolate()->builtins()->builtin(
      Builtins::kCompileOptimized);
}


bool JSFunction::IsMarkedForConcurrentOptimization() {
  return code() == GetIsolate()->builtins()->builtin(
      Builtins::kCompileOptimizedConcurrent);
}


bool JSFunction::IsInOptimizationQueue() {
  return code() == GetIsolate()->builtins()->builtin(
      Builtins::kInOptimizationQueue);
}
6382
6383
CompleteInobjectSlackTrackingIfActive()6384 void JSFunction::CompleteInobjectSlackTrackingIfActive() {
6385 if (has_initial_map() && initial_map()->IsInobjectSlackTrackingInProgress()) {
6386 initial_map()->CompleteInobjectSlackTracking();
6387 }
6388 }
6389
6390
// Slack tracking is active while the construction counter has not yet
// reached the kNoSlackTracking sentinel.
bool Map::IsInobjectSlackTrackingInProgress() {
  return construction_counter() != Map::kNoSlackTracking;
}


void Map::InobjectSlackTrackingStep() {
  if (!IsInobjectSlackTrackingInProgress()) return;
  int counter = construction_counter();
  set_construction_counter(counter - 1);
  // Note: this compares the pre-decrement value, so tracking completes on
  // the step that moves the counter past kSlackTrackingCounterEnd.
  if (counter == kSlackTrackingCounterEnd) {
    CompleteInobjectSlackTracking();
  }
}
6404
abstract_code()6405 AbstractCode* JSFunction::abstract_code() {
6406 if (IsInterpreted()) {
6407 return AbstractCode::cast(shared()->bytecode_array());
6408 } else {
6409 return AbstractCode::cast(code());
6410 }
6411 }
6412
// The code slot stores the code ENTRY address, not a tagged pointer; map it
// back to the owning Code object.
Code* JSFunction::code() {
  return Code::cast(
      Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
}
6417
6418
// Installs code by writing its entry address into the code-entry slot and
// notifying incremental marking (code-entry slots need a special record
// because they hold an inner pointer, not a tagged value).
void JSFunction::set_code(Code* value) {
  DCHECK(!GetHeap()->InNewSpace(value));
  Address entry = value->entry();
  WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
  GetHeap()->incremental_marking()->RecordWriteOfCodeEntry(
      this,
      HeapObject::RawField(this, kCodeEntryOffset),
      value);
}


// As above but without the incremental-marking record; only safe when the
// caller guarantees the marker does not need to know (e.g. during GC).
void JSFunction::set_code_no_write_barrier(Code* value) {
  DCHECK(!GetHeap()->InNewSpace(value));
  Address entry = value->entry();
  WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
}
6435
6436
// Installs new code and keeps the native context's optimized-function list
// consistent with the optimized/unoptimized state transition.
void JSFunction::ReplaceCode(Code* code) {
  bool was_optimized = IsOptimized();
  bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;

  if (was_optimized && is_optimized) {
    // Replacing optimized code with different optimized code: drop the old
    // entry from the shared optimized-code map.
    shared()->EvictFromOptimizedCodeMap(this->code(),
        "Replacing with another optimized code");
  }

  set_code(code);

  // Add/remove the function from the list of optimized functions for this
  // context based on the state change.
  if (!was_optimized && is_optimized) {
    context()->native_context()->AddOptimizedFunction(this);
  }
  if (was_optimized && !is_optimized) {
    // TODO(titzer): linear in the number of optimized functions; fix!
    context()->native_context()->RemoveOptimizedFunction(this);
  }
}
6458
// The feedback cell's value is undefined until a vector is installed.
bool JSFunction::has_feedback_vector() const {
  return !feedback_vector_cell()->value()->IsUndefined(GetIsolate());
}

// Distinguishes the three vector states: the shared undefined_cell marks a
// top-level script that still needs a vector; a dedicated cell holding
// undefined means this closure needs one; otherwise a vector is present.
JSFunction::FeedbackVectorState JSFunction::GetFeedbackVectorState(
    Isolate* isolate) const {
  Cell* cell = feedback_vector_cell();
  if (cell == isolate->heap()->undefined_cell()) {
    return TOP_LEVEL_SCRIPT_NEEDS_VECTOR;
  } else if (cell->value() == isolate->heap()->undefined_value() ||
             !has_feedback_vector()) {
    return NEEDS_VECTOR;
  }
  return HAS_VECTOR;
}
6474
// The context the closure was created in.
Context* JSFunction::context() {
  return Context::cast(READ_FIELD(this, kContextOffset));
}

bool JSFunction::has_context() const {
  return READ_FIELD(this, kContextOffset)->IsContext();
}

JSObject* JSFunction::global_proxy() {
  return context()->global_proxy();
}


Context* JSFunction::native_context() { return context()->native_context(); }


// The slot may be undefined transiently during bootstrapping; otherwise it
// must hold a Context.
void JSFunction::set_context(Object* value) {
  DCHECK(value->IsUndefined(GetIsolate()) || value->IsContext());
  WRITE_FIELD(this, kContextOffset, value);
  WRITE_BARRIER(GetHeap(), this, kContextOffset, value);
}
6496
// Union slot: holds the initial Map once one exists, otherwise the
// prototype object (see instance_prototype() below).
ACCESSORS(JSFunction, prototype_or_initial_map, Object,
          kPrototypeOrInitialMapOffset)
6499
6500
// Only valid when has_initial_map() is true.
Map* JSFunction::initial_map() {
  return Map::cast(prototype_or_initial_map());
}


bool JSFunction::has_initial_map() {
  return prototype_or_initial_map()->IsMap();
}


// The slot holds the-hole until either an initial map or a prototype has
// been installed.
bool JSFunction::has_instance_prototype() {
  return has_initial_map() ||
         !prototype_or_initial_map()->IsTheHole(GetIsolate());
}


// Non-instance prototypes (non-JSObject values) are stored on the map
// instead; see prototype() below.
bool JSFunction::has_prototype() {
  return map()->has_non_instance_prototype() || has_instance_prototype();
}
6520
6521
Object* JSFunction::instance_prototype() {
  DCHECK(has_instance_prototype());
  if (has_initial_map()) return initial_map()->prototype();
  // When there is no initial map and the prototype is a JSObject, the
  // initial map field is used for the prototype field.
  return prototype_or_initial_map();
}


Object* JSFunction::prototype() {
  DCHECK(has_prototype());
  // If the function's prototype property has been set to a non-JSObject
  // value, that value is stored in the constructor field of the map.
  if (map()->has_non_instance_prototype()) {
    Object* prototype = map()->GetConstructor();
    // The map must have a prototype in that field, not a back pointer.
    DCHECK(!prototype->IsMap());
    return prototype;
  }
  return instance_prototype();
}
6543
6544
is_compiled()6545 bool JSFunction::is_compiled() {
6546 Builtins* builtins = GetIsolate()->builtins();
6547 return code() != builtins->builtin(Builtins::kCompileLazy) &&
6548 code() != builtins->builtin(Builtins::kCompileBaseline) &&
6549 code() != builtins->builtin(Builtins::kCompileOptimized) &&
6550 code() != builtins->builtin(Builtins::kCompileOptimizedConcurrent);
6551 }
6552
ACCESSORS(JSProxy, target, JSReceiver, kTargetOffset)
ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
ACCESSORS(JSProxy, hash, Object, kHashOffset)

// Revoking a proxy replaces its handler with a non-receiver value.
bool JSProxy::IsRevoked() const { return !handler()->IsJSReceiver(); }
6558
ACCESSORS(JSCollection, table, Object, kTableOffset)


// Generates getter/setter pairs for the templated ordered-hash-table
// iterator fields (table, index, kind).
#define ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(name, type, offset)    \
  template<class Derived, class TableType>                           \
  type* OrderedHashTableIterator<Derived, TableType>::name() const { \
    return type::cast(READ_FIELD(this, offset));                     \
  }                                                                  \
  template<class Derived, class TableType>                           \
  void OrderedHashTableIterator<Derived, TableType>::set_##name(     \
      type* value, WriteBarrierMode mode) {                          \
    WRITE_FIELD(this, offset, value);                                \
    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \
  }

ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(table, Object, kTableOffset)
ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(index, Object, kIndexOffset)
ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(kind, Object, kKindOffset)

#undef ORDERED_HASH_TABLE_ITERATOR_ACCESSORS
6579
6580
ACCESSORS(JSWeakCollection, table, Object, kTableOffset)
ACCESSORS(JSWeakCollection, next, Object, kNextOffset)


// A Foreign wraps a raw machine address stored as an intptr field.
Address Foreign::foreign_address() {
  return AddressFrom<Address>(READ_INTPTR_FIELD(this, kForeignAddressOffset));
}


void Foreign::set_foreign_address(Address value) {
  WRITE_INTPTR_FIELD(this, kForeignAddressOffset, OffsetFrom(value));
}
6593
6594
ACCESSORS(JSGeneratorObject, function, JSFunction, kFunctionOffset)
ACCESSORS(JSGeneratorObject, context, Context, kContextOffset)
ACCESSORS(JSGeneratorObject, receiver, Object, kReceiverOffset)
ACCESSORS(JSGeneratorObject, input_or_debug_pos, Object, kInputOrDebugPosOffset)
SMI_ACCESSORS(JSGeneratorObject, resume_mode, kResumeModeOffset)
SMI_ACCESSORS(JSGeneratorObject, continuation, kContinuationOffset)
ACCESSORS(JSGeneratorObject, register_file, FixedArray, kRegisterFileOffset)

// The continuation encodes the generator state: non-negative values are
// resume offsets (suspended); the negative sentinels below mark executing
// and closed generators.
bool JSGeneratorObject::is_suspended() const {
  DCHECK_LT(kGeneratorExecuting, 0);
  DCHECK_LT(kGeneratorClosed, 0);
  return continuation() >= 0;
}

bool JSGeneratorObject::is_closed() const {
  return continuation() == kGeneratorClosed;
}

bool JSGeneratorObject::is_executing() const {
  return continuation() == kGeneratorExecuting;
}
6616
ACCESSORS(JSValue, value, Object, kValueOffset)


// Hand-written casts: HeapNumber::cast also accepts MutableHeapNumber,
// which the macro-generated cast would reject.
HeapNumber* HeapNumber::cast(Object* object) {
  SLOW_DCHECK(object->IsHeapNumber() || object->IsMutableHeapNumber());
  return reinterpret_cast<HeapNumber*>(object);
}


const HeapNumber* HeapNumber::cast(const Object* object) {
  SLOW_DCHECK(object->IsHeapNumber() || object->IsMutableHeapNumber());
  return reinterpret_cast<const HeapNumber*>(object);
}
6630
6631
// JSDate caches broken-down time components alongside the raw value.
ACCESSORS(JSDate, value, Object, kValueOffset)
ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset)
ACCESSORS(JSDate, year, Object, kYearOffset)
ACCESSORS(JSDate, month, Object, kMonthOffset)
ACCESSORS(JSDate, day, Object, kDayOffset)
ACCESSORS(JSDate, weekday, Object, kWeekdayOffset)
ACCESSORS(JSDate, hour, Object, kHourOffset)
ACCESSORS(JSDate, min, Object, kMinOffset)
ACCESSORS(JSDate, sec, Object, kSecOffset)


SMI_ACCESSORS(JSMessageObject, type, kTypeOffset)
ACCESSORS(JSMessageObject, argument, Object, kArgumentsOffset)
ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)
SMI_ACCESSORS(JSMessageObject, error_level, kErrorLevelOffset)

INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
INT_ACCESSORS(Code, prologue_offset, kPrologueOffset)
INT_ACCESSORS(Code, constant_pool_offset, kConstantPoolOffset)
// Code sub-object accessors additionally check that the stored value is not
// in new space (code objects must only reference old-space metadata).
#define CODE_ACCESSORS(name, type, offset)           \
  ACCESSORS_CHECKED2(Code, name, type, offset, true, \
                     !GetHeap()->InNewSpace(value))
CODE_ACCESSORS(relocation_info, ByteArray, kRelocationInfoOffset)
CODE_ACCESSORS(handler_table, FixedArray, kHandlerTableOffset)
CODE_ACCESSORS(deoptimization_data, FixedArray, kDeoptimizationDataOffset)
CODE_ACCESSORS(source_position_table, ByteArray, kSourcePositionTableOffset)
CODE_ACCESSORS(raw_type_feedback_info, Object, kTypeFeedbackInfoOffset)
CODE_ACCESSORS(next_code_link, Object, kNextCodeLinkOffset)
#undef CODE_ACCESSORS
6664
// Clears the metadata fields of a code object (used when the header
// contents must not be traversed, e.g. during serialization/flushing).
void Code::WipeOutHeader() {
  WRITE_FIELD(this, kRelocationInfoOffset, NULL);
  WRITE_FIELD(this, kHandlerTableOffset, NULL);
  WRITE_FIELD(this, kDeoptimizationDataOffset, NULL);
  WRITE_FIELD(this, kSourcePositionTableOffset, NULL);
  // Do not wipe out major/minor keys on a code stub or IC
  if (!READ_FIELD(this, kTypeFeedbackInfoOffset)->IsSmi()) {
    WRITE_FIELD(this, kTypeFeedbackInfoOffset, NULL);
  }
  WRITE_FIELD(this, kNextCodeLinkOffset, NULL);
  WRITE_FIELD(this, kGCMetadataOffset, NULL);
}
6677
6678
// The kTypeFeedbackInfoOffset slot is overloaded: for FUNCTION code it
// holds the type feedback info, for stubs/ICs it holds the stub key Smi.
Object* Code::type_feedback_info() {
  DCHECK(kind() == FUNCTION);
  return raw_type_feedback_info();
}


void Code::set_type_feedback_info(Object* value, WriteBarrierMode mode) {
  DCHECK(kind() == FUNCTION);
  set_raw_type_feedback_info(value, mode);
  // NOTE(review): this barrier looks redundant with the one emitted inside
  // set_raw_type_feedback_info (CODE_ACCESSORS) — confirm before removing.
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kTypeFeedbackInfoOffset,
                            value, mode);
}


uint32_t Code::stub_key() {
  DCHECK(IsCodeStubOrIC());
  Smi* smi_key = Smi::cast(raw_type_feedback_info());
  return static_cast<uint32_t>(smi_key->value());
}


void Code::set_stub_key(uint32_t key) {
  DCHECK(IsCodeStubOrIC());
  set_raw_type_feedback_info(Smi::FromInt(key));
}
6704
6705
ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset)
INT_ACCESSORS(Code, ic_age, kICAgeOffset)


// Instructions start immediately after the fixed-size code header.
byte* Code::instruction_start() {
  return FIELD_ADDR(this, kHeaderSize);
}
6713
6714
byte* Code::instruction_end() {
  return instruction_start() + instruction_size();
}

// The unwinding info trails the instructions: a uint64 size field, aligned
// to 8 bytes, followed by the unwinding data itself.
int Code::GetUnwindingInfoSizeOffset() const {
  DCHECK(has_unwinding_info());
  return RoundUp(kHeaderSize + instruction_size(), kInt64Size);
}

int Code::unwinding_info_size() const {
  DCHECK(has_unwinding_info());
  return static_cast<int>(
      READ_UINT64_FIELD(this, GetUnwindingInfoSizeOffset()));
}

void Code::set_unwinding_info_size(int value) {
  DCHECK(has_unwinding_info());
  WRITE_UINT64_FIELD(this, GetUnwindingInfoSizeOffset(), value);
}

// Skips past the size field to the unwinding data proper.
byte* Code::unwinding_info_start() {
  DCHECK(has_unwinding_info());
  return FIELD_ADDR(this, GetUnwindingInfoSizeOffset()) + kInt64Size;
}

byte* Code::unwinding_info_end() {
  DCHECK(has_unwinding_info());
  return unwinding_info_start() + unwinding_info_size();
}
6744
body_size()6745 int Code::body_size() {
6746 int unpadded_body_size =
6747 has_unwinding_info()
6748 ? static_cast<int>(unwinding_info_end() - instruction_start())
6749 : instruction_size();
6750 return RoundUp(unpadded_body_size, kObjectAlignment);
6751 }
6752
// Total footprint: the code object itself plus the metadata arrays it
// points to (relocation info, deopt data, handler table, and for FUNCTION
// kind the source position table).
int Code::SizeIncludingMetadata() {
  int size = CodeSize();
  size += relocation_info()->Size();
  size += deoptimization_data()->Size();
  size += handler_table()->Size();
  if (kind() == FUNCTION) size += source_position_table()->Size();
  return size;
}

// "Unchecked" read: skips the ByteArray type check (used during GC when
// the field may not be in a verifiable state).
ByteArray* Code::unchecked_relocation_info() {
  return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
}


byte* Code::relocation_start() {
  return unchecked_relocation_info()->GetDataStartAddress();
}


int Code::relocation_size() {
  return unchecked_relocation_info()->length();
}


// The entry point is the first instruction.
byte* Code::entry() {
  return instruction_start();
}


// Whether inner_pointer points into this code object (end-inclusive).
bool Code::contains(byte* inner_pointer) {
  return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
}
6785
6786
int Code::ExecutableSize() {
  // Check that the assumptions about the layout of the code object holds.
  DCHECK_EQ(static_cast<int>(instruction_start() - address()),
            Code::kHeaderSize);
  return instruction_size() + Code::kHeaderSize;
}


// Object size derived from the (alignment-padded) body size.
int Code::CodeSize() { return SizeFor(body_size()); }
6796
6797
ACCESSORS(JSArray, length, Object, kLengthOffset)


// The backing store is a raw pointer stored as an intptr field (it points
// outside the V8 heap, so no write barrier is needed).
void* JSArrayBuffer::backing_store() const {
  intptr_t ptr = READ_INTPTR_FIELD(this, kBackingStoreOffset);
  return reinterpret_cast<void*>(ptr);
}


void JSArrayBuffer::set_backing_store(void* value, WriteBarrierMode mode) {
  intptr_t ptr = reinterpret_cast<intptr_t>(value);
  WRITE_INTPTR_FIELD(this, kBackingStoreOffset, ptr);
}


ACCESSORS(JSArrayBuffer, byte_length, Object, kByteLengthOffset)
6814
6815
// The bit field occupies a pointer-sized slot but only uses 32 bits; on
// 64-bit targets the unused half is explicitly zeroed, picking the correct
// half for the target endianness.
void JSArrayBuffer::set_bit_field(uint32_t bits) {
  if (kInt32Size != kPointerSize) {
#if V8_TARGET_LITTLE_ENDIAN
    WRITE_UINT32_FIELD(this, kBitFieldSlot + kInt32Size, 0);
#else
    WRITE_UINT32_FIELD(this, kBitFieldSlot, 0);
#endif
  }
  WRITE_UINT32_FIELD(this, kBitFieldOffset, bits);
}


uint32_t JSArrayBuffer::bit_field() const {
  return READ_UINT32_FIELD(this, kBitFieldOffset);
}
6831
6832
// Boolean flags packed into the bit field via BitField encodings.
bool JSArrayBuffer::is_external() { return IsExternal::decode(bit_field()); }


void JSArrayBuffer::set_is_external(bool value) {
  // Guard-region buffers are managed internally and cannot be external.
  DCHECK(!value || !has_guard_region());
  set_bit_field(IsExternal::update(bit_field(), value));
}


bool JSArrayBuffer::is_neuterable() {
  return IsNeuterable::decode(bit_field());
}


void JSArrayBuffer::set_is_neuterable(bool value) {
  set_bit_field(IsNeuterable::update(bit_field(), value));
}


bool JSArrayBuffer::was_neutered() { return WasNeutered::decode(bit_field()); }


void JSArrayBuffer::set_was_neutered(bool value) {
  set_bit_field(WasNeutered::update(bit_field(), value));
}


bool JSArrayBuffer::is_shared() { return IsShared::decode(bit_field()); }


void JSArrayBuffer::set_is_shared(bool value) {
  set_bit_field(IsShared::update(bit_field(), value));
}

bool JSArrayBuffer::has_guard_region() {
  return HasGuardRegion::decode(bit_field());
}

void JSArrayBuffer::set_has_guard_region(bool value) {
  set_bit_field(HasGuardRegion::update(bit_field(), value));
}
6874
byte_offset()6875 Object* JSArrayBufferView::byte_offset() const {
6876 if (WasNeutered()) return Smi::kZero;
6877 return Object::cast(READ_FIELD(this, kByteOffsetOffset));
6878 }
6879
6880
set_byte_offset(Object * value,WriteBarrierMode mode)6881 void JSArrayBufferView::set_byte_offset(Object* value, WriteBarrierMode mode) {
6882 WRITE_FIELD(this, kByteOffsetOffset, value);
6883 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kByteOffsetOffset, value, mode);
6884 }
6885
6886
byte_length()6887 Object* JSArrayBufferView::byte_length() const {
6888 if (WasNeutered()) return Smi::kZero;
6889 return Object::cast(READ_FIELD(this, kByteLengthOffset));
6890 }
6891
6892
set_byte_length(Object * value,WriteBarrierMode mode)6893 void JSArrayBufferView::set_byte_length(Object* value, WriteBarrierMode mode) {
6894 WRITE_FIELD(this, kByteLengthOffset, value);
6895 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kByteLengthOffset, value, mode);
6896 }
6897
6898
ACCESSORS(JSArrayBufferView,buffer,Object,kBufferOffset)6899 ACCESSORS(JSArrayBufferView, buffer, Object, kBufferOffset)
6900 #ifdef VERIFY_HEAP
6901 ACCESSORS(JSArrayBufferView, raw_byte_offset, Object, kByteOffsetOffset)
6902 ACCESSORS(JSArrayBufferView, raw_byte_length, Object, kByteLengthOffset)
6903 #endif
6904
6905
6906 bool JSArrayBufferView::WasNeutered() const {
6907 return JSArrayBuffer::cast(buffer())->was_neutered();
6908 }
6909
6910
length()6911 Object* JSTypedArray::length() const {
6912 if (WasNeutered()) return Smi::kZero;
6913 return Object::cast(READ_FIELD(this, kLengthOffset));
6914 }
6915
6916
length_value()6917 uint32_t JSTypedArray::length_value() const {
6918 if (WasNeutered()) return 0;
6919 uint32_t index = 0;
6920 CHECK(Object::cast(READ_FIELD(this, kLengthOffset))->ToArrayLength(&index));
6921 return index;
6922 }
6923
6924
set_length(Object * value,WriteBarrierMode mode)6925 void JSTypedArray::set_length(Object* value, WriteBarrierMode mode) {
6926 WRITE_FIELD(this, kLengthOffset, value);
6927 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kLengthOffset, value, mode);
6928 }
6929
6930 // static
Validate(Isolate * isolate,Handle<Object> receiver,const char * method_name)6931 MaybeHandle<JSTypedArray> JSTypedArray::Validate(Isolate* isolate,
6932 Handle<Object> receiver,
6933 const char* method_name) {
6934 if (V8_UNLIKELY(!receiver->IsJSTypedArray())) {
6935 const MessageTemplate::Template message = MessageTemplate::kNotTypedArray;
6936 THROW_NEW_ERROR(isolate, NewTypeError(message), JSTypedArray);
6937 }
6938
6939 // TODO(caitp): throw if array.[[ViewedArrayBuffer]] is neutered (per v8:4648)
6940 return Handle<JSTypedArray>::cast(receiver);
6941 }
6942
#ifdef VERIFY_HEAP
// Raw length accessor bypassing the WasNeutered() check; heap-verifier only.
ACCESSORS(JSTypedArray, raw_length, Object, kLengthOffset)
#endif

// PromiseCapability record fields: promise plus its resolve/reject
// functions.
ACCESSORS(JSPromiseCapability, promise, Object, kPromiseOffset)
ACCESSORS(JSPromiseCapability, resolve, Object, kResolveOffset)
ACCESSORS(JSPromiseCapability, reject, Object, kRejectOffset)

// JSPromise state: status, settled result, deferred callbacks and the
// queued fulfill/reject reaction lists.
SMI_ACCESSORS(JSPromise, status, kStatusOffset)
ACCESSORS(JSPromise, result, Object, kResultOffset)
ACCESSORS(JSPromise, deferred_promise, Object, kDeferredPromiseOffset)
ACCESSORS(JSPromise, deferred_on_resolve, Object, kDeferredOnResolveOffset)
ACCESSORS(JSPromise, deferred_on_reject, Object, kDeferredOnRejectOffset)
ACCESSORS(JSPromise, fulfill_reactions, Object, kFulfillReactionsOffset)
ACCESSORS(JSPromise, reject_reactions, Object, kRejectReactionsOffset)
// Packed boolean flags for the promise.
SMI_ACCESSORS(JSPromise, flags, kFlagsOffset)
BOOL_ACCESSORS(JSPromise, flags, has_handler, kHasHandlerBit)
BOOL_ACCESSORS(JSPromise, flags, handled_hint, kHandledHintBit)

// JSRegExp: compilation data array, flags and the source pattern string.
ACCESSORS(JSRegExp, data, Object, kDataOffset)
ACCESSORS(JSRegExp, flags, Object, kFlagsOffset)
ACCESSORS(JSRegExp, source, Object, kSourceOffset)
6965
6966
6967 JSRegExp::Type JSRegExp::TypeTag() {
6968 Object* data = this->data();
6969 if (data->IsUndefined(GetIsolate())) return JSRegExp::NOT_COMPILED;
6970 Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
6971 return static_cast<JSRegExp::Type>(smi->value());
6972 }
6973
6974
CaptureCount()6975 int JSRegExp::CaptureCount() {
6976 switch (TypeTag()) {
6977 case ATOM:
6978 return 0;
6979 case IRREGEXP:
6980 return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
6981 default:
6982 UNREACHABLE();
6983 return -1;
6984 }
6985 }
6986
6987
GetFlags()6988 JSRegExp::Flags JSRegExp::GetFlags() {
6989 DCHECK(this->data()->IsFixedArray());
6990 Object* data = this->data();
6991 Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
6992 return Flags(smi->value());
6993 }
6994
6995
Pattern()6996 String* JSRegExp::Pattern() {
6997 DCHECK(this->data()->IsFixedArray());
6998 Object* data = this->data();
6999 String* pattern = String::cast(FixedArray::cast(data)->get(kSourceIndex));
7000 return pattern;
7001 }
7002
7003
DataAt(int index)7004 Object* JSRegExp::DataAt(int index) {
7005 DCHECK(TypeTag() != NOT_COMPILED);
7006 return FixedArray::cast(data())->get(index);
7007 }
7008
7009
SetDataAt(int index,Object * value)7010 void JSRegExp::SetDataAt(int index, Object* value) {
7011 DCHECK(TypeTag() != NOT_COMPILED);
7012 DCHECK(index >= kDataIndex); // Only implementation data can be set this way.
7013 FixedArray::cast(data())->set(index, value);
7014 }
7015
SetLastIndex(int index)7016 void JSRegExp::SetLastIndex(int index) {
7017 static const int offset =
7018 kSize + JSRegExp::kLastIndexFieldIndex * kPointerSize;
7019 Smi* value = Smi::FromInt(index);
7020 WRITE_FIELD(this, offset, value);
7021 }
7022
LastIndex()7023 Object* JSRegExp::LastIndex() {
7024 static const int offset =
7025 kSize + JSRegExp::kLastIndexFieldIndex * kPointerSize;
7026 return READ_FIELD(this, offset);
7027 }
7028
GetElementsKind()7029 ElementsKind JSObject::GetElementsKind() {
7030 ElementsKind kind = map()->elements_kind();
7031 #if VERIFY_HEAP && DEBUG
7032 FixedArrayBase* fixed_array =
7033 reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));
7034
7035 // If a GC was caused while constructing this object, the elements
7036 // pointer may point to a one pointer filler map.
7037 if (ElementsAreSafeToExamine()) {
7038 Map* map = fixed_array->map();
7039 if (IsFastSmiOrObjectElementsKind(kind)) {
7040 DCHECK(map == GetHeap()->fixed_array_map() ||
7041 map == GetHeap()->fixed_cow_array_map());
7042 } else if (IsFastDoubleElementsKind(kind)) {
7043 DCHECK(fixed_array->IsFixedDoubleArray() ||
7044 fixed_array == GetHeap()->empty_fixed_array());
7045 } else if (kind == DICTIONARY_ELEMENTS) {
7046 DCHECK(fixed_array->IsFixedArray());
7047 DCHECK(fixed_array->IsDictionary());
7048 } else {
7049 DCHECK(kind > DICTIONARY_ELEMENTS);
7050 }
7051 DCHECK(!IsSloppyArgumentsElements(kind) ||
7052 (elements()->IsFixedArray() && elements()->length() >= 2));
7053 }
7054 #endif
7055 return kind;
7056 }
7057
7058
HasFastObjectElements()7059 bool JSObject::HasFastObjectElements() {
7060 return IsFastObjectElementsKind(GetElementsKind());
7061 }
7062
7063
HasFastSmiElements()7064 bool JSObject::HasFastSmiElements() {
7065 return IsFastSmiElementsKind(GetElementsKind());
7066 }
7067
7068
HasFastSmiOrObjectElements()7069 bool JSObject::HasFastSmiOrObjectElements() {
7070 return IsFastSmiOrObjectElementsKind(GetElementsKind());
7071 }
7072
7073
HasFastDoubleElements()7074 bool JSObject::HasFastDoubleElements() {
7075 return IsFastDoubleElementsKind(GetElementsKind());
7076 }
7077
7078
HasFastHoleyElements()7079 bool JSObject::HasFastHoleyElements() {
7080 return IsFastHoleyElementsKind(GetElementsKind());
7081 }
7082
7083
HasFastElements()7084 bool JSObject::HasFastElements() {
7085 return IsFastElementsKind(GetElementsKind());
7086 }
7087
7088
HasDictionaryElements()7089 bool JSObject::HasDictionaryElements() {
7090 return GetElementsKind() == DICTIONARY_ELEMENTS;
7091 }
7092
7093
HasFastArgumentsElements()7094 bool JSObject::HasFastArgumentsElements() {
7095 return GetElementsKind() == FAST_SLOPPY_ARGUMENTS_ELEMENTS;
7096 }
7097
7098
HasSlowArgumentsElements()7099 bool JSObject::HasSlowArgumentsElements() {
7100 return GetElementsKind() == SLOW_SLOPPY_ARGUMENTS_ELEMENTS;
7101 }
7102
7103
HasSloppyArgumentsElements()7104 bool JSObject::HasSloppyArgumentsElements() {
7105 return IsSloppyArgumentsElements(GetElementsKind());
7106 }
7107
HasStringWrapperElements()7108 bool JSObject::HasStringWrapperElements() {
7109 return IsStringWrapperElementsKind(GetElementsKind());
7110 }
7111
HasFastStringWrapperElements()7112 bool JSObject::HasFastStringWrapperElements() {
7113 return GetElementsKind() == FAST_STRING_WRAPPER_ELEMENTS;
7114 }
7115
HasSlowStringWrapperElements()7116 bool JSObject::HasSlowStringWrapperElements() {
7117 return GetElementsKind() == SLOW_STRING_WRAPPER_ELEMENTS;
7118 }
7119
HasFixedTypedArrayElements()7120 bool JSObject::HasFixedTypedArrayElements() {
7121 DCHECK_NOT_NULL(elements());
7122 return map()->has_fixed_typed_array_elements();
7123 }
7124
// Generates one HasFixed<Type>Elements() predicate per typed-array
// element type; each checks the instance type of the elements array.
#define FIXED_TYPED_ELEMENTS_CHECK(Type, type, TYPE, ctype, size)      \
  bool JSObject::HasFixed##Type##Elements() {                          \
    HeapObject* array = elements();                                    \
    DCHECK(array != NULL);                                             \
    if (!array->IsHeapObject()) return false;                          \
    return array->map()->instance_type() == FIXED_##TYPE##_ARRAY_TYPE; \
  }

TYPED_ARRAYS(FIXED_TYPED_ELEMENTS_CHECK)

#undef FIXED_TYPED_ELEMENTS_CHECK
7136
7137
// Interceptor presence is recorded on the map.
bool JSObject::HasNamedInterceptor() {
  return map()->has_named_interceptor();
}

bool JSObject::HasIndexedInterceptor() {
  return map()->has_indexed_interceptor();
}

// The properties backing store of a global object in dictionary mode,
// viewed as a GlobalDictionary. Only valid for JSGlobalObjects with slow
// properties.
GlobalDictionary* JSObject::global_dictionary() {
  DCHECK(!HasFastProperties());
  DCHECK(IsJSGlobalObject());
  return GlobalDictionary::cast(properties());
}

// The elements backing store viewed as a number dictionary. Only valid
// when elements are in dictionary (or slow string-wrapper) mode.
SeededNumberDictionary* JSObject::element_dictionary() {
  DCHECK(HasDictionaryElements() || HasSlowStringWrapperElements());
  return SeededNumberDictionary::cast(elements());
}
7159
7160
IsHashFieldComputed(uint32_t field)7161 bool Name::IsHashFieldComputed(uint32_t field) {
7162 return (field & kHashNotComputedMask) == 0;
7163 }
7164
7165
HasHashCode()7166 bool Name::HasHashCode() {
7167 return IsHashFieldComputed(hash_field());
7168 }
7169
7170
Hash()7171 uint32_t Name::Hash() {
7172 // Fast case: has hash code already been computed?
7173 uint32_t field = hash_field();
7174 if (IsHashFieldComputed(field)) return field >> kHashShift;
7175 // Slow case: compute hash code and set it. Has to be a string.
7176 return String::cast(this)->ComputeAndSetHash();
7177 }
7178
7179
IsPrivate()7180 bool Name::IsPrivate() {
7181 return this->IsSymbol() && Symbol::cast(this)->is_private();
7182 }
7183
7184
StringHasher(int length,uint32_t seed)7185 StringHasher::StringHasher(int length, uint32_t seed)
7186 : length_(length),
7187 raw_running_hash_(seed),
7188 array_index_(0),
7189 is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
7190 is_first_char_(true) {
7191 DCHECK(FLAG_randomize_hashes || raw_running_hash_ == 0);
7192 }
7193
7194
has_trivial_hash()7195 bool StringHasher::has_trivial_hash() {
7196 return length_ > String::kMaxHashCalcLength;
7197 }
7198
7199
AddCharacterCore(uint32_t running_hash,uint16_t c)7200 uint32_t StringHasher::AddCharacterCore(uint32_t running_hash, uint16_t c) {
7201 running_hash += c;
7202 running_hash += (running_hash << 10);
7203 running_hash ^= (running_hash >> 6);
7204 return running_hash;
7205 }
7206
7207
GetHashCore(uint32_t running_hash)7208 uint32_t StringHasher::GetHashCore(uint32_t running_hash) {
7209 running_hash += (running_hash << 3);
7210 running_hash ^= (running_hash >> 11);
7211 running_hash += (running_hash << 15);
7212 if ((running_hash & String::kHashBitMask) == 0) {
7213 return kZeroHash;
7214 }
7215 return running_hash;
7216 }
7217
7218
ComputeRunningHash(uint32_t running_hash,const uc16 * chars,int length)7219 uint32_t StringHasher::ComputeRunningHash(uint32_t running_hash,
7220 const uc16* chars, int length) {
7221 DCHECK_NOT_NULL(chars);
7222 DCHECK(length >= 0);
7223 for (int i = 0; i < length; ++i) {
7224 running_hash = AddCharacterCore(running_hash, *chars++);
7225 }
7226 return running_hash;
7227 }
7228
7229
ComputeRunningHashOneByte(uint32_t running_hash,const char * chars,int length)7230 uint32_t StringHasher::ComputeRunningHashOneByte(uint32_t running_hash,
7231 const char* chars,
7232 int length) {
7233 DCHECK_NOT_NULL(chars);
7234 DCHECK(length >= 0);
7235 for (int i = 0; i < length; ++i) {
7236 uint16_t c = static_cast<uint16_t>(*chars++);
7237 running_hash = AddCharacterCore(running_hash, c);
7238 }
7239 return running_hash;
7240 }
7241
7242
AddCharacter(uint16_t c)7243 void StringHasher::AddCharacter(uint16_t c) {
7244 // Use the Jenkins one-at-a-time hash function to update the hash
7245 // for the given character.
7246 raw_running_hash_ = AddCharacterCore(raw_running_hash_, c);
7247 }
7248
7249
UpdateIndex(uint16_t c)7250 bool StringHasher::UpdateIndex(uint16_t c) {
7251 DCHECK(is_array_index_);
7252 if (c < '0' || c > '9') {
7253 is_array_index_ = false;
7254 return false;
7255 }
7256 int d = c - '0';
7257 if (is_first_char_) {
7258 is_first_char_ = false;
7259 if (c == '0' && length_ > 1) {
7260 is_array_index_ = false;
7261 return false;
7262 }
7263 }
7264 if (array_index_ > 429496729U - ((d + 3) >> 3)) {
7265 is_array_index_ = false;
7266 return false;
7267 }
7268 array_index_ = array_index_ * 10 + d;
7269 return true;
7270 }
7271
7272
// Feeds |length| characters into both the running hash and, while still
// plausible, the array-index parse. Once UpdateIndex() rejects a
// character, the remaining characters only update the hash.
template<typename Char>
inline void StringHasher::AddCharacters(const Char* chars, int length) {
  DCHECK(sizeof(Char) == 1 || sizeof(Char) == 2);
  int i = 0;
  if (is_array_index_) {
    for (; i < length; i++) {
      AddCharacter(chars[i]);
      if (!UpdateIndex(chars[i])) {
        // chars[i] was already hashed above; resume hashing at i + 1.
        i++;
        break;
      }
    }
  }
  for (; i < length; i++) {
    DCHECK(!is_array_index_);
    AddCharacter(chars[i]);
  }
}
7291
7292
// Hashes a flat character sequence in one shot. Strings with a trivial
// (length-based) hash skip the character loop entirely.
template <typename schar>
uint32_t StringHasher::HashSequentialString(const schar* chars,
                                            int length,
                                            uint32_t seed) {
  StringHasher hasher(length, seed);
  if (!hasher.has_trivial_hash()) hasher.AddCharacters(chars, length);
  return hasher.GetHashField();
}
7301
7302
IteratingStringHasher(int len,uint32_t seed)7303 IteratingStringHasher::IteratingStringHasher(int len, uint32_t seed)
7304 : StringHasher(len, seed) {}
7305
7306
Hash(String * string,uint32_t seed)7307 uint32_t IteratingStringHasher::Hash(String* string, uint32_t seed) {
7308 IteratingStringHasher hasher(string->length(), seed);
7309 // Nothing to do.
7310 if (hasher.has_trivial_hash()) return hasher.GetHashField();
7311 ConsString* cons_string = String::VisitFlat(&hasher, string);
7312 if (cons_string == nullptr) return hasher.GetHashField();
7313 hasher.VisitConsString(cons_string);
7314 return hasher.GetHashField();
7315 }
7316
7317
VisitOneByteString(const uint8_t * chars,int length)7318 void IteratingStringHasher::VisitOneByteString(const uint8_t* chars,
7319 int length) {
7320 AddCharacters(chars, length);
7321 }
7322
7323
VisitTwoByteString(const uint16_t * chars,int length)7324 void IteratingStringHasher::VisitTwoByteString(const uint16_t* chars,
7325 int length) {
7326 AddCharacters(chars, length);
7327 }
7328
7329
AsArrayIndex(uint32_t * index)7330 bool Name::AsArrayIndex(uint32_t* index) {
7331 return IsString() && String::cast(this)->AsArrayIndex(index);
7332 }
7333
7334
AsArrayIndex(uint32_t * index)7335 bool String::AsArrayIndex(uint32_t* index) {
7336 uint32_t field = hash_field();
7337 if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
7338 return false;
7339 }
7340 return SlowAsArrayIndex(index);
7341 }
7342
7343
SetForwardedInternalizedString(String * canonical)7344 void String::SetForwardedInternalizedString(String* canonical) {
7345 DCHECK(IsInternalizedString());
7346 DCHECK(HasHashCode());
7347 if (canonical == this) return; // No need to forward.
7348 DCHECK(SlowEquals(canonical));
7349 DCHECK(canonical->IsInternalizedString());
7350 DCHECK(canonical->HasHashCode());
7351 WRITE_FIELD(this, kHashFieldSlot, canonical);
7352 // Setting the hash field to a tagged value sets the LSB, causing the hash
7353 // code to be interpreted as uninitialized. We use this fact to recognize
7354 // that we have a forwarded string.
7355 DCHECK(!HasHashCode());
7356 }
7357
7358
GetForwardedInternalizedString()7359 String* String::GetForwardedInternalizedString() {
7360 DCHECK(IsInternalizedString());
7361 if (HasHashCode()) return this;
7362 String* canonical = String::cast(READ_FIELD(this, kHashFieldSlot));
7363 DCHECK(canonical->IsInternalizedString());
7364 DCHECK(SlowEquals(canonical));
7365 DCHECK(canonical->HasHashCode());
7366 return canonical;
7367 }
7368
7369
7370 // static
GreaterThan(Handle<Object> x,Handle<Object> y)7371 Maybe<bool> Object::GreaterThan(Handle<Object> x, Handle<Object> y) {
7372 Maybe<ComparisonResult> result = Compare(x, y);
7373 if (result.IsJust()) {
7374 switch (result.FromJust()) {
7375 case ComparisonResult::kGreaterThan:
7376 return Just(true);
7377 case ComparisonResult::kLessThan:
7378 case ComparisonResult::kEqual:
7379 case ComparisonResult::kUndefined:
7380 return Just(false);
7381 }
7382 }
7383 return Nothing<bool>();
7384 }
7385
7386
7387 // static
GreaterThanOrEqual(Handle<Object> x,Handle<Object> y)7388 Maybe<bool> Object::GreaterThanOrEqual(Handle<Object> x, Handle<Object> y) {
7389 Maybe<ComparisonResult> result = Compare(x, y);
7390 if (result.IsJust()) {
7391 switch (result.FromJust()) {
7392 case ComparisonResult::kEqual:
7393 case ComparisonResult::kGreaterThan:
7394 return Just(true);
7395 case ComparisonResult::kLessThan:
7396 case ComparisonResult::kUndefined:
7397 return Just(false);
7398 }
7399 }
7400 return Nothing<bool>();
7401 }
7402
7403
7404 // static
LessThan(Handle<Object> x,Handle<Object> y)7405 Maybe<bool> Object::LessThan(Handle<Object> x, Handle<Object> y) {
7406 Maybe<ComparisonResult> result = Compare(x, y);
7407 if (result.IsJust()) {
7408 switch (result.FromJust()) {
7409 case ComparisonResult::kLessThan:
7410 return Just(true);
7411 case ComparisonResult::kEqual:
7412 case ComparisonResult::kGreaterThan:
7413 case ComparisonResult::kUndefined:
7414 return Just(false);
7415 }
7416 }
7417 return Nothing<bool>();
7418 }
7419
7420
7421 // static
LessThanOrEqual(Handle<Object> x,Handle<Object> y)7422 Maybe<bool> Object::LessThanOrEqual(Handle<Object> x, Handle<Object> y) {
7423 Maybe<ComparisonResult> result = Compare(x, y);
7424 if (result.IsJust()) {
7425 switch (result.FromJust()) {
7426 case ComparisonResult::kEqual:
7427 case ComparisonResult::kLessThan:
7428 return Just(true);
7429 case ComparisonResult::kGreaterThan:
7430 case ComparisonResult::kUndefined:
7431 return Just(false);
7432 }
7433 }
7434 return Nothing<bool>();
7435 }
7436
GetPropertyOrElement(Handle<Object> object,Handle<Name> name)7437 MaybeHandle<Object> Object::GetPropertyOrElement(Handle<Object> object,
7438 Handle<Name> name) {
7439 LookupIterator it =
7440 LookupIterator::PropertyOrElement(name->GetIsolate(), object, name);
7441 return GetProperty(&it);
7442 }
7443
SetPropertyOrElement(Handle<Object> object,Handle<Name> name,Handle<Object> value,LanguageMode language_mode,StoreFromKeyed store_mode)7444 MaybeHandle<Object> Object::SetPropertyOrElement(Handle<Object> object,
7445 Handle<Name> name,
7446 Handle<Object> value,
7447 LanguageMode language_mode,
7448 StoreFromKeyed store_mode) {
7449 LookupIterator it =
7450 LookupIterator::PropertyOrElement(name->GetIsolate(), object, name);
7451 MAYBE_RETURN_NULL(SetProperty(&it, value, language_mode, store_mode));
7452 return value;
7453 }
7454
GetPropertyOrElement(Handle<Object> receiver,Handle<Name> name,Handle<JSReceiver> holder)7455 MaybeHandle<Object> Object::GetPropertyOrElement(Handle<Object> receiver,
7456 Handle<Name> name,
7457 Handle<JSReceiver> holder) {
7458 LookupIterator it = LookupIterator::PropertyOrElement(
7459 name->GetIsolate(), receiver, name, holder);
7460 return GetProperty(&it);
7461 }
7462
7463
initialize_properties()7464 void JSReceiver::initialize_properties() {
7465 DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
7466 DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_properties_dictionary()));
7467 if (map()->is_dictionary_map()) {
7468 WRITE_FIELD(this, kPropertiesOffset,
7469 GetHeap()->empty_properties_dictionary());
7470 } else {
7471 WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
7472 }
7473 }
7474
7475
HasFastProperties()7476 bool JSReceiver::HasFastProperties() {
7477 DCHECK_EQ(properties()->IsDictionary(), map()->is_dictionary_map());
7478 return !properties()->IsDictionary();
7479 }
7480
7481
property_dictionary()7482 NameDictionary* JSReceiver::property_dictionary() {
7483 DCHECK(!HasFastProperties());
7484 DCHECK(!IsJSGlobalObject());
7485 return NameDictionary::cast(properties());
7486 }
7487
HasProperty(Handle<JSReceiver> object,Handle<Name> name)7488 Maybe<bool> JSReceiver::HasProperty(Handle<JSReceiver> object,
7489 Handle<Name> name) {
7490 LookupIterator it = LookupIterator::PropertyOrElement(object->GetIsolate(),
7491 object, name, object);
7492 return HasProperty(&it);
7493 }
7494
7495
HasOwnProperty(Handle<JSReceiver> object,Handle<Name> name)7496 Maybe<bool> JSReceiver::HasOwnProperty(Handle<JSReceiver> object,
7497 Handle<Name> name) {
7498 if (object->IsJSObject()) { // Shortcut
7499 LookupIterator it = LookupIterator::PropertyOrElement(
7500 object->GetIsolate(), object, name, object, LookupIterator::OWN);
7501 return HasProperty(&it);
7502 }
7503
7504 Maybe<PropertyAttributes> attributes =
7505 JSReceiver::GetOwnPropertyAttributes(object, name);
7506 MAYBE_RETURN(attributes, Nothing<bool>());
7507 return Just(attributes.FromJust() != ABSENT);
7508 }
7509
HasOwnProperty(Handle<JSReceiver> object,uint32_t index)7510 Maybe<bool> JSReceiver::HasOwnProperty(Handle<JSReceiver> object,
7511 uint32_t index) {
7512 if (object->IsJSObject()) { // Shortcut
7513 LookupIterator it(object->GetIsolate(), object, index, object,
7514 LookupIterator::OWN);
7515 return HasProperty(&it);
7516 }
7517
7518 Maybe<PropertyAttributes> attributes =
7519 JSReceiver::GetOwnPropertyAttributes(object, index);
7520 MAYBE_RETURN(attributes, Nothing<bool>());
7521 return Just(attributes.FromJust() != ABSENT);
7522 }
7523
GetPropertyAttributes(Handle<JSReceiver> object,Handle<Name> name)7524 Maybe<PropertyAttributes> JSReceiver::GetPropertyAttributes(
7525 Handle<JSReceiver> object, Handle<Name> name) {
7526 LookupIterator it = LookupIterator::PropertyOrElement(name->GetIsolate(),
7527 object, name, object);
7528 return GetPropertyAttributes(&it);
7529 }
7530
7531
GetOwnPropertyAttributes(Handle<JSReceiver> object,Handle<Name> name)7532 Maybe<PropertyAttributes> JSReceiver::GetOwnPropertyAttributes(
7533 Handle<JSReceiver> object, Handle<Name> name) {
7534 LookupIterator it = LookupIterator::PropertyOrElement(
7535 name->GetIsolate(), object, name, object, LookupIterator::OWN);
7536 return GetPropertyAttributes(&it);
7537 }
7538
GetOwnPropertyAttributes(Handle<JSReceiver> object,uint32_t index)7539 Maybe<PropertyAttributes> JSReceiver::GetOwnPropertyAttributes(
7540 Handle<JSReceiver> object, uint32_t index) {
7541 LookupIterator it(object->GetIsolate(), object, index, object,
7542 LookupIterator::OWN);
7543 return GetPropertyAttributes(&it);
7544 }
7545
HasElement(Handle<JSReceiver> object,uint32_t index)7546 Maybe<bool> JSReceiver::HasElement(Handle<JSReceiver> object, uint32_t index) {
7547 LookupIterator it(object->GetIsolate(), object, index, object);
7548 return HasProperty(&it);
7549 }
7550
7551
GetElementAttributes(Handle<JSReceiver> object,uint32_t index)7552 Maybe<PropertyAttributes> JSReceiver::GetElementAttributes(
7553 Handle<JSReceiver> object, uint32_t index) {
7554 Isolate* isolate = object->GetIsolate();
7555 LookupIterator it(isolate, object, index, object);
7556 return GetPropertyAttributes(&it);
7557 }
7558
7559
GetOwnElementAttributes(Handle<JSReceiver> object,uint32_t index)7560 Maybe<PropertyAttributes> JSReceiver::GetOwnElementAttributes(
7561 Handle<JSReceiver> object, uint32_t index) {
7562 Isolate* isolate = object->GetIsolate();
7563 LookupIterator it(isolate, object, index, object, LookupIterator::OWN);
7564 return GetPropertyAttributes(&it);
7565 }
7566
7567
IsDetached()7568 bool JSGlobalObject::IsDetached() {
7569 return JSGlobalProxy::cast(global_proxy())->IsDetachedFrom(this);
7570 }
7571
7572
IsDetachedFrom(JSGlobalObject * global)7573 bool JSGlobalProxy::IsDetachedFrom(JSGlobalObject* global) const {
7574 const PrototypeIterator iter(this->GetIsolate(),
7575 const_cast<JSGlobalProxy*>(this));
7576 return iter.GetCurrent() != global;
7577 }
7578
SizeWithInternalFields(int internal_field_count)7579 inline int JSGlobalProxy::SizeWithInternalFields(int internal_field_count) {
7580 DCHECK_GE(internal_field_count, 0);
7581 return kSize + internal_field_count * kPointerSize;
7582 }
7583
GetOrCreateIdentityHash(Isolate * isolate,Handle<JSReceiver> object)7584 Smi* JSReceiver::GetOrCreateIdentityHash(Isolate* isolate,
7585 Handle<JSReceiver> object) {
7586 return object->IsJSProxy() ? JSProxy::GetOrCreateIdentityHash(
7587 isolate, Handle<JSProxy>::cast(object))
7588 : JSObject::GetOrCreateIdentityHash(
7589 isolate, Handle<JSObject>::cast(object));
7590 }
7591
GetIdentityHash(Isolate * isolate,Handle<JSReceiver> receiver)7592 Object* JSReceiver::GetIdentityHash(Isolate* isolate,
7593 Handle<JSReceiver> receiver) {
7594 return receiver->IsJSProxy()
7595 ? JSProxy::GetIdentityHash(Handle<JSProxy>::cast(receiver))
7596 : JSObject::GetIdentityHash(isolate,
7597 Handle<JSObject>::cast(receiver));
7598 }
7599
7600
all_can_read()7601 bool AccessorInfo::all_can_read() {
7602 return BooleanBit::get(flag(), kAllCanReadBit);
7603 }
7604
7605
set_all_can_read(bool value)7606 void AccessorInfo::set_all_can_read(bool value) {
7607 set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
7608 }
7609
7610
all_can_write()7611 bool AccessorInfo::all_can_write() {
7612 return BooleanBit::get(flag(), kAllCanWriteBit);
7613 }
7614
7615
set_all_can_write(bool value)7616 void AccessorInfo::set_all_can_write(bool value) {
7617 set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
7618 }
7619
7620
is_special_data_property()7621 bool AccessorInfo::is_special_data_property() {
7622 return BooleanBit::get(flag(), kSpecialDataProperty);
7623 }
7624
7625
set_is_special_data_property(bool value)7626 void AccessorInfo::set_is_special_data_property(bool value) {
7627 set_flag(BooleanBit::set(flag(), kSpecialDataProperty, value));
7628 }
7629
replace_on_access()7630 bool AccessorInfo::replace_on_access() {
7631 return BooleanBit::get(flag(), kReplaceOnAccess);
7632 }
7633
set_replace_on_access(bool value)7634 void AccessorInfo::set_replace_on_access(bool value) {
7635 set_flag(BooleanBit::set(flag(), kReplaceOnAccess, value));
7636 }
7637
is_sloppy()7638 bool AccessorInfo::is_sloppy() { return BooleanBit::get(flag(), kIsSloppy); }
7639
set_is_sloppy(bool value)7640 void AccessorInfo::set_is_sloppy(bool value) {
7641 set_flag(BooleanBit::set(flag(), kIsSloppy, value));
7642 }
7643
property_attributes()7644 PropertyAttributes AccessorInfo::property_attributes() {
7645 return AttributesField::decode(static_cast<uint32_t>(flag()));
7646 }
7647
7648
set_property_attributes(PropertyAttributes attributes)7649 void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
7650 set_flag(AttributesField::update(flag(), attributes));
7651 }
7652
IsTemplateFor(JSObject * object)7653 bool FunctionTemplateInfo::IsTemplateFor(JSObject* object) {
7654 return IsTemplateFor(object->map());
7655 }
7656
IsCompatibleReceiver(Object * receiver)7657 bool AccessorInfo::IsCompatibleReceiver(Object* receiver) {
7658 if (!HasExpectedReceiverType()) return true;
7659 if (!receiver->IsJSObject()) return false;
7660 return FunctionTemplateInfo::cast(expected_receiver_type())
7661 ->IsTemplateFor(JSObject::cast(receiver)->map());
7662 }
7663
7664
HasExpectedReceiverType()7665 bool AccessorInfo::HasExpectedReceiverType() {
7666 return expected_receiver_type()->IsFunctionTemplateInfo();
7667 }
7668
7669
get(AccessorComponent component)7670 Object* AccessorPair::get(AccessorComponent component) {
7671 return component == ACCESSOR_GETTER ? getter() : setter();
7672 }
7673
7674
set(AccessorComponent component,Object * value)7675 void AccessorPair::set(AccessorComponent component, Object* value) {
7676 if (component == ACCESSOR_GETTER) {
7677 set_getter(value);
7678 } else {
7679 set_setter(value);
7680 }
7681 }
7682
7683
SetComponents(Object * getter,Object * setter)7684 void AccessorPair::SetComponents(Object* getter, Object* setter) {
7685 Isolate* isolate = GetIsolate();
7686 if (!getter->IsNull(isolate)) set_getter(getter);
7687 if (!setter->IsNull(isolate)) set_setter(setter);
7688 }
7689
7690
Equals(AccessorPair * pair)7691 bool AccessorPair::Equals(AccessorPair* pair) {
7692 return (this == pair) || pair->Equals(getter(), setter());
7693 }
7694
7695
Equals(Object * getter_value,Object * setter_value)7696 bool AccessorPair::Equals(Object* getter_value, Object* setter_value) {
7697 return (getter() == getter_value) && (setter() == setter_value);
7698 }
7699
7700
ContainsAccessor()7701 bool AccessorPair::ContainsAccessor() {
7702 return IsJSAccessor(getter()) || IsJSAccessor(setter());
7703 }
7704
7705
IsJSAccessor(Object * obj)7706 bool AccessorPair::IsJSAccessor(Object* obj) {
7707 return obj->IsCallable() || obj->IsUndefined(GetIsolate());
7708 }
7709
7710
// Writes a dictionary entry with default (empty) property details.
template<typename Derived, typename Shape, typename Key>
void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
                                               Handle<Object> key,
                                               Handle<Object> value) {
  this->SetEntry(entry, key, value, PropertyDetails(Smi::kZero));
}

// Delegates the actual entry layout to the dictionary's Shape policy.
template<typename Derived, typename Shape, typename Key>
void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
                                               Handle<Object> key,
                                               Handle<Object> value,
                                               PropertyDetails details) {
  Shape::SetEntry(static_cast<Derived*>(this), entry, key, value, details);
}

// Default entry layout: key and value slots, plus a details Smi slot when
// the dictionary's entry size is 3.
template <typename Key>
template <typename Dictionary>
void BaseDictionaryShape<Key>::SetEntry(Dictionary* dict, int entry,
                                        Handle<Object> key,
                                        Handle<Object> value,
                                        PropertyDetails details) {
  STATIC_ASSERT(Dictionary::kEntrySize == 2 || Dictionary::kEntrySize == 3);
  // Named properties must carry a valid enumeration index.
  DCHECK(!key->IsName() || details.dictionary_index() > 0);
  int index = dict->EntryToIndex(entry);
  DisallowHeapAllocation no_gc;
  WriteBarrierMode mode = dict->GetWriteBarrierMode(no_gc);
  dict->set(index + Dictionary::kEntryKeyIndex, *key, mode);
  dict->set(index + Dictionary::kEntryValueIndex, *value, mode);
  if (Dictionary::kEntrySize == 3) {
    dict->set(index + Dictionary::kEntryDetailsIndex, details.AsSmi());
  }
}

// Global-dictionary layout: the value is a PropertyCell and the details
// are stored on the cell rather than in a third slot.
template <typename Dictionary>
void GlobalDictionaryShape::SetEntry(Dictionary* dict, int entry,
                                     Handle<Object> key, Handle<Object> value,
                                     PropertyDetails details) {
  STATIC_ASSERT(Dictionary::kEntrySize == 2);
  DCHECK(!key->IsName() || details.dictionary_index() > 0);
  DCHECK(value->IsPropertyCell());
  int index = dict->EntryToIndex(entry);
  DisallowHeapAllocation no_gc;
  WriteBarrierMode mode = dict->GetWriteBarrierMode(no_gc);
  dict->set(index + Dictionary::kEntryKeyIndex, *key, mode);
  dict->set(index + Dictionary::kEntryValueIndex, *value, mode);
  PropertyCell::cast(*value)->set_property_details(details);
}
7761
7762
IsMatch(uint32_t key,Object * other)7763 bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
7764 DCHECK(other->IsNumber());
7765 return key == static_cast<uint32_t>(other->Number());
7766 }
7767
7768
// Unseeded dictionaries hash with a fixed seed of zero.
uint32_t UnseededNumberDictionaryShape::Hash(uint32_t key) {
  return ComputeIntegerHash(key, 0);
}
7772
7773
// Hashes a stored key object; |key| itself is unused here by design.
uint32_t UnseededNumberDictionaryShape::HashForObject(uint32_t key,
                                                      Object* other) {
  DCHECK(other->IsNumber());
  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), 0);
}
7779
// Returns the dedicated map used for unseeded number dictionaries.
Map* UnseededNumberDictionaryShape::GetMap(Isolate* isolate) {
  return isolate->heap()->unseeded_number_dictionary_map();
}
7783
// Hashes |key| with the isolate-specific |seed|.
uint32_t SeededNumberDictionaryShape::SeededHash(uint32_t key, uint32_t seed) {
  return ComputeIntegerHash(key, seed);
}
7787
7788
// Hashes a stored key object with the isolate-specific |seed|; the |key|
// argument itself is unused here by design.
uint32_t SeededNumberDictionaryShape::SeededHashForObject(uint32_t key,
                                                          uint32_t seed,
                                                          Object* other) {
  DCHECK(other->IsNumber());
  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), seed);
}
7795
7796
// Boxes the uint32 key as a heap number (or Smi where it fits).
Handle<Object> NumberDictionaryShape::AsHandle(Isolate* isolate, uint32_t key) {
  return isolate->factory()->NewNumberFromUint(key);
}
7800
7801
IsMatch(Handle<Name> key,Object * other)7802 bool NameDictionaryShape::IsMatch(Handle<Name> key, Object* other) {
7803 // We know that all entries in a hash table had their hash keys created.
7804 // Use that knowledge to have fast failure.
7805 if (key->Hash() != Name::cast(other)->Hash()) return false;
7806 return key->Equals(Name::cast(other));
7807 }
7808
7809
// Names cache their hash; this just reads (or lazily computes) it.
uint32_t NameDictionaryShape::Hash(Handle<Name> key) {
  return key->Hash();
}
7813
7814
// Hashes a stored key object; |key| itself is unused here by design.
uint32_t NameDictionaryShape::HashForObject(Handle<Name> key, Object* other) {
  return Name::cast(other)->Hash();
}
7818
7819
// Name keys are stored as-is; they must already be unique (internalized).
Handle<Object> NameDictionaryShape::AsHandle(Isolate* isolate,
                                             Handle<Name> key) {
  DCHECK(key->IsUniqueName());
  return key;
}
7825
7826
// Forwards to the base-class enumeration-index regeneration.
Handle<FixedArray> NameDictionary::DoGenerateNewEnumerationIndices(
    Handle<NameDictionary> dictionary) {
  return DerivedDictionary::GenerateNewEnumerationIndices(dictionary);
}
7831
7832
7833 template <typename Dictionary>
DetailsAt(Dictionary * dict,int entry)7834 PropertyDetails GlobalDictionaryShape::DetailsAt(Dictionary* dict, int entry) {
7835 DCHECK(entry >= 0); // Not found is -1, which is not caught by get().
7836 Object* raw_value = dict->ValueAt(entry);
7837 DCHECK(raw_value->IsPropertyCell());
7838 PropertyCell* cell = PropertyCell::cast(raw_value);
7839 return cell->property_details();
7840 }
7841
7842
7843 template <typename Dictionary>
DetailsAtPut(Dictionary * dict,int entry,PropertyDetails value)7844 void GlobalDictionaryShape::DetailsAtPut(Dictionary* dict, int entry,
7845 PropertyDetails value) {
7846 DCHECK(entry >= 0); // Not found is -1, which is not caught by get().
7847 Object* raw_value = dict->ValueAt(entry);
7848 DCHECK(raw_value->IsPropertyCell());
7849 PropertyCell* cell = PropertyCell::cast(raw_value);
7850 cell->set_property_details(value);
7851 }
7852
7853
7854 template <typename Dictionary>
IsDeleted(Dictionary * dict,int entry)7855 bool GlobalDictionaryShape::IsDeleted(Dictionary* dict, int entry) {
7856 DCHECK(dict->ValueAt(entry)->IsPropertyCell());
7857 Isolate* isolate = dict->GetIsolate();
7858 return PropertyCell::cast(dict->ValueAt(entry))->value()->IsTheHole(isolate);
7859 }
7860
7861
// Keys compare with SameValue semantics (so NaN matches NaN).
bool ObjectHashTableShape::IsMatch(Handle<Object> key, Object* other) {
  return key->SameValue(other);
}
7865
7866
// Requires that the key's identity hash has already been created;
// GetHash() must return a Smi here, not undefined.
uint32_t ObjectHashTableShape::Hash(Handle<Object> key) {
  return Smi::cast(key->GetHash())->value();
}
7870
7871
// Hashes a stored key object; |key| itself is unused here by design.
uint32_t ObjectHashTableShape::HashForObject(Handle<Object> key,
                                             Object* other) {
  return Smi::cast(other->GetHash())->value();
}
7876
7877
// Object keys are stored as-is.
Handle<Object> ObjectHashTableShape::AsHandle(Isolate* isolate,
                                              Handle<Object> key) {
  return key;
}
7882
7883
// Forwards to the generic hash-table shrink implementation.
Handle<ObjectHashTable> ObjectHashTable::Shrink(
    Handle<ObjectHashTable> table, Handle<Object> key) {
  return DerivedHashTable::Shrink(table, key);
}
7888
7889
// Returns the value slot of |entry| (stored kValueOffset words after the
// entry's key).
Object* OrderedHashMap::ValueAt(int entry) {
  return get(EntryToIndex(entry) + kValueOffset);
}
7893
7894
7895 template <int entrysize>
IsMatch(Handle<Object> key,Object * other)7896 bool WeakHashTableShape<entrysize>::IsMatch(Handle<Object> key, Object* other) {
7897 if (other->IsWeakCell()) other = WeakCell::cast(other)->value();
7898 return key->IsWeakCell() ? WeakCell::cast(*key)->value() == other
7899 : *key == other;
7900 }
7901
7902
7903 template <int entrysize>
Hash(Handle<Object> key)7904 uint32_t WeakHashTableShape<entrysize>::Hash(Handle<Object> key) {
7905 intptr_t hash =
7906 key->IsWeakCell()
7907 ? reinterpret_cast<intptr_t>(WeakCell::cast(*key)->value())
7908 : reinterpret_cast<intptr_t>(*key);
7909 return (uint32_t)(hash & 0xFFFFFFFF);
7910 }
7911
7912
7913 template <int entrysize>
HashForObject(Handle<Object> key,Object * other)7914 uint32_t WeakHashTableShape<entrysize>::HashForObject(Handle<Object> key,
7915 Object* other) {
7916 if (other->IsWeakCell()) other = WeakCell::cast(other)->value();
7917 intptr_t hash = reinterpret_cast<intptr_t>(other);
7918 return (uint32_t)(hash & 0xFFFFFFFF);
7919 }
7920
7921
// Keys are stored as-is (callers wrap in WeakCells where needed).
template <int entrysize>
Handle<Object> WeakHashTableShape<entrysize>::AsHandle(Isolate* isolate,
                                                       Handle<Object> key) {
  return key;
}
7927
7928
// Field accessors for ModuleInfoEntry (import/export metadata records).
ACCESSORS(ModuleInfoEntry, export_name, Object, kExportNameOffset)
ACCESSORS(ModuleInfoEntry, local_name, Object, kLocalNameOffset)
ACCESSORS(ModuleInfoEntry, import_name, Object, kImportNameOffset)
SMI_ACCESSORS(ModuleInfoEntry, module_request, kModuleRequestOffset)
SMI_ACCESSORS(ModuleInfoEntry, cell_index, kCellIndexOffset)
SMI_ACCESSORS(ModuleInfoEntry, beg_pos, kBegPosOffset)
SMI_ACCESSORS(ModuleInfoEntry, end_pos, kEndPosOffset)
7936
// Resets the map's code cache to the canonical empty fixed array.
void Map::ClearCodeCache(Heap* heap) {
  // No write barrier is needed since empty_fixed_array is not in new space.
  // Please note this function is used during marking:
  //  - MarkCompactCollector::MarkUnmarkedObject
  //  - IncrementalMarking::Step
  WRITE_FIELD(this, kCodeCacheOffset, heap->empty_fixed_array());
}
7944
7945
SlackForArraySize(int old_size,int size_limit)7946 int Map::SlackForArraySize(int old_size, int size_limit) {
7947 const int max_slack = size_limit - old_size;
7948 CHECK_LE(0, max_slack);
7949 if (old_size < 4) {
7950 DCHECK_LE(1, max_slack);
7951 return 1;
7952 }
7953 return Min(max_slack, old_size / 4);
7954 }
7955
7956
// Sets the array length from a Smi.
void JSArray::set_length(Smi* length) {
  // Don't need a write barrier for a Smi.
  set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
}
7961
7962
// Decides whether growing to |new_length| should switch the array to
// dictionary (slow) elements instead of keeping a fast backing store.
bool JSArray::SetLengthWouldNormalize(Heap* heap, uint32_t new_length) {
  // This constant is somewhat arbitrary. Any large enough value would work.
  const uint32_t kMaxFastArrayLength = 32 * 1024 * 1024;
  // If the new array won't fit in a some non-trivial fraction of the max old
  // space size, then force it to go dictionary mode.
  uint32_t heap_based_upper_bound =
      static_cast<uint32_t>((heap->MaxOldGenerationSize() / kDoubleSize) / 4);
  return new_length >= Min(kMaxFastArrayLength, heap_based_upper_bound);
}
7972
7973
// Length is settable unless the array is backed by a fixed typed array.
bool JSArray::AllowsSetLength() {
  bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray();
  DCHECK(result == !HasFixedTypedArrayElements());
  return result;
}
7979
7980
// Replaces the array's backing store with |storage| and updates length.
// The storage's map must agree with the array's elements kind (checked
// below in debug mode).
void JSArray::SetContent(Handle<JSArray> array,
                         Handle<FixedArrayBase> storage) {
  EnsureCanContainElements(array, storage, storage->length(),
                           ALLOW_COPIED_DOUBLE_ELEMENTS);

  DCHECK((storage->map() == array->GetHeap()->fixed_double_array_map() &&
          IsFastDoubleElementsKind(array->GetElementsKind())) ||
         ((storage->map() != array->GetHeap()->fixed_double_array_map()) &&
          (IsFastObjectElementsKind(array->GetElementsKind()) ||
           (IsFastSmiElementsKind(array->GetElementsKind()) &&
            Handle<FixedArray>::cast(storage)->ContainsOnlySmisOrHoles()))));
  array->set_elements(*storage);
  array->set_length(Smi::FromInt(storage->length()));
}
7995
7996
// True when the array's prototype is the unmodified initial Array.prototype.
bool JSArray::HasArrayPrototype(Isolate* isolate) {
  return map()->prototype() == *isolate->initial_array_prototype();
}
8000
8001
// Reads the total-IC counter packed into storage word 1.
int TypeFeedbackInfo::ic_total_count() {
  int current = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  return ICTotalCountField::decode(current);
}
8006
8007
// Writes the total-IC counter into storage word 1, preserving the other
// bit fields packed into the same word.
void TypeFeedbackInfo::set_ic_total_count(int count) {
  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  // decode() masks |count| into the field's bit range before storing it.
  value = ICTotalCountField::update(value,
                                    ICTotalCountField::decode(count));
  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
}
8014
8015
// Reads the with-type-info IC counter packed into storage word 2.
int TypeFeedbackInfo::ic_with_type_info_count() {
  int current = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  return ICsWithTypeInfoCountField::decode(current);
}
8020
8021
// Adjusts the with-type-info IC counter by |delta|, silently ignoring
// updates that would make the count negative (see comment below).
void TypeFeedbackInfo::change_ic_with_type_info_count(int delta) {
  if (delta == 0) return;
  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  int new_count = ICsWithTypeInfoCountField::decode(value) + delta;
  // We can get negative count here when the type-feedback info is
  // shared between two code objects. The can only happen when
  // the debugger made a shallow copy of code object (see Heap::CopyCode).
  // Since we do not optimize when the debugger is active, we can skip
  // this counter update.
  if (new_count >= 0) {
    // Clamp to the field width so the packed word stays well-formed.
    new_count &= ICsWithTypeInfoCountField::kMask;
    value = ICsWithTypeInfoCountField::update(value, new_count);
    WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
  }
}
8037
8038
// Storage word 3 holds the generic-IC counter as a plain Smi.
int TypeFeedbackInfo::ic_generic_count() {
  return Smi::cast(READ_FIELD(this, kStorage3Offset))->value();
}
8042
8043
// Adjusts the generic-IC counter by |delta|; negative results are dropped
// (same shared-feedback caveat as change_ic_with_type_info_count).
void TypeFeedbackInfo::change_ic_generic_count(int delta) {
  if (delta == 0) return;
  int new_count = ic_generic_count() + delta;
  if (new_count >= 0) {
    // Keep the value in non-negative Smi range.
    new_count &= ~Smi::kMinValue;
    WRITE_FIELD(this, kStorage3Offset, Smi::FromInt(new_count));
  }
}
8052
8053
// Zeroes all three packed storage words.
void TypeFeedbackInfo::initialize_storage() {
  WRITE_FIELD(this, kStorage1Offset, Smi::kZero);
  WRITE_FIELD(this, kStorage2Offset, Smi::kZero);
  WRITE_FIELD(this, kStorage3Offset, Smi::kZero);
}
8059
8060
// Bumps the own-type-change checksum (a small wrapping counter) stored in
// storage word 1.
void TypeFeedbackInfo::change_own_type_change_checksum() {
  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  int checksum = OwnTypeChangeChecksum::decode(value);
  checksum = (checksum + 1) % (1 << kTypeChangeChecksumBits);
  value = OwnTypeChangeChecksum::update(value, checksum);
  // Ensure packed bit field is in Smi range.
  if (value > Smi::kMaxValue) value |= Smi::kMinValue;
  if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
}
8071
8072
// Stores the low kTypeChangeChecksumBits bits of |checksum| into storage
// word 2, preserving the other packed fields.
void TypeFeedbackInfo::set_inlined_type_change_checksum(int checksum) {
  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  int mask = (1 << kTypeChangeChecksumBits) - 1;
  value = InlinedTypeChangeChecksum::update(value, checksum & mask);
  // Ensure packed bit field is in Smi range.
  if (value > Smi::kMaxValue) value |= Smi::kMinValue;
  if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
  WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
}
8082
8083
// Reads the own-type-change checksum from storage word 1.
int TypeFeedbackInfo::own_type_change_checksum() {
  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  return OwnTypeChangeChecksum::decode(value);
}
8088
8089
// Compares |checksum| (truncated to the field width) against the stored
// inlined-type-change checksum.
bool TypeFeedbackInfo::matches_inlined_type_change_checksum(int checksum) {
  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  int mask = (1 << kTypeChangeChecksumBits) - 1;
  return InlinedTypeChangeChecksum::decode(value) == (checksum & mask);
}
8095
8096
// Smi accessor for the context slot aliased by this arguments entry.
SMI_ACCESSORS(AliasedArgumentsEntry, aliased_context_slot, kAliasedContextSlot)
8098
8099
// Pushes this object onto the isolate's stack of relocatable objects.
Relocatable::Relocatable(Isolate* isolate) {
  isolate_ = isolate;
  prev_ = isolate->relocatable_top();
  isolate->set_relocatable_top(this);
}
8105
8106
// Pops this object from the isolate's relocatable stack; destruction must
// happen in strict LIFO order (checked below).
Relocatable::~Relocatable() {
  DCHECK_EQ(isolate_->relocatable_top(), this);
  isolate_->set_relocatable_top(prev_);
}
8111
8112
// Returns the key at the iterator's current position; the iterator must be
// positioned on a live (non-hole) entry.
template<class Derived, class TableType>
Object* OrderedHashTableIterator<Derived, TableType>::CurrentKey() {
  TableType* table(TableType::cast(this->table()));
  int index = Smi::cast(this->index())->value();
  Object* key = table->KeyAt(index);
  DCHECK(!key->IsTheHole(table->GetIsolate()));
  return key;
}
8121
8122
// Set iterators yield a single element: the current key.
void JSSetIterator::PopulateValueArray(FixedArray* array) {
  array->set(0, CurrentKey());
}
8126
8127
// Map iterators yield a [key, value] pair.
void JSMapIterator::PopulateValueArray(FixedArray* array) {
  array->set(0, CurrentKey());
  array->set(1, CurrentValue());
}
8132
8133
// Returns the value at the iterator's current position; the iterator must
// be positioned on a live (non-hole) entry.
Object* JSMapIterator::CurrentValue() {
  OrderedHashMap* table(OrderedHashMap::cast(this->table()));
  int index = Smi::cast(this->index())->value();
  Object* value = table->ValueAt(index);
  DCHECK(!value->IsTheHole(table->GetIsolate()));
  return value;
}
8141
8142
// A length of -1 means "to the end of the string".
String::SubStringRange::SubStringRange(String* string, int first, int length)
    : string_(string),
      first_(first),
      length_(length == -1 ? string->length() : length) {}
8147
8148
// Forward iterator over the code units of a SubStringRange. Holds a flat
// view of the string, so the string must stay flat (no GC moving parts)
// while the iterator is in use.
class String::SubStringRange::iterator final {
 public:
  typedef std::forward_iterator_tag iterator_category;
  typedef int difference_type;
  typedef uc16 value_type;
  typedef uc16* pointer;
  typedef uc16& reference;

  iterator(const iterator& other)
      : content_(other.content_), offset_(other.offset_) {}

  // Dereferences to the code unit at the current offset.
  uc16 operator*() { return content_.Get(offset_); }
  bool operator==(const iterator& other) const {
    return content_.UsesSameString(other.content_) && offset_ == other.offset_;
  }
  bool operator!=(const iterator& other) const {
    return !content_.UsesSameString(other.content_) || offset_ != other.offset_;
  }
  iterator& operator++() {
    ++offset_;
    return *this;
  }
  iterator operator++(int);

 private:
  friend class String;
  // Only String (via SubStringRange) can construct fresh iterators.
  iterator(String* from, int offset)
      : content_(from->GetFlatContent()), offset_(offset) {}
  String::FlatContent content_;
  int offset_;
};
8180
8181
// Iterator positioned at the first code unit of the range.
String::SubStringRange::iterator String::SubStringRange::begin() {
  return String::SubStringRange::iterator(string_, first_);
}
8185
8186
// Iterator positioned one past the last code unit of the range.
String::SubStringRange::iterator String::SubStringRange::end() {
  return String::SubStringRange::iterator(string_, first_ + length_);
}
8190
8191
// Predictably converts HeapObject* or Address to uint32 by calculating
// offset of the address in respective MemoryChunk.
// (Masking with kAlignmentMask keeps the result stable across GC-driven
// chunk relocation of the base address.)
static inline uint32_t ObjectAddressForHashing(void* object) {
  uint32_t value = static_cast<uint32_t>(reinterpret_cast<uintptr_t>(object));
  return value & MemoryChunk::kAlignmentMask;
}
8198
// Builds a JS array [String(index), value], as returned by entry iterators.
static inline Handle<Object> MakeEntryPair(Isolate* isolate, uint32_t index,
                                           Handle<Object> value) {
  Handle<Object> key = isolate->factory()->Uint32ToString(index);
  Handle<FixedArray> entry_storage =
      isolate->factory()->NewUninitializedFixedArray(2);
  {
    // Freshly allocated storage in new space: write barriers can be skipped.
    entry_storage->set(0, *key, SKIP_WRITE_BARRIER);
    entry_storage->set(1, *value, SKIP_WRITE_BARRIER);
  }
  return isolate->factory()->NewJSArrayWithElements(entry_storage,
                                                    FAST_ELEMENTS, 2);
}
8211
// Builds a JS array [key, value] for a named property entry.
static inline Handle<Object> MakeEntryPair(Isolate* isolate, Handle<Name> key,
                                           Handle<Object> value) {
  Handle<FixedArray> entry_storage =
      isolate->factory()->NewUninitializedFixedArray(2);
  {
    // Freshly allocated storage in new space: write barriers can be skipped.
    entry_storage->set(0, *key, SKIP_WRITE_BARRIER);
    entry_storage->set(1, *value, SKIP_WRITE_BARRIER);
  }
  return isolate->factory()->NewJSArrayWithElements(entry_storage,
                                                    FAST_ELEMENTS, 2);
}
8223
// Field accessors for the various iterator result / iterator objects.
ACCESSORS(JSIteratorResult, value, Object, kValueOffset)
ACCESSORS(JSIteratorResult, done, Object, kDoneOffset)

ACCESSORS(JSArrayIterator, object, Object, kIteratedObjectOffset)
ACCESSORS(JSArrayIterator, index, Object, kNextIndexOffset)
ACCESSORS(JSArrayIterator, object_map, Object, kIteratedObjectMapOffset)

ACCESSORS(JSAsyncFromSyncIterator, sync_iterator, JSReceiver,
          kSyncIteratorOffset)

ACCESSORS(JSStringIterator, string, String, kStringOffset)
SMI_ACCESSORS(JSStringIterator, index, kNextIndexOffset)
8236
8237 #undef INT_ACCESSORS
8238 #undef ACCESSORS
8239 #undef ACCESSORS_CHECKED
8240 #undef ACCESSORS_CHECKED2
8241 #undef SMI_ACCESSORS
8242 #undef SYNCHRONIZED_SMI_ACCESSORS
8243 #undef NOBARRIER_SMI_ACCESSORS
8244 #undef BOOL_GETTER
8245 #undef BOOL_ACCESSORS
8246 #undef FIELD_ADDR
8247 #undef FIELD_ADDR_CONST
8248 #undef READ_FIELD
8249 #undef NOBARRIER_READ_FIELD
8250 #undef WRITE_FIELD
8251 #undef NOBARRIER_WRITE_FIELD
8252 #undef WRITE_BARRIER
8253 #undef CONDITIONAL_WRITE_BARRIER
8254 #undef READ_DOUBLE_FIELD
8255 #undef WRITE_DOUBLE_FIELD
8256 #undef READ_INT_FIELD
8257 #undef WRITE_INT_FIELD
8258 #undef READ_INTPTR_FIELD
8259 #undef WRITE_INTPTR_FIELD
8260 #undef READ_UINT8_FIELD
8261 #undef WRITE_UINT8_FIELD
8262 #undef READ_INT8_FIELD
8263 #undef WRITE_INT8_FIELD
8264 #undef READ_UINT16_FIELD
8265 #undef WRITE_UINT16_FIELD
8266 #undef READ_INT16_FIELD
8267 #undef WRITE_INT16_FIELD
8268 #undef READ_UINT32_FIELD
8269 #undef WRITE_UINT32_FIELD
8270 #undef READ_INT32_FIELD
8271 #undef WRITE_INT32_FIELD
8272 #undef READ_FLOAT_FIELD
8273 #undef WRITE_FLOAT_FIELD
8274 #undef READ_UINT64_FIELD
8275 #undef WRITE_UINT64_FIELD
8276 #undef READ_INT64_FIELD
8277 #undef WRITE_INT64_FIELD
8278 #undef READ_BYTE_FIELD
8279 #undef WRITE_BYTE_FIELD
8280 #undef NOBARRIER_READ_BYTE_FIELD
8281 #undef NOBARRIER_WRITE_BYTE_FIELD
8282
8283 } // namespace internal
8284 } // namespace v8
8285
8286 #endif // V8_OBJECTS_INL_H_
8287