• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 //
5 // Review notes:
6 //
7 // - The use of macros in these inline functions may seem superfluous
8 // but it is absolutely needed to make sure gcc generates optimal
9 // code. gcc is not happy when attempting to inline too deep.
10 //
11 
12 #ifndef V8_OBJECTS_INL_H_
13 #define V8_OBJECTS_INL_H_
14 
15 #include "src/base/atomicops.h"
16 #include "src/base/bits.h"
17 #include "src/builtins.h"
18 #include "src/contexts-inl.h"
19 #include "src/conversions-inl.h"
20 #include "src/factory.h"
21 #include "src/field-index-inl.h"
22 #include "src/field-type.h"
23 #include "src/handles-inl.h"
24 #include "src/heap/heap-inl.h"
25 #include "src/heap/heap.h"
26 #include "src/isolate-inl.h"
27 #include "src/isolate.h"
28 #include "src/keys.h"
29 #include "src/layout-descriptor-inl.h"
30 #include "src/lookup.h"
31 #include "src/objects.h"
32 #include "src/property.h"
33 #include "src/prototype.h"
34 #include "src/transitions-inl.h"
35 #include "src/type-feedback-vector-inl.h"
36 #include "src/v8memory.h"
37 
38 namespace v8 {
39 namespace internal {
40 
PropertyDetails(Smi * smi)41 PropertyDetails::PropertyDetails(Smi* smi) {
42   value_ = smi->value();
43 }
44 
45 
// Encodes the details as a Smi. The shift-left/shift-right pair deliberately
// discards the top bit and replaces it with a copy of bit 30.
Smi* PropertyDetails::AsSmi() const {
  // Ensure the upper 2 bits have the same value by sign extending it. This is
  // necessary to be able to use the 31st bit of the property details.
  int value = value_ << 1;
  return Smi::FromInt(value >> 1);
}
52 
53 
field_width_in_words()54 int PropertyDetails::field_width_in_words() const {
55   DCHECK(location() == kField);
56   if (!FLAG_unbox_double_fields) return 1;
57   if (kDoubleSize == kPointerSize) return 1;
58   return representation().IsDouble() ? kDoubleSize / kPointerSize : 1;
59 }
60 
// Produces HeapObject::Is<type>() predicates that compare the instance type
// recorded in the object's map against a single expected value.
#define TYPE_CHECKER(type, instancetype)           \
  bool HeapObject::Is##type() const {              \
    return map()->instance_type() == instancetype; \
  }

// Checked downcast helpers: in slow-DCHECK builds the cast verifies the
// dynamic type before reinterpreting the pointer.
#define CAST_ACCESSOR(type)                       \
  type* type::cast(Object* object) {              \
    SLOW_DCHECK(object->Is##type());              \
    return reinterpret_cast<type*>(object);       \
  }                                               \
  const type* type::cast(const Object* object) {  \
    SLOW_DCHECK(object->Is##type());              \
    return reinterpret_cast<const type*>(object); \
  }


// Accessors for a raw (untagged) int field at a fixed byte offset.
#define INT_ACCESSORS(holder, name, offset)                                   \
  int holder::name() const { return READ_INT_FIELD(this, offset); }           \
  void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }


// Accessors for a tagged-pointer field; the setter runs a (conditional)
// write barrier so the GC observes the new reference.
#define ACCESSORS(holder, name, type, offset)                                 \
  type* holder::name() const { return type::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(type* value, WriteBarrierMode mode) {               \
    WRITE_FIELD(this, offset, value);                                         \
    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);          \
  }


// Getter that returns a Smi as an int and writes an int as a Smi.
// No write barrier is needed because a Smi is not a heap reference.
#define SMI_ACCESSORS(holder, name, offset)             \
  int holder::name() const {                            \
    Object* value = READ_FIELD(this, offset);           \
    return Smi::cast(value)->value();                   \
  }                                                     \
  void holder::set_##name(int value) {                  \
    WRITE_FIELD(this, offset, Smi::FromInt(value));     \
  }

// Like SMI_ACCESSORS, but with acquire-load / release-store semantics for
// fields accessed from multiple threads.
#define SYNCHRONIZED_SMI_ACCESSORS(holder, name, offset)    \
  int holder::synchronized_##name() const {                 \
    Object* value = ACQUIRE_READ_FIELD(this, offset);       \
    return Smi::cast(value)->value();                       \
  }                                                         \
  void holder::synchronized_set_##name(int value) {         \
    RELEASE_WRITE_FIELD(this, offset, Smi::FromInt(value)); \
  }

// Like SMI_ACCESSORS, but using relaxed (no-barrier) atomic accesses.
#define NOBARRIER_SMI_ACCESSORS(holder, name, offset)          \
  int holder::nobarrier_##name() const {                       \
    Object* value = NOBARRIER_READ_FIELD(this, offset);        \
    return Smi::cast(value)->value();                          \
  }                                                            \
  void holder::nobarrier_set_##name(int value) {               \
    NOBARRIER_WRITE_FIELD(this, offset, Smi::FromInt(value));  \
  }

// Read-only view of a single bit within the int value returned by |field|().
#define BOOL_GETTER(holder, field, name, offset)           \
  bool holder::name() const {                              \
    return BooleanBit::get(field(), offset);               \
  }                                                        \


// Getter and setter for a single bit within the int value held by |field|.
#define BOOL_ACCESSORS(holder, field, name, offset)        \
  bool holder::name() const {                              \
    return BooleanBit::get(field(), offset);               \
  }                                                        \
  void holder::set_##name(bool value) {                    \
    set_##field(BooleanBit::set(field(), offset, value));  \
  }
131 
IsFixedArrayBase()132 bool HeapObject::IsFixedArrayBase() const {
133   return IsFixedArray() || IsFixedDoubleArray() || IsFixedTypedArrayBase();
134 }
135 
IsFixedArray()136 bool HeapObject::IsFixedArray() const {
137   InstanceType instance_type = map()->instance_type();
138   return instance_type == FIXED_ARRAY_TYPE ||
139          instance_type == TRANSITION_ARRAY_TYPE;
140 }
141 
142 
// External objects are not extensible, so the map check is enough.
bool HeapObject::IsExternal() const {
  return map() == GetHeap()->external_map();
}


TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE)
TYPE_CHECKER(MutableHeapNumber, MUTABLE_HEAP_NUMBER_TYPE)
TYPE_CHECKER(Symbol, SYMBOL_TYPE)
TYPE_CHECKER(Simd128Value, SIMD128_VALUE_TYPE)

// All SIMD values share one instance type; the concrete lane flavor is
// distinguished by a dedicated map per type.
#define SIMD128_TYPE_CHECKER(TYPE, Type, type, lane_count, lane_type) \
  bool HeapObject::Is##Type() const { return map() == GetHeap()->type##_map(); }
SIMD128_TYPES(SIMD128_TYPE_CHECKER)
#undef SIMD128_TYPE_CHECKER

// Object::Is<T>() excludes Smis, then forwards to HeapObject::Is<T>().
#define IS_TYPE_FUNCTION_DEF(type_)                               \
  bool Object::Is##type_() const {                                \
    return IsHeapObject() && HeapObject::cast(this)->Is##type_(); \
  }
HEAP_OBJECT_TYPE_LIST(IS_TYPE_FUNCTION_DEF)
#undef IS_TYPE_FUNCTION_DEF

// Oddball checks compare against the singleton stored in the isolate's heap,
// hence the Isolate parameter; defined for Object and HeapObject receivers.
#define IS_TYPE_FUNCTION_DEF(Type, Value)             \
  bool Object::Is##Type(Isolate* isolate) const {     \
    return this == isolate->heap()->Value();          \
  }                                                   \
  bool HeapObject::Is##Type(Isolate* isolate) const { \
    return this == isolate->heap()->Value();          \
  }
ODDBALL_LIST(IS_TYPE_FUNCTION_DEF)
#undef IS_TYPE_FUNCTION_DEF
175 
176 bool HeapObject::IsString() const {
177   return map()->instance_type() < FIRST_NONSTRING_TYPE;
178 }
179 
IsName()180 bool HeapObject::IsName() const {
181   return map()->instance_type() <= LAST_NAME_TYPE;
182 }
183 
IsUniqueName()184 bool HeapObject::IsUniqueName() const {
185   return IsInternalizedString() || IsSymbol();
186 }
187 
bool Name::IsUniqueName() const {
  uint32_t type = map()->instance_type();
  // Unique <=> symbol or internalized string, i.e. NOT a non-internalized
  // string; a single masked compare tests both tag bits at once.
  return (type & (kIsNotStringMask | kIsNotInternalizedMask)) !=
         (kStringTag | kNotInternalizedTag);
}
193 
IsFunction()194 bool HeapObject::IsFunction() const {
195   STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
196   return map()->instance_type() >= FIRST_FUNCTION_TYPE;
197 }
198 
// Callability and constructability are map bits rather than instance types.
bool HeapObject::IsCallable() const { return map()->is_callable(); }

bool HeapObject::IsConstructor() const { return map()->is_constructor(); }

// Template info is either of the two template-info struct flavors.
bool HeapObject::IsTemplateInfo() const {
  return IsObjectTemplateInfo() || IsFunctionTemplateInfo();
}
206 
bool HeapObject::IsInternalizedString() const {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kNotInternalizedTag != 0);
  // "Is a string" and "is internalized" are verified together with one
  // masked compare over both tag fields.
  return (type & (kIsNotStringMask | kIsNotInternalizedMask)) ==
      (kStringTag | kInternalizedTag);
}
213 
IsConsString()214 bool HeapObject::IsConsString() const {
215   if (!IsString()) return false;
216   return StringShape(String::cast(this)).IsCons();
217 }
218 
IsSlicedString()219 bool HeapObject::IsSlicedString() const {
220   if (!IsString()) return false;
221   return StringShape(String::cast(this)).IsSliced();
222 }
223 
IsSeqString()224 bool HeapObject::IsSeqString() const {
225   if (!IsString()) return false;
226   return StringShape(String::cast(this)).IsSequential();
227 }
228 
IsSeqOneByteString()229 bool HeapObject::IsSeqOneByteString() const {
230   if (!IsString()) return false;
231   return StringShape(String::cast(this)).IsSequential() &&
232          String::cast(this)->IsOneByteRepresentation();
233 }
234 
IsSeqTwoByteString()235 bool HeapObject::IsSeqTwoByteString() const {
236   if (!IsString()) return false;
237   return StringShape(String::cast(this)).IsSequential() &&
238          String::cast(this)->IsTwoByteRepresentation();
239 }
240 
IsExternalString()241 bool HeapObject::IsExternalString() const {
242   if (!IsString()) return false;
243   return StringShape(String::cast(this)).IsExternal();
244 }
245 
IsExternalOneByteString()246 bool HeapObject::IsExternalOneByteString() const {
247   if (!IsString()) return false;
248   return StringShape(String::cast(this)).IsExternal() &&
249          String::cast(this)->IsOneByteRepresentation();
250 }
251 
IsExternalTwoByteString()252 bool HeapObject::IsExternalTwoByteString() const {
253   if (!IsString()) return false;
254   return StringShape(String::cast(this)).IsExternal() &&
255          String::cast(this)->IsTwoByteRepresentation();
256 }
257 
HasValidElements()258 bool Object::HasValidElements() {
259   // Dictionary is covered under FixedArray.
260   return IsFixedArray() || IsFixedDoubleArray() || IsFixedTypedArrayBase();
261 }
262 
263 
KeyEquals(Object * second)264 bool Object::KeyEquals(Object* second) {
265   Object* first = this;
266   if (second->IsNumber()) {
267     if (first->IsNumber()) return first->Number() == second->Number();
268     Object* temp = first;
269     first = second;
270     second = temp;
271   }
272   if (first->IsNumber()) {
273     DCHECK_LE(0, first->Number());
274     uint32_t expected = static_cast<uint32_t>(first->Number());
275     uint32_t index;
276     return Name::cast(second)->AsArrayIndex(&index) && index == expected;
277   }
278   return Name::cast(first)->Equals(Name::cast(second));
279 }
280 
281 
FilterKey(PropertyFilter filter)282 bool Object::FilterKey(PropertyFilter filter) {
283   if (IsSymbol()) {
284     if (filter & SKIP_SYMBOLS) return true;
285     if (Symbol::cast(this)->is_private()) return true;
286   } else {
287     if (filter & SKIP_STRINGS) return true;
288   }
289   return false;
290 }
291 
292 
NewStorageFor(Isolate * isolate,Handle<Object> object,Representation representation)293 Handle<Object> Object::NewStorageFor(Isolate* isolate,
294                                      Handle<Object> object,
295                                      Representation representation) {
296   if (representation.IsSmi() && object->IsUninitialized(isolate)) {
297     return handle(Smi::FromInt(0), isolate);
298   }
299   if (!representation.IsDouble()) return object;
300   double value;
301   if (object->IsUninitialized(isolate)) {
302     value = 0;
303   } else if (object->IsMutableHeapNumber()) {
304     value = HeapNumber::cast(*object)->value();
305   } else {
306     value = object->Number();
307   }
308   return isolate->factory()->NewHeapNumber(value, MUTABLE);
309 }
310 
311 
WrapForRead(Isolate * isolate,Handle<Object> object,Representation representation)312 Handle<Object> Object::WrapForRead(Isolate* isolate,
313                                    Handle<Object> object,
314                                    Representation representation) {
315   DCHECK(!object->IsUninitialized(isolate));
316   if (!representation.IsDouble()) {
317     DCHECK(object->FitsRepresentation(representation));
318     return object;
319   }
320   return isolate->factory()->NewHeapNumber(HeapNumber::cast(*object)->value());
321 }
322 
323 
// A StringShape snapshots a string's instance type so repeated
// representation/encoding queries avoid reloading the map.
StringShape::StringShape(const String* str)
  : type_(str->map()->instance_type()) {
  set_valid();
  DCHECK((type_ & kIsNotStringMask) == kStringTag);
}


StringShape::StringShape(Map* map)
  : type_(map->instance_type()) {
  set_valid();
  DCHECK((type_ & kIsNotStringMask) == kStringTag);
}


StringShape::StringShape(InstanceType t)
  : type_(static_cast<uint32_t>(t)) {
  set_valid();
  DCHECK((type_ & kIsNotStringMask) == kStringTag);
}


bool StringShape::IsInternalized() {
  DCHECK(valid());
  STATIC_ASSERT(kNotInternalizedTag != 0);
  // One masked compare checks "is a string" and "is internalized" together.
  return (type_ & (kIsNotStringMask | kIsNotInternalizedMask)) ==
      (kStringTag | kInternalizedTag);
}
351 
352 
IsOneByteRepresentation()353 bool String::IsOneByteRepresentation() const {
354   uint32_t type = map()->instance_type();
355   return (type & kStringEncodingMask) == kOneByteStringTag;
356 }
357 
358 
IsTwoByteRepresentation()359 bool String::IsTwoByteRepresentation() const {
360   uint32_t type = map()->instance_type();
361   return (type & kStringEncodingMask) == kTwoByteStringTag;
362 }
363 
364 
// Like IsOneByteRepresentation(), but for indirect (cons/sliced) strings
// answers for the string actually holding the characters.
bool String::IsOneByteRepresentationUnderneath() {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kIsIndirectStringTag != 0);
  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
  DCHECK(IsFlat());
  // For direct strings the encoding bits answer immediately; indirect
  // strings defer to the string they point at.
  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kOneByteStringTag:
      return true;
    case kTwoByteStringTag:
      return false;
    default:  // Cons or sliced string.  Need to go deeper.
      return GetUnderlying()->IsOneByteRepresentation();
  }
}


// Two-byte counterpart of the function above.
bool String::IsTwoByteRepresentationUnderneath() {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kIsIndirectStringTag != 0);
  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
  DCHECK(IsFlat());
  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kOneByteStringTag:
      return false;
    case kTwoByteStringTag:
      return true;
    default:  // Cons or sliced string.  Need to go deeper.
      return GetUnderlying()->IsTwoByteRepresentation();
  }
}
395 
396 
HasOnlyOneByteChars()397 bool String::HasOnlyOneByteChars() {
398   uint32_t type = map()->instance_type();
399   return (type & kOneByteDataHintMask) == kOneByteDataHintTag ||
400          IsOneByteRepresentation();
401 }
402 
403 
// Representation predicates test the representation bits captured from the
// instance type at construction time.
bool StringShape::IsCons() {
  return (type_ & kStringRepresentationMask) == kConsStringTag;
}


bool StringShape::IsSliced() {
  return (type_ & kStringRepresentationMask) == kSlicedStringTag;
}


// Indirect strings are the cons and sliced variants.
bool StringShape::IsIndirect() {
  return (type_ & kIsIndirectStringMask) == kIsIndirectStringTag;
}


bool StringShape::IsExternal() {
  return (type_ & kStringRepresentationMask) == kExternalStringTag;
}


bool StringShape::IsSequential() {
  return (type_ & kStringRepresentationMask) == kSeqStringTag;
}


StringRepresentationTag StringShape::representation_tag() {
  uint32_t tag = (type_ & kStringRepresentationMask);
  return static_cast<StringRepresentationTag>(tag);
}


uint32_t StringShape::encoding_tag() {
  return type_ & kStringEncodingMask;
}


// Representation and encoding bits combined; must match the API-level
// Internals constants asserted below.
uint32_t StringShape::full_representation_tag() {
  return (type_ & (kStringRepresentationMask | kStringEncodingMask));
}


STATIC_ASSERT((kStringRepresentationMask | kStringEncodingMask) ==
             Internals::kFullStringRepresentationMask);

STATIC_ASSERT(static_cast<uint32_t>(kStringEncodingMask) ==
             Internals::kStringEncodingMask);
450 
451 
// Combined representation+encoding tests, built on full_representation_tag().
bool StringShape::IsSequentialOneByte() {
  return full_representation_tag() == (kSeqStringTag | kOneByteStringTag);
}


bool StringShape::IsSequentialTwoByte() {
  return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);
}


bool StringShape::IsExternalOneByte() {
  return full_representation_tag() == (kExternalStringTag | kOneByteStringTag);
}


// Keep the internal tag values in sync with the constants the public API
// exposes through Internals.
STATIC_ASSERT((kExternalStringTag | kOneByteStringTag) ==
              Internals::kExternalOneByteRepresentationTag);

STATIC_ASSERT(v8::String::ONE_BYTE_ENCODING == kOneByteStringTag);


bool StringShape::IsExternalTwoByte() {
  return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
}


STATIC_ASSERT((kExternalStringTag | kTwoByteStringTag) ==
             Internals::kExternalTwoByteRepresentationTag);

STATIC_ASSERT(v8::String::TWO_BYTE_ENCODING == kTwoByteStringTag);
482 
483 
Get(int index)484 uc32 FlatStringReader::Get(int index) {
485   if (is_one_byte_) {
486     return Get<uint8_t>(index);
487   } else {
488     return Get<uc16>(index);
489   }
490 }
491 
492 
// Reads one character from the flattened string data. |Char| must match the
// reader's encoding; the sizeof(Char) branch is resolved at compile time.
template <typename Char>
Char FlatStringReader::Get(int index) {
  DCHECK_EQ(is_one_byte_, sizeof(Char) == 1);
  // NOTE(review): the bound permits index == length_, i.e. one past the last
  // character — presumably intentional for some caller; confirm.
  DCHECK(0 <= index && index <= length_);
  if (sizeof(Char) == 1) {
    return static_cast<Char>(static_cast<const uint8_t*>(start_)[index]);
  } else {
    return static_cast<Char>(static_cast<const uc16*>(start_)[index]);
  }
}
503 
504 
// The hash-table "shape" classes delegate key materialization to the key
// object itself.
Handle<Object> StringTableShape::AsHandle(Isolate* isolate, HashTableKey* key) {
  return key->AsHandle(isolate);
}


Handle<Object> CompilationCacheShape::AsHandle(Isolate* isolate,
                                               HashTableKey* key) {
  return key->AsHandle(isolate);
}


Handle<Object> CodeCacheHashTableShape::AsHandle(Isolate* isolate,
                                                 HashTableKey* key) {
  return key->AsHandle(isolate);
}
520 
// Hash-table key wrapping a sequential character vector, used when looking
// strings up in (or adding them to) the string table.
template <typename Char>
class SequentialStringKey : public HashTableKey {
 public:
  explicit SequentialStringKey(Vector<const Char> string, uint32_t seed)
      : string_(string), hash_field_(0), seed_(seed) { }

  // Computes the string hash of the vector, caching the raw hash field.
  uint32_t Hash() override {
    hash_field_ = StringHasher::HashSequentialString<Char>(string_.start(),
                                                           string_.length(),
                                                           seed_);

    uint32_t result = hash_field_ >> String::kHashShift;
    DCHECK(result != 0);  // Ensure that the hash value of 0 is never computed.
    return result;
  }


  // Existing table entries are strings; reuse their cached hash.
  uint32_t HashForObject(Object* other) override {
    return String::cast(other)->Hash();
  }

  Vector<const Char> string_;
  uint32_t hash_field_;
  uint32_t seed_;
};
546 
547 
// Sequential key over one-byte (Latin-1) character data.
class OneByteStringKey : public SequentialStringKey<uint8_t> {
 public:
  OneByteStringKey(Vector<const uint8_t> str, uint32_t seed)
      : SequentialStringKey<uint8_t>(str, seed) { }

  bool IsMatch(Object* string) override {
    return String::cast(string)->IsOneByteEqualTo(string_);
  }

  Handle<Object> AsHandle(Isolate* isolate) override;
};
559 
560 
// Key addressing the substring [from, from + length) of an existing
// sequential one-byte string, avoiding an up-front copy of the characters.
class SeqOneByteSubStringKey : public HashTableKey {
 public:
  SeqOneByteSubStringKey(Handle<SeqOneByteString> string, int from, int length)
      : string_(string), from_(from), length_(length) {
    DCHECK(string_->IsSeqOneByteString());
  }

  // Hashes the substring characters directly out of the backing store.
  uint32_t Hash() override {
    DCHECK(length_ >= 0);
    DCHECK(from_ + length_ <= string_->length());
    const uint8_t* chars = string_->GetChars() + from_;
    hash_field_ = StringHasher::HashSequentialString(
        chars, length_, string_->GetHeap()->HashSeed());
    uint32_t result = hash_field_ >> String::kHashShift;
    DCHECK(result != 0);  // Ensure that the hash value of 0 is never computed.
    return result;
  }

  uint32_t HashForObject(Object* other) override {
    return String::cast(other)->Hash();
  }

  bool IsMatch(Object* string) override;
  Handle<Object> AsHandle(Isolate* isolate) override;

 private:
  Handle<SeqOneByteString> string_;
  int from_;
  int length_;
  uint32_t hash_field_;
};
592 
593 
// Sequential key over two-byte (UC16) character data.
class TwoByteStringKey : public SequentialStringKey<uc16> {
 public:
  explicit TwoByteStringKey(Vector<const uc16> str, uint32_t seed)
      : SequentialStringKey<uc16>(str, seed) { }

  bool IsMatch(Object* string) override {
    return String::cast(string)->IsTwoByteEqualTo(string_);
  }

  Handle<Object> AsHandle(Isolate* isolate) override;
};
605 
606 
607 // Utf8StringKey carries a vector of chars as key.
// Utf8StringKey carries a vector of chars as key.
class Utf8StringKey : public HashTableKey {
 public:
  explicit Utf8StringKey(Vector<const char> string, uint32_t seed)
      : string_(string), hash_field_(0), seed_(seed) { }

  bool IsMatch(Object* string) override {
    return String::cast(string)->IsUtf8EqualTo(string_);
  }

  // Lazily computes the hash; a nonzero hash_field_ acts as the cache flag.
  uint32_t Hash() override {
    if (hash_field_ != 0) return hash_field_ >> String::kHashShift;
    hash_field_ = StringHasher::ComputeUtf8Hash(string_, seed_, &chars_);
    uint32_t result = hash_field_ >> String::kHashShift;
    DCHECK(result != 0);  // Ensure that the hash value of 0 is never computed.
    return result;
  }

  uint32_t HashForObject(Object* other) override {
    return String::cast(other)->Hash();
  }

  // Materializes the key as an internalized string, hashing first if needed.
  Handle<Object> AsHandle(Isolate* isolate) override {
    if (hash_field_ == 0) Hash();
    return isolate->factory()->NewInternalizedStringFromUtf8(
        string_, chars_, hash_field_);
  }

  Vector<const char> string_;
  uint32_t hash_field_;
  int chars_;  // Caches the number of characters when computing the hash code.
  uint32_t seed_;
};
640 
641 
IsNumber()642 bool Object::IsNumber() const {
643   return IsSmi() || IsHeapNumber();
644 }
645 
646 
TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE)
TYPE_CHECKER(BytecodeArray, BYTECODE_ARRAY_TYPE)
TYPE_CHECKER(FreeSpace, FREE_SPACE_TYPE)

// Fillers are the dead-space objects: free-space blocks and filler objects.
bool HeapObject::IsFiller() const {
  InstanceType instance_type = map()->instance_type();
  return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
}



// Emits one IsFixed<Type>Array() checker per typed-array element kind.
#define TYPED_ARRAY_TYPE_CHECKER(Type, type, TYPE, ctype, size)               \
  TYPE_CHECKER(Fixed##Type##Array, FIXED_##TYPE##_ARRAY_TYPE)

TYPED_ARRAYS(TYPED_ARRAY_TYPE_CHECKER)
#undef TYPED_ARRAY_TYPE_CHECKER
663 
664 bool HeapObject::IsFixedTypedArrayBase() const {
665   InstanceType instance_type = map()->instance_type();
666   return (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
667           instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE);
668 }
669 
IsJSReceiver()670 bool HeapObject::IsJSReceiver() const {
671   STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
672   return map()->instance_type() >= FIRST_JS_RECEIVER_TYPE;
673 }
674 
bool HeapObject::IsJSObject() const {
  STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
  return map()->IsJSObjectMap();
}

bool HeapObject::IsJSProxy() const { return map()->IsJSProxyMap(); }

TYPE_CHECKER(JSSet, JS_SET_TYPE)
TYPE_CHECKER(JSMap, JS_MAP_TYPE)
TYPE_CHECKER(JSSetIterator, JS_SET_ITERATOR_TYPE)
TYPE_CHECKER(JSMapIterator, JS_MAP_ITERATOR_TYPE)
TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE)
TYPE_CHECKER(JSWeakSet, JS_WEAK_SET_TYPE)
TYPE_CHECKER(JSContextExtensionObject, JS_CONTEXT_EXTENSION_OBJECT_TYPE)
TYPE_CHECKER(Map, MAP_TYPE)
TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)
// WeakFixedArray has no instance type of its own; it reuses FIXED_ARRAY_TYPE.
TYPE_CHECKER(WeakFixedArray, FIXED_ARRAY_TYPE)
TYPE_CHECKER(TransitionArray, TRANSITION_ARRAY_TYPE)
693 
694 bool HeapObject::IsJSWeakCollection() const {
695   return IsJSWeakMap() || IsJSWeakSet();
696 }
697 
// The following array-like types have no dedicated instance type; they are
// plain fixed arrays distinguished only by context of use.
bool HeapObject::IsDescriptorArray() const { return IsFixedArray(); }

bool HeapObject::IsArrayList() const { return IsFixedArray(); }

// Layout descriptors are either Smi-encoded or typed-array backed.
bool Object::IsLayoutDescriptor() const {
  return IsSmi() || IsFixedTypedArrayBase();
}

bool HeapObject::IsTypeFeedbackVector() const { return IsFixedArray(); }

bool HeapObject::IsTypeFeedbackMetadata() const { return IsFixedArray(); }

bool HeapObject::IsLiteralsArray() const { return IsFixedArray(); }
711 
// Heuristic type test used in asserts; see inline comments.
bool HeapObject::IsDeoptimizationInputData() const {
  // Must be a fixed array.
  if (!IsFixedArray()) return false;

  // There's no sure way to detect the difference between a fixed array and
  // a deoptimization data array.  Since this is used for asserts we can
  // check that the length is zero or else the fixed size plus a multiple of
  // the entry size.
  int length = FixedArray::cast(this)->length();
  if (length == 0) return true;

  length -= DeoptimizationInputData::kFirstDeoptEntryIndex;
  return length >= 0 && length % DeoptimizationInputData::kDeoptEntrySize == 0;
}
726 
IsDeoptimizationOutputData()727 bool HeapObject::IsDeoptimizationOutputData() const {
728   if (!IsFixedArray()) return false;
729   // There's actually no way to see the difference between a fixed array and
730   // a deoptimization data array.  Since this is used for asserts we can check
731   // that the length is plausible though.
732   if (FixedArray::cast(this)->length() % 2 != 0) return false;
733   return true;
734 }
735 
IsHandlerTable()736 bool HeapObject::IsHandlerTable() const {
737   if (!IsFixedArray()) return false;
738   // There's actually no way to see the difference between a fixed array and
739   // a handler table array.
740   return true;
741 }
742 
IsDependentCode()743 bool HeapObject::IsDependentCode() const {
744   if (!IsFixedArray()) return false;
745   // There's actually no way to see the difference between a fixed array and
746   // a dependent codes array.
747   return true;
748 }
749 
// Contexts are distinguished solely by their map; check against every
// context map the heap knows about.
bool HeapObject::IsContext() const {
  Map* map = this->map();
  Heap* heap = GetHeap();
  return (
      map == heap->function_context_map() || map == heap->catch_context_map() ||
      map == heap->with_context_map() || map == heap->native_context_map() ||
      map == heap->block_context_map() || map == heap->module_context_map() ||
      map == heap->script_context_map() ||
      map == heap->debug_evaluate_context_map());
}
760 
// These types are identified by their dedicated maps in the heap.
bool HeapObject::IsNativeContext() const {
  return map() == GetHeap()->native_context_map();
}

bool HeapObject::IsScriptContextTable() const {
  return map() == GetHeap()->script_context_table_map();
}

bool HeapObject::IsScopeInfo() const {
  return map() == GetHeap()->scope_info_map();
}
772 
773 
TYPE_CHECKER(JSBoundFunction, JS_BOUND_FUNCTION_TYPE)
TYPE_CHECKER(JSFunction, JS_FUNCTION_TYPE)


// Specialization used by the generic Is<T>() dispatch helper.
template <> inline bool Is<JSFunction>(Object* obj) {
  return obj->IsJSFunction();
}


TYPE_CHECKER(Code, CODE_TYPE)
TYPE_CHECKER(Oddball, ODDBALL_TYPE)
TYPE_CHECKER(Cell, CELL_TYPE)
TYPE_CHECKER(PropertyCell, PROPERTY_CELL_TYPE)
TYPE_CHECKER(WeakCell, WEAK_CELL_TYPE)
TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
TYPE_CHECKER(JSDate, JS_DATE_TYPE)
TYPE_CHECKER(JSError, JS_ERROR_TYPE)
TYPE_CHECKER(JSGeneratorObject, JS_GENERATOR_OBJECT_TYPE)
TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)
TYPE_CHECKER(JSModule, JS_MODULE_TYPE)
TYPE_CHECKER(JSPromise, JS_PROMISE_TYPE)
TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
796 
797 bool HeapObject::IsAbstractCode() const {
798   return IsBytecodeArray() || IsCode();
799 }
800 
IsStringWrapper()801 bool HeapObject::IsStringWrapper() const {
802   return IsJSValue() && JSValue::cast(this)->value()->IsString();
803 }
804 
805 
TYPE_CHECKER(Foreign, FOREIGN_TYPE)

// Booleans are the oddballs whose kind matches the boolean bit pattern.
bool HeapObject::IsBoolean() const {
  return IsOddball() &&
      ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
}


TYPE_CHECKER(JSArray, JS_ARRAY_TYPE)
TYPE_CHECKER(JSArrayBuffer, JS_ARRAY_BUFFER_TYPE)
TYPE_CHECKER(JSTypedArray, JS_TYPED_ARRAY_TYPE)
TYPE_CHECKER(JSDataView, JS_DATA_VIEW_TYPE)

// Array-buffer views are the typed arrays and data views.
bool HeapObject::IsJSArrayBufferView() const {
  return IsJSDataView() || IsJSTypedArray();
}


TYPE_CHECKER(JSRegExp, JS_REGEXP_TYPE)


// Specialization used by the generic Is<T>() dispatch helper.
template <> inline bool Is<JSArray>(Object* obj) {
  return obj->IsJSArray();
}
830 
IsHashTable()831 bool HeapObject::IsHashTable() const {
832   return map() == GetHeap()->hash_table_map();
833 }
834 
IsWeakHashTable()835 bool HeapObject::IsWeakHashTable() const { return IsHashTable(); }
836 
IsDictionary()837 bool HeapObject::IsDictionary() const {
838   return IsHashTable() && this != GetHeap()->string_table();
839 }
840 
841 
// The concrete dictionary flavors are not distinguishable from a generic
// dictionary at runtime; these checks are structural only.
bool Object::IsNameDictionary() const {
  return IsDictionary();
}


bool Object::IsGlobalDictionary() const { return IsDictionary(); }


bool Object::IsSeededNumberDictionary() const {
  return IsDictionary();
}


bool Object::IsUnseededNumberDictionary() const {
  return IsDictionary();
}

// The string table and string sets reuse the hash table layout.
bool HeapObject::IsStringTable() const { return IsHashTable(); }

bool HeapObject::IsStringSet() const { return IsHashTable(); }
862 
bool HeapObject::IsNormalizedMapCache() const {
  return NormalizedMapCache::IsNormalizedMapCache(this);
}


// Maps a map's hash into one of the cache's kEntries slots.
int NormalizedMapCache::GetIndex(Handle<Map> map) {
  return map->Hash() % NormalizedMapCache::kEntries;
}

// Heuristic type test: a normalized map cache is recognized as a fixed array
// of exactly kEntries elements; heap verification checks it for real.
bool NormalizedMapCache::IsNormalizedMapCache(const HeapObject* obj) {
  if (!obj->IsFixedArray()) return false;
  if (FixedArray::cast(obj)->length() != NormalizedMapCache::kEntries) {
    return false;
  }
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    reinterpret_cast<NormalizedMapCache*>(const_cast<HeapObject*>(obj))
        ->NormalizedMapCacheVerify();
  }
#endif
  return true;
}
885 
// The cache/table types below all use the plain hash table map, so a
// structural map check is the best these predicates can do.
bool HeapObject::IsCompilationCacheTable() const { return IsHashTable(); }

bool HeapObject::IsCodeCacheHashTable() const { return IsHashTable(); }

bool HeapObject::IsMapCache() const { return IsHashTable(); }

bool HeapObject::IsObjectHashTable() const { return IsHashTable(); }

// Ordered hash tables have their own dedicated map.
bool HeapObject::IsOrderedHashTable() const {
  return map() == GetHeap()->ordered_hash_table_map();
}


// Ordered sets and maps share the ordered hash table map and cannot be
// told apart here.
bool Object::IsOrderedHashSet() const {
  return IsOrderedHashTable();
}


bool Object::IsOrderedHashMap() const {
  return IsOrderedHashTable();
}
907 
908 
IsPrimitive()909 bool Object::IsPrimitive() const {
910   return IsSmi() || HeapObject::cast(this)->map()->IsPrimitiveMap();
911 }
912 
IsJSGlobalProxy()913 bool HeapObject::IsJSGlobalProxy() const {
914   bool result = map()->instance_type() == JS_GLOBAL_PROXY_TYPE;
915   DCHECK(!result || map()->is_access_check_needed());
916   return result;
917 }
918 
919 
TYPE_CHECKER(JSGlobalObject, JS_GLOBAL_OBJECT_TYPE)

bool HeapObject::IsUndetectable() const { return map()->is_undetectable(); }

bool HeapObject::IsAccessCheckNeeded() const {
  if (IsJSGlobalProxy()) {
    // For global proxies the answer depends on whether the proxy is
    // detached from the current context's global object, not on the map
    // bit alone.
    const JSGlobalProxy* proxy = JSGlobalProxy::cast(this);
    JSGlobalObject* global = proxy->GetIsolate()->context()->global_object();
    return proxy->IsDetachedFrom(global);
  }
  return map()->is_access_check_needed();
}
932 
// True if the instance type is any of the types enumerated in STRUCT_LIST.
bool HeapObject::IsStruct() const {
  switch (map()->instance_type()) {
#define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
  STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    default: return false;
  }
}

// Expands to a pair of Is##Name predicates (one on Object, one on
// HeapObject) for every struct type in STRUCT_LIST.
#define MAKE_STRUCT_PREDICATE(NAME, Name, name)                  \
  bool Object::Is##Name() const {                                \
    return IsHeapObject() && HeapObject::cast(this)->Is##Name(); \
  }                                                              \
  bool HeapObject::Is##Name() const {                            \
    return map()->instance_type() == NAME##_TYPE;                \
  }
STRUCT_LIST(MAKE_STRUCT_PREDICATE)
#undef MAKE_STRUCT_PREDICATE
951 
// Returns the numeric value of this object. Callers must guarantee
// IsNumber(); a Smi payload is widened to double, a HeapNumber is read
// directly.
double Object::Number() const {
  DCHECK(IsNumber());
  return IsSmi()
             ? static_cast<double>(reinterpret_cast<const Smi*>(this)->value())
             : reinterpret_cast<const HeapNumber*>(this)->value();
}
958 
959 
IsNaN()960 bool Object::IsNaN() const {
961   return this->IsHeapNumber() && std::isnan(HeapNumber::cast(this)->value());
962 }
963 
964 
IsMinusZero()965 bool Object::IsMinusZero() const {
966   return this->IsHeapNumber() &&
967          i::IsMinusZero(HeapNumber::cast(this)->value());
968 }
969 
970 
OptimalRepresentation()971 Representation Object::OptimalRepresentation() {
972   if (!FLAG_track_fields) return Representation::Tagged();
973   if (IsSmi()) {
974     return Representation::Smi();
975   } else if (FLAG_track_double_fields && IsHeapNumber()) {
976     return Representation::Double();
977   } else if (FLAG_track_computed_fields &&
978              IsUninitialized(HeapObject::cast(this)->GetIsolate())) {
979     return Representation::None();
980   } else if (FLAG_track_heap_object_fields) {
981     DCHECK(IsHeapObject());
982     return Representation::HeapObject();
983   } else {
984     return Representation::Tagged();
985   }
986 }
987 
988 
OptimalElementsKind()989 ElementsKind Object::OptimalElementsKind() {
990   if (IsSmi()) return FAST_SMI_ELEMENTS;
991   if (IsNumber()) return FAST_DOUBLE_ELEMENTS;
992   return FAST_ELEMENTS;
993 }
994 
995 
FitsRepresentation(Representation representation)996 bool Object::FitsRepresentation(Representation representation) {
997   if (FLAG_track_fields && representation.IsSmi()) {
998     return IsSmi();
999   } else if (FLAG_track_double_fields && representation.IsDouble()) {
1000     return IsMutableHeapNumber() || IsNumber();
1001   } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
1002     return IsHeapObject();
1003   } else if (FLAG_track_fields && representation.IsNone()) {
1004     return false;
1005   }
1006   return true;
1007 }
1008 
ToUint32(uint32_t * value)1009 bool Object::ToUint32(uint32_t* value) {
1010   if (IsSmi()) {
1011     int num = Smi::cast(this)->value();
1012     if (num < 0) return false;
1013     *value = static_cast<uint32_t>(num);
1014     return true;
1015   }
1016   if (IsHeapNumber()) {
1017     double num = HeapNumber::cast(this)->value();
1018     if (num < 0) return false;
1019     uint32_t uint_value = FastD2UI(num);
1020     if (FastUI2D(uint_value) == num) {
1021       *value = uint_value;
1022       return true;
1023     }
1024   }
1025   return false;
1026 }
1027 
// static
MaybeHandle<JSReceiver> Object::ToObject(Isolate* isolate,
                                         Handle<Object> object) {
  // Receivers convert to themselves; anything else is wrapped using the
  // native context.
  if (object->IsJSReceiver()) return Handle<JSReceiver>::cast(object);
  return ToObject(isolate, object, isolate->native_context());
}


// static
MaybeHandle<Name> Object::ToName(Isolate* isolate, Handle<Object> input) {
  // Names (strings and symbols) pass through unchanged.
  if (input->IsName()) return Handle<Name>::cast(input);
  return ConvertToName(isolate, input);
}

// static
MaybeHandle<Object> Object::ToPrimitive(Handle<Object> input,
                                        ToPrimitiveHint hint) {
  // Primitives pass through; receivers run the full ToPrimitive protocol.
  if (input->IsPrimitive()) return input;
  return JSReceiver::ToPrimitive(Handle<JSReceiver>::cast(input), hint);
}


// True if this is a JSObject whose class name equals |name|.
bool Object::HasSpecificClassOf(String* name) {
  return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
}
1053 
// The getters below all follow the same pattern: build a LookupIterator,
// return undefined when the lookup finds nothing, otherwise delegate to
// the generic GetProperty/GetDataProperty on the iterator.
MaybeHandle<Object> Object::GetProperty(Handle<Object> object,
                                        Handle<Name> name) {
  LookupIterator it(object, name);
  if (!it.IsFound()) return it.factory()->undefined_value();
  return GetProperty(&it);
}

MaybeHandle<Object> JSReceiver::GetProperty(Handle<JSReceiver> receiver,
                                            Handle<Name> name) {
  LookupIterator it(receiver, name, receiver);
  if (!it.IsFound()) return it.factory()->undefined_value();
  return Object::GetProperty(&it);
}

MaybeHandle<Object> Object::GetElement(Isolate* isolate, Handle<Object> object,
                                       uint32_t index) {
  LookupIterator it(isolate, object, index);
  if (!it.IsFound()) return it.factory()->undefined_value();
  return GetProperty(&it);
}

MaybeHandle<Object> JSReceiver::GetElement(Isolate* isolate,
                                           Handle<JSReceiver> receiver,
                                           uint32_t index) {
  LookupIterator it(isolate, receiver, index, receiver);
  if (!it.IsFound()) return it.factory()->undefined_value();
  return Object::GetProperty(&it);
}

Handle<Object> JSReceiver::GetDataProperty(Handle<JSReceiver> object,
                                           Handle<Name> name) {
  // Interceptors are deliberately skipped: only real data properties along
  // the prototype chain are considered.
  LookupIterator it(object, name, object,
                    LookupIterator::PROTOTYPE_CHAIN_SKIP_INTERCEPTOR);
  if (!it.IsFound()) return it.factory()->undefined_value();
  return GetDataProperty(&it);
}
1090 
// Stores |value| at |index|; failure propagates as an empty handle via
// MAYBE_RETURN_NULL, success returns the stored value.
MaybeHandle<Object> Object::SetElement(Isolate* isolate, Handle<Object> object,
                                       uint32_t index, Handle<Object> value,
                                       LanguageMode language_mode) {
  LookupIterator it(isolate, object, index);
  MAYBE_RETURN_NULL(
      SetProperty(&it, value, language_mode, MAY_BE_STORE_FROM_KEYED));
  return value;
}

MaybeHandle<Object> JSReceiver::GetPrototype(Isolate* isolate,
                                             Handle<JSReceiver> receiver) {
  // We don't expect access checks to be needed on JSProxy objects.
  DCHECK(!receiver->IsAccessCheckNeeded() || receiver->IsJSObject());
  // Walk until the first non-hidden prototype, following proxies. A failed
  // advance returns the empty handle (presumably a pending exception from
  // a proxy trap -- the caller must check).
  PrototypeIterator iter(isolate, receiver, kStartAtReceiver,
                         PrototypeIterator::END_AT_NON_HIDDEN);
  do {
    if (!iter.AdvanceFollowingProxies()) return MaybeHandle<Object>();
  } while (!iter.IsAtEnd());
  return PrototypeIterator::GetCurrent(iter);
}
1111 
GetProperty(Isolate * isolate,Handle<JSReceiver> receiver,const char * name)1112 MaybeHandle<Object> JSReceiver::GetProperty(Isolate* isolate,
1113                                             Handle<JSReceiver> receiver,
1114                                             const char* name) {
1115   Handle<String> str = isolate->factory()->InternalizeUtf8String(name);
1116   return GetProperty(receiver, str);
1117 }
1118 
// static
// Collects this receiver's own keys only (no prototype walk), with index
// keys converted to strings.
MUST_USE_RESULT MaybeHandle<FixedArray> JSReceiver::OwnPropertyKeys(
    Handle<JSReceiver> object) {
  return KeyAccumulator::GetKeys(object, KeyCollectionMode::kOwnOnly,
                                 ALL_PROPERTIES,
                                 GetKeysConversion::kConvertToString);
}
1126 
// Raw field access helpers. FIELD_ADDR/FIELD_ADDR_CONST turn a tagged
// object pointer plus byte offset into an untagged address by stripping
// kHeapObjectTag.
#define FIELD_ADDR(p, offset) \
  (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)

#define FIELD_ADDR_CONST(p, offset) \
  (reinterpret_cast<const byte*>(p) + offset - kHeapObjectTag)

#define READ_FIELD(p, offset) \
  (*reinterpret_cast<Object* const*>(FIELD_ADDR_CONST(p, offset)))

// Atomic variants: ACQUIRE/RELEASE pair for synchronized accesses,
// NOBARRIER for plain atomic word-sized loads/stores.
#define ACQUIRE_READ_FIELD(p, offset)           \
  reinterpret_cast<Object*>(base::Acquire_Load( \
      reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR_CONST(p, offset))))

#define NOBARRIER_READ_FIELD(p, offset)           \
  reinterpret_cast<Object*>(base::NoBarrier_Load( \
      reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR_CONST(p, offset))))

#define WRITE_FIELD(p, offset, value) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)

#define RELEASE_WRITE_FIELD(p, offset, value)                     \
  base::Release_Store(                                            \
      reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
      reinterpret_cast<base::AtomicWord>(value));

#define NOBARRIER_WRITE_FIELD(p, offset, value)                   \
  base::NoBarrier_Store(                                          \
      reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
      reinterpret_cast<base::AtomicWord>(value));

// Write barriers: notify the incremental marker and the heap about a
// freshly stored pointer.
#define WRITE_BARRIER(heap, object, offset, value)          \
  heap->incremental_marking()->RecordWrite(                 \
      object, HeapObject::RawField(object, offset), value); \
  heap->RecordWrite(object, offset, value);

#define FIXED_ARRAY_ELEMENTS_WRITE_BARRIER(heap, array, start, length) \
  do {                                                                 \
    heap->RecordFixedArrayElements(array, start, length);              \
    heap->incremental_marking()->IterateBlackObject(array);            \
  } while (false)

#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode) \
  if (mode != SKIP_WRITE_BARRIER) {                                  \
    if (mode == UPDATE_WRITE_BARRIER) {                              \
      heap->incremental_marking()->RecordWrite(                      \
          object, HeapObject::RawField(object, offset), value);      \
    }                                                                \
    heap->RecordWrite(object, offset, value);                        \
  }

// Typed scalar field accessors (non-atomic, host byte order).
#define READ_DOUBLE_FIELD(p, offset) \
  ReadDoubleValue(FIELD_ADDR_CONST(p, offset))

#define WRITE_DOUBLE_FIELD(p, offset, value) \
  WriteDoubleValue(FIELD_ADDR(p, offset), value)

#define READ_INT_FIELD(p, offset) \
  (*reinterpret_cast<const int*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INT_FIELD(p, offset, value) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)

#define READ_INTPTR_FIELD(p, offset) \
  (*reinterpret_cast<const intptr_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INTPTR_FIELD(p, offset, value) \
  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_UINT8_FIELD(p, offset) \
  (*reinterpret_cast<const uint8_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_UINT8_FIELD(p, offset, value) \
  (*reinterpret_cast<uint8_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT8_FIELD(p, offset) \
  (*reinterpret_cast<const int8_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INT8_FIELD(p, offset, value) \
  (*reinterpret_cast<int8_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_UINT16_FIELD(p, offset) \
  (*reinterpret_cast<const uint16_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_UINT16_FIELD(p, offset, value) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT16_FIELD(p, offset) \
  (*reinterpret_cast<const int16_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INT16_FIELD(p, offset, value) \
  (*reinterpret_cast<int16_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_UINT32_FIELD(p, offset) \
  (*reinterpret_cast<const uint32_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_UINT32_FIELD(p, offset, value) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT32_FIELD(p, offset) \
  (*reinterpret_cast<const int32_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INT32_FIELD(p, offset, value) \
  (*reinterpret_cast<int32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_FLOAT_FIELD(p, offset) \
  (*reinterpret_cast<const float*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_FLOAT_FIELD(p, offset, value) \
  (*reinterpret_cast<float*>(FIELD_ADDR(p, offset)) = value)

#define READ_UINT64_FIELD(p, offset) \
  (*reinterpret_cast<const uint64_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_UINT64_FIELD(p, offset, value) \
  (*reinterpret_cast<uint64_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT64_FIELD(p, offset) \
  (*reinterpret_cast<const int64_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INT64_FIELD(p, offset, value) \
  (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_BYTE_FIELD(p, offset) \
  (*reinterpret_cast<const byte*>(FIELD_ADDR_CONST(p, offset)))

#define NOBARRIER_READ_BYTE_FIELD(p, offset) \
  static_cast<byte>(base::NoBarrier_Load(    \
      reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset))))

#define WRITE_BYTE_FIELD(p, offset, value) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)

#define NOBARRIER_WRITE_BYTE_FIELD(p, offset, value)           \
  base::NoBarrier_Store(                                       \
      reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset)), \
      static_cast<base::Atomic8>(value));
1263 
// Returns the address of the field at |byte_offset| as an Object** slot.
Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
  return reinterpret_cast<Object**>(FIELD_ADDR(obj, byte_offset));
}


// A MapWord is the raw first word of a heap object: normally an (untagged
// here) Map pointer, but it can instead hold a smi-tagged forwarding
// address (used while objects are being moved).
MapWord MapWord::FromMap(const Map* map) {
  return MapWord(reinterpret_cast<uintptr_t>(map));
}


Map* MapWord::ToMap() {
  return reinterpret_cast<Map*>(value_);
}

bool MapWord::IsForwardingAddress() const {
  // Forwarding addresses are smi-tagged; real map pointers are not.
  return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
}


MapWord MapWord::FromForwardingAddress(HeapObject* object) {
  // Stripping kHeapObjectTag leaves a smi-tagged word (see above).
  Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
  return MapWord(reinterpret_cast<uintptr_t>(raw));
}


HeapObject* MapWord::ToForwardingAddress() {
  DCHECK(IsForwardingAddress());
  return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
}
1293 
1294 
#ifdef VERIFY_HEAP
// Verifies that the field at |offset| holds a valid pointer.
void HeapObject::VerifyObjectField(int offset) {
  VerifyPointer(READ_FIELD(this, offset));
}

// Verifies that the field at |offset| holds a Smi.
void HeapObject::VerifySmiField(int offset) {
  CHECK(READ_FIELD(this, offset)->IsSmi());
}
#endif


// Recovers the owning Heap from the memory chunk (page) header that
// contains this object's address.
Heap* HeapObject::GetHeap() const {
  Heap* heap = MemoryChunk::FromAddress(
                   reinterpret_cast<Address>(const_cast<HeapObject*>(this)))
                   ->heap();
  SLOW_DCHECK(heap != NULL);
  return heap;
}


Isolate* HeapObject::GetIsolate() const {
  return GetHeap()->isolate();
}
1318 
1319 
Map* HeapObject::map() const {
#ifdef DEBUG
  // Clear mark potentially added by PathTracer.
  uintptr_t raw_value =
      map_word().ToRawValue() & ~static_cast<uintptr_t>(PathTracer::kMarkTag);
  return MapWord::FromRawValue(raw_value).ToMap();
#else
  return map_word().ToMap();
#endif
}


// Sets the map and records the write with the incremental marker.
void HeapObject::set_map(Map* value) {
  set_map_word(MapWord::FromMap(value));
  if (value != NULL) {
    // TODO(1600) We are passing NULL as a slot because maps can never be on
    // evacuation candidate.
    value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
  }
}


// Acquire-load variant of map(); pairs with synchronized_set_map below.
Map* HeapObject::synchronized_map() {
  return synchronized_map_word().ToMap();
}


void HeapObject::synchronized_set_map(Map* value) {
  synchronized_set_map_word(MapWord::FromMap(value));
  if (value != NULL) {
    // TODO(1600) We are passing NULL as a slot because maps can never be on
    // evacuation candidate.
    value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
  }
}


// Release-store of the map, skipping the marking write barrier.
void HeapObject::synchronized_set_map_no_write_barrier(Map* value) {
  synchronized_set_map_word(MapWord::FromMap(value));
}


// Unsafe accessor omitting write barrier.
void HeapObject::set_map_no_write_barrier(Map* value) {
  set_map_word(MapWord::FromMap(value));
}


// Plain (no-barrier) atomic read of the raw map word.
MapWord HeapObject::map_word() const {
  return MapWord(
      reinterpret_cast<uintptr_t>(NOBARRIER_READ_FIELD(this, kMapOffset)));
}


void HeapObject::set_map_word(MapWord map_word) {
  NOBARRIER_WRITE_FIELD(
      this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
}


// Acquire/release versions of the raw map word accessors.
MapWord HeapObject::synchronized_map_word() const {
  return MapWord(
      reinterpret_cast<uintptr_t>(ACQUIRE_READ_FIELD(this, kMapOffset)));
}


void HeapObject::synchronized_set_map_word(MapWord map_word) {
  RELEASE_WRITE_FIELD(
      this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
}


// Object size in bytes, derived from the map.
int HeapObject::Size() {
  return SizeFromMap(map());
}
1395 
1396 
double HeapNumber::value() const {
  return READ_DOUBLE_FIELD(this, kValueOffset);
}


void HeapNumber::set_value(double value) {
  WRITE_DOUBLE_FIELD(this, kValueOffset, value);
}


// Extracts the unbiased IEEE-754 exponent from the word holding the
// double's sign/exponent bits.
int HeapNumber::get_exponent() {
  return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
          kExponentShift) - kExponentBias;
}


// Returns the sign bit masked out of the exponent word (non-zero iff the
// sign bit is set).
int HeapNumber::get_sign() {
  return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
}
1416 
1417 
bool Simd128Value::Equals(Simd128Value* that) {
  // TODO(bmeurer): This doesn't match the SIMD.js specification, but it seems
  // to be consistent with what the CompareICStub does, and what is tested in
  // the current SIMD.js testsuite.
  if (this == that) return true;
  // Dispatch to the lane-wise Equals of the matching concrete SIMD type;
  // values of different SIMD types are never equal.
#define SIMD128_VALUE(TYPE, Type, type, lane_count, lane_type) \
  if (this->Is##Type()) {                                      \
    if (!that->Is##Type()) return false;                       \
    return Type::cast(this)->Equals(Type::cast(that));         \
  }
  SIMD128_TYPES(SIMD128_VALUE)
#undef SIMD128_VALUE
  return false;
}


// static
bool Simd128Value::Equals(Handle<Simd128Value> one, Handle<Simd128Value> two) {
  return one->Equals(*two);
}
1438 
1439 
// Defines lane-wise equality for each concrete SIMD128 type: equal iff
// every corresponding lane compares equal.
#define SIMD128_VALUE_EQUALS(TYPE, Type, type, lane_count, lane_type) \
  bool Type::Equals(Type* that) {                                     \
    for (int lane = 0; lane < lane_count; ++lane) {                   \
      if (this->get_lane(lane) != that->get_lane(lane)) return false; \
    }                                                                 \
    return true;                                                      \
  }
SIMD128_TYPES(SIMD128_VALUE_EQUALS)
#undef SIMD128_VALUE_EQUALS
1449 
1450 
// Lane accessors account for byte order: on big-endian targets the lane
// index is mirrored within the 16-byte payload.
#if defined(V8_TARGET_LITTLE_ENDIAN)
#define SIMD128_READ_LANE(lane_type, lane_count, field_type, field_size) \
  lane_type value =                                                      \
      READ_##field_type##_FIELD(this, kValueOffset + lane * field_size);
#elif defined(V8_TARGET_BIG_ENDIAN)
#define SIMD128_READ_LANE(lane_type, lane_count, field_type, field_size) \
  lane_type value = READ_##field_type##_FIELD(                           \
      this, kValueOffset + (lane_count - lane - 1) * field_size);
#else
#error Unknown byte ordering
#endif

#if defined(V8_TARGET_LITTLE_ENDIAN)
#define SIMD128_WRITE_LANE(lane_count, field_type, field_size, value) \
  WRITE_##field_type##_FIELD(this, kValueOffset + lane * field_size, value);
#elif defined(V8_TARGET_BIG_ENDIAN)
#define SIMD128_WRITE_LANE(lane_count, field_type, field_size, value) \
  WRITE_##field_type##_FIELD(                                         \
      this, kValueOffset + (lane_count - lane - 1) * field_size, value);
#else
#error Unknown byte ordering
#endif

// Defines get_lane/set_lane for the numeric SIMD types.
#define SIMD128_NUMERIC_LANE_FNS(type, lane_type, lane_count, field_type, \
                                 field_size)                              \
  lane_type type::get_lane(int lane) const {                              \
    DCHECK(lane < lane_count && lane >= 0);                               \
    SIMD128_READ_LANE(lane_type, lane_count, field_type, field_size)      \
    return value;                                                         \
  }                                                                       \
                                                                          \
  void type::set_lane(int lane, lane_type value) {                        \
    DCHECK(lane < lane_count && lane >= 0);                               \
    SIMD128_WRITE_LANE(lane_count, field_type, field_size, value)         \
  }

SIMD128_NUMERIC_LANE_FNS(Float32x4, float, 4, FLOAT, kFloatSize)
SIMD128_NUMERIC_LANE_FNS(Int32x4, int32_t, 4, INT32, kInt32Size)
SIMD128_NUMERIC_LANE_FNS(Uint32x4, uint32_t, 4, UINT32, kInt32Size)
SIMD128_NUMERIC_LANE_FNS(Int16x8, int16_t, 8, INT16, kShortSize)
SIMD128_NUMERIC_LANE_FNS(Uint16x8, uint16_t, 8, UINT16, kShortSize)
SIMD128_NUMERIC_LANE_FNS(Int8x16, int8_t, 16, INT8, kCharSize)
SIMD128_NUMERIC_LANE_FNS(Uint8x16, uint8_t, 16, UINT8, kCharSize)
#undef SIMD128_NUMERIC_LANE_FNS


// Defines get_lane/set_lane for the boolean SIMD types; lanes are stored
// as all-ones (-1) for true and 0 for false.
#define SIMD128_BOOLEAN_LANE_FNS(type, lane_type, lane_count, field_type, \
                                 field_size)                              \
  bool type::get_lane(int lane) const {                                   \
    DCHECK(lane < lane_count && lane >= 0);                               \
    SIMD128_READ_LANE(lane_type, lane_count, field_type, field_size)      \
    DCHECK(value == 0 || value == -1);                                    \
    return value != 0;                                                    \
  }                                                                       \
                                                                          \
  void type::set_lane(int lane, bool value) {                             \
    DCHECK(lane < lane_count && lane >= 0);                               \
    int32_t int_val = value ? -1 : 0;                                     \
    SIMD128_WRITE_LANE(lane_count, field_type, field_size, int_val)       \
  }

SIMD128_BOOLEAN_LANE_FNS(Bool32x4, int32_t, 4, INT32, kInt32Size)
SIMD128_BOOLEAN_LANE_FNS(Bool16x8, int16_t, 8, INT16, kShortSize)
SIMD128_BOOLEAN_LANE_FNS(Bool8x16, int8_t, 16, INT8, kCharSize)
#undef SIMD128_BOOLEAN_LANE_FNS

#undef SIMD128_READ_LANE
#undef SIMD128_WRITE_LANE
1519 
1520 
ACCESSORS(JSReceiver, properties, FixedArray, kPropertiesOffset)


// Address of element 0, for raw scanning of the backing store.
Object** FixedArray::GetFirstElementAddress() {
  return reinterpret_cast<Object**>(FIELD_ADDR(this, OffsetOfElementAt(0)));
}
1527 
1528 
ContainsOnlySmisOrHoles()1529 bool FixedArray::ContainsOnlySmisOrHoles() {
1530   Object* the_hole = GetHeap()->the_hole_value();
1531   Object** current = GetFirstElementAddress();
1532   for (int i = 0; i < length(); ++i) {
1533     Object* candidate = *current++;
1534     if (!candidate->IsSmi() && candidate != the_hole) return false;
1535   }
1536   return true;
1537 }
1538 
1539 
FixedArrayBase* JSObject::elements() const {
  Object* array = READ_FIELD(this, kElementsOffset);
  return static_cast<FixedArrayBase*>(array);
}


// Resets the site to its pristine state: smi-zero transition info and
// nested site, cleared pretenuring counters, empty dependent code.
void AllocationSite::Initialize() {
  set_transition_info(Smi::FromInt(0));
  SetElementsKind(GetInitialFastElementsKind());
  set_nested_site(Smi::FromInt(0));
  set_pretenure_data(0);
  set_pretenure_create_count(0);
  set_dependent_code(DependentCode::cast(GetHeap()->empty_fixed_array()),
                     SKIP_WRITE_BARRIER);
}
1555 
1556 
// Predicates over the packed pretenuring decision.
bool AllocationSite::IsZombie() { return pretenure_decision() == kZombie; }


bool AllocationSite::IsMaybeTenure() {
  return pretenure_decision() == kMaybeTenure;
}


bool AllocationSite::PretenuringDecisionMade() {
  return pretenure_decision() != kUndecided;
}


// Resets the site's state, then flags it as dead (kZombie).
void AllocationSite::MarkZombie() {
  DCHECK(!IsZombie());
  Initialize();
  set_pretenure_decision(kZombie);
}
1575 
1576 
// The elements kind is encoded inside the transition_info smi; this is
// only valid while the site does not point to a boilerplate literal.
ElementsKind AllocationSite::GetElementsKind() {
  DCHECK(!SitePointsToLiteral());
  int value = Smi::cast(transition_info())->value();
  return ElementsKindBits::decode(value);
}


void AllocationSite::SetElementsKind(ElementsKind kind) {
  int value = Smi::cast(transition_info())->value();
  set_transition_info(Smi::FromInt(ElementsKindBits::update(value, kind)),
                      SKIP_WRITE_BARRIER);
}


// Inlining the constructor call is allowed until the DoNotInline bit is
// set via SetDoNotInlineCall.
bool AllocationSite::CanInlineCall() {
  int value = Smi::cast(transition_info())->value();
  return DoNotInlineBit::decode(value) == 0;
}


void AllocationSite::SetDoNotInlineCall() {
  int value = Smi::cast(transition_info())->value();
  set_transition_info(Smi::FromInt(DoNotInlineBit::update(value, true)),
                      SKIP_WRITE_BARRIER);
}


bool AllocationSite::SitePointsToLiteral() {
  // If transition_info is a smi, then it represents an ElementsKind
  // for a constructed array. Otherwise, it must be a boilerplate
  // for an object or array literal.
  return transition_info()->IsJSArray() || transition_info()->IsJSObject();
}
1610 
1611 
1612 // Heuristic: We only need to create allocation site info if the boilerplate
1613 // elements kind is the initial elements kind.
GetMode(ElementsKind boilerplate_elements_kind)1614 AllocationSiteMode AllocationSite::GetMode(
1615     ElementsKind boilerplate_elements_kind) {
1616   if (IsFastSmiElementsKind(boilerplate_elements_kind)) {
1617     return TRACK_ALLOCATION_SITE;
1618   }
1619 
1620   return DONT_TRACK_ALLOCATION_SITE;
1621 }
1622 
1623 
GetMode(ElementsKind from,ElementsKind to)1624 AllocationSiteMode AllocationSite::GetMode(ElementsKind from,
1625                                            ElementsKind to) {
1626   if (IsFastSmiElementsKind(from) &&
1627       IsMoreGeneralElementsKindTransition(from, to)) {
1628     return TRACK_ALLOCATION_SITE;
1629   }
1630 
1631   return DONT_TRACK_ALLOCATION_SITE;
1632 }
1633 
1634 
CanTrack(InstanceType type)1635 inline bool AllocationSite::CanTrack(InstanceType type) {
1636   if (FLAG_allocation_site_pretenuring) {
1637     return type == JS_ARRAY_TYPE ||
1638         type == JS_OBJECT_TYPE ||
1639         type < FIRST_NONSTRING_TYPE;
1640   }
1641   return type == JS_ARRAY_TYPE;
1642 }
1643 
1644 
// Current pretenuring decision, packed into a bit field of pretenure_data.
AllocationSite::PretenureDecision AllocationSite::pretenure_decision() {
  int value = pretenure_data();
  return PretenureDecisionBits::decode(value);
}


void AllocationSite::set_pretenure_decision(PretenureDecision decision) {
  int value = pretenure_data();
  set_pretenure_data(PretenureDecisionBits::update(value, decision));
}


// Whether code depending on this site must be deoptimized because the
// pretenuring decision changed (set in MakePretenureDecision below).
bool AllocationSite::deopt_dependent_code() {
  int value = pretenure_data();
  return DeoptDependentCodeBit::decode(value);
}


void AllocationSite::set_deopt_dependent_code(bool deopt) {
  int value = pretenure_data();
  set_pretenure_data(DeoptDependentCodeBit::update(value, deopt));
}


// Number of allocation mementos found for this site (also packed into
// pretenure_data).
int AllocationSite::memento_found_count() {
  int value = pretenure_data();
  return MementoFoundCountBits::decode(value);
}
1673 
1674 
// Stores the found count into its bit field; the DCHECKs guard against the
// counter overflowing its allotted bits.
inline void AllocationSite::set_memento_found_count(int count) {
  int value = pretenure_data();
  // Verify that we can count more mementos than we can possibly find in one
  // new space collection.
  DCHECK((GetHeap()->MaxSemiSpaceSize() /
          (Heap::kMinObjectSizeInWords * kPointerSize +
           AllocationMemento::kSize)) < MementoFoundCountBits::kMax);
  DCHECK(count < MementoFoundCountBits::kMax);
  set_pretenure_data(MementoFoundCountBits::update(value, count));
}
1685 
1686 
// memento_create_count is a thin alias for the pretenure_create_count field.
int AllocationSite::memento_create_count() { return pretenure_create_count(); }


void AllocationSite::set_memento_create_count(int count) {
  set_pretenure_create_count(count);
}
1693 
1694 
// Bumps the found counter and reports whether enough mementos have been
// observed to warrant a pretenuring decision. Zombie sites are skipped.
bool AllocationSite::IncrementMementoFoundCount(int increment) {
  if (IsZombie()) return false;

  int value = memento_found_count();
  set_memento_found_count(value + increment);
  return memento_found_count() >= kPretenureMinimumCreated;
}


// Counts one more memento created for this site; only meaningful while
// allocation-site pretenuring is enabled.
inline void AllocationSite::IncrementMementoCreateCount() {
  DCHECK(FLAG_allocation_site_pretenuring);
  int value = memento_create_count();
  set_memento_create_count(value + 1);
}
1709 
1710 
// Advances the pretenuring state machine based on the observed memento
// ratio. Returns true iff dependent code must be deoptimized, which happens
// exactly on the transition into the kTenure state.
inline bool AllocationSite::MakePretenureDecision(
    PretenureDecision current_decision,
    double ratio,
    bool maximum_size_scavenge) {
  // Here we just allow state transitions from undecided or maybe tenure
  // to don't tenure, maybe tenure, or tenure.
  if ((current_decision == kUndecided || current_decision == kMaybeTenure)) {
    if (ratio >= kPretenureRatio) {
      // We just transition into tenure state when the semi-space was at
      // maximum capacity.
      if (maximum_size_scavenge) {
        set_deopt_dependent_code(true);
        set_pretenure_decision(kTenure);
        // Currently we just need to deopt when we make a state transition to
        // tenure.
        return true;
      }
      set_pretenure_decision(kMaybeTenure);
    } else {
      set_pretenure_decision(kDontTenure);
    }
  }
  return false;
}
1735 
1736 
// Folds the per-GC memento statistics into a pretenuring decision, traces
// them if requested, and resets the counters for the next GC cycle.
// Returns true iff dependent code must be deoptimized.
inline bool AllocationSite::DigestPretenuringFeedback(
    bool maximum_size_scavenge) {
  bool deopt = false;
  int create_count = memento_create_count();
  int found_count = memento_found_count();
  bool minimum_mementos_created = create_count >= kPretenureMinimumCreated;
  // The ratio only feeds a decision once enough mementos exist; it is also
  // computed (possibly from small counts) purely for the trace output.
  double ratio =
      minimum_mementos_created || FLAG_trace_pretenuring_statistics ?
          static_cast<double>(found_count) / create_count : 0.0;
  PretenureDecision current_decision = pretenure_decision();

  if (minimum_mementos_created) {
    deopt = MakePretenureDecision(
        current_decision, ratio, maximum_size_scavenge);
  }

  if (FLAG_trace_pretenuring_statistics) {
    PrintIsolate(GetIsolate(),
                 "pretenuring: AllocationSite(%p): (created, found, ratio) "
                 "(%d, %d, %f) %s => %s\n",
                 static_cast<void*>(this), create_count, found_count, ratio,
                 PretenureDecisionName(current_decision),
                 PretenureDecisionName(pretenure_decision()));
  }

  // Clear feedback calculation fields until the next gc.
  set_memento_found_count(0);
  set_memento_create_count(0);
  return deopt;
}
1767 
1768 
// A memento is valid only if it points at a live (non-zombie) allocation
// site.
bool AllocationMemento::IsValid() {
  return allocation_site()->IsAllocationSite() &&
         !AllocationSite::cast(allocation_site())->IsZombie();
}


AllocationSite* AllocationMemento::GetAllocationSite() {
  DCHECK(IsValid());
  return AllocationSite::cast(allocation_site());
}

// Raw, unchecked access for callers that must not assume the site is still
// valid.
Address AllocationMemento::GetAllocationSiteUnchecked() {
  return reinterpret_cast<Address>(allocation_site());
}
1783 
EnsureCanContainHeapObjectElements(Handle<JSObject> object)1784 void JSObject::EnsureCanContainHeapObjectElements(Handle<JSObject> object) {
1785   JSObject::ValidateElements(object);
1786   ElementsKind elements_kind = object->map()->elements_kind();
1787   if (!IsFastObjectElementsKind(elements_kind)) {
1788     if (IsFastHoleyElementsKind(elements_kind)) {
1789       TransitionElementsKind(object, FAST_HOLEY_ELEMENTS);
1790     } else {
1791       TransitionElementsKind(object, FAST_ELEMENTS);
1792     }
1793   }
1794 }
1795 
1796 
// Transitions |object|'s elements kind, if necessary, so that it can store
// the |count| raw values in |objects| without further per-element checks.
void JSObject::EnsureCanContainElements(Handle<JSObject> object,
                                        Object** objects,
                                        uint32_t count,
                                        EnsureElementsMode mode) {
  ElementsKind current_kind = object->GetElementsKind();
  ElementsKind target_kind = current_kind;
  {
    DisallowHeapAllocation no_allocation;
    DCHECK(mode != ALLOW_COPIED_DOUBLE_ELEMENTS);
    bool is_holey = IsFastHoleyElementsKind(current_kind);
    // FAST_HOLEY_ELEMENTS already accommodates anything; nothing to do.
    if (current_kind == FAST_HOLEY_ELEMENTS) return;
    Object* the_hole = object->GetHeap()->the_hole_value();
    for (uint32_t i = 0; i < count; ++i) {
      Object* current = *objects++;
      if (current == the_hole) {
        is_holey = true;
        target_kind = GetHoleyElementsKind(target_kind);
      } else if (!current->IsSmi()) {
        if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS && current->IsNumber()) {
          if (IsFastSmiElementsKind(target_kind)) {
            if (is_holey) {
              target_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
            } else {
              target_kind = FAST_DOUBLE_ELEMENTS;
            }
          }
        } else if (is_holey) {
          // Holey object elements is the most general target; stop scanning.
          target_kind = FAST_HOLEY_ELEMENTS;
          break;
        } else {
          target_kind = FAST_ELEMENTS;
        }
      }
    }
  }
  if (target_kind != current_kind) {
    TransitionElementsKind(object, target_kind);
  }
}
1836 
1837 
// Variant taking a backing store: dispatches to the element-wise overload
// for tagged arrays, or picks a double elements kind for FixedDoubleArray.
void JSObject::EnsureCanContainElements(Handle<JSObject> object,
                                        Handle<FixedArrayBase> elements,
                                        uint32_t length,
                                        EnsureElementsMode mode) {
  Heap* heap = object->GetHeap();
  if (elements->map() != heap->fixed_double_array_map()) {
    DCHECK(elements->map() == heap->fixed_array_map() ||
           elements->map() == heap->fixed_cow_array_map());
    if (mode == ALLOW_COPIED_DOUBLE_ELEMENTS) {
      mode = DONT_ALLOW_DOUBLE_ELEMENTS;
    }
    Object** objects =
        Handle<FixedArray>::cast(elements)->GetFirstElementAddress();
    EnsureCanContainElements(object, objects, length, mode);
    return;
  }

  DCHECK(mode == ALLOW_COPIED_DOUBLE_ELEMENTS);
  if (object->GetElementsKind() == FAST_HOLEY_SMI_ELEMENTS) {
    TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
  } else if (object->GetElementsKind() == FAST_SMI_ELEMENTS) {
    Handle<FixedDoubleArray> double_array =
        Handle<FixedDoubleArray>::cast(elements);
    // Any hole in the source forces the holey double kind.
    for (uint32_t i = 0; i < length; ++i) {
      if (double_array->is_the_hole(i)) {
        TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
        return;
      }
    }
    TransitionElementsKind(object, FAST_DOUBLE_ELEMENTS);
  }
}
1870 
1871 
// Migrates |object| to |new_map| and installs |value| as its elements,
// asserting that the backing store type matches the new map's elements kind.
void JSObject::SetMapAndElements(Handle<JSObject> object,
                                 Handle<Map> new_map,
                                 Handle<FixedArrayBase> value) {
  JSObject::MigrateToMap(object, new_map);
  DCHECK((object->map()->has_fast_smi_or_object_elements() ||
          (*value == object->GetHeap()->empty_fixed_array()) ||
          object->map()->has_fast_string_wrapper_elements()) ==
         (value->map() == object->GetHeap()->fixed_array_map() ||
          value->map() == object->GetHeap()->fixed_cow_array_map()));
  DCHECK((*value == object->GetHeap()->empty_fixed_array()) ||
         (object->map()->has_fast_double_elements() ==
          value->IsFixedDoubleArray()));
  object->set_elements(*value);
}
1886 
1887 
void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kElementsOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode);
}


// Installs the map's canonical initial elements. NOTE(review): this write
// skips the write barrier — presumably GetInitialElements() returns an
// immortal root array; confirm before changing.
void JSObject::initialize_elements() {
  FixedArrayBase* elements = map()->GetInitialElements();
  WRITE_FIELD(this, kElementsOffset, elements);
}
1898 
1899 
// Convenience forwarders to the map's interceptor accessors.
InterceptorInfo* JSObject::GetIndexedInterceptor() {
  return map()->GetIndexedInterceptor();
}

InterceptorInfo* JSObject::GetNamedInterceptor() {
  return map()->GetNamedInterceptor();
}

// Interceptors live on the API function data of the map's constructor,
// which must therefore be an API function.
InterceptorInfo* Map::GetNamedInterceptor() {
  DCHECK(has_named_interceptor());
  JSFunction* constructor = JSFunction::cast(GetConstructor());
  DCHECK(constructor->shared()->IsApiFunction());
  return InterceptorInfo::cast(
      constructor->shared()->get_api_func_data()->named_property_handler());
}

InterceptorInfo* Map::GetIndexedInterceptor() {
  DCHECK(has_indexed_interceptor());
  JSFunction* constructor = JSFunction::cast(GetConstructor());
  DCHECK(constructor->shared()->IsApiFunction());
  return InterceptorInfo::cast(
      constructor->shared()->get_api_func_data()->indexed_property_handler());
}
1923 
// Raw double stored for this oddball's to-number conversion.
double Oddball::to_number_raw() const {
  return READ_DOUBLE_FIELD(this, kToNumberRawOffset);
}

void Oddball::set_to_number_raw(double value) {
  WRITE_DOUBLE_FIELD(this, kToNumberRawOffset, value);
}

// Field accessors for the remaining Oddball conversion caches.
ACCESSORS(Oddball, to_string, String, kToStringOffset)
ACCESSORS(Oddball, to_number, Object, kToNumberOffset)
ACCESSORS(Oddball, to_boolean, Oddball, kToBooleanOffset)
ACCESSORS(Oddball, type_of, String, kTypeOfOffset)
1936 
1937 
// The oddball kind is stored as a Smi in the kKindOffset field.
byte Oddball::kind() const {
  return Smi::cast(READ_FIELD(this, kKindOffset))->value();
}


void Oddball::set_kind(byte value) {
  WRITE_FIELD(this, kKindOffset, Smi::FromInt(value));
}


// static
Handle<Object> Oddball::ToNumber(Handle<Oddball> input) {
  return handle(input->to_number(), input->GetIsolate());
}
1952 
1953 
ACCESSORS(Cell, value, Object, kValueOffset)
ACCESSORS(PropertyCell, dependent_code, DependentCode, kDependentCodeOffset)
ACCESSORS(PropertyCell, property_details_raw, Object, kDetailsOffset)
ACCESSORS(PropertyCell, value, Object, kValueOffset)


// Typed wrappers around the raw Smi-encoded property details field.
PropertyDetails PropertyCell::property_details() {
  return PropertyDetails(Smi::cast(property_details_raw()));
}


void PropertyCell::set_property_details(PropertyDetails details) {
  set_property_details_raw(details.AsSmi());
}
1968 
1969 
Object* WeakCell::value() const { return READ_FIELD(this, kValueOffset); }


void WeakCell::clear() {
  // Either the garbage collector is clearing the cell or we are simply
  // initializing the root empty weak cell.
  DCHECK(GetHeap()->gc_state() == Heap::MARK_COMPACT ||
         this == GetHeap()->empty_weak_cell());
  // A cleared cell holds Smi zero (matching cleared() below).
  WRITE_FIELD(this, kValueOffset, Smi::FromInt(0));
}


void WeakCell::initialize(HeapObject* val) {
  WRITE_FIELD(this, kValueOffset, val);
  // We just have to execute the generational barrier here because we never
  // mark through a weak cell and collect evacuation candidates when we process
  // all weak cells.
  WriteBarrierMode mode =
      Page::FromAddress(this->address())->IsFlagSet(Page::BLACK_PAGE)
          ? UPDATE_WRITE_BARRIER
          : UPDATE_WEAK_WRITE_BARRIER;
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kValueOffset, val, mode);
}


bool WeakCell::cleared() const { return value() == Smi::FromInt(0); }


// Link to the next weak cell in the processing list.
Object* WeakCell::next() const { return READ_FIELD(this, kNextOffset); }


void WeakCell::set_next(Object* val, WriteBarrierMode mode) {
  WRITE_FIELD(this, kNextOffset, val);
  if (mode == UPDATE_WRITE_BARRIER) {
    WRITE_BARRIER(GetHeap(), this, kNextOffset, val);
  }
}


// The caller passes the hole value to avoid an extra heap lookup here; the
// DCHECK confirms it really is the hole.
void WeakCell::clear_next(Object* the_hole_value) {
  DCHECK_EQ(GetHeap()->the_hole_value(), the_hole_value);
  set_next(the_hole_value, SKIP_WRITE_BARRIER);
}

bool WeakCell::next_cleared() { return next()->IsTheHole(GetIsolate()); }
2015 
int JSObject::GetHeaderSize() { return GetHeaderSize(map()->instance_type()); }


// Returns the fixed header size in bytes for a JS object of the given
// instance type; internal fields and in-object properties follow the header.
int JSObject::GetHeaderSize(InstanceType type) {
  // Check for the most common kind of JavaScript object before
  // falling into the generic switch. This speeds up the internal
  // field operations considerably on average.
  if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
  switch (type) {
    case JS_API_OBJECT_TYPE:
    case JS_SPECIAL_API_OBJECT_TYPE:
      return JSObject::kHeaderSize;
    case JS_GENERATOR_OBJECT_TYPE:
      return JSGeneratorObject::kSize;
    case JS_MODULE_TYPE:
      return JSModule::kSize;
    case JS_GLOBAL_PROXY_TYPE:
      return JSGlobalProxy::kSize;
    case JS_GLOBAL_OBJECT_TYPE:
      return JSGlobalObject::kSize;
    case JS_BOUND_FUNCTION_TYPE:
      return JSBoundFunction::kSize;
    case JS_FUNCTION_TYPE:
      return JSFunction::kSize;
    case JS_VALUE_TYPE:
      return JSValue::kSize;
    case JS_DATE_TYPE:
      return JSDate::kSize;
    case JS_ARRAY_TYPE:
      return JSArray::kSize;
    case JS_ARRAY_BUFFER_TYPE:
      return JSArrayBuffer::kSize;
    case JS_TYPED_ARRAY_TYPE:
      return JSTypedArray::kSize;
    case JS_DATA_VIEW_TYPE:
      return JSDataView::kSize;
    case JS_SET_TYPE:
      return JSSet::kSize;
    case JS_MAP_TYPE:
      return JSMap::kSize;
    case JS_SET_ITERATOR_TYPE:
      return JSSetIterator::kSize;
    case JS_MAP_ITERATOR_TYPE:
      return JSMapIterator::kSize;
    case JS_WEAK_MAP_TYPE:
      return JSWeakMap::kSize;
    case JS_WEAK_SET_TYPE:
      return JSWeakSet::kSize;
    case JS_PROMISE_TYPE:
      return JSObject::kHeaderSize;
    case JS_REGEXP_TYPE:
      return JSRegExp::kSize;
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
      return JSObject::kHeaderSize;
    case JS_MESSAGE_OBJECT_TYPE:
      return JSMessageObject::kSize;
    case JS_ARGUMENTS_TYPE:
      return JSArgumentsObject::kHeaderSize;
    case JS_ERROR_TYPE:
      return JSObject::kHeaderSize;
    default:
      UNREACHABLE();
      return 0;
  }
}
2081 
2082 
// Number of embedder (internal) fields: whatever pointer-sized slots remain
// after subtracting the header and the in-object properties.
int JSObject::GetInternalFieldCount(Map* map) {
  int instance_size = map->instance_size();
  // Variable-size objects carry no internal fields.
  if (instance_size == kVariableSizeSentinel) return 0;
  InstanceType instance_type = map->instance_type();
  return ((instance_size - GetHeaderSize(instance_type)) >> kPointerSizeLog2) -
         map->GetInObjectProperties();
}


int JSObject::GetInternalFieldCount() { return GetInternalFieldCount(map()); }


// Byte offset of internal field |index| from the start of the object.
int JSObject::GetInternalFieldOffset(int index) {
  DCHECK(index < GetInternalFieldCount() && index >= 0);
  return GetHeaderSize() + (kPointerSize * index);
}
2099 
2100 
Object* JSObject::GetInternalField(int index) {
  DCHECK(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));
}


void JSObject::SetInternalField(int index, Object* value) {
  DCHECK(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  int offset = GetHeaderSize() + (kPointerSize * index);
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(GetHeap(), this, offset, value);
}


// Smi overload: a Smi is not a heap pointer, so no write barrier is emitted.
void JSObject::SetInternalField(int index, Smi* value) {
  DCHECK(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  int offset = GetHeaderSize() + (kPointerSize * index);
  WRITE_FIELD(this, offset, value);
}
2129 
2130 
// True when the field stores a raw (unboxed) double rather than a tagged
// pointer; only possible with the unbox-double-fields flag enabled.
bool JSObject::IsUnboxedDoubleField(FieldIndex index) {
  if (!FLAG_unbox_double_fields) return false;
  return map()->IsUnboxedDoubleField(index);
}


bool Map::IsUnboxedDoubleField(FieldIndex index) {
  if (!FLAG_unbox_double_fields) return false;
  // Only in-object, non-hidden fields can be untagged per the layout
  // descriptor.
  if (index.is_hidden_field() || !index.is_inobject()) return false;
  return !layout_descriptor()->IsTagged(index.property_index());
}
2142 
2143 
// Access fast-case object properties at index. The use of these routines
// is needed to correctly distinguish between properties stored in-object and
// properties stored in the properties array.
Object* JSObject::RawFastPropertyAt(FieldIndex index) {
  DCHECK(!IsUnboxedDoubleField(index));
  if (index.is_inobject()) {
    return READ_FIELD(this, index.offset());
  } else {
    return properties()->get(index.outobject_array_index());
  }
}


// Reads a raw unboxed double; only valid for fields the layout descriptor
// marks as untagged.
double JSObject::RawFastDoublePropertyAt(FieldIndex index) {
  DCHECK(IsUnboxedDoubleField(index));
  return READ_DOUBLE_FIELD(this, index.offset());
}
2161 
2162 
// Stores a tagged value into an in-object slot or the properties array.
void JSObject::RawFastPropertyAtPut(FieldIndex index, Object* value) {
  if (index.is_inobject()) {
    int offset = index.offset();
    WRITE_FIELD(this, offset, value);
    WRITE_BARRIER(GetHeap(), this, offset, value);
  } else {
    properties()->set(index.outobject_array_index(), value);
  }
}


// Stores a raw double into an unboxed field; no write barrier is needed for
// raw doubles.
void JSObject::RawFastDoublePropertyAtPut(FieldIndex index, double value) {
  WRITE_DOUBLE_FIELD(this, index.offset(), value);
}


void JSObject::FastPropertyAtPut(FieldIndex index, Object* value) {
  if (IsUnboxedDoubleField(index)) {
    DCHECK(value->IsMutableHeapNumber());
    // Unbox the heap number's payload into the raw double field.
    RawFastDoublePropertyAtPut(index, HeapNumber::cast(value)->value());
  } else {
    RawFastPropertyAtPut(index, value);
  }
}
2187 
// Writes |value| into the field described by |descriptor|, handling the
// double representation by either storing unboxed or mutating the existing
// mutable HeapNumber box in place.
void JSObject::WriteToField(int descriptor, PropertyDetails details,
                            Object* value) {
  DCHECK(details.type() == DATA);
  DisallowHeapAllocation no_gc;
  FieldIndex index = FieldIndex::ForDescriptor(map(), descriptor);
  if (details.representation().IsDouble()) {
    // Nothing more to be done.
    if (value->IsUninitialized(this->GetIsolate())) {
      return;
    }
    if (IsUnboxedDoubleField(index)) {
      RawFastDoublePropertyAtPut(index, value->Number());
    } else {
      HeapNumber* box = HeapNumber::cast(RawFastPropertyAt(index));
      DCHECK(box->IsMutableHeapNumber());
      box->set_value(value->Number());
    }
  } else {
    RawFastPropertyAtPut(index, value);
  }
}

// Convenience overload that looks the details up in the instance
// descriptors.
void JSObject::WriteToField(int descriptor, Object* value) {
  DescriptorArray* desc = map()->instance_descriptors();
  PropertyDetails details = desc->GetDetails(descriptor);
  WriteToField(descriptor, details, value);
}
2215 
int JSObject::GetInObjectPropertyOffset(int index) {
  return map()->GetInObjectPropertyOffset(index);
}


Object* JSObject::InObjectPropertyAt(int index) {
  int offset = GetInObjectPropertyOffset(index);
  return READ_FIELD(this, offset);
}


// Writes an in-object property and returns the stored value.
Object* JSObject::InObjectPropertyAtPut(int index,
                                        Object* value,
                                        WriteBarrierMode mode) {
  // Adjust for the number of properties stored in the object.
  int offset = GetInObjectPropertyOffset(index);
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
  return value;
}
2236 
2237 
// Fills the object's body from |start_offset| to the end of the instance:
// the pre-allocated (used) property region gets |pre_allocated_value|, the
// unused tail gets |filler_value|. Both values must not require a write
// barrier (the DCHECKs enforce they are not in new space).
void JSObject::InitializeBody(Map* map, int start_offset,
                              Object* pre_allocated_value,
                              Object* filler_value) {
  DCHECK(!filler_value->IsHeapObject() ||
         !GetHeap()->InNewSpace(filler_value));
  DCHECK(!pre_allocated_value->IsHeapObject() ||
         !GetHeap()->InNewSpace(pre_allocated_value));
  int size = map->instance_size();
  int offset = start_offset;
  if (filler_value != pre_allocated_value) {
    int end_of_pre_allocated_offset =
        size - (map->unused_property_fields() * kPointerSize);
    DCHECK_LE(kHeaderSize, end_of_pre_allocated_offset);
    while (offset < end_of_pre_allocated_offset) {
      WRITE_FIELD(this, offset, pre_allocated_value);
      offset += kPointerSize;
    }
  }
  while (offset < size) {
    WRITE_FIELD(this, offset, filler_value);
    offset += kPointerSize;
  }
}
2261 
2262 
TooManyFastProperties(StoreFromKeyed store_mode)2263 bool Map::TooManyFastProperties(StoreFromKeyed store_mode) {
2264   if (unused_property_fields() != 0) return false;
2265   if (is_prototype_map()) return false;
2266   int minimum = store_mode == CERTAINLY_NOT_STORE_FROM_KEYED ? 128 : 12;
2267   int limit = Max(minimum, GetInObjectProperties());
2268   int external = NumberOfFields() - GetInObjectProperties();
2269   return external > limit;
2270 }
2271 
2272 
// Fills every pointer-sized slot after the header with undefined.
void Struct::InitializeBody(int object_size) {
  Object* value = GetHeap()->undefined_value();
  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
    WRITE_FIELD(this, offset, value);
  }
}
2279 
bool Object::ToArrayLength(uint32_t* index) { return Object::ToUint32(index); }


// An array index is any uint32 except kMaxUInt32, which is reserved as the
// maximum array length.
bool Object::ToArrayIndex(uint32_t* index) {
  return Object::ToUint32(index) && *index != kMaxUInt32;
}
2286 
2287 
// Debug-only check that a value returned from an API callback is one of the
// types the API is allowed to produce; fatals otherwise.
void Object::VerifyApiCallResultType() {
#if DEBUG
  if (IsSmi()) return;
  DCHECK(IsHeapObject());
  Isolate* isolate = HeapObject::cast(this)->GetIsolate();
  if (!(IsString() || IsSymbol() || IsJSReceiver() || IsHeapNumber() ||
        IsSimd128Value() || IsUndefined(isolate) || IsTrue(isolate) ||
        IsFalse(isolate) || IsNull(isolate))) {
    FATAL("API call returned invalid object");
  }
#endif  // DEBUG
}
2300 
2301 
Object* FixedArray::get(int index) const {
  SLOW_DCHECK(index >= 0 && index < this->length());
  return READ_FIELD(this, kHeaderSize + index * kPointerSize);
}

// Handlified variant.
Handle<Object> FixedArray::get(FixedArray* array, int index, Isolate* isolate) {
  return handle(array->get(index), isolate);
}


bool FixedArray::is_the_hole(int index) {
  return get(index) == GetHeap()->the_hole_value();
}


// Smi overload: a Smi is not a heap pointer, so no write barrier is needed.
void FixedArray::set(int index, Smi* value) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map());
  DCHECK(index >= 0 && index < this->length());
  DCHECK(reinterpret_cast<Object*>(value)->IsSmi());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
}


void FixedArray::set(int index, Object* value) {
  // Copy-on-write arrays must never be mutated in place.
  DCHECK_NE(GetHeap()->fixed_cow_array_map(), map());
  DCHECK(IsFixedArray());
  DCHECK_GE(index, 0);
  DCHECK_LT(index, this->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(GetHeap(), this, offset, value);
}
2335 
2336 
double FixedDoubleArray::get_scalar(int index) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  DCHECK(index >= 0 && index < this->length());
  DCHECK(!is_the_hole(index));
  return READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
}


// Raw bit pattern of the element; used to distinguish the hole NaN from
// ordinary doubles.
uint64_t FixedDoubleArray::get_representation(int index) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  DCHECK(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kDoubleSize;
  return READ_UINT64_FIELD(this, offset);
}

// Handlified variant; holes surface as the hole value.
Handle<Object> FixedDoubleArray::get(FixedDoubleArray* array, int index,
                                     Isolate* isolate) {
  if (array->is_the_hole(index)) {
    return isolate->factory()->the_hole_value();
  } else {
    return isolate->factory()->NewNumber(array->get_scalar(index));
  }
}


void FixedDoubleArray::set(int index, double value) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  int offset = kHeaderSize + index * kDoubleSize;
  // Canonicalize NaNs so a stored NaN can never alias the hole bit pattern.
  if (std::isnan(value)) {
    WRITE_DOUBLE_FIELD(this, offset, std::numeric_limits<double>::quiet_NaN());
  } else {
    WRITE_DOUBLE_FIELD(this, offset, value);
  }
  DCHECK(!is_the_hole(index));
}


// Holes are encoded as a dedicated NaN bit pattern (kHoleNanInt64).
void FixedDoubleArray::set_the_hole(int index) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  int offset = kHeaderSize + index * kDoubleSize;
  WRITE_UINT64_FIELD(this, offset, kHoleNanInt64);
}


bool FixedDoubleArray::is_the_hole(int index) {
  return get_representation(index) == kHoleNanInt64;
}


double* FixedDoubleArray::data_start() {
  return reinterpret_cast<double*>(FIELD_ADDR(this, kHeaderSize));
}


// Fills the half-open range [from, to) with holes.
void FixedDoubleArray::FillWithHoles(int from, int to) {
  for (int i = from; i < to; i++) {
    set_the_hole(i);
  }
}
2400 
2401 
// Returns the element at |index|, dereferencing the WeakCell wrapper.
// A Smi result denotes an empty/cleared slot.
Object* WeakFixedArray::Get(int index) const {
  Object* raw = FixedArray::cast(this)->get(index + kFirstIndex);
  if (raw->IsSmi()) return raw;
  DCHECK(raw->IsWeakCell());
  return WeakCell::cast(raw)->value();
}


bool WeakFixedArray::IsEmptySlot(int index) const {
  DCHECK(index < Length());
  return Get(index)->IsSmi();
}


// Empties a slot by storing Smi zero.
void WeakFixedArray::Clear(int index) {
  FixedArray::cast(this)->set(index + kFirstIndex, Smi::FromInt(0));
}


// Logical length, excluding the bookkeeping slots at the front.
int WeakFixedArray::Length() const {
  return FixedArray::cast(this)->length() - kFirstIndex;
}


int WeakFixedArray::last_used_index() const {
  return Smi::cast(FixedArray::cast(this)->get(kLastUsedIndexIndex))->value();
}


void WeakFixedArray::set_last_used_index(int index) {
  FixedArray::cast(this)->set(kLastUsedIndexIndex, Smi::FromInt(index));
}


// Advances past empty slots and returns the next live element cast to T,
// or NULL once the list is exhausted.
template <class T>
T* WeakFixedArray::Iterator::Next() {
  if (list_ != NULL) {
    // Assert that list did not change during iteration.
    DCHECK_EQ(last_used_index_, list_->last_used_index());
    while (index_ < list_->Length()) {
      Object* item = list_->Get(index_++);
      if (item != Empty()) return T::cast(item);
    }
    list_ = NULL;
  }
  return NULL;
}
2449 
2450 
// Logical number of elements stored (not the backing-store capacity). A
// zero-length backing store means the length bookkeeping slot is absent.
int ArrayList::Length() {
  if (FixedArray::cast(this)->length() == 0) return 0;
  return Smi::cast(FixedArray::cast(this)->get(kLengthIndex))->value();
}
2455 
2456 
SetLength(int length)2457 void ArrayList::SetLength(int length) {
2458   return FixedArray::cast(this)->set(kLengthIndex, Smi::FromInt(length));
2459 }
2460 
2461 
// Returns the element at |index|, skipping the bookkeeping prefix.
Object* ArrayList::Get(int index) {
  return FixedArray::cast(this)->get(kFirstIndex + index);
}


// Raw slot address of element |index|.
Object** ArrayList::Slot(int index) {
  return data_start() + kFirstIndex + index;
}


// Stores |obj| at |index| with the default (full) write barrier.
void ArrayList::Set(int index, Object* obj) {
  FixedArray::cast(this)->set(kFirstIndex + index, obj);
}


// Overwrites the element at |index| with undefined. The caller passes the
// undefined value in; the DCHECK verifies it, which lets the store skip the
// write barrier.
void ArrayList::Clear(int index, Object* undefined) {
  DCHECK(undefined->IsUndefined(GetIsolate()));
  FixedArray::cast(this)
      ->set(kFirstIndex + index, undefined, SKIP_WRITE_BARRIER);
}
2482 
2483 
GetWriteBarrierMode(const DisallowHeapAllocation & promise)2484 WriteBarrierMode HeapObject::GetWriteBarrierMode(
2485     const DisallowHeapAllocation& promise) {
2486   Heap* heap = GetHeap();
2487   if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
2488   if (heap->InNewSpace(this)) return SKIP_WRITE_BARRIER;
2489   return UPDATE_WRITE_BARRIER;
2490 }
2491 
2492 
// Returns the allocation alignment required for this object's type. Only
// 32-bit hosts can need more than word alignment: non-empty double-backed
// arrays request kDoubleAligned, while HeapNumber and SIMD values request
// the corresponding "unaligned" placements.
AllocationAlignment HeapObject::RequiredAlignment() {
#ifdef V8_HOST_ARCH_32_BIT
  if ((IsFixedFloat64Array() || IsFixedDoubleArray()) &&
      FixedArrayBase::cast(this)->length() != 0) {
    return kDoubleAligned;
  }
  if (IsHeapNumber()) return kDoubleUnaligned;
  if (IsSimd128Value()) return kSimd128Unaligned;
#endif  // V8_HOST_ARCH_32_BIT
  return kWordAligned;
}
2504 
2505 
// Stores |value| at |index| using the caller-supplied write barrier mode.
// Copy-on-write arrays must never be written through this path.
void FixedArray::set(int index,
                     Object* value,
                     WriteBarrierMode mode) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map());
  DCHECK(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
}


// Barrier-free store; only valid when |value| is not in new space (enforced
// by the DCHECK), so no old-to-new remembered-set entry is ever required.
void FixedArray::NoWriteBarrierSet(FixedArray* array,
                                   int index,
                                   Object* value) {
  DCHECK(array->map() != array->GetHeap()->fixed_cow_array_map());
  DCHECK(index >= 0 && index < array->length());
  DCHECK(!array->GetHeap()->InNewSpace(value));
  WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
}


// Stores the undefined sentinel at |index|. The DCHECK verifies undefined
// is not in new space, so the write barrier can be skipped.
void FixedArray::set_undefined(int index) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map());
  DCHECK(index >= 0 && index < this->length());
  DCHECK(!GetHeap()->InNewSpace(GetHeap()->undefined_value()));
  WRITE_FIELD(this,
              kHeaderSize + index * kPointerSize,
              GetHeap()->undefined_value());
}
2535 
2536 
// Stores the null sentinel at |index|; the DCHECK verifies null is not in
// new space, so the write barrier is skipped.
// NOTE(review): unlike set_undefined/set_the_hole this does not DCHECK
// against copy-on-write arrays — confirm whether that is intentional.
void FixedArray::set_null(int index) {
  DCHECK(index >= 0 && index < this->length());
  DCHECK(!GetHeap()->InNewSpace(GetHeap()->null_value()));
  WRITE_FIELD(this,
              kHeaderSize + index * kPointerSize,
              GetHeap()->null_value());
}


// Stores the hole sentinel at |index|; the DCHECK verifies the hole is not
// in new space, so the write barrier is skipped.
void FixedArray::set_the_hole(int index) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map());
  DCHECK(index >= 0 && index < this->length());
  DCHECK(!GetHeap()->InNewSpace(GetHeap()->the_hole_value()));
  WRITE_FIELD(this,
              kHeaderSize + index * kPointerSize,
              GetHeap()->the_hole_value());
}
2554 
2555 
FillWithHoles(int from,int to)2556 void FixedArray::FillWithHoles(int from, int to) {
2557   for (int i = from; i < to; i++) {
2558     set_the_hole(i);
2559   }
2560 }
2561 
2562 
// Address of the first element slot.
Object** FixedArray::data_start() {
  return HeapObject::RawField(this, kHeaderSize);
}


// Address of the slot holding element |index|.
Object** FixedArray::RawFieldOfElementAt(int index) {
  return HeapObject::RawField(this, OffsetOfElementAt(index));
}
2571 
2572 
// An empty descriptor array is shorter than the bookkeeping prefix; the
// DCHECK pins that shape to the canonical empty_descriptor_array.
bool DescriptorArray::IsEmpty() {
  DCHECK(length() >= kFirstIndex ||
         this == GetHeap()->empty_descriptor_array());
  return length() < kFirstIndex;
}


// Number of descriptors in use, read from the bookkeeping slot (as opposed
// to the allocated storage capacity below).
int DescriptorArray::number_of_descriptors() {
  DCHECK(length() >= kFirstIndex || IsEmpty());
  int len = length();
  return len == 0 ? 0 : Smi::cast(get(kDescriptorLengthIndex))->value();
}


// Capacity: how many descriptors fit in the allocated backing store.
int DescriptorArray::number_of_descriptors_storage() {
  int len = length();
  return len == 0 ? 0 : (len - kFirstIndex) / kDescriptorSize;
}


// Unused descriptor slots remaining before the array must be grown.
int DescriptorArray::NumberOfSlackDescriptors() {
  return number_of_descriptors_storage() - number_of_descriptors();
}


// Raw bookkeeping write; does not grow or shrink the backing store.
void DescriptorArray::SetNumberOfDescriptors(int number_of_descriptors) {
  WRITE_FIELD(
      this, kDescriptorLengthOffset, Smi::FromInt(number_of_descriptors));
}


// Alias used by the generic Search/BinarySearch/LinearSearch templates.
inline int DescriptorArray::number_of_entries() {
  return number_of_descriptors();
}
2607 
2608 
// The enum-cache slot holds a Smi when no cache is present, and a "bridge"
// FixedArray otherwise.
bool DescriptorArray::HasEnumCache() {
  return !IsEmpty() && !get(kEnumCacheIndex)->IsSmi();
}


// Copies |array|'s enum-cache slot (bridge or Smi marker) verbatim.
void DescriptorArray::CopyEnumCacheFrom(DescriptorArray* array) {
  set(kEnumCacheIndex, array->get(kEnumCacheIndex));
}


// Returns the cached enumeration keys, stored behind the bridge array.
FixedArray* DescriptorArray::GetEnumCache() {
  DCHECK(HasEnumCache());
  FixedArray* bridge = FixedArray::cast(get(kEnumCacheIndex));
  return FixedArray::cast(bridge->get(kEnumCacheBridgeCacheIndex));
}
2624 
2625 
HasEnumIndicesCache()2626 bool DescriptorArray::HasEnumIndicesCache() {
2627   if (IsEmpty()) return false;
2628   Object* object = get(kEnumCacheIndex);
2629   if (object->IsSmi()) return false;
2630   FixedArray* bridge = FixedArray::cast(object);
2631   return !bridge->get(kEnumCacheBridgeIndicesCacheIndex)->IsSmi();
2632 }
2633 
2634 
// Returns the cached enumeration indices; the cache must be present.
FixedArray* DescriptorArray::GetEnumIndicesCache() {
  DCHECK(HasEnumIndicesCache());
  FixedArray* bridge = FixedArray::cast(get(kEnumCacheIndex));
  return FixedArray::cast(bridge->get(kEnumCacheBridgeIndicesCacheIndex));
}
2640 
2641 
GetEnumCacheSlot()2642 Object** DescriptorArray::GetEnumCacheSlot() {
2643   DCHECK(HasEnumCache());
2644   return HeapObject::RawField(reinterpret_cast<HeapObject*>(this),
2645                               kEnumCacheOffset);
2646 }
2647 
// Perform a binary search in a fixed array.
// Finds |name| in |array|'s hash-sorted key order. The binary search first
// locates the lower bound of |name|'s hash; the trailing linear scan then
// walks the run of equal-hash entries looking for an identity match. With
// search_mode == ALL_ENTRIES, a non-null |out_insertion_index| receives the
// sorted position at which |name| would be inserted when it is not found.
template <SearchMode search_mode, typename T>
int BinarySearch(T* array, Name* name, int valid_entries,
                 int* out_insertion_index) {
  DCHECK(search_mode == ALL_ENTRIES || out_insertion_index == NULL);
  int low = 0;
  int high = array->number_of_entries() - 1;
  uint32_t hash = name->hash_field();
  int limit = high;

  DCHECK(low <= high);

  // Classic lower-bound loop: narrow [low, high] to the first entry whose
  // hash is >= |hash|.
  while (low != high) {
    int mid = low + (high - low) / 2;
    Name* mid_name = array->GetSortedKey(mid);
    uint32_t mid_hash = mid_name->hash_field();

    if (mid_hash >= hash) {
      high = mid;
    } else {
      low = mid + 1;
    }
  }

  // Scan forward through the entries that share |hash|.
  for (; low <= limit; ++low) {
    int sort_index = array->GetSortedKeyIndex(low);
    Name* entry = array->GetKey(sort_index);
    uint32_t current_hash = entry->hash_field();
    if (current_hash != hash) {
      if (search_mode == ALL_ENTRIES && out_insertion_index != nullptr) {
        *out_insertion_index = sort_index + (current_hash > hash ? 0 : 1);
      }
      return T::kNotFound;
    }
    if (entry == name) {
      // In VALID_ENTRIES mode a match beyond |valid_entries| is invisible.
      if (search_mode == ALL_ENTRIES || sort_index < valid_entries) {
        return sort_index;
      }
      return T::kNotFound;
    }
  }

  // All entries with this hash were examined; insertion goes after them.
  if (search_mode == ALL_ENTRIES && out_insertion_index != nullptr) {
    *out_insertion_index = limit + 1;
  }
  return T::kNotFound;
}
2695 
2696 
// Perform a linear search in this fixed array. len is the number of entry
// indices that are valid.
// In ALL_ENTRIES mode with an insertion index requested, the scan follows
// hash-sorted order so the insertion point can be reported; otherwise it
// scans the first |valid_entries| entries in plain storage order.
template <SearchMode search_mode, typename T>
int LinearSearch(T* array, Name* name, int valid_entries,
                 int* out_insertion_index) {
  if (search_mode == ALL_ENTRIES && out_insertion_index != nullptr) {
    uint32_t hash = name->hash_field();
    int len = array->number_of_entries();
    for (int number = 0; number < len; number++) {
      int sorted_index = array->GetSortedKeyIndex(number);
      Name* entry = array->GetKey(sorted_index);
      uint32_t current_hash = entry->hash_field();
      // Entries are visited in ascending hash order, so the first larger
      // hash marks the insertion point.
      if (current_hash > hash) {
        *out_insertion_index = sorted_index;
        return T::kNotFound;
      }
      if (entry == name) return sorted_index;
    }
    *out_insertion_index = len;
    return T::kNotFound;
  } else {
    DCHECK_LE(valid_entries, array->number_of_entries());
    DCHECK_NULL(out_insertion_index);  // Not supported here.
    for (int number = 0; number < valid_entries; number++) {
      if (array->GetKey(number) == name) return number;
    }
    return T::kNotFound;
  }
}
2726 
2727 
2728 template <SearchMode search_mode, typename T>
Search(T * array,Name * name,int valid_entries,int * out_insertion_index)2729 int Search(T* array, Name* name, int valid_entries, int* out_insertion_index) {
2730   SLOW_DCHECK(array->IsSortedNoDuplicates());
2731 
2732   if (valid_entries == 0) {
2733     if (search_mode == ALL_ENTRIES && out_insertion_index != nullptr) {
2734       *out_insertion_index = 0;
2735     }
2736     return T::kNotFound;
2737   }
2738 
2739   // Fast case: do linear search for small arrays.
2740   const int kMaxElementsForLinearSearch = 8;
2741   if (valid_entries <= kMaxElementsForLinearSearch) {
2742     return LinearSearch<search_mode>(array, name, valid_entries,
2743                                      out_insertion_index);
2744   }
2745 
2746   // Slow case: perform binary search.
2747   return BinarySearch<search_mode>(array, name, valid_entries,
2748                                    out_insertion_index);
2749 }
2750 
2751 
Search(Name * name,int valid_descriptors)2752 int DescriptorArray::Search(Name* name, int valid_descriptors) {
2753   DCHECK(name->IsUniqueName());
2754   return internal::Search<VALID_ENTRIES>(this, name, valid_descriptors, NULL);
2755 }
2756 
// Cached variant of Search(): consults the isolate's DescriptorLookupCache
// first and writes the result of a miss back into the cache.
int DescriptorArray::SearchWithCache(Isolate* isolate, Name* name, Map* map) {
  DCHECK(name->IsUniqueName());
  int number_of_own_descriptors = map->NumberOfOwnDescriptors();
  if (number_of_own_descriptors == 0) return kNotFound;

  DescriptorLookupCache* cache = isolate->descriptor_lookup_cache();
  int number = cache->Lookup(map, name);

  if (number == DescriptorLookupCache::kAbsent) {
    number = Search(name, number_of_own_descriptors);
    cache->Update(map, name, number);
  }

  return number;
}
2772 
// Details of the most recently added own descriptor.
PropertyDetails Map::GetLastDescriptorDetails() {
  return instance_descriptors()->GetDetails(LastAdded());
}


// Descriptor index of the most recently added own property; requires at
// least one own descriptor.
int Map::LastAdded() {
  int number_of_own_descriptors = NumberOfOwnDescriptors();
  DCHECK(number_of_own_descriptors > 0);
  return number_of_own_descriptors - 1;
}


// Decoded from the packed bit_field3.
int Map::NumberOfOwnDescriptors() {
  return NumberOfOwnDescriptorsBits::decode(bit_field3());
}


void Map::SetNumberOfOwnDescriptors(int number) {
  // A map can never own more descriptors than its descriptor array holds.
  DCHECK(number <= instance_descriptors()->number_of_descriptors());
  set_bit_field3(NumberOfOwnDescriptorsBits::update(bit_field3(), number));
}


int Map::EnumLength() { return EnumLengthBits::decode(bit_field3()); }


// Stores the cached enumeration length. kInvalidEnumCacheSentinel marks the
// cache as absent and bypasses the consistency checks.
void Map::SetEnumLength(int length) {
  if (length != kInvalidEnumCacheSentinel) {
    DCHECK(length >= 0);
    DCHECK(length == 0 || instance_descriptors()->HasEnumCache());
    DCHECK(length <= NumberOfOwnDescriptors());
  }
  set_bit_field3(EnumLengthBits::update(bit_field3(), length));
}
2807 
2808 
// Returns the canonical empty elements backing store matching this map's
// elements kind. The result is never in new space (see DCHECK), so callers
// may store it without a write barrier.
FixedArrayBase* Map::GetInitialElements() {
  FixedArrayBase* result = nullptr;
  if (has_fast_elements() || has_fast_string_wrapper_elements()) {
    result = GetHeap()->empty_fixed_array();
  } else if (has_fast_sloppy_arguments_elements()) {
    result = GetHeap()->empty_sloppy_arguments_elements();
  } else if (has_fixed_typed_array_elements()) {
    // Typed arrays need a kind-specific empty array.
    result = GetHeap()->EmptyFixedTypedArrayForMap(this);
  } else {
    UNREACHABLE();
  }
  DCHECK(!GetHeap()->InNewSpace(result));
  return result;
}
2823 
// static
// Convenience wrapper around Reconfigure() that changes one property while
// keeping the map's current elements kind.
Handle<Map> Map::ReconfigureProperty(Handle<Map> map, int modify_index,
                                     PropertyKind new_kind,
                                     PropertyAttributes new_attributes,
                                     Representation new_representation,
                                     Handle<FieldType> new_field_type,
                                     StoreMode store_mode) {
  return Reconfigure(map, map->elements_kind(), modify_index, new_kind,
                     new_attributes, new_representation, new_field_type,
                     store_mode);
}

// static
// Convenience wrapper around Reconfigure() that changes only the elements
// kind (modify_index of -1, i.e. no specific property is touched).
Handle<Map> Map::ReconfigureElementsKind(Handle<Map> map,
                                         ElementsKind new_elements_kind) {
  return Reconfigure(map, new_elements_kind, -1, kData, NONE,
                     Representation::None(), FieldType::None(map->GetIsolate()),
                     ALLOW_IN_DESCRIPTOR);
}
2843 
// Raw slot address of descriptor |descriptor_number|'s key.
Object** DescriptorArray::GetKeySlot(int descriptor_number) {
  DCHECK(descriptor_number < number_of_descriptors());
  return RawFieldOfElementAt(ToKeyIndex(descriptor_number));
}


// A descriptor's slot range starts at its key slot ...
Object** DescriptorArray::GetDescriptorStartSlot(int descriptor_number) {
  return GetKeySlot(descriptor_number);
}


// ... and this returns one past descriptor |descriptor_number - 1|'s value
// slot, i.e. the exclusive end of the first |descriptor_number| descriptors.
Object** DescriptorArray::GetDescriptorEndSlot(int descriptor_number) {
  return GetValueSlot(descriptor_number - 1) + 1;
}


// Key of descriptor |descriptor_number| (storage order).
Name* DescriptorArray::GetKey(int descriptor_number) {
  DCHECK(descriptor_number < number_of_descriptors());
  return Name::cast(get(ToKeyIndex(descriptor_number)));
}


// Maps a position in hash-sorted order to the corresponding storage index,
// which is kept in the details word's pointer field.
int DescriptorArray::GetSortedKeyIndex(int descriptor_number) {
  return GetDetails(descriptor_number).pointer();
}


// Key at position |descriptor_number| of the hash-sorted order.
Name* DescriptorArray::GetSortedKey(int descriptor_number) {
  return GetKey(GetSortedKeyIndex(descriptor_number));
}
2874 
2875 
// Records |pointer| (a storage index) as this descriptor's position in the
// hash-sorted order by rewriting the details word in place.
void DescriptorArray::SetSortedKey(int descriptor_index, int pointer) {
  PropertyDetails details = GetDetails(descriptor_index);
  set(ToDetailsIndex(descriptor_index), details.set_pointer(pointer).AsSmi());
}


// Rewrites only the representation bits of the descriptor's details word.
void DescriptorArray::SetRepresentation(int descriptor_index,
                                        Representation representation) {
  DCHECK(!representation.IsNone());
  PropertyDetails details = GetDetails(descriptor_index);
  set(ToDetailsIndex(descriptor_index),
      details.CopyWithRepresentation(representation).AsSmi());
}
2889 
2890 
// Raw slot address of the descriptor's value.
Object** DescriptorArray::GetValueSlot(int descriptor_number) {
  DCHECK(descriptor_number < number_of_descriptors());
  return RawFieldOfElementAt(ToValueIndex(descriptor_number));
}


// Byte offset of the descriptor's value within this array.
int DescriptorArray::GetValueOffset(int descriptor_number) {
  return OffsetOfElementAt(ToValueIndex(descriptor_number));
}


Object* DescriptorArray::GetValue(int descriptor_number) {
  DCHECK(descriptor_number < number_of_descriptors());
  return get(ToValueIndex(descriptor_number));
}


void DescriptorArray::SetValue(int descriptor_index, Object* value) {
  set(ToValueIndex(descriptor_index), value);
}


// Decodes the descriptor's details word, which is stored as a Smi.
PropertyDetails DescriptorArray::GetDetails(int descriptor_number) {
  DCHECK(descriptor_number < number_of_descriptors());
  Object* details = get(ToDetailsIndex(descriptor_number));
  return PropertyDetails(Smi::cast(details));
}
2918 
2919 
// Property type, decoded from the details word.
PropertyType DescriptorArray::GetType(int descriptor_number) {
  return GetDetails(descriptor_number).type();
}


// Field index of a field-located property; only valid when the details say
// the property lives in a field.
int DescriptorArray::GetFieldIndex(int descriptor_number) {
  DCHECK(GetDetails(descriptor_number).location() == kField);
  return GetDetails(descriptor_number).field_index();
}

// For constant descriptors the value slot holds the constant itself.
Object* DescriptorArray::GetConstant(int descriptor_number) {
  return GetValue(descriptor_number);
}


// For ACCESSOR_CONSTANT descriptors the value slot holds the callbacks
// object.
Object* DescriptorArray::GetCallbacksObject(int descriptor_number) {
  DCHECK(GetType(descriptor_number) == ACCESSOR_CONSTANT);
  return GetValue(descriptor_number);
}


// Unwraps the Foreign in the value slot to the native accessor descriptor.
AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
  DCHECK(GetType(descriptor_number) == ACCESSOR_CONSTANT);
  Foreign* p = Foreign::cast(GetCallbacksObject(descriptor_number));
  return reinterpret_cast<AccessorDescriptor*>(p->foreign_address());
}
2946 
2947 
// Copies descriptor |descriptor_number| out into |desc|, handlifying the
// key and value.
void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
  desc->Init(handle(GetKey(descriptor_number), GetIsolate()),
             handle(GetValue(descriptor_number), GetIsolate()),
             GetDetails(descriptor_number));
}
2953 
2954 
SetDescriptor(int descriptor_number,Descriptor * desc)2955 void DescriptorArray::SetDescriptor(int descriptor_number, Descriptor* desc) {
2956   // Range check.
2957   DCHECK(descriptor_number < number_of_descriptors());
2958   set(ToKeyIndex(descriptor_number), *desc->GetKey());
2959   set(ToValueIndex(descriptor_number), *desc->GetValue());
2960   set(ToDetailsIndex(descriptor_number), desc->GetDetails().AsSmi());
2961 }
2962 
2963 
// Writes |desc|'s key, value and details into descriptor slot
// |descriptor_number|.
void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
  // Range check.
  DCHECK(descriptor_number < number_of_descriptors());

  set(ToKeyIndex(descriptor_number), *desc->GetKey());
  set(ToValueIndex(descriptor_number), *desc->GetValue());
  set(ToDetailsIndex(descriptor_number), desc->GetDetails().AsSmi());
}
2972 
2973 
// Appends |desc| as the next descriptor and splices it into the hash-sorted
// key order with one insertion-sort pass over the sorted indices.
void DescriptorArray::Append(Descriptor* desc) {
  DisallowHeapAllocation no_gc;
  int descriptor_number = number_of_descriptors();
  SetNumberOfDescriptors(descriptor_number + 1);
  Set(descriptor_number, desc);

  uint32_t hash = desc->GetKey()->Hash();

  int insertion;

  // Shift sorted positions with a strictly larger hash one slot right until
  // the insertion point is found.
  for (insertion = descriptor_number; insertion > 0; --insertion) {
    Name* key = GetSortedKey(insertion - 1);
    if (key->Hash() <= hash) break;
    SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
  }

  SetSortedKey(insertion, descriptor_number);
}
2992 
2993 
SwapSortedKeys(int first,int second)2994 void DescriptorArray::SwapSortedKeys(int first, int second) {
2995   int first_key = GetSortedKeyIndex(first);
2996   SetSortedKey(first, GetSortedKeyIndex(second));
2997   SetSortedKey(second, first_key);
2998 }
2999 
3000 
// Property type of the entry this cursor currently points at.
PropertyType DescriptorArray::Entry::type() { return descs_->GetType(index_); }


// Value slot of the current entry (used by callers for accessor entries).
Object* DescriptorArray::Entry::GetCallbackObject() {
  return descs_->GetValue(index_);
}
3007 
3008 
// Number of live entries, read from the bookkeeping slot.
int HashTableBase::NumberOfElements() {
  return Smi::cast(get(kNumberOfElementsIndex))->value();
}


// Number of entries that have been removed (deleted-sentinel slots).
int HashTableBase::NumberOfDeletedElements() {
  return Smi::cast(get(kNumberOfDeletedElementsIndex))->value();
}


// Total number of buckets in the table.
int HashTableBase::Capacity() {
  return Smi::cast(get(kCapacityIndex))->value();
}


void HashTableBase::ElementAdded() {
  SetNumberOfElements(NumberOfElements() + 1);
}


// A removal both decrements the live count and increments the deleted count.
void HashTableBase::ElementRemoved() {
  SetNumberOfElements(NumberOfElements() - 1);
  SetNumberOfDeletedElements(NumberOfDeletedElements() + 1);
}


// Bulk version of ElementRemoved() for |n| entries at once.
void HashTableBase::ElementsRemoved(int n) {
  SetNumberOfElements(NumberOfElements() - n);
  SetNumberOfDeletedElements(NumberOfDeletedElements() + n);
}
3039 
3040 
3041 // static
// Smallest power-of-two capacity that keeps the table at most half full for
// |at_least_space_for| entries, with a floor of 4.
int HashTableBase::ComputeCapacity(int at_least_space_for) {
  const int kMinCapacity = 4;
  int capacity = base::bits::RoundUpToPowerOfTwo32(at_least_space_for * 2);
  return Max(capacity, kMinCapacity);
}
3047 
// A slot content is a real key unless it is one of the two sentinels:
// the hole (deleted entry) or undefined (empty slot).
bool HashTableBase::IsKey(Isolate* isolate, Object* k) {
  Heap* heap = isolate->heap();
  return k != heap->the_hole_value() && k != heap->undefined_value();
}
3052 
IsKey(Object * k)3053 bool HashTableBase::IsKey(Object* k) {
3054   Isolate* isolate = this->GetIsolate();
3055   return !k->IsTheHole(isolate) && !k->IsUndefined(isolate);
3056 }
3057 
3058 
// Raw bookkeeping writes backing the counters above.
void HashTableBase::SetNumberOfElements(int nof) {
  set(kNumberOfElementsIndex, Smi::FromInt(nof));
}


void HashTableBase::SetNumberOfDeletedElements(int nod) {
  set(kNumberOfDeletedElementsIndex, Smi::FromInt(nod));
}
3067 
3068 
// Convenience overload that uses the table's own isolate.
template <typename Derived, typename Shape, typename Key>
int HashTable<Derived, Shape, Key>::FindEntry(Key key) {
  return FindEntry(GetIsolate(), key);
}


// Convenience overload that computes the hash from |key|.
template<typename Derived, typename Shape, typename Key>
int HashTable<Derived, Shape, Key>::FindEntry(Isolate* isolate, Key key) {
  return FindEntry(isolate, key, HashTable::Hash(key));
}
3079 
3080 
// Find entry for key otherwise return kNotFound.
// Open-addressing probe sequence: start at FirstProbe(hash) and follow
// NextProbe() until the key matches or an empty (undefined) slot proves
// absence. Hole slots are deleted entries and must be probed past.
template <typename Derived, typename Shape, typename Key>
int HashTable<Derived, Shape, Key>::FindEntry(Isolate* isolate, Key key,
                                              int32_t hash) {
  uint32_t capacity = Capacity();
  uint32_t entry = FirstProbe(hash, capacity);
  uint32_t count = 1;
  // EnsureCapacity will guarantee the hash table is never full.
  Object* undefined = isolate->heap()->undefined_value();
  Object* the_hole = isolate->heap()->the_hole_value();
  while (true) {
    Object* element = KeyAt(entry);
    // Empty entry. Uses raw unchecked accessors because it is called by the
    // string table during bootstrapping.
    if (element == undefined) break;
    if (element != the_hole && Shape::IsMatch(key, element)) return entry;
    entry = NextProbe(entry, count++, capacity);
  }
  return kNotFound;
}
3101 
// A string-set entry matches when the stored value is a string with equal
// contents.
bool StringSetShape::IsMatch(String* key, Object* value) {
  return value->IsString() && key->Equals(String::cast(value));
}

uint32_t StringSetShape::Hash(String* key) { return key->Hash(); }

// Hash of a stored entry; non-string slot contents hash to 0.
uint32_t StringSetShape::HashForObject(String* key, Object* object) {
  return object->IsString() ? String::cast(object)->Hash() : 0;
}
3111 
requires_slow_elements()3112 bool SeededNumberDictionary::requires_slow_elements() {
3113   Object* max_index_object = get(kMaxNumberKeyIndex);
3114   if (!max_index_object->IsSmi()) return false;
3115   return 0 !=
3116       (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
3117 }
3118 
3119 
// Largest number key recorded so far, with the slow-elements tag bits
// shifted away. Only meaningful while the slow-elements flag is unset.
uint32_t SeededNumberDictionary::max_number_key() {
  DCHECK(!requires_slow_elements());
  Object* max_index_object = get(kMaxNumberKeyIndex);
  if (!max_index_object->IsSmi()) return 0;
  uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
  return value >> kRequiresSlowElementsTagSize;
}


// Raises the slow-elements flag; this overwrites (discards) any previously
// recorded max number key.
void SeededNumberDictionary::set_requires_slow_elements() {
  set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
}
3132 
3133 
3134 // ------------------------------------
3135 // Cast operations
3136 
// Each CAST_ACCESSOR(T) invocation expands to the checked T::cast()
// definitions declared on the corresponding class.
CAST_ACCESSOR(AbstractCode)
CAST_ACCESSOR(ArrayList)
CAST_ACCESSOR(Bool16x8)
CAST_ACCESSOR(Bool32x4)
CAST_ACCESSOR(Bool8x16)
CAST_ACCESSOR(ByteArray)
CAST_ACCESSOR(BytecodeArray)
CAST_ACCESSOR(Cell)
CAST_ACCESSOR(Code)
CAST_ACCESSOR(CodeCacheHashTable)
CAST_ACCESSOR(CompilationCacheTable)
CAST_ACCESSOR(ConsString)
CAST_ACCESSOR(DeoptimizationInputData)
CAST_ACCESSOR(DeoptimizationOutputData)
CAST_ACCESSOR(DependentCode)
CAST_ACCESSOR(DescriptorArray)
CAST_ACCESSOR(ExternalOneByteString)
CAST_ACCESSOR(ExternalString)
CAST_ACCESSOR(ExternalTwoByteString)
CAST_ACCESSOR(FixedArray)
CAST_ACCESSOR(FixedArrayBase)
CAST_ACCESSOR(FixedDoubleArray)
CAST_ACCESSOR(FixedTypedArrayBase)
CAST_ACCESSOR(Float32x4)
CAST_ACCESSOR(Foreign)
CAST_ACCESSOR(GlobalDictionary)
CAST_ACCESSOR(HandlerTable)
CAST_ACCESSOR(HeapObject)
CAST_ACCESSOR(Int16x8)
CAST_ACCESSOR(Int32x4)
CAST_ACCESSOR(Int8x16)
CAST_ACCESSOR(JSArray)
CAST_ACCESSOR(JSArrayBuffer)
CAST_ACCESSOR(JSArrayBufferView)
CAST_ACCESSOR(JSBoundFunction)
CAST_ACCESSOR(JSDataView)
CAST_ACCESSOR(JSDate)
CAST_ACCESSOR(JSFunction)
CAST_ACCESSOR(JSGeneratorObject)
CAST_ACCESSOR(JSGlobalObject)
CAST_ACCESSOR(JSGlobalProxy)
CAST_ACCESSOR(JSMap)
CAST_ACCESSOR(JSMapIterator)
CAST_ACCESSOR(JSMessageObject)
CAST_ACCESSOR(JSModule)
CAST_ACCESSOR(JSObject)
CAST_ACCESSOR(JSProxy)
CAST_ACCESSOR(JSReceiver)
CAST_ACCESSOR(JSRegExp)
CAST_ACCESSOR(JSSet)
CAST_ACCESSOR(JSSetIterator)
CAST_ACCESSOR(JSTypedArray)
CAST_ACCESSOR(JSValue)
CAST_ACCESSOR(JSWeakMap)
CAST_ACCESSOR(JSWeakSet)
CAST_ACCESSOR(LayoutDescriptor)
CAST_ACCESSOR(Map)
CAST_ACCESSOR(Name)
CAST_ACCESSOR(NameDictionary)
CAST_ACCESSOR(NormalizedMapCache)
CAST_ACCESSOR(Object)
CAST_ACCESSOR(ObjectHashTable)
CAST_ACCESSOR(Oddball)
CAST_ACCESSOR(OrderedHashMap)
CAST_ACCESSOR(OrderedHashSet)
CAST_ACCESSOR(PropertyCell)
CAST_ACCESSOR(ScopeInfo)
CAST_ACCESSOR(SeededNumberDictionary)
CAST_ACCESSOR(SeqOneByteString)
CAST_ACCESSOR(SeqString)
CAST_ACCESSOR(SeqTwoByteString)
CAST_ACCESSOR(SharedFunctionInfo)
CAST_ACCESSOR(Simd128Value)
CAST_ACCESSOR(SlicedString)
CAST_ACCESSOR(Smi)
CAST_ACCESSOR(String)
CAST_ACCESSOR(StringSet)
CAST_ACCESSOR(StringTable)
CAST_ACCESSOR(Struct)
CAST_ACCESSOR(Symbol)
CAST_ACCESSOR(Uint16x8)
CAST_ACCESSOR(Uint32x4)
CAST_ACCESSOR(Uint8x16)
CAST_ACCESSOR(UnseededNumberDictionary)
CAST_ACCESSOR(WeakCell)
CAST_ACCESSOR(WeakFixedArray)
CAST_ACCESSOR(WeakHashTable)
3224 
3225 
// static
// Out-of-class definition for the static constant declared in objects.h.
template <class Traits>
STATIC_CONST_MEMBER_DEFINITION const InstanceType
    FixedTypedArray<Traits>::kInstanceType;


// Checked downcast: slow-DCHECK builds verify the object's instance type
// matches the Traits' expected type before reinterpreting the pointer.
template <class Traits>
FixedTypedArray<Traits>* FixedTypedArray<Traits>::cast(Object* object) {
  SLOW_DCHECK(object->IsHeapObject() &&
              HeapObject::cast(object)->map()->instance_type() ==
              Traits::kInstanceType);
  return reinterpret_cast<FixedTypedArray<Traits>*>(object);
}
3239 
3240 
3241 template <class Traits>
3242 const FixedTypedArray<Traits>*
cast(const Object * object)3243 FixedTypedArray<Traits>::cast(const Object* object) {
3244   SLOW_DCHECK(object->IsHeapObject() &&
3245               HeapObject::cast(object)->map()->instance_type() ==
3246               Traits::kInstanceType);
3247   return reinterpret_cast<FixedTypedArray<Traits>*>(object);
3248 }
3249 
3250 
// Getter/setter pairs for the fixed per-code elements of
// DeoptimizationInputData, each stored at a well-known index constant.
#define DEFINE_DEOPT_ELEMENT_ACCESSORS(name, type)       \
  type* DeoptimizationInputData::name() {                \
    return type::cast(get(k##name##Index));              \
  }                                                      \
  void DeoptimizationInputData::Set##name(type* value) { \
    set(k##name##Index, value);                          \
  }

DEFINE_DEOPT_ELEMENT_ACCESSORS(TranslationByteArray, ByteArray)
DEFINE_DEOPT_ELEMENT_ACCESSORS(InlinedFunctionCount, Smi)
DEFINE_DEOPT_ELEMENT_ACCESSORS(LiteralArray, FixedArray)
DEFINE_DEOPT_ELEMENT_ACCESSORS(OsrAstId, Smi)
DEFINE_DEOPT_ELEMENT_ACCESSORS(OsrPcOffset, Smi)
DEFINE_DEOPT_ELEMENT_ACCESSORS(OptimizationId, Smi)
DEFINE_DEOPT_ELEMENT_ACCESSORS(SharedFunctionInfo, Object)
DEFINE_DEOPT_ELEMENT_ACCESSORS(WeakCellCache, Object)

#undef DEFINE_DEOPT_ELEMENT_ACCESSORS
3269 
3270 
// Getter/setter pairs for the per-deopt-entry fields; entry |i|'s fields
// live at IndexForEntry(i) plus a field-specific offset.
#define DEFINE_DEOPT_ENTRY_ACCESSORS(name, type)                \
  type* DeoptimizationInputData::name(int i) {                  \
    return type::cast(get(IndexForEntry(i) + k##name##Offset)); \
  }                                                             \
  void DeoptimizationInputData::Set##name(int i, type* value) { \
    set(IndexForEntry(i) + k##name##Offset, value);             \
  }

DEFINE_DEOPT_ENTRY_ACCESSORS(AstIdRaw, Smi)
DEFINE_DEOPT_ENTRY_ACCESSORS(TranslationIndex, Smi)
DEFINE_DEOPT_ENTRY_ACCESSORS(ArgumentsStackHeight, Smi)
DEFINE_DEOPT_ENTRY_ACCESSORS(Pc, Smi)

#undef DEFINE_DEOPT_ENTRY_ACCESSORS
3285 
3286 
// Typed wrapper over AstIdRaw: decodes the stored Smi into a BailoutId.
BailoutId DeoptimizationInputData::AstId(int i) {
  return BailoutId(AstIdRaw(i)->value());
}


// Typed wrapper over SetAstIdRaw: encodes the BailoutId as a Smi.
void DeoptimizationInputData::SetAstId(int i, BailoutId value) {
  SetAstIdRaw(i, Smi::FromInt(value.ToInt()));
}


// Number of deopt entries: everything past the fixed header, divided by the
// per-entry slot count.
int DeoptimizationInputData::DeoptCount() {
  return (length() - kFirstDeoptEntryIndex) / kDeoptEntrySize;
}
3300 
3301 
// DeoptimizationOutputData stores (ast id, pc-and-state) pairs as two
// consecutive elements per entry.
int DeoptimizationOutputData::DeoptPoints() { return length() / 2; }


// AST id of entry |index| (even slot).
BailoutId DeoptimizationOutputData::AstId(int index) {
  return BailoutId(Smi::cast(get(index * 2))->value());
}


void DeoptimizationOutputData::SetAstId(int index, BailoutId id) {
  set(index * 2, Smi::FromInt(id.ToInt()));
}


// Packed pc-and-state word of entry |index| (odd slot).
Smi* DeoptimizationOutputData::PcAndState(int index) {
  return Smi::cast(get(1 + index * 2));
}


void DeoptimizationOutputData::SetPcAndState(int index, Smi* offset) {
  set(1 + index * 2, offset);
}
3323 
3324 
// LiteralsArray is a FixedArray with a fixed layout; these overloads just
// forward to the FixedArray element accessors.
Object* LiteralsArray::get(int index) const { return FixedArray::get(index); }


void LiteralsArray::set(int index, Object* value) {
  FixedArray::set(index, value);
}


// Smi overload (Smi stores never need a write barrier).
void LiteralsArray::set(int index, Smi* value) {
  FixedArray::set(index, value);
}


void LiteralsArray::set(int index, Object* value, WriteBarrierMode mode) {
  FixedArray::set(index, value, mode);
}


LiteralsArray* LiteralsArray::cast(Object* object) {
  SLOW_DCHECK(object->IsLiteralsArray());
  return reinterpret_cast<LiteralsArray*>(object);
}
3347 
3348 
// Returns the feedback vector stored at kVectorIndex. An empty
// LiteralsArray doubles as an empty feedback vector, so in that case the
// array itself is cast and returned.
TypeFeedbackVector* LiteralsArray::feedback_vector() const {
  if (length() == 0) {
    return TypeFeedbackVector::cast(
        const_cast<FixedArray*>(FixedArray::cast(this)));
  }
  return TypeFeedbackVector::cast(get(kVectorIndex));
}


// Stores the feedback vector. On an empty LiteralsArray this is a no-op and
// only an empty vector may be stored.
void LiteralsArray::set_feedback_vector(TypeFeedbackVector* vector) {
  if (length() <= kVectorIndex) {
    DCHECK(vector->length() == 0);
    return;
  }
  set(kVectorIndex, vector);
}


// Literals live after the vector slot, starting at kFirstLiteralIndex.
Object* LiteralsArray::literal(int literal_index) const {
  return get(kFirstLiteralIndex + literal_index);
}


void LiteralsArray::set_literal(int literal_index, Object* literal) {
  set(kFirstLiteralIndex + literal_index, literal);
}

void LiteralsArray::set_literal_undefined(int literal_index) {
  set_undefined(kFirstLiteralIndex + literal_index);
}

// Count of literal slots, excluding the leading vector slot.
int LiteralsArray::literals_count() const {
  return length() - kFirstLiteralIndex;
}
3383 
// HandlerTable accessors. The table supports two layouts: range-based
// entries of kRangeEntrySize slots (start, end, packed handler word, data)
// and return-address-based entries of kReturnEntrySize slots. The handler
// word packs an offset and a CatchPrediction via HandlerOffsetField and
// HandlerPredictionField.
int HandlerTable::GetRangeStart(int index) const {
  return Smi::cast(get(index * kRangeEntrySize + kRangeStartIndex))->value();
}

int HandlerTable::GetRangeEnd(int index) const {
  return Smi::cast(get(index * kRangeEntrySize + kRangeEndIndex))->value();
}

// Handler offset, extracted from the packed handler word.
int HandlerTable::GetRangeHandler(int index) const {
  return HandlerOffsetField::decode(
      Smi::cast(get(index * kRangeEntrySize + kRangeHandlerIndex))->value());
}

int HandlerTable::GetRangeData(int index) const {
  return Smi::cast(get(index * kRangeEntrySize + kRangeDataIndex))->value();
}

// Catch prediction, extracted from the same packed handler word.
HandlerTable::CatchPrediction HandlerTable::GetRangePrediction(
    int index) const {
  return HandlerPredictionField::decode(
      Smi::cast(get(index * kRangeEntrySize + kRangeHandlerIndex))->value());
}

void HandlerTable::SetRangeStart(int index, int value) {
  set(index * kRangeEntrySize + kRangeStartIndex, Smi::FromInt(value));
}


void HandlerTable::SetRangeEnd(int index, int value) {
  set(index * kRangeEntrySize + kRangeEndIndex, Smi::FromInt(value));
}


// Packs offset and prediction into one Smi-tagged handler word.
void HandlerTable::SetRangeHandler(int index, int offset,
                                   CatchPrediction prediction) {
  int value = HandlerOffsetField::encode(offset) |
              HandlerPredictionField::encode(prediction);
  set(index * kRangeEntrySize + kRangeHandlerIndex, Smi::FromInt(value));
}

void HandlerTable::SetRangeData(int index, int value) {
  set(index * kRangeEntrySize + kRangeDataIndex, Smi::FromInt(value));
}


// Return-address-based layout setters.
void HandlerTable::SetReturnOffset(int index, int value) {
  set(index * kReturnEntrySize + kReturnOffsetIndex, Smi::FromInt(value));
}


void HandlerTable::SetReturnHandler(int index, int offset,
                                    CatchPrediction prediction) {
  int value = HandlerOffsetField::encode(offset) |
              HandlerPredictionField::encode(prediction);
  set(index * kReturnEntrySize + kReturnHandlerIndex, Smi::FromInt(value));
}

// Entry count under the range-based layout.
int HandlerTable::NumberOfRangeEntries() const {
  return length() / kRangeEntrySize;
}
3444 
// Generate a CAST_ACCESSOR (Name::cast definition) for every struct type
// listed in STRUCT_LIST.
#define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
  STRUCT_LIST(MAKE_STRUCT_CAST)
#undef MAKE_STRUCT_CAST
3448 
3449 
// Checked casts for HashTable; both verify IsHashTable() in slow-DCHECK
// builds before reinterpreting the pointer.
template <typename Derived, typename Shape, typename Key>
HashTable<Derived, Shape, Key>*
HashTable<Derived, Shape, Key>::cast(Object* obj) {
  SLOW_DCHECK(obj->IsHashTable());
  return reinterpret_cast<HashTable*>(obj);
}


// Const overload of the above.
template <typename Derived, typename Shape, typename Key>
const HashTable<Derived, Shape, Key>*
HashTable<Derived, Shape, Key>::cast(const Object* obj) {
  SLOW_DCHECK(obj->IsHashTable());
  return reinterpret_cast<const HashTable*>(obj);
}
3464 
3465 
// Smi-encoded length/size fields. The SYNCHRONIZED_ and NOBARRIER_ macro
// variants additionally define synchronized (resp. relaxed) versions of the
// same accessors.
SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
SYNCHRONIZED_SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)

SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
NOBARRIER_SMI_ACCESSORS(FreeSpace, size, kSizeOffset)

SMI_ACCESSORS(String, length, kLengthOffset)
SYNCHRONIZED_SMI_ACCESSORS(String, length, kLengthOffset)
3474 
3475 
int FreeSpace::Size() { return size(); }


// Next block in the free list. The link is stored as a raw address at
// kNextOffset; valid only while this object carries the free-space map (or
// a still-NULL map during deserialization).
FreeSpace* FreeSpace::next() {
  DCHECK(map() == GetHeap()->root(Heap::kFreeSpaceMapRootIndex) ||
         (!GetHeap()->deserialization_complete() && map() == NULL));
  DCHECK_LE(kNextOffset + kPointerSize, nobarrier_size());
  return reinterpret_cast<FreeSpace*>(
      Memory::Address_at(address() + kNextOffset));
}


// Stores the free-list link with a relaxed (no-barrier) atomic store.
void FreeSpace::set_next(FreeSpace* next) {
  DCHECK(map() == GetHeap()->root(Heap::kFreeSpaceMapRootIndex) ||
         (!GetHeap()->deserialization_complete() && map() == NULL));
  DCHECK_LE(kNextOffset + kPointerSize, nobarrier_size());
  base::NoBarrier_Store(
      reinterpret_cast<base::AtomicWord*>(address() + kNextOffset),
      reinterpret_cast<base::AtomicWord>(next));
}


// Cast that tolerates objects observed before deserialization completes,
// when the free-space map may not yet be installed.
FreeSpace* FreeSpace::cast(HeapObject* o) {
  SLOW_DCHECK(!o->GetHeap()->deserialization_complete() || o->IsFreeSpace());
  return reinterpret_cast<FreeSpace*>(o);
}
3502 
3503 
uint32_t Name::hash_field() {
  return READ_UINT32_FIELD(this, kHashFieldOffset);
}


// Writes the 32-bit hash field. On 64-bit hosts the hash occupies half of a
// pointer-sized slot, so the unused half-word (which half depends on target
// endianness) is zeroed to keep the whole slot deterministic.
void Name::set_hash_field(uint32_t value) {
  WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
#if V8_HOST_ARCH_64_BIT
#if V8_TARGET_LITTLE_ENDIAN
  WRITE_UINT32_FIELD(this, kHashFieldSlot + kIntSize, 0);
#else
  WRITE_UINT32_FIELD(this, kHashFieldSlot, 0);
#endif
#endif
}
3519 
3520 
Equals(Name * other)3521 bool Name::Equals(Name* other) {
3522   if (other == this) return true;
3523   if ((this->IsInternalizedString() && other->IsInternalizedString()) ||
3524       this->IsSymbol() || other->IsSymbol()) {
3525     return false;
3526   }
3527   return String::cast(this)->SlowEquals(String::cast(other));
3528 }
3529 
3530 
Equals(Handle<Name> one,Handle<Name> two)3531 bool Name::Equals(Handle<Name> one, Handle<Name> two) {
3532   if (one.is_identical_to(two)) return true;
3533   if ((one->IsInternalizedString() && two->IsInternalizedString()) ||
3534       one->IsSymbol() || two->IsSymbol()) {
3535     return false;
3536   }
3537   return String::SlowEquals(Handle<String>::cast(one),
3538                             Handle<String>::cast(two));
3539 }
3540 
3541 
// Symbol layout: a name slot plus a Smi flags word exposing the private and
// well-known bits.
ACCESSORS(Symbol, name, Object, kNameOffset)
SMI_ACCESSORS(Symbol, flags, kFlagsOffset)
BOOL_ACCESSORS(Symbol, flags, is_private, kPrivateBit)
BOOL_ACCESSORS(Symbol, flags, is_well_known_symbol, kWellKnownSymbolBit)
3546 
3547 
3548 bool String::Equals(String* other) {
3549   if (other == this) return true;
3550   if (this->IsInternalizedString() && other->IsInternalizedString()) {
3551     return false;
3552   }
3553   return SlowEquals(other);
3554 }
3555 
3556 
Equals(Handle<String> one,Handle<String> two)3557 bool String::Equals(Handle<String> one, Handle<String> two) {
3558   if (one.is_identical_to(two)) return true;
3559   if (one->IsInternalizedString() && two->IsInternalizedString()) {
3560     return false;
3561   }
3562   return SlowEquals(one, two);
3563 }
3564 
3565 
// Returns a flat string with the same contents as |string|: non-cons inputs
// are returned as-is, an already-flat cons string yields its first part,
// and anything else takes the slow (allocating) path.
Handle<String> String::Flatten(Handle<String> string, PretenureFlag pretenure) {
  if (!string->IsConsString()) return string;
  Handle<ConsString> cons = Handle<ConsString>::cast(string);
  if (cons->IsFlat()) return handle(cons->first());
  return SlowFlatten(cons, pretenure);
}
3572 
3573 
// Reads the character at |index|, dispatching on the string's combined
// representation and encoding tag.
uint16_t String::Get(int index) {
  DCHECK(index >= 0 && index < length());
  switch (StringShape(this).full_representation_tag()) {
    case kSeqStringTag | kOneByteStringTag:
      return SeqOneByteString::cast(this)->SeqOneByteStringGet(index);
    case kSeqStringTag | kTwoByteStringTag:
      return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
    case kConsStringTag | kOneByteStringTag:
    case kConsStringTag | kTwoByteStringTag:
      return ConsString::cast(this)->ConsStringGet(index);
    case kExternalStringTag | kOneByteStringTag:
      return ExternalOneByteString::cast(this)->ExternalOneByteStringGet(index);
    case kExternalStringTag | kTwoByteStringTag:
      return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
    case kSlicedStringTag | kOneByteStringTag:
    case kSlicedStringTag | kTwoByteStringTag:
      return SlicedString::cast(this)->SlicedStringGet(index);
    default:
      break;
  }

  // All valid representation tags are handled above.
  UNREACHABLE();
  return 0;
}
3598 
3599 
Set(int index,uint16_t value)3600 void String::Set(int index, uint16_t value) {
3601   DCHECK(index >= 0 && index < length());
3602   DCHECK(StringShape(this).IsSequential());
3603 
3604   return this->IsOneByteRepresentation()
3605       ? SeqOneByteString::cast(this)->SeqOneByteStringSet(index, value)
3606       : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
3607 }
3608 
3609 
IsFlat()3610 bool String::IsFlat() {
3611   if (!StringShape(this).IsCons()) return true;
3612   return ConsString::cast(this)->second()->length() == 0;
3613 }
3614 
3615 
// Returns the string an indirect (cons or sliced) string points at.
String* String::GetUnderlying() {
  // Giving direct access to underlying string only makes sense if the
  // wrapping string is already flattened.
  DCHECK(this->IsFlat());
  DCHECK(StringShape(this).IsIndirect());
  // Cons strings and sliced strings store their target at the same offset,
  // so one read covers both representations.
  STATIC_ASSERT(ConsString::kFirstOffset == SlicedString::kParentOffset);
  const int kUnderlyingOffset = SlicedString::kParentOffset;
  return String::cast(READ_FIELD(this, kUnderlyingOffset));
}
3625 
3626 
// Walks |string| starting at |offset|, unwrapping sliced strings, until it
// reaches flat character data, which it hands to |visitor| (one-byte or
// two-byte callback). Returns NULL when the data was fully visited, or the
// ConsString at which flat traversal stopped.
template<class Visitor>
ConsString* String::VisitFlat(Visitor* visitor,
                              String* string,
                              const int offset) {
  int slice_offset = offset;
  const int length = string->length();
  DCHECK(offset <= length);
  while (true) {
    int32_t type = string->map()->instance_type();
    switch (type & (kStringRepresentationMask | kStringEncodingMask)) {
      case kSeqStringTag | kOneByteStringTag:
        visitor->VisitOneByteString(
            SeqOneByteString::cast(string)->GetChars() + slice_offset,
            length - offset);
        return NULL;

      case kSeqStringTag | kTwoByteStringTag:
        visitor->VisitTwoByteString(
            SeqTwoByteString::cast(string)->GetChars() + slice_offset,
            length - offset);
        return NULL;

      case kExternalStringTag | kOneByteStringTag:
        visitor->VisitOneByteString(
            ExternalOneByteString::cast(string)->GetChars() + slice_offset,
            length - offset);
        return NULL;

      case kExternalStringTag | kTwoByteStringTag:
        visitor->VisitTwoByteString(
            ExternalTwoByteString::cast(string)->GetChars() + slice_offset,
            length - offset);
        return NULL;

      case kSlicedStringTag | kOneByteStringTag:
      case kSlicedStringTag | kTwoByteStringTag: {
        // Accumulate the slice offset and continue with the parent.
        SlicedString* slicedString = SlicedString::cast(string);
        slice_offset += slicedString->offset();
        string = slicedString->parent();
        continue;
      }

      case kConsStringTag | kOneByteStringTag:
      case kConsStringTag | kTwoByteStringTag:
        // Cons strings are not flat; let the caller iterate them.
        return ConsString::cast(string);

      default:
        UNREACHABLE();
        return NULL;
    }
  }
}
3679 
3680 
// One-byte specialization: requires the receiver to have flat one-byte
// content (DCHECKed).
template <>
inline Vector<const uint8_t> String::GetCharVector() {
  String::FlatContent flat = GetFlatContent();
  DCHECK(flat.IsOneByte());
  return flat.ToOneByteVector();
}


// Two-byte specialization: requires flat two-byte content (DCHECKed).
template <>
inline Vector<const uc16> String::GetCharVector() {
  String::FlatContent flat = GetFlatContent();
  DCHECK(flat.IsTwoByte());
  return flat.ToUC16Vector();
}
3695 
3696 
// Bounds-checked character read from a sequential one-byte string.
uint16_t SeqOneByteString::SeqOneByteStringGet(int index) {
  DCHECK(index >= 0 && index < length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
}


// Bounds-checked character write; |value| must fit in one byte.
void SeqOneByteString::SeqOneByteStringSet(int index, uint16_t value) {
  DCHECK(index >= 0 && index < length() && value <= kMaxOneByteCharCode);
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
                   static_cast<byte>(value));
}


// Address of the first character (right after the header).
Address SeqOneByteString::GetCharsAddress() {
  return FIELD_ADDR(this, kHeaderSize);
}


uint8_t* SeqOneByteString::GetChars() {
  return reinterpret_cast<uint8_t*>(GetCharsAddress());
}


Address SeqTwoByteString::GetCharsAddress() {
  return FIELD_ADDR(this, kHeaderSize);
}


uc16* SeqTwoByteString::GetChars() {
  return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
}


// Bounds-checked character read from a sequential two-byte string.
uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) {
  DCHECK(index >= 0 && index < length());
  return READ_UINT16_FIELD(this, kHeaderSize + index * kShortSize);
}


void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) {
  DCHECK(index >= 0 && index < length());
  WRITE_UINT16_FIELD(this, kHeaderSize + index * kShortSize, value);
}


// Object sizes derived from the stored length; the instance type argument
// is unused here.
int SeqTwoByteString::SeqTwoByteStringSize(InstanceType instance_type) {
  return SizeFor(length());
}


int SeqOneByteString::SeqOneByteStringSize(InstanceType instance_type) {
  return SizeFor(length());
}
3750 
3751 
String* SlicedString::parent() {
  return String::cast(READ_FIELD(this, kParentOffset));
}


// The parent of a slice must itself be flat (sequential or external); the
// DCHECK rules out slices of slices or of cons strings.
void SlicedString::set_parent(String* parent, WriteBarrierMode mode) {
  DCHECK(parent->IsSeqString() || parent->IsExternalString());
  WRITE_FIELD(this, kParentOffset, parent);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kParentOffset, parent, mode);
}


// Start offset of the slice within its parent, stored as a Smi.
SMI_ACCESSORS(SlicedString, offset, kOffsetOffset)
3765 
3766 
String* ConsString::first() {
  return String::cast(READ_FIELD(this, kFirstOffset));
}


// Raw read of the first field without the String cast/check.
Object* ConsString::unchecked_first() {
  return READ_FIELD(this, kFirstOffset);
}


void ConsString::set_first(String* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kFirstOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, value, mode);
}


String* ConsString::second() {
  return String::cast(READ_FIELD(this, kSecondOffset));
}


// Raw read of the second field without the String cast/check.
Object* ConsString::unchecked_second() {
  return READ_FIELD(this, kSecondOffset);
}


void ConsString::set_second(String* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kSecondOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, value, mode);
}
3797 
3798 
// "Short" external strings are marked in the instance type and skip the
// cached resource-data field (see update_data_cache below).
bool ExternalString::is_short() {
  InstanceType type = map()->instance_type();
  return (type & kShortExternalStringMask) == kShortExternalStringTag;
}


const ExternalOneByteString::Resource* ExternalOneByteString::resource() {
  return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
}


// Refreshes the cached pointer to the resource's character data; short
// external strings have no such cache.
void ExternalOneByteString::update_data_cache() {
  if (is_short()) return;
  const char** data_field =
      reinterpret_cast<const char**>(FIELD_ADDR(this, kResourceDataOffset));
  *data_field = resource()->data();
}


// Installs the resource pointer and refreshes the data cache (unless the
// resource is NULL).
void ExternalOneByteString::set_resource(
    const ExternalOneByteString::Resource* resource) {
  DCHECK(IsAligned(reinterpret_cast<intptr_t>(resource), kPointerSize));
  *reinterpret_cast<const Resource**>(
      FIELD_ADDR(this, kResourceOffset)) = resource;
  if (resource != NULL) update_data_cache();
}


const uint8_t* ExternalOneByteString::GetChars() {
  return reinterpret_cast<const uint8_t*>(resource()->data());
}


// Bounds-checked character read.
uint16_t ExternalOneByteString::ExternalOneByteStringGet(int index) {
  DCHECK(index >= 0 && index < length());
  return GetChars()[index];
}
3836 
3837 
const ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
  return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
}


// Refreshes the cached pointer to the resource's character data; short
// external strings have no such cache.
void ExternalTwoByteString::update_data_cache() {
  if (is_short()) return;
  const uint16_t** data_field =
      reinterpret_cast<const uint16_t**>(FIELD_ADDR(this, kResourceDataOffset));
  *data_field = resource()->data();
}
3849 
3850 
set_resource(const ExternalTwoByteString::Resource * resource)3851 void ExternalTwoByteString::set_resource(
3852     const ExternalTwoByteString::Resource* resource) {
3853   *reinterpret_cast<const Resource**>(
3854       FIELD_ADDR(this, kResourceOffset)) = resource;
3855   if (resource != NULL) update_data_cache();
3856 }
3857 
3858 
const uint16_t* ExternalTwoByteString::GetChars() {
  return resource()->data();
}


// Bounds-checked character read.
uint16_t ExternalTwoByteString::ExternalTwoByteStringGet(int index) {
  DCHECK(index >= 0 && index < length());
  return GetChars()[index];
}


// Pointer to the character data beginning at |start| (no bounds check).
const uint16_t* ExternalTwoByteString::ExternalTwoByteStringGetData(
      unsigned start) {
  return GetChars() + start;
}
3874 
3875 
// The iterator keeps its traversal frames in a fixed-size buffer indexed
// modulo the mask, so depths wrap around kDepthMask.
int ConsStringIterator::OffsetForDepth(int depth) { return depth & kDepthMask; }


// Pushes a new frame, growing the depth.
void ConsStringIterator::PushLeft(ConsString* string) {
  frames_[depth_++ & kDepthMask] = string;
}


// Replaces the current top-of-stack frame without changing the depth.
void ConsStringIterator::PushRight(ConsString* string) {
  // Inplace update.
  frames_[(depth_-1) & kDepthMask] = string;
}


// Tracks the deepest point reached during this traversal.
void ConsStringIterator::AdjustMaximumDepth() {
  if (depth_ > maximum_depth_) maximum_depth_ = depth_;
}


void ConsStringIterator::Pop() {
  DCHECK(depth_ > 0);
  DCHECK(depth_ <= maximum_depth_);
  depth_--;
}
3900 
3901 
// Returns the next character and advances. buffer8_ and buffer16_ alias the
// same cursor (see the Visit* callbacks below); end_ is always compared in
// byte units against buffer8_.
uint16_t StringCharacterStream::GetNext() {
  DCHECK(buffer8_ != NULL && end_ != NULL);
  // Advance cursor if needed.
  if (buffer8_ == end_) HasMore();
  DCHECK(buffer8_ < end_);
  return is_one_byte_ ? *buffer8_++ : *buffer16_++;
}


StringCharacterStream::StringCharacterStream(String* string, int offset)
    : is_one_byte_(false) {
  Reset(string, offset);
}


// (Re)positions the stream at |offset| within |string|, descending through
// cons strings via the internal iterator.
void StringCharacterStream::Reset(String* string, int offset) {
  buffer8_ = NULL;
  end_ = NULL;
  ConsString* cons_string = String::VisitFlat(this, string, offset);
  iter_.Reset(cons_string, offset);
  if (cons_string != NULL) {
    string = iter_.Next(&offset);
    if (string != NULL) String::VisitFlat(this, string, offset);
  }
}


// Advances to the next flat segment when the current one is exhausted;
// returns false at the end of the string.
bool StringCharacterStream::HasMore() {
  if (buffer8_ != end_) return true;
  int offset;
  String* string = iter_.Next(&offset);
  DCHECK_EQ(offset, 0);
  if (string == NULL) return false;
  String::VisitFlat(this, string);
  DCHECK(buffer8_ != end_);
  return true;
}


// VisitFlat callback: points the stream at a one-byte segment.
void StringCharacterStream::VisitOneByteString(
    const uint8_t* chars, int length) {
  is_one_byte_ = true;
  buffer8_ = chars;
  end_ = chars + length;
}


// VisitFlat callback: points the stream at a two-byte segment; end_ is
// stored as a byte pointer past the last character.
void StringCharacterStream::VisitTwoByteString(
    const uint16_t* chars, int length) {
  is_one_byte_ = false;
  buffer16_ = chars;
  end_ = reinterpret_cast<const uint8_t*>(chars + length);
}
3955 
3956 
// Allocated object size: header plus payload, pointer-aligned.
int ByteArray::Size() { return RoundUp(length() + kHeaderSize, kPointerSize); }

byte ByteArray::get(int index) {
  DCHECK(index >= 0 && index < this->length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
}


void ByteArray::set(int index, byte value) {
  DCHECK(index >= 0 && index < this->length());
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
}

// Bulk copy from |buffer| into the array; the DCHECK also guards against
// index + length overflow.
void ByteArray::copy_in(int index, const byte* buffer, int length) {
  DCHECK(index >= 0 && length >= 0 && index + length >= index &&
         index + length <= this->length());
  byte* dst_addr = FIELD_ADDR(this, kHeaderSize + index * kCharSize);
  memcpy(dst_addr, buffer, length);
}

// Bulk copy out of the array into |buffer|.
void ByteArray::copy_out(int index, byte* buffer, int length) {
  DCHECK(index >= 0 && length >= 0 && index + length >= index &&
         index + length <= this->length());
  const byte* src_addr = FIELD_ADDR(this, kHeaderSize + index * kCharSize);
  memcpy(buffer, src_addr, length);
}

// Int-granularity access; |index| counts ints, not bytes.
int ByteArray::get_int(int index) {
  DCHECK(index >= 0 && index < this->length() / kIntSize);
  return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
}

void ByteArray::set_int(int index, int value) {
  DCHECK(index >= 0 && index < this->length() / kIntSize);
  WRITE_INT_FIELD(this, kHeaderSize + index * kIntSize, value);
}

// Recovers the tagged ByteArray pointer from the address of its first data
// byte (inverse of GetDataStartAddress).
ByteArray* ByteArray::FromDataStartAddress(Address address) {
  DCHECK_TAG_ALIGNED(address);
  return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
}


int ByteArray::ByteArraySize() { return SizeFor(this->length()); }


// Untagged address of the first data byte.
Address ByteArray::GetDataStartAddress() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
}
4006 
4007 
// Bounds-checked bytecode read.
byte BytecodeArray::get(int index) {
  DCHECK(index >= 0 && index < this->length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
}


void BytecodeArray::set(int index, byte value) {
  DCHECK(index >= 0 && index < this->length());
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
}


// Frame size in bytes; must be non-negative and pointer-aligned.
void BytecodeArray::set_frame_size(int frame_size) {
  DCHECK_GE(frame_size, 0);
  DCHECK(IsAligned(frame_size, static_cast<unsigned>(kPointerSize)));
  WRITE_INT_FIELD(this, kFrameSizeOffset, frame_size);
}


int BytecodeArray::frame_size() const {
  return READ_INT_FIELD(this, kFrameSizeOffset);
}


// Number of interpreter registers implied by the frame size.
int BytecodeArray::register_count() const {
  return frame_size() / kPointerSize;
}


void BytecodeArray::set_parameter_count(int number_of_parameters) {
  DCHECK_GE(number_of_parameters, 0);
  // Parameter count is stored as the size on stack of the parameters to allow
  // it to be used directly by generated code.
  WRITE_INT_FIELD(this, kParameterSizeOffset,
                  (number_of_parameters << kPointerSizeLog2));
}

int BytecodeArray::interrupt_budget() const {
  return READ_INT_FIELD(this, kInterruptBudgetOffset);
}

void BytecodeArray::set_interrupt_budget(int interrupt_budget) {
  DCHECK_GE(interrupt_budget, 0);
  WRITE_INT_FIELD(this, kInterruptBudgetOffset, interrupt_budget);
}

int BytecodeArray::parameter_count() const {
  // Parameter count is stored as the size on stack of the parameters to allow
  // it to be used directly by generated code.
  return READ_INT_FIELD(this, kParameterSizeOffset) >> kPointerSizeLog2;
}


ACCESSORS(BytecodeArray, constant_pool, FixedArray, kConstantPoolOffset)
ACCESSORS(BytecodeArray, handler_table, FixedArray, kHandlerTableOffset)
ACCESSORS(BytecodeArray, source_position_table, ByteArray,
          kSourcePositionTableOffset)

// Untagged address of the first bytecode.
Address BytecodeArray::GetFirstBytecodeAddress() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
}


int BytecodeArray::BytecodeArraySize() { return SizeFor(this->length()); }

// Total footprint: the array itself plus its metadata side tables.
int BytecodeArray::SizeIncludingMetadata() {
  int size = BytecodeArraySize();
  size += constant_pool()->Size();
  size += handler_table()->Size();
  size += source_position_table()->Size();
  return size;
}
4080 
ACCESSORS(FixedTypedArrayBase, base_pointer, Object, kBasePointerOffset)


// Untagged companion of base_pointer; DataPtr() below is the sum of the
// two, so this acts as either an absolute pointer or an offset.
void* FixedTypedArrayBase::external_pointer() const {
  intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
  return reinterpret_cast<void*>(ptr);
}


// NOTE(review): |mode| is accepted but unused — the field is untagged, so
// no write barrier applies.
void FixedTypedArrayBase::set_external_pointer(void* value,
                                               WriteBarrierMode mode) {
  intptr_t ptr = reinterpret_cast<intptr_t>(value);
  WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
}


// Start of the element data: base_pointer plus external_pointer.
void* FixedTypedArrayBase::DataPtr() {
  return reinterpret_cast<void*>(
      reinterpret_cast<intptr_t>(base_pointer()) +
      reinterpret_cast<intptr_t>(external_pointer()));
}
4102 
4103 
ElementSize(InstanceType type)4104 int FixedTypedArrayBase::ElementSize(InstanceType type) {
4105   int element_size;
4106   switch (type) {
4107 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size)                       \
4108     case FIXED_##TYPE##_ARRAY_TYPE:                                           \
4109       element_size = size;                                                    \
4110       break;
4111 
4112     TYPED_ARRAYS(TYPED_ARRAY_CASE)
4113 #undef TYPED_ARRAY_CASE
4114     default:
4115       UNREACHABLE();
4116       return 0;
4117   }
4118   return element_size;
4119 }
4120 
4121 
// Payload size in bytes; reports zero when base_pointer is Smi zero
// (i.e. no on-heap data to account for).
int FixedTypedArrayBase::DataSize(InstanceType type) {
  if (base_pointer() == Smi::FromInt(0)) return 0;
  return length() * ElementSize(type);
}


int FixedTypedArrayBase::DataSize() {
  return DataSize(map()->instance_type());
}


// Allocated object size: header plus payload, pointer-aligned.
int FixedTypedArrayBase::size() {
  return OBJECT_POINTER_ALIGN(kDataOffset + DataSize());
}


int FixedTypedArrayBase::TypedArraySize(InstanceType type) {
  return OBJECT_POINTER_ALIGN(kDataOffset + DataSize(type));
}


// Size for an array of |length| elements of the given type, independent of
// this object's own length.
int FixedTypedArrayBase::TypedArraySize(InstanceType type, int length) {
  return OBJECT_POINTER_ALIGN(kDataOffset + length * ElementSize(type));
}
4146 
4147 
// Default element values used when storing a non-number (see SetValue):
// integral typed arrays default to zero, float arrays to a quiet NaN.
uint8_t Uint8ArrayTraits::defaultValue() { return 0; }


uint8_t Uint8ClampedArrayTraits::defaultValue() { return 0; }


int8_t Int8ArrayTraits::defaultValue() { return 0; }


uint16_t Uint16ArrayTraits::defaultValue() { return 0; }


int16_t Int16ArrayTraits::defaultValue() { return 0; }


uint32_t Uint32ArrayTraits::defaultValue() { return 0; }


int32_t Int32ArrayTraits::defaultValue() { return 0; }


float Float32ArrayTraits::defaultValue() {
  return std::numeric_limits<float>::quiet_NaN();
}


double Float64ArrayTraits::defaultValue() {
  return std::numeric_limits<double>::quiet_NaN();
}
4177 
4178 
// Reads the raw element at |index| directly from the backing store.
template <class Traits>
typename Traits::ElementType FixedTypedArray<Traits>::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  ElementType* ptr = reinterpret_cast<ElementType*>(DataPtr());
  return ptr[index];
}


// Writes the raw element at |index| directly into the backing store.
template <class Traits>
void FixedTypedArray<Traits>::set(int index, ElementType value) {
  DCHECK((index >= 0) && (index < this->length()));
  ElementType* ptr = reinterpret_cast<ElementType*>(DataPtr());
  ptr[index] = value;
}
4193 
4194 
// Generic int -> element conversion: plain C++ cast (i.e. modular
// truncation for narrower integral element types).
template <class Traits>
typename Traits::ElementType FixedTypedArray<Traits>::from_int(int value) {
  return static_cast<ElementType>(value);
}
4199 
4200 
4201 template <> inline
from_int(int value)4202 uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_int(int value) {
4203   if (value < 0) return 0;
4204   if (value > 0xFF) return 0xFF;
4205   return static_cast<uint8_t>(value);
4206 }
4207 
4208 
// Generic double -> element conversion: ECMAScript ToInt32 semantics first,
// then a plain cast to the (integral) element type.
template <class Traits>
typename Traits::ElementType FixedTypedArray<Traits>::from_double(
    double value) {
  return static_cast<ElementType>(DoubleToInt32(value));
}


// Uint8Clamped: saturate to [0, 255] and round to nearest via lrint.
template<> inline
uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_double(double value) {
  // Handle NaNs and less than zero values which clamp to zero.
  if (!(value > 0)) return 0;
  if (value > 0xFF) return 0xFF;
  return static_cast<uint8_t>(lrint(value));
}


// Float32: narrowing float cast, no ToInt32 conversion.
template<> inline
float FixedTypedArray<Float32ArrayTraits>::from_double(double value) {
  return static_cast<float>(value);
}


// Float64: identity.
template<> inline
double FixedTypedArray<Float64ArrayTraits>::from_double(double value) {
  return value;
}
4235 
// Boxes the element at |index| as a JS number handle via the traits'
// ToHandle (Smi where it fits, heap number otherwise).
template <class Traits>
Handle<Object> FixedTypedArray<Traits>::get(FixedTypedArray<Traits>* array,
                                            int index) {
  return Traits::ToHandle(array->GetIsolate(), array->get_scalar(index));
}
4241 
4242 
// Stores an arbitrary number-like |value| at |index|, converting Smis via
// from_int and heap numbers via from_double. Anything else must already be
// undefined (callers normalized the value) and stores the trait default.
template <class Traits>
void FixedTypedArray<Traits>::SetValue(uint32_t index, Object* value) {
  ElementType cast_value = Traits::defaultValue();
  if (value->IsSmi()) {
    int int_value = Smi::cast(value)->value();
    cast_value = from_int(int_value);
  } else if (value->IsHeapNumber()) {
    double double_value = HeapNumber::cast(value)->value();
    cast_value = from_double(double_value);
  } else {
    // Clamp undefined to the default value. All other types have been
    // converted to a number type further up in the call chain.
    DCHECK(value->IsUndefined(GetIsolate()));
  }
  set(index, cast_value);
}
4259 
4260 
// Boxing helpers: element types that always fit in a Smi are wrapped
// directly; 32-bit and floating-point values go through the factory, which
// may allocate a heap number.
Handle<Object> Uint8ArrayTraits::ToHandle(Isolate* isolate, uint8_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}


Handle<Object> Uint8ClampedArrayTraits::ToHandle(Isolate* isolate,
                                                 uint8_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}


Handle<Object> Int8ArrayTraits::ToHandle(Isolate* isolate, int8_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}


Handle<Object> Uint16ArrayTraits::ToHandle(Isolate* isolate, uint16_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}


Handle<Object> Int16ArrayTraits::ToHandle(Isolate* isolate, int16_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}


Handle<Object> Uint32ArrayTraits::ToHandle(Isolate* isolate, uint32_t scalar) {
  return isolate->factory()->NewNumberFromUint(scalar);
}


Handle<Object> Int32ArrayTraits::ToHandle(Isolate* isolate, int32_t scalar) {
  return isolate->factory()->NewNumberFromInt(scalar);
}


Handle<Object> Float32ArrayTraits::ToHandle(Isolate* isolate, float scalar) {
  return isolate->factory()->NewNumber(scalar);
}


Handle<Object> Float64ArrayTraits::ToHandle(Isolate* isolate, double scalar) {
  return isolate->factory()->NewNumber(scalar);
}
4305 
4306 
// Heap-visitor id, stored as a single byte in the map header.
int Map::visitor_id() {
  return READ_BYTE_FIELD(this, kVisitorIdOffset);
}


void Map::set_visitor_id(int id) {
  DCHECK(0 <= id && id < 256);  // Must fit in one byte.
  WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
}
4316 
4317 
// Instance size in bytes. Stored as a byte count of pointer-sized words
// (see set_instance_size), hence the shift back up here.
int Map::instance_size() {
  return NOBARRIER_READ_BYTE_FIELD(
      this, kInstanceSizeOffset) << kPointerSizeLog2;
}
4322 
4323 
// One byte field with two meanings depending on the map kind: the number of
// in-object properties (JSObject maps) or the constructor-function index
// (primitive maps). The typed wrappers below DCHECK the map kind.
int Map::inobject_properties_or_constructor_function_index() {
  return READ_BYTE_FIELD(this,
                         kInObjectPropertiesOrConstructorFunctionIndexOffset);
}


void Map::set_inobject_properties_or_constructor_function_index(int value) {
  DCHECK(0 <= value && value < 256);  // Must fit in one byte.
  WRITE_BYTE_FIELD(this, kInObjectPropertiesOrConstructorFunctionIndexOffset,
                   static_cast<byte>(value));
}


int Map::GetInObjectProperties() {
  DCHECK(IsJSObjectMap());
  return inobject_properties_or_constructor_function_index();
}


void Map::SetInObjectProperties(int value) {
  DCHECK(IsJSObjectMap());
  set_inobject_properties_or_constructor_function_index(value);
}


int Map::GetConstructorFunctionIndex() {
  DCHECK(IsPrimitiveMap());
  return inobject_properties_or_constructor_function_index();
}


void Map::SetConstructorFunctionIndex(int value) {
  DCHECK(IsPrimitiveMap());
  set_inobject_properties_or_constructor_function_index(value);
}
4359 
4360 
// Byte offset of in-object property |index|. In-object properties occupy
// the last GetInObjectProperties() words of the instance, so the offset is
// computed backwards from the end of the object.
int Map::GetInObjectPropertyOffset(int index) {
  // Adjust for the number of properties stored in the object.
  index -= GetInObjectProperties();
  DCHECK(index <= 0);
  return instance_size() + (index * kPointerSize);
}
4367 
4368 
// Test-only thin wrapper exposing the private AddMissingTransitions.
Handle<Map> Map::AddMissingTransitionsForTesting(
    Handle<Map> split_map, Handle<DescriptorArray> descriptors,
    Handle<LayoutDescriptor> full_layout_descriptor) {
  return AddMissingTransitions(split_map, descriptors, full_layout_descriptor);
}
4374 
4375 
// Computes this object's size given its map. Fixed-size instances read the
// size straight from the map; variable-sized instances (instance_size ==
// kVariableSizeSentinel) dispatch on instance type. Checks are ordered with
// the most frequent cases first; the fall-through default must be Code.
int HeapObject::SizeFromMap(Map* map) {
  int instance_size = map->instance_size();
  if (instance_size != kVariableSizeSentinel) return instance_size;
  // Only inline the most frequent cases.
  InstanceType instance_type = map->instance_type();
  if (instance_type == FIXED_ARRAY_TYPE ||
      instance_type == TRANSITION_ARRAY_TYPE) {
    return FixedArray::SizeFor(
        reinterpret_cast<FixedArray*>(this)->synchronized_length());
  }
  if (instance_type == ONE_BYTE_STRING_TYPE ||
      instance_type == ONE_BYTE_INTERNALIZED_STRING_TYPE) {
    // Strings may get concurrently truncated, hence we have to access its
    // length synchronized.
    return SeqOneByteString::SizeFor(
        reinterpret_cast<SeqOneByteString*>(this)->synchronized_length());
  }
  if (instance_type == BYTE_ARRAY_TYPE) {
    return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
  }
  if (instance_type == BYTECODE_ARRAY_TYPE) {
    return reinterpret_cast<BytecodeArray*>(this)->BytecodeArraySize();
  }
  if (instance_type == FREE_SPACE_TYPE) {
    return reinterpret_cast<FreeSpace*>(this)->nobarrier_size();
  }
  if (instance_type == STRING_TYPE ||
      instance_type == INTERNALIZED_STRING_TYPE) {
    // Strings may get concurrently truncated, hence we have to access its
    // length synchronized.
    return SeqTwoByteString::SizeFor(
        reinterpret_cast<SeqTwoByteString*>(this)->synchronized_length());
  }
  if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
    return FixedDoubleArray::SizeFor(
        reinterpret_cast<FixedDoubleArray*>(this)->length());
  }
  if (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
      instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE) {
    return reinterpret_cast<FixedTypedArrayBase*>(
        this)->TypedArraySize(instance_type);
  }
  DCHECK(instance_type == CODE_TYPE);
  return reinterpret_cast<Code*>(this)->CodeSize();
}
4421 
4422 
// Stores the instance size compressed as a count of pointer-sized words so
// it fits in one byte; instance_size() undoes the shift.
void Map::set_instance_size(int value) {
  DCHECK_EQ(0, value & (kPointerSize - 1));  // Must be pointer aligned.
  value >>= kPointerSizeLog2;
  DCHECK(0 <= value && value < 256);  // Compressed value must fit in a byte.
  NOBARRIER_WRITE_BYTE_FIELD(
      this, kInstanceSizeOffset, static_cast<byte>(value));
}


// Zeroes the spare byte in the map header.
void Map::clear_unused() { WRITE_BYTE_FIELD(this, kUnusedOffset, 0); }
4433 
4434 
// Single-byte header fields of Map: instance type, unused-property-fields
// count, and the two flag bytes bit_field/bit_field2.
InstanceType Map::instance_type() {
  return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
}


void Map::set_instance_type(InstanceType value) {
  WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
}


int Map::unused_property_fields() {
  return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
}


void Map::set_unused_property_fields(int value) {
  // Saturates at 255 so the count always fits in the byte field.
  WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
}


byte Map::bit_field() const { return READ_BYTE_FIELD(this, kBitFieldOffset); }


void Map::set_bit_field(byte value) {
  WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
}


byte Map::bit_field2() const { return READ_BYTE_FIELD(this, kBitField2Offset); }


void Map::set_bit_field2(byte value) {
  WRITE_BYTE_FIELD(this, kBitField2Offset, value);
}
4469 
4470 
set_non_instance_prototype(bool value)4471 void Map::set_non_instance_prototype(bool value) {
4472   if (value) {
4473     set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
4474   } else {
4475     set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
4476   }
4477 }
4478 
4479 
bool Map::has_non_instance_prototype() {
  return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
}


// Sets or clears the kIsConstructor flag in bit_field().
void Map::set_is_constructor(bool value) {
  if (value) {
    set_bit_field(bit_field() | (1 << kIsConstructor));
  } else {
    set_bit_field(bit_field() & ~(1 << kIsConstructor));
  }
}


bool Map::is_constructor() const {
  return ((1 << kIsConstructor) & bit_field()) != 0;
}

// Hidden-prototype flag lives in bit_field3 (not bit_field like its peers).
void Map::set_has_hidden_prototype(bool value) {
  set_bit_field3(HasHiddenPrototype::update(bit_field3(), value));
}

bool Map::has_hidden_prototype() const {
  return HasHiddenPrototype::decode(bit_field3());
}


// The interceptor and undetectable flags below are set-only: no clearing
// setter is provided.
void Map::set_has_indexed_interceptor() {
  set_bit_field(bit_field() | (1 << kHasIndexedInterceptor));
}


bool Map::has_indexed_interceptor() {
  return ((1 << kHasIndexedInterceptor) & bit_field()) != 0;
}


void Map::set_is_undetectable() {
  set_bit_field(bit_field() | (1 << kIsUndetectable));
}


bool Map::is_undetectable() {
  return ((1 << kIsUndetectable) & bit_field()) != 0;
}


void Map::set_has_named_interceptor() {
  set_bit_field(bit_field() | (1 << kHasNamedInterceptor));
}


bool Map::has_named_interceptor() {
  return ((1 << kHasNamedInterceptor) & bit_field()) != 0;
}


// Sets or clears the kIsAccessCheckNeeded flag in bit_field().
void Map::set_is_access_check_needed(bool access_check_needed) {
  if (access_check_needed) {
    set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
  } else {
    set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
  }
}


bool Map::is_access_check_needed() {
  return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
}
4549 
4550 
set_is_extensible(bool value)4551 void Map::set_is_extensible(bool value) {
4552   if (value) {
4553     set_bit_field2(bit_field2() | (1 << kIsExtensible));
4554   } else {
4555     set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
4556   }
4557 }
4558 
bool Map::is_extensible() {
  return ((1 << kIsExtensible) & bit_field2()) != 0;
}


// Prototype-map flag, stored as a BitField in bit_field2.
void Map::set_is_prototype_map(bool value) {
  set_bit_field2(IsPrototypeMapBits::update(bit_field2(), value));
}

bool Map::is_prototype_map() const {
  return IsPrototypeMapBits::decode(bit_field2());
}

// True only when prototype_info() holds a PrototypeInfo whose
// should_be_fast_map flag is set; any other prototype_info value means no.
bool Map::should_be_fast_prototype_map() const {
  if (!prototype_info()->IsPrototypeInfo()) return false;
  return PrototypeInfo::cast(prototype_info())->should_be_fast_map();
}
4576 
// Elements kind, packed into the ElementsKindBits field of bit_field2.
void Map::set_elements_kind(ElementsKind elements_kind) {
  DCHECK(static_cast<int>(elements_kind) < kElementsKindCount);
  DCHECK(kElementsKindCount <= (1 << Map::ElementsKindBits::kSize));
  set_bit_field2(Map::ElementsKindBits::update(bit_field2(), elements_kind));
  DCHECK(this->elements_kind() == elements_kind);  // Round-trip sanity check.
}


ElementsKind Map::elements_kind() {
  return Map::ElementsKindBits::decode(bit_field2());
}
4588 
4589 
// Convenience predicates over elements_kind(); each delegates to the
// corresponding elements-kind classifier or compares against one kind.
bool Map::has_fast_smi_elements() {
  return IsFastSmiElementsKind(elements_kind());
}

bool Map::has_fast_object_elements() {
  return IsFastObjectElementsKind(elements_kind());
}

bool Map::has_fast_smi_or_object_elements() {
  return IsFastSmiOrObjectElementsKind(elements_kind());
}

bool Map::has_fast_double_elements() {
  return IsFastDoubleElementsKind(elements_kind());
}

bool Map::has_fast_elements() { return IsFastElementsKind(elements_kind()); }

bool Map::has_sloppy_arguments_elements() {
  return IsSloppyArgumentsElements(elements_kind());
}

bool Map::has_fast_sloppy_arguments_elements() {
  return elements_kind() == FAST_SLOPPY_ARGUMENTS_ELEMENTS;
}

bool Map::has_fast_string_wrapper_elements() {
  return elements_kind() == FAST_STRING_WRAPPER_ELEMENTS;
}

bool Map::has_fixed_typed_array_elements() {
  return IsFixedTypedArrayElementsKind(elements_kind());
}

bool Map::has_dictionary_elements() {
  return IsDictionaryElementsKind(elements_kind());
}
4627 
4628 
// Marks the map as (not) a dictionary-mode map. The IsUnstable bit is
// updated to the same value in the same bit_field3 write: dictionary maps
// are considered unstable.
void Map::set_dictionary_map(bool value) {
  uint32_t new_bit_field3 = DictionaryMap::update(bit_field3(), value);
  new_bit_field3 = IsUnstable::update(new_bit_field3, value);
  set_bit_field3(new_bit_field3);
}


bool Map::is_dictionary_map() {
  return DictionaryMap::decode(bit_field3());
}
4639 
4640 
// Raw flags word of a Code object (kind, IC state, etc. — see the
// Extract*FromFlags helpers).
Code::Flags Code::flags() {
  return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
}
4644 
4645 
// Accessors for flags packed as BitFields into Map::bit_field3.
void Map::set_owns_descriptors(bool owns_descriptors) {
  set_bit_field3(OwnsDescriptors::update(bit_field3(), owns_descriptors));
}


bool Map::owns_descriptors() {
  return OwnsDescriptors::decode(bit_field3());
}


// Set-only; there is no clearing counterpart.
void Map::set_is_callable() { set_bit_field(bit_field() | (1 << kIsCallable)); }


bool Map::is_callable() const {
  return ((1 << kIsCallable) & bit_field()) != 0;
}


// One-way transition: deprecation is never undone via this interface.
void Map::deprecate() {
  set_bit_field3(Deprecated::update(bit_field3(), true));
}


bool Map::is_deprecated() {
  return Deprecated::decode(bit_field3());
}


void Map::set_migration_target(bool value) {
  set_bit_field3(IsMigrationTarget::update(bit_field3(), value));
}


bool Map::is_migration_target() {
  return IsMigrationTarget::decode(bit_field3());
}


void Map::set_new_target_is_base(bool value) {
  set_bit_field3(NewTargetIsBase::update(bit_field3(), value));
}


bool Map::new_target_is_base() { return NewTargetIsBase::decode(bit_field3()); }


void Map::set_construction_counter(int value) {
  set_bit_field3(ConstructionCounter::update(bit_field3(), value));
}


int Map::construction_counter() {
  return ConstructionCounter::decode(bit_field3());
}


// One-way transition; is_stable() is the inverted view of this bit.
void Map::mark_unstable() {
  set_bit_field3(IsUnstable::update(bit_field3(), true));
}


bool Map::is_stable() {
  return !IsUnstable::decode(bit_field3());
}
4710 
4711 
bool Map::has_code_cache() {
  // Code caches are always fixed arrays. The empty fixed array is used as a
  // sentinel for an absent code cache.
  return code_cache()->length() != 0;
}
4717 
4718 
// True if any own descriptor uses a representation or type that a future
// field generalization could widen (None/Smi/Double/HeapObject
// representations, or constant data properties).
bool Map::CanBeDeprecated() {
  int descriptor = LastAdded();
  for (int i = 0; i <= descriptor; i++) {
    PropertyDetails details = instance_descriptors()->GetDetails(i);
    if (details.representation().IsNone()) return true;
    if (details.representation().IsSmi()) return true;
    if (details.representation().IsDouble()) return true;
    if (details.representation().IsHeapObject()) return true;
    if (details.type() == DATA_CONSTANT) return true;
  }
  return false;
}
4731 
4732 
// Marks a previously-stable map unstable and deoptimizes code that depends
// on it via the prototype-check dependency group. No-op if already unstable.
void Map::NotifyLeafMapLayoutChange() {
  if (is_stable()) {
    mark_unstable();
    dependent_code()->DeoptimizeDependentCodeGroup(
        GetIsolate(),
        DependentCode::kPrototypeCheckGroup);
  }
}
4741 
4742 
bool Map::CanTransition() {
  // Only JSObject and subtypes have map transitions and back pointers.
  STATIC_ASSERT(LAST_TYPE == LAST_JS_OBJECT_TYPE);
  return instance_type() >= FIRST_JS_OBJECT_TYPE;
}
4748 
4749 
// Map-kind predicates. The range-based checks rely on the instance-type
// enum layout; the STATIC_ASSERTs pin the boundary each one-sided
// comparison depends on.
bool Map::IsBooleanMap() { return this == GetHeap()->boolean_map(); }
bool Map::IsPrimitiveMap() {
  STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE);
  return instance_type() <= LAST_PRIMITIVE_TYPE;
}
bool Map::IsJSReceiverMap() {
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  return instance_type() >= FIRST_JS_RECEIVER_TYPE;
}
bool Map::IsJSObjectMap() {
  STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
  return instance_type() >= FIRST_JS_OBJECT_TYPE;
}
bool Map::IsJSArrayMap() { return instance_type() == JS_ARRAY_TYPE; }
bool Map::IsJSFunctionMap() { return instance_type() == JS_FUNCTION_TYPE; }
bool Map::IsStringMap() { return instance_type() < FIRST_NONSTRING_TYPE; }
bool Map::IsJSProxyMap() { return instance_type() == JS_PROXY_TYPE; }
bool Map::IsJSGlobalProxyMap() {
  return instance_type() == JS_GLOBAL_PROXY_TYPE;
}
bool Map::IsJSGlobalObjectMap() {
  return instance_type() == JS_GLOBAL_OBJECT_TYPE;
}
bool Map::IsJSTypedArrayMap() { return instance_type() == JS_TYPED_ARRAY_TYPE; }
bool Map::IsJSDataViewMap() { return instance_type() == JS_DATA_VIEW_TYPE; }
4775 
4776 
// Map checks may be omitted only for stable maps and only when the
// corresponding flag is enabled.
bool Map::CanOmitMapChecks() {
  return is_stable() && FLAG_omit_map_checks_for_leaf_maps;
}
4780 
4781 
// DependentCode is a FixedArray with a fixed layout: slot kNextLinkIndex
// links to the next group's list, slot kFlagsIndex holds a Smi packing the
// entry count and dependency group, and code entries start at
// kCodesStartIndex.
DependentCode* DependentCode::next_link() {
  return DependentCode::cast(get(kNextLinkIndex));
}


void DependentCode::set_next_link(DependentCode* next) {
  set(kNextLinkIndex, next);
}


int DependentCode::flags() { return Smi::cast(get(kFlagsIndex))->value(); }


void DependentCode::set_flags(int flags) {
  set(kFlagsIndex, Smi::FromInt(flags));
}


int DependentCode::count() { return CountField::decode(flags()); }

void DependentCode::set_count(int value) {
  set_flags(CountField::update(flags(), value));
}


DependentCode::DependencyGroup DependentCode::group() {
  return static_cast<DependencyGroup>(GroupField::decode(flags()));
}


void DependentCode::set_group(DependentCode::DependencyGroup group) {
  set_flags(GroupField::update(flags(), static_cast<int>(group)));
}


void DependentCode::set_object_at(int i, Object* object) {
  set(kCodesStartIndex + i, object);
}


Object* DependentCode::object_at(int i) {
  return get(kCodesStartIndex + i);
}


// Releases entry |i| by overwriting it with undefined.
void DependentCode::clear_at(int i) {
  set_undefined(kCodesStartIndex + i);
}


// Copies entry |from| over entry |to| (used when compacting the list).
void DependentCode::copy(int from, int to) {
  set(kCodesStartIndex + to, get(kCodesStartIndex + from));
}
4835 
4836 
void Code::set_flags(Code::Flags flags) {
  // Every Code kind must be encodable in the KindField of the flags word.
  STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
  WRITE_INT_FIELD(this, kFlagsOffset, flags);
}


// The code kind, decoded from the flags word.
Code::Kind Code::kind() {
  return ExtractKindFromFlags(flags());
}
4846 
// True for stubs, handlers, and every IC kind from IC_KIND_LIST.
bool Code::IsCodeStubOrIC() {
  switch (kind()) {
    case STUB:
    case HANDLER:
#define CASE_KIND(kind) case kind:
      IC_KIND_LIST(CASE_KIND)
#undef CASE_KIND
      return true;
    default:
      return false;
  }
}

// Extra IC state from the flags word; only meaningful for IC and debug
// stubs, hence the DCHECK.
ExtraICState Code::extra_ic_state() {
  DCHECK(is_inline_cache_stub() || is_debug_stub());
  return ExtractExtraICStateFromFlags(flags());
}
4864 
4865 
4866 // For initialization.
// Raw writes used when initializing a fresh Code object, before the
// individual bit fields are meaningful.
void Code::set_raw_kind_specific_flags1(int value) {
  WRITE_INT_FIELD(this, kKindSpecificFlags1Offset, value);
}


void Code::set_raw_kind_specific_flags2(int value) {
  WRITE_INT_FIELD(this, kKindSpecificFlags2Offset, value);
}


// Whether this code was produced by Crankshaft (bit in kind-specific
// flags 2).
inline bool Code::is_crankshafted() {
  return IsCrankshaftedField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
}
4881 
4882 
// Crankshafted code that is not an optimized function is a Hydrogen stub.
inline bool Code::is_hydrogen_stub() {
  return is_crankshafted() && kind() != OPTIMIZED_FUNCTION;
}

// Identity comparison against the three interpreter trampoline builtins.
inline bool Code::is_interpreter_trampoline_builtin() {
  Builtins* builtins = GetIsolate()->builtins();
  return this == *builtins->InterpreterEntryTrampoline() ||
         this == *builtins->InterpreterEnterBytecodeDispatch() ||
         this == *builtins->InterpreterMarkBaselineOnReturn();
}
4893 
// Unwinding-info presence bit, packed into the flags word at kFlagsOffset.
inline bool Code::has_unwinding_info() const {
  return HasUnwindingInfoField::decode(READ_UINT32_FIELD(this, kFlagsOffset));
}

inline void Code::set_has_unwinding_info(bool state) {
  uint32_t previous = READ_UINT32_FIELD(this, kFlagsOffset);
  uint32_t updated_value = HasUnwindingInfoField::update(previous, state);
  WRITE_UINT32_FIELD(this, kFlagsOffset, updated_value);
}

inline void Code::set_is_crankshafted(bool value) {
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = IsCrankshaftedField::update(previous, value);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}
4909 
4910 
// Turbofan-origin bit in kind-specific flags 1.
inline bool Code::is_turbofanned() {
  return IsTurbofannedField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}


inline void Code::set_is_turbofanned(bool value) {
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = IsTurbofannedField::update(previous, value);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}


// Weak-object embedding bit; only meaningful for optimized function code,
// hence the DCHECKs.
inline bool Code::can_have_weak_objects() {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  return CanHaveWeakObjectsField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}


inline void Code::set_can_have_weak_objects(bool value) {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = CanHaveWeakObjectsField::update(previous, value);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}
4937 
4938 
// Flags stored in the kFullCodeFlags word; all are only valid on
// full-codegen (FUNCTION-kind) code, hence the DCHECKs.
bool Code::has_deoptimization_support() {
  DCHECK_EQ(FUNCTION, kind());
  unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
}


void Code::set_has_deoptimization_support(bool value) {
  DCHECK_EQ(FUNCTION, kind());
  unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
  WRITE_UINT32_FIELD(this, kFullCodeFlags, flags);
}


bool Code::has_debug_break_slots() {
  DCHECK_EQ(FUNCTION, kind());
  unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
}


void Code::set_has_debug_break_slots(bool value) {
  DCHECK_EQ(FUNCTION, kind());
  unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
  WRITE_UINT32_FIELD(this, kFullCodeFlags, flags);
}


bool Code::has_reloc_info_for_serialization() {
  DCHECK_EQ(FUNCTION, kind());
  unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsHasRelocInfoForSerialization::decode(flags);
}


void Code::set_has_reloc_info_for_serialization(bool value) {
  DCHECK_EQ(FUNCTION, kind());
  unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsHasRelocInfoForSerialization::update(flags, value);
  WRITE_UINT32_FIELD(this, kFullCodeFlags, flags);
}
4982 
4983 
// Maximum loop nesting level at which on-stack replacement is allowed;
// FUNCTION-kind code only.
int Code::allow_osr_at_loop_nesting_level() {
  DCHECK_EQ(FUNCTION, kind());
  int fields = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  return AllowOSRAtLoopNestingLevelField::decode(fields);
}


void Code::set_allow_osr_at_loop_nesting_level(int level) {
  DCHECK_EQ(FUNCTION, kind());
  DCHECK(level >= 0 && level <= kMaxLoopNestingMarker);
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = AllowOSRAtLoopNestingLevelField::update(previous, level);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}


int Code::profiler_ticks() {
  DCHECK_EQ(FUNCTION, kind());
  return ProfilerTicksField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}


// Unlike the getter, the setter deliberately ignores non-FUNCTION code
// instead of asserting, so callers need not check the kind first.
void Code::set_profiler_ticks(int ticks) {
  if (kind() == FUNCTION) {
    unsigned previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
    unsigned updated = ProfilerTicksField::update(previous, ticks);
    WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
  }
}
5014 
// Index into the builtins table for builtin code objects.
int Code::builtin_index() { return READ_INT_FIELD(this, kBuiltinIndexOffset); }

void Code::set_builtin_index(int index) {
  WRITE_INT_FIELD(this, kBuiltinIndexOffset, index);
}
5020 
5021 
stack_slots()5022 unsigned Code::stack_slots() {
5023   DCHECK(is_crankshafted());
5024   return StackSlotsField::decode(
5025       READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
5026 }
5027 
5028 
set_stack_slots(unsigned slots)5029 void Code::set_stack_slots(unsigned slots) {
5030   CHECK(slots <= (1 << kStackSlotsBitCount));
5031   DCHECK(is_crankshafted());
5032   int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
5033   int updated = StackSlotsField::update(previous, slots);
5034   WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
5035 }
5036 
5037 
safepoint_table_offset()5038 unsigned Code::safepoint_table_offset() {
5039   DCHECK(is_crankshafted());
5040   return SafepointTableOffsetField::decode(
5041       READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
5042 }
5043 
5044 
set_safepoint_table_offset(unsigned offset)5045 void Code::set_safepoint_table_offset(unsigned offset) {
5046   CHECK(offset <= (1 << kSafepointTableOffsetBitCount));
5047   DCHECK(is_crankshafted());
5048   DCHECK(IsAligned(offset, static_cast<unsigned>(kIntSize)));
5049   int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
5050   int updated = SafepointTableOffsetField::update(previous, offset);
5051   WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
5052 }
5053 
5054 
back_edge_table_offset()5055 unsigned Code::back_edge_table_offset() {
5056   DCHECK_EQ(FUNCTION, kind());
5057   return BackEdgeTableOffsetField::decode(
5058       READ_UINT32_FIELD(this, kKindSpecificFlags2Offset)) << kPointerSizeLog2;
5059 }
5060 
5061 
set_back_edge_table_offset(unsigned offset)5062 void Code::set_back_edge_table_offset(unsigned offset) {
5063   DCHECK_EQ(FUNCTION, kind());
5064   DCHECK(IsAligned(offset, static_cast<unsigned>(kPointerSize)));
5065   offset = offset >> kPointerSizeLog2;
5066   int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
5067   int updated = BackEdgeTableOffsetField::update(previous, offset);
5068   WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
5069 }
5070 
5071 
back_edges_patched_for_osr()5072 bool Code::back_edges_patched_for_osr() {
5073   DCHECK_EQ(FUNCTION, kind());
5074   return allow_osr_at_loop_nesting_level() > 0;
5075 }
5076 
5077 
to_boolean_state()5078 uint16_t Code::to_boolean_state() { return extra_ic_state(); }
5079 
5080 
marked_for_deoptimization()5081 bool Code::marked_for_deoptimization() {
5082   DCHECK(kind() == OPTIMIZED_FUNCTION);
5083   return MarkedForDeoptimizationField::decode(
5084       READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
5085 }
5086 
5087 
set_marked_for_deoptimization(bool flag)5088 void Code::set_marked_for_deoptimization(bool flag) {
5089   DCHECK(kind() == OPTIMIZED_FUNCTION);
5090   DCHECK(!flag || AllowDeoptimization::IsAllowed(GetIsolate()));
5091   int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
5092   int updated = MarkedForDeoptimizationField::update(previous, flag);
5093   WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
5094 }
5095 
5096 
// True when this code object's kind is one of the inline-cache kinds
// enumerated by IC_KIND_LIST.
bool Code::is_inline_cache_stub() {
  Kind kind = this->kind();
  switch (kind) {
// Expands to one "case <kind>: return true;" per IC kind.
#define CASE(name) case name: return true;
    IC_KIND_LIST(CASE)
#undef CASE
    default: return false;
  }
}
5106 
is_debug_stub()5107 bool Code::is_debug_stub() {
5108   if (kind() != BUILTIN) return false;
5109   switch (builtin_index()) {
5110 #define CASE_DEBUG_BUILTIN(name, kind, extra) case Builtins::k##name:
5111     BUILTIN_LIST_DEBUG_A(CASE_DEBUG_BUILTIN)
5112 #undef CASE_DEBUG_BUILTIN
5113       return true;
5114     default:
5115       return false;
5116   }
5117   return false;
5118 }
is_handler()5119 bool Code::is_handler() { return kind() == HANDLER; }
is_call_stub()5120 bool Code::is_call_stub() { return kind() == CALL_IC; }
is_binary_op_stub()5121 bool Code::is_binary_op_stub() { return kind() == BINARY_OP_IC; }
is_compare_ic_stub()5122 bool Code::is_compare_ic_stub() { return kind() == COMPARE_IC; }
is_to_boolean_ic_stub()5123 bool Code::is_to_boolean_ic_stub() { return kind() == TO_BOOLEAN_IC; }
is_optimized_code()5124 bool Code::is_optimized_code() { return kind() == OPTIMIZED_FUNCTION; }
is_wasm_code()5125 bool Code::is_wasm_code() { return kind() == WASM_FUNCTION; }
5126 
constant_pool()5127 Address Code::constant_pool() {
5128   Address constant_pool = NULL;
5129   if (FLAG_enable_embedded_constant_pool) {
5130     int offset = constant_pool_offset();
5131     if (offset < instruction_size()) {
5132       constant_pool = FIELD_ADDR(this, kHeaderSize + offset);
5133     }
5134   }
5135   return constant_pool;
5136 }
5137 
ComputeFlags(Kind kind,ExtraICState extra_ic_state,CacheHolderFlag holder)5138 Code::Flags Code::ComputeFlags(Kind kind, ExtraICState extra_ic_state,
5139                                CacheHolderFlag holder) {
5140   // TODO(ishell): remove ICStateField.
5141   // Compute the bit mask.
5142   unsigned int bits = KindField::encode(kind) |
5143                       ICStateField::encode(MONOMORPHIC) |
5144                       ExtraICStateField::encode(extra_ic_state) |
5145                       CacheHolderField::encode(holder);
5146   return static_cast<Flags>(bits);
5147 }
5148 
ComputeHandlerFlags(Kind handler_kind,CacheHolderFlag holder)5149 Code::Flags Code::ComputeHandlerFlags(Kind handler_kind,
5150                                       CacheHolderFlag holder) {
5151   return ComputeFlags(Code::HANDLER, handler_kind, holder);
5152 }
5153 
5154 
ExtractKindFromFlags(Flags flags)5155 Code::Kind Code::ExtractKindFromFlags(Flags flags) {
5156   return KindField::decode(flags);
5157 }
5158 
5159 
ExtractExtraICStateFromFlags(Flags flags)5160 ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
5161   return ExtraICStateField::decode(flags);
5162 }
5163 
5164 
ExtractCacheHolderFromFlags(Flags flags)5165 CacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) {
5166   return CacheHolderField::decode(flags);
5167 }
5168 
RemoveHolderFromFlags(Flags flags)5169 Code::Flags Code::RemoveHolderFromFlags(Flags flags) {
5170   int bits = flags & ~CacheHolderField::kMask;
5171   return static_cast<Flags>(bits);
5172 }
5173 
5174 
GetCodeFromTargetAddress(Address address)5175 Code* Code::GetCodeFromTargetAddress(Address address) {
5176   HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
5177   // GetCodeFromTargetAddress might be called when marking objects during mark
5178   // sweep. reinterpret_cast is therefore used instead of the more appropriate
5179   // Code::cast. Code::cast does not work when the object's map is
5180   // marked.
5181   Code* result = reinterpret_cast<Code*>(code);
5182   return result;
5183 }
5184 
5185 
GetObjectFromEntryAddress(Address location_of_address)5186 Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
5187   return HeapObject::
5188       FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
5189 }
5190 
5191 
CanContainWeakObjects()5192 bool Code::CanContainWeakObjects() {
5193   return is_optimized_code() && can_have_weak_objects();
5194 }
5195 
5196 
IsWeakObject(Object * object)5197 bool Code::IsWeakObject(Object* object) {
5198   return (CanContainWeakObjects() && IsWeakObjectInOptimizedCode(object));
5199 }
5200 
5201 
IsWeakObjectInOptimizedCode(Object * object)5202 bool Code::IsWeakObjectInOptimizedCode(Object* object) {
5203   if (object->IsMap()) {
5204     return Map::cast(object)->CanTransition() &&
5205            FLAG_weak_embedded_maps_in_optimized_code;
5206   }
5207   if (object->IsCell()) {
5208     object = Cell::cast(object)->value();
5209   } else if (object->IsPropertyCell()) {
5210     object = PropertyCell::cast(object)->value();
5211   }
5212   if (object->IsJSReceiver()) {
5213     return FLAG_weak_embedded_objects_in_optimized_code;
5214   }
5215   if (object->IsContext()) {
5216     // Contexts of inlined functions are embedded in optimized code.
5217     return FLAG_weak_embedded_objects_in_optimized_code;
5218   }
5219   return false;
5220 }
5221 
5222 
// Accumulates up to kMaxCount (map-to-find -> replacement object) pairs.
// The arrays are consumed directly by friend class Code — presumably by a
// Code::FindAndReplace routine defined elsewhere; verify against objects.cc.
class Code::FindAndReplacePattern {
 public:
  FindAndReplacePattern() : count_(0) { }
  // Registers one map to search for and the object to substitute for it.
  void Add(Handle<Map> map_to_find, Handle<Object> obj_to_replace) {
    DCHECK(count_ < kMaxCount);
    find_[count_] = map_to_find;
    replace_[count_] = obj_to_replace;
    ++count_;
  }
 private:
  static const int kMaxCount = 4;
  int count_;                       // number of registered pairs
  Handle<Map> find_[kMaxCount];     // maps to look for
  Handle<Object> replace_[kMaxCount];  // parallel array of replacements
  friend class Code;
};
5239 
instruction_size()5240 int AbstractCode::instruction_size() {
5241   if (IsCode()) {
5242     return GetCode()->instruction_size();
5243   } else {
5244     return GetBytecodeArray()->length();
5245   }
5246 }
5247 
SizeIncludingMetadata()5248 int AbstractCode::SizeIncludingMetadata() {
5249   if (IsCode()) {
5250     return GetCode()->SizeIncludingMetadata();
5251   } else {
5252     return GetBytecodeArray()->SizeIncludingMetadata();
5253   }
5254 }
ExecutableSize()5255 int AbstractCode::ExecutableSize() {
5256   if (IsCode()) {
5257     return GetCode()->ExecutableSize();
5258   } else {
5259     return GetBytecodeArray()->BytecodeArraySize();
5260   }
5261 }
5262 
instruction_start()5263 Address AbstractCode::instruction_start() {
5264   if (IsCode()) {
5265     return GetCode()->instruction_start();
5266   } else {
5267     return GetBytecodeArray()->GetFirstBytecodeAddress();
5268   }
5269 }
5270 
instruction_end()5271 Address AbstractCode::instruction_end() {
5272   if (IsCode()) {
5273     return GetCode()->instruction_end();
5274   } else {
5275     return GetBytecodeArray()->GetFirstBytecodeAddress() +
5276            GetBytecodeArray()->length();
5277   }
5278 }
5279 
contains(byte * inner_pointer)5280 bool AbstractCode::contains(byte* inner_pointer) {
5281   return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
5282 }
5283 
kind()5284 AbstractCode::Kind AbstractCode::kind() {
5285   if (IsCode()) {
5286     STATIC_ASSERT(AbstractCode::FUNCTION ==
5287                   static_cast<AbstractCode::Kind>(Code::FUNCTION));
5288     return static_cast<AbstractCode::Kind>(GetCode()->kind());
5289   } else {
5290     return INTERPRETED_FUNCTION;
5291   }
5292 }
5293 
GetCode()5294 Code* AbstractCode::GetCode() { return Code::cast(this); }
5295 
GetBytecodeArray()5296 BytecodeArray* AbstractCode::GetBytecodeArray() {
5297   return BytecodeArray::cast(this);
5298 }
5299 
prototype()5300 Object* Map::prototype() const {
5301   return READ_FIELD(this, kPrototypeOffset);
5302 }
5303 
5304 
// Stores the prototype; the value must be null or a JSReceiver. The write
// barrier is applied after the raw field write, per the usual pattern.
void Map::set_prototype(Object* value, WriteBarrierMode mode) {
  DCHECK(value->IsNull(GetIsolate()) || value->IsJSReceiver());
  WRITE_FIELD(this, kPrototypeOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
}
5310 
5311 
layout_descriptor_gc_safe()5312 LayoutDescriptor* Map::layout_descriptor_gc_safe() {
5313   Object* layout_desc = READ_FIELD(this, kLayoutDecriptorOffset);
5314   return LayoutDescriptor::cast_gc_safe(layout_desc);
5315 }
5316 
5317 
HasFastPointerLayout()5318 bool Map::HasFastPointerLayout() const {
5319   Object* layout_desc = READ_FIELD(this, kLayoutDecriptorOffset);
5320   return LayoutDescriptor::IsFastPointerLayout(layout_desc);
5321 }
5322 
5323 
// Installs a new descriptor array and, when double-field unboxing is enabled
// and the current layout descriptor is in slow mode, the matching layout
// descriptor as well. Consistency is verified afterwards.
void Map::UpdateDescriptors(DescriptorArray* descriptors,
                            LayoutDescriptor* layout_desc) {
  set_instance_descriptors(descriptors);
  if (FLAG_unbox_double_fields) {
    if (layout_descriptor()->IsSlowLayout()) {
      set_layout_descriptor(layout_desc);
    }
#ifdef VERIFY_HEAP
    // TODO(ishell): remove these checks from VERIFY_HEAP mode.
    if (FLAG_verify_heap) {
      CHECK(layout_descriptor()->IsConsistentWithMap(this));
      CHECK(visitor_id() == Heap::GetStaticVisitorIdForMap(this));
    }
#else
    SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(this));
    DCHECK(visitor_id() == Heap::GetStaticVisitorIdForMap(this));
#endif
  }
}
5343 
5344 
// Installs a descriptor array and marks all of its descriptors as own
// descriptors. Unlike UpdateDescriptors, this unconditionally sets the
// layout descriptor and refreshes the visitor id.
void Map::InitializeDescriptors(DescriptorArray* descriptors,
                                LayoutDescriptor* layout_desc) {
  int len = descriptors->number_of_descriptors();
  set_instance_descriptors(descriptors);
  SetNumberOfOwnDescriptors(len);

  if (FLAG_unbox_double_fields) {
    set_layout_descriptor(layout_desc);
#ifdef VERIFY_HEAP
    // TODO(ishell): remove these checks from VERIFY_HEAP mode.
    if (FLAG_verify_heap) {
      CHECK(layout_descriptor()->IsConsistentWithMap(this));
    }
#else
    SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(this));
#endif
    set_visitor_id(Heap::GetStaticVisitorIdForMap(this));
  }
}
5364 
5365 
// Raw accessors for the descriptor array and its layout descriptor.
ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset)
ACCESSORS(Map, layout_descriptor, LayoutDescriptor, kLayoutDecriptorOffset)
5368 
5369 
void Map::set_bit_field3(uint32_t bits) {
  // When the 32-bit field lives in a pointer-sized slot (64-bit targets),
  // zero the other half of the slot first so the whole word is well-defined.
  if (kInt32Size != kPointerSize) {
    WRITE_UINT32_FIELD(this, kBitField3Offset + kInt32Size, 0);
  }
  WRITE_UINT32_FIELD(this, kBitField3Offset, bits);
}


uint32_t Map::bit_field3() const {
  return READ_UINT32_FIELD(this, kBitField3Offset);
}
5381 
5382 
GetLayoutDescriptor()5383 LayoutDescriptor* Map::GetLayoutDescriptor() {
5384   return FLAG_unbox_double_fields ? layout_descriptor()
5385                                   : LayoutDescriptor::FastPointerLayout();
5386 }
5387 
5388 
// Appends one descriptor to the instance descriptors and bumps the
// own-descriptor count. Double-field data descriptors are rejected (debug
// only) because the layout descriptor is not updated here.
void Map::AppendDescriptor(Descriptor* desc) {
  DescriptorArray* descriptors = instance_descriptors();
  int number_of_own_descriptors = NumberOfOwnDescriptors();
  DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors);
  descriptors->Append(desc);
  SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);

// This function does not support appending double field descriptors and
// it should never try to (otherwise, layout descriptor must be updated too).
#ifdef DEBUG
  PropertyDetails details = desc->GetDetails();
  CHECK(details.type() != DATA || !details.representation().IsDouble());
#endif
}
5403 
5404 
GetBackPointer()5405 Object* Map::GetBackPointer() {
5406   Object* object = constructor_or_backpointer();
5407   if (object->IsMap()) {
5408     return object;
5409   }
5410   return GetIsolate()->heap()->undefined_value();
5411 }
5412 
5413 
ElementsTransitionMap()5414 Map* Map::ElementsTransitionMap() {
5415   return TransitionArray::SearchSpecial(
5416       this, GetHeap()->elements_transition_symbol());
5417 }
5418 
5419 
ACCESSORS(Map,raw_transitions,Object,kTransitionsOrPrototypeInfoOffset)5420 ACCESSORS(Map, raw_transitions, Object, kTransitionsOrPrototypeInfoOffset)
5421 
5422 
// The prototype-info slot aliases raw_transitions and is only valid on
// prototype maps (hence the DCHECKs).
Object* Map::prototype_info() const {
  DCHECK(is_prototype_map());
  return READ_FIELD(this, Map::kTransitionsOrPrototypeInfoOffset);
}


void Map::set_prototype_info(Object* value, WriteBarrierMode mode) {
  DCHECK(is_prototype_map());
  WRITE_FIELD(this, Map::kTransitionsOrPrototypeInfoOffset, value);
  CONDITIONAL_WRITE_BARRIER(
      GetHeap(), this, Map::kTransitionsOrPrototypeInfoOffset, value, mode);
}
5435 
5436 
// Installs a back pointer into the shared constructor/back-pointer slot.
// Preconditions (debug-checked): JSReceiver map, value is a map, no back
// pointer installed yet, and the new back pointer's constructor matches the
// constructor currently stored here.
void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
  DCHECK(instance_type() >= FIRST_JS_RECEIVER_TYPE);
  DCHECK(value->IsMap());
  DCHECK(GetBackPointer()->IsUndefined(GetIsolate()));
  DCHECK(!value->IsMap() ||
         Map::cast(value)->GetConstructor() == constructor_or_backpointer());
  set_constructor_or_backpointer(value, mode);
}
5445 
// Map bookkeeping fields. constructor_or_backpointer is the dual-purpose
// slot interpreted by GetConstructor()/GetBackPointer().
ACCESSORS(Map, code_cache, FixedArray, kCodeCacheOffset)
ACCESSORS(Map, dependent_code, DependentCode, kDependentCodeOffset)
ACCESSORS(Map, weak_cell_cache, Object, kWeakCellCacheOffset)
ACCESSORS(Map, constructor_or_backpointer, Object,
          kConstructorOrBackPointerOffset)
5451 
5452 
5453 Object* Map::GetConstructor() const {
5454   Object* maybe_constructor = constructor_or_backpointer();
5455   // Follow any back pointers.
5456   while (maybe_constructor->IsMap()) {
5457     maybe_constructor =
5458         Map::cast(maybe_constructor)->constructor_or_backpointer();
5459   }
5460   return maybe_constructor;
5461 }
5462 
5463 
SetConstructor(Object * constructor,WriteBarrierMode mode)5464 void Map::SetConstructor(Object* constructor, WriteBarrierMode mode) {
5465   // Never overwrite a back pointer with a constructor.
5466   DCHECK(!constructor_or_backpointer()->IsMap());
5467   set_constructor_or_backpointer(constructor, mode);
5468 }
5469 
5470 
// Convenience overload: copies the initial map, preserving its current
// instance size, in-object property count and unused property fields.
Handle<Map> Map::CopyInitialMap(Handle<Map> map) {
  return CopyInitialMap(map, map->instance_size(), map->GetInObjectProperties(),
                        map->unused_property_fields());
}
5475 
5476 
// Macro-generated field accessors for bound functions, functions, global
// objects/proxies, AccessorInfo and Box.
ACCESSORS(JSBoundFunction, bound_target_function, JSReceiver,
          kBoundTargetFunctionOffset)
ACCESSORS(JSBoundFunction, bound_this, Object, kBoundThisOffset)
ACCESSORS(JSBoundFunction, bound_arguments, FixedArray, kBoundArgumentsOffset)

ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
ACCESSORS(JSFunction, literals, LiteralsArray, kLiteralsOffset)
ACCESSORS(JSFunction, next_function_link, Object, kNextFunctionLinkOffset)

ACCESSORS(JSGlobalObject, native_context, Context, kNativeContextOffset)
ACCESSORS(JSGlobalObject, global_proxy, JSObject, kGlobalProxyOffset)

ACCESSORS(JSGlobalProxy, native_context, Object, kNativeContextOffset)
ACCESSORS(JSGlobalProxy, hash, Object, kHashOffset)

ACCESSORS(AccessorInfo, name, Object, kNameOffset)
SMI_ACCESSORS(AccessorInfo, flag, kFlagOffset)
ACCESSORS(AccessorInfo, expected_receiver_type, Object,
          kExpectedReceiverTypeOffset)

ACCESSORS(AccessorInfo, getter, Object, kGetterOffset)
ACCESSORS(AccessorInfo, setter, Object, kSetterOffset)
ACCESSORS(AccessorInfo, js_getter, Object, kJsGetterOffset)
ACCESSORS(AccessorInfo, data, Object, kDataOffset)

ACCESSORS(Box, value, Object, kValueOffset)
5503 
5504 Map* PrototypeInfo::ObjectCreateMap() {
5505   return Map::cast(WeakCell::cast(object_create_map())->value());
5506 }
5507 
5508 // static
SetObjectCreateMap(Handle<PrototypeInfo> info,Handle<Map> map)5509 void PrototypeInfo::SetObjectCreateMap(Handle<PrototypeInfo> info,
5510                                        Handle<Map> map) {
5511   Handle<WeakCell> cell = Map::WeakCellForMap(map);
5512   info->set_object_create_map(*cell);
5513 }
5514 
HasObjectCreateMap()5515 bool PrototypeInfo::HasObjectCreateMap() {
5516   Object* cache = object_create_map();
5517   return cache->IsWeakCell() && !WeakCell::cast(cache)->cleared();
5518 }
5519 
instantiated()5520 bool FunctionTemplateInfo::instantiated() {
5521   return shared_function_info()->IsSharedFunctionInfo();
5522 }
5523 
// Macro-generated field accessors, grouped by holder type.

// PrototypeInfo.
ACCESSORS(PrototypeInfo, prototype_users, Object, kPrototypeUsersOffset)
ACCESSORS(PrototypeInfo, object_create_map, Object, kObjectCreateMap)
SMI_ACCESSORS(PrototypeInfo, registry_slot, kRegistrySlotOffset)
ACCESSORS(PrototypeInfo, validity_cell, Object, kValidityCellOffset)
SMI_ACCESSORS(PrototypeInfo, bit_field, kBitFieldOffset)
BOOL_ACCESSORS(PrototypeInfo, bit_field, should_be_fast_map, kShouldBeFastBit)

ACCESSORS(SloppyBlockWithEvalContextExtension, scope_info, ScopeInfo,
          kScopeInfoOffset)
ACCESSORS(SloppyBlockWithEvalContextExtension, extension, JSObject,
          kExtensionOffset)

ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
ACCESSORS(AccessorPair, setter, Object, kSetterOffset)

ACCESSORS(AccessCheckInfo, callback, Object, kCallbackOffset)
ACCESSORS(AccessCheckInfo, named_interceptor, Object, kNamedInterceptorOffset)
ACCESSORS(AccessCheckInfo, indexed_interceptor, Object,
          kIndexedInterceptorOffset)
ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)

// InterceptorInfo, including its packed boolean flags.
ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
ACCESSORS(InterceptorInfo, data, Object, kDataOffset)
SMI_ACCESSORS(InterceptorInfo, flags, kFlagsOffset)
BOOL_ACCESSORS(InterceptorInfo, flags, can_intercept_symbols,
               kCanInterceptSymbolsBit)
BOOL_ACCESSORS(InterceptorInfo, flags, all_can_read, kAllCanReadBit)
BOOL_ACCESSORS(InterceptorInfo, flags, non_masking, kNonMasking)

ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)
ACCESSORS(CallHandlerInfo, fast_handler, Object, kFastHandlerOffset)

ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
ACCESSORS(TemplateInfo, serial_number, Object, kSerialNumberOffset)
SMI_ACCESSORS(TemplateInfo, number_of_properties, kNumberOfProperties)
ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
ACCESSORS(TemplateInfo, property_accessors, Object, kPropertyAccessorsOffset)

// FunctionTemplateInfo.
ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
          kPrototypeTemplateOffset)
ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
          kNamedPropertyHandlerOffset)
ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
          kIndexedPropertyHandlerOffset)
ACCESSORS(FunctionTemplateInfo, instance_template, Object,
          kInstanceTemplateOffset)
ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
          kInstanceCallHandlerOffset)
ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
          kAccessCheckInfoOffset)
ACCESSORS(FunctionTemplateInfo, shared_function_info, Object,
          kSharedFunctionInfoOffset)

SMI_ACCESSORS(FunctionTemplateInfo, flag, kFlagOffset)

ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
          kInternalFieldCountOffset)

// AllocationSite / AllocationMemento.
ACCESSORS(AllocationSite, transition_info, Object, kTransitionInfoOffset)
ACCESSORS(AllocationSite, nested_site, Object, kNestedSiteOffset)
SMI_ACCESSORS(AllocationSite, pretenure_data, kPretenureDataOffset)
SMI_ACCESSORS(AllocationSite, pretenure_create_count,
              kPretenureCreateCountOffset)
ACCESSORS(AllocationSite, dependent_code, DependentCode,
          kDependentCodeOffset)
ACCESSORS(AllocationSite, weak_next, Object, kWeakNextOffset)
ACCESSORS(AllocationMemento, allocation_site, Object, kAllocationSiteOffset)

// Script.
ACCESSORS(Script, source, Object, kSourceOffset)
ACCESSORS(Script, name, Object, kNameOffset)
SMI_ACCESSORS(Script, id, kIdOffset)
SMI_ACCESSORS(Script, line_offset, kLineOffsetOffset)
SMI_ACCESSORS(Script, column_offset, kColumnOffsetOffset)
ACCESSORS(Script, context_data, Object, kContextOffset)
ACCESSORS(Script, wrapper, HeapObject, kWrapperOffset)
SMI_ACCESSORS(Script, type, kTypeOffset)
ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
SMI_ACCESSORS(Script, eval_from_position, kEvalFromPositionOffset)
ACCESSORS(Script, shared_function_infos, Object, kSharedFunctionInfosOffset)
SMI_ACCESSORS(Script, flags, kFlagsOffset)
ACCESSORS(Script, source_url, Object, kSourceUrlOffset)
ACCESSORS(Script, source_mapping_url, Object, kSourceMappingUrlOffset)
5617 
5618 Script::CompilationType Script::compilation_type() {
5619   return BooleanBit::get(flags(), kCompilationTypeBit) ?
5620       COMPILATION_TYPE_EVAL : COMPILATION_TYPE_HOST;
5621 }
set_compilation_type(CompilationType type)5622 void Script::set_compilation_type(CompilationType type) {
5623   set_flags(BooleanBit::set(flags(), kCompilationTypeBit,
5624       type == COMPILATION_TYPE_EVAL));
5625 }
hide_source()5626 bool Script::hide_source() { return BooleanBit::get(flags(), kHideSourceBit); }
set_hide_source(bool value)5627 void Script::set_hide_source(bool value) {
5628   set_flags(BooleanBit::set(flags(), kHideSourceBit, value));
5629 }
compilation_state()5630 Script::CompilationState Script::compilation_state() {
5631   return BooleanBit::get(flags(), kCompilationStateBit) ?
5632       COMPILATION_STATE_COMPILED : COMPILATION_STATE_INITIAL;
5633 }
set_compilation_state(CompilationState state)5634 void Script::set_compilation_state(CompilationState state) {
5635   set_flags(BooleanBit::set(flags(), kCompilationStateBit,
5636       state == COMPILATION_STATE_COMPILED));
5637 }
origin_options()5638 ScriptOriginOptions Script::origin_options() {
5639   return ScriptOriginOptions((flags() & kOriginOptionsMask) >>
5640                              kOriginOptionsShift);
5641 }
set_origin_options(ScriptOriginOptions origin_options)5642 void Script::set_origin_options(ScriptOriginOptions origin_options) {
5643   DCHECK(!(origin_options.Flags() & ~((1 << kOriginOptionsSize) - 1)));
5644   set_flags((flags() & ~kOriginOptionsMask) |
5645             (origin_options.Flags() << kOriginOptionsShift));
5646 }
5647 
5648 
// DebugInfo fields: the SharedFunctionInfo, its code, and break points.
ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
ACCESSORS(DebugInfo, abstract_code, AbstractCode, kAbstractCodeIndex)
ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)
5652 
5653 BytecodeArray* DebugInfo::original_bytecode_array() {
5654   return shared()->bytecode_array();
5655 }
5656 
// BreakPointInfo: position bookkeeping for one break location.
SMI_ACCESSORS(BreakPointInfo, code_offset, kCodeOffsetIndex)
SMI_ACCESSORS(BreakPointInfo, source_position, kSourcePositionIndex)
SMI_ACCESSORS(BreakPointInfo, statement_position, kStatementPositionIndex)
ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)

// SharedFunctionInfo: reference-typed fields.
ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
ACCESSORS(SharedFunctionInfo, optimized_code_map, FixedArray,
          kOptimizedCodeMapOffset)
ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
ACCESSORS(SharedFunctionInfo, feedback_metadata, TypeFeedbackMetadata,
          kFeedbackMetadataOffset)
#if TRACE_MAPS
SMI_ACCESSORS(SharedFunctionInfo, unique_id, kUniqueIdOffset)
#endif
ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
          kInstanceClassNameOffset)
ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
ACCESSORS(SharedFunctionInfo, function_identifier, Object,
          kFunctionIdentifierOffset)

// FunctionTemplateInfo: length and boolean bits of the flag word.
SMI_ACCESSORS(FunctionTemplateInfo, length, kLengthOffset)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
               kHiddenPrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
               kNeedsAccessCheckBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype,
               kReadOnlyPrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, remove_prototype,
               kRemovePrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, do_not_cache,
               kDoNotCacheBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, accept_any_receiver,
               kAcceptAnyReceiver)
// SharedFunctionInfo: bits packed into start_position_and_type.
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_named_expression,
               kIsNamedExpressionBit)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
               kIsTopLevelBit)

// SharedFunctionInfo: compiler hint bits.
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, allows_lazy_compilation,
               kAllowLazyCompilation)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               allows_lazy_compilation_without_context,
               kAllowLazyCompilationWithoutContext)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               uses_arguments,
               kUsesArguments)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               has_duplicate_parameters,
               kHasDuplicateParameters)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, asm_function, kIsAsmFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, deserialized, kDeserialized)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, never_compiled,
               kNeverCompiled)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_declaration,
               kIsDeclaration)
5718 
// On 32-bit hosts these int-sized SharedFunctionInfo fields are stored as
// real Smis. On 64-bit hosts (the #else branch) two ints share one
// pointer-sized slot and are accessed via the pseudo-Smi macros below.
#if V8_HOST_ARCH_32_BIT
SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
SMI_ACCESSORS(SharedFunctionInfo, internal_formal_parameter_count,
              kFormalParameterCountOffset)
SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
              kExpectedNofPropertiesOffset)
SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
              kStartPositionAndTypeOffset)
SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
              kFunctionTokenPositionOffset)
SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
              kCompilerHintsOffset)
SMI_ACCESSORS(SharedFunctionInfo, opt_count_and_bailout_reason,
              kOptCountAndBailoutReasonOffset)
SMI_ACCESSORS(SharedFunctionInfo, counters, kCountersOffset)
SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
SMI_ACCESSORS(SharedFunctionInfo, profiler_ticks, kProfilerTicksOffset)

#else

// Which half of the pointer-sized slot is the "low" int depends on target
// endianness.
#if V8_TARGET_LITTLE_ENDIAN
#define PSEUDO_SMI_LO_ALIGN 0
#define PSEUDO_SMI_HI_ALIGN kIntSize
#else
#define PSEUDO_SMI_LO_ALIGN kIntSize
#define PSEUDO_SMI_HI_ALIGN 0
#endif

// The LO accessor stores the value shifted left by one with the heap-object
// tag bit cleared (so the stored word does not look like a heap pointer);
// the HI accessor is a plain int field.
#define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset)                          \
  STATIC_ASSERT(holder::offset % kPointerSize == PSEUDO_SMI_LO_ALIGN);         \
  int holder::name() const {                                                   \
    int value = READ_INT_FIELD(this, offset);                                  \
    DCHECK(kHeapObjectTag == 1);                                               \
    DCHECK((value & kHeapObjectTag) == 0);                                     \
    return value >> 1;                                                         \
  }                                                                            \
  void holder::set_##name(int value) {                                         \
    DCHECK(kHeapObjectTag == 1);                                               \
    DCHECK((value & 0xC0000000) == 0xC0000000 || (value & 0xC0000000) == 0x0); \
    WRITE_INT_FIELD(this, offset, (value << 1) & ~kHeapObjectTag);             \
  }

#define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset)                  \
  STATIC_ASSERT(holder::offset % kPointerSize == PSEUDO_SMI_HI_ALIGN); \
  INT_ACCESSORS(holder, name, offset)
5766 
5767 
5768 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
5769 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, internal_formal_parameter_count,
5770                         kFormalParameterCountOffset)
5771 
5772 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5773                         expected_nof_properties,
5774                         kExpectedNofPropertiesOffset)
5775 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
5776 
5777 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
5778 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
5779                         start_position_and_type,
5780                         kStartPositionAndTypeOffset)
5781 
5782 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5783                         function_token_position,
5784                         kFunctionTokenPositionOffset)
5785 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
5786                         compiler_hints,
5787                         kCompilerHintsOffset)
5788 
5789 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5790                         opt_count_and_bailout_reason,
5791                         kOptCountAndBailoutReasonOffset)
5792 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, counters, kCountersOffset)
5793 
5794 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5795                         ast_node_count,
5796                         kAstNodeCountOffset)
5797 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
5798                         profiler_ticks,
5799                         kProfilerTicksOffset)
5800 
5801 #endif
5802 
5803 
// Read-only view of the optimization_disabled bit; the corresponding
// setter is hand-written below.
BOOL_GETTER(SharedFunctionInfo,
            compiler_hints,
            optimization_disabled,
            kOptimizationDisabled)
5808 
5809 AbstractCode* SharedFunctionInfo::abstract_code() {
5810   if (HasBytecodeArray()) {
5811     return AbstractCode::cast(bytecode_array());
5812   } else {
5813     return AbstractCode::cast(code());
5814   }
5815 }
5816 
set_optimization_disabled(bool disable)5817 void SharedFunctionInfo::set_optimization_disabled(bool disable) {
5818   set_compiler_hints(BooleanBit::set(compiler_hints(),
5819                                      kOptimizationDisabled,
5820                                      disable));
5821 }
5822 
5823 
// Reconstructs the language mode from the single strict-mode bit kept in
// the compiler hints.
LanguageMode SharedFunctionInfo::language_mode() {
  STATIC_ASSERT(LANGUAGE_END == 3);
  return construct_language_mode(
      BooleanBit::get(compiler_hints(), kStrictModeFunction));
}
5829 
5830 
// Updates the strict-mode bit in the compiler hints.
void SharedFunctionInfo::set_language_mode(LanguageMode language_mode) {
  STATIC_ASSERT(LANGUAGE_END == 3);
  // We only allow language mode transitions that set the same language mode
  // again or go up in the chain:
  DCHECK(is_sloppy(this->language_mode()) || is_strict(language_mode));
  int hints = compiler_hints();
  hints = BooleanBit::set(hints, kStrictModeFunction, is_strict(language_mode));
  set_compiler_hints(hints);
}
5840 
5841 
// The function kind is a multi-bit field inside the compiler hints.
FunctionKind SharedFunctionInfo::kind() {
  return FunctionKindBits::decode(compiler_hints());
}


void SharedFunctionInfo::set_kind(FunctionKind kind) {
  DCHECK(IsValidFunctionKind(kind));
  int hints = compiler_hints();
  hints = FunctionKindBits::update(hints, kind);
  set_compiler_hints(hints);
}
5853 
// More boolean flags packed into the compiler hints field.
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, needs_home_object,
               kNeedsHomeObject)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, force_inline, kForceInline)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
               name_should_print_as_anonymous,
               kNameShouldPrintAsAnonymous)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous_expression,
               kIsAnonymousExpression)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_function, kIsFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_crankshaft,
               kDontCrankshaft)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_flush, kDontFlush)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_arrow, kIsArrow)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_generator, kIsGenerator)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_async, kIsAsyncFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_concise_method,
               kIsConciseMethod)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_getter_function,
               kIsGetterFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_setter_function,
               kIsSetterFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_default_constructor,
               kIsDefaultConstructor)
5878 
5879 inline bool SharedFunctionInfo::is_resumable() const {
5880   return is_generator() || is_async();
5881 }
5882 
HasValidSource()5883 bool Script::HasValidSource() {
5884   Object* src = this->source();
5885   if (!src->IsString()) return true;
5886   String* src_str = String::cast(src);
5887   if (!StringShape(src_str).IsExternal()) return true;
5888   if (src_str->IsOneByteRepresentation()) {
5889     return ExternalOneByteString::cast(src)->resource() != NULL;
5890   } else if (src_str->IsTwoByteRepresentation()) {
5891     return ExternalTwoByteString::cast(src)->resource() != NULL;
5892   }
5893   return true;
5894 }
5895 
5896 
// Marks the function as exempt from arguments adaptation; only valid for
// builtins and stubs (see DCHECK).
void SharedFunctionInfo::DontAdaptArguments() {
  DCHECK(code()->kind() == Code::BUILTIN || code()->kind() == Code::STUB);
  set_internal_formal_parameter_count(kDontAdaptArgumentsSentinel);
}
5901 
5902 
start_position()5903 int SharedFunctionInfo::start_position() const {
5904   return start_position_and_type() >> kStartPositionShift;
5905 }
5906 
5907 
set_start_position(int start_position)5908 void SharedFunctionInfo::set_start_position(int start_position) {
5909   set_start_position_and_type((start_position << kStartPositionShift)
5910     | (start_position_and_type() & ~kStartPositionMask));
5911 }
5912 
5913 
// The Code object currently installed for this shared function info.
Code* SharedFunctionInfo::code() const {
  return Code::cast(READ_FIELD(this, kCodeOffset));
}


// Installs |value|; optimized code may never be stored here (see DCHECK).
void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
  DCHECK(value->kind() != Code::OPTIMIZED_FUNCTION);
  WRITE_FIELD(this, kCodeOffset, value);
  CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);
}
5924 
5925 
// Replaces the code, evicting this function from the code-flushing
// candidate list first if needed, and clears the never_compiled hint once
// real code is installed.
void SharedFunctionInfo::ReplaceCode(Code* value) {
  // If the GC metadata field is already used then the function was
  // enqueued as a code flushing candidate and we remove it now.
  if (code()->gc_metadata() != NULL) {
    CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher();
    flusher->EvictCandidate(this);
  }

  DCHECK(code()->gc_metadata() == NULL && value->gc_metadata() == NULL);
#ifdef DEBUG
  Code::VerifyRecompiledCode(code(), value);
#endif  // DEBUG

  set_code(value);

  if (is_compiled()) set_never_compiled(false);
}
5943 
5944 
ScopeInfo* SharedFunctionInfo::scope_info() const {
  return reinterpret_cast<ScopeInfo*>(READ_FIELD(this, kScopeInfoOffset));
}


// Stores the scope info, emitting a conditional write barrier for the
// heap reference.
void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
                                        WriteBarrierMode mode) {
  WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
  CONDITIONAL_WRITE_BARRIER(GetHeap(),
                            this,
                            kScopeInfoOffset,
                            reinterpret_cast<Object*>(value),
                            mode);
}
5959 
5960 
// A shared function info counts as compiled once its code is no longer
// the lazy-compile stub.  The compile-marker builtins checked by the
// DCHECKs are never expected on a shared info.
bool SharedFunctionInfo::is_compiled() {
  Builtins* builtins = GetIsolate()->builtins();
  DCHECK(code() != builtins->builtin(Builtins::kCompileOptimizedConcurrent));
  DCHECK(code() != builtins->builtin(Builtins::kCompileOptimized));
  DCHECK(code() != builtins->builtin(Builtins::kCompileBaseline));
  return code() != builtins->builtin(Builtins::kCompileLazy);
}


// Delegates to the scope info, which knows the parameter layout.
bool SharedFunctionInfo::has_simple_parameters() {
  return scope_info()->HasSimpleParameters();
}
5973 
5974 
// True when the debug_info slot holds an actual DebugInfo struct; such a
// function must also have debuggable code (see DCHECK).
bool SharedFunctionInfo::HasDebugInfo() {
  bool has_debug_info = debug_info()->IsStruct();
  DCHECK(!has_debug_info || HasDebugCode());
  return has_debug_info;
}


DebugInfo* SharedFunctionInfo::GetDebugInfo() {
  DCHECK(HasDebugInfo());
  return DebugInfo::cast(debug_info());
}


// Debuggable code is either interpreter bytecode or FUNCTION code that was
// compiled with debug break slots.
bool SharedFunctionInfo::HasDebugCode() {
  return HasBytecodeArray() ||
         (code()->kind() == Code::FUNCTION && code()->has_debug_break_slots());
}
5992 
5993 
// The function_data slot is multiplexed: a FunctionTemplateInfo marks an
// API function, a BytecodeArray marks interpreted code (see below).
bool SharedFunctionInfo::IsApiFunction() {
  return function_data()->IsFunctionTemplateInfo();
}


FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
  DCHECK(IsApiFunction());
  return FunctionTemplateInfo::cast(function_data());
}

// The slot may only be claimed while it is still undefined.
void SharedFunctionInfo::set_api_func_data(FunctionTemplateInfo* data) {
  DCHECK(function_data()->IsUndefined(GetIsolate()));
  set_function_data(data);
}
6008 
bool SharedFunctionInfo::HasBytecodeArray() {
  return function_data()->IsBytecodeArray();
}

BytecodeArray* SharedFunctionInfo::bytecode_array() {
  DCHECK(HasBytecodeArray());
  return BytecodeArray::cast(function_data());
}

// Bytecode may only be installed while function_data is still undefined.
void SharedFunctionInfo::set_bytecode_array(BytecodeArray* bytecode) {
  DCHECK(function_data()->IsUndefined(GetIsolate()));
  set_function_data(bytecode);
}

// Resets function_data to undefined; a no-op when no bytecode was set.
void SharedFunctionInfo::ClearBytecodeArray() {
  DCHECK(function_data()->IsUndefined(GetIsolate()) || HasBytecodeArray());
  set_function_data(GetHeap()->undefined_value());
}
6027 
// function_identifier holds either a Smi-encoded BuiltinFunctionId or an
// inferred-name String (see the inferred name accessors below).
bool SharedFunctionInfo::HasBuiltinFunctionId() {
  return function_identifier()->IsSmi();
}

BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
  DCHECK(HasBuiltinFunctionId());
  return static_cast<BuiltinFunctionId>(
      Smi::cast(function_identifier())->value());
}

void SharedFunctionInfo::set_builtin_function_id(BuiltinFunctionId id) {
  set_function_identifier(Smi::FromInt(id));
}
6041 
HasInferredName()6042 bool SharedFunctionInfo::HasInferredName() {
6043   return function_identifier()->IsString();
6044 }
6045 
inferred_name()6046 String* SharedFunctionInfo::inferred_name() {
6047   if (HasInferredName()) {
6048     return String::cast(function_identifier());
6049   }
6050   Isolate* isolate = GetIsolate();
6051   DCHECK(function_identifier()->IsUndefined(isolate) || HasBuiltinFunctionId());
6052   return isolate->heap()->empty_string();
6053 }
6054 
set_inferred_name(String * inferred_name)6055 void SharedFunctionInfo::set_inferred_name(String* inferred_name) {
6056   DCHECK(function_identifier()->IsUndefined(GetIsolate()) || HasInferredName());
6057   set_function_identifier(inferred_name);
6058 }
6059 
// Accessors for sub-fields packed into the |counters| Smi via BitFields.

int SharedFunctionInfo::ic_age() {
  return ICAgeBits::decode(counters());
}


void SharedFunctionInfo::set_ic_age(int ic_age) {
  set_counters(ICAgeBits::update(counters(), ic_age));
}


int SharedFunctionInfo::deopt_count() {
  return DeoptCountBits::decode(counters());
}


void SharedFunctionInfo::set_deopt_count(int deopt_count) {
  set_counters(DeoptCountBits::update(counters(), deopt_count));
}


// Bumps the deopt counter, wrapping at the field's capacity so the value
// never overflows into neighboring bits.
void SharedFunctionInfo::increment_deopt_count() {
  int value = counters();
  int deopt_count = DeoptCountBits::decode(value);
  deopt_count = (deopt_count + 1) & DeoptCountBits::kMax;
  set_counters(DeoptCountBits::update(value, deopt_count));
}


int SharedFunctionInfo::opt_reenable_tries() {
  return OptReenableTriesBits::decode(counters());
}


void SharedFunctionInfo::set_opt_reenable_tries(int tries) {
  set_counters(OptReenableTriesBits::update(counters(), tries));
}
6096 
6097 
// opt_count and the disable-optimization bailout reason share one Smi
// field.

int SharedFunctionInfo::opt_count() {
  return OptCountBits::decode(opt_count_and_bailout_reason());
}


void SharedFunctionInfo::set_opt_count(int opt_count) {
  set_opt_count_and_bailout_reason(
      OptCountBits::update(opt_count_and_bailout_reason(), opt_count));
}


// The reason optimization was disabled for this function, if it was.
BailoutReason SharedFunctionInfo::disable_optimization_reason() {
  return static_cast<BailoutReason>(
      DisabledOptimizationReasonBits::decode(opt_count_and_bailout_reason()));
}
6113 
6114 
has_deoptimization_support()6115 bool SharedFunctionInfo::has_deoptimization_support() {
6116   Code* code = this->code();
6117   return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
6118 }
6119 
6120 
TryReenableOptimization()6121 void SharedFunctionInfo::TryReenableOptimization() {
6122   int tries = opt_reenable_tries();
6123   set_opt_reenable_tries((tries + 1) & OptReenableTriesBits::kMax);
6124   // We reenable optimization whenever the number of tries is a large
6125   // enough power of 2.
6126   if (tries >= 16 && (((tries - 1) & tries) == 0)) {
6127     set_optimization_disabled(false);
6128     set_opt_count(0);
6129     set_deopt_count(0);
6130   }
6131 }
6132 
6133 
// Records why optimization was disabled in the shared bailout-reason bits.
void SharedFunctionInfo::set_disable_optimization_reason(BailoutReason reason) {
  set_opt_count_and_bailout_reason(DisabledOptimizationReasonBits::update(
      opt_count_and_bailout_reason(), reason));
}
6138 
6139 
IsBuiltin()6140 bool SharedFunctionInfo::IsBuiltin() {
6141   Object* script_obj = script();
6142   if (script_obj->IsUndefined(GetIsolate())) return true;
6143   Script* script = Script::cast(script_obj);
6144   Script::Type type = static_cast<Script::Type>(script->type());
6145   return type != Script::TYPE_NORMAL;
6146 }
6147 
6148 
IsSubjectToDebugging()6149 bool SharedFunctionInfo::IsSubjectToDebugging() { return !IsBuiltin(); }
6150 
6151 
// The map is "cleared" when it points at the canonical cleared sentinel
// rather than a populated code map.
bool SharedFunctionInfo::OptimizedCodeMapIsCleared() const {
  return optimized_code_map() == GetHeap()->cleared_optimized_code_map();
}
6155 
6156 
bool JSFunction::IsOptimized() {
  return code()->kind() == Code::OPTIMIZED_FUNCTION;
}

// The following predicates test whether the function's code is currently
// one of the compile-marker builtins.

bool JSFunction::IsMarkedForBaseline() {
  return code() ==
         GetIsolate()->builtins()->builtin(Builtins::kCompileBaseline);
}

bool JSFunction::IsMarkedForOptimization() {
  return code() == GetIsolate()->builtins()->builtin(
      Builtins::kCompileOptimized);
}


bool JSFunction::IsMarkedForConcurrentOptimization() {
  return code() == GetIsolate()->builtins()->builtin(
      Builtins::kCompileOptimizedConcurrent);
}


bool JSFunction::IsInOptimizationQueue() {
  return code() == GetIsolate()->builtins()->builtin(
      Builtins::kInOptimizationQueue);
}
6182 
6183 
// Finalizes in-object slack tracking on the initial map, if it is still
// running.
void JSFunction::CompleteInobjectSlackTrackingIfActive() {
  if (has_initial_map() && initial_map()->IsInobjectSlackTrackingInProgress()) {
    initial_map()->CompleteInobjectSlackTracking();
  }
}


bool Map::IsInobjectSlackTrackingInProgress() {
  return construction_counter() != Map::kNoSlackTracking;
}


// Decrements the construction counter; when it reaches the end marker the
// tracking is completed.
void Map::InobjectSlackTrackingStep() {
  if (!IsInobjectSlackTrackingInProgress()) return;
  int counter = construction_counter();
  set_construction_counter(counter - 1);
  if (counter == kSlackTrackingCounterEnd) {
    CompleteInobjectSlackTracking();
  }
}
6204 
// Reports the bytecode array when the installed code is only the
// interpreter entry trampoline; otherwise the machine code itself.
AbstractCode* JSFunction::abstract_code() {
  Code* code = this->code();
  if (code->is_interpreter_trampoline_builtin()) {
    return AbstractCode::cast(shared()->bytecode_array());
  } else {
    return AbstractCode::cast(code);
  }
}

// The code is not stored as a direct field; it is reconstructed from the
// raw entry address kept at kCodeEntryOffset.
Code* JSFunction::code() {
  return Code::cast(
      Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
}
6218 
6219 
// Installs |value| by writing its entry address and then notifying the
// incremental marker, since the raw intptr write bypasses the normal
// object write barrier.
void JSFunction::set_code(Code* value) {
  DCHECK(!GetHeap()->InNewSpace(value));
  Address entry = value->entry();
  WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
  GetHeap()->incremental_marking()->RecordWriteOfCodeEntry(
      this,
      HeapObject::RawField(this, kCodeEntryOffset),
      value);
}


// As above but without the incremental-marking record; the caller is
// responsible for knowing that no barrier is needed.
void JSFunction::set_code_no_write_barrier(Code* value) {
  DCHECK(!GetHeap()->InNewSpace(value));
  Address entry = value->entry();
  WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
}
6236 
6237 
// Swaps in new code and keeps the native context's optimized-function list
// in sync with the optimized/unoptimized state change.
void JSFunction::ReplaceCode(Code* code) {
  bool was_optimized = IsOptimized();
  bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;

  if (was_optimized && is_optimized) {
    shared()->EvictFromOptimizedCodeMap(this->code(),
        "Replacing with another optimized code");
  }

  set_code(code);

  // Add/remove the function from the list of optimized functions for this
  // context based on the state change.
  if (!was_optimized && is_optimized) {
    context()->native_context()->AddOptimizedFunction(this);
  }
  if (was_optimized && !is_optimized) {
    // TODO(titzer): linear in the number of optimized functions; fix!
    context()->native_context()->RemoveOptimizedFunction(this);
  }
}
6259 
6260 
Context* JSFunction::context() {
  return Context::cast(READ_FIELD(this, kContextOffset));
}


JSObject* JSFunction::global_proxy() {
  return context()->global_proxy();
}


Context* JSFunction::native_context() { return context()->native_context(); }


// Only undefined or a real Context may be stored here (see DCHECK).
void JSFunction::set_context(Object* value) {
  DCHECK(value->IsUndefined(GetIsolate()) || value->IsContext());
  WRITE_FIELD(this, kContextOffset, value);
  WRITE_BARRIER(GetHeap(), this, kContextOffset, value);
}
6279 
// The prototype_or_initial_map slot is overloaded: it holds the initial
// map once one exists, otherwise the explicit prototype, or the hole when
// neither has been set.
ACCESSORS(JSFunction, prototype_or_initial_map, Object,
          kPrototypeOrInitialMapOffset)


Map* JSFunction::initial_map() {
  return Map::cast(prototype_or_initial_map());
}


bool JSFunction::has_initial_map() {
  return prototype_or_initial_map()->IsMap();
}


bool JSFunction::has_instance_prototype() {
  return has_initial_map() ||
         !prototype_or_initial_map()->IsTheHole(GetIsolate());
}
6298 
6299 
// A prototype exists either as a non-instance prototype on the map or as
// an instance prototype (see below).
bool JSFunction::has_prototype() {
  return map()->has_non_instance_prototype() || has_instance_prototype();
}


Object* JSFunction::instance_prototype() {
  DCHECK(has_instance_prototype());
  if (has_initial_map()) return initial_map()->prototype();
  // When there is no initial map and the prototype is a JSObject, the
  // initial map field is used for the prototype field.
  return prototype_or_initial_map();
}


Object* JSFunction::prototype() {
  DCHECK(has_prototype());
  // If the function's prototype property has been set to a non-JSObject
  // value, that value is stored in the constructor field of the map.
  if (map()->has_non_instance_prototype()) {
    Object* prototype = map()->GetConstructor();
    // The map must have a prototype in that field, not a back pointer.
    DCHECK(!prototype->IsMap());
    return prototype;
  }
  return instance_prototype();
}
6326 
6327 
is_compiled()6328 bool JSFunction::is_compiled() {
6329   Builtins* builtins = GetIsolate()->builtins();
6330   return code() != builtins->builtin(Builtins::kCompileLazy) &&
6331          code() != builtins->builtin(Builtins::kCompileBaseline) &&
6332          code() != builtins->builtin(Builtins::kCompileOptimized) &&
6333          code() != builtins->builtin(Builtins::kCompileOptimizedConcurrent);
6334 }
6335 
// The feedback vector is reached through the closure's literals array.
TypeFeedbackVector* JSFunction::feedback_vector() {
  LiteralsArray* array = literals();
  return array->feedback_vector();
}
6340 
ACCESSORS(JSProxy, target, JSReceiver, kTargetOffset)
ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
ACCESSORS(JSProxy, hash, Object, kHashOffset)

// A revoked proxy no longer has a JSReceiver as its handler.
bool JSProxy::IsRevoked() const { return !handler()->IsJSReceiver(); }

ACCESSORS(JSCollection, table, Object, kTableOffset)
6348 
6349 
// Generates the table/index/kind accessors shared by all ordered hash
// table iterators; writes go through the conditional write barrier.
#define ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(name, type, offset)    \
  template<class Derived, class TableType>                           \
  type* OrderedHashTableIterator<Derived, TableType>::name() const { \
    return type::cast(READ_FIELD(this, offset));                     \
  }                                                                  \
  template<class Derived, class TableType>                           \
  void OrderedHashTableIterator<Derived, TableType>::set_##name(     \
      type* value, WriteBarrierMode mode) {                          \
    WRITE_FIELD(this, offset, value);                                \
    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \
  }

ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(table, Object, kTableOffset)
ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(index, Object, kIndexOffset)
ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(kind, Object, kKindOffset)

#undef ORDERED_HASH_TABLE_ITERATOR_ACCESSORS
6367 
6368 
ACCESSORS(JSWeakCollection, table, Object, kTableOffset)
ACCESSORS(JSWeakCollection, next, Object, kNextOffset)


// The foreign address is stored as a raw intptr, so reads and writes need
// no write barrier.
Address Foreign::foreign_address() {
  return AddressFrom<Address>(READ_INTPTR_FIELD(this, kForeignAddressOffset));
}


void Foreign::set_foreign_address(Address value) {
  WRITE_INTPTR_FIELD(this, kForeignAddressOffset, OffsetFrom(value));
}


ACCESSORS(JSGeneratorObject, function, JSFunction, kFunctionOffset)
ACCESSORS(JSGeneratorObject, context, Context, kContextOffset)
ACCESSORS(JSGeneratorObject, receiver, Object, kReceiverOffset)
ACCESSORS(JSGeneratorObject, input_or_debug_pos, Object, kInputOrDebugPosOffset)
SMI_ACCESSORS(JSGeneratorObject, resume_mode, kResumeModeOffset)
SMI_ACCESSORS(JSGeneratorObject, continuation, kContinuationOffset)
ACCESSORS(JSGeneratorObject, operand_stack, FixedArray, kOperandStackOffset)
6390 
6391 bool JSGeneratorObject::is_suspended() const {
6392   DCHECK_LT(kGeneratorExecuting, 0);
6393   DCHECK_LT(kGeneratorClosed, 0);
6394   return continuation() >= 0;
6395 }
6396 
is_closed()6397 bool JSGeneratorObject::is_closed() const {
6398   return continuation() == kGeneratorClosed;
6399 }
6400 
is_executing()6401 bool JSGeneratorObject::is_executing() const {
6402   return continuation() == kGeneratorExecuting;
6403 }
6404 
ACCESSORS(JSModule, context, Object, kContextOffset)
ACCESSORS(JSModule, scope_info, ScopeInfo, kScopeInfoOffset)


ACCESSORS(JSValue, value, Object, kValueOffset)


// The cast deliberately accepts mutable heap numbers as well (see the
// SLOW_DCHECK).
HeapNumber* HeapNumber::cast(Object* object) {
  SLOW_DCHECK(object->IsHeapNumber() || object->IsMutableHeapNumber());
  return reinterpret_cast<HeapNumber*>(object);
}


const HeapNumber* HeapNumber::cast(const Object* object) {
  SLOW_DCHECK(object->IsHeapNumber() || object->IsMutableHeapNumber());
  return reinterpret_cast<const HeapNumber*>(object);
}
6422 
6423 
// JSDate keeps the raw time value plus cached per-component fields guarded
// by a cache stamp.
ACCESSORS(JSDate, value, Object, kValueOffset)
ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset)
ACCESSORS(JSDate, year, Object, kYearOffset)
ACCESSORS(JSDate, month, Object, kMonthOffset)
ACCESSORS(JSDate, day, Object, kDayOffset)
ACCESSORS(JSDate, weekday, Object, kWeekdayOffset)
ACCESSORS(JSDate, hour, Object, kHourOffset)
ACCESSORS(JSDate, min, Object, kMinOffset)
ACCESSORS(JSDate, sec, Object, kSecOffset)


// JSMessageObject: a message plus the script range it refers to.
SMI_ACCESSORS(JSMessageObject, type, kTypeOffset)
ACCESSORS(JSMessageObject, argument, Object, kArgumentsOffset)
ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)
6441 
6442 
// Code header fields: sizes/offsets are raw ints; the rest are tagged
// heap references.
INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
INT_ACCESSORS(Code, prologue_offset, kPrologueOffset)
INT_ACCESSORS(Code, constant_pool_offset, kConstantPoolOffset)
ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset)
ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
ACCESSORS(Code, raw_type_feedback_info, Object, kTypeFeedbackInfoOffset)
ACCESSORS(Code, next_code_link, Object, kNextCodeLinkOffset)
6451 
// Nulls out the pointer fields of the code header.  The type-feedback slot
// is spared when it holds a Smi, since code stubs and ICs keep their
// major/minor key there.
void Code::WipeOutHeader() {
  WRITE_FIELD(this, kRelocationInfoOffset, NULL);
  WRITE_FIELD(this, kHandlerTableOffset, NULL);
  WRITE_FIELD(this, kDeoptimizationDataOffset, NULL);
  // Do not wipe out major/minor keys on a code stub or IC
  if (!READ_FIELD(this, kTypeFeedbackInfoOffset)->IsSmi()) {
    WRITE_FIELD(this, kTypeFeedbackInfoOffset, NULL);
  }
  WRITE_FIELD(this, kNextCodeLinkOffset, NULL);
  WRITE_FIELD(this, kGCMetadataOffset, NULL);
}
6463 
6464 
// The kTypeFeedbackInfoOffset slot is multiplexed: FUNCTION code stores a
// type feedback info object, while code stubs and ICs store their
// Smi-encoded stub key (see the DCHECKs below).

Object* Code::type_feedback_info() {
  DCHECK(kind() == FUNCTION);
  return raw_type_feedback_info();
}


void Code::set_type_feedback_info(Object* value, WriteBarrierMode mode) {
  DCHECK(kind() == FUNCTION);
  set_raw_type_feedback_info(value, mode);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kTypeFeedbackInfoOffset,
                            value, mode);
}


uint32_t Code::stub_key() {
  DCHECK(IsCodeStubOrIC());
  Smi* smi_key = Smi::cast(raw_type_feedback_info());
  return static_cast<uint32_t>(smi_key->value());
}


void Code::set_stub_key(uint32_t key) {
  DCHECK(IsCodeStubOrIC());
  set_raw_type_feedback_info(Smi::FromInt(key));
}
6490 
6491 
ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset)
INT_ACCESSORS(Code, ic_age, kICAgeOffset)


// The instruction stream begins immediately after the fixed-size header.
byte* Code::instruction_start()  {
  return FIELD_ADDR(this, kHeaderSize);
}


byte* Code::instruction_end()  {
  return instruction_start() + instruction_size();
}
6504 
// When unwinding info is present it is appended after the instructions:
// a 64-bit size field at the next 8-byte-aligned offset, followed by the
// unwinding data itself.
int Code::GetUnwindingInfoSizeOffset() const {
  DCHECK(has_unwinding_info());
  return RoundUp(kHeaderSize + instruction_size(), kInt64Size);
}

int Code::unwinding_info_size() const {
  DCHECK(has_unwinding_info());
  return static_cast<int>(
      READ_UINT64_FIELD(this, GetUnwindingInfoSizeOffset()));
}

void Code::set_unwinding_info_size(int value) {
  DCHECK(has_unwinding_info());
  WRITE_UINT64_FIELD(this, GetUnwindingInfoSizeOffset(), value);
}

// The data starts right after the 64-bit size field.
byte* Code::unwinding_info_start() {
  DCHECK(has_unwinding_info());
  return FIELD_ADDR(this, GetUnwindingInfoSizeOffset()) + kInt64Size;
}

byte* Code::unwinding_info_end() {
  DCHECK(has_unwinding_info());
  return unwinding_info_start() + unwinding_info_size();
}
6530 
body_size()6531 int Code::body_size() {
6532   int unpadded_body_size =
6533       has_unwinding_info()
6534           ? static_cast<int>(unwinding_info_end() - instruction_start())
6535           : instruction_size();
6536   return RoundUp(unpadded_body_size, kObjectAlignment);
6537 }
6538 
SizeIncludingMetadata()6539 int Code::SizeIncludingMetadata() {
6540   int size = CodeSize();
6541   size += relocation_info()->Size();
6542   size += deoptimization_data()->Size();
6543   size += handler_table()->Size();
6544   return size;
6545 }
6546 
// Relocation info ByteArray, read without a type check (hence
// "unchecked") — a raw reinterpret_cast of the field.
ByteArray* Code::unchecked_relocation_info() {
  return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
}


// First byte of the relocation info data.
byte* Code::relocation_start() {
  return unchecked_relocation_info()->GetDataStartAddress();
}


// Length in bytes of the relocation info.
int Code::relocation_size() {
  return unchecked_relocation_info()->length();
}


// Entry point of the code object (its first instruction).
byte* Code::entry() {
  return instruction_start();
}
6565 
6566 
// Returns true if |inner_pointer| points into this code object.
// NOTE(review): the upper bound is inclusive — a pointer one past the
// object still counts as contained (presumably to cover return
// addresses at the very end of the instructions; confirm).
bool Code::contains(byte* inner_pointer) {
  return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
}


// Header plus instructions (excludes relocation info etc.).
int Code::ExecutableSize() {
  // Check that the assumptions about the layout of the code object holds.
  DCHECK_EQ(static_cast<int>(instruction_start() - address()),
            Code::kHeaderSize);
  return instruction_size() + Code::kHeaderSize;
}


// Allocated size of the code object: header plus aligned body.
int Code::CodeSize() { return SizeFor(body_size()); }
6581 
6582 
ACCESSORS(JSArray, length, Object, kLengthOffset)


// The backing store pointer is stored as a raw intptr_t in the object.
void* JSArrayBuffer::backing_store() const {
  intptr_t ptr = READ_INTPTR_FIELD(this, kBackingStoreOffset);
  return reinterpret_cast<void*>(ptr);
}


// Stores the raw pointer. |mode| is unused: the field holds an untagged
// value, so no write barrier is required.
void JSArrayBuffer::set_backing_store(void* value, WriteBarrierMode mode) {
  intptr_t ptr = reinterpret_cast<intptr_t>(value);
  WRITE_INTPTR_FIELD(this, kBackingStoreOffset, ptr);
}


ACCESSORS(JSArrayBuffer, byte_length, Object, kByteLengthOffset)
6599 
6600 
// Writes the 32-bit flag word. When the bit-field slot is a full
// pointer-sized word (64-bit targets), the unused half of the slot is
// explicitly zeroed; which half depends on the target's endianness.
void JSArrayBuffer::set_bit_field(uint32_t bits) {
  if (kInt32Size != kPointerSize) {
#if V8_TARGET_LITTLE_ENDIAN
    WRITE_UINT32_FIELD(this, kBitFieldSlot + kInt32Size, 0);
#else
    WRITE_UINT32_FIELD(this, kBitFieldSlot, 0);
#endif
  }
  WRITE_UINT32_FIELD(this, kBitFieldOffset, bits);
}


uint32_t JSArrayBuffer::bit_field() const {
  return READ_UINT32_FIELD(this, kBitFieldOffset);
}
6616 
6617 
// Boolean flags packed into the JSArrayBuffer bit field.

bool JSArrayBuffer::is_external() { return IsExternal::decode(bit_field()); }


void JSArrayBuffer::set_is_external(bool value) {
  set_bit_field(IsExternal::update(bit_field(), value));
}


bool JSArrayBuffer::is_neuterable() {
  return IsNeuterable::decode(bit_field());
}


void JSArrayBuffer::set_is_neuterable(bool value) {
  set_bit_field(IsNeuterable::update(bit_field(), value));
}


bool JSArrayBuffer::was_neutered() { return WasNeutered::decode(bit_field()); }


void JSArrayBuffer::set_was_neutered(bool value) {
  set_bit_field(WasNeutered::update(bit_field(), value));
}


bool JSArrayBuffer::is_shared() { return IsShared::decode(bit_field()); }


void JSArrayBuffer::set_is_shared(bool value) {
  set_bit_field(IsShared::update(bit_field(), value));
}
6650 
6651 
// Byte offset of the view into its buffer; reports 0 once the
// underlying buffer has been neutered.
Object* JSArrayBufferView::byte_offset() const {
  if (WasNeutered()) return Smi::FromInt(0);
  return Object::cast(READ_FIELD(this, kByteOffsetOffset));
}


void JSArrayBufferView::set_byte_offset(Object* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kByteOffsetOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kByteOffsetOffset, value, mode);
}


// Byte length of the view; also 0 after neutering.
Object* JSArrayBufferView::byte_length() const {
  if (WasNeutered()) return Smi::FromInt(0);
  return Object::cast(READ_FIELD(this, kByteLengthOffset));
}


void JSArrayBufferView::set_byte_length(Object* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kByteLengthOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kByteLengthOffset, value, mode);
}


ACCESSORS(JSArrayBufferView, buffer, Object, kBufferOffset)
#ifdef VERIFY_HEAP
// Raw accessors that bypass the WasNeutered() check, for heap verification.
ACCESSORS(JSArrayBufferView, raw_byte_offset, Object, kByteOffsetOffset)
ACCESSORS(JSArrayBufferView, raw_byte_length, Object, kByteLengthOffset)
#endif


// True if the buffer this view refers to has been neutered.
bool JSArrayBufferView::WasNeutered() const {
  return JSArrayBuffer::cast(buffer())->was_neutered();
}
6686 
6687 
// Length of the typed array; reports 0 after the buffer was neutered.
Object* JSTypedArray::length() const {
  if (WasNeutered()) return Smi::FromInt(0);
  return Object::cast(READ_FIELD(this, kLengthOffset));
}


// Length as uint32_t; CHECKs that the stored value is a valid array
// length.
uint32_t JSTypedArray::length_value() const {
  if (WasNeutered()) return 0;
  uint32_t index = 0;
  CHECK(Object::cast(READ_FIELD(this, kLengthOffset))->ToArrayLength(&index));
  return index;
}


void JSTypedArray::set_length(Object* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kLengthOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kLengthOffset, value, mode);
}


#ifdef VERIFY_HEAP
// Raw accessor bypassing the WasNeutered() check, for heap verification.
ACCESSORS(JSTypedArray, raw_length, Object, kLengthOffset)
#endif


ACCESSORS(JSRegExp, data, Object, kDataOffset)
ACCESSORS(JSRegExp, flags, Object, kFlagsOffset)
ACCESSORS(JSRegExp, source, Object, kSourceOffset)
6716 
6717 
// Kind of regexp stored in the data array, or NOT_COMPILED while the
// data slot is still undefined.
JSRegExp::Type JSRegExp::TypeTag() {
  Object* data = this->data();
  if (data->IsUndefined(GetIsolate())) return JSRegExp::NOT_COMPILED;
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
  return static_cast<JSRegExp::Type>(smi->value());
}
6724 
6725 
CaptureCount()6726 int JSRegExp::CaptureCount() {
6727   switch (TypeTag()) {
6728     case ATOM:
6729       return 0;
6730     case IRREGEXP:
6731       return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
6732     default:
6733       UNREACHABLE();
6734       return -1;
6735   }
6736 }
6737 
6738 
// Flags as stored in the data array (requires compiled data).
JSRegExp::Flags JSRegExp::GetFlags() {
  DCHECK(this->data()->IsFixedArray());
  Object* data = this->data();
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
  return Flags(smi->value());
}


// Source pattern string as stored in the data array.
String* JSRegExp::Pattern() {
  DCHECK(this->data()->IsFixedArray());
  Object* data = this->data();
  String* pattern = String::cast(FixedArray::cast(data)->get(kSourceIndex));
  return pattern;
}


// Reads an entry of the regexp data array; only valid once compiled.
Object* JSRegExp::DataAt(int index) {
  DCHECK(TypeTag() != NOT_COMPILED);
  return FixedArray::cast(data())->get(index);
}


void JSRegExp::SetDataAt(int index, Object* value) {
  DCHECK(TypeTag() != NOT_COMPILED);
  DCHECK(index >= kDataIndex);  // Only implementation data can be set this way.
  FixedArray::cast(data())->set(index, value);
}
6766 
6767 
// Elements kind as recorded in the map. In VERIFY_HEAP debug builds,
// additionally checks that the elements backing store is consistent
// with that kind.
ElementsKind JSObject::GetElementsKind() {
  ElementsKind kind = map()->elements_kind();
#if VERIFY_HEAP && DEBUG
  FixedArrayBase* fixed_array =
      reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));

  // If a GC was caused while constructing this object, the elements
  // pointer may point to a one pointer filler map.
  if (ElementsAreSafeToExamine()) {
    Map* map = fixed_array->map();
    if (IsFastSmiOrObjectElementsKind(kind)) {
      DCHECK(map == GetHeap()->fixed_array_map() ||
             map == GetHeap()->fixed_cow_array_map());
    } else if (IsFastDoubleElementsKind(kind)) {
      DCHECK(fixed_array->IsFixedDoubleArray() ||
             fixed_array == GetHeap()->empty_fixed_array());
    } else if (kind == DICTIONARY_ELEMENTS) {
      DCHECK(fixed_array->IsFixedArray());
      DCHECK(fixed_array->IsDictionary());
    } else {
      DCHECK(kind > DICTIONARY_ELEMENTS);
    }
    DCHECK(!IsSloppyArgumentsElements(kind) ||
           (elements()->IsFixedArray() && elements()->length() >= 2));
  }
#endif
  return kind;
}
6796 
6797 
// Elements-kind predicates, all derived from GetElementsKind() (or, for
// fixed typed arrays, directly from the map).

bool JSObject::HasFastObjectElements() {
  return IsFastObjectElementsKind(GetElementsKind());
}


bool JSObject::HasFastSmiElements() {
  return IsFastSmiElementsKind(GetElementsKind());
}


bool JSObject::HasFastSmiOrObjectElements() {
  return IsFastSmiOrObjectElementsKind(GetElementsKind());
}


bool JSObject::HasFastDoubleElements() {
  return IsFastDoubleElementsKind(GetElementsKind());
}


bool JSObject::HasFastHoleyElements() {
  return IsFastHoleyElementsKind(GetElementsKind());
}


bool JSObject::HasFastElements() {
  return IsFastElementsKind(GetElementsKind());
}


bool JSObject::HasDictionaryElements() {
  return GetElementsKind() == DICTIONARY_ELEMENTS;
}


bool JSObject::HasFastArgumentsElements() {
  return GetElementsKind() == FAST_SLOPPY_ARGUMENTS_ELEMENTS;
}


bool JSObject::HasSlowArgumentsElements() {
  return GetElementsKind() == SLOW_SLOPPY_ARGUMENTS_ELEMENTS;
}


bool JSObject::HasSloppyArgumentsElements() {
  return IsSloppyArgumentsElements(GetElementsKind());
}

bool JSObject::HasStringWrapperElements() {
  return IsStringWrapperElementsKind(GetElementsKind());
}

bool JSObject::HasFastStringWrapperElements() {
  return GetElementsKind() == FAST_STRING_WRAPPER_ELEMENTS;
}

bool JSObject::HasSlowStringWrapperElements() {
  return GetElementsKind() == SLOW_STRING_WRAPPER_ELEMENTS;
}

bool JSObject::HasFixedTypedArrayElements() {
  DCHECK_NOT_NULL(elements());
  return map()->has_fixed_typed_array_elements();
}
6863 
// Defines JSObject::HasFixed<Type>Elements() for every typed array type
// by checking the instance type of the elements backing store.
#define FIXED_TYPED_ELEMENTS_CHECK(Type, type, TYPE, ctype, size)      \
  bool JSObject::HasFixed##Type##Elements() {                          \
    HeapObject* array = elements();                                    \
    DCHECK(array != NULL);                                             \
    if (!array->IsHeapObject()) return false;                          \
    return array->map()->instance_type() == FIXED_##TYPE##_ARRAY_TYPE; \
  }

TYPED_ARRAYS(FIXED_TYPED_ELEMENTS_CHECK)

#undef FIXED_TYPED_ELEMENTS_CHECK


bool JSObject::HasNamedInterceptor() {
  return map()->has_named_interceptor();
}


bool JSObject::HasIndexedInterceptor() {
  return map()->has_indexed_interceptor();
}


// Property dictionary of a global object (slow-mode properties only).
GlobalDictionary* JSObject::global_dictionary() {
  DCHECK(!HasFastProperties());
  DCHECK(IsJSGlobalObject());
  return GlobalDictionary::cast(properties());
}


// Elements dictionary; only valid for dictionary-mode elements.
SeededNumberDictionary* JSObject::element_dictionary() {
  DCHECK(HasDictionaryElements() || HasSlowStringWrapperElements());
  return SeededNumberDictionary::cast(elements());
}
6898 
6899 
// True when the hash field's "not computed" bit is clear.
bool Name::IsHashFieldComputed(uint32_t field) {
  return (field & kHashNotComputedMask) == 0;
}


bool Name::HasHashCode() {
  return IsHashFieldComputed(hash_field());
}


uint32_t Name::Hash() {
  // Fast case: has hash code already been computed?
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field)) return field >> kHashShift;
  // Slow case: compute hash code and set it. Has to be a string.
  return String::cast(this)->ComputeAndSetHash();
}


// A name is private iff it is a symbol marked as private.
bool Name::IsPrivate() {
  return this->IsSymbol() && Symbol::cast(this)->is_private();
}
6922 
6923 
// Starts a hash computation seeded with |seed|. Strings short enough to
// possibly be array indices start out in array-index mode.
StringHasher::StringHasher(int length, uint32_t seed)
  : length_(length),
    raw_running_hash_(seed),
    array_index_(0),
    is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
    is_first_char_(true) {
  DCHECK(FLAG_randomize_hashes || raw_running_hash_ == 0);
}


// Strings longer than kMaxHashCalcLength skip per-character hashing.
bool StringHasher::has_trivial_hash() {
  return length_ > String::kMaxHashCalcLength;
}
6937 
6938 
// One mixing step of the Jenkins one-at-a-time hash.
uint32_t StringHasher::AddCharacterCore(uint32_t running_hash, uint16_t c) {
  running_hash += c;
  running_hash += (running_hash << 10);
  running_hash ^= (running_hash >> 6);
  return running_hash;
}


// Finalization of the Jenkins one-at-a-time hash. A result whose hash
// bits are all zero is replaced with kZeroHash — presumably so that an
// all-zero hash value never occurs (confirm against the hash-field
// encoding in objects.h).
uint32_t StringHasher::GetHashCore(uint32_t running_hash) {
  running_hash += (running_hash << 3);
  running_hash ^= (running_hash >> 11);
  running_hash += (running_hash << 15);
  if ((running_hash & String::kHashBitMask) == 0) {
    return kZeroHash;
  }
  return running_hash;
}
6956 
6957 
ComputeRunningHash(uint32_t running_hash,const uc16 * chars,int length)6958 uint32_t StringHasher::ComputeRunningHash(uint32_t running_hash,
6959                                           const uc16* chars, int length) {
6960   DCHECK_NOT_NULL(chars);
6961   DCHECK(length >= 0);
6962   for (int i = 0; i < length; ++i) {
6963     running_hash = AddCharacterCore(running_hash, *chars++);
6964   }
6965   return running_hash;
6966 }
6967 
6968 
ComputeRunningHashOneByte(uint32_t running_hash,const char * chars,int length)6969 uint32_t StringHasher::ComputeRunningHashOneByte(uint32_t running_hash,
6970                                                  const char* chars,
6971                                                  int length) {
6972   DCHECK_NOT_NULL(chars);
6973   DCHECK(length >= 0);
6974   for (int i = 0; i < length; ++i) {
6975     uint16_t c = static_cast<uint16_t>(*chars++);
6976     running_hash = AddCharacterCore(running_hash, c);
6977   }
6978   return running_hash;
6979 }
6980 
6981 
void StringHasher::AddCharacter(uint16_t c) {
  // Use the Jenkins one-at-a-time hash function to update the hash
  // for the given character.
  raw_running_hash_ = AddCharacterCore(raw_running_hash_, c);
}


// Feeds one character into the array-index accumulator. Returns false
// (and permanently leaves array-index mode) on the first character that
// makes the string invalid as an array index: a non-digit, a leading
// zero on a multi-character string, or overflow.
bool StringHasher::UpdateIndex(uint16_t c) {
  DCHECK(is_array_index_);
  if (c < '0' || c > '9') {
    is_array_index_ = false;
    return false;
  }
  int d = c - '0';
  if (is_first_char_) {
    is_first_char_ = false;
    if (c == '0' && length_ > 1) {
      is_array_index_ = false;
      return false;
    }
  }
  // Overflow guard: 429496729 == 0xFFFFFFFF / 10, and (d + 3) >> 3 is 1
  // exactly when d >= 5, tightening the bound for large last digits.
  // NOTE(review): presumably this admits indices up to the maximum
  // array index 4294967294 — confirm against String::kMaxArrayIndexSize.
  if (array_index_ > 429496729U - ((d + 3) >> 3)) {
    is_array_index_ = false;
    return false;
  }
  array_index_ = array_index_ * 10 + d;
  return true;
}
7010 
7011 
// Feeds |length| characters into the hasher. While in array-index mode
// each character also goes through UpdateIndex(); once that fails, the
// remainder is hashed without index tracking.
template<typename Char>
inline void StringHasher::AddCharacters(const Char* chars, int length) {
  DCHECK(sizeof(Char) == 1 || sizeof(Char) == 2);
  int i = 0;
  if (is_array_index_) {
    for (; i < length; i++) {
      AddCharacter(chars[i]);
      if (!UpdateIndex(chars[i])) {
        i++;
        break;
      }
    }
  }
  for (; i < length; i++) {
    DCHECK(!is_array_index_);
    AddCharacter(chars[i]);
  }
}


// Convenience: hash a whole sequential (flat) string in one call.
template <typename schar>
uint32_t StringHasher::HashSequentialString(const schar* chars,
                                            int length,
                                            uint32_t seed) {
  StringHasher hasher(length, seed);
  if (!hasher.has_trivial_hash()) hasher.AddCharacters(chars, length);
  return hasher.GetHashField();
}
7040 
7041 
IteratingStringHasher::IteratingStringHasher(int len, uint32_t seed)
    : StringHasher(len, seed) {}


// Hashes a string of any representation: flat content is visited via
// the Visit* callbacks below; a remaining ConsString tail is handled by
// VisitConsString().
uint32_t IteratingStringHasher::Hash(String* string, uint32_t seed) {
  IteratingStringHasher hasher(string->length(), seed);
  // Nothing to do.
  if (hasher.has_trivial_hash()) return hasher.GetHashField();
  ConsString* cons_string = String::VisitFlat(&hasher, string);
  if (cons_string == nullptr) return hasher.GetHashField();
  hasher.VisitConsString(cons_string);
  return hasher.GetHashField();
}


void IteratingStringHasher::VisitOneByteString(const uint8_t* chars,
                                               int length) {
  AddCharacters(chars, length);
}


void IteratingStringHasher::VisitTwoByteString(const uint16_t* chars,
                                               int length) {
  AddCharacters(chars, length);
}
7067 
7068 
// A name is usable as an array index only if it is a string that parses
// as one; the parsed index is stored in *index on success.
bool Name::AsArrayIndex(uint32_t* index) {
  return IsString() && String::cast(this)->AsArrayIndex(index);
}


bool String::AsArrayIndex(uint32_t* index) {
  uint32_t field = hash_field();
  // Fast reject: a computed hash field caches whether this string is an
  // array index.
  if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
    return false;
  }
  return SlowAsArrayIndex(index);
}
7081 
7082 
// Forwards this internalized string to |canonical| by overwriting the
// hash field slot with a (tagged) pointer to the canonical string.
void String::SetForwardedInternalizedString(String* canonical) {
  DCHECK(IsInternalizedString());
  DCHECK(HasHashCode());
  if (canonical == this) return;  // No need to forward.
  DCHECK(SlowEquals(canonical));
  DCHECK(canonical->IsInternalizedString());
  DCHECK(canonical->HasHashCode());
  WRITE_FIELD(this, kHashFieldSlot, canonical);
  // Setting the hash field to a tagged value sets the LSB, causing the hash
  // code to be interpreted as uninitialized.  We use this fact to recognize
  // that we have a forwarded string.
  DCHECK(!HasHashCode());
}


// Returns the canonical string this one was forwarded to, or |this| if
// it has not been forwarded (an intact hash code means "not forwarded").
String* String::GetForwardedInternalizedString() {
  DCHECK(IsInternalizedString());
  if (HasHashCode()) return this;
  String* canonical = String::cast(READ_FIELD(this, kHashFieldSlot));
  DCHECK(canonical->IsInternalizedString());
  DCHECK(SlowEquals(canonical));
  DCHECK(canonical->HasHashCode());
  return canonical;
}
7107 
7108 
7109 // static
GreaterThan(Handle<Object> x,Handle<Object> y)7110 Maybe<bool> Object::GreaterThan(Handle<Object> x, Handle<Object> y) {
7111   Maybe<ComparisonResult> result = Compare(x, y);
7112   if (result.IsJust()) {
7113     switch (result.FromJust()) {
7114       case ComparisonResult::kGreaterThan:
7115         return Just(true);
7116       case ComparisonResult::kLessThan:
7117       case ComparisonResult::kEqual:
7118       case ComparisonResult::kUndefined:
7119         return Just(false);
7120     }
7121   }
7122   return Nothing<bool>();
7123 }
7124 
7125 
7126 // static
GreaterThanOrEqual(Handle<Object> x,Handle<Object> y)7127 Maybe<bool> Object::GreaterThanOrEqual(Handle<Object> x, Handle<Object> y) {
7128   Maybe<ComparisonResult> result = Compare(x, y);
7129   if (result.IsJust()) {
7130     switch (result.FromJust()) {
7131       case ComparisonResult::kEqual:
7132       case ComparisonResult::kGreaterThan:
7133         return Just(true);
7134       case ComparisonResult::kLessThan:
7135       case ComparisonResult::kUndefined:
7136         return Just(false);
7137     }
7138   }
7139   return Nothing<bool>();
7140 }
7141 
7142 
7143 // static
LessThan(Handle<Object> x,Handle<Object> y)7144 Maybe<bool> Object::LessThan(Handle<Object> x, Handle<Object> y) {
7145   Maybe<ComparisonResult> result = Compare(x, y);
7146   if (result.IsJust()) {
7147     switch (result.FromJust()) {
7148       case ComparisonResult::kLessThan:
7149         return Just(true);
7150       case ComparisonResult::kEqual:
7151       case ComparisonResult::kGreaterThan:
7152       case ComparisonResult::kUndefined:
7153         return Just(false);
7154     }
7155   }
7156   return Nothing<bool>();
7157 }
7158 
7159 
7160 // static
LessThanOrEqual(Handle<Object> x,Handle<Object> y)7161 Maybe<bool> Object::LessThanOrEqual(Handle<Object> x, Handle<Object> y) {
7162   Maybe<ComparisonResult> result = Compare(x, y);
7163   if (result.IsJust()) {
7164     switch (result.FromJust()) {
7165       case ComparisonResult::kEqual:
7166       case ComparisonResult::kLessThan:
7167         return Just(true);
7168       case ComparisonResult::kGreaterThan:
7169       case ComparisonResult::kUndefined:
7170         return Just(false);
7171     }
7172   }
7173   return Nothing<bool>();
7174 }
7175 
// Looks up |name| on |object|; |name| may also be a string holding an
// element index, hence "PropertyOrElement".
MaybeHandle<Object> Object::GetPropertyOrElement(Handle<Object> object,
                                                 Handle<Name> name) {
  LookupIterator it =
      LookupIterator::PropertyOrElement(name->GetIsolate(), object, name);
  return GetProperty(&it);
}

// Stores |value| under |name| on |object|; returns |value| on success.
MaybeHandle<Object> Object::SetPropertyOrElement(Handle<Object> object,
                                                 Handle<Name> name,
                                                 Handle<Object> value,
                                                 LanguageMode language_mode,
                                                 StoreFromKeyed store_mode) {
  LookupIterator it =
      LookupIterator::PropertyOrElement(name->GetIsolate(), object, name);
  MAYBE_RETURN_NULL(SetProperty(&it, value, language_mode, store_mode));
  return value;
}

// Variant that starts the lookup at |holder| while keeping |receiver|
// as the receiver of the lookup.
MaybeHandle<Object> Object::GetPropertyOrElement(Handle<Object> receiver,
                                                 Handle<Name> name,
                                                 Handle<JSReceiver> holder) {
  LookupIterator it = LookupIterator::PropertyOrElement(
      name->GetIsolate(), receiver, name, holder);
  return GetProperty(&it);
}
7201 
7202 
// Initializes the properties slot with the canonical empty container
// matching the map's mode (dictionary vs. fast). The DCHECKs establish
// that both containers are outside new space, so a plain WRITE_FIELD
// without a write barrier is safe.
void JSReceiver::initialize_properties() {
  DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
  DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_properties_dictionary()));
  if (map()->is_dictionary_map()) {
    WRITE_FIELD(this, kPropertiesOffset,
                GetHeap()->empty_properties_dictionary());
  } else {
    WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
  }
}


// Fast properties iff the backing store is not a dictionary.
bool JSReceiver::HasFastProperties() {
  DCHECK(properties()->IsDictionary() == map()->is_dictionary_map());
  return !properties()->IsDictionary();
}


// Property dictionary for non-global slow-mode receivers (globals use
// JSObject::global_dictionary() instead).
NameDictionary* JSReceiver::property_dictionary() {
  DCHECK(!HasFastProperties());
  DCHECK(!IsJSGlobalObject());
  return NameDictionary::cast(properties());
}
7226 
Maybe<bool> JSReceiver::HasProperty(Handle<JSReceiver> object,
                                    Handle<Name> name) {
  LookupIterator it = LookupIterator::PropertyOrElement(object->GetIsolate(),
                                                        object, name, object);
  return HasProperty(&it);
}


// For plain JSObjects an OWN-restricted lookup suffices; other
// receivers fall back to GetOwnPropertyAttributes.
Maybe<bool> JSReceiver::HasOwnProperty(Handle<JSReceiver> object,
                                       Handle<Name> name) {
  if (object->IsJSObject()) {  // Shortcut
    LookupIterator it = LookupIterator::PropertyOrElement(
        object->GetIsolate(), object, name, object, LookupIterator::OWN);
    return HasProperty(&it);
  }

  Maybe<PropertyAttributes> attributes =
      JSReceiver::GetOwnPropertyAttributes(object, name);
  MAYBE_RETURN(attributes, Nothing<bool>());
  return Just(attributes.FromJust() != ABSENT);
}

// Indexed (element) flavor of HasOwnProperty.
Maybe<bool> JSReceiver::HasOwnProperty(Handle<JSReceiver> object,
                                       uint32_t index) {
  if (object->IsJSObject()) {  // Shortcut
    LookupIterator it(object->GetIsolate(), object, index, object,
                      LookupIterator::OWN);
    return HasProperty(&it);
  }

  Maybe<PropertyAttributes> attributes =
      JSReceiver::GetOwnPropertyAttributes(object, index);
  MAYBE_RETURN(attributes, Nothing<bool>());
  return Just(attributes.FromJust() != ABSENT);
}
7262 
Maybe<PropertyAttributes> JSReceiver::GetPropertyAttributes(
    Handle<JSReceiver> object, Handle<Name> name) {
  LookupIterator it = LookupIterator::PropertyOrElement(name->GetIsolate(),
                                                        object, name, object);
  return GetPropertyAttributes(&it);
}


// Same, but with the lookup restricted via LookupIterator::OWN.
Maybe<PropertyAttributes> JSReceiver::GetOwnPropertyAttributes(
    Handle<JSReceiver> object, Handle<Name> name) {
  LookupIterator it = LookupIterator::PropertyOrElement(
      name->GetIsolate(), object, name, object, LookupIterator::OWN);
  return GetPropertyAttributes(&it);
}

// Indexed (element) flavor of GetOwnPropertyAttributes.
Maybe<PropertyAttributes> JSReceiver::GetOwnPropertyAttributes(
    Handle<JSReceiver> object, uint32_t index) {
  LookupIterator it(object->GetIsolate(), object, index, object,
                    LookupIterator::OWN);
  return GetPropertyAttributes(&it);
}

Maybe<bool> JSReceiver::HasElement(Handle<JSReceiver> object, uint32_t index) {
  LookupIterator it(object->GetIsolate(), object, index, object);
  return HasProperty(&it);
}


Maybe<PropertyAttributes> JSReceiver::GetElementAttributes(
    Handle<JSReceiver> object, uint32_t index) {
  Isolate* isolate = object->GetIsolate();
  LookupIterator it(isolate, object, index, object);
  return GetPropertyAttributes(&it);
}


Maybe<PropertyAttributes> JSReceiver::GetOwnElementAttributes(
    Handle<JSReceiver> object, uint32_t index) {
  Isolate* isolate = object->GetIsolate();
  LookupIterator it(isolate, object, index, object, LookupIterator::OWN);
  return GetPropertyAttributes(&it);
}
7305 
7306 
// A global object is detached when its proxy no longer refers to it.
bool JSGlobalObject::IsDetached() {
  return JSGlobalProxy::cast(global_proxy())->IsDetachedFrom(this);
}


// Detached iff |global| is not the proxy's current prototype.
bool JSGlobalProxy::IsDetachedFrom(JSGlobalObject* global) const {
  const PrototypeIterator iter(this->GetIsolate(),
                               const_cast<JSGlobalProxy*>(this));
  return iter.GetCurrent() != global;
}
7317 
GetOrCreateIdentityHash(Isolate * isolate,Handle<JSReceiver> object)7318 Smi* JSReceiver::GetOrCreateIdentityHash(Isolate* isolate,
7319                                          Handle<JSReceiver> object) {
7320   return object->IsJSProxy() ? JSProxy::GetOrCreateIdentityHash(
7321                                    isolate, Handle<JSProxy>::cast(object))
7322                              : JSObject::GetOrCreateIdentityHash(
7323                                    isolate, Handle<JSObject>::cast(object));
7324 }
7325 
GetIdentityHash(Isolate * isolate,Handle<JSReceiver> receiver)7326 Object* JSReceiver::GetIdentityHash(Isolate* isolate,
7327                                     Handle<JSReceiver> receiver) {
7328   return receiver->IsJSProxy()
7329              ? JSProxy::GetIdentityHash(Handle<JSProxy>::cast(receiver))
7330              : JSObject::GetIdentityHash(isolate,
7331                                          Handle<JSObject>::cast(receiver));
7332 }
7333 
7334 
// AccessorInfo flag bits, stored via BooleanBit / AttributesField in
// the flag() word.

bool AccessorInfo::all_can_read() {
  return BooleanBit::get(flag(), kAllCanReadBit);
}


void AccessorInfo::set_all_can_read(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
}


bool AccessorInfo::all_can_write() {
  return BooleanBit::get(flag(), kAllCanWriteBit);
}


void AccessorInfo::set_all_can_write(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
}


bool AccessorInfo::is_special_data_property() {
  return BooleanBit::get(flag(), kSpecialDataProperty);
}


void AccessorInfo::set_is_special_data_property(bool value) {
  set_flag(BooleanBit::set(flag(), kSpecialDataProperty, value));
}

bool AccessorInfo::is_sloppy() { return BooleanBit::get(flag(), kIsSloppy); }

void AccessorInfo::set_is_sloppy(bool value) {
  set_flag(BooleanBit::set(flag(), kIsSloppy, value));
}

// Property attributes occupy a multi-bit field of the flag word.
PropertyAttributes AccessorInfo::property_attributes() {
  return AttributesField::decode(static_cast<uint32_t>(flag()));
}


void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
  set_flag(AttributesField::update(flag(), attributes));
}
7378 
bool FunctionTemplateInfo::IsTemplateFor(JSObject* object) {
  return IsTemplateFor(object->map());
}

// A receiver is compatible if no receiver type is expected, or if the
// receiver is a JSObject whose map matches the expected template.
bool AccessorInfo::IsCompatibleReceiver(Object* receiver) {
  if (!HasExpectedReceiverType()) return true;
  if (!receiver->IsJSObject()) return false;
  return FunctionTemplateInfo::cast(expected_receiver_type())
      ->IsTemplateFor(JSObject::cast(receiver)->map());
}


bool AccessorInfo::HasExpectedReceiverType() {
  return expected_receiver_type()->IsFunctionTemplateInfo();
}
7394 
7395 
get(AccessorComponent component)7396 Object* AccessorPair::get(AccessorComponent component) {
7397   return component == ACCESSOR_GETTER ? getter() : setter();
7398 }
7399 
7400 
set(AccessorComponent component,Object * value)7401 void AccessorPair::set(AccessorComponent component, Object* value) {
7402   if (component == ACCESSOR_GETTER) {
7403     set_getter(value);
7404   } else {
7405     set_setter(value);
7406   }
7407 }
7408 
7409 
void AccessorPair::SetComponents(Object* getter, Object* setter) {
  // Null arguments leave the corresponding component unchanged.
  Isolate* isolate = GetIsolate();
  if (!getter->IsNull(isolate)) set_getter(getter);
  if (!setter->IsNull(isolate)) set_setter(setter);
}
7415 
7416 
bool AccessorPair::Equals(AccessorPair* pair) {
  // Identity short-circuit, otherwise compare both components.
  return (this == pair) || pair->Equals(getter(), setter());
}
7420 
7421 
bool AccessorPair::Equals(Object* getter_value, Object* setter_value) {
  // Component-wise identity comparison.
  return (getter() == getter_value) && (setter() == setter_value);
}
7425 
7426 
bool AccessorPair::ContainsAccessor() {
  // True when either component qualifies as a JS accessor (see IsJSAccessor).
  return IsJSAccessor(getter()) || IsJSAccessor(setter());
}
7430 
7431 
bool AccessorPair::IsJSAccessor(Object* obj) {
  // A component counts as a JS accessor when it is callable or undefined.
  return obj->IsCallable() || obj->IsUndefined(GetIsolate());
}
7435 
7436 
7437 template<typename Derived, typename Shape, typename Key>
SetEntry(int entry,Handle<Object> key,Handle<Object> value)7438 void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
7439                                                Handle<Object> key,
7440                                                Handle<Object> value) {
7441   this->SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
7442 }
7443 
7444 
template<typename Derived, typename Shape, typename Key>
void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
                                               Handle<Object> key,
                                               Handle<Object> value,
                                               PropertyDetails details) {
  // The Shape policy knows the per-entry slot layout.
  Shape::SetEntry(static_cast<Derived*>(this), entry, key, value, details);
}
7452 
7453 
template <typename Key>
template <typename Dictionary>
void BaseDictionaryShape<Key>::SetEntry(Dictionary* dict, int entry,
                                        Handle<Object> key,
                                        Handle<Object> value,
                                        PropertyDetails details) {
  // Entries are triples: (key, value, details).
  STATIC_ASSERT(Dictionary::kEntrySize == 3);
  // Named properties must carry a valid (positive) enumeration index.
  DCHECK(!key->IsName() || details.dictionary_index() > 0);
  int index = dict->EntryToIndex(entry);
  DisallowHeapAllocation no_gc;
  WriteBarrierMode mode = dict->GetWriteBarrierMode(no_gc);
  dict->set(index + Dictionary::kEntryKeyIndex, *key, mode);
  dict->set(index + Dictionary::kEntryValueIndex, *value, mode);
  // Details are a Smi, so no write barrier is needed for the last slot.
  dict->set(index + Dictionary::kEntryDetailsIndex, details.AsSmi());
}
7469 
7470 
template <typename Dictionary>
void GlobalDictionaryShape::SetEntry(Dictionary* dict, int entry,
                                     Handle<Object> key, Handle<Object> value,
                                     PropertyDetails details) {
  // Global entries are pairs: (key, PropertyCell). The property details are
  // stored on the cell rather than in a third dictionary slot.
  STATIC_ASSERT(Dictionary::kEntrySize == 2);
  DCHECK(!key->IsName() || details.dictionary_index() > 0);
  DCHECK(value->IsPropertyCell());
  int index = dict->EntryToIndex(entry);
  DisallowHeapAllocation no_gc;
  WriteBarrierMode mode = dict->GetWriteBarrierMode(no_gc);
  dict->set(index + Dictionary::kEntryKeyIndex, *key, mode);
  dict->set(index + Dictionary::kEntryValueIndex, *value, mode);
  PropertyCell::cast(*value)->set_property_details(details);
}
7485 
7486 
IsMatch(uint32_t key,Object * other)7487 bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
7488   DCHECK(other->IsNumber());
7489   return key == static_cast<uint32_t>(other->Number());
7490 }
7491 
7492 
uint32_t UnseededNumberDictionaryShape::Hash(uint32_t key) {
  // Unseeded tables hash with a zero seed.
  return ComputeIntegerHash(key, 0);
}
7496 
7497 
HashForObject(uint32_t key,Object * other)7498 uint32_t UnseededNumberDictionaryShape::HashForObject(uint32_t key,
7499                                                       Object* other) {
7500   DCHECK(other->IsNumber());
7501   return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), 0);
7502 }
7503 
7504 
uint32_t SeededNumberDictionaryShape::SeededHash(uint32_t key, uint32_t seed) {
  // Seeded variant: mixes the given seed into the integer hash.
  return ComputeIntegerHash(key, seed);
}
7508 
7509 
SeededHashForObject(uint32_t key,uint32_t seed,Object * other)7510 uint32_t SeededNumberDictionaryShape::SeededHashForObject(uint32_t key,
7511                                                           uint32_t seed,
7512                                                           Object* other) {
7513   DCHECK(other->IsNumber());
7514   return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), seed);
7515 }
7516 
7517 
Handle<Object> NumberDictionaryShape::AsHandle(Isolate* isolate, uint32_t key) {
  // Numeric keys are materialized as number objects on demand.
  return isolate->factory()->NewNumberFromUint(key);
}
7521 
7522 
IsMatch(Handle<Name> key,Object * other)7523 bool NameDictionaryShape::IsMatch(Handle<Name> key, Object* other) {
7524   // We know that all entries in a hash table had their hash keys created.
7525   // Use that knowledge to have fast failure.
7526   if (key->Hash() != Name::cast(other)->Hash()) return false;
7527   return key->Equals(Name::cast(other));
7528 }
7529 
7530 
uint32_t NameDictionaryShape::Hash(Handle<Name> key) {
  // Names provide their own hash.
  return key->Hash();
}
7534 
7535 
uint32_t NameDictionaryShape::HashForObject(Handle<Name> key, Object* other) {
  // Hash of a stored entry; the probe key is unused here.
  return Name::cast(other)->Hash();
}
7539 
7540 
Handle<Object> NameDictionaryShape::AsHandle(Isolate* isolate,
                                             Handle<Name> key) {
  // Keys must already be unique names; they are stored as-is.
  DCHECK(key->IsUniqueName());
  return key;
}
7546 
7547 
Handle<FixedArray> NameDictionary::DoGenerateNewEnumerationIndices(
    Handle<NameDictionary> dictionary) {
  // Forwards to the shared Dictionary implementation.
  return DerivedDictionary::GenerateNewEnumerationIndices(dictionary);
}
7552 
7553 
7554 template <typename Dictionary>
DetailsAt(Dictionary * dict,int entry)7555 PropertyDetails GlobalDictionaryShape::DetailsAt(Dictionary* dict, int entry) {
7556   DCHECK(entry >= 0);  // Not found is -1, which is not caught by get().
7557   Object* raw_value = dict->ValueAt(entry);
7558   DCHECK(raw_value->IsPropertyCell());
7559   PropertyCell* cell = PropertyCell::cast(raw_value);
7560   return cell->property_details();
7561 }
7562 
7563 
7564 template <typename Dictionary>
DetailsAtPut(Dictionary * dict,int entry,PropertyDetails value)7565 void GlobalDictionaryShape::DetailsAtPut(Dictionary* dict, int entry,
7566                                          PropertyDetails value) {
7567   DCHECK(entry >= 0);  // Not found is -1, which is not caught by get().
7568   Object* raw_value = dict->ValueAt(entry);
7569   DCHECK(raw_value->IsPropertyCell());
7570   PropertyCell* cell = PropertyCell::cast(raw_value);
7571   cell->set_property_details(value);
7572 }
7573 
7574 
7575 template <typename Dictionary>
IsDeleted(Dictionary * dict,int entry)7576 bool GlobalDictionaryShape::IsDeleted(Dictionary* dict, int entry) {
7577   DCHECK(dict->ValueAt(entry)->IsPropertyCell());
7578   Isolate* isolate = dict->GetIsolate();
7579   return PropertyCell::cast(dict->ValueAt(entry))->value()->IsTheHole(isolate);
7580 }
7581 
7582 
bool ObjectHashTableShape::IsMatch(Handle<Object> key, Object* other) {
  // Keys compare with SameValue semantics.
  return key->SameValue(other);
}
7586 
7587 
uint32_t ObjectHashTableShape::Hash(Handle<Object> key) {
  // The key must already have a hash; GetHash() returns it as a Smi.
  return Smi::cast(key->GetHash())->value();
}
7591 
7592 
uint32_t ObjectHashTableShape::HashForObject(Handle<Object> key,
                                             Object* other) {
  // The stored object must already have a hash; the probe key is unused.
  return Smi::cast(other->GetHash())->value();
}
7597 
7598 
Handle<Object> ObjectHashTableShape::AsHandle(Isolate* isolate,
                                              Handle<Object> key) {
  // Keys are stored as-is.
  return key;
}
7603 
7604 
Handle<ObjectHashTable> ObjectHashTable::Shrink(
    Handle<ObjectHashTable> table, Handle<Object> key) {
  // Forwards to the shared HashTable implementation.
  return DerivedHashTable::Shrink(table, key);
}
7609 
7610 
Object* OrderedHashMap::ValueAt(int entry) {
  // The value lives kValueOffset slots after the key within the entry.
  return get(EntryToIndex(entry) + kValueOffset);
}
7614 
7615 
7616 template <int entrysize>
IsMatch(Handle<Object> key,Object * other)7617 bool WeakHashTableShape<entrysize>::IsMatch(Handle<Object> key, Object* other) {
7618   if (other->IsWeakCell()) other = WeakCell::cast(other)->value();
7619   return key->IsWeakCell() ? WeakCell::cast(*key)->value() == other
7620                            : *key == other;
7621 }
7622 
7623 
7624 template <int entrysize>
Hash(Handle<Object> key)7625 uint32_t WeakHashTableShape<entrysize>::Hash(Handle<Object> key) {
7626   intptr_t hash =
7627       key->IsWeakCell()
7628           ? reinterpret_cast<intptr_t>(WeakCell::cast(*key)->value())
7629           : reinterpret_cast<intptr_t>(*key);
7630   return (uint32_t)(hash & 0xFFFFFFFF);
7631 }
7632 
7633 
7634 template <int entrysize>
HashForObject(Handle<Object> key,Object * other)7635 uint32_t WeakHashTableShape<entrysize>::HashForObject(Handle<Object> key,
7636                                                       Object* other) {
7637   if (other->IsWeakCell()) other = WeakCell::cast(other)->value();
7638   intptr_t hash = reinterpret_cast<intptr_t>(other);
7639   return (uint32_t)(hash & 0xFFFFFFFF);
7640 }
7641 
7642 
template <int entrysize>
Handle<Object> WeakHashTableShape<entrysize>::AsHandle(Isolate* isolate,
                                                       Handle<Object> key) {
  // Keys are stored as-is.
  return key;
}
7648 
7649 
// Decodes the AsmModuleField bit from the packed scope flags.
bool ScopeInfo::IsAsmModule() { return AsmModuleField::decode(Flags()); }
7651 
7652 
// Decodes the AsmFunctionField bit from the packed scope flags.
bool ScopeInfo::IsAsmFunction() { return AsmFunctionField::decode(Flags()); }
7654 
7655 
HasSimpleParameters()7656 bool ScopeInfo::HasSimpleParameters() {
7657   return HasSimpleParametersField::decode(Flags());
7658 }
7659 
7660 
// Defines Set##name/name accessors for each numeric ScopeInfo field; the
// getter returns 0 for the empty (zero-length) ScopeInfo.
#define SCOPE_INFO_FIELD_ACCESSORS(name)                                      \
  void ScopeInfo::Set##name(int value) { set(k##name, Smi::FromInt(value)); } \
  int ScopeInfo::name() {                                                     \
    if (length() > 0) {                                                       \
      return Smi::cast(get(k##name))->value();                                \
    } else {                                                                  \
      return 0;                                                               \
    }                                                                         \
  }
FOR_EACH_SCOPE_INFO_NUMERIC_FIELD(SCOPE_INFO_FIELD_ACCESSORS)
#undef SCOPE_INFO_FIELD_ACCESSORS
7672 
7673 
// Resets the map's code cache to the canonical empty FixedArray.
void Map::ClearCodeCache(Heap* heap) {
  // No write barrier is needed since empty_fixed_array is not in new space.
  // Please note this function is used during marking:
  //  - MarkCompactCollector::MarkUnmarkedObject
  //  - IncrementalMarking::Step
  WRITE_FIELD(this, kCodeCacheOffset, heap->empty_fixed_array());
}
7681 
7682 
// Computes how much extra capacity to reserve when growing an array of
// |old_size| elements, never exceeding |size_limit|.
int Map::SlackForArraySize(int old_size, int size_limit) {
  const int max_slack = size_limit - old_size;
  CHECK_LE(0, max_slack);
  // Small arrays grow by a single slot; larger ones by a quarter of their
  // current size, capped by the remaining headroom.
  if (old_size < 4) {
    DCHECK_LE(1, max_slack);
    return 1;
  }
  return Min(max_slack, old_size / 4);
}
7692 
7693 
// Smi overload of set_length.
void JSArray::set_length(Smi* length) {
  // Don't need a write barrier for a Smi.
  set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
}
7698 
7699 
bool JSArray::SetLengthWouldNormalize(Heap* heap, uint32_t new_length) {
  // If the new array won't fit in some non-trivial fraction of the max old
  // space size, then force it to go dictionary mode.
  uint32_t max_fast_array_size =
      static_cast<uint32_t>((heap->MaxOldGenerationSize() / kDoubleSize) / 4);
  return new_length >= max_fast_array_size;
}
7707 
7708 
bool JSArray::AllowsSetLength() {
  // Length is settable only for FixedArray/FixedDoubleArray backing stores,
  // i.e. not for fixed typed array elements.
  bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray();
  DCHECK(result == !HasFixedTypedArrayElements());
  return result;
}
7714 
7715 
// Installs |storage| as the array's backing store and updates the length.
void JSArray::SetContent(Handle<JSArray> array,
                         Handle<FixedArrayBase> storage) {
  EnsureCanContainElements(array, storage, storage->length(),
                           ALLOW_COPIED_DOUBLE_ELEMENTS);

  // The storage kind must agree with the array's elements kind: double
  // storage goes with fast-double kinds, object storage with fast-object
  // kinds, and smi-kind arrays must contain only smis or holes.
  DCHECK((storage->map() == array->GetHeap()->fixed_double_array_map() &&
          IsFastDoubleElementsKind(array->GetElementsKind())) ||
         ((storage->map() != array->GetHeap()->fixed_double_array_map()) &&
          (IsFastObjectElementsKind(array->GetElementsKind()) ||
           (IsFastSmiElementsKind(array->GetElementsKind()) &&
            Handle<FixedArray>::cast(storage)->ContainsOnlySmisOrHoles()))));
  array->set_elements(*storage);
  array->set_length(Smi::FromInt(storage->length()));
}
7730 
7731 
bool JSArray::HasArrayPrototype(Isolate* isolate) {
  // True when this array's prototype is the isolate's initial Array
  // prototype (identity comparison).
  return map()->prototype() == *isolate->initial_array_prototype();
}
7735 
7736 
ic_total_count()7737 int TypeFeedbackInfo::ic_total_count() {
7738   int current = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
7739   return ICTotalCountField::decode(current);
7740 }
7741 
7742 
set_ic_total_count(int count)7743 void TypeFeedbackInfo::set_ic_total_count(int count) {
7744   int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
7745   value = ICTotalCountField::update(value,
7746                                     ICTotalCountField::decode(count));
7747   WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
7748 }
7749 
7750 
ic_with_type_info_count()7751 int TypeFeedbackInfo::ic_with_type_info_count() {
7752   int current = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
7753   return ICsWithTypeInfoCountField::decode(current);
7754 }
7755 
7756 
// Adjusts the "ICs with type info" counter stored in storage word 2.
void TypeFeedbackInfo::change_ic_with_type_info_count(int delta) {
  if (delta == 0) return;
  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  int new_count = ICsWithTypeInfoCountField::decode(value) + delta;
  // We can get a negative count here when the type-feedback info is
  // shared between two code objects. This can only happen when
  // the debugger made a shallow copy of a code object (see Heap::CopyCode).
  // Since we do not optimize when the debugger is active, we can skip
  // this counter update.
  if (new_count >= 0) {
    new_count &= ICsWithTypeInfoCountField::kMask;
    value = ICsWithTypeInfoCountField::update(value, new_count);
    WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
  }
}
7772 
7773 
int TypeFeedbackInfo::ic_generic_count() {
  // Storage word 3 holds the generic-IC count directly as a Smi.
  return Smi::cast(READ_FIELD(this, kStorage3Offset))->value();
}
7777 
7778 
change_ic_generic_count(int delta)7779 void TypeFeedbackInfo::change_ic_generic_count(int delta) {
7780   if (delta == 0) return;
7781   int new_count = ic_generic_count() + delta;
7782   if (new_count >= 0) {
7783     new_count &= ~Smi::kMinValue;
7784     WRITE_FIELD(this, kStorage3Offset, Smi::FromInt(new_count));
7785   }
7786 }
7787 
7788 
void TypeFeedbackInfo::initialize_storage() {
  // Zero all three packed storage words.
  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(0));
  WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(0));
  WRITE_FIELD(this, kStorage3Offset, Smi::FromInt(0));
}
7794 
7795 
// Increments the own-type-change checksum (mod 2^kTypeChangeChecksumBits).
void TypeFeedbackInfo::change_own_type_change_checksum() {
  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  int checksum = OwnTypeChangeChecksum::decode(value);
  checksum = (checksum + 1) % (1 << kTypeChangeChecksumBits);
  value = OwnTypeChangeChecksum::update(value, checksum);
  // Ensure packed bit field is in Smi range.
  if (value > Smi::kMaxValue) value |= Smi::kMinValue;
  if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
}
7806 
7807 
// Stores the low kTypeChangeChecksumBits of |checksum| into storage word 2.
void TypeFeedbackInfo::set_inlined_type_change_checksum(int checksum) {
  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  int mask = (1 << kTypeChangeChecksumBits) - 1;
  value = InlinedTypeChangeChecksum::update(value, checksum & mask);
  // Ensure packed bit field is in Smi range.
  if (value > Smi::kMaxValue) value |= Smi::kMinValue;
  if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
  WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
}
7817 
7818 
own_type_change_checksum()7819 int TypeFeedbackInfo::own_type_change_checksum() {
7820   int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
7821   return OwnTypeChangeChecksum::decode(value);
7822 }
7823 
7824 
matches_inlined_type_change_checksum(int checksum)7825 bool TypeFeedbackInfo::matches_inlined_type_change_checksum(int checksum) {
7826   int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
7827   int mask = (1 << kTypeChangeChecksumBits) - 1;
7828   return InlinedTypeChangeChecksum::decode(value) == (checksum & mask);
7829 }
7830 
7831 
// Smi accessor for the context slot aliased by this arguments entry.
SMI_ACCESSORS(AliasedArgumentsEntry, aliased_context_slot, kAliasedContextSlot)
7833 
7834 
Relocatable::Relocatable(Isolate* isolate) {
  // Push this object onto the isolate's intrusive stack of relocatables.
  isolate_ = isolate;
  prev_ = isolate->relocatable_top();
  isolate->set_relocatable_top(this);
}
7840 
7841 
Relocatable::~Relocatable() {
  // Relocatables must be destroyed in strict LIFO order.
  DCHECK_EQ(isolate_->relocatable_top(), this);
  isolate_->set_relocatable_top(prev_);
}
7846 
7847 
7848 template<class Derived, class TableType>
CurrentKey()7849 Object* OrderedHashTableIterator<Derived, TableType>::CurrentKey() {
7850   TableType* table(TableType::cast(this->table()));
7851   int index = Smi::cast(this->index())->value();
7852   Object* key = table->KeyAt(index);
7853   DCHECK(!key->IsTheHole(table->GetIsolate()));
7854   return key;
7855 }
7856 
7857 
void JSSetIterator::PopulateValueArray(FixedArray* array) {
  // Set entries expose only the key.
  array->set(0, CurrentKey());
}
7861 
7862 
void JSMapIterator::PopulateValueArray(FixedArray* array) {
  // Slot 0 holds the key, slot 1 the value of the current entry.
  array->set(0, CurrentKey());
  array->set(1, CurrentValue());
}
7867 
7868 
CurrentValue()7869 Object* JSMapIterator::CurrentValue() {
7870   OrderedHashMap* table(OrderedHashMap::cast(this->table()));
7871   int index = Smi::cast(this->index())->value();
7872   Object* value = table->ValueAt(index);
7873   DCHECK(!value->IsTheHole(table->GetIsolate()));
7874   return value;
7875 }
7876 
7877 
String::SubStringRange::SubStringRange(String* string, int first, int length)
    : string_(string),
      first_(first),
      // A length of -1 means "to the end of the string".
      length_(length == -1 ? string->length() : length) {}
7882 
7883 
// Forward iterator over the uc16 code units of a SubStringRange. Each
// iterator captures its own FlatContent for the underlying string.
// NOTE(review): FlatContent appears to assume the string is not moved while
// the iterator is in use — confirm GC constraints at call sites.
class String::SubStringRange::iterator final {
 public:
  typedef std::forward_iterator_tag iterator_category;
  typedef int difference_type;
  typedef uc16 value_type;
  typedef uc16* pointer;
  typedef uc16& reference;

  iterator(const iterator& other)
      : content_(other.content_), offset_(other.offset_) {}

  uc16 operator*() { return content_.Get(offset_); }
  bool operator==(const iterator& other) const {
    return content_.UsesSameString(other.content_) && offset_ == other.offset_;
  }
  bool operator!=(const iterator& other) const {
    return !content_.UsesSameString(other.content_) || offset_ != other.offset_;
  }
  iterator& operator++() {
    ++offset_;
    return *this;
  }
  iterator operator++(int);

 private:
  // Only String::SubStringRange (via String) may construct iterators.
  friend class String;
  iterator(String* from, int offset)
      : content_(from->GetFlatContent()), offset_(offset) {}
  String::FlatContent content_;
  int offset_;
};
7915 
7916 
String::SubStringRange::iterator String::SubStringRange::begin() {
  // Iteration starts at the range's first offset.
  return String::SubStringRange::iterator(string_, first_);
}
7920 
7921 
String::SubStringRange::iterator String::SubStringRange::end() {
  // One past the last code unit of the range.
  return String::SubStringRange::iterator(string_, first_ + length_);
}
7925 
7926 
7927 // Predictably converts HeapObject* or Address to uint32 by calculating
7928 // offset of the address in respective MemoryChunk.
ObjectAddressForHashing(void * object)7929 static inline uint32_t ObjectAddressForHashing(void* object) {
7930   uint32_t value = static_cast<uint32_t>(reinterpret_cast<uintptr_t>(object));
7931   return value & MemoryChunk::kAlignmentMask;
7932 }
7933 
MakeEntryPair(Isolate * isolate,uint32_t index,Handle<Object> value)7934 static inline Handle<Object> MakeEntryPair(Isolate* isolate, uint32_t index,
7935                                            Handle<Object> value) {
7936   Handle<Object> key = isolate->factory()->Uint32ToString(index);
7937   Handle<FixedArray> entry_storage =
7938       isolate->factory()->NewUninitializedFixedArray(2);
7939   {
7940     entry_storage->set(0, *key, SKIP_WRITE_BARRIER);
7941     entry_storage->set(1, *value, SKIP_WRITE_BARRIER);
7942   }
7943   return isolate->factory()->NewJSArrayWithElements(entry_storage,
7944                                                     FAST_ELEMENTS, 2);
7945 }
7946 
MakeEntryPair(Isolate * isolate,Handle<Name> key,Handle<Object> value)7947 static inline Handle<Object> MakeEntryPair(Isolate* isolate, Handle<Name> key,
7948                                            Handle<Object> value) {
7949   Handle<FixedArray> entry_storage =
7950       isolate->factory()->NewUninitializedFixedArray(2);
7951   {
7952     entry_storage->set(0, *key, SKIP_WRITE_BARRIER);
7953     entry_storage->set(1, *value, SKIP_WRITE_BARRIER);
7954   }
7955   return isolate->factory()->NewJSArrayWithElements(entry_storage,
7956                                                     FAST_ELEMENTS, 2);
7957 }
7958 
7959 #undef TYPE_CHECKER
7960 #undef CAST_ACCESSOR
7961 #undef INT_ACCESSORS
7962 #undef ACCESSORS
7963 #undef SMI_ACCESSORS
7964 #undef SYNCHRONIZED_SMI_ACCESSORS
7965 #undef NOBARRIER_SMI_ACCESSORS
7966 #undef BOOL_GETTER
7967 #undef BOOL_ACCESSORS
7968 #undef FIELD_ADDR
7969 #undef FIELD_ADDR_CONST
7970 #undef READ_FIELD
7971 #undef NOBARRIER_READ_FIELD
7972 #undef WRITE_FIELD
7973 #undef NOBARRIER_WRITE_FIELD
7974 #undef WRITE_BARRIER
7975 #undef CONDITIONAL_WRITE_BARRIER
7976 #undef READ_DOUBLE_FIELD
7977 #undef WRITE_DOUBLE_FIELD
7978 #undef READ_INT_FIELD
7979 #undef WRITE_INT_FIELD
7980 #undef READ_INTPTR_FIELD
7981 #undef WRITE_INTPTR_FIELD
7982 #undef READ_UINT8_FIELD
7983 #undef WRITE_UINT8_FIELD
7984 #undef READ_INT8_FIELD
7985 #undef WRITE_INT8_FIELD
7986 #undef READ_UINT16_FIELD
7987 #undef WRITE_UINT16_FIELD
7988 #undef READ_INT16_FIELD
7989 #undef WRITE_INT16_FIELD
7990 #undef READ_UINT32_FIELD
7991 #undef WRITE_UINT32_FIELD
7992 #undef READ_INT32_FIELD
7993 #undef WRITE_INT32_FIELD
7994 #undef READ_FLOAT_FIELD
7995 #undef WRITE_FLOAT_FIELD
7996 #undef READ_UINT64_FIELD
7997 #undef WRITE_UINT64_FIELD
7998 #undef READ_INT64_FIELD
7999 #undef WRITE_INT64_FIELD
8000 #undef READ_BYTE_FIELD
8001 #undef WRITE_BYTE_FIELD
8002 #undef NOBARRIER_READ_BYTE_FIELD
8003 #undef NOBARRIER_WRITE_BYTE_FIELD
8004 
8005 }  // namespace internal
8006 }  // namespace v8
8007 
8008 #endif  // V8_OBJECTS_INL_H_
8009