// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// Review notes:
//
// - The use of macros in these inline functions may seem superfluous
// but it is absolutely needed to make sure gcc generates optimal
// code. gcc is not happy when attempting to inline too deep.
//

#ifndef V8_OBJECTS_INL_H_
#define V8_OBJECTS_INL_H_

#include "src/base/atomicops.h"
#include "src/elements.h"
#include "src/objects.h"
#include "src/contexts.h"
#include "src/conversions-inl.h"
#include "src/field-index-inl.h"
#include "src/heap.h"
#include "src/isolate.h"
#include "src/heap-inl.h"
#include "src/property.h"
#include "src/spaces.h"
#include "src/store-buffer.h"
#include "src/v8memory.h"
#include "src/factory.h"
#include "src/incremental-marking.h"
#include "src/transitions-inl.h"
#include "src/objects-visiting.h"
#include "src/lookup.h"

namespace v8 {
namespace internal {

PropertyDetails::PropertyDetails(Smi* smi) {
  value_ = smi->value();
}


Smi* PropertyDetails::AsSmi() const {
  // Ensure the upper 2 bits have the same value by sign extending it. This is
  // necessary to be able to use the 31st bit of the property details.
  int value = value_ << 1;
  return Smi::FromInt(value >> 1);
}
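// (Illustration: value_ uses 31 bits, so if bit 30 is set, shifting left by
// one and arithmetically shifting back copies bit 30 into bit 31; the two
// top bits then agree and the result survives the Smi tagging in FromInt.)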


PropertyDetails PropertyDetails::AsDeleted() const {
  Smi* smi = Smi::FromInt(value_ | DeletedField::encode(1));
  return PropertyDetails(smi);
}


#define TYPE_CHECKER(type, instancetype)                                \
  bool Object::Is##type() {                                             \
    return Object::IsHeapObject() &&                                    \
        HeapObject::cast(this)->map()->instance_type() == instancetype; \
  }
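// For instance, TYPE_CHECKER(Map, MAP_TYPE) expands to
//   bool Object::IsMap() {
//     return Object::IsHeapObject() &&
//         HeapObject::cast(this)->map()->instance_type() == MAP_TYPE;
//   }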


#define CAST_ACCESSOR(type)                 \
  type* type::cast(Object* object) {        \
    SLOW_ASSERT(object->Is##type());        \
    return reinterpret_cast<type*>(object); \
  }


#define INT_ACCESSORS(holder, name, offset)                   \
  int holder::name() { return READ_INT_FIELD(this, offset); } \
  void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }


#define ACCESSORS(holder, name, type, offset)                           \
  type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(type* value, WriteBarrierMode mode) {         \
    WRITE_FIELD(this, offset, value);                                   \
    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);    \
  }


// Getter that returns a tagged Smi and setter that writes a tagged Smi.
#define ACCESSORS_TO_SMI(holder, name, offset)                        \
  Smi* holder::name() { return Smi::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(Smi* value, WriteBarrierMode mode) {        \
    WRITE_FIELD(this, offset, value);                                 \
  }


// Getter that returns a Smi as an int and writes an int as a Smi.
#define SMI_ACCESSORS(holder, name, offset)         \
  int holder::name() {                              \
    Object* value = READ_FIELD(this, offset);       \
    return Smi::cast(value)->value();               \
  }                                                 \
  void holder::set_##name(int value) {              \
    WRITE_FIELD(this, offset, Smi::FromInt(value)); \
  }
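// Note that the Smi-writing macros above skip the write barrier that
// ACCESSORS emits: a Smi is an immediate, not a heap pointer, so storing one
// can never create a reference the garbage collector has to track.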

#define SYNCHRONIZED_SMI_ACCESSORS(holder, name, offset)    \
  int holder::synchronized_##name() {                       \
    Object* value = ACQUIRE_READ_FIELD(this, offset);       \
    return Smi::cast(value)->value();                       \
  }                                                         \
  void holder::synchronized_set_##name(int value) {         \
    RELEASE_WRITE_FIELD(this, offset, Smi::FromInt(value)); \
  }

#define NOBARRIER_SMI_ACCESSORS(holder, name, offset)         \
  int holder::nobarrier_##name() {                            \
    Object* value = NOBARRIER_READ_FIELD(this, offset);       \
    return Smi::cast(value)->value();                         \
  }                                                           \
  void holder::nobarrier_set_##name(int value) {              \
    NOBARRIER_WRITE_FIELD(this, offset, Smi::FromInt(value)); \
  }

#define BOOL_GETTER(holder, field, name, offset) \
  bool holder::name() {                          \
    return BooleanBit::get(field(), offset);     \
  }                                              \


#define BOOL_ACCESSORS(holder, field, name, offset)       \
  bool holder::name() {                                   \
    return BooleanBit::get(field(), offset);              \
  }                                                       \
  void holder::set_##name(bool value) {                   \
    set_##field(BooleanBit::set(field(), offset, value)); \
  }
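// A typical use is along the lines of
//   BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
//                  allows_lazy_compilation, kAllowLazyCompilation)
// which packs the flag into a single bit of the compiler_hints field. (The
// names here are illustrative.)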


bool Object::IsFixedArrayBase() {
  return IsFixedArray() || IsFixedDoubleArray() || IsConstantPoolArray() ||
         IsFixedTypedArrayBase() || IsExternalArray();
}


// External objects are not extensible, so the map check is enough.
bool Object::IsExternal() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->external_map();
}


bool Object::IsAccessorInfo() {
  return IsExecutableAccessorInfo() || IsDeclaredAccessorInfo();
}


bool Object::IsSmi() {
  return HAS_SMI_TAG(this);
}


bool Object::IsHeapObject() {
  return Internals::HasHeapObjectTag(this);
}


TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE)
TYPE_CHECKER(Symbol, SYMBOL_TYPE)


bool Object::IsString() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE;
}


bool Object::IsName() {
  return IsString() || IsSymbol();
}


bool Object::IsUniqueName() {
  return IsInternalizedString() || IsSymbol();
}


bool Object::IsSpecObject() {
  return Object::IsHeapObject()
    && HeapObject::cast(this)->map()->instance_type() >= FIRST_SPEC_OBJECT_TYPE;
}


bool Object::IsSpecFunction() {
  if (!Object::IsHeapObject()) return false;
  InstanceType type = HeapObject::cast(this)->map()->instance_type();
  return type == JS_FUNCTION_TYPE || type == JS_FUNCTION_PROXY_TYPE;
}


bool Object::IsTemplateInfo() {
  return IsObjectTemplateInfo() || IsFunctionTemplateInfo();
}


bool Object::IsInternalizedString() {
  if (!this->IsHeapObject()) return false;
  uint32_t type = HeapObject::cast(this)->map()->instance_type();
  STATIC_ASSERT(kNotInternalizedTag != 0);
  return (type & (kIsNotStringMask | kIsNotInternalizedMask)) ==
      (kStringTag | kInternalizedTag);
}


bool Object::IsConsString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsCons();
}


bool Object::IsSlicedString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSliced();
}


bool Object::IsSeqString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential();
}


bool Object::IsSeqOneByteString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential() &&
         String::cast(this)->IsOneByteRepresentation();
}


bool Object::IsSeqTwoByteString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential() &&
         String::cast(this)->IsTwoByteRepresentation();
}


bool Object::IsExternalString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal();
}


bool Object::IsExternalAsciiString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal() &&
         String::cast(this)->IsOneByteRepresentation();
}


bool Object::IsExternalTwoByteString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal() &&
         String::cast(this)->IsTwoByteRepresentation();
}


bool Object::HasValidElements() {
  // Dictionary is covered under FixedArray.
  return IsFixedArray() || IsFixedDoubleArray() || IsExternalArray() ||
         IsFixedTypedArrayBase();
}


Handle<Object> Object::NewStorageFor(Isolate* isolate,
                                     Handle<Object> object,
                                     Representation representation) {
  if (representation.IsSmi() && object->IsUninitialized()) {
    return handle(Smi::FromInt(0), isolate);
  }
  if (!representation.IsDouble()) return object;
  if (object->IsUninitialized()) {
    return isolate->factory()->NewHeapNumber(0);
  }
  return isolate->factory()->NewHeapNumber(object->Number());
}


StringShape::StringShape(String* str)
    : type_(str->map()->instance_type()) {
  set_valid();
  ASSERT((type_ & kIsNotStringMask) == kStringTag);
}


StringShape::StringShape(Map* map)
    : type_(map->instance_type()) {
  set_valid();
  ASSERT((type_ & kIsNotStringMask) == kStringTag);
}


StringShape::StringShape(InstanceType t)
    : type_(static_cast<uint32_t>(t)) {
  set_valid();
  ASSERT((type_ & kIsNotStringMask) == kStringTag);
}


bool StringShape::IsInternalized() {
  ASSERT(valid());
  STATIC_ASSERT(kNotInternalizedTag != 0);
  return (type_ & (kIsNotStringMask | kIsNotInternalizedMask)) ==
      (kStringTag | kInternalizedTag);
}


bool String::IsOneByteRepresentation() {
  uint32_t type = map()->instance_type();
  return (type & kStringEncodingMask) == kOneByteStringTag;
}


bool String::IsTwoByteRepresentation() {
  uint32_t type = map()->instance_type();
  return (type & kStringEncodingMask) == kTwoByteStringTag;
}


bool String::IsOneByteRepresentationUnderneath() {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kIsIndirectStringTag != 0);
  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
  ASSERT(IsFlat());
  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kOneByteStringTag:
      return true;
    case kTwoByteStringTag:
      return false;
    default:  // Cons or sliced string.  Need to go deeper.
      return GetUnderlying()->IsOneByteRepresentation();
  }
}


bool String::IsTwoByteRepresentationUnderneath() {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kIsIndirectStringTag != 0);
  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
  ASSERT(IsFlat());
  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kOneByteStringTag:
      return false;
    case kTwoByteStringTag:
      return true;
    default:  // Cons or sliced string.  Need to go deeper.
      return GetUnderlying()->IsTwoByteRepresentation();
  }
}


bool String::HasOnlyOneByteChars() {
  uint32_t type = map()->instance_type();
  return (type & kOneByteDataHintMask) == kOneByteDataHintTag ||
         IsOneByteRepresentation();
}


bool StringShape::IsCons() {
  return (type_ & kStringRepresentationMask) == kConsStringTag;
}


bool StringShape::IsSliced() {
  return (type_ & kStringRepresentationMask) == kSlicedStringTag;
}


bool StringShape::IsIndirect() {
  return (type_ & kIsIndirectStringMask) == kIsIndirectStringTag;
}


bool StringShape::IsExternal() {
  return (type_ & kStringRepresentationMask) == kExternalStringTag;
}


bool StringShape::IsSequential() {
  return (type_ & kStringRepresentationMask) == kSeqStringTag;
}


StringRepresentationTag StringShape::representation_tag() {
  uint32_t tag = (type_ & kStringRepresentationMask);
  return static_cast<StringRepresentationTag>(tag);
}


uint32_t StringShape::encoding_tag() {
  return type_ & kStringEncodingMask;
}


uint32_t StringShape::full_representation_tag() {
  return (type_ & (kStringRepresentationMask | kStringEncodingMask));
}
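// A string's instance type packs two independent bit fields: the
// representation (sequential, cons, sliced or external) and the encoding
// (one-byte or two-byte). full_representation_tag() masks out both at once,
// so predicates such as IsSequentialAscii() below can test a
// representation/encoding pair with a single compare.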


STATIC_ASSERT((kStringRepresentationMask | kStringEncodingMask) ==
              Internals::kFullStringRepresentationMask);

STATIC_ASSERT(static_cast<uint32_t>(kStringEncodingMask) ==
              Internals::kStringEncodingMask);


bool StringShape::IsSequentialAscii() {
  return full_representation_tag() == (kSeqStringTag | kOneByteStringTag);
}


bool StringShape::IsSequentialTwoByte() {
  return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);
}


bool StringShape::IsExternalAscii() {
  return full_representation_tag() == (kExternalStringTag | kOneByteStringTag);
}


STATIC_ASSERT((kExternalStringTag | kOneByteStringTag) ==
              Internals::kExternalAsciiRepresentationTag);

STATIC_ASSERT(v8::String::ASCII_ENCODING == kOneByteStringTag);


bool StringShape::IsExternalTwoByte() {
  return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
}


STATIC_ASSERT((kExternalStringTag | kTwoByteStringTag) ==
              Internals::kExternalTwoByteRepresentationTag);

STATIC_ASSERT(v8::String::TWO_BYTE_ENCODING == kTwoByteStringTag);

uc32 FlatStringReader::Get(int index) {
  ASSERT(0 <= index && index <= length_);
  if (is_ascii_) {
    return static_cast<const byte*>(start_)[index];
  } else {
    return static_cast<const uc16*>(start_)[index];
  }
}


Handle<Object> StringTableShape::AsHandle(Isolate* isolate, HashTableKey* key) {
  return key->AsHandle(isolate);
}


Handle<Object> MapCacheShape::AsHandle(Isolate* isolate, HashTableKey* key) {
  return key->AsHandle(isolate);
}


Handle<Object> CompilationCacheShape::AsHandle(Isolate* isolate,
                                               HashTableKey* key) {
  return key->AsHandle(isolate);
}


Handle<Object> CodeCacheHashTableShape::AsHandle(Isolate* isolate,
                                                 HashTableKey* key) {
  return key->AsHandle(isolate);
}

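// The key classes below implement the HashTableKey protocol used by the
// string table and the caches above: IsMatch() compares the key against an
// entry already in the table, Hash() computes (and caches) the key's hash,
// HashForObject() recomputes the hash for an existing table entry, and
// AsHandle() materializes the key as a heap object on insertion.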
template <typename Char>
class SequentialStringKey : public HashTableKey {
 public:
  explicit SequentialStringKey(Vector<const Char> string, uint32_t seed)
      : string_(string), hash_field_(0), seed_(seed) { }

  virtual uint32_t Hash() V8_OVERRIDE {
    hash_field_ = StringHasher::HashSequentialString<Char>(string_.start(),
                                                           string_.length(),
                                                           seed_);

    uint32_t result = hash_field_ >> String::kHashShift;
    ASSERT(result != 0);  // Ensure that the hash value of 0 is never computed.
    return result;
  }


  virtual uint32_t HashForObject(Object* other) V8_OVERRIDE {
    return String::cast(other)->Hash();
  }

  Vector<const Char> string_;
  uint32_t hash_field_;
  uint32_t seed_;
};


class OneByteStringKey : public SequentialStringKey<uint8_t> {
 public:
  OneByteStringKey(Vector<const uint8_t> str, uint32_t seed)
      : SequentialStringKey<uint8_t>(str, seed) { }

  virtual bool IsMatch(Object* string) V8_OVERRIDE {
    return String::cast(string)->IsOneByteEqualTo(string_);
  }

  virtual Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE;
};


template<class Char>
class SubStringKey : public HashTableKey {
 public:
  SubStringKey(Handle<String> string, int from, int length)
      : string_(string), from_(from), length_(length) {
    if (string_->IsSlicedString()) {
      string_ = Handle<String>(Unslice(*string_, &from_));
    }
    ASSERT(string_->IsSeqString() || string->IsExternalString());
  }

  virtual uint32_t Hash() V8_OVERRIDE {
    ASSERT(length_ >= 0);
    ASSERT(from_ + length_ <= string_->length());
    const Char* chars = GetChars() + from_;
    hash_field_ = StringHasher::HashSequentialString(
        chars, length_, string_->GetHeap()->HashSeed());
    uint32_t result = hash_field_ >> String::kHashShift;
    ASSERT(result != 0);  // Ensure that the hash value of 0 is never computed.
    return result;
  }

  virtual uint32_t HashForObject(Object* other) V8_OVERRIDE {
    return String::cast(other)->Hash();
  }

  virtual bool IsMatch(Object* string) V8_OVERRIDE;
  virtual Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE;

 private:
  const Char* GetChars();
  String* Unslice(String* string, int* offset) {
    while (string->IsSlicedString()) {
      SlicedString* sliced = SlicedString::cast(string);
      *offset += sliced->offset();
      string = sliced->parent();
    }
    return string;
  }

  Handle<String> string_;
  int from_;
  int length_;
  uint32_t hash_field_;
};


class TwoByteStringKey : public SequentialStringKey<uc16> {
 public:
  explicit TwoByteStringKey(Vector<const uc16> str, uint32_t seed)
      : SequentialStringKey<uc16>(str, seed) { }

  virtual bool IsMatch(Object* string) V8_OVERRIDE {
    return String::cast(string)->IsTwoByteEqualTo(string_);
  }

  virtual Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE;
};


// Utf8StringKey carries a vector of chars as key.
class Utf8StringKey : public HashTableKey {
 public:
  explicit Utf8StringKey(Vector<const char> string, uint32_t seed)
      : string_(string), hash_field_(0), seed_(seed) { }

  virtual bool IsMatch(Object* string) V8_OVERRIDE {
    return String::cast(string)->IsUtf8EqualTo(string_);
  }

  virtual uint32_t Hash() V8_OVERRIDE {
    if (hash_field_ != 0) return hash_field_ >> String::kHashShift;
    hash_field_ = StringHasher::ComputeUtf8Hash(string_, seed_, &chars_);
    uint32_t result = hash_field_ >> String::kHashShift;
    ASSERT(result != 0);  // Ensure that the hash value of 0 is never computed.
    return result;
  }

  virtual uint32_t HashForObject(Object* other) V8_OVERRIDE {
    return String::cast(other)->Hash();
  }

  virtual Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE {
    if (hash_field_ == 0) Hash();
    return isolate->factory()->NewInternalizedStringFromUtf8(
        string_, chars_, hash_field_);
  }

  Vector<const char> string_;
  uint32_t hash_field_;
  int chars_;  // Caches the number of characters when computing the hash code.
  uint32_t seed_;
};


bool Object::IsNumber() {
  return IsSmi() || IsHeapNumber();
}


TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE)
TYPE_CHECKER(FreeSpace, FREE_SPACE_TYPE)


bool Object::IsFiller() {
  if (!Object::IsHeapObject()) return false;
  InstanceType instance_type = HeapObject::cast(this)->map()->instance_type();
  return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
}


bool Object::IsExternalArray() {
  if (!Object::IsHeapObject())
    return false;
  InstanceType instance_type =
      HeapObject::cast(this)->map()->instance_type();
  return (instance_type >= FIRST_EXTERNAL_ARRAY_TYPE &&
          instance_type <= LAST_EXTERNAL_ARRAY_TYPE);
}


#define TYPED_ARRAY_TYPE_CHECKER(Type, type, TYPE, ctype, size)     \
  TYPE_CHECKER(External##Type##Array, EXTERNAL_##TYPE##_ARRAY_TYPE) \
  TYPE_CHECKER(Fixed##Type##Array, FIXED_##TYPE##_ARRAY_TYPE)

TYPED_ARRAYS(TYPED_ARRAY_TYPE_CHECKER)
#undef TYPED_ARRAY_TYPE_CHECKER


bool Object::IsFixedTypedArrayBase() {
  if (!Object::IsHeapObject()) return false;

  InstanceType instance_type =
      HeapObject::cast(this)->map()->instance_type();
  return (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
          instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE);
}


bool Object::IsJSReceiver() {
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  return IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_RECEIVER_TYPE;
}


bool Object::IsJSObject() {
  STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
  return IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_OBJECT_TYPE;
}


bool Object::IsJSProxy() {
  if (!Object::IsHeapObject()) return false;
  return HeapObject::cast(this)->map()->IsJSProxyMap();
}


TYPE_CHECKER(JSFunctionProxy, JS_FUNCTION_PROXY_TYPE)
TYPE_CHECKER(JSSet, JS_SET_TYPE)
TYPE_CHECKER(JSMap, JS_MAP_TYPE)
TYPE_CHECKER(JSSetIterator, JS_SET_ITERATOR_TYPE)
TYPE_CHECKER(JSMapIterator, JS_MAP_ITERATOR_TYPE)
TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE)
TYPE_CHECKER(JSWeakSet, JS_WEAK_SET_TYPE)
TYPE_CHECKER(JSContextExtensionObject, JS_CONTEXT_EXTENSION_OBJECT_TYPE)
TYPE_CHECKER(Map, MAP_TYPE)
TYPE_CHECKER(FixedArray, FIXED_ARRAY_TYPE)
TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)
TYPE_CHECKER(ConstantPoolArray, CONSTANT_POOL_ARRAY_TYPE)


bool Object::IsJSWeakCollection() {
  return IsJSWeakMap() || IsJSWeakSet();
}


bool Object::IsDescriptorArray() {
  return IsFixedArray();
}


bool Object::IsTransitionArray() {
  return IsFixedArray();
}


bool Object::IsDeoptimizationInputData() {
  // Must be a fixed array.
  if (!IsFixedArray()) return false;

  // There's no sure way to detect the difference between a fixed array and
  // a deoptimization data array.  Since this is used for asserts we can
  // check that the length is zero or else the fixed size plus a multiple of
  // the entry size.
  int length = FixedArray::cast(this)->length();
  if (length == 0) return true;

  length -= DeoptimizationInputData::kFirstDeoptEntryIndex;
  return length >= 0 &&
      length % DeoptimizationInputData::kDeoptEntrySize == 0;
}


bool Object::IsDeoptimizationOutputData() {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a deoptimization data array.  Since this is used for asserts we can check
  // that the length is plausible though.
  if (FixedArray::cast(this)->length() % 2 != 0) return false;
  return true;
}


bool Object::IsDependentCode() {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a dependent codes array.
  return true;
}


bool Object::IsContext() {
  if (!Object::IsHeapObject()) return false;
  Map* map = HeapObject::cast(this)->map();
  Heap* heap = map->GetHeap();
  return (map == heap->function_context_map() ||
          map == heap->catch_context_map() ||
          map == heap->with_context_map() ||
          map == heap->native_context_map() ||
          map == heap->block_context_map() ||
          map == heap->module_context_map() ||
          map == heap->global_context_map());
}


bool Object::IsNativeContext() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->native_context_map();
}


bool Object::IsScopeInfo() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->scope_info_map();
}


TYPE_CHECKER(JSFunction, JS_FUNCTION_TYPE)


template <> inline bool Is<JSFunction>(Object* obj) {
  return obj->IsJSFunction();
}


TYPE_CHECKER(Code, CODE_TYPE)
TYPE_CHECKER(Oddball, ODDBALL_TYPE)
TYPE_CHECKER(Cell, CELL_TYPE)
TYPE_CHECKER(PropertyCell, PROPERTY_CELL_TYPE)
TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
TYPE_CHECKER(JSGeneratorObject, JS_GENERATOR_OBJECT_TYPE)
TYPE_CHECKER(JSModule, JS_MODULE_TYPE)
TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
TYPE_CHECKER(JSDate, JS_DATE_TYPE)
TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)


bool Object::IsStringWrapper() {
  return IsJSValue() && JSValue::cast(this)->value()->IsString();
}


TYPE_CHECKER(Foreign, FOREIGN_TYPE)


bool Object::IsBoolean() {
  return IsOddball() &&
      ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
}


TYPE_CHECKER(JSArray, JS_ARRAY_TYPE)
TYPE_CHECKER(JSArrayBuffer, JS_ARRAY_BUFFER_TYPE)
TYPE_CHECKER(JSTypedArray, JS_TYPED_ARRAY_TYPE)
TYPE_CHECKER(JSDataView, JS_DATA_VIEW_TYPE)


bool Object::IsJSArrayBufferView() {
  return IsJSDataView() || IsJSTypedArray();
}


TYPE_CHECKER(JSRegExp, JS_REGEXP_TYPE)


template <> inline bool Is<JSArray>(Object* obj) {
  return obj->IsJSArray();
}


bool Object::IsHashTable() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->hash_table_map();
}


bool Object::IsDictionary() {
  return IsHashTable() &&
      this != HeapObject::cast(this)->GetHeap()->string_table();
}


bool Object::IsStringTable() {
  return IsHashTable();
}


bool Object::IsJSFunctionResultCache() {
  if (!IsFixedArray()) return false;
  FixedArray* self = FixedArray::cast(this);
  int length = self->length();
  if (length < JSFunctionResultCache::kEntriesIndex) return false;
  if ((length - JSFunctionResultCache::kEntriesIndex)
      % JSFunctionResultCache::kEntrySize != 0) {
    return false;
  }
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    reinterpret_cast<JSFunctionResultCache*>(this)->
        JSFunctionResultCacheVerify();
  }
#endif
  return true;
}


bool Object::IsNormalizedMapCache() {
  return NormalizedMapCache::IsNormalizedMapCache(this);
}


int NormalizedMapCache::GetIndex(Handle<Map> map) {
  return map->Hash() % NormalizedMapCache::kEntries;
}


bool NormalizedMapCache::IsNormalizedMapCache(Object* obj) {
  if (!obj->IsFixedArray()) return false;
  if (FixedArray::cast(obj)->length() != NormalizedMapCache::kEntries) {
    return false;
  }
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    reinterpret_cast<NormalizedMapCache*>(obj)->NormalizedMapCacheVerify();
  }
#endif
  return true;
}


bool Object::IsCompilationCacheTable() {
  return IsHashTable();
}


bool Object::IsCodeCacheHashTable() {
  return IsHashTable();
}


bool Object::IsPolymorphicCodeCacheHashTable() {
  return IsHashTable();
}


bool Object::IsMapCache() {
  return IsHashTable();
}


bool Object::IsObjectHashTable() {
  return IsHashTable();
}


bool Object::IsOrderedHashTable() {
  return IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->ordered_hash_table_map();
}


bool Object::IsPrimitive() {
  return IsOddball() || IsNumber() || IsString();
}

bool Object::IsJSGlobalProxy() {
  bool result = IsHeapObject() &&
      (HeapObject::cast(this)->map()->instance_type() ==
       JS_GLOBAL_PROXY_TYPE);
  ASSERT(!result ||
         HeapObject::cast(this)->map()->is_access_check_needed());
  return result;
}


bool Object::IsGlobalObject() {
  if (!IsHeapObject()) return false;

  InstanceType type = HeapObject::cast(this)->map()->instance_type();
  return type == JS_GLOBAL_OBJECT_TYPE ||
         type == JS_BUILTINS_OBJECT_TYPE;
}


TYPE_CHECKER(JSGlobalObject, JS_GLOBAL_OBJECT_TYPE)
TYPE_CHECKER(JSBuiltinsObject, JS_BUILTINS_OBJECT_TYPE)


bool Object::IsUndetectableObject() {
  return IsHeapObject()
      && HeapObject::cast(this)->map()->is_undetectable();
}


bool Object::IsAccessCheckNeeded() {
  if (!IsHeapObject()) return false;
  if (IsJSGlobalProxy()) {
    JSGlobalProxy* proxy = JSGlobalProxy::cast(this);
    GlobalObject* global =
        proxy->GetIsolate()->context()->global_object();
    return proxy->IsDetachedFrom(global);
  }
  return HeapObject::cast(this)->map()->is_access_check_needed();
}


bool Object::IsStruct() {
  if (!IsHeapObject()) return false;
  switch (HeapObject::cast(this)->map()->instance_type()) {
#define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
    STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    default: return false;
  }
}


#define MAKE_STRUCT_PREDICATE(NAME, Name, name)                         \
  bool Object::Is##Name() {                                             \
    return Object::IsHeapObject()                                       \
      && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
  }
  STRUCT_LIST(MAKE_STRUCT_PREDICATE)
#undef MAKE_STRUCT_PREDICATE


bool Object::IsUndefined() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined;
}


bool Object::IsNull() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull;
}


bool Object::IsTheHole() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole;
}


bool Object::IsException() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kException;
}


bool Object::IsUninitialized() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUninitialized;
}


bool Object::IsTrue() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue;
}


bool Object::IsFalse() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse;
}


bool Object::IsArgumentsMarker() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker;
}


double Object::Number() {
  ASSERT(IsNumber());
  return IsSmi()
      ? static_cast<double>(reinterpret_cast<Smi*>(this)->value())
      : reinterpret_cast<HeapNumber*>(this)->value();
}


bool Object::IsNaN() {
  return this->IsHeapNumber() && std::isnan(HeapNumber::cast(this)->value());
}

MaybeHandle<Smi> Object::ToSmi(Isolate* isolate, Handle<Object> object) {
  if (object->IsSmi()) return Handle<Smi>::cast(object);
  if (object->IsHeapNumber()) {
    double value = Handle<HeapNumber>::cast(object)->value();
    int int_value = FastD2I(value);
    if (value == FastI2D(int_value) && Smi::IsValid(int_value)) {
      return handle(Smi::FromInt(int_value), isolate);
    }
  }
  return Handle<Smi>();
}


MaybeHandle<JSReceiver> Object::ToObject(Isolate* isolate,
                                         Handle<Object> object) {
  return ToObject(
      isolate, object, handle(isolate->context()->native_context(), isolate));
}


bool Object::HasSpecificClassOf(String* name) {
  return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
}


MaybeHandle<Object> Object::GetProperty(Handle<Object> object,
                                        Handle<Name> name) {
  LookupIterator it(object, name);
  return GetProperty(&it);
}


MaybeHandle<Object> Object::GetElement(Isolate* isolate,
                                       Handle<Object> object,
                                       uint32_t index) {
  // GetElement can trigger a getter which can cause allocation.
  // This was not always the case. This ASSERT is here to catch
  // leftover incorrect uses.
  ASSERT(AllowHeapAllocation::IsAllowed());
  return Object::GetElementWithReceiver(isolate, object, object, index);
}


MaybeHandle<Object> Object::GetPropertyOrElement(Handle<Object> object,
                                                 Handle<Name> name) {
  uint32_t index;
  Isolate* isolate = name->GetIsolate();
  if (name->AsArrayIndex(&index)) return GetElement(isolate, object, index);
  return GetProperty(object, name);
}


MaybeHandle<Object> Object::GetProperty(Isolate* isolate,
                                        Handle<Object> object,
                                        const char* name) {
  Handle<String> str = isolate->factory()->InternalizeUtf8String(name);
  ASSERT(!str.is_null());
#ifdef DEBUG
  uint32_t index;  // Assert that the name is not an array index.
  ASSERT(!str->AsArrayIndex(&index));
#endif  // DEBUG
  return GetProperty(object, str);
}


MaybeHandle<Object> JSProxy::GetElementWithHandler(Handle<JSProxy> proxy,
                                                   Handle<Object> receiver,
                                                   uint32_t index) {
  return GetPropertyWithHandler(
      proxy, receiver, proxy->GetIsolate()->factory()->Uint32ToString(index));
}


MaybeHandle<Object> JSProxy::SetElementWithHandler(Handle<JSProxy> proxy,
                                                   Handle<JSReceiver> receiver,
                                                   uint32_t index,
                                                   Handle<Object> value,
                                                   StrictMode strict_mode) {
  Isolate* isolate = proxy->GetIsolate();
  Handle<String> name = isolate->factory()->Uint32ToString(index);
  return SetPropertyWithHandler(
      proxy, receiver, name, value, NONE, strict_mode);
}


bool JSProxy::HasElementWithHandler(Handle<JSProxy> proxy, uint32_t index) {
  Isolate* isolate = proxy->GetIsolate();
  Handle<String> name = isolate->factory()->Uint32ToString(index);
  return HasPropertyWithHandler(proxy, name);
}

#define FIELD_ADDR(p, offset) \
  (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)

#define READ_FIELD(p, offset) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)))

#define ACQUIRE_READ_FIELD(p, offset)           \
  reinterpret_cast<Object*>(base::Acquire_Load( \
      reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset))))

#define NOBARRIER_READ_FIELD(p, offset)           \
  reinterpret_cast<Object*>(base::NoBarrier_Load( \
      reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset))))

#define WRITE_FIELD(p, offset, value) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)

#define RELEASE_WRITE_FIELD(p, offset, value)                     \
  base::Release_Store(                                            \
      reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
      reinterpret_cast<base::AtomicWord>(value));

#define NOBARRIER_WRITE_FIELD(p, offset, value)                   \
  base::NoBarrier_Store(                                          \
      reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
      reinterpret_cast<base::AtomicWord>(value));

#define WRITE_BARRIER(heap, object, offset, value)          \
  heap->incremental_marking()->RecordWrite(                 \
      object, HeapObject::RawField(object, offset), value); \
  if (heap->InNewSpace(value)) {                            \
    heap->RecordWrite(object->address(), offset);           \
  }

#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode) \
  if (mode == UPDATE_WRITE_BARRIER) {                                \
    heap->incremental_marking()->RecordWrite(                        \
        object, HeapObject::RawField(object, offset), value);        \
    if (heap->InNewSpace(value)) {                                   \
      heap->RecordWrite(object->address(), offset);                  \
    }                                                                \
  }
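
// The write barrier has two halves: the incremental marker is told about the
// newly written reference so that a concurrent marking cycle does not miss
// it, and stores of new-space values into other objects are recorded (via
// Heap::RecordWrite) so that the next scavenge can locate old-to-new
// pointers without scanning the whole old generation.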

#ifndef V8_TARGET_ARCH_MIPS
#define READ_DOUBLE_FIELD(p, offset) \
  (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)))
#else  // V8_TARGET_ARCH_MIPS
// Prevent gcc from using load-double (mips ldc1) on (possibly)
// non-64-bit aligned HeapNumber::value.
static inline double read_double_field(void* p, int offset) {
  union conversion {
    double d;
    uint32_t u[2];
  } c;
  c.u[0] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)));
  c.u[1] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4)));
  return c.d;
}
#define READ_DOUBLE_FIELD(p, offset) read_double_field(p, offset)
#endif  // V8_TARGET_ARCH_MIPS

#ifndef V8_TARGET_ARCH_MIPS
#define WRITE_DOUBLE_FIELD(p, offset, value) \
  (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)
#else  // V8_TARGET_ARCH_MIPS
// Prevent gcc from using store-double (mips sdc1) on (possibly)
// non-64-bit aligned HeapNumber::value.
static inline void write_double_field(void* p, int offset,
                                      double value) {
  union conversion {
    double d;
    uint32_t u[2];
  } c;
  c.d = value;
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset))) = c.u[0];
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4))) = c.u[1];
}
#define WRITE_DOUBLE_FIELD(p, offset, value) \
  write_double_field(p, offset, value)
#endif  // V8_TARGET_ARCH_MIPS


#define READ_INT_FIELD(p, offset) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)))

#define WRITE_INT_FIELD(p, offset, value) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)

#define READ_INTPTR_FIELD(p, offset) \
  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)))

#define WRITE_INTPTR_FIELD(p, offset, value) \
  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_UINT32_FIELD(p, offset) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)))

#define WRITE_UINT32_FIELD(p, offset, value) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT32_FIELD(p, offset) \
  (*reinterpret_cast<int32_t*>(FIELD_ADDR(p, offset)))

#define WRITE_INT32_FIELD(p, offset, value) \
  (*reinterpret_cast<int32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT64_FIELD(p, offset) \
  (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)))

#define WRITE_INT64_FIELD(p, offset, value) \
  (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_SHORT_FIELD(p, offset) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)))

#define WRITE_SHORT_FIELD(p, offset, value) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_BYTE_FIELD(p, offset) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)))

#define NOBARRIER_READ_BYTE_FIELD(p, offset) \
  static_cast<byte>(base::NoBarrier_Load(    \
      reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset))))

#define WRITE_BYTE_FIELD(p, offset, value) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)

#define NOBARRIER_WRITE_BYTE_FIELD(p, offset, value)           \
  base::NoBarrier_Store(                                       \
      reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset)), \
      static_cast<base::Atomic8>(value));

Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
  return &READ_FIELD(obj, byte_offset);
}


int Smi::value() {
  return Internals::SmiValue(this);
}


Smi* Smi::FromInt(int value) {
  ASSERT(Smi::IsValid(value));
  return reinterpret_cast<Smi*>(Internals::IntToSmi(value));
}


Smi* Smi::FromIntptr(intptr_t value) {
  ASSERT(Smi::IsValid(value));
  int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
  return reinterpret_cast<Smi*>((value << smi_shift_bits) | kSmiTag);
}


bool Smi::IsValid(intptr_t value) {
  bool result = Internals::IsValidSmi(value);
  ASSERT_EQ(result, value >= kMinValue && value <= kMaxValue);
  return result;
}
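
// (Smi layout: the payload lives in the upper bits of the word and the tag,
// kSmiTag == 0, in the low bits. Assuming the usual configuration, a 32-bit
// build has kSmiShiftSize == 0 and kSmiTagSize == 1, so FromIntptr(5)
// produces the raw word (5 << 1) | 0 == 10; a 64-bit build shifts the
// payload into the upper 32 bits of the word instead.)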


MapWord MapWord::FromMap(Map* map) {
  return MapWord(reinterpret_cast<uintptr_t>(map));
}


Map* MapWord::ToMap() {
  return reinterpret_cast<Map*>(value_);
}


bool MapWord::IsForwardingAddress() {
  return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
}


MapWord MapWord::FromForwardingAddress(HeapObject* object) {
  Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
  return MapWord(reinterpret_cast<uintptr_t>(raw));
}


HeapObject* MapWord::ToForwardingAddress() {
  ASSERT(IsForwardingAddress());
  return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
}
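
// During a moving collection the GC overwrites an object's map word with the
// object's new location. FromForwardingAddress() strips kHeapObjectTag, which
// leaves the low tag bits zero (a Smi-tagged word), so IsForwardingAddress()
// can tell a forwarding pointer from a genuine, heap-object-tagged map
// pointer with a single tag check.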


#ifdef VERIFY_HEAP
void HeapObject::VerifyObjectField(int offset) {
  VerifyPointer(READ_FIELD(this, offset));
}

void HeapObject::VerifySmiField(int offset) {
  CHECK(READ_FIELD(this, offset)->IsSmi());
}
#endif


Heap* HeapObject::GetHeap() {
  Heap* heap =
      MemoryChunk::FromAddress(reinterpret_cast<Address>(this))->heap();
  SLOW_ASSERT(heap != NULL);
  return heap;
}


Isolate* HeapObject::GetIsolate() {
  return GetHeap()->isolate();
}


Map* HeapObject::map() {
#ifdef DEBUG
  // Clear mark potentially added by PathTracer.
  uintptr_t raw_value =
      map_word().ToRawValue() & ~static_cast<uintptr_t>(PathTracer::kMarkTag);
  return MapWord::FromRawValue(raw_value).ToMap();
#else
  return map_word().ToMap();
#endif
}


void HeapObject::set_map(Map* value) {
  set_map_word(MapWord::FromMap(value));
  if (value != NULL) {
    // TODO(1600) We are passing NULL as a slot because maps can never be on
    // an evacuation candidate.
    value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
  }
}


Map* HeapObject::synchronized_map() {
  return synchronized_map_word().ToMap();
}


void HeapObject::synchronized_set_map(Map* value) {
  synchronized_set_map_word(MapWord::FromMap(value));
  if (value != NULL) {
    // TODO(1600) We are passing NULL as a slot because maps can never be on
    // an evacuation candidate.
    value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
  }
}


void HeapObject::synchronized_set_map_no_write_barrier(Map* value) {
  synchronized_set_map_word(MapWord::FromMap(value));
}


// Unsafe accessor omitting write barrier.
void HeapObject::set_map_no_write_barrier(Map* value) {
  set_map_word(MapWord::FromMap(value));
}


MapWord HeapObject::map_word() {
  return MapWord(
      reinterpret_cast<uintptr_t>(NOBARRIER_READ_FIELD(this, kMapOffset)));
}


void HeapObject::set_map_word(MapWord map_word) {
  NOBARRIER_WRITE_FIELD(
      this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
}


MapWord HeapObject::synchronized_map_word() {
  return MapWord(
      reinterpret_cast<uintptr_t>(ACQUIRE_READ_FIELD(this, kMapOffset)));
}


void HeapObject::synchronized_set_map_word(MapWord map_word) {
  RELEASE_WRITE_FIELD(
      this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
}


HeapObject* HeapObject::FromAddress(Address address) {
  ASSERT_TAG_ALIGNED(address);
  return reinterpret_cast<HeapObject*>(address + kHeapObjectTag);
}


Address HeapObject::address() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag;
}


int HeapObject::Size() {
  return SizeFromMap(map());
}


void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) {
  v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
                   reinterpret_cast<Object**>(FIELD_ADDR(this, end)));
}


void HeapObject::IteratePointer(ObjectVisitor* v, int offset) {
  v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
}


void HeapObject::IterateNextCodeLink(ObjectVisitor* v, int offset) {
  v->VisitNextCodeLink(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
}


double HeapNumber::value() {
  return READ_DOUBLE_FIELD(this, kValueOffset);
}


void HeapNumber::set_value(double value) {
  WRITE_DOUBLE_FIELD(this, kValueOffset, value);
}


int HeapNumber::get_exponent() {
  return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
          kExponentShift) - kExponentBias;
}


int HeapNumber::get_sign() {
  return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
}


ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)


Object** FixedArray::GetFirstElementAddress() {
  return reinterpret_cast<Object**>(FIELD_ADDR(this, OffsetOfElementAt(0)));
}


bool FixedArray::ContainsOnlySmisOrHoles() {
  Object* the_hole = GetHeap()->the_hole_value();
  Object** current = GetFirstElementAddress();
  for (int i = 0; i < length(); ++i) {
    Object* candidate = *current++;
    if (!candidate->IsSmi() && candidate != the_hole) return false;
  }
  return true;
}


FixedArrayBase* JSObject::elements() {
  Object* array = READ_FIELD(this, kElementsOffset);
  return static_cast<FixedArrayBase*>(array);
}


void JSObject::ValidateElements(Handle<JSObject> object) {
#ifdef ENABLE_SLOW_ASSERTS
  if (FLAG_enable_slow_asserts) {
    ElementsAccessor* accessor = object->GetElementsAccessor();
    accessor->Validate(object);
  }
#endif
}


void AllocationSite::Initialize() {
  set_transition_info(Smi::FromInt(0));
  SetElementsKind(GetInitialFastElementsKind());
  set_nested_site(Smi::FromInt(0));
  set_pretenure_data(Smi::FromInt(0));
  set_pretenure_create_count(Smi::FromInt(0));
  set_dependent_code(DependentCode::cast(GetHeap()->empty_fixed_array()),
                     SKIP_WRITE_BARRIER);
}


void AllocationSite::MarkZombie() {
  ASSERT(!IsZombie());
  Initialize();
  set_pretenure_decision(kZombie);
}


// Heuristic: We only need to create allocation site info if the boilerplate
// elements kind is the initial elements kind.
AllocationSiteMode AllocationSite::GetMode(
    ElementsKind boilerplate_elements_kind) {
  if (FLAG_pretenuring_call_new ||
      IsFastSmiElementsKind(boilerplate_elements_kind)) {
    return TRACK_ALLOCATION_SITE;
  }

  return DONT_TRACK_ALLOCATION_SITE;
}


AllocationSiteMode AllocationSite::GetMode(ElementsKind from,
                                           ElementsKind to) {
  if (FLAG_pretenuring_call_new ||
      (IsFastSmiElementsKind(from) &&
       IsMoreGeneralElementsKindTransition(from, to))) {
    return TRACK_ALLOCATION_SITE;
  }

  return DONT_TRACK_ALLOCATION_SITE;
}


inline bool AllocationSite::CanTrack(InstanceType type) {
  if (FLAG_allocation_site_pretenuring) {
    return type == JS_ARRAY_TYPE ||
        type == JS_OBJECT_TYPE ||
        type < FIRST_NONSTRING_TYPE;
  }
  return type == JS_ARRAY_TYPE;
}


inline DependentCode::DependencyGroup AllocationSite::ToDependencyGroup(
    Reason reason) {
  switch (reason) {
    case TENURING:
      return DependentCode::kAllocationSiteTenuringChangedGroup;
      break;
    case TRANSITIONS:
      return DependentCode::kAllocationSiteTransitionChangedGroup;
      break;
  }
  UNREACHABLE();
  return DependentCode::kAllocationSiteTransitionChangedGroup;
}


inline void AllocationSite::set_memento_found_count(int count) {
  int value = pretenure_data()->value();
  // Verify that we can count more mementos than we can possibly find in one
  // new space collection.
  ASSERT((GetHeap()->MaxSemiSpaceSize() /
          (StaticVisitorBase::kMinObjectSizeInWords * kPointerSize +
           AllocationMemento::kSize)) < MementoFoundCountBits::kMax);
  ASSERT(count < MementoFoundCountBits::kMax);
  set_pretenure_data(
      Smi::FromInt(MementoFoundCountBits::update(value, count)),
      SKIP_WRITE_BARRIER);
}

inline bool AllocationSite::IncrementMementoFoundCount() {
  if (IsZombie()) return false;

  int value = memento_found_count();
  set_memento_found_count(value + 1);
  return memento_found_count() == kPretenureMinimumCreated;
}


inline void AllocationSite::IncrementMementoCreateCount() {
  ASSERT(FLAG_allocation_site_pretenuring);
  int value = memento_create_count();
  set_memento_create_count(value + 1);
}


inline bool AllocationSite::MakePretenureDecision(
    PretenureDecision current_decision,
    double ratio,
    bool maximum_size_scavenge) {
  // Here we just allow state transitions from undecided or maybe tenure
  // to don't tenure, maybe tenure, or tenure.
  if ((current_decision == kUndecided || current_decision == kMaybeTenure)) {
    if (ratio >= kPretenureRatio) {
      // We just transition into tenure state when the semi-space was at
      // maximum capacity.
      if (maximum_size_scavenge) {
        set_deopt_dependent_code(true);
        set_pretenure_decision(kTenure);
        // Currently we just need to deopt when we make a state transition to
        // tenure.
        return true;
      }
      set_pretenure_decision(kMaybeTenure);
    } else {
      set_pretenure_decision(kDontTenure);
    }
  }
  return false;
}


inline bool AllocationSite::DigestPretenuringFeedback(
    bool maximum_size_scavenge) {
  bool deopt = false;
  int create_count = memento_create_count();
  int found_count = memento_found_count();
  bool minimum_mementos_created = create_count >= kPretenureMinimumCreated;
  double ratio =
      minimum_mementos_created || FLAG_trace_pretenuring_statistics ?
          static_cast<double>(found_count) / create_count : 0.0;
  PretenureDecision current_decision = pretenure_decision();

  if (minimum_mementos_created) {
    deopt = MakePretenureDecision(
        current_decision, ratio, maximum_size_scavenge);
  }

  if (FLAG_trace_pretenuring_statistics) {
    PrintF(
        "AllocationSite(%p): (created, found, ratio) (%d, %d, %f) %s => %s\n",
        static_cast<void*>(this), create_count, found_count, ratio,
        PretenureDecisionName(current_decision),
        PretenureDecisionName(pretenure_decision()));
  }

  // Clear feedback calculation fields until the next gc.
  set_memento_found_count(0);
  set_memento_create_count(0);
  return deopt;
}
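
// Worked example: if 100 mementos were created since the last scavenge and
// 90 of them were found next to surviving objects, ratio is 0.9. If that
// meets kPretenureRatio and the scavenge ran with the semi-space at maximum
// capacity, the site moves to kTenure and flags its dependent code for
// deoptimization, after which allocations at this site go to old space.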
1647
1648
EnsureCanContainHeapObjectElements(Handle<JSObject> object)1649 void JSObject::EnsureCanContainHeapObjectElements(Handle<JSObject> object) {
1650 JSObject::ValidateElements(object);
1651 ElementsKind elements_kind = object->map()->elements_kind();
1652 if (!IsFastObjectElementsKind(elements_kind)) {
1653 if (IsFastHoleyElementsKind(elements_kind)) {
1654 TransitionElementsKind(object, FAST_HOLEY_ELEMENTS);
1655 } else {
1656 TransitionElementsKind(object, FAST_ELEMENTS);
1657 }
1658 }
1659 }
1660
1661
EnsureCanContainElements(Handle<JSObject> object,Object ** objects,uint32_t count,EnsureElementsMode mode)1662 void JSObject::EnsureCanContainElements(Handle<JSObject> object,
1663 Object** objects,
1664 uint32_t count,
1665 EnsureElementsMode mode) {
1666 ElementsKind current_kind = object->map()->elements_kind();
1667 ElementsKind target_kind = current_kind;
1668 {
1669 DisallowHeapAllocation no_allocation;
1670 ASSERT(mode != ALLOW_COPIED_DOUBLE_ELEMENTS);
1671 bool is_holey = IsFastHoleyElementsKind(current_kind);
1672 if (current_kind == FAST_HOLEY_ELEMENTS) return;
1673 Heap* heap = object->GetHeap();
1674 Object* the_hole = heap->the_hole_value();
1675 for (uint32_t i = 0; i < count; ++i) {
1676 Object* current = *objects++;
1677 if (current == the_hole) {
1678 is_holey = true;
1679 target_kind = GetHoleyElementsKind(target_kind);
1680 } else if (!current->IsSmi()) {
1681 if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS && current->IsNumber()) {
1682 if (IsFastSmiElementsKind(target_kind)) {
1683 if (is_holey) {
1684 target_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
1685 } else {
1686 target_kind = FAST_DOUBLE_ELEMENTS;
1687 }
1688 }
1689 } else if (is_holey) {
1690 target_kind = FAST_HOLEY_ELEMENTS;
1691 break;
1692 } else {
1693 target_kind = FAST_ELEMENTS;
1694 }
1695 }
1696 }
1697 }
1698 if (target_kind != current_kind) {
1699 TransitionElementsKind(object, target_kind);
1700 }
1701 }


void JSObject::EnsureCanContainElements(Handle<JSObject> object,
                                        Handle<FixedArrayBase> elements,
                                        uint32_t length,
                                        EnsureElementsMode mode) {
  Heap* heap = object->GetHeap();
  if (elements->map() != heap->fixed_double_array_map()) {
    ASSERT(elements->map() == heap->fixed_array_map() ||
           elements->map() == heap->fixed_cow_array_map());
    if (mode == ALLOW_COPIED_DOUBLE_ELEMENTS) {
      mode = DONT_ALLOW_DOUBLE_ELEMENTS;
    }
    Object** objects =
        Handle<FixedArray>::cast(elements)->GetFirstElementAddress();
    EnsureCanContainElements(object, objects, length, mode);
    return;
  }

  ASSERT(mode == ALLOW_COPIED_DOUBLE_ELEMENTS);
  if (object->GetElementsKind() == FAST_HOLEY_SMI_ELEMENTS) {
    TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
  } else if (object->GetElementsKind() == FAST_SMI_ELEMENTS) {
    Handle<FixedDoubleArray> double_array =
        Handle<FixedDoubleArray>::cast(elements);
    for (uint32_t i = 0; i < length; ++i) {
      if (double_array->is_the_hole(i)) {
        TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
        return;
      }
    }
    TransitionElementsKind(object, FAST_DOUBLE_ELEMENTS);
  }
}


void JSObject::SetMapAndElements(Handle<JSObject> object,
                                 Handle<Map> new_map,
                                 Handle<FixedArrayBase> value) {
  JSObject::MigrateToMap(object, new_map);
  ASSERT((object->map()->has_fast_smi_or_object_elements() ||
          (*value == object->GetHeap()->empty_fixed_array())) ==
         (value->map() == object->GetHeap()->fixed_array_map() ||
          value->map() == object->GetHeap()->fixed_cow_array_map()));
  ASSERT((*value == object->GetHeap()->empty_fixed_array()) ||
         (object->map()->has_fast_double_elements() ==
          value->IsFixedDoubleArray()));
  object->set_elements(*value);
}


void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kElementsOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode);
}


void JSObject::initialize_properties() {
  ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
  WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
}


void JSObject::initialize_elements() {
  FixedArrayBase* elements = map()->GetInitialElements();
  WRITE_FIELD(this, kElementsOffset, elements);
}


Handle<String> JSObject::ExpectedTransitionKey(Handle<Map> map) {
  DisallowHeapAllocation no_gc;
  if (!map->HasTransitionArray()) return Handle<String>::null();
  TransitionArray* transitions = map->transitions();
  if (!transitions->IsSimpleTransition()) return Handle<String>::null();
  int transition = TransitionArray::kSimpleTransitionIndex;
  PropertyDetails details = transitions->GetTargetDetails(transition);
  Name* name = transitions->GetKey(transition);
  if (details.type() != FIELD) return Handle<String>::null();
  if (details.attributes() != NONE) return Handle<String>::null();
  if (!name->IsString()) return Handle<String>::null();
  return Handle<String>(String::cast(name));
}


Handle<Map> JSObject::ExpectedTransitionTarget(Handle<Map> map) {
  ASSERT(!ExpectedTransitionKey(map).is_null());
  return Handle<Map>(map->transitions()->GetTarget(
      TransitionArray::kSimpleTransitionIndex));
}


Handle<Map> JSObject::FindTransitionToField(Handle<Map> map, Handle<Name> key) {
  DisallowHeapAllocation no_allocation;
  if (!map->HasTransitionArray()) return Handle<Map>::null();
  TransitionArray* transitions = map->transitions();
  int transition = transitions->Search(*key);
  if (transition == TransitionArray::kNotFound) return Handle<Map>::null();
  PropertyDetails target_details = transitions->GetTargetDetails(transition);
  if (target_details.type() != FIELD) return Handle<Map>::null();
  if (target_details.attributes() != NONE) return Handle<Map>::null();
  return Handle<Map>(transitions->GetTarget(transition));
}


ACCESSORS(Oddball, to_string, String, kToStringOffset)
ACCESSORS(Oddball, to_number, Object, kToNumberOffset)


byte Oddball::kind() {
  return Smi::cast(READ_FIELD(this, kKindOffset))->value();
}


void Oddball::set_kind(byte value) {
  WRITE_FIELD(this, kKindOffset, Smi::FromInt(value));
}


Object* Cell::value() {
  return READ_FIELD(this, kValueOffset);
}


void Cell::set_value(Object* val, WriteBarrierMode ignored) {
  // The write barrier is not used for global property cells.
  ASSERT(!val->IsPropertyCell() && !val->IsCell());
  WRITE_FIELD(this, kValueOffset, val);
}

ACCESSORS(PropertyCell, dependent_code, DependentCode, kDependentCodeOffset)

Object* PropertyCell::type_raw() {
  return READ_FIELD(this, kTypeOffset);
}


void PropertyCell::set_type_raw(Object* val, WriteBarrierMode ignored) {
  WRITE_FIELD(this, kTypeOffset, val);
}


int JSObject::GetHeaderSize() {
  InstanceType type = map()->instance_type();
  // Check for the most common kind of JavaScript object before
  // falling into the generic switch. This speeds up the internal
  // field operations considerably on average.
  if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
  switch (type) {
    case JS_GENERATOR_OBJECT_TYPE:
      return JSGeneratorObject::kSize;
    case JS_MODULE_TYPE:
      return JSModule::kSize;
    case JS_GLOBAL_PROXY_TYPE:
      return JSGlobalProxy::kSize;
    case JS_GLOBAL_OBJECT_TYPE:
      return JSGlobalObject::kSize;
    case JS_BUILTINS_OBJECT_TYPE:
      return JSBuiltinsObject::kSize;
    case JS_FUNCTION_TYPE:
      return JSFunction::kSize;
    case JS_VALUE_TYPE:
      return JSValue::kSize;
    case JS_DATE_TYPE:
      return JSDate::kSize;
    case JS_ARRAY_TYPE:
      return JSArray::kSize;
    case JS_ARRAY_BUFFER_TYPE:
      return JSArrayBuffer::kSize;
    case JS_TYPED_ARRAY_TYPE:
      return JSTypedArray::kSize;
    case JS_DATA_VIEW_TYPE:
      return JSDataView::kSize;
    case JS_SET_TYPE:
      return JSSet::kSize;
    case JS_MAP_TYPE:
      return JSMap::kSize;
    case JS_SET_ITERATOR_TYPE:
      return JSSetIterator::kSize;
    case JS_MAP_ITERATOR_TYPE:
      return JSMapIterator::kSize;
    case JS_WEAK_MAP_TYPE:
      return JSWeakMap::kSize;
    case JS_WEAK_SET_TYPE:
      return JSWeakSet::kSize;
    case JS_REGEXP_TYPE:
      return JSRegExp::kSize;
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
      return JSObject::kHeaderSize;
    case JS_MESSAGE_OBJECT_TYPE:
      return JSMessageObject::kSize;
    default:
      // TODO(jkummerow): Re-enable this. Blink currently hits this
      // from its CustomElementConstructorBuilder.
      // UNREACHABLE();
      return 0;
  }
}


int JSObject::GetInternalFieldCount() {
  ASSERT(1 << kPointerSizeLog2 == kPointerSize);
  // Make sure to adjust for the number of in-object properties. These
  // properties do contribute to the size, but are not internal fields.
  return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
         map()->inobject_properties();
}


int JSObject::GetInternalFieldOffset(int index) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  return GetHeaderSize() + (kPointerSize * index);
}


Object* JSObject::GetInternalField(int index) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  // Internal fields are stored immediately after the header, whereas
  // in-object properties are stored at the end of the object. Therefore
  // there is no need to adjust the index here.
  return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));
}


void JSObject::SetInternalField(int index, Object* value) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  // Internal fields are stored immediately after the header, whereas
  // in-object properties are stored at the end of the object. Therefore
  // there is no need to adjust the index here.
  int offset = GetHeaderSize() + (kPointerSize * index);
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(GetHeap(), this, offset, value);
}


void JSObject::SetInternalField(int index, Smi* value) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  // Internal fields are stored immediately after the header, whereas
  // in-object properties are stored at the end of the object. Therefore
  // there is no need to adjust the index here. Smis are immediates, so
  // this variant can skip the write barrier.
  int offset = GetHeaderSize() + (kPointerSize * index);
  WRITE_FIELD(this, offset, value);
}


// Access fast-case object properties at index. The use of these routines
// is needed to correctly distinguish between properties stored in-object and
// properties stored in the properties array.
Object* JSObject::RawFastPropertyAt(FieldIndex index) {
  if (index.is_inobject()) {
    return READ_FIELD(this, index.offset());
  } else {
    return properties()->get(index.outobject_array_index());
  }
}
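
// Usage sketch (illustrative; 'map', 'object' and 'descriptor' are assumed
// to be in scope and to name a FIELD property):
//
//   FieldIndex index = FieldIndex::ForDescriptor(map, descriptor);
//   Object* raw = object->RawFastPropertyAt(index);
//   // In-object fields are read straight off the object; the rest come
//   // from the separate properties() backing store.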


void JSObject::FastPropertyAtPut(FieldIndex index, Object* value) {
  if (index.is_inobject()) {
    int offset = index.offset();
    WRITE_FIELD(this, offset, value);
    WRITE_BARRIER(GetHeap(), this, offset, value);
  } else {
    properties()->set(index.outobject_array_index(), value);
  }
}


int JSObject::GetInObjectPropertyOffset(int index) {
  return map()->GetInObjectPropertyOffset(index);
}


Object* JSObject::InObjectPropertyAt(int index) {
  int offset = GetInObjectPropertyOffset(index);
  return READ_FIELD(this, offset);
}


Object* JSObject::InObjectPropertyAtPut(int index,
                                        Object* value,
                                        WriteBarrierMode mode) {
  // Adjust for the number of properties stored in the object.
  int offset = GetInObjectPropertyOffset(index);
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
  return value;
}


void JSObject::InitializeBody(Map* map,
                              Object* pre_allocated_value,
                              Object* filler_value) {
  ASSERT(!filler_value->IsHeapObject() ||
         !GetHeap()->InNewSpace(filler_value));
  ASSERT(!pre_allocated_value->IsHeapObject() ||
         !GetHeap()->InNewSpace(pre_allocated_value));
  int size = map->instance_size();
  int offset = kHeaderSize;
  if (filler_value != pre_allocated_value) {
    int pre_allocated = map->pre_allocated_property_fields();
    ASSERT(pre_allocated * kPointerSize + kHeaderSize <= size);
    for (int i = 0; i < pre_allocated; i++) {
      WRITE_FIELD(this, offset, pre_allocated_value);
      offset += kPointerSize;
    }
  }
  while (offset < size) {
    WRITE_FIELD(this, offset, filler_value);
    offset += kPointerSize;
  }
}
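
// Behavior note (added for clarity): if the map declares N pre-allocated
// property fields, the first loop fills exactly those N in-object slots with
// pre_allocated_value; the second loop then pads every remaining word up to
// map->instance_size() with filler_value. When both values are the same
// object, the pre-allocated loop is skipped because a single pass suffices.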


bool JSObject::HasFastProperties() {
  ASSERT(properties()->IsDictionary() == map()->is_dictionary_map());
  return !properties()->IsDictionary();
}


bool JSObject::TooManyFastProperties(StoreFromKeyed store_mode) {
  // Allow extra fast properties if the object has more than
  // kFastPropertiesSoftLimit in-object properties. When this is the case, it
  // is very unlikely that the object is being used as a dictionary and there
  // is a good chance that allowing more map transitions will be worth it.
  Map* map = this->map();
  if (map->unused_property_fields() != 0) return false;

  int inobject = map->inobject_properties();

  int limit;
  if (store_mode == CERTAINLY_NOT_STORE_FROM_KEYED) {
    limit = Max(inobject, kMaxFastProperties);
  } else {
    limit = Max(inobject, kFastPropertiesSoftLimit);
  }
  return properties()->length() > limit;
}
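
// Worked example (illustrative): an object with 8 in-object properties, no
// unused property fields, and a store that is certainly not keyed uses the
// limit Max(8, kMaxFastProperties); it only counts as having too many fast
// properties once its out-of-object properties array outgrows that bound.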


void Struct::InitializeBody(int object_size) {
  Object* value = GetHeap()->undefined_value();
  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
    WRITE_FIELD(this, offset, value);
  }
}


bool Object::ToArrayIndex(uint32_t* index) {
  if (IsSmi()) {
    int value = Smi::cast(this)->value();
    if (value < 0) return false;
    *index = value;
    return true;
  }
  if (IsHeapNumber()) {
    double value = HeapNumber::cast(this)->value();
    uint32_t uint_value = static_cast<uint32_t>(value);
    if (value == static_cast<double>(uint_value)) {
      *index = uint_value;
      return true;
    }
  }
  return false;
}
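
// Illustrative conversions (not in the original source):
//   Smi 42          -> *index == 42, returns true
//   Smi -1          -> returns false (negative values are not array indices)
//   HeapNumber 3.0  -> *index == 3, returns true
//   HeapNumber 3.5  -> returns false (the fractional part survives the
//                      uint32 round-trip comparison above)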


bool Object::IsStringObjectWithCharacterAt(uint32_t index) {
  if (!this->IsJSValue()) return false;

  JSValue* js_value = JSValue::cast(this);
  if (!js_value->value()->IsString()) return false;

  String* str = String::cast(js_value->value());
  if (index >= static_cast<uint32_t>(str->length())) return false;

  return true;
}


void Object::VerifyApiCallResultType() {
#if ENABLE_EXTRA_CHECKS
  if (!(IsSmi() ||
        IsString() ||
        IsSymbol() ||
        IsSpecObject() ||
        IsHeapNumber() ||
        IsUndefined() ||
        IsTrue() ||
        IsFalse() ||
        IsNull())) {
    FATAL("API call returned invalid object");
  }
#endif  // ENABLE_EXTRA_CHECKS
}


FixedArrayBase* FixedArrayBase::cast(Object* object) {
  ASSERT(object->IsFixedArrayBase());
  return reinterpret_cast<FixedArrayBase*>(object);
}


Object* FixedArray::get(int index) {
  SLOW_ASSERT(index >= 0 && index < this->length());
  return READ_FIELD(this, kHeaderSize + index * kPointerSize);
}


Handle<Object> FixedArray::get(Handle<FixedArray> array, int index) {
  return handle(array->get(index), array->GetIsolate());
}


bool FixedArray::is_the_hole(int index) {
  return get(index) == GetHeap()->the_hole_value();
}


void FixedArray::set(int index, Smi* value) {
  ASSERT(map() != GetHeap()->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
}


void FixedArray::set(int index, Object* value) {
  ASSERT(map() != GetHeap()->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(GetHeap(), this, offset, value);
}


inline bool FixedDoubleArray::is_the_hole_nan(double value) {
  return BitCast<uint64_t, double>(value) == kHoleNanInt64;
}


inline double FixedDoubleArray::hole_nan_as_double() {
  return BitCast<double, uint64_t>(kHoleNanInt64);
}


inline double FixedDoubleArray::canonical_not_the_hole_nan_as_double() {
  ASSERT(BitCast<uint64_t>(OS::nan_value()) != kHoleNanInt64);
  ASSERT((BitCast<uint64_t>(OS::nan_value()) >> 32) != kHoleNanUpper32);
  return OS::nan_value();
}
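
// Background note (added for clarity): the hole is one specific NaN bit
// pattern, kHoleNanInt64. The asserts above check that the platform's
// canonical NaN differs from it, in particular in the upper 32 bits, so a
// computed NaN stored via FixedDoubleArray::set can never be misread as
// the hole.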


double FixedDoubleArray::get_scalar(int index) {
  ASSERT(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  ASSERT(index >= 0 && index < this->length());
  double result = READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
  ASSERT(!is_the_hole_nan(result));
  return result;
}

int64_t FixedDoubleArray::get_representation(int index) {
  ASSERT(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  ASSERT(index >= 0 && index < this->length());
  return READ_INT64_FIELD(this, kHeaderSize + index * kDoubleSize);
}


Handle<Object> FixedDoubleArray::get(Handle<FixedDoubleArray> array,
                                     int index) {
  if (array->is_the_hole(index)) {
    return array->GetIsolate()->factory()->the_hole_value();
  } else {
    return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
  }
}


void FixedDoubleArray::set(int index, double value) {
  ASSERT(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  int offset = kHeaderSize + index * kDoubleSize;
  if (std::isnan(value)) value = canonical_not_the_hole_nan_as_double();
  WRITE_DOUBLE_FIELD(this, offset, value);
}


void FixedDoubleArray::set_the_hole(int index) {
  ASSERT(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  int offset = kHeaderSize + index * kDoubleSize;
  WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
}


bool FixedDoubleArray::is_the_hole(int index) {
  int offset = kHeaderSize + index * kDoubleSize;
  return is_the_hole_nan(READ_DOUBLE_FIELD(this, offset));
}


double* FixedDoubleArray::data_start() {
  return reinterpret_cast<double*>(FIELD_ADDR(this, kHeaderSize));
}


void FixedDoubleArray::FillWithHoles(int from, int to) {
  for (int i = from; i < to; i++) {
    set_the_hole(i);
  }
}


bool ConstantPoolArray::is_extended_layout() {
  uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset);
  return IsExtendedField::decode(small_layout_1);
}


ConstantPoolArray::LayoutSection ConstantPoolArray::final_section() {
  return is_extended_layout() ? EXTENDED_SECTION : SMALL_SECTION;
}


int ConstantPoolArray::first_extended_section_index() {
  ASSERT(is_extended_layout());
  uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
  return TotalCountField::decode(small_layout_2);
}


int ConstantPoolArray::get_extended_section_header_offset() {
  return RoundUp(SizeFor(NumberOfEntries(this, SMALL_SECTION)), kInt64Size);
}


ConstantPoolArray::WeakObjectState ConstantPoolArray::get_weak_object_state() {
  uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
  return WeakObjectStateField::decode(small_layout_2);
}


void ConstantPoolArray::set_weak_object_state(
    ConstantPoolArray::WeakObjectState state) {
  uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
  small_layout_2 = WeakObjectStateField::update(small_layout_2, state);
  WRITE_INT32_FIELD(this, kSmallLayout2Offset, small_layout_2);
}


int ConstantPoolArray::first_index(Type type, LayoutSection section) {
  int index = 0;
  if (section == EXTENDED_SECTION) {
    ASSERT(is_extended_layout());
    index += first_extended_section_index();
  }

  for (Type type_iter = FIRST_TYPE; type_iter < type;
       type_iter = next_type(type_iter)) {
    index += number_of_entries(type_iter, section);
  }

  return index;
}


int ConstantPoolArray::last_index(Type type, LayoutSection section) {
  return first_index(type, section) + number_of_entries(type, section) - 1;
}


int ConstantPoolArray::number_of_entries(Type type, LayoutSection section) {
  if (section == SMALL_SECTION) {
    uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset);
    uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
    switch (type) {
      case INT64:
        return Int64CountField::decode(small_layout_1);
      case CODE_PTR:
        return CodePtrCountField::decode(small_layout_1);
      case HEAP_PTR:
        return HeapPtrCountField::decode(small_layout_1);
      case INT32:
        return Int32CountField::decode(small_layout_2);
      default:
        UNREACHABLE();
        return 0;
    }
  } else {
    ASSERT(section == EXTENDED_SECTION && is_extended_layout());
    int offset = get_extended_section_header_offset();
    switch (type) {
      case INT64:
        offset += kExtendedInt64CountOffset;
        break;
      case CODE_PTR:
        offset += kExtendedCodePtrCountOffset;
        break;
      case HEAP_PTR:
        offset += kExtendedHeapPtrCountOffset;
        break;
      case INT32:
        offset += kExtendedInt32CountOffset;
        break;
      default:
        UNREACHABLE();
    }
    return READ_INT_FIELD(this, offset);
  }
}


ConstantPoolArray::Type ConstantPoolArray::get_type(int index) {
  LayoutSection section;
  if (is_extended_layout() && index >= first_extended_section_index()) {
    section = EXTENDED_SECTION;
  } else {
    section = SMALL_SECTION;
  }

  Type type = FIRST_TYPE;
  while (index > last_index(type, section)) {
    type = next_type(type);
  }
  ASSERT(type <= LAST_TYPE);
  return type;
}


int64_t ConstantPoolArray::get_int64_entry(int index) {
  ASSERT(map() == GetHeap()->constant_pool_array_map());
  ASSERT(get_type(index) == INT64);
  return READ_INT64_FIELD(this, OffsetOfElementAt(index));
}


double ConstantPoolArray::get_int64_entry_as_double(int index) {
  STATIC_ASSERT(kDoubleSize == kInt64Size);
  ASSERT(map() == GetHeap()->constant_pool_array_map());
  ASSERT(get_type(index) == INT64);
  return READ_DOUBLE_FIELD(this, OffsetOfElementAt(index));
}


Address ConstantPoolArray::get_code_ptr_entry(int index) {
  ASSERT(map() == GetHeap()->constant_pool_array_map());
  ASSERT(get_type(index) == CODE_PTR);
  return reinterpret_cast<Address>(READ_FIELD(this, OffsetOfElementAt(index)));
}


Object* ConstantPoolArray::get_heap_ptr_entry(int index) {
  ASSERT(map() == GetHeap()->constant_pool_array_map());
  ASSERT(get_type(index) == HEAP_PTR);
  return READ_FIELD(this, OffsetOfElementAt(index));
}


int32_t ConstantPoolArray::get_int32_entry(int index) {
  ASSERT(map() == GetHeap()->constant_pool_array_map());
  ASSERT(get_type(index) == INT32);
  return READ_INT32_FIELD(this, OffsetOfElementAt(index));
}


void ConstantPoolArray::set(int index, int64_t value) {
  ASSERT(map() == GetHeap()->constant_pool_array_map());
  ASSERT(get_type(index) == INT64);
  WRITE_INT64_FIELD(this, OffsetOfElementAt(index), value);
}


void ConstantPoolArray::set(int index, double value) {
  STATIC_ASSERT(kDoubleSize == kInt64Size);
  ASSERT(map() == GetHeap()->constant_pool_array_map());
  ASSERT(get_type(index) == INT64);
  WRITE_DOUBLE_FIELD(this, OffsetOfElementAt(index), value);
}


void ConstantPoolArray::set(int index, Address value) {
  ASSERT(map() == GetHeap()->constant_pool_array_map());
  ASSERT(get_type(index) == CODE_PTR);
  WRITE_FIELD(this, OffsetOfElementAt(index), reinterpret_cast<Object*>(value));
}


void ConstantPoolArray::set(int index, Object* value) {
  ASSERT(map() == GetHeap()->constant_pool_array_map());
  ASSERT(get_type(index) == HEAP_PTR);
  WRITE_FIELD(this, OffsetOfElementAt(index), value);
  WRITE_BARRIER(GetHeap(), this, OffsetOfElementAt(index), value);
}


void ConstantPoolArray::set(int index, int32_t value) {
  ASSERT(map() == GetHeap()->constant_pool_array_map());
  ASSERT(get_type(index) == INT32);
  WRITE_INT32_FIELD(this, OffsetOfElementAt(index), value);
}


void ConstantPoolArray::Init(const NumberOfEntries& small) {
  uint32_t small_layout_1 =
      Int64CountField::encode(small.count_of(INT64)) |
      CodePtrCountField::encode(small.count_of(CODE_PTR)) |
      HeapPtrCountField::encode(small.count_of(HEAP_PTR)) |
      IsExtendedField::encode(false);
  uint32_t small_layout_2 =
      Int32CountField::encode(small.count_of(INT32)) |
      TotalCountField::encode(small.total_count()) |
      WeakObjectStateField::encode(NO_WEAK_OBJECTS);
  WRITE_UINT32_FIELD(this, kSmallLayout1Offset, small_layout_1);
  WRITE_UINT32_FIELD(this, kSmallLayout2Offset, small_layout_2);
  if (kHeaderSize != kFirstEntryOffset) {
    ASSERT(kFirstEntryOffset - kHeaderSize == kInt32Size);
    WRITE_UINT32_FIELD(this, kHeaderSize, 0);  // Zero out header padding.
  }
}


void ConstantPoolArray::InitExtended(const NumberOfEntries& small,
                                     const NumberOfEntries& extended) {
  // Initialize small layout fields first.
  Init(small);

  // Set is_extended_layout field.
  uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset);
  small_layout_1 = IsExtendedField::update(small_layout_1, true);
  WRITE_INT32_FIELD(this, kSmallLayout1Offset, small_layout_1);

  // Initialize the extended layout fields.
  int extended_header_offset = get_extended_section_header_offset();
  WRITE_INT_FIELD(this, extended_header_offset + kExtendedInt64CountOffset,
                  extended.count_of(INT64));
  WRITE_INT_FIELD(this, extended_header_offset + kExtendedCodePtrCountOffset,
                  extended.count_of(CODE_PTR));
  WRITE_INT_FIELD(this, extended_header_offset + kExtendedHeapPtrCountOffset,
                  extended.count_of(HEAP_PTR));
  WRITE_INT_FIELD(this, extended_header_offset + kExtendedInt32CountOffset,
                  extended.count_of(INT32));
}


int ConstantPoolArray::size() {
  NumberOfEntries small(this, SMALL_SECTION);
  if (!is_extended_layout()) {
    return SizeFor(small);
  } else {
    NumberOfEntries extended(this, EXTENDED_SECTION);
    return SizeForExtended(small, extended);
  }
}


int ConstantPoolArray::length() {
  uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
  int length = TotalCountField::decode(small_layout_2);
  if (is_extended_layout()) {
    length += number_of_entries(INT64, EXTENDED_SECTION) +
              number_of_entries(CODE_PTR, EXTENDED_SECTION) +
              number_of_entries(HEAP_PTR, EXTENDED_SECTION) +
              number_of_entries(INT32, EXTENDED_SECTION);
  }
  return length;
}


int ConstantPoolArray::Iterator::next_index() {
  ASSERT(!is_finished());
  int ret = next_index_++;
  update_section();
  return ret;
}


bool ConstantPoolArray::Iterator::is_finished() {
  return next_index_ > array_->last_index(type_, final_section_);
}


void ConstantPoolArray::Iterator::update_section() {
  if (next_index_ > array_->last_index(type_, current_section_) &&
      current_section_ != final_section_) {
    ASSERT(final_section_ == EXTENDED_SECTION);
    current_section_ = EXTENDED_SECTION;
    next_index_ = array_->first_index(type_, EXTENDED_SECTION);
  }
}


WriteBarrierMode HeapObject::GetWriteBarrierMode(
    const DisallowHeapAllocation& promise) {
  Heap* heap = GetHeap();
  if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
  if (heap->InNewSpace(this)) return SKIP_WRITE_BARRIER;
  return UPDATE_WRITE_BARRIER;
}
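
// Usage sketch (illustrative): the DisallowHeapAllocation witness lets a
// caller compute the mode once and reuse it for a batch of stores:
//
//   DisallowHeapAllocation no_gc;
//   WriteBarrierMode mode = array->GetWriteBarrierMode(no_gc);
//   array->set(i, value, mode);
//   // SKIP_WRITE_BARRIER is returned only if 'array' is in new space and
//   // no incremental marking is in progress.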


void FixedArray::set(int index,
                     Object* value,
                     WriteBarrierMode mode) {
  ASSERT(map() != GetHeap()->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
}


void FixedArray::NoIncrementalWriteBarrierSet(FixedArray* array,
                                              int index,
                                              Object* value) {
  ASSERT(array->map() != array->GetHeap()->fixed_cow_array_map());
  ASSERT(index >= 0 && index < array->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(array, offset, value);
  Heap* heap = array->GetHeap();
  if (heap->InNewSpace(value)) {
    heap->RecordWrite(array->address(), offset);
  }
}


void FixedArray::NoWriteBarrierSet(FixedArray* array,
                                   int index,
                                   Object* value) {
  ASSERT(array->map() != array->GetHeap()->fixed_cow_array_map());
  ASSERT(index >= 0 && index < array->length());
  ASSERT(!array->GetHeap()->InNewSpace(value));
  WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
}


void FixedArray::set_undefined(int index) {
  ASSERT(map() != GetHeap()->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!GetHeap()->InNewSpace(GetHeap()->undefined_value()));
  WRITE_FIELD(this,
              kHeaderSize + index * kPointerSize,
              GetHeap()->undefined_value());
}


void FixedArray::set_null(int index) {
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!GetHeap()->InNewSpace(GetHeap()->null_value()));
  WRITE_FIELD(this,
              kHeaderSize + index * kPointerSize,
              GetHeap()->null_value());
}


void FixedArray::set_the_hole(int index) {
  ASSERT(map() != GetHeap()->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!GetHeap()->InNewSpace(GetHeap()->the_hole_value()));
  WRITE_FIELD(this,
              kHeaderSize + index * kPointerSize,
              GetHeap()->the_hole_value());
}


void FixedArray::FillWithHoles(int from, int to) {
  for (int i = from; i < to; i++) {
    set_the_hole(i);
  }
}


Object** FixedArray::data_start() {
  return HeapObject::RawField(this, kHeaderSize);
}


bool DescriptorArray::IsEmpty() {
  ASSERT(length() >= kFirstIndex ||
         this == GetHeap()->empty_descriptor_array());
  return length() < kFirstIndex;
}


void DescriptorArray::SetNumberOfDescriptors(int number_of_descriptors) {
  WRITE_FIELD(
      this, kDescriptorLengthOffset, Smi::FromInt(number_of_descriptors));
}


// Perform a binary search in a fixed array. Low and high are entry indices.
// If there are three entries in this array it should be called with low=0
// and high=2.
template<SearchMode search_mode, typename T>
int BinarySearch(T* array, Name* name, int low, int high, int valid_entries) {
  uint32_t hash = name->Hash();
  int limit = high;

  ASSERT(low <= high);

  while (low != high) {
    int mid = (low + high) / 2;
    Name* mid_name = array->GetSortedKey(mid);
    uint32_t mid_hash = mid_name->Hash();

    if (mid_hash >= hash) {
      high = mid;
    } else {
      low = mid + 1;
    }
  }

  for (; low <= limit; ++low) {
    int sort_index = array->GetSortedKeyIndex(low);
    Name* entry = array->GetKey(sort_index);
    if (entry->Hash() != hash) break;
    if (entry->Equals(name)) {
      if (search_mode == ALL_ENTRIES || sort_index < valid_entries) {
        return sort_index;
      }
      return T::kNotFound;
    }
  }

  return T::kNotFound;
}
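
// Worked example (illustrative): with sorted key hashes [h0, h1, h1, h2] and
// a query hashing to h1, the loop narrows 'low' to the first h1 entry; the
// trailing scan then walks the run of equal hashes and compares actual names,
// so hash collisions are resolved by Name::Equals rather than by hash alone.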


// Perform a linear search in this fixed array. len is the number of entry
// indices that are valid.
template<SearchMode search_mode, typename T>
int LinearSearch(T* array, Name* name, int len, int valid_entries) {
  uint32_t hash = name->Hash();
  if (search_mode == ALL_ENTRIES) {
    for (int number = 0; number < len; number++) {
      int sorted_index = array->GetSortedKeyIndex(number);
      Name* entry = array->GetKey(sorted_index);
      uint32_t current_hash = entry->Hash();
      if (current_hash > hash) break;
      if (current_hash == hash && entry->Equals(name)) return sorted_index;
    }
  } else {
    ASSERT(len >= valid_entries);
    for (int number = 0; number < valid_entries; number++) {
      Name* entry = array->GetKey(number);
      uint32_t current_hash = entry->Hash();
      if (current_hash == hash && entry->Equals(name)) return number;
    }
  }
  return T::kNotFound;
}


template<SearchMode search_mode, typename T>
int Search(T* array, Name* name, int valid_entries) {
  if (search_mode == VALID_ENTRIES) {
    SLOW_ASSERT(array->IsSortedNoDuplicates(valid_entries));
  } else {
    SLOW_ASSERT(array->IsSortedNoDuplicates());
  }

  int nof = array->number_of_entries();
  if (nof == 0) return T::kNotFound;

  // Fast case: do linear search for small arrays.
  const int kMaxElementsForLinearSearch = 8;
  if ((search_mode == ALL_ENTRIES &&
       nof <= kMaxElementsForLinearSearch) ||
      (search_mode == VALID_ENTRIES &&
       valid_entries <= (kMaxElementsForLinearSearch * 3))) {
    return LinearSearch<search_mode>(array, name, nof, valid_entries);
  }

  // Slow case: perform binary search.
  return BinarySearch<search_mode>(array, name, 0, nof - 1, valid_entries);
}
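
// Design note (added for clarity): linear search wins on small arrays because
// it avoids the binary search's extra indirections through the sorted-key
// table; kMaxElementsForLinearSearch above is a performance tuning constant,
// not a correctness requirement.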


int DescriptorArray::Search(Name* name, int valid_descriptors) {
  return internal::Search<VALID_ENTRIES>(this, name, valid_descriptors);
}


int DescriptorArray::SearchWithCache(Name* name, Map* map) {
  int number_of_own_descriptors = map->NumberOfOwnDescriptors();
  if (number_of_own_descriptors == 0) return kNotFound;

  DescriptorLookupCache* cache = GetIsolate()->descriptor_lookup_cache();
  int number = cache->Lookup(map, name);

  if (number == DescriptorLookupCache::kAbsent) {
    number = Search(name, number_of_own_descriptors);
    cache->Update(map, name, number);
  }

  return number;
}


PropertyDetails Map::GetLastDescriptorDetails() {
  return instance_descriptors()->GetDetails(LastAdded());
}


void Map::LookupDescriptor(JSObject* holder,
                           Name* name,
                           LookupResult* result) {
  DescriptorArray* descriptors = this->instance_descriptors();
  int number = descriptors->SearchWithCache(name, this);
  if (number == DescriptorArray::kNotFound) return result->NotFound();
  result->DescriptorResult(holder, descriptors->GetDetails(number), number);
}


void Map::LookupTransition(JSObject* holder,
                           Name* name,
                           LookupResult* result) {
  int transition_index = this->SearchTransition(name);
  if (transition_index == TransitionArray::kNotFound) return result->NotFound();
  result->TransitionResult(holder, this->GetTransition(transition_index));
}


FixedArrayBase* Map::GetInitialElements() {
  if (has_fast_smi_or_object_elements() ||
      has_fast_double_elements()) {
    ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
    return GetHeap()->empty_fixed_array();
  } else if (has_external_array_elements()) {
    ExternalArray* empty_array = GetHeap()->EmptyExternalArrayForMap(this);
    ASSERT(!GetHeap()->InNewSpace(empty_array));
    return empty_array;
  } else if (has_fixed_typed_array_elements()) {
    FixedTypedArrayBase* empty_array =
        GetHeap()->EmptyFixedTypedArrayForMap(this);
    ASSERT(!GetHeap()->InNewSpace(empty_array));
    return empty_array;
  } else if (has_dictionary_elements()) {
    ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_slow_element_dictionary()));
    return GetHeap()->empty_slow_element_dictionary();
  } else {
    UNREACHABLE();
  }
  return NULL;
}


Object** DescriptorArray::GetKeySlot(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return RawFieldOfElementAt(ToKeyIndex(descriptor_number));
}


Object** DescriptorArray::GetDescriptorStartSlot(int descriptor_number) {
  return GetKeySlot(descriptor_number);
}


Object** DescriptorArray::GetDescriptorEndSlot(int descriptor_number) {
  return GetValueSlot(descriptor_number - 1) + 1;
}


Name* DescriptorArray::GetKey(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return Name::cast(get(ToKeyIndex(descriptor_number)));
}


int DescriptorArray::GetSortedKeyIndex(int descriptor_number) {
  return GetDetails(descriptor_number).pointer();
}


Name* DescriptorArray::GetSortedKey(int descriptor_number) {
  return GetKey(GetSortedKeyIndex(descriptor_number));
}


void DescriptorArray::SetSortedKey(int descriptor_index, int pointer) {
  PropertyDetails details = GetDetails(descriptor_index);
  set(ToDetailsIndex(descriptor_index), details.set_pointer(pointer).AsSmi());
}


void DescriptorArray::SetRepresentation(int descriptor_index,
                                        Representation representation) {
  ASSERT(!representation.IsNone());
  PropertyDetails details = GetDetails(descriptor_index);
  set(ToDetailsIndex(descriptor_index),
      details.CopyWithRepresentation(representation).AsSmi());
}


Object** DescriptorArray::GetValueSlot(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return RawFieldOfElementAt(ToValueIndex(descriptor_number));
}


Object* DescriptorArray::GetValue(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return get(ToValueIndex(descriptor_number));
}


void DescriptorArray::SetValue(int descriptor_index, Object* value) {
  set(ToValueIndex(descriptor_index), value);
}


PropertyDetails DescriptorArray::GetDetails(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  Object* details = get(ToDetailsIndex(descriptor_number));
  return PropertyDetails(Smi::cast(details));
}


PropertyType DescriptorArray::GetType(int descriptor_number) {
  return GetDetails(descriptor_number).type();
}


int DescriptorArray::GetFieldIndex(int descriptor_number) {
  ASSERT(GetDetails(descriptor_number).type() == FIELD);
  return GetDetails(descriptor_number).field_index();
}


HeapType* DescriptorArray::GetFieldType(int descriptor_number) {
  ASSERT(GetDetails(descriptor_number).type() == FIELD);
  return HeapType::cast(GetValue(descriptor_number));
}


Object* DescriptorArray::GetConstant(int descriptor_number) {
  return GetValue(descriptor_number);
}


Object* DescriptorArray::GetCallbacksObject(int descriptor_number) {
  ASSERT(GetType(descriptor_number) == CALLBACKS);
  return GetValue(descriptor_number);
}


AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
  ASSERT(GetType(descriptor_number) == CALLBACKS);
  Foreign* p = Foreign::cast(GetCallbacksObject(descriptor_number));
  return reinterpret_cast<AccessorDescriptor*>(p->foreign_address());
}


void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
  desc->Init(handle(GetKey(descriptor_number), GetIsolate()),
             handle(GetValue(descriptor_number), GetIsolate()),
             GetDetails(descriptor_number));
}


void DescriptorArray::Set(int descriptor_number,
                          Descriptor* desc,
                          const WhitenessWitness&) {
  // Range check.
  ASSERT(descriptor_number < number_of_descriptors());

  NoIncrementalWriteBarrierSet(this,
                               ToKeyIndex(descriptor_number),
                               *desc->GetKey());
  NoIncrementalWriteBarrierSet(this,
                               ToValueIndex(descriptor_number),
                               *desc->GetValue());
  NoIncrementalWriteBarrierSet(this,
                               ToDetailsIndex(descriptor_number),
                               desc->GetDetails().AsSmi());
}


void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
  // Range check.
  ASSERT(descriptor_number < number_of_descriptors());

  set(ToKeyIndex(descriptor_number), *desc->GetKey());
  set(ToValueIndex(descriptor_number), *desc->GetValue());
  set(ToDetailsIndex(descriptor_number), desc->GetDetails().AsSmi());
}


void DescriptorArray::Append(Descriptor* desc,
                             const WhitenessWitness& witness) {
  DisallowHeapAllocation no_gc;
  int descriptor_number = number_of_descriptors();
  SetNumberOfDescriptors(descriptor_number + 1);
  Set(descriptor_number, desc, witness);

  uint32_t hash = desc->GetKey()->Hash();

  int insertion;

  for (insertion = descriptor_number; insertion > 0; --insertion) {
    Name* key = GetSortedKey(insertion - 1);
    if (key->Hash() <= hash) break;
    SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
  }

  SetSortedKey(insertion, descriptor_number);
}


void DescriptorArray::Append(Descriptor* desc) {
  DisallowHeapAllocation no_gc;
  int descriptor_number = number_of_descriptors();
  SetNumberOfDescriptors(descriptor_number + 1);
  Set(descriptor_number, desc);

  uint32_t hash = desc->GetKey()->Hash();

  int insertion;

  for (insertion = descriptor_number; insertion > 0; --insertion) {
    Name* key = GetSortedKey(insertion - 1);
    if (key->Hash() <= hash) break;
    SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
  }

  SetSortedKey(insertion, descriptor_number);
}


void DescriptorArray::SwapSortedKeys(int first, int second) {
  int first_key = GetSortedKeyIndex(first);
  SetSortedKey(first, GetSortedKeyIndex(second));
  SetSortedKey(second, first_key);
}


DescriptorArray::WhitenessWitness::WhitenessWitness(DescriptorArray* array)
    : marking_(array->GetHeap()->incremental_marking()) {
  marking_->EnterNoMarkingScope();
  ASSERT(!marking_->IsMarking() ||
         Marking::Color(array) == Marking::WHITE_OBJECT);
}


DescriptorArray::WhitenessWitness::~WhitenessWitness() {
  marking_->LeaveNoMarkingScope();
}


template<typename Derived, typename Shape, typename Key>
int HashTable<Derived, Shape, Key>::ComputeCapacity(int at_least_space_for) {
  const int kMinCapacity = 32;
  int capacity = RoundUpToPowerOf2(at_least_space_for * 2);
  if (capacity < kMinCapacity) {
    capacity = kMinCapacity;  // Guarantee min capacity.
  }
  return capacity;
}
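
// Worked example (illustrative): at_least_space_for == 20 yields
// RoundUpToPowerOf2(40) == 64, while any request of 16 or fewer entries is
// clamped to the minimum capacity of 32. Doubling before rounding keeps the
// table at most half full at its requested load.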


template<typename Derived, typename Shape, typename Key>
int HashTable<Derived, Shape, Key>::FindEntry(Key key) {
  return FindEntry(GetIsolate(), key);
}


// Find entry for key otherwise return kNotFound.
template<typename Derived, typename Shape, typename Key>
int HashTable<Derived, Shape, Key>::FindEntry(Isolate* isolate, Key key) {
  uint32_t capacity = Capacity();
  uint32_t entry = FirstProbe(HashTable::Hash(key), capacity);
  uint32_t count = 1;
  // EnsureCapacity will guarantee the hash table is never full.
  while (true) {
    Object* element = KeyAt(entry);
    // Empty entry. Uses raw unchecked accessors because it is called by the
    // string table during bootstrapping.
    if (element == isolate->heap()->raw_unchecked_undefined_value()) break;
    if (element != isolate->heap()->raw_unchecked_the_hole_value() &&
        Shape::IsMatch(key, element)) return entry;
    entry = NextProbe(entry, count++, capacity);
  }
  return kNotFound;
}
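
// Probing sketch (added for clarity; NextProbe itself is defined elsewhere):
// with capacity 32, the first probe is Hash(key) masked to the table size;
// each collision advances by an increasing step, a scheme that visits every
// slot of a power-of-two table, and the scan terminates at the first
// undefined (never-used) slot.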


bool SeededNumberDictionary::requires_slow_elements() {
  Object* max_index_object = get(kMaxNumberKeyIndex);
  if (!max_index_object->IsSmi()) return false;
  return 0 !=
      (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
}

uint32_t SeededNumberDictionary::max_number_key() {
  ASSERT(!requires_slow_elements());
  Object* max_index_object = get(kMaxNumberKeyIndex);
  if (!max_index_object->IsSmi()) return 0;
  uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
  return value >> kRequiresSlowElementsTagSize;
}

void SeededNumberDictionary::set_requires_slow_elements() {
  set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
}


// ------------------------------------
// Cast operations


CAST_ACCESSOR(FixedArray)
CAST_ACCESSOR(FixedDoubleArray)
CAST_ACCESSOR(FixedTypedArrayBase)
CAST_ACCESSOR(ConstantPoolArray)
CAST_ACCESSOR(DescriptorArray)
CAST_ACCESSOR(DeoptimizationInputData)
CAST_ACCESSOR(DeoptimizationOutputData)
CAST_ACCESSOR(DependentCode)
CAST_ACCESSOR(StringTable)
CAST_ACCESSOR(JSFunctionResultCache)
CAST_ACCESSOR(NormalizedMapCache)
CAST_ACCESSOR(ScopeInfo)
CAST_ACCESSOR(CompilationCacheTable)
CAST_ACCESSOR(CodeCacheHashTable)
CAST_ACCESSOR(PolymorphicCodeCacheHashTable)
CAST_ACCESSOR(MapCache)
CAST_ACCESSOR(String)
CAST_ACCESSOR(SeqString)
CAST_ACCESSOR(SeqOneByteString)
CAST_ACCESSOR(SeqTwoByteString)
CAST_ACCESSOR(SlicedString)
CAST_ACCESSOR(ConsString)
CAST_ACCESSOR(ExternalString)
CAST_ACCESSOR(ExternalAsciiString)
CAST_ACCESSOR(ExternalTwoByteString)
CAST_ACCESSOR(Symbol)
CAST_ACCESSOR(Name)
CAST_ACCESSOR(JSReceiver)
CAST_ACCESSOR(JSObject)
CAST_ACCESSOR(Smi)
CAST_ACCESSOR(HeapObject)
CAST_ACCESSOR(HeapNumber)
CAST_ACCESSOR(Oddball)
CAST_ACCESSOR(Cell)
CAST_ACCESSOR(PropertyCell)
CAST_ACCESSOR(SharedFunctionInfo)
CAST_ACCESSOR(Map)
CAST_ACCESSOR(JSFunction)
CAST_ACCESSOR(GlobalObject)
CAST_ACCESSOR(JSGlobalProxy)
CAST_ACCESSOR(JSGlobalObject)
CAST_ACCESSOR(JSBuiltinsObject)
CAST_ACCESSOR(Code)
CAST_ACCESSOR(JSArray)
CAST_ACCESSOR(JSArrayBuffer)
CAST_ACCESSOR(JSArrayBufferView)
CAST_ACCESSOR(JSTypedArray)
CAST_ACCESSOR(JSDataView)
CAST_ACCESSOR(JSRegExp)
CAST_ACCESSOR(JSProxy)
CAST_ACCESSOR(JSFunctionProxy)
CAST_ACCESSOR(JSSet)
CAST_ACCESSOR(JSMap)
CAST_ACCESSOR(JSSetIterator)
CAST_ACCESSOR(JSMapIterator)
CAST_ACCESSOR(JSWeakMap)
CAST_ACCESSOR(JSWeakSet)
CAST_ACCESSOR(Foreign)
CAST_ACCESSOR(ByteArray)
CAST_ACCESSOR(FreeSpace)
CAST_ACCESSOR(ExternalArray)
CAST_ACCESSOR(ExternalInt8Array)
CAST_ACCESSOR(ExternalUint8Array)
CAST_ACCESSOR(ExternalInt16Array)
CAST_ACCESSOR(ExternalUint16Array)
CAST_ACCESSOR(ExternalInt32Array)
CAST_ACCESSOR(ExternalUint32Array)
CAST_ACCESSOR(ExternalFloat32Array)
CAST_ACCESSOR(ExternalFloat64Array)
CAST_ACCESSOR(ExternalUint8ClampedArray)
CAST_ACCESSOR(Struct)
CAST_ACCESSOR(AccessorInfo)

template <class Traits>
FixedTypedArray<Traits>* FixedTypedArray<Traits>::cast(Object* object) {
  SLOW_ASSERT(object->IsHeapObject() &&
              HeapObject::cast(object)->map()->instance_type() ==
              Traits::kInstanceType);
  return reinterpret_cast<FixedTypedArray<Traits>*>(object);
}


#define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
STRUCT_LIST(MAKE_STRUCT_CAST)
#undef MAKE_STRUCT_CAST


template <typename Derived, typename Shape, typename Key>
HashTable<Derived, Shape, Key>*
HashTable<Derived, Shape, Key>::cast(Object* obj) {
  ASSERT(obj->IsHashTable());
  return reinterpret_cast<HashTable*>(obj);
}


SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
SYNCHRONIZED_SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)

SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
NOBARRIER_SMI_ACCESSORS(FreeSpace, size, kSizeOffset)

SMI_ACCESSORS(String, length, kLengthOffset)
SYNCHRONIZED_SMI_ACCESSORS(String, length, kLengthOffset)


uint32_t Name::hash_field() {
  return READ_UINT32_FIELD(this, kHashFieldOffset);
}


void Name::set_hash_field(uint32_t value) {
  WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
#if V8_HOST_ARCH_64_BIT
  WRITE_UINT32_FIELD(this, kHashFieldOffset + kIntSize, 0);
#endif
}


bool Name::Equals(Name* other) {
  if (other == this) return true;
  if ((this->IsInternalizedString() && other->IsInternalizedString()) ||
      this->IsSymbol() || other->IsSymbol()) {
    return false;
  }
  return String::cast(this)->SlowEquals(String::cast(other));
}


bool Name::Equals(Handle<Name> one, Handle<Name> two) {
  if (one.is_identical_to(two)) return true;
  if ((one->IsInternalizedString() && two->IsInternalizedString()) ||
      one->IsSymbol() || two->IsSymbol()) {
    return false;
  }
  return String::SlowEquals(Handle<String>::cast(one),
                            Handle<String>::cast(two));
}


ACCESSORS(Symbol, name, Object, kNameOffset)
ACCESSORS(Symbol, flags, Smi, kFlagsOffset)
BOOL_ACCESSORS(Symbol, flags, is_private, kPrivateBit)


bool String::Equals(String* other) {
  if (other == this) return true;
  if (this->IsInternalizedString() && other->IsInternalizedString()) {
    return false;
  }
  return SlowEquals(other);
}


bool String::Equals(Handle<String> one, Handle<String> two) {
  if (one.is_identical_to(two)) return true;
  if (one->IsInternalizedString() && two->IsInternalizedString()) {
    return false;
  }
  return SlowEquals(one, two);
}


Handle<String> String::Flatten(Handle<String> string, PretenureFlag pretenure) {
  if (!string->IsConsString()) return string;
  Handle<ConsString> cons = Handle<ConsString>::cast(string);
  if (cons->IsFlat()) return handle(cons->first());
  return SlowFlatten(cons, pretenure);
}
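
// Usage sketch (illustrative; assumes the declaration gives 'pretenure' a
// NOT_TENURED default):
//
//   Handle<String> flat = String::Flatten(string);
//   for (int i = 0; i < flat->length(); i++) Process(flat->Get(i));
//
// 'Process' is a placeholder; the point is that flattening once avoids
// repeated cons-tree walks inside String::Get.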
3182
3183
Get(int index)3184 uint16_t String::Get(int index) {
3185 ASSERT(index >= 0 && index < length());
3186 switch (StringShape(this).full_representation_tag()) {
3187 case kSeqStringTag | kOneByteStringTag:
3188 return SeqOneByteString::cast(this)->SeqOneByteStringGet(index);
3189 case kSeqStringTag | kTwoByteStringTag:
3190 return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
3191 case kConsStringTag | kOneByteStringTag:
3192 case kConsStringTag | kTwoByteStringTag:
3193 return ConsString::cast(this)->ConsStringGet(index);
3194 case kExternalStringTag | kOneByteStringTag:
3195 return ExternalAsciiString::cast(this)->ExternalAsciiStringGet(index);
3196 case kExternalStringTag | kTwoByteStringTag:
3197 return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
3198 case kSlicedStringTag | kOneByteStringTag:
3199 case kSlicedStringTag | kTwoByteStringTag:
3200 return SlicedString::cast(this)->SlicedStringGet(index);
3201 default:
3202 break;
3203 }
3204
3205 UNREACHABLE();
3206 return 0;
3207 }
3208
3209
Set(int index,uint16_t value)3210 void String::Set(int index, uint16_t value) {
3211 ASSERT(index >= 0 && index < length());
3212 ASSERT(StringShape(this).IsSequential());
3213
3214 return this->IsOneByteRepresentation()
3215 ? SeqOneByteString::cast(this)->SeqOneByteStringSet(index, value)
3216 : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
3217 }
3218
3219
IsFlat()3220 bool String::IsFlat() {
3221 if (!StringShape(this).IsCons()) return true;
3222 return ConsString::cast(this)->second()->length() == 0;
3223 }
3224
3225
GetUnderlying()3226 String* String::GetUnderlying() {
3227 // Giving direct access to underlying string only makes sense if the
3228 // wrapping string is already flattened.
3229 ASSERT(this->IsFlat());
3230 ASSERT(StringShape(this).IsIndirect());
3231 STATIC_ASSERT(ConsString::kFirstOffset == SlicedString::kParentOffset);
3232 const int kUnderlyingOffset = SlicedString::kParentOffset;
3233 return String::cast(READ_FIELD(this, kUnderlyingOffset));
3234 }
3235
3236
3237 template<class Visitor>
VisitFlat(Visitor * visitor,String * string,const int offset)3238 ConsString* String::VisitFlat(Visitor* visitor,
3239 String* string,
3240 const int offset) {
3241 int slice_offset = offset;
3242 const int length = string->length();
3243 ASSERT(offset <= length);
3244 while (true) {
3245 int32_t type = string->map()->instance_type();
3246 switch (type & (kStringRepresentationMask | kStringEncodingMask)) {
3247 case kSeqStringTag | kOneByteStringTag:
3248 visitor->VisitOneByteString(
3249 SeqOneByteString::cast(string)->GetChars() + slice_offset,
3250 length - offset);
3251 return NULL;
3252
3253 case kSeqStringTag | kTwoByteStringTag:
3254 visitor->VisitTwoByteString(
3255 SeqTwoByteString::cast(string)->GetChars() + slice_offset,
3256 length - offset);
3257 return NULL;
3258
3259 case kExternalStringTag | kOneByteStringTag:
3260 visitor->VisitOneByteString(
3261 ExternalAsciiString::cast(string)->GetChars() + slice_offset,
3262 length - offset);
3263 return NULL;
3264
3265 case kExternalStringTag | kTwoByteStringTag:
3266 visitor->VisitTwoByteString(
3267 ExternalTwoByteString::cast(string)->GetChars() + slice_offset,
3268 length - offset);
3269 return NULL;
3270
3271 case kSlicedStringTag | kOneByteStringTag:
3272 case kSlicedStringTag | kTwoByteStringTag: {
3273 SlicedString* slicedString = SlicedString::cast(string);
3274 slice_offset += slicedString->offset();
3275 string = slicedString->parent();
3276 continue;
3277 }
3278
3279 case kConsStringTag | kOneByteStringTag:
3280 case kConsStringTag | kTwoByteStringTag:
3281 return ConsString::cast(string);
3282
3283 default:
3284 UNREACHABLE();
3285 return NULL;
3286 }
3287 }
3288 }
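
// Usage sketch (hypothetical visitor, based on the protocol above): a
// visitor only needs the two Visit*String callbacks. VisitFlat unwraps
// sliced strings itself and returns non-NULL exactly when it runs into a
// cons string that still needs iteration.
//
//   class ByteWidthProbe {
//    public:
//     ByteWidthProbe() : saw_two_byte(false) {}
//     void VisitOneByteString(const uint8_t* chars, int length) {}
//     void VisitTwoByteString(const uint16_t* chars, int length) {
//       saw_two_byte = true;
//     }
//     bool saw_two_byte;
//   };
//
//   ByteWidthProbe probe;
//   ConsString* rest = String::VisitFlat(&probe, string, 0);
//   // rest != NULL means |string| is an unflattened cons string.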
3289
3290
3291 uint16_t SeqOneByteString::SeqOneByteStringGet(int index) {
3292 ASSERT(index >= 0 && index < length());
3293 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
3294 }
3295
3296
3297 void SeqOneByteString::SeqOneByteStringSet(int index, uint16_t value) {
3298 ASSERT(index >= 0 && index < length() && value <= kMaxOneByteCharCode);
3299 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
3300 static_cast<byte>(value));
3301 }
3302
3303
3304 Address SeqOneByteString::GetCharsAddress() {
3305 return FIELD_ADDR(this, kHeaderSize);
3306 }
3307
3308
3309 uint8_t* SeqOneByteString::GetChars() {
3310 return reinterpret_cast<uint8_t*>(GetCharsAddress());
3311 }
3312
3313
3314 Address SeqTwoByteString::GetCharsAddress() {
3315 return FIELD_ADDR(this, kHeaderSize);
3316 }
3317
3318
3319 uc16* SeqTwoByteString::GetChars() {
3320 return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
3321 }
3322
3323
3324 uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) {
3325 ASSERT(index >= 0 && index < length());
3326 return READ_SHORT_FIELD(this, kHeaderSize + index * kShortSize);
3327 }
3328
3329
3330 void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) {
3331 ASSERT(index >= 0 && index < length());
3332 WRITE_SHORT_FIELD(this, kHeaderSize + index * kShortSize, value);
3333 }
3334
3335
3336 int SeqTwoByteString::SeqTwoByteStringSize(InstanceType instance_type) {
3337 return SizeFor(length());
3338 }
3339
3340
3341 int SeqOneByteString::SeqOneByteStringSize(InstanceType instance_type) {
3342 return SizeFor(length());
3343 }
3344
3345
3346 String* SlicedString::parent() {
3347 return String::cast(READ_FIELD(this, kParentOffset));
3348 }
3349
3350
3351 void SlicedString::set_parent(String* parent, WriteBarrierMode mode) {
3352 ASSERT(parent->IsSeqString() || parent->IsExternalString());
3353 WRITE_FIELD(this, kParentOffset, parent);
3354 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kParentOffset, parent, mode);
3355 }
3356
3357
3358 SMI_ACCESSORS(SlicedString, offset, kOffsetOffset)
3359
3360
3361 String* ConsString::first() {
3362 return String::cast(READ_FIELD(this, kFirstOffset));
3363 }
3364
3365
3366 Object* ConsString::unchecked_first() {
3367 return READ_FIELD(this, kFirstOffset);
3368 }
3369
3370
3371 void ConsString::set_first(String* value, WriteBarrierMode mode) {
3372 WRITE_FIELD(this, kFirstOffset, value);
3373 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, value, mode);
3374 }
3375
3376
3377 String* ConsString::second() {
3378 return String::cast(READ_FIELD(this, kSecondOffset));
3379 }
3380
3381
3382 Object* ConsString::unchecked_second() {
3383 return READ_FIELD(this, kSecondOffset);
3384 }
3385
3386
3387 void ConsString::set_second(String* value, WriteBarrierMode mode) {
3388 WRITE_FIELD(this, kSecondOffset, value);
3389 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, value, mode);
3390 }
3391
3392
3393 bool ExternalString::is_short() {
3394 InstanceType type = map()->instance_type();
3395 return (type & kShortExternalStringMask) == kShortExternalStringTag;
3396 }
3397
3398
3399 const ExternalAsciiString::Resource* ExternalAsciiString::resource() {
3400 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
3401 }
3402
3403
3404 void ExternalAsciiString::update_data_cache() {
3405 if (is_short()) return;
3406 const char** data_field =
3407 reinterpret_cast<const char**>(FIELD_ADDR(this, kResourceDataOffset));
3408 *data_field = resource()->data();
3409 }
3410
3411
3412 void ExternalAsciiString::set_resource(
3413 const ExternalAsciiString::Resource* resource) {
3414 ASSERT(IsAligned(reinterpret_cast<intptr_t>(resource), kPointerSize));
3415 *reinterpret_cast<const Resource**>(
3416 FIELD_ADDR(this, kResourceOffset)) = resource;
3417 if (resource != NULL) update_data_cache();
3418 }
3419
3420
3421 const uint8_t* ExternalAsciiString::GetChars() {
3422 return reinterpret_cast<const uint8_t*>(resource()->data());
3423 }
3424
3425
3426 uint16_t ExternalAsciiString::ExternalAsciiStringGet(int index) {
3427 ASSERT(index >= 0 && index < length());
3428 return GetChars()[index];
3429 }
3430
3431
3432 const ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
3433 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
3434 }
3435
3436
3437 void ExternalTwoByteString::update_data_cache() {
3438 if (is_short()) return;
3439 const uint16_t** data_field =
3440 reinterpret_cast<const uint16_t**>(FIELD_ADDR(this, kResourceDataOffset));
3441 *data_field = resource()->data();
3442 }
3443
3444
3445 void ExternalTwoByteString::set_resource(
3446 const ExternalTwoByteString::Resource* resource) {
3447 *reinterpret_cast<const Resource**>(
3448 FIELD_ADDR(this, kResourceOffset)) = resource;
3449 if (resource != NULL) update_data_cache();
3450 }
3451
3452
3453 const uint16_t* ExternalTwoByteString::GetChars() {
3454 return resource()->data();
3455 }
3456
3457
3458 uint16_t ExternalTwoByteString::ExternalTwoByteStringGet(int index) {
3459 ASSERT(index >= 0 && index < length());
3460 return GetChars()[index];
3461 }
3462
3463
3464 const uint16_t* ExternalTwoByteString::ExternalTwoByteStringGetData(
3465 unsigned start) {
3466 return GetChars() + start;
3467 }
3468
3469
3470 int ConsStringIteratorOp::OffsetForDepth(int depth) {
3471 return depth & kDepthMask;
3472 }
3473
3474
3475 void ConsStringIteratorOp::PushLeft(ConsString* string) {
3476 frames_[depth_++ & kDepthMask] = string;
3477 }
3478
3479
3480 void ConsStringIteratorOp::PushRight(ConsString* string) {
3481 // In-place update of the topmost frame.
3482 frames_[(depth_-1) & kDepthMask] = string;
3483 }
3484
3485
3486 void ConsStringIteratorOp::AdjustMaximumDepth() {
3487 if (depth_ > maximum_depth_) maximum_depth_ = depth_;
3488 }
3489
3490
3491 void ConsStringIteratorOp::Pop() {
3492 ASSERT(depth_ > 0);
3493 ASSERT(depth_ <= maximum_depth_);
3494 depth_--;
3495 }
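
// Worked example (illustrative): the frame stack is a ring buffer, so
// assuming kDepthMask is a power of two minus one (say 31), pushing at
// depth_ == 32 wraps to frames_[0], and OffsetForDepth(33) == (33 & 31)
// == 1. Trees deeper than the buffered window simply overwrite the
// oldest frames rather than growing the stack.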
3496
3497
3498 uint16_t StringCharacterStream::GetNext() {
3499 ASSERT(buffer8_ != NULL && end_ != NULL);
3500 // Advance cursor if needed.
3501 if (buffer8_ == end_) HasMore();
3502 ASSERT(buffer8_ < end_);
3503 return is_one_byte_ ? *buffer8_++ : *buffer16_++;
3504 }
3505
3506
3507 StringCharacterStream::StringCharacterStream(String* string,
3508 ConsStringIteratorOp* op,
3509 int offset)
3510 : is_one_byte_(false),
3511 op_(op) {
3512 Reset(string, offset);
3513 }
3514
3515
3516 void StringCharacterStream::Reset(String* string, int offset) {
3517 buffer8_ = NULL;
3518 end_ = NULL;
3519 ConsString* cons_string = String::VisitFlat(this, string, offset);
3520 op_->Reset(cons_string, offset);
3521 if (cons_string != NULL) {
3522 string = op_->Next(&offset);
3523 if (string != NULL) String::VisitFlat(this, string, offset);
3524 }
3525 }
3526
3527
3528 bool StringCharacterStream::HasMore() {
3529 if (buffer8_ != end_) return true;
3530 int offset;
3531 String* string = op_->Next(&offset);
3532 ASSERT_EQ(offset, 0);
3533 if (string == NULL) return false;
3534 String::VisitFlat(this, string);
3535 ASSERT(buffer8_ != end_);
3536 return true;
3537 }
3538
3539
3540 void StringCharacterStream::VisitOneByteString(
3541 const uint8_t* chars, int length) {
3542 is_one_byte_ = true;
3543 buffer8_ = chars;
3544 end_ = chars + length;
3545 }
3546
3547
3548 void StringCharacterStream::VisitTwoByteString(
3549 const uint16_t* chars, int length) {
3550 is_one_byte_ = false;
3551 buffer16_ = chars;
3552 end_ = reinterpret_cast<const uint8_t*>(chars + length);
3553 }
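
// Usage sketch (hypothetical): streaming every character of a possibly
// nested cons string without flattening it first.
//
//   ConsStringIteratorOp op;
//   StringCharacterStream stream(string, &op, 0);
//   while (stream.HasMore()) {
//     uint16_t c = stream.GetNext();
//     // ... consume c ...
//   }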
3554
3555
3556 void JSFunctionResultCache::MakeZeroSize() {
3557 set_finger_index(kEntriesIndex);
3558 set_size(kEntriesIndex);
3559 }
3560
3561
3562 void JSFunctionResultCache::Clear() {
3563 int cache_size = size();
3564 Object** entries_start = RawFieldOfElementAt(kEntriesIndex);
3565 MemsetPointer(entries_start,
3566 GetHeap()->the_hole_value(),
3567 cache_size - kEntriesIndex);
3568 MakeZeroSize();
3569 }
3570
3571
3572 int JSFunctionResultCache::size() {
3573 return Smi::cast(get(kCacheSizeIndex))->value();
3574 }
3575
3576
3577 void JSFunctionResultCache::set_size(int size) {
3578 set(kCacheSizeIndex, Smi::FromInt(size));
3579 }
3580
3581
3582 int JSFunctionResultCache::finger_index() {
3583 return Smi::cast(get(kFingerIndex))->value();
3584 }
3585
3586
3587 void JSFunctionResultCache::set_finger_index(int finger_index) {
3588 set(kFingerIndex, Smi::FromInt(finger_index));
3589 }
3590
3591
3592 byte ByteArray::get(int index) {
3593 ASSERT(index >= 0 && index < this->length());
3594 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
3595 }
3596
3597
3598 void ByteArray::set(int index, byte value) {
3599 ASSERT(index >= 0 && index < this->length());
3600 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
3601 }
3602
3603
3604 int ByteArray::get_int(int index) {
3605 ASSERT(index >= 0 && (index * kIntSize) < this->length());
3606 return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
3607 }
3608
3609
3610 ByteArray* ByteArray::FromDataStartAddress(Address address) {
3611 ASSERT_TAG_ALIGNED(address);
3612 return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
3613 }
3614
3615
3616 Address ByteArray::GetDataStartAddress() {
3617 return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
3618 }
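
// Round-trip sketch (illustrative): the two conversions above are exact
// inverses, which is what allows reconstructing a ByteArray from a raw
// data pointer handed out earlier.
//
//   ByteArray* array = ...;
//   Address data = array->GetDataStartAddress();
//   ASSERT(ByteArray::FromDataStartAddress(data) == array);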
3619
3620
3621 uint8_t* ExternalUint8ClampedArray::external_uint8_clamped_pointer() {
3622 return reinterpret_cast<uint8_t*>(external_pointer());
3623 }
3624
3625
3626 uint8_t ExternalUint8ClampedArray::get_scalar(int index) {
3627 ASSERT((index >= 0) && (index < this->length()));
3628 uint8_t* ptr = external_uint8_clamped_pointer();
3629 return ptr[index];
3630 }
3631
3632
3633 Handle<Object> ExternalUint8ClampedArray::get(
3634 Handle<ExternalUint8ClampedArray> array,
3635 int index) {
3636 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
3637 array->GetIsolate());
3638 }
3639
3640
3641 void ExternalUint8ClampedArray::set(int index, uint8_t value) {
3642 ASSERT((index >= 0) && (index < this->length()));
3643 uint8_t* ptr = external_uint8_clamped_pointer();
3644 ptr[index] = value;
3645 }
3646
3647
3648 void* ExternalArray::external_pointer() {
3649 intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
3650 return reinterpret_cast<void*>(ptr);
3651 }
3652
3653
3654 void ExternalArray::set_external_pointer(void* value, WriteBarrierMode mode) {
3655 intptr_t ptr = reinterpret_cast<intptr_t>(value);
3656 WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
3657 }
3658
3659
3660 int8_t ExternalInt8Array::get_scalar(int index) {
3661 ASSERT((index >= 0) && (index < this->length()));
3662 int8_t* ptr = static_cast<int8_t*>(external_pointer());
3663 return ptr[index];
3664 }
3665
3666
3667 Handle<Object> ExternalInt8Array::get(Handle<ExternalInt8Array> array,
3668 int index) {
3669 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
3670 array->GetIsolate());
3671 }
3672
3673
3674 void ExternalInt8Array::set(int index, int8_t value) {
3675 ASSERT((index >= 0) && (index < this->length()));
3676 int8_t* ptr = static_cast<int8_t*>(external_pointer());
3677 ptr[index] = value;
3678 }
3679
3680
3681 uint8_t ExternalUint8Array::get_scalar(int index) {
3682 ASSERT((index >= 0) && (index < this->length()));
3683 uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
3684 return ptr[index];
3685 }
3686
3687
3688 Handle<Object> ExternalUint8Array::get(Handle<ExternalUint8Array> array,
3689 int index) {
3690 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
3691 array->GetIsolate());
3692 }
3693
3694
3695 void ExternalUint8Array::set(int index, uint8_t value) {
3696 ASSERT((index >= 0) && (index < this->length()));
3697 uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
3698 ptr[index] = value;
3699 }
3700
3701
3702 int16_t ExternalInt16Array::get_scalar(int index) {
3703 ASSERT((index >= 0) && (index < this->length()));
3704 int16_t* ptr = static_cast<int16_t*>(external_pointer());
3705 return ptr[index];
3706 }
3707
3708
3709 Handle<Object> ExternalInt16Array::get(Handle<ExternalInt16Array> array,
3710 int index) {
3711 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
3712 array->GetIsolate());
3713 }
3714
3715
3716 void ExternalInt16Array::set(int index, int16_t value) {
3717 ASSERT((index >= 0) && (index < this->length()));
3718 int16_t* ptr = static_cast<int16_t*>(external_pointer());
3719 ptr[index] = value;
3720 }
3721
3722
3723 uint16_t ExternalUint16Array::get_scalar(int index) {
3724 ASSERT((index >= 0) && (index < this->length()));
3725 uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
3726 return ptr[index];
3727 }
3728
3729
3730 Handle<Object> ExternalUint16Array::get(Handle<ExternalUint16Array> array,
3731 int index) {
3732 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
3733 array->GetIsolate());
3734 }
3735
3736
3737 void ExternalUint16Array::set(int index, uint16_t value) {
3738 ASSERT((index >= 0) && (index < this->length()));
3739 uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
3740 ptr[index] = value;
3741 }
3742
3743
3744 int32_t ExternalInt32Array::get_scalar(int index) {
3745 ASSERT((index >= 0) && (index < this->length()));
3746 int32_t* ptr = static_cast<int32_t*>(external_pointer());
3747 return ptr[index];
3748 }
3749
3750
3751 Handle<Object> ExternalInt32Array::get(Handle<ExternalInt32Array> array,
3752 int index) {
3753 return array->GetIsolate()->factory()->
3754 NewNumberFromInt(array->get_scalar(index));
3755 }
3756
3757
3758 void ExternalInt32Array::set(int index, int32_t value) {
3759 ASSERT((index >= 0) && (index < this->length()));
3760 int32_t* ptr = static_cast<int32_t*>(external_pointer());
3761 ptr[index] = value;
3762 }
3763
3764
3765 uint32_t ExternalUint32Array::get_scalar(int index) {
3766 ASSERT((index >= 0) && (index < this->length()));
3767 uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
3768 return ptr[index];
3769 }
3770
3771
3772 Handle<Object> ExternalUint32Array::get(Handle<ExternalUint32Array> array,
3773 int index) {
3774 return array->GetIsolate()->factory()->
3775 NewNumberFromUint(array->get_scalar(index));
3776 }
3777
3778
3779 void ExternalUint32Array::set(int index, uint32_t value) {
3780 ASSERT((index >= 0) && (index < this->length()));
3781 uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
3782 ptr[index] = value;
3783 }
3784
3785
3786 float ExternalFloat32Array::get_scalar(int index) {
3787 ASSERT((index >= 0) && (index < this->length()));
3788 float* ptr = static_cast<float*>(external_pointer());
3789 return ptr[index];
3790 }
3791
3792
3793 Handle<Object> ExternalFloat32Array::get(Handle<ExternalFloat32Array> array,
3794 int index) {
3795 return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
3796 }
3797
3798
3799 void ExternalFloat32Array::set(int index, float value) {
3800 ASSERT((index >= 0) && (index < this->length()));
3801 float* ptr = static_cast<float*>(external_pointer());
3802 ptr[index] = value;
3803 }
3804
3805
3806 double ExternalFloat64Array::get_scalar(int index) {
3807 ASSERT((index >= 0) && (index < this->length()));
3808 double* ptr = static_cast<double*>(external_pointer());
3809 return ptr[index];
3810 }
3811
3812
3813 Handle<Object> ExternalFloat64Array::get(Handle<ExternalFloat64Array> array,
3814 int index) {
3815 return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
3816 }
3817
3818
3819 void ExternalFloat64Array::set(int index, double value) {
3820 ASSERT((index >= 0) && (index < this->length()));
3821 double* ptr = static_cast<double*>(external_pointer());
3822 ptr[index] = value;
3823 }
3824
3825
3826 void* FixedTypedArrayBase::DataPtr() {
3827 return FIELD_ADDR(this, kDataOffset);
3828 }
3829
3830
3831 int FixedTypedArrayBase::DataSize(InstanceType type) {
3832 int element_size;
3833 switch (type) {
3834 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
3835 case FIXED_##TYPE##_ARRAY_TYPE: \
3836 element_size = size; \
3837 break;
3838
3839 TYPED_ARRAYS(TYPED_ARRAY_CASE)
3840 #undef TYPED_ARRAY_CASE
3841 default:
3842 UNREACHABLE();
3843 return 0;
3844 }
3845 return length() * element_size;
3846 }
3847
3848
3849 int FixedTypedArrayBase::DataSize() {
3850 return DataSize(map()->instance_type());
3851 }
3852
3853
3854 int FixedTypedArrayBase::size() {
3855 return OBJECT_POINTER_ALIGN(kDataOffset + DataSize());
3856 }
3857
3858
3859 int FixedTypedArrayBase::TypedArraySize(InstanceType type) {
3860 return OBJECT_POINTER_ALIGN(kDataOffset + DataSize(type));
3861 }
3862
3863
3864 uint8_t Uint8ArrayTraits::defaultValue() { return 0; }
3865
3866
3867 uint8_t Uint8ClampedArrayTraits::defaultValue() { return 0; }
3868
3869
3870 int8_t Int8ArrayTraits::defaultValue() { return 0; }
3871
3872
3873 uint16_t Uint16ArrayTraits::defaultValue() { return 0; }
3874
3875
3876 int16_t Int16ArrayTraits::defaultValue() { return 0; }
3877
3878
3879 uint32_t Uint32ArrayTraits::defaultValue() { return 0; }
3880
3881
3882 int32_t Int32ArrayTraits::defaultValue() { return 0; }
3883
3884
3885 float Float32ArrayTraits::defaultValue() {
3886 return static_cast<float>(OS::nan_value());
3887 }
3888
3889
3890 double Float64ArrayTraits::defaultValue() { return OS::nan_value(); }
3891
3892
3893 template <class Traits>
3894 typename Traits::ElementType FixedTypedArray<Traits>::get_scalar(int index) {
3895 ASSERT((index >= 0) && (index < this->length()));
3896 ElementType* ptr = reinterpret_cast<ElementType*>(
3897 FIELD_ADDR(this, kDataOffset));
3898 return ptr[index];
3899 }
3900
3901
3902 template<> inline
3903 FixedTypedArray<Float64ArrayTraits>::ElementType
3904 FixedTypedArray<Float64ArrayTraits>::get_scalar(int index) {
3905 ASSERT((index >= 0) && (index < this->length()));
3906 return READ_DOUBLE_FIELD(this, ElementOffset(index));
3907 }
3908
3909
3910 template <class Traits>
3911 void FixedTypedArray<Traits>::set(int index, ElementType value) {
3912 ASSERT((index >= 0) && (index < this->length()));
3913 ElementType* ptr = reinterpret_cast<ElementType*>(
3914 FIELD_ADDR(this, kDataOffset));
3915 ptr[index] = value;
3916 }
3917
3918
3919 template<> inline
3920 void FixedTypedArray<Float64ArrayTraits>::set(
3921 int index, Float64ArrayTraits::ElementType value) {
3922 ASSERT((index >= 0) && (index < this->length()));
3923 WRITE_DOUBLE_FIELD(this, ElementOffset(index), value);
3924 }
3925
3926
3927 template <class Traits>
3928 typename Traits::ElementType FixedTypedArray<Traits>::from_int(int value) {
3929 return static_cast<ElementType>(value);
3930 }
3931
3932
3933 template <> inline
3934 uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_int(int value) {
3935 if (value < 0) return 0;
3936 if (value > 0xFF) return 0xFF;
3937 return static_cast<uint8_t>(value);
3938 }
3939
3940
3941 template <class Traits>
3942 typename Traits::ElementType FixedTypedArray<Traits>::from_double(
3943 double value) {
3944 return static_cast<ElementType>(DoubleToInt32(value));
3945 }
3946
3947
3948 template<> inline
3949 uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_double(double value) {
3950 if (value < 0) return 0;
3951 if (value > 0xFF) return 0xFF;
3952 return static_cast<uint8_t>(lrint(value));
3953 }
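
// Worked values (illustrative): unlike the primary templates, which
// truncate, the Uint8Clamped specializations saturate and round:
//
//   from_int(-5)      == 0x00
//   from_int(300)     == 0xFF
//   from_double(0.5)  == 0      // lrint rounds halfway cases to even
//   from_double(1.5)  == 2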
3954
3955
3956 template<> inline
3957 float FixedTypedArray<Float32ArrayTraits>::from_double(double value) {
3958 return static_cast<float>(value);
3959 }
3960
3961
3962 template<> inline
3963 double FixedTypedArray<Float64ArrayTraits>::from_double(double value) {
3964 return value;
3965 }
3966
3967
3968 template <class Traits>
3969 Handle<Object> FixedTypedArray<Traits>::get(
3970 Handle<FixedTypedArray<Traits> > array,
3971 int index) {
3972 return Traits::ToHandle(array->GetIsolate(), array->get_scalar(index));
3973 }
3974
3975
3976 template <class Traits>
3977 Handle<Object> FixedTypedArray<Traits>::SetValue(
3978 Handle<FixedTypedArray<Traits> > array,
3979 uint32_t index,
3980 Handle<Object> value) {
3981 ElementType cast_value = Traits::defaultValue();
3982 if (index < static_cast<uint32_t>(array->length())) {
3983 if (value->IsSmi()) {
3984 int int_value = Handle<Smi>::cast(value)->value();
3985 cast_value = from_int(int_value);
3986 } else if (value->IsHeapNumber()) {
3987 double double_value = Handle<HeapNumber>::cast(value)->value();
3988 cast_value = from_double(double_value);
3989 } else {
3990 // Clamp undefined to the default value. All other types have been
3991 // converted to a number type further up in the call chain.
3992 ASSERT(value->IsUndefined());
3993 }
3994 array->set(index, cast_value);
3995 }
3996 return Traits::ToHandle(array->GetIsolate(), cast_value);
3997 }
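
// Usage sketch (hypothetical values): SetValue accepts Smi, HeapNumber
// and undefined inputs; anything else must have been converted to a
// number further up the call chain. An out-of-range index stores nothing
// and just returns the default value as a handle.
//
//   Handle<FixedTypedArray<Uint8ClampedArrayTraits> > array = ...;
//   Handle<Object> big = handle(Smi::FromInt(300), isolate);
//   FixedTypedArray<Uint8ClampedArrayTraits>::SetValue(array, 0, big);
//   // array->get_scalar(0) == 0xFF after clamping.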
3998
3999
4000 Handle<Object> Uint8ArrayTraits::ToHandle(Isolate* isolate, uint8_t scalar) {
4001 return handle(Smi::FromInt(scalar), isolate);
4002 }
4003
4004
4005 Handle<Object> Uint8ClampedArrayTraits::ToHandle(Isolate* isolate,
4006 uint8_t scalar) {
4007 return handle(Smi::FromInt(scalar), isolate);
4008 }
4009
4010
4011 Handle<Object> Int8ArrayTraits::ToHandle(Isolate* isolate, int8_t scalar) {
4012 return handle(Smi::FromInt(scalar), isolate);
4013 }
4014
4015
4016 Handle<Object> Uint16ArrayTraits::ToHandle(Isolate* isolate, uint16_t scalar) {
4017 return handle(Smi::FromInt(scalar), isolate);
4018 }
4019
4020
4021 Handle<Object> Int16ArrayTraits::ToHandle(Isolate* isolate, int16_t scalar) {
4022 return handle(Smi::FromInt(scalar), isolate);
4023 }
4024
4025
4026 Handle<Object> Uint32ArrayTraits::ToHandle(Isolate* isolate, uint32_t scalar) {
4027 return isolate->factory()->NewNumberFromUint(scalar);
4028 }
4029
4030
4031 Handle<Object> Int32ArrayTraits::ToHandle(Isolate* isolate, int32_t scalar) {
4032 return isolate->factory()->NewNumberFromInt(scalar);
4033 }
4034
4035
4036 Handle<Object> Float32ArrayTraits::ToHandle(Isolate* isolate, float scalar) {
4037 return isolate->factory()->NewNumber(scalar);
4038 }
4039
4040
4041 Handle<Object> Float64ArrayTraits::ToHandle(Isolate* isolate, double scalar) {
4042 return isolate->factory()->NewNumber(scalar);
4043 }
4044
4045
4046 int Map::visitor_id() {
4047 return READ_BYTE_FIELD(this, kVisitorIdOffset);
4048 }
4049
4050
4051 void Map::set_visitor_id(int id) {
4052 ASSERT(0 <= id && id < 256);
4053 WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
4054 }
4055
4056
4057 int Map::instance_size() {
4058 return NOBARRIER_READ_BYTE_FIELD(
4059 this, kInstanceSizeOffset) << kPointerSizeLog2;
4060 }
4061
4062
4063 int Map::inobject_properties() {
4064 return READ_BYTE_FIELD(this, kInObjectPropertiesOffset);
4065 }
4066
4067
4068 int Map::pre_allocated_property_fields() {
4069 return READ_BYTE_FIELD(this, kPreAllocatedPropertyFieldsOffset);
4070 }
4071
4072
4073 int Map::GetInObjectPropertyOffset(int index) {
4074 // Adjust for the number of properties stored in the object.
4075 index -= inobject_properties();
4076 ASSERT(index <= 0);
4077 return instance_size() + (index * kPointerSize);
4078 }
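
// Worked example (illustrative numbers): for a map with instance_size()
// == 10 * kPointerSize and inobject_properties() == 4, the in-object
// properties occupy the last four words of the object, so index 0 yields
// (0 - 4) * kPointerSize + 10 * kPointerSize == 6 * kPointerSize.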
4079
4080
4081 int HeapObject::SizeFromMap(Map* map) {
4082 int instance_size = map->instance_size();
4083 if (instance_size != kVariableSizeSentinel) return instance_size;
4084 // Only inline the most frequent cases.
4085 InstanceType instance_type = map->instance_type();
4086 if (instance_type == FIXED_ARRAY_TYPE) {
4087 return FixedArray::BodyDescriptor::SizeOf(map, this);
4088 }
4089 if (instance_type == ASCII_STRING_TYPE ||
4090 instance_type == ASCII_INTERNALIZED_STRING_TYPE) {
4091 return SeqOneByteString::SizeFor(
4092 reinterpret_cast<SeqOneByteString*>(this)->length());
4093 }
4094 if (instance_type == BYTE_ARRAY_TYPE) {
4095 return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
4096 }
4097 if (instance_type == FREE_SPACE_TYPE) {
4098 return reinterpret_cast<FreeSpace*>(this)->nobarrier_size();
4099 }
4100 if (instance_type == STRING_TYPE ||
4101 instance_type == INTERNALIZED_STRING_TYPE) {
4102 return SeqTwoByteString::SizeFor(
4103 reinterpret_cast<SeqTwoByteString*>(this)->length());
4104 }
4105 if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
4106 return FixedDoubleArray::SizeFor(
4107 reinterpret_cast<FixedDoubleArray*>(this)->length());
4108 }
4109 if (instance_type == CONSTANT_POOL_ARRAY_TYPE) {
4110 return reinterpret_cast<ConstantPoolArray*>(this)->size();
4111 }
4112 if (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
4113 instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE) {
4114 return reinterpret_cast<FixedTypedArrayBase*>(
4115 this)->TypedArraySize(instance_type);
4116 }
4117 ASSERT(instance_type == CODE_TYPE);
4118 return reinterpret_cast<Code*>(this)->CodeSize();
4119 }
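
// Sketch (illustrative): only maps whose instance_size() is the
// kVariableSizeSentinel reach the type dispatch above. An ASCII string of
// length 3, for example, reports SeqOneByteString::SizeFor(3): the header
// plus three one-byte characters, rounded up to pointer alignment.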
4120
4121
4122 void Map::set_instance_size(int value) {
4123 ASSERT_EQ(0, value & (kPointerSize - 1));
4124 value >>= kPointerSizeLog2;
4125 ASSERT(0 <= value && value < 256);
4126 NOBARRIER_WRITE_BYTE_FIELD(
4127 this, kInstanceSizeOffset, static_cast<byte>(value));
4128 }
4129
4130
4131 void Map::set_inobject_properties(int value) {
4132 ASSERT(0 <= value && value < 256);
4133 WRITE_BYTE_FIELD(this, kInObjectPropertiesOffset, static_cast<byte>(value));
4134 }
4135
4136
4137 void Map::set_pre_allocated_property_fields(int value) {
4138 ASSERT(0 <= value && value < 256);
4139 WRITE_BYTE_FIELD(this,
4140 kPreAllocatedPropertyFieldsOffset,
4141 static_cast<byte>(value));
4142 }
4143
4144
4145 InstanceType Map::instance_type() {
4146 return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
4147 }
4148
4149
4150 void Map::set_instance_type(InstanceType value) {
4151 WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
4152 }
4153
4154
4155 int Map::unused_property_fields() {
4156 return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
4157 }
4158
4159
4160 void Map::set_unused_property_fields(int value) {
4161 WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
4162 }
4163
4164
4165 byte Map::bit_field() {
4166 return READ_BYTE_FIELD(this, kBitFieldOffset);
4167 }
4168
4169
4170 void Map::set_bit_field(byte value) {
4171 WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
4172 }
4173
4174
4175 byte Map::bit_field2() {
4176 return READ_BYTE_FIELD(this, kBitField2Offset);
4177 }
4178
4179
4180 void Map::set_bit_field2(byte value) {
4181 WRITE_BYTE_FIELD(this, kBitField2Offset, value);
4182 }
4183
4184
4185 void Map::set_non_instance_prototype(bool value) {
4186 if (value) {
4187 set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
4188 } else {
4189 set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
4190 }
4191 }
4192
4193
4194 bool Map::has_non_instance_prototype() {
4195 return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
4196 }
4197
4198
4199 void Map::set_function_with_prototype(bool value) {
4200 set_bit_field(FunctionWithPrototype::update(bit_field(), value));
4201 }
4202
4203
4204 bool Map::function_with_prototype() {
4205 return FunctionWithPrototype::decode(bit_field());
4206 }
4207
4208
4209 void Map::set_is_access_check_needed(bool access_check_needed) {
4210 if (access_check_needed) {
4211 set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
4212 } else {
4213 set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
4214 }
4215 }
4216
4217
4218 bool Map::is_access_check_needed() {
4219 return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
4220 }
4221
4222
4223 void Map::set_is_extensible(bool value) {
4224 if (value) {
4225 set_bit_field2(bit_field2() | (1 << kIsExtensible));
4226 } else {
4227 set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
4228 }
4229 }
4230
4231 bool Map::is_extensible() {
4232 return ((1 << kIsExtensible) & bit_field2()) != 0;
4233 }
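
// Sketch (illustrative): the bit_field()/bit_field2() setters all follow
// the same read-modify-write pattern, touching only their own bit:
//
//   byte before = map->bit_field2();
//   map->set_is_extensible(false);
//   ASSERT(map->bit_field2() == (before & ~(1 << Map::kIsExtensible)));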
4234
4235
4236 void Map::set_is_shared(bool value) {
4237 set_bit_field3(IsShared::update(bit_field3(), value));
4238 }
4239
4240
4241 bool Map::is_shared() {
4242 return IsShared::decode(bit_field3());
}
4243
4244
4245 void Map::set_dictionary_map(bool value) {
4246 uint32_t new_bit_field3 = DictionaryMap::update(bit_field3(), value);
4247 new_bit_field3 = IsUnstable::update(new_bit_field3, value);
4248 set_bit_field3(new_bit_field3);
4249 }
4250
4251
4252 bool Map::is_dictionary_map() {
4253 return DictionaryMap::decode(bit_field3());
4254 }
4255
4256
4257 Code::Flags Code::flags() {
4258 return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
4259 }
4260
4261
4262 void Map::set_owns_descriptors(bool owns_descriptors) {
4263 set_bit_field3(OwnsDescriptors::update(bit_field3(), owns_descriptors));
4264 }
4265
4266
4267 bool Map::owns_descriptors() {
4268 return OwnsDescriptors::decode(bit_field3());
4269 }
4270
4271
4272 void Map::set_has_instance_call_handler() {
4273 set_bit_field3(HasInstanceCallHandler::update(bit_field3(), true));
4274 }
4275
4276
4277 bool Map::has_instance_call_handler() {
4278 return HasInstanceCallHandler::decode(bit_field3());
4279 }
4280
4281
4282 void Map::deprecate() {
4283 set_bit_field3(Deprecated::update(bit_field3(), true));
4284 }
4285
4286
4287 bool Map::is_deprecated() {
4288 return Deprecated::decode(bit_field3());
4289 }
4290
4291
4292 void Map::set_migration_target(bool value) {
4293 set_bit_field3(IsMigrationTarget::update(bit_field3(), value));
4294 }
4295
4296
4297 bool Map::is_migration_target() {
4298 return IsMigrationTarget::decode(bit_field3());
4299 }
4300
4301
4302 void Map::set_done_inobject_slack_tracking(bool value) {
4303 set_bit_field3(DoneInobjectSlackTracking::update(bit_field3(), value));
4304 }
4305
4306
4307 bool Map::done_inobject_slack_tracking() {
4308 return DoneInobjectSlackTracking::decode(bit_field3());
4309 }
4310
4311
4312 void Map::set_construction_count(int value) {
4313 set_bit_field3(ConstructionCount::update(bit_field3(), value));
4314 }
4315
4316
4317 int Map::construction_count() {
4318 return ConstructionCount::decode(bit_field3());
4319 }
4320
4321
4322 void Map::freeze() {
4323 set_bit_field3(IsFrozen::update(bit_field3(), true));
4324 }
4325
4326
4327 bool Map::is_frozen() {
4328 return IsFrozen::decode(bit_field3());
4329 }
4330
4331
4332 void Map::mark_unstable() {
4333 set_bit_field3(IsUnstable::update(bit_field3(), true));
4334 }
4335
4336
4337 bool Map::is_stable() {
4338 return !IsUnstable::decode(bit_field3());
4339 }
4340
4341
4342 bool Map::has_code_cache() {
4343 return code_cache() != GetIsolate()->heap()->empty_fixed_array();
4344 }
4345
4346
4347 bool Map::CanBeDeprecated() {
4348 int descriptor = LastAdded();
4349 for (int i = 0; i <= descriptor; i++) {
4350 PropertyDetails details = instance_descriptors()->GetDetails(i);
4351 if (details.representation().IsNone()) return true;
4352 if (details.representation().IsSmi()) return true;
4353 if (details.representation().IsDouble()) return true;
4354 if (details.representation().IsHeapObject()) return true;
4355 if (details.type() == CONSTANT) return true;
4356 }
4357 return false;
4358 }
4359
4360
4361 void Map::NotifyLeafMapLayoutChange() {
4362 if (is_stable()) {
4363 mark_unstable();
4364 dependent_code()->DeoptimizeDependentCodeGroup(
4365 GetIsolate(),
4366 DependentCode::kPrototypeCheckGroup);
4367 }
4368 }
4369
4370
4371 bool Map::CanOmitMapChecks() {
4372 return is_stable() && FLAG_omit_map_checks_for_leaf_maps;
4373 }
4374
4375
4376 int DependentCode::number_of_entries(DependencyGroup group) {
4377 if (length() == 0) return 0;
4378 return Smi::cast(get(group))->value();
4379 }
4380
4381
4382 void DependentCode::set_number_of_entries(DependencyGroup group, int value) {
4383 set(group, Smi::FromInt(value));
4384 }
4385
4386
4387 bool DependentCode::is_code_at(int i) {
4388 return get(kCodesStartIndex + i)->IsCode();
4389 }
4390
4391 Code* DependentCode::code_at(int i) {
4392 return Code::cast(get(kCodesStartIndex + i));
4393 }
4394
4395
4396 CompilationInfo* DependentCode::compilation_info_at(int i) {
4397 return reinterpret_cast<CompilationInfo*>(
4398 Foreign::cast(get(kCodesStartIndex + i))->foreign_address());
4399 }
4400
4401
4402 void DependentCode::set_object_at(int i, Object* object) {
4403 set(kCodesStartIndex + i, object);
4404 }
4405
4406
4407 Object* DependentCode::object_at(int i) {
4408 return get(kCodesStartIndex + i);
4409 }
4410
4411
4412 Object** DependentCode::slot_at(int i) {
4413 return RawFieldOfElementAt(kCodesStartIndex + i);
4414 }
4415
4416
4417 void DependentCode::clear_at(int i) {
4418 set_undefined(kCodesStartIndex + i);
4419 }
4420
4421
4422 void DependentCode::copy(int from, int to) {
4423 set(kCodesStartIndex + to, get(kCodesStartIndex + from));
4424 }
4425
4426
4427 void DependentCode::ExtendGroup(DependencyGroup group) {
4428 GroupStartIndexes starts(this);
4429 for (int g = kGroupCount - 1; g > group; g--) {
4430 if (starts.at(g) < starts.at(g + 1)) {
4431 copy(starts.at(g), starts.at(g + 1));
4432 }
4433 }
4434 }
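
// Sketch (illustrative): groups are stored back to back, so making room
// in group g moves the first element of each later group to that group's
// old end, e.g. when extending group 0 of [a0 a1 | b0 b1 | c0]:
//
//   copy(starts.at(2), starts.at(3));  // c0 -> one past the old end
//   copy(starts.at(1), starts.at(2));  // b0 -> c's old start
//   // leaves a gap at the old starts.at(1) for the new group-0 entry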
4435
4436
4437 void Code::set_flags(Code::Flags flags) {
4438 STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
4439 WRITE_INT_FIELD(this, kFlagsOffset, flags);
4440 }
4441
4442
4443 Code::Kind Code::kind() {
4444 return ExtractKindFromFlags(flags());
4445 }
4446
4447
4448 InlineCacheState Code::ic_state() {
4449 InlineCacheState result = ExtractICStateFromFlags(flags());
4450 // Only allow uninitialized or debugger states for non-IC code
4451 // objects. This is used in the debugger to determine whether or not
4452 // a call to a code object has been replaced with a debug break call.
4453 ASSERT(is_inline_cache_stub() ||
4454 result == UNINITIALIZED ||
4455 result == DEBUG_STUB);
4456 return result;
4457 }
4458
4459
4460 ExtraICState Code::extra_ic_state() {
4461 ASSERT(is_inline_cache_stub() || ic_state() == DEBUG_STUB);
4462 return ExtractExtraICStateFromFlags(flags());
4463 }
4464
4465
4466 Code::StubType Code::type() {
4467 return ExtractTypeFromFlags(flags());
4468 }
4469
4470
4471 // For initialization.
4472 void Code::set_raw_kind_specific_flags1(int value) {
4473 WRITE_INT_FIELD(this, kKindSpecificFlags1Offset, value);
4474 }
4475
4476
4477 void Code::set_raw_kind_specific_flags2(int value) {
4478 WRITE_INT_FIELD(this, kKindSpecificFlags2Offset, value);
4479 }
4480
4481
4482 inline bool Code::is_crankshafted() {
4483 return IsCrankshaftedField::decode(
4484 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
4485 }
4486
4487
4488 inline void Code::set_is_crankshafted(bool value) {
4489 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4490 int updated = IsCrankshaftedField::update(previous, value);
4491 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
4492 }
4493
4494
4495 int Code::major_key() {
4496 ASSERT(has_major_key());
4497 return StubMajorKeyField::decode(
4498 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
4499 }
4500
4501
4502 void Code::set_major_key(int major) {
4503 ASSERT(has_major_key());
4504 ASSERT(0 <= major && major < 256);
4505 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4506 int updated = StubMajorKeyField::update(previous, major);
4507 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
4508 }
4509
4510
4511 bool Code::has_major_key() {
4512 return kind() == STUB ||
4513 kind() == HANDLER ||
4514 kind() == BINARY_OP_IC ||
4515 kind() == COMPARE_IC ||
4516 kind() == COMPARE_NIL_IC ||
4517 kind() == LOAD_IC ||
4518 kind() == KEYED_LOAD_IC ||
4519 kind() == STORE_IC ||
4520 kind() == CALL_IC ||
4521 kind() == KEYED_STORE_IC ||
4522 kind() == TO_BOOLEAN_IC;
4523 }
4524
4525
4526 bool Code::optimizable() {
4527 ASSERT_EQ(FUNCTION, kind());
4528 return READ_BYTE_FIELD(this, kOptimizableOffset) == 1;
4529 }
4530
4531
4532 void Code::set_optimizable(bool value) {
4533 ASSERT_EQ(FUNCTION, kind());
4534 WRITE_BYTE_FIELD(this, kOptimizableOffset, value ? 1 : 0);
4535 }
4536
4537
4538 bool Code::has_deoptimization_support() {
4539 ASSERT_EQ(FUNCTION, kind());
4540 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
4541 return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
4542 }
4543
4544
4545 void Code::set_has_deoptimization_support(bool value) {
4546 ASSERT_EQ(FUNCTION, kind());
4547 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
4548 flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
4549 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
4550 }
4551
4552
4553 bool Code::has_debug_break_slots() {
4554 ASSERT_EQ(FUNCTION, kind());
4555 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
4556 return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
4557 }
4558
4559
4560 void Code::set_has_debug_break_slots(bool value) {
4561 ASSERT_EQ(FUNCTION, kind());
4562 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
4563 flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
4564 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
4565 }
4566
4567
4568 bool Code::is_compiled_optimizable() {
4569 ASSERT_EQ(FUNCTION, kind());
4570 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
4571 return FullCodeFlagsIsCompiledOptimizable::decode(flags);
4572 }
4573
4574
4575 void Code::set_compiled_optimizable(bool value) {
4576 ASSERT_EQ(FUNCTION, kind());
4577 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
4578 flags = FullCodeFlagsIsCompiledOptimizable::update(flags, value);
4579 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
4580 }
4581
4582
4583 int Code::allow_osr_at_loop_nesting_level() {
4584 ASSERT_EQ(FUNCTION, kind());
4585 return READ_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset);
4586 }
4587
4588
4589 void Code::set_allow_osr_at_loop_nesting_level(int level) {
4590 ASSERT_EQ(FUNCTION, kind());
4591 ASSERT(level >= 0 && level <= kMaxLoopNestingMarker);
4592 WRITE_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset, level);
4593 }
4594
4595
4596 int Code::profiler_ticks() {
4597 ASSERT_EQ(FUNCTION, kind());
4598 return READ_BYTE_FIELD(this, kProfilerTicksOffset);
4599 }
4600
4601
4602 void Code::set_profiler_ticks(int ticks) {
4603 ASSERT_EQ(FUNCTION, kind());
4604 ASSERT(ticks < 256);
4605 WRITE_BYTE_FIELD(this, kProfilerTicksOffset, ticks);
4606 }
4607
4608
4609 unsigned Code::stack_slots() {
4610 ASSERT(is_crankshafted());
4611 return StackSlotsField::decode(
4612 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4613 }
4614
4615
4616 void Code::set_stack_slots(unsigned slots) {
4617 CHECK(slots <= (1 << kStackSlotsBitCount));
4618 ASSERT(is_crankshafted());
4619 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4620 int updated = StackSlotsField::update(previous, slots);
4621 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4622 }
4623
4624
4625 unsigned Code::safepoint_table_offset() {
4626 ASSERT(is_crankshafted());
4627 return SafepointTableOffsetField::decode(
4628 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
4629 }
4630
4631
4632 void Code::set_safepoint_table_offset(unsigned offset) {
4633 CHECK(offset <= (1 << kSafepointTableOffsetBitCount));
4634 ASSERT(is_crankshafted());
4635 ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
4636 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4637 int updated = SafepointTableOffsetField::update(previous, offset);
4638 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
4639 }
4640
4641
4642 unsigned Code::back_edge_table_offset() {
4643 ASSERT_EQ(FUNCTION, kind());
4644 return BackEdgeTableOffsetField::decode(
4645 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
4646 }
4647
4648
4649 void Code::set_back_edge_table_offset(unsigned offset) {
4650 ASSERT_EQ(FUNCTION, kind());
4651 ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
4652 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4653 int updated = BackEdgeTableOffsetField::update(previous, offset);
4654 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
4655 }
4656
4657
4658 bool Code::back_edges_patched_for_osr() {
4659 ASSERT_EQ(FUNCTION, kind());
4660 return BackEdgesPatchedForOSRField::decode(
4661 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
4662 }
4663
4664
4665 void Code::set_back_edges_patched_for_osr(bool value) {
4666 ASSERT_EQ(FUNCTION, kind());
4667 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4668 int updated = BackEdgesPatchedForOSRField::update(previous, value);
4669 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
4670 }
4671
4673
4674 byte Code::to_boolean_state() {
4675 return extra_ic_state();
4676 }
4677
4678
4679 bool Code::has_function_cache() {
4680 ASSERT(kind() == STUB);
4681 return HasFunctionCacheField::decode(
4682 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4683 }
4684
4685
4686 void Code::set_has_function_cache(bool flag) {
4687 ASSERT(kind() == STUB);
4688 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4689 int updated = HasFunctionCacheField::update(previous, flag);
4690 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4691 }
4692
4693
4694 bool Code::marked_for_deoptimization() {
4695 ASSERT(kind() == OPTIMIZED_FUNCTION);
4696 return MarkedForDeoptimizationField::decode(
4697 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4698 }
4699
4700
4701 void Code::set_marked_for_deoptimization(bool flag) {
4702 ASSERT(kind() == OPTIMIZED_FUNCTION);
4703 ASSERT(!flag || AllowDeoptimization::IsAllowed(GetIsolate()));
4704 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4705 int updated = MarkedForDeoptimizationField::update(previous, flag);
4706 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4707 }
4708
4709
4710 bool Code::is_weak_stub() {
4711 return CanBeWeakStub() && WeakStubField::decode(
4712 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4713 }
4714
4715
4716 void Code::mark_as_weak_stub() {
4717 ASSERT(CanBeWeakStub());
4718 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4719 int updated = WeakStubField::update(previous, true);
4720 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4721 }
4722
4723
4724 bool Code::is_invalidated_weak_stub() {
4725 return is_weak_stub() && InvalidatedWeakStubField::decode(
4726 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4727 }
4728
4729
4730 void Code::mark_as_invalidated_weak_stub() {
4731 ASSERT(is_inline_cache_stub());
4732 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4733 int updated = InvalidatedWeakStubField::update(previous, true);
4734 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4735 }
4736
4737
4738 bool Code::is_inline_cache_stub() {
4739 Kind kind = this->kind();
4740 switch (kind) {
4741 #define CASE(name) case name: return true;
4742 IC_KIND_LIST(CASE)
4743 #undef CASE
4744 default: return false;
4745 }
4746 }
4747
4748
4749 bool Code::is_keyed_stub() {
4750 return is_keyed_load_stub() || is_keyed_store_stub();
4751 }
4752
4753
4754 bool Code::is_debug_stub() {
4755 return ic_state() == DEBUG_STUB;
4756 }
4757
4758
4759 ConstantPoolArray* Code::constant_pool() {
4760 return ConstantPoolArray::cast(READ_FIELD(this, kConstantPoolOffset));
4761 }
4762
4763
4764 void Code::set_constant_pool(Object* value) {
4765 ASSERT(value->IsConstantPoolArray());
4766 WRITE_FIELD(this, kConstantPoolOffset, value);
4767 WRITE_BARRIER(GetHeap(), this, kConstantPoolOffset, value);
4768 }
4769
4770
4771 Code::Flags Code::ComputeFlags(Kind kind,
4772 InlineCacheState ic_state,
4773 ExtraICState extra_ic_state,
4774 StubType type,
4775 InlineCacheHolderFlag holder) {
4776 // Compute the bit mask.
4777 unsigned int bits = KindField::encode(kind)
4778 | ICStateField::encode(ic_state)
4779 | TypeField::encode(type)
4780 | ExtraICStateField::encode(extra_ic_state)
4781 | CacheHolderField::encode(holder);
4782 return static_cast<Flags>(bits);
4783 }
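
// Usage sketch (hypothetical arguments; kNoExtraICState, Code::NORMAL and
// OWN_MAP are assumed to be the usual values declared in objects.h):
// flags are plain ORed bit fields, so composing and extracting round-trip.
//
//   Code::Flags f = Code::ComputeFlags(Code::STUB, MONOMORPHIC,
//                                      kNoExtraICState, Code::NORMAL,
//                                      OWN_MAP);
//   ASSERT(Code::ExtractKindFromFlags(f) == Code::STUB);
//   ASSERT(Code::ExtractICStateFromFlags(f) == MONOMORPHIC);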
4784
4785
4786 Code::Flags Code::ComputeMonomorphicFlags(Kind kind,
4787 ExtraICState extra_ic_state,
4788 InlineCacheHolderFlag holder,
4789 StubType type) {
4790 return ComputeFlags(kind, MONOMORPHIC, extra_ic_state, type, holder);
4791 }
4792
4793
4794 Code::Flags Code::ComputeHandlerFlags(Kind handler_kind,
4795 StubType type,
4796 InlineCacheHolderFlag holder) {
4797 return ComputeFlags(Code::HANDLER, MONOMORPHIC, handler_kind, type, holder);
4798 }
4799
4800
4801 Code::Kind Code::ExtractKindFromFlags(Flags flags) {
4802 return KindField::decode(flags);
4803 }
4804
4805
4806 InlineCacheState Code::ExtractICStateFromFlags(Flags flags) {
4807 return ICStateField::decode(flags);
4808 }
4809
4810
4811 ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
4812 return ExtraICStateField::decode(flags);
4813 }
4814
4815
4816 Code::StubType Code::ExtractTypeFromFlags(Flags flags) {
4817 return TypeField::decode(flags);
4818 }
4819
4820
4821 InlineCacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) {
4822 return CacheHolderField::decode(flags);
4823 }
4824
4825
4826 Code::Flags Code::RemoveTypeFromFlags(Flags flags) {
4827 int bits = flags & ~TypeField::kMask;
4828 return static_cast<Flags>(bits);
4829 }
4830
4831
4832 Code* Code::GetCodeFromTargetAddress(Address address) {
4833 HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
4834 // GetCodeFromTargetAddress might be called when marking objects during
4835 // mark-sweep, so reinterpret_cast is used instead of the more appropriate
4836 // Code::cast, which does not work while the object's map is marked.
4837
4838 Code* result = reinterpret_cast<Code*>(code);
4839 return result;
4840 }
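
// Round-trip sketch (illustrative, assuming instruction_start() returns
// the address just past the header, as declared elsewhere):
//
//   Address entry = code->instruction_start();
//   ASSERT(Code::GetCodeFromTargetAddress(entry) == code);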
4841
4842
4843 Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
4844 return HeapObject::
4845 FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
4846 }
4847
4848
4849 bool Code::IsWeakObjectInOptimizedCode(Object* object) {
4850 if (!FLAG_collect_maps) return false;
4851 if (object->IsMap()) {
4852 return Map::cast(object)->CanTransition() &&
4853 FLAG_weak_embedded_maps_in_optimized_code;
4854 }
4855 if (object->IsJSObject() ||
4856 (object->IsCell() && Cell::cast(object)->value()->IsJSObject())) {
4857 return FLAG_weak_embedded_objects_in_optimized_code;
4858 }
4859 return false;
4860 }
4861
4862
4863 class Code::FindAndReplacePattern {
4864 public:
4865 FindAndReplacePattern() : count_(0) { }
4866 void Add(Handle<Map> map_to_find, Handle<Object> obj_to_replace) {
4867 ASSERT(count_ < kMaxCount);
4868 find_[count_] = map_to_find;
4869 replace_[count_] = obj_to_replace;
4870 ++count_;
4871 }
4872 private:
4873 static const int kMaxCount = 4;
4874 int count_;
4875 Handle<Map> find_[kMaxCount];
4876 Handle<Object> replace_[kMaxCount];
4877 friend class Code;
4878 };
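
// Usage sketch (hypothetical handles): up to kMaxCount map -> object
// substitutions are batched up and later applied by Code, which is a
// friend of this class (via Code::FindAndReplace, declared in objects.h):
//
//   Code::FindAndReplacePattern pattern;
//   pattern.Add(handle(old_map), handle(new_object));
//   code->FindAndReplace(pattern);  // assumed entry point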
4879
4880
4881 bool Code::IsWeakObjectInIC(Object* object) {
4882 return object->IsMap() && Map::cast(object)->CanTransition() &&
4883 FLAG_collect_maps &&
4884 FLAG_weak_embedded_maps_in_ic;
4885 }
4886
4887
4888 Object* Map::prototype() {
4889 return READ_FIELD(this, kPrototypeOffset);
4890 }
4891
4892
4893 void Map::set_prototype(Object* value, WriteBarrierMode mode) {
4894 ASSERT(value->IsNull() || value->IsJSReceiver());
4895 WRITE_FIELD(this, kPrototypeOffset, value);
4896 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
4897 }
4898
4899
4900 // If the map is still using the empty transition array, install a new,
4901 // empty transition array that has room for an element transition.
4902 static void EnsureHasTransitionArray(Handle<Map> map) {
4903 Handle<TransitionArray> transitions;
4904 if (!map->HasTransitionArray()) {
4905 transitions = TransitionArray::Allocate(map->GetIsolate(), 0);
4906 transitions->set_back_pointer_storage(map->GetBackPointer());
4907 } else if (!map->transitions()->IsFullTransitionArray()) {
4908 transitions = TransitionArray::ExtendToFullTransitionArray(map);
4909 } else {
4910 return;
4911 }
4912 map->set_transitions(*transitions);
4913 }
4914
4915
4916 void Map::InitializeDescriptors(DescriptorArray* descriptors) {
4917 int len = descriptors->number_of_descriptors();
4918 set_instance_descriptors(descriptors);
4919 SetNumberOfOwnDescriptors(len);
4920 }
4921
4922
4923 ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset)
4924
4925
4926 void Map::set_bit_field3(uint32_t bits) {
4927 if (kInt32Size != kPointerSize) {
4928 WRITE_UINT32_FIELD(this, kBitField3Offset + kInt32Size, 0);
4929 }
4930 WRITE_UINT32_FIELD(this, kBitField3Offset, bits);
4931 }
4932
4933
4934 uint32_t Map::bit_field3() {
4935 return READ_UINT32_FIELD(this, kBitField3Offset);
4936 }
4937
4938
4939 void Map::AppendDescriptor(Descriptor* desc) {
4940 DescriptorArray* descriptors = instance_descriptors();
4941 int number_of_own_descriptors = NumberOfOwnDescriptors();
4942 ASSERT(descriptors->number_of_descriptors() == number_of_own_descriptors);
4943 descriptors->Append(desc);
4944 SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);
4945 }
4946
4947
4948 Object* Map::GetBackPointer() {
4949 Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
4950 if (object->IsDescriptorArray()) {
4951 return TransitionArray::cast(object)->back_pointer_storage();
4952 } else {
4953 ASSERT(object->IsMap() || object->IsUndefined());
4954 return object;
4955 }
4956 }
4957
4958
4959 bool Map::HasElementsTransition() {
4960 return HasTransitionArray() && transitions()->HasElementsTransition();
4961 }
4962
4963
4964 bool Map::HasTransitionArray() {
4965 Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
4966 return object->IsTransitionArray();
4967 }
4968
4969
4970 Map* Map::elements_transition_map() {
4971 int index = transitions()->Search(GetHeap()->elements_transition_symbol());
4972 return transitions()->GetTarget(index);
4973 }
4974
4975
4976 bool Map::CanHaveMoreTransitions() {
4977 if (!HasTransitionArray()) return true;
4978 return FixedArray::SizeFor(transitions()->length() +
4979 TransitionArray::kTransitionSize)
4980 <= Page::kMaxRegularHeapObjectSize;
4981 }
4982
4983
GetTransition(int transition_index)4984 Map* Map::GetTransition(int transition_index) {
4985 return transitions()->GetTarget(transition_index);
4986 }
4987
4988
SearchTransition(Name * name)4989 int Map::SearchTransition(Name* name) {
4990 if (HasTransitionArray()) return transitions()->Search(name);
4991 return TransitionArray::kNotFound;
4992 }
4993
4994
GetPrototypeTransitions()4995 FixedArray* Map::GetPrototypeTransitions() {
4996 if (!HasTransitionArray()) return GetHeap()->empty_fixed_array();
4997 if (!transitions()->HasPrototypeTransitions()) {
4998 return GetHeap()->empty_fixed_array();
4999 }
5000 return transitions()->GetPrototypeTransitions();
5001 }
5002
5003
SetPrototypeTransitions(Handle<Map> map,Handle<FixedArray> proto_transitions)5004 void Map::SetPrototypeTransitions(
5005 Handle<Map> map, Handle<FixedArray> proto_transitions) {
5006 EnsureHasTransitionArray(map);
5007 int old_number_of_transitions = map->NumberOfProtoTransitions();
5008 #ifdef DEBUG
5009 if (map->HasPrototypeTransitions()) {
5010 ASSERT(map->GetPrototypeTransitions() != *proto_transitions);
5011 map->ZapPrototypeTransitions();
5012 }
5013 #endif
5014 map->transitions()->SetPrototypeTransitions(*proto_transitions);
5015 map->SetNumberOfProtoTransitions(old_number_of_transitions);
5016 }
5017
5018
HasPrototypeTransitions()5019 bool Map::HasPrototypeTransitions() {
5020 return HasTransitionArray() && transitions()->HasPrototypeTransitions();
5021 }
5022
5023
transitions()5024 TransitionArray* Map::transitions() {
5025 ASSERT(HasTransitionArray());
5026 Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
5027 return TransitionArray::cast(object);
5028 }
5029
5030
set_transitions(TransitionArray * transition_array,WriteBarrierMode mode)5031 void Map::set_transitions(TransitionArray* transition_array,
5032 WriteBarrierMode mode) {
  // Transition arrays are not shared. When one is replaced, it should not
  // keep referenced objects alive, so we zap it.
  // Beware: if another reference to the replaced array still exists somewhere
  // (e.g. in a handle), zapping turns that stale reference from a mere waste
  // of memory into a source of crashes.
  if (HasTransitionArray()) {
#ifdef DEBUG
    for (int i = 0; i < transitions()->number_of_transitions(); i++) {
      Map* target = transitions()->GetTarget(i);
      if (target->instance_descriptors() == instance_descriptors()) {
        Name* key = transitions()->GetKey(i);
        int new_target_index = transition_array->Search(key);
        ASSERT(new_target_index != TransitionArray::kNotFound);
        ASSERT(transition_array->GetTarget(new_target_index) == target);
      }
    }
#endif
    ASSERT(transitions() != transition_array);
    ZapTransitions();
  }

  WRITE_FIELD(this, kTransitionsOrBackPointerOffset, transition_array);
  CONDITIONAL_WRITE_BARRIER(
      GetHeap(), this, kTransitionsOrBackPointerOffset, transition_array, mode);
}


void Map::init_back_pointer(Object* undefined) {
  ASSERT(undefined->IsUndefined());
  WRITE_FIELD(this, kTransitionsOrBackPointerOffset, undefined);
}


void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
  ASSERT(instance_type() >= FIRST_JS_RECEIVER_TYPE);
  ASSERT((value->IsUndefined() && GetBackPointer()->IsMap()) ||
         (value->IsMap() && GetBackPointer()->IsUndefined()));
  Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
  if (object->IsTransitionArray()) {
    TransitionArray::cast(object)->set_back_pointer_storage(value);
  } else {
    WRITE_FIELD(this, kTransitionsOrBackPointerOffset, value);
    CONDITIONAL_WRITE_BARRIER(
        GetHeap(), this, kTransitionsOrBackPointerOffset, value, mode);
  }
}


ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
ACCESSORS(Map, dependent_code, DependentCode, kDependentCodeOffset)
ACCESSORS(Map, constructor, Object, kConstructorOffset)

ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
ACCESSORS(JSFunction, literals_or_bindings, FixedArray, kLiteralsOffset)
ACCESSORS(JSFunction, next_function_link, Object, kNextFunctionLinkOffset)

ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
ACCESSORS(GlobalObject, native_context, Context, kNativeContextOffset)
ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset)
ACCESSORS(GlobalObject, global_receiver, JSObject, kGlobalReceiverOffset)

ACCESSORS(JSGlobalProxy, native_context, Object, kNativeContextOffset)
ACCESSORS(JSGlobalProxy, hash, Object, kHashOffset)

ACCESSORS(AccessorInfo, name, Object, kNameOffset)
ACCESSORS_TO_SMI(AccessorInfo, flag, kFlagOffset)
ACCESSORS(AccessorInfo, expected_receiver_type, Object,
          kExpectedReceiverTypeOffset)

ACCESSORS(DeclaredAccessorDescriptor, serialized_data, ByteArray,
          kSerializedDataOffset)

ACCESSORS(DeclaredAccessorInfo, descriptor, DeclaredAccessorDescriptor,
          kDescriptorOffset)

ACCESSORS(ExecutableAccessorInfo, getter, Object, kGetterOffset)
ACCESSORS(ExecutableAccessorInfo, setter, Object, kSetterOffset)
ACCESSORS(ExecutableAccessorInfo, data, Object, kDataOffset)

ACCESSORS(Box, value, Object, kValueOffset)

ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
ACCESSORS(AccessorPair, setter, Object, kSetterOffset)
ACCESSORS_TO_SMI(AccessorPair, access_flags, kAccessFlagsOffset)

ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)

ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
ACCESSORS(InterceptorInfo, data, Object, kDataOffset)

ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)

ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
ACCESSORS(TemplateInfo, property_accessors, Object, kPropertyAccessorsOffset)

ACCESSORS(FunctionTemplateInfo, serial_number, Object, kSerialNumberOffset)
ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
          kPrototypeTemplateOffset)
ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
          kNamedPropertyHandlerOffset)
ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
          kIndexedPropertyHandlerOffset)
ACCESSORS(FunctionTemplateInfo, instance_template, Object,
          kInstanceTemplateOffset)
ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
          kInstanceCallHandlerOffset)
ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
          kAccessCheckInfoOffset)
ACCESSORS_TO_SMI(FunctionTemplateInfo, flag, kFlagOffset)

ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
          kInternalFieldCountOffset)

ACCESSORS(SignatureInfo, receiver, Object, kReceiverOffset)
ACCESSORS(SignatureInfo, args, Object, kArgsOffset)

ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset)

ACCESSORS(AllocationSite, transition_info, Object, kTransitionInfoOffset)
ACCESSORS(AllocationSite, nested_site, Object, kNestedSiteOffset)
ACCESSORS_TO_SMI(AllocationSite, pretenure_data, kPretenureDataOffset)
ACCESSORS_TO_SMI(AllocationSite, pretenure_create_count,
                 kPretenureCreateCountOffset)
ACCESSORS(AllocationSite, dependent_code, DependentCode,
          kDependentCodeOffset)
ACCESSORS(AllocationSite, weak_next, Object, kWeakNextOffset)
ACCESSORS(AllocationMemento, allocation_site, Object, kAllocationSiteOffset)

ACCESSORS(Script, source, Object, kSourceOffset)
ACCESSORS(Script, name, Object, kNameOffset)
ACCESSORS(Script, id, Smi, kIdOffset)
ACCESSORS_TO_SMI(Script, line_offset, kLineOffsetOffset)
ACCESSORS_TO_SMI(Script, column_offset, kColumnOffsetOffset)
ACCESSORS(Script, context_data, Object, kContextOffset)
ACCESSORS(Script, wrapper, Foreign, kWrapperOffset)
ACCESSORS_TO_SMI(Script, type, kTypeOffset)
ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
ACCESSORS_TO_SMI(Script, eval_from_instructions_offset,
                 kEvalFrominstructionsOffsetOffset)
ACCESSORS_TO_SMI(Script, flags, kFlagsOffset)
BOOL_ACCESSORS(Script, flags, is_shared_cross_origin, kIsSharedCrossOriginBit)

Script::CompilationType Script::compilation_type() {
  return BooleanBit::get(flags(), kCompilationTypeBit) ?
      COMPILATION_TYPE_EVAL : COMPILATION_TYPE_HOST;
}
void Script::set_compilation_type(CompilationType type) {
  set_flags(BooleanBit::set(flags(), kCompilationTypeBit,
      type == COMPILATION_TYPE_EVAL));
}
Script::CompilationState Script::compilation_state() {
  return BooleanBit::get(flags(), kCompilationStateBit) ?
      COMPILATION_STATE_COMPILED : COMPILATION_STATE_INITIAL;
}
void Script::set_compilation_state(CompilationState state) {
  set_flags(BooleanBit::set(flags(), kCompilationStateBit,
      state == COMPILATION_STATE_COMPILED));
}


ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
ACCESSORS(DebugInfo, original_code, Code, kOriginalCodeIndex)
ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex)
ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)

ACCESSORS_TO_SMI(BreakPointInfo, code_position, kCodePositionIndex)
ACCESSORS_TO_SMI(BreakPointInfo, source_position, kSourcePositionIndex)
ACCESSORS_TO_SMI(BreakPointInfo, statement_position, kStatementPositionIndex)
ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)

ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
ACCESSORS(SharedFunctionInfo, optimized_code_map, Object,
          kOptimizedCodeMapOffset)
ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
ACCESSORS(SharedFunctionInfo, feedback_vector, FixedArray,
          kFeedbackVectorOffset)
ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
          kInstanceClassNameOffset)
ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)


SMI_ACCESSORS(FunctionTemplateInfo, length, kLengthOffset)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
               kHiddenPrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
               kNeedsAccessCheckBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype,
               kReadOnlyPrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, remove_prototype,
               kRemovePrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, do_not_cache,
               kDoNotCacheBit)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
               kIsExpressionBit)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
               kIsTopLevelBit)

BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               allows_lazy_compilation,
               kAllowLazyCompilation)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               allows_lazy_compilation_without_context,
               kAllowLazyCompilationWithoutContext)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               uses_arguments,
               kUsesArguments)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               has_duplicate_parameters,
               kHasDuplicateParameters)


#if V8_HOST_ARCH_32_BIT
SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
SMI_ACCESSORS(SharedFunctionInfo, formal_parameter_count,
              kFormalParameterCountOffset)
SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
              kExpectedNofPropertiesOffset)
SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
              kStartPositionAndTypeOffset)
SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
              kFunctionTokenPositionOffset)
SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
              kCompilerHintsOffset)
SMI_ACCESSORS(SharedFunctionInfo, opt_count_and_bailout_reason,
              kOptCountAndBailoutReasonOffset)
SMI_ACCESSORS(SharedFunctionInfo, counters, kCountersOffset)
SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
SMI_ACCESSORS(SharedFunctionInfo, profiler_ticks, kProfilerTicksOffset)

#else

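// On 64-bit hosts, pairs of these int fields share a single pointer-sized
// slot. The LO accessor covers the tagged (pointer-aligned) half: its value
// is stored shifted left by one with the low bit clear, so the whole slot
// still looks like a Smi to the GC (e.g. 5 is stored as binary 1010). The HI
// accessor covers the untagged upper half, which can hold a plain int.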
#define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset)     \
  STATIC_ASSERT(holder::offset % kPointerSize == 0);      \
  int holder::name() {                                    \
    int value = READ_INT_FIELD(this, offset);             \
    ASSERT(kHeapObjectTag == 1);                          \
    ASSERT((value & kHeapObjectTag) == 0);                \
    return value >> 1;                                    \
  }                                                       \
  void holder::set_##name(int value) {                    \
    ASSERT(kHeapObjectTag == 1);                          \
    ASSERT((value & 0xC0000000) == 0xC0000000 ||          \
           (value & 0xC0000000) == 0x0);                  \
    WRITE_INT_FIELD(this,                                 \
                    offset,                               \
                    (value << 1) & ~kHeapObjectTag);      \
  }

#define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset)         \
  STATIC_ASSERT(holder::offset % kPointerSize == kIntSize);   \
  INT_ACCESSORS(holder, name, offset)


PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        formal_parameter_count,
                        kFormalParameterCountOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        expected_nof_properties,
                        kExpectedNofPropertiesOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        start_position_and_type,
                        kStartPositionAndTypeOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        function_token_position,
                        kFunctionTokenPositionOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        compiler_hints,
                        kCompilerHintsOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        opt_count_and_bailout_reason,
                        kOptCountAndBailoutReasonOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, counters, kCountersOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        ast_node_count,
                        kAstNodeCountOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        profiler_ticks,
                        kProfilerTicksOffset)

#endif


BOOL_GETTER(SharedFunctionInfo,
            compiler_hints,
            optimization_disabled,
            kOptimizationDisabled)


void SharedFunctionInfo::set_optimization_disabled(bool disable) {
  set_compiler_hints(BooleanBit::set(compiler_hints(),
                                     kOptimizationDisabled,
                                     disable));
  // When disabling optimization, reflect that in the code object so that it
  // will not be counted as optimizable code.
  if ((code()->kind() == Code::FUNCTION) && disable) {
    code()->set_optimizable(false);
  }
}


StrictMode SharedFunctionInfo::strict_mode() {
  return BooleanBit::get(compiler_hints(), kStrictModeFunction)
      ? STRICT : SLOPPY;
}


void SharedFunctionInfo::set_strict_mode(StrictMode strict_mode) {
  // We only allow mode transitions from sloppy to strict.
  ASSERT(this->strict_mode() == SLOPPY || this->strict_mode() == strict_mode);
  int hints = compiler_hints();
  hints = BooleanBit::set(hints, kStrictModeFunction, strict_mode == STRICT);
  set_compiler_hints(hints);
}


BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, inline_builtin,
               kInlineBuiltin)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
               name_should_print_as_anonymous,
               kNameShouldPrintAsAnonymous)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_function, kIsFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_optimize,
               kDontOptimize)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_inline, kDontInline)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_cache, kDontCache)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_flush, kDontFlush)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_generator, kIsGenerator)

ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)

ACCESSORS(PolymorphicCodeCache, cache, Object, kCacheOffset)

bool Script::HasValidSource() {
  Object* src = this->source();
  if (!src->IsString()) return true;
  String* src_str = String::cast(src);
  if (!StringShape(src_str).IsExternal()) return true;
  if (src_str->IsOneByteRepresentation()) {
    return ExternalAsciiString::cast(src)->resource() != NULL;
  } else if (src_str->IsTwoByteRepresentation()) {
    return ExternalTwoByteString::cast(src)->resource() != NULL;
  }
  return true;
}


void SharedFunctionInfo::DontAdaptArguments() {
  ASSERT(code()->kind() == Code::BUILTIN);
  set_formal_parameter_count(kDontAdaptArgumentsSentinel);
}


int SharedFunctionInfo::start_position() {
  return start_position_and_type() >> kStartPositionShift;
}


void SharedFunctionInfo::set_start_position(int start_position) {
  set_start_position_and_type((start_position << kStartPositionShift)
      | (start_position_and_type() & ~kStartPositionMask));
}


Code* SharedFunctionInfo::code() {
  return Code::cast(READ_FIELD(this, kCodeOffset));
}


void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
  ASSERT(value->kind() != Code::OPTIMIZED_FUNCTION);
  WRITE_FIELD(this, kCodeOffset, value);
  CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);
}


void SharedFunctionInfo::ReplaceCode(Code* value) {
  // If the GC metadata field is already used then the function was
  // enqueued as a code flushing candidate and we remove it now.
  if (code()->gc_metadata() != NULL) {
    CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher();
    flusher->EvictCandidate(this);
  }

  ASSERT(code()->gc_metadata() == NULL && value->gc_metadata() == NULL);

  set_code(value);
}


ScopeInfo* SharedFunctionInfo::scope_info() {
  return reinterpret_cast<ScopeInfo*>(READ_FIELD(this, kScopeInfoOffset));
}


void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
                                        WriteBarrierMode mode) {
  WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
  CONDITIONAL_WRITE_BARRIER(GetHeap(),
                            this,
                            kScopeInfoOffset,
                            reinterpret_cast<Object*>(value),
                            mode);
}


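// "Compiled" here means the shared code is anything other than the lazy
// CompileUnoptimized builtin stub.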
bool SharedFunctionInfo::is_compiled() {
  return code() !=
      GetIsolate()->builtins()->builtin(Builtins::kCompileUnoptimized);
}


bool SharedFunctionInfo::IsApiFunction() {
  return function_data()->IsFunctionTemplateInfo();
}


FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
  ASSERT(IsApiFunction());
  return FunctionTemplateInfo::cast(function_data());
}


bool SharedFunctionInfo::HasBuiltinFunctionId() {
  return function_data()->IsSmi();
}


BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
  ASSERT(HasBuiltinFunctionId());
  return static_cast<BuiltinFunctionId>(Smi::cast(function_data())->value());
}


int SharedFunctionInfo::ic_age() {
  return ICAgeBits::decode(counters());
}


void SharedFunctionInfo::set_ic_age(int ic_age) {
  set_counters(ICAgeBits::update(counters(), ic_age));
}


int SharedFunctionInfo::deopt_count() {
  return DeoptCountBits::decode(counters());
}


void SharedFunctionInfo::set_deopt_count(int deopt_count) {
  set_counters(DeoptCountBits::update(counters(), deopt_count));
}


void SharedFunctionInfo::increment_deopt_count() {
  int value = counters();
  int deopt_count = DeoptCountBits::decode(value);
  deopt_count = (deopt_count + 1) & DeoptCountBits::kMax;
  set_counters(DeoptCountBits::update(value, deopt_count));
}


int SharedFunctionInfo::opt_reenable_tries() {
  return OptReenableTriesBits::decode(counters());
}


void SharedFunctionInfo::set_opt_reenable_tries(int tries) {
  set_counters(OptReenableTriesBits::update(counters(), tries));
}


int SharedFunctionInfo::opt_count() {
  return OptCountBits::decode(opt_count_and_bailout_reason());
}


void SharedFunctionInfo::set_opt_count(int opt_count) {
  set_opt_count_and_bailout_reason(
      OptCountBits::update(opt_count_and_bailout_reason(), opt_count));
}


BailoutReason SharedFunctionInfo::DisableOptimizationReason() {
  BailoutReason reason = static_cast<BailoutReason>(
      DisabledOptimizationReasonBits::decode(opt_count_and_bailout_reason()));
  return reason;
}


bool SharedFunctionInfo::has_deoptimization_support() {
  Code* code = this->code();
  return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
}


void SharedFunctionInfo::TryReenableOptimization() {
  int tries = opt_reenable_tries();
  set_opt_reenable_tries((tries + 1) & OptReenableTriesBits::kMax);
  // We reenable optimization whenever the number of tries is a large
  // enough power of 2.
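  // ((tries - 1) & tries) == 0 exactly when tries is a power of two, so the
  // check below fires at tries == 16, 32, 64, ... (an exponential backoff).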
  if (tries >= 16 && (((tries - 1) & tries) == 0)) {
    set_optimization_disabled(false);
    set_opt_count(0);
    set_deopt_count(0);
    code()->set_optimizable(true);
  }
}


bool JSFunction::IsBuiltin() {
  return context()->global_object()->IsJSBuiltinsObject();
}


bool JSFunction::IsNative() {
  Object* script = shared()->script();
  bool native = script->IsScript() &&
      Script::cast(script)->type()->value() == Script::TYPE_NATIVE;
  ASSERT(!IsBuiltin() || native);  // All builtins are also native.
  return native;
}


bool JSFunction::NeedsArgumentsAdaption() {
  return shared()->formal_parameter_count() !=
      SharedFunctionInfo::kDontAdaptArgumentsSentinel;
}


bool JSFunction::IsOptimized() {
  return code()->kind() == Code::OPTIMIZED_FUNCTION;
}


bool JSFunction::IsOptimizable() {
  return code()->kind() == Code::FUNCTION && code()->optimizable();
}


bool JSFunction::IsMarkedForOptimization() {
  return code() == GetIsolate()->builtins()->builtin(
      Builtins::kCompileOptimized);
}


bool JSFunction::IsMarkedForConcurrentOptimization() {
  return code() == GetIsolate()->builtins()->builtin(
      Builtins::kCompileOptimizedConcurrent);
}


bool JSFunction::IsInOptimizationQueue() {
  return code() == GetIsolate()->builtins()->builtin(
      Builtins::kInOptimizationQueue);
}


bool JSFunction::IsInobjectSlackTrackingInProgress() {
  return has_initial_map() &&
      initial_map()->construction_count() != JSFunction::kNoSlackTracking;
}


Code* JSFunction::code() {
  return Code::cast(
      Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
}


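// kCodeEntryOffset holds the raw address of the code's first instruction
// rather than a tagged pointer, so the regular write barrier does not apply;
// the incremental marker is told about the update explicitly below.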
void JSFunction::set_code(Code* value) {
  ASSERT(!GetHeap()->InNewSpace(value));
  Address entry = value->entry();
  WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
  GetHeap()->incremental_marking()->RecordWriteOfCodeEntry(
      this,
      HeapObject::RawField(this, kCodeEntryOffset),
      value);
}


void JSFunction::set_code_no_write_barrier(Code* value) {
  ASSERT(!GetHeap()->InNewSpace(value));
  Address entry = value->entry();
  WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
}


void JSFunction::ReplaceCode(Code* code) {
  bool was_optimized = IsOptimized();
  bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;

  if (was_optimized && is_optimized) {
    shared()->EvictFromOptimizedCodeMap(this->code(),
        "Replacing with another optimized code");
  }

  set_code(code);

  // Add/remove the function from the list of optimized functions for this
  // context based on the state change.
  if (!was_optimized && is_optimized) {
    context()->native_context()->AddOptimizedFunction(this);
  }
  if (was_optimized && !is_optimized) {
    // TODO(titzer): linear in the number of optimized functions; fix!
    context()->native_context()->RemoveOptimizedFunction(this);
  }
}


Context* JSFunction::context() {
  return Context::cast(READ_FIELD(this, kContextOffset));
}


void JSFunction::set_context(Object* value) {
  ASSERT(value->IsUndefined() || value->IsContext());
  WRITE_FIELD(this, kContextOffset, value);
  WRITE_BARRIER(GetHeap(), this, kContextOffset, value);
}

ACCESSORS(JSFunction, prototype_or_initial_map, Object,
          kPrototypeOrInitialMapOffset)


Map* JSFunction::initial_map() {
  return Map::cast(prototype_or_initial_map());
}


void JSFunction::set_initial_map(Map* value) {
  set_prototype_or_initial_map(value);
}


bool JSFunction::has_initial_map() {
  return prototype_or_initial_map()->IsMap();
}


bool JSFunction::has_instance_prototype() {
  return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
}


bool JSFunction::has_prototype() {
  return map()->has_non_instance_prototype() || has_instance_prototype();
}


Object* JSFunction::instance_prototype() {
  ASSERT(has_instance_prototype());
  if (has_initial_map()) return initial_map()->prototype();
  // When there is no initial map and the prototype is a JSObject, the
  // initial map field is used for the prototype field.
  return prototype_or_initial_map();
}


Object* JSFunction::prototype() {
  ASSERT(has_prototype());
  // If the function's prototype property has been set to a non-JSObject
  // value, that value is stored in the constructor field of the map.
  if (map()->has_non_instance_prototype()) return map()->constructor();
  return instance_prototype();
}


bool JSFunction::should_have_prototype() {
  return map()->function_with_prototype();
}


bool JSFunction::is_compiled() {
  return code() !=
      GetIsolate()->builtins()->builtin(Builtins::kCompileUnoptimized);
}


FixedArray* JSFunction::literals() {
  ASSERT(!shared()->bound());
  return literals_or_bindings();
}


void JSFunction::set_literals(FixedArray* literals) {
  ASSERT(!shared()->bound());
  set_literals_or_bindings(literals);
}


FixedArray* JSFunction::function_bindings() {
  ASSERT(shared()->bound());
  return literals_or_bindings();
}


void JSFunction::set_function_bindings(FixedArray* bindings) {
  ASSERT(shared()->bound());
  // Bound function literal may be initialized to the empty fixed array
  // before the bindings are set.
  ASSERT(bindings == GetHeap()->empty_fixed_array() ||
         bindings->map() == GetHeap()->fixed_cow_array_map());
  set_literals_or_bindings(bindings);
}


int JSFunction::NumberOfLiterals() {
  ASSERT(!shared()->bound());
  return literals()->length();
}


Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  return READ_FIELD(this, OffsetOfFunctionWithId(id));
}


void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id,
                                              Object* value) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  WRITE_FIELD(this, OffsetOfFunctionWithId(id), value);
  WRITE_BARRIER(GetHeap(), this, OffsetOfFunctionWithId(id), value);
}


Code* JSBuiltinsObject::javascript_builtin_code(Builtins::JavaScript id) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  return Code::cast(READ_FIELD(this, OffsetOfCodeWithId(id)));
}


void JSBuiltinsObject::set_javascript_builtin_code(Builtins::JavaScript id,
                                                   Code* value) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  WRITE_FIELD(this, OffsetOfCodeWithId(id), value);
  ASSERT(!GetHeap()->InNewSpace(value));
}


ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
ACCESSORS(JSProxy, hash, Object, kHashOffset)
ACCESSORS(JSFunctionProxy, call_trap, Object, kCallTrapOffset)
ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset)


void JSProxy::InitializeBody(int object_size, Object* value) {
  ASSERT(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
    WRITE_FIELD(this, offset, value);
  }
}


ACCESSORS(JSSet, table, Object, kTableOffset)
ACCESSORS(JSMap, table, Object, kTableOffset)


#define ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(name, type, offset)    \
  template<class Derived, class TableType>                           \
  type* OrderedHashTableIterator<Derived, TableType>::name() {       \
    return type::cast(READ_FIELD(this, offset));                     \
  }                                                                  \
  template<class Derived, class TableType>                           \
  void OrderedHashTableIterator<Derived, TableType>::set_##name(     \
      type* value, WriteBarrierMode mode) {                          \
    WRITE_FIELD(this, offset, value);                                \
    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \
  }

ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(table, Object, kTableOffset)
ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(index, Smi, kIndexOffset)
ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(kind, Smi, kKindOffset)

#undef ORDERED_HASH_TABLE_ITERATOR_ACCESSORS


ACCESSORS(JSWeakCollection, table, Object, kTableOffset)
ACCESSORS(JSWeakCollection, next, Object, kNextOffset)


Address Foreign::foreign_address() {
  return AddressFrom<Address>(READ_INTPTR_FIELD(this, kForeignAddressOffset));
}


void Foreign::set_foreign_address(Address value) {
  WRITE_INTPTR_FIELD(this, kForeignAddressOffset, OffsetFrom(value));
}


ACCESSORS(JSGeneratorObject, function, JSFunction, kFunctionOffset)
ACCESSORS(JSGeneratorObject, context, Context, kContextOffset)
ACCESSORS(JSGeneratorObject, receiver, Object, kReceiverOffset)
SMI_ACCESSORS(JSGeneratorObject, continuation, kContinuationOffset)
ACCESSORS(JSGeneratorObject, operand_stack, FixedArray, kOperandStackOffset)
SMI_ACCESSORS(JSGeneratorObject, stack_handler_index, kStackHandlerIndexOffset)

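// continuation() encodes the generator's state: a positive value is the
// offset at which to resume a suspended generator, while the negative
// kGeneratorExecuting and zero kGeneratorClosed sentinels mark the other two
// states, as the ASSERTs below spell out.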
bool JSGeneratorObject::is_suspended() {
  ASSERT_LT(kGeneratorExecuting, kGeneratorClosed);
  ASSERT_EQ(kGeneratorClosed, 0);
  return continuation() > 0;
}

bool JSGeneratorObject::is_closed() {
  return continuation() == kGeneratorClosed;
}

bool JSGeneratorObject::is_executing() {
  return continuation() == kGeneratorExecuting;
}

JSGeneratorObject* JSGeneratorObject::cast(Object* obj) {
  ASSERT(obj->IsJSGeneratorObject());
  ASSERT(HeapObject::cast(obj)->Size() == JSGeneratorObject::kSize);
  return reinterpret_cast<JSGeneratorObject*>(obj);
}


ACCESSORS(JSModule, context, Object, kContextOffset)
ACCESSORS(JSModule, scope_info, ScopeInfo, kScopeInfoOffset)


JSModule* JSModule::cast(Object* obj) {
  ASSERT(obj->IsJSModule());
  ASSERT(HeapObject::cast(obj)->Size() == JSModule::kSize);
  return reinterpret_cast<JSModule*>(obj);
}


ACCESSORS(JSValue, value, Object, kValueOffset)


JSValue* JSValue::cast(Object* obj) {
  ASSERT(obj->IsJSValue());
  ASSERT(HeapObject::cast(obj)->Size() == JSValue::kSize);
  return reinterpret_cast<JSValue*>(obj);
}


ACCESSORS(JSDate, value, Object, kValueOffset)
ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset)
ACCESSORS(JSDate, year, Object, kYearOffset)
ACCESSORS(JSDate, month, Object, kMonthOffset)
ACCESSORS(JSDate, day, Object, kDayOffset)
ACCESSORS(JSDate, weekday, Object, kWeekdayOffset)
ACCESSORS(JSDate, hour, Object, kHourOffset)
ACCESSORS(JSDate, min, Object, kMinOffset)
ACCESSORS(JSDate, sec, Object, kSecOffset)


JSDate* JSDate::cast(Object* obj) {
  ASSERT(obj->IsJSDate());
  ASSERT(HeapObject::cast(obj)->Size() == JSDate::kSize);
  return reinterpret_cast<JSDate*>(obj);
}


ACCESSORS(JSMessageObject, type, String, kTypeOffset)
ACCESSORS(JSMessageObject, arguments, JSArray, kArgumentsOffset)
ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)


JSMessageObject* JSMessageObject::cast(Object* obj) {
  ASSERT(obj->IsJSMessageObject());
  ASSERT(HeapObject::cast(obj)->Size() == JSMessageObject::kSize);
  return reinterpret_cast<JSMessageObject*>(obj);
}


INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
INT_ACCESSORS(Code, prologue_offset, kPrologueOffset)
ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset)
ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
ACCESSORS(Code, raw_type_feedback_info, Object, kTypeFeedbackInfoOffset)
ACCESSORS(Code, next_code_link, Object, kNextCodeLinkOffset)


void Code::WipeOutHeader() {
  WRITE_FIELD(this, kRelocationInfoOffset, NULL);
  WRITE_FIELD(this, kHandlerTableOffset, NULL);
  WRITE_FIELD(this, kDeoptimizationDataOffset, NULL);
  WRITE_FIELD(this, kConstantPoolOffset, NULL);
  // Do not wipe out e.g. a minor key.
  if (!READ_FIELD(this, kTypeFeedbackInfoOffset)->IsSmi()) {
    WRITE_FIELD(this, kTypeFeedbackInfoOffset, NULL);
  }
}


Object* Code::type_feedback_info() {
  ASSERT(kind() == FUNCTION);
  return raw_type_feedback_info();
}


void Code::set_type_feedback_info(Object* value, WriteBarrierMode mode) {
  ASSERT(kind() == FUNCTION);
  set_raw_type_feedback_info(value, mode);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kTypeFeedbackInfoOffset,
                            value, mode);
}


int Code::stub_info() {
  ASSERT(kind() == COMPARE_IC || kind() == COMPARE_NIL_IC ||
         kind() == BINARY_OP_IC || kind() == LOAD_IC || kind() == CALL_IC);
  return Smi::cast(raw_type_feedback_info())->value();
}


void Code::set_stub_info(int value) {
  ASSERT(kind() == COMPARE_IC ||
         kind() == COMPARE_NIL_IC ||
         kind() == BINARY_OP_IC ||
         kind() == STUB ||
         kind() == LOAD_IC ||
         kind() == CALL_IC ||
         kind() == KEYED_LOAD_IC ||
         kind() == STORE_IC ||
         kind() == KEYED_STORE_IC);
  set_raw_type_feedback_info(Smi::FromInt(value));
}


ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset)
INT_ACCESSORS(Code, ic_age, kICAgeOffset)


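// A Code object's instruction stream begins immediately after its fixed-size
// header, so instruction_start() is simply the field address at kHeaderSize.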
byte* Code::instruction_start() {
  return FIELD_ADDR(this, kHeaderSize);
}


byte* Code::instruction_end() {
  return instruction_start() + instruction_size();
}


int Code::body_size() {
  return RoundUp(instruction_size(), kObjectAlignment);
}


ByteArray* Code::unchecked_relocation_info() {
  return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
}


byte* Code::relocation_start() {
  return unchecked_relocation_info()->GetDataStartAddress();
}


int Code::relocation_size() {
  return unchecked_relocation_info()->length();
}


byte* Code::entry() {
  return instruction_start();
}


bool Code::contains(byte* inner_pointer) {
  return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
}


ACCESSORS(JSArray, length, Object, kLengthOffset)


void* JSArrayBuffer::backing_store() {
  intptr_t ptr = READ_INTPTR_FIELD(this, kBackingStoreOffset);
  return reinterpret_cast<void*>(ptr);
}


void JSArrayBuffer::set_backing_store(void* value, WriteBarrierMode mode) {
  intptr_t ptr = reinterpret_cast<intptr_t>(value);
  WRITE_INTPTR_FIELD(this, kBackingStoreOffset, ptr);
}


ACCESSORS(JSArrayBuffer, byte_length, Object, kByteLengthOffset)
ACCESSORS_TO_SMI(JSArrayBuffer, flag, kFlagOffset)


bool JSArrayBuffer::is_external() {
  return BooleanBit::get(flag(), kIsExternalBit);
}


void JSArrayBuffer::set_is_external(bool value) {
  set_flag(BooleanBit::set(flag(), kIsExternalBit, value));
}


bool JSArrayBuffer::should_be_freed() {
  return BooleanBit::get(flag(), kShouldBeFreed);
}


void JSArrayBuffer::set_should_be_freed(bool value) {
  set_flag(BooleanBit::set(flag(), kShouldBeFreed, value));
}


ACCESSORS(JSArrayBuffer, weak_next, Object, kWeakNextOffset)
ACCESSORS(JSArrayBuffer, weak_first_view, Object, kWeakFirstViewOffset)


ACCESSORS(JSArrayBufferView, buffer, Object, kBufferOffset)
ACCESSORS(JSArrayBufferView, byte_offset, Object, kByteOffsetOffset)
ACCESSORS(JSArrayBufferView, byte_length, Object, kByteLengthOffset)
ACCESSORS(JSArrayBufferView, weak_next, Object, kWeakNextOffset)
ACCESSORS(JSTypedArray, length, Object, kLengthOffset)

ACCESSORS(JSRegExp, data, Object, kDataOffset)


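// A compiled regexp's data() is a FixedArray whose kTagIndex slot holds the
// regexp type as a Smi; undefined data means the regexp has not been
// compiled yet.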
JSRegExp::Type JSRegExp::TypeTag() {
  Object* data = this->data();
  if (data->IsUndefined()) return JSRegExp::NOT_COMPILED;
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
  return static_cast<JSRegExp::Type>(smi->value());
}


int JSRegExp::CaptureCount() {
  switch (TypeTag()) {
    case ATOM:
      return 0;
    case IRREGEXP:
      return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
    default:
      UNREACHABLE();
      return -1;
  }
}


JSRegExp::Flags JSRegExp::GetFlags() {
  ASSERT(this->data()->IsFixedArray());
  Object* data = this->data();
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
  return Flags(smi->value());
}


String* JSRegExp::Pattern() {
  ASSERT(this->data()->IsFixedArray());
  Object* data = this->data();
  String* pattern = String::cast(FixedArray::cast(data)->get(kSourceIndex));
  return pattern;
}


Object* JSRegExp::DataAt(int index) {
  ASSERT(TypeTag() != NOT_COMPILED);
  return FixedArray::cast(data())->get(index);
}


void JSRegExp::SetDataAt(int index, Object* value) {
  ASSERT(TypeTag() != NOT_COMPILED);
  ASSERT(index >= kDataIndex);  // Only implementation data can be set this way.
  FixedArray::cast(data())->set(index, value);
}


ElementsKind JSObject::GetElementsKind() {
  ElementsKind kind = map()->elements_kind();
#if DEBUG
  FixedArrayBase* fixed_array =
      reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));

  // If a GC was caused while constructing this object, the elements
  // pointer may point to a one pointer filler map.
  if (ElementsAreSafeToExamine()) {
    Map* map = fixed_array->map();
    ASSERT((IsFastSmiOrObjectElementsKind(kind) &&
            (map == GetHeap()->fixed_array_map() ||
             map == GetHeap()->fixed_cow_array_map())) ||
           (IsFastDoubleElementsKind(kind) &&
            (fixed_array->IsFixedDoubleArray() ||
             fixed_array == GetHeap()->empty_fixed_array())) ||
           (kind == DICTIONARY_ELEMENTS &&
            fixed_array->IsFixedArray() &&
            fixed_array->IsDictionary()) ||
           (kind > DICTIONARY_ELEMENTS));
    ASSERT((kind != SLOPPY_ARGUMENTS_ELEMENTS) ||
           (elements()->IsFixedArray() && elements()->length() >= 2));
  }
#endif
  return kind;
}


ElementsAccessor* JSObject::GetElementsAccessor() {
  return ElementsAccessor::ForKind(GetElementsKind());
}


bool JSObject::HasFastObjectElements() {
  return IsFastObjectElementsKind(GetElementsKind());
}


bool JSObject::HasFastSmiElements() {
  return IsFastSmiElementsKind(GetElementsKind());
}


bool JSObject::HasFastSmiOrObjectElements() {
  return IsFastSmiOrObjectElementsKind(GetElementsKind());
}


bool JSObject::HasFastDoubleElements() {
  return IsFastDoubleElementsKind(GetElementsKind());
}


bool JSObject::HasFastHoleyElements() {
  return IsFastHoleyElementsKind(GetElementsKind());
}


bool JSObject::HasFastElements() {
  return IsFastElementsKind(GetElementsKind());
}


bool JSObject::HasDictionaryElements() {
  return GetElementsKind() == DICTIONARY_ELEMENTS;
}


bool JSObject::HasSloppyArgumentsElements() {
  return GetElementsKind() == SLOPPY_ARGUMENTS_ELEMENTS;
}


bool JSObject::HasExternalArrayElements() {
  HeapObject* array = elements();
  ASSERT(array != NULL);
  return array->IsExternalArray();
}


#define EXTERNAL_ELEMENTS_CHECK(Type, type, TYPE, ctype, size)            \
  bool JSObject::HasExternal##Type##Elements() {                          \
    HeapObject* array = elements();                                       \
    ASSERT(array != NULL);                                                \
    if (!array->IsHeapObject())                                           \
      return false;                                                       \
    return array->map()->instance_type() == EXTERNAL_##TYPE##_ARRAY_TYPE; \
  }

TYPED_ARRAYS(EXTERNAL_ELEMENTS_CHECK)

#undef EXTERNAL_ELEMENTS_CHECK


bool JSObject::HasFixedTypedArrayElements() {
  HeapObject* array = elements();
  ASSERT(array != NULL);
  return array->IsFixedTypedArrayBase();
}


#define FIXED_TYPED_ELEMENTS_CHECK(Type, type, TYPE, ctype, size)         \
  bool JSObject::HasFixed##Type##Elements() {                             \
    HeapObject* array = elements();                                       \
    ASSERT(array != NULL);                                                \
    if (!array->IsHeapObject())                                           \
      return false;                                                       \
    return array->map()->instance_type() == FIXED_##TYPE##_ARRAY_TYPE;    \
  }

TYPED_ARRAYS(FIXED_TYPED_ELEMENTS_CHECK)

#undef FIXED_TYPED_ELEMENTS_CHECK


bool JSObject::HasNamedInterceptor() {
  return map()->has_named_interceptor();
}


bool JSObject::HasIndexedInterceptor() {
  return map()->has_indexed_interceptor();
}


NameDictionary* JSObject::property_dictionary() {
  ASSERT(!HasFastProperties());
  return NameDictionary::cast(properties());
}


SeededNumberDictionary* JSObject::element_dictionary() {
  ASSERT(HasDictionaryElements());
  return SeededNumberDictionary::cast(elements());
}


bool Name::IsHashFieldComputed(uint32_t field) {
  return (field & kHashNotComputedMask) == 0;
}


bool Name::HasHashCode() {
  return IsHashFieldComputed(hash_field());
}


uint32_t Name::Hash() {
  // Fast case: has hash code already been computed?
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field)) return field >> kHashShift;
  // Slow case: compute hash code and set it. Has to be a string.
  return String::cast(this)->ComputeAndSetHash();
}


StringHasher::StringHasher(int length, uint32_t seed)
    : length_(length),
      raw_running_hash_(seed),
      array_index_(0),
      is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
      is_first_char_(true) {
  ASSERT(FLAG_randomize_hashes || raw_running_hash_ == 0);
}


bool StringHasher::has_trivial_hash() {
  return length_ > String::kMaxHashCalcLength;
}


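// Jenkins's one-at-a-time hash: AddCharacterCore mixes one character into the
// running hash, and GetHashCore applies the final avalanche steps once all
// characters have been added.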
uint32_t StringHasher::AddCharacterCore(uint32_t running_hash, uint16_t c) {
  running_hash += c;
  running_hash += (running_hash << 10);
  running_hash ^= (running_hash >> 6);
  return running_hash;
}


uint32_t StringHasher::GetHashCore(uint32_t running_hash) {
  running_hash += (running_hash << 3);
  running_hash ^= (running_hash >> 11);
  running_hash += (running_hash << 15);
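  // A hash of zero is indistinguishable from "hash not yet computed", so
  // remap it to the nonzero kZeroHash sentinel.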
  if ((running_hash & String::kHashBitMask) == 0) {
    return kZeroHash;
  }
  return running_hash;
}


void StringHasher::AddCharacter(uint16_t c) {
  // Use the Jenkins one-at-a-time hash function to update the hash
  // for the given character.
  raw_running_hash_ = AddCharacterCore(raw_running_hash_, c);
}


bool StringHasher::UpdateIndex(uint16_t c) {
  ASSERT(is_array_index_);
  if (c < '0' || c > '9') {
    is_array_index_ = false;
    return false;
  }
  int d = c - '0';
  if (is_first_char_) {
    is_first_char_ = false;
    if (c == '0' && length_ > 1) {
      is_array_index_ = false;
      return false;
    }
  }
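  // Reject the digit if appending it could overflow the 32-bit index range:
  // 429496729 is 2^32 / 10 rounded down, and the ((d + 2) >> 3) term lowers
  // the bound by one for digits six and above.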
6355 if (array_index_ > 429496729U - ((d + 2) >> 3)) {
6356 is_array_index_ = false;
6357 return false;
6358 }
6359 array_index_ = array_index_ * 10 + d;
6360 return true;
6361 }
6362
6363
6364 template<typename Char>
AddCharacters(const Char * chars,int length)6365 inline void StringHasher::AddCharacters(const Char* chars, int length) {
6366 ASSERT(sizeof(Char) == 1 || sizeof(Char) == 2);
6367 int i = 0;
6368 if (is_array_index_) {
6369 for (; i < length; i++) {
6370 AddCharacter(chars[i]);
6371 if (!UpdateIndex(chars[i])) {
6372 i++;
6373 break;
6374 }
6375 }
6376 }
6377 for (; i < length; i++) {
6378 ASSERT(!is_array_index_);
6379 AddCharacter(chars[i]);
6380 }
6381 }
6382
6383
6384 template <typename schar>
HashSequentialString(const schar * chars,int length,uint32_t seed)6385 uint32_t StringHasher::HashSequentialString(const schar* chars,
6386 int length,
6387 uint32_t seed) {
6388 StringHasher hasher(length, seed);
6389 if (!hasher.has_trivial_hash()) hasher.AddCharacters(chars, length);
6390 return hasher.GetHashField();
6391 }
6392
6393
AsArrayIndex(uint32_t * index)6394 bool Name::AsArrayIndex(uint32_t* index) {
6395 return IsString() && String::cast(this)->AsArrayIndex(index);
6396 }
6397
6398
AsArrayIndex(uint32_t * index)6399 bool String::AsArrayIndex(uint32_t* index) {
6400 uint32_t field = hash_field();
6401 if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
6402 return false;
6403 }
6404 return SlowAsArrayIndex(index);
6405 }
6406
6407
GetPrototype()6408 Object* JSReceiver::GetPrototype() {
6409 return map()->prototype();
6410 }
6411
6412
GetConstructor()6413 Object* JSReceiver::GetConstructor() {
6414 return map()->constructor();
6415 }
6416
6417
HasProperty(Handle<JSReceiver> object,Handle<Name> name)6418 bool JSReceiver::HasProperty(Handle<JSReceiver> object,
6419 Handle<Name> name) {
6420 if (object->IsJSProxy()) {
6421 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
6422 return JSProxy::HasPropertyWithHandler(proxy, name);
6423 }
6424 return GetPropertyAttributes(object, name) != ABSENT;
6425 }
6426
6427
HasOwnProperty(Handle<JSReceiver> object,Handle<Name> name)6428 bool JSReceiver::HasOwnProperty(Handle<JSReceiver> object, Handle<Name> name) {
6429 if (object->IsJSProxy()) {
6430 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
6431 return JSProxy::HasPropertyWithHandler(proxy, name);
6432 }
6433 return GetOwnPropertyAttributes(object, name) != ABSENT;
6434 }
6435
6436
GetPropertyAttributes(Handle<JSReceiver> object,Handle<Name> key)6437 PropertyAttributes JSReceiver::GetPropertyAttributes(Handle<JSReceiver> object,
6438 Handle<Name> key) {
6439 uint32_t index;
6440 if (object->IsJSObject() && key->AsArrayIndex(&index)) {
6441 return GetElementAttribute(object, index);
6442 }
6443 LookupIterator it(object, key);
6444 return GetPropertyAttributes(&it);
6445 }
6446
6447
GetElementAttribute(Handle<JSReceiver> object,uint32_t index)6448 PropertyAttributes JSReceiver::GetElementAttribute(Handle<JSReceiver> object,
6449 uint32_t index) {
6450 if (object->IsJSProxy()) {
6451 return JSProxy::GetElementAttributeWithHandler(
6452 Handle<JSProxy>::cast(object), object, index);
6453 }
6454 return JSObject::GetElementAttributeWithReceiver(
6455 Handle<JSObject>::cast(object), object, index, true);
6456 }
6457
6458
IsDetached()6459 bool JSGlobalObject::IsDetached() {
6460 return JSGlobalProxy::cast(global_receiver())->IsDetachedFrom(this);
6461 }
6462
6463
IsDetachedFrom(GlobalObject * global)6464 bool JSGlobalProxy::IsDetachedFrom(GlobalObject* global) {
6465 return GetPrototype() != global;
6466 }
6467
6468
GetOrCreateIdentityHash(Handle<JSReceiver> object)6469 Handle<Smi> JSReceiver::GetOrCreateIdentityHash(Handle<JSReceiver> object) {
6470 return object->IsJSProxy()
6471 ? JSProxy::GetOrCreateIdentityHash(Handle<JSProxy>::cast(object))
6472 : JSObject::GetOrCreateIdentityHash(Handle<JSObject>::cast(object));
6473 }
6474
6475
GetIdentityHash()6476 Object* JSReceiver::GetIdentityHash() {
6477 return IsJSProxy()
6478 ? JSProxy::cast(this)->GetIdentityHash()
6479 : JSObject::cast(this)->GetIdentityHash();
6480 }
6481
6482
HasElement(Handle<JSReceiver> object,uint32_t index)6483 bool JSReceiver::HasElement(Handle<JSReceiver> object, uint32_t index) {
6484 if (object->IsJSProxy()) {
6485 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
6486 return JSProxy::HasElementWithHandler(proxy, index);
6487 }
6488 return JSObject::GetElementAttributeWithReceiver(
6489 Handle<JSObject>::cast(object), object, index, true) != ABSENT;
6490 }
6491
6492
HasOwnElement(Handle<JSReceiver> object,uint32_t index)6493 bool JSReceiver::HasOwnElement(Handle<JSReceiver> object, uint32_t index) {
6494 if (object->IsJSProxy()) {
6495 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
6496 return JSProxy::HasElementWithHandler(proxy, index);
6497 }
6498 return JSObject::GetElementAttributeWithReceiver(
6499 Handle<JSObject>::cast(object), object, index, false) != ABSENT;
6500 }
6501
6502
GetOwnElementAttribute(Handle<JSReceiver> object,uint32_t index)6503 PropertyAttributes JSReceiver::GetOwnElementAttribute(
6504 Handle<JSReceiver> object, uint32_t index) {
6505 if (object->IsJSProxy()) {
6506 return JSProxy::GetElementAttributeWithHandler(
6507 Handle<JSProxy>::cast(object), object, index);
6508 }
6509 return JSObject::GetElementAttributeWithReceiver(
6510 Handle<JSObject>::cast(object), object, index, false);
6511 }
6512
6513
bool AccessorInfo::all_can_read() {
  return BooleanBit::get(flag(), kAllCanReadBit);
}


void AccessorInfo::set_all_can_read(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
}


bool AccessorInfo::all_can_write() {
  return BooleanBit::get(flag(), kAllCanWriteBit);
}


void AccessorInfo::set_all_can_write(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
}


PropertyAttributes AccessorInfo::property_attributes() {
  return AttributesField::decode(static_cast<uint32_t>(flag()->value()));
}


void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
  set_flag(Smi::FromInt(AttributesField::update(flag()->value(), attributes)));
}


bool AccessorInfo::IsCompatibleReceiver(Object* receiver) {
  Object* function_template = expected_receiver_type();
  if (!function_template->IsFunctionTemplateInfo()) return true;
  return FunctionTemplateInfo::cast(function_template)->IsTemplateFor(receiver);
}


void ExecutableAccessorInfo::clear_setter() {
  set_setter(GetIsolate()->heap()->undefined_value(), SKIP_WRITE_BARRIER);
}


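// Translates the public v8::AccessControl bits into the Smi-encoded
// access_flags field shared by both accessors of the pair.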
void AccessorPair::set_access_flags(v8::AccessControl access_control) {
  int current = access_flags()->value();
  current = BooleanBit::set(current,
                            kAllCanReadBit,
                            access_control & ALL_CAN_READ);
  current = BooleanBit::set(current,
                            kAllCanWriteBit,
                            access_control & ALL_CAN_WRITE);
  set_access_flags(Smi::FromInt(current));
}


bool AccessorPair::all_can_read() {
  return BooleanBit::get(access_flags(), kAllCanReadBit);
}


bool AccessorPair::all_can_write() {
  return BooleanBit::get(access_flags(), kAllCanWriteBit);
}


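// A dictionary entry occupies three consecutive FixedArray slots:
//
//   index + 0: key
//   index + 1: value
//   index + 2: PropertyDetails, stored as a Smi
//
// where index == DerivedHashTable::EntryToIndex(entry).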
template<typename Derived, typename Shape, typename Key>
void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
                                               Handle<Object> key,
                                               Handle<Object> value) {
  SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
}


template<typename Derived, typename Shape, typename Key>
void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
                                               Handle<Object> key,
                                               Handle<Object> value,
                                               PropertyDetails details) {
  ASSERT(!key->IsName() ||
         details.IsDeleted() ||
         details.dictionary_index() > 0);
  int index = DerivedHashTable::EntryToIndex(entry);
  DisallowHeapAllocation no_gc;
  WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc);
  FixedArray::set(index, *key, mode);
  FixedArray::set(index+1, *value, mode);
  FixedArray::set(index+2, details.AsSmi());
}


bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
  ASSERT(other->IsNumber());
  return key == static_cast<uint32_t>(other->Number());
}


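// Unseeded number dictionaries hash with a zero seed, while the seeded
// variants below mix a caller-supplied seed into the hash, making the
// resulting values unpredictable across runs.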
uint32_t UnseededNumberDictionaryShape::Hash(uint32_t key) {
  return ComputeIntegerHash(key, 0);
}


uint32_t UnseededNumberDictionaryShape::HashForObject(uint32_t key,
                                                      Object* other) {
  ASSERT(other->IsNumber());
  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), 0);
}


uint32_t SeededNumberDictionaryShape::SeededHash(uint32_t key, uint32_t seed) {
  return ComputeIntegerHash(key, seed);
}


uint32_t SeededNumberDictionaryShape::SeededHashForObject(uint32_t key,
                                                          uint32_t seed,
                                                          Object* other) {
  ASSERT(other->IsNumber());
  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), seed);
}


Handle<Object> NumberDictionaryShape::AsHandle(Isolate* isolate, uint32_t key) {
  return isolate->factory()->NewNumberFromUint(key);
}


bool NameDictionaryShape::IsMatch(Handle<Name> key, Object* other) {
  // All entries in a hash table have their hash computed on insertion, so
  // comparing the cached hashes rejects non-matching names quickly, before
  // the slower Equals() comparison.
  if (key->Hash() != Name::cast(other)->Hash()) return false;
  return key->Equals(Name::cast(other));
}


uint32_t NameDictionaryShape::Hash(Handle<Name> key) {
  return key->Hash();
}


uint32_t NameDictionaryShape::HashForObject(Handle<Name> key, Object* other) {
  return Name::cast(other)->Hash();
}


Handle<Object> NameDictionaryShape::AsHandle(Isolate* isolate,
                                             Handle<Name> key) {
  ASSERT(key->IsUniqueName());
  return key;
}


void NameDictionary::DoGenerateNewEnumerationIndices(
    Handle<NameDictionary> dictionary) {
  DerivedDictionary::GenerateNewEnumerationIndices(dictionary);
}


bool ObjectHashTableShape::IsMatch(Handle<Object> key, Object* other) {
  return key->SameValue(other);
}


uint32_t ObjectHashTableShape::Hash(Handle<Object> key) {
  return Smi::cast(key->GetHash())->value();
}


uint32_t ObjectHashTableShape::HashForObject(Handle<Object> key,
                                             Object* other) {
  return Smi::cast(other->GetHash())->value();
}


Handle<Object> ObjectHashTableShape::AsHandle(Isolate* isolate,
                                              Handle<Object> key) {
  return key;
}


Handle<ObjectHashTable> ObjectHashTable::Shrink(
    Handle<ObjectHashTable> table, Handle<Object> key) {
  return DerivedHashTable::Shrink(table, key);
}


template <int entrysize>
bool WeakHashTableShape<entrysize>::IsMatch(Handle<Object> key, Object* other) {
  return key->SameValue(other);
}


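// Note that these hashes are derived from the key's address rather than its
// contents: two handles to the same object hash identically, but the value
// is only stable for as long as the GC does not move the key object.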
template <int entrysize>
uint32_t WeakHashTableShape<entrysize>::Hash(Handle<Object> key) {
  intptr_t hash = reinterpret_cast<intptr_t>(*key);
  return static_cast<uint32_t>(hash & 0xFFFFFFFF);
}


template <int entrysize>
uint32_t WeakHashTableShape<entrysize>::HashForObject(Handle<Object> key,
                                                      Object* other) {
  intptr_t hash = reinterpret_cast<intptr_t>(other);
  return static_cast<uint32_t>(hash & 0xFFFFFFFF);
}


template <int entrysize>
Handle<Object> WeakHashTableShape<entrysize>::AsHandle(Isolate* isolate,
                                                       Handle<Object> key) {
  return key;
}


void Map::ClearCodeCache(Heap* heap) {
  // No write barrier is needed since empty_fixed_array is not in new space.
  // Please note this function is used during marking:
  //  - MarkCompactCollector::MarkUnmarkedObject
  //  - IncrementalMarking::Step
  ASSERT(!heap->InNewSpace(heap->empty_fixed_array()));
  WRITE_FIELD(this, kCodeCacheOffset, heap->empty_fixed_array());
}


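// Grows the backing store with an extra 12.5% of slack beyond the requested
// size (e.g. a request for 64 elements allocates 72), so that repeated
// appends do not reallocate on every call.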
void JSArray::EnsureSize(Handle<JSArray> array, int required_size) {
  ASSERT(array->HasFastSmiOrObjectElements());
  Handle<FixedArray> elts = handle(FixedArray::cast(array->elements()));
  const int kArraySizeThatFitsComfortablyInNewSpace = 128;
  if (elts->length() < required_size) {
    // Doubling in size would be overkill, but leave some slack to avoid
    // constantly growing.
    Expand(array, required_size + (required_size >> 3));
    // It's a performance benefit to keep a frequently used array in new-space.
  } else if (!array->GetHeap()->new_space()->Contains(*elts) &&
             required_size < kArraySizeThatFitsComfortablyInNewSpace) {
    // Expand will allocate a new backing store in new space even if the size
    // we asked for isn't larger than what we had before.
    Expand(array, required_size);
  }
}


void JSArray::set_length(Smi* length) {
  // Don't need a write barrier for a Smi.
  set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
}


bool JSArray::AllowsSetElementsLength() {
  bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray();
  ASSERT(result == !HasExternalArrayElements());
  return result;
}


void JSArray::SetContent(Handle<JSArray> array,
                         Handle<FixedArrayBase> storage) {
  EnsureCanContainElements(array, storage, storage->length(),
                           ALLOW_COPIED_DOUBLE_ELEMENTS);

  ASSERT((storage->map() == array->GetHeap()->fixed_double_array_map() &&
          IsFastDoubleElementsKind(array->GetElementsKind())) ||
         ((storage->map() != array->GetHeap()->fixed_double_array_map()) &&
          (IsFastObjectElementsKind(array->GetElementsKind()) ||
           (IsFastSmiElementsKind(array->GetElementsKind()) &&
            Handle<FixedArray>::cast(storage)->ContainsOnlySmisOrHoles()))));
  array->set_elements(*storage);
  array->set_length(Smi::FromInt(storage->length()));
}


Handle<Object> TypeFeedbackInfo::UninitializedSentinel(Isolate* isolate) {
  return isolate->factory()->uninitialized_symbol();
}


Handle<Object> TypeFeedbackInfo::MegamorphicSentinel(Isolate* isolate) {
  return isolate->factory()->megamorphic_symbol();
}


Handle<Object> TypeFeedbackInfo::MonomorphicArraySentinel(Isolate* isolate,
    ElementsKind elements_kind) {
  return Handle<Object>(Smi::FromInt(static_cast<int>(elements_kind)), isolate);
}


Object* TypeFeedbackInfo::RawUninitializedSentinel(Heap* heap) {
  return heap->uninitialized_symbol();
}


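// TypeFeedbackInfo packs its counters and checksums as bit fields into two
// Smi storage slots (kStorage1Offset and kStorage2Offset); the accessors
// below read, update, and re-tag those Smis.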
int TypeFeedbackInfo::ic_total_count() {
  int current = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  return ICTotalCountField::decode(current);
}


void TypeFeedbackInfo::set_ic_total_count(int count) {
  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  value = ICTotalCountField::update(value,
                                    ICTotalCountField::decode(count));
  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
}


int TypeFeedbackInfo::ic_with_type_info_count() {
  int current = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  return ICsWithTypeInfoCountField::decode(current);
}


void TypeFeedbackInfo::change_ic_with_type_info_count(int delta) {
  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  int new_count = ICsWithTypeInfoCountField::decode(value) + delta;
  // The count can go negative here when the type-feedback info is shared
  // between two code objects. This can only happen when the debugger makes
  // a shallow copy of a code object (see Heap::CopyCode). Since we do not
  // optimize when the debugger is active, we can simply skip this counter
  // update.
  if (new_count >= 0) {
    new_count &= ICsWithTypeInfoCountField::kMask;
    value = ICsWithTypeInfoCountField::update(value, new_count);
    WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
  }
}


void TypeFeedbackInfo::initialize_storage() {
  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(0));
  WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(0));
}


void TypeFeedbackInfo::change_own_type_change_checksum() {
  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  int checksum = OwnTypeChangeChecksum::decode(value);
  checksum = (checksum + 1) % (1 << kTypeChangeChecksumBits);
  value = OwnTypeChangeChecksum::update(value, checksum);
  // Ensure packed bit field is in Smi range.
  if (value > Smi::kMaxValue) value |= Smi::kMinValue;
  if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
}


void TypeFeedbackInfo::set_inlined_type_change_checksum(int checksum) {
  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  int mask = (1 << kTypeChangeChecksumBits) - 1;
  value = InlinedTypeChangeChecksum::update(value, checksum & mask);
  // Ensure packed bit field is in Smi range.
  if (value > Smi::kMaxValue) value |= Smi::kMinValue;
  if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
  WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
}


int TypeFeedbackInfo::own_type_change_checksum() {
  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  return OwnTypeChangeChecksum::decode(value);
}


bool TypeFeedbackInfo::matches_inlined_type_change_checksum(int checksum) {
  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  int mask = (1 << kTypeChangeChecksumBits) - 1;
  return InlinedTypeChangeChecksum::decode(value) == (checksum & mask);
}


SMI_ACCESSORS(AliasedArgumentsEntry, aliased_context_slot, kAliasedContextSlot)


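// Relocatables form a per-isolate, stack-allocated LIFO chain; the destructor
// below asserts that they are destroyed in strict reverse construction order.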
Relocatable::Relocatable(Isolate* isolate) {
  isolate_ = isolate;
  prev_ = isolate->relocatable_top();
  isolate->set_relocatable_top(this);
}


Relocatable::~Relocatable() {
  ASSERT_EQ(isolate_->relocatable_top(), this);
  isolate_->set_relocatable_top(prev_);
}


int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
  return map->instance_size();
}


void Foreign::ForeignIterateBody(ObjectVisitor* v) {
  v->VisitExternalReference(
      reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
}


template<typename StaticVisitor>
void Foreign::ForeignIterateBody() {
  StaticVisitor::VisitExternalReference(
      reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
}


void ExternalAsciiString::ExternalAsciiStringIterateBody(ObjectVisitor* v) {
  typedef v8::String::ExternalAsciiStringResource Resource;
  v->VisitExternalAsciiString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


template<typename StaticVisitor>
void ExternalAsciiString::ExternalAsciiStringIterateBody() {
  typedef v8::String::ExternalAsciiStringResource Resource;
  StaticVisitor::VisitExternalAsciiString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
  typedef v8::String::ExternalStringResource Resource;
  v->VisitExternalTwoByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


template<typename StaticVisitor>
void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
  typedef v8::String::ExternalStringResource Resource;
  StaticVisitor::VisitExternalTwoByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


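// FixedBodyDescriptor visits a pointer range that is known statically, while
// FlexibleBodyDescriptor receives the object size at run time and visits
// everything from start_offset up to that size.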
template<int start_offset, int end_offset, int size>
void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
    HeapObject* obj,
    ObjectVisitor* v) {
  v->VisitPointers(HeapObject::RawField(obj, start_offset),
                   HeapObject::RawField(obj, end_offset));
}


template<int start_offset>
void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
                                                       int object_size,
                                                       ObjectVisitor* v) {
  v->VisitPointers(HeapObject::RawField(obj, start_offset),
                   HeapObject::RawField(obj, object_size));
}


#undef TYPE_CHECKER
#undef CAST_ACCESSOR
#undef INT_ACCESSORS
#undef ACCESSORS
#undef ACCESSORS_TO_SMI
#undef SMI_ACCESSORS
#undef SYNCHRONIZED_SMI_ACCESSORS
#undef NOBARRIER_SMI_ACCESSORS
#undef BOOL_GETTER
#undef BOOL_ACCESSORS
#undef FIELD_ADDR
#undef READ_FIELD
#undef NOBARRIER_READ_FIELD
#undef WRITE_FIELD
#undef NOBARRIER_WRITE_FIELD
#undef WRITE_BARRIER
#undef CONDITIONAL_WRITE_BARRIER
#undef READ_DOUBLE_FIELD
#undef WRITE_DOUBLE_FIELD
#undef READ_INT_FIELD
#undef WRITE_INT_FIELD
#undef READ_INTPTR_FIELD
#undef WRITE_INTPTR_FIELD
#undef READ_UINT32_FIELD
#undef WRITE_UINT32_FIELD
#undef READ_SHORT_FIELD
#undef WRITE_SHORT_FIELD
#undef READ_BYTE_FIELD
#undef WRITE_BYTE_FIELD
#undef NOBARRIER_READ_BYTE_FIELD
#undef NOBARRIER_WRITE_BYTE_FIELD

} }  // namespace v8::internal

#endif  // V8_OBJECTS_INL_H_