// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Review notes:
//
// - The use of macros in these inline functions may seem superfluous
// but it is absolutely needed to make sure gcc generates optimal
// code. gcc is not happy when attempting to inline too deep.
//

#ifndef V8_OBJECTS_INL_H_
#define V8_OBJECTS_INL_H_

#include "objects.h"
#include "contexts.h"
#include "conversions-inl.h"
#include "heap.h"
#include "isolate.h"
#include "property.h"
#include "spaces.h"
#include "v8memory.h"

namespace v8 {
namespace internal {

PropertyDetails(Smi * smi)50 PropertyDetails::PropertyDetails(Smi* smi) {
51   value_ = smi->value();
52 }
53 
54 
AsSmi()55 Smi* PropertyDetails::AsSmi() {
56   return Smi::FromInt(value_);
57 }
58 
59 
AsDeleted()60 PropertyDetails PropertyDetails::AsDeleted() {
61   Smi* smi = Smi::FromInt(value_ | DeletedField::encode(1));
62   return PropertyDetails(smi);
63 }
64 
65 
66 #define CAST_ACCESSOR(type)                     \
67   type* type::cast(Object* object) {            \
68     ASSERT(object->Is##type());                 \
69     return reinterpret_cast<type*>(object);     \
70   }
71 
72 
73 #define INT_ACCESSORS(holder, name, offset)                             \
74   int holder::name() { return READ_INT_FIELD(this, offset); }           \
75   void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }
76 
77 
78 #define ACCESSORS(holder, name, type, offset)                           \
79   type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
80   void holder::set_##name(type* value, WriteBarrierMode mode) {         \
81     WRITE_FIELD(this, offset, value);                                   \
82     CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, mode);           \
83   }
84 
85 
86 // GC-safe accessors do not use HeapObject::GetHeap(), but access TLS instead.
87 #define ACCESSORS_GCSAFE(holder, name, type, offset)                    \
88   type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
89   void holder::set_##name(type* value, WriteBarrierMode mode) {         \
90     WRITE_FIELD(this, offset, value);                                   \
91     CONDITIONAL_WRITE_BARRIER(HEAP, this, offset, mode);                \
92   }
93 
94 
95 #define SMI_ACCESSORS(holder, name, offset)             \
96   int holder::name() {                                  \
97     Object* value = READ_FIELD(this, offset);           \
98     return Smi::cast(value)->value();                   \
99   }                                                     \
100   void holder::set_##name(int value) {                  \
101     WRITE_FIELD(this, offset, Smi::FromInt(value));     \
102   }
103 
104 
105 #define BOOL_GETTER(holder, field, name, offset)           \
106   bool holder::name() {                                    \
107     return BooleanBit::get(field(), offset);               \
108   }                                                        \
109 
110 
111 #define BOOL_ACCESSORS(holder, field, name, offset)        \
112   bool holder::name() {                                    \
113     return BooleanBit::get(field(), offset);               \
114   }                                                        \
115   void holder::set_##name(bool value) {                    \
116     set_##field(BooleanBit::set(field(), offset, value));  \
117   }
118 
119 
IsInstanceOf(FunctionTemplateInfo * expected)120 bool Object::IsInstanceOf(FunctionTemplateInfo* expected) {
121   // There is a constraint on the object; check.
122   if (!this->IsJSObject()) return false;
123   // Fetch the constructor function of the object.
124   Object* cons_obj = JSObject::cast(this)->map()->constructor();
125   if (!cons_obj->IsJSFunction()) return false;
126   JSFunction* fun = JSFunction::cast(cons_obj);
127   // Iterate through the chain of inheriting function templates to
128   // see if the required one occurs.
129   for (Object* type = fun->shared()->function_data();
130        type->IsFunctionTemplateInfo();
131        type = FunctionTemplateInfo::cast(type)->parent_template()) {
132     if (type == expected) return true;
133   }
134   // Didn't find the required type in the inheritance chain.
135   return false;
136 }
137 
138 
IsSmi()139 bool Object::IsSmi() {
140   return HAS_SMI_TAG(this);
141 }
142 
143 
IsHeapObject()144 bool Object::IsHeapObject() {
145   return Internals::HasHeapObjectTag(this);
146 }
147 
148 
IsHeapNumber()149 bool Object::IsHeapNumber() {
150   return Object::IsHeapObject()
151     && HeapObject::cast(this)->map()->instance_type() == HEAP_NUMBER_TYPE;
152 }
153 
154 
IsString()155 bool Object::IsString() {
156   return Object::IsHeapObject()
157     && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE;
158 }
159 
160 
IsSymbol()161 bool Object::IsSymbol() {
162   if (!this->IsHeapObject()) return false;
163   uint32_t type = HeapObject::cast(this)->map()->instance_type();
164   // Because the symbol tag is non-zero and no non-string types have the
165   // symbol bit set we can test for symbols with a very simple test
166   // operation.
167   ASSERT(kSymbolTag != 0);
168   ASSERT(kNotStringTag + kIsSymbolMask > LAST_TYPE);
169   return (type & kIsSymbolMask) != 0;
170 }
171 
172 
IsConsString()173 bool Object::IsConsString() {
174   if (!this->IsHeapObject()) return false;
175   uint32_t type = HeapObject::cast(this)->map()->instance_type();
176   return (type & (kIsNotStringMask | kStringRepresentationMask)) ==
177          (kStringTag | kConsStringTag);
178 }
179 
180 
IsSeqString()181 bool Object::IsSeqString() {
182   if (!IsString()) return false;
183   return StringShape(String::cast(this)).IsSequential();
184 }
185 
186 
IsSeqAsciiString()187 bool Object::IsSeqAsciiString() {
188   if (!IsString()) return false;
189   return StringShape(String::cast(this)).IsSequential() &&
190          String::cast(this)->IsAsciiRepresentation();
191 }
192 
193 
IsSeqTwoByteString()194 bool Object::IsSeqTwoByteString() {
195   if (!IsString()) return false;
196   return StringShape(String::cast(this)).IsSequential() &&
197          String::cast(this)->IsTwoByteRepresentation();
198 }
199 
200 
IsExternalString()201 bool Object::IsExternalString() {
202   if (!IsString()) return false;
203   return StringShape(String::cast(this)).IsExternal();
204 }
205 
206 
IsExternalAsciiString()207 bool Object::IsExternalAsciiString() {
208   if (!IsString()) return false;
209   return StringShape(String::cast(this)).IsExternal() &&
210          String::cast(this)->IsAsciiRepresentation();
211 }
212 
213 
IsExternalTwoByteString()214 bool Object::IsExternalTwoByteString() {
215   if (!IsString()) return false;
216   return StringShape(String::cast(this)).IsExternal() &&
217          String::cast(this)->IsTwoByteRepresentation();
218 }
219 
220 
StringShape(String * str)221 StringShape::StringShape(String* str)
222   : type_(str->map()->instance_type()) {
223   set_valid();
224   ASSERT((type_ & kIsNotStringMask) == kStringTag);
225 }
226 
227 
StringShape(Map * map)228 StringShape::StringShape(Map* map)
229   : type_(map->instance_type()) {
230   set_valid();
231   ASSERT((type_ & kIsNotStringMask) == kStringTag);
232 }
233 
234 
StringShape(InstanceType t)235 StringShape::StringShape(InstanceType t)
236   : type_(static_cast<uint32_t>(t)) {
237   set_valid();
238   ASSERT((type_ & kIsNotStringMask) == kStringTag);
239 }
240 
241 
IsSymbol()242 bool StringShape::IsSymbol() {
243   ASSERT(valid());
244   ASSERT(kSymbolTag != 0);
245   return (type_ & kIsSymbolMask) != 0;
246 }
247 
248 
IsAsciiRepresentation()249 bool String::IsAsciiRepresentation() {
250   uint32_t type = map()->instance_type();
251   return (type & kStringEncodingMask) == kAsciiStringTag;
252 }
253 
254 
IsTwoByteRepresentation()255 bool String::IsTwoByteRepresentation() {
256   uint32_t type = map()->instance_type();
257   return (type & kStringEncodingMask) == kTwoByteStringTag;
258 }
259 
260 
HasOnlyAsciiChars()261 bool String::HasOnlyAsciiChars() {
262   uint32_t type = map()->instance_type();
263   return (type & kStringEncodingMask) == kAsciiStringTag ||
264          (type & kAsciiDataHintMask) == kAsciiDataHintTag;
265 }
266 
267 
IsCons()268 bool StringShape::IsCons() {
269   return (type_ & kStringRepresentationMask) == kConsStringTag;
270 }
271 
272 
IsExternal()273 bool StringShape::IsExternal() {
274   return (type_ & kStringRepresentationMask) == kExternalStringTag;
275 }
276 
277 
IsSequential()278 bool StringShape::IsSequential() {
279   return (type_ & kStringRepresentationMask) == kSeqStringTag;
280 }
281 
282 
representation_tag()283 StringRepresentationTag StringShape::representation_tag() {
284   uint32_t tag = (type_ & kStringRepresentationMask);
285   return static_cast<StringRepresentationTag>(tag);
286 }
287 
288 
full_representation_tag()289 uint32_t StringShape::full_representation_tag() {
290   return (type_ & (kStringRepresentationMask | kStringEncodingMask));
291 }
292 
293 
294 STATIC_CHECK((kStringRepresentationMask | kStringEncodingMask) ==
295              Internals::kFullStringRepresentationMask);
296 
297 
IsSequentialAscii()298 bool StringShape::IsSequentialAscii() {
299   return full_representation_tag() == (kSeqStringTag | kAsciiStringTag);
300 }
301 
302 
IsSequentialTwoByte()303 bool StringShape::IsSequentialTwoByte() {
304   return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);
305 }
306 
307 
IsExternalAscii()308 bool StringShape::IsExternalAscii() {
309   return full_representation_tag() == (kExternalStringTag | kAsciiStringTag);
310 }
311 
312 
IsExternalTwoByte()313 bool StringShape::IsExternalTwoByte() {
314   return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
315 }
316 
317 
318 STATIC_CHECK((kExternalStringTag | kTwoByteStringTag) ==
319              Internals::kExternalTwoByteRepresentationTag);
320 
321 
Get(int index)322 uc32 FlatStringReader::Get(int index) {
323   ASSERT(0 <= index && index <= length_);
324   if (is_ascii_) {
325     return static_cast<const byte*>(start_)[index];
326   } else {
327     return static_cast<const uc16*>(start_)[index];
328   }
329 }
330 
331 
IsNumber()332 bool Object::IsNumber() {
333   return IsSmi() || IsHeapNumber();
334 }
335 
336 
IsByteArray()337 bool Object::IsByteArray() {
338   return Object::IsHeapObject()
339     && HeapObject::cast(this)->map()->instance_type() == BYTE_ARRAY_TYPE;
340 }
341 
342 
IsExternalPixelArray()343 bool Object::IsExternalPixelArray() {
344   return Object::IsHeapObject() &&
345       HeapObject::cast(this)->map()->instance_type() ==
346           EXTERNAL_PIXEL_ARRAY_TYPE;
347 }
348 
349 
IsExternalArray()350 bool Object::IsExternalArray() {
351   if (!Object::IsHeapObject())
352     return false;
353   InstanceType instance_type =
354       HeapObject::cast(this)->map()->instance_type();
355   return (instance_type >= FIRST_EXTERNAL_ARRAY_TYPE &&
356           instance_type <= LAST_EXTERNAL_ARRAY_TYPE);
357 }
358 
359 
IsExternalByteArray()360 bool Object::IsExternalByteArray() {
361   return Object::IsHeapObject() &&
362       HeapObject::cast(this)->map()->instance_type() ==
363       EXTERNAL_BYTE_ARRAY_TYPE;
364 }
365 
366 
IsExternalUnsignedByteArray()367 bool Object::IsExternalUnsignedByteArray() {
368   return Object::IsHeapObject() &&
369       HeapObject::cast(this)->map()->instance_type() ==
370       EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE;
371 }
372 
373 
IsExternalShortArray()374 bool Object::IsExternalShortArray() {
375   return Object::IsHeapObject() &&
376       HeapObject::cast(this)->map()->instance_type() ==
377       EXTERNAL_SHORT_ARRAY_TYPE;
378 }
379 
380 
IsExternalUnsignedShortArray()381 bool Object::IsExternalUnsignedShortArray() {
382   return Object::IsHeapObject() &&
383       HeapObject::cast(this)->map()->instance_type() ==
384       EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE;
385 }
386 
387 
IsExternalIntArray()388 bool Object::IsExternalIntArray() {
389   return Object::IsHeapObject() &&
390       HeapObject::cast(this)->map()->instance_type() ==
391       EXTERNAL_INT_ARRAY_TYPE;
392 }
393 
394 
IsExternalUnsignedIntArray()395 bool Object::IsExternalUnsignedIntArray() {
396   return Object::IsHeapObject() &&
397       HeapObject::cast(this)->map()->instance_type() ==
398       EXTERNAL_UNSIGNED_INT_ARRAY_TYPE;
399 }
400 
401 
IsExternalFloatArray()402 bool Object::IsExternalFloatArray() {
403   return Object::IsHeapObject() &&
404       HeapObject::cast(this)->map()->instance_type() ==
405       EXTERNAL_FLOAT_ARRAY_TYPE;
406 }
407 
408 
IsFailure()409 bool MaybeObject::IsFailure() {
410   return HAS_FAILURE_TAG(this);
411 }
412 
413 
IsRetryAfterGC()414 bool MaybeObject::IsRetryAfterGC() {
415   return HAS_FAILURE_TAG(this)
416     && Failure::cast(this)->type() == Failure::RETRY_AFTER_GC;
417 }
418 
419 
IsOutOfMemory()420 bool MaybeObject::IsOutOfMemory() {
421   return HAS_FAILURE_TAG(this)
422       && Failure::cast(this)->IsOutOfMemoryException();
423 }
424 
425 
IsException()426 bool MaybeObject::IsException() {
427   return this == Failure::Exception();
428 }
429 
430 
IsTheHole()431 bool MaybeObject::IsTheHole() {
432   return !IsFailure() && ToObjectUnchecked()->IsTheHole();
433 }
434 
435 
cast(MaybeObject * obj)436 Failure* Failure::cast(MaybeObject* obj) {
437   ASSERT(HAS_FAILURE_TAG(obj));
438   return reinterpret_cast<Failure*>(obj);
439 }
440 
441 
IsJSObject()442 bool Object::IsJSObject() {
443   return IsHeapObject()
444       && HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_OBJECT_TYPE;
445 }
446 
447 
IsJSContextExtensionObject()448 bool Object::IsJSContextExtensionObject() {
449   return IsHeapObject()
450       && (HeapObject::cast(this)->map()->instance_type() ==
451           JS_CONTEXT_EXTENSION_OBJECT_TYPE);
452 }
453 
454 
IsMap()455 bool Object::IsMap() {
456   return Object::IsHeapObject()
457       && HeapObject::cast(this)->map()->instance_type() == MAP_TYPE;
458 }
459 
460 
IsFixedArray()461 bool Object::IsFixedArray() {
462   return Object::IsHeapObject()
463       && HeapObject::cast(this)->map()->instance_type() == FIXED_ARRAY_TYPE;
464 }
465 
466 
IsDescriptorArray()467 bool Object::IsDescriptorArray() {
468   return IsFixedArray();
469 }
470 
471 
IsDeoptimizationInputData()472 bool Object::IsDeoptimizationInputData() {
473   // Must be a fixed array.
474   if (!IsFixedArray()) return false;
475 
476   // There's no sure way to detect the difference between a fixed array and
477   // a deoptimization data array.  Since this is used for asserts we can
478   // check that the length is zero or else the fixed size plus a multiple of
479   // the entry size.
480   int length = FixedArray::cast(this)->length();
481   if (length == 0) return true;
482 
483   length -= DeoptimizationInputData::kFirstDeoptEntryIndex;
484   return length >= 0 &&
485       length % DeoptimizationInputData::kDeoptEntrySize == 0;
486 }
487 
488 
IsDeoptimizationOutputData()489 bool Object::IsDeoptimizationOutputData() {
490   if (!IsFixedArray()) return false;
491   // There's actually no way to see the difference between a fixed array and
492   // a deoptimization data array.  Since this is used for asserts we can check
493   // that the length is plausible though.
494   if (FixedArray::cast(this)->length() % 2 != 0) return false;
495   return true;
496 }
497 
498 
IsContext()499 bool Object::IsContext() {
500   if (Object::IsHeapObject()) {
501     Heap* heap = HeapObject::cast(this)->GetHeap();
502     return (HeapObject::cast(this)->map() == heap->context_map() ||
503             HeapObject::cast(this)->map() == heap->catch_context_map() ||
504             HeapObject::cast(this)->map() == heap->global_context_map());
505   }
506   return false;
507 }
508 
509 
IsCatchContext()510 bool Object::IsCatchContext() {
511   return Object::IsHeapObject() &&
512       HeapObject::cast(this)->map() ==
513       HeapObject::cast(this)->GetHeap()->catch_context_map();
514 }
515 
516 
IsGlobalContext()517 bool Object::IsGlobalContext() {
518   return Object::IsHeapObject() &&
519       HeapObject::cast(this)->map() ==
520       HeapObject::cast(this)->GetHeap()->global_context_map();
521 }
522 
523 
IsJSFunction()524 bool Object::IsJSFunction() {
525   return Object::IsHeapObject()
526       && HeapObject::cast(this)->map()->instance_type() == JS_FUNCTION_TYPE;
527 }
528 
529 
530 template <> inline bool Is<JSFunction>(Object* obj) {
531   return obj->IsJSFunction();
532 }
533 
534 
IsCode()535 bool Object::IsCode() {
536   return Object::IsHeapObject()
537       && HeapObject::cast(this)->map()->instance_type() == CODE_TYPE;
538 }
539 
540 
IsOddball()541 bool Object::IsOddball() {
542   ASSERT(HEAP->is_safe_to_read_maps());
543   return Object::IsHeapObject()
544     && HeapObject::cast(this)->map()->instance_type() == ODDBALL_TYPE;
545 }
546 
547 
IsJSGlobalPropertyCell()548 bool Object::IsJSGlobalPropertyCell() {
549   return Object::IsHeapObject()
550       && HeapObject::cast(this)->map()->instance_type()
551       == JS_GLOBAL_PROPERTY_CELL_TYPE;
552 }
553 
554 
IsSharedFunctionInfo()555 bool Object::IsSharedFunctionInfo() {
556   return Object::IsHeapObject() &&
557       (HeapObject::cast(this)->map()->instance_type() ==
558        SHARED_FUNCTION_INFO_TYPE);
559 }
560 
561 
IsJSValue()562 bool Object::IsJSValue() {
563   return Object::IsHeapObject()
564       && HeapObject::cast(this)->map()->instance_type() == JS_VALUE_TYPE;
565 }
566 
567 
IsJSMessageObject()568 bool Object::IsJSMessageObject() {
569   return Object::IsHeapObject()
570       && (HeapObject::cast(this)->map()->instance_type() ==
571           JS_MESSAGE_OBJECT_TYPE);
572 }
573 
574 
IsStringWrapper()575 bool Object::IsStringWrapper() {
576   return IsJSValue() && JSValue::cast(this)->value()->IsString();
577 }
578 
579 
IsProxy()580 bool Object::IsProxy() {
581   return Object::IsHeapObject()
582       && HeapObject::cast(this)->map()->instance_type() == PROXY_TYPE;
583 }
584 
585 
IsBoolean()586 bool Object::IsBoolean() {
587   return IsOddball() &&
588       ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
589 }
590 
591 
IsJSArray()592 bool Object::IsJSArray() {
593   return Object::IsHeapObject()
594       && HeapObject::cast(this)->map()->instance_type() == JS_ARRAY_TYPE;
595 }
596 
597 
IsJSRegExp()598 bool Object::IsJSRegExp() {
599   return Object::IsHeapObject()
600       && HeapObject::cast(this)->map()->instance_type() == JS_REGEXP_TYPE;
601 }
602 
603 
604 template <> inline bool Is<JSArray>(Object* obj) {
605   return obj->IsJSArray();
606 }
607 
608 
IsHashTable()609 bool Object::IsHashTable() {
610   return Object::IsHeapObject() &&
611       HeapObject::cast(this)->map() ==
612       HeapObject::cast(this)->GetHeap()->hash_table_map();
613 }
614 
615 
IsDictionary()616 bool Object::IsDictionary() {
617   return IsHashTable() && this !=
618          HeapObject::cast(this)->GetHeap()->symbol_table();
619 }
620 
621 
IsSymbolTable()622 bool Object::IsSymbolTable() {
623   return IsHashTable() && this ==
624          HeapObject::cast(this)->GetHeap()->raw_unchecked_symbol_table();
625 }
626 
627 
IsJSFunctionResultCache()628 bool Object::IsJSFunctionResultCache() {
629   if (!IsFixedArray()) return false;
630   FixedArray* self = FixedArray::cast(this);
631   int length = self->length();
632   if (length < JSFunctionResultCache::kEntriesIndex) return false;
633   if ((length - JSFunctionResultCache::kEntriesIndex)
634       % JSFunctionResultCache::kEntrySize != 0) {
635     return false;
636   }
637 #ifdef DEBUG
638   reinterpret_cast<JSFunctionResultCache*>(this)->JSFunctionResultCacheVerify();
639 #endif
640   return true;
641 }
642 
643 
IsNormalizedMapCache()644 bool Object::IsNormalizedMapCache() {
645   if (!IsFixedArray()) return false;
646   if (FixedArray::cast(this)->length() != NormalizedMapCache::kEntries) {
647     return false;
648   }
649 #ifdef DEBUG
650   reinterpret_cast<NormalizedMapCache*>(this)->NormalizedMapCacheVerify();
651 #endif
652   return true;
653 }
654 
655 
IsCompilationCacheTable()656 bool Object::IsCompilationCacheTable() {
657   return IsHashTable();
658 }
659 
660 
IsCodeCacheHashTable()661 bool Object::IsCodeCacheHashTable() {
662   return IsHashTable();
663 }
664 
665 
IsMapCache()666 bool Object::IsMapCache() {
667   return IsHashTable();
668 }
669 
670 
IsPrimitive()671 bool Object::IsPrimitive() {
672   return IsOddball() || IsNumber() || IsString();
673 }
674 
675 
IsJSGlobalProxy()676 bool Object::IsJSGlobalProxy() {
677   bool result = IsHeapObject() &&
678                 (HeapObject::cast(this)->map()->instance_type() ==
679                  JS_GLOBAL_PROXY_TYPE);
680   ASSERT(!result || IsAccessCheckNeeded());
681   return result;
682 }
683 
684 
IsGlobalObject()685 bool Object::IsGlobalObject() {
686   if (!IsHeapObject()) return false;
687 
688   InstanceType type = HeapObject::cast(this)->map()->instance_type();
689   return type == JS_GLOBAL_OBJECT_TYPE ||
690          type == JS_BUILTINS_OBJECT_TYPE;
691 }
692 
693 
IsJSGlobalObject()694 bool Object::IsJSGlobalObject() {
695   return IsHeapObject() &&
696       (HeapObject::cast(this)->map()->instance_type() ==
697        JS_GLOBAL_OBJECT_TYPE);
698 }
699 
700 
IsJSBuiltinsObject()701 bool Object::IsJSBuiltinsObject() {
702   return IsHeapObject() &&
703       (HeapObject::cast(this)->map()->instance_type() ==
704        JS_BUILTINS_OBJECT_TYPE);
705 }
706 
707 
IsUndetectableObject()708 bool Object::IsUndetectableObject() {
709   return IsHeapObject()
710     && HeapObject::cast(this)->map()->is_undetectable();
711 }
712 
713 
IsAccessCheckNeeded()714 bool Object::IsAccessCheckNeeded() {
715   return IsHeapObject()
716     && HeapObject::cast(this)->map()->is_access_check_needed();
717 }
718 
719 
IsStruct()720 bool Object::IsStruct() {
721   if (!IsHeapObject()) return false;
722   switch (HeapObject::cast(this)->map()->instance_type()) {
723 #define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
724   STRUCT_LIST(MAKE_STRUCT_CASE)
725 #undef MAKE_STRUCT_CASE
726     default: return false;
727   }
728 }
729 
730 
731 #define MAKE_STRUCT_PREDICATE(NAME, Name, name)                  \
732   bool Object::Is##Name() {                                      \
733     return Object::IsHeapObject()                                \
734       && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
735   }
STRUCT_LIST(MAKE_STRUCT_PREDICATE)736   STRUCT_LIST(MAKE_STRUCT_PREDICATE)
737 #undef MAKE_STRUCT_PREDICATE
738 
739 
740 bool Object::IsUndefined() {
741   return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined;
742 }
743 
744 
IsNull()745 bool Object::IsNull() {
746   return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull;
747 }
748 
749 
IsTheHole()750 bool Object::IsTheHole() {
751   return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole;
752 }
753 
754 
IsTrue()755 bool Object::IsTrue() {
756   return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue;
757 }
758 
759 
IsFalse()760 bool Object::IsFalse() {
761   return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse;
762 }
763 
764 
IsArgumentsMarker()765 bool Object::IsArgumentsMarker() {
766   return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker;
767 }
768 
769 
Number()770 double Object::Number() {
771   ASSERT(IsNumber());
772   return IsSmi()
773     ? static_cast<double>(reinterpret_cast<Smi*>(this)->value())
774     : reinterpret_cast<HeapNumber*>(this)->value();
775 }
776 
777 
ToSmi()778 MaybeObject* Object::ToSmi() {
779   if (IsSmi()) return this;
780   if (IsHeapNumber()) {
781     double value = HeapNumber::cast(this)->value();
782     int int_value = FastD2I(value);
783     if (value == FastI2D(int_value) && Smi::IsValid(int_value)) {
784       return Smi::FromInt(int_value);
785     }
786   }
787   return Failure::Exception();
788 }
789 
790 
HasSpecificClassOf(String * name)791 bool Object::HasSpecificClassOf(String* name) {
792   return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
793 }
794 
795 
GetElement(uint32_t index)796 MaybeObject* Object::GetElement(uint32_t index) {
797   // GetElement can trigger a getter which can cause allocation.
798   // This was not always the case. This ASSERT is here to catch
799   // leftover incorrect uses.
800   ASSERT(HEAP->IsAllocationAllowed());
801   return GetElementWithReceiver(this, index);
802 }
803 
804 
GetElementNoExceptionThrown(uint32_t index)805 Object* Object::GetElementNoExceptionThrown(uint32_t index) {
806   MaybeObject* maybe = GetElementWithReceiver(this, index);
807   ASSERT(!maybe->IsFailure());
808   Object* result = NULL;  // Initialization to please compiler.
809   maybe->ToObject(&result);
810   return result;
811 }
812 
813 
GetProperty(String * key)814 MaybeObject* Object::GetProperty(String* key) {
815   PropertyAttributes attributes;
816   return GetPropertyWithReceiver(this, key, &attributes);
817 }
818 
819 
GetProperty(String * key,PropertyAttributes * attributes)820 MaybeObject* Object::GetProperty(String* key, PropertyAttributes* attributes) {
821   return GetPropertyWithReceiver(this, key, attributes);
822 }
823 
824 
825 #define FIELD_ADDR(p, offset) \
826   (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)
827 
828 #define READ_FIELD(p, offset) \
829   (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)))
830 
831 #define WRITE_FIELD(p, offset, value) \
832   (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)
833 
834 // TODO(isolates): Pass heap in to these macros.
835 #define WRITE_BARRIER(object, offset) \
836   object->GetHeap()->RecordWrite(object->address(), offset);
837 
838 // CONDITIONAL_WRITE_BARRIER must be issued after the actual
839 // write due to the assert validating the written value.
840 #define CONDITIONAL_WRITE_BARRIER(heap, object, offset, mode) \
841   if (mode == UPDATE_WRITE_BARRIER) { \
842     heap->RecordWrite(object->address(), offset); \
843   } else { \
844     ASSERT(mode == SKIP_WRITE_BARRIER); \
845     ASSERT(heap->InNewSpace(object) || \
846            !heap->InNewSpace(READ_FIELD(object, offset)) || \
847            Page::FromAddress(object->address())->           \
848                IsRegionDirty(object->address() + offset));  \
849   }
850 
851 #ifndef V8_TARGET_ARCH_MIPS
852   #define READ_DOUBLE_FIELD(p, offset) \
853     (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)))
854 #else  // V8_TARGET_ARCH_MIPS
855   // Prevent gcc from using load-double (mips ldc1) on (possibly)
856   // non-64-bit aligned HeapNumber::value.
read_double_field(HeapNumber * p,int offset)857   static inline double read_double_field(HeapNumber* p, int offset) {
858     union conversion {
859       double d;
860       uint32_t u[2];
861     } c;
862     c.u[0] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)));
863     c.u[1] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4)));
864     return c.d;
865   }
866   #define READ_DOUBLE_FIELD(p, offset) read_double_field(p, offset)
867 #endif  // V8_TARGET_ARCH_MIPS
868 
869 
870 #ifndef V8_TARGET_ARCH_MIPS
871   #define WRITE_DOUBLE_FIELD(p, offset, value) \
872     (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)
873 #else  // V8_TARGET_ARCH_MIPS
874   // Prevent gcc from using store-double (mips sdc1) on (possibly)
875   // non-64-bit aligned HeapNumber::value.
write_double_field(HeapNumber * p,int offset,double value)876   static inline void write_double_field(HeapNumber* p, int offset,
877                                         double value) {
878     union conversion {
879       double d;
880       uint32_t u[2];
881     } c;
882     c.d = value;
883     (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset))) = c.u[0];
884     (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4))) = c.u[1];
885   }
886   #define WRITE_DOUBLE_FIELD(p, offset, value) \
887     write_double_field(p, offset, value)
888 #endif  // V8_TARGET_ARCH_MIPS
889 
890 
891 #define READ_INT_FIELD(p, offset) \
892   (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)))
893 
894 #define WRITE_INT_FIELD(p, offset, value) \
895   (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)
896 
897 #define READ_INTPTR_FIELD(p, offset) \
898   (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)))
899 
900 #define WRITE_INTPTR_FIELD(p, offset, value) \
901   (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)
902 
903 #define READ_UINT32_FIELD(p, offset) \
904   (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)))
905 
906 #define WRITE_UINT32_FIELD(p, offset, value) \
907   (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)
908 
909 #define READ_SHORT_FIELD(p, offset) \
910   (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)))
911 
912 #define WRITE_SHORT_FIELD(p, offset, value) \
913   (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)
914 
915 #define READ_BYTE_FIELD(p, offset) \
916   (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)))
917 
918 #define WRITE_BYTE_FIELD(p, offset, value) \
919   (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)
920 
921 
RawField(HeapObject * obj,int byte_offset)922 Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
923   return &READ_FIELD(obj, byte_offset);
924 }
925 
926 
value()927 int Smi::value() {
928   return Internals::SmiValue(this);
929 }
930 
931 
FromInt(int value)932 Smi* Smi::FromInt(int value) {
933   ASSERT(Smi::IsValid(value));
934   int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
935   intptr_t tagged_value =
936       (static_cast<intptr_t>(value) << smi_shift_bits) | kSmiTag;
937   return reinterpret_cast<Smi*>(tagged_value);
938 }
939 
940 
FromIntptr(intptr_t value)941 Smi* Smi::FromIntptr(intptr_t value) {
942   ASSERT(Smi::IsValid(value));
943   int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
944   return reinterpret_cast<Smi*>((value << smi_shift_bits) | kSmiTag);
945 }
946 
947 
// A Failure is an immediate (non-heap) tagged value; its payload packs a
// failure type tag and, for RETRY_AFTER_GC, the requested allocation space.
Failure::Type Failure::type() const {
  return static_cast<Type>(value() & kFailureTypeTagMask);
}


bool Failure::IsInternalError() const {
  return type() == INTERNAL_ERROR;
}


bool Failure::IsOutOfMemoryException() const {
  return type() == OUT_OF_MEMORY_EXCEPTION;
}


// Only meaningful for RETRY_AFTER_GC failures: the space to retry
// allocation in once a GC has run.
AllocationSpace Failure::allocation_space() const {
  ASSERT_EQ(RETRY_AFTER_GC, type());
  return static_cast<AllocationSpace>((value() >> kFailureTypeTagSize)
                                      & kSpaceTagMask);
}


Failure* Failure::InternalError() {
  return Construct(INTERNAL_ERROR);
}


Failure* Failure::Exception() {
  return Construct(EXCEPTION);
}


Failure* Failure::OutOfMemoryException() {
  return Construct(OUT_OF_MEMORY_EXCEPTION);
}


// Recovers the packed info word by stripping the failure tag bits from
// this pointer's bit pattern.
intptr_t Failure::value() const {
  return static_cast<intptr_t>(
      reinterpret_cast<uintptr_t>(this) >> kFailureTagSize);
}


Failure* Failure::RetryAfterGC() {
  return RetryAfterGC(NEW_SPACE);
}


Failure* Failure::RetryAfterGC(AllocationSpace space) {
  ASSERT((space & ~kSpaceTagMask) == 0);
  return Construct(RETRY_AFTER_GC, space);
}


// Packs (value, type) into a tagged immediate. The ASSERT checks that the
// shift does not lose high bits of 'info'.
Failure* Failure::Construct(Type type, intptr_t value) {
  uintptr_t info =
      (static_cast<uintptr_t>(value) << kFailureTypeTagSize) | type;
  ASSERT(((info << kFailureTagSize) >> kFailureTagSize) == info);
  return reinterpret_cast<Failure*>((info << kFailureTagSize) | kFailureTag);
}
1008 
1009 
// Returns true if 'value' fits in a Smi payload on the current target.
bool Smi::IsValid(intptr_t value) {
#ifdef DEBUG
  bool in_range = (value >= kMinValue) && (value <= kMaxValue);
#endif

#ifdef V8_TARGET_ARCH_X64
  // To be representable as a long smi, the value must be a 32-bit integer.
  bool result = (value == static_cast<int32_t>(value));
#else
  // To be representable as an tagged small integer, the two
  // most-significant bits of 'value' must be either 00 or 11 due to
  // sign-extension. To check this we add 01 to the two
  // most-significant bits, and check if the most-significant bit is 0
  //
  // CAUTION: The original code below:
  // bool result = ((value + 0x40000000) & 0x80000000) == 0;
  // may lead to incorrect results according to the C language spec, and
  // in fact doesn't work correctly with gcc4.1.1 in some cases: The
  // compiler may produce undefined results in case of signed integer
  // overflow. The computation must be done w/ unsigned ints.
  bool result = (static_cast<uintptr_t>(value + 0x40000000U) < 0x80000000U);
#endif
  ASSERT(result == in_range);
  return result;
}
1035 
1036 
// A MapWord is the raw first word of a heap object. Outside GC it holds
// the map pointer; during GC the same word is reused for forwarding
// addresses, mark/overflow bits, and (during compaction) an encoded
// map address + live-offset pair.
MapWord MapWord::FromMap(Map* map) {
  return MapWord(reinterpret_cast<uintptr_t>(map));
}


Map* MapWord::ToMap() {
  return reinterpret_cast<Map*>(value_);
}


// A forwarding address is stored untagged, so the word looks like a Smi.
bool MapWord::IsForwardingAddress() {
  return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
}


MapWord MapWord::FromForwardingAddress(HeapObject* object) {
  Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
  return MapWord(reinterpret_cast<uintptr_t>(raw));
}


HeapObject* MapWord::ToForwardingAddress() {
  ASSERT(IsForwardingAddress());
  return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
}


// Marking is encoded inverted: a CLEARED kMarkingMask bit means marked.
bool MapWord::IsMarked() {
  return (value_ & kMarkingMask) == 0;
}


void MapWord::SetMark() {
  value_ &= ~kMarkingMask;
}


void MapWord::ClearMark() {
  value_ |= kMarkingMask;
}


bool MapWord::IsOverflowed() {
  return (value_ & kOverflowMask) != 0;
}


void MapWord::SetOverflow() {
  value_ |= kOverflowMask;
}


void MapWord::ClearOverflow() {
  value_ &= ~kOverflowMask;
}


// Packs a map address (as page index + in-page offset) together with the
// object's live-byte offset into a single word for the compacting GC.
MapWord MapWord::EncodeAddress(Address map_address, int offset) {
  // Offset is the distance in live bytes from the first live object in the
  // same page. The offset between two objects in the same page should not
  // exceed the object area size of a page.
  ASSERT(0 <= offset && offset < Page::kObjectAreaSize);

  uintptr_t compact_offset = offset >> kObjectAlignmentBits;
  ASSERT(compact_offset < (1 << kForwardingOffsetBits));

  Page* map_page = Page::FromAddress(map_address);
  ASSERT_MAP_PAGE_INDEX(map_page->mc_page_index);

  uintptr_t map_page_offset =
      map_page->Offset(map_address) >> kMapAlignmentBits;

  uintptr_t encoding =
      (compact_offset << kForwardingOffsetShift) |
      (map_page_offset << kMapPageOffsetShift) |
      (map_page->mc_page_index << kMapPageIndexShift);
  return MapWord(encoding);
}


// Inverse of EncodeAddress for the map-address half of the encoding.
Address MapWord::DecodeMapAddress(MapSpace* map_space) {
  int map_page_index =
      static_cast<int>((value_ & kMapPageIndexMask) >> kMapPageIndexShift);
  ASSERT_MAP_PAGE_INDEX(map_page_index);

  int map_page_offset = static_cast<int>(
      ((value_ & kMapPageOffsetMask) >> kMapPageOffsetShift) <<
      kMapAlignmentBits);

  return (map_space->PageAddress(map_page_index) + map_page_offset);
}


int MapWord::DecodeOffset() {
  // The offset field is represented in the kForwardingOffsetBits
  // most-significant bits.
  uintptr_t offset = (value_ >> kForwardingOffsetShift) << kObjectAlignmentBits;
  ASSERT(offset < static_cast<uintptr_t>(Page::kObjectAreaSize));
  return static_cast<int>(offset);
}


MapWord MapWord::FromEncodedAddress(Address address) {
  return MapWord(reinterpret_cast<uintptr_t>(address));
}


Address MapWord::ToEncodedAddress() {
  return reinterpret_cast<Address>(value_);
}
1147 
1148 
#ifdef DEBUG
// Debug-only: verify that the pointer stored at 'offset' is a valid
// heap pointer.
void HeapObject::VerifyObjectField(int offset) {
  VerifyPointer(READ_FIELD(this, offset));
}

// Debug-only: verify that the field at 'offset' holds a Smi.
void HeapObject::VerifySmiField(int offset) {
  ASSERT(READ_FIELD(this, offset)->IsSmi());
}
#endif
1158 
1159 
// Retrieves the owning Heap via this object's map. Not usable during GC,
// when the map word may hold a forwarding address or mark bits instead.
Heap* HeapObject::GetHeap() {
  // During GC, the map pointer in HeapObject is used in various ways that
  // prevent us from retrieving Heap from the map.
  // Assert that we are not in GC, implement GC code in a way that it doesn't
  // pull heap from the map.
  ASSERT(HEAP->is_safe_to_read_maps());
  return map()->heap();
}


Isolate* HeapObject::GetIsolate() {
  return GetHeap()->isolate();
}


Map* HeapObject::map() {
  return map_word().ToMap();
}


void HeapObject::set_map(Map* value) {
  set_map_word(MapWord::FromMap(value));
}


// Reads the raw first word of the object (map pointer or GC metadata).
MapWord HeapObject::map_word() {
  return MapWord(reinterpret_cast<uintptr_t>(READ_FIELD(this, kMapOffset)));
}


void HeapObject::set_map_word(MapWord map_word) {
  // WRITE_FIELD does not invoke write barrier, but there is no need
  // here.
  WRITE_FIELD(this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
}


// Converts an untagged address to a tagged HeapObject pointer.
HeapObject* HeapObject::FromAddress(Address address) {
  ASSERT_TAG_ALIGNED(address);
  return reinterpret_cast<HeapObject*>(address + kHeapObjectTag);
}


// Inverse of FromAddress: strips the heap-object tag.
Address HeapObject::address() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag;
}


int HeapObject::Size() {
  return SizeFromMap(map());
}


// Visits all pointer fields in [start, end) with the given visitor.
void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) {
  v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
                   reinterpret_cast<Object**>(FIELD_ADDR(this, end)));
}


void HeapObject::IteratePointer(ObjectVisitor* v, int offset) {
  v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
}


// The mark/overflow operations below delegate to the map word, which
// carries these GC bits; each mutation reads, modifies and writes back
// the whole word.
bool HeapObject::IsMarked() {
  return map_word().IsMarked();
}


void HeapObject::SetMark() {
  ASSERT(!IsMarked());
  MapWord first_word = map_word();
  first_word.SetMark();
  set_map_word(first_word);
}


void HeapObject::ClearMark() {
  ASSERT(IsMarked());
  MapWord first_word = map_word();
  first_word.ClearMark();
  set_map_word(first_word);
}


bool HeapObject::IsOverflowed() {
  return map_word().IsOverflowed();
}


void HeapObject::SetOverflow() {
  MapWord first_word = map_word();
  first_word.SetOverflow();
  set_map_word(first_word);
}


void HeapObject::ClearOverflow() {
  ASSERT(IsOverflowed());
  MapWord first_word = map_word();
  first_word.ClearOverflow();
  set_map_word(first_word);
}
1263 
1264 
double HeapNumber::value() {
  return READ_DOUBLE_FIELD(this, kValueOffset);
}


void HeapNumber::set_value(double value) {
  WRITE_DOUBLE_FIELD(this, kValueOffset, value);
}


// Extracts the unbiased IEEE-754 exponent from the high word of the double.
int HeapNumber::get_exponent() {
  return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
          kExponentShift) - kExponentBias;
}


// Returns the raw sign bit (non-zero if negative), not a normalized 0/1.
int HeapNumber::get_sign() {
  return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
}
1284 
1285 
ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)


// Returns the elements backing store; may be a FixedArray, a dictionary
// (also a FixedArray), or an ExternalArray.
HeapObject* JSObject::elements() {
  Object* array = READ_FIELD(this, kElementsOffset);
  // In the assert below Dictionary is covered under FixedArray.
  ASSERT(array->IsFixedArray() || array->IsExternalArray());
  return reinterpret_cast<HeapObject*>(array);
}


// Replaces the elements backing store. The first assert checks that the
// store's map agrees with the fast-elements bit on this object's map.
void JSObject::set_elements(HeapObject* value, WriteBarrierMode mode) {
  ASSERT(map()->has_fast_elements() ==
         (value->map() == GetHeap()->fixed_array_map() ||
          value->map() == GetHeap()->fixed_cow_array_map()));
  // In the assert below Dictionary is covered under FixedArray.
  ASSERT(value->IsFixedArray() || value->IsExternalArray());
  WRITE_FIELD(this, kElementsOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, mode);
}


// No write barrier needed: the empty fixed array is never in new space
// (asserted below).
void JSObject::initialize_properties() {
  ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
  WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
}


void JSObject::initialize_elements() {
  ASSERT(map()->has_fast_elements());
  ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
  WRITE_FIELD(this, kElementsOffset, GetHeap()->empty_fixed_array());
}


// Switches this object to a fast-elements map with empty elements.
// May fail (allocation) -- hence the MaybeObject return.
MaybeObject* JSObject::ResetElements() {
  Object* obj;
  { MaybeObject* maybe_obj = map()->GetFastElementsMap();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }
  set_map(Map::cast(obj));
  initialize_elements();
  return this;
}
1330 
1331 
ACCESSORS(Oddball, to_string, String, kToStringOffset)
ACCESSORS(Oddball, to_number, Object, kToNumberOffset)


// The oddball kind (e.g. undefined/null/true/false) is stored as a raw byte.
byte Oddball::kind() {
  return READ_BYTE_FIELD(this, kKindOffset);
}


void Oddball::set_kind(byte value) {
  WRITE_BYTE_FIELD(this, kKindOffset, value);
}
1344 
1345 
Object* JSGlobalPropertyCell::value() {
  return READ_FIELD(this, kValueOffset);
}


void JSGlobalPropertyCell::set_value(Object* val, WriteBarrierMode ignored) {
  // The write barrier is not used for global property cells.
  ASSERT(!val->IsJSGlobalPropertyCell());
  WRITE_FIELD(this, kValueOffset, val);
}
1356 
1357 
// Returns the fixed header size (in bytes) for this object's instance
// type; internal fields and in-object properties follow the header.
int JSObject::GetHeaderSize() {
  InstanceType type = map()->instance_type();
  // Check for the most common kind of JavaScript object before
  // falling into the generic switch. This speeds up the internal
  // field operations considerably on average.
  if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
  switch (type) {
    case JS_GLOBAL_PROXY_TYPE:
      return JSGlobalProxy::kSize;
    case JS_GLOBAL_OBJECT_TYPE:
      return JSGlobalObject::kSize;
    case JS_BUILTINS_OBJECT_TYPE:
      return JSBuiltinsObject::kSize;
    case JS_FUNCTION_TYPE:
      return JSFunction::kSize;
    case JS_VALUE_TYPE:
      return JSValue::kSize;
    // NOTE(review): JS_ARRAY_TYPE and JS_REGEXP_TYPE return
    // JSValue::kSize -- presumably JSArray/JSRegExp headers coincide
    // with JSValue's here; confirm against the kSize constants in
    // objects.h.
    case JS_ARRAY_TYPE:
      return JSValue::kSize;
    case JS_REGEXP_TYPE:
      return JSValue::kSize;
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
      return JSObject::kHeaderSize;
    case JS_MESSAGE_OBJECT_TYPE:
      return JSMessageObject::kSize;
    default:
      UNREACHABLE();
      return 0;
  }
}
1388 
1389 
// Number of embedder internal fields: total size minus header, in words,
// minus the in-object properties (which also live past the header).
int JSObject::GetInternalFieldCount() {
  ASSERT(1 << kPointerSizeLog2 == kPointerSize);
  // Make sure to adjust for the number of in-object properties. These
  // properties do contribute to the size, but are not internal fields.
  return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
         map()->inobject_properties();
}


int JSObject::GetInternalFieldOffset(int index) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  return GetHeaderSize() + (kPointerSize * index);
}


Object* JSObject::GetInternalField(int index) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));
}


void JSObject::SetInternalField(int index, Object* value) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  int offset = GetHeaderSize() + (kPointerSize * index);
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(this, offset);
}
1423 
1424 
// Access fast-case object properties at index. The use of these routines
// is needed to correctly distinguish between properties stored in-object and
// properties stored in the properties array.
Object* JSObject::FastPropertyAt(int index) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  if (index < 0) {
    // Negative index: property is stored in-object, at the end of the
    // instance (index counts backwards from instance_size).
    int offset = map()->instance_size() + (index * kPointerSize);
    return READ_FIELD(this, offset);
  } else {
    // Non-negative index: property lives in the out-of-object
    // properties array.
    ASSERT(index < properties()->length());
    return properties()->get(index);
  }
}


// Store counterpart of FastPropertyAt; returns 'value' for convenience.
Object* JSObject::FastPropertyAtPut(int index, Object* value) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  if (index < 0) {
    int offset = map()->instance_size() + (index * kPointerSize);
    WRITE_FIELD(this, offset, value);
    WRITE_BARRIER(this, offset);
  } else {
    ASSERT(index < properties()->length());
    properties()->set(index, value);
  }
  return value;
}
1454 
1455 
// These variants require the property to be stored in-object (index must
// map below zero after adjustment), unlike the FastPropertyAt* pair.
int JSObject::GetInObjectPropertyOffset(int index) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  ASSERT(index < 0);
  return map()->instance_size() + (index * kPointerSize);
}


Object* JSObject::InObjectPropertyAt(int index) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  ASSERT(index < 0);
  int offset = map()->instance_size() + (index * kPointerSize);
  return READ_FIELD(this, offset);
}


// Stores 'value' into an in-object property slot; returns 'value'.
Object* JSObject::InObjectPropertyAtPut(int index,
                                        Object* value,
                                        WriteBarrierMode mode) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  ASSERT(index < 0);
  int offset = map()->instance_size() + (index * kPointerSize);
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, mode);
  return value;
}
1484 
1485 
1486 
// Fills every word past the header with 'value'. No write barriers: the
// assert guarantees 'value' is not a new-space object.
void JSObject::InitializeBody(int object_size, Object* value) {
  ASSERT(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
    WRITE_FIELD(this, offset, value);
  }
}
1493 
1494 
// Fast-mode objects keep properties in a plain array; slow mode uses a
// dictionary.
bool JSObject::HasFastProperties() {
  return !properties()->IsDictionary();
}


int JSObject::MaxFastProperties() {
  // Allow extra fast properties if the object has more than
  // kMaxFastProperties in-object properties. When this is the case,
  // it is very unlikely that the object is being used as a dictionary
  // and there is a good chance that allowing more map transitions
  // will be worth it.
  return Max(map()->inobject_properties(), kMaxFastProperties);
}
1508 
1509 
// Fills every word past the header with undefined. No write barrier
// needed: undefined is an immortal immovable root.
void Struct::InitializeBody(int object_size) {
  Object* value = GetHeap()->undefined_value();
  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
    WRITE_FIELD(this, offset, value);
  }
}
1516 
1517 
// Attempts to interpret this object as an array index. Succeeds for
// non-negative Smis and for heap numbers that are exactly representable
// as uint32; writes the result through 'index'.
bool Object::ToArrayIndex(uint32_t* index) {
  if (IsSmi()) {
    int value = Smi::cast(this)->value();
    if (value < 0) return false;
    *index = value;
    return true;
  }
  if (IsHeapNumber()) {
    double value = HeapNumber::cast(this)->value();
    uint32_t uint_value = static_cast<uint32_t>(value);
    // Round-trip check rejects fractions, negatives and out-of-range values.
    if (value == static_cast<double>(uint_value)) {
      *index = uint_value;
      return true;
    }
  }
  return false;
}
1535 
1536 
IsStringObjectWithCharacterAt(uint32_t index)1537 bool Object::IsStringObjectWithCharacterAt(uint32_t index) {
1538   if (!this->IsJSValue()) return false;
1539 
1540   JSValue* js_value = JSValue::cast(this);
1541   if (!js_value->value()->IsString()) return false;
1542 
1543   String* str = String::cast(js_value->value());
1544   if (index >= (uint32_t)str->length()) return false;
1545 
1546   return true;
1547 }
1548 
1549 
// Bounds-checked (debug only) element read.
Object* FixedArray::get(int index) {
  ASSERT(index >= 0 && index < this->length());
  return READ_FIELD(this, kHeaderSize + index * kPointerSize);
}
1554 
1555 
set(int index,Smi * value)1556 void FixedArray::set(int index, Smi* value) {
1557   ASSERT(map() != HEAP->fixed_cow_array_map());
1558   ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
1559   int offset = kHeaderSize + index * kPointerSize;
1560   WRITE_FIELD(this, offset, value);
1561 }
1562 
1563 
// General element store with unconditional write barrier.
void FixedArray::set(int index, Object* value) {
  ASSERT(map() != HEAP->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(this, offset);
}
1571 
1572 
// New-space objects never need a write barrier. The AssertNoAllocation
// token guarantees the object cannot move between this check and the
// subsequent stores.
WriteBarrierMode HeapObject::GetWriteBarrierMode(const AssertNoAllocation&) {
  if (GetHeap()->InNewSpace(this)) return SKIP_WRITE_BARRIER;
  return UPDATE_WRITE_BARRIER;
}
1577 
1578 
// Element store with caller-selected write-barrier mode (see
// GetWriteBarrierMode above).
void FixedArray::set(int index,
                     Object* value,
                     WriteBarrierMode mode) {
  ASSERT(map() != HEAP->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, mode);
}
1588 
1589 
// Barrier-free store; only valid when 'value' is provably not in new
// space (asserted), so the remembered set need not be updated.
void FixedArray::fast_set(FixedArray* array, int index, Object* value) {
  ASSERT(array->map() != HEAP->raw_unchecked_fixed_cow_array_map());
  ASSERT(index >= 0 && index < array->length());
  ASSERT(!HEAP->InNewSpace(value));
  WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
}
1596 
1597 
void FixedArray::set_undefined(int index) {
  ASSERT(map() != HEAP->fixed_cow_array_map());
  set_undefined(GetHeap(), index);
}


// Barrier-free: undefined is never in new space (asserted).
void FixedArray::set_undefined(Heap* heap, int index) {
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!heap->InNewSpace(heap->undefined_value()));
  WRITE_FIELD(this, kHeaderSize + index * kPointerSize,
              heap->undefined_value());
}
1610 
1611 
void FixedArray::set_null(int index) {
  set_null(GetHeap(), index);
}


// Barrier-free: null is never in new space (asserted).
void FixedArray::set_null(Heap* heap, int index) {
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!heap->InNewSpace(heap->null_value()));
  WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value());
}
1622 
1623 
// Barrier-free: the hole is never in new space (asserted).
// NOTE(review): the asserts use the HEAP macro while the store uses
// GetHeap() -- presumably equivalent for the current isolate; confirm.
void FixedArray::set_the_hole(int index) {
  ASSERT(map() != HEAP->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!HEAP->InNewSpace(HEAP->the_hole_value()));
  WRITE_FIELD(this,
              kHeaderSize + index * kPointerSize,
              GetHeap()->the_hole_value());
}
1632 
1633 
// Unchecked variants: no COW-map or bounds asserts; callers guarantee
// validity (used e.g. during bootstrapping and GC).
void FixedArray::set_unchecked(int index, Smi* value) {
  ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
}


void FixedArray::set_unchecked(Heap* heap,
                               int index,
                               Object* value,
                               WriteBarrierMode mode) {
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(heap, this, offset, mode);
}
1649 
1650 
// Barrier-free null store without the COW-map assert.
// NOTE(review): the new-space assert goes through the HEAP macro while
// the value comes from the 'heap' parameter -- confirm these always
// refer to the same isolate's heap here.
void FixedArray::set_null_unchecked(Heap* heap, int index) {
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!HEAP->InNewSpace(heap->null_value()));
  WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value());
}
1656 
1657 
// Address of the first element slot, for bulk memcpy-style operations.
Object** FixedArray::data_start() {
  return HeapObject::RawField(this, kHeaderSize);
}
1661 
1662 
// Empty descriptor arrays are represented by the canonical
// empty_descriptor_array, which is shorter than kFirstIndex.
bool DescriptorArray::IsEmpty() {
  ASSERT(this->length() > kFirstIndex ||
         this == HEAP->empty_descriptor_array());
  return length() <= kFirstIndex;
}


// Swaps two elements using barrier-free stores (values are known to be
// outside new space, per fast_set's asserts).
void DescriptorArray::fast_swap(FixedArray* array, int first, int second) {
  Object* tmp = array->get(first);
  fast_set(array, first, array->get(second));
  fast_set(array, second, tmp);
}
1675 
1676 
// Finds the descriptor index for 'name', or kNotFound. Keys are kept
// sorted, so large arrays use binary search; small symbol lookups use a
// linear scan (pointer comparison is enough for symbols).
int DescriptorArray::Search(String* name) {
  SLOW_ASSERT(IsSortedNoDuplicates());

  // Check for empty descriptor array.
  int nof = number_of_descriptors();
  if (nof == 0) return kNotFound;

  // Fast case: do linear search for small arrays.
  const int kMaxElementsForLinearSearch = 8;
  if (StringShape(name).IsSymbol() && nof < kMaxElementsForLinearSearch) {
    return LinearSearch(name, nof);
  }

  // Slow case: perform binary search.
  return BinarySearch(name, 0, nof - 1);
}
1693 
1694 
// Search() with memoization in the isolate's descriptor lookup cache;
// kNotFound results are cached too.
int DescriptorArray::SearchWithCache(String* name) {
  int number = GetIsolate()->descriptor_lookup_cache()->Lookup(this, name);
  if (number == DescriptorLookupCache::kAbsent) {
    number = Search(name);
    GetIsolate()->descriptor_lookup_cache()->Update(this, name, number);
  }
  return number;
}
1703 
1704 
// Per-descriptor accessors. Keys live in this array directly; values and
// details live in the separate content array, addressed via the
// To*Index() mappings.
String* DescriptorArray::GetKey(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return String::cast(get(ToKeyIndex(descriptor_number)));
}


Object* DescriptorArray::GetValue(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return GetContentArray()->get(ToValueIndex(descriptor_number));
}


// Details are stored as a Smi-encoded PropertyDetails.
Smi* DescriptorArray::GetDetails(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return Smi::cast(GetContentArray()->get(ToDetailsIndex(descriptor_number)));
}


PropertyType DescriptorArray::GetType(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return PropertyDetails(GetDetails(descriptor_number)).type();
}


int DescriptorArray::GetFieldIndex(int descriptor_number) {
  return Descriptor::IndexFromValue(GetValue(descriptor_number));
}


JSFunction* DescriptorArray::GetConstantFunction(int descriptor_number) {
  return JSFunction::cast(GetValue(descriptor_number));
}


Object* DescriptorArray::GetCallbacksObject(int descriptor_number) {
  ASSERT(GetType(descriptor_number) == CALLBACKS);
  return GetValue(descriptor_number);
}


// CALLBACKS descriptors store the AccessorDescriptor behind a Proxy.
AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
  ASSERT(GetType(descriptor_number) == CALLBACKS);
  Proxy* p = Proxy::cast(GetCallbacksObject(descriptor_number));
  return reinterpret_cast<AccessorDescriptor*>(p->proxy());
}


// Real properties sort below FIRST_PHANTOM_PROPERTY_TYPE; phantom entries
// (transitions, null descriptors) sort above it.
bool DescriptorArray::IsProperty(int descriptor_number) {
  return GetType(descriptor_number) < FIRST_PHANTOM_PROPERTY_TYPE;
}


bool DescriptorArray::IsTransition(int descriptor_number) {
  PropertyType t = GetType(descriptor_number);
  return t == MAP_TRANSITION || t == CONSTANT_TRANSITION ||
      t == EXTERNAL_ARRAY_TRANSITION;
}


bool DescriptorArray::IsNullDescriptor(int descriptor_number) {
  return GetType(descriptor_number) == NULL_DESCRIPTOR;
}


bool DescriptorArray::IsDontEnum(int descriptor_number) {
  return PropertyDetails(GetDetails(descriptor_number)).IsDontEnum();
}


// Copies descriptor 'descriptor_number' out into 'desc'.
void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
  desc->Init(GetKey(descriptor_number),
             GetValue(descriptor_number),
             PropertyDetails(GetDetails(descriptor_number)));
}
1779 
1780 
// Writes 'desc' into slot 'descriptor_number'. Uses barrier-free
// fast_set, which is only valid because the asserted new-space checks
// hold for all three stored values.
void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
  // Range check.
  ASSERT(descriptor_number < number_of_descriptors());

  // Make sure none of the elements in desc are in new space.
  ASSERT(!HEAP->InNewSpace(desc->GetKey()));
  ASSERT(!HEAP->InNewSpace(desc->GetValue()));

  fast_set(this, ToKeyIndex(descriptor_number), desc->GetKey());
  FixedArray* content_array = GetContentArray();
  fast_set(content_array, ToValueIndex(descriptor_number), desc->GetValue());
  fast_set(content_array, ToDetailsIndex(descriptor_number),
           desc->GetDetails().AsSmi());
}


void DescriptorArray::CopyFrom(int index, DescriptorArray* src, int src_index) {
  Descriptor desc;
  src->Get(src_index, &desc);
  Set(index, &desc);
}


// Swaps two full descriptors (key, value and details in lockstep).
void DescriptorArray::Swap(int first, int second) {
  fast_swap(this, ToKeyIndex(first), ToKeyIndex(second));
  FixedArray* content_array = GetContentArray();
  fast_swap(content_array, ToValueIndex(first), ToValueIndex(second));
  fast_swap(content_array, ToDetailsIndex(first),  ToDetailsIndex(second));
}
1810 
1811 
template<typename Shape, typename Key>
int HashTable<Shape, Key>::FindEntry(Key key) {
  return FindEntry(GetIsolate(), key);
}


// Find entry for key otherwise return kNotFound.
// Open-addressed probe: undefined marks an empty slot (search ends),
// null marks a deleted slot (search continues past it).
template<typename Shape, typename Key>
int HashTable<Shape, Key>::FindEntry(Isolate* isolate, Key key) {
  uint32_t capacity = Capacity();
  uint32_t entry = FirstProbe(Shape::Hash(key), capacity);
  uint32_t count = 1;
  // EnsureCapacity will guarantee the hash table is never full.
  while (true) {
    Object* element = KeyAt(entry);
    if (element == isolate->heap()->undefined_value()) break;  // Empty entry.
    if (element != isolate->heap()->null_value() &&
        Shape::IsMatch(key, element)) return entry;
    entry = NextProbe(entry, count++, capacity);
  }
  return kNotFound;
}
1834 
1835 
// True if the dictionary has been flagged to force slow (dictionary-mode)
// elements. The flag is stored in the low bit of the max-number-key Smi;
// a non-Smi in that slot means the flag was never set.
bool NumberDictionary::requires_slow_elements() {
  Object* max_index_object = get(kMaxNumberKeyIndex);
  if (!max_index_object->IsSmi()) return false;
  return 0 !=
      (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
}
1842 
// Returns the largest numeric key ever stored, shifted out of the tagged
// Smi that also carries the requires-slow-elements bit. Only valid while
// that bit is clear (checked in debug mode).
uint32_t NumberDictionary::max_number_key() {
  ASSERT(!requires_slow_elements());
  Object* max_index_object = get(kMaxNumberKeyIndex);
  if (!max_index_object->IsSmi()) return 0;
  uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
  return value >> kRequiresSlowElementsTagSize;
}
1850 
// Permanently flags this dictionary as requiring slow elements. Note this
// overwrites the stored max number key with just the flag bit.
void NumberDictionary::set_requires_slow_elements() {
  set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
}
1854 
1855 
1856 // ------------------------------------
1857 // Cast operations
1858 
1859 
// Expand the checked Type::cast(Object*) helpers for each heap object type.
CAST_ACCESSOR(FixedArray)
CAST_ACCESSOR(DescriptorArray)
CAST_ACCESSOR(DeoptimizationInputData)
CAST_ACCESSOR(DeoptimizationOutputData)
CAST_ACCESSOR(SymbolTable)
CAST_ACCESSOR(JSFunctionResultCache)
CAST_ACCESSOR(NormalizedMapCache)
CAST_ACCESSOR(CompilationCacheTable)
CAST_ACCESSOR(CodeCacheHashTable)
CAST_ACCESSOR(MapCache)
CAST_ACCESSOR(String)
CAST_ACCESSOR(SeqString)
CAST_ACCESSOR(SeqAsciiString)
CAST_ACCESSOR(SeqTwoByteString)
CAST_ACCESSOR(ConsString)
CAST_ACCESSOR(ExternalString)
CAST_ACCESSOR(ExternalAsciiString)
CAST_ACCESSOR(ExternalTwoByteString)
CAST_ACCESSOR(JSObject)
CAST_ACCESSOR(Smi)
CAST_ACCESSOR(HeapObject)
CAST_ACCESSOR(HeapNumber)
CAST_ACCESSOR(Oddball)
CAST_ACCESSOR(JSGlobalPropertyCell)
CAST_ACCESSOR(SharedFunctionInfo)
CAST_ACCESSOR(Map)
CAST_ACCESSOR(JSFunction)
CAST_ACCESSOR(GlobalObject)
CAST_ACCESSOR(JSGlobalProxy)
CAST_ACCESSOR(JSGlobalObject)
CAST_ACCESSOR(JSBuiltinsObject)
CAST_ACCESSOR(Code)
CAST_ACCESSOR(JSArray)
CAST_ACCESSOR(JSRegExp)
CAST_ACCESSOR(Proxy)
CAST_ACCESSOR(ByteArray)
CAST_ACCESSOR(ExternalArray)
CAST_ACCESSOR(ExternalByteArray)
CAST_ACCESSOR(ExternalUnsignedByteArray)
CAST_ACCESSOR(ExternalShortArray)
CAST_ACCESSOR(ExternalUnsignedShortArray)
CAST_ACCESSOR(ExternalIntArray)
CAST_ACCESSOR(ExternalUnsignedIntArray)
CAST_ACCESSOR(ExternalFloatArray)
CAST_ACCESSOR(ExternalPixelArray)
CAST_ACCESSOR(Struct)
1906 
1907 
// Expand a cast accessor for every struct type in STRUCT_LIST.
#define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
  STRUCT_LIST(MAKE_STRUCT_CAST)
#undef MAKE_STRUCT_CAST
1911 
1912 
// Checked downcast to a HashTable instantiation; templated, so it cannot
// use CAST_ACCESSOR. Debug-asserts the object really is a hash table.
template <typename Shape, typename Key>
HashTable<Shape, Key>* HashTable<Shape, Key>::cast(Object* obj) {
  ASSERT(obj->IsHashTable());
  return reinterpret_cast<HashTable*>(obj);
}
1918 
1919 
// length fields: stored as Smis for heap arrays and strings, and as a raw
// int for external arrays (their length is not a tagged value).
SMI_ACCESSORS(FixedArray, length, kLengthOffset)
SMI_ACCESSORS(ByteArray, length, kLengthOffset)

INT_ACCESSORS(ExternalArray, length, kLengthOffset)


SMI_ACCESSORS(String, length, kLengthOffset)
1927 
1928 
// Raw accessor for the combined hash/length field of a string.
uint32_t String::hash_field() {
  return READ_UINT32_FIELD(this, kHashFieldOffset);
}
1932 
1933 
// Writes the hash field. On 64-bit hosts the field occupies only the low
// 32 bits of a pointer-sized slot, so the upper half is zeroed explicitly
// to keep the whole slot deterministic.
void String::set_hash_field(uint32_t value) {
  WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
#if V8_HOST_ARCH_64_BIT
  WRITE_UINT32_FIELD(this, kHashFieldOffset + kIntSize, 0);
#endif
}
1940 
1941 
// Content equality. Identity is a fast true; two distinct symbols are
// known unequal because symbols are unique per content. Everything else
// falls back to a character-by-character comparison in SlowEquals.
bool String::Equals(String* other) {
  if (other == this) return true;
  if (StringShape(this).IsSymbol() && StringShape(other).IsSymbol()) {
    return false;
  }
  return SlowEquals(other);
}
1949 
1950 
// Attempts to flatten a cons string into a sequential one. Non-cons
// strings are already flat; a cons whose second part is empty is already
// flattened and its first part is returned directly. May fail (allocation)
// via the MaybeObject return.
MaybeObject* String::TryFlatten(PretenureFlag pretenure) {
  if (!StringShape(this).IsCons()) return this;
  ConsString* cons = ConsString::cast(this);
  if (cons->second()->length() == 0) return cons->first();
  return SlowTryFlatten(pretenure);
}
1957 
1958 
// Like TryFlatten, but never fails from the caller's perspective: if the
// flatten allocation did not succeed, the original (unflattened) string
// is returned instead.
String* String::TryFlattenGetString(PretenureFlag pretenure) {
  MaybeObject* flat = TryFlatten(pretenure);
  Object* successfully_flattened;
  if (flat->ToObject(&successfully_flattened)) {
    return String::cast(successfully_flattened);
  }
  return this;
}
1967 
1968 
// Returns the character at index, dispatching on the string's full
// representation tag (sequential/cons/external x ascii/two-byte).
uint16_t String::Get(int index) {
  ASSERT(index >= 0 && index < length());
  switch (StringShape(this).full_representation_tag()) {
    case kSeqStringTag | kAsciiStringTag:
      return SeqAsciiString::cast(this)->SeqAsciiStringGet(index);
    case kSeqStringTag | kTwoByteStringTag:
      return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
    case kConsStringTag | kAsciiStringTag:
    case kConsStringTag | kTwoByteStringTag:
      return ConsString::cast(this)->ConsStringGet(index);
    case kExternalStringTag | kAsciiStringTag:
      return ExternalAsciiString::cast(this)->ExternalAsciiStringGet(index);
    case kExternalStringTag | kTwoByteStringTag:
      return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
    default:
      break;
  }

  UNREACHABLE();
  return 0;
}
1990 
1991 
// Writes a character at index. Only sequential strings are mutable in
// place (asserted); dispatches on ascii vs two-byte representation.
void String::Set(int index, uint16_t value) {
  ASSERT(index >= 0 && index < length());
  ASSERT(StringShape(this).IsSequential());

  return this->IsAsciiRepresentation()
      ? SeqAsciiString::cast(this)->SeqAsciiStringSet(index, value)
      : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
}
2000 
2001 
// A string is flat unless it is a cons string with a non-empty second
// part (flattening leaves the second part empty).
bool String::IsFlat() {
  switch (StringShape(this).representation_tag()) {
    case kConsStringTag: {
      String* second = ConsString::cast(this)->second();
      // Only flattened strings have second part empty.
      return second->length() == 0;
    }
    default:
      return true;
  }
}
2013 
2014 
// Direct byte-level element access for sequential ASCII strings. The
// character data starts immediately after the header.
uint16_t SeqAsciiString::SeqAsciiStringGet(int index) {
  ASSERT(index >= 0 && index < length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
}


void SeqAsciiString::SeqAsciiStringSet(int index, uint16_t value) {
  // Value must fit in one byte for an ASCII string (asserted below).
  ASSERT(index >= 0 && index < length() && value <= kMaxAsciiCharCode);
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
                   static_cast<byte>(value));
}


// Address of the first character (just past the object header).
Address SeqAsciiString::GetCharsAddress() {
  return FIELD_ADDR(this, kHeaderSize);
}


char* SeqAsciiString::GetChars() {
  return reinterpret_cast<char*>(GetCharsAddress());
}
2036 
2037 
// Address of the first character (just past the object header).
Address SeqTwoByteString::GetCharsAddress() {
  return FIELD_ADDR(this, kHeaderSize);
}
2041 
2042 
GetChars()2043 uc16* SeqTwoByteString::GetChars() {
2044   return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
2045 }
2046 
2047 
// Direct 16-bit element access for sequential two-byte strings.
uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) {
  ASSERT(index >= 0 && index < length());
  return READ_SHORT_FIELD(this, kHeaderSize + index * kShortSize);
}


void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) {
  ASSERT(index >= 0 && index < length());
  WRITE_SHORT_FIELD(this, kHeaderSize + index * kShortSize, value);
}


// Object size in bytes, derived from the length field. The instance_type
// parameter is unused here (size depends only on length).
int SeqTwoByteString::SeqTwoByteStringSize(InstanceType instance_type) {
  return SizeFor(length());
}


int SeqAsciiString::SeqAsciiStringSize(InstanceType instance_type) {
  return SizeFor(length());
}
2068 
2069 
// Accessors for the two halves of a cons string. The unchecked_* variants
// skip the String cast check (used where the field may not yet hold a
// valid string, e.g. during GC). Setters take an explicit write-barrier
// mode so callers that know the value is immortal/immovable can skip it.
String* ConsString::first() {
  return String::cast(READ_FIELD(this, kFirstOffset));
}


Object* ConsString::unchecked_first() {
  return READ_FIELD(this, kFirstOffset);
}


void ConsString::set_first(String* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kFirstOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, mode);
}


String* ConsString::second() {
  return String::cast(READ_FIELD(this, kSecondOffset));
}


Object* ConsString::unchecked_second() {
  return READ_FIELD(this, kSecondOffset);
}


void ConsString::set_second(String* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kSecondOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, mode);
}
2100 
2101 
// The resource pointer of an external string is a raw C++ pointer stored
// in an untagged field, so it is read/written through a reinterpret_cast
// of the field address rather than through the tagged-field macros (no
// write barrier is needed for a non-heap pointer).
ExternalAsciiString::Resource* ExternalAsciiString::resource() {
  return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
}


void ExternalAsciiString::set_resource(
    ExternalAsciiString::Resource* resource) {
  *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)) = resource;
}


ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
  return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
}


void ExternalTwoByteString::set_resource(
    ExternalTwoByteString::Resource* resource) {
  *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)) = resource;
}
2122 
2123 
// JSFunctionResultCache layout helpers. The cache is a FixedArray whose
// first slots hold bookkeeping Smis (size, finger) and whose entries
// start at kEntriesIndex.

// Resets the logical size and finger so the cache holds no entries.
void JSFunctionResultCache::MakeZeroSize() {
  set_finger_index(kEntriesIndex);
  set_size(kEntriesIndex);
}


// Clears all entries to the hole and resets the size/finger bookkeeping.
void JSFunctionResultCache::Clear() {
  int cache_size = size();
  Object** entries_start = RawField(this, OffsetOfElementAt(kEntriesIndex));
  MemsetPointer(entries_start,
                GetHeap()->the_hole_value(),
                cache_size - kEntriesIndex);
  MakeZeroSize();
}


int JSFunctionResultCache::size() {
  return Smi::cast(get(kCacheSizeIndex))->value();
}


void JSFunctionResultCache::set_size(int size) {
  set(kCacheSizeIndex, Smi::FromInt(size));
}


int JSFunctionResultCache::finger_index() {
  return Smi::cast(get(kFingerIndex))->value();
}


void JSFunctionResultCache::set_finger_index(int finger_index) {
  set(kFingerIndex, Smi::FromInt(finger_index));
}
2158 
2159 
// Raw byte and int access into a ByteArray's payload, which starts right
// after the header.
byte ByteArray::get(int index) {
  ASSERT(index >= 0 && index < this->length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
}


void ByteArray::set(int index, byte value) {
  ASSERT(index >= 0 && index < this->length());
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
}


// Reads an aligned int at int-index (not byte-index) into the payload.
int ByteArray::get_int(int index) {
  ASSERT(index >= 0 && (index * kIntSize) < this->length());
  return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
}


// Recovers the tagged ByteArray pointer from the address of its payload
// (inverse of GetDataStartAddress).
ByteArray* ByteArray::FromDataStartAddress(Address address) {
  ASSERT_TAG_ALIGNED(address);
  return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
}


// Untagged address of the first payload byte.
Address ByteArray::GetDataStartAddress() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
}
2187 
2188 
// Pixel arrays store their data out of the heap; elements are accessed
// through the external pointer as raw uint8_t.
uint8_t* ExternalPixelArray::external_pixel_pointer() {
  return reinterpret_cast<uint8_t*>(external_pointer());
}


uint8_t ExternalPixelArray::get(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  uint8_t* ptr = external_pixel_pointer();
  return ptr[index];
}


void ExternalPixelArray::set(int index, uint8_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  uint8_t* ptr = external_pixel_pointer();
  ptr[index] = value;
}
2206 
2207 
// The backing-store pointer of an external array is kept as an untagged
// intptr_t field.
void* ExternalArray::external_pointer() {
  intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
  return reinterpret_cast<void*>(ptr);
}


// NOTE(review): the WriteBarrierMode parameter is accepted but unused —
// the stored value is not a heap pointer, so no barrier applies;
// presumably kept for accessor-signature uniformity.
void ExternalArray::set_external_pointer(void* value, WriteBarrierMode mode) {
  intptr_t ptr = reinterpret_cast<intptr_t>(value);
  WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
}
2218 
2219 
// Element accessors for the external (out-of-heap) typed arrays. Each
// pair reads or writes the backing store directly through the external
// pointer, cast to the array's element type; bounds are checked only in
// debug mode.
int8_t ExternalByteArray::get(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  int8_t* ptr = static_cast<int8_t*>(external_pointer());
  return ptr[index];
}


void ExternalByteArray::set(int index, int8_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  int8_t* ptr = static_cast<int8_t*>(external_pointer());
  ptr[index] = value;
}


uint8_t ExternalUnsignedByteArray::get(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
  return ptr[index];
}


void ExternalUnsignedByteArray::set(int index, uint8_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
  ptr[index] = value;
}


int16_t ExternalShortArray::get(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  int16_t* ptr = static_cast<int16_t*>(external_pointer());
  return ptr[index];
}


void ExternalShortArray::set(int index, int16_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  int16_t* ptr = static_cast<int16_t*>(external_pointer());
  ptr[index] = value;
}


uint16_t ExternalUnsignedShortArray::get(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
  return ptr[index];
}


void ExternalUnsignedShortArray::set(int index, uint16_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
  ptr[index] = value;
}


int32_t ExternalIntArray::get(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  int32_t* ptr = static_cast<int32_t*>(external_pointer());
  return ptr[index];
}


void ExternalIntArray::set(int index, int32_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  int32_t* ptr = static_cast<int32_t*>(external_pointer());
  ptr[index] = value;
}


uint32_t ExternalUnsignedIntArray::get(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
  return ptr[index];
}


void ExternalUnsignedIntArray::set(int index, uint32_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
  ptr[index] = value;
}


float ExternalFloatArray::get(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  float* ptr = static_cast<float*>(external_pointer());
  return ptr[index];
}


void ExternalFloatArray::set(int index, float value) {
  ASSERT((index >= 0) && (index < this->length()));
  float* ptr = static_cast<float*>(external_pointer());
  ptr[index] = value;
}
2316 
2317 
// Single-byte Map fields.

// Id of the static visitor used by the GC for objects with this map.
int Map::visitor_id() {
  return READ_BYTE_FIELD(this, kVisitorIdOffset);
}


void Map::set_visitor_id(int id) {
  ASSERT(0 <= id && id < 256);
  WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
}


// Instance size is stored in words (one byte), so scale back to bytes.
int Map::instance_size() {
  return READ_BYTE_FIELD(this, kInstanceSizeOffset) << kPointerSizeLog2;
}


int Map::inobject_properties() {
  return READ_BYTE_FIELD(this, kInObjectPropertiesOffset);
}


int Map::pre_allocated_property_fields() {
  return READ_BYTE_FIELD(this, kPreAllocatedPropertyFieldsOffset);
}
2342 
2343 
// Computes the size in bytes of this object. Fixed-size objects answer
// from the map directly; variable-sized objects (marked with
// kVariableSizeSentinel) are handled inline for the most frequent types,
// ordered by expected frequency.
int HeapObject::SizeFromMap(Map* map) {
  int instance_size = map->instance_size();
  if (instance_size != kVariableSizeSentinel) return instance_size;
  // We can ignore the "symbol" bit because it is only set for symbols
  // and implies a string type.
  int instance_type = static_cast<int>(map->instance_type()) & ~kIsSymbolMask;
  // Only inline the most frequent cases.
  if (instance_type == FIXED_ARRAY_TYPE) {
    return FixedArray::BodyDescriptor::SizeOf(map, this);
  }
  if (instance_type == ASCII_STRING_TYPE) {
    return SeqAsciiString::SizeFor(
        reinterpret_cast<SeqAsciiString*>(this)->length());
  }
  if (instance_type == BYTE_ARRAY_TYPE) {
    return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
  }
  if (instance_type == STRING_TYPE) {
    return SeqTwoByteString::SizeFor(
        reinterpret_cast<SeqTwoByteString*>(this)->length());
  }
  // All other variable-sized objects must be code objects.
  ASSERT(instance_type == CODE_TYPE);
  return reinterpret_cast<Code*>(this)->CodeSize();
}
2368 
2369 
// Stores the instance size in words; the value must be pointer-aligned
// and fit in one byte after scaling.
void Map::set_instance_size(int value) {
  ASSERT_EQ(0, value & (kPointerSize - 1));
  value >>= kPointerSizeLog2;
  ASSERT(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this, kInstanceSizeOffset, static_cast<byte>(value));
}


void Map::set_inobject_properties(int value) {
  ASSERT(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this, kInObjectPropertiesOffset, static_cast<byte>(value));
}


void Map::set_pre_allocated_property_fields(int value) {
  ASSERT(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this,
                   kPreAllocatedPropertyFieldsOffset,
                   static_cast<byte>(value));
}
2390 
2391 
// Instance type and the two packed bit fields of a Map; individual flag
// accessors below read-modify-write these bytes.
InstanceType Map::instance_type() {
  return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
}


void Map::set_instance_type(InstanceType value) {
  WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
}


int Map::unused_property_fields() {
  return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
}


// Clamped to 255 because the count must fit in a single byte.
void Map::set_unused_property_fields(int value) {
  WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
}


byte Map::bit_field() {
  return READ_BYTE_FIELD(this, kBitFieldOffset);
}


void Map::set_bit_field(byte value) {
  WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
}


byte Map::bit_field2() {
  return READ_BYTE_FIELD(this, kBitField2Offset);
}


void Map::set_bit_field2(byte value) {
  WRITE_BYTE_FIELD(this, kBitField2Offset, value);
}
2430 
2431 
// Per-flag accessors over bit_field/bit_field2. Each setter sets or
// clears one bit with a read-modify-write; each getter masks it out.

void Map::set_non_instance_prototype(bool value) {
  if (value) {
    set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
  } else {
    set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
  }
}


bool Map::has_non_instance_prototype() {
  return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
}


void Map::set_function_with_prototype(bool value) {
  if (value) {
    set_bit_field2(bit_field2() | (1 << kFunctionWithPrototype));
  } else {
    set_bit_field2(bit_field2() & ~(1 << kFunctionWithPrototype));
  }
}


bool Map::function_with_prototype() {
  return ((1 << kFunctionWithPrototype) & bit_field2()) != 0;
}


void Map::set_is_access_check_needed(bool access_check_needed) {
  if (access_check_needed) {
    set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
  } else {
    set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
  }
}


bool Map::is_access_check_needed() {
  return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
}


void Map::set_is_extensible(bool value) {
  if (value) {
    set_bit_field2(bit_field2() | (1 << kIsExtensible));
  } else {
    set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
  }
}

bool Map::is_extensible() {
  return ((1 << kIsExtensible) & bit_field2()) != 0;
}


void Map::set_attached_to_shared_function_info(bool value) {
  if (value) {
    set_bit_field2(bit_field2() | (1 << kAttachedToSharedFunctionInfo));
  } else {
    set_bit_field2(bit_field2() & ~(1 << kAttachedToSharedFunctionInfo));
  }
}

bool Map::attached_to_shared_function_info() {
  return ((1 << kAttachedToSharedFunctionInfo) & bit_field2()) != 0;
}


void Map::set_is_shared(bool value) {
  if (value) {
    set_bit_field2(bit_field2() | (1 << kIsShared));
  } else {
    set_bit_field2(bit_field2() & ~(1 << kIsShared));
  }
}

bool Map::is_shared() {
  return ((1 << kIsShared) & bit_field2()) != 0;
}
2511 
2512 
// Unchecked variants: reinterpret the field without a type assert (usable
// while the heap is in a state where checked casts would fail).
JSFunction* Map::unchecked_constructor() {
  return reinterpret_cast<JSFunction*>(READ_FIELD(this, kConstructorOffset));
}


FixedArray* Map::unchecked_prototype_transitions() {
  return reinterpret_cast<FixedArray*>(
      READ_FIELD(this, kPrototypeTransitionsOffset));
}
2522 
2523 
// The packed flags word of a Code object (kind, IC state, type, argc...).
Code::Flags Code::flags() {
  return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
}


void Code::set_flags(Code::Flags flags) {
  // The kind field must be wide enough for every code kind.
  STATIC_ASSERT(Code::NUMBER_OF_KINDS <= (kFlagsKindMask >> kFlagsKindShift)+1);
  // Make sure that all call stubs have an arguments count.
  ASSERT((ExtractKindFromFlags(flags) != CALL_IC &&
          ExtractKindFromFlags(flags) != KEYED_CALL_IC) ||
         ExtractArgumentsCountFromFlags(flags) >= 0);
  WRITE_INT_FIELD(this, kFlagsOffset, flags);
}
2537 
2538 
// Getters that decode individual components of the packed flags word.

Code::Kind Code::kind() {
  return ExtractKindFromFlags(flags());
}


InLoopFlag Code::ic_in_loop() {
  return ExtractICInLoopFromFlags(flags());
}


InlineCacheState Code::ic_state() {
  InlineCacheState result = ExtractICStateFromFlags(flags());
  // Only allow uninitialized or debugger states for non-IC code
  // objects. This is used in the debugger to determine whether or not
  // a call to code object has been replaced with a debug break call.
  ASSERT(is_inline_cache_stub() ||
         result == UNINITIALIZED ||
         result == DEBUG_BREAK ||
         result == DEBUG_PREPARE_STEP_IN);
  return result;
}


// Extra IC state is only meaningful for IC stubs (asserted).
Code::ExtraICState Code::extra_ic_state() {
  ASSERT(is_inline_cache_stub());
  return ExtractExtraICStateFromFlags(flags());
}


// The property type is only valid for monomorphic stubs (asserted).
PropertyType Code::type() {
  ASSERT(ic_state() == MONOMORPHIC);
  return ExtractTypeFromFlags(flags());
}


int Code::arguments_count() {
  ASSERT(is_call_stub() || is_keyed_call_stub() || kind() == STUB);
  return ExtractArgumentsCountFromFlags(flags());
}
2578 
2579 
// The stub major key identifies which stub generator produced this code;
// only meaningful for stubs and the recording binary-op / compare ICs.
int Code::major_key() {
  ASSERT(kind() == STUB ||
         kind() == TYPE_RECORDING_BINARY_OP_IC ||
         kind() == COMPARE_IC);
  return READ_BYTE_FIELD(this, kStubMajorKeyOffset);
}


void Code::set_major_key(int major) {
  ASSERT(kind() == STUB ||
         kind() == TYPE_RECORDING_BINARY_OP_IC ||
         kind() == COMPARE_IC);
  ASSERT(0 <= major && major < 256);
  WRITE_BYTE_FIELD(this, kStubMajorKeyOffset, major);
}


// Whether this (unoptimized) function code is eligible for optimization.
bool Code::optimizable() {
  ASSERT(kind() == FUNCTION);
  return READ_BYTE_FIELD(this, kOptimizableOffset) == 1;
}


void Code::set_optimizable(bool value) {
  ASSERT(kind() == FUNCTION);
  WRITE_BYTE_FIELD(this, kOptimizableOffset, value ? 1 : 0);
}
2607 
2608 
// Metadata accessors for FUNCTION and OPTIMIZED_FUNCTION code objects;
// each asserts it is used on the appropriate code kind.

bool Code::has_deoptimization_support() {
  ASSERT(kind() == FUNCTION);
  return READ_BYTE_FIELD(this, kHasDeoptimizationSupportOffset) == 1;
}


void Code::set_has_deoptimization_support(bool value) {
  ASSERT(kind() == FUNCTION);
  WRITE_BYTE_FIELD(this, kHasDeoptimizationSupportOffset, value ? 1 : 0);
}


// Loop nesting level up to which on-stack replacement is allowed.
int Code::allow_osr_at_loop_nesting_level() {
  ASSERT(kind() == FUNCTION);
  return READ_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset);
}


void Code::set_allow_osr_at_loop_nesting_level(int level) {
  ASSERT(kind() == FUNCTION);
  ASSERT(level >= 0 && level <= kMaxLoopNestingMarker);
  WRITE_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset, level);
}


unsigned Code::stack_slots() {
  ASSERT(kind() == OPTIMIZED_FUNCTION);
  return READ_UINT32_FIELD(this, kStackSlotsOffset);
}


void Code::set_stack_slots(unsigned slots) {
  ASSERT(kind() == OPTIMIZED_FUNCTION);
  WRITE_UINT32_FIELD(this, kStackSlotsOffset, slots);
}


// Offset of the safepoint table within the instruction stream.
unsigned Code::safepoint_table_offset() {
  ASSERT(kind() == OPTIMIZED_FUNCTION);
  return READ_UINT32_FIELD(this, kSafepointTableOffsetOffset);
}


void Code::set_safepoint_table_offset(unsigned offset) {
  ASSERT(kind() == OPTIMIZED_FUNCTION);
  ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
  WRITE_UINT32_FIELD(this, kSafepointTableOffsetOffset, offset);
}


// Offset of the stack-check table within the instruction stream.
unsigned Code::stack_check_table_offset() {
  ASSERT(kind() == FUNCTION);
  return READ_UINT32_FIELD(this, kStackCheckTableOffsetOffset);
}


void Code::set_stack_check_table_offset(unsigned offset) {
  ASSERT(kind() == FUNCTION);
  ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
  WRITE_UINT32_FIELD(this, kStackCheckTableOffsetOffset, offset);
}
2670 
2671 
// Per-stub-kind state bytes; each accessor asserts the code kind it is
// valid for.

CheckType Code::check_type() {
  ASSERT(is_call_stub() || is_keyed_call_stub());
  byte type = READ_BYTE_FIELD(this, kCheckTypeOffset);
  return static_cast<CheckType>(type);
}


void Code::set_check_type(CheckType value) {
  ASSERT(is_call_stub() || is_keyed_call_stub());
  WRITE_BYTE_FIELD(this, kCheckTypeOffset, value);
}


ExternalArrayType Code::external_array_type() {
  ASSERT(is_external_array_load_stub() || is_external_array_store_stub());
  byte type = READ_BYTE_FIELD(this, kExternalArrayTypeOffset);
  return static_cast<ExternalArrayType>(type);
}


void Code::set_external_array_type(ExternalArrayType value) {
  ASSERT(is_external_array_load_stub() || is_external_array_store_stub());
  WRITE_BYTE_FIELD(this, kExternalArrayTypeOffset, value);
}


byte Code::type_recording_binary_op_type() {
  ASSERT(is_type_recording_binary_op_stub());
  return READ_BYTE_FIELD(this, kBinaryOpTypeOffset);
}


void Code::set_type_recording_binary_op_type(byte value) {
  ASSERT(is_type_recording_binary_op_stub());
  WRITE_BYTE_FIELD(this, kBinaryOpTypeOffset, value);
}


byte Code::type_recording_binary_op_result_type() {
  ASSERT(is_type_recording_binary_op_stub());
  return READ_BYTE_FIELD(this, kBinaryOpReturnTypeOffset);
}


void Code::set_type_recording_binary_op_result_type(byte value) {
  ASSERT(is_type_recording_binary_op_stub());
  WRITE_BYTE_FIELD(this, kBinaryOpReturnTypeOffset, value);
}


byte Code::compare_state() {
  ASSERT(is_compare_ic_stub());
  return READ_BYTE_FIELD(this, kCompareStateOffset);
}


void Code::set_compare_state(byte value) {
  ASSERT(is_compare_ic_stub());
  WRITE_BYTE_FIELD(this, kCompareStateOffset, value);
}
2732 
2733 
// True if the kind lies in the contiguous [FIRST_IC_KIND, LAST_IC_KIND]
// range of IC kinds.
bool Code::is_inline_cache_stub() {
  Kind kind = this->kind();
  return kind >= FIRST_IC_KIND && kind <= LAST_IC_KIND;
}
2738 
2739 
// Packs the individual code properties into a single flags word. Each
// component is shifted into its field; the debug-mode asserts at the end
// verify the packing round-trips through the Extract* helpers.
Code::Flags Code::ComputeFlags(Kind kind,
                               InLoopFlag in_loop,
                               InlineCacheState ic_state,
                               ExtraICState extra_ic_state,
                               PropertyType type,
                               int argc,
                               InlineCacheHolderFlag holder) {
  // Extra IC state is only allowed for monomorphic call IC stubs
  // or for store IC stubs.
  ASSERT(extra_ic_state == kNoExtraICState ||
         (kind == CALL_IC && (ic_state == MONOMORPHIC ||
                              ic_state == MONOMORPHIC_PROTOTYPE_FAILURE)) ||
         (kind == STORE_IC) ||
         (kind == KEYED_STORE_IC));
  // Compute the bit mask.
  int bits = kind << kFlagsKindShift;
  if (in_loop) bits |= kFlagsICInLoopMask;
  bits |= ic_state << kFlagsICStateShift;
  bits |= type << kFlagsTypeShift;
  bits |= extra_ic_state << kFlagsExtraICStateShift;
  bits |= argc << kFlagsArgumentsCountShift;
  if (holder == PROTOTYPE_MAP) bits |= kFlagsCacheInPrototypeMapMask;
  // Cast to flags and validate result before returning it.
  Flags result = static_cast<Flags>(bits);
  ASSERT(ExtractKindFromFlags(result) == kind);
  ASSERT(ExtractICStateFromFlags(result) == ic_state);
  ASSERT(ExtractICInLoopFromFlags(result) == in_loop);
  ASSERT(ExtractTypeFromFlags(result) == type);
  ASSERT(ExtractExtraICStateFromFlags(result) == extra_ic_state);
  ASSERT(ExtractArgumentsCountFromFlags(result) == argc);
  return result;
}
2772 
2773 
// Convenience wrapper around ComputeFlags with the IC state fixed
// to MONOMORPHIC.
Code::Flags Code::ComputeMonomorphicFlags(Kind kind,
                                          PropertyType type,
                                          ExtraICState extra_ic_state,
                                          InlineCacheHolderFlag holder,
                                          InLoopFlag in_loop,
                                          int argc) {
  return ComputeFlags(
      kind, in_loop, MONOMORPHIC, extra_ic_state, type, argc, holder);
}
2783 
2784 
ExtractKindFromFlags(Flags flags)2785 Code::Kind Code::ExtractKindFromFlags(Flags flags) {
2786   int bits = (flags & kFlagsKindMask) >> kFlagsKindShift;
2787   return static_cast<Kind>(bits);
2788 }
2789 
2790 
ExtractICStateFromFlags(Flags flags)2791 InlineCacheState Code::ExtractICStateFromFlags(Flags flags) {
2792   int bits = (flags & kFlagsICStateMask) >> kFlagsICStateShift;
2793   return static_cast<InlineCacheState>(bits);
2794 }
2795 
2796 
ExtractExtraICStateFromFlags(Flags flags)2797 Code::ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
2798   int bits = (flags & kFlagsExtraICStateMask) >> kFlagsExtraICStateShift;
2799   return static_cast<ExtraICState>(bits);
2800 }
2801 
2802 
ExtractICInLoopFromFlags(Flags flags)2803 InLoopFlag Code::ExtractICInLoopFromFlags(Flags flags) {
2804   int bits = (flags & kFlagsICInLoopMask);
2805   return bits != 0 ? IN_LOOP : NOT_IN_LOOP;
2806 }
2807 
2808 
ExtractTypeFromFlags(Flags flags)2809 PropertyType Code::ExtractTypeFromFlags(Flags flags) {
2810   int bits = (flags & kFlagsTypeMask) >> kFlagsTypeShift;
2811   return static_cast<PropertyType>(bits);
2812 }
2813 
2814 
ExtractArgumentsCountFromFlags(Flags flags)2815 int Code::ExtractArgumentsCountFromFlags(Flags flags) {
2816   return (flags & kFlagsArgumentsCountMask) >> kFlagsArgumentsCountShift;
2817 }
2818 
2819 
ExtractCacheHolderFromFlags(Flags flags)2820 InlineCacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) {
2821   int bits = (flags & kFlagsCacheInPrototypeMapMask);
2822   return bits != 0 ? PROTOTYPE_MAP : OWN_MAP;
2823 }
2824 
2825 
RemoveTypeFromFlags(Flags flags)2826 Code::Flags Code::RemoveTypeFromFlags(Flags flags) {
2827   int bits = flags & ~kFlagsTypeMask;
2828   return static_cast<Flags>(bits);
2829 }
2830 
2831 
// Maps a call-target address (the first instruction) back to the Code
// object containing it by stepping back over the object header.
Code* Code::GetCodeFromTargetAddress(Address address) {
  HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
  // GetCodeFromTargetAddress might be called when marking objects during mark
  // sweep. reinterpret_cast is therefore used instead of the more appropriate
  // Code::cast. Code::cast does not work when the object's map is
  // marked.
  Code* result = reinterpret_cast<Code*>(code);
  return result;
}
2841 
2842 
Isolate* Map::isolate() {
  return heap()->isolate();
}


// Recovers the owning Heap from the page this object lives on.
Heap* Map::heap() {
  // NOTE: address() helper is not used to save one instruction.
  Heap* heap = Page::FromAddress(reinterpret_cast<Address>(this))->heap_;
  ASSERT(heap != NULL);
  ASSERT(heap->isolate() == Isolate::Current());
  return heap;
}


// Same page-based lookup as Map::heap(); duplicated per class because
// these types do not share a common base with this accessor.
Heap* Code::heap() {
  // NOTE: address() helper is not used to save one instruction.
  Heap* heap = Page::FromAddress(reinterpret_cast<Address>(this))->heap_;
  ASSERT(heap != NULL);
  ASSERT(heap->isolate() == Isolate::Current());
  return heap;
}


Isolate* Code::isolate() {
  return heap()->isolate();
}


Heap* JSGlobalPropertyCell::heap() {
  // NOTE: address() helper is not used to save one instruction.
  Heap* heap = Page::FromAddress(reinterpret_cast<Address>(this))->heap_;
  ASSERT(heap != NULL);
  ASSERT(heap->isolate() == Isolate::Current());
  return heap;
}


Isolate* JSGlobalPropertyCell::isolate() {
  return heap()->isolate();
}
2883 
2884 
// Reads a code entry address stored at the given location and maps it
// back to the containing heap object by stepping over the code header.
Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
  return HeapObject::
      FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
}
2889 
2890 
Object* Map::prototype() {
  return READ_FIELD(this, kPrototypeOffset);
}


// The prototype is either a JSObject or null (end of prototype chain).
void Map::set_prototype(Object* value, WriteBarrierMode mode) {
  ASSERT(value->IsNull() || value->IsJSObject());
  WRITE_FIELD(this, kPrototypeOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, mode);
}
2901 
2902 
GetFastElementsMap()2903 MaybeObject* Map::GetFastElementsMap() {
2904   if (has_fast_elements()) return this;
2905   Object* obj;
2906   { MaybeObject* maybe_obj = CopyDropTransitions();
2907     if (!maybe_obj->ToObject(&obj)) return maybe_obj;
2908   }
2909   Map* new_map = Map::cast(obj);
2910   new_map->set_has_fast_elements(true);
2911   isolate()->counters()->map_slow_to_fast_elements()->Increment();
2912   return new_map;
2913 }
2914 
2915 
GetSlowElementsMap()2916 MaybeObject* Map::GetSlowElementsMap() {
2917   if (!has_fast_elements()) return this;
2918   Object* obj;
2919   { MaybeObject* maybe_obj = CopyDropTransitions();
2920     if (!maybe_obj->ToObject(&obj)) return maybe_obj;
2921   }
2922   Map* new_map = Map::cast(obj);
2923   new_map->set_has_fast_elements(false);
2924   isolate()->counters()->map_fast_to_slow_elements()->Increment();
2925   return new_map;
2926 }
2927 
2928 
// Macro-generated field accessors (getter + setter with write barrier).
ACCESSORS(Map, instance_descriptors, DescriptorArray,
          kInstanceDescriptorsOffset)
ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
ACCESSORS(Map, prototype_transitions, FixedArray, kPrototypeTransitionsOffset)
ACCESSORS(Map, constructor, Object, kConstructorOffset)

ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
ACCESSORS(JSFunction, literals, FixedArray, kLiteralsOffset)
ACCESSORS_GCSAFE(JSFunction, next_function_link, Object,
                 kNextFunctionLinkOffset)

ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset)
ACCESSORS(GlobalObject, global_receiver, JSObject, kGlobalReceiverOffset)

ACCESSORS(JSGlobalProxy, context, Object, kContextOffset)

ACCESSORS(AccessorInfo, getter, Object, kGetterOffset)
ACCESSORS(AccessorInfo, setter, Object, kSetterOffset)
ACCESSORS(AccessorInfo, data, Object, kDataOffset)
ACCESSORS(AccessorInfo, name, Object, kNameOffset)
ACCESSORS(AccessorInfo, flag, Smi, kFlagOffset)

ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)

ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
ACCESSORS(InterceptorInfo, data, Object, kDataOffset)

ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)

ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)

ACCESSORS(FunctionTemplateInfo, serial_number, Object, kSerialNumberOffset)
ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
ACCESSORS(FunctionTemplateInfo, property_accessors, Object,
          kPropertyAccessorsOffset)
ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
          kPrototypeTemplateOffset)
ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
          kNamedPropertyHandlerOffset)
ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
          kIndexedPropertyHandlerOffset)
ACCESSORS(FunctionTemplateInfo, instance_template, Object,
          kInstanceTemplateOffset)
ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
          kInstanceCallHandlerOffset)
ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
          kAccessCheckInfoOffset)
ACCESSORS(FunctionTemplateInfo, flag, Smi, kFlagOffset)

ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
          kInternalFieldCountOffset)

ACCESSORS(SignatureInfo, receiver, Object, kReceiverOffset)
ACCESSORS(SignatureInfo, args, Object, kArgsOffset)

ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset)

ACCESSORS(Script, source, Object, kSourceOffset)
ACCESSORS(Script, name, Object, kNameOffset)
ACCESSORS(Script, id, Object, kIdOffset)
ACCESSORS(Script, line_offset, Smi, kLineOffsetOffset)
ACCESSORS(Script, column_offset, Smi, kColumnOffsetOffset)
ACCESSORS(Script, data, Object, kDataOffset)
ACCESSORS(Script, context_data, Object, kContextOffset)
ACCESSORS(Script, wrapper, Proxy, kWrapperOffset)
ACCESSORS(Script, type, Smi, kTypeOffset)
ACCESSORS(Script, compilation_type, Smi, kCompilationTypeOffset)
ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
ACCESSORS(Script, eval_from_instructions_offset, Smi,
          kEvalFrominstructionsOffsetOffset)

#ifdef ENABLE_DEBUGGER_SUPPORT
ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
ACCESSORS(DebugInfo, original_code, Code, kOriginalCodeIndex)
ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex)
ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)

ACCESSORS(BreakPointInfo, code_position, Smi, kCodePositionIndex)
ACCESSORS(BreakPointInfo, source_position, Smi, kSourcePositionIndex)
ACCESSORS(BreakPointInfo, statement_position, Smi, kStatementPositionIndex)
ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
#endif

ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
ACCESSORS_GCSAFE(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
ACCESSORS_GCSAFE(SharedFunctionInfo, initial_map, Object, kInitialMapOffset)
ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
          kInstanceClassNameOffset)
ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)
ACCESSORS(SharedFunctionInfo, this_property_assignments, Object,
          kThisPropertyAssignmentsOffset)

// Boolean flag accessors packed into the named Smi/int fields.
BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
               kHiddenPrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
               kNeedsAccessCheckBit)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
               kIsExpressionBit)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
               kIsTopLevelBit)
BOOL_GETTER(SharedFunctionInfo, compiler_hints,
            has_only_simple_this_property_assignments,
            kHasOnlySimpleThisPropertyAssignments)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               allows_lazy_compilation,
               kAllowLazyCompilation)
3054 
3055 
// On 32-bit hosts the integer fields below fit in Smis, so plain
// SMI_ACCESSORS suffice. On 64-bit hosts pairs of ints share one
// pointer-sized slot, so pseudo-Smi accessors are used instead: the
// low half is stored shifted left by one (to look like a Smi to the
// GC), the high half is a raw int.
#if V8_HOST_ARCH_32_BIT
SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
SMI_ACCESSORS(SharedFunctionInfo, formal_parameter_count,
              kFormalParameterCountOffset)
SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
              kExpectedNofPropertiesOffset)
SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
              kStartPositionAndTypeOffset)
SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
              kFunctionTokenPositionOffset)
SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
              kCompilerHintsOffset)
SMI_ACCESSORS(SharedFunctionInfo, this_property_assignments_count,
              kThisPropertyAssignmentsCountOffset)
SMI_ACCESSORS(SharedFunctionInfo, opt_count, kOptCountOffset)
#else

// Low half of a pointer-sized slot: value is stored shifted by one so
// the GC never mistakes it for a heap pointer (kHeapObjectTag == 1).
// The setter asserts the top two bits agree so the shift is lossless.
#define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset)             \
  STATIC_ASSERT(holder::offset % kPointerSize == 0);              \
  int holder::name() {                                            \
    int value = READ_INT_FIELD(this, offset);                     \
    ASSERT(kHeapObjectTag == 1);                                  \
    ASSERT((value & kHeapObjectTag) == 0);                        \
    return value >> 1;                                            \
  }                                                               \
  void holder::set_##name(int value) {                            \
    ASSERT(kHeapObjectTag == 1);                                  \
    ASSERT((value & 0xC0000000) == 0xC0000000 ||                  \
           (value & 0xC0000000) == 0x000000000);                  \
    WRITE_INT_FIELD(this,                                         \
                    offset,                                       \
                    (value << 1) & ~kHeapObjectTag);              \
  }

// High half of a pointer-sized slot: stored as a raw int, invisible
// to the GC because the slot as a whole still looks like a Smi.
#define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset)             \
  STATIC_ASSERT(holder::offset % kPointerSize == kIntSize);       \
  INT_ACCESSORS(holder, name, offset)


PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        formal_parameter_count,
                        kFormalParameterCountOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        expected_nof_properties,
                        kExpectedNofPropertiesOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        start_position_and_type,
                        kStartPositionAndTypeOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        function_token_position,
                        kFunctionTokenPositionOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        compiler_hints,
                        kCompilerHintsOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        this_property_assignments_count,
                        kThisPropertyAssignmentsCountOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, opt_count, kOptCountOffset)
#endif
3124 
3125 
// Construction counter stored in a single byte field, used by the
// inobject slack tracking below; range is therefore [0, 255].
int SharedFunctionInfo::construction_count() {
  return READ_BYTE_FIELD(this, kConstructionCountOffset);
}


void SharedFunctionInfo::set_construction_count(int value) {
  ASSERT(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this, kConstructionCountOffset, static_cast<byte>(value));
}
3135 
3136 
live_objects_may_exist()3137 bool SharedFunctionInfo::live_objects_may_exist() {
3138   return (compiler_hints() & (1 << kLiveObjectsMayExist)) != 0;
3139 }
3140 
3141 
set_live_objects_may_exist(bool value)3142 void SharedFunctionInfo::set_live_objects_may_exist(bool value) {
3143   if (value) {
3144     set_compiler_hints(compiler_hints() | (1 << kLiveObjectsMayExist));
3145   } else {
3146     set_compiler_hints(compiler_hints() & ~(1 << kLiveObjectsMayExist));
3147   }
3148 }
3149 
3150 
// Slack tracking is in progress while initial_map still holds a map;
// it is set to undefined once tracking has finished.
bool SharedFunctionInfo::IsInobjectSlackTrackingInProgress() {
  return initial_map() != HEAP->undefined_value();
}


bool SharedFunctionInfo::optimization_disabled() {
  return BooleanBit::get(compiler_hints(), kOptimizationDisabled);
}


void SharedFunctionInfo::set_optimization_disabled(bool disable) {
  set_compiler_hints(BooleanBit::set(compiler_hints(),
                                     kOptimizationDisabled,
                                     disable));
  // If disabling optimizations we reflect that in the code object so
  // it will not be counted as optimizable code.
  if ((code()->kind() == Code::FUNCTION) && disable) {
    code()->set_optimizable(false);
  }
}
3171 
3172 
// Strict-mode flag, stored as the kStrictModeFunction bit of the
// compiler hints.
bool SharedFunctionInfo::strict_mode() {
  return BooleanBit::get(compiler_hints(), kStrictModeFunction);
}


void SharedFunctionInfo::set_strict_mode(bool value) {
  set_compiler_hints(BooleanBit::set(compiler_hints(),
                                     kStrictModeFunction,
                                     value));
}


ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)
3187 
3188 bool Script::HasValidSource() {
3189   Object* src = this->source();
3190   if (!src->IsString()) return true;
3191   String* src_str = String::cast(src);
3192   if (!StringShape(src_str).IsExternal()) return true;
3193   if (src_str->IsAsciiRepresentation()) {
3194     return ExternalAsciiString::cast(src)->resource() != NULL;
3195   } else if (src_str->IsTwoByteRepresentation()) {
3196     return ExternalTwoByteString::cast(src)->resource() != NULL;
3197   }
3198   return true;
3199 }
3200 
3201 
// Marks a builtin as not needing arguments adaption by storing the
// sentinel parameter count.
void SharedFunctionInfo::DontAdaptArguments() {
  ASSERT(code()->kind() == Code::BUILTIN);
  set_formal_parameter_count(kDontAdaptArgumentsSentinel);
}


// The start position shares an int field with the type bits; it lives
// in the upper bits above kStartPositionShift.
int SharedFunctionInfo::start_position() {
  return start_position_and_type() >> kStartPositionShift;
}


void SharedFunctionInfo::set_start_position(int start_position) {
  set_start_position_and_type((start_position << kStartPositionShift)
    | (start_position_and_type() & ~kStartPositionMask));
}
3217 
3218 
Code* SharedFunctionInfo::code() {
  return Code::cast(READ_FIELD(this, kCodeOffset));
}


// Like code(), but without the type check; usable while the heap is in
// a state where Code::cast would fail (e.g. during GC).
Code* SharedFunctionInfo::unchecked_code() {
  return reinterpret_cast<Code*>(READ_FIELD(this, kCodeOffset));
}


// No write barrier: code objects are asserted to never be in new space.
void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kCodeOffset, value);
  ASSERT(!Isolate::Current()->heap()->InNewSpace(value));
}


SerializedScopeInfo* SharedFunctionInfo::scope_info() {
  return reinterpret_cast<SerializedScopeInfo*>(
      READ_FIELD(this, kScopeInfoOffset));
}
3239 
3240 
void SharedFunctionInfo::set_scope_info(SerializedScopeInfo* value,
                                        WriteBarrierMode mode) {
  WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kScopeInfoOffset, mode);
}


// Deopt counter is stored as a Smi; no write barrier needed for Smis.
Smi* SharedFunctionInfo::deopt_counter() {
  return reinterpret_cast<Smi*>(READ_FIELD(this, kDeoptCounterOffset));
}


void SharedFunctionInfo::set_deopt_counter(Smi* value) {
  WRITE_FIELD(this, kDeoptCounterOffset, value);
}
3256 
3257 
// A function is compiled once its code is anything other than the
// shared lazy-compile builtin.
bool SharedFunctionInfo::is_compiled() {
  return code() !=
      Isolate::Current()->builtins()->builtin(Builtins::kLazyCompile);
}


// function_data holds a FunctionTemplateInfo for API functions.
bool SharedFunctionInfo::IsApiFunction() {
  return function_data()->IsFunctionTemplateInfo();
}


FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
  ASSERT(IsApiFunction());
  return FunctionTemplateInfo::cast(function_data());
}


// function_data holds a Smi (the builtin function id) for builtins.
bool SharedFunctionInfo::HasBuiltinFunctionId() {
  return function_data()->IsSmi();
}
3278 
3279 
BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
  ASSERT(HasBuiltinFunctionId());
  return static_cast<BuiltinFunctionId>(Smi::cast(function_data())->value());
}


// Code age lives in the kCodeAgeMask-wide bit field of the compiler
// hints, starting at kCodeAgeShift.
int SharedFunctionInfo::code_age() {
  return (compiler_hints() >> kCodeAgeShift) & kCodeAgeMask;
}
3289 
3290 
set_code_age(int code_age)3291 void SharedFunctionInfo::set_code_age(int code_age) {
3292   set_compiler_hints(compiler_hints() |
3293                      ((code_age & kCodeAgeMask) << kCodeAgeShift));
3294 }
3295 
3296 
// Only unoptimized (FUNCTION kind) code can carry deoptimization
// support; optimized code is itself the product of deoptimizable code.
bool SharedFunctionInfo::has_deoptimization_support() {
  Code* code = this->code();
  return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
}
3301 
3302 
// A function is a builtin if its context's global object is the
// builtins object.
bool JSFunction::IsBuiltin() {
  return context()->global()->IsJSBuiltinsObject();
}


bool JSFunction::NeedsArgumentsAdaption() {
  return shared()->formal_parameter_count() !=
      SharedFunctionInfo::kDontAdaptArgumentsSentinel;
}


bool JSFunction::IsOptimized() {
  return code()->kind() == Code::OPTIMIZED_FUNCTION;
}


bool JSFunction::IsOptimizable() {
  return code()->kind() == Code::FUNCTION && code()->optimizable();
}


// True while the function's code is the lazy-recompile builtin, i.e.
// it has been marked for optimization but not yet recompiled.
bool JSFunction::IsMarkedForLazyRecompilation() {
  return code() == GetIsolate()->builtins()->builtin(Builtins::kLazyRecompile);
}
3327 
3328 
Code* JSFunction::code() {
  return Code::cast(unchecked_code());
}


// Reconstructs the Code object from the raw entry address stored in
// the code-entry field; skips the type check so it is GC-safe.
Code* JSFunction::unchecked_code() {
  return reinterpret_cast<Code*>(
      Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
}


// Stores the code's entry address (not the Code pointer itself).
void JSFunction::set_code(Code* value) {
  // Skip the write barrier because code is never in new space.
  ASSERT(!HEAP->InNewSpace(value));
  Address entry = value->entry();
  WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
}
3346 
3347 
ReplaceCode(Code * code)3348 void JSFunction::ReplaceCode(Code* code) {
3349   bool was_optimized = IsOptimized();
3350   bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;
3351 
3352   set_code(code);
3353 
3354   // Add/remove the function from the list of optimized functions for this
3355   // context based on the state change.
3356   if (!was_optimized && is_optimized) {
3357     context()->global_context()->AddOptimizedFunction(this);
3358   }
3359   if (was_optimized && !is_optimized) {
3360     context()->global_context()->RemoveOptimizedFunction(this);
3361   }
3362 }
3363 
3364 
Context* JSFunction::context() {
  return Context::cast(READ_FIELD(this, kContextOffset));
}


// Raw field reads without type checks; GC-safe variants of the above.
Object* JSFunction::unchecked_context() {
  return READ_FIELD(this, kContextOffset);
}


SharedFunctionInfo* JSFunction::unchecked_shared() {
  return reinterpret_cast<SharedFunctionInfo*>(
      READ_FIELD(this, kSharedFunctionInfoOffset));
}


void JSFunction::set_context(Object* value) {
  ASSERT(value->IsUndefined() || value->IsContext());
  WRITE_FIELD(this, kContextOffset, value);
  WRITE_BARRIER(this, kContextOffset);
}

// Single field doing double duty: holds either the initial Map or the
// prototype object (see initial_map()/instance_prototype() below).
ACCESSORS(JSFunction, prototype_or_initial_map, Object,
          kPrototypeOrInitialMapOffset)
3389 
3390 
Map* JSFunction::initial_map() {
  return Map::cast(prototype_or_initial_map());
}


void JSFunction::set_initial_map(Map* value) {
  set_prototype_or_initial_map(value);
}


// The shared field holds a Map once the initial map has been created.
bool JSFunction::has_initial_map() {
  return prototype_or_initial_map()->IsMap();
}


// Without an initial map, the field holds the prototype directly; the
// hole means no prototype has been set yet.
bool JSFunction::has_instance_prototype() {
  return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
}


bool JSFunction::has_prototype() {
  return map()->has_non_instance_prototype() || has_instance_prototype();
}
3414 
3415 
Object* JSFunction::instance_prototype() {
  ASSERT(has_instance_prototype());
  if (has_initial_map()) return initial_map()->prototype();
  // When there is no initial map and the prototype is a JSObject, the
  // initial map field is used for the prototype field.
  return prototype_or_initial_map();
}


Object* JSFunction::prototype() {
  ASSERT(has_prototype());
  // If the function's prototype property has been set to a non-JSObject
  // value, that value is stored in the constructor field of the map.
  if (map()->has_non_instance_prototype()) return map()->constructor();
  return instance_prototype();
}

bool JSFunction::should_have_prototype() {
  return map()->function_with_prototype();
}
3436 
3437 
// Compiled once the code is anything but the lazy-compile builtin.
bool JSFunction::is_compiled() {
  return code() != GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
}


int JSFunction::NumberOfLiterals() {
  return literals()->length();
}
3446 
3447 
// Accessors for the per-id JavaScript builtin function slots.
Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  return READ_FIELD(this, OffsetOfFunctionWithId(id));
}


void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id,
                                              Object* value) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  WRITE_FIELD(this, OffsetOfFunctionWithId(id), value);
  WRITE_BARRIER(this, OffsetOfFunctionWithId(id));
}


// Accessors for the per-id builtin code slots.
Code* JSBuiltinsObject::javascript_builtin_code(Builtins::JavaScript id) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  return Code::cast(READ_FIELD(this, OffsetOfCodeWithId(id)));
}


// No write barrier: code objects are asserted to never be in new space.
void JSBuiltinsObject::set_javascript_builtin_code(Builtins::JavaScript id,
                                                   Code* value) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  WRITE_FIELD(this, OffsetOfCodeWithId(id), value);
  ASSERT(!HEAP->InNewSpace(value));
}
3474 
3475 
// The proxied address is stored as a raw intptr so the GC ignores it.
Address Proxy::proxy() {
  return AddressFrom<Address>(READ_INTPTR_FIELD(this, kProxyOffset));
}


void Proxy::set_proxy(Address value) {
  WRITE_INTPTR_FIELD(this, kProxyOffset, OffsetFrom(value));
}
3484 
3485 
ACCESSORS(JSValue, value, Object, kValueOffset)


// Checked downcast to JSValue; also verifies the object's size.
JSValue* JSValue::cast(Object* obj) {
  ASSERT(obj->IsJSValue());
  ASSERT(HeapObject::cast(obj)->Size() == JSValue::kSize);
  return reinterpret_cast<JSValue*>(obj);
}
3494 
3495 
ACCESSORS(JSMessageObject, type, String, kTypeOffset)
ACCESSORS(JSMessageObject, arguments, JSArray, kArgumentsOffset)
ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
ACCESSORS(JSMessageObject, stack_trace, Object, kStackTraceOffset)
ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)


// Checked downcast to JSMessageObject; also verifies the object size.
JSMessageObject* JSMessageObject::cast(Object* obj) {
  ASSERT(obj->IsJSMessageObject());
  ASSERT(HeapObject::cast(obj)->Size() == JSMessageObject::kSize);
  return reinterpret_cast<JSMessageObject*>(obj);
}
3510 
3511 
// Macro-generated accessors for the code object's size and data fields.
INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
3515 
3516 
3517 byte* Code::instruction_start()  {
3518   return FIELD_ADDR(this, kHeaderSize);
3519 }
3520 
3521 
instruction_end()3522 byte* Code::instruction_end()  {
3523   return instruction_start() + instruction_size();
3524 }
3525 
3526 
body_size()3527 int Code::body_size() {
3528   return RoundUp(instruction_size(), kObjectAlignment);
3529 }
3530 
3531 
unchecked_deoptimization_data()3532 FixedArray* Code::unchecked_deoptimization_data() {
3533   return reinterpret_cast<FixedArray*>(
3534       READ_FIELD(this, kDeoptimizationDataOffset));
3535 }
3536 
3537 
// Raw, assertion-free read of the relocation-info field (see
// unchecked_deoptimization_data above for why).
ByteArray* Code::unchecked_relocation_info() {
  return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
}
3541 
3542 
// Start address of the relocation information byte stream.
byte* Code::relocation_start() {
  return unchecked_relocation_info()->GetDataStartAddress();
}
3546 
3547 
// Length in bytes of the relocation information.
int Code::relocation_size() {
  return unchecked_relocation_info()->length();
}
3551 
3552 
// Entry point of the code object; identical to instruction_start().
byte* Code::entry() {
  return instruction_start();
}
3556 
3557 
// True if pc lies within this code object's instructions.  The end bound is
// deliberately inclusive (<=), so a pc equal to instruction_end() — e.g. a
// return address just past the last instruction — still matches.
bool Code::contains(byte* pc) {
  return (instruction_start() <= pc) &&
      (pc <= instruction_start() + instruction_size());
}
3562 
3563 
// Generated accessors: a JSArray's length and a JSRegExp's implementation
// data (a FixedArray once compiled, undefined before — see TypeTag below).
ACCESSORS(JSArray, length, Object, kLengthOffset)


ACCESSORS(JSRegExp, data, Object, kDataOffset)
3568 
3569 
// Returns the regexp's implementation type.  An undefined data field means
// the regexp has not been compiled yet; otherwise the tag is stored as a
// Smi at kTagIndex of the data FixedArray.
JSRegExp::Type JSRegExp::TypeTag() {
  Object* data = this->data();
  if (data->IsUndefined()) return JSRegExp::NOT_COMPILED;
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
  return static_cast<JSRegExp::Type>(smi->value());
}
3576 
3577 
CaptureCount()3578 int JSRegExp::CaptureCount() {
3579   switch (TypeTag()) {
3580     case ATOM:
3581       return 0;
3582     case IRREGEXP:
3583       return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
3584     default:
3585       UNREACHABLE();
3586       return -1;
3587   }
3588 }
3589 
3590 
// Reads the regexp flags stored as a Smi at kFlagsIndex of the data array.
// Requires the regexp to be compiled (data is a FixedArray).
JSRegExp::Flags JSRegExp::GetFlags() {
  ASSERT(this->data()->IsFixedArray());
  Object* data = this->data();
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
  return Flags(smi->value());
}
3597 
3598 
Pattern()3599 String* JSRegExp::Pattern() {
3600   ASSERT(this->data()->IsFixedArray());
3601   Object* data = this->data();
3602   String* pattern= String::cast(FixedArray::cast(data)->get(kSourceIndex));
3603   return pattern;
3604 }
3605 
3606 
// Reads an arbitrary slot of the regexp's implementation data array.
// Only valid once the regexp is compiled.
Object* JSRegExp::DataAt(int index) {
  ASSERT(TypeTag() != NOT_COMPILED);
  return FixedArray::cast(data())->get(index);
}
3611 
3612 
// Writes a slot of the regexp's implementation data array.  Slots below
// kDataIndex (tag, source, flags) must not be mutated through this path.
void JSRegExp::SetDataAt(int index, Object* value) {
  ASSERT(TypeTag() != NOT_COMPILED);
  ASSERT(index >= kDataIndex);  // Only implementation data can be set this way.
  FixedArray::cast(data())->set(index, value);
}
3618 
3619 
// Classifies this object's elements backing store.  The checks are ordered:
// the map's fast-elements bit wins first, then a FixedArray backing store
// must be a dictionary (fast was handled above), then external arrays are
// dispatched on instance type, with float arrays as the final fall-through.
JSObject::ElementsKind JSObject::GetElementsKind() {
  if (map()->has_fast_elements()) {
    // Fast elements use a plain FixedArray, possibly the shared
    // copy-on-write map.
    ASSERT(elements()->map() == GetHeap()->fixed_array_map() ||
           elements()->map() == GetHeap()->fixed_cow_array_map());
    return FAST_ELEMENTS;
  }
  HeapObject* array = elements();
  if (array->IsFixedArray()) {
    // FAST_ELEMENTS or DICTIONARY_ELEMENTS are both stored in a
    // FixedArray, but FAST_ELEMENTS is already handled above.
    ASSERT(array->IsDictionary());
    return DICTIONARY_ELEMENTS;
  }
  ASSERT(!map()->has_fast_elements());
  if (array->IsExternalArray()) {
    switch (array->map()->instance_type()) {
      case EXTERNAL_BYTE_ARRAY_TYPE:
        return EXTERNAL_BYTE_ELEMENTS;
      case EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE:
        return EXTERNAL_UNSIGNED_BYTE_ELEMENTS;
      case EXTERNAL_SHORT_ARRAY_TYPE:
        return EXTERNAL_SHORT_ELEMENTS;
      case EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE:
        return EXTERNAL_UNSIGNED_SHORT_ELEMENTS;
      case EXTERNAL_INT_ARRAY_TYPE:
        return EXTERNAL_INT_ELEMENTS;
      case EXTERNAL_UNSIGNED_INT_ARRAY_TYPE:
        return EXTERNAL_UNSIGNED_INT_ELEMENTS;
      case EXTERNAL_PIXEL_ARRAY_TYPE:
        return EXTERNAL_PIXEL_ELEMENTS;
      default:
        break;
    }
  }
  // Only external float arrays remain.
  ASSERT(array->map()->instance_type() == EXTERNAL_FLOAT_ARRAY_TYPE);
  return EXTERNAL_FLOAT_ELEMENTS;
}
3657 
3658 
// True if the elements backing store is a plain (fast) FixedArray.
bool JSObject::HasFastElements() {
  return GetElementsKind() == FAST_ELEMENTS;
}
3662 
3663 
// True if the elements backing store is a number dictionary.
bool JSObject::HasDictionaryElements() {
  return GetElementsKind() == DICTIONARY_ELEMENTS;
}
3667 
3668 
// True if the elements backing store is any kind of external array.
bool JSObject::HasExternalArrayElements() {
  HeapObject* array = elements();
  ASSERT(array != NULL);
  return array->IsExternalArray();
}
3674 
3675 
// Expands to a JSObject::HasExternal<name>Elements() predicate that checks
// the backing store's instance type against the given external array type.
#define EXTERNAL_ELEMENTS_CHECK(name, type)          \
bool JSObject::HasExternal##name##Elements() {       \
  HeapObject* array = elements();                    \
  ASSERT(array != NULL);                             \
  if (!array->IsHeapObject())                        \
    return false;                                    \
  return array->map()->instance_type() == type;      \
}
3684 
3685 
// One HasExternal<name>Elements predicate per external array element type.
EXTERNAL_ELEMENTS_CHECK(Byte, EXTERNAL_BYTE_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(UnsignedByte, EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Short, EXTERNAL_SHORT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(UnsignedShort,
                        EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Int, EXTERNAL_INT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(UnsignedInt,
                        EXTERNAL_UNSIGNED_INT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Float,
                        EXTERNAL_FLOAT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Pixel, EXTERNAL_PIXEL_ARRAY_TYPE)
3697 
3698 
// True if the object's map declares a named-property interceptor.
bool JSObject::HasNamedInterceptor() {
  return map()->has_named_interceptor();
}
3702 
3703 
// True if the object's map declares an indexed-property interceptor.
bool JSObject::HasIndexedInterceptor() {
  return map()->has_indexed_interceptor();
}
3707 
3708 
// Element length can only be set when the backing store is a FixedArray
// (fast or dictionary), i.e. never for external arrays.
bool JSObject::AllowsSetElementsLength() {
  bool result = elements()->IsFixedArray();
  ASSERT(result == !HasExternalArrayElements());
  return result;
}
3714 
3715 
// Makes the fast-elements backing store writable.  If the elements are the
// shared copy-on-write array, copies them into a fresh plain FixedArray;
// otherwise returns the existing store.  Returns the allocation failure if
// the copy cannot be allocated.
MaybeObject* JSObject::EnsureWritableFastElements() {
  ASSERT(HasFastElements());
  FixedArray* elems = FixedArray::cast(elements());
  Isolate* isolate = GetIsolate();
  // Already writable unless the store carries the COW map.
  if (elems->map() != isolate->heap()->fixed_cow_array_map()) return elems;
  Object* writable_elems;
  { MaybeObject* maybe_writable_elems = isolate->heap()->CopyFixedArrayWithMap(
      elems, isolate->heap()->fixed_array_map());
    if (!maybe_writable_elems->ToObject(&writable_elems)) {
      return maybe_writable_elems;
    }
  }
  set_elements(FixedArray::cast(writable_elems));
  isolate->counters()->cow_arrays_converted()->Increment();
  return writable_elems;
}
3732 
3733 
// The properties store viewed as a StringDictionary; only valid for
// slow-mode (non-fast-properties) objects.
StringDictionary* JSObject::property_dictionary() {
  ASSERT(!HasFastProperties());
  return StringDictionary::cast(properties());
}
3738 
3739 
// The elements store viewed as a NumberDictionary; only valid when the
// object has dictionary-mode elements.
NumberDictionary* JSObject::element_dictionary() {
  ASSERT(HasDictionaryElements());
  return NumberDictionary::cast(elements());
}
3744 
3745 
// A hash field value has been computed iff its not-computed bit is clear.
bool String::IsHashFieldComputed(uint32_t field) {
  return (field & kHashNotComputedMask) == 0;
}
3749 
3750 
// True if this string's hash has already been computed and cached.
bool String::HasHashCode() {
  return IsHashFieldComputed(hash_field());
}
3754 
3755 
// Returns the string's hash code, using the cached value from the hash
// field when present and computing (and caching) it otherwise.
uint32_t String::Hash() {
  // Fast case: has hash code already been computed?
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field)) return field >> kHashShift;
  // Slow case: compute hash code and set it.
  return ComputeAndSetHash();
}
3763 
3764 
// Incremental hasher for a string of the given length.  The string is a
// candidate array index only when non-empty and short enough to encode one
// (kMaxArrayIndexSize); the running candidacy is refined per character.
StringHasher::StringHasher(int length)
  : length_(length),
    raw_running_hash_(0),
    array_index_(0),
    is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
    is_first_char_(true),
    is_valid_(true) { }
3772 
3773 
// Strings longer than kMaxHashCalcLength get a trivial (length-based) hash
// instead of a per-character one; callers skip feeding characters then.
bool StringHasher::has_trivial_hash() {
  return length_ > String::kMaxHashCalcLength;
}
3777 
3778 
// Feeds one character into the running hash and, in parallel, into the
// incremental array-index parse.
void StringHasher::AddCharacter(uc32 c) {
  // Use the Jenkins one-at-a-time hash function to update the hash
  // for the given character.
  raw_running_hash_ += c;
  raw_running_hash_ += (raw_running_hash_ << 10);
  raw_running_hash_ ^= (raw_running_hash_ >> 6);
  // Incremental array index computation.
  if (is_array_index_) {
    if (c < '0' || c > '9') {
      // Non-digit: cannot be an array index.
      is_array_index_ = false;
    } else {
      int d = c - '0';
      if (is_first_char_) {
        is_first_char_ = false;
        // A leading zero is only a valid index if it is the whole string.
        if (c == '0' && length_ > 1) {
          is_array_index_ = false;
          return;
        }
      }
      // Overflow guard: 429496729 == 2^32 / 10, and the ((d + 2) >> 3)
      // term tightens the bound for the digits (8 and 9) that would push
      // the value past 2^32 - 1 at the boundary.
      if (array_index_ > 429496729U - ((d + 2) >> 3)) {
        is_array_index_ = false;
      } else {
        array_index_ = array_index_ * 10 + d;
      }
    }
  }
}
3806 
3807 
// Same Jenkins one-at-a-time hash step as AddCharacter, but without the
// array-index bookkeeping; only legal once index candidacy is ruled out.
void StringHasher::AddCharacterNoIndex(uc32 c) {
  ASSERT(!is_array_index());
  raw_running_hash_ += c;
  raw_running_hash_ += (raw_running_hash_ << 10);
  raw_running_hash_ ^= (raw_running_hash_ >> 6);
}
3814 
3815 
// Finalization step of the Jenkins one-at-a-time hash.
uint32_t StringHasher::GetHash() {
  // Get the calculated raw hash value and do some more bit ops to distribute
  // the hash further. Ensure that we never return zero as the hash value.
  uint32_t result = raw_running_hash_;
  result += (result << 3);
  result ^= (result >> 11);
  result += (result << 15);
  if (result == 0) {
    // Zero is reserved (it would look like "hash not computed").
    result = 27;
  }
  return result;
}
3828 
3829 
3830 template <typename schar>
HashSequentialString(const schar * chars,int length)3831 uint32_t HashSequentialString(const schar* chars, int length) {
3832   StringHasher hasher(length);
3833   if (!hasher.has_trivial_hash()) {
3834     int i;
3835     for (i = 0; hasher.is_array_index() && (i < length); i++) {
3836       hasher.AddCharacter(chars[i]);
3837     }
3838     for (; i < length; i++) {
3839       hasher.AddCharacterNoIndex(chars[i]);
3840     }
3841   }
3842   return hasher.GetHashField();
3843 }
3844 
3845 
// If this string is a valid array index, stores it in *index and returns
// true.  A computed hash field with the not-an-index bit set lets us fail
// fast without reparsing the string.
bool String::AsArrayIndex(uint32_t* index) {
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
    return false;
  }
  return SlowAsArrayIndex(index);
}
3853 
3854 
// The object's prototype, read from its map.
Object* JSObject::GetPrototype() {
  return JSObject::cast(this)->map()->prototype();
}
3858 
3859 
// Property attributes for key, with this object itself as the receiver.
PropertyAttributes JSObject::GetPropertyAttribute(String* key) {
  return GetPropertyAttributeWithReceiver(this, key);
}
3863 
// TODO(504): this may be useful in other places too where JSGlobalProxy
// is used.
// For a global proxy, returns the underlying global object (its
// prototype), or undefined if the proxy has been detached (null
// prototype).  For any other object, returns the object itself.
Object* JSObject::BypassGlobalProxy() {
  if (IsJSGlobalProxy()) {
    Object* proto = GetPrototype();
    if (proto->IsNull()) return GetHeap()->undefined_value();
    ASSERT(proto->IsJSGlobalObject());
    return proto;
  }
  return this;
}
3875 
3876 
// True if a hidden-properties object is installed under the hidden symbol.
// Looked up post-interceptor so interceptors cannot shadow the result.
bool JSObject::HasHiddenPropertiesObject() {
  ASSERT(!IsJSGlobalProxy());
  return GetPropertyAttributePostInterceptor(this,
                                             GetHeap()->hidden_symbol(),
                                             false) != ABSENT;
}
3883 
3884 
// Returns the object stored under the hidden symbol.  Callers must have
// checked HasHiddenPropertiesObject() first.
Object* JSObject::GetHiddenPropertiesObject() {
  ASSERT(!IsJSGlobalProxy());
  PropertyAttributes attributes;
  // You can't install a getter on a property indexed by the hidden symbol,
  // so we can be sure that GetLocalPropertyPostInterceptor returns a real
  // object.
  Object* result =
      GetLocalPropertyPostInterceptor(this,
                                      GetHeap()->hidden_symbol(),
                                      &attributes)->ToObjectUnchecked();
  return result;
}
3897 
3898 
// Installs hidden_obj under the hidden symbol as a non-enumerable property,
// bypassing interceptors.  May fail with an allocation failure.
MaybeObject* JSObject::SetHiddenPropertiesObject(Object* hidden_obj) {
  ASSERT(!IsJSGlobalProxy());
  return SetPropertyPostInterceptor(GetHeap()->hidden_symbol(),
                                    hidden_obj,
                                    DONT_ENUM,
                                    kNonStrictMode);
}
3906 
3907 
// True if the object (or its prototype chain, per the receiver-taking
// overload) has an element at the given index.
bool JSObject::HasElement(uint32_t index) {
  return HasElementWithReceiver(this, index);
}
3911 
3912 
// Reads the all-can-read bit from the packed flag Smi.
bool AccessorInfo::all_can_read() {
  return BooleanBit::get(flag(), kAllCanReadBit);
}
3916 
3917 
// Writes the all-can-read bit in the packed flag Smi.
void AccessorInfo::set_all_can_read(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
}
3921 
3922 
// Reads the all-can-write bit from the packed flag Smi.
bool AccessorInfo::all_can_write() {
  return BooleanBit::get(flag(), kAllCanWriteBit);
}
3926 
3927 
// Writes the all-can-write bit in the packed flag Smi.
void AccessorInfo::set_all_can_write(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
}
3931 
3932 
// Reads the prohibits-overwriting bit from the packed flag Smi.
bool AccessorInfo::prohibits_overwriting() {
  return BooleanBit::get(flag(), kProhibitsOverwritingBit);
}
3936 
3937 
// Writes the prohibits-overwriting bit in the packed flag Smi.
void AccessorInfo::set_prohibits_overwriting(bool value) {
  set_flag(BooleanBit::set(flag(), kProhibitsOverwritingBit, value));
}
3941 
3942 
// Decodes the property-attributes bit field packed into the flag Smi.
PropertyAttributes AccessorInfo::property_attributes() {
  return AttributesField::decode(static_cast<uint32_t>(flag()->value()));
}
3946 
3947 
// Re-encodes the attributes into the flag Smi, preserving the other bits
// (the boolean flags above) by masking them out first.
void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
  ASSERT(AttributesField::is_valid(attributes));
  int rest_value = flag()->value() & ~AttributesField::mask();
  set_flag(Smi::FromInt(rest_value | AttributesField::encode(attributes)));
}
3953 
3954 
// Convenience overload: stores key/value with empty (zero) property
// details.
template<typename Shape, typename Key>
void Dictionary<Shape, Key>::SetEntry(int entry,
                                      Object* key,
                                      Object* value) {
  SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
}
3961 
3962 
// Stores key, value and details in the three consecutive slots of the
// given hash-table entry.  A single write-barrier mode is computed once
// under a no-allocation scope and reused for both pointer writes; the
// details Smi needs no barrier (fast_set).
template<typename Shape, typename Key>
void Dictionary<Shape, Key>::SetEntry(int entry,
                                      Object* key,
                                      Object* value,
                                      PropertyDetails details) {
  // String keys must carry an enumeration index (or be deleted entries).
  ASSERT(!key->IsString() || details.IsDeleted() || details.index() > 0);
  int index = HashTable<Shape, Key>::EntryToIndex(entry);
  AssertNoAllocation no_gc;
  WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc);
  FixedArray::set(index, key, mode);
  FixedArray::set(index+1, value, mode);
  FixedArray::fast_set(this, index+2, details.AsSmi());
}
3976 
3977 
// A numeric key matches a stored entry when their uint32 values agree.
bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
  ASSERT(other->IsNumber());
  return key == static_cast<uint32_t>(other->Number());
}
3982 
3983 
// Hash for a raw uint32 key.
uint32_t NumberDictionaryShape::Hash(uint32_t key) {
  return ComputeIntegerHash(key);
}
3987 
3988 
// Hash for a stored entry: re-derive the uint32 from the heap number so it
// hashes identically to the raw key.
uint32_t NumberDictionaryShape::HashForObject(uint32_t key, Object* other) {
  ASSERT(other->IsNumber());
  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()));
}
3993 
3994 
// Boxes the uint32 key as a heap number/Smi for storage; may fail to
// allocate.
MaybeObject* NumberDictionaryShape::AsObject(uint32_t key) {
  return Isolate::Current()->heap()->NumberFromUint32(key);
}
3998 
3999 
// String keys match on content equality, with a cheap hash comparison
// first to reject mismatches without a character-by-character compare.
bool StringDictionaryShape::IsMatch(String* key, Object* other) {
  // We know that all entries in a hash table had their hash keys created.
  // Use that knowledge to have fast failure.
  if (key->Hash() != String::cast(other)->Hash()) return false;
  return key->Equals(String::cast(other));
}
4006 
4007 
// Hash of a string key (cached on the string itself).
uint32_t StringDictionaryShape::Hash(String* key) {
  return key->Hash();
}
4011 
4012 
// Hash of a stored entry; the key parameter is unused for string shapes.
uint32_t StringDictionaryShape::HashForObject(String* key, Object* other) {
  return String::cast(other)->Hash();
}
4016 
4017 
// String keys are heap objects already; stored as-is, never fails.
MaybeObject* StringDictionaryShape::AsObject(String* key) {
  return key;
}
4021 
4022 
// Resets the map's code cache to the shared empty fixed array.
void Map::ClearCodeCache(Heap* heap) {
  // No write barrier is needed since empty_fixed_array is not in new space.
  // Please note this function is used during marking:
  //  - MarkCompactCollector::MarkUnmarkedObject
  ASSERT(!heap->InNewSpace(heap->raw_unchecked_empty_fixed_array()));
  WRITE_FIELD(this, kCodeCacheOffset, heap->raw_unchecked_empty_fixed_array());
}
4030 
4031 
// Ensures the fast-elements backing store can hold required_size elements,
// growing with ~12% slack.  When the store is large enough but has been
// promoted out of new space, small arrays are still re-expanded so the
// fresh backing store lands back in new space.
void JSArray::EnsureSize(int required_size) {
  ASSERT(HasFastElements());
  FixedArray* elts = FixedArray::cast(elements());
  const int kArraySizeThatFitsComfortablyInNewSpace = 128;
  if (elts->length() < required_size) {
    // Doubling in size would be overkill, but leave some slack to avoid
    // constantly growing.
    Expand(required_size + (required_size >> 3));
    // It's a performance benefit to keep a frequently used array in new-space.
  } else if (!GetHeap()->new_space()->Contains(elts) &&
             required_size < kArraySizeThatFitsComfortablyInNewSpace) {
    // Expand will allocate a new backing store in new space even if the size
    // we asked for isn't larger than what we had before.
    Expand(required_size);
  }
}
4048 
4049 
// Smi lengths never need a write barrier, so skip it explicitly.
void JSArray::set_length(Smi* length) {
  set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
}
4053 
4054 
// Replaces both the backing store and the length in one step so they stay
// consistent.
void JSArray::SetContent(FixedArray* storage) {
  set_length(Smi::FromInt(storage->length()));
  set_elements(storage);
}
4059 
4060 
// Shallow copy of this array.  The empty array is shared, so it is
// returned directly instead of allocating.
MaybeObject* FixedArray::Copy() {
  if (length() == 0) return this;
  return GetHeap()->CopyFixedArray(this);
}
4065 
4066 
// Pushes this object onto the isolate's stack of relocatable objects so
// the GC can update it; the destructor pops it in strict LIFO order.
Relocatable::Relocatable(Isolate* isolate) {
  ASSERT(isolate == Isolate::Current());
  isolate_ = isolate;
  prev_ = isolate->relocatable_top();
  isolate->set_relocatable_top(this);
}
4073 
4074 
// Pops this object from the relocatable stack; asserts the LIFO discipline
// (this must be the current top).
Relocatable::~Relocatable() {
  ASSERT(isolate_ == Isolate::Current());
  ASSERT_EQ(isolate_->relocatable_top(), this);
  isolate_->set_relocatable_top(prev_);
}
4080 
4081 
// A JSObject's body size is simply its map's instance size.
int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
  return map->instance_size();
}
4085 
4086 
// Visits the proxy's single external-reference field (dynamic visitor).
void Proxy::ProxyIterateBody(ObjectVisitor* v) {
  v->VisitExternalReference(
      reinterpret_cast<Address *>(FIELD_ADDR(this, kProxyOffset)));
}
4091 
4092 
// Static-visitor variant of ProxyIterateBody (same field, no virtual
// dispatch).
template<typename StaticVisitor>
void Proxy::ProxyIterateBody() {
  StaticVisitor::VisitExternalReference(
      reinterpret_cast<Address *>(FIELD_ADDR(this, kProxyOffset)));
}
4098 
4099 
// Visits the external ASCII string's resource pointer (dynamic visitor).
void ExternalAsciiString::ExternalAsciiStringIterateBody(ObjectVisitor* v) {
  typedef v8::String::ExternalAsciiStringResource Resource;
  v->VisitExternalAsciiString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}
4105 
4106 
// Static-visitor variant for external ASCII strings.
template<typename StaticVisitor>
void ExternalAsciiString::ExternalAsciiStringIterateBody() {
  typedef v8::String::ExternalAsciiStringResource Resource;
  StaticVisitor::VisitExternalAsciiString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}
4113 
4114 
// Visits the external two-byte string's resource pointer (dynamic visitor).
void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
  typedef v8::String::ExternalStringResource Resource;
  v->VisitExternalTwoByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}
4120 
4121 
// Static-visitor variant for external two-byte strings.
template<typename StaticVisitor>
void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
  typedef v8::String::ExternalStringResource Resource;
  StaticVisitor::VisitExternalTwoByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}
4128 
// Address of the tagged slot at the given byte offset within obj.
#define SLOT_ADDR(obj, offset) \
  reinterpret_cast<Object**>((obj)->address() + offset)

// Visits the fixed-size pointer range [start_offset, end_offset) of obj.
template<int start_offset, int end_offset, int size>
void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
    HeapObject* obj,
    ObjectVisitor* v) {
    v->VisitPointers(SLOT_ADDR(obj, start_offset), SLOT_ADDR(obj, end_offset));
}
4138 
4139 
// Visits the pointer range [start_offset, object_size) of obj — the
// variable-size counterpart of FixedBodyDescriptor::IterateBody.
template<int start_offset>
void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
                                                       int object_size,
                                                       ObjectVisitor* v) {
  v->VisitPointers(SLOT_ADDR(obj, start_offset), SLOT_ADDR(obj, object_size));
}
4146 
4147 #undef SLOT_ADDR
4148 
4149 
4150 #undef CAST_ACCESSOR
4151 #undef INT_ACCESSORS
4152 #undef SMI_ACCESSORS
4153 #undef ACCESSORS
4154 #undef FIELD_ADDR
4155 #undef READ_FIELD
4156 #undef WRITE_FIELD
4157 #undef WRITE_BARRIER
4158 #undef CONDITIONAL_WRITE_BARRIER
4159 #undef READ_MEMADDR_FIELD
4160 #undef WRITE_MEMADDR_FIELD
4161 #undef READ_DOUBLE_FIELD
4162 #undef WRITE_DOUBLE_FIELD
4163 #undef READ_INT_FIELD
4164 #undef WRITE_INT_FIELD
4165 #undef READ_SHORT_FIELD
4166 #undef WRITE_SHORT_FIELD
4167 #undef READ_BYTE_FIELD
4168 #undef WRITE_BYTE_FIELD
4169 
4170 
4171 } }  // namespace v8::internal
4172 
4173 #endif  // V8_OBJECTS_INL_H_
4174