1 // Copyright 2009 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 //     * Redistributions of source code must retain the above copyright
7 //       notice, this list of conditions and the following disclaimer.
8 //     * Redistributions in binary form must reproduce the above
9 //       copyright notice, this list of conditions and the following
10 //       disclaimer in the documentation and/or other materials provided
11 //       with the distribution.
12 //     * Neither the name of Google Inc. nor the names of its
13 //       contributors may be used to endorse or promote products derived
14 //       from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #include "accessors.h"
31 #include "api.h"
32 #include "bootstrapper.h"
33 #include "codegen-inl.h"
34 #include "compilation-cache.h"
35 #include "debug.h"
36 #include "global-handles.h"
37 #include "mark-compact.h"
38 #include "natives.h"
39 #include "scanner.h"
40 #include "scopeinfo.h"
41 #include "v8threads.h"
42 #if V8_TARGET_ARCH_ARM && V8_NATIVE_REGEXP
43 #include "regexp-macro-assembler.h"
44 #endif
45 
46 namespace v8 {
47 namespace internal {
48 
49 
50 String* Heap::hidden_symbol_;
51 Object* Heap::roots_[Heap::kRootListLength];
52 
53 
54 NewSpace Heap::new_space_;
55 OldSpace* Heap::old_pointer_space_ = NULL;
56 OldSpace* Heap::old_data_space_ = NULL;
57 OldSpace* Heap::code_space_ = NULL;
58 MapSpace* Heap::map_space_ = NULL;
59 CellSpace* Heap::cell_space_ = NULL;
60 LargeObjectSpace* Heap::lo_space_ = NULL;
61 
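// Lower bounds used when the old generation promotion and allocation limits
// are recomputed from the promoted-space size after a mark-compact collection
// (see PerformGarbageCollection below).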
62 static const int kMinimumPromotionLimit = 2*MB;
63 static const int kMinimumAllocationLimit = 8*MB;
64 
65 int Heap::old_gen_promotion_limit_ = kMinimumPromotionLimit;
66 int Heap::old_gen_allocation_limit_ = kMinimumAllocationLimit;
67 
68 int Heap::old_gen_exhausted_ = false;
69 
70 int Heap::amount_of_external_allocated_memory_ = 0;
71 int Heap::amount_of_external_allocated_memory_at_last_global_gc_ = 0;
72 
73 // semispace_size_ should be a power of 2 and old_generation_size_ should be
74 // a multiple of Page::kPageSize.
75 #if defined(ANDROID)
76 int Heap::semispace_size_  = 512*KB;
77 int Heap::old_generation_size_ = 128*MB;
78 int Heap::initial_semispace_size_ = 128*KB;
79 #elif defined(V8_TARGET_ARCH_X64)
80 int Heap::semispace_size_  = 8*MB;
81 int Heap::old_generation_size_ = 1*GB;
82 int Heap::initial_semispace_size_ = 1*MB;
83 #else
84 int Heap::semispace_size_  = 4*MB;
85 int Heap::old_generation_size_ = 512*MB;
86 int Heap::initial_semispace_size_ = 512*KB;
87 #endif
88 
89 GCCallback Heap::global_gc_prologue_callback_ = NULL;
90 GCCallback Heap::global_gc_epilogue_callback_ = NULL;
91 
92 // Variables set based on semispace_size_ and old_generation_size_ in
93 // ConfigureHeap.
94 int Heap::young_generation_size_ = 0;  // Will be 2 * semispace_size_.
95 int Heap::survived_since_last_expansion_ = 0;
96 int Heap::external_allocation_limit_ = 0;
97 
98 Heap::HeapState Heap::gc_state_ = NOT_IN_GC;
99 
100 int Heap::mc_count_ = 0;
101 int Heap::gc_count_ = 0;
102 
103 int Heap::always_allocate_scope_depth_ = 0;
104 bool Heap::context_disposed_pending_ = false;
105 
106 #ifdef DEBUG
107 bool Heap::allocation_allowed_ = true;
108 
109 int Heap::allocation_timeout_ = 0;
110 bool Heap::disallow_allocation_failure_ = false;
111 #endif  // DEBUG
112 
113 
114 int Heap::Capacity() {
115   if (!HasBeenSetup()) return 0;
116 
117   return new_space_.Capacity() +
118       old_pointer_space_->Capacity() +
119       old_data_space_->Capacity() +
120       code_space_->Capacity() +
121       map_space_->Capacity() +
122       cell_space_->Capacity();
123 }
124 
125 
126 int Heap::Available() {
127   if (!HasBeenSetup()) return 0;
128 
129   return new_space_.Available() +
130       old_pointer_space_->Available() +
131       old_data_space_->Available() +
132       code_space_->Available() +
133       map_space_->Available() +
134       cell_space_->Available();
135 }
136 
137 
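// The heap counts as set up once all of the spaces other than new space
// (which is a plain static member rather than a pointer) have been created.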
138 bool Heap::HasBeenSetup() {
139   return old_pointer_space_ != NULL &&
140          old_data_space_ != NULL &&
141          code_space_ != NULL &&
142          map_space_ != NULL &&
143          cell_space_ != NULL &&
144          lo_space_ != NULL;
145 }
146 
147 
148 GarbageCollector Heap::SelectGarbageCollector(AllocationSpace space) {
149   // Is global GC requested?
150   if (space != NEW_SPACE || FLAG_gc_global) {
151     Counters::gc_compactor_caused_by_request.Increment();
152     return MARK_COMPACTOR;
153   }
154 
155   // Is enough data promoted to justify a global GC?
156   if (OldGenerationPromotionLimitReached()) {
157     Counters::gc_compactor_caused_by_promoted_data.Increment();
158     return MARK_COMPACTOR;
159   }
160 
161   // Have allocation in OLD and LO failed?
162   if (old_gen_exhausted_) {
163     Counters::gc_compactor_caused_by_oldspace_exhaustion.Increment();
164     return MARK_COMPACTOR;
165   }
166 
167   // Is there enough space left in OLD to guarantee that a scavenge can
168   // succeed?
169   //
170   // Note that MemoryAllocator->MaxAvailable() undercounts the memory available
171   // for object promotion. It counts only the bytes that the memory
172   // allocator has not yet allocated from the OS and assigned to any space,
173   // and does not count available bytes already in the old space or code
174   // space.  Undercounting is safe---we may get an unrequested full GC when
175   // a scavenge would have succeeded.
176   if (MemoryAllocator::MaxAvailable() <= new_space_.Size()) {
177     Counters::gc_compactor_caused_by_oldspace_exhaustion.Increment();
178     return MARK_COMPACTOR;
179   }
180 
181   // Default
182   return SCAVENGER;
183 }
184 
185 
186 // TODO(1238405): Combine the infrastructure for --heap-stats and
187 // --log-gc to avoid the complicated preprocessor and flag testing.
188 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
189 void Heap::ReportStatisticsBeforeGC() {
190   // Heap::ReportHeapStatistics will also log NewSpace statistics when
191   // compiled with ENABLE_LOGGING_AND_PROFILING and --log-gc is set.  The
192   // following logic is used to avoid double logging.
193 #if defined(DEBUG) && defined(ENABLE_LOGGING_AND_PROFILING)
194   if (FLAG_heap_stats || FLAG_log_gc) new_space_.CollectStatistics();
195   if (FLAG_heap_stats) {
196     ReportHeapStatistics("Before GC");
197   } else if (FLAG_log_gc) {
198     new_space_.ReportStatistics();
199   }
200   if (FLAG_heap_stats || FLAG_log_gc) new_space_.ClearHistograms();
201 #elif defined(DEBUG)
202   if (FLAG_heap_stats) {
203     new_space_.CollectStatistics();
204     ReportHeapStatistics("Before GC");
205     new_space_.ClearHistograms();
206   }
207 #elif defined(ENABLE_LOGGING_AND_PROFILING)
208   if (FLAG_log_gc) {
209     new_space_.CollectStatistics();
210     new_space_.ReportStatistics();
211     new_space_.ClearHistograms();
212   }
213 #endif
214 }
215 
216 
217 #if defined(ENABLE_LOGGING_AND_PROFILING)
218 void Heap::PrintShortHeapStatistics() {
219   if (!FLAG_trace_gc_verbose) return;
220   PrintF("Memory allocator,   used: %8d, available: %8d\n",
221          MemoryAllocator::Size(), MemoryAllocator::Available());
222   PrintF("New space,          used: %8d, available: %8d\n",
223          Heap::new_space_.Size(), new_space_.Available());
224   PrintF("Old pointers,       used: %8d, available: %8d\n",
225          old_pointer_space_->Size(), old_pointer_space_->Available());
226   PrintF("Old data space,     used: %8d, available: %8d\n",
227          old_data_space_->Size(), old_data_space_->Available());
228   PrintF("Code space,         used: %8d, available: %8d\n",
229          code_space_->Size(), code_space_->Available());
230   PrintF("Map space,          used: %8d, available: %8d\n",
231          map_space_->Size(), map_space_->Available());
232   PrintF("Large object space, used: %8d, available: %8d\n",
233          lo_space_->Size(), lo_space_->Available());
234 }
235 #endif
236 
237 
238 // TODO(1238405): Combine the infrastructure for --heap-stats and
239 // --log-gc to avoid the complicated preprocessor and flag testing.
240 void Heap::ReportStatisticsAfterGC() {
241   // As before GC, we use some complicated logic to ensure that
242   // NewSpace statistics are logged exactly once when --log-gc is turned on.
243 #if defined(DEBUG) && defined(ENABLE_LOGGING_AND_PROFILING)
244   if (FLAG_heap_stats) {
245     new_space_.CollectStatistics();
246     ReportHeapStatistics("After GC");
247   } else if (FLAG_log_gc) {
248     new_space_.ReportStatistics();
249   }
250 #elif defined(DEBUG)
251   if (FLAG_heap_stats) ReportHeapStatistics("After GC");
252 #elif defined(ENABLE_LOGGING_AND_PROFILING)
253   if (FLAG_log_gc) new_space_.ReportStatistics();
254 #endif
255 }
256 #endif  // defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
257 
258 
259 void Heap::GarbageCollectionPrologue() {
260   TranscendentalCache::Clear();
261   gc_count_++;
262 #ifdef DEBUG
263   ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
264   allow_allocation(false);
265 
266   if (FLAG_verify_heap) {
267     Verify();
268   }
269 
270   if (FLAG_gc_verbose) Print();
271 
272   if (FLAG_print_rset) {
273     // Not all spaces have remembered set bits that we care about.
274     old_pointer_space_->PrintRSet();
275     map_space_->PrintRSet();
276     lo_space_->PrintRSet();
277   }
278 #endif
279 
280 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
281   ReportStatisticsBeforeGC();
282 #endif
283 }
284 
285 int Heap::SizeOfObjects() {
286   int total = 0;
287   AllSpaces spaces;
288   while (Space* space = spaces.next()) {
289     total += space->Size();
290   }
291   return total;
292 }
293 
294 void Heap::GarbageCollectionEpilogue() {
295 #ifdef DEBUG
296   allow_allocation(true);
297   ZapFromSpace();
298 
299   if (FLAG_verify_heap) {
300     Verify();
301   }
302 
303   if (FLAG_print_global_handles) GlobalHandles::Print();
304   if (FLAG_print_handles) PrintHandles();
305   if (FLAG_gc_verbose) Print();
306   if (FLAG_code_stats) ReportCodeStatistics("After GC");
307 #endif
308 
309   Counters::alive_after_last_gc.Set(SizeOfObjects());
310 
311   Counters::symbol_table_capacity.Set(symbol_table()->Capacity());
312   Counters::number_of_symbols.Set(symbol_table()->NumberOfElements());
313 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
314   ReportStatisticsAfterGC();
315 #endif
316 #ifdef ENABLE_DEBUGGER_SUPPORT
317   Debug::AfterGarbageCollection();
318 #endif
319 }
320 
321 
322 void Heap::CollectAllGarbage(bool force_compaction) {
323   // Since we are ignoring the return value, the exact choice of space does
324   // not matter, so long as we do not specify NEW_SPACE, which would not
325   // cause a full GC.
326   MarkCompactCollector::SetForceCompaction(force_compaction);
327   CollectGarbage(0, OLD_POINTER_SPACE);
328   MarkCompactCollector::SetForceCompaction(false);
329 }
330 
331 
332 void Heap::CollectAllGarbageIfContextDisposed() {
333   // If the garbage collector interface is exposed through the global
334   // gc() function, we avoid being clever about forcing GCs when
335   // contexts are disposed and leave it to the embedder to make
336   // informed decisions about when to force a collection.
337   if (!FLAG_expose_gc && context_disposed_pending_) {
338     HistogramTimerScope scope(&Counters::gc_context);
339     CollectAllGarbage(false);
340   }
341   context_disposed_pending_ = false;
342 }
343 
344 
345 void Heap::NotifyContextDisposed() {
346   context_disposed_pending_ = true;
347 }
348 
349 
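// Performs a garbage collection and reports whether |requested_size| bytes
// are available in |space| afterwards (see the switch at the end).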
350 bool Heap::CollectGarbage(int requested_size, AllocationSpace space) {
351   // The VM is in the GC state until exiting this function.
352   VMState state(GC);
353 
354 #ifdef DEBUG
355   // Reset the allocation timeout to the GC interval, but make sure to
356   // allow at least a few allocations after a collection. The reason
357   // for this is that we have a lot of allocation sequences and we
358   // assume that a garbage collection will allow the subsequent
359   // allocation attempts to go through.
360   allocation_timeout_ = Max(6, FLAG_gc_interval);
361 #endif
362 
363   { GCTracer tracer;
364     GarbageCollectionPrologue();
365     // The GC count was incremented in the prologue.  Tell the tracer about
366     // it.
367     tracer.set_gc_count(gc_count_);
368 
369     GarbageCollector collector = SelectGarbageCollector(space);
370     // Tell the tracer which collector we've selected.
371     tracer.set_collector(collector);
372 
373     HistogramTimer* rate = (collector == SCAVENGER)
374         ? &Counters::gc_scavenger
375         : &Counters::gc_compactor;
376     rate->Start();
377     PerformGarbageCollection(space, collector, &tracer);
378     rate->Stop();
379 
380     GarbageCollectionEpilogue();
381   }
382 
383 
384 #ifdef ENABLE_LOGGING_AND_PROFILING
385   if (FLAG_log_gc) HeapProfiler::WriteSample();
386 #endif
387 
388   switch (space) {
389     case NEW_SPACE:
390       return new_space_.Available() >= requested_size;
391     case OLD_POINTER_SPACE:
392       return old_pointer_space_->Available() >= requested_size;
393     case OLD_DATA_SPACE:
394       return old_data_space_->Available() >= requested_size;
395     case CODE_SPACE:
396       return code_space_->Available() >= requested_size;
397     case MAP_SPACE:
398       return map_space_->Available() >= requested_size;
399     case CELL_SPACE:
400       return cell_space_->Available() >= requested_size;
401     case LO_SPACE:
402       return lo_space_->Available() >= requested_size;
403   }
404   return false;
405 }
406 
407 
408 void Heap::PerformScavenge() {
409   GCTracer tracer;
410   PerformGarbageCollection(NEW_SPACE, SCAVENGER, &tracer);
411 }
412 
413 
414 #ifdef DEBUG
415 // Helper class for verifying the symbol table.
416 class SymbolTableVerifier : public ObjectVisitor {
417  public:
418   SymbolTableVerifier() { }
419   void VisitPointers(Object** start, Object** end) {
420     // Visit all HeapObject pointers in [start, end).
421     for (Object** p = start; p < end; p++) {
422       if ((*p)->IsHeapObject()) {
423         // Check that the symbol is actually a symbol.
424         ASSERT((*p)->IsNull() || (*p)->IsUndefined() || (*p)->IsSymbol());
425       }
426     }
427   }
428 };
429 #endif  // DEBUG
430 
431 
432 static void VerifySymbolTable() {
433 #ifdef DEBUG
434   SymbolTableVerifier verifier;
435   Heap::symbol_table()->IterateElements(&verifier);
436 #endif  // DEBUG
437 }
438 
439 
440 void Heap::EnsureFromSpaceIsCommitted() {
441   if (new_space_.CommitFromSpaceIfNeeded()) return;
442 
443   // Committing memory to from space failed.
444   // Try shrinking and try again.
445   Shrink();
446   if (new_space_.CommitFromSpaceIfNeeded()) return;
447 
448   // Committing memory to from space failed again.
449   // Memory is exhausted and we will die.
450   V8::FatalProcessOutOfMemory("Committing semi space failed.");
451 }
452 
453 
454 void Heap::PerformGarbageCollection(AllocationSpace space,
455                                     GarbageCollector collector,
456                                     GCTracer* tracer) {
457   VerifySymbolTable();
458   if (collector == MARK_COMPACTOR && global_gc_prologue_callback_) {
459     ASSERT(!allocation_allowed_);
460     global_gc_prologue_callback_();
461   }
462   EnsureFromSpaceIsCommitted();
463   if (collector == MARK_COMPACTOR) {
464     MarkCompact(tracer);
465 
466     int old_gen_size = PromotedSpaceSize();
467     old_gen_promotion_limit_ =
468         old_gen_size + Max(kMinimumPromotionLimit, old_gen_size / 3);
469     old_gen_allocation_limit_ =
470         old_gen_size + Max(kMinimumAllocationLimit, old_gen_size / 2);
471     old_gen_exhausted_ = false;
472   }
473   Scavenge();
474 
475   Counters::objs_since_last_young.Set(0);
476 
477   PostGarbageCollectionProcessing();
478 
479   if (collector == MARK_COMPACTOR) {
480     // Register the amount of external allocated memory.
481     amount_of_external_allocated_memory_at_last_global_gc_ =
482         amount_of_external_allocated_memory_;
483   }
484 
485   if (collector == MARK_COMPACTOR && global_gc_epilogue_callback_) {
486     ASSERT(!allocation_allowed_);
487     global_gc_epilogue_callback_();
488   }
489   VerifySymbolTable();
490 }
491 
492 
493 void Heap::PostGarbageCollectionProcessing() {
494   // Process weak handles post gc.
495   {
496     DisableAssertNoAllocation allow_allocation;
497     GlobalHandles::PostGarbageCollectionProcessing();
498   }
499   // Update flat string readers.
500   FlatStringReader::PostGarbageCollectionProcessing();
501 }
502 
503 
504 void Heap::MarkCompact(GCTracer* tracer) {
505   gc_state_ = MARK_COMPACT;
506   mc_count_++;
507   tracer->set_full_gc_count(mc_count_);
508   LOG(ResourceEvent("markcompact", "begin"));
509 
510   MarkCompactCollector::Prepare(tracer);
511 
512   bool is_compacting = MarkCompactCollector::IsCompacting();
513 
514   MarkCompactPrologue(is_compacting);
515 
516   MarkCompactCollector::CollectGarbage();
517 
518   MarkCompactEpilogue(is_compacting);
519 
520   LOG(ResourceEvent("markcompact", "end"));
521 
522   gc_state_ = NOT_IN_GC;
523 
524   Shrink();
525 
526   Counters::objs_since_last_full.Set(0);
527   context_disposed_pending_ = false;
528 }
529 
530 
531 void Heap::MarkCompactPrologue(bool is_compacting) {
532   // At any old GC clear the keyed lookup cache to enable collection of unused
533   // maps.
534   KeyedLookupCache::Clear();
535   ContextSlotCache::Clear();
536   DescriptorLookupCache::Clear();
537 
538   CompilationCache::MarkCompactPrologue();
539 
540   Top::MarkCompactPrologue(is_compacting);
541   ThreadManager::MarkCompactPrologue(is_compacting);
542 }
543 
544 
545 void Heap::MarkCompactEpilogue(bool is_compacting) {
546   Top::MarkCompactEpilogue(is_compacting);
547   ThreadManager::MarkCompactEpilogue(is_compacting);
548 }
549 
550 
551 Object* Heap::FindCodeObject(Address a) {
552   Object* obj = code_space_->FindObject(a);
553   if (obj->IsFailure()) {
554     obj = lo_space_->FindObject(a);
555   }
556   ASSERT(!obj->IsFailure());
557   return obj;
558 }
559 
560 
561 // Helper class for copying HeapObjects
562 class ScavengeVisitor: public ObjectVisitor {
563  public:
564 
565   void VisitPointer(Object** p) { ScavengePointer(p); }
566 
567   void VisitPointers(Object** start, Object** end) {
568     // Copy all HeapObject pointers in [start, end)
569     for (Object** p = start; p < end; p++) ScavengePointer(p);
570   }
571 
572  private:
573   void ScavengePointer(Object** p) {
574     Object* object = *p;
575     if (!Heap::InNewSpace(object)) return;
576     Heap::ScavengeObject(reinterpret_cast<HeapObject**>(p),
577                          reinterpret_cast<HeapObject*>(object));
578   }
579 };
580 
581 
582 // A queue of pointers and maps of to-be-promoted objects during a
583 // scavenge collection.
584 class PromotionQueue {
585  public:
586   void Initialize(Address start_address) {
587     front_ = rear_ = reinterpret_cast<HeapObject**>(start_address);
588   }
589 
590   bool is_empty() { return front_ <= rear_; }
591 
592   void insert(HeapObject* object, Map* map) {
593     *(--rear_) = object;
594     *(--rear_) = map;
595     // Assert no overflow into live objects.
596     ASSERT(reinterpret_cast<Address>(rear_) >= Heap::new_space()->top());
597   }
598 
599   void remove(HeapObject** object, Map** map) {
600     *object = *(--front_);
601     *map = Map::cast(*(--front_));
602     // Assert no underflow.
603     ASSERT(front_ >= rear_);
604   }
605 
606  private:
607   // The front of the queue is higher in memory than the rear.
608   HeapObject** front_;
609   HeapObject** rear_;
610 };
611 
612 
613 // Shared state read by the scavenge collector and set by ScavengeObject.
614 static PromotionQueue promotion_queue;
615 
616 
617 #ifdef DEBUG
618 // Visitor class to verify pointers in code or data space do not point into
619 // new space.
620 class VerifyNonPointerSpacePointersVisitor: public ObjectVisitor {
621  public:
622   void VisitPointers(Object** start, Object** end) {
623     for (Object** current = start; current < end; current++) {
624       if ((*current)->IsHeapObject()) {
625         ASSERT(!Heap::InNewSpace(HeapObject::cast(*current)));
626       }
627     }
628   }
629 };
630 
631 
632 static void VerifyNonPointerSpacePointers() {
633   // Verify that there are no pointers to new space in spaces where we
634   // do not expect them.
635   VerifyNonPointerSpacePointersVisitor v;
636   HeapObjectIterator code_it(Heap::code_space());
637   while (code_it.has_next()) {
638     HeapObject* object = code_it.next();
639     if (object->IsCode()) {
640       Code::cast(object)->ConvertICTargetsFromAddressToObject();
641       object->Iterate(&v);
642       Code::cast(object)->ConvertICTargetsFromObjectToAddress();
643     } else {
644       // If we find non-code objects in code space (e.g., free list
645       // nodes) we want to verify them as well.
646       object->Iterate(&v);
647     }
648   }
649 
650   HeapObjectIterator data_it(Heap::old_data_space());
651   while (data_it.has_next()) data_it.next()->Iterate(&v);
652 }
653 #endif
654 
655 
656 void Heap::Scavenge() {
657 #ifdef DEBUG
658   if (FLAG_enable_slow_asserts) VerifyNonPointerSpacePointers();
659 #endif
660 
661   gc_state_ = SCAVENGE;
662 
663   // Implements Cheney's copying algorithm
664   LOG(ResourceEvent("scavenge", "begin"));
665 
666   // Clear descriptor cache.
667   DescriptorLookupCache::Clear();
668 
669   // Used for updating survived_since_last_expansion_ at function end.
670   int survived_watermark = PromotedSpaceSize();
671 
672   if (new_space_.Capacity() < new_space_.MaximumCapacity() &&
673       survived_since_last_expansion_ > new_space_.Capacity()) {
674     // Grow the size of new space if there is room to grow and enough
675     // data has survived scavenge since the last expansion.
676     new_space_.Grow();
677     survived_since_last_expansion_ = 0;
678   }
679 
680   // Flip the semispaces.  After flipping, to space is empty, from space has
681   // live objects.
682   new_space_.Flip();
683   new_space_.ResetAllocationInfo();
684 
685   // We need to sweep newly copied objects which can be either in the
686   // to space or promoted to the old generation.  For to-space
687   // objects, we treat the bottom of the to space as a queue.  Newly
688   // copied and unswept objects lie between a 'front' mark and the
689   // allocation pointer.
690   //
691   // Promoted objects can go into various old-generation spaces, and
692   // can be allocated internally in the spaces (from the free list).
693   // We treat the top of the to space as a queue of addresses of
694   // promoted objects.  The addresses of newly promoted and unswept
695   // objects lie between a 'front' mark and a 'rear' mark that is
696   // updated as a side effect of promoting an object.
697   //
698   // There is guaranteed to be enough room at the top of the to space
699   // for the addresses of promoted objects: every object promoted
700   // frees up its size in bytes from the top of the new space, and
701   // objects are at least one pointer in size.
702   Address new_space_front = new_space_.ToSpaceLow();
703   promotion_queue.Initialize(new_space_.ToSpaceHigh());
704 
705   ScavengeVisitor scavenge_visitor;
706   // Copy roots.
707   IterateRoots(&scavenge_visitor);
708 
709   // Copy objects reachable from weak pointers.
710   GlobalHandles::IterateWeakRoots(&scavenge_visitor);
711 
712   // Copy objects reachable from the old generation.  By definition,
713   // there are no intergenerational pointers in code or data spaces.
714   IterateRSet(old_pointer_space_, &ScavengePointer);
715   IterateRSet(map_space_, &ScavengePointer);
716   lo_space_->IterateRSet(&ScavengePointer);
717 
718   // Copy objects reachable from cells by scavenging cell values directly.
719   HeapObjectIterator cell_iterator(cell_space_);
720   while (cell_iterator.has_next()) {
721     HeapObject* cell = cell_iterator.next();
722     if (cell->IsJSGlobalPropertyCell()) {
723       Address value_address =
724           reinterpret_cast<Address>(cell) +
725           (JSGlobalPropertyCell::kValueOffset - kHeapObjectTag);
726       scavenge_visitor.VisitPointer(reinterpret_cast<Object**>(value_address));
727     }
728   }
729 
730   do {
731     ASSERT(new_space_front <= new_space_.top());
732 
733     // The addresses new_space_front and new_space_.top() define a
734     // queue of unprocessed copied objects.  Process them until the
735     // queue is empty.
736     while (new_space_front < new_space_.top()) {
737       HeapObject* object = HeapObject::FromAddress(new_space_front);
738       object->Iterate(&scavenge_visitor);
739       new_space_front += object->Size();
740     }
741 
742     // Promote and process all the to-be-promoted objects.
743     while (!promotion_queue.is_empty()) {
744       HeapObject* source;
745       Map* map;
746       promotion_queue.remove(&source, &map);
747       // Copy the from-space object to its new location (given by the
748       // forwarding address) and fix its map.
749       HeapObject* target = source->map_word().ToForwardingAddress();
750       CopyBlock(reinterpret_cast<Object**>(target->address()),
751                 reinterpret_cast<Object**>(source->address()),
752                 source->SizeFromMap(map));
753       target->set_map(map);
754 
755 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
756       // Update NewSpace stats if necessary.
757       RecordCopiedObject(target);
758 #endif
759       // Visit the newly copied object for pointers to new space.
760       target->Iterate(&scavenge_visitor);
761       UpdateRSet(target);
762     }
763 
764     // Take another spin if there are now unswept objects in new space
765     // (there are currently no more unswept promoted objects).
766   } while (new_space_front < new_space_.top());
767 
768   // Set age mark.
769   new_space_.set_age_mark(new_space_.top());
770 
771   // Update how much has survived scavenge.
772   survived_since_last_expansion_ +=
773       (PromotedSpaceSize() - survived_watermark) + new_space_.Size();
774 
775   LOG(ResourceEvent("scavenge", "end"));
776 
777   gc_state_ = NOT_IN_GC;
778 }
779 
780 
781 void Heap::ClearRSetRange(Address start, int size_in_bytes) {
782   uint32_t start_bit;
783   Address start_word_address =
784       Page::ComputeRSetBitPosition(start, 0, &start_bit);
785   uint32_t end_bit;
786   Address end_word_address =
787       Page::ComputeRSetBitPosition(start + size_in_bytes - kIntSize,
788                                    0,
789                                    &end_bit);
790 
791   // We want to clear the bits in the starting word starting with the
792   // first bit, and in the ending word up to and including the last
793   // bit.  Build a pair of bitmasks to do that.
794   uint32_t start_bitmask = start_bit - 1;
795   uint32_t end_bitmask = ~((end_bit << 1) - 1);
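  // Worked example (illustration only): if start_bit == (1 << 3) and
  // end_bit == (1 << 7), then start_bitmask == 0x00000007 keeps bits 0..2 of
  // the start word, end_bitmask == 0xFFFFFF00 keeps bits 8..31 of the end
  // word, and bits 3..7 are cleared.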
796 
797   // If the start address and end address are the same, we mask that
798   // word once, otherwise mask the starting and ending word
799   // separately and all the ones in between.
800   if (start_word_address == end_word_address) {
801     Memory::uint32_at(start_word_address) &= (start_bitmask | end_bitmask);
802   } else {
803     Memory::uint32_at(start_word_address) &= start_bitmask;
804     Memory::uint32_at(end_word_address) &= end_bitmask;
805     start_word_address += kIntSize;
806     memset(start_word_address, 0, end_word_address - start_word_address);
807   }
808 }
809 
810 
811 class UpdateRSetVisitor: public ObjectVisitor {
812  public:
813 
814   void VisitPointer(Object** p) {
815     UpdateRSet(p);
816   }
817 
818   void VisitPointers(Object** start, Object** end) {
819     // Update a store into slots [start, end), used (a) to update remembered
820     // set when promoting a young object to old space or (b) to rebuild
821     // remembered sets after a mark-compact collection.
822     for (Object** p = start; p < end; p++) UpdateRSet(p);
823   }
824  private:
825 
826   void UpdateRSet(Object** p) {
827     // The remembered set should not be set.  It should be clear for objects
828     // newly copied to old space, and it is cleared before rebuilding in the
829     // mark-compact collector.
830     ASSERT(!Page::IsRSetSet(reinterpret_cast<Address>(p), 0));
831     if (Heap::InNewSpace(*p)) {
832       Page::SetRSet(reinterpret_cast<Address>(p), 0);
833     }
834   }
835 };
836 
837 
838 int Heap::UpdateRSet(HeapObject* obj) {
839   ASSERT(!InNewSpace(obj));
840   // Special handling of fixed arrays to iterate the body based on the start
841   // address and offset.  Just iterating the pointers as in UpdateRSetVisitor
842   // will not work because Page::SetRSet needs to have the start of the
843   // object for large object pages.
844   if (obj->IsFixedArray()) {
845     FixedArray* array = FixedArray::cast(obj);
846     int length = array->length();
847     for (int i = 0; i < length; i++) {
848       int offset = FixedArray::kHeaderSize + i * kPointerSize;
849       ASSERT(!Page::IsRSetSet(obj->address(), offset));
850       if (Heap::InNewSpace(array->get(i))) {
851         Page::SetRSet(obj->address(), offset);
852       }
853     }
854   } else if (!obj->IsCode()) {
855     // Skip code object, we know it does not contain inter-generational
856     // pointers.
857     UpdateRSetVisitor v;
858     obj->Iterate(&v);
859   }
860   return obj->Size();
861 }
862 
863 
864 void Heap::RebuildRSets() {
865   // By definition, we do not care about remembered set bits in code,
866   // data, or cell spaces.
867   map_space_->ClearRSet();
868   RebuildRSets(map_space_);
869 
870   old_pointer_space_->ClearRSet();
871   RebuildRSets(old_pointer_space_);
872 
873   Heap::lo_space_->ClearRSet();
874   RebuildRSets(lo_space_);
875 }
876 
877 
878 void Heap::RebuildRSets(PagedSpace* space) {
879   HeapObjectIterator it(space);
880   while (it.has_next()) Heap::UpdateRSet(it.next());
881 }
882 
883 
884 void Heap::RebuildRSets(LargeObjectSpace* space) {
885   LargeObjectIterator it(space);
886   while (it.has_next()) Heap::UpdateRSet(it.next());
887 }
888 
889 
890 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
891 void Heap::RecordCopiedObject(HeapObject* obj) {
892   bool should_record = false;
893 #ifdef DEBUG
894   should_record = FLAG_heap_stats;
895 #endif
896 #ifdef ENABLE_LOGGING_AND_PROFILING
897   should_record = should_record || FLAG_log_gc;
898 #endif
899   if (should_record) {
900     if (new_space_.Contains(obj)) {
901       new_space_.RecordAllocation(obj);
902     } else {
903       new_space_.RecordPromotion(obj);
904     }
905   }
906 }
907 #endif  // defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
908 
909 
910 
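// Copies |source| into |target| and writes a forwarding address into the map
// word of |source| so later visitors can find the new location.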
911 HeapObject* Heap::MigrateObject(HeapObject* source,
912                                 HeapObject* target,
913                                 int size) {
914   // Copy the content of source to target.
915   CopyBlock(reinterpret_cast<Object**>(target->address()),
916             reinterpret_cast<Object**>(source->address()),
917             size);
918 
919   // Set the forwarding address.
920   source->set_map_word(MapWord::FromForwardingAddress(target));
921 
922 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
923   // Update NewSpace stats if necessary.
924   RecordCopiedObject(target);
925 #endif
926 
927   return target;
928 }
929 
930 
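// A cons string whose second component is the empty string is already flat;
// the scavenger can shortcut it to its first component instead of copying
// the wrapper (see ScavengeObjectSlow below).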
931 static inline bool IsShortcutCandidate(HeapObject* object, Map* map) {
932   STATIC_ASSERT(kNotStringTag != 0 && kSymbolTag != 0);
933   ASSERT(object->map() == map);
934   InstanceType type = map->instance_type();
935   if ((type & kShortcutTypeMask) != kShortcutTypeTag) return false;
936   ASSERT(object->IsString() && !object->IsSymbol());
937   return ConsString::cast(object)->unchecked_second() == Heap::empty_string();
938 }
939 
940 
941 void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
942   ASSERT(InFromSpace(object));
943   MapWord first_word = object->map_word();
944   ASSERT(!first_word.IsForwardingAddress());
945 
946   // Optimization: Bypass flattened ConsString objects.
947   if (IsShortcutCandidate(object, first_word.ToMap())) {
948     object = HeapObject::cast(ConsString::cast(object)->unchecked_first());
949     *p = object;
950     // After patching *p we have to repeat the checks that object is in the
951     // active semispace of the young generation and not already copied.
952     if (!InNewSpace(object)) return;
953     first_word = object->map_word();
954     if (first_word.IsForwardingAddress()) {
955       *p = first_word.ToForwardingAddress();
956       return;
957     }
958   }
959 
960   int object_size = object->SizeFromMap(first_word.ToMap());
961   // We rely on live objects in new space to be at least two pointers,
962   // so we can store the from-space address and map pointer of promoted
963   // objects in the to space.
964   ASSERT(object_size >= 2 * kPointerSize);
965 
966   // If the object should be promoted, we try to copy it to old space.
967   if (ShouldBePromoted(object->address(), object_size)) {
968     Object* result;
969     if (object_size > MaxObjectSizeInPagedSpace()) {
970       result = lo_space_->AllocateRawFixedArray(object_size);
971       if (!result->IsFailure()) {
972         // Save the from-space object pointer and its map pointer at the
973         // top of the to space to be swept and copied later.  Write the
974         // forwarding address over the map word of the from-space
975         // object.
976         HeapObject* target = HeapObject::cast(result);
977         promotion_queue.insert(object, first_word.ToMap());
978         object->set_map_word(MapWord::FromForwardingAddress(target));
979 
980         // Give the space allocated for the result a proper map by
981         // treating it as a free list node (not linked into the free
982         // list).
983         FreeListNode* node = FreeListNode::FromAddress(target->address());
984         node->set_size(object_size);
985 
986         *p = target;
987         return;
988       }
989     } else {
990       OldSpace* target_space = Heap::TargetSpace(object);
991       ASSERT(target_space == Heap::old_pointer_space_ ||
992              target_space == Heap::old_data_space_);
993       result = target_space->AllocateRaw(object_size);
994       if (!result->IsFailure()) {
995         HeapObject* target = HeapObject::cast(result);
996         if (target_space == Heap::old_pointer_space_) {
997           // Save the from-space object pointer and its map pointer at the
998           // top of the to space to be swept and copied later.  Write the
999           // forwarding address over the map word of the from-space
1000           // object.
1001           promotion_queue.insert(object, first_word.ToMap());
1002           object->set_map_word(MapWord::FromForwardingAddress(target));
1003 
1004           // Give the space allocated for the result a proper map by
1005           // treating it as a free list node (not linked into the free
1006           // list).
1007           FreeListNode* node = FreeListNode::FromAddress(target->address());
1008           node->set_size(object_size);
1009 
1010           *p = target;
1011         } else {
1012           // Objects promoted to the data space can be copied immediately
1013           // and not revisited---we will never sweep that space for
1014           // pointers and the copied objects do not contain pointers to
1015           // new space objects.
1016           *p = MigrateObject(object, target, object_size);
1017 #ifdef DEBUG
1018           VerifyNonPointerSpacePointersVisitor v;
1019           (*p)->Iterate(&v);
1020 #endif
1021         }
1022         return;
1023       }
1024     }
1025   }
1026   // The object should remain in new space or the old space allocation failed.
1027   Object* result = new_space_.AllocateRaw(object_size);
1028   // Failed allocation at this point is utterly unexpected.
1029   ASSERT(!result->IsFailure());
1030   *p = MigrateObject(object, HeapObject::cast(result), object_size);
1031 }
1032 
1033 
1034 void Heap::ScavengePointer(HeapObject** p) {
1035   ScavengeObject(p, *p);
1036 }
1037 
1038 
1039 Object* Heap::AllocatePartialMap(InstanceType instance_type,
1040                                  int instance_size) {
1041   Object* result = AllocateRawMap();
1042   if (result->IsFailure()) return result;
1043 
1044   // Map::cast cannot be used due to uninitialized map field.
1045   reinterpret_cast<Map*>(result)->set_map(raw_unchecked_meta_map());
1046   reinterpret_cast<Map*>(result)->set_instance_type(instance_type);
1047   reinterpret_cast<Map*>(result)->set_instance_size(instance_size);
1048   reinterpret_cast<Map*>(result)->set_inobject_properties(0);
1049   reinterpret_cast<Map*>(result)->set_unused_property_fields(0);
1050   return result;
1051 }
1052 
1053 
1054 Object* Heap::AllocateMap(InstanceType instance_type, int instance_size) {
1055   Object* result = AllocateRawMap();
1056   if (result->IsFailure()) return result;
1057 
1058   Map* map = reinterpret_cast<Map*>(result);
1059   map->set_map(meta_map());
1060   map->set_instance_type(instance_type);
1061   map->set_prototype(null_value());
1062   map->set_constructor(null_value());
1063   map->set_instance_size(instance_size);
1064   map->set_inobject_properties(0);
1065   map->set_pre_allocated_property_fields(0);
1066   map->set_instance_descriptors(empty_descriptor_array());
1067   map->set_code_cache(empty_fixed_array());
1068   map->set_unused_property_fields(0);
1069   map->set_bit_field(0);
1070   map->set_bit_field2(0);
1071   return map;
1072 }
1073 
1074 
1075 const Heap::StringTypeTable Heap::string_type_table[] = {
1076 #define STRING_TYPE_ELEMENT(type, size, name, camel_name)                      \
1077   {type, size, k##camel_name##MapRootIndex},
1078   STRING_TYPE_LIST(STRING_TYPE_ELEMENT)
1079 #undef STRING_TYPE_ELEMENT
1080 };
1081 
1082 
1083 const Heap::ConstantSymbolTable Heap::constant_symbol_table[] = {
1084 #define CONSTANT_SYMBOL_ELEMENT(name, contents)                                \
1085   {contents, k##name##RootIndex},
1086   SYMBOL_LIST(CONSTANT_SYMBOL_ELEMENT)
1087 #undef CONSTANT_SYMBOL_ELEMENT
1088 };
1089 
1090 
1091 const Heap::StructTable Heap::struct_table[] = {
1092 #define STRUCT_TABLE_ELEMENT(NAME, Name, name)                                 \
1093   { NAME##_TYPE, Name::kSize, k##Name##MapRootIndex },
1094   STRUCT_LIST(STRUCT_TABLE_ELEMENT)
1095 #undef STRUCT_TABLE_ELEMENT
1096 };
1097 
1098 
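// Bootstraps the map hierarchy.  The meta map, fixed array map and oddball
// map are created first via AllocatePartialMap and patched up once the empty
// fixed array and empty descriptor array exist.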
1099 bool Heap::CreateInitialMaps() {
1100   Object* obj = AllocatePartialMap(MAP_TYPE, Map::kSize);
1101   if (obj->IsFailure()) return false;
1102   // Map::cast cannot be used due to uninitialized map field.
1103   Map* new_meta_map = reinterpret_cast<Map*>(obj);
1104   set_meta_map(new_meta_map);
1105   new_meta_map->set_map(new_meta_map);
1106 
1107   obj = AllocatePartialMap(FIXED_ARRAY_TYPE, FixedArray::kHeaderSize);
1108   if (obj->IsFailure()) return false;
1109   set_fixed_array_map(Map::cast(obj));
1110 
1111   obj = AllocatePartialMap(ODDBALL_TYPE, Oddball::kSize);
1112   if (obj->IsFailure()) return false;
1113   set_oddball_map(Map::cast(obj));
1114 
1115   // Allocate the empty array
1116   obj = AllocateEmptyFixedArray();
1117   if (obj->IsFailure()) return false;
1118   set_empty_fixed_array(FixedArray::cast(obj));
1119 
1120   obj = Allocate(oddball_map(), OLD_DATA_SPACE);
1121   if (obj->IsFailure()) return false;
1122   set_null_value(obj);
1123 
1124   // Allocate the empty descriptor array.
1125   obj = AllocateEmptyFixedArray();
1126   if (obj->IsFailure()) return false;
1127   set_empty_descriptor_array(DescriptorArray::cast(obj));
1128 
1129   // Fix the instance_descriptors for the existing maps.
1130   meta_map()->set_instance_descriptors(empty_descriptor_array());
1131   meta_map()->set_code_cache(empty_fixed_array());
1132 
1133   fixed_array_map()->set_instance_descriptors(empty_descriptor_array());
1134   fixed_array_map()->set_code_cache(empty_fixed_array());
1135 
1136   oddball_map()->set_instance_descriptors(empty_descriptor_array());
1137   oddball_map()->set_code_cache(empty_fixed_array());
1138 
1139   // Fix prototype object for existing maps.
1140   meta_map()->set_prototype(null_value());
1141   meta_map()->set_constructor(null_value());
1142 
1143   fixed_array_map()->set_prototype(null_value());
1144   fixed_array_map()->set_constructor(null_value());
1145 
1146   oddball_map()->set_prototype(null_value());
1147   oddball_map()->set_constructor(null_value());
1148 
1149   obj = AllocateMap(HEAP_NUMBER_TYPE, HeapNumber::kSize);
1150   if (obj->IsFailure()) return false;
1151   set_heap_number_map(Map::cast(obj));
1152 
1153   obj = AllocateMap(PROXY_TYPE, Proxy::kSize);
1154   if (obj->IsFailure()) return false;
1155   set_proxy_map(Map::cast(obj));
1156 
1157   for (unsigned i = 0; i < ARRAY_SIZE(string_type_table); i++) {
1158     const StringTypeTable& entry = string_type_table[i];
1159     obj = AllocateMap(entry.type, entry.size);
1160     if (obj->IsFailure()) return false;
1161     roots_[entry.index] = Map::cast(obj);
1162   }
1163 
1164   obj = AllocateMap(SHORT_STRING_TYPE, SeqTwoByteString::kAlignedSize);
1165   if (obj->IsFailure()) return false;
1166   set_undetectable_short_string_map(Map::cast(obj));
1167   Map::cast(obj)->set_is_undetectable();
1168 
1169   obj = AllocateMap(MEDIUM_STRING_TYPE, SeqTwoByteString::kAlignedSize);
1170   if (obj->IsFailure()) return false;
1171   set_undetectable_medium_string_map(Map::cast(obj));
1172   Map::cast(obj)->set_is_undetectable();
1173 
1174   obj = AllocateMap(LONG_STRING_TYPE, SeqTwoByteString::kAlignedSize);
1175   if (obj->IsFailure()) return false;
1176   set_undetectable_long_string_map(Map::cast(obj));
1177   Map::cast(obj)->set_is_undetectable();
1178 
1179   obj = AllocateMap(SHORT_ASCII_STRING_TYPE, SeqAsciiString::kAlignedSize);
1180   if (obj->IsFailure()) return false;
1181   set_undetectable_short_ascii_string_map(Map::cast(obj));
1182   Map::cast(obj)->set_is_undetectable();
1183 
1184   obj = AllocateMap(MEDIUM_ASCII_STRING_TYPE, SeqAsciiString::kAlignedSize);
1185   if (obj->IsFailure()) return false;
1186   set_undetectable_medium_ascii_string_map(Map::cast(obj));
1187   Map::cast(obj)->set_is_undetectable();
1188 
1189   obj = AllocateMap(LONG_ASCII_STRING_TYPE, SeqAsciiString::kAlignedSize);
1190   if (obj->IsFailure()) return false;
1191   set_undetectable_long_ascii_string_map(Map::cast(obj));
1192   Map::cast(obj)->set_is_undetectable();
1193 
1194   obj = AllocateMap(BYTE_ARRAY_TYPE, ByteArray::kAlignedSize);
1195   if (obj->IsFailure()) return false;
1196   set_byte_array_map(Map::cast(obj));
1197 
1198   obj = AllocateMap(PIXEL_ARRAY_TYPE, PixelArray::kAlignedSize);
1199   if (obj->IsFailure()) return false;
1200   set_pixel_array_map(Map::cast(obj));
1201 
1202   obj = AllocateMap(CODE_TYPE, Code::kHeaderSize);
1203   if (obj->IsFailure()) return false;
1204   set_code_map(Map::cast(obj));
1205 
1206   obj = AllocateMap(JS_GLOBAL_PROPERTY_CELL_TYPE,
1207                     JSGlobalPropertyCell::kSize);
1208   if (obj->IsFailure()) return false;
1209   set_global_property_cell_map(Map::cast(obj));
1210 
1211   obj = AllocateMap(FILLER_TYPE, kPointerSize);
1212   if (obj->IsFailure()) return false;
1213   set_one_pointer_filler_map(Map::cast(obj));
1214 
1215   obj = AllocateMap(FILLER_TYPE, 2 * kPointerSize);
1216   if (obj->IsFailure()) return false;
1217   set_two_pointer_filler_map(Map::cast(obj));
1218 
1219   for (unsigned i = 0; i < ARRAY_SIZE(struct_table); i++) {
1220     const StructTable& entry = struct_table[i];
1221     obj = AllocateMap(entry.type, entry.size);
1222     if (obj->IsFailure()) return false;
1223     roots_[entry.index] = Map::cast(obj);
1224   }
1225 
1226   obj = AllocateMap(FIXED_ARRAY_TYPE, HeapObject::kHeaderSize);
1227   if (obj->IsFailure()) return false;
1228   set_hash_table_map(Map::cast(obj));
1229 
1230   obj = AllocateMap(FIXED_ARRAY_TYPE, HeapObject::kHeaderSize);
1231   if (obj->IsFailure()) return false;
1232   set_context_map(Map::cast(obj));
1233 
1234   obj = AllocateMap(FIXED_ARRAY_TYPE, HeapObject::kHeaderSize);
1235   if (obj->IsFailure()) return false;
1236   set_catch_context_map(Map::cast(obj));
1237 
1238   obj = AllocateMap(FIXED_ARRAY_TYPE, HeapObject::kHeaderSize);
1239   if (obj->IsFailure()) return false;
1240   set_global_context_map(Map::cast(obj));
1241 
1242   obj = AllocateMap(JS_FUNCTION_TYPE, JSFunction::kSize);
1243   if (obj->IsFailure()) return false;
1244   set_boilerplate_function_map(Map::cast(obj));
1245 
1246   obj = AllocateMap(SHARED_FUNCTION_INFO_TYPE, SharedFunctionInfo::kSize);
1247   if (obj->IsFailure()) return false;
1248   set_shared_function_info_map(Map::cast(obj));
1249 
1250   ASSERT(!Heap::InNewSpace(Heap::empty_fixed_array()));
1251   return true;
1252 }
1253 
1254 
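// Allocates a HeapNumber holding |value|.  Pretenured numbers go directly to
// old data space; otherwise AllocateRaw is called with NEW_SPACE as the
// primary space and OLD_DATA_SPACE as the secondary (presumably retry) space.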
1255 Object* Heap::AllocateHeapNumber(double value, PretenureFlag pretenure) {
1256   // Statically ensure that it is safe to allocate heap numbers in paged
1257   // spaces.
1258   STATIC_ASSERT(HeapNumber::kSize <= Page::kMaxHeapObjectSize);
1259   AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
1260   Object* result = AllocateRaw(HeapNumber::kSize, space, OLD_DATA_SPACE);
1261   if (result->IsFailure()) return result;
1262 
1263   HeapObject::cast(result)->set_map(heap_number_map());
1264   HeapNumber::cast(result)->set_value(value);
1265   return result;
1266 }
1267 
1268 
1269 Object* Heap::AllocateHeapNumber(double value) {
1270   // Use general version, if we're forced to always allocate.
1271   if (always_allocate()) return AllocateHeapNumber(value, NOT_TENURED);
1272   // This version of AllocateHeapNumber is optimized for
1273   // allocation in new space.
1274   STATIC_ASSERT(HeapNumber::kSize <= Page::kMaxHeapObjectSize);
1275   ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
1276   Object* result = new_space_.AllocateRaw(HeapNumber::kSize);
1277   if (result->IsFailure()) return result;
1278   HeapObject::cast(result)->set_map(heap_number_map());
1279   HeapNumber::cast(result)->set_value(value);
1280   return result;
1281 }
1282 
1283 
1284 Object* Heap::AllocateJSGlobalPropertyCell(Object* value) {
1285   Object* result = AllocateRawCell();
1286   if (result->IsFailure()) return result;
1287   HeapObject::cast(result)->set_map(global_property_cell_map());
1288   JSGlobalPropertyCell::cast(result)->set_value(value);
1289   return result;
1290 }
1291 
1292 
1293 Object* Heap::CreateOddball(Map* map,
1294                             const char* to_string,
1295                             Object* to_number) {
1296   Object* result = Allocate(map, OLD_DATA_SPACE);
1297   if (result->IsFailure()) return result;
1298   return Oddball::cast(result)->Initialize(to_string, to_number);
1299 }
1300 
1301 
1302 bool Heap::CreateApiObjects() {
1303   Object* obj;
1304 
1305   obj = AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
1306   if (obj->IsFailure()) return false;
1307   set_neander_map(Map::cast(obj));
1308 
1309   obj = Heap::AllocateJSObjectFromMap(neander_map());
1310   if (obj->IsFailure()) return false;
1311   Object* elements = AllocateFixedArray(2);
1312   if (elements->IsFailure()) return false;
1313   FixedArray::cast(elements)->set(0, Smi::FromInt(0));
1314   JSObject::cast(obj)->set_elements(FixedArray::cast(elements));
1315   set_message_listeners(JSObject::cast(obj));
1316 
1317   return true;
1318 }
1319 
1320 
1321 void Heap::CreateCEntryStub() {
1322   CEntryStub stub;
1323   set_c_entry_code(*stub.GetCode());
1324 }
1325 
1326 
1327 #if V8_TARGET_ARCH_ARM && V8_NATIVE_REGEXP
1328 void Heap::CreateRegExpCEntryStub() {
1329   RegExpCEntryStub stub;
1330   set_re_c_entry_code(*stub.GetCode());
1331 }
1332 #endif
1333 
1334 
1335 void Heap::CreateCEntryDebugBreakStub() {
1336   CEntryDebugBreakStub stub;
1337   set_c_entry_debug_break_code(*stub.GetCode());
1338 }
1339 
1340 
1341 void Heap::CreateJSEntryStub() {
1342   JSEntryStub stub;
1343   set_js_entry_code(*stub.GetCode());
1344 }
1345 
1346 
1347 void Heap::CreateJSConstructEntryStub() {
1348   JSConstructEntryStub stub;
1349   set_js_construct_entry_code(*stub.GetCode());
1350 }
1351 
1352 
1353 void Heap::CreateFixedStubs() {
1354   // Here we create roots for fixed stubs. They are needed at GC
1355   // for cooking and uncooking (check out frames.cc).
1356   // This eliminates the need for doing dictionary lookups in the
1357   // stub cache for these stubs.
1358   HandleScope scope;
1359   // gcc-4.4 has problem generating correct code of following snippet:
1360   // {  CEntryStub stub;
1361   //    c_entry_code_ = *stub.GetCode();
1362   // }
1363   // {  CEntryDebugBreakStub stub;
1364   //    c_entry_debug_break_code_ = *stub.GetCode();
1365   // }
1366   // To workaround the problem, make separate functions without inlining.
1367   Heap::CreateCEntryStub();
1368   Heap::CreateCEntryDebugBreakStub();
1369   Heap::CreateJSEntryStub();
1370   Heap::CreateJSConstructEntryStub();
1371 #if V8_TARGET_ARCH_ARM && V8_NATIVE_REGEXP
1372   Heap::CreateRegExpCEntryStub();
1373 #endif
1374 }
1375 
1376 
1377 bool Heap::CreateInitialObjects() {
1378   Object* obj;
1379 
1380   // The -0 value must be set before NumberFromDouble works.
1381   obj = AllocateHeapNumber(-0.0, TENURED);
1382   if (obj->IsFailure()) return false;
1383   set_minus_zero_value(obj);
1384   ASSERT(signbit(minus_zero_value()->Number()) != 0);
1385 
1386   obj = AllocateHeapNumber(OS::nan_value(), TENURED);
1387   if (obj->IsFailure()) return false;
1388   set_nan_value(obj);
1389 
1390   obj = Allocate(oddball_map(), OLD_DATA_SPACE);
1391   if (obj->IsFailure()) return false;
1392   set_undefined_value(obj);
1393   ASSERT(!InNewSpace(undefined_value()));
1394 
1395   // Allocate initial symbol table.
1396   obj = SymbolTable::Allocate(kInitialSymbolTableSize);
1397   if (obj->IsFailure()) return false;
1398   // Don't use set_symbol_table() due to asserts.
1399   roots_[kSymbolTableRootIndex] = obj;
1400 
1401   // Assign the print strings for oddballs after creating the symbol table.
1402   Object* symbol = LookupAsciiSymbol("undefined");
1403   if (symbol->IsFailure()) return false;
1404   Oddball::cast(undefined_value())->set_to_string(String::cast(symbol));
1405   Oddball::cast(undefined_value())->set_to_number(nan_value());
1406 
1407   // Assign the print strings for oddballs after creating the symbol table.
1408   symbol = LookupAsciiSymbol("null");
1409   if (symbol->IsFailure()) return false;
1410   Oddball::cast(null_value())->set_to_string(String::cast(symbol));
1411   Oddball::cast(null_value())->set_to_number(Smi::FromInt(0));
1412 
1413   // Initialize the null_value, which was allocated in CreateInitialMaps.
1414   obj = Oddball::cast(null_value())->Initialize("null", Smi::FromInt(0));
1415   if (obj->IsFailure()) return false;
1416 
1417   obj = CreateOddball(oddball_map(), "true", Smi::FromInt(1));
1418   if (obj->IsFailure()) return false;
1419   set_true_value(obj);
1420 
1421   obj = CreateOddball(oddball_map(), "false", Smi::FromInt(0));
1422   if (obj->IsFailure()) return false;
1423   set_false_value(obj);
1424 
1425   obj = CreateOddball(oddball_map(), "hole", Smi::FromInt(-1));
1426   if (obj->IsFailure()) return false;
1427   set_the_hole_value(obj);
1428 
1429   obj = CreateOddball(
1430       oddball_map(), "no_interceptor_result_sentinel", Smi::FromInt(-2));
1431   if (obj->IsFailure()) return false;
1432   set_no_interceptor_result_sentinel(obj);
1433 
1434   obj = CreateOddball(oddball_map(), "termination_exception", Smi::FromInt(-3));
1435   if (obj->IsFailure()) return false;
1436   set_termination_exception(obj);
1437 
1438   // Allocate the empty string.
1439   obj = AllocateRawAsciiString(0, TENURED);
1440   if (obj->IsFailure()) return false;
1441   set_empty_string(String::cast(obj));
1442 
1443   for (unsigned i = 0; i < ARRAY_SIZE(constant_symbol_table); i++) {
1444     obj = LookupAsciiSymbol(constant_symbol_table[i].contents);
1445     if (obj->IsFailure()) return false;
1446     roots_[constant_symbol_table[i].index] = String::cast(obj);
1447   }
1448 
1449   // Allocate the hidden symbol which is used to identify the hidden properties
1450   // in JSObjects. The hash code has a special value so that it will not match
1451   // the empty string when searching for the property. It cannot be part of the
1452   // loop above because it needs to be allocated manually with the special
1453   // hash code in place. The hash code for the hidden_symbol is zero to ensure
1454   // that it will always be at the first entry in property descriptors.
1455   obj = AllocateSymbol(CStrVector(""), 0, String::kHashComputedMask);
1456   if (obj->IsFailure()) return false;
1457   hidden_symbol_ = String::cast(obj);
1458 
1459   // Allocate the proxy for __proto__.
1460   obj = AllocateProxy((Address) &Accessors::ObjectPrototype);
1461   if (obj->IsFailure()) return false;
1462   set_prototype_accessors(Proxy::cast(obj));
1463 
1464   // Allocate the code_stubs dictionary. The initial size is set to avoid
1465   // expanding the dictionary during bootstrapping.
1466   obj = NumberDictionary::Allocate(128);
1467   if (obj->IsFailure()) return false;
1468   set_code_stubs(NumberDictionary::cast(obj));
1469 
1470   // Allocate the non_monomorphic_cache used in stub-cache.cc. The initial size
1471   // is set to avoid expanding the dictionary during bootstrapping.
1472   obj = NumberDictionary::Allocate(64);
1473   if (obj->IsFailure()) return false;
1474   set_non_monomorphic_cache(NumberDictionary::cast(obj));
1475 
1476   CreateFixedStubs();
1477 
1478   // Allocate the number->string conversion cache
1479   obj = AllocateFixedArray(kNumberStringCacheSize * 2);
1480   if (obj->IsFailure()) return false;
1481   set_number_string_cache(FixedArray::cast(obj));
1482 
1483   // Allocate cache for single character strings.
1484   obj = AllocateFixedArray(String::kMaxAsciiCharCode+1);
1485   if (obj->IsFailure()) return false;
1486   set_single_character_string_cache(FixedArray::cast(obj));
1487 
1488   // Allocate cache for external strings pointing to native source code.
1489   obj = AllocateFixedArray(Natives::GetBuiltinsCount());
1490   if (obj->IsFailure()) return false;
1491   set_natives_source_cache(FixedArray::cast(obj));
1492 
1493   // Handling of script id generation is in Factory::NewScript.
1494   set_last_script_id(undefined_value());
1495 
1496   // Initialize keyed lookup cache.
1497   KeyedLookupCache::Clear();
1498 
1499   // Initialize context slot cache.
1500   ContextSlotCache::Clear();
1501 
1502   // Initialize descriptor cache.
1503   DescriptorLookupCache::Clear();
1504 
1505   // Initialize compilation cache.
1506   CompilationCache::Clear();
1507 
1508   return true;
1509 }
1510 
1511 
1512 static inline int double_get_hash(double d) {
1513   DoubleRepresentation rep(d);
1514   return ((static_cast<int>(rep.bits) ^ static_cast<int>(rep.bits >> 32)) &
1515           (Heap::kNumberStringCacheSize - 1));
1516 }
1517 
1518 
1519 static inline int smi_get_hash(Smi* smi) {
1520   return (smi->value() & (Heap::kNumberStringCacheSize - 1));
1521 }
1522 
1523 
1524 
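// The number->string cache is a flat FixedArray of kNumberStringCacheSize * 2
// slots used as a direct-mapped cache: slot 2 * hash holds the number key and
// slot 2 * hash + 1 holds the cached string. Smi keys are matched by identity,
// heap numbers by value. A rough sketch of a hypothetical call site (not a
// call site taken from this file):
//
//   Object* cached = Heap::GetNumberStringCache(number);
//   if (cached->IsUndefined()) {
//     // ... build the string representation in 'str' ...
//     Heap::SetNumberStringCache(number, String::cast(str));
//   }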
1525 Object* Heap::GetNumberStringCache(Object* number) {
1526   int hash;
1527   if (number->IsSmi()) {
1528     hash = smi_get_hash(Smi::cast(number));
1529   } else {
1530     hash = double_get_hash(number->Number());
1531   }
1532   Object* key = number_string_cache()->get(hash * 2);
1533   if (key == number) {
1534     return String::cast(number_string_cache()->get(hash * 2 + 1));
1535   } else if (key->IsHeapNumber() &&
1536              number->IsHeapNumber() &&
1537              key->Number() == number->Number()) {
1538     return String::cast(number_string_cache()->get(hash * 2 + 1));
1539   }
1540   return undefined_value();
1541 }
1542 
1543 
1544 void Heap::SetNumberStringCache(Object* number, String* string) {
1545   int hash;
1546   if (number->IsSmi()) {
1547     hash = smi_get_hash(Smi::cast(number));
1548     number_string_cache()->set(hash * 2, number, SKIP_WRITE_BARRIER);
1549   } else {
1550     hash = double_get_hash(number->Number());
1551     number_string_cache()->set(hash * 2, number);
1552   }
1553   number_string_cache()->set(hash * 2 + 1, string);
1554 }
1555 
1556 
1557 Object* Heap::SmiOrNumberFromDouble(double value,
1558                                     bool new_object,
1559                                     PretenureFlag pretenure) {
1560   // We need to distinguish the minus zero value and this cannot be
1561   // done after conversion to int. Doing this by comparing bit
1562   // patterns is faster than using fpclassify() et al.
1563   static const DoubleRepresentation plus_zero(0.0);
1564   static const DoubleRepresentation minus_zero(-0.0);
1565   static const DoubleRepresentation nan(OS::nan_value());
1566   ASSERT(minus_zero_value() != NULL);
1567   ASSERT(sizeof(plus_zero.value) == sizeof(plus_zero.bits));
1568 
1569   DoubleRepresentation rep(value);
1570   if (rep.bits == plus_zero.bits) return Smi::FromInt(0);  // not uncommon
1571   if (rep.bits == minus_zero.bits) {
1572     return new_object ? AllocateHeapNumber(-0.0, pretenure)
1573                       : minus_zero_value();
1574   }
1575   if (rep.bits == nan.bits) {
1576     return new_object
1577         ? AllocateHeapNumber(OS::nan_value(), pretenure)
1578         : nan_value();
1579   }
1580 
1581   // Try to represent the value as a tagged small integer.
1582   int int_value = FastD2I(value);
1583   if (value == FastI2D(int_value) && Smi::IsValid(int_value)) {
1584     return Smi::FromInt(int_value);
1585   }
1586 
1587   // Materialize the value in the heap.
1588   return AllocateHeapNumber(value, pretenure);
1589 }
1590 
1591 
1592 Object* Heap::NewNumberFromDouble(double value, PretenureFlag pretenure) {
1593   return SmiOrNumberFromDouble(value,
1594                                true /* number object must be new */,
1595                                pretenure);
1596 }
1597 
1598 
1599 Object* Heap::NumberFromDouble(double value, PretenureFlag pretenure) {
1600   return SmiOrNumberFromDouble(value,
1601                                false /* use preallocated NaN, -0.0 */,
1602                                pretenure);
1603 }
1604 
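// The two wrappers above differ only in whether a fresh object is required:
// NumberFromDouble may return the shared nan_value() or minus_zero_value()
// roots, while NewNumberFromDouble always materializes a new HeapNumber for
// those values. A sketch of the distinction (hypothetical calls, not taken
// from this file):
//
//   Heap::NumberFromDouble(-0.0);     // may return the preallocated -0.0 root
//   Heap::NewNumberFromDouble(-0.0);  // always allocates a fresh HeapNumber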
1605 
1606 Object* Heap::AllocateProxy(Address proxy, PretenureFlag pretenure) {
1607   // Statically ensure that it is safe to allocate proxies in paged spaces.
1608   STATIC_ASSERT(Proxy::kSize <= Page::kMaxHeapObjectSize);
1609   AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
1610   Object* result = Allocate(proxy_map(), space);
1611   if (result->IsFailure()) return result;
1612 
1613   Proxy::cast(result)->set_proxy(proxy);
1614   return result;
1615 }
1616 
1617 
1618 Object* Heap::AllocateSharedFunctionInfo(Object* name) {
1619   Object* result = Allocate(shared_function_info_map(), OLD_POINTER_SPACE);
1620   if (result->IsFailure()) return result;
1621 
1622   SharedFunctionInfo* share = SharedFunctionInfo::cast(result);
1623   share->set_name(name);
1624   Code* illegal = Builtins::builtin(Builtins::Illegal);
1625   share->set_code(illegal);
1626   Code* construct_stub = Builtins::builtin(Builtins::JSConstructStubGeneric);
1627   share->set_construct_stub(construct_stub);
1628   share->set_expected_nof_properties(0);
1629   share->set_length(0);
1630   share->set_formal_parameter_count(0);
1631   share->set_instance_class_name(Object_symbol());
1632   share->set_function_data(undefined_value());
1633   share->set_script(undefined_value());
1634   share->set_start_position_and_type(0);
1635   share->set_debug_info(undefined_value());
1636   share->set_inferred_name(empty_string());
1637   share->set_compiler_hints(0);
1638   share->set_this_property_assignments_count(0);
1639   share->set_this_property_assignments(undefined_value());
1640   return result;
1641 }
1642 
1643 
1644 Object* Heap::AllocateConsString(String* first, String* second) {
1645   int first_length = first->length();
1646   if (first_length == 0) return second;
1647 
1648   int second_length = second->length();
1649   if (second_length == 0) return first;
1650 
1651   int length = first_length + second_length;
1652   bool is_ascii = first->IsAsciiRepresentation()
1653       && second->IsAsciiRepresentation();
1654 
1655   // Make sure that an out of memory exception is thrown if the length
1656   // of the new cons string is too large to fit in a Smi.
1657   if (length > Smi::kMaxValue || length < 0) {
1658     Top::context()->mark_out_of_memory();
1659     return Failure::OutOfMemoryException();
1660   }
1661 
1662   // If the resulting string is small make a flat string.
1663   if (length < String::kMinNonFlatLength) {
1664     ASSERT(first->IsFlat());
1665     ASSERT(second->IsFlat());
1666     if (is_ascii) {
1667       Object* result = AllocateRawAsciiString(length);
1668       if (result->IsFailure()) return result;
1669       // Copy the characters into the new object.
1670       char* dest = SeqAsciiString::cast(result)->GetChars();
1671       // Copy first part.
1672       char* src = SeqAsciiString::cast(first)->GetChars();
1673       for (int i = 0; i < first_length; i++) *dest++ = src[i];
1674       // Copy second part.
1675       src = SeqAsciiString::cast(second)->GetChars();
1676       for (int i = 0; i < second_length; i++) *dest++ = src[i];
1677       return result;
1678     } else {
1679       Object* result = AllocateRawTwoByteString(length);
1680       if (result->IsFailure()) return result;
1681       // Copy the characters into the new object.
1682       uc16* dest = SeqTwoByteString::cast(result)->GetChars();
1683       String::WriteToFlat(first, dest, 0, first_length);
1684       String::WriteToFlat(second, dest + first_length, 0, second_length);
1685       return result;
1686     }
1687   }
1688 
1689   Map* map;
1690   if (length <= String::kMaxShortStringSize) {
1691     map = is_ascii ? short_cons_ascii_string_map()
1692       : short_cons_string_map();
1693   } else if (length <= String::kMaxMediumStringSize) {
1694     map = is_ascii ? medium_cons_ascii_string_map()
1695       : medium_cons_string_map();
1696   } else {
1697     map = is_ascii ? long_cons_ascii_string_map()
1698       : long_cons_string_map();
1699   }
1700 
1701   Object* result = Allocate(map, NEW_SPACE);
1702   if (result->IsFailure()) return result;
1703   ASSERT(InNewSpace(result));
1704   ConsString* cons_string = ConsString::cast(result);
1705   cons_string->set_first(first, SKIP_WRITE_BARRIER);
1706   cons_string->set_second(second, SKIP_WRITE_BARRIER);
1707   cons_string->set_length(length);
1708   return result;
1709 }
1710 
1711 
1712 Object* Heap::AllocateSlicedString(String* buffer,
1713                                    int start,
1714                                    int end) {
1715   int length = end - start;
1716 
1717   // If the resulting string is small make a sub string.
1718   if (length <= String::kMinNonFlatLength) {
1719     return Heap::AllocateSubString(buffer, start, end);
1720   }
1721 
1722   Map* map;
1723   if (length <= String::kMaxShortStringSize) {
1724     map = buffer->IsAsciiRepresentation() ?
1725       short_sliced_ascii_string_map() :
1726       short_sliced_string_map();
1727   } else if (length <= String::kMaxMediumStringSize) {
1728     map = buffer->IsAsciiRepresentation() ?
1729       medium_sliced_ascii_string_map() :
1730       medium_sliced_string_map();
1731   } else {
1732     map = buffer->IsAsciiRepresentation() ?
1733       long_sliced_ascii_string_map() :
1734       long_sliced_string_map();
1735   }
1736 
1737   Object* result = Allocate(map, NEW_SPACE);
1738   if (result->IsFailure()) return result;
1739 
1740   SlicedString* sliced_string = SlicedString::cast(result);
1741   sliced_string->set_buffer(buffer);
1742   sliced_string->set_start(start);
1743   sliced_string->set_length(length);
1744 
1745   return result;
1746 }
1747 
1748 
1749 Object* Heap::AllocateSubString(String* buffer,
1750                                 int start,
1751                                 int end) {
1752   int length = end - start;
1753 
1754   if (length == 1) {
1755     return Heap::LookupSingleCharacterStringFromCode(
1756         buffer->Get(start));
1757   }
1758 
1759   // Make an attempt to flatten the buffer to reduce access time.
1760   if (!buffer->IsFlat()) {
1761     buffer->TryFlatten();
1762   }
1763 
1764   Object* result = buffer->IsAsciiRepresentation()
1765       ? AllocateRawAsciiString(length)
1766       : AllocateRawTwoByteString(length);
1767   if (result->IsFailure()) return result;
1768 
1769   // Copy the characters into the new object.
1770   String* string_result = String::cast(result);
1771   StringHasher hasher(length);
1772   int i = 0;
1773   for (; i < length && hasher.is_array_index(); i++) {
1774     uc32 c = buffer->Get(start + i);
1775     hasher.AddCharacter(c);
1776     string_result->Set(i, c);
1777   }
1778   for (; i < length; i++) {
1779     uc32 c = buffer->Get(start + i);
1780     hasher.AddCharacterNoIndex(c);
1781     string_result->Set(i, c);
1782   }
1783   string_result->set_length_field(hasher.GetHashField());
1784   return result;
1785 }
1786 
1787 
1788 Object* Heap::AllocateExternalStringFromAscii(
1789     ExternalAsciiString::Resource* resource) {
1790   Map* map;
1791   int length = resource->length();
1792   if (length <= String::kMaxShortStringSize) {
1793     map = short_external_ascii_string_map();
1794   } else if (length <= String::kMaxMediumStringSize) {
1795     map = medium_external_ascii_string_map();
1796   } else {
1797     map = long_external_ascii_string_map();
1798   }
1799 
1800   Object* result = Allocate(map, NEW_SPACE);
1801   if (result->IsFailure()) return result;
1802 
1803   ExternalAsciiString* external_string = ExternalAsciiString::cast(result);
1804   external_string->set_length(length);
1805   external_string->set_resource(resource);
1806 
1807   return result;
1808 }
1809 
1810 
1811 Object* Heap::AllocateExternalStringFromTwoByte(
1812     ExternalTwoByteString::Resource* resource) {
1813   int length = resource->length();
1814 
1815   Map* map = ExternalTwoByteString::StringMap(length);
1816   Object* result = Allocate(map, NEW_SPACE);
1817   if (result->IsFailure()) return result;
1818 
1819   ExternalTwoByteString* external_string = ExternalTwoByteString::cast(result);
1820   external_string->set_length(length);
1821   external_string->set_resource(resource);
1822 
1823   return result;
1824 }
1825 
1826 
1827 Object* Heap::LookupSingleCharacterStringFromCode(uint16_t code) {
1828   if (code <= String::kMaxAsciiCharCode) {
1829     Object* value = Heap::single_character_string_cache()->get(code);
1830     if (value != Heap::undefined_value()) return value;
1831 
1832     char buffer[1];
1833     buffer[0] = static_cast<char>(code);
1834     Object* result = LookupSymbol(Vector<const char>(buffer, 1));
1835 
1836     if (result->IsFailure()) return result;
1837     Heap::single_character_string_cache()->set(code, result);
1838     return result;
1839   }
1840 
1841   Object* result = Heap::AllocateRawTwoByteString(1);
1842   if (result->IsFailure()) return result;
1843   String* answer = String::cast(result);
1844   answer->Set(0, code);
1845   return answer;
1846 }
1847 
1848 
1849 Object* Heap::AllocateByteArray(int length, PretenureFlag pretenure) {
1850   if (pretenure == NOT_TENURED) {
1851     return AllocateByteArray(length);
1852   }
1853   int size = ByteArray::SizeFor(length);
1854   AllocationSpace space =
1855       size > MaxObjectSizeInPagedSpace() ? LO_SPACE : OLD_DATA_SPACE;
1856 
1857   Object* result = AllocateRaw(size, space, OLD_DATA_SPACE);
1858 
1859   if (result->IsFailure()) return result;
1860 
1861   reinterpret_cast<Array*>(result)->set_map(byte_array_map());
1862   reinterpret_cast<Array*>(result)->set_length(length);
1863   return result;
1864 }
1865 
1866 
1867 Object* Heap::AllocateByteArray(int length) {
1868   int size = ByteArray::SizeFor(length);
1869   AllocationSpace space =
1870       size > MaxObjectSizeInPagedSpace() ? LO_SPACE : NEW_SPACE;
1871 
1872   Object* result = AllocateRaw(size, space, OLD_DATA_SPACE);
1873 
1874   if (result->IsFailure()) return result;
1875 
1876   reinterpret_cast<Array*>(result)->set_map(byte_array_map());
1877   reinterpret_cast<Array*>(result)->set_length(length);
1878   return result;
1879 }
1880 
1881 
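// CreateFillerObjectAt (below) plugs a gap in the heap with a dummy object so
// that heap iteration still sees a contiguous sequence of valid objects: a
// one-word gap gets the one_pointer_filler_map, and anything larger is
// disguised as a ByteArray whose length covers the remaining bytes.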
1882 void Heap::CreateFillerObjectAt(Address addr, int size) {
1883   if (size == 0) return;
1884   HeapObject* filler = HeapObject::FromAddress(addr);
1885   if (size == kPointerSize) {
1886     filler->set_map(Heap::one_pointer_filler_map());
1887   } else {
1888     filler->set_map(Heap::byte_array_map());
1889     ByteArray::cast(filler)->set_length(ByteArray::LengthFor(size));
1890   }
1891 }
1892 
1893 
1894 Object* Heap::AllocatePixelArray(int length,
1895                                  uint8_t* external_pointer,
1896                                  PretenureFlag pretenure) {
1897   AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
1898 
1899   Object* result = AllocateRaw(PixelArray::kAlignedSize, space, OLD_DATA_SPACE);
1900 
1901   if (result->IsFailure()) return result;
1902 
1903   reinterpret_cast<PixelArray*>(result)->set_map(pixel_array_map());
1904   reinterpret_cast<PixelArray*>(result)->set_length(length);
1905   reinterpret_cast<PixelArray*>(result)->set_external_pointer(external_pointer);
1906 
1907   return result;
1908 }
1909 
1910 
1911 Object* Heap::CreateCode(const CodeDesc& desc,
1912                          ZoneScopeInfo* sinfo,
1913                          Code::Flags flags,
1914                          Handle<Object> self_reference) {
1915   // Compute size
1916   int body_size = RoundUp(desc.instr_size + desc.reloc_size, kObjectAlignment);
1917   int sinfo_size = 0;
1918   if (sinfo != NULL) sinfo_size = sinfo->Serialize(NULL);
1919   int obj_size = Code::SizeFor(body_size, sinfo_size);
1920   ASSERT(IsAligned(obj_size, Code::kCodeAlignment));
1921   Object* result;
1922   if (obj_size > MaxObjectSizeInPagedSpace()) {
1923     result = lo_space_->AllocateRawCode(obj_size);
1924   } else {
1925     result = code_space_->AllocateRaw(obj_size);
1926   }
1927 
1928   if (result->IsFailure()) return result;
1929 
1930   // Initialize the object
1931   HeapObject::cast(result)->set_map(code_map());
1932   Code* code = Code::cast(result);
1933   code->set_instruction_size(desc.instr_size);
1934   code->set_relocation_size(desc.reloc_size);
1935   code->set_sinfo_size(sinfo_size);
1936   code->set_flags(flags);
1937   code->set_ic_flag(Code::IC_TARGET_IS_ADDRESS);
1938   // Allow self references to the created code object by patching the handle to
1939   // point to the newly allocated Code object.
1940   if (!self_reference.is_null()) {
1941     *(self_reference.location()) = code;
1942   }
1943   // Migrate generated code.
1944   // The generated code can contain Object** values (typically from handles)
1945   // that are dereferenced during the copy to point directly to the actual heap
1946   // objects. These pointers can include references to the code object itself,
1947   // through the self_reference parameter.
1948   code->CopyFrom(desc);
1949   if (sinfo != NULL) sinfo->Serialize(code);  // write scope info
1950 
1951 #ifdef DEBUG
1952   code->Verify();
1953 #endif
1954   return code;
1955 }
1956 
1957 
1958 Object* Heap::CopyCode(Code* code) {
1959   // Allocate an object the same size as the code object.
1960   int obj_size = code->Size();
1961   Object* result;
1962   if (obj_size > MaxObjectSizeInPagedSpace()) {
1963     result = lo_space_->AllocateRawCode(obj_size);
1964   } else {
1965     result = code_space_->AllocateRaw(obj_size);
1966   }
1967 
1968   if (result->IsFailure()) return result;
1969 
1970   // Copy code object.
1971   Address old_addr = code->address();
1972   Address new_addr = reinterpret_cast<HeapObject*>(result)->address();
1973   CopyBlock(reinterpret_cast<Object**>(new_addr),
1974             reinterpret_cast<Object**>(old_addr),
1975             obj_size);
1976   // Relocate the copy.
1977   Code* new_code = Code::cast(result);
1978   new_code->Relocate(new_addr - old_addr);
1979   return new_code;
1980 }
1981 
1982 
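// Heap::Allocate (below) is the generic path for map-described objects: it
// reserves map->instance_size() bytes in the requested space via AllocateRaw
// and installs the map, leaving the body for the caller to initialize. Maps
// themselves are excluded by the assert; they are presumably allocated
// through AllocateMap into the map space instead.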
1983 Object* Heap::Allocate(Map* map, AllocationSpace space) {
1984   ASSERT(gc_state_ == NOT_IN_GC);
1985   ASSERT(map->instance_type() != MAP_TYPE);
1986   Object* result = AllocateRaw(map->instance_size(),
1987                                space,
1988                                TargetSpaceId(map->instance_type()));
1989   if (result->IsFailure()) return result;
1990   HeapObject::cast(result)->set_map(map);
1991   return result;
1992 }
1993 
1994 
1995 Object* Heap::InitializeFunction(JSFunction* function,
1996                                  SharedFunctionInfo* shared,
1997                                  Object* prototype) {
1998   ASSERT(!prototype->IsMap());
1999   function->initialize_properties();
2000   function->initialize_elements();
2001   function->set_shared(shared);
2002   function->set_prototype_or_initial_map(prototype);
2003   function->set_context(undefined_value());
2004   function->set_literals(empty_fixed_array(), SKIP_WRITE_BARRIER);
2005   return function;
2006 }
2007 
2008 
2009 Object* Heap::AllocateFunctionPrototype(JSFunction* function) {
2010   // Allocate the prototype.  Make sure to use the object function
2011   // from the function's context, since the function can be from a
2012   // different context.
2013   JSFunction* object_function =
2014       function->context()->global_context()->object_function();
2015   Object* prototype = AllocateJSObject(object_function);
2016   if (prototype->IsFailure()) return prototype;
2017   // When creating the prototype for the function we must set its
2018   // constructor to the function.
2019   Object* result =
2020       JSObject::cast(prototype)->SetProperty(constructor_symbol(),
2021                                              function,
2022                                              DONT_ENUM);
2023   if (result->IsFailure()) return result;
2024   return prototype;
2025 }
2026 
2027 
2028 Object* Heap::AllocateFunction(Map* function_map,
2029                                SharedFunctionInfo* shared,
2030                                Object* prototype) {
2031   Object* result = Allocate(function_map, OLD_POINTER_SPACE);
2032   if (result->IsFailure()) return result;
2033   return InitializeFunction(JSFunction::cast(result), shared, prototype);
2034 }
2035 
2036 
2037 Object* Heap::AllocateArgumentsObject(Object* callee, int length) {
2038   // To get fast allocation and map sharing for arguments objects we
2039   // allocate them based on an arguments boilerplate.
2040 
2041   // This calls Copy directly rather than using Heap::AllocateRaw so we
2042   // duplicate the check here.
2043   ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
2044 
2045   JSObject* boilerplate =
2046       Top::context()->global_context()->arguments_boilerplate();
2047 
2048   // Make the clone.
2049   Map* map = boilerplate->map();
2050   int object_size = map->instance_size();
2051   Object* result = AllocateRaw(object_size, NEW_SPACE, OLD_POINTER_SPACE);
2052   if (result->IsFailure()) return result;
2053 
2054   // Copy the content. The arguments boilerplate doesn't have any
2055   // fields that point to new space so it's safe to skip the write
2056   // barrier here.
2057   CopyBlock(reinterpret_cast<Object**>(HeapObject::cast(result)->address()),
2058             reinterpret_cast<Object**>(boilerplate->address()),
2059             object_size);
2060 
2061   // Set the two properties.
2062   JSObject::cast(result)->InObjectPropertyAtPut(arguments_callee_index,
2063                                                 callee);
2064   JSObject::cast(result)->InObjectPropertyAtPut(arguments_length_index,
2065                                                 Smi::FromInt(length),
2066                                                 SKIP_WRITE_BARRIER);
2067 
2068   // Check the state of the object
2069   ASSERT(JSObject::cast(result)->HasFastProperties());
2070   ASSERT(JSObject::cast(result)->HasFastElements());
2071 
2072   return result;
2073 }
2074 
2075 
2076 Object* Heap::AllocateInitialMap(JSFunction* fun) {
2077   ASSERT(!fun->has_initial_map());
2078 
2079   // First create a new map with the size and number of in-object properties
2080   // suggested by the function.
2081   int instance_size = fun->shared()->CalculateInstanceSize();
2082   int in_object_properties = fun->shared()->CalculateInObjectProperties();
2083   Object* map_obj = Heap::AllocateMap(JS_OBJECT_TYPE, instance_size);
2084   if (map_obj->IsFailure()) return map_obj;
2085 
2086   // Fetch or allocate prototype.
2087   Object* prototype;
2088   if (fun->has_instance_prototype()) {
2089     prototype = fun->instance_prototype();
2090   } else {
2091     prototype = AllocateFunctionPrototype(fun);
2092     if (prototype->IsFailure()) return prototype;
2093   }
2094   Map* map = Map::cast(map_obj);
2095   map->set_inobject_properties(in_object_properties);
2096   map->set_unused_property_fields(in_object_properties);
2097   map->set_prototype(prototype);
2098 
2099   // If the function has only simple this property assignments add field
2100   // descriptors for these to the initial map as the object cannot be
2101   // constructed without having these properties.
2102   ASSERT(in_object_properties <= Map::kMaxPreAllocatedPropertyFields);
2103   if (fun->shared()->has_only_this_property_assignments() &&
2104       fun->shared()->this_property_assignments_count() > 0) {
2105     int count = fun->shared()->this_property_assignments_count();
2106     if (count > in_object_properties) {
2107       count = in_object_properties;
2108     }
2109     Object* descriptors_obj = DescriptorArray::Allocate(count);
2110     if (descriptors_obj->IsFailure()) return descriptors_obj;
2111     DescriptorArray* descriptors = DescriptorArray::cast(descriptors_obj);
2112     for (int i = 0; i < count; i++) {
2113       String* name = fun->shared()->GetThisPropertyAssignmentName(i);
2114       ASSERT(name->IsSymbol());
2115       FieldDescriptor field(name, i, NONE);
2116       descriptors->Set(i, &field);
2117     }
2118     descriptors->Sort();
2119     map->set_instance_descriptors(descriptors);
2120     map->set_pre_allocated_property_fields(count);
2121     map->set_unused_property_fields(in_object_properties - count);
2122   }
2123   return map;
2124 }
2125 
2126 
2127 void Heap::InitializeJSObjectFromMap(JSObject* obj,
2128                                      FixedArray* properties,
2129                                      Map* map) {
2130   obj->set_properties(properties);
2131   obj->initialize_elements();
2132   // TODO(1240798): Initialize the object's body using valid initial values
2133   // according to the object's initial map.  For example, if the map's
2134   // instance type is JS_ARRAY_TYPE, the length field should be initialized
2135   // to a number (e.g., Smi::FromInt(0)) and the elements initialized to a
2136   // fixed array (e.g., Heap::empty_fixed_array()).  Currently, the object
2137   // verification code has to cope with (temporarily) invalid objects.  See,
2138   // for example, JSArray::JSArrayVerify().
2139   obj->InitializeBody(map->instance_size());
2140 }
2141 
2142 
2143 Object* Heap::AllocateJSObjectFromMap(Map* map, PretenureFlag pretenure) {
2144   // JSFunctions should be allocated using AllocateFunction to be
2145   // properly initialized.
2146   ASSERT(map->instance_type() != JS_FUNCTION_TYPE);
2147 
2148   // Both types of global objects should be allocated using
2149   // AllocateGlobalObject to be properly initialized.
2150   ASSERT(map->instance_type() != JS_GLOBAL_OBJECT_TYPE);
2151   ASSERT(map->instance_type() != JS_BUILTINS_OBJECT_TYPE);
2152 
2153   // Allocate the backing storage for the properties.
2154   int prop_size =
2155       map->pre_allocated_property_fields() +
2156       map->unused_property_fields() -
2157       map->inobject_properties();
2158   ASSERT(prop_size >= 0);
2159   Object* properties = AllocateFixedArray(prop_size, pretenure);
2160   if (properties->IsFailure()) return properties;
2161 
2162   // Allocate the JSObject.
2163   AllocationSpace space =
2164       (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE;
2165   if (map->instance_size() > MaxObjectSizeInPagedSpace()) space = LO_SPACE;
2166   Object* obj = Allocate(map, space);
2167   if (obj->IsFailure()) return obj;
2168 
2169   // Initialize the JSObject.
2170   InitializeJSObjectFromMap(JSObject::cast(obj),
2171                             FixedArray::cast(properties),
2172                             map);
2173   return obj;
2174 }
2175 
2176 
2177 Object* Heap::AllocateJSObject(JSFunction* constructor,
2178                                PretenureFlag pretenure) {
2179   // Allocate the initial map if absent.
2180   if (!constructor->has_initial_map()) {
2181     Object* initial_map = AllocateInitialMap(constructor);
2182     if (initial_map->IsFailure()) return initial_map;
2183     constructor->set_initial_map(Map::cast(initial_map));
2184     Map::cast(initial_map)->set_constructor(constructor);
2185   }
2186   // Allocate the object based on the constructor's initial map.
2187   Object* result =
2188       AllocateJSObjectFromMap(constructor->initial_map(), pretenure);
2189   // Make sure result is NOT a global object if valid.
2190   ASSERT(result->IsFailure() || !result->IsGlobalObject());
2191   return result;
2192 }
2193 
2194 
2195 Object* Heap::AllocateGlobalObject(JSFunction* constructor) {
2196   ASSERT(constructor->has_initial_map());
2197   Map* map = constructor->initial_map();
2198 
2199   // Make sure no field properties are described in the initial map.
2200   // This guarantees us that normalizing the properties does not
2201   // require us to change property values to JSGlobalPropertyCells.
2202   ASSERT(map->NextFreePropertyIndex() == 0);
2203 
2204   // Make sure we don't have a ton of pre-allocated slots in the
2205   // global objects. They will be unused once we normalize the object.
2206   ASSERT(map->unused_property_fields() == 0);
2207   ASSERT(map->inobject_properties() == 0);
2208 
2209   // Initial size of the backing store to avoid resizing the storage during
2210   // bootstrapping. The size differs between the JS global object and the
2211   // builtins object.
2212   int initial_size = map->instance_type() == JS_GLOBAL_OBJECT_TYPE ? 64 : 512;
2213 
2214   // Allocate a dictionary object for backing storage.
2215   Object* obj =
2216       StringDictionary::Allocate(
2217           map->NumberOfDescribedProperties() * 2 + initial_size);
2218   if (obj->IsFailure()) return obj;
2219   StringDictionary* dictionary = StringDictionary::cast(obj);
2220 
2221   // The global object might be created from an object template with accessors.
2222   // Fill these accessors into the dictionary.
2223   DescriptorArray* descs = map->instance_descriptors();
2224   for (int i = 0; i < descs->number_of_descriptors(); i++) {
2225     PropertyDetails details = descs->GetDetails(i);
2226     ASSERT(details.type() == CALLBACKS);  // Only accessors are expected.
2227     PropertyDetails d =
2228         PropertyDetails(details.attributes(), CALLBACKS, details.index());
2229     Object* value = descs->GetCallbacksObject(i);
2230     value = Heap::AllocateJSGlobalPropertyCell(value);
2231     if (value->IsFailure()) return value;
2232 
2233     Object* result = dictionary->Add(descs->GetKey(i), value, d);
2234     if (result->IsFailure()) return result;
2235     dictionary = StringDictionary::cast(result);
2236   }
2237 
2238   // Allocate the global object and initialize it with the backing store.
2239   obj = Allocate(map, OLD_POINTER_SPACE);
2240   if (obj->IsFailure()) return obj;
2241   JSObject* global = JSObject::cast(obj);
2242   InitializeJSObjectFromMap(global, dictionary, map);
2243 
2244   // Create a new map for the global object.
2245   obj = map->CopyDropDescriptors();
2246   if (obj->IsFailure()) return obj;
2247   Map* new_map = Map::cast(obj);
2248 
2249   // Set up the global object as a normalized object.
2250   global->set_map(new_map);
2251   global->map()->set_instance_descriptors(Heap::empty_descriptor_array());
2252   global->set_properties(dictionary);
2253 
2254   // Make sure result is a global object with properties in dictionary.
2255   ASSERT(global->IsGlobalObject());
2256   ASSERT(!global->HasFastProperties());
2257   return global;
2258 }
2259 
2260 
2261 Object* Heap::CopyJSObject(JSObject* source) {
2262   // Never used to copy functions.  If functions need to be copied we
2263   // have to be careful to clear the literals array.
2264   ASSERT(!source->IsJSFunction());
2265 
2266   // Make the clone.
2267   Map* map = source->map();
2268   int object_size = map->instance_size();
2269   Object* clone;
2270 
2271   // If we're forced to always allocate, we use the general allocation
2272   // functions which may leave us with an object in old space.
2273   if (always_allocate()) {
2274     clone = AllocateRaw(object_size, NEW_SPACE, OLD_POINTER_SPACE);
2275     if (clone->IsFailure()) return clone;
2276     Address clone_address = HeapObject::cast(clone)->address();
2277     CopyBlock(reinterpret_cast<Object**>(clone_address),
2278               reinterpret_cast<Object**>(source->address()),
2279               object_size);
2280     // Update write barrier for all fields that lie beyond the header.
2281     for (int offset = JSObject::kHeaderSize;
2282          offset < object_size;
2283          offset += kPointerSize) {
2284       RecordWrite(clone_address, offset);
2285     }
2286   } else {
2287     clone = new_space_.AllocateRaw(object_size);
2288     if (clone->IsFailure()) return clone;
2289     ASSERT(Heap::InNewSpace(clone));
2290     // Since we know the clone is allocated in new space, we can copy
2291     // the contents without worrying about updating the write barrier.
2292     CopyBlock(reinterpret_cast<Object**>(HeapObject::cast(clone)->address()),
2293               reinterpret_cast<Object**>(source->address()),
2294               object_size);
2295   }
2296 
2297   FixedArray* elements = FixedArray::cast(source->elements());
2298   FixedArray* properties = FixedArray::cast(source->properties());
2299   // Update elements if necessary.
2300   if (elements->length() > 0) {
2301     Object* elem = CopyFixedArray(elements);
2302     if (elem->IsFailure()) return elem;
2303     JSObject::cast(clone)->set_elements(FixedArray::cast(elem));
2304   }
2305   // Update properties if necessary.
2306   if (properties->length() > 0) {
2307     Object* prop = CopyFixedArray(properties);
2308     if (prop->IsFailure()) return prop;
2309     JSObject::cast(clone)->set_properties(FixedArray::cast(prop));
2310   }
2311   // Return the new clone.
2312   return clone;
2313 }
2314 
2315 
2316 Object* Heap::ReinitializeJSGlobalProxy(JSFunction* constructor,
2317                                         JSGlobalProxy* object) {
2318   // Allocate initial map if absent.
2319   if (!constructor->has_initial_map()) {
2320     Object* initial_map = AllocateInitialMap(constructor);
2321     if (initial_map->IsFailure()) return initial_map;
2322     constructor->set_initial_map(Map::cast(initial_map));
2323     Map::cast(initial_map)->set_constructor(constructor);
2324   }
2325 
2326   Map* map = constructor->initial_map();
2327 
2328   // Check that the already allocated object has the same size as
2329   // objects allocated using the constructor.
2330   ASSERT(map->instance_size() == object->map()->instance_size());
2331 
2332   // Allocate the backing storage for the properties.
2333   int prop_size = map->unused_property_fields() - map->inobject_properties();
2334   Object* properties = AllocateFixedArray(prop_size, TENURED);
2335   if (properties->IsFailure()) return properties;
2336 
2337   // Reset the map for the object.
2338   object->set_map(constructor->initial_map());
2339 
2340   // Reinitialize the object from the constructor map.
2341   InitializeJSObjectFromMap(object, FixedArray::cast(properties), map);
2342   return object;
2343 }
2344 
2345 
2346 Object* Heap::AllocateStringFromAscii(Vector<const char> string,
2347                                       PretenureFlag pretenure) {
2348   Object* result = AllocateRawAsciiString(string.length(), pretenure);
2349   if (result->IsFailure()) return result;
2350 
2351   // Copy the characters into the new object.
2352   SeqAsciiString* string_result = SeqAsciiString::cast(result);
2353   for (int i = 0; i < string.length(); i++) {
2354     string_result->SeqAsciiStringSet(i, string[i]);
2355   }
2356   return result;
2357 }
2358 
2359 
2360 Object* Heap::AllocateStringFromUtf8(Vector<const char> string,
2361                                      PretenureFlag pretenure) {
2362   // Count the number of characters in the UTF-8 string and check if
2363   // it is an ASCII string.
2364   Access<Scanner::Utf8Decoder> decoder(Scanner::utf8_decoder());
2365   decoder->Reset(string.start(), string.length());
2366   int chars = 0;
2367   bool is_ascii = true;
2368   while (decoder->has_more()) {
2369     uc32 r = decoder->GetNext();
2370     if (r > String::kMaxAsciiCharCode) is_ascii = false;
2371     chars++;
2372   }
2373 
2374   // If the string is ascii, we do not need to convert the characters
2375   // since UTF8 is backwards compatible with ascii.
2376   if (is_ascii) return AllocateStringFromAscii(string, pretenure);
2377 
2378   Object* result = AllocateRawTwoByteString(chars, pretenure);
2379   if (result->IsFailure()) return result;
2380 
2381   // Convert and copy the characters into the new object.
2382   String* string_result = String::cast(result);
2383   decoder->Reset(string.start(), string.length());
2384   for (int i = 0; i < chars; i++) {
2385     uc32 r = decoder->GetNext();
2386     string_result->Set(i, r);
2387   }
2388   return result;
2389 }
2390 
2391 
2392 Object* Heap::AllocateStringFromTwoByte(Vector<const uc16> string,
2393                                         PretenureFlag pretenure) {
2394   // Check if the string is an ASCII string.
2395   int i = 0;
2396   while (i < string.length() && string[i] <= String::kMaxAsciiCharCode) i++;
2397 
2398   Object* result;
2399   if (i == string.length()) {  // It's an ASCII string.
2400     result = AllocateRawAsciiString(string.length(), pretenure);
2401   } else {  // It's not an ASCII string.
2402     result = AllocateRawTwoByteString(string.length(), pretenure);
2403   }
2404   if (result->IsFailure()) return result;
2405 
2406   // Copy the characters into the new object, which may be either ASCII or
2407   // UTF-16.
2408   String* string_result = String::cast(result);
2409   for (int i = 0; i < string.length(); i++) {
2410     string_result->Set(i, string[i]);
2411   }
2412   return result;
2413 }
2414 
2415 
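// SymbolMapForString (below) reports whether an existing string could be
// turned into a symbol in place by swapping its map. Returning NULL means
// "no": either the string lives in new space or its map has no symbol
// counterpart, in which case the caller presumably has to allocate a fresh
// symbol and copy the characters instead.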
2416 Map* Heap::SymbolMapForString(String* string) {
2417   // If the string is in new space it cannot be used as a symbol.
2418   if (InNewSpace(string)) return NULL;
2419 
2420   // Find the corresponding symbol map for strings.
2421   Map* map = string->map();
2422 
2423   if (map == short_ascii_string_map()) return short_ascii_symbol_map();
2424   if (map == medium_ascii_string_map()) return medium_ascii_symbol_map();
2425   if (map == long_ascii_string_map()) return long_ascii_symbol_map();
2426 
2427   if (map == short_string_map()) return short_symbol_map();
2428   if (map == medium_string_map()) return medium_symbol_map();
2429   if (map == long_string_map()) return long_symbol_map();
2430 
2431   if (map == short_cons_string_map()) return short_cons_symbol_map();
2432   if (map == medium_cons_string_map()) return medium_cons_symbol_map();
2433   if (map == long_cons_string_map()) return long_cons_symbol_map();
2434 
2435   if (map == short_cons_ascii_string_map()) {
2436     return short_cons_ascii_symbol_map();
2437   }
2438   if (map == medium_cons_ascii_string_map()) {
2439     return medium_cons_ascii_symbol_map();
2440   }
2441   if (map == long_cons_ascii_string_map()) {
2442     return long_cons_ascii_symbol_map();
2443   }
2444 
2445   if (map == short_sliced_string_map()) return short_sliced_symbol_map();
2446   if (map == medium_sliced_string_map()) return medium_sliced_symbol_map();
2447   if (map == long_sliced_string_map()) return long_sliced_symbol_map();
2448 
2449   if (map == short_sliced_ascii_string_map()) {
2450     return short_sliced_ascii_symbol_map();
2451   }
2452   if (map == medium_sliced_ascii_string_map()) {
2453     return medium_sliced_ascii_symbol_map();
2454   }
2455   if (map == long_sliced_ascii_string_map()) {
2456     return long_sliced_ascii_symbol_map();
2457   }
2458 
2459   if (map == short_external_string_map()) {
2460     return short_external_symbol_map();
2461   }
2462   if (map == medium_external_string_map()) {
2463     return medium_external_symbol_map();
2464   }
2465   if (map == long_external_string_map()) {
2466     return long_external_symbol_map();
2467   }
2468 
2469   if (map == short_external_ascii_string_map()) {
2470     return short_external_ascii_symbol_map();
2471   }
2472   if (map == medium_external_ascii_string_map()) {
2473     return medium_external_ascii_symbol_map();
2474   }
2475   if (map == long_external_ascii_string_map()) {
2476     return long_external_ascii_symbol_map();
2477   }
2478 
2479   // No match found.
2480   return NULL;
2481 }
2482 
2483 
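// AllocateInternalSymbol (below) makes two passes over the character stream:
// the first pass only checks whether every character fits in one byte so the
// right map and object size can be chosen, then the stream is rewound and the
// second pass copies the characters into the freshly allocated symbol.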
2484 Object* Heap::AllocateInternalSymbol(unibrow::CharacterStream* buffer,
2485                                      int chars,
2486                                      uint32_t length_field) {
2487   // Ensure that chars matches the number of characters in the buffer.
2488   ASSERT(static_cast<unsigned>(chars) == buffer->Length());
2489   // Determine whether the string is ascii.
2490   bool is_ascii = true;
2491   while (buffer->has_more() && is_ascii) {
2492     if (buffer->GetNext() > unibrow::Utf8::kMaxOneByteChar) is_ascii = false;
2493   }
2494   buffer->Rewind();
2495 
2496   // Compute map and object size.
2497   int size;
2498   Map* map;
2499 
2500   if (is_ascii) {
2501     if (chars <= String::kMaxShortStringSize) {
2502       map = short_ascii_symbol_map();
2503     } else if (chars <= String::kMaxMediumStringSize) {
2504       map = medium_ascii_symbol_map();
2505     } else {
2506       map = long_ascii_symbol_map();
2507     }
2508     size = SeqAsciiString::SizeFor(chars);
2509   } else {
2510     if (chars <= String::kMaxShortStringSize) {
2511       map = short_symbol_map();
2512     } else if (chars <= String::kMaxMediumStringSize) {
2513       map = medium_symbol_map();
2514     } else {
2515       map = long_symbol_map();
2516     }
2517     size = SeqTwoByteString::SizeFor(chars);
2518   }
2519 
2520   // Allocate string.
2521   AllocationSpace space =
2522       (size > MaxObjectSizeInPagedSpace()) ? LO_SPACE : OLD_DATA_SPACE;
2523   Object* result = AllocateRaw(size, space, OLD_DATA_SPACE);
2524   if (result->IsFailure()) return result;
2525 
2526   reinterpret_cast<HeapObject*>(result)->set_map(map);
2527   // The hash value contains the length of the string.
2528   String* answer = String::cast(result);
2529   answer->set_length_field(length_field);
2530 
2531   ASSERT_EQ(size, answer->Size());
2532 
2533   // Fill in the characters.
2534   for (int i = 0; i < chars; i++) {
2535     answer->Set(i, buffer->GetNext());
2536   }
2537   return answer;
2538 }
2539 
2540 
2541 Object* Heap::AllocateRawAsciiString(int length, PretenureFlag pretenure) {
2542   AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
2543   int size = SeqAsciiString::SizeFor(length);
2544 
2545   Object* result = Failure::OutOfMemoryException();
2546   if (space == NEW_SPACE) {
2547     result = size <= kMaxObjectSizeInNewSpace
2548         ? new_space_.AllocateRaw(size)
2549         : lo_space_->AllocateRawFixedArray(size);
2550   } else {
2551     if (size > MaxObjectSizeInPagedSpace()) space = LO_SPACE;
2552     result = AllocateRaw(size, space, OLD_DATA_SPACE);
2553   }
2554   if (result->IsFailure()) return result;
2555 
2556   // Determine the map based on the string's length.
2557   Map* map;
2558   if (length <= String::kMaxShortStringSize) {
2559     map = short_ascii_string_map();
2560   } else if (length <= String::kMaxMediumStringSize) {
2561     map = medium_ascii_string_map();
2562   } else {
2563     map = long_ascii_string_map();
2564   }
2565 
2566   // Partially initialize the object.
2567   HeapObject::cast(result)->set_map(map);
2568   String::cast(result)->set_length(length);
2569   ASSERT_EQ(size, HeapObject::cast(result)->Size());
2570   return result;
2571 }
2572 
2573 
2574 Object* Heap::AllocateRawTwoByteString(int length, PretenureFlag pretenure) {
2575   AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
2576   int size = SeqTwoByteString::SizeFor(length);
2577 
2578   Object* result = Failure::OutOfMemoryException();
2579   if (space == NEW_SPACE) {
2580     result = size <= kMaxObjectSizeInNewSpace
2581         ? new_space_.AllocateRaw(size)
2582         : lo_space_->AllocateRawFixedArray(size);
2583   } else {
2584     if (size > MaxObjectSizeInPagedSpace()) space = LO_SPACE;
2585     result = AllocateRaw(size, space, OLD_DATA_SPACE);
2586   }
2587   if (result->IsFailure()) return result;
2588 
2589   // Determine the map based on the string's length.
2590   Map* map;
2591   if (length <= String::kMaxShortStringSize) {
2592     map = short_string_map();
2593   } else if (length <= String::kMaxMediumStringSize) {
2594     map = medium_string_map();
2595   } else {
2596     map = long_string_map();
2597   }
2598 
2599   // Partially initialize the object.
2600   HeapObject::cast(result)->set_map(map);
2601   String::cast(result)->set_length(length);
2602   ASSERT_EQ(size, HeapObject::cast(result)->Size());
2603   return result;
2604 }
2605 
2606 
2607 Object* Heap::AllocateEmptyFixedArray() {
2608   int size = FixedArray::SizeFor(0);
2609   Object* result = AllocateRaw(size, OLD_DATA_SPACE, OLD_DATA_SPACE);
2610   if (result->IsFailure()) return result;
2611   // Initialize the object.
2612   reinterpret_cast<Array*>(result)->set_map(fixed_array_map());
2613   reinterpret_cast<Array*>(result)->set_length(0);
2614   return result;
2615 }
2616 
2617 
2618 Object* Heap::AllocateRawFixedArray(int length) {
2619   // Use the general function if we're forced to always allocate.
2620   if (always_allocate()) return AllocateFixedArray(length, NOT_TENURED);
2621   // Allocate the raw data for a fixed array.
2622   int size = FixedArray::SizeFor(length);
2623   return size <= kMaxObjectSizeInNewSpace
2624       ? new_space_.AllocateRaw(size)
2625       : lo_space_->AllocateRawFixedArray(size);
2626 }
2627 
2628 
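// CopyFixedArray (below) exploits the write barrier rules: if the copy lands
// in new space, a raw block copy of the whole array (map, length and
// elements) is sufficient; otherwise the elements are copied one by one using
// whatever WriteBarrierMode the destination array reports.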
2629 Object* Heap::CopyFixedArray(FixedArray* src) {
2630   int len = src->length();
2631   Object* obj = AllocateRawFixedArray(len);
2632   if (obj->IsFailure()) return obj;
2633   if (Heap::InNewSpace(obj)) {
2634     HeapObject* dst = HeapObject::cast(obj);
2635     CopyBlock(reinterpret_cast<Object**>(dst->address()),
2636               reinterpret_cast<Object**>(src->address()),
2637               FixedArray::SizeFor(len));
2638     return obj;
2639   }
2640   HeapObject::cast(obj)->set_map(src->map());
2641   FixedArray* result = FixedArray::cast(obj);
2642   result->set_length(len);
2643   // Copy the content
2644   WriteBarrierMode mode = result->GetWriteBarrierMode();
2645   for (int i = 0; i < len; i++) result->set(i, src->get(i), mode);
2646   return result;
2647 }
2648 
2649 
2650 Object* Heap::AllocateFixedArray(int length) {
2651   ASSERT(length >= 0);
2652   if (length == 0) return empty_fixed_array();
2653   Object* result = AllocateRawFixedArray(length);
2654   if (!result->IsFailure()) {
2655     // Initialize header.
2656     reinterpret_cast<Array*>(result)->set_map(fixed_array_map());
2657     FixedArray* array = FixedArray::cast(result);
2658     array->set_length(length);
2659     Object* value = undefined_value();
2660     // Initialize body.
2661     for (int index = 0; index < length; index++) {
2662       array->set(index, value, SKIP_WRITE_BARRIER);
2663     }
2664   }
2665   return result;
2666 }
2667 
2668 
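// The pretenuring variant below first tries the cheap new-space (or large
// object) path unless the array must be tenured, and only falls back to the
// paged old pointer space when that fails or tenuring was requested; arrays
// larger than the paged-space object size limit always go to the large
// object space.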
2669 Object* Heap::AllocateFixedArray(int length, PretenureFlag pretenure) {
2670   ASSERT(empty_fixed_array()->IsFixedArray());
2671   if (length == 0) return empty_fixed_array();
2672 
2673   int size = FixedArray::SizeFor(length);
2674   Object* result = Failure::OutOfMemoryException();
2675   if (pretenure != TENURED) {
2676     result = size <= kMaxObjectSizeInNewSpace
2677         ? new_space_.AllocateRaw(size)
2678         : lo_space_->AllocateRawFixedArray(size);
2679   }
2680   if (result->IsFailure()) {
2681     if (size > MaxObjectSizeInPagedSpace()) {
2682       result = lo_space_->AllocateRawFixedArray(size);
2683     } else {
2684       AllocationSpace space =
2685           (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE;
2686       result = AllocateRaw(size, space, OLD_POINTER_SPACE);
2687     }
2688     if (result->IsFailure()) return result;
2689   }
2690   // Initialize the object.
2691   reinterpret_cast<Array*>(result)->set_map(fixed_array_map());
2692   FixedArray* array = FixedArray::cast(result);
2693   array->set_length(length);
2694   Object* value = undefined_value();
2695   for (int index = 0; index < length; index++) {
2696     array->set(index, value, SKIP_WRITE_BARRIER);
2697   }
2698   return array;
2699 }
2700 
2701 
2702 Object* Heap::AllocateFixedArrayWithHoles(int length) {
2703   if (length == 0) return empty_fixed_array();
2704   Object* result = AllocateRawFixedArray(length);
2705   if (!result->IsFailure()) {
2706     // Initialize header.
2707     reinterpret_cast<Array*>(result)->set_map(fixed_array_map());
2708     FixedArray* array = FixedArray::cast(result);
2709     array->set_length(length);
2710     // Initialize body.
2711     Object* value = the_hole_value();
2712     for (int index = 0; index < length; index++) {
2713       array->set(index, value, SKIP_WRITE_BARRIER);
2714     }
2715   }
2716   return result;
2717 }
2718 
2719 
2720 Object* Heap::AllocateHashTable(int length) {
2721   Object* result = Heap::AllocateFixedArray(length);
2722   if (result->IsFailure()) return result;
2723   reinterpret_cast<Array*>(result)->set_map(hash_table_map());
2724   ASSERT(result->IsHashTable());
2725   return result;
2726 }
2727 
2728 
2729 Object* Heap::AllocateGlobalContext() {
2730   Object* result = Heap::AllocateFixedArray(Context::GLOBAL_CONTEXT_SLOTS);
2731   if (result->IsFailure()) return result;
2732   Context* context = reinterpret_cast<Context*>(result);
2733   context->set_map(global_context_map());
2734   ASSERT(context->IsGlobalContext());
2735   ASSERT(result->IsContext());
2736   return result;
2737 }
2738 
2739 
2740 Object* Heap::AllocateFunctionContext(int length, JSFunction* function) {
2741   ASSERT(length >= Context::MIN_CONTEXT_SLOTS);
2742   Object* result = Heap::AllocateFixedArray(length);
2743   if (result->IsFailure()) return result;
2744   Context* context = reinterpret_cast<Context*>(result);
2745   context->set_map(context_map());
2746   context->set_closure(function);
2747   context->set_fcontext(context);
2748   context->set_previous(NULL);
2749   context->set_extension(NULL);
2750   context->set_global(function->context()->global());
2751   ASSERT(!context->IsGlobalContext());
2752   ASSERT(context->is_function_context());
2753   ASSERT(result->IsContext());
2754   return result;
2755 }
2756 
2757 
2758 Object* Heap::AllocateWithContext(Context* previous,
2759                                   JSObject* extension,
2760                                   bool is_catch_context) {
2761   Object* result = Heap::AllocateFixedArray(Context::MIN_CONTEXT_SLOTS);
2762   if (result->IsFailure()) return result;
2763   Context* context = reinterpret_cast<Context*>(result);
2764   context->set_map(is_catch_context ? catch_context_map() : context_map());
2765   context->set_closure(previous->closure());
2766   context->set_fcontext(previous->fcontext());
2767   context->set_previous(previous);
2768   context->set_extension(extension);
2769   context->set_global(previous->global());
2770   ASSERT(!context->IsGlobalContext());
2771   ASSERT(!context->is_function_context());
2772   ASSERT(result->IsContext());
2773   return result;
2774 }
2775 
2776 
2777 Object* Heap::AllocateStruct(InstanceType type) {
2778   Map* map;
2779   switch (type) {
2780 #define MAKE_CASE(NAME, Name, name) case NAME##_TYPE: map = name##_map(); break;
2781 STRUCT_LIST(MAKE_CASE)
2782 #undef MAKE_CASE
2783     default:
2784       UNREACHABLE();
2785       return Failure::InternalError();
2786   }
2787   int size = map->instance_size();
2788   AllocationSpace space =
2789       (size > MaxObjectSizeInPagedSpace()) ? LO_SPACE : OLD_POINTER_SPACE;
2790   Object* result = Heap::Allocate(map, space);
2791   if (result->IsFailure()) return result;
2792   Struct::cast(result)->InitializeBody(size);
2793   return result;
2794 }
2795 
2796 
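// IdleNotification (below) is driven by the embedder: after roughly
// kIdlesBeforeCollection notifications without an intervening GC it runs a
// full collection, and on the next idle round (if still no other GC has run)
// it additionally forces compaction, shrinks new space and reports that no
// further idle work is pending. A hedged sketch of the intended embedder
// loop, assuming the public v8::V8::IdleNotification() wrapper forwards to
// this method:
//
//   while (embedder_is_idle && !v8::V8::IdleNotification()) {
//     // keep notifying until V8 reports it is done
//   }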
2797 bool Heap::IdleNotification() {
2798   static const int kIdlesBeforeCollection = 7;
2799   static int number_idle_notifications = 0;
2800   static int last_gc_count = gc_count_;
2801 
2802   bool finished = false;
2803 
2804   if (last_gc_count == gc_count_) {
2805     number_idle_notifications++;
2806   } else {
2807     number_idle_notifications = 0;
2808     last_gc_count = gc_count_;
2809   }
2810 
2811   if (number_idle_notifications >= kIdlesBeforeCollection) {
2812     // The first time through we collect without forcing compaction.
2813     // The second time through we force compaction and quit.
2814     bool force_compaction =
2815         number_idle_notifications > kIdlesBeforeCollection;
2816     CollectAllGarbage(force_compaction);
2817     last_gc_count = gc_count_;
2818     if (force_compaction) {
2819       // Shrink new space.
2820       new_space_.Shrink();
2821       number_idle_notifications = 0;
2822       finished = true;
2823     }
2824   }
2825 
2826   // Uncommit unused memory in new space.
2827   Heap::UncommitFromSpace();
2828   return finished;
2829 }
2830 
2831 
2832 #ifdef DEBUG
2833 
2834 void Heap::Print() {
2835   if (!HasBeenSetup()) return;
2836   Top::PrintStack();
2837   AllSpaces spaces;
2838   while (Space* space = spaces.next()) space->Print();
2839 }
2840 
2841 
2842 void Heap::ReportCodeStatistics(const char* title) {
2843   PrintF(">>>>>> Code Stats (%s) >>>>>>\n", title);
2844   PagedSpace::ResetCodeStatistics();
2845   // We do not look for code in new space, map space, or old space.  If code
2846   // somehow ends up in those spaces, we would miss it here.
2847   code_space_->CollectCodeStatistics();
2848   lo_space_->CollectCodeStatistics();
2849   PagedSpace::ReportCodeStatistics();
2850 }
2851 
2852 
2853 // This function expects that NewSpace's allocated objects histogram is
2854 // populated (via a call to CollectStatistics or else as a side effect of a
2855 // just-completed scavenge collection).
2856 void Heap::ReportHeapStatistics(const char* title) {
2857   USE(title);
2858   PrintF(">>>>>> =============== %s (%d) =============== >>>>>>\n",
2859          title, gc_count_);
2860   PrintF("mark-compact GC : %d\n", mc_count_);
2861   PrintF("old_gen_promotion_limit_ %d\n", old_gen_promotion_limit_);
2862   PrintF("old_gen_allocation_limit_ %d\n", old_gen_allocation_limit_);
2863 
2864   PrintF("\n");
2865   PrintF("Number of handles : %d\n", HandleScope::NumberOfHandles());
2866   GlobalHandles::PrintStats();
2867   PrintF("\n");
2868 
2869   PrintF("Heap statistics : ");
2870   MemoryAllocator::ReportStatistics();
2871   PrintF("To space : ");
2872   new_space_.ReportStatistics();
2873   PrintF("Old pointer space : ");
2874   old_pointer_space_->ReportStatistics();
2875   PrintF("Old data space : ");
2876   old_data_space_->ReportStatistics();
2877   PrintF("Code space : ");
2878   code_space_->ReportStatistics();
2879   PrintF("Map space : ");
2880   map_space_->ReportStatistics();
2881   PrintF("Cell space : ");
2882   cell_space_->ReportStatistics();
2883   PrintF("Large object space : ");
2884   lo_space_->ReportStatistics();
2885   PrintF(">>>>>> ========================================= >>>>>>\n");
2886 }
2887 
2888 #endif  // DEBUG
2889 
2890 bool Heap::Contains(HeapObject* value) {
2891   return Contains(value->address());
2892 }
2893 
2894 
2895 bool Heap::Contains(Address addr) {
2896   if (OS::IsOutsideAllocatedSpace(addr)) return false;
2897   return HasBeenSetup() &&
2898     (new_space_.ToSpaceContains(addr) ||
2899      old_pointer_space_->Contains(addr) ||
2900      old_data_space_->Contains(addr) ||
2901      code_space_->Contains(addr) ||
2902      map_space_->Contains(addr) ||
2903      cell_space_->Contains(addr) ||
2904      lo_space_->SlowContains(addr));
2905 }
2906 
2907 
2908 bool Heap::InSpace(HeapObject* value, AllocationSpace space) {
2909   return InSpace(value->address(), space);
2910 }
2911 
2912 
2913 bool Heap::InSpace(Address addr, AllocationSpace space) {
2914   if (OS::IsOutsideAllocatedSpace(addr)) return false;
2915   if (!HasBeenSetup()) return false;
2916 
2917   switch (space) {
2918     case NEW_SPACE:
2919       return new_space_.ToSpaceContains(addr);
2920     case OLD_POINTER_SPACE:
2921       return old_pointer_space_->Contains(addr);
2922     case OLD_DATA_SPACE:
2923       return old_data_space_->Contains(addr);
2924     case CODE_SPACE:
2925       return code_space_->Contains(addr);
2926     case MAP_SPACE:
2927       return map_space_->Contains(addr);
2928     case CELL_SPACE:
2929       return cell_space_->Contains(addr);
2930     case LO_SPACE:
2931       return lo_space_->SlowContains(addr);
2932   }
2933 
2934   return false;
2935 }
2936 
2937 
2938 #ifdef DEBUG
2939 void Heap::Verify() {
2940   ASSERT(HasBeenSetup());
2941 
2942   VerifyPointersVisitor visitor;
2943   IterateRoots(&visitor);
2944 
2945   new_space_.Verify();
2946 
2947   VerifyPointersAndRSetVisitor rset_visitor;
2948   old_pointer_space_->Verify(&rset_visitor);
2949   map_space_->Verify(&rset_visitor);
2950 
2951   VerifyPointersVisitor no_rset_visitor;
2952   old_data_space_->Verify(&no_rset_visitor);
2953   code_space_->Verify(&no_rset_visitor);
2954   cell_space_->Verify(&no_rset_visitor);
2955 
2956   lo_space_->Verify();
2957 }
2958 #endif  // DEBUG
2959 
2960 
2961 Object* Heap::LookupSymbol(Vector<const char> string) {
2962   Object* symbol = NULL;
2963   Object* new_table = symbol_table()->LookupSymbol(string, &symbol);
2964   if (new_table->IsFailure()) return new_table;
2965   // Can't use set_symbol_table because SymbolTable::cast knows that
2966   // SymbolTable is a singleton and checks for identity.
2967   roots_[kSymbolTableRootIndex] = new_table;
2968   ASSERT(symbol != NULL);
2969   return symbol;
2970 }
2971 
2972 
2973 Object* Heap::LookupSymbol(String* string) {
2974   if (string->IsSymbol()) return string;
2975   Object* symbol = NULL;
2976   Object* new_table = symbol_table()->LookupString(string, &symbol);
2977   if (new_table->IsFailure()) return new_table;
2978   // Can't use set_symbol_table because SymbolTable::cast knows that
2979   // SymbolTable is a singleton and checks for identity.
2980   roots_[kSymbolTableRootIndex] = new_table;
2981   ASSERT(symbol != NULL);
2982   return symbol;
2983 }
2984 
2985 
2986 bool Heap::LookupSymbolIfExists(String* string, String** symbol) {
2987   if (string->IsSymbol()) {
2988     *symbol = string;
2989     return true;
2990   }
2991   return symbol_table()->LookupSymbolIfExists(string, symbol);
2992 }
2993 
2994 
2995 #ifdef DEBUG
2996 void Heap::ZapFromSpace() {
2997   ASSERT(reinterpret_cast<Object*>(kFromSpaceZapValue)->IsHeapObject());
2998   for (Address a = new_space_.FromSpaceLow();
2999        a < new_space_.FromSpaceHigh();
3000        a += kPointerSize) {
3001     Memory::Address_at(a) = kFromSpaceZapValue;
3002   }
3003 }
3004 #endif  // DEBUG
3005 
3006 
3007 int Heap::IterateRSetRange(Address object_start,
3008                            Address object_end,
3009                            Address rset_start,
3010                            ObjectSlotCallback copy_object_func) {
3011   Address object_address = object_start;
3012   Address rset_address = rset_start;
3013   int set_bits_count = 0;
3014 
3015   // Loop over all the pointers in [object_start, object_end).
3016   while (object_address < object_end) {
3017     uint32_t rset_word = Memory::uint32_at(rset_address);
3018     if (rset_word != 0) {
3019       uint32_t result_rset = rset_word;
3020       for (uint32_t bitmask = 1; bitmask != 0; bitmask = bitmask << 1) {
3021         // Do not dereference pointers at or past object_end.
3022         if ((rset_word & bitmask) != 0 && object_address < object_end) {
3023           Object** object_p = reinterpret_cast<Object**>(object_address);
3024           if (Heap::InNewSpace(*object_p)) {
3025             copy_object_func(reinterpret_cast<HeapObject**>(object_p));
3026           }
3027           // If this pointer does not need to be remembered anymore, clear
3028           // the remembered set bit.
3029           if (!Heap::InNewSpace(*object_p)) result_rset &= ~bitmask;
3030           set_bits_count++;
3031         }
3032         object_address += kPointerSize;
3033       }
3034       // Update the remembered set if it has changed.
3035       if (result_rset != rset_word) {
3036         Memory::uint32_at(rset_address) = result_rset;
3037       }
3038     } else {
3039       // No bits in the word were set.  This is the common case.
3040       object_address += kPointerSize * kBitsPerInt;
3041     }
3042     rset_address += kIntSize;
3043   }
3044   return set_bits_count;
3045 }
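// Sketch of the remembered-set layout assumed by the loop above: each 32-bit
// rset word guards kBitsPerInt consecutive pointer slots, so bit i of the
// word at rset_address corresponds to the slot at
// object_address + i * kPointerSize.  On a 32-bit target one word therefore
// covers 32 slots (128 bytes of object area); e.g. a word value of 0x5 means
// that slots 0 and 2 may still hold new-space pointers.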
3046 
3047 
3048 void Heap::IterateRSet(PagedSpace* space, ObjectSlotCallback copy_object_func) {
3049   ASSERT(Page::is_rset_in_use());
3050   ASSERT(space == old_pointer_space_ || space == map_space_);
3051 
3052   static void* paged_rset_histogram = StatsTable::CreateHistogram(
3053       "V8.RSetPaged",
3054       0,
3055       Page::kObjectAreaSize / kPointerSize,
3056       30);
3057 
3058   PageIterator it(space, PageIterator::PAGES_IN_USE);
3059   while (it.has_next()) {
3060     Page* page = it.next();
3061     int count = IterateRSetRange(page->ObjectAreaStart(), page->AllocationTop(),
3062                                  page->RSetStart(), copy_object_func);
3063     if (paged_rset_histogram != NULL) {
3064       StatsTable::AddHistogramSample(paged_rset_histogram, count);
3065     }
3066   }
3067 }
3068 
3069 
3070 #ifdef DEBUG
3071 #define SYNCHRONIZE_TAG(tag) v->Synchronize(tag)
3072 #else
3073 #define SYNCHRONIZE_TAG(tag)
3074 #endif
3075 
3076 void Heap::IterateRoots(ObjectVisitor* v) {
3077   IterateStrongRoots(v);
3078   v->VisitPointer(reinterpret_cast<Object**>(&roots_[kSymbolTableRootIndex]));
3079   SYNCHRONIZE_TAG("symbol_table");
3080 }
3081 
3082 
3083 void Heap::IterateStrongRoots(ObjectVisitor* v) {
3084   v->VisitPointers(&roots_[0], &roots_[kStrongRootListLength]);
3085   SYNCHRONIZE_TAG("strong_root_list");
3086 
3087   v->VisitPointer(bit_cast<Object**, String**>(&hidden_symbol_));
3088   SYNCHRONIZE_TAG("symbol");
3089 
3090   Bootstrapper::Iterate(v);
3091   SYNCHRONIZE_TAG("bootstrapper");
3092   Top::Iterate(v);
3093   SYNCHRONIZE_TAG("top");
3094 
3095 #ifdef ENABLE_DEBUGGER_SUPPORT
3096   Debug::Iterate(v);
3097 #endif
3098   SYNCHRONIZE_TAG("debug");
3099   CompilationCache::Iterate(v);
3100   SYNCHRONIZE_TAG("compilationcache");
3101 
3102   // Iterate over local handles in handle scopes.
3103   HandleScopeImplementer::Iterate(v);
3104   SYNCHRONIZE_TAG("handlescope");
3105 
3106   // Iterate over the builtin code objects and code stubs in the heap. Note
3107   // that it is not strictly necessary to iterate over code objects on
3108   // scavenge collections.  We still do it here because this same function
3109   // is used by the mark-sweep collector and the deserializer.
3110   Builtins::IterateBuiltins(v);
3111   SYNCHRONIZE_TAG("builtins");
3112 
3113   // Iterate over global handles.
3114   GlobalHandles::IterateRoots(v);
3115   SYNCHRONIZE_TAG("globalhandles");
3116 
3117   // Iterate over pointers being held by inactive threads.
3118   ThreadManager::Iterate(v);
3119   SYNCHRONIZE_TAG("threadmanager");
3120 }
3121 #undef SYNCHRONIZE_TAG
3122 
3123 
3124 // Flag is set when the heap has been configured.  The heap can be repeatedly
3125 // configured through the API until it is set up.
3126 static bool heap_configured = false;
3127 
3128 // TODO(1236194): Since the heap size is configurable on the command line
3129 // and through the API, we should gracefully handle the case that the heap
3130 // size is not big enough to fit all the initial objects.
3131 bool Heap::ConfigureHeap(int semispace_size, int old_gen_size) {
3132   if (HasBeenSetup()) return false;
3133 
3134   if (semispace_size > 0) semispace_size_ = semispace_size;
3135   if (old_gen_size > 0) old_generation_size_ = old_gen_size;
3136 
3137   // The new space size must be a power of two to support single-bit testing
3138   // for containment.
3139   semispace_size_ = RoundUpToPowerOf2(semispace_size_);
3140   initial_semispace_size_ = Min(initial_semispace_size_, semispace_size_);
3141   young_generation_size_ = 2 * semispace_size_;
3142   external_allocation_limit_ = 10 * semispace_size_;
3143 
3144   // The old generation is paged.
3145   old_generation_size_ = RoundUp(old_generation_size_, Page::kPageSize);
3146 
3147   heap_configured = true;
3148   return true;
3149 }
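// Rough example of the rounding above: a hypothetical call such as
// Heap::ConfigureHeap(3 * MB, 100000000) would round the semispace size up
// to the next power of two (4 MB), making the young generation 8 MB and the
// external allocation limit 40 MB, while the old generation size is rounded
// up to the next multiple of Page::kPageSize.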
3150 
3151 
3152 bool Heap::ConfigureHeapDefault() {
3153   return ConfigureHeap(FLAG_new_space_size, FLAG_old_space_size);
3154 }
3155 
3156 
3157 int Heap::PromotedSpaceSize() {
3158   return old_pointer_space_->Size()
3159       + old_data_space_->Size()
3160       + code_space_->Size()
3161       + map_space_->Size()
3162       + cell_space_->Size()
3163       + lo_space_->Size();
3164 }
3165 
3166 
3167 int Heap::PromotedExternalMemorySize() {
3168   if (amount_of_external_allocated_memory_
3169       <= amount_of_external_allocated_memory_at_last_global_gc_) return 0;
3170   return amount_of_external_allocated_memory_
3171       - amount_of_external_allocated_memory_at_last_global_gc_;
3172 }
3173 
3174 
3175 bool Heap::Setup(bool create_heap_objects) {
3176   // Initialize heap spaces and initial maps and objects. Whenever something
3177   // goes wrong, just return false. The caller should check the results and
3178   // call Heap::TearDown() to release allocated memory.
3179   //
3180   // If the heap is not yet configured (e.g., through the API), configure it.
3181   // Configuration is based on the flags new-space-size (really the semispace
3182   // size) and old-space-size if set or the initial values of semispace_size_
3183   // and old_generation_size_ otherwise.
3184   if (!heap_configured) {
3185     if (!ConfigureHeapDefault()) return false;
3186   }
3187 
3188   // Set up the memory allocator and allocate an initial chunk of memory.  The
3189   // initial chunk is double the size of the new space to ensure that we can
3190   // find a pair of semispaces that are contiguous and aligned to their size.
3191   if (!MemoryAllocator::Setup(MaxCapacity())) return false;
3192   void* chunk
3193       = MemoryAllocator::ReserveInitialChunk(2 * young_generation_size_);
3194   if (chunk == NULL) return false;
3195 
3196   // Put the initial chunk of the old space at the start of the initial
3197   // chunk, then the two new space semispaces, then the initial chunk of
3198   // code space.  Align the pair of semispaces to their size, which must be
3199   // a power of 2.
3200   ASSERT(IsPowerOf2(young_generation_size_));
3201   Address code_space_start = reinterpret_cast<Address>(chunk);
3202   Address new_space_start = RoundUp(code_space_start, young_generation_size_);
3203   Address old_space_start = new_space_start + young_generation_size_;
3204   int code_space_size = new_space_start - code_space_start;
3205   int old_space_size = young_generation_size_ - code_space_size;
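  // Illustrative layout of the reserved chunk, assuming it starts at address
  // C and young_generation_size_ is Y (a power of two):
  //   [C, RoundUp(C, Y))                    initial code space pages
  //   [RoundUp(C, Y), RoundUp(C, Y) + Y)    the two aligned new-space semispaces
  //   the remaining Y - code_space_size bytes are split evenly between the
  //   old pointer space and the old data space below.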
3206 
3207   // Initialize new space.
3208   if (!new_space_.Setup(new_space_start, young_generation_size_)) return false;
3209 
3210   // Initialize old space, set the maximum capacity to the old generation
3211   // size. It will not contain code.
3212   old_pointer_space_ =
3213       new OldSpace(old_generation_size_, OLD_POINTER_SPACE, NOT_EXECUTABLE);
3214   if (old_pointer_space_ == NULL) return false;
3215   if (!old_pointer_space_->Setup(old_space_start, old_space_size >> 1)) {
3216     return false;
3217   }
3218   old_data_space_ =
3219       new OldSpace(old_generation_size_, OLD_DATA_SPACE, NOT_EXECUTABLE);
3220   if (old_data_space_ == NULL) return false;
3221   if (!old_data_space_->Setup(old_space_start + (old_space_size >> 1),
3222                               old_space_size >> 1)) {
3223     return false;
3224   }
3225 
3226   // Initialize the code space, set its maximum capacity to the old
3227   // generation size. It needs executable memory.
3228   code_space_ =
3229       new OldSpace(old_generation_size_, CODE_SPACE, EXECUTABLE);
3230   if (code_space_ == NULL) return false;
3231   if (!code_space_->Setup(code_space_start, code_space_size)) return false;
3232 
3233   // Initialize map space.
3234   map_space_ = new MapSpace(kMaxMapSpaceSize, MAP_SPACE);
3235   if (map_space_ == NULL) return false;
3236   // Setting up a paged space without giving it a virtual memory range big
3237   // enough to hold at least a page will cause it to allocate.
3238   if (!map_space_->Setup(NULL, 0)) return false;
3239 
3240   // Initialize global property cell space.
3241   cell_space_ = new CellSpace(old_generation_size_, CELL_SPACE);
3242   if (cell_space_ == NULL) return false;
3243   // Setting up a paged space without giving it a virtual memory range big
3244   // enough to hold at least a page will cause it to allocate.
3245   if (!cell_space_->Setup(NULL, 0)) return false;
3246 
3247   // The large object space may contain code or data.  We set the memory
3248   // to be non-executable here for safety, but this means we need to enable it
3249   // explicitly when allocating large code objects.
3250   lo_space_ = new LargeObjectSpace(LO_SPACE);
3251   if (lo_space_ == NULL) return false;
3252   if (!lo_space_->Setup()) return false;
3253 
3254   if (create_heap_objects) {
3255     // Create initial maps.
3256     if (!CreateInitialMaps()) return false;
3257     if (!CreateApiObjects()) return false;
3258 
3259     // Create initial objects
3260     if (!CreateInitialObjects()) return false;
3261   }
3262 
3263   LOG(IntEvent("heap-capacity", Capacity()));
3264   LOG(IntEvent("heap-available", Available()));
3265 
3266   return true;
3267 }
3268 
3269 
3270 void Heap::SetStackLimit(intptr_t limit) {
3271   // On 64 bit machines, pointers are generally out of range of Smis.  We write
3272   // something that looks like an out of range Smi to the GC.
3273 
3274   // Set up the special root array entry containing the stack guard.
3275   // This is actually an address, but the tag makes the GC ignore it.
3276   roots_[kStackLimitRootIndex] =
3277     reinterpret_cast<Object*>((limit & ~kSmiTagMask) | kSmiTag);
3278 }
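// Illustration of the tagging above, assuming the usual V8 encoding where
// kSmiTag == 0 and kSmiTagMask == 1: a limit of 0xbfff1235 is stored as
// 0xbfff1234.  Clearing the low bit makes the value look like a Smi, so root
// visitors skip it instead of treating it as a heap pointer.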
3279 
3280 
3281 void Heap::TearDown() {
3282   GlobalHandles::TearDown();
3283 
3284   new_space_.TearDown();
3285 
3286   if (old_pointer_space_ != NULL) {
3287     old_pointer_space_->TearDown();
3288     delete old_pointer_space_;
3289     old_pointer_space_ = NULL;
3290   }
3291 
3292   if (old_data_space_ != NULL) {
3293     old_data_space_->TearDown();
3294     delete old_data_space_;
3295     old_data_space_ = NULL;
3296   }
3297 
3298   if (code_space_ != NULL) {
3299     code_space_->TearDown();
3300     delete code_space_;
3301     code_space_ = NULL;
3302   }
3303 
3304   if (map_space_ != NULL) {
3305     map_space_->TearDown();
3306     delete map_space_;
3307     map_space_ = NULL;
3308   }
3309 
3310   if (cell_space_ != NULL) {
3311     cell_space_->TearDown();
3312     delete cell_space_;
3313     cell_space_ = NULL;
3314   }
3315 
3316   if (lo_space_ != NULL) {
3317     lo_space_->TearDown();
3318     delete lo_space_;
3319     lo_space_ = NULL;
3320   }
3321 
3322   MemoryAllocator::TearDown();
3323 }
3324 
3325 
3326 void Heap::Shrink() {
3327   // Try to shrink all paged spaces.
3328   PagedSpaces spaces;
3329   while (PagedSpace* space = spaces.next()) space->Shrink();
3330 }
3331 
3332 
3333 #ifdef ENABLE_HEAP_PROTECTION
3334 
3335 void Heap::Protect() {
3336   if (HasBeenSetup()) {
3337     AllSpaces spaces;
3338     while (Space* space = spaces.next()) space->Protect();
3339   }
3340 }
3341 
3342 
3343 void Heap::Unprotect() {
3344   if (HasBeenSetup()) {
3345     AllSpaces spaces;
3346     while (Space* space = spaces.next()) space->Unprotect();
3347   }
3348 }
3349 
3350 #endif
3351 
3352 
3353 #ifdef DEBUG
3354 
3355 class PrintHandleVisitor: public ObjectVisitor {
3356  public:
3357   void VisitPointers(Object** start, Object** end) {
3358     for (Object** p = start; p < end; p++)
3359       PrintF("  handle %p to %p\n", p, *p);
3360   }
3361 };
3362 
3363 void Heap::PrintHandles() {
3364   PrintF("Handles:\n");
3365   PrintHandleVisitor v;
3366   HandleScopeImplementer::Iterate(&v);
3367 }
3368 
3369 #endif
3370 
3371 
3372 Space* AllSpaces::next() {
3373   switch (counter_++) {
3374     case NEW_SPACE:
3375       return Heap::new_space();
3376     case OLD_POINTER_SPACE:
3377       return Heap::old_pointer_space();
3378     case OLD_DATA_SPACE:
3379       return Heap::old_data_space();
3380     case CODE_SPACE:
3381       return Heap::code_space();
3382     case MAP_SPACE:
3383       return Heap::map_space();
3384     case CELL_SPACE:
3385       return Heap::cell_space();
3386     case LO_SPACE:
3387       return Heap::lo_space();
3388     default:
3389       return NULL;
3390   }
3391 }
3392 
3393 
3394 PagedSpace* PagedSpaces::next() {
3395   switch (counter_++) {
3396     case OLD_POINTER_SPACE:
3397       return Heap::old_pointer_space();
3398     case OLD_DATA_SPACE:
3399       return Heap::old_data_space();
3400     case CODE_SPACE:
3401       return Heap::code_space();
3402     case MAP_SPACE:
3403       return Heap::map_space();
3404     case CELL_SPACE:
3405       return Heap::cell_space();
3406     default:
3407       return NULL;
3408   }
3409 }
3410 
3411 
3412 
3413 OldSpace* OldSpaces::next() {
3414   switch (counter_++) {
3415     case OLD_POINTER_SPACE:
3416       return Heap::old_pointer_space();
3417     case OLD_DATA_SPACE:
3418       return Heap::old_data_space();
3419     case CODE_SPACE:
3420       return Heap::code_space();
3421     default:
3422       return NULL;
3423   }
3424 }
3425 
3426 
3427 SpaceIterator::SpaceIterator() : current_space_(FIRST_SPACE), iterator_(NULL) {
3428 }
3429 
3430 
3431 SpaceIterator::~SpaceIterator() {
3432   // Delete active iterator if any.
3433   delete iterator_;
3434 }
3435 
3436 
3437 bool SpaceIterator::has_next() {
3438   // Iterate until no more spaces.
3439   return current_space_ != LAST_SPACE;
3440 }
3441 
3442 
3443 ObjectIterator* SpaceIterator::next() {
3444   if (iterator_ != NULL) {
3445     delete iterator_;
3446     iterator_ = NULL;
3447     // Move to the next space
3448     current_space_++;
3449     if (current_space_ > LAST_SPACE) {
3450       return NULL;
3451     }
3452   }
3453 
3454   // Return iterator for the new current space.
3455   return CreateIterator();
3456 }
3457 
3458 
3459 // Create an iterator for the space to iterate.
3460 ObjectIterator* SpaceIterator::CreateIterator() {
3461   ASSERT(iterator_ == NULL);
3462 
3463   switch (current_space_) {
3464     case NEW_SPACE:
3465       iterator_ = new SemiSpaceIterator(Heap::new_space());
3466       break;
3467     case OLD_POINTER_SPACE:
3468       iterator_ = new HeapObjectIterator(Heap::old_pointer_space());
3469       break;
3470     case OLD_DATA_SPACE:
3471       iterator_ = new HeapObjectIterator(Heap::old_data_space());
3472       break;
3473     case CODE_SPACE:
3474       iterator_ = new HeapObjectIterator(Heap::code_space());
3475       break;
3476     case MAP_SPACE:
3477       iterator_ = new HeapObjectIterator(Heap::map_space());
3478       break;
3479     case CELL_SPACE:
3480       iterator_ = new HeapObjectIterator(Heap::cell_space());
3481       break;
3482     case LO_SPACE:
3483       iterator_ = new LargeObjectIterator(Heap::lo_space());
3484       break;
3485   }
3486 
3487   // Return the newly allocated iterator.
3488   ASSERT(iterator_ != NULL);
3489   return iterator_;
3490 }
3491 
3492 
3493 HeapIterator::HeapIterator() {
3494   Init();
3495 }
3496 
3497 
3498 HeapIterator::~HeapIterator() {
3499   Shutdown();
3500 }
3501 
3502 
3503 void HeapIterator::Init() {
3504   // Start the iteration.
3505   space_iterator_ = new SpaceIterator();
3506   object_iterator_ = space_iterator_->next();
3507 }
3508 
3509 
3510 void HeapIterator::Shutdown() {
3511   // Make sure the last iterator is deallocated.
3512   delete space_iterator_;
3513   space_iterator_ = NULL;
3514   object_iterator_ = NULL;
3515 }
3516 
3517 
3518 bool HeapIterator::has_next() {
3519   // No iterator means we are done.
3520   if (object_iterator_ == NULL) return false;
3521 
3522   if (object_iterator_->has_next_object()) {
3523     // If the current iterator has more objects we are fine.
3524     return true;
3525   } else {
3526     // Go through the spaces looking for one that has objects.
3527     while (space_iterator_->has_next()) {
3528       object_iterator_ = space_iterator_->next();
3529       if (object_iterator_->has_next_object()) {
3530         return true;
3531       }
3532     }
3533   }
3534   // Done with the last space.
3535   object_iterator_ = NULL;
3536   return false;
3537 }
3538 
3539 
3540 HeapObject* HeapIterator::next() {
3541   if (has_next()) {
3542     return object_iterator_->next_object();
3543   } else {
3544     return NULL;
3545   }
3546 }
3547 
3548 
3549 void HeapIterator::reset() {
3550   // Restart the iterator.
3551   Shutdown();
3552   Init();
3553 }
3554 
3555 
3556 #ifdef ENABLE_LOGGING_AND_PROFILING
3557 namespace {
3558 
3559 // JSConstructorProfile is responsible for gathering and logging
3560 // "constructor profile" of JS object allocated on heap.
3561 // It is run during garbage collection cycle, thus it doesn't need
3562 // to use handles.
3563 class JSConstructorProfile BASE_EMBEDDED {
3564  public:
3565   JSConstructorProfile() : zscope_(DELETE_ON_EXIT) {}
3566   void CollectStats(JSObject* obj);
3567   void PrintStats();
3568   // Used by ZoneSplayTree::ForEach.
3569   void Call(String* name, const NumberAndSizeInfo& number_and_size);
3570  private:
3571   struct TreeConfig {
3572     typedef String* Key;
3573     typedef NumberAndSizeInfo Value;
3574     static const Key kNoKey;
3575     static const Value kNoValue;
3576     // Strings are unique, so it is sufficient to compare their pointers.
3577     static int Compare(const Key& a, const Key& b) {
3578       return a == b ? 0 : (a < b ? -1 : 1);
3579     }
3580   };
3581 
3582   typedef ZoneSplayTree<TreeConfig> JSObjectsInfoTree;
3583   static int CalculateJSObjectNetworkSize(JSObject* obj);
3584 
3585   ZoneScope zscope_;
3586   JSObjectsInfoTree js_objects_info_tree_;
3587 };
3588 
3589 const JSConstructorProfile::TreeConfig::Key
3590     JSConstructorProfile::TreeConfig::kNoKey = NULL;
3591 const JSConstructorProfile::TreeConfig::Value
3592     JSConstructorProfile::TreeConfig::kNoValue;
3593 
3594 
3595 int JSConstructorProfile::CalculateJSObjectNetworkSize(JSObject* obj) {
3596   int size = obj->Size();
3597   // If 'properties' and 'elements' are non-empty (thus, non-shared),
3598   // take their size into account.
3599   if (FixedArray::cast(obj->properties())->length() != 0) {
3600     size += obj->properties()->Size();
3601   }
3602   if (FixedArray::cast(obj->elements())->length() != 0) {
3603     size += obj->elements()->Size();
3604   }
3605   return size;
3606 }
3607 
3608 
3609 void JSConstructorProfile::Call(String* name,
3610                                 const NumberAndSizeInfo& number_and_size) {
3611   SmartPointer<char> s_name;
3612   if (name != NULL) {
3613     s_name = name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
3614   }
3615   LOG(HeapSampleJSConstructorEvent(*s_name,
3616                                    number_and_size.number(),
3617                                    number_and_size.bytes()));
3618 }
3619 
3620 
3621 void JSConstructorProfile::CollectStats(JSObject* obj) {
3622   String* constructor_func = NULL;
3623   if (obj->map()->constructor()->IsJSFunction()) {
3624     JSFunction* constructor = JSFunction::cast(obj->map()->constructor());
3625     SharedFunctionInfo* sfi = constructor->shared();
3626     String* name = String::cast(sfi->name());
3627     constructor_func = name->length() > 0 ? name : sfi->inferred_name();
3628   } else if (obj->IsJSFunction()) {
3629     constructor_func = Heap::function_class_symbol();
3630   }
3631   JSObjectsInfoTree::Locator loc;
3632   if (!js_objects_info_tree_.Find(constructor_func, &loc)) {
3633     js_objects_info_tree_.Insert(constructor_func, &loc);
3634   }
3635   NumberAndSizeInfo number_and_size = loc.value();
3636   number_and_size.increment_number(1);
3637   number_and_size.increment_bytes(CalculateJSObjectNetworkSize(obj));
3638   loc.set_value(number_and_size);
3639 }
3640 
3641 
3642 void JSConstructorProfile::PrintStats() {
3643   js_objects_info_tree_.ForEach(this);
3644 }
3645 
3646 }  // namespace
3647 #endif
3648 
3649 
3650 //
3651 // HeapProfiler class implementation.
3652 //
3653 #ifdef ENABLE_LOGGING_AND_PROFILING
3654 void HeapProfiler::CollectStats(HeapObject* obj, HistogramInfo* info) {
3655   InstanceType type = obj->map()->instance_type();
3656   ASSERT(0 <= type && type <= LAST_TYPE);
3657   info[type].increment_number(1);
3658   info[type].increment_bytes(obj->Size());
3659 }
3660 #endif
3661 
3662 
3663 #ifdef ENABLE_LOGGING_AND_PROFILING
3664 void HeapProfiler::WriteSample() {
3665   LOG(HeapSampleBeginEvent("Heap", "allocated"));
3666   LOG(HeapSampleStats(
3667       "Heap", "allocated", Heap::Capacity(), Heap::SizeOfObjects()));
3668 
3669   HistogramInfo info[LAST_TYPE+1];
3670 #define DEF_TYPE_NAME(name) info[name].set_name(#name);
3671   INSTANCE_TYPE_LIST(DEF_TYPE_NAME)
3672 #undef DEF_TYPE_NAME
3673 
3674   JSConstructorProfile js_cons_profile;
3675   HeapIterator iterator;
3676   while (iterator.has_next()) {
3677     HeapObject* obj = iterator.next();
3678     CollectStats(obj, info);
3679     if (obj->IsJSObject()) {
3680       js_cons_profile.CollectStats(JSObject::cast(obj));
3681     }
3682   }
3683 
3684   // Lump all the string types together.
3685   int string_number = 0;
3686   int string_bytes = 0;
3687 #define INCREMENT_SIZE(type, size, name, camel_name)   \
3688     string_number += info[type].number();              \
3689     string_bytes += info[type].bytes();
3690   STRING_TYPE_LIST(INCREMENT_SIZE)
3691 #undef INCREMENT_SIZE
3692   if (string_bytes > 0) {
3693     LOG(HeapSampleItemEvent("STRING_TYPE", string_number, string_bytes));
3694   }
3695 
3696   for (int i = FIRST_NONSTRING_TYPE; i <= LAST_TYPE; ++i) {
3697     if (info[i].bytes() > 0) {
3698       LOG(HeapSampleItemEvent(info[i].name(), info[i].number(),
3699                               info[i].bytes()));
3700     }
3701   }
3702 
3703   js_cons_profile.PrintStats();
3704 
3705   LOG(HeapSampleEndEvent("Heap", "allocated"));
3706 }
3707 
3708 
3709 #endif
3710 
3711 
3712 
3713 #ifdef DEBUG
3714 
3715 static bool search_for_any_global;
3716 static Object* search_target;
3717 static bool found_target;
3718 static List<Object*> object_stack(20);
3719 
3720 
3721 // Tags 0, 1, and 3 are used. Use 2 for marking visited HeapObject.
3722 static const int kMarkTag = 2;
3723 
3724 static void MarkObjectRecursively(Object** p);
3725 class MarkObjectVisitor : public ObjectVisitor {
3726  public:
3727   void VisitPointers(Object** start, Object** end) {
3728     // Mark all HeapObject pointers in [start, end).
3729     for (Object** p = start; p < end; p++) {
3730       if ((*p)->IsHeapObject())
3731         MarkObjectRecursively(p);
3732     }
3733   }
3734 };
3735 
3736 static MarkObjectVisitor mark_visitor;
3737 
3738 static void MarkObjectRecursively(Object** p) {
3739   if (!(*p)->IsHeapObject()) return;
3740 
3741   HeapObject* obj = HeapObject::cast(*p);
3742 
3743   Object* map = obj->map();
3744 
3745   if (!map->IsHeapObject()) return;  // visited before
3746 
3747   if (found_target) return;  // stop if target found
3748   object_stack.Add(obj);
3749   if ((search_for_any_global && obj->IsJSGlobalObject()) ||
3750       (!search_for_any_global && (obj == search_target))) {
3751     found_target = true;
3752     return;
3753   }
3754 
3755   if (obj->IsCode()) {
3756     Code::cast(obj)->ConvertICTargetsFromAddressToObject();
3757   }
3758 
3759   // not visited yet
3760   Map* map_p = reinterpret_cast<Map*>(HeapObject::cast(map));
3761 
3762   Address map_addr = map_p->address();
3763 
3764   obj->set_map(reinterpret_cast<Map*>(map_addr + kMarkTag));
3765 
3766   MarkObjectRecursively(&map);
3767 
3768   obj->IterateBody(map_p->instance_type(), obj->SizeFromMap(map_p),
3769                    &mark_visitor);
3770 
3771   if (!found_target)  // don't pop if found the target
3772     object_stack.RemoveLast();
3773 }
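// Sketch of the marking scheme used above: a visited object has its map word
// rewritten as the map's address plus kMarkTag (tag 2).  Since tags 0, 1 and
// 3 already mean Smi, HeapObject and Failure, map->IsHeapObject() is false
// for a marked object, which is what the "visited before" check relies on.
// UnmarkObjectRecursively below restores the original map by subtracting
// kMarkTag again.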
3774 
3775 
3776 static void UnmarkObjectRecursively(Object** p);
3777 class UnmarkObjectVisitor : public ObjectVisitor {
3778  public:
3779   void VisitPointers(Object** start, Object** end) {
3780     // Unmark all HeapObject pointers in [start, end).
3781     for (Object** p = start; p < end; p++) {
3782       if ((*p)->IsHeapObject())
3783         UnmarkObjectRecursively(p);
3784     }
3785   }
3786 };
3787 
3788 static UnmarkObjectVisitor unmark_visitor;
3789 
3790 static void UnmarkObjectRecursively(Object** p) {
3791   if (!(*p)->IsHeapObject()) return;
3792 
3793   HeapObject* obj = HeapObject::cast(*p);
3794 
3795   Object* map = obj->map();
3796 
3797   if (map->IsHeapObject()) return;  // unmarked already
3798 
3799   Address map_addr = reinterpret_cast<Address>(map);
3800 
3801   map_addr -= kMarkTag;
3802 
3803   ASSERT_TAG_ALIGNED(map_addr);
3804 
3805   HeapObject* map_p = HeapObject::FromAddress(map_addr);
3806 
3807   obj->set_map(reinterpret_cast<Map*>(map_p));
3808 
3809   UnmarkObjectRecursively(reinterpret_cast<Object**>(&map_p));
3810 
3811   obj->IterateBody(Map::cast(map_p)->instance_type(),
3812                    obj->SizeFromMap(Map::cast(map_p)),
3813                    &unmark_visitor);
3814 
3815   if (obj->IsCode()) {
3816     Code::cast(obj)->ConvertICTargetsFromObjectToAddress();
3817   }
3818 }
3819 
3820 
3821 static void MarkRootObjectRecursively(Object** root) {
3822   if (search_for_any_global) {
3823     ASSERT(search_target == NULL);
3824   } else {
3825     ASSERT(search_target->IsHeapObject());
3826   }
3827   found_target = false;
3828   object_stack.Clear();
3829 
3830   MarkObjectRecursively(root);
3831   UnmarkObjectRecursively(root);
3832 
3833   if (found_target) {
3834     PrintF("=====================================\n");
3835     PrintF("====        Path to object       ====\n");
3836     PrintF("=====================================\n\n");
3837 
3838     ASSERT(!object_stack.is_empty());
3839     for (int i = 0; i < object_stack.length(); i++) {
3840       if (i > 0) PrintF("\n     |\n     |\n     V\n\n");
3841       Object* obj = object_stack[i];
3842       obj->Print();
3843     }
3844     PrintF("=====================================\n");
3845   }
3846 }
3847 
3848 
3849 // Helper class for visiting HeapObjects recursively.
3850 class MarkRootVisitor: public ObjectVisitor {
3851  public:
3852   void VisitPointers(Object** start, Object** end) {
3853     // Visit all HeapObject pointers in [start, end)
3854     for (Object** p = start; p < end; p++) {
3855       if ((*p)->IsHeapObject())
3856         MarkRootObjectRecursively(p);
3857     }
3858   }
3859 };
3860 
3861 
3862 // Triggers a depth-first traversal of reachable objects from roots
3863 // and finds a path to a specific heap object and prints it.
3864 void Heap::TracePathToObject() {
3865   search_target = NULL;
3866   search_for_any_global = false;
3867 
3868   MarkRootVisitor root_visitor;
3869   IterateRoots(&root_visitor);
3870 }
3871 
3872 
3873 // Triggers a depth-first traversal of reachable objects from roots
3874 // and finds a path to any global object and prints it. Useful for
3875 // determining the source for leaks of global objects.
3876 void Heap::TracePathToGlobal() {
3877   search_target = NULL;
3878   search_for_any_global = true;
3879 
3880   MarkRootVisitor root_visitor;
3881   IterateRoots(&root_visitor);
3882 }
3883 #endif
3884 
3885 
3886 GCTracer::GCTracer()
3887     : start_time_(0.0),
3888       start_size_(0.0),
3889       gc_count_(0),
3890       full_gc_count_(0),
3891       is_compacting_(false),
3892       marked_count_(0) {
3893   // These two fields reflect the state of the previous full collection.
3894   // Set them before they are changed by the collector.
3895   previous_has_compacted_ = MarkCompactCollector::HasCompacted();
3896   previous_marked_count_ = MarkCompactCollector::previous_marked_count();
3897   if (!FLAG_trace_gc) return;
3898   start_time_ = OS::TimeCurrentMillis();
3899   start_size_ = SizeOfHeapObjects();
3900 }
3901 
3902 
3903 GCTracer::~GCTracer() {
3904   if (!FLAG_trace_gc) return;
3905   // Printf ONE line iff flag is set.
3906   PrintF("%s %.1f -> %.1f MB, %d ms.\n",
3907          CollectorString(),
3908          start_size_, SizeOfHeapObjects(),
3909          static_cast<int>(OS::TimeCurrentMillis() - start_time_));
3910 
3911 #if defined(ENABLE_LOGGING_AND_PROFILING)
3912   Heap::PrintShortHeapStatistics();
3913 #endif
3914 }
3915 
3916 
3917 const char* GCTracer::CollectorString() {
3918   switch (collector_) {
3919     case SCAVENGER:
3920       return "Scavenge";
3921     case MARK_COMPACTOR:
3922       return MarkCompactCollector::HasCompacted() ? "Mark-compact"
3923                                                   : "Mark-sweep";
3924   }
3925   return "Unknown GC";
3926 }
3927 
3928 
3929 int KeyedLookupCache::Hash(Map* map, String* name) {
3930   // Uses only lower 32 bits if pointers are larger.
3931   uintptr_t addr_hash =
3932       static_cast<uint32_t>(reinterpret_cast<uintptr_t>(map)) >> 2;
3933   return (addr_hash ^ name->Hash()) % kLength;
3934 }
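// Worked example of the hash above (purely illustrative values): a map whose
// tagged address is 0x08124001 contributes 0x08124001 >> 2 == 0x02049000;
// this is xor'ed with the symbol's hash and reduced modulo kLength to pick a
// slot in keys_ and field_offsets_.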
3935 
3936 
3937 int KeyedLookupCache::Lookup(Map* map, String* name) {
3938   int index = Hash(map, name);
3939   Key& key = keys_[index];
3940   if ((key.map == map) && key.name->Equals(name)) {
3941     return field_offsets_[index];
3942   }
3943   return -1;
3944 }
3945 
3946 
3947 void KeyedLookupCache::Update(Map* map, String* name, int field_offset) {
3948   String* symbol;
3949   if (Heap::LookupSymbolIfExists(name, &symbol)) {
3950     int index = Hash(map, symbol);
3951     Key& key = keys_[index];
3952     key.map = map;
3953     key.name = symbol;
3954     field_offsets_[index] = field_offset;
3955   }
3956 }
3957 
3958 
3959 void KeyedLookupCache::Clear() {
3960   for (int index = 0; index < kLength; index++) keys_[index].map = NULL;
3961 }
3962 
3963 
3964 KeyedLookupCache::Key KeyedLookupCache::keys_[KeyedLookupCache::kLength];
3965 
3966 
3967 int KeyedLookupCache::field_offsets_[KeyedLookupCache::kLength];
3968 
3969 
3970 void DescriptorLookupCache::Clear() {
3971   for (int index = 0; index < kLength; index++) keys_[index].array = NULL;
3972 }
3973 
3974 
3975 DescriptorLookupCache::Key
3976 DescriptorLookupCache::keys_[DescriptorLookupCache::kLength];
3977 
3978 int DescriptorLookupCache::results_[DescriptorLookupCache::kLength];
3979 
3980 
3981 #ifdef DEBUG
3982 bool Heap::GarbageCollectionGreedyCheck() {
3983   ASSERT(FLAG_gc_greedy);
3984   if (Bootstrapper::IsActive()) return true;
3985   if (disallow_allocation_failure()) return true;
3986   return CollectGarbage(0, NEW_SPACE);
3987 }
3988 #endif
3989 
3990 
3991 TranscendentalCache::TranscendentalCache(TranscendentalCache::Type t)
3992   : type_(t) {
3993   uint32_t in0 = 0xffffffffu;  // Bit-pattern for a NaN that isn't
3994   uint32_t in1 = 0xffffffffu;  // generated by the FPU.
3995   for (int i = 0; i < kCacheSize; i++) {
3996     elements_[i].in[0] = in0;
3997     elements_[i].in[1] = in1;
3998     elements_[i].output = NULL;
3999   }
4000 }
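// Note on the sentinel above: the two 0xffffffff words form the double bit
// pattern 0xffffffffffffffff, which decodes to a NaN (all-ones exponent,
// non-zero mantissa) that the FPU does not produce.  An empty cache entry can
// therefore never compare equal, bit for bit, with the input of a real lookup.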
4001 
4002 
4003 TranscendentalCache* TranscendentalCache::caches_[kNumberOfCaches];
4004 
4005 
4006 void TranscendentalCache::Clear() {
4007   for (int i = 0; i < kNumberOfCaches; i++) {
4008     if (caches_[i] != NULL) {
4009       delete caches_[i];
4010       caches_[i] = NULL;
4011     }
4012   }
4013 }
4014 
4015 
4016 } }  // namespace v8::internal
4017