// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_HEAP_H_
#define V8_HEAP_H_

#include <math.h>

#include "allocation.h"
#include "globals.h"
#include "incremental-marking.h"
#include "list.h"
#include "mark-compact.h"
#include "objects-visiting.h"
#include "spaces.h"
#include "splay-tree-inl.h"
#include "store-buffer.h"
#include "v8-counters.h"
#include "v8globals.h"

namespace v8 {
namespace internal {

// Defines all the roots in Heap.
#define STRONG_ROOT_LIST(V)                                                    \
  V(Map, byte_array_map, ByteArrayMap)                                         \
  V(Map, free_space_map, FreeSpaceMap)                                         \
  V(Map, one_pointer_filler_map, OnePointerFillerMap)                          \
  V(Map, two_pointer_filler_map, TwoPointerFillerMap)                          \
  /* Cluster the most popular ones in a few cache lines here at the top.    */ \
  V(Smi, store_buffer_top, StoreBufferTop)                                     \
  V(Oddball, undefined_value, UndefinedValue)                                  \
  V(Oddball, the_hole_value, TheHoleValue)                                     \
  V(Oddball, null_value, NullValue)                                            \
  V(Oddball, true_value, TrueValue)                                            \
  V(Oddball, false_value, FalseValue)                                          \
  V(Map, global_property_cell_map, GlobalPropertyCellMap)                      \
  V(Map, shared_function_info_map, SharedFunctionInfoMap)                      \
  V(Map, meta_map, MetaMap)                                                    \
  V(Map, ascii_symbol_map, AsciiSymbolMap)                                     \
  V(Map, ascii_string_map, AsciiStringMap)                                     \
  V(Map, heap_number_map, HeapNumberMap)                                       \
  V(Map, global_context_map, GlobalContextMap)                                 \
  V(Map, fixed_array_map, FixedArrayMap)                                       \
  V(Map, code_map, CodeMap)                                                    \
  V(Map, scope_info_map, ScopeInfoMap)                                         \
  V(Map, fixed_cow_array_map, FixedCOWArrayMap)                                \
  V(Map, fixed_double_array_map, FixedDoubleArrayMap)                          \
  V(Object, no_interceptor_result_sentinel, NoInterceptorResultSentinel)       \
  V(Map, hash_table_map, HashTableMap)                                         \
  V(FixedArray, empty_fixed_array, EmptyFixedArray)                            \
  V(ByteArray, empty_byte_array, EmptyByteArray)                               \
  V(String, empty_string, EmptyString)                                         \
  V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray)             \
  V(Smi, stack_limit, StackLimit)                                              \
  V(Oddball, arguments_marker, ArgumentsMarker)                                \
  /* The first 32 roots above this line should be boring from a GC point of */ \
  /* view.  This means they are never in new space and never on a page that */ \
  /* is being compacted.                                                    */ \
  V(FixedArray, number_string_cache, NumberStringCache)                        \
  V(Object, instanceof_cache_function, InstanceofCacheFunction)                \
  V(Object, instanceof_cache_map, InstanceofCacheMap)                          \
  V(Object, instanceof_cache_answer, InstanceofCacheAnswer)                    \
  V(FixedArray, single_character_string_cache, SingleCharacterStringCache)     \
  V(FixedArray, string_split_cache, StringSplitCache)                          \
  V(Object, termination_exception, TerminationException)                       \
  V(Smi, hash_seed, HashSeed)                                                  \
  V(Map, string_map, StringMap)                                                \
  V(Map, symbol_map, SymbolMap)                                                \
  V(Map, cons_string_map, ConsStringMap)                                       \
  V(Map, cons_ascii_string_map, ConsAsciiStringMap)                            \
  V(Map, sliced_string_map, SlicedStringMap)                                   \
  V(Map, sliced_ascii_string_map, SlicedAsciiStringMap)                        \
  V(Map, cons_symbol_map, ConsSymbolMap)                                       \
  V(Map, cons_ascii_symbol_map, ConsAsciiSymbolMap)                            \
  V(Map, external_symbol_map, ExternalSymbolMap)                               \
  V(Map, external_symbol_with_ascii_data_map, ExternalSymbolWithAsciiDataMap)  \
  V(Map, external_ascii_symbol_map, ExternalAsciiSymbolMap)                    \
  V(Map, external_string_map, ExternalStringMap)                               \
  V(Map, external_string_with_ascii_data_map, ExternalStringWithAsciiDataMap)  \
  V(Map, external_ascii_string_map, ExternalAsciiStringMap)                    \
  V(Map, short_external_symbol_map, ShortExternalSymbolMap)                    \
  V(Map,                                                                       \
    short_external_symbol_with_ascii_data_map,                                 \
    ShortExternalSymbolWithAsciiDataMap)                                       \
  V(Map, short_external_ascii_symbol_map, ShortExternalAsciiSymbolMap)         \
  V(Map, short_external_string_map, ShortExternalStringMap)                    \
  V(Map,                                                                       \
    short_external_string_with_ascii_data_map,                                 \
    ShortExternalStringWithAsciiDataMap)                                       \
  V(Map, short_external_ascii_string_map, ShortExternalAsciiStringMap)         \
  V(Map, undetectable_string_map, UndetectableStringMap)                       \
  V(Map, undetectable_ascii_string_map, UndetectableAsciiStringMap)            \
  V(Map, external_pixel_array_map, ExternalPixelArrayMap)                      \
  V(Map, external_byte_array_map, ExternalByteArrayMap)                        \
  V(Map, external_unsigned_byte_array_map, ExternalUnsignedByteArrayMap)       \
  V(Map, external_short_array_map, ExternalShortArrayMap)                      \
  V(Map, external_unsigned_short_array_map, ExternalUnsignedShortArrayMap)     \
  V(Map, external_int_array_map, ExternalIntArrayMap)                          \
  V(Map, external_unsigned_int_array_map, ExternalUnsignedIntArrayMap)         \
  V(Map, external_float_array_map, ExternalFloatArrayMap)                      \
  V(Map, external_double_array_map, ExternalDoubleArrayMap)                    \
  V(Map, non_strict_arguments_elements_map, NonStrictArgumentsElementsMap)     \
  V(Map, function_context_map, FunctionContextMap)                             \
  V(Map, catch_context_map, CatchContextMap)                                   \
  V(Map, with_context_map, WithContextMap)                                     \
  V(Map, block_context_map, BlockContextMap)                                   \
  V(Map, module_context_map, ModuleContextMap)                                 \
  V(Map, oddball_map, OddballMap)                                              \
  V(Map, message_object_map, JSMessageObjectMap)                               \
  V(Map, foreign_map, ForeignMap)                                              \
  V(HeapNumber, nan_value, NanValue)                                           \
  V(HeapNumber, infinity_value, InfinityValue)                                 \
  V(HeapNumber, minus_zero_value, MinusZeroValue)                              \
  V(Map, neander_map, NeanderMap)                                              \
  V(JSObject, message_listeners, MessageListeners)                             \
  V(Foreign, prototype_accessors, PrototypeAccessors)                          \
  V(UnseededNumberDictionary, code_stubs, CodeStubs)                           \
  V(UnseededNumberDictionary, non_monomorphic_cache, NonMonomorphicCache)      \
  V(PolymorphicCodeCache, polymorphic_code_cache, PolymorphicCodeCache)        \
  V(Code, js_entry_code, JsEntryCode)                                          \
  V(Code, js_construct_entry_code, JsConstructEntryCode)                       \
  V(FixedArray, natives_source_cache, NativesSourceCache)                      \
  V(Object, last_script_id, LastScriptId)                                      \
  V(Script, empty_script, EmptyScript)                                         \
  V(Smi, real_stack_limit, RealStackLimit)                                     \
  V(StringDictionary, intrinsic_function_names, IntrinsicFunctionNames)        \
  V(Smi, arguments_adaptor_deopt_pc_offset, ArgumentsAdaptorDeoptPCOffset)     \
  V(Smi, construct_stub_deopt_pc_offset, ConstructStubDeoptPCOffset)

#define ROOT_LIST(V)                                  \
  STRONG_ROOT_LIST(V)                                 \
  V(SymbolTable, symbol_table, SymbolTable)

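// Illustrative sketch of the X-macro pattern used above (an aid to reading,
// in the spirit of the accessors defined further down in this file): each
// consumer supplies its own definition of V and re-expands the same list.
// For example, an enum of root indices can be generated like this:
//
//   #define ROOT_INDEX_DECLARATION(type, name, camel_name) \
//       k##camel_name##RootIndex,
//   enum RootListIndex {
//     ROOT_LIST(ROOT_INDEX_DECLARATION)
//     kRootListLength
//   };
//   #undef ROOT_INDEX_DECLARATION
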
#define SYMBOL_LIST(V)                                                   \
  V(Array_symbol, "Array")                                               \
  V(Object_symbol, "Object")                                             \
  V(Proto_symbol, "__proto__")                                           \
  V(StringImpl_symbol, "StringImpl")                                     \
  V(arguments_symbol, "arguments")                                       \
  V(Arguments_symbol, "Arguments")                                       \
  V(call_symbol, "call")                                                 \
  V(apply_symbol, "apply")                                               \
  V(caller_symbol, "caller")                                             \
  V(boolean_symbol, "boolean")                                           \
  V(Boolean_symbol, "Boolean")                                           \
  V(callee_symbol, "callee")                                             \
  V(constructor_symbol, "constructor")                                   \
  V(code_symbol, ".code")                                                \
  V(result_symbol, ".result")                                            \
  V(catch_var_symbol, ".catch-var")                                      \
  V(empty_symbol, "")                                                    \
  V(eval_symbol, "eval")                                                 \
  V(function_symbol, "function")                                         \
  V(length_symbol, "length")                                             \
  V(module_symbol, "module")                                             \
  V(name_symbol, "name")                                                 \
  V(native_symbol, "native")                                             \
  V(null_symbol, "null")                                                 \
  V(number_symbol, "number")                                             \
  V(Number_symbol, "Number")                                             \
  V(nan_symbol, "NaN")                                                   \
  V(RegExp_symbol, "RegExp")                                             \
  V(source_symbol, "source")                                             \
  V(global_symbol, "global")                                             \
  V(ignore_case_symbol, "ignoreCase")                                    \
  V(multiline_symbol, "multiline")                                       \
  V(input_symbol, "input")                                               \
  V(index_symbol, "index")                                               \
  V(last_index_symbol, "lastIndex")                                      \
  V(object_symbol, "object")                                             \
  V(prototype_symbol, "prototype")                                       \
  V(string_symbol, "string")                                             \
  V(String_symbol, "String")                                             \
  V(Date_symbol, "Date")                                                 \
  V(this_symbol, "this")                                                 \
  V(to_string_symbol, "toString")                                        \
  V(char_at_symbol, "CharAt")                                            \
  V(undefined_symbol, "undefined")                                       \
  V(value_of_symbol, "valueOf")                                          \
  V(InitializeVarGlobal_symbol, "InitializeVarGlobal")                   \
  V(InitializeConstGlobal_symbol, "InitializeConstGlobal")               \
  V(KeyedLoadElementMonomorphic_symbol,                                  \
    "KeyedLoadElementMonomorphic")                                       \
  V(KeyedStoreElementMonomorphic_symbol,                                 \
    "KeyedStoreElementMonomorphic")                                      \
  V(KeyedStoreAndGrowElementMonomorphic_symbol,                          \
    "KeyedStoreAndGrowElementMonomorphic")                               \
  V(stack_overflow_symbol, "kStackOverflowBoilerplate")                  \
  V(illegal_access_symbol, "illegal access")                             \
  V(out_of_memory_symbol, "out-of-memory")                               \
  V(illegal_execution_state_symbol, "illegal execution state")           \
  V(get_symbol, "get")                                                   \
  V(set_symbol, "set")                                                   \
  V(function_class_symbol, "Function")                                   \
  V(illegal_argument_symbol, "illegal argument")                         \
  V(MakeReferenceError_symbol, "MakeReferenceError")                     \
  V(MakeSyntaxError_symbol, "MakeSyntaxError")                           \
  V(MakeTypeError_symbol, "MakeTypeError")                               \
  V(invalid_lhs_in_assignment_symbol, "invalid_lhs_in_assignment")       \
  V(invalid_lhs_in_for_in_symbol, "invalid_lhs_in_for_in")               \
  V(invalid_lhs_in_postfix_op_symbol, "invalid_lhs_in_postfix_op")       \
  V(invalid_lhs_in_prefix_op_symbol, "invalid_lhs_in_prefix_op")         \
  V(illegal_return_symbol, "illegal_return")                             \
  V(illegal_break_symbol, "illegal_break")                               \
  V(illegal_continue_symbol, "illegal_continue")                         \
  V(unknown_label_symbol, "unknown_label")                               \
  V(redeclaration_symbol, "redeclaration")                               \
  V(failure_symbol, "<failure>")                                         \
  V(space_symbol, " ")                                                   \
  V(exec_symbol, "exec")                                                 \
  V(zero_symbol, "0")                                                    \
  V(global_eval_symbol, "GlobalEval")                                    \
  V(identity_hash_symbol, "v8::IdentityHash")                            \
  V(closure_symbol, "(closure)")                                         \
  V(use_strict, "use strict")                                            \
  V(dot_symbol, ".")                                                     \
  V(anonymous_function_symbol, "(anonymous function)")                   \
  V(compare_ic_symbol, ".compare_ic")                                    \
  V(infinity_symbol, "Infinity")                                         \
  V(minus_infinity_symbol, "-Infinity")                                  \
  V(hidden_stack_trace_symbol, "v8::hidden_stack_trace")

// Forward declarations.
class GCTracer;
class HeapStats;
class Isolate;
class WeakObjectRetainer;


typedef String* (*ExternalStringTableUpdaterCallback)(Heap* heap,
                                                      Object** pointer);

class StoreBufferRebuilder {
 public:
  explicit StoreBufferRebuilder(StoreBuffer* store_buffer)
      : store_buffer_(store_buffer) {
  }

  void Callback(MemoryChunk* page, StoreBufferEvent event);

 private:
  StoreBuffer* store_buffer_;

  // We record in this variable how full the store buffer was when we started
  // iterating over the current page, finding pointers to new space.  If the
  // store buffer overflows again we can exempt the page from the store buffer
  // by rewinding to this point instead of having to search the store buffer.
  Object*** start_of_current_page_;
  // The current page we are scanning in the store buffer iterator.
  MemoryChunk* current_page_;
};


// The all static Heap captures the interface to the global object heap.
// All JavaScript contexts in this process share the same object heap.

#ifdef DEBUG
class HeapDebugUtils;
#endif


// A queue of objects promoted during scavenge. Each object is accompanied
// by its size to avoid dereferencing a map pointer for scanning.
class PromotionQueue {
 public:
  explicit PromotionQueue(Heap* heap)
      : front_(NULL),
        rear_(NULL),
        limit_(NULL),
        emergency_stack_(0),
        heap_(heap) { }

  void Initialize();

  void Destroy() {
    ASSERT(is_empty());
    delete emergency_stack_;
    emergency_stack_ = NULL;
  }

  inline void ActivateGuardIfOnTheSamePage();

  Page* GetHeadPage() {
    return Page::FromAllocationTop(reinterpret_cast<Address>(rear_));
  }

  void SetNewLimit(Address limit) {
    if (!guard_) {
      return;
    }

    ASSERT(GetHeadPage() == Page::FromAllocationTop(limit));
    limit_ = reinterpret_cast<intptr_t*>(limit);

    if (limit_ <= rear_) {
      return;
    }

    RelocateQueueHead();
  }

  bool is_empty() {
    return (front_ == rear_) &&
        (emergency_stack_ == NULL || emergency_stack_->length() == 0);
  }

  inline void insert(HeapObject* target, int size);

  void remove(HeapObject** target, int* size) {
    ASSERT(!is_empty());
    if (front_ == rear_) {
      Entry e = emergency_stack_->RemoveLast();
      *target = e.obj_;
      *size = e.size_;
      return;
    }

    if (NewSpacePage::IsAtStart(reinterpret_cast<Address>(front_))) {
      NewSpacePage* front_page =
          NewSpacePage::FromAddress(reinterpret_cast<Address>(front_));
      ASSERT(!front_page->prev_page()->is_anchor());
      front_ =
          reinterpret_cast<intptr_t*>(front_page->prev_page()->area_end());
    }
    *target = reinterpret_cast<HeapObject*>(*(--front_));
    *size = static_cast<int>(*(--front_));
    // Assert no underflow.
    SemiSpace::AssertValidRange(reinterpret_cast<Address>(rear_),
                                reinterpret_cast<Address>(front_));
  }

 private:
  // The front of the queue is higher in the memory page chain than the rear.
  intptr_t* front_;
  intptr_t* rear_;
  intptr_t* limit_;

  bool guard_;

  static const int kEntrySizeInWords = 2;

  struct Entry {
    Entry(HeapObject* obj, int size) : obj_(obj), size_(size) { }

    HeapObject* obj_;
    int size_;
  };
  List<Entry>* emergency_stack_;

  Heap* heap_;

  void RelocateQueueHead();

  DISALLOW_COPY_AND_ASSIGN(PromotionQueue);
};
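
// Illustrative sketch (not part of this header): during a scavenge the
// collector drains the queue in a loop along these lines, one two-word
// (object, size) entry at a time.
//
//   HeapObject* target;
//   int size;
//   while (!heap->promotion_queue()->is_empty()) {
//     heap->promotion_queue()->remove(&target, &size);
//     // ... scan the promoted object's body for pointers to new space ...
//   }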


typedef void (*ScavengingCallback)(Map* map,
                                   HeapObject** slot,
                                   HeapObject* object);


// External strings table is a place where all external strings are
// registered.  We need to keep track of such strings to properly
// finalize them.
class ExternalStringTable {
 public:
  // Registers an external string.
  inline void AddString(String* string);

  inline void Iterate(ObjectVisitor* v);

  // Restores internal invariant and gets rid of collected strings.
  // Must be called after each Iterate() that modified the strings.
  void CleanUp();

  // Destroys all allocated memory.
  void TearDown();

 private:
  ExternalStringTable() { }

  friend class Heap;

  inline void Verify();

  inline void AddOldString(String* string);

  // Notifies the table that only a prefix of the new list is valid.
  inline void ShrinkNewStrings(int position);

  // To speed up scavenge collections, new space strings are kept
  // separate from old space strings.
  List<Object*> new_space_strings_;
  List<Object*> old_space_strings_;

  Heap* heap_;

  DISALLOW_COPY_AND_ASSIGN(ExternalStringTable);
};


enum ArrayStorageAllocationMode {
  DONT_INITIALIZE_ARRAY_ELEMENTS,
  INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE
};

class Heap {
 public:
  // Configure heap size before setup. Return false if the heap has been
  // set up already.
  bool ConfigureHeap(int max_semispace_size,
                     intptr_t max_old_gen_size,
                     intptr_t max_executable_size);
  bool ConfigureHeapDefault();

  // Initializes the global object heap. If create_heap_objects is true,
  // also creates the basic non-mutable objects.
  // Returns whether it succeeded.
  bool SetUp(bool create_heap_objects);

  // Destroys all memory allocated by the heap.
  void TearDown();

  // Set the stack limit in the roots_ array.  Some architectures generate
  // code that looks here, because it is faster than loading from the static
  // jslimit_/real_jslimit_ variable in the StackGuard.
  void SetStackLimits();

  // Returns whether SetUp has been called.
  bool HasBeenSetUp();

  // Returns the maximum amount of memory reserved for the heap.  For
  // the young generation, we reserve 4 times the amount needed for a
  // semi space.  The young generation consists of two semi spaces and
  // we reserve twice the amount needed for those in order to ensure
  // that new space can be aligned to its size.
  intptr_t MaxReserved() {
    return 4 * reserved_semispace_size_ + max_old_generation_size_;
  }
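  // For example (illustrative arithmetic, not a fixed configuration): with an
  // 8 MB reserved semispace and a 700 MB old generation limit, MaxReserved()
  // returns 4 * 8 MB + 700 MB = 732 MB.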
  int MaxSemiSpaceSize() { return max_semispace_size_; }
  int ReservedSemiSpaceSize() { return reserved_semispace_size_; }
  int InitialSemiSpaceSize() { return initial_semispace_size_; }
  intptr_t MaxOldGenerationSize() { return max_old_generation_size_; }
  intptr_t MaxExecutableSize() { return max_executable_size_; }

  // Returns the capacity of the heap in bytes w/o growing. Heap grows when
  // more spaces are needed until it reaches the limit.
  intptr_t Capacity();

  // Returns the amount of memory currently committed for the heap.
  intptr_t CommittedMemory();

  // Returns the amount of executable memory currently committed for the heap.
  intptr_t CommittedMemoryExecutable();

  // Returns the available bytes in space w/o growing.
  // Heap doesn't guarantee that it can allocate an object that requires
  // all available bytes. Check MaxHeapObjectSize() instead.
  intptr_t Available();

  // Returns the size of all objects residing in the heap.
  intptr_t SizeOfObjects();

  // Return the starting address and a mask for the new space.  And-masking an
  // address with the mask will result in the start address of the new space
  // for all addresses in either semispace.
  Address NewSpaceStart() { return new_space_.start(); }
  uintptr_t NewSpaceMask() { return new_space_.mask(); }
  Address NewSpaceTop() { return new_space_.top(); }

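  // Illustrative sketch (an assumption about intended use, in the spirit of
  // Heap::InNewSpace): masking any address in either semispace with
  // NewSpaceMask() yields NewSpaceStart(), so a containment test looks like:
  //
  //   bool in_new_space =
  //       (reinterpret_cast<uintptr_t>(addr) & heap->NewSpaceMask()) ==
  //       reinterpret_cast<uintptr_t>(heap->NewSpaceStart());
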
  NewSpace* new_space() { return &new_space_; }
  OldSpace* old_pointer_space() { return old_pointer_space_; }
  OldSpace* old_data_space() { return old_data_space_; }
  OldSpace* code_space() { return code_space_; }
  MapSpace* map_space() { return map_space_; }
  CellSpace* cell_space() { return cell_space_; }
  LargeObjectSpace* lo_space() { return lo_space_; }

  bool always_allocate() { return always_allocate_scope_depth_ != 0; }
  Address always_allocate_scope_depth_address() {
    return reinterpret_cast<Address>(&always_allocate_scope_depth_);
  }
  bool linear_allocation() {
    return linear_allocation_scope_depth_ != 0;
  }

  Address* NewSpaceAllocationTopAddress() {
    return new_space_.allocation_top_address();
  }
  Address* NewSpaceAllocationLimitAddress() {
    return new_space_.allocation_limit_address();
  }

  // Uncommit unused semi space.
  bool UncommitFromSpace() { return new_space_.UncommitFromSpace(); }

  // Allocates and initializes a new JavaScript object based on a
  // constructor.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateJSObject(
      JSFunction* constructor, PretenureFlag pretenure = NOT_TENURED);
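
  // Illustrative sketch (not part of this header; "constructor" is assumed
  // to be a JSFunction* in scope): callers typically unpack the MaybeObject
  // and propagate failures so the caller can trigger a GC and retry:
  //
  //   Object* obj;
  //   { MaybeObject* maybe = heap->AllocateJSObject(constructor);
  //     if (!maybe->ToObject(&obj)) return maybe;
  //   }
  //   JSObject* js_obj = JSObject::cast(obj);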

  // Allocate a JSArray with no elements.
  MUST_USE_RESULT MaybeObject* AllocateEmptyJSArray(
      ElementsKind elements_kind,
      PretenureFlag pretenure = NOT_TENURED) {
    return AllocateJSArrayAndStorage(elements_kind, 0, 0,
                                     DONT_INITIALIZE_ARRAY_ELEMENTS,
                                     pretenure);
  }

  // Allocate a JSArray with a specified length but elements that are left
  // uninitialized.
  MUST_USE_RESULT MaybeObject* AllocateJSArrayAndStorage(
      ElementsKind elements_kind,
      int length,
      int capacity,
      ArrayStorageAllocationMode mode = DONT_INITIALIZE_ARRAY_ELEMENTS,
      PretenureFlag pretenure = NOT_TENURED);
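
  // Illustrative sketch (not part of this header): a length-8 FAST_ELEMENTS
  // array whose backing store is pre-filled with the hole, so unwritten
  // elements read as absent:
  //
  //   MaybeObject* maybe_array = heap->AllocateJSArrayAndStorage(
  //       FAST_ELEMENTS, 8, 8, INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE);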

  // Allocate a JSArray backed by the given elements array.
  MUST_USE_RESULT MaybeObject* AllocateJSArrayWithElements(
      FixedArrayBase* array_base,
      ElementsKind elements_kind,
      PretenureFlag pretenure = NOT_TENURED);

  // Allocates and initializes a new global object based on a constructor.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateGlobalObject(JSFunction* constructor);

  // Returns a deep copy of the JavaScript object.
  // Properties and elements are copied too.
  // Returns failure if allocation failed.
  MUST_USE_RESULT MaybeObject* CopyJSObject(JSObject* source);

  // Allocates the function prototype.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateFunctionPrototype(JSFunction* function);

  // Allocates a Harmony proxy or function proxy.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateJSProxy(Object* handler,
                                               Object* prototype);

  MUST_USE_RESULT MaybeObject* AllocateJSFunctionProxy(Object* handler,
                                                       Object* call_trap,
                                                       Object* construct_trap,
                                                       Object* prototype);

  // Reinitialize a JSReceiver into an (empty) JS object of respective type and
  // size, but keeping the original prototype.  The receiver must have at least
  // the size of the new object.  The object is reinitialized and behaves as an
  // object that has been freshly allocated.
  // Returns failure if an error occurred, otherwise object.
  MUST_USE_RESULT MaybeObject* ReinitializeJSReceiver(JSReceiver* object,
                                                      InstanceType type,
                                                      int size);

  // Reinitialize a JSGlobalProxy based on a constructor.  The object
  // must have the same size as objects allocated using the
  // constructor.  The object is reinitialized and behaves as an
  // object that has been freshly allocated using the constructor.
  MUST_USE_RESULT MaybeObject* ReinitializeJSGlobalProxy(
      JSFunction* constructor, JSGlobalProxy* global);

  // Allocates and initializes a new JavaScript object based on a map.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateJSObjectFromMap(
      Map* map, PretenureFlag pretenure = NOT_TENURED);

  // Allocates a heap object based on the map.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this function does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* Allocate(Map* map, AllocationSpace space);

  // Allocates a JS Map in the heap.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this function does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateMap(
      InstanceType instance_type,
      int instance_size,
      ElementsKind elements_kind = FAST_ELEMENTS);

  // Allocates a partial map for bootstrapping.
  MUST_USE_RESULT MaybeObject* AllocatePartialMap(InstanceType instance_type,
                                                  int instance_size);

  // Allocate a map for the specified function.
  MUST_USE_RESULT MaybeObject* AllocateInitialMap(JSFunction* fun);

  // Allocates an empty code cache.
  MUST_USE_RESULT MaybeObject* AllocateCodeCache();

  // Allocates a serialized scope info.
  MUST_USE_RESULT MaybeObject* AllocateScopeInfo(int length);

  // Allocates an empty PolymorphicCodeCache.
  MUST_USE_RESULT MaybeObject* AllocatePolymorphicCodeCache();

  // Allocates a pre-tenured empty AccessorPair.
  MUST_USE_RESULT MaybeObject* AllocateAccessorPair();

  // Allocates an empty TypeFeedbackInfo.
  MUST_USE_RESULT MaybeObject* AllocateTypeFeedbackInfo();

  // Allocates an AliasedArgumentsEntry.
  MUST_USE_RESULT MaybeObject* AllocateAliasedArgumentsEntry(int slot);

  // Clear the Instanceof cache (used when a prototype changes).
  inline void ClearInstanceofCache();

  // Allocates and fully initializes a String.  There are two String
  // encodings: ASCII and two byte.  One should choose among the three string
  // allocation functions based on the encoding of the string buffer used to
  // initialize the string.
  //   - ...FromAscii initializes the string from a buffer that is ASCII
  //     encoded (it does not check that the buffer is ASCII encoded) and the
  //     result will be ASCII encoded.
  //   - ...FromUTF8 initializes the string from a buffer that is UTF-8
  //     encoded.  If the characters are all single-byte characters, the
  //     result will be ASCII encoded, otherwise it will be converted to two
  //     byte.
  //   - ...FromTwoByte initializes the string from a buffer that is two-byte
  //     encoded.  If the characters are all single-byte characters, the
  //     result will be converted to ASCII, otherwise it will be left as
  //     two-byte.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateStringFromAscii(
      Vector<const char> str,
      PretenureFlag pretenure = NOT_TENURED);
  MUST_USE_RESULT inline MaybeObject* AllocateStringFromUtf8(
      Vector<const char> str,
      PretenureFlag pretenure = NOT_TENURED);
  MUST_USE_RESULT MaybeObject* AllocateStringFromUtf8Slow(
      Vector<const char> str,
      PretenureFlag pretenure = NOT_TENURED);
  MUST_USE_RESULT MaybeObject* AllocateStringFromTwoByte(
      Vector<const uc16> str,
      PretenureFlag pretenure = NOT_TENURED);
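
  // Illustrative sketch (not part of this header): picking an allocator by
  // the encoding of the source buffer, per the rules above.
  //
  //   // Known 7-bit ASCII data: no checking or conversion is performed.
  //   MaybeObject* a = heap->AllocateStringFromAscii(CStrVector("hello"));
  //   // Possibly multi-byte UTF-8 data: stays ASCII if every character is
  //   // single-byte, otherwise the result is converted to two-byte.
  //   MaybeObject* b = heap->AllocateStringFromUtf8(CStrVector("caf\xc3\xa9"));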

  // Allocates a symbol in old space based on the character stream.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this function does not perform a garbage collection.
  MUST_USE_RESULT inline MaybeObject* AllocateSymbol(Vector<const char> str,
                                                     int chars,
                                                     uint32_t hash_field);

  MUST_USE_RESULT inline MaybeObject* AllocateAsciiSymbol(
        Vector<const char> str,
        uint32_t hash_field);

  MUST_USE_RESULT inline MaybeObject* AllocateTwoByteSymbol(
        Vector<const uc16> str,
        uint32_t hash_field);

  MUST_USE_RESULT MaybeObject* AllocateInternalSymbol(
      unibrow::CharacterStream* buffer, int chars, uint32_t hash_field);

  MUST_USE_RESULT MaybeObject* AllocateExternalSymbol(
      Vector<const char> str,
      int chars);

  // Allocates and partially initializes a String.  There are two String
  // encodings: ASCII and two byte.  These functions allocate a string of the
  // given length and set its map and length fields.  The characters of the
  // string are uninitialized.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateRawAsciiString(
      int length,
      PretenureFlag pretenure = NOT_TENURED);
  MUST_USE_RESULT MaybeObject* AllocateRawTwoByteString(
      int length,
      PretenureFlag pretenure = NOT_TENURED);

  // Computes a single character string where the character has the given
  // code.  A cache is used for ASCII codes.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed. Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* LookupSingleCharacterStringFromCode(
      uint16_t code);

  // Allocate a byte array of the specified length.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateByteArray(int length,
                                                 PretenureFlag pretenure);

  // Allocate a non-tenured byte array of the specified length.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateByteArray(int length);

  // Allocates an external array of the specified length and type.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateExternalArray(
      int length,
      ExternalArrayType array_type,
      void* external_pointer,
      PretenureFlag pretenure);

  // Allocate a tenured JS global property cell.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateJSGlobalPropertyCell(Object* value);

  // Allocates a fixed array initialized with undefined values.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateFixedArray(int length,
                                                  PretenureFlag pretenure);
  // Allocates a fixed array initialized with undefined values.
  MUST_USE_RESULT MaybeObject* AllocateFixedArray(int length);

  // Allocates an uninitialized fixed array. It must be filled by the caller.
  //
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateUninitializedFixedArray(int length);

  // Make a copy of src and return it. Returns
  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
  MUST_USE_RESULT inline MaybeObject* CopyFixedArray(FixedArray* src);

  // Make a copy of src, set the map, and return the copy. Returns
  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
  MUST_USE_RESULT MaybeObject* CopyFixedArrayWithMap(FixedArray* src, Map* map);

  // Make a copy of src and return it. Returns
  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
  MUST_USE_RESULT inline MaybeObject* CopyFixedDoubleArray(
      FixedDoubleArray* src);

  // Make a copy of src, set the map, and return the copy. Returns
  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
  MUST_USE_RESULT MaybeObject* CopyFixedDoubleArrayWithMap(
      FixedDoubleArray* src, Map* map);

  // Allocates a fixed array initialized with the hole values.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateFixedArrayWithHoles(
      int length,
      PretenureFlag pretenure = NOT_TENURED);

  MUST_USE_RESULT MaybeObject* AllocateRawFixedDoubleArray(
      int length,
      PretenureFlag pretenure);

  // Allocates a fixed double array with uninitialized values. Returns
  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateUninitializedFixedDoubleArray(
      int length,
      PretenureFlag pretenure = NOT_TENURED);

  // Allocates a fixed double array with hole values. Returns
  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateFixedDoubleArrayWithHoles(
      int length,
      PretenureFlag pretenure = NOT_TENURED);

  // AllocateHashTable is identical to AllocateFixedArray except
  // that the resulting object has hash_table_map as map.
  MUST_USE_RESULT MaybeObject* AllocateHashTable(
      int length, PretenureFlag pretenure = NOT_TENURED);

  // Allocate a global (but otherwise uninitialized) context.
  MUST_USE_RESULT MaybeObject* AllocateGlobalContext();

  // Allocate a function context.
  MUST_USE_RESULT MaybeObject* AllocateFunctionContext(int length,
                                                       JSFunction* function);

  // Allocate a catch context.
  MUST_USE_RESULT MaybeObject* AllocateCatchContext(JSFunction* function,
                                                    Context* previous,
                                                    String* name,
                                                    Object* thrown_object);
  // Allocate a 'with' context.
  MUST_USE_RESULT MaybeObject* AllocateWithContext(JSFunction* function,
                                                   Context* previous,
                                                   JSObject* extension);

  // Allocate a block context.
  MUST_USE_RESULT MaybeObject* AllocateBlockContext(JSFunction* function,
                                                    Context* previous,
                                                    ScopeInfo* info);

  // Allocates a new utility object in the old generation.
  MUST_USE_RESULT MaybeObject* AllocateStruct(InstanceType type);

  // Allocates a function initialized with a shared part.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateFunction(
      Map* function_map,
      SharedFunctionInfo* shared,
      Object* prototype,
      PretenureFlag pretenure = TENURED);

  // Arguments object size.
  static const int kArgumentsObjectSize =
      JSObject::kHeaderSize + 2 * kPointerSize;
  // The strict mode arguments object has no callee, so it is smaller.
  static const int kArgumentsObjectSizeStrict =
      JSObject::kHeaderSize + 1 * kPointerSize;
  // Indices for direct access into argument objects.
  static const int kArgumentsLengthIndex = 0;
  // callee is only valid in non-strict mode.
  static const int kArgumentsCalleeIndex = 1;

  // Allocates an arguments object - optionally with an elements array.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateArgumentsObject(
      Object* callee, int length);

  // Same as NewNumberFromDouble, but may return a preallocated/immutable
  // number object (e.g., minus_zero_value_, nan_value_).
  MUST_USE_RESULT MaybeObject* NumberFromDouble(
      double value, PretenureFlag pretenure = NOT_TENURED);

  // Allocates a HeapNumber from value.
  MUST_USE_RESULT MaybeObject* AllocateHeapNumber(
      double value,
      PretenureFlag pretenure);
  // pretenure = NOT_TENURED
  MUST_USE_RESULT MaybeObject* AllocateHeapNumber(double value);

  // Converts an int into either a Smi or a HeapNumber object.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT inline MaybeObject* NumberFromInt32(
      int32_t value, PretenureFlag pretenure = NOT_TENURED);

  // Converts an int into either a Smi or a HeapNumber object.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT inline MaybeObject* NumberFromUint32(
      uint32_t value, PretenureFlag pretenure = NOT_TENURED);

  // Allocates a new foreign object.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateForeign(
      Address address, PretenureFlag pretenure = NOT_TENURED);

  // Allocates a new SharedFunctionInfo object.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateSharedFunctionInfo(Object* name);

  // Allocates a new JSMessageObject object.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note that this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateJSMessageObject(
      String* type,
      JSArray* arguments,
      int start_position,
      int end_position,
      Object* script,
      Object* stack_trace,
      Object* stack_frames);

  // Allocates a new cons string object.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateConsString(String* first,
                                                  String* second);

  // Allocates a new sub string object which is a substring of an underlying
  // string buffer stretching from the index start (inclusive) to the index
  // end (exclusive).
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateSubString(
      String* buffer,
      int start,
      int end,
      PretenureFlag pretenure = NOT_TENURED);

  // Allocate a new external string object, which is backed by a string
  // resource that resides outside the V8 heap.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateExternalStringFromAscii(
      const ExternalAsciiString::Resource* resource);
  MUST_USE_RESULT MaybeObject* AllocateExternalStringFromTwoByte(
      const ExternalTwoByteString::Resource* resource);

  // Finalizes an external string by deleting the associated external
  // data and clearing the resource pointer.
  inline void FinalizeExternalString(String* string);

  // Allocates an uninitialized object.  The memory is non-executable if the
  // hardware and OS allow.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this function does not perform a garbage collection.
  MUST_USE_RESULT inline MaybeObject* AllocateRaw(int size_in_bytes,
                                                  AllocationSpace space,
                                                  AllocationSpace retry_space);

  // Initialize a filler object to keep the ability to iterate over the heap
  // when shortening objects.
  void CreateFillerObjectAt(Address addr, int size);

  // Makes a new native code object.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed. On success, the pointer to the Code object is stored in the
  // self_reference. This allows generated code to reference its own Code
  // object by containing this pointer.
  // Please note this function does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* CreateCode(const CodeDesc& desc,
                                          Code::Flags flags,
                                          Handle<Object> self_reference,
                                          bool immovable = false);

  MUST_USE_RESULT MaybeObject* CopyCode(Code* code);

  // Copy the code and scope info part of the code object, but insert
  // the provided data as the relocation information.
  MUST_USE_RESULT MaybeObject* CopyCode(Code* code, Vector<byte> reloc_info);

  // Finds the symbol for string in the symbol table.
  // If not found, a new symbol is added to the table and returned.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if allocation
  // failed.
  // Please note this function does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* LookupSymbol(Vector<const char> str);
  MUST_USE_RESULT MaybeObject* LookupAsciiSymbol(Vector<const char> str);
  MUST_USE_RESULT MaybeObject* LookupTwoByteSymbol(Vector<const uc16> str);
  MUST_USE_RESULT MaybeObject* LookupAsciiSymbol(const char* str) {
    return LookupSymbol(CStrVector(str));
  }
  MUST_USE_RESULT MaybeObject* LookupSymbol(String* str);
  MUST_USE_RESULT MaybeObject* LookupAsciiSymbol(Handle<SeqAsciiString> string,
                                                 int from,
                                                 int length);

  bool LookupSymbolIfExists(String* str, String** symbol);
  bool LookupTwoCharsSymbolIfExists(String* str, String** symbol);

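  // Illustrative sketch (not part of this header): interning a C string as a
  // symbol, using the same unpacking idiom as the allocators above.
  //
  //   Object* symbol;
  //   { MaybeObject* maybe = heap->LookupAsciiSymbol("length");
  //     if (!maybe->ToObject(&symbol)) return maybe;
  //   }
  //   // A later LookupAsciiSymbol("length") yields this same symbol object.
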
  // Compute the matching symbol map for a string if possible.
  // NULL is returned if string is in new space or not flattened.
  Map* SymbolMapForString(String* str);

  // Tries to flatten a string before compare operation.
  //
  // Returns a failure in case it was decided that flattening was
  // necessary and failed.  Note, if flattening is not necessary the
  // string might stay non-flat even when a failure is not returned.
  //
  // Please note this function does not perform a garbage collection.
  MUST_USE_RESULT inline MaybeObject* PrepareForCompare(String* str);

  // Converts the given boolean condition to a JavaScript boolean value.
  inline Object* ToBoolean(bool condition);

  // Code that should be run before and after each GC.  Includes some
  // reporting/verification activities when compiled with DEBUG set.
  void GarbageCollectionPrologue();
  void GarbageCollectionEpilogue();

  // Performs a garbage collection operation.
  // Returns whether there is a chance that another major GC could
  // collect more garbage.
  bool CollectGarbage(AllocationSpace space,
                      GarbageCollector collector,
                      const char* gc_reason,
                      const char* collector_reason);

  // Performs a garbage collection operation.
  // Returns whether there is a chance that another major GC could
  // collect more garbage.
  inline bool CollectGarbage(AllocationSpace space,
                             const char* gc_reason = NULL);

  static const int kNoGCFlags = 0;
  static const int kSweepPreciselyMask = 1;
  static const int kReduceMemoryFootprintMask = 2;
  static const int kAbortIncrementalMarkingMask = 4;

  // Making the heap iterable requires us to sweep precisely and abort any
  // incremental marking as well.
  static const int kMakeHeapIterableMask =
      kSweepPreciselyMask | kAbortIncrementalMarkingMask;

  // Performs a full garbage collection.  If (flags & kMakeHeapIterableMask) is
  // non-zero, then the slower precise sweeper is used, which leaves the heap
  // in a state where we can iterate over the heap visiting all objects.
  void CollectAllGarbage(int flags, const char* gc_reason = NULL);

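  // Illustrative sketch (not part of this header): the flags form a bitmask,
  // so a full GC that both leaves the heap iterable and trims memory is:
  //
  //   heap->CollectAllGarbage(Heap::kMakeHeapIterableMask |
  //                           Heap::kReduceMemoryFootprintMask,
  //                           "heap snapshot");  // reason string is arbitrary
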
  // Last hope GC, should try to squeeze as much as possible.
  void CollectAllAvailableGarbage(const char* gc_reason = NULL);

  // Check whether the heap is currently iterable.
  bool IsHeapIterable();

  // Ensure that we have swept all spaces in such a way that we can iterate
  // over all objects.  May cause a GC.
  void EnsureHeapIsIterable();

  // Notify the heap that a context has been disposed.
  int NotifyContextDisposed() { return ++contexts_disposed_; }

  // Utility to invoke the scavenger. This is needed in test code to
  // ensure correct callback for weak global handles.
  void PerformScavenge();

  inline void increment_scan_on_scavenge_pages() {
    scan_on_scavenge_pages_++;
    if (FLAG_gc_verbose) {
      PrintF("Scan-on-scavenge pages: %d\n", scan_on_scavenge_pages_);
    }
  }

  inline void decrement_scan_on_scavenge_pages() {
    scan_on_scavenge_pages_--;
    if (FLAG_gc_verbose) {
      PrintF("Scan-on-scavenge pages: %d\n", scan_on_scavenge_pages_);
    }
  }

  PromotionQueue* promotion_queue() { return &promotion_queue_; }

#ifdef DEBUG
  // Utility used with flag gc-greedy.
  void GarbageCollectionGreedyCheck();
#endif

  void AddGCPrologueCallback(
      GCEpilogueCallback callback, GCType gc_type_filter);
  void RemoveGCPrologueCallback(GCEpilogueCallback callback);

  void AddGCEpilogueCallback(
      GCEpilogueCallback callback, GCType gc_type_filter);
  void RemoveGCEpilogueCallback(GCEpilogueCallback callback);

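  // Illustrative sketch (not part of this header): registering an epilogue
  // callback that fires only after full (mark-sweep/compact) collections.
  //
  //   static void AfterFullGC(GCType type, GCCallbackFlags flags) {
  //     // ... e.g. sample heap statistics after each full GC ...
  //   }
  //   heap->AddGCEpilogueCallback(AfterFullGC, kGCTypeMarkSweepCompact);
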
SetGlobalGCPrologueCallback(GCCallback callback)1103   void SetGlobalGCPrologueCallback(GCCallback callback) {
1104     ASSERT((callback == NULL) ^ (global_gc_prologue_callback_ == NULL));
1105     global_gc_prologue_callback_ = callback;
1106   }
SetGlobalGCEpilogueCallback(GCCallback callback)1107   void SetGlobalGCEpilogueCallback(GCCallback callback) {
1108     ASSERT((callback == NULL) ^ (global_gc_epilogue_callback_ == NULL));
1109     global_gc_epilogue_callback_ = callback;
1110   }
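
  // Example (editorial sketch): registering a prologue callback that counts
  // mark-compact collections; the gc_type_filter argument restricts which GC
  // types trigger the callback (GCType and GCCallbackFlags come from the
  // public v8.h API):
  //
  //   static int full_gc_count = 0;
  //   static void CountFullGCs(GCType type, GCCallbackFlags flags) {
  //     full_gc_count++;
  //   }
  //   heap->AddGCPrologueCallback(CountFullGCs, kGCTypeMarkSweepCompact);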
1111 
1112   // Heap root getters.  We have versions with and without type::cast() here.
1113   // You can't use type::cast during GC because the assert fails.
1114   // TODO(1490): Try removing the unchecked accessors, now that GC marking does
1115   // not corrupt the map.
1116 #define ROOT_ACCESSOR(type, name, camel_name)                                  \
1117   type* name() {                                                               \
1118     return type::cast(roots_[k##camel_name##RootIndex]);                       \
1119   }                                                                            \
1120   type* raw_unchecked_##name() {                                               \
1121     return reinterpret_cast<type*>(roots_[k##camel_name##RootIndex]);          \
1122   }
1123   ROOT_LIST(ROOT_ACCESSOR)
1124 #undef ROOT_ACCESSOR
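
  // Example (editorial): for the ROOT_LIST entry
  // (Oddball, undefined_value, UndefinedValue), the macro above expands to
  //
  //   Oddball* undefined_value() {
  //     return Oddball::cast(roots_[kUndefinedValueRootIndex]);
  //   }
  //   Oddball* raw_unchecked_undefined_value() {
  //     return reinterpret_cast<Oddball*>(roots_[kUndefinedValueRootIndex]);
  //   }
  //
  // so callers simply write heap->undefined_value().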
1125 
1126 // Utility type maps
1127 #define STRUCT_MAP_ACCESSOR(NAME, Name, name)                                  \
1128     Map* name##_map() {                                                        \
1129       return Map::cast(roots_[k##Name##MapRootIndex]);                         \
1130     }
1131   STRUCT_LIST(STRUCT_MAP_ACCESSOR)
1132 #undef STRUCT_MAP_ACCESSOR
1133 
1134 #define SYMBOL_ACCESSOR(name, str) String* name() {                            \
1135     return String::cast(roots_[k##name##RootIndex]);                           \
1136   }
1137   SYMBOL_LIST(SYMBOL_ACCESSOR)
1138 #undef SYMBOL_ACCESSOR
1139 
1140   // The hidden_symbol is special because it is the empty string, but it
1141   // does not match any string when looked up in properties.
1142   String* hidden_symbol() { return hidden_symbol_; }
1143 
1144   void set_global_contexts_list(Object* object) {
1145     global_contexts_list_ = object;
1146   }
1147   Object* global_contexts_list() { return global_contexts_list_; }
1148 
1149   // Number of mark-sweeps.
1150   int ms_count() { return ms_count_; }
1151 
1152   // Iterates over all roots in the heap.
1153   void IterateRoots(ObjectVisitor* v, VisitMode mode);
1154   // Iterates over all strong roots in the heap.
1155   void IterateStrongRoots(ObjectVisitor* v, VisitMode mode);
1156   // Iterates over all the other roots in the heap.
1157   void IterateWeakRoots(ObjectVisitor* v, VisitMode mode);
1158 
1159   // Iterates over pointers into the from-semispace of new space found in
1160   // the memory interval from start to end.
1161   void IterateAndMarkPointersToFromSpace(Address start,
1162                                          Address end,
1163                                          ObjectSlotCallback callback);
1164 
1165   // Returns whether the object resides in new space.
1166   inline bool InNewSpace(Object* object);
1167   inline bool InNewSpace(Address addr);
1168   inline bool InNewSpacePage(Address addr);
1169   inline bool InFromSpace(Object* object);
1170   inline bool InToSpace(Object* object);
1171 
1172   // Checks whether an address/object is in the heap (including auxiliary
1173   // area and unused area).
1174   bool Contains(Address addr);
1175   bool Contains(HeapObject* value);
1176 
1177   // Checks whether an address/object is in a space.
1178   // Currently used by tests, serialization and heap verification only.
1179   bool InSpace(Address addr, AllocationSpace space);
1180   bool InSpace(HeapObject* value, AllocationSpace space);
1181 
1182   // Finds out which space an object should get promoted to based on its type.
1183   inline OldSpace* TargetSpace(HeapObject* object);
1184   inline AllocationSpace TargetSpaceId(InstanceType type);
1185 
1186   // Sets the code stubs root (only used when expanding the dictionary).
1187   void public_set_code_stubs(UnseededNumberDictionary* value) {
1188     roots_[kCodeStubsRootIndex] = value;
1189   }
1190 
1191   // Support for computing object sizes for old objects during GCs. Returns
1192   // a function that is guaranteed to be safe for computing object sizes in
1193   // the current GC phase.
1194   HeapObjectCallback GcSafeSizeOfOldObjectFunction() {
1195     return gc_safe_size_of_old_object_;
1196   }
1197 
1198   // Sets the non_monomorphic_cache_ (only used when expanding the dictionary).
1199   void public_set_non_monomorphic_cache(UnseededNumberDictionary* value) {
1200     roots_[kNonMonomorphicCacheRootIndex] = value;
1201   }
1202 
1203   void public_set_empty_script(Script* script) {
1204     roots_[kEmptyScriptRootIndex] = script;
1205   }
1206 
1207   void public_set_store_buffer_top(Address* top) {
1208     roots_[kStoreBufferTopRootIndex] = reinterpret_cast<Smi*>(top);
1209   }
1210 
1211   // Update the next script id.
1212   inline void SetLastScriptId(Object* last_script_id);
1213 
1214   // Generated code can embed this address to get access to the roots.
1215   Object** roots_array_start() { return roots_; }
1216 
1217   Address* store_buffer_top_address() {
1218     return reinterpret_cast<Address*>(&roots_[kStoreBufferTopRootIndex]);
1219   }
1220 
1221   // Get address of global contexts list for serialization support.
1222   Object** global_contexts_list_address() {
1223     return &global_contexts_list_;
1224   }
1225 
1226 #ifdef DEBUG
1227   void Print();
1228   void PrintHandles();
1229 
1230   // Verify the heap is in its normal state before or after a GC.
1231   void Verify();
1232 
1233   // Verify that AccessorPairs are not shared, i.e. make sure that they have
1234   // exactly one pointer to them.
1235   void VerifyNoAccessorPairSharing();
1236 
1237   void OldPointerSpaceCheckStoreBuffer();
1238   void MapSpaceCheckStoreBuffer();
1239   void LargeObjectSpaceCheckStoreBuffer();
1240 
1241   // Report heap statistics.
1242   void ReportHeapStatistics(const char* title);
1243   void ReportCodeStatistics(const char* title);
1244 
1245   // Fill in bogus values in from space
1246   void ZapFromSpace();
1247 #endif
1248 
1249   // Print short heap statistics.
1250   void PrintShortHeapStatistics();
1251 
1252   // Makes a new symbol object
1253   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
1254   // failed.
1255   // Please note this function does not perform a garbage collection.
1256   MUST_USE_RESULT MaybeObject* CreateSymbol(
1257       const char* str, int length, int hash);
1258   MUST_USE_RESULT MaybeObject* CreateSymbol(String* str);
1259 
1260   // Write barrier support for address[offset] = o.
1261   inline void RecordWrite(Address address, int offset);
1262 
1263   // Write barrier support for address[start : start + len[ = o.
1264   inline void RecordWrites(Address address, int start, int len);
1265 
1266   // Given an address occupied by a live code object, return that object.
1267   Object* FindCodeObject(Address a);
1268 
1269   // Invoke Shrink on shrinkable spaces.
1270   void Shrink();
1271 
1272   enum HeapState { NOT_IN_GC, SCAVENGE, MARK_COMPACT };
1273   inline HeapState gc_state() { return gc_state_; }
1274 
1275   inline bool IsInGCPostProcessing() { return gc_post_processing_depth_ > 0; }
1276 
1277 #ifdef DEBUG
1278   bool IsAllocationAllowed() { return allocation_allowed_; }
1279   inline bool allow_allocation(bool enable);
1280 
1281   bool disallow_allocation_failure() {
1282     return disallow_allocation_failure_;
1283   }
1284 
1285   void TracePathToObject(Object* target);
1286   void TracePathToGlobal();
1287 #endif
1288 
1289   // Callback function passed to Heap::Iterate etc.  Copies an object if
1290   // necessary, the object might be promoted to an old space.  The caller must
1291   // ensure the precondition that the object is (a) a heap object and (b) in
1292   // the heap's from space.
1293   static inline void ScavengePointer(HeapObject** p);
1294   static inline void ScavengeObject(HeapObject** p, HeapObject* object);
1295 
1296   // Commits from space if it is uncommitted.
1297   void EnsureFromSpaceIsCommitted();
1298 
1299   // Support for partial snapshots.  After calling this we can allocate a
1300   // certain number of bytes using only linear allocation (with a
1301   // LinearAllocationScope and an AlwaysAllocateScope) without using freelists
1302   // or causing a GC.  Garbage collections are performed as needed until the
1303   // requested space can be reserved.  For paged spaces the space requested must
1304   // include the space wasted at the end of each page when allocating linearly.
1305   void ReserveSpace(
1306     int new_space_size,
1307     int pointer_space_size,
1308     int data_space_size,
1309     int code_space_size,
1310     int map_space_size,
1311     int cell_space_size,
1312     int large_object_size);
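
  // Example (editorial sketch; the sizes are made up): reserving 64KB of new
  // space and 512KB of old pointer space for a partial snapshot, with nothing
  // in the remaining spaces:
  //
  //   heap->ReserveSpace(64 * KB, 512 * KB, 0, 0, 0, 0, 0);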
1313 
1314   //
1315   // Support for the API.
1316   //
1317 
1318   bool CreateApiObjects();
1319 
1320   // Attempt to find the number in a small cache.  If we find it, return
1321   // the string representation of the number.  Otherwise return undefined.
1322   Object* GetNumberStringCache(Object* number);
1323 
1324   // Update the cache with a new number-string pair.
1325   void SetNumberStringCache(Object* number, String* str);
1326 
1327   // Adjusts the amount of registered external memory.
1328   // Returns the adjusted value.
1329   inline int AdjustAmountOfExternalAllocatedMemory(int change_in_bytes);
1330 
1331   // Allocate uninitialized fixed array.
1332   MUST_USE_RESULT MaybeObject* AllocateRawFixedArray(int length);
1333   MUST_USE_RESULT MaybeObject* AllocateRawFixedArray(int length,
1334                                                      PretenureFlag pretenure);
1335 
1336   inline intptr_t PromotedTotalSize() {
1337     return PromotedSpaceSize() + PromotedExternalMemorySize();
1338   }
1339 
1340   // True if we have reached the allocation limit in the old generation that
1341   // should force the next GC (caused normally) to be a full one.
1342   inline bool OldGenerationPromotionLimitReached() {
1343     return PromotedTotalSize() > old_gen_promotion_limit_;
1344   }
1345 
1346   inline intptr_t OldGenerationSpaceAvailable() {
1347     return old_gen_allocation_limit_ - PromotedTotalSize();
1348   }
1349 
1350   inline intptr_t OldGenerationCapacityAvailable() {
1351     return max_old_generation_size_ - PromotedTotalSize();
1352   }
1353 
1354   static const intptr_t kMinimumPromotionLimit = 5 * Page::kPageSize;
1355   static const intptr_t kMinimumAllocationLimit =
1356       8 * (Page::kPageSize > MB ? Page::kPageSize : MB);
1357 
1358   // When we sweep lazily we initially guess that there is no garbage on the
1359   // heap and set the limits for the next GC accordingly.  As we sweep we find
1360   // out that some of the pages contained garbage and we have to adjust
1361   // downwards the size of the heap.  This means the limits that control the
1362   // timing of the next GC also need to be adjusted downwards.
1363   void LowerOldGenLimits(intptr_t adjustment) {
1364     size_of_old_gen_at_last_old_space_gc_ -= adjustment;
1365     old_gen_promotion_limit_ =
1366         OldGenPromotionLimit(size_of_old_gen_at_last_old_space_gc_);
1367     old_gen_allocation_limit_ =
1368         OldGenAllocationLimit(size_of_old_gen_at_last_old_space_gc_);
1369   }
1370 
1371   intptr_t OldGenPromotionLimit(intptr_t old_gen_size) {
1372     const int divisor = FLAG_stress_compaction ? 10 : 3;
1373     intptr_t limit =
1374         Max(old_gen_size + old_gen_size / divisor, kMinimumPromotionLimit);
1375     limit += new_space_.Capacity();
1376     limit *= old_gen_limit_factor_;
1377     intptr_t halfway_to_the_max = (old_gen_size + max_old_generation_size_) / 2;
1378     return Min(limit, halfway_to_the_max);
1379   }
1380 
1381   intptr_t OldGenAllocationLimit(intptr_t old_gen_size) {
1382     const int divisor = FLAG_stress_compaction ? 8 : 2;
1383     intptr_t limit =
1384         Max(old_gen_size + old_gen_size / divisor, kMinimumAllocationLimit);
1385     limit += new_space_.Capacity();
1386     limit *= old_gen_limit_factor_;
1387     intptr_t halfway_to_the_max = (old_gen_size + max_old_generation_size_) / 2;
1388     return Min(limit, halfway_to_the_max);
1389   }
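
  // Worked example for OldGenPromotionLimit (editorial; all numbers are
  // assumed): with old_gen_size of 60MB, FLAG_stress_compaction off
  // (divisor 3), a new space capacity of 2MB, old_gen_limit_factor_ of 1,
  // and max_old_generation_size_ of 700MB:
  //
  //   limit              = Max(60MB + 60MB / 3, kMinimumPromotionLimit)
  //                      = Max(80MB, 5MB)        = 80MB
  //   limit             += 2MB                   = 82MB
  //   limit             *= 1                     = 82MB
  //   halfway_to_the_max = (60MB + 700MB) / 2    = 380MB
  //   result             = Min(82MB, 380MB)      = 82MB
  //
  // (kMinimumPromotionLimit is 5MB when Page::kPageSize is 1MB.)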
1390 
1391   // Implements the corresponding V8 API function.
1392   bool IdleNotification(int hint);
1393 
1394   // Declare all the root indices.
1395   enum RootListIndex {
1396 #define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
1397     STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION)
1398 #undef ROOT_INDEX_DECLARATION
1399 
1400 // Utility type maps
1401 #define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex,
1402   STRUCT_LIST(DECLARE_STRUCT_MAP)
1403 #undef DECLARE_STRUCT_MAP
1404 
1405 #define SYMBOL_INDEX_DECLARATION(name, str) k##name##RootIndex,
1406     SYMBOL_LIST(SYMBOL_INDEX_DECLARATION)
1407 #undef SYMBOL_INDEX_DECLARATION
1408 
1409     kSymbolTableRootIndex,
1410     kStrongRootListLength = kSymbolTableRootIndex,
1411     kRootListLength
1412   };
1413 
1414   MUST_USE_RESULT MaybeObject* NumberToString(
1415       Object* number, bool check_number_string_cache = true);
1416   MUST_USE_RESULT MaybeObject* Uint32ToString(
1417       uint32_t value, bool check_number_string_cache = true);
1418 
1419   Map* MapForExternalArrayType(ExternalArrayType array_type);
1420   RootListIndex RootIndexForExternalArrayType(
1421       ExternalArrayType array_type);
1422 
1423   void RecordStats(HeapStats* stats, bool take_snapshot = false);
1424 
1425   // Copy a block of memory from src to dst.  The size of the block should
1426   // be pointer-size aligned.
1427   static inline void CopyBlock(Address dst, Address src, int byte_size);
1428 
1429   // Optimized version of memmove for blocks with pointer size aligned sizes and
1430   // pointer size aligned addresses.
1431   static inline void MoveBlock(Address dst, Address src, int byte_size);
1432 
1433   // Check new space expansion criteria and expand semispaces if it was hit.
1434   void CheckNewSpaceExpansionCriteria();
1435 
1436   inline void IncrementYoungSurvivorsCounter(int survived) {
1437     ASSERT(survived >= 0);
1438     young_survivors_after_last_gc_ = survived;
1439     survived_since_last_expansion_ += survived;
1440   }
1441 
1442   inline bool NextGCIsLikelyToBeFull() {
1443     if (FLAG_gc_global) return true;
1444 
1445     intptr_t total_promoted = PromotedTotalSize();
1446 
1447     intptr_t adjusted_promotion_limit =
1448         old_gen_promotion_limit_ - new_space_.Capacity();
1449 
1450     if (total_promoted >= adjusted_promotion_limit) return true;
1451 
1452     intptr_t adjusted_allocation_limit =
1453         old_gen_allocation_limit_ - new_space_.Capacity() / 5;
1454 
1455     if (PromotedSpaceSize() >= adjusted_allocation_limit) return true;
1456 
1457     return false;
1458   }
1459 
1460 
1461   void UpdateNewSpaceReferencesInExternalStringTable(
1462       ExternalStringTableUpdaterCallback updater_func);
1463 
1464   void UpdateReferencesInExternalStringTable(
1465       ExternalStringTableUpdaterCallback updater_func);
1466 
1467   void ProcessWeakReferences(WeakObjectRetainer* retainer);
1468 
1469   void VisitExternalResources(v8::ExternalResourceVisitor* visitor);
1470 
1471   // Helper function that governs the promotion policy from new space to
1472   // old.  If the object's old address lies below the new space's age
1473   // mark or if we've already filled the bottom 1/16th of the to space,
1474   // we try to promote this object.
1475   inline bool ShouldBePromoted(Address old_address, int object_size);
1476 
1477   int MaxObjectSizeInNewSpace() { return kMaxObjectSizeInNewSpace; }
1478 
1479   void ClearJSFunctionResultCaches();
1480 
1481   void ClearNormalizedMapCaches();
1482 
1483   // Clears the cache of ICs related to this map.
1484   void ClearCacheOnMap(Map* map) {
1485     if (FLAG_cleanup_code_caches_at_gc) {
1486       map->ClearCodeCache(this);
1487     }
1488   }
1489 
1490   GCTracer* tracer() { return tracer_; }
1491 
1492   // Returns the size of objects residing in non new spaces.
1493   intptr_t PromotedSpaceSize();
1494   intptr_t PromotedSpaceSizeOfObjects();
1495 
1496   double total_regexp_code_generated() { return total_regexp_code_generated_; }
1497   void IncreaseTotalRegexpCodeGenerated(int size) {
1498     total_regexp_code_generated_ += size;
1499   }
1500 
1501   // Returns maximum GC pause.
1502   int get_max_gc_pause() { return max_gc_pause_; }
1503 
1504   // Returns maximum size of objects alive after GC.
1505   intptr_t get_max_alive_after_gc() { return max_alive_after_gc_; }
1506 
1507   // Returns minimal interval between two subsequent collections.
1508   int get_min_in_mutator() { return min_in_mutator_; }
1509 
1510   MarkCompactCollector* mark_compact_collector() {
1511     return &mark_compact_collector_;
1512   }
1513 
1514   StoreBuffer* store_buffer() {
1515     return &store_buffer_;
1516   }
1517 
1518   Marking* marking() {
1519     return &marking_;
1520   }
1521 
1522   IncrementalMarking* incremental_marking() {
1523     return &incremental_marking_;
1524   }
1525 
1526   bool IsSweepingComplete() {
1527     return old_data_space()->IsSweepingComplete() &&
1528            old_pointer_space()->IsSweepingComplete();
1529   }
1530 
1531   bool AdvanceSweepers(int step_size) {
1532     bool sweeping_complete = old_data_space()->AdvanceSweeper(step_size);
1533     sweeping_complete &= old_pointer_space()->AdvanceSweeper(step_size);
1534     return sweeping_complete;
1535   }
1536 
1537   ExternalStringTable* external_string_table() {
1538     return &external_string_table_;
1539   }
1540 
1541   // Returns the current sweep generation.
1542   int sweep_generation() {
1543     return sweep_generation_;
1544   }
1545 
1546   inline Isolate* isolate();
1547 
1548   inline void CallGlobalGCPrologueCallback() {
1549     if (global_gc_prologue_callback_ != NULL) global_gc_prologue_callback_();
1550   }
1551 
1552   inline void CallGlobalGCEpilogueCallback() {
1553     if (global_gc_epilogue_callback_ != NULL) global_gc_epilogue_callback_();
1554   }
1555 
1556   inline bool OldGenerationAllocationLimitReached();
1557 
1558   inline void DoScavengeObject(Map* map, HeapObject** slot, HeapObject* obj) {
1559     scavenging_visitors_table_.GetVisitor(map)(map, slot, obj);
1560   }
1561 
1562   void QueueMemoryChunkForFree(MemoryChunk* chunk);
1563   void FreeQueuedChunks();
1564 
1565   // Completely clear the Instanceof cache (to stop it keeping objects alive
1566   // around a GC).
1567   inline void CompletelyClearInstanceofCache();
1568 
1569   // The roots that have an index less than this are always in old space.
1570   static const int kOldSpaceRoots = 0x20;
1571 
1572   uint32_t HashSeed() {
1573     uint32_t seed = static_cast<uint32_t>(hash_seed()->value());
1574     ASSERT(FLAG_randomize_hashes || seed == 0);
1575     return seed;
1576   }
1577 
1578   void SetArgumentsAdaptorDeoptPCOffset(int pc_offset) {
1579     ASSERT(arguments_adaptor_deopt_pc_offset() == Smi::FromInt(0));
1580     set_arguments_adaptor_deopt_pc_offset(Smi::FromInt(pc_offset));
1581   }
1582 
1583   void SetConstructStubDeoptPCOffset(int pc_offset) {
1584     ASSERT(construct_stub_deopt_pc_offset() == Smi::FromInt(0));
1585     set_construct_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
1586   }
1587 
1588   // For post mortem debugging.
1589   void RememberUnmappedPage(Address page, bool compacted);
1590 
1591   // Global inline caching age: it is incremented on some GCs after context
1592   // disposal. We use it to flush inline caches.
1593   int global_ic_age() {
1594     return global_ic_age_;
1595   }
1596 
1597   void AgeInlineCaches() {
1598     ++global_ic_age_;
1599   }
1600 
1601  private:
1602   Heap();
1603 
1604   // This can be calculated directly from a pointer to the heap; however, it is
1605   // more expedient to get at the isolate directly from within Heap methods.
1606   Isolate* isolate_;
1607 
1608   intptr_t code_range_size_;
1609   int reserved_semispace_size_;
1610   int max_semispace_size_;
1611   int initial_semispace_size_;
1612   intptr_t max_old_generation_size_;
1613   intptr_t max_executable_size_;
1614 
1615   // For keeping track of how much data has survived
1616   // scavenge since last new space expansion.
1617   int survived_since_last_expansion_;
1618 
1619   // For keeping track of when to flush RegExp code.
1620   int sweep_generation_;
1621 
1622   int always_allocate_scope_depth_;
1623   int linear_allocation_scope_depth_;
1624 
1625   // For keeping track of context disposals.
1626   int contexts_disposed_;
1627 
1628   int global_ic_age_;
1629 
1630   int scan_on_scavenge_pages_;
1631 
1632 #if defined(V8_TARGET_ARCH_X64)
1633   static const int kMaxObjectSizeInNewSpace = 1024*KB;
1634 #else
1635   static const int kMaxObjectSizeInNewSpace = 512*KB;
1636 #endif
1637 
1638   NewSpace new_space_;
1639   OldSpace* old_pointer_space_;
1640   OldSpace* old_data_space_;
1641   OldSpace* code_space_;
1642   MapSpace* map_space_;
1643   CellSpace* cell_space_;
1644   LargeObjectSpace* lo_space_;
1645   HeapState gc_state_;
1646   int gc_post_processing_depth_;
1647 
1648   // Returns the amount of external memory registered since last global gc.
1649   int PromotedExternalMemorySize();
1650 
1651   int ms_count_;  // how many mark-sweep collections happened
1652   unsigned int gc_count_;  // how many gc happened
1653 
1654   // For post mortem debugging.
1655   static const int kRememberedUnmappedPages = 128;
1656   int remembered_unmapped_pages_index_;
1657   Address remembered_unmapped_pages_[kRememberedUnmappedPages];
1658 
1659   // Total length of the strings we failed to flatten since the last GC.
1660   int unflattened_strings_length_;
1661 
1662 #define ROOT_ACCESSOR(type, name, camel_name)                                  \
1663   inline void set_##name(type* value) {                                        \
1664     /* The deserializer makes use of the fact that these common roots are */   \
1665     /* never in new space and never on a page that is being compacted.    */   \
1666     ASSERT(k##camel_name##RootIndex >= kOldSpaceRoots || !InNewSpace(value));  \
1667     roots_[k##camel_name##RootIndex] = value;                                  \
1668   }
1669   ROOT_LIST(ROOT_ACCESSOR)
1670 #undef ROOT_ACCESSOR
1671 
1672 #ifdef DEBUG
1673   bool allocation_allowed_;
1674 
1675   // If the --gc-interval flag is set to a positive value, this
1676   // variable holds the value indicating the number of allocations
1677   // remaining until the next failure and garbage collection.
1678   int allocation_timeout_;
1679 
1680   // Do we expect to be able to handle allocation failure at this
1681   // time?
1682   bool disallow_allocation_failure_;
1683 
1684   HeapDebugUtils* debug_utils_;
1685 #endif  // DEBUG
1686 
1687   // Indicates that the new space should be kept small due to high promotion
1688   // rates caused by the mutator allocating a lot of long-lived objects.
1689   bool new_space_high_promotion_mode_active_;
1690 
1691   // Limit that triggers a global GC on the next (normally caused) GC.  This
1692   // is checked when we have already decided to do a GC to help determine
1693   // which collector to invoke.
1694   intptr_t old_gen_promotion_limit_;
1695 
1696   // Limit that triggers a global GC as soon as is reasonable.  This is
1697   // checked before expanding a paged space in the old generation and on
1698   // every allocation in large object space.
1699   intptr_t old_gen_allocation_limit_;
1700 
1701   // Sometimes the heuristics dictate that those limits are increased.  This
1702   // variable records that fact.
1703   int old_gen_limit_factor_;
1704 
1705   // Used to adjust the limits that control the timing of the next GC.
1706   intptr_t size_of_old_gen_at_last_old_space_gc_;
1707 
1708   // Limit on the amount of externally allocated memory allowed
1709   // between global GCs. If reached, a global GC is forced.
1710   intptr_t external_allocation_limit_;
1711 
1712   // The amount of external memory registered through the API kept alive
1713   // by global handles.
1714   int amount_of_external_allocated_memory_;
1715 
1716   // Caches the amount of external memory registered at the last global gc.
1717   int amount_of_external_allocated_memory_at_last_global_gc_;
1718 
1719   // Indicates that an allocation has failed in the old generation since the
1720   // last GC.
1721   int old_gen_exhausted_;
1722 
1723   Object* roots_[kRootListLength];
1724 
1725   Object* global_contexts_list_;
1726 
1727   StoreBufferRebuilder store_buffer_rebuilder_;
1728 
1729   struct StringTypeTable {
1730     InstanceType type;
1731     int size;
1732     RootListIndex index;
1733   };
1734 
1735   struct ConstantSymbolTable {
1736     const char* contents;
1737     RootListIndex index;
1738   };
1739 
1740   struct StructTable {
1741     InstanceType type;
1742     int size;
1743     RootListIndex index;
1744   };
1745 
1746   static const StringTypeTable string_type_table[];
1747   static const ConstantSymbolTable constant_symbol_table[];
1748   static const StructTable struct_table[];
1749 
1750   // The special hidden symbol which is an empty string, but does not match
1751   // any string when looked up in properties.
1752   String* hidden_symbol_;
1753 
1754   // GC callback function, called before and after mark-compact GC.
1755   // Allocations in the callback function are disallowed.
1756   struct GCPrologueCallbackPair {
1757     GCPrologueCallbackPair(GCPrologueCallback callback, GCType gc_type)
1758         : callback(callback), gc_type(gc_type) {
1759     }
1760     bool operator==(const GCPrologueCallbackPair& pair) const {
1761       return pair.callback == callback;
1762     }
1763     GCPrologueCallback callback;
1764     GCType gc_type;
1765   };
1766   List<GCPrologueCallbackPair> gc_prologue_callbacks_;
1767 
1768   struct GCEpilogueCallbackPair {
1769     GCEpilogueCallbackPair(GCEpilogueCallback callback, GCType gc_type)
1770         : callback(callback), gc_type(gc_type) {
1771     }
1772     bool operator==(const GCEpilogueCallbackPair& pair) const {
1773       return pair.callback == callback;
1774     }
1775     GCEpilogueCallback callback;
1776     GCType gc_type;
1777   };
1778   List<GCEpilogueCallbackPair> gc_epilogue_callbacks_;
1779 
1780   GCCallback global_gc_prologue_callback_;
1781   GCCallback global_gc_epilogue_callback_;
1782 
1783   // Support for computing object sizes during GC.
1784   HeapObjectCallback gc_safe_size_of_old_object_;
1785   static int GcSafeSizeOfOldObject(HeapObject* object);
1786 
1787   // Update the GC state. Called from the mark-compact collector.
1788   void MarkMapPointersAsEncoded(bool encoded) {
1789     ASSERT(!encoded);
1790     gc_safe_size_of_old_object_ = &GcSafeSizeOfOldObject;
1791   }
1792 
1793   // Checks whether a global GC is necessary
1794   GarbageCollector SelectGarbageCollector(AllocationSpace space,
1795                                           const char** reason);
1796 
1797   // Performs garbage collection
1798   // Returns whether there is a chance another major GC could
1799   // collect more garbage.
1800   bool PerformGarbageCollection(GarbageCollector collector,
1801                                 GCTracer* tracer);
1802 
1803 
1804   inline void UpdateOldSpaceLimits();
1805 
1806   // Allocate an uninitialized object in map space.  The behavior is identical
1807   // to Heap::AllocateRaw(size_in_bytes, MAP_SPACE), except that (a) it doesn't
1808   // have to test the allocation space argument and (b) can reduce code size
1809   // (since both AllocateRaw and AllocateRawMap are inlined).
1810   MUST_USE_RESULT inline MaybeObject* AllocateRawMap();
1811 
1812   // Allocate an uninitialized object in the global property cell space.
1813   MUST_USE_RESULT inline MaybeObject* AllocateRawCell();
1814 
1815   // Initializes a JSObject based on its map.
1816   void InitializeJSObjectFromMap(JSObject* obj,
1817                                  FixedArray* properties,
1818                                  Map* map);
1819 
1820   bool CreateInitialMaps();
1821   bool CreateInitialObjects();
1822 
1823   // These two Create*EntryStub functions are here and forced to not be inlined
1824   // because of a gcc-4.4 bug that assigns wrong vtable entries.
1825   NO_INLINE(void CreateJSEntryStub());
1826   NO_INLINE(void CreateJSConstructEntryStub());
1827 
1828   void CreateFixedStubs();
1829 
1830   MaybeObject* CreateOddball(const char* to_string,
1831                              Object* to_number,
1832                              byte kind);
1833 
1834   // Allocate a JSArray with no elements
1835   MUST_USE_RESULT MaybeObject* AllocateJSArray(
1836       ElementsKind elements_kind,
1837       PretenureFlag pretenure = NOT_TENURED);
1838 
1839   // Allocate empty fixed array.
1840   MUST_USE_RESULT MaybeObject* AllocateEmptyFixedArray();
1841 
1842   // Allocate empty fixed double array.
1843   MUST_USE_RESULT MaybeObject* AllocateEmptyFixedDoubleArray();
1844 
1845   // Performs a minor collection in new generation.
1846   void Scavenge();
1847 
1848   static String* UpdateNewSpaceReferenceInExternalStringTableEntry(
1849       Heap* heap,
1850       Object** pointer);
1851 
1852   Address DoScavenge(ObjectVisitor* scavenge_visitor, Address new_space_front);
1853   static void ScavengeStoreBufferCallback(Heap* heap,
1854                                           MemoryChunk* page,
1855                                           StoreBufferEvent event);
1856 
1857   // Performs a major collection in the whole heap.
1858   void MarkCompact(GCTracer* tracer);
1859 
1860   // Code to be run before and after mark-compact.
1861   void MarkCompactPrologue();
1862 
1863   // Record statistics before and after garbage collection.
1864   void ReportStatisticsBeforeGC();
1865   void ReportStatisticsAfterGC();
1866 
1867   // Slow part of scavenge object.
1868   static void ScavengeObjectSlow(HeapObject** p, HeapObject* object);
1869 
1870   // Initializes a function with a shared part and prototype.
1871   // Note: this code was factored out of AllocateFunction such that
1872   // other parts of the VM could use it. Specifically, a function that creates
1873   // instances of type JS_FUNCTION_TYPE benefits from the use of this function.
1874   // Please note this does not perform a garbage collection.
1875   inline void InitializeFunction(
1876       JSFunction* function,
1877       SharedFunctionInfo* shared,
1878       Object* prototype);
1879 
1880   // Total RegExp code ever generated
1881   double total_regexp_code_generated_;
1882 
1883   GCTracer* tracer_;
1884 
1885 
1886   // Allocates a small number to string cache.
1887   MUST_USE_RESULT MaybeObject* AllocateInitialNumberStringCache();
1888   // Creates and installs the full-sized number string cache.
1889   void AllocateFullSizeNumberStringCache();
1890   // Get the length of the number to string cache based on the max semispace
1891   // size.
1892   int FullSizeNumberStringCacheLength();
1893   // Flush the number to string cache.
1894   void FlushNumberStringCache();
1895 
1896   void UpdateSurvivalRateTrend(int start_new_space_size);
1897 
1898   enum SurvivalRateTrend { INCREASING, STABLE, DECREASING, FLUCTUATING };
1899 
1900   static const int kYoungSurvivalRateHighThreshold = 90;
1901   static const int kYoungSurvivalRateLowThreshold = 10;
1902   static const int kYoungSurvivalRateAllowedDeviation = 15;
1903 
1904   int young_survivors_after_last_gc_;
1905   int high_survival_rate_period_length_;
1906   int low_survival_rate_period_length_;
1907   double survival_rate_;
1908   SurvivalRateTrend previous_survival_rate_trend_;
1909   SurvivalRateTrend survival_rate_trend_;
1910 
1911   void set_survival_rate_trend(SurvivalRateTrend survival_rate_trend) {
1912     ASSERT(survival_rate_trend != FLUCTUATING);
1913     previous_survival_rate_trend_ = survival_rate_trend_;
1914     survival_rate_trend_ = survival_rate_trend;
1915   }
1916 
1917   SurvivalRateTrend survival_rate_trend() {
1918     if (survival_rate_trend_ == STABLE) {
1919       return STABLE;
1920     } else if (previous_survival_rate_trend_ == STABLE) {
1921       return survival_rate_trend_;
1922     } else if (survival_rate_trend_ != previous_survival_rate_trend_) {
1923       return FLUCTUATING;
1924     } else {
1925       return survival_rate_trend_;
1926     }
1927   }
1928 
1929   bool IsStableOrIncreasingSurvivalTrend() {
1930     switch (survival_rate_trend()) {
1931       case STABLE:
1932       case INCREASING:
1933         return true;
1934       default:
1935         return false;
1936     }
1937   }
1938 
1939   bool IsStableOrDecreasingSurvivalTrend() {
1940     switch (survival_rate_trend()) {
1941       case STABLE:
1942       case DECREASING:
1943         return true;
1944       default:
1945         return false;
1946     }
1947   }
1948 
1949   bool IsIncreasingSurvivalTrend() {
1950     return survival_rate_trend() == INCREASING;
1951   }
1952 
1953   bool IsHighSurvivalRate() {
1954     return high_survival_rate_period_length_ > 0;
1955   }
1956 
1957   bool IsLowSurvivalRate() {
1958     return low_survival_rate_period_length_ > 0;
1959   }
1960 
1961   void SelectScavengingVisitorsTable();
1962 
1963   void StartIdleRound() {
1964     mark_sweeps_since_idle_round_started_ = 0;
1965     ms_count_at_last_idle_notification_ = ms_count_;
1966   }
1967 
1968   void FinishIdleRound() {
1969     mark_sweeps_since_idle_round_started_ = kMaxMarkSweepsInIdleRound;
1970     scavenges_since_last_idle_round_ = 0;
1971   }
1972 
1973   bool EnoughGarbageSinceLastIdleRound() {
1974     return (scavenges_since_last_idle_round_ >= kIdleScavengeThreshold);
1975   }
1976 
1977   bool WorthStartingGCWhenIdle() {
1978     if (contexts_disposed_ > 0) {
1979       return true;
1980     }
1981     return incremental_marking()->WorthActivating();
1982   }
1983 
1984   // Estimates how many milliseconds a Mark-Sweep would take to complete.
1985   // In the idle notification handler we assume that this function will return:
1986   // - a number less than 10 for small heaps, which are less than 8MB.
1987   // - a number greater than 10 for large heaps, which are greater than 32MB.
1988   int TimeMarkSweepWouldTakeInMs() {
1989     // Rough estimate of how many megabytes of heap can be processed in 1 ms.
1990     static const int kMbPerMs = 2;
1991 
1992     int heap_size_mb = static_cast<int>(SizeOfObjects() / MB);
1993     return heap_size_mb / kMbPerMs;
1994   }
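
  // Worked example (editorial): with SizeOfObjects() returning 16MB,
  // heap_size_mb is 16 and the estimate is 16 / 2 = 8 ms, i.e. below the
  // "small heap" threshold of 10 described above.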
1995 
1996   // Returns true if no more GC work is left.
1997   bool IdleGlobalGC();
1998 
1999   void AdvanceIdleIncrementalMarking(intptr_t step_size);
2000 
2001 
2002   static const int kInitialSymbolTableSize = 2048;
2003   static const int kInitialEvalCacheSize = 64;
2004   static const int kInitialNumberStringCacheSize = 256;
2005 
2006   // Maximum GC pause.
2007   int max_gc_pause_;
2008 
2009   // Maximum size of objects alive after GC.
2010   intptr_t max_alive_after_gc_;
2011 
2012   // Minimal interval between two subsequent collections.
2013   int min_in_mutator_;
2014 
2015   // Size of objects alive after last GC.
2016   intptr_t alive_after_last_gc_;
2017 
2018   double last_gc_end_timestamp_;
2019 
2020   MarkCompactCollector mark_compact_collector_;
2021 
2022   StoreBuffer store_buffer_;
2023 
2024   Marking marking_;
2025 
2026   IncrementalMarking incremental_marking_;
2027 
2028   int number_idle_notifications_;
2029   unsigned int last_idle_notification_gc_count_;
2030   bool last_idle_notification_gc_count_init_;
2031 
2032   int mark_sweeps_since_idle_round_started_;
2033   int ms_count_at_last_idle_notification_;
2034   unsigned int gc_count_at_last_idle_gc_;
2035   int scavenges_since_last_idle_round_;
2036 
2037   static const int kMaxMarkSweepsInIdleRound = 7;
2038   static const int kIdleScavengeThreshold = 5;
2039 
2040   // Shared state read by the scavenge collector and set by ScavengeObject.
2041   PromotionQueue promotion_queue_;
2042 
2043   // Flag is set when the heap has been configured.  The heap can be repeatedly
2044   // configured through the API until it is set up.
2045   bool configured_;
2046 
2047   ExternalStringTable external_string_table_;
2048 
2049   VisitorDispatchTable<ScavengingCallback> scavenging_visitors_table_;
2050 
2051   MemoryChunk* chunks_queued_for_free_;
2052 
2053   friend class Factory;
2054   friend class GCTracer;
2055   friend class DisallowAllocationFailure;
2056   friend class AlwaysAllocateScope;
2057   friend class LinearAllocationScope;
2058   friend class Page;
2059   friend class Isolate;
2060   friend class MarkCompactCollector;
2061   friend class StaticMarkingVisitor;
2062   friend class MapCompact;
2063 
2064   DISALLOW_COPY_AND_ASSIGN(Heap);
2065 };
2066 
2067 
2068 class HeapStats {
2069  public:
2070   static const int kStartMarker = 0xDECADE00;
2071   static const int kEndMarker = 0xDECADE01;
2072 
2073   int* start_marker;                     //  0
2074   int* new_space_size;                   //  1
2075   int* new_space_capacity;               //  2
2076   intptr_t* old_pointer_space_size;      //  3
2077   intptr_t* old_pointer_space_capacity;  //  4
2078   intptr_t* old_data_space_size;         //  5
2079   intptr_t* old_data_space_capacity;     //  6
2080   intptr_t* code_space_size;             //  7
2081   intptr_t* code_space_capacity;         //  8
2082   intptr_t* map_space_size;              //  9
2083   intptr_t* map_space_capacity;          // 10
2084   intptr_t* cell_space_size;             // 11
2085   intptr_t* cell_space_capacity;         // 12
2086   intptr_t* lo_space_size;               // 13
2087   int* global_handle_count;              // 14
2088   int* weak_global_handle_count;         // 15
2089   int* pending_global_handle_count;      // 16
2090   int* near_death_global_handle_count;   // 17
2091   int* free_global_handle_count;         // 18
2092   intptr_t* memory_allocator_size;       // 19
2093   intptr_t* memory_allocator_capacity;   // 20
2094   int* objects_per_type;                 // 21
2095   int* size_per_type;                    // 22
2096   int* os_error;                         // 23
2097   int* end_marker;                       // 24
2098 };
2099 
2100 
2101 class AlwaysAllocateScope {
2102  public:
2103   inline AlwaysAllocateScope();
2104   inline ~AlwaysAllocateScope();
2105 };
2106 
2107 
2108 class LinearAllocationScope {
2109  public:
2110   inline LinearAllocationScope();
2111   inline ~LinearAllocationScope();
2112 };
2113 
2114 
2115 #ifdef DEBUG
2116 // Visitor class to verify interior pointers in spaces that do not contain
2117 // or care about intergenerational references. All heap object pointers have to
2118 // point into the heap to a location that has a map pointer at its first word.
2119 // Caveat: Heap::Contains is an approximation because it can return true for
2120 // objects in a heap space but above the allocation pointer.
2121 class VerifyPointersVisitor: public ObjectVisitor {
2122  public:
2123   inline void VisitPointers(Object** start, Object** end);
2124 };
2125 #endif
2126 
2127 
2128 // Space iterator for iterating over all spaces of the heap.
2129 // Returns each space in turn, and null when it is done.
2130 class AllSpaces BASE_EMBEDDED {
2131  public:
2132   Space* next();
2133   AllSpaces() { counter_ = FIRST_SPACE; }
2134  private:
2135   int counter_;
2136 };
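
// Example (editorial sketch): visiting every space in turn; next() returns
// NULL once the iterator is exhausted.
//
//   AllSpaces spaces;
//   for (Space* space = spaces.next(); space != NULL; space = spaces.next()) {
//     PrintF("space: %d\n", static_cast<int>(space->identity()));
//   }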
2137 
2138 
2139 // Space iterator for iterating over all old spaces of the heap: Old pointer
2140 // space, old data space and code space.
2141 // Returns each space in turn, and null when it is done.
2142 class OldSpaces BASE_EMBEDDED {
2143  public:
2144   OldSpace* next();
2145   OldSpaces() { counter_ = OLD_POINTER_SPACE; }
2146  private:
2147   int counter_;
2148 };
2149 
2150 
2151 // Space iterator for iterating over all the paged spaces of the heap:
2152 // Map space, old pointer space, old data space, code space and cell space.
2153 // Returns each space in turn, and null when it is done.
2154 class PagedSpaces BASE_EMBEDDED {
2155  public:
2156   PagedSpace* next();
2157   PagedSpaces() { counter_ = OLD_POINTER_SPACE; }
2158  private:
2159   int counter_;
2160 };
2161 
2162 
2163 // Space iterator for iterating over all spaces of the heap.
2164 // For each space an object iterator is provided. The deallocation of the
2165 // returned object iterators is handled by the space iterator.
2166 class SpaceIterator : public Malloced {
2167  public:
2168   SpaceIterator();
2169   explicit SpaceIterator(HeapObjectCallback size_func);
2170   virtual ~SpaceIterator();
2171 
2172   bool has_next();
2173   ObjectIterator* next();
2174 
2175  private:
2176   ObjectIterator* CreateIterator();
2177 
2178   int current_space_;  // from enum AllocationSpace.
2179   ObjectIterator* iterator_;  // object iterator for the current space.
2180   HeapObjectCallback size_func_;
2181 };
2182 
2183 
2184 // A HeapIterator provides iteration over the whole heap. It
2185 // aggregates the specific iterators for the different spaces as
2186 // these can each iterate over only one space.
2187 //
2188 // HeapIterator can skip free list nodes (that is, de-allocated heap
2189 // objects that still remain in the heap). As the implementation of free
2190 // node filtering uses GC marks, it can't be used during MS/MC GC
2191 // phases. Also, it is forbidden to interrupt iteration in this mode,
2192 // as this will leave heap objects marked (and thus, unusable).
2193 class HeapObjectsFilter;
2194 
2195 class HeapIterator BASE_EMBEDDED {
2196  public:
2197   enum HeapObjectsFiltering {
2198     kNoFiltering,
2199     kFilterUnreachable
2200   };
2201 
2202   HeapIterator();
2203   explicit HeapIterator(HeapObjectsFiltering filtering);
2204   ~HeapIterator();
2205 
2206   HeapObject* next();
2207   void reset();
2208 
2209  private:
2210   // Perform the initialization.
2211   void Init();
2212   // Perform all necessary shutdown (destruction) work.
2213   void Shutdown();
2214   HeapObject* NextObject();
2215 
2216   HeapObjectsFiltering filtering_;
2217   HeapObjectsFilter* filter_;
2218   // Space iterator for iterating all the spaces.
2219   SpaceIterator* space_iterator_;
2220   // Object iterator for the space currently being iterated.
2221   ObjectIterator* object_iterator_;
2222 };
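
// Example (editorial sketch): summing the size of every live object,
// assuming the heap has been made iterable (see
// Heap::EnsureHeapIsIterable()):
//
//   intptr_t total_size = 0;
//   HeapIterator iterator;
//   for (HeapObject* obj = iterator.next(); obj != NULL;
//        obj = iterator.next()) {
//     total_size += obj->Size();
//   }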
2223 
2224 
2225 // Cache for mapping (map, property name) into field offset.
2226 // Cleared at startup and prior to mark sweep collection.
2227 class KeyedLookupCache {
2228  public:
2229   // Lookup field offset for (map, name). If absent, -1 is returned.
2230   int Lookup(Map* map, String* name);
2231 
2232   // Update an element in the cache.
2233   void Update(Map* map, String* name, int field_offset);
2234 
2235   // Clear the cache.
2236   void Clear();
2237 
2238   static const int kLength = 256;
2239   static const int kCapacityMask = kLength - 1;
2240   static const int kMapHashShift = 5;
2241   static const int kHashMask = -4;  // Zero the last two bits.
2242   static const int kEntriesPerBucket = 4;
2243   static const int kNotFound = -1;
2244 
2245   // kEntriesPerBucket should be a power of 2.
2246   STATIC_ASSERT((kEntriesPerBucket & (kEntriesPerBucket - 1)) == 0);
2247   STATIC_ASSERT(kEntriesPerBucket == -kHashMask);
2248 
2249  private:
2250   KeyedLookupCache() {
2251     for (int i = 0; i < kLength; ++i) {
2252       keys_[i].map = NULL;
2253       keys_[i].name = NULL;
2254       field_offsets_[i] = kNotFound;
2255     }
2256   }
2257 
2258   static inline int Hash(Map* map, String* name);
2259 
2260   // Get the address of the keys and field_offsets arrays.  Used in
2261   // generated code to perform cache lookups.
2262   Address keys_address() {
2263     return reinterpret_cast<Address>(&keys_);
2264   }
2265 
2266   Address field_offsets_address() {
2267     return reinterpret_cast<Address>(&field_offsets_);
2268   }
2269 
2270   struct Key {
2271     Map* map;
2272     String* name;
2273   };
2274 
2275   Key keys_[kLength];
2276   int field_offsets_[kLength];
2277 
2278   friend class ExternalReference;
2279   friend class Isolate;
2280   DISALLOW_COPY_AND_ASSIGN(KeyedLookupCache);
2281 };
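
// Example (editorial sketch): the typical miss/fill discipline, assuming a
// KeyedLookupCache* cache (e.g. isolate->keyed_lookup_cache()) and a
// hypothetical slow-path helper ComputeFieldOffsetSlow():
//
//   int offset = cache->Lookup(map, name);
//   if (offset == KeyedLookupCache::kNotFound) {
//     offset = ComputeFieldOffsetSlow(map, name);  // hypothetical
//     cache->Update(map, name, offset);
//   }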
2282 
2283 
2284 // Cache for mapping (array, property name) into descriptor index.
2285 // The cache contains both positive and negative results.
2286 // Descriptor index equals kNotFound means the property is absent.
2287 // Cleared at startup and prior to any GC.
2288 class DescriptorLookupCache {
2289  public:
2290   // Lookup descriptor index for (map, name).
2291   // If absent, kAbsent is returned.
2292   int Lookup(DescriptorArray* array, String* name) {
2293     if (!StringShape(name).IsSymbol()) return kAbsent;
2294     int index = Hash(array, name);
2295     Key& key = keys_[index];
2296     if ((key.array == array) && (key.name == name)) return results_[index];
2297     return kAbsent;
2298   }
2299 
2300   // Update an element in the cache.
2301   void Update(DescriptorArray* array, String* name, int result) {
2302     ASSERT(result != kAbsent);
2303     if (StringShape(name).IsSymbol()) {
2304       int index = Hash(array, name);
2305       Key& key = keys_[index];
2306       key.array = array;
2307       key.name = name;
2308       results_[index] = result;
2309     }
2310   }
2311 
2312   // Clear the cache.
2313   void Clear();
2314 
2315   static const int kAbsent = -2;
2316 
2317  private:
2318   DescriptorLookupCache() {
2319     for (int i = 0; i < kLength; ++i) {
2320       keys_[i].array = NULL;
2321       keys_[i].name = NULL;
2322       results_[i] = kAbsent;
2323     }
2324   }
2325 
2326   static int Hash(DescriptorArray* array, String* name) {
2327     // Uses only lower 32 bits if pointers are larger.
2328     uint32_t array_hash =
2329         static_cast<uint32_t>(reinterpret_cast<uintptr_t>(array)) >> 2;
2330     uint32_t name_hash =
2331         static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name)) >> 2;
2332     return (array_hash ^ name_hash) % kLength;
2333   }
2334 
2335   static const int kLength = 64;
2336   struct Key {
2337     DescriptorArray* array;
2338     String* name;
2339   };
2340 
2341   Key keys_[kLength];
2342   int results_[kLength];
2343 
2344   friend class Isolate;
2345   DISALLOW_COPY_AND_ASSIGN(DescriptorLookupCache);
2346 };
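
// Example (editorial sketch): the same discipline for descriptor lookups;
// kAbsent marks a cache miss, and the search result (including "not found")
// is cached afterwards:
//
//   int number = cache->Lookup(descriptors, name);
//   if (number == DescriptorLookupCache::kAbsent) {
//     number = descriptors->Search(name);
//     cache->Update(descriptors, name, number);
//   }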
2347 
2348 
2349 #ifdef DEBUG
2350 class DisallowAllocationFailure {
2351  public:
2352   inline DisallowAllocationFailure();
2353   inline ~DisallowAllocationFailure();
2354 
2355  private:
2356   bool old_state_;
2357 };
2358 #endif
2359 
2360 
2361 // A helper class to document/test C++ scopes where we do not
2362 // expect a GC. Usage:
2363 //
2364 // /* Allocation not allowed: we cannot handle a GC in this scope. */
2365 // { AssertNoAllocation nogc;
2366 //   ...
2367 // }
2368 class AssertNoAllocation {
2369  public:
2370   inline AssertNoAllocation();
2371   inline ~AssertNoAllocation();
2372 
2373 #ifdef DEBUG
2374  private:
2375   bool old_state_;
2376 #endif
2377 };
2378 
2379 
2380 class DisableAssertNoAllocation {
2381  public:
2382   inline DisableAssertNoAllocation();
2383   inline ~DisableAssertNoAllocation();
2384 
2385 #ifdef DEBUG
2386  private:
2387   bool old_state_;
2388 #endif
2389 };
2390 
2391 // GCTracer collects and prints ONE line after each garbage collector
2392 // invocation IFF --trace_gc is used.
2393 
2394 class GCTracer BASE_EMBEDDED {
2395  public:
2396   class Scope BASE_EMBEDDED {
2397    public:
2398     enum ScopeId {
2399       EXTERNAL,
2400       MC_MARK,
2401       MC_SWEEP,
2402       MC_SWEEP_NEWSPACE,
2403       MC_EVACUATE_PAGES,
2404       MC_UPDATE_NEW_TO_NEW_POINTERS,
2405       MC_UPDATE_ROOT_TO_NEW_POINTERS,
2406       MC_UPDATE_OLD_TO_NEW_POINTERS,
2407       MC_UPDATE_POINTERS_TO_EVACUATED,
2408       MC_UPDATE_POINTERS_BETWEEN_EVACUATED,
2409       MC_UPDATE_MISC_POINTERS,
2410       MC_FLUSH_CODE,
2411       kNumberOfScopes
2412     };
2413 
2414     Scope(GCTracer* tracer, ScopeId scope)
2415         : tracer_(tracer),
2416         scope_(scope) {
2417       start_time_ = OS::TimeCurrentMillis();
2418     }
2419 
2420     ~Scope() {
2421       ASSERT(scope_ < kNumberOfScopes);  // scope_ is unsigned.
2422       tracer_->scopes_[scope_] += OS::TimeCurrentMillis() - start_time_;
2423     }
2424 
2425    private:
2426     GCTracer* tracer_;
2427     ScopeId scope_;
2428     double start_time_;
2429   };
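
  // Example (editorial sketch): timing one collection phase with the RAII
  // scope above; on destruction the elapsed wall-clock time is added to
  // scopes_[MC_MARK]:
  //
  //   {
  //     GCTracer::Scope gc_scope(tracer, GCTracer::Scope::MC_MARK);
  //     MarkLiveObjects();  // hypothetical phase body
  //   }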
2430 
2431   explicit GCTracer(Heap* heap,
2432                     const char* gc_reason,
2433                     const char* collector_reason);
2434   ~GCTracer();
2435 
2436   // Sets the collector.
2437   void set_collector(GarbageCollector collector) { collector_ = collector; }
2438 
2439   // Sets the GC count.
2440   void set_gc_count(unsigned int count) { gc_count_ = count; }
2441 
2442   // Sets the full GC count.
2443   void set_full_gc_count(int count) { full_gc_count_ = count; }
2444 
2445   void increment_promoted_objects_size(int object_size) {
2446     promoted_objects_size_ += object_size;
2447   }
2448 
2449  private:
2450   // Returns a string matching the collector.
2451   const char* CollectorString();
2452 
2453   // Returns size of objects in heap (in MB).
2454   inline double SizeOfHeapObjects();
2455 
2456   // Timestamp set in the constructor.
2457   double start_time_;
2458 
2459   // Size of objects in heap set in constructor.
2460   intptr_t start_object_size_;
2461 
2462   // Size of memory allocated from OS set in constructor.
2463   intptr_t start_memory_size_;
2464 
2465   // Type of collector.
2466   GarbageCollector collector_;
2467 
2468   // A count (including this one, e.g. the first collection is 1) of the
2469   // number of garbage collections.
2470   unsigned int gc_count_;
2471 
2472   // A count (including this one) of the number of full garbage collections.
2473   int full_gc_count_;
2474 
2475   // Amounts of time spent in different scopes during GC.
2476   double scopes_[Scope::kNumberOfScopes];
2477 
2478   // Total amount of space either wasted or contained in one of the free
2479   // lists before the current GC.
2480   intptr_t in_free_list_or_wasted_before_gc_;
2481 
2482   // Difference between space used in the heap at the beginning of the current
2483   // collection and the end of the previous collection.
2484   intptr_t allocated_since_last_gc_;
2485 
2486   // Amount of time spent in the mutator, that is, the time elapsed between
2487   // the end of the previous collection and the beginning of the current one.
2488   double spent_in_mutator_;
2489 
2490   // Size of objects promoted during the current collection.
2491   intptr_t promoted_objects_size_;
2492 
2493   // Incremental marking steps counters.
2494   int steps_count_;
2495   double steps_took_;
2496   double longest_step_;
2497   int steps_count_since_last_gc_;
2498   double steps_took_since_last_gc_;
2499 
2500   Heap* heap_;
2501 
2502   const char* gc_reason_;
2503   const char* collector_reason_;
2504 };
2505 
2506 
2507 class StringSplitCache {
2508  public:
2509   static Object* Lookup(FixedArray* cache, String* string, String* pattern);
2510   static void Enter(Heap* heap,
2511                     FixedArray* cache,
2512                     String* string,
2513                     String* pattern,
2514                     FixedArray* array);
2515   static void Clear(FixedArray* cache);
2516   static const int kStringSplitCacheSize = 0x100;
2517 
2518  private:
2519   static const int kArrayEntriesPerCacheEntry = 4;
2520   static const int kStringOffset = 0;
2521   static const int kPatternOffset = 1;
2522   static const int kArrayOffset = 2;
2523 
2524   static MaybeObject* WrapFixedArrayInJSArray(Object* fixed_array);
2525 };
2526 
2527 
2528 class TranscendentalCache {
2529  public:
2530   enum Type {ACOS, ASIN, ATAN, COS, EXP, LOG, SIN, TAN, kNumberOfCaches};
2531   static const int kTranscendentalTypeBits = 3;
2532   STATIC_ASSERT((1 << kTranscendentalTypeBits) >= kNumberOfCaches);
2533 
2534   // Returns a heap number with f(input), where f is a math function specified
2535   // by the 'type' argument.
2536   MUST_USE_RESULT inline MaybeObject* Get(Type type, double input);
2537 
2538   // The cache contains raw Object pointers.  This method disposes of
2539   // them before a garbage collection.
2540   void Clear();
2541 
2542  private:
2543   class SubCache {
2544     static const int kCacheSize = 512;
2545 
2546     explicit SubCache(Type t);
2547 
2548     MUST_USE_RESULT inline MaybeObject* Get(double input);
2549 
2550     inline double Calculate(double input);
2551 
2552     struct Element {
2553       uint32_t in[2];
2554       Object* output;
2555     };
2556 
2557     union Converter {
2558       double dbl;
2559       uint32_t integers[2];
2560     };
2561 
2562     inline static int Hash(const Converter& c) {
2563       uint32_t hash = (c.integers[0] ^ c.integers[1]);
2564       hash ^= static_cast<int32_t>(hash) >> 16;
2565       hash ^= static_cast<int32_t>(hash) >> 8;
2566       return (hash & (kCacheSize - 1));
2567     }
2568 
2569     Element elements_[kCacheSize];
2570     Type type_;
2571     Isolate* isolate_;
2572 
2573     // Allow access to the caches_ array as an ExternalReference.
2574     friend class ExternalReference;
2575     // Inline implementation of the cache.
2576     friend class TranscendentalCacheStub;
2577     // For evaluating value.
2578     friend class TranscendentalCache;
2579 
2580     DISALLOW_COPY_AND_ASSIGN(SubCache);
2581   };
2582 
2583   TranscendentalCache() {
2584     for (int i = 0; i < kNumberOfCaches; ++i) caches_[i] = NULL;
2585   }
2586 
2587   // Used to create an external reference.
2588   inline Address cache_array_address();
2589 
2590   // Instantiation
2591   friend class Isolate;
2592   // Inline implementation of the caching.
2593   friend class TranscendentalCacheStub;
2594   // Allow access to the caches_ array as an ExternalReference.
2595   friend class ExternalReference;
2596 
2597   SubCache* caches_[kNumberOfCaches];
2598   DISALLOW_COPY_AND_ASSIGN(TranscendentalCache);
2599 };
2600 
2601 
2602 // Abstract base class for checking whether a weak object should be retained.
2603 class WeakObjectRetainer {
2604  public:
2605   virtual ~WeakObjectRetainer() {}
2606 
2607   // Return whether this object should be retained. If NULL is returned the
2608   // object has no references. Otherwise the address of the retained object
2609   // should be returned as in some GC situations the object has been moved.
2610   virtual Object* RetainAs(Object* object) = 0;
2611 };
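
// Sketch of a trivial concrete retainer (hypothetical, for illustration):
// keep every object alive by returning it unchanged; returning NULL instead
// would drop the weak reference, and returning a different address would
// redirect it to a moved object.
//
//   class RetainAllRetainer : public WeakObjectRetainer {
//    public:
//     virtual Object* RetainAs(Object* object) { return object; }
//   };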


// Intrusive object marking uses the least significant bit of a heap
// object's map word to mark objects. Normally all map words have the
// least significant bit set because they contain a tagged map pointer.
// If the bit is not set, the object is marked. All objects should be
// unmarked before resuming JavaScript execution.
class IntrusiveMarking {
 public:
  static bool IsMarked(HeapObject* object) {
    return (object->map_word().ToRawValue() & kNotMarkedBit) == 0;
  }

  static void ClearMark(HeapObject* object) {
    uintptr_t map_word = object->map_word().ToRawValue();
    object->set_map_word(MapWord::FromRawValue(map_word | kNotMarkedBit));
    ASSERT(!IsMarked(object));
  }

  static void SetMark(HeapObject* object) {
    uintptr_t map_word = object->map_word().ToRawValue();
    object->set_map_word(MapWord::FromRawValue(map_word & ~kNotMarkedBit));
    ASSERT(IsMarked(object));
  }

  // Recovers the map of a marked object by restoring the tag bit that
  // SetMark() cleared.
  static Map* MapOfMarkedObject(HeapObject* object) {
    uintptr_t map_word = object->map_word().ToRawValue();
    return MapWord::FromRawValue(map_word | kNotMarkedBit).ToMap();
  }

  static int SizeOfMarkedObject(HeapObject* object) {
    return object->SizeFromMap(MapOfMarkedObject(object));
  }

 private:
  static const uintptr_t kNotMarkedBit = 0x1;
  STATIC_ASSERT((kHeapObjectTag & kNotMarkedBit) != 0);
};
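
// Illustrative sequence (not in this header): mark an object, query it
// while marked through the mark-aware accessors, then clear the mark
// before JavaScript resumes. `object` is an assumed HeapObject*.
//
//   IntrusiveMarking::SetMark(object);
//   int size = IntrusiveMarking::SizeOfMarkedObject(object);
//   IntrusiveMarking::ClearMark(object);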


#if defined(DEBUG) || defined(LIVE_OBJECT_LIST)
// Helper class for tracing paths to a search target Object from all roots.
// The TracePathFrom() method can be used to trace paths from a specific
// object to the search target object.
class PathTracer : public ObjectVisitor {
 public:
  enum WhatToFind {
    FIND_ALL,   // Will find all matches.
    FIND_FIRST  // Will stop the search after the first match.
  };

  // For the WhatToFind arg, if FIND_FIRST is specified, tracing will stop
  // after the first match.  If FIND_ALL is specified, then tracing will be
  // done for all matches.
  PathTracer(Object* search_target,
             WhatToFind what_to_find,
             VisitMode visit_mode)
      : search_target_(search_target),
        found_target_(false),
        found_target_in_trace_(false),
        what_to_find_(what_to_find),
        visit_mode_(visit_mode),
        object_stack_(20),
        no_alloc() {}

  virtual void VisitPointers(Object** start, Object** end);

  void Reset();
  void TracePathFrom(Object** root);

  bool found() const { return found_target_; }

  static Object* const kAnyGlobalObject;

 protected:
  class MarkVisitor;
  class UnmarkVisitor;

  void MarkRecursively(Object** p, MarkVisitor* mark_visitor);
  void UnmarkRecursively(Object** p, UnmarkVisitor* unmark_visitor);
  virtual void ProcessResults();

  // Tags 0, 1, and 3 are used. Use 2 for marking visited HeapObject.
  static const int kMarkTag = 2;

  Object* search_target_;
  bool found_target_;
  bool found_target_in_trace_;
  WhatToFind what_to_find_;
  VisitMode visit_mode_;
  List<Object*> object_stack_;

  AssertNoAllocation no_alloc;  // I.e., no GC allowed.

 private:
  DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer);
};
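
// Debug-only usage sketch (illustrative): trace paths from one root slot to
// `target`, stopping at the first hit. `target` and `root_slot` are assumed
// to be in scope; VISIT_ALL is the VisitMode that follows all pointers.
//
//   PathTracer tracer(target, PathTracer::FIND_FIRST, VISIT_ALL);
//   tracer.TracePathFrom(root_slot);
//   if (tracer.found()) {
//     // `target` is reachable from *root_slot.
//   }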
#endif  // DEBUG || LIVE_OBJECT_LIST

} }  // namespace v8::internal

#endif  // V8_HEAP_H_