1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 //     * Redistributions of source code must retain the above copyright
7 //       notice, this list of conditions and the following disclaimer.
8 //     * Redistributions in binary form must reproduce the above
9 //       copyright notice, this list of conditions and the following
10 //       disclaimer in the documentation and/or other materials provided
11 //       with the distribution.
12 //     * Neither the name of Google Inc. nor the names of its
13 //       contributors may be used to endorse or promote products derived
14 //       from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #ifndef V8_HEAP_H_
29 #define V8_HEAP_H_
30 
31 #include <cmath>
32 
33 #include "allocation.h"
34 #include "assert-scope.h"
35 #include "globals.h"
36 #include "incremental-marking.h"
37 #include "list.h"
38 #include "mark-compact.h"
39 #include "objects-visiting.h"
40 #include "spaces.h"
41 #include "splay-tree-inl.h"
42 #include "store-buffer.h"
43 #include "v8-counters.h"
44 #include "v8globals.h"
45 
46 namespace v8 {
47 namespace internal {
48 
49 // Defines all the roots in Heap.
50 #define STRONG_ROOT_LIST(V)                                                    \
51   V(Map, byte_array_map, ByteArrayMap)                                         \
52   V(Map, free_space_map, FreeSpaceMap)                                         \
53   V(Map, one_pointer_filler_map, OnePointerFillerMap)                          \
54   V(Map, two_pointer_filler_map, TwoPointerFillerMap)                          \
55   /* Cluster the most popular ones in a few cache lines here at the top.    */ \
56   V(Smi, store_buffer_top, StoreBufferTop)                                     \
57   V(Oddball, undefined_value, UndefinedValue)                                  \
58   V(Oddball, the_hole_value, TheHoleValue)                                     \
59   V(Oddball, null_value, NullValue)                                            \
60   V(Oddball, true_value, TrueValue)                                            \
61   V(Oddball, false_value, FalseValue)                                          \
62   V(Oddball, uninitialized_value, UninitializedValue)                          \
63   V(Map, cell_map, CellMap)                                                    \
64   V(Map, global_property_cell_map, GlobalPropertyCellMap)                      \
65   V(Map, shared_function_info_map, SharedFunctionInfoMap)                      \
66   V(Map, meta_map, MetaMap)                                                    \
67   V(Map, heap_number_map, HeapNumberMap)                                       \
68   V(Map, native_context_map, NativeContextMap)                                 \
69   V(Map, fixed_array_map, FixedArrayMap)                                       \
70   V(Map, code_map, CodeMap)                                                    \
71   V(Map, scope_info_map, ScopeInfoMap)                                         \
72   V(Map, fixed_cow_array_map, FixedCOWArrayMap)                                \
73   V(Map, fixed_double_array_map, FixedDoubleArrayMap)                          \
74   V(Map, constant_pool_array_map, ConstantPoolArrayMap)                        \
75   V(Object, no_interceptor_result_sentinel, NoInterceptorResultSentinel)       \
76   V(Map, hash_table_map, HashTableMap)                                         \
77   V(FixedArray, empty_fixed_array, EmptyFixedArray)                            \
78   V(ByteArray, empty_byte_array, EmptyByteArray)                               \
79   V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray)             \
80   V(Smi, stack_limit, StackLimit)                                              \
81   V(Oddball, arguments_marker, ArgumentsMarker)                                \
82   /* The roots above this line should be boring from a GC point of view.    */ \
83   /* This means they are never in new space and never on a page that is     */ \
84   /* being compacted.                                                       */ \
85   V(FixedArray, number_string_cache, NumberStringCache)                        \
86   V(Object, instanceof_cache_function, InstanceofCacheFunction)                \
87   V(Object, instanceof_cache_map, InstanceofCacheMap)                          \
88   V(Object, instanceof_cache_answer, InstanceofCacheAnswer)                    \
89   V(FixedArray, single_character_string_cache, SingleCharacterStringCache)     \
90   V(FixedArray, string_split_cache, StringSplitCache)                          \
91   V(FixedArray, regexp_multiple_cache, RegExpMultipleCache)                    \
92   V(Object, termination_exception, TerminationException)                       \
93   V(Smi, hash_seed, HashSeed)                                                  \
94   V(Map, symbol_map, SymbolMap)                                                \
95   V(Map, string_map, StringMap)                                                \
96   V(Map, ascii_string_map, AsciiStringMap)                                     \
97   V(Map, cons_string_map, ConsStringMap)                                       \
98   V(Map, cons_ascii_string_map, ConsAsciiStringMap)                            \
99   V(Map, sliced_string_map, SlicedStringMap)                                   \
100   V(Map, sliced_ascii_string_map, SlicedAsciiStringMap)                        \
101   V(Map, external_string_map, ExternalStringMap)                               \
102   V(Map,                                                                       \
103     external_string_with_one_byte_data_map,                                    \
104     ExternalStringWithOneByteDataMap)                                          \
105   V(Map, external_ascii_string_map, ExternalAsciiStringMap)                    \
106   V(Map, short_external_string_map, ShortExternalStringMap)                    \
107   V(Map,                                                                       \
108     short_external_string_with_one_byte_data_map,                              \
109     ShortExternalStringWithOneByteDataMap)                                     \
110   V(Map, internalized_string_map, InternalizedStringMap)                       \
111   V(Map, ascii_internalized_string_map, AsciiInternalizedStringMap)            \
112   V(Map, cons_internalized_string_map, ConsInternalizedStringMap)              \
113   V(Map, cons_ascii_internalized_string_map, ConsAsciiInternalizedStringMap)   \
114   V(Map,                                                                       \
115     external_internalized_string_map,                                          \
116     ExternalInternalizedStringMap)                                             \
117   V(Map,                                                                       \
118     external_internalized_string_with_one_byte_data_map,                       \
119     ExternalInternalizedStringWithOneByteDataMap)                              \
120   V(Map,                                                                       \
121     external_ascii_internalized_string_map,                                    \
122     ExternalAsciiInternalizedStringMap)                                        \
123   V(Map,                                                                       \
124     short_external_internalized_string_map,                                    \
125     ShortExternalInternalizedStringMap)                                        \
126   V(Map,                                                                       \
127     short_external_internalized_string_with_one_byte_data_map,                 \
128     ShortExternalInternalizedStringWithOneByteDataMap)                         \
129   V(Map,                                                                       \
130     short_external_ascii_internalized_string_map,                              \
131     ShortExternalAsciiInternalizedStringMap)                                   \
132   V(Map, short_external_ascii_string_map, ShortExternalAsciiStringMap)         \
133   V(Map, undetectable_string_map, UndetectableStringMap)                       \
134   V(Map, undetectable_ascii_string_map, UndetectableAsciiStringMap)            \
135   V(Map, external_byte_array_map, ExternalByteArrayMap)                        \
136   V(Map, external_unsigned_byte_array_map, ExternalUnsignedByteArrayMap)       \
137   V(Map, external_short_array_map, ExternalShortArrayMap)                      \
138   V(Map, external_unsigned_short_array_map, ExternalUnsignedShortArrayMap)     \
139   V(Map, external_int_array_map, ExternalIntArrayMap)                          \
140   V(Map, external_unsigned_int_array_map, ExternalUnsignedIntArrayMap)         \
141   V(Map, external_float_array_map, ExternalFloatArrayMap)                      \
142   V(Map, external_double_array_map, ExternalDoubleArrayMap)                    \
143   V(Map, external_pixel_array_map, ExternalPixelArrayMap)                      \
144   V(ExternalArray, empty_external_byte_array,                                  \
145       EmptyExternalByteArray)                                                  \
146   V(ExternalArray, empty_external_unsigned_byte_array,                         \
147       EmptyExternalUnsignedByteArray)                                          \
148   V(ExternalArray, empty_external_short_array, EmptyExternalShortArray)        \
149   V(ExternalArray, empty_external_unsigned_short_array,                        \
150       EmptyExternalUnsignedShortArray)                                         \
151   V(ExternalArray, empty_external_int_array, EmptyExternalIntArray)            \
152   V(ExternalArray, empty_external_unsigned_int_array,                          \
153       EmptyExternalUnsignedIntArray)                                           \
154   V(ExternalArray, empty_external_float_array, EmptyExternalFloatArray)        \
155   V(ExternalArray, empty_external_double_array, EmptyExternalDoubleArray)      \
156   V(ExternalArray, empty_external_pixel_array,                                 \
157       EmptyExternalPixelArray)                                                 \
158   V(Map, non_strict_arguments_elements_map, NonStrictArgumentsElementsMap)     \
159   V(Map, function_context_map, FunctionContextMap)                             \
160   V(Map, catch_context_map, CatchContextMap)                                   \
161   V(Map, with_context_map, WithContextMap)                                     \
162   V(Map, block_context_map, BlockContextMap)                                   \
163   V(Map, module_context_map, ModuleContextMap)                                 \
164   V(Map, global_context_map, GlobalContextMap)                                 \
165   V(Map, oddball_map, OddballMap)                                              \
166   V(Map, message_object_map, JSMessageObjectMap)                               \
167   V(Map, foreign_map, ForeignMap)                                              \
168   V(HeapNumber, nan_value, NanValue)                                           \
169   V(HeapNumber, infinity_value, InfinityValue)                                 \
170   V(HeapNumber, minus_zero_value, MinusZeroValue)                              \
171   V(Map, neander_map, NeanderMap)                                              \
172   V(JSObject, message_listeners, MessageListeners)                             \
173   V(UnseededNumberDictionary, code_stubs, CodeStubs)                           \
174   V(UnseededNumberDictionary, non_monomorphic_cache, NonMonomorphicCache)      \
175   V(PolymorphicCodeCache, polymorphic_code_cache, PolymorphicCodeCache)        \
176   V(Code, js_entry_code, JsEntryCode)                                          \
177   V(Code, js_construct_entry_code, JsConstructEntryCode)                       \
178   V(FixedArray, natives_source_cache, NativesSourceCache)                      \
179   V(Smi, last_script_id, LastScriptId)                                         \
180   V(Script, empty_script, EmptyScript)                                         \
181   V(Smi, real_stack_limit, RealStackLimit)                                     \
182   V(NameDictionary, intrinsic_function_names, IntrinsicFunctionNames)          \
183   V(Smi, arguments_adaptor_deopt_pc_offset, ArgumentsAdaptorDeoptPCOffset)     \
184   V(Smi, construct_stub_deopt_pc_offset, ConstructStubDeoptPCOffset)           \
185   V(Smi, getter_stub_deopt_pc_offset, GetterStubDeoptPCOffset)                 \
186   V(Smi, setter_stub_deopt_pc_offset, SetterStubDeoptPCOffset)                 \
187   V(JSObject, observation_state, ObservationState)                             \
188   V(Map, external_map, ExternalMap)                                            \
189   V(Symbol, frozen_symbol, FrozenSymbol)                                       \
190   V(Symbol, elements_transition_symbol, ElementsTransitionSymbol)              \
191   V(SeededNumberDictionary, empty_slow_element_dictionary,                     \
192       EmptySlowElementDictionary)                                              \
193   V(Symbol, observed_symbol, ObservedSymbol)
194 
195 #define ROOT_LIST(V)                                  \
196   STRONG_ROOT_LIST(V)                                 \
197   V(StringTable, string_table, StringTable)
198 
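// Editorial note (not part of the original header): STRONG_ROOT_LIST and
// ROOT_LIST above are X-macro lists. Each V(type, name, CamelName) entry is
// consumed by defining V before invoking the macro. A minimal sketch of the
// common pattern (identifier names here are illustrative):
//
//   #define ROOT_INDEX_DECLARATION(type, name, camel_name) \
//       k##camel_name##RootIndex,
//   enum RootListIndex {
//     STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION)
//     kRootListLength
//   };
//   #undef ROOT_INDEX_DECLARATION
//
// The same lists are reused to generate accessors and iteration code, so
// adding a root only requires adding one V(...) line.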
199 #define INTERNALIZED_STRING_LIST(V)                                      \
200   V(Array_string, "Array")                                               \
201   V(Object_string, "Object")                                             \
202   V(proto_string, "__proto__")                                           \
203   V(arguments_string, "arguments")                                       \
204   V(Arguments_string, "Arguments")                                       \
205   V(call_string, "call")                                                 \
206   V(apply_string, "apply")                                               \
207   V(caller_string, "caller")                                             \
208   V(boolean_string, "boolean")                                           \
209   V(Boolean_string, "Boolean")                                           \
210   V(callee_string, "callee")                                             \
211   V(constructor_string, "constructor")                                   \
212   V(dot_result_string, ".result")                                        \
213   V(dot_for_string, ".for.")                                             \
214   V(dot_iterator_string, ".iterator")                                    \
215   V(dot_generator_object_string, ".generator_object")                    \
216   V(eval_string, "eval")                                                 \
217   V(empty_string, "")                                                    \
218   V(function_string, "function")                                         \
219   V(length_string, "length")                                             \
220   V(module_string, "module")                                             \
221   V(name_string, "name")                                                 \
222   V(native_string, "native")                                             \
223   V(null_string, "null")                                                 \
224   V(number_string, "number")                                             \
225   V(Number_string, "Number")                                             \
226   V(nan_string, "NaN")                                                   \
227   V(RegExp_string, "RegExp")                                             \
228   V(source_string, "source")                                             \
229   V(global_string, "global")                                             \
230   V(ignore_case_string, "ignoreCase")                                    \
231   V(multiline_string, "multiline")                                       \
232   V(input_string, "input")                                               \
233   V(index_string, "index")                                               \
234   V(last_index_string, "lastIndex")                                      \
235   V(object_string, "object")                                             \
236   V(literals_string, "literals")                                         \
237   V(prototype_string, "prototype")                                       \
238   V(string_string, "string")                                             \
239   V(String_string, "String")                                             \
240   V(symbol_string, "symbol")                                             \
241   V(Symbol_string, "Symbol")                                             \
242   V(Date_string, "Date")                                                 \
243   V(this_string, "this")                                                 \
244   V(to_string_string, "toString")                                        \
245   V(char_at_string, "CharAt")                                            \
246   V(undefined_string, "undefined")                                       \
247   V(value_of_string, "valueOf")                                          \
248   V(stack_string, "stack")                                               \
249   V(toJSON_string, "toJSON")                                             \
250   V(InitializeVarGlobal_string, "InitializeVarGlobal")                   \
251   V(InitializeConstGlobal_string, "InitializeConstGlobal")               \
252   V(KeyedLoadElementMonomorphic_string,                                  \
253     "KeyedLoadElementMonomorphic")                                       \
254   V(KeyedStoreElementMonomorphic_string,                                 \
255     "KeyedStoreElementMonomorphic")                                      \
256   V(stack_overflow_string, "kStackOverflowBoilerplate")                  \
257   V(illegal_access_string, "illegal access")                             \
258   V(illegal_execution_state_string, "illegal execution state")           \
259   V(get_string, "get")                                                   \
260   V(set_string, "set")                                                   \
261   V(map_field_string, "%map")                                            \
262   V(elements_field_string, "%elements")                                  \
263   V(length_field_string, "%length")                                      \
264   V(cell_value_string, "%cell_value")                                    \
265   V(function_class_string, "Function")                                   \
266   V(illegal_argument_string, "illegal argument")                         \
267   V(MakeReferenceError_string, "MakeReferenceError")                     \
268   V(MakeSyntaxError_string, "MakeSyntaxError")                           \
269   V(MakeTypeError_string, "MakeTypeError")                               \
270   V(invalid_lhs_in_assignment_string, "invalid_lhs_in_assignment")       \
271   V(invalid_lhs_in_for_in_string, "invalid_lhs_in_for_in")               \
272   V(invalid_lhs_in_postfix_op_string, "invalid_lhs_in_postfix_op")       \
273   V(invalid_lhs_in_prefix_op_string, "invalid_lhs_in_prefix_op")         \
274   V(illegal_return_string, "illegal_return")                             \
275   V(illegal_break_string, "illegal_break")                               \
276   V(illegal_continue_string, "illegal_continue")                         \
277   V(unknown_label_string, "unknown_label")                               \
278   V(redeclaration_string, "redeclaration")                               \
279   V(space_string, " ")                                                   \
280   V(exec_string, "exec")                                                 \
281   V(zero_string, "0")                                                    \
282   V(global_eval_string, "GlobalEval")                                    \
283   V(identity_hash_string, "v8::IdentityHash")                            \
284   V(closure_string, "(closure)")                                         \
285   V(use_strict_string, "use strict")                                     \
286   V(dot_string, ".")                                                     \
287   V(anonymous_function_string, "(anonymous function)")                   \
288   V(compare_ic_string, "==")                                             \
289   V(strict_compare_ic_string, "===")                                     \
290   V(infinity_string, "Infinity")                                         \
291   V(minus_infinity_string, "-Infinity")                                  \
292   V(hidden_stack_trace_string, "v8::hidden_stack_trace")                 \
293   V(query_colon_string, "(?:)")                                          \
294   V(Generator_string, "Generator")                                       \
295   V(throw_string, "throw")                                               \
296   V(done_string, "done")                                                 \
297   V(value_string, "value")                                               \
298   V(next_string, "next")                                                 \
299   V(byte_length_string, "byteLength")                                    \
300   V(byte_offset_string, "byteOffset")                                    \
301   V(buffer_string, "buffer")
302 
303 // Forward declarations.
304 class GCTracer;
305 class HeapStats;
306 class Isolate;
307 class WeakObjectRetainer;
308 
309 
310 typedef String* (*ExternalStringTableUpdaterCallback)(Heap* heap,
311                                                       Object** pointer);
312 
313 class StoreBufferRebuilder {
314  public:
315   explicit StoreBufferRebuilder(StoreBuffer* store_buffer)
316       : store_buffer_(store_buffer) {
317   }
318 
319   void Callback(MemoryChunk* page, StoreBufferEvent event);
320 
321  private:
322   StoreBuffer* store_buffer_;
323 
324   // We record in this variable how full the store buffer was when we started
325   // iterating over the current page, finding pointers to new space.  If the
326   // store buffer overflows again we can exempt the page from the store buffer
327   // by rewinding to this point instead of having to search the store buffer.
328   Object*** start_of_current_page_;
329   // The current page we are scanning in the store buffer iterator.
330   MemoryChunk* current_page_;
331 };
332 
333 
334 
335 // A queue of objects promoted during scavenge. Each object is accompanied
336 // by its size to avoid dereferencing a map pointer for scanning.
337 class PromotionQueue {
338  public:
339   explicit PromotionQueue(Heap* heap)
340       : front_(NULL),
341         rear_(NULL),
342         limit_(NULL),
343         emergency_stack_(0),
344         heap_(heap) { }
345 
346   void Initialize();
347 
348   void Destroy() {
349     ASSERT(is_empty());
350     delete emergency_stack_;
351     emergency_stack_ = NULL;
352   }
353 
354   inline void ActivateGuardIfOnTheSamePage();
355 
356   Page* GetHeadPage() {
357     return Page::FromAllocationTop(reinterpret_cast<Address>(rear_));
358   }
359 
360   void SetNewLimit(Address limit) {
361     if (!guard_) {
362       return;
363     }
364 
365     ASSERT(GetHeadPage() == Page::FromAllocationTop(limit));
366     limit_ = reinterpret_cast<intptr_t*>(limit);
367 
368     if (limit_ <= rear_) {
369       return;
370     }
371 
372     RelocateQueueHead();
373   }
374 
375   bool is_empty() {
376     return (front_ == rear_) &&
377         (emergency_stack_ == NULL || emergency_stack_->length() == 0);
378   }
379 
380   inline void insert(HeapObject* target, int size);
381 
382   void remove(HeapObject** target, int* size) {
383     ASSERT(!is_empty());
384     if (front_ == rear_) {
385       Entry e = emergency_stack_->RemoveLast();
386       *target = e.obj_;
387       *size = e.size_;
388       return;
389     }
390 
391     if (NewSpacePage::IsAtStart(reinterpret_cast<Address>(front_))) {
392       NewSpacePage* front_page =
393           NewSpacePage::FromAddress(reinterpret_cast<Address>(front_));
394       ASSERT(!front_page->prev_page()->is_anchor());
395       front_ =
396           reinterpret_cast<intptr_t*>(front_page->prev_page()->area_end());
397     }
398     *target = reinterpret_cast<HeapObject*>(*(--front_));
399     *size = static_cast<int>(*(--front_));
400     // Assert no underflow.
401     SemiSpace::AssertValidRange(reinterpret_cast<Address>(rear_),
402                                 reinterpret_cast<Address>(front_));
403   }
404 
405  private:
406   // The front of the queue is higher in the memory page chain than the rear.
407   intptr_t* front_;
408   intptr_t* rear_;
409   intptr_t* limit_;
410 
411   bool guard_;
412 
413   static const int kEntrySizeInWords = 2;
414 
415   struct Entry {
416     Entry(HeapObject* obj, int size) : obj_(obj), size_(size) { }
417 
418     HeapObject* obj_;
419     int size_;
420   };
421   List<Entry>* emergency_stack_;
422 
423   Heap* heap_;
424 
425   void RelocateQueueHead();
426 
427   DISALLOW_COPY_AND_ASSIGN(PromotionQueue);
428 };
429 
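// Illustrative usage sketch (hypothetical caller, not part of this header):
// during a scavenge, each promoted object is inserted together with its size
// and later removed for body scanning without re-reading its map.
//
//   HeapObject* target;
//   int size;
//   while (!promotion_queue->is_empty()) {
//     promotion_queue->remove(&target, &size);
//     // ... iterate the promoted object's body over [target, target + size) ...
//   }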
430 
431 typedef void (*ScavengingCallback)(Map* map,
432                                    HeapObject** slot,
433                                    HeapObject* object);
434 
435 
436 // External strings table is a place where all external strings are
437 // registered.  We need to keep track of such strings to properly
438 // finalize them.
439 class ExternalStringTable {
440  public:
441   // Registers an external string.
442   inline void AddString(String* string);
443 
444   inline void Iterate(ObjectVisitor* v);
445 
446   // Restores internal invariant and gets rid of collected strings.
447   // Must be called after each Iterate() that modified the strings.
448   void CleanUp();
449 
450   // Destroys all allocated memory.
451   void TearDown();
452 
453  private:
454   ExternalStringTable() { }
455 
456   friend class Heap;
457 
458   inline void Verify();
459 
460   inline void AddOldString(String* string);
461 
462   // Notifies the table that only a prefix of the new list is valid.
463   inline void ShrinkNewStrings(int position);
464 
465   // To speed up scavenge collections, new space strings are kept
466   // separate from old space strings.
467   List<Object*> new_space_strings_;
468   List<Object*> old_space_strings_;
469 
470   Heap* heap_;
471 
472   DISALLOW_COPY_AND_ASSIGN(ExternalStringTable);
473 };
474 
475 
476 enum ArrayStorageAllocationMode {
477   DONT_INITIALIZE_ARRAY_ELEMENTS,
478   INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE
479 };
480 
481 
482 class Heap {
483  public:
484   // Configure heap size before setup. Return false if the heap has been
485   // set up already.
486   bool ConfigureHeap(int max_semispace_size,
487                      intptr_t max_old_gen_size,
488                      intptr_t max_executable_size);
489   bool ConfigureHeapDefault();
490 
491   // Prepares the heap, setting up memory areas that are needed in the isolate
492   // without actually creating any objects.
493   bool SetUp();
494 
495   // Bootstraps the object heap with the core set of objects required to run.
496   // Returns whether it succeeded.
497   bool CreateHeapObjects();
498 
499   // Destroys all memory allocated by the heap.
500   void TearDown();
501 
502   // Set the stack limit in the roots_ array.  Some architectures generate
503   // code that looks here, because it is faster than loading from the static
504   // jslimit_/real_jslimit_ variable in the StackGuard.
505   void SetStackLimits();
506 
507   // Returns whether SetUp has been called.
508   bool HasBeenSetUp();
509 
510   // Returns the maximum amount of memory reserved for the heap.  For
511   // the young generation, we reserve 4 times the amount needed for a
512   // semi space.  The young generation consists of two semi spaces and
513   // we reserve twice the amount needed for those in order to ensure
514   // that new space can be aligned to its size.
515   intptr_t MaxReserved() {
516     return 4 * reserved_semispace_size_ + max_old_generation_size_;
517   }
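  // Worked example (hypothetical sizes, for illustration only): with an 8 MB
  // reserved semispace and a 700 MB old-generation limit, MaxReserved()
  // returns 4 * 8 MB + 700 MB = 732 MB, reflecting the "4 times a semispace"
  // reservation described above.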
518   int MaxSemiSpaceSize() { return max_semispace_size_; }
519   int ReservedSemiSpaceSize() { return reserved_semispace_size_; }
520   int InitialSemiSpaceSize() { return initial_semispace_size_; }
521   intptr_t MaxOldGenerationSize() { return max_old_generation_size_; }
522   intptr_t MaxExecutableSize() { return max_executable_size_; }
523   int MaxRegularSpaceAllocationSize() { return InitialSemiSpaceSize() * 4/5; }
524 
525   // Returns the capacity of the heap in bytes w/o growing. Heap grows when
526   // more spaces are needed until it reaches the limit.
527   intptr_t Capacity();
528 
529   // Returns the amount of memory currently committed for the heap.
530   intptr_t CommittedMemory();
531 
532   // Returns the amount of executable memory currently committed for the heap.
533   intptr_t CommittedMemoryExecutable();
534 
535   // Returns the amount of physical memory currently committed for the heap.
536   size_t CommittedPhysicalMemory();
537 
538   // Returns the maximum amount of memory ever committed for the heap.
539   intptr_t MaximumCommittedMemory() { return maximum_committed_; }
540 
541   // Updates the maximum committed memory for the heap. Should be called
542   // whenever a space grows.
543   void UpdateMaximumCommitted();
544 
545   // Returns the available bytes in space w/o growing.
546   // Heap doesn't guarantee that it can allocate an object that requires
547   // all available bytes. Check MaxHeapObjectSize() instead.
548   intptr_t Available();
549 
550   // Returns the size of all objects residing in the heap.
551   intptr_t SizeOfObjects();
552 
553   // Return the starting address and a mask for the new space.  And-masking an
554   // address with the mask will result in the start address of the new space
555   // for all addresses in either semispace.
556   Address NewSpaceStart() { return new_space_.start(); }
557   uintptr_t NewSpaceMask() { return new_space_.mask(); }
558   Address NewSpaceTop() { return new_space_.top(); }
559 
560   NewSpace* new_space() { return &new_space_; }
561   OldSpace* old_pointer_space() { return old_pointer_space_; }
562   OldSpace* old_data_space() { return old_data_space_; }
563   OldSpace* code_space() { return code_space_; }
564   MapSpace* map_space() { return map_space_; }
565   CellSpace* cell_space() { return cell_space_; }
566   PropertyCellSpace* property_cell_space() {
567     return property_cell_space_;
568   }
569   LargeObjectSpace* lo_space() { return lo_space_; }
570   PagedSpace* paged_space(int idx) {
571     switch (idx) {
572       case OLD_POINTER_SPACE:
573         return old_pointer_space();
574       case OLD_DATA_SPACE:
575         return old_data_space();
576       case MAP_SPACE:
577         return map_space();
578       case CELL_SPACE:
579         return cell_space();
580       case PROPERTY_CELL_SPACE:
581         return property_cell_space();
582       case CODE_SPACE:
583         return code_space();
584       case NEW_SPACE:
585       case LO_SPACE:
586         UNREACHABLE();
587     }
588     return NULL;
589   }
590 
591   bool always_allocate() { return always_allocate_scope_depth_ != 0; }
592   Address always_allocate_scope_depth_address() {
593     return reinterpret_cast<Address>(&always_allocate_scope_depth_);
594   }
595   bool linear_allocation() {
596     return linear_allocation_scope_depth_ != 0;
597   }
598 
599   Address* NewSpaceAllocationTopAddress() {
600     return new_space_.allocation_top_address();
601   }
602   Address* NewSpaceAllocationLimitAddress() {
603     return new_space_.allocation_limit_address();
604   }
605 
606   Address* OldPointerSpaceAllocationTopAddress() {
607     return old_pointer_space_->allocation_top_address();
608   }
609   Address* OldPointerSpaceAllocationLimitAddress() {
610     return old_pointer_space_->allocation_limit_address();
611   }
612 
613   Address* OldDataSpaceAllocationTopAddress() {
614     return old_data_space_->allocation_top_address();
615   }
616   Address* OldDataSpaceAllocationLimitAddress() {
617     return old_data_space_->allocation_limit_address();
618   }
619 
620   // Allocates and initializes a new JavaScript object based on a
621   // constructor.
622   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
623   // failed.
624   // Please note this does not perform a garbage collection.
625   MUST_USE_RESULT MaybeObject* AllocateJSObject(
626       JSFunction* constructor,
627       PretenureFlag pretenure = NOT_TENURED);
628 
629   MUST_USE_RESULT MaybeObject* AllocateJSObjectWithAllocationSite(
630       JSFunction* constructor,
631       Handle<AllocationSite> allocation_site);
632 
633   MUST_USE_RESULT MaybeObject* AllocateJSModule(Context* context,
634                                                 ScopeInfo* scope_info);
635 
636   // Allocate a JSArray with no elements
637   MUST_USE_RESULT MaybeObject* AllocateEmptyJSArray(
638       ElementsKind elements_kind,
639       PretenureFlag pretenure = NOT_TENURED) {
640     return AllocateJSArrayAndStorage(elements_kind, 0, 0,
641                                      DONT_INITIALIZE_ARRAY_ELEMENTS,
642                                      pretenure);
643   }
644 
645   // Allocate a JSArray with a specified length but elements that are left
646   // uninitialized.
647   MUST_USE_RESULT MaybeObject* AllocateJSArrayAndStorage(
648       ElementsKind elements_kind,
649       int length,
650       int capacity,
651       ArrayStorageAllocationMode mode = DONT_INITIALIZE_ARRAY_ELEMENTS,
652       PretenureFlag pretenure = NOT_TENURED);
653 
654   MUST_USE_RESULT MaybeObject* AllocateJSArrayStorage(
655       JSArray* array,
656       int length,
657       int capacity,
658       ArrayStorageAllocationMode mode = DONT_INITIALIZE_ARRAY_ELEMENTS);
659 
660   // Allocate a JSArray with the given elements.
661   MUST_USE_RESULT MaybeObject* AllocateJSArrayWithElements(
662       FixedArrayBase* array_base,
663       ElementsKind elements_kind,
664       int length,
665       PretenureFlag pretenure = NOT_TENURED);
666 
667   // Returns a deep copy of the JavaScript object.
668   // Properties and elements are copied too.
669   // Returns failure if allocation failed.
670   // Optionally takes an AllocationSite to be appended in an AllocationMemento.
671   MUST_USE_RESULT MaybeObject* CopyJSObject(JSObject* source,
672                                             AllocationSite* site = NULL);
673 
674   // Allocates a JS ArrayBuffer object.
675   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
676   // failed.
677   // Please note this does not perform a garbage collection.
678   MUST_USE_RESULT MaybeObject* AllocateJSArrayBuffer();
679 
680   // Allocates a Harmony proxy or function proxy.
681   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
682   // failed.
683   // Please note this does not perform a garbage collection.
684   MUST_USE_RESULT MaybeObject* AllocateJSProxy(Object* handler,
685                                                Object* prototype);
686 
687   MUST_USE_RESULT MaybeObject* AllocateJSFunctionProxy(Object* handler,
688                                                        Object* call_trap,
689                                                        Object* construct_trap,
690                                                        Object* prototype);
691 
692   // Reinitialize a JSReceiver into an (empty) JS object of respective type and
693   // size, but keeping the original prototype.  The receiver must have at least
694   // the size of the new object.  The object is reinitialized and behaves as an
695   // object that has been freshly allocated.
696   // Returns failure if an error occurred, otherwise the object.
697   MUST_USE_RESULT MaybeObject* ReinitializeJSReceiver(JSReceiver* object,
698                                                       InstanceType type,
699                                                       int size);
700 
701   // Reinitialize a JSGlobalProxy based on a constructor.  The object
702   // must have the same size as objects allocated using the
703   // constructor.  The object is reinitialized and behaves as an
704   // object that has been freshly allocated using the constructor.
705   MUST_USE_RESULT MaybeObject* ReinitializeJSGlobalProxy(
706       JSFunction* constructor, JSGlobalProxy* global);
707 
708   // Allocates and initializes a new JavaScript object based on a map.
709   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
710   // failed.
711   // Please note this does not perform a garbage collection.
712   MUST_USE_RESULT MaybeObject* AllocateJSObjectFromMap(
713       Map* map, PretenureFlag pretenure = NOT_TENURED, bool alloc_props = true);
714 
715   MUST_USE_RESULT MaybeObject* AllocateJSObjectFromMapWithAllocationSite(
716       Map* map, Handle<AllocationSite> allocation_site);
717 
718   // Allocates a heap object based on the map.
719   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
720   // failed.
721   // Please note this function does not perform a garbage collection.
722   MUST_USE_RESULT MaybeObject* Allocate(Map* map, AllocationSpace space);
723 
724   MUST_USE_RESULT MaybeObject* AllocateWithAllocationSite(Map* map,
725       AllocationSpace space, Handle<AllocationSite> allocation_site);
726 
727   // Allocates a JS Map in the heap.
728   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
729   // failed.
730   // Please note this function does not perform a garbage collection.
731   MUST_USE_RESULT MaybeObject* AllocateMap(
732       InstanceType instance_type,
733       int instance_size,
734       ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND);
735 
736   // Allocates a partial map for bootstrapping.
737   MUST_USE_RESULT MaybeObject* AllocatePartialMap(InstanceType instance_type,
738                                                   int instance_size);
739 
740   // Allocates an empty code cache.
741   MUST_USE_RESULT MaybeObject* AllocateCodeCache();
742 
743   // Allocates a serialized scope info.
744   MUST_USE_RESULT MaybeObject* AllocateScopeInfo(int length);
745 
746   // Allocates an External object for v8's external API.
747   MUST_USE_RESULT MaybeObject* AllocateExternal(void* value);
748 
749   // Allocates an empty PolymorphicCodeCache.
750   MUST_USE_RESULT MaybeObject* AllocatePolymorphicCodeCache();
751 
752   // Allocates a pre-tenured empty AccessorPair.
753   MUST_USE_RESULT MaybeObject* AllocateAccessorPair();
754 
755   // Allocates an empty TypeFeedbackInfo.
756   MUST_USE_RESULT MaybeObject* AllocateTypeFeedbackInfo();
757 
758   // Allocates an AliasedArgumentsEntry.
759   MUST_USE_RESULT MaybeObject* AllocateAliasedArgumentsEntry(int slot);
760 
761   // Clear the Instanceof cache (used when a prototype changes).
762   inline void ClearInstanceofCache();
763 
764   // Iterates the whole code space to clear all ICs of the given kind.
765   void ClearAllICsByKind(Code::Kind kind);
766 
767   // For use during bootup.
768   void RepairFreeListsAfterBoot();
769 
770   // Allocates and fully initializes a String.  There are two String
771   // encodings: ASCII and two byte. One should choose among the three string
772   // allocation functions based on the encoding of the string buffer used to
773   // initialize the string.
774   //   - ...FromAscii initializes the string from a buffer that is ASCII
775   //     encoded (it does not check that the buffer is ASCII encoded) and the
776   //     result will be ASCII encoded.
777   //   - ...FromUTF8 initializes the string from a buffer that is UTF-8
778   //     encoded.  If the characters are all single-byte characters, the
779   //     result will be ASCII encoded, otherwise it will be converted to two
780   //     byte.
781   //   - ...FromTwoByte initializes the string from a buffer that is two-byte
782   //     encoded.  If the characters are all single-byte characters, the
783   //     result will be converted to ASCII, otherwise it will be left as
784   //     two-byte.
785   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
786   // failed.
787   // Please note this does not perform a garbage collection.
788   MUST_USE_RESULT MaybeObject* AllocateStringFromOneByte(
789       Vector<const uint8_t> str,
790       PretenureFlag pretenure = NOT_TENURED);
791   // TODO(dcarney): remove this function.
792   MUST_USE_RESULT inline MaybeObject* AllocateStringFromOneByte(
793       Vector<const char> str,
794       PretenureFlag pretenure = NOT_TENURED) {
795     return AllocateStringFromOneByte(Vector<const uint8_t>::cast(str),
796                                      pretenure);
797   }
798   MUST_USE_RESULT inline MaybeObject* AllocateStringFromUtf8(
799       Vector<const char> str,
800       PretenureFlag pretenure = NOT_TENURED);
801   MUST_USE_RESULT MaybeObject* AllocateStringFromUtf8Slow(
802       Vector<const char> str,
803       int non_ascii_start,
804       PretenureFlag pretenure = NOT_TENURED);
805   MUST_USE_RESULT MaybeObject* AllocateStringFromTwoByte(
806       Vector<const uc16> str,
807       PretenureFlag pretenure = NOT_TENURED);
808 
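  // Illustrative call sites (hypothetical, not part of this header), matching
  // the encoding guidance above:
  //
  //   // Buffer known to contain only one-byte (Latin-1/ASCII) characters:
  //   MaybeObject* a = heap->AllocateStringFromOneByte(CStrVector("plain"));
  //   // Buffer in UTF-8 that may contain multi-byte sequences:
  //   MaybeObject* b = heap->AllocateStringFromUtf8(CStrVector("caf\xc3\xa9"));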
809   // Allocates an internalized string in old space based on the character
810   // stream. Returns Failure::RetryAfterGC(requested_bytes, space) if the
811   // allocation failed.
812   // Please note this function does not perform a garbage collection.
813   MUST_USE_RESULT inline MaybeObject* AllocateInternalizedStringFromUtf8(
814       Vector<const char> str,
815       int chars,
816       uint32_t hash_field);
817 
818   MUST_USE_RESULT inline MaybeObject* AllocateOneByteInternalizedString(
819         Vector<const uint8_t> str,
820         uint32_t hash_field);
821 
822   MUST_USE_RESULT inline MaybeObject* AllocateTwoByteInternalizedString(
823         Vector<const uc16> str,
824         uint32_t hash_field);
825 
826   template<typename T>
827   static inline bool IsOneByte(T t, int chars);
828 
829   template<typename T>
830   MUST_USE_RESULT inline MaybeObject* AllocateInternalizedStringImpl(
831       T t, int chars, uint32_t hash_field);
832 
833   template<bool is_one_byte, typename T>
834   MUST_USE_RESULT MaybeObject* AllocateInternalizedStringImpl(
835       T t, int chars, uint32_t hash_field);
836 
837   // Allocates and partially initializes a String.  There are two String
838   // encodings: ASCII and two byte.  These functions allocate a string of the
839   // given length and set its map and length fields.  The characters of the
840   // string are uninitialized.
841   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
842   // failed.
843   // Please note this does not perform a garbage collection.
844   MUST_USE_RESULT MaybeObject* AllocateRawOneByteString(
845       int length,
846       PretenureFlag pretenure = NOT_TENURED);
847   MUST_USE_RESULT MaybeObject* AllocateRawTwoByteString(
848       int length,
849       PretenureFlag pretenure = NOT_TENURED);
850 
851   // Computes a single character string where the character has the given code.
852   // A cache is used for ASCII codes.
853   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
854   // failed. Please note this does not perform a garbage collection.
855   MUST_USE_RESULT MaybeObject* LookupSingleCharacterStringFromCode(
856       uint16_t code);
857 
858   // Allocate a byte array of the specified length
859   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
860   // failed.
861   // Please note this does not perform a garbage collection.
862   MUST_USE_RESULT MaybeObject* AllocateByteArray(
863       int length,
864       PretenureFlag pretenure = NOT_TENURED);
865 
866   // Allocates an external array of the specified length and type.
867   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
868   // failed.
869   // Please note this does not perform a garbage collection.
870   MUST_USE_RESULT MaybeObject* AllocateExternalArray(
871       int length,
872       ExternalArrayType array_type,
873       void* external_pointer,
874       PretenureFlag pretenure);
875 
876   // Allocate a symbol in old space.
877   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
878   // failed.
879   // Please note this does not perform a garbage collection.
880   MUST_USE_RESULT MaybeObject* AllocateSymbol();
881   MUST_USE_RESULT MaybeObject* AllocatePrivateSymbol();
882 
883   // Allocate a tenured AllocationSite. Its payload is null.
884   MUST_USE_RESULT MaybeObject* AllocateAllocationSite();
885 
886   // Allocates a fixed array initialized with undefined values
887   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
888   // failed.
889   // Please note this does not perform a garbage collection.
890   MUST_USE_RESULT MaybeObject* AllocateFixedArray(
891       int length,
892       PretenureFlag pretenure = NOT_TENURED);
893 
894   // Allocates an uninitialized fixed array. It must be filled by the caller.
895   //
896   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
897   // failed.
898   // Please note this does not perform a garbage collection.
899   MUST_USE_RESULT MaybeObject* AllocateUninitializedFixedArray(int length);
900 
901   // Move len elements within a given array from the src_index index to the
902   // dst_index index.
903   void MoveElements(FixedArray* array, int dst_index, int src_index, int len);
904 
905   // Make a copy of src and return it. Returns
906   // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
907   MUST_USE_RESULT inline MaybeObject* CopyFixedArray(FixedArray* src);
908 
909   // Make a copy of src, set the map, and return the copy. Returns
910   // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
911   MUST_USE_RESULT MaybeObject* CopyFixedArrayWithMap(FixedArray* src, Map* map);
912 
913   // Make a copy of src and return it. Returns
914   // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
915   MUST_USE_RESULT inline MaybeObject* CopyFixedDoubleArray(
916       FixedDoubleArray* src);
917 
918   // Make a copy of src, set the map, and return the copy. Returns
919   // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
920   MUST_USE_RESULT MaybeObject* CopyFixedDoubleArrayWithMap(
921       FixedDoubleArray* src, Map* map);
922 
923   // Make a copy of src and return it. Returns
924   // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
925   MUST_USE_RESULT inline MaybeObject* CopyConstantPoolArray(
926       ConstantPoolArray* src);
927 
928   // Make a copy of src, set the map, and return the copy. Returns
929   // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
930   MUST_USE_RESULT MaybeObject* CopyConstantPoolArrayWithMap(
931       ConstantPoolArray* src, Map* map);
932 
933   // Allocates a fixed array initialized with the hole values.
934   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
935   // failed.
936   // Please note this does not perform a garbage collection.
937   MUST_USE_RESULT MaybeObject* AllocateFixedArrayWithHoles(
938       int length,
939       PretenureFlag pretenure = NOT_TENURED);
940 
941   MUST_USE_RESULT MaybeObject* AllocateConstantPoolArray(
942       int first_int64_index,
943       int first_ptr_index,
944       int first_int32_index);
945 
946   // Allocates a fixed double array with uninitialized values. Returns
947   // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
948   // Please note this does not perform a garbage collection.
949   MUST_USE_RESULT MaybeObject* AllocateUninitializedFixedDoubleArray(
950       int length,
951       PretenureFlag pretenure = NOT_TENURED);
952 
953   // Allocates a fixed double array with hole values. Returns
954   // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
955   // Please note this does not perform a garbage collection.
956   MUST_USE_RESULT MaybeObject* AllocateFixedDoubleArrayWithHoles(
957       int length,
958       PretenureFlag pretenure = NOT_TENURED);
959 
960   // AllocateHashTable is identical to AllocateFixedArray except
961   // that the resulting object has hash_table_map as map.
962   MUST_USE_RESULT MaybeObject* AllocateHashTable(
963       int length, PretenureFlag pretenure = NOT_TENURED);
964 
965   // Allocate a native (but otherwise uninitialized) context.
966   MUST_USE_RESULT MaybeObject* AllocateNativeContext();
967 
968   // Allocate a global context.
969   MUST_USE_RESULT MaybeObject* AllocateGlobalContext(JSFunction* function,
970                                                      ScopeInfo* scope_info);
971 
972   // Allocate a module context.
973   MUST_USE_RESULT MaybeObject* AllocateModuleContext(ScopeInfo* scope_info);
974 
975   // Allocate a function context.
976   MUST_USE_RESULT MaybeObject* AllocateFunctionContext(int length,
977                                                        JSFunction* function);
978 
979   // Allocate a catch context.
980   MUST_USE_RESULT MaybeObject* AllocateCatchContext(JSFunction* function,
981                                                     Context* previous,
982                                                     String* name,
983                                                     Object* thrown_object);
984   // Allocate a 'with' context.
985   MUST_USE_RESULT MaybeObject* AllocateWithContext(JSFunction* function,
986                                                    Context* previous,
987                                                    JSReceiver* extension);
988 
989   // Allocate a block context.
990   MUST_USE_RESULT MaybeObject* AllocateBlockContext(JSFunction* function,
991                                                     Context* previous,
992                                                     ScopeInfo* info);
993 
994   // Allocates a new utility object in the old generation.
995   MUST_USE_RESULT MaybeObject* AllocateStruct(InstanceType type);
996 
997   // Allocates a function initialized with a shared part.
998   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
999   // failed.
1000   // Please note this does not perform a garbage collection.
1001   MUST_USE_RESULT MaybeObject* AllocateFunction(
1002       Map* function_map,
1003       SharedFunctionInfo* shared,
1004       Object* prototype,
1005       PretenureFlag pretenure = TENURED);
1006 
1007   // Arguments object size.
1008   static const int kArgumentsObjectSize =
1009       JSObject::kHeaderSize + 2 * kPointerSize;
1010   // Strict mode arguments have no callee, so the object is smaller.
1011   static const int kArgumentsObjectSizeStrict =
1012       JSObject::kHeaderSize + 1 * kPointerSize;
1013   // Indices for direct access into argument objects.
1014   static const int kArgumentsLengthIndex = 0;
1015   // callee is only valid in non-strict mode.
1016   static const int kArgumentsCalleeIndex = 1;
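  // Worked example (assuming a 32-bit build where kPointerSize == 4 and
  // JSObject::kHeaderSize == 12): kArgumentsObjectSize is 12 + 2 * 4 = 20
  // bytes, and the strict-mode variant, which drops the callee slot, is
  // 12 + 1 * 4 = 16 bytes.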
1017 
1018   // Allocates an arguments object - optionally with an elements array.
1019   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
1020   // failed.
1021   // Please note this does not perform a garbage collection.
1022   MUST_USE_RESULT MaybeObject* AllocateArgumentsObject(
1023       Object* callee, int length);
1024 
1025   // Same as NewNumberFromDouble, but may return a preallocated/immutable
1026   // number object (e.g., minus_zero_value_, nan_value_)
1027   MUST_USE_RESULT MaybeObject* NumberFromDouble(
1028       double value, PretenureFlag pretenure = NOT_TENURED);
1029 
1030   // Allocates a HeapNumber from value.
1031   MUST_USE_RESULT MaybeObject* AllocateHeapNumber(
1032       double value, PretenureFlag pretenure = NOT_TENURED);
1033 
1034   // Converts an int into either a Smi or a HeapNumber object.
1035   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
1036   // failed.
1037   // Please note this does not perform a garbage collection.
1038   MUST_USE_RESULT inline MaybeObject* NumberFromInt32(
1039       int32_t value, PretenureFlag pretenure = NOT_TENURED);
1040 
1041   // Converts an int into either a Smi or a HeapNumber object.
1042   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
1043   // failed.
1044   // Please note this does not perform a garbage collection.
1045   MUST_USE_RESULT inline MaybeObject* NumberFromUint32(
1046       uint32_t value, PretenureFlag pretenure = NOT_TENURED);
1047 
1048   // Allocates a new foreign object.
1049   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
1050   // failed.
1051   // Please note this does not perform a garbage collection.
1052   MUST_USE_RESULT MaybeObject* AllocateForeign(
1053       Address address, PretenureFlag pretenure = NOT_TENURED);
1054 
1055   // Allocates a new SharedFunctionInfo object.
1056   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
1057   // failed.
1058   // Please note this does not perform a garbage collection.
1059   MUST_USE_RESULT MaybeObject* AllocateSharedFunctionInfo(Object* name);
1060 
1061   // Allocates a new JSMessageObject object.
1062   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
1063   // failed.
1064   // Please note that this does not perform a garbage collection.
1065   MUST_USE_RESULT MaybeObject* AllocateJSMessageObject(
1066       String* type,
1067       JSArray* arguments,
1068       int start_position,
1069       int end_position,
1070       Object* script,
1071       Object* stack_trace,
1072       Object* stack_frames);
1073 
1074   // Allocates a new cons string object.
1075   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
1076   // failed.
1077   // Please note this does not perform a garbage collection.
1078   MUST_USE_RESULT MaybeObject* AllocateConsString(String* first,
1079                                                   String* second);
1080 
1081   // Allocates a new sub string object which is a substring of an underlying
1082   // string buffer stretching from the index start (inclusive) to the index
1083   // end (exclusive).
1084   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
1085   // failed.
1086   // Please note this does not perform a garbage collection.
1087   MUST_USE_RESULT MaybeObject* AllocateSubString(
1088       String* buffer,
1089       int start,
1090       int end,
1091       PretenureFlag pretenure = NOT_TENURED);
1092 
1093   // Allocate a new external string object, which is backed by a string
1094   // resource that resides outside the V8 heap.
1095   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
1096   // failed.
1097   // Please note this does not perform a garbage collection.
1098   MUST_USE_RESULT MaybeObject* AllocateExternalStringFromAscii(
1099       const ExternalAsciiString::Resource* resource);
1100   MUST_USE_RESULT MaybeObject* AllocateExternalStringFromTwoByte(
1101       const ExternalTwoByteString::Resource* resource);
1102 
1103   // Finalizes an external string by deleting the associated external
1104   // data and clearing the resource pointer.
1105   inline void FinalizeExternalString(String* string);
1106 
1107   // Allocates an uninitialized object.  The memory is non-executable if the
1108   // hardware and OS allow.
1109   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
1110   // failed.
1111   // Please note this function does not perform a garbage collection.
1112   MUST_USE_RESULT inline MaybeObject* AllocateRaw(int size_in_bytes,
1113                                                   AllocationSpace space,
1114                                                   AllocationSpace retry_space);
1115 
1116   // Initialize a filler object to keep the ability to iterate over the heap
1117   // when shortening objects.
1118   void CreateFillerObjectAt(Address addr, int size);
1119 
1120   // Makes a new native code object.
1121   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
1122   // failed. On success, the pointer to the Code object is stored in the
1123   // self_reference. This allows generated code to reference its own Code
1124   // object by containing this pointer.
1125   // Please note this function does not perform a garbage collection.
1126   MUST_USE_RESULT MaybeObject* CreateCode(
1127       const CodeDesc& desc,
1128       Code::Flags flags,
1129       Handle<Object> self_reference,
1130       bool immovable = false,
1131       bool crankshafted = false,
1132       int prologue_offset = Code::kPrologueOffsetNotSet);
1133 
1134   MUST_USE_RESULT MaybeObject* CopyCode(Code* code);
1135 
1136   // Copy the code and scope info part of the code object, but insert
1137   // the provided data as the relocation information.
1138   MUST_USE_RESULT MaybeObject* CopyCode(Code* code, Vector<byte> reloc_info);
1139 
1140   // Finds the internalized copy for string in the string table.
1141   // If not found, a new string is added to the table and returned.
1142   // Returns Failure::RetryAfterGC(requested_bytes, space) if allocation
1143   // failed.
1144   // Please note this function does not perform a garbage collection.
1145   MUST_USE_RESULT MaybeObject* InternalizeUtf8String(Vector<const char> str);
1146   MUST_USE_RESULT MaybeObject* InternalizeUtf8String(const char* str) {
1147     return InternalizeUtf8String(CStrVector(str));
1148   }
1149   MUST_USE_RESULT MaybeObject* InternalizeOneByteString(
1150       Vector<const uint8_t> str);
1151   MUST_USE_RESULT MaybeObject* InternalizeTwoByteString(Vector<const uc16> str);
1152   MUST_USE_RESULT MaybeObject* InternalizeString(String* str);
1153   MUST_USE_RESULT MaybeObject* InternalizeOneByteString(
1154       Handle<SeqOneByteString> string, int from, int length);
1155 
1156   bool InternalizeStringIfExists(String* str, String** result);
1157   bool InternalizeTwoCharsStringIfExists(String* str, String** result);
1158 
1159   // Compute the matching internalized string map for a string if possible.
1160   // NULL is returned if string is in new space or not flattened.
1161   Map* InternalizedStringMapForString(String* str);
1162 
1163   // Tries to flatten a string before compare operation.
1164   //
1165   // Returns a failure if flattening was deemed necessary but failed.
1166   // Note that if flattening is not necessary, the string may stay
1167   // non-flat even when no failure is returned.
1168   //
1169   // Please note this function does not perform a garbage collection.
1170   MUST_USE_RESULT inline MaybeObject* PrepareForCompare(String* str);
1171 
1172   // Converts the given boolean condition to JavaScript boolean value.
1173   inline Object* ToBoolean(bool condition);
1174 
1175   // Performs garbage collection operation.
1176   // Returns whether there is a chance that another major GC could
1177   // collect more garbage.
1178   inline bool CollectGarbage(AllocationSpace space,
1179                              const char* gc_reason = NULL);
1180 
1181   static const int kNoGCFlags = 0;
1182   static const int kSweepPreciselyMask = 1;
1183   static const int kReduceMemoryFootprintMask = 2;
1184   static const int kAbortIncrementalMarkingMask = 4;
1185 
1186   // Making the heap iterable requires us to sweep precisely and abort any
1187   // incremental marking as well.
1188   static const int kMakeHeapIterableMask =
1189       kSweepPreciselyMask | kAbortIncrementalMarkingMask;
1190 
1191   // Performs a full garbage collection.  If (flags & kMakeHeapIterableMask) is
1192   // non-zero, then the slower precise sweeper is used, which leaves the heap
1193   // in a state where we can iterate over the heap visiting all objects.
1194   void CollectAllGarbage(int flags, const char* gc_reason = NULL);
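  // Illustrative sketch (assumes a Heap* heap is in scope): code that wants to
  // walk all objects afterwards typically requests a precise, non-incremental
  // collection first:
  //
  //   heap->CollectAllGarbage(Heap::kMakeHeapIterableMask, "heap iteration");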
1195 
1196   // Last hope GC, should try to squeeze as much as possible.
1197   void CollectAllAvailableGarbage(const char* gc_reason = NULL);
1198 
1199   // Check whether the heap is currently iterable.
1200   bool IsHeapIterable();
1201 
1202   // Ensure that we have swept all spaces in such a way that we can iterate
1203   // over all objects.  May cause a GC.
1204   void EnsureHeapIsIterable();
1205 
1206   // Notify the heap that a context has been disposed.
1207   int NotifyContextDisposed();
1208 
1209   // Utility to invoke the scavenger. This is needed in test code to
1210   // ensure correct callback for weak global handles.
1211   void PerformScavenge();
1212 
1213   inline void increment_scan_on_scavenge_pages() {
1214     scan_on_scavenge_pages_++;
1215     if (FLAG_gc_verbose) {
1216       PrintF("Scan-on-scavenge pages: %d\n", scan_on_scavenge_pages_);
1217     }
1218   }
1219 
1220   inline void decrement_scan_on_scavenge_pages() {
1221     scan_on_scavenge_pages_--;
1222     if (FLAG_gc_verbose) {
1223       PrintF("Scan-on-scavenge pages: %d\n", scan_on_scavenge_pages_);
1224     }
1225   }
1226 
1227   PromotionQueue* promotion_queue() { return &promotion_queue_; }
1228 
1229 #ifdef DEBUG
1230   // Utility used with flag gc-greedy.
1231   void GarbageCollectionGreedyCheck();
1232 #endif
1233 
1234   void AddGCPrologueCallback(v8::Isolate::GCPrologueCallback callback,
1235                              GCType gc_type_filter,
1236                              bool pass_isolate = true);
1237   void RemoveGCPrologueCallback(v8::Isolate::GCPrologueCallback callback);
1238 
1239   void AddGCEpilogueCallback(v8::Isolate::GCEpilogueCallback callback,
1240                              GCType gc_type_filter,
1241                              bool pass_isolate = true);
1242   void RemoveGCEpilogueCallback(v8::Isolate::GCEpilogueCallback callback);
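  // Illustrative sketch (assumptions: a Heap* heap, and the callback/GCType
  // types from the public v8::Isolate API): registering a prologue callback
  // that only fires for mark-sweep/compact collections might look like:
  //
  //   static void OnMarkCompact(v8::Isolate* isolate, v8::GCType type,
  //                             v8::GCCallbackFlags flags) {
  //     // e.g. snapshot statistics before the collection starts
  //   }
  //   ...
  //   heap->AddGCPrologueCallback(OnMarkCompact, v8::kGCTypeMarkSweepCompact);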
1243 
1244   // Heap root getters.  We have versions with and without type::cast() here.
1245   // You can't use type::cast during GC because the assert fails.
1246   // TODO(1490): Try removing the unchecked accessors, now that GC marking does
1247   // not corrupt the map.
1248 #define ROOT_ACCESSOR(type, name, camel_name)                                  \
1249   type* name() {                                                               \
1250     return type::cast(roots_[k##camel_name##RootIndex]);                       \
1251   }                                                                            \
1252   type* raw_unchecked_##name() {                                               \
1253     return reinterpret_cast<type*>(roots_[k##camel_name##RootIndex]);          \
1254   }
1255   ROOT_LIST(ROOT_ACCESSOR)
1256 #undef ROOT_ACCESSOR
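  // For instance, for the root list entry V(Oddball, undefined_value,
  // UndefinedValue) the macro above expands, roughly, to:
  //
  //   Oddball* undefined_value() {
  //     return Oddball::cast(roots_[kUndefinedValueRootIndex]);
  //   }
  //   Oddball* raw_unchecked_undefined_value() {
  //     return reinterpret_cast<Oddball*>(roots_[kUndefinedValueRootIndex]);
  //   }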
1257 
1258 // Utility type maps
1259 #define STRUCT_MAP_ACCESSOR(NAME, Name, name)                                  \
1260     Map* name##_map() {                                                        \
1261       return Map::cast(roots_[k##Name##MapRootIndex]);                         \
1262     }
1263   STRUCT_LIST(STRUCT_MAP_ACCESSOR)
1264 #undef STRUCT_MAP_ACCESSOR
1265 
1266 #define STRING_ACCESSOR(name, str) String* name() {                            \
1267     return String::cast(roots_[k##name##RootIndex]);                           \
1268   }
1269   INTERNALIZED_STRING_LIST(STRING_ACCESSOR)
1270 #undef STRING_ACCESSOR
1271 
1272   // The hidden_string is special because it is the empty string, but does
1273   // not match the empty string.
1274   String* hidden_string() { return hidden_string_; }
1275 
1276   void set_native_contexts_list(Object* object) {
1277     native_contexts_list_ = object;
1278   }
1279   Object* native_contexts_list() { return native_contexts_list_; }
1280 
1281   void set_array_buffers_list(Object* object) {
1282     array_buffers_list_ = object;
1283   }
1284   Object* array_buffers_list() { return array_buffers_list_; }
1285 
1286   void set_allocation_sites_list(Object* object) {
1287     allocation_sites_list_ = object;
1288   }
1289   Object* allocation_sites_list() { return allocation_sites_list_; }
1290   Object** allocation_sites_list_address() { return &allocation_sites_list_; }
1291 
1292   Object* weak_object_to_code_table() { return weak_object_to_code_table_; }
1293 
1294   // Number of mark-sweeps.
1295   unsigned int ms_count() { return ms_count_; }
1296 
1297   // Iterates over all roots in the heap.
1298   void IterateRoots(ObjectVisitor* v, VisitMode mode);
1299   // Iterates over all strong roots in the heap.
1300   void IterateStrongRoots(ObjectVisitor* v, VisitMode mode);
1301   // Iterates over all the other roots in the heap.
1302   void IterateWeakRoots(ObjectVisitor* v, VisitMode mode);
1303 
1304   // Iterates over pointers into the from-semispace of new space found in
1305   // the memory interval from start to end.
1306   void IterateAndMarkPointersToFromSpace(Address start,
1307                                          Address end,
1308                                          ObjectSlotCallback callback);
1309 
1310   // Returns whether the object resides in new space.
1311   inline bool InNewSpace(Object* object);
1312   inline bool InNewSpace(Address address);
1313   inline bool InNewSpacePage(Address address);
1314   inline bool InFromSpace(Object* object);
1315   inline bool InToSpace(Object* object);
1316 
1317   // Returns whether the object resides in old pointer space.
1318   inline bool InOldPointerSpace(Address address);
1319   inline bool InOldPointerSpace(Object* object);
1320 
1321   // Returns whether the object resides in old data space.
1322   inline bool InOldDataSpace(Address address);
1323   inline bool InOldDataSpace(Object* object);
1324 
1325   // Checks whether an address/object is in the heap (including the
1326   // auxiliary area and unused area).
1327   bool Contains(Address addr);
1328   bool Contains(HeapObject* value);
1329 
1330   // Checks whether an address/object is in a space.
1331   // Currently used by tests, serialization and heap verification only.
1332   bool InSpace(Address addr, AllocationSpace space);
1333   bool InSpace(HeapObject* value, AllocationSpace space);
1334 
1335   // Finds out which space an object should get promoted to based on its type.
1336   inline OldSpace* TargetSpace(HeapObject* object);
1337   static inline AllocationSpace TargetSpaceId(InstanceType type);
1338 
1339   // Checks whether the given object is allowed to be migrated from its
1340   // current space into the given destination space. Used for debugging.
1341   inline bool AllowedToBeMigrated(HeapObject* object, AllocationSpace dest);
1342 
1343   // Sets the stub_cache_ (only used when expanding the dictionary).
1344   void public_set_code_stubs(UnseededNumberDictionary* value) {
1345     roots_[kCodeStubsRootIndex] = value;
1346   }
1347 
1348   // Support for computing object sizes for old objects during GCs. Returns
1349   // a function that is guaranteed to be safe for computing object sizes in
1350   // the current GC phase.
1351   HeapObjectCallback GcSafeSizeOfOldObjectFunction() {
1352     return gc_safe_size_of_old_object_;
1353   }
1354 
1355   // Sets the non_monomorphic_cache_ (only used when expanding the dictionary).
1356   void public_set_non_monomorphic_cache(UnseededNumberDictionary* value) {
1357     roots_[kNonMonomorphicCacheRootIndex] = value;
1358   }
1359 
1360   void public_set_empty_script(Script* script) {
1361     roots_[kEmptyScriptRootIndex] = script;
1362   }
1363 
1364   void public_set_store_buffer_top(Address* top) {
1365     roots_[kStoreBufferTopRootIndex] = reinterpret_cast<Smi*>(top);
1366   }
1367 
1368   // Generated code can embed this address to get access to the roots.
1369   Object** roots_array_start() { return roots_; }
1370 
1371   Address* store_buffer_top_address() {
1372     return reinterpret_cast<Address*>(&roots_[kStoreBufferTopRootIndex]);
1373   }
1374 
1375   // Get address of native contexts list for serialization support.
1376   Object** native_contexts_list_address() {
1377     return &native_contexts_list_;
1378   }
1379 
1380 #ifdef VERIFY_HEAP
1381   // Verify the heap is in its normal state before or after a GC.
1382   void Verify();
1383 
1384 
1385   bool weak_embedded_objects_verification_enabled() {
1386     return no_weak_object_verification_scope_depth_ == 0;
1387   }
1388 #endif
1389 
1390 #ifdef DEBUG
1391   void Print();
1392   void PrintHandles();
1393 
1394   void OldPointerSpaceCheckStoreBuffer();
1395   void MapSpaceCheckStoreBuffer();
1396   void LargeObjectSpaceCheckStoreBuffer();
1397 
1398   // Report heap statistics.
1399   void ReportHeapStatistics(const char* title);
1400   void ReportCodeStatistics(const char* title);
1401 #endif
1402 
1403   // Zapping is needed for verify heap, and always done in debug builds.
1404   static inline bool ShouldZapGarbage() {
1405 #ifdef DEBUG
1406     return true;
1407 #else
1408 #ifdef VERIFY_HEAP
1409     return FLAG_verify_heap;
1410 #else
1411     return false;
1412 #endif
1413 #endif
1414   }
1415 
1416   // Print short heap statistics.
1417   void PrintShortHeapStatistics();
1418 
1419   // Makes a new internalized string object.
1420   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
1421   // failed.
1422   // Please note this function does not perform a garbage collection.
1423   MUST_USE_RESULT MaybeObject* CreateInternalizedString(
1424       const char* str, int length, int hash);
1425   MUST_USE_RESULT MaybeObject* CreateInternalizedString(String* str);
1426 
1427   // Write barrier support for address[offset] = o.
1428   INLINE(void RecordWrite(Address address, int offset));
1429 
1430   // Write barrier support for address[start : start + len[ = o.
1431   INLINE(void RecordWrites(Address address, int start, int len));
1432 
1433   enum HeapState { NOT_IN_GC, SCAVENGE, MARK_COMPACT };
1434   inline HeapState gc_state() { return gc_state_; }
1435 
1436   inline bool IsInGCPostProcessing() { return gc_post_processing_depth_ > 0; }
1437 
1438 #ifdef DEBUG
1439   void set_allocation_timeout(int timeout) {
1440     allocation_timeout_ = timeout;
1441   }
1442 
1443   bool disallow_allocation_failure() {
1444     return disallow_allocation_failure_;
1445   }
1446 
1447   void TracePathToObjectFrom(Object* target, Object* root);
1448   void TracePathToObject(Object* target);
1449   void TracePathToGlobal();
1450 #endif
1451 
1452   // Callback function passed to Heap::Iterate etc.  Copies an object if
1453   // necessary; the object might be promoted to an old space.  The caller must
1454   // ensure the precondition that the object is (a) a heap object and (b) in
1455   // the heap's from space.
1456   static inline void ScavengePointer(HeapObject** p);
1457   static inline void ScavengeObject(HeapObject** p, HeapObject* object);
1458 
1459   // An object may have an AllocationSite associated with it through a trailing
1460   // AllocationMemento. Its feedback should be updated when objects are found
1461   // in the heap.
1462   static inline void UpdateAllocationSiteFeedback(HeapObject* object);
1463 
1464   // Support for partial snapshots.  After calling this we have a linear
1465   // space to write objects in each space.
1466   void ReserveSpace(int *sizes, Address* addresses);
1467 
1468   //
1469   // Support for the API.
1470   //
1471 
1472   bool CreateApiObjects();
1473 
1474   // Attempt to find the number in a small cache.  If we find it, return
1475   // the string representation of the number.  Otherwise return undefined.
1476   Object* GetNumberStringCache(Object* number);
1477 
1478   // Update the cache with a new number-string pair.
1479   void SetNumberStringCache(Object* number, String* str);
1480 
1481   // Adjusts the amount of registered external memory.
1482   // Returns the adjusted value.
1483   inline int64_t AdjustAmountOfExternalAllocatedMemory(
1484       int64_t change_in_bytes);
1485 
1486   // This is only needed for testing high promotion mode.
1487   void SetNewSpaceHighPromotionModeActive(bool mode) {
1488     new_space_high_promotion_mode_active_ = mode;
1489   }
1490 
1491   // Returns the allocation mode (pre-tenuring) based on observed promotion
1492   // rates of previous collections.
1493   inline PretenureFlag GetPretenureMode() {
1494     return FLAG_pretenuring && new_space_high_promotion_mode_active_
1495         ? TENURED : NOT_TENURED;
1496   }
1497 
1498   inline Address* NewSpaceHighPromotionModeActiveAddress() {
1499     return reinterpret_cast<Address*>(&new_space_high_promotion_mode_active_);
1500   }
1501 
1502   inline intptr_t PromotedTotalSize() {
1503     int64_t total = PromotedSpaceSizeOfObjects() + PromotedExternalMemorySize();
1504     if (total > kMaxInt) return static_cast<intptr_t>(kMaxInt);
1505     if (total < 0) return 0;
1506     return static_cast<intptr_t>(total);
1507   }
1508 
1509   inline intptr_t OldGenerationSpaceAvailable() {
1510     return old_generation_allocation_limit_ - PromotedTotalSize();
1511   }
1512 
1513   inline intptr_t OldGenerationCapacityAvailable() {
1514     return max_old_generation_size_ - PromotedTotalSize();
1515   }
1516 
1517   static const intptr_t kMinimumOldGenerationAllocationLimit =
1518       8 * (Page::kPageSize > MB ? Page::kPageSize : MB);
1519 
1520   intptr_t OldGenerationAllocationLimit(intptr_t old_gen_size) {
1521     const int divisor = FLAG_stress_compaction ? 10 :
1522         new_space_high_promotion_mode_active_ ? 1 : 3;
1523     intptr_t limit =
1524         Max(old_gen_size + old_gen_size / divisor,
1525             kMinimumOldGenerationAllocationLimit);
1526     limit += new_space_.Capacity();
1527     // TODO(hpayer): Can be removed when pretenuring is supported for all
1528     // allocation sites.
1529     if (IsHighSurvivalRate() && IsStableOrIncreasingSurvivalTrend()) {
1530       limit *= 2;
1531     }
1532     intptr_t halfway_to_the_max = (old_gen_size + max_old_generation_size_) / 2;
1533     return Min(limit, halfway_to_the_max);
1534   }
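  // Worked example (illustrative, round numbers): with old_gen_size = 64 MB,
  // the default divisor of 3, 1 MB pages and a 16 MB new space, the base limit
  // is Max(64 MB + 21 MB, 8 MB) + 16 MB, about 101 MB; a high and stable (or
  // increasing) survival rate doubles it, and the result is finally capped at
  // halfway between old_gen_size and max_old_generation_size_.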
1535 
1536   // Indicates whether inline bump-pointer allocation has been disabled.
1537   bool inline_allocation_disabled() { return inline_allocation_disabled_; }
1538 
1539   // Switch whether inline bump-pointer allocation should be used.
1540   void EnableInlineAllocation();
1541   void DisableInlineAllocation();
1542 
1543   // Implements the corresponding V8 API function.
1544   bool IdleNotification(int hint);
1545 
1546   // Declare all the root indices.
1547   enum RootListIndex {
1548 #define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
1549     STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION)
1550 #undef ROOT_INDEX_DECLARATION
1551 
1552 #define STRING_INDEX_DECLARATION(name, str) k##name##RootIndex,
1553     INTERNALIZED_STRING_LIST(STRING_INDEX_DECLARATION)
1554 #undef STRING_INDEX_DECLARATION
1555 
1556     // Utility type maps
1557 #define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex,
1558     STRUCT_LIST(DECLARE_STRUCT_MAP)
1559 #undef DECLARE_STRUCT_MAP
1560 
1561     kStringTableRootIndex,
1562     kStrongRootListLength = kStringTableRootIndex,
1563     kRootListLength
1564   };
1565 
1566   STATIC_CHECK(kUndefinedValueRootIndex == Internals::kUndefinedValueRootIndex);
1567   STATIC_CHECK(kNullValueRootIndex == Internals::kNullValueRootIndex);
1568   STATIC_CHECK(kTrueValueRootIndex == Internals::kTrueValueRootIndex);
1569   STATIC_CHECK(kFalseValueRootIndex == Internals::kFalseValueRootIndex);
1570   STATIC_CHECK(kempty_stringRootIndex == Internals::kEmptyStringRootIndex);
1571 
1572   // Generated code can embed direct references to non-writable roots if
1573   // they are in new space.
1574   static bool RootCanBeWrittenAfterInitialization(RootListIndex root_index);
1575   // Generated code can treat direct references to this root as constant.
1576   bool RootCanBeTreatedAsConstant(RootListIndex root_index);
1577 
1578   MUST_USE_RESULT MaybeObject* NumberToString(
1579       Object* number, bool check_number_string_cache = true,
1580       PretenureFlag pretenure = NOT_TENURED);
1581   MUST_USE_RESULT MaybeObject* Uint32ToString(
1582       uint32_t value, bool check_number_string_cache = true);
1583 
1584   Map* MapForExternalArrayType(ExternalArrayType array_type);
1585   RootListIndex RootIndexForExternalArrayType(
1586       ExternalArrayType array_type);
1587 
1588   RootListIndex RootIndexForEmptyExternalArray(ElementsKind kind);
1589   ExternalArray* EmptyExternalArrayForMap(Map* map);
1590 
1591   void RecordStats(HeapStats* stats, bool take_snapshot = false);
1592 
1593   // Copies a block of memory from src to dst. The size of the block should
1594   // be aligned to the pointer size.
1595   static inline void CopyBlock(Address dst, Address src, int byte_size);
1596 
1597   // Optimized version of memmove for blocks with pointer-size-aligned sizes
1598   // and pointer-size-aligned addresses.
1599   static inline void MoveBlock(Address dst, Address src, int byte_size);
1600 
1601   // Check new space expansion criteria and expand semispaces if it was hit.
1602   void CheckNewSpaceExpansionCriteria();
1603 
1604   inline void IncrementYoungSurvivorsCounter(int survived) {
1605     ASSERT(survived >= 0);
1606     young_survivors_after_last_gc_ = survived;
1607     survived_since_last_expansion_ += survived;
1608   }
1609 
1610   inline bool NextGCIsLikelyToBeFull() {
1611     if (FLAG_gc_global) return true;
1612 
1613     if (FLAG_stress_compaction && (gc_count_ & 1) != 0) return true;
1614 
1615     intptr_t adjusted_allocation_limit =
1616         old_generation_allocation_limit_ - new_space_.Capacity();
1617 
1618     if (PromotedTotalSize() >= adjusted_allocation_limit) return true;
1619 
1620     return false;
1621   }
1622 
1623   void UpdateNewSpaceReferencesInExternalStringTable(
1624       ExternalStringTableUpdaterCallback updater_func);
1625 
1626   void UpdateReferencesInExternalStringTable(
1627       ExternalStringTableUpdaterCallback updater_func);
1628 
1629   void ProcessWeakReferences(WeakObjectRetainer* retainer);
1630 
1631   void VisitExternalResources(v8::ExternalResourceVisitor* visitor);
1632 
1633   // Helper function that governs the promotion policy from new space to
1634   // old.  If the object's old address lies below the new space's age
1635   // mark or if we've already filled the bottom 1/16th of the to space,
1636   // we try to promote this object.
1637   inline bool ShouldBePromoted(Address old_address, int object_size);
1638 
1639   void ClearJSFunctionResultCaches();
1640 
1641   void ClearNormalizedMapCaches();
1642 
1643   GCTracer* tracer() { return tracer_; }
1644 
1645   // Returns the size of objects residing in non new spaces.
1646   intptr_t PromotedSpaceSizeOfObjects();
1647 
1648   double total_regexp_code_generated() { return total_regexp_code_generated_; }
1649   void IncreaseTotalRegexpCodeGenerated(int size) {
1650     total_regexp_code_generated_ += size;
1651   }
1652 
1653   void IncrementCodeGeneratedBytes(bool is_crankshafted, int size) {
1654     if (is_crankshafted) {
1655       crankshaft_codegen_bytes_generated_ += size;
1656     } else {
1657       full_codegen_bytes_generated_ += size;
1658     }
1659   }
1660 
1661   // Returns maximum GC pause.
1662   double get_max_gc_pause() { return max_gc_pause_; }
1663 
1664   // Returns maximum size of objects alive after GC.
1665   intptr_t get_max_alive_after_gc() { return max_alive_after_gc_; }
1666 
1667   // Returns minimal interval between two subsequent collections.
1668   double get_min_in_mutator() { return min_in_mutator_; }
1669 
1670   // TODO(hpayer): remove, should be handled by GCTracer
1671   void AddMarkingTime(double marking_time) {
1672     marking_time_ += marking_time;
1673   }
1674 
1675   double marking_time() const {
1676     return marking_time_;
1677   }
1678 
1679   // TODO(hpayer): remove, should be handled by GCTracer
1680   void AddSweepingTime(double sweeping_time) {
1681     sweeping_time_ += sweeping_time;
1682   }
1683 
1684   double sweeping_time() const {
1685     return sweeping_time_;
1686   }
1687 
1688   MarkCompactCollector* mark_compact_collector() {
1689     return &mark_compact_collector_;
1690   }
1691 
1692   StoreBuffer* store_buffer() {
1693     return &store_buffer_;
1694   }
1695 
1696   Marking* marking() {
1697     return &marking_;
1698   }
1699 
1700   IncrementalMarking* incremental_marking() {
1701     return &incremental_marking_;
1702   }
1703 
1704   bool IsSweepingComplete() {
1705     return !mark_compact_collector()->IsConcurrentSweepingInProgress() &&
1706            old_data_space()->IsLazySweepingComplete() &&
1707            old_pointer_space()->IsLazySweepingComplete();
1708   }
1709 
1710   bool AdvanceSweepers(int step_size);
1711 
1712   bool EnsureSweepersProgressed(int step_size) {
1713     bool sweeping_complete = old_data_space()->EnsureSweeperProgress(step_size);
1714     sweeping_complete &= old_pointer_space()->EnsureSweeperProgress(step_size);
1715     return sweeping_complete;
1716   }
1717 
1718   ExternalStringTable* external_string_table() {
1719     return &external_string_table_;
1720   }
1721 
1722   // Returns the current sweep generation.
1723   int sweep_generation() {
1724     return sweep_generation_;
1725   }
1726 
1727   inline Isolate* isolate();
1728 
1729   void CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags);
1730   void CallGCEpilogueCallbacks(GCType gc_type);
1731 
1732   inline bool OldGenerationAllocationLimitReached();
1733 
1734   inline void DoScavengeObject(Map* map, HeapObject** slot, HeapObject* obj) {
1735     scavenging_visitors_table_.GetVisitor(map)(map, slot, obj);
1736   }
1737 
1738   void QueueMemoryChunkForFree(MemoryChunk* chunk);
1739   void FreeQueuedChunks();
1740 
1741   int gc_count() const { return gc_count_; }
1742 
1743   // Completely clear the Instanceof cache (to stop it keeping objects alive
1744   // around a GC).
1745   inline void CompletelyClearInstanceofCache();
1746 
1747   // The roots that have an index less than this are always in old space.
1748   static const int kOldSpaceRoots = 0x20;
1749 
1750   uint32_t HashSeed() {
1751     uint32_t seed = static_cast<uint32_t>(hash_seed()->value());
1752     ASSERT(FLAG_randomize_hashes || seed == 0);
1753     return seed;
1754   }
1755 
1756   void SetArgumentsAdaptorDeoptPCOffset(int pc_offset) {
1757     ASSERT(arguments_adaptor_deopt_pc_offset() == Smi::FromInt(0));
1758     set_arguments_adaptor_deopt_pc_offset(Smi::FromInt(pc_offset));
1759   }
1760 
1761   void SetConstructStubDeoptPCOffset(int pc_offset) {
1762     ASSERT(construct_stub_deopt_pc_offset() == Smi::FromInt(0));
1763     set_construct_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
1764   }
1765 
1766   void SetGetterStubDeoptPCOffset(int pc_offset) {
1767     ASSERT(getter_stub_deopt_pc_offset() == Smi::FromInt(0));
1768     set_getter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
1769   }
1770 
1771   void SetSetterStubDeoptPCOffset(int pc_offset) {
1772     ASSERT(setter_stub_deopt_pc_offset() == Smi::FromInt(0));
1773     set_setter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
1774   }
1775 
1776   // For post mortem debugging.
1777   void RememberUnmappedPage(Address page, bool compacted);
1778 
1779   // Global inline caching age: it is incremented on some GCs after context
1780   // disposal. We use it to flush inline caches.
1781   int global_ic_age() {
1782     return global_ic_age_;
1783   }
1784 
1785   void AgeInlineCaches() {
1786     global_ic_age_ = (global_ic_age_ + 1) & SharedFunctionInfo::ICAgeBits::kMax;
1787   }
1788 
1789   bool flush_monomorphic_ics() { return flush_monomorphic_ics_; }
1790 
1791   int64_t amount_of_external_allocated_memory() {
1792     return amount_of_external_allocated_memory_;
1793   }
1794 
1795   // ObjectStats are kept in two arrays, counts and sizes. Related stats are
1796   // stored in a contiguous linear buffer. Stats groups are stored one after
1797   // another.
1798   enum {
1799     FIRST_CODE_KIND_SUB_TYPE = LAST_TYPE + 1,
1800     FIRST_FIXED_ARRAY_SUB_TYPE =
1801         FIRST_CODE_KIND_SUB_TYPE + Code::NUMBER_OF_KINDS,
1802     FIRST_CODE_AGE_SUB_TYPE =
1803         FIRST_FIXED_ARRAY_SUB_TYPE + LAST_FIXED_ARRAY_SUB_TYPE + 1,
1804     OBJECT_STATS_COUNT = FIRST_CODE_AGE_SUB_TYPE + Code::kCodeAgeCount + 1
1805   };
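  // For example (illustrative): a Code object of kind k and age a is counted
  // once under its InstanceType bucket, once under FIRST_CODE_KIND_SUB_TYPE + k
  // and once under FIRST_CODE_AGE_SUB_TYPE + (a - Code::kFirstCodeAge), while
  // fixed arrays are bucketed analogously starting at FIRST_FIXED_ARRAY_SUB_TYPE,
  // as implemented by the Record*Stats helpers below.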
1806 
1807   void RecordObjectStats(InstanceType type, size_t size) {
1808     ASSERT(type <= LAST_TYPE);
1809     object_counts_[type]++;
1810     object_sizes_[type] += size;
1811   }
1812 
1813   void RecordCodeSubTypeStats(int code_sub_type, int code_age, size_t size) {
1814     int code_sub_type_index = FIRST_CODE_KIND_SUB_TYPE + code_sub_type;
1815     int code_age_index =
1816         FIRST_CODE_AGE_SUB_TYPE + code_age - Code::kFirstCodeAge;
1817     ASSERT(code_sub_type_index >= FIRST_CODE_KIND_SUB_TYPE &&
1818            code_sub_type_index < FIRST_CODE_AGE_SUB_TYPE);
1819     ASSERT(code_age_index >= FIRST_CODE_AGE_SUB_TYPE &&
1820            code_age_index < OBJECT_STATS_COUNT);
1821     object_counts_[code_sub_type_index]++;
1822     object_sizes_[code_sub_type_index] += size;
1823     object_counts_[code_age_index]++;
1824     object_sizes_[code_age_index] += size;
1825   }
1826 
1827   void RecordFixedArraySubTypeStats(int array_sub_type, size_t size) {
1828     ASSERT(array_sub_type <= LAST_FIXED_ARRAY_SUB_TYPE);
1829     object_counts_[FIRST_FIXED_ARRAY_SUB_TYPE + array_sub_type]++;
1830     object_sizes_[FIRST_FIXED_ARRAY_SUB_TYPE + array_sub_type] += size;
1831   }
1832 
1833   void CheckpointObjectStats();
1834 
1835   // We don't use a LockGuard here since we want to lock the heap
1836   // only when FLAG_concurrent_recompilation is true.
1837   class RelocationLock {
1838    public:
1839     explicit RelocationLock(Heap* heap) : heap_(heap) {
1840       if (FLAG_concurrent_recompilation) {
1841         heap_->relocation_mutex_->Lock();
1842       }
1843     }
1844 
1845 
1846     ~RelocationLock() {
1847       if (FLAG_concurrent_recompilation) {
1848         heap_->relocation_mutex_->Unlock();
1849       }
1850     }
1851 
1852    private:
1853     Heap* heap_;
1854   };
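  // Illustrative usage sketch (assumes a Heap* heap is in scope): relocation
  // code brackets its critical section with the RAII lock above, which is a
  // no-op unless FLAG_concurrent_recompilation is enabled:
  //
  //   {
  //     Heap::RelocationLock relocation_lock(heap);
  //     // ... move objects and update pointers ...
  //   }  // mutex released here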
1855 
1856   MaybeObject* AddWeakObjectToCodeDependency(Object* obj, DependentCode* dep);
1857 
1858   DependentCode* LookupWeakObjectToCodeDependency(Object* obj);
1859 
1860   void InitializeWeakObjectToCodeTable() {
1861     set_weak_object_to_code_table(undefined_value());
1862   }
1863 
1864   void EnsureWeakObjectToCodeTable();
1865 
1866  private:
1867   Heap();
1868 
1869   // This can be calculated directly from a pointer to the heap; however, it is
1870   // more expedient to get at the isolate directly from within Heap methods.
1871   Isolate* isolate_;
1872 
1873   Object* roots_[kRootListLength];
1874 
1875   intptr_t code_range_size_;
1876   int reserved_semispace_size_;
1877   int max_semispace_size_;
1878   int initial_semispace_size_;
1879   intptr_t max_old_generation_size_;
1880   intptr_t max_executable_size_;
1881   intptr_t maximum_committed_;
1882 
1883   // For keeping track of how much data has survived
1884   // scavenge since last new space expansion.
1885   int survived_since_last_expansion_;
1886 
1887   // For keeping track of when to flush RegExp code.
1888   int sweep_generation_;
1889 
1890   int always_allocate_scope_depth_;
1891   int linear_allocation_scope_depth_;
1892 
1893   // For keeping track of context disposals.
1894   int contexts_disposed_;
1895 
1896   int global_ic_age_;
1897 
1898   bool flush_monomorphic_ics_;
1899 
1900   int scan_on_scavenge_pages_;
1901 
1902   NewSpace new_space_;
1903   OldSpace* old_pointer_space_;
1904   OldSpace* old_data_space_;
1905   OldSpace* code_space_;
1906   MapSpace* map_space_;
1907   CellSpace* cell_space_;
1908   PropertyCellSpace* property_cell_space_;
1909   LargeObjectSpace* lo_space_;
1910   HeapState gc_state_;
1911   int gc_post_processing_depth_;
1912 
1913   // Returns the amount of external memory registered since last global gc.
1914   int64_t PromotedExternalMemorySize();
1915 
1916   unsigned int ms_count_;  // how many mark-sweep collections happened
1917   unsigned int gc_count_;  // how many gc happened
1918 
1919   // For post mortem debugging.
1920   static const int kRememberedUnmappedPages = 128;
1921   int remembered_unmapped_pages_index_;
1922   Address remembered_unmapped_pages_[kRememberedUnmappedPages];
1923 
1924   // Total length of the strings we failed to flatten since the last GC.
1925   int unflattened_strings_length_;
1926 
1927 #define ROOT_ACCESSOR(type, name, camel_name)                                  \
1928   inline void set_##name(type* value) {                                        \
1929     /* The deserializer makes use of the fact that these common roots are */   \
1930     /* never in new space and never on a page that is being compacted.    */   \
1931     ASSERT(k##camel_name##RootIndex >= kOldSpaceRoots || !InNewSpace(value));  \
1932     roots_[k##camel_name##RootIndex] = value;                                  \
1933   }
1934   ROOT_LIST(ROOT_ACCESSOR)
1935 #undef ROOT_ACCESSOR
1936 
1937 #ifdef DEBUG
1938   // If the --gc-interval flag is set to a positive value, this
1939   // variable holds the number of allocations remaining until the next
1940   // failure and garbage collection.
1941   int allocation_timeout_;
1942 
1943   // Do we expect to be able to handle allocation failure at this
1944   // time?
1945   bool disallow_allocation_failure_;
1946 #endif  // DEBUG
1947 
1948   // Indicates that the new space should be kept small due to high promotion
1949   // rates caused by the mutator allocating a lot of long-lived objects.
1950   // TODO(hpayer): change to bool if no longer accessed from generated code
1951   intptr_t new_space_high_promotion_mode_active_;
1952 
1953   // Limit that triggers a global GC on the next (normally caused) GC.  This
1954   // is checked when we have already decided to do a GC to help determine
1955   // which collector to invoke, before expanding a paged space in the old
1956   // generation and on every allocation in large object space.
1957   intptr_t old_generation_allocation_limit_;
1958 
1959   // Used to adjust the limits that control the timing of the next GC.
1960   intptr_t size_of_old_gen_at_last_old_space_gc_;
1961 
1962   // Limit on the amount of externally allocated memory allowed
1963   // between global GCs. If reached a global GC is forced.
1964   intptr_t external_allocation_limit_;
1965 
1966   // The amount of external memory registered through the API kept alive
1967   // by global handles
1968   int64_t amount_of_external_allocated_memory_;
1969 
1970   // Caches the amount of external memory registered at the last global gc.
1971   int64_t amount_of_external_allocated_memory_at_last_global_gc_;
1972 
1973   // Indicates that an allocation has failed in the old generation since the
1974   // last GC.
1975   bool old_gen_exhausted_;
1976 
1977   // Indicates that inline bump-pointer allocation has been globally disabled
1978   // for all spaces. This is used to disable allocations in generated code.
1979   bool inline_allocation_disabled_;
1980 
1981   // Weak list heads, threaded through the objects.
1982   // List heads are initialized lazily and contain the undefined_value at start.
1983   Object* native_contexts_list_;
1984   Object* array_buffers_list_;
1985   Object* allocation_sites_list_;
1986 
1987   // WeakHashTable that maps objects embedded in optimized code to dependent
1988   // code list. It is initialized lazily and contains the undefined_value at
1989   // start.
1990   Object* weak_object_to_code_table_;
1991 
1992   StoreBufferRebuilder store_buffer_rebuilder_;
1993 
1994   struct StringTypeTable {
1995     InstanceType type;
1996     int size;
1997     RootListIndex index;
1998   };
1999 
2000   struct ConstantStringTable {
2001     const char* contents;
2002     RootListIndex index;
2003   };
2004 
2005   struct StructTable {
2006     InstanceType type;
2007     int size;
2008     RootListIndex index;
2009   };
2010 
2011   static const StringTypeTable string_type_table[];
2012   static const ConstantStringTable constant_string_table[];
2013   static const StructTable struct_table[];
2014 
2015   // The special hidden string which is an empty string, but does not match
2016   // any string when looked up in properties.
2017   String* hidden_string_;
2018 
2019   // GC callback function, called before and after mark-compact GC.
2020   // Allocations in the callback function are disallowed.
2021   struct GCPrologueCallbackPair {
2022     GCPrologueCallbackPair(v8::Isolate::GCPrologueCallback callback,
2023                            GCType gc_type,
2024                            bool pass_isolate)
2025         : callback(callback), gc_type(gc_type), pass_isolate_(pass_isolate) {
2026     }
2027     bool operator==(const GCPrologueCallbackPair& pair) const {
2028       return pair.callback == callback;
2029     }
2030     v8::Isolate::GCPrologueCallback callback;
2031     GCType gc_type;
2032     // TODO(dcarney): remove variable
2033     bool pass_isolate_;
2034   };
2035   List<GCPrologueCallbackPair> gc_prologue_callbacks_;
2036 
2037   struct GCEpilogueCallbackPair {
2038     GCEpilogueCallbackPair(v8::Isolate::GCPrologueCallback callback,
2039                            GCType gc_type,
2040                            bool pass_isolate)
2041         : callback(callback), gc_type(gc_type), pass_isolate_(pass_isolate) {
2042     }
2043     bool operator==(const GCEpilogueCallbackPair& pair) const {
2044       return pair.callback == callback;
2045     }
2046     v8::Isolate::GCPrologueCallback callback;
2047     GCType gc_type;
2048     // TODO(dcarney): remove variable
2049     bool pass_isolate_;
2050   };
2051   List<GCEpilogueCallbackPair> gc_epilogue_callbacks_;
2052 
2053   // Support for computing object sizes during GC.
2054   HeapObjectCallback gc_safe_size_of_old_object_;
2055   static int GcSafeSizeOfOldObject(HeapObject* object);
2056 
2057   // Update the GC state. Called from the mark-compact collector.
2058   void MarkMapPointersAsEncoded(bool encoded) {
2059     ASSERT(!encoded);
2060     gc_safe_size_of_old_object_ = &GcSafeSizeOfOldObject;
2061   }
2062 
2063   // Code that should be run before and after each GC.  Includes some
2064   // reporting/verification activities when compiled with DEBUG set.
2065   void GarbageCollectionPrologue();
2066   void GarbageCollectionEpilogue();
2067 
2068   // Checks whether a global GC is necessary
2069   GarbageCollector SelectGarbageCollector(AllocationSpace space,
2070                                           const char** reason);
2071 
2072   // Performs garbage collection operation.
2073   // Returns whether there is a chance that another major GC could
2074   // collect more garbage.
2075   bool CollectGarbage(AllocationSpace space,
2076                       GarbageCollector collector,
2077                       const char* gc_reason,
2078                       const char* collector_reason);
2079 
2080   // Performs garbage collection
2081   // Returns whether there is a chance another major GC could
2082   // collect more garbage.
2083   bool PerformGarbageCollection(GarbageCollector collector,
2084                                 GCTracer* tracer);
2085 
2086   inline void UpdateOldSpaceLimits();
2087 
2088   // Selects the proper allocation space depending on the given object
2089   // size, pretenuring decision, and preferred old-space.
2090   static AllocationSpace SelectSpace(int object_size,
2091                                      AllocationSpace preferred_old_space,
2092                                      PretenureFlag pretenure) {
2093     ASSERT(preferred_old_space == OLD_POINTER_SPACE ||
2094            preferred_old_space == OLD_DATA_SPACE);
2095     if (object_size > Page::kMaxNonCodeHeapObjectSize) return LO_SPACE;
2096     return (pretenure == TENURED) ? preferred_old_space : NEW_SPACE;
2097   }
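  // For example (illustrative): a 2 KB TENURED allocation preferring
  // OLD_DATA_SPACE is placed in OLD_DATA_SPACE, the same allocation with
  // NOT_TENURED goes to NEW_SPACE, and any object larger than
  // Page::kMaxNonCodeHeapObjectSize ends up in LO_SPACE regardless of
  // pretenuring.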
2098 
2099   // Allocate an uninitialized fixed array.
2100   MUST_USE_RESULT MaybeObject* AllocateRawFixedArray(
2101       int length, PretenureFlag pretenure);
2102 
2103   // Allocate an uninitialized fixed double array.
2104   MUST_USE_RESULT MaybeObject* AllocateRawFixedDoubleArray(
2105       int length, PretenureFlag pretenure);
2106 
2107   // Allocate an initialized fixed array with the given filler value.
2108   MUST_USE_RESULT MaybeObject* AllocateFixedArrayWithFiller(
2109       int length, PretenureFlag pretenure, Object* filler);
2110 
2111   // Initializes a JSObject based on its map.
2112   void InitializeJSObjectFromMap(JSObject* obj,
2113                                  FixedArray* properties,
2114                                  Map* map);
2115   void InitializeAllocationMemento(AllocationMemento* memento,
2116                                    AllocationSite* allocation_site);
2117 
2118   bool CreateInitialMaps();
2119   bool CreateInitialObjects();
2120 
2121   // These two Create*EntryStub functions are here and forced to not be inlined
2122   // because of a gcc-4.4 bug that assigns wrong vtable entries.
2123   NO_INLINE(void CreateJSEntryStub());
2124   NO_INLINE(void CreateJSConstructEntryStub());
2125 
2126   void CreateFixedStubs();
2127   void CreateStubsRequiringBuiltins();
2128 
2129   MUST_USE_RESULT MaybeObject* CreateOddball(const char* to_string,
2130                                              Object* to_number,
2131                                              byte kind);
2132 
2133   // Allocate a JSArray with no elements
2134   MUST_USE_RESULT MaybeObject* AllocateJSArray(
2135       ElementsKind elements_kind,
2136       PretenureFlag pretenure = NOT_TENURED);
2137 
2138   // Allocate empty fixed array.
2139   MUST_USE_RESULT MaybeObject* AllocateEmptyFixedArray();
2140 
2141   // Allocate empty external array of given type.
2142   MUST_USE_RESULT MaybeObject* AllocateEmptyExternalArray(
2143       ExternalArrayType array_type);
2144 
2145   // Allocate empty fixed double array.
2146   MUST_USE_RESULT MaybeObject* AllocateEmptyFixedDoubleArray();
2147 
2148   // Allocate a tenured simple cell.
2149   MUST_USE_RESULT MaybeObject* AllocateCell(Object* value);
2150 
2151   // Allocate a tenured JS global property cell initialized with the hole.
2152   MUST_USE_RESULT MaybeObject* AllocatePropertyCell();
2153 
2154   // Allocate Box.
2155   MUST_USE_RESULT MaybeObject* AllocateBox(Object* value,
2156                                            PretenureFlag pretenure);
2157 
2158   // Performs a minor collection in new generation.
2159   void Scavenge();
2160 
2161   // Commits from space if it is uncommitted.
2162   void EnsureFromSpaceIsCommitted();
2163 
2164   // Uncommit unused semi space.
2165   bool UncommitFromSpace() { return new_space_.UncommitFromSpace(); }
2166 
2167   // Fill in bogus values in from space
2168   void ZapFromSpace();
2169 
2170   static String* UpdateNewSpaceReferenceInExternalStringTableEntry(
2171       Heap* heap,
2172       Object** pointer);
2173 
2174   Address DoScavenge(ObjectVisitor* scavenge_visitor, Address new_space_front);
2175   static void ScavengeStoreBufferCallback(Heap* heap,
2176                                           MemoryChunk* page,
2177                                           StoreBufferEvent event);
2178 
2179   // Performs a major collection in the whole heap.
2180   void MarkCompact(GCTracer* tracer);
2181 
2182   // Code to be run before and after mark-compact.
2183   void MarkCompactPrologue();
2184 
2185   void ProcessNativeContexts(WeakObjectRetainer* retainer, bool record_slots);
2186   void ProcessArrayBuffers(WeakObjectRetainer* retainer, bool record_slots);
2187   void ProcessAllocationSites(WeakObjectRetainer* retainer, bool record_slots);
2188 
2189   // Called on heap tear-down.
2190   void TearDownArrayBuffers();
2191 
2192   // Record statistics before and after garbage collection.
2193   void ReportStatisticsBeforeGC();
2194   void ReportStatisticsAfterGC();
2195 
2196   // Slow part of scavenge object.
2197   static void ScavengeObjectSlow(HeapObject** p, HeapObject* object);
2198 
2199   // Initializes a function with a shared part and prototype.
2200   // Note: this code was factored out of AllocateFunction such that
2201   // other parts of the VM could use it. Specifically, a function that creates
2202   // instances of type JS_FUNCTION_TYPE benefits from the use of this function.
2203   // Please note this does not perform a garbage collection.
2204   inline void InitializeFunction(
2205       JSFunction* function,
2206       SharedFunctionInfo* shared,
2207       Object* prototype);
2208 
2209   // Total RegExp code ever generated
2210   double total_regexp_code_generated_;
2211 
2212   GCTracer* tracer_;
2213 
2214   // Allocates a small number to string cache.
2215   MUST_USE_RESULT MaybeObject* AllocateInitialNumberStringCache();
2216   // Creates and installs the full-sized number string cache.
2217   void AllocateFullSizeNumberStringCache();
2218   // Get the length of the number to string cache based on the max semispace
2219   // size.
2220   int FullSizeNumberStringCacheLength();
2221   // Flush the number to string cache.
2222   void FlushNumberStringCache();
2223 
2224   void UpdateSurvivalRateTrend(int start_new_space_size);
2225 
2226   enum SurvivalRateTrend { INCREASING, STABLE, DECREASING, FLUCTUATING };
2227 
2228   static const int kYoungSurvivalRateHighThreshold = 90;
2229   static const int kYoungSurvivalRateLowThreshold = 10;
2230   static const int kYoungSurvivalRateAllowedDeviation = 15;
2231 
2232   int young_survivors_after_last_gc_;
2233   int high_survival_rate_period_length_;
2234   int low_survival_rate_period_length_;
2235   double survival_rate_;
2236   SurvivalRateTrend previous_survival_rate_trend_;
2237   SurvivalRateTrend survival_rate_trend_;
2238 
2239   void set_survival_rate_trend(SurvivalRateTrend survival_rate_trend) {
2240     ASSERT(survival_rate_trend != FLUCTUATING);
2241     previous_survival_rate_trend_ = survival_rate_trend_;
2242     survival_rate_trend_ = survival_rate_trend;
2243   }
2244 
2245   SurvivalRateTrend survival_rate_trend() {
2246     if (survival_rate_trend_ == STABLE) {
2247       return STABLE;
2248     } else if (previous_survival_rate_trend_ == STABLE) {
2249       return survival_rate_trend_;
2250     } else if (survival_rate_trend_ != previous_survival_rate_trend_) {
2251       return FLUCTUATING;
2252     } else {
2253       return survival_rate_trend_;
2254     }
2255   }
2256 
2257   bool IsStableOrIncreasingSurvivalTrend() {
2258     switch (survival_rate_trend()) {
2259       case STABLE:
2260       case INCREASING:
2261         return true;
2262       default:
2263         return false;
2264     }
2265   }
2266 
2267   bool IsStableOrDecreasingSurvivalTrend() {
2268     switch (survival_rate_trend()) {
2269       case STABLE:
2270       case DECREASING:
2271         return true;
2272       default:
2273         return false;
2274     }
2275   }
2276 
2277   bool IsIncreasingSurvivalTrend() {
2278     return survival_rate_trend() == INCREASING;
2279   }
2280 
2281   bool IsHighSurvivalRate() {
2282     return high_survival_rate_period_length_ > 0;
2283   }
2284 
2285   bool IsLowSurvivalRate() {
2286     return low_survival_rate_period_length_ > 0;
2287   }
2288 
2289   void SelectScavengingVisitorsTable();
2290 
2291   void StartIdleRound() {
2292     mark_sweeps_since_idle_round_started_ = 0;
2293   }
2294 
2295   void FinishIdleRound() {
2296     mark_sweeps_since_idle_round_started_ = kMaxMarkSweepsInIdleRound;
2297     scavenges_since_last_idle_round_ = 0;
2298   }
2299 
2300   bool EnoughGarbageSinceLastIdleRound() {
2301     return (scavenges_since_last_idle_round_ >= kIdleScavengeThreshold);
2302   }
2303 
2304   // Estimates how many milliseconds a Mark-Sweep would take to complete.
2305   // In idle notification handler we assume that this function will return:
2306   // - a number less than 10 for small heaps, which are less than 8Mb.
2307   // - a number greater than 10 for large heaps, which are greater than 32Mb.
2308   int TimeMarkSweepWouldTakeInMs() {
2309     // Rough estimate of how many megabytes of heap can be processed in 1 ms.
2310     static const int kMbPerMs = 2;
2311 
2312     int heap_size_mb = static_cast<int>(SizeOfObjects() / MB);
2313     return heap_size_mb / kMbPerMs;
2314   }
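  // Worked example (illustrative): an 8 MB heap yields 8 / 2 = 4 ms, under the
  // "less than 10" small-heap case described above, while a 64 MB heap yields
  // 32 ms.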
2315 
2316   // Returns true if no more GC work is left.
2317   bool IdleGlobalGC();
2318 
2319   void AdvanceIdleIncrementalMarking(intptr_t step_size);
2320 
2321   void ClearObjectStats(bool clear_last_time_stats = false);
2322 
2323   void set_weak_object_to_code_table(Object* value) {
2324     ASSERT(!InNewSpace(value));
2325     weak_object_to_code_table_ = value;
2326   }
2327 
2328   Object** weak_object_to_code_table_address() {
2329     return &weak_object_to_code_table_;
2330   }
2331 
2332   static const int kInitialStringTableSize = 2048;
2333   static const int kInitialEvalCacheSize = 64;
2334   static const int kInitialNumberStringCacheSize = 256;
2335 
2336   // Object counts and used memory by InstanceType
2337   size_t object_counts_[OBJECT_STATS_COUNT];
2338   size_t object_counts_last_time_[OBJECT_STATS_COUNT];
2339   size_t object_sizes_[OBJECT_STATS_COUNT];
2340   size_t object_sizes_last_time_[OBJECT_STATS_COUNT];
2341 
2342   // Maximum GC pause.
2343   double max_gc_pause_;
2344 
2345   // Total time spent in GC.
2346   double total_gc_time_ms_;
2347 
2348   // Maximum size of objects alive after GC.
2349   intptr_t max_alive_after_gc_;
2350 
2351   // Minimal interval between two subsequent collections.
2352   double min_in_mutator_;
2353 
2354   // Size of objects alive after last GC.
2355   intptr_t alive_after_last_gc_;
2356 
2357   double last_gc_end_timestamp_;
2358 
2359   // Cumulative GC time spent in marking
2360   double marking_time_;
2361 
2362   // Cumulative GC time spent in sweeping
2363   double sweeping_time_;
2364 
2365   MarkCompactCollector mark_compact_collector_;
2366 
2367   StoreBuffer store_buffer_;
2368 
2369   Marking marking_;
2370 
2371   IncrementalMarking incremental_marking_;
2372 
2373   int number_idle_notifications_;
2374   unsigned int last_idle_notification_gc_count_;
2375   bool last_idle_notification_gc_count_init_;
2376 
2377   int mark_sweeps_since_idle_round_started_;
2378   unsigned int gc_count_at_last_idle_gc_;
2379   int scavenges_since_last_idle_round_;
2380 
2381   // These two counters are monotonically increasing and never reset.
2382   size_t full_codegen_bytes_generated_;
2383   size_t crankshaft_codegen_bytes_generated_;
2384 
2385   // If the --deopt_every_n_garbage_collections flag is set to a positive value,
2386   // this variable holds the number of garbage collections since the last
2387   // deoptimization triggered by garbage collection.
2388   int gcs_since_last_deopt_;
2389 
2390 #ifdef VERIFY_HEAP
2391   int no_weak_object_verification_scope_depth_;
2392 #endif
2393 
2394   static const int kMaxMarkSweepsInIdleRound = 7;
2395   static const int kIdleScavengeThreshold = 5;
2396 
2397   // Shared state read by the scavenge collector and set by ScavengeObject.
2398   PromotionQueue promotion_queue_;
2399 
2400   // Flag is set when the heap has been configured.  The heap can be repeatedly
2401   // configured through the API until it is set up.
2402   bool configured_;
2403 
2404   ExternalStringTable external_string_table_;
2405 
2406   VisitorDispatchTable<ScavengingCallback> scavenging_visitors_table_;
2407 
2408   MemoryChunk* chunks_queued_for_free_;
2409 
2410   Mutex* relocation_mutex_;
2411 #ifdef DEBUG
2412   bool relocation_mutex_locked_by_optimizer_thread_;
2413 #endif  // DEBUG;
2414 
2415   friend class Factory;
2416   friend class GCTracer;
2417   friend class DisallowAllocationFailure;
2418   friend class AlwaysAllocateScope;
2419   friend class Page;
2420   friend class Isolate;
2421   friend class MarkCompactCollector;
2422   friend class MarkCompactMarkingVisitor;
2423   friend class MapCompact;
2424 #ifdef VERIFY_HEAP
2425   friend class NoWeakObjectVerificationScope;
2426 #endif
2427 
2428   DISALLOW_COPY_AND_ASSIGN(Heap);
2429 };
2430 
2431 
2432 class HeapStats {
2433  public:
2434   static const int kStartMarker = 0xDECADE00;
2435   static const int kEndMarker = 0xDECADE01;
2436 
2437   int* start_marker;                    //  0
2438   int* new_space_size;                  //  1
2439   int* new_space_capacity;              //  2
2440   intptr_t* old_pointer_space_size;          //  3
2441   intptr_t* old_pointer_space_capacity;      //  4
2442   intptr_t* old_data_space_size;             //  5
2443   intptr_t* old_data_space_capacity;         //  6
2444   intptr_t* code_space_size;                 //  7
2445   intptr_t* code_space_capacity;             //  8
2446   intptr_t* map_space_size;                  //  9
2447   intptr_t* map_space_capacity;              // 10
2448   intptr_t* cell_space_size;                 // 11
2449   intptr_t* cell_space_capacity;             // 12
2450   intptr_t* lo_space_size;                   // 13
2451   int* global_handle_count;             // 14
2452   int* weak_global_handle_count;        // 15
2453   int* pending_global_handle_count;     // 16
2454   int* near_death_global_handle_count;  // 17
2455   int* free_global_handle_count;        // 18
2456   intptr_t* memory_allocator_size;           // 19
2457   intptr_t* memory_allocator_capacity;       // 20
2458   int* objects_per_type;                // 21
2459   int* size_per_type;                   // 22
2460   int* os_error;                        // 23
2461   int* end_marker;                      // 24
2462   intptr_t* property_cell_space_size;   // 25
2463   intptr_t* property_cell_space_capacity;    // 26
2464 };
2465 
2466 
2467 class DisallowAllocationFailure {
2468  public:
2469   inline DisallowAllocationFailure();
2470   inline ~DisallowAllocationFailure();
2471 
2472 #ifdef DEBUG
2473  private:
2474   bool old_state_;
2475 #endif
2476 };
2477 
2478 
2479 class AlwaysAllocateScope {
2480  public:
2481   inline AlwaysAllocateScope();
2482   inline ~AlwaysAllocateScope();
2483 
2484  private:
2485   // Implicitly disable artificial allocation failures.
2486   DisallowAllocationFailure disallow_allocation_failure_;
2487 };
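// Usage sketch (illustrative only): both of the classes above are plain RAII
// guards, so a caller constructs them on the stack around the region they
// protect, e.g.
//
//   {
//     AlwaysAllocateScope always_allocate;
//     // Allocation requests in this block are treated as always allowed; the
//     // embedded DisallowAllocationFailure also suppresses artificial
//     // allocation failures for the duration of the scope.
//   }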
2488 
2489 
2490 #ifdef VERIFY_HEAP
2491 class NoWeakObjectVerificationScope {
2492  public:
2493   inline NoWeakObjectVerificationScope();
2494   inline ~NoWeakObjectVerificationScope();
2495 };
2496 #endif
2497 
2498 
2499 // Visitor class to verify interior pointers in spaces that do not contain
2500 // or care about intergenerational references. All heap object pointers have to
2501 // point into the heap to a location that has a map pointer at its first word.
2502 // Caveat: Heap::Contains is an approximation because it can return true for
2503 // objects in a heap space but above the allocation pointer.
2504 class VerifyPointersVisitor: public ObjectVisitor {
2505  public:
2506   inline void VisitPointers(Object** start, Object** end);
2507 };
2508 
2509 
2510 // Space iterator for iterating over all spaces of the heap.  Returns each space
2511 // in turn, and null when it is done.
2512 class AllSpaces BASE_EMBEDDED {
2513  public:
2514   explicit AllSpaces(Heap* heap) : heap_(heap), counter_(FIRST_SPACE) {}
2515   Space* next();
2516  private:
2517   Heap* heap_;
2518   int counter_;
2519 };
2520 
2521 
2522 // Space iterator for iterating over all old spaces of the heap: Old pointer
2523 // space, old data space and code space.  Returns each space in turn, and null
2524 // when it is done.
2525 class OldSpaces BASE_EMBEDDED {
2526  public:
2527   explicit OldSpaces(Heap* heap) : heap_(heap), counter_(OLD_POINTER_SPACE) {}
2528   OldSpace* next();
2529  private:
2530   Heap* heap_;
2531   int counter_;
2532 };
2533 
2534 
2535 // Space iterator for iterating over all the paged spaces of the heap: Map
2536 // space, old pointer space, old data space, code space and cell space.  Returns
2537 // each space in turn, and null when it is done.
2538 class PagedSpaces BASE_EMBEDDED {
2539  public:
2540   explicit PagedSpaces(Heap* heap) : heap_(heap), counter_(OLD_POINTER_SPACE) {}
2541   PagedSpace* next();
2542  private:
2543   Heap* heap_;
2544   int counter_;
2545 };
2546 
2547 
2548 // Space iterator for iterating over all spaces of the heap.
2549 // For each space an object iterator is provided. The deallocation of the
2550 // returned object iterators is handled by the space iterator.
2551 class SpaceIterator : public Malloced {
2552  public:
2553   explicit SpaceIterator(Heap* heap);
2554   SpaceIterator(Heap* heap, HeapObjectCallback size_func);
2555   virtual ~SpaceIterator();
2556 
2557   bool has_next();
2558   ObjectIterator* next();
2559 
2560  private:
2561   ObjectIterator* CreateIterator();
2562 
2563   Heap* heap_;
2564   int current_space_;  // from enum AllocationSpace.
2565   ObjectIterator* iterator_;  // object iterator for the current space.
2566   HeapObjectCallback size_func_;
2567 };
2568 
2569 
2570 // A HeapIterator provides iteration over the whole heap. It
2571 // aggregates the specific iterators for the different spaces as
2572 // each of those can only iterate over a single space.
2573 //
2574 // HeapIterator can skip free list nodes (that is, de-allocated heap
2575 // objects that still remain in the heap). As the implementation of
2576 // free-node filtering uses GC marks, it can't be used during MS/MC GC
2577 // phases. Also, it is forbidden to interrupt iteration in this mode,
2578 // as this would leave heap objects marked (and thus, unusable).
2579 class HeapObjectsFilter;
2580 
2581 class HeapIterator BASE_EMBEDDED {
2582  public:
2583   enum HeapObjectsFiltering {
2584     kNoFiltering,
2585     kFilterUnreachable
2586   };
2587 
2588   explicit HeapIterator(Heap* heap);
2589   HeapIterator(Heap* heap, HeapObjectsFiltering filtering);
2590   ~HeapIterator();
2591 
2592   HeapObject* next();
2593   void reset();
2594 
2595  private:
2596   // Perform the initialization.
2597   void Init();
2598   // Perform all necessary shutdown (destruction) work.
2599   void Shutdown();
2600   HeapObject* NextObject();
2601 
2602   Heap* heap_;
2603   HeapObjectsFiltering filtering_;
2604   HeapObjectsFilter* filter_;
2605   // Space iterator for iterating all the spaces.
2606   SpaceIterator* space_iterator_;
2607   // Object iterator for the space currently being iterated.
2608   ObjectIterator* object_iterator_;
2609 };
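// Usage sketch (illustrative only): a typical walk over every heap object,
// optionally skipping unreachable (free list) objects, looks like
//
//   HeapIterator iterator(heap, HeapIterator::kFilterUnreachable);
//   for (HeapObject* obj = iterator.next(); obj != NULL;
//        obj = iterator.next()) {
//     // ... inspect obj; do not interrupt the iteration while filtering,
//     // since that would leave heap objects marked.
//   }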
2610 
2611 
2612 // Cache for mapping (map, property name) into field offset.
2613 // Cleared at startup and prior to mark sweep collection.
2614 class KeyedLookupCache {
2615  public:
2616   // Lookup field offset for (map, name). If absent, -1 is returned.
2617   int Lookup(Map* map, Name* name);
2618 
2619   // Update an element in the cache.
2620   void Update(Map* map, Name* name, int field_offset);
2621 
2622   // Clear the cache.
2623   void Clear();
2624 
2625   static const int kLength = 256;
2626   static const int kCapacityMask = kLength - 1;
2627   static const int kMapHashShift = 5;
2628   static const int kHashMask = -4;  // Zero the last two bits.
2629   static const int kEntriesPerBucket = 4;
2630   static const int kNotFound = -1;
2631 
2632   // kEntriesPerBucket should be a power of 2.
2633   STATIC_ASSERT((kEntriesPerBucket & (kEntriesPerBucket - 1)) == 0);
2634   STATIC_ASSERT(kEntriesPerBucket == -kHashMask);
2635 
2636  private:
2637   KeyedLookupCache() {
2638     for (int i = 0; i < kLength; ++i) {
2639       keys_[i].map = NULL;
2640       keys_[i].name = NULL;
2641       field_offsets_[i] = kNotFound;
2642     }
2643   }
2644 
2645   static inline int Hash(Map* map, Name* name);
2646 
2647   // Get the address of the keys and field_offsets arrays.  Used in
2648   // generated code to perform cache lookups.
2649   Address keys_address() {
2650     return reinterpret_cast<Address>(&keys_);
2651   }
2652 
2653   Address field_offsets_address() {
2654     return reinterpret_cast<Address>(&field_offsets_);
2655   }
2656 
2657   struct Key {
2658     Map* map;
2659     Name* name;
2660   };
2661 
2662   Key keys_[kLength];
2663   int field_offsets_[kLength];
2664 
2665   friend class ExternalReference;
2666   friend class Isolate;
2667   DISALLOW_COPY_AND_ASSIGN(KeyedLookupCache);
2668 };
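// Usage sketch (illustrative only): the cache is a fixed-size table keyed on
// (map, name) with buckets of kEntriesPerBucket entries. A typical
// miss-then-update cycle is
//
//   int offset = isolate->keyed_lookup_cache()->Lookup(map, name);
//   if (offset == KeyedLookupCache::kNotFound) {
//     offset = /* result of the real property lookup */;
//     isolate->keyed_lookup_cache()->Update(map, name, offset);
//   }
//
// (isolate->keyed_lookup_cache() is assumed here; the cache instance is
// reachable through the Isolate, which is declared a friend above.)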
2669 
2670 
2671 // Cache for mapping (map, property name) into descriptor index.
2672 // The cache contains both positive and negative results.
2673 // A descriptor index equal to kNotFound means the property is absent.
2674 // Cleared at startup and prior to any gc.
2675 class DescriptorLookupCache {
2676  public:
2677   // Lookup descriptor index for (map, name).
2678   // If absent, kAbsent is returned.
2679   int Lookup(Map* source, Name* name) {
2680     if (!name->IsUniqueName()) return kAbsent;
2681     int index = Hash(source, name);
2682     Key& key = keys_[index];
2683     if ((key.source == source) && (key.name == name)) return results_[index];
2684     return kAbsent;
2685   }
2686 
2687   // Update an element in the cache.
2688   void Update(Map* source, Name* name, int result) {
2689     ASSERT(result != kAbsent);
2690     if (name->IsUniqueName()) {
2691       int index = Hash(source, name);
2692       Key& key = keys_[index];
2693       key.source = source;
2694       key.name = name;
2695       results_[index] = result;
2696     }
2697   }
2698 
2699   // Clear the cache.
2700   void Clear();
2701 
2702   static const int kAbsent = -2;
2703 
2704  private:
2705   DescriptorLookupCache() {
2706     for (int i = 0; i < kLength; ++i) {
2707       keys_[i].source = NULL;
2708       keys_[i].name = NULL;
2709       results_[i] = kAbsent;
2710     }
2711   }
2712 
2713   static int Hash(Object* source, Name* name) {
2714     // Uses only lower 32 bits if pointers are larger.
2715     uint32_t source_hash =
2716         static_cast<uint32_t>(reinterpret_cast<uintptr_t>(source))
2717             >> kPointerSizeLog2;
2718     uint32_t name_hash =
2719         static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name))
2720             >> kPointerSizeLog2;
2721     return (source_hash ^ name_hash) % kLength;
2722   }
2723 
2724   static const int kLength = 64;
2725   struct Key {
2726     Map* source;
2727     Name* name;
2728   };
2729 
2730   Key keys_[kLength];
2731   int results_[kLength];
2732 
2733   friend class Isolate;
2734   DISALLOW_COPY_AND_ASSIGN(DescriptorLookupCache);
2735 };
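// Usage sketch (illustrative only): the cache is direct-mapped on a hash of
// the (source map, name) pointers, so a colliding Update simply overwrites
// the previous entry. A typical call sequence is
//
//   int index = isolate->descriptor_lookup_cache()->Lookup(map, name);
//   if (index == DescriptorLookupCache::kAbsent) {
//     index = /* result of the real descriptor lookup */;
//     isolate->descriptor_lookup_cache()->Update(map, name, index);
//   }
//
// (isolate->descriptor_lookup_cache() is assumed here; as above, the Isolate
// owns the cache instance.)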
2736 
2737 
2738 // GCTracer collects and prints ONE line after each garbage collector
2739 // invocation IFF --trace_gc is used.
2740 
2741 class GCTracer BASE_EMBEDDED {
2742  public:
2743   class Scope BASE_EMBEDDED {
2744    public:
2745     enum ScopeId {
2746       EXTERNAL,
2747       MC_MARK,
2748       MC_SWEEP,
2749       MC_SWEEP_NEWSPACE,
2750       MC_EVACUATE_PAGES,
2751       MC_UPDATE_NEW_TO_NEW_POINTERS,
2752       MC_UPDATE_ROOT_TO_NEW_POINTERS,
2753       MC_UPDATE_OLD_TO_NEW_POINTERS,
2754       MC_UPDATE_POINTERS_TO_EVACUATED,
2755       MC_UPDATE_POINTERS_BETWEEN_EVACUATED,
2756       MC_UPDATE_MISC_POINTERS,
2757       MC_WEAKCOLLECTION_PROCESS,
2758       MC_WEAKCOLLECTION_CLEAR,
2759       MC_FLUSH_CODE,
2760       kNumberOfScopes
2761     };
2762 
2763     Scope(GCTracer* tracer, ScopeId scope)
2764         : tracer_(tracer),
2765         scope_(scope) {
2766       start_time_ = OS::TimeCurrentMillis();
2767     }
2768 
2769     ~Scope() {
2770       ASSERT(scope_ < kNumberOfScopes);  // scope_ is unsigned.
2771       tracer_->scopes_[scope_] += OS::TimeCurrentMillis() - start_time_;
2772     }
2773 
2774    private:
2775     GCTracer* tracer_;
2776     ScopeId scope_;
2777     double start_time_;
2778   };
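  // Usage sketch (illustrative only): a Scope is a stack-allocated timer whose
  // lifetime is charged to one of the phases above, e.g.
  //
  //   { GCTracer::Scope gc_scope(tracer, GCTracer::Scope::MC_MARK);
  //     // ... marking work ...
  //   }  // elapsed milliseconds are added to tracer->scopes_[MC_MARK] here.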
2779 
2780   explicit GCTracer(Heap* heap,
2781                     const char* gc_reason,
2782                     const char* collector_reason);
2783   ~GCTracer();
2784 
2785   // Sets the collector.
2786   void set_collector(GarbageCollector collector) { collector_ = collector; }
2787 
2788   // Sets the GC count.
2789   void set_gc_count(unsigned int count) { gc_count_ = count; }
2790 
2791   // Sets the full GC count.
2792   void set_full_gc_count(int count) { full_gc_count_ = count; }
2793 
2794   void increment_promoted_objects_size(int object_size) {
2795     promoted_objects_size_ += object_size;
2796   }
2797 
2798   void increment_nodes_died_in_new_space() {
2799     nodes_died_in_new_space_++;
2800   }
2801 
2802   void increment_nodes_copied_in_new_space() {
2803     nodes_copied_in_new_space_++;
2804   }
2805 
2806   void increment_nodes_promoted() {
2807     nodes_promoted_++;
2808   }
2809 
2810  private:
2811   // Returns a string matching the collector.
2812   const char* CollectorString();
2813 
2814   // Returns size of object in heap (in MB).
2815   inline double SizeOfHeapObjects();
2816 
2817   // Timestamp set in the constructor.
2818   double start_time_;
2819 
2820   // Size of objects in heap set in constructor.
2821   intptr_t start_object_size_;
2822 
2823   // Size of memory allocated from OS set in constructor.
2824   intptr_t start_memory_size_;
2825 
2826   // Type of collector.
2827   GarbageCollector collector_;
2828 
2829   // A count (including this one, e.g. the first collection is 1) of the
2830   // number of garbage collections.
2831   unsigned int gc_count_;
2832 
2833   // A count (including this one) of the number of full garbage collections.
2834   int full_gc_count_;
2835 
2836   // Amounts of time spent in different scopes during GC.
2837   double scopes_[Scope::kNumberOfScopes];
2838 
2839   // Total amount of space either wasted or contained in one of the free
2840   // lists before the current GC.
2841   intptr_t in_free_list_or_wasted_before_gc_;
2842 
2843   // Difference between space used in the heap at the beginning of the current
2844   // collection and the end of the previous collection.
2845   intptr_t allocated_since_last_gc_;
2846 
2847   // Amount of time spent in the mutator, i.e. the time elapsed between the
2848   // end of the previous collection and the beginning of the current one.
2849   double spent_in_mutator_;
2850 
2851   // Size of objects promoted during the current collection.
2852   intptr_t promoted_objects_size_;
2853 
2854   // Number of nodes that died in the new space.
2855   int nodes_died_in_new_space_;
2856 
2857   // Number of nodes copied in the new space.
2858   int nodes_copied_in_new_space_;
2859 
2860   // Number of nodes promoted to the old space.
2861   int nodes_promoted_;
2862 
2863   // Incremental marking steps counters.
2864   int steps_count_;
2865   double steps_took_;
2866   double longest_step_;
2867   int steps_count_since_last_gc_;
2868   double steps_took_since_last_gc_;
2869 
2870   Heap* heap_;
2871 
2872   const char* gc_reason_;
2873   const char* collector_reason_;
2874 };
2875 
2876 
2877 class RegExpResultsCache {
2878  public:
2879   enum ResultsCacheType { REGEXP_MULTIPLE_INDICES, STRING_SPLIT_SUBSTRINGS };
2880 
2881   // Attempt to retrieve a cached result.  On failure, 0 is returned as a Smi.
2882   // On success, the returned result is guaranteed to be a COW-array.
2883   static Object* Lookup(Heap* heap,
2884                         String* key_string,
2885                         Object* key_pattern,
2886                         ResultsCacheType type);
2887   // Attempt to add value_array to the cache specified by type.  On success,
2888   // value_array is turned into a COW-array.
2889   static void Enter(Heap* heap,
2890                     String* key_string,
2891                     Object* key_pattern,
2892                     FixedArray* value_array,
2893                     ResultsCacheType type);
2894   static void Clear(FixedArray* cache);
2895   static const int kRegExpResultsCacheSize = 0x100;
2896 
2897  private:
2898   static const int kArrayEntriesPerCacheEntry = 4;
2899   static const int kStringOffset = 0;
2900   static const int kPatternOffset = 1;
2901   static const int kArrayOffset = 2;
2902 };
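// Usage sketch (illustrative only): callers probe the cache before running an
// expensive regexp or string-split operation and populate it afterwards, e.g.
//
//   Object* cached = RegExpResultsCache::Lookup(
//       heap, subject, pattern, RegExpResultsCache::STRING_SPLIT_SUBSTRINGS);
//   if (cached->IsSmi()) {  // Smi 0 signals a cache miss (see Lookup above).
//     FixedArray* result = /* compute the split result */;
//     RegExpResultsCache::Enter(
//         heap, subject, pattern, result,
//         RegExpResultsCache::STRING_SPLIT_SUBSTRINGS);
//   }
//
// (subject and pattern are placeholder names for the caller's key objects.)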
2903 
2904 
2905 class TranscendentalCache {
2906  public:
2907   enum Type {ACOS, ASIN, ATAN, COS, EXP, LOG, SIN, TAN, kNumberOfCaches};
2908   static const int kTranscendentalTypeBits = 3;
2909   STATIC_ASSERT((1 << kTranscendentalTypeBits) >= kNumberOfCaches);
2910 
2911   // Returns a heap number with f(input), where f is a math function specified
2912   // by the 'type' argument.
2913   MUST_USE_RESULT inline MaybeObject* Get(Type type, double input);
2914 
2915   // The cache contains raw Object pointers.  This method disposes of
2916   // them before a garbage collection.
2917   void Clear();
2918 
2919  private:
2920   class SubCache {
2921     static const int kCacheSize = 512;
2922 
2923     explicit SubCache(Isolate* isolate, Type t);
2924 
2925     MUST_USE_RESULT inline MaybeObject* Get(double input);
2926 
2927     inline double Calculate(double input);
2928 
2929     struct Element {
2930       uint32_t in[2];
2931       Object* output;
2932     };
2933 
2934     union Converter {
2935       double dbl;
2936       uint32_t integers[2];
2937     };
2938 
2939     inline static int Hash(const Converter& c) {
2940       uint32_t hash = (c.integers[0] ^ c.integers[1]);
2941       hash ^= static_cast<int32_t>(hash) >> 16;
2942       hash ^= static_cast<int32_t>(hash) >> 8;
2943       return (hash & (kCacheSize - 1));
2944     }
2945 
2946     Element elements_[kCacheSize];
2947     Type type_;
2948     Isolate* isolate_;
2949 
2950     // Allow access to the caches_ array as an ExternalReference.
2951     friend class ExternalReference;
2952     // Inline implementation of the cache.
2953     friend class TranscendentalCacheStub;
2954     // For evaluating value.
2955     friend class TranscendentalCache;
2956 
2957     DISALLOW_COPY_AND_ASSIGN(SubCache);
2958   };
2959 
2960   explicit TranscendentalCache(Isolate* isolate) : isolate_(isolate) {
2961     for (int i = 0; i < kNumberOfCaches; ++i) caches_[i] = NULL;
2962   }
2963 
2964   ~TranscendentalCache() {
2965     for (int i = 0; i < kNumberOfCaches; ++i) delete caches_[i];
2966   }
2967 
2968   // Used to create an external reference.
2969   inline Address cache_array_address();
2970 
2971   // Instantiation
2972   friend class Isolate;
2973   // Inline implementation of the caching.
2974   friend class TranscendentalCacheStub;
2975   // Allow access to the caches_ array as an ExternalReference.
2976   friend class ExternalReference;
2977 
2978   Isolate* isolate_;
2979   SubCache* caches_[kNumberOfCaches];
2980   DISALLOW_COPY_AND_ASSIGN(TranscendentalCache);
2981 };
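// Usage sketch (illustrative only): each per-type SubCache memoizes results
// keyed on the raw 64 bits of the input double (the two uint32_t halves stored
// in Element::in), so repeated calls such as
//
//   MaybeObject* maybe_result =
//       isolate->transcendental_cache()->Get(TranscendentalCache::LOG, x);
//
// for the same x can return the previously allocated heap number instead of
// recomputing it. (isolate->transcendental_cache() is assumed here.)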
2982 
2983 
2984 // Abstract base class for checking whether a weak object should be retained.
2985 class WeakObjectRetainer {
2986  public:
2987   virtual ~WeakObjectRetainer() {}
2988 
2989   // Return whether this object should be retained. If NULL is returned the
2990   // object has no references. Otherwise the address of the retained object
2991   // should be returned as in some GC situations the object has been moved.
2992   virtual Object* RetainAs(Object* object) = 0;
2993 };
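// Illustrative sketch of a (hypothetical) concrete retainer: returning the
// object keeps it alive, possibly at a new address after relocation, while
// returning NULL drops it.
//
//   class KeepEverythingRetainer : public WeakObjectRetainer {
//    public:
//     virtual Object* RetainAs(Object* object) { return object; }
//   };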
2994 
2995 
2996 // Intrusive object marking uses the least significant bit of a
2997 // heap object's map word to mark objects.
2998 // Normally all map words have the least significant bit set
2999 // because they contain a tagged map pointer.
3000 // If the bit is not set, the object is marked.
3001 // All objects should be unmarked before resuming
3002 // JavaScript execution.
3003 class IntrusiveMarking {
3004  public:
3005   static bool IsMarked(HeapObject* object) {
3006     return (object->map_word().ToRawValue() & kNotMarkedBit) == 0;
3007   }
3008 
3009   static void ClearMark(HeapObject* object) {
3010     uintptr_t map_word = object->map_word().ToRawValue();
3011     object->set_map_word(MapWord::FromRawValue(map_word | kNotMarkedBit));
3012     ASSERT(!IsMarked(object));
3013   }
3014 
3015   static void SetMark(HeapObject* object) {
3016     uintptr_t map_word = object->map_word().ToRawValue();
3017     object->set_map_word(MapWord::FromRawValue(map_word & ~kNotMarkedBit));
3018     ASSERT(IsMarked(object));
3019   }
3020 
3021   static Map* MapOfMarkedObject(HeapObject* object) {
3022     uintptr_t map_word = object->map_word().ToRawValue();
3023     return MapWord::FromRawValue(map_word | kNotMarkedBit).ToMap();
3024   }
3025 
3026   static int SizeOfMarkedObject(HeapObject* object) {
3027     return object->SizeFromMap(MapOfMarkedObject(object));
3028   }
3029 
3030  private:
3031   static const uintptr_t kNotMarkedBit = 0x1;
3032   STATIC_ASSERT((kHeapObjectTag & kNotMarkedBit) != 0);
3033 };
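// Worked example of the bit trick above (illustrative only): a map word is a
// tagged Map pointer, so its low bit (kHeapObjectTag) is normally set.
//
//   SetMark(obj);            // clears the low bit -> IsMarked(obj) is true
//   MapOfMarkedObject(obj);  // ORs the bit back in to recover the real Map*
//   ClearMark(obj);          // restores the tag bit -> object is unmarked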
3034 
3035 
3036 #ifdef DEBUG
3037 // Helper class for tracing paths to a search target Object from all roots.
3038 // The TracePathFrom() method can be used to trace paths from a specific
3039 // object to the search target object.
3040 class PathTracer : public ObjectVisitor {
3041  public:
3042   enum WhatToFind {
3043     FIND_ALL,   // Will find all matches.
3044     FIND_FIRST  // Will stop the search after first match.
3045   };
3046 
3047   // For the WhatToFind arg, if FIND_FIRST is specified, tracing will stop
3048   // after the first match.  If FIND_ALL is specified, then tracing will be
3049   // done for all matches.
3050   PathTracer(Object* search_target,
3051              WhatToFind what_to_find,
3052              VisitMode visit_mode)
3053       : search_target_(search_target),
3054         found_target_(false),
3055         found_target_in_trace_(false),
3056         what_to_find_(what_to_find),
3057         visit_mode_(visit_mode),
3058         object_stack_(20),
3059         no_allocation() {}
3060 
3061   virtual void VisitPointers(Object** start, Object** end);
3062 
3063   void Reset();
3064   void TracePathFrom(Object** root);
3065 
3066   bool found() const { return found_target_; }
3067 
3068   static Object* const kAnyGlobalObject;
3069 
3070  protected:
3071   class MarkVisitor;
3072   class UnmarkVisitor;
3073 
3074   void MarkRecursively(Object** p, MarkVisitor* mark_visitor);
3075   void UnmarkRecursively(Object** p, UnmarkVisitor* unmark_visitor);
3076   virtual void ProcessResults();
3077 
3078   // Tags 0, 1, and 3 are used. Use 2 for marking visited HeapObject.
3079   static const int kMarkTag = 2;
3080 
3081   Object* search_target_;
3082   bool found_target_;
3083   bool found_target_in_trace_;
3084   WhatToFind what_to_find_;
3085   VisitMode visit_mode_;
3086   List<Object*> object_stack_;
3087 
3088   DisallowHeapAllocation no_allocation;  // i.e. no gc allowed.
3089 
3090  private:
3091   DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer);
3092 };
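// Usage sketch (illustrative only): to find out what keeps an object alive,
// construct a tracer for it and feed it root pointers, e.g.
//
//   PathTracer tracer(target, PathTracer::FIND_FIRST, VISIT_ALL);
//   heap->IterateRoots(&tracer, VISIT_ALL);  // invokes VisitPointers on roots
//   if (tracer.found()) { /* a retaining path to target was traced */ }
//
// (heap->IterateRoots and the VISIT_ALL VisitMode value are assumed here;
// TracePathFrom can also be called directly on a specific root slot.)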
3093 #endif  // DEBUG
3094 
3095 } }  // namespace v8::internal
3096 
3097 #endif  // V8_HEAP_H_
3098