// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_HEAP_H_
#define V8_HEAP_H_

#include <math.h>

#include "globals.h"
#include "list.h"
#include "mark-compact.h"
#include "spaces.h"
#include "splay-tree-inl.h"
#include "v8-counters.h"

namespace v8 {
namespace internal {

// TODO(isolates): remove HEAP here
#define HEAP (_inline_get_heap_())
class Heap;
inline Heap* _inline_get_heap_();


// Defines all the roots in Heap.
#define STRONG_ROOT_LIST(V)                                      \
  /* Put the byte array map early.  We need it to be in place by the time   */ \
  /* the deserializer hits the next page, since it wants to put a byte      */ \
  /* array in the unused space at the end of the page.                      */ \
  V(Map, byte_array_map, ByteArrayMap)                                         \
  V(Map, one_pointer_filler_map, OnePointerFillerMap)                          \
  V(Map, two_pointer_filler_map, TwoPointerFillerMap)                          \
  /* Cluster the most popular ones in a few cache lines here at the top.    */ \
  V(Object, undefined_value, UndefinedValue)                                   \
  V(Object, the_hole_value, TheHoleValue)                                      \
  V(Object, null_value, NullValue)                                             \
  V(Object, true_value, TrueValue)                                             \
  V(Object, false_value, FalseValue)                                           \
  V(Object, arguments_marker, ArgumentsMarker)                                 \
  V(Map, heap_number_map, HeapNumberMap)                                       \
  V(Map, global_context_map, GlobalContextMap)                                 \
  V(Map, fixed_array_map, FixedArrayMap)                                       \
  V(Map, fixed_cow_array_map, FixedCOWArrayMap)                                \
  V(Object, no_interceptor_result_sentinel, NoInterceptorResultSentinel)       \
  V(Map, meta_map, MetaMap)                                                    \
  V(Map, hash_table_map, HashTableMap)                                         \
  V(Smi, stack_limit, StackLimit)                                              \
  V(FixedArray, number_string_cache, NumberStringCache)                        \
  V(Object, instanceof_cache_function, InstanceofCacheFunction)                \
  V(Object, instanceof_cache_map, InstanceofCacheMap)                          \
  V(Object, instanceof_cache_answer, InstanceofCacheAnswer)                    \
  V(FixedArray, single_character_string_cache, SingleCharacterStringCache)     \
  V(Object, termination_exception, TerminationException)                       \
  V(FixedArray, empty_fixed_array, EmptyFixedArray)                            \
  V(ByteArray, empty_byte_array, EmptyByteArray)                               \
  V(String, empty_string, EmptyString)                                         \
  V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray)             \
  V(Map, string_map, StringMap)                                                \
  V(Map, ascii_string_map, AsciiStringMap)                                     \
  V(Map, symbol_map, SymbolMap)                                                \
  V(Map, cons_string_map, ConsStringMap)                                       \
  V(Map, cons_ascii_string_map, ConsAsciiStringMap)                            \
  V(Map, ascii_symbol_map, AsciiSymbolMap)                                     \
  V(Map, cons_symbol_map, ConsSymbolMap)                                       \
  V(Map, cons_ascii_symbol_map, ConsAsciiSymbolMap)                            \
  V(Map, external_symbol_map, ExternalSymbolMap)                               \
  V(Map, external_symbol_with_ascii_data_map, ExternalSymbolWithAsciiDataMap)  \
  V(Map, external_ascii_symbol_map, ExternalAsciiSymbolMap)                    \
  V(Map, external_string_map, ExternalStringMap)                               \
  V(Map, external_string_with_ascii_data_map, ExternalStringWithAsciiDataMap)  \
  V(Map, external_ascii_string_map, ExternalAsciiStringMap)                    \
  V(Map, undetectable_string_map, UndetectableStringMap)                       \
  V(Map, undetectable_ascii_string_map, UndetectableAsciiStringMap)            \
  V(Map, external_pixel_array_map, ExternalPixelArrayMap)                      \
  V(Map, external_byte_array_map, ExternalByteArrayMap)                        \
  V(Map, external_unsigned_byte_array_map, ExternalUnsignedByteArrayMap)       \
  V(Map, external_short_array_map, ExternalShortArrayMap)                      \
  V(Map, external_unsigned_short_array_map, ExternalUnsignedShortArrayMap)     \
  V(Map, external_int_array_map, ExternalIntArrayMap)                          \
  V(Map, external_unsigned_int_array_map, ExternalUnsignedIntArrayMap)         \
  V(Map, external_float_array_map, ExternalFloatArrayMap)                      \
  V(Map, context_map, ContextMap)                                              \
  V(Map, catch_context_map, CatchContextMap)                                   \
  V(Map, code_map, CodeMap)                                                    \
  V(Map, oddball_map, OddballMap)                                              \
  V(Map, global_property_cell_map, GlobalPropertyCellMap)                      \
  V(Map, shared_function_info_map, SharedFunctionInfoMap)                      \
  V(Map, message_object_map, JSMessageObjectMap)                               \
  V(Map, proxy_map, ProxyMap)                                                  \
  V(Object, nan_value, NanValue)                                               \
  V(Object, minus_zero_value, MinusZeroValue)                                  \
  V(Map, neander_map, NeanderMap)                                              \
  V(JSObject, message_listeners, MessageListeners)                             \
  V(Proxy, prototype_accessors, PrototypeAccessors)                            \
  V(NumberDictionary, code_stubs, CodeStubs)                                   \
  V(NumberDictionary, non_monomorphic_cache, NonMonomorphicCache)              \
  V(Code, js_entry_code, JsEntryCode)                                          \
  V(Code, js_construct_entry_code, JsConstructEntryCode)                       \
  V(FixedArray, natives_source_cache, NativesSourceCache)                      \
  V(Object, last_script_id, LastScriptId)                                      \
  V(Script, empty_script, EmptyScript)                                         \
  V(Smi, real_stack_limit, RealStackLimit)                                     \
  V(StringDictionary, intrinsic_function_names, IntrinsicFunctionNames)        \

#define ROOT_LIST(V)                                  \
  STRONG_ROOT_LIST(V)                                 \
  V(SymbolTable, symbol_table, SymbolTable)

#define SYMBOL_LIST(V)                                                   \
  V(Array_symbol, "Array")                                               \
  V(Object_symbol, "Object")                                             \
  V(Proto_symbol, "__proto__")                                           \
  V(StringImpl_symbol, "StringImpl")                                     \
  V(arguments_symbol, "arguments")                                       \
  V(Arguments_symbol, "Arguments")                                       \
  V(arguments_shadow_symbol, ".arguments")                               \
  V(call_symbol, "call")                                                 \
  V(apply_symbol, "apply")                                               \
  V(caller_symbol, "caller")                                             \
  V(boolean_symbol, "boolean")                                           \
  V(Boolean_symbol, "Boolean")                                           \
  V(callee_symbol, "callee")                                             \
  V(constructor_symbol, "constructor")                                   \
  V(code_symbol, ".code")                                                \
  V(result_symbol, ".result")                                            \
  V(catch_var_symbol, ".catch-var")                                      \
  V(empty_symbol, "")                                                    \
  V(eval_symbol, "eval")                                                 \
  V(function_symbol, "function")                                         \
  V(length_symbol, "length")                                             \
  V(name_symbol, "name")                                                 \
  V(number_symbol, "number")                                             \
  V(Number_symbol, "Number")                                             \
  V(nan_symbol, "NaN")                                                   \
  V(RegExp_symbol, "RegExp")                                             \
  V(source_symbol, "source")                                             \
  V(global_symbol, "global")                                             \
  V(ignore_case_symbol, "ignoreCase")                                    \
  V(multiline_symbol, "multiline")                                       \
  V(input_symbol, "input")                                               \
  V(index_symbol, "index")                                               \
  V(last_index_symbol, "lastIndex")                                      \
  V(object_symbol, "object")                                             \
  V(prototype_symbol, "prototype")                                       \
  V(string_symbol, "string")                                             \
  V(String_symbol, "String")                                             \
  V(Date_symbol, "Date")                                                 \
  V(this_symbol, "this")                                                 \
  V(to_string_symbol, "toString")                                        \
  V(char_at_symbol, "CharAt")                                            \
  V(undefined_symbol, "undefined")                                       \
  V(value_of_symbol, "valueOf")                                          \
  V(InitializeVarGlobal_symbol, "InitializeVarGlobal")                   \
  V(InitializeConstGlobal_symbol, "InitializeConstGlobal")               \
  V(KeyedLoadSpecialized_symbol, "KeyedLoadSpecialized")                 \
  V(KeyedStoreSpecialized_symbol, "KeyedStoreSpecialized")               \
  V(stack_overflow_symbol, "kStackOverflowBoilerplate")                  \
  V(illegal_access_symbol, "illegal access")                             \
  V(out_of_memory_symbol, "out-of-memory")                               \
  V(illegal_execution_state_symbol, "illegal execution state")           \
  V(get_symbol, "get")                                                   \
  V(set_symbol, "set")                                                   \
  V(function_class_symbol, "Function")                                   \
  V(illegal_argument_symbol, "illegal argument")                         \
  V(MakeReferenceError_symbol, "MakeReferenceError")                     \
  V(MakeSyntaxError_symbol, "MakeSyntaxError")                           \
  V(MakeTypeError_symbol, "MakeTypeError")                               \
  V(invalid_lhs_in_assignment_symbol, "invalid_lhs_in_assignment")       \
  V(invalid_lhs_in_for_in_symbol, "invalid_lhs_in_for_in")               \
  V(invalid_lhs_in_postfix_op_symbol, "invalid_lhs_in_postfix_op")       \
  V(invalid_lhs_in_prefix_op_symbol, "invalid_lhs_in_prefix_op")         \
  V(illegal_return_symbol, "illegal_return")                             \
  V(illegal_break_symbol, "illegal_break")                               \
  V(illegal_continue_symbol, "illegal_continue")                         \
  V(unknown_label_symbol, "unknown_label")                               \
  V(redeclaration_symbol, "redeclaration")                               \
  V(failure_symbol, "<failure>")                                         \
  V(space_symbol, " ")                                                   \
  V(exec_symbol, "exec")                                                 \
  V(zero_symbol, "0")                                                    \
  V(global_eval_symbol, "GlobalEval")                                    \
  V(identity_hash_symbol, "v8::IdentityHash")                            \
  V(closure_symbol, "(closure)")                                         \
  V(use_strict, "use strict")                                            \
  V(KeyedLoadExternalByteArray_symbol, "KeyedLoadExternalByteArray")     \
  V(KeyedLoadExternalUnsignedByteArray_symbol,                           \
      "KeyedLoadExternalUnsignedByteArray")                              \
  V(KeyedLoadExternalShortArray_symbol,                                  \
      "KeyedLoadExternalShortArray")                                     \
  V(KeyedLoadExternalUnsignedShortArray_symbol,                          \
      "KeyedLoadExternalUnsignedShortArray")                             \
  V(KeyedLoadExternalIntArray_symbol, "KeyedLoadExternalIntArray")       \
  V(KeyedLoadExternalUnsignedIntArray_symbol,                            \
      "KeyedLoadExternalUnsignedIntArray")                               \
  V(KeyedLoadExternalFloatArray_symbol, "KeyedLoadExternalFloatArray")   \
  V(KeyedLoadExternalPixelArray_symbol, "KeyedLoadExternalPixelArray")   \
  V(KeyedStoreExternalByteArray_symbol, "KeyedStoreExternalByteArray")   \
  V(KeyedStoreExternalUnsignedByteArray_symbol,                          \
      "KeyedStoreExternalUnsignedByteArray")                             \
  V(KeyedStoreExternalShortArray_symbol, "KeyedStoreExternalShortArray") \
  V(KeyedStoreExternalUnsignedShortArray_symbol,                         \
      "KeyedStoreExternalUnsignedShortArray")                            \
  V(KeyedStoreExternalIntArray_symbol, "KeyedStoreExternalIntArray")     \
  V(KeyedStoreExternalUnsignedIntArray_symbol,                           \
      "KeyedStoreExternalUnsignedIntArray")                              \
  V(KeyedStoreExternalFloatArray_symbol, "KeyedStoreExternalFloatArray") \
  V(KeyedStoreExternalPixelArray_symbol, "KeyedStoreExternalPixelArray")

// Forward declarations.
class GCTracer;
class HeapStats;
class Isolate;
class WeakObjectRetainer;


typedef String* (*ExternalStringTableUpdaterCallback)(Heap* heap,
                                                      Object** pointer);

typedef bool (*DirtyRegionCallback)(Heap* heap,
                                    Address start,
                                    Address end,
                                    ObjectSlotCallback copy_object_func);

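// For illustration only: a hypothetical callback (not part of this file)
// matching the ExternalStringTableUpdaterCallback signature above, which
// simply keeps the string a slot already holds, would look like:
//
//   static String* KeepStringUnchanged(Heap* heap, Object** pointer) {
//     return String::cast(*pointer);
//   }
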

// The all-static Heap captures the interface to the global object heap.
// All JavaScript contexts created by this process share the same object
// heap.

#ifdef DEBUG
class HeapDebugUtils;
#endif


// A queue of objects promoted during scavenge. Each object is accompanied
// by its size to avoid dereferencing a map pointer for scanning.
class PromotionQueue {
 public:
  PromotionQueue() : front_(NULL), rear_(NULL) { }

  void Initialize(Address start_address) {
    front_ = rear_ = reinterpret_cast<intptr_t*>(start_address);
  }

  bool is_empty() { return front_ <= rear_; }

  inline void insert(HeapObject* target, int size);

  void remove(HeapObject** target, int* size) {
    *target = reinterpret_cast<HeapObject*>(*(--front_));
    *size = static_cast<int>(*(--front_));
    // Assert no underflow.
    ASSERT(front_ >= rear_);
  }

 private:
  // The front of the queue is higher in memory than the rear.
  intptr_t* front_;
  intptr_t* rear_;

  DISALLOW_COPY_AND_ASSIGN(PromotionQueue);
};
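
// The matching insert() is defined out of line.  As a minimal sketch
// (an assumption for illustration, mirroring remove() above under the
// downward-growing layout Initialize() sets up), it would be:
//
//   void PromotionQueue::insert(HeapObject* target, int size) {
//     *(--rear_) = reinterpret_cast<intptr_t>(target);
//     *(--rear_) = size;
//   }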


// The external strings table is where all external strings are
// registered.  We need to keep track of such strings to properly
// finalize them.
class ExternalStringTable {
 public:
  // Registers an external string.
  inline void AddString(String* string);

  inline void Iterate(ObjectVisitor* v);

  // Restores internal invariant and gets rid of collected strings.
  // Must be called after each Iterate() that modified the strings.
  void CleanUp();

  // Destroys all allocated memory.
  void TearDown();

 private:
  ExternalStringTable() { }

  friend class Heap;

  inline void Verify();

  inline void AddOldString(String* string);

  // Notifies the table that only a prefix of the new list is valid.
  inline void ShrinkNewStrings(int position);

  // To speed up scavenge collections new space strings are kept
  // separate from old space strings.
  List<Object*> new_space_strings_;
  List<Object*> old_space_strings_;

  Heap* heap_;

  DISALLOW_COPY_AND_ASSIGN(ExternalStringTable);
};
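
// An assumed usage sketch (based on the comments above, not code from
// this file): during a GC, Heap (a friend) visits the registered
// strings and then drops the entries that were collected, e.g.
//
//   external_string_table_.Iterate(&visitor);  // may modify the strings
//   external_string_table_.CleanUp();          // restore the invariant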


class Heap {
 public:
  // Configures the heap size before setup. Returns false if the heap
  // has been set up already.
  bool ConfigureHeap(int max_semispace_size,
                     int max_old_gen_size,
                     int max_executable_size);
  bool ConfigureHeapDefault();

  // Initializes the global object heap. If create_heap_objects is true,
  // also creates the basic non-mutable objects.
  // Returns whether it succeeded.
  bool Setup(bool create_heap_objects);

  // Destroys all memory allocated by the heap.
  void TearDown();

  // Set the stack limit in the roots_ array.  Some architectures generate
  // code that looks here, because it is faster than loading from the static
  // jslimit_/real_jslimit_ variable in the StackGuard.
  void SetStackLimits();

  // Returns whether Setup has been called.
  bool HasBeenSetup();

  // Returns the maximum amount of memory reserved for the heap.  For
  // the young generation, we reserve 4 times the amount needed for a
  // semi space.  The young generation consists of two semi spaces and
  // we reserve twice the amount needed for those in order to ensure
  // that new space can be aligned to its size.
  intptr_t MaxReserved() {
    return 4 * reserved_semispace_size_ + max_old_generation_size_;
  }
  int MaxSemiSpaceSize() { return max_semispace_size_; }
  int ReservedSemiSpaceSize() { return reserved_semispace_size_; }
  int InitialSemiSpaceSize() { return initial_semispace_size_; }
  intptr_t MaxOldGenerationSize() { return max_old_generation_size_; }
  intptr_t MaxExecutableSize() { return max_executable_size_; }

  // Returns the capacity of the heap in bytes without growing. The heap
  // grows when more spaces are needed, until it reaches the limit.
  intptr_t Capacity();

  // Returns the amount of memory currently committed for the heap.
  intptr_t CommittedMemory();

  // Returns the amount of executable memory currently committed for the heap.
  intptr_t CommittedMemoryExecutable();

  // Returns the available bytes in space without growing.
  // Heap doesn't guarantee that it can allocate an object that requires
  // all available bytes. Check MaxHeapObjectSize() instead.
  intptr_t Available();

  // Returns the maximum object size in paged space.
  inline int MaxObjectSizeInPagedSpace();

  // Returns the size of all objects residing in the heap.
  intptr_t SizeOfObjects();

  // Return the starting address and a mask for the new space.  And-masking an
  // address with the mask will result in the start address of the new space
  // for all addresses in either semispace.
  Address NewSpaceStart() { return new_space_.start(); }
  uintptr_t NewSpaceMask() { return new_space_.mask(); }
  Address NewSpaceTop() { return new_space_.top(); }
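
  // A sketch of the containment check the start/mask pair enables
  // (illustrative; the function name is hypothetical and this is not a
  // declaration used elsewhere in this file):
  //
  //   bool AddressInNewSpace(Heap* heap, Object* obj) {
  //     return (reinterpret_cast<uintptr_t>(obj) & heap->NewSpaceMask()) ==
  //            reinterpret_cast<uintptr_t>(heap->NewSpaceStart());
  //   }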

  NewSpace* new_space() { return &new_space_; }
  OldSpace* old_pointer_space() { return old_pointer_space_; }
  OldSpace* old_data_space() { return old_data_space_; }
  OldSpace* code_space() { return code_space_; }
  MapSpace* map_space() { return map_space_; }
  CellSpace* cell_space() { return cell_space_; }
  LargeObjectSpace* lo_space() { return lo_space_; }

  bool always_allocate() { return always_allocate_scope_depth_ != 0; }
  Address always_allocate_scope_depth_address() {
    return reinterpret_cast<Address>(&always_allocate_scope_depth_);
  }
  bool linear_allocation() {
    return linear_allocation_scope_depth_ != 0;
  }

  Address* NewSpaceAllocationTopAddress() {
    return new_space_.allocation_top_address();
  }
  Address* NewSpaceAllocationLimitAddress() {
    return new_space_.allocation_limit_address();
  }

  // Uncommit unused semi space.
  bool UncommitFromSpace() { return new_space_.UncommitFromSpace(); }

#ifdef ENABLE_HEAP_PROTECTION
  // Protect/unprotect the heap by marking all spaces read-only/writable.
  void Protect();
  void Unprotect();
#endif

  // Allocates and initializes a new JavaScript object based on a
  // constructor.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateJSObject(
      JSFunction* constructor, PretenureFlag pretenure = NOT_TENURED);

  // Allocates and initializes a new global object based on a constructor.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateGlobalObject(JSFunction* constructor);

  // Returns a deep copy of the JavaScript object.
  // Properties and elements are copied too.
  // Returns failure if allocation failed.
  MUST_USE_RESULT MaybeObject* CopyJSObject(JSObject* source);

  // Allocates the function prototype.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateFunctionPrototype(JSFunction* function);

  // Reinitializes a JSGlobalProxy based on a constructor.  The object
  // must have the same size as objects allocated using the
  // constructor.  The object is reinitialized and behaves as an
  // object that has been freshly allocated using the constructor.
  MUST_USE_RESULT MaybeObject* ReinitializeJSGlobalProxy(
      JSFunction* constructor, JSGlobalProxy* global);

  // Allocates and initializes a new JavaScript object based on a map.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateJSObjectFromMap(
      Map* map, PretenureFlag pretenure = NOT_TENURED);

  // Allocates a heap object based on the map.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this function does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* Allocate(Map* map, AllocationSpace space);

  // Allocates a JS Map in the heap.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this function does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateMap(InstanceType instance_type,
                                           int instance_size);

  // Allocates a partial map for bootstrapping.
  MUST_USE_RESULT MaybeObject* AllocatePartialMap(InstanceType instance_type,
                                                  int instance_size);

  // Allocate a map for the specified function.
  MUST_USE_RESULT MaybeObject* AllocateInitialMap(JSFunction* fun);

  // Allocates an empty code cache.
  MUST_USE_RESULT MaybeObject* AllocateCodeCache();

  // Clear the Instanceof cache (used when a prototype changes).
  inline void ClearInstanceofCache();

  // Allocates and fully initializes a String.  There are two String
  // encodings: ASCII and two byte. One should choose between the three string
  // allocation functions based on the encoding of the string buffer used to
  // initialize the string.
  //   - ...FromAscii initializes the string from a buffer that is ASCII
  //     encoded (it does not check that the buffer is ASCII encoded) and the
  //     result will be ASCII encoded.
  //   - ...FromUTF8 initializes the string from a buffer that is UTF-8
  //     encoded.  If the characters are all single-byte characters, the
  //     result will be ASCII encoded, otherwise it will be converted to two
  //     byte.
  //   - ...FromTwoByte initializes the string from a buffer that is two-byte
  //     encoded.  If the characters are all single-byte characters, the
  //     result will be converted to ASCII, otherwise it will be left as
  //     two-byte.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateStringFromAscii(
      Vector<const char> str,
      PretenureFlag pretenure = NOT_TENURED);
  MUST_USE_RESULT inline MaybeObject* AllocateStringFromUtf8(
      Vector<const char> str,
      PretenureFlag pretenure = NOT_TENURED);
  MUST_USE_RESULT MaybeObject* AllocateStringFromUtf8Slow(
      Vector<const char> str,
      PretenureFlag pretenure = NOT_TENURED);
  MUST_USE_RESULT MaybeObject* AllocateStringFromTwoByte(
      Vector<const uc16> str,
      PretenureFlag pretenure = NOT_TENURED);
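
  // A typical call site (an illustrative sketch; variable names are
  // hypothetical): every MaybeObject* result must be checked for a
  // retry-after-GC failure before the object is used, e.g.
  //
  //   Object* result;
  //   { MaybeObject* maybe = AllocateStringFromAscii(CStrVector("abc"));
  //     if (!maybe->ToObject(&result)) return maybe;
  //   }
  //   String* str = String::cast(result);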

  // Allocates a symbol in old space based on the character stream.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this function does not perform a garbage collection.
  MUST_USE_RESULT inline MaybeObject* AllocateSymbol(Vector<const char> str,
                                                     int chars,
                                                     uint32_t hash_field);

  MUST_USE_RESULT inline MaybeObject* AllocateAsciiSymbol(
        Vector<const char> str,
        uint32_t hash_field);

  MUST_USE_RESULT inline MaybeObject* AllocateTwoByteSymbol(
        Vector<const uc16> str,
        uint32_t hash_field);

  MUST_USE_RESULT MaybeObject* AllocateInternalSymbol(
      unibrow::CharacterStream* buffer, int chars, uint32_t hash_field);

  MUST_USE_RESULT MaybeObject* AllocateExternalSymbol(
      Vector<const char> str,
      int chars);

  // Allocates and partially initializes a String.  There are two String
  // encodings: ASCII and two byte.  These functions allocate a string of the
  // given length and set its map and length fields.  The characters of the
  // string are uninitialized.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateRawAsciiString(
      int length,
      PretenureFlag pretenure = NOT_TENURED);
  MUST_USE_RESULT MaybeObject* AllocateRawTwoByteString(
      int length,
      PretenureFlag pretenure = NOT_TENURED);

  // Computes a single character string where the character has the given
  // code.  A cache is used for ASCII codes.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed. Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* LookupSingleCharacterStringFromCode(
      uint16_t code);

  // Allocate a byte array of the specified length
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateByteArray(int length,
                                                 PretenureFlag pretenure);

  // Allocate a non-tenured byte array of the specified length
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateByteArray(int length);

  // Allocates an external array of the specified length and type.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateExternalArray(
      int length,
      ExternalArrayType array_type,
      void* external_pointer,
      PretenureFlag pretenure);

  // Allocate a tenured JS global property cell.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateJSGlobalPropertyCell(Object* value);

  // Allocates a fixed array initialized with undefined values
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateFixedArray(int length,
                                                  PretenureFlag pretenure);
  // Allocates a fixed array initialized with undefined values
  MUST_USE_RESULT MaybeObject* AllocateFixedArray(int length);

  // Allocates an uninitialized fixed array. It must be filled by the caller.
  //
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateUninitializedFixedArray(int length);

  // Make a copy of src and return it. Returns
  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
  MUST_USE_RESULT inline MaybeObject* CopyFixedArray(FixedArray* src);

  // Make a copy of src, set the map, and return the copy. Returns
  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
  MUST_USE_RESULT MaybeObject* CopyFixedArrayWithMap(FixedArray* src, Map* map);

  // Allocates a fixed array initialized with the hole values.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateFixedArrayWithHoles(
      int length,
      PretenureFlag pretenure = NOT_TENURED);

  // AllocateHashTable is identical to AllocateFixedArray except
  // that the resulting object has hash_table_map as map.
  MUST_USE_RESULT MaybeObject* AllocateHashTable(
      int length, PretenureFlag pretenure = NOT_TENURED);

  // Allocate a global (but otherwise uninitialized) context.
  MUST_USE_RESULT MaybeObject* AllocateGlobalContext();

  // Allocate a function context.
  MUST_USE_RESULT MaybeObject* AllocateFunctionContext(int length,
                                                       JSFunction* closure);

  // Allocate a 'with' context.
  MUST_USE_RESULT MaybeObject* AllocateWithContext(Context* previous,
                                                   JSObject* extension,
                                                   bool is_catch_context);

  // Allocates a new utility object in the old generation.
  MUST_USE_RESULT MaybeObject* AllocateStruct(InstanceType type);

  // Allocates a function initialized with a shared part.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateFunction(
      Map* function_map,
      SharedFunctionInfo* shared,
      Object* prototype,
      PretenureFlag pretenure = TENURED);

  // Arguments object size.
  static const int kArgumentsObjectSize =
      JSObject::kHeaderSize + 2 * kPointerSize;
  // Strict mode arguments have no callee, so they are smaller.
  static const int kArgumentsObjectSizeStrict =
      JSObject::kHeaderSize + 1 * kPointerSize;
  // Indices for direct access into argument objects.
  static const int kArgumentsLengthIndex = 0;
  // callee is only valid in non-strict mode.
  static const int kArgumentsCalleeIndex = 1;

  // Allocates an arguments object - optionally with an elements array.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateArgumentsObject(
      Object* callee, int length);

  // Same as NewNumberFromDouble, but may return a preallocated/immutable
  // number object (e.g., minus_zero_value_, nan_value_).
  MUST_USE_RESULT MaybeObject* NumberFromDouble(
      double value, PretenureFlag pretenure = NOT_TENURED);

  // Allocates a HeapNumber from value.
  MUST_USE_RESULT MaybeObject* AllocateHeapNumber(
      double value,
      PretenureFlag pretenure);
  // pretenure = NOT_TENURED
  MUST_USE_RESULT MaybeObject* AllocateHeapNumber(double value);

  // Converts an int into either a Smi or a HeapNumber object.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT inline MaybeObject* NumberFromInt32(int32_t value);

  // Converts an unsigned int into either a Smi or a HeapNumber object.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT inline MaybeObject* NumberFromUint32(uint32_t value);

  // Allocates a new proxy object.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateProxy(
      Address proxy, PretenureFlag pretenure = NOT_TENURED);

  // Allocates a new SharedFunctionInfo object.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateSharedFunctionInfo(Object* name);

  // Allocates a new JSMessageObject object.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note that this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateJSMessageObject(
      String* type,
      JSArray* arguments,
      int start_position,
      int end_position,
      Object* script,
      Object* stack_trace,
      Object* stack_frames);

  // Allocates a new cons string object.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateConsString(String* first,
                                                  String* second);

  // Allocates a new sub string object which is a substring of an underlying
  // string buffer stretching from the index start (inclusive) to the index
  // end (exclusive).
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateSubString(
      String* buffer,
      int start,
      int end,
      PretenureFlag pretenure = NOT_TENURED);

  // Allocate a new external string object, which is backed by a string
  // resource that resides outside the V8 heap.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateExternalStringFromAscii(
      ExternalAsciiString::Resource* resource);
  MUST_USE_RESULT MaybeObject* AllocateExternalStringFromTwoByte(
      ExternalTwoByteString::Resource* resource);

  // Finalizes an external string by deleting the associated external
  // data and clearing the resource pointer.
  inline void FinalizeExternalString(String* string);

  // Allocates an uninitialized object.  The memory is non-executable if the
  // hardware and OS allow.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this function does not perform a garbage collection.
  MUST_USE_RESULT inline MaybeObject* AllocateRaw(int size_in_bytes,
                                                  AllocationSpace space,
                                                  AllocationSpace retry_space);

  // Initialize a filler object to keep the ability to iterate over the heap
  // when shortening objects.
  void CreateFillerObjectAt(Address addr, int size);

  // Makes a new native code object
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed. On success, the pointer to the Code object is stored in the
  // self_reference. This allows generated code to reference its own Code
  // object by containing this pointer.
  // Please note this function does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* CreateCode(const CodeDesc& desc,
                                          Code::Flags flags,
                                          Handle<Object> self_reference,
                                          bool immovable = false);

  MUST_USE_RESULT MaybeObject* CopyCode(Code* code);

  // Copy the code and scope info part of the code object, but insert
  // the provided data as the relocation information.
  MUST_USE_RESULT MaybeObject* CopyCode(Code* code, Vector<byte> reloc_info);

  // Finds the symbol for string in the symbol table.
  // If not found, a new symbol is added to the table and returned.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if allocation
  // failed.
  // Please note this function does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* LookupSymbol(Vector<const char> str);
  MUST_USE_RESULT MaybeObject* LookupAsciiSymbol(Vector<const char> str);
  MUST_USE_RESULT MaybeObject* LookupTwoByteSymbol(
      Vector<const uc16> str);
  MUST_USE_RESULT MaybeObject* LookupAsciiSymbol(const char* str) {
    return LookupSymbol(CStrVector(str));
  }
  MUST_USE_RESULT MaybeObject* LookupSymbol(String* str);
  bool LookupSymbolIfExists(String* str, String** symbol);
  bool LookupTwoCharsSymbolIfExists(String* str, String** symbol);

  // Compute the matching symbol map for a string if possible.
  // NULL is returned if the string is in new space or not flattened.
  Map* SymbolMapForString(String* str);

  // Tries to flatten a string before compare operation.
  //
  // Returns a failure in case it was decided that flattening was
  // necessary and failed.  Note that if flattening is not necessary,
  // the string might stay non-flat even when a failure is not returned.
  //
  // Please note this function does not perform a garbage collection.
  MUST_USE_RESULT inline MaybeObject* PrepareForCompare(String* str);

  // Converts the given boolean condition to JavaScript boolean value.
  inline Object* ToBoolean(bool condition);

  // Code that should be run before and after each GC.  Includes some
  // reporting/verification activities when compiled with DEBUG set.
  void GarbageCollectionPrologue();
  void GarbageCollectionEpilogue();

  // Performs garbage collection operation.
  // Returns whether there is a chance that another major GC could
  // collect more garbage.
  bool CollectGarbage(AllocationSpace space, GarbageCollector collector);

  // Performs garbage collection operation.
  // Returns whether there is a chance that another major GC could
  // collect more garbage.
  inline bool CollectGarbage(AllocationSpace space);

  // Performs a full garbage collection. Force compaction if the
  // parameter is true.
  void CollectAllGarbage(bool force_compaction);

  // Last-hope GC; should try to squeeze out as much garbage as possible.
  void CollectAllAvailableGarbage();

  // Notify the heap that a context has been disposed.
  int NotifyContextDisposed() { return ++contexts_disposed_; }

  // Utility to invoke the scavenger. This is needed in test code to
  // ensure correct callback for weak global handles.
  void PerformScavenge();

  PromotionQueue* promotion_queue() { return &promotion_queue_; }

#ifdef DEBUG
  // Utility used with flag gc-greedy.
  void GarbageCollectionGreedyCheck();
#endif

  void AddGCPrologueCallback(
      GCPrologueCallback callback, GCType gc_type_filter);
  void RemoveGCPrologueCallback(GCPrologueCallback callback);

  void AddGCEpilogueCallback(
      GCEpilogueCallback callback, GCType gc_type_filter);
  void RemoveGCEpilogueCallback(GCEpilogueCallback callback);

  void SetGlobalGCPrologueCallback(GCCallback callback) {
    ASSERT((callback == NULL) ^ (global_gc_prologue_callback_ == NULL));
    global_gc_prologue_callback_ = callback;
  }
  void SetGlobalGCEpilogueCallback(GCCallback callback) {
    ASSERT((callback == NULL) ^ (global_gc_epilogue_callback_ == NULL));
    global_gc_epilogue_callback_ = callback;
  }

  // Heap root getters.  We have versions with and without type::cast() here.
  // You can't use type::cast during GC because the assert fails.
#define ROOT_ACCESSOR(type, name, camel_name)                                  \
  type* name() {                                                               \
    return type::cast(roots_[k##camel_name##RootIndex]);                       \
  }                                                                            \
  type* raw_unchecked_##name() {                                               \
    return reinterpret_cast<type*>(roots_[k##camel_name##RootIndex]);          \
  }
  ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR
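
  // For example, applied to the ROOT_LIST entry
  // V(Object, undefined_value, UndefinedValue), ROOT_ACCESSOR expands to:
  //
  //   Object* undefined_value() {
  //     return Object::cast(roots_[kUndefinedValueRootIndex]);
  //   }
  //   Object* raw_unchecked_undefined_value() {
  //     return reinterpret_cast<Object*>(roots_[kUndefinedValueRootIndex]);
  //   }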

// Utility type maps
#define STRUCT_MAP_ACCESSOR(NAME, Name, name)                                  \
    Map* name##_map() {                                                        \
      return Map::cast(roots_[k##Name##MapRootIndex]);                         \
    }
  STRUCT_LIST(STRUCT_MAP_ACCESSOR)
#undef STRUCT_MAP_ACCESSOR
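
  // For example, for a STRUCT_LIST entry of the form
  // V(ACCESSOR_INFO, AccessorInfo, accessor_info) (STRUCT_LIST is defined
  // in objects.h), STRUCT_MAP_ACCESSOR expands to:
  //
  //   Map* accessor_info_map() {
  //     return Map::cast(roots_[kAccessorInfoMapRootIndex]);
  //   }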

#define SYMBOL_ACCESSOR(name, str) String* name() {                            \
    return String::cast(roots_[k##name##RootIndex]);                           \
  }
  SYMBOL_LIST(SYMBOL_ACCESSOR)
#undef SYMBOL_ACCESSOR
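
  // For example, the SYMBOL_LIST entry V(Array_symbol, "Array") yields:
  //
  //   String* Array_symbol() {
  //     return String::cast(roots_[kArray_symbolRootIndex]);
  //   }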

  // The hidden_symbol is special because it is the empty string, but does
  // not match the empty string.
  String* hidden_symbol() { return hidden_symbol_; }

  void set_global_contexts_list(Object* object) {
    global_contexts_list_ = object;
  }
  Object* global_contexts_list() { return global_contexts_list_; }

  // Iterates over all roots in the heap.
  void IterateRoots(ObjectVisitor* v, VisitMode mode);
  // Iterates over all strong roots in the heap.
  void IterateStrongRoots(ObjectVisitor* v, VisitMode mode);
  // Iterates over all the other roots in the heap.
  void IterateWeakRoots(ObjectVisitor* v, VisitMode mode);

  enum ExpectedPageWatermarkState {
    WATERMARK_SHOULD_BE_VALID,
    WATERMARK_CAN_BE_INVALID
  };

  // For each dirty region on a page in use from an old space, call the
  // visit_dirty_region callback.
  // If either visit_dirty_region or callback can cause an allocation in
  // old space or a change in the allocation watermark, then
  // can_preallocate_during_iteration should be set to true.
  // All pages will be marked as having an invalid watermark upon
  // iteration completion.
  void IterateDirtyRegions(
      PagedSpace* space,
      DirtyRegionCallback visit_dirty_region,
      ObjectSlotCallback callback,
      ExpectedPageWatermarkState expected_page_watermark_state);

  // Interpret marks as a bitvector of dirty marks for regions of size
  // Page::kRegionSize aligned by Page::kRegionAlignmentMask and covering
  // memory interval from start to top. For each dirty region call a
  // visit_dirty_region callback. Return updated bitvector of dirty marks.
  uint32_t IterateDirtyRegions(uint32_t marks,
                               Address start,
                               Address end,
                               DirtyRegionCallback visit_dirty_region,
                               ObjectSlotCallback callback);

  // Iterates over pointers into the from-semispace of new space found in
  // the memory interval from start to end.
  // Updates dirty marks for the page containing the start address.
  void IterateAndMarkPointersToFromSpace(Address start,
                                         Address end,
                                         ObjectSlotCallback callback);

  // Iterate pointers to new space found in memory interval from start to end.
  // Return true if pointers to new space were found.
  static bool IteratePointersInDirtyRegion(Heap* heap,
                                           Address start,
                                           Address end,
                                           ObjectSlotCallback callback);


  // Iterate pointers to new space found in memory interval from start to end.
  // This interval is considered to belong to the map space.
  // Return true if pointers to new space were found.
  static bool IteratePointersInDirtyMapsRegion(Heap* heap,
                                               Address start,
                                               Address end,
                                               ObjectSlotCallback callback);


  // Returns whether the object resides in new space.
  inline bool InNewSpace(Object* object);
  inline bool InFromSpace(Object* object);
  inline bool InToSpace(Object* object);

  // Checks whether an address/object is in the heap (including the
  // auxiliary area and unused area).
  bool Contains(Address addr);
  bool Contains(HeapObject* value);

  // Checks whether an address/object is in a space.
  // Currently used by tests, serialization and heap verification only.
  bool InSpace(Address addr, AllocationSpace space);
  bool InSpace(HeapObject* value, AllocationSpace space);

  // Finds out which space an object should get promoted to based on its type.
  inline OldSpace* TargetSpace(HeapObject* object);
  inline AllocationSpace TargetSpaceId(InstanceType type);

  // Sets the stub_cache_ (only used when expanding the dictionary).
  void public_set_code_stubs(NumberDictionary* value) {
    roots_[kCodeStubsRootIndex] = value;
  }

  // Support for computing object sizes for old objects during GCs. Returns
  // a function that is guaranteed to be safe for computing object sizes in
  // the current GC phase.
  HeapObjectCallback GcSafeSizeOfOldObjectFunction() {
    return gc_safe_size_of_old_object_;
  }

  // Sets the non_monomorphic_cache_ (only used when expanding the dictionary).
  void public_set_non_monomorphic_cache(NumberDictionary* value) {
    roots_[kNonMonomorphicCacheRootIndex] = value;
  }

  void public_set_empty_script(Script* script) {
    roots_[kEmptyScriptRootIndex] = script;
  }

  // Update the next script id.
  inline void SetLastScriptId(Object* last_script_id);

  // Generated code can embed this address to get access to the roots.
  Object** roots_address() { return roots_; }

  // Get address of global contexts list for serialization support.
  Object** global_contexts_list_address() {
    return &global_contexts_list_;
  }

#ifdef DEBUG
  void Print();
  void PrintHandles();

  // Verify the heap is in its normal state before or after a GC.
  void Verify();

  // Report heap statistics.
  void ReportHeapStatistics(const char* title);
  void ReportCodeStatistics(const char* title);

  // Fill in bogus values in from space
  void ZapFromSpace();
#endif

#if defined(ENABLE_LOGGING_AND_PROFILING)
  // Print short heap statistics.
  void PrintShortHeapStatistics();
#endif

  // Makes a new symbol object
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this function does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* CreateSymbol(
      const char* str, int length, int hash);
  MUST_USE_RESULT MaybeObject* CreateSymbol(String* str);

  // Write barrier support for address[offset] = o.
  inline void RecordWrite(Address address, int offset);

  // Write barrier support for address[start : start + len[ = o.
  inline void RecordWrites(Address address, int start, int len);

  // Given an address occupied by a live code object, return that object.
  Object* FindCodeObject(Address a);

  // Invoke Shrink on shrinkable spaces.
  void Shrink();

  enum HeapState { NOT_IN_GC, SCAVENGE, MARK_COMPACT };
  inline HeapState gc_state() { return gc_state_; }

#ifdef DEBUG
  bool IsAllocationAllowed() { return allocation_allowed_; }
  inline bool allow_allocation(bool enable);

  bool disallow_allocation_failure() {
    return disallow_allocation_failure_;
  }

  void TracePathToObject(Object* target);
  void TracePathToGlobal();
#endif

  // Callback function passed to Heap::Iterate etc.  Copies an object if
  // necessary; the object might be promoted to an old space.  The caller must
  // ensure the precondition that the object is (a) a heap object and (b) in
  // the heap's from space.
  static inline void ScavengePointer(HeapObject** p);
  static inline void ScavengeObject(HeapObject** p, HeapObject* object);

  // Commits from space if it is uncommitted.
  void EnsureFromSpaceIsCommitted();

  // Support for partial snapshots.  After calling this we can allocate a
  // certain number of bytes using only linear allocation (with a
  // LinearAllocationScope and an AlwaysAllocateScope) without using freelists
  // or causing a GC.  It returns true if space was reserved or false if a GC
  // is needed.  For paged spaces the space requested must include the space
  // wasted at the end of each page when allocating linearly.
  void ReserveSpace(
    int new_space_size,
    int pointer_space_size,
    int data_space_size,
    int code_space_size,
    int map_space_size,
    int cell_space_size,
    int large_object_size);

  //
  // Support for the API.
  //

  bool CreateApiObjects();

1104   // Attempt to find the number in a small cache.  If we finds it, return
1105   // the string representation of the number.  Otherwise return undefined.
1106   Object* GetNumberStringCache(Object* number);
1107 
1108   // Update the cache with a new number-string pair.
1109   void SetNumberStringCache(Object* number, String* str);
1110 
1111   // Adjusts the amount of registered external memory.
1112   // Returns the adjusted value.
1113   inline int AdjustAmountOfExternalAllocatedMemory(int change_in_bytes);
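  //
  // Illustrative sketch (hypothetical embedder code): report a 1 MB native
  // buffer so external allocations can push the heap toward a global GC,
  // and report its release later.
  //
  //   heap->AdjustAmountOfExternalAllocatedMemory(1024 * 1024);   // acquire
  //   ...
  //   heap->AdjustAmountOfExternalAllocatedMemory(-1024 * 1024);  // release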

  // Allocate uninitialized fixed array.
  MUST_USE_RESULT MaybeObject* AllocateRawFixedArray(int length);
  MUST_USE_RESULT MaybeObject* AllocateRawFixedArray(int length,
                                                     PretenureFlag pretenure);

  // True if we have reached the allocation limit in the old generation that
  // should force the next GC (caused normally) to be a full one.
  bool OldGenerationPromotionLimitReached() {
    return (PromotedSpaceSize() + PromotedExternalMemorySize())
           > old_gen_promotion_limit_;
  }

  intptr_t OldGenerationSpaceAvailable() {
    return old_gen_allocation_limit_ -
           (PromotedSpaceSize() + PromotedExternalMemorySize());
  }

  // True if we have reached the allocation limit in the old generation that
  // should artificially cause a GC right now.
  bool OldGenerationAllocationLimitReached() {
    return OldGenerationSpaceAvailable() < 0;
  }

  // Can be called when the embedding application is idle.
  bool IdleNotification();

  // Declare all the root indices.
  enum RootListIndex {
#define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
    STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION)
#undef ROOT_INDEX_DECLARATION

// Utility type maps.
#define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex,
    STRUCT_LIST(DECLARE_STRUCT_MAP)
#undef DECLARE_STRUCT_MAP

#define SYMBOL_INDEX_DECLARATION(name, str) k##name##RootIndex,
    SYMBOL_LIST(SYMBOL_INDEX_DECLARATION)
#undef SYMBOL_INDEX_DECLARATION

    kSymbolTableRootIndex,
    kStrongRootListLength = kSymbolTableRootIndex,
    kRootListLength
  };
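  //
  // For illustration: each list entry of the form V(type, name, camel_name)
  // expands above into an enumerator k<camel_name>RootIndex, so every root
  // gets a stable index into the roots_[] array.  A hypothetical entry
  // V(Map, example_map, ExampleMap) would yield kExampleMapRootIndex.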

  MUST_USE_RESULT MaybeObject* NumberToString(
      Object* number, bool check_number_string_cache = true);

  Map* MapForExternalArrayType(ExternalArrayType array_type);
  RootListIndex RootIndexForExternalArrayType(
      ExternalArrayType array_type);

  void RecordStats(HeapStats* stats, bool take_snapshot = false);

  // Copy a block of memory from src to dst.  The size of the block must be
  // aligned to the pointer size.
  static inline void CopyBlock(Address dst, Address src, int byte_size);

  inline void CopyBlockToOldSpaceAndUpdateRegionMarks(Address dst,
                                                      Address src,
                                                      int byte_size);

  // Optimized version of memmove for blocks with pointer size aligned sizes
  // and pointer size aligned addresses.
  static inline void MoveBlock(Address dst, Address src, int byte_size);

  inline void MoveBlockToOldSpaceAndUpdateRegionMarks(Address dst,
                                                      Address src,
                                                      int byte_size);

  // Check the new space expansion criteria and expand semispaces if they
  // were hit.
  void CheckNewSpaceExpansionCriteria();

  inline void IncrementYoungSurvivorsCounter(int survived) {
    young_survivors_after_last_gc_ = survived;
    survived_since_last_expansion_ += survived;
  }

  void UpdateNewSpaceReferencesInExternalStringTable(
      ExternalStringTableUpdaterCallback updater_func);

  void ProcessWeakReferences(WeakObjectRetainer* retainer);

  // Helper function that governs the promotion policy from new space to
  // old.  If the object's old address lies below the new space's age
  // mark or if we've already filled the bottom 1/16th of the to space,
  // we try to promote this object.
  inline bool ShouldBePromoted(Address old_address, int object_size);
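  //
  // Illustrative numbers only (assumed semispace size): with a 1 MB
  // to-space, a surviving object is promoted if it sat below the age mark
  // (it already survived an earlier scavenge) or once the first
  // 1 MB / 16 = 64 KB of to-space have been filled.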

  int MaxObjectSizeInNewSpace() { return kMaxObjectSizeInNewSpace; }

  void ClearJSFunctionResultCaches();

  void ClearNormalizedMapCaches();

  GCTracer* tracer() { return tracer_; }

  double total_regexp_code_generated() { return total_regexp_code_generated_; }
  void IncreaseTotalRegexpCodeGenerated(int size) {
    total_regexp_code_generated_ += size;
  }

  // Returns maximum GC pause.
  int get_max_gc_pause() { return max_gc_pause_; }

  // Returns maximum size of objects alive after GC.
  intptr_t get_max_alive_after_gc() { return max_alive_after_gc_; }

  // Returns minimal interval between two subsequent collections.
  int get_min_in_mutator() { return min_in_mutator_; }

  MarkCompactCollector* mark_compact_collector() {
    return &mark_compact_collector_;
  }

  ExternalStringTable* external_string_table() {
    return &external_string_table_;
  }

  inline Isolate* isolate();
  bool is_safe_to_read_maps() { return is_safe_to_read_maps_; }

  void CallGlobalGCPrologueCallback() {
    if (global_gc_prologue_callback_ != NULL) global_gc_prologue_callback_();
  }

  void CallGlobalGCEpilogueCallback() {
    if (global_gc_epilogue_callback_ != NULL) global_gc_epilogue_callback_();
  }
 private:
  Heap();

  // This can be calculated directly from a pointer to the heap; however, it is
  // more expedient to get at the isolate directly from within Heap methods.
  Isolate* isolate_;

  int reserved_semispace_size_;
  int max_semispace_size_;
  int initial_semispace_size_;
  intptr_t max_old_generation_size_;
  intptr_t max_executable_size_;
  intptr_t code_range_size_;

  // For keeping track of how much data has survived
  // scavenge since last new space expansion.
  int survived_since_last_expansion_;

  int always_allocate_scope_depth_;
  int linear_allocation_scope_depth_;

  // For keeping track of context disposals.
  int contexts_disposed_;

#if defined(V8_TARGET_ARCH_X64)
  static const int kMaxObjectSizeInNewSpace = 1024*KB;
#else
  static const int kMaxObjectSizeInNewSpace = 512*KB;
#endif

  NewSpace new_space_;
  OldSpace* old_pointer_space_;
  OldSpace* old_data_space_;
  OldSpace* code_space_;
  MapSpace* map_space_;
  CellSpace* cell_space_;
  LargeObjectSpace* lo_space_;
  HeapState gc_state_;

  // Returns the size of objects residing in non-new spaces.
  intptr_t PromotedSpaceSize();

  // Returns the amount of external memory registered since last global gc.
  int PromotedExternalMemorySize();

  int mc_count_;  // how many mark-compact collections happened
  int ms_count_;  // how many mark-sweep collections happened
  unsigned int gc_count_;  // how many GCs happened

  // Total length of the strings we failed to flatten since the last GC.
  int unflattened_strings_length_;

#define ROOT_ACCESSOR(type, name, camel_name)                                  \
  inline void set_##name(type* value) {                                        \
    roots_[k##camel_name##RootIndex] = value;                                  \
  }
  ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR

#ifdef DEBUG
  bool allocation_allowed_;

  // If the --gc-interval flag is set to a positive value, this
  // variable holds the number of allocations remaining until the
  // next failure and garbage collection.
  int allocation_timeout_;

  // Do we expect to be able to handle allocation failure at this
  // time?
  bool disallow_allocation_failure_;

  HeapDebugUtils* debug_utils_;
#endif  // DEBUG

  // Limit that triggers a global GC on the next (normally caused) GC.  This
  // is checked when we have already decided to do a GC to help determine
  // which collector to invoke.
  intptr_t old_gen_promotion_limit_;

  // Limit that triggers a global GC as soon as is reasonable.  This is
  // checked before expanding a paged space in the old generation and on
  // every allocation in large object space.
  intptr_t old_gen_allocation_limit_;

  // Limit on the amount of externally allocated memory allowed between
  // global GCs.  If the limit is reached, a global GC is forced.
  intptr_t external_allocation_limit_;

  // The amount of external memory registered through the API kept alive
  // by global handles.
  int amount_of_external_allocated_memory_;

  // Caches the amount of external memory registered at the last global gc.
  int amount_of_external_allocated_memory_at_last_global_gc_;

  // Indicates that an allocation has failed in the old generation since the
  // last GC.
  int old_gen_exhausted_;

  Object* roots_[kRootListLength];

  Object* global_contexts_list_;

  struct StringTypeTable {
    InstanceType type;
    int size;
    RootListIndex index;
  };

  struct ConstantSymbolTable {
    const char* contents;
    RootListIndex index;
  };

  struct StructTable {
    InstanceType type;
    int size;
    RootListIndex index;
  };

  static const StringTypeTable string_type_table[];
  static const ConstantSymbolTable constant_symbol_table[];
  static const StructTable struct_table[];

  // The special hidden symbol which is an empty string, but does not match
  // any string when looked up in properties.
  String* hidden_symbol_;

  // GC callback function, called before and after mark-compact GC.
  // Allocations in the callback function are disallowed.
  struct GCPrologueCallbackPair {
    GCPrologueCallbackPair(GCPrologueCallback callback, GCType gc_type)
        : callback(callback), gc_type(gc_type) {
    }
    bool operator==(const GCPrologueCallbackPair& pair) const {
      return pair.callback == callback;
    }
    GCPrologueCallback callback;
    GCType gc_type;
  };
  List<GCPrologueCallbackPair> gc_prologue_callbacks_;

  struct GCEpilogueCallbackPair {
    GCEpilogueCallbackPair(GCEpilogueCallback callback, GCType gc_type)
        : callback(callback), gc_type(gc_type) {
    }
    bool operator==(const GCEpilogueCallbackPair& pair) const {
      return pair.callback == callback;
    }
    GCEpilogueCallback callback;
    GCType gc_type;
  };
  List<GCEpilogueCallbackPair> gc_epilogue_callbacks_;

  GCCallback global_gc_prologue_callback_;
  GCCallback global_gc_epilogue_callback_;

  // Support for computing object sizes during GC.
  HeapObjectCallback gc_safe_size_of_old_object_;
  static int GcSafeSizeOfOldObject(HeapObject* object);
  static int GcSafeSizeOfOldObjectWithEncodedMap(HeapObject* object);

  // Update the GC state.  Called from the mark-compact collector.
  void MarkMapPointersAsEncoded(bool encoded) {
    gc_safe_size_of_old_object_ = encoded
        ? &GcSafeSizeOfOldObjectWithEncodedMap
        : &GcSafeSizeOfOldObject;
  }

  // Checks whether a global GC is necessary.
  GarbageCollector SelectGarbageCollector(AllocationSpace space);

  // Performs garbage collection.
  // Returns whether there is a chance another major GC could
  // collect more garbage.
  bool PerformGarbageCollection(GarbageCollector collector,
                                GCTracer* tracer);

  static const intptr_t kMinimumPromotionLimit = 2 * MB;
  static const intptr_t kMinimumAllocationLimit = 8 * MB;

  inline void UpdateOldSpaceLimits();

  // Allocate an uninitialized object in map space.  The behavior is identical
  // to Heap::AllocateRaw(size_in_bytes, MAP_SPACE), except that (a) it doesn't
  // have to test the allocation space argument and (b) can reduce code size
  // (since both AllocateRaw and AllocateRawMap are inlined).
  MUST_USE_RESULT inline MaybeObject* AllocateRawMap();

  // Allocate an uninitialized object in the global property cell space.
  MUST_USE_RESULT inline MaybeObject* AllocateRawCell();

  // Initializes a JSObject based on its map.
  void InitializeJSObjectFromMap(JSObject* obj,
                                 FixedArray* properties,
                                 Map* map);

  bool CreateInitialMaps();
  bool CreateInitialObjects();

  // These two Create*EntryStub functions are here and forced to not be inlined
  // because of a gcc-4.4 bug that assigns wrong vtable entries.
  NO_INLINE(void CreateJSEntryStub());
  NO_INLINE(void CreateJSConstructEntryStub());

  void CreateFixedStubs();

  MaybeObject* CreateOddball(const char* to_string,
                             Object* to_number,
                             byte kind);

  // Allocate empty fixed array.
  MUST_USE_RESULT MaybeObject* AllocateEmptyFixedArray();

  void SwitchScavengingVisitorsTableIfProfilingWasEnabled();

  // Performs a minor collection in new generation.
  void Scavenge();

  static String* UpdateNewSpaceReferenceInExternalStringTableEntry(
      Heap* heap,
      Object** pointer);

  Address DoScavenge(ObjectVisitor* scavenge_visitor, Address new_space_front);

  // Performs a major collection in the whole heap.
  void MarkCompact(GCTracer* tracer);

  // Code to be run before and after mark-compact.
  void MarkCompactPrologue(bool is_compacting);

  // Completely clear the Instanceof cache (to stop it keeping objects alive
  // around a GC).
  inline void CompletelyClearInstanceofCache();

#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
  // Record statistics before and after garbage collection.
  void ReportStatisticsBeforeGC();
  void ReportStatisticsAfterGC();
#endif

  // Slow part of scavenge object.
  static void ScavengeObjectSlow(HeapObject** p, HeapObject* object);

  // Initializes a function with a shared part and prototype.
  // Returns the function.
  // Note: this code was factored out of AllocateFunction such that
  // other parts of the VM could use it.  Specifically, a function that creates
  // instances of type JS_FUNCTION_TYPE benefits from the use of this function.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT inline MaybeObject* InitializeFunction(
      JSFunction* function,
      SharedFunctionInfo* shared,
      Object* prototype);

  // Total RegExp code ever generated.
  double total_regexp_code_generated_;

  GCTracer* tracer_;

  // Initializes the number to string cache based on the max semispace size.
  MUST_USE_RESULT MaybeObject* InitializeNumberStringCache();
  // Flush the number to string cache.
  void FlushNumberStringCache();

  void UpdateSurvivalRateTrend(int start_new_space_size);

  enum SurvivalRateTrend { INCREASING, STABLE, DECREASING, FLUCTUATING };

  static const int kYoungSurvivalRateThreshold = 90;
  static const int kYoungSurvivalRateAllowedDeviation = 15;

  int young_survivors_after_last_gc_;
  int high_survival_rate_period_length_;
  double survival_rate_;
  SurvivalRateTrend previous_survival_rate_trend_;
  SurvivalRateTrend survival_rate_trend_;

  void set_survival_rate_trend(SurvivalRateTrend survival_rate_trend) {
    ASSERT(survival_rate_trend != FLUCTUATING);
    previous_survival_rate_trend_ = survival_rate_trend_;
    survival_rate_trend_ = survival_rate_trend;
  }

  SurvivalRateTrend survival_rate_trend() {
    if (survival_rate_trend_ == STABLE) {
      return STABLE;
    } else if (previous_survival_rate_trend_ == STABLE) {
      return survival_rate_trend_;
    } else if (survival_rate_trend_ != previous_survival_rate_trend_) {
      return FLUCTUATING;
    } else {
      return survival_rate_trend_;
    }
  }
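  //
  // Reading the (current, previous) trend pair above, for illustration:
  //   (STABLE,     anything)   -> STABLE
  //   (INCREASING, STABLE)     -> INCREASING
  //   (INCREASING, DECREASING) -> FLUCTUATING
  //   (DECREASING, DECREASING) -> DECREASING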

  bool IsStableOrIncreasingSurvivalTrend() {
    switch (survival_rate_trend()) {
      case STABLE:
      case INCREASING:
        return true;
      default:
        return false;
    }
  }

  bool IsIncreasingSurvivalTrend() {
    return survival_rate_trend() == INCREASING;
  }

  bool IsHighSurvivalRate() {
    return high_survival_rate_period_length_ > 0;
  }

  static const int kInitialSymbolTableSize = 2048;
  static const int kInitialEvalCacheSize = 64;

  // Maximum GC pause.
  int max_gc_pause_;

  // Maximum size of objects alive after GC.
  intptr_t max_alive_after_gc_;

  // Minimal interval between two subsequent collections.
  int min_in_mutator_;

  // Size of objects alive after last GC.
  intptr_t alive_after_last_gc_;

  double last_gc_end_timestamp_;

  MarkCompactCollector mark_compact_collector_;

  // This field contains the meaning of the WATERMARK_INVALIDATED flag.
  // Instead of clearing this flag from all pages we just flip
  // its meaning at the beginning of a scavenge.
  intptr_t page_watermark_invalidated_mark_;

  int number_idle_notifications_;
  unsigned int last_idle_notification_gc_count_;
  bool last_idle_notification_gc_count_init_;

  // Shared state read by the scavenge collector and set by ScavengeObject.
  PromotionQueue promotion_queue_;

  // Flag that is set when the heap has been configured.  The heap can be
  // repeatedly configured through the API until it is set up.
  bool configured_;

  ExternalStringTable external_string_table_;

  bool is_safe_to_read_maps_;

  friend class Factory;
  friend class GCTracer;
  friend class DisallowAllocationFailure;
  friend class AlwaysAllocateScope;
  friend class LinearAllocationScope;
  friend class Page;
  friend class Isolate;
  friend class MarkCompactCollector;
  friend class MapCompact;

  DISALLOW_COPY_AND_ASSIGN(Heap);
};


class HeapStats {
 public:
  static const int kStartMarker = 0xDECADE00;
  static const int kEndMarker = 0xDECADE01;

  int* start_marker;                       //  0
  int* new_space_size;                     //  1
  int* new_space_capacity;                 //  2
  intptr_t* old_pointer_space_size;        //  3
  intptr_t* old_pointer_space_capacity;    //  4
  intptr_t* old_data_space_size;           //  5
  intptr_t* old_data_space_capacity;       //  6
  intptr_t* code_space_size;               //  7
  intptr_t* code_space_capacity;           //  8
  intptr_t* map_space_size;                //  9
  intptr_t* map_space_capacity;            // 10
  intptr_t* cell_space_size;               // 11
  intptr_t* cell_space_capacity;           // 12
  intptr_t* lo_space_size;                 // 13
  int* global_handle_count;                // 14
  int* weak_global_handle_count;           // 15
  int* pending_global_handle_count;        // 16
  int* near_death_global_handle_count;     // 17
  int* destroyed_global_handle_count;      // 18
  intptr_t* memory_allocator_size;         // 19
  intptr_t* memory_allocator_capacity;     // 20
  int* objects_per_type;                   // 21
  int* size_per_type;                      // 22
  int* os_error;                           // 23
  int* end_marker;                         // 24
};


class AlwaysAllocateScope {
 public:
  AlwaysAllocateScope() {
    // We shouldn't hit any nested scopes, because that requires
    // non-handle code to call handle code.  The code still works but
    // performance will degrade, so we want to catch this situation
    // in debug mode.
    ASSERT(HEAP->always_allocate_scope_depth_ == 0);
    HEAP->always_allocate_scope_depth_++;
  }

  ~AlwaysAllocateScope() {
    HEAP->always_allocate_scope_depth_--;
    ASSERT(HEAP->always_allocate_scope_depth_ == 0);
  }
};
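
// Usage sketch (illustrative, mirroring the AssertNoAllocation example
// documented further below):
//
// /* Allocation must not fail in this scope. */
// { AlwaysAllocateScope always_allocate;
//   ...
// }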


class LinearAllocationScope {
 public:
  LinearAllocationScope() {
    HEAP->linear_allocation_scope_depth_++;
  }

  ~LinearAllocationScope() {
    HEAP->linear_allocation_scope_depth_--;
    ASSERT(HEAP->linear_allocation_scope_depth_ >= 0);
  }
};


#ifdef DEBUG
// Visitor class to verify interior pointers in spaces that do not contain
// or care about intergenerational references.  All heap object pointers have
// to point into the heap to a location that has a map pointer at its first
// word.  Caveat: Heap::Contains is an approximation because it can return
// true for objects in a heap space but above the allocation pointer.
class VerifyPointersVisitor: public ObjectVisitor {
 public:
  void VisitPointers(Object** start, Object** end) {
    for (Object** current = start; current < end; current++) {
      if ((*current)->IsHeapObject()) {
        HeapObject* object = HeapObject::cast(*current);
        ASSERT(HEAP->Contains(object));
        ASSERT(object->map()->IsMap());
      }
    }
  }
};


// Visitor class to verify interior pointers in spaces that use region marks
// to keep track of intergenerational references.
// As VerifyPointersVisitor, but also checks that dirty marks are set
// for regions covering intergenerational references.
class VerifyPointersAndDirtyRegionsVisitor: public ObjectVisitor {
 public:
  void VisitPointers(Object** start, Object** end) {
    for (Object** current = start; current < end; current++) {
      if ((*current)->IsHeapObject()) {
        HeapObject* object = HeapObject::cast(*current);
        ASSERT(HEAP->Contains(object));
        ASSERT(object->map()->IsMap());
        if (HEAP->InNewSpace(object)) {
          ASSERT(HEAP->InToSpace(object));
          Address addr = reinterpret_cast<Address>(current);
          ASSERT(Page::FromAddress(addr)->IsRegionDirty(addr));
        }
      }
    }
  }
};
#endif


// Space iterator for iterating over all spaces of the heap.
// Returns each space in turn, and null when it is done.
class AllSpaces BASE_EMBEDDED {
 public:
  Space* next();
  AllSpaces() { counter_ = FIRST_SPACE; }
 private:
  int counter_;
};
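
// Illustrative iteration pattern (sketch):
//
//   AllSpaces spaces;
//   for (Space* space = spaces.next(); space != NULL;
//        space = spaces.next()) {
//     ...  // each space is visited exactly once
//   }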


// Space iterator for iterating over all old spaces of the heap: Old pointer
// space, old data space and code space.
// Returns each space in turn, and null when it is done.
class OldSpaces BASE_EMBEDDED {
 public:
  OldSpace* next();
  OldSpaces() { counter_ = OLD_POINTER_SPACE; }
 private:
  int counter_;
};


// Space iterator for iterating over all the paged spaces of the heap:
// Map space, old pointer space, old data space, code space and cell space.
// Returns each space in turn, and null when it is done.
class PagedSpaces BASE_EMBEDDED {
 public:
  PagedSpace* next();
  PagedSpaces() { counter_ = OLD_POINTER_SPACE; }
 private:
  int counter_;
};


// Space iterator for iterating over all spaces of the heap.
// For each space an object iterator is provided.  The deallocation of the
// returned object iterators is handled by the space iterator.
class SpaceIterator : public Malloced {
 public:
  SpaceIterator();
  explicit SpaceIterator(HeapObjectCallback size_func);
  virtual ~SpaceIterator();

  bool has_next();
  ObjectIterator* next();

 private:
  ObjectIterator* CreateIterator();

  int current_space_;  // from enum AllocationSpace.
  ObjectIterator* iterator_;  // object iterator for the current space.
  HeapObjectCallback size_func_;
};


// A HeapIterator provides iteration over the whole heap.  It
// aggregates the specific iterators for the different spaces, as
// each of these can iterate over only one space.
//
// HeapIterator can skip free list nodes (that is, de-allocated heap
// objects that still remain in the heap).  As the implementation of free
// node filtering uses GC marks, it can't be used during MS/MC GC
// phases.  Also, it is forbidden to interrupt iteration in this mode,
// as this will leave heap objects marked (and thus, unusable).
class HeapObjectsFilter;

class HeapIterator BASE_EMBEDDED {
 public:
  enum HeapObjectsFiltering {
    kNoFiltering,
    kFilterFreeListNodes,
    kFilterUnreachable
  };

  HeapIterator();
  explicit HeapIterator(HeapObjectsFiltering filtering);
  ~HeapIterator();

  HeapObject* next();
  void reset();

 private:
  // Perform the initialization.
  void Init();
  // Perform all necessary shutdown (destruction) work.
  void Shutdown();
  HeapObject* NextObject();

  HeapObjectsFiltering filtering_;
  HeapObjectsFilter* filter_;
  // Space iterator for iterating all the spaces.
  SpaceIterator* space_iterator_;
  // Object iterator for the space currently being iterated.
  ObjectIterator* object_iterator_;
};
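
// Illustrative usage (sketch): walk every live object when no GC is in
// progress.
//
//   HeapIterator iterator;
//   for (HeapObject* obj = iterator.next(); obj != NULL;
//        obj = iterator.next()) {
//     ...  // obj is the next heap object
//   }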


// Cache for mapping (map, property name) into field offset.
// Cleared at startup and prior to mark sweep collection.
class KeyedLookupCache {
 public:
  // Lookup field offset for (map, name).  If absent, kNotFound is returned.
  int Lookup(Map* map, String* name);

  // Update an element in the cache.
  void Update(Map* map, String* name, int field_offset);

  // Clear the cache.
  void Clear();

  static const int kLength = 64;
  static const int kCapacityMask = kLength - 1;
  static const int kMapHashShift = 2;
  static const int kNotFound = -1;

 private:
  KeyedLookupCache() {
    for (int i = 0; i < kLength; ++i) {
      keys_[i].map = NULL;
      keys_[i].name = NULL;
      field_offsets_[i] = kNotFound;
    }
  }

  static inline int Hash(Map* map, String* name);

  // Get the address of the keys and field_offsets arrays.  Used in
  // generated code to perform cache lookups.
  Address keys_address() {
    return reinterpret_cast<Address>(&keys_);
  }

  Address field_offsets_address() {
    return reinterpret_cast<Address>(&field_offsets_);
  }

  struct Key {
    Map* map;
    String* name;
  };

  Key keys_[kLength];
  int field_offsets_[kLength];

  friend class ExternalReference;
  friend class Isolate;
  DISALLOW_COPY_AND_ASSIGN(KeyedLookupCache);
};
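
// Illustrative cache protocol (sketch; "cache" is assumed to be the
// isolate's KeyedLookupCache):
//
//   int offset = cache->Lookup(map, name);
//   if (offset == KeyedLookupCache::kNotFound) {
//     offset = ...;  // slow property lookup
//     cache->Update(map, name, offset);
//   }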


// Cache for mapping (array, property name) into descriptor index.
// The cache contains both positive and negative results.
// A descriptor index equal to kAbsent means the property is absent.
// Cleared at startup and prior to any gc.
class DescriptorLookupCache {
 public:
  // Lookup descriptor index for (array, name).
  // If absent, kAbsent is returned.
  int Lookup(DescriptorArray* array, String* name) {
    if (!StringShape(name).IsSymbol()) return kAbsent;
    int index = Hash(array, name);
    Key& key = keys_[index];
    if ((key.array == array) && (key.name == name)) return results_[index];
    return kAbsent;
  }

  // Update an element in the cache.
  void Update(DescriptorArray* array, String* name, int result) {
    ASSERT(result != kAbsent);
    if (StringShape(name).IsSymbol()) {
      int index = Hash(array, name);
      Key& key = keys_[index];
      key.array = array;
      key.name = name;
      results_[index] = result;
    }
  }

  // Clear the cache.
  void Clear();

  static const int kAbsent = -2;

 private:
  DescriptorLookupCache() {
    for (int i = 0; i < kLength; ++i) {
      keys_[i].array = NULL;
      keys_[i].name = NULL;
      results_[i] = kAbsent;
    }
  }

  static int Hash(DescriptorArray* array, String* name) {
    // Uses only lower 32 bits if pointers are larger.
    uint32_t array_hash =
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(array)) >> 2;
    uint32_t name_hash =
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name)) >> 2;
    return (array_hash ^ name_hash) % kLength;
  }

  static const int kLength = 64;
  struct Key {
    DescriptorArray* array;
    String* name;
  };

  Key keys_[kLength];
  int results_[kLength];

  friend class Isolate;
  DISALLOW_COPY_AND_ASSIGN(DescriptorLookupCache);
};


// A helper class to document/test C++ scopes where we do not
// expect a GC.  Usage:
//
// /* Allocation not allowed: we cannot handle a GC in this scope. */
// { AssertNoAllocation nogc;
//   ...
// }

#ifdef DEBUG

class DisallowAllocationFailure {
 public:
  DisallowAllocationFailure() {
    old_state_ = HEAP->disallow_allocation_failure_;
    HEAP->disallow_allocation_failure_ = true;
  }
  ~DisallowAllocationFailure() {
    HEAP->disallow_allocation_failure_ = old_state_;
  }
 private:
  bool old_state_;
};

class AssertNoAllocation {
 public:
  AssertNoAllocation() {
    old_state_ = HEAP->allow_allocation(false);
  }

  ~AssertNoAllocation() {
    HEAP->allow_allocation(old_state_);
  }

 private:
  bool old_state_;
};

class DisableAssertNoAllocation {
 public:
  DisableAssertNoAllocation() {
    old_state_ = HEAP->allow_allocation(true);
  }

  ~DisableAssertNoAllocation() {
    HEAP->allow_allocation(old_state_);
  }

 private:
  bool old_state_;
};

#else  // ndef DEBUG

class AssertNoAllocation {
 public:
  AssertNoAllocation() { }
  ~AssertNoAllocation() { }
};

class DisableAssertNoAllocation {
 public:
  DisableAssertNoAllocation() { }
  ~DisableAssertNoAllocation() { }
};

#endif

// GCTracer collects and prints ONE line after each garbage collector
// invocation IFF --trace_gc is used.

class GCTracer BASE_EMBEDDED {
 public:
  class Scope BASE_EMBEDDED {
   public:
    enum ScopeId {
      EXTERNAL,
      MC_MARK,
      MC_SWEEP,
      MC_SWEEP_NEWSPACE,
      MC_COMPACT,
      MC_FLUSH_CODE,
      kNumberOfScopes
    };

    Scope(GCTracer* tracer, ScopeId scope)
        : tracer_(tracer),
          scope_(scope) {
      start_time_ = OS::TimeCurrentMillis();
    }

    ~Scope() {
      ASSERT(scope_ < kNumberOfScopes);  // scope_ is unsigned.
      tracer_->scopes_[scope_] += OS::TimeCurrentMillis() - start_time_;
    }

   private:
    GCTracer* tracer_;
    ScopeId scope_;
    double start_time_;
  };
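
  // Illustrative use (sketch): time one phase of a full collection; the
  // destructor adds the elapsed milliseconds to scopes_[MC_MARK].
  //
  //   { GCTracer::Scope scope(tracer, GCTracer::Scope::MC_MARK);
  //     ...  // marking work
  //   }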

  explicit GCTracer(Heap* heap);
  ~GCTracer();

  // Sets the collector.
  void set_collector(GarbageCollector collector) { collector_ = collector; }

  // Sets the GC count.
  void set_gc_count(unsigned int count) { gc_count_ = count; }

  // Sets the full GC count.
  void set_full_gc_count(int count) { full_gc_count_ = count; }

  // Sets the flag that this is a compacting full GC.
  void set_is_compacting() { is_compacting_ = true; }
  bool is_compacting() const { return is_compacting_; }

  // Increment and decrement the count of marked objects.
  void increment_marked_count() { ++marked_count_; }
  void decrement_marked_count() { --marked_count_; }

  int marked_count() { return marked_count_; }

  void increment_promoted_objects_size(int object_size) {
    promoted_objects_size_ += object_size;
  }

 private:
  // Returns a string matching the collector.
  const char* CollectorString();

  // Returns the size of objects in the heap (in MB).
  double SizeOfHeapObjects() {
    return (static_cast<double>(HEAP->SizeOfObjects())) / MB;
  }

  double start_time_;  // Timestamp set in the constructor.
  intptr_t start_size_;  // Size of objects in heap set in constructor.
  GarbageCollector collector_;  // Type of collector.

  // A count (including this one, e.g., the first collection is 1) of the
  // number of garbage collections.
  unsigned int gc_count_;

  // A count (including this one) of the number of full garbage collections.
  int full_gc_count_;

  // True if the current GC is a compacting full collection, false
  // otherwise.
  bool is_compacting_;

  // True if the *previous* full GC was a compacting collection (will be
  // false if there has not been a previous full GC).
  bool previous_has_compacted_;

  // On a full GC, a count of the number of marked objects.  Incremented
  // when an object is marked and decremented when an object's mark bit is
  // cleared.  Will be zero on a scavenge collection.
  int marked_count_;

  // The count from the end of the previous full GC.  Will be zero if there
  // was no previous full GC.
  int previous_marked_count_;

  // Amounts of time spent in different scopes during GC.
  double scopes_[Scope::kNumberOfScopes];

  // Total amount of space either wasted or contained in one of the free
  // lists before the current GC.
  intptr_t in_free_list_or_wasted_before_gc_;

  // Difference between space used in the heap at the beginning of the current
  // collection and the end of the previous collection.
  intptr_t allocated_since_last_gc_;

  // Amount of time spent in the mutator, i.e. the time elapsed between the
  // end of the previous collection and the beginning of the current one.
  double spent_in_mutator_;

  // Size of objects promoted during the current collection.
  intptr_t promoted_objects_size_;

  Heap* heap_;
};


class TranscendentalCache {
 public:
  enum Type {ACOS, ASIN, ATAN, COS, EXP, LOG, SIN, TAN, kNumberOfCaches};
  static const int kTranscendentalTypeBits = 3;
  STATIC_ASSERT((1 << kTranscendentalTypeBits) >= kNumberOfCaches);

  // Returns a heap number with f(input), where f is a math function specified
  // by the 'type' argument.
  MUST_USE_RESULT inline MaybeObject* Get(Type type, double input);

  // The cache contains raw Object pointers.  This method disposes of
  // them before a garbage collection.
  void Clear();

 private:
  class SubCache {
    static const int kCacheSize = 512;

    explicit SubCache(Type t);

    MUST_USE_RESULT inline MaybeObject* Get(double input);

    inline double Calculate(double input);

    struct Element {
      uint32_t in[2];
      Object* output;
    };

    union Converter {
      double dbl;
      uint32_t integers[2];
    };

    inline static int Hash(const Converter& c) {
      uint32_t hash = (c.integers[0] ^ c.integers[1]);
      hash ^= static_cast<int32_t>(hash) >> 16;
      hash ^= static_cast<int32_t>(hash) >> 8;
      return (hash & (kCacheSize - 1));
    }

    Element elements_[kCacheSize];
    Type type_;
    Isolate* isolate_;

    // Allow access to the caches_ array as an ExternalReference.
    friend class ExternalReference;
    // Inline implementation of the cache.
    friend class TranscendentalCacheStub;
    // For evaluating value.
    friend class TranscendentalCache;

    DISALLOW_COPY_AND_ASSIGN(SubCache);
  };

  TranscendentalCache() {
    for (int i = 0; i < kNumberOfCaches; ++i) caches_[i] = NULL;
  }

  // Used to create an external reference.
  inline Address cache_array_address();

  // Instantiation.
  friend class Isolate;
  // Inline implementation of the caching.
  friend class TranscendentalCacheStub;
  // Allow access to the caches_ array as an ExternalReference.
  friend class ExternalReference;

  SubCache* caches_[kNumberOfCaches];
  DISALLOW_COPY_AND_ASSIGN(TranscendentalCache);
};
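
// Illustrative use (sketch; "cache" is assumed to be the isolate's
// TranscendentalCache):
//
//   MaybeObject* result = cache->Get(TranscendentalCache::SIN, 0.5);
//   // On success, result holds a heap number with sin(0.5), possibly
//   // served from the cache; allocating the heap number can fail,
//   // hence MaybeObject.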


// Abstract base class for checking whether a weak object should be retained.
class WeakObjectRetainer {
 public:
  virtual ~WeakObjectRetainer() {}

  // Return whether this object should be retained.  If NULL is returned, the
  // object has no references.  Otherwise the address of the retained object
  // should be returned, as in some GC situations the object may have been
  // moved.
  virtual Object* RetainAs(Object* object) = 0;
};
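
// Minimal sketch of a retainer (hypothetical): retain every object as-is.
//
//   class RetainAllRetainer : public WeakObjectRetainer {
//    public:
//     virtual Object* RetainAs(Object* object) { return object; }
//   };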


#if defined(DEBUG) || defined(LIVE_OBJECT_LIST)
// Helper class for tracing paths to a search target Object from all roots.
// The TracePathFrom() method can be used to trace paths from a specific
// object to the search target object.
class PathTracer : public ObjectVisitor {
 public:
  enum WhatToFind {
    FIND_ALL,   // Will find all matches.
    FIND_FIRST  // Will stop the search after the first match.
  };

  // For the WhatToFind arg, if FIND_FIRST is specified, tracing will stop
  // after the first match.  If FIND_ALL is specified, then tracing will be
  // done for all matches.
  PathTracer(Object* search_target,
             WhatToFind what_to_find,
             VisitMode visit_mode)
      : search_target_(search_target),
        found_target_(false),
        found_target_in_trace_(false),
        what_to_find_(what_to_find),
        visit_mode_(visit_mode),
        object_stack_(20),
        no_alloc() {}

  virtual void VisitPointers(Object** start, Object** end);

  void Reset();
  void TracePathFrom(Object** root);

  bool found() const { return found_target_; }

  static Object* const kAnyGlobalObject;

 protected:
  class MarkVisitor;
  class UnmarkVisitor;

  void MarkRecursively(Object** p, MarkVisitor* mark_visitor);
  void UnmarkRecursively(Object** p, UnmarkVisitor* unmark_visitor);
  virtual void ProcessResults();

  // Tags 0, 1, and 3 are used.  Use 2 for marking visited HeapObject.
  static const int kMarkTag = 2;

  Object* search_target_;
  bool found_target_;
  bool found_target_in_trace_;
  WhatToFind what_to_find_;
  VisitMode visit_mode_;
  List<Object*> object_stack_;

  AssertNoAllocation no_alloc;  // i.e. no gc allowed.

  DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer);
};
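
// Illustrative use (sketch; DEBUG builds): trace how a target object is
// reached from a given root slot.  VISIT_ALL is assumed to be a suitable
// VisitMode.
//
//   PathTracer tracer(target, PathTracer::FIND_FIRST, VISIT_ALL);
//   tracer.TracePathFrom(&root);
//   if (tracer.found()) { ... }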
#endif  // DEBUG || LIVE_OBJECT_LIST


} }  // namespace v8::internal

#undef HEAP

#endif  // V8_HEAP_H_