1 // Copyright 2006-2008 the V8 project authors. All rights reserved. 2 // Redistribution and use in source and binary forms, with or without 3 // modification, are permitted provided that the following conditions are 4 // met: 5 // 6 // * Redistributions of source code must retain the above copyright 7 // notice, this list of conditions and the following disclaimer. 8 // * Redistributions in binary form must reproduce the above 9 // copyright notice, this list of conditions and the following 10 // disclaimer in the documentation and/or other materials provided 11 // with the distribution. 12 // * Neither the name of Google Inc. nor the names of its 13 // contributors may be used to endorse or promote products derived 14 // from this software without specific prior written permission. 15 // 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 27 28 #ifndef V8_HEAP_H_ 29 #define V8_HEAP_H_ 30 31 #include <math.h> 32 33 #include "zone-inl.h" 34 35 36 namespace v8 { 37 namespace internal { 38 39 // Defines all the roots in Heap. 40 #define UNCONDITIONAL_STRONG_ROOT_LIST(V) \ 41 /* Cluster the most popular ones in a few cache lines here at the top. */ \ 42 V(Smi, stack_limit, StackLimit) \ 43 V(Object, undefined_value, UndefinedValue) \ 44 V(Object, the_hole_value, TheHoleValue) \ 45 V(Object, null_value, NullValue) \ 46 V(Object, true_value, TrueValue) \ 47 V(Object, false_value, FalseValue) \ 48 V(Map, heap_number_map, HeapNumberMap) \ 49 V(Map, global_context_map, GlobalContextMap) \ 50 V(Map, fixed_array_map, FixedArrayMap) \ 51 V(Object, no_interceptor_result_sentinel, NoInterceptorResultSentinel) \ 52 V(Map, meta_map, MetaMap) \ 53 V(Object, termination_exception, TerminationException) \ 54 V(Map, hash_table_map, HashTableMap) \ 55 V(FixedArray, empty_fixed_array, EmptyFixedArray) \ 56 V(Map, short_string_map, ShortStringMap) \ 57 V(Map, medium_string_map, MediumStringMap) \ 58 V(Map, long_string_map, LongStringMap) \ 59 V(Map, short_ascii_string_map, ShortAsciiStringMap) \ 60 V(Map, medium_ascii_string_map, MediumAsciiStringMap) \ 61 V(Map, long_ascii_string_map, LongAsciiStringMap) \ 62 V(Map, short_symbol_map, ShortSymbolMap) \ 63 V(Map, medium_symbol_map, MediumSymbolMap) \ 64 V(Map, long_symbol_map, LongSymbolMap) \ 65 V(Map, short_ascii_symbol_map, ShortAsciiSymbolMap) \ 66 V(Map, medium_ascii_symbol_map, MediumAsciiSymbolMap) \ 67 V(Map, long_ascii_symbol_map, LongAsciiSymbolMap) \ 68 V(Map, short_cons_symbol_map, ShortConsSymbolMap) \ 69 V(Map, medium_cons_symbol_map, MediumConsSymbolMap) \ 70 V(Map, long_cons_symbol_map, LongConsSymbolMap) \ 71 V(Map, short_cons_ascii_symbol_map, ShortConsAsciiSymbolMap) \ 72 V(Map, medium_cons_ascii_symbol_map, MediumConsAsciiSymbolMap) \ 73 V(Map, long_cons_ascii_symbol_map, LongConsAsciiSymbolMap) \ 74 V(Map, short_sliced_symbol_map, ShortSlicedSymbolMap) \ 75 V(Map, medium_sliced_symbol_map, MediumSlicedSymbolMap) \ 76 V(Map, long_sliced_symbol_map, LongSlicedSymbolMap) \ 77 V(Map, short_sliced_ascii_symbol_map, ShortSlicedAsciiSymbolMap) \ 78 V(Map, medium_sliced_ascii_symbol_map, MediumSlicedAsciiSymbolMap) \ 79 V(Map, long_sliced_ascii_symbol_map, LongSlicedAsciiSymbolMap) \ 80 V(Map, short_external_symbol_map, ShortExternalSymbolMap) \ 81 V(Map, medium_external_symbol_map, MediumExternalSymbolMap) \ 82 V(Map, long_external_symbol_map, LongExternalSymbolMap) \ 83 V(Map, short_external_ascii_symbol_map, ShortExternalAsciiSymbolMap) \ 84 V(Map, medium_external_ascii_symbol_map, MediumExternalAsciiSymbolMap) \ 85 V(Map, long_external_ascii_symbol_map, LongExternalAsciiSymbolMap) \ 86 V(Map, short_cons_string_map, ShortConsStringMap) \ 87 V(Map, medium_cons_string_map, MediumConsStringMap) \ 88 V(Map, long_cons_string_map, LongConsStringMap) \ 89 V(Map, short_cons_ascii_string_map, ShortConsAsciiStringMap) \ 90 V(Map, medium_cons_ascii_string_map, MediumConsAsciiStringMap) \ 91 V(Map, long_cons_ascii_string_map, LongConsAsciiStringMap) \ 92 V(Map, short_sliced_string_map, ShortSlicedStringMap) \ 93 V(Map, medium_sliced_string_map, MediumSlicedStringMap) \ 94 V(Map, long_sliced_string_map, LongSlicedStringMap) \ 95 V(Map, short_sliced_ascii_string_map, ShortSlicedAsciiStringMap) \ 96 V(Map, medium_sliced_ascii_string_map, MediumSlicedAsciiStringMap) \ 97 V(Map, long_sliced_ascii_string_map, LongSlicedAsciiStringMap) \ 98 V(Map, short_external_string_map, ShortExternalStringMap) \ 99 V(Map, medium_external_string_map, MediumExternalStringMap) \ 100 V(Map, long_external_string_map, LongExternalStringMap) \ 101 V(Map, short_external_ascii_string_map, ShortExternalAsciiStringMap) \ 102 V(Map, medium_external_ascii_string_map, MediumExternalAsciiStringMap) \ 103 V(Map, long_external_ascii_string_map, LongExternalAsciiStringMap) \ 104 V(Map, undetectable_short_string_map, UndetectableShortStringMap) \ 105 V(Map, undetectable_medium_string_map, UndetectableMediumStringMap) \ 106 V(Map, undetectable_long_string_map, UndetectableLongStringMap) \ 107 V(Map, undetectable_short_ascii_string_map, UndetectableShortAsciiStringMap) \ 108 V(Map, \ 109 undetectable_medium_ascii_string_map, \ 110 UndetectableMediumAsciiStringMap) \ 111 V(Map, undetectable_long_ascii_string_map, UndetectableLongAsciiStringMap) \ 112 V(Map, byte_array_map, ByteArrayMap) \ 113 V(Map, pixel_array_map, PixelArrayMap) \ 114 V(Map, context_map, ContextMap) \ 115 V(Map, catch_context_map, CatchContextMap) \ 116 V(Map, code_map, CodeMap) \ 117 V(Map, oddball_map, OddballMap) \ 118 V(Map, global_property_cell_map, GlobalPropertyCellMap) \ 119 V(Map, boilerplate_function_map, BoilerplateFunctionMap) \ 120 V(Map, shared_function_info_map, SharedFunctionInfoMap) \ 121 V(Map, proxy_map, ProxyMap) \ 122 V(Map, one_pointer_filler_map, OnePointerFillerMap) \ 123 V(Map, two_pointer_filler_map, TwoPointerFillerMap) \ 124 V(Object, nan_value, NanValue) \ 125 V(Object, minus_zero_value, MinusZeroValue) \ 126 V(String, empty_string, EmptyString) \ 127 V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray) \ 128 V(Map, neander_map, NeanderMap) \ 129 V(JSObject, message_listeners, MessageListeners) \ 130 V(Proxy, prototype_accessors, PrototypeAccessors) \ 131 V(NumberDictionary, code_stubs, CodeStubs) \ 132 V(NumberDictionary, non_monomorphic_cache, NonMonomorphicCache) \ 133 V(Code, js_entry_code, JsEntryCode) \ 134 V(Code, js_construct_entry_code, JsConstructEntryCode) \ 135 V(Code, c_entry_code, CEntryCode) \ 136 V(Code, c_entry_debug_break_code, CEntryDebugBreakCode) \ 137 V(FixedArray, number_string_cache, NumberStringCache) \ 138 V(FixedArray, single_character_string_cache, SingleCharacterStringCache) \ 139 V(FixedArray, natives_source_cache, NativesSourceCache) \ 140 V(Object, last_script_id, LastScriptId) \ 141 142 #if V8_TARGET_ARCH_ARM && V8_NATIVE_REGEXP 143 #define STRONG_ROOT_LIST(V) \ 144 UNCONDITIONAL_STRONG_ROOT_LIST(V) \ 145 V(Code, re_c_entry_code, RegExpCEntryCode) 146 #else 147 #define STRONG_ROOT_LIST(V) UNCONDITIONAL_STRONG_ROOT_LIST(V) 148 #endif 149 150 #define ROOT_LIST(V) \ 151 STRONG_ROOT_LIST(V) \ 152 V(SymbolTable, symbol_table, SymbolTable) 153 154 #define SYMBOL_LIST(V) \ 155 V(Array_symbol, "Array") \ 156 V(Object_symbol, "Object") \ 157 V(Proto_symbol, "__proto__") \ 158 V(StringImpl_symbol, "StringImpl") \ 159 V(arguments_symbol, "arguments") \ 160 V(Arguments_symbol, "Arguments") \ 161 V(arguments_shadow_symbol, ".arguments") \ 162 V(call_symbol, "call") \ 163 V(apply_symbol, "apply") \ 164 V(caller_symbol, "caller") \ 165 V(boolean_symbol, "boolean") \ 166 V(Boolean_symbol, "Boolean") \ 167 V(callee_symbol, "callee") \ 168 V(constructor_symbol, "constructor") \ 169 V(code_symbol, ".code") \ 170 V(result_symbol, ".result") \ 171 V(catch_var_symbol, ".catch-var") \ 172 V(empty_symbol, "") \ 173 V(eval_symbol, "eval") \ 174 V(function_symbol, "function") \ 175 V(length_symbol, "length") \ 176 V(name_symbol, "name") \ 177 V(number_symbol, "number") \ 178 V(Number_symbol, "Number") \ 179 V(RegExp_symbol, "RegExp") \ 180 V(object_symbol, "object") \ 181 V(prototype_symbol, "prototype") \ 182 V(string_symbol, "string") \ 183 V(String_symbol, "String") \ 184 V(Date_symbol, "Date") \ 185 V(this_symbol, "this") \ 186 V(to_string_symbol, "toString") \ 187 V(char_at_symbol, "CharAt") \ 188 V(undefined_symbol, "undefined") \ 189 V(value_of_symbol, "valueOf") \ 190 V(InitializeVarGlobal_symbol, "InitializeVarGlobal") \ 191 V(InitializeConstGlobal_symbol, "InitializeConstGlobal") \ 192 V(stack_overflow_symbol, "kStackOverflowBoilerplate") \ 193 V(illegal_access_symbol, "illegal access") \ 194 V(out_of_memory_symbol, "out-of-memory") \ 195 V(illegal_execution_state_symbol, "illegal execution state") \ 196 V(get_symbol, "get") \ 197 V(set_symbol, "set") \ 198 V(function_class_symbol, "Function") \ 199 V(illegal_argument_symbol, "illegal argument") \ 200 V(MakeReferenceError_symbol, "MakeReferenceError") \ 201 V(MakeSyntaxError_symbol, "MakeSyntaxError") \ 202 V(MakeTypeError_symbol, "MakeTypeError") \ 203 V(invalid_lhs_in_assignment_symbol, "invalid_lhs_in_assignment") \ 204 V(invalid_lhs_in_for_in_symbol, "invalid_lhs_in_for_in") \ 205 V(invalid_lhs_in_postfix_op_symbol, "invalid_lhs_in_postfix_op") \ 206 V(invalid_lhs_in_prefix_op_symbol, "invalid_lhs_in_prefix_op") \ 207 V(illegal_return_symbol, "illegal_return") \ 208 V(illegal_break_symbol, "illegal_break") \ 209 V(illegal_continue_symbol, "illegal_continue") \ 210 V(unknown_label_symbol, "unknown_label") \ 211 V(redeclaration_symbol, "redeclaration") \ 212 V(failure_symbol, "<failure>") \ 213 V(space_symbol, " ") \ 214 V(exec_symbol, "exec") \ 215 V(zero_symbol, "0") \ 216 V(global_eval_symbol, "GlobalEval") \ 217 V(identity_hash_symbol, "v8::IdentityHash") 218 219 220 // Forward declaration of the GCTracer class. 221 class GCTracer; 222 223 224 // The all static Heap captures the interface to the global object heap. 225 // All JavaScript contexts by this process share the same object heap. 226 227 class Heap : public AllStatic { 228 public: 229 // Configure heap size before setup. Return false if the heap has been 230 // setup already. 231 static bool ConfigureHeap(int semispace_size, int old_gen_size); 232 static bool ConfigureHeapDefault(); 233 234 // Initializes the global object heap. If create_heap_objects is true, 235 // also creates the basic non-mutable objects. 236 // Returns whether it succeeded. 237 static bool Setup(bool create_heap_objects); 238 239 // Destroys all memory allocated by the heap. 240 static void TearDown(); 241 242 // Sets the stack limit in the roots_ array. Some architectures generate code 243 // that looks here, because it is faster than loading from the static jslimit_ 244 // variable. 245 static void SetStackLimit(intptr_t limit); 246 247 // Returns whether Setup has been called. 248 static bool HasBeenSetup(); 249 250 // Returns the maximum heap capacity. MaxCapacity()251 static int MaxCapacity() { 252 return young_generation_size_ + old_generation_size_; 253 } SemiSpaceSize()254 static int SemiSpaceSize() { return semispace_size_; } InitialSemiSpaceSize()255 static int InitialSemiSpaceSize() { return initial_semispace_size_; } YoungGenerationSize()256 static int YoungGenerationSize() { return young_generation_size_; } OldGenerationSize()257 static int OldGenerationSize() { return old_generation_size_; } 258 259 // Returns the capacity of the heap in bytes w/o growing. Heap grows when 260 // more spaces are needed until it reaches the limit. 261 static int Capacity(); 262 263 // Returns the available bytes in space w/o growing. 264 // Heap doesn't guarantee that it can allocate an object that requires 265 // all available bytes. Check MaxHeapObjectSize() instead. 266 static int Available(); 267 268 // Returns the maximum object size in paged space. 269 static inline int MaxObjectSizeInPagedSpace(); 270 271 // Returns of size of all objects residing in the heap. 272 static int SizeOfObjects(); 273 274 // Return the starting address and a mask for the new space. And-masking an 275 // address with the mask will result in the start address of the new space 276 // for all addresses in either semispace. NewSpaceStart()277 static Address NewSpaceStart() { return new_space_.start(); } NewSpaceMask()278 static uintptr_t NewSpaceMask() { return new_space_.mask(); } NewSpaceTop()279 static Address NewSpaceTop() { return new_space_.top(); } 280 new_space()281 static NewSpace* new_space() { return &new_space_; } old_pointer_space()282 static OldSpace* old_pointer_space() { return old_pointer_space_; } old_data_space()283 static OldSpace* old_data_space() { return old_data_space_; } code_space()284 static OldSpace* code_space() { return code_space_; } map_space()285 static MapSpace* map_space() { return map_space_; } cell_space()286 static CellSpace* cell_space() { return cell_space_; } lo_space()287 static LargeObjectSpace* lo_space() { return lo_space_; } 288 always_allocate()289 static bool always_allocate() { return always_allocate_scope_depth_ != 0; } always_allocate_scope_depth_address()290 static Address always_allocate_scope_depth_address() { 291 return reinterpret_cast<Address>(&always_allocate_scope_depth_); 292 } 293 NewSpaceAllocationTopAddress()294 static Address* NewSpaceAllocationTopAddress() { 295 return new_space_.allocation_top_address(); 296 } NewSpaceAllocationLimitAddress()297 static Address* NewSpaceAllocationLimitAddress() { 298 return new_space_.allocation_limit_address(); 299 } 300 301 // Uncommit unused semi space. UncommitFromSpace()302 static bool UncommitFromSpace() { return new_space_.UncommitFromSpace(); } 303 304 #ifdef ENABLE_HEAP_PROTECTION 305 // Protect/unprotect the heap by marking all spaces read-only/writable. 306 static void Protect(); 307 static void Unprotect(); 308 #endif 309 310 // Allocates and initializes a new JavaScript object based on a 311 // constructor. 312 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 313 // failed. 314 // Please note this does not perform a garbage collection. 315 static Object* AllocateJSObject(JSFunction* constructor, 316 PretenureFlag pretenure = NOT_TENURED); 317 318 // Allocates and initializes a new global object based on a constructor. 319 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 320 // failed. 321 // Please note this does not perform a garbage collection. 322 static Object* AllocateGlobalObject(JSFunction* constructor); 323 324 // Returns a deep copy of the JavaScript object. 325 // Properties and elements are copied too. 326 // Returns failure if allocation failed. 327 static Object* CopyJSObject(JSObject* source); 328 329 // Allocates the function prototype. 330 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 331 // failed. 332 // Please note this does not perform a garbage collection. 333 static Object* AllocateFunctionPrototype(JSFunction* function); 334 335 // Reinitialize an JSGlobalProxy based on a constructor. The object 336 // must have the same size as objects allocated using the 337 // constructor. The object is reinitialized and behaves as an 338 // object that has been freshly allocated using the constructor. 339 static Object* ReinitializeJSGlobalProxy(JSFunction* constructor, 340 JSGlobalProxy* global); 341 342 // Allocates and initializes a new JavaScript object based on a map. 343 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 344 // failed. 345 // Please note this does not perform a garbage collection. 346 static Object* AllocateJSObjectFromMap(Map* map, 347 PretenureFlag pretenure = NOT_TENURED); 348 349 // Allocates a heap object based on the map. 350 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 351 // failed. 352 // Please note this function does not perform a garbage collection. 353 static Object* Allocate(Map* map, AllocationSpace space); 354 355 // Allocates a JS Map in the heap. 356 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 357 // failed. 358 // Please note this function does not perform a garbage collection. 359 static Object* AllocateMap(InstanceType instance_type, int instance_size); 360 361 // Allocates a partial map for bootstrapping. 362 static Object* AllocatePartialMap(InstanceType instance_type, 363 int instance_size); 364 365 // Allocate a map for the specified function 366 static Object* AllocateInitialMap(JSFunction* fun); 367 368 // Allocates and fully initializes a String. There are two String 369 // encodings: ASCII and two byte. One should choose between the three string 370 // allocation functions based on the encoding of the string buffer used to 371 // initialized the string. 372 // - ...FromAscii initializes the string from a buffer that is ASCII 373 // encoded (it does not check that the buffer is ASCII encoded) and the 374 // result will be ASCII encoded. 375 // - ...FromUTF8 initializes the string from a buffer that is UTF-8 376 // encoded. If the characters are all single-byte characters, the 377 // result will be ASCII encoded, otherwise it will converted to two 378 // byte. 379 // - ...FromTwoByte initializes the string from a buffer that is two-byte 380 // encoded. If the characters are all single-byte characters, the 381 // result will be converted to ASCII, otherwise it will be left as 382 // two-byte. 383 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 384 // failed. 385 // Please note this does not perform a garbage collection. 386 static Object* AllocateStringFromAscii( 387 Vector<const char> str, 388 PretenureFlag pretenure = NOT_TENURED); 389 static Object* AllocateStringFromUtf8( 390 Vector<const char> str, 391 PretenureFlag pretenure = NOT_TENURED); 392 static Object* AllocateStringFromTwoByte( 393 Vector<const uc16> str, 394 PretenureFlag pretenure = NOT_TENURED); 395 396 // Allocates a symbol in old space based on the character stream. 397 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 398 // failed. 399 // Please note this function does not perform a garbage collection. 400 static inline Object* AllocateSymbol(Vector<const char> str, 401 int chars, 402 uint32_t length_field); 403 404 static Object* AllocateInternalSymbol(unibrow::CharacterStream* buffer, 405 int chars, 406 uint32_t length_field); 407 408 static Object* AllocateExternalSymbol(Vector<const char> str, 409 int chars); 410 411 412 // Allocates and partially initializes a String. There are two String 413 // encodings: ASCII and two byte. These functions allocate a string of the 414 // given length and set its map and length fields. The characters of the 415 // string are uninitialized. 416 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 417 // failed. 418 // Please note this does not perform a garbage collection. 419 static Object* AllocateRawAsciiString( 420 int length, 421 PretenureFlag pretenure = NOT_TENURED); 422 static Object* AllocateRawTwoByteString( 423 int length, 424 PretenureFlag pretenure = NOT_TENURED); 425 426 // Computes a single character string where the character has code. 427 // A cache is used for ascii codes. 428 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 429 // failed. Please note this does not perform a garbage collection. 430 static Object* LookupSingleCharacterStringFromCode(uint16_t code); 431 432 // Allocate a byte array of the specified length 433 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 434 // failed. 435 // Please note this does not perform a garbage collection. 436 static Object* AllocateByteArray(int length, PretenureFlag pretenure); 437 438 // Allocate a non-tenured byte array of the specified length 439 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 440 // failed. 441 // Please note this does not perform a garbage collection. 442 static Object* AllocateByteArray(int length); 443 444 // Allocate a pixel array of the specified length 445 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 446 // failed. 447 // Please note this does not perform a garbage collection. 448 static Object* AllocatePixelArray(int length, 449 uint8_t* external_pointer, 450 PretenureFlag pretenure); 451 452 // Allocate a tenured JS global property cell. 453 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 454 // failed. 455 // Please note this does not perform a garbage collection. 456 static Object* AllocateJSGlobalPropertyCell(Object* value); 457 458 // Allocates a fixed array initialized with undefined values 459 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 460 // failed. 461 // Please note this does not perform a garbage collection. 462 static Object* AllocateFixedArray(int length, PretenureFlag pretenure); 463 // Allocate uninitialized, non-tenured fixed array with length elements. 464 static Object* AllocateFixedArray(int length); 465 466 // Make a copy of src and return it. Returns 467 // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed. 468 static Object* CopyFixedArray(FixedArray* src); 469 470 // Allocates a fixed array initialized with the hole values. 471 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 472 // failed. 473 // Please note this does not perform a garbage collection. 474 static Object* AllocateFixedArrayWithHoles(int length); 475 476 // AllocateHashTable is identical to AllocateFixedArray except 477 // that the resulting object has hash_table_map as map. 478 static Object* AllocateHashTable(int length); 479 480 // Allocate a global (but otherwise uninitialized) context. 481 static Object* AllocateGlobalContext(); 482 483 // Allocate a function context. 484 static Object* AllocateFunctionContext(int length, JSFunction* closure); 485 486 // Allocate a 'with' context. 487 static Object* AllocateWithContext(Context* previous, 488 JSObject* extension, 489 bool is_catch_context); 490 491 // Allocates a new utility object in the old generation. 492 static Object* AllocateStruct(InstanceType type); 493 494 // Allocates a function initialized with a shared part. 495 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 496 // failed. 497 // Please note this does not perform a garbage collection. 498 static Object* AllocateFunction(Map* function_map, 499 SharedFunctionInfo* shared, 500 Object* prototype); 501 502 // Indicies for direct access into argument objects. 503 static const int arguments_callee_index = 0; 504 static const int arguments_length_index = 1; 505 506 // Allocates an arguments object - optionally with an elements array. 507 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 508 // failed. 509 // Please note this does not perform a garbage collection. 510 static Object* AllocateArgumentsObject(Object* callee, int length); 511 512 // Converts a double into either a Smi or a HeapNumber object. 513 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 514 // failed. 515 // Please note this does not perform a garbage collection. 516 static Object* NewNumberFromDouble(double value, 517 PretenureFlag pretenure = NOT_TENURED); 518 519 // Same as NewNumberFromDouble, but may return a preallocated/immutable 520 // number object (e.g., minus_zero_value_, nan_value_) 521 static Object* NumberFromDouble(double value, 522 PretenureFlag pretenure = NOT_TENURED); 523 524 // Allocated a HeapNumber from value. 525 static Object* AllocateHeapNumber(double value, PretenureFlag pretenure); 526 static Object* AllocateHeapNumber(double value); // pretenure = NOT_TENURED 527 528 // Converts an int into either a Smi or a HeapNumber object. 529 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 530 // failed. 531 // Please note this does not perform a garbage collection. 532 static inline Object* NumberFromInt32(int32_t value); 533 534 // Converts an int into either a Smi or a HeapNumber object. 535 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 536 // failed. 537 // Please note this does not perform a garbage collection. 538 static inline Object* NumberFromUint32(uint32_t value); 539 540 // Allocates a new proxy object. 541 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 542 // failed. 543 // Please note this does not perform a garbage collection. 544 static Object* AllocateProxy(Address proxy, 545 PretenureFlag pretenure = NOT_TENURED); 546 547 // Allocates a new SharedFunctionInfo object. 548 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 549 // failed. 550 // Please note this does not perform a garbage collection. 551 static Object* AllocateSharedFunctionInfo(Object* name); 552 553 // Allocates a new cons string object. 554 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 555 // failed. 556 // Please note this does not perform a garbage collection. 557 static Object* AllocateConsString(String* first, String* second); 558 559 // Allocates a new sliced string object which is a slice of an underlying 560 // string buffer stretching from the index start (inclusive) to the index 561 // end (exclusive). 562 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 563 // failed. 564 // Please note this does not perform a garbage collection. 565 static Object* AllocateSlicedString(String* buffer, 566 int start, 567 int end); 568 569 // Allocates a new sub string object which is a substring of an underlying 570 // string buffer stretching from the index start (inclusive) to the index 571 // end (exclusive). 572 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 573 // failed. 574 // Please note this does not perform a garbage collection. 575 static Object* AllocateSubString(String* buffer, 576 int start, 577 int end); 578 579 // Allocate a new external string object, which is backed by a string 580 // resource that resides outside the V8 heap. 581 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 582 // failed. 583 // Please note this does not perform a garbage collection. 584 static Object* AllocateExternalStringFromAscii( 585 ExternalAsciiString::Resource* resource); 586 static Object* AllocateExternalStringFromTwoByte( 587 ExternalTwoByteString::Resource* resource); 588 589 // Allocates an uninitialized object. The memory is non-executable if the 590 // hardware and OS allow. 591 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 592 // failed. 593 // Please note this function does not perform a garbage collection. 594 static inline Object* AllocateRaw(int size_in_bytes, 595 AllocationSpace space, 596 AllocationSpace retry_space); 597 598 // Initialize a filler object to keep the ability to iterate over the heap 599 // when shortening objects. 600 static void CreateFillerObjectAt(Address addr, int size); 601 602 // Makes a new native code object 603 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 604 // failed. On success, the pointer to the Code object is stored in the 605 // self_reference. This allows generated code to reference its own Code 606 // object by containing this pointer. 607 // Please note this function does not perform a garbage collection. 608 static Object* CreateCode(const CodeDesc& desc, 609 ZoneScopeInfo* sinfo, 610 Code::Flags flags, 611 Handle<Object> self_reference); 612 613 static Object* CopyCode(Code* code); 614 // Finds the symbol for string in the symbol table. 615 // If not found, a new symbol is added to the table and returned. 616 // Returns Failure::RetryAfterGC(requested_bytes, space) if allocation 617 // failed. 618 // Please note this function does not perform a garbage collection. 619 static Object* LookupSymbol(Vector<const char> str); LookupAsciiSymbol(const char * str)620 static Object* LookupAsciiSymbol(const char* str) { 621 return LookupSymbol(CStrVector(str)); 622 } 623 static Object* LookupSymbol(String* str); 624 static bool LookupSymbolIfExists(String* str, String** symbol); 625 626 // Compute the matching symbol map for a string if possible. 627 // NULL is returned if string is in new space or not flattened. 628 static Map* SymbolMapForString(String* str); 629 630 // Converts the given boolean condition to JavaScript boolean value. ToBoolean(bool condition)631 static Object* ToBoolean(bool condition) { 632 return condition ? true_value() : false_value(); 633 } 634 635 // Code that should be run before and after each GC. Includes some 636 // reporting/verification activities when compiled with DEBUG set. 637 static void GarbageCollectionPrologue(); 638 static void GarbageCollectionEpilogue(); 639 640 // Code that should be executed after the garbage collection proper. 641 static void PostGarbageCollectionProcessing(); 642 643 // Performs garbage collection operation. 644 // Returns whether required_space bytes are available after the collection. 645 static bool CollectGarbage(int required_space, AllocationSpace space); 646 647 // Performs a full garbage collection. Force compaction if the 648 // parameter is true. 649 static void CollectAllGarbage(bool force_compaction); 650 651 // Performs a full garbage collection if a context has been disposed 652 // since the last time the check was performed. 653 static void CollectAllGarbageIfContextDisposed(); 654 655 // Notify the heap that a context has been disposed. 656 static void NotifyContextDisposed(); 657 658 // Utility to invoke the scavenger. This is needed in test code to 659 // ensure correct callback for weak global handles. 660 static void PerformScavenge(); 661 662 #ifdef DEBUG 663 // Utility used with flag gc-greedy. 664 static bool GarbageCollectionGreedyCheck(); 665 #endif 666 SetGlobalGCPrologueCallback(GCCallback callback)667 static void SetGlobalGCPrologueCallback(GCCallback callback) { 668 global_gc_prologue_callback_ = callback; 669 } SetGlobalGCEpilogueCallback(GCCallback callback)670 static void SetGlobalGCEpilogueCallback(GCCallback callback) { 671 global_gc_epilogue_callback_ = callback; 672 } 673 674 // Heap root getters. We have versions with and without type::cast() here. 675 // You can't use type::cast during GC because the assert fails. 676 #define ROOT_ACCESSOR(type, name, camel_name) \ 677 static inline type* name() { \ 678 return type::cast(roots_[k##camel_name##RootIndex]); \ 679 } \ 680 static inline type* raw_unchecked_##name() { \ 681 return reinterpret_cast<type*>(roots_[k##camel_name##RootIndex]); \ 682 } 683 ROOT_LIST(ROOT_ACCESSOR) 684 #undef ROOT_ACCESSOR 685 686 // Utility type maps 687 #define STRUCT_MAP_ACCESSOR(NAME, Name, name) \ 688 static inline Map* name##_map() { \ 689 return Map::cast(roots_[k##Name##MapRootIndex]); \ 690 } STRUCT_LIST(STRUCT_MAP_ACCESSOR)691 STRUCT_LIST(STRUCT_MAP_ACCESSOR) 692 #undef STRUCT_MAP_ACCESSOR 693 694 #define SYMBOL_ACCESSOR(name, str) static inline String* name() { \ 695 return String::cast(roots_[k##name##RootIndex]); \ 696 } 697 SYMBOL_LIST(SYMBOL_ACCESSOR) 698 #undef SYMBOL_ACCESSOR 699 700 // The hidden_symbol is special because it is the empty string, but does 701 // not match the empty string. 702 static String* hidden_symbol() { return hidden_symbol_; } 703 704 // Iterates over all roots in the heap. 705 static void IterateRoots(ObjectVisitor* v); 706 // Iterates over all strong roots in the heap. 707 static void IterateStrongRoots(ObjectVisitor* v); 708 709 // Iterates remembered set of an old space. 710 static void IterateRSet(PagedSpace* space, ObjectSlotCallback callback); 711 712 // Iterates a range of remembered set addresses starting with rset_start 713 // corresponding to the range of allocated pointers 714 // [object_start, object_end). 715 // Returns the number of bits that were set. 716 static int IterateRSetRange(Address object_start, 717 Address object_end, 718 Address rset_start, 719 ObjectSlotCallback copy_object_func); 720 721 // Returns whether the object resides in new space. 722 static inline bool InNewSpace(Object* object); 723 static inline bool InFromSpace(Object* object); 724 static inline bool InToSpace(Object* object); 725 726 // Checks whether an address/object in the heap (including auxiliary 727 // area and unused area). 728 static bool Contains(Address addr); 729 static bool Contains(HeapObject* value); 730 731 // Checks whether an address/object in a space. 732 // Currently used by tests and heap verification only. 733 static bool InSpace(Address addr, AllocationSpace space); 734 static bool InSpace(HeapObject* value, AllocationSpace space); 735 736 // Finds out which space an object should get promoted to based on its type. 737 static inline OldSpace* TargetSpace(HeapObject* object); 738 static inline AllocationSpace TargetSpaceId(InstanceType type); 739 740 // Sets the stub_cache_ (only used when expanding the dictionary). public_set_code_stubs(NumberDictionary * value)741 static void public_set_code_stubs(NumberDictionary* value) { 742 roots_[kCodeStubsRootIndex] = value; 743 } 744 745 // Sets the non_monomorphic_cache_ (only used when expanding the dictionary). public_set_non_monomorphic_cache(NumberDictionary * value)746 static void public_set_non_monomorphic_cache(NumberDictionary* value) { 747 roots_[kNonMonomorphicCacheRootIndex] = value; 748 } 749 750 // Update the next script id. 751 static inline void SetLastScriptId(Object* last_script_id); 752 753 // Generated code can embed this address to get access to the roots. roots_address()754 static Object** roots_address() { return roots_; } 755 756 #ifdef DEBUG 757 static void Print(); 758 static void PrintHandles(); 759 760 // Verify the heap is in its normal state before or after a GC. 761 static void Verify(); 762 763 // Report heap statistics. 764 static void ReportHeapStatistics(const char* title); 765 static void ReportCodeStatistics(const char* title); 766 767 // Fill in bogus values in from space 768 static void ZapFromSpace(); 769 #endif 770 771 #if defined(ENABLE_LOGGING_AND_PROFILING) 772 // Print short heap statistics. 773 static void PrintShortHeapStatistics(); 774 #endif 775 776 // Makes a new symbol object 777 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 778 // failed. 779 // Please note this function does not perform a garbage collection. 780 static Object* CreateSymbol(const char* str, int length, int hash); 781 static Object* CreateSymbol(String* str); 782 783 // Write barrier support for address[offset] = o. 784 static inline void RecordWrite(Address address, int offset); 785 786 // Given an address occupied by a live code object, return that object. 787 static Object* FindCodeObject(Address a); 788 789 // Invoke Shrink on shrinkable spaces. 790 static void Shrink(); 791 792 enum HeapState { NOT_IN_GC, SCAVENGE, MARK_COMPACT }; gc_state()793 static inline HeapState gc_state() { return gc_state_; } 794 795 #ifdef DEBUG IsAllocationAllowed()796 static bool IsAllocationAllowed() { return allocation_allowed_; } 797 static inline bool allow_allocation(bool enable); 798 disallow_allocation_failure()799 static bool disallow_allocation_failure() { 800 return disallow_allocation_failure_; 801 } 802 803 static void TracePathToObject(); 804 static void TracePathToGlobal(); 805 #endif 806 807 // Callback function passed to Heap::Iterate etc. Copies an object if 808 // necessary, the object might be promoted to an old space. The caller must 809 // ensure the precondition that the object is (a) a heap object and (b) in 810 // the heap's from space. 811 static void ScavengePointer(HeapObject** p); 812 static inline void ScavengeObject(HeapObject** p, HeapObject* object); 813 814 // Clear a range of remembered set addresses corresponding to the object 815 // area address 'start' with size 'size_in_bytes', eg, when adding blocks 816 // to the free list. 817 static void ClearRSetRange(Address start, int size_in_bytes); 818 819 // Rebuild remembered set in old and map spaces. 820 static void RebuildRSets(); 821 822 // Commits from space if it is uncommitted. 823 static void EnsureFromSpaceIsCommitted(); 824 825 // 826 // Support for the API. 827 // 828 829 static bool CreateApiObjects(); 830 831 // Attempt to find the number in a small cache. If we finds it, return 832 // the string representation of the number. Otherwise return undefined. 833 static Object* GetNumberStringCache(Object* number); 834 835 // Update the cache with a new number-string pair. 836 static void SetNumberStringCache(Object* number, String* str); 837 838 // Entries in the cache. Must be a power of 2. 839 static const int kNumberStringCacheSize = 64; 840 841 // Adjusts the amount of registered external memory. 842 // Returns the adjusted value. 843 static inline int AdjustAmountOfExternalAllocatedMemory(int change_in_bytes); 844 845 // Allocate unitialized fixed array (pretenure == NON_TENURE). 846 static Object* AllocateRawFixedArray(int length); 847 848 // True if we have reached the allocation limit in the old generation that 849 // should force the next GC (caused normally) to be a full one. OldGenerationPromotionLimitReached()850 static bool OldGenerationPromotionLimitReached() { 851 return (PromotedSpaceSize() + PromotedExternalMemorySize()) 852 > old_gen_promotion_limit_; 853 } 854 855 // True if we have reached the allocation limit in the old generation that 856 // should artificially cause a GC right now. OldGenerationAllocationLimitReached()857 static bool OldGenerationAllocationLimitReached() { 858 return (PromotedSpaceSize() + PromotedExternalMemorySize()) 859 > old_gen_allocation_limit_; 860 } 861 862 // Can be called when the embedding application is idle. 863 static bool IdleNotification(); 864 865 // Declare all the root indices. 866 enum RootListIndex { 867 #define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex, 868 STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION) 869 #undef ROOT_INDEX_DECLARATION 870 871 // Utility type maps 872 #define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex, 873 STRUCT_LIST(DECLARE_STRUCT_MAP) 874 #undef DECLARE_STRUCT_MAP 875 876 #define SYMBOL_INDEX_DECLARATION(name, str) k##name##RootIndex, 877 SYMBOL_LIST(SYMBOL_INDEX_DECLARATION) 878 #undef SYMBOL_DECLARATION 879 880 kSymbolTableRootIndex, 881 kStrongRootListLength = kSymbolTableRootIndex, 882 kRootListLength 883 }; 884 885 private: 886 static int semispace_size_; 887 static int initial_semispace_size_; 888 static int young_generation_size_; 889 static int old_generation_size_; 890 891 // For keeping track of how much data has survived 892 // scavenge since last new space expansion. 893 static int survived_since_last_expansion_; 894 895 static int always_allocate_scope_depth_; 896 static bool context_disposed_pending_; 897 898 static const int kMaxMapSpaceSize = 8*MB; 899 900 #if defined(V8_TARGET_ARCH_X64) 901 static const int kMaxObjectSizeInNewSpace = 512*KB; 902 #else 903 static const int kMaxObjectSizeInNewSpace = 256*KB; 904 #endif 905 906 static NewSpace new_space_; 907 static OldSpace* old_pointer_space_; 908 static OldSpace* old_data_space_; 909 static OldSpace* code_space_; 910 static MapSpace* map_space_; 911 static CellSpace* cell_space_; 912 static LargeObjectSpace* lo_space_; 913 static HeapState gc_state_; 914 915 // Returns the size of object residing in non new spaces. 916 static int PromotedSpaceSize(); 917 918 // Returns the amount of external memory registered since last global gc. 919 static int PromotedExternalMemorySize(); 920 921 static int mc_count_; // how many mark-compact collections happened 922 static int gc_count_; // how many gc happened 923 924 #define ROOT_ACCESSOR(type, name, camel_name) \ 925 static inline void set_##name(type* value) { \ 926 roots_[k##camel_name##RootIndex] = value; \ 927 } 928 ROOT_LIST(ROOT_ACCESSOR) 929 #undef ROOT_ACCESSOR 930 931 #ifdef DEBUG 932 static bool allocation_allowed_; 933 934 // If the --gc-interval flag is set to a positive value, this 935 // variable holds the value indicating the number of allocations 936 // remain until the next failure and garbage collection. 937 static int allocation_timeout_; 938 939 // Do we expect to be able to handle allocation failure at this 940 // time? 941 static bool disallow_allocation_failure_; 942 #endif // DEBUG 943 944 // Limit that triggers a global GC on the next (normally caused) GC. This 945 // is checked when we have already decided to do a GC to help determine 946 // which collector to invoke. 947 static int old_gen_promotion_limit_; 948 949 // Limit that triggers a global GC as soon as is reasonable. This is 950 // checked before expanding a paged space in the old generation and on 951 // every allocation in large object space. 952 static int old_gen_allocation_limit_; 953 954 // Limit on the amount of externally allocated memory allowed 955 // between global GCs. If reached a global GC is forced. 956 static int external_allocation_limit_; 957 958 // The amount of external memory registered through the API kept alive 959 // by global handles 960 static int amount_of_external_allocated_memory_; 961 962 // Caches the amount of external memory registered at the last global gc. 963 static int amount_of_external_allocated_memory_at_last_global_gc_; 964 965 // Indicates that an allocation has failed in the old generation since the 966 // last GC. 967 static int old_gen_exhausted_; 968 969 static Object* roots_[kRootListLength]; 970 971 struct StringTypeTable { 972 InstanceType type; 973 int size; 974 RootListIndex index; 975 }; 976 977 struct ConstantSymbolTable { 978 const char* contents; 979 RootListIndex index; 980 }; 981 982 struct StructTable { 983 InstanceType type; 984 int size; 985 RootListIndex index; 986 }; 987 988 static const StringTypeTable string_type_table[]; 989 static const ConstantSymbolTable constant_symbol_table[]; 990 static const StructTable struct_table[]; 991 992 // The special hidden symbol which is an empty string, but does not match 993 // any string when looked up in properties. 994 static String* hidden_symbol_; 995 996 // GC callback function, called before and after mark-compact GC. 997 // Allocations in the callback function are disallowed. 998 static GCCallback global_gc_prologue_callback_; 999 static GCCallback global_gc_epilogue_callback_; 1000 1001 // Checks whether a global GC is necessary 1002 static GarbageCollector SelectGarbageCollector(AllocationSpace space); 1003 1004 // Performs garbage collection 1005 static void PerformGarbageCollection(AllocationSpace space, 1006 GarbageCollector collector, 1007 GCTracer* tracer); 1008 1009 // Returns either a Smi or a Number object from 'value'. If 'new_object' 1010 // is false, it may return a preallocated immutable object. 1011 static Object* SmiOrNumberFromDouble(double value, 1012 bool new_object, 1013 PretenureFlag pretenure = NOT_TENURED); 1014 1015 // Allocate an uninitialized object in map space. The behavior is identical 1016 // to Heap::AllocateRaw(size_in_bytes, MAP_SPACE), except that (a) it doesn't 1017 // have to test the allocation space argument and (b) can reduce code size 1018 // (since both AllocateRaw and AllocateRawMap are inlined). 1019 static inline Object* AllocateRawMap(); 1020 1021 // Allocate an uninitialized object in the global property cell space. 1022 static inline Object* AllocateRawCell(); 1023 1024 // Initializes a JSObject based on its map. 1025 static void InitializeJSObjectFromMap(JSObject* obj, 1026 FixedArray* properties, 1027 Map* map); 1028 1029 static bool CreateInitialMaps(); 1030 static bool CreateInitialObjects(); 1031 1032 // These four Create*EntryStub functions are here because of a gcc-4.4 bug 1033 // that assigns wrong vtable entries. 1034 static void CreateCEntryStub(); 1035 static void CreateCEntryDebugBreakStub(); 1036 static void CreateJSEntryStub(); 1037 static void CreateJSConstructEntryStub(); 1038 static void CreateRegExpCEntryStub(); 1039 1040 static void CreateFixedStubs(); 1041 1042 static Object* CreateOddball(Map* map, 1043 const char* to_string, 1044 Object* to_number); 1045 1046 // Allocate empty fixed array. 1047 static Object* AllocateEmptyFixedArray(); 1048 1049 // Performs a minor collection in new generation. 1050 static void Scavenge(); 1051 1052 // Performs a major collection in the whole heap. 1053 static void MarkCompact(GCTracer* tracer); 1054 1055 // Code to be run before and after mark-compact. 1056 static void MarkCompactPrologue(bool is_compacting); 1057 static void MarkCompactEpilogue(bool is_compacting); 1058 1059 // Helper function used by CopyObject to copy a source object to an 1060 // allocated target object and update the forwarding pointer in the source 1061 // object. Returns the target object. 1062 static HeapObject* MigrateObject(HeapObject* source, 1063 HeapObject* target, 1064 int size); 1065 1066 // Helper function that governs the promotion policy from new space to 1067 // old. If the object's old address lies below the new space's age 1068 // mark or if we've already filled the bottom 1/16th of the to space, 1069 // we try to promote this object. 1070 static inline bool ShouldBePromoted(Address old_address, int object_size); 1071 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING) 1072 // Record the copy of an object in the NewSpace's statistics. 1073 static void RecordCopiedObject(HeapObject* obj); 1074 1075 // Record statistics before and after garbage collection. 1076 static void ReportStatisticsBeforeGC(); 1077 static void ReportStatisticsAfterGC(); 1078 #endif 1079 1080 // Update an old object's remembered set 1081 static int UpdateRSet(HeapObject* obj); 1082 1083 // Rebuild remembered set in an old space. 1084 static void RebuildRSets(PagedSpace* space); 1085 1086 // Rebuild remembered set in the large object space. 1087 static void RebuildRSets(LargeObjectSpace* space); 1088 1089 // Slow part of scavenge object. 1090 static void ScavengeObjectSlow(HeapObject** p, HeapObject* object); 1091 1092 // Copy memory from src to dst. 1093 static inline void CopyBlock(Object** dst, Object** src, int byte_size); 1094 1095 // Initializes a function with a shared part and prototype. 1096 // Returns the function. 1097 // Note: this code was factored out of AllocateFunction such that 1098 // other parts of the VM could use it. Specifically, a function that creates 1099 // instances of type JS_FUNCTION_TYPE benefit from the use of this function. 1100 // Please note this does not perform a garbage collection. 1101 static inline Object* InitializeFunction(JSFunction* function, 1102 SharedFunctionInfo* shared, 1103 Object* prototype); 1104 1105 static const int kInitialSymbolTableSize = 2048; 1106 static const int kInitialEvalCacheSize = 64; 1107 1108 friend class Factory; 1109 friend class DisallowAllocationFailure; 1110 friend class AlwaysAllocateScope; 1111 }; 1112 1113 1114 class AlwaysAllocateScope { 1115 public: AlwaysAllocateScope()1116 AlwaysAllocateScope() { 1117 // We shouldn't hit any nested scopes, because that requires 1118 // non-handle code to call handle code. The code still works but 1119 // performance will degrade, so we want to catch this situation 1120 // in debug mode. 1121 ASSERT(Heap::always_allocate_scope_depth_ == 0); 1122 Heap::always_allocate_scope_depth_++; 1123 } 1124 ~AlwaysAllocateScope()1125 ~AlwaysAllocateScope() { 1126 Heap::always_allocate_scope_depth_--; 1127 ASSERT(Heap::always_allocate_scope_depth_ == 0); 1128 } 1129 }; 1130 1131 1132 #ifdef DEBUG 1133 // Visitor class to verify interior pointers that do not have remembered set 1134 // bits. All heap object pointers have to point into the heap to a location 1135 // that has a map pointer at its first word. Caveat: Heap::Contains is an 1136 // approximation because it can return true for objects in a heap space but 1137 // above the allocation pointer. 1138 class VerifyPointersVisitor: public ObjectVisitor { 1139 public: VisitPointers(Object ** start,Object ** end)1140 void VisitPointers(Object** start, Object** end) { 1141 for (Object** current = start; current < end; current++) { 1142 if ((*current)->IsHeapObject()) { 1143 HeapObject* object = HeapObject::cast(*current); 1144 ASSERT(Heap::Contains(object)); 1145 ASSERT(object->map()->IsMap()); 1146 } 1147 } 1148 } 1149 }; 1150 1151 1152 // Visitor class to verify interior pointers that have remembered set bits. 1153 // As VerifyPointersVisitor but also checks that remembered set bits are 1154 // always set for pointers into new space. 1155 class VerifyPointersAndRSetVisitor: public ObjectVisitor { 1156 public: VisitPointers(Object ** start,Object ** end)1157 void VisitPointers(Object** start, Object** end) { 1158 for (Object** current = start; current < end; current++) { 1159 if ((*current)->IsHeapObject()) { 1160 HeapObject* object = HeapObject::cast(*current); 1161 ASSERT(Heap::Contains(object)); 1162 ASSERT(object->map()->IsMap()); 1163 if (Heap::InNewSpace(object)) { 1164 ASSERT(Page::IsRSetSet(reinterpret_cast<Address>(current), 0)); 1165 } 1166 } 1167 } 1168 } 1169 }; 1170 #endif 1171 1172 1173 // Space iterator for iterating over all spaces of the heap. 1174 // Returns each space in turn, and null when it is done. 1175 class AllSpaces BASE_EMBEDDED { 1176 public: 1177 Space* next(); AllSpaces()1178 AllSpaces() { counter_ = FIRST_SPACE; } 1179 private: 1180 int counter_; 1181 }; 1182 1183 1184 // Space iterator for iterating over all old spaces of the heap: Old pointer 1185 // space, old data space and code space. 1186 // Returns each space in turn, and null when it is done. 1187 class OldSpaces BASE_EMBEDDED { 1188 public: 1189 OldSpace* next(); OldSpaces()1190 OldSpaces() { counter_ = OLD_POINTER_SPACE; } 1191 private: 1192 int counter_; 1193 }; 1194 1195 1196 // Space iterator for iterating over all the paged spaces of the heap: 1197 // Map space, old pointer space, old data space and code space. 1198 // Returns each space in turn, and null when it is done. 1199 class PagedSpaces BASE_EMBEDDED { 1200 public: 1201 PagedSpace* next(); PagedSpaces()1202 PagedSpaces() { counter_ = OLD_POINTER_SPACE; } 1203 private: 1204 int counter_; 1205 }; 1206 1207 1208 // Space iterator for iterating over all spaces of the heap. 1209 // For each space an object iterator is provided. The deallocation of the 1210 // returned object iterators is handled by the space iterator. 1211 class SpaceIterator : public Malloced { 1212 public: 1213 SpaceIterator(); 1214 virtual ~SpaceIterator(); 1215 1216 bool has_next(); 1217 ObjectIterator* next(); 1218 1219 private: 1220 ObjectIterator* CreateIterator(); 1221 1222 int current_space_; // from enum AllocationSpace. 1223 ObjectIterator* iterator_; // object iterator for the current space. 1224 }; 1225 1226 1227 // A HeapIterator provides iteration over the whole heap It aggregates a the 1228 // specific iterators for the different spaces as these can only iterate over 1229 // one space only. 1230 1231 class HeapIterator BASE_EMBEDDED { 1232 public: 1233 explicit HeapIterator(); 1234 virtual ~HeapIterator(); 1235 1236 bool has_next(); 1237 HeapObject* next(); 1238 void reset(); 1239 1240 private: 1241 // Perform the initialization. 1242 void Init(); 1243 1244 // Perform all necessary shutdown (destruction) work. 1245 void Shutdown(); 1246 1247 // Space iterator for iterating all the spaces. 1248 SpaceIterator* space_iterator_; 1249 // Object iterator for the space currently being iterated. 1250 ObjectIterator* object_iterator_; 1251 }; 1252 1253 1254 // Cache for mapping (map, property name) into field offset. 1255 // Cleared at startup and prior to mark sweep collection. 1256 class KeyedLookupCache { 1257 public: 1258 // Lookup field offset for (map, name). If absent, -1 is returned. 1259 static int Lookup(Map* map, String* name); 1260 1261 // Update an element in the cache. 1262 static void Update(Map* map, String* name, int field_offset); 1263 1264 // Clear the cache. 1265 static void Clear(); 1266 private: 1267 static inline int Hash(Map* map, String* name); 1268 static const int kLength = 64; 1269 struct Key { 1270 Map* map; 1271 String* name; 1272 }; 1273 static Key keys_[kLength]; 1274 static int field_offsets_[kLength]; 1275 }; 1276 1277 1278 1279 // Cache for mapping (array, property name) into descriptor index. 1280 // The cache contains both positive and negative results. 1281 // Descriptor index equals kNotFound means the property is absent. 1282 // Cleared at startup and prior to any gc. 1283 class DescriptorLookupCache { 1284 public: 1285 // Lookup descriptor index for (map, name). 1286 // If absent, kAbsent is returned. Lookup(DescriptorArray * array,String * name)1287 static int Lookup(DescriptorArray* array, String* name) { 1288 if (!StringShape(name).IsSymbol()) return kAbsent; 1289 int index = Hash(array, name); 1290 Key& key = keys_[index]; 1291 if ((key.array == array) && (key.name == name)) return results_[index]; 1292 return kAbsent; 1293 } 1294 1295 // Update an element in the cache. Update(DescriptorArray * array,String * name,int result)1296 static void Update(DescriptorArray* array, String* name, int result) { 1297 ASSERT(result != kAbsent); 1298 if (StringShape(name).IsSymbol()) { 1299 int index = Hash(array, name); 1300 Key& key = keys_[index]; 1301 key.array = array; 1302 key.name = name; 1303 results_[index] = result; 1304 } 1305 } 1306 1307 // Clear the cache. 1308 static void Clear(); 1309 1310 static const int kAbsent = -2; 1311 private: Hash(DescriptorArray * array,String * name)1312 static int Hash(DescriptorArray* array, String* name) { 1313 // Uses only lower 32 bits if pointers are larger. 1314 uintptr_t array_hash = 1315 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(array)) >> 2; 1316 uintptr_t name_hash = 1317 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name)) >> 2; 1318 return (array_hash ^ name_hash) % kLength; 1319 } 1320 1321 static const int kLength = 64; 1322 struct Key { 1323 DescriptorArray* array; 1324 String* name; 1325 }; 1326 1327 static Key keys_[kLength]; 1328 static int results_[kLength]; 1329 }; 1330 1331 1332 // ---------------------------------------------------------------------------- 1333 // Marking stack for tracing live objects. 1334 1335 class MarkingStack { 1336 public: Initialize(Address low,Address high)1337 void Initialize(Address low, Address high) { 1338 top_ = low_ = reinterpret_cast<HeapObject**>(low); 1339 high_ = reinterpret_cast<HeapObject**>(high); 1340 overflowed_ = false; 1341 } 1342 is_full()1343 bool is_full() { return top_ >= high_; } 1344 is_empty()1345 bool is_empty() { return top_ <= low_; } 1346 overflowed()1347 bool overflowed() { return overflowed_; } 1348 clear_overflowed()1349 void clear_overflowed() { overflowed_ = false; } 1350 1351 // Push the (marked) object on the marking stack if there is room, 1352 // otherwise mark the object as overflowed and wait for a rescan of the 1353 // heap. Push(HeapObject * object)1354 void Push(HeapObject* object) { 1355 CHECK(object->IsHeapObject()); 1356 if (is_full()) { 1357 object->SetOverflow(); 1358 overflowed_ = true; 1359 } else { 1360 *(top_++) = object; 1361 } 1362 } 1363 Pop()1364 HeapObject* Pop() { 1365 ASSERT(!is_empty()); 1366 HeapObject* object = *(--top_); 1367 CHECK(object->IsHeapObject()); 1368 return object; 1369 } 1370 1371 private: 1372 HeapObject** low_; 1373 HeapObject** top_; 1374 HeapObject** high_; 1375 bool overflowed_; 1376 }; 1377 1378 1379 // A helper class to document/test C++ scopes where we do not 1380 // expect a GC. Usage: 1381 // 1382 // /* Allocation not allowed: we cannot handle a GC in this scope. */ 1383 // { AssertNoAllocation nogc; 1384 // ... 1385 // } 1386 1387 #ifdef DEBUG 1388 1389 class DisallowAllocationFailure { 1390 public: DisallowAllocationFailure()1391 DisallowAllocationFailure() { 1392 old_state_ = Heap::disallow_allocation_failure_; 1393 Heap::disallow_allocation_failure_ = true; 1394 } ~DisallowAllocationFailure()1395 ~DisallowAllocationFailure() { 1396 Heap::disallow_allocation_failure_ = old_state_; 1397 } 1398 private: 1399 bool old_state_; 1400 }; 1401 1402 class AssertNoAllocation { 1403 public: AssertNoAllocation()1404 AssertNoAllocation() { 1405 old_state_ = Heap::allow_allocation(false); 1406 } 1407 ~AssertNoAllocation()1408 ~AssertNoAllocation() { 1409 Heap::allow_allocation(old_state_); 1410 } 1411 1412 private: 1413 bool old_state_; 1414 }; 1415 1416 class DisableAssertNoAllocation { 1417 public: DisableAssertNoAllocation()1418 DisableAssertNoAllocation() { 1419 old_state_ = Heap::allow_allocation(true); 1420 } 1421 ~DisableAssertNoAllocation()1422 ~DisableAssertNoAllocation() { 1423 Heap::allow_allocation(old_state_); 1424 } 1425 1426 private: 1427 bool old_state_; 1428 }; 1429 1430 #else // ndef DEBUG 1431 1432 class AssertNoAllocation { 1433 public: AssertNoAllocation()1434 AssertNoAllocation() { } ~AssertNoAllocation()1435 ~AssertNoAllocation() { } 1436 }; 1437 1438 class DisableAssertNoAllocation { 1439 public: DisableAssertNoAllocation()1440 DisableAssertNoAllocation() { } ~DisableAssertNoAllocation()1441 ~DisableAssertNoAllocation() { } 1442 }; 1443 1444 #endif 1445 1446 #ifdef ENABLE_LOGGING_AND_PROFILING 1447 // The HeapProfiler writes data to the log files, which can be postprocessed 1448 // to generate .hp files for use by the GHC/Valgrind tool hp2ps. 1449 class HeapProfiler { 1450 public: 1451 // Write a single heap sample to the log file. 1452 static void WriteSample(); 1453 1454 private: 1455 // Update the array info with stats from obj. 1456 static void CollectStats(HeapObject* obj, HistogramInfo* info); 1457 }; 1458 #endif 1459 1460 // GCTracer collects and prints ONE line after each garbage collector 1461 // invocation IFF --trace_gc is used. 1462 1463 class GCTracer BASE_EMBEDDED { 1464 public: 1465 GCTracer(); 1466 1467 ~GCTracer(); 1468 1469 // Sets the collector. set_collector(GarbageCollector collector)1470 void set_collector(GarbageCollector collector) { collector_ = collector; } 1471 1472 // Sets the GC count. set_gc_count(int count)1473 void set_gc_count(int count) { gc_count_ = count; } 1474 1475 // Sets the full GC count. set_full_gc_count(int count)1476 void set_full_gc_count(int count) { full_gc_count_ = count; } 1477 1478 // Sets the flag that this is a compacting full GC. set_is_compacting()1479 void set_is_compacting() { is_compacting_ = true; } 1480 1481 // Increment and decrement the count of marked objects. increment_marked_count()1482 void increment_marked_count() { ++marked_count_; } decrement_marked_count()1483 void decrement_marked_count() { --marked_count_; } 1484 marked_count()1485 int marked_count() { return marked_count_; } 1486 1487 private: 1488 // Returns a string matching the collector. 1489 const char* CollectorString(); 1490 1491 // Returns size of object in heap (in MB). SizeOfHeapObjects()1492 double SizeOfHeapObjects() { 1493 return (static_cast<double>(Heap::SizeOfObjects())) / MB; 1494 } 1495 1496 double start_time_; // Timestamp set in the constructor. 1497 double start_size_; // Size of objects in heap set in constructor. 1498 GarbageCollector collector_; // Type of collector. 1499 1500 // A count (including this one, eg, the first collection is 1) of the 1501 // number of garbage collections. 1502 int gc_count_; 1503 1504 // A count (including this one) of the number of full garbage collections. 1505 int full_gc_count_; 1506 1507 // True if the current GC is a compacting full collection, false 1508 // otherwise. 1509 bool is_compacting_; 1510 1511 // True if the *previous* full GC cwas a compacting collection (will be 1512 // false if there has not been a previous full GC). 1513 bool previous_has_compacted_; 1514 1515 // On a full GC, a count of the number of marked objects. Incremented 1516 // when an object is marked and decremented when an object's mark bit is 1517 // cleared. Will be zero on a scavenge collection. 1518 int marked_count_; 1519 1520 // The count from the end of the previous full GC. Will be zero if there 1521 // was no previous full GC. 1522 int previous_marked_count_; 1523 }; 1524 1525 1526 class TranscendentalCache { 1527 public: 1528 enum Type {ACOS, ASIN, ATAN, COS, EXP, LOG, SIN, TAN, kNumberOfCaches}; 1529 1530 explicit TranscendentalCache(Type t); 1531 1532 // Returns a heap number with f(input), where f is a math function specified 1533 // by the 'type' argument. Get(Type type,double input)1534 static inline Object* Get(Type type, double input) { 1535 TranscendentalCache* cache = caches_[type]; 1536 if (cache == NULL) { 1537 caches_[type] = cache = new TranscendentalCache(type); 1538 } 1539 return cache->Get(input); 1540 } 1541 1542 // The cache contains raw Object pointers. This method disposes of 1543 // them before a garbage collection. 1544 static void Clear(); 1545 1546 private: Get(double input)1547 inline Object* Get(double input) { 1548 Converter c; 1549 c.dbl = input; 1550 int hash = Hash(c); 1551 Element e = elements_[hash]; 1552 if (e.in[0] == c.integers[0] && 1553 e.in[1] == c.integers[1]) { 1554 ASSERT(e.output != NULL); 1555 return e.output; 1556 } 1557 double answer = Calculate(input); 1558 Object* heap_number = Heap::AllocateHeapNumber(answer); 1559 if (!heap_number->IsFailure()) { 1560 elements_[hash].in[0] = c.integers[0]; 1561 elements_[hash].in[1] = c.integers[1]; 1562 elements_[hash].output = heap_number; 1563 } 1564 return heap_number; 1565 } 1566 Calculate(double input)1567 inline double Calculate(double input) { 1568 switch (type_) { 1569 case ACOS: 1570 return acos(input); 1571 case ASIN: 1572 return asin(input); 1573 case ATAN: 1574 return atan(input); 1575 case COS: 1576 return cos(input); 1577 case EXP: 1578 return exp(input); 1579 case LOG: 1580 return log(input); 1581 case SIN: 1582 return sin(input); 1583 case TAN: 1584 return tan(input); 1585 default: 1586 return 0.0; // Never happens. 1587 } 1588 } 1589 static const int kCacheSize = 512; 1590 struct Element { 1591 uint32_t in[2]; 1592 Object* output; 1593 }; 1594 union Converter { 1595 double dbl; 1596 uint32_t integers[2]; 1597 }; Hash(const Converter & c)1598 inline static int Hash(const Converter& c) { 1599 uint32_t hash = (c.integers[0] ^ c.integers[1]); 1600 hash ^= hash >> 16; 1601 hash ^= hash >> 8; 1602 return (hash & (kCacheSize - 1)); 1603 } 1604 static TranscendentalCache* caches_[kNumberOfCaches]; 1605 Element elements_[kCacheSize]; 1606 Type type_; 1607 }; 1608 1609 1610 } } // namespace v8::internal 1611 1612 #endif // V8_HEAP_H_ 1613