// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_HEAP_INL_H_
#define V8_HEAP_HEAP_INL_H_

#include <cmath>

#include "src/base/platform/platform.h"
#include "src/counters.h"
#include "src/heap/heap.h"
#include "src/heap/incremental-marking-inl.h"
#include "src/heap/mark-compact.h"
#include "src/heap/remembered-set.h"
#include "src/heap/spaces-inl.h"
#include "src/heap/store-buffer.h"
#include "src/isolate.h"
#include "src/list-inl.h"
#include "src/log.h"
#include "src/msan.h"
#include "src/objects-inl.h"
#include "src/type-feedback-vector-inl.h"

namespace v8 {
namespace internal {

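// Appends an entry to the promotion queue. The queue grows downwards through
// rear_; once the next slot would cross limit_ (i.e. run into the new-space
// allocation area), the queue head is relocated and this entry, like all
// subsequent ones, goes to the out-of-line emergency stack instead.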
void PromotionQueue::insert(HeapObject* target, int32_t size,
                            bool was_marked_black) {
  if (emergency_stack_ != NULL) {
    emergency_stack_->Add(Entry(target, size, was_marked_black));
    return;
  }

  if ((rear_ - 1) < limit_) {
    RelocateQueueHead();
    emergency_stack_->Add(Entry(target, size, was_marked_black));
    return;
  }

  struct Entry* entry = reinterpret_cast<struct Entry*>(--rear_);
  entry->obj_ = target;
  entry->size_ = size;
  entry->was_marked_black_ = was_marked_black;

// Assert no overflow into live objects.
#ifdef DEBUG
  SemiSpace::AssertValidRange(target->GetIsolate()->heap()->new_space()->top(),
                              reinterpret_cast<Address>(rear_));
#endif
}


#define ROOT_ACCESSOR(type, name, camel_name) \
  type* Heap::name() { return type::cast(roots_[k##camel_name##RootIndex]); }
ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR

#define STRUCT_MAP_ACCESSOR(NAME, Name, name) \
  Map* Heap::name##_map() { return Map::cast(roots_[k##Name##MapRootIndex]); }
STRUCT_LIST(STRUCT_MAP_ACCESSOR)
#undef STRUCT_MAP_ACCESSOR

#define STRING_ACCESSOR(name, str) \
  String* Heap::name() { return String::cast(roots_[k##name##RootIndex]); }
INTERNALIZED_STRING_LIST(STRING_ACCESSOR)
#undef STRING_ACCESSOR

#define SYMBOL_ACCESSOR(name) \
  Symbol* Heap::name() { return Symbol::cast(roots_[k##name##RootIndex]); }
PRIVATE_SYMBOL_LIST(SYMBOL_ACCESSOR)
#undef SYMBOL_ACCESSOR

#define SYMBOL_ACCESSOR(name, description) \
  Symbol* Heap::name() { return Symbol::cast(roots_[k##name##RootIndex]); }
PUBLIC_SYMBOL_LIST(SYMBOL_ACCESSOR)
WELL_KNOWN_SYMBOL_LIST(SYMBOL_ACCESSOR)
#undef SYMBOL_ACCESSOR

#define ROOT_ACCESSOR(type, name, camel_name)                                 \
  void Heap::set_##name(type* value) {                                        \
    /* The deserializer makes use of the fact that these common roots are */  \
    /* never in new space and never on a page that is being compacted. */     \
    DCHECK(!deserialization_complete() ||                                     \
           RootCanBeWrittenAfterInitialization(k##camel_name##RootIndex));    \
    DCHECK(k##camel_name##RootIndex >= kOldSpaceRoots || !InNewSpace(value)); \
    roots_[k##camel_name##RootIndex] = value;                                 \
  }
ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR


template <>
bool inline Heap::IsOneByte(Vector<const char> str, int chars) {
  // TODO(dcarney): incorporate Latin-1 check when Latin-1 is supported?
  return chars == str.length();
}


template <>
bool inline Heap::IsOneByte(String* str, int chars) {
  return str->IsOneByteRepresentation();
}


AllocationResult Heap::AllocateInternalizedStringFromUtf8(
    Vector<const char> str, int chars, uint32_t hash_field) {
  if (IsOneByte(str, chars)) {
    return AllocateOneByteInternalizedString(Vector<const uint8_t>::cast(str),
                                             hash_field);
  }
  return AllocateInternalizedStringImpl<false>(str, chars, hash_field);
}


template <typename T>
AllocationResult Heap::AllocateInternalizedStringImpl(T t, int chars,
                                                      uint32_t hash_field) {
  if (IsOneByte(t, chars)) {
    return AllocateInternalizedStringImpl<true>(t, chars, hash_field);
  }
  return AllocateInternalizedStringImpl<false>(t, chars, hash_field);
}


AllocationResult Heap::AllocateOneByteInternalizedString(
    Vector<const uint8_t> str, uint32_t hash_field) {
  CHECK_GE(String::kMaxLength, str.length());
  // Compute map and object size.
  Map* map = one_byte_internalized_string_map();
  int size = SeqOneByteString::SizeFor(str.length());

  // Allocate string.
  HeapObject* result = nullptr;
  {
    AllocationResult allocation = AllocateRaw(size, OLD_SPACE);
    if (!allocation.To(&result)) return allocation;
  }

  // String maps are all immortal immovable objects.
  result->set_map_no_write_barrier(map);
  // Set length and hash fields of the allocated string.
  String* answer = String::cast(result);
  answer->set_length(str.length());
  answer->set_hash_field(hash_field);

  DCHECK_EQ(size, answer->Size());

  // Fill in the characters.
  MemCopy(answer->address() + SeqOneByteString::kHeaderSize, str.start(),
          str.length());

  return answer;
}


AllocationResult Heap::AllocateTwoByteInternalizedString(Vector<const uc16> str,
                                                         uint32_t hash_field) {
  CHECK_GE(String::kMaxLength, str.length());
  // Compute map and object size.
  Map* map = internalized_string_map();
  int size = SeqTwoByteString::SizeFor(str.length());

  // Allocate string.
  HeapObject* result = nullptr;
  {
    AllocationResult allocation = AllocateRaw(size, OLD_SPACE);
    if (!allocation.To(&result)) return allocation;
  }

  result->set_map(map);
  // Set length and hash fields of the allocated string.
  String* answer = String::cast(result);
  answer->set_length(str.length());
  answer->set_hash_field(hash_field);

  DCHECK_EQ(size, answer->Size());

  // Fill in the characters.
  MemCopy(answer->address() + SeqTwoByteString::kHeaderSize, str.start(),
          str.length() * kUC16Size);

  return answer;
}

AllocationResult Heap::CopyFixedArray(FixedArray* src) {
  if (src->length() == 0) return src;
  return CopyFixedArrayWithMap(src, src->map());
}


AllocationResult Heap::CopyFixedDoubleArray(FixedDoubleArray* src) {
  if (src->length() == 0) return src;
  return CopyFixedDoubleArrayWithMap(src, src->map());
}


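// The central bottleneck for raw heap allocation. Requests larger than
// Page::kMaxRegularHeapObjectSize are redirected to the large-object space;
// everything else is dispatched to the allocator of the requested space.
// Callers must check the returned AllocationResult, which may signal retry.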
AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space,
                                   AllocationAlignment alignment) {
  DCHECK(AllowHandleAllocation::IsAllowed());
  DCHECK(AllowHeapAllocation::IsAllowed());
  DCHECK(gc_state_ == NOT_IN_GC);
#ifdef DEBUG
  if (FLAG_gc_interval >= 0 && !always_allocate() &&
      Heap::allocation_timeout_-- <= 0) {
    return AllocationResult::Retry(space);
  }
  isolate_->counters()->objs_since_last_full()->Increment();
  isolate_->counters()->objs_since_last_young()->Increment();
#endif

  bool large_object = size_in_bytes > Page::kMaxRegularHeapObjectSize;
  HeapObject* object = nullptr;
  AllocationResult allocation;
  if (NEW_SPACE == space) {
    if (large_object) {
      space = LO_SPACE;
    } else {
      allocation = new_space_.AllocateRaw(size_in_bytes, alignment);
      if (allocation.To(&object)) {
        OnAllocationEvent(object, size_in_bytes);
      }
      return allocation;
    }
  }

  // Here we only allocate in the old generation.
  if (OLD_SPACE == space) {
    if (large_object) {
      allocation = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE);
    } else {
      allocation = old_space_->AllocateRaw(size_in_bytes, alignment);
    }
  } else if (CODE_SPACE == space) {
    if (size_in_bytes <= code_space()->AreaSize()) {
      allocation = code_space_->AllocateRawUnaligned(size_in_bytes);
    } else {
      allocation = lo_space_->AllocateRaw(size_in_bytes, EXECUTABLE);
    }
  } else if (LO_SPACE == space) {
    DCHECK(large_object);
    allocation = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE);
  } else if (MAP_SPACE == space) {
    allocation = map_space_->AllocateRawUnaligned(size_in_bytes);
  } else {
    // NEW_SPACE is not allowed here.
    UNREACHABLE();
  }
  if (allocation.To(&object)) {
    OnAllocationEvent(object, size_in_bytes);
  } else {
    old_gen_exhausted_ = true;
  }

  if (!old_gen_exhausted_ && incremental_marking()->black_allocation() &&
      space != OLD_SPACE) {
    Marking::MarkBlack(Marking::MarkBitFrom(object));
    MemoryChunk::IncrementLiveBytesFromGC(object, size_in_bytes);
  }
  return allocation;
}


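// Notifies the interested parties (heap profiler, the --verify-predictable
// allocations hash, allocation stack tracing) about a freshly allocated
// object.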
void Heap::OnAllocationEvent(HeapObject* object, int size_in_bytes) {
  HeapProfiler* profiler = isolate_->heap_profiler();
  if (profiler->is_tracking_allocations()) {
    profiler->AllocationEvent(object->address(), size_in_bytes);
  }

  if (FLAG_verify_predictable) {
    ++allocations_count_;
    // Advance synthetic time by making a time request.
    MonotonicallyIncreasingTimeInMs();

    UpdateAllocationsHash(object);
    UpdateAllocationsHash(size_in_bytes);

    if (allocations_count_ % FLAG_dump_allocations_digest_at_alloc == 0) {
      PrintAlloctionsHash();
    }
  }

  if (FLAG_trace_allocation_stack_interval > 0) {
    if (!FLAG_verify_predictable) ++allocations_count_;
    if (allocations_count_ % FLAG_trace_allocation_stack_interval == 0) {
      isolate()->PrintStack(stdout, Isolate::kPrintStackConcise);
    }
  }
}


void Heap::OnMoveEvent(HeapObject* target, HeapObject* source,
                       int size_in_bytes) {
  HeapProfiler* heap_profiler = isolate_->heap_profiler();
  if (heap_profiler->is_tracking_object_moves()) {
    heap_profiler->ObjectMoveEvent(source->address(), target->address(),
                                   size_in_bytes);
  }
  if (target->IsSharedFunctionInfo()) {
    LOG_CODE_EVENT(isolate_, SharedFunctionInfoMoveEvent(source->address(),
                                                         target->address()));
  }

  if (FLAG_verify_predictable) {
    ++allocations_count_;
    // Advance synthetic time by making a time request.
    MonotonicallyIncreasingTimeInMs();

    UpdateAllocationsHash(source);
    UpdateAllocationsHash(target);
    UpdateAllocationsHash(size_in_bytes);

    if (allocations_count_ % FLAG_dump_allocations_digest_at_alloc == 0) {
      PrintAlloctionsHash();
    }
  }
}


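// Folds an object's identity into the running allocations hash used by
// --verify-predictable. The page offset combined with the owning space tag
// is hashed instead of the raw address so the digest is stable across runs.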
void Heap::UpdateAllocationsHash(HeapObject* object) {
  Address object_address = object->address();
  MemoryChunk* memory_chunk = MemoryChunk::FromAddress(object_address);
  AllocationSpace allocation_space = memory_chunk->owner()->identity();

  STATIC_ASSERT(kSpaceTagSize + kPageSizeBits <= 32);
  uint32_t value =
      static_cast<uint32_t>(object_address - memory_chunk->address()) |
      (static_cast<uint32_t>(allocation_space) << kPageSizeBits);

  UpdateAllocationsHash(value);
}


void Heap::UpdateAllocationsHash(uint32_t value) {
  uint16_t c1 = static_cast<uint16_t>(value);
  uint16_t c2 = static_cast<uint16_t>(value >> 16);
  raw_allocations_hash_ =
      StringHasher::AddCharacterCore(raw_allocations_hash_, c1);
  raw_allocations_hash_ =
      StringHasher::AddCharacterCore(raw_allocations_hash_, c2);
}


void Heap::RegisterExternalString(String* string) {
  external_string_table_.AddString(string);
}


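// Disposes of the external resource attached to an external string, if it
// has not been disposed already, and clears the resource slot. The slot is
// located via raw pointer arithmetic (kResourceOffset minus kHeapObjectTag)
// rather than through typed accessors.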
void Heap::FinalizeExternalString(String* string) {
  DCHECK(string->IsExternalString());
  v8::String::ExternalStringResourceBase** resource_addr =
      reinterpret_cast<v8::String::ExternalStringResourceBase**>(
          reinterpret_cast<byte*>(string) + ExternalString::kResourceOffset -
          kHeapObjectTag);

  // Dispose of the C++ object if it has not already been disposed.
  if (*resource_addr != NULL) {
    (*resource_addr)->Dispose();
    *resource_addr = NULL;
  }
}


bool Heap::InNewSpace(Object* object) {
  bool result = new_space_.Contains(object);
  DCHECK(!result ||                 // Either not in new space
         gc_state_ != NOT_IN_GC ||  // ... or in the middle of GC
         InToSpace(object));        // ... or in to-space (where we allocate).
  return result;
}

bool Heap::InFromSpace(Object* object) {
  return new_space_.FromSpaceContains(object);
}


bool Heap::InToSpace(Object* object) {
  return new_space_.ToSpaceContains(object);
}

bool Heap::InOldSpace(Object* object) { return old_space_->Contains(object); }

bool Heap::InNewSpaceSlow(Address address) {
  return new_space_.ContainsSlow(address);
}

bool Heap::InOldSpaceSlow(Address address) {
  return old_space_->ContainsSlow(address);
}

bool Heap::OldGenerationAllocationLimitReached() {
  if (!incremental_marking()->IsStopped()) return false;
  return OldGenerationSpaceAvailable() < 0;
}

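// Decides whether a new-space object survives scavenging in place or moves to
// the old generation: under PROMOTE_MARKED any object already marked
// non-white is promoted; apart from that, only objects below the age mark
// (i.e. objects that have already survived one scavenge) are promoted.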
template <PromotionMode promotion_mode>
bool Heap::ShouldBePromoted(Address old_address, int object_size) {
  Page* page = Page::FromAddress(old_address);
  Address age_mark = new_space_.age_mark();

  if (promotion_mode == PROMOTE_MARKED) {
    MarkBit mark_bit = Marking::MarkBitFrom(old_address);
    if (!Marking::IsWhite(mark_bit)) {
      return true;
    }
  }

  return page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK) &&
         (!page->ContainsLimit(age_mark) || old_address < age_mark);
}

PromotionMode Heap::CurrentPromotionMode() {
  if (incremental_marking()->IsMarking()) {
    return PROMOTE_MARKED;
  } else {
    return DEFAULT_PROMOTION;
  }
}

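// Records an old-to-new pointer in the remembered set. Writes that cannot
// create such a pointer (the value is not in new space, the holder is not a
// heap object, or the holder itself is in new space) are filtered out first.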
void Heap::RecordWrite(Object* object, int offset, Object* o) {
  if (!InNewSpace(o) || !object->IsHeapObject() || InNewSpace(object)) {
    return;
  }
  RememberedSet<OLD_TO_NEW>::Insert(
      Page::FromAddress(reinterpret_cast<Address>(object)),
      HeapObject::cast(object)->address() + offset);
}

void Heap::RecordFixedArrayElements(FixedArray* array, int offset, int length) {
  if (InNewSpace(array)) return;
  Page* page = Page::FromAddress(reinterpret_cast<Address>(array));
  for (int i = 0; i < length; i++) {
    if (!InNewSpace(array->get(offset + i))) continue;
    RememberedSet<OLD_TO_NEW>::Insert(
        page,
        reinterpret_cast<Address>(array->RawFieldOfElementAt(offset + i)));
  }
}


bool Heap::AllowedToBeMigrated(HeapObject* obj, AllocationSpace dst) {
  // Object migration is governed by the following rules:
  //
  // 1) Objects in new-space can be migrated to the old space
  //    that matches their target space or they stay in new-space.
  // 2) Objects in old-space stay in the same space when migrating.
  // 3) Fillers (two or more words) can migrate due to left-trimming of
  //    fixed arrays in new-space or old space.
  // 4) Fillers (one word) can never migrate, they are skipped by
  //    incremental marking explicitly to prevent invalid pattern.
  //
  // Since this function is used for debugging only, we do not place
  // asserts here, but check everything explicitly.
  if (obj->map() == one_pointer_filler_map()) return false;
  InstanceType type = obj->map()->instance_type();
  MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
  AllocationSpace src = chunk->owner()->identity();
  switch (src) {
    case NEW_SPACE:
      return dst == src || dst == OLD_SPACE;
    case OLD_SPACE:
      return dst == src &&
             (dst == OLD_SPACE || obj->IsFiller() || obj->IsExternalString());
    case CODE_SPACE:
      return dst == src && type == CODE_TYPE;
    case MAP_SPACE:
    case LO_SPACE:
      return false;
  }
  UNREACHABLE();
  return false;
}

void Heap::CopyBlock(Address dst, Address src, int byte_size) {
  CopyWords(reinterpret_cast<Object**>(dst), reinterpret_cast<Object**>(src),
            static_cast<size_t>(byte_size / kPointerSize));
}

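// Left-trimming a fixed array turns its old start into a filler, so a stale
// slot may still point at that filler. If *object is such a filler (one
// without a forwarding address), the slot is cleared to nullptr and true is
// returned; in debug mode we also walk the fillers to check that they are
// followed by a FixedArrayBase.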
bool Heap::PurgeLeftTrimmedObject(Object** object) {
  HeapObject* current = reinterpret_cast<HeapObject*>(*object);
  const MapWord map_word = current->map_word();
  if (current->IsFiller() && !map_word.IsForwardingAddress()) {
#ifdef DEBUG
    // We need to find a FixedArrayBase map after walking the fillers.
    while (current->IsFiller()) {
      Address next = reinterpret_cast<Address>(current);
      if (current->map() == one_pointer_filler_map()) {
        next += kPointerSize;
      } else if (current->map() == two_pointer_filler_map()) {
        next += 2 * kPointerSize;
      } else {
        next += current->Size();
      }
      current = reinterpret_cast<HeapObject*>(next);
    }
    DCHECK(current->IsFixedArrayBase());
#endif  // DEBUG
    *object = nullptr;
    return true;
  }
  return false;
}

template <Heap::FindMementoMode mode>
AllocationMemento* Heap::FindAllocationMemento(HeapObject* object) {
  // Check if there is potentially a memento behind the object. If
  // the last word of the memento is on another page we return
  // immediately.
  Address object_address = object->address();
  Address memento_address = object_address + object->Size();
  Address last_memento_word_address = memento_address + kPointerSize;
  if (!Page::OnSamePage(object_address, last_memento_word_address)) {
    return nullptr;
  }
  HeapObject* candidate = HeapObject::FromAddress(memento_address);
  Map* candidate_map = candidate->map();
  // This fast check may peek at an uninitialized word. However, the slow check
  // below (memento_address == top) ensures that this is safe. Mark the word as
  // initialized to silence MemorySanitizer warnings.
  MSAN_MEMORY_IS_INITIALIZED(&candidate_map, sizeof(candidate_map));
  if (candidate_map != allocation_memento_map()) {
    return nullptr;
  }
  AllocationMemento* memento_candidate = AllocationMemento::cast(candidate);

  // Depending on what the memento is used for, we might need to perform
  // additional checks.
  Address top;
  switch (mode) {
    case Heap::kForGC:
      return memento_candidate;
    case Heap::kForRuntime:
      if (memento_candidate == nullptr) return nullptr;
      // Either the object is the last object in the new space, or there is
      // another object of at least word size (the header map word) following
      // it, so it suffices to compare memento_address and top here.
      top = NewSpaceTop();
      DCHECK(memento_address == top ||
             memento_address + HeapObject::kHeaderSize <= top ||
             !Page::OnSamePage(memento_address, top - 1));
      if ((memento_address != top) && memento_candidate->IsValid()) {
        return memento_candidate;
      }
      return nullptr;
    default:
      UNREACHABLE();
  }
  UNREACHABLE();
  return nullptr;
}

template <Heap::UpdateAllocationSiteMode mode>
void Heap::UpdateAllocationSite(HeapObject* object,
                                base::HashMap* pretenuring_feedback) {
  DCHECK(InFromSpace(object));
  if (!FLAG_allocation_site_pretenuring ||
      !AllocationSite::CanTrack(object->map()->instance_type()))
    return;
  AllocationMemento* memento_candidate = FindAllocationMemento<kForGC>(object);
  if (memento_candidate == nullptr) return;

  if (mode == kGlobal) {
    DCHECK_EQ(pretenuring_feedback, global_pretenuring_feedback_);
    // Entering global pretenuring feedback is only done in the scavenger,
    // where we are allowed to actually touch the allocation site.
    if (!memento_candidate->IsValid()) return;
    AllocationSite* site = memento_candidate->GetAllocationSite();
    DCHECK(!site->IsZombie());
    // For inserting in the global pretenuring storage we need to first
    // increment the memento found count on the allocation site.
    if (site->IncrementMementoFoundCount()) {
      global_pretenuring_feedback_->LookupOrInsert(site,
                                                   ObjectHash(site->address()));
    }
  } else {
    DCHECK_EQ(mode, kCached);
    DCHECK_NE(pretenuring_feedback, global_pretenuring_feedback_);
    // Entering cached feedback is used in the parallel case. We are not
    // allowed to dereference the allocation site and rather have to postpone
    // all checks until actually merging the data.
    Address key = memento_candidate->GetAllocationSiteUnchecked();
    base::HashMap::Entry* e =
        pretenuring_feedback->LookupOrInsert(key, ObjectHash(key));
    DCHECK(e != nullptr);
    (*bit_cast<intptr_t*>(&e->value))++;
  }
}


void Heap::RemoveAllocationSitePretenuringFeedback(AllocationSite* site) {
  global_pretenuring_feedback_->Remove(
      site, static_cast<uint32_t>(bit_cast<uintptr_t>(site)));
}


bool Heap::CollectGarbage(AllocationSpace space, const char* gc_reason,
                          const v8::GCCallbackFlags callbackFlags) {
  const char* collector_reason = NULL;
  GarbageCollector collector = SelectGarbageCollector(space, &collector_reason);
  return CollectGarbage(collector, gc_reason, collector_reason, callbackFlags);
}


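// Recovers the owning Isolate from the Heap's own address. The expression
// reinterpret_cast<Isolate*>(16)->heap() never dereferences memory: the
// inline Isolate::heap() only forms the address of the heap_ field, so the
// result is 16 plus the field's offset, and subtracting that offset from
// `this` yields the Isolate.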
Isolate* Heap::isolate() {
  return reinterpret_cast<Isolate*>(
      reinterpret_cast<intptr_t>(this) -
      reinterpret_cast<size_t>(reinterpret_cast<Isolate*>(16)->heap()) + 16);
}


void Heap::ExternalStringTable::AddString(String* string) {
  DCHECK(string->IsExternalString());
  if (heap_->InNewSpace(string)) {
    new_space_strings_.Add(string);
  } else {
    old_space_strings_.Add(string);
  }
}


void Heap::ExternalStringTable::Iterate(ObjectVisitor* v) {
  if (!new_space_strings_.is_empty()) {
    Object** start = &new_space_strings_[0];
    v->VisitPointers(start, start + new_space_strings_.length());
  }
  if (!old_space_strings_.is_empty()) {
    Object** start = &old_space_strings_[0];
    v->VisitPointers(start, start + old_space_strings_.length());
  }
}


// Verify() is inline to avoid ifdef-s around its calls in release
// mode.
void Heap::ExternalStringTable::Verify() {
#ifdef DEBUG
  for (int i = 0; i < new_space_strings_.length(); ++i) {
    Object* obj = Object::cast(new_space_strings_[i]);
    DCHECK(heap_->InNewSpace(obj));
    DCHECK(!obj->IsTheHole(heap_->isolate()));
  }
  for (int i = 0; i < old_space_strings_.length(); ++i) {
    Object* obj = Object::cast(old_space_strings_[i]);
    DCHECK(!heap_->InNewSpace(obj));
    DCHECK(!obj->IsTheHole(heap_->isolate()));
  }
#endif
}


void Heap::ExternalStringTable::AddOldString(String* string) {
  DCHECK(string->IsExternalString());
  DCHECK(!heap_->InNewSpace(string));
  old_space_strings_.Add(string);
}


void Heap::ExternalStringTable::ShrinkNewStrings(int position) {
  new_space_strings_.Rewind(position);
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    Verify();
  }
#endif
}

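// Maps a (source, name) pair to a cache index by xor-ing the source pointer
// (shifted right to drop the always-zero alignment bits) with the name's
// hash field.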
// static
int DescriptorLookupCache::Hash(Object* source, Name* name) {
  DCHECK(name->IsUniqueName());
  // Uses only lower 32 bits if pointers are larger.
  uint32_t source_hash =
      static_cast<uint32_t>(reinterpret_cast<uintptr_t>(source)) >>
      kPointerSizeLog2;
  uint32_t name_hash = name->hash_field();
  return (source_hash ^ name_hash) % kLength;
}

int DescriptorLookupCache::Lookup(Map* source, Name* name) {
  int index = Hash(source, name);
  Key& key = keys_[index];
  if ((key.source == source) && (key.name == name)) return results_[index];
  return kAbsent;
}


void DescriptorLookupCache::Update(Map* source, Name* name, int result) {
  DCHECK(result != kAbsent);
  int index = Hash(source, name);
  Key& key = keys_[index];
  key.source = source;
  key.name = name;
  results_[index] = result;
}


void Heap::ClearInstanceofCache() {
  set_instanceof_cache_function(Smi::FromInt(0));
}

Oddball* Heap::ToBoolean(bool condition) {
  return condition ? true_value() : false_value();
}


void Heap::CompletelyClearInstanceofCache() {
  set_instanceof_cache_map(Smi::FromInt(0));
  set_instanceof_cache_function(Smi::FromInt(0));
}


uint32_t Heap::HashSeed() {
  uint32_t seed = static_cast<uint32_t>(hash_seed()->value());
  DCHECK(FLAG_randomize_hashes || seed == 0);
  return seed;
}


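// Script ids wrap around to 1 after reaching Smi::kMaxValue, so an id is
// always a positive Smi.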
int Heap::NextScriptId() {
  int last_id = last_script_id()->value();
  if (last_id == Smi::kMaxValue) {
    last_id = 1;
  } else {
    last_id++;
  }
  set_last_script_id(Smi::FromInt(last_id));
  return last_id;
}

void Heap::SetArgumentsAdaptorDeoptPCOffset(int pc_offset) {
  DCHECK(arguments_adaptor_deopt_pc_offset() == Smi::FromInt(0));
  set_arguments_adaptor_deopt_pc_offset(Smi::FromInt(pc_offset));
}

void Heap::SetConstructStubDeoptPCOffset(int pc_offset) {
  DCHECK(construct_stub_deopt_pc_offset() == Smi::FromInt(0));
  set_construct_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
}

void Heap::SetGetterStubDeoptPCOffset(int pc_offset) {
  DCHECK(getter_stub_deopt_pc_offset() == Smi::FromInt(0));
  set_getter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
}

void Heap::SetSetterStubDeoptPCOffset(int pc_offset) {
  DCHECK(setter_stub_deopt_pc_offset() == Smi::FromInt(0));
  set_setter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
}

void Heap::SetInterpreterEntryReturnPCOffset(int pc_offset) {
  DCHECK(interpreter_entry_return_pc_offset() == Smi::FromInt(0));
  set_interpreter_entry_return_pc_offset(Smi::FromInt(pc_offset));
}

int Heap::GetNextTemplateSerialNumber() {
  int next_serial_number = next_template_serial_number()->value() + 1;
  set_next_template_serial_number(Smi::FromInt(next_serial_number));
  return next_serial_number;
}

void Heap::SetSerializedTemplates(FixedArray* templates) {
  DCHECK_EQ(empty_fixed_array(), serialized_templates());
  set_serialized_templates(templates);
}

AlwaysAllocateScope::AlwaysAllocateScope(Isolate* isolate)
    : heap_(isolate->heap()) {
  heap_->always_allocate_scope_count_.Increment(1);
}


AlwaysAllocateScope::~AlwaysAllocateScope() {
  heap_->always_allocate_scope_count_.Increment(-1);
}


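// Debug visitor that checks that every visited heap object lies within the
// heap and has a valid map.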
void VerifyPointersVisitor::VisitPointers(Object** start, Object** end) {
  for (Object** current = start; current < end; current++) {
    if ((*current)->IsHeapObject()) {
      HeapObject* object = HeapObject::cast(*current);
      CHECK(object->GetIsolate()->heap()->Contains(object));
      CHECK(object->map()->IsMap());
    }
  }
}


void VerifySmisVisitor::VisitPointers(Object** start, Object** end) {
  for (Object** current = start; current < end; current++) {
    CHECK((*current)->IsSmi());
  }
}
}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_HEAP_INL_H_