
Searched refs:heap_ (Results 1 – 25 of 55) sorted by relevance


/external/v8/src/heap/
incremental-marking.cc
60 : heap_(heap), in IncrementalMarking()
103 heap_->mark_compact_collector()->RecordSlot(obj, slot, in RecordWriteSlow()
122 heap_->mark_compact_collector()->RecordRelocSlot(host, rinfo, value); in RecordWriteIntoCode()
217 : heap_(incremental_marking->heap()) {} in IncrementalMarkingRootMarkingVisitor()
234 heap_->incremental_marking()->WhiteToGreyAndPush(HeapObject::cast(obj)); in MarkObjectByPointer()
237 Heap* heap_; member in v8::internal::IncrementalMarkingRootMarkingVisitor
257 DeactivateIncrementalWriteBarrierForSpace(heap_->old_space()); in DeactivateIncrementalWriteBarrier()
258 DeactivateIncrementalWriteBarrierForSpace(heap_->map_space()); in DeactivateIncrementalWriteBarrier()
259 DeactivateIncrementalWriteBarrierForSpace(heap_->code_space()); in DeactivateIncrementalWriteBarrier()
260 DeactivateIncrementalWriteBarrierForSpace(heap_->new_space()); in DeactivateIncrementalWriteBarrier()
[all …]
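
For context on these hits: WhiteToGreyAndPush() is the classic tri-color marking step, in which a white (unvisited) object reached from a root is shaded grey and queued, then blackened once its own fields have been scanned. A minimal, self-contained sketch of that invariant, with hypothetical toy types rather than V8's:

```cpp
// Toy tri-color marking; Obj and Color are hypothetical, not V8 types.
#include <deque>
#include <vector>

enum class Color { kWhite, kGrey, kBlack };

struct Obj {
  Color color = Color::kWhite;
  std::vector<Obj*> fields;  // outgoing pointers
};

void Mark(const std::vector<Obj*>& roots) {
  std::deque<Obj*> worklist;
  // Analogue of WhiteToGreyAndPush(): shade an unvisited object grey
  // and queue it for scanning.
  auto white_to_grey_and_push = [&](Obj* o) {
    if (o != nullptr && o->color == Color::kWhite) {
      o->color = Color::kGrey;
      worklist.push_back(o);
    }
  };
  for (Obj* root : roots) white_to_grey_and_push(root);
  while (!worklist.empty()) {
    Obj* o = worklist.front();
    worklist.pop_front();
    o->color = Color::kBlack;  // black: object and its fields scanned
    for (Obj* field : o->fields) white_to_grey_and_push(field);
  }
}

int main() {
  Obj a, b;
  a.fields.push_back(&b);
  Mark({&a});
  return b.color == Color::kBlack ? 0 : 1;  // b is reachable, so black
}
```
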
stress-scavenge-observer.cc
18 heap_(heap), in StressScavengeObserver()
24 heap_.isolate()->PrintWithTimestamp( in StressScavengeObserver()
31 if (has_requested_gc_ || heap_.new_space()->Capacity() == 0) { in Step()
36 heap_.new_space()->Size() * 100.0 / heap_.new_space()->Capacity(); in Step()
39 heap_.isolate()->PrintWithTimestamp( in Step()
52 heap_.isolate()->PrintWithTimestamp("[Scavenge] GC requested\n"); in Step()
56 heap_.isolate()->stack_guard()->RequestGC(); in Step()
66 heap_.new_space()->Size() * 100.0 / heap_.new_space()->Capacity(); in RequestedGCDone()
70 heap_.isolate()->PrintWithTimestamp( in RequestedGCDone()
73 heap_.isolate()->PrintWithTimestamp("[Scavenge] %d%% is the new limit\n", in RequestedGCDone()
[all …]
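
The observer above computes how full the new space is and requests a GC once a threshold is crossed. A sketch of just that fill-ratio check, with a hypothetical NewSpaceStats standing in for V8's NewSpace:

```cpp
// Hypothetical stand-in for the new-space stats used in Step().
#include <cstdio>

struct NewSpaceStats {
  double size;      // bytes currently in use
  double capacity;  // total bytes available
};

// Mirrors the Size() * 100.0 / Capacity() computation in the hits above,
// including the Capacity() == 0 guard.
bool ShouldRequestScavenge(const NewSpaceStats& s, double limit_percent) {
  if (s.capacity == 0) return false;
  double filled_percent = s.size * 100.0 / s.capacity;
  return filled_percent >= limit_percent;
}

int main() {
  NewSpaceStats stats{750.0 * 1024, 1024.0 * 1024};  // ~73% full
  if (ShouldRequestScavenge(stats, 70.0)) {
    std::printf("[Scavenge] GC requested\n");  // log line as in Step()
  }
}
```
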
gc-tracer.cc
37 start_time_ = tracer_->heap_->MonotonicallyIncreasingTimeInMs(); in Scope()
40 runtime_stats_ = tracer_->heap_->isolate()->counters()->runtime_call_stats(); in Scope()
46 scope_, tracer_->heap_->MonotonicallyIncreasingTimeInMs() - start_time_); in ~Scope()
54 start_time_ = tracer_->heap_->MonotonicallyIncreasingTimeInMs(); in BackgroundScope()
63 tracer_->heap_->MonotonicallyIncreasingTimeInMs() - start_time_; in ~BackgroundScope()
141 : heap_(heap), in GCTracer()
166 current_.end_time = heap_->MonotonicallyIncreasingTimeInMs(); in GCTracer()
175 current_.end_time = heap_->MonotonicallyIncreasingTimeInMs(); in ResetForTesting()
209 heap_->isolate()->counters()->young_generation_handling()->AddSample( in NotifyYoungGenerationHandling()
220 double start_time = heap_->MonotonicallyIncreasingTimeInMs(); in Start()
[all …]
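
GCTracer's Scope is an RAII timer: the constructor records a monotonic start time and the destructor reports the elapsed milliseconds. The same pattern with std::chrono in place of MonotonicallyIncreasingTimeInMs(); the sink callback is an invented stand-in for AddScopeSample():

```cpp
// RAII timing scope; types and the sink callback are sketch-only.
#include <chrono>
#include <cstdio>
#include <functional>

class TimedScope {
 public:
  explicit TimedScope(std::function<void(double)> sink)
      : sink_(std::move(sink)), start_(std::chrono::steady_clock::now()) {}
  ~TimedScope() {
    auto end = std::chrono::steady_clock::now();
    double ms =
        std::chrono::duration<double, std::milli>(end - start_).count();
    sink_(ms);  // report elapsed milliseconds on scope exit
  }

 private:
  std::function<void(double)> sink_;
  std::chrono::steady_clock::time_point start_;
};

int main() {
  TimedScope scope([](double ms) { std::printf("scope: %.3f ms\n", ms); });
  // ... the work being measured runs here ...
}
```
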
array-buffer-collector.cc
26 JSArrayBuffer::FreeBackingStore(heap_->isolate(), alloc); in FreeAllocations()
35 : CancelableTask(heap->isolate()), heap_(heap) {} in FreeingTask()
42 heap_->tracer(), in RunInternal()
44 heap_->array_buffer_collector()->FreeAllocations(); in RunInternal()
47 Heap* heap_; member in v8::internal::ArrayBufferCollector::FreeingTask
52 heap_->account_external_memory_concurrently_freed(); in FreeAllocationsOnBackgroundThread()
53 if (!heap_->IsTearingDown() && FLAG_concurrent_array_buffer_freeing) { in FreeAllocationsOnBackgroundThread()
55 base::make_unique<FreeingTask>(heap_)); in FreeAllocationsOnBackgroundThread()
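
FreeAllocationsOnBackgroundThread() frees array-buffer backing stores on a worker unless the heap is tearing down or concurrent freeing is disabled. A sketch of that branch, with std::thread standing in for V8's cancelable task runner and a hypothetical callback in place of FreeAllocations():

```cpp
// std::thread stands in for V8's task runner; the callback is a
// hypothetical analogue of ArrayBufferCollector::FreeAllocations().
#include <cstdio>
#include <thread>

void FreePendingAllocations() { std::printf("freeing backing stores\n"); }

void FreeAllocationsMaybeConcurrently(bool tearing_down,
                                      bool concurrent_freeing) {
  if (!tearing_down && concurrent_freeing) {
    // Analogous to posting a FreeingTask to a worker thread.
    std::thread worker(FreePendingAllocations);
    worker.join();  // the real code does not block; join() keeps the sketch safe
  } else {
    // During teardown the work must run synchronously.
    FreePendingAllocations();
  }
}

int main() { FreeAllocationsMaybeConcurrently(false, true); }
```
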
heap-inl.h
585 : heap_(isolate->heap()) { in AlwaysAllocateScope()
586 heap_->always_allocate_scope_count_++; in AlwaysAllocateScope()
590 heap_->always_allocate_scope_count_--; in ~AlwaysAllocateScope()
594 : heap_(heap) { in CodeSpaceMemoryModificationScope()
595 if (heap_->write_protect_code_memory()) { in CodeSpaceMemoryModificationScope()
596 heap_->increment_code_space_memory_modification_scope_depth(); in CodeSpaceMemoryModificationScope()
597 heap_->code_space()->SetReadAndWritable(); in CodeSpaceMemoryModificationScope()
598 LargePage* page = heap_->lo_space()->first_page(); in CodeSpaceMemoryModificationScope()
601 CHECK(heap_->memory_allocator()->IsMemoryChunkExecutable(page)); in CodeSpaceMemoryModificationScope()
610 if (heap_->write_protect_code_memory()) { in ~CodeSpaceMemoryModificationScope()
[all …]
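
Both scopes in these hits follow the counter-based RAII pattern: construction bumps a depth counter (and, for the code-space scope, makes code pages writable), destruction undoes it. A minimal sketch with a hypothetical ToyHeap in place of v8::internal::Heap:

```cpp
// ToyHeap is a hypothetical stand-in for v8::internal::Heap.
struct ToyHeap {
  int always_allocate_scope_count = 0;
};

class AlwaysAllocateScope {
 public:
  explicit AlwaysAllocateScope(ToyHeap* heap) : heap_(heap) {
    heap_->always_allocate_scope_count++;  // constructor bumps the depth
  }
  ~AlwaysAllocateScope() {
    heap_->always_allocate_scope_count--;  // destructor restores it
  }

 private:
  ToyHeap* heap_;
};

int main() {
  ToyHeap heap;
  {
    AlwaysAllocateScope scope(&heap);
    // while the count is non-zero, allocations must not trigger GC
  }
  return heap.always_allocate_scope_count;  // back to 0
}
```
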
mark-compact.cc
61 explicit MarkingVerifier(Heap* heap) : heap_(heap) {} in MarkingVerifier()
91 Heap* heap_; member in v8::internal::__anon40461b4e0111::MarkingVerifier
95 heap_->IterateStrongRoots(this, mode); in VerifyRoots()
106 object->map() != ReadOnlyRoots(heap_).one_pointer_filler_map()) { in VerifyMarkingOnPage()
158 VerifyMarking(heap_->new_space()); in Run()
159 VerifyMarking(heap_->old_space()); in Run()
160 VerifyMarking(heap_->code_space()); in Run()
161 VerifyMarking(heap_->map_space()); in Run()
163 LargeObjectIterator it(heap_->lo_space()); in Run()
233 explicit EvacuationVerifier(Heap* heap) : heap_(heap) {} in EvacuationVerifier()
[all …]
object-stats.h
85 explicit ObjectStats(Heap* heap) : heap_(heap) { ClearObjectStats(); } in ObjectStats()
122 Heap* heap() { return heap_; } in heap()
142 Heap* heap_; variable
165 : heap_(heap), live_(live), dead_(dead) { in ObjectStatsCollector()
166 DCHECK_NOT_NULL(heap_); in ObjectStatsCollector()
176 Heap* const heap_;
local-allocator.h
23 : heap_(heap), in LocalAllocator()
31 heap_->old_space()->MergeCompactionSpace(compaction_spaces_.Get(OLD_SPACE)); in Finalize()
32 heap_->code_space()->MergeCompactionSpace( in Finalize()
58 Heap* const heap_; variable
object-stats.cc
338 Isolate* isolate() { return heap_->isolate(); } in isolate()
391 Heap* heap_; member in v8::internal::ObjectStatsCollectorImpl
401 : heap_(heap), in ObjectStatsCollectorImpl()
416 if (obj == ReadOnlyRoots(heap_).empty_property_array()) return false; in ShouldRecordObject()
619 GetFeedbackSlotType(vector->Get(slot), it.kind(), heap_->isolate()), in RecordVirtualFeedbackVectorDetails()
701 Object* list = heap_->allocation_sites_list(); in CollectGlobalStatistics()
709 RecordSimpleVirtualObjectStats(nullptr, heap_->serialized_objects(), in CollectGlobalStatistics()
711 RecordSimpleVirtualObjectStats(nullptr, heap_->number_string_cache(), in CollectGlobalStatistics()
714 nullptr, heap_->single_character_string_cache(), in CollectGlobalStatistics()
716 RecordSimpleVirtualObjectStats(nullptr, heap_->string_split_cache(), in CollectGlobalStatistics()
[all …]
store-buffer.cc
22 : heap_(heap), top_(nullptr), current_(0), mode_(NOT_IN_GC) { in StoreBuffer()
39 void* hint = AlignedAddress(heap_->GetRandomMmapAddr(), alignment); in SetUp()
43 heap_->FatalProcessOutOfMemory("StoreBuffer::SetUp"); in SetUp()
69 heap_->FatalProcessOutOfMemory("StoreBuffer::SetUp"); in SetUp()
146 base::make_unique<Task>(heap_->isolate(), this)); in FlipStoreBuffers()
158 base::LockGuard<base::Mutex> guard(heap_->lo_space()->chunk_map_mutex()); in MoveEntriesToRememberedSet()
162 MemoryChunk* chunk = MemoryChunk::FromAnyPointerAddress(heap_, addr); in MoveEntriesToRememberedSet()
scavenger.cc
22 : heap_(heap), scavenger_(scavenger), record_slots_(record_slots) {} in IterateAndScavengePromotedObjectsVisitor()
72 heap_->mark_compact_collector()->RecordSlot(host, slot, target); in HandleSlot()
77 Heap* const heap_; member in v8::internal::IterateAndScavengePromotedObjectsVisitor
84 : heap_(heap), in Scavenger()
123 [this](Address addr) { return CheckAndScavengeObject(heap_, addr); }, in ScavengePage()
128 heap_, type, addr, [this](MaybeObject** addr) { in ScavengePage()
sweeper.cc
27 sweeper_->heap_->mark_compact_collector()->EnsureSweepingCompleted(); in PauseOrCompleteScope()
140 heap_->mark_compact_collector()->non_atomic_marking_state(); in StartSweeping()
156 !heap_->delay_sweeper_tasks_for_testing_) { in StartSweeperTasks()
161 heap_->isolate(), this, &pending_sweeper_tasks_semaphore_, in StartSweeperTasks()
198 if (heap_->isolate()->cancelable_task_manager()->TryAbort(task_ids_[i]) != in AbortAndWaitForTasks()
451 new IncrementalSweeperTask(heap_->isolate(), this); in ScheduleIncrementalSweepingTask()
452 v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>(heap_->isolate()); in ScheduleIncrementalSweepingTask()
480 heap_->paged_space(space)->IncreaseAllocatedBytes( in PrepareToBeSweptPage()
510 if (heap_->isolate()->cancelable_task_manager()->TryAbort( in EnsureIterabilityCompleted()
559 heap_->isolate(), this, &iterability_task_semaphore_); in StartIterabilityTasks()
local-allocator-inl.h
52 heap_->CreateFillerObjectAt(object->address(), object_size, in FreeLastInNewSpace()
60 heap_->CreateFillerObjectAt(object->address(), object_size, in FreeLastInOldSpace()
88 new_space_lab_ = LocalAllocationBuffer::FromResult(heap_, result, kLabSize); in NewLocalAllocationBuffer()
stress-marking-observer.cc
12 : AllocationObserver(64), heap_(heap) {} in StressMarkingObserver()
16 heap_.StartIncrementalMarkingIfAllocationLimitIsReached(Heap::kNoGCFlags, in Step()
concurrent-marking.cc
545 : heap_(heap), in ConcurrentMarking()
557 TRACE_BACKGROUND_GC(heap_->tracer(), in Run()
566 heap_->isolate()->PrintWithTimestamp( in Run()
596 Address new_space_top = heap_->new_space()->original_top(); in Run()
597 Address new_space_limit = heap_->new_space()->original_limit(); in Run()
651 heap_->isolate()->PrintWithTimestamp( in Run()
658 DCHECK(!heap_->IsTearingDown()); in ScheduleTasks()
681 heap_->isolate()->PrintWithTimestamp( in ScheduleTasks()
688 base::make_unique<Task>(heap_->isolate(), this, &task_state_[i], i); in ScheduleTasks()
697 if (!FLAG_concurrent_marking || heap_->IsTearingDown()) return; in RescheduleTasksIfNeeded()
[all …]
store-buffer-inl.h
17 StoreBufferOverflow(heap_->isolate()); in InsertDeletionIntoStoreBuffer()
27 StoreBufferOverflow(heap_->isolate()); in InsertIntoStoreBuffer()
mark-compact-inl.h
22 : heap_(collector->heap()), in MarkingVisitor()
70 if (heap_->local_embedder_heap_tracer()->InUse()) { in VisitJSApiObject()
72 heap_->TracePossibleWrapper(object); in VisitJSApiObject()
253 heap_->AddRetainer(host, object); in MarkObjectWithoutPush()
269 heap_->AddRetainer(host, object); in MarkObject()
310 heap_->incremental_marking()->NotifyIncompleteScanOfObject( in VisitFixedArrayIncremental()
355 heap_->AddRetainer(host, obj); in MarkObject()
364 heap_->AddRetainingRoot(root, obj); in MarkRootObject()
383 heap_->AddRetainingRoot(Root::kWrapperTracing, obj); in MarkExternallyReferencedObject()
array-buffer-collector.h
24 explicit ArrayBufferCollector(Heap* heap) : heap_(heap) {} in ArrayBufferCollector()
43 Heap* heap_; variable
/external/libchrome/mojo/core/ports/
message_queue.cc
33 for (const auto& message : heap_) in ~MessageQueue()
41 return !heap_.empty() && heap_[0]->sequence_num() == next_sequence_num_; in HasNextMessage()
46 if (!HasNextMessage() || (filter && !filter->Match(*heap_[0]))) { in GetNextMessage()
51 std::pop_heap(heap_.begin(), heap_.end()); in GetNextMessage()
52 *message = std::move(heap_.back()); in GetNextMessage()
54 heap_.pop_back(); in GetNextMessage()
64 heap_.emplace_back(std::move(message)); in AcceptMessage()
65 std::push_heap(heap_.begin(), heap_.end()); in AcceptMessage()
70 *has_next_message = (heap_[0]->sequence_num() == next_sequence_num_); in AcceptMessage()
76 *messages = std::move(heap_); in TakeAllMessages()
message_queue.h
67 size_t queued_message_count() const { return heap_.size(); } in COMPONENT_EXPORT()
74 std::vector<std::unique_ptr<UserMessageEvent>> heap_; in COMPONENT_EXPORT()
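
Taken together, the two message_queue hits show a std::vector kept as a heap on sequence number via std::push_heap/std::pop_heap, releasing messages only in strict sequence order. A self-contained sketch of that structure; the Message type and the initial sequence number are simplifying assumptions, and an explicit comparator replaces whatever ordering the real UserMessageEvent defines:

```cpp
// Message and the starting sequence number are sketch-only assumptions.
#include <algorithm>
#include <cstdint>
#include <memory>
#include <vector>

struct Message {
  uint64_t sequence_num;
};

// Makes heap_[0] the message with the smallest sequence number (min-heap).
struct LaterSequenceFirst {
  bool operator()(const std::unique_ptr<Message>& a,
                  const std::unique_ptr<Message>& b) const {
    return a->sequence_num > b->sequence_num;
  }
};

class MessageQueue {
 public:
  void AcceptMessage(std::unique_ptr<Message> m) {
    heap_.emplace_back(std::move(m));
    std::push_heap(heap_.begin(), heap_.end(), LaterSequenceFirst{});
  }

  // True only when the lowest queued sequence number is the next expected
  // one, exactly as in the HasNextMessage() hit above.
  bool HasNextMessage() const {
    return !heap_.empty() && heap_[0]->sequence_num == next_sequence_num_;
  }

  std::unique_ptr<Message> GetNextMessage() {
    if (!HasNextMessage()) return nullptr;
    std::pop_heap(heap_.begin(), heap_.end(), LaterSequenceFirst{});
    std::unique_ptr<Message> m = std::move(heap_.back());
    heap_.pop_back();
    ++next_sequence_num_;
    return m;
  }

 private:
  std::vector<std::unique_ptr<Message>> heap_;
  uint64_t next_sequence_num_ = 1;  // starting value is an assumption
};

int main() {
  MessageQueue q;
  q.AcceptMessage(std::make_unique<Message>(Message{2}));
  q.AcceptMessage(std::make_unique<Message>(Message{1}));
  while (q.HasNextMessage()) q.GetNextMessage();  // releases 1, then 2
}
```
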
/external/v8/src/
roots-inl.h
17 ReadOnlyRoots::ReadOnlyRoots(Isolate* isolate) : heap_(isolate->heap()) {} in ReadOnlyRoots()
21 return type::cast(heap_->roots_[Heap::k##camel_name##RootIndex]); \
25 bit_cast<type**>(&heap_->roots_[Heap::k##camel_name##RootIndex])); \
32 return String::cast(heap_->roots_[Heap::k##name##RootIndex]); \
36 bit_cast<String**>(&heap_->roots_[Heap::k##name##RootIndex])); \
43 return Symbol::cast(heap_->roots_[Heap::k##name##RootIndex]); \ in INTERNALIZED_STRING_LIST()
47 bit_cast<Symbol**>(&heap_->roots_[Heap::k##name##RootIndex])); \
54 return Symbol::cast(heap_->roots_[Heap::k##name##RootIndex]); \
58 bit_cast<Symbol**>(&heap_->roots_[Heap::k##name##RootIndex])); \
66 return Map::cast(heap_->roots_[Heap::k##Name##MapRootIndex]); \
[all …]
identity-map.cc
25 heap_->UnregisterStrongRoots(keys_); in Clear()
48 Object* not_mapped = ReadOnlyRoots(heap_).not_mapped_symbol(); in ScanKeysFor()
61 Object* not_mapped = ReadOnlyRoots(heap_).not_mapped_symbol(); in InsertKey()
83 Object* not_mapped = ReadOnlyRoots(heap_).not_mapped_symbol(); in DeleteIndex()
123 if (index < 0 && gc_counter_ != heap_->gc_count()) { in Lookup()
136 if (gc_counter_ != heap_->gc_count()) Rehash(); in LookupOrInsert()
144 CHECK_NE(address, ReadOnlyRoots(heap_).not_mapped_symbol()); in Hash()
159 gc_counter_ = heap_->gc_count(); in GetEntry()
162 Object* not_mapped = ReadOnlyRoots(heap_).not_mapped_symbol(); in GetEntry()
167 heap_->RegisterStrongRoots(keys_, keys_ + capacity_); in GetEntry()
[all …]
/external/tensorflow/tensorflow/contrib/nearest_neighbor/kernels/
hyperplane_lsh_probes.h
108 heap_.Resize(2 * num_probes_); in SetupProbing()
110 heap_.Reset(); in SetupProbing()
118 heap_.InsertUnsorted(score, ProbeCandidate(ii, hash_mask, 0)); in SetupProbing()
120 heap_.Heapify(); in SetupProbing()
150 if (heap_.IsEmpty()) { in GetNextProbe()
156 heap_.ExtractMin(&cur_score, &cur_candidate); in GetNextProbe()
180 heap_.Insert(next_score, ProbeCandidate(*cur_table, next_mask, in GetNextProbe()
189 heap_.Insert(next_score, ProbeCandidate(*cur_table, next_mask, in GetNextProbe()
228 SimpleHeap<CoordinateType, ProbeCandidate> heap_; variable
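
The probing sequence in these hits is generated best-first: candidates sit in a heap keyed by score, and extracting the minimum may push successor candidates with higher scores. A sketch of that loop using std::priority_queue in place of the custom SimpleHeap, with a simplified, hypothetical ProbeCandidate:

```cpp
// ProbeCandidate here is a sketch-only simplification of the real struct.
#include <cstdio>
#include <queue>
#include <utility>
#include <vector>

struct ProbeCandidate {
  int table;
  unsigned hash_mask;
};

using ScoredCandidate = std::pair<float, ProbeCandidate>;

// Orders the priority_queue so the *lowest* score is extracted first.
struct HigherScoreLater {
  bool operator()(const ScoredCandidate& a, const ScoredCandidate& b) const {
    return a.first > b.first;
  }
};

int main() {
  std::priority_queue<ScoredCandidate, std::vector<ScoredCandidate>,
                      HigherScoreLater>
      heap;
  heap.push({0.3f, {0, 0x1u}});
  heap.push({0.1f, {1, 0x2u}});  // lowest score, extracted first
  while (!heap.empty()) {
    ScoredCandidate cur = heap.top();  // analogue of ExtractMin()
    heap.pop();
    std::printf("probe table %d mask %u score %.2f\n", cur.second.table,
                cur.second.hash_mask, cur.first);
    // A real prober would Insert() successor candidates with higher
    // scores here, as the GetNextProbe() hits above do.
  }
}
```
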
/external/v8/src/profiler/
sampling-heap-profiler.h
123 Heap* heap() const { return heap_; } in heap()
144 Heap* const heap_; variable
166 heap_(heap), in SamplingAllocationObserver()
173 USE(heap_); in Step()
174 DCHECK(heap_->gc_state() == Heap::NOT_IN_GC); in Step()
187 Heap* const heap_; variable
sampling-heap-profiler.cc
55 heap_(heap), in SamplingHeapProfiler()
57 heap_, static_cast<intptr_t>(rate), rate, this, in SamplingHeapProfiler()
60 heap_, static_cast<intptr_t>(rate), rate, this, in SamplingHeapProfiler()
70 heap_->AddAllocationObserversToAllSpaces(other_spaces_observer_.get(), in SamplingHeapProfiler()
76 heap_->RemoveAllocationObserversFromAllSpaces(other_spaces_observer_.get(), in ~SamplingHeapProfiler()
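
The SamplingAllocationObserver hits suggest rate-based allocation sampling. A common way to build such a sampler (not necessarily V8's exact scheme) is to draw the gap until the next sample from an exponential distribution, so that sampled allocations form a Poisson process over allocated bytes:

```cpp
// Generic rate-based allocation sampler; the exponential/Poisson scheme is
// a standard technique and not taken verbatim from V8's sources.
#include <cstddef>
#include <cstdio>
#include <random>

class AllocationSampler {
 public:
  explicit AllocationSampler(double mean_bytes_between_samples)
      : dist_(1.0 / mean_bytes_between_samples) {
    ScheduleNext();
  }

  // Called on every allocation; returns true when this one is sampled.
  bool OnAllocate(std::size_t bytes) {
    bytes_until_sample_ -= static_cast<double>(bytes);
    if (bytes_until_sample_ > 0) return false;
    ScheduleNext();
    return true;
  }

 private:
  void ScheduleNext() { bytes_until_sample_ = dist_(rng_); }

  std::mt19937_64 rng_{42};  // fixed seed keeps the sketch deterministic
  std::exponential_distribution<double> dist_;
  double bytes_until_sample_ = 0;
};

int main() {
  AllocationSampler sampler(4096.0);  // sample roughly every 4 KiB
  int sampled = 0;
  for (int i = 0; i < 1000; ++i) {
    if (sampler.OnAllocate(64)) ++sampled;
  }
  std::printf("sampled %d of 1000 allocations\n", sampled);
}
```
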
