/external/skia/tests/

  D  TDPQueueTest.cpp
        15  SkTDPQueue<int, intless> heap;   in simple_test() local
        16  REPORTER_ASSERT(reporter, 0 == heap.count());   in simple_test()
        18  heap.insert(0);   in simple_test()
        19  REPORTER_ASSERT(reporter, 1 == heap.count());   in simple_test()
        20  REPORTER_ASSERT(reporter, 0 == heap.peek());   in simple_test()
        21  heap.pop();   in simple_test()
        22  REPORTER_ASSERT(reporter, 0 == heap.count());   in simple_test()
        24  heap.insert(0);   in simple_test()
        25  heap.insert(1);   in simple_test()
        26  REPORTER_ASSERT(reporter, 2 == heap.count());   in simple_test()
        [all …]

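A minimal sketch of the insert/peek/pop sequence this Skia test exercises, written against std::priority_queue configured as a min-heap; the container below is illustrative and is not SkTDPQueue or its intless comparator.

    #include <cassert>
    #include <functional>
    #include <queue>
    #include <vector>

    int main() {
        // Min-heap: the smallest element is always at the top.
        std::priority_queue<int, std::vector<int>, std::greater<int>> heap;
        assert(heap.empty());

        heap.push(0);
        assert(heap.size() == 1);
        assert(heap.top() == 0);   // peek without removing
        heap.pop();
        assert(heap.empty());

        heap.push(0);
        heap.push(1);
        assert(heap.size() == 2);
        assert(heap.top() == 0);   // still the smaller of the two
        return 0;
    }
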
  D  BitmapHeapTest.cpp
        58  SkBitmapHeap heap(1, 1);   in DEF_TEST() local
        60  controller.setBitmapStorage(&heap);   in DEF_TEST()
        64  REPORTER_ASSERT(reporter, heap.count() == 0);   in DEF_TEST()
        67  heap.deferAddingOwners();   in DEF_TEST()
        69  heap.endAddingOwnersDeferral(true);   in DEF_TEST()
        73  REPORTER_ASSERT(reporter, heap.count() == 1);   in DEF_TEST()
        77  SkBitmapHeapEntry* entry = heap.getEntry(0);   in DEF_TEST()
        84  heap.freeMemoryIfPossible(~0U);   in DEF_TEST()
        85  REPORTER_ASSERT(reporter, heap.count() == 0);   in DEF_TEST()
        88  heap.deferAddingOwners();   in DEF_TEST()
        [all …]

/external/v8/src/heap/

  D  incremental-marking-job.cc
        18  void IncrementalMarkingJob::Start(Heap* heap) {   in Start() argument
        19  DCHECK(!heap->incremental_marking()->IsStopped());   in Start()
        25  ScheduleIdleTask(heap);   in Start()
        26  ScheduleDelayedTask(heap);   in Start()
        43  void IncrementalMarkingJob::ScheduleIdleTask(Heap* heap) {   in ScheduleIdleTask() argument
        45  v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>(heap->isolate());   in ScheduleIdleTask()
        48  auto task = new IdleTask(heap->isolate(), this);   in ScheduleIdleTask()
        55  void IncrementalMarkingJob::ScheduleDelayedTask(Heap* heap) {   in ScheduleDelayedTask() argument
        57  v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>(heap->isolate());   in ScheduleDelayedTask()
        60  auto task = new DelayedTask(heap->isolate(), this);   in ScheduleDelayedTask()
        [all …]

  D  objects-visiting-inl.h
       105  Heap* heap = map->GetHeap();   in VisitJSArrayBuffer() local
       106  heap->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(object));   in VisitJSArrayBuffer()
       210  Heap* heap, HeapObject* object, Address entry_address) {   in VisitCodeEntry() argument
       212  heap->mark_compact_collector()->RecordCodeEntrySlot(object, entry_address,   in VisitCodeEntry()
       214  StaticVisitor::MarkObject(heap, code);   in VisitCodeEntry()
       220  Heap* heap, RelocInfo* rinfo) {   in VisitEmbeddedPointer() argument
       223  heap->mark_compact_collector()->RecordRelocSlot(rinfo, object);   in VisitEmbeddedPointer()
       228  StaticVisitor::MarkObject(heap, object);   in VisitEmbeddedPointer()
       234  void StaticMarkingVisitor<StaticVisitor>::VisitCell(Heap* heap,   in VisitCell() argument
       238  heap->mark_compact_collector()->RecordRelocSlot(rinfo, cell);   in VisitCell()
       [all …]

  D  scavenge-job.cc
        20  Heap* heap = isolate()->heap();   in RunInternal() local
        24  double start_ms = heap->MonotonicallyIncreasingTimeInMs();   in RunInternal()
        27  static_cast<size_t>(heap->tracer()->ScavengeSpeedInBytesPerMillisecond());   in RunInternal()
        28  size_t new_space_size = heap->new_space()->Size();   in RunInternal()
        29  size_t new_space_capacity = heap->new_space()->Capacity();   in RunInternal()
        37  heap->CollectGarbage(NEW_SPACE, "idle task: scavenge");   in RunInternal()
        40  job_->RescheduleIdleTask(heap);   in RunInternal()
        84  void ScavengeJob::RescheduleIdleTask(Heap* heap) {   in RescheduleIdleTask() argument
        88  ScheduleIdleTask(heap);   in RescheduleIdleTask()
        94  void ScavengeJob::ScheduleIdleTaskIfNeeded(Heap* heap, int bytes_allocated) {   in ScheduleIdleTaskIfNeeded() argument
        [all …]

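The scavenge-job hits above revolve around one estimate: whether a young-generation collection would fit into an idle slot, given the measured scavenge speed and the current new-space size. A simplified sketch of that kind of check follows; it is not V8's exact heuristic, and the function name and return semantics are assumptions.

    #include <cstddef>

    // Worthwhile only if the expected scavenge duration, estimated from the
    // observed throughput, fits inside the idle budget the embedder granted.
    bool IdleScavengeWorthwhile(double idle_time_ms,
                                std::size_t scavenge_speed_bytes_per_ms,
                                std::size_t new_space_size) {
      if (scavenge_speed_bytes_per_ms == 0) return false;  // no samples yet
      double expected_ms =
          static_cast<double>(new_space_size) / scavenge_speed_bytes_per_ms;
      return expected_ms <= idle_time_ms;
    }
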
  D  memory-reducer.cc
        22  : CancelableTask(memory_reducer->heap()->isolate()),   in TimerTask()
        28  Heap* heap = memory_reducer_->heap();   in RunInternal() local
        30  double time_ms = heap->MonotonicallyIncreasingTimeInMs();   in RunInternal()
        31  heap->tracer()->SampleAllocation(time_ms, heap->NewSpaceAllocationCounter(),   in RunInternal()
        32  heap->OldGenerationAllocationCounter());   in RunInternal()
        34  bool low_allocation_rate = heap->HasLowAllocationRate();   in RunInternal()
        36  bool optimize_for_memory = heap->ShouldOptimizeForMemoryUsage();   in RunInternal()
        38  PrintIsolate(heap->isolate(), "Memory reducer: call rate %.3lf, %s, %s\n",   in RunInternal()
        49  heap->incremental_marking()->IsStopped() &&   in RunInternal()
        50  heap->incremental_marking()->CanBeActivated();   in RunInternal()
        [all …]

  D  objects-visiting.cc
       174  static bool MustRecordSlots(Heap* heap) {   in MustRecordSlots() argument
       175  return heap->gc_state() == Heap::MARK_COMPACT &&   in MustRecordSlots()
       176  heap->mark_compact_collector()->is_compacting();   in MustRecordSlots()
       185  Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer) {   in VisitWeakList() argument
       186  Object* undefined = heap->undefined_value();   in VisitWeakList()
       189  MarkCompactCollector* collector = heap->mark_compact_collector();   in VisitWeakList()
       190  bool record_slots = MustRecordSlots(heap);   in VisitWeakList()
       217  WeakListVisitor<T>::VisitLiveObject(heap, tail, retainer);   in VisitWeakList()
       220  WeakListVisitor<T>::VisitPhantomObject(heap, candidate);   in VisitWeakList()
       234  static void ClearWeakList(Heap* heap, Object* list) {   in ClearWeakList() argument
       [all …]

  D  scavenger.cc
        92  static void RecordCopiedObject(Heap* heap, HeapObject* obj) {   in RecordCopiedObject() argument
        99  if (heap->new_space()->Contains(obj)) {   in RecordCopiedObject()
       100  heap->new_space()->RecordAllocation(obj);   in RecordCopiedObject()
       102  heap->new_space()->RecordPromotion(obj);   in RecordCopiedObject()
       110  INLINE(static void MigrateObject(Heap* heap, HeapObject* source,   in INLINE() argument
       115  DCHECK(!heap->InToSpace(target) ||   in INLINE()
       116  target->address() + size == heap->new_space()->top() ||   in INLINE()
       117  target->address() + size + kPointerSize == heap->new_space()->top());   in INLINE()
       121  DCHECK(!heap->InToSpace(target) ||   in INLINE()
       122  heap->promotion_queue()->IsBelowPromotionQueue(   in INLINE()
       [all …]

  D  mark-compact.cc
        49  MarkCompactCollector::MarkCompactCollector(Heap* heap)   in MarkCompactCollector() argument
        59  heap_(heap),   in MarkCompactCollector()
        74  explicit VerifyMarkingVisitor(Heap* heap) : heap_(heap) {}   in VerifyMarkingVisitor() argument
       106  static void VerifyMarking(Heap* heap, Address bottom, Address top) {   in VerifyMarking() argument
       107  VerifyMarkingVisitor visitor(heap);   in VerifyMarking()
       136  VerifyMarking(space->heap(), page->area_start(), limit);   in VerifyMarking()
       146  VerifyMarking(space->heap(), p->area_start(), p->area_end());   in VerifyMarking()
       151  static void VerifyMarking(Heap* heap) {   in VerifyMarking() argument
       152  VerifyMarking(heap->old_space());   in VerifyMarking()
       153  VerifyMarking(heap->code_space());   in VerifyMarking()
       [all …]

/external/mesa3d/src/mesa/main/

  D  mm.c
        34  mmDumpMemInfo(const struct mem_block *heap)   in mmDumpMemInfo() argument
        36  fprintf(stderr, "Memory heap %p:\n", (void *)heap);   in mmDumpMemInfo()
        37  if (heap == 0) {   in mmDumpMemInfo()
        42  for(p = heap->next; p != heap; p = p->next) {   in mmDumpMemInfo()
        50  for(p = heap->next_free; p != heap; p = p->next_free) {   in mmDumpMemInfo()
        63  struct mem_block *heap, *block;   in mmInit() local
        68  heap = (struct mem_block *) calloc(1, sizeof(struct mem_block));   in mmInit()
        69  if (!heap)   in mmInit()
        74  free(heap);   in mmInit()
        78  heap->next = block;   in mmInit()
        [all …]

/external/mesa3d/src/gallium/auxiliary/util/

  D  u_mm.c
        34  u_mmDumpMemInfo(const struct mem_block *heap)   in u_mmDumpMemInfo() argument
        36  debug_printf("Memory heap %p:\n", (void *) heap);   in u_mmDumpMemInfo()
        37  if (heap == 0) {   in u_mmDumpMemInfo()
        44  for (p = heap->next; p != heap; p = p->next) {   in u_mmDumpMemInfo()
        58  for (p = heap->next_free; p != heap; p = p->next_free) {   in u_mmDumpMemInfo()
        72  struct mem_block *heap, *block;   in u_mmInit() local
        77  heap = CALLOC_STRUCT(mem_block);   in u_mmInit()
        78  if (!heap)   in u_mmInit()
        83  FREE(heap);   in u_mmInit()
        87  heap->next = block;   in u_mmInit()
        [all …]

/external/libdrm/intel/

  D  mm.c
        36  drm_private void mmDumpMemInfo(const struct mem_block *heap)   in mmDumpMemInfo() argument
        38  drmMsg("Memory heap %p:\n", (void *)heap);   in mmDumpMemInfo()
        39  if (heap == 0) {   in mmDumpMemInfo()
        44  for (p = heap->next; p != heap; p = p->next) {   in mmDumpMemInfo()
        52  for (p = heap->next_free; p != heap; p = p->next_free) {   in mmDumpMemInfo()
        64  struct mem_block *heap, *block;   in mmInit() local
        69  heap = (struct mem_block *)calloc(1, sizeof(struct mem_block));   in mmInit()
        70  if (!heap)   in mmInit()
        75  free(heap);   in mmInit()
        79  heap->next = block;   in mmInit()
        [all …]

/external/deqp/framework/delibs/depool/

  D  dePoolHeap.h
        67  DE_INLINE int TYPENAME##_getNumElements (const TYPENAME* heap) DE_UNUSED_FUNCTION; \
        68  DE_INLINE deBool TYPENAME##_reserve (TYPENAME* heap, int capacity) DE_UNUSED_FUNCTION; \
        69  DE_INLINE void TYPENAME##_reset (TYPENAME* heap) DE_UNUSED_FUNCTION; \
        70  DE_INLINE void TYPENAME##_moveDown (TYPENAME* heap, int ndx) DE_UNUSED_FUNCTION; \
        71  DE_INLINE void TYPENAME##_moveUp (TYPENAME* heap, int ndx) DE_UNUSED_FUNCTION; \
        72  DE_INLINE deBool TYPENAME##_push (TYPENAME* heap, VALUETYPE elem) DE_UNUSED_FUNCTION; \
        73  DE_INLINE VALUETYPE TYPENAME##_popMin (TYPENAME* heap) DE_UNUSED_FUNCTION; \
        77  TYPENAME* heap = DE_POOL_NEW(pool, TYPENAME); \
        78  if (!heap) \
        80  heap->array = TYPENAME##Array_create(pool); \
        [all …]

  D  dePoolHeap.c
        62  TestHeap* heap = TestHeap_create(pool);   in dePoolHeap_selfTest() local
        65  TestHeap_push(heap, HeapItem_create(10, 10));   in dePoolHeap_selfTest()
        66  TestHeap_push(heap, HeapItem_create(0, 10));   in dePoolHeap_selfTest()
        67  TestHeap_push(heap, HeapItem_create(20, 10));   in dePoolHeap_selfTest()
        68  DE_TEST_ASSERT(TestHeap_getNumElements(heap) == 3);   in dePoolHeap_selfTest()
        70  DE_TEST_ASSERT(TestHeap_popMin(heap).priority == 0);   in dePoolHeap_selfTest()
        71  DE_TEST_ASSERT(TestHeap_popMin(heap).priority == 10);   in dePoolHeap_selfTest()
        72  DE_TEST_ASSERT(TestHeap_popMin(heap).priority == 20);   in dePoolHeap_selfTest()
        73  DE_TEST_ASSERT(TestHeap_getNumElements(heap) == 0);   in dePoolHeap_selfTest()
        80  TestHeap_push(heap, HeapItem_create(i, -i));   in dePoolHeap_selfTest()
        [all …]

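dePoolHeap generates an array-backed binary min-heap per element type; the _moveUp/_moveDown declarations above are the sift operations that _push and _popMin rely on. A self-contained sketch of that shape follows, with hypothetical names and a plain std::vector standing in for deqp's pool-backed array and comparison callback.

    #include <utility>
    #include <vector>

    template <typename T>
    class MinHeap {
     public:
      void push(const T& elem) {
        data_.push_back(elem);
        moveUp(static_cast<int>(data_.size()) - 1);
      }
      // Precondition: the heap is not empty.
      T popMin() {
        T min = data_.front();
        data_.front() = data_.back();
        data_.pop_back();
        if (!data_.empty()) moveDown(0);
        return min;
      }
      bool empty() const { return data_.empty(); }

     private:
      // Sift a newly pushed element toward the root while it is smaller than its parent.
      void moveUp(int ndx) {
        while (ndx > 0) {
          int parent = (ndx - 1) / 2;
          if (!(data_[ndx] < data_[parent])) break;
          std::swap(data_[ndx], data_[parent]);
          ndx = parent;
        }
      }
      // Sift the root element down, always swapping with the smaller child.
      void moveDown(int ndx) {
        int size = static_cast<int>(data_.size());
        for (;;) {
          int child = 2 * ndx + 1;                                           // left child
          if (child >= size) break;
          if (child + 1 < size && data_[child + 1] < data_[child]) ++child;  // smaller child
          if (!(data_[child] < data_[ndx])) break;                           // heap property holds
          std::swap(data_[ndx], data_[child]);
          ndx = child;
        }
      }
      std::vector<T> data_;
    };
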
/external/mesa3d/src/gallium/drivers/nouveau/

  D  nouveau_heap.c
        29  nouveau_heap_init(struct nouveau_heap **heap,   in nouveau_heap_init() argument
        40  *heap = r;   in nouveau_heap_init()
        45  nouveau_heap_destroy(struct nouveau_heap **heap)   in nouveau_heap_destroy() argument
        47  if (!*heap)   in nouveau_heap_destroy()
        49  free(*heap);   in nouveau_heap_destroy()
        50  *heap = NULL;   in nouveau_heap_destroy()
        54  nouveau_heap_alloc(struct nouveau_heap *heap, unsigned size, void *priv,   in nouveau_heap_alloc() argument
        59  if (!heap || !size || !res || *res)   in nouveau_heap_alloc()
        62  while (heap) {   in nouveau_heap_alloc()
        63  if (!heap->in_use && heap->size >= size) {   in nouveau_heap_alloc()
        [all …]

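The nouveau_heap_alloc hit shows the core of a first-fit sub-allocator: walk the block list and take the first free block that is large enough. A minimal sketch of that idea follows; the Block struct and FirstFit name are hypothetical, and a real allocator (nouveau_heap_alloc, or mmAllocMem in the Mesa/libdrm mm.c files above) also splits the chosen block so the unused tail stays available, which is elided here.

    #include <cstddef>

    // Hypothetical block descriptor for one sub-allocation in a larger area.
    struct Block {
      std::size_t start;
      std::size_t size;
      bool in_use;
      Block* next;
    };

    // First-fit: return the first free block big enough, marking it used.
    Block* FirstFit(Block* heap, std::size_t size) {
      for (Block* b = heap; b != nullptr; b = b->next) {
        if (!b->in_use && b->size >= size) {
          b->in_use = true;
          return b;
        }
      }
      return nullptr;  // no free block large enough
    }
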
/external/chromium-trace/catapult/telemetry/third_party/altgraph/altgraph/

  D  GraphAlgo.py
        96  heap = self.__heap
        97  while heap[0][1] not in self or self[heap[0][1]] != heap[0][0]:
        98  lastItem = heap.pop()
       102  if smallChild+1 < len(heap) and heap[smallChild] > heap[smallChild+1] :
       104  if smallChild >= len(heap) or lastItem <= heap[smallChild]:
       105  heap[insertionPoint] = lastItem
       107  heap[insertionPoint] = heap[smallChild]
       109  return heap[0][1]
       128  heap = self.__heap
       129  if len(heap) > 2 * len(self):
       [all …]

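GraphAlgo.py's priority dictionary pairs a value map with a heap and uses lazy deletion: stale heap entries are skipped when the minimum is requested, and the heap is rebuilt once it grows past twice the dictionary size (the len(heap) > 2 * len(self) check above). A rough C++ sketch of the same bookkeeping; the class and member names are made up for illustration.

    #include <algorithm>
    #include <unordered_map>
    #include <utility>
    #include <vector>

    class PriorityDict {
     public:
      void set(int key, double value) {
        values_[key] = value;
        heap_.push_back({value, key});
        std::push_heap(heap_.begin(), heap_.end(), cmp_);
        // Rebuild once stale entries dominate, mirroring len(heap) > 2 * len(self).
        if (heap_.size() > 2 * values_.size()) rebuild();
      }
      // Precondition: the dictionary is not empty.
      int smallest() {
        for (;;) {
          auto [value, key] = heap_.front();
          auto it = values_.find(key);
          if (it != values_.end() && it->second == value) return key;
          // Stale entry (key removed or value superseded): discard and keep looking.
          std::pop_heap(heap_.begin(), heap_.end(), cmp_);
          heap_.pop_back();
        }
      }
      void erase(int key) { values_.erase(key); }  // heap entry becomes stale

     private:
      void rebuild() {
        heap_.clear();
        for (const auto& [key, value] : values_) heap_.push_back({value, key});
        std::make_heap(heap_.begin(), heap_.end(), cmp_);
      }
      struct Cmp {
        bool operator()(const std::pair<double, int>& a,
                        const std::pair<double, int>& b) const {
          return a.first > b.first;  // min-heap on the value
        }
      };
      Cmp cmp_;
      std::unordered_map<int, double> values_;
      std::vector<std::pair<double, int>> heap_;
    };
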
/external/v8/test/cctest/heap/

  D  test-compaction.cc
        33  Heap* heap = isolate->heap();   in HEAP_TEST() local
        36  PageIterator it(heap->old_space());   in HEAP_TEST()
        43  CHECK(heap->old_space()->Expand());   in HEAP_TEST()
        45  CreatePadding(heap, Page::kAllocatableMemory, TENURED);   in HEAP_TEST()
        51  heap->set_force_oom(true);   in HEAP_TEST()
        52  heap->CollectAllGarbage();   in HEAP_TEST()
        78  Heap* heap = isolate->heap();   in HEAP_TEST() local
        81  PageIterator it(heap->old_space());   in HEAP_TEST()
        90  CHECK(heap->old_space()->Expand());   in HEAP_TEST()
        92  CreatePadding(heap, Page::kAllocatableMemory, TENURED, object_size);   in HEAP_TEST()
        [all …]

  D  test-lab.cc
        16  static Address AllocateLabBackingStore(Heap* heap, intptr_t size_in_bytes) {   in AllocateLabBackingStore() argument
        17  AllocationResult result = heap->old_space()->AllocateRaw(   in AllocateLabBackingStore()
        42  static bool AllocateFromLab(Heap* heap, LocalAllocationBuffer* lab,   in AllocateFromLab() argument
        49  heap->CreateFillerObjectAt(obj->address(), static_cast<int>(size_in_bytes));   in AllocateFromLab()
        64  Heap* heap = CcTest::heap();   in TEST() local
        65  heap->root(Heap::kOnePointerFillerMapRootIndex);   in TEST()
        67  Address base = AllocateLabBackingStore(heap, kLabSize);   in TEST()
        75  LocalAllocationBuffer::FromResult(heap, lab_backing_store, kLabSize);   in TEST()
        84  Heap* heap = CcTest::heap();   in TEST() local
        86  Address base = AllocateLabBackingStore(heap, kLabSize);   in TEST()
        [all …]

  D  test-heap.cc
        55  CHECK(CcTest::heap()->Contains(map));   in CheckMap()
        57  CHECK_EQ(CcTest::heap()->meta_map(), map->map());   in CheckMap()
        65  Heap* heap = CcTest::heap();   in TEST() local
        66  CheckMap(heap->meta_map(), MAP_TYPE, Map::kSize);   in TEST()
        67  CheckMap(heap->heap_number_map(), HEAP_NUMBER_TYPE, HeapNumber::kSize);   in TEST()
        69  CheckMap(heap->type##_map(), SIMD128_VALUE_TYPE, Type::kSize);   in TEST()
        72  CheckMap(heap->fixed_array_map(), FIXED_ARRAY_TYPE, kVariableSizeSentinel);   in TEST()
        73  CheckMap(heap->string_map(), STRING_TYPE, kVariableSizeSentinel);   in TEST()
       146  Heap* heap = isolate->heap();   in TEST() local
       208  CheckOddball(isolate, heap->true_value(), "true");   in TEST()
       [all …]

  D  utils-inl.h
        24  Heap* heap, int padding_size, PretenureFlag tenure,
        27  Isolate* isolate = heap->isolate();
        32  heap->old_space()->EmptyAllocationInfo();
        33  int overall_free_memory = static_cast<int>(heap->old_space()->Available());
        36  heap->new_space()->DisableInlineAllocationSteps();
        38  static_cast<int>(*heap->new_space()->allocation_limit_address() -
        39  *heap->new_space()->allocation_top_address());
        51  heap->CreateFillerObjectAt(*heap->old_space()->allocation_top_address(),
        57  CHECK((tenure == NOT_TENURED && heap->InNewSpace(*handles.back())) ||
        58  (tenure == TENURED && heap->InOldSpace(*handles.back())));
        [all …]

  D  test-alloc.cc
        40  Heap* heap = CcTest::heap();   in AllocateAfterFailures() local
        43  heap->AllocateByteArray(100).ToObjectChecked();   in AllocateAfterFailures()
        44  heap->AllocateFixedArray(100, NOT_TENURED).ToObjectChecked();   in AllocateAfterFailures()
        48  heap->AllocateFixedArray(100).ToObjectChecked();   in AllocateAfterFailures()
        49  heap->AllocateHeapNumber(0.42).ToObjectChecked();   in AllocateAfterFailures()
        50  Object* object = heap->AllocateJSObject(   in AllocateAfterFailures()
        52  heap->CopyJSObject(JSObject::cast(object)).ToObjectChecked();   in AllocateAfterFailures()
        55  SimulateFullSpace(heap->old_space());   in AllocateAfterFailures()
        56  heap->AllocateByteArray(100, TENURED).ToObjectChecked();   in AllocateAfterFailures()
        59  SimulateFullSpace(heap->old_space());   in AllocateAfterFailures()
        [all …]

  D  test-mark-compact.cc
        82  Heap* heap = CcTest::heap();   in HEAP_TEST() local
        83  heap->ConfigureHeap(1, 1, 1, 0);   in HEAP_TEST()
        91  Object* obj = heap->AllocateFixedArray(array_length).ToObjectChecked();   in HEAP_TEST()
        95  CHECK(heap->InSpace(*array, NEW_SPACE));   in HEAP_TEST()
        98  heap->CollectAllGarbage();   in HEAP_TEST()
        99  heap->CollectAllGarbage();   in HEAP_TEST()
       102  CHECK(heap->InSpace(*array, OLD_SPACE));   in HEAP_TEST()
       108  Heap* heap = CcTest::heap();   in HEAP_TEST() local
       109  heap->ConfigureHeap(1, 1, 1, 0);   in HEAP_TEST()
       117  Object* obj = heap->AllocateFixedArray(array_length).ToObjectChecked();   in HEAP_TEST()
       [all …]

/external/v8/src/snapshot/

  D  natives-common.cc
        15  FixedArray* NativesCollection<CORE>::GetSourceCache(Heap* heap) {   in GetSourceCache() argument
        16  return heap->natives_source_cache();   in GetSourceCache()
        21  FixedArray* NativesCollection<EXPERIMENTAL>::GetSourceCache(Heap* heap) {   in GetSourceCache() argument
        22  return heap->experimental_natives_source_cache();   in GetSourceCache()
        27  FixedArray* NativesCollection<EXTRAS>::GetSourceCache(Heap* heap) {   in GetSourceCache() argument
        28  return heap->extra_natives_source_cache();   in GetSourceCache()
        33  FixedArray* NativesCollection<EXPERIMENTAL_EXTRAS>::GetSourceCache(Heap* heap) {   in GetSourceCache() argument
        34  return heap->experimental_extra_natives_source_cache();   in GetSourceCache()
        39  void NativesCollection<type>::UpdateSourceCache(Heap* heap) {   in UpdateSourceCache() argument
        41  Object* source = GetSourceCache(heap)->get(i);   in UpdateSourceCache()
        [all …]

/external/bzip2/

  D  huffman.c
        36  zz = z; tmp = heap[zz]; \
        37  while (weight[tmp] < weight[heap[zz >> 1]]) { \
        38  heap[zz] = heap[zz >> 1]; \
        41  heap[zz] = tmp; \
        47  zz = z; tmp = heap[zz]; \
        52  weight[heap[yy+1]] < weight[heap[yy]]) \
        54  if (weight[tmp] < weight[heap[yy]]) break; \
        55  heap[zz] = heap[yy]; \
        58  heap[zz] = tmp; \
        75  Int32 heap [ BZ_MAX_ALPHA_SIZE + 2 ]; in BZ2_hbMakeCodeLengths() local
        [all …]

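BZ2_hbMakeCodeLengths uses the UPHEAP/DOWNHEAP macros shown above to keep a weight-ordered heap while repeatedly merging the two lightest nodes, which is the standard way to derive Huffman code lengths. A compact sketch of that merge loop using std::priority_queue follows; it omits bzip2's packed depth/weight encoding and its rescale-and-retry pass for overlong codes, and the function name is an assumption.

    #include <queue>
    #include <utility>
    #include <vector>

    // Returns each symbol's code length, i.e. its depth in the Huffman tree.
    std::vector<int> HuffmanCodeLengths(const std::vector<long>& weight) {
      int n = static_cast<int>(weight.size());
      if (n == 0) return {};
      using Node = std::pair<long, int>;  // (combined weight, node index)
      std::vector<int> parent(2 * n - 1, -1);
      std::priority_queue<Node, std::vector<Node>, std::greater<Node>> heap;
      for (int i = 0; i < n; ++i) heap.push({weight[i], i});
      int next = n;  // internal nodes are numbered n .. 2n-2
      while (heap.size() > 1) {
        Node a = heap.top(); heap.pop();  // two lightest nodes
        Node b = heap.top(); heap.pop();
        parent[a.second] = parent[b.second] = next;
        heap.push({a.first + b.first, next});
        ++next;
      }
      std::vector<int> len(n, 0);
      for (int i = 0; i < n; ++i)
        for (int p = parent[i]; p != -1; p = parent[p]) ++len[i];  // count depth
      return len;
    }
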
/external/v8/test/cctest/

  D  test-weakmaps.cc
        71  Heap* heap = isolate->heap();   in TEST() local
       100  heap->CollectAllGarbage(false);   in TEST()
       119  heap->CollectAllGarbage(false);   in TEST()
       124  heap->CollectAllGarbage(false);   in TEST()
       136  Heap* heap = isolate->heap();   in TEST() local
       162  heap->CollectAllGarbage(false);   in TEST()
       180  Heap* heap = isolate->heap();   in TEST() local
       188  Page* first_page = heap->old_space()->anchor()->next_page();   in TEST()
       189  SimulateFullSpace(heap->old_space());   in TEST()
       196  CHECK(!heap->InNewSpace(object->address()));   in TEST()
       [all …]