/art/runtime/base/ |
D | arena_allocator_test.cc |
    160  const size_t new_size = ArenaAllocator::kAlignment * 3;  in TEST_F() local
    161  void* realloc_allocation = arena.Realloc(original_allocation, original_size, new_size);  in TEST_F()
    173  const size_t new_size = ArenaAllocator::kAlignment * 2 + (ArenaAllocator::kAlignment / 2);  in TEST_F() local
    174  void* realloc_allocation = arena.Realloc(original_allocation, original_size, new_size);  in TEST_F()
    186  const size_t new_size = ArenaAllocator::kAlignment * 4;  in TEST_F() local
    187  void* realloc_allocation = arena.Realloc(original_allocation, original_size, new_size);  in TEST_F()
    199  const size_t new_size = ArenaAllocator::kAlignment * 3;  in TEST_F() local
    200  void* realloc_allocation = arena.Realloc(original_allocation, original_size, new_size);  in TEST_F()
    216  const size_t new_size = arena_allocator::kArenaDefaultSize + ArenaAllocator::kAlignment * 2;  in TEST_F() local
    217  void* realloc_allocation = arena.Realloc(original_allocation, original_size, new_size);  in TEST_F()
    [all …]
|
D | bit_vector.cc |
    353  uint32_t new_size = BitsToWords(idx + 1);  in EnsureSize() local
    354  DCHECK_GT(new_size, storage_size_);  in EnsureSize()
    356  static_cast<uint32_t*>(allocator_->Alloc(new_size * kWordBytes));  in EnsureSize()
    359  memset(&new_storage[storage_size_], 0, (new_size - storage_size_) * kWordBytes);  in EnsureSize()
    367  storage_size_ = new_size;  in EnsureSize()
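The EnsureSize() lines above show the grow-and-zero pattern: compute the word count that covers the requested bit, allocate a larger word array, copy the old words, clear only the newly appended tail, and publish the new size last. A minimal standalone sketch of that pattern, using plain new[]/delete[] in place of ART's arena allocator; SimpleBitVector and its members are illustrative stand-ins, not ART's BitVector:

    #include <cstdint>
    #include <cstring>

    class SimpleBitVector {
     public:
      ~SimpleBitVector() { delete[] storage_; }

      // Make sure bit `idx` is addressable, zero-filling only the new words.
      void EnsureSize(uint32_t idx) {
        uint32_t new_size = BitsToWords(idx + 1);
        if (new_size <= storage_size_) {
          return;  // Already large enough.
        }
        uint32_t* new_storage = new uint32_t[new_size];
        if (storage_size_ != 0) {
          std::memcpy(new_storage, storage_, storage_size_ * sizeof(uint32_t));
        }
        std::memset(new_storage + storage_size_, 0,
                    (new_size - storage_size_) * sizeof(uint32_t));
        delete[] storage_;
        storage_ = new_storage;
        storage_size_ = new_size;  // Publish the new word count last.
      }

      void SetBit(uint32_t idx) {
        EnsureSize(idx);
        storage_[idx / kWordBits] |= 1u << (idx % kWordBits);
      }

      bool IsBitSet(uint32_t idx) const {
        return idx / kWordBits < storage_size_ &&
               (storage_[idx / kWordBits] & (1u << (idx % kWordBits))) != 0;
      }

     private:
      static constexpr uint32_t kWordBits = 32;
      static uint32_t BitsToWords(uint32_t bits) {
        return (bits + kWordBits - 1) / kWordBits;
      }

      uint32_t* storage_ = nullptr;
      uint32_t storage_size_ = 0;  // In words.
    };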
|
D | arena_allocator.h |
    330  size_t new_size,
    332  DCHECK_GE(new_size, ptr_size);
    341  const size_t aligned_new_size = RoundUp(new_size, kAlignment);
    352  auto* new_ptr = Alloc(new_size, kind);  // Note: Alloc will take care of aligning new_size.
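The arena_allocator.h fragment above hints at the Realloc fast path: the new size is rounded up to kAlignment and, when the block being resized happens to be the most recent allocation with room left in the arena, it is grown in place; otherwise a fresh allocation is made and the data copied. A stripped-down sketch of that policy over a single fixed buffer, assuming a bump-pointer arena; ArenaSketch and its fields are simplified placeholders rather than ART's ArenaAllocator:

    #include <cassert>
    #include <cstddef>
    #include <cstdint>
    #include <cstring>

    class ArenaSketch {
     public:
      static constexpr size_t kAlignment = 8;

      void* Alloc(size_t bytes) {
        bytes = RoundUp(bytes, kAlignment);
        if (ptr_ + bytes > end_) {
          return nullptr;  // Arena exhausted (the real allocator starts a new arena here).
        }
        uint8_t* result = ptr_;
        ptr_ += bytes;
        return result;
      }

      // Grow `ptr` from `ptr_size` to `new_size` bytes, in place when possible.
      void* Realloc(void* ptr, size_t ptr_size, size_t new_size) {
        assert(new_size >= ptr_size);  // Mirrors the DCHECK_GE in the listing.
        const size_t aligned_ptr_size = RoundUp(ptr_size, kAlignment);
        const size_t aligned_new_size = RoundUp(new_size, kAlignment);
        uint8_t* old_end = static_cast<uint8_t*>(ptr) + aligned_ptr_size;
        if (old_end == ptr_ && ptr_ + (aligned_new_size - aligned_ptr_size) <= end_) {
          // `ptr` is the last allocation: just bump the cursor, no copy needed.
          ptr_ += aligned_new_size - aligned_ptr_size;
          return ptr;
        }
        void* new_ptr = Alloc(new_size);  // Alloc takes care of aligning new_size.
        if (new_ptr != nullptr) {
          std::memcpy(new_ptr, ptr, ptr_size);
        }
        return new_ptr;
      }

     private:
      static size_t RoundUp(size_t x, size_t n) { return (x + n - 1) & ~(n - 1); }

      alignas(kAlignment) uint8_t buffer_[4096];
      uint8_t* ptr_ = buffer_;
      uint8_t* end_ = buffer_ + sizeof(buffer_);
    };

The in-place branch matters because growable arena containers usually resize their most recent allocation, so the common case costs nothing more than moving the bump pointer.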
|
D | hash_set.h |
    608  void Resize(size_t new_size) {  in Resize() argument
    609  if (new_size < kMinBuckets) {  in Resize()
    610  new_size = kMinBuckets;  in Resize()
    612  DCHECK_GE(new_size, Size());  in Resize()
    617  AllocateStorage(new_size);  in Resize()
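Resize() in the hash_set.h listing clamps the requested bucket count to a minimum, expects it never to drop below the number of live elements, and then reallocates storage, which implies rehashing every element. A toy open-addressing set showing just that resize policy; IntHashSet, the kEmpty sentinel, and the 0.75 load-factor trigger are assumptions of this sketch, not ART's HashSet internals:

    #include <cstddef>
    #include <functional>
    #include <utility>
    #include <vector>

    class IntHashSet {
     public:
      static constexpr size_t kMinBuckets = 8;

      IntHashSet() : buckets_(kMinBuckets, kEmpty) {}

      void Insert(int value) {
        if ((num_elements_ + 1) * 4 > buckets_.size() * 3) {  // Keep load factor under 0.75.
          Resize(buckets_.size() * 2);
        }
        InsertNoResize(value);
      }

      void Resize(size_t new_size) {
        if (new_size < kMinBuckets) {
          new_size = kMinBuckets;    // Clamp, as in the listing above.
        }
        if (new_size < num_elements_) {
          new_size = num_elements_;  // Never drop below the live element count.
        }
        std::vector<int> old = std::move(buckets_);
        buckets_.assign(new_size, kEmpty);
        num_elements_ = 0;
        for (int v : old) {
          if (v != kEmpty) InsertNoResize(v);  // Rehash into the new storage.
        }
      }

      size_t Size() const { return num_elements_; }

     private:
      static constexpr int kEmpty = -1;  // Sentinel: this toy set cannot store -1.

      void InsertNoResize(int value) {
        size_t index = std::hash<int>()(value) % buckets_.size();
        while (buckets_[index] != kEmpty) {
          if (buckets_[index] == value) return;  // Already present.
          index = (index + 1) % buckets_.size();
        }
        buckets_[index] = value;
        ++num_elements_;
      }

      std::vector<int> buckets_;
      size_t num_elements_ = 0;
    };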
|
/art/runtime/ |
D | linear_alloc.cc |
    26  void* LinearAlloc::Realloc(Thread* self, void* ptr, size_t old_size, size_t new_size) {  in Realloc() argument
    28  return allocator_.Realloc(ptr, old_size, new_size);  in Realloc()
|
D | indirect_reference_table.cc |
    209  bool IndirectReferenceTable::Resize(size_t new_size, std::string* error_msg) {  in Resize() argument
    210  CHECK_GT(new_size, max_entries_);  in Resize()
    213  if (new_size > kMaxEntries) {  in Resize()
    214  *error_msg = android::base::StringPrintf("Requested size exceeds maximum: %zu", new_size);  in Resize()
    219  const size_t table_bytes = new_size * sizeof(IrtEntry);  in Resize()
    234  max_entries_ = new_size;  in Resize()
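IndirectReferenceTable::Resize() above is a grow-only resize with a hard ceiling: the new size must exceed the current capacity, must not pass kMaxEntries, and the failure reason is written to the error_msg out-parameter before any memory is touched. A sketch of that validation shape over a std::vector-backed table; ReferenceTableSketch, Entry, and the limit value are placeholders, and the real table remaps its backing memory rather than calling vector::resize:

    #include <cstddef>
    #include <cstdio>
    #include <string>
    #include <vector>

    struct Entry { void* reference = nullptr; };

    class ReferenceTableSketch {
     public:
      static constexpr size_t kMaxEntries = 1024;

      // Grow-only resize: reject shrinks and anything past the hard limit,
      // reporting the reason through `error_msg`.
      bool Resize(size_t new_size, std::string* error_msg) {
        if (new_size <= entries_.size()) {
          *error_msg = "Resize may only grow the table";  // The listing CHECKs this instead.
          return false;
        }
        if (new_size > kMaxEntries) {
          char buf[64];
          std::snprintf(buf, sizeof(buf), "Requested size exceeds maximum: %zu", new_size);
          *error_msg = buf;
          return false;
        }
        const size_t table_bytes = new_size * sizeof(Entry);  // What a real table would map.
        (void)table_bytes;
        entries_.resize(new_size);  // Stand-in for remapping the backing memory.
        return true;
      }

      size_t MaxEntries() const { return entries_.size(); }

     private:
      std::vector<Entry> entries_;
    };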
|
D | mem_map.cc |
    792  void MemMap::SetSize(size_t new_size) {  in SetSize() argument
    793  if (new_size == base_size_) {  in SetSize()
    796  CHECK_ALIGNED(new_size, kPageSize);  in SetSize()
    798  CHECK_LE(new_size, base_size_);  in SetSize()
    801  new_size),  in SetSize()
    802  base_size_ - new_size);  in SetSize()
    803  CHECK_EQ(munmap(reinterpret_cast<void*>(reinterpret_cast<uintptr_t>(BaseBegin()) + new_size),  in SetSize()
    804  base_size_ - new_size), 0) << new_size << " " << base_size_;  in SetSize()
    805  base_size_ = new_size;  in SetSize()
    806  size_ = new_size;  in SetSize()
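MemMap::SetSize() above is a shrink-only operation: the new size must stay page aligned and no larger than the current mapping, and the tail beyond new_size is handed back to the kernel with munmap before the bookkeeping fields are updated. A self-contained POSIX sketch of that tail-unmapping step; MappingSketch is illustrative, and error handling is reduced to asserts and perror:

    #include <sys/mman.h>
    #include <unistd.h>
    #include <cassert>
    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    class MappingSketch {
     public:
      explicit MappingSketch(size_t size) : size_(size) {
        base_ = mmap(nullptr, size_, PROT_READ | PROT_WRITE,
                     MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
        assert(base_ != MAP_FAILED);
      }

      ~MappingSketch() {
        if (size_ != 0) {
          munmap(base_, size_);
        }
      }

      // Shrink-only: release the pages past `new_size` back to the kernel.
      void SetSize(size_t new_size) {
        if (new_size == size_) {
          return;  // Nothing to do.
        }
        assert(new_size % static_cast<size_t>(sysconf(_SC_PAGESIZE)) == 0);  // Stay page aligned.
        assert(new_size <= size_);  // Growing is not supported here.
        void* tail = reinterpret_cast<void*>(
            reinterpret_cast<uintptr_t>(base_) + new_size);
        if (munmap(tail, size_ - new_size) != 0) {
          std::perror("munmap");    // The listing CHECKs success instead.
        }
        size_ = new_size;
      }

      void* Begin() const { return base_; }
      size_t Size() const { return size_; }

     private:
      void* base_;
      size_t size_;
    };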
|
D | linear_alloc.h | 35 void* Realloc(Thread* self, void* ptr, size_t old_size, size_t new_size) REQUIRES(!lock_);
|
D | mem_map.h | 148 void SetSize(size_t new_size);
|
D | indirect_reference_table.h | 378 bool Resize(size_t new_size, std::string* error_msg);
|
/art/runtime/gc/space/ |
D | dlmalloc_space.cc |
    252  void DlMallocSpace::SetFootprintLimit(size_t new_size) {  in SetFootprintLimit() argument
    254  VLOG(heap) << "DlMallocSpace::SetFootprintLimit " << PrettySize(new_size);  in SetFootprintLimit()
    258  if (new_size < current_space_size) {  in SetFootprintLimit()
    260  new_size = current_space_size;  in SetFootprintLimit()
    262  mspace_set_footprint_limit(mspace_, new_size);  in SetFootprintLimit()
|
D | rosalloc_space.cc |
    277  void RosAllocSpace::SetFootprintLimit(size_t new_size) {  in SetFootprintLimit() argument
    279  VLOG(heap) << "RosAllocSpace::SetFootprintLimit " << PrettySize(new_size);  in SetFootprintLimit()
    283  if (new_size < current_space_size) {  in SetFootprintLimit()
    285  new_size = current_space_size;  in SetFootprintLimit()
    287  rosalloc_->SetFootprintLimit(new_size);  in SetFootprintLimit()
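Both SetFootprintLimit() listings above (DlMallocSpace and RosAllocSpace) apply the same clamp before talking to the underlying allocator: a requested footprint limit smaller than what the space currently occupies is raised to the current size, so memory already in use is never pulled out from under the allocator. The policy boils down to a one-liner; the function name here is ours, not ART's:

    #include <algorithm>
    #include <cstddef>

    // Never let the footprint limit drop below the space's current size.
    size_t ClampFootprintLimit(size_t requested_limit, size_t current_space_size) {
      return std::max(requested_limit, current_space_size);
    }

For example, asking for a 1 MiB limit while 4 MiB are already in use yields 4 MiB, while a request above the current size passes through unchanged.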
|
/art/compiler/utils/ |
D | assembler.h |
    116  void Resize(size_t new_size) {  in Resize() argument
    117  if (new_size > Capacity()) {  in Resize()
    118  ExtendCapacity(new_size);  in Resize()
    120  cursor_ = contents_ + new_size;  in Resize()
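The assembler.h Resize() above works on a growable code buffer: capacity is extended only when the requested size does not fit, and the write cursor is then placed at the new end. A sketch of that cursor-plus-capacity arrangement over a heap byte buffer; CodeBufferSketch, Emit8, and the geometric growth factor are assumptions of this sketch, not the real AssemblerBuffer:

    #include <cstddef>
    #include <cstdint>
    #include <cstring>

    class CodeBufferSketch {
     public:
      ~CodeBufferSketch() { delete[] contents_; }

      size_t Size() const { return static_cast<size_t>(cursor_ - contents_); }
      size_t Capacity() const { return capacity_; }

      // Grow the backing storage only if `new_size` does not fit, then put the
      // cursor at the new end, as in the listing above.
      void Resize(size_t new_size) {
        if (new_size > Capacity()) {
          ExtendCapacity(new_size);
        }
        cursor_ = contents_ + new_size;
      }

      // Append one byte, growing the buffer as needed.
      void Emit8(uint8_t value) {
        const size_t old_size = Size();
        Resize(old_size + 1);
        contents_[old_size] = value;
      }

     private:
      void ExtendCapacity(size_t min_capacity) {
        const size_t old_size = Size();
        size_t new_capacity = capacity_ == 0 ? 64 : capacity_;
        while (new_capacity < min_capacity) {
          new_capacity *= 2;  // Grow geometrically to amortize copies.
        }
        uint8_t* new_contents = new uint8_t[new_capacity];
        if (old_size != 0) {
          std::memcpy(new_contents, contents_, old_size);
        }
        delete[] contents_;
        contents_ = new_contents;
        cursor_ = contents_ + old_size;
        capacity_ = new_capacity;
      }

      uint8_t* contents_ = nullptr;
      uint8_t* cursor_ = nullptr;
      size_t capacity_ = 0;
    };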
|
/art/runtime/gc/accounting/ |
D | space_bitmap.cc |
    93  size_t new_size = OffsetToIndex(new_end - heap_begin_) * sizeof(intptr_t);  in SetHeapLimit() local
    94  if (new_size < bitmap_size_) {  in SetHeapLimit()
    95  bitmap_size_ = new_size;  in SetHeapLimit()
|
/art/runtime/gc/collector/ |
D | mark_compact.cc |
    251  void MarkCompact::ResizeMarkStack(size_t new_size) {  in ResizeMarkStack() argument
    253  CHECK_LE(mark_stack_->Size(), new_size);  in ResizeMarkStack()
    254  mark_stack_->Resize(new_size);  in ResizeMarkStack()
|
D | semi_space.cc |
    447  void SemiSpace::ResizeMarkStack(size_t new_size) {  in ResizeMarkStack() argument
    449  CHECK_LE(mark_stack_->Size(), new_size);  in ResizeMarkStack()
    450  mark_stack_->Resize(new_size);  in ResizeMarkStack()
|
D | mark_compact.h | 141 void ResizeMarkStack(size_t new_size) REQUIRES_SHARED(Locks::mutator_lock_);
|
D | semi_space.h | 181 void ResizeMarkStack(size_t new_size) REQUIRES_SHARED(Locks::mutator_lock_);
|
D | mark_sweep.h | 262 void ResizeMarkStack(size_t new_size)
|
D | mark_sweep.cc |
    362  void MarkSweep::ResizeMarkStack(size_t new_size) {  in ResizeMarkStack() argument
    369  CHECK_LE(mark_stack_->Size(), new_size);  in ResizeMarkStack()
    370  mark_stack_->Resize(new_size);  in ResizeMarkStack()
|
D | concurrent_copying.cc |
    1058  const size_t new_size = gc_mark_stack_->Capacity() * 2;  in ExpandGcMarkStack() local
    1061  gc_mark_stack_->Resize(new_size);  in ExpandGcMarkStack()
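ExpandGcMarkStack() doubles the mark stack's capacity when it fills up, and the ResizeMarkStack() listings above all CHECK that a resize never loses entries already pushed. A vector-backed sketch of that double-on-overflow policy; MarkStackSketch and PushChecked are names of this sketch, not ART's mark-stack type:

    #include <cassert>
    #include <cstddef>
    #include <vector>

    // Explicitly sized mark stack grown by doubling, roughly the shape of the
    // ResizeMarkStack()/ExpandGcMarkStack() listings above.
    template <typename T>
    class MarkStackSketch {
     public:
      explicit MarkStackSketch(size_t capacity) : capacity_(capacity) {
        slots_.reserve(capacity_);
      }

      size_t Size() const { return slots_.size(); }
      size_t Capacity() const { return capacity_; }
      bool IsFull() const { return Size() == Capacity(); }
      bool IsEmpty() const { return slots_.empty(); }

      void PushChecked(const T& value) {
        if (IsFull()) {
          Resize(Capacity() * 2);  // Double on overflow.
        }
        slots_.push_back(value);
      }

      // A resize must never drop entries that still need to be processed.
      void Resize(size_t new_size) {
        assert(Size() <= new_size);
        slots_.reserve(new_size);
        capacity_ = new_size;
      }

      T Pop() {
        T value = slots_.back();
        slots_.pop_back();
        return value;
      }

     private:
      std::vector<T> slots_;
      size_t capacity_;
    };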
|
/art/compiler/optimizing/ |
D | bounds_check_elimination.cc |
    1872  size_t new_size = graph_->GetReversePostOrder().size();  in Run() local
    1873  DCHECK_GE(new_size, size);  in Run()
    1874  i += new_size - size;  in Run()
    1876  size = new_size;  in Run()
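The Run() fragment above iterates over the graph's reverse post order while the pass may insert new blocks; after each step it re-reads the size, checks that blocks were only added, and advances the index by the difference so the freshly inserted blocks are stepped over rather than revisited. The same bookkeeping in isolation, over a plain vector whose visitor inserts new elements right after the current position; the driver and the example visitor are hypothetical:

    #include <cassert>
    #include <cstddef>
    #include <functional>
    #include <vector>

    // Visit every pre-existing element of `blocks`; `visit` may insert new
    // elements immediately after position `i`, and those are skipped.
    template <typename Block>
    void VisitSkippingInsertions(
        std::vector<Block>& blocks,
        const std::function<void(std::vector<Block>&, size_t)>& visit) {
      size_t size = blocks.size();
      for (size_t i = 0; i < blocks.size(); ++i) {
        visit(blocks, i);
        const size_t new_size = blocks.size();
        assert(new_size >= size);  // The pass only ever adds blocks.
        i += new_size - size;      // Step over whatever was just inserted.
        size = new_size;
      }
    }

    // Example: duplicate negative entries in place; duplicates are not revisited.
    void DuplicateNegatives(std::vector<int>& blocks) {
      VisitSkippingInsertions<int>(blocks, [](std::vector<int>& b, size_t i) {
        if (b[i] < 0) {
          b.insert(b.begin() + static_cast<std::ptrdiff_t>(i) + 1, b[i]);
        }
      });
    }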
|
D | nodes.h |
    7040  size_t new_size = old_size + number_of_new_blocks;  in FOR_EACH_CONCRETE_INSTRUCTION() local
    7041  blocks->resize(new_size);  in FOR_EACH_CONCRETE_INSTRUCTION()
|
/art/compiler/utils/mips64/ |
D | assembler_mips64.cc |
    2299  uint32_t new_size = GetSize();  in PromoteIfNeeded() local
    2300  CHECK_GT(new_size, old_size);  in PromoteIfNeeded()
    2301  return new_size - old_size;  in PromoteIfNeeded()
    2311  uint32_t new_size = GetSize();  in PromoteIfNeeded() local
    2312  CHECK_GT(new_size, old_size);  in PromoteIfNeeded()
    2313  return new_size - old_size;  in PromoteIfNeeded()
|
/art/compiler/utils/mips/ |
D | assembler_mips.cc |
    3470  uint32_t new_size = GetSize();  in PromoteIfNeeded() local
    3471  CHECK_GT(new_size, old_size);  in PromoteIfNeeded()
    3472  return new_size - old_size;  in PromoteIfNeeded()
    3482  uint32_t new_size = GetSize();  in PromoteIfNeeded() local
    3483  CHECK_GT(new_size, old_size);  in PromoteIfNeeded()
    3484  return new_size - old_size;  in PromoteIfNeeded()
|