/art/runtime/base/ |
D | arena_allocator_test.cc |
      162  const size_t new_size = ArenaAllocator::kAlignment * 3;  in TEST_F() local
      163  void* realloc_allocation = allocator.Realloc(original_allocation, original_size, new_size);  in TEST_F()
      175  const size_t new_size = ArenaAllocator::kAlignment * 2 + (ArenaAllocator::kAlignment / 2);  in TEST_F() local
      176  void* realloc_allocation = allocator.Realloc(original_allocation, original_size, new_size);  in TEST_F()
      188  const size_t new_size = ArenaAllocator::kAlignment * 4;  in TEST_F() local
      189  void* realloc_allocation = allocator.Realloc(original_allocation, original_size, new_size);  in TEST_F()
      201  const size_t new_size = ArenaAllocator::kAlignment * 3;  in TEST_F() local
      202  void* realloc_allocation = allocator.Realloc(original_allocation, original_size, new_size);  in TEST_F()
      218  const size_t new_size = arena_allocator::kArenaDefaultSize + ArenaAllocator::kAlignment * 2;  in TEST_F() local
      219  void* realloc_allocation = allocator.Realloc(original_allocation, original_size, new_size);  in TEST_F()
      [all …]
|
D | arena_allocator.h |
      333  size_t new_size,
      335  DCHECK_GE(new_size, ptr_size);
      344  const size_t aligned_new_size = RoundUp(new_size, kAlignment);
      355  auto* new_ptr = Alloc(new_size, kind);  // Note: Alloc will take care of aligning new_size.
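
The Realloc() hits above show the arena pattern: a block may only grow, the new size is rounded up to kAlignment, and when the block cannot be extended in place the call falls back to Alloc(). A minimal sketch of that pattern, assuming a single fixed-capacity arena (ArenaBumpAllocator and its capacity handling are illustrative stand-ins, not ART's ArenaAllocator):

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <cstring>

// Simplified bump-pointer arena; illustrative only, not ART's ArenaAllocator.
class ArenaBumpAllocator {
 public:
  static constexpr size_t kAlignment = 8;

  explicit ArenaBumpAllocator(size_t capacity)
      : begin_(new uint8_t[capacity]), ptr_(begin_), end_(begin_ + capacity) {}
  ~ArenaBumpAllocator() { delete[] begin_; }

  void* Alloc(size_t bytes) {
    bytes = RoundUp(bytes, kAlignment);
    if (ptr_ + bytes > end_) return nullptr;  // The real allocator would start a new arena instead.
    uint8_t* result = ptr_;
    ptr_ += bytes;
    return result;
  }

  // Grow-only realloc: extend in place when ptr is the most recent allocation,
  // otherwise allocate a new block and copy the old contents over.
  void* Realloc(void* ptr, size_t ptr_size, size_t new_size) {
    assert(new_size >= ptr_size);
    const size_t aligned_ptr_size = RoundUp(ptr_size, kAlignment);
    const size_t aligned_new_size = RoundUp(new_size, kAlignment);
    uint8_t* end_of_block = static_cast<uint8_t*>(ptr) + aligned_ptr_size;
    if (end_of_block == ptr_ &&
        end_of_block + (aligned_new_size - aligned_ptr_size) <= end_) {
      ptr_ += aligned_new_size - aligned_ptr_size;  // Cheap path: just bump the arena pointer.
      return ptr;
    }
    void* new_ptr = Alloc(new_size);  // Alloc takes care of aligning new_size.
    if (new_ptr != nullptr) {
      std::memcpy(new_ptr, ptr, ptr_size);
    }
    return new_ptr;
  }

 private:
  static size_t RoundUp(size_t x, size_t n) { return (x + n - 1) & ~(n - 1); }

  uint8_t* begin_;
  uint8_t* ptr_;
  uint8_t* end_;
};

Extending the most recent allocation in place keeps the common grow-the-last-buffer case cheap; anything else degrades to allocate-and-copy.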
|
/art/runtime/ |
D | linear_alloc.cc |
      26  void* LinearAlloc::Realloc(Thread* self, void* ptr, size_t old_size, size_t new_size) {  in Realloc() argument
      28  return allocator_.Realloc(ptr, old_size, new_size);  in Realloc()
|
D | indirect_reference_table.cc |
      209  bool IndirectReferenceTable::Resize(size_t new_size, std::string* error_msg) {  in Resize() argument
      210  CHECK_GT(new_size, max_entries_);  in Resize()
      213  if (new_size > kMaxEntries) {  in Resize()
      214  *error_msg = android::base::StringPrintf("Requested size exceeds maximum: %zu", new_size);  in Resize()
      219  const size_t table_bytes = new_size * sizeof(IrtEntry);  in Resize()
      234  max_entries_ = new_size;  in Resize()
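
Resize() here is grow-only and bounded: the new capacity must exceed the current max_entries_ and stay within kMaxEntries, otherwise an error message is reported. A hedged sketch of that contract (GrowOnlyTable, its entry type, and the limit value are placeholders; the real table reallocates IrtEntry storage in the elided lines):

#include <cstddef>
#include <string>
#include <vector>

// Illustrative grow-only table mirroring the Resize() contract above.
class GrowOnlyTable {
 public:
  static constexpr size_t kMaxEntries = 1u << 19;  // Placeholder limit.

  bool Resize(size_t new_size, std::string* error_msg) {
    if (new_size <= max_entries_) {
      // The real code CHECKs this precondition instead of reporting it.
      *error_msg = "Resize is grow-only";
      return false;
    }
    if (new_size > kMaxEntries) {
      *error_msg = "Requested size exceeds maximum: " + std::to_string(new_size);
      return false;
    }
    entries_.resize(new_size);   // Placeholder for the real table's storage reallocation.
    max_entries_ = new_size;
    return true;
  }

 private:
  std::vector<void*> entries_;
  size_t max_entries_ = 0;
};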
|
D | linear_alloc.h | 35 void* Realloc(Thread* self, void* ptr, size_t old_size, size_t new_size) REQUIRES(!lock_);
|
D | mem_map.h | 178 void SetSize(size_t new_size);
|
D | mem_map.cc |
      883  void MemMap::SetSize(size_t new_size) {  in SetSize() argument
      884  CHECK_LE(new_size, size_);  in SetSize()
      885  size_t new_base_size = RoundUp(new_size + static_cast<size_t>(PointerDiff(Begin(), BaseBegin())),  in SetSize()
      888  size_ = new_size;  in SetSize()
      899  size_ = new_size;  in SetSize()
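
SetSize() is shrink-only: the new logical size may not exceed the current one, and the covered byte count is rounded up to whole pages before the mapping is trimmed. A rough sketch of that shrink-to-page-boundary idea, assuming an anonymous private mapping on Linux (ShrinkableMapping is illustrative, not ART's MemMap, and error handling is omitted):

#include <sys/mman.h>
#include <unistd.h>
#include <cassert>
#include <cstddef>
#include <cstdint>

// Illustrative shrink-only mapping wrapper: the logical size only goes down,
// and whole pages freed at the tail are returned to the kernel.
class ShrinkableMapping {
 public:
  explicit ShrinkableMapping(size_t size)
      : size_(RoundUpToPage(size)),
        base_(mmap(nullptr, size_, PROT_READ | PROT_WRITE,
                   MAP_PRIVATE | MAP_ANONYMOUS, -1, 0)) {}
  ~ShrinkableMapping() { munmap(base_, RoundUpToPage(size_)); }

  void SetSize(size_t new_size) {
    assert(new_size <= size_);
    const size_t old_base_size = RoundUpToPage(size_);
    const size_t new_base_size = RoundUpToPage(new_size);
    if (new_base_size < old_base_size) {
      // Unmap the now-unused whole pages at the end of the mapping.
      munmap(static_cast<uint8_t*>(base_) + new_base_size,
             old_base_size - new_base_size);
    }
    size_ = new_size;
  }

 private:
  static size_t RoundUpToPage(size_t x) {
    const size_t page = static_cast<size_t>(sysconf(_SC_PAGESIZE));
    return (x + page - 1) & ~(page - 1);
  }

  size_t size_;
  void* base_;
};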
|
D | indirect_reference_table.h | 386 bool Resize(size_t new_size, std::string* error_msg);
|
/art/libartbase/base/ |
D | bit_vector.cc |
      353  uint32_t new_size = BitsToWords(idx + 1);  in EnsureSize() local
      354  DCHECK_GT(new_size, storage_size_);  in EnsureSize()
      356  static_cast<uint32_t*>(allocator_->Alloc(new_size * kWordBytes));  in EnsureSize()
      359  memset(&new_storage[storage_size_], 0, (new_size - storage_size_) * kWordBytes);  in EnsureSize()
      367  storage_size_ = new_size;  in EnsureSize()
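
EnsureSize() grows the word array on demand: it computes how many words are needed to cover bit idx, allocates a larger array, copies the old words, and zeroes only the freshly added tail. A simplified sketch of the same grow-on-demand behavior, leaning on std::vector instead of ART's arena allocator and explicit memset (GrowableBitVector is a placeholder name):

#include <cstdint>
#include <vector>

// Illustrative grow-on-demand bit vector; not ART's BitVector.
class GrowableBitVector {
 public:
  void SetBit(uint32_t idx) {
    EnsureSize(idx);
    storage_[idx / 32u] |= 1u << (idx % 32u);
  }

  bool IsBitSet(uint32_t idx) const {
    return idx / 32u < storage_.size() && (storage_[idx / 32u] & (1u << (idx % 32u))) != 0;
  }

 private:
  static uint32_t BitsToWords(uint32_t bits) { return (bits + 31u) / 32u; }

  void EnsureSize(uint32_t idx) {
    const uint32_t new_size = BitsToWords(idx + 1u);
    if (new_size > storage_.size()) {
      storage_.resize(new_size, 0u);  // std::vector copies the old words and zeroes the tail.
    }
  }

  std::vector<uint32_t> storage_;
};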
|
D | hash_set.h |
      614  void Resize(size_t new_size) {  in Resize() argument
      615  if (new_size < kMinBuckets) {  in Resize()
      616  new_size = kMinBuckets;  in Resize()
      618  DCHECK_GE(new_size, Size());  in Resize()
      623  AllocateStorage(new_size);  in Resize()
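
HashSet::Resize() clamps the request up to kMinBuckets, checks that the live elements still fit, and then allocates new storage and rehashes. A small open-addressing sketch of that clamp-and-rehash step (IntHashSet, its sentinel value, and the 50% load-factor policy are illustrative choices, not ART's HashSet):

#include <cassert>
#include <cstddef>
#include <functional>
#include <vector>

// Illustrative open-addressing set of ints; -1 is reserved as the empty slot.
class IntHashSet {
 public:
  static constexpr size_t kMinBuckets = 8;

  IntHashSet() : buckets_(kMinBuckets, kEmpty) {}

  void Insert(int value) {
    if (2 * (size_ + 1) > buckets_.size()) {
      Resize(2 * buckets_.size());  // Keep the load factor below 50%.
    }
    InsertNoResize(value);
  }

  void Resize(size_t new_size) {
    if (new_size < kMinBuckets) {
      new_size = kMinBuckets;        // Never shrink below the minimum bucket count.
    }
    assert(new_size >= size_);       // Mirrors the DCHECK_GE(new_size, Size()) above.
    std::vector<int> old = std::move(buckets_);
    buckets_.assign(new_size, kEmpty);
    size_ = 0;
    for (int v : old) {
      if (v != kEmpty) InsertNoResize(v);  // Rehash every live element.
    }
  }

 private:
  static constexpr int kEmpty = -1;

  void InsertNoResize(int value) {
    size_t i = std::hash<int>()(value) % buckets_.size();
    while (buckets_[i] != kEmpty && buckets_[i] != value) {
      i = (i + 1) % buckets_.size();  // Linear probing.
    }
    if (buckets_[i] == kEmpty) {
      buckets_[i] = value;
      ++size_;
    }
  }

  std::vector<int> buckets_;
  size_t size_ = 0;
};

Rehashing every live element is unavoidable because the bucket index depends on the table size.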
|
/art/runtime/gc/space/ |
D | dlmalloc_space.cc |
      253  void DlMallocSpace::SetFootprintLimit(size_t new_size) {  in SetFootprintLimit() argument
      255  VLOG(heap) << "DlMallocSpace::SetFootprintLimit " << PrettySize(new_size);  in SetFootprintLimit()
      259  if (new_size < current_space_size) {  in SetFootprintLimit()
      261  new_size = current_space_size;  in SetFootprintLimit()
      263  mspace_set_footprint_limit(mspace_, new_size);  in SetFootprintLimit()
|
D | rosalloc_space.cc |
      278  void RosAllocSpace::SetFootprintLimit(size_t new_size) {  in SetFootprintLimit() argument
      280  VLOG(heap) << "RosAllocSpace::SetFootprintLimit " << PrettySize(new_size);  in SetFootprintLimit()
      284  if (new_size < current_space_size) {  in SetFootprintLimit()
      286  new_size = current_space_size;  in SetFootprintLimit()
      288  rosalloc_->SetFootprintLimit(new_size);  in SetFootprintLimit()
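
Both malloc-space variants above apply the same guard before forwarding the limit to the underlying allocator: a request below the bytes already in use is clamped up to the current space size. A tiny sketch of that clamp (MallocSpaceSketch and footprint_limit_ are stand-ins for mspace_set_footprint_limit() / rosalloc_->SetFootprintLimit()):

#include <algorithm>
#include <cstddef>

// Illustrative footprint-limit setter; shrinking below the bytes already
// handed out would be unsound, so the request is clamped first.
class MallocSpaceSketch {
 public:
  explicit MallocSpaceSketch(size_t current_space_size)
      : current_space_size_(current_space_size) {}

  void SetFootprintLimit(size_t new_size) {
    new_size = std::max(new_size, current_space_size_);
    footprint_limit_ = new_size;  // The real code forwards this to the backing allocator.
  }

  size_t footprint_limit() const { return footprint_limit_; }

 private:
  size_t current_space_size_;
  size_t footprint_limit_ = 0;
};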
|
/art/compiler/utils/ |
D | assembler.h |
      117  void Resize(size_t new_size) {  in Resize() argument
      118  if (new_size > Capacity()) {  in Resize()
      119  ExtendCapacity(new_size);  in Resize()
      121  cursor_ = contents_ + new_size;  in Resize()
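
The assembler buffer's Resize() grows the backing storage only when the request exceeds the current capacity, then simply repositions the write cursor. A sketch of that cursor-plus-capacity scheme on top of std::vector (CodeBufferSketch and the doubling policy in ExtendCapacity() are illustrative):

#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <vector>

// Illustrative code buffer; not ART's AssemblerBuffer.
class CodeBufferSketch {
 public:
  size_t Size() const { return cursor_; }
  size_t Capacity() const { return contents_.capacity(); }

  void Resize(size_t new_size) {
    if (new_size > Capacity()) {
      ExtendCapacity(new_size);
    }
    contents_.resize(new_size);
    cursor_ = new_size;  // The emit position now sits at the new end.
  }

  void Emit8(uint8_t value) {
    Resize(cursor_ + 1);
    contents_[cursor_ - 1] = value;
  }

 private:
  void ExtendCapacity(size_t min_capacity) {
    contents_.reserve(std::max(min_capacity, 2 * Capacity()));  // Grow geometrically.
  }

  std::vector<uint8_t> contents_;
  size_t cursor_ = 0;
};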
|
/art/runtime/gc/accounting/ |
D | space_bitmap.cc |
      99  size_t new_size = OffsetToIndex(new_end - heap_begin_) * sizeof(intptr_t);  in SetHeapLimit() local
      100  if (new_size < bitmap_size_) {  in SetHeapLimit()
      101  bitmap_size_ = new_size;  in SetHeapLimit()
|
/art/runtime/gc/collector/ |
D | mark_compact.cc |
      253  void MarkCompact::ResizeMarkStack(size_t new_size) {  in ResizeMarkStack() argument
      255  CHECK_LE(mark_stack_->Size(), new_size);  in ResizeMarkStack()
      256  mark_stack_->Resize(new_size);  in ResizeMarkStack()
|
D | semi_space.h | 181 void ResizeMarkStack(size_t new_size) REQUIRES_SHARED(Locks::mutator_lock_);
|
D | mark_compact.h | 141 void ResizeMarkStack(size_t new_size) REQUIRES_SHARED(Locks::mutator_lock_);
|
D | semi_space.cc |
      446  void SemiSpace::ResizeMarkStack(size_t new_size) {  in ResizeMarkStack() argument
      448  CHECK_LE(mark_stack_->Size(), new_size);  in ResizeMarkStack()
      449  mark_stack_->Resize(new_size);  in ResizeMarkStack()
|
D | mark_sweep.h | 262 void ResizeMarkStack(size_t new_size)
|
D | mark_sweep.cc |
      385  void MarkSweep::ResizeMarkStack(size_t new_size) {  in ResizeMarkStack() argument
      392  CHECK_LE(mark_stack_->Size(), new_size);  in ResizeMarkStack()
      393  mark_stack_->Resize(new_size);  in ResizeMarkStack()
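
MarkCompact, SemiSpace, and MarkSweep all guard the same invariant before resizing: the pending entries must still fit in the new capacity (CHECK_LE(mark_stack_->Size(), new_size)), because a resize must never drop queued references. A sketch of a mark stack honoring that invariant (MarkStackSketch is a placeholder; the real stack is an atomic stack backed by a memory mapping):

#include <cassert>
#include <cstddef>
#include <vector>

// Illustrative mark stack: Resize() may only be called with a capacity that
// still holds the queued entries.
class MarkStackSketch {
 public:
  explicit MarkStackSketch(size_t capacity) : capacity_(capacity) {
    entries_.reserve(capacity_);
  }

  size_t Size() const { return entries_.size(); }
  size_t Capacity() const { return capacity_; }
  bool IsFull() const { return Size() == Capacity(); }

  void Push(void* ref) {
    assert(!IsFull());
    entries_.push_back(ref);
  }

  void Resize(size_t new_size) {
    assert(Size() <= new_size);  // Resizing must never drop queued references.
    capacity_ = new_size;
    entries_.reserve(capacity_);
  }

 private:
  std::vector<void*> entries_;
  size_t capacity_;
};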
|
D | concurrent_copying.cc |
      1061  const size_t new_size = gc_mark_stack_->Capacity() * 2;  in ExpandGcMarkStack() local
      1064  gc_mark_stack_->Resize(new_size);  in ExpandGcMarkStack()
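
ExpandGcMarkStack() picks the new capacity by doubling the current one before calling Resize(). Reusing the MarkStackSketch from the previous sketch, the grow-on-full step could look like this (PushOntoMarkStack is a hypothetical helper, not an ART function):

#include <cstddef>

// Grow-on-full push, doubling the capacity like ExpandGcMarkStack() above.
void PushOntoMarkStack(MarkStackSketch* stack, void* ref) {
  if (stack->IsFull()) {
    const size_t new_size = stack->Capacity() * 2;  // Geometric growth keeps pushes amortized O(1).
    stack->Resize(new_size);
  }
  stack->Push(ref);
}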
|
/art/compiler/optimizing/ |
D | bounds_check_elimination.cc |
      1962  size_t new_size = graph_->GetReversePostOrder().size();  in Run() local
      1963  DCHECK_GE(new_size, size);  in Run()
      1964  i += new_size - size;  in Run()
      1966  size = new_size;  in Run()
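
The Run() loop walks the reverse post order by index while the pass itself may insert new blocks, so after each step the index is advanced by new_size - size to account for the growth. A self-contained sketch of that index adjustment over a plain vector (MaybeInsertBlocks() stands in for the real per-block transformation):

#include <cstddef>
#include <vector>

// Simulates a pass step that may grow the block list.
static void MaybeInsertBlocks(std::vector<int>* blocks, size_t i) {
  if ((*blocks)[i] % 3 == 0) {
    blocks->insert(blocks->begin() + i, -1);
  }
}

void VisitAllBlocks(std::vector<int>* blocks) {
  size_t size = blocks->size();
  for (size_t i = 0; i < blocks->size(); ++i) {
    MaybeInsertBlocks(blocks, i);
    const size_t new_size = blocks->size();
    i += new_size - size;   // Account for blocks inserted during this step.
    size = new_size;
  }
}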
|
D | nodes.h |
      7606  size_t new_size = old_size + number_of_new_blocks;  in FOR_EACH_CONCRETE_INSTRUCTION() local
      7607  blocks->resize(new_size);  in FOR_EACH_CONCRETE_INSTRUCTION()
|
/art/compiler/utils/mips64/ |
D | assembler_mips64.cc |
      2739  uint32_t new_size = GetSize();  in PromoteIfNeeded() local
      2740  CHECK_GT(new_size, old_size);  in PromoteIfNeeded()
      2741  return new_size - old_size;  in PromoteIfNeeded()
      2751  uint32_t new_size = GetSize();  in PromoteIfNeeded() local
      2752  CHECK_GT(new_size, old_size);  in PromoteIfNeeded()
      2753  return new_size - old_size;  in PromoteIfNeeded()
|
/art/compiler/utils/mips/ |
D | assembler_mips.cc |
      3423  uint32_t new_size = GetSize();  in PromoteIfNeeded() local
      3424  CHECK_GT(new_size, old_size);  in PromoteIfNeeded()
      3425  return new_size - old_size;  in PromoteIfNeeded()
      3435  uint32_t new_size = GetSize();  in PromoteIfNeeded() local
      3436  CHECK_GT(new_size, old_size);  in PromoteIfNeeded()
      3437  return new_size - old_size;  in PromoteIfNeeded()
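
Both MIPS assemblers use the same size-delta pattern in PromoteIfNeeded(): record the buffer size, promote the branch to its longer encoding, and return how many bytes the code grew by so dependent offsets can be fixed up. A hedged sketch of that pattern (the byte buffer and the fake 4-byte promotion are placeholders for the assembler's internals):

#include <cstdint>
#include <vector>

// Illustrative size-delta helper; not the real MIPS assembler logic.
uint32_t PromoteIfNeeded(std::vector<uint8_t>* buffer, bool needs_long_form) {
  const uint32_t old_size = static_cast<uint32_t>(buffer->size());
  if (needs_long_form) {
    buffer->insert(buffer->end(), 4, 0x00);  // Pretend: one extra 4-byte instruction.
  }
  const uint32_t new_size = static_cast<uint32_t>(buffer->size());
  return new_size - old_size;                // Bytes added by the promotion.
}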
|