/art/compiler/utils/

assembler.cc
    42: CHECK_EQ(Capacity(), kInitialBufferCapacity);    (in AssemblerBuffer())
    49: arena_->MakeInaccessible(contents_, Capacity());    (in ~AssemblerBuffer())
    77: size_t old_capacity = Capacity();    (in ExtendCapacity())
    91: CHECK_EQ(Capacity(), new_capacity);    (in ExtendCapacity())

assembler.h
    117: if (new_size > Capacity()) {    (in Resize())
    213: int ComputeGap() { return buffer_->Capacity() - buffer_->Size(); }    (in ComputeGap())
    240: size_t Capacity() const {    (in Capacity() function)
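The assembler.cc and assembler.h hits outline the growth protocol of the assembler's code buffer: free space is the gap between Capacity() and Size() (ComputeGap() at assembler.h:213), the buffer starts at a fixed initial capacity (the CHECK at assembler.cc:42), and ExtendCapacity() is expected to land exactly on the requested capacity (assembler.cc:91). Below is a minimal sketch of that pattern over a plain std::vector rather than ART's arena allocator; GrowableBuffer and its members are illustrative names, not ART's API.

```cpp
#include <algorithm>
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <vector>

// Minimal stand-in for an assembler buffer: Size() is the number of bytes
// emitted so far, Capacity() is the size of the backing store.
class GrowableBuffer {
 public:
  explicit GrowableBuffer(size_t initial_capacity) : storage_(initial_capacity) {}

  size_t Size() const { return size_; }
  size_t Capacity() const { return storage_.size(); }
  // Remaining room before the buffer has to grow (cf. ComputeGap()).
  size_t ComputeGap() const { return Capacity() - Size(); }

  void Emit(uint8_t byte) {
    if (ComputeGap() == 0) {
      // Grow geometrically so repeated Emit() calls stay amortized O(1).
      ExtendCapacity(std::max<size_t>(2 * Capacity(), 64));
    }
    storage_[size_++] = byte;
  }

  void ExtendCapacity(size_t new_capacity) {
    assert(new_capacity > Capacity());
    storage_.resize(new_capacity);
    assert(Capacity() == new_capacity);  // mirrors the CHECK_EQ at assembler.cc:91
  }

 private:
  std::vector<uint8_t> storage_;
  size_t size_ = 0;
};
```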
/art/runtime/

indirect_reference_table_test.cc
    105: EXPECT_EQ(0U, irt.Capacity());    (in TEST_F())
    127: ASSERT_EQ(0U, irt.Capacity());    (in TEST_F())
    139: ASSERT_EQ(3U, irt.Capacity());    (in TEST_F())
    155: ASSERT_EQ(0U, irt.Capacity());    (in TEST_F())
    176: ASSERT_EQ(4U, irt.Capacity()) << "hole not filled";    (in TEST_F())
    184: ASSERT_EQ(3U, irt.Capacity()) << "should be 3 after two deletions";    (in TEST_F())
    191: ASSERT_EQ(0U, irt.Capacity()) << "not empty after split remove";    (in TEST_F())
    207: ASSERT_EQ(0U, irt.Capacity()) << "switching del not empty";    (in TEST_F())
    225: ASSERT_EQ(0U, irt.Capacity()) << "temporal del not empty";    (in TEST_F())
    251: ASSERT_EQ(kTableInitial + 1, irt.Capacity());    (in TEST_F())
    [all …]

jni_env_ext-inl.h
    33: size_t entry_count = locals.Capacity();    (in AddLocalReference())

indirect_reference_table.h
    283: size_t Capacity() const {    (in Capacity() function)
    296: return IrtIterator(table_, 0, Capacity());    (in begin())
    300: return IrtIterator(table_, Capacity(), Capacity());    (in end())

indirect_reference_table.cc
    318: for (size_t i = 0; i < Capacity(); ++i) {    (in AssertEmpty())
    438: const size_t top_index = Capacity();    (in Trim())
    457: for (size_t i = 0; i < Capacity(); ++i) {    (in Dump())
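For IndirectReferenceTable, these hits read Capacity() as the current top index, i.e. the number of slots in use rather than the allocated storage: Trim() names the result top_index (indirect_reference_table.cc:438), AssertEmpty() and Dump() loop over [0, Capacity()), the tests expect it to fall back to 0 once all references are removed, and begin()/end() wrap indices 0 and Capacity(). A minimal sketch of that iteration idiom follows; Entry, RefTable and TableIterator are illustrative stand-ins (the real IrtIterator also carries an explicit limit argument).

```cpp
#include <cstddef>

struct Entry { void* ref; };  // stand-in for an IrtEntry-like slot

// Iterator over the used prefix of a table, in the style of
// indirect_reference_table.h: begin() starts at 0, end() at Capacity().
class TableIterator {
 public:
  TableIterator(Entry* table, size_t i) : table_(table), i_(i) {}
  Entry& operator*() const { return table_[i_]; }
  TableIterator& operator++() { ++i_; return *this; }
  bool operator!=(const TableIterator& other) const { return i_ != other.i_; }
 private:
  Entry* table_;
  size_t i_;
};

class RefTable {
 public:
  // Number of slots currently in use (the top index), not the backing size.
  size_t Capacity() const { return top_index_; }
  TableIterator begin() { return TableIterator(table_, 0); }
  TableIterator end() { return TableIterator(table_, Capacity()); }

 private:
  Entry* table_ = nullptr;
  size_t top_index_ = 0;
};
```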
java_vm_ext.cc
    657: os << "; globals=" << globals_.Capacity();    (in DumpForSigQuit())
    661: if (weak_globals_.Capacity() > 0) {    (in DumpForSigQuit())
    662: os << " (plus " << weak_globals_.Capacity() << " weak)";    (in DumpForSigQuit())

jni_internal_test.cc
    870: return reinterpret_cast<JNIEnvExt*>(env)->locals.Capacity();    (in GetLocalsCapacity())
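The same count also feeds diagnostics: java_vm_ext.cc folds the global and weak-global table sizes into the SIGQUIT dump, and jni_internal_test.cc exposes locals.Capacity() to assert on local-reference bookkeeping. A small, self-contained rendering of that dump fragment, with a hypothetical CountingTable standing in for the real reference tables:

```cpp
#include <cstddef>
#include <iostream>
#include <sstream>
#include <string>

// Hypothetical stand-in for a reference table that reports its used-slot count.
struct CountingTable {
  size_t used;
  size_t Capacity() const { return used; }
};

// Formats the counts in the style of the DumpForSigQuit() lines above.
std::string DumpGlobals(const CountingTable& globals, const CountingTable& weak_globals) {
  std::ostringstream os;
  os << "; globals=" << globals.Capacity();
  if (weak_globals.Capacity() > 0) {
    os << " (plus " << weak_globals.Capacity() << " weak)";
  }
  return os.str();
}

int main() {
  std::cout << DumpGlobals({152}, {3}) << std::endl;  // "; globals=152 (plus 3 weak)"
}
```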
/art/runtime/gc/space/

malloc_space.cc
    143: CHECK_LE(new_end, Begin() + Capacity());    (in MoreCore())
    186: << "Capacity " << Capacity();    (in CreateZygoteSpace())
    230: << ",size=" << PrettySize(Size()) << ",capacity=" << PrettySize(Capacity())    (in Dump())
    256: size_t new_capacity = Capacity();    (in ClampGrowthLimit())

space.h
    302: virtual size_t Capacity() const {    (in Capacity() function)
    376: return Capacity();    (in NonGrowthLimitCapacity())

malloc_space.h
    109: size_t Capacity() const {    (in Capacity() function)

bump_pointer_space.h
    88: size_t Capacity() const {    (in Capacity() function)

space_test.h
    331: EXPECT_EQ(space->Capacity(), growth_limit);    (in SizeFootPrintGrowthLimitAndTrimDriver())
    343: EXPECT_EQ(space->Capacity(), capacity);    (in SizeFootPrintGrowthLimitAndTrimDriver())
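In the space classes, Capacity() is the growth-limited size: space_test.h expects it to equal growth_limit while the limit is in place (line 331) and the full capacity once the limit is cleared (line 343), and space.h pairs it with NonGrowthLimitCapacity(), whose default simply returns Capacity(). The sketch below captures that relationship under those assumptions; LimitedSpace and its fields are illustrative, not ART's MallocSpace.

```cpp
#include <cstddef>
#include <cstdint>

// Base class in the style of space.h: NonGrowthLimitCapacity() defaults to
// Capacity() for spaces that never grow.
class Space {
 public:
  virtual ~Space() {}
  virtual size_t Capacity() const = 0;
  virtual size_t NonGrowthLimitCapacity() const { return Capacity(); }
};

// Illustrative growth-limited space: usable size is clamped to growth_limit_
// until the limit is cleared.
class LimitedSpace : public Space {
 public:
  LimitedSpace(uint8_t* begin, size_t reserved, size_t growth_limit)
      : begin_(begin), reserved_(reserved), growth_limit_(growth_limit) {}

  // MoreCore()-style growth is bounded by Begin() + Capacity()
  // (cf. the CHECK_LE at malloc_space.cc:143).
  uint8_t* Begin() const { return begin_; }
  // Possibly limited capacity, as asserted at space_test.h:331.
  size_t Capacity() const override { return growth_limit_; }
  // Full reservation, regardless of the growth limit.
  size_t NonGrowthLimitCapacity() const override { return reserved_; }
  // Afterwards Capacity() reports the full reservation (space_test.h:343).
  void ClearGrowthLimit() { growth_limit_ = reserved_; }

 private:
  uint8_t* begin_;
  size_t reserved_;
  size_t growth_limit_;
};
```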
dlmalloc_space.cc
    136: size_t max_allowed = Capacity();    (in AllocWithGrowth())

rosalloc_space.cc
    162: size_t max_allowed = Capacity();    (in AllocWithGrowth())

region_space.cc
    101: accounting::ContinuousSpaceBitmap::Create("region space live bitmap", Begin(), Capacity()));    (in RegionSpace())
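Both malloc-backed spaces use Capacity() as the ceiling in AllocWithGrowth(): the allocator's footprint limit is lifted to the full (growth-limited) capacity for one allocation attempt and then clamped back down. The sketch below shows that shape under stated assumptions; Allocator, SetFootprintLimit, CurrentFootprint and TryAlloc are hypothetical stand-ins for the underlying dlmalloc/rosalloc calls, not ART's exact API.

```cpp
#include <cstddef>
#include <cstdlib>

// Hypothetical allocator standing in for dlmalloc/rosalloc: it refuses to
// grow its footprint past a settable limit.
class Allocator {
 public:
  void SetFootprintLimit(size_t bytes) { footprint_limit_ = bytes; }
  size_t CurrentFootprint() const { return footprint_; }
  void* TryAlloc(size_t bytes) {
    if (footprint_ + bytes > footprint_limit_) {
      return nullptr;  // would exceed the current footprint limit
    }
    footprint_ += bytes;
    return std::malloc(bytes);  // caller owns the memory in this toy example
  }
 private:
  size_t footprint_ = 0;
  size_t footprint_limit_ = 0;
};

class GrowableAllocSpace {
 public:
  explicit GrowableAllocSpace(size_t growth_limit) : growth_limit_(growth_limit) {}

  size_t Capacity() const { return growth_limit_; }

  // Shape of AllocWithGrowth() as suggested by the hits above: temporarily
  // let the allocator grow all the way to Capacity(), try the allocation,
  // then shrink the limit back so ordinary allocations keep their budget.
  void* AllocWithGrowth(size_t num_bytes) {
    size_t max_allowed = Capacity();
    allocator_.SetFootprintLimit(max_allowed);
    void* result = allocator_.TryAlloc(num_bytes);
    allocator_.SetFootprintLimit(allocator_.CurrentFootprint());
    return result;
  }

 private:
  Allocator allocator_;
  size_t growth_limit_;
};
```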
/art/runtime/gc/accounting/

atomic_stack.h
    183: size_t Capacity() const {    (in Capacity() function)

/art/runtime/gc/collector/

mark_compact.cc
    261: if (UNLIKELY(mark_stack_->Size() >= mark_stack_->Capacity())) {    (in MarkStackPush())
    262: ResizeMarkStack(mark_stack_->Capacity() * 2);    (in MarkStackPush())

mark_sweep.cc
    359: ResizeMarkStack(mark_stack_->Capacity() * 2);    (in ExpandMarkStack())
    364: if (UNLIKELY(mark_stack_->Size() < mark_stack_->Capacity())) {    (in ResizeMarkStack())
    385: if (UNLIKELY(mark_stack_->Size() >= mark_stack_->Capacity())) {    (in MarkObjectNonNullParallel())
    506: if (UNLIKELY(mark_stack_->Size() >= mark_stack_->Capacity())) {    (in PushOnMarkStack())

semi_space.cc
    457: if (UNLIKELY(mark_stack_->Size() >= mark_stack_->Capacity())) {    (in MarkStackPush())
    458: ResizeMarkStack(mark_stack_->Capacity() * 2);    (in MarkStackPush())

concurrent_copying.cc
    1058: const size_t new_size = gc_mark_stack_->Capacity() * 2;    (in ExpandGcMarkStack())
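Every collector applies the same overflow policy to its mark stack (an AtomicStack from gc/accounting/atomic_stack.h): if Size() has reached Capacity(), resize to twice the current capacity before pushing. A condensed sketch of that policy over a plain std::vector; MarkStack and MarkStackPush here are simplified, single-threaded illustrations, not the lock-free AtomicStack itself.

```cpp
#include <cstddef>
#include <vector>

// Simplified, non-atomic mark stack: Size()/Capacity() mirror the accessors
// used by the collectors above. initial_capacity must be non-zero so that
// doubling makes progress.
class MarkStack {
 public:
  explicit MarkStack(size_t initial_capacity) { storage_.reserve(initial_capacity); }

  size_t Size() const { return storage_.size(); }
  size_t Capacity() const { return storage_.capacity(); }

  bool PushBack(void* obj) {
    if (Size() >= Capacity()) {
      return false;  // full: the caller must resize first, as in MarkStackPush()
    }
    storage_.push_back(obj);
    return true;
  }

  void Resize(size_t new_capacity) { storage_.reserve(new_capacity); }

 private:
  std::vector<void*> storage_;
};

// The push path shared by mark_compact.cc, semi_space.cc and mark_sweep.cc:
// double the capacity whenever the stack is full, then push.
inline void MarkStackPush(MarkStack* mark_stack, void* obj) {
  if (mark_stack->Size() >= mark_stack->Capacity()) {
    mark_stack->Resize(mark_stack->Capacity() * 2);
  }
  mark_stack->PushBack(obj);
}
```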
/art/runtime/gc/

heap.cc
    427: non_moving_space_->SetFootprintLimit(non_moving_space_->Capacity());    (in Heap())
    684: malloc_space->SetFootprintLimit(malloc_space->Capacity());    (in CreateMallocSpaceFromMemMap())
    2324: madvise(main_space_->Begin(), main_space_->Capacity(), MADV_DONTNEED);    (in PreZygoteFork())
    2381: non_moving_space_->SetFootprintLimit(non_moving_space_->Capacity());    (in PreZygoteFork())
    3536: malloc_space->SetFootprintLimit(malloc_space->Capacity());    (in ClearGrowthLimit())
    3542: main_space_backup_->SetFootprintLimit(main_space_backup_->Capacity());    (in ClearGrowthLimit())
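heap.cc keeps pushing each space's footprint limit up to whatever Capacity() currently is, and Capacity() also bounds whole-space page operations: PreZygoteFork() madvises the entire [Begin(), Begin() + Capacity()) range so the pre-fork pages are handed back to the kernel. A tiny, self-contained (POSIX-only) condensation of that madvise call; the helper name and parameters are illustrative, not ART's Heap API.

```cpp
#include <sys/mman.h>
#include <cstddef>
#include <cstdint>

// Condensed from the PreZygoteFork() hit above: return the physical pages
// backing [begin, begin + capacity) to the kernel. MADV_DONTNEED drops the
// page contents while keeping the mapping itself valid.
inline int ReleaseSpacePages(uint8_t* begin, size_t capacity) {
  return madvise(begin, capacity, MADV_DONTNEED);
}
```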
/art/compiler/utils/mips64/

assembler_mips64_test.cc
    712: ASSERT_LT(__ GetBuffer()->Capacity(), kRequiredCapacity);    (in TEST_F())

assembler_mips64.cc
    2525: if (required_capacity > buffer_.Capacity()) {    (in ReserveJumpTableSpace())

/art/compiler/utils/mips/

assembler_mips.cc
    3840: if (required_capacity > buffer_.Capacity()) {    (in ReserveJumpTableSpace())
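Both MIPS assemblers gate jump-table emission on the buffer's capacity: ReserveJumpTableSpace() extends the buffer only when the required capacity exceeds Capacity(), and the mips64 test first asserts that the current capacity is below kRequiredCapacity so the reservation path is actually exercised. A sketch of that reserve-if-needed check, written against any buffer with the Capacity()/ExtendCapacity() interface of the GrowableBuffer sketch near the top of this listing (illustrative, not the MIPS assembler API):

```cpp
#include <cstddef>

// Grow only when the jump tables would not fit in the current backing store,
// mirroring the checks at assembler_mips64.cc:2525 and assembler_mips.cc:3840.
template <typename Buffer>
void ReserveJumpTableSpace(Buffer* buffer, size_t required_capacity) {
  if (required_capacity > buffer->Capacity()) {
    buffer->ExtendCapacity(required_capacity);
  }
}
```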