/art/runtime/gc/space/

D | space_create_test.cc |
     25  namespace space {
     65  std::unique_ptr<Space> space(CreateSpace("test", 16 * MB, 32 * MB, 32 * MB, nullptr));  in TEST_P()
     66  EXPECT_TRUE(space != nullptr);  in TEST_P()
     68  space.reset(CreateSpace("test", 16 * MB, 16 * MB, 16 * MB, nullptr));  in TEST_P()
     69  EXPECT_TRUE(space != nullptr);  in TEST_P()
     71  space.reset(CreateSpace("test", 32 * MB, 16 * MB, 16 * MB, nullptr));  in TEST_P()
     72  EXPECT_TRUE(space == nullptr);  in TEST_P()
     74  space.reset(CreateSpace("test", 16 * MB, 16 * MB, 32 * MB, nullptr));  in TEST_P()
     75  EXPECT_TRUE(space != nullptr);  in TEST_P()
     77  space.reset(CreateSpace("test", 16 * MB, 8 * MB, 32 * MB, nullptr));  in TEST_P()
    [all …]

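Note: the CreateSpace calls above take (initial size, growth limit, capacity); of the expectations that survive truncation, the only rejected combination is an initial size larger than the requested capacity. A minimal self-check of just those visible cases, using a hypothetical predicate rather than ART's real CreateSpace():

    #include <cstddef>

    constexpr size_t MB = 1024 * 1024;

    // Hypothetical stand-in for the acceptance rule suggested by the visible
    // expectations; the real CreateSpace() applies more checks than this.
    constexpr bool CreationShouldSucceed(size_t initial_size, size_t capacity) {
      return initial_size <= capacity;
    }

    static_assert(CreationShouldSucceed(16 * MB, 32 * MB), "16 MB initial / 32 MB capacity: accepted");
    static_assert(CreationShouldSucceed(16 * MB, 16 * MB), "16 MB initial / 16 MB capacity: accepted");
    static_assert(!CreationShouldSucceed(32 * MB, 16 * MB), "32 MB initial / 16 MB capacity: rejected");
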
D | space_test.h |
     35  namespace space {
     42  void AddSpace(ContinuousSpace* space, bool revoke = true) {
     50  heap->AddSpace(space);
     52  heap->SetSpaceAsDefault(space);
     68  mirror::Object* Alloc(space::MallocSpace* alloc_space,  in Alloc()
     88  mirror::Object* AllocWithGrowth(space::MallocSpace* alloc_space,  in AllocWithGrowth()
    130  void SizeFootPrintGrowthLimitAndTrimBody(MallocSpace* space, intptr_t object_size,
    141  void SpaceTest<Super>::SizeFootPrintGrowthLimitAndTrimBody(MallocSpace* space,  in SizeFootPrintGrowthLimitAndTrimBody()
    152  size_t footprint = space->GetFootprint();  in SizeFootPrintGrowthLimitAndTrimBody()
    161  EXPECT_LE(space->Size(), growth_limit);  in SizeFootPrintGrowthLimitAndTrimBody()
    [all …]

D | space.cc |
     28  namespace space {
     37  std::ostream& operator<<(std::ostream& os, const Space& space) {  in operator <<()
     38  space.Dump(os);  in operator <<()
    142  AllocSpace::SweepCallbackContext::SweepCallbackContext(bool swap_bitmaps_in, space::Space* space_in)  in SweepCallbackContext()
    143  : swap_bitmaps(swap_bitmaps_in), space(space_in), self(Thread::Current()) {  in SweepCallbackContext()

/art/runtime/gc/collector/

D | immune_spaces.cc |
     44  for (space::ContinuousSpace* space : GetSpaces()) {  in CreateLargestImmuneRegion()
     45  uintptr_t space_begin = reinterpret_cast<uintptr_t>(space->Begin());  in CreateLargestImmuneRegion()
     46  uintptr_t space_end = reinterpret_cast<uintptr_t>(space->Limit());  in CreateLargestImmuneRegion()
     47  if (space->IsImageSpace()) {  in CreateLargestImmuneRegion()
     50  space::ImageSpace* image_space = space->AsImageSpace();  in CreateLargestImmuneRegion()
    106  void ImmuneSpaces::AddSpace(space::ContinuousSpace* space) {  in AddSpace()
    107  DCHECK(spaces_.find(space) == spaces_.end()) << *space;  in AddSpace()
    109  if (space->GetLiveBitmap() != space->GetMarkBitmap()) {  in AddSpace()
    110  CHECK(space->IsContinuousMemMapAllocSpace());  in AddSpace()
    111  space->AsContinuousMemMapAllocSpace()->BindLiveToMarkBitmap();  in AddSpace()
    [all …]

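Note: CreateLargestImmuneRegion() above walks the immune spaces in address order and keeps the largest contiguous run of them. A simplified, self-contained model of that computation (plain address intervals instead of ART's ContinuousSpace, and ignoring the extra handling of image-space oat files):

    #include <algorithm>
    #include <cstdint>
    #include <utility>
    #include <vector>

    // Each space is modeled as a half-open address interval [begin, end).
    using Interval = std::pair<uintptr_t, uintptr_t>;

    // Returns the largest contiguous region covered by adjacent intervals.
    Interval LargestContiguousRegion(std::vector<Interval> spaces) {
      std::sort(spaces.begin(), spaces.end());  // the real set is already ordered by Begin()
      Interval best{0, 0};
      uintptr_t cur_begin = 0;
      uintptr_t cur_end = 0;
      for (const Interval& s : spaces) {
        if (s.first != cur_end) {
          // Gap between the previous space's limit and this space's begin:
          // start a new candidate region here.
          cur_begin = s.first;
        }
        cur_end = s.second;
        if (cur_end - cur_begin > best.second - best.first) {
          best = {cur_begin, cur_end};
        }
      }
      return best;
    }
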
D | immune_spaces.h |
     29  namespace space {
     40  bool operator()(space::ContinuousSpace* a, space::ContinuousSpace* b) const;
     48  void AddSpace(space::ContinuousSpace* space) REQUIRES(Locks::heap_bitmap_lock_);
     57  bool ContainsSpace(space::ContinuousSpace* space) const;
     60  const std::set<space::ContinuousSpace*, CompareByBegin>& GetSpaces() {  in GetSpaces()
     74  for (space::ContinuousSpace* space : spaces_) {  in ContainsObject()
     75  if (space->HasAddress(obj)) {  in ContainsObject()
     87  std::set<space::ContinuousSpace*, CompareByBegin> spaces_;

D | sticky_mark_sweep.cc |
     42  for (const auto& space : GetHeap()->GetContinuousSpaces()) {  in BindBitmaps()
     43  if (space->IsContinuousMemMapAllocSpace() &&  in BindBitmaps()
     44  space->GetGcRetentionPolicy() == space::kGcRetentionPolicyAlwaysCollect) {  in BindBitmaps()
     45  DCHECK(space->IsContinuousMemMapAllocSpace());  in BindBitmaps()
     46  space->AsContinuousMemMapAllocSpace()->BindLiveToMarkBitmap();  in BindBitmaps()
     49  for (const auto& space : GetHeap()->GetDiscontinuousSpaces()) {  in BindBitmaps()
     50  CHECK(space->IsLargeObjectSpace());  in BindBitmaps()
     51  space->AsLargeObjectSpace()->CopyLiveToMarked();  in BindBitmaps()

D | partial_mark_sweep.cc |
     39  for (const auto& space : GetHeap()->GetContinuousSpaces()) {  in BindBitmaps()
     40  if (space->GetGcRetentionPolicy() == space::kGcRetentionPolicyFullCollect) {  in BindBitmaps()
     41  CHECK(space->IsZygoteSpace());  in BindBitmaps()
     42  immune_spaces_.AddSpace(space);  in BindBitmaps()

D | semi_space.cc |
     67  for (const auto& space : GetHeap()->GetContinuousSpaces()) {  in BindBitmaps()
     68  if (space->GetGcRetentionPolicy() == space::kGcRetentionPolicyNeverCollect ||  in BindBitmaps()
     69  space->GetGcRetentionPolicy() == space::kGcRetentionPolicyFullCollect) {  in BindBitmaps()
     70  immune_spaces_.AddSpace(space);  in BindBitmaps()
     71  } else if (space->GetLiveBitmap() != nullptr) {  in BindBitmaps()
     73  if (space == to_space_ || collect_from_space_only_) {  in BindBitmaps()
     77  CHECK(space == GetHeap()->GetPrimaryFreeListSpace() ||  in BindBitmaps()
     78  space == GetHeap()->GetNonMovingSpace());  in BindBitmaps()
     80  CHECK(space->IsContinuousMemMapAllocSpace());  in BindBitmaps()
     81  space->AsContinuousMemMapAllocSpace()->BindLiveToMarkBitmap();  in BindBitmaps()
    [all …]

D | mark_sweep.cc |
     86  for (const auto& space : GetHeap()->GetContinuousSpaces()) {  in BindBitmaps()
     87  if (space->GetGcRetentionPolicy() == space::kGcRetentionPolicyNeverCollect) {  in BindBitmaps()
     88  immune_spaces_.AddSpace(space);  in BindBitmaps()
    291  for (const auto& space : immune_spaces_.GetSpaces()) {  in UpdateAndMarkModUnion()
    292  const char* name = space->IsZygoteSpace()  in UpdateAndMarkModUnion()
    295  DCHECK(space->IsZygoteSpace() || space->IsImageSpace()) << *space;  in UpdateAndMarkModUnion()
    297  accounting::ModUnionTable* mod_union_table = heap_->FindModUnionTableFromSpace(space);  in UpdateAndMarkModUnion()
    302  space->GetLiveBitmap()->VisitMarkedRange(reinterpret_cast<uintptr_t>(space->Begin()),  in UpdateAndMarkModUnion()
    303  reinterpret_cast<uintptr_t>(space->End()),  in UpdateAndMarkModUnion()
    342  for (const auto& space : GetHeap()->GetContinuousSpaces()) {  in FindDefaultSpaceBitmap()
    [all …]

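Note: the BindBitmaps() overrides above (sticky, partial, semi-space, full mark-sweep) all follow one pattern: walk the heap's continuous spaces and either bind the space's live bitmap to its mark bitmap or add the space to the immune set, depending on its GC retention policy. A simplified sketch of that dispatch with hypothetical stand-in types, not ART's API:

    #include <vector>

    enum class GcRetentionPolicy { kNeverCollect, kFullCollect, kAlwaysCollect };

    struct FakeSpace {
      GcRetentionPolicy policy;
      bool live_bound_to_mark = false;  // models BindLiveToMarkBitmap()
      bool immune = false;              // models immune_spaces_.AddSpace()
    };

    // Sticky-style pass: only the always-collected spaces get their live bitmap
    // bound to the mark bitmap, so objects that were already live stay "marked".
    void BindBitmapsSticky(std::vector<FakeSpace>& spaces) {
      for (FakeSpace& space : spaces) {
        if (space.policy == GcRetentionPolicy::kAlwaysCollect) {
          space.live_bound_to_mark = true;
        }
      }
    }

    // Partial-style pass: spaces collected only by a full GC (the zygote space)
    // are not collected here, so they go into the immune set instead.
    void BindBitmapsPartial(std::vector<FakeSpace>& spaces) {
      for (FakeSpace& space : spaces) {
        if (space.policy == GcRetentionPolicy::kFullCollect) {
          space.immune = true;
        }
      }
    }
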
D | garbage_collector.cc |
    126  for (const auto& space : GetHeap()->GetContinuousSpaces()) {  in SwapBitmaps()
    128  if (space->GetGcRetentionPolicy() == space::kGcRetentionPolicyAlwaysCollect ||  in SwapBitmaps()
    130  space->GetGcRetentionPolicy() == space::kGcRetentionPolicyFullCollect)) {  in SwapBitmaps()
    131  accounting::ContinuousSpaceBitmap* live_bitmap = space->GetLiveBitmap();  in SwapBitmaps()
    132  accounting::ContinuousSpaceBitmap* mark_bitmap = space->GetMarkBitmap();  in SwapBitmaps()
    136  CHECK(space->IsContinuousMemMapAllocSpace());  in SwapBitmaps()
    137  space->AsContinuousMemMapAllocSpace()->SwapBitmaps();  in SwapBitmaps()
    142  space::LargeObjectSpace* space = disc_space->AsLargeObjectSpace();  in SwapBitmaps()
    143  accounting::LargeObjectBitmap* live_set = space->GetLiveBitmap();  in SwapBitmaps()
    144  accounting::LargeObjectBitmap* mark_set = space->GetMarkBitmap();  in SwapBitmaps()
    [all …]

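Note: SwapBitmaps() above exchanges each collected space's live and mark bitmaps after marking, so the set of marked objects becomes the new live set without copying any bits. A tiny sketch of that idea with hypothetical types:

    #include <utility>
    #include <vector>

    struct FakeBitmap {
      std::vector<bool> bits;  // one bit per possible object slot
    };

    struct FakeAllocSpace {
      FakeBitmap* live_bitmap;
      FakeBitmap* mark_bitmap;

      // After a collection the mark bitmap describes the surviving objects,
      // so swapping the two pointers installs it as the new live bitmap.
      void SwapBitmaps() { std::swap(live_bitmap, mark_bitmap); }
    };
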
D | mark_compact.cc |
     46  for (const auto& space : GetHeap()->GetContinuousSpaces()) {  in BindBitmaps()
     47  if (space->GetGcRetentionPolicy() == space::kGcRetentionPolicyNeverCollect ||  in BindBitmaps()
     48  space->GetGcRetentionPolicy() == space::kGcRetentionPolicyFullCollect) {  in BindBitmaps()
     49  immune_spaces_.AddSpace(space);  in BindBitmaps()
     80  const size_t alloc_size = RoundUp(obj->SizeOf(), space::BumpPointerSpace::kAlignment);  in ForwardObject()
    104  DCHECK_ALIGNED(obj, space::BumpPointerSpace::kAlignment);  in CalculateObjectForwardingAddresses()
    211  for (auto& space : heap_->GetContinuousSpaces()) {  in UpdateAndMarkModUnion()
    213  if (immune_spaces_.ContainsSpace(space)) {  in UpdateAndMarkModUnion()
    214  accounting::ModUnionTable* table = heap_->FindModUnionTableFromSpace(space);  in UpdateAndMarkModUnion()
    218  space->IsZygoteSpace() ? "UpdateAndMarkZygoteModUnionTable" :  in UpdateAndMarkModUnion()
    [all …]

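Note: ForwardObject() above rounds each object's size up to BumpPointerSpace::kAlignment before assigning its forwarding address. For a power-of-two alignment this is the standard mask trick; a self-contained version of the assumed semantics, not ART's own bit utilities:

    #include <cstddef>

    // Rounds x up to the next multiple of a power-of-two alignment.
    constexpr size_t RoundUp(size_t x, size_t alignment) {
      return (x + alignment - 1) & ~(alignment - 1);
    }

    static_assert(RoundUp(13, 8) == 16, "13 rounds up to the next 8-byte boundary");
    static_assert(RoundUp(16, 8) == 16, "already-aligned sizes are unchanged");
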
D | semi_space.h |
     50  namespace space {
     83  void SetToSpace(space::ContinuousMemMapAllocSpace* to_space);
     86  void SetFromSpace(space::ContinuousMemMapAllocSpace* from_space);
    184  virtual bool ShouldSweepSpace(space::ContinuousSpace* space) const;
    215  space::ContinuousMemMapAllocSpace* to_space_;
    218  space::ContinuousMemMapAllocSpace* from_space_;
    251  space::ContinuousMemMapAllocSpace* promo_dest_space_;
    254  space::ContinuousMemMapAllocSpace* fallback_space_;

D | concurrent_copying.cc |
    105  static_assert(space::RegionSpace::kRegionSize == accounting::ReadBarrierTable::kRegionSize,  in ConcurrentCopying()
    262  for (const auto& space : heap_->GetContinuousSpaces()) {  in BindBitmaps()
    263  if (space->GetGcRetentionPolicy() == space::kGcRetentionPolicyNeverCollect ||  in BindBitmaps()
    264  space->GetGcRetentionPolicy() == space::kGcRetentionPolicyFullCollect) {  in BindBitmaps()
    265  CHECK(space->IsZygoteSpace() || space->IsImageSpace());  in BindBitmaps()
    266  immune_spaces_.AddSpace(space);  in BindBitmaps()
    267  } else if (space == region_space_) {  in BindBitmaps()
    323  for (space::ContinuousSpace* space : immune_spaces_.GetSpaces()) {  in InitializePhase()
    324  LOG(INFO) << "Immune space: " << *space;  in InitializePhase()
    522  for (auto& space : immune_spaces_.GetSpaces()) {  in VerifyGrayImmuneObjects()
    [all …]

D | concurrent_copying-inl.h |
    125  space::RegionSpace::RegionType rtype = region_space_->GetRegionType(from_ref);  in Mark()
    127  case space::RegionSpace::RegionType::kRegionTypeToSpace:  in Mark()
    130  case space::RegionSpace::RegionType::kRegionTypeFromSpace: {  in Mark()
    140  case space::RegionSpace::RegionType::kRegionTypeUnevacFromSpace: {  in Mark()
    143  case space::RegionSpace::RegionType::kRegionTypeNone:  in Mark()

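Note: the switch in Mark() above dispatches on the region type of the referenced object. A hypothetical, simplified reading of that dispatch (stand-in enums, not ART's ConcurrentCopying): to-space objects need no further work, from-space objects are forwarded into to-space, unevacuated from-space objects are marked in place, and anything else lies outside the region space.

    enum class RegionType {
      kRegionTypeToSpace,
      kRegionTypeFromSpace,
      kRegionTypeUnevacFromSpace,
      kRegionTypeNone,
    };

    enum class MarkAction {
      kAlreadyInToSpace,   // nothing to do
      kForwardToToSpace,   // copy, or return an existing forwarding address
      kMarkInPlace,        // set the bit in the unevacuated region's mark bitmap
      kNotInRegionSpace,   // handled by the non-moving-space path
    };

    // Classifies what a Mark()-style routine would do for each region type.
    MarkAction ClassifyForMark(RegionType rtype) {
      switch (rtype) {
        case RegionType::kRegionTypeToSpace:         return MarkAction::kAlreadyInToSpace;
        case RegionType::kRegionTypeFromSpace:       return MarkAction::kForwardToToSpace;
        case RegionType::kRegionTypeUnevacFromSpace: return MarkAction::kMarkInPlace;
        case RegionType::kRegionTypeNone:            return MarkAction::kNotInRegionSpace;
      }
      return MarkAction::kNotInRegionSpace;  // unreachable; silences -Wreturn-type
    }
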
/art/runtime/gc/accounting/

D | mod_union_table_test.cc |
     41  TableType type, space::ContinuousSpace* space, space::ContinuousSpace* target_space);
     49  Thread* self, space::ContinuousMemMapAllocSpace* space, size_t component_count)  in AllocObjectArray()
     51  auto* klass = GetObjectArrayClass(self, space);  in AllocObjectArray()
     55  space->Alloc(self, size, &bytes_allocated, nullptr, &bytes_tl_bulk_allocated));  in AllocObjectArray()
     59  space->GetLiveBitmap()->Set(obj);  in AllocObjectArray()
     70  mirror::Class* GetObjectArrayClass(Thread* self, space::ContinuousMemMapAllocSpace* space)  in GetObjectArrayClass()
     83  auto* klass = down_cast<mirror::Class*>(space->Alloc(self, class_size, &bytes_allocated,  in GetObjectArrayClass()
    121  const std::string& name, Heap* heap, space::ContinuousSpace* space,  in ModUnionTableRefCacheToSpace()
    122  space::ContinuousSpace* target_space)  in ModUnionTableRefCacheToSpace()
    123  : ModUnionTableReferenceCache(name, heap, space), target_space_(target_space) {}  in ModUnionTableRefCacheToSpace()
    [all …]

D | remembered_set.h |
     34  namespace space {
     49  explicit RememberedSet(const std::string& name, Heap* heap, space::ContinuousSpace* space)  in RememberedSet()
     50  : name_(name), heap_(heap), space_(space) {}  in RememberedSet()
     56  void UpdateAndMarkReferences(space::ContinuousSpace* target_space,
     63  space::ContinuousSpace* GetSpace() {  in GetSpace()
     77  space::ContinuousSpace* const space_;

D | mod_union_table.h |
     39  namespace space {
     58  explicit ModUnionTable(const std::string& name, Heap* heap, space::ContinuousSpace* space)  in ModUnionTable()
     61  space_(space) {}  in ModUnionTable()
     99  space::ContinuousSpace* GetSpace() {  in GetSpace()
    114  space::ContinuousSpace* const space_;
    121  space::ContinuousSpace* space)  in ModUnionTableReferenceCache()
    122  : ModUnionTable(name, heap, space) {}  in ModUnionTableReferenceCache()
    169  space::ContinuousSpace* space);

D | mod_union_table.cc |
     93  space::ContinuousSpace* from_space,  in ModUnionUpdateObjectReferencesVisitor()
     94  space::ContinuousSpace* immune_space,  in ModUnionUpdateObjectReferencesVisitor()
    134  space::ContinuousSpace* const from_space_;
    135  space::ContinuousSpace* const immune_space_;
    145  space::ContinuousSpace* from_space,  in ModUnionScanImageRootVisitor()
    146  space::ContinuousSpace* immune_space,  in ModUnionScanImageRootVisitor()
    167  space::ContinuousSpace* const from_space_;
    168  space::ContinuousSpace* const immune_space_;
    279  space::ContinuousSpace* from_space = heap->FindContinuousSpaceFromObject(obj, false);  in operator ()()
    280  space::ContinuousSpace* to_space = heap->FindContinuousSpaceFromObject(ref, false);  in operator ()()
    [all …]

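Note: the visitor code above looks up which continuous space holds the visited object and which holds the reference it points to. The essence of a mod-union table is that only references leaving the table's own space are interesting; a minimal hypothetical check (plain address intervals, not ART's ContinuousSpace):

    #include <cstdint>

    // A space modeled as a half-open address interval [begin, end).
    struct FakeSpace {
      uintptr_t begin;
      uintptr_t end;
      bool HasAddress(const void* p) const {
        uintptr_t a = reinterpret_cast<uintptr_t>(p);
        return a >= begin && a < end;
      }
    };

    // True when a non-null reference held by an object in holder_space points
    // outside that space, i.e. the kind of reference a mod-union table records.
    bool IsCrossSpaceReference(const FakeSpace& holder_space, const void* ref) {
      return ref != nullptr && !holder_space.HasAddress(ref);
    }
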
D | mod_union_table-inl.h |
     33  space::ContinuousSpace* space)  in ModUnionTableToZygoteAllocspace()
     34  : ModUnionTableReferenceCache(name, heap, space) {}  in ModUnionTableToZygoteAllocspace()

/art/test/596-app-images/

D | app_images.cc |
     37  for (auto* space : Runtime::Current()->GetHeap()->GetContinuousSpaces()) {  in Java_Main_checkAppImageLoaded()
     38  if (space->IsImageSpace()) {  in Java_Main_checkAppImageLoaded()
     39  auto* image_space = space->AsImageSpace();  in Java_Main_checkAppImageLoaded()
     52  for (auto* space : Runtime::Current()->GetHeap()->GetContinuousSpaces()) {  in Java_Main_checkAppImageContains()
     53  if (space->IsImageSpace()) {  in Java_Main_checkAppImageContains()
     54  auto* image_space = space->AsImageSpace();  in Java_Main_checkAppImageContains()

/art/runtime/gc/

D | heap.cc |
    174  space::LargeObjectSpaceType large_object_space_type,  in Heap()
    318  if (space::ImageSpace::LoadBootImage(image_file_name,  in Heap()
    322  for (auto space : boot_image_spaces_) {  in Heap()
    323  AddSpace(space);  in Heap()
    424  non_moving_space_ = space::DlMallocSpace::CreateFromMemMap(  in Heap()
    435  MemMap* region_space_mem_map = space::RegionSpace::CreateMemMap(kRegionSpaceName,  in Heap()
    439  region_space_ = space::RegionSpace::Create(kRegionSpaceName, region_space_mem_map);  in Heap()
    446  bump_pointer_space_ = space::BumpPointerSpace::CreateFromMemMap("Bump pointer space 1",  in Heap()
    450  temp_space_ = space::BumpPointerSpace::CreateFromMemMap("Bump pointer space 2",  in Heap()
    467  bump_pointer_space_ = space::BumpPointerSpace::Create("Bump pointer space 1",  in Heap()
    [all …]

D | heap.h |
     91  namespace space {
    145  static constexpr space::LargeObjectSpaceType kDefaultLargeObjectSpaceType =
    147  space::LargeObjectSpaceType::kFreeList
    148  : space::LargeObjectSpaceType::kMap;
    173  space::LargeObjectSpaceType large_object_space_type,
    380  void SetSpaceAsDefault(space::ContinuousSpace* continuous_space)
    382  void AddSpace(space::Space* space)
    385  void RemoveSpace(space::Space* space)
    410  const std::vector<space::ContinuousSpace*>& GetContinuousSpaces() const  in GetContinuousSpaces()
    415  const std::vector<space::DiscontinuousSpace*>& GetDiscontinuousSpaces() const {  in GetDiscontinuousSpaces()
    [all …]

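Note: AddSpace()/RemoveSpace() and the two getters above suggest the heap keeps continuous and discontinuous spaces in separate containers. A minimal sketch of that bookkeeping with hypothetical types (the real Heap::AddSpace additionally sorts the continuous spaces and tracks allocation spaces):

    #include <algorithm>
    #include <vector>

    struct FakeSpace {
      bool is_continuous;
    };

    class FakeHeap {
     public:
      // Registers a space in the container matching its kind.
      void AddSpace(FakeSpace* space) {
        if (space->is_continuous) {
          continuous_spaces_.push_back(space);
        } else {
          discontinuous_spaces_.push_back(space);
        }
      }

      // Removes a space from whichever container holds it.
      void RemoveSpace(FakeSpace* space) {
        auto erase_from = [space](std::vector<FakeSpace*>& v) {
          v.erase(std::remove(v.begin(), v.end(), space), v.end());
        };
        erase_from(continuous_spaces_);
        erase_from(discontinuous_spaces_);
      }

      const std::vector<FakeSpace*>& GetContinuousSpaces() const { return continuous_spaces_; }
      const std::vector<FakeSpace*>& GetDiscontinuousSpaces() const { return discontinuous_spaces_; }

     private:
      std::vector<FakeSpace*> continuous_spaces_;
      std::vector<FakeSpace*> discontinuous_spaces_;
    };
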
D | verification.cc |
     66  space::Space* const space = heap_->FindSpaceFromAddress(addr);  in DumpObjectInfo()
     67  if (space != nullptr) {  in DumpObjectInfo()
     68  oss << " space=" << *space;  in DumpObjectInfo()
    118  bool Verification::IsAddressInHeapSpace(const void* addr, space::Space** out_space) const {  in IsAddressInHeapSpace()
    119  space::Space* const space = heap_->FindSpaceFromAddress(addr);  in IsAddressInHeapSpace()
    120  if (space != nullptr) {  in IsAddressInHeapSpace()
    122  *out_space = space;  in IsAddressInHeapSpace()
    129  bool Verification::IsValidHeapObjectAddress(const void* addr, space::Space** out_space) const {  in IsValidHeapObjectAddress()

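Note: IsAddressInHeapSpace() above reports whether an address falls inside any heap space and optionally returns that space through an out-parameter that defaults to nullptr. A self-contained sketch of the same pattern with hypothetical types:

    #include <cstdint>
    #include <vector>

    struct FakeSpace {
      uintptr_t begin;
      uintptr_t end;
      bool HasAddress(const void* addr) const {
        uintptr_t a = reinterpret_cast<uintptr_t>(addr);
        return a >= begin && a < end;
      }
    };

    // Returns true if addr lies in one of the spaces; when out_space is given,
    // it receives the containing space (and is left untouched on failure).
    bool IsAddressInHeapSpace(const std::vector<FakeSpace>& spaces,
                              const void* addr,
                              const FakeSpace** out_space = nullptr) {
      for (const FakeSpace& space : spaces) {
        if (space.HasAddress(addr)) {
          if (out_space != nullptr) {
            *out_space = &space;
          }
          return true;
        }
      }
      return false;
    }
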
D | verification.h |
     32  namespace space {
     56  bool IsValidHeapObjectAddress(const void* addr, space::Space** out_space = nullptr) const
     65  bool IsAddressInHeapSpace(const void* addr, space::Space** out_space = nullptr) const

/art/runtime/native/

D | dalvik_system_VMDebug.cc |
    350  for (gc::space::ContinuousSpace* space : heap->GetContinuousSpaces()) {  in VMDebug_getHeapSpaceStats()
    351  if (space->IsImageSpace()) {  in VMDebug_getHeapSpaceStats()
    353  } else if (space->IsZygoteSpace()) {  in VMDebug_getHeapSpaceStats()
    354  gc::space::ZygoteSpace* zygote_space = space->AsZygoteSpace();  in VMDebug_getHeapSpaceStats()
    357  } else if (space->IsMallocSpace()) {  in VMDebug_getHeapSpaceStats()
    359  gc::space::MallocSpace* malloc_space = space->AsMallocSpace();  in VMDebug_getHeapSpaceStats()
    362  } else if (space->IsBumpPointerSpace()) {  in VMDebug_getHeapSpaceStats()
    363  gc::space::BumpPointerSpace* bump_pointer_space = space->AsBumpPointerSpace();  in VMDebug_getHeapSpaceStats()
    368  for (gc::space::DiscontinuousSpace* space : heap->GetDiscontinuousSpaces()) {  in VMDebug_getHeapSpaceStats()
    369  if (space->IsLargeObjectSpace()) {  in VMDebug_getHeapSpaceStats()
    [all …]

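Note: VMDebug_getHeapSpaceStats() above walks every continuous space and branches on its concrete kind (image, zygote, malloc, bump-pointer). A hypothetical sketch of that kind-based accumulation, not the real accounting:

    #include <cstddef>

    enum class SpaceKind { kImage, kZygote, kMalloc, kBumpPointer, kOther };

    struct FakeSpace {
      SpaceKind kind;
      size_t size;
    };

    struct HeapSpaceStats {
      size_t image_bytes = 0;
      size_t zygote_bytes = 0;
      size_t alloc_bytes = 0;
    };

    // Adds one space's footprint to the bucket matching its kind.
    void Accumulate(const FakeSpace& space, HeapSpaceStats* stats) {
      switch (space.kind) {
        case SpaceKind::kImage:
          stats->image_bytes += space.size;
          break;
        case SpaceKind::kZygote:
          stats->zygote_bytes += space.size;
          break;
        case SpaceKind::kMalloc:
        case SpaceKind::kBumpPointer:
          // Both back ordinary object allocation in this simplified model.
          stats->alloc_bytes += space.size;
          break;
        case SpaceKind::kOther:
          break;
      }
    }
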