Searched refs:kRegionSize (Results 1 – 8 of 8) sorted by relevance

/art/runtime/gc/accounting/
read_barrier_table.h
38 size_t capacity = static_cast<size_t>(kHeapCapacity / kRegionSize); in ReadBarrierTable()
39 DCHECK_EQ(kHeapCapacity / kRegionSize, in ReadBarrierTable()
40 static_cast<uint64_t>(static_cast<size_t>(kHeapCapacity / kRegionSize))); in ReadBarrierTable()
58 DCHECK_ALIGNED(start_addr, kRegionSize); in Clear()
59 DCHECK_ALIGNED(end_addr, kRegionSize); in Clear()
87 static constexpr size_t kRegionSize = 256 * KB; variable
95 uint8_t* entry_addr = mem_map_.Begin() + reinterpret_cast<uintptr_t>(heap_addr) / kRegionSize; in EntryFromAddr()
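
A minimal standalone sketch (hypothetical names, not the ART API) of the arithmetic shown in these matches: the read barrier table keeps one byte per 256 KB region, so its size is kHeapCapacity / kRegionSize and an entry is located by dividing a heap address by kRegionSize.

    #include <cstddef>
    #include <cstdint>

    constexpr size_t KB = 1024;
    constexpr size_t kRegionSize = 256 * KB;                        // same constant as above
    constexpr uint64_t kHeapCapacity = 4ULL * 1024 * 1024 * 1024;   // hypothetical 4 GiB cap

    // One table byte per region, as in the ReadBarrierTable() checks above.
    constexpr size_t kNumEntries = static_cast<size_t>(kHeapCapacity / kRegionSize);

    // Byte index into the table = region index of the address.
    inline uint8_t* EntryForAddr(uint8_t* table_begin, uintptr_t heap_addr) {
      return table_begin + heap_addr / kRegionSize;
    }
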
/art/runtime/gc/space/
region_space.cc
53 CHECK_ALIGNED(capacity, kRegionSize); in CreateMemMap()
61 capacity + kRegionSize, in CreateMemMap()
80 CHECK_EQ(mem_map.Size(), capacity + kRegionSize); in CreateMemMap()
83 if (IsAlignedParam(mem_map.Begin(), kRegionSize)) { in CreateMemMap()
89 mem_map.AlignBy(kRegionSize); in CreateMemMap()
91 CHECK_ALIGNED(mem_map.Begin(), kRegionSize); in CreateMemMap()
92 CHECK_ALIGNED(mem_map.End(), kRegionSize); in CreateMemMap()
112 num_regions_(mem_map_.Size() / kRegionSize), in RegionSpace()
120 CHECK_ALIGNED(mem_map_.Size(), kRegionSize); in RegionSpace()
121 CHECK_ALIGNED(mem_map_.Begin(), kRegionSize); in RegionSpace()
[all …]
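
The extra kRegionSize reserved in CreateMemMap() above leaves room to realign the mapping to a region boundary when the returned base is unaligned. A minimal sketch of that alignment arithmetic, using plain pointers instead of ART's MemMap:

    #include <cstddef>
    #include <cstdint>

    constexpr size_t kRegionSize = 256 * 1024;

    // Round addr up to the next multiple of a power-of-two alignment.
    inline uintptr_t AlignUp(uintptr_t addr, size_t alignment) {
      return (addr + alignment - 1) & ~(static_cast<uintptr_t>(alignment) - 1);
    }

    // With capacity + kRegionSize bytes reserved, the aligned begin still has
    // `capacity` usable bytes before the end of the reservation.
    inline uint8_t* AlignedRegionBegin(uint8_t* raw_begin) {
      return reinterpret_cast<uint8_t*>(
          AlignUp(reinterpret_cast<uintptr_t>(raw_begin), kRegionSize));
    }
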
region_space-inl.h
57 if (LIKELY(num_bytes <= kRegionSize)) { in AllocNonvirtual()
318 DCHECK_GT(num_bytes, kRegionSize); in AllocLarge()
319 size_t num_regs_in_large_region = RoundUp(num_bytes, kRegionSize) / kRegionSize; in AllocLarge()
321 DCHECK_LT((num_regs_in_large_region - 1) * kRegionSize, num_bytes); in AllocLarge()
322 DCHECK_LE(num_bytes, num_regs_in_large_region * kRegionSize); in AllocLarge()
423 size_t allocated = num_regs_in_large_region * kRegionSize; in AllocLargeInRange()
468 DCHECK_ALIGNED(large_obj, kRegionSize); in FreeLarge()
471 uint8_t* end_addr = AlignUp(reinterpret_cast<uint8_t*>(large_obj) + bytes_allocated, kRegionSize); in FreeLarge()
473 for (uint8_t* addr = begin_addr; addr < end_addr; addr += kRegionSize) { in FreeLarge()
496 DCHECK_LT(begin_ + kRegionSize, Top()); in BytesAllocated()
[all …]
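
The AllocLarge() checks above pin down how many regions a large allocation spans; a minimal standalone sketch of that sizing (hypothetical helper name):

    #include <cassert>
    #include <cstddef>

    constexpr size_t kRegionSize = 256 * 1024;

    inline size_t NumRegionsForLargeAlloc(size_t num_bytes) {
      assert(num_bytes > kRegionSize);                   // "large" means bigger than one region
      size_t num_regs = (num_bytes + kRegionSize - 1) / kRegionSize;  // RoundUp(num_bytes, kRegionSize) / kRegionSize
      assert((num_regs - 1) * kRegionSize < num_bytes);  // no region is entirely unused
      assert(num_bytes <= num_regs * kRegionSize);       // the object fits in the claimed regions
      return num_regs;
    }
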
region_space.h
230 static constexpr size_t kRegionSize = 256 * KB; variable
347 size_t reg_idx = offset / kRegionSize; in RegionIdxForRefUnchecked()
405 DCHECK_EQ(static_cast<size_t>(end - begin), kRegionSize); in Init()
461 DCHECK_LT(begin_ + kRegionSize, Top()); in IsLarge()
652 size_t reg_idx = offset / kRegionSize; in RefToRegionLocked()
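
A minimal sketch of the two recurring computations in region_space.h above: a reference's region index is its offset from the space's base divided by kRegionSize, and a region whose allocation top lies more than one region past its begin must belong to a multi-region (large) allocation. Names are hypothetical:

    #include <cstddef>
    #include <cstdint>

    constexpr size_t kRegionSize = 256 * 1024;

    // One index per 256 KB region, as in RegionIdxForRefUnchecked()/RefToRegionLocked().
    inline size_t RegionIdxForRef(uintptr_t space_begin, uintptr_t ref) {
      return (ref - space_begin) / kRegionSize;
    }

    // Used bytes extend past one region, as DCHECKed in IsLarge().
    inline bool SpansMultipleRegions(uintptr_t region_begin, uintptr_t top) {
      return region_begin + kRegionSize < top;
    }
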
/art/test/1001-app-image-regions/
app_image_regions.cc
33 return gc::space::RegionSpace::kRegionSize; in Java_Main_getRegionSize()
/art/runtime/gc/collector/
concurrent_copying.cc
123 static_assert(space::RegionSpace::kRegionSize == accounting::ReadBarrierTable::kRegionSize, in ConcurrentCopying()
3331 size_t region_space_alloc_size = (obj_size <= space::RegionSpace::kRegionSize) in Copy()
3333 : RoundUp(obj_size, space::RegionSpace::kRegionSize); in Copy()
3405 if (bytes_allocated > space::RegionSpace::kRegionSize) { in Copy()
3785 << PrettySize(region_space_->GetMaxPeakNumNonFreeRegions() * space::RegionSpace::kRegionSize) in DumpPerformanceInfo()
3787 << PrettySize(region_space_->GetNumRegions() * space::RegionSpace::kRegionSize / 2) in DumpPerformanceInfo()
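
The Copy() matches above size the to-space allocation by whether the object fits in one region; a minimal sketch of that decision (arithmetic only, not the collector API):

    #include <cstddef>

    constexpr size_t kRegionSize = 256 * 1024;

    inline size_t RegionSpaceAllocSize(size_t obj_size) {
      if (obj_size <= kRegionSize) {
        return obj_size;                                 // fits in a normal region allocation
      }
      // Larger objects are rounded up to a whole number of regions.
      return ((obj_size + kRegionSize - 1) / kRegionSize) * kRegionSize;
    }
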
/art/runtime/gc/
heap.cc
4349 if (space::RegionSpace::kRegionSize >= alloc_size) { in AllocWithNewTLAB()
4352 space::RegionSpace::kRegionSize, in AllocWithNewTLAB()
4356 : gc::space::RegionSpace::kRegionSize; in AllocWithNewTLAB()
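
The AllocWithNewTLAB() matches above show kRegionSize acting as the TLAB sizing threshold: a request that fits in one region gets a fresh region-sized TLAB, while larger requests fall through to the multi-region path. A minimal sketch under that assumption; the first branch of the ternary is not visible in the listing, so kUsePartialTlabs and kPartialTlabSize are hypothetical stand-ins for it:

    #include <algorithm>
    #include <cstddef>

    constexpr size_t kRegionSize = 256 * 1024;
    constexpr bool kUsePartialTlabs = false;             // hypothetical toggle
    constexpr size_t kPartialTlabSize = 32 * 1024;       // hypothetical partial-TLAB size

    // Returns 0 when the request cannot be served from a region-backed TLAB.
    inline size_t NewTlabSize(size_t alloc_size) {
      if (kRegionSize < alloc_size) {
        return 0;                                        // too big: large-object path instead
      }
      return kUsePartialTlabs ? std::max(alloc_size, kPartialTlabSize)
                              : kRegionSize;             // otherwise hand out a whole region
    }
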
/art/dex2oat/linker/
image_writer.cc
3640 region_size_ = gc::space::RegionSpace::kRegionSize; in ImageWriter()