/art/runtime/gc/accounting/ |
D | space_bitmap.cc |
      31 template<size_t kAlignment>
      32 size_t SpaceBitmap<kAlignment>::ComputeBitmapSize(uint64_t capacity) {  in ComputeBitmapSize()
      33 const uint64_t kBytesCoveredPerWord = kAlignment * kBitsPerIntPtrT;  in ComputeBitmapSize()
      37 template<size_t kAlignment>
      38 size_t SpaceBitmap<kAlignment>::ComputeHeapSize(uint64_t bitmap_bytes) {  in ComputeHeapSize()
      39 return bitmap_bytes * kBitsPerByte * kAlignment;  in ComputeHeapSize()
      42 template<size_t kAlignment>
      43 SpaceBitmap<kAlignment>* SpaceBitmap<kAlignment>::CreateFromMemMap(  in CreateFromMemMap()
      51 template<size_t kAlignment>
      52 SpaceBitmap<kAlignment>::SpaceBitmap(const std::string& name, MemMap* mem_map, uintptr_t* bitmap_be…  in SpaceBitmap()
      [all …]
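The bitmap here dedicates one mark bit to every kAlignment bytes of heap, so sizing is pure arithmetic: each bitmap word covers kAlignment * kBitsPerIntPtrT heap bytes. A minimal self-contained sketch of that round trip follows; the constants mirror ART's names but are redefined locally, and the round-up is inlined as plain integer math.

    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    constexpr size_t kAlignment = 8;       // one mark bit per 8 heap bytes
    constexpr size_t kBitsPerByte = 8;
    constexpr size_t kBitsPerIntPtrT = sizeof(intptr_t) * kBitsPerByte;

    // Each bitmap word covers kAlignment * kBitsPerIntPtrT bytes of heap.
    size_t ComputeBitmapSize(uint64_t capacity) {
      const uint64_t kBytesCoveredPerWord = kAlignment * kBitsPerIntPtrT;
      const uint64_t words = (capacity + kBytesCoveredPerWord - 1) / kBytesCoveredPerWord;
      return words * sizeof(intptr_t);
    }

    // Inverse mapping: bitmap bytes -> bitmap bits -> heap bytes covered.
    size_t ComputeHeapSize(uint64_t bitmap_bytes) {
      return bitmap_bytes * kBitsPerByte * kAlignment;
    }

    int main() {
      // A 64 MiB heap needs a 1 MiB bitmap at one bit per 8 bytes.
      assert(ComputeBitmapSize(64 * 1024 * 1024) == 1024 * 1024);
      assert(ComputeHeapSize(ComputeBitmapSize(64 * 1024 * 1024)) == 64 * 1024 * 1024);
    }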
D | space_bitmap-inl.h |
      32 template<size_t kAlignment>
      33 inline bool SpaceBitmap<kAlignment>::AtomicTestAndSet(const mirror::Object* obj) {  in AtomicTestAndSet()
      54 template<size_t kAlignment>
      55 inline bool SpaceBitmap<kAlignment>::Test(const mirror::Object* obj) const {  in Test()
      64 template<size_t kAlignment> template<typename Visitor>
      65 inline void SpaceBitmap<kAlignment>::VisitMarkedRange(uintptr_t visit_begin, uintptr_t visit_end,  in VisitMarkedRange()
      69 for (uintptr_t i = visit_begin; i < visit_end; i += kAlignment) {  in VisitMarkedRange()
      85 const size_t bit_start = (offset_start / kAlignment) % kBitsPerIntPtrT;  in VisitMarkedRange()
      86 const size_t bit_end = (offset_end / kAlignment) % kBitsPerIntPtrT;  in VisitMarkedRange()
      111 mirror::Object* obj = reinterpret_cast<mirror::Object*>(ptr_base + shift * kAlignment);  in VisitMarkedRange()
      [all …]
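AtomicTestAndSet() marks an object by setting its bit with a CAS loop, so concurrent markers never lose updates. A hedged sketch of that pattern, using std::atomic in place of ART's Atomic<uintptr_t> and free functions in place of the member helpers:

    #include <atomic>
    #include <cstddef>
    #include <cstdint>

    constexpr size_t kAlignment = 8;
    constexpr size_t kBitsPerIntPtrT = sizeof(intptr_t) * 8;

    inline size_t OffsetToIndex(uintptr_t offset) {
      return offset / kAlignment / kBitsPerIntPtrT;
    }
    inline uintptr_t OffsetToMask(uintptr_t offset) {
      return static_cast<uintptr_t>(1) << ((offset / kAlignment) % kBitsPerIntPtrT);
    }

    // Returns the previous value of the bit. The CAS retries only when
    // another thread changed the same bitmap word concurrently.
    bool AtomicTestAndSet(std::atomic<uintptr_t>* bitmap_begin,
                          uintptr_t heap_begin, uintptr_t addr) {
      const uintptr_t offset = addr - heap_begin;
      std::atomic<uintptr_t>* word = bitmap_begin + OffsetToIndex(offset);
      const uintptr_t mask = OffsetToMask(offset);
      uintptr_t old_word = word->load(std::memory_order_relaxed);
      do {
        if ((old_word & mask) != 0) {
          return true;  // Already marked; nothing to do.
        }
      } while (!word->compare_exchange_weak(old_word, old_word | mask,
                                            std::memory_order_relaxed));
      return false;
    }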
D | bitmap.cc |
      77 template<size_t kAlignment>
      78 MemoryRangeBitmap<kAlignment>* MemoryRangeBitmap<kAlignment>::Create(  in Create()
      80 CHECK_ALIGNED(cover_begin, kAlignment);  in Create()
      81 CHECK_ALIGNED(cover_end, kAlignment);  in Create()
      82 const size_t num_bits = (cover_end - cover_begin) / kAlignment;  in Create()
      87 template<size_t kAlignment>
      88 MemoryRangeBitmap<kAlignment>* MemoryRangeBitmap<kAlignment>::CreateFromMemMap(  in CreateFromMemMap()
D | space_bitmap.h |
      40 template<size_t kAlignment>
      62 return offset / kAlignment / kBitsPerIntPtrT;  in OffsetToIndex()
      67 return static_cast<T>(index * kAlignment * kBitsPerIntPtrT);  in IndexToOffset()
      72 return (static_cast<size_t>(1)) << ((offset / kAlignment) % kBitsPerIntPtrT);  in OffsetToMask()
      118 for (; visit_begin < visit_end; visit_begin += kAlignment) {  in VisitRange()
      209 static void WalkInstanceFields(SpaceBitmap<kAlignment>* visited, ObjectCallback* callback,
      233 template<size_t kAlignment>
      234 std::ostream& operator << (std::ostream& stream, const SpaceBitmap<kAlignment>& bitmap);
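OffsetToIndex(), IndexToOffset() and OffsetToMask() are the whole addressing scheme: divide a heap offset by kAlignment to get a bit number, then split that into a word index and a bit-within-word mask. A worked example, assuming kAlignment == 8 on a 64-bit build:

    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    int main() {
      constexpr size_t kAlignment = 8;
      constexpr size_t kBitsPerIntPtrT = 64;
      constexpr uintptr_t offset = 0x1238;  // heap offset of some object

      // Bit number 0x1238 / 8 = 583 lands in word 583 / 64 = 9, bit 583 % 64 = 7.
      constexpr size_t index = offset / kAlignment / kBitsPerIntPtrT;  // OffsetToIndex()
      constexpr uintptr_t mask =
          uintptr_t{1} << ((offset / kAlignment) % kBitsPerIntPtrT);   // OffsetToMask()
      assert(index == 9);
      assert(mask == (uintptr_t{1} << 7));

      // IndexToOffset() recovers the first heap offset the word covers.
      assert(index * kAlignment * kBitsPerIntPtrT == 0x1200);
    }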
D | space_bitmap_test.cc |
      113 template <size_t kAlignment>
      127 size_t offset = RoundDown(r.next() % heap_capacity, kAlignment);  in RunTest()
      141 size_t offset = RoundDown(r.next() % heap_capacity, kAlignment);  in RunTest()
      143 size_t end = offset + RoundDown(r.next() % (remain + 1), kAlignment);  in RunTest()
      149 for (uintptr_t k = offset; k < end; k += kAlignment) {  in RunTest()
D | bitmap.h |
      128 template<size_t kAlignment>
      148 const uintptr_t addr = CoverBegin() + bit_index * kAlignment;  in AddrFromBitIndex()
      156 return (addr - CoverBegin()) / kAlignment;  in BitIndexFromAddr()
      182 : Bitmap(mem_map, num_bits), cover_begin_(begin), cover_end_(begin + kAlignment * num_bits) {  in MemoryRangeBitmap()
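AddrFromBitIndex() and BitIndexFromAddr() are exact inverses for kAlignment-aligned addresses inside the covered range. A minimal sketch, with cover_begin standing in for CoverBegin():

    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    int main() {
      constexpr size_t kAlignment = 8;
      const uintptr_t cover_begin = 0x70000000;
      const uintptr_t addr = cover_begin + 5 * kAlignment;

      const uintptr_t bit_index = (addr - cover_begin) / kAlignment;  // BitIndexFromAddr()
      assert(cover_begin + bit_index * kAlignment == addr);           // AddrFromBitIndex()
    }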
D | card_table.h |
      43 template<size_t kAlignment> class SpaceBitmap;
/art/runtime/gc/space/ |
D | bump_pointer_space-inl.h |
      30 num_bytes = RoundUp(num_bytes, kAlignment);  in Alloc()
      47 num_bytes = RoundUp(num_bytes, kAlignment);  in AllocThreadUnsafe()
      66 DCHECK(IsAligned<kAlignment>(num_bytes));  in AllocNonvirtualWithoutAccounting()
      93 *usable_size = RoundUp(num_bytes, kAlignment);  in AllocationSizeNonvirtual()
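Every allocation path here first rounds the request up to kAlignment; the space then bumps its end pointer with a CAS so multiple threads can allocate without a lock. A hedged sketch of both pieces, with end_ and growth_end standing in for the space's members; RoundUp uses the usual power-of-two bit trick (RoundDown, as used in space_bitmap_test.cc above, is just x & ~(n - 1)):

    #include <atomic>
    #include <cstddef>
    #include <cstdint>

    constexpr size_t kAlignment = 8;

    inline size_t RoundUp(size_t x, size_t n) {  // n must be a power of two
      return (x + n - 1) & ~(n - 1);
    }

    // Lock-free bump-pointer allocation: returns the old end on success,
    // nullptr when the space is exhausted.
    uint8_t* AllocNonvirtual(std::atomic<uint8_t*>& end_, uint8_t* growth_end,
                             size_t num_bytes) {
      num_bytes = RoundUp(num_bytes, kAlignment);
      uint8_t* old_end = end_.load(std::memory_order_relaxed);
      uint8_t* new_end;
      do {
        new_end = old_end + num_bytes;
        if (new_end > growth_end) {
          return nullptr;  // Out of space.
        }
        // CAS retries only when another thread bumped end_ first.
      } while (!end_.compare_exchange_weak(old_end, new_end, std::memory_order_relaxed));
      return old_end;
    }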
D | large_object_space.cc |
      261 return AlignSize() * FreeListSpace::kAlignment;  in ByteSize()
      266 DCHECK_ALIGNED(size, FreeListSpace::kAlignment);  in SetByteSize()
      267 alloc_size_ = (size / FreeListSpace::kAlignment) | (free ? kFlagFree : 0u);  in SetByteSize()
      306 return GetPrevFree() * FreeListSpace::kAlignment;  in GetPrevFreeBytes()
      310 DCHECK_ALIGNED(bytes, FreeListSpace::kAlignment);  in SetPrevFreeBytes()
      311 prev_free_ = bytes / FreeListSpace::kAlignment;  in SetPrevFreeBytes()
      350 CHECK_EQ(size % kAlignment, 0U);  in Create()
      364 CHECK_ALIGNED(space_capacity, kAlignment);  in FreeListSpace()
      365 const size_t alloc_info_size = sizeof(AllocationInfo) * (space_capacity / kAlignment);  in FreeListSpace()
      408 DCHECK_ALIGNED(obj, kAlignment);  in Free()
      [all …]
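The SetByteSize()/ByteSize() pair shows why kAlignment matters to the encoding: since every large-object size is a multiple of kAlignment (one page), AllocationInfo can store sizes in kAlignment units and spend a spare bit on the free flag. A sketch of that encoding; the field name mirrors the excerpt, but the flag value is illustrative, not necessarily ART's:

    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    constexpr size_t kAlignment = 4096;         // FreeListSpace::kAlignment == kPageSize
    constexpr uint32_t kFlagFree = 0x80000000;  // illustrative flag bit

    struct AllocationInfo {
      uint32_t alloc_size_;  // size in kAlignment units, plus flag bits

      void SetByteSize(size_t size, bool free) {
        assert(size % kAlignment == 0);
        alloc_size_ = static_cast<uint32_t>(size / kAlignment) | (free ? kFlagFree : 0u);
      }
      size_t ByteSize() const {
        return static_cast<size_t>(alloc_size_ & ~kFlagFree) * kAlignment;
      }
      bool IsFree() const {
        return (alloc_size_ & kFlagFree) != 0;
      }
    };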
D | region_space-inl.h |
      29 num_bytes = RoundUp(num_bytes, kAlignment);  in Alloc()
      46 DCHECK(IsAligned<kAlignment>(num_bytes));  in AllocNonvirtual()
      118 DCHECK(IsAligned<kAlignment>(num_bytes));  in Alloc()
      147 *usable_size = RoundUp(num_bytes, kAlignment);  in AllocationSizeNonvirtual()
      262 return reinterpret_cast<mirror::Object*>(RoundUp(position, kAlignment));  in GetNextObject()
      269 DCHECK(IsAligned<kAlignment>(num_bytes));  in AllocLarge()
D | large_object_space.h |
      161 static constexpr size_t kAlignment = kPageSize;
      177 return (address - reinterpret_cast<uintptr_t>(Begin())) / kAlignment;  in GetSlotIndexForAddress()
      183 return reinterpret_cast<uintptr_t>(Begin()) + slot * kAlignment;  in GetAllocationAddressForSlot()
D | bump_pointer_space.h |
      158 static constexpr size_t kAlignment = 8;  variable
      189 static_assert(sizeof(BlockHeader) % kAlignment == 0,
D | bump_pointer_space.cc |
      93 return reinterpret_cast<mirror::Object*>(RoundUp(position, kAlignment));  in GetNextObject()
      141 bytes = RoundUp(bytes, kAlignment);  in AllocBlock()
D | region_space.cc |
      79 DCHECK(full_region_.Alloc(kAlignment, &ignored, nullptr, &ignored) == nullptr);  in RegionSpace()
D | region_space.h |
      167 static constexpr size_t kAlignment = kObjectAlignment;  variable
/art/runtime/gc/ |
D | heap-inl.h |
      73 byte_count = RoundUp(byte_count, space::BumpPointerSpace::kAlignment);  in AllocObjectWithAllocator()
      235 alloc_size = RoundUp(alloc_size, space::BumpPointerSpace::kAlignment);  in TryToAllocate()
      297 DCHECK_ALIGNED(alloc_size, space::BumpPointerSpace::kAlignment);  in TryToAllocate()
      321 alloc_size = RoundUp(alloc_size, space::RegionSpace::kAlignment);  in TryToAllocate()
      328 DCHECK_ALIGNED(alloc_size, space::RegionSpace::kAlignment);  in TryToAllocate()
/art/runtime/base/ |
D | arena_allocator.h |
      213 bytes = RoundUp(bytes, kAlignment);
      274 static constexpr size_t kAlignment = 8;
/art/runtime/entrypoints/quick/ |
D | quick_alloc_entrypoints.cc |
      39 byte_count = RoundUp(byte_count, gc::space::BumpPointerSpace::kAlignment); \
      66 byte_count = RoundUp(byte_count, gc::space::BumpPointerSpace::kAlignment); \
      92 byte_count = RoundUp(byte_count, gc::space::BumpPointerSpace::kAlignment); \
/art/runtime/gc/collector/ |
D | concurrent_copying.cc |
      975 size_t alloc_size = RoundUp(obj_size, space::RegionSpace::kAlignment);  in operator ()()
      1236 CHECK(IsAligned<space::RegionSpace::kAlignment>(alloc_size));  in AllocateInSkippedBlock()
      1238 size_t min_object_size = RoundUp(sizeof(mirror::Object), space::RegionSpace::kAlignment);  in AllocateInSkippedBlock()
      1255 CHECK(IsAligned<space::RegionSpace::kAlignment>(it->first - alloc_size));  in AllocateInSkippedBlock()
      1266 CHECK(IsAligned<space::RegionSpace::kAlignment>(byte_size));  in AllocateInSkippedBlock()
      1274 CHECK(IsAligned<space::RegionSpace::kAlignment>(byte_size - alloc_size));  in AllocateInSkippedBlock()
      1290 size_t region_space_alloc_size = RoundUp(obj_size, space::RegionSpace::kAlignment);  in Copy()
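AllocateInSkippedBlock() reuses to-space blocks that earlier allocations skipped; the CHECKs above guard one invariant: after carving alloc_size out of a block, the leftover must either vanish or remain a valid, kAlignment-aligned block of at least min_object_size. A hedged reconstruction of that bookkeeping, assuming the skipped blocks live in a multimap keyed by byte size (consistent with the it->first usage in the excerpt):

    #include <cstddef>
    #include <cstdint>
    #include <map>

    constexpr size_t kAlignment = 8;       // space::RegionSpace::kAlignment
    constexpr size_t kMinObjectSize = 16;  // stand-in for RoundUp(sizeof(mirror::Object), kAlignment)

    using SkippedBlocks = std::multimap<size_t, uint8_t*>;  // byte size -> block start

    uint8_t* AllocateInSkippedBlock(SkippedBlocks& blocks, size_t alloc_size) {
      // Smallest block that can hold the request.
      auto it = blocks.lower_bound(alloc_size);
      if (it == blocks.end()) {
        return nullptr;
      }
      // If splitting would leave a sliver smaller than a minimal object,
      // retry with a block big enough that the remainder stays valid.
      if (it->first != alloc_size && it->first < alloc_size + kMinObjectSize) {
        it = blocks.lower_bound(alloc_size + kMinObjectSize);
        if (it == blocks.end()) {
          return nullptr;
        }
      }
      const size_t byte_size = it->first;
      uint8_t* const addr = it->second;
      blocks.erase(it);
      if (byte_size > alloc_size) {
        // Reinsert the aligned remainder as its own skipped block.
        blocks.emplace(byte_size - alloc_size, addr + alloc_size);
      }
      return addr;
    }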
D | mark_compact.cc |
      89 const size_t alloc_size = RoundUp(obj->SizeOf(), space::BumpPointerSpace::kAlignment);  in ForwardObject()
      109 DCHECK_ALIGNED(obj, space::BumpPointerSpace::kAlignment);  in operator ()()
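ForwardObject() assigns each live object its post-compaction address by bumping a cursor through the space in address order, which is why the size must first be rounded to kAlignment. A minimal sketch of just that computation, with obj_size standing in for obj->SizeOf():

    #include <cstddef>
    #include <cstdint>

    constexpr size_t kAlignment = 8;  // space::BumpPointerSpace::kAlignment

    inline size_t RoundUp(size_t x, size_t n) {
      return (x + n - 1) & ~(n - 1);
    }

    // Objects pack densely: each one's forwarding address is the cursor,
    // and the cursor advances by the object's aligned size.
    uintptr_t ForwardObject(uintptr_t* bump_pointer, size_t obj_size) {
      const size_t alloc_size = RoundUp(obj_size, kAlignment);
      const uintptr_t forwarding_address = *bump_pointer;
      *bump_pointer += alloc_size;
      return forwarding_address;
    }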