Searched refs:BytesAllocated (Results 1 – 8 of 8) sorted by relevance
126 size_t ArenaAllocatorStatsImpl<kCount>::BytesAllocated() const { in BytesAllocated() function in art::ArenaAllocatorStatsImpl
145 const size_t bytes_allocated = BytesAllocated(); in Dump()
193 size_t ArenaAllocator::BytesAllocated() const { in BytesAllocated() function in art::ArenaAllocator
194 return ArenaAllocatorStats::BytesAllocated(); in BytesAllocated()
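The art::ArenaAllocatorStatsImpl and art::ArenaAllocator hits show a delegation chain: the allocator forwards BytesAllocated() to a stats base class, and the "return 0u;" header hit further down suggests a no-op variant used when counting is disabled. Below is a minimal sketch of that compile-time toggle under assumed, simplified names; it is not the actual ART source.

// Sketch (assumed names) of the pattern the ArenaAllocatorStatsImpl hits
// suggest: a templated stats mix-in whose "counting disabled" specialization
// compiles away to nothing.
#include <cstddef>
#include <cstdio>

template <bool kCount>
class ArenaAllocatorStatsImpl;

// Counting disabled: every accessor is a constant no-op, matching the
// "size_t BytesAllocated() const { return 0u; }" hit.
template <>
class ArenaAllocatorStatsImpl<false> {
 public:
  void RecordAlloc(size_t /*bytes*/) {}
  size_t BytesAllocated() const { return 0u; }
};

// Counting enabled: keep a running total.
template <>
class ArenaAllocatorStatsImpl<true> {
 public:
  void RecordAlloc(size_t bytes) { bytes_allocated_ += bytes; }
  size_t BytesAllocated() const { return bytes_allocated_; }
 private:
  size_t bytes_allocated_ = 0u;
};

// Assumed compile-time switch; the real build picks the specialization once.
constexpr bool kCountAllocations = true;
using ArenaAllocatorStats = ArenaAllocatorStatsImpl<kCountAllocations>;

// The allocator inherits the stats and forwards, as in the
// "return ArenaAllocatorStats::BytesAllocated();" hit.
class ArenaAllocator : private ArenaAllocatorStats {
 public:
  void* Alloc(size_t bytes) {
    RecordAlloc(bytes);
    return nullptr;  // A real allocator would bump-allocate from an arena here.
  }
  size_t BytesAllocated() const { return ArenaAllocatorStats::BytesAllocated(); }
};

int main() {
  ArenaAllocator allocator;
  allocator.Alloc(64);
  allocator.Alloc(128);
  std::printf("bytes allocated: %zu\n", allocator.BytesAllocated());
}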
74 if (PeakStats()->BytesAllocated() < CurrentStats()->BytesAllocated()) { in UpdatePeakStatsAndRestore()
125 size_t BytesAllocated() const { return 0u; } in BytesAllocated() function
141 size_t BytesAllocated() const;
346 size_t BytesAllocated() const;
59 return PeakStats()->BytesAllocated(); in PeakBytesAllocated()
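The UpdatePeakStatsAndRestore() and PeakBytesAllocated() hits belong to the scoped-arena side: transient allocators come and go, so only the high-water mark of their stats is worth reporting. A hedged sketch of that idea, with invented names and a simplified reset where the real code restores saved stats:

// Sketch (assumed names) of peak tracking over transient allocation stats.
#include <cstddef>

struct AllocationStats {
  size_t bytes_allocated = 0u;
  size_t BytesAllocated() const { return bytes_allocated; }
};

class PeakTrackingStack {
 public:
  void RecordAlloc(size_t bytes) { current_.bytes_allocated += bytes; }

  // Mirrors the "PeakStats()->BytesAllocated() < CurrentStats()->BytesAllocated()"
  // comparison: keep whichever snapshot is larger, then start a fresh one.
  void UpdatePeakStatsAndReset() {
    if (peak_.BytesAllocated() < current_.BytesAllocated()) {
      peak_ = current_;
    }
    current_ = AllocationStats();
  }

  size_t PeakBytesAllocated() const { return peak_.BytesAllocated(); }

 private:
  AllocationStats current_;
  AllocationStats peak_;
};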
137 bytes += r->BytesAllocated(); in GetBytesAllocatedInternal()
141 bytes += r->BytesAllocated(); in GetBytesAllocatedInternal()
146 bytes += r->BytesAllocated(); in GetBytesAllocatedInternal()
151 bytes += r->BytesAllocated(); in GetBytesAllocatedInternal()
489 inline size_t RegionSpace::Region::BytesAllocated() const { in BytesAllocated() function
262 DCHECK_LE(live_bytes_, BytesAllocated()); in ShouldBeEvacuated()
263 const size_t bytes_allocated = RoundUp(BytesAllocated(), kRegionSize); in ShouldBeEvacuated()
366 num_expected_large_tails = RoundUp(r->BytesAllocated(), kRegionSize) / kRegionSize - 1; in SetFromSpace()
512 *cleared_bytes += r->BytesAllocated(); in ClearFromSpace()
519 *cleared_bytes += r->BytesAllocated(); in ClearFromSpace()
949 << (static_cast<float>(live_bytes_) / RoundUp(BytesAllocated(), kRegionSize)); in Dump()
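The ShouldBeEvacuated() and Dump() hits compute a live-byte ratio against BytesAllocated() rounded up to whole regions. A rough sketch of that kind of test follows; kRegionSize, the threshold value, and the RoundUp() helper here are assumed examples, not read from the listing:

// Sketch of a live-percent evacuation check in the spirit of the
// ShouldBeEvacuated() hits. The constants are illustrative only.
#include <cstddef>

constexpr size_t kRegionSize = 256 * 1024;
constexpr size_t kEvacuateLivePercentThreshold = 75u;

constexpr size_t RoundUp(size_t x, size_t n) {
  return ((x + n - 1) / n) * n;
}

struct Region {
  size_t live_bytes;       // Bytes found live by the last marking pass.
  size_t bytes_allocated;  // Bytes handed out from this region.
                           // Invariant from the DCHECK_LE hit: live_bytes <= bytes_allocated.

  size_t BytesAllocated() const { return bytes_allocated; }

  // Evacuate sparsely populated regions: if less than the threshold
  // percentage of the region-aligned allocated bytes is live, copying the
  // survivors elsewhere is cheaper than keeping the whole region.
  bool ShouldBeEvacuated() const {
    const size_t allocated = RoundUp(BytesAllocated(), kRegionSize);
    return live_bytes * 100u < kEvacuateLivePercentThreshold * allocated;
  }
};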
565 DCHECK_LE(live_bytes_, BytesAllocated()); in AddLiveBytes()
577 size_t BytesAllocated() const;
1089 size_t total_allocated = allocator.BytesAllocated() + arena_stack.PeakBytesAllocated(); in Compile()
1418 size_t total_allocated = allocator.BytesAllocated() + arena_stack.PeakBytesAllocated(); in JitCompile()
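The two compiler hits sum the main allocator's cumulative total with the arena stack's peak: the scoped allocators carved out of the stack are gone by the time the number is read, so only their high-water mark is still observable. A sketch of that reporting step under assumed names (threshold, logging):

// Sketch (assumed helper names) of the memory report the Compile()/JitCompile()
// hits suggest: cumulative arena bytes plus the peak of the scoped-arena stack.
#include <cstddef>
#include <cstdio>

// Hypothetical threshold; only unusually expensive compilations get reported.
constexpr size_t kMemoryReportThresholdBytes = 8 * 1024 * 1024;

template <typename Allocator, typename ArenaStack>
void MaybeReportCompilerMemoryUse(const char* method_name,
                                  const Allocator& allocator,
                                  const ArenaStack& arena_stack) {
  // The long-lived allocator reports everything it ever handed out, while the
  // scoped-arena stack only reports its high-water mark, since that memory has
  // already been returned by the time we get here.
  const size_t total_allocated =
      allocator.BytesAllocated() + arena_stack.PeakBytesAllocated();
  if (total_allocated > kMemoryReportThresholdBytes) {
    std::printf("compiling %s used %zu bytes of arena memory\n",
                method_name, total_allocated);
  }
}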