Searched refs:BytesAllocated (Results 1 – 8 of 8) sorted by relevance
125 size_t ArenaAllocatorStatsImpl<kCount>::BytesAllocated() const { in BytesAllocated() function in art::ArenaAllocatorStatsImpl
144 const size_t bytes_allocated = BytesAllocated(); in Dump()
189 size_t ArenaAllocator::BytesAllocated() const { in BytesAllocated() function in art::ArenaAllocator
190 return ArenaAllocatorStats::BytesAllocated(); in BytesAllocated()
74 if (PeakStats()->BytesAllocated() < CurrentStats()->BytesAllocated()) { in UpdatePeakStatsAndRestore()
126 size_t BytesAllocated() const { return 0u; } in BytesAllocated() function
142 size_t BytesAllocated() const;
346 size_t BytesAllocated() const;
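
The hits above suggest a compile-time switch for allocation statistics: a counting BytesAllocated() (line 125) alongside a no-op variant that always returns 0u (line 126). Below is a minimal, self-contained sketch of that pattern, not the ART sources; StatsImpl and RecordAlloc are illustrative names.

#include <cstddef>

// Counting variant: tracks the running total of allocated bytes.
template <bool kCount>
class StatsImpl {
 public:
  void RecordAlloc(size_t bytes) { bytes_allocated_ += bytes; }
  size_t BytesAllocated() const { return bytes_allocated_; }

 private:
  size_t bytes_allocated_ = 0u;
};

// Disabled variant: every call is a no-op and BytesAllocated() is the
// constant 0u seen in the hit at line 126, so the counters cost nothing.
template <>
class StatsImpl<false> {
 public:
  void RecordAlloc(size_t) {}
  size_t BytesAllocated() const { return 0u; }
};
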
59 return PeakStats()->BytesAllocated(); in PeakBytesAllocated()
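The UpdatePeakStatsAndRestore() and PeakBytesAllocated() hits point at a peak-tracking idiom: compare the current stats against the stored peak and promote them when they have grown past it. A hedged sketch under assumed names (PeakTracker, OnAllocation) follows; the "restore" behavior is an assumption for illustration.

#include <cstddef>

class PeakTracker {
 public:
  void OnAllocation(size_t bytes) { current_bytes_ += bytes; }

  // Mirrors the comparison in the hit at line 74: copy current into peak
  // only when it exceeds the previously recorded peak, then reset current
  // for the next scope (assumed restore semantics).
  void UpdatePeakStatsAndRestore() {
    if (peak_bytes_ < current_bytes_) {
      peak_bytes_ = current_bytes_;
    }
    current_bytes_ = 0u;
  }

  // Mirrors the hit at line 59: the peak is what gets reported.
  size_t PeakBytesAllocated() const { return peak_bytes_; }

 private:
  size_t current_bytes_ = 0u;
  size_t peak_bytes_ = 0u;
};
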
135 bytes += r->BytesAllocated(); in GetBytesAllocatedInternal()
139 bytes += r->BytesAllocated(); in GetBytesAllocatedInternal()
144 bytes += r->BytesAllocated(); in GetBytesAllocatedInternal()
149 bytes += r->BytesAllocated(); in GetBytesAllocatedInternal()
487 inline size_t RegionSpace::Region::BytesAllocated() const { in BytesAllocated() function
262 DCHECK_LE(live_bytes_, BytesAllocated()); in ShouldBeEvacuated()
263 const size_t bytes_allocated = RoundUp(BytesAllocated(), kRegionSize); in ShouldBeEvacuated()
366 num_expected_large_tails = RoundUp(r->BytesAllocated(), kRegionSize) / kRegionSize - 1; in SetFromSpace()
512 *cleared_bytes += r->BytesAllocated(); in ClearFromSpace()
519 *cleared_bytes += r->BytesAllocated(); in ClearFromSpace()
958 << (static_cast<float>(live_bytes_) / RoundUp(BytesAllocated(), kRegionSize)); in Dump()
565 DCHECK_LE(live_bytes_, BytesAllocated()); in AddLiveBytes()
577 size_t BytesAllocated() const;
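
The region-space hits (GetBytesAllocatedInternal(), ClearFromSpace()) all accumulate per-region BytesAllocated() values into a single total. A simplified sketch of that accumulation is below; RegionSketch is a stand-in for RegionSpace::Region, not the real type.

#include <cstddef>
#include <vector>

// Stand-in for a region; only the piece relevant to the hits is modeled.
struct RegionSketch {
  size_t bytes_allocated = 0u;
  size_t BytesAllocated() const { return bytes_allocated; }
};

// Walk all regions and sum their allocated bytes, as in the
// "bytes += r->BytesAllocated();" hits at lines 135-149.
size_t SumBytesAllocated(const std::vector<RegionSketch>& regions) {
  size_t bytes = 0u;
  for (const RegionSketch& r : regions) {
    bytes += r.BytesAllocated();
  }
  return bytes;
}
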
1104 size_t total_allocated = allocator.BytesAllocated() + arena_stack.PeakBytesAllocated(); in Compile()
1452 size_t total_allocated = allocator.BytesAllocated() + arena_stack.PeakBytesAllocated(); in JitCompile()
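
The final two hits show how a compilation's memory use is reported: the arena allocator's bytes plus the arena stack's recorded peak. A trivial hedged sketch of that sum, over plain size_t inputs since the allocator types themselves are not shown in these results:

#include <cstddef>

// total_allocated = allocator.BytesAllocated() + arena_stack.PeakBytesAllocated(),
// with the two operands passed in directly for illustration.
size_t TotalAllocatedForCompilation(size_t allocator_bytes,
                                    size_t arena_stack_peak_bytes) {
  return allocator_bytes + arena_stack_peak_bytes;
}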