Lines Matching refs:bytes_allocated

1103 uint64_t bytes_allocated = GetBytesAllocated(); in CalculateGcWeightedAllocatedBytes() local
1105 return weight * bytes_allocated; in CalculateGcWeightedAllocatedBytes()
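
Lines 1103 and 1105 read the current allocation count once and scale it by a weight. Below is a minimal standalone sketch of that shape; the derivation of weight is not in the listing, and GetBytesAllocatedStub() is a hypothetical stand-in for Heap::GetBytesAllocated(), not the ART function.

    #include <cstdint>

    // Hypothetical stand-in for Heap::GetBytesAllocated().
    static uint64_t GetBytesAllocatedStub() {
      return uint64_t{64} * 1024 * 1024;  // pretend 64 MiB are currently allocated
    }

    // Mirrors lines 1103/1105: sample the live allocation count once, then
    // scale it by a weight to produce a GC-scheduling metric.
    uint64_t CalculateGcWeightedAllocatedBytesSketch(double weight) {
      uint64_t bytes_allocated = GetBytesAllocatedStub();
      return static_cast<uint64_t>(weight * bytes_allocated);
    }
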
1724 size_t* bytes_allocated, in AllocateInternalWithGc() argument
1745 mirror::Object* ptr = TryToAllocate<true, false>(self, allocator, alloc_size, bytes_allocated, in AllocateInternalWithGc()
1760 mirror::Object* ptr = TryToAllocate<true, false>(self, allocator, alloc_size, bytes_allocated, in AllocateInternalWithGc()
1781 mirror::Object* ptr = TryToAllocate<true, false>(self, allocator, alloc_size, bytes_allocated, in AllocateInternalWithGc()
1790 mirror::Object* ptr = TryToAllocate<true, true>(self, allocator, alloc_size, bytes_allocated, in AllocateInternalWithGc()
1809 ptr = TryToAllocate<true, true>(self, allocator, alloc_size, bytes_allocated, usable_size, in AllocateInternalWithGc()
1830 ptr = TryToAllocate<true, true>(self, allocator, alloc_size, bytes_allocated, in AllocateInternalWithGc()
1884 ptr = TryToAllocate<true, true>(self, allocator, alloc_size, bytes_allocated, in AllocateInternalWithGc()
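
The AllocateInternalWithGc() hits show an escalating retry ladder: several TryToAllocate<true, false> attempts, then TryToAllocate<true, true> attempts. The sketch below assumes the second template argument toggles whether the attempt may grow the heap; FakeHeap, TryToAllocate and RunGc are simplified stand-ins, not the ART API.

    #include <cstddef>
    #include <cstdint>

    struct FakeHeap {
      size_t footprint = 8 * 1024 * 1024;       // current soft limit (hypothetical)
      size_t max_footprint = 32 * 1024 * 1024;  // hard limit (hypothetical)
      size_t used = 7 * 1024 * 1024;

      // kGrow plays the role of the second template argument in the listing
      // (TryToAllocate<true, false> vs. TryToAllocate<true, true>).
      template <bool kGrow>
      void* TryToAllocate(size_t alloc_size, size_t* bytes_allocated) {
        if (used + alloc_size > footprint) {
          if (!kGrow || used + alloc_size > max_footprint) {
            return nullptr;  // does not fit, and growing is not allowed (or not possible)
          }
          footprint = used + alloc_size;  // grow the soft limit just enough
        }
        used += alloc_size;
        *bytes_allocated = alloc_size;          // report bytes consumed by this request
        return reinterpret_cast<void*>(used);   // placeholder "address"
      }

      void RunGc() { used /= 2; }  // pretend a collection reclaims half the heap
    };

    void* AllocateWithGcSketch(FakeHeap& heap, size_t alloc_size, size_t* bytes_allocated) {
      // 1. Attempts that may not grow the heap, with a collection in between.
      void* ptr = heap.TryToAllocate<false>(alloc_size, bytes_allocated);
      if (ptr == nullptr) {
        heap.RunGc();
        ptr = heap.TryToAllocate<false>(alloc_size, bytes_allocated);
      }
      // 2. Last resort: allow the attempt to expand the heap footprint.
      if (ptr == nullptr) {
        ptr = heap.TryToAllocate<true>(alloc_size, bytes_allocated);
      }
      return ptr;
    }
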
2393 size_t bytes_allocated, dummy; in MarkNonForwardedObject() local
2394 forward_address = to_space_->Alloc(self_, alloc_size, &bytes_allocated, nullptr, &dummy); in MarkNonForwardedObject()
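
Lines 2393-2394 belong to a copying collection: the object being marked gets a fresh slot in the to-space, and that slot becomes its forwarding address; dummy exists only because Alloc() takes out-parameters this caller does not need. A small self-contained sketch of that idea, with a simplified bump-pointer space and a reduced Alloc() signature standing in for the ART to_space_:

    #include <cstddef>
    #include <cstring>
    #include <vector>

    // Simplified stand-in for a bump-pointer to-space (not the ART class).
    struct BumpSpace {
      std::vector<unsigned char> memory = std::vector<unsigned char>(1 << 20);
      size_t pos = 0;

      void* Alloc(size_t alloc_size, size_t* bytes_allocated, size_t* bytes_tl_bulk) {
        if (pos + alloc_size > memory.size()) return nullptr;
        void* result = memory.data() + pos;
        pos += alloc_size;
        *bytes_allocated = alloc_size;
        *bytes_tl_bulk = alloc_size;
        return result;
      }
    };

    // Copy a live object into the to-space and return its forwarding address.
    void* MarkNonForwardedSketch(BumpSpace& to_space, const void* obj, size_t alloc_size) {
      size_t bytes_allocated, dummy;  // required by Alloc(), not used by this caller
      void* forward_address = to_space.Alloc(alloc_size, &bytes_allocated, &dummy);
      if (forward_address != nullptr) {
        std::memcpy(forward_address, obj, alloc_size);
      }
      return forward_address;
    }
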
3639 const size_t bytes_allocated = GetBytesAllocated(); in GrowForUtilization() local
3641 TraceHeapSize(bytes_allocated); in GrowForUtilization()
3651 uint64_t delta = bytes_allocated * (1.0 / GetTargetHeapUtilization() - 1.0); in GrowForUtilization()
3652 DCHECK_LE(delta, std::numeric_limits<size_t>::max()) << "bytes_allocated=" << bytes_allocated in GrowForUtilization()
3654 target_size = bytes_allocated + delta * multiplier; in GrowForUtilization()
3656 static_cast<uint64_t>(bytes_allocated + adjusted_max_free)); in GrowForUtilization()
3658 static_cast<uint64_t>(bytes_allocated + adjusted_min_free)); in GrowForUtilization()
3682 bytes_allocated <= (IsGcConcurrent() ? concurrent_start_bytes_ : target_footprint)) { in GrowForUtilization()
3688 if (bytes_allocated + adjusted_max_free < target_footprint) { in GrowForUtilization()
3689 target_size = bytes_allocated + adjusted_max_free; in GrowForUtilization()
3691 target_size = std::max(bytes_allocated, target_footprint); in GrowForUtilization()
3703 CHECK_GE(bytes_allocated + freed_bytes, bytes_allocated_before_gc); in GrowForUtilization()
3704 const size_t bytes_allocated_during_gc = bytes_allocated + freed_bytes - in GrowForUtilization()
3722 concurrent_start_bytes_ = std::max(target_footprint - remaining_bytes, bytes_allocated); in GrowForUtilization()
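
The GrowForUtilization() hits spell out the sizing rule after a full collection: grow the heap so that bytes_allocated over the new target approaches the target utilization, with the added headroom clamped between adjusted_min_free and adjusted_max_free (lines 3651-3658). A self-contained sketch of just that arithmetic; the example numbers are illustrative, not ART's defaults.

    #include <algorithm>
    #include <cstddef>
    #include <cstdint>

    // Headroom chosen so that bytes_allocated / target_size ~= target_utilization,
    // clamped to [adjusted_min_free, adjusted_max_free] as on lines 3654-3658.
    uint64_t ComputeTargetHeapSize(size_t bytes_allocated,
                                   double target_utilization,
                                   size_t adjusted_min_free,
                                   size_t adjusted_max_free,
                                   double multiplier) {
      // Ideal size is bytes_allocated / u, i.e. bytes_allocated * (1/u - 1) extra.
      uint64_t delta = static_cast<uint64_t>(bytes_allocated * (1.0 / target_utilization - 1.0));
      uint64_t target_size = bytes_allocated + static_cast<uint64_t>(delta * multiplier);
      target_size = std::min(target_size, static_cast<uint64_t>(bytes_allocated) + adjusted_max_free);
      target_size = std::max(target_size, static_cast<uint64_t>(bytes_allocated) + adjusted_min_free);
      return target_size;
    }

    // Example (illustrative numbers): 64 MiB live, 0.75 utilization, multiplier 1.0,
    // clamp [2 MiB, 32 MiB] -> delta = 64/3 ~= 21.3 MiB, target ~= 85.3 MiB.

Line 3722 then arms the concurrent collector to start before that footprint is reached, but never below bytes_allocated itself.
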
4316 size_t* bytes_allocated, in AllocWithNewTLAB() argument
4361 bytes_allocated, in AllocWithNewTLAB()
4371 bytes_allocated, in AllocWithNewTLAB()
4382 bytes_allocated, in AllocWithNewTLAB()
4392 *bytes_allocated = alloc_size; in AllocWithNewTLAB()
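
AllocWithNewTLAB() threads the bytes_allocated out-parameter through to the allocator and, on the path visible at line 4392, reports exactly the requested size. Below is a self-contained sketch of thread-local bump-pointer allocation in that spirit; Tlab and the 256 KiB capacity are assumptions for illustration, not ART's types or values.

    #include <cstddef>
    #include <memory>
    #include <vector>

    // Simplified thread-local allocation buffer (not the ART class).
    struct Tlab {
      std::vector<unsigned char> buffer;
      size_t pos = 0;

      explicit Tlab(size_t capacity) : buffer(capacity) {}

      void* Alloc(size_t alloc_size, size_t* bytes_allocated) {
        if (pos + alloc_size > buffer.size()) {
          return nullptr;  // buffer exhausted; caller must get a new TLAB
        }
        void* result = buffer.data() + pos;
        pos += alloc_size;
        *bytes_allocated = alloc_size;  // report what this request consumed (cf. line 4392)
        return result;
      }
    };

    void* AllocWithNewTlabSketch(std::unique_ptr<Tlab>& tlab, size_t alloc_size,
                                 size_t* bytes_allocated) {
      if (tlab != nullptr) {
        void* ptr = tlab->Alloc(alloc_size, bytes_allocated);
        if (ptr != nullptr) {
          return ptr;  // fast path: bump inside the existing buffer
        }
      }
      // Slow path: retire the current buffer, carve a fresh one, and retry.
      tlab = std::make_unique<Tlab>(/*capacity=*/256 * 1024);
      return tlab->Alloc(alloc_size, bytes_allocated);
    }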