Lines matching refs:bytes (cross-reference hits for the bytes parameter in the arena allocator)
114  void ArenaAllocatorStatsImpl<kCount>::RecordAlloc(size_t bytes, ArenaAllocKind kind) {
115    alloc_stats_[kind] += bytes;
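The two RecordAlloc() hits show the stats side doing nothing more than adding the requested size to a per-kind counter. A minimal sketch of that bookkeeping, assuming a tiny placeholder ArenaAllocKind enum (ART defines many more kinds, and wraps this in a template parameterized on kCount, presumably so the counting can be compiled out):

#include <cstddef>
#include <cstdint>

// Placeholder kinds; the real ArenaAllocKind enum is much longer.
enum ArenaAllocKind : uint8_t {
  kArenaAllocMisc,
  kArenaAllocSTL,
  kNumArenaAllocKinds
};

// Sketch of the counting variant: each allocation adds its byte count to the
// bucket for its kind, mirroring line 115 above.
class ArenaAllocatorStatsSketch {
 public:
  void RecordAlloc(size_t bytes, ArenaAllocKind kind) {
    alloc_stats_[kind] += bytes;
    ++num_allocations_;
  }
  size_t BytesAllocated(ArenaAllocKind kind) const { return alloc_stats_[kind]; }
  size_t NumAllocations() const { return num_allocations_; }

 private:
  size_t alloc_stats_[kNumArenaAllocKinds] = {};
  size_t num_allocations_ = 0u;
};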
223  void* ArenaAllocator::AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind) {
227    size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
236    MEMORY_TOOL_MAKE_DEFINED(ret, bytes);
238    DCHECK(std::all_of(ret, ret + bytes, [](uint8_t val) { return val == 0u; }));
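AllocWithMemoryTool() pads each request with a red zone and rounds the total up to a multiple of 8 (line 227), then marks only the requested bytes as defined (line 236) so the attached memory tool flags any access past them. A minimal sketch of that arithmetic on a bump-pointer region; the constant value, the function names, and the nullptr fallback are assumptions for illustration, and the poisoning itself is left as a comment:

#include <cstddef>
#include <cstdint>

constexpr size_t kRedZoneBytes = 8;  // assumed; ART uses kMemoryToolRedZoneBytes

// Round x up to the next multiple of n (n must be a power of two).
constexpr size_t RoundUp(size_t x, size_t n) {
  return (x + n - 1u) & ~(n - 1u);
}

// Carve `bytes` out of the region [*ptr, end), leaving a trailing red zone.
// Returns nullptr when the region is too small; a full allocator would then
// take a new-arena path like AllocFromNewArena() further down the listing.
uint8_t* AllocWithRedZone(uint8_t** ptr, uint8_t* end, size_t bytes) {
  size_t rounded_bytes = RoundUp(bytes + kRedZoneBytes, 8u);
  if (static_cast<size_t>(end - *ptr) < rounded_bytes) {
    return nullptr;
  }
  uint8_t* ret = *ptr;
  *ptr += rounded_bytes;
  // Under a memory tool, [ret, ret + bytes) would be marked defined and
  // [ret + bytes, ret + rounded_bytes) left inaccessible as the red zone.
  return ret;
}

The DCHECK on line 238 records an invariant visible in the listing itself: a freshly returned block is expected to read back as all zeroes.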
242  void* ArenaAllocator::AllocWithMemoryToolAlign16(size_t bytes, ArenaAllocKind kind) {
246    size_t rounded_bytes = bytes + kMemoryToolRedZoneBytes;
260    MEMORY_TOOL_MAKE_DEFINED(ret, bytes);
262    DCHECK(std::all_of(ret, ret + bytes, [](uint8_t val) { return val == 0u; }));
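AllocWithMemoryToolAlign16() is the same red-zone scheme for callers that need 16-byte-aligned blocks; note that line 246 adds the red zone without another RoundUp. A small, self-contained sketch of aligning a bump pointer up to 16 before carving out the request (the parameter names and the nullptr fallback are illustrative, not the real signature):

#include <cstddef>
#include <cstdint>

// Align the bump pointer up to a 16-byte boundary, then allocate the request
// plus a trailing red zone. Under a memory tool, both the alignment padding
// and the red zone would stay marked inaccessible.
uint8_t* AllocAligned16WithRedZone(uint8_t** ptr, uint8_t* end,
                                   size_t bytes, size_t red_zone_bytes) {
  uintptr_t current = reinterpret_cast<uintptr_t>(*ptr);
  uintptr_t aligned = (current + 15u) & ~static_cast<uintptr_t>(15u);
  size_t padding = static_cast<size_t>(aligned - current);
  size_t rounded_bytes = bytes + red_zone_bytes;  // no extra RoundUp, as on line 246
  if (padding + rounded_bytes > static_cast<size_t>(end - *ptr)) {
    return nullptr;  // a full allocator would fall back to a fresh arena here
  }
  *ptr += padding;
  uint8_t* ret = *ptr;
  *ptr += rounded_bytes;
  return ret;
}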
272  uint8_t* ArenaAllocator::AllocFromNewArena(size_t bytes) {
273    Arena* new_arena = pool_->AllocArena(std::max(arena_allocator::kArenaDefaultSize, bytes));
275    DCHECK_LE(bytes, new_arena->Size());
276    if (static_cast<size_t>(end_ - ptr_) > new_arena->Size() - bytes) {
280      new_arena->bytes_allocated_ = bytes;  // UpdateBytesAllocated() on the new_arena.
290      ptr_ = begin_ + bytes;
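AllocFromNewArena() asks the pool for an arena of at least the default size, or the request itself if that is larger (line 273), and then decides which arena to keep bump-allocating from: line 276 compares the space still free in the current arena against what the new arena would have left after this allocation. A simplified sketch of that decision, assuming a trivial Arena shape and skipping pool ownership and the memory-tool hooks (the real ART types carry considerably more state):

#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <vector>

constexpr size_t kArenaDefaultSize = 128 * 1024;  // assumed default block size

struct Arena {
  explicit Arena(size_t size) : storage(size) {}
  uint8_t* Begin() { return storage.data(); }
  uint8_t* End() { return storage.data() + storage.size(); }
  size_t Size() const { return storage.size(); }
  std::vector<uint8_t> storage;
  size_t bytes_allocated = 0u;
  Arena* next = nullptr;
};

class BumpAllocatorSketch {
 public:
  // Called when the current region [ptr_, end_) cannot satisfy `bytes`.
  // Arenas are deliberately leaked here; the real allocator hands them back
  // to an ArenaPool when it is destroyed.
  uint8_t* AllocFromNewArena(size_t bytes) {
    Arena* new_arena = new Arena(std::max(kArenaDefaultSize, bytes));
    if (arena_head_ != nullptr &&
        static_cast<size_t>(end_ - ptr_) > new_arena->Size() - bytes) {
      // The current arena still has more room left than the new one would
      // after this request, so keep bumping in the current arena and park
      // the new arena (fully accounted to this one request) behind it.
      new_arena->bytes_allocated = bytes;
      new_arena->next = arena_head_->next;
      arena_head_->next = new_arena;
      return new_arena->Begin();
    }
    // Otherwise switch the bump pointer over to the new arena.
    new_arena->next = arena_head_;
    arena_head_ = new_arena;
    begin_ = new_arena->Begin();
    ptr_ = begin_ + bytes;
    end_ = new_arena->End();
    return begin_;
  }

 private:
  Arena* arena_head_ = nullptr;
  uint8_t* begin_ = nullptr;
  uint8_t* ptr_ = nullptr;
  uint8_t* end_ = nullptr;
};

The comparison on line 276 keeps whichever arena will have more usable space left, so one oversized request does not strand a mostly empty current arena.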
296  uint8_t* ArenaAllocator::AllocFromNewArenaWithMemoryTool(size_t bytes) {
297    uint8_t* ret = AllocFromNewArena(bytes);
298    uint8_t* noaccess_begin = ret + bytes;
301    DCHECK(ptr_ - bytes == ret);
307    DCHECK_EQ(bytes, arena_head_->next_->GetBytesAllocated());
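The memory-tool wrapper reuses AllocFromNewArena() and then marks everything past the requested bytes as inaccessible: line 298 computes where the no-access region starts, and the two DCHECKs distinguish the two outcomes above, line 301 matching the case where the new arena became the active bump region (compare line 290) and line 307 the case where it was parked behind the head (compare line 280). A minimal sketch of that tail-poisoning pattern; the helper names are illustrative, and the no-op stand-in would map to ASAN_POISON_MEMORY_REGION or VALGRIND_MAKE_MEM_NOACCESS in a real build:

#include <cstddef>
#include <cstdint>

// Stand-in for ART's MEMORY_TOOL_MAKE_NOACCESS; a real build would poison the
// range via the sanitizer or Valgrind client API instead of doing nothing.
inline void MakeNoAccess(uint8_t* /*begin*/, size_t /*size*/) {}

// After serving `bytes` from a freshly obtained arena, poison everything from
// the end of the returned block to the end of the region the block came from,
// so only the requested bytes remain addressable.
uint8_t* PoisonTailAfterNewArenaAlloc(uint8_t* ret, size_t bytes, uint8_t* region_end) {
  uint8_t* noaccess_begin = ret + bytes;
  MakeNoAccess(noaccess_begin, static_cast<size_t>(region_end - noaccess_begin));
  return ret;
}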