
Lines Matching refs:uptr

90 template <uptr kMaxSizeLog, uptr kMaxNumCachedT, uptr kMaxBytesCachedLog>
92 static const uptr kMinSizeLog = 4;
93 static const uptr kMidSizeLog = kMinSizeLog + 4;
94 static const uptr kMinSize = 1 << kMinSizeLog;
95 static const uptr kMidSize = 1 << kMidSizeLog;
96 static const uptr kMidClass = kMidSize / kMinSize;
97 static const uptr S = 2;
98 static const uptr M = (1 << S) - 1;
101 static const uptr kMaxNumCached = kMaxNumCachedT;
107 uptr count;
111 static const uptr kMaxSize = 1UL << kMaxSizeLog;
112 static const uptr kNumClasses =
115 static const uptr kNumClassesRounded =
120 static uptr Size(uptr class_id) { in Size()
124 uptr t = kMidSize << (class_id >> S); in Size()
128 static uptr ClassID(uptr size) { in ClassID()
132 uptr l = MostSignificantSetBitIndex(size); in ClassID()
133 uptr hbits = (size >> (l - S)) & M; in ClassID()
134 uptr lbits = size & ((1 << (l - S)) - 1); in ClassID()
135 uptr l1 = l - kMidSizeLog; in ClassID()
139 static uptr MaxCached(uptr class_id) { in MaxCached()
141 uptr n = (1UL << kMaxBytesCachedLog) / Size(class_id); in MaxCached()
142 return Max<uptr>(1, Min(kMaxNumCached, n)); in MaxCached()
146 uptr prev_s = 0; in Print()
147 uptr total_cached = 0; in Print()
148 for (uptr i = 0; i < kNumClasses; i++) { in Print()
149 uptr s = Size(i); in Print()
152 uptr d = s - prev_s; in Print()
153 uptr p = prev_s ? (d * 100 / prev_s) : 0; in Print()
154 uptr l = s ? MostSignificantSetBitIndex(s) : 0; in Print()
155 uptr cached = MaxCached(i) * s; in Print()
165 static bool SizeClassRequiresSeparateTransferBatch(uptr class_id) { in SizeClassRequiresSeparateTransferBatch()
167 sizeof(uptr) * (kMaxNumCached - MaxCached(class_id)); in SizeClassRequiresSeparateTransferBatch()
171 for (uptr c = 1; c < kNumClasses; c++) { in Validate()
173 uptr s = Size(c); in Validate()
184 for (uptr s = 1; s <= kMaxSize; s++) { in Validate()
185 uptr c = ClassID(s); in Validate()
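
The Size()/ClassID() lines excerpted above define a two-phase size-class map: classes up to kMidClass are linear multiples of kMinSize, and above that each power-of-two interval is split into 2^S = 4 geometric steps. The following is a minimal, self-contained sketch of how those excerpted lines plausibly fit together; the parts the listing omits (the linear branch, the final return expressions, MostSignificantSetBitIndex) are reconstructions for illustration, not quotes from the header.

#include <cstdint>
#include <cstdio>
typedef uintptr_t uptr;

static const uptr kMinSizeLog = 4;
static const uptr kMidSizeLog = kMinSizeLog + 4;
static const uptr kMinSize  = 1 << kMinSizeLog;        // 16
static const uptr kMidSize  = 1 << kMidSizeLog;        // 256
static const uptr kMidClass = kMidSize / kMinSize;     // 16
static const uptr S = 2;
static const uptr M = (1 << S) - 1;

// Stand-in for the library helper of the same name.
static uptr MostSignificantSetBitIndex(uptr x) {
  uptr i = 0;
  while (x >>= 1) i++;
  return i;
}

// Class -> size: the first kMidClass classes are linear multiples of kMinSize;
// after that each power-of-two interval is split into 2^S = 4 geometric steps.
static uptr Size(uptr class_id) {
  if (class_id <= kMidClass)
    return kMinSize * class_id;
  class_id -= kMidClass;
  uptr t = kMidSize << (class_id >> S);
  return t + (t >> S) * (class_id & M);
}

// Size -> class: round the request up to the smallest class that fits.
// (Assumes the request is within the map's range, i.e. at most kMaxSize.)
static uptr ClassID(uptr size) {
  if (size <= kMidSize)
    return (size + kMinSize - 1) >> kMinSizeLog;
  uptr l = MostSignificantSetBitIndex(size);
  uptr hbits = (size >> (l - S)) & M;
  uptr lbits = size & ((1 << (l - S)) - 1);
  uptr l1 = l - kMidSizeLog;
  return kMidClass + (l1 << S) + hbits + (lbits > 0);
}

int main() {
  const uptr sizes[] = {17, 256, 300, 1000};
  for (uptr s : sizes)
    std::printf("size %4zu -> class %2zu (rounded to %4zu)\n",
                (size_t)s, (size_t)ClassID(s), (size_t)Size(ClassID(s)));
}

For example, a 300-byte request lands in class 17, whose chunk size is 320 bytes: the geometric spacing keeps internal fragmentation below roughly 1/2^S per chunk.
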
206 typedef uptr AllocatorStatCounters[AllocatorStatCount];
216 void Add(AllocatorStat i, uptr v) { in Add()
221 void Sub(AllocatorStat i, uptr v) { in Sub()
226 void Set(AllocatorStat i, uptr v) { in Set()
230 uptr Get(AllocatorStat i) const { in Get()
270 internal_memset(s, 0, AllocatorStatCount * sizeof(uptr)); in Get()
291 void OnMap(uptr p, uptr size) const { } in OnMap()
292 void OnUnmap(uptr p, uptr size) const { } in OnUnmap()
296 typedef void (*ForEachChunkCallback)(uptr chunk, void *arg);
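
The OnMap/OnUnmap signatures and the ForEachChunkCallback typedef excerpted above suggest user-pluggable hooks around the allocator's map/unmap calls. Below is a hypothetical callback matching those signatures, shown only to illustrate the shape of the interface; CountingMapUnmapCallback is not part of the header.

#include <cstdint>
#include <cstdio>
typedef uintptr_t uptr;

// Example hook: keep a running total of bytes the allocator has mapped.
struct CountingMapUnmapCallback {
  static uptr mapped_bytes;
  void OnMap(uptr p, uptr size) const { (void)p; mapped_bytes += size; }
  void OnUnmap(uptr p, uptr size) const { (void)p; mapped_bytes -= size; }
};
uptr CountingMapUnmapCallback::mapped_bytes = 0;

int main() {
  CountingMapUnmapCallback cb;
  cb.OnMap(0x1000, 1 << 16);
  std::printf("mapped: %zu bytes\n", (size_t)CountingMapUnmapCallback::mapped_bytes);
}
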
314 template <const uptr kSpaceBeg, const uptr kSpaceSize,
315 const uptr kMetadataSize, class SizeClassMap,
326 reinterpret_cast<uptr>(MmapNoAccess(kSpaceBeg, kSpaceSize))); in Init()
330 void MapWithCallback(uptr beg, uptr size) { in MapWithCallback()
331 CHECK_EQ(beg, reinterpret_cast<uptr>(MmapFixedOrDie(beg, size))); in MapWithCallback()
335 void UnmapWithCallback(uptr beg, uptr size) { in UnmapWithCallback()
340 static bool CanAllocate(uptr size, uptr alignment) { in CanAllocate()
346 uptr class_id) { in AllocateBatch()
356 NOINLINE void DeallocateBatch(AllocatorStats *stat, uptr class_id, Batch *b) { in DeallocateBatch()
364 return reinterpret_cast<uptr>(p) / kSpaceSize == kSpaceBeg / kSpaceSize; in PointerIsMine()
367 static uptr GetSizeClass(const void *p) { in GetSizeClass()
368 return (reinterpret_cast<uptr>(p) / kRegionSize) % kNumClassesRounded; in GetSizeClass()
372 uptr class_id = GetSizeClass(p); in GetBlockBegin()
373 uptr size = SizeClassMap::Size(class_id); in GetBlockBegin()
375 uptr chunk_idx = GetChunkIdx((uptr)p, size); in GetBlockBegin()
376 uptr reg_beg = (uptr)p & ~(kRegionSize - 1); in GetBlockBegin()
377 uptr beg = chunk_idx * size; in GetBlockBegin()
378 uptr next_beg = beg + size; in GetBlockBegin()
386 static uptr GetActuallyAllocatedSize(void *p) { in GetActuallyAllocatedSize()
391 uptr ClassID(uptr size) { return SizeClassMap::ClassID(size); } in ClassID()
394 uptr class_id = GetSizeClass(p); in GetMetaData()
395 uptr size = SizeClassMap::Size(class_id); in GetMetaData()
396 uptr chunk_idx = GetChunkIdx(reinterpret_cast<uptr>(p), size); in GetMetaData()
401 uptr TotalMemoryUsed() { in TotalMemoryUsed()
402 uptr res = 0; in TotalMemoryUsed()
403 for (uptr i = 0; i < kNumClasses; i++) in TotalMemoryUsed()
414 uptr total_mapped = 0; in PrintStats()
415 uptr n_allocated = 0; in PrintStats()
416 uptr n_freed = 0; in PrintStats()
417 for (uptr class_id = 1; class_id < kNumClasses; class_id++) { in PrintStats()
426 for (uptr class_id = 1; class_id < kNumClasses; class_id++) { in PrintStats()
441 for (uptr i = 0; i < kNumClasses; i++) { in ForceLock()
455 for (uptr class_id = 1; class_id < kNumClasses; class_id++) { in ForEachChunk()
457 uptr chunk_size = SizeClassMap::Size(class_id); in ForEachChunk()
458 uptr region_beg = kSpaceBeg + class_id * kRegionSize; in ForEachChunk()
459 for (uptr chunk = region_beg; in ForEachChunk()
468 static uptr AdditionalSize() { in AdditionalSize()
474 static const uptr kNumClasses = SizeClassMap::kNumClasses;
475 static const uptr kNumClassesRounded = SizeClassMap::kNumClassesRounded;
478 static const uptr kRegionSize = kSpaceSize / kNumClassesRounded;
479 static const uptr kSpaceEnd = kSpaceBeg + kSpaceSize;
485 static const uptr kPopulateSize = 1 << 14;
487 static const uptr kUserMapSize = 1 << 16;
489 static const uptr kMetaMapSize = 1 << 16;
494 uptr allocated_user; // Bytes allocated for user memory.
495 uptr allocated_meta; // Bytes allocated for metadata.
496 uptr mapped_user; // Bytes mapped for user memory.
497 uptr mapped_meta; // Bytes mapped for metadata.
498 uptr n_allocated, n_freed; // Just stats.
502 RegionInfo *GetRegionInfo(uptr class_id) { in GetRegionInfo()
508 static uptr GetChunkIdx(uptr chunk, uptr size) { in GetChunkIdx()
509 uptr offset = chunk % kRegionSize; in GetChunkIdx()
518 uptr class_id, RegionInfo *region) { in PopulateFreeList()
523 uptr size = SizeClassMap::Size(class_id); in PopulateFreeList()
524 uptr count = size < kPopulateSize ? SizeClassMap::MaxCached(class_id) : 1; in PopulateFreeList()
525 uptr beg_idx = region->allocated_user; in PopulateFreeList()
526 uptr end_idx = beg_idx + count * size; in PopulateFreeList()
527 uptr region_beg = kSpaceBeg + kRegionSize * class_id; in PopulateFreeList()
530 uptr map_size = kUserMapSize; in PopulateFreeList()
538 uptr total_count = (region->mapped_user - beg_idx - size) in PopulateFreeList()
542 uptr map_size = kMetaMapSize; in PopulateFreeList()
564 for (uptr i = 0; i < count; i++) in PopulateFreeList()
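
The SizeClassAllocator64 excerpts (GetSizeClass, GetChunkIdx, GetBlockBegin, kRegionSize = kSpaceSize / kNumClassesRounded) imply a fixed virtual range carved into one equal-sized region per size class, so a pointer's class, chunk index, and block beginning follow from arithmetic alone. A sketch of that arithmetic follows; the concrete constants (kSpaceBeg, kSpaceSize, the class count) are illustrative assumptions, not the library's defaults.

#include <cstdint>
#include <cstdio>
typedef uint64_t uptr;   // 64-bit sketch

static const uptr kSpaceBeg  = 0x600000000000ULL;            // illustrative
static const uptr kSpaceSize = 0x040000000000ULL;            // illustrative
static const uptr kNumClassesRounded = 64;
static const uptr kRegionSize = kSpaceSize / kNumClassesRounded; // one region per class

// The class id is recoverable from the address alone: regions are laid out
// consecutively, one per class, inside [kSpaceBeg, kSpaceBeg + kSpaceSize).
static uptr GetSizeClass(uptr p) { return (p / kRegionSize) % kNumClassesRounded; }

// Chunk index inside the class's region (all chunks of one class are equal-sized).
static uptr GetChunkIdx(uptr p, uptr chunk_size) {
  uptr offset = p % kRegionSize;
  return offset / chunk_size;
}

// First byte of the chunk containing p; works because regions are
// kRegionSize-aligned whenever kSpaceBeg is.
static uptr GetBlockBegin(uptr p, uptr chunk_size) {
  uptr reg_beg = p & ~(kRegionSize - 1);
  return reg_beg + GetChunkIdx(p, chunk_size) * chunk_size;
}

int main() {
  uptr chunk_size = 48;
  uptr p = kSpaceBeg + 3 * kRegionSize + 10 * chunk_size + 7; // inside chunk 10 of class 3
  std::printf("class %llu, chunk %llu, begin 0x%llx\n",
              (unsigned long long)GetSizeClass(p),
              (unsigned long long)GetChunkIdx(p, chunk_size),
              (unsigned long long)GetBlockBegin(p, chunk_size));
}

The excerpted GetMetaData() lines use the same chunk index; they suggest the per-chunk metadata lives near the region rather than next to the user chunk, which is why kMetadataSize never perturbs the user-side arithmetic above.
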
586 void set(uptr idx, u8 val) { in set()
591 u8 operator[] (uptr idx) {
614 for (uptr i = 0; i < kSize1; i++) { in TestOnlyUnmap()
617 MapUnmapCallback().OnUnmap(reinterpret_cast<uptr>(p), kSize2); in TestOnlyUnmap()
622 uptr size() const { return kSize1 * kSize2; } in size()
623 uptr size1() const { return kSize1; } in size1()
624 uptr size2() const { return kSize2; } in size2()
626 void set(uptr idx, u8 val) { in set()
633 u8 operator[] (uptr idx) const {
641 u8 *Get(uptr idx) const { in Get()
647 u8 *GetOrCreate(uptr idx) { in GetOrCreate()
653 MapUnmapCallback().OnMap(reinterpret_cast<uptr>(res), kSize2); in GetOrCreate()
654 atomic_store(&map1_[idx], reinterpret_cast<uptr>(res), in GetOrCreate()
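
The set()/operator[]/GetOrCreate() excerpts suggest a two-level byte map: a first-level array of kSize1 pointers to lazily created second-level arrays of kSize2 bytes, so only the index ranges actually touched consume memory. Below is a simplified, single-threaded sketch under that assumption; the real code maps the second level with mmap, invokes the MapUnmapCallback, and publishes the pointer with an atomic store under a mutex.

#include <cstdint>
#include <cstdio>
#include <cstdlib>
typedef uintptr_t uptr;
typedef uint8_t u8;

template <uptr kSize1, uptr kSize2>
class TwoLevelByteMapSketch {
 public:
  void set(uptr idx, u8 val) { *GetOrCreate(idx) = val; }
  u8 operator[](uptr idx) const {
    u8 *p = map1_[idx / kSize2];
    return p ? p[idx % kSize2] : 0;     // untouched second levels read as 0
  }
  uptr size() const { return kSize1 * kSize2; }
 private:
  u8 *GetOrCreate(uptr idx) {
    uptr i = idx / kSize2;
    if (!map1_[i])
      map1_[i] = static_cast<u8*>(calloc(kSize2, 1)); // sketch only: no mmap,
                                                      // no atomics, no mutex
    return &map1_[i][idx % kSize2];
  }
  u8 *map1_[kSize1] = {};
};

int main() {
  static TwoLevelByteMapSketch<1024, 4096> m;  // 4 MiB index space, populated lazily
  m.set(123456, 7);
  std::printf("m[123456]=%d m[0]=%d size=%zu\n", m[123456], m[0], (size_t)m.size());
}
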
685 template <const uptr kSpaceBeg, const u64 kSpaceSize,
686 const uptr kMetadataSize, class SizeClassMap,
687 const uptr kRegionSizeLog,
702 void *MapWithCallback(uptr size) { in MapWithCallback()
705 MapUnmapCallback().OnMap((uptr)res, size); in MapWithCallback()
709 void UnmapWithCallback(uptr beg, uptr size) { in UnmapWithCallback()
714 static bool CanAllocate(uptr size, uptr alignment) { in CanAllocate()
721 uptr mem = reinterpret_cast<uptr>(p); in GetMetaData()
722 uptr beg = ComputeRegionBeg(mem); in GetMetaData()
723 uptr size = SizeClassMap::Size(GetSizeClass(p)); in GetMetaData()
725 uptr n = offset / (u32)size; // 32-bit division in GetMetaData()
726 uptr meta = (beg + kRegionSize) - (n + 1) * kMetadataSize; in GetMetaData()
731 uptr class_id) { in AllocateBatch()
743 NOINLINE void DeallocateBatch(AllocatorStats *stat, uptr class_id, Batch *b) { in DeallocateBatch()
755 uptr GetSizeClass(const void *p) { in GetSizeClass()
756 return possible_regions[ComputeRegionId(reinterpret_cast<uptr>(p))]; in GetSizeClass()
761 uptr mem = reinterpret_cast<uptr>(p); in GetBlockBegin()
762 uptr beg = ComputeRegionBeg(mem); in GetBlockBegin()
763 uptr size = SizeClassMap::Size(GetSizeClass(p)); in GetBlockBegin()
766 uptr res = beg + (n * (u32)size); in GetBlockBegin()
770 uptr GetActuallyAllocatedSize(void *p) { in GetActuallyAllocatedSize()
775 uptr ClassID(uptr size) { return SizeClassMap::ClassID(size); } in ClassID()
777 uptr TotalMemoryUsed() { in TotalMemoryUsed()
779 uptr res = 0; in TotalMemoryUsed()
780 for (uptr i = 0; i < kNumPossibleRegions; i++) in TotalMemoryUsed()
787 for (uptr i = 0; i < kNumPossibleRegions; i++) in TestOnlyUnmap()
795 for (uptr i = 0; i < kNumClasses; i++) { in ForceLock()
809 for (uptr region = 0; region < kNumPossibleRegions; region++) in ForEachChunk()
811 uptr chunk_size = SizeClassMap::Size(possible_regions[region]); in ForEachChunk()
812 uptr max_chunks_in_region = kRegionSize / (chunk_size + kMetadataSize); in ForEachChunk()
813 uptr region_beg = region * kRegionSize; in ForEachChunk()
814 for (uptr chunk = region_beg; in ForEachChunk()
826 static uptr AdditionalSize() { in AdditionalSize()
831 static const uptr kNumClasses = SizeClassMap::kNumClasses;
834 static const uptr kRegionSize = 1 << kRegionSizeLog;
835 static const uptr kNumPossibleRegions = kSpaceSize / kRegionSize;
840 char padding[kCacheLineSize - sizeof(uptr) - sizeof(IntrusiveList<Batch>)];
844 uptr ComputeRegionId(uptr mem) { in ComputeRegionId()
845 uptr res = mem >> kRegionSizeLog; in ComputeRegionId()
850 uptr ComputeRegionBeg(uptr mem) { in ComputeRegionBeg()
854 uptr AllocateRegion(AllocatorStats *stat, uptr class_id) { in AllocateRegion()
856 uptr res = reinterpret_cast<uptr>(MmapAlignedOrDie(kRegionSize, kRegionSize, in AllocateRegion()
865 SizeClassInfo *GetSizeClassInfo(uptr class_id) { in GetSizeClassInfo()
871 SizeClassInfo *sci, uptr class_id) { in PopulateFreeList()
872 uptr size = SizeClassMap::Size(class_id); in PopulateFreeList()
873 uptr reg = AllocateRegion(stat, class_id); in PopulateFreeList()
874 uptr n_chunks = kRegionSize / (size + kMetadataSize); in PopulateFreeList()
875 uptr max_count = SizeClassMap::MaxCached(class_id); in PopulateFreeList()
877 for (uptr i = reg; i < reg + n_chunks * size; i += size) { in PopulateFreeList()
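
For the 32-bit allocator, the GetMetaData()/GetBlockBegin() excerpts show each region of 1 << kRegionSizeLog bytes holding chunks of one class packed from the front, with per-chunk metadata packed backwards from the region's end. A sketch of that pointer math is below; kRegionSizeLog and kMetadataSize are illustrative values, and the helpers mirror but do not quote the header.

#include <cstdint>
#include <cstdio>
typedef uintptr_t uptr;

static const uptr kRegionSizeLog = 20;                 // illustrative: 1 MiB regions
static const uptr kRegionSize = 1UL << kRegionSizeLog;
static const uptr kMetadataSize = 16;                  // illustrative

static uptr ComputeRegionBeg(uptr mem) { return mem & ~(kRegionSize - 1); }

// Chunks of one size class fill the region from the front...
static uptr GetBlockBegin(uptr mem, uptr chunk_size) {
  uptr beg = ComputeRegionBeg(mem);
  uptr n = (mem - beg) / chunk_size;                   // chunk index within the region
  return beg + n * chunk_size;
}

// ...and their metadata records fill it backwards from the top, as in the
// excerpted line "meta = (beg + kRegionSize) - (n + 1) * kMetadataSize".
static uptr GetMetaData(uptr mem, uptr chunk_size) {
  uptr beg = ComputeRegionBeg(mem);
  uptr n = (mem - beg) / chunk_size;
  return (beg + kRegionSize) - (n + 1) * kMetadataSize;
}

int main() {
  uptr mem = (5UL << 20) + 300, chunk_size = 48;       // a pointer into region 5
  std::printf("block 0x%zx meta 0x%zx\n",
              (size_t)GetBlockBegin(mem, chunk_size),
              (size_t)GetMetaData(mem, chunk_size));
}

This layout also explains the excerpted n_chunks = kRegionSize / (size + kMetadataSize): each chunk effectively pays for its own metadata slot at the opposite end of the region.
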
908 static const uptr kNumClasses = SizeClassAllocator::kNumClasses;
922 void *Allocate(SizeClassAllocator *allocator, uptr class_id) { in Allocate()
934 void Deallocate(SizeClassAllocator *allocator, uptr class_id, void *p) { in Deallocate()
949 for (uptr class_id = 0; class_id < kNumClasses; class_id++) { in Drain()
960 uptr count;
961 uptr max_count;
970 for (uptr i = 0; i < kNumClasses; i++) { in InitCache()
976 NOINLINE void Refill(SizeClassAllocator *allocator, uptr class_id) { in Refill()
981 for (uptr i = 0; i < b->count; i++) in Refill()
988 NOINLINE void Drain(SizeClassAllocator *allocator, uptr class_id) { in Drain()
996 uptr cnt = Min(c->max_count / 2, c->count); in Drain()
997 for (uptr i = 0; i < cnt; i++) { in Drain()
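
The SizeClassAllocatorLocalCache excerpts (count, max_count, Refill, Drain) suggest a per-thread, per-class cache that trades whole batches with the shared allocator: Refill() pulls up to MaxCached chunks when the cache runs dry, and Drain() returns half of them when it overflows, mirroring "cnt = Min(max_count / 2, count)". A toy sketch under those assumptions, with a plain vector standing in for the shared allocator's batch interface:

#include <cstdint>
#include <cstdio>
#include <vector>
#include <algorithm>
typedef uintptr_t uptr;

// Stands in for the shared SizeClassAllocator's Allocate/DeallocateBatch.
static std::vector<void*> shared_free_list;

struct PerClass {
  uptr count = 0;
  uptr max_count = 8;        // stands in for SizeClassMap::MaxCached(class_id)
  void *chunks[16];          // illustrative capacity
};

// Pull a batch of free chunks from the shared allocator into the thread cache.
static void Refill(PerClass *c) {
  while (c->count < c->max_count && !shared_free_list.empty()) {
    c->chunks[c->count++] = shared_free_list.back();
    shared_free_list.pop_back();
  }
}

// Push half of the cached chunks back in one go when the cache overflows.
static void Drain(PerClass *c) {
  uptr cnt = std::min(c->max_count / 2, c->count);
  for (uptr i = 0; i < cnt; i++)
    shared_free_list.push_back(c->chunks[--c->count]);
}

int main() {
  static char backing[16][64];
  for (auto &b : backing) shared_free_list.push_back(b);
  PerClass c;
  Refill(&c);
  std::printf("cached %zu, shared %zu\n", (size_t)c.count, shared_free_list.size());
  Drain(&c);
  std::printf("cached %zu, shared %zu\n", (size_t)c.count, shared_free_list.size());
}
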
1024 void *Allocate(AllocatorStats *stat, uptr size, uptr alignment) { in Allocate()
1026 uptr map_size = RoundUpMapSize(size); in Allocate()
1032 uptr map_beg = reinterpret_cast<uptr>( in Allocate()
1036 uptr map_end = map_beg + map_size; in Allocate()
1037 uptr res = map_beg + page_size_; in Allocate()
1048 uptr size_log = MostSignificantSetBitIndex(map_size); in Allocate()
1052 uptr idx = n_chunks_++; in Allocate()
1081 uptr idx = h->chunk_idx; in Deallocate()
1097 uptr TotalMemoryUsed() { in TotalMemoryUsed()
1099 uptr res = 0; in TotalMemoryUsed()
1100 for (uptr i = 0; i < n_chunks_; i++) { in TotalMemoryUsed()
1112 uptr GetActuallyAllocatedSize(void *p) { in GetActuallyAllocatedSize()
1119 if (!IsAligned(reinterpret_cast<uptr>(p), page_size_)) { in GetMetaData()
1121 CHECK(IsAligned(reinterpret_cast<uptr>(p), page_size_)); in GetMetaData()
1127 uptr p = reinterpret_cast<uptr>(ptr); in GetBlockBegin()
1129 uptr nearest_chunk = 0; in GetBlockBegin()
1131 for (uptr i = 0; i < n_chunks_; i++) { in GetBlockBegin()
1132 uptr ch = reinterpret_cast<uptr>(chunks_[i]); in GetBlockBegin()
1152 uptr p = reinterpret_cast<uptr>(ptr); in GetBlockBeginFastLocked()
1153 uptr n = n_chunks_; in GetBlockBeginFastLocked()
1157 SortArray(reinterpret_cast<uptr*>(chunks_), n); in GetBlockBeginFastLocked()
1158 for (uptr i = 0; i < n; i++) in GetBlockBeginFastLocked()
1161 min_mmap_ = reinterpret_cast<uptr>(chunks_[0]); in GetBlockBeginFastLocked()
1162 max_mmap_ = reinterpret_cast<uptr>(chunks_[n - 1]) + in GetBlockBeginFastLocked()
1167 uptr beg = 0, end = n - 1; in GetBlockBeginFastLocked()
1171 uptr mid = (beg + end) / 2; // Invariant: mid >= beg + 1 in GetBlockBeginFastLocked()
1172 if (p < reinterpret_cast<uptr>(chunks_[mid])) in GetBlockBeginFastLocked()
1181 if (p >= reinterpret_cast<uptr>(chunks_[end])) in GetBlockBeginFastLocked()
1196 for (uptr i = 0; i < ARRAY_SIZE(stats.by_size_log); i++) { in PrintStats()
1197 uptr c = stats.by_size_log[i]; in PrintStats()
1217 for (uptr i = 0; i < n_chunks_; i++) in ForEachChunk()
1218 callback(reinterpret_cast<uptr>(GetUser(chunks_[i])), arg); in ForEachChunk()
1224 uptr map_beg;
1225 uptr map_size;
1226 uptr size;
1227 uptr chunk_idx;
1230 Header *GetHeader(uptr p) { in GetHeader()
1235 return GetHeader(reinterpret_cast<uptr>(p)); in GetHeader()
1239 CHECK(IsAligned((uptr)h, page_size_)); in GetUser()
1240 return reinterpret_cast<void*>(reinterpret_cast<uptr>(h) + page_size_); in GetUser()
1243 uptr RoundUpMapSize(uptr size) { in RoundUpMapSize()
1247 uptr page_size_;
1249 uptr n_chunks_;
1250 uptr min_mmap_, max_mmap_;
1253 uptr n_allocs, n_frees, currently_allocated, max_allocated, by_size_log[64];
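
GetBlockBeginFastLocked() in the LargeMmapAllocator excerpts sorts the chunk array and then binary-searches for the greatest chunk start not above the queried pointer (the beg/end/mid loop above). A sketch of that search over plain addresses follows; FindChunkIndex is a made-up name for illustration.

#include <cstdint>
#include <cstdio>
typedef uintptr_t uptr;

// Index of the last element of sorted chunks[0..n) that is <= p, or (uptr)-1
// if p lies below the first chunk.
static uptr FindChunkIndex(const uptr *chunks, uptr n, uptr p) {
  if (n == 0 || p < chunks[0]) return (uptr)-1;
  uptr beg = 0, end = n - 1;
  while (end - beg >= 2) {
    uptr mid = (beg + end) / 2;   // invariant: chunks[beg] <= p
    if (p < chunks[mid])
      end = mid;                  // answer lies in [beg, mid)
    else
      beg = mid;                  // answer lies in [mid, end]
  }
  return (p >= chunks[end]) ? end : beg;
}

int main() {
  const uptr chunks[] = {0x1000, 0x5000, 0x9000, 0x20000};
  std::printf("chunk for 0x9abc: index %zu\n",
              (size_t)FindChunkIndex(chunks, 4, 0x9abc));   // expect 2
}
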
1286 void *Allocate(AllocatorCache *cache, uptr size, uptr alignment,
1304 CHECK_EQ(reinterpret_cast<uptr>(res) & (alignment - 1), 0);
1342 void *Reallocate(AllocatorCache *cache, void *p, uptr new_size, in Reallocate()
1343 uptr alignment) { in Reallocate()
1351 uptr old_size = GetActuallyAllocatedSize(p); in Reallocate()
1352 uptr memcpy_size = Min(new_size, old_size); in Reallocate()
1390 uptr GetActuallyAllocatedSize(void *p) { in GetActuallyAllocatedSize()
1396 uptr TotalMemoryUsed() { in TotalMemoryUsed()
1451 bool CallocShouldReturnNullDueToOverflow(uptr size, uptr n);
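
Only the declaration of CallocShouldReturnNullDueToOverflow is excerpted above; the point of such a check is to detect that n * size would wrap before the allocation is attempted. A sketch of the usual division-based test, offered as an assumption about the implementation:

#include <cstdint>
#include <cstdio>
typedef uintptr_t uptr;

// calloc(n, size) must fail if n * size wraps; dividing avoids the overflow itself.
bool CallocShouldReturnNullDueToOverflow(uptr size, uptr n) {
  if (size == 0) return false;                // 0 * n can never overflow
  uptr max = static_cast<uptr>(-1);           // UINTPTR_MAX
  return n > max / size;                      // true iff n * size > max
}

int main() {
  uptr big = (uptr)1 << (8 * sizeof(uptr) - 8);   // huge on 32- and 64-bit alike
  std::printf("%d %d\n",
              CallocShouldReturnNullDueToOverflow(8, 10),   // 0: fits
              CallocShouldReturnNullDueToOverflow(big, big)); // 1: would wrap
}
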