Lines Matching refs:uptr
87 template <uptr kMaxSizeLog, uptr kMaxNumCachedT, uptr kMaxBytesCachedLog>
89 static const uptr kMinSizeLog = 4;
90 static const uptr kMidSizeLog = kMinSizeLog + 4;
91 static const uptr kMinSize = 1 << kMinSizeLog;
92 static const uptr kMidSize = 1 << kMidSizeLog;
93 static const uptr kMidClass = kMidSize / kMinSize;
94 static const uptr S = 2;
95 static const uptr M = (1 << S) - 1;
98 static const uptr kMaxNumCached = kMaxNumCachedT;
104 uptr count;
108 static const uptr kMaxSize = 1UL << kMaxSizeLog;
109 static const uptr kNumClasses =
112 static const uptr kNumClassesRounded =
117 static uptr Size(uptr class_id) { in Size()
121 uptr t = kMidSize << (class_id >> S); in Size()
125 static uptr ClassID(uptr size) { in ClassID()
129 uptr l = MostSignificantSetBitIndex(size); in ClassID()
130 uptr hbits = (size >> (l - S)) & M; in ClassID()
131 uptr lbits = size & ((1 << (l - S)) - 1); in ClassID()
132 uptr l1 = l - kMidSizeLog; in ClassID()
136 static uptr MaxCached(uptr class_id) { in MaxCached()
138 uptr n = (1UL << kMaxBytesCachedLog) / Size(class_id); in MaxCached()
139 return Max<uptr>(1, Min(kMaxNumCached, n)); in MaxCached()
143 uptr prev_s = 0; in Print()
144 uptr total_cached = 0; in Print()
145 for (uptr i = 0; i < kNumClasses; i++) { in Print()
146 uptr s = Size(i); in Print()
149 uptr d = s - prev_s; in Print()
150 uptr p = prev_s ? (d * 100 / prev_s) : 0; in Print()
151 uptr l = s ? MostSignificantSetBitIndex(s) : 0; in Print()
152 uptr cached = MaxCached(i) * s; in Print()
162 static bool SizeClassRequiresSeparateTransferBatch(uptr class_id) { in SizeClassRequiresSeparateTransferBatch()
164 sizeof(uptr) * (kMaxNumCached - MaxCached(class_id)); in SizeClassRequiresSeparateTransferBatch()
168 for (uptr c = 1; c < kNumClasses; c++) { in Validate()
170 uptr s = Size(c); in Validate()
181 for (uptr s = 1; s <= kMaxSize; s++) { in Validate()
182 uptr c = ClassID(s); in Validate()
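
The SizeClassMap fragments above define a two-level mapping between allocation sizes and class ids: classes up to kMidClass are multiples of kMinSize, and each larger power of two is split into 1 << S steps. Below is a minimal standalone sketch of that mapping, assuming an example template parameter (kMaxSizeLog = 17), re-implementing MostSignificantSetBitIndex locally, and reconstructing the return expressions that the matching lines do not show.

#include <cstdint>
#include <cstdio>

typedef uintptr_t uptr;

static const uptr kMinSizeLog = 4;
static const uptr kMidSizeLog = kMinSizeLog + 4;
static const uptr kMinSize = 1 << kMinSizeLog;    // 16
static const uptr kMidSize = 1 << kMidSizeLog;    // 256
static const uptr kMidClass = kMidSize / kMinSize;
static const uptr S = 2;
static const uptr M = (1 << S) - 1;
static const uptr kMaxSizeLog = 17;               // example template parameter
static const uptr kMaxSize = 1UL << kMaxSizeLog;

// Stand-in for the sanitizer helper of the same name.
static uptr MostSignificantSetBitIndex(uptr x) {
  uptr i = 0;
  while (x >>= 1) i++;
  return i;
}

// Classes 1..kMidClass are multiples of kMinSize; above kMidSize each power
// of two is split into 1 << S geometrically growing steps.
static uptr Size(uptr class_id) {
  if (class_id <= kMidClass)
    return kMinSize * class_id;
  class_id -= kMidClass;
  uptr t = kMidSize << (class_id >> S);
  return t + (t >> S) * (class_id & M);
}

static uptr ClassID(uptr size) {
  if (size <= kMidSize)
    return (size + kMinSize - 1) >> kMinSizeLog;
  if (size > kMaxSize) return 0;
  uptr l = MostSignificantSetBitIndex(size);
  uptr hbits = (size >> (l - S)) & M;
  uptr lbits = size & ((1 << (l - S)) - 1);
  uptr l1 = l - kMidSizeLog;
  return kMidClass + (l1 << S) + hbits + (lbits != 0);
}

int main() {
  // Round-trip check in the spirit of the Validate() fragments: every size
  // must map to a class whose Size() is at least that size.
  for (uptr s = 1; s <= kMaxSize; s++) {
    uptr c = ClassID(s);
    if (c == 0 || Size(c) < s) {
      printf("mismatch at %zu\n", (size_t)s);
      return 1;
    }
  }
  printf("300 -> class %zu -> %zu bytes\n",
         (size_t)ClassID(300), (size_t)Size(ClassID(300)));
  return 0;
}

With these example parameters, a 300-byte request maps to class 17, whose chunk size is 320 bytes.
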
278 void OnMap(uptr p, uptr size) const { } in OnMap()
279 void OnUnmap(uptr p, uptr size) const { } in OnUnmap()
283 typedef void (*ForEachChunkCallback)(uptr chunk, void *arg);
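
The OnMap/OnUnmap and ForEachChunkCallback fragments are the two hook points the allocators expose. A small sketch of their shapes follows; the counting visitor is made up for illustration and is not part of the original header.

#include <cstdint>
#include <cstdio>

typedef uintptr_t uptr;

// Default no-op hook, matching the OnMap/OnUnmap shape above.
struct NoOpMapUnmapCallback {
  void OnMap(uptr p, uptr size) const { (void)p; (void)size; }
  void OnUnmap(uptr p, uptr size) const { (void)p; (void)size; }
};

typedef void (*ForEachChunkCallback)(uptr chunk, void *arg);

// Example visitor (hypothetical): counts the chunks it is shown.
static void CountChunk(uptr chunk, void *arg) {
  (void)chunk;
  ++*static_cast<uptr *>(arg);
}

int main() {
  uptr n = 0;
  ForEachChunkCallback cb = CountChunk;
  cb(0x1000, &n);   // an allocator's ForEachChunk() would call this per chunk
  printf("visited %zu chunk(s)\n", (size_t)n);
  return 0;
}
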
301 template <const uptr kSpaceBeg, const uptr kSpaceSize,
302 const uptr kMetadataSize, class SizeClassMap,
313 reinterpret_cast<uptr>(Mprotect(kSpaceBeg, kSpaceSize))); in Init()
317 void MapWithCallback(uptr beg, uptr size) { in MapWithCallback()
318 CHECK_EQ(beg, reinterpret_cast<uptr>(MmapFixedOrDie(beg, size))); in MapWithCallback()
322 void UnmapWithCallback(uptr beg, uptr size) { in UnmapWithCallback()
327 static bool CanAllocate(uptr size, uptr alignment) { in CanAllocate()
333 uptr class_id) { in AllocateBatch()
343 NOINLINE void DeallocateBatch(AllocatorStats *stat, uptr class_id, Batch *b) { in DeallocateBatch()
351 return reinterpret_cast<uptr>(p) / kSpaceSize == kSpaceBeg / kSpaceSize; in PointerIsMine()
354 static uptr GetSizeClass(const void *p) { in GetSizeClass()
355 return (reinterpret_cast<uptr>(p) / kRegionSize) % kNumClassesRounded; in GetSizeClass()
359 uptr class_id = GetSizeClass(p); in GetBlockBegin()
360 uptr size = SizeClassMap::Size(class_id); in GetBlockBegin()
362 uptr chunk_idx = GetChunkIdx((uptr)p, size); in GetBlockBegin()
363 uptr reg_beg = (uptr)p & ~(kRegionSize - 1); in GetBlockBegin()
364 uptr beg = chunk_idx * size; in GetBlockBegin()
365 uptr next_beg = beg + size; in GetBlockBegin()
373 static uptr GetActuallyAllocatedSize(void *p) { in GetActuallyAllocatedSize()
378 uptr ClassID(uptr size) { return SizeClassMap::ClassID(size); } in ClassID()
381 uptr class_id = GetSizeClass(p); in GetMetaData()
382 uptr size = SizeClassMap::Size(class_id); in GetMetaData()
383 uptr chunk_idx = GetChunkIdx(reinterpret_cast<uptr>(p), size); in GetMetaData()
388 uptr TotalMemoryUsed() { in TotalMemoryUsed()
389 uptr res = 0; in TotalMemoryUsed()
390 for (uptr i = 0; i < kNumClasses; i++) in TotalMemoryUsed()
401 uptr total_mapped = 0; in PrintStats()
402 uptr n_allocated = 0; in PrintStats()
403 uptr n_freed = 0; in PrintStats()
404 for (uptr class_id = 1; class_id < kNumClasses; class_id++) { in PrintStats()
413 for (uptr class_id = 1; class_id < kNumClasses; class_id++) { in PrintStats()
428 for (uptr i = 0; i < kNumClasses; i++) { in ForceLock()
442 for (uptr class_id = 1; class_id < kNumClasses; class_id++) { in ForEachChunk()
444 uptr chunk_size = SizeClassMap::Size(class_id); in ForEachChunk()
445 uptr region_beg = kSpaceBeg + class_id * kRegionSize; in ForEachChunk()
446 for (uptr chunk = region_beg; in ForEachChunk()
456 static const uptr kNumClasses = SizeClassMap::kNumClasses;
457 static const uptr kNumClassesRounded = SizeClassMap::kNumClassesRounded;
460 static const uptr kRegionSize = kSpaceSize / kNumClassesRounded;
461 static const uptr kSpaceEnd = kSpaceBeg + kSpaceSize;
467 static const uptr kPopulateSize = 1 << 14;
469 static const uptr kUserMapSize = 1 << 16;
471 static const uptr kMetaMapSize = 1 << 16;
476 uptr allocated_user; // Bytes allocated for user memory.
477 uptr allocated_meta; // Bytes allocated for metadata.
478 uptr mapped_user; // Bytes mapped for user memory.
479 uptr mapped_meta; // Bytes mapped for metadata.
480 uptr n_allocated, n_freed; // Just stats.
484 static uptr AdditionalSize() { in AdditionalSize()
489 RegionInfo *GetRegionInfo(uptr class_id) { in GetRegionInfo()
495 static uptr GetChunkIdx(uptr chunk, uptr size) { in GetChunkIdx()
496 uptr offset = chunk % kRegionSize; in GetChunkIdx()
505 uptr class_id, RegionInfo *region) { in PopulateFreeList()
510 uptr size = SizeClassMap::Size(class_id); in PopulateFreeList()
511 uptr count = size < kPopulateSize ? SizeClassMap::MaxCached(class_id) : 1; in PopulateFreeList()
512 uptr beg_idx = region->allocated_user; in PopulateFreeList()
513 uptr end_idx = beg_idx + count * size; in PopulateFreeList()
514 uptr region_beg = kSpaceBeg + kRegionSize * class_id; in PopulateFreeList()
517 uptr map_size = kUserMapSize; in PopulateFreeList()
525 uptr total_count = (region->mapped_user - beg_idx - size) in PopulateFreeList()
529 uptr map_size = kMetaMapSize; in PopulateFreeList()
551 for (uptr i = 0; i < count; i++) in PopulateFreeList()
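
The SizeClassAllocator64 fragments recover the size class and the chunk boundary from a raw pointer with pure address arithmetic: the space is split into kNumClassesRounded equal regions, one region per class. A sketch of that arithmetic follows; the base address, space size and class count are illustrative values, not the production layout, and a 64-bit uptr is assumed.

#include <cstdint>
#include <cstdio>

typedef uint64_t uptr;   // the 64-bit allocator assumes a 64-bit address space

static const uptr kSpaceBeg = 0x600000000000ULL;   // example base address
static const uptr kSpaceSize = 0x10000000000ULL;   // example 1 TiB space
static const uptr kNumClassesRounded = 64;         // example, power of two
static const uptr kRegionSize = kSpaceSize / kNumClassesRounded;

static bool PointerIsMine(uptr p) {
  return p / kSpaceSize == kSpaceBeg / kSpaceSize;
}

// Region i belongs to size class i, so the class id is recoverable from the
// address alone.
static uptr GetSizeClass(uptr p) {
  return (p / kRegionSize) % kNumClassesRounded;
}

static uptr GetChunkIdx(uptr chunk, uptr size) {
  uptr offset = chunk % kRegionSize;   // offset inside this class's region
  return offset / size;
}

static uptr GetBlockBegin(uptr p, uptr chunk_size) {
  uptr reg_beg = p & ~(kRegionSize - 1);   // kRegionSize is a power of two
  return reg_beg + GetChunkIdx(p, chunk_size) * chunk_size;
}

int main() {
  uptr class_id = 3, chunk_size = 48;        // hypothetical size class
  uptr region_beg = kSpaceBeg + class_id * kRegionSize;
  uptr p = region_beg + 5 * chunk_size + 7;  // pointer into the 6th chunk
  printf("mine=%d class=%llu idx=%llu begin=+%llu\n",
         (int)PointerIsMine(p),
         (unsigned long long)GetSizeClass(p),
         (unsigned long long)GetChunkIdx(p, chunk_size),
         (unsigned long long)(GetBlockBegin(p, chunk_size) - region_beg));
  return 0;
}
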
573 void set(uptr idx, u8 val) { in set()
578 u8 operator[] (uptr idx) {
609 template <const uptr kSpaceBeg, const u64 kSpaceSize,
610 const uptr kMetadataSize, class SizeClassMap,
611 const uptr kRegionSizeLog,
626 void *MapWithCallback(uptr size) { in MapWithCallback()
629 MapUnmapCallback().OnMap((uptr)res, size); in MapWithCallback()
633 void UnmapWithCallback(uptr beg, uptr size) { in UnmapWithCallback()
638 static bool CanAllocate(uptr size, uptr alignment) { in CanAllocate()
645 uptr mem = reinterpret_cast<uptr>(p); in GetMetaData()
646 uptr beg = ComputeRegionBeg(mem); in GetMetaData()
647 uptr size = SizeClassMap::Size(GetSizeClass(p)); in GetMetaData()
649 uptr n = offset / (u32)size; // 32-bit division in GetMetaData()
650 uptr meta = (beg + kRegionSize) - (n + 1) * kMetadataSize; in GetMetaData()
655 uptr class_id) { in AllocateBatch()
667 NOINLINE void DeallocateBatch(AllocatorStats *stat, uptr class_id, Batch *b) { in DeallocateBatch()
679 uptr GetSizeClass(const void *p) { in GetSizeClass()
680 return possible_regions[ComputeRegionId(reinterpret_cast<uptr>(p))]; in GetSizeClass()
685 uptr mem = reinterpret_cast<uptr>(p); in GetBlockBegin()
686 uptr beg = ComputeRegionBeg(mem); in GetBlockBegin()
687 uptr size = SizeClassMap::Size(GetSizeClass(p)); in GetBlockBegin()
690 uptr res = beg + (n * (u32)size); in GetBlockBegin()
694 uptr GetActuallyAllocatedSize(void *p) { in GetActuallyAllocatedSize()
699 uptr ClassID(uptr size) { return SizeClassMap::ClassID(size); } in ClassID()
701 uptr TotalMemoryUsed() { in TotalMemoryUsed()
703 uptr res = 0; in TotalMemoryUsed()
704 for (uptr i = 0; i < kNumPossibleRegions; i++) in TotalMemoryUsed()
711 for (uptr i = 0; i < kNumPossibleRegions; i++) in TestOnlyUnmap()
719 for (uptr i = 0; i < kNumClasses; i++) { in ForceLock()
733 for (uptr region = 0; region < kNumPossibleRegions; region++) in ForEachChunk()
735 uptr chunk_size = SizeClassMap::Size(possible_regions[region]); in ForEachChunk()
736 uptr max_chunks_in_region = kRegionSize / (chunk_size + kMetadataSize); in ForEachChunk()
737 uptr region_beg = region * kRegionSize; in ForEachChunk()
738 for (uptr chunk = region_beg; in ForEachChunk()
751 static const uptr kNumClasses = SizeClassMap::kNumClasses;
754 static const uptr kRegionSize = 1 << kRegionSizeLog;
755 static const uptr kNumPossibleRegions = kSpaceSize / kRegionSize;
760 char padding[kCacheLineSize - sizeof(uptr) - sizeof(IntrusiveList<Batch>)];
764 uptr ComputeRegionId(uptr mem) { in ComputeRegionId()
765 uptr res = mem >> kRegionSizeLog; in ComputeRegionId()
770 uptr ComputeRegionBeg(uptr mem) { in ComputeRegionBeg()
774 uptr AllocateRegion(AllocatorStats *stat, uptr class_id) { in AllocateRegion()
776 uptr res = reinterpret_cast<uptr>(MmapAlignedOrDie(kRegionSize, kRegionSize, in AllocateRegion()
785 SizeClassInfo *GetSizeClassInfo(uptr class_id) { in GetSizeClassInfo()
791 SizeClassInfo *sci, uptr class_id) { in PopulateFreeList()
792 uptr size = SizeClassMap::Size(class_id); in PopulateFreeList()
793 uptr reg = AllocateRegion(stat, class_id); in PopulateFreeList()
794 uptr n_chunks = kRegionSize / (size + kMetadataSize); in PopulateFreeList()
795 uptr max_count = SizeClassMap::MaxCached(class_id); in PopulateFreeList()
797 for (uptr i = reg; i < reg + n_chunks * size; i += size) { in PopulateFreeList()
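
The SizeClassAllocator32 fragments instead key everything off a byte map indexed by region id (mem >> kRegionSizeLog). A sketch of that lookup follows; the constants are illustrative and the plain possible_regions array is a stand-in for the real byte map.

#include <cstdint>
#include <cstdio>

typedef uintptr_t uptr;
typedef uint32_t u32;
typedef uint8_t u8;

static const uptr kSpaceSize = 1UL << 28;           // example 256 MiB space
static const uptr kRegionSizeLog = 20;              // example 1 MiB regions
static const uptr kRegionSize = 1 << kRegionSizeLog;
static const uptr kNumPossibleRegions = kSpaceSize / kRegionSize;

// Stand-in for the real byte map: one entry per possible region, 0 meaning
// "no size class assigned yet".
static u8 possible_regions[kNumPossibleRegions];

static uptr ComputeRegionId(uptr mem) { return mem >> kRegionSizeLog; }
static uptr ComputeRegionBeg(uptr mem) { return mem & ~(kRegionSize - 1); }

static uptr GetSizeClass(uptr p) {
  return possible_regions[ComputeRegionId(p)];
}

static uptr GetBlockBegin(uptr p, uptr chunk_size) {
  uptr beg = ComputeRegionBeg(p);
  uptr n = (u32)(p - beg) / (u32)chunk_size;        // 32-bit division
  return beg + n * chunk_size;
}

int main() {
  uptr region_id = 5, chunk_size = 96;              // hypothetical region/class
  possible_regions[region_id] = 7;                  // pretend class 7 owns it
  uptr p = (region_id << kRegionSizeLog) + 3 * chunk_size + 10;
  printf("class=%zu begin=+%zu\n",
         (size_t)GetSizeClass(p),
         (size_t)(GetBlockBegin(p, chunk_size) - ComputeRegionBeg(p)));
  return 0;
}
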
828 static const uptr kNumClasses = SizeClassAllocator::kNumClasses;
842 void *Allocate(SizeClassAllocator *allocator, uptr class_id) { in Allocate()
854 void Deallocate(SizeClassAllocator *allocator, uptr class_id, void *p) { in Deallocate()
869 for (uptr class_id = 0; class_id < kNumClasses; class_id++) { in Drain()
880 uptr count;
881 uptr max_count;
890 for (uptr i = 0; i < kNumClasses; i++) { in InitCache()
896 NOINLINE void Refill(SizeClassAllocator *allocator, uptr class_id) { in Refill()
901 for (uptr i = 0; i < b->count; i++) in Refill()
908 NOINLINE void Drain(SizeClassAllocator *allocator, uptr class_id) { in Drain()
916 uptr cnt = Min(c->max_count / 2, c->count); in Drain()
917 for (uptr i = 0; i < cnt; i++) { in Drain()
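
The SizeClassAllocatorLocalCache fragments show per-class arrays with a count/max_count pair, refilled from and drained back to the shared allocator in batches. A toy sketch of that behaviour follows; ToyBackend is a stand-in for the real SizeClassAllocator and the transfer-batch protocol is simplified to single pops and pushes.

#include <cstdio>
#include <vector>

typedef unsigned long uptr;

// Stand-in for the shared SizeClassAllocator; hands out 64-byte chunks and
// takes them back. (The toy leaks its chunks, which is irrelevant here.)
struct ToyBackend {
  std::vector<void *> freelist;
  void *Pop() {
    if (freelist.empty()) return new char[64];
    void *p = freelist.back();
    freelist.pop_back();
    return p;
  }
  void Push(void *p) { freelist.push_back(p); }
};

struct PerClassCache {
  static const uptr kMaxCached = 8;
  uptr count = 0;
  uptr max_count = kMaxCached;
  void *batch[kMaxCached];

  void *Allocate(ToyBackend *allocator) {
    if (count == 0) Refill(allocator);
    return batch[--count];                       // pop from the local array
  }
  void Deallocate(ToyBackend *allocator, void *p) {
    if (count == max_count) Drain(allocator);    // make room first
    batch[count++] = p;
  }
  void Refill(ToyBackend *allocator) {           // grab a batch when empty
    while (count < max_count / 2) batch[count++] = allocator->Pop();
  }
  void Drain(ToyBackend *allocator) {            // return half when full
    uptr cnt = max_count / 2 < count ? max_count / 2 : count;  // Min()
    for (uptr i = 0; i < cnt; i++) allocator->Push(batch[--count]);
  }
};

int main() {
  ToyBackend backend;
  PerClassCache cache;
  void *p = cache.Allocate(&backend);
  cache.Deallocate(&backend, p);
  printf("cached=%zu in backend=%zu\n", (size_t)cache.count,
         backend.freelist.size());
  return 0;
}
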
939 void *Allocate(AllocatorStats *stat, uptr size, uptr alignment) { in Allocate()
941 uptr map_size = RoundUpMapSize(size); in Allocate()
945 uptr map_beg = reinterpret_cast<uptr>( in Allocate()
948 uptr map_end = map_beg + map_size; in Allocate()
949 uptr res = map_beg + page_size_; in Allocate()
958 uptr size_log = MostSignificantSetBitIndex(map_size); in Allocate()
962 uptr idx = n_chunks_++; in Allocate()
981 uptr idx = h->chunk_idx; in Deallocate()
997 uptr TotalMemoryUsed() { in TotalMemoryUsed()
999 uptr res = 0; in TotalMemoryUsed()
1000 for (uptr i = 0; i < n_chunks_; i++) { in TotalMemoryUsed()
1012 uptr GetActuallyAllocatedSize(void *p) { in GetActuallyAllocatedSize()
1019 CHECK(IsAligned(reinterpret_cast<uptr>(p), page_size_)); in GetMetaData()
1024 uptr p = reinterpret_cast<uptr>(ptr); in GetBlockBegin()
1026 uptr nearest_chunk = 0; in GetBlockBegin()
1028 for (uptr i = 0; i < n_chunks_; i++) { in GetBlockBegin()
1029 uptr ch = reinterpret_cast<uptr>(chunks_[i]); in GetBlockBegin()
1048 uptr p = reinterpret_cast<uptr>(ptr); in GetBlockBeginFastLocked()
1049 uptr n = n_chunks_; in GetBlockBeginFastLocked()
1053 SortArray(reinterpret_cast<uptr*>(chunks_), n); in GetBlockBeginFastLocked()
1054 for (uptr i = 0; i < n; i++) in GetBlockBeginFastLocked()
1057 min_mmap_ = reinterpret_cast<uptr>(chunks_[0]); in GetBlockBeginFastLocked()
1058 max_mmap_ = reinterpret_cast<uptr>(chunks_[n - 1]) + in GetBlockBeginFastLocked()
1063 uptr beg = 0, end = n - 1; in GetBlockBeginFastLocked()
1067 uptr mid = (beg + end) / 2; // Invariant: mid >= beg + 1 in GetBlockBeginFastLocked()
1068 if (p < reinterpret_cast<uptr>(chunks_[mid])) in GetBlockBeginFastLocked()
1077 if (p >= reinterpret_cast<uptr>(chunks_[end])) in GetBlockBeginFastLocked()
1092 for (uptr i = 0; i < ARRAY_SIZE(stats.by_size_log); i++) { in PrintStats()
1093 uptr c = stats.by_size_log[i]; in PrintStats()
1113 for (uptr i = 0; i < n_chunks_; i++) in ForEachChunk()
1114 callback(reinterpret_cast<uptr>(GetUser(chunks_[i])), arg); in ForEachChunk()
1120 uptr map_beg;
1121 uptr map_size;
1122 uptr size;
1123 uptr chunk_idx;
1126 Header *GetHeader(uptr p) { in GetHeader()
1131 return GetHeader(reinterpret_cast<uptr>(p)); in GetHeader()
1135 CHECK(IsAligned((uptr)h, page_size_)); in GetUser()
1136 return reinterpret_cast<void*>(reinterpret_cast<uptr>(h) + page_size_); in GetUser()
1139 uptr RoundUpMapSize(uptr size) { in RoundUpMapSize()
1143 uptr page_size_;
1145 uptr n_chunks_;
1146 uptr min_mmap_, max_mmap_;
1149 uptr n_allocs, n_frees, currently_allocated, max_allocated, by_size_log[64];
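
The LargeMmapAllocator fragments place a Header one page before the user pointer, so GetHeader()/GetUser() are plain offset arithmetic and RoundUpMapSize() reserves that extra page. The sketch below assumes a 4 KiB page, re-implements a RoundUpTo helper, and uses malloc in place of the page-aligned mapping the real code obtains.

#include <cstdint>
#include <cstdio>
#include <cstdlib>

typedef uintptr_t uptr;

static const uptr kPageSize = 4096;               // assumed page size

static uptr RoundUpTo(uptr x, uptr boundary) {    // boundary is a power of two
  return (x + boundary - 1) & ~(boundary - 1);
}

struct Header {
  uptr map_beg;     // start of the whole mapping
  uptr map_size;    // size of the whole mapping
  uptr size;        // user-requested size
  uptr chunk_idx;   // index in the allocator's chunk array
};

// One extra page in front of the user data holds the Header.
static uptr RoundUpMapSize(uptr size) {
  return RoundUpTo(size, kPageSize) + kPageSize;
}

static Header *GetHeader(uptr p) {
  return reinterpret_cast<Header *>(p - kPageSize);
}

static void *GetUser(Header *h) {
  return reinterpret_cast<void *>(reinterpret_cast<uptr>(h) + kPageSize);
}

int main() {
  uptr size = 100000;
  uptr map_size = RoundUpMapSize(size);
  // malloc stands in for the page-aligned mmap the real code performs.
  uptr map_beg = reinterpret_cast<uptr>(malloc(map_size));
  if (!map_beg) return 1;
  uptr user = map_beg + kPageSize;                // res = map_beg + page_size_
  Header *h = GetHeader(user);                    // lands exactly on map_beg
  h->map_beg = map_beg;
  h->map_size = map_size;
  h->size = size;
  h->chunk_idx = 0;
  printf("map_size=%zu, header recovers user block: %d\n",
         (size_t)map_size, GetUser(h) == reinterpret_cast<void *>(user));
  free(reinterpret_cast<void *>(map_beg));
  return 0;
}
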
1170 void *Allocate(AllocatorCache *cache, uptr size, uptr alignment,
1185 CHECK_EQ(reinterpret_cast<uptr>(res) & (alignment - 1), 0);
1199 void *Reallocate(AllocatorCache *cache, void *p, uptr new_size, in Reallocate()
1200 uptr alignment) { in Reallocate()
1208 uptr old_size = GetActuallyAllocatedSize(p); in Reallocate()
1209 uptr memcpy_size = Min(new_size, old_size); in Reallocate()
1247 uptr GetActuallyAllocatedSize(void *p) { in GetActuallyAllocatedSize()
1253 uptr TotalMemoryUsed() { in TotalMemoryUsed()
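
The CombinedAllocator's Reallocate() fragments follow the familiar allocate-copy-free pattern, copying Min(new_size, old_size) bytes and releasing the old block either way. A sketch of that pattern follows; ToyAllocate/ToyDeallocate/ToyAllocatedSize are stand-ins for the real dispatch between the primary and secondary allocators.

#include <cstdio>
#include <cstdlib>
#include <cstring>

typedef unsigned long uptr;

static uptr Min(uptr a, uptr b) { return a < b ? a : b; }

// Stand-ins: the real code routes through the per-thread cache and picks the
// primary or secondary allocator based on size and alignment.
static void *ToyAllocate(uptr size) { return malloc(size); }
static void ToyDeallocate(void *p) { free(p); }
static uptr ToyAllocatedSize(uptr requested) { return requested; }  // no rounding in the toy

static void *Reallocate(void *p, uptr old_requested, uptr new_size) {
  if (!p) return ToyAllocate(new_size);
  if (!new_size) {
    ToyDeallocate(p);
    return 0;
  }
  uptr old_size = ToyAllocatedSize(old_requested);
  uptr memcpy_size = Min(new_size, old_size);
  void *new_p = ToyAllocate(new_size);
  if (new_p)
    memcpy(new_p, p, memcpy_size);
  ToyDeallocate(p);                    // the old block is freed either way
  return new_p;
}

int main() {
  char *p = static_cast<char *>(ToyAllocate(16));
  strcpy(p, "hello");
  p = static_cast<char *>(Reallocate(p, 16, 64));
  printf("%s\n", p);
  ToyDeallocate(p);
  return 0;
}
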
1306 bool CallocShouldReturnNullDueToOverflow(uptr size, uptr n);
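
CallocShouldReturnNullDueToOverflow() is only declared in these fragments. A plausible sketch of the check its name implies follows; the exact implementation lives elsewhere and may differ.

#include <cstdint>
#include <cstdio>

typedef uintptr_t uptr;

// True iff n * size cannot be represented in a uptr, so calloc must fail.
bool CallocShouldReturnNullDueToOverflow(uptr size, uptr n) {
  if (n == 0) return false;
  uptr max = (uptr)-1;                 // largest representable uptr
  return size > max / n;
}

int main() {
  printf("%d %d\n",
         (int)CallocShouldReturnNullDueToOverflow(8, 10),        // fits
         (int)CallocShouldReturnNullDueToOverflow((uptr)-1, 2)); // overflows
  return 0;
}
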