// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_ZONE_ZONE_ALLOCATOR_H_
#define V8_ZONE_ZONE_ALLOCATOR_H_
#include <limits>

#include "src/zone/zone.h"

namespace v8 {
namespace internal {

template <typename T>
class ZoneAllocator {
 public:
  using pointer = T*;
  using const_pointer = const T*;
  using reference = T&;
  using const_reference = const T&;
  using value_type = T;
  using size_type = size_t;
  using difference_type = ptrdiff_t;
  template <class O>
  struct rebind {
    using other = ZoneAllocator<O>;
  };

#ifdef V8_OS_WIN
  // The exported class ParallelMove derives from ZoneVector, which derives
  // from std::vector. On Windows, the semantics of dllexport mean that
  // a class's superclasses that are not explicitly exported themselves get
  // implicitly exported together with the subclass, and exporting a class
  // exports all its functions -- including the std::vector() constructors
  // that don't take an explicit allocator argument, which in turn reference
  // the vector allocator's default constructor. So this constructor needs
  // to exist for linking purposes, even if it's never called.
  // Other fixes would be to disallow subclasses of ZoneVector (etc) to be
  // exported, or to use composition instead of inheritance for either
  // ZoneVector and friends or for ParallelMove.
  ZoneAllocator() : ZoneAllocator(nullptr) { UNREACHABLE(); }
#endif
  explicit ZoneAllocator(Zone* zone) : zone_(zone) {
    // If we are going to allocate compressed pointers in the zone it must
    // support compression.
    DCHECK_IMPLIES(is_compressed_pointer<T>::value,
                   zone_->supports_compression());
  }
  template <typename U>
  ZoneAllocator(const ZoneAllocator<U>& other) V8_NOEXCEPT
      : ZoneAllocator<T>(other.zone_) {
    // If we are going to allocate compressed pointers in the zone it must
    // support compression.
    DCHECK_IMPLIES(is_compressed_pointer<T>::value,
                   zone_->supports_compression());
  }
  template <typename U>
  friend class ZoneAllocator;

  T* allocate(size_t length) { return zone_->NewArray<T>(length); }
  void deallocate(T* p, size_t length) { zone_->DeleteArray<T>(p, length); }

  size_t max_size() const {
    return std::numeric_limits<int>::max() / sizeof(T);
  }
  template <typename U, typename... Args>
  void construct(U* p, Args&&... args) {
    void* v_p = const_cast<void*>(static_cast<const void*>(p));
    new (v_p) U(std::forward<Args>(args)...);
  }
  template <typename U>
  void destroy(U* p) {
    p->~U();
  }

  bool operator==(ZoneAllocator const& other) const {
    return zone_ == other.zone_;
  }
  bool operator!=(ZoneAllocator const& other) const {
    return zone_ != other.zone_;
  }

  Zone* zone() { return zone_; }

 private:
  Zone* zone_;
};
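
// Editor's usage sketch (illustrative; not part of the original header, and
// the Zone construction assumes the Zone(AccountingAllocator*, const char*)
// constructor declared in src/zone/zone.h):
//
//   AccountingAllocator accounting_allocator;
//   Zone zone(&accounting_allocator, "example zone");
//   std::vector<int, ZoneAllocator<int>> ints(ZoneAllocator<int>(&zone));
//   ints.push_back(1);  // Element storage comes from |zone| and is released
//                       // in bulk when |zone| dies, not per element.
//
// Because operator== compares the underlying Zone pointers, two allocators
// over the same Zone compare equal and their allocations are interchangeable.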

// A recycling zone allocator maintains a free list of deallocated chunks
// to reuse on subsequent allocations. The free list management is purposely
// very simple and works best for data structures which regularly allocate
// and free blocks of similarly sized memory (such as std::deque).
template <typename T>
class RecyclingZoneAllocator : public ZoneAllocator<T> {
 public:
  template <class O>
  struct rebind {
    using other = RecyclingZoneAllocator<O>;
  };

  explicit RecyclingZoneAllocator(Zone* zone)
      : ZoneAllocator<T>(zone), free_list_(nullptr) {}
  template <typename U>
  RecyclingZoneAllocator(const RecyclingZoneAllocator<U>& other) V8_NOEXCEPT
      : ZoneAllocator<T>(other),
        free_list_(nullptr) {}
  template <typename U>
  friend class RecyclingZoneAllocator;

  T* allocate(size_t n) {
    // Only check the top block in the free list, since this will be equal
    // to or larger than the other blocks in the free list.
    if (free_list_ && free_list_->size >= n) {
      T* return_val = reinterpret_cast<T*>(free_list_);
      free_list_ = free_list_->next;
      return return_val;
    }
    return ZoneAllocator<T>::allocate(n);
  }

  void deallocate(T* p, size_t n) {
    if ((sizeof(T) * n < sizeof(FreeBlock))) return;

    // Only add the block to the free list if it is equal to or larger than
    // the previous block, so that allocation stays O(1), only having to
    // look at the top block.
    if (!free_list_ || free_list_->size <= n) {
      // Store the free list within the block being deallocated.
      DCHECK((sizeof(T) * n >= sizeof(FreeBlock)));
      FreeBlock* new_free_block = reinterpret_cast<FreeBlock*>(p);

      new_free_block->size = n;
      new_free_block->next = free_list_;
      free_list_ = new_free_block;
    }
  }

 private:
  // Free-list node threaded through a recycled block's own storage; blocks
  // smaller than sizeof(FreeBlock) are therefore never recycled (see the
  // early return in deallocate()).
  struct FreeBlock {
    FreeBlock* next;
    size_t size;
  };

  FreeBlock* free_list_;
};

using ZoneBoolAllocator = ZoneAllocator<bool>;
using ZoneIntAllocator = ZoneAllocator<int>;

}  // namespace internal
}  // namespace v8

#endif  // V8_ZONE_ZONE_ALLOCATOR_H_
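
// Editor's usage sketch (illustrative; not part of the original header): the
// recycling allocator pays off when similarly sized blocks are freed and then
// re-requested, e.g. the fixed-size chunks of a std::deque. With a Zone set
// up as in the sketch above:
//
//   RecyclingZoneAllocator<int> allocator(&zone);
//   int* a = allocator.allocate(100);
//   allocator.deallocate(a, 100);      // |a| now heads the free list.
//   int* b = allocator.allocate(100);  // Recycles the block: |b| == |a|,
//                                      // and no new zone memory is used.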