// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_BASE_SMALL_VECTOR_H_
#define V8_BASE_SMALL_VECTOR_H_

#include <algorithm>
#include <cstdlib>  // For malloc()/free().
#include <cstring>  // For memcpy().
#include <type_traits>
#include <utility>

#include "src/base/bits.h"
#include "src/base/macros.h"

namespace v8 {
namespace base {

// Minimal SmallVector implementation. Uses inline storage first, switches to
// malloc'ed storage when it overflows.
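//
// For example, a SmallVector<int, 4> keeps its first four elements in the
// inline buffer and only touches the heap once a fifth element is pushed.
// A rough usage sketch (the exact capacities assume the doubling policy in
// {Grow()} below):
//
//   SmallVector<int, 4> vec;                // empty, no heap allocation
//   for (int i = 0; i < 4; ++i) {
//     vec.emplace_back(i);                  // fills the inline storage
//   }
//   vec.emplace_back(4);                    // overflow: Grow() mallocs
//                                           // RoundUpToPowerOfTwo(2 * 4) == 8
//                                           // slots and memcpys the elements
//   int last = vec.back();                  // last == 4
//   vec.pop_back();                         // size() == 4 again
//   vec.clear();                            // size() == 0; storage retained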
template <typename T, size_t kSize>
class SmallVector {
  // Currently only support trivially copyable and trivially destructible data
  // types, as it uses memcpy to copy elements and never calls destructors.
  ASSERT_TRIVIALLY_COPYABLE(T);
  STATIC_ASSERT(std::is_trivially_destructible<T>::value);

 public:
  static constexpr size_t kInlineSize = kSize;

  SmallVector() = default;
  explicit SmallVector(size_t size) { resize_no_init(size); }
  SmallVector(const SmallVector& other) V8_NOEXCEPT { *this = other; }
  SmallVector(SmallVector&& other) V8_NOEXCEPT { *this = std::move(other); }
  SmallVector(std::initializer_list<T> init) {
    resize_no_init(init.size());
    memcpy(begin_, init.begin(), sizeof(T) * init.size());
  }

  ~SmallVector() {
    if (is_big()) free(begin_);
  }

  SmallVector& operator=(const SmallVector& other) V8_NOEXCEPT {
    if (this == &other) return *this;
    size_t other_size = other.size();
    if (capacity() < other_size) {
      // Create large-enough heap-allocated storage.
      if (is_big()) free(begin_);
      begin_ = reinterpret_cast<T*>(malloc(sizeof(T) * other_size));
      end_of_storage_ = begin_ + other_size;
    }
    memcpy(begin_, other.begin_, sizeof(T) * other_size);
    end_ = begin_ + other_size;
    return *this;
  }

  SmallVector& operator=(SmallVector&& other) V8_NOEXCEPT {
    if (this == &other) return *this;
    if (other.is_big()) {
      if (is_big()) free(begin_);
      begin_ = other.begin_;
      end_ = other.end_;
      end_of_storage_ = other.end_of_storage_;
      other.reset();
    } else {
      DCHECK_GE(capacity(), other.size());  // Sanity check.
      size_t other_size = other.size();
      memcpy(begin_, other.begin_, sizeof(T) * other_size);
      end_ = begin_ + other_size;
    }
    return *this;
  }

  T* data() { return begin_; }
  const T* data() const { return begin_; }

  T* begin() { return begin_; }
  const T* begin() const { return begin_; }

  T* end() { return end_; }
  const T* end() const { return end_; }

  size_t size() const { return end_ - begin_; }
  bool empty() const { return end_ == begin_; }
  size_t capacity() const { return end_of_storage_ - begin_; }

  T& back() {
    DCHECK_NE(0, size());
    return end_[-1];
  }
  const T& back() const {
    DCHECK_NE(0, size());
    return end_[-1];
  }

  T& operator[](size_t index) {
    DCHECK_GT(size(), index);
    return begin_[index];
  }

  const T& at(size_t index) const {
    DCHECK_GT(size(), index);
    return begin_[index];
  }

  const T& operator[](size_t index) const { return at(index); }

  template <typename... Args>
  void emplace_back(Args&&... args) {
    T* end = end_;
    if (V8_UNLIKELY(end == end_of_storage_)) end = Grow();
    new (end) T(std::forward<Args>(args)...);
    end_ = end + 1;
  }

  void pop_back(size_t count = 1) {
    DCHECK_GE(size(), count);
    end_ -= count;
  }

  void resize_no_init(size_t new_size) {
    // Resizing without initialization is safe if T is trivially copyable.
    ASSERT_TRIVIALLY_COPYABLE(T);
    if (new_size > capacity()) Grow(new_size);
    end_ = begin_ + new_size;
  }

  // Clear without freeing any storage.
  void clear() { end_ = begin_; }

  // Clear and go back to inline storage.
  void reset() {
    begin_ = inline_storage_begin();
    end_ = begin_;
    end_of_storage_ = begin_ + kInlineSize;
  }

 private:
  T* begin_ = inline_storage_begin();
  T* end_ = begin_;
  T* end_of_storage_ = begin_ + kInlineSize;
  typename std::aligned_storage<sizeof(T) * kInlineSize, alignof(T)>::type
      inline_storage_;

  // Grows the backing store by a factor of two. Returns the new end of the
  // used storage (this reduces binary size).
  V8_NOINLINE T* Grow() { return Grow(0); }

  // Grows the backing store by a factor of two, and at least to
  // {min_capacity}.
  V8_NOINLINE T* Grow(size_t min_capacity) {
    size_t in_use = end_ - begin_;
    size_t new_capacity = base::bits::RoundUpToPowerOfTwo(
        std::max(min_capacity, 2 * capacity()));
    T* new_storage = reinterpret_cast<T*>(malloc(sizeof(T) * new_capacity));
    memcpy(new_storage, begin_, sizeof(T) * in_use);
    if (is_big()) free(begin_);
    begin_ = new_storage;
    end_ = new_storage + in_use;
    end_of_storage_ = new_storage + new_capacity;
    return end_;
  }

  bool is_big() const { return begin_ != inline_storage_begin(); }

  T* inline_storage_begin() { return reinterpret_cast<T*>(&inline_storage_); }
  const T* inline_storage_begin() const {
    return reinterpret_cast<const T*>(&inline_storage_);
  }
};

}  // namespace base
}  // namespace v8

#endif  // V8_BASE_SMALL_VECTOR_H_