// Copyright 2017 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef BASE_CONTAINERS_VECTOR_BUFFER_H_
#define BASE_CONTAINERS_VECTOR_BUFFER_H_

#include <stdlib.h>
#include <string.h>

#include <concepts>
#include <type_traits>
#include <utility>

#include "base/check.h"
#include "base/check_op.h"
#include "base/compiler_specific.h"
#include "base/containers/span.h"
#include "base/containers/util.h"
#include "base/memory/raw_ptr_exclusion.h"
#include "base/numerics/checked_math.h"

namespace base::internal {

// Internal implementation detail of base/containers.
//
// Implements a vector-like buffer that holds a certain capacity of T. Unlike
// std::vector, VectorBuffer never constructs or destructs its arguments, and
// can't change sizes. But it does implement templates to assist in efficient
// moving and destruction of those items manually.
//
// In particular, the destructor function does not iterate over the items if
// there is no destructor. Moves are implemented as a memcpy/memmove for
// trivially copyable objects (POD); otherwise they use std::move when
// possible, falling back to a copy as a last resort. This behavior is
// similar to std::vector.
//
// No special consideration is done for noexcept move constructors since
// we compile without exceptions.
//
// The current API does not support moving overlapping ranges.
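//
// A minimal usage sketch (`Widget` is a hypothetical element type; real owners
// such as circular_deque track the logical size themselves and construct and
// destroy elements manually):
//
//   VectorBuffer<Widget> buffer(8);    // Uninitialized storage for 8 Widgets.
//   new (buffer.data()) Widget();      // The owner placement-news element 0.
//   size_t size = 1;                   // The owner tracks how many are live.
//   // Before the storage goes away, the owner destroys the live elements.
//   VectorBuffer<Widget>::DestructRange(buffer.subspan(0, size));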
template <typename T>
class VectorBuffer {
 public:
  constexpr VectorBuffer() = default;

#if defined(__clang__) && !defined(__native_client__)
  // This constructor converts an uninitialized void* to a T* which triggers
  // clang Control Flow Integrity. Since this is as-designed, disable.
  __attribute__((no_sanitize("cfi-unrelated-cast", "vptr")))
#endif
  VectorBuffer(size_t count)
      : buffer_(reinterpret_cast<T*>(
            malloc(CheckMul(sizeof(T), count).ValueOrDie()))),
        capacity_(count) {
  }

  VectorBuffer(VectorBuffer&& other) noexcept
      : buffer_(other.buffer_), capacity_(other.capacity_) {
    other.buffer_ = nullptr;
    other.capacity_ = 0;
  }

  VectorBuffer(const VectorBuffer&) = delete;
  VectorBuffer& operator=(const VectorBuffer&) = delete;

  ~VectorBuffer() { free(buffer_); }

  VectorBuffer& operator=(VectorBuffer&& other) {
    free(buffer_);
    buffer_ = other.buffer_;
    capacity_ = other.capacity_;

    other.buffer_ = nullptr;
    other.capacity_ = 0;
    return *this;
  }

  size_t capacity() const { return capacity_; }

  T& operator[](size_t i) {
    // TODO(crbug.com/40565371): Some call sites (at least circular_deque.h) are
    // calling this with `i == capacity_` as a way of getting `end()`. Therefore
    // we have to allow this for now (`i <= capacity_`), until we fix those call
    // sites to use real iterators. This comment applies here and to `const T&
    // operator[]`, below.
    CHECK_LT(i, capacity_);
    // SAFETY: `capacity_` is the size of the array pointed to by `buffer_`,
    // which `i` is less than, so the dereference is inside the allocation.
    return UNSAFE_BUFFERS(buffer_[i]);
  }

  const T& operator[](size_t i) const {
    CHECK_LT(i, capacity_);
    // SAFETY: `capacity_` is the size of the array pointed to by `buffer_`,
    // which `i` is less than, so the dereference is inside the allocation.
    return UNSAFE_BUFFERS(buffer_[i]);
  }

  const T* data() const { return buffer_; }
  T* data() { return buffer_; }

  T* begin() { return buffer_; }
  T* end() {
    // SAFETY: `capacity_` is the size of the array pointed to by `buffer_`.
    return UNSAFE_BUFFERS(buffer_ + capacity_);
  }

  span<T> as_span() {
    // SAFETY: The `buffer_` array's size is `capacity_` so this gives the
    // pointer to the start and one-past-the-end of the `buffer_`.
    return UNSAFE_BUFFERS(span(buffer_, buffer_ + capacity_));
  }

  span<T> subspan(size_t index) { return as_span().subspan(index); }

  span<T> subspan(size_t index, size_t size) {
    return as_span().subspan(index, size);
  }

  T* get_at(size_t index) { return as_span().get_at(index); }

  // DestructRange ------------------------------------------------------------

  static void DestructRange(span<T> range) {
    // Trivially destructible objects need not have their destructors called.
    if constexpr (!std::is_trivially_destructible_v<T>) {
      for (T& t : range) {
        t.~T();
      }
    }
  }

  // MoveRange ----------------------------------------------------------------

  template <typename T2>
  static inline constexpr bool is_trivially_copyable_or_relocatable =
      std::is_trivially_copyable_v<T2> || IS_TRIVIALLY_RELOCATABLE(T2);

  // Moves or copies elements from the `from` span to the `to` span. Uses
  // memcpy when possible. The memory in `to` must be uninitialized and the
  // ranges must not overlap.
  //
  // The objects in `from` are destroyed afterward.
  static void MoveConstructRange(span<T> from, span<T> to) {
    CHECK(!RangesOverlap(from, to));
    CHECK_EQ(from.size(), to.size());

    if constexpr (is_trivially_copyable_or_relocatable<T>) {
      // We can't use span::copy_from() as it tries to call copy constructors,
      // and fails to compile on move-only trivially-relocatable types.
      memcpy(to.data(), from.data(), to.size_bytes());
      // Destructors are skipped because they are trivial or should be elided
      // in trivial relocation (https://reviews.llvm.org/D114732).
    } else {
      for (size_t i = 0; i < from.size(); ++i) {
        T* to_pointer = to.subspan(i).data();
        if constexpr (std::move_constructible<T>) {
          new (to_pointer) T(std::move(from[i]));
        } else {
          new (to_pointer) T(from[i]);
        }
        from[i].~T();
      }
    }
  }

 private:
  // Returns whether the memory backing `a` and `b` overlaps. End addresses are
  // computed in bytes so that element types wider than one byte are covered.
  static bool RangesOverlap(span<T> a, span<T> b) {
    const auto a_start = reinterpret_cast<uintptr_t>(a.data());
    const auto a_end = reinterpret_cast<uintptr_t>(a.data()) + a.size_bytes();
    const auto b_start = reinterpret_cast<uintptr_t>(b.data());
    const auto b_end = reinterpret_cast<uintptr_t>(b.data()) + b.size_bytes();
    return a_end > b_start && a_start < b_end;
  }

  // `buffer_` is not a raw_ptr<...> for performance reasons (based on analysis
  // of sampling profiler data and tab_search:top100:2020).
  RAW_PTR_EXCLUSION T* buffer_ = nullptr;
  size_t capacity_ = 0;
};

}  // namespace base::internal

#endif  // BASE_CONTAINERS_VECTOR_BUFFER_H_