1 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
2 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
3 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
4 // option. This file may not be copied, modified, or distributed
5 // except according to those terms.
6
7 //! Small vectors in various sizes. These store a certain number of elements inline, and fall back
8 //! to the heap for larger allocations. This can be a useful optimization for improving cache
9 //! locality and reducing allocator traffic for workloads that fit within the inline buffer.
10 //!
11 //! ## `no_std` support
12 //!
13 //! By default, `smallvec` does not depend on `std`. However, the optional
14 //! `write` feature implements the `std::io::Write` trait for vectors of `u8`.
15 //! When this feature is enabled, `smallvec` depends on `std`.
16 //!
17 //! ## Optional features
18 //!
19 //! ### `serde`
20 //!
21 //! When this optional dependency is enabled, `SmallVec` implements the `serde::Serialize` and
22 //! `serde::Deserialize` traits.
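//!
//! A small usage sketch; `serde_json` here is purely illustrative and is not a
//! dependency of this crate, so the block is not compiled as a doctest:
//!
//! ```ignore
//! use smallvec::{smallvec, SmallVec};
//!
//! let v: SmallVec<[u8; 4]> = smallvec![1, 2, 3];
//! let json = serde_json::to_string(&v).unwrap(); // "[1,2,3]"
//! let back: SmallVec<[u8; 4]> = serde_json::from_str(&json).unwrap();
//! assert_eq!(v, back);
//! ```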
23 //!
24 //! ### `write`
25 //!
26 //! When this feature is enabled, `SmallVec<[u8; _]>` implements the `std::io::Write` trait.
27 //! This feature is not compatible with `#![no_std]` programs.
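//!
//! A small sketch of writing into a byte vector (feature-gated, so not compiled as
//! a doctest here):
//!
//! ```ignore
//! use smallvec::SmallVec;
//! use std::io::Write;
//!
//! let mut buf: SmallVec<[u8; 8]> = SmallVec::new();
//! buf.write_all(b"hello").unwrap();
//! assert_eq!(&buf[..], b"hello");
//! ```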
28 //!
29 //! ### `union`
30 //!
31 //! **This feature requires Rust 1.49.**
32 //!
33 //! When the `union` feature is enabled `smallvec` will track its state (inline or spilled)
34 //! without the use of an enum tag, reducing the size of the `smallvec` by one machine word.
35 //! This means that there is potentially no space overhead compared to `Vec`.
36 //! Note that `smallvec` can still be larger than `Vec` if the inline buffer is larger than two
37 //! machine words.
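//!
//! For example, on a typical 64-bit target one would expect the following to hold with
//! the `union` feature enabled (a sketch, not compiled as a doctest here):
//!
//! ```ignore
//! use smallvec::SmallVec;
//! use std::mem::size_of;
//!
//! // Two machine words of inline storage and no separate tag word.
//! assert_eq!(size_of::<SmallVec<[usize; 2]>>(), size_of::<Vec<usize>>());
//! ```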
38 //!
//! To use this feature, add `features = ["union"]` to the `smallvec` dependency in Cargo.toml.
41 //!
42 //! Tracking issue: [rust-lang/rust#55149](https://github.com/rust-lang/rust/issues/55149)
43 //!
44 //! ### `const_generics`
45 //!
46 //! **This feature requires Rust 1.51.**
47 //!
//! When this feature is enabled, `SmallVec` works with arrays of any size, not just a fixed
//! list of sizes.
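//!
//! A small sketch (feature-gated, so not compiled as a doctest here):
//!
//! ```ignore
//! use smallvec::SmallVec;
//!
//! // Any inline size works, not just the sizes covered by the built-in `Array` impls.
//! let v: SmallVec<[u8; 17]> = SmallVec::new();
//! assert_eq!(v.inline_size(), 17);
//! ```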
50 //!
51 //! ### `const_new`
52 //!
53 //! **This feature requires Rust 1.51.**
54 //!
//! This feature exposes the functions [`SmallVec::new_const`], [`SmallVec::from_const`], and
//! [`smallvec_inline`], which enable a `SmallVec` to be initialized in a const context.
56 //! For details, see the
57 //! [Rust Reference](https://doc.rust-lang.org/reference/const_eval.html#const-functions).
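//!
//! A small sketch (feature-gated, so not compiled as a doctest here):
//!
//! ```ignore
//! use smallvec::{smallvec_inline, SmallVec};
//!
//! const EMPTY: SmallVec<[i32; 4]> = SmallVec::new_const();
//! const THREE: SmallVec<[i32; 3]> = smallvec_inline![1, 2, 3];
//! ```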
58 //!
59 //! ### `specialization`
60 //!
61 //! **This feature is unstable and requires a nightly build of the Rust toolchain.**
62 //!
63 //! When this feature is enabled, `SmallVec::from(slice)` has improved performance for slices
64 //! of `Copy` types. (Without this feature, you can use `SmallVec::from_slice` to get optimal
65 //! performance for `Copy` types.)
66 //!
67 //! Tracking issue: [rust-lang/rust#31844](https://github.com/rust-lang/rust/issues/31844)
68 //!
69 //! ### `may_dangle`
70 //!
71 //! **This feature is unstable and requires a nightly build of the Rust toolchain.**
72 //!
73 //! This feature makes the Rust compiler less strict about use of vectors that contain borrowed
74 //! references. For details, see the
75 //! [Rustonomicon](https://doc.rust-lang.org/1.42.0/nomicon/dropck.html#an-escape-hatch).
76 //!
77 //! Tracking issue: [rust-lang/rust#34761](https://github.com/rust-lang/rust/issues/34761)
78
79 #![no_std]
80 #![cfg_attr(docsrs, feature(doc_cfg))]
81 #![cfg_attr(feature = "specialization", allow(incomplete_features))]
82 #![cfg_attr(feature = "specialization", feature(specialization))]
83 #![cfg_attr(feature = "may_dangle", feature(dropck_eyepatch))]
84 #![cfg_attr(
85 feature = "debugger_visualizer",
86 feature(debugger_visualizer),
87 debugger_visualizer(natvis_file = "../debug_metadata/smallvec.natvis")
88 )]
89 #![deny(missing_docs)]
90
91 #[doc(hidden)]
92 pub extern crate alloc;
93
94 #[cfg(any(test, feature = "write"))]
95 extern crate std;
96
97 #[cfg(test)]
98 mod tests;
99
100 #[allow(deprecated)]
101 use alloc::alloc::{Layout, LayoutErr};
102 use alloc::boxed::Box;
103 use alloc::{vec, vec::Vec};
104 use core::borrow::{Borrow, BorrowMut};
105 use core::cmp;
106 use core::fmt;
107 use core::hash::{Hash, Hasher};
108 use core::hint::unreachable_unchecked;
109 use core::iter::{repeat, FromIterator, FusedIterator, IntoIterator};
110 use core::mem;
111 use core::mem::MaybeUninit;
112 use core::ops::{self, Range, RangeBounds};
113 use core::ptr::{self, NonNull};
114 use core::slice::{self, SliceIndex};
115
116 #[cfg(feature = "serde")]
117 use serde::{
118 de::{Deserialize, Deserializer, SeqAccess, Visitor},
119 ser::{Serialize, SerializeSeq, Serializer},
120 };
121
122 #[cfg(feature = "serde")]
123 use core::marker::PhantomData;
124
125 #[cfg(feature = "write")]
126 use std::io;
127
128 /// Creates a [`SmallVec`] containing the arguments.
129 ///
130 /// `smallvec!` allows `SmallVec`s to be defined with the same syntax as array expressions.
131 /// There are two forms of this macro:
132 ///
133 /// - Create a [`SmallVec`] containing a given list of elements:
134 ///
135 /// ```
136 /// # #[macro_use] extern crate smallvec;
137 /// # use smallvec::SmallVec;
138 /// # fn main() {
139 /// let v: SmallVec<[_; 128]> = smallvec![1, 2, 3];
140 /// assert_eq!(v[0], 1);
141 /// assert_eq!(v[1], 2);
142 /// assert_eq!(v[2], 3);
143 /// # }
144 /// ```
145 ///
146 /// - Create a [`SmallVec`] from a given element and size:
147 ///
148 /// ```
149 /// # #[macro_use] extern crate smallvec;
150 /// # use smallvec::SmallVec;
151 /// # fn main() {
152 /// let v: SmallVec<[_; 0x8000]> = smallvec![1; 3];
153 /// assert_eq!(v, SmallVec::from_buf([1, 1, 1]));
154 /// # }
155 /// ```
156 ///
157 /// Note that unlike array expressions this syntax supports all elements
158 /// which implement [`Clone`] and the number of elements doesn't have to be
159 /// a constant.
160 ///
161 /// This will use `clone` to duplicate an expression, so one should be careful
162 /// using this with types having a nonstandard `Clone` implementation. For
163 /// example, `smallvec![Rc::new(1); 5]` will create a vector of five references
164 /// to the same boxed integer value, not five references pointing to independently
165 /// boxed integers.
166
167 #[macro_export]
168 macro_rules! smallvec {
169 // count helper: transform any expression into 1
170 (@one $x:expr) => (1usize);
171 ($elem:expr; $n:expr) => ({
172 $crate::SmallVec::from_elem($elem, $n)
173 });
174 ($($x:expr),*$(,)*) => ({
175 let count = 0usize $(+ $crate::smallvec!(@one $x))*;
176 #[allow(unused_mut)]
177 let mut vec = $crate::SmallVec::new();
178 if count <= vec.inline_size() {
179 $(vec.push($x);)*
180 vec
181 } else {
182 $crate::SmallVec::from_vec($crate::alloc::vec![$($x,)*])
183 }
184 });
185 }
186
187 /// Creates an inline [`SmallVec`] containing the arguments. This macro is enabled by the feature `const_new`.
188 ///
189 /// `smallvec_inline!` allows `SmallVec`s to be defined with the same syntax as array expressions in `const` contexts.
190 /// The inline storage `A` will always be an array of the size specified by the arguments.
191 /// There are two forms of this macro:
192 ///
193 /// - Create a [`SmallVec`] containing a given list of elements:
194 ///
195 /// ```
196 /// # #[macro_use] extern crate smallvec;
197 /// # use smallvec::SmallVec;
198 /// # fn main() {
199 /// const V: SmallVec<[i32; 3]> = smallvec_inline![1, 2, 3];
200 /// assert_eq!(V[0], 1);
201 /// assert_eq!(V[1], 2);
202 /// assert_eq!(V[2], 3);
203 /// # }
204 /// ```
205 ///
206 /// - Create a [`SmallVec`] from a given element and size:
207 ///
208 /// ```
209 /// # #[macro_use] extern crate smallvec;
210 /// # use smallvec::SmallVec;
211 /// # fn main() {
212 /// const V: SmallVec<[i32; 3]> = smallvec_inline![1; 3];
213 /// assert_eq!(V, SmallVec::from_buf([1, 1, 1]));
214 /// # }
215 /// ```
216 ///
217 /// Note that the behavior mimics that of array expressions, in contrast to [`smallvec`].
218 #[cfg(feature = "const_new")]
219 #[cfg_attr(docsrs, doc(cfg(feature = "const_new")))]
220 #[macro_export]
221 macro_rules! smallvec_inline {
222 // count helper: transform any expression into 1
223 (@one $x:expr) => (1usize);
224 ($elem:expr; $n:expr) => ({
225 $crate::SmallVec::<[_; $n]>::from_const([$elem; $n])
226 });
227 ($($x:expr),+ $(,)?) => ({
228 const N: usize = 0usize $(+ $crate::smallvec_inline!(@one $x))*;
229 $crate::SmallVec::<[_; N]>::from_const([$($x,)*])
230 });
231 }
232
233 /// `panic!()` in debug builds, optimization hint in release.
234 #[cfg(not(feature = "union"))]
235 macro_rules! debug_unreachable {
236 () => {
237 debug_unreachable!("entered unreachable code")
238 };
239 ($e:expr) => {
240 if cfg!(not(debug_assertions)) {
241 unreachable_unchecked();
242 } else {
243 panic!($e);
244 }
245 };
246 }
247
248 /// Trait to be implemented by a collection that can be extended from a slice
249 ///
250 /// ## Example
251 ///
252 /// ```rust
253 /// use smallvec::{ExtendFromSlice, SmallVec};
254 ///
255 /// fn initialize<V: ExtendFromSlice<u8>>(v: &mut V) {
256 /// v.extend_from_slice(b"Test!");
257 /// }
258 ///
259 /// let mut vec = Vec::new();
260 /// initialize(&mut vec);
261 /// assert_eq!(&vec, b"Test!");
262 ///
263 /// let mut small_vec = SmallVec::<[u8; 8]>::new();
264 /// initialize(&mut small_vec);
265 /// assert_eq!(&small_vec as &[_], b"Test!");
266 /// ```
267 #[doc(hidden)]
268 #[deprecated]
269 pub trait ExtendFromSlice<T> {
270 /// Extends a collection from a slice of its element type
fn extend_from_slice(&mut self, other: &[T]);
272 }
273
274 #[allow(deprecated)]
275 impl<T: Clone> ExtendFromSlice<T> for Vec<T> {
fn extend_from_slice(&mut self, other: &[T]) {
277 Vec::extend_from_slice(self, other)
278 }
279 }
280
281 /// Error type for APIs with fallible heap allocation
282 #[derive(Debug)]
283 pub enum CollectionAllocErr {
/// Overflow of `usize::MAX` or other error during size computation
CapacityOverflow,
/// The allocator returned an error
287 AllocErr {
288 /// The layout that was passed to the allocator
289 layout: Layout,
290 },
291 }
292
293 impl fmt::Display for CollectionAllocErr {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
295 write!(f, "Allocation error: {:?}", self)
296 }
297 }
298
299 #[allow(deprecated)]
300 impl From<LayoutErr> for CollectionAllocErr {
fn from(_: LayoutErr) -> Self {
302 CollectionAllocErr::CapacityOverflow
303 }
304 }
305
fn infallible<T>(result: Result<T, CollectionAllocErr>) -> T {
307 match result {
308 Ok(x) => x,
309 Err(CollectionAllocErr::CapacityOverflow) => panic!("capacity overflow"),
310 Err(CollectionAllocErr::AllocErr { layout }) => alloc::alloc::handle_alloc_error(layout),
311 }
312 }
313
314 /// FIXME: use `Layout::array` when we require a Rust version where it’s stable
315 /// https://github.com/rust-lang/rust/issues/55724
fn layout_array<T>(n: usize) -> Result<Layout, CollectionAllocErr> {
317 let size = mem::size_of::<T>()
318 .checked_mul(n)
319 .ok_or(CollectionAllocErr::CapacityOverflow)?;
320 let align = mem::align_of::<T>();
321 Layout::from_size_align(size, align).map_err(|_| CollectionAllocErr::CapacityOverflow)
322 }
323
unsafe fn deallocate<T>(ptr: *mut T, capacity: usize) {
// This unwrap should succeed since the same computation succeeded when allocating.
326 let layout = layout_array::<T>(capacity).unwrap();
327 alloc::alloc::dealloc(ptr as *mut u8, layout)
328 }
329
330 /// An iterator that removes the items from a `SmallVec` and yields them by value.
331 ///
332 /// Returned from [`SmallVec::drain`][1].
333 ///
334 /// [1]: struct.SmallVec.html#method.drain
335 pub struct Drain<'a, T: 'a + Array> {
336 tail_start: usize,
337 tail_len: usize,
338 iter: slice::Iter<'a, T::Item>,
339 vec: NonNull<SmallVec<T>>,
340 }
341
342 impl<'a, T: 'a + Array> fmt::Debug for Drain<'a, T>
343 where
344 T::Item: fmt::Debug,
345 {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
347 f.debug_tuple("Drain").field(&self.iter.as_slice()).finish()
348 }
349 }
350
351 unsafe impl<'a, T: Sync + Array> Sync for Drain<'a, T> {}
352 unsafe impl<'a, T: Send + Array> Send for Drain<'a, T> {}
353
354 impl<'a, T: 'a + Array> Iterator for Drain<'a, T> {
355 type Item = T::Item;
356
357 #[inline]
fn next(&mut self) -> Option<T::Item> {
359 self.iter
360 .next()
361 .map(|reference| unsafe { ptr::read(reference) })
362 }
363
364 #[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
366 self.iter.size_hint()
367 }
368 }
369
370 impl<'a, T: 'a + Array> DoubleEndedIterator for Drain<'a, T> {
371 #[inline]
fn next_back(&mut self) -> Option<T::Item> {
373 self.iter
374 .next_back()
375 .map(|reference| unsafe { ptr::read(reference) })
376 }
377 }
378
379 impl<'a, T: Array> ExactSizeIterator for Drain<'a, T> {
380 #[inline]
fn len(&self) -> usize {
382 self.iter.len()
383 }
384 }
385
386 impl<'a, T: Array> FusedIterator for Drain<'a, T> {}
387
388 impl<'a, T: 'a + Array> Drop for Drain<'a, T> {
fn drop(&mut self) {
390 self.for_each(drop);
391
392 if self.tail_len > 0 {
393 unsafe {
394 let source_vec = self.vec.as_mut();
395
396 // memmove back untouched tail, update to new length
397 let start = source_vec.len();
398 let tail = self.tail_start;
399 if tail != start {
400 // as_mut_ptr creates a &mut, invalidating other pointers.
401 // This pattern avoids calling it with a pointer already present.
402 let ptr = source_vec.as_mut_ptr();
403 let src = ptr.add(tail);
404 let dst = ptr.add(start);
405 ptr::copy(src, dst, self.tail_len);
406 }
407 source_vec.set_len(start + self.tail_len);
408 }
409 }
410 }
411 }
412
413 #[cfg(feature = "union")]
414 union SmallVecData<A: Array> {
415 inline: core::mem::ManuallyDrop<MaybeUninit<A>>,
416 heap: (*mut A::Item, usize),
417 }
418
419 #[cfg(all(feature = "union", feature = "const_new"))]
420 impl<T, const N: usize> SmallVecData<[T; N]> {
421 #[cfg_attr(docsrs, doc(cfg(feature = "const_new")))]
422 #[inline]
const fn from_const(inline: MaybeUninit<[T; N]>) -> Self {
424 SmallVecData {
425 inline: core::mem::ManuallyDrop::new(inline),
426 }
427 }
428 }
429
430 #[cfg(feature = "union")]
431 impl<A: Array> SmallVecData<A> {
432 #[inline]
unsafe fn inline(&self) -> *const A::Item {
434 self.inline.as_ptr() as *const A::Item
435 }
436 #[inline]
unsafe fn inline_mut(&mut self) -> *mut A::Item {
438 self.inline.as_mut_ptr() as *mut A::Item
439 }
440 #[inline]
fn from_inline(inline: MaybeUninit<A>) -> SmallVecData<A> {
442 SmallVecData {
443 inline: core::mem::ManuallyDrop::new(inline),
444 }
445 }
446 #[inline]
unsafe fn into_inline(self) -> MaybeUninit<A> {
448 core::mem::ManuallyDrop::into_inner(self.inline)
449 }
450 #[inline]
unsafe fn heap(&self) -> (*mut A::Item, usize) {
452 self.heap
453 }
454 #[inline]
unsafe fn heap_mut(&mut self) -> &mut (*mut A::Item, usize) {
456 &mut self.heap
457 }
458 #[inline]
fn from_heap(ptr: *mut A::Item, len: usize) -> SmallVecData<A> {
460 SmallVecData { heap: (ptr, len) }
461 }
462 }
463
464 #[cfg(not(feature = "union"))]
465 enum SmallVecData<A: Array> {
466 Inline(MaybeUninit<A>),
467 Heap((*mut A::Item, usize)),
468 }
469
470 #[cfg(all(not(feature = "union"), feature = "const_new"))]
471 impl<T, const N: usize> SmallVecData<[T; N]> {
472 #[cfg_attr(docsrs, doc(cfg(feature = "const_new")))]
473 #[inline]
const fn from_const(inline: MaybeUninit<[T; N]>) -> Self {
475 SmallVecData::Inline(inline)
476 }
477 }
478
479 #[cfg(not(feature = "union"))]
480 impl<A: Array> SmallVecData<A> {
481 #[inline]
unsafe fn inline(&self) -> *const A::Item {
483 match self {
484 SmallVecData::Inline(a) => a.as_ptr() as *const A::Item,
485 _ => debug_unreachable!(),
486 }
487 }
488 #[inline]
unsafe fn inline_mut(&mut self) -> *mut A::Item {
490 match self {
491 SmallVecData::Inline(a) => a.as_mut_ptr() as *mut A::Item,
492 _ => debug_unreachable!(),
493 }
494 }
495 #[inline]
fn from_inline(inline: MaybeUninit<A>) -> SmallVecData<A> {
497 SmallVecData::Inline(inline)
498 }
499 #[inline]
unsafe fn into_inline(self) -> MaybeUninit<A> {
501 match self {
502 SmallVecData::Inline(a) => a,
503 _ => debug_unreachable!(),
504 }
505 }
506 #[inline]
unsafe fn heap(&self) -> (*mut A::Item, usize) {
508 match self {
509 SmallVecData::Heap(data) => *data,
510 _ => debug_unreachable!(),
511 }
512 }
513 #[inline]
unsafe fn heap_mut(&mut self) -> &mut (*mut A::Item, usize) {
515 match self {
516 SmallVecData::Heap(data) => data,
517 _ => debug_unreachable!(),
518 }
519 }
520 #[inline]
fn from_heap(ptr: *mut A::Item, len: usize) -> SmallVecData<A> {
522 SmallVecData::Heap((ptr, len))
523 }
524 }
525
526 unsafe impl<A: Array + Send> Send for SmallVecData<A> {}
527 unsafe impl<A: Array + Sync> Sync for SmallVecData<A> {}
528
529 /// A `Vec`-like container that can store a small number of elements inline.
530 ///
531 /// `SmallVec` acts like a vector, but can store a limited amount of data inline within the
532 /// `SmallVec` struct rather than in a separate allocation. If the data exceeds this limit, the
533 /// `SmallVec` will "spill" its data onto the heap, allocating a new buffer to hold it.
534 ///
535 /// The amount of data that a `SmallVec` can store inline depends on its backing store. The backing
/// store can be any type that implements the `Array` trait; usually it is a small fixed-size
/// array. For example, a `SmallVec<[u64; 8]>` can hold up to eight 64-bit integers inline.
538 ///
539 /// ## Example
540 ///
541 /// ```rust
542 /// use smallvec::SmallVec;
543 /// let mut v = SmallVec::<[u8; 4]>::new(); // initialize an empty vector
544 ///
545 /// // The vector can hold up to 4 items without spilling onto the heap.
546 /// v.extend(0..4);
547 /// assert_eq!(v.len(), 4);
548 /// assert!(!v.spilled());
549 ///
550 /// // Pushing another element will force the buffer to spill:
551 /// v.push(4);
552 /// assert_eq!(v.len(), 5);
553 /// assert!(v.spilled());
554 /// ```
555 pub struct SmallVec<A: Array> {
556 // The capacity field is used to determine which of the storage variants is active:
557 // If capacity <= Self::inline_capacity() then the inline variant is used and capacity holds the current length of the vector (number of elements actually in use).
558 // If capacity > Self::inline_capacity() then the heap variant is used and capacity holds the size of the memory allocation.
559 capacity: usize,
560 data: SmallVecData<A>,
561 }
562
563 impl<A: Array> SmallVec<A> {
564 /// Construct an empty vector
565 #[inline]
pub fn new() -> SmallVec<A> {
567 // Try to detect invalid custom implementations of `Array`. Hopefully,
568 // this check should be optimized away entirely for valid ones.
569 assert!(
570 mem::size_of::<A>() == A::size() * mem::size_of::<A::Item>()
571 && mem::align_of::<A>() >= mem::align_of::<A::Item>()
572 );
573 SmallVec {
574 capacity: 0,
575 data: SmallVecData::from_inline(MaybeUninit::uninit()),
576 }
577 }
578
579 /// Construct an empty vector with enough capacity pre-allocated to store at least `n`
580 /// elements.
581 ///
582 /// Will create a heap allocation only if `n` is larger than the inline capacity.
583 ///
584 /// ```
585 /// # use smallvec::SmallVec;
586 ///
587 /// let v: SmallVec<[u8; 3]> = SmallVec::with_capacity(100);
588 ///
589 /// assert!(v.is_empty());
590 /// assert!(v.capacity() >= 100);
591 /// ```
592 #[inline]
pub fn with_capacity(n: usize) -> Self {
594 let mut v = SmallVec::new();
595 v.reserve_exact(n);
596 v
597 }
598
599 /// Construct a new `SmallVec` from a `Vec<A::Item>`.
600 ///
/// Elements will be copied to the inline buffer if `vec.capacity() <= Self::inline_capacity()`.
602 ///
603 /// ```rust
604 /// use smallvec::SmallVec;
605 ///
606 /// let vec = vec![1, 2, 3, 4, 5];
607 /// let small_vec: SmallVec<[_; 3]> = SmallVec::from_vec(vec);
608 ///
609 /// assert_eq!(&*small_vec, &[1, 2, 3, 4, 5]);
610 /// ```
611 #[inline]
pub fn from_vec(mut vec: Vec<A::Item>) -> SmallVec<A> {
613 if vec.capacity() <= Self::inline_capacity() {
614 unsafe {
615 let mut data = SmallVecData::<A>::from_inline(MaybeUninit::uninit());
616 let len = vec.len();
617 vec.set_len(0);
618 ptr::copy_nonoverlapping(vec.as_ptr(), data.inline_mut(), len);
619
620 SmallVec {
621 capacity: len,
622 data,
623 }
624 }
625 } else {
626 let (ptr, cap, len) = (vec.as_mut_ptr(), vec.capacity(), vec.len());
627 mem::forget(vec);
628
629 SmallVec {
630 capacity: cap,
631 data: SmallVecData::from_heap(ptr, len),
632 }
633 }
634 }
635
636 /// Constructs a new `SmallVec` on the stack from an `A` without
637 /// copying elements.
638 ///
639 /// ```rust
640 /// use smallvec::SmallVec;
641 ///
642 /// let buf = [1, 2, 3, 4, 5];
643 /// let small_vec: SmallVec<_> = SmallVec::from_buf(buf);
644 ///
645 /// assert_eq!(&*small_vec, &[1, 2, 3, 4, 5]);
646 /// ```
647 #[inline]
pub fn from_buf(buf: A) -> SmallVec<A> {
649 SmallVec {
650 capacity: A::size(),
651 data: SmallVecData::from_inline(MaybeUninit::new(buf)),
652 }
653 }
654
655 /// Constructs a new `SmallVec` on the stack from an `A` without
/// copying elements. Also sets the length, which must be less than or
/// equal to the size of `buf`.
658 ///
659 /// ```rust
660 /// use smallvec::SmallVec;
661 ///
662 /// let buf = [1, 2, 3, 4, 5, 0, 0, 0];
663 /// let small_vec: SmallVec<_> = SmallVec::from_buf_and_len(buf, 5);
664 ///
665 /// assert_eq!(&*small_vec, &[1, 2, 3, 4, 5]);
666 /// ```
667 #[inline]
pub fn from_buf_and_len(buf: A, len: usize) -> SmallVec<A> {
669 assert!(len <= A::size());
670 unsafe { SmallVec::from_buf_and_len_unchecked(MaybeUninit::new(buf), len) }
671 }
672
673 /// Constructs a new `SmallVec` on the stack from an `A` without
674 /// copying elements. Also sets the length. The user is responsible
675 /// for ensuring that `len <= A::size()`.
676 ///
677 /// ```rust
678 /// use smallvec::SmallVec;
679 /// use std::mem::MaybeUninit;
680 ///
681 /// let buf = [1, 2, 3, 4, 5, 0, 0, 0];
682 /// let small_vec: SmallVec<_> = unsafe {
683 /// SmallVec::from_buf_and_len_unchecked(MaybeUninit::new(buf), 5)
684 /// };
685 ///
686 /// assert_eq!(&*small_vec, &[1, 2, 3, 4, 5]);
687 /// ```
688 #[inline]
pub unsafe fn from_buf_and_len_unchecked(buf: MaybeUninit<A>, len: usize) -> SmallVec<A> {
690 SmallVec {
691 capacity: len,
692 data: SmallVecData::from_inline(buf),
693 }
694 }
695
696 /// Sets the length of a vector.
697 ///
698 /// This will explicitly set the size of the vector, without actually
699 /// modifying its buffers, so it is up to the caller to ensure that the
700 /// vector is actually the specified size.
pub unsafe fn set_len(&mut self, new_len: usize) {
702 let (_, len_ptr, _) = self.triple_mut();
703 *len_ptr = new_len;
704 }
705
706 /// The maximum number of elements this vector can hold inline
707 #[inline]
fn inline_capacity() -> usize {
709 if mem::size_of::<A::Item>() > 0 {
710 A::size()
711 } else {
712 // For zero-size items code like `ptr.add(offset)` always returns the same pointer.
713 // Therefore all items are at the same address,
714 // and any array size has capacity for infinitely many items.
715 // The capacity is limited by the bit width of the length field.
716 //
717 // `Vec` also does this:
718 // https://github.com/rust-lang/rust/blob/1.44.0/src/liballoc/raw_vec.rs#L186
719 //
720 // In our case, this also ensures that a smallvec of zero-size items never spills,
721 // and we never try to allocate zero bytes which `std::alloc::alloc` disallows.
722 core::usize::MAX
723 }
724 }
725
726 /// The maximum number of elements this vector can hold inline
727 #[inline]
pub fn inline_size(&self) -> usize {
729 Self::inline_capacity()
730 }
731
732 /// The number of elements stored in the vector
733 #[inline]
pub fn len(&self) -> usize {
735 self.triple().1
736 }
737
738 /// Returns `true` if the vector is empty
739 #[inline]
pub fn is_empty(&self) -> bool {
741 self.len() == 0
742 }
743
744 /// The number of items the vector can hold without reallocating
745 #[inline]
pub fn capacity(&self) -> usize {
747 self.triple().2
748 }
749
750 /// Returns a tuple with (data ptr, len, capacity)
751 /// Useful to get all SmallVec properties with a single check of the current storage variant.
752 #[inline]
fn triple(&self) -> (*const A::Item, usize, usize) {
754 unsafe {
755 if self.spilled() {
756 let (ptr, len) = self.data.heap();
757 (ptr, len, self.capacity)
758 } else {
759 (self.data.inline(), self.capacity, Self::inline_capacity())
760 }
761 }
762 }
763
764 /// Returns a tuple with (data ptr, len ptr, capacity)
765 #[inline]
fn triple_mut(&mut self) -> (*mut A::Item, &mut usize, usize) {
767 unsafe {
768 if self.spilled() {
769 let &mut (ptr, ref mut len_ptr) = self.data.heap_mut();
770 (ptr, len_ptr, self.capacity)
771 } else {
772 (
773 self.data.inline_mut(),
774 &mut self.capacity,
775 Self::inline_capacity(),
776 )
777 }
778 }
779 }
780
781 /// Returns `true` if the data has spilled into a separate heap-allocated buffer.
782 #[inline]
pub fn spilled(&self) -> bool {
784 self.capacity > Self::inline_capacity()
785 }
786
787 /// Creates a draining iterator that removes the specified range in the vector
788 /// and yields the removed items.
789 ///
790 /// Note 1: The element range is removed even if the iterator is only
791 /// partially consumed or not consumed at all.
792 ///
793 /// Note 2: It is unspecified how many elements are removed from the vector
794 /// if the `Drain` value is leaked.
795 ///
796 /// # Panics
797 ///
798 /// Panics if the starting point is greater than the end point or if
799 /// the end point is greater than the length of the vector.
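///
/// A short example of draining a middle range:
///
/// ```
/// # use smallvec::{smallvec, SmallVec};
/// let mut v: SmallVec<[i32; 4]> = smallvec![1, 2, 3, 4];
/// let drained: SmallVec<[i32; 4]> = v.drain(1..3).collect();
/// assert_eq!(&*drained, &[2, 3]);
/// assert_eq!(&*v, &[1, 4]);
/// ```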
pub fn drain<R>(&mut self, range: R) -> Drain<'_, A>
801 where
802 R: RangeBounds<usize>,
803 {
804 use core::ops::Bound::*;
805
806 let len = self.len();
807 let start = match range.start_bound() {
808 Included(&n) => n,
809 Excluded(&n) => n.checked_add(1).expect("Range start out of bounds"),
810 Unbounded => 0,
811 };
812 let end = match range.end_bound() {
813 Included(&n) => n.checked_add(1).expect("Range end out of bounds"),
814 Excluded(&n) => n,
815 Unbounded => len,
816 };
817
818 assert!(start <= end);
819 assert!(end <= len);
820
821 unsafe {
822 self.set_len(start);
823
824 let range_slice = slice::from_raw_parts(self.as_ptr().add(start), end - start);
825
826 Drain {
827 tail_start: end,
828 tail_len: len - end,
829 iter: range_slice.iter(),
830 // Since self is a &mut, passing it to a function would invalidate the slice iterator.
831 vec: NonNull::new_unchecked(self as *mut _),
832 }
833 }
834 }
835
836 /// Append an item to the vector.
837 #[inline]
pub fn push(&mut self, value: A::Item) {
839 unsafe {
840 let (mut ptr, mut len, cap) = self.triple_mut();
841 if *len == cap {
842 self.reserve(1);
843 let &mut (heap_ptr, ref mut heap_len) = self.data.heap_mut();
844 ptr = heap_ptr;
845 len = heap_len;
846 }
847 ptr::write(ptr.add(*len), value);
848 *len += 1;
849 }
850 }
851
852 /// Remove an item from the end of the vector and return it, or None if empty.
853 #[inline]
pub fn pop(&mut self) -> Option<A::Item> {
855 unsafe {
856 let (ptr, len_ptr, _) = self.triple_mut();
857 if *len_ptr == 0 {
858 return None;
859 }
860 let last_index = *len_ptr - 1;
861 *len_ptr = last_index;
862 Some(ptr::read(ptr.add(last_index)))
863 }
864 }
865
866 /// Moves all the elements of `other` into `self`, leaving `other` empty.
867 ///
868 /// # Example
869 ///
870 /// ```
871 /// # use smallvec::{SmallVec, smallvec};
872 /// let mut v0: SmallVec<[u8; 16]> = smallvec![1, 2, 3];
873 /// let mut v1: SmallVec<[u8; 32]> = smallvec![4, 5, 6];
874 /// v0.append(&mut v1);
875 /// assert_eq!(*v0, [1, 2, 3, 4, 5, 6]);
876 /// assert_eq!(*v1, []);
877 /// ```
pub fn append<B>(&mut self, other: &mut SmallVec<B>)
879 where
880 B: Array<Item = A::Item>,
881 {
882 self.extend(other.drain(..))
883 }
884
885 /// Re-allocate to set the capacity to `max(new_cap, inline_size())`.
886 ///
887 /// Panics if `new_cap` is less than the vector's length
888 /// or if the capacity computation overflows `usize`.
pub fn grow(&mut self, new_cap: usize) {
890 infallible(self.try_grow(new_cap))
891 }
892
893 /// Re-allocate to set the capacity to `max(new_cap, inline_size())`.
894 ///
895 /// Panics if `new_cap` is less than the vector's length
pub fn try_grow(&mut self, new_cap: usize) -> Result<(), CollectionAllocErr> {
897 unsafe {
898 let (ptr, &mut len, cap) = self.triple_mut();
899 let unspilled = !self.spilled();
900 assert!(new_cap >= len);
901 if new_cap <= self.inline_size() {
902 if unspilled {
903 return Ok(());
904 }
905 self.data = SmallVecData::from_inline(MaybeUninit::uninit());
906 ptr::copy_nonoverlapping(ptr, self.data.inline_mut(), len);
907 self.capacity = len;
908 deallocate(ptr, cap);
909 } else if new_cap != cap {
910 let layout = layout_array::<A::Item>(new_cap)?;
911 debug_assert!(layout.size() > 0);
912 let new_alloc;
913 if unspilled {
914 new_alloc = NonNull::new(alloc::alloc::alloc(layout))
915 .ok_or(CollectionAllocErr::AllocErr { layout })?
916 .cast()
917 .as_ptr();
918 ptr::copy_nonoverlapping(ptr, new_alloc, len);
919 } else {
920 // This should never fail since the same succeeded
921 // when previously allocating `ptr`.
922 let old_layout = layout_array::<A::Item>(cap)?;
923
924 let new_ptr = alloc::alloc::realloc(ptr as *mut u8, old_layout, layout.size());
925 new_alloc = NonNull::new(new_ptr)
926 .ok_or(CollectionAllocErr::AllocErr { layout })?
927 .cast()
928 .as_ptr();
929 }
930 self.data = SmallVecData::from_heap(new_alloc, len);
931 self.capacity = new_cap;
932 }
933 Ok(())
934 }
935 }
936
937 /// Reserve capacity for `additional` more elements to be inserted.
938 ///
939 /// May reserve more space to avoid frequent reallocations.
940 ///
941 /// Panics if the capacity computation overflows `usize`.
942 #[inline]
pub fn reserve(&mut self, additional: usize) {
944 infallible(self.try_reserve(additional))
945 }
946
947 /// Reserve capacity for `additional` more elements to be inserted.
948 ///
949 /// May reserve more space to avoid frequent reallocations.
pub fn try_reserve(&mut self, additional: usize) -> Result<(), CollectionAllocErr> {
951 // prefer triple_mut() even if triple() would work
952 // so that the optimizer removes duplicated calls to it
953 // from callers like insert()
954 let (_, &mut len, cap) = self.triple_mut();
955 if cap - len >= additional {
956 return Ok(());
957 }
958 let new_cap = len
959 .checked_add(additional)
960 .and_then(usize::checked_next_power_of_two)
961 .ok_or(CollectionAllocErr::CapacityOverflow)?;
962 self.try_grow(new_cap)
963 }
964
965 /// Reserve the minimum capacity for `additional` more elements to be inserted.
966 ///
967 /// Panics if the new capacity overflows `usize`.
pub fn reserve_exact(&mut self, additional: usize) {
969 infallible(self.try_reserve_exact(additional))
970 }
971
972 /// Reserve the minimum capacity for `additional` more elements to be inserted.
pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), CollectionAllocErr> {
974 let (_, &mut len, cap) = self.triple_mut();
975 if cap - len >= additional {
976 return Ok(());
977 }
978 let new_cap = len
979 .checked_add(additional)
980 .ok_or(CollectionAllocErr::CapacityOverflow)?;
981 self.try_grow(new_cap)
982 }
983
984 /// Shrink the capacity of the vector as much as possible.
985 ///
986 /// When possible, this will move data from an external heap buffer to the vector's inline
987 /// storage.
pub fn shrink_to_fit(&mut self) {
989 if !self.spilled() {
990 return;
991 }
992 let len = self.len();
993 if self.inline_size() >= len {
994 unsafe {
995 let (ptr, len) = self.data.heap();
996 self.data = SmallVecData::from_inline(MaybeUninit::uninit());
997 ptr::copy_nonoverlapping(ptr, self.data.inline_mut(), len);
998 deallocate(ptr, self.capacity);
999 self.capacity = len;
1000 }
1001 } else if self.capacity() > len {
1002 self.grow(len);
1003 }
1004 }
1005
1006 /// Shorten the vector, keeping the first `len` elements and dropping the rest.
1007 ///
1008 /// If `len` is greater than or equal to the vector's current length, this has no
1009 /// effect.
1010 ///
1011 /// This does not re-allocate. If you want the vector's capacity to shrink, call
1012 /// `shrink_to_fit` after truncating.
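///
/// A short example:
///
/// ```
/// # use smallvec::{smallvec, SmallVec};
/// let mut v: SmallVec<[i32; 4]> = smallvec![1, 2, 3, 4];
/// v.truncate(2);
/// assert_eq!(&*v, &[1, 2]);
/// ```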
pub fn truncate(&mut self, len: usize) {
1014 unsafe {
1015 let (ptr, len_ptr, _) = self.triple_mut();
1016 while len < *len_ptr {
1017 let last_index = *len_ptr - 1;
1018 *len_ptr = last_index;
1019 ptr::drop_in_place(ptr.add(last_index));
1020 }
1021 }
1022 }
1023
1024 /// Extracts a slice containing the entire vector.
1025 ///
1026 /// Equivalent to `&s[..]`.
pub fn as_slice(&self) -> &[A::Item] {
1028 self
1029 }
1030
1031 /// Extracts a mutable slice of the entire vector.
1032 ///
1033 /// Equivalent to `&mut s[..]`.
pub fn as_mut_slice(&mut self) -> &mut [A::Item] {
1035 self
1036 }
1037
1038 /// Remove the element at position `index`, replacing it with the last element.
1039 ///
1040 /// This does not preserve ordering, but is O(1).
1041 ///
1042 /// Panics if `index` is out of bounds.
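///
/// A short example of the swap-based removal:
///
/// ```
/// # use smallvec::{smallvec, SmallVec};
/// let mut v: SmallVec<[i32; 4]> = smallvec![1, 2, 3, 4];
/// assert_eq!(v.swap_remove(0), 1);
/// // The last element was moved into the vacated slot.
/// assert_eq!(&*v, &[4, 2, 3]);
/// ```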
1043 #[inline]
pub fn swap_remove(&mut self, index: usize) -> A::Item {
1045 let len = self.len();
1046 self.swap(len - 1, index);
1047 self.pop()
1048 .unwrap_or_else(|| unsafe { unreachable_unchecked() })
1049 }
1050
1051 /// Remove all elements from the vector.
1052 #[inline]
pub fn clear(&mut self) {
1054 self.truncate(0);
1055 }
1056
1057 /// Remove and return the element at position `index`, shifting all elements after it to the
1058 /// left.
1059 ///
1060 /// Panics if `index` is out of bounds.
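///
/// A short example:
///
/// ```
/// # use smallvec::{smallvec, SmallVec};
/// let mut v: SmallVec<[i32; 4]> = smallvec![1, 2, 3];
/// assert_eq!(v.remove(1), 2);
/// assert_eq!(&*v, &[1, 3]);
/// ```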
pub fn remove(&mut self, index: usize) -> A::Item {
1062 unsafe {
1063 let (mut ptr, len_ptr, _) = self.triple_mut();
1064 let len = *len_ptr;
1065 assert!(index < len);
1066 *len_ptr = len - 1;
1067 ptr = ptr.add(index);
1068 let item = ptr::read(ptr);
1069 ptr::copy(ptr.add(1), ptr, len - index - 1);
1070 item
1071 }
1072 }
1073
1074 /// Insert an element at position `index`, shifting all elements after it to the right.
1075 ///
1076 /// Panics if `index > len`.
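///
/// A short example:
///
/// ```
/// # use smallvec::{smallvec, SmallVec};
/// let mut v: SmallVec<[i32; 4]> = smallvec![1, 3];
/// v.insert(1, 2);
/// assert_eq!(&*v, &[1, 2, 3]);
/// ```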
pub fn insert(&mut self, index: usize, element: A::Item) {
1078 self.reserve(1);
1079
1080 unsafe {
1081 let (mut ptr, len_ptr, _) = self.triple_mut();
1082 let len = *len_ptr;
1083 ptr = ptr.add(index);
1084 if index < len {
1085 ptr::copy(ptr, ptr.add(1), len - index);
1086 } else if index == len {
1087 // No elements need shifting.
1088 } else {
1089 panic!("index exceeds length");
1090 }
1091 *len_ptr = len + 1;
1092 ptr::write(ptr, element);
1093 }
1094 }
1095
1096 /// Insert multiple elements at position `index`, shifting all following elements toward the
1097 /// back.
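///
/// A short example:
///
/// ```
/// # use smallvec::{smallvec, SmallVec};
/// let mut v: SmallVec<[i32; 8]> = smallvec![1, 2, 3];
/// v.insert_many(1, [5, 6].iter().cloned());
/// assert_eq!(&*v, &[1, 5, 6, 2, 3]);
/// ```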
pub fn insert_many<I: IntoIterator<Item = A::Item>>(&mut self, index: usize, iterable: I) {
1099 let mut iter = iterable.into_iter();
1100 if index == self.len() {
1101 return self.extend(iter);
1102 }
1103
1104 let (lower_size_bound, _) = iter.size_hint();
1105 assert!(lower_size_bound <= core::isize::MAX as usize); // Ensure offset is indexable
1106 assert!(index + lower_size_bound >= index); // Protect against overflow
1107
1108 let mut num_added = 0;
1109 let old_len = self.len();
1110 assert!(index <= old_len);
1111
1112 unsafe {
1113 // Reserve space for `lower_size_bound` elements.
1114 self.reserve(lower_size_bound);
1115 let start = self.as_mut_ptr();
1116 let ptr = start.add(index);
1117
1118 // Move the trailing elements.
1119 ptr::copy(ptr, ptr.add(lower_size_bound), old_len - index);
1120
1121 // In case the iterator panics, don't double-drop the items we just copied above.
1122 self.set_len(0);
1123 let mut guard = DropOnPanic {
1124 start,
1125 skip: index..(index + lower_size_bound),
1126 len: old_len + lower_size_bound,
1127 };
1128
1129 // The set_len above invalidates the previous pointers, so we must re-create them.
1130 let start = self.as_mut_ptr();
1131 let ptr = start.add(index);
1132
1133 while num_added < lower_size_bound {
1134 let element = match iter.next() {
1135 Some(x) => x,
1136 None => break,
1137 };
1138 let cur = ptr.add(num_added);
1139 ptr::write(cur, element);
1140 guard.skip.start += 1;
1141 num_added += 1;
1142 }
1143
1144 if num_added < lower_size_bound {
1145 // Iterator provided fewer elements than the hint. Move the tail backward.
1146 ptr::copy(
1147 ptr.add(lower_size_bound),
1148 ptr.add(num_added),
1149 old_len - index,
1150 );
1151 }
1152 // There are no more duplicate or uninitialized slots, so the guard is not needed.
1153 self.set_len(old_len + num_added);
1154 mem::forget(guard);
1155 }
1156
1157 // Insert any remaining elements one-by-one.
1158 for element in iter {
1159 self.insert(index + num_added, element);
1160 num_added += 1;
1161 }
1162
1163 struct DropOnPanic<T> {
1164 start: *mut T,
1165 skip: Range<usize>, // Space we copied-out-of, but haven't written-to yet.
1166 len: usize,
1167 }
1168
1169 impl<T> Drop for DropOnPanic<T> {
1170 fn drop(&mut self) {
1171 for i in 0..self.len {
1172 if !self.skip.contains(&i) {
1173 unsafe {
1174 ptr::drop_in_place(self.start.add(i));
1175 }
1176 }
1177 }
1178 }
1179 }
1180 }
1181
1182 /// Convert a SmallVec to a Vec, without reallocating if the SmallVec has already spilled onto
1183 /// the heap.
pub fn into_vec(self) -> Vec<A::Item> {
1185 if self.spilled() {
1186 unsafe {
1187 let (ptr, len) = self.data.heap();
1188 let v = Vec::from_raw_parts(ptr, len, self.capacity);
1189 mem::forget(self);
1190 v
1191 }
1192 } else {
1193 self.into_iter().collect()
1194 }
1195 }
1196
1197 /// Converts a `SmallVec` into a `Box<[T]>` without reallocating if the `SmallVec` has already spilled
1198 /// onto the heap.
1199 ///
1200 /// Note that this will drop any excess capacity.
pub fn into_boxed_slice(self) -> Box<[A::Item]> {
1202 self.into_vec().into_boxed_slice()
1203 }
1204
1205 /// Convert the SmallVec into an `A` if possible. Otherwise return `Err(Self)`.
1206 ///
1207 /// This method returns `Err(Self)` if the SmallVec is too short (and the `A` contains uninitialized elements),
1208 /// or if the SmallVec is too long (and all the elements were spilled to the heap).
pub fn into_inner(self) -> Result<A, Self> {
1210 if self.spilled() || self.len() != A::size() {
1211 // Note: A::size, not Self::inline_capacity
1212 Err(self)
1213 } else {
1214 unsafe {
1215 let data = ptr::read(&self.data);
1216 mem::forget(self);
1217 Ok(data.into_inline().assume_init())
1218 }
1219 }
1220 }
1221
1222 /// Retains only the elements specified by the predicate.
1223 ///
1224 /// In other words, remove all elements `e` such that `f(&e)` returns `false`.
1225 /// This method operates in place and preserves the order of the retained
1226 /// elements.
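///
/// A short example of keeping only the even elements:
///
/// ```
/// # use smallvec::{smallvec, SmallVec};
/// let mut v: SmallVec<[i32; 8]> = smallvec![1, 2, 3, 4, 5, 6];
/// v.retain(|x| *x % 2 == 0);
/// assert_eq!(&*v, &[2, 4, 6]);
/// ```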
pub fn retain<F: FnMut(&mut A::Item) -> bool>(&mut self, mut f: F) {
1228 let mut del = 0;
1229 let len = self.len();
1230 for i in 0..len {
1231 if !f(&mut self[i]) {
1232 del += 1;
1233 } else if del > 0 {
1234 self.swap(i - del, i);
1235 }
1236 }
1237 self.truncate(len - del);
1238 }
1239
1240 /// Retains only the elements specified by the predicate.
1241 ///
/// This method is identical in behaviour to [`retain`]; it is included only
/// to maintain API compatibility with `std::vec::Vec`, where the methods are
/// separate for historical reasons.
pub fn retain_mut<F: FnMut(&mut A::Item) -> bool>(&mut self, f: F) {
1246 self.retain(f)
1247 }
1248
1249 /// Removes consecutive duplicate elements.
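///
/// A short example:
///
/// ```
/// # use smallvec::{smallvec, SmallVec};
/// let mut v: SmallVec<[i32; 8]> = smallvec![1, 1, 2, 3, 3, 3];
/// v.dedup();
/// assert_eq!(&*v, &[1, 2, 3]);
/// ```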
pub fn dedup(&mut self)
1251 where
1252 A::Item: PartialEq<A::Item>,
1253 {
1254 self.dedup_by(|a, b| a == b);
1255 }
1256
1257 /// Removes consecutive duplicate elements using the given equality relation.
pub fn dedup_by<F>(&mut self, mut same_bucket: F)
1259 where
1260 F: FnMut(&mut A::Item, &mut A::Item) -> bool,
1261 {
1262 // See the implementation of Vec::dedup_by in the
1263 // standard library for an explanation of this algorithm.
1264 let len = self.len();
1265 if len <= 1 {
1266 return;
1267 }
1268
1269 let ptr = self.as_mut_ptr();
1270 let mut w: usize = 1;
1271
1272 unsafe {
1273 for r in 1..len {
1274 let p_r = ptr.add(r);
1275 let p_wm1 = ptr.add(w - 1);
1276 if !same_bucket(&mut *p_r, &mut *p_wm1) {
1277 if r != w {
1278 let p_w = p_wm1.add(1);
1279 mem::swap(&mut *p_r, &mut *p_w);
1280 }
1281 w += 1;
1282 }
1283 }
1284 }
1285
1286 self.truncate(w);
1287 }
1288
1289 /// Removes consecutive elements that map to the same key.
pub fn dedup_by_key<F, K>(&mut self, mut key: F)
1291 where
1292 F: FnMut(&mut A::Item) -> K,
1293 K: PartialEq<K>,
1294 {
1295 self.dedup_by(|a, b| key(a) == key(b));
1296 }
1297
1298 /// Resizes the `SmallVec` in-place so that `len` is equal to `new_len`.
1299 ///
1300 /// If `new_len` is greater than `len`, the `SmallVec` is extended by the difference, with each
1301 /// additional slot filled with the result of calling the closure `f`. The return values from `f`
/// will end up in the `SmallVec` in the order they have been generated.
1303 ///
1304 /// If `new_len` is less than `len`, the `SmallVec` is simply truncated.
1305 ///
1306 /// This method uses a closure to create new values on every push. If you'd rather `Clone` a given
1307 /// value, use `resize`. If you want to use the `Default` trait to generate values, you can pass
1308 /// `Default::default()` as the second argument.
1309 ///
1310 /// Added for std::vec::Vec compatibility (added in Rust 1.33.0)
1311 ///
1312 /// ```
1313 /// # use smallvec::{smallvec, SmallVec};
1314 /// let mut vec : SmallVec<[_; 4]> = smallvec![1, 2, 3];
1315 /// vec.resize_with(5, Default::default);
1316 /// assert_eq!(&*vec, &[1, 2, 3, 0, 0]);
1317 ///
1318 /// let mut vec : SmallVec<[_; 4]> = smallvec![];
1319 /// let mut p = 1;
1320 /// vec.resize_with(4, || { p *= 2; p });
1321 /// assert_eq!(&*vec, &[2, 4, 8, 16]);
1322 /// ```
pub fn resize_with<F>(&mut self, new_len: usize, f: F)
1324 where
1325 F: FnMut() -> A::Item,
1326 {
1327 let old_len = self.len();
1328 if old_len < new_len {
1329 let mut f = f;
1330 let additional = new_len - old_len;
1331 self.reserve(additional);
1332 for _ in 0..additional {
1333 self.push(f());
1334 }
1335 } else if old_len > new_len {
1336 self.truncate(new_len);
1337 }
1338 }
1339
1340 /// Creates a `SmallVec` directly from the raw components of another
1341 /// `SmallVec`.
1342 ///
1343 /// # Safety
1344 ///
1345 /// This is highly unsafe, due to the number of invariants that aren't
1346 /// checked:
1347 ///
1348 /// * `ptr` needs to have been previously allocated via `SmallVec` for its
1349 /// spilled storage (at least, it's highly likely to be incorrect if it
1350 /// wasn't).
1351 /// * `ptr`'s `A::Item` type needs to be the same size and alignment that
1352 /// it was allocated with
1353 /// * `length` needs to be less than or equal to `capacity`.
1354 /// * `capacity` needs to be the capacity that the pointer was allocated
1355 /// with.
1356 ///
1357 /// Violating these may cause problems like corrupting the allocator's
1358 /// internal data structures.
1359 ///
1360 /// Additionally, `capacity` must be greater than the amount of inline
1361 /// storage `A` has; that is, the new `SmallVec` must need to spill over
/// into heap-allocated storage. This condition is checked with an assertion.
1363 ///
1364 /// The ownership of `ptr` is effectively transferred to the
1365 /// `SmallVec` which may then deallocate, reallocate or change the
1366 /// contents of memory pointed to by the pointer at will. Ensure
1367 /// that nothing else uses the pointer after calling this
1368 /// function.
1369 ///
1370 /// # Examples
1371 ///
1372 /// ```
1373 /// # #[macro_use] extern crate smallvec;
1374 /// # use smallvec::SmallVec;
1375 /// use std::mem;
1376 /// use std::ptr;
1377 ///
1378 /// fn main() {
1379 /// let mut v: SmallVec<[_; 1]> = smallvec![1, 2, 3];
1380 ///
1381 /// // Pull out the important parts of `v`.
1382 /// let p = v.as_mut_ptr();
1383 /// let len = v.len();
1384 /// let cap = v.capacity();
1385 /// let spilled = v.spilled();
1386 ///
1387 /// unsafe {
1388 /// // Forget all about `v`. The heap allocation that stored the
1389 /// // three values won't be deallocated.
1390 /// mem::forget(v);
1391 ///
1392 /// // Overwrite memory with [4, 5, 6].
1393 /// //
1394 /// // This is only safe if `spilled` is true! Otherwise, we are
1395 /// // writing into the old `SmallVec`'s inline storage on the
1396 /// // stack.
1397 /// assert!(spilled);
1398 /// for i in 0..len {
1399 /// ptr::write(p.add(i), 4 + i);
1400 /// }
1401 ///
1402 /// // Put everything back together into a SmallVec with a different
1403 /// // amount of inline storage, but which is still less than `cap`.
1404 /// let rebuilt = SmallVec::<[_; 2]>::from_raw_parts(p, len, cap);
1405 /// assert_eq!(&*rebuilt, &[4, 5, 6]);
1406 /// }
/// }
/// ```
1408 #[inline]
pub unsafe fn from_raw_parts(ptr: *mut A::Item, length: usize, capacity: usize) -> SmallVec<A> {
1410 assert!(capacity > Self::inline_capacity());
1411 SmallVec {
1412 capacity,
1413 data: SmallVecData::from_heap(ptr, length),
1414 }
1415 }
1416
1417 /// Returns a raw pointer to the vector's buffer.
pub fn as_ptr(&self) -> *const A::Item {
1419 // We shadow the slice method of the same name to avoid going through
1420 // `deref`, which creates an intermediate reference that may place
1421 // additional safety constraints on the contents of the slice.
1422 self.triple().0
1423 }
1424
1425 /// Returns a raw mutable pointer to the vector's buffer.
pub fn as_mut_ptr(&mut self) -> *mut A::Item {
1427 // We shadow the slice method of the same name to avoid going through
1428 // `deref_mut`, which creates an intermediate reference that may place
1429 // additional safety constraints on the contents of the slice.
1430 self.triple_mut().0
1431 }
1432 }
1433
1434 impl<A: Array> SmallVec<A>
1435 where
1436 A::Item: Copy,
1437 {
1438 /// Copy the elements from a slice into a new `SmallVec`.
1439 ///
1440 /// For slices of `Copy` types, this is more efficient than `SmallVec::from(slice)`.
pub fn from_slice(slice: &[A::Item]) -> Self {
1442 let len = slice.len();
1443 if len <= Self::inline_capacity() {
1444 SmallVec {
1445 capacity: len,
1446 data: SmallVecData::from_inline(unsafe {
1447 let mut data: MaybeUninit<A> = MaybeUninit::uninit();
1448 ptr::copy_nonoverlapping(
1449 slice.as_ptr(),
1450 data.as_mut_ptr() as *mut A::Item,
1451 len,
1452 );
1453 data
1454 }),
1455 }
1456 } else {
1457 let mut b = slice.to_vec();
1458 let (ptr, cap) = (b.as_mut_ptr(), b.capacity());
1459 mem::forget(b);
1460 SmallVec {
1461 capacity: cap,
1462 data: SmallVecData::from_heap(ptr, len),
1463 }
1464 }
1465 }
1466
1467 /// Copy elements from a slice into the vector at position `index`, shifting any following
1468 /// elements toward the back.
1469 ///
1470 /// For slices of `Copy` types, this is more efficient than `insert`.
pub fn insert_from_slice(&mut self, index: usize, slice: &[A::Item]) {
1472 self.reserve(slice.len());
1473
1474 let len = self.len();
1475 assert!(index <= len);
1476
1477 unsafe {
1478 let slice_ptr = slice.as_ptr();
1479 let ptr = self.as_mut_ptr().add(index);
1480 ptr::copy(ptr, ptr.add(slice.len()), len - index);
1481 ptr::copy_nonoverlapping(slice_ptr, ptr, slice.len());
1482 self.set_len(len + slice.len());
1483 }
1484 }
1485
1486 /// Copy elements from a slice and append them to the vector.
1487 ///
1488 /// For slices of `Copy` types, this is more efficient than `extend`.
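///
/// A short example:
///
/// ```
/// # use smallvec::{smallvec, SmallVec};
/// let mut v: SmallVec<[i32; 8]> = smallvec![1, 2];
/// v.extend_from_slice(&[3, 4, 5]);
/// assert_eq!(&*v, &[1, 2, 3, 4, 5]);
/// ```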
1489 #[inline]
pub fn extend_from_slice(&mut self, slice: &[A::Item]) {
1491 let len = self.len();
1492 self.insert_from_slice(len, slice);
1493 }
1494 }
1495
1496 impl<A: Array> SmallVec<A>
1497 where
1498 A::Item: Clone,
1499 {
1500 /// Resizes the vector so that its length is equal to `len`.
1501 ///
/// If `len` is less than the current length, the vector is simply truncated.
1503 ///
1504 /// If `len` is greater than the current length, `value` is appended to the
1505 /// vector until its length equals `len`.
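///
/// A short example of growing and then shrinking:
///
/// ```
/// # use smallvec::{smallvec, SmallVec};
/// let mut v: SmallVec<[i32; 4]> = smallvec![1, 2, 3];
/// v.resize(5, 0);
/// assert_eq!(&*v, &[1, 2, 3, 0, 0]);
/// v.resize(2, 0);
/// assert_eq!(&*v, &[1, 2]);
/// ```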
pub fn resize(&mut self, len: usize, value: A::Item) {
1507 let old_len = self.len();
1508
1509 if len > old_len {
1510 self.extend(repeat(value).take(len - old_len));
1511 } else {
1512 self.truncate(len);
1513 }
1514 }
1515
1516 /// Creates a `SmallVec` with `n` copies of `elem`.
1517 /// ```
1518 /// use smallvec::SmallVec;
1519 ///
1520 /// let v = SmallVec::<[char; 128]>::from_elem('d', 2);
1521 /// assert_eq!(v, SmallVec::from_buf(['d', 'd']));
1522 /// ```
pub fn from_elem(elem: A::Item, n: usize) -> Self {
1524 if n > Self::inline_capacity() {
1525 vec![elem; n].into()
1526 } else {
1527 let mut v = SmallVec::<A>::new();
1528 unsafe {
1529 let (ptr, len_ptr, _) = v.triple_mut();
1530 let mut local_len = SetLenOnDrop::new(len_ptr);
1531
1532 for i in 0..n {
1533 ::core::ptr::write(ptr.add(i), elem.clone());
1534 local_len.increment_len(1);
1535 }
1536 }
1537 v
1538 }
1539 }
1540 }
1541
1542 impl<A: Array> ops::Deref for SmallVec<A> {
1543 type Target = [A::Item];
1544 #[inline]
fn deref(&self) -> &[A::Item] {
1546 unsafe {
1547 let (ptr, len, _) = self.triple();
1548 slice::from_raw_parts(ptr, len)
1549 }
1550 }
1551 }
1552
1553 impl<A: Array> ops::DerefMut for SmallVec<A> {
1554 #[inline]
fn deref_mut(&mut self) -> &mut [A::Item] {
1556 unsafe {
1557 let (ptr, &mut len, _) = self.triple_mut();
1558 slice::from_raw_parts_mut(ptr, len)
1559 }
1560 }
1561 }
1562
1563 impl<A: Array> AsRef<[A::Item]> for SmallVec<A> {
1564 #[inline]
fn as_ref(&self) -> &[A::Item] {
1566 self
1567 }
1568 }
1569
1570 impl<A: Array> AsMut<[A::Item]> for SmallVec<A> {
1571 #[inline]
fn as_mut(&mut self) -> &mut [A::Item] {
1573 self
1574 }
1575 }
1576
1577 impl<A: Array> Borrow<[A::Item]> for SmallVec<A> {
1578 #[inline]
fn borrow(&self) -> &[A::Item] {
1580 self
1581 }
1582 }
1583
1584 impl<A: Array> BorrowMut<[A::Item]> for SmallVec<A> {
1585 #[inline]
fn borrow_mut(&mut self) -> &mut [A::Item] {
1587 self
1588 }
1589 }
1590
1591 #[cfg(feature = "write")]
1592 #[cfg_attr(docsrs, doc(cfg(feature = "write")))]
1593 impl<A: Array<Item = u8>> io::Write for SmallVec<A> {
1594 #[inline]
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
1596 self.extend_from_slice(buf);
1597 Ok(buf.len())
1598 }
1599
1600 #[inline]
fn write_all(&mut self, buf: &[u8]) -> io::Result<()> {
1602 self.extend_from_slice(buf);
1603 Ok(())
1604 }
1605
1606 #[inline]
fn flush(&mut self) -> io::Result<()> {
1608 Ok(())
1609 }
1610 }
1611
1612 #[cfg(feature = "serde")]
1613 #[cfg_attr(docsrs, doc(cfg(feature = "serde")))]
1614 impl<A: Array> Serialize for SmallVec<A>
1615 where
1616 A::Item: Serialize,
1617 {
fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
1619 let mut state = serializer.serialize_seq(Some(self.len()))?;
1620 for item in self {
1621 state.serialize_element(&item)?;
1622 }
1623 state.end()
1624 }
1625 }
1626
1627 #[cfg(feature = "serde")]
1628 #[cfg_attr(docsrs, doc(cfg(feature = "serde")))]
1629 impl<'de, A: Array> Deserialize<'de> for SmallVec<A>
1630 where
1631 A::Item: Deserialize<'de>,
1632 {
fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
1634 deserializer.deserialize_seq(SmallVecVisitor {
1635 phantom: PhantomData,
1636 })
1637 }
1638 }
1639
1640 #[cfg(feature = "serde")]
1641 struct SmallVecVisitor<A> {
1642 phantom: PhantomData<A>,
1643 }
1644
1645 #[cfg(feature = "serde")]
1646 impl<'de, A: Array> Visitor<'de> for SmallVecVisitor<A>
1647 where
1648 A::Item: Deserialize<'de>,
1649 {
1650 type Value = SmallVec<A>;
1651
fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
1653 formatter.write_str("a sequence")
1654 }
1655
fn visit_seq<B>(self, mut seq: B) -> Result<Self::Value, B::Error>
1657 where
1658 B: SeqAccess<'de>,
1659 {
1660 use serde::de::Error;
1661 let len = seq.size_hint().unwrap_or(0);
1662 let mut values = SmallVec::new();
1663 values.try_reserve(len).map_err(B::Error::custom)?;
1664
1665 while let Some(value) = seq.next_element()? {
1666 values.push(value);
1667 }
1668
1669 Ok(values)
1670 }
1671 }
1672
1673 #[cfg(feature = "specialization")]
1674 trait SpecFrom<A: Array, S> {
fn spec_from(slice: S) -> SmallVec<A>;
1676 }
1677
1678 #[cfg(feature = "specialization")]
1679 mod specialization;
1680
1681 #[cfg(feature = "arbitrary")]
1682 mod arbitrary;
1683
1684 #[cfg(feature = "specialization")]
1685 impl<'a, A: Array> SpecFrom<A, &'a [A::Item]> for SmallVec<A>
1686 where
1687 A::Item: Copy,
1688 {
1689 #[inline]
spec_from(slice: &'a [A::Item]) -> SmallVec<A>1690 fn spec_from(slice: &'a [A::Item]) -> SmallVec<A> {
1691 SmallVec::from_slice(slice)
1692 }
1693 }
1694
1695 impl<'a, A: Array> From<&'a [A::Item]> for SmallVec<A>
1696 where
1697 A::Item: Clone,
1698 {
1699 #[cfg(not(feature = "specialization"))]
1700 #[inline]
from(slice: &'a [A::Item]) -> SmallVec<A>1701 fn from(slice: &'a [A::Item]) -> SmallVec<A> {
1702 slice.iter().cloned().collect()
1703 }
1704
1705 #[cfg(feature = "specialization")]
1706 #[inline]
from(slice: &'a [A::Item]) -> SmallVec<A>1707 fn from(slice: &'a [A::Item]) -> SmallVec<A> {
1708 SmallVec::spec_from(slice)
1709 }
1710 }
1711
1712 impl<A: Array> From<Vec<A::Item>> for SmallVec<A> {
1713 #[inline]
from(vec: Vec<A::Item>) -> SmallVec<A>1714 fn from(vec: Vec<A::Item>) -> SmallVec<A> {
1715 SmallVec::from_vec(vec)
1716 }
1717 }
1718
1719 impl<A: Array> From<A> for SmallVec<A> {
1720 #[inline]
from(array: A) -> SmallVec<A>1721 fn from(array: A) -> SmallVec<A> {
1722 SmallVec::from_buf(array)
1723 }
1724 }

impl<A: Array, I: SliceIndex<[A::Item]>> ops::Index<I> for SmallVec<A> {
    type Output = I::Output;

    fn index(&self, index: I) -> &I::Output {
        &(**self)[index]
    }
}

impl<A: Array, I: SliceIndex<[A::Item]>> ops::IndexMut<I> for SmallVec<A> {
    fn index_mut(&mut self, index: I) -> &mut I::Output {
        &mut (&mut **self)[index]
    }
}

#[allow(deprecated)]
impl<A: Array> ExtendFromSlice<A::Item> for SmallVec<A>
where
    A::Item: Copy,
{
    fn extend_from_slice(&mut self, other: &[A::Item]) {
        SmallVec::extend_from_slice(self, other)
    }
}

impl<A: Array> FromIterator<A::Item> for SmallVec<A> {
    #[inline]
    fn from_iter<I: IntoIterator<Item = A::Item>>(iterable: I) -> SmallVec<A> {
        let mut v = SmallVec::new();
        v.extend(iterable);
        v
    }
}

impl<A: Array> Extend<A::Item> for SmallVec<A> {
    fn extend<I: IntoIterator<Item = A::Item>>(&mut self, iterable: I) {
        let mut iter = iterable.into_iter();
        let (lower_size_bound, _) = iter.size_hint();
        self.reserve(lower_size_bound);

        unsafe {
            // Fast path: write directly into the capacity reserved above.
            // `SetLenOnDrop` publishes the final length even if `iter.next()` panics.
            let (ptr, len_ptr, cap) = self.triple_mut();
            let mut len = SetLenOnDrop::new(len_ptr);
            while len.get() < cap {
                if let Some(out) = iter.next() {
                    ptr::write(ptr.add(len.get()), out);
                    len.increment_len(1);
                } else {
                    return;
                }
            }
        }

        // The size hint was only a lower bound; push any remaining items,
        // growing the vector as needed.
        for elem in iter {
            self.push(elem);
        }
    }
}

impl<A: Array> fmt::Debug for SmallVec<A>
where
    A::Item: fmt::Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_list().entries(self.iter()).finish()
    }
}

impl<A: Array> Default for SmallVec<A> {
    #[inline]
    fn default() -> SmallVec<A> {
        SmallVec::new()
    }
}

#[cfg(feature = "may_dangle")]
unsafe impl<#[may_dangle] A: Array> Drop for SmallVec<A> {
    fn drop(&mut self) {
        unsafe {
            if self.spilled() {
                let (ptr, len) = self.data.heap();
                Vec::from_raw_parts(ptr, len, self.capacity);
            } else {
                ptr::drop_in_place(&mut self[..]);
            }
        }
    }
}

#[cfg(not(feature = "may_dangle"))]
impl<A: Array> Drop for SmallVec<A> {
    fn drop(&mut self) {
        unsafe {
            if self.spilled() {
                let (ptr, len) = self.data.heap();
                Vec::from_raw_parts(ptr, len, self.capacity);
            } else {
                ptr::drop_in_place(&mut self[..]);
            }
        }
    }
}

impl<A: Array> Clone for SmallVec<A>
where
    A::Item: Clone,
{
    #[inline]
    fn clone(&self) -> SmallVec<A> {
        SmallVec::from(self.as_slice())
    }

    fn clone_from(&mut self, source: &Self) {
        // Inspired by `impl Clone for Vec`.

        // drop anything that will not be overwritten
        self.truncate(source.len());

        // self.len <= other.len due to the truncate above, so the
        // slices here are always in-bounds.
        let (init, tail) = source.split_at(self.len());

        // reuse the contained values' allocations/resources.
        self.clone_from_slice(init);
        self.extend(tail.iter().cloned());
    }
}

impl<A: Array, B: Array> PartialEq<SmallVec<B>> for SmallVec<A>
where
    A::Item: PartialEq<B::Item>,
{
    #[inline]
    fn eq(&self, other: &SmallVec<B>) -> bool {
        self[..] == other[..]
    }
}

impl<A: Array> Eq for SmallVec<A> where A::Item: Eq {}

impl<A: Array> PartialOrd for SmallVec<A>
where
    A::Item: PartialOrd,
{
    #[inline]
    fn partial_cmp(&self, other: &SmallVec<A>) -> Option<cmp::Ordering> {
        PartialOrd::partial_cmp(&**self, &**other)
    }
}

impl<A: Array> Ord for SmallVec<A>
where
    A::Item: Ord,
{
    #[inline]
    fn cmp(&self, other: &SmallVec<A>) -> cmp::Ordering {
        Ord::cmp(&**self, &**other)
    }
}

impl<A: Array> Hash for SmallVec<A>
where
    A::Item: Hash,
{
    fn hash<H: Hasher>(&self, state: &mut H) {
        (**self).hash(state)
    }
}

unsafe impl<A: Array> Send for SmallVec<A> where A::Item: Send {}

/// An iterator that consumes a `SmallVec` and yields its items by value.
///
/// Returned from [`SmallVec::into_iter`][1].
///
/// [1]: struct.SmallVec.html#method.into_iter
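///
/// A short illustrative sketch (assumes the crate's `smallvec!` macro):
///
/// ```
/// use smallvec::{smallvec, SmallVec};
///
/// let v: SmallVec<[i32; 4]> = smallvec![1, 2, 3];
/// // Consuming iteration yields owned items.
/// let doubled: Vec<i32> = v.into_iter().map(|x| x * 2).collect();
/// assert_eq!(doubled, vec![2, 4, 6]);
/// ```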
pub struct IntoIter<A: Array> {
    data: SmallVec<A>,
    current: usize,
    end: usize,
}

impl<A: Array> fmt::Debug for IntoIter<A>
where
    A::Item: fmt::Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
    }
}

impl<A: Array + Clone> Clone for IntoIter<A>
where
    A::Item: Clone,
{
    fn clone(&self) -> IntoIter<A> {
        SmallVec::from(self.as_slice()).into_iter()
    }
}

impl<A: Array> Drop for IntoIter<A> {
    fn drop(&mut self) {
        for _ in self {}
    }
}

impl<A: Array> Iterator for IntoIter<A> {
    type Item = A::Item;

    #[inline]
    fn next(&mut self) -> Option<A::Item> {
        if self.current == self.end {
            None
        } else {
            unsafe {
                let current = self.current;
                self.current += 1;
                Some(ptr::read(self.data.as_ptr().add(current)))
            }
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let size = self.end - self.current;
        (size, Some(size))
    }
}

impl<A: Array> DoubleEndedIterator for IntoIter<A> {
    #[inline]
    fn next_back(&mut self) -> Option<A::Item> {
        if self.current == self.end {
            None
        } else {
            unsafe {
                self.end -= 1;
                Some(ptr::read(self.data.as_ptr().add(self.end)))
            }
        }
    }
}

impl<A: Array> ExactSizeIterator for IntoIter<A> {}
impl<A: Array> FusedIterator for IntoIter<A> {}

impl<A: Array> IntoIter<A> {
    /// Returns the remaining items of this iterator as a slice.
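    ///
    /// A short illustrative sketch (assumes the crate's `smallvec!` macro):
    ///
    /// ```
    /// use smallvec::{smallvec, SmallVec};
    ///
    /// let v: SmallVec<[i32; 4]> = smallvec![1, 2, 3];
    /// let mut iter = v.into_iter();
    /// iter.next();
    /// // Only the items that have not been yielded yet remain.
    /// assert_eq!(iter.as_slice(), &[2, 3]);
    /// ```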
    pub fn as_slice(&self) -> &[A::Item] {
        let len = self.end - self.current;
        unsafe { core::slice::from_raw_parts(self.data.as_ptr().add(self.current), len) }
    }

    /// Returns the remaining items of this iterator as a mutable slice.
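    ///
    /// A short illustrative sketch (assumes the crate's `smallvec!` macro):
    ///
    /// ```
    /// use smallvec::{smallvec, SmallVec};
    ///
    /// let v: SmallVec<[i32; 4]> = smallvec![1, 2, 3];
    /// let mut iter = v.into_iter();
    /// // Mutating through the slice affects the items still to be yielded.
    /// iter.as_mut_slice()[0] = 10;
    /// assert_eq!(iter.next(), Some(10));
    /// ```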
    pub fn as_mut_slice(&mut self) -> &mut [A::Item] {
        let len = self.end - self.current;
        unsafe { core::slice::from_raw_parts_mut(self.data.as_mut_ptr().add(self.current), len) }
    }
}

impl<A: Array> IntoIterator for SmallVec<A> {
    type IntoIter = IntoIter<A>;
    type Item = A::Item;
    fn into_iter(mut self) -> Self::IntoIter {
        unsafe {
            // Set SmallVec len to zero as `IntoIter` drop handles dropping of the elements
            let len = self.len();
            self.set_len(0);
            IntoIter {
                data: self,
                current: 0,
                end: len,
            }
        }
    }
}

impl<'a, A: Array> IntoIterator for &'a SmallVec<A> {
    type IntoIter = slice::Iter<'a, A::Item>;
    type Item = &'a A::Item;
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}

impl<'a, A: Array> IntoIterator for &'a mut SmallVec<A> {
    type IntoIter = slice::IterMut<'a, A::Item>;
    type Item = &'a mut A::Item;
    fn into_iter(self) -> Self::IntoIter {
        self.iter_mut()
    }
}

/// Types that can be used as the backing store for a SmallVec
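///
/// A short illustrative sketch: the impls below cover plain arrays, so a
/// `SmallVec` is normally parameterized with an array type.
///
/// ```
/// use smallvec::{Array, SmallVec};
///
/// // `[u8; 4]` implements `Array`, giving the vector four inline slots.
/// let mut v: SmallVec<[u8; 4]> = SmallVec::new();
/// v.push(1);
/// assert_eq!(<[u8; 4] as Array>::size(), 4);
/// ```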
pub unsafe trait Array {
    /// The type of the array's elements.
    type Item;
    /// Returns the number of items the array can hold.
    fn size() -> usize;
}

/// Set the length of the vec when the `SetLenOnDrop` value goes out of scope.
///
/// Copied from https://github.com/rust-lang/rust/pull/36355
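///
/// A minimal sketch of the intended pattern (illustrative only; it mirrors the
/// `Extend` impl above):
///
/// ```ignore
/// // `next_value()` is a placeholder for any source of items.
/// let (ptr, len_ptr, cap) = self.triple_mut();
/// let mut len = SetLenOnDrop::new(len_ptr);
/// while len.get() < cap {
///     ptr::write(ptr.add(len.get()), next_value());
///     len.increment_len(1);
/// }
/// // When `len` is dropped (normally or during unwinding), the vector's
/// // length is set to the locally tracked value.
/// ```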
struct SetLenOnDrop<'a> {
    len: &'a mut usize,
    local_len: usize,
}

impl<'a> SetLenOnDrop<'a> {
    #[inline]
    fn new(len: &'a mut usize) -> Self {
        SetLenOnDrop {
            local_len: *len,
            len,
        }
    }

    #[inline]
    fn get(&self) -> usize {
        self.local_len
    }

    #[inline]
    fn increment_len(&mut self, increment: usize) {
        self.local_len += increment;
    }
}

impl<'a> Drop for SetLenOnDrop<'a> {
    #[inline]
    fn drop(&mut self) {
        *self.len = self.local_len;
    }
}

#[cfg(feature = "const_new")]
impl<T, const N: usize> SmallVec<[T; N]> {
    /// Construct an empty vector.
    ///
    /// This is a `const` version of [`SmallVec::new`] that is enabled by the feature `const_new`, with the limitation that it only works for arrays.
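    ///
    /// A short illustrative sketch (assumes the `const_new` feature is enabled):
    ///
    /// ```
    /// use smallvec::SmallVec;
    ///
    /// const EMPTY: SmallVec<[u8; 4]> = SmallVec::new_const();
    /// assert!(EMPTY.is_empty());
    /// ```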
    #[cfg_attr(docsrs, doc(cfg(feature = "const_new")))]
    #[inline]
    pub const fn new_const() -> Self {
        SmallVec {
            capacity: 0,
            data: SmallVecData::from_const(MaybeUninit::uninit()),
        }
    }

    /// The array passed as an argument is moved to be an inline version of `SmallVec`.
    ///
    /// This is a `const` version of [`SmallVec::from_buf`] that is enabled by the feature `const_new`, with the limitation that it only works for arrays.
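    ///
    /// A short illustrative sketch (assumes the `const_new` feature is enabled):
    ///
    /// ```
    /// use smallvec::SmallVec;
    ///
    /// const THREE: SmallVec<[u8; 3]> = SmallVec::from_const([1, 2, 3]);
    /// assert_eq!(THREE.len(), 3);
    /// ```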
    #[cfg_attr(docsrs, doc(cfg(feature = "const_new")))]
    #[inline]
    pub const fn from_const(items: [T; N]) -> Self {
        SmallVec {
            capacity: N,
            data: SmallVecData::from_const(MaybeUninit::new(items)),
        }
    }
}

#[cfg(all(feature = "const_generics", not(doc)))]
#[cfg_attr(docsrs, doc(cfg(feature = "const_generics")))]
unsafe impl<T, const N: usize> Array for [T; N] {
    type Item = T;
    #[inline]
    fn size() -> usize {
        N
    }
}

#[cfg(any(not(feature = "const_generics"), doc))]
macro_rules! impl_array(
    ($($size:expr),+) => {
        $(
            unsafe impl<T> Array for [T; $size] {
                type Item = T;
                #[inline]
                fn size() -> usize { $size }
            }
        )+
    }
);

#[cfg(any(not(feature = "const_generics"), doc))]
impl_array!(
    0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25,
    26, 27, 28, 29, 30, 31, 32, 36, 0x40, 0x60, 0x80, 0x100, 0x200, 0x400, 0x600, 0x800, 0x1000,
    0x2000, 0x4000, 0x6000, 0x8000, 0x10000, 0x20000, 0x40000, 0x60000, 0x80000, 0x10_0000
);

/// Convenience trait for constructing a `SmallVec`
pub trait ToSmallVec<A: Array> {
    /// Construct a new `SmallVec` from a slice.
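    ///
    /// A short illustrative sketch:
    ///
    /// ```
    /// use smallvec::{SmallVec, ToSmallVec};
    ///
    /// let slice: &[i32] = &[1, 2, 3];
    /// let v: SmallVec<[i32; 4]> = slice.to_smallvec();
    /// assert_eq!(v.len(), 3);
    /// ```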
    fn to_smallvec(&self) -> SmallVec<A>;
}

impl<A: Array> ToSmallVec<A> for [A::Item]
where
    A::Item: Copy,
{
    #[inline]
    fn to_smallvec(&self) -> SmallVec<A> {
        SmallVec::from_slice(self)
    }
}
