1 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
2 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
3 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
4 // option. This file may not be copied, modified, or distributed
5 // except according to those terms.
6
7 //! Small vectors in various sizes. These store a certain number of elements inline, and fall back
8 //! to the heap for larger allocations. This can be a useful optimization for improving cache
9 //! locality and reducing allocator traffic for workloads that fit within the inline buffer.
10 //!
11 //! ## `no_std` support
12 //!
13 //! By default, `smallvec` does not depend on `std`. However, the optional
14 //! `write` feature implements the `std::io::Write` trait for vectors of `u8`.
15 //! When this feature is enabled, `smallvec` depends on `std`.
16 //!
17 //! ## Optional features
18 //!
19 //! ### `serde`
20 //!
21 //! When this optional dependency is enabled, `SmallVec` implements the `serde::Serialize` and
22 //! `serde::Deserialize` traits.
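//!
//! For illustration, a sketch of round-tripping a `SmallVec` through JSON. It assumes the
//! `serde` feature is enabled and that `serde_json` (not a dependency of this crate) is
//! available, so the block is marked `ignore`:
//!
//! ```ignore
//! use smallvec::{smallvec, SmallVec};
//!
//! let v: SmallVec<[u32; 4]> = smallvec![1, 2, 3];
//! // Serialized as a plain sequence, e.g. "[1,2,3]".
//! let json = serde_json::to_string(&v).unwrap();
//! let back: SmallVec<[u32; 4]> = serde_json::from_str(&json).unwrap();
//! assert_eq!(v, back);
//! ```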
23 //!
24 //! ### `write`
25 //!
26 //! When this feature is enabled, `SmallVec<[u8; _]>` implements the `std::io::Write` trait.
27 //! This feature is not compatible with `#![no_std]` programs.
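//!
//! For illustration, a sketch assuming the `write` feature (and therefore `std`) is enabled;
//! the block is marked `ignore` because the feature is optional:
//!
//! ```ignore
//! use smallvec::SmallVec;
//! use std::io::Write;
//!
//! let mut buf: SmallVec<[u8; 8]> = SmallVec::new();
//! buf.write_all(b"hello").unwrap();
//! assert_eq!(&*buf, b"hello");
//! ```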
28 //!
29 //! ### `union`
30 //!
31 //! **This feature requires Rust 1.49.**
32 //!
33 //! When the `union` feature is enabled `smallvec` will track its state (inline or spilled)
34 //! without the use of an enum tag, reducing the size of the `smallvec` by one machine word.
35 //! This means that there is potentially no space overhead compared to `Vec`.
36 //! Note that `smallvec` can still be larger than `Vec` if the inline buffer is larger than two
37 //! machine words.
38 //!
//! To use this feature, add `features = ["union"]` to the `smallvec` dependency in your
//! Cargo.toml.
41 //!
42 //! Tracking issue: [rust-lang/rust#55149](https://github.com/rust-lang/rust/issues/55149)
43 //!
44 //! ### `const_generics`
45 //!
46 //! **This feature requires Rust 1.51.**
47 //!
//! When this feature is enabled, `SmallVec` works with arrays of any size, not just a fixed
//! list of sizes.
50 //!
51 //! ### `const_new`
52 //!
53 //! **This feature requires Rust 1.51.**
54 //!
//! This feature exposes the functions [`SmallVec::new_const`], [`SmallVec::from_const`], and
//! [`smallvec_inline`], which enable a `SmallVec` to be initialized from a const context.
56 //! For details, see the
57 //! [Rust Reference](https://doc.rust-lang.org/reference/const_eval.html#const-functions).
58 //!
59 //! ### `drain_filter`
60 //!
61 //! **This feature is unstable.** It may change to match the unstable `drain_filter` method in libstd.
62 //!
63 //! Enables the `drain_filter` method, which produces an iterator that calls a user-provided
64 //! closure to determine which elements of the vector to remove and yield from the iterator.
65 //!
66 //! ### `drain_keep_rest`
67 //!
68 //! **This feature is unstable.** It may change to match the unstable `drain_keep_rest` method in libstd.
69 //!
70 //! Enables the `DrainFilter::keep_rest` method.
71 //!
72 //! ### `specialization`
73 //!
74 //! **This feature is unstable and requires a nightly build of the Rust toolchain.**
75 //!
76 //! When this feature is enabled, `SmallVec::from(slice)` has improved performance for slices
77 //! of `Copy` types. (Without this feature, you can use `SmallVec::from_slice` to get optimal
78 //! performance for `Copy` types.)
79 //!
80 //! Tracking issue: [rust-lang/rust#31844](https://github.com/rust-lang/rust/issues/31844)
81 //!
82 //! ### `may_dangle`
83 //!
84 //! **This feature is unstable and requires a nightly build of the Rust toolchain.**
85 //!
86 //! This feature makes the Rust compiler less strict about use of vectors that contain borrowed
87 //! references. For details, see the
88 //! [Rustonomicon](https://doc.rust-lang.org/1.42.0/nomicon/dropck.html#an-escape-hatch).
89 //!
90 //! Tracking issue: [rust-lang/rust#34761](https://github.com/rust-lang/rust/issues/34761)
91
92 #![no_std]
93 #![cfg_attr(docsrs, feature(doc_cfg))]
94 #![cfg_attr(feature = "specialization", allow(incomplete_features))]
95 #![cfg_attr(feature = "specialization", feature(specialization))]
96 #![cfg_attr(feature = "may_dangle", feature(dropck_eyepatch))]
97 #![cfg_attr(
98 feature = "debugger_visualizer",
99 feature(debugger_visualizer),
100 debugger_visualizer(natvis_file = "../debug_metadata/smallvec.natvis")
101 )]
102 #![deny(missing_docs)]
103
104 #[doc(hidden)]
105 pub extern crate alloc;
106
107 #[cfg(any(test, feature = "write"))]
108 extern crate std;
109
110 #[cfg(test)]
111 mod tests;
112
113 #[allow(deprecated)]
114 use alloc::alloc::{Layout, LayoutErr};
115 use alloc::boxed::Box;
116 use alloc::{vec, vec::Vec};
117 use core::borrow::{Borrow, BorrowMut};
118 use core::cmp;
119 use core::fmt;
120 use core::hash::{Hash, Hasher};
121 use core::hint::unreachable_unchecked;
122 use core::iter::{repeat, FromIterator, FusedIterator, IntoIterator};
123 use core::mem;
124 use core::mem::MaybeUninit;
125 use core::ops::{self, Range, RangeBounds};
126 use core::ptr::{self, NonNull};
127 use core::slice::{self, SliceIndex};
128
129 #[cfg(feature = "malloc_size_of")]
130 use malloc_size_of::{MallocShallowSizeOf, MallocSizeOf, MallocSizeOfOps};
131
132 #[cfg(feature = "serde")]
133 use serde::{
134 de::{Deserialize, Deserializer, SeqAccess, Visitor},
135 ser::{Serialize, SerializeSeq, Serializer},
136 };
137
138 #[cfg(feature = "serde")]
139 use core::marker::PhantomData;
140
141 #[cfg(feature = "write")]
142 use std::io;
143
144 #[cfg(feature = "drain_keep_rest")]
145 use core::mem::ManuallyDrop;
146
147 /// Creates a [`SmallVec`] containing the arguments.
148 ///
149 /// `smallvec!` allows `SmallVec`s to be defined with the same syntax as array expressions.
150 /// There are two forms of this macro:
151 ///
152 /// - Create a [`SmallVec`] containing a given list of elements:
153 ///
154 /// ```
155 /// # use smallvec::{smallvec, SmallVec};
156 /// # fn main() {
157 /// let v: SmallVec<[_; 128]> = smallvec![1, 2, 3];
158 /// assert_eq!(v[0], 1);
159 /// assert_eq!(v[1], 2);
160 /// assert_eq!(v[2], 3);
161 /// # }
162 /// ```
163 ///
164 /// - Create a [`SmallVec`] from a given element and size:
165 ///
166 /// ```
167 /// # use smallvec::{smallvec, SmallVec};
168 /// # fn main() {
169 /// let v: SmallVec<[_; 0x8000]> = smallvec![1; 3];
170 /// assert_eq!(v, SmallVec::from_buf([1, 1, 1]));
171 /// # }
172 /// ```
173 ///
174 /// Note that unlike array expressions this syntax supports all elements
175 /// which implement [`Clone`] and the number of elements doesn't have to be
176 /// a constant.
177 ///
178 /// This will use `clone` to duplicate an expression, so one should be careful
179 /// using this with types having a nonstandard `Clone` implementation. For
180 /// example, `smallvec![Rc::new(1); 5]` will create a vector of five references
181 /// to the same boxed integer value, not five references pointing to independently
182 /// boxed integers.
183
184 #[macro_export]
185 macro_rules! smallvec {
186 // count helper: transform any expression into 1
187 (@one $x:expr) => (1usize);
188 ($elem:expr; $n:expr) => ({
189 $crate::SmallVec::from_elem($elem, $n)
190 });
191 ($($x:expr),*$(,)*) => ({
192 let count = 0usize $(+ $crate::smallvec!(@one $x))*;
193 #[allow(unused_mut)]
194 let mut vec = $crate::SmallVec::new();
195 if count <= vec.inline_size() {
196 $(vec.push($x);)*
197 vec
198 } else {
199 $crate::SmallVec::from_vec($crate::alloc::vec![$($x,)*])
200 }
201 });
202 }
203
204 /// Creates an inline [`SmallVec`] containing the arguments. This macro is enabled by the feature `const_new`.
205 ///
206 /// `smallvec_inline!` allows `SmallVec`s to be defined with the same syntax as array expressions in `const` contexts.
207 /// The inline storage `A` will always be an array of the size specified by the arguments.
208 /// There are two forms of this macro:
209 ///
210 /// - Create a [`SmallVec`] containing a given list of elements:
211 ///
212 /// ```
213 /// # use smallvec::{smallvec_inline, SmallVec};
214 /// # fn main() {
215 /// const V: SmallVec<[i32; 3]> = smallvec_inline![1, 2, 3];
216 /// assert_eq!(V[0], 1);
217 /// assert_eq!(V[1], 2);
218 /// assert_eq!(V[2], 3);
219 /// # }
220 /// ```
221 ///
222 /// - Create a [`SmallVec`] from a given element and size:
223 ///
224 /// ```
225 /// # use smallvec::{smallvec_inline, SmallVec};
226 /// # fn main() {
227 /// const V: SmallVec<[i32; 3]> = smallvec_inline![1; 3];
228 /// assert_eq!(V, SmallVec::from_buf([1, 1, 1]));
229 /// # }
230 /// ```
231 ///
232 /// Note that the behavior mimics that of array expressions, in contrast to [`smallvec`].
233 #[cfg(feature = "const_new")]
234 #[cfg_attr(docsrs, doc(cfg(feature = "const_new")))]
235 #[macro_export]
236 macro_rules! smallvec_inline {
237 // count helper: transform any expression into 1
238 (@one $x:expr) => (1usize);
239 ($elem:expr; $n:expr) => ({
240 $crate::SmallVec::<[_; $n]>::from_const([$elem; $n])
241 });
242 ($($x:expr),+ $(,)?) => ({
243 const N: usize = 0usize $(+ $crate::smallvec_inline!(@one $x))*;
244 $crate::SmallVec::<[_; N]>::from_const([$($x,)*])
245 });
246 }
247
248 /// `panic!()` in debug builds, optimization hint in release.
249 #[cfg(not(feature = "union"))]
250 macro_rules! debug_unreachable {
251 () => {
252 debug_unreachable!("entered unreachable code")
253 };
254 ($e:expr) => {
255 if cfg!(debug_assertions) {
256 panic!($e);
257 } else {
258 unreachable_unchecked();
259 }
260 };
261 }
262
263 /// Trait to be implemented by a collection that can be extended from a slice
264 ///
265 /// ## Example
266 ///
267 /// ```rust
268 /// use smallvec::{ExtendFromSlice, SmallVec};
269 ///
270 /// fn initialize<V: ExtendFromSlice<u8>>(v: &mut V) {
271 /// v.extend_from_slice(b"Test!");
272 /// }
273 ///
274 /// let mut vec = Vec::new();
275 /// initialize(&mut vec);
276 /// assert_eq!(&vec, b"Test!");
277 ///
278 /// let mut small_vec = SmallVec::<[u8; 8]>::new();
279 /// initialize(&mut small_vec);
280 /// assert_eq!(&small_vec as &[_], b"Test!");
281 /// ```
282 #[doc(hidden)]
283 #[deprecated]
284 pub trait ExtendFromSlice<T> {
285 /// Extends a collection from a slice of its element type
fn extend_from_slice(&mut self, other: &[T]);
287 }
288
289 #[allow(deprecated)]
290 impl<T: Clone> ExtendFromSlice<T> for Vec<T> {
fn extend_from_slice(&mut self, other: &[T]) {
292 Vec::extend_from_slice(self, other)
293 }
294 }
295
296 /// Error type for APIs with fallible heap allocation
297 #[derive(Debug)]
298 pub enum CollectionAllocErr {
299 /// Overflow `usize::MAX` or other error during size computation
300 CapacityOverflow,
/// The allocator returned an error
302 AllocErr {
303 /// The layout that was passed to the allocator
304 layout: Layout,
305 },
306 }
307
308 impl fmt::Display for CollectionAllocErr {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
310 write!(f, "Allocation error: {:?}", self)
311 }
312 }
313
314 #[allow(deprecated)]
315 impl From<LayoutErr> for CollectionAllocErr {
fn from(_: LayoutErr) -> Self {
317 CollectionAllocErr::CapacityOverflow
318 }
319 }
320
fn infallible<T>(result: Result<T, CollectionAllocErr>) -> T {
322 match result {
323 Ok(x) => x,
324 Err(CollectionAllocErr::CapacityOverflow) => panic!("capacity overflow"),
325 Err(CollectionAllocErr::AllocErr { layout }) => alloc::alloc::handle_alloc_error(layout),
326 }
327 }
328
329 /// FIXME: use `Layout::array` when we require a Rust version where it’s stable
330 /// <https://github.com/rust-lang/rust/issues/55724>
fn layout_array<T>(n: usize) -> Result<Layout, CollectionAllocErr> {
332 let size = mem::size_of::<T>()
333 .checked_mul(n)
334 .ok_or(CollectionAllocErr::CapacityOverflow)?;
335 let align = mem::align_of::<T>();
336 Layout::from_size_align(size, align).map_err(|_| CollectionAllocErr::CapacityOverflow)
337 }
338
unsafe fn deallocate<T>(ptr: NonNull<T>, capacity: usize) {
// This unwrap should succeed since the same layout computation succeeded when allocating.
341 let layout = layout_array::<T>(capacity).unwrap();
342 alloc::alloc::dealloc(ptr.as_ptr() as *mut u8, layout)
343 }
344
345 /// An iterator that removes the items from a `SmallVec` and yields them by value.
346 ///
347 /// Returned from [`SmallVec::drain`][1].
348 ///
349 /// [1]: struct.SmallVec.html#method.drain
350 pub struct Drain<'a, T: 'a + Array> {
351 tail_start: usize,
352 tail_len: usize,
353 iter: slice::Iter<'a, T::Item>,
354 vec: NonNull<SmallVec<T>>,
355 }
356
357 impl<'a, T: 'a + Array> fmt::Debug for Drain<'a, T>
358 where
359 T::Item: fmt::Debug,
360 {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
362 f.debug_tuple("Drain").field(&self.iter.as_slice()).finish()
363 }
364 }
365
366 unsafe impl<'a, T: Sync + Array> Sync for Drain<'a, T> {}
367 unsafe impl<'a, T: Send + Array> Send for Drain<'a, T> {}
368
369 impl<'a, T: 'a + Array> Iterator for Drain<'a, T> {
370 type Item = T::Item;
371
372 #[inline]
fn next(&mut self) -> Option<T::Item> {
374 self.iter
375 .next()
376 .map(|reference| unsafe { ptr::read(reference) })
377 }
378
379 #[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
381 self.iter.size_hint()
382 }
383 }
384
385 impl<'a, T: 'a + Array> DoubleEndedIterator for Drain<'a, T> {
386 #[inline]
fn next_back(&mut self) -> Option<T::Item> {
388 self.iter
389 .next_back()
390 .map(|reference| unsafe { ptr::read(reference) })
391 }
392 }
393
394 impl<'a, T: Array> ExactSizeIterator for Drain<'a, T> {
395 #[inline]
fn len(&self) -> usize {
397 self.iter.len()
398 }
399 }
400
401 impl<'a, T: Array> FusedIterator for Drain<'a, T> {}
402
403 impl<'a, T: 'a + Array> Drop for Drain<'a, T> {
fn drop(&mut self) {
405 self.for_each(drop);
406
407 if self.tail_len > 0 {
408 unsafe {
409 let source_vec = self.vec.as_mut();
410
411 // memmove back untouched tail, update to new length
412 let start = source_vec.len();
413 let tail = self.tail_start;
414 if tail != start {
415 // as_mut_ptr creates a &mut, invalidating other pointers.
416 // This pattern avoids calling it with a pointer already present.
417 let ptr = source_vec.as_mut_ptr();
418 let src = ptr.add(tail);
419 let dst = ptr.add(start);
420 ptr::copy(src, dst, self.tail_len);
421 }
422 source_vec.set_len(start + self.tail_len);
423 }
424 }
425 }
426 }
427
428 #[cfg(feature = "drain_filter")]
429 /// An iterator which uses a closure to determine if an element should be removed.
430 ///
431 /// Returned from [`SmallVec::drain_filter`][1].
432 ///
433 /// [1]: struct.SmallVec.html#method.drain_filter
434 pub struct DrainFilter<'a, T, F>
435 where
436 F: FnMut(&mut T::Item) -> bool,
437 T: Array,
438 {
439 vec: &'a mut SmallVec<T>,
440 /// The index of the item that will be inspected by the next call to `next`.
441 idx: usize,
442 /// The number of items that have been drained (removed) thus far.
443 del: usize,
444 /// The original length of `vec` prior to draining.
445 old_len: usize,
446 /// The filter test predicate.
447 pred: F,
448 /// A flag that indicates a panic has occurred in the filter test predicate.
449 /// This is used as a hint in the drop implementation to prevent consumption
450 /// of the remainder of the `DrainFilter`. Any unprocessed items will be
451 /// backshifted in the `vec`, but no further items will be dropped or
452 /// tested by the filter predicate.
453 panic_flag: bool,
454 }
455
456 #[cfg(feature = "drain_filter")]
457 impl <T, F> fmt::Debug for DrainFilter<'_, T, F>
458 where
459 F: FnMut(&mut T::Item) -> bool,
460 T: Array,
461 T::Item: fmt::Debug,
462 {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
464 f.debug_tuple("DrainFilter").field(&self.vec.as_slice()).finish()
465 }
466 }
467
468 #[cfg(feature = "drain_filter")]
469 impl <T, F> Iterator for DrainFilter<'_, T, F>
470 where
471 F: FnMut(&mut T::Item) -> bool,
472 T: Array,
473 {
474 type Item = T::Item;
475
fn next(&mut self) -> Option<T::Item>
477 {
478 unsafe {
479 while self.idx < self.old_len {
480 let i = self.idx;
481 let v = slice::from_raw_parts_mut(self.vec.as_mut_ptr(), self.old_len);
482 self.panic_flag = true;
483 let drained = (self.pred)(&mut v[i]);
484 self.panic_flag = false;
485 // Update the index *after* the predicate is called. If the index
486 // is updated prior and the predicate panics, the element at this
487 // index would be leaked.
488 self.idx += 1;
489 if drained {
490 self.del += 1;
491 return Some(ptr::read(&v[i]));
492 } else if self.del > 0 {
493 let del = self.del;
494 let src: *const Self::Item = &v[i];
495 let dst: *mut Self::Item = &mut v[i - del];
496 ptr::copy_nonoverlapping(src, dst, 1);
497 }
498 }
499 None
500 }
501 }
502
fn size_hint(&self) -> (usize, Option<usize>) {
504 (0, Some(self.old_len - self.idx))
505 }
506 }
507
508 #[cfg(feature = "drain_filter")]
509 impl <T, F> Drop for DrainFilter<'_, T, F>
510 where
511 F: FnMut(&mut T::Item) -> bool,
512 T: Array,
513 {
fn drop(&mut self) {
515 struct BackshiftOnDrop<'a, 'b, T, F>
516 where
517 F: FnMut(&mut T::Item) -> bool,
518 T: Array
519 {
520 drain: &'b mut DrainFilter<'a, T, F>,
521 }
522
523 impl<'a, 'b, T, F> Drop for BackshiftOnDrop<'a, 'b, T, F>
524 where
525 F: FnMut(&mut T::Item) -> bool,
526 T: Array
527 {
528 fn drop(&mut self) {
529 unsafe {
530 if self.drain.idx < self.drain.old_len && self.drain.del > 0 {
531 // This is a pretty messed up state, and there isn't really an
532 // obviously right thing to do. We don't want to keep trying
533 // to execute `pred`, so we just backshift all the unprocessed
534 // elements and tell the vec that they still exist. The backshift
535 // is required to prevent a double-drop of the last successfully
536 // drained item prior to a panic in the predicate.
537 let ptr = self.drain.vec.as_mut_ptr();
538 let src = ptr.add(self.drain.idx);
539 let dst = src.sub(self.drain.del);
540 let tail_len = self.drain.old_len - self.drain.idx;
541 src.copy_to(dst, tail_len);
542 }
543 self.drain.vec.set_len(self.drain.old_len - self.drain.del);
544 }
545 }
546 }
547
548 let backshift = BackshiftOnDrop { drain: self };
549
550 // Attempt to consume any remaining elements if the filter predicate
551 // has not yet panicked. We'll backshift any remaining elements
552 // whether we've already panicked or if the consumption here panics.
553 if !backshift.drain.panic_flag {
554 backshift.drain.for_each(drop);
555 }
556 }
557 }
558
559 #[cfg(feature = "drain_keep_rest")]
560 impl <T, F> DrainFilter<'_, T, F>
561 where
562 F: FnMut(&mut T::Item) -> bool,
563 T: Array
564 {
565 /// Keep unyielded elements in the source `Vec`.
566 ///
567 /// # Examples
568 ///
569 /// ```
570 /// # use smallvec::{smallvec, SmallVec};
571 ///
572 /// let mut vec: SmallVec<[char; 2]> = smallvec!['a', 'b', 'c'];
573 /// let mut drain = vec.drain_filter(|_| true);
574 ///
575 /// assert_eq!(drain.next().unwrap(), 'a');
576 ///
577 /// // This call keeps 'b' and 'c' in the vec.
578 /// drain.keep_rest();
579 ///
580 /// // If we wouldn't call `keep_rest()`,
581 /// // `vec` would be empty.
582 /// assert_eq!(vec, SmallVec::<[char; 2]>::from_slice(&['b', 'c']));
583 /// ```
pub fn keep_rest(self)
585 {
586 // At this moment layout looks like this:
587 //
588 // _____________________/-- old_len
589 // / \
590 // [kept] [yielded] [tail]
591 // \_______/ ^-- idx
592 // \-- del
593 //
// Normally the `Drop` impl would drop [tail] (via .for_each(drop), i.e. still calling `pred`)
595 //
596 // 1. Move [tail] after [kept]
597 // 2. Update length of the original vec to `old_len - del`
598 // a. In case of ZST, this is the only thing we want to do
599 // 3. Do *not* drop self, as everything is put in a consistent state already, there is nothing to do
600 let mut this = ManuallyDrop::new(self);
601
602 unsafe {
603 // ZSTs have no identity, so we don't need to move them around.
604 let needs_move = mem::size_of::<T>() != 0;
605
606 if needs_move && this.idx < this.old_len && this.del > 0 {
607 let ptr = this.vec.as_mut_ptr();
608 let src = ptr.add(this.idx);
609 let dst = src.sub(this.del);
610 let tail_len = this.old_len - this.idx;
611 src.copy_to(dst, tail_len);
612 }
613
614 let new_len = this.old_len - this.del;
615 this.vec.set_len(new_len);
616 }
617 }
618 }
619
620 #[cfg(feature = "union")]
621 union SmallVecData<A: Array> {
622 inline: core::mem::ManuallyDrop<MaybeUninit<A>>,
623 heap: (NonNull<A::Item>, usize),
624 }
625
626 #[cfg(all(feature = "union", feature = "const_new"))]
627 impl<T, const N: usize> SmallVecData<[T; N]> {
628 #[cfg_attr(docsrs, doc(cfg(feature = "const_new")))]
629 #[inline]
const fn from_const(inline: MaybeUninit<[T; N]>) -> Self {
631 SmallVecData {
632 inline: core::mem::ManuallyDrop::new(inline),
633 }
634 }
635 }
636
637 #[cfg(feature = "union")]
638 impl<A: Array> SmallVecData<A> {
639 #[inline]
unsafe fn inline(&self) -> ConstNonNull<A::Item> {
641 ConstNonNull::new(self.inline.as_ptr() as *const A::Item).unwrap()
642 }
643 #[inline]
unsafe fn inline_mut(&mut self) -> NonNull<A::Item> {
645 NonNull::new(self.inline.as_mut_ptr() as *mut A::Item).unwrap()
646 }
647 #[inline]
fn from_inline(inline: MaybeUninit<A>) -> SmallVecData<A> {
649 SmallVecData {
650 inline: core::mem::ManuallyDrop::new(inline),
651 }
652 }
653 #[inline]
unsafe fn into_inline(self) -> MaybeUninit<A> {
655 core::mem::ManuallyDrop::into_inner(self.inline)
656 }
657 #[inline]
unsafe fn heap(&self) -> (ConstNonNull<A::Item>, usize) {
659 (ConstNonNull(self.heap.0), self.heap.1)
660 }
661 #[inline]
unsafe fn heap_mut(&mut self) -> (NonNull<A::Item>, &mut usize) {
663 let h = &mut self.heap;
664 (h.0, &mut h.1)
665 }
666 #[inline]
fn from_heap(ptr: NonNull<A::Item>, len: usize) -> SmallVecData<A> {
668 SmallVecData { heap: (ptr, len) }
669 }
670 }
671
672 #[cfg(not(feature = "union"))]
673 enum SmallVecData<A: Array> {
674 Inline(MaybeUninit<A>),
// Using NonNull and NonZero here allows us to reduce the size of `SmallVec`.
676 Heap {
// Since we never allocate on the heap
// unless our capacity is bigger than the inline capacity,
// the heap capacity cannot be less than 1.
// Therefore, the pointer cannot be null either.
681 ptr: NonNull<A::Item>,
682 len: usize,
683 },
684 }
685
686 #[cfg(all(not(feature = "union"), feature = "const_new"))]
687 impl<T, const N: usize> SmallVecData<[T; N]> {
688 #[cfg_attr(docsrs, doc(cfg(feature = "const_new")))]
689 #[inline]
const fn from_const(inline: MaybeUninit<[T; N]>) -> Self {
691 SmallVecData::Inline(inline)
692 }
693 }
694
695 #[cfg(not(feature = "union"))]
696 impl<A: Array> SmallVecData<A> {
697 #[inline]
unsafe fn inline(&self) -> ConstNonNull<A::Item> {
699 match self {
700 SmallVecData::Inline(a) => ConstNonNull::new(a.as_ptr() as *const A::Item).unwrap(),
701 _ => debug_unreachable!(),
702 }
703 }
704 #[inline]
unsafe fn inline_mut(&mut self) -> NonNull<A::Item> {
706 match self {
707 SmallVecData::Inline(a) => NonNull::new(a.as_mut_ptr() as *mut A::Item).unwrap(),
708 _ => debug_unreachable!(),
709 }
710 }
711 #[inline]
fn from_inline(inline: MaybeUninit<A>) -> SmallVecData<A> {
713 SmallVecData::Inline(inline)
714 }
715 #[inline]
unsafe fn into_inline(self) -> MaybeUninit<A> {
717 match self {
718 SmallVecData::Inline(a) => a,
719 _ => debug_unreachable!(),
720 }
721 }
722 #[inline]
unsafe fn heap(&self) -> (ConstNonNull<A::Item>, usize) {
724 match self {
725 SmallVecData::Heap { ptr, len } => (ConstNonNull(*ptr), *len),
726 _ => debug_unreachable!(),
727 }
728 }
729 #[inline]
unsafe fn heap_mut(&mut self) -> (NonNull<A::Item>, &mut usize) {
731 match self {
732 SmallVecData::Heap { ptr, len } => (*ptr, len),
733 _ => debug_unreachable!(),
734 }
735 }
736 #[inline]
fn from_heap(ptr: NonNull<A::Item>, len: usize) -> SmallVecData<A> {
738 SmallVecData::Heap { ptr, len }
739 }
740 }
741
742 unsafe impl<A: Array + Send> Send for SmallVecData<A> {}
743 unsafe impl<A: Array + Sync> Sync for SmallVecData<A> {}
744
745 /// A `Vec`-like container that can store a small number of elements inline.
746 ///
747 /// `SmallVec` acts like a vector, but can store a limited amount of data inline within the
748 /// `SmallVec` struct rather than in a separate allocation. If the data exceeds this limit, the
749 /// `SmallVec` will "spill" its data onto the heap, allocating a new buffer to hold it.
750 ///
751 /// The amount of data that a `SmallVec` can store inline depends on its backing store. The backing
752 /// store can be any type that implements the `Array` trait; usually it is a small fixed-sized
753 /// array. For example a `SmallVec<[u64; 8]>` can hold up to eight 64-bit integers inline.
754 ///
755 /// ## Example
756 ///
757 /// ```rust
758 /// use smallvec::SmallVec;
759 /// let mut v = SmallVec::<[u8; 4]>::new(); // initialize an empty vector
760 ///
761 /// // The vector can hold up to 4 items without spilling onto the heap.
762 /// v.extend(0..4);
763 /// assert_eq!(v.len(), 4);
764 /// assert!(!v.spilled());
765 ///
766 /// // Pushing another element will force the buffer to spill:
767 /// v.push(4);
768 /// assert_eq!(v.len(), 5);
769 /// assert!(v.spilled());
770 /// ```
771 pub struct SmallVec<A: Array> {
772 // The capacity field is used to determine which of the storage variants is active:
773 // If capacity <= Self::inline_capacity() then the inline variant is used and capacity holds the current length of the vector (number of elements actually in use).
774 // If capacity > Self::inline_capacity() then the heap variant is used and capacity holds the size of the memory allocation.
775 capacity: usize,
776 data: SmallVecData<A>,
777 }
778
779 impl<A: Array> SmallVec<A> {
780 /// Construct an empty vector
781 #[inline]
pub fn new() -> SmallVec<A> {
783 // Try to detect invalid custom implementations of `Array`. Hopefully,
784 // this check should be optimized away entirely for valid ones.
785 assert!(
786 mem::size_of::<A>() == A::size() * mem::size_of::<A::Item>()
787 && mem::align_of::<A>() >= mem::align_of::<A::Item>()
788 );
789 SmallVec {
790 capacity: 0,
791 data: SmallVecData::from_inline(MaybeUninit::uninit()),
792 }
793 }
794
795 /// Construct an empty vector with enough capacity pre-allocated to store at least `n`
796 /// elements.
797 ///
798 /// Will create a heap allocation only if `n` is larger than the inline capacity.
799 ///
800 /// ```
801 /// # use smallvec::SmallVec;
802 ///
803 /// let v: SmallVec<[u8; 3]> = SmallVec::with_capacity(100);
804 ///
805 /// assert!(v.is_empty());
806 /// assert!(v.capacity() >= 100);
807 /// ```
808 #[inline]
pub fn with_capacity(n: usize) -> Self {
810 let mut v = SmallVec::new();
811 v.reserve_exact(n);
812 v
813 }
814
815 /// Construct a new `SmallVec` from a `Vec<A::Item>`.
816 ///
817 /// Elements will be copied to the inline buffer if `vec.capacity() <= Self::inline_capacity()`.
818 ///
819 /// ```rust
820 /// use smallvec::SmallVec;
821 ///
822 /// let vec = vec![1, 2, 3, 4, 5];
823 /// let small_vec: SmallVec<[_; 3]> = SmallVec::from_vec(vec);
824 ///
825 /// assert_eq!(&*small_vec, &[1, 2, 3, 4, 5]);
826 /// ```
827 #[inline]
pub fn from_vec(mut vec: Vec<A::Item>) -> SmallVec<A> {
829 if vec.capacity() <= Self::inline_capacity() {
// We cannot keep using the `Vec`'s allocation here, because a capacity no larger than
// the inline capacity would be interpreted as inline storage: the value of the
// `capacity` field doubles as the storage-variant indicator.
832 unsafe {
833 let mut data = SmallVecData::<A>::from_inline(MaybeUninit::uninit());
834 let len = vec.len();
835 vec.set_len(0);
836 ptr::copy_nonoverlapping(vec.as_ptr(), data.inline_mut().as_ptr(), len);
837
838 SmallVec {
839 capacity: len,
840 data,
841 }
842 }
843 } else {
844 let (ptr, cap, len) = (vec.as_mut_ptr(), vec.capacity(), vec.len());
845 mem::forget(vec);
846 let ptr = NonNull::new(ptr)
847 // See docs: https://doc.rust-lang.org/std/vec/struct.Vec.html#method.as_mut_ptr
848 .expect("Cannot be null by `Vec` invariant");
849
850 SmallVec {
851 capacity: cap,
852 data: SmallVecData::from_heap(ptr, len),
853 }
854 }
855 }
856
857 /// Constructs a new `SmallVec` on the stack from an `A` without
858 /// copying elements.
859 ///
860 /// ```rust
861 /// use smallvec::SmallVec;
862 ///
863 /// let buf = [1, 2, 3, 4, 5];
864 /// let small_vec: SmallVec<_> = SmallVec::from_buf(buf);
865 ///
866 /// assert_eq!(&*small_vec, &[1, 2, 3, 4, 5]);
867 /// ```
868 #[inline]
pub fn from_buf(buf: A) -> SmallVec<A> {
870 SmallVec {
871 capacity: A::size(),
872 data: SmallVecData::from_inline(MaybeUninit::new(buf)),
873 }
874 }
875
876 /// Constructs a new `SmallVec` on the stack from an `A` without
/// copying elements. Also sets the length, which must be less than
/// or equal to the size of `buf`.
879 ///
880 /// ```rust
881 /// use smallvec::SmallVec;
882 ///
883 /// let buf = [1, 2, 3, 4, 5, 0, 0, 0];
884 /// let small_vec: SmallVec<_> = SmallVec::from_buf_and_len(buf, 5);
885 ///
886 /// assert_eq!(&*small_vec, &[1, 2, 3, 4, 5]);
887 /// ```
888 #[inline]
pub fn from_buf_and_len(buf: A, len: usize) -> SmallVec<A> {
890 assert!(len <= A::size());
891 unsafe { SmallVec::from_buf_and_len_unchecked(MaybeUninit::new(buf), len) }
892 }
893
894 /// Constructs a new `SmallVec` on the stack from an `A` without
895 /// copying elements. Also sets the length. The user is responsible
896 /// for ensuring that `len <= A::size()`.
897 ///
898 /// ```rust
899 /// use smallvec::SmallVec;
900 /// use std::mem::MaybeUninit;
901 ///
902 /// let buf = [1, 2, 3, 4, 5, 0, 0, 0];
903 /// let small_vec: SmallVec<_> = unsafe {
904 /// SmallVec::from_buf_and_len_unchecked(MaybeUninit::new(buf), 5)
905 /// };
906 ///
907 /// assert_eq!(&*small_vec, &[1, 2, 3, 4, 5]);
908 /// ```
909 #[inline]
pub unsafe fn from_buf_and_len_unchecked(buf: MaybeUninit<A>, len: usize) -> SmallVec<A> {
911 SmallVec {
912 capacity: len,
913 data: SmallVecData::from_inline(buf),
914 }
915 }
916
917 /// Sets the length of a vector.
918 ///
919 /// This will explicitly set the size of the vector, without actually
920 /// modifying its buffers, so it is up to the caller to ensure that the
921 /// vector is actually the specified size.
pub unsafe fn set_len(&mut self, new_len: usize) {
923 let (_, len_ptr, _) = self.triple_mut();
924 *len_ptr = new_len;
925 }
926
927 /// The maximum number of elements this vector can hold inline
928 #[inline]
fn inline_capacity() -> usize {
930 if mem::size_of::<A::Item>() > 0 {
931 A::size()
932 } else {
933 // For zero-size items code like `ptr.add(offset)` always returns the same pointer.
934 // Therefore all items are at the same address,
935 // and any array size has capacity for infinitely many items.
936 // The capacity is limited by the bit width of the length field.
937 //
938 // `Vec` also does this:
939 // https://github.com/rust-lang/rust/blob/1.44.0/src/liballoc/raw_vec.rs#L186
940 //
941 // In our case, this also ensures that a smallvec of zero-size items never spills,
942 // and we never try to allocate zero bytes which `std::alloc::alloc` disallows.
943 core::usize::MAX
944 }
945 }
946
947 /// The maximum number of elements this vector can hold inline
948 #[inline]
pub fn inline_size(&self) -> usize {
950 Self::inline_capacity()
951 }
952
953 /// The number of elements stored in the vector
954 #[inline]
pub fn len(&self) -> usize {
956 self.triple().1
957 }
958
959 /// Returns `true` if the vector is empty
960 #[inline]
pub fn is_empty(&self) -> bool {
962 self.len() == 0
963 }
964
965 /// The number of items the vector can hold without reallocating
966 #[inline]
pub fn capacity(&self) -> usize {
968 self.triple().2
969 }
970
971 /// Returns a tuple with (data ptr, len, capacity)
972 /// Useful to get all `SmallVec` properties with a single check of the current storage variant.
973 #[inline]
fn triple(&self) -> (ConstNonNull<A::Item>, usize, usize) {
975 unsafe {
976 if self.spilled() {
977 let (ptr, len) = self.data.heap();
978 (ptr, len, self.capacity)
979 } else {
980 (self.data.inline(), self.capacity, Self::inline_capacity())
981 }
982 }
983 }
984
985 /// Returns a tuple with (data ptr, len ptr, capacity)
986 #[inline]
fn triple_mut(&mut self) -> (NonNull<A::Item>, &mut usize, usize) {
988 unsafe {
989 if self.spilled() {
990 let (ptr, len_ptr) = self.data.heap_mut();
991 (ptr, len_ptr, self.capacity)
992 } else {
993 (
994 self.data.inline_mut(),
995 &mut self.capacity,
996 Self::inline_capacity(),
997 )
998 }
999 }
1000 }
1001
1002 /// Returns `true` if the data has spilled into a separate heap-allocated buffer.
1003 #[inline]
pub fn spilled(&self) -> bool {
1005 self.capacity > Self::inline_capacity()
1006 }
1007
1008 /// Creates a draining iterator that removes the specified range in the vector
1009 /// and yields the removed items.
1010 ///
1011 /// Note 1: The element range is removed even if the iterator is only
1012 /// partially consumed or not consumed at all.
1013 ///
1014 /// Note 2: It is unspecified how many elements are removed from the vector
1015 /// if the `Drain` value is leaked.
1016 ///
1017 /// # Panics
1018 ///
1019 /// Panics if the starting point is greater than the end point or if
1020 /// the end point is greater than the length of the vector.
pub fn drain<R>(&mut self, range: R) -> Drain<'_, A>
1022 where
1023 R: RangeBounds<usize>,
1024 {
1025 use core::ops::Bound::*;
1026
1027 let len = self.len();
1028 let start = match range.start_bound() {
1029 Included(&n) => n,
1030 Excluded(&n) => n.checked_add(1).expect("Range start out of bounds"),
1031 Unbounded => 0,
1032 };
1033 let end = match range.end_bound() {
1034 Included(&n) => n.checked_add(1).expect("Range end out of bounds"),
1035 Excluded(&n) => n,
1036 Unbounded => len,
1037 };
1038
1039 assert!(start <= end);
1040 assert!(end <= len);
1041
1042 unsafe {
1043 self.set_len(start);
1044
1045 let range_slice = slice::from_raw_parts(self.as_ptr().add(start), end - start);
1046
1047 Drain {
1048 tail_start: end,
1049 tail_len: len - end,
1050 iter: range_slice.iter(),
1051 // Since self is a &mut, passing it to a function would invalidate the slice iterator.
1052 vec: NonNull::new_unchecked(self as *mut _),
1053 }
1054 }
1055 }
1056
1057 #[cfg(feature = "drain_filter")]
1058 /// Creates an iterator which uses a closure to determine if an element should be removed.
1059 ///
1060 /// If the closure returns true, the element is removed and yielded. If the closure returns
1061 /// false, the element will remain in the vector and will not be yielded by the iterator.
1062 ///
1063 /// Using this method is equivalent to the following code:
1064 /// ```
1065 /// # use smallvec::SmallVec;
1066 /// # let some_predicate = |x: &mut i32| { *x == 2 || *x == 3 || *x == 6 };
1067 /// # let mut vec: SmallVec<[i32; 8]> = SmallVec::from_slice(&[1i32, 2, 3, 4, 5, 6]);
1068 /// let mut i = 0;
1069 /// while i < vec.len() {
1070 /// if some_predicate(&mut vec[i]) {
1071 /// let val = vec.remove(i);
1072 /// // your code here
1073 /// } else {
1074 /// i += 1;
1075 /// }
1076 /// }
1077 ///
1078 /// # assert_eq!(vec, SmallVec::<[i32; 8]>::from_slice(&[1i32, 4, 5]));
1079 /// ```
///
1081 /// But `drain_filter` is easier to use. `drain_filter` is also more efficient,
1082 /// because it can backshift the elements of the array in bulk.
1083 ///
1084 /// Note that `drain_filter` also lets you mutate every element in the filter closure,
1085 /// regardless of whether you choose to keep or remove it.
1086 ///
1087 /// # Examples
1088 ///
1089 /// Splitting an array into evens and odds, reusing the original allocation:
1090 ///
1091 /// ```
1092 /// # use smallvec::SmallVec;
1093 /// let mut numbers: SmallVec<[i32; 16]> = SmallVec::from_slice(&[1i32, 2, 3, 4, 5, 6, 8, 9, 11, 13, 14, 15]);
1094 ///
1095 /// let evens = numbers.drain_filter(|x| *x % 2 == 0).collect::<SmallVec<[i32; 16]>>();
1096 /// let odds = numbers;
1097 ///
1098 /// assert_eq!(evens, SmallVec::<[i32; 16]>::from_slice(&[2i32, 4, 6, 8, 14]));
1099 /// assert_eq!(odds, SmallVec::<[i32; 16]>::from_slice(&[1i32, 3, 5, 9, 11, 13, 15]));
1100 /// ```
pub fn drain_filter<F>(&mut self, filter: F) -> DrainFilter<'_, A, F,>
1102 where
1103 F: FnMut(&mut A::Item) -> bool,
1104 {
1105 let old_len = self.len();
1106
1107 // Guard against us getting leaked (leak amplification)
1108 unsafe {
1109 self.set_len(0);
1110 }
1111
1112 DrainFilter { vec: self, idx: 0, del: 0, old_len, pred: filter, panic_flag: false }
1113 }
1114
1115 /// Append an item to the vector.
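///
/// # Example
///
/// A small illustration; with an inline capacity of 2, the third push spills to the heap:
///
/// ```
/// # use smallvec::SmallVec;
/// let mut v: SmallVec<[i32; 2]> = SmallVec::new();
/// v.push(1);
/// v.push(2);
/// v.push(3);
/// assert_eq!(&*v, &[1, 2, 3]);
/// assert!(v.spilled());
/// ```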
1116 #[inline]
pub fn push(&mut self, value: A::Item) {
1118 unsafe {
1119 let (mut ptr, mut len, cap) = self.triple_mut();
1120 if *len == cap {
1121 self.reserve_one_unchecked();
1122 let (heap_ptr, heap_len) = self.data.heap_mut();
1123 ptr = heap_ptr;
1124 len = heap_len;
1125 }
1126 ptr::write(ptr.as_ptr().add(*len), value);
1127 *len += 1;
1128 }
1129 }
1130
1131 /// Remove an item from the end of the vector and return it, or None if empty.
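///
/// # Example
///
/// Elements come back in reverse insertion order:
///
/// ```
/// # use smallvec::{smallvec, SmallVec};
/// let mut v: SmallVec<[i32; 4]> = smallvec![1, 2];
/// assert_eq!(v.pop(), Some(2));
/// assert_eq!(v.pop(), Some(1));
/// assert_eq!(v.pop(), None);
/// ```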
1132 #[inline]
pub fn pop(&mut self) -> Option<A::Item> {
1134 unsafe {
1135 let (ptr, len_ptr, _) = self.triple_mut();
1136 let ptr: *const _ = ptr.as_ptr();
1137 if *len_ptr == 0 {
1138 return None;
1139 }
1140 let last_index = *len_ptr - 1;
1141 *len_ptr = last_index;
1142 Some(ptr::read(ptr.add(last_index)))
1143 }
1144 }
1145
1146 /// Moves all the elements of `other` into `self`, leaving `other` empty.
1147 ///
1148 /// # Example
1149 ///
1150 /// ```
1151 /// # use smallvec::{SmallVec, smallvec};
1152 /// let mut v0: SmallVec<[u8; 16]> = smallvec![1, 2, 3];
1153 /// let mut v1: SmallVec<[u8; 32]> = smallvec![4, 5, 6];
1154 /// v0.append(&mut v1);
1155 /// assert_eq!(*v0, [1, 2, 3, 4, 5, 6]);
1156 /// assert_eq!(*v1, []);
1157 /// ```
pub fn append<B>(&mut self, other: &mut SmallVec<B>)
1159 where
1160 B: Array<Item = A::Item>,
1161 {
1162 self.extend(other.drain(..))
1163 }
1164
1165 /// Re-allocate to set the capacity to `max(new_cap, inline_size())`.
1166 ///
1167 /// Panics if `new_cap` is less than the vector's length
1168 /// or if the capacity computation overflows `usize`.
pub fn grow(&mut self, new_cap: usize) {
1170 infallible(self.try_grow(new_cap))
1171 }
1172
1173 /// Re-allocate to set the capacity to `max(new_cap, inline_size())`.
1174 ///
1175 /// Panics if `new_cap` is less than the vector's length
pub fn try_grow(&mut self, new_cap: usize) -> Result<(), CollectionAllocErr> {
1177 unsafe {
1178 let unspilled = !self.spilled();
1179 let (ptr, &mut len, cap) = self.triple_mut();
1180 assert!(new_cap >= len);
1181 if new_cap <= Self::inline_capacity() {
1182 if unspilled {
1183 return Ok(());
1184 }
1185 self.data = SmallVecData::from_inline(MaybeUninit::uninit());
1186 ptr::copy_nonoverlapping(ptr.as_ptr(), self.data.inline_mut().as_ptr(), len);
1187 self.capacity = len;
1188 deallocate(ptr, cap);
1189 } else if new_cap != cap {
1190 let layout = layout_array::<A::Item>(new_cap)?;
1191 debug_assert!(layout.size() > 0);
1192 let new_alloc;
1193 if unspilled {
1194 new_alloc = NonNull::new(alloc::alloc::alloc(layout))
1195 .ok_or(CollectionAllocErr::AllocErr { layout })?
1196 .cast();
1197 ptr::copy_nonoverlapping(ptr.as_ptr(), new_alloc.as_ptr(), len);
1198 } else {
1199 // This should never fail since the same succeeded
1200 // when previously allocating `ptr`.
1201 let old_layout = layout_array::<A::Item>(cap)?;
1202
1203 let new_ptr =
1204 alloc::alloc::realloc(ptr.as_ptr() as *mut u8, old_layout, layout.size());
1205 new_alloc = NonNull::new(new_ptr)
1206 .ok_or(CollectionAllocErr::AllocErr { layout })?
1207 .cast();
1208 }
1209 self.data = SmallVecData::from_heap(new_alloc, len);
1210 self.capacity = new_cap;
1211 }
1212 Ok(())
1213 }
1214 }
1215
1216 /// Reserve capacity for `additional` more elements to be inserted.
1217 ///
1218 /// May reserve more space to avoid frequent reallocations.
1219 ///
1220 /// Panics if the capacity computation overflows `usize`.
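///
/// # Example
///
/// A brief illustration; the resulting capacity may be larger than requested:
///
/// ```
/// # use smallvec::SmallVec;
/// let mut v: SmallVec<[u8; 4]> = SmallVec::new();
/// v.reserve(10);
/// assert!(v.capacity() >= 10);
/// ```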
1221 #[inline]
pub fn reserve(&mut self, additional: usize) {
1223 infallible(self.try_reserve(additional))
1224 }
1225
/// Internal method used to grow in push() and insert(), where we already know we have to grow.
1227 #[cold]
fn reserve_one_unchecked(&mut self) {
1229 debug_assert_eq!(self.len(), self.capacity());
1230 let new_cap = self.len()
1231 .checked_add(1)
1232 .and_then(usize::checked_next_power_of_two)
1233 .expect("capacity overflow");
1234 infallible(self.try_grow(new_cap))
1235 }
1236
1237 /// Reserve capacity for `additional` more elements to be inserted.
1238 ///
1239 /// May reserve more space to avoid frequent reallocations.
pub fn try_reserve(&mut self, additional: usize) -> Result<(), CollectionAllocErr> {
1241 // prefer triple_mut() even if triple() would work so that the optimizer removes duplicated
1242 // calls to it from callers.
1243 let (_, &mut len, cap) = self.triple_mut();
1244 if cap - len >= additional {
1245 return Ok(());
1246 }
1247 let new_cap = len
1248 .checked_add(additional)
1249 .and_then(usize::checked_next_power_of_two)
1250 .ok_or(CollectionAllocErr::CapacityOverflow)?;
1251 self.try_grow(new_cap)
1252 }
1253
1254 /// Reserve the minimum capacity for `additional` more elements to be inserted.
1255 ///
1256 /// Panics if the new capacity overflows `usize`.
pub fn reserve_exact(&mut self, additional: usize) {
1258 infallible(self.try_reserve_exact(additional))
1259 }
1260
1261 /// Reserve the minimum capacity for `additional` more elements to be inserted.
pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), CollectionAllocErr> {
1263 let (_, &mut len, cap) = self.triple_mut();
1264 if cap - len >= additional {
1265 return Ok(());
1266 }
1267 let new_cap = len
1268 .checked_add(additional)
1269 .ok_or(CollectionAllocErr::CapacityOverflow)?;
1270 self.try_grow(new_cap)
1271 }
1272
1273 /// Shrink the capacity of the vector as much as possible.
1274 ///
1275 /// When possible, this will move data from an external heap buffer to the vector's inline
1276 /// storage.
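///
/// # Example
///
/// A sketch of moving a spilled vector back into its inline buffer:
///
/// ```
/// # use smallvec::SmallVec;
/// let mut v: SmallVec<[i32; 4]> = SmallVec::with_capacity(16);
/// v.extend(0..4);
/// assert!(v.spilled());
/// v.shrink_to_fit();
/// assert!(!v.spilled());
/// ```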
pub fn shrink_to_fit(&mut self) {
1278 if !self.spilled() {
1279 return;
1280 }
1281 let len = self.len();
1282 if self.inline_size() >= len {
1283 unsafe {
1284 let (ptr, len) = self.data.heap();
1285 self.data = SmallVecData::from_inline(MaybeUninit::uninit());
1286 ptr::copy_nonoverlapping(ptr.as_ptr(), self.data.inline_mut().as_ptr(), len);
1287 deallocate(ptr.0, self.capacity);
1288 self.capacity = len;
1289 }
1290 } else if self.capacity() > len {
1291 self.grow(len);
1292 }
1293 }
1294
1295 /// Shorten the vector, keeping the first `len` elements and dropping the rest.
1296 ///
1297 /// If `len` is greater than or equal to the vector's current length, this has no
1298 /// effect.
1299 ///
1300 /// This does not re-allocate. If you want the vector's capacity to shrink, call
1301 /// `shrink_to_fit` after truncating.
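///
/// # Example
///
/// Dropping everything past the first two elements:
///
/// ```
/// # use smallvec::{smallvec, SmallVec};
/// let mut v: SmallVec<[i32; 8]> = smallvec![1, 2, 3, 4, 5];
/// v.truncate(2);
/// assert_eq!(&*v, &[1, 2]);
/// ```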
pub fn truncate(&mut self, len: usize) {
1303 unsafe {
1304 let (ptr, len_ptr, _) = self.triple_mut();
1305 let ptr = ptr.as_ptr();
1306 while len < *len_ptr {
1307 let last_index = *len_ptr - 1;
1308 *len_ptr = last_index;
1309 ptr::drop_in_place(ptr.add(last_index));
1310 }
1311 }
1312 }
1313
1314 /// Extracts a slice containing the entire vector.
1315 ///
1316 /// Equivalent to `&s[..]`.
pub fn as_slice(&self) -> &[A::Item] {
1318 self
1319 }
1320
1321 /// Extracts a mutable slice of the entire vector.
1322 ///
1323 /// Equivalent to `&mut s[..]`.
pub fn as_mut_slice(&mut self) -> &mut [A::Item] {
1325 self
1326 }
1327
1328 /// Remove the element at position `index`, replacing it with the last element.
1329 ///
1330 /// This does not preserve ordering, but is O(1).
1331 ///
1332 /// Panics if `index` is out of bounds.
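///
/// # Example
///
/// The last element is moved into the hole left by the removed one:
///
/// ```
/// # use smallvec::{smallvec, SmallVec};
/// let mut v: SmallVec<[i32; 4]> = smallvec![1, 2, 3, 4];
/// assert_eq!(v.swap_remove(0), 1);
/// assert_eq!(&*v, &[4, 2, 3]);
/// ```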
1333 #[inline]
pub fn swap_remove(&mut self, index: usize) -> A::Item {
1335 let len = self.len();
1336 self.swap(len - 1, index);
1337 self.pop()
1338 .unwrap_or_else(|| unsafe { unreachable_unchecked() })
1339 }
1340
1341 /// Remove all elements from the vector.
1342 #[inline]
pub fn clear(&mut self) {
1344 self.truncate(0);
1345 }
1346
1347 /// Remove and return the element at position `index`, shifting all elements after it to the
1348 /// left.
1349 ///
1350 /// Panics if `index` is out of bounds.
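///
/// # Example
///
/// Removing the middle element shifts the rest to the left:
///
/// ```
/// # use smallvec::{smallvec, SmallVec};
/// let mut v: SmallVec<[i32; 4]> = smallvec![1, 2, 3];
/// assert_eq!(v.remove(1), 2);
/// assert_eq!(&*v, &[1, 3]);
/// ```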
pub fn remove(&mut self, index: usize) -> A::Item {
1352 unsafe {
1353 let (ptr, len_ptr, _) = self.triple_mut();
1354 let len = *len_ptr;
1355 assert!(index < len);
1356 *len_ptr = len - 1;
1357 let ptr = ptr.as_ptr().add(index);
1358 let item = ptr::read(ptr);
1359 ptr::copy(ptr.add(1), ptr, len - index - 1);
1360 item
1361 }
1362 }
1363
1364 /// Insert an element at position `index`, shifting all elements after it to the right.
1365 ///
1366 /// Panics if `index > len`.
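///
/// # Example
///
/// Inserting into the middle shifts the following elements to the right:
///
/// ```
/// # use smallvec::{smallvec, SmallVec};
/// let mut v: SmallVec<[i32; 4]> = smallvec![1, 3];
/// v.insert(1, 2);
/// assert_eq!(&*v, &[1, 2, 3]);
/// ```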
pub fn insert(&mut self, index: usize, element: A::Item) {
1368 unsafe {
1369 let (mut ptr, mut len_ptr, cap) = self.triple_mut();
1370 if *len_ptr == cap {
1371 self.reserve_one_unchecked();
1372 let (heap_ptr, heap_len_ptr) = self.data.heap_mut();
1373 ptr = heap_ptr;
1374 len_ptr = heap_len_ptr;
1375 }
1376 let mut ptr = ptr.as_ptr();
1377 let len = *len_ptr;
1378 if index > len {
1379 panic!("index exceeds length");
1380 }
1381 // SAFETY: add is UB if index > len, but we panicked first
1382 ptr = ptr.add(index);
1383 if index < len {
1384 // Shift element to the right of `index`.
1385 ptr::copy(ptr, ptr.add(1), len - index);
1386 }
1387 *len_ptr = len + 1;
1388 ptr::write(ptr, element);
1389 }
1390 }
1391
1392 /// Insert multiple elements at position `index`, shifting all following elements toward the
1393 /// back.
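///
/// # Example
///
/// Splicing a range of values into the middle of a vector:
///
/// ```
/// # use smallvec::{smallvec, SmallVec};
/// let mut v: SmallVec<[i32; 8]> = smallvec![1, 5];
/// v.insert_many(1, 2..5);
/// assert_eq!(&*v, &[1, 2, 3, 4, 5]);
/// ```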
pub fn insert_many<I: IntoIterator<Item = A::Item>>(&mut self, index: usize, iterable: I) {
1395 let mut iter = iterable.into_iter();
1396 if index == self.len() {
1397 return self.extend(iter);
1398 }
1399
1400 let (lower_size_bound, _) = iter.size_hint();
1401 assert!(lower_size_bound <= core::isize::MAX as usize); // Ensure offset is indexable
1402 assert!(index + lower_size_bound >= index); // Protect against overflow
1403
1404 let mut num_added = 0;
1405 let old_len = self.len();
1406 assert!(index <= old_len);
1407
1408 unsafe {
1409 // Reserve space for `lower_size_bound` elements.
1410 self.reserve(lower_size_bound);
1411 let start = self.as_mut_ptr();
1412 let ptr = start.add(index);
1413
1414 // Move the trailing elements.
1415 ptr::copy(ptr, ptr.add(lower_size_bound), old_len - index);
1416
1417 // In case the iterator panics, don't double-drop the items we just copied above.
1418 self.set_len(0);
1419 let mut guard = DropOnPanic {
1420 start,
1421 skip: index..(index + lower_size_bound),
1422 len: old_len + lower_size_bound,
1423 };
1424
1425 // The set_len above invalidates the previous pointers, so we must re-create them.
1426 let start = self.as_mut_ptr();
1427 let ptr = start.add(index);
1428
1429 while num_added < lower_size_bound {
1430 let element = match iter.next() {
1431 Some(x) => x,
1432 None => break,
1433 };
1434 let cur = ptr.add(num_added);
1435 ptr::write(cur, element);
1436 guard.skip.start += 1;
1437 num_added += 1;
1438 }
1439
1440 if num_added < lower_size_bound {
1441 // Iterator provided fewer elements than the hint. Move the tail backward.
1442 ptr::copy(
1443 ptr.add(lower_size_bound),
1444 ptr.add(num_added),
1445 old_len - index,
1446 );
1447 }
1448 // There are no more duplicate or uninitialized slots, so the guard is not needed.
1449 self.set_len(old_len + num_added);
1450 mem::forget(guard);
1451 }
1452
1453 // Insert any remaining elements one-by-one.
1454 for element in iter {
1455 self.insert(index + num_added, element);
1456 num_added += 1;
1457 }
1458
1459 struct DropOnPanic<T> {
1460 start: *mut T,
1461 skip: Range<usize>, // Space we copied-out-of, but haven't written-to yet.
1462 len: usize,
1463 }
1464
1465 impl<T> Drop for DropOnPanic<T> {
1466 fn drop(&mut self) {
1467 for i in 0..self.len {
1468 if !self.skip.contains(&i) {
1469 unsafe {
1470 ptr::drop_in_place(self.start.add(i));
1471 }
1472 }
1473 }
1474 }
1475 }
1476 }
1477
1478 /// Convert a `SmallVec` to a `Vec`, without reallocating if the `SmallVec` has already spilled onto
1479 /// the heap.
pub fn into_vec(mut self) -> Vec<A::Item> {
1481 if self.spilled() {
1482 unsafe {
1483 let (ptr, &mut len) = self.data.heap_mut();
1484 let v = Vec::from_raw_parts(ptr.as_ptr(), len, self.capacity);
1485 mem::forget(self);
1486 v
1487 }
1488 } else {
1489 self.into_iter().collect()
1490 }
1491 }
1492
1493 /// Converts a `SmallVec` into a `Box<[T]>` without reallocating if the `SmallVec` has already spilled
1494 /// onto the heap.
1495 ///
1496 /// Note that this will drop any excess capacity.
pub fn into_boxed_slice(self) -> Box<[A::Item]> {
1498 self.into_vec().into_boxed_slice()
1499 }
1500
1501 /// Convert the `SmallVec` into an `A` if possible. Otherwise return `Err(Self)`.
1502 ///
1503 /// This method returns `Err(Self)` if the `SmallVec` is too short (and the `A` contains uninitialized elements),
1504 /// or if the `SmallVec` is too long (and all the elements were spilled to the heap).
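///
/// # Example
///
/// A sketch of both outcomes; the length must exactly match `A::size()` for success:
///
/// ```
/// # use smallvec::{smallvec, SmallVec};
/// let full: SmallVec<[i32; 3]> = smallvec![1, 2, 3];
/// assert_eq!(full.into_inner().unwrap(), [1, 2, 3]);
///
/// let short: SmallVec<[i32; 3]> = smallvec![1, 2];
/// assert!(short.into_inner().is_err());
/// ```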
pub fn into_inner(self) -> Result<A, Self> {
1506 if self.spilled() || self.len() != A::size() {
1507 // Note: A::size, not Self::inline_capacity
1508 Err(self)
1509 } else {
1510 unsafe {
1511 let data = ptr::read(&self.data);
1512 mem::forget(self);
1513 Ok(data.into_inline().assume_init())
1514 }
1515 }
1516 }
1517
1518 /// Retains only the elements specified by the predicate.
1519 ///
1520 /// In other words, remove all elements `e` such that `f(&e)` returns `false`.
1521 /// This method operates in place and preserves the order of the retained
1522 /// elements.
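///
/// # Example
///
/// Keeping only the even values:
///
/// ```
/// # use smallvec::{smallvec, SmallVec};
/// let mut v: SmallVec<[i32; 8]> = smallvec![1, 2, 3, 4];
/// v.retain(|x| *x % 2 == 0);
/// assert_eq!(&*v, &[2, 4]);
/// ```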
pub fn retain<F: FnMut(&mut A::Item) -> bool>(&mut self, mut f: F) {
1524 let mut del = 0;
1525 let len = self.len();
1526 for i in 0..len {
1527 if !f(&mut self[i]) {
1528 del += 1;
1529 } else if del > 0 {
1530 self.swap(i - del, i);
1531 }
1532 }
1533 self.truncate(len - del);
1534 }
1535
1536 /// Retains only the elements specified by the predicate.
1537 ///
/// This method is identical in behaviour to [`retain`]; it is included only
/// to maintain API compatibility with `std::vec::Vec`, where the methods are
/// separate for historical reasons.
pub fn retain_mut<F: FnMut(&mut A::Item) -> bool>(&mut self, f: F) {
1542 self.retain(f)
1543 }
1544
1545 /// Removes consecutive duplicate elements.
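///
/// # Example
///
/// Only *consecutive* duplicates are removed:
///
/// ```
/// # use smallvec::{smallvec, SmallVec};
/// let mut v: SmallVec<[i32; 8]> = smallvec![1, 1, 2, 1, 3, 3];
/// v.dedup();
/// assert_eq!(&*v, &[1, 2, 1, 3]);
/// ```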
pub fn dedup(&mut self)
1547 where
1548 A::Item: PartialEq<A::Item>,
1549 {
1550 self.dedup_by(|a, b| a == b);
1551 }
1552
1553 /// Removes consecutive duplicate elements using the given equality relation.
pub fn dedup_by<F>(&mut self, mut same_bucket: F)
1555 where
1556 F: FnMut(&mut A::Item, &mut A::Item) -> bool,
1557 {
1558 // See the implementation of Vec::dedup_by in the
1559 // standard library for an explanation of this algorithm.
1560 let len = self.len();
1561 if len <= 1 {
1562 return;
1563 }
1564
1565 let ptr = self.as_mut_ptr();
1566 let mut w: usize = 1;
1567
1568 unsafe {
1569 for r in 1..len {
1570 let p_r = ptr.add(r);
1571 let p_wm1 = ptr.add(w - 1);
1572 if !same_bucket(&mut *p_r, &mut *p_wm1) {
1573 if r != w {
1574 let p_w = p_wm1.add(1);
1575 mem::swap(&mut *p_r, &mut *p_w);
1576 }
1577 w += 1;
1578 }
1579 }
1580 }
1581
1582 self.truncate(w);
1583 }
1584
1585 /// Removes consecutive elements that map to the same key.
pub fn dedup_by_key<F, K>(&mut self, mut key: F)
1587 where
1588 F: FnMut(&mut A::Item) -> K,
1589 K: PartialEq<K>,
1590 {
1591 self.dedup_by(|a, b| key(a) == key(b));
1592 }
1593
1594 /// Resizes the `SmallVec` in-place so that `len` is equal to `new_len`.
1595 ///
1596 /// If `new_len` is greater than `len`, the `SmallVec` is extended by the difference, with each
1597 /// additional slot filled with the result of calling the closure `f`. The return values from `f`
1598 /// will end up in the `SmallVec` in the order they have been generated.
1599 ///
1600 /// If `new_len` is less than `len`, the `SmallVec` is simply truncated.
1601 ///
1602 /// This method uses a closure to create new values on every push. If you'd rather `Clone` a given
1603 /// value, use `resize`. If you want to use the `Default` trait to generate values, you can pass
1604 /// `Default::default()` as the second argument.
1605 ///
1606 /// Added for `std::vec::Vec` compatibility (added in Rust 1.33.0)
1607 ///
1608 /// ```
1609 /// # use smallvec::{smallvec, SmallVec};
1610 /// let mut vec : SmallVec<[_; 4]> = smallvec![1, 2, 3];
1611 /// vec.resize_with(5, Default::default);
1612 /// assert_eq!(&*vec, &[1, 2, 3, 0, 0]);
1613 ///
1614 /// let mut vec : SmallVec<[_; 4]> = smallvec![];
1615 /// let mut p = 1;
1616 /// vec.resize_with(4, || { p *= 2; p });
1617 /// assert_eq!(&*vec, &[2, 4, 8, 16]);
1618 /// ```
pub fn resize_with<F>(&mut self, new_len: usize, f: F)
1620 where
1621 F: FnMut() -> A::Item,
1622 {
1623 let old_len = self.len();
1624 if old_len < new_len {
1625 let mut f = f;
1626 let additional = new_len - old_len;
1627 self.reserve(additional);
1628 for _ in 0..additional {
1629 self.push(f());
1630 }
1631 } else if old_len > new_len {
1632 self.truncate(new_len);
1633 }
1634 }
1635
1636 /// Creates a `SmallVec` directly from the raw components of another
1637 /// `SmallVec`.
1638 ///
1639 /// # Safety
1640 ///
1641 /// This is highly unsafe, due to the number of invariants that aren't
1642 /// checked:
1643 ///
1644 /// * `ptr` needs to have been previously allocated via `SmallVec` for its
1645 /// spilled storage (at least, it's highly likely to be incorrect if it
1646 /// wasn't).
1647 /// * `ptr`'s `A::Item` type needs to be the same size and alignment that
1648 /// it was allocated with
1649 /// * `length` needs to be less than or equal to `capacity`.
1650 /// * `capacity` needs to be the capacity that the pointer was allocated
1651 /// with.
1652 ///
1653 /// Violating these may cause problems like corrupting the allocator's
1654 /// internal data structures.
1655 ///
1656 /// Additionally, `capacity` must be greater than the amount of inline
1657 /// storage `A` has; that is, the new `SmallVec` must need to spill over
1658 /// into heap allocated storage. This condition is asserted against.
1659 ///
1660 /// The ownership of `ptr` is effectively transferred to the
1661 /// `SmallVec` which may then deallocate, reallocate or change the
1662 /// contents of memory pointed to by the pointer at will. Ensure
1663 /// that nothing else uses the pointer after calling this
1664 /// function.
1665 ///
1666 /// # Examples
1667 ///
1668 /// ```
1669 /// # use smallvec::{smallvec, SmallVec};
1670 /// use std::mem;
1671 /// use std::ptr;
1672 ///
1673 /// fn main() {
1674 /// let mut v: SmallVec<[_; 1]> = smallvec![1, 2, 3];
1675 ///
1676 /// // Pull out the important parts of `v`.
1677 /// let p = v.as_mut_ptr();
1678 /// let len = v.len();
1679 /// let cap = v.capacity();
1680 /// let spilled = v.spilled();
1681 ///
1682 /// unsafe {
1683 /// // Forget all about `v`. The heap allocation that stored the
1684 /// // three values won't be deallocated.
1685 /// mem::forget(v);
1686 ///
1687 /// // Overwrite memory with [4, 5, 6].
1688 /// //
1689 /// // This is only safe if `spilled` is true! Otherwise, we are
1690 /// // writing into the old `SmallVec`'s inline storage on the
1691 /// // stack.
1692 /// assert!(spilled);
1693 /// for i in 0..len {
1694 /// ptr::write(p.add(i), 4 + i);
1695 /// }
1696 ///
1697 /// // Put everything back together into a SmallVec with a different
1698 /// // amount of inline storage, but which is still less than `cap`.
1699 /// let rebuilt = SmallVec::<[_; 2]>::from_raw_parts(p, len, cap);
1700 /// assert_eq!(&*rebuilt, &[4, 5, 6]);
1701 /// }
1702 /// }
1703 #[inline]
from_raw_parts(ptr: *mut A::Item, length: usize, capacity: usize) -> SmallVec<A>1704 pub unsafe fn from_raw_parts(ptr: *mut A::Item, length: usize, capacity: usize) -> SmallVec<A> {
1705 // SAFETY: We require caller to provide same ptr as we alloc
1706 // and we never alloc null pointer.
1707 let ptr = unsafe {
1708 debug_assert!(!ptr.is_null(), "Called `from_raw_parts` with null pointer.");
1709 NonNull::new_unchecked(ptr)
1710 };
1711 assert!(capacity > Self::inline_capacity());
1712 SmallVec {
1713 capacity,
1714 data: SmallVecData::from_heap(ptr, length),
1715 }
1716 }
1717
1718 /// Returns a raw pointer to the vector's buffer.
as_ptr(&self) -> *const A::Item1719 pub fn as_ptr(&self) -> *const A::Item {
1720 // We shadow the slice method of the same name to avoid going through
1721 // `deref`, which creates an intermediate reference that may place
1722 // additional safety constraints on the contents of the slice.
1723 self.triple().0.as_ptr()
1724 }
1725
1726 /// Returns a raw mutable pointer to the vector's buffer.
as_mut_ptr(&mut self) -> *mut A::Item1727 pub fn as_mut_ptr(&mut self) -> *mut A::Item {
1728 // We shadow the slice method of the same name to avoid going through
1729 // `deref_mut`, which creates an intermediate reference that may place
1730 // additional safety constraints on the contents of the slice.
1731 self.triple_mut().0.as_ptr()
1732 }
1733 }

impl<A: Array> SmallVec<A>
where
    A::Item: Copy,
{
    /// Copy the elements from a slice into a new `SmallVec`.
    ///
    /// For slices of `Copy` types, this is more efficient than `SmallVec::from(slice)`.
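    ///
    /// A minimal sketch; a slice that fits in the inline buffer is copied
    /// without touching the heap:
    ///
    /// ```
    /// # use smallvec::SmallVec;
    /// let v: SmallVec<[i32; 4]> = SmallVec::from_slice(&[1, 2, 3]);
    /// assert_eq!(&*v, &[1, 2, 3]);
    /// assert!(!v.spilled());
    /// ```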
    pub fn from_slice(slice: &[A::Item]) -> Self {
        let len = slice.len();
        if len <= Self::inline_capacity() {
            SmallVec {
                capacity: len,
                data: SmallVecData::from_inline(unsafe {
                    let mut data: MaybeUninit<A> = MaybeUninit::uninit();
                    ptr::copy_nonoverlapping(
                        slice.as_ptr(),
                        data.as_mut_ptr() as *mut A::Item,
                        len,
                    );
                    data
                }),
            }
        } else {
            let mut b = slice.to_vec();
            let cap = b.capacity();
            let ptr = NonNull::new(b.as_mut_ptr()).expect("Vec always contains a non-null pointer.");
            mem::forget(b);
            SmallVec {
                capacity: cap,
                data: SmallVecData::from_heap(ptr, len),
            }
        }
    }

    /// Copy elements from a slice into the vector at position `index`, shifting any following
    /// elements toward the back.
    ///
    /// For slices of `Copy` types, this is more efficient than `insert`.
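    ///
    /// A minimal sketch:
    ///
    /// ```
    /// # use smallvec::{smallvec, SmallVec};
    /// let mut v: SmallVec<[i32; 8]> = smallvec![1, 2, 5, 6];
    /// v.insert_from_slice(2, &[3, 4]);
    /// assert_eq!(&*v, &[1, 2, 3, 4, 5, 6]);
    /// ```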
    #[inline]
    pub fn insert_from_slice(&mut self, index: usize, slice: &[A::Item]) {
        self.reserve(slice.len());

        let len = self.len();
        assert!(index <= len);

        unsafe {
            let slice_ptr = slice.as_ptr();
            let ptr = self.as_mut_ptr().add(index);
            ptr::copy(ptr, ptr.add(slice.len()), len - index);
            ptr::copy_nonoverlapping(slice_ptr, ptr, slice.len());
            self.set_len(len + slice.len());
        }
    }

    /// Copy elements from a slice and append them to the vector.
    ///
    /// For slices of `Copy` types, this is more efficient than `extend`.
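    ///
    /// A minimal sketch:
    ///
    /// ```
    /// # use smallvec::{smallvec, SmallVec};
    /// let mut v: SmallVec<[i32; 8]> = smallvec![1, 2, 3];
    /// v.extend_from_slice(&[4, 5]);
    /// assert_eq!(&*v, &[1, 2, 3, 4, 5]);
    /// ```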
    #[inline]
    pub fn extend_from_slice(&mut self, slice: &[A::Item]) {
        let len = self.len();
        self.insert_from_slice(len, slice);
    }
}

impl<A: Array> SmallVec<A>
where
    A::Item: Clone,
{
    /// Resizes the vector so that its length is equal to `len`.
    ///
    /// If `len` is less than the current length, the vector is simply truncated.
    ///
    /// If `len` is greater than the current length, `value` is appended to the
    /// vector until its length equals `len`.
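    ///
    /// A minimal sketch:
    ///
    /// ```
    /// # use smallvec::{smallvec, SmallVec};
    /// let mut v: SmallVec<[&str; 4]> = smallvec!["hello"];
    /// v.resize(3, "world");
    /// assert_eq!(&*v, &["hello", "world", "world"]);
    /// v.resize(1, "world");
    /// assert_eq!(&*v, &["hello"]);
    /// ```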
    pub fn resize(&mut self, len: usize, value: A::Item) {
        let old_len = self.len();

        if len > old_len {
            self.extend(repeat(value).take(len - old_len));
        } else {
            self.truncate(len);
        }
    }

    /// Creates a `SmallVec` with `n` copies of `elem`.
    /// ```
    /// use smallvec::SmallVec;
    ///
    /// let v = SmallVec::<[char; 128]>::from_elem('d', 2);
    /// assert_eq!(v, SmallVec::from_buf(['d', 'd']));
    /// ```
    pub fn from_elem(elem: A::Item, n: usize) -> Self {
        if n > Self::inline_capacity() {
            vec![elem; n].into()
        } else {
            let mut v = SmallVec::<A>::new();
            unsafe {
                let (ptr, len_ptr, _) = v.triple_mut();
                let ptr = ptr.as_ptr();
                let mut local_len = SetLenOnDrop::new(len_ptr);

                for i in 0..n {
                    ::core::ptr::write(ptr.add(i), elem.clone());
                    local_len.increment_len(1);
                }
            }
            v
        }
    }
}

impl<A: Array> ops::Deref for SmallVec<A> {
    type Target = [A::Item];
    #[inline]
    fn deref(&self) -> &[A::Item] {
        unsafe {
            let (ptr, len, _) = self.triple();
            slice::from_raw_parts(ptr.as_ptr(), len)
        }
    }
}

impl<A: Array> ops::DerefMut for SmallVec<A> {
    #[inline]
    fn deref_mut(&mut self) -> &mut [A::Item] {
        unsafe {
            let (ptr, &mut len, _) = self.triple_mut();
            slice::from_raw_parts_mut(ptr.as_ptr(), len)
        }
    }
}

impl<A: Array> AsRef<[A::Item]> for SmallVec<A> {
    #[inline]
    fn as_ref(&self) -> &[A::Item] {
        self
    }
}

impl<A: Array> AsMut<[A::Item]> for SmallVec<A> {
    #[inline]
    fn as_mut(&mut self) -> &mut [A::Item] {
        self
    }
}

impl<A: Array> Borrow<[A::Item]> for SmallVec<A> {
    #[inline]
    fn borrow(&self) -> &[A::Item] {
        self
    }
}

impl<A: Array> BorrowMut<[A::Item]> for SmallVec<A> {
    #[inline]
    fn borrow_mut(&mut self) -> &mut [A::Item] {
        self
    }
}

#[cfg(feature = "write")]
#[cfg_attr(docsrs, doc(cfg(feature = "write")))]
impl<A: Array<Item = u8>> io::Write for SmallVec<A> {
    #[inline]
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        self.extend_from_slice(buf);
        Ok(buf.len())
    }

    #[inline]
    fn write_all(&mut self, buf: &[u8]) -> io::Result<()> {
        self.extend_from_slice(buf);
        Ok(())
    }

    #[inline]
    fn flush(&mut self) -> io::Result<()> {
        Ok(())
    }
}

#[cfg(feature = "serde")]
#[cfg_attr(docsrs, doc(cfg(feature = "serde")))]
impl<A: Array> Serialize for SmallVec<A>
where
    A::Item: Serialize,
{
    fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        let mut state = serializer.serialize_seq(Some(self.len()))?;
        for item in self {
            state.serialize_element(&item)?;
        }
        state.end()
    }
}

#[cfg(feature = "serde")]
#[cfg_attr(docsrs, doc(cfg(feature = "serde")))]
impl<'de, A: Array> Deserialize<'de> for SmallVec<A>
where
    A::Item: Deserialize<'de>,
{
    fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
        deserializer.deserialize_seq(SmallVecVisitor {
            phantom: PhantomData,
        })
    }
}

#[cfg(feature = "serde")]
struct SmallVecVisitor<A> {
    phantom: PhantomData<A>,
}

#[cfg(feature = "serde")]
impl<'de, A: Array> Visitor<'de> for SmallVecVisitor<A>
where
    A::Item: Deserialize<'de>,
{
    type Value = SmallVec<A>;

    fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
        formatter.write_str("a sequence")
    }

    fn visit_seq<B>(self, mut seq: B) -> Result<Self::Value, B::Error>
    where
        B: SeqAccess<'de>,
    {
        use serde::de::Error;
        let len = seq.size_hint().unwrap_or(0);
        let mut values = SmallVec::new();
        values.try_reserve(len).map_err(B::Error::custom)?;

        while let Some(value) = seq.next_element()? {
            values.push(value);
        }

        Ok(values)
    }
}

#[cfg(feature = "malloc_size_of")]
impl<A: Array> MallocShallowSizeOf for SmallVec<A> {
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        if self.spilled() {
            unsafe { ops.malloc_size_of(self.as_ptr()) }
        } else {
            0
        }
    }
}

#[cfg(feature = "malloc_size_of")]
impl<A> MallocSizeOf for SmallVec<A>
where
    A: Array,
    A::Item: MallocSizeOf,
{
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let mut n = self.shallow_size_of(ops);
        for elem in self.iter() {
            n += elem.size_of(ops);
        }
        n
    }
}

#[cfg(feature = "specialization")]
trait SpecFrom<A: Array, S> {
    fn spec_from(slice: S) -> SmallVec<A>;
}

#[cfg(feature = "specialization")]
mod specialization;

#[cfg(feature = "arbitrary")]
mod arbitrary;

#[cfg(feature = "specialization")]
impl<'a, A: Array> SpecFrom<A, &'a [A::Item]> for SmallVec<A>
where
    A::Item: Copy,
{
    #[inline]
    fn spec_from(slice: &'a [A::Item]) -> SmallVec<A> {
        SmallVec::from_slice(slice)
    }
}

impl<'a, A: Array> From<&'a [A::Item]> for SmallVec<A>
where
    A::Item: Clone,
{
    #[cfg(not(feature = "specialization"))]
    #[inline]
    fn from(slice: &'a [A::Item]) -> SmallVec<A> {
        slice.iter().cloned().collect()
    }

    #[cfg(feature = "specialization")]
    #[inline]
    fn from(slice: &'a [A::Item]) -> SmallVec<A> {
        SmallVec::spec_from(slice)
    }
}

impl<A: Array> From<Vec<A::Item>> for SmallVec<A> {
    #[inline]
    fn from(vec: Vec<A::Item>) -> SmallVec<A> {
        SmallVec::from_vec(vec)
    }
}

impl<A: Array> From<A> for SmallVec<A> {
    #[inline]
    fn from(array: A) -> SmallVec<A> {
        SmallVec::from_buf(array)
    }
}

impl<A: Array, I: SliceIndex<[A::Item]>> ops::Index<I> for SmallVec<A> {
    type Output = I::Output;

    fn index(&self, index: I) -> &I::Output {
        &(**self)[index]
    }
}

impl<A: Array, I: SliceIndex<[A::Item]>> ops::IndexMut<I> for SmallVec<A> {
    fn index_mut(&mut self, index: I) -> &mut I::Output {
        &mut (&mut **self)[index]
    }
}

#[allow(deprecated)]
impl<A: Array> ExtendFromSlice<A::Item> for SmallVec<A>
where
    A::Item: Copy,
{
    fn extend_from_slice(&mut self, other: &[A::Item]) {
        SmallVec::extend_from_slice(self, other)
    }
}

impl<A: Array> FromIterator<A::Item> for SmallVec<A> {
    #[inline]
    fn from_iter<I: IntoIterator<Item = A::Item>>(iterable: I) -> SmallVec<A> {
        let mut v = SmallVec::new();
        v.extend(iterable);
        v
    }
}

impl<A: Array> Extend<A::Item> for SmallVec<A> {
    fn extend<I: IntoIterator<Item = A::Item>>(&mut self, iterable: I) {
        let mut iter = iterable.into_iter();
        let (lower_size_bound, _) = iter.size_hint();
        self.reserve(lower_size_bound);

        unsafe {
            let (ptr, len_ptr, cap) = self.triple_mut();
            let ptr = ptr.as_ptr();
            let mut len = SetLenOnDrop::new(len_ptr);
            while len.get() < cap {
                if let Some(out) = iter.next() {
                    ptr::write(ptr.add(len.get()), out);
                    len.increment_len(1);
                } else {
                    return;
                }
            }
        }

        for elem in iter {
            self.push(elem);
        }
    }
}

impl<A: Array> fmt::Debug for SmallVec<A>
where
    A::Item: fmt::Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_list().entries(self.iter()).finish()
    }
}

impl<A: Array> Default for SmallVec<A> {
    #[inline]
    fn default() -> SmallVec<A> {
        SmallVec::new()
    }
}

#[cfg(feature = "may_dangle")]
unsafe impl<#[may_dangle] A: Array> Drop for SmallVec<A> {
    fn drop(&mut self) {
        unsafe {
            if self.spilled() {
                let (ptr, &mut len) = self.data.heap_mut();
                Vec::from_raw_parts(ptr.as_ptr(), len, self.capacity);
            } else {
                ptr::drop_in_place(&mut self[..]);
            }
        }
    }
}

#[cfg(not(feature = "may_dangle"))]
impl<A: Array> Drop for SmallVec<A> {
    fn drop(&mut self) {
        unsafe {
            if self.spilled() {
                let (ptr, &mut len) = self.data.heap_mut();
                drop(Vec::from_raw_parts(ptr.as_ptr(), len, self.capacity));
            } else {
                ptr::drop_in_place(&mut self[..]);
            }
        }
    }
}

impl<A: Array> Clone for SmallVec<A>
where
    A::Item: Clone,
{
    #[inline]
    fn clone(&self) -> SmallVec<A> {
        SmallVec::from(self.as_slice())
    }

    fn clone_from(&mut self, source: &Self) {
        // Inspired from `impl Clone for Vec`.

        // drop anything that will not be overwritten
        self.truncate(source.len());

        // self.len <= other.len due to the truncate above, so the
        // slices here are always in-bounds.
        let (init, tail) = source.split_at(self.len());

        // reuse the contained values' allocations/resources.
        self.clone_from_slice(init);
        self.extend(tail.iter().cloned());
    }
}

impl<A: Array, B: Array> PartialEq<SmallVec<B>> for SmallVec<A>
where
    A::Item: PartialEq<B::Item>,
{
    #[inline]
    fn eq(&self, other: &SmallVec<B>) -> bool {
        self[..] == other[..]
    }
}

impl<A: Array> Eq for SmallVec<A> where A::Item: Eq {}

impl<A: Array> PartialOrd for SmallVec<A>
where
    A::Item: PartialOrd,
{
    #[inline]
    fn partial_cmp(&self, other: &SmallVec<A>) -> Option<cmp::Ordering> {
        PartialOrd::partial_cmp(&**self, &**other)
    }
}

impl<A: Array> Ord for SmallVec<A>
where
    A::Item: Ord,
{
    #[inline]
    fn cmp(&self, other: &SmallVec<A>) -> cmp::Ordering {
        Ord::cmp(&**self, &**other)
    }
}

impl<A: Array> Hash for SmallVec<A>
where
    A::Item: Hash,
{
    fn hash<H: Hasher>(&self, state: &mut H) {
        (**self).hash(state)
    }
}

unsafe impl<A: Array> Send for SmallVec<A> where A::Item: Send {}

/// An iterator that consumes a `SmallVec` and yields its items by value.
///
/// Returned from [`SmallVec::into_iter`][1].
///
/// [1]: struct.SmallVec.html#method.into_iter
pub struct IntoIter<A: Array> {
    data: SmallVec<A>,
    current: usize,
    end: usize,
}

impl<A: Array> fmt::Debug for IntoIter<A>
where
    A::Item: fmt::Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
    }
}

impl<A: Array + Clone> Clone for IntoIter<A>
where
    A::Item: Clone,
{
    fn clone(&self) -> IntoIter<A> {
        SmallVec::from(self.as_slice()).into_iter()
    }
}

impl<A: Array> Drop for IntoIter<A> {
    fn drop(&mut self) {
        for _ in self {}
    }
}

impl<A: Array> Iterator for IntoIter<A> {
    type Item = A::Item;

    #[inline]
    fn next(&mut self) -> Option<A::Item> {
        if self.current == self.end {
            None
        } else {
            unsafe {
                let current = self.current;
                self.current += 1;
                Some(ptr::read(self.data.as_ptr().add(current)))
            }
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let size = self.end - self.current;
        (size, Some(size))
    }
}

impl<A: Array> DoubleEndedIterator for IntoIter<A> {
    #[inline]
    fn next_back(&mut self) -> Option<A::Item> {
        if self.current == self.end {
            None
        } else {
            unsafe {
                self.end -= 1;
                Some(ptr::read(self.data.as_ptr().add(self.end)))
            }
        }
    }
}

impl<A: Array> ExactSizeIterator for IntoIter<A> {}
impl<A: Array> FusedIterator for IntoIter<A> {}

impl<A: Array> IntoIter<A> {
    /// Returns the remaining items of this iterator as a slice.
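    ///
    /// A minimal sketch:
    ///
    /// ```
    /// # use smallvec::{smallvec, SmallVec};
    /// let v: SmallVec<[i32; 4]> = smallvec![1, 2, 3];
    /// let mut iter = v.into_iter();
    /// assert_eq!(iter.next(), Some(1));
    /// assert_eq!(iter.as_slice(), &[2, 3]);
    /// ```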
    pub fn as_slice(&self) -> &[A::Item] {
        let len = self.end - self.current;
        unsafe { core::slice::from_raw_parts(self.data.as_ptr().add(self.current), len) }
    }

    /// Returns the remaining items of this iterator as a mutable slice.
    pub fn as_mut_slice(&mut self) -> &mut [A::Item] {
        let len = self.end - self.current;
        unsafe { core::slice::from_raw_parts_mut(self.data.as_mut_ptr().add(self.current), len) }
    }
}

impl<A: Array> IntoIterator for SmallVec<A> {
    type IntoIter = IntoIter<A>;
    type Item = A::Item;
    fn into_iter(mut self) -> Self::IntoIter {
        unsafe {
            // Set SmallVec len to zero as `IntoIter` drop handles dropping of the elements
            let len = self.len();
            self.set_len(0);
            IntoIter {
                data: self,
                current: 0,
                end: len,
            }
        }
    }
}

impl<'a, A: Array> IntoIterator for &'a SmallVec<A> {
    type IntoIter = slice::Iter<'a, A::Item>;
    type Item = &'a A::Item;
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}

impl<'a, A: Array> IntoIterator for &'a mut SmallVec<A> {
    type IntoIter = slice::IterMut<'a, A::Item>;
    type Item = &'a mut A::Item;
    fn into_iter(self) -> Self::IntoIter {
        self.iter_mut()
    }
}

/// Types that can be used as the backing store for a [`SmallVec`].
pub unsafe trait Array {
    /// The type of the array's elements.
    type Item;
    /// Returns the number of items the array can hold.
    fn size() -> usize;
}

/// Set the length of the vec when the `SetLenOnDrop` value goes out of scope.
///
/// Copied from <https://github.com/rust-lang/rust/pull/36355>
struct SetLenOnDrop<'a> {
    len: &'a mut usize,
    local_len: usize,
}

impl<'a> SetLenOnDrop<'a> {
    #[inline]
    fn new(len: &'a mut usize) -> Self {
        SetLenOnDrop {
            local_len: *len,
            len,
        }
    }

    #[inline]
    fn get(&self) -> usize {
        self.local_len
    }

    #[inline]
    fn increment_len(&mut self, increment: usize) {
        self.local_len += increment;
    }
}

impl<'a> Drop for SetLenOnDrop<'a> {
    #[inline]
    fn drop(&mut self) {
        *self.len = self.local_len;
    }
}

#[cfg(feature = "const_new")]
impl<T, const N: usize> SmallVec<[T; N]> {
    /// Construct an empty vector.
    ///
    /// This is a `const` version of [`SmallVec::new`] that is enabled by the feature `const_new`, with the limitation that it only works for arrays.
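    ///
    /// A minimal sketch (assuming the `const_new` feature is enabled):
    ///
    /// ```
    /// # use smallvec::SmallVec;
    /// const EMPTY: SmallVec<[i32; 4]> = SmallVec::new_const();
    /// assert!(EMPTY.is_empty());
    /// ```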
    #[cfg_attr(docsrs, doc(cfg(feature = "const_new")))]
    #[inline]
    pub const fn new_const() -> Self {
        SmallVec {
            capacity: 0,
            data: SmallVecData::from_const(MaybeUninit::uninit()),
        }
    }

    /// The array passed as an argument is moved to be an inline version of `SmallVec`.
    ///
    /// This is a `const` version of [`SmallVec::from_buf`] that is enabled by the feature `const_new`, with the limitation that it only works for arrays.
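    ///
    /// A minimal sketch (assuming the `const_new` feature is enabled):
    ///
    /// ```
    /// # use smallvec::SmallVec;
    /// const THREE: SmallVec<[i32; 3]> = SmallVec::from_const([1, 2, 3]);
    /// assert_eq!(THREE.len(), 3);
    /// ```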
    #[cfg_attr(docsrs, doc(cfg(feature = "const_new")))]
    #[inline]
    pub const fn from_const(items: [T; N]) -> Self {
        SmallVec {
            capacity: N,
            data: SmallVecData::from_const(MaybeUninit::new(items)),
        }
    }

    /// Constructs a new `SmallVec` on the stack from an array without
    /// copying elements. Also sets the length. The user is responsible
    /// for ensuring that `len <= N`.
    ///
    /// This is a `const` version of [`SmallVec::from_buf_and_len_unchecked`] that is enabled by the feature `const_new`, with the limitation that it only works for arrays.
    #[cfg_attr(docsrs, doc(cfg(feature = "const_new")))]
    #[inline]
    pub const unsafe fn from_const_with_len_unchecked(items: [T; N], len: usize) -> Self {
        SmallVec {
            capacity: len,
            data: SmallVecData::from_const(MaybeUninit::new(items)),
        }
    }
}

#[cfg(feature = "const_generics")]
#[cfg_attr(docsrs, doc(cfg(feature = "const_generics")))]
unsafe impl<T, const N: usize> Array for [T; N] {
    type Item = T;
    #[inline]
    fn size() -> usize {
        N
    }
}

#[cfg(not(feature = "const_generics"))]
macro_rules! impl_array(
    ($($size:expr),+) => {
        $(
            unsafe impl<T> Array for [T; $size] {
                type Item = T;
                #[inline]
                fn size() -> usize { $size }
            }
        )+
    }
);

#[cfg(not(feature = "const_generics"))]
impl_array!(
    0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25,
    26, 27, 28, 29, 30, 31, 32, 36, 0x40, 0x60, 0x80, 0x100, 0x200, 0x400, 0x600, 0x800, 0x1000,
    0x2000, 0x4000, 0x6000, 0x8000, 0x10000, 0x20000, 0x40000, 0x60000, 0x80000, 0x10_0000
);

/// Convenience trait for constructing a `SmallVec`
pub trait ToSmallVec<A: Array> {
    /// Construct a new `SmallVec` from a slice.
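    ///
    /// A minimal sketch:
    ///
    /// ```
    /// # use smallvec::{SmallVec, ToSmallVec};
    /// let slice: &[i32] = &[1, 2, 3];
    /// let v: SmallVec<[i32; 4]> = slice.to_smallvec();
    /// assert_eq!(&*v, &[1, 2, 3]);
    /// ```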
    fn to_smallvec(&self) -> SmallVec<A>;
}

impl<A: Array> ToSmallVec<A> for [A::Item]
where
    A::Item: Copy,
{
    #[inline]
    fn to_smallvec(&self) -> SmallVec<A> {
        SmallVec::from_slice(self)
    }
}

// Immutable counterpart for `NonNull<T>`.
#[repr(transparent)]
struct ConstNonNull<T>(NonNull<T>);

impl<T> ConstNonNull<T> {
    #[inline]
    fn new(ptr: *const T) -> Option<Self> {
        NonNull::new(ptr as *mut T).map(Self)
    }
    #[inline]
    fn as_ptr(self) -> *const T {
        self.0.as_ptr()
    }
}

impl<T> Clone for ConstNonNull<T> {
    #[inline]
    fn clone(&self) -> Self {
        *self
    }
}

impl<T> Copy for ConstNonNull<T> {}