// Copyright 2024 The Fuchsia Authors
//
// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0
// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT
// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option.
// This file may not be copied, modified, or distributed except according to
// those terms.

use core::{marker::PhantomData, ops::Range, ptr::NonNull};

#[allow(unused_imports)]
use crate::util::polyfills::NumExt as _;
use crate::{
    layout::{CastType, DstLayout, MetadataCastError},
    util::AsAddress,
    AlignmentError, CastError, KnownLayout, PointerMetadata, SizeError,
};

pub(crate) use _def::PtrInner;

mod _def {
    use super::*;
    /// The inner pointer stored inside a [`Ptr`][crate::Ptr].
    ///
    /// `PtrInner<'a, T>` is [covariant] in `'a` and invariant in `T`.
    ///
    /// [covariant]: https://doc.rust-lang.org/reference/subtyping.html
    pub(crate) struct PtrInner<'a, T>
    where
        T: ?Sized,
    {
        /// # Invariants
        ///
        /// 0. If `ptr`'s referent is not zero sized, then `ptr` is derived from
        ///    some valid Rust allocation, `A`.
        /// 1. If `ptr`'s referent is not zero sized, then `ptr` has valid
        ///    provenance for `A`.
        /// 2. If `ptr`'s referent is not zero sized, then `ptr` addresses a
        ///    byte range which is entirely contained in `A`.
        /// 3. `ptr` addresses a byte range whose length fits in an `isize`.
        /// 4. `ptr` addresses a byte range which does not wrap around the
        ///    address space.
        /// 5. If `ptr`'s referent is not zero sized, then `A` is guaranteed to
        ///    live for at least `'a`.
        ptr: NonNull<T>,
        // SAFETY: `&'a UnsafeCell<T>` is covariant in `'a` and invariant in `T`
        // [1]. We use this construction rather than the equivalent `&mut T`,
        // because our MSRV of 1.65 prohibits `&mut` types in const contexts.
        //
        // [1] https://doc.rust-lang.org/1.81.0/reference/subtyping.html#variance
        _marker: PhantomData<&'a core::cell::UnsafeCell<T>>,
    }

    impl<'a, T: 'a + ?Sized> Copy for PtrInner<'a, T> {}
    impl<'a, T: 'a + ?Sized> Clone for PtrInner<'a, T> {
        fn clone(&self) -> PtrInner<'a, T> {
            // SAFETY: None of the invariants on `ptr` are affected by having
            // multiple copies of a `PtrInner`.
            *self
        }
    }

    impl<'a, T: 'a + ?Sized> PtrInner<'a, T> {
        /// Constructs a `PtrInner` from a [`NonNull`].
        ///
        /// # Safety
        ///
        /// The caller promises that:
        ///
        /// 0. If `ptr`'s referent is not zero sized, then `ptr` is derived from
        ///    some valid Rust allocation, `A`.
        /// 1. If `ptr`'s referent is not zero sized, then `ptr` has valid
        ///    provenance for `A`.
        /// 2. If `ptr`'s referent is not zero sized, then `ptr` addresses a
        ///    byte range which is entirely contained in `A`.
        /// 3. `ptr` addresses a byte range whose length fits in an `isize`.
        /// 4. `ptr` addresses a byte range which does not wrap around the
        ///    address space.
        /// 5. If `ptr`'s referent is not zero sized, then `A` is guaranteed to
        ///    live for at least `'a`.
        pub(crate) const unsafe fn new(ptr: NonNull<T>) -> PtrInner<'a, T> {
            // SAFETY: The caller has promised to satisfy all safety invariants
            // of `PtrInner`.
            Self { ptr, _marker: PhantomData }
        }

        /// Converts this `PtrInner<T>` to a [`NonNull<T>`].
        ///
        /// Note that this method does not consume `self`. The caller should
        /// watch out for `unsafe` code which uses the returned `NonNull` in a
        /// way that violates the safety invariants of `self`.
        pub(crate) const fn as_non_null(&self) -> NonNull<T> {
            self.ptr
        }
    }
}

impl<'a, T: ?Sized> PtrInner<'a, T> {
    /// Constructs a `PtrInner` from a reference.
    #[inline]
    pub(crate) fn from_ref(ptr: &'a T) -> Self {
        let ptr = NonNull::from(ptr);
        // SAFETY:
        // 0. If `ptr`'s referent is not zero sized, then `ptr`, by invariant on
        //    `&'a T`, is derived from some valid Rust allocation, `A`.
        // 1. If `ptr`'s referent is not zero sized, then `ptr`, by invariant on
        //    `&'a T`, has valid provenance for `A`.
        // 2. If `ptr`'s referent is not zero sized, then `ptr`, by invariant on
        //    `&'a T`, addresses a byte range which is entirely contained in
        //    `A`.
        // 3. `ptr`, by invariant on `&'a T`, addresses a byte range whose
        //    length fits in an `isize`.
        // 4. `ptr`, by invariant on `&'a T`, addresses a byte range which does
        //    not wrap around the address space.
        // 5. If `ptr`'s referent is not zero sized, then `A`, by invariant on
        //    `&'a T`, is guaranteed to live for at least `'a`.
        unsafe { Self::new(ptr) }
    }

    /// Constructs a `PtrInner` from a mutable reference.
    #[inline]
    pub(crate) fn from_mut(ptr: &'a mut T) -> Self {
        let ptr = NonNull::from(ptr);
        // SAFETY:
        // 0. If `ptr`'s referent is not zero sized, then `ptr`, by invariant on
        //    `&'a mut T`, is derived from some valid Rust allocation, `A`.
        // 1. If `ptr`'s referent is not zero sized, then `ptr`, by invariant on
        //    `&'a mut T`, has valid provenance for `A`.
        // 2. If `ptr`'s referent is not zero sized, then `ptr`, by invariant on
        //    `&'a mut T`, addresses a byte range which is entirely contained in
        //    `A`.
        // 3. `ptr`, by invariant on `&'a mut T`, addresses a byte range whose
        //    length fits in an `isize`.
        // 4. `ptr`, by invariant on `&'a mut T`, addresses a byte range which
        //    does not wrap around the address space.
        // 5. If `ptr`'s referent is not zero sized, then `A`, by invariant on
        //    `&'a mut T`, is guaranteed to live for at least `'a`.
        unsafe { Self::new(ptr) }
    }
}
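
// A small sketch exercising the constructors above: `from_ref` captures a
// shared reference and `as_non_null` exposes the same address without
// consuming the `PtrInner`. The module and test names are illustrative only.
#[cfg(test)]
mod from_ref_example {
    use super::*;

    #[test]
    fn from_ref_preserves_address() {
        let x = 5u32;
        let ptr = PtrInner::from_ref(&x);
        // `as_non_null` returns a pointer to `x` itself.
        assert_eq!(ptr.as_non_null().as_ptr() as *const u32, &x as *const u32);
    }
}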

#[allow(clippy::needless_lifetimes)]
impl<'a, T> PtrInner<'a, [T]> {
    /// Creates a pointer which addresses the given `range` of self.
    ///
    /// # Safety
    ///
    /// `range` is a valid range (`start <= end`) and `end <= self.len()`.
    pub(crate) unsafe fn slice_unchecked(self, range: Range<usize>) -> Self {
        let base = self.as_non_null().cast::<T>().as_ptr();

        // SAFETY: The caller promises that `start <= end <= self.len()`. By
        // invariant, if `self`'s referent is not zero-sized, then `self` refers
        // to a byte range which is contained within a single allocation, which
        // is no more than `isize::MAX` bytes long, and which does not wrap
        // around the address space. Thus, this pointer arithmetic remains
        // in-bounds of the same allocation, and does not wrap around the
        // address space. The offset (in bytes) does not overflow `isize`.
        //
        // If `self`'s referent is zero-sized, then these conditions are
        // trivially satisfied.
        let base = unsafe { base.add(range.start) };

        // SAFETY: The caller promises that `start <= end`, and so this will not
        // underflow.
        #[allow(unstable_name_collisions, clippy::incompatible_msrv)]
        let len = unsafe { range.end.unchecked_sub(range.start) };

        let ptr = core::ptr::slice_from_raw_parts_mut(base, len);

        // SAFETY: By invariant, `self`'s address is non-null and its range does
        // not wrap around the address space. Since, by the preceding lemma,
        // `ptr` addresses a range within that addressed by `self`, `ptr` is
        // non-null.
        let ptr = unsafe { NonNull::new_unchecked(ptr) };

        // SAFETY:
        //
        // Lemma 0: `ptr` addresses a subset of the bytes addressed by `self`,
        //          and has the same provenance. Proof: The caller guarantees
        //          that `start <= end <= self.len()`. Thus, `base` is in-bounds
        //          of `self`, and `base + (end - start)` is also in-bounds of
        //          `self`. Finally, `ptr` is constructed using
        //          provenance-preserving operations.
        //
        // 0. Per Lemma 0 and by invariant on `self`, if `ptr`'s referent is not
        //    zero sized, then `ptr` is derived from some valid Rust allocation,
        //    `A`.
        // 1. Per Lemma 0 and by invariant on `self`, if `ptr`'s referent is not
        //    zero sized, then `ptr` has valid provenance for `A`.
        // 2. Per Lemma 0 and by invariant on `self`, if `ptr`'s referent is not
        //    zero sized, then `ptr` addresses a byte range which is entirely
        //    contained in `A`.
        // 3. Per Lemma 0 and by invariant on `self`, `ptr` addresses a byte
        //    range whose length fits in an `isize`.
        // 4. Per Lemma 0 and by invariant on `self`, `ptr` addresses a byte
        //    range which does not wrap around the address space.
        // 5. Per Lemma 0 and by invariant on `self`, if `ptr`'s referent is not
        //    zero sized, then `A` is guaranteed to live for at least `'a`.
        unsafe { PtrInner::new(ptr) }
    }

    /// Splits the slice in two.
    ///
    /// # Safety
    ///
    /// The caller promises that `l_len <= self.len()`.
    ///
    /// Given `let (left, right) = ptr.split_at(l_len)`, it is guaranteed
    /// that `left` and `right` are contiguous and non-overlapping.
    pub(crate) unsafe fn split_at(self, l_len: usize) -> (Self, Self) {
        // SAFETY: The caller promises that `l_len <= self.len()`.
        // Trivially, `0 <= l_len`.
        let left = unsafe { self.slice_unchecked(0..l_len) };

        // SAFETY: The caller promises that `l_len <= self.len()`. Trivially,
        // `self.len() <= self.len()`.
        let right = unsafe { self.slice_unchecked(l_len..self.len()) };

        // SAFETY: `left` and `right` are non-overlapping. Proof: `left` is
        // constructed from `self` with `l_len` as its (exclusive) upper
        // bound, while `right` is constructed from `self` with `l_len` as
        // its (inclusive) lower bound. Thus, no index is a member of both
        // ranges.
        (left, right)
    }

    /// Iteratively projects each element of `PtrInner<[T]>` as a
    /// `PtrInner<T>`.
    pub(crate) fn iter(&self) -> impl Iterator<Item = PtrInner<'a, T>> {
        // TODO(#429): Once `NonNull::cast` documents that it preserves
        // provenance, cite those docs.
        let base = self.as_non_null().cast::<T>().as_ptr();
        (0..self.len()).map(move |i| {
            // TODO(https://github.com/rust-lang/rust/issues/74265): Use
            // `NonNull::get_unchecked_mut`.

            // SAFETY: If the following conditions are not satisfied,
            // `pointer::add` may induce Undefined Behavior [1]:
            //
            // > - The computed offset, `count * size_of::<T>()` bytes, must not
            // >   overflow `isize`.
            // > - If the computed offset is non-zero, then `self` must be
            // >   derived from a pointer to some allocated object, and the
            // >   entire memory range between `self` and the result must be in
            // >   bounds of that allocated object. In particular, this range
            // >   must not “wrap around” the edge of the address space.
            //
            // [1] https://doc.rust-lang.org/std/primitive.pointer.html#method.add
            //
            // We satisfy both of these conditions here:
            // - By invariant on `Ptr`, `self` addresses a byte range whose
            //   length fits in an `isize`. Since `elem` is contained in `self`,
            //   the computed offset of `elem` must fit within `isize`.
            // - If the computed offset is non-zero, then this means that the
            //   referent is not zero-sized. In this case, `base` points to an
            //   allocated object (by invariant on `self`). Thus:
            //   - By contract, `self.len()` accurately reflects the number of
            //     elements in the slice. `i` is in bounds of `self.len()` by
            //     construction, and so the result of this addition cannot
            //     overflow past the end of the allocation referred to by
            //     `self`.
            //   - By invariant on `Ptr`, `self` addresses a byte range which
            //     does not wrap around the address space. Since `elem` is
            //     contained in `self`, the computed offset of `elem` does not
            //     wrap around the address space.
            //
            // TODO(#429): Once `pointer::add` documents that it preserves
            // provenance, cite those docs.
            let elem = unsafe { base.add(i) };

            // SAFETY:
            //  - `elem` must not be null. `base` is constructed from a
            //    `NonNull` pointer, and the addition that produces `elem` must
            //    not overflow or wrap around, so `elem >= base > 0`.
            //
            // TODO(#429): Once `NonNull::new_unchecked` documents that it
            // preserves provenance, cite those docs.
            let elem = unsafe { NonNull::new_unchecked(elem) };

            // SAFETY: The safety invariants of `Ptr::new` (see definition) are
            // satisfied:
            // 0. If `elem`'s referent is not zero sized, then `elem` is derived
            //    from a valid Rust allocation, because `self` is derived from a
            //    valid Rust allocation, by invariant on `Ptr`.
            // 1. If `elem`'s referent is not zero sized, then `elem` has valid
            //    provenance for `self`, because it is derived from `self` using
            //    a series of provenance-preserving operations.
            // 2. If `elem`'s referent is not zero sized, then `elem` is
            //    entirely contained in the allocation of `self` (see above).
            // 3. `elem` addresses a byte range whose length fits in an `isize`
            //    (see above).
            // 4. `elem` addresses a byte range which does not wrap around the
            //    address space (see above).
            // 5. If `elem`'s referent is not zero sized, then the allocation of
            //    `elem` is guaranteed to live for at least `'a`, because `elem`
            //    is entirely contained in `self`, which lives for at least `'a`
            //    by invariant on `Ptr`.
            unsafe { PtrInner::new(elem) }
        })
    }

    /// The number of slice elements in the object referenced by `self`.
    ///
    /// # Safety
    ///
    /// Unsafe code may rely on `len` satisfying the above contract.
    pub(crate) fn len(&self) -> usize {
        self.trailing_slice_len()
    }
}
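
// An illustrative sketch of `slice_unchecked`, `iter`, and `len` on a slice
// pointer: slicing a middle range yields a pointer of the expected length
// whose elements can be read back. Names here are illustrative only.
#[cfg(test)]
mod slice_example {
    use super::*;

    #[test]
    fn slice_unchecked_middle_range() {
        let arr = [0u8, 1, 2, 3, 4, 5, 6, 7];
        let ptr = PtrInner::from_ref(&arr[..]);
        assert_eq!(ptr.len(), 8);
        // SAFETY: `2 <= 6` and `6 <= ptr.len()`.
        let mid = unsafe { ptr.slice_unchecked(2..6) };
        assert_eq!(mid.len(), 4);
        // SAFETY: Each element is an initialized `u8`, and nothing modifies
        // `arr` concurrently.
        let sum: u8 = mid.iter().map(|p| unsafe { p.read_unaligned() }).sum();
        assert_eq!(sum, 2 + 3 + 4 + 5);
    }
}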

#[allow(clippy::needless_lifetimes)]
impl<'a, T> PtrInner<'a, T>
where
    T: ?Sized + KnownLayout<PointerMetadata = usize>,
{
    /// The number of trailing slice elements in the object referenced by
    /// `self`.
    ///
    /// # Safety
    ///
    /// Unsafe code may rely on `trailing_slice_len` satisfying the above
    /// contract.
    pub(super) fn trailing_slice_len(&self) -> usize {
        T::pointer_to_metadata(self.as_non_null().as_ptr())
    }
}

impl<'a, T, const N: usize> PtrInner<'a, [T; N]> {
    /// Casts this pointer-to-array into a slice.
    ///
    /// # Safety
    ///
    /// Callers may assume that the returned `PtrInner` references the same
    /// address and length as `self`.
    #[allow(clippy::wrong_self_convention)]
    pub(crate) fn as_slice(self) -> PtrInner<'a, [T]> {
        let start = self.as_non_null().cast::<T>().as_ptr();
        let slice = core::ptr::slice_from_raw_parts_mut(start, N);
        // SAFETY: `slice` is not null, because it is derived from `start`
        // which is non-null.
        let slice = unsafe { NonNull::new_unchecked(slice) };
        // SAFETY: Lemma: In the following safety arguments, note that `slice`
        // is derived from `self` in two steps: first, by casting `self: [T; N]`
        // to `start: T`, then by constructing a pointer to a slice starting at
        // `start` of length `N`. As a result, `slice` references exactly the
        // same allocation as `self`, if any.
        //
        // 0. By the above lemma, if `slice`'s referent is not zero sized, then
        //    `slice` is derived from the same allocation as `self`, which, by
        //    invariant on `Ptr`, is valid.
        // 1. By the above lemma, if `slice`'s referent is not zero sized, then
        //    `slice` has valid provenance for `A`, since it is derived from
        //    the pointer `self`, which, by invariant on `Ptr`, has valid
        //    provenance for `A`.
        // 2. By the above lemma, if `slice`'s referent is not zero sized, then
        //    `slice` addresses a byte range which is entirely contained in `A`,
        //    because it references exactly the same byte range as `self`,
        //    which, by invariant on `Ptr`, is entirely contained in `A`.
        // 3. By the above lemma, `slice` addresses a byte range whose length
        //    fits in an `isize`, since it addresses exactly the same byte range
        //    as `self`, which, by invariant on `Ptr`, has a length that fits in
        //    an `isize`.
        // 4. By the above lemma, `slice` addresses a byte range which does not
        //    wrap around the address space, since it addresses exactly the same
        //    byte range as `self`, which, by invariant on `Ptr`, does not wrap
        //    around the address space.
        // 5. By the above lemma, if `slice`'s referent is not zero sized, then
        //    `A` is guaranteed to live for at least `'a`, because it is derived
        //    from the same allocation as `self`, which, by invariant on `Ptr`,
        //    lives for at least `'a`.
        unsafe { PtrInner::new(slice) }
    }
}
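
// A brief sketch of `as_slice`, relying on its documented contract: the
// returned slice pointer has the same address as the array pointer and
// length `N`. Module and test names are illustrative only.
#[cfg(test)]
mod as_slice_example {
    use super::*;

    #[test]
    fn as_slice_preserves_address_and_length() {
        let arr = [7u16; 3];
        let ptr = PtrInner::from_ref(&arr);
        let slice = ptr.as_slice();
        assert_eq!(slice.len(), 3);
        let slice_addr = slice.as_non_null().cast::<u16>().as_ptr() as *const u16;
        assert_eq!(slice_addr, arr.as_ptr());
    }
}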

impl<'a> PtrInner<'a, [u8]> {
    /// Attempts to cast `self` to a `U` using the given cast type.
    ///
    /// If `U` is a slice DST and pointer metadata (`meta`) is provided, then
    /// the cast will only succeed if it would produce an object with the given
    /// metadata.
    ///
    /// Returns an error if the resulting `U` would be invalidly-aligned, if no
    /// `U` can fit in `self`, or if the provided pointer metadata describes an
    /// invalid instance of `U`. On success, returns a pointer to the
    /// largest-possible `U` which fits in `self`.
    ///
    /// # Safety
    ///
    /// The caller may assume that this implementation is correct, and may rely
    /// on that assumption for the soundness of their code. In particular, the
    /// caller may assume that, if `try_cast_into` returns `Ok((ptr,
    /// remainder))`, then `ptr` and `remainder` refer to non-overlapping byte
    /// ranges within `self`, and that `ptr` and `remainder` entirely cover
    /// `self`. Finally:
    /// - If this is a prefix cast, `ptr` has the same address as `self`.
    /// - If this is a suffix cast, `remainder` has the same address as `self`.
    #[inline]
    pub(crate) fn try_cast_into<U>(
        self,
        cast_type: CastType,
        meta: Option<U::PointerMetadata>,
    ) -> Result<(PtrInner<'a, U>, PtrInner<'a, [u8]>), CastError<Self, U>>
    where
        U: 'a + ?Sized + KnownLayout,
    {
        let layout = match meta {
            None => U::LAYOUT,
            // This can return `None` if the metadata describes an object
            // which can't fit in an `isize`.
            Some(meta) => {
                let size = match meta.size_for_metadata(U::LAYOUT) {
                    Some(size) => size,
                    None => return Err(CastError::Size(SizeError::new(self))),
                };
                DstLayout { align: U::LAYOUT.align, size_info: crate::SizeInfo::Sized { size } }
            }
        };
        // PANICS: By invariant, the byte range addressed by
        // `self.as_non_null()` does not wrap around the address space. This
        // implies that the sum of the address (represented as a `usize`) and
        // length do not overflow `usize`, as required by
        // `validate_cast_and_convert_metadata`. Thus, this call to
        // `validate_cast_and_convert_metadata` will only panic if `U` is a DST
        // whose trailing slice element is zero-sized.
        let maybe_metadata = layout.validate_cast_and_convert_metadata(
            AsAddress::addr(self.as_non_null().as_ptr()),
            self.len(),
            cast_type,
        );

        let (elems, split_at) = match maybe_metadata {
            Ok((elems, split_at)) => (elems, split_at),
            Err(MetadataCastError::Alignment) => {
                // SAFETY: Since `validate_cast_and_convert_metadata` returned
                // an alignment error, `U` must have an alignment requirement
                // greater than one.
                let err = unsafe { AlignmentError::<_, U>::new_unchecked(self) };
                return Err(CastError::Alignment(err));
            }
            Err(MetadataCastError::Size) => return Err(CastError::Size(SizeError::new(self))),
        };

        // SAFETY: `validate_cast_and_convert_metadata` promises to return
        // `split_at <= self.len()`.
        let (l_slice, r_slice) = unsafe { self.split_at(split_at) };

        let (target, remainder) = match cast_type {
            CastType::Prefix => (l_slice, r_slice),
            CastType::Suffix => (r_slice, l_slice),
        };

        let base = target.as_non_null().cast::<u8>();

        let elems = <U as KnownLayout>::PointerMetadata::from_elem_count(elems);
        // For a slice DST type, if `meta` is `Some(elems)`, then we synthesize
        // `layout` to describe a sized type whose size is equal to the size of
        // the instance that we are asked to cast. For sized types,
        // `validate_cast_and_convert_metadata` returns `elems == 0`. Thus, in
        // this case, we need to use the `elems` passed by the caller, not the
        // one returned by `validate_cast_and_convert_metadata`.
        let elems = meta.unwrap_or(elems);

        let ptr = U::raw_from_ptr_len(base, elems);

        // SAFETY:
        // 0. By invariant, if `target`'s referent is not zero sized, then
        //    `target` is derived from some valid Rust allocation, `A`. By
        //    contract on `cast`, `ptr` is derived from `self`, and thus from
        //    the same valid Rust allocation, `A`.
        // 1. By invariant, if `target`'s referent is not zero sized, then
        //    `target` has provenance valid for some Rust allocation, `A`.
        //    Because `ptr` is derived from `target` via provenance-preserving
        //    operations, `ptr` will also have provenance valid for `A`.
        // -  `validate_cast_and_convert_metadata` promises that the object
        //    described by `elems` and `split_at` lives at a byte range which is
        //    a subset of the input byte range. Thus:
        //    2. Since, by invariant, if `target`'s referent is not zero sized,
        //       then `target` addresses a byte range which is entirely
        //       contained in `A`, so does `ptr`.
        //    3. Since, by invariant, `target` addresses a byte range whose
        //       length fits in an `isize`, so does `ptr`.
        //    4. Since, by invariant, `target` addresses a byte range which does
        //       not wrap around the address space, so does `ptr`.
        //    5. Since, by invariant, if `target`'s referent is not zero sized,
        //       then `target` refers to an allocation which is guaranteed to
        //       live for at least `'a`, so does `ptr`.
        Ok((unsafe { PtrInner::new(ptr) }, remainder))
    }
}
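
// An illustrative prefix cast relying only on the documented contract of
// `try_cast_into`: on success, the target of a prefix cast shares `self`'s
// address and the remainder covers the leftover bytes. `[u8; 4]` is chosen
// as the target type on the assumption that arrays of sized elements
// implement `KnownLayout`; its alignment of 1 cannot trigger an alignment
// error. Module and test names are illustrative only.
#[cfg(test)]
mod try_cast_into_example {
    use super::*;

    #[test]
    fn prefix_cast_into_byte_array() {
        let buf = [0u8; 8];
        let ptr = PtrInner::from_ref(&buf[..]);
        let result = ptr.try_cast_into::<[u8; 4]>(CastType::Prefix, None);
        let (target, remainder) = match result {
            Ok(parts) => parts,
            Err(_) => panic!("a 4-byte prefix fits in an 8-byte buffer"),
        };
        // Per the documented contract, a prefix cast starts at `self`'s
        // address...
        assert_eq!(target.as_non_null().as_ptr() as *const u8, buf.as_ptr());
        // ...and the remainder covers the bytes left over.
        assert_eq!(remainder.len(), 4);
    }
}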

#[allow(clippy::needless_lifetimes)]
impl<'a, T> PtrInner<'a, T> {
    /// Performs an unaligned read of `self`'s referent.
    ///
    /// # Safety
    ///
    /// `self` must point to a properly initialized value of type `T`, and
    /// reading a copy of `T` must not violate `T`'s safety invariants.
    ///
    /// `self`'s referent must not be concurrently modified during this call.
    pub(crate) unsafe fn read_unaligned(self) -> T {
        let raw = self.as_non_null().as_ptr();
        // SAFETY: The caller promises that `self` points to a bit-valid `T` and
        // that reading a copy of it won't violate `T`'s safety invariants. The
        // caller promises that `self`'s referent won't be concurrently modified
        // during this operation.
        //
        // `raw` is valid for reads:
        // - `self.as_non_null()` returns a `NonNull`, which is guaranteed to be
        //   non-null.
        // - By invariant on `PtrInner`, `raw`'s referent is either zero-sized
        //   or `raw`:
        //   - ...is within bounds of a single allocated object which lives for
        //     at least `'a`.
        //   - ...has valid provenance for that object.
        unsafe { core::ptr::read_unaligned(raw) }
    }
}
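
// A minimal sketch of `read_unaligned`: reading through a `PtrInner` built
// from a plain reference copies the referent out by value. Module and test
// names are illustrative only.
#[cfg(test)]
mod read_unaligned_example {
    use super::*;

    #[test]
    fn read_unaligned_copies_referent() {
        let x = 0x1234_5678u32;
        let ptr = PtrInner::from_ref(&x);
        // SAFETY: `ptr` points to an initialized `u32`, copying a `u32`
        // violates no safety invariants, and nothing modifies `x` during
        // this call.
        let copy = unsafe { ptr.read_unaligned() };
        assert_eq!(copy, x);
    }
}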

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_split_at() {
        const N: usize = 16;
        let arr = [1; N];
        let ptr = PtrInner::from_ref(&arr).as_slice();
        for i in 0..=N {
            assert_eq!(ptr.len(), N);
            // SAFETY: `i` is in bounds by construction.
            let (l, r) = unsafe { ptr.split_at(i) };
            // SAFETY: Points to a valid value by construction.
            let l_sum: usize = l.iter().map(|ptr| unsafe { ptr.read_unaligned() }).sum();
            // SAFETY: Points to a valid value by construction.
            let r_sum: usize = r.iter().map(|ptr| unsafe { ptr.read_unaligned() }).sum();
            assert_eq!(l_sum, i);
            assert_eq!(r_sum, N - i);
            assert_eq!(l_sum + r_sum, N);
        }
    }
}