• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 //! Functions concerning immediate values and operands, and reading from operands.
2 //! All high-level functions to read from memory work on operands as sources.
3 
4 use either::{Either, Left, Right};
5 
6 use rustc_hir::def::Namespace;
7 use rustc_middle::ty::layout::{LayoutOf, TyAndLayout};
8 use rustc_middle::ty::print::{FmtPrinter, PrettyPrinter};
9 use rustc_middle::ty::{ConstInt, Ty, ValTree};
10 use rustc_middle::{mir, ty};
11 use rustc_span::Span;
12 use rustc_target::abi::{self, Abi, Align, HasDataLayout, Size};
13 
14 use super::{
15     alloc_range, from_known_layout, mir_assign_valid_types, AllocId, ConstValue, Frame, GlobalId,
16     InterpCx, InterpResult, MPlaceTy, Machine, MemPlace, MemPlaceMeta, Place, PlaceTy, Pointer,
17     Provenance, Scalar,
18 };
19 
/// An `Immediate` represents a single immediate self-contained Rust value.
///
/// For optimization of a few very common cases, there is also a representation for a pair of
/// primitive values (`ScalarPair`). It allows Miri to avoid making allocations for checked binary
/// operations and wide pointers. This idea was taken from rustc's codegen.
/// In particular, thanks to `ScalarPair`, arithmetic operations and casts can be entirely
/// defined on `Immediate`, and do not have to work with a `Place`.
#[derive(Copy, Clone, Debug)]
pub enum Immediate<Prov: Provenance = AllocId> {
    /// A single scalar value (must have *initialized* `Scalar` ABI).
    Scalar(Scalar<Prov>),
    /// A pair of two scalar values (must have `ScalarPair` ABI where both fields are
    /// `Scalar::Initialized`).
    ScalarPair(Scalar<Prov>, Scalar<Prov>),
    /// A value of fully uninitialized memory. Can have arbitrary size and layout.
    Uninit,
}
37 
38 impl<Prov: Provenance> From<Scalar<Prov>> for Immediate<Prov> {
39     #[inline(always)]
from(val: Scalar<Prov>) -> Self40     fn from(val: Scalar<Prov>) -> Self {
41         Immediate::Scalar(val)
42     }
43 }
44 
45 impl<Prov: Provenance> Immediate<Prov> {
from_pointer(p: Pointer<Prov>, cx: &impl HasDataLayout) -> Self46     pub fn from_pointer(p: Pointer<Prov>, cx: &impl HasDataLayout) -> Self {
47         Immediate::Scalar(Scalar::from_pointer(p, cx))
48     }
49 
from_maybe_pointer(p: Pointer<Option<Prov>>, cx: &impl HasDataLayout) -> Self50     pub fn from_maybe_pointer(p: Pointer<Option<Prov>>, cx: &impl HasDataLayout) -> Self {
51         Immediate::Scalar(Scalar::from_maybe_pointer(p, cx))
52     }
53 
new_slice(val: Scalar<Prov>, len: u64, cx: &impl HasDataLayout) -> Self54     pub fn new_slice(val: Scalar<Prov>, len: u64, cx: &impl HasDataLayout) -> Self {
55         Immediate::ScalarPair(val, Scalar::from_target_usize(len, cx))
56     }
57 
new_dyn_trait( val: Scalar<Prov>, vtable: Pointer<Option<Prov>>, cx: &impl HasDataLayout, ) -> Self58     pub fn new_dyn_trait(
59         val: Scalar<Prov>,
60         vtable: Pointer<Option<Prov>>,
61         cx: &impl HasDataLayout,
62     ) -> Self {
63         Immediate::ScalarPair(val, Scalar::from_maybe_pointer(vtable, cx))
64     }
65 
66     #[inline]
67     #[cfg_attr(debug_assertions, track_caller)] // only in debug builds due to perf (see #98980)
to_scalar(self) -> Scalar<Prov>68     pub fn to_scalar(self) -> Scalar<Prov> {
69         match self {
70             Immediate::Scalar(val) => val,
71             Immediate::ScalarPair(..) => bug!("Got a scalar pair where a scalar was expected"),
72             Immediate::Uninit => bug!("Got uninit where a scalar was expected"),
73         }
74     }
75 
76     #[inline]
77     #[cfg_attr(debug_assertions, track_caller)] // only in debug builds due to perf (see #98980)
to_scalar_pair(self) -> (Scalar<Prov>, Scalar<Prov>)78     pub fn to_scalar_pair(self) -> (Scalar<Prov>, Scalar<Prov>) {
79         match self {
80             Immediate::ScalarPair(val1, val2) => (val1, val2),
81             Immediate::Scalar(..) => bug!("Got a scalar where a scalar pair was expected"),
82             Immediate::Uninit => bug!("Got uninit where a scalar pair was expected"),
83         }
84     }
85 }
86 
// ScalarPair needs a type to interpret, so we often have an immediate and a type together
// as input for binary and cast operations.
#[derive(Clone, Debug)]
pub struct ImmTy<'tcx, Prov: Provenance = AllocId> {
    // Kept private; construct via the `ImmTy::from_*`/`uninit` constructors below.
    imm: Immediate<Prov>,
    /// The type and layout that give meaning to `imm`.
    pub layout: TyAndLayout<'tcx>,
}
94 
impl<Prov: Provenance> std::fmt::Display for ImmTy<'_, Prov> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        /// Helper function for printing a scalar to a FmtPrinter
        fn p<'a, 'tcx, Prov: Provenance>(
            cx: FmtPrinter<'a, 'tcx>,
            s: Scalar<Prov>,
            ty: Ty<'tcx>,
        ) -> Result<FmtPrinter<'a, 'tcx>, std::fmt::Error> {
            match s {
                Scalar::Int(int) => cx.pretty_print_const_scalar_int(int, ty, true),
                Scalar::Ptr(ptr, _sz) => {
                    // Just print the ptr value. `pretty_print_const_scalar_ptr` would also try to
                    // print what it points to, which would fail since it has no access to the local
                    // memory.
                    cx.pretty_print_const_pointer(ptr, ty)
                }
            }
        }
        ty::tls::with(|tcx| {
            match self.imm {
                Immediate::Scalar(s) => {
                    // Use the pretty-printer when the type can be lifted to the global lifetime.
                    if let Some(ty) = tcx.lift(self.layout.ty) {
                        let cx = FmtPrinter::new(tcx, Namespace::ValueNS);
                        f.write_str(&p(cx, s, ty)?.into_buffer())?;
                        return Ok(());
                    }
                    // Fallback: raw hex rendering of the scalar plus the type.
                    write!(f, "{:x}: {}", s, self.layout.ty)
                }
                Immediate::ScalarPair(a, b) => {
                    // FIXME(oli-obk): at least print tuples and slices nicely
                    write!(f, "({:x}, {:x}): {}", a, b, self.layout.ty)
                }
                Immediate::Uninit => {
                    write!(f, "uninit: {}", self.layout.ty)
                }
            }
        })
    }
}
134 
135 impl<'tcx, Prov: Provenance> std::ops::Deref for ImmTy<'tcx, Prov> {
136     type Target = Immediate<Prov>;
137     #[inline(always)]
deref(&self) -> &Immediate<Prov>138     fn deref(&self) -> &Immediate<Prov> {
139         &self.imm
140     }
141 }
142 
/// An `Operand` is the result of computing a `mir::Operand`. It can be immediate,
/// or still in memory. The latter is an optimization, to delay reading that chunk of
/// memory and to avoid having to store arbitrary-sized data here.
#[derive(Copy, Clone, Debug)]
pub enum Operand<Prov: Provenance = AllocId> {
    /// The value is available directly, without touching memory.
    Immediate(Immediate<Prov>),
    /// The value still lives in memory at the given place.
    Indirect(MemPlace<Prov>),
}
151 
/// An `Operand` together with the type/layout needed to interpret it.
#[derive(Clone, Debug)]
pub struct OpTy<'tcx, Prov: Provenance = AllocId> {
    op: Operand<Prov>, // Keep this private; it helps enforce invariants.
    /// The type and layout that give meaning to `op`.
    pub layout: TyAndLayout<'tcx>,
    /// rustc does not have a proper way to represent the type of a field of a `repr(packed)` struct:
    /// it needs to have a different alignment than the field type would usually have.
    /// So we represent this here with a separate field that "overwrites" `layout.align`.
    /// This means `layout.align` should never be used for an `OpTy`!
    /// `None` means "alignment does not matter since this is a by-value operand"
    /// (`Operand::Immediate`); this field is only relevant for `Operand::Indirect`.
    /// Also CTFE ignores alignment anyway, so this is for Miri only.
    pub align: Option<Align>,
}
165 
166 impl<'tcx, Prov: Provenance> std::ops::Deref for OpTy<'tcx, Prov> {
167     type Target = Operand<Prov>;
168     #[inline(always)]
deref(&self) -> &Operand<Prov>169     fn deref(&self) -> &Operand<Prov> {
170         &self.op
171     }
172 }
173 
174 impl<'tcx, Prov: Provenance> From<MPlaceTy<'tcx, Prov>> for OpTy<'tcx, Prov> {
175     #[inline(always)]
from(mplace: MPlaceTy<'tcx, Prov>) -> Self176     fn from(mplace: MPlaceTy<'tcx, Prov>) -> Self {
177         OpTy { op: Operand::Indirect(*mplace), layout: mplace.layout, align: Some(mplace.align) }
178     }
179 }
180 
181 impl<'tcx, Prov: Provenance> From<&'_ MPlaceTy<'tcx, Prov>> for OpTy<'tcx, Prov> {
182     #[inline(always)]
from(mplace: &MPlaceTy<'tcx, Prov>) -> Self183     fn from(mplace: &MPlaceTy<'tcx, Prov>) -> Self {
184         OpTy { op: Operand::Indirect(**mplace), layout: mplace.layout, align: Some(mplace.align) }
185     }
186 }
187 
188 impl<'tcx, Prov: Provenance> From<&'_ mut MPlaceTy<'tcx, Prov>> for OpTy<'tcx, Prov> {
189     #[inline(always)]
from(mplace: &mut MPlaceTy<'tcx, Prov>) -> Self190     fn from(mplace: &mut MPlaceTy<'tcx, Prov>) -> Self {
191         OpTy { op: Operand::Indirect(**mplace), layout: mplace.layout, align: Some(mplace.align) }
192     }
193 }
194 
195 impl<'tcx, Prov: Provenance> From<ImmTy<'tcx, Prov>> for OpTy<'tcx, Prov> {
196     #[inline(always)]
from(val: ImmTy<'tcx, Prov>) -> Self197     fn from(val: ImmTy<'tcx, Prov>) -> Self {
198         OpTy { op: Operand::Immediate(val.imm), layout: val.layout, align: None }
199     }
200 }
201 
202 impl<'tcx, Prov: Provenance> ImmTy<'tcx, Prov> {
203     #[inline]
from_scalar(val: Scalar<Prov>, layout: TyAndLayout<'tcx>) -> Self204     pub fn from_scalar(val: Scalar<Prov>, layout: TyAndLayout<'tcx>) -> Self {
205         ImmTy { imm: val.into(), layout }
206     }
207 
208     #[inline]
from_immediate(imm: Immediate<Prov>, layout: TyAndLayout<'tcx>) -> Self209     pub fn from_immediate(imm: Immediate<Prov>, layout: TyAndLayout<'tcx>) -> Self {
210         ImmTy { imm, layout }
211     }
212 
213     #[inline]
uninit(layout: TyAndLayout<'tcx>) -> Self214     pub fn uninit(layout: TyAndLayout<'tcx>) -> Self {
215         ImmTy { imm: Immediate::Uninit, layout }
216     }
217 
218     #[inline]
try_from_uint(i: impl Into<u128>, layout: TyAndLayout<'tcx>) -> Option<Self>219     pub fn try_from_uint(i: impl Into<u128>, layout: TyAndLayout<'tcx>) -> Option<Self> {
220         Some(Self::from_scalar(Scalar::try_from_uint(i, layout.size)?, layout))
221     }
222     #[inline]
from_uint(i: impl Into<u128>, layout: TyAndLayout<'tcx>) -> Self223     pub fn from_uint(i: impl Into<u128>, layout: TyAndLayout<'tcx>) -> Self {
224         Self::from_scalar(Scalar::from_uint(i, layout.size), layout)
225     }
226 
227     #[inline]
try_from_int(i: impl Into<i128>, layout: TyAndLayout<'tcx>) -> Option<Self>228     pub fn try_from_int(i: impl Into<i128>, layout: TyAndLayout<'tcx>) -> Option<Self> {
229         Some(Self::from_scalar(Scalar::try_from_int(i, layout.size)?, layout))
230     }
231 
232     #[inline]
from_int(i: impl Into<i128>, layout: TyAndLayout<'tcx>) -> Self233     pub fn from_int(i: impl Into<i128>, layout: TyAndLayout<'tcx>) -> Self {
234         Self::from_scalar(Scalar::from_int(i, layout.size), layout)
235     }
236 
237     #[inline]
to_const_int(self) -> ConstInt238     pub fn to_const_int(self) -> ConstInt {
239         assert!(self.layout.ty.is_integral());
240         let int = self.to_scalar().assert_int();
241         ConstInt::new(int, self.layout.ty.is_signed(), self.layout.ty.is_ptr_sized_integral())
242     }
243 }
244 
impl<'tcx, Prov: Provenance> OpTy<'tcx, Prov> {
    /// Returns the element count of this array/slice operand.
    /// For unsized operands, the length comes from the place metadata.
    pub fn len(&self, cx: &impl HasDataLayout) -> InterpResult<'tcx, u64> {
        if self.layout.is_unsized() {
            if matches!(self.op, Operand::Immediate(Immediate::Uninit)) {
                // Uninit unsized places shouldn't occur. In the interpreter we have them
                // temporarily for unsized arguments before their value is put in; in ConstProp they
                // remain uninit and this code can actually be reached.
                throw_inval!(UninitUnsizedLocal);
            }
            // There are no unsized immediates.
            self.assert_mem_place().len(cx)
        } else {
            // Sized case: the count is part of the layout itself.
            match self.layout.fields {
                abi::FieldsShape::Array { count, .. } => Ok(count),
                _ => bug!("len not supported on sized type {:?}", self.layout.ty),
            }
        }
    }

    /// Replace the layout of this operand. There's basically no sanity check that this makes sense,
    /// you better know what you are doing! If this is an immediate, applying the wrong layout can
    /// not just lead to invalid data, it can actually *shift the data around* since the offsets of
    /// a ScalarPair are entirely determined by the layout, not the data.
    pub fn transmute(&self, layout: TyAndLayout<'tcx>) -> Self {
        assert_eq!(
            self.layout.size, layout.size,
            "transmuting with a size change, that doesn't seem right"
        );
        OpTy { layout, ..*self }
    }

    /// Offset the operand in memory (if possible) and change its metadata.
    ///
    /// This can go wrong very easily if you give the wrong layout for the new place!
    pub(super) fn offset_with_meta(
        &self,
        offset: Size,
        meta: MemPlaceMeta<Prov>,
        layout: TyAndLayout<'tcx>,
        cx: &impl HasDataLayout,
    ) -> InterpResult<'tcx, Self> {
        match self.as_mplace_or_imm() {
            // In-memory operand: delegate to the place-based offset logic.
            Left(mplace) => Ok(mplace.offset_with_meta(offset, meta, layout, cx)?.into()),
            Right(imm) => {
                // Immediates cannot be offset into -- except `Uninit`, where any
                // sub-range is also just uninit.
                assert!(
                    matches!(*imm, Immediate::Uninit),
                    "Scalar/ScalarPair cannot be offset into"
                );
                assert!(!meta.has_meta()); // no place to store metadata here
                // Every part of an uninit is uninit.
                Ok(ImmTy::uninit(layout).into())
            }
        }
    }

    /// Offset the operand in memory (if possible).
    ///
    /// This can go wrong very easily if you give the wrong layout for the new place!
    pub fn offset(
        &self,
        offset: Size,
        layout: TyAndLayout<'tcx>,
        cx: &impl HasDataLayout,
    ) -> InterpResult<'tcx, Self> {
        // Without metadata, the target layout must be sized.
        assert!(layout.is_sized());
        self.offset_with_meta(offset, MemPlaceMeta::None, layout, cx)
    }
}
313 
314 impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
    /// Try reading an immediate in memory; this is interesting particularly for `ScalarPair`.
    /// Returns `None` if the layout does not permit loading this as a value.
    ///
    /// This is an internal function; call `read_immediate` instead.
    fn read_immediate_from_mplace_raw(
        &self,
        mplace: &MPlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx, Option<ImmTy<'tcx, M::Provenance>>> {
        if mplace.layout.is_unsized() {
            // Don't touch unsized
            return Ok(None);
        }

        let Some(alloc) = self.get_place_alloc(mplace)? else {
            // zero-sized type can be left uninit
            return Ok(Some(ImmTy::uninit(mplace.layout)));
        };

        // It may seem like all types with `Scalar` or `ScalarPair` ABI are fair game at this point.
        // However, `MaybeUninit<u64>` is considered a `Scalar` as far as its layout is concerned --
        // and yet cannot be represented by an interpreter `Scalar`, since we have to handle the
        // case where some of the bytes are initialized and others are not. So, we need an extra
        // check that walks over the type of `mplace` to make sure it is truly correct to treat this
        // like a `Scalar` (or `ScalarPair`).
        Ok(match mplace.layout.abi {
            Abi::Scalar(abi::Scalar::Initialized { value: s, .. }) => {
                let size = s.size(self);
                assert_eq!(size, mplace.layout.size, "abi::Scalar size does not match layout size");
                // Only read provenance when the scalar is a pointer; integers must not carry it.
                let scalar = alloc.read_scalar(
                    alloc_range(Size::ZERO, size),
                    /*read_provenance*/ matches!(s, abi::Pointer(_)),
                )?;
                Some(ImmTy { imm: scalar.into(), layout: mplace.layout })
            }
            Abi::ScalarPair(
                abi::Scalar::Initialized { value: a, .. },
                abi::Scalar::Initialized { value: b, .. },
            ) => {
                // We checked `ptr_align` above, so all fields will have the alignment they need.
                // We would anyway check against `ptr_align.restrict_for_offset(b_offset)`,
                // which `ptr.offset(b_offset)` cannot possibly fail to satisfy.
                let (a_size, b_size) = (a.size(self), b.size(self));
                let b_offset = a_size.align_to(b.align(self).abi);
                assert!(b_offset.bytes() > 0); // in `operand_field` we use the offset to tell apart the fields
                let a_val = alloc.read_scalar(
                    alloc_range(Size::ZERO, a_size),
                    /*read_provenance*/ matches!(a, abi::Pointer(_)),
                )?;
                let b_val = alloc.read_scalar(
                    alloc_range(b_offset, b_size),
                    /*read_provenance*/ matches!(b, abi::Pointer(_)),
                )?;
                Some(ImmTy { imm: Immediate::ScalarPair(a_val, b_val), layout: mplace.layout })
            }
            _ => {
                // Neither a scalar nor scalar pair.
                None
            }
        })
    }
375 
    /// Try returning an immediate for the operand. If the layout does not permit loading this as an
    /// immediate, return where in memory we can find the data.
    /// Note that for a given layout, this operation will either always return Left or Right!
    /// Whether it returns Left depends on whether the layout can be represented
    /// in an `Immediate`, not on which data is stored there currently.
    ///
    /// This is an internal function that should not usually be used; call `read_immediate` instead.
    /// ConstProp needs it, though.
    pub fn read_immediate_raw(
        &self,
        src: &OpTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx, Either<MPlaceTy<'tcx, M::Provenance>, ImmTy<'tcx, M::Provenance>>> {
        Ok(match src.as_mplace_or_imm() {
            Left(ref mplace) => {
                // In-memory operand: try to load it; fall back to returning the place.
                if let Some(val) = self.read_immediate_from_mplace_raw(mplace)? {
                    Right(val)
                } else {
                    Left(*mplace)
                }
            }
            // Already an immediate: nothing to do.
            Right(val) => Right(val),
        })
    }
399 
    /// Read an immediate from a place, asserting that that is possible with the given layout.
    ///
    /// If this succeeds, the `ImmTy` is never `Uninit`.
    #[inline(always)]
    pub fn read_immediate(
        &self,
        op: &OpTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
        // Only layouts with (initialized) Scalar/ScalarPair ABI may be read as immediates;
        // anything else is a caller bug.
        if !matches!(
            op.layout.abi,
            Abi::Scalar(abi::Scalar::Initialized { .. })
                | Abi::ScalarPair(abi::Scalar::Initialized { .. }, abi::Scalar::Initialized { .. })
        ) {
            span_bug!(self.cur_span(), "primitive read not possible for type: {:?}", op.layout.ty);
        }
        // Given the ABI check above, `read_immediate_raw` must return `Right` here.
        let imm = self.read_immediate_raw(op)?.right().unwrap();
        if matches!(*imm, Immediate::Uninit) {
            // Reading uninitialized memory as a value is UB.
            throw_ub!(InvalidUninitBytes(None));
        }
        Ok(imm)
    }
421 
422     /// Read a scalar from a place
read_scalar( &self, op: &OpTy<'tcx, M::Provenance>, ) -> InterpResult<'tcx, Scalar<M::Provenance>>423     pub fn read_scalar(
424         &self,
425         op: &OpTy<'tcx, M::Provenance>,
426     ) -> InterpResult<'tcx, Scalar<M::Provenance>> {
427         Ok(self.read_immediate(op)?.to_scalar())
428     }
429 
430     // Pointer-sized reads are fairly common and need target layout access, so we wrap them in
431     // convenience functions.
432 
433     /// Read a pointer from a place.
read_pointer( &self, op: &OpTy<'tcx, M::Provenance>, ) -> InterpResult<'tcx, Pointer<Option<M::Provenance>>>434     pub fn read_pointer(
435         &self,
436         op: &OpTy<'tcx, M::Provenance>,
437     ) -> InterpResult<'tcx, Pointer<Option<M::Provenance>>> {
438         self.read_scalar(op)?.to_pointer(self)
439     }
440     /// Read a pointer-sized unsigned integer from a place.
read_target_usize(&self, op: &OpTy<'tcx, M::Provenance>) -> InterpResult<'tcx, u64>441     pub fn read_target_usize(&self, op: &OpTy<'tcx, M::Provenance>) -> InterpResult<'tcx, u64> {
442         self.read_scalar(op)?.to_target_usize(self)
443     }
444     /// Read a pointer-sized signed integer from a place.
read_target_isize(&self, op: &OpTy<'tcx, M::Provenance>) -> InterpResult<'tcx, i64>445     pub fn read_target_isize(&self, op: &OpTy<'tcx, M::Provenance>) -> InterpResult<'tcx, i64> {
446         self.read_scalar(op)?.to_target_isize(self)
447     }
448 
449     /// Turn the wide MPlace into a string (must already be dereferenced!)
read_str(&self, mplace: &MPlaceTy<'tcx, M::Provenance>) -> InterpResult<'tcx, &str>450     pub fn read_str(&self, mplace: &MPlaceTy<'tcx, M::Provenance>) -> InterpResult<'tcx, &str> {
451         let len = mplace.len(self)?;
452         let bytes = self.read_bytes_ptr_strip_provenance(mplace.ptr, Size::from_bytes(len))?;
453         let str = std::str::from_utf8(bytes).map_err(|err| err_ub!(InvalidStr(err)))?;
454         Ok(str)
455     }
456 
    /// Converts a repr(simd) operand into an operand where `place_index` accesses the SIMD elements.
    /// Also returns the number of elements.
    ///
    /// Can (but does not always) trigger UB if `op` is uninitialized.
    pub fn operand_to_simd(
        &self,
        op: &OpTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx, (MPlaceTy<'tcx, M::Provenance>, u64)> {
        // Basically we just transmute this place into an array following simd_size_and_type.
        // This only works in memory, but repr(simd) types should never be immediates anyway.
        assert!(op.layout.ty.is_simd());
        match op.as_mplace_or_imm() {
            Left(mplace) => self.mplace_to_simd(&mplace),
            Right(imm) => match *imm {
                Immediate::Uninit => {
                    // Uninit SIMD operand: this is the (sometimes triggered) UB case.
                    throw_ub!(InvalidUninitBytes(None))
                }
                Immediate::Scalar(..) | Immediate::ScalarPair(..) => {
                    bug!("arrays/slices can never have Scalar/ScalarPair layout")
                }
            },
        }
    }
480 
    /// Read from a local.
    /// Will not access memory, instead an indirect `Operand` is returned.
    ///
    /// This is public because it is used by [priroda](https://github.com/oli-obk/priroda) to get an
    /// OpTy from a local.
    pub fn local_to_op(
        &self,
        frame: &Frame<'mir, 'tcx, M::Provenance, M::FrameExtra>,
        local: mir::Local,
        layout: Option<TyAndLayout<'tcx>>,
    ) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> {
        // Resolve the local's layout (computing it if the caller did not supply one).
        let layout = self.layout_of_local(frame, local, layout)?;
        // `access` fails if the local is dead; otherwise it yields the stored `Operand`.
        let op = *frame.locals[local].access()?;
        Ok(OpTy { op, layout, align: Some(layout.align.abi) })
    }
496 
    /// Every place can be read from, so we can turn them into an operand.
    /// This will definitely return `Indirect` if the place is a `Ptr`, i.e., this
    /// will never actually read from memory.
    #[inline(always)]
    pub fn place_to_op(
        &self,
        place: &PlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> {
        let op = match **place {
            // Pointer-backed place: just wrap the memory place.
            Place::Ptr(mplace) => Operand::Indirect(mplace),
            // Local place: read the operand stored in the frame.
            Place::Local { frame, local } => {
                *self.local_to_op(&self.stack()[frame], local, None)?
            }
        };
        // Keep the place's layout and alignment for the resulting operand.
        Ok(OpTy { op, layout: place.layout, align: Some(place.align) })
    }
513 
    /// Evaluate a place with the goal of reading from it. This lets us sometimes
    /// avoid allocations.
    pub fn eval_place_to_op(
        &self,
        mir_place: mir::Place<'tcx>,
        layout: Option<TyAndLayout<'tcx>>,
    ) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> {
        // Do not use the layout passed in as argument if the base we are looking at
        // here is not the entire place.
        let layout = if mir_place.projection.is_empty() { layout } else { None };

        // Start from the base local, then apply each projection element in turn.
        let mut op = self.local_to_op(self.frame(), mir_place.local, layout)?;
        // Using `try_fold` turned out to be bad for performance, hence the loop.
        for elem in mir_place.projection.iter() {
            op = self.operand_projection(&op, elem)?
        }

        trace!("eval_place_to_op: got {:?}", *op);
        // Sanity-check the type we ended up with.
        debug_assert!(
            mir_assign_valid_types(
                *self.tcx,
                self.param_env,
                self.layout_of(self.subst_from_current_frame_and_normalize_erasing_regions(
                    mir_place.ty(&self.frame().body.local_decls, *self.tcx).ty
                )?)?,
                op.layout,
            ),
            "eval_place of a MIR place with type {:?} produced an interpreter operand with type {:?}",
            mir_place.ty(&self.frame().body.local_decls, *self.tcx).ty,
            op.layout.ty,
        );
        Ok(op)
    }
548 
    /// Evaluate the operand, returning a place where you can then find the data.
    /// If you already know the layout, you can save two table lookups
    /// by passing it in here.
    #[inline]
    pub fn eval_operand(
        &self,
        mir_op: &mir::Operand<'tcx>,
        layout: Option<TyAndLayout<'tcx>>,
    ) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> {
        use rustc_middle::mir::Operand::*;
        let op = match mir_op {
            // FIXME: do some more logic on `move` to invalidate the old location
            &Copy(place) | &Move(place) => self.eval_place_to_op(place, layout)?,

            Constant(constant) => {
                // Substitute the frame's generics into the constant before evaluating it.
                let c =
                    self.subst_from_current_frame_and_normalize_erasing_regions(constant.literal)?;

                // This can still fail:
                // * During ConstProp, with `TooGeneric` or since the `required_consts` were not all
                //   checked yet.
                // * During CTFE, since promoteds in `const`/`static` initializer bodies can fail.
                self.eval_mir_constant(&c, Some(constant.span), layout)?
            }
        };
        trace!("{:?}: {:?}", mir_op, *op);
        Ok(op)
    }
577 
578     /// Evaluate a bunch of operands at once
eval_operands( &self, ops: &[mir::Operand<'tcx>], ) -> InterpResult<'tcx, Vec<OpTy<'tcx, M::Provenance>>>579     pub(super) fn eval_operands(
580         &self,
581         ops: &[mir::Operand<'tcx>],
582     ) -> InterpResult<'tcx, Vec<OpTy<'tcx, M::Provenance>>> {
583         ops.iter().map(|op| self.eval_operand(op, None)).collect()
584     }
585 
eval_ty_constant( &self, val: ty::Const<'tcx>, span: Option<Span>, ) -> InterpResult<'tcx, ValTree<'tcx>>586     fn eval_ty_constant(
587         &self,
588         val: ty::Const<'tcx>,
589         span: Option<Span>,
590     ) -> InterpResult<'tcx, ValTree<'tcx>> {
591         Ok(match val.kind() {
592             ty::ConstKind::Param(_) | ty::ConstKind::Placeholder(..) => {
593                 throw_inval!(TooGeneric)
594             }
595             // FIXME(generic_const_exprs): `ConstKind::Expr` should be able to be evaluated
596             ty::ConstKind::Expr(_) => throw_inval!(TooGeneric),
597             ty::ConstKind::Error(reported) => {
598                 throw_inval!(AlreadyReported(reported.into()))
599             }
600             ty::ConstKind::Unevaluated(uv) => {
601                 let instance = self.resolve(uv.def, uv.substs)?;
602                 let cid = GlobalId { instance, promoted: None };
603                 self.ctfe_query(span, |tcx| {
604                     tcx.eval_to_valtree(self.param_env.with_const().and(cid))
605                 })?
606                 .unwrap_or_else(|| bug!("unable to create ValTree for {uv:?}"))
607             }
608             ty::ConstKind::Bound(..) | ty::ConstKind::Infer(..) => {
609                 span_bug!(self.cur_span(), "unexpected ConstKind in ctfe: {val:?}")
610             }
611             ty::ConstKind::Value(valtree) => valtree,
612         })
613     }
614 
eval_mir_constant( &self, val: &mir::ConstantKind<'tcx>, span: Option<Span>, layout: Option<TyAndLayout<'tcx>>, ) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>>615     pub fn eval_mir_constant(
616         &self,
617         val: &mir::ConstantKind<'tcx>,
618         span: Option<Span>,
619         layout: Option<TyAndLayout<'tcx>>,
620     ) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> {
621         match *val {
622             mir::ConstantKind::Ty(ct) => {
623                 let ty = ct.ty();
624                 let valtree = self.eval_ty_constant(ct, span)?;
625                 let const_val = self.tcx.valtree_to_const_val((ty, valtree));
626                 self.const_val_to_op(const_val, ty, layout)
627             }
628             mir::ConstantKind::Val(val, ty) => self.const_val_to_op(val, ty, layout),
629             mir::ConstantKind::Unevaluated(uv, _) => {
630                 let instance = self.resolve(uv.def, uv.substs)?;
631                 Ok(self.eval_global(GlobalId { instance, promoted: uv.promoted }, span)?.into())
632             }
633         }
634     }
635 
const_val_to_op( &self, val_val: ConstValue<'tcx>, ty: Ty<'tcx>, layout: Option<TyAndLayout<'tcx>>, ) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>>636     pub(crate) fn const_val_to_op(
637         &self,
638         val_val: ConstValue<'tcx>,
639         ty: Ty<'tcx>,
640         layout: Option<TyAndLayout<'tcx>>,
641     ) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> {
642         // Other cases need layout.
643         let adjust_scalar = |scalar| -> InterpResult<'tcx, _> {
644             Ok(match scalar {
645                 Scalar::Ptr(ptr, size) => Scalar::Ptr(self.global_base_pointer(ptr)?, size),
646                 Scalar::Int(int) => Scalar::Int(int),
647             })
648         };
649         let layout = from_known_layout(self.tcx, self.param_env, layout, || self.layout_of(ty))?;
650         let op = match val_val {
651             ConstValue::ByRef { alloc, offset } => {
652                 let id = self.tcx.create_memory_alloc(alloc);
653                 // We rely on mutability being set correctly in that allocation to prevent writes
654                 // where none should happen.
655                 let ptr = self.global_base_pointer(Pointer::new(id, offset))?;
656                 Operand::Indirect(MemPlace::from_ptr(ptr.into()))
657             }
658             ConstValue::Scalar(x) => Operand::Immediate(adjust_scalar(x)?.into()),
659             ConstValue::ZeroSized => Operand::Immediate(Immediate::Uninit),
660             ConstValue::Slice { data, start, end } => {
661                 // We rely on mutability being set correctly in `data` to prevent writes
662                 // where none should happen.
663                 let ptr = Pointer::new(
664                     self.tcx.create_memory_alloc(data),
665                     Size::from_bytes(start), // offset: `start`
666                 );
667                 Operand::Immediate(Immediate::new_slice(
668                     Scalar::from_pointer(self.global_base_pointer(ptr)?, &*self.tcx),
669                     u64::try_from(end.checked_sub(start).unwrap()).unwrap(), // len: `end - start`
670                     self,
671                 ))
672             }
673         };
674         Ok(OpTy { op, layout, align: Some(layout.align.abi) })
675     }
676 }
677 
// Some nodes are used a lot. Make sure they don't unintentionally get bigger.
// (These sizes are only checked on 64-bit x86 so other targets don't break the build.)
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
mod size_asserts {
    use super::*;
    use rustc_data_structures::static_assert_size;
    // tidy-alphabetical-start
    static_assert_size!(Immediate, 48);
    static_assert_size!(ImmTy<'_>, 64);
    static_assert_size!(Operand, 56);
    static_assert_size!(OpTy<'_>, 80);
    // tidy-alphabetical-end
}
690