use std::cell::Cell;
use std::{fmt, mem};

use either::{Either, Left, Right};

use hir::CRATE_HIR_ID;
use rustc_hir::{self as hir, def_id::DefId, definitions::DefPathData};
use rustc_index::IndexVec;
use rustc_middle::mir;
use rustc_middle::mir::interpret::{ErrorHandled, InterpError, InvalidMetaKind, ReportedErrorInfo};
use rustc_middle::query::TyCtxtAt;
use rustc_middle::ty::layout::{
    self, FnAbiError, FnAbiOfHelpers, FnAbiRequest, LayoutError, LayoutOf, LayoutOfHelpers,
    TyAndLayout,
};
use rustc_middle::ty::{self, subst::SubstsRef, ParamEnv, Ty, TyCtxt, TypeFoldable};
use rustc_mir_dataflow::storage::always_storage_live_locals;
use rustc_session::Limit;
use rustc_span::Span;
use rustc_target::abi::{call::FnAbi, Align, HasDataLayout, Size, TargetDataLayout};

use super::{
    AllocId, GlobalId, Immediate, InterpErrorInfo, InterpResult, MPlaceTy, Machine, MemPlace,
    MemPlaceMeta, Memory, MemoryKind, Operand, Place, PlaceTy, PointerArithmetic, Provenance,
    Scalar, StackPopJump,
};
use crate::errors::{self, ErroneousConstUsed};
use crate::fluent_generated as fluent;
use crate::util;

pub struct InterpCx<'mir, 'tcx, M: Machine<'mir, 'tcx>> {
    /// Stores the `Machine` instance.
    ///
    /// Note: the stack is provided by the machine.
    pub machine: M,

    /// The compiler's type context, at the "root" span of the evaluation, i.e., the span
    /// of the const we are evaluating (if this is CTFE).
    pub tcx: TyCtxtAt<'tcx>,

    /// Bounds in scope for polymorphic evaluations.
    pub(crate) param_env: ty::ParamEnv<'tcx>,

    /// The virtual memory system.
    pub memory: Memory<'mir, 'tcx, M>,

    /// The recursion limit (cached from `tcx.recursion_limit(())`)
    pub recursion_limit: Limit,
}

// The `PhantomData` exists to prevent this type from being `Send`. If it were sent across a thread
// boundary and dropped in the other thread, it would exit the span in the other thread.
struct SpanGuard(tracing::Span, std::marker::PhantomData<*const u8>);

impl SpanGuard {
    /// By default a `SpanGuard` does nothing.
    fn new() -> Self {
        Self(tracing::Span::none(), std::marker::PhantomData)
    }

    /// If a span is entered, we exit the previous span (if any, normally none) and enter the
    /// new span. This is mainly so we don't have to use `Option` for the `tracing_span` field of
    /// `Frame` by creating a dummy span to begin with and then entering it once the frame has
    /// been pushed.
    fn enter(&mut self, span: tracing::Span) {
        // This executes the destructor on the previous instance of `SpanGuard`, ensuring that
        // we never enter or exit more spans than vice versa. Unless you `mem::forget`, then we
        // can't protect the tracing stack, but that'll just lead to weird logging, no actual
        // problems.
        *self = Self(span, std::marker::PhantomData);
        self.0.with_subscriber(|(id, dispatch)| {
            dispatch.enter(id);
        });
    }
}

impl Drop for SpanGuard {
    fn drop(&mut self) {
        self.0.with_subscriber(|(id, dispatch)| {
            dispatch.exit(id);
        });
    }
}

/// A stack frame.
pub struct Frame<'mir, 'tcx, Prov: Provenance = AllocId, Extra = ()> {
    ////////////////////////////////////////////////////////////////////////////////
    // Function and callsite information
    ////////////////////////////////////////////////////////////////////////////////
    /// The MIR for the function called on this frame.
    pub body: &'mir mir::Body<'tcx>,

    /// The def_id and substs of the current function.
    pub instance: ty::Instance<'tcx>,

    /// Extra data for the machine.
    pub extra: Extra,

    ////////////////////////////////////////////////////////////////////////////////
    // Return place and locals
    ////////////////////////////////////////////////////////////////////////////////
    /// Work to perform when returning from this function.
    pub return_to_block: StackPopCleanup,

    /// The location where the result of the current stack frame should be written to,
    /// and its layout in the caller.
    pub return_place: PlaceTy<'tcx, Prov>,

    /// The list of locals for this stack frame, stored in order as
    /// `[return_ptr, arguments..., variables..., temporaries...]`.
    /// The locals are stored as `LocalState`s: `LocalValue::Dead` represents a local
    /// that is currently dead, while a live local can either directly contain a
    /// `Scalar` or refer to some part of an `Allocation`.
    ///
    /// Do *not* access this directly; always go through the machine hook!
    pub locals: IndexVec<mir::Local, LocalState<'tcx, Prov>>,

    /// The `tracing` span for this frame.
    /// When the guard is dropped, the span is exited. This gives us
    /// a full stack trace on all tracing statements.
    tracing_span: SpanGuard,

    ////////////////////////////////////////////////////////////////////////////////
    // Current position within the function
    ////////////////////////////////////////////////////////////////////////////////
    /// If this is `Right`, we are not currently executing any particular statement in
    /// this frame (can happen e.g. during frame initialization, and during unwinding on
    /// frames without cleanup code).
    ///
    /// Needs to be public because ConstProp does unspeakable things to it.
    pub loc: Either<mir::Location, Span>,
}

/// What we store about a frame in an interpreter backtrace.
#[derive(Clone, Debug)]
pub struct FrameInfo<'tcx> {
    pub instance: ty::Instance<'tcx>,
    pub span: Span,
}

#[derive(Clone, Copy, Eq, PartialEq, Debug)] // Miri debug-prints these
pub enum StackPopCleanup {
    /// Jump to the next block in the caller, or cause UB if None (that's a function
    /// that must never return). Also store layout of return place so
    /// we can validate it at that layout.
    /// `ret` stores the block we jump to on a normal return, while `unwind`
    /// stores the block used for cleanup during unwinding.
    Goto { ret: Option<mir::BasicBlock>, unwind: mir::UnwindAction },
    /// The root frame of the stack: nowhere else to jump to.
    /// `cleanup` says whether locals are deallocated. Static computation
    /// wants them leaked to intern what they need (and just throw away
    /// the entire `ecx` when it is done).
    Root { cleanup: bool },
}

/// State of a local variable including a memoized layout
#[derive(Clone, Debug)]
pub struct LocalState<'tcx, Prov: Provenance = AllocId> {
    pub value: LocalValue<Prov>,
    /// Don't modify if `Some`; this is only used to prevent computing the layout twice.
    pub layout: Cell<Option<TyAndLayout<'tcx>>>,
}

/// Current value of a local variable
#[derive(Copy, Clone, Debug)] // Miri debug-prints these
pub enum LocalValue<Prov: Provenance = AllocId> {
    /// This local is not currently alive, and cannot be used at all.
    Dead,
    /// A normal, live local.
    /// Mostly for convenience, we re-use the `Operand` type here.
    /// This is an optimization over just always having a pointer here;
    /// we can thus avoid doing an allocation when the local just stores
    /// immediate values *and* never has its address taken.
    Live(Operand<Prov>),
}

impl<'tcx, Prov: Provenance + 'static> LocalState<'tcx, Prov> {
    /// Read the local's value, or error if the local is not yet live or not live anymore.
    #[inline]
    pub fn access(&self) -> InterpResult<'tcx, &Operand<Prov>> {
        match &self.value {
            LocalValue::Dead => throw_ub!(DeadLocal), // could even be "invalid program"?
            LocalValue::Live(val) => Ok(val),
        }
    }

    /// Overwrite the local. If the local can be overwritten in place, return a reference
    /// to do so; otherwise return the `MemPlace` to consult instead.
    ///
    /// Note: This may only be invoked from the `Machine::access_local_mut` hook and not from
    /// anywhere else. You may be invalidating machine invariants if you do!
    #[inline]
    pub fn access_mut(&mut self) -> InterpResult<'tcx, &mut Operand<Prov>> {
        match &mut self.value {
            LocalValue::Dead => throw_ub!(DeadLocal), // could even be "invalid program"?
            LocalValue::Live(val) => Ok(val),
        }
    }
}

impl<'mir, 'tcx, Prov: Provenance> Frame<'mir, 'tcx, Prov> {
    pub fn with_extra<Extra>(self, extra: Extra) -> Frame<'mir, 'tcx, Prov, Extra> {
        Frame {
            body: self.body,
            instance: self.instance,
            return_to_block: self.return_to_block,
            return_place: self.return_place,
            locals: self.locals,
            loc: self.loc,
            extra,
            tracing_span: self.tracing_span,
        }
    }
}

impl<'mir, 'tcx, Prov: Provenance, Extra> Frame<'mir, 'tcx, Prov, Extra> {
    /// Get the current location within the Frame.
    ///
    /// If this is `Right`, we are not currently executing any particular statement in
    /// this frame (can happen e.g. during frame initialization, and during unwinding on
    /// frames without cleanup code).
    ///
    /// Used by priroda.
    pub fn current_loc(&self) -> Either<mir::Location, Span> {
        self.loc
    }

    /// Return the `SourceInfo` of the current instruction.
    pub fn current_source_info(&self) -> Option<&mir::SourceInfo> {
        self.loc.left().map(|loc| self.body.source_info(loc))
    }

    pub fn current_span(&self) -> Span {
        match self.loc {
            Left(loc) => self.body.source_info(loc).span,
            Right(span) => span,
        }
    }

    pub fn lint_root(&self) -> Option<hir::HirId> {
        self.current_source_info().and_then(|source_info| {
            match &self.body.source_scopes[source_info.scope].local_data {
                mir::ClearCrossCrate::Set(data) => Some(data.lint_root),
                mir::ClearCrossCrate::Clear => None,
            }
        })
    }
}

// FIXME: only used by miri, should be removed once translatable.
impl<'tcx> fmt::Display for FrameInfo<'tcx> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        ty::tls::with(|tcx| {
            if tcx.def_key(self.instance.def_id()).disambiguated_data.data
                == DefPathData::ClosureExpr
            {
                write!(f, "inside closure")
            } else {
                // Note: this triggers a `good_path_bug` state, which means that if we ever get here
                // we must emit a diagnostic. We should never display a `FrameInfo` unless we
                // actually want to emit a warning or error to the user.
                write!(f, "inside `{}`", self.instance)
            }
        })
    }
}

impl<'tcx> FrameInfo<'tcx> {
    pub fn as_note(&self, tcx: TyCtxt<'tcx>) -> errors::FrameNote {
        let span = self.span;
        if tcx.def_key(self.instance.def_id()).disambiguated_data.data == DefPathData::ClosureExpr {
            errors::FrameNote { where_: "closure", span, instance: String::new(), times: 0 }
        } else {
            let instance = format!("{}", self.instance);
            // Note: this triggers a `good_path_bug` state, which means that if we ever get here
            // we must emit a diagnostic. We should never display a `FrameInfo` unless we
            // actually want to emit a warning or error to the user.
            errors::FrameNote { where_: "instance", span, instance, times: 0 }
        }
    }
}

impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> HasDataLayout for InterpCx<'mir, 'tcx, M> {
    #[inline]
    fn data_layout(&self) -> &TargetDataLayout {
        &self.tcx.data_layout
    }
}

impl<'mir, 'tcx, M> layout::HasTyCtxt<'tcx> for InterpCx<'mir, 'tcx, M>
where
    M: Machine<'mir, 'tcx>,
{
    #[inline]
    fn tcx(&self) -> TyCtxt<'tcx> {
        *self.tcx
    }
}

impl<'mir, 'tcx, M> layout::HasParamEnv<'tcx> for InterpCx<'mir, 'tcx, M>
where
    M: Machine<'mir, 'tcx>,
{
    fn param_env(&self) -> ty::ParamEnv<'tcx> {
        self.param_env
    }
}

impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> LayoutOfHelpers<'tcx> for InterpCx<'mir, 'tcx, M> {
    type LayoutOfResult = InterpResult<'tcx, TyAndLayout<'tcx>>;

    #[inline]
    fn layout_tcx_at_span(&self) -> Span {
        // Using the cheap root span for performance.
        self.tcx.span
    }

    #[inline]
    fn handle_layout_err(
        &self,
        err: LayoutError<'tcx>,
        _: Span,
        _: Ty<'tcx>,
    ) -> InterpErrorInfo<'tcx> {
        err_inval!(Layout(err)).into()
    }
}

impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> FnAbiOfHelpers<'tcx> for InterpCx<'mir, 'tcx, M> {
    type FnAbiOfResult = InterpResult<'tcx, &'tcx FnAbi<'tcx, Ty<'tcx>>>;

    fn handle_fn_abi_err(
        &self,
        err: FnAbiError<'tcx>,
        _span: Span,
        _fn_abi_request: FnAbiRequest<'tcx>,
    ) -> InterpErrorInfo<'tcx> {
        match err {
            FnAbiError::Layout(err) => err_inval!(Layout(err)).into(),
            FnAbiError::AdjustForForeignAbi(err) => {
                err_inval!(FnAbiAdjustForForeignAbi(err)).into()
            }
        }
    }
}

/// Test if it is valid for a MIR assignment to assign a `src`-typed place to a `dest`-typed value.
/// This test should be symmetric, as it is primarily about layout compatibility.
pub(super) fn mir_assign_valid_types<'tcx>(
    tcx: TyCtxt<'tcx>,
    param_env: ParamEnv<'tcx>,
    src: TyAndLayout<'tcx>,
    dest: TyAndLayout<'tcx>,
) -> bool {
    // Type-changing assignments can happen when subtyping is used. While
    // all normal lifetimes are erased, higher-ranked types with their
    // late-bound lifetimes are still around and can lead to type
    // differences.
    if util::is_subtype(tcx, param_env, src.ty, dest.ty) {
        // Make sure the layout is equal, too -- just to be safe. Miri really
        // needs layout equality. For performance reasons we skip this check when
        // the types are equal. Equal types *can* have different layouts when
        // enum downcast is involved (as enum variants carry the type of the
        // enum), but those should never occur in assignments.
        if cfg!(debug_assertions) || src.ty != dest.ty {
            assert_eq!(src.layout, dest.layout);
        }
        true
    } else {
        false
    }
}

/// Use the already known layout if given (but sanity check in debug mode),
/// or compute the layout.
#[cfg_attr(not(debug_assertions), inline(always))]
pub(super) fn from_known_layout<'tcx>(
    tcx: TyCtxtAt<'tcx>,
    param_env: ParamEnv<'tcx>,
    known_layout: Option<TyAndLayout<'tcx>>,
    compute: impl FnOnce() -> InterpResult<'tcx, TyAndLayout<'tcx>>,
) -> InterpResult<'tcx, TyAndLayout<'tcx>> {
    match known_layout {
        None => compute(),
        Some(known_layout) => {
            if cfg!(debug_assertions) {
                let check_layout = compute()?;
                if !mir_assign_valid_types(tcx.tcx, param_env, check_layout, known_layout) {
                    span_bug!(
                        tcx.span,
                        "expected type differs from actual type.\nexpected: {:?}\nactual: {:?}",
                        known_layout.ty,
                        check_layout.ty,
                    );
                }
            }
            Ok(known_layout)
        }
    }
}

impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
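    /// Creates a new interpreter context.
    ///
    /// A minimal usage sketch; `MyMachine` stands in for a hypothetical `Machine`
    /// implementation and is not part of this crate:
    /// ```ignore (illustrative sketch, not a doctest)
    /// let mut ecx = InterpCx::new(tcx, root_span, ty::ParamEnv::reveal_all(), MyMachine::new());
    /// ecx.push_stack_frame(instance, body, &ret_place, StackPopCleanup::Root { cleanup: true })?;
    /// ```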
    pub fn new(
        tcx: TyCtxt<'tcx>,
        root_span: Span,
        param_env: ty::ParamEnv<'tcx>,
        machine: M,
    ) -> Self {
        InterpCx {
            machine,
            tcx: tcx.at(root_span),
            param_env,
            memory: Memory::new(),
            recursion_limit: tcx.recursion_limit(),
        }
    }

    #[inline(always)]
    pub fn cur_span(&self) -> Span {
        // This deliberately does *not* honor `requires_caller_location` since it is used for much
        // more than just panics.
        self.stack().last().map_or(self.tcx.span, |f| f.current_span())
    }

    #[inline(always)]
    /// Find the first stack frame that is within the current crate, if any; otherwise return the crate's `HirId`.
    pub fn best_lint_scope(&self) -> hir::HirId {
        self.stack()
            .iter()
            .find_map(|frame| frame.body.source.def_id().as_local())
            .map_or(CRATE_HIR_ID, |def_id| self.tcx.hir().local_def_id_to_hir_id(def_id))
    }

    #[inline(always)]
    pub(crate) fn stack(&self) -> &[Frame<'mir, 'tcx, M::Provenance, M::FrameExtra>] {
        M::stack(self)
    }

    #[inline(always)]
    pub(crate) fn stack_mut(
        &mut self,
    ) -> &mut Vec<Frame<'mir, 'tcx, M::Provenance, M::FrameExtra>> {
        M::stack_mut(self)
    }

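    /// The index of the topmost (current) frame on the stack. Panics if the stack is empty.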
    #[inline(always)]
    pub fn frame_idx(&self) -> usize {
        let stack = self.stack();
        assert!(!stack.is_empty());
        stack.len() - 1
    }

    #[inline(always)]
    pub fn frame(&self) -> &Frame<'mir, 'tcx, M::Provenance, M::FrameExtra> {
        self.stack().last().expect("no call frames exist")
    }

    #[inline(always)]
    pub fn frame_mut(&mut self) -> &mut Frame<'mir, 'tcx, M::Provenance, M::FrameExtra> {
        self.stack_mut().last_mut().expect("no call frames exist")
    }

    #[inline(always)]
    pub(super) fn body(&self) -> &'mir mir::Body<'tcx> {
        self.frame().body
    }

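    /// Sign-extend `value` from the size of `ty` to the full 128 bits.
    /// Asserts that `ty` has a signed ABI.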
    #[inline(always)]
    pub fn sign_extend(&self, value: u128, ty: TyAndLayout<'_>) -> u128 {
        assert!(ty.abi.is_signed());
        ty.size.sign_extend(value)
    }

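    /// Truncate `value` to the size of `ty`, discarding the high bits.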
    #[inline(always)]
    pub fn truncate(&self, value: u128, ty: TyAndLayout<'_>) -> u128 {
        ty.size.truncate(value)
    }

    #[inline]
    pub fn type_is_freeze(&self, ty: Ty<'tcx>) -> bool {
        ty.is_freeze(*self.tcx, self.param_env)
    }

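    /// Fetch the MIR body of the given instance (or one of its promoteds),
    /// erroring out if typeck for that body already failed.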
    pub fn load_mir(
        &self,
        instance: ty::InstanceDef<'tcx>,
        promoted: Option<mir::Promoted>,
    ) -> InterpResult<'tcx, &'tcx mir::Body<'tcx>> {
        trace!("load mir(instance={:?}, promoted={:?})", instance, promoted);
        let body = if let Some(promoted) = promoted {
            let def = instance.def_id();
            &self.tcx.promoted_mir(def)[promoted]
        } else {
            M::load_mir(self, instance)?
        };
        // do not continue if typeck errors occurred (can only occur in local crate)
        if let Some(err) = body.tainted_by_errors {
            throw_inval!(AlreadyReported(ReportedErrorInfo::tainted_by_errors(err)));
        }
        Ok(body)
    }

    /// Call this on things you got out of the MIR (so it is as generic as the current
    /// stack frame), to bring it into the proper environment for this interpreter.
    pub(super) fn subst_from_current_frame_and_normalize_erasing_regions<
        T: TypeFoldable<TyCtxt<'tcx>>,
    >(
        &self,
        value: T,
    ) -> Result<T, InterpError<'tcx>> {
        self.subst_from_frame_and_normalize_erasing_regions(self.frame(), value)
    }

    /// Call this on things you got out of the MIR (so it is as generic as the provided
    /// stack frame), to bring it into the proper environment for this interpreter.
    pub(super) fn subst_from_frame_and_normalize_erasing_regions<T: TypeFoldable<TyCtxt<'tcx>>>(
        &self,
        frame: &Frame<'mir, 'tcx, M::Provenance, M::FrameExtra>,
        value: T,
    ) -> Result<T, InterpError<'tcx>> {
        frame
            .instance
            .try_subst_mir_and_normalize_erasing_regions(
                *self.tcx,
                self.param_env,
                ty::EarlyBinder::bind(value),
            )
            .map_err(|_| err_inval!(TooGeneric))
    }

    /// The `substs` are assumed to already be in our interpreter "universe" (param_env).
    pub(super) fn resolve(
        &self,
        def: DefId,
        substs: SubstsRef<'tcx>,
    ) -> InterpResult<'tcx, ty::Instance<'tcx>> {
        trace!("resolve: {:?}, {:#?}", def, substs);
        trace!("param_env: {:#?}", self.param_env);
        trace!("substs: {:#?}", substs);
        match ty::Instance::resolve(*self.tcx, self.param_env, def, substs) {
            Ok(Some(instance)) => Ok(instance),
            Ok(None) => throw_inval!(TooGeneric),

            // FIXME(eddyb) this could be a bit more specific than `AlreadyReported`.
            Err(error_reported) => throw_inval!(AlreadyReported(error_reported.into())),
        }
    }

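    /// Returns the layout of the given local in the given frame, consulting and
    /// filling the per-local layout cache.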
    #[inline(always)]
    pub fn layout_of_local(
        &self,
        frame: &Frame<'mir, 'tcx, M::Provenance, M::FrameExtra>,
        local: mir::Local,
        layout: Option<TyAndLayout<'tcx>>,
    ) -> InterpResult<'tcx, TyAndLayout<'tcx>> {
        let state = &frame.locals[local];
        if let Some(layout) = state.layout.get() {
            return Ok(layout);
        }

        let layout = from_known_layout(self.tcx, self.param_env, layout, || {
            let local_ty = frame.body.local_decls[local].ty;
            let local_ty = self.subst_from_frame_and_normalize_erasing_regions(frame, local_ty)?;
            self.layout_of(local_ty)
        })?;

        // Layouts of locals are requested a lot, so we cache them.
        state.layout.set(Some(layout));
        Ok(layout)
    }

    /// Returns the actual dynamic size and alignment of the place at the given type.
    /// Only the "meta" (metadata) part of the place matters.
    /// This can fail to provide an answer for extern types.
    pub(super) fn size_and_align_of(
        &self,
        metadata: &MemPlaceMeta<M::Provenance>,
        layout: &TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx, Option<(Size, Align)>> {
        if layout.is_sized() {
            return Ok(Some((layout.size, layout.align.abi)));
        }
        match layout.ty.kind() {
            ty::Adt(..) | ty::Tuple(..) => {
                // First get the size of all statically known fields.
                // Don't use `sizing_type_of` because that expects the type to be sized,
                // and it also rounds up to alignment, which we want to avoid,
                // as the unsized field's alignment could be smaller.
                assert!(!layout.ty.is_simd());
                assert!(layout.fields.count() > 0);
                trace!("DST layout: {:?}", layout);

                let sized_size = layout.fields.offset(layout.fields.count() - 1);
                let sized_align = layout.align.abi;
                trace!(
                    "DST {} statically sized prefix size: {:?} align: {:?}",
                    layout.ty,
                    sized_size,
                    sized_align
                );

                // Recurse to get the size of the dynamically sized field (must be
                // the last field). Can't have foreign types here, how would we
                // adjust alignment and size for them?
                let field = layout.field(self, layout.fields.count() - 1);
                let Some((unsized_size, mut unsized_align)) = self.size_and_align_of(metadata, &field)? else {
                    // A field with an extern type. We don't know the actual dynamic size
                    // or the alignment.
                    return Ok(None);
                };

                // FIXME (#26403, #27023): We should be adding padding
                // to `sized_size` (to accommodate the `unsized_align`
                // required of the unsized field that follows) before
                // summing it with `sized_size`. (Note that since #26403
                // is unfixed, we do not yet add the necessary padding
                // here. But this is where the add would go.)

                // Return the sum of sizes and max of aligns.
                let size = sized_size + unsized_size; // `Size` addition

                // Packed types ignore the alignment of their fields.
                if let ty::Adt(def, _) = layout.ty.kind() {
                    if def.repr().packed() {
                        unsized_align = sized_align;
                    }
                }

                // Choose max of two known alignments (combined value must
                // be aligned according to more restrictive of the two).
                let align = sized_align.max(unsized_align);

                // Issue #27023: must add any necessary padding to `size`
                // (to make it a multiple of `align`) before returning it.
                let size = size.align_to(align);

                // Check if this brought us over the size limit.
                if size > self.max_size_of_val() {
                    throw_ub!(InvalidMeta(InvalidMetaKind::TooBig));
                }
                Ok(Some((size, align)))
            }
            ty::Dynamic(_, _, ty::Dyn) => {
                let vtable = metadata.unwrap_meta().to_pointer(self)?;
                // Read size and align from vtable (already checks size).
                Ok(Some(self.get_vtable_size_and_align(vtable)?))
            }

            ty::Slice(_) | ty::Str => {
                let len = metadata.unwrap_meta().to_target_usize(self)?;
                let elem = layout.field(self, 0);

                // Make sure the slice is not too big.
                let size = elem.size.bytes().saturating_mul(len); // we rely on `max_size_of_val` being smaller than `u64::MAX`.
                let size = Size::from_bytes(size);
                if size > self.max_size_of_val() {
                    throw_ub!(InvalidMeta(InvalidMetaKind::SliceTooBig));
                }
                Ok(Some((size, elem.align.abi)))
            }

            ty::Foreign(_) => Ok(None),

            _ => span_bug!(self.cur_span(), "size_and_align_of::<{:?}> not supported", layout.ty),
        }
    }

    #[inline]
    pub fn size_and_align_of_mplace(
        &self,
        mplace: &MPlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx, Option<(Size, Align)>> {
        self.size_and_align_of(&mplace.meta, &mplace.layout)
    }

    #[instrument(skip(self, body, return_place, return_to_block), level = "debug")]
    pub fn push_stack_frame(
        &mut self,
        instance: ty::Instance<'tcx>,
        body: &'mir mir::Body<'tcx>,
        return_place: &PlaceTy<'tcx, M::Provenance>,
        return_to_block: StackPopCleanup,
    ) -> InterpResult<'tcx> {
        trace!("body: {:#?}", body);
        // Clobber previous return place contents; nobody is supposed to be able to see them any more.
        // This also checks dereferenceable, but not align. We rely on all constructed places being
        // sufficiently aligned (in particular we rely on `deref_operand` checking alignment).
        self.write_uninit(return_place)?;
        // First push a stack frame so we have access to the local substs.
        let pre_frame = Frame {
            body,
            loc: Right(body.span), // Span used for errors caused during preamble.
            return_to_block,
            return_place: return_place.clone(),
            // empty local array, we fill it in below, after we are inside the stack frame and
            // all methods actually know about the frame
            locals: IndexVec::new(),
            instance,
            tracing_span: SpanGuard::new(),
            extra: (),
        };
        let frame = M::init_frame_extra(self, pre_frame)?;
        self.stack_mut().push(frame);

        // Make sure all the constants required by this frame evaluate successfully (post-monomorphization check).
        for ct in &body.required_consts {
            let span = ct.span;
            let ct = self.subst_from_current_frame_and_normalize_erasing_regions(ct.literal)?;
            self.eval_mir_constant(&ct, Some(span), None)?;
        }

        // Most locals are initially dead.
        let dummy = LocalState { value: LocalValue::Dead, layout: Cell::new(None) };
        let mut locals = IndexVec::from_elem(dummy, &body.local_decls);

        // Now mark those locals as live that have no `Storage*` annotations.
        let always_live = always_storage_live_locals(self.body());
        for local in locals.indices() {
            if always_live.contains(local) {
                locals[local].value = LocalValue::Live(Operand::Immediate(Immediate::Uninit));
            }
        }
        // done
        self.frame_mut().locals = locals;
        M::after_stack_push(self)?;
        self.frame_mut().loc = Left(mir::Location::START);

        let span = info_span!("frame", "{}", instance);
        self.frame_mut().tracing_span.enter(span);

        Ok(())
    }

    /// Jump to the given block.
    #[inline]
    pub fn go_to_block(&mut self, target: mir::BasicBlock) {
        self.frame_mut().loc = Left(mir::Location { block: target, statement_index: 0 });
    }

    /// *Return* to the given `target` basic block.
    /// Do *not* use for unwinding! Use `unwind_to_block` instead.
    ///
    /// If `target` is `None`, that indicates the function cannot return, so we raise UB.
    pub fn return_to_block(&mut self, target: Option<mir::BasicBlock>) -> InterpResult<'tcx> {
        if let Some(target) = target {
            self.go_to_block(target);
            Ok(())
        } else {
            throw_ub!(Unreachable)
        }
    }

    /// *Unwind* to the given `target` basic block.
    /// Do *not* use for returning! Use `return_to_block` instead.
    ///
    /// If `target` is `UnwindAction::Continue`, that indicates the function does not need cleanup
    /// during unwinding, and we will just keep propagating that upwards.
    ///
    /// If `target` is `UnwindAction::Unreachable`, that indicates the function does not allow
    /// unwinding, and doing so is UB.
    pub fn unwind_to_block(&mut self, target: mir::UnwindAction) -> InterpResult<'tcx> {
        self.frame_mut().loc = match target {
            mir::UnwindAction::Cleanup(block) => Left(mir::Location { block, statement_index: 0 }),
            mir::UnwindAction::Continue => Right(self.frame_mut().body.span),
            mir::UnwindAction::Unreachable => {
                throw_ub_custom!(fluent::const_eval_unreachable_unwind);
            }
            mir::UnwindAction::Terminate => {
                self.frame_mut().loc = Right(self.frame_mut().body.span);
                M::abort(self, "panic in a function that cannot unwind".to_owned())?;
            }
        };
        Ok(())
    }

    /// Pops the current frame from the stack, deallocating the
    /// memory for allocated locals.
    ///
    /// If `unwinding` is `false`, then we are performing a normal return
    /// from a function. In this case, we jump back into the frame of the caller,
    /// and continue execution as normal.
    ///
    /// If `unwinding` is `true`, then we are in the middle of a panic,
    /// and need to unwind this frame. In this case, we jump to the
    /// `cleanup` block for the function, which is responsible for running
    /// `Drop` impls for any locals that have been initialized at this point.
    /// The cleanup block ends with a special `Resume` terminator, which will
    /// cause us to continue unwinding.
    #[instrument(skip(self), level = "debug")]
    pub(super) fn pop_stack_frame(&mut self, unwinding: bool) -> InterpResult<'tcx> {
        info!(
            "popping stack frame ({})",
            if unwinding { "during unwinding" } else { "returning from function" }
        );

        // Check `unwinding`.
        assert_eq!(
            unwinding,
            match self.frame().loc {
                Left(loc) => self.body().basic_blocks[loc.block].is_cleanup,
                Right(_) => true,
            }
        );
        if unwinding && self.frame_idx() == 0 {
            throw_ub_custom!(fluent::const_eval_unwind_past_top);
        }

        // Copy return value. Must of course happen *before* we deallocate the locals.
        let copy_ret_result = if !unwinding {
            let op = self
                .local_to_op(self.frame(), mir::RETURN_PLACE, None)
                .expect("return place should always be live");
            let dest = self.frame().return_place.clone();
            let err = self.copy_op(&op, &dest, /*allow_transmute*/ true);
            trace!("return value: {:?}", self.dump_place(*dest));
            // We delay actually short-circuiting on this error until *after* the stack frame is
            // popped, since we want this error to be attributed to the caller, whose type defines
            // this transmute.
            err
        } else {
            Ok(())
        };

        // Cleanup: deallocate locals.
        // Usually we want to clean up (deallocate locals), but in a few rare cases we don't.
        // We do this while the frame is still on the stack, so errors point to the callee.
        let return_to_block = self.frame().return_to_block;
        let cleanup = match return_to_block {
            StackPopCleanup::Goto { .. } => true,
            StackPopCleanup::Root { cleanup, .. } => cleanup,
        };
        if cleanup {
            // We need to take the locals out, since we need to mutate while iterating.
            let locals = mem::take(&mut self.frame_mut().locals);
            for local in &locals {
                self.deallocate_local(local.value)?;
            }
        }

        // All right, now it is time to actually pop the frame.
        // Note that its locals are gone already, but that's fine.
        let frame =
            self.stack_mut().pop().expect("tried to pop a stack frame, but there were none");
        // Report error from return value copy, if any.
        copy_ret_result?;

        // If we are not doing cleanup, also skip everything else.
        if !cleanup {
            assert!(self.stack().is_empty(), "only the topmost frame should ever be leaked");
            assert!(!unwinding, "tried to skip cleanup during unwinding");
            // Skip machine hook.
            return Ok(());
        }
        if M::after_stack_pop(self, frame, unwinding)? == StackPopJump::NoJump {
            // The hook already did everything.
            return Ok(());
        }

        // Normal return, figure out where to jump.
        if unwinding {
            // Follow the unwind edge.
            let unwind = match return_to_block {
                StackPopCleanup::Goto { unwind, .. } => unwind,
                StackPopCleanup::Root { .. } => {
                    panic!("encountered StackPopCleanup::Root when unwinding!")
                }
            };
            self.unwind_to_block(unwind)
        } else {
            // Follow the normal return edge.
            match return_to_block {
                StackPopCleanup::Goto { ret, .. } => self.return_to_block(ret),
                StackPopCleanup::Root { .. } => {
                    assert!(
                        self.stack().is_empty(),
                        "only the topmost frame can have StackPopCleanup::Root"
                    );
                    Ok(())
                }
            }
        }
    }

    /// Mark a storage as live, killing the previous content.
    pub fn storage_live(&mut self, local: mir::Local) -> InterpResult<'tcx> {
        assert!(local != mir::RETURN_PLACE, "Cannot make return place live");
        trace!("{:?} is now live", local);

        let local_val = LocalValue::Live(Operand::Immediate(Immediate::Uninit));
        // `StorageLive` expects the local to be dead, and marks it live.
        let old = mem::replace(&mut self.frame_mut().locals[local].value, local_val);
        if !matches!(old, LocalValue::Dead) {
            throw_ub_custom!(fluent::const_eval_double_storage_live);
        }
        Ok(())
    }

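    /// Mark a storage as dead, deallocating the local's backing memory (if any).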
    pub fn storage_dead(&mut self, local: mir::Local) -> InterpResult<'tcx> {
        assert!(local != mir::RETURN_PLACE, "Cannot make return place dead");
        trace!("{:?} is now dead", local);

        // It is entirely okay for this local to be already dead (at least that's how we currently generate MIR).
        let old = mem::replace(&mut self.frame_mut().locals[local].value, LocalValue::Dead);
        self.deallocate_local(old)?;
        Ok(())
    }

    #[instrument(skip(self), level = "debug")]
    fn deallocate_local(&mut self, local: LocalValue<M::Provenance>) -> InterpResult<'tcx> {
        if let LocalValue::Live(Operand::Indirect(MemPlace { ptr, .. })) = local {
            // All locals have a backing allocation, even if the allocation is empty
            // due to the local having ZST type. Hence we can `unwrap`.
            trace!(
                "deallocating local {:?}: {:?}",
                local,
                // Locals always have an `alloc_id` (they are never the result of an int2ptr).
                self.dump_alloc(ptr.provenance.unwrap().get_alloc_id().unwrap())
            );
            self.deallocate_ptr(ptr, None, MemoryKind::Stack)?;
        };
        Ok(())
    }

    /// Call a query that can return `ErrorHandled`. If `span` is `Some`, point to that span when an error occurs.
    pub fn ctfe_query<T>(
        &self,
        span: Option<Span>,
        query: impl FnOnce(TyCtxtAt<'tcx>) -> Result<T, ErrorHandled>,
    ) -> InterpResult<'tcx, T> {
        // Use a precise span for better cycle errors.
        query(self.tcx.at(span.unwrap_or_else(|| self.cur_span()))).map_err(|err| {
            match err {
                ErrorHandled::Reported(err) => {
                    if !err.is_tainted_by_errors() && let Some(span) = span {
                        // To make it easier to figure out where this error comes from, also add a note at the current location.
                        self.tcx.sess.emit_note(ErroneousConstUsed { span });
                    }
                    err_inval!(AlreadyReported(err))
                }
                ErrorHandled::TooGeneric => err_inval!(TooGeneric),
            }
            .into()
        })
    }

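    /// Evaluate the given global (constant or static), returning a place pointing
    /// to its backing allocation.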
    pub fn eval_global(
        &self,
        gid: GlobalId<'tcx>,
        span: Option<Span>,
    ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::Provenance>> {
        // For statics we pick `ParamEnv::reveal_all`, because statics don't have generics
        // and thus don't care about the parameter environment. While we could just use
        // `self.param_env`, that would mean we invoke the query to evaluate the static
        // with different parameter environments, thus causing the static to be evaluated
        // multiple times.
        let param_env = if self.tcx.is_static(gid.instance.def_id()) {
            ty::ParamEnv::reveal_all()
        } else {
            self.param_env
        };
        let param_env = param_env.with_const();
        let val = self.ctfe_query(span, |tcx| tcx.eval_to_allocation_raw(param_env.and(gid)))?;
        self.raw_const_to_mplace(val)
    }

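    /// Helper for debug-printing a place together with the allocations it points to.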
    #[must_use]
    pub fn dump_place(&self, place: Place<M::Provenance>) -> PlacePrinter<'_, 'mir, 'tcx, M> {
        PlacePrinter { ecx: self, place }
    }

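    /// Generate a stacktrace (innermost frame first) from the given stack,
    /// expanding frames introduced by MIR inlining.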
    #[must_use]
    pub fn generate_stacktrace_from_stack(
        stack: &[Frame<'mir, 'tcx, M::Provenance, M::FrameExtra>],
    ) -> Vec<FrameInfo<'tcx>> {
        let mut frames = Vec::new();
        // This deliberately does *not* honor `requires_caller_location` since it is used for much
        // more than just panics.
        for frame in stack.iter().rev() {
            let span = match frame.loc {
                Left(loc) => {
                    // If the stacktrace passes through MIR-inlined source scopes, add them.
                    let mir::SourceInfo { mut span, scope } = *frame.body.source_info(loc);
                    let mut scope_data = &frame.body.source_scopes[scope];
                    while let Some((instance, call_span)) = scope_data.inlined {
                        frames.push(FrameInfo { span, instance });
                        span = call_span;
                        scope_data = &frame.body.source_scopes[scope_data.parent_scope.unwrap()];
                    }
                    span
                }
                Right(span) => span,
            };
            frames.push(FrameInfo { span, instance: frame.instance });
        }
        trace!("generate stacktrace: {:#?}", frames);
        frames
    }

    #[must_use]
    pub fn generate_stacktrace(&self) -> Vec<FrameInfo<'tcx>> {
        Self::generate_stacktrace_from_stack(self.stack())
    }
}

#[doc(hidden)]
/// Helper struct for the `dump_place` function.
pub struct PlacePrinter<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> {
    ecx: &'a InterpCx<'mir, 'tcx, M>,
    place: Place<M::Provenance>,
}

impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> std::fmt::Debug
    for PlacePrinter<'a, 'mir, 'tcx, M>
{
    fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self.place {
            Place::Local { frame, local } => {
                let mut allocs = Vec::new();
                write!(fmt, "{:?}", local)?;
                if frame != self.ecx.frame_idx() {
                    write!(fmt, " ({} frames up)", self.ecx.frame_idx() - frame)?;
                }
                write!(fmt, ":")?;

                match self.ecx.stack()[frame].locals[local].value {
                    LocalValue::Dead => write!(fmt, " is dead")?,
                    LocalValue::Live(Operand::Immediate(Immediate::Uninit)) => {
                        write!(fmt, " is uninitialized")?
                    }
                    LocalValue::Live(Operand::Indirect(mplace)) => {
                        write!(
                            fmt,
                            " by {} ref {:?}:",
                            match mplace.meta {
                                MemPlaceMeta::Meta(meta) => format!(" meta({:?})", meta),
                                MemPlaceMeta::None => String::new(),
                            },
                            mplace.ptr,
                        )?;
                        allocs.extend(mplace.ptr.provenance.map(Provenance::get_alloc_id));
                    }
                    LocalValue::Live(Operand::Immediate(Immediate::Scalar(val))) => {
                        write!(fmt, " {:?}", val)?;
                        if let Scalar::Ptr(ptr, _size) = val {
                            allocs.push(ptr.provenance.get_alloc_id());
                        }
                    }
                    LocalValue::Live(Operand::Immediate(Immediate::ScalarPair(val1, val2))) => {
                        write!(fmt, " ({:?}, {:?})", val1, val2)?;
                        if let Scalar::Ptr(ptr, _size) = val1 {
                            allocs.push(ptr.provenance.get_alloc_id());
                        }
                        if let Scalar::Ptr(ptr, _size) = val2 {
                            allocs.push(ptr.provenance.get_alloc_id());
                        }
                    }
                }

                write!(fmt, ": {:?}", self.ecx.dump_allocs(allocs.into_iter().flatten().collect()))
            }
            Place::Ptr(mplace) => match mplace.ptr.provenance.and_then(Provenance::get_alloc_id) {
                Some(alloc_id) => {
                    write!(fmt, "by ref {:?}: {:?}", mplace.ptr, self.ecx.dump_alloc(alloc_id))
                }
                ptr => write!(fmt, " integral by ref: {:?}", ptr),
            },
        }
    }
}