1 use crate::const_eval::CheckAlignment;
2 use crate::errors::ConstEvalError;
3
4 use either::{Left, Right};
5
6 use rustc_hir::def::DefKind;
7 use rustc_middle::mir;
8 use rustc_middle::mir::interpret::{ErrorHandled, InterpErrorInfo};
9 use rustc_middle::mir::pretty::write_allocation_bytes;
10 use rustc_middle::traits::Reveal;
11 use rustc_middle::ty::layout::LayoutOf;
12 use rustc_middle::ty::print::with_no_trimmed_paths;
13 use rustc_middle::ty::{self, TyCtxt};
14 use rustc_span::source_map::Span;
15 use rustc_target::abi::{self, Abi};
16
17 use super::{CanAccessStatics, CompileTimeEvalContext, CompileTimeInterpreter};
18 use crate::errors;
19 use crate::interpret::eval_nullary_intrinsic;
20 use crate::interpret::{
21 intern_const_alloc_recursive, Allocation, ConstAlloc, ConstValue, CtfeValidationMode, GlobalId,
22 Immediate, InternKind, InterpCx, InterpError, InterpResult, MPlaceTy, MemoryKind, OpTy,
23 RefTracking, StackPopCleanup,
24 };
25
/// Evaluates the MIR `body` of the global `cid` to completion inside `ecx` and
/// interns the final value. Returns a pointer to where the result lives.
fn eval_body_using_ecx<'mir, 'tcx>(
    ecx: &mut CompileTimeEvalContext<'mir, 'tcx>,
    cid: GlobalId<'tcx>,
    body: &'mir mir::Body<'tcx>,
) -> InterpResult<'tcx, MPlaceTy<'tcx>> {
    debug!("eval_body_using_ecx: {:?}, {:?}", cid, ecx.param_env);
    let tcx = *ecx.tcx;
    // Sanity check: only const-like items should reach this function. Promoteds
    // are exempt because they can occur inside bodies of any `DefKind`.
    assert!(
        cid.promoted.is_some()
            || matches!(
                ecx.tcx.def_kind(cid.instance.def_id()),
                DefKind::Const
                    | DefKind::Static(_)
                    | DefKind::ConstParam
                    | DefKind::AnonConst
                    | DefKind::InlineConst
                    | DefKind::AssocConst
            ),
        "Unexpected DefKind: {:?}",
        ecx.tcx.def_kind(cid.instance.def_id())
    );
    // Allocate the destination for the final value, using the body's return
    // type instantiated with the instance's substitutions.
    let layout = ecx.layout_of(body.bound_return_ty().subst(tcx, cid.instance.substs))?;
    assert!(layout.is_sized());
    let ret = ecx.allocate(layout, MemoryKind::Stack)?;

    trace!(
        "eval_body_using_ecx: pushing stack frame for global: {}{}",
        with_no_trimmed_paths!(ecx.tcx.def_path_str(cid.instance.def_id())),
        cid.promoted.map_or_else(String::new, |p| format!("::promoted[{:?}]", p))
    );

    // Push a root frame for the body, writing its return value into `ret`.
    ecx.push_stack_frame(
        cid.instance,
        body,
        &ret.into(),
        StackPopCleanup::Root { cleanup: false },
    )?;

    // The main interpreter loop.
    while ecx.step()? {}

    // Intern the result: promoteds get their own kind, statics intern with
    // their mutability, everything else is an ordinary constant.
    let intern_kind = if cid.promoted.is_some() {
        InternKind::Promoted
    } else {
        match tcx.static_mutability(cid.instance.def_id()) {
            Some(m) => InternKind::Static(m),
            None => InternKind::Constant,
        }
    };
    ecx.machine.check_alignment = CheckAlignment::No; // interning doesn't need to respect alignment
    intern_const_alloc_recursive(ecx, intern_kind, &ret)?;
    // we leave alignment checks off, since this `ecx` will not be used for further evaluation anyway

    debug!("eval_body_using_ecx done: {:?}", *ret);
    Ok(ret)
}
84
85 /// The `InterpCx` is only meant to be used to do field and index projections into constants for
86 /// `simd_shuffle` and const patterns in match arms. It never performs alignment checks.
87 ///
88 /// The function containing the `match` that is currently being analyzed may have generic bounds
89 /// that inform us about the generic bounds of the constant. E.g., using an associated constant
90 /// of a function's generic parameter will require knowledge about the bounds on the generic
91 /// parameter. These bounds are passed to `mk_eval_cx` via the `ParamEnv` argument.
mk_eval_cx<'mir, 'tcx>( tcx: TyCtxt<'tcx>, root_span: Span, param_env: ty::ParamEnv<'tcx>, can_access_statics: CanAccessStatics, ) -> CompileTimeEvalContext<'mir, 'tcx>92 pub(super) fn mk_eval_cx<'mir, 'tcx>(
93 tcx: TyCtxt<'tcx>,
94 root_span: Span,
95 param_env: ty::ParamEnv<'tcx>,
96 can_access_statics: CanAccessStatics,
97 ) -> CompileTimeEvalContext<'mir, 'tcx> {
98 debug!("mk_eval_cx: {:?}", param_env);
99 InterpCx::new(
100 tcx,
101 root_span,
102 param_env,
103 CompileTimeInterpreter::new(can_access_statics, CheckAlignment::No),
104 )
105 }
106
/// This function converts an interpreter value into a constant that is meant for use in the
/// type system.
///
/// Scalars and byte/str slices become `ConstValue::Scalar` / `ConstValue::Slice`;
/// ZSTs become `ConstValue::ZeroSized`; everything else stays `ConstValue::ByRef`.
#[instrument(skip(ecx), level = "debug")]
pub(super) fn op_to_const<'tcx>(
    ecx: &CompileTimeEvalContext<'_, 'tcx>,
    op: &OpTy<'tcx>,
) -> ConstValue<'tcx> {
    // We do not have value optimizations for everything.
    // Only scalars and slices, since they are very common.
    // Note that further down we turn scalars of uninitialized bits back to `ByRef`. These can result
    // from scalar unions that are initialized with one of their zero sized variants. We could
    // instead allow `ConstValue::Scalar` to store `ScalarMaybeUninit`, but that would affect all
    // the usual cases of extracting e.g. a `usize`, without there being a real use case for the
    // `Undef` situation.
    let try_as_immediate = match op.layout.abi {
        Abi::Scalar(abi::Scalar::Initialized { .. }) => true,
        Abi::ScalarPair(..) => match op.layout.ty.kind() {
            // Only `&[u8]` and `&str` get the `Slice` treatment below; any
            // other scalar pair stays by-ref.
            ty::Ref(_, inner, _) => match *inner.kind() {
                ty::Slice(elem) => elem == ecx.tcx.types.u8,
                ty::Str => true,
                _ => false,
            },
            _ => false,
        },
        _ => false,
    };
    let immediate = if try_as_immediate {
        Right(ecx.read_immediate(op).expect("normalization works on validated constants"))
    } else {
        // It is guaranteed that any non-slice scalar pair is actually ByRef here.
        // When we come back from raw const eval, we are always by-ref. The only way our op here is
        // by-val is if we are in destructure_mir_constant, i.e., if this is (a field of) something that we
        // "tried to make immediate" before. We wouldn't do that for non-slice scalar pairs or
        // structs containing such.
        op.as_mplace_or_imm()
    };

    debug!(?immediate);

    // We know `offset` is relative to the allocation, so we can use `into_parts`.
    let to_const_value = |mplace: &MPlaceTy<'_>| {
        debug!("to_const_value(mplace: {:?})", mplace);
        match mplace.ptr.into_parts() {
            // Backed by an actual allocation: refer to it by id plus offset.
            (Some(alloc_id), offset) => {
                let alloc = ecx.tcx.global_alloc(alloc_id).unwrap_memory();
                ConstValue::ByRef { alloc, offset }
            }
            // No provenance: only a ZST can live at a plain integer address.
            (None, offset) => {
                assert!(mplace.layout.is_zst());
                assert_eq!(
                    offset.bytes() % mplace.layout.align.abi.bytes(),
                    0,
                    "this MPlaceTy must come from a validated constant, thus we can assume the \
                    alignment is correct",
                );
                ConstValue::ZeroSized
            }
        }
    };
    match immediate {
        Left(ref mplace) => to_const_value(mplace),
        // see comment on `let try_as_immediate` above
        Right(imm) => match *imm {
            _ if imm.layout.is_zst() => ConstValue::ZeroSized,
            Immediate::Scalar(x) => ConstValue::Scalar(x),
            Immediate::ScalarPair(a, b) => {
                debug!("ScalarPair(a: {:?}, b: {:?})", a, b);
                // We know `offset` is relative to the allocation, so we can use `into_parts`.
                // `a` is the data half of the wide pointer. A pointer without
                // provenance gets an empty allocation substituted (presumably
                // this is an empty slice — the length `b` below should be 0).
                let (data, start) = match a.to_pointer(ecx).unwrap().into_parts() {
                    (Some(alloc_id), offset) => {
                        (ecx.tcx.global_alloc(alloc_id).unwrap_memory(), offset.bytes())
                    }
                    (None, _offset) => (
                        ecx.tcx.mk_const_alloc(Allocation::from_bytes_byte_aligned_immutable(
                            b"" as &[u8],
                        )),
                        0,
                    ),
                };
                // `b` is the length half of the wide pointer.
                let len = b.to_target_usize(ecx).unwrap();
                let start = start.try_into().unwrap();
                let len: usize = len.try_into().unwrap();
                ConstValue::Slice { data, start, end: start + len }
            }
            // Uninitialized immediates fall back to the by-ref representation.
            Immediate::Uninit => to_const_value(&op.assert_mem_place()),
        },
    }
}
195
196 #[instrument(skip(tcx), level = "debug", ret)]
turn_into_const_value<'tcx>( tcx: TyCtxt<'tcx>, constant: ConstAlloc<'tcx>, key: ty::ParamEnvAnd<'tcx, GlobalId<'tcx>>, ) -> ConstValue<'tcx>197 pub(crate) fn turn_into_const_value<'tcx>(
198 tcx: TyCtxt<'tcx>,
199 constant: ConstAlloc<'tcx>,
200 key: ty::ParamEnvAnd<'tcx, GlobalId<'tcx>>,
201 ) -> ConstValue<'tcx> {
202 let cid = key.value;
203 let def_id = cid.instance.def.def_id();
204 let is_static = tcx.is_static(def_id);
205 // This is just accessing an already computed constant, so no need to check alignment here.
206 let ecx = mk_eval_cx(
207 tcx,
208 tcx.def_span(key.value.instance.def_id()),
209 key.param_env,
210 CanAccessStatics::from(is_static),
211 );
212
213 let mplace = ecx.raw_const_to_mplace(constant).expect(
214 "can only fail if layout computation failed, \
215 which should have given a good error before ever invoking this function",
216 );
217 assert!(
218 !is_static || cid.promoted.is_some(),
219 "the `eval_to_const_value_raw` query should not be used for statics, use `eval_to_allocation` instead"
220 );
221
222 // Turn this into a proper constant.
223 op_to_const(&ecx, &mplace.into())
224 }
225
/// Query provider for `eval_to_const_value_raw`: evaluates a constant all the
/// way down to a `ConstValue`, special-casing zero-argument intrinsics.
#[instrument(skip(tcx), level = "debug")]
pub fn eval_to_const_value_raw_provider<'tcx>(
    tcx: TyCtxt<'tcx>,
    key: ty::ParamEnvAnd<'tcx, GlobalId<'tcx>>,
) -> ::rustc_middle::mir::interpret::EvalToConstValueResult<'tcx> {
    assert!(key.param_env.is_const());
    // see comment in eval_to_allocation_raw_provider for what we're doing here
    if key.param_env.reveal() == Reveal::All {
        let mut key = key;
        key.param_env = key.param_env.with_user_facing();
        match tcx.eval_to_const_value_raw(key) {
            // try again with reveal all as requested
            Err(ErrorHandled::TooGeneric) => {}
            // deduplicate calls
            other => return other,
        }
    }

    // We call `const_eval` for zero arg intrinsics, too, in order to cache their value.
    // Catch such calls and evaluate them instead of trying to load a constant's MIR.
    if let ty::InstanceDef::Intrinsic(def_id) = key.value.instance.def {
        let ty = key.value.instance.ty(tcx, key.param_env);
        let ty::FnDef(_, substs) = ty.kind() else {
            bug!("intrinsic with type {:?}", ty);
        };
        return eval_nullary_intrinsic(tcx, key.param_env, def_id, substs).map_err(|error| {
            let span = tcx.def_span(def_id);

            // Report at the intrinsic's definition site; there is no
            // interpreter stack to supply frames, hence the empty vec.
            super::report(
                tcx,
                error.into_kind(),
                Some(span),
                || (span, vec![]),
                |span, _| errors::NullaryIntrinsicError { span },
            )
        });
    }

    // Ordinary constants: evaluate to an allocation first, then convert that
    // allocation into a `ConstValue`.
    tcx.eval_to_allocation_raw(key).map(|val| turn_into_const_value(tcx, val, key))
}
266
/// Query provider for `eval_to_allocation_raw`: evaluates a constant or static
/// to its final allocation and validates the result before returning it.
#[instrument(skip(tcx), level = "debug")]
pub fn eval_to_allocation_raw_provider<'tcx>(
    tcx: TyCtxt<'tcx>,
    key: ty::ParamEnvAnd<'tcx, GlobalId<'tcx>>,
) -> ::rustc_middle::mir::interpret::EvalToAllocationRawResult<'tcx> {
    assert!(key.param_env.is_const());
    // Because the constant is computed twice (once per value of `Reveal`), we are at risk of
    // reporting the same error twice here. To resolve this, we check whether we can evaluate the
    // constant in the more restrictive `Reveal::UserFacing`, which most likely already was
    // computed. For a large percentage of constants that will already have succeeded. Only
    // associated constants of generic functions will fail due to not enough monomorphization
    // information being available.

    // In case we fail in the `UserFacing` variant, we just do the real computation.
    if key.param_env.reveal() == Reveal::All {
        let mut key = key;
        key.param_env = key.param_env.with_user_facing();
        match tcx.eval_to_allocation_raw(key) {
            // try again with reveal all as requested
            Err(ErrorHandled::TooGeneric) => {}
            // deduplicate calls
            other => return other,
        }
    }
    if cfg!(debug_assertions) {
        // Make sure we format the instance even if we do not print it.
        // This serves as a regression test against an ICE on printing.
        // The next two lines concatenated contain some discussion:
        // https://rust-lang.zulipchat.com/#narrow/stream/146212-t-compiler.2Fconst-eval/
        // subject/anon_const_instance_printing/near/135980032
        let instance = with_no_trimmed_paths!(key.value.instance.to_string());
        trace!("const eval: {:?} ({})", key, instance);
    }

    let cid = key.value;
    let def = cid.instance.def.def_id();
    let is_static = tcx.is_static(def);

    let mut ecx = InterpCx::new(
        tcx,
        tcx.def_span(def),
        key.param_env,
        // Statics (and promoteds inside statics) may access other statics, because unlike consts
        // they do not have to behave "as if" they were evaluated at runtime.
        CompileTimeInterpreter::new(
            CanAccessStatics::from(is_static),
            // Misalignment is a hard error only under `-Zextra-const-ub-checks`;
            // otherwise it goes through the future-incompatibility machinery.
            if tcx.sess.opts.unstable_opts.extra_const_ub_checks {
                CheckAlignment::Error
            } else {
                CheckAlignment::FutureIncompat
            },
        ),
    );

    // Load the MIR of the item (or of the requested promoted within it) and
    // run the interpreter over it.
    let res = ecx.load_mir(cid.instance.def, cid.promoted);
    match res.and_then(|body| eval_body_using_ecx(&mut ecx, cid, &body)) {
        Err(error) => {
            let (error, backtrace) = error.into_parts();
            backtrace.print_backtrace();

            let (kind, instance) = if is_static {
                ("static", String::new())
            } else {
                // If the current item has generics, we'd like to enrich the message with the
                // instance and its substs: to show the actual compile-time values, in addition to
                // the expression, leading to the const eval error.
                let instance = &key.value.instance;
                if !instance.substs.is_empty() {
                    let instance = with_no_trimmed_paths!(instance.to_string());
                    ("const_with_path", instance)
                } else {
                    ("const", String::new())
                }
            };

            Err(super::report(
                *ecx.tcx,
                error,
                None,
                || super::get_span_and_frames(&ecx),
                |span, frames| ConstEvalError {
                    span,
                    error_kind: kind,
                    instance,
                    frame_notes: frames,
                },
            ))
        }
        Ok(mplace) => {
            // Since evaluation had no errors, validate the resulting constant.
            // This is a separate `try` block to provide more targeted error reporting.
            let validation: Result<_, InterpErrorInfo<'_>> = try {
                let mut ref_tracking = RefTracking::new(mplace);
                // `inner` is `false` only for the root value; every later item
                // on the worklist was reached through a reference.
                let mut inner = false;
                while let Some((mplace, path)) = ref_tracking.todo.pop() {
                    let mode = match tcx.static_mutability(cid.instance.def_id()) {
                        Some(_) if cid.promoted.is_some() => {
                            // Promoteds in statics are allowed to point to statics.
                            CtfeValidationMode::Const { inner, allow_static_ptrs: true }
                        }
                        Some(_) => CtfeValidationMode::Regular, // a `static`
                        None => CtfeValidationMode::Const { inner, allow_static_ptrs: false },
                    };
                    ecx.const_validate_operand(&mplace.into(), path, &mut ref_tracking, mode)?;
                    inner = true;
                }
            };
            // The result was interned by `eval_body_using_ecx`, so its pointer
            // is backed by a global allocation and carries provenance.
            let alloc_id = mplace.ptr.provenance.unwrap();

            // Validation failed, report an error. This is always a hard error.
            if let Err(error) = validation {
                let (error, backtrace) = error.into_parts();
                backtrace.print_backtrace();

                // `Some(())` iff the failure was UB; fills the diagnostic's
                // `ub_note` field.
                let ub_note = matches!(error, InterpError::UndefinedBehavior(_)).then(|| {});

                // Include a hex-dump of the offending allocation in the error
                // (empty string for zero-sized allocations).
                let alloc = ecx.tcx.global_alloc(alloc_id).unwrap_memory().inner();
                let mut bytes = String::new();
                if alloc.size() != abi::Size::ZERO {
                    bytes = "\n".into();
                    // FIXME(translation) there might be pieces that are translatable.
                    write_allocation_bytes(*ecx.tcx, alloc, &mut bytes, " ").unwrap();
                }
                let raw_bytes = errors::RawBytesNote {
                    size: alloc.size().bytes(),
                    align: alloc.align.bytes(),
                    bytes,
                };

                Err(super::report(
                    *ecx.tcx,
                    error,
                    None,
                    || super::get_span_and_frames(&ecx),
                    move |span, frames| errors::UndefinedBehavior {
                        span,
                        ub_note,
                        frames,
                        raw_bytes,
                    },
                ))
            } else {
                // Convert to raw constant
                Ok(ConstAlloc { alloc_id, ty: mplace.layout.ty })
            }
        }
    }
}
415