1 //! This module provides a MIR interpreter, which is used in const eval.
2
3 use std::{borrow::Cow, collections::HashMap, fmt::Write, iter, ops::Range};
4
5 use base_db::{CrateId, FileId};
6 use chalk_ir::Mutability;
7 use either::Either;
8 use hir_def::{
9 builtin_type::BuiltinType,
10 data::adt::{StructFlags, VariantData},
11 lang_item::{lang_attr, LangItem},
12 layout::{TagEncoding, Variants},
13 AdtId, DefWithBodyId, EnumVariantId, FunctionId, HasModule, ItemContainerId, Lookup, StaticId,
14 VariantId,
15 };
16 use hir_expand::InFile;
17 use intern::Interned;
18 use la_arena::ArenaMap;
19 use rustc_hash::{FxHashMap, FxHashSet};
20 use stdx::never;
21 use syntax::{SyntaxNodePtr, TextRange};
22 use triomphe::Arc;
23
24 use crate::{
25 consteval::{intern_const_scalar, try_const_usize, ConstEvalError},
26 db::HirDatabase,
27 display::{ClosureStyle, HirDisplay},
28 infer::PointerCast,
29 layout::{Layout, LayoutError, RustcEnumVariantIdx},
30 mapping::from_chalk,
31 method_resolution::{is_dyn_method, lookup_impl_method},
32 name, static_lifetime,
33 traits::FnTrait,
34 utils::{detect_variant_from_bytes, ClosureSubst},
35 CallableDefId, ClosureId, Const, ConstScalar, FnDefId, GenericArgData, Interner, MemoryMap,
36 Substitution, TraitEnvironment, Ty, TyBuilder, TyExt, TyKind,
37 };
38
39 use super::{
40 return_slot, AggregateKind, BinOp, CastKind, LocalId, MirBody, MirLowerError, MirSpan, Operand,
41 Place, ProjectionElem, Rvalue, StatementKind, TerminatorKind, UnOp,
42 };
43
44 mod shim;
45 #[cfg(test)]
46 mod tests;
47
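// Reads a little-endian value of type `$ty` from a byte slice, returning a `TypeError` if the slice length does not match.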
48 macro_rules! from_bytes {
49 ($ty:tt, $value:expr) => {
50 ($ty::from_le_bytes(match ($value).try_into() {
51 Ok(x) => x,
52 Err(_) => return Err(MirEvalError::TypeError(stringify!(mismatched size in constructing $ty))),
53 }))
54 };
55 }
56
57 macro_rules! not_supported {
58 ($x: expr) => {
59 return Err(MirEvalError::NotSupported(format!($x)))
60 };
61 }
62
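/// Interns types as small integer ids. The interpreter has no real function or vtable
/// pointers, so it stores these ids in memory instead and resolves them back to types on use.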
63 #[derive(Debug, Default, Clone, PartialEq, Eq)]
64 pub struct VTableMap {
65 ty_to_id: FxHashMap<Ty, usize>,
66 id_to_ty: Vec<Ty>,
67 }
68
69 impl VTableMap {
70 fn id(&mut self, ty: Ty) -> usize {
71 if let Some(x) = self.ty_to_id.get(&ty) {
72 return *x;
73 }
74 let id = self.id_to_ty.len();
75 self.id_to_ty.push(ty.clone());
76 self.ty_to_id.insert(ty, id);
77 id
78 }
79
80 pub(crate) fn ty(&self, id: usize) -> Result<&Ty> {
81 self.id_to_ty.get(id).ok_or(MirEvalError::InvalidVTableId(id))
82 }
83
84 fn ty_of_bytes(&self, bytes: &[u8]) -> Result<&Ty> {
85 let id = from_bytes!(usize, bytes);
86 self.ty(id)
87 }
88 }
89
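/// A minimal model of thread-local storage: each key is an index into `keys` and holds a single `u128` value.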
90 #[derive(Debug, Default, Clone, PartialEq, Eq)]
91 struct TlsData {
92 keys: Vec<u128>,
93 }
94
95 impl TlsData {
96 fn create_key(&mut self) -> usize {
97 self.keys.push(0);
98 self.keys.len() - 1
99 }
100
101 fn get_key(&mut self, key: usize) -> Result<u128> {
102 let r = self.keys.get(key).ok_or_else(|| {
103 MirEvalError::UndefinedBehavior(format!("Getting invalid tls key {key}"))
104 })?;
105 Ok(*r)
106 }
107
108 fn set_key(&mut self, key: usize, value: u128) -> Result<()> {
109 let r = self.keys.get_mut(key).ok_or_else(|| {
110 MirEvalError::UndefinedBehavior(format!("Setting invalid tls key {key}"))
111 })?;
112 *r = value;
113 Ok(())
114 }
115 }
116
117 pub struct Evaluator<'a> {
118 db: &'a dyn HirDatabase,
119 trait_env: Arc<TraitEnvironment>,
120 stack: Vec<u8>,
121 heap: Vec<u8>,
122 /// Stores the global location of the statics. We const-evaluate a static the first time we need it
123 /// and find it missing here, then add it to this map so it can be reused.
124 static_locations: FxHashMap<StaticId, Address>,
125 /// We don't really have function pointers, i.e. pointers to some assembly instructions that we can run. Instead, we
126 /// store the type as an interned id in place of function and vtable pointers, and we recover back the type at the
127 /// time of use.
128 vtable_map: VTableMap,
129 thread_local_storage: TlsData,
130 stdout: Vec<u8>,
131 stderr: Vec<u8>,
132 crate_id: CrateId,
133 // FIXME: This is a workaround, see the comment on `interpret_mir`
134 assert_placeholder_ty_is_unused: bool,
135 /// A general limit on execution, to prevent non-terminating programs from breaking the r-a main process
136 execution_limit: usize,
137 /// An additional limit on stack depth, to prevent stack overflow
138 stack_depth_limit: usize,
139 }
140
141 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
142 enum Address {
143 Stack(usize),
144 Heap(usize),
145 Invalid(usize),
146 }
147
148 use Address::*;
149
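/// A contiguous region of interpreter memory, described by a start address and a size in bytes.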
150 #[derive(Debug, Clone, Copy)]
151 struct Interval {
152 addr: Address,
153 size: usize,
154 }
155
156 #[derive(Debug, Clone)]
157 struct IntervalAndTy {
158 interval: Interval,
159 ty: Ty,
160 }
161
162 impl Interval {
163 fn new(addr: Address, size: usize) -> Self {
164 Self { addr, size }
165 }
166
167 fn get<'a>(&self, memory: &'a Evaluator<'a>) -> Result<&'a [u8]> {
168 memory.read_memory(self.addr, self.size)
169 }
170
171 fn write_from_bytes(&self, memory: &mut Evaluator<'_>, bytes: &[u8]) -> Result<()> {
172 memory.write_memory(self.addr, bytes)
173 }
174
175 fn write_from_interval(&self, memory: &mut Evaluator<'_>, interval: Interval) -> Result<()> {
176 // FIXME: this could be more efficient
177 let bytes = &interval.get(memory)?.to_vec();
178 memory.write_memory(self.addr, bytes)
179 }
180
181 fn slice(self, range: Range<usize>) -> Interval {
182 Interval { addr: self.addr.offset(range.start), size: range.len() }
183 }
184 }
185
186 impl IntervalAndTy {
187 fn get<'a>(&self, memory: &'a Evaluator<'a>) -> Result<&'a [u8]> {
188 memory.read_memory(self.interval.addr, self.interval.size)
189 }
190
191 fn new(
192 addr: Address,
193 ty: Ty,
194 evaluator: &Evaluator<'_>,
195 locals: &Locals<'_>,
196 ) -> Result<IntervalAndTy> {
197 let size = evaluator.size_of_sized(&ty, locals, "type of interval")?;
198 Ok(IntervalAndTy { interval: Interval { addr, size }, ty })
199 }
200 }
201
202 enum IntervalOrOwned {
203 Owned(Vec<u8>),
204 Borrowed(Interval),
205 }
206
207 impl From<Interval> for IntervalOrOwned {
208 fn from(it: Interval) -> IntervalOrOwned {
209 IntervalOrOwned::Borrowed(it)
210 }
211 }
212
213 impl IntervalOrOwned {
214 pub(crate) fn to_vec(self, memory: &Evaluator<'_>) -> Result<Vec<u8>> {
215 Ok(match self {
216 IntervalOrOwned::Owned(o) => o,
217 IntervalOrOwned::Borrowed(b) => b.get(memory)?.to_vec(),
218 })
219 }
220
221 fn get<'a>(&'a self, memory: &'a Evaluator<'a>) -> Result<&'a [u8]> {
222 Ok(match self {
223 IntervalOrOwned::Owned(o) => o,
224 IntervalOrOwned::Borrowed(b) => b.get(memory)?,
225 })
226 }
227 }
228
229 impl Address {
230 fn from_bytes(x: &[u8]) -> Result<Self> {
231 Ok(Address::from_usize(from_bytes!(usize, x)))
232 }
233
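// Addresses are packed into a single `usize`: values in the top half of the address space are
// stack addresses, the quarter below that is the heap, and anything lower is invalid.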
234 fn from_usize(x: usize) -> Self {
235 if x > usize::MAX / 2 {
236 Stack(x - usize::MAX / 2)
237 } else if x > usize::MAX / 4 {
238 Heap(x - usize::MAX / 4)
239 } else {
240 Invalid(x)
241 }
242 }
243
244 fn to_bytes(&self) -> Vec<u8> {
245 usize::to_le_bytes(self.to_usize()).to_vec()
246 }
247
248 fn to_usize(&self) -> usize {
249 let as_num = match self {
250 Stack(x) => *x + usize::MAX / 2,
251 Heap(x) => *x + usize::MAX / 4,
252 Invalid(x) => *x,
253 };
254 as_num
255 }
256
257 fn map(&self, f: impl FnOnce(usize) -> usize) -> Address {
258 match self {
259 Stack(x) => Stack(f(*x)),
260 Heap(x) => Heap(f(*x)),
261 Invalid(x) => Invalid(f(*x)),
262 }
263 }
264
265 fn offset(&self, offset: usize) -> Address {
266 self.map(|x| x + offset)
267 }
268 }
269
270 #[derive(Clone, PartialEq, Eq)]
271 pub enum MirEvalError {
272 ConstEvalError(String, Box<ConstEvalError>),
273 LayoutError(LayoutError, Ty),
274 /// Means that code had type errors (or mismatched args) and we shouldn't have generated MIR in the first place.
275 TypeError(&'static str),
276 /// Means that the code had undefined behavior. We don't try to actively detect UB, but when it is
277 /// detected we use this error variant.
278 UndefinedBehavior(String),
279 Panic(String),
280 MirLowerError(FunctionId, MirLowerError),
281 MirLowerErrorForClosure(ClosureId, MirLowerError),
282 TypeIsUnsized(Ty, &'static str),
283 NotSupported(String),
284 InvalidConst(Const),
285 InFunction(Either<FunctionId, ClosureId>, Box<MirEvalError>, MirSpan, DefWithBodyId),
286 ExecutionLimitExceeded,
287 StackOverflow,
288 TargetDataLayoutNotAvailable,
289 InvalidVTableId(usize),
290 CoerceUnsizedError(Ty),
291 LangItemNotFound(LangItem),
292 }
293
294 impl MirEvalError {
295 pub fn pretty_print(
296 &self,
297 f: &mut String,
298 db: &dyn HirDatabase,
299 span_formatter: impl Fn(FileId, TextRange) -> String,
300 ) -> std::result::Result<(), std::fmt::Error> {
301 writeln!(f, "Mir eval error:")?;
302 let mut err = self;
303 while let MirEvalError::InFunction(func, e, span, def) = err {
304 err = e;
305 match func {
306 Either::Left(func) => {
307 let function_name = db.function_data(*func);
308 writeln!(
309 f,
310 "In function {} ({:?})",
311 function_name.name.display(db.upcast()),
312 func
313 )?;
314 }
315 Either::Right(clos) => {
316 writeln!(f, "In {:?}", clos)?;
317 }
318 }
319 let source_map = db.body_with_source_map(*def).1;
320 let span: InFile<SyntaxNodePtr> = match span {
321 MirSpan::ExprId(e) => match source_map.expr_syntax(*e) {
322 Ok(s) => s.map(|x| x.into()),
323 Err(_) => continue,
324 },
325 MirSpan::PatId(p) => match source_map.pat_syntax(*p) {
326 Ok(s) => s.map(|x| match x {
327 Either::Left(e) => e.into(),
328 Either::Right(e) => e.into(),
329 }),
330 Err(_) => continue,
331 },
332 MirSpan::Unknown => continue,
333 };
334 let file_id = span.file_id.original_file(db.upcast());
335 let text_range = span.value.text_range();
336 writeln!(f, "{}", span_formatter(file_id, text_range))?;
337 }
338 match err {
339 MirEvalError::InFunction(..) => unreachable!(),
340 MirEvalError::LayoutError(err, ty) => {
341 write!(
342 f,
343 "Layout for type `{}` is not available due {err:?}",
344 ty.display(db).with_closure_style(ClosureStyle::ClosureWithId).to_string()
345 )?;
346 }
347 MirEvalError::MirLowerError(func, err) => {
348 let function_name = db.function_data(*func);
349 writeln!(
350 f,
351 "MIR lowering for function `{}` ({:?}) failed due:",
352 function_name.name.display(db.upcast()),
353 func
354 )?;
355 err.pretty_print(f, db, span_formatter)?;
356 }
357 MirEvalError::ConstEvalError(name, err) => {
358 MirLowerError::ConstEvalError(name.clone(), err.clone()).pretty_print(
359 f,
360 db,
361 span_formatter,
362 )?;
363 }
364 MirEvalError::TypeError(_)
365 | MirEvalError::UndefinedBehavior(_)
366 | MirEvalError::Panic(_)
367 | MirEvalError::MirLowerErrorForClosure(_, _)
368 | MirEvalError::TypeIsUnsized(_, _)
369 | MirEvalError::NotSupported(_)
370 | MirEvalError::InvalidConst(_)
371 | MirEvalError::ExecutionLimitExceeded
372 | MirEvalError::StackOverflow
373 | MirEvalError::TargetDataLayoutNotAvailable
374 | MirEvalError::CoerceUnsizedError(_)
375 | MirEvalError::LangItemNotFound(_)
376 | MirEvalError::InvalidVTableId(_) => writeln!(f, "{:?}", err)?,
377 }
378 Ok(())
379 }
380 }
381
382 impl std::fmt::Debug for MirEvalError {
383 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
384 match self {
385 Self::ConstEvalError(arg0, arg1) => {
386 f.debug_tuple("ConstEvalError").field(arg0).field(arg1).finish()
387 }
388 Self::LangItemNotFound(arg0) => f.debug_tuple("LangItemNotFound").field(arg0).finish(),
389 Self::LayoutError(arg0, arg1) => {
390 f.debug_tuple("LayoutError").field(arg0).field(arg1).finish()
391 }
392 Self::TypeError(arg0) => f.debug_tuple("TypeError").field(arg0).finish(),
393 Self::UndefinedBehavior(arg0) => {
394 f.debug_tuple("UndefinedBehavior").field(arg0).finish()
395 }
396 Self::Panic(msg) => write!(f, "Panic with message:\n{msg:?}"),
397 Self::TargetDataLayoutNotAvailable => write!(f, "TargetDataLayoutNotAvailable"),
398 Self::TypeIsUnsized(ty, it) => write!(f, "{ty:?} is unsized. {it} should be sized."),
399 Self::ExecutionLimitExceeded => write!(f, "execution limit exceeded"),
400 Self::StackOverflow => write!(f, "stack overflow"),
401 Self::MirLowerError(arg0, arg1) => {
402 f.debug_tuple("MirLowerError").field(arg0).field(arg1).finish()
403 }
404 Self::MirLowerErrorForClosure(arg0, arg1) => {
405 f.debug_tuple("MirLowerError").field(arg0).field(arg1).finish()
406 }
407 Self::CoerceUnsizedError(arg0) => {
408 f.debug_tuple("CoerceUnsizedError").field(arg0).finish()
409 }
410 Self::InvalidVTableId(arg0) => f.debug_tuple("InvalidVTableId").field(arg0).finish(),
411 Self::NotSupported(arg0) => f.debug_tuple("NotSupported").field(arg0).finish(),
412 Self::InvalidConst(arg0) => {
413 let data = &arg0.data(Interner);
414 f.debug_struct("InvalidConst").field("ty", &data.ty).field("value", &arg0).finish()
415 }
416 Self::InFunction(func, e, span, _) => {
417 let mut e = &**e;
418 let mut stack = vec![(*func, *span)];
419 while let Self::InFunction(f, next_e, span, _) = e {
420 e = &next_e;
421 stack.push((*f, *span));
422 }
423 f.debug_struct("WithStack").field("error", e).field("stack", &stack).finish()
424 }
425 }
426 }
427 }
428
429 type Result<T> = std::result::Result<T, MirEvalError>;
430
431 #[derive(Debug, Default)]
432 struct DropFlags {
433 need_drop: FxHashSet<Place>,
434 }
435
436 impl DropFlags {
437 fn add_place(&mut self, p: Place) {
438 if p.iterate_over_parents().any(|x| self.need_drop.contains(&x)) {
439 return;
440 }
441 self.need_drop.retain(|x| !p.is_parent(x));
442 self.need_drop.insert(p);
443 }
444
445 fn remove_place(&mut self, p: &Place) -> bool {
446 // FIXME: replace parents with parts
447 self.need_drop.remove(p)
448 }
449 }
450
451 #[derive(Debug)]
452 struct Locals<'a> {
453 ptr: &'a ArenaMap<LocalId, Interval>,
454 body: &'a MirBody,
455 drop_flags: DropFlags,
456 }
457
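/// Interprets the given MIR body and returns the computed value as a constant, together with
/// everything the interpreted program wrote to stdout and stderr.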
458 pub fn interpret_mir(
459 db: &dyn HirDatabase,
460 body: &MirBody,
461 // FIXME: This is a workaround. Ideally, const generics should have a separate body (issue #7434), but for now
462 // they share their body with their parent, so in MIR lowering we have locals of the parent body, which
463 // might have placeholders. With this argument, we (wrongly) assume that every placeholder type has
464 // a zero size, hoping that they are all outside of our current body. Even without a fix for #7434, we can
465 // (and probably should) do better here, for example by excluding bindings outside of the target expression.
466 assert_placeholder_ty_is_unused: bool,
467 ) -> (Result<Const>, String, String) {
468 let ty = body.locals[return_slot()].ty.clone();
469 let mut evaluator = Evaluator::new(db, body, assert_placeholder_ty_is_unused);
470 let x: Result<Const> = (|| {
471 let bytes = evaluator.interpret_mir(&body, None.into_iter())?;
472 let mut memory_map = evaluator.create_memory_map(
473 &bytes,
474 &ty,
475 &Locals { ptr: &ArenaMap::new(), body: &body, drop_flags: DropFlags::default() },
476 )?;
477 memory_map.vtable = evaluator.vtable_map.clone();
478 return Ok(intern_const_scalar(ConstScalar::Bytes(bytes, memory_map), ty));
479 })();
480 (
481 x,
482 String::from_utf8_lossy(&evaluator.stdout).into_owned(),
483 String::from_utf8_lossy(&evaluator.stderr).into_owned(),
484 )
485 }
486
487 impl Evaluator<'_> {
488 pub fn new<'a>(
489 db: &'a dyn HirDatabase,
490 body: &MirBody,
491 assert_placeholder_ty_is_unused: bool,
492 ) -> Evaluator<'a> {
493 let crate_id = body.owner.module(db.upcast()).krate();
494 let trait_env = db.trait_environment_for_body(body.owner);
495 Evaluator {
496 stack: vec![0],
497 heap: vec![0],
498 vtable_map: VTableMap::default(),
499 thread_local_storage: TlsData::default(),
500 static_locations: HashMap::default(),
501 db,
502 trait_env,
503 crate_id,
504 stdout: vec![],
505 stderr: vec![],
506 assert_placeholder_ty_is_unused,
507 stack_depth_limit: 100,
508 execution_limit: 1000_000,
509 }
510 }
511
512 fn place_addr(&self, p: &Place, locals: &Locals<'_>) -> Result<Address> {
513 Ok(self.place_addr_and_ty_and_metadata(p, locals)?.0)
514 }
515
516 fn place_interval(&self, p: &Place, locals: &Locals<'_>) -> Result<Interval> {
517 let place_addr_and_ty = self.place_addr_and_ty_and_metadata(p, locals)?;
518 Ok(Interval {
519 addr: place_addr_and_ty.0,
520 size: self.size_of_sized(
521 &place_addr_and_ty.1,
522 locals,
523 "Type of place that we need its interval",
524 )?,
525 })
526 }
527
528 fn ptr_size(&self) -> usize {
529 match self.db.target_data_layout(self.crate_id) {
530 Some(x) => x.pointer_size.bytes_usize(),
531 None => 8,
532 }
533 }
534
535 fn place_addr_and_ty_and_metadata<'a>(
536 &'a self,
537 p: &Place,
538 locals: &'a Locals<'a>,
539 ) -> Result<(Address, Ty, Option<IntervalOrOwned>)> {
540 let mut addr = locals.ptr[p.local].addr;
541 let mut ty: Ty = locals.body.locals[p.local].ty.clone();
542 let mut metadata: Option<IntervalOrOwned> = None; // locals are always sized
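// Walk the projection chain, updating the address, the projected type and (for unsized
// places) the wide-pointer metadata at each step.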
543 for proj in &*p.projection {
544 let prev_ty = ty.clone();
545 ty = proj.projected_ty(
546 ty,
547 self.db,
548 |c, subst, f| {
549 let (def, _) = self.db.lookup_intern_closure(c.into());
550 let infer = self.db.infer(def);
551 let (captures, _) = infer.closure_info(&c);
552 let parent_subst = ClosureSubst(subst).parent_subst();
553 captures
554 .get(f)
555 .expect("broken closure field")
556 .ty
557 .clone()
558 .substitute(Interner, parent_subst)
559 },
560 self.crate_id,
561 );
562 match proj {
563 ProjectionElem::Deref => {
564 metadata = if self.size_align_of(&ty, locals)?.is_none() {
565 Some(
566 Interval { addr: addr.offset(self.ptr_size()), size: self.ptr_size() }
567 .into(),
568 )
569 } else {
570 None
571 };
572 let x = from_bytes!(usize, self.read_memory(addr, self.ptr_size())?);
573 addr = Address::from_usize(x);
574 }
575 ProjectionElem::Index(op) => {
576 let offset = from_bytes!(
577 usize,
578 self.read_memory(locals.ptr[*op].addr, self.ptr_size())?
579 );
580 metadata = None; // Result of index is always sized
581 let ty_size =
582 self.size_of_sized(&ty, locals, "array inner type should be sized")?;
583 addr = addr.offset(ty_size * offset);
584 }
585 &ProjectionElem::ConstantIndex { from_end, offset } => {
586 let offset = if from_end {
587 let len = match prev_ty.kind(Interner) {
588 TyKind::Array(_, c) => match try_const_usize(self.db, c) {
589 Some(x) => x as u64,
590 None => {
591 not_supported!("indexing array with unknown const from end")
592 }
593 },
594 TyKind::Slice(_) => match metadata {
595 Some(x) => from_bytes!(u64, x.get(self)?),
596 None => not_supported!("slice place without metadata"),
597 },
598 _ => not_supported!("bad type for const index"),
599 };
600 (len - offset - 1) as usize
601 } else {
602 offset as usize
603 };
604 metadata = None; // Result of index is always sized
605 let ty_size =
606 self.size_of_sized(&ty, locals, "array inner type should be sized")?;
607 addr = addr.offset(ty_size * offset);
608 }
609 &ProjectionElem::Subslice { from, to } => {
610 let inner_ty = match &ty.data(Interner).kind {
611 TyKind::Array(inner, _) | TyKind::Slice(inner) => inner.clone(),
612 _ => TyKind::Error.intern(Interner),
613 };
614 metadata = match metadata {
615 Some(x) => {
616 let prev_len = from_bytes!(u64, x.get(self)?);
617 Some(IntervalOrOwned::Owned(
618 (prev_len - from - to).to_le_bytes().to_vec(),
619 ))
620 }
621 None => None,
622 };
623 let ty_size =
624 self.size_of_sized(&inner_ty, locals, "array inner type should be sized")?;
625 addr = addr.offset(ty_size * (from as usize));
626 }
627 &ProjectionElem::TupleOrClosureField(f) => {
628 let layout = self.layout(&prev_ty)?;
629 let offset = layout.fields.offset(f).bytes_usize();
630 addr = addr.offset(offset);
631 metadata = None; // tuple field is always sized
632 }
633 ProjectionElem::Field(f) => {
634 let layout = self.layout(&prev_ty)?;
635 let variant_layout = match &layout.variants {
636 Variants::Single { .. } => &layout,
637 Variants::Multiple { variants, .. } => {
638 &variants[match f.parent {
639 hir_def::VariantId::EnumVariantId(x) => {
640 RustcEnumVariantIdx(x.local_id)
641 }
642 _ => {
643 return Err(MirEvalError::TypeError(
644 "Multivariant layout only happens for enums",
645 ))
646 }
647 }]
648 }
649 };
650 let offset = variant_layout
651 .fields
652 .offset(u32::from(f.local_id.into_raw()) as usize)
653 .bytes_usize();
654 addr = addr.offset(offset);
655 // FIXME: support structs with unsized fields
656 metadata = None;
657 }
658 ProjectionElem::OpaqueCast(_) => not_supported!("opaque cast"),
659 }
660 }
661 Ok((addr, ty, metadata))
662 }
663
664 fn layout(&self, ty: &Ty) -> Result<Arc<Layout>> {
665 self.db
666 .layout_of_ty(ty.clone(), self.crate_id)
667 .map_err(|e| MirEvalError::LayoutError(e, ty.clone()))
668 }
669
670 fn layout_adt(&self, adt: AdtId, subst: Substitution) -> Result<Arc<Layout>> {
671 self.db.layout_of_adt(adt, subst.clone(), self.crate_id).map_err(|e| {
672 MirEvalError::LayoutError(e, TyKind::Adt(chalk_ir::AdtId(adt), subst).intern(Interner))
673 })
674 }
675
676 fn place_ty<'a>(&'a self, p: &Place, locals: &'a Locals<'a>) -> Result<Ty> {
677 Ok(self.place_addr_and_ty_and_metadata(p, locals)?.1)
678 }
679
680 fn operand_ty(&self, o: &Operand, locals: &Locals<'_>) -> Result<Ty> {
681 Ok(match o {
682 Operand::Copy(p) | Operand::Move(p) => self.place_ty(p, locals)?,
683 Operand::Constant(c) => c.data(Interner).ty.clone(),
684 &Operand::Static(s) => {
685 let ty = self.db.infer(s.into())[self.db.body(s.into()).body_expr].clone();
686 TyKind::Ref(Mutability::Not, static_lifetime(), ty).intern(Interner)
687 }
688 })
689 }
690
691 fn operand_ty_and_eval(
692 &mut self,
693 o: &Operand,
694 locals: &mut Locals<'_>,
695 ) -> Result<IntervalAndTy> {
696 Ok(IntervalAndTy {
697 interval: self.eval_operand(o, locals)?,
698 ty: self.operand_ty(o, locals)?,
699 })
700 }
701
702 fn interpret_mir(
703 &mut self,
704 body: &MirBody,
705 args: impl Iterator<Item = Vec<u8>>,
706 ) -> Result<Vec<u8>> {
707 if let Some(x) = self.stack_depth_limit.checked_sub(1) {
708 self.stack_depth_limit = x;
709 } else {
710 return Err(MirEvalError::StackOverflow);
711 }
712 let mut current_block_idx = body.start_block;
713 let mut locals =
714 Locals { ptr: &ArenaMap::new(), body: &body, drop_flags: DropFlags::default() };
715 let (locals_ptr, stack_size) = {
716 let mut stack_ptr = self.stack.len();
717 let addr = body
718 .locals
719 .iter()
720 .map(|(id, x)| {
721 let size =
722 self.size_of_sized(&x.ty, &locals, "no unsized local in extending stack")?;
723 let my_ptr = stack_ptr;
724 stack_ptr += size;
725 Ok((id, Interval { addr: Stack(my_ptr), size }))
726 })
727 .collect::<Result<ArenaMap<LocalId, _>>>()?;
728 let stack_size = stack_ptr - self.stack.len();
729 (addr, stack_size)
730 };
731 locals.ptr = &locals_ptr;
732 self.stack.extend(iter::repeat(0).take(stack_size));
733 let mut remain_args = body.param_locals.len();
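// Local 0 is the return slot and the parameter locals come right after it, so skip it
// when copying the arguments in.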
734 for ((l, interval), value) in locals_ptr.iter().skip(1).zip(args) {
735 locals.drop_flags.add_place(l.into());
736 interval.write_from_bytes(self, &value)?;
737 if remain_args == 0 {
738 return Err(MirEvalError::TypeError("more arguments provided"));
739 }
740 remain_args -= 1;
741 }
742 if remain_args > 0 {
743 return Err(MirEvalError::TypeError("not enough arguments provided"));
744 }
745 loop {
746 let current_block = &body.basic_blocks[current_block_idx];
747 if let Some(x) = self.execution_limit.checked_sub(1) {
748 self.execution_limit = x;
749 } else {
750 return Err(MirEvalError::ExecutionLimitExceeded);
751 }
752 for statement in &current_block.statements {
753 match &statement.kind {
754 StatementKind::Assign(l, r) => {
755 let addr = self.place_addr(l, &locals)?;
756 let result = self.eval_rvalue(r, &mut locals)?.to_vec(&self)?;
757 self.write_memory(addr, &result)?;
758 locals.drop_flags.add_place(l.clone());
759 }
760 StatementKind::Deinit(_) => not_supported!("de-init statement"),
761 StatementKind::StorageLive(_)
762 | StatementKind::StorageDead(_)
763 | StatementKind::Nop => (),
764 }
765 }
766 let Some(terminator) = current_block.terminator.as_ref() else {
767 not_supported!("block without terminator");
768 };
769 match &terminator.kind {
770 TerminatorKind::Goto { target } => {
771 current_block_idx = *target;
772 }
773 TerminatorKind::Call {
774 func,
775 args,
776 destination,
777 target,
778 cleanup: _,
779 from_hir_call: _,
780 } => {
781 let destination_interval = self.place_interval(destination, &locals)?;
782 let fn_ty = self.operand_ty(func, &locals)?;
783 let args = args
784 .iter()
785 .map(|x| self.operand_ty_and_eval(x, &mut locals))
786 .collect::<Result<Vec<_>>>()?;
787 match &fn_ty.data(Interner).kind {
788 TyKind::Function(_) => {
789 let bytes = self.eval_operand(func, &mut locals)?;
790 self.exec_fn_pointer(
791 bytes,
792 destination_interval,
793 &args,
794 &locals,
795 terminator.span,
796 )?;
797 }
798 TyKind::FnDef(def, generic_args) => {
799 self.exec_fn_def(
800 *def,
801 generic_args,
802 destination_interval,
803 &args,
804 &locals,
805 terminator.span,
806 )?;
807 }
808 x => not_supported!("unknown function type {x:?}"),
809 }
810 locals.drop_flags.add_place(destination.clone());
811 current_block_idx = target.expect("broken mir, function without target");
812 }
813 TerminatorKind::SwitchInt { discr, targets } => {
814 let val = u128::from_le_bytes(pad16(
815 self.eval_operand(discr, &mut locals)?.get(&self)?,
816 false,
817 ));
818 current_block_idx = targets.target_for_value(val);
819 }
820 TerminatorKind::Return => {
821 self.stack_depth_limit += 1;
822 return Ok(locals.ptr[return_slot()].get(self)?.to_vec());
823 }
824 TerminatorKind::Unreachable => {
825 return Err(MirEvalError::UndefinedBehavior("unreachable executed".to_owned()));
826 }
827 TerminatorKind::Drop { place, target, unwind: _ } => {
828 self.drop_place(place, &mut locals, terminator.span)?;
829 current_block_idx = *target;
830 }
831 _ => not_supported!("unknown terminator"),
832 }
833 }
834 }
835
836 fn eval_rvalue(&mut self, r: &Rvalue, locals: &mut Locals<'_>) -> Result<IntervalOrOwned> {
837 use IntervalOrOwned::*;
838 Ok(match r {
839 Rvalue::Use(x) => Borrowed(self.eval_operand(x, locals)?),
840 Rvalue::Ref(_, p) => {
841 let (addr, _, metadata) = self.place_addr_and_ty_and_metadata(p, locals)?;
842 let mut r = addr.to_bytes();
843 if let Some(metadata) = metadata {
844 r.extend(metadata.get(self)?);
845 }
846 Owned(r)
847 }
848 Rvalue::Len(p) => {
849 let (_, _, metadata) = self.place_addr_and_ty_and_metadata(p, locals)?;
850 match metadata {
851 Some(m) => m,
852 None => {
853 return Err(MirEvalError::TypeError(
854 "type without metadata is used for Rvalue::Len",
855 ));
856 }
857 }
858 }
859 Rvalue::UnaryOp(op, val) => {
860 let mut c = self.eval_operand(val, locals)?.get(&self)?;
861 let mut ty = self.operand_ty(val, locals)?;
862 while let TyKind::Ref(_, _, z) = ty.kind(Interner) {
863 ty = z.clone();
864 let size = self.size_of_sized(&ty, locals, "operand of unary op")?;
865 c = self.read_memory(Address::from_bytes(c)?, size)?;
866 }
867 if let TyKind::Scalar(chalk_ir::Scalar::Float(f)) = ty.kind(Interner) {
868 match f {
869 chalk_ir::FloatTy::F32 => {
870 let c = -from_bytes!(f32, c);
871 Owned(c.to_le_bytes().into())
872 }
873 chalk_ir::FloatTy::F64 => {
874 let c = -from_bytes!(f64, c);
875 Owned(c.to_le_bytes().into())
876 }
877 }
878 } else {
879 let mut c = c.to_vec();
880 if ty.as_builtin() == Some(BuiltinType::Bool) {
881 c[0] = 1 - c[0];
882 } else {
883 match op {
884 UnOp::Not => c.iter_mut().for_each(|x| *x = !*x),
885 UnOp::Neg => {
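// Two's complement negation on little-endian bytes: invert every byte, then add one,
// propagating the carry.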
886 c.iter_mut().for_each(|x| *x = !*x);
887 for k in c.iter_mut() {
888 let o;
889 (*k, o) = k.overflowing_add(1);
890 if !o {
891 break;
892 }
893 }
894 }
895 }
896 }
897 Owned(c)
898 }
899 }
900 Rvalue::CheckedBinaryOp(op, lhs, rhs) => 'binary_op: {
901 let lc = self.eval_operand(lhs, locals)?;
902 let rc = self.eval_operand(rhs, locals)?;
903 let mut lc = lc.get(&self)?;
904 let mut rc = rc.get(&self)?;
905 let mut ty = self.operand_ty(lhs, locals)?;
906 while let TyKind::Ref(_, _, z) = ty.kind(Interner) {
907 ty = z.clone();
908 let size = if ty.kind(Interner) == &TyKind::Str {
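// A `&str` operand is a fat pointer: compare the lengths stored in its second half first;
// if they match, fall through and compare the pointed-to bytes.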
909 if *op != BinOp::Eq {
910 never!("Only eq is builtin for `str`");
911 }
912 let ls = from_bytes!(usize, &lc[self.ptr_size()..self.ptr_size() * 2]);
913 let rs = from_bytes!(usize, &rc[self.ptr_size()..self.ptr_size() * 2]);
914 if ls != rs {
915 break 'binary_op Owned(vec![0]);
916 }
917 lc = &lc[..self.ptr_size()];
918 rc = &rc[..self.ptr_size()];
919 ls
920 } else {
921 self.size_of_sized(&ty, locals, "operand of binary op")?
922 };
923 lc = self.read_memory(Address::from_bytes(lc)?, size)?;
924 rc = self.read_memory(Address::from_bytes(rc)?, size)?;
925 }
926 if let TyKind::Scalar(chalk_ir::Scalar::Float(f)) = ty.kind(Interner) {
927 match f {
928 chalk_ir::FloatTy::F32 => {
929 let l = from_bytes!(f32, lc);
930 let r = from_bytes!(f32, rc);
931 match op {
932 BinOp::Ge
933 | BinOp::Gt
934 | BinOp::Le
935 | BinOp::Lt
936 | BinOp::Eq
937 | BinOp::Ne => {
938 let r = op.run_compare(l, r) as u8;
939 Owned(vec![r])
940 }
941 BinOp::Add | BinOp::Sub | BinOp::Mul | BinOp::Div => {
942 let r = match op {
943 BinOp::Add => l + r,
944 BinOp::Sub => l - r,
945 BinOp::Mul => l * r,
946 BinOp::Div => l / r,
947 _ => unreachable!(),
948 };
949 Owned(r.to_le_bytes().into())
950 }
951 x => not_supported!(
952 "invalid binop {x:?} on floating point operators"
953 ),
954 }
955 }
956 chalk_ir::FloatTy::F64 => {
957 let l = from_bytes!(f64, lc);
958 let r = from_bytes!(f64, rc);
959 match op {
960 BinOp::Ge
961 | BinOp::Gt
962 | BinOp::Le
963 | BinOp::Lt
964 | BinOp::Eq
965 | BinOp::Ne => {
966 let r = op.run_compare(l, r) as u8;
967 Owned(vec![r])
968 }
969 BinOp::Add | BinOp::Sub | BinOp::Mul | BinOp::Div => {
970 let r = match op {
971 BinOp::Add => l + r,
972 BinOp::Sub => l - r,
973 BinOp::Mul => l * r,
974 BinOp::Div => l / r,
975 _ => unreachable!(),
976 };
977 Owned(r.to_le_bytes().into())
978 }
979 x => not_supported!(
980 "invalid binop {x:?} on floating point operators"
981 ),
982 }
983 }
984 }
985 } else {
986 let is_signed = matches!(ty.as_builtin(), Some(BuiltinType::Int(_)));
987 let l128 = i128::from_le_bytes(pad16(lc, is_signed));
988 let r128 = i128::from_le_bytes(pad16(rc, is_signed));
989 let check_overflow = |r: i128| {
990 // FIXME: this is not very correct, and only catches the basic cases.
991 let r = r.to_le_bytes();
992 for &k in &r[lc.len()..] {
993 if k != 0 && (k != 255 || !is_signed) {
994 return Err(MirEvalError::Panic(format!("Overflow in {op:?}")));
995 }
996 }
997 Ok(Owned(r[0..lc.len()].into()))
998 };
999 match op {
1000 BinOp::Ge | BinOp::Gt | BinOp::Le | BinOp::Lt | BinOp::Eq | BinOp::Ne => {
1001 let r = op.run_compare(l128, r128) as u8;
1002 Owned(vec![r])
1003 }
1004 BinOp::BitAnd
1005 | BinOp::BitOr
1006 | BinOp::BitXor
1007 | BinOp::Add
1008 | BinOp::Mul
1009 | BinOp::Div
1010 | BinOp::Rem
1011 | BinOp::Sub => {
1012 let r = match op {
1013 BinOp::Add => l128.overflowing_add(r128).0,
1014 BinOp::Mul => l128.overflowing_mul(r128).0,
1015 BinOp::Div => l128.checked_div(r128).ok_or_else(|| {
1016 MirEvalError::Panic(format!("Overflow in {op:?}"))
1017 })?,
1018 BinOp::Rem => l128.checked_rem(r128).ok_or_else(|| {
1019 MirEvalError::Panic(format!("Overflow in {op:?}"))
1020 })?,
1021 BinOp::Sub => l128.overflowing_sub(r128).0,
1022 BinOp::BitAnd => l128 & r128,
1023 BinOp::BitOr => l128 | r128,
1024 BinOp::BitXor => l128 ^ r128,
1025 _ => unreachable!(),
1026 };
1027 check_overflow(r)?
1028 }
1029 BinOp::Shl | BinOp::Shr => {
1030 let r = 'b: {
1031 if let Ok(shift_amount) = u32::try_from(r128) {
1032 let r = match op {
1033 BinOp::Shl => l128.checked_shl(shift_amount),
1034 BinOp::Shr => l128.checked_shr(shift_amount),
1035 _ => unreachable!(),
1036 };
1037 if let Some(r) = r {
1038 break 'b r;
1039 }
1040 };
1041 return Err(MirEvalError::Panic(format!("Overflow in {op:?}")));
1042 };
1043 check_overflow(r)?
1044 }
1045 BinOp::Offset => not_supported!("offset binop"),
1046 }
1047 }
1048 }
1049 Rvalue::Discriminant(p) => {
1050 let ty = self.place_ty(p, locals)?;
1051 let bytes = self.eval_place(p, locals)?.get(&self)?;
1052 let layout = self.layout(&ty)?;
1053 let enum_id = 'b: {
1054 match ty.kind(Interner) {
1055 TyKind::Adt(e, _) => match e.0 {
1056 AdtId::EnumId(e) => break 'b e,
1057 _ => (),
1058 },
1059 _ => (),
1060 }
1061 return Ok(Owned(0u128.to_le_bytes().to_vec()));
1062 };
1063 match &layout.variants {
1064 Variants::Single { index } => {
1065 let r = self.const_eval_discriminant(EnumVariantId {
1066 parent: enum_id,
1067 local_id: index.0,
1068 })?;
1069 Owned(r.to_le_bytes().to_vec())
1070 }
1071 Variants::Multiple { tag, tag_encoding, variants, .. } => {
1072 let Some(target_data_layout) = self.db.target_data_layout(self.crate_id) else {
1073 not_supported!("missing target data layout");
1074 };
1075 let size = tag.size(&*target_data_layout).bytes_usize();
1076 let offset = layout.fields.offset(0).bytes_usize(); // The only field on enum variants is the tag field
1077 match tag_encoding {
1078 TagEncoding::Direct => {
1079 let tag = &bytes[offset..offset + size];
1080 Owned(pad16(tag, false).to_vec())
1081 }
1082 TagEncoding::Niche { untagged_variant, niche_start, .. } => {
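// Niche encoding: the tag minus `niche_start` indexes the variants other than the untagged
// one; an out-of-range value means the untagged variant.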
1083 let tag = &bytes[offset..offset + size];
1084 let candidate_tag = i128::from_le_bytes(pad16(tag, false))
1085 .wrapping_sub(*niche_start as i128)
1086 as usize;
1087 let variant = variants
1088 .iter_enumerated()
1089 .map(|(x, _)| x)
1090 .filter(|x| x != untagged_variant)
1091 .nth(candidate_tag)
1092 .unwrap_or(*untagged_variant)
1093 .0;
1094 let result = self.const_eval_discriminant(EnumVariantId {
1095 parent: enum_id,
1096 local_id: variant,
1097 })?;
1098 Owned(result.to_le_bytes().to_vec())
1099 }
1100 }
1101 }
1102 }
1103 }
1104 Rvalue::Repeat(x, len) => {
1105 let len = match try_const_usize(self.db, &len) {
1106 Some(x) => x as usize,
1107 None => not_supported!("non evaluatable array len in repeat Rvalue"),
1108 };
1109 let val = self.eval_operand(x, locals)?.get(self)?;
1110 let size = len * val.len();
1111 Owned(val.iter().copied().cycle().take(size).collect())
1112 }
1113 Rvalue::ShallowInitBox(_, _) => not_supported!("shallow init box"),
1114 Rvalue::ShallowInitBoxWithAlloc(ty) => {
1115 let Some((size, align)) = self.size_align_of(ty, locals)? else {
1116 not_supported!("unsized box initialization");
1117 };
1118 let addr = self.heap_allocate(size, align);
1119 Owned(addr.to_bytes())
1120 }
1121 Rvalue::CopyForDeref(_) => not_supported!("copy for deref"),
1122 Rvalue::Aggregate(kind, values) => {
1123 let values = values
1124 .iter()
1125 .map(|x| self.eval_operand(x, locals))
1126 .collect::<Result<Vec<_>>>()?;
1127 match kind {
1128 AggregateKind::Array(_) => {
1129 let mut r = vec![];
1130 for x in values {
1131 let value = x.get(&self)?;
1132 r.extend(value);
1133 }
1134 Owned(r)
1135 }
1136 AggregateKind::Tuple(ty) => {
1137 let layout = self.layout(&ty)?;
1138 Owned(self.make_by_layout(
1139 layout.size.bytes_usize(),
1140 &layout,
1141 None,
1142 values.iter().map(|&x| x.into()),
1143 )?)
1144 }
1145 AggregateKind::Union(x, f) => {
1146 let layout = self.layout_adt((*x).into(), Substitution::empty(Interner))?;
1147 let offset = layout
1148 .fields
1149 .offset(u32::from(f.local_id.into_raw()) as usize)
1150 .bytes_usize();
1151 let op = values[0].get(&self)?;
1152 let mut result = vec![0; layout.size.bytes_usize()];
1153 result[offset..offset + op.len()].copy_from_slice(op);
1154 Owned(result)
1155 }
1156 AggregateKind::Adt(x, subst) => {
1157 let (size, variant_layout, tag) =
1158 self.layout_of_variant(*x, subst.clone(), locals)?;
1159 Owned(self.make_by_layout(
1160 size,
1161 &variant_layout,
1162 tag,
1163 values.iter().map(|&x| x.into()),
1164 )?)
1165 }
1166 AggregateKind::Closure(ty) => {
1167 let layout = self.layout(&ty)?;
1168 Owned(self.make_by_layout(
1169 layout.size.bytes_usize(),
1170 &layout,
1171 None,
1172 values.iter().map(|&x| x.into()),
1173 )?)
1174 }
1175 }
1176 }
1177 Rvalue::Cast(kind, operand, target_ty) => match kind {
1178 CastKind::Pointer(cast) => match cast {
1179 PointerCast::ReifyFnPointer | PointerCast::ClosureFnPointer(_) => {
1180 let current_ty = self.operand_ty(operand, locals)?;
1181 if let TyKind::FnDef(_, _) | TyKind::Closure(_, _) =
1182 &current_ty.data(Interner).kind
1183 {
1184 let id = self.vtable_map.id(current_ty);
1185 let ptr_size = self.ptr_size();
1186 Owned(id.to_le_bytes()[0..ptr_size].to_vec())
1187 } else {
1188 not_supported!(
1189 "creating a fn pointer from a non FnDef or Closure type"
1190 );
1191 }
1192 }
1193 PointerCast::Unsize => {
1194 let current_ty = self.operand_ty(operand, locals)?;
1195 let addr = self.eval_operand(operand, locals)?;
1196 self.coerce_unsized(addr, &current_ty, target_ty)?
1197 }
1198 PointerCast::MutToConstPointer | PointerCast::UnsafeFnPointer => {
1199 // This is no-op
1200 Borrowed(self.eval_operand(operand, locals)?)
1201 }
1202 PointerCast::ArrayToPointer => {
1203 // We should remove the metadata part if the current type is slice
1204 Borrowed(self.eval_operand(operand, locals)?.slice(0..self.ptr_size()))
1205 }
1206 },
1207 CastKind::DynStar => not_supported!("dyn star cast"),
1208 CastKind::IntToInt
1209 | CastKind::PointerExposeAddress
1210 | CastKind::PointerFromExposedAddress => {
1211 let current_ty = self.operand_ty(operand, locals)?;
1212 let is_signed = match current_ty.kind(Interner) {
1213 TyKind::Scalar(s) => match s {
1214 chalk_ir::Scalar::Int(_) => true,
1215 _ => false,
1216 },
1217 _ => false,
1218 };
1219 let current = pad16(self.eval_operand(operand, locals)?.get(&self)?, is_signed);
1220 let dest_size =
1221 self.size_of_sized(target_ty, locals, "destination of int to int cast")?;
1222 Owned(current[0..dest_size].to_vec())
1223 }
1224 CastKind::FloatToInt => not_supported!("float to int cast"),
1225 CastKind::FloatToFloat => not_supported!("float to float cast"),
1226 CastKind::IntToFloat => not_supported!("float to int cast"),
1227 CastKind::FnPtrToPtr => not_supported!("fn ptr to ptr cast"),
1228 },
1229 })
1230 }
1231
1232 fn coerce_unsized_look_through_fields<T>(
1233 &self,
1234 ty: &Ty,
1235 goal: impl Fn(&TyKind) -> Option<T>,
1236 ) -> Result<T> {
1237 let kind = ty.kind(Interner);
1238 if let Some(x) = goal(kind) {
1239 return Ok(x);
1240 }
1241 if let TyKind::Adt(id, subst) = kind {
1242 if let AdtId::StructId(struct_id) = id.0 {
1243 let field_types = self.db.field_types(struct_id.into());
1244 let mut field_types = field_types.iter();
1245 if let Some(ty) =
1246 field_types.next().map(|x| x.1.clone().substitute(Interner, subst))
1247 {
1248 return self.coerce_unsized_look_through_fields(&ty, goal);
1249 }
1250 }
1251 }
1252 Err(MirEvalError::CoerceUnsizedError(ty.clone()))
1253 }
1254
1255 fn coerce_unsized(
1256 &mut self,
1257 addr: Interval,
1258 current_ty: &Ty,
1259 target_ty: &Ty,
1260 ) -> Result<IntervalOrOwned> {
1261 use IntervalOrOwned::*;
1262 fn for_ptr(x: &TyKind) -> Option<Ty> {
1263 match x {
1264 TyKind::Raw(_, ty) | TyKind::Ref(_, _, ty) => Some(ty.clone()),
1265 _ => None,
1266 }
1267 }
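// Unsizing builds a fat pointer: the original thin pointer followed by either the array
// length (array -> slice) or a vtable id (concrete type -> `dyn Trait`).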
1268 Ok(match self.coerce_unsized_look_through_fields(target_ty, for_ptr)? {
1269 ty => match &ty.data(Interner).kind {
1270 TyKind::Slice(_) => {
1271 match self.coerce_unsized_look_through_fields(current_ty, for_ptr)? {
1272 ty => match &ty.data(Interner).kind {
1273 TyKind::Array(_, size) => {
1274 let len = match try_const_usize(self.db, size) {
1275 None => not_supported!(
1276 "unevaluatble len of array in coerce unsized"
1277 ),
1278 Some(x) => x as usize,
1279 };
1280 let mut r = Vec::with_capacity(16);
1281 let addr = addr.get(self)?;
1282 r.extend(addr.iter().copied());
1283 r.extend(len.to_le_bytes().into_iter());
1284 Owned(r)
1285 }
1286 t => {
1287 not_supported!("slice unsizing from non array type {t:?}")
1288 }
1289 },
1290 }
1291 }
1292 TyKind::Dyn(_) => match &current_ty.data(Interner).kind {
1293 TyKind::Raw(_, ty) | TyKind::Ref(_, _, ty) => {
1294 let vtable = self.vtable_map.id(ty.clone());
1295 let mut r = Vec::with_capacity(16);
1296 let addr = addr.get(self)?;
1297 r.extend(addr.iter().copied());
1298 r.extend(vtable.to_le_bytes().into_iter());
1299 Owned(r)
1300 }
1301 _ => not_supported!("dyn unsizing from non pointers"),
1302 },
1303 _ => not_supported!("unknown unsized cast"),
1304 },
1305 })
1306 }
1307
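/// Returns the overall size of the value, the layout of the requested variant and, if a tag
/// has to be written, its `(offset, size, value)` triple.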
1308 fn layout_of_variant(
1309 &mut self,
1310 x: VariantId,
1311 subst: Substitution,
1312 locals: &Locals<'_>,
1313 ) -> Result<(usize, Arc<Layout>, Option<(usize, usize, i128)>)> {
1314 let adt = x.adt_id();
1315 if let DefWithBodyId::VariantId(f) = locals.body.owner {
1316 if let VariantId::EnumVariantId(x) = x {
1317 if AdtId::from(f.parent) == adt {
1318 // Computing the exact size of enums requires resolving the enum discriminants. In order to prevent loops (and
1319 // infinite sized type errors) we use a dummy layout
1320 let i = self.const_eval_discriminant(x)?;
1321 return Ok((16, self.layout(&TyBuilder::unit())?, Some((0, 16, i))));
1322 }
1323 }
1324 }
1325 let layout = self.layout_adt(adt, subst)?;
1326 Ok(match &layout.variants {
1327 Variants::Single { .. } => (layout.size.bytes_usize(), layout, None),
1328 Variants::Multiple { variants, tag, tag_encoding, .. } => {
1329 let cx = self
1330 .db
1331 .target_data_layout(self.crate_id)
1332 .ok_or(MirEvalError::TargetDataLayoutNotAvailable)?;
1333 let enum_variant_id = match x {
1334 VariantId::EnumVariantId(x) => x,
1335 _ => not_supported!("multi variant layout for non-enums"),
1336 };
1337 let rustc_enum_variant_idx = RustcEnumVariantIdx(enum_variant_id.local_id);
1338 let mut discriminant = self.const_eval_discriminant(enum_variant_id)?;
1339 let variant_layout = variants[rustc_enum_variant_idx].clone();
1340 let have_tag = match tag_encoding {
1341 TagEncoding::Direct => true,
1342 TagEncoding::Niche { untagged_variant, niche_variants: _, niche_start } => {
1343 if *untagged_variant == rustc_enum_variant_idx {
1344 false
1345 } else {
1346 discriminant = (variants
1347 .iter_enumerated()
1348 .filter(|(x, _)| x != untagged_variant)
1349 .position(|(x, _)| x == rustc_enum_variant_idx)
1350 .unwrap() as i128)
1351 .wrapping_add(*niche_start as i128);
1352 true
1353 }
1354 }
1355 };
1356 (
1357 layout.size.bytes_usize(),
1358 Arc::new(variant_layout),
1359 if have_tag {
1360 Some((
1361 layout.fields.offset(0).bytes_usize(),
1362 tag.size(&*cx).bytes_usize(),
1363 discriminant,
1364 ))
1365 } else {
1366 None
1367 },
1368 )
1369 }
1370 })
1371 }
1372
1373 fn make_by_layout(
1374 &mut self,
1375 size: usize, // Not necessarily equal to variant_layout.size
1376 variant_layout: &Layout,
1377 tag: Option<(usize, usize, i128)>,
1378 values: impl Iterator<Item = IntervalOrOwned>,
1379 ) -> Result<Vec<u8>> {
1380 let mut result = vec![0; size];
1381 if let Some((offset, size, value)) = tag {
1382 result[offset..offset + size].copy_from_slice(&value.to_le_bytes()[0..size]);
1383 }
1384 for (i, op) in values.enumerate() {
1385 let offset = variant_layout.fields.offset(i).bytes_usize();
1386 let op = op.get(&self)?;
1387 result[offset..offset + op.len()].copy_from_slice(op);
1388 }
1389 Ok(result)
1390 }
1391
1392 fn eval_operand(&mut self, x: &Operand, locals: &mut Locals<'_>) -> Result<Interval> {
1393 Ok(match x {
1394 Operand::Copy(p) | Operand::Move(p) => {
1395 locals.drop_flags.remove_place(p);
1396 self.eval_place(p, locals)?
1397 }
1398 Operand::Static(st) => {
1399 let addr = self.eval_static(*st, locals)?;
1400 Interval::new(addr, self.ptr_size())
1401 }
1402 Operand::Constant(konst) => {
1403 let data = &konst.data(Interner);
1404 match &data.value {
1405 chalk_ir::ConstValue::BoundVar(_) => not_supported!("bound var constant"),
1406 chalk_ir::ConstValue::InferenceVar(_) => {
1407 not_supported!("inference var constant")
1408 }
1409 chalk_ir::ConstValue::Placeholder(_) => not_supported!("placeholder constant"),
1410 chalk_ir::ConstValue::Concrete(c) => {
1411 self.allocate_const_in_heap(c, &data.ty, locals, konst)?
1412 }
1413 }
1414 }
1415 })
1416 }
1417
1418 fn allocate_const_in_heap(
1419 &mut self,
1420 c: &chalk_ir::ConcreteConst<Interner>,
1421 ty: &Ty,
1422 locals: &Locals<'_>,
1423 konst: &chalk_ir::Const<Interner>,
1424 ) -> Result<Interval> {
1425 Ok(match &c.interned {
1426 ConstScalar::Bytes(v, memory_map) => {
1427 let mut v: Cow<'_, [u8]> = Cow::Borrowed(v);
1428 let patch_map = memory_map.transform_addresses(|b| {
1429 let addr = self.heap_allocate(b.len(), 1); // FIXME: align is wrong
1430 self.write_memory(addr, b)?;
1431 Ok(addr.to_usize())
1432 })?;
1433 let (size, align) = self.size_align_of(ty, locals)?.unwrap_or((v.len(), 1));
1434 if size != v.len() {
1435 // Handle the enum whose own body we are currently evaluating; it gets a dummy 16-byte layout (see `size_align_of`)
1436 if size == 16 && v.len() < 16 {
1437 v = Cow::Owned(pad16(&v, false).to_vec());
1438 } else if size < 16 && v.len() == 16 {
1439 v = Cow::Owned(v[0..size].to_vec());
1440 } else {
1441 return Err(MirEvalError::InvalidConst(konst.clone()));
1442 }
1443 }
1444 let addr = self.heap_allocate(size, align);
1445 self.write_memory(addr, &v)?;
1446 self.patch_addresses(&patch_map, &memory_map.vtable, addr, ty, locals)?;
1447 Interval::new(addr, size)
1448 }
1449 ConstScalar::UnevaluatedConst(..) => {
1450 not_supported!("unevaluated const present in monomorphized mir");
1451 }
1452 ConstScalar::Unknown => not_supported!("evaluating unknown const"),
1453 })
1454 }
1455
1456 fn eval_place(&mut self, p: &Place, locals: &Locals<'_>) -> Result<Interval> {
1457 let addr = self.place_addr(p, locals)?;
1458 Ok(Interval::new(
1459 addr,
1460 self.size_of_sized(&self.place_ty(p, locals)?, locals, "type of this place")?,
1461 ))
1462 }
1463
1464 fn read_memory(&self, addr: Address, size: usize) -> Result<&[u8]> {
1465 if size == 0 {
1466 return Ok(&[]);
1467 }
1468 let (mem, pos) = match addr {
1469 Stack(x) => (&self.stack, x),
1470 Heap(x) => (&self.heap, x),
1471 Invalid(x) => {
1472 return Err(MirEvalError::UndefinedBehavior(format!(
1473 "read invalid memory address {x} with size {size}"
1474 )));
1475 }
1476 };
1477 mem.get(pos..pos + size)
1478 .ok_or_else(|| MirEvalError::UndefinedBehavior("out of bound memory read".to_string()))
1479 }
1480
1481 fn write_memory(&mut self, addr: Address, r: &[u8]) -> Result<()> {
1482 if r.is_empty() {
1483 return Ok(());
1484 }
1485 let (mem, pos) = match addr {
1486 Stack(x) => (&mut self.stack, x),
1487 Heap(x) => (&mut self.heap, x),
1488 Invalid(x) => {
1489 return Err(MirEvalError::UndefinedBehavior(format!(
1490 "write invalid memory address {x} with content {r:?}"
1491 )));
1492 }
1493 };
1494 mem.get_mut(pos..pos + r.len())
1495 .ok_or_else(|| {
1496 MirEvalError::UndefinedBehavior("out of bound memory write".to_string())
1497 })?
1498 .copy_from_slice(r);
1499 Ok(())
1500 }
1501
1502 fn size_align_of(&self, ty: &Ty, locals: &Locals<'_>) -> Result<Option<(usize, usize)>> {
1503 if let DefWithBodyId::VariantId(f) = locals.body.owner {
1504 if let Some((adt, _)) = ty.as_adt() {
1505 if AdtId::from(f.parent) == adt {
1506 // Computing the exact size of enums requires resolving the enum discriminants. In order to prevent loops (and
1507 // infinite sized type errors) we use a dummy size
1508 return Ok(Some((16, 16)));
1509 }
1510 }
1511 }
1512 let layout = self.layout(ty);
1513 if self.assert_placeholder_ty_is_unused {
1514 if matches!(layout, Err(MirEvalError::LayoutError(LayoutError::HasPlaceholder, _))) {
1515 return Ok(Some((0, 1)));
1516 }
1517 }
1518 let layout = layout?;
1519 Ok(layout
1520 .is_sized()
1521 .then(|| (layout.size.bytes_usize(), layout.align.abi.bytes() as usize)))
1522 }
1523
1524 /// A version of `self.size_of` which returns an error if the type is unsized. The `what` argument should
1525 /// be something that completes this: `error: type {ty} was unsized. {what} should be sized`
1526 fn size_of_sized(&self, ty: &Ty, locals: &Locals<'_>, what: &'static str) -> Result<usize> {
1527 match self.size_align_of(ty, locals)? {
1528 Some(x) => Ok(x.0),
1529 None => Err(MirEvalError::TypeIsUnsized(ty.clone(), what)),
1530 }
1531 }
1532
1533 fn heap_allocate(&mut self, size: usize, _align: usize) -> Address {
1534 let pos = self.heap.len();
1535 self.heap.extend(iter::repeat(0).take(size));
1536 Address::Heap(pos)
1537 }
1538
1539 fn detect_fn_trait(&self, def: FunctionId) -> Option<FnTrait> {
1540 use LangItem::*;
1541 let ItemContainerId::TraitId(parent) = self.db.lookup_intern_function(def).container else {
1542 return None;
1543 };
1544 let l = lang_attr(self.db.upcast(), parent)?;
1545 match l {
1546 FnOnce => Some(FnTrait::FnOnce),
1547 FnMut => Some(FnTrait::FnMut),
1548 Fn => Some(FnTrait::Fn),
1549 _ => None,
1550 }
1551 }
1552
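/// Walks a value of type `ty` and records, for every reference found in it, the bytes it
/// points to, producing the `MemoryMap` that is stored alongside the resulting constant.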
1553 fn create_memory_map(&self, bytes: &[u8], ty: &Ty, locals: &Locals<'_>) -> Result<MemoryMap> {
1554 fn rec(
1555 this: &Evaluator<'_>,
1556 bytes: &[u8],
1557 ty: &Ty,
1558 locals: &Locals<'_>,
1559 mm: &mut MemoryMap,
1560 ) -> Result<()> {
1561 match ty.kind(Interner) {
1562 TyKind::Ref(_, _, t) => {
1563 let size = this.size_align_of(t, locals)?;
1564 match size {
1565 Some((size, _)) => {
1566 let addr_usize = from_bytes!(usize, bytes);
1567 mm.insert(
1568 addr_usize,
1569 this.read_memory(Address::from_usize(addr_usize), size)?.to_vec(),
1570 )
1571 }
1572 None => {
1573 let mut check_inner = None;
1574 let (addr, meta) = bytes.split_at(bytes.len() / 2);
1575 let element_size = match t.kind(Interner) {
1576 TyKind::Str => 1,
1577 TyKind::Slice(t) => {
1578 check_inner = Some(t);
1579 this.size_of_sized(t, locals, "slice inner type")?
1580 }
1581 TyKind::Dyn(_) => {
1582 let t = this.vtable_map.ty_of_bytes(meta)?;
1583 check_inner = Some(t);
1584 this.size_of_sized(t, locals, "dyn concrete type")?
1585 }
1586 _ => return Ok(()),
1587 };
1588 let count = match t.kind(Interner) {
1589 TyKind::Dyn(_) => 1,
1590 _ => from_bytes!(usize, meta),
1591 };
1592 let size = element_size * count;
1593 let addr = Address::from_bytes(addr)?;
1594 let b = this.read_memory(addr, size)?;
1595 mm.insert(addr.to_usize(), b.to_vec());
1596 if let Some(ty) = check_inner {
1597 for i in 0..count {
1598 let offset = element_size * i;
1599 rec(this, &b[offset..offset + element_size], &ty, locals, mm)?;
1600 }
1601 }
1602 }
1603 }
1604 }
1605 chalk_ir::TyKind::Tuple(_, subst) => {
1606 let layout = this.layout(ty)?;
1607 for (id, ty) in subst.iter(Interner).enumerate() {
1608 let ty = ty.assert_ty_ref(Interner); // Tuple only has type argument
1609 let offset = layout.fields.offset(id).bytes_usize();
1610 let size = this.layout(ty)?.size.bytes_usize();
1611 rec(this, &bytes[offset..offset + size], ty, locals, mm)?;
1612 }
1613 }
1614 chalk_ir::TyKind::Adt(adt, subst) => match adt.0 {
1615 AdtId::StructId(s) => {
1616 let data = this.db.struct_data(s);
1617 let layout = this.layout(ty)?;
1618 let field_types = this.db.field_types(s.into());
1619 for (f, _) in data.variant_data.fields().iter() {
1620 let offset = layout
1621 .fields
1622 .offset(u32::from(f.into_raw()) as usize)
1623 .bytes_usize();
1624 let ty = &field_types[f].clone().substitute(Interner, subst);
1625 let size = this.layout(ty)?.size.bytes_usize();
1626 rec(this, &bytes[offset..offset + size], ty, locals, mm)?;
1627 }
1628 }
1629 AdtId::EnumId(e) => {
1630 let layout = this.layout(ty)?;
1631 if let Some((v, l)) =
1632 detect_variant_from_bytes(&layout, this.db, this.crate_id, bytes, e)
1633 {
1634 let data = &this.db.enum_data(e).variants[v].variant_data;
1635 let field_types = this
1636 .db
1637 .field_types(EnumVariantId { parent: e, local_id: v }.into());
1638 for (f, _) in data.fields().iter() {
1639 let offset =
1640 l.fields.offset(u32::from(f.into_raw()) as usize).bytes_usize();
1641 let ty = &field_types[f].clone().substitute(Interner, subst);
1642 let size = this.layout(ty)?.size.bytes_usize();
1643 rec(this, &bytes[offset..offset + size], ty, locals, mm)?;
1644 }
1645 }
1646 }
1647 AdtId::UnionId(_) => (),
1648 },
1649 _ => (),
1650 }
1651 Ok(())
1652 }
1653 let mut mm = MemoryMap::default();
1654 rec(self, bytes, ty, locals, &mut mm)?;
1655 Ok(mm)
1656 }
1657
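/// Rewrites the addresses stored inside the value at `addr` using `patch_map`, and re-interns
/// function pointer types from `old_vtable` into this evaluator's vtable map.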
1658 fn patch_addresses(
1659 &mut self,
1660 patch_map: &HashMap<usize, usize>,
1661 old_vtable: &VTableMap,
1662 addr: Address,
1663 ty: &Ty,
1664 locals: &Locals<'_>,
1665 ) -> Result<()> {
1666 // FIXME: support indirect references
1667 let layout = self.layout(ty)?;
1668 let my_size = self.size_of_sized(ty, locals, "value to patch address")?;
1669 match ty.kind(Interner) {
1670 TyKind::Ref(_, _, t) => {
1671 let size = self.size_align_of(t, locals)?;
1672 match size {
1673 Some(_) => {
1674 let current = from_bytes!(usize, self.read_memory(addr, my_size)?);
1675 if let Some(x) = patch_map.get(&current) {
1676 self.write_memory(addr, &x.to_le_bytes())?;
1677 }
1678 }
1679 None => {
1680 let current = from_bytes!(usize, self.read_memory(addr, my_size / 2)?);
1681 if let Some(x) = patch_map.get(&current) {
1682 self.write_memory(addr, &x.to_le_bytes())?;
1683 }
1684 }
1685 }
1686 }
1687 TyKind::Function(_) => {
1688 let ty = old_vtable.ty_of_bytes(self.read_memory(addr, my_size)?)?.clone();
1689 let new_id = self.vtable_map.id(ty);
1690 self.write_memory(addr, &new_id.to_le_bytes())?;
1691 }
1692 TyKind::Adt(id, subst) => match id.0 {
1693 AdtId::StructId(s) => {
1694 for (i, (_, ty)) in self.db.field_types(s.into()).iter().enumerate() {
1695 let offset = layout.fields.offset(i).bytes_usize();
1696 let ty = ty.clone().substitute(Interner, subst);
1697 self.patch_addresses(
1698 patch_map,
1699 old_vtable,
1700 addr.offset(offset),
1701 &ty,
1702 locals,
1703 )?;
1704 }
1705 }
1706 AdtId::UnionId(_) => (),
1707 AdtId::EnumId(_) => (),
1708 },
1709 TyKind::AssociatedType(_, _)
1710 | TyKind::Scalar(_)
1711 | TyKind::Tuple(_, _)
1712 | TyKind::Array(_, _)
1713 | TyKind::Slice(_)
1714 | TyKind::Raw(_, _)
1715 | TyKind::OpaqueType(_, _)
1716 | TyKind::FnDef(_, _)
1717 | TyKind::Str
1718 | TyKind::Never
1719 | TyKind::Closure(_, _)
1720 | TyKind::Generator(_, _)
1721 | TyKind::GeneratorWitness(_, _)
1722 | TyKind::Foreign(_)
1723 | TyKind::Error
1724 | TyKind::Placeholder(_)
1725 | TyKind::Dyn(_)
1726 | TyKind::Alias(_)
1727 | TyKind::BoundVar(_)
1728 | TyKind::InferenceVar(_, _) => (),
1729 }
1730 Ok(())
1731 }
1732
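    // Calls the value behind a function pointer. The pointer bytes hold a vtable id,
    // which `vtable_map` resolves back to the underlying `FnDef` or closure type.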
    fn exec_fn_pointer(
        &mut self,
        bytes: Interval,
        destination: Interval,
        args: &[IntervalAndTy],
        locals: &Locals<'_>,
        span: MirSpan,
    ) -> Result<()> {
        let id = from_bytes!(usize, bytes.get(self)?);
        let next_ty = self.vtable_map.ty(id)?.clone();
        match &next_ty.data(Interner).kind {
            TyKind::FnDef(def, generic_args) => {
                self.exec_fn_def(*def, generic_args, destination, args, &locals, span)?;
            }
            TyKind::Closure(id, subst) => {
                self.exec_closure(*id, bytes.slice(0..0), subst, destination, args, locals, span)?;
            }
            _ => return Err(MirEvalError::TypeError("function pointer to non function")),
        }
        Ok(())
    }

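    // Executes a closure by interpreting its monomorphized MIR. The captures in
    // `closure_data` are passed by address when the lowered body expects a reference
    // to the closure, and by value otherwise; the result is written to `destination`.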
    fn exec_closure(
        &mut self,
        closure: ClosureId,
        closure_data: Interval,
        generic_args: &Substitution,
        destination: Interval,
        args: &[IntervalAndTy],
        locals: &Locals<'_>,
        span: MirSpan,
    ) -> Result<()> {
        let mir_body = self
            .db
            .monomorphized_mir_body_for_closure(
                closure,
                generic_args.clone(),
                self.trait_env.clone(),
            )
            .map_err(|x| MirEvalError::MirLowerErrorForClosure(closure, x))?;
        let closure_data = if mir_body.locals[mir_body.param_locals[0]].ty.as_reference().is_some()
        {
            closure_data.addr.to_bytes()
        } else {
            closure_data.get(self)?.to_owned()
        };
        let arg_bytes = iter::once(Ok(closure_data))
            .chain(args.iter().map(|x| Ok(x.get(&self)?.to_owned())))
            .collect::<Result<Vec<_>>>()?;
        let bytes = self.interpret_mir(&mir_body, arg_bytes.into_iter()).map_err(|e| {
            MirEvalError::InFunction(Either::Right(closure), Box::new(e), span, locals.body.owner)
        })?;
        destination.write_from_bytes(self, &bytes)
    }

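    // Executes a callable definition: calls to the `Fn*` trait methods are routed to
    // `exec_fn_trait`, other functions are interpreted directly, and struct or
    // enum-variant constructors are evaluated by assembling the value from its layout.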
    fn exec_fn_def(
        &mut self,
        def: FnDefId,
        generic_args: &Substitution,
        destination: Interval,
        args: &[IntervalAndTy],
        locals: &Locals<'_>,
        span: MirSpan,
    ) -> Result<()> {
        let def: CallableDefId = from_chalk(self.db, def);
        let generic_args = generic_args.clone();
        match def {
            CallableDefId::FunctionId(def) => {
                if let Some(_) = self.detect_fn_trait(def) {
                    self.exec_fn_trait(&args, destination, locals, span)?;
                    return Ok(());
                }
                self.exec_fn_with_args(def, args, generic_args, locals, destination, span)?;
            }
            CallableDefId::StructId(id) => {
                let (size, variant_layout, tag) =
                    self.layout_of_variant(id.into(), generic_args, &locals)?;
                let result = self.make_by_layout(
                    size,
                    &variant_layout,
                    tag,
                    args.iter().map(|x| x.interval.into()),
                )?;
                destination.write_from_bytes(self, &result)?;
            }
            CallableDefId::EnumVariantId(id) => {
                let (size, variant_layout, tag) =
                    self.layout_of_variant(id.into(), generic_args, &locals)?;
                let result = self.make_by_layout(
                    size,
                    &variant_layout,
                    tag,
                    args.iter().map(|x| x.interval.into()),
                )?;
                destination.write_from_bytes(self, &result)?;
            }
        }
        Ok(())
    }

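    // Executes a function with already-evaluated arguments: known special functions
    // (intrinsics and other shims) are tried first, calls on `dyn` receivers are
    // devirtualized through the vtable stored in the receiver, and finally the
    // resolved impl is interpreted.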
    fn exec_fn_with_args(
        &mut self,
        def: FunctionId,
        args: &[IntervalAndTy],
        generic_args: Substitution,
        locals: &Locals<'_>,
        destination: Interval,
        span: MirSpan,
    ) -> Result<()> {
        if self.detect_and_exec_special_function(
            def,
            args,
            &generic_args,
            locals,
            destination,
            span,
        )? {
            return Ok(());
        }
        let arg_bytes =
            args.iter().map(|x| Ok(x.get(&self)?.to_owned())).collect::<Result<Vec<_>>>()?;
        if let Some(self_ty_idx) =
            is_dyn_method(self.db, self.trait_env.clone(), def, generic_args.clone())
        {
            // In the layout of every currently possible receiver, which at the time of writing is
            // one of `&T`, `&mut T`, `Box<T>`, `Rc<T>`, `Arc<T>`, or `Pin<P>` where `P` is one of
            // the other possible receivers, the vtable pointer lives exactly in the
            // `[ptr_size..2 * ptr_size]` bytes, so we can read it without branching on the type.
            let ty =
                self.vtable_map.ty_of_bytes(&arg_bytes[0][self.ptr_size()..self.ptr_size() * 2])?;
            let mut args_for_target = args.to_vec();
            args_for_target[0] = IntervalAndTy {
                interval: args_for_target[0].interval.slice(0..self.ptr_size()),
                ty: ty.clone(),
            };
            let ty = GenericArgData::Ty(ty.clone()).intern(Interner);
            let generics_for_target =
                Substitution::from_iter(
                    Interner,
                    generic_args.iter(Interner).enumerate().map(|(i, x)| {
                        if i == self_ty_idx {
                            &ty
                        } else {
                            x
                        }
                    }),
                );
            return self.exec_fn_with_args(
                def,
                &args_for_target,
                generics_for_target,
                locals,
                destination,
                span,
            );
        }
        let (imp, generic_args) =
            lookup_impl_method(self.db, self.trait_env.clone(), def, generic_args);
        self.exec_looked_up_function(generic_args, locals, imp, arg_bytes, span, destination)
    }

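    // Interprets the monomorphized MIR body of an already-resolved function and writes
    // its return value to `destination`.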
    fn exec_looked_up_function(
        &mut self,
        generic_args: Substitution,
        locals: &Locals<'_>,
        imp: FunctionId,
        arg_bytes: Vec<Vec<u8>>,
        span: MirSpan,
        destination: Interval,
    ) -> Result<()> {
        let def = imp.into();
        let mir_body = self
            .db
            .monomorphized_mir_body(def, generic_args, self.trait_env.clone())
            .map_err(|e| {
                MirEvalError::InFunction(
                    Either::Left(imp),
                    Box::new(MirEvalError::MirLowerError(imp, e)),
                    span,
                    locals.body.owner,
                )
            })?;
        let result = self.interpret_mir(&mir_body, arg_bytes.iter().cloned()).map_err(|e| {
            MirEvalError::InFunction(Either::Left(imp), Box::new(e), span, locals.body.owner)
        })?;
        destination.write_from_bytes(self, &result)?;
        Ok(())
    }

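    // Implements calls through the `Fn`, `FnMut` and `FnOnce` traits: the first
    // argument is the callee, which is unwrapped from any references (and `dyn`
    // indirection) and then dispatched as a `FnDef`, function pointer or closure.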
    fn exec_fn_trait(
        &mut self,
        args: &[IntervalAndTy],
        destination: Interval,
        locals: &Locals<'_>,
        span: MirSpan,
    ) -> Result<()> {
        let func = args.get(0).ok_or(MirEvalError::TypeError("fn trait with no arg"))?;
        let mut func_ty = func.ty.clone();
        let mut func_data = func.interval;
        while let TyKind::Ref(_, _, z) = func_ty.kind(Interner) {
            func_ty = z.clone();
            if matches!(func_ty.kind(Interner), TyKind::Dyn(_)) {
                let id =
                    from_bytes!(usize, &func_data.get(self)?[self.ptr_size()..self.ptr_size() * 2]);
                func_data = func_data.slice(0..self.ptr_size());
                func_ty = self.vtable_map.ty(id)?.clone();
            }
            let size = self.size_of_sized(&func_ty, locals, "self type of fn trait")?;
            func_data = Interval { addr: Address::from_bytes(func_data.get(self)?)?, size };
        }
        match &func_ty.data(Interner).kind {
            TyKind::FnDef(def, subst) => {
                self.exec_fn_def(*def, subst, destination, &args[1..], locals, span)?;
            }
            TyKind::Function(_) => {
                self.exec_fn_pointer(func_data, destination, &args[1..], locals, span)?;
            }
            TyKind::Closure(closure, subst) => {
                self.exec_closure(
                    *closure,
                    func_data,
                    &Substitution::from_iter(Interner, ClosureSubst(subst).parent_subst()),
                    destination,
                    &args[1..],
                    locals,
                    span,
                )?;
            }
            x => not_supported!("Call FnTrait methods with type {x:?}"),
        }
        Ok(())
    }

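    // Returns the address of a slot holding a pointer to the value of the static `st`,
    // evaluating and allocating the value on first use and caching the slot in
    // `static_locations`. Extern statics only get backing storage of the right size,
    // with no initializer.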
    fn eval_static(&mut self, st: StaticId, locals: &Locals<'_>) -> Result<Address> {
        if let Some(o) = self.static_locations.get(&st) {
            return Ok(*o);
        };
        let static_data = self.db.static_data(st);
        let result = if !static_data.is_extern {
            let konst = self.db.const_eval_static(st).map_err(|e| {
                MirEvalError::ConstEvalError(
                    static_data.name.as_str().unwrap_or("_").to_owned(),
                    Box::new(e),
                )
            })?;
            let data = &konst.data(Interner);
            if let chalk_ir::ConstValue::Concrete(c) = &data.value {
                self.allocate_const_in_heap(&c, &data.ty, locals, &konst)?
            } else {
                not_supported!("unevaluatable static");
            }
        } else {
            let ty = &self.db.infer(st.into())[self.db.body(st.into()).body_expr];
            let Some((size, align)) = self.size_align_of(&ty, locals)? else {
                not_supported!("unsized extern static");
            };
            let addr = self.heap_allocate(size, align);
            Interval::new(addr, size)
        };
        let addr = self.heap_allocate(self.ptr_size(), self.ptr_size());
        self.write_memory(addr, &result.addr.to_bytes())?;
        self.static_locations.insert(st, addr);
        Ok(addr)
    }

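    // Evaluates the discriminant value of an enum variant, wrapping any failure in a
    // `ConstEvalError` that names the variant.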
    fn const_eval_discriminant(&self, variant: EnumVariantId) -> Result<i128> {
        let r = self.db.const_eval_discriminant(variant);
        match r {
            Ok(r) => Ok(r),
            Err(e) => {
                let data = self.db.enum_data(variant.parent);
                let name = format!(
                    "{}::{}",
                    data.name.display(self.db.upcast()),
                    data.variants[variant.local_id].name.display(self.db.upcast())
                );
                Err(MirEvalError::ConstEvalError(name, Box::new(e)))
            }
        }
    }

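    // Drops the value currently stored in `place`, but only if its drop flag is still
    // set; the flag is cleared so the value is not dropped twice.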
    fn drop_place(&mut self, place: &Place, locals: &mut Locals<'_>, span: MirSpan) -> Result<()> {
        let (addr, ty, metadata) = self.place_addr_and_ty_and_metadata(place, locals)?;
        if !locals.drop_flags.remove_place(place) {
            return Ok(());
        }
        let metadata = match metadata {
            Some(x) => x.get(self)?.to_vec(),
            None => vec![],
        };
        self.run_drop_glue_deep(ty, locals, addr, &metadata, span)
    }

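    // Runs drop glue for a value: a user `Drop::drop` impl is executed if one exists,
    // then struct fields are dropped recursively. Enums, unions and the remaining type
    // kinds are currently not descended into.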
    fn run_drop_glue_deep(
        &mut self,
        ty: Ty,
        locals: &Locals<'_>,
        addr: Address,
        _metadata: &[u8],
        span: MirSpan,
    ) -> Result<()> {
        let Some(drop_fn) = (|| {
            let drop_trait = self.db.lang_item(self.crate_id, LangItem::Drop)?.as_trait()?;
            self.db.trait_data(drop_trait).method_by_name(&name![drop])
        })() else {
            // In some tests the minicore doesn't provide the `Drop` trait, and in those
            // cases we can safely ignore drop glue.
            return Ok(());
        };
        let (impl_drop_candidate, subst) = lookup_impl_method(
            self.db,
            self.trait_env.clone(),
            drop_fn,
            Substitution::from1(Interner, ty.clone()),
        );
        if impl_drop_candidate != drop_fn {
            self.exec_looked_up_function(
                subst,
                locals,
                impl_drop_candidate,
                vec![addr.to_bytes()],
                span,
                Interval { addr: Address::Invalid(0), size: 0 },
            )?;
        }
        match ty.kind(Interner) {
            TyKind::Adt(id, subst) => {
                match id.0 {
                    AdtId::StructId(s) => {
                        let data = self.db.struct_data(s);
                        if data.flags.contains(StructFlags::IS_MANUALLY_DROP) {
                            return Ok(());
                        }
                        let layout = self.layout_adt(id.0, subst.clone())?;
                        match data.variant_data.as_ref() {
                            VariantData::Record(fields) | VariantData::Tuple(fields) => {
                                let field_types = self.db.field_types(s.into());
                                for (field, _) in fields.iter() {
                                    let offset = layout
                                        .fields
                                        .offset(u32::from(field.into_raw()) as usize)
                                        .bytes_usize();
                                    let addr = addr.offset(offset);
                                    let ty = field_types[field].clone().substitute(Interner, subst);
                                    self.run_drop_glue_deep(ty, locals, addr, &[], span)?;
                                }
                            }
                            VariantData::Unit => (),
                        }
                    }
                    AdtId::UnionId(_) => (), // union fields don't need drop
                    AdtId::EnumId(_) => (),
                }
            }
            TyKind::AssociatedType(_, _)
            | TyKind::Scalar(_)
            | TyKind::Tuple(_, _)
            | TyKind::Array(_, _)
            | TyKind::Slice(_)
            | TyKind::Raw(_, _)
            | TyKind::Ref(_, _, _)
            | TyKind::OpaqueType(_, _)
            | TyKind::FnDef(_, _)
            | TyKind::Str
            | TyKind::Never
            | TyKind::Closure(_, _)
            | TyKind::Generator(_, _)
            | TyKind::GeneratorWitness(_, _)
            | TyKind::Foreign(_)
            | TyKind::Error
            | TyKind::Placeholder(_)
            | TyKind::Dyn(_)
            | TyKind::Alias(_)
            | TyKind::Function(_)
            | TyKind::BoundVar(_)
            | TyKind::InferenceVar(_, _) => (),
        };
        Ok(())
    }

    fn write_to_stdout(&mut self, interval: Interval) -> Result<()> {
        self.stdout.extend(interval.get(self)?.to_vec());
        Ok(())
    }

    fn write_to_stderr(&mut self, interval: Interval) -> Result<()> {
        self.stderr.extend(interval.get(self)?.to_vec());
        Ok(())
    }
}

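/// Pads the little-endian bytes of an integer out to 16 bytes so they can be handled
/// as an `i128`/`u128`, sign-extending when `is_signed` is set (the value counts as
/// negative when the sign bit of its most significant byte is set). For example,
/// `pad16(&[0xff], true)` is sixteen `0xff` bytes (`-1` as an `i128`), while
/// `pad16(&[0xff], false)` is `0xff` followed by fifteen zeros (`255`).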
pub fn pad16(x: &[u8], is_signed: bool) -> [u8; 16] {
    let is_negative = is_signed && x.last().unwrap_or(&0) >= &128;
    let fill_with = if is_negative { 255 } else { 0 };
    x.iter()
        .copied()
        .chain(iter::repeat(fill_with))
        .take(16)
        .collect::<Vec<u8>>()
        .try_into()
        .expect("iterator take is not working")
}