1 pub mod convert;
2
3 use std::any::Any;
4 use std::cmp;
5 use std::iter;
6 use std::num::NonZeroUsize;
7 use std::time::Duration;
8
9 use log::trace;
10
11 use rustc_hir::def::{DefKind, Namespace};
12 use rustc_hir::def_id::{DefId, CRATE_DEF_INDEX};
13 use rustc_middle::mir;
14 use rustc_middle::ty::{
15 self,
16 layout::{LayoutOf, TyAndLayout},
17 List, TyCtxt,
18 };
19 use rustc_span::{def_id::CrateNum, sym, Span, Symbol};
20 use rustc_target::abi::{Align, FieldsShape, Size, Variants};
21 use rustc_target::spec::abi::Abi;
22
23 use rand::RngCore;
24
25 use crate::*;
26
27 /// A trait to work around not having trait object upcasting:
28 /// Add `AsAny` as supertrait and your trait objects can be turned into `&dyn Any` on which you can
29 /// then call `downcast`.
30 pub trait AsAny: Any {
as_any(&self) -> &dyn Any31 fn as_any(&self) -> &dyn Any;
as_any_mut(&mut self) -> &mut dyn Any32 fn as_any_mut(&mut self) -> &mut dyn Any;
33 }
34 impl<T: Any> AsAny for T {
35 #[inline(always)]
as_any(&self) -> &dyn Any36 fn as_any(&self) -> &dyn Any {
37 self
38 }
39 #[inline(always)]
as_any_mut(&mut self) -> &mut dyn Any40 fn as_any_mut(&mut self) -> &mut dyn Any {
41 self
42 }
43 }
44
45 // This mapping should match `decode_error_kind` in
46 // <https://github.com/rust-lang/rust/blob/master/library/std/src/sys/unix/mod.rs>.
47 const UNIX_IO_ERROR_TABLE: &[(&str, std::io::ErrorKind)] = {
48 use std::io::ErrorKind::*;
49 &[
50 ("E2BIG", ArgumentListTooLong),
51 ("EADDRINUSE", AddrInUse),
52 ("EADDRNOTAVAIL", AddrNotAvailable),
53 ("EBUSY", ResourceBusy),
54 ("ECONNABORTED", ConnectionAborted),
55 ("ECONNREFUSED", ConnectionRefused),
56 ("ECONNRESET", ConnectionReset),
57 ("EDEADLK", Deadlock),
58 ("EDQUOT", FilesystemQuotaExceeded),
59 ("EEXIST", AlreadyExists),
60 ("EFBIG", FileTooLarge),
61 ("EHOSTUNREACH", HostUnreachable),
62 ("EINTR", Interrupted),
63 ("EINVAL", InvalidInput),
64 ("EISDIR", IsADirectory),
65 ("ELOOP", FilesystemLoop),
66 ("ENOENT", NotFound),
67 ("ENOMEM", OutOfMemory),
68 ("ENOSPC", StorageFull),
69 ("ENOSYS", Unsupported),
70 ("EMLINK", TooManyLinks),
71 ("ENAMETOOLONG", InvalidFilename),
72 ("ENETDOWN", NetworkDown),
73 ("ENETUNREACH", NetworkUnreachable),
74 ("ENOTCONN", NotConnected),
75 ("ENOTDIR", NotADirectory),
76 ("ENOTEMPTY", DirectoryNotEmpty),
77 ("EPIPE", BrokenPipe),
78 ("EROFS", ReadOnlyFilesystem),
79 ("ESPIPE", NotSeekable),
80 ("ESTALE", StaleNetworkFileHandle),
81 ("ETIMEDOUT", TimedOut),
82 ("ETXTBSY", ExecutableFileBusy),
83 ("EXDEV", CrossesDevices),
84 // The following have two valid options. We have both for the forwards mapping; only the
85 // first one will be used for the backwards mapping.
86 ("EPERM", PermissionDenied),
87 ("EACCES", PermissionDenied),
88 ("EWOULDBLOCK", WouldBlock),
89 ("EAGAIN", WouldBlock),
90 ]
91 };
92
93 /// Gets an instance for a path.
94 ///
95 /// A `None` namespace indicates we are looking for a module.
try_resolve_did(tcx: TyCtxt<'_>, path: &[&str], namespace: Option<Namespace>) -> Option<DefId>96 fn try_resolve_did(tcx: TyCtxt<'_>, path: &[&str], namespace: Option<Namespace>) -> Option<DefId> {
97 /// Yield all children of the given item, that have the given name.
98 fn find_children<'tcx: 'a, 'a>(
99 tcx: TyCtxt<'tcx>,
100 item: DefId,
101 name: &'a str,
102 ) -> impl Iterator<Item = DefId> + 'a {
103 tcx.module_children(item)
104 .iter()
105 .filter(move |item| item.ident.name.as_str() == name)
106 .map(move |item| item.res.def_id())
107 }
108
109 // Take apart the path: leading crate, a sequence of modules, and potentially a final item.
110 let (&crate_name, path) = path.split_first().expect("paths must have at least one segment");
111 let (modules, item) = if let Some(namespace) = namespace {
112 let (&item_name, modules) =
113 path.split_last().expect("non-module paths must have at least 2 segments");
114 (modules, Some((item_name, namespace)))
115 } else {
116 (path, None)
117 };
118
119 // First find the crate.
120 let krate =
121 tcx.crates(()).iter().find(|&&krate| tcx.crate_name(krate).as_str() == crate_name)?;
122 let mut cur_item = DefId { krate: *krate, index: CRATE_DEF_INDEX };
123 // Then go over the modules.
124 for &segment in modules {
125 cur_item = find_children(tcx, cur_item, segment)
126 .find(|item| tcx.def_kind(item) == DefKind::Mod)?;
127 }
128 // Finally, look up the desired item in this module, if any.
129 match item {
130 Some((item_name, namespace)) =>
131 Some(
132 find_children(tcx, cur_item, item_name)
133 .find(|item| tcx.def_kind(item).ns() == Some(namespace))?,
134 ),
135 None => Some(cur_item),
136 }
137 }
138
139 impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriInterpCx<'mir, 'tcx> {}
140 pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
141 /// Checks if the given crate/module exists.
have_module(&self, path: &[&str]) -> bool142 fn have_module(&self, path: &[&str]) -> bool {
143 try_resolve_did(*self.eval_context_ref().tcx, path, None).is_some()
144 }
145
146 /// Gets an instance for a path; fails gracefully if the path does not exist.
try_resolve_path(&self, path: &[&str], namespace: Namespace) -> Option<ty::Instance<'tcx>>147 fn try_resolve_path(&self, path: &[&str], namespace: Namespace) -> Option<ty::Instance<'tcx>> {
148 let tcx = self.eval_context_ref().tcx.tcx;
149 let did = try_resolve_did(tcx, path, Some(namespace))?;
150 Some(ty::Instance::mono(tcx, did))
151 }
152
153 /// Gets an instance for a path.
resolve_path(&self, path: &[&str], namespace: Namespace) -> ty::Instance<'tcx>154 fn resolve_path(&self, path: &[&str], namespace: Namespace) -> ty::Instance<'tcx> {
155 self.try_resolve_path(path, namespace)
156 .unwrap_or_else(|| panic!("failed to find required Rust item: {path:?}"))
157 }
158
159 /// Evaluates the scalar at the specified path.
eval_path_scalar(&self, path: &[&str]) -> Scalar<Provenance>160 fn eval_path_scalar(&self, path: &[&str]) -> Scalar<Provenance> {
161 let this = self.eval_context_ref();
162 let instance = this.resolve_path(path, Namespace::ValueNS);
163 let cid = GlobalId { instance, promoted: None };
164 // We don't give a span -- this isn't actually used directly by the program anyway.
165 let const_val = this.eval_global(cid, None).unwrap_or_else(|err| {
166 panic!("failed to evaluate required Rust item: {path:?}\n{err:?}")
167 });
168 this.read_scalar(&const_val.into())
169 .unwrap_or_else(|err| panic!("failed to read required Rust item: {path:?}\n{err:?}"))
170 }
171
172 /// Helper function to get a `libc` constant as a `Scalar`.
eval_libc(&self, name: &str) -> Scalar<Provenance>173 fn eval_libc(&self, name: &str) -> Scalar<Provenance> {
174 self.eval_path_scalar(&["libc", name])
175 }
176
177 /// Helper function to get a `libc` constant as an `i32`.
eval_libc_i32(&self, name: &str) -> i32178 fn eval_libc_i32(&self, name: &str) -> i32 {
179 // TODO: Cache the result.
180 self.eval_libc(name).to_i32().unwrap_or_else(|_err| {
181 panic!("required libc item has unexpected type (not `i32`): {name}")
182 })
183 }
184
185 /// Helper function to get a `libc` constant as an `u32`.
eval_libc_u32(&self, name: &str) -> u32186 fn eval_libc_u32(&self, name: &str) -> u32 {
187 // TODO: Cache the result.
188 self.eval_libc(name).to_u32().unwrap_or_else(|_err| {
189 panic!("required libc item has unexpected type (not `u32`): {name}")
190 })
191 }
192
193 /// Helper function to get a `windows` constant as a `Scalar`.
eval_windows(&self, module: &str, name: &str) -> Scalar<Provenance>194 fn eval_windows(&self, module: &str, name: &str) -> Scalar<Provenance> {
195 self.eval_context_ref().eval_path_scalar(&["std", "sys", "windows", module, name])
196 }
197
198 /// Helper function to get a `windows` constant as a `u32`.
eval_windows_u32(&self, module: &str, name: &str) -> u32199 fn eval_windows_u32(&self, module: &str, name: &str) -> u32 {
200 // TODO: Cache the result.
201 self.eval_windows(module, name).to_u32().unwrap_or_else(|_err| {
202 panic!("required Windows item has unexpected type (not `u32`): {module}::{name}")
203 })
204 }
205
206 /// Helper function to get a `windows` constant as a `u64`.
eval_windows_u64(&self, module: &str, name: &str) -> u64207 fn eval_windows_u64(&self, module: &str, name: &str) -> u64 {
208 // TODO: Cache the result.
209 self.eval_windows(module, name).to_u64().unwrap_or_else(|_err| {
210 panic!("required Windows item has unexpected type (not `u64`): {module}::{name}")
211 })
212 }
213
214 /// Helper function to get the `TyAndLayout` of a `libc` type
libc_ty_layout(&self, name: &str) -> TyAndLayout<'tcx>215 fn libc_ty_layout(&self, name: &str) -> TyAndLayout<'tcx> {
216 let this = self.eval_context_ref();
217 let ty = this
218 .resolve_path(&["libc", name], Namespace::TypeNS)
219 .ty(*this.tcx, ty::ParamEnv::reveal_all());
220 this.layout_of(ty).unwrap()
221 }
222
223 /// Helper function to get the `TyAndLayout` of a `windows` type
windows_ty_layout(&self, name: &str) -> TyAndLayout<'tcx>224 fn windows_ty_layout(&self, name: &str) -> TyAndLayout<'tcx> {
225 let this = self.eval_context_ref();
226 let ty = this
227 .resolve_path(&["std", "sys", "windows", "c", name], Namespace::TypeNS)
228 .ty(*this.tcx, ty::ParamEnv::reveal_all());
229 this.layout_of(ty).unwrap()
230 }
231
232 /// Project to the given *named* field of the mplace (which must be a struct or union type).
mplace_field_named( &self, mplace: &MPlaceTy<'tcx, Provenance>, name: &str, ) -> InterpResult<'tcx, MPlaceTy<'tcx, Provenance>>233 fn mplace_field_named(
234 &self,
235 mplace: &MPlaceTy<'tcx, Provenance>,
236 name: &str,
237 ) -> InterpResult<'tcx, MPlaceTy<'tcx, Provenance>> {
238 let this = self.eval_context_ref();
239 let adt = mplace.layout.ty.ty_adt_def().unwrap();
240 for (idx, field) in adt.non_enum_variant().fields.iter().enumerate() {
241 if field.name.as_str() == name {
242 return this.mplace_field(mplace, idx);
243 }
244 }
245 bug!("No field named {} in type {}", name, mplace.layout.ty);
246 }
247
248 /// Write an int of the appropriate size to `dest`. The target type may be signed or unsigned,
249 /// we try to do the right thing anyway. `i128` can fit all integer types except for `u128` so
250 /// this method is fine for almost all integer types.
write_int( &mut self, i: impl Into<i128>, dest: &PlaceTy<'tcx, Provenance>, ) -> InterpResult<'tcx>251 fn write_int(
252 &mut self,
253 i: impl Into<i128>,
254 dest: &PlaceTy<'tcx, Provenance>,
255 ) -> InterpResult<'tcx> {
256 assert!(dest.layout.abi.is_scalar(), "write_int on non-scalar type {}", dest.layout.ty);
257 let val = if dest.layout.abi.is_signed() {
258 Scalar::from_int(i, dest.layout.size)
259 } else {
260 Scalar::from_uint(u64::try_from(i.into()).unwrap(), dest.layout.size)
261 };
262 self.eval_context_mut().write_scalar(val, dest)
263 }
264
265 /// Write the first N fields of the given place.
write_int_fields( &mut self, values: &[i128], dest: &MPlaceTy<'tcx, Provenance>, ) -> InterpResult<'tcx>266 fn write_int_fields(
267 &mut self,
268 values: &[i128],
269 dest: &MPlaceTy<'tcx, Provenance>,
270 ) -> InterpResult<'tcx> {
271 let this = self.eval_context_mut();
272 for (idx, &val) in values.iter().enumerate() {
273 let field = this.mplace_field(dest, idx)?;
274 this.write_int(val, &field.into())?;
275 }
276 Ok(())
277 }
278
279 /// Write the given fields of the given place.
write_int_fields_named( &mut self, values: &[(&str, i128)], dest: &MPlaceTy<'tcx, Provenance>, ) -> InterpResult<'tcx>280 fn write_int_fields_named(
281 &mut self,
282 values: &[(&str, i128)],
283 dest: &MPlaceTy<'tcx, Provenance>,
284 ) -> InterpResult<'tcx> {
285 let this = self.eval_context_mut();
286 for &(name, val) in values.iter() {
287 let field = this.mplace_field_named(dest, name)?;
288 this.write_int(val, &field.into())?;
289 }
290 Ok(())
291 }
292
293 /// Write a 0 of the appropriate size to `dest`.
write_null(&mut self, dest: &PlaceTy<'tcx, Provenance>) -> InterpResult<'tcx>294 fn write_null(&mut self, dest: &PlaceTy<'tcx, Provenance>) -> InterpResult<'tcx> {
295 self.write_int(0, dest)
296 }
297
298 /// Test if this pointer equals 0.
ptr_is_null(&self, ptr: Pointer<Option<Provenance>>) -> InterpResult<'tcx, bool>299 fn ptr_is_null(&self, ptr: Pointer<Option<Provenance>>) -> InterpResult<'tcx, bool> {
300 Ok(ptr.addr().bytes() == 0)
301 }
302
303 /// Get the `Place` for a local
local_place(&mut self, local: mir::Local) -> InterpResult<'tcx, PlaceTy<'tcx, Provenance>>304 fn local_place(&mut self, local: mir::Local) -> InterpResult<'tcx, PlaceTy<'tcx, Provenance>> {
305 let this = self.eval_context_mut();
306 let place = mir::Place { local, projection: List::empty() };
307 this.eval_place(place)
308 }
309
310 /// Generate some random bytes, and write them to `dest`.
gen_random(&mut self, ptr: Pointer<Option<Provenance>>, len: u64) -> InterpResult<'tcx>311 fn gen_random(&mut self, ptr: Pointer<Option<Provenance>>, len: u64) -> InterpResult<'tcx> {
312 // Some programs pass in a null pointer and a length of 0
313 // to their platform's random-generation function (e.g. getrandom())
314 // on Linux. For compatibility with these programs, we don't perform
315 // any additional checks - it's okay if the pointer is invalid,
316 // since we wouldn't actually be writing to it.
317 if len == 0 {
318 return Ok(());
319 }
320 let this = self.eval_context_mut();
321
322 let mut data = vec![0; usize::try_from(len).unwrap()];
323
324 if this.machine.communicate() {
325 // Fill the buffer using the host's rng.
326 getrandom::getrandom(&mut data)
327 .map_err(|err| err_unsup_format!("host getrandom failed: {}", err))?;
328 } else {
329 let rng = this.machine.rng.get_mut();
330 rng.fill_bytes(&mut data);
331 }
332
333 this.write_bytes_ptr(ptr, data.iter().copied())
334 }
335
336 /// Call a function: Push the stack frame and pass the arguments.
337 /// For now, arguments must be scalars (so that the caller does not have to know the layout).
338 ///
339 /// If you do not provie a return place, a dangling zero-sized place will be created
340 /// for your convenience.
call_function( &mut self, f: ty::Instance<'tcx>, caller_abi: Abi, args: &[Immediate<Provenance>], dest: Option<&PlaceTy<'tcx, Provenance>>, stack_pop: StackPopCleanup, ) -> InterpResult<'tcx>341 fn call_function(
342 &mut self,
343 f: ty::Instance<'tcx>,
344 caller_abi: Abi,
345 args: &[Immediate<Provenance>],
346 dest: Option<&PlaceTy<'tcx, Provenance>>,
347 stack_pop: StackPopCleanup,
348 ) -> InterpResult<'tcx> {
349 let this = self.eval_context_mut();
350 let param_env = ty::ParamEnv::reveal_all(); // in Miri this is always the param_env we use... and this.param_env is private.
351 let callee_abi = f.ty(*this.tcx, param_env).fn_sig(*this.tcx).abi();
352 if this.machine.enforce_abi && callee_abi != caller_abi {
353 throw_ub_format!(
354 "calling a function with ABI {} using caller ABI {}",
355 callee_abi.name(),
356 caller_abi.name()
357 )
358 }
359
360 // Push frame.
361 let mir = this.load_mir(f.def, None)?;
362 let dest = match dest {
363 Some(dest) => dest.clone(),
364 None => MPlaceTy::fake_alloc_zst(this.layout_of(mir.return_ty())?).into(),
365 };
366 this.push_stack_frame(f, mir, &dest, stack_pop)?;
367
368 // Initialize arguments.
369 let mut callee_args = this.frame().body.args_iter();
370 for arg in args {
371 let callee_arg = this.local_place(
372 callee_args
373 .next()
374 .ok_or_else(|| err_ub_format!("callee has fewer arguments than expected"))?,
375 )?;
376 this.write_immediate(*arg, &callee_arg)?;
377 }
378 if callee_args.next().is_some() {
379 throw_ub_format!("callee has more arguments than expected");
380 }
381
382 Ok(())
383 }
384
385 /// Visits the memory covered by `place`, sensitive to freezing: the 2nd parameter
386 /// of `action` will be true if this is frozen, false if this is in an `UnsafeCell`.
387 /// The range is relative to `place`.
visit_freeze_sensitive( &self, place: &MPlaceTy<'tcx, Provenance>, size: Size, mut action: impl FnMut(AllocRange, bool) -> InterpResult<'tcx>, ) -> InterpResult<'tcx>388 fn visit_freeze_sensitive(
389 &self,
390 place: &MPlaceTy<'tcx, Provenance>,
391 size: Size,
392 mut action: impl FnMut(AllocRange, bool) -> InterpResult<'tcx>,
393 ) -> InterpResult<'tcx> {
394 let this = self.eval_context_ref();
395 trace!("visit_frozen(place={:?}, size={:?})", *place, size);
396 debug_assert_eq!(
397 size,
398 this.size_and_align_of_mplace(place)?
399 .map(|(size, _)| size)
400 .unwrap_or_else(|| place.layout.size)
401 );
402 // Store how far we proceeded into the place so far. Everything to the left of
403 // this offset has already been handled, in the sense that the frozen parts
404 // have had `action` called on them.
405 let start_addr = place.ptr.addr();
406 let mut cur_addr = start_addr;
407 // Called when we detected an `UnsafeCell` at the given offset and size.
408 // Calls `action` and advances `cur_ptr`.
409 let mut unsafe_cell_action = |unsafe_cell_ptr: &Pointer<Option<Provenance>>,
410 unsafe_cell_size: Size| {
411 // We assume that we are given the fields in increasing offset order,
412 // and nothing else changes.
413 let unsafe_cell_addr = unsafe_cell_ptr.addr();
414 assert!(unsafe_cell_addr >= cur_addr);
415 let frozen_size = unsafe_cell_addr - cur_addr;
416 // Everything between the cur_ptr and this `UnsafeCell` is frozen.
417 if frozen_size != Size::ZERO {
418 action(alloc_range(cur_addr - start_addr, frozen_size), /*frozen*/ true)?;
419 }
420 cur_addr += frozen_size;
421 // This `UnsafeCell` is NOT frozen.
422 if unsafe_cell_size != Size::ZERO {
423 action(
424 alloc_range(cur_addr - start_addr, unsafe_cell_size),
425 /*frozen*/ false,
426 )?;
427 }
428 cur_addr += unsafe_cell_size;
429 // Done
430 Ok(())
431 };
432 // Run a visitor
433 {
434 let mut visitor = UnsafeCellVisitor {
435 ecx: this,
436 unsafe_cell_action: |place| {
437 trace!("unsafe_cell_action on {:?}", place.ptr);
438 // We need a size to go on.
439 let unsafe_cell_size = this
440 .size_and_align_of_mplace(place)?
441 .map(|(size, _)| size)
442 // for extern types, just cover what we can
443 .unwrap_or_else(|| place.layout.size);
444 // Now handle this `UnsafeCell`, unless it is empty.
445 if unsafe_cell_size != Size::ZERO {
446 unsafe_cell_action(&place.ptr, unsafe_cell_size)
447 } else {
448 Ok(())
449 }
450 },
451 };
452 visitor.visit_value(place)?;
453 }
454 // The part between the end_ptr and the end of the place is also frozen.
455 // So pretend there is a 0-sized `UnsafeCell` at the end.
456 unsafe_cell_action(&place.ptr.offset(size, this)?, Size::ZERO)?;
457 // Done!
458 return Ok(());
459
460 /// Visiting the memory covered by a `MemPlace`, being aware of
461 /// whether we are inside an `UnsafeCell` or not.
462 struct UnsafeCellVisitor<'ecx, 'mir, 'tcx, F>
463 where
464 F: FnMut(&MPlaceTy<'tcx, Provenance>) -> InterpResult<'tcx>,
465 {
466 ecx: &'ecx MiriInterpCx<'mir, 'tcx>,
467 unsafe_cell_action: F,
468 }
469
470 impl<'ecx, 'mir, 'tcx: 'mir, F> ValueVisitor<'mir, 'tcx, MiriMachine<'mir, 'tcx>>
471 for UnsafeCellVisitor<'ecx, 'mir, 'tcx, F>
472 where
473 F: FnMut(&MPlaceTy<'tcx, Provenance>) -> InterpResult<'tcx>,
474 {
475 type V = MPlaceTy<'tcx, Provenance>;
476
477 #[inline(always)]
478 fn ecx(&self) -> &MiriInterpCx<'mir, 'tcx> {
479 self.ecx
480 }
481
482 // Hook to detect `UnsafeCell`.
483 fn visit_value(&mut self, v: &MPlaceTy<'tcx, Provenance>) -> InterpResult<'tcx> {
484 trace!("UnsafeCellVisitor: {:?} {:?}", *v, v.layout.ty);
485 let is_unsafe_cell = match v.layout.ty.kind() {
486 ty::Adt(adt, _) =>
487 Some(adt.did()) == self.ecx.tcx.lang_items().unsafe_cell_type(),
488 _ => false,
489 };
490 if is_unsafe_cell {
491 // We do not have to recurse further, this is an `UnsafeCell`.
492 (self.unsafe_cell_action)(v)
493 } else if self.ecx.type_is_freeze(v.layout.ty) {
494 // This is `Freeze`, there cannot be an `UnsafeCell`
495 Ok(())
496 } else if matches!(v.layout.fields, FieldsShape::Union(..)) {
497 // A (non-frozen) union. We fall back to whatever the type says.
498 (self.unsafe_cell_action)(v)
499 } else if matches!(v.layout.ty.kind(), ty::Dynamic(_, _, ty::DynStar)) {
500 // This needs to read the vtable pointer to proceed type-driven, but we don't
501 // want to reentrantly read from memory here.
502 (self.unsafe_cell_action)(v)
503 } else {
504 // We want to not actually read from memory for this visit. So, before
505 // walking this value, we have to make sure it is not a
506 // `Variants::Multiple`.
507 match v.layout.variants {
508 Variants::Multiple { .. } => {
509 // A multi-variant enum, or generator, or so.
510 // Treat this like a union: without reading from memory,
511 // we cannot determine the variant we are in. Reading from
512 // memory would be subject to Stacked Borrows rules, leading
513 // to all sorts of "funny" recursion.
514 // We only end up here if the type is *not* freeze, so we just call the
515 // `UnsafeCell` action.
516 (self.unsafe_cell_action)(v)
517 }
518 Variants::Single { .. } => {
519 // Proceed further, try to find where exactly that `UnsafeCell`
520 // is hiding.
521 self.walk_value(v)
522 }
523 }
524 }
525 }
526
527 // Make sure we visit aggregates in increasing offset order.
528 fn visit_aggregate(
529 &mut self,
530 place: &MPlaceTy<'tcx, Provenance>,
531 fields: impl Iterator<Item = InterpResult<'tcx, MPlaceTy<'tcx, Provenance>>>,
532 ) -> InterpResult<'tcx> {
533 match place.layout.fields {
534 FieldsShape::Array { .. } => {
535 // For the array layout, we know the iterator will yield sorted elements so
536 // we can avoid the allocation.
537 self.walk_aggregate(place, fields)
538 }
539 FieldsShape::Arbitrary { .. } => {
540 // Gather the subplaces and sort them before visiting.
541 let mut places = fields
542 .collect::<InterpResult<'tcx, Vec<MPlaceTy<'tcx, Provenance>>>>()?;
543 // we just compare offsets, the abs. value never matters
544 places.sort_by_key(|place| place.ptr.addr());
545 self.walk_aggregate(place, places.into_iter().map(Ok))
546 }
547 FieldsShape::Union { .. } | FieldsShape::Primitive => {
548 // Uh, what?
549 bug!("unions/primitives are not aggregates we should ever visit")
550 }
551 }
552 }
553
554 fn visit_union(
555 &mut self,
556 _v: &MPlaceTy<'tcx, Provenance>,
557 _fields: NonZeroUsize,
558 ) -> InterpResult<'tcx> {
559 bug!("we should have already handled unions in `visit_value`")
560 }
561 }
562 }
563
564 /// Helper function used inside the shims of foreign functions to check that isolation is
565 /// disabled. It returns an error using the `name` of the foreign function if this is not the
566 /// case.
check_no_isolation(&self, name: &str) -> InterpResult<'tcx>567 fn check_no_isolation(&self, name: &str) -> InterpResult<'tcx> {
568 if !self.eval_context_ref().machine.communicate() {
569 self.reject_in_isolation(name, RejectOpWith::Abort)?;
570 }
571 Ok(())
572 }
573
574 /// Helper function used inside the shims of foreign functions which reject the op
575 /// when isolation is enabled. It is used to print a warning/backtrace about the rejection.
reject_in_isolation(&self, op_name: &str, reject_with: RejectOpWith) -> InterpResult<'tcx>576 fn reject_in_isolation(&self, op_name: &str, reject_with: RejectOpWith) -> InterpResult<'tcx> {
577 let this = self.eval_context_ref();
578 match reject_with {
579 RejectOpWith::Abort => isolation_abort_error(op_name),
580 RejectOpWith::WarningWithoutBacktrace => {
581 this.tcx
582 .sess
583 .warn(format!("{op_name} was made to return an error due to isolation"));
584 Ok(())
585 }
586 RejectOpWith::Warning => {
587 this.emit_diagnostic(NonHaltingDiagnostic::RejectedIsolatedOp(op_name.to_string()));
588 Ok(())
589 }
590 RejectOpWith::NoWarning => Ok(()), // no warning
591 }
592 }
593
594 /// Helper function used inside the shims of foreign functions to assert that the target OS
595 /// is `target_os`. It panics showing a message with the `name` of the foreign function
596 /// if this is not the case.
assert_target_os(&self, target_os: &str, name: &str)597 fn assert_target_os(&self, target_os: &str, name: &str) {
598 assert_eq!(
599 self.eval_context_ref().tcx.sess.target.os,
600 target_os,
601 "`{name}` is only available on the `{target_os}` target OS",
602 )
603 }
604
605 /// Helper function used inside the shims of foreign functions to assert that the target OS
606 /// is part of the UNIX family. It panics showing a message with the `name` of the foreign function
607 /// if this is not the case.
assert_target_os_is_unix(&self, name: &str)608 fn assert_target_os_is_unix(&self, name: &str) {
609 assert!(
610 target_os_is_unix(self.eval_context_ref().tcx.sess.target.os.as_ref()),
611 "`{name}` is only available for supported UNIX family targets",
612 );
613 }
614
615 /// Get last error variable as a place, lazily allocating thread-local storage for it if
616 /// necessary.
last_error_place(&mut self) -> InterpResult<'tcx, MPlaceTy<'tcx, Provenance>>617 fn last_error_place(&mut self) -> InterpResult<'tcx, MPlaceTy<'tcx, Provenance>> {
618 let this = self.eval_context_mut();
619 if let Some(errno_place) = this.active_thread_ref().last_error {
620 Ok(errno_place)
621 } else {
622 // Allocate new place, set initial value to 0.
623 let errno_layout = this.machine.layouts.u32;
624 let errno_place = this.allocate(errno_layout, MiriMemoryKind::Machine.into())?;
625 this.write_scalar(Scalar::from_u32(0), &errno_place.into())?;
626 this.active_thread_mut().last_error = Some(errno_place);
627 Ok(errno_place)
628 }
629 }
630
631 /// Sets the last error variable.
set_last_error(&mut self, scalar: Scalar<Provenance>) -> InterpResult<'tcx>632 fn set_last_error(&mut self, scalar: Scalar<Provenance>) -> InterpResult<'tcx> {
633 let this = self.eval_context_mut();
634 let errno_place = this.last_error_place()?;
635 this.write_scalar(scalar, &errno_place.into())
636 }
637
638 /// Gets the last error variable.
get_last_error(&mut self) -> InterpResult<'tcx, Scalar<Provenance>>639 fn get_last_error(&mut self) -> InterpResult<'tcx, Scalar<Provenance>> {
640 let this = self.eval_context_mut();
641 let errno_place = this.last_error_place()?;
642 this.read_scalar(&errno_place.into())
643 }
644
645 /// This function tries to produce the most similar OS error from the `std::io::ErrorKind`
646 /// as a platform-specific errnum.
io_error_to_errnum( &self, err_kind: std::io::ErrorKind, ) -> InterpResult<'tcx, Scalar<Provenance>>647 fn io_error_to_errnum(
648 &self,
649 err_kind: std::io::ErrorKind,
650 ) -> InterpResult<'tcx, Scalar<Provenance>> {
651 let this = self.eval_context_ref();
652 let target = &this.tcx.sess.target;
653 if target.families.iter().any(|f| f == "unix") {
654 for &(name, kind) in UNIX_IO_ERROR_TABLE {
655 if err_kind == kind {
656 return Ok(this.eval_libc(name));
657 }
658 }
659 throw_unsup_format!("io error {:?} cannot be translated into a raw os error", err_kind)
660 } else if target.families.iter().any(|f| f == "windows") {
661 // FIXME: we have to finish implementing the Windows equivalent of this.
662 use std::io::ErrorKind::*;
663 Ok(this.eval_windows(
664 "c",
665 match err_kind {
666 NotFound => "ERROR_FILE_NOT_FOUND",
667 PermissionDenied => "ERROR_ACCESS_DENIED",
668 _ =>
669 throw_unsup_format!(
670 "io error {:?} cannot be translated into a raw os error",
671 err_kind
672 ),
673 },
674 ))
675 } else {
676 throw_unsup_format!(
677 "converting io::Error into errnum is unsupported for OS {}",
678 target.os
679 )
680 }
681 }
682
683 /// The inverse of `io_error_to_errnum`.
684 #[allow(clippy::needless_return)]
try_errnum_to_io_error( &self, errnum: Scalar<Provenance>, ) -> InterpResult<'tcx, Option<std::io::ErrorKind>>685 fn try_errnum_to_io_error(
686 &self,
687 errnum: Scalar<Provenance>,
688 ) -> InterpResult<'tcx, Option<std::io::ErrorKind>> {
689 let this = self.eval_context_ref();
690 let target = &this.tcx.sess.target;
691 if target.families.iter().any(|f| f == "unix") {
692 let errnum = errnum.to_i32()?;
693 for &(name, kind) in UNIX_IO_ERROR_TABLE {
694 if errnum == this.eval_libc_i32(name) {
695 return Ok(Some(kind));
696 }
697 }
698 // Our table is as complete as the mapping in std, so we are okay with saying "that's a
699 // strange one" here.
700 return Ok(None);
701 } else {
702 throw_unsup_format!(
703 "converting errnum into io::Error is unsupported for OS {}",
704 target.os
705 )
706 }
707 }
708
709 /// Sets the last OS error using a `std::io::ErrorKind`.
set_last_error_from_io_error(&mut self, err_kind: std::io::ErrorKind) -> InterpResult<'tcx>710 fn set_last_error_from_io_error(&mut self, err_kind: std::io::ErrorKind) -> InterpResult<'tcx> {
711 self.set_last_error(self.io_error_to_errnum(err_kind)?)
712 }
713
714 /// Helper function that consumes an `std::io::Result<T>` and returns an
715 /// `InterpResult<'tcx,T>::Ok` instead. In case the result is an error, this function returns
716 /// `Ok(-1)` and sets the last OS error accordingly.
717 ///
718 /// This function uses `T: From<i32>` instead of `i32` directly because some IO related
719 /// functions return different integer types (like `read`, that returns an `i64`).
try_unwrap_io_result<T: From<i32>>( &mut self, result: std::io::Result<T>, ) -> InterpResult<'tcx, T>720 fn try_unwrap_io_result<T: From<i32>>(
721 &mut self,
722 result: std::io::Result<T>,
723 ) -> InterpResult<'tcx, T> {
724 match result {
725 Ok(ok) => Ok(ok),
726 Err(e) => {
727 self.eval_context_mut().set_last_error_from_io_error(e.kind())?;
728 Ok((-1).into())
729 }
730 }
731 }
732
733 /// Dereference a pointer operand to a place using `layout` instead of the pointer's declared type
deref_operand_as( &self, op: &OpTy<'tcx, Provenance>, layout: TyAndLayout<'tcx>, ) -> InterpResult<'tcx, MPlaceTy<'tcx, Provenance>>734 fn deref_operand_as(
735 &self,
736 op: &OpTy<'tcx, Provenance>,
737 layout: TyAndLayout<'tcx>,
738 ) -> InterpResult<'tcx, MPlaceTy<'tcx, Provenance>> {
739 let this = self.eval_context_ref();
740 let ptr = this.read_pointer(op)?;
741
742 let mplace = MPlaceTy::from_aligned_ptr(ptr, layout);
743
744 this.check_mplace(mplace)?;
745
746 Ok(mplace)
747 }
748
deref_pointer_as( &self, val: &ImmTy<'tcx, Provenance>, layout: TyAndLayout<'tcx>, ) -> InterpResult<'tcx, MPlaceTy<'tcx, Provenance>>749 fn deref_pointer_as(
750 &self,
751 val: &ImmTy<'tcx, Provenance>,
752 layout: TyAndLayout<'tcx>,
753 ) -> InterpResult<'tcx, MPlaceTy<'tcx, Provenance>> {
754 let this = self.eval_context_ref();
755 let mut mplace = this.ref_to_mplace(val)?;
756
757 mplace.layout = layout;
758 mplace.align = layout.align.abi;
759
760 Ok(mplace)
761 }
762
763 /// Calculates the MPlaceTy given the offset and layout of an access on an operand
deref_operand_and_offset( &self, op: &OpTy<'tcx, Provenance>, offset: u64, base_layout: TyAndLayout<'tcx>, value_layout: TyAndLayout<'tcx>, ) -> InterpResult<'tcx, MPlaceTy<'tcx, Provenance>>764 fn deref_operand_and_offset(
765 &self,
766 op: &OpTy<'tcx, Provenance>,
767 offset: u64,
768 base_layout: TyAndLayout<'tcx>,
769 value_layout: TyAndLayout<'tcx>,
770 ) -> InterpResult<'tcx, MPlaceTy<'tcx, Provenance>> {
771 let this = self.eval_context_ref();
772 let op_place = this.deref_operand_as(op, base_layout)?;
773 let offset = Size::from_bytes(offset);
774
775 // Ensure that the access is within bounds.
776 assert!(base_layout.size >= offset + value_layout.size);
777 let value_place = op_place.offset(offset, value_layout, this)?;
778 Ok(value_place)
779 }
780
read_scalar_at_offset( &self, op: &OpTy<'tcx, Provenance>, offset: u64, base_layout: TyAndLayout<'tcx>, value_layout: TyAndLayout<'tcx>, ) -> InterpResult<'tcx, Scalar<Provenance>>781 fn read_scalar_at_offset(
782 &self,
783 op: &OpTy<'tcx, Provenance>,
784 offset: u64,
785 base_layout: TyAndLayout<'tcx>,
786 value_layout: TyAndLayout<'tcx>,
787 ) -> InterpResult<'tcx, Scalar<Provenance>> {
788 let this = self.eval_context_ref();
789 let value_place = this.deref_operand_and_offset(op, offset, base_layout, value_layout)?;
790 this.read_scalar(&value_place.into())
791 }
792
write_scalar_at_offset( &mut self, op: &OpTy<'tcx, Provenance>, offset: u64, value: impl Into<Scalar<Provenance>>, base_layout: TyAndLayout<'tcx>, value_layout: TyAndLayout<'tcx>, ) -> InterpResult<'tcx, ()>793 fn write_scalar_at_offset(
794 &mut self,
795 op: &OpTy<'tcx, Provenance>,
796 offset: u64,
797 value: impl Into<Scalar<Provenance>>,
798 base_layout: TyAndLayout<'tcx>,
799 value_layout: TyAndLayout<'tcx>,
800 ) -> InterpResult<'tcx, ()> {
801 let this = self.eval_context_mut();
802 let value_place = this.deref_operand_and_offset(op, offset, base_layout, value_layout)?;
803 this.write_scalar(value, &value_place.into())
804 }
805
806 /// Parse a `timespec` struct and return it as a `std::time::Duration`. It returns `None`
807 /// if the value in the `timespec` struct is invalid. Some libc functions will return
808 /// `EINVAL` in this case.
read_timespec( &mut self, tp: &MPlaceTy<'tcx, Provenance>, ) -> InterpResult<'tcx, Option<Duration>>809 fn read_timespec(
810 &mut self,
811 tp: &MPlaceTy<'tcx, Provenance>,
812 ) -> InterpResult<'tcx, Option<Duration>> {
813 let this = self.eval_context_mut();
814 let seconds_place = this.mplace_field(tp, 0)?;
815 let seconds_scalar = this.read_scalar(&seconds_place.into())?;
816 let seconds = seconds_scalar.to_target_isize(this)?;
817 let nanoseconds_place = this.mplace_field(tp, 1)?;
818 let nanoseconds_scalar = this.read_scalar(&nanoseconds_place.into())?;
819 let nanoseconds = nanoseconds_scalar.to_target_isize(this)?;
820
821 Ok(try {
822 // tv_sec must be non-negative.
823 let seconds: u64 = seconds.try_into().ok()?;
824 // tv_nsec must be non-negative.
825 let nanoseconds: u32 = nanoseconds.try_into().ok()?;
826 if nanoseconds >= 1_000_000_000 {
827 // tv_nsec must not be greater than 999,999,999.
828 None?
829 }
830 Duration::new(seconds, nanoseconds)
831 })
832 }
833
834 /// Read a sequence of bytes until the first null terminator.
read_c_str<'a>(&'a self, ptr: Pointer<Option<Provenance>>) -> InterpResult<'tcx, &'a [u8]> where 'tcx: 'a, 'mir: 'a,835 fn read_c_str<'a>(&'a self, ptr: Pointer<Option<Provenance>>) -> InterpResult<'tcx, &'a [u8]>
836 where
837 'tcx: 'a,
838 'mir: 'a,
839 {
840 let this = self.eval_context_ref();
841 let size1 = Size::from_bytes(1);
842
843 // Step 1: determine the length.
844 let mut len = Size::ZERO;
845 loop {
846 // FIXME: We are re-getting the allocation each time around the loop.
847 // Would be nice if we could somehow "extend" an existing AllocRange.
848 let alloc = this.get_ptr_alloc(ptr.offset(len, this)?, size1, Align::ONE)?.unwrap(); // not a ZST, so we will get a result
849 let byte = alloc.read_integer(alloc_range(Size::ZERO, size1))?.to_u8()?;
850 if byte == 0 {
851 break;
852 } else {
853 len += size1;
854 }
855 }
856
857 // Step 2: get the bytes.
858 this.read_bytes_ptr_strip_provenance(ptr, len)
859 }
860
861 /// Helper function to write a sequence of bytes with an added null-terminator, which is what
862 /// the Unix APIs usually handle. This function returns `Ok((false, length))` without trying
863 /// to write if `size` is not large enough to fit the contents of `c_str` plus a null
864 /// terminator. It returns `Ok((true, length))` if the writing process was successful. The
865 /// string length returned does include the null terminator.
write_c_str( &mut self, c_str: &[u8], ptr: Pointer<Option<Provenance>>, size: u64, ) -> InterpResult<'tcx, (bool, u64)>866 fn write_c_str(
867 &mut self,
868 c_str: &[u8],
869 ptr: Pointer<Option<Provenance>>,
870 size: u64,
871 ) -> InterpResult<'tcx, (bool, u64)> {
872 // If `size` is smaller or equal than `bytes.len()`, writing `bytes` plus the required null
873 // terminator to memory using the `ptr` pointer would cause an out-of-bounds access.
874 let string_length = u64::try_from(c_str.len()).unwrap();
875 let string_length = string_length.checked_add(1).unwrap();
876 if size < string_length {
877 return Ok((false, string_length));
878 }
879 self.eval_context_mut()
880 .write_bytes_ptr(ptr, c_str.iter().copied().chain(iter::once(0u8)))?;
881 Ok((true, string_length))
882 }
883
884 /// Read a sequence of u16 until the first null terminator.
read_wide_str(&self, mut ptr: Pointer<Option<Provenance>>) -> InterpResult<'tcx, Vec<u16>>885 fn read_wide_str(&self, mut ptr: Pointer<Option<Provenance>>) -> InterpResult<'tcx, Vec<u16>> {
886 let this = self.eval_context_ref();
887 let size2 = Size::from_bytes(2);
888 let align2 = Align::from_bytes(2).unwrap();
889
890 let mut wchars = Vec::new();
891 loop {
892 // FIXME: We are re-getting the allocation each time around the loop.
893 // Would be nice if we could somehow "extend" an existing AllocRange.
894 let alloc = this.get_ptr_alloc(ptr, size2, align2)?.unwrap(); // not a ZST, so we will get a result
895 let wchar = alloc.read_integer(alloc_range(Size::ZERO, size2))?.to_u16()?;
896 if wchar == 0 {
897 break;
898 } else {
899 wchars.push(wchar);
900 ptr = ptr.offset(size2, this)?;
901 }
902 }
903
904 Ok(wchars)
905 }
906
907 /// Helper function to write a sequence of u16 with an added 0x0000-terminator, which is what
908 /// the Windows APIs usually handle. This function returns `Ok((false, length))` without trying
909 /// to write if `size` is not large enough to fit the contents of `os_string` plus a null
910 /// terminator. It returns `Ok((true, length))` if the writing process was successful. The
911 /// string length returned does include the null terminator. Length is measured in units of
912 /// `u16.`
write_wide_str( &mut self, wide_str: &[u16], ptr: Pointer<Option<Provenance>>, size: u64, ) -> InterpResult<'tcx, (bool, u64)>913 fn write_wide_str(
914 &mut self,
915 wide_str: &[u16],
916 ptr: Pointer<Option<Provenance>>,
917 size: u64,
918 ) -> InterpResult<'tcx, (bool, u64)> {
919 // If `size` is smaller or equal than `bytes.len()`, writing `bytes` plus the required
920 // 0x0000 terminator to memory would cause an out-of-bounds access.
921 let string_length = u64::try_from(wide_str.len()).unwrap();
922 let string_length = string_length.checked_add(1).unwrap();
923 if size < string_length {
924 return Ok((false, string_length));
925 }
926
927 // Store the UTF-16 string.
928 let size2 = Size::from_bytes(2);
929 let this = self.eval_context_mut();
930 let mut alloc = this
931 .get_ptr_alloc_mut(ptr, size2 * string_length, Align::from_bytes(2).unwrap())?
932 .unwrap(); // not a ZST, so we will get a result
933 for (offset, wchar) in wide_str.iter().copied().chain(iter::once(0x0000)).enumerate() {
934 let offset = u64::try_from(offset).unwrap();
935 alloc.write_scalar(alloc_range(size2 * offset, size2), Scalar::from_u16(wchar))?;
936 }
937 Ok((true, string_length))
938 }
939
940 /// Check that the ABI is what we expect.
check_abi<'a>(&self, abi: Abi, exp_abi: Abi) -> InterpResult<'a, ()>941 fn check_abi<'a>(&self, abi: Abi, exp_abi: Abi) -> InterpResult<'a, ()> {
942 if self.eval_context_ref().machine.enforce_abi && abi != exp_abi {
943 throw_ub_format!(
944 "calling a function with ABI {} using caller ABI {}",
945 exp_abi.name(),
946 abi.name()
947 )
948 }
949 Ok(())
950 }
951
    /// Determine whether the current frame belongs to the crate that defines the
    /// `start` lang item (i.e. the standard library), looking through MIR inlining
    /// so that code inlined *from* std still counts as std. Returns `false` in
    /// no_std situations, where there is no `start` lang item at all.
    fn frame_in_std(&self) -> bool {
        let this = self.eval_context_ref();
        let Some(start_fn) = this.tcx.lang_items().start_fn() else {
            // no_std situations
            return false;
        };
        let frame = this.frame();
        // Make an attempt to get at the instance of the function this is inlined from.
        // The `try` block yields `None` if there is no source info, or if the current
        // scope has no inlined parent (i.e. the code was not inlined from anywhere).
        let instance: Option<_> = try {
            let scope = frame.current_source_info()?.scope;
            let inlined_parent = frame.body.source_scopes[scope].inlined_parent_scope?;
            let source = &frame.body.source_scopes[inlined_parent];
            source.inlined.expect("inlined_parent_scope points to scope without inline info").0
        };
        // Fall back to the instance of the function itself.
        let instance = instance.unwrap_or(frame.instance);
        // Now check if this is in the same crate as start_fn.
        // As a special exception we also allow unit tests from
        // <https://github.com/rust-lang/miri-test-libstd/tree/master/std_miri_test> to call these
        // shims.
        let frame_crate = this.tcx.def_path(instance.def_id()).krate;
        frame_crate == this.tcx.def_path(start_fn).krate
            || this.tcx.crate_name(frame_crate).as_str() == "std_miri_test"
    }
976
977 /// Handler that should be called when unsupported functionality is encountered.
978 /// This function will either panic within the context of the emulated application
979 /// or return an error in the Miri process context
980 ///
981 /// Return value of `Ok(bool)` indicates whether execution should continue.
handle_unsupported<S: AsRef<str>>(&mut self, error_msg: S) -> InterpResult<'tcx, ()>982 fn handle_unsupported<S: AsRef<str>>(&mut self, error_msg: S) -> InterpResult<'tcx, ()> {
983 let this = self.eval_context_mut();
984 if this.machine.panic_on_unsupported {
985 // message is slightly different here to make automated analysis easier
986 let error_msg = format!("unsupported Miri functionality: {}", error_msg.as_ref());
987 this.start_panic(error_msg.as_ref(), mir::UnwindAction::Continue)?;
988 Ok(())
989 } else {
990 throw_unsup_format!("{}", error_msg.as_ref());
991 }
992 }
993
check_abi_and_shim_symbol_clash( &mut self, abi: Abi, exp_abi: Abi, link_name: Symbol, ) -> InterpResult<'tcx, ()>994 fn check_abi_and_shim_symbol_clash(
995 &mut self,
996 abi: Abi,
997 exp_abi: Abi,
998 link_name: Symbol,
999 ) -> InterpResult<'tcx, ()> {
1000 self.check_abi(abi, exp_abi)?;
1001 if let Some((body, instance)) = self.eval_context_mut().lookup_exported_symbol(link_name)? {
1002 // If compiler-builtins is providing the symbol, then don't treat it as a clash.
1003 // We'll use our built-in implementation in `emulate_foreign_item_by_name` for increased
1004 // performance. Note that this means we won't catch any undefined behavior in
1005 // compiler-builtins when running other crates, but Miri can still be run on
1006 // compiler-builtins itself (or any crate that uses it as a normal dependency)
1007 if self.eval_context_ref().tcx.is_compiler_builtins(instance.def_id().krate) {
1008 return Ok(());
1009 }
1010
1011 throw_machine_stop!(TerminationInfo::SymbolShimClashing {
1012 link_name,
1013 span: body.span.data(),
1014 })
1015 }
1016 Ok(())
1017 }
1018
    /// Standard entry-point checks for a shim: verify the caller-side ABI matches
    /// `exp_abi`, check that `link_name` does not clash with an exported symbol,
    /// and check that exactly `N` arguments were passed, returning them as a
    /// fixed-size array reference.
    fn check_shim<'a, const N: usize>(
        &mut self,
        abi: Abi,
        exp_abi: Abi,
        link_name: Symbol,
        args: &'a [OpTy<'tcx, Provenance>],
    ) -> InterpResult<'tcx, &'a [OpTy<'tcx, Provenance>; N]>
    where
        &'a [OpTy<'tcx, Provenance>; N]: TryFrom<&'a [OpTy<'tcx, Provenance>]>,
    {
        self.check_abi_and_shim_symbol_clash(abi, exp_abi, link_name)?;
        check_arg_count(args)
    }
1032
    /// Mark a machine allocation that was just created as immutable.
    /// The `unwrap`s encode the precondition that `mplace` points to a
    /// freshly-created machine allocation (so it has a real pointer with
    /// provenance and a concrete allocation id); violating that is a bug.
    fn mark_immutable(&mut self, mplace: &MemPlace<Provenance>) {
        let this = self.eval_context_mut();
        // This got just allocated, so there definitely is a pointer here.
        let provenance = mplace.ptr.into_pointer_or_addr().unwrap().provenance;
        this.alloc_mark_immutable(provenance.get_alloc_id().unwrap()).unwrap();
    }
1040
item_link_name(&self, def_id: DefId) -> Symbol1041 fn item_link_name(&self, def_id: DefId) -> Symbol {
1042 let tcx = self.eval_context_ref().tcx;
1043 match tcx.get_attrs(def_id, sym::link_name).filter_map(|a| a.value_str()).next() {
1044 Some(name) => name,
1045 None => tcx.item_name(def_id),
1046 }
1047 }
1048 }
1049
impl<'mir, 'tcx> MiriMachine<'mir, 'tcx> {
    /// Get the current span in the topmost function which is workspace-local and not
    /// `#[track_caller]`.
    /// This function is backed by a cache, and can be assumed to be very fast.
    /// It will work even when the stack is empty (returning `DUMMY_SP` then).
    pub fn current_span(&self) -> Span {
        self.top_user_relevant_frame()
            .map(|frame_idx| self.stack()[frame_idx].current_span())
            .unwrap_or(rustc_span::DUMMY_SP)
    }

    /// Returns the span of the *caller* of the current operation, again
    /// walking down the stack to find the closest frame in a local crate, if the caller of the
    /// current operation is not in a local crate.
    /// This is useful when we are processing something which occurs on function-entry and we want
    /// to point at the call to the function, not the function definition generally.
    pub fn caller_span(&self) -> Span {
        // We need to go down at least to the caller (len - 2), or however
        // far we have to go to find a frame in a local crate which is also not #[track_caller].
        let frame_idx = self.top_user_relevant_frame().unwrap();
        let frame_idx = cmp::min(frame_idx, self.stack().len().checked_sub(2).unwrap());
        self.stack()[frame_idx].current_span()
    }

    /// The call stack of the currently active thread.
    fn stack(&self) -> &[Frame<'mir, 'tcx, Provenance, machine::FrameExtra<'tcx>>] {
        self.threads.active_thread_stack()
    }

    /// Index into `stack()` of the topmost user-relevant frame of the active
    /// thread (see `is_user_relevant`), or `None` if there is no such frame.
    fn top_user_relevant_frame(&self) -> Option<usize> {
        self.threads.active_thread_ref().top_user_relevant_frame()
    }

    /// This is the source of truth for the `is_user_relevant` flag in our `FrameExtra`.
    /// A frame is user-relevant if its function comes from a local crate and it
    /// does not require a caller location (i.e. it is not `#[track_caller]`).
    pub fn is_user_relevant(&self, frame: &Frame<'mir, 'tcx, Provenance>) -> bool {
        let def_id = frame.instance.def_id();
        (def_id.is_local() || self.local_crates.contains(&def_id.krate))
            && !frame.instance.def.requires_caller_location(self.tcx)
    }
}
1089
1090 /// Check that the number of args is what we expect.
check_arg_count<'a, 'tcx, const N: usize>( args: &'a [OpTy<'tcx, Provenance>], ) -> InterpResult<'tcx, &'a [OpTy<'tcx, Provenance>; N]> where &'a [OpTy<'tcx, Provenance>; N]: TryFrom<&'a [OpTy<'tcx, Provenance>]>,1091 pub fn check_arg_count<'a, 'tcx, const N: usize>(
1092 args: &'a [OpTy<'tcx, Provenance>],
1093 ) -> InterpResult<'tcx, &'a [OpTy<'tcx, Provenance>; N]>
1094 where
1095 &'a [OpTy<'tcx, Provenance>; N]: TryFrom<&'a [OpTy<'tcx, Provenance>]>,
1096 {
1097 if let Ok(ops) = args.try_into() {
1098 return Ok(ops);
1099 }
1100 throw_ub_format!("incorrect number of arguments: got {}, expected {}", args.len(), N)
1101 }
1102
/// Stop the machine with a `TerminationInfo::UnsupportedInIsolation` error for
/// the operation called `name`, which is not available while isolation is enabled.
pub fn isolation_abort_error<'tcx>(name: &str) -> InterpResult<'tcx> {
    throw_machine_stop!(TerminationInfo::UnsupportedInIsolation(format!(
        "{name} not available when isolation is enabled",
    )))
}
1108
1109 /// Retrieve the list of local crates that should have been passed by cargo-miri in
1110 /// MIRI_LOCAL_CRATES and turn them into `CrateNum`s.
get_local_crates(tcx: TyCtxt<'_>) -> Vec<CrateNum>1111 pub fn get_local_crates(tcx: TyCtxt<'_>) -> Vec<CrateNum> {
1112 // Convert the local crate names from the passed-in config into CrateNums so that they can
1113 // be looked up quickly during execution
1114 let local_crate_names = std::env::var("MIRI_LOCAL_CRATES")
1115 .map(|crates| crates.split(',').map(|krate| krate.to_string()).collect::<Vec<_>>())
1116 .unwrap_or_default();
1117 let mut local_crates = Vec::new();
1118 for &crate_num in tcx.crates(()) {
1119 let name = tcx.crate_name(crate_num);
1120 let name = name.as_str();
1121 if local_crate_names.iter().any(|local_name| local_name == name) {
1122 local_crates.push(crate_num);
1123 }
1124 }
1125 local_crates
1126 }
1127
/// Helper function used inside the shims of foreign functions to check that
/// `target_os` is a supported UNIX OS.
pub fn target_os_is_unix(target_os: &str) -> bool {
    ["linux", "macos", "freebsd", "android"].contains(&target_os)
}
1133