1 //! An interpreter for MIR used in CTFE and by miri.
2 
/// Builds an `InterpError::Unsupported`; the macro arguments are pasted after
/// `UnsupportedOpInfo::`, so callers name a variant (plus any payload) directly.
#[macro_export]
macro_rules! err_unsup {
    ($($tt:tt)*) => {
        $crate::mir::interpret::InterpError::Unsupported(
            $crate::mir::interpret::UnsupportedOpInfo::$($tt)*
        )
    };
}
11 
/// Builds an `InterpError::Unsupported` carrying a `format!`-ed message.
#[macro_export]
macro_rules! err_unsup_format {
    // Use `$crate::` so this `#[macro_export]`ed macro also expands correctly in
    // downstream crates that did not separately import `err_unsup!` (exported
    // macros live at the crate root, so `$crate::err_unsup!` always resolves).
    ($($tt:tt)*) => { $crate::err_unsup!(Unsupported(format!($($tt)*))) };
}
16 
/// Builds an `InterpError::InvalidProgram`; the macro arguments are pasted after
/// `InvalidProgramInfo::`, so callers name a variant (plus any payload) directly.
#[macro_export]
macro_rules! err_inval {
    ($($tt:tt)*) => {
        $crate::mir::interpret::InterpError::InvalidProgram(
            $crate::mir::interpret::InvalidProgramInfo::$($tt)*
        )
    };
}
25 
/// Builds an `InterpError::UndefinedBehavior`; the macro arguments are pasted
/// after `UndefinedBehaviorInfo::`, so callers name a variant directly.
#[macro_export]
macro_rules! err_ub {
    ($($tt:tt)*) => {
        $crate::mir::interpret::InterpError::UndefinedBehavior(
            $crate::mir::interpret::UndefinedBehaviorInfo::$($tt)*
        )
    };
}
34 
/// Builds an `InterpError::UndefinedBehavior` carrying a `format!`-ed message
/// (wrapped in the `Ub` variant).
#[macro_export]
macro_rules! err_ub_format {
    // `$crate::` keeps this usable from downstream crates without a separate
    // import of `err_ub!` (exported macros live at the crate root).
    ($($tt:tt)*) => { $crate::err_ub!(Ub(format!($($tt)*))) };
}
39 
/// Builds an `InterpError::ResourceExhaustion`; the macro arguments are pasted
/// after `ResourceExhaustionInfo::`, so callers name a variant directly.
#[macro_export]
macro_rules! err_exhaust {
    ($($tt:tt)*) => {
        $crate::mir::interpret::InterpError::ResourceExhaustion(
            $crate::mir::interpret::ResourceExhaustionInfo::$($tt)*
        )
    };
}
48 
/// Builds an `InterpError::MachineStop`, boxing the given machine-defined payload.
#[macro_export]
macro_rules! err_machine_stop {
    ($($tt:tt)*) => {
        $crate::mir::interpret::InterpError::MachineStop(Box::new($($tt)*))
    };
}
55 
// In the `throw_*` macros, avoid `return` to make them work with `try {}`.
/// Throws (via `do yeet`) an `err_unsup!` error from the enclosing `try`-like scope.
#[macro_export]
macro_rules! throw_unsup {
    // `$crate::` keeps this usable from downstream crates without a separate
    // import of `err_unsup!` (exported macros live at the crate root).
    ($($tt:tt)*) => { do yeet $crate::err_unsup!($($tt)*) };
}
61 
/// Throws an `InterpError::Unsupported` carrying a `format!`-ed message.
#[macro_export]
macro_rules! throw_unsup_format {
    // `$crate::` keeps this usable without a separate import of `throw_unsup!`.
    ($($tt:tt)*) => { $crate::throw_unsup!(Unsupported(format!($($tt)*))) };
}
66 
/// Throws (via `do yeet`) an `err_inval!` error from the enclosing `try`-like scope.
#[macro_export]
macro_rules! throw_inval {
    // `$crate::` keeps this usable without a separate import of `err_inval!`.
    ($($tt:tt)*) => { do yeet $crate::err_inval!($($tt)*) };
}
71 
/// Throws (via `do yeet`) an `err_ub!` error from the enclosing `try`-like scope.
#[macro_export]
macro_rules! throw_ub {
    // `$crate::` keeps this usable without a separate import of `err_ub!`.
    ($($tt:tt)*) => { do yeet $crate::err_ub!($($tt)*) };
}
76 
/// Throws an `InterpError::UndefinedBehavior` carrying a `format!`-ed message.
#[macro_export]
macro_rules! throw_ub_format {
    // `$crate::` keeps this usable without a separate import of `throw_ub!`.
    ($($tt:tt)*) => { $crate::throw_ub!(Ub(format!($($tt)*))) };
}
81 
/// Throws (via `do yeet`) an `err_exhaust!` error from the enclosing `try`-like scope.
#[macro_export]
macro_rules! throw_exhaust {
    // `$crate::` keeps this usable without a separate import of `err_exhaust!`.
    ($($tt:tt)*) => { do yeet $crate::err_exhaust!($($tt)*) };
}
86 
/// Throws (via `do yeet`) an `err_machine_stop!` error from the enclosing scope.
#[macro_export]
macro_rules! throw_machine_stop {
    // `$crate::` keeps this usable without a separate import of `err_machine_stop!`.
    ($($tt:tt)*) => { do yeet $crate::err_machine_stop!($($tt)*) };
}
91 
/// Builds an `InterpError::UndefinedBehavior` with a custom subdiagnostic.
///
/// Usage: `err_ub_custom!($msg)` or `err_ub_custom!($msg, name1 = val1, name2 = val2)`.
/// The named values become diagnostic arguments of the subdiagnostic.
#[macro_export]
macro_rules! err_ub_custom {
    ($msg:expr $(, $($name:ident = $value:expr),* $(,)?)?) => {{
        // Evaluate all `$value`s eagerly into locals named after `$name`, so the
        // `move` closure below can capture them by value.
        $(
            let ($($name,)*) = ($($value,)*);
        )?
        err_ub!(Custom(
            rustc_middle::error::CustomSubdiagnostic {
                // `msg` is a closure so the message is only built when emitted.
                msg: || $msg,
                add_args: Box::new(move |mut set_arg| {
                    // Register each named value as a diagnostic argument keyed by
                    // its stringified identifier.
                    $($(
                        set_arg(stringify!($name).into(), rustc_errors::IntoDiagnosticArg::into_diagnostic_arg($name));
                    )*)?
                })
            }
        ))
    }};
}
110 
/// Throws (via `do yeet`) an `err_ub_custom!` error from the enclosing scope.
#[macro_export]
macro_rules! throw_ub_custom {
    // `$crate::` keeps this usable without a separate import of `err_ub_custom!`.
    ($($tt:tt)*) => { do yeet $crate::err_ub_custom!($($tt)*) };
}
115 
116 mod allocation;
117 mod error;
118 mod pointer;
119 mod queries;
120 mod value;
121 
122 use std::fmt;
123 use std::io;
124 use std::io::{Read, Write};
125 use std::num::{NonZeroU32, NonZeroU64};
126 use std::sync::atomic::{AtomicU32, Ordering};
127 
128 use rustc_ast::LitKind;
129 use rustc_data_structures::fx::FxHashMap;
130 use rustc_data_structures::sync::{HashMapExt, Lock};
131 use rustc_data_structures::tiny_list::TinyList;
132 use rustc_errors::ErrorGuaranteed;
133 use rustc_hir::def_id::DefId;
134 use rustc_macros::HashStable;
135 use rustc_middle::ty::print::with_no_trimmed_paths;
136 use rustc_serialize::{Decodable, Encodable};
137 use rustc_target::abi::{AddressSpace, Endian, HasDataLayout};
138 
139 use crate::mir;
140 use crate::ty::codec::{TyDecoder, TyEncoder};
141 use crate::ty::subst::GenericArgKind;
142 use crate::ty::{self, Instance, Ty, TyCtxt};
143 
144 pub use self::error::{
145     struct_error, CheckInAllocMsg, ErrorHandled, EvalToAllocationRawResult, EvalToConstValueResult,
146     EvalToValTreeResult, ExpectedKind, InterpError, InterpErrorInfo, InterpResult, InvalidMetaKind,
147     InvalidProgramInfo, MachineStopType, PointerKind, ReportedErrorInfo, ResourceExhaustionInfo,
148     ScalarSizeMismatch, UndefinedBehaviorInfo, UninitBytesAccess, UnsupportedOpInfo,
149     ValidationErrorInfo, ValidationErrorKind,
150 };
151 
152 pub use self::value::{get_slice_bytes, ConstAlloc, ConstValue, Scalar};
153 
154 pub use self::allocation::{
155     alloc_range, AllocBytes, AllocError, AllocRange, AllocResult, Allocation, ConstAllocation,
156     InitChunk, InitChunkIter,
157 };
158 
159 pub use self::pointer::{Pointer, PointerArithmetic, Provenance};
160 
/// Uniquely identifies one of the following:
/// - A constant
/// - A static
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, TyEncodable, TyDecodable)]
#[derive(HashStable, Lift, TypeFoldable, TypeVisitable)]
pub struct GlobalId<'tcx> {
    /// For a constant or static, the `Instance` of the item itself.
    /// For a promoted global, the `Instance` of the function they belong to.
    pub instance: ty::Instance<'tcx>,

    /// The index for promoted globals within their function's `mir::Body`.
    /// `None` for a plain constant or static.
    pub promoted: Option<mir::Promoted>,
}
174 
175 impl<'tcx> GlobalId<'tcx> {
display(self, tcx: TyCtxt<'tcx>) -> String176     pub fn display(self, tcx: TyCtxt<'tcx>) -> String {
177         let instance_name = with_no_trimmed_paths!(tcx.def_path_str(self.instance.def.def_id()));
178         if let Some(promoted) = self.promoted {
179             format!("{}::{:?}", instance_name, promoted)
180         } else {
181             instance_name
182         }
183     }
184 }
185 
/// Input argument for `tcx.lit_to_const`.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, HashStable)]
pub struct LitToConstInput<'tcx> {
    /// The absolute value of the resultant constant.
    pub lit: &'tcx LitKind,
    /// The type of the constant.
    pub ty: Ty<'tcx>,
    /// If the constant is negative.
    /// (The sign is kept separate from `lit`, which is unsigned.)
    pub neg: bool,
}
196 
/// Error type for `tcx.lit_to_const`.
#[derive(Copy, Clone, Debug, Eq, PartialEq, HashStable)]
pub enum LitToConstError {
    /// The literal's inferred type did not match the expected `ty` in the input.
    /// This is used for graceful error handling (`delay_span_bug`) in
    /// type checking (`Const::from_anon_const`).
    TypeError,
    /// An error was already reported elsewhere; the guarantee token proves it.
    Reported(ErrorGuaranteed),
}
206 
/// An opaque identifier for a global allocation (see `GlobalAlloc`).
/// Backed by `NonZeroU64` so `Option<AllocId>` is pointer-sized (niche optimization).
#[derive(Copy, Clone, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct AllocId(pub NonZeroU64);
209 
210 // We want the `Debug` output to be readable as it is used by `derive(Debug)` for
211 // all the Miri types.
212 impl fmt::Debug for AllocId {
fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result213     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
214         if f.alternate() { write!(f, "a{}", self.0) } else { write!(f, "alloc{}", self.0) }
215     }
216 }
217 
218 // No "Display" since AllocIds are not usually user-visible.
219 
/// On-disk tag written before an encoded global allocation, telling the decoder
/// which kind of payload follows (see `specialized_encode_alloc_id`).
#[derive(TyDecodable, TyEncodable)]
enum AllocDiscriminant {
    /// Followed by a full `ConstAllocation`.
    Alloc,
    /// Followed by a function `Instance`.
    Fn,
    /// Followed by a `Ty` and an optional `PolyExistentialTraitRef`.
    VTable,
    /// Followed by a `DefId` of a static.
    Static,
}
227 
/// Encodes the `GlobalAlloc` behind `alloc_id` into `encoder`, prefixed with an
/// `AllocDiscriminant` tag so `AllocDecodingSession::decode_alloc_id` knows what
/// payload to expect.
pub fn specialized_encode_alloc_id<'tcx, E: TyEncoder<I = TyCtxt<'tcx>>>(
    encoder: &mut E,
    tcx: TyCtxt<'tcx>,
    alloc_id: AllocId,
) {
    match tcx.global_alloc(alloc_id) {
        GlobalAlloc::Memory(alloc) => {
            trace!("encoding {:?} with {:#?}", alloc_id, alloc);
            AllocDiscriminant::Alloc.encode(encoder);
            alloc.encode(encoder);
        }
        GlobalAlloc::Function(fn_instance) => {
            trace!("encoding {:?} with {:#?}", alloc_id, fn_instance);
            AllocDiscriminant::Fn.encode(encoder);
            fn_instance.encode(encoder);
        }
        GlobalAlloc::VTable(ty, poly_trait_ref) => {
            trace!("encoding {:?} with {ty:#?}, {poly_trait_ref:#?}", alloc_id);
            AllocDiscriminant::VTable.encode(encoder);
            ty.encode(encoder);
            poly_trait_ref.encode(encoder);
        }
        GlobalAlloc::Static(did) => {
            assert!(!tcx.is_thread_local_static(did));
            // A reference to a static does not need to know about its allocation,
            // just about its `DefId`.
            AllocDiscriminant::Static.encode(encoder);
            // Cannot use `did.encode(encoder)` because of a bug around
            // specializations and method calls.
            Encodable::<E>::encode(&did, encoder);
        }
    }
}
261 
// Used to avoid infinite recursion when decoding cyclic allocations.
// Nonzero so it can sit cheaply inside `TinyList`/`Option` niches.
type DecodingSessionId = NonZeroU32;
264 
/// Per-`AllocId` decoding state, driven by `AllocDecodingSession::decode_alloc_id`.
#[derive(Clone)]
enum State {
    /// Not yet decoded by any session.
    Empty,
    /// Being decoded by the listed sessions; no `AllocId` reserved because this
    /// kind (fn/static/vtable) cannot be cyclic.
    InProgressNonAlloc(TinyList<DecodingSessionId>),
    /// Being decoded by the listed sessions; an `AllocId` was reserved up front
    /// so cyclic allocation graphs can refer back to it.
    InProgress(TinyList<DecodingSessionId>, AllocId),
    /// Fully decoded; the final `AllocId`.
    Done(AllocId),
}
272 
/// Shared state for decoding `AllocId`s out of a metadata blob; create one per
/// blob, then spawn `AllocDecodingSession`s from it.
pub struct AllocDecodingState {
    // For each `AllocId`, we keep track of which decoding state it's currently in.
    decoding_state: Vec<Lock<State>>,
    // The offsets of each allocation in the data stream.
    data_offsets: Vec<u32>,
}
279 
impl AllocDecodingState {
    /// Begins a new decoding session with a fresh (process-wide) session ID.
    /// The session ID lets `decode_alloc_id` detect re-entrant decoding within
    /// the same session (cycles) vs. concurrent decoding by other sessions.
    #[inline]
    pub fn new_decoding_session(&self) -> AllocDecodingSession<'_> {
        static DECODER_SESSION_ID: AtomicU32 = AtomicU32::new(0);
        let counter = DECODER_SESSION_ID.fetch_add(1, Ordering::SeqCst);

        // Make sure this is never zero.
        // Masking to 31 bits keeps `+ 1` from overflowing `u32`, so `new` cannot
        // fail; session IDs repeat only after 2^31 sessions.
        let session_id = DecodingSessionId::new((counter & 0x7FFFFFFF) + 1).unwrap();

        AllocDecodingSession { state: self, session_id }
    }

    /// Creates the decoding state for a blob whose allocations start at the
    /// given byte offsets; every allocation starts out `State::Empty`.
    pub fn new(data_offsets: Vec<u32>) -> Self {
        let decoding_state =
            std::iter::repeat_with(|| Lock::new(State::Empty)).take(data_offsets.len()).collect();

        Self { decoding_state, data_offsets }
    }
}
299 
/// A single decoding session: a handle to the shared `AllocDecodingState` plus
/// this session's unique ID (see `AllocDecodingState::new_decoding_session`).
#[derive(Copy, Clone)]
pub struct AllocDecodingSession<'s> {
    state: &'s AllocDecodingState,
    session_id: DecodingSessionId,
}
305 
impl<'s> AllocDecodingSession<'s> {
    /// Decodes an `AllocId` in a thread-safe way.
    ///
    /// Reads an allocation index from `decoder`, then either returns the already
    /// decoded `AllocId` for it, or decodes the payload at its recorded offset.
    /// Cyclic allocation graphs are handled by reserving an `AllocId` up front
    /// (for `Alloc` kinds) and tracking per-session in-progress state.
    pub fn decode_alloc_id<'tcx, D>(&self, decoder: &mut D) -> AllocId
    where
        D: TyDecoder<I = TyCtxt<'tcx>>,
    {
        // Read the index of the allocation.
        let idx = usize::try_from(decoder.read_u32()).unwrap();
        let pos = usize::try_from(self.state.data_offsets[idx]).unwrap();

        // Decode the `AllocDiscriminant` now so that we know if we have to reserve an
        // `AllocId`.
        let (alloc_kind, pos) = decoder.with_position(pos, |decoder| {
            let alloc_kind = AllocDiscriminant::decode(decoder);
            (alloc_kind, decoder.position())
        });

        // Check the decoding state to see if it's already decoded or if we should
        // decode it here.
        // NB: the lock is held only for this block; the payload decoding below
        // happens outside it so other sessions can make progress concurrently.
        let alloc_id = {
            let mut entry = self.state.decoding_state[idx].lock();

            match *entry {
                State::Done(alloc_id) => {
                    return alloc_id;
                }
                ref mut entry @ State::Empty => {
                    // We are allowed to decode.
                    match alloc_kind {
                        AllocDiscriminant::Alloc => {
                            // If this is an allocation, we need to reserve an
                            // `AllocId` so we can decode cyclic graphs.
                            let alloc_id = decoder.interner().reserve_alloc_id();
                            *entry =
                                State::InProgress(TinyList::new_single(self.session_id), alloc_id);
                            Some(alloc_id)
                        }
                        AllocDiscriminant::Fn
                        | AllocDiscriminant::Static
                        | AllocDiscriminant::VTable => {
                            // Fns and statics cannot be cyclic, and their `AllocId`
                            // is determined later by interning.
                            *entry =
                                State::InProgressNonAlloc(TinyList::new_single(self.session_id));
                            None
                        }
                    }
                }
                State::InProgressNonAlloc(ref mut sessions) => {
                    if sessions.contains(&self.session_id) {
                        // Non-alloc kinds cannot be cyclic, so re-entering the
                        // same session here would be a decoder logic error.
                        bug!("this should be unreachable");
                    } else {
                        // Start decoding concurrently.
                        sessions.insert(self.session_id);
                        None
                    }
                }
                State::InProgress(ref mut sessions, alloc_id) => {
                    if sessions.contains(&self.session_id) {
                        // Don't recurse.
                        return alloc_id;
                    } else {
                        // Start decoding concurrently.
                        sessions.insert(self.session_id);
                        Some(alloc_id)
                    }
                }
            }
        };

        // Now decode the actual data.
        let alloc_id = decoder.with_position(pos, |decoder| {
            match alloc_kind {
                AllocDiscriminant::Alloc => {
                    let alloc = <ConstAllocation<'tcx> as Decodable<_>>::decode(decoder);
                    // We already have a reserved `AllocId`.
                    let alloc_id = alloc_id.unwrap();
                    trace!("decoded alloc {:?}: {:#?}", alloc_id, alloc);
                    decoder.interner().set_alloc_id_same_memory(alloc_id, alloc);
                    alloc_id
                }
                AllocDiscriminant::Fn => {
                    assert!(alloc_id.is_none());
                    trace!("creating fn alloc ID");
                    let instance = ty::Instance::decode(decoder);
                    trace!("decoded fn alloc instance: {:?}", instance);
                    let alloc_id = decoder.interner().create_fn_alloc(instance);
                    alloc_id
                }
                AllocDiscriminant::VTable => {
                    assert!(alloc_id.is_none());
                    trace!("creating vtable alloc ID");
                    let ty = <Ty<'_> as Decodable<D>>::decode(decoder);
                    let poly_trait_ref =
                        <Option<ty::PolyExistentialTraitRef<'_>> as Decodable<D>>::decode(decoder);
                    trace!("decoded vtable alloc instance: {ty:?}, {poly_trait_ref:?}");
                    let alloc_id = decoder.interner().create_vtable_alloc(ty, poly_trait_ref);
                    alloc_id
                }
                AllocDiscriminant::Static => {
                    assert!(alloc_id.is_none());
                    trace!("creating extern static alloc ID");
                    let did = <DefId as Decodable<D>>::decode(decoder);
                    trace!("decoded static def-ID: {:?}", did);
                    let alloc_id = decoder.interner().create_static_alloc(did);
                    alloc_id
                }
            }
        });

        // Mark this allocation as fully decoded so later calls (from any
        // session) take the fast path above.
        self.state.decoding_state[idx].with_lock(|entry| {
            *entry = State::Done(alloc_id);
        });

        alloc_id
    }
}
423 
/// An allocation in the global (tcx-managed) memory can be either a function pointer,
/// a static, or a "real" allocation with some data in it.
#[derive(Debug, Clone, Eq, PartialEq, Hash, TyDecodable, TyEncodable, HashStable)]
pub enum GlobalAlloc<'tcx> {
    /// The alloc ID is used as a function pointer.
    Function(Instance<'tcx>),
    /// This alloc ID points to a symbolic (not-reified) vtable.
    VTable(Ty<'tcx>, Option<ty::PolyExistentialTraitRef<'tcx>>),
    /// The alloc ID points to a "lazy" static variable that did not get computed (yet).
    /// This is also used to break the cycle in recursive statics.
    Static(DefId),
    /// The alloc ID points to memory.
    Memory(ConstAllocation<'tcx>),
}
438 
439 impl<'tcx> GlobalAlloc<'tcx> {
440     /// Panics if the `GlobalAlloc` does not refer to an `GlobalAlloc::Memory`
441     #[track_caller]
442     #[inline]
unwrap_memory(&self) -> ConstAllocation<'tcx>443     pub fn unwrap_memory(&self) -> ConstAllocation<'tcx> {
444         match *self {
445             GlobalAlloc::Memory(mem) => mem,
446             _ => bug!("expected memory, got {:?}", self),
447         }
448     }
449 
450     /// Panics if the `GlobalAlloc` is not `GlobalAlloc::Function`
451     #[track_caller]
452     #[inline]
unwrap_fn(&self) -> Instance<'tcx>453     pub fn unwrap_fn(&self) -> Instance<'tcx> {
454         match *self {
455             GlobalAlloc::Function(instance) => instance,
456             _ => bug!("expected function, got {:?}", self),
457         }
458     }
459 
460     /// Panics if the `GlobalAlloc` is not `GlobalAlloc::VTable`
461     #[track_caller]
462     #[inline]
unwrap_vtable(&self) -> (Ty<'tcx>, Option<ty::PolyExistentialTraitRef<'tcx>>)463     pub fn unwrap_vtable(&self) -> (Ty<'tcx>, Option<ty::PolyExistentialTraitRef<'tcx>>) {
464         match *self {
465             GlobalAlloc::VTable(ty, poly_trait_ref) => (ty, poly_trait_ref),
466             _ => bug!("expected vtable, got {:?}", self),
467         }
468     }
469 
470     /// The address space that this `GlobalAlloc` should be placed in.
471     #[inline]
address_space(&self, cx: &impl HasDataLayout) -> AddressSpace472     pub fn address_space(&self, cx: &impl HasDataLayout) -> AddressSpace {
473         match self {
474             GlobalAlloc::Function(..) => cx.data_layout().instruction_address_space,
475             GlobalAlloc::Static(..) | GlobalAlloc::Memory(..) | GlobalAlloc::VTable(..) => {
476                 AddressSpace::DATA
477             }
478         }
479     }
480 }
481 
/// The tcx-global table of all `AllocId`s and what they point to.
pub(crate) struct AllocMap<'tcx> {
    /// Maps `AllocId`s to their corresponding allocations.
    alloc_map: FxHashMap<AllocId, GlobalAlloc<'tcx>>,

    /// Used to ensure that statics and functions only get one associated `AllocId`.
    /// Should never contain a `GlobalAlloc::Memory`!
    //
    // FIXME: Should we just have two separate dedup maps for statics and functions each?
    dedup: FxHashMap<GlobalAlloc<'tcx>, AllocId>,

    /// The `AllocId` to assign to the next requested ID.
    /// Always incremented; never gets smaller.
    next_id: AllocId,
}
496 
497 impl<'tcx> AllocMap<'tcx> {
new() -> Self498     pub(crate) fn new() -> Self {
499         AllocMap {
500             alloc_map: Default::default(),
501             dedup: Default::default(),
502             next_id: AllocId(NonZeroU64::new(1).unwrap()),
503         }
504     }
reserve(&mut self) -> AllocId505     fn reserve(&mut self) -> AllocId {
506         let next = self.next_id;
507         self.next_id.0 = self.next_id.0.checked_add(1).expect(
508             "You overflowed a u64 by incrementing by 1... \
509              You've just earned yourself a free drink if we ever meet. \
510              Seriously, how did you do that?!",
511         );
512         next
513     }
514 }
515 
impl<'tcx> TyCtxt<'tcx> {
    /// Obtains a new allocation ID that can be referenced but does not
    /// yet have an allocation backing it.
    ///
    /// Make sure to call `set_alloc_id_memory` or `set_alloc_id_same_memory` before returning such
    /// an `AllocId` from a query.
    pub fn reserve_alloc_id(self) -> AllocId {
        self.alloc_map.lock().reserve()
    }

    /// Reserves a new ID *if* this allocation has not been dedup-reserved before.
    /// Should only be used for "symbolic" allocations (function pointers, vtables, statics), we
    /// don't want to dedup IDs for "real" memory!
    fn reserve_and_set_dedup(self, alloc: GlobalAlloc<'tcx>) -> AllocId {
        // Hold the lock across lookup + insert so two threads cannot both miss
        // the dedup map and hand out different IDs for the same alloc.
        let mut alloc_map = self.alloc_map.lock();
        match alloc {
            GlobalAlloc::Function(..) | GlobalAlloc::Static(..) | GlobalAlloc::VTable(..) => {}
            GlobalAlloc::Memory(..) => bug!("Trying to dedup-reserve memory with real data!"),
        }
        if let Some(&alloc_id) = alloc_map.dedup.get(&alloc) {
            return alloc_id;
        }
        let id = alloc_map.reserve();
        debug!("creating alloc {alloc:?} with id {id:?}");
        // Record in both directions: id -> alloc, and alloc -> id for future dedup.
        alloc_map.alloc_map.insert(id, alloc.clone());
        alloc_map.dedup.insert(alloc, id);
        id
    }

    /// Generates an `AllocId` for a static or return a cached one in case this function has been
    /// called on the same static before.
    pub fn create_static_alloc(self, static_id: DefId) -> AllocId {
        self.reserve_and_set_dedup(GlobalAlloc::Static(static_id))
    }

    /// Generates an `AllocId` for a function. Depending on the function type,
    /// this might get deduplicated or assigned a new ID each time.
    pub fn create_fn_alloc(self, instance: Instance<'tcx>) -> AllocId {
        // Functions cannot be identified by pointers, as asm-equal functions can get deduplicated
        // by the linker (we set the "unnamed_addr" attribute for LLVM) and functions can be
        // duplicated across crates.
        // We thus generate a new `AllocId` for every mention of a function. This means that
        // `main as fn() == main as fn()` is false, while `let x = main as fn(); x == x` is true.
        // However, formatting code relies on function identity (see #58320), so we only do
        // this for generic functions. Lifetime parameters are ignored.
        let is_generic = instance
            .substs
            .into_iter()
            .any(|kind| !matches!(kind.unpack(), GenericArgKind::Lifetime(_)));
        if is_generic {
            // Get a fresh ID.
            let mut alloc_map = self.alloc_map.lock();
            let id = alloc_map.reserve();
            alloc_map.alloc_map.insert(id, GlobalAlloc::Function(instance));
            id
        } else {
            // Deduplicate.
            self.reserve_and_set_dedup(GlobalAlloc::Function(instance))
        }
    }

    /// Generates an `AllocId` for a (symbolic, not-reified) vtable. Will get deduplicated.
    pub fn create_vtable_alloc(
        self,
        ty: Ty<'tcx>,
        poly_trait_ref: Option<ty::PolyExistentialTraitRef<'tcx>>,
    ) -> AllocId {
        self.reserve_and_set_dedup(GlobalAlloc::VTable(ty, poly_trait_ref))
    }

    /// Interns the `Allocation` and return a new `AllocId`, even if there's already an identical
    /// `Allocation` with a different `AllocId`.
    /// Statics with identical content will still point to the same `Allocation`, i.e.,
    /// their data will be deduplicated through `Allocation` interning -- but they
    /// are different places in memory and as such need different IDs.
    pub fn create_memory_alloc(self, mem: ConstAllocation<'tcx>) -> AllocId {
        let id = self.reserve_alloc_id();
        self.set_alloc_id_memory(id, mem);
        id
    }

    /// Returns `None` in case the `AllocId` is dangling. An `InterpretCx` can still have a
    /// local `Allocation` for that `AllocId`, but having such an `AllocId` in a constant is
    /// illegal and will likely ICE.
    /// This function exists to allow const eval to detect the difference between evaluation-
    /// local dangling pointers and allocations in constants/statics.
    #[inline]
    pub fn try_get_global_alloc(self, id: AllocId) -> Option<GlobalAlloc<'tcx>> {
        self.alloc_map.lock().alloc_map.get(&id).cloned()
    }

    #[inline]
    #[track_caller]
    /// Panics in case the `AllocId` is dangling. Since that is impossible for `AllocId`s in
    /// constants (as all constants must pass interning and validation that check for dangling
    /// ids), this function is frequently used throughout rustc, but should not be used within
    /// the miri engine.
    pub fn global_alloc(self, id: AllocId) -> GlobalAlloc<'tcx> {
        match self.try_get_global_alloc(id) {
            Some(alloc) => alloc,
            None => bug!("could not find allocation for {id:?}"),
        }
    }

    /// Freezes an `AllocId` created with `reserve` by pointing it at an `Allocation`. Trying to
    /// call this function twice, even with the same `Allocation` will ICE the compiler.
    pub fn set_alloc_id_memory(self, id: AllocId, mem: ConstAllocation<'tcx>) {
        if let Some(old) = self.alloc_map.lock().alloc_map.insert(id, GlobalAlloc::Memory(mem)) {
            bug!("tried to set allocation ID {id:?}, but it was already existing as {old:#?}");
        }
    }

    /// Freezes an `AllocId` created with `reserve` by pointing it at an `Allocation`. May be called
    /// twice for the same `(AllocId, Allocation)` pair.
    fn set_alloc_id_same_memory(self, id: AllocId, mem: ConstAllocation<'tcx>) {
        // `insert_same` asserts the new value equals any existing one instead of ICE-ing.
        self.alloc_map.lock().alloc_map.insert_same(id, GlobalAlloc::Memory(mem));
    }
}
634 
635 ////////////////////////////////////////////////////////////////////////////////
636 // Methods to access integers in the target endianness
637 ////////////////////////////////////////////////////////////////////////////////
638 
639 #[inline]
write_target_uint( endianness: Endian, mut target: &mut [u8], data: u128, ) -> Result<(), io::Error>640 pub fn write_target_uint(
641     endianness: Endian,
642     mut target: &mut [u8],
643     data: u128,
644 ) -> Result<(), io::Error> {
645     // This u128 holds an "any-size uint" (since smaller uints can fits in it)
646     // So we do not write all bytes of the u128, just the "payload".
647     match endianness {
648         Endian::Little => target.write(&data.to_le_bytes())?,
649         Endian::Big => target.write(&data.to_be_bytes()[16 - target.len()..])?,
650     };
651     debug_assert!(target.len() == 0); // We should have filled the target buffer.
652     Ok(())
653 }
654 
655 #[inline]
read_target_uint(endianness: Endian, mut source: &[u8]) -> Result<u128, io::Error>656 pub fn read_target_uint(endianness: Endian, mut source: &[u8]) -> Result<u128, io::Error> {
657     // This u128 holds an "any-size uint" (since smaller uints can fits in it)
658     let mut buf = [0u8; std::mem::size_of::<u128>()];
659     // So we do not read exactly 16 bytes into the u128, just the "payload".
660     let uint = match endianness {
661         Endian::Little => {
662             source.read(&mut buf)?;
663             Ok(u128::from_le_bytes(buf))
664         }
665         Endian::Big => {
666             source.read(&mut buf[16 - source.len()..])?;
667             Ok(u128::from_be_bytes(buf))
668         }
669     };
670     debug_assert!(source.len() == 0); // We should have consumed the source buffer.
671     uint
672 }
673