use crate::base;
use crate::traits::*;
use rustc_index::bit_set::BitSet;
use rustc_index::IndexVec;
use rustc_middle::mir;
use rustc_middle::mir::interpret::ErrorHandled;
use rustc_middle::mir::traversal;
use rustc_middle::ty::layout::{FnAbiOf, HasTyCtxt, TyAndLayout};
use rustc_middle::ty::{self, Instance, Ty, TyCtxt, TypeFoldable, TypeVisitableExt};
use rustc_target::abi::call::{FnAbi, PassMode};

use std::iter;

mod analyze;
mod block;
pub mod constant;
pub mod coverageinfo;
pub mod debuginfo;
mod intrinsic;
mod locals;
pub mod operand;
pub mod place;
mod rvalue;
mod statement;

use self::debuginfo::{FunctionDebugContext, PerLocalVarDebugInfo};
use self::operand::{OperandRef, OperandValue};
use self::place::PlaceRef;

// Used for tracking the state of generated basic blocks.
enum CachedLlbb<T> {
    /// Nothing created yet.
    None,

    /// Has been created.
    Some(T),

    /// Nothing created yet, and nothing should be.
    Skip,
}

/// Master context for codegenning from MIR.
pub struct FunctionCx<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> {
    instance: Instance<'tcx>,

    mir: &'tcx mir::Body<'tcx>,

    debug_context: Option<FunctionDebugContext<Bx::DIScope, Bx::DILocation>>,

    llfn: Bx::Function,

    cx: &'a Bx::CodegenCx,

    fn_abi: &'tcx FnAbi<'tcx, Ty<'tcx>>,

    /// When unwinding is initiated, we have to store this personality
    /// value somewhere so that we can load it and re-use it in the
    /// resume instruction. The personality is a value that the C++-style
    /// unwinding machinery uses to filter exceptions by type; we don't
    /// inspect it ourselves. This field caches the alloca into which
    /// the personality is stored and from which it is later loaded
    /// when generating the DIVERGE_BLOCK.
    personality_slot: Option<PlaceRef<'tcx, Bx::Value>>,

    /// A backend `BasicBlock` for each MIR `BasicBlock`, created lazily
    /// as-needed (e.g. RPO reaching it or another block branching to it).
    // FIXME(eddyb) rename `llbbs` and other `ll`-prefixed things to use a
    // more backend-agnostic prefix such as `cg` (i.e. this would be `cgbbs`).
    cached_llbbs: IndexVec<mir::BasicBlock, CachedLlbb<Bx::BasicBlock>>,

    /// The funclet status of each basic block.
    cleanup_kinds: Option<IndexVec<mir::BasicBlock, analyze::CleanupKind>>,

    /// When targeting MSVC, this stores the cleanup info for each funclet BB.
    /// This is initialized at the same time as the `landing_pads` entry for the
    /// funclets' head block, i.e. when needed by an unwind / `cleanup_ret` edge.
    funclets: IndexVec<mir::BasicBlock, Option<Bx::Funclet>>,

    /// This stores the cached landing/cleanup pad block for a given BB.
    // FIXME(eddyb) rename this to `eh_pads`.
    landing_pads: IndexVec<mir::BasicBlock, Option<Bx::BasicBlock>>,

    /// Cached unreachable block.
    unreachable_block: Option<Bx::BasicBlock>,

    /// Cached block used to terminate (abort) upon unwinding.
    terminate_block: Option<Bx::BasicBlock>,

    /// The location where each MIR arg/var/tmp/ret is stored. This is
    /// usually a `PlaceRef` representing an alloca, but not always:
    /// sometimes we can skip the alloca and just store the value
    /// directly using an `OperandRef`, which makes for tighter LLVM
    /// IR. The conditions for using an `OperandRef` are as follows:
    ///
    /// - the type of the local must be judged "immediate" by `is_llvm_immediate`
    /// - the operand must never be referenced indirectly
    ///     - we should not take its address using the `&` operator
    ///     - nor should it appear in a place path like `tmp.a`
    /// - the operand must be defined by an rvalue that can generate immediate
    ///   values
    ///
    /// Avoiding allocas can also be important for certain intrinsics,
    /// notably `expect`.
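    ///
    /// As an illustrative sketch of these rules (not compile-tested, and
    /// target/optimization dependent):
    ///
    /// ```ignore (illustrative)
    /// let a = 1u32;  // immediate and never borrowed: can stay an `OperandRef`
    /// let b = 2u32;
    /// let r = &b;    // taking the address forces `b` into an alloca
    /// ```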
    locals: locals::Locals<'tcx, Bx::Value>,

    /// All `VarDebugInfo` from the MIR body, partitioned by `Local`.
    /// This is `None` if no variable debuginfo/names are needed.
    per_local_var_debug_info:
        Option<IndexVec<mir::Local, Vec<PerLocalVarDebugInfo<'tcx, Bx::DIVariable>>>>,

    /// Caller location propagated if this function has `#[track_caller]`.
    caller_location: Option<OperandRef<'tcx, Bx::Value>>,
}

impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
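    /// Monomorphizes `value` by substituting this instance's generic
    /// arguments into it and normalizing away projections (regions are
    /// erased). For example, when codegenning `foo::<u32>`, a MIR type
    /// `Vec<T>` becomes `Vec<u32>`.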
    pub fn monomorphize<T>(&self, value: T) -> T
    where
        T: Copy + TypeFoldable<TyCtxt<'tcx>>,
    {
        debug!("monomorphize: self.instance={:?}", self.instance);
        self.instance.subst_mir_and_normalize_erasing_regions(
            self.cx.tcx(),
            ty::ParamEnv::reveal_all(),
            ty::EarlyBinder::bind(value),
        )
    }
}

enum LocalRef<'tcx, V> {
    Place(PlaceRef<'tcx, V>),
    /// `UnsizedPlace(p)`: `p` itself is a thin pointer (indirect place).
    /// `*p` is the fat pointer that references the actual unsized place.
    /// Every time it is initialized, we have to reallocate the place
    /// and update the fat pointer. That's the reason why it is indirect.
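    ///
    /// An illustrative sketch (unsized locals require unstable features,
    /// so this is not compile-tested):
    ///
    /// ```ignore (illustrative)
    /// #![feature(unsized_fn_params)]
    /// fn consume(x: str) { /* `x` is backed by an `UnsizedPlace` */ }
    /// ```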
    UnsizedPlace(PlaceRef<'tcx, V>),
    /// The backend [`OperandValue`] has already been generated.
    Operand(OperandRef<'tcx, V>),
    /// Will be a `Self::Operand` once we get to its definition.
    PendingOperand,
}

impl<'tcx, V: CodegenObject> LocalRef<'tcx, V> {
    fn new_operand(layout: TyAndLayout<'tcx>) -> LocalRef<'tcx, V> {
        if layout.is_zst() {
            // Zero-size temporaries aren't always initialized, which
            // doesn't matter because they don't contain data, but
            // we need something in the operand.
            LocalRef::Operand(OperandRef::zero_sized(layout))
        } else {
            LocalRef::PendingOperand
        }
    }
}

///////////////////////////////////////////////////////////////////////////

#[instrument(level = "debug", skip(cx))]
pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    cx: &'a Bx::CodegenCx,
    instance: Instance<'tcx>,
) {
    assert!(!instance.substs.has_infer());

    let llfn = cx.get_fn(instance);

    let mir = cx.tcx().instance_mir(instance.def);

    let fn_abi = cx.fn_abi_of_instance(instance, ty::List::empty());
    debug!("fn_abi: {:?}", fn_abi);

    let debug_context = cx.create_function_debug_context(instance, &fn_abi, llfn, &mir);

    let start_llbb = Bx::append_block(cx, llfn, "start");
    let mut start_bx = Bx::build(cx, start_llbb);

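    // If this function can unwind (it has cleanup blocks, or an edge that
    // terminates on unwind), register the EH personality function up front so
    // the backend can emit the corresponding exception-handling tables.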
    if mir.basic_blocks.iter().any(|bb| {
        bb.is_cleanup || matches!(bb.terminator().unwind(), Some(mir::UnwindAction::Terminate))
    }) {
        start_bx.set_personality_fn(cx.eh_personality());
    }

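    // Only backends that use the newer funclet-style EH instructions (e.g.
    // MSVC SEH) need the per-block cleanup kinds computed up front; for
    // landing-pad-based EH this stays `None`.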
    let cleanup_kinds =
        base::wants_new_eh_instructions(cx.tcx().sess).then(|| analyze::cleanup_kinds(&mir));

    let cached_llbbs: IndexVec<mir::BasicBlock, CachedLlbb<Bx::BasicBlock>> =
        mir.basic_blocks
            .indices()
            .map(|bb| {
                if bb == mir::START_BLOCK { CachedLlbb::Some(start_llbb) } else { CachedLlbb::None }
            })
            .collect();

    let mut fx = FunctionCx {
        instance,
        mir,
        llfn,
        fn_abi,
        cx,
        personality_slot: None,
        cached_llbbs,
        unreachable_block: None,
        terminate_block: None,
        cleanup_kinds,
        landing_pads: IndexVec::from_elem(None, &mir.basic_blocks),
        funclets: IndexVec::from_fn_n(|_| None, mir.basic_blocks.len()),
        locals: locals::Locals::empty(),
        debug_context,
        per_local_var_debug_info: None,
        caller_location: None,
    };

    fx.per_local_var_debug_info = fx.compute_per_local_var_debug_info(&mut start_bx);

    // Evaluate all required consts; codegen later assumes that CTFE will never fail.
    let mut all_consts_ok = true;
    for const_ in &mir.required_consts {
        if let Err(err) = fx.eval_mir_constant(const_) {
            all_consts_ok = false;
            match err {
                // errored or at least linted
                ErrorHandled::Reported(_) => {}
                ErrorHandled::TooGeneric => {
                    span_bug!(const_.span, "codegen encountered polymorphic constant: {:?}", err)
                }
            }
        }
    }
    if !all_consts_ok {
        // We leave the IR in a half-built state here, and rely on the fact
        // that it is never submitted to LLVM once an error has been raised.
        return;
    }

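    // Find the locals that cannot be lowered as SSA values, e.g. because
    // their address is taken or their type/layout is unsuited to immediates;
    // these must be backed by memory (an alloca).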
    let memory_locals = analyze::non_ssa_locals(&fx);

    // Allocate variable and temp allocas
    let local_values = {
        let args = arg_local_refs(&mut start_bx, &mut fx, &memory_locals);

        let mut allocate_local = |local| {
            let decl = &mir.local_decls[local];
            let layout = start_bx.layout_of(fx.monomorphize(decl.ty));
            assert!(!layout.ty.has_erasable_regions());

            if local == mir::RETURN_PLACE && fx.fn_abi.ret.is_indirect() {
                debug!("alloc: {:?} (return place) -> place", local);
                let llretptr = start_bx.get_param(0);
                return LocalRef::Place(PlaceRef::new_sized(llretptr, layout));
            }

            if memory_locals.contains(local) {
                debug!("alloc: {:?} -> place", local);
                if layout.is_unsized() {
                    LocalRef::UnsizedPlace(PlaceRef::alloca_unsized_indirect(&mut start_bx, layout))
                } else {
                    LocalRef::Place(PlaceRef::alloca(&mut start_bx, layout))
                }
            } else {
                debug!("alloc: {:?} -> operand", local);
                LocalRef::new_operand(layout)
            }
        };

        let retptr = allocate_local(mir::RETURN_PLACE);
        iter::once(retptr)
            .chain(args.into_iter())
            .chain(mir.vars_and_temps_iter().map(allocate_local))
            .collect()
    };
    fx.initialize_locals(local_values);

    // Apply debuginfo to the newly allocated locals.
    fx.debug_introduce_locals(&mut start_bx);

    // The builders will be created separately for each basic block at `codegen_block`.
    // So drop the builder of `start_llbb` to avoid having two at the same time.
    drop(start_bx);

    // Codegen the body of each block using reverse postorder
    for (bb, _) in traversal::reverse_postorder(&mir) {
        fx.codegen_block(bb);
    }
}

/// Produces, for each MIR argument, a `LocalRef` describing where that
/// argument's value lives: a `PlaceRef` (an alloca or caller-provided
/// memory) when the argument must live in memory, or a direct
/// `OperandRef` when it does not.
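///
/// An illustrative sketch of the mapping (not compile-tested; the exact
/// split depends on the target ABI):
///
/// ```ignore (illustrative)
/// fn f(a: u32, b: [u64; 32]) {}
/// // `a` is typically passed directly and becomes an operand, while `b`
/// // is passed indirectly and becomes a place reusing caller memory.
/// ```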
fn arg_local_refs<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    bx: &mut Bx,
    fx: &mut FunctionCx<'a, 'tcx, Bx>,
    memory_locals: &BitSet<mir::Local>,
) -> Vec<LocalRef<'tcx, Bx::Value>> {
    let mir = fx.mir;
    let mut idx = 0;
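    // If the return value is passed indirectly (`sret`), backend parameter 0
    // is the pointer to the return place, so explicit arguments start at 1.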
    let mut llarg_idx = fx.fn_abi.ret.is_indirect() as usize;

    let mut num_untupled = None;

    let args = mir
        .args_iter()
        .enumerate()
        .map(|(arg_index, local)| {
            let arg_decl = &mir.local_decls[local];
            let arg_ty = fx.monomorphize(arg_decl.ty);

            if Some(local) == mir.spread_arg {
                // This argument (e.g., the last argument in the "rust-call" ABI)
                // is a tuple that was spread at the ABI level and now we have
                // to reconstruct it into a tuple local variable, from multiple
                // individual LLVM function arguments.
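                //
                // For example (illustrative): for a call through
                // `FnMut::call_mut(&mut self, args: (i32, i32))`, the two
                // `i32`s arrive as separate backend arguments and are stored
                // back field-by-field into a single tuple alloca below.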
                let ty::Tuple(tupled_arg_tys) = arg_ty.kind() else {
                    bug!("spread argument isn't a tuple?!");
                };

                let layout = bx.layout_of(arg_ty);

                // FIXME: support unsized params in "rust-call" ABI
                if layout.is_unsized() {
                    span_bug!(
                        arg_decl.source_info.span,
                        "\"rust-call\" ABI does not support unsized params",
                    );
                }

                let place = PlaceRef::alloca(bx, layout);
                for i in 0..tupled_arg_tys.len() {
                    let arg = &fx.fn_abi.args[idx];
                    idx += 1;
                    if let PassMode::Cast(_, true) = arg.mode {
                        llarg_idx += 1;
                    }
                    let pr_field = place.project_field(bx, i);
                    bx.store_fn_arg(arg, &mut llarg_idx, pr_field);
                }
                assert_eq!(
                    None,
                    num_untupled.replace(tupled_arg_tys.len()),
                    "Replaced existing num_untupled"
                );

                return LocalRef::Place(place);
            }

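            // C-variadic functions have one extra trailing MIR argument for
            // the `VaListImpl`; give it an alloca and let the backend
            // initialize it via `va_start`.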
            if fx.fn_abi.c_variadic && arg_index == fx.fn_abi.args.len() {
                let va_list = PlaceRef::alloca(bx, bx.layout_of(arg_ty));
                bx.va_start(va_list.llval);

                return LocalRef::Place(va_list);
            }

            let arg = &fx.fn_abi.args[idx];
            idx += 1;
            if let PassMode::Cast(_, true) = arg.mode {
                llarg_idx += 1;
            }

            if !memory_locals.contains(local) {
                // We don't have to cast or keep the argument in the alloca.
                // FIXME(eddyb): We should figure out how to use llvm.dbg.value instead
                // of putting everything in allocas just so we can use llvm.dbg.declare.
                let local = |op| LocalRef::Operand(op);
                match arg.mode {
                    PassMode::Ignore => {
                        return local(OperandRef::zero_sized(arg.layout));
                    }
                    PassMode::Direct(_) => {
                        let llarg = bx.get_param(llarg_idx);
                        llarg_idx += 1;
                        return local(OperandRef::from_immediate_or_packed_pair(
                            bx, llarg, arg.layout,
                        ));
                    }
                    PassMode::Pair(..) => {
                        let (a, b) = (bx.get_param(llarg_idx), bx.get_param(llarg_idx + 1));
                        llarg_idx += 2;

                        return local(OperandRef {
                            val: OperandValue::Pair(a, b),
                            layout: arg.layout,
                        });
                    }
                    _ => {}
                }
            }

            if arg.is_sized_indirect() {
                // Don't copy an indirect argument to an alloca; the caller
                // already put it in a temporary alloca and gave it up.
                // FIXME: lifetimes
                let llarg = bx.get_param(llarg_idx);
                llarg_idx += 1;
                LocalRef::Place(PlaceRef::new_sized(llarg, arg.layout))
            } else if arg.is_unsized_indirect() {
                // As the storage for the indirect argument lives for the
                // duration of the function call, we just copy the fat pointer.
                let llarg = bx.get_param(llarg_idx);
                llarg_idx += 1;
                let llextra = bx.get_param(llarg_idx);
                llarg_idx += 1;
                let indirect_operand = OperandValue::Pair(llarg, llextra);

                let tmp = PlaceRef::alloca_unsized_indirect(bx, arg.layout);
                indirect_operand.store(bx, tmp);
                LocalRef::UnsizedPlace(tmp)
            } else {
                let tmp = PlaceRef::alloca(bx, arg.layout);
                bx.store_fn_arg(arg, &mut llarg_idx, tmp);
                LocalRef::Place(tmp)
            }
        })
        .collect::<Vec<_>>();

    if fx.instance.def.requires_caller_location(bx.tcx()) {
        let mir_args = if let Some(num_untupled) = num_untupled {
            // Subtract off the tupled argument that gets 'expanded'
            args.len() - 1 + num_untupled
        } else {
            args.len()
        };
        assert_eq!(
            fx.fn_abi.args.len(),
            mir_args + 1,
            "#[track_caller] instance {:?} must have 1 more argument in its ABI than in its MIR",
            fx.instance
        );

        let arg = fx.fn_abi.args.last().unwrap();
        match arg.mode {
            PassMode::Direct(_) => (),
            _ => bug!("caller location must be PassMode::Direct, found {:?}", arg.mode),
        }

        fx.caller_location = Some(OperandRef {
            val: OperandValue::Immediate(bx.get_param(llarg_idx)),
            layout: arg.layout,
        });
    }

    args
}