//! Handling of everything related to the calling convention. Also fills `fx.local_map`.

mod comments;
mod pass_mode;
mod returning;

use std::borrow::Cow;

use cranelift_module::ModuleError;
use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
use rustc_middle::ty::layout::FnAbiOf;
use rustc_session::Session;
use rustc_target::abi::call::{Conv, FnAbi};
use rustc_target::spec::abi::Abi;

use cranelift_codegen::ir::{AbiParam, SigRef};

use self::pass_mode::*;
use crate::prelude::*;

pub(crate) use self::returning::codegen_return;

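/// Build a Cranelift [`Signature`] from a rustc [`FnAbi`]: pick the calling convention, lower each
/// argument to its ABI params and prepend the return pointer when the return value is passed
/// indirectly.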
fn clif_sig_from_fn_abi<'tcx>(
    tcx: TyCtxt<'tcx>,
    default_call_conv: CallConv,
    fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
) -> Signature {
    let call_conv = conv_to_call_conv(tcx.sess, fn_abi.conv, default_call_conv);

    let inputs = fn_abi.args.iter().flat_map(|arg_abi| arg_abi.get_abi_param(tcx).into_iter());

    let (return_ptr, returns) = fn_abi.ret.get_abi_return(tcx);
    // Sometimes the first param is a pointer to the place where the return value needs to be stored.
    let params: Vec<_> = return_ptr.into_iter().chain(inputs).collect();

    Signature { params, returns, call_conv }
}

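/// Map a rustc [`Conv`] to the Cranelift [`CallConv`] to use for it, falling back to the module's
/// default calling convention where Cranelift has no dedicated equivalent and erroring out for
/// conventions that are not yet supported.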
pub(crate) fn conv_to_call_conv(sess: &Session, c: Conv, default_call_conv: CallConv) -> CallConv {
    match c {
        Conv::Rust | Conv::C => default_call_conv,
        Conv::RustCold => CallConv::Cold,
        Conv::X86_64SysV => CallConv::SystemV,
        Conv::X86_64Win64 => CallConv::WindowsFastcall,

        // Should already get a back compat warning
        Conv::X86Fastcall | Conv::X86Stdcall | Conv::X86ThisCall | Conv::X86VectorCall => {
            default_call_conv
        }

        Conv::X86Intr => sess.fatal("x86-interrupt call conv not yet implemented"),

        Conv::ArmAapcs => sess.fatal("aapcs call conv not yet implemented"),
        Conv::CCmseNonSecureCall => {
            sess.fatal("C-cmse-nonsecure-call call conv is not yet implemented");
        }

        Conv::Msp430Intr
        | Conv::PtxKernel
        | Conv::AmdGpuKernel
        | Conv::AvrInterrupt
        | Conv::AvrNonBlockingInterrupt => {
            unreachable!("tried to use {c:?} call conv which only exists on an unsupported target");
        }
    }
}

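/// Compute the Cranelift signature for calling the given function instance.
///
/// Instance must be monomorphized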
pub(crate) fn get_function_sig<'tcx>(
    tcx: TyCtxt<'tcx>,
    default_call_conv: CallConv,
    inst: Instance<'tcx>,
) -> Signature {
    assert!(!inst.substs.has_infer());
    clif_sig_from_fn_abi(
        tcx,
        default_call_conv,
        &RevealAllLayoutCx(tcx).fn_abi_of_instance(inst, ty::List::empty()),
    )
}

/// Instance must be monomorphized
pub(crate) fn import_function<'tcx>(
    tcx: TyCtxt<'tcx>,
    module: &mut dyn Module,
    inst: Instance<'tcx>,
) -> FuncId {
    let name = tcx.symbol_name(inst).name;
    let sig = get_function_sig(tcx, module.target_config().default_call_conv, inst);
    match module.declare_function(name, Linkage::Import, &sig) {
        Ok(func_id) => func_id,
        Err(ModuleError::IncompatibleDeclaration(_)) => tcx.sess.fatal(format!(
            "attempt to declare `{name}` as function, but it was already declared as static"
        )),
        Err(ModuleError::IncompatibleSignature(_, prev_sig, new_sig)) => tcx.sess.fatal(format!(
            "attempt to declare `{name}` with signature {new_sig:?}, \
            but it was already declared with signature {prev_sig:?}"
        )),
        Err(err) => Err::<_, _>(err).unwrap(),
    }
}

impl<'tcx> FunctionCx<'_, '_, 'tcx> {
    /// Instance must be monomorphized
    pub(crate) fn get_function_ref(&mut self, inst: Instance<'tcx>) -> FuncRef {
        let func_id = import_function(self.tcx, self.module, inst);
        let func_ref = self.module.declare_func_in_func(func_id, &mut self.bcx.func);

        if self.clif_comments.enabled() {
            self.add_comment(func_ref, format!("{:?}", inst));
        }

        func_ref
    }

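    /// Declare a runtime library function by name and call it, adjusting the signature for the
    /// target where necessary (currently only the indirect `i128` handling on Windows-like
    /// targets below).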
    pub(crate) fn lib_call(
        &mut self,
        name: &str,
        params: Vec<AbiParam>,
        returns: Vec<AbiParam>,
        args: &[Value],
    ) -> Cow<'_, [Value]> {
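        // On Windows-like targets 128-bit integers are passed and returned indirectly, so spill
        // `i128` arguments to 16-byte stack slots, pass pointers instead, and read an `i128`
        // return value back out of a caller-allocated slot.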
        if self.tcx.sess.target.is_like_windows {
            let (mut params, mut args): (Vec<_>, Vec<_>) =
                params
                    .into_iter()
                    .zip(args)
                    .map(|(param, &arg)| {
                        if param.value_type == types::I128 {
                            let arg_ptr = Pointer::stack_slot(self.bcx.create_sized_stack_slot(
                                StackSlotData { kind: StackSlotKind::ExplicitSlot, size: 16 },
                            ));
                            arg_ptr.store(self, arg, MemFlags::trusted());
                            (AbiParam::new(self.pointer_type), arg_ptr.get_addr(self))
                        } else {
                            (param, arg)
                        }
                    })
                    .unzip();

            let indirect_ret_val = returns.len() == 1 && returns[0].value_type == types::I128;

            if indirect_ret_val {
                params.insert(0, AbiParam::new(self.pointer_type));
                let ret_ptr =
                    Pointer::stack_slot(self.bcx.create_sized_stack_slot(StackSlotData {
                        kind: StackSlotKind::ExplicitSlot,
                        size: 16,
                    }));
                args.insert(0, ret_ptr.get_addr(self));
                self.lib_call_unadjusted(name, params, vec![], &args);
                return Cow::Owned(vec![ret_ptr.load(self, types::I128, MemFlags::trusted())]);
            } else {
                return self.lib_call_unadjusted(name, params, returns, &args);
            }
        }

        self.lib_call_unadjusted(name, params, returns, args)
    }

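    /// Declare the named library function with exactly the given signature and call it, without
    /// any ABI adjustments.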
    pub(crate) fn lib_call_unadjusted(
        &mut self,
        name: &str,
        params: Vec<AbiParam>,
        returns: Vec<AbiParam>,
        args: &[Value],
    ) -> Cow<'_, [Value]> {
        let sig = Signature { params, returns, call_conv: self.target_config.default_call_conv };
        let func_id = self.module.declare_function(name, Linkage::Import, &sig).unwrap();
        let func_ref = self.module.declare_func_in_func(func_id, &mut self.bcx.func);
        if self.clif_comments.enabled() {
            self.add_comment(func_ref, format!("{:?}", name));
        }
        let call_inst = self.bcx.ins().call(func_ref, args);
        if self.clif_comments.enabled() {
            self.add_comment(call_inst, format!("lib_call {}", name));
        }
        let results = self.bcx.inst_results(call_inst);
        assert!(results.len() <= 2, "{}", results.len());
        Cow::Borrowed(results)
    }
}

/// Make a [`CPlace`] capable of holding a value of the specified type.
fn make_local_place<'tcx>(
    fx: &mut FunctionCx<'_, '_, 'tcx>,
    local: Local,
    layout: TyAndLayout<'tcx>,
    is_ssa: bool,
) -> CPlace<'tcx> {
    if layout.is_unsized() {
        fx.tcx.sess.span_fatal(
            fx.mir.local_decls[local].source_info.span,
            "unsized locals are not yet supported",
        );
    }
    let place = if is_ssa {
        if let rustc_target::abi::Abi::ScalarPair(_, _) = layout.abi {
            CPlace::new_var_pair(fx, local, layout)
        } else {
            CPlace::new_var(fx, local, layout)
        }
    } else {
        CPlace::new_stack_slot(fx, layout)
    };

    self::comments::add_local_place_comments(fx, place, local);

    place
}

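/// Set up the function prelude: bind the incoming ABI params of the entry block to the return
/// place and argument locals (filling `fx.local_map`), allocate places for all remaining locals
/// and jump to the first MIR block.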
pub(crate) fn codegen_fn_prelude<'tcx>(fx: &mut FunctionCx<'_, '_, 'tcx>, start_block: Block) {
    fx.bcx.append_block_params_for_function_params(start_block);

    fx.bcx.switch_to_block(start_block);
    fx.bcx.ins().nop();

    let ssa_analyzed = crate::analyze::analyze(fx);

    self::comments::add_args_header_comment(fx);

    let mut block_params_iter = fx.bcx.func.dfg.block_params(start_block).to_vec().into_iter();
    let ret_place =
        self::returning::codegen_return_param(fx, &ssa_analyzed, &mut block_params_iter);
    assert_eq!(fx.local_map.push(ret_place), RETURN_PLACE);

    // None means pass_mode == NoPass
    enum ArgKind<'tcx> {
        Normal(Option<CValue<'tcx>>),
        Spread(Vec<Option<CValue<'tcx>>>),
    }

    let fn_abi = fx.fn_abi.take().unwrap();

    // FIXME implement variadics in cranelift
    if fn_abi.c_variadic {
        fx.tcx.sess.span_fatal(
            fx.mir.span,
            "Defining variadic functions is not yet supported by Cranelift",
        );
    }

    let mut arg_abis_iter = fn_abi.args.iter();

    let func_params = fx
        .mir
        .args_iter()
        .map(|local| {
            let arg_ty = fx.monomorphize(fx.mir.local_decls[local].ty);

            // Adapted from https://github.com/rust-lang/rust/blob/145155dc96757002c7b2e9de8489416e2fdbbd57/src/librustc_codegen_llvm/mir/mod.rs#L442-L482
            if Some(local) == fx.mir.spread_arg {
                // This argument (e.g. the last argument in the "rust-call" ABI)
                // is a tuple that was spread at the ABI level and now we have
                // to reconstruct it into a tuple local variable, from multiple
                // individual function arguments.

                let tupled_arg_tys = match arg_ty.kind() {
                    ty::Tuple(ref tys) => tys,
                    _ => bug!("spread argument isn't a tuple?! but {:?}", arg_ty),
                };

                let mut params = Vec::new();
                for (i, _arg_ty) in tupled_arg_tys.iter().enumerate() {
                    let arg_abi = arg_abis_iter.next().unwrap();
                    let param =
                        cvalue_for_param(fx, Some(local), Some(i), arg_abi, &mut block_params_iter);
                    params.push(param);
                }

                (local, ArgKind::Spread(params), arg_ty)
            } else {
                let arg_abi = arg_abis_iter.next().unwrap();
                let param =
                    cvalue_for_param(fx, Some(local), None, arg_abi, &mut block_params_iter);
                (local, ArgKind::Normal(param), arg_ty)
            }
        })
        .collect::<Vec<(Local, ArgKind<'tcx>, Ty<'tcx>)>>();

    assert!(fx.caller_location.is_none());
    if fx.instance.def.requires_caller_location(fx.tcx) {
        // Store caller location for `#[track_caller]`.
        let arg_abi = arg_abis_iter.next().unwrap();
        fx.caller_location =
            Some(cvalue_for_param(fx, None, None, arg_abi, &mut block_params_iter).unwrap());
    }

    assert!(arg_abis_iter.next().is_none(), "ArgAbi left behind");
    fx.fn_abi = Some(fn_abi);
    assert!(block_params_iter.next().is_none(), "arg_value left behind");

    self::comments::add_locals_header_comment(fx);

    for (local, arg_kind, ty) in func_params {
        // While this is normally an optimization to prevent an unnecessary copy when an argument is
        // not mutated by the current function, this is necessary to support unsized arguments.
        if let ArgKind::Normal(Some(val)) = arg_kind {
            if let Some((addr, meta)) = val.try_to_ptr() {
                // Ownership of the value at the backing storage for an argument is passed to the
                // callee per the ABI, so it is fine to borrow the backing storage of this argument
                // to prevent a copy.

                let place = if let Some(meta) = meta {
                    CPlace::for_ptr_with_extra(addr, meta, val.layout())
                } else {
                    CPlace::for_ptr(addr, val.layout())
                };

                self::comments::add_local_place_comments(fx, place, local);

                assert_eq!(fx.local_map.push(place), local);
                continue;
            }
        }

        let layout = fx.layout_of(ty);
        let is_ssa = ssa_analyzed[local].is_ssa(fx, ty);
        let place = make_local_place(fx, local, layout, is_ssa);
        assert_eq!(fx.local_map.push(place), local);

        match arg_kind {
            ArgKind::Normal(param) => {
                if let Some(param) = param {
                    place.write_cvalue(fx, param);
                }
            }
            ArgKind::Spread(params) => {
                for (i, param) in params.into_iter().enumerate() {
                    if let Some(param) = param {
                        place.place_field(fx, FieldIdx::new(i)).write_cvalue(fx, param);
                    }
                }
            }
        }
    }

    for local in fx.mir.vars_and_temps_iter() {
        let ty = fx.monomorphize(fx.mir.local_decls[local].ty);
        let layout = fx.layout_of(ty);

        let is_ssa = ssa_analyzed[local].is_ssa(fx, ty);

        let place = make_local_place(fx, local, layout, is_ssa);
        assert_eq!(fx.local_map.push(place), local);
    }

    fx.bcx.ins().jump(*fx.block_map.get(START_BLOCK).unwrap(), &[]);
}

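/// A call argument together with a flag recording whether the operand was moved into the call
/// (owned by the callee) or only copied.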
struct CallArgument<'tcx> {
    value: CValue<'tcx>,
    is_owned: bool,
}

// FIXME avoid intermediate `CValue` before calling `adjust_arg_for_abi`
fn codegen_call_argument_operand<'tcx>(
    fx: &mut FunctionCx<'_, '_, 'tcx>,
    operand: &Operand<'tcx>,
) -> CallArgument<'tcx> {
    CallArgument {
        value: codegen_operand(fx, operand),
        is_owned: matches!(operand, Operand::Move(_)),
    }
}

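/// Codegen a `Call` terminator: dispatch intrinsics and empty drop glue specially, otherwise
/// lower the arguments according to the callee's `FnAbi`, emit a direct, virtual or indirect call
/// and finish with a jump to the target block (or a trap for diverging calls).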
pub(crate) fn codegen_terminator_call<'tcx>(
    fx: &mut FunctionCx<'_, '_, 'tcx>,
    source_info: mir::SourceInfo,
    func: &Operand<'tcx>,
    args: &[Operand<'tcx>],
    destination: Place<'tcx>,
    target: Option<BasicBlock>,
) {
    let func = codegen_operand(fx, func);
    let fn_sig = func.layout().ty.fn_sig(fx.tcx);

    let ret_place = codegen_place(fx, destination);

    // Handle special calls like intrinsics and empty drop glue.
    let instance = if let ty::FnDef(def_id, substs) = *func.layout().ty.kind() {
        let instance =
            ty::Instance::expect_resolve(fx.tcx, ty::ParamEnv::reveal_all(), def_id, substs)
                .polymorphize(fx.tcx);

        if fx.tcx.symbol_name(instance).name.starts_with("llvm.") {
            crate::intrinsics::codegen_llvm_intrinsic_call(
                fx,
                &fx.tcx.symbol_name(instance).name,
                substs,
                args,
                ret_place,
                target,
            );
            return;
        }

        match instance.def {
            InstanceDef::Intrinsic(_) => {
                crate::intrinsics::codegen_intrinsic_call(
                    fx,
                    instance,
                    args,
                    ret_place,
                    target,
                    source_info,
                );
                return;
            }
            InstanceDef::DropGlue(_, None) => {
                // empty drop glue - a nop.
                let dest = target.expect("Non terminating drop_in_place_real???");
                let ret_block = fx.get_block(dest);
                fx.bcx.ins().jump(ret_block, &[]);
                return;
            }
            _ => Some(instance),
        }
    } else {
        None
    };

    let extra_args = &args[fn_sig.inputs().skip_binder().len()..];
    let extra_args = fx.tcx.mk_type_list_from_iter(
        extra_args.iter().map(|op_arg| fx.monomorphize(op_arg.ty(fx.mir, fx.tcx))),
    );
    let fn_abi = if let Some(instance) = instance {
        RevealAllLayoutCx(fx.tcx).fn_abi_of_instance(instance, extra_args)
    } else {
        RevealAllLayoutCx(fx.tcx).fn_abi_of_fn_ptr(fn_sig, extra_args)
    };

    let is_cold = if fn_sig.abi() == Abi::RustCold {
        true
    } else {
        instance.is_some_and(|inst| {
            fx.tcx.codegen_fn_attrs(inst.def_id()).flags.contains(CodegenFnAttrFlags::COLD)
        })
    };
    if is_cold {
        fx.bcx.set_cold_block(fx.bcx.current_block().unwrap());
        if let Some(destination_block) = target {
            fx.bcx.set_cold_block(fx.get_block(destination_block));
        }
    }

    // Unpack arguments tuple for closures
    let mut args = if fn_sig.abi() == Abi::RustCall {
        assert_eq!(args.len(), 2, "rust-call abi requires two arguments");
        let self_arg = codegen_call_argument_operand(fx, &args[0]);
        let pack_arg = codegen_call_argument_operand(fx, &args[1]);

        let tupled_arguments = match pack_arg.value.layout().ty.kind() {
            ty::Tuple(ref tupled_arguments) => tupled_arguments,
            _ => bug!("argument to function with \"rust-call\" ABI is not a tuple"),
        };

        let mut args = Vec::with_capacity(1 + tupled_arguments.len());
        args.push(self_arg);
        for i in 0..tupled_arguments.len() {
            args.push(CallArgument {
                value: pack_arg.value.value_field(fx, FieldIdx::new(i)),
                is_owned: pack_arg.is_owned,
            });
        }
        args
    } else {
        args.iter().map(|arg| codegen_call_argument_operand(fx, arg)).collect::<Vec<_>>()
    };

    // Pass the caller location for `#[track_caller]`.
    if instance.is_some_and(|inst| inst.def.requires_caller_location(fx.tcx)) {
        let caller_location = fx.get_caller_location(source_info);
        args.push(CallArgument { value: caller_location, is_owned: false });
    }

    let args = args;
    assert_eq!(fn_abi.args.len(), args.len());

    enum CallTarget {
        Direct(FuncRef),
        Indirect(SigRef, Value),
    }

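    // Resolve the call target: virtual calls load the function pointer out of the vtable (and
    // override the self argument with the data pointer), known instances become direct calls and
    // everything else goes through a function pointer.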
    let (func_ref, first_arg_override) = match instance {
        // Trait object call
        Some(Instance { def: InstanceDef::Virtual(_, idx), .. }) => {
            if fx.clif_comments.enabled() {
                let nop_inst = fx.bcx.ins().nop();
                fx.add_comment(
                    nop_inst,
                    format!("virtual call; self arg pass mode: {:?}", &fn_abi.args[0]),
                );
            }

            let (ptr, method) = crate::vtable::get_ptr_and_method_ref(fx, args[0].value, idx);
            let sig = clif_sig_from_fn_abi(fx.tcx, fx.target_config.default_call_conv, &fn_abi);
            let sig = fx.bcx.import_signature(sig);

            (CallTarget::Indirect(sig, method), Some(ptr.get_addr(fx)))
        }

        // Normal call
        Some(instance) => {
            let func_ref = fx.get_function_ref(instance);
            (CallTarget::Direct(func_ref), None)
        }

        // Indirect call
        None => {
            if fx.clif_comments.enabled() {
                let nop_inst = fx.bcx.ins().nop();
                fx.add_comment(nop_inst, "indirect call");
            }

            let func = func.load_scalar(fx);
            let sig = clif_sig_from_fn_abi(fx.tcx, fx.target_config.default_call_conv, &fn_abi);
            let sig = fx.bcx.import_signature(sig);

            (CallTarget::Indirect(sig, func), None)
        }
    };

    self::returning::codegen_with_call_return_arg(fx, &fn_abi.ret, ret_place, |fx, return_ptr| {
        let call_args = return_ptr
            .into_iter()
            .chain(first_arg_override.into_iter())
            .chain(
                args.into_iter()
                    .enumerate()
                    .skip(if first_arg_override.is_some() { 1 } else { 0 })
                    .flat_map(|(i, arg)| {
                        adjust_arg_for_abi(fx, arg.value, &fn_abi.args[i], arg.is_owned).into_iter()
                    }),
            )
            .collect::<Vec<Value>>();

        let call_inst = match func_ref {
            CallTarget::Direct(func_ref) => fx.bcx.ins().call(func_ref, &call_args),
            CallTarget::Indirect(sig, func_ptr) => {
                fx.bcx.ins().call_indirect(sig, func_ptr, &call_args)
            }
        };

        // FIXME find a cleaner way to support varargs
        if fn_sig.c_variadic() {
            if !matches!(fn_sig.abi(), Abi::C { .. }) {
                fx.tcx.sess.span_fatal(
                    source_info.span,
                    format!("Variadic call for non-C abi {:?}", fn_sig.abi()),
                );
            }
            let sig_ref = fx.bcx.func.dfg.call_signature(call_inst).unwrap();
            let abi_params = call_args
                .into_iter()
                .map(|arg| {
                    let ty = fx.bcx.func.dfg.value_type(arg);
                    if !ty.is_int() {
                        // FIXME set %al to upperbound on float args once floats are supported
                        fx.tcx.sess.span_fatal(
                            source_info.span,
                            format!("Non int ty {:?} for variadic call", ty),
                        );
                    }
                    AbiParam::new(ty)
                })
                .collect::<Vec<AbiParam>>();
            fx.bcx.func.dfg.signatures[sig_ref].params = abi_params;
        }

        call_inst
    });

    if let Some(dest) = target {
        let ret_block = fx.get_block(dest);
        fx.bcx.ins().jump(ret_block, &[]);
    } else {
        fx.bcx.ins().trap(TrapCode::UnreachableCodeReached);
    }
}

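/// Codegen dropping the value stored in `drop_place`, either by calling its drop glue directly
/// or, for trait objects, through the drop function pointer stored in the vtable.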
pub(crate) fn codegen_drop<'tcx>(
    fx: &mut FunctionCx<'_, '_, 'tcx>,
    source_info: mir::SourceInfo,
    drop_place: CPlace<'tcx>,
) {
    let ty = drop_place.layout().ty;
    let drop_instance = Instance::resolve_drop_in_place(fx.tcx, ty).polymorphize(fx.tcx);

    if let ty::InstanceDef::DropGlue(_, None) = drop_instance.def {
        // we don't actually need to drop anything
    } else {
        match ty.kind() {
            ty::Dynamic(_, _, ty::Dyn) => {
                // IN THIS ARM, WE HAVE:
                // ty = *mut (dyn Trait)
                // which is: exists<T> ( *mut T,    Vtable<T: Trait> )
                //                       args[0]    args[1]
                //
                // args = ( Data, Vtable )
                //                  |
                //                  v
                //                /-------\
                //                | ...   |
                //                \-------/
                //
                let (ptr, vtable) = drop_place.to_ptr_unsized();
                let ptr = ptr.get_addr(fx);
                let drop_fn = crate::vtable::drop_fn_of_obj(fx, vtable);

                // FIXME(eddyb) perhaps move some of this logic into
                // `Instance::resolve_drop_in_place`?
                let virtual_drop = Instance {
                    def: ty::InstanceDef::Virtual(drop_instance.def_id(), 0),
                    substs: drop_instance.substs,
                };
                let fn_abi =
                    RevealAllLayoutCx(fx.tcx).fn_abi_of_instance(virtual_drop, ty::List::empty());

                let sig = clif_sig_from_fn_abi(fx.tcx, fx.target_config.default_call_conv, &fn_abi);
                let sig = fx.bcx.import_signature(sig);
                fx.bcx.ins().call_indirect(sig, drop_fn, &[ptr]);
            }
            ty::Dynamic(_, _, ty::DynStar) => {
                // IN THIS ARM, WE HAVE:
                // ty = *mut (dyn* Trait)
                // which is: *mut exists<T: sizeof(T) == sizeof(usize)> (T, Vtable<T: Trait>)
                //
                // args = [ * ]
                //          |
                //          v
                //      ( Data, Vtable )
                //                |
                //                v
                //              /-------\
                //              | ...   |
                //              \-------/
                //
                //
                // WE CAN CONVERT THIS INTO THE ABOVE LOGIC BY DOING
                //
                // data = &(*args[0]).0    // gives a pointer to Data above (really the same pointer)
                // vtable = (*args[0]).1   // loads the vtable out
                // (data, vtable)          // an equivalent Rust `*mut dyn Trait`
                //
                // SO THEN WE CAN USE THE ABOVE CODE.
                let (data, vtable) = drop_place.to_cvalue(fx).dyn_star_force_data_on_stack(fx);
                let drop_fn = crate::vtable::drop_fn_of_obj(fx, vtable);

                let virtual_drop = Instance {
                    def: ty::InstanceDef::Virtual(drop_instance.def_id(), 0),
                    substs: drop_instance.substs,
                };
                let fn_abi =
                    RevealAllLayoutCx(fx.tcx).fn_abi_of_instance(virtual_drop, ty::List::empty());

                let sig = clif_sig_from_fn_abi(fx.tcx, fx.target_config.default_call_conv, &fn_abi);
                let sig = fx.bcx.import_signature(sig);
                fx.bcx.ins().call_indirect(sig, drop_fn, &[data]);
            }
            _ => {
                assert!(!matches!(drop_instance.def, InstanceDef::Virtual(_, _)));

                let fn_abi =
                    RevealAllLayoutCx(fx.tcx).fn_abi_of_instance(drop_instance, ty::List::empty());

                let arg_value = drop_place.place_ref(
                    fx,
                    fx.layout_of(Ty::new_ref(
                        fx.tcx,
                        fx.tcx.lifetimes.re_erased,
                        TypeAndMut { ty, mutbl: crate::rustc_hir::Mutability::Mut },
                    )),
                );
                let arg_value = adjust_arg_for_abi(fx, arg_value, &fn_abi.args[0], true);

                let mut call_args: Vec<Value> = arg_value.into_iter().collect::<Vec<_>>();

                if drop_instance.def.requires_caller_location(fx.tcx) {
                    // Pass the caller location for `#[track_caller]`.
                    let caller_location = fx.get_caller_location(source_info);
                    call_args.extend(
                        adjust_arg_for_abi(fx, caller_location, &fn_abi.args[1], false).into_iter(),
                    );
                }

                let func_ref = fx.get_function_ref(drop_instance);
                fx.bcx.ins().call(func_ref, &call_args);
            }
        }
    }
}