//! Return value handling

use crate::prelude::*;

use rustc_target::abi::call::{ArgAbi, PassMode};
use smallvec::{smallvec, SmallVec};

/// Returns a place where the return value of the current function can be written to. If necessary
/// this adds an extra parameter pointing to where the return value needs to be stored.
pub(super) fn codegen_return_param<'tcx>(
    fx: &mut FunctionCx<'_, '_, 'tcx>,
    ssa_analyzed: &rustc_index::IndexSlice<Local, crate::analyze::SsaKind>,
    block_params_iter: &mut impl Iterator<Item = Value>,
) -> CPlace<'tcx> {
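    // The PassMode of the return value decides where the return place lives: for
    // Ignore/Direct/Pair/Cast returns it is an ordinary local (SSA value or stack slot), while
    // for indirect returns the caller passes a pointer to the destination as an extra parameter.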
    let (ret_place, ret_param): (_, SmallVec<[_; 2]>) = match fx.fn_abi.as_ref().unwrap().ret.mode {
        PassMode::Ignore | PassMode::Direct(_) | PassMode::Pair(_, _) | PassMode::Cast(..) => {
            let is_ssa =
                ssa_analyzed[RETURN_PLACE].is_ssa(fx, fx.fn_abi.as_ref().unwrap().ret.layout.ty);
            (
                super::make_local_place(
                    fx,
                    RETURN_PLACE,
                    fx.fn_abi.as_ref().unwrap().ret.layout,
                    is_ssa,
                ),
                smallvec![],
            )
        }
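        // The return value is passed indirectly: the next block parameter is a pointer provided
        // by the caller, and the return place is constructed directly on top of that pointer.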
        PassMode::Indirect { attrs: _, extra_attrs: None, on_stack: _ } => {
            let ret_param = block_params_iter.next().unwrap();
            assert_eq!(fx.bcx.func.dfg.value_type(ret_param), fx.pointer_type);
            (
                CPlace::for_ptr(Pointer::new(ret_param), fx.fn_abi.as_ref().unwrap().ret.layout),
                smallvec![ret_param],
            )
        }
        PassMode::Indirect { attrs: _, extra_attrs: Some(_), on_stack: _ } => {
            unreachable!("unsized return value")
        }
    };

    crate::abi::comments::add_arg_comment(
        fx,
        "ret",
        Some(RETURN_PLACE),
        None,
        &ret_param,
        &fx.fn_abi.as_ref().unwrap().ret.mode,
        fx.fn_abi.as_ref().unwrap().ret.layout,
    );

    ret_place
}

/// Invokes the closure with, if necessary, a value representing the return pointer. When the
/// closure returns, the call's return value(s), if any, are written to the correct place.
pub(super) fn codegen_with_call_return_arg<'tcx>(
    fx: &mut FunctionCx<'_, '_, 'tcx>,
    ret_arg_abi: &ArgAbi<'tcx, Ty<'tcx>>,
    ret_place: CPlace<'tcx>,
    f: impl FnOnce(&mut FunctionCx<'_, '_, 'tcx>, Option<Value>) -> Inst,
) {
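    // Before emitting the call, decide whether a return pointer has to be passed. For indirect
    // returns we try to pass the address of `ret_place` directly; if it is not already backed by
    // memory, a temporary stack slot is allocated and copied into `ret_place` after the call.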
    let (ret_temp_place, return_ptr) = match ret_arg_abi.mode {
        PassMode::Ignore => (None, None),
        PassMode::Indirect { attrs: _, extra_attrs: None, on_stack: _ } => {
            if let Some(ret_ptr) = ret_place.try_to_ptr() {
                // This is an optimization to prevent unnecessary copies of the return value when
                // the return place is already a memory place as opposed to a register.
                // This match arm can be safely removed.
                (None, Some(ret_ptr.get_addr(fx)))
            } else {
                let place = CPlace::new_stack_slot(fx, ret_arg_abi.layout);
                (Some(place), Some(place.to_ptr().get_addr(fx)))
            }
        }
        PassMode::Indirect { attrs: _, extra_attrs: Some(_), on_stack: _ } => {
            unreachable!("unsized return value")
        }
        PassMode::Direct(_) | PassMode::Pair(_, _) | PassMode::Cast(..) => (None, None),
    };

    let call_inst = f(fx, return_ptr);

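    // After the call, move the result(s) into the destination according to the return PassMode.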
    match ret_arg_abi.mode {
        PassMode::Ignore => {}
        PassMode::Direct(_) => {
            let ret_val = fx.bcx.inst_results(call_inst)[0];
            ret_place.write_cvalue(fx, CValue::by_val(ret_val, ret_arg_abi.layout));
        }
        PassMode::Pair(_, _) => {
            let ret_val_a = fx.bcx.inst_results(call_inst)[0];
            let ret_val_b = fx.bcx.inst_results(call_inst)[1];
            ret_place
                .write_cvalue(fx, CValue::by_val_pair(ret_val_a, ret_val_b, ret_arg_abi.layout));
        }
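        // Cast returns come back as one or more raw values in the cast ABI representation; they
        // are reassembled into a value of the Rust-level layout before being written back.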
        PassMode::Cast(ref cast, _) => {
            let results =
                fx.bcx.inst_results(call_inst).iter().copied().collect::<SmallVec<[Value; 2]>>();
            let result =
                super::pass_mode::from_casted_value(fx, &results, ret_place.layout(), cast);
            ret_place.write_cvalue(fx, result);
        }
        PassMode::Indirect { attrs: _, extra_attrs: None, on_stack: _ } => {
            if let Some(ret_temp_place) = ret_temp_place {
                // If ret_temp_place is None, it is not necessary to copy the return value.
                let ret_temp_value = ret_temp_place.to_cvalue(fx);
                ret_place.write_cvalue(fx, ret_temp_value);
            }
        }
        PassMode::Indirect { attrs: _, extra_attrs: Some(_), on_stack: _ } => {
            unreachable!("unsized return value")
        }
    }
}

/// Codegen a return instruction with the right return value(s) if any.
pub(crate) fn codegen_return(fx: &mut FunctionCx<'_, '_, '_>) {
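    // For indirect returns the value has already been written through the return pointer, so a
    // bare `return` suffices; otherwise the return value is loaded from RETURN_PLACE and passed
    // as the Cranelift return value(s).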
    match fx.fn_abi.as_ref().unwrap().ret.mode {
        PassMode::Ignore | PassMode::Indirect { attrs: _, extra_attrs: None, on_stack: _ } => {
            fx.bcx.ins().return_(&[]);
        }
        PassMode::Indirect { attrs: _, extra_attrs: Some(_), on_stack: _ } => {
            unreachable!("unsized return value")
        }
        PassMode::Direct(_) => {
            let place = fx.get_local_place(RETURN_PLACE);
            let ret_val = place.to_cvalue(fx).load_scalar(fx);
            fx.bcx.ins().return_(&[ret_val]);
        }
        PassMode::Pair(_, _) => {
            let place = fx.get_local_place(RETURN_PLACE);
            let (ret_val_a, ret_val_b) = place.to_cvalue(fx).load_scalar_pair(fx);
            fx.bcx.ins().return_(&[ret_val_a, ret_val_b]);
        }
        PassMode::Cast(ref cast, _) => {
            let place = fx.get_local_place(RETURN_PLACE);
            let ret_val = place.to_cvalue(fx);
            let ret_vals = super::pass_mode::to_casted_value(fx, ret_val, cast);
            fx.bcx.ins().return_(&ret_vals);
        }
    }
}