//! Codegen vtables and vtable accesses.
//!
//! See `rustc_codegen_ssa/src/meth.rs` for reference.
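//!
//! Every vtable starts with the common entries in `ty::COMMON_VTABLE_ENTRIES`: the
//! `drop_in_place` function pointer, the size of the concrete type, and its minimum
//! alignment. Pointers to the trait's methods follow after these common entries.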

use crate::constant::data_id_for_alloc_id;
use crate::prelude::*;

pub(crate) fn vtable_memflags() -> MemFlags {
    let mut flags = MemFlags::trusted(); // A vtable access is always aligned and will never trap.
    flags.set_readonly(); // A vtable is always read-only.
    flags
}

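/// Load the `drop_in_place` function pointer for a trait object from its vtable.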
pub(crate) fn drop_fn_of_obj(fx: &mut FunctionCx<'_, '_, '_>, vtable: Value) -> Value {
    let usize_size = fx.layout_of(fx.tcx.types.usize).size.bytes() as usize;
    fx.bcx.ins().load(
        fx.pointer_type,
        vtable_memflags(),
        vtable,
        (ty::COMMON_VTABLE_ENTRIES_DROPINPLACE * usize_size) as i32,
    )
}

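/// Load the size of the trait object's concrete type from its vtable.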
pub(crate) fn size_of_obj(fx: &mut FunctionCx<'_, '_, '_>, vtable: Value) -> Value {
    let usize_size = fx.layout_of(fx.tcx.types.usize).size.bytes() as usize;
    fx.bcx.ins().load(
        fx.pointer_type,
        vtable_memflags(),
        vtable,
        (ty::COMMON_VTABLE_ENTRIES_SIZE * usize_size) as i32,
    )
}

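/// Load the minimum alignment of the trait object's concrete type from its vtable.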
pub(crate) fn min_align_of_obj(fx: &mut FunctionCx<'_, '_, '_>, vtable: Value) -> Value {
    let usize_size = fx.layout_of(fx.tcx.types.usize).size.bytes() as usize;
    fx.bcx.ins().load(
        fx.pointer_type,
        vtable_memflags(),
        vtable,
        (ty::COMMON_VTABLE_ENTRIES_ALIGN * usize_size) as i32,
    )
}

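/// Decompose a trait object receiver into its data pointer and the function pointer
/// stored at index `idx` of its vtable.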
pub(crate) fn get_ptr_and_method_ref<'tcx>(
    fx: &mut FunctionCx<'_, '_, 'tcx>,
    mut arg: CValue<'tcx>,
    idx: usize,
) -> (Pointer, Value) {
    let (ptr, vtable) = 'block: {
        if let Abi::Scalar(_) = arg.layout().abi {
            // The receiver is a single scalar, so it may be a newtype wrapper around the
            // pointer; peel off wrappers until a raw pointer or reference is reached.
            'descend_newtypes: while !arg.layout().ty.is_unsafe_ptr() && !arg.layout().ty.is_ref() {
                for i in 0..arg.layout().fields.count() {
                    let field = arg.value_field(fx, FieldIdx::new(i));
                    if !field.layout().is_zst() {
                        // we found the one non-zero-sized field that is allowed
                        // now find *its* non-zero-sized field, or stop if it's a
                        // pointer
                        arg = field;
                        continue 'descend_newtypes;
                    }
                }

                bug!("receiver has no non-zero-sized fields {:?}", arg);
            }
        }

        if let ty::Ref(_, ty, _) = arg.layout().ty.kind() {
            if ty.is_dyn_star() {
                // The pointee is a `dyn*`: take a pointer to its data field and load its
                // vtable field.
                let inner_layout = fx.layout_of(arg.layout().ty.builtin_deref(true).unwrap().ty);
                let dyn_star = CPlace::for_ptr(Pointer::new(arg.load_scalar(fx)), inner_layout);
                let ptr = dyn_star.place_field(fx, FieldIdx::new(0)).to_ptr();
                let vtable =
                    dyn_star.place_field(fx, FieldIdx::new(1)).to_cvalue(fx).load_scalar(fx);
                break 'block (ptr, vtable);
            }
        }

        if let Abi::ScalarPair(_, _) = arg.layout().abi {
            let (ptr, vtable) = arg.load_scalar_pair(fx);
            (Pointer::new(ptr), vtable)
        } else {
            let (ptr, vtable) = arg.try_to_ptr().unwrap();
            (ptr, vtable.unwrap())
        }
    };

    let usize_size = fx.layout_of(fx.tcx.types.usize).size.bytes();
    let func_ref = fx.bcx.ins().load(
        fx.pointer_type,
        vtable_memflags(),
        vtable,
        (idx * usize_size as usize) as i32,
    );
    (ptr, func_ref)
}

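/// Get a pointer to the vtable of `ty` for `trait_ref` (the vtable contains only the
/// common entries when `trait_ref` is `None`).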
pub(crate) fn get_vtable<'tcx>(
    fx: &mut FunctionCx<'_, '_, 'tcx>,
    ty: Ty<'tcx>,
    trait_ref: Option<ty::PolyExistentialTraitRef<'tcx>>,
) -> Value {
    let alloc_id = fx.tcx.vtable_allocation((ty, trait_ref));
    let data_id =
        data_id_for_alloc_id(&mut fx.constants_cx, &mut *fx.module, alloc_id, Mutability::Not);
    let local_data_id = fx.module.declare_data_in_func(data_id, &mut fx.bcx.func);
    if fx.clif_comments.enabled() {
        fx.add_comment(local_data_id, format!("vtable: {:?}", alloc_id));
    }
    fx.bcx.ins().global_value(fx.pointer_type, local_data_id)
}