use crate::traits::*;

use rustc_middle::ty::{self, subst::GenericArgKind, ExistentialPredicate, Ty, TyCtxt};
use rustc_session::config::Lto;
use rustc_symbol_mangling::typeid_for_trait_ref;
use rustc_target::abi::call::FnAbi;
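
/// An index into a vtable, counted in pointer-sized slots: both `get_fn` and
/// `get_usize` scale it by the target's pointer size to reach the entry.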
#[derive(Copy, Clone, Debug)]
pub struct VirtualIndex(u64);

impl<'a, 'tcx> VirtualIndex {
    pub fn from_index(index: usize) -> Self {
        VirtualIndex(index as u64)
    }

    pub fn get_fn<Bx: BuilderMethods<'a, 'tcx>>(
        self,
        bx: &mut Bx,
        llvtable: Bx::Value,
        ty: Ty<'tcx>,
        fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
    ) -> Bx::Value {
        // Load the function pointer for this method from the object's vtable.
        debug!("get_fn({llvtable:?}, {ty:?}, {self:?})");

        let llty = bx.fn_ptr_backend_type(fn_abi);
        let llvtable = bx.pointercast(llvtable, bx.type_ptr_to(llty));

        if bx.cx().sess().opts.unstable_opts.virtual_function_elimination
            && bx.cx().sess().lto() == Lto::Fat
        {
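            // Under virtual-function elimination (fat LTO only), load the
            // slot through the `llvm.type.checked.load` intrinsic instead of
            // a plain GEP + load, so LLVM can track which vtable entries are
            // actually used and strip the rest.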
            let typeid =
                bx.typeid_metadata(typeid_for_trait_ref(bx.tcx(), get_trait_ref(bx.tcx(), ty)));
            let vtable_byte_offset = self.0 * bx.data_layout().pointer_size.bytes();
            let type_checked_load = bx.type_checked_load(llvtable, vtable_byte_offset, typeid);
            let func = bx.extract_value(type_checked_load, 0);
            bx.pointercast(func, llty)
        } else {
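            // Otherwise, index directly into the vtable: `self.0` is the
            // slot number, in pointer-sized units.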
            let ptr_align = bx.tcx().data_layout.pointer_align.abi;
            let gep = bx.inbounds_gep(llty, llvtable, &[bx.const_usize(self.0)]);
            let ptr = bx.load(llty, gep, ptr_align);
            bx.nonnull_metadata(ptr);
            // VTable loads are invariant.
            bx.set_invariant_load(ptr);
            ptr
        }
    }
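
    /// Loads a `usize`-typed entry from the vtable, e.g. the size or align
    /// entry consulted for `size_of_val`/`align_of_val` on trait objects.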
    pub fn get_usize<Bx: BuilderMethods<'a, 'tcx>>(
        self,
        bx: &mut Bx,
        llvtable: Bx::Value,
    ) -> Bx::Value {
        // Load the `usize` entry from the vtable.
        debug!("get_usize({llvtable:?}, {self:?})");

        let llty = bx.type_isize();
        let llvtable = bx.pointercast(llvtable, bx.type_ptr_to(llty));
        let usize_align = bx.tcx().data_layout.pointer_align.abi;
        let gep = bx.inbounds_gep(llty, llvtable, &[bx.const_usize(self.0)]);
        let ptr = bx.load(llty, gep, usize_align);
        // VTable loads are invariant.
        bx.set_invariant_load(ptr);
        ptr
    }
}
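
// A hedged sketch of a call site for `VirtualIndex::get_fn` when lowering a
// virtual call; `bx`, `vtable`, `ty`, `fn_abi`, and the slot index `idx` are
// assumed to be in scope:
//
//     let fn_ptr = VirtualIndex::from_index(idx).get_fn(&mut bx, vtable, ty, fn_abi);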

/// Walks `ty` to find the `dyn Trait` inside it and returns the trait
/// reference of its first existential predicate. This is used by `get_fn`
/// to compute the type id for virtual-function elimination.
fn get_trait_ref<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> ty::PolyExistentialTraitRef<'tcx> {
    for arg in ty.peel_refs().walk() {
        if let GenericArgKind::Type(ty) = arg.unpack() {
            if let ty::Dynamic(trait_refs, _) = ty.kind() {
                return trait_refs[0].map_bound(|trait_ref| match trait_ref {
                    ExistentialPredicate::Trait(tr) => tr,
                    ExistentialPredicate::Projection(proj) => proj.trait_ref(tcx),
                    ExistentialPredicate::AutoTrait(_) => {
                        bug!("auto traits don't have functions")
                    }
                });
            }
        }
    }

    bug!("expected a `dyn Trait` ty, found {ty:?}")
}

/// Creates a dynamic vtable for the given type and vtable origin.
/// This is used only for objects.
///
/// The vtables are cached instead of created on every call.
///
/// The `trait_ref` encodes the erased self type. Hence, if we are
/// making an object `Foo<dyn Trait>` from a value of type `Foo<T>`, then
/// `trait_ref` would map `T: Trait`.
pub fn get_vtable<'tcx, Cx: CodegenMethods<'tcx>>(
    cx: &Cx,
    ty: Ty<'tcx>,
    trait_ref: Option<ty::PolyExistentialTraitRef<'tcx>>,
) -> Cx::Value {
    let tcx = cx.tcx();

    debug!("get_vtable(ty={ty:?}, trait_ref={trait_ref:?})");

    // Check the cache.
    if let Some(&val) = cx.vtables().borrow().get(&(ty, trait_ref)) {
        return val;
    }

    // Not cached: lower the vtable's constant allocation to a backend value
    // and emit it as a pointer-aligned static.
    let vtable_alloc_id = tcx.vtable_allocation((ty, trait_ref));
    let vtable_allocation = tcx.global_alloc(vtable_alloc_id).unwrap_memory();
    let vtable_const = cx.const_data_from_alloc(vtable_allocation);
    let align = cx.data_layout().pointer_align.abi;
    let vtable = cx.static_addr_of(vtable_const, align, Some("vtable"));

    cx.create_vtable_debuginfo(ty, trait_ref, vtable);
    cx.vtables().borrow_mut().insert((ty, trait_ref), vtable);

    vtable
}
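
// A hedged usage sketch: when unsizing e.g. a `&T` into a `&dyn Trait`,
// codegen needs the vtable half of the resulting fat pointer. Assuming a
// codegen context `cx`, the concrete `source_ty`, and a `poly_trait_ref`
// are in scope, the call would look like:
//
//     let vtable = get_vtable(cx, source_ty, Some(poly_trait_ref));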