diff --git a/compiler/rustc_codegen_cranelift/src/vtable.rs b/compiler/rustc_codegen_cranelift/src/vtable.rs
index f97d416b66f9f..36b3725ef42bc 100644
--- a/compiler/rustc_codegen_cranelift/src/vtable.rs
+++ b/compiler/rustc_codegen_cranelift/src/vtable.rs
@@ -68,7 +68,7 @@ pub(crate) fn get_vtable<'tcx>(
     ty: Ty<'tcx>,
     trait_ref: Option<ty::PolyExistentialTraitRef<'tcx>>,
 ) -> Value {
-    let alloc_id = fx.tcx.vtable_allocation(ty, trait_ref);
+    let alloc_id = fx.tcx.vtable_allocation((ty, trait_ref));
     let data_id =
         data_id_for_alloc_id(&mut fx.constants_cx, &mut *fx.module, alloc_id, Mutability::Not);
     let local_data_id = fx.module.declare_data_in_func(data_id, &mut fx.bcx.func);
diff --git a/compiler/rustc_codegen_ssa/src/meth.rs b/compiler/rustc_codegen_ssa/src/meth.rs
index efeec5b728413..3267d3206f73f 100644
--- a/compiler/rustc_codegen_ssa/src/meth.rs
+++ b/compiler/rustc_codegen_ssa/src/meth.rs
@@ -72,7 +72,7 @@ pub fn get_vtable<'tcx, Cx: CodegenMethods<'tcx>>(
         return val;
     }
 
-    let vtable_alloc_id = tcx.vtable_allocation(ty, trait_ref);
+    let vtable_alloc_id = tcx.vtable_allocation((ty, trait_ref));
     let vtable_allocation = tcx.global_alloc(vtable_alloc_id).unwrap_memory();
     let vtable_const = cx.const_data_from_alloc(vtable_allocation);
     let align = cx.data_layout().pointer_align.abi;
diff --git a/compiler/rustc_const_eval/src/interpret/traits.rs b/compiler/rustc_const_eval/src/interpret/traits.rs
index a6ba00ec6952e..131674decc961 100644
--- a/compiler/rustc_const_eval/src/interpret/traits.rs
+++ b/compiler/rustc_const_eval/src/interpret/traits.rs
@@ -30,7 +30,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         ensure_monomorphic_enough(*self.tcx, ty)?;
         ensure_monomorphic_enough(*self.tcx, poly_trait_ref)?;
 
-        let vtable_allocation = self.tcx.vtable_allocation(ty, poly_trait_ref);
+        let vtable_allocation = self.tcx.vtable_allocation((ty, poly_trait_ref));
 
         let vtable_ptr = self.memory.global_base_pointer(Pointer::from(vtable_allocation))?;
 
diff --git a/compiler/rustc_middle/src/query/mod.rs b/compiler/rustc_middle/src/query/mod.rs
index 9b0b1377875d1..e3eda8483b692 100644
--- a/compiler/rustc_middle/src/query/mod.rs
+++ b/compiler/rustc_middle/src/query/mod.rs
@@ -1012,6 +1012,13 @@ rustc_queries! {
             key.1, key.0
         }
     }
+    query vtable_allocation(key: (Ty<'tcx>, Option<ty::PolyExistentialTraitRef<'tcx>>)) -> mir::interpret::AllocId {
+        desc { |tcx| "vtable const allocation for <{} as {}>",
+            key.0,
+            key.1.map(|trait_ref| format!("{}", trait_ref)).unwrap_or("_".to_owned())
+        }
+    }
+
     query codegen_fulfill_obligation(
         key: (ty::ParamEnv<'tcx>, ty::PolyTraitRef<'tcx>)
     ) -> Result<ImplSource<'tcx, ()>, ErrorReported> {
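The three call sites above change from `vtable_allocation(ty, trait_ref)` to `vtable_allocation((ty, trait_ref))` because a rustc query takes exactly one key, so the pair of arguments becomes a single tuple key; deduplication and, crucially, dependency tracking are then handled by the query system. For contrast, here is a minimal standalone sketch of the hand-rolled caching pattern this patch removes. Every name and type in it is a stand-in rather than a rustc API; the point is that a plain map memoizes results but records no dependency edges, so the incremental system never learns that a consumer read a vtable that later changed:

    use std::collections::HashMap;

    // Stand-ins for the compiler's interned types.
    #[derive(Clone, Copy, PartialEq, Eq, Hash)]
    struct Ty(u32);
    #[derive(Clone, Copy, PartialEq, Eq, Hash)]
    struct TraitRef(u32);
    type AllocId = u64;

    struct Tcx {
        // The pattern being removed: a side-table that deduplicates
        // vtable allocations but is invisible to dependency tracking.
        vtables_cache: HashMap<(Ty, Option<TraitRef>), AllocId>,
        next_alloc_id: AllocId,
    }

    impl Tcx {
        // Single tuple key, mirroring the new query signature.
        fn vtable_allocation(&mut self, key: (Ty, Option<TraitRef>)) -> AllocId {
            if let Some(&alloc_id) = self.vtables_cache.get(&key) {
                // A hit returns a value computed in an earlier pass without
                // recording that the caller now depends on it.
                return alloc_id;
            }
            let alloc_id = self.next_alloc_id; // stand-in for building the vtable
            self.next_alloc_id += 1;
            self.vtables_cache.insert(key, alloc_id);
            alloc_id
        }
    }

    fn main() {
        let mut tcx = Tcx { vtables_cache: HashMap::new(), next_alloc_id: 0 };
        let key = (Ty(0), Some(TraitRef(0)));
        let first = tcx.vtable_allocation(key);
        let second = tcx.vtable_allocation(key);
        assert_eq!(first, second); // deduplicated, as with the old TyCtxt cache
    }

Turning the function into a query keeps the deduplication but makes every read of the vtable allocation visible to the incremental system.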
diff --git a/compiler/rustc_middle/src/ty/context.rs b/compiler/rustc_middle/src/ty/context.rs
index 6a6fb30dce837..83d7c307bdfe9 100644
--- a/compiler/rustc_middle/src/ty/context.rs
+++ b/compiler/rustc_middle/src/ty/context.rs
@@ -8,7 +8,7 @@ use crate::lint::{struct_lint_level, LintDiagnosticBuilder, LintLevelSource};
 use crate::middle;
 use crate::middle::resolve_lifetime::{self, LifetimeScopeForPath, ObjectLifetimeDefault};
 use crate::middle::stability;
-use crate::mir::interpret::{self, AllocId, Allocation, ConstValue, Scalar};
+use crate::mir::interpret::{self, Allocation, ConstValue, Scalar};
 use crate::mir::{Body, Field, Local, Place, PlaceElem, ProjectionKind, Promoted};
 use crate::thir::Thir;
 use crate::traits;
@@ -1047,10 +1047,6 @@ pub struct GlobalCtxt<'tcx> {
     pub(crate) alloc_map: Lock<interpret::AllocMap<'tcx>>,
 
     output_filenames: Arc<OutputFilenames>,
-
-    // FIXME(eddyb) this doesn't belong here and should be using a query.
-    pub(super) vtables_cache:
-        Lock<FxHashMap<(Ty<'tcx>, Option<ty::PolyExistentialTraitRef<'tcx>>), AllocId>>,
 }
 
 impl<'tcx> TyCtxt<'tcx> {
@@ -1189,7 +1185,6 @@ impl<'tcx> TyCtxt<'tcx> {
             const_stability_interner: Default::default(),
             alloc_map: Lock::new(interpret::AllocMap::new()),
             output_filenames: Arc::new(output_filenames),
-            vtables_cache: Default::default(),
         }
     }
diff --git a/compiler/rustc_middle/src/ty/mod.rs b/compiler/rustc_middle/src/ty/mod.rs
index e124f0c389475..e95493acbb734 100644
--- a/compiler/rustc_middle/src/ty/mod.rs
+++ b/compiler/rustc_middle/src/ty/mod.rs
@@ -2051,6 +2051,7 @@ pub fn provide(providers: &mut ty::query::Providers) {
         trait_impls_of: trait_def::trait_impls_of_provider,
         type_uninhabited_from: inhabitedness::type_uninhabited_from,
         const_param_default: consts::const_param_default,
+        vtable_allocation: vtable::vtable_allocation_provider,
         ..*providers
     };
 }
diff --git a/compiler/rustc_middle/src/ty/print/pretty.rs b/compiler/rustc_middle/src/ty/print/pretty.rs
index 0bf81ea01130f..2610a76b2810b 100644
--- a/compiler/rustc_middle/src/ty/print/pretty.rs
+++ b/compiler/rustc_middle/src/ty/print/pretty.rs
@@ -2208,6 +2208,7 @@ forward_display_to_print! {
     // because `for<'tcx>` isn't possible yet.
     ty::Binder<'tcx, ty::ExistentialPredicate<'tcx>>,
     ty::Binder<'tcx, ty::TraitRef<'tcx>>,
+    ty::Binder<'tcx, ty::ExistentialTraitRef<'tcx>>,
     ty::Binder<'tcx, TraitRefPrintOnlyTraitPath<'tcx>>,
     ty::Binder<'tcx, TraitRefPrintOnlyTraitName<'tcx>>,
     ty::Binder<'tcx, ty::FnSig<'tcx>>,
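The `ty/mod.rs` hunk above wires `vtable_allocation_provider` into the compiler's provider table. The following standalone sketch shows only that registration pattern; the field and function names are stand-ins, since the real `Providers` struct is generated by the query macros with one function-pointer field per query. Struct-update syntax (`..*providers`) keeps every provider this module does not override:

    // Stand-in provider table with one function pointer per query.
    #[derive(Clone, Copy)]
    struct Providers {
        vtable_allocation: fn(u32) -> u64,
        trait_impls_of: fn(u32) -> u64,
    }

    // Stand-in provider; the real one builds the vtable allocation.
    fn vtable_allocation_provider(key: u32) -> u64 {
        u64::from(key) + 1
    }

    // Mirrors `pub fn provide(providers: &mut ty::query::Providers)`:
    // override one field, keep the rest via struct-update syntax.
    fn provide(providers: &mut Providers) {
        *providers = Providers { vtable_allocation: vtable_allocation_provider, ..*providers };
    }

    fn main() {
        fn not_yet_wired(_: u32) -> u64 {
            panic!("provider not registered")
        }
        let mut providers =
            Providers { vtable_allocation: not_yet_wired, trait_impls_of: not_yet_wired };
        provide(&mut providers);
        assert_eq!((providers.vtable_allocation)(1), 2);
    }

The `pretty.rs` hunk exists because the new query's `desc` formats an `Option<ty::PolyExistentialTraitRef<'tcx>>`, which needs a `Display` forwarding for the binder-wrapped existential trait ref.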
diff --git a/compiler/rustc_middle/src/ty/vtable.rs b/compiler/rustc_middle/src/ty/vtable.rs
index 5904f133e78c3..f766cad2b3d21 100644
--- a/compiler/rustc_middle/src/ty/vtable.rs
+++ b/compiler/rustc_middle/src/ty/vtable.rs
@@ -43,85 +43,72 @@ pub const COMMON_VTABLE_ENTRIES_DROPINPLACE: usize = 0;
 pub const COMMON_VTABLE_ENTRIES_SIZE: usize = 1;
 pub const COMMON_VTABLE_ENTRIES_ALIGN: usize = 2;
 
-impl<'tcx> TyCtxt<'tcx> {
-    /// Retrieves an allocation that represents the contents of a vtable.
-    /// There's a cache within `TyCtxt` so it will be deduplicated.
-    pub fn vtable_allocation(
-        self,
-        ty: Ty<'tcx>,
-        poly_trait_ref: Option<ty::PolyExistentialTraitRef<'tcx>>,
-    ) -> AllocId {
-        let tcx = self;
-        let vtables_cache = tcx.vtables_cache.lock();
-        if let Some(alloc_id) = vtables_cache.get(&(ty, poly_trait_ref)).cloned() {
-            return alloc_id;
-        }
-        drop(vtables_cache);
-
-        let vtable_entries = if let Some(poly_trait_ref) = poly_trait_ref {
-            let trait_ref = poly_trait_ref.with_self_ty(tcx, ty);
-            let trait_ref = tcx.erase_regions(trait_ref);
+/// Retrieves an allocation that represents the contents of a vtable.
+/// Since this is a query, allocations are cached and not duplicated.
+pub(super) fn vtable_allocation_provider<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    key: (Ty<'tcx>, Option<ty::PolyExistentialTraitRef<'tcx>>),
+) -> AllocId {
+    let (ty, poly_trait_ref) = key;
 
-        tcx.vtable_entries(trait_ref)
-    } else {
-        COMMON_VTABLE_ENTRIES
-    };
+    let vtable_entries = if let Some(poly_trait_ref) = poly_trait_ref {
+        let trait_ref = poly_trait_ref.with_self_ty(tcx, ty);
+        let trait_ref = tcx.erase_regions(trait_ref);
 
-        let layout = tcx
-            .layout_of(ty::ParamEnv::reveal_all().and(ty))
-            .expect("failed to build vtable representation");
-        assert!(!layout.is_unsized(), "can't create a vtable for an unsized type");
-        let size = layout.size.bytes();
-        let align = layout.align.abi.bytes();
+        tcx.vtable_entries(trait_ref)
+    } else {
+        COMMON_VTABLE_ENTRIES
+    };
 
-        let ptr_size = tcx.data_layout.pointer_size;
-        let ptr_align = tcx.data_layout.pointer_align.abi;
+    let layout = tcx
+        .layout_of(ty::ParamEnv::reveal_all().and(ty))
+        .expect("failed to build vtable representation");
+    assert!(!layout.is_unsized(), "can't create a vtable for an unsized type");
+    let size = layout.size.bytes();
+    let align = layout.align.abi.bytes();
 
-        let vtable_size = ptr_size * u64::try_from(vtable_entries.len()).unwrap();
-        let mut vtable =
-            Allocation::uninit(vtable_size, ptr_align, /* panic_on_fail */ true).unwrap();
+    let ptr_size = tcx.data_layout.pointer_size;
+    let ptr_align = tcx.data_layout.pointer_align.abi;
 
-        // No need to do any alignment checks on the memory accesses below, because we know the
-        // allocation is correctly aligned as we created it above. Also we're only offsetting by
-        // multiples of `ptr_align`, which means that it will stay aligned to `ptr_align`.
+    let vtable_size = ptr_size * u64::try_from(vtable_entries.len()).unwrap();
+    let mut vtable = Allocation::uninit(vtable_size, ptr_align, /* panic_on_fail */ true).unwrap();
 
-        for (idx, entry) in vtable_entries.iter().enumerate() {
-            let idx: u64 = u64::try_from(idx).unwrap();
-            let scalar = match entry {
-                VtblEntry::MetadataDropInPlace => {
-                    let instance = ty::Instance::resolve_drop_in_place(tcx, ty);
-                    let fn_alloc_id = tcx.create_fn_alloc(instance);
-                    let fn_ptr = Pointer::from(fn_alloc_id);
-                    ScalarMaybeUninit::from_pointer(fn_ptr, &tcx)
-                }
-                VtblEntry::MetadataSize => Scalar::from_uint(size, ptr_size).into(),
-                VtblEntry::MetadataAlign => Scalar::from_uint(align, ptr_size).into(),
-                VtblEntry::Vacant => continue,
-                VtblEntry::Method(instance) => {
-                    // Prepare the fn ptr we write into the vtable.
-                    let instance = instance.polymorphize(tcx);
-                    let fn_alloc_id = tcx.create_fn_alloc(instance);
-                    let fn_ptr = Pointer::from(fn_alloc_id);
-                    ScalarMaybeUninit::from_pointer(fn_ptr, &tcx)
-                }
-                VtblEntry::TraitVPtr(trait_ref) => {
-                    let super_trait_ref = trait_ref.map_bound(|trait_ref| {
-                        ty::ExistentialTraitRef::erase_self_ty(tcx, trait_ref)
-                    });
-                    let supertrait_alloc_id = self.vtable_allocation(ty, Some(super_trait_ref));
-                    let vptr = Pointer::from(supertrait_alloc_id);
-                    ScalarMaybeUninit::from_pointer(vptr, &tcx)
-                }
-            };
-            vtable
-                .write_scalar(&tcx, alloc_range(ptr_size * idx, ptr_size), scalar)
-                .expect("failed to build vtable representation");
-        }
+    // No need to do any alignment checks on the memory accesses below, because we know the
+    // allocation is correctly aligned as we created it above. Also we're only offsetting by
+    // multiples of `ptr_align`, which means that it will stay aligned to `ptr_align`.
 
-        vtable.mutability = Mutability::Not;
-        let alloc_id = tcx.create_memory_alloc(tcx.intern_const_alloc(vtable));
-        let mut vtables_cache = self.vtables_cache.lock();
-        vtables_cache.insert((ty, poly_trait_ref), alloc_id);
-        alloc_id
+    for (idx, entry) in vtable_entries.iter().enumerate() {
+        let idx: u64 = u64::try_from(idx).unwrap();
+        let scalar = match entry {
+            VtblEntry::MetadataDropInPlace => {
+                let instance = ty::Instance::resolve_drop_in_place(tcx, ty);
+                let fn_alloc_id = tcx.create_fn_alloc(instance);
+                let fn_ptr = Pointer::from(fn_alloc_id);
+                ScalarMaybeUninit::from_pointer(fn_ptr, &tcx)
+            }
+            VtblEntry::MetadataSize => Scalar::from_uint(size, ptr_size).into(),
+            VtblEntry::MetadataAlign => Scalar::from_uint(align, ptr_size).into(),
+            VtblEntry::Vacant => continue,
+            VtblEntry::Method(instance) => {
+                // Prepare the fn ptr we write into the vtable.
+                let instance = instance.polymorphize(tcx);
+                let fn_alloc_id = tcx.create_fn_alloc(instance);
+                let fn_ptr = Pointer::from(fn_alloc_id);
+                ScalarMaybeUninit::from_pointer(fn_ptr, &tcx)
+            }
+            VtblEntry::TraitVPtr(trait_ref) => {
+                let super_trait_ref = trait_ref
+                    .map_bound(|trait_ref| ty::ExistentialTraitRef::erase_self_ty(tcx, trait_ref));
+                let supertrait_alloc_id = tcx.vtable_allocation((ty, Some(super_trait_ref)));
+                let vptr = Pointer::from(supertrait_alloc_id);
+                ScalarMaybeUninit::from_pointer(vptr, &tcx)
+            }
+        };
+        vtable
+            .write_scalar(&tcx, alloc_range(ptr_size * idx, ptr_size), scalar)
+            .expect("failed to build vtable representation");
     }
+
+    vtable.mutability = Mutability::Not;
+    tcx.create_memory_alloc(tcx.intern_const_alloc(vtable))
 }
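The provider writes one pointer-sized slot per `VtblEntry`, in order: the three common entries first (matching `COMMON_VTABLE_ENTRIES_DROPINPLACE`, `_SIZE` and `_ALIGN` at indices 0, 1 and 2), then method pointers and supertrait vptrs, with `Vacant` slots left uninitialized. Below is a standalone sketch of that layout logic, using plain integers as stand-ins for pointers and allocations, and `Option` to model an uninitialized slot; none of this is rustc API:

    #[derive(Debug, Clone, Copy)]
    enum VtblEntry {
        MetadataDropInPlace,
        MetadataSize,
        MetadataAlign,
        Vacant,
        Method(u64), // stand-in for a resolved function pointer
    }

    // Builds the vtable image as a vector of pointer-sized slots;
    // `None` models a slot that stays uninitialized.
    fn build_vtable(entries: &[VtblEntry], size: u64, align: u64) -> Vec<Option<u64>> {
        entries
            .iter()
            .map(|entry| match entry {
                // Slot 0: pointer to the drop-in-place shim (fake value here).
                VtblEntry::MetadataDropInPlace => Some(0xD80F),
                // Slots 1 and 2: size and alignment of the concrete type.
                VtblEntry::MetadataSize => Some(size),
                VtblEntry::MetadataAlign => Some(align),
                // Vacant slots are skipped, like the `continue` above.
                VtblEntry::Vacant => None,
                VtblEntry::Method(fn_ptr) => Some(*fn_ptr),
            })
            .collect()
    }

    fn main() {
        let entries = [
            VtblEntry::MetadataDropInPlace, // COMMON_VTABLE_ENTRIES_DROPINPLACE = 0
            VtblEntry::MetadataSize,        // COMMON_VTABLE_ENTRIES_SIZE = 1
            VtblEntry::MetadataAlign,       // COMMON_VTABLE_ENTRIES_ALIGN = 2
            VtblEntry::Method(0x1000),      // first trait method
        ];
        let vtable = build_vtable(&entries, 4, 4); // e.g. size/align of u32
        assert_eq!(vtable[1], Some(4));
        assert_eq!(vtable[3], Some(0x1000));
    }

Note also that the `TraitVPtr` arm now recurses through `tcx.vtable_allocation((ty, Some(super_trait_ref)))` instead of calling the method on `self`, so supertrait vtables get the same query-level caching and dependency tracking.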
diff --git a/compiler/rustc_query_impl/src/keys.rs b/compiler/rustc_query_impl/src/keys.rs
index 563a3cf14382c..344892875961a 100644
--- a/compiler/rustc_query_impl/src/keys.rs
+++ b/compiler/rustc_query_impl/src/keys.rs
@@ -72,6 +72,17 @@ impl<'tcx> Key for mir::interpret::GlobalId<'tcx> {
     }
 }
 
+impl<'tcx> Key for (Ty<'tcx>, Option<ty::PolyExistentialTraitRef<'tcx>>) {
+    #[inline(always)]
+    fn query_crate_is_local(&self) -> bool {
+        true
+    }
+
+    fn default_span(&self, _: TyCtxt<'_>) -> Span {
+        DUMMY_SP
+    }
+}
+
 impl<'tcx> Key for mir::interpret::LitToConstInput<'tcx> {
     #[inline(always)]
     fn query_crate_is_local(&self) -> bool {
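The `keys.rs` hunk satisfies the query system's `Key` obligation for the new tuple key: a key reports whether it refers to the local crate and supplies a span for diagnostics, and this impl hard-codes `true` and `DUMMY_SP`, meaning the query is always computed locally and has no natural source location. A minimal standalone sketch of that obligation, with stand-ins for the compiler's `Span`, `DUMMY_SP`, and interned types:

    // Stand-ins for rustc_span::Span and DUMMY_SP.
    #[derive(Clone, Copy, Debug, PartialEq)]
    struct Span(u32);
    const DUMMY_SP: Span = Span(0);

    // The obligation every query key type must satisfy: report whether
    // the key refers to the local crate and give a span for diagnostics.
    trait Key {
        fn query_crate_is_local(&self) -> bool;
        fn default_span(&self) -> Span;
    }

    // Mirror of the impl added above, for a stand-in tuple key.
    impl Key for (u32, Option<u32>) {
        fn query_crate_is_local(&self) -> bool {
            true
        }
        fn default_span(&self) -> Span {
            DUMMY_SP
        }
    }

    fn main() {
        let key = (7u32, None::<u32>);
        assert!(key.query_crate_is_local());
        assert_eq!(key.default_span(), DUMMY_SP);
    }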
diff --git a/src/test/incremental/reorder_vtable.rs b/src/test/incremental/reorder_vtable.rs
new file mode 100644
index 0000000000000..8dacba63351ef
--- /dev/null
+++ b/src/test/incremental/reorder_vtable.rs
@@ -0,0 +1,41 @@
+// revisions:rpass1 rpass2
+
+// This test case makes sure that re-ordering the methods in a vtable will
+// trigger recompilation of the codegen units that instantiate it.
+//
+// See https://github.com/rust-lang/rust/issues/89598
+
+trait Foo {
+    #[cfg(rpass1)]
+    fn method1(&self) -> u32;
+
+    fn method2(&self) -> u32;
+
+    #[cfg(rpass2)]
+    fn method1(&self) -> u32;
+}
+
+impl Foo for u32 {
+    fn method1(&self) -> u32 { 17 }
+    fn method2(&self) -> u32 { 42 }
+}
+
+fn main() {
+    // Before #89598 was fixed, the vtable allocation would be cached during
+    // a MIR optimization pass and then the codegen pass for the main object
+    // file would not register a dependency on it (because of the missing
+    // dep-tracking).
+    //
+    // In the rpass2 session, the main object file would not be re-compiled,
+    // thus the mod1::foo(x) call would pass in an outdated vtable, while the
+    // mod1 object would expect the new, re-ordered vtable, resulting in a
+    // call to the wrong method.
+    let x: &dyn Foo = &0u32;
+    assert_eq!(mod1::foo(x), 17);
+}
+
+mod mod1 {
+    pub(super) fn foo(x: &dyn super::Foo) -> u32 {
+        x.method1()
+    }
+}
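As a usage note, and an assumption about the contributor workflow rather than part of the patch: compiletest runs each entry in `revisions` as its own compilation session against the same incremental cache, passing `--cfg rpass1` and then `--cfg rpass2`, and expects both revisions to compile and run successfully. In a rustc checkout the test would typically be invoked with something like:

    ./x.py test src/test/incremental --test-args reorder_vtable

In the rpass2 revision the two method declarations swap places in the trait, so a stale vtable left over from the rpass1 session would make `mod1::foo` dispatch to the wrong slot and return 42 instead of 17, failing the assertion in `main`.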