diff --git a/src/librustc/mir/interpret/value.rs b/src/librustc/mir/interpret/value.rs
index f7b3385668f7b..40e8111997361 100644
--- a/src/librustc/mir/interpret/value.rs
+++ b/src/librustc/mir/interpret/value.rs
@@ -2,7 +2,7 @@ use std::fmt;
 use rustc_macros::HashStable;
 use rustc_apfloat::{Float, ieee::{Double, Single}};
 
-use crate::ty::{Ty, InferConst, ParamConst, layout::{HasDataLayout, Size}, subst::SubstsRef};
+use crate::ty::{Ty, InferConst, ParamConst, layout::{HasDataLayout, Size, Align}, subst::SubstsRef};
 use crate::ty::PlaceholderConst;
 use crate::hir::def_id::DefId;
 
@@ -45,7 +45,12 @@ pub enum ConstValue<'tcx> {
     /// An allocation together with a pointer into the allocation.
     /// Invariant: the pointer's `AllocId` resolves to the allocation.
-    ByRef(Pointer, &'tcx Allocation),
+    /// The alignment exists to allow `const_field` to have `ByRef` access to nonprimitive fields
+    /// of `repr(packed)` structs. The alignment may be lower than that of the constant's type,
+    /// permitting reads with lower alignment than what the type would normally require.
+    /// FIXME(RalfJ,oli-obk): The alignment checks are part of miri, but const eval doesn't really
+    /// need them. Disabling them may be too hard though.
+    ByRef(Pointer, Align, &'tcx Allocation),
 
     /// Used in the HIR by using `Unevaluated` everywhere and later normalizing to one of the other
     /// variants when the code is monomorphic enough for that.
diff --git a/src/librustc/ty/structural_impls.rs b/src/librustc/ty/structural_impls.rs
index a4efb566e13e8..4cd0fd3e824f5 100644
--- a/src/librustc/ty/structural_impls.rs
+++ b/src/librustc/ty/structural_impls.rs
@@ -1335,7 +1335,7 @@ impl<'tcx> TypeFoldable<'tcx> for &'tcx ty::Const<'tcx> {
 impl<'tcx> TypeFoldable<'tcx> for ConstValue<'tcx> {
     fn super_fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> Self {
         match *self {
-            ConstValue::ByRef(ptr, alloc) => ConstValue::ByRef(ptr, alloc),
+            ConstValue::ByRef(ptr, align, alloc) => ConstValue::ByRef(ptr, align, alloc),
             ConstValue::Infer(ic) => ConstValue::Infer(ic.fold_with(folder)),
             ConstValue::Param(p) => ConstValue::Param(p.fold_with(folder)),
             ConstValue::Placeholder(p) => ConstValue::Placeholder(p),
diff --git a/src/librustc_codegen_llvm/common.rs b/src/librustc_codegen_llvm/common.rs
index 0b23aac5224c6..f21f203fcc99f 100644
--- a/src/librustc_codegen_llvm/common.rs
+++ b/src/librustc_codegen_llvm/common.rs
@@ -11,7 +11,7 @@ use crate::value::Value;
 use rustc_codegen_ssa::traits::*;
 use crate::consts::const_alloc_to_llvm;
 
-use rustc::ty::layout::{HasDataLayout, LayoutOf, self, TyLayout, Size};
+use rustc::ty::layout::{HasDataLayout, LayoutOf, self, TyLayout, Size, Align};
 use rustc::mir::interpret::{Scalar, GlobalAlloc, Allocation};
 use rustc_codegen_ssa::mir::place::PlaceRef;
 
@@ -344,11 +344,12 @@ impl ConstMethods<'tcx> for CodegenCx<'ll, 'tcx> {
     fn from_const_alloc(
         &self,
         layout: TyLayout<'tcx>,
+        align: Align,
         alloc: &Allocation,
         offset: Size,
     ) -> PlaceRef<'tcx, &'ll Value> {
         let init = const_alloc_to_llvm(self, alloc);
-        let base_addr = self.static_addr_of(init, layout.align.abi, None);
+        let base_addr = self.static_addr_of(init, align, None);
 
         let llval = unsafe { llvm::LLVMConstInBoundsGEP(
             self.const_bitcast(base_addr, self.type_i8p()),
@@ -356,7 +357,7 @@ impl ConstMethods<'tcx> for CodegenCx<'ll, 'tcx> {
             1,
         )};
         let llval = self.const_bitcast(llval, self.type_ptr_to(layout.llvm_type(self)));
-        PlaceRef::new_sized(llval, layout, alloc.align)
+        PlaceRef::new_sized(llval, layout, align)
     }
 
     fn const_ptrcast(&self, val: &'ll Value, ty: &'ll Type) -> &'ll Value {
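To see why `ByRef` needs to carry its own alignment rather than reusing the type's: a field of a `repr(packed)` struct sits at whatever offset the preceding fields dictate, so its address can be less aligned than its type would normally require. A standalone illustration in plain user code (not compiler internals; `ptr::addr_of!` is assumed available so the address can be taken without creating an unaligned reference):

```rust
use std::mem;
use std::ptr;

#[repr(packed)]
struct Packed {
    byte: u8,
    // This tuple type normally requires 8-byte alignment...
    field: (i64, u32, u32, u32),
}

fn main() {
    let p = Packed { byte: 0, field: (5, 6, 7, 8) };
    // ...but `repr(packed)` lowers the struct's alignment to 1...
    assert_eq!(mem::align_of::<Packed>(), 1);
    // ...so `field` lives at offset 1, a 1-aligned address. A `ByRef`
    // constant pointing at `p.field` must record this lower alignment.
    let base = ptr::addr_of!(p.byte) as usize;
    let field = ptr::addr_of!(p.field) as usize;
    assert_eq!(field - base, 1);
}
```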
diff --git a/src/librustc_codegen_llvm/consts.rs b/src/librustc_codegen_llvm/consts.rs
index eb059ac453c3d..4bf91bbed60ea 100644
--- a/src/librustc_codegen_llvm/consts.rs
+++ b/src/librustc_codegen_llvm/consts.rs
@@ -71,7 +71,9 @@ pub fn codegen_static_initializer(
     let static_ = cx.tcx.const_eval(param_env.and(cid))?;
 
     let alloc = match static_.val {
-        ConstValue::ByRef(ptr, alloc) if ptr.offset.bytes() == 0 => alloc,
+        ConstValue::ByRef(ptr, align, alloc) if ptr.offset.bytes() == 0 && align == alloc.align => {
+            alloc
+        },
         _ => bug!("static const eval returned {:#?}", static_),
     };
     Ok((const_alloc_to_llvm(cx, alloc), alloc))
diff --git a/src/librustc_codegen_ssa/mir/operand.rs b/src/librustc_codegen_ssa/mir/operand.rs
index 3305dfe1ffbb8..c1626d31c7801 100644
--- a/src/librustc_codegen_ssa/mir/operand.rs
+++ b/src/librustc_codegen_ssa/mir/operand.rs
@@ -109,8 +109,8 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
                 let b_llval = bx.const_usize((end - start) as u64);
                 OperandValue::Pair(a_llval, b_llval)
             },
-            ConstValue::ByRef(ptr, alloc) => {
-                return bx.load_operand(bx.from_const_alloc(layout, alloc, ptr.offset));
+            ConstValue::ByRef(ptr, align, alloc) => {
+                return bx.load_operand(bx.from_const_alloc(layout, align, alloc, ptr.offset));
             },
         };
 
diff --git a/src/librustc_codegen_ssa/mir/place.rs b/src/librustc_codegen_ssa/mir/place.rs
index 81b17d0bee804..72aedb4812a21 100644
--- a/src/librustc_codegen_ssa/mir/place.rs
+++ b/src/librustc_codegen_ssa/mir/place.rs
@@ -424,8 +424,8 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
             let layout = cx.layout_of(self.monomorphize(&ty));
             match bx.tcx().const_eval(param_env.and(cid)) {
                 Ok(val) => match val.val {
-                    mir::interpret::ConstValue::ByRef(ptr, alloc) => {
-                        bx.cx().from_const_alloc(layout, alloc, ptr.offset)
+                    mir::interpret::ConstValue::ByRef(ptr, align, alloc) => {
+                        bx.cx().from_const_alloc(layout, align, alloc, ptr.offset)
                     }
                     _ => bug!("promoteds should have an allocation: {:?}", val),
                 },
diff --git a/src/librustc_codegen_ssa/traits/consts.rs b/src/librustc_codegen_ssa/traits/consts.rs
index 32412f303c155..46286b5329e43 100644
--- a/src/librustc_codegen_ssa/traits/consts.rs
+++ b/src/librustc_codegen_ssa/traits/consts.rs
@@ -34,6 +34,7 @@ pub trait ConstMethods<'tcx>: BackendTypes {
     fn from_const_alloc(
         &self,
         layout: layout::TyLayout<'tcx>,
+        align: layout::Align,
         alloc: &Allocation,
         offset: layout::Size,
     ) -> PlaceRef<'tcx, Self::Value>;
diff --git a/src/librustc_mir/const_eval.rs b/src/librustc_mir/const_eval.rs
index 7d05e7be26eb9..284a8f40e1f35 100644
--- a/src/librustc_mir/const_eval.rs
+++ b/src/librustc_mir/const_eval.rs
@@ -9,7 +9,7 @@ use std::convert::TryInto;
 
 use rustc::hir::def::DefKind;
 use rustc::hir::def_id::DefId;
-use rustc::mir::interpret::{ConstEvalErr, ErrorHandled};
+use rustc::mir::interpret::{ConstEvalErr, ErrorHandled, ScalarMaybeUndef};
 use rustc::mir;
 use rustc::ty::{self, TyCtxt, query::TyCtxtAt};
 use rustc::ty::layout::{self, LayoutOf, VariantIdx};
@@ -18,15 +18,14 @@ use rustc::traits::Reveal;
 use rustc::util::common::ErrorReported;
 use rustc_data_structures::fx::FxHashMap;
 
-use syntax::ast::Mutability;
 use syntax::source_map::{Span, DUMMY_SP};
 
 use crate::interpret::{self,
-    PlaceTy, MPlaceTy, MemPlace, OpTy, ImmTy, Immediate, Scalar,
+    PlaceTy, MPlaceTy, OpTy, ImmTy, Immediate, Scalar,
     RawConst, ConstValue,
     InterpResult, InterpErrorInfo, InterpError, GlobalId, InterpretCx, StackPopCleanup,
     Allocation, AllocId, MemoryKind,
-    snapshot, RefTracking,
+    snapshot, RefTracking, intern_const_alloc_recursive,
 };
 
 /// Number of steps until the detector even starts doing anything.
@@ -63,33 +62,19 @@ pub(crate) fn eval_promoted<'mir, 'tcx>(
     eval_body_using_ecx(&mut ecx, cid, body, param_env)
 }
 
-fn mplace_to_const<'tcx>(
-    ecx: &CompileTimeEvalContext<'_, 'tcx>,
-    mplace: MPlaceTy<'tcx>,
-) -> &'tcx ty::Const<'tcx> {
-    let MemPlace { ptr, align, meta } = *mplace;
-    // extract alloc-offset pair
-    assert!(meta.is_none());
-    let ptr = ptr.to_ptr().unwrap();
-    let alloc = ecx.memory.get(ptr.alloc_id).unwrap();
-    assert!(alloc.align >= align);
-    assert!(alloc.bytes.len() as u64 - ptr.offset.bytes() >= mplace.layout.size.bytes());
-    let mut alloc = alloc.clone();
-    alloc.align = align;
-    // FIXME shouldn't it be the case that `mark_static_initialized` has already
-    // interned this? I thought that is the entire point of that `FinishStatic` stuff?
-    let alloc = ecx.tcx.intern_const_alloc(alloc);
-    let val = ConstValue::ByRef(ptr, alloc);
-    ecx.tcx.mk_const(ty::Const { val, ty: mplace.layout.ty })
-}
-
 fn op_to_const<'tcx>(
     ecx: &CompileTimeEvalContext<'_, 'tcx>,
     op: OpTy<'tcx>,
 ) -> &'tcx ty::Const<'tcx> {
-    // We do not normalize just any data. Only non-union scalars and slices.
-    let normalize = match op.layout.abi {
-        layout::Abi::Scalar(..) => op.layout.ty.ty_adt_def().map_or(true, |adt| !adt.is_union()),
+    // We do not have value optimizations for everything.
+    // Only scalars and slices, since they are very common.
+    // Note that further down we turn scalars of undefined bits back to `ByRef`. These can result
+    // from scalar unions that are initialized with one of their zero-sized variants. We could
+    // instead allow `ConstValue::Scalar` to store `ScalarMaybeUndef`, but that would affect all
+    // the usual cases of extracting e.g. a `usize`, without there being a real use case for the
+    // `Undef` situation.
+    let try_as_immediate = match op.layout.abi {
+        layout::Abi::Scalar(..) => true,
         layout::Abi::ScalarPair(..) => match op.layout.ty.sty {
             ty::Ref(_, inner, _) => match inner.sty {
                 ty::Slice(elem) => elem == ecx.tcx.types.u8,
@@ -100,16 +85,38 @@ fn op_to_const<'tcx>(
         },
         _ => false,
     };
-    let normalized_op = if normalize {
-        Err(*ecx.read_immediate(op).expect("normalization works on validated constants"))
+    let immediate = if try_as_immediate {
+        Err(ecx.read_immediate(op).expect("normalization works on validated constants"))
     } else {
+        // It is guaranteed that any non-slice scalar pair is actually ByRef here.
+        // When we come back from raw const eval, we are always by-ref. The only way our op here is
+        // by-val is if we are in const_field, i.e., if this is (a field of) something that we
+        // "tried to make immediate" before. We wouldn't do that for non-slice scalar pairs or
+        // structs containing such.
        op.try_as_mplace()
    };
-    let val = match normalized_op {
-        Ok(mplace) => return mplace_to_const(ecx, mplace),
-        Err(Immediate::Scalar(x)) =>
-            ConstValue::Scalar(x.not_undef().unwrap()),
-        Err(Immediate::ScalarPair(a, b)) => {
+    let val = match immediate {
+        Ok(mplace) => {
+            let ptr = mplace.ptr.to_ptr().unwrap();
+            let alloc = ecx.tcx.alloc_map.lock().unwrap_memory(ptr.alloc_id);
+            ConstValue::ByRef(ptr, mplace.align, alloc)
+        },
+        // see comment on `let try_as_immediate` above
+        Err(ImmTy { imm: Immediate::Scalar(x), .. }) => match x {
+            ScalarMaybeUndef::Scalar(s) => ConstValue::Scalar(s),
+            ScalarMaybeUndef::Undef => {
+                // When coming out of "normal CTFE", we'll always have an `Indirect` operand as
+                // argument and we will not need this. The only way we can already have an
+                // `Immediate` is when we are called from `const_field`, and that `Immediate`
+                // comes from a constant, so it can have `Undef`, because the indirect
+                // memory that was read had undefined bytes.
+                let mplace = op.to_mem_place();
+                let ptr = mplace.ptr.to_ptr().unwrap();
+                let alloc = ecx.tcx.alloc_map.lock().unwrap_memory(ptr.alloc_id);
+                ConstValue::ByRef(ptr, mplace.align, alloc)
+            },
+        },
+        Err(ImmTy { imm: Immediate::ScalarPair(a, b), .. }) => {
             let (data, start) = match a.not_undef().unwrap() {
                 Scalar::Ptr(ptr) => (
                     ecx.tcx.alloc_map.lock().unwrap_memory(ptr.alloc_id),
@@ -164,13 +171,12 @@ fn eval_body_using_ecx<'mir, 'tcx>(
     ecx.run()?;
 
     // Intern the result
-    let mutability = if tcx.is_mutable_static(cid.instance.def_id()) ||
-                        !layout.ty.is_freeze(tcx, param_env, body.span) {
-        Mutability::Mutable
-    } else {
-        Mutability::Immutable
-    };
-    ecx.memory.intern_static(ret.ptr.to_ptr()?.alloc_id, mutability)?;
+    intern_const_alloc_recursive(
+        ecx,
+        cid.instance.def_id(),
+        ret,
+        param_env,
+    )?;
 
     debug!("eval_body_using_ecx done: {:?}", *ret);
     Ok(ret)
@@ -297,7 +303,7 @@ impl<K: Hash + Eq, V> interpret::AllocMap<K, V> for FxHashMap<K, V> {
     }
 }
 
-type CompileTimeEvalContext<'mir, 'tcx> =
+crate type CompileTimeEvalContext<'mir, 'tcx> =
     InterpretCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>>;
 
 impl interpret::MayLeak for ! {
@@ -526,13 +532,22 @@ fn validate_and_turn_into_const<'tcx>(
             mplace.into(),
             path,
             Some(&mut ref_tracking),
-            true, // const mode
         )?;
     }
    // Now that we validated, turn this into a proper constant.
+    // Statics/promoteds are always `ByRef`; for the rest, `op_to_const` decides
+    // whether they become immediates.
    let def_id = cid.instance.def.def_id();
    if tcx.is_static(def_id) || cid.promoted.is_some() {
-        Ok(mplace_to_const(&ecx, mplace))
+        let ptr = mplace.ptr.to_ptr()?;
+        Ok(tcx.mk_const(ty::Const {
+            val: ConstValue::ByRef(
+                ptr,
+                mplace.align,
+                ecx.tcx.alloc_map.lock().unwrap_memory(ptr.alloc_id),
+            ),
+            ty: mplace.layout.ty,
+        }))
    } else {
        Ok(op_to_const(&ecx, mplace.into()))
    }
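The heart of the new `op_to_const` is the ABI-driven choice above: scalars always become immediates, scalar pairs only when they are `&[u8]` or `&str` slices, and everything else stays `ByRef`. A condensed restatement of that decision table with toy stand-in types (hypothetical, not the compiler's actual `Abi`/`Ty`):

```rust
enum Abi { Scalar, ScalarPair, Aggregate }
enum Ty { RefToU8Slice, RefToStr, RefToOther, Other }

/// Mirrors the `try_as_immediate` match: scalars are always worth the
/// value optimization; pairs only if they are `&[u8]`/`&str` slices.
fn try_as_immediate(abi: Abi, ty: Ty) -> bool {
    match abi {
        Abi::Scalar => true,
        Abi::ScalarPair => match ty {
            Ty::RefToU8Slice | Ty::RefToStr => true,
            Ty::RefToOther | Ty::Other => false,
        },
        Abi::Aggregate => false,
    }
}

fn main() {
    assert!(try_as_immediate(Abi::Scalar, Ty::Other));
    assert!(try_as_immediate(Abi::ScalarPair, Ty::RefToStr));
    assert!(try_as_immediate(Abi::ScalarPair, Ty::RefToU8Slice));
    // a two-field struct stays `ByRef` even though its ABI is a pair
    assert!(!try_as_immediate(Abi::ScalarPair, Ty::Other));
    assert!(!try_as_immediate(Abi::Aggregate, Ty::RefToOther));
}
```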
diff --git a/src/librustc_mir/hair/pattern/_match.rs b/src/librustc_mir/hair/pattern/_match.rs
index 764ff0e15d5a4..974c863792eed 100644
--- a/src/librustc_mir/hair/pattern/_match.rs
+++ b/src/librustc_mir/hair/pattern/_match.rs
@@ -215,10 +215,15 @@ impl LiteralExpander<'tcx> {
         debug!("fold_const_value_deref {:?} {:?} {:?}", val, rty, crty);
         match (val, &crty.sty, &rty.sty) {
             // the easy case, deref a reference
-            (ConstValue::Scalar(Scalar::Ptr(p)), x, y) if x == y => ConstValue::ByRef(
-                p,
-                self.tcx.alloc_map.lock().unwrap_memory(p.alloc_id),
-            ),
+            (ConstValue::Scalar(Scalar::Ptr(p)), x, y) if x == y => {
+                let alloc = self.tcx.alloc_map.lock().unwrap_memory(p.alloc_id);
+                ConstValue::ByRef(
+                    p,
+                    // FIXME(oli-obk): this should be the type's layout
+                    alloc.align,
+                    alloc,
+                )
+            },
             // unsize array to slice if pattern is array but match value or other patterns are slice
             (ConstValue::Scalar(Scalar::Ptr(p)), ty::Array(t, n), ty::Slice(u)) => {
                 assert_eq!(t, u);
@@ -1431,7 +1436,7 @@ fn slice_pat_covered_by_const<'tcx>(
     suffix: &[Pattern<'tcx>],
 ) -> Result<bool, ErrorReported> {
     let data: &[u8] = match (const_val.val, &const_val.ty.sty) {
-        (ConstValue::ByRef(ptr, alloc), ty::Array(t, n)) => {
+        (ConstValue::ByRef(ptr, _, alloc), ty::Array(t, n)) => {
             assert_eq!(*t, tcx.types.u8);
             let n = n.assert_usize(tcx).unwrap();
             alloc.get_bytes(&tcx, ptr, Size::from_bytes(n)).unwrap()
@@ -1753,7 +1758,7 @@ fn specialize<'p, 'a: 'p, 'tcx>(
             let (alloc, offset, n, ty) = match value.ty.sty {
                 ty::Array(t, n) => {
                     match value.val {
-                        ConstValue::ByRef(ptr, alloc) => (
+                        ConstValue::ByRef(ptr, _, alloc) => (
                            alloc,
                            ptr.offset,
                            n.unwrap_usize(cx.tcx),
diff --git a/src/librustc_mir/interpret/eval_context.rs b/src/librustc_mir/interpret/eval_context.rs
index b01826c98db05..4afa4a0cbb3d7 100644
--- a/src/librustc_mir/interpret/eval_context.rs
+++ b/src/librustc_mir/interpret/eval_context.rs
@@ -576,7 +576,6 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpretCx<'mir, 'tcx, M> {
                     self.place_to_op(return_place)?,
                     vec![],
                     None,
-                    /*const_mode*/false,
                )?;
            }
        } else {
@@ -641,6 +640,9 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpretCx<'mir, 'tcx, M> {
         &self,
         gid: GlobalId<'tcx>,
     ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> {
+        // FIXME(oli-obk): make this check an assertion that it's not a static here
+        // FIXME(RalfJ, oli-obk): document that `Place::Static` can never be anything but a static
+        // and `ConstValue::Unevaluated` can never be a static
         let param_env = if self.tcx.is_static(gid.instance.def_id()) {
            ty::ParamEnv::reveal_all()
        } else {
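The new `intern.rs` module below drives interning through a small mode machine: `Static` is sticky, while `ConstBase` degrades to `Const` as soon as the visitor goes behind a reference, which is exactly where `UnsafeCell` stops being acceptable. A toy model of just that transition (hypothetical types, not the compiler's):

```rust
#[derive(Copy, Clone, Debug, PartialEq)]
enum InternMode { Static, ConstBase, Const }

/// How the mode changes when the visitor recurses behind a reference:
/// statics stay statics, but a constant's base mode drops to `Const`,
/// so `UnsafeCell` behind a reference in a const can be rejected.
fn mode_behind_ref(mode: InternMode) -> InternMode {
    match mode {
        InternMode::ConstBase => InternMode::Const,
        other => other,
    }
}

fn main() {
    assert_eq!(mode_behind_ref(InternMode::ConstBase), InternMode::Const);
    assert_eq!(mode_behind_ref(InternMode::Const), InternMode::Const);
    assert_eq!(mode_behind_ref(InternMode::Static), InternMode::Static);
}
```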
diff --git a/src/librustc_mir/interpret/intern.rs b/src/librustc_mir/interpret/intern.rs
new file mode 100644
index 0000000000000..d998f40c86ecc
--- /dev/null
+++ b/src/librustc_mir/interpret/intern.rs
@@ -0,0 +1,333 @@
+//! This module specifies the type-based interner for constants.
+//!
+//! After a const evaluation has computed a value, before we destroy the const evaluator's session
+//! memory, we need to extract all memory allocations to the global memory pool so they stay around.
+
+use rustc::ty::layout::LayoutOf;
+use rustc::ty::{Ty, TyCtxt, ParamEnv, self};
+use rustc::mir::interpret::{
+    InterpResult, ErrorHandled,
+};
+use rustc::hir;
+use rustc::hir::def_id::DefId;
+use super::validity::RefTracking;
+use rustc_data_structures::fx::FxHashSet;
+
+use syntax::ast::Mutability;
+use syntax_pos::Span;
+
+use super::{
+    ValueVisitor, MemoryKind, Pointer, AllocId, MPlaceTy, InterpError, Scalar,
+};
+use crate::const_eval::{CompileTimeInterpreter, CompileTimeEvalContext};
+
+struct InternVisitor<'rt, 'mir: 'rt, 'tcx: 'rt + 'mir> {
+    /// Previously encountered safe references.
+    ref_tracking: &'rt mut RefTracking<(MPlaceTy<'tcx>, Mutability, InternMode)>,
+    ecx: &'rt mut CompileTimeEvalContext<'mir, 'tcx>,
+    param_env: ParamEnv<'tcx>,
+    /// The root node of the value that we're looking at. This field is never mutated and only used
+    /// for sanity assertions that will ICE when `const_qualif` screws up.
+    mode: InternMode,
+    /// This field stores the mutability of the value *currently* being checked.
+    /// It is set to mutable when an `UnsafeCell` is encountered.
+    /// When we encounter a reference, we don't recurse immediately; instead we store the
+    /// value to be checked in `ref_tracking` together with the mutability at which we are checking
+    /// the value.
+    /// When encountering an immutable reference, we treat everything behind it as immutable.
+    mutability: Mutability,
+    /// A list of all encountered relocations. After type-based interning, we traverse this list to
+    /// also intern allocations that are only referenced by a raw pointer or inside a union.
+    leftover_relocations: &'rt mut FxHashSet<AllocId>,
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Hash, Eq)]
+enum InternMode {
+    /// Mutable references must in fact be immutable due to their surrounding immutability in a
+    /// `static`. In a `static mut` we start out as mutable and thus can also contain further
+    /// `&mut` that will actually be treated as mutable.
+    Static,
+    /// `UnsafeCell` is OK in the value of a constant, but not behind references in a constant.
+    ConstBase,
+    /// `UnsafeCell` ICEs.
+    Const,
+}
+
+/// Signalling data structure to ensure we don't recurse
+/// into the memory of other constants or statics
+struct IsStaticOrFn;
+
+impl<'rt, 'mir, 'tcx> InternVisitor<'rt, 'mir, 'tcx> {
+    /// Intern an allocation without looking at its children.
+    fn intern_shallow(
+        &mut self,
+        ptr: Pointer,
+        mutability: Mutability,
+    ) -> InterpResult<'tcx, Option<IsStaticOrFn>> {
+        trace!(
+            "InternVisitor::intern {:?} with {:?}",
+            ptr, mutability,
+        );
+        // remove allocation
+        let tcx = self.ecx.tcx;
+        let memory = self.ecx.memory_mut();
+        let (kind, mut alloc) = match memory.alloc_map.remove(&ptr.alloc_id) {
+            Some(entry) => entry,
+            None => {
+                // If the pointer is dangling (neither in local nor global memory), we leave it
+                // to validation to error. The `delay_span_bug` ensures that we don't forget such
+                // a check in validation.
+                if tcx.alloc_map.lock().get(ptr.alloc_id).is_none() {
+                    tcx.sess.delay_span_bug(self.ecx.tcx.span, "tried to intern dangling pointer");
+                }
+                // treat dangling pointers like other statics
+                // just to stop trying to recurse into them
+                return Ok(Some(IsStaticOrFn));
+            },
+        };
+        // This match is just a canary for future changes to `MemoryKind`, which most likely need
+        // changes in this function.
+        match kind {
+            MemoryKind::Stack | MemoryKind::Vtable => {},
+        }
+        // Ensure LLVM knows to only put this into immutable memory if the value is immutable,
+        // either by being behind a reference or by being part of a static or const without
+        // interior mutability.
+        alloc.mutability = mutability;
+        // link the alloc id to the actual allocation
+        let alloc = tcx.intern_const_alloc(alloc);
+        self.leftover_relocations.extend(alloc.relocations.iter().map(|&(_, ((), reloc))| reloc));
+        tcx.alloc_map.lock().set_alloc_id_memory(ptr.alloc_id, alloc);
+        Ok(None)
+    }
+}
+
+impl<'rt, 'mir, 'tcx>
+    ValueVisitor<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>>
+for
+    InternVisitor<'rt, 'mir, 'tcx>
+{
+    type V = MPlaceTy<'tcx>;
+
+    #[inline(always)]
+    fn ecx(&self) -> &CompileTimeEvalContext<'mir, 'tcx> {
+        &self.ecx
+    }
+
+    fn visit_aggregate(
+        &mut self,
+        mplace: MPlaceTy<'tcx>,
+        fields: impl Iterator<Item=InterpResult<'tcx, Self::V>>,
+    ) -> InterpResult<'tcx> {
+        if let Some(def) = mplace.layout.ty.ty_adt_def() {
+            if Some(def.did) == self.ecx.tcx.lang_items().unsafe_cell_type() {
+                // We are crossing over an `UnsafeCell`, we can mutate again
+                let old = std::mem::replace(&mut self.mutability, Mutability::Mutable);
+                assert_ne!(
+                    self.mode, InternMode::Const,
+                    "UnsafeCells are not allowed behind references in constants. This should have \
+                    been prevented statically by const qualification. If this were allowed one \
+                    would be able to change a constant at one use site and other use sites may \
+                    arbitrarily decide to change, too.",
+                );
+                let walked = self.walk_aggregate(mplace, fields);
+                self.mutability = old;
+                return walked;
+            }
+        }
+        self.walk_aggregate(mplace, fields)
+    }
+
+    fn visit_primitive(&mut self, mplace: MPlaceTy<'tcx>) -> InterpResult<'tcx> {
+        // Handle Reference types, as these are the only relocations supported by const eval.
+        // Raw pointers (and boxes) are handled by the `leftover_relocations` logic.
+        let ty = mplace.layout.ty;
+        if let ty::Ref(_, _, mutability) = ty.sty {
+            let value = self.ecx.read_immediate(mplace.into())?;
+            // Handle trait object vtables
+            if let Ok(meta) = value.to_meta() {
+                let layout = self.ecx.layout_of(ty.builtin_deref(true).unwrap().ty)?;
+                if layout.is_unsized() {
+                    if let ty::Dynamic(..) = self.ecx.tcx.struct_tail(layout.ty).sty {
+                        if let Ok(vtable) = meta.unwrap().to_ptr() {
+                            // explicitly choose `Immutable` here, since vtables are immutable, even
+                            // if the reference of the fat pointer is mutable
+                            self.intern_shallow(vtable, Mutability::Immutable)?;
+                        }
+                    }
+                }
+            }
+            let mplace = self.ecx.ref_to_mplace(value)?;
+            // Check if we have encountered this pointer+layout combination before.
+            // Only recurse for allocation-backed pointers.
+            if let Scalar::Ptr(ptr) = mplace.ptr {
+                // We do not have any `frozen` logic here, because it's essentially equivalent to
+                // the mutability except for the outermost item. Only `UnsafeCell` can "unfreeze",
+                // and we check that in `visit_aggregate`.
+                // This is not an inherent limitation, but one that we know to be true, because
+                // const qualification enforces it. We can lift it in the future.
+                match (self.mode, mutability) {
+                    // immutable references are fine everywhere
+                    (_, hir::Mutability::MutImmutable) => {},
+                    // all is "well and good" in the unsoundness of `static mut`
+
+                    // mutable references are ok in `static`. Either they are treated as immutable
+                    // because they are behind an immutable one, or they are behind an `UnsafeCell`
+                    // and thus ok.
+                    (InternMode::Static, hir::Mutability::MutMutable) => {},
+                    // we statically prevent `&mut T` via `const_qualif` and double check this here
+                    (InternMode::ConstBase, hir::Mutability::MutMutable) |
+                    (InternMode::Const, hir::Mutability::MutMutable) =>
+                        bug!("const qualif failed to prevent mutable references"),
+                }
+                // Compute the mutability with which we'll start visiting the allocation. This is
+                // what gets changed when we encounter an `UnsafeCell`.
+                let mutability = match (self.mutability, mutability) {
+                    // The only way a mutable reference actually works as a mutable reference is
+                    // by being in a `static mut` directly or behind another mutable reference.
+                    // If there's an immutable reference or we are inside a static, then our
+                    // mutable reference is equivalent to an immutable one. As an example:
+                    // `&&mut Foo` is semantically equivalent to `&&Foo`
+                    (Mutability::Mutable, hir::Mutability::MutMutable) => Mutability::Mutable,
+                    _ => Mutability::Immutable,
+                };
+                // Compute the mutability of the allocation
+                let intern_mutability = intern_mutability(
+                    self.ecx.tcx.tcx,
+                    self.param_env,
+                    mplace.layout.ty,
+                    self.ecx.tcx.span,
+                    mutability,
+                );
+                // Recursing behind references changes the intern mode for constants in order to
+                // cause assertions to trigger if we encounter any `UnsafeCell`s.
+                let mode = match self.mode {
+                    InternMode::ConstBase => InternMode::Const,
+                    other => other,
+                };
+                match self.intern_shallow(ptr, intern_mutability)? {
+                    // No need to recurse, these are interned already and statics may have
+                    // cycles, so we don't want to recurse there
+                    Some(IsStaticOrFn) => {},
+                    // intern everything referenced by this value. The mutability is taken from the
+                    // reference. It is checked above that mutable references only happen in
+                    // `static mut`
+                    None => self.ref_tracking.track((mplace, mutability, mode), || ()),
+                }
+            }
+        }
+        Ok(())
+    }
+}
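The mutability bookkeeping in `visit_primitive` boils down to a two-input join: a `&mut` keeps its mutability only while everything above it was mutable too, which is why `&&mut Foo` degrades to `&&Foo`. A minimal sketch of that join (a toy `Mutability`, not the compiler's `syntax::ast`/`hir` types):

```rust
#[derive(Copy, Clone, Debug, PartialEq)]
enum Mutability { Mutable, Immutable }

/// Combine the mutability tracked so far with that of a reference we just
/// crossed: a `&mut` only stays mutable if everything above it was mutable.
fn combine(outer: Mutability, reference: Mutability) -> Mutability {
    match (outer, reference) {
        (Mutability::Mutable, Mutability::Mutable) => Mutability::Mutable,
        _ => Mutability::Immutable,
    }
}

fn main() {
    // `static mut S: &mut T`: still mutable.
    assert_eq!(combine(Mutability::Mutable, Mutability::Mutable), Mutability::Mutable);
    // `&&mut T`: the inner `&mut` is demoted to immutable.
    assert_eq!(combine(Mutability::Immutable, Mutability::Mutable), Mutability::Immutable);
    // An immutable reference never regains mutability by itself.
    assert_eq!(combine(Mutability::Mutable, Mutability::Immutable), Mutability::Immutable);
}
```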
+
+/// Figure out the mutability of the allocation.
+/// Mutable if it has interior mutability *anywhere* in the type.
+fn intern_mutability<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    param_env: ParamEnv<'tcx>,
+    ty: Ty<'tcx>,
+    span: Span,
+    mutability: Mutability,
+) -> Mutability {
+    let has_interior_mutability = !ty.is_freeze(tcx, param_env, span);
+    if has_interior_mutability {
+        Mutability::Mutable
+    } else {
+        mutability
+    }
+}
+
+pub fn intern_const_alloc_recursive(
+    ecx: &mut CompileTimeEvalContext<'mir, 'tcx>,
+    def_id: DefId,
+    ret: MPlaceTy<'tcx>,
+    // FIXME(oli-obk): can we scrap the param env? I think we can, the final value of a const eval
+    // must always be monomorphic, right?
+    param_env: ty::ParamEnv<'tcx>,
+) -> InterpResult<'tcx> {
+    let tcx = ecx.tcx;
+    // this `mutability` is the mutability of the place, ignoring the type
+    let (mutability, base_intern_mode) = match tcx.static_mutability(def_id) {
+        Some(hir::Mutability::MutImmutable) => (Mutability::Immutable, InternMode::Static),
+        None => (Mutability::Immutable, InternMode::ConstBase),
+        // `static mut` doesn't care about interior mutability, it's mutable anyway
+        Some(hir::Mutability::MutMutable) => (Mutability::Mutable, InternMode::Static),
+    };
+
+    // type-based interning
+    let mut ref_tracking = RefTracking::new((ret, mutability, base_intern_mode));
+    let leftover_relocations = &mut FxHashSet::default();
+
+    // This mutability is the combination of the place mutability and the type mutability. If
+    // either is mutable, `alloc_mutability` is mutable. This exists because the entire allocation
+    // needs to be mutable if it contains an `UnsafeCell` anywhere. The other `mutability` exists
+    // so that the visitor does not treat everything outside the `UnsafeCell` as mutable.
+    let alloc_mutability = intern_mutability(
+        tcx.tcx, param_env, ret.layout.ty, tcx.span, mutability,
+    );
+
+    // start with the outermost allocation
+    InternVisitor {
+        ref_tracking: &mut ref_tracking,
+        ecx,
+        mode: base_intern_mode,
+        leftover_relocations,
+        param_env,
+        mutability,
+    }.intern_shallow(ret.ptr.to_ptr()?, alloc_mutability)?;
+
+    while let Some(((mplace, mutability, mode), _)) = ref_tracking.todo.pop() {
+        let interned = InternVisitor {
+            ref_tracking: &mut ref_tracking,
+            ecx,
+            mode,
+            leftover_relocations,
+            param_env,
+            mutability,
+        }.visit_value(mplace);
+        if let Err(error) = interned {
+            // This can happen when e.g. the tag of an enum is not a valid discriminant. We do have
+            // to read enum discriminants in order to find references in enum variant fields.
+            if let InterpError::ValidationFailure(_) = error.kind {
+                let err = crate::const_eval::error_to_const_error(&ecx, error);
+                match err.struct_error(ecx.tcx, "it is undefined behavior to use this value") {
+                    Ok(mut diag) => {
+                        diag.note("The rules on what exactly is undefined behavior aren't clear, \
+                            so this check might be overzealous. Please open an issue on the rust \
+                            compiler repository if you believe it should not be considered \
+                            undefined behavior",
+                        );
+                        diag.emit();
+                    }
+                    Err(ErrorHandled::TooGeneric) |
+                    Err(ErrorHandled::Reported) => {},
+                }
+            }
+        }
+    }
+
+    // Intern the rest of the allocations as mutable. These might be inside unions, padding, raw
+    // pointers, etc., so we can't intern them according to their type's rules.
+
+    let mut todo: Vec<_> = leftover_relocations.iter().cloned().collect();
+    while let Some(alloc_id) = todo.pop() {
+        if let Some((_, alloc)) = ecx.memory_mut().alloc_map.remove(&alloc_id) {
+            // We can't call the `intern` method here, as its logic is tailored to safe references.
+            // So we hand-roll the interning logic here again.
+            let alloc = tcx.intern_const_alloc(alloc);
+            tcx.alloc_map.lock().set_alloc_id_memory(alloc_id, alloc);
+            for &(_, ((), reloc)) in alloc.relocations.iter() {
+                if leftover_relocations.insert(reloc) {
+                    todo.push(reloc);
+                }
+            }
+        } else if ecx.memory().dead_alloc_map.contains_key(&alloc_id) {
+            // dangling pointer
+            return err!(ValidationFailure(
+                "encountered dangling pointer in final constant".into(),
+            ))
+        }
+    }
+    Ok(())
+}
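The final loop above is a plain worklist traversal: every relocation reachable from the already-interned allocations gets interned too, with the visited set doubling as cycle protection. The same pattern in a self-contained form (a toy allocation graph keyed by `u32` ids instead of `AllocId`):

```rust
use std::collections::{HashMap, HashSet};

/// Interns every id reachable from `roots` through `graph`
/// (alloc id -> ids that allocation references), tolerating cycles.
fn intern_reachable(graph: &HashMap<u32, Vec<u32>>, roots: &[u32]) -> HashSet<u32> {
    let mut interned: HashSet<u32> = HashSet::new();
    let mut todo: Vec<u32> = roots.to_vec();
    while let Some(id) = todo.pop() {
        // `insert` returning false means this id was already handled,
        // which is what keeps self-referential data from looping forever.
        if !interned.insert(id) {
            continue;
        }
        for &reloc in graph.get(&id).into_iter().flatten() {
            todo.push(reloc);
        }
    }
    interned
}

fn main() {
    let mut graph = HashMap::new();
    graph.insert(1, vec![2]);
    graph.insert(2, vec![1]); // a cycle, like a static referring to itself
    assert_eq!(intern_reachable(&graph, &[1]).len(), 2);
}
```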
diff --git a/src/librustc_mir/interpret/memory.rs b/src/librustc_mir/interpret/memory.rs
index a0a34df3a5ea4..f2a115f8f3599 100644
--- a/src/librustc_mir/interpret/memory.rs
+++ b/src/librustc_mir/interpret/memory.rs
@@ -20,6 +20,7 @@ use super::{
     Pointer, AllocId, Allocation, GlobalId, AllocationExtra,
     InterpResult, Scalar, InterpError, GlobalAlloc, PointerArithmetic,
     Machine, AllocMap, MayLeak, ErrorHandled, CheckInAllocMsg, InboundsCheck,
+    InterpError::ValidationFailure,
 };
 
 #[derive(Debug, PartialEq, Eq, Copy, Clone, Hash)]
@@ -55,12 +56,13 @@ pub struct Memory<'mir, 'tcx, M: Machine<'mir, 'tcx>> {
     /// the wrong type), so we let the machine override this type.
     /// Either way, if the machine allows writing to a static, doing so will
     /// create a copy of the static allocation here.
-    alloc_map: M::MemoryMap,
+    // FIXME: this should not be public, but interning currently needs access to it
+    pub(super) alloc_map: M::MemoryMap,
 
     /// To be able to compare pointers with NULL, and to check alignment for accesses
     /// to ZSTs (where pointers may dangle), we keep track of the size even for allocations
     /// that do not exist any more.
-    dead_alloc_map: FxHashMap<AllocId, (Size, Align)>,
+    pub(super) dead_alloc_map: FxHashMap<AllocId, (Size, Align)>,
 
     /// Extra data added by the machine.
     pub extra: M::MemoryExtra,
@@ -452,9 +454,13 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
         if let Ok(alloc) = self.get(id) {
             return Ok((Size::from_bytes(alloc.bytes.len() as u64), alloc.align));
         }
+        // Can't do this in the match argument: we may get cycle errors, since the lock
+        // would only be dropped after the match.
+        let alloc = self.tcx.alloc_map.lock().get(id);
         // Could also be a fn ptr or extern static
-        match self.tcx.alloc_map.lock().get(id) {
+        match alloc {
             Some(GlobalAlloc::Function(..)) => Ok((Size::ZERO, Align::from_bytes(1).unwrap())),
+            // `self.get` would also work, but can cause cycles if a static refers to itself
             Some(GlobalAlloc::Static(did)) => {
                 // The only way `get` couldn't have worked here is if this is an extern static
                 assert!(self.tcx.is_foreign_item(did));
@@ -463,14 +469,20 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
                 let layout = self.tcx.layout_of(ParamEnv::empty().and(ty)).unwrap();
                 Ok((layout.size, layout.align.abi))
             }
-            _ => match liveness {
-                InboundsCheck::MaybeDead => {
-                    // Must be a deallocated pointer
-                    Ok(*self.dead_alloc_map.get(&id).expect(
-                        "allocation missing in dead_alloc_map"
-                    ))
-                },
-                InboundsCheck::Live => err!(DanglingPointerDeref),
+            _ => {
+                if let Ok(alloc) = self.get(id) {
+                    return Ok((Size::from_bytes(alloc.bytes.len() as u64), alloc.align));
+                }
+                match liveness {
+                    InboundsCheck::MaybeDead => {
+                        // Must be a deallocated pointer
+                        self.dead_alloc_map.get(&id).cloned().ok_or_else(||
+                            ValidationFailure("allocation missing in dead_alloc_map".to_string())
+                                .into()
+                        )
+                    },
+                    InboundsCheck::Live => err!(DanglingPointerDeref),
+                }
            },
        }
    }
@@ -633,56 +645,6 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
     }
 }
 
-/// Interning (for CTFE)
-impl<'mir, 'tcx, M> Memory<'mir, 'tcx, M>
-where
-    M: Machine<'mir, 'tcx, PointerTag = (), AllocExtra = (), MemoryExtra = ()>,
-    // FIXME: Working around https://github.com/rust-lang/rust/issues/24159
-    M::MemoryMap: AllocMap<AllocId, (MemoryKind<M::MemoryKinds>, Allocation)>,
-{
-    /// mark an allocation as static and initialized, either mutable or not
-    pub fn intern_static(
-        &mut self,
-        alloc_id: AllocId,
-        mutability: Mutability,
-    ) -> InterpResult<'tcx> {
-        trace!(
-            "mark_static_initialized {:?}, mutability: {:?}",
-            alloc_id,
-            mutability
-        );
-        // remove allocation
-        let (kind, mut alloc) = self.alloc_map.remove(&alloc_id).unwrap();
-        match kind {
-            MemoryKind::Machine(_) => bug!("Static cannot refer to machine memory"),
-            MemoryKind::Stack | MemoryKind::Vtable => {},
-        }
-        // ensure llvm knows not to put this into immutable memory
-        alloc.mutability = mutability;
-        let alloc = self.tcx.intern_const_alloc(alloc);
-        self.tcx.alloc_map.lock().set_alloc_id_memory(alloc_id, alloc);
-        // recurse into inner allocations
-        for &(_, alloc) in alloc.relocations.values() {
-            // FIXME: Reusing the mutability here is likely incorrect. It is originally
-            // determined via `is_freeze`, and data is considered frozen if there is no
-            // `UnsafeCell` *immediately* in that data -- however, this search stops
-            // at references. So whenever we follow a reference, we should likely
-            // assume immutability -- and we should make sure that the compiler
-            // does not permit code that would break this!
-            if self.alloc_map.contains_key(&alloc) {
-                // Not yet interned, so proceed recursively
-                self.intern_static(alloc, mutability)?;
-            } else if self.dead_alloc_map.contains_key(&alloc) {
-                // dangling pointer
-                return err!(ValidationFailure(
-                    "encountered dangling pointer in final constant".into(),
-                ))
-            }
-        }
-        Ok(())
-    }
-}
-
 /// Reading and writing.
 impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
     pub fn copy(
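The `let alloc = self.tcx.alloc_map.lock().get(id);` dance above is a general Rust pattern worth spelling out: a lock acquired in a `match` scrutinee stays held for the entire `match`, so any re-entrant lookup from an arm would deadlock (which the query engine surfaces as a cycle error). A self-contained illustration with a plain `Mutex`:

```rust
use std::sync::Mutex;

fn lookup(map: &Mutex<Vec<u32>>, i: usize) -> u32 {
    // Risky shape: the guard returned by `lock()` would live until the end
    // of the `match`, so calling `lookup` again from an arm deadlocks:
    //
    //     match map.lock().unwrap().get(i).cloned() {
    //         Some(v) => v,
    //         None => lookup(map, 0), // deadlock: lock still held
    //     }

    // Safe shape: bind the result first; the guard is dropped at the `;`.
    let found = map.lock().unwrap().get(i).cloned();
    match found {
        Some(v) => v,
        None => lookup(map, 0),
    }
}

fn main() {
    let map = Mutex::new(vec![7, 8]);
    assert_eq!(lookup(&map, 5), 7); // misses at index 5, retries at index 0
}
```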
diff --git a/src/librustc_mir/interpret/mod.rs b/src/librustc_mir/interpret/mod.rs
index d59bee6943a51..0293a8366d083 100644
--- a/src/librustc_mir/interpret/mod.rs
+++ b/src/librustc_mir/interpret/mod.rs
@@ -14,6 +14,7 @@ mod traits;
 mod validity;
 mod intrinsics;
 mod visitor;
+mod intern;
 
 pub use rustc::mir::interpret::*; // have all the `interpret` symbols in one place: here
 
@@ -34,3 +35,5 @@ pub use self::visitor::{ValueVisitor, MutValueVisitor};
 pub use self::validity::RefTracking;
 
 pub(super) use self::intrinsics::type_name;
+
+pub use self::intern::intern_const_alloc_recursive;
diff --git a/src/librustc_mir/interpret/operand.rs b/src/librustc_mir/interpret/operand.rs
index 4b1e782ba1a45..1b451e0b8f18f 100644
--- a/src/librustc_mir/interpret/operand.rs
+++ b/src/librustc_mir/interpret/operand.rs
@@ -4,7 +4,9 @@
 use std::convert::TryInto;
 
 use rustc::{mir, ty};
-use rustc::ty::layout::{self, Size, LayoutOf, TyLayout, HasDataLayout, IntegerExt, VariantIdx};
+use rustc::ty::layout::{
+    self, Size, LayoutOf, TyLayout, HasDataLayout, IntegerExt, VariantIdx,
+};
 
 use rustc::mir::interpret::{
     GlobalId, AllocId, CheckInAllocMsg,
@@ -217,7 +219,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpretCx<'mir, 'tcx, M> {
     fn try_read_immediate_from_mplace(
         &self,
         mplace: MPlaceTy<'tcx, M::PointerTag>,
-    ) -> InterpResult<'tcx, Option<Immediate<M::PointerTag>>> {
+    ) -> InterpResult<'tcx, Option<ImmTy<'tcx, M::PointerTag>>> {
         if mplace.layout.is_unsized() {
             // Don't touch unsized
             return Ok(None);
@@ -228,7 +230,10 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpretCx<'mir, 'tcx, M> {
             // Not all ZSTs have a layout we would handle below, so just short-circuit them
             // all here.
             self.memory.check_align(ptr, ptr_align)?;
-            return Ok(Some(Immediate::Scalar(Scalar::zst().into())));
+            return Ok(Some(ImmTy {
+                imm: Immediate::Scalar(Scalar::zst().into()),
+                layout: mplace.layout,
+            }));
         }
 
         // check for integer pointers before alignment to report better errors
@@ -239,7 +244,10 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpretCx<'mir, 'tcx, M> {
                 let scalar = self.memory
                     .get(ptr.alloc_id)?
                     .read_scalar(self, ptr, mplace.layout.size)?;
-                Ok(Some(Immediate::Scalar(scalar)))
+                Ok(Some(ImmTy {
+                    imm: Immediate::Scalar(scalar),
+                    layout: mplace.layout,
+                }))
             }
             layout::Abi::ScalarPair(ref a, ref b) => {
                 let (a, b) = (&a.value, &b.value);
@@ -256,7 +264,10 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpretCx<'mir, 'tcx, M> {
                 let b_val = self.memory
                     .get(ptr.alloc_id)?
                     .read_scalar(self, b_ptr, b_size)?;
-                Ok(Some(Immediate::ScalarPair(a_val, b_val)))
+                Ok(Some(ImmTy {
+                    imm: Immediate::ScalarPair(a_val, b_val),
+                    layout: mplace.layout,
+                }))
             }
             _ => Ok(None),
         }
@@ -271,13 +282,13 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpretCx<'mir, 'tcx, M> {
     pub(crate) fn try_read_immediate(
         &self,
         src: OpTy<'tcx, M::PointerTag>,
-    ) -> InterpResult<'tcx, Result<Immediate<M::PointerTag>, MemPlace<M::PointerTag>>> {
+    ) -> InterpResult<'tcx, Result<ImmTy<'tcx, M::PointerTag>, MPlaceTy<'tcx, M::PointerTag>>> {
         Ok(match src.try_as_mplace() {
             Ok(mplace) => {
                 if let Some(val) = self.try_read_immediate_from_mplace(mplace)? {
                     Ok(val)
                 } else {
-                    Err(*mplace)
+                    Err(mplace)
                }
            },
            Err(val) => Ok(val),
@@ -291,7 +302,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpretCx<'mir, 'tcx, M> {
         op: OpTy<'tcx, M::PointerTag>
     ) -> InterpResult<'tcx, ImmTy<'tcx, M::PointerTag>> {
         if let Ok(imm) = self.try_read_immediate(op)? {
-            Ok(ImmTy { imm, layout: op.layout })
+            Ok(imm)
         } else {
             bug!("primitive read failed for type: {:?}", op.layout.ty);
         }
     }
@@ -339,9 +350,9 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpretCx<'mir, 'tcx, M> {
             return Ok(OpTy { op: Operand::Immediate(immediate), layout: field_layout });
         }
         let offset = op.layout.fields.offset(field);
-        let immediate = match base {
+        let immediate = match *base {
             // the field covers the entire type
-            _ if offset.bytes() == 0 && field_layout.size == op.layout.size => base,
+            _ if offset.bytes() == 0 && field_layout.size == op.layout.size => *base,
             // extract fields from types with `ScalarPair` ABI
             Immediate::ScalarPair(a, b) => {
                 let val = if offset.bytes() == 0 { a } else { b };
@@ -527,11 +538,11 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpretCx<'mir, 'tcx, M> {
             self.layout_of(self.monomorphize(val.ty)?)
         })?;
         let op = match val.val {
-            ConstValue::ByRef(ptr, _alloc) => {
+            ConstValue::ByRef(ptr, align, _alloc) => {
                 // We rely on mutability being set correctly in that allocation to prevent writes
                 // where none should happen.
                 let ptr = self.tag_static_base_pointer(ptr);
-                Operand::Indirect(MemPlace::from_ptr(ptr, layout.align.abi))
+                Operand::Indirect(MemPlace::from_ptr(ptr, align))
             },
             ConstValue::Scalar(x) =>
                 Operand::Immediate(Immediate::Scalar(tag_scalar(x).into())),
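A recurring theme in the `interpret` changes is that an `Immediate` now travels together with its layout as an `ImmTy`, so callers of `try_read_immediate` and `try_as_mplace` no longer have to re-pair a value with the right layout by hand. The shape of that API change in miniature (toy types, not the interpreter's real ones):

```rust
struct TyLayout { size_bytes: u64 }

enum Immediate { Scalar(u128) }

/// An immediate value plus the layout it was read at.
struct ImmTy { imm: Immediate, layout: TyLayout }

struct MPlaceTy { layout: TyLayout }

enum OpTy { Indirect(MPlaceTy), Immediate(ImmTy) }

impl OpTy {
    /// Either a memory place or an immediate -- but in both cases the
    /// layout rides along, which is what the PR's signature changes ensure.
    fn try_as_mplace(self) -> Result<MPlaceTy, ImmTy> {
        match self {
            OpTy::Indirect(mplace) => Ok(mplace),
            OpTy::Immediate(imm) => Err(imm),
        }
    }
}

fn main() {
    let place = OpTy::Indirect(MPlaceTy { layout: TyLayout { size_bytes: 8 } });
    assert!(place.try_as_mplace().is_ok());

    let imm = OpTy::Immediate(ImmTy {
        imm: Immediate::Scalar(42),
        layout: TyLayout { size_bytes: 4 },
    });
    match imm.try_as_mplace() {
        // the layout arrived together with the value, no re-pairing needed
        Err(imm_ty) => assert_eq!(imm_ty.layout.size_bytes, 4),
        Ok(_) => unreachable!(),
    }
}
```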
diff --git a/src/librustc_mir/interpret/place.rs b/src/librustc_mir/interpret/place.rs
index a8f88af3f3833..1285549015cdd 100644
--- a/src/librustc_mir/interpret/place.rs
+++ b/src/librustc_mir/interpret/place.rs
@@ -232,10 +232,10 @@ impl<'tcx, Tag> MPlaceTy<'tcx, Tag> {
 
 impl<'tcx, Tag: ::std::fmt::Debug + Copy> OpTy<'tcx, Tag> {
     #[inline(always)]
-    pub fn try_as_mplace(self) -> Result<MPlaceTy<'tcx, Tag>, Immediate<Tag>> {
+    pub fn try_as_mplace(self) -> Result<MPlaceTy<'tcx, Tag>, ImmTy<'tcx, Tag>> {
         match *self {
             Operand::Indirect(mplace) => Ok(MPlaceTy { mplace, layout: self.layout }),
-            Operand::Immediate(imm) => Err(imm),
+            Operand::Immediate(imm) => Err(ImmTy { imm, layout: self.layout }),
         }
     }
 
@@ -660,7 +660,7 @@ where
 
         if M::enforce_validity(self) {
             // Data got changed, better make sure it matches the type!
-            self.validate_operand(self.place_to_op(dest)?, vec![], None, /*const_mode*/false)?;
+            self.validate_operand(self.place_to_op(dest)?, vec![], None)?;
         }
 
         Ok(())
@@ -677,7 +677,7 @@ where
 
         if M::enforce_validity(self) {
             // Data got changed, better make sure it matches the type!
-            self.validate_operand(dest.into(), vec![], None, /*const_mode*/ false)?;
+            self.validate_operand(dest.into(), vec![], None)?;
         }
 
         Ok(())
@@ -809,7 +809,7 @@ where
 
         if M::enforce_validity(self) {
             // Data got changed, better make sure it matches the type!
-            self.validate_operand(self.place_to_op(dest)?, vec![], None, /*const_mode*/false)?;
+            self.validate_operand(self.place_to_op(dest)?, vec![], None)?;
         }
 
         Ok(())
@@ -836,7 +836,7 @@ where
                 // Yay, we got a value that we can write directly.
                 // FIXME: Add a check to make sure that if `src` is indirect,
                 // it does not overlap with `dest`.
-                return self.write_immediate_no_validate(src_val, dest);
+                return self.write_immediate_no_validate(*src_val, dest);
             }
             Err(mplace) => mplace,
         };
@@ -897,7 +897,7 @@ where
 
         if M::enforce_validity(self) {
             // Data got changed, better make sure it matches the type!
-            self.validate_operand(dest.into(), vec![], None, /*const_mode*/false)?;
+            self.validate_operand(dest.into(), vec![], None)?;
         }
 
         Ok(())
diff --git a/src/librustc_mir/interpret/validity.rs b/src/librustc_mir/interpret/validity.rs
index 3460c21b52d03..d747eddd8d7ba 100644
--- a/src/librustc_mir/interpret/validity.rs
+++ b/src/librustc_mir/interpret/validity.rs
@@ -1,5 +1,4 @@
 use std::fmt::Write;
-use std::hash::Hash;
 use std::ops::RangeInclusive;
 
 use syntax_pos::symbol::{sym, Symbol};
@@ -11,6 +10,8 @@ use rustc::mir::interpret::{
     Scalar, GlobalAlloc, InterpResult, InterpError, CheckInAllocMsg,
 };
 
+use std::hash::Hash;
+
 use super::{
     OpTy, Machine, InterpretCx, ValueVisitor, MPlaceTy,
 };
@@ -76,19 +77,34 @@ pub enum PathElem {
 }
 
 /// State for tracking recursive validation of references
-pub struct RefTracking<T> {
+pub struct RefTracking<T, PATH = Vec<PathElem>> {
     pub seen: FxHashSet<T>,
-    pub todo: Vec<(T, Vec<PathElem>)>,
+    pub todo: Vec<(T, PATH)>,
 }
 
-impl<T: Copy + Eq + Hash> RefTracking<T> {
+impl<T: Copy + Eq + Hash + std::fmt::Debug, PATH: Default> RefTracking<T, PATH> {
+    pub fn empty() -> Self {
+        RefTracking {
+            seen: FxHashSet::default(),
+            todo: vec![],
+        }
+    }
     pub fn new(op: T) -> Self {
-        let mut ref_tracking = RefTracking {
+        let mut ref_tracking_for_consts = RefTracking {
             seen: FxHashSet::default(),
-            todo: vec![(op, Vec::new())],
+            todo: vec![(op, PATH::default())],
         };
-        ref_tracking.seen.insert(op);
-        ref_tracking
+        ref_tracking_for_consts.seen.insert(op);
+        ref_tracking_for_consts
+    }
+
+    pub fn track(&mut self, op: T, path: impl FnOnce() -> PATH) {
+        if self.seen.insert(op) {
+            trace!("Recursing below ptr {:#?}", op);
+            let path = path();
+            // Remember to come back to this later.
+            self.todo.push((op, path));
+        }
    }
 }
 
@@ -154,8 +170,10 @@ struct ValidityVisitor<'rt, 'mir, 'tcx, M: Machine<'mir, 'tcx>> {
     /// starts must not be changed!  `visit_fields` and `visit_array` rely on
     /// this stack discipline.
     path: Vec<PathElem>,
-    ref_tracking: Option<&'rt mut RefTracking<MPlaceTy<'tcx, M::PointerTag>>>,
-    const_mode: bool,
+    ref_tracking_for_consts: Option<&'rt mut RefTracking<
+        MPlaceTy<'tcx, M::PointerTag>,
+        Vec<PathElem>,
+    >>,
     ecx: &'rt InterpretCx<'mir, 'tcx, M>,
 }
 
@@ -314,7 +332,7 @@ impl<'rt, 'mir, 'tcx, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
                 // types below!
                 let size = value.layout.size;
                 let value = value.to_scalar_or_undef();
-                if self.const_mode {
+                if self.ref_tracking_for_consts.is_some() {
                     // Integers/floats in CTFE: Must be scalar bits, pointers are dangerous
                     try_validation!(value.to_bits(size),
                         value, self.path, "initialized plain (non-pointer) bytes");
@@ -324,7 +342,7 @@ impl<'rt, 'mir, 'tcx, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
                 }
             }
             ty::RawPtr(..) => {
-                if self.const_mode {
+                if self.ref_tracking_for_consts.is_some() {
                     // Integers/floats in CTFE: For consistency with integers, we do not
                     // accept undef.
                     let _ptr = try_validation!(value.to_scalar_ptr(),
@@ -393,9 +411,9 @@ impl<'rt, 'mir, 'tcx, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
                 }
             }
             // Recursive checking
-            if let Some(ref mut ref_tracking) = self.ref_tracking {
-                assert!(self.const_mode, "We should only do recursive checking in const mode");
+            if let Some(ref mut ref_tracking) = self.ref_tracking_for_consts {
                 let place = self.ecx.ref_to_mplace(value)?;
+                // FIXME(RalfJ): check ZST for inbound pointers
                 if size != Size::ZERO {
                     // Non-ZST also have to be dereferencable
                     let ptr = try_validation!(place.ptr.to_ptr(),
@@ -423,16 +441,15 @@ impl<'rt, 'mir, 'tcx, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
                 // before. Proceed recursively even for integer pointers, no
                 // reason to skip them! They are (recursively) valid for some ZST,
                 // but not for others (e.g., `!` is a ZST).
-                if ref_tracking.seen.insert(place) {
-                    trace!("Recursing below ptr {:#?}", *place);
+                let path = &self.path;
+                ref_tracking.track(place, || {
                     // We need to clone the path anyway, make sure it gets created
                     // with enough space for the additional `Deref`.
-                    let mut new_path = Vec::with_capacity(self.path.len()+1);
-                    new_path.clone_from(&self.path);
+                    let mut new_path = Vec::with_capacity(path.len() + 1);
+                    new_path.clone_from(path);
                     new_path.push(PathElem::Deref);
-                    // Remember to come back to this later.
-                    ref_tracking.todo.push((place, new_path));
-                }
+                    new_path
+                });
             }
         }
         ty::FnPtr(_sig) => {
@@ -488,10 +505,17 @@ impl<'rt, 'mir, 'tcx, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
                     let non_null = self.ecx.memory.check_align(
                         Scalar::Ptr(ptr), Align::from_bytes(1).unwrap()
-                    ).is_ok() ||
-                    self.ecx.memory.get_fn(ptr).is_ok();
+                    ).is_ok();
                     if !non_null {
-                        // could be NULL
+                        // These conditions are just here to improve the diagnostics, so we can
+                        // differentiate between null pointers and dangling pointers.
+                        if self.ref_tracking_for_consts.is_some() &&
+                            self.ecx.memory.get(ptr.alloc_id).is_err() &&
+                            self.ecx.memory.get_fn(ptr).is_err() {
+                            return validation_failure!(
+                                "encountered dangling pointer", self.path
+                            );
+                        }
                         return validation_failure!("a potentially NULL pointer", self.path);
                     }
                     return Ok(());
@@ -574,7 +598,7 @@ impl<'rt, 'mir, 'tcx, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
                 self.ecx,
                 ptr,
                 size,
-                /*allow_ptr_and_undef*/!self.const_mode,
+                /*allow_ptr_and_undef*/ self.ref_tracking_for_consts.is_none(),
             ) {
                 // In the happy case, we needn't check anything else.
                 Ok(()) => {},
@@ -612,23 +636,25 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpretCx<'mir, 'tcx, M> {
     /// is an indirect operand.
     /// It will error if the bits at the destination do not match the ones described by the layout.
     ///
-    /// `ref_tracking` can be `None` to avoid recursive checking below references.
+    /// `ref_tracking_for_consts` can be `None` to avoid recursive checking below references.
     /// This also toggles between "run-time" (no recursion) and "compile-time" (with recursion)
-    /// validation (e.g., pointer values are fine in integers at runtime).
+    /// validation (e.g., pointer values are fine in integers at runtime) and various other
+    /// const-specific validation checks.
     pub fn validate_operand(
         &self,
         op: OpTy<'tcx, M::PointerTag>,
         path: Vec<PathElem>,
-        ref_tracking: Option<&mut RefTracking<MPlaceTy<'tcx, M::PointerTag>>>,
-        const_mode: bool,
+        ref_tracking_for_consts: Option<&mut RefTracking<
+            MPlaceTy<'tcx, M::PointerTag>,
+            Vec<PathElem>,
+        >>,
     ) -> InterpResult<'tcx> {
         trace!("validate_operand: {:?}, {:?}", *op, op.layout.ty);
 
         // Construct a visitor
         let mut visitor = ValidityVisitor {
             path,
-            ref_tracking,
-            const_mode,
+            ref_tracking_for_consts,
            ecx: self,
        };
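The generalized `RefTracking` is a reusable seen-set/todo-list pair whose `track` takes the path as a closure, so the potentially expensive path clone only happens for genuinely new places. A self-contained version of the same pattern (using `std`'s `HashSet` in place of `FxHashSet`):

```rust
use std::collections::HashSet;
use std::hash::Hash;

struct RefTracking<T, P> {
    seen: HashSet<T>,
    todo: Vec<(T, P)>,
}

impl<T: Copy + Eq + Hash, P> RefTracking<T, P> {
    fn empty() -> Self {
        RefTracking { seen: HashSet::new(), todo: vec![] }
    }

    /// Queue `op` for later processing unless it was already seen.
    /// The path closure only runs in the "new" case.
    fn track(&mut self, op: T, path: impl FnOnce() -> P) {
        if self.seen.insert(op) {
            self.todo.push((op, path()));
        }
    }
}

fn main() {
    let mut rt = RefTracking::empty();
    rt.track(1u32, || vec!["deref"]);
    rt.track(1u32, || unreachable!("duplicate: the closure must not run"));
    assert_eq!(rt.todo.len(), 1);
}
```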
diff --git a/src/librustc_mir/interpret/visitor.rs b/src/librustc_mir/interpret/visitor.rs
index 9150f16526ba7..d04dc3ab37ed3 100644
--- a/src/librustc_mir/interpret/visitor.rs
+++ b/src/librustc_mir/interpret/visitor.rs
@@ -81,6 +81,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Value<'mir, 'tcx, M> for OpTy<'tcx, M::
         ecx.operand_field(self, field)
     }
 }
+
 impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Value<'mir, 'tcx, M> for MPlaceTy<'tcx, M::PointerTag> {
     #[inline(always)]
     fn layout(&self) -> TyLayout<'tcx> {
diff --git a/src/librustc_mir/monomorphize/collector.rs b/src/librustc_mir/monomorphize/collector.rs
index acc786050a84e..c64bb73802dc8 100644
--- a/src/librustc_mir/monomorphize/collector.rs
+++ b/src/librustc_mir/monomorphize/collector.rs
@@ -1262,7 +1262,7 @@ fn collect_const<'tcx>(
         ConstValue::Scalar(Scalar::Ptr(ptr)) =>
             collect_miri(tcx, ptr.alloc_id, output),
         ConstValue::Slice { data: alloc, start: _, end: _ } |
-        ConstValue::ByRef(_, alloc) => {
+        ConstValue::ByRef(_, _, alloc) => {
             for &((), id) in alloc.relocations.values() {
                 collect_miri(tcx, id, output);
             }
diff --git a/src/librustc_mir/transform/const_prop.rs b/src/librustc_mir/transform/const_prop.rs
index 2ec5c192726b0..5b567512a7b6f 100644
--- a/src/librustc_mir/transform/const_prop.rs
+++ b/src/librustc_mir/transform/const_prop.rs
@@ -551,7 +551,12 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
         source_info: SourceInfo,
     ) {
         trace!("attepting to replace {:?} with {:?}", rval, value);
-        if let Err(e) = self.ecx.validate_operand(value, vec![], None, true) {
+        if let Err(e) = self.ecx.validate_operand(
+            value,
+            vec![],
+            // FIXME: is ref tracking too expensive?
+            Some(&mut interpret::RefTracking::empty()),
+        ) {
             trace!("validation error, attempt failed: {:?}", e);
             return;
         }
@@ -562,7 +567,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
         });
 
         if let Some(Ok(imm)) = imm {
-            match imm {
+            match *imm {
                 interpret::Immediate::Scalar(ScalarMaybeUndef::Scalar(scalar)) => {
                     *rval = Rvalue::Use(
                         self.operand_from_scalar(scalar, value.layout.ty, source_info.span));
diff --git a/src/librustc_mir/transform/qualify_consts.rs b/src/librustc_mir/transform/qualify_consts.rs
index b6abfdb7425aa..f082b5e5a046c 100644
--- a/src/librustc_mir/transform/qualify_consts.rs
+++ b/src/librustc_mir/transform/qualify_consts.rs
@@ -738,27 +738,29 @@ impl<'a, 'tcx> Checker<'a, 'tcx> {
                 qualifs[IsNotPromotable] = true;
 
                 if self.mode.requires_const_checking() {
-                    if let BorrowKind::Mut { .. } = kind {
-                        let mut err = struct_span_err!(self.tcx.sess, self.span, E0017,
-                                                       "references in {}s may only refer \
-                                                        to immutable values", self.mode);
-                        err.span_label(self.span, format!("{}s require immutable values",
-                                                            self.mode));
-                        if self.tcx.sess.teach(&err.get_code().unwrap()) {
-                            err.note("References in statics and constants may only refer to \
-                                      immutable values.\n\n\
-                                      Statics are shared everywhere, and if they refer to \
-                                      mutable data one might violate memory safety since \
-                                      holding multiple mutable references to shared data is \
-                                      not allowed.\n\n\
-                                      If you really want global mutable state, try using \
-                                      static mut or a global UnsafeCell.");
+                    if !self.tcx.sess.opts.debugging_opts.unleash_the_miri_inside_of_you {
+                        if let BorrowKind::Mut { .. } = kind {
+                            let mut err = struct_span_err!(self.tcx.sess, self.span, E0017,
+                                                           "references in {}s may only refer \
+                                                            to immutable values", self.mode);
+                            err.span_label(self.span, format!("{}s require immutable values",
+                                                                self.mode));
+                            if self.tcx.sess.teach(&err.get_code().unwrap()) {
+                                err.note("References in statics and constants may only refer to \
+                                          immutable values.\n\n\
+                                          Statics are shared everywhere, and if they refer to \
+                                          mutable data one might violate memory safety since \
+                                          holding multiple mutable references to shared data is \
+                                          not allowed.\n\n\
+                                          If you really want global mutable state, try using \
+                                          static mut or a global UnsafeCell.");
+                            }
+                            err.emit();
+                        } else {
+                            span_err!(self.tcx.sess, self.span, E0492,
+                                      "cannot borrow a constant which may contain \
+                                       interior mutability, create a static instead");
                         }
-                        err.emit();
-                    } else {
-                        span_err!(self.tcx.sess, self.span, E0492,
-                                  "cannot borrow a constant which may contain \
-                                   interior mutability, create a static instead");
                     }
                 }
             } else if let BorrowKind::Mut { .. } | BorrowKind::Shared = kind {
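What the `unleash_the_miri_inside_of_you` guard skips, concretely: without the flag, const qualification rejects these forms statically; with `-Zunleash-the-miri-inside-of-you` they only produce "skipping const checks" warnings and soundness falls to the new interner. A non-compiling sketch of roughly the diagnostics rustc produced at the time (the `miri_unleashed` tests below exercise the unleashed side):

```rust
// Rejected by const qualification when the flag is off:

static FOO: &mut u32 = &mut 42;
// error[E0017]: references in statics may only refer to immutable values

const MEH: &std::cell::UnsafeCell<i32> = &std::cell::UnsafeCell::new(5);
// error[E0492]: cannot borrow a constant which may contain interior
//               mutability, create a static instead
```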
diff --git a/src/librustc_typeck/check/mod.rs b/src/librustc_typeck/check/mod.rs
index 177e215804408..fe742a74fffec 100644
--- a/src/librustc_typeck/check/mod.rs
+++ b/src/librustc_typeck/check/mod.rs
@@ -1448,7 +1448,7 @@ fn maybe_check_static_with_link_section(tcx: TyCtxt<'_>, id: DefId, span: Span)
     };
     let param_env = ty::ParamEnv::reveal_all();
     if let Ok(static_) = tcx.const_eval(param_env.and(cid)) {
-        let alloc = if let ConstValue::ByRef(_, allocation) = static_.val {
+        let alloc = if let ConstValue::ByRef(_, _, allocation) = static_.val {
             allocation
         } else {
             bug!("Matching on non-ByRef static")
diff --git a/src/test/ui/consts/miri_unleashed/mutable_references.rs b/src/test/ui/consts/miri_unleashed/mutable_references.rs
new file mode 100644
index 0000000000000..5f9888053a196
--- /dev/null
+++ b/src/test/ui/consts/miri_unleashed/mutable_references.rs
@@ -0,0 +1,35 @@
+// compile-flags: -Zunleash-the-miri-inside-of-you
+#![allow(const_err)]
+
+use std::cell::UnsafeCell;
+
+// a test demonstrating what things we could allow with a smarter const qualification
+
+static FOO: &&mut u32 = &&mut 42;
+
+static BAR: &mut () = &mut ();
+
+struct Foo<T>(T);
+
+static BOO: &mut Foo<()> = &mut Foo(());
+
+struct Meh {
+    x: &'static UnsafeCell<i32>,
+}
+
+unsafe impl Sync for Meh {}
+
+static MEH: Meh = Meh {
+    x: &UnsafeCell::new(42),
+};
+
+static OH_YES: &mut i32 = &mut 42;
+
+fn main() {
+    unsafe {
+        *MEH.x.get() = 99; //~ WARN skipping const checks
+        //~^ WARN skipping const checks
+    }
+    *OH_YES = 99; //~ ERROR cannot assign to `*OH_YES`, as `OH_YES` is an immutable static item
+    //~^ WARN skipping const checks
+}
diff --git a/src/test/ui/consts/miri_unleashed/mutable_references.stderr b/src/test/ui/consts/miri_unleashed/mutable_references.stderr
new file mode 100644
index 0000000000000..b870aca640a0a
--- /dev/null
+++ b/src/test/ui/consts/miri_unleashed/mutable_references.stderr
@@ -0,0 +1,26 @@
+warning: skipping const checks
+  --> $DIR/mutable_references.rs:30:10
+   |
+LL |         *MEH.x.get() = 99;
+   |          ^^^^^
+
+warning: skipping const checks
+  --> $DIR/mutable_references.rs:30:9
+   |
+LL |         *MEH.x.get() = 99;
+   |         ^^^^^^^^^^^^^^^^^
+
+warning: skipping const checks
+  --> $DIR/mutable_references.rs:33:5
+   |
+LL |     *OH_YES = 99;
+   |     ^^^^^^^^^^^^
+
+error[E0594]: cannot assign to `*OH_YES`, as `OH_YES` is an immutable static item
+  --> $DIR/mutable_references.rs:33:5
+   |
+LL |     *OH_YES = 99;
+   |     ^^^^^^^^^^^^ cannot assign
+
+error: aborting due to previous error
+
diff --git a/src/test/ui/consts/miri_unleashed/mutable_references_ice.rs b/src/test/ui/consts/miri_unleashed/mutable_references_ice.rs
new file mode 100644
index 0000000000000..4a77534c6c70c
--- /dev/null
+++ b/src/test/ui/consts/miri_unleashed/mutable_references_ice.rs
@@ -0,0 +1,28 @@
+// compile-flags: -Zunleash-the-miri-inside-of-you
+// failure-status: 101
+// rustc-env:RUST_BACKTRACE=0
+// normalize-stderr-test "note: rustc 1.* running on .*" -> "note: rustc VERSION running on TARGET"
+// normalize-stderr-test "note: compiler flags: .*" -> "note: compiler flags: FLAGS"
+
+#![allow(const_err)]
+
+use std::cell::UnsafeCell;
+
+// this test ICEs to ensure that our mutability story is sound
+
+struct Meh {
+    x: &'static UnsafeCell<i32>,
+}
+
+unsafe impl Sync for Meh {}
+
+// the following will never be ok!
+const MUH: Meh = Meh {
+    x: &UnsafeCell::new(42),
+};
+
+fn main() {
+    unsafe {
+        *MUH.x.get() = 99; //~ WARN skipping const checks
+    }
+}
diff --git a/src/test/ui/consts/miri_unleashed/mutable_references_ice.stderr b/src/test/ui/consts/miri_unleashed/mutable_references_ice.stderr
new file mode 100644
index 0000000000000..ff2351e1fdc50
--- /dev/null
+++ b/src/test/ui/consts/miri_unleashed/mutable_references_ice.stderr
@@ -0,0 +1,21 @@
+warning: skipping const checks
+  --> $DIR/mutable_references_ice.rs:26:9
+   |
+LL |         *MUH.x.get() = 99;
+   |         ^^^^^^^^^^^^^^^^^
+
+thread 'rustc' panicked at 'assertion failed: `(left != right)`
+  left: `Const`,
+ right: `Const`: UnsafeCells are not allowed behind references in constants. This should have been prevented statically by const qualification. If this were allowed one would be able to change a constant at one use site and other use sites may arbitrarily decide to change, too.', src/librustc_mir/interpret/intern.rs:127:17
+note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace.
+
+error: internal compiler error: unexpected panic
+
+note: the compiler unexpectedly panicked. this is a bug.
+
+note: we would appreciate a bug report: https://github.com/rust-lang/rust/blob/master/CONTRIBUTING.md#bug-reports
+
+note: rustc VERSION running on TARGET
+
+note: compiler flags: FLAGS
+
diff --git a/src/test/ui/consts/packed_pattern.rs b/src/test/ui/consts/packed_pattern.rs
new file mode 100644
index 0000000000000..37ae45b6df7ea
--- /dev/null
+++ b/src/test/ui/consts/packed_pattern.rs
@@ -0,0 +1,19 @@
+// run-pass
+
+#[derive(PartialEq, Eq, Copy, Clone)]
+#[repr(packed)]
+struct Foo {
+    field: (i64, u32, u32, u32),
+}
+
+const FOO: Foo = Foo {
+    field: (5, 6, 7, 8),
+};
+
+fn main() {
+    match FOO {
+        Foo { field: (5, 6, 7, 8) } => {},
+        FOO => unreachable!(),
+        _ => unreachable!(),
+    }
+}
diff --git a/src/test/ui/consts/packed_pattern2.rs b/src/test/ui/consts/packed_pattern2.rs
new file mode 100644
index 0000000000000..174161fbd9460
--- /dev/null
+++ b/src/test/ui/consts/packed_pattern2.rs
@@ -0,0 +1,27 @@
+// run-pass
+
+#[derive(PartialEq, Eq, Copy, Clone)]
+#[repr(packed)]
+struct Foo {
+    field: (u8, u16),
+}
+
+#[derive(PartialEq, Eq, Copy, Clone)]
+#[repr(align(2))]
+struct Bar {
+    a: Foo,
+}
+
+const FOO: Bar = Bar {
+    a: Foo {
+        field: (5, 6),
+    }
+};
+
+fn main() {
+    match FOO {
+        Bar { a: Foo { field: (5, 6) } } => {},
+        FOO => unreachable!(),
+        _ => unreachable!(),
+    }
+}
diff --git a/src/test/ui/consts/static-raw-pointer-interning.rs b/src/test/ui/consts/static-raw-pointer-interning.rs
new file mode 100644
index 0000000000000..cab60c91e165c
--- /dev/null
+++ b/src/test/ui/consts/static-raw-pointer-interning.rs
@@ -0,0 +1,15 @@
+// run-pass
+
+static FOO: Foo = Foo {
+    field: &42 as *const i32,
+};
+
+struct Foo {
+    field: *const i32,
+}
+
+unsafe impl Sync for Foo {}
+
+fn main() {
+    assert_eq!(unsafe { *FOO.field }, 42);
+}
diff --git a/src/test/ui/consts/static-raw-pointer-interning2.rs b/src/test/ui/consts/static-raw-pointer-interning2.rs
new file mode 100644
index 0000000000000..2b915fd7cb329
--- /dev/null
+++ b/src/test/ui/consts/static-raw-pointer-interning2.rs
@@ -0,0 +1,15 @@
+// run-pass
+
+static mut FOO: Foo = Foo {
+    field: &mut [42] as *mut [i32] as *mut i32,
+};
+
+struct Foo {
+    field: *mut i32,
+}
+
+unsafe impl Sync for Foo {}
+
+fn main() {
+    assert_eq!(unsafe { *FOO.field = 69; *FOO.field }, 69);
+}
diff --git a/src/test/ui/consts/union_constant.rs b/src/test/ui/consts/union_constant.rs
index 6b6042194ba20..16b05310b9fcb 100644
--- a/src/test/ui/consts/union_constant.rs
+++ b/src/test/ui/consts/union_constant.rs
@@ -6,5 +6,6 @@ union Uninit {
 }
 
 const UNINIT: Uninit = Uninit { uninit: () };
+const UNINIT2: (Uninit,) = (Uninit { uninit: () }, );
 
 fn main() {}
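The `union_constant` test ties back to the `ScalarMaybeUndef` handling in `op_to_const`: a union initialized through its zero-sized variant leaves the remaining bytes undefined, yet such a union can still have `Abi::Scalar` layout, so the attempted immediate read comes back `Undef` and the constant must fall back to `ByRef`. A runnable variation on that test (the `value` field is hypothetical, added here to give the union a scalar-sized body):

```rust
#[derive(Copy, Clone)]
union Uninit {
    uninit: (),
    value: u32,
}

// All four bytes of `UNINIT` are undefined: initializing through the
// zero-sized `uninit` variant writes nothing.
const UNINIT: Uninit = Uninit { uninit: () };

fn main() {
    // Copying the value around is fine; reading `UNINIT.value` would be UB.
    let _x = UNINIT;
}
```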