Auto merge of #58351 - oli-obk:double_check_const_eval, r=RalfJung
Refactor interning to properly mark memory as mutable or immutable

r? @RalfJung

This implementation is incomplete for multiple reasons:

* [ ] add `-Zunleash_the_miri_inside_of_you` tests
* [ ] report an error if there's an `UnsafeCell` behind a reference in a constant (a sketch of this case follows the list)
* [ ] make validity checks actually test whether the mutability of their allocations matches what they see in the type
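
As a sketch of the second checklist item (an illustration, not one of this PR's tests): a `const` that holds a reference to interior-mutable data is the case const eval should reject, because interning would otherwise have to mark the referenced memory as mutable. `Cell` here stands in for any `UnsafeCell`-based type.

```rust
use std::cell::Cell;

// Expected to be rejected by the compiler: the memory behind the reference is
// interior-mutable (a `Cell` wraps an `UnsafeCell`), so it could be written to
// at run time and must not be interned as part of an immutable constant.
const BAD: &Cell<u32> = &Cell::new(0);

fn main() {}
```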
bors committed Jun 19, 2019
2 parents a6cbf2d + cd290c7 commit 9cb052a
Showing 30 changed files with 758 additions and 197 deletions.
9 changes: 7 additions & 2 deletions src/librustc/mir/interpret/value.rs
@@ -2,7 +2,7 @@ use std::fmt;
use rustc_macros::HashStable;
use rustc_apfloat::{Float, ieee::{Double, Single}};

use crate::ty::{Ty, InferConst, ParamConst, layout::{HasDataLayout, Size}, subst::SubstsRef};
use crate::ty::{Ty, InferConst, ParamConst, layout::{HasDataLayout, Size, Align}, subst::SubstsRef};
use crate::ty::PlaceholderConst;
use crate::hir::def_id::DefId;

@@ -45,7 +45,12 @@ pub enum ConstValue<'tcx> {

/// An allocation together with a pointer into the allocation.
/// Invariant: the pointer's `AllocId` resolves to the allocation.
ByRef(Pointer, &'tcx Allocation),
/// The alignment exists to allow `const_field` to have `ByRef` access to nonprimitive fields
/// of `repr(packed)` structs. The alignment may be lower than that of this constant's type.
/// This permits reads with lower alignment than what the type would normally require.
/// FIXME(RalfJ,oli-obk): The alignment checks are part of miri, but const eval doesn't really
/// need them. Disabling them may be too hard though.
ByRef(Pointer, Align, &'tcx Allocation),

/// Used in the HIR by using `Unevaluated` everywhere and later normalizing to one of the other
/// variants when the code is monomorphic enough for that.
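
To illustrate why `ByRef` now carries an `Align` (a sketch under assumptions, not code from this commit): a field of a `repr(packed)` struct can sit at an offset with lower alignment than its type requires, so a `ByRef` constant that `const_field` produces for such a field has to record that lower alignment.

```rust
// Sketch only: `Packed` and `P` are made-up names for illustration.
#[repr(packed)]
struct Packed {
    a: u8,
    b: u32, // stored at offset 1, so only 1-aligned despite `u32`'s alignment of 4
}

const P: Packed = Packed { a: 0, b: 0x0102_0304 };

fn main() {
    // Copying the field out by value is fine (an unaligned load is emitted);
    // taking a reference to `P.b` would be rejected as under-aligned.
    let b = P.b;
    println!("{:#x}", b);
}
```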
2 changes: 1 addition & 1 deletion src/librustc/ty/structural_impls.rs
@@ -1335,7 +1335,7 @@ impl<'tcx> TypeFoldable<'tcx> for &'tcx ty::Const<'tcx> {
impl<'tcx> TypeFoldable<'tcx> for ConstValue<'tcx> {
fn super_fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> Self {
match *self {
ConstValue::ByRef(ptr, alloc) => ConstValue::ByRef(ptr, alloc),
ConstValue::ByRef(ptr, align, alloc) => ConstValue::ByRef(ptr, align, alloc),
ConstValue::Infer(ic) => ConstValue::Infer(ic.fold_with(folder)),
ConstValue::Param(p) => ConstValue::Param(p.fold_with(folder)),
ConstValue::Placeholder(p) => ConstValue::Placeholder(p),
7 changes: 4 additions & 3 deletions src/librustc_codegen_llvm/common.rs
@@ -11,7 +11,7 @@ use crate::value::Value;
use rustc_codegen_ssa::traits::*;

use crate::consts::const_alloc_to_llvm;
use rustc::ty::layout::{HasDataLayout, LayoutOf, self, TyLayout, Size};
use rustc::ty::layout::{HasDataLayout, LayoutOf, self, TyLayout, Size, Align};
use rustc::mir::interpret::{Scalar, GlobalAlloc, Allocation};
use rustc_codegen_ssa::mir::place::PlaceRef;

@@ -344,19 +344,20 @@ impl ConstMethods<'tcx> for CodegenCx<'ll, 'tcx> {
fn from_const_alloc(
&self,
layout: TyLayout<'tcx>,
align: Align,
alloc: &Allocation,
offset: Size,
) -> PlaceRef<'tcx, &'ll Value> {
let init = const_alloc_to_llvm(self, alloc);
let base_addr = self.static_addr_of(init, layout.align.abi, None);
let base_addr = self.static_addr_of(init, align, None);

let llval = unsafe { llvm::LLVMConstInBoundsGEP(
self.const_bitcast(base_addr, self.type_i8p()),
&self.const_usize(offset.bytes()),
1,
)};
let llval = self.const_bitcast(llval, self.type_ptr_to(layout.llvm_type(self)));
PlaceRef::new_sized(llval, layout, alloc.align)
PlaceRef::new_sized(llval, layout, align)
}

fn const_ptrcast(&self, val: &'ll Value, ty: &'ll Type) -> &'ll Value {
4 changes: 3 additions & 1 deletion src/librustc_codegen_llvm/consts.rs
@@ -71,7 +71,9 @@ pub fn codegen_static_initializer(
let static_ = cx.tcx.const_eval(param_env.and(cid))?;

let alloc = match static_.val {
ConstValue::ByRef(ptr, alloc) if ptr.offset.bytes() == 0 => alloc,
ConstValue::ByRef(ptr, align, alloc) if ptr.offset.bytes() == 0 && align == alloc.align => {
alloc
},
_ => bug!("static const eval returned {:#?}", static_),
};
Ok((const_alloc_to_llvm(cx, alloc), alloc))
4 changes: 2 additions & 2 deletions src/librustc_codegen_ssa/mir/operand.rs
@@ -109,8 +109,8 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
let b_llval = bx.const_usize((end - start) as u64);
OperandValue::Pair(a_llval, b_llval)
},
ConstValue::ByRef(ptr, alloc) => {
return bx.load_operand(bx.from_const_alloc(layout, alloc, ptr.offset));
ConstValue::ByRef(ptr, align, alloc) => {
return bx.load_operand(bx.from_const_alloc(layout, align, alloc, ptr.offset));
},
};

4 changes: 2 additions & 2 deletions src/librustc_codegen_ssa/mir/place.rs
@@ -424,8 +424,8 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
let layout = cx.layout_of(self.monomorphize(&ty));
match bx.tcx().const_eval(param_env.and(cid)) {
Ok(val) => match val.val {
mir::interpret::ConstValue::ByRef(ptr, alloc) => {
bx.cx().from_const_alloc(layout, alloc, ptr.offset)
mir::interpret::ConstValue::ByRef(ptr, align, alloc) => {
bx.cx().from_const_alloc(layout, align, alloc, ptr.offset)
}
_ => bug!("promoteds should have an allocation: {:?}", val),
},
1 change: 1 addition & 0 deletions src/librustc_codegen_ssa/traits/consts.rs
@@ -34,6 +34,7 @@ pub trait ConstMethods<'tcx>: BackendTypes {
fn from_const_alloc(
&self,
layout: layout::TyLayout<'tcx>,
align: layout::Align,
alloc: &Allocation,
offset: layout::Size,
) -> PlaceRef<'tcx, Self::Value>;
103 changes: 59 additions & 44 deletions src/librustc_mir/const_eval.rs
@@ -9,7 +9,7 @@ use std::convert::TryInto;

use rustc::hir::def::DefKind;
use rustc::hir::def_id::DefId;
use rustc::mir::interpret::{ConstEvalErr, ErrorHandled};
use rustc::mir::interpret::{ConstEvalErr, ErrorHandled, ScalarMaybeUndef};
use rustc::mir;
use rustc::ty::{self, TyCtxt, query::TyCtxtAt};
use rustc::ty::layout::{self, LayoutOf, VariantIdx};
@@ -18,15 +18,14 @@ use rustc::traits::Reveal;
use rustc::util::common::ErrorReported;
use rustc_data_structures::fx::FxHashMap;

use syntax::ast::Mutability;
use syntax::source_map::{Span, DUMMY_SP};

use crate::interpret::{self,
PlaceTy, MPlaceTy, MemPlace, OpTy, ImmTy, Immediate, Scalar,
PlaceTy, MPlaceTy, OpTy, ImmTy, Immediate, Scalar,
RawConst, ConstValue,
InterpResult, InterpErrorInfo, InterpError, GlobalId, InterpretCx, StackPopCleanup,
Allocation, AllocId, MemoryKind,
snapshot, RefTracking,
snapshot, RefTracking, intern_const_alloc_recursive,
};

/// Number of steps until the detector even starts doing anything.
@@ -63,33 +62,19 @@ pub(crate) fn eval_promoted<'mir, 'tcx>(
eval_body_using_ecx(&mut ecx, cid, body, param_env)
}

fn mplace_to_const<'tcx>(
ecx: &CompileTimeEvalContext<'_, 'tcx>,
mplace: MPlaceTy<'tcx>,
) -> &'tcx ty::Const<'tcx> {
let MemPlace { ptr, align, meta } = *mplace;
// extract alloc-offset pair
assert!(meta.is_none());
let ptr = ptr.to_ptr().unwrap();
let alloc = ecx.memory.get(ptr.alloc_id).unwrap();
assert!(alloc.align >= align);
assert!(alloc.bytes.len() as u64 - ptr.offset.bytes() >= mplace.layout.size.bytes());
let mut alloc = alloc.clone();
alloc.align = align;
// FIXME shouldn't it be the case that `mark_static_initialized` has already
// interned this? I thought that is the entire point of that `FinishStatic` stuff?
let alloc = ecx.tcx.intern_const_alloc(alloc);
let val = ConstValue::ByRef(ptr, alloc);
ecx.tcx.mk_const(ty::Const { val, ty: mplace.layout.ty })
}

fn op_to_const<'tcx>(
ecx: &CompileTimeEvalContext<'_, 'tcx>,
op: OpTy<'tcx>,
) -> &'tcx ty::Const<'tcx> {
// We do not normalize just any data. Only non-union scalars and slices.
let normalize = match op.layout.abi {
layout::Abi::Scalar(..) => op.layout.ty.ty_adt_def().map_or(true, |adt| !adt.is_union()),
// We do not have value optimizations for everything.
// Only scalars and slices, since they are very common.
// Note that further down we turn scalars of undefined bits back to `ByRef`. These can result
// from scalar unions that are initialized with one of their zero sized variants. We could
// instead allow `ConstValue::Scalar` to store `ScalarMaybeUndef`, but that would affect all
// the usual cases of extracting e.g. a `usize`, without there being a real use case for the
// `Undef` situation.
let try_as_immediate = match op.layout.abi {
layout::Abi::Scalar(..) => true,
layout::Abi::ScalarPair(..) => match op.layout.ty.sty {
ty::Ref(_, inner, _) => match inner.sty {
ty::Slice(elem) => elem == ecx.tcx.types.u8,
@@ -100,16 +85,38 @@ fn op_to_const<'tcx>(
},
_ => false,
};
let normalized_op = if normalize {
Err(*ecx.read_immediate(op).expect("normalization works on validated constants"))
let immediate = if try_as_immediate {
Err(ecx.read_immediate(op).expect("normalization works on validated constants"))
} else {
// It is guaranteed that any non-slice scalar pair is actually ByRef here.
// When we come back from raw const eval, we are always by-ref. The only way our op here is
// by-val is if we are in const_field, i.e., if this is (a field of) something that we
// "tried to make immediate" before. We wouldn't do that for non-slice scalar pairs or
// structs containing such.
op.try_as_mplace()
};
let val = match normalized_op {
Ok(mplace) => return mplace_to_const(ecx, mplace),
Err(Immediate::Scalar(x)) =>
ConstValue::Scalar(x.not_undef().unwrap()),
Err(Immediate::ScalarPair(a, b)) => {
let val = match immediate {
Ok(mplace) => {
let ptr = mplace.ptr.to_ptr().unwrap();
let alloc = ecx.tcx.alloc_map.lock().unwrap_memory(ptr.alloc_id);
ConstValue::ByRef(ptr, mplace.align, alloc)
},
// see comment on `let try_as_immediate` above
Err(ImmTy { imm: Immediate::Scalar(x), .. }) => match x {
ScalarMaybeUndef::Scalar(s) => ConstValue::Scalar(s),
ScalarMaybeUndef::Undef => {
// When coming out of "normal CTFE", we'll always have an `Indirect` operand as
// argument and we will not need this. The only way we can already have an
// `Immediate` is when we are called from `const_field`, and that `Immediate`
// comes from a constant, so it can have `Undef` because the indirect
// memory that was read had undefined bytes.
let mplace = op.to_mem_place();
let ptr = mplace.ptr.to_ptr().unwrap();
let alloc = ecx.tcx.alloc_map.lock().unwrap_memory(ptr.alloc_id);
ConstValue::ByRef(ptr, mplace.align, alloc)
},
},
Err(ImmTy { imm: Immediate::ScalarPair(a, b), .. }) => {
let (data, start) = match a.not_undef().unwrap() {
Scalar::Ptr(ptr) => (
ecx.tcx.alloc_map.lock().unwrap_memory(ptr.alloc_id),
@@ -164,13 +171,12 @@ fn eval_body_using_ecx<'mir, 'tcx>(
ecx.run()?;

// Intern the result
let mutability = if tcx.is_mutable_static(cid.instance.def_id()) ||
!layout.ty.is_freeze(tcx, param_env, body.span) {
Mutability::Mutable
} else {
Mutability::Immutable
};
ecx.memory.intern_static(ret.ptr.to_ptr()?.alloc_id, mutability)?;
intern_const_alloc_recursive(
ecx,
cid.instance.def_id(),
ret,
param_env,
)?;

debug!("eval_body_using_ecx done: {:?}", *ret);
Ok(ret)
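
For context on the decision `intern_const_alloc_recursive` now makes (a surface-level sketch with invented names, not code from this commit): memory reachable from a constant or from an immutable, `Freeze` static can be interned read-only, while a `static mut` or a static with interior mutability must remain writable at run time.

```rust
use std::sync::atomic::{AtomicU32, Ordering};

static IMMUTABLE: u32 = 5;                     // Freeze and immutable: may end up in read-only memory
static COUNTER: AtomicU32 = AtomicU32::new(0); // interior mutability (not Freeze): must stay writable
static mut GLOBAL: u32 = 0;                    // mutable static: must stay writable

fn main() {
    COUNTER.fetch_add(1, Ordering::Relaxed);
    unsafe { GLOBAL += 1 };
    println!("{} {}", IMMUTABLE, COUNTER.load(Ordering::Relaxed));
}
```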
@@ -297,7 +303,7 @@ impl<K: Hash + Eq, V> interpret::AllocMap<K, V> for FxHashMap<K, V> {
}
}

type CompileTimeEvalContext<'mir, 'tcx> =
crate type CompileTimeEvalContext<'mir, 'tcx> =
InterpretCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>>;

impl interpret::MayLeak for ! {
@@ -526,13 +532,22 @@ fn validate_and_turn_into_const<'tcx>(
mplace.into(),
path,
Some(&mut ref_tracking),
true, // const mode
)?;
}
// Now that we validated, turn this into a proper constant.
// Statics/promoteds are always `ByRef`, for the rest `op_to_const` decides
// whether they become immediates.
let def_id = cid.instance.def.def_id();
if tcx.is_static(def_id) || cid.promoted.is_some() {
Ok(mplace_to_const(&ecx, mplace))
let ptr = mplace.ptr.to_ptr()?;
Ok(tcx.mk_const(ty::Const {
val: ConstValue::ByRef(
ptr,
mplace.align,
ecx.tcx.alloc_map.lock().unwrap_memory(ptr.alloc_id),
),
ty: mplace.layout.ty,
}))
} else {
Ok(op_to_const(&ecx, mplace.into()))
}
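
As a sketch of the situation the `op_to_const` comments above describe (an illustration, not one of this PR's tests): a scalar-sized union initialized through a zero-sized field has undefined bytes, so the constant cannot be handed out as a `ConstValue::Scalar` and falls back to `ByRef`.

```rust
// Illustration only; `MaybeInit` is a made-up name.
union MaybeInit {
    uninit: (),  // zero-sized field: writing it initializes no bytes
    value: u32,  // scalar-sized field
}

// The constant itself is fine, but its four bytes are undefined, so const eval
// has no defined scalar representation for it.
const UNINIT: MaybeInit = MaybeInit { uninit: () };

fn main() {
    let _ = UNINIT;
}
```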
17 changes: 11 additions & 6 deletions src/librustc_mir/hair/pattern/_match.rs
@@ -215,10 +215,15 @@ impl LiteralExpander<'tcx> {
debug!("fold_const_value_deref {:?} {:?} {:?}", val, rty, crty);
match (val, &crty.sty, &rty.sty) {
// the easy case, deref a reference
(ConstValue::Scalar(Scalar::Ptr(p)), x, y) if x == y => ConstValue::ByRef(
p,
self.tcx.alloc_map.lock().unwrap_memory(p.alloc_id),
),
(ConstValue::Scalar(Scalar::Ptr(p)), x, y) if x == y => {
let alloc = self.tcx.alloc_map.lock().unwrap_memory(p.alloc_id);
ConstValue::ByRef(
p,
// FIXME(oli-obk): this should be the type's layout
alloc.align,
alloc,
)
},
// unsize array to slice if pattern is array but match value or other patterns are slice
(ConstValue::Scalar(Scalar::Ptr(p)), ty::Array(t, n), ty::Slice(u)) => {
assert_eq!(t, u);
@@ -1431,7 +1436,7 @@ fn slice_pat_covered_by_const<'tcx>(
suffix: &[Pattern<'tcx>],
) -> Result<bool, ErrorReported> {
let data: &[u8] = match (const_val.val, &const_val.ty.sty) {
(ConstValue::ByRef(ptr, alloc), ty::Array(t, n)) => {
(ConstValue::ByRef(ptr, _, alloc), ty::Array(t, n)) => {
assert_eq!(*t, tcx.types.u8);
let n = n.assert_usize(tcx).unwrap();
alloc.get_bytes(&tcx, ptr, Size::from_bytes(n)).unwrap()
@@ -1753,7 +1758,7 @@ fn specialize<'p, 'a: 'p, 'tcx>(
let (alloc, offset, n, ty) = match value.ty.sty {
ty::Array(t, n) => {
match value.val {
ConstValue::ByRef(ptr, alloc) => (
ConstValue::ByRef(ptr, _, alloc) => (
alloc,
ptr.offset,
n.unwrap_usize(cx.tcx),
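
A sketch of the pattern-matching case that `fold_const_value_deref` and `slice_pat_covered_by_const` handle (illustration only, using nothing beyond stable pattern matching): a constant of reference type used as a pattern is compared through the memory it points to, which is why the pattern code needs `ByRef` access to the underlying allocation.

```rust
const NUMS: &[u8] = &[1, 2, 3];

fn is_nums(xs: &[u8]) -> bool {
    // The constant has reference type; matching compares the pointed-to bytes,
    // not the pointer value itself.
    match xs {
        NUMS => true,
        _ => false,
    }
}

fn main() {
    assert!(is_nums(&[1, 2, 3]));
    assert!(!is_nums(&[1, 2]));
}
```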
4 changes: 3 additions & 1 deletion src/librustc_mir/interpret/eval_context.rs
@@ -576,7 +576,6 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpretCx<'mir, 'tcx, M> {
self.place_to_op(return_place)?,
vec![],
None,
/*const_mode*/false,
)?;
}
} else {
Expand Down Expand Up @@ -641,6 +640,9 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpretCx<'mir, 'tcx, M> {
&self,
gid: GlobalId<'tcx>,
) -> InterpResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> {
// FIXME(oli-obk): make this check an assertion that it's not a static here
// FIXME(RalfJ, oli-obk): document that `Place::Static` can never be anything but a static
// and `ConstValue::Unevaluated` can never be a static
let param_env = if self.tcx.is_static(gid.instance.def_id()) {
ty::ParamEnv::reveal_all()
} else {