Rollup merge of #78351 - RalfJung:validity-unsafe-cell, r=oli-obk
Move "mutable thing in const" check from interning to validity

This moves the check for mutable things (such as `UnsafeCell` or `&mut`) in a `const` from interning to validity. That means we can give more targeted error messages (pointing out *where* the problem lies), and we can simplify interning a bit.
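
For illustration, a minimal sketch of the distinction this check enforces (item names are made up and the rejected line is not taken from this PR's test suite):

```rust
use std::cell::Cell;

// Interior mutability in the *base value* of a `const` is fine: every use of
// the const copies the value, so no shared mutable memory is created.
const BASE: Cell<i32> = Cell::new(0);

// Interior mutability *behind a reference* is not: the reference would point
// into an immutable global allocation. Const checking already rejects the
// obvious form (E0492); the check moved by this PR is the CTFE-level backstop
// that catches the same situation in final values const checking could not see
// through, and as a validity check it can point at the exact place in the
// value where the `UnsafeCell` or `&mut` sits.
// const BEHIND_REF: &Cell<i32> = &Cell::new(0); // rejected

fn main() {
    let local = BASE; // copies the const's value
    local.set(1);
    assert_eq!(local.get(), 1);
}
```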

Also fix the interning mode used for promoteds in statics.
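
For context on why the interning mode matters, a rough sketch of the mutability decision interning makes for allocations reachable from a `static` (using `AtomicU32` as a stand-in for any `UnsafeCell`-containing type; the items are illustrative, not taken from this PR):

```rust
use std::sync::atomic::{AtomicU32, Ordering};

// The pointee contains no `UnsafeCell`, so its allocation can be interned as
// immutable and placed in read-only memory.
static A: &u32 = &0;

// The pointee contains an `UnsafeCell` (inside the atomic), so its allocation
// must be interned as mutable -- otherwise the store below would write to
// read-only memory.
static B: &AtomicU32 = &AtomicU32::new(0);

fn main() {
    B.store(42, Ordering::Relaxed);
    assert_eq!(B.load(Ordering::Relaxed), 42);
    assert_eq!(*A, 0);
}
```

As the FIXME removed from `eval_queries.rs` below notes, a promoted shares the `DefId` of its owner, so promoteds in a `static` used to be interned like the `static` itself; with this change they use the promoted interning mode instead.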

r? @oli-obk
Dylan-DPC committed Oct 25, 2020
2 parents 9c08ed0 + db01d97 commit 04fe7fa
Showing 10 changed files with 157 additions and 164 deletions.
12 changes: 0 additions & 12 deletions compiler/rustc_middle/src/mir/mod.rs
@@ -210,16 +210,6 @@ pub struct Body<'tcx> {
/// We hold in this field all the constants we are not able to evaluate yet.
pub required_consts: Vec<Constant<'tcx>>,

/// The user may be writing e.g. `&[(SOME_CELL, 42)][i].1` and this would get promoted, because
/// we'd statically know that no thing with interior mutability will ever be available to the
/// user without some serious unsafe code. Now this means that our promoted is actually
/// `&[(SOME_CELL, 42)]` and the MIR using it will do the `&promoted[i].1` projection because
/// the index may be a runtime value. Such a promoted value is illegal because it has reachable
/// interior mutability. This flag just makes this situation very obvious where the previous
/// implementation without the flag hid this situation silently.
/// FIXME(oli-obk): rewrite the promoted during promotion to eliminate the cell components.
pub ignore_interior_mut_in_const_validation: bool,

/// Does this body use generic parameters. This is used for the `ConstEvaluatable` check.
///
/// Note that this does not actually mean that this body is not computable right now.
@@ -276,7 +266,6 @@ impl<'tcx> Body<'tcx> {
var_debug_info,
span,
required_consts: Vec::new(),
ignore_interior_mut_in_const_validation: false,
is_polymorphic: false,
predecessor_cache: PredecessorCache::new(),
};
@@ -306,7 +295,6 @@ impl<'tcx> Body<'tcx> {
required_consts: Vec::new(),
generator_kind: None,
var_debug_info: Vec::new(),
ignore_interior_mut_in_const_validation: false,
is_polymorphic: false,
predecessor_cache: PredecessorCache::new(),
};
49 changes: 24 additions & 25 deletions compiler/rustc_mir/src/const_eval/eval_queries.rs
@@ -1,8 +1,8 @@
use super::{CompileTimeEvalContext, CompileTimeInterpreter, ConstEvalErr, MemoryExtra};
use crate::interpret::eval_nullary_intrinsic;
use crate::interpret::{
intern_const_alloc_recursive, Allocation, ConstAlloc, ConstValue, GlobalId, Immediate,
InternKind, InterpCx, InterpResult, MPlaceTy, MemoryKind, OpTy, RefTracking, Scalar,
intern_const_alloc_recursive, Allocation, ConstAlloc, ConstValue, CtfeValidationMode, GlobalId,
Immediate, InternKind, InterpCx, InterpResult, MPlaceTy, MemoryKind, OpTy, RefTracking, Scalar,
ScalarMaybeUninit, StackPopCleanup,
};

@@ -59,23 +59,15 @@ fn eval_body_using_ecx<'mir, 'tcx>(
ecx.run()?;

// Intern the result
// FIXME: since the DefId of a promoted is the DefId of its owner, this
// means that promoteds in statics are actually interned like statics!
// However, this is also currently crucial because we promote mutable
// non-empty slices in statics to extend their lifetime, and this
// ensures that they are put into a mutable allocation.
// For other kinds of promoteds in statics (like array initializers), this is rather silly.
let intern_kind = match tcx.static_mutability(cid.instance.def_id()) {
Some(m) => InternKind::Static(m),
None if cid.promoted.is_some() => InternKind::Promoted,
_ => InternKind::Constant,
let intern_kind = if cid.promoted.is_some() {
InternKind::Promoted
} else {
match tcx.static_mutability(cid.instance.def_id()) {
Some(m) => InternKind::Static(m),
None => InternKind::Constant,
}
};
intern_const_alloc_recursive(
ecx,
intern_kind,
ret,
body.ignore_interior_mut_in_const_validation,
);
intern_const_alloc_recursive(ecx, intern_kind, ret);

debug!("eval_body_using_ecx done: {:?}", *ret);
Ok(ret)
@@ -376,16 +368,23 @@ pub fn eval_to_allocation_raw_provider<'tcx>(
// Since evaluation had no errors, validate the resulting constant:
let validation = try {
// FIXME do not validate promoteds until a decision on
// https://github.com/rust-lang/rust/issues/67465 is made
// https://github.com/rust-lang/rust/issues/67465 and
// https://github.com/rust-lang/rust/issues/67534 is made.
// Promoteds can contain unexpected `UnsafeCell` and reference `static`s, but their
// otherwise restricted form ensures that this is still sound. We just lose the
// extra safety net of some of the dynamic checks. They can also contain invalid
// values, but since we do not usually check intermediate results of a computation
// for validity, it might be surprising to do that here.
if cid.promoted.is_none() {
let mut ref_tracking = RefTracking::new(mplace);
let mut inner = false;
while let Some((mplace, path)) = ref_tracking.todo.pop() {
ecx.const_validate_operand(
mplace.into(),
path,
&mut ref_tracking,
/*may_ref_to_static*/ ecx.memory.extra.can_access_statics,
)?;
let mode = match tcx.static_mutability(cid.instance.def_id()) {
Some(_) => CtfeValidationMode::Regular, // a `static`
None => CtfeValidationMode::Const { inner },
};
ecx.const_validate_operand(mplace.into(), path, &mut ref_tracking, mode)?;
inner = true;
}
}
};
2 changes: 1 addition & 1 deletion compiler/rustc_mir/src/const_eval/mod.rs
@@ -29,7 +29,7 @@ pub(crate) fn const_caller_location(
let mut ecx = mk_eval_cx(tcx, DUMMY_SP, ty::ParamEnv::reveal_all(), false);

let loc_place = ecx.alloc_caller_location(file, line, col);
intern_const_alloc_recursive(&mut ecx, InternKind::Constant, loc_place, false);
intern_const_alloc_recursive(&mut ecx, InternKind::Constant, loc_place);
ConstValue::Scalar(loc_place.ptr)
}

109 changes: 42 additions & 67 deletions compiler/rustc_mir/src/interpret/intern.rs
@@ -2,12 +2,23 @@
//!
//! After a const evaluation has computed a value, before we destroy the const evaluator's session
//! memory, we need to extract all memory allocations to the global memory pool so they stay around.
//!
//! In principle, this is not very complicated: we recursively walk the final value, follow all the
//! pointers, and move all reachable allocations to the global `tcx` memory. The only complication
//! is picking the right mutability for the allocations in a `static` initializer: we want to make
//! as many allocations as possible immutable so LLVM can put them into read-only memory. At the
//! same time, we need to make memory that could be mutated by the program mutable to avoid
//! incorrect compilations. To achieve this, we do a type-based traversal of the final value,
//! tracking mutable and shared references and `UnsafeCell` to determine the current mutability.
//! (In principle, we could skip this type-based part for `const` and promoteds, as they need to be
//! always immutable. At least for `const` however we use this opportunity to reject any `const`
//! that contains allocations whose mutability we cannot identify.)

use super::validity::RefTracking;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_hir as hir;
use rustc_middle::mir::interpret::InterpResult;
use rustc_middle::ty::{self, layout::TyAndLayout, query::TyCtxtAt, Ty};
use rustc_middle::ty::{self, layout::TyAndLayout, Ty};
use rustc_target::abi::Size;

use rustc_ast::Mutability;
@@ -33,17 +44,13 @@ struct InternVisitor<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx>> {
/// A list of all encountered allocations. After type-based interning, we traverse this list to
/// also intern allocations that are only referenced by a raw pointer or inside a union.
leftover_allocations: &'rt mut FxHashSet<AllocId>,
/// The root kind of the value that we're looking at. This field is never mutated and only used
/// for sanity assertions that will ICE when `const_qualif` screws up.
/// The root kind of the value that we're looking at. This field is never mutated for a
/// particular allocation. It is primarily used to make as many allocations as possible
/// read-only so LLVM can place them in const memory.
mode: InternMode,
/// This field stores whether we are *currently* inside an `UnsafeCell`. This can affect
/// the intern mode of references we encounter.
inside_unsafe_cell: bool,

/// This flag is to avoid triggering UnsafeCells are not allowed behind references in constants
/// for promoteds.
/// It's a copy of `mir::Body`'s ignore_interior_mut_in_const_validation field
ignore_interior_mut_in_const: bool,
}

#[derive(Copy, Clone, Debug, PartialEq, Hash, Eq)]
@@ -52,22 +59,14 @@ enum InternMode {
/// this is *immutable*, and below mutable references inside an `UnsafeCell`, this
/// is *mutable*.
Static(hir::Mutability),
/// The "base value" of a const, which can have `UnsafeCell` (as in `const FOO: Cell<i32>`),
/// but that interior mutability is simply ignored.
ConstBase,
/// The "inner values" of a const with references, where `UnsafeCell` is an error.
ConstInner,
/// A `const`.
Const,
}

/// Signalling data structure to ensure we don't recurse
/// into the memory of other constants or statics
struct IsStaticOrFn;

fn mutable_memory_in_const(tcx: TyCtxtAt<'_>, kind: &str) {
// FIXME: show this in validation instead so we can point at where in the value the error is?
tcx.sess.span_err(tcx.span, &format!("mutable memory ({}) is not allowed in constant", kind));
}

/// Intern an allocation without looking at its children.
/// `mode` is the mode of the environment where we found this pointer.
/// `mutability` is the mutability of the place to be interned; even if that says
@@ -113,8 +112,8 @@ fn intern_shallow<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx>>(
// For this, we need to take into account `UnsafeCell`. When `ty` is `None`, we assume
// no interior mutability.
let frozen = ty.map_or(true, |ty| ty.is_freeze(ecx.tcx, ecx.param_env));
// For statics, allocation mutability is the combination of the place mutability and
// the type mutability.
// For statics, allocation mutability is the combination of place mutability and
// type mutability.
// The entire allocation needs to be mutable if it contains an `UnsafeCell` anywhere.
let immutable = mutability == Mutability::Not && frozen;
if immutable {
@@ -128,9 +127,7 @@ fn intern_shallow<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx>>(
// See const_eval::machine::MemoryExtra::can_access_statics for why
// immutability is so important.

// There are no sensible checks we can do here; grep for `mutable_memory_in_const` to
// find the checks we are doing elsewhere to avoid even getting here for memory
// that "wants" to be mutable.
// Validation will ensure that there is no `UnsafeCell` on an immutable allocation.
alloc.mutability = Mutability::Not;
};
// link the alloc id to the actual allocation
@@ -166,17 +163,13 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx>> ValueVisitor<'mir
mplace: MPlaceTy<'tcx>,
fields: impl Iterator<Item = InterpResult<'tcx, Self::V>>,
) -> InterpResult<'tcx> {
// ZSTs cannot contain pointers, so we can skip them.
if mplace.layout.is_zst() {
return Ok(());
}

if let Some(def) = mplace.layout.ty.ty_adt_def() {
if Some(def.did) == self.ecx.tcx.lang_items().unsafe_cell_type() {
if self.mode == InternMode::ConstInner && !self.ignore_interior_mut_in_const {
// We do not actually make this memory mutable. But in case the user
// *expected* it to be mutable, make sure we error. This is just a
// sanity check to prevent users from accidentally exploiting the UB
// they caused. It also helps us to find cases where const-checking
// failed to prevent an `UnsafeCell` (but as `ignore_interior_mut_in_const`
// shows that part is not airtight).
mutable_memory_in_const(self.ecx.tcx, "`UnsafeCell`");
}
// We are crossing over an `UnsafeCell`, we can mutate again. This means that
// References we encounter inside here are interned as pointing to mutable
// allocations.
@@ -188,11 +181,6 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx>> ValueVisitor<'mir
}
}

// ZSTs do not need validation unless they're uninhabited
if mplace.layout.is_zst() && !mplace.layout.abi.is_uninhabited() {
return Ok(());
}

self.walk_aggregate(mplace, fields)
}

@@ -209,13 +197,12 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx>> ValueVisitor<'mir
if let ty::Dynamic(..) =
tcx.struct_tail_erasing_lifetimes(referenced_ty, self.ecx.param_env).kind()
{
// Validation will error (with a better message) on an invalid vtable pointer
// so we can safely not do anything if this is not a real pointer.
if let Scalar::Ptr(vtable) = mplace.meta.unwrap_meta() {
// Explicitly choose const mode here, since vtables are immutable, even
// if the reference of the fat pointer is mutable.
self.intern_shallow(vtable.alloc_id, InternMode::ConstInner, None);
self.intern_shallow(vtable.alloc_id, InternMode::Const, None);
} else {
// Validation will error (with a better message) on an invalid vtable pointer.
// Let validation show the error message, but make sure it *does* error.
tcx.sess
.delay_span_bug(tcx.span, "vtables pointers cannot be integer pointers");
@@ -224,7 +211,8 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx>> ValueVisitor<'mir
// Check if we have encountered this pointer+layout combination before.
// Only recurse for allocation-backed pointers.
if let Scalar::Ptr(ptr) = mplace.ptr {
// Compute the mode with which we intern this.
// Compute the mode with which we intern this. Our goal here is to make as many
// statics as we can immutable so they can be placed in read-only memory by LLVM.
let ref_mode = match self.mode {
InternMode::Static(mutbl) => {
// In statics, merge outer mutability with reference mutability and
@@ -243,8 +231,13 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx>> ValueVisitor<'mir
}
Mutability::Not => {
// A shared reference, things become immutable.
// We do *not* consider `freeze` here -- that is done more precisely
// when traversing the referenced data (by tracking `UnsafeCell`).
// We do *not* consider `freeze` here: `intern_shallow` considers
// `freeze` for the actual mutability of this allocation; the intern
// mode for references contained in this allocation is tracked more
// precisely when traversing the referenced data (by tracking
// `UnsafeCell`). This makes sure that `&(&i32, &Cell<i32>)` still
// has the left inner reference interned into a read-only
// allocation.
InternMode::Static(Mutability::Not)
}
Mutability::Mut => {
@@ -253,27 +246,11 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx>> ValueVisitor<'mir
}
}
}
InternMode::ConstBase | InternMode::ConstInner => {
// Ignore `UnsafeCell`, everything is immutable. Do some sanity checking
// for mutable references that we encounter -- they must all be ZST.
// This helps to prevent users from accidentally exploiting UB that they
// caused (by somehow getting a mutable reference in a `const`).
if ref_mutability == Mutability::Mut {
match referenced_ty.kind() {
ty::Array(_, n) if n.eval_usize(*tcx, self.ecx.param_env) == 0 => {}
ty::Slice(_)
if mplace.meta.unwrap_meta().to_machine_usize(self.ecx)?
== 0 => {}
_ => mutable_memory_in_const(tcx, "`&mut`"),
}
} else {
// A shared reference. We cannot check `freeze` here due to references
// like `&dyn Trait` that are actually immutable. We do check for
// concrete `UnsafeCell` when traversing the pointee though (if it is
// a new allocation, not yet interned).
}
// Go on with the "inner" rules.
InternMode::ConstInner
InternMode::Const => {
// Ignore `UnsafeCell`, everything is immutable. Validity does some sanity
// checking for mutable references that we encounter -- they must all be
// ZST.
InternMode::Const
}
};
match self.intern_shallow(ptr.alloc_id, ref_mode, Some(referenced_ty)) {
@@ -312,7 +289,6 @@ pub fn intern_const_alloc_recursive<M: CompileTimeMachine<'mir, 'tcx>>(
ecx: &mut InterpCx<'mir, 'tcx, M>,
intern_kind: InternKind,
ret: MPlaceTy<'tcx>,
ignore_interior_mut_in_const: bool,
) where
'tcx: 'mir,
{
@@ -321,7 +297,7 @@ pub fn intern_const_alloc_recursive<M: CompileTimeMachine<'mir, 'tcx>>(
InternKind::Static(mutbl) => InternMode::Static(mutbl),
// `Constant` includes array lengths.
// `Promoted` includes non-`Copy` array initializers and `rustc_args_required_const` arguments.
InternKind::Constant | InternKind::Promoted => InternMode::ConstBase,
InternKind::Constant | InternKind::Promoted => InternMode::Const,
};

// Type based interning.
Expand Down Expand Up @@ -351,7 +327,6 @@ pub fn intern_const_alloc_recursive<M: CompileTimeMachine<'mir, 'tcx>>(
ecx,
mode,
leftover_allocations,
ignore_interior_mut_in_const,
inside_unsafe_cell: false,
}
.visit_value(mplace);
2 changes: 1 addition & 1 deletion compiler/rustc_mir/src/interpret/mod.rs
@@ -24,7 +24,7 @@ pub use self::machine::{compile_time_machine, AllocMap, Machine, MayLeak, StackP
pub use self::memory::{AllocCheck, FnVal, Memory, MemoryKind};
pub use self::operand::{ImmTy, Immediate, OpTy, Operand};
pub use self::place::{MPlaceTy, MemPlace, MemPlaceMeta, Place, PlaceTy};
pub use self::validity::RefTracking;
pub use self::validity::{CtfeValidationMode, RefTracking};
pub use self::visitor::{MutValueVisitor, ValueVisitor};

crate use self::intrinsics::eval_nullary_intrinsic;