Auto merge of #122432 - oli-obk:validate_before_interning, r=<try>
Validate before interning

based on #122397

fixes #122398

r? `@RalfJung`

There are more cleanups that can be done afterwards, but they aren't necessary for this PR to be useful on its own.
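The change is easiest to see in `eval_body_using_ecx` (diff in `eval_queries.rs` below): allocation mutability is now fixed up first, the result is validated while its allocations still live in the interpreter's memory, and only then is everything interned into the `tcx`. A minimal sketch of the reordered flow, using the calls as they appear in this commit (rustc-internal APIs, not compilable on their own):

```rust
// Sketch of the reordered tail of eval_body_using_ecx (rustc-internal APIs,
// shown only to illustrate the new ordering introduced by this commit).

// Run the interpreter to completion.
while ecx.step()? {}

// 1. Fix up allocation mutability according to the item kind
//    (const / promoted / static / static mut), without interning yet.
patch_mutability_of_allocs(ecx, intern_kind, &ret)?;

// 2. Validate the final value while its allocations are still owned by the
//    interpreter, so validation can report precise paths into the value.
const_validate_mplace(ecx, &ret, cid)?;

// 3. Only now intern the result and everything it references into the tcx.
intern_const_alloc_recursive(ecx, intern_kind, &ret)?;

Ok(R::make_result(ret, ecx))
```

The commit also drops the interning-time `DanglingPtrInFinal` diagnostic (see the `errors.rs` diff below).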
bors committed Mar 18, 2024
2 parents 13abc0a + c99636f commit abaca54
Showing 25 changed files with 416 additions and 575 deletions.
1 change: 0 additions & 1 deletion compiler/rustc_const_eval/messages.ftl
@@ -49,7 +49,6 @@ const_eval_dangling_int_pointer =
const_eval_dangling_null_pointer =
{$bad_pointer_message}: null pointer is a dangling pointer (it has no provenance)
const_eval_dangling_ptr_in_final = encountered dangling pointer in final value of {const_eval_intern_kind}
const_eval_dead_local =
accessing a dead local variable
const_eval_dealloc_immutable =
12 changes: 7 additions & 5 deletions compiler/rustc_const_eval/src/const_eval/eval_queries.rs
@@ -16,12 +16,12 @@ use super::{CanAccessMutGlobal, CompileTimeEvalContext, CompileTimeInterpreter};
use crate::const_eval::CheckAlignment;
use crate::errors;
use crate::errors::ConstEvalError;
use crate::interpret::eval_nullary_intrinsic;
use crate::interpret::{
create_static_alloc, intern_const_alloc_recursive, CtfeValidationMode, GlobalId, Immediate,
InternKind, InterpCx, InterpError, InterpResult, MPlaceTy, MemoryKind, OpTy, RefTracking,
StackPopCleanup,
};
use crate::interpret::{eval_nullary_intrinsic, patch_mutability_of_allocs};

// Returns a pointer to where the result lives
#[instrument(level = "trace", skip(ecx, body))]
@@ -81,11 +81,13 @@ fn eval_body_using_ecx<'mir, 'tcx, R: InterpretationResult<'tcx>>(
// The main interpreter loop.
while ecx.step()? {}

// Intern the result
intern_const_alloc_recursive(ecx, intern_kind, &ret)?;
patch_mutability_of_allocs(ecx, intern_kind, &ret)?;

// Since evaluation had no errors, validate the resulting constant.
const_validate_mplace(&ecx, &ret, cid)?;
const_validate_mplace(ecx, &ret, cid)?;

// Intern the result
intern_const_alloc_recursive(ecx, intern_kind, &ret)?;

Ok(R::make_result(ret, ecx))
}
@@ -401,7 +403,7 @@ fn const_validate_mplace<'mir, 'tcx>(
CtfeValidationMode::Const { allow_immutable_unsafe_cell: !inner }
}
};
ecx.const_validate_operand(&mplace.into(), path, &mut ref_tracking, mode)
ecx.const_validate_operand(mplace, path, &mut ref_tracking, mode)
// Instead of just reporting the `InterpError` via the usual machinery, we give a more targetted
// error about the validation failure.
.map_err(|error| report_validation_error(&ecx, error, alloc_id))?;
4 changes: 3 additions & 1 deletion compiler/rustc_const_eval/src/const_eval/valtrees.rs
@@ -10,11 +10,11 @@ use super::machine::CompileTimeEvalContext;
use super::{ValTreeCreationError, ValTreeCreationResult, VALTREE_MAX_NODES};
use crate::const_eval::CanAccessMutGlobal;
use crate::errors::MaxNumNodesInConstErr;
use crate::interpret::MPlaceTy;
use crate::interpret::{
intern_const_alloc_recursive, ImmTy, Immediate, InternKind, MemPlaceMeta, MemoryKind, PlaceTy,
Projectable, Scalar,
};
use crate::interpret::{patch_mutability_of_allocs, MPlaceTy};

#[instrument(skip(ecx), level = "debug")]
fn branches<'tcx>(
@@ -323,6 +323,7 @@ pub fn valtree_to_const_value<'tcx>(

valtree_into_mplace(&mut ecx, &place, valtree);
dump_place(&ecx, &place);
patch_mutability_of_allocs(&mut ecx, InternKind::Constant, &place).unwrap();
intern_const_alloc_recursive(&mut ecx, InternKind::Constant, &place).unwrap();

op_to_const(&ecx, &place.into(), /* for diagnostics */ false)
@@ -359,6 +360,7 @@ fn valtree_to_ref<'tcx>(

valtree_into_mplace(ecx, &pointee_place, valtree);
dump_place(ecx, &pointee_place);
patch_mutability_of_allocs(ecx, InternKind::Constant, &pointee_place).unwrap();
intern_const_alloc_recursive(ecx, InternKind::Constant, &pointee_place).unwrap();

pointee_place.to_ref(&ecx.tcx)
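Both valtree paths above pick up the same two-step caller pattern that this commit introduces wherever a value is interned: patch allocation mutability first, then intern. Sketched here with the calls exactly as they appear in the diff (rustc-internal APIs):

```rust
// Two-step interning pattern after this commit (sketch; both calls are
// rustc-internal and taken verbatim from the valtree_to_const_value diff).
patch_mutability_of_allocs(&mut ecx, InternKind::Constant, &place).unwrap();
intern_const_alloc_recursive(&mut ecx, InternKind::Constant, &place).unwrap();
```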
8 changes: 0 additions & 8 deletions compiler/rustc_const_eval/src/errors.rs
@@ -17,14 +17,6 @@ use rustc_target::abi::{Size, WrappingRange};

use crate::interpret::InternKind;

#[derive(Diagnostic)]
#[diag(const_eval_dangling_ptr_in_final)]
pub(crate) struct DanglingPtrInFinal {
#[primary_span]
pub span: Span,
pub kind: InternKind,
}

#[derive(LintDiagnostic)]
#[diag(const_eval_mutable_ptr_in_final)]
pub(crate) struct MutablePtrInFinal {
115 changes: 68 additions & 47 deletions compiler/rustc_const_eval/src/interpret/intern.rs
@@ -27,7 +27,7 @@ use rustc_span::sym;

use super::{AllocId, Allocation, InterpCx, MPlaceTy, Machine, MemoryKind, PlaceTy};
use crate::const_eval;
use crate::errors::{DanglingPtrInFinal, MutablePtrInFinal};
use crate::errors::MutablePtrInFinal;

pub trait CompileTimeMachine<'mir, 'tcx: 'mir, T> = Machine<
'mir,
@@ -62,26 +62,13 @@ impl HasStaticRootDefId for const_eval::CompileTimeInterpreter<'_, '_> {
fn intern_shallow<'rt, 'mir, 'tcx, T, M: CompileTimeMachine<'mir, 'tcx, T>>(
ecx: &'rt mut InterpCx<'mir, 'tcx, M>,
alloc_id: AllocId,
mutability: Mutability,
) -> Result<impl Iterator<Item = CtfeProvenance> + 'tcx, ()> {
trace!("intern_shallow {:?}", alloc_id);
// remove allocation
// FIXME(#120456) - is `swap_remove` correct?
let Some((_kind, mut alloc)) = ecx.memory.alloc_map.swap_remove(&alloc_id) else {
let Some((_kind, alloc)) = ecx.memory.alloc_map.swap_remove(&alloc_id) else {
return Err(());
};
// Set allocation mutability as appropriate. This is used by LLVM to put things into
// read-only memory, and also by Miri when evaluating other globals that
// access this one.
match mutability {
Mutability::Not => {
alloc.mutability = Mutability::Not;
}
Mutability::Mut => {
// This must be already mutable, we won't "un-freeze" allocations ever.
assert_eq!(alloc.mutability, Mutability::Mut);
}
}
// link the alloc id to the actual allocation
let alloc = ecx.tcx.mk_const_alloc(alloc);
if let Some(static_id) = ecx.machine.static_def_id() {
@@ -123,14 +110,9 @@ pub enum InternKind {
Promoted,
}

/// Intern `ret` and everything it references.
///
/// This *cannot raise an interpreter error*. Doing so is left to validation, which
/// tracks where in the value we are and thus can show much better error messages.
///
/// For `InternKind::Static` the root allocation will not be interned, but must be handled by the caller.
#[instrument(level = "debug", skip(ecx))]
pub fn intern_const_alloc_recursive<
/// Now that evaluation is finished, and we are not going to modify allocations anymore,
/// recursively mark all allocations as immutable if the item kind calls for it (const/promoted/immut static).
pub fn patch_mutability_of_allocs<
'mir,
'tcx: 'mir,
M: CompileTimeMachine<'mir, 'tcx, const_eval::MemoryKind>,
@@ -141,12 +123,12 @@ pub fn intern_const_alloc_recursive<
) -> Result<(), ErrorGuaranteed> {
// We are interning recursively, and for mutability we are distinguishing the "root" allocation
// that we are starting in, and all other allocations that we are encountering recursively.
let (base_mutability, inner_mutability, is_static) = match intern_kind {
let (base_mutability, inner_mutability) = match intern_kind {
InternKind::Constant | InternKind::Promoted => {
// Completely immutable. Interning anything mutably here can only lead to unsoundness,
// since all consts are conceptually independent values but share the same underlying
// memory.
(Mutability::Not, Mutability::Not, false)
(Mutability::Not, Mutability::Not)
}
InternKind::Static(Mutability::Not) => {
(
@@ -159,30 +141,79 @@ pub fn intern_const_alloc_recursive<
// Inner allocations are never mutable. They can only arise via the "tail
// expression" / "outer scope" rule, and we treat them consistently with `const`.
Mutability::Not,
true,
)
}
InternKind::Static(Mutability::Mut) => {
// Just make everything mutable. We accept code like
// `static mut X = &mut [42]`, so even inner allocations need to be mutable.
(Mutability::Mut, Mutability::Mut, true)
(Mutability::Mut, Mutability::Mut)
}
};
let base_alloc_id = ret.ptr().provenance.unwrap().alloc_id();
let mut todo: Vec<_> = {
let base_alloc = &mut ecx.memory.alloc_map.get_mut(&base_alloc_id).unwrap().1;
base_alloc.mutability = base_mutability;
base_alloc.provenance().ptrs().iter().copied().collect()
};
let mut seen = FxHashSet::default();
seen.insert(base_alloc_id);
while let Some((_, prov)) = todo.pop() {
if !seen.insert(prov.alloc_id()) {
// Already processed
continue;
}
let Some((_, alloc)) = &mut ecx.memory.alloc_map.get_mut(&prov.alloc_id()) else {
continue;
};
// We always intern with `inner_mutability`, and furthermore we ensured above that if
// that is "immutable", then there are *no* mutable pointers anywhere in the newly
// interned memory -- justifying that we can indeed intern immutably. However this also
// means we can *not* easily intern immutably here if `prov.immutable()` is true and
// `inner_mutability` is `Mut`: there might be other pointers to that allocation, and
// we'd have to somehow check that they are *all* immutable before deciding that this
// allocation can be made immutable. In the future we could consider analyzing all
// pointers before deciding which allocations can be made immutable; but for now we are
// okay with losing some potential for immutability here. This can anyway only affect
// `static mut`.
alloc.mutability = inner_mutability;
todo.extend(alloc.provenance().ptrs().iter().copied());
}
Ok(())
}

/// Intern `ret` and everything it references.
///
/// This *cannot raise an interpreter error*. Doing so is left to validation, which
/// tracks where in the value we are and thus can show much better error messages.
///
/// For `InternKind::Static` the root allocation will not be interned, but must be handled by the caller.
#[instrument(level = "debug", skip(ecx))]
pub fn intern_const_alloc_recursive<
'mir,
'tcx: 'mir,
M: CompileTimeMachine<'mir, 'tcx, const_eval::MemoryKind>,
>(
ecx: &mut InterpCx<'mir, 'tcx, M>,
intern_kind: InternKind,
ret: &MPlaceTy<'tcx>,
) -> Result<(), ErrorGuaranteed> {
let (inner_mutability, is_static) = match intern_kind {
InternKind::Constant | InternKind::Promoted => (Mutability::Not, false),
InternKind::Static(mutability) => (mutability, true),
};

// Intern the base allocation, and initialize todo list for recursive interning.
let base_alloc_id = ret.ptr().provenance.unwrap().alloc_id();
trace!(?base_alloc_id, ?base_mutability);
trace!(?base_alloc_id);
// First we intern the base allocation, as it requires a different mutability.
// This gives us the initial set of nested allocations, which will then all be processed
// recursively in the loop below.
let mut todo: Vec<_> = if is_static {
// Do not steal the root allocation, we need it later to create the return value of `eval_static_initializer`.
// But still change its mutability to match the requested one.
let alloc = ecx.memory.alloc_map.get_mut(&base_alloc_id).unwrap();
alloc.1.mutability = base_mutability;
let alloc = ecx.memory.alloc_map.get(&base_alloc_id).unwrap();
alloc.1.provenance().ptrs().iter().map(|&(_, prov)| prov).collect()
} else {
intern_shallow(ecx, base_alloc_id, base_mutability).unwrap().collect()
intern_shallow(ecx, base_alloc_id).unwrap().collect()
};
// We need to distinguish "has just been interned" from "was already in `tcx`",
// so we track this in a separate set.
@@ -248,19 +279,7 @@ pub fn intern_const_alloc_recursive<
continue;
}
just_interned.insert(alloc_id);
// We always intern with `inner_mutability`, and furthermore we ensured above that if
// that is "immutable", then there are *no* mutable pointers anywhere in the newly
// interned memory -- justifying that we can indeed intern immutably. However this also
// means we can *not* easily intern immutably here if `prov.immutable()` is true and
// `inner_mutability` is `Mut`: there might be other pointers to that allocation, and
// we'd have to somehow check that they are *all* immutable before deciding that this
// allocation can be made immutable. In the future we could consider analyzing all
// pointers before deciding which allocations can be made immutable; but for now we are
// okay with losing some potential for immutability here. This can anyway only affect
// `static mut`.
todo.extend(intern_shallow(ecx, alloc_id, inner_mutability).map_err(|()| {
ecx.tcx.dcx().emit_err(DanglingPtrInFinal { span: ecx.tcx.span, kind: intern_kind })
})?);
todo.extend(intern_shallow(ecx, alloc_id).unwrap());
}
if found_bad_mutable_pointer {
let err_diag = MutablePtrInFinal { span: ecx.tcx.span, kind: intern_kind };
@@ -291,7 +310,8 @@ pub fn intern_const_alloc_for_constprop<
return Ok(());
}
// Move allocation to `tcx`.
for _ in intern_shallow(ecx, alloc_id, Mutability::Not).map_err(|()| err_ub!(DeadLocal))? {
ecx.memory.alloc_map.get_mut(&alloc_id).unwrap().1.mutability = Mutability::Not;
for _ in intern_shallow(ecx, alloc_id).map_err(|()| err_ub!(DeadLocal))? {
// We are not doing recursive interning, so we don't currently support provenance.
// (If this assertion ever triggers, we should just implement a
// proper recursive interning loop -- or just call `intern_const_alloc_recursive`.
Expand All @@ -318,7 +338,8 @@ impl<'mir, 'tcx: 'mir, M: super::intern::CompileTimeMachine<'mir, 'tcx, !>>
let dest = self.allocate(layout, MemoryKind::Stack)?;
f(self, &dest.clone().into())?;
let alloc_id = dest.ptr().provenance.unwrap().alloc_id(); // this was just allocated, it must have provenance
for prov in intern_shallow(self, alloc_id, Mutability::Not).unwrap() {
self.memory.alloc_map.get_mut(&alloc_id).unwrap().1.mutability = Mutability::Not;
for prov in intern_shallow(self, alloc_id).unwrap() {
// We are not doing recursive interning, so we don't currently support provenance.
// (If this assertion ever triggers, we should just implement a
// proper recursive interning loop -- or just call `intern_const_alloc_recursive`.
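For readers unfamiliar with this code, the new `patch_mutability_of_allocs` is essentially a worklist traversal over the provenance graph rooted at the result allocation: the root gets `base_mutability`, every allocation reachable from it gets `inner_mutability`, and a seen-set prevents reprocessing. The toy model below illustrates that traversal with hypothetical types (`Alloc`, a plain `u32` id space, `HashMap`/`HashSet` in place of the interpreter's allocation map); it is a sketch of the pattern, not rustc code:

```rust
use std::collections::{HashMap, HashSet};

// Toy stand-ins for the interpreter's allocation map and mutability flag.
#[derive(Clone, Copy, PartialEq, Eq)]
enum Mutability { Not, Mut }

struct Alloc {
    mutability: Mutability,
    pointees: Vec<u32>, // ids of allocations this one points to ("provenance")
}

fn patch_mutability(
    allocs: &mut HashMap<u32, Alloc>,
    root: u32,
    base: Mutability,
    inner: Mutability,
) {
    // The root allocation gets the base mutability; its pointees seed the worklist.
    let mut todo: Vec<u32> = {
        let root_alloc = allocs.get_mut(&root).unwrap();
        root_alloc.mutability = base;
        root_alloc.pointees.clone()
    };
    let mut seen = HashSet::new();
    seen.insert(root);
    while let Some(id) = todo.pop() {
        if !seen.insert(id) {
            continue; // already processed
        }
        // In the real code, allocations that already live in the tcx are skipped;
        // here a missing map entry plays that role.
        let Some(alloc) = allocs.get_mut(&id) else { continue };
        alloc.mutability = inner;
        todo.extend(alloc.pointees.iter().copied());
    }
}

fn main() {
    let mut allocs = HashMap::new();
    allocs.insert(0, Alloc { mutability: Mutability::Mut, pointees: vec![1, 2] });
    allocs.insert(1, Alloc { mutability: Mutability::Mut, pointees: vec![2] });
    allocs.insert(2, Alloc { mutability: Mutability::Mut, pointees: vec![] });
    patch_mutability(&mut allocs, 0, Mutability::Not, Mutability::Not);
    assert!(allocs.values().all(|a| a.mutability == Mutability::Not));
}
```

As in the real function, deciding `base_mutability`/`inner_mutability` per `InternKind` happens before the traversal; the toy simply takes them as parameters.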
3 changes: 2 additions & 1 deletion compiler/rustc_const_eval/src/interpret/mod.rs
Expand Up @@ -22,7 +22,8 @@ pub use rustc_middle::mir::interpret::*; // have all the `interpret` symbols in

pub use self::eval_context::{format_interp_error, Frame, FrameInfo, InterpCx, StackPopCleanup};
pub use self::intern::{
intern_const_alloc_for_constprop, intern_const_alloc_recursive, HasStaticRootDefId, InternKind,
intern_const_alloc_for_constprop, intern_const_alloc_recursive, patch_mutability_of_allocs,
HasStaticRootDefId, InternKind,
};
pub use self::machine::{compile_time_machine, AllocMap, Machine, MayLeak, StackPopJump};
pub use self::memory::{AllocKind, AllocRef, AllocRefMut, FnVal, Memory, MemoryKind};
(Diffs for the remaining changed files are not shown here.)
