Detect unaligned fields via aggregate.align < field.align, instead of a packed flag. #46436

Merged (7 commits) on Dec 17, 2017
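The diff below removes the `packed` flag from `Abi::Aggregate` and the `aligned` flag from by-ref values; whether a field access may be unaligned is instead derived by comparing alignments, as the title says. A minimal, self-contained sketch of that idea (the `Align` type here is a simplified stand-in, not rustc's `ty::layout::Align`):

```rust
// Sketch of the PR's core idea: rather than carrying a `packed: bool` flag,
// detect a potentially unaligned field by comparing the alignment promised by
// the enclosing aggregate with the alignment the field itself requires.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
struct Align {
    bytes: u64,
}

/// A field may be underaligned exactly when the aggregate guarantees less
/// alignment than the field's type needs.
fn field_may_be_unaligned(aggregate: Align, field: Align) -> bool {
    aggregate < field
}

fn main() {
    let packed_struct = Align { bytes: 1 }; // e.g. a #[repr(packed)] struct
    let u64_field = Align { bytes: 8 };
    assert!(field_may_be_unaligned(packed_struct, u64_field));

    let ordinary_struct = Align { bytes: 8 };
    assert!(!field_may_be_unaligned(ordinary_struct, u64_field));
    println!("alignment checks passed");
}
```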
src/librustc/mir/interpret/mod.rs (9 changes: 4 additions & 5 deletions)
@@ -10,14 +10,13 @@ mod value;

pub use self::error::{EvalError, EvalResult, EvalErrorKind};

pub use self::value::{PrimVal, PrimValKind, Value, Pointer, PtrAndAlign, bytes_to_f32, bytes_to_f64};
pub use self::value::{PrimVal, PrimValKind, Value, Pointer, bytes_to_f32, bytes_to_f64};

use std::collections::BTreeMap;
use ty::layout::HasDataLayout;
use std::fmt;
use ty::layout;
use mir;
use ty;
use ty::layout::{self, Align, HasDataLayout};
use middle::region;
use std::iter;

@@ -166,7 +165,7 @@ pub struct Allocation {
/// Denotes undefined memory. Reading from undefined memory is forbidden in miri
pub undef_mask: UndefMask,
/// The alignment of the allocation to detect unaligned reads.
pub align: u64,
pub align: Align,
}

impl Allocation {
@@ -177,7 +176,7 @@ impl Allocation {
bytes: slice.to_owned(),
relocations: BTreeMap::new(),
undef_mask,
align: 1,
align: Align::from_bytes(1, 1).unwrap(),
}
}
}
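A sketch of the `Allocation` change above, using simplified stand-ins for the rustc types: the allocation's alignment becomes a structured `Align` (ABI and preferred alignment) instead of a bare `u64`, and byte allocations are created with alignment 1. In rustc, `Align::from_bytes(abi, pref)` is fallible because alignments must be powers of two, which the mock mirrors:

```rust
// Simplified stand-ins, not the real `ty::layout::Align` / miri `Allocation`.
#[derive(Clone, Copy, Debug)]
struct Align {
    abi: u64,
    pref: u64,
}

impl Align {
    fn from_bytes(abi: u64, pref: u64) -> Result<Self, String> {
        if abi.is_power_of_two() && pref.is_power_of_two() {
            Ok(Align { abi, pref })
        } else {
            Err(format!("alignment must be a power of two: {}/{}", abi, pref))
        }
    }
}

struct Allocation {
    bytes: Vec<u8>,
    /// The alignment of the allocation, used to detect unaligned reads.
    align: Align,
}

impl Allocation {
    fn from_bytes(slice: &[u8]) -> Self {
        Allocation {
            bytes: slice.to_owned(),
            // Byte allocations (e.g. string literals) only need alignment 1.
            align: Align::from_bytes(1, 1).unwrap(),
        }
    }
}

fn main() {
    let alloc = Allocation::from_bytes(b"hello");
    println!("{} bytes, align {:?}", alloc.bytes.len(), alloc.align);
}
```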
src/librustc/mir/interpret/value.rs (30 changes: 2 additions & 28 deletions)
@@ -1,30 +1,11 @@
#![allow(unknown_lints)]

use ty::layout::HasDataLayout;
use ty::layout::{Align, HasDataLayout};

use super::{EvalResult, MemoryPointer, PointerArithmetic};
use syntax::ast::FloatTy;
use rustc_const_math::ConstFloat;

#[derive(Copy, Clone, Debug)]
pub struct PtrAndAlign {
pub ptr: Pointer,
/// Remember whether this place is *supposed* to be aligned.
pub aligned: bool,
}

impl PtrAndAlign {
pub fn to_ptr<'tcx>(self) -> EvalResult<'tcx, MemoryPointer> {
self.ptr.to_ptr()
}
pub fn offset<'tcx, C: HasDataLayout>(self, i: u64, cx: C) -> EvalResult<'tcx, Self> {
Ok(PtrAndAlign {
ptr: self.ptr.offset(i, cx)?,
aligned: self.aligned,
})
}
}

pub fn bytes_to_f32(bits: u128) -> ConstFloat {
ConstFloat {
bits,
@@ -50,7 +31,7 @@ pub fn bytes_to_f64(bits: u128) -> ConstFloat {
/// operations and fat pointers. This idea was taken from rustc's trans.
#[derive(Clone, Copy, Debug)]
pub enum Value {
ByRef(PtrAndAlign),
ByRef(Pointer, Align),
ByVal(PrimVal),
ByValPair(PrimVal, PrimVal),
}
@@ -182,13 +163,6 @@ pub enum PrimValKind {
Char,
}

impl<'a, 'tcx: 'a> Value {
#[inline]
pub fn by_ref(ptr: Pointer) -> Self {
Value::ByRef(PtrAndAlign { ptr, aligned: true })
}
}

impl<'tcx> PrimVal {
pub fn from_u128(n: u128) -> Self {
PrimVal::Bytes(n)
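A sketch of the `Value` change above, again with simplified stand-in types: `ByRef` no longer wraps a `PtrAndAlign` with an `aligned: bool` flag, it carries the pointer together with the alignment the place is known to have, and code that previously consulted the flag can instead compare that alignment with what an access requires:

```rust
// Stand-in types, not the real rustc/miri definitions.
#[derive(Clone, Copy, Debug)]
struct Pointer(u64);

#[derive(Clone, Copy, Debug, PartialEq, PartialOrd)]
struct Align(u64);

#[derive(Clone, Copy, Debug)]
enum Value {
    ByRef(Pointer, Align),
    ByVal(u128),
}

/// Where code previously consulted the `aligned` flag, it can now ask
/// whether the carried alignment is insufficient for the access.
fn needs_unaligned_access(value: Value, required: Align) -> bool {
    match value {
        Value::ByRef(_ptr, have) => have < required,
        Value::ByVal(_) => false,
    }
}

fn main() {
    let place = Value::ByRef(Pointer(0x1000), Align(1));
    assert!(needs_unaligned_access(place, Align(8)));
    assert!(!needs_unaligned_access(Value::ByVal(42), Align(8)));
}
```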
src/librustc/ty/context.rs (6 changes: 3 additions & 3 deletions)
@@ -895,7 +895,7 @@ pub struct InterpretInterner<'tcx> {
/// Allows checking whether a constant already has an allocation
///
/// The pointers are to the beginning of an `alloc_by_id` allocation
alloc_cache: FxHashMap<interpret::GlobalId<'tcx>, interpret::PtrAndAlign>,
alloc_cache: FxHashMap<interpret::GlobalId<'tcx>, interpret::Pointer>,

/// A cache for basic byte allocations keyed by their contents. This is used to deduplicate
/// allocations for string and bytestring literals.
@@ -931,14 +931,14 @@ impl<'tcx> InterpretInterner<'tcx> {
pub fn get_cached(
&self,
global_id: interpret::GlobalId<'tcx>,
) -> Option<interpret::PtrAndAlign> {
) -> Option<interpret::Pointer> {
self.alloc_cache.get(&global_id).cloned()
}

pub fn cache(
&mut self,
global_id: interpret::GlobalId<'tcx>,
ptr: interpret::PtrAndAlign,
ptr: interpret::Pointer,
) {
if let Some(old) = self.alloc_cache.insert(global_id, ptr) {
bug!("tried to cache {:?}, but was already existing as {:#?}", global_id, old);
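A sketch of the interner change above (`GlobalId` and `Pointer` are placeholder stand-ins, not the real rustc types): the constant-allocation cache now maps a global to a plain pointer, with the alignment recoverable from the allocation itself now that `Allocation` records an `Align`:

```rust
use std::collections::HashMap;

#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
struct GlobalId(u32);

#[derive(Clone, Copy, Debug)]
struct Pointer(u64);

#[derive(Default)]
struct InterpretInterner {
    alloc_cache: HashMap<GlobalId, Pointer>,
}

impl InterpretInterner {
    fn get_cached(&self, id: GlobalId) -> Option<Pointer> {
        self.alloc_cache.get(&id).cloned()
    }

    fn cache(&mut self, id: GlobalId, ptr: Pointer) {
        // Mirrors the `bug!` in the real code: caching the same global twice
        // indicates a logic error.
        if let Some(old) = self.alloc_cache.insert(id, ptr) {
            panic!("tried to cache {:?}, but {:?} already existed", id, old);
        }
    }
}

fn main() {
    let mut interner = InterpretInterner::default();
    interner.cache(GlobalId(0), Pointer(0x1000));
    assert!(interner.get_cached(GlobalId(0)).is_some());
}
```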
src/librustc/ty/layout.rs (63 changes: 11 additions & 52 deletions)
@@ -778,7 +778,6 @@ pub enum Abi {
Aggregate {
/// If true, the size is exact, otherwise it's only a lower bound.
sized: bool,
packed: bool
}
}

@@ -790,18 +789,7 @@ impl Abi {
Abi::Scalar(_) |
Abi::ScalarPair(..) |
Abi::Vector { .. } => false,
Abi::Aggregate { sized, .. } => !sized
}
}

/// Returns true if the fields of the layout are packed.
pub fn is_packed(&self) -> bool {
match *self {
Abi::Uninhabited |
Abi::Scalar(_) |
Abi::ScalarPair(..) |
Abi::Vector { .. } => false,
Abi::Aggregate { packed, .. } => packed
Abi::Aggregate { sized } => !sized
}
}
}
@@ -1077,10 +1065,7 @@ impl<'a, 'tcx> LayoutDetails {
}

let size = min_size.abi_align(align);
let mut abi = Abi::Aggregate {
sized,
packed
};
let mut abi = Abi::Aggregate { sized };

// Unpack newtype ABIs and find scalar pairs.
if sized && size.bytes() > 0 {
@@ -1254,10 +1239,7 @@ impl<'a, 'tcx> LayoutDetails {
stride: element.size,
count
},
abi: Abi::Aggregate {
sized: true,
packed: false
},
abi: Abi::Aggregate { sized: true },
align: element.align,
size
})
@@ -1270,10 +1252,7 @@ impl<'a, 'tcx> LayoutDetails {
stride: element.size,
count: 0
},
abi: Abi::Aggregate {
sized: false,
packed: false
},
abi: Abi::Aggregate { sized: false },
align: element.align,
size: Size::from_bytes(0)
})
@@ -1285,10 +1264,7 @@ impl<'a, 'tcx> LayoutDetails {
stride: Size::from_bytes(1),
count: 0
},
abi: Abi::Aggregate {
sized: false,
packed: false
},
abi: Abi::Aggregate { sized: false },
align: dl.i8_align,
size: Size::from_bytes(0)
})
@@ -1302,7 +1278,7 @@ impl<'a, 'tcx> LayoutDetails {
let mut unit = univariant_uninterned(&[], &ReprOptions::default(),
StructKind::AlwaysSized)?;
match unit.abi {
Abi::Aggregate { ref mut sized, .. } => *sized = false,
Abi::Aggregate { ref mut sized } => *sized = false,
_ => bug!()
}
tcx.intern_layout(unit)
@@ -1418,10 +1394,7 @@ impl<'a, 'tcx> LayoutDetails {
return Ok(tcx.intern_layout(LayoutDetails {
variants: Variants::Single { index: 0 },
fields: FieldPlacement::Union(variants[0].len()),
abi: Abi::Aggregate {
sized: true,
packed
},
abi: Abi::Aggregate { sized: true },
align,
size: size.abi_align(align)
}));
@@ -1525,15 +1498,10 @@ impl<'a, 'tcx> LayoutDetails {
let abi = if offset.bytes() == 0 && niche.value.size(dl) == size {
Abi::Scalar(niche.clone())
} else {
let mut packed = st[i].abi.is_packed();
if offset.abi_align(niche_align) != offset {
packed = true;
niche_align = dl.i8_align;
}
Abi::Aggregate {
sized: true,
packed
}
Abi::Aggregate { sized: true }
};
align = align.max(niche_align);

@@ -1681,10 +1649,7 @@ impl<'a, 'tcx> LayoutDetails {
let abi = if discr.value.size(dl) == size {
Abi::Scalar(discr.clone())
} else {
Abi::Aggregate {
sized: true,
packed: false
}
Abi::Aggregate { sized: true }
};
tcx.intern_layout(LayoutDetails {
variants: Variants::Tagged {
@@ -2277,19 +2242,14 @@ impl<'a, 'tcx> TyLayout<'tcx> {
self.abi.is_unsized()
}

/// Returns true if the fields of the layout are packed.
pub fn is_packed(&self) -> bool {
self.abi.is_packed()
}

/// Returns true if the type is a ZST and not unsized.
pub fn is_zst(&self) -> bool {
match self.abi {
Abi::Uninhabited => true,
Abi::Scalar(_) |
Abi::ScalarPair(..) |
Abi::Vector { .. } => false,
Abi::Aggregate { sized, .. } => sized && self.size.bytes() == 0
Abi::Aggregate { sized } => sized && self.size.bytes() == 0
}
}

@@ -2452,8 +2412,7 @@ impl<'gcx> HashStable<StableHashingContext<'gcx>> for Abi {
element.hash_stable(hcx, hasher);
count.hash_stable(hcx, hasher);
}
Aggregate { packed, sized } => {
packed.hash_stable(hcx, hasher);
Aggregate { sized } => {
sized.hash_stable(hcx, hasher);
}
}
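A sketch of what the layout.rs changes amount to, with simplified types rather than the real `ty::layout` API: `Abi::Aggregate` keeps only `sized`, the `is_packed()` query disappears, and a consumer that needs to know whether a field might be underaligned compares the aggregate's recorded alignment against that field's alignment:

```rust
// Stand-in types; the real Layout/Abi carry much more information.
#[derive(Clone, Copy, Debug, PartialEq, PartialOrd)]
struct Align(u64);

#[derive(Clone, Copy, Debug)]
enum Abi {
    Scalar,
    Aggregate { sized: bool }, // the `packed` flag is no longer tracked here
}

struct Layout {
    abi: Abi,
    align: Align,
    field_aligns: Vec<Align>,
}

impl Layout {
    fn is_unsized(&self) -> bool {
        match self.abi {
            Abi::Scalar => false,
            Abi::Aggregate { sized } => !sized,
        }
    }

    /// Stands in for the removed `is_packed()` query: a field is potentially
    /// underaligned exactly when the aggregate promises less alignment than
    /// that field requires.
    fn has_underaligned_field(&self) -> bool {
        self.field_aligns.iter().any(|&field| self.align < field)
    }
}

fn main() {
    let packed = Layout {
        abi: Abi::Aggregate { sized: true },
        align: Align(1),                        // e.g. #[repr(packed)]
        field_aligns: vec![Align(1), Align(8)], // a u8 field and a u64 field
    };
    assert!(!packed.is_unsized());
    assert!(packed.has_underaligned_field());
}
```

Presumably the benefit is that packedness no longer has to be kept in sync as separate state on the ABI: it falls out of the alignments that the layout already records.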