Rollup of 6 pull requests #110137

Merged · 30 commits · Apr 10, 2023

Commits
454bca5
Check `CastKind::Transmute` sizes in a better way
scottmcm Apr 6, 2023
1042b5d
Avoid some manual slice length calculation
scottmcm Apr 7, 2023
464a24e
compiletest: Use remap-path-prefix only in CI
jyn514 Apr 9, 2023
d757c4b
Handle not all immediates having `abi::Scalar`s
scottmcm Apr 6, 2023
2b43f25
Fix `x check --stage 1` when download-rustc is enabled
jyn514 Apr 9, 2023
f058d05
Some simple `clippy::perf` fixes
Noratrieb Apr 9, 2023
54e1309
Use HashMap entry APIs more
Noratrieb Apr 9, 2023
f00366d
Box large enum variants
Noratrieb Apr 9, 2023
968be98
Allow `clippy::from_mut_ref`
Noratrieb Apr 9, 2023
0a0968b
Allow `modulo_one` on function using cfg consts
Noratrieb Apr 9, 2023
07cd382
Small clippy::correctness fixes
Noratrieb Apr 9, 2023
1c75724
Properly compare previously shadowed values
Noratrieb Apr 9, 2023
5a90de8
Delete useless loop
Noratrieb Apr 9, 2023
6fceb0f
Improve `Allocation::hash`
Noratrieb Apr 9, 2023
81c320e
Fix some clippy::complexity
Noratrieb Apr 9, 2023
4b4948c
Remove identity casts
Noratrieb Apr 9, 2023
e5defd0
Convert manual loop into `while let`
Noratrieb Apr 9, 2023
5853c28
Simplify Abi::fmt
Noratrieb Apr 9, 2023
cab94d2
fixup! Improve `Allocation::hash`
Noratrieb Apr 9, 2023
73417b1
Inline format_args
Noratrieb Apr 9, 2023
9fc1555
Remove turbofish
Noratrieb Apr 9, 2023
2b0f572
prioritize param-env candidates
lcnr Mar 29, 2023
2186847
move `structural_traits` into `assembly`
lcnr Mar 29, 2023
3fab7f7
review + some small stuff
lcnr Mar 30, 2023
c30d7e9
Rollup merge of #109724 - lcnr:prioritize-env, r=compiler-errors
Dylan-DPC Apr 10, 2023
b872552
Rollup merge of #110021 - scottmcm:fix-110005, r=compiler-errors
Dylan-DPC Apr 10, 2023
b6633ff
Rollup merge of #110044 - scottmcm:more-size-of-val, r=ChrisDenton
Dylan-DPC Apr 10, 2023
e327487
Rollup merge of #110115 - jyn514:remap-path-prefix-ci, r=compiler-errors
Dylan-DPC Apr 10, 2023
6f2fd3e
Rollup merge of #110121 - jyn514:check-stage1, r=ozkanonur
Dylan-DPC Apr 10, 2023
97921ab
Rollup merge of #110124 - Nilstrieb:📎-told-me-so, r=compiler-errors
Dylan-DPC Apr 10, 2023
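A note on the clippy batch above: most of those commits are mechanical idiom fixes rather than behavior changes. As one standalone sketch (illustrative, not code from this rollup), the entry-API cleanup replaces a check-then-insert double lookup:

use std::collections::HashMap;

fn main() {
    let mut counts: HashMap<&str, u32> = HashMap::new();
    for word in ["a", "b", "a"] {
        // One hash lookup via the entry API, instead of
        // `contains_key` + `insert` + `get_mut`.
        *counts.entry(word).or_insert(0) += 1;
    }
    assert_eq!(counts["a"], 2);
}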
2 changes: 1 addition & 1 deletion compiler/rustc_abi/src/lib.rs
@@ -1176,7 +1176,7 @@ impl FieldsShape {

/// Gets source indices of the fields by increasing offsets.
#[inline]
- pub fn index_by_increasing_offset<'a>(&'a self) -> impl Iterator<Item = usize> + 'a {
+ pub fn index_by_increasing_offset(&self) -> impl Iterator<Item = usize> + '_ {
let mut inverse_small = [0u8; 64];
let mut inverse_big = IndexVec::new();
let use_small = self.count() <= inverse_small.len();
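The signature change above is pure syntax: the anonymous `'_` lifetime ties the returned iterator to `&self` without naming a lifetime parameter. A standalone sketch of the idiom (illustrative types, not the rustc ones):

struct Fields(Vec<usize>);

impl Fields {
    // Equivalent to: fn indices<'a>(&'a self) -> impl Iterator<Item = usize> + 'a
    fn indices(&self) -> impl Iterator<Item = usize> + '_ {
        self.0.iter().copied()
    }
}

fn main() {
    let f = Fields(vec![2, 0, 1]);
    assert_eq!(f.indices().collect::<Vec<_>>(), vec![2, 0, 1]);
}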
9 changes: 9 additions & 0 deletions compiler/rustc_arena/src/lib.rs
@@ -22,6 +22,7 @@
#![feature(strict_provenance)]
#![deny(rustc::untranslatable_diagnostic)]
#![deny(rustc::diagnostic_outside_of_impl)]
+ #![allow(clippy::mut_from_ref)] // Arena allocators are one of the places where this pattern is fine.

use smallvec::SmallVec;

@@ -568,7 +569,9 @@ pub macro declare_arena([$($a:tt $name:ident: $ty:ty,)*]) {
}

pub trait ArenaAllocatable<'tcx, C = rustc_arena::IsNotCopy>: Sized {
+ #[allow(clippy::mut_from_ref)]
fn allocate_on<'a>(self, arena: &'a Arena<'tcx>) -> &'a mut Self;
+ #[allow(clippy::mut_from_ref)]
fn allocate_from_iter<'a>(
arena: &'a Arena<'tcx>,
iter: impl ::std::iter::IntoIterator<Item = Self>,
@@ -578,10 +581,12 @@ pub macro declare_arena([$($a:tt $name:ident: $ty:ty,)*]) {
// Any type that impls `Copy` can be arena-allocated in the `DroplessArena`.
impl<'tcx, T: Copy> ArenaAllocatable<'tcx, rustc_arena::IsCopy> for T {
#[inline]
+ #[allow(clippy::mut_from_ref)]
fn allocate_on<'a>(self, arena: &'a Arena<'tcx>) -> &'a mut Self {
arena.dropless.alloc(self)
}
#[inline]
+ #[allow(clippy::mut_from_ref)]
fn allocate_from_iter<'a>(
arena: &'a Arena<'tcx>,
iter: impl ::std::iter::IntoIterator<Item = Self>,
@@ -601,6 +606,7 @@ pub macro declare_arena([$($a:tt $name:ident: $ty:ty,)*]) {
}

#[inline]
+ #[allow(clippy::mut_from_ref)]
fn allocate_from_iter<'a>(
arena: &'a Arena<'tcx>,
iter: impl ::std::iter::IntoIterator<Item = Self>,
@@ -616,19 +622,22 @@ pub macro declare_arena([$($a:tt $name:ident: $ty:ty,)*]) {

impl<'tcx> Arena<'tcx> {
#[inline]
+ #[allow(clippy::mut_from_ref)]
pub fn alloc<T: ArenaAllocatable<'tcx, C>, C>(&self, value: T) -> &mut T {
value.allocate_on(self)
}

// Any type that impls `Copy` can have slices be arena-allocated in the `DroplessArena`.
#[inline]
+ #[allow(clippy::mut_from_ref)]
pub fn alloc_slice<T: ::std::marker::Copy>(&self, value: &[T]) -> &mut [T] {
if value.is_empty() {
return &mut [];
}
self.dropless.alloc_slice(value)
}

+ #[allow(clippy::mut_from_ref)]
pub fn alloc_from_iter<'a, T: ArenaAllocatable<'tcx, C>, C>(
&'a self,
iter: impl ::std::iter::IntoIterator<Item = T>,
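`clippy::mut_from_ref` flags any `&self -> &mut T` signature, but arena allocators are sound: each call hands out a freshly allocated slot that aliases nothing else. A toy model of why the pattern is fine, using `Box::leak` in place of the real arena internals:

struct ToyArena;

impl ToyArena {
    #[allow(clippy::mut_from_ref)]
    fn alloc<T>(&self, value: T) -> &mut T {
        // Each call creates a brand-new allocation, so the `&mut` we hand
        // out can never alias another outstanding reference. (A real arena
        // ties the lifetime to itself and frees on drop; leaking just keeps
        // this sketch trivially sound.)
        Box::leak(Box::new(value))
    }
}

fn main() {
    let arena = ToyArena;
    let x = arena.alloc(41);
    *x += 1;
    assert_eq!(*x, 42);
}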
2 changes: 1 addition & 1 deletion compiler/rustc_ast_passes/src/ast_validation.rs
@@ -691,7 +691,7 @@ fn validate_generic_param_order(
GenericParamKind::Lifetime => (),
GenericParamKind::Const { ty: _, kw_span: _, default: Some(default) } => {
ordered_params += " = ";
- ordered_params += &pprust::expr_to_string(&*default.value);
+ ordered_params += &pprust::expr_to_string(&default.value);
}
GenericParamKind::Const { ty: _, kw_span: _, default: None } => (),
}
7 changes: 5 additions & 2 deletions compiler/rustc_ast_passes/src/feature_gate.rs
@@ -404,11 +404,14 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
);
} else {
// And if it isn't, cancel the early-pass warning.
- self.sess
+ if let Some(err) = self
+ .sess
.parse_sess
.span_diagnostic
.steal_diagnostic(e.span, StashKey::EarlySyntaxWarning)
- .map(|err| err.cancel());
+ {
+ err.cancel()
+ }
}
}
ast::ExprKind::TryBlock(_) => {
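This hunk, like the `pprust` one below, swaps `Option::map` used purely for a side effect for `if let`, per clippy's `option_map_unit_fn`. A standalone sketch:

fn cancel(warning: &str) {
    println!("cancelled: {warning}");
}

fn main() {
    let stashed: Option<&str> = Some("early-pass warning");
    // Instead of: stashed.map(|w| cancel(w));
    if let Some(w) = stashed {
        cancel(w);
    }
}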
6 changes: 4 additions & 2 deletions compiler/rustc_ast_pretty/src/pprust/state.rs
@@ -686,7 +686,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::DerefMut
fn bclose_maybe_open(&mut self, span: rustc_span::Span, empty: bool, close_box: bool) {
let has_comment = self.maybe_print_comment(span.hi());
if !empty || has_comment {
- self.break_offset_if_not_bol(1, -(INDENT_UNIT as isize));
+ self.break_offset_if_not_bol(1, -INDENT_UNIT);
}
self.word("}");
if close_box {
@@ -988,7 +988,9 @@ impl<'a> State<'a> {

pub fn print_assoc_constraint(&mut self, constraint: &ast::AssocConstraint) {
self.print_ident(constraint.ident);
- constraint.gen_args.as_ref().map(|args| self.print_generic_args(args, false));
+ if let Some(args) = constraint.gen_args.as_ref() {
+ self.print_generic_args(args, false)
+ }
self.space();
match &constraint.kind {
ast::AssocConstraintKind::Equality { term } => {
25 changes: 25 additions & 0 deletions compiler/rustc_codegen_ssa/src/mir/operand.rs
@@ -259,6 +259,31 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
}

impl<'a, 'tcx, V: CodegenObject> OperandValue<V> {
+ /// Returns an `OperandValue` that's generally UB to use in any way.
+ ///
+ /// Depending on the `layout`, returns an `Immediate` or `Pair` containing
+ /// poison value(s), or a `Ref` containing a poison pointer.
+ ///
+ /// Supports sized types only.
+ pub fn poison<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
+ bx: &mut Bx,
+ layout: TyAndLayout<'tcx>,
+ ) -> OperandValue<V> {
+ assert!(layout.is_sized());
+ if bx.cx().is_backend_immediate(layout) {
+ let ibty = bx.cx().immediate_backend_type(layout);
+ OperandValue::Immediate(bx.const_poison(ibty))
+ } else if bx.cx().is_backend_scalar_pair(layout) {
+ let ibty0 = bx.cx().scalar_pair_element_backend_type(layout, 0, true);
+ let ibty1 = bx.cx().scalar_pair_element_backend_type(layout, 1, true);
+ OperandValue::Pair(bx.const_poison(ibty0), bx.const_poison(ibty1))
+ } else {
+ let bty = bx.cx().backend_type(layout);
+ let ptr_bty = bx.cx().type_ptr_to(bty);
+ OperandValue::Ref(bx.const_poison(ptr_bty), None, layout.align.abi)
+ }
+ }

pub fn store<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
self,
bx: &mut Bx,
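The new `poison` helper picks one of the three `OperandValue` shapes purely from the layout. A simplified model of that selection logic (the `Layout` enum here is an illustration, not rustc's `TyAndLayout`):

#[derive(Debug)]
enum Shape {
    Immediate(&'static str),
    Pair(&'static str, &'static str),
    Ref(&'static str),
}

enum Layout { Immediate, ScalarPair, InMemory }

fn poison(layout: Layout) -> Shape {
    match layout {
        // One backend value: a single poison immediate.
        Layout::Immediate => Shape::Immediate("poison"),
        // Two backend values: poison in each slot.
        Layout::ScalarPair => Shape::Pair("poison", "poison"),
        // Everything else lives in memory: a poison pointer.
        Layout::InMemory => Shape::Ref("poison ptr"),
    }
}

fn main() {
    for layout in [Layout::Immediate, Layout::ScalarPair, Layout::InMemory] {
        println!("{:?}", poison(layout));
    }
}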
107 changes: 71 additions & 36 deletions compiler/rustc_codegen_ssa/src/mir/rvalue.rs
@@ -158,17 +158,6 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
debug_assert!(src.layout.is_sized());
debug_assert!(dst.layout.is_sized());

- if src.layout.size != dst.layout.size
- || src.layout.abi.is_uninhabited()
- || dst.layout.abi.is_uninhabited()
- {
- // In all of these cases it's UB to run this transmute, but that's
- // known statically so might as well trap for it, rather than just
- // making it unreachable.
- bx.abort();
- return;
- }

if let Some(val) = self.codegen_transmute_operand(bx, src, dst.layout) {
val.store(bx, dst);
return;
@@ -202,8 +191,21 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
operand: OperandRef<'tcx, Bx::Value>,
cast: TyAndLayout<'tcx>,
) -> Option<OperandValue<Bx::Value>> {
- // Callers already checked that the layout sizes match
- debug_assert_eq!(operand.layout.size, cast.size);
+ // Check for transmutes that are always UB.
+ if operand.layout.size != cast.size
+ || operand.layout.abi.is_uninhabited()
+ || cast.abi.is_uninhabited()
+ {
+ if !operand.layout.abi.is_uninhabited() {
+ // Since this is known statically and the input could have existed
+ // without already having hit UB, might as well trap for it.
+ bx.abort();
+ }
+
+ // Because this transmute is UB, return something easy to generate,
+ // since it's fine that later uses of the value are probably UB.
+ return Some(OperandValue::poison(bx, cast));
+ }

let operand_kind = self.value_kind(operand.layout);
let cast_kind = self.value_kind(cast);
@@ -222,10 +224,20 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
bug!("Found {operand_kind:?} for operand {operand:?}");
};
if let OperandValueKind::Immediate(out_scalar) = cast_kind {
- let cast_bty = bx.backend_type(cast);
- Some(OperandValue::Immediate(Self::transmute_immediate(
- bx, imm, in_scalar, out_scalar, cast_bty,
- )))
+ match (in_scalar, out_scalar) {
+ (ScalarOrZst::Zst, ScalarOrZst::Zst) => {
+ Some(OperandRef::new_zst(bx, cast).val)
+ }
+ (ScalarOrZst::Scalar(in_scalar), ScalarOrZst::Scalar(out_scalar))
+ if in_scalar.size(self.cx) == out_scalar.size(self.cx) =>
+ {
+ let cast_bty = bx.backend_type(cast);
+ Some(OperandValue::Immediate(
+ self.transmute_immediate(bx, imm, in_scalar, out_scalar, cast_bty),
+ ))
+ }
+ _ => None,
+ }
} else {
None
}
@@ -234,12 +246,15 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
let OperandValueKind::Pair(in_a, in_b) = operand_kind else {
bug!("Found {operand_kind:?} for operand {operand:?}");
};
- if let OperandValueKind::Pair(out_a, out_b) = cast_kind {
+ if let OperandValueKind::Pair(out_a, out_b) = cast_kind
+ && in_a.size(self.cx) == out_a.size(self.cx)
+ && in_b.size(self.cx) == out_b.size(self.cx)
+ {
let out_a_ibty = bx.scalar_pair_element_backend_type(cast, 0, false);
let out_b_ibty = bx.scalar_pair_element_backend_type(cast, 1, false);
Some(OperandValue::Pair(
- Self::transmute_immediate(bx, imm_a, in_a, out_a, out_a_ibty),
- Self::transmute_immediate(bx, imm_b, in_b, out_b, out_b_ibty),
+ self.transmute_immediate(bx, imm_a, in_a, out_a, out_a_ibty),
+ self.transmute_immediate(bx, imm_b, in_b, out_b, out_b_ibty),
))
} else {
None
@@ -254,12 +269,15 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
/// `to_backend_ty` must be the *non*-immediate backend type (so it will be
/// `i8`, not `i1`, for `bool`-like types.)
fn transmute_immediate(
+ &self,
bx: &mut Bx,
mut imm: Bx::Value,
from_scalar: abi::Scalar,
to_scalar: abi::Scalar,
to_backend_ty: Bx::Type,
) -> Bx::Value {
+ debug_assert_eq!(from_scalar.size(self.cx), to_scalar.size(self.cx));

use abi::Primitive::*;
imm = bx.from_immediate(imm);
imm = match (from_scalar.primitive(), to_scalar.primitive()) {
@@ -831,14 +849,6 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
let operand_ty = operand.ty(self.mir, self.cx.tcx());
let cast_layout = self.cx.layout_of(self.monomorphize(cast_ty));
let operand_layout = self.cx.layout_of(self.monomorphize(operand_ty));
- if operand_layout.size != cast_layout.size
- || operand_layout.abi.is_uninhabited()
- || cast_layout.abi.is_uninhabited()
- {
- // Send UB cases to the full form so the operand version can
- // `bitcast` without worrying about malformed IR.
- return false;
- }

match (self.value_kind(operand_layout), self.value_kind(cast_layout)) {
// Can always load from a pointer as needed
@@ -847,9 +857,12 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
// Need to generate an `alloc` to get a pointer from an immediate
(OperandValueKind::Immediate(..) | OperandValueKind::Pair(..), OperandValueKind::Ref) => false,

- // When we have scalar immediates, we can convert them as needed
- (OperandValueKind::Immediate(..), OperandValueKind::Immediate(..)) |
- (OperandValueKind::Pair(..), OperandValueKind::Pair(..)) => true,
+ // When we have scalar immediates, we can only convert things
+ // where the sizes match, to avoid endianness questions.
+ (OperandValueKind::Immediate(a), OperandValueKind::Immediate(b)) =>
+ a.size(self.cx) == b.size(self.cx),
+ (OperandValueKind::Pair(a0, a1), OperandValueKind::Pair(b0, b1)) =>
+ a0.size(self.cx) == b0.size(self.cx) && a1.size(self.cx) == b1.size(self.cx),

// Send mixings between scalars and pairs through the memory route
// FIXME: Maybe this could use insertvalue/extractvalue instead?
@@ -887,13 +900,18 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
if self.cx.is_backend_immediate(layout) {
debug_assert!(!self.cx.is_backend_scalar_pair(layout));
OperandValueKind::Immediate(match layout.abi {
- abi::Abi::Scalar(s) => s,
- abi::Abi::Vector { element, .. } => element,
- x => bug!("Couldn't translate {x:?} as backend immediate"),
+ abi::Abi::Scalar(s) => ScalarOrZst::Scalar(s),
+ abi::Abi::Vector { element, .. } => ScalarOrZst::Scalar(element),
+ _ if layout.is_zst() => ScalarOrZst::Zst,
+ x => span_bug!(self.mir.span, "Couldn't translate {x:?} as backend immediate"),
})
} else if self.cx.is_backend_scalar_pair(layout) {
let abi::Abi::ScalarPair(s1, s2) = layout.abi else {
bug!("Couldn't translate {:?} as backend scalar pair", layout.abi)
span_bug!(
self.mir.span,
"Couldn't translate {:?} as backend scalar pair",
layout.abi,
);
};
OperandValueKind::Pair(s1, s2)
} else {
@@ -902,9 +920,26 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
}
}

+ /// The variants of this match [`OperandValue`], giving details about the
+ /// backend values that will be held in that other type.
+ #[derive(Debug, Copy, Clone)]
enum OperandValueKind {
Ref,
- Immediate(abi::Scalar),
+ Immediate(ScalarOrZst),
Pair(abi::Scalar, abi::Scalar),
}

+ #[derive(Debug, Copy, Clone)]
+ enum ScalarOrZst {
+ Zst,
+ Scalar(abi::Scalar),
+ }
+
+ impl ScalarOrZst {
+ pub fn size(self, cx: &impl abi::HasDataLayout) -> abi::Size {
+ match self {
+ ScalarOrZst::Zst => abi::Size::ZERO,
+ ScalarOrZst::Scalar(s) => s.size(cx),
+ }
+ }
+ }
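The relocated check catches transmutes that are statically always UB: mismatched sizes, or an uninhabited type on either side. `mem::transmute` rejects size mismatches at the surface level, but not uninhabited targets; a hedged illustration of such an always-UB cast (it compiles, and is deliberately never called):

use std::mem;

enum Never {} // uninhabited: no value of this type can exist

// Both types are zero-sized, so this transmute typechecks, yet *executing*
// it would be immediate UB -- the case the new code turns into an abort
// plus a poison value.
unsafe fn make_never(u: ()) -> Never {
    mem::transmute::<(), Never>(u)
}

fn main() {
    // Take the function's address, but never call it.
    let _ = make_never as unsafe fn(()) -> Never;
}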
10 changes: 2 additions & 8 deletions compiler/rustc_data_structures/src/graph/implementation/mod.rs
@@ -206,17 +206,11 @@ impl<N: Debug, E: Debug> Graph<N, E> {
AdjacentEdges { graph: self, direction, next: first_edge }
}

- pub fn successor_nodes<'a>(
- &'a self,
- source: NodeIndex,
- ) -> impl Iterator<Item = NodeIndex> + 'a {
+ pub fn successor_nodes(&self, source: NodeIndex) -> impl Iterator<Item = NodeIndex> + '_ {
self.outgoing_edges(source).targets()
}

- pub fn predecessor_nodes<'a>(
- &'a self,
- target: NodeIndex,
- ) -> impl Iterator<Item = NodeIndex> + 'a {
+ pub fn predecessor_nodes(&self, target: NodeIndex) -> impl Iterator<Item = NodeIndex> + '_ {
self.incoming_edges(target).sources()
}

2 changes: 1 addition & 1 deletion compiler/rustc_data_structures/src/memmap.rs
@@ -40,7 +40,7 @@ impl Deref for Mmap {

impl AsRef<[u8]> for Mmap {
fn as_ref(&self) -> &[u8] {
- &*self.0
+ &self.0
}
}

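`&*self.0` and `&self.0` yield the same `&[u8]` here: the inner type derefs to `[u8]`, and deref coercion inserts the `*` when the target type is known. A standalone sketch of the same cleanup:

struct Bytes(Vec<u8>);

impl AsRef<[u8]> for Bytes {
    fn as_ref(&self) -> &[u8] {
        // `&*self.0` also works, but the explicit reborrow is redundant:
        // &Vec<u8> coerces to &[u8] on its own.
        &self.0
    }
}

fn main() {
    let b = Bytes(vec![1, 2, 3]);
    assert_eq!(b.as_ref(), &[1, 2, 3]);
}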
2 changes: 1 addition & 1 deletion compiler/rustc_data_structures/src/profiling.rs
@@ -778,7 +778,7 @@ pub fn print_time_passes_entry(
"rss_start": start_rss,
"rss_end": end_rss,
});
eprintln!("time: {}", json.to_string());
eprintln!("time: {json}");
return;
}
TimePassesFormat::Text => (),
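Two lints meet in this one line: calling `to_string()` on a value that is only being formatted is wasted work, and the variable can be captured directly in the format string. A standalone sketch:

fn main() {
    let json = r#"{"pass":"typeck","time_ms":42}"#;
    // Before: eprintln!("time: {}", json.to_string());
    eprintln!("time: {json}");
}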
1 change: 1 addition & 0 deletions compiler/rustc_data_structures/src/sharded.rs
@@ -140,6 +140,7 @@ pub fn make_hash<K: Hash + ?Sized>(val: &K) -> u64 {
/// `hash` can be computed with any hasher, so long as that hasher is used
/// consistently for each `Sharded` instance.
#[inline]
+ #[allow(clippy::modulo_one)]
pub fn get_shard_index_by_hash(hash: u64) -> usize {
let hash_len = mem::size_of::<usize>();
// Ignore the top 7 bits as hashbrown uses these and get the next SHARD_BITS highest bits.
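For context, the function below this attribute derives a shard index from the upper bits of the hash. A standalone version, assuming `SHARD_BITS = 5`; in non-parallel rustc builds `SHARD_BITS` is 0, so `SHARDS == 1` and the modulo would trip `clippy::modulo_one`, hence the allow:

const SHARD_BITS: usize = 5; // illustrative; rustc uses 0 when not parallel
const SHARDS: usize = 1 << SHARD_BITS;

fn get_shard_index_by_hash(hash: u64) -> usize {
    let hash_len = std::mem::size_of::<usize>();
    // Skip the top 7 bits (hashbrown reserves those), then take the next
    // SHARD_BITS bits as the shard index.
    let bits = (hash >> (hash_len * 8 - 7 - SHARD_BITS)) as usize;
    // With SHARD_BITS = 0 this would be `x % 1`, hence the allow above.
    bits % SHARDS
}

fn main() {
    assert!(get_shard_index_by_hash(0xDEAD_BEEF_DEAD_BEEF) < SHARDS);
}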
4 changes: 2 additions & 2 deletions compiler/rustc_data_structures/src/stable_hasher.rs
@@ -312,14 +312,14 @@ impl<CTX> HashStable<CTX> for ::std::num::NonZeroUsize {

impl<CTX> HashStable<CTX> for f32 {
fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) {
- let val: u32 = unsafe { ::std::mem::transmute(*self) };
+ let val: u32 = self.to_bits();
val.hash_stable(ctx, hasher);
}
}

impl<CTX> HashStable<CTX> for f64 {
fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) {
- let val: u64 = unsafe { ::std::mem::transmute(*self) };
+ let val: u64 = self.to_bits();
val.hash_stable(ctx, hasher);
}
}
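`to_bits` is the safe equivalent of the replaced `transmute`: it returns the raw IEEE-754 bit pattern. A quick standalone check:

fn main() {
    let f = -0.0f64;
    let safe = f.to_bits();
    let via_transmute: u64 = unsafe { std::mem::transmute(f) };
    assert_eq!(safe, via_transmute);          // identical bits, no unsafe needed
    assert_eq!(safe, 0x8000_0000_0000_0000);  // just the sign bit for -0.0
}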