Rollup of 4 pull requests #65406

Closed
wants to merge 16 commits into from
2 changes: 1 addition & 1 deletion src/librustc/hir/map/def_collector.rs
@@ -90,7 +90,7 @@ impl<'a> DefCollector<'a> {
}
}

pub fn visit_macro_invoc(&mut self, id: NodeId) {
fn visit_macro_invoc(&mut self, id: NodeId) {
self.definitions.set_invocation_parent(id.placeholder_to_expn_id(), self.parent_def);
}
}
26 changes: 16 additions & 10 deletions src/librustc/infer/lexical_region_resolve/mod.rs
@@ -304,7 +304,7 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> {
}

fn expansion(&self, var_values: &mut LexicalRegionResolutions<'tcx>) {
self.iterate_until_fixed_point("Expansion", |constraint| {
self.iterate_until_fixed_point(|constraint| {
debug!("expansion: constraint={:?}", constraint);
let (a_region, b_vid, b_data, retain) = match *constraint {
Constraint::RegSubVar(a_region, b_vid) => {
@@ -360,13 +360,21 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> {
match *b_data {
VarValue::Value(cur_region) => {
// Identical scopes can show up quite often, if the fixed point
// iteration converges slowly, skip them
// iteration converges slowly. Skip them. This is purely an
// optimization.
if let (ReScope(a_scope), ReScope(cur_scope)) = (a_region, cur_region) {
if a_scope == cur_scope {
return false;
}
}

// This is a specialized version of the `lub_concrete_regions`
// check below for a common case, here purely as an
// optimization.
if let ReEmpty = a_region {
return false;
}

let mut lub = self.lub_concrete_regions(a_region, cur_region);
if lub == cur_region {
return false;
@@ -407,8 +415,6 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> {

/// Returns the smallest region `c` such that `a <= c` and `b <= c`.
fn lub_concrete_regions(&self, a: Region<'tcx>, b: Region<'tcx>) -> Region<'tcx> {
let tcx = self.tcx();

match (a, b) {
(&ty::ReClosureBound(..), _)
| (_, &ty::ReClosureBound(..))
@@ -468,15 +474,15 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> {

// otherwise, we don't know what the free region is,
// so we must conservatively say the LUB is static:
tcx.lifetimes.re_static
self.tcx().lifetimes.re_static
}

(&ReScope(a_id), &ReScope(b_id)) => {
// The region corresponding to an outer block is a
// subtype of the region corresponding to an inner
// block.
let lub = self.region_rels.region_scope_tree.nearest_common_ancestor(a_id, b_id);
tcx.mk_region(ReScope(lub))
self.tcx().mk_region(ReScope(lub))
}

(&ReEarlyBound(_), &ReEarlyBound(_))
@@ -490,7 +496,7 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> {
if a == b {
a
} else {
tcx.lifetimes.re_static
self.tcx().lifetimes.re_static
}
}
}
@@ -860,7 +866,7 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> {
}
}

fn iterate_until_fixed_point<F>(&self, tag: &str, mut body: F)
fn iterate_until_fixed_point<F>(&self, mut body: F)
where
F: FnMut(&Constraint<'tcx>) -> (bool, bool),
{
@@ -870,7 +876,7 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> {
while changed {
changed = false;
iteration += 1;
debug!("---- {} Iteration {}{}", "#", tag, iteration);
debug!("---- Expansion iteration {}", iteration);
constraints.retain(|constraint| {
let (edge_changed, retain) = body(constraint);
if edge_changed {
@@ -880,7 +886,7 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> {
retain
});
}
debug!("---- {} Complete after {} iteration(s)", tag, iteration);
debug!("---- Expansion complete after {} iteration(s)", iteration);
}

fn bound_is_met(
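The retained-constraint loop that `expansion` drives is easier to see in isolation. Below is a minimal, self-contained sketch of the same fixed-point pattern; the toy `constraints` and `body` here are hypothetical stand-ins, not the compiler's `Constraint` or `VarValue` types.

```rust
// Standalone sketch of the fixed-point iteration used by `expansion`:
// apply `body` to every constraint until no edge changes, and drop
// constraints that report they never need to be revisited.
fn iterate_until_fixed_point<C>(
    mut constraints: Vec<C>,
    mut body: impl FnMut(&C) -> (bool, bool), // returns (edge_changed, retain)
) {
    let mut changed = true;
    let mut iteration = 0;
    while changed {
        changed = false;
        iteration += 1;
        constraints.retain(|constraint| {
            let (edge_changed, retain) = body(constraint);
            if edge_changed {
                changed = true;
            }
            retain
        });
    }
    println!("complete after {} iteration(s)", iteration);
}

fn main() {
    // Toy use: each "constraint" is a lower bound on `max`; iterate until the
    // running maximum stops growing. A bound never needs revisiting once seen,
    // because `max` only ever grows, so `retain` is always false here.
    let mut max = 0;
    iterate_until_fixed_point(vec![3_i32, 1, 7, 5], |&bound| {
        let changed = bound > max;
        if changed {
            max = bound;
        }
        (changed, false)
    });
    assert_eq!(max, 7);
}
```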
6 changes: 0 additions & 6 deletions src/librustc/mir/interpret/error.rs
@@ -389,10 +389,6 @@ pub enum UnsupportedOpInfo<'tcx> {
/// Free-form case. Only for errors that are never caught!
Unsupported(String),

/// FIXME(#64506) Error used to work around accessing projections of
/// uninhabited types.
UninhabitedValue,

// -- Everything below is not categorized yet --
FunctionAbiMismatch(Abi, Abi),
FunctionArgMismatch(Ty<'tcx>, Ty<'tcx>),
@@ -556,8 +552,6 @@ impl fmt::Debug for UnsupportedOpInfo<'tcx> {
not a power of two"),
Unsupported(ref msg) =>
write!(f, "{}", msg),
UninhabitedValue =>
write!(f, "tried to use an uninhabited value"),
}
}
}
10 changes: 7 additions & 3 deletions src/librustc/ty/layout.rs
@@ -824,10 +824,14 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> {
});
(present_variants.next(), present_variants.next())
};
if present_first.is_none() {
let present_first = match present_first {
present_first @ Some(_) => present_first,
// Uninhabited because it has no variants, or only absent ones.
return tcx.layout_raw(param_env.and(tcx.types.never));
}
None if def.is_enum() => return tcx.layout_raw(param_env.and(tcx.types.never)),
// if it's a struct, still compute a layout so that we can still compute the
// field offsets
None => Some(VariantIdx::new(0)),
};

let is_struct = !def.is_enum() ||
// Only one variant is present.
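The struct case added here is motivated by projections into uninhabited types (#64506). A hedged illustration of the kind of user type involved; `Never` and `PartlyUninhabited` are made-up names, not anything from the compiler.

```rust
// Why a struct with no "present" (inhabited) variant still needs a layout:
// code may project to its other fields even though a value of the struct can
// never be constructed, so field offsets must exist.
enum Never {}

struct PartlyUninhabited {
    tag: u32,
    imposs: Never,
}

// `&x.tag` requires the offset of `tag`, so `PartlyUninhabited` still gets a
// computed layout. An enum with no inhabited variants, by contrast, can simply
// reuse the layout of `!`, which is what the `def.is_enum()` arm does.
fn project(x: &PartlyUninhabited) -> &u32 {
    &x.tag
}

fn main() {
    // Nothing to run: `PartlyUninhabited` can never be constructed.
    let _ = project;
}
```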
13 changes: 5 additions & 8 deletions src/librustc_mir/interpret/place.rs
@@ -9,7 +9,7 @@ use rustc::mir;
use rustc::mir::interpret::truncate;
use rustc::ty::{self, Ty};
use rustc::ty::layout::{
self, Size, Abi, Align, LayoutOf, TyLayout, HasDataLayout, VariantIdx, PrimitiveExt
self, Size, Align, LayoutOf, TyLayout, HasDataLayout, VariantIdx, PrimitiveExt
};
use rustc::ty::TypeFoldable;

@@ -377,20 +377,17 @@ where
layout::FieldPlacement::Array { stride, .. } => {
let len = base.len(self)?;
if field >= len {
// This can be violated because this runs during promotion on code where the
// type system has not yet ensured that such things don't happen.
// This can be violated because the index (field) can be a runtime value
// provided by the user.
debug!("tried to access element {} of array/slice with length {}", field, len);
throw_panic!(BoundsCheck { len, index: field });
}
stride * field
}
layout::FieldPlacement::Union(count) => {
// FIXME(#64506) `UninhabitedValue` can be removed when this issue is resolved
if base.layout.abi == Abi::Uninhabited {
throw_unsup!(UninhabitedValue);
}
assert!(field < count as u64,
"Tried to access field {} of union with {} fields", field, count);
"Tried to access field {} of union {:#?} with {} fields",
field, base.layout, count);
// Offset is always 0
Size::from_bytes(0)
}
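For the array/slice arm, a small example of the situation the updated comment describes: the index is a value the user computes at runtime, so the interpreter has to turn an out-of-range access into a bounds-check panic rather than trust the type system. The function below is hypothetical user code, not part of the PR.

```rust
// The index is a runtime value, so nothing in the type system prevents it
// from being out of range. When const-evaluating or promoting such an access,
// the interpreter emits a BoundsCheck panic instead of assuming validity.
fn element(xs: &[u8; 4], i: usize) -> u8 {
    xs[i] // checked at runtime; panics if `i >= 4`
}

fn main() {
    let xs = [10, 20, 30, 40];
    assert_eq!(element(&xs, 2), 30);
    // element(&xs, 9) would panic: "index out of bounds: the len is 4 but the index is 9"
}
```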
25 changes: 12 additions & 13 deletions src/librustc_resolve/build_reduced_graph.rs
@@ -163,25 +163,15 @@ impl<'a> Resolver<'a> {
Some(ext)
}

// FIXME: `extra_placeholders` should be included into the `fragment` as regular placeholders.
crate fn build_reduced_graph(
&mut self,
fragment: &AstFragment,
extra_placeholders: &[NodeId],
parent_scope: ParentScope<'a>,
) -> LegacyScope<'a> {
let mut def_collector = DefCollector::new(&mut self.definitions, parent_scope.expansion);
fragment.visit_with(&mut def_collector);
for placeholder in extra_placeholders {
def_collector.visit_macro_invoc(*placeholder);
}

let mut visitor = BuildReducedGraphVisitor { r: self, parent_scope };
fragment.visit_with(&mut visitor);
for placeholder in extra_placeholders {
visitor.parent_scope.legacy = visitor.visit_invoc(*placeholder);
}

visitor.parent_scope.legacy
}

@@ -1064,8 +1054,17 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
None
}

// Mark the given macro as unused unless its name starts with `_`.
// Macro uses will remove items from this set, and the remaining
// items will be reported as `unused_macros`.
fn insert_unused_macro(&mut self, ident: Ident, node_id: NodeId, span: Span) {
if !ident.as_str().starts_with("_") {
self.r.unused_macros.insert(node_id, span);
}
}

fn define_macro(&mut self, item: &ast::Item) -> LegacyScope<'a> {
let parent_scope = &self.parent_scope;
let parent_scope = self.parent_scope;
let expansion = parent_scope.expansion;
let (ext, ident, span, is_legacy) = match &item.kind {
ItemKind::MacroDef(def) => {
@@ -1105,7 +1104,7 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
(res, vis, span, expansion, IsMacroExport));
} else {
self.r.check_reserved_macro_name(ident, res);
self.r.unused_macros.insert(item.id, span);
self.insert_unused_macro(ident, item.id, span);
}
LegacyScope::Binding(self.r.arenas.alloc_legacy_binding(LegacyBinding {
parent_legacy_scope: parent_scope.legacy, binding, ident
@@ -1114,7 +1113,7 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
let module = parent_scope.module;
let vis = self.resolve_visibility(&item.vis);
if vis != ty::Visibility::Public {
self.r.unused_macros.insert(item.id, span);
self.insert_unused_macro(ident, item.id, span);
}
self.r.define(module, ident, MacroNS, (res, vis, span, expansion));
self.parent_scope.legacy
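The effect of `insert_unused_macro` is easiest to show from the user's side. A minimal sketch of the convention it implements; the macro names are invented for illustration.

```rust
// A macro whose name starts with `_` is never recorded in `unused_macros`,
// so it is exempt from the `unused_macros` lint, mirroring the `_variable`
// convention for unused bindings.
macro_rules! _scratch {
    () => {};
}

macro_rules! helper {
    () => {};
}

fn main() {
    // `helper!` is invoked, so a macro use removes it from the unused set.
    helper!();
    // `_scratch!` is never invoked, but its leading underscore keeps it from
    // being reported by `#[warn(unused_macros)]`.
}
```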
8 changes: 2 additions & 6 deletions src/librustc_resolve/macros.rs
@@ -108,15 +108,11 @@ impl<'a> base::Resolver for Resolver<'a> {
});
}

// FIXME: `extra_placeholders` should be included into the `fragment` as regular placeholders.
fn visit_ast_fragment_with_placeholders(
&mut self, expansion: ExpnId, fragment: &AstFragment, extra_placeholders: &[NodeId]
) {
fn visit_ast_fragment_with_placeholders(&mut self, expansion: ExpnId, fragment: &AstFragment) {
// Integrate the new AST fragment into all the definition and module structures.
// We are inside the `expansion` now, but other parent scope components are still the same.
let parent_scope = ParentScope { expansion, ..self.invocation_parent_scopes[&expansion] };
let output_legacy_scope =
self.build_reduced_graph(fragment, extra_placeholders, parent_scope);
let output_legacy_scope = self.build_reduced_graph(fragment, parent_scope);
self.output_legacy_scopes.insert(expansion, output_legacy_scope);

parent_scope.module.unexpanded_invocations.borrow_mut().remove(&expansion);
6 changes: 5 additions & 1 deletion src/librustc_target/abi/mod.rs
@@ -738,7 +738,11 @@ impl FieldPlacement {

pub fn offset(&self, i: usize) -> Size {
match *self {
FieldPlacement::Union(_) => Size::ZERO,
FieldPlacement::Union(count) => {
assert!(i < count,
"Tried to access field {} of union with {} fields", i, count);
Size::ZERO
},
FieldPlacement::Array { stride, count } => {
let i = i as u64;
assert!(i < count);
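A short, hedged illustration of the invariant the new assertion protects: every field of a union is laid out at offset 0, so `offset(i)` only makes sense when `i` is a real field index. The `Example` union below is hypothetical and exercises plain language-level unions, not the compiler's layout code.

```rust
use std::mem;

// All fields of a union share offset 0; the union's size is that of its
// largest field (subject to alignment).
union Example {
    a: u8,
    b: u32,
    c: [u16; 2],
}

fn main() {
    assert_eq!(mem::size_of::<Example>(), 4);
    let v = Example { b: 0x0102_0304 };
    // Reading any field reinterprets the same bytes starting at offset 0.
    let first = unsafe { v.a };
    assert!(first == 0x04 || first == 0x01); // depends on endianness
}
```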
2 changes: 1 addition & 1 deletion src/libsyntax/attr/mod.rs
@@ -551,7 +551,7 @@ impl MetaItem {
impl MetaItemKind {
pub fn tokens(&self, span: Span) -> TokenStream {
match *self {
MetaItemKind::Word => TokenStream::empty(),
MetaItemKind::Word => TokenStream::default(),
MetaItemKind::NameValue(ref lit) => {
let mut vec = vec![TokenTree::token(token::Eq, span).into()];
lit.tokens().append_to_tree_and_joint_vec(&mut vec);
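The `TokenStream::empty()` to `TokenStream::default()` change follows a common pattern. A minimal sketch of that pattern with a made-up `Stream` type (not the real `TokenStream`): when a type's "empty" value is just its `Default`, callers can use `T::default()` and the bespoke constructor can be retired.

```rust
#[derive(Default, Debug, PartialEq)]
struct Stream(Vec<u32>);

impl Stream {
    // The kind of constructor being phased out in favor of `Default::default()`.
    fn empty() -> Stream {
        Stream(Vec::new())
    }
}

fn main() {
    assert_eq!(Stream::empty(), Stream::default());
}
```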
3 changes: 1 addition & 2 deletions src/libsyntax/ext/base.rs
@@ -849,8 +849,7 @@ pub trait Resolver {
fn next_node_id(&mut self) -> NodeId;

fn resolve_dollar_crates(&mut self);
fn visit_ast_fragment_with_placeholders(&mut self, expn_id: ExpnId, fragment: &AstFragment,
extra_placeholders: &[NodeId]);
fn visit_ast_fragment_with_placeholders(&mut self, expn_id: ExpnId, fragment: &AstFragment);
fn register_builtin_macro(&mut self, ident: ast::Ident, ext: SyntaxExtension);

fn expansion_for_ast_pass(
37 changes: 24 additions & 13 deletions src/libsyntax/ext/expand.rs
@@ -24,7 +24,6 @@ use errors::{Applicability, FatalError};
use smallvec::{smallvec, SmallVec};
use syntax_pos::{Span, DUMMY_SP, FileName};

use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::sync::Lrc;
use std::io::ErrorKind;
use std::{iter, mem, slice};
@@ -73,6 +72,22 @@ macro_rules! ast_fragments {
}

impl AstFragment {
pub fn add_placeholders(&mut self, placeholders: &[NodeId]) {
if placeholders.is_empty() {
return;
}
match self {
$($(AstFragment::$Kind(ast) => ast.extend(placeholders.iter().flat_map(|id| {
// We are repeating through arguments with `many`, to do that we have to
// mention some macro variable from those arguments even if it's not used.
#[cfg_attr(bootstrap, allow(unused_macros))]
macro _repeating($flat_map_ast_elt) {}
placeholder(AstFragmentKind::$Kind, *id).$make_ast()
})),)?)*
_ => panic!("unexpected AST fragment kind")
}
}

pub fn make_opt_expr(self) -> Option<P<ast::Expr>> {
match self {
AstFragment::OptExpr(expr) => expr,
@@ -340,7 +355,6 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
// Unresolved macros produce dummy outputs as a recovery measure.
invocations.reverse();
let mut expanded_fragments = Vec::new();
let mut all_derive_placeholders: FxHashMap<ExpnId, Vec<_>> = FxHashMap::default();
let mut undetermined_invocations = Vec::new();
let (mut progress, mut force) = (false, !self.monotonic);
loop {
Expand Down Expand Up @@ -418,9 +432,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
self.cx.resolver.add_derives(invoc.expansion_data.id, SpecialDerives::COPY);
}

let derive_placeholders =
all_derive_placeholders.entry(invoc.expansion_data.id).or_default();
derive_placeholders.reserve(derives.len());
let mut derive_placeholders = Vec::with_capacity(derives.len());
invocations.reserve(derives.len());
for path in derives {
let expn_id = ExpnId::fresh(None);
@@ -436,7 +448,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
}
let fragment = invoc.fragment_kind
.expect_from_annotatables(::std::iter::once(item));
self.collect_invocations(fragment, derive_placeholders)
self.collect_invocations(fragment, &derive_placeholders)
}
};

Expand All @@ -455,10 +467,8 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
let mut placeholder_expander = PlaceholderExpander::new(self.cx, self.monotonic);
while let Some(expanded_fragments) = expanded_fragments.pop() {
for (expn_id, expanded_fragment) in expanded_fragments.into_iter().rev() {
let derive_placeholders =
all_derive_placeholders.remove(&expn_id).unwrap_or_else(Vec::new);
placeholder_expander.add(NodeId::placeholder_from_expn_id(expn_id),
expanded_fragment, derive_placeholders);
expanded_fragment);
}
}
fragment_with_placeholders.mut_visit_with(&mut placeholder_expander);
@@ -491,13 +501,14 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
monotonic: self.monotonic,
};
fragment.mut_visit_with(&mut collector);
fragment.add_placeholders(extra_placeholders);
collector.invocations
};

// FIXME: Merge `extra_placeholders` into the `fragment` as regular placeholders.
if self.monotonic {
self.cx.resolver.visit_ast_fragment_with_placeholders(
self.cx.current_expansion.id, &fragment, extra_placeholders);
self.cx.current_expansion.id, &fragment
);
}

(fragment, invocations)
@@ -676,12 +687,12 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
}
}
Some(TokenTree::Token(..)) => {}
None => return TokenStream::empty(),
None => return TokenStream::default(),
}
self.cx.span_err(span, "custom attribute invocations must be \
of the form `#[foo]` or `#[foo(..)]`, the macro name must only be \
followed by a delimiter token");
TokenStream::empty()
TokenStream::default()
}

fn gate_proc_macro_attr_item(&self, span: Span, item: &Annotatable) {
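The shape of the `add_placeholders` refactor, sketched with toy types (the `Item` and `Fragment` below are invented, not the real `AstFragment`): instead of carrying `extra_placeholders` alongside the fragment and handing them to every consumer separately, the placeholder items are appended to the fragment up front, so resolution and placeholder expansion only ever see one collection.

```rust
#[derive(Debug)]
enum Item {
    Real(&'static str),
    Placeholder(u32), // stands in for a not-yet-expanded derive invocation
}

struct Fragment {
    items: Vec<Item>,
}

impl Fragment {
    // Fold extra placeholder IDs into the fragment itself instead of
    // threading them through as a side channel.
    fn add_placeholders(&mut self, ids: &[u32]) {
        if ids.is_empty() {
            return;
        }
        self.items.extend(ids.iter().map(|&id| Item::Placeholder(id)));
    }
}

fn main() {
    let mut fragment = Fragment { items: vec![Item::Real("struct S;")] };
    // Previously these IDs travelled next to the fragment as `extra_placeholders`;
    // now they are merged in before later passes look at it.
    fragment.add_placeholders(&[1, 2]);
    assert_eq!(fragment.items.len(), 3);
}
```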