From 43069635e422b3201a93683f536a45a603280c57 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20K=C3=A5re=20Alsaker?= Date: Sat, 3 Mar 2018 06:19:15 +0100 Subject: [PATCH 01/42] crate store --- src/librustc/ty/context.rs | 4 ++-- src/librustc_driver/driver.rs | 4 ++-- src/librustc_driver/pretty.rs | 7 ++++--- 3 files changed, 8 insertions(+), 7 deletions(-) diff --git a/src/librustc/ty/context.rs b/src/librustc/ty/context.rs index 28ad5edbd2db7..c3665f6836017 100644 --- a/src/librustc/ty/context.rs +++ b/src/librustc/ty/context.rs @@ -855,7 +855,7 @@ pub struct GlobalCtxt<'tcx> { global_arenas: &'tcx GlobalArenas<'tcx>, global_interners: CtxtInterners<'tcx>, - cstore: &'tcx dyn CrateStore, + cstore: &'tcx (dyn CrateStore + Sync), pub sess: &'tcx Session, @@ -1191,7 +1191,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { /// value (types, substs, etc.) can only be used while `ty::tls` has a valid /// reference to the context, to allow formatting values that need it. pub fn create_and_enter(s: &'tcx Session, - cstore: &'tcx dyn CrateStore, + cstore: &'tcx (dyn CrateStore + Sync), local_providers: ty::maps::Providers<'tcx>, extern_providers: ty::maps::Providers<'tcx>, arenas: &'tcx AllArenas<'tcx>, diff --git a/src/librustc_driver/driver.rs b/src/librustc_driver/driver.rs index f8c0289cc98c8..c5f5825afff73 100644 --- a/src/librustc_driver/driver.rs +++ b/src/librustc_driver/driver.rs @@ -49,7 +49,7 @@ use std::fs; use std::io::{self, Write}; use std::iter; use std::path::{Path, PathBuf}; -use rustc_data_structures::sync::Lrc; +use rustc_data_structures::sync::{Sync, Lrc}; use std::sync::mpsc; use syntax::{self, ast, attr, diagnostics, visit}; use syntax::ext::base::ExtCtxt; @@ -1046,7 +1046,7 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>( trans: &TransCrate, control: &CompileController, sess: &'tcx Session, - cstore: &'tcx CrateStore, + cstore: &'tcx (CrateStore + Sync), hir_map: hir_map::Map<'tcx>, mut analysis: ty::CrateAnalysis, resolutions: Resolutions, diff --git a/src/librustc_driver/pretty.rs b/src/librustc_driver/pretty.rs index 70b73ebb8cdeb..234ac31f5a430 100644 --- a/src/librustc_driver/pretty.rs +++ b/src/librustc_driver/pretty.rs @@ -17,6 +17,7 @@ use self::NodesMatchingUII::*; use {abort_on_err, driver}; +use rustc_data_structures::sync::Sync; use rustc::ty::{self, TyCtxt, Resolutions, AllArenas}; use rustc::cfg; use rustc::cfg::graphviz::LabelledCFG; @@ -199,7 +200,7 @@ impl PpSourceMode { } fn call_with_pp_support_hir<'tcx, A, F>(&self, sess: &'tcx Session, - cstore: &'tcx CrateStore, + cstore: &'tcx (CrateStore + Sync), hir_map: &hir_map::Map<'tcx>, analysis: &ty::CrateAnalysis, resolutions: &Resolutions, @@ -912,7 +913,7 @@ pub fn print_after_parsing(sess: &Session, } pub fn print_after_hir_lowering<'tcx, 'a: 'tcx>(sess: &'a Session, - cstore: &'tcx CrateStore, + cstore: &'tcx (CrateStore + Sync), hir_map: &hir_map::Map<'tcx>, analysis: &ty::CrateAnalysis, resolutions: &Resolutions, @@ -1068,7 +1069,7 @@ pub fn print_after_hir_lowering<'tcx, 'a: 'tcx>(sess: &'a Session, // with a different callback than the standard driver, so that isn't easy. // Instead, we call that function ourselves. 
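// Why patch 01 adds `+ Sync` to the `CrateStore` trait object: a shared
// reference `&T` is `Send` only when `T: Sync`, so handing `&dyn CrateStore`
// to other threads needs the object type to carry `Sync` too. Minimal
// self-contained sketch (the `Store` trait is illustrative, not rustc's
// `CrateStore`):
use std::thread;

trait Store {
    fn crate_count(&self) -> usize;
}

struct DummyStore;

impl Store for DummyStore {
    fn crate_count(&self) -> usize { 1 }
}

fn share_across_threads(store: &'static (dyn Store + Sync)) {
    // Compiles only because `dyn Store + Sync` is `Sync`, which makes the
    // captured `&'static (dyn Store + Sync)` reference `Send`.
    thread::spawn(move || println!("crates: {}", store.crate_count()))
        .join()
        .unwrap();
}

fn main() {
    static STORE: DummyStore = DummyStore;
    share_across_threads(&STORE);
}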
fn print_with_analysis<'tcx, 'a: 'tcx>(sess: &'a Session, - cstore: &'a CrateStore, + cstore: &'a (CrateStore + Sync), hir_map: &hir_map::Map<'tcx>, analysis: &ty::CrateAnalysis, resolutions: &Resolutions, From 41efd6102e572cdac5decd172b8b73cb62221bfd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20K=C3=A5re=20Alsaker?= Date: Sat, 3 Mar 2018 06:21:40 +0100 Subject: [PATCH 02/42] rustc_driver rec limit --- src/librustc_driver/lib.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/librustc_driver/lib.rs b/src/librustc_driver/lib.rs index fb333ec38fb60..b424dd3249118 100644 --- a/src/librustc_driver/lib.rs +++ b/src/librustc_driver/lib.rs @@ -27,6 +27,8 @@ #![feature(rustc_stack_internals)] #![feature(no_debug)] +#![recursion_limit="256"] + extern crate arena; extern crate getopts; extern crate graphviz; From e0b869cf6634519292159cb4f5ff84bca0030f3d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20K=C3=A5re=20Alsaker?= Date: Sat, 3 Mar 2018 06:22:02 +0100 Subject: [PATCH 03/42] rustc_metadata rec limit --- src/librustc_metadata/lib.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/librustc_metadata/lib.rs b/src/librustc_metadata/lib.rs index cbbc9d74228de..c8d2455702c34 100644 --- a/src/librustc_metadata/lib.rs +++ b/src/librustc_metadata/lib.rs @@ -24,6 +24,8 @@ #![feature(specialization)] #![feature(rustc_private)] +#![recursion_limit="256"] + extern crate libc; #[macro_use] extern crate log; From cabf00b07b85b1b3ffd46ea477862e33110d8026 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20K=C3=A5re=20Alsaker?= Date: Sat, 3 Mar 2018 06:22:19 +0100 Subject: [PATCH 04/42] rustdoc rec limit --- src/librustdoc/lib.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/librustdoc/lib.rs b/src/librustdoc/lib.rs index 60b713f2995e1..8b8c4ad6a5d43 100644 --- a/src/librustdoc/lib.rs +++ b/src/librustdoc/lib.rs @@ -25,6 +25,8 @@ #![feature(entry_and_modify)] #![feature(dyn_trait)] +#![recursion_limit="256"] + extern crate arena; extern crate getopts; extern crate env_logger; From 2757c662f8742b10a7e121d4996122797d9f14b7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20K=C3=A5re=20Alsaker?= Date: Sat, 3 Mar 2018 06:23:28 +0100 Subject: [PATCH 05/42] rustc_trans_utils rec limit --- src/librustc_trans_utils/lib.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/librustc_trans_utils/lib.rs b/src/librustc_trans_utils/lib.rs index 0c6bc9e246bce..3cb04ab13f181 100644 --- a/src/librustc_trans_utils/lib.rs +++ b/src/librustc_trans_utils/lib.rs @@ -23,6 +23,8 @@ #![feature(quote)] #![feature(rustc_diagnostic_macros)] +#![recursion_limit="256"] + extern crate ar; extern crate flate2; #[macro_use] From 8193d1521249c2555413ee3c779a89bde77ff866 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20K=C3=A5re=20Alsaker?= Date: Fri, 6 Apr 2018 14:53:20 +0200 Subject: [PATCH 06/42] locks --- src/librustc/traits/select.rs | 10 +++++----- src/librustc/ty/maps/on_disk_cache.rs | 9 ++++----- 2 files changed, 9 insertions(+), 10 deletions(-) diff --git a/src/librustc/traits/select.rs b/src/librustc/traits/select.rs index 51493f2619497..45262fd73f3bb 100644 --- a/src/librustc/traits/select.rs +++ b/src/librustc/traits/select.rs @@ -44,9 +44,9 @@ use ty::relate::TypeRelation; use middle::lang_items; use mir::interpret::{GlobalId}; +use rustc_data_structures::sync::Lock; use rustc_data_structures::bitvec::BitVector; use std::iter; -use std::cell::RefCell; use std::cmp; use std::fmt; use std::mem; @@ -143,7 +143,7 @@ struct TraitObligationStack<'prev, 'tcx: 'prev> { #[derive(Clone)] pub struct 
SelectionCache<'tcx> { - hashmap: RefCell, + hashmap: Lock, WithDepNode>>>>, } @@ -410,7 +410,7 @@ impl EvaluationResult { #[derive(Clone)] pub struct EvaluationCache<'tcx> { - hashmap: RefCell, WithDepNode>> + hashmap: Lock, WithDepNode>> } impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { @@ -3327,7 +3327,7 @@ impl<'tcx> TraitObligation<'tcx> { impl<'tcx> SelectionCache<'tcx> { pub fn new() -> SelectionCache<'tcx> { SelectionCache { - hashmap: RefCell::new(FxHashMap()) + hashmap: Lock::new(FxHashMap()) } } @@ -3339,7 +3339,7 @@ impl<'tcx> SelectionCache<'tcx> { impl<'tcx> EvaluationCache<'tcx> { pub fn new() -> EvaluationCache<'tcx> { EvaluationCache { - hashmap: RefCell::new(FxHashMap()) + hashmap: Lock::new(FxHashMap()) } } diff --git a/src/librustc/ty/maps/on_disk_cache.rs b/src/librustc/ty/maps/on_disk_cache.rs index f88e33c708e36..e7aa798955e06 100644 --- a/src/librustc/ty/maps/on_disk_cache.rs +++ b/src/librustc/ty/maps/on_disk_cache.rs @@ -23,7 +23,6 @@ use rustc_serialize::{Decodable, Decoder, Encodable, Encoder, opaque, SpecializedDecoder, SpecializedEncoder, UseSpecializedDecodable, UseSpecializedEncodable}; use session::{CrateDisambiguator, Session}; -use std::cell::RefCell; use std::mem; use syntax::ast::NodeId; use syntax::codemap::{CodeMap, StableFilemapId}; @@ -82,7 +81,7 @@ pub struct OnDiskCache<'sess> { prev_interpret_alloc_index: Vec, /// Deserialization: A cache to ensure we don't read allocations twice - interpret_alloc_cache: RefCell>, + interpret_alloc_cache: Lock>, } // This type is used only for (de-)serialization. @@ -151,7 +150,7 @@ impl<'sess> OnDiskCache<'sess> { prev_diagnostics_index: footer.diagnostics_index.into_iter().collect(), synthetic_expansion_infos: Lock::new(FxHashMap()), prev_interpret_alloc_index: footer.interpret_alloc_index, - interpret_alloc_cache: RefCell::new(FxHashMap::default()), + interpret_alloc_cache: Lock::new(FxHashMap::default()), } } @@ -168,7 +167,7 @@ impl<'sess> OnDiskCache<'sess> { prev_diagnostics_index: FxHashMap(), synthetic_expansion_infos: Lock::new(FxHashMap()), prev_interpret_alloc_index: Vec::new(), - interpret_alloc_cache: RefCell::new(FxHashMap::default()), + interpret_alloc_cache: Lock::new(FxHashMap::default()), } } @@ -490,7 +489,7 @@ struct CacheDecoder<'a, 'tcx: 'a, 'x> { synthetic_expansion_infos: &'x Lock>, file_index_to_file: &'x Lock>>, file_index_to_stable_id: &'x FxHashMap, - interpret_alloc_cache: &'x RefCell>, + interpret_alloc_cache: &'x Lock>, /// maps from index in the cache file to location in the cache file prev_interpret_alloc_index: &'x [AbsoluteBytePos], } From 55abb6db4b14df6ecc0f141e7f902ffd403564e8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20K=C3=A5re=20Alsaker?= Date: Sun, 3 Dec 2017 14:38:57 +0100 Subject: [PATCH 07/42] Make the symbol interner leak strings so InternedString are always safe to use --- src/libsyntax_pos/lib.rs | 1 + src/libsyntax_pos/symbol.rs | 25 +++++++++---------------- 2 files changed, 10 insertions(+), 16 deletions(-) diff --git a/src/libsyntax_pos/lib.rs b/src/libsyntax_pos/lib.rs index 9a7d1fd8ee6cb..e02e52430fe0d 100644 --- a/src/libsyntax_pos/lib.rs +++ b/src/libsyntax_pos/lib.rs @@ -23,6 +23,7 @@ #![feature(optin_builtin_traits)] #![allow(unused_attributes)] #![feature(specialization)] +#![feature(box_leak)] use std::borrow::Cow; use std::cell::Cell; diff --git a/src/libsyntax_pos/symbol.rs b/src/libsyntax_pos/symbol.rs index 50fac600a978d..a2819c81f6237 100644 --- a/src/libsyntax_pos/symbol.rs +++ b/src/libsyntax_pos/symbol.rs @@ -135,9 +135,9 @@ impl 
Symbol { } pub fn as_str(self) -> InternedString { - with_interner(|interner| unsafe { + with_interner(|interner| { InternedString { - string: ::std::mem::transmute::<&str, &str>(interner.get(self)) + string: interner.get(self) } }) } @@ -185,7 +185,7 @@ impl> PartialEq for Symbol { #[derive(Default)] pub struct Interner { names: HashMap, Symbol>, - strings: Vec>, + strings: Vec<&'static str>, gensyms: Vec, } @@ -209,7 +209,7 @@ impl Interner { let name = Symbol(self.strings.len() as u32); let string = string.to_string().into_boxed_str(); - self.strings.push(string.clone()); + self.strings.push(Box::leak(string.clone())); self.names.insert(string, name); name } @@ -236,7 +236,7 @@ impl Interner { symbol.0 as usize >= self.strings.len() } - pub fn get(&self, symbol: Symbol) -> &str { + pub fn get(&self, symbol: Symbol) -> &'static str { match self.strings.get(symbol.0 as usize) { Some(ref string) => string, None => self.get(self.gensyms[(!0 - symbol.0) as usize]), @@ -361,15 +361,10 @@ fn with_interner T>(f: F) -> T { GLOBALS.with(|globals| f(&mut *globals.symbol_interner.lock())) } -/// Represents a string stored in the thread-local interner. Because the -/// interner lives for the life of the thread, this can be safely treated as an -/// immortal string, as long as it never crosses between threads. -/// -/// FIXME(pcwalton): You must be careful about what you do in the destructors -/// of objects stored in TLS, because they may run after the interner is -/// destroyed. In particular, they must not access string contents. This can -/// be fixed in the future by just leaking all strings until thread death -/// somehow. +/// Represents a string stored in the interner. +/// The interner leaks strings so this can refer to &'static str +/// which enables it to deref to str. Ideally this should do lookup +/// in the interner instead so we can free the memory. #[derive(Clone, Copy, Hash, PartialOrd, Eq, Ord)] pub struct InternedString { string: &'static str, @@ -411,8 +406,6 @@ impl<'a> ::std::cmp::PartialEq for &'a String { } } -impl !Send for InternedString { } - impl ::std::ops::Deref for InternedString { type Target = str; fn deref(&self) -> &str { self.string } From ad0b966654de082d30cd1b1939b5ec42749727a1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20K=C3=A5re=20Alsaker?= Date: Wed, 14 Mar 2018 23:26:06 +0100 Subject: [PATCH 08/42] Make interners thread-safe --- src/librustc/ty/context.rs | 27 +++++++++++++++------------ 1 file changed, 15 insertions(+), 12 deletions(-) diff --git a/src/librustc/ty/context.rs b/src/librustc/ty/context.rs index c3665f6836017..7e7da77744114 100644 --- a/src/librustc/ty/context.rs +++ b/src/librustc/ty/context.rs @@ -2113,28 +2113,31 @@ macro_rules! 
intern_method { $needs_infer:expr) -> $ty:ty) => { impl<'a, 'gcx, $lt_tcx> TyCtxt<'a, 'gcx, $lt_tcx> { pub fn $method(self, v: $alloc) -> &$lt_tcx $ty { - { - let key = ($alloc_to_key)(&v); - if let Some(i) = self.interners.$name.borrow().get(key) { + let key = ($alloc_to_key)(&v); + let mut interner = self.interners.$name.borrow_mut(); + if let Some(i) = interner.get(key) { + return i.0; + } + let global_interner = if !self.is_global() { + let global_interner = self.global_interners.$name.borrow_mut(); + if let Some(i) = global_interner.get(key) { return i.0; } - if !self.is_global() { - if let Some(i) = self.global_interners.$name.borrow().get(key) { - return i.0; - } - } - } + Some(global_interner) + } else { + None + }; // HACK(eddyb) Depend on flags being accurate to // determine that all contents are in the global tcx. // See comments on Lift for why we can't use that. if !($needs_infer)(&v) { - if !self.is_global() { + if let Some(mut global_interners) = global_interner { let v = unsafe { mem::transmute(v) }; let i = ($alloc_to_ret)(self.global_interners.arena.$alloc_method(v)); - self.global_interners.$name.borrow_mut().insert(Interned(i)); + global_interners.insert(Interned(i)); return i; } } else { @@ -2148,7 +2151,7 @@ macro_rules! intern_method { } let i = ($alloc_to_ret)(self.interners.arena.$alloc_method(v)); - self.interners.$name.borrow_mut().insert(Interned(i)); + interner.insert(Interned(i)); i } } From 52e764d096cd6abcfdc218adbc65ef6683db00bc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20K=C3=A5re=20Alsaker?= Date: Thu, 15 Mar 2018 09:59:35 +0100 Subject: [PATCH 09/42] parallel abs --- src/librustc/hir/itemlikevisit.rs | 30 ++++++++++++++++++++++++++++++ src/librustc/hir/mod.rs | 26 ++++++++++++++++++++++++++ 2 files changed, 56 insertions(+) diff --git a/src/librustc/hir/itemlikevisit.rs b/src/librustc/hir/itemlikevisit.rs index 2221ecf07b434..a62000e10c79f 100644 --- a/src/librustc/hir/itemlikevisit.rs +++ b/src/librustc/hir/itemlikevisit.rs @@ -88,3 +88,33 @@ impl<'v, 'hir, V> ItemLikeVisitor<'hir> for DeepVisitor<'v, V> self.visitor.visit_impl_item(impl_item); } } + +/// A parallel variant of ItemLikeVisitor +pub trait ParItemLikeVisitor<'hir> { + fn visit_item(&self, item: &'hir Item); + fn visit_trait_item(&self, trait_item: &'hir TraitItem); + fn visit_impl_item(&self, impl_item: &'hir ImplItem); +} + +pub trait IntoVisitor<'hir> { + type Visitor: Visitor<'hir>; + fn into_visitor(&self) -> Self::Visitor; +} + +pub struct ParDeepVisitor(pub V); + +impl<'hir, V> ParItemLikeVisitor<'hir> for ParDeepVisitor + where V: IntoVisitor<'hir> +{ + fn visit_item(&self, item: &'hir Item) { + self.0.into_visitor().visit_item(item); + } + + fn visit_trait_item(&self, trait_item: &'hir TraitItem) { + self.0.into_visitor().visit_trait_item(trait_item); + } + + fn visit_impl_item(&self, impl_item: &'hir ImplItem) { + self.0.into_visitor().visit_impl_item(impl_item); + } +} diff --git a/src/librustc/hir/mod.rs b/src/librustc/hir/mod.rs index f471ffb072d67..a4d803704485a 100644 --- a/src/librustc/hir/mod.rs +++ b/src/librustc/hir/mod.rs @@ -48,6 +48,7 @@ use ty::AdtKind; use ty::maps::Providers; use rustc_data_structures::indexed_vec; +use rustc_data_structures::sync::{ParallelIterator, par_iter, Send, Sync, scope}; use serialize::{self, Encoder, Encodable, Decoder, Decodable}; use std::collections::BTreeMap; @@ -720,6 +721,31 @@ impl Crate { } } + /// A parallel version of visit_all_item_likes + pub fn par_visit_all_item_likes<'hir, V>(&'hir self, visitor: &V) + where V: 
itemlikevisit::ParItemLikeVisitor<'hir> + Sync + Send + { + scope(|s| { + s.spawn(|_| { + par_iter(&self.items).for_each(|(_, item)| { + visitor.visit_item(item); + }); + }); + + s.spawn(|_| { + par_iter(&self.trait_items).for_each(|(_, trait_item)| { + visitor.visit_trait_item(trait_item); + }); + }); + + s.spawn(|_| { + par_iter(&self.impl_items).for_each(|(_, impl_item)| { + visitor.visit_impl_item(impl_item); + }); + }); + }); + } + pub fn body(&self, id: BodyId) -> &Body { &self.bodies[&id] } From aa61d1accdb32419c4f929c297605ca8ce925de5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20K=C3=A5re=20Alsaker?= Date: Fri, 6 Apr 2018 14:52:36 +0200 Subject: [PATCH 10/42] dep graph --- src/librustc/dep_graph/graph.rs | 390 ++++++++++++++++++++---------- src/librustc/dep_graph/mod.rs | 3 +- src/librustc/dep_graph/raii.rs | 33 --- src/librustc/hir/map/collector.rs | 26 +- src/librustc/ty/context.rs | 5 + src/librustc/ty/maps/plumbing.rs | 1 + 6 files changed, 278 insertions(+), 180 deletions(-) delete mode 100644 src/librustc/dep_graph/raii.rs diff --git a/src/librustc/dep_graph/graph.rs b/src/librustc/dep_graph/graph.rs index d60c22064d3a0..60529c488879a 100644 --- a/src/librustc/dep_graph/graph.rs +++ b/src/librustc/dep_graph/graph.rs @@ -12,11 +12,10 @@ use errors::DiagnosticBuilder; use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use rustc_data_structures::indexed_vec::{Idx, IndexVec}; -use rustc_data_structures::sync::Lrc; -use std::cell::{Ref, RefCell}; +use rustc_data_structures::sync::{Lrc, RwLock, ReadGuard, Lock}; use std::env; use std::hash::Hash; -use ty::TyCtxt; +use ty::{self, TyCtxt}; use util::common::{ProfileQueriesMsg, profq_msg}; use ich::{StableHashingContext, StableHashingContextProvider, Fingerprint}; @@ -24,7 +23,6 @@ use ich::{StableHashingContext, StableHashingContextProvider, Fingerprint}; use super::debug::EdgeFilter; use super::dep_node::{DepNode, DepKind, WorkProductId}; use super::query::DepGraphQuery; -use super::raii; use super::safe::DepGraphSafe; use super::serialized::{SerializedDepGraph, SerializedDepNodeIndex}; use super::prev::PreviousDepGraph; @@ -37,7 +35,7 @@ pub struct DepGraph { // result value fingerprints. Do not rely on the length of this vector // being the same as the number of nodes in the graph. The vector can // contain an arbitrary number of zero-entries at the end. - fingerprints: Lrc>> + fingerprints: Lrc>> } @@ -67,27 +65,27 @@ struct DepGraphData { /// tracking. The `current` field is the dependency graph of only the /// current compilation session: We don't merge the previous dep-graph into /// current one anymore. - current: RefCell, + current: Lock, /// The dep-graph from the previous compilation session. It contains all /// nodes and edges as well as all fingerprints of nodes that have them. previous: PreviousDepGraph, - colors: RefCell, + colors: Lock, /// When we load, there may be `.o` files, cached mir, or other such /// things available to us. If we find that they are not dirty, we /// load the path to the file storing those work-products here into /// this map. We can later look for and extract that data. - previous_work_products: RefCell>, + previous_work_products: RwLock>, /// Work-products that we generate in this run. - work_products: RefCell>, + work_products: RwLock>, - dep_node_debug: RefCell>, + dep_node_debug: Lock>, // Used for testing, only populated when -Zquery-dep-graph is specified. 
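// par_visit_all_item_likes above fans the item, trait-item and impl-item maps
// out with a scope plus parallel iterators; rustc reaches those through the
// wrappers in rustc_data_structures::sync. A standalone sketch of the same
// shape using rayon directly (two maps and an illustrative visitor trait):
use rayon::prelude::*;
use std::collections::BTreeMap;

trait ParVisitor: Sync {
    fn visit_item(&self, item: &str);
    fn visit_impl_item(&self, item: &str);
}

fn par_visit_all(
    items: &BTreeMap<u32, String>,
    impl_items: &BTreeMap<u32, String>,
    visitor: &impl ParVisitor,
) {
    rayon::scope(|s| {
        // Each map gets its own spawned task, and each task walks its map in
        // parallel; the scope joins everything before returning.
        s.spawn(|_| {
            items.par_iter().for_each(|(_, item)| visitor.visit_item(item));
        });
        s.spawn(|_| {
            impl_items
                .par_iter()
                .for_each(|(_, item)| visitor.visit_impl_item(item));
        });
    });
}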
- loaded_from_cache: RefCell>, + loaded_from_cache: Lock>, } impl DepGraph { @@ -102,22 +100,22 @@ impl DepGraph { (prev_graph_node_count * 115) / 100); DepGraph { data: Some(Lrc::new(DepGraphData { - previous_work_products: RefCell::new(FxHashMap()), - work_products: RefCell::new(FxHashMap()), - dep_node_debug: RefCell::new(FxHashMap()), - current: RefCell::new(CurrentDepGraph::new()), + previous_work_products: RwLock::new(FxHashMap()), + work_products: RwLock::new(FxHashMap()), + dep_node_debug: Lock::new(FxHashMap()), + current: Lock::new(CurrentDepGraph::new()), previous: prev_graph, - colors: RefCell::new(DepNodeColorMap::new(prev_graph_node_count)), - loaded_from_cache: RefCell::new(FxHashMap()), + colors: Lock::new(DepNodeColorMap::new(prev_graph_node_count)), + loaded_from_cache: Lock::new(FxHashMap()), })), - fingerprints: Lrc::new(RefCell::new(fingerprints)), + fingerprints: Lrc::new(Lock::new(fingerprints)), } } pub fn new_disabled() -> DepGraph { DepGraph { data: None, - fingerprints: Lrc::new(RefCell::new(IndexVec::new())), + fingerprints: Lrc::new(Lock::new(IndexVec::new())), } } @@ -144,21 +142,32 @@ impl DepGraph { pub fn assert_ignored(&self) { - if let Some(ref data) = self.data { - match data.current.borrow().task_stack.last() { - Some(&OpenTask::Ignore) | None => { - // ignored + if let Some(..) = self.data { + ty::tls::with_context_opt(|icx| { + let icx = if let Some(icx) = icx { icx } else { return }; + match *icx.task.lock() { + OpenTask::Ignore => { + // ignored + } + _ => panic!("expected an ignore context") } - _ => panic!("expected an ignore context") - } + }) } } pub fn with_ignore(&self, op: OP) -> R where OP: FnOnce() -> R { - let _task = self.data.as_ref().map(|data| raii::IgnoreTask::new(&data.current)); - op() + ty::tls::with_context(|icx| { + let icx = ty::tls::ImplicitCtxt { + task: &Lock::new(OpenTask::Ignore), + ..icx.clone() + }; + + ty::tls::enter_context(&icx, |_| { + op() + }) + }) } /// Starts a new dep-graph task. Dep-graph tasks are specified @@ -198,23 +207,42 @@ impl DepGraph { R: HashStable>, { self.with_task_impl(key, cx, arg, task, - |data, key| data.borrow_mut().push_task(key), - |data, key| data.borrow_mut().pop_task(key)) + |key| OpenTask::Regular { + node: key, + reads: Vec::new(), + read_set: FxHashSet(), + }, + |data, key, task| data.borrow_mut().pop_task(key, task)) } - fn with_task_impl<'gcx, C, A, R>(&self, - key: DepNode, - cx: C, - arg: A, - task: fn(C, A) -> R, - push: fn(&RefCell, DepNode), - pop: fn(&RefCell, DepNode) -> DepNodeIndex) + pub fn empty_task<'gcx, C, R>(&self, + key: DepNode, + cx: C, + result: R) + -> (R, DepNodeIndex) + where C: DepGraphSafe + StableHashingContextProvider<'gcx>, + R: HashStable>, + { + fn identity_fn(_: C, arg: A) -> A { + arg + } + + self.with_empty_task_impl(key, cx, result, identity_fn, + |data, key| data.borrow_mut().alloc_node(key, Vec::new())) + } + + fn with_empty_task_impl<'gcx, C, A, R>(&self, + key: DepNode, + cx: C, + arg: A, + task: fn(C, A) -> R, + pop: fn(&Lock, + DepNode) -> DepNodeIndex) -> (R, DepNodeIndex) where C: DepGraphSafe + StableHashingContextProvider<'gcx>, R: HashStable>, { if let Some(ref data) = self.data { - push(&data.current, key); // In incremental mode, hash the result of the task. 
We don't // do anything with the hash yet, but we are computing it @@ -296,17 +324,146 @@ impl DepGraph { } } + fn with_task_impl<'gcx, C, A, R>(&self, + key: DepNode, + cx: C, + arg: A, + task: fn(C, A) -> R, + push: fn(DepNode) -> OpenTask, + pop: fn(&Lock, + DepNode, + OpenTask) -> DepNodeIndex) + -> (R, DepNodeIndex) + where C: DepGraphSafe + StableHashingContextProvider<'gcx>, + R: HashStable>, + { + if let Some(ref data) = self.data { + let open_task = push(key); + + let mut hcx = cx.get_stable_hashing_context(); + + if cfg!(debug_assertions) { + profq_msg(hcx.sess(), ProfileQueriesMsg::TaskBegin(key.clone())) + }; + + // In incremental mode, hash the result of the task. We don't + // do anything with the hash yet, but we are computing it + // anyway so that + // - we make sure that the infrastructure works and + // - we can get an idea of the runtime cost. + + let (result, open_task) = ty::tls::with_context(|icx| { + let open_task = Lock::new(open_task); + + let r = { + let icx = ty::tls::ImplicitCtxt { + task: &open_task, + ..icx.clone() + }; + + ty::tls::enter_context(&icx, |_| { + task(cx, arg) + }) + }; + + (r, open_task.into_inner()) + }); + + if cfg!(debug_assertions) { + profq_msg(hcx.sess(), ProfileQueriesMsg::TaskEnd) + }; + + let dep_node_index = pop(&data.current, key, open_task); + + let mut stable_hasher = StableHasher::new(); + result.hash_stable(&mut hcx, &mut stable_hasher); + + let current_fingerprint = stable_hasher.finish(); + + // Store the current fingerprint + { + let mut fingerprints = self.fingerprints.borrow_mut(); + + if dep_node_index.index() >= fingerprints.len() { + fingerprints.resize(dep_node_index.index() + 1, Fingerprint::ZERO); + } + + debug_assert!(fingerprints[dep_node_index] == Fingerprint::ZERO, + "DepGraph::with_task() - Duplicate fingerprint \ + insertion for {:?}", key); + fingerprints[dep_node_index] = current_fingerprint; + } + + // Determine the color of the new DepNode. + if let Some(prev_index) = data.previous.node_to_index_opt(&key) { + let prev_fingerprint = data.previous.fingerprint_by_index(prev_index); + + let color = if current_fingerprint == prev_fingerprint { + DepNodeColor::Green(dep_node_index) + } else { + DepNodeColor::Red + }; + + let mut colors = data.colors.borrow_mut(); + debug_assert!(colors.get(prev_index).is_none(), + "DepGraph::with_task() - Duplicate DepNodeColor \ + insertion for {:?}", key); + + colors.insert(prev_index, color); + } + + (result, dep_node_index) + } else { + if key.kind.fingerprint_needed_for_crate_hash() { + let mut hcx = cx.get_stable_hashing_context(); + let result = task(cx, arg); + let mut stable_hasher = StableHasher::new(); + result.hash_stable(&mut hcx, &mut stable_hasher); + let fingerprint = stable_hasher.finish(); + + let mut fingerprints = self.fingerprints.borrow_mut(); + let dep_node_index = DepNodeIndex::new(fingerprints.len()); + fingerprints.push(fingerprint); + + debug_assert!(fingerprints[dep_node_index] == fingerprint, + "DepGraph::with_task() - Assigned fingerprint to \ + unexpected index for {:?}", key); + + (result, dep_node_index) + } else { + (task(cx, arg), DepNodeIndex::INVALID) + } + } + } + /// Execute something within an "anonymous" task, that is, a task the /// DepNode of which is determined by the list of inputs it read from. 
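// With the task stack gone, the open task rides in the implicit context and
// dependency reads are recorded through whatever task is current. A simplified
// stand-in for that flow, using a plain thread-local where rustc uses its
// ImplicitCtxt/TLV machinery:
use std::cell::RefCell;
use std::collections::HashSet;

#[derive(Default, Debug)]
struct OpenTask {
    reads: Vec<usize>,
    read_set: HashSet<usize>,
}

thread_local!(static CURRENT_TASK: RefCell<Option<OpenTask>> = RefCell::new(None));

fn with_task<R>(work: impl FnOnce() -> R) -> (R, OpenTask) {
    // Install a fresh task for the duration of `work`, then hand the recorded
    // reads back to the caller (which would turn them into graph edges).
    CURRENT_TASK.with(|t| *t.borrow_mut() = Some(OpenTask::default()));
    let result = work();
    let task = CURRENT_TASK.with(|t| t.borrow_mut().take().unwrap());
    (result, task)
}

fn record_read(dep_node_index: usize) {
    // Equivalent of read_index(): note the edge on the currently open task,
    // deduplicating through the read set.
    CURRENT_TASK.with(|t| {
        if let Some(task) = t.borrow_mut().as_mut() {
            if task.read_set.insert(dep_node_index) {
                task.reads.push(dep_node_index);
            }
        }
    });
}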
pub fn with_anon_task(&self, dep_kind: DepKind, op: OP) -> (R, DepNodeIndex) where OP: FnOnce() -> R { if let Some(ref data) = self.data { - data.current.borrow_mut().push_anon_task(); - let result = op(); + let (result, open_task) = ty::tls::with_context(|icx| { + let task = Lock::new(OpenTask::Anon { + reads: Vec::new(), + read_set: FxHashSet(), + }); + + let r = { + let icx = ty::tls::ImplicitCtxt { + task: &task, + ..icx.clone() + }; + + ty::tls::enter_context(&icx, |_| { + op() + }) + }; + + (r, task.into_inner()) + }); let dep_node_index = data.current .borrow_mut() - .pop_anon_task(dep_kind); + .pop_anon_task(dep_kind, open_task); (result, dep_node_index) } else { (op(), DepNodeIndex::INVALID) @@ -325,8 +482,8 @@ impl DepGraph { R: HashStable>, { self.with_task_impl(key, cx, arg, task, - |data, key| data.borrow_mut().push_eval_always_task(key), - |data, key| data.borrow_mut().pop_eval_always_task(key)) + |key| OpenTask::EvalAlways { node: key }, + |data, key, task| data.borrow_mut().pop_eval_always_task(key, task)) } #[inline] @@ -432,13 +589,13 @@ impl DepGraph { /// Access the map of work-products created during this run. Only /// used during saving of the dep-graph. - pub fn work_products(&self) -> Ref> { + pub fn work_products(&self) -> ReadGuard> { self.data.as_ref().unwrap().work_products.borrow() } /// Access the map of work-products created during the cached run. Only /// used during saving of the dep-graph. - pub fn previous_work_products(&self) -> Ref> { + pub fn previous_work_products(&self) -> ReadGuard> { self.data.as_ref().unwrap().previous_work_products.borrow() } @@ -528,6 +685,7 @@ impl DepGraph { debug!("try_mark_green({:?}) - BEGIN", dep_node); let data = self.data.as_ref().unwrap(); + #[cfg(not(parallel_queries))] debug_assert!(!data.current.borrow().node_to_node_index.contains_key(dep_node)); if dep_node.kind.is_input() { @@ -668,16 +826,24 @@ impl DepGraph { } } - // If we got here without hitting a `return` that means that all // dependencies of this DepNode could be marked as green. Therefore we - // can also mark this DepNode as green. We do so by... + // can also mark this DepNode as green. + + // There may be multiple threads trying to mark the same dep node green concurrently + + let (dep_node_index, did_allocation) = { + let mut current = data.current.borrow_mut(); - // ... allocating an entry for it in the current dependency graph and - // adding all the appropriate edges imported from the previous graph ... - let dep_node_index = data.current - .borrow_mut() - .alloc_node(*dep_node, current_deps); + if let Some(&dep_node_index) = current.node_to_node_index.get(&dep_node) { + // Someone else allocated it before us + (dep_node_index, false) + } else { + // We allocating an entry for the node in the current dependency graph and + // adding all the appropriate edges imported from the previous graph + (current.alloc_node(*dep_node, current_deps), true) + } + }; // ... copying the fingerprint from the previous graph too, so we don't // have to recompute it ... @@ -689,6 +855,8 @@ impl DepGraph { fingerprints.resize(dep_node_index.index() + 1, Fingerprint::ZERO); } + // Multiple threads can all write the same fingerprint here + #[cfg(not(parallel_queries))] debug_assert!(fingerprints[dep_node_index] == Fingerprint::ZERO, "DepGraph::try_mark_green() - Duplicate fingerprint \ insertion for {:?}", dep_node); @@ -697,7 +865,9 @@ impl DepGraph { } // ... emitting any stored diagnostic ... 
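// try_mark_green now tolerates several threads racing to mark the same node:
// the thread that inserts first gets `did_allocation == true` and owns the
// one-time side effects (such as replaying cached diagnostics). The core
// check-then-insert pattern, reduced to a map behind a mutex:
use std::collections::HashMap;
use std::sync::Mutex;

fn get_or_alloc(map: &Mutex<HashMap<&'static str, u32>>, key: &'static str) -> (u32, bool) {
    let mut map = map.lock().unwrap();
    if let Some(&index) = map.get(key) {
        (index, false) // someone else allocated it before us
    } else {
        let index = map.len() as u32;
        map.insert(key, index);
        (index, true) // we did the allocation, so we run the one-time work
    }
}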
- { + if did_allocation { + // Only the thread which did the allocation emits the error messages + // FIXME: Ensure that these are printed before returning for all threads let diagnostics = tcx.on_disk_query_result_cache .load_diagnostics(tcx, prev_dep_node_index); @@ -716,6 +886,8 @@ impl DepGraph { // ... and finally storing a "Green" entry in the color map. let mut colors = data.colors.borrow_mut(); + // Multiple threads can all write the same color here + #[cfg(not(parallel_queries))] debug_assert!(colors.get(prev_dep_node_index).is_none(), "DepGraph::try_mark_green() - Duplicate DepNodeColor \ insertion for {:?}", dep_node); @@ -839,7 +1011,6 @@ pub(super) struct CurrentDepGraph { nodes: IndexVec, edges: IndexVec>, node_to_node_index: FxHashMap, - task_stack: Vec, forbidden_edge: Option, // Anonymous DepNodes are nodes the ID of which we compute from the list of @@ -888,38 +1059,18 @@ impl CurrentDepGraph { edges: IndexVec::new(), node_to_node_index: FxHashMap(), anon_id_seed: stable_hasher.finish(), - task_stack: Vec::new(), forbidden_edge, total_read_count: 0, total_duplicate_read_count: 0, } } - pub(super) fn push_ignore(&mut self) { - self.task_stack.push(OpenTask::Ignore); - } - - pub(super) fn pop_ignore(&mut self) { - let popped_node = self.task_stack.pop().unwrap(); - debug_assert_eq!(popped_node, OpenTask::Ignore); - } - - pub(super) fn push_task(&mut self, key: DepNode) { - self.task_stack.push(OpenTask::Regular { - node: key, - reads: Vec::new(), - read_set: FxHashSet(), - }); - } - - pub(super) fn pop_task(&mut self, key: DepNode) -> DepNodeIndex { - let popped_node = self.task_stack.pop().unwrap(); - + pub(super) fn pop_task(&mut self, key: DepNode, task: OpenTask) -> DepNodeIndex { if let OpenTask::Regular { node, read_set: _, reads - } = popped_node { + } = task { assert_eq!(node, key); // If this is an input node, we expect that it either has no @@ -950,20 +1101,11 @@ impl CurrentDepGraph { } } - fn push_anon_task(&mut self) { - self.task_stack.push(OpenTask::Anon { - reads: Vec::new(), - read_set: FxHashSet(), - }); - } - - fn pop_anon_task(&mut self, kind: DepKind) -> DepNodeIndex { - let popped_node = self.task_stack.pop().unwrap(); - + fn pop_anon_task(&mut self, kind: DepKind, task: OpenTask) -> DepNodeIndex { if let OpenTask::Anon { read_set: _, reads - } = popped_node { + } = task { debug_assert!(!kind.is_input()); let mut fingerprint = self.anon_id_seed; @@ -997,16 +1139,10 @@ impl CurrentDepGraph { } } - fn push_eval_always_task(&mut self, key: DepNode) { - self.task_stack.push(OpenTask::EvalAlways { node: key }); - } - - fn pop_eval_always_task(&mut self, key: DepNode) -> DepNodeIndex { - let popped_node = self.task_stack.pop().unwrap(); - + fn pop_eval_always_task(&mut self, key: DepNode, task: OpenTask) -> DepNodeIndex { if let OpenTask::EvalAlways { node, - } = popped_node { + } = task { debug_assert_eq!(node, key); let krate_idx = self.node_to_node_index[&DepNode::new_no_params(DepKind::Krate)]; self.alloc_node(node, vec![krate_idx]) @@ -1016,43 +1152,45 @@ impl CurrentDepGraph { } fn read_index(&mut self, source: DepNodeIndex) { - match self.task_stack.last_mut() { - Some(&mut OpenTask::Regular { - ref mut reads, - ref mut read_set, - node: ref target, - }) => { - self.total_read_count += 1; - if read_set.insert(source) { - reads.push(source); - - if cfg!(debug_assertions) { - if let Some(ref forbidden_edge) = self.forbidden_edge { - let source = self.nodes[source]; - if forbidden_edge.test(&source, &target) { - bug!("forbidden edge {:?} -> {:?} 
created", - source, - target) + ty::tls::with_context_opt(|icx| { + let icx = if let Some(icx) = icx { icx } else { return }; + match *icx.task.lock() { + OpenTask::Regular { + ref mut reads, + ref mut read_set, + node: ref target, + } => { + self.total_read_count += 1; + if read_set.insert(source) { + reads.push(source); + + if cfg!(debug_assertions) { + if let Some(ref forbidden_edge) = self.forbidden_edge { + let source = self.nodes[source]; + if forbidden_edge.test(&source, &target) { + bug!("forbidden edge {:?} -> {:?} created", + source, + target) + } } } + } else { + self.total_duplicate_read_count += 1; } - } else { - self.total_duplicate_read_count += 1; } - } - Some(&mut OpenTask::Anon { - ref mut reads, - ref mut read_set, - }) => { - if read_set.insert(source) { - reads.push(source); + OpenTask::Anon { + ref mut reads, + ref mut read_set, + } => { + if read_set.insert(source) { + reads.push(source); + } + } + OpenTask::Ignore | OpenTask::EvalAlways { .. } => { + // ignore } } - Some(&mut OpenTask::Ignore) | - Some(&mut OpenTask::EvalAlways { .. }) | None => { - // ignore - } - } + }) } fn alloc_node(&mut self, @@ -1071,7 +1209,7 @@ impl CurrentDepGraph { } #[derive(Clone, Debug, PartialEq)] -enum OpenTask { +pub enum OpenTask { Regular { node: DepNode, reads: Vec, diff --git a/src/librustc/dep_graph/mod.rs b/src/librustc/dep_graph/mod.rs index a472183698abf..8a6f66911ece4 100644 --- a/src/librustc/dep_graph/mod.rs +++ b/src/librustc/dep_graph/mod.rs @@ -14,13 +14,12 @@ mod dep_tracking_map; mod graph; mod prev; mod query; -mod raii; mod safe; mod serialized; pub use self::dep_tracking_map::{DepTrackingMap, DepTrackingMapConfig}; pub use self::dep_node::{DepNode, DepKind, DepConstructor, WorkProductId, label_strs}; -pub use self::graph::{DepGraph, WorkProduct, DepNodeIndex, DepNodeColor}; +pub use self::graph::{DepGraph, WorkProduct, DepNodeIndex, DepNodeColor, OpenTask}; pub use self::graph::WorkProductFileKind; pub use self::prev::PreviousDepGraph; pub use self::query::DepGraphQuery; diff --git a/src/librustc/dep_graph/raii.rs b/src/librustc/dep_graph/raii.rs deleted file mode 100644 index 5728bcc7d2771..0000000000000 --- a/src/librustc/dep_graph/raii.rs +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. 
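// raii.rs is deleted below: the push_ignore/pop_ignore guard pair becomes the
// scoped swap performed by with_ignore above, which installs OpenTask::Ignore
// in the implicit context while the closure runs. The idiom in isolation, with
// a simple flag standing in for the task:
use std::cell::Cell;

thread_local!(static IGNORE_DEPS: Cell<bool> = Cell::new(false));

fn with_ignore<R>(f: impl FnOnce() -> R) -> R {
    IGNORE_DEPS.with(|flag| {
        let old = flag.replace(true);
        let result = f();
        // rustc restores through a drop guard so unwinding also resets it.
        flag.set(old);
        result
    })
}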
- -use super::graph::CurrentDepGraph; - -use std::cell::RefCell; - -pub struct IgnoreTask<'graph> { - graph: &'graph RefCell, -} - -impl<'graph> IgnoreTask<'graph> { - pub(super) fn new(graph: &'graph RefCell) -> IgnoreTask<'graph> { - graph.borrow_mut().push_ignore(); - IgnoreTask { - graph, - } - } -} - -impl<'graph> Drop for IgnoreTask<'graph> { - fn drop(&mut self) { - self.graph.borrow_mut().pop_ignore(); - } -} - diff --git a/src/librustc/hir/map/collector.rs b/src/librustc/hir/map/collector.rs index f77275926eba3..ead225641a259 100644 --- a/src/librustc/hir/map/collector.rs +++ b/src/librustc/hir/map/collector.rs @@ -79,26 +79,23 @@ impl<'a, 'hir> NodeCollector<'a, 'hir> { body_ids: _, } = *krate; - root_mod_sig_dep_index = dep_graph.with_task( + root_mod_sig_dep_index = dep_graph.empty_task( root_mod_def_path_hash.to_dep_node(DepKind::Hir), &hcx, HirItemLike { item_like: (module, attrs, span), hash_bodies: false }, - identity_fn ).1; - root_mod_full_dep_index = dep_graph.with_task( + root_mod_full_dep_index = dep_graph.empty_task( root_mod_def_path_hash.to_dep_node(DepKind::HirBody), &hcx, HirItemLike { item_like: (module, attrs, span), hash_bodies: true }, - identity_fn ).1; } { - dep_graph.with_task( + dep_graph.empty_task( DepNode::new_no_params(DepKind::AllLocalTraitImpls), &hcx, &krate.trait_impls, - identity_fn ); } @@ -169,12 +166,11 @@ impl<'a, 'hir> NodeCollector<'a, 'hir> { let (_, crate_dep_node_index) = self .dep_graph - .with_task(DepNode::new_no_params(DepKind::Krate), + .empty_task(DepNode::new_no_params(DepKind::Krate), &self.hcx, (((node_hashes, upstream_crates), source_file_names), (commandline_args_hash, - crate_disambiguator.to_fingerprint())), - identity_fn); + crate_disambiguator.to_fingerprint()))); let svh = Svh::new(self.dep_graph .fingerprint_of(crate_dep_node_index) @@ -267,18 +263,16 @@ impl<'a, 'hir> NodeCollector<'a, 'hir> { let def_path_hash = self.definitions.def_path_hash(dep_node_owner); - self.current_signature_dep_index = self.dep_graph.with_task( + self.current_signature_dep_index = self.dep_graph.empty_task( def_path_hash.to_dep_node(DepKind::Hir), &self.hcx, HirItemLike { item_like, hash_bodies: false }, - identity_fn ).1; - self.current_full_dep_index = self.dep_graph.with_task( + self.current_full_dep_index = self.dep_graph.empty_task( def_path_hash.to_dep_node(DepKind::HirBody), &self.hcx, HirItemLike { item_like, hash_bodies: true }, - identity_fn ).1; self.hir_body_nodes.push((def_path_hash, self.current_full_dep_index)); @@ -520,12 +514,6 @@ impl<'a, 'hir> Visitor<'hir> for NodeCollector<'a, 'hir> { } } -// We use this with DepGraph::with_task(). Since we are handling only input -// values here, the "task" computing them just passes them through. -fn identity_fn(_: &StableHashingContext, item_like: T) -> T { - item_like -} - // This is a wrapper structure that allows determining if span values within // the wrapped item should be hashed or not. 
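// The collector's input nodes switch from `with_task(.., identity_fn)` to a
// dedicated `empty_task` that records a value without running a user closure.
// The equivalence, with illustrative signatures rather than rustc's:
fn with_task<C, A, R>(cx: C, arg: A, task: fn(C, A) -> R) -> R {
    task(cx, arg)
}

fn identity_fn<C, A>(_: C, arg: A) -> A {
    arg
}

fn empty_task<C, A>(cx: C, result: A) -> A {
    // No computation to run: the "result" is the input value itself, which is
    // exactly what passing identity_fn used to express.
    with_task(cx, result, identity_fn)
}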
struct HirItemLike { diff --git a/src/librustc/ty/context.rs b/src/librustc/ty/context.rs index 7e7da77744114..b369a456ddd50 100644 --- a/src/librustc/ty/context.rs +++ b/src/librustc/ty/context.rs @@ -1562,6 +1562,7 @@ impl<'gcx: 'tcx, 'tcx> GlobalCtxt<'gcx> { tcx, query: icx.query.clone(), layout_depth: icx.layout_depth, + task: icx.task, }; ty::tls::enter_context(&new_icx, |new_icx| { f(new_icx.tcx) @@ -1741,6 +1742,7 @@ pub mod tls { use errors::{Diagnostic, TRACK_DIAGNOSTICS}; use rustc_data_structures::OnDrop; use rustc_data_structures::sync::Lrc; + use dep_graph::OpenTask; /// This is the implicit state of rustc. It contains the current /// TyCtxt and query. It is updated when creating a local interner or @@ -1759,6 +1761,8 @@ pub mod tls { /// Used to prevent layout from recursing too deeply. pub layout_depth: usize, + + pub task: &'a Lock, } // A thread local value which stores a pointer to the current ImplicitCtxt @@ -1845,6 +1849,7 @@ pub mod tls { tcx, query: None, layout_depth: 0, + task: &Lock::new(OpenTask::Ignore), }; enter_context(&icx, |_| { f(tcx) diff --git a/src/librustc/ty/maps/plumbing.rs b/src/librustc/ty/maps/plumbing.rs index 4f6925938c802..2b50694f8e2a1 100644 --- a/src/librustc/ty/maps/plumbing.rs +++ b/src/librustc/ty/maps/plumbing.rs @@ -530,6 +530,7 @@ macro_rules! define_maps { tcx, query: Some(job.clone()), layout_depth: icx.layout_depth, + task: icx.task, }; // Use the ImplicitCtxt while we execute the query From e4c25b19d556870c70d3fcab5ace7c4e33cbaeef Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20K=C3=A5re=20Alsaker?= Date: Fri, 6 Apr 2018 12:56:52 +0200 Subject: [PATCH 11/42] gcx ptr --- src/librustc/ty/context.rs | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/src/librustc/ty/context.rs b/src/librustc/ty/context.rs index b369a456ddd50..c60687dbf885c 100644 --- a/src/librustc/ty/context.rs +++ b/src/librustc/ty/context.rs @@ -1201,6 +1201,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { crate_name: &str, tx: mpsc::Sender>, output_filenames: &OutputFilenames, + gcx_ptr: Arc>, f: F) -> R where F: for<'b> FnOnce(TyCtxt<'b, 'tcx, 'tcx>) -> R { @@ -1294,7 +1295,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { output_filenames: Arc::new(output_filenames.clone()), }; - tls::enter_global(gcx, f) + tls::enter_global(gcx, gcx_ptr, f) } pub fn consider_optimizing String>(&self, msg: T) -> bool { @@ -1837,10 +1838,16 @@ pub mod tls { /// creating a initial TyCtxt and ImplicitCtxt. /// This happens once per rustc session and TyCtxts only exists /// inside the `f` function. 
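// Patch 11 threads an `Arc<Mutex<usize>>` into TyCtxt creation so code running
// outside the active context (the deadlock handler added later in this series)
// can reach the GlobalCtxt. Shape of the handoff, with GlobalCtxt reduced to a
// placeholder:
use std::sync::{Arc, Mutex};

struct GlobalCtxt;

fn enter_global<R>(
    gcx: &GlobalCtxt,
    gcx_ptr: Arc<Mutex<usize>>,
    f: impl FnOnce(&GlobalCtxt) -> R,
) -> R {
    // Publish the context's address for the duration of `f` ...
    *gcx_ptr.lock().unwrap() = gcx as *const _ as usize;
    let result = f(gcx);
    // ... and clear it on the way out (the patch does this with an OnDrop
    // guard so panics also clear it).
    *gcx_ptr.lock().unwrap() = 0;
    result
}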
- pub fn enter_global<'gcx, F, R>(gcx: &GlobalCtxt<'gcx>, f: F) -> R + pub fn enter_global<'gcx, F, R>(gcx: &GlobalCtxt<'gcx>, gcx_ptr: Arc>, f: F) -> R where F: for<'a> FnOnce(TyCtxt<'a, 'gcx, 'gcx>) -> R { with_thread_locals(|| { + *gcx_ptr.lock().unwrap() = gcx as *const _ as usize; + + let _on_drop = OnDrop(move || { + *gcx_ptr.lock().unwrap() = 0; + }); + let tcx = TyCtxt { gcx, interners: &gcx.global_interners, From e080622f0ca5b7ce4fe6dfdc2b58edb142cc5209 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20K=C3=A5re=20Alsaker?= Date: Fri, 6 Apr 2018 12:56:59 +0200 Subject: [PATCH 12/42] queries --- src/librustc/ty/context.rs | 44 +++- src/librustc/ty/maps/job.rs | 383 ++++++++++++++++++++++++++++++- src/librustc/ty/maps/mod.rs | 4 +- src/librustc/ty/maps/plumbing.rs | 52 ++++- 4 files changed, 474 insertions(+), 9 deletions(-) diff --git a/src/librustc/ty/context.rs b/src/librustc/ty/context.rs index c60687dbf885c..1544dc45cbc75 100644 --- a/src/librustc/ty/context.rs +++ b/src/librustc/ty/context.rs @@ -70,6 +70,7 @@ use std::ops::Deref; use std::iter; use std::sync::mpsc; use std::sync::Arc; +use std::sync::Mutex; use syntax::abi; use syntax::ast::{self, NodeId}; use syntax::attr; @@ -1564,6 +1565,7 @@ impl<'gcx: 'tcx, 'tcx> GlobalCtxt<'gcx> { query: icx.query.clone(), layout_depth: icx.layout_depth, task: icx.task, + waiter_cycle: None, }; ty::tls::enter_context(&new_icx, |new_icx| { f(new_icx.tcx) @@ -1735,15 +1737,21 @@ impl<'a, 'tcx> Lift<'tcx> for &'a Slice { pub mod tls { use super::{GlobalCtxt, TyCtxt}; + #[cfg(not(parallel_queries))] use std::cell::Cell; use std::fmt; use std::mem; use syntax_pos; + use syntax_pos::Span; use ty::maps; + use ty::maps::CycleError; use errors::{Diagnostic, TRACK_DIAGNOSTICS}; use rustc_data_structures::OnDrop; - use rustc_data_structures::sync::Lrc; + use rayon_core; use dep_graph::OpenTask; + use rustc_data_structures::sync::{Lrc, Lock}; + use std::sync::Arc; + use std::sync::Mutex; /// This is the implicit state of rustc. It contains the current /// TyCtxt and query. 
It is updated when creating a local interner or @@ -1764,11 +1772,25 @@ pub mod tls { pub layout_depth: usize, pub task: &'a Lock, + + pub waiter_cycle: Option<&'a (Span, Lock>>)>, + } + + #[cfg(parallel_queries)] + fn set_tlv R, R>(value: usize, f: F) -> R { + rayon_core::fiber::tlv::set(value, f) + } + + #[cfg(parallel_queries)] + fn get_tlv() -> usize { + rayon_core::fiber::tlv::get() } // A thread local value which stores a pointer to the current ImplicitCtxt + #[cfg(not(parallel_queries))] thread_local!(static TLV: Cell = Cell::new(0)); + #[cfg(not(parallel_queries))] fn set_tlv R, R>(value: usize, f: F) -> R { let old = get_tlv(); let _reset = OnDrop(move || TLV.with(|tlv| tlv.set(old))); @@ -1776,6 +1798,7 @@ pub mod tls { f() } + #[cfg(not(parallel_queries))] fn get_tlv() -> usize { TLV.with(|tlv| tlv.get()) } @@ -1857,6 +1880,7 @@ pub mod tls { query: None, layout_depth: 0, task: &Lock::new(OpenTask::Ignore), + waiter_cycle: None, }; enter_context(&icx, |_| { f(tcx) @@ -1864,6 +1888,24 @@ pub mod tls { }) } + pub unsafe fn with_global_query(gcx_ptr: usize, f: F) -> R + where F: for<'a, 'gcx, 'tcx> FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R + { + let gcx = &*(gcx_ptr as *const GlobalCtxt<'static>); + let tcx = TyCtxt { + gcx, + interners: &gcx.global_interners, + }; + let icx = ImplicitCtxt { + query: None, + tcx, + layout_depth: 0, + task: &Lock::new(OpenTask::Ignore), + waiter_cycle: None, + }; + enter_context(&icx, |_| f(tcx)) + } + /// Allows access to the current ImplicitCtxt in a closure if one is available pub fn with_context_opt(f: F) -> R where F: for<'a, 'gcx, 'tcx> FnOnce(Option<&ImplicitCtxt<'a, 'gcx, 'tcx>>) -> R diff --git a/src/librustc/ty/maps/job.rs b/src/librustc/ty/maps/job.rs index 374406158c1d5..9fded88ea7d11 100644 --- a/src/librustc/ty/maps/job.rs +++ b/src/librustc/ty/maps/job.rs @@ -8,13 +8,33 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
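// The implicit-context pointer now moves through set_tlv/get_tlv, which are a
// plain thread-local in the single-threaded build and a fiber-aware
// rayon_core API under cfg(parallel_queries). The thread-local half, spelled
// out on its own:
use std::cell::Cell;

thread_local!(static TLV: Cell<usize> = Cell::new(0));

fn set_tlv<R>(value: usize, f: impl FnOnce() -> R) -> R {
    let old = TLV.with(|tlv| tlv.replace(value));
    let result = f();
    // The patch restores the previous value through an OnDrop guard.
    TLV.with(|tlv| tlv.set(old));
    result
}

fn get_tlv() -> usize {
    TLV.with(|tlv| tlv.get())
}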
-use rustc_data_structures::sync::{Lock, Lrc}; +#![allow(warnings)] + +use std::mem; +use std::sync::atomic::AtomicBool; +use std::sync::atomic::Ordering; +use rustc_data_structures::sync::{Lock, LockGuard, Lrc, Weak}; +use rustc_data_structures::OnDrop; +use rayon_core::registry::{self, Registry, WorkerThread}; +use rayon_core::fiber::{Fiber, Waitable, WaiterLatch}; +use rayon_core::latch::{LatchProbe, Latch}; use syntax_pos::Span; use ty::tls; use ty::maps::Query; use ty::maps::plumbing::CycleError; use ty::context::TyCtxt; use errors::Diagnostic; +use std::process; +use std::fmt; +use std::sync::{Arc, Mutex}; +use std::collections::HashSet; +#[cfg(parallel_queries)] +use { + std::iter, + std::iter::FromIterator, + syntax_pos::DUMMY_SP, + rustc_data_structures::stable_hasher::{StableHasherResult, StableHasher, HashStable}, +}; /// Indicates the state of a query for a given key in a query map pub(super) enum QueryResult<'tcx, T> { @@ -45,6 +65,8 @@ pub struct QueryJob<'tcx> { /// Diagnostic messages which are emitted while the query executes pub diagnostics: Lock>, + + latch: Lock, } impl<'tcx> QueryJob<'tcx> { @@ -54,6 +76,7 @@ impl<'tcx> QueryJob<'tcx> { diagnostics: Lock::new(Vec::new()), info, parent, + latch: Lock::new(QueryLatch::new()), } } @@ -65,6 +88,45 @@ impl<'tcx> QueryJob<'tcx> { &self, tcx: TyCtxt<'_, 'tcx, 'lcx>, span: Span, + ) -> Result<(), CycleError<'tcx>> { + #[cfg(not(parallel_queries))] + { + self.find_cycle_in_stack(tcx, span) + } + + #[cfg(parallel_queries)] + { + tls::with_related_context(tcx, move |icx| { + let cycle = (span, Lock::new(None)); + + { + let icx = tls::ImplicitCtxt { + waiter_cycle: Some(&cycle), + ..icx.clone() + }; + + tls::enter_context(&icx, |_| { + registry::in_worker(|worker, _| { + unsafe { + worker.wait_enqueue(self); + } + }); + }) + } + + match cycle.1.into_inner() { + None => Ok(()), + Some(cycle) => Err(cycle) + } + }) + } + } + + #[cfg(not(parallel_queries))] + fn find_cycle_in_stack<'lcx>( + &self, + tcx: TyCtxt<'_, 'tcx, 'lcx>, + span: Span, ) -> Result<(), CycleError<'tcx>> { // Get the current executing query (waiter) and find the waitee amongst its parents let mut current_job = tls::with_related_context(tcx, |icx| icx.query.clone()); @@ -96,5 +158,322 @@ impl<'tcx> QueryJob<'tcx> { /// /// This does nothing for single threaded rustc, /// as there are no concurrent jobs which could be waiting on us - pub fn signal_complete(&self) {} + pub fn signal_complete(&self) { + #[cfg(parallel_queries)] + self.latch.lock().set(); + } +} + +#[cfg(parallel_queries)] +impl<'tcx> LatchProbe for QueryJob<'tcx> { + #[inline] + fn probe(&self) -> bool { + self.latch.lock().complete + } +} + +#[cfg(parallel_queries)] +impl<'tcx> Latch for QueryJob<'tcx> { + fn set(&self) { + self.latch.lock().set(); + } +} + +#[cfg(parallel_queries)] +impl<'tcx> Waitable for QueryJob<'tcx> { + fn complete(&self, _worker_thread: &WorkerThread) -> bool { + self.probe() + } + + fn await(&self, worker_thread: &WorkerThread, waiter: Fiber, tlv: usize) { + let mut latch = self.latch.lock(); + if latch.complete { + worker_thread.registry.resume_fiber(worker_thread.index(), waiter); + } else { + latch.waiters.push(QueryWaiter { + worker_index: worker_thread.index(), + fiber: waiter, + tlv, + }); + } + } +} + +#[cfg(parallel_queries)] +struct QueryWaiter { + worker_index: usize, + fiber: Fiber, + tlv: usize, +} + +#[cfg(parallel_queries)] +impl QueryWaiter { + fn icx<'a, 'b, 'gcx, 'tcx>(&'a self) -> *const tls::ImplicitCtxt<'b, 'gcx, 'tcx> { + self.tlv as *const 
tls::ImplicitCtxt + } +} + +#[cfg(parallel_queries)] +struct QueryLatch { + complete: bool, + waiters: Vec, +} + +#[cfg(parallel_queries)] +impl QueryLatch { + fn new() -> Self { + QueryLatch { + complete: false, + waiters: Vec::new(), + } + } + + fn set(&mut self) { + debug_assert!(!self.complete); + self.complete = true; + if !self.waiters.is_empty() { + let registry = Registry::current(); + for waiter in self.waiters.drain(..) { + registry.resume_fiber(waiter.worker_index, waiter.fiber); + } + registry.signal(); + } + } + + fn resume_waiter(&mut self, waiter: usize, error: CycleError) { + debug_assert!(!self.complete); + // Remove the waiter from the list of waiters + let waiter = self.waiters.remove(waiter); + + // Set the cycle error in its icx so it can pick it up when resumed + { + let icx = unsafe { &*waiter.icx() }; + *icx.waiter_cycle.unwrap().1.lock() = Some(error); + } + + // Resume the waiter + let registry = Registry::current(); + registry.resume_fiber(waiter.worker_index, waiter.fiber); + } +} + +fn print_job<'a, 'tcx, 'lcx>(tcx: TyCtxt<'a, 'tcx, 'lcx>, job: &QueryJob<'tcx>) -> String { + format!("[{}] {:x} {:?}", + 0/*entry.id*/, job as *const _ as usize, job.info.query.describe(tcx)) +} + +type Ref<'tcx> = *const QueryJob<'tcx>; + +type Waiter<'tcx> = (Ref<'tcx>, usize); + +fn visit_waiters<'tcx, F>(query_ref: Ref<'tcx>, mut visit: F) -> Option>> +where + F: FnMut(Span, Ref<'tcx>) -> Option>> +{ + let query = unsafe { &*query_ref }; + if let Some(ref parent) = query.parent { + //eprintln!("visiting parent {:?} of query {:?}", parent, query_ref); + if let Some(cycle) = visit(query.info.span, &**parent as Ref) { + return Some(cycle); + } + } + for (i, waiter) in query.latch.lock().waiters.iter().enumerate() { + let icx = unsafe { &*waiter.icx() }; + if let Some(ref waiter_query) = icx.query { + //eprintln!("visiting waiter {:?} of query {:?}", waiter, query_ref); + if visit(icx.waiter_cycle.unwrap().0, &**waiter_query as Ref).is_some() { + // We found a cycle, return this edge as the waiter + return Some(Some((query_ref, i))); + } + } + } + None +} + +fn cycle_check<'tcx>(query: Ref<'tcx>, + span: Span, + stack: &mut Vec<(Span, Ref<'tcx>)>, + visited: &mut HashSet>) -> Option>> { + if visited.contains(&query) { + //eprintln!("visited query {:?} already for cycle {:#?}", query, stack); + + return if let Some(p) = stack.iter().position(|q| q.1 == query) { + // Remove previous stack entries + stack.splice(0..p, iter::empty()); + // Replace the span for the first query with the cycle cause + stack[0].0 = span; + //eprintln!("[found on stack] visited query {:?} already for cycle {:#?}", query, stack); + Some(None) + } else { + /*eprintln!("[not found on stack] visited query {:?} already for cycle {:#?}", + query, stack);*/ + None + } + } + + //eprintln!("looking for cycle {:#?} in query {:?}", stack, query); + + visited.insert(query); + stack.push((span, query)); + + let r = visit_waiters(query, |span, successor| { + //eprintln!("found successor {:?} in query {:?}", successor, query); + cycle_check(successor, span, stack, visited) + }); + + //eprintln!("result for query {:?} {:?}", query, r); + + if r.is_none() { + stack.pop(); + } + + r +} + +fn connected_to_root<'tcx>(query: Ref<'tcx>, visited: &mut HashSet>) -> bool { + if visited.contains(&query) { + return false; + } + + if unsafe { (*query).parent.is_none() } { + return true; + } + + visited.insert(query); + + let mut connected = false; + + visit_waiters(query, |_, successor| { + if connected_to_root(successor, visited) { + 
Some(None) + } else { + None + } + }).is_some() +} + +fn query_entry<'tcx>(r: Ref<'tcx>) -> QueryInfo<'tcx> { + unsafe { (*r).info.clone() } +} + +fn remove_cycle<'tcx>(jobs: &mut Vec>, tcx: TyCtxt<'_, 'tcx, '_>) { + let mut visited = HashSet::new(); + let mut stack = Vec::new(); + if let Some(waiter) = cycle_check(jobs.pop().unwrap(), + DUMMY_SP, + &mut stack, + &mut visited) { + // Reverse the stack so earlier entries require later entries + stack.reverse(); + + let mut spans: Vec<_> = stack.iter().map(|e| e.0).collect(); + let queries = stack.iter().map(|e| e.1); + + // Shift the spans so that a query is matched the span for its waitee + let last = spans.pop().unwrap(); + spans.insert(0, last); + + let mut stack: Vec<_> = spans.into_iter().zip(queries).collect(); + + // Remove the queries in our cycle from the list of jobs to look at + for r in &stack { + jobs.remove_item(&r.1); + } + + let (waitee_query, waiter_idx) = waiter.unwrap(); + let waitee_query = unsafe { &*waitee_query }; + +/* eprintln!("found cycle {:#?} with waitee {:?}", stack, waitee_query.info.query); + + for r in &stack { unsafe { + eprintln!("- query: {}", (*r.1).info.query.describe(tcx)); + } } +*/ + // Find the queries in the cycle which are + // connected to queries outside the cycle + let entry_points: Vec> = stack.iter().filter_map(|query| { + // Mark all the other queries in the cycle as already visited + let mut visited = HashSet::from_iter(stack.iter().filter_map(|q| { + if q.1 != query.1 { + Some(q.1) + } else { + None + } + })); + + if connected_to_root(query.1, &mut visited) { + Some(query.1) + } else { + None + } + }).collect(); + + //eprintln!("entry_points {:#?}", entry_points); + + // Deterministically pick an entry point + // FIXME: Sort this instead + let mut hcx = tcx.create_stable_hashing_context(); + let entry_point = *entry_points.iter().min_by_key(|&&q| { + let mut stable_hasher = StableHasher::::new(); + unsafe { (*q).info.query.hash_stable(&mut hcx, &mut stable_hasher); } + stable_hasher.finish() + }).unwrap(); + + /*unsafe { + eprintln!("found entry point {:?} {:?}", + entry_point, (*entry_point).info.query); + }*/ + + // Shift the stack until our entry point is first + while stack[0].1 != entry_point { + let last = stack.pop().unwrap(); + stack.insert(0, last); + } + + let mut error = CycleError { + usage: None, + cycle: stack.iter().map(|&(s, q)| QueryInfo { + span: s, + query: unsafe { (*q).info.query.clone() }, + } ).collect(), + }; + + waitee_query.latch.lock().resume_waiter(waiter_idx, error); + } +} + +pub fn deadlock(gcx_ptr: usize) { + let on_panic = OnDrop(|| { + eprintln!("deadlock handler panicked, aborting process"); + process::abort(); + }); + + //eprintln!("saw rayon deadlock"); + unsafe { tls::with_global_query(gcx_ptr, |tcx| { + let mut jobs: Vec<_> = tcx.maps.collect_active_jobs().iter().map(|j| &**j as Ref).collect(); +/* + for job in &jobs { unsafe { + eprintln!("still active query: {}", print_job(tcx, &**job)); + if let Some(ref parent) = (**job).parent { + eprintln!(" - has parent: {}", print_job(tcx, &**parent)); + } + for (i, waiter) in (**job).latch.lock().waiters.iter().enumerate() { + let icx = &*waiter.icx(); + if let Some(ref query) = icx.query { + eprintln!(" - has waiter d{}: {}", i, print_job(tcx, &**query)); + + } else { + eprintln!(" - has no-query waiter d{}", i); + } + } + } } +*/ + while jobs.len() > 0 { + remove_cycle(&mut jobs, tcx); + } + })}; + //eprintln!("aborting due to deadlock"); + //process::abort(); + mem::forget(on_panic); + 
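// remove_cycle above is a depth-first search over the query waits-for graph.
// The detection step in isolation, over plain indices instead of raw QueryJob
// pointers:
use std::collections::HashSet;

fn find_cycle(
    node: usize,
    waits_for: &[Vec<usize>],
    stack: &mut Vec<usize>,
    visited: &mut HashSet<usize>,
) -> Option<Vec<usize>> {
    if let Some(pos) = stack.iter().position(|&n| n == node) {
        return Some(stack[pos..].to_vec()); // the tail of the stack is the cycle
    }
    if !visited.insert(node) {
        return None; // explored earlier without finding a cycle through here
    }
    stack.push(node);
    for &succ in &waits_for[node] {
        if let Some(cycle) = find_cycle(succ, waits_for, stack, visited) {
            return Some(cycle);
        }
    }
    stack.pop();
    None
}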
Registry::current().signal(); } diff --git a/src/librustc/ty/maps/mod.rs b/src/librustc/ty/maps/mod.rs index 2325b1893d996..118bb8577c89e 100644 --- a/src/librustc/ty/maps/mod.rs +++ b/src/librustc/ty/maps/mod.rs @@ -64,10 +64,10 @@ use syntax::symbol::Symbol; #[macro_use] mod plumbing; use self::plumbing::*; -pub use self::plumbing::force_from_dep_node; +pub use self::plumbing::{force_from_dep_node, CycleError}; mod job; -pub use self::job::{QueryJob, QueryInfo}; +pub use self::job::{QueryJob, QueryInfo, deadlock}; use self::job::QueryResult; mod keys; diff --git a/src/librustc/ty/maps/plumbing.rs b/src/librustc/ty/maps/plumbing.rs index 2b50694f8e2a1..5be571c29e2fc 100644 --- a/src/librustc/ty/maps/plumbing.rs +++ b/src/librustc/ty/maps/plumbing.rs @@ -63,7 +63,7 @@ pub(super) trait GetCacheInternal<'tcx>: QueryDescription<'tcx> + Sized { } #[derive(Clone)] -pub(super) struct CycleError<'tcx> { +pub struct CycleError<'tcx> { /// The query and related span which uses the cycle pub(super) usage: Option<(Span, Query<'tcx>)>, pub(super) cycle: Vec>, @@ -226,9 +226,18 @@ macro_rules! define_maps { use dep_graph::DepNodeIndex; use std::mem; use errors::Diagnostic; + #[cfg(not(parallel_queries))] use errors::FatalError; use rustc_data_structures::sync::{Lock, LockGuard}; use rustc_data_structures::OnDrop; + use std::panic; + use rayon_core; + use { + rustc_data_structures::stable_hasher::HashStable, + rustc_data_structures::stable_hasher::StableHasherResult, + rustc_data_structures::stable_hasher::StableHasher, + ich::StableHashingContext + }; define_map_struct! { tcx: $tcx, @@ -243,10 +252,23 @@ macro_rules! define_maps { $($name: Lock::new(QueryMap::new())),* } } + + pub fn collect_active_jobs(&self) -> Vec>> { + let mut jobs = Vec::new(); + + $(for v in self.$name.lock().map.values() { + match *v { + QueryResult::Started(ref job) => jobs.push(job.clone()), + _ => (), + } + })* + + return jobs; + } } #[allow(bad_style)] - #[derive(Copy, Clone, Debug, PartialEq, Eq)] + #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] pub enum Query<$tcx> { $($(#[$attr])* $name($K)),* } @@ -294,6 +316,17 @@ macro_rules! define_maps { } } + impl<'a, $tcx> HashStable> for Query<$tcx> { + fn hash_stable(&self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher) { + mem::discriminant(self).hash_stable(hcx, hasher); + match *self { + $(Query::$name(key) => key.hash_stable(hcx, hasher),)* + } + } + } + pub mod queries { use std::marker::PhantomData; @@ -343,7 +376,16 @@ macro_rules! define_maps { let result = Ok(((&value.value).clone(), value.index)); return TryGetLock::JobCompleted(result); }, - QueryResult::Poisoned => FatalError.raise(), + QueryResult::Poisoned => { + #[cfg(not(parallel_queries))] + { + FatalError.raise(); + } + #[cfg(parallel_queries)] + { + panic::resume_unwind(Box::new(rayon_core::PoisonedJob)) + } + }, } } else { None @@ -531,6 +573,7 @@ macro_rules! define_maps { query: Some(job.clone()), layout_depth: icx.layout_depth, task: icx.task, + waiter_cycle: None, }; // Use the ImplicitCtxt while we execute the query @@ -930,7 +973,8 @@ pub fn force_from_dep_node<'a, 'gcx, 'lcx>(tcx: TyCtxt<'a, 'gcx, 'lcx>, ::ty::maps::QueryMsg::$query(profq_key!(tcx, $key)) ) ); - + // Forcing doesn't add a read edge, but executing the query may add read edges. + // Could this add a `read edge too? 
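// collect_active_jobs above walks every query map and clones the in-flight
// jobs so the deadlock handler sees the whole waits-for graph. Reduced to a
// single map, with a string standing in for QueryJob:
use std::collections::HashMap;
use std::sync::{Arc, Mutex};

#[allow(dead_code)]
enum QueryResult<V> {
    Started(Arc<String>), // job handle while the query is executing
    Complete(V),
    Poisoned,
}

fn collect_active_jobs<V>(map: &Mutex<HashMap<u32, QueryResult<V>>>) -> Vec<Arc<String>> {
    map.lock()
        .unwrap()
        .values()
        .filter_map(|entry| match entry {
            QueryResult::Started(job) => Some(job.clone()),
            _ => None,
        })
        .collect()
}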
match ::ty::maps::queries::$query::force(tcx, $key, span, *dep_node) { Ok(_) => {}, Err(e) => { From bb47361f6c89cb6c0642f71f5fb9b96004c3687d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20K=C3=A5re=20Alsaker?= Date: Thu, 15 Mar 2018 10:05:33 +0100 Subject: [PATCH 13/42] parallel abs --- src/librustc/ty/mod.rs | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/src/librustc/ty/mod.rs b/src/librustc/ty/mod.rs index c3d2d5675de05..1e24df269f559 100644 --- a/src/librustc/ty/mod.rs +++ b/src/librustc/ty/mod.rs @@ -44,7 +44,7 @@ use std::cmp; use std::fmt; use std::hash::{Hash, Hasher}; use std::ops::Deref; -use rustc_data_structures::sync::Lrc; +use rustc_data_structures::sync::{ParallelIterator, par_iter, Send, Sync, Lrc}; use std::slice; use std::vec::IntoIter; use std::mem; @@ -2276,6 +2276,12 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { .map(move |&body_id| self.hir.body_owner_def_id(body_id)) } + pub fn par_body_owners(self, f: F) { + par_iter(&self.hir.krate().body_ids).for_each(|&body_id| { + f(self.hir.body_owner_def_id(body_id)) + }); + } + pub fn expr_span(self, id: NodeId) -> Span { match self.hir.find(id) { Some(hir_map::NodeExpr(e)) => { From ff5fe8ca0c411a1f36d09ab2c2812db5b0eab1e5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20K=C3=A5re=20Alsaker?= Date: Thu, 15 Mar 2018 10:06:13 +0100 Subject: [PATCH 14/42] timing changes --- src/librustc/Cargo.toml | 4 ++ src/librustc/lib.rs | 5 ++ src/librustc/util/common.rs | 114 +++++++++++++++++++++++++++++++++--- 3 files changed, 116 insertions(+), 7 deletions(-) diff --git a/src/librustc/Cargo.toml b/src/librustc/Cargo.toml index 2aae0f24d4849..e44661c786b95 100644 --- a/src/librustc/Cargo.toml +++ b/src/librustc/Cargo.toml @@ -57,3 +57,7 @@ byteorder = { version = "1.1", features = ["i128"]} # compiles, then please feel free to do so! 
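Stepping back to the par_body_owners helper introduced just above: later patches in this series rewrite loops of the form `for def_id in tcx.body_owners() { ... }` into `tcx.par_body_owners(|def_id| ...)` for borrowck, MIR borrowck and typeck. The compiler funnels this through rustc_data_structures::sync::par_iter so the serial build is unchanged; the standalone sketch below shows the same conversion shape directly on rayon (it assumes a rayon dependency, and `check_body` is a placeholder for any per-body analysis):

    use rayon::prelude::*;

    fn check_body(def_id: usize) {
        // stand-in for a per-body analysis such as borrowck or typeck
        let _ = def_id;
    }

    fn main() {
        let body_owners: Vec<usize> = (0..1_000).collect();

        // Sequential form, as the passes are written before this series:
        for &def_id in &body_owners {
            check_body(def_id);
        }

        // Parallel form; the closure gets only shared access, mirroring par_body_owners.
        body_owners.par_iter().for_each(|&def_id| check_body(def_id));
    }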
flate2 = "1.0" tempdir = "0.3" + +[target.'cfg(windows)'.dependencies] +kernel32-sys = "0.2.2" +winapi = "0.2.8" diff --git a/src/librustc/lib.rs b/src/librustc/lib.rs index a2cefe488c68a..8aee10a71a3f1 100644 --- a/src/librustc/lib.rs +++ b/src/librustc/lib.rs @@ -40,6 +40,7 @@ html_favicon_url = "https://doc.rust-lang.org/favicon.ico", html_root_url = "https://doc.rust-lang.org/nightly/")] +#![feature(asm)] #![feature(box_patterns)] #![feature(box_syntax)] #![feature(const_fn)] @@ -80,6 +81,10 @@ extern crate graphviz; #[macro_use] extern crate lazy_static; #[cfg(windows)] extern crate libc; +#[cfg(windows)] +extern crate kernel32; +#[cfg(windows)] +extern crate winapi; extern crate rustc_back; #[macro_use] extern crate rustc_data_structures; extern crate serialize; diff --git a/src/librustc/util/common.rs b/src/librustc/util/common.rs index bb6aa654c2960..60b91546090fc 100644 --- a/src/librustc/util/common.rs +++ b/src/librustc/util/common.rs @@ -159,6 +159,20 @@ pub fn time_ext(do_it: bool, sess: Option<&Session>, what: &str, f: F) -> profq_msg(sess, ProfileQueriesMsg::TimeBegin(what.to_string())) } } + + #[cfg(not(all(windows, parallel_queries, any(target_arch = "x86", target_arch = "x86_64"))))] + let rv = time_impl(sess, what, f); + #[cfg(all(windows, parallel_queries, any(target_arch = "x86", target_arch = "x86_64")))] + let rv = time_threads_impl(sess, what, f); + + TIME_DEPTH.with(|slot| slot.set(old)); + + rv +} + +fn time_impl(sess: Option<&Session>, what: &str, f: F) -> T where + F: FnOnce() -> T, +{ let start = Instant::now(); let rv = f(); let dur = start.elapsed(); @@ -167,12 +181,98 @@ pub fn time_ext(do_it: bool, sess: Option<&Session>, what: &str, f: F) -> profq_msg(sess, ProfileQueriesMsg::TimeEnd) } } + print_time_passes_entry_internal(what, duration_to_secs_str(dur)); + rv +} - print_time_passes_entry_internal(what, dur); +#[cfg(all(windows, parallel_queries, any(target_arch = "x86", target_arch = "x86_64")))] +fn time_threads_impl(sess: Option<&Session>, what: &str, f: F) -> T where + F: FnOnce() -> T, +{ + use rayon_core::registry; + use std::iter; + use winapi; + use kernel32; + + #[allow(unused_mut)] + fn read_counter() -> u64 { + let mut low: u32; + let mut high: u32; + + unsafe { + asm!("xor %rax, %rax; cpuid; rdtsc" + : "={eax}" (low), "={edx}" (high) :: "memory", "rbx", "rcx"); + } - TIME_DEPTH.with(|slot| slot.set(old)); + ((high as u64) << 32) | (low as u64) + } - rv + let registry = registry::get_current_registry(); + if let Some(registry) = registry { + let freq = unsafe { + let mut freq = 0; + assert!(kernel32::QueryPerformanceFrequency(&mut freq) == winapi::TRUE); + freq as u64 * 1000 + }; + + let threads: Vec<_> = { + let threads = registry.handles.lock(); + let current = unsafe { + iter::once(kernel32::GetCurrentThread()) + }; + current.chain(threads.iter().map(|t| t.0)).collect() + }; + let mut begin: Vec = iter::repeat(0).take(threads.len()).collect(); + let mut end: Vec = iter::repeat(0).take(threads.len()).collect(); + for (i, &handle) in threads.iter().enumerate() { + unsafe { + assert!(kernel32::QueryThreadCycleTime(handle, &mut begin[i]) == winapi::TRUE); + } + } + + let time_start = read_counter(); + let result = f(); + let time_end = read_counter(); + for (i, &handle) in threads.iter().enumerate() { + unsafe { + assert!(kernel32::QueryThreadCycleTime(handle, &mut end[i]) == winapi::TRUE); + } + } + if let Some(sess) = sess { + if cfg!(debug_assertions) { + profq_msg(sess, ProfileQueriesMsg::TimeEnd) + } + } + let time = time_end - time_start; 
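// Worked example for the report assembled below (illustrative numbers, not real output):
// with four worker threads plus the requesting thread, a wall-clock span of T reference
// cycles and a combined 3.2*T thread cycles reported by QueryThreadCycleTime give
// core_usage = 3.2, so the entry prints "cores 3.20x - cpu 0.80" (0.80 = 3.2 / 4 workers).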
+ let time_secs = time as f64 / freq as f64; + + let thread_times: Vec = end.iter().zip(begin.iter()).map(|(e, b)| *e - *b).collect(); + + let total_thread_time: u64 = thread_times.iter().cloned().sum(); + let core_usage = total_thread_time as f64 / time as f64; + + let mut data = format!("{:.6} - cores {:.2}x - cpu {:.2} - threads (", + time_secs, + core_usage, + core_usage / (thread_times.len() - 1) as f64); + + for (i, thread_time) in thread_times.into_iter().enumerate() { + data.push_str(&format!("{:.2}", thread_time as f64 / time as f64)); + if i == 0 { + data.push_str(" - "); + } + else if i < begin.len() - 1 { + data.push_str(" "); + } + } + + data.push_str(")"); + + print_time_passes_entry_internal(what, data); + result + } else { + time_impl(sess, what, f) + } } pub fn print_time_passes_entry(do_it: bool, what: &str, dur: Duration) { @@ -186,12 +286,12 @@ pub fn print_time_passes_entry(do_it: bool, what: &str, dur: Duration) { r }); - print_time_passes_entry_internal(what, dur); + print_time_passes_entry_internal(what, duration_to_secs_str(dur)); TIME_DEPTH.with(|slot| slot.set(old)); } -fn print_time_passes_entry_internal(what: &str, dur: Duration) { +fn print_time_passes_entry_internal(what: &str, data: String) { let indentation = TIME_DEPTH.with(|slot| slot.get()); let mem_string = match get_resident() { @@ -203,7 +303,7 @@ fn print_time_passes_entry_internal(what: &str, dur: Duration) { }; println!("{}time: {}{}\t{}", repeat(" ").take(indentation).collect::(), - duration_to_secs_str(dur), + data, mem_string, what); } @@ -215,7 +315,7 @@ pub fn duration_to_secs_str(dur: Duration) -> String { let secs = dur.as_secs() as f64 + dur.subsec_nanos() as f64 / NANOS_PER_SEC; - format!("{:.3}", secs) + format!("{:.6}", secs) } pub fn to_readable_str(mut val: usize) -> String { From 43eb33877b24b0159ec0c6c293ed52be503637ed Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20K=C3=A5re=20Alsaker?= Date: Thu, 15 Mar 2018 10:06:46 +0100 Subject: [PATCH 15/42] ThreadLocal --- src/librustc/util/common.rs | 42 +++++++++++++++++++++++++++++++++++++ 1 file changed, 42 insertions(+) diff --git a/src/librustc/util/common.rs b/src/librustc/util/common.rs index 60b91546090fc..459e8dd07f530 100644 --- a/src/librustc/util/common.rs +++ b/src/librustc/util/common.rs @@ -22,16 +22,58 @@ use std::panic; use std::env; use std::path::Path; use std::time::{Duration, Instant}; +use std::cell::UnsafeCell; use std::sync::mpsc::{Sender}; use syntax_pos::{SpanData}; use ty::maps::{QueryMsg}; use ty::TyCtxt; use dep_graph::{DepNode}; +use rayon_core::registry::Registry; use proc_macro; use lazy_static; use session::Session; +scoped_thread_local!(pub static THREAD_INDEX: usize); + +#[repr(align(64))] +struct CacheAligned(T); + +// FIXME: Find a way to ensure this isn't transferred between multiple thread pools +// Thread pools should be the only thing that has a valid THREAD_INDEX. +// Make it contain a Arc and get the index based on the current worker? 
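The ThreadLocal defined just below gives each pool worker (plus the requesting thread) its own slot, indexed by THREAD_INDEX and padded to a cache line by the CacheAligned wrapper so concurrent writers do not false-share. A hedged sketch of the intended usage, assuming the caller is already running inside the pool that set THREAD_INDEX (the FIXME above notes this is not yet enforced); `items`, `check` and `Finding` are placeholders, not names from this series:

    // Sketch only: relies on the ThreadLocal and par_iter abstractions added in this series.
    let per_thread: ThreadLocal<Vec<Finding>> = ThreadLocal::new(|| Vec::new());

    par_iter(&items).for_each(|item| {
        // current() hands each worker its own Vec, so no lock is taken on the hot path.
        per_thread.current().push(check(item));
    });

    // After the parallel section, flatten the per-worker vectors into one result.
    let findings: Vec<Finding> = per_thread.collect();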
+pub struct ThreadLocal(Vec>>); + +unsafe impl Send for ThreadLocal {} +unsafe impl Sync for ThreadLocal {} + +impl ThreadLocal { + pub fn new(f: F) -> ThreadLocal + where F: Fn() -> T, + { + let n = Registry::current_num_threads(); + ThreadLocal((0..(1 + n)).map(|_| CacheAligned(UnsafeCell::new(f()))).collect()) + } + + pub fn into_inner(self) -> Vec { + self.0.into_iter().map(|c| c.0.into_inner()).collect() + } + + pub fn current(&self) -> &mut T { + use std::ops::Index; + + unsafe { + &mut *(self.0.index(THREAD_INDEX.with(|t| *t)).0.get()) + } + } +} + +impl ThreadLocal> { + pub fn collect(self) -> Vec { + self.into_inner().into_iter().flat_map(|v| v).collect() + } +} + // The name of the associated type for `Fn` return types pub const FN_OUTPUT_NAME: &'static str = "Output"; From bbcd11dbbd3823818a7a85e6bac2f1a4170cee66 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20K=C3=A5re=20Alsaker?= Date: Thu, 15 Mar 2018 10:08:52 +0100 Subject: [PATCH 16/42] Parallel code --- src/librustc_borrowck/borrowck/mod.rs | 4 ++-- src/librustc_borrowck/lib.rs | 2 ++ src/librustc_driver/driver.rs | 20 +++++++++---------- src/librustc_typeck/check/mod.rs | 16 +++++++-------- .../coherence/inherent_impls_overlap.rs | 12 +++++------ src/librustc_typeck/coherence/mod.rs | 5 +++-- src/librustc_typeck/coherence/orphan.rs | 14 ++++++------- src/librustc_typeck/coherence/unsafety.rs | 16 +++++++-------- src/librustc_typeck/collect.rs | 14 +++++++++++-- src/librustc_typeck/impl_wf_check.rs | 12 +++++------ src/librustc_typeck/lib.rs | 2 ++ src/librustc_typeck/variance/test.rs | 12 +++++------ 12 files changed, 71 insertions(+), 58 deletions(-) diff --git a/src/librustc_borrowck/borrowck/mod.rs b/src/librustc_borrowck/borrowck/mod.rs index 6d832d4060a1f..cff26f53b6e41 100644 --- a/src/librustc_borrowck/borrowck/mod.rs +++ b/src/librustc_borrowck/borrowck/mod.rs @@ -67,9 +67,9 @@ pub struct LoanDataFlowOperator; pub type LoanDataFlow<'a, 'tcx> = DataFlowContext<'a, 'tcx, LoanDataFlowOperator>; pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { - for body_owner_def_id in tcx.body_owners() { + tcx.par_body_owners(|body_owner_def_id| { tcx.borrowck(body_owner_def_id); - } + }); } pub fn provide(providers: &mut Providers) { diff --git a/src/librustc_borrowck/lib.rs b/src/librustc_borrowck/lib.rs index 52a357e1a1d31..a5a20af0e4e4a 100644 --- a/src/librustc_borrowck/lib.rs +++ b/src/librustc_borrowck/lib.rs @@ -17,6 +17,8 @@ #![feature(from_ref)] #![feature(quote)] +#![recursion_limit="256"] + #[macro_use] extern crate log; extern crate syntax; extern crate syntax_pos; diff --git a/src/librustc_driver/driver.rs b/src/librustc_driver/driver.rs index c5f5825afff73..28fe48bde4307 100644 --- a/src/librustc_driver/driver.rs +++ b/src/librustc_driver/driver.rs @@ -1149,17 +1149,15 @@ where time(sess, "borrow checking", || borrowck::check_crate(tcx)); - time(sess, "MIR borrow checking", || { - for def_id in tcx.body_owners() { - tcx.mir_borrowck(def_id); - } - }); - - time(sess, "MIR effect checking", || { - for def_id in tcx.body_owners() { - mir::transform::check_unsafety::check_unsafety(tcx, def_id) - } - }); + time(sess, + "MIR borrow checking", + || tcx.par_body_owners(|def_id| { tcx.mir_borrowck(def_id); })); + + time(sess, + "MIR effect checking", + || tcx.par_body_owners(|def_id| { + mir::transform::check_unsafety::check_unsafety(tcx.global_tcx(), def_id) + })); // Avoid overwhelming user with errors if type checking failed. 
// I'm not sure how helpful this is, to be honest, but it avoids // a diff --git a/src/librustc_typeck/check/mod.rs b/src/librustc_typeck/check/mod.rs index ca35153d571db..ab0fa77787f20 100644 --- a/src/librustc_typeck/check/mod.rs +++ b/src/librustc_typeck/check/mod.rs @@ -129,7 +129,7 @@ use syntax::util::lev_distance::find_best_match_for_name; use syntax_pos::{self, BytePos, Span, MultiSpan}; use rustc::hir::intravisit::{self, Visitor, NestedVisitorMap}; -use rustc::hir::itemlikevisit::ItemLikeVisitor; +use rustc::hir::itemlikevisit::ParItemLikeVisitor; use rustc::hir::map::Node; use rustc::hir::{self, PatKind}; use rustc::middle::lang_items; @@ -681,12 +681,12 @@ impl<'a, 'gcx, 'tcx> Inherited<'a, 'gcx, 'tcx> { struct CheckItemTypesVisitor<'a, 'tcx: 'a> { tcx: TyCtxt<'a, 'tcx, 'tcx> } -impl<'a, 'tcx> ItemLikeVisitor<'tcx> for CheckItemTypesVisitor<'a, 'tcx> { - fn visit_item(&mut self, i: &'tcx hir::Item) { +impl<'a, 'tcx> ParItemLikeVisitor<'tcx> for CheckItemTypesVisitor<'a, 'tcx> { + fn visit_item(&self, i: &'tcx hir::Item) { check_item_type(self.tcx, i); } - fn visit_trait_item(&mut self, _: &'tcx hir::TraitItem) { } - fn visit_impl_item(&mut self, _: &'tcx hir::ImplItem) { } + fn visit_trait_item(&self, _: &'tcx hir::TraitItem) { } + fn visit_impl_item(&self, _: &'tcx hir::ImplItem) { } } pub fn check_wf_new<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Result<(), ErrorReported> { @@ -698,7 +698,7 @@ pub fn check_wf_new<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Result<(), ErrorRe pub fn check_item_types<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Result<(), ErrorReported> { tcx.sess.track_errors(|| { - tcx.hir.krate().visit_all_item_likes(&mut CheckItemTypesVisitor { tcx }); + tcx.hir.krate().par_visit_all_item_likes(&CheckItemTypesVisitor { tcx }); }) } @@ -711,9 +711,9 @@ fn typeck_item_bodies<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, crate_num: CrateNum { debug_assert!(crate_num == LOCAL_CRATE); Ok(tcx.sess.track_errors(|| { - for body_owner_def_id in tcx.body_owners() { + tcx.par_body_owners(|body_owner_def_id| { ty::maps::queries::typeck_tables_of::ensure(tcx, body_owner_def_id); - } + }); })?) } diff --git a/src/librustc_typeck/coherence/inherent_impls_overlap.rs b/src/librustc_typeck/coherence/inherent_impls_overlap.rs index 88a2dc817ae63..c9644bb1fdaaf 100644 --- a/src/librustc_typeck/coherence/inherent_impls_overlap.rs +++ b/src/librustc_typeck/coherence/inherent_impls_overlap.rs @@ -11,7 +11,7 @@ use namespace::Namespace; use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE}; use rustc::hir; -use rustc::hir::itemlikevisit::ItemLikeVisitor; +use rustc::hir::itemlikevisit::ParItemLikeVisitor; use rustc::traits::{self, IntercrateMode}; use rustc::ty::TyCtxt; @@ -21,7 +21,7 @@ pub fn crate_inherent_impls_overlap_check<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, crate_num: CrateNum) { assert_eq!(crate_num, LOCAL_CRATE); let krate = tcx.hir.krate(); - krate.visit_all_item_likes(&mut InherentOverlapChecker { tcx }); + krate.par_visit_all_item_likes(&InherentOverlapChecker { tcx }); } struct InherentOverlapChecker<'a, 'tcx: 'a> { @@ -119,8 +119,8 @@ impl<'a, 'tcx> InherentOverlapChecker<'a, 'tcx> { } } -impl<'a, 'tcx, 'v> ItemLikeVisitor<'v> for InherentOverlapChecker<'a, 'tcx> { - fn visit_item(&mut self, item: &'v hir::Item) { +impl<'a, 'tcx, 'v> ParItemLikeVisitor<'v> for InherentOverlapChecker<'a, 'tcx> { + fn visit_item(&self, item: &'v hir::Item) { match item.node { hir::ItemEnum(..) | hir::ItemStruct(..) 
| @@ -133,9 +133,9 @@ impl<'a, 'tcx, 'v> ItemLikeVisitor<'v> for InherentOverlapChecker<'a, 'tcx> { } } - fn visit_trait_item(&mut self, _trait_item: &hir::TraitItem) { + fn visit_trait_item(&self, _trait_item: &hir::TraitItem) { } - fn visit_impl_item(&mut self, _impl_item: &hir::ImplItem) { + fn visit_impl_item(&self, _impl_item: &hir::ImplItem) { } } diff --git a/src/librustc_typeck/coherence/mod.rs b/src/librustc_typeck/coherence/mod.rs index 07b7c600b9f30..1a52e36b103e8 100644 --- a/src/librustc_typeck/coherence/mod.rs +++ b/src/librustc_typeck/coherence/mod.rs @@ -19,6 +19,7 @@ use hir::def_id::{DefId, LOCAL_CRATE}; use rustc::traits; use rustc::ty::{self, TyCtxt, TypeFoldable}; use rustc::ty::maps::Providers; +use rustc_data_structures::sync::{ParallelIterator, par_iter}; use syntax::ast; @@ -126,9 +127,9 @@ fn coherent_trait<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) { } pub fn check_coherence<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { - for &trait_def_id in tcx.hir.krate().trait_impls.keys() { + par_iter(&tcx.hir.krate().trait_impls).for_each(|(&trait_def_id, _)| { ty::maps::queries::coherent_trait::ensure(tcx, trait_def_id); - } + }); unsafety::check(tcx); orphan::check(tcx); diff --git a/src/librustc_typeck/coherence/orphan.rs b/src/librustc_typeck/coherence/orphan.rs index 6d6594e55437d..1598437ba02d1 100644 --- a/src/librustc_typeck/coherence/orphan.rs +++ b/src/librustc_typeck/coherence/orphan.rs @@ -13,25 +13,25 @@ use rustc::traits; use rustc::ty::{self, TyCtxt}; -use rustc::hir::itemlikevisit::ItemLikeVisitor; +use rustc::hir::itemlikevisit::ParItemLikeVisitor; use rustc::hir; pub fn check<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { - let mut orphan = OrphanChecker { tcx: tcx }; - tcx.hir.krate().visit_all_item_likes(&mut orphan); + let orphan = OrphanChecker { tcx: tcx }; + tcx.hir.krate().par_visit_all_item_likes(&orphan); } struct OrphanChecker<'cx, 'tcx: 'cx> { tcx: TyCtxt<'cx, 'tcx, 'tcx>, } -impl<'cx, 'tcx, 'v> ItemLikeVisitor<'v> for OrphanChecker<'cx, 'tcx> { +impl<'cx, 'tcx, 'v> ParItemLikeVisitor<'v> for OrphanChecker<'cx, 'tcx> { /// Checks exactly one impl for orphan rules and other such /// restrictions. In this fn, it can happen that multiple errors /// apply to a specific impl, so just return after reporting one /// to prevent inundating the user with a bunch of similar error /// reports. - fn visit_item(&mut self, item: &hir::Item) { + fn visit_item(&self, item: &hir::Item) { let def_id = self.tcx.hir.local_def_id(item.id); match item.node { hir::ItemImpl(.., Some(_), _, _) => { @@ -161,9 +161,9 @@ impl<'cx, 'tcx, 'v> ItemLikeVisitor<'v> for OrphanChecker<'cx, 'tcx> { } } - fn visit_trait_item(&mut self, _trait_item: &hir::TraitItem) { + fn visit_trait_item(&self, _trait_item: &hir::TraitItem) { } - fn visit_impl_item(&mut self, _impl_item: &hir::ImplItem) { + fn visit_impl_item(&self, _impl_item: &hir::ImplItem) { } } diff --git a/src/librustc_typeck/coherence/unsafety.rs b/src/librustc_typeck/coherence/unsafety.rs index 4aa876e85b69a..60d88b5c7ee07 100644 --- a/src/librustc_typeck/coherence/unsafety.rs +++ b/src/librustc_typeck/coherence/unsafety.rs @@ -12,12 +12,12 @@ //! crate or pertains to a type defined in this crate. 
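The coherence change above replaces a sequential walk over trait_impls.keys() with par_iter over the whole map, ensuring each coherent_trait query from a worker thread. As with the other conversions, the compiler goes through sync::par_iter so this degrades to a plain iterator without cfg(parallel_queries); the standalone sketch below shows the same pattern straight on rayon (assumes a rayon dependency; the println stands in for the query ensure call):

    use std::collections::HashMap;
    use rayon::prelude::*;

    fn main() {
        let trait_impls: HashMap<u32, Vec<u32>> =
            vec![(1, vec![10, 11]), (2, vec![20])].into_iter().collect();

        // Counterpart of `par_iter(&krate.trait_impls).for_each(..)`: a shared borrow of a
        // HashMap parallel-iterates over (&K, &V) pairs, in no particular order.
        trait_impls.par_iter().for_each(|(&trait_def_id, impls)| {
            // stand-in for ty::maps::queries::coherent_trait::ensure(tcx, trait_def_id)
            println!("coherence-checking trait {} ({} impls)", trait_def_id, impls.len());
        });
    }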
use rustc::ty::TyCtxt; -use rustc::hir::itemlikevisit::ItemLikeVisitor; +use rustc::hir::itemlikevisit::ParItemLikeVisitor; use rustc::hir::{self, Unsafety}; pub fn check<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { - let mut unsafety = UnsafetyChecker { tcx: tcx }; - tcx.hir.krate().visit_all_item_likes(&mut unsafety); + let unsafety = UnsafetyChecker { tcx: tcx }; + tcx.hir.krate().par_visit_all_item_likes(&unsafety); } struct UnsafetyChecker<'cx, 'tcx: 'cx> { @@ -25,7 +25,7 @@ struct UnsafetyChecker<'cx, 'tcx: 'cx> { } impl<'cx, 'tcx, 'v> UnsafetyChecker<'cx, 'tcx> { - fn check_unsafety_coherence(&mut self, + fn check_unsafety_coherence(&self, item: &'v hir::Item, impl_generics: Option<&hir::Generics>, unsafety: hir::Unsafety, @@ -78,8 +78,8 @@ impl<'cx, 'tcx, 'v> UnsafetyChecker<'cx, 'tcx> { } } -impl<'cx, 'tcx, 'v> ItemLikeVisitor<'v> for UnsafetyChecker<'cx, 'tcx> { - fn visit_item(&mut self, item: &'v hir::Item) { +impl<'cx, 'tcx, 'v> ParItemLikeVisitor<'v> for UnsafetyChecker<'cx, 'tcx> { + fn visit_item(&self, item: &'v hir::Item) { match item.node { hir::ItemImpl(unsafety, polarity, _, ref generics, ..) => { self.check_unsafety_coherence(item, Some(generics), unsafety, polarity); @@ -88,9 +88,9 @@ impl<'cx, 'tcx, 'v> ItemLikeVisitor<'v> for UnsafetyChecker<'cx, 'tcx> { } } - fn visit_trait_item(&mut self, _trait_item: &hir::TraitItem) { + fn visit_trait_item(&self, _trait_item: &hir::TraitItem) { } - fn visit_impl_item(&mut self, _impl_item: &hir::ImplItem) { + fn visit_impl_item(&self, _impl_item: &hir::ImplItem) { } } diff --git a/src/librustc_typeck/collect.rs b/src/librustc_typeck/collect.rs index 6bd38244e8caf..7542f33739e2d 100644 --- a/src/librustc_typeck/collect.rs +++ b/src/librustc_typeck/collect.rs @@ -49,6 +49,7 @@ use syntax_pos::{Span, DUMMY_SP}; use rustc::hir::{self, map as hir_map, TransFnAttrs, TransFnAttrFlags, Unsafety}; use rustc::hir::intravisit::{self, Visitor, NestedVisitorMap}; +use rustc::hir::itemlikevisit::{IntoVisitor, ParDeepVisitor}; use rustc::hir::def::{Def, CtorKind}; use rustc::hir::def_id::{DefId, LOCAL_CRATE}; @@ -56,8 +57,8 @@ use rustc::hir::def_id::{DefId, LOCAL_CRATE}; // Main entry point pub fn collect_item_types<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { - let mut visitor = CollectItemTypesVisitor { tcx: tcx }; - tcx.hir.krate().visit_all_item_likes(&mut visitor.as_deep_visitor()); + let visitor = CollectItemTypesVisitor { tcx: tcx }; + tcx.hir.krate().par_visit_all_item_likes(&ParDeepVisitor(visitor)); } pub fn provide(providers: &mut Providers) { @@ -97,10 +98,19 @@ pub struct ItemCtxt<'a,'tcx:'a> { /////////////////////////////////////////////////////////////////////////// +#[derive(Clone)] struct CollectItemTypesVisitor<'a, 'tcx: 'a> { tcx: TyCtxt<'a, 'tcx, 'tcx> } +impl<'a, 'tcx: 'a> IntoVisitor<'tcx> for CollectItemTypesVisitor<'a, 'tcx> { + type Visitor = Self; + + fn into_visitor(&self) -> Self { + self.clone() + } +} + impl<'a, 'tcx> Visitor<'tcx> for CollectItemTypesVisitor<'a, 'tcx> { fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> { NestedVisitorMap::OnlyBodies(&self.tcx.hir) diff --git a/src/librustc_typeck/impl_wf_check.rs b/src/librustc_typeck/impl_wf_check.rs index faf3ccb1133ad..8c9a309d7ce38 100644 --- a/src/librustc_typeck/impl_wf_check.rs +++ b/src/librustc_typeck/impl_wf_check.rs @@ -20,7 +20,7 @@ use constrained_type_params as ctp; use rustc::hir; -use rustc::hir::itemlikevisit::ItemLikeVisitor; +use rustc::hir::itemlikevisit::ParItemLikeVisitor; use rustc::hir::def_id::DefId; use 
rustc::ty::{self, TyCtxt}; use rustc::util::nodemap::{FxHashMap, FxHashSet}; @@ -62,15 +62,15 @@ pub fn impl_wf_check<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { // We will tag this as part of the WF check -- logically, it is, // but it's one that we must perform earlier than the rest of // WfCheck. - tcx.hir.krate().visit_all_item_likes(&mut ImplWfCheck { tcx: tcx }); + tcx.hir.krate().par_visit_all_item_likes(&ImplWfCheck { tcx: tcx }); } struct ImplWfCheck<'a, 'tcx: 'a> { tcx: TyCtxt<'a, 'tcx, 'tcx>, } -impl<'a, 'tcx> ItemLikeVisitor<'tcx> for ImplWfCheck<'a, 'tcx> { - fn visit_item(&mut self, item: &'tcx hir::Item) { +impl<'a, 'tcx> ParItemLikeVisitor<'tcx> for ImplWfCheck<'a, 'tcx> { + fn visit_item(&self, item: &'tcx hir::Item) { match item.node { hir::ItemImpl(.., ref generics, _, _, ref impl_item_refs) => { let impl_def_id = self.tcx.hir.local_def_id(item.id); @@ -84,9 +84,9 @@ impl<'a, 'tcx> ItemLikeVisitor<'tcx> for ImplWfCheck<'a, 'tcx> { } } - fn visit_trait_item(&mut self, _trait_item: &'tcx hir::TraitItem) { } + fn visit_trait_item(&self, _trait_item: &'tcx hir::TraitItem) { } - fn visit_impl_item(&mut self, _impl_item: &'tcx hir::ImplItem) { } + fn visit_impl_item(&self, _impl_item: &'tcx hir::ImplItem) { } } fn enforce_impl_params_are_constrained<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, diff --git a/src/librustc_typeck/lib.rs b/src/librustc_typeck/lib.rs index 4b66939963ed0..49fd4475c1320 100644 --- a/src/librustc_typeck/lib.rs +++ b/src/librustc_typeck/lib.rs @@ -83,6 +83,8 @@ This API is completely unstable and subject to change. #![feature(slice_sort_by_cached_key)] #![feature(dyn_trait)] +#![recursion_limit="256"] + #[macro_use] extern crate log; #[macro_use] extern crate syntax; extern crate syntax_pos; diff --git a/src/librustc_typeck/variance/test.rs b/src/librustc_typeck/variance/test.rs index 1acadb7e77236..007c828576080 100644 --- a/src/librustc_typeck/variance/test.rs +++ b/src/librustc_typeck/variance/test.rs @@ -9,19 +9,19 @@ // except according to those terms. 
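All of the visitor conversions in this patch follow the same mechanical recipe: an ItemLikeVisitor taking &mut self and driven by visit_all_item_likes becomes a ParItemLikeVisitor taking &self and driven by par_visit_all_item_likes, so a single visitor value can be shared by every worker. The before/after shape, with SomeCheck as a placeholder for any of the checkers touched above:

    // Before: exclusive access, items visited sequentially.
    impl<'a, 'tcx> ItemLikeVisitor<'tcx> for SomeCheck<'a, 'tcx> {
        fn visit_item(&mut self, _item: &'tcx hir::Item) { /* per-item check */ }
        fn visit_trait_item(&mut self, _: &'tcx hir::TraitItem) {}
        fn visit_impl_item(&mut self, _: &'tcx hir::ImplItem) {}
    }
    // tcx.hir.krate().visit_all_item_likes(&mut SomeCheck { tcx });

    // After: shared access only, so items can be visited from several threads at once.
    impl<'a, 'tcx> ParItemLikeVisitor<'tcx> for SomeCheck<'a, 'tcx> {
        fn visit_item(&self, _item: &'tcx hir::Item) { /* per-item check */ }
        fn visit_trait_item(&self, _: &'tcx hir::TraitItem) {}
        fn visit_impl_item(&self, _: &'tcx hir::ImplItem) {}
    }
    // tcx.hir.krate().par_visit_all_item_likes(&SomeCheck { tcx });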
use rustc::hir; -use rustc::hir::itemlikevisit::ItemLikeVisitor; +use rustc::hir::itemlikevisit::ParItemLikeVisitor; use rustc::ty::TyCtxt; pub fn test_variance<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { - tcx.hir.krate().visit_all_item_likes(&mut VarianceTest { tcx }); + tcx.hir.krate().par_visit_all_item_likes(&VarianceTest { tcx }); } struct VarianceTest<'a, 'tcx: 'a> { tcx: TyCtxt<'a, 'tcx, 'tcx> } -impl<'a, 'tcx> ItemLikeVisitor<'tcx> for VarianceTest<'a, 'tcx> { - fn visit_item(&mut self, item: &'tcx hir::Item) { +impl<'a, 'tcx> ParItemLikeVisitor<'tcx> for VarianceTest<'a, 'tcx> { + fn visit_item(&self, item: &'tcx hir::Item) { let item_def_id = self.tcx.hir.local_def_id(item.id); // For unit testing: check for a special "rustc_variance" @@ -36,6 +36,6 @@ impl<'a, 'tcx> ItemLikeVisitor<'tcx> for VarianceTest<'a, 'tcx> { } } - fn visit_trait_item(&mut self, _: &'tcx hir::TraitItem) { } - fn visit_impl_item(&mut self, _: &'tcx hir::ImplItem) { } + fn visit_trait_item(&self, _: &'tcx hir::TraitItem) { } + fn visit_impl_item(&self, _: &'tcx hir::ImplItem) { } } From 681bbc02b00f76e2a02c6cdaf96db6929a121cff Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20K=C3=A5re=20Alsaker?= Date: Thu, 15 Mar 2018 10:11:12 +0100 Subject: [PATCH 17/42] thread pool --- src/librustc_driver/test.rs | 3 +++ src/librustdoc/core.rs | 3 +++ src/librustdoc/test.rs | 2 ++ src/libsyntax/lib.rs | 4 ++-- 4 files changed, 10 insertions(+), 2 deletions(-) diff --git a/src/librustc_driver/test.rs b/src/librustc_driver/test.rs index 04f6503d92dd8..7c46355acab4a 100644 --- a/src/librustc_driver/test.rs +++ b/src/librustc_driver/test.rs @@ -117,6 +117,7 @@ fn test_env_impl(source_string: &str, None, diagnostic_handler, Lrc::new(CodeMap::new(FilePathMapping::empty()))); + driver::spawn_thread_pool(&sess, |gcx_ptr| { let cstore = CStore::new(::get_trans(&sess).metadata_loader()); rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess)); let input = config::Input::Str { @@ -161,6 +162,7 @@ fn test_env_impl(source_string: &str, "test_crate", tx, &outputs, + gcx_ptr, |tcx| { tcx.infer_ctxt().enter(|infcx| { let mut region_scope_tree = region::ScopeTree::default(); @@ -176,6 +178,7 @@ fn test_env_impl(source_string: &str, assert_eq!(tcx.sess.err_count(), expected_err_count); }); }); + }); } impl<'a, 'gcx, 'tcx> Env<'a, 'gcx, 'tcx> { diff --git a/src/librustdoc/core.rs b/src/librustdoc/core.rs index 9fb024fd90609..671a374df21ed 100644 --- a/src/librustdoc/core.rs +++ b/src/librustdoc/core.rs @@ -206,6 +206,7 @@ pub fn run_core(search_paths: SearchPaths, target_features::add_configuration(&mut cfg, &sess, &*trans); sess.parse_sess.config = cfg; + driver::spawn_thread_pool(&sess, |gcx_ptr| { let control = &driver::CompileController::basic(); let krate = panictry!(driver::phase_1_parse_input(control, &sess, &input)); @@ -268,6 +269,7 @@ pub fn run_core(search_paths: SearchPaths, &arenas, &name, &output_filenames, + gcx_ptr, |tcx, analysis, _, result| { if let Err(_) = result { sess.fatal("Compilation failed, aborting rustdoc"); @@ -318,4 +320,5 @@ pub fn run_core(search_paths: SearchPaths, (krate, ctxt.renderinfo.into_inner()) }), &sess) + }) } diff --git a/src/librustdoc/test.rs b/src/librustdoc/test.rs index 600e9eaa05f14..55f37599022dd 100644 --- a/src/librustdoc/test.rs +++ b/src/librustdoc/test.rs @@ -103,6 +103,7 @@ pub fn run(input_path: &Path, target_features::add_configuration(&mut cfg, &sess, &*trans); sess.parse_sess.config = cfg; + driver::spawn_thread_pool(&sess, |_| { let krate = 
panictry!(driver::phase_1_parse_input(&driver::CompileController::basic(), &sess, &input)); @@ -156,6 +157,7 @@ pub fn run(input_path: &Path, collector.tests.into_iter().collect(), testing::Options::new().display_output(display_warnings)); 0 + }) } // Look for #![doc(test(no_crate_inject))], used by crates in the std facade diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs index ad98e2a6b71ad..2975b716ae08f 100644 --- a/src/libsyntax/lib.rs +++ b/src/libsyntax/lib.rs @@ -75,7 +75,7 @@ macro_rules! unwrap_or { } } -struct Globals { +pub struct Globals { used_attrs: Lock>, known_attrs: Lock>, syntax_pos_globals: syntax_pos::Globals, @@ -100,7 +100,7 @@ pub fn with_globals(f: F) -> R }) } -scoped_thread_local!(static GLOBALS: Globals); +scoped_thread_local!(pub static GLOBALS: Globals); #[macro_use] pub mod diagnostics { From b234c0f188f7b6b9070c3387fe128a290f2c91ef Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20K=C3=83=C2=A5re=20Alsaker?= Date: Thu, 15 Mar 2018 10:12:04 +0100 Subject: [PATCH 18/42] gcx ptr --- src/librustc_driver/driver.rs | 43 +++++++++++++++++++++++++++-------- src/librustc_driver/lib.rs | 1 + src/librustc_driver/pretty.rs | 11 +++++++++ 3 files changed, 46 insertions(+), 9 deletions(-) diff --git a/src/librustc_driver/driver.rs b/src/librustc_driver/driver.rs index 28fe48bde4307..2e93fc47cbefe 100644 --- a/src/librustc_driver/driver.rs +++ b/src/librustc_driver/driver.rs @@ -48,6 +48,7 @@ use std::ffi::{OsStr, OsString}; use std::fs; use std::io::{self, Write}; use std::iter; +use std::sync::{Arc, Mutex}; use std::path::{Path, PathBuf}; use rustc_data_structures::sync::{Sync, Lrc}; use std::sync::mpsc; @@ -108,8 +109,15 @@ pub fn compile_input( }; let (krate, registry) = { - let mut compile_state = - CompileState::state_after_parse(input, sess, outdir, output, krate, &cstore); + let mut compile_state = CompileState::state_after_parse( + input, + sess, + outdir, + gcx_ptr.clone(), + output, + krate, + &cstore + ); controller_entry_point!(after_parse, sess, compile_state, Ok(())); (compile_state.krate.unwrap(), compile_state.registry) @@ -140,6 +148,7 @@ pub fn compile_input( input, sess, outdir, + gcx_ptr.clone(), output, &cstore, expanded_crate, @@ -206,6 +215,7 @@ pub fn compile_input( input, sess, outdir, + gcx_ptr.clone(), output, &arenas, &cstore, @@ -239,6 +249,7 @@ pub fn compile_input( &arenas, &crate_name, &outputs, + gcx_ptr.clone(), |tcx, analysis, rx, result| { { // Eventually, we will want to track plugins. 
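A note on the gcx_ptr: Arc<Mutex<usize>> now threaded through CompileState: it is the channel through which the rayon deadlock handler, which runs on a pool thread outside any query, can reach the GlobalCtxt. The cell is created before the thread pool, captured by the handler, and only later filled with the context's address; deadlock() then recovers the TyCtxt from that integer via tls::with_global_query. A rough sketch of the hand-off (the exact point where the driver stores the address is not shown in these hunks, so that line is an assumption):

    // Sketch only; the types and the deadlock call mirror hunks elsewhere in this series.
    let gcx_ptr: Arc<Mutex<usize>> = Arc::new(Mutex::new(0));

    // Captured when the pool is configured (see spawn_thread_pool later in the series);
    // reading the cell lazily makes it safe to build the pool before any GlobalCtxt exists.
    let handler_ptr = gcx_ptr.clone();
    let _deadlock_handler = move || ty::maps::deadlock(*handler_ptr.lock().unwrap());

    // Once analysis has a GlobalCtxt, the driver is expected to publish its address,
    // presumably along these lines (assumed form, not shown in these hunks):
    // *gcx_ptr.lock().unwrap() = &gcx as *const _ as usize;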
@@ -247,6 +258,7 @@ pub fn compile_input( input, sess, outdir, + gcx_ptr.clone(), output, opt_crate, tcx.hir.krate(), @@ -301,7 +313,7 @@ pub fn compile_input( controller_entry_point!( compilation_done, sess, - CompileState::state_when_compilation_done(input, sess, outdir, output), + CompileState::state_when_compilation_done(input, sess, outdir, gcx_ptr.clone(), output), Ok(()) ); @@ -393,6 +405,7 @@ impl<'a> PhaseController<'a> { pub struct CompileState<'a, 'tcx: 'a> { pub input: &'a Input, pub session: &'tcx Session, + pub gcx_ptr: Arc>, pub krate: Option, pub registry: Option>, pub cstore: Option<&'tcx CStore>, @@ -410,10 +423,15 @@ pub struct CompileState<'a, 'tcx: 'a> { } impl<'a, 'tcx> CompileState<'a, 'tcx> { - fn empty(input: &'a Input, session: &'tcx Session, out_dir: &'a Option) -> Self { + fn empty( + input: &'a Input,session: &'tcx Session, + out_dir: &'a Option, + gcx_ptr: Arc> + ) -> Self { CompileState { input, session, + gcx_ptr, out_dir: out_dir.as_ref().map(|s| &**s), out_file: None, arenas: None, @@ -435,6 +453,7 @@ impl<'a, 'tcx> CompileState<'a, 'tcx> { input: &'a Input, session: &'tcx Session, out_dir: &'a Option, + gcx_ptr: Arc>, out_file: &'a Option, krate: ast::Crate, cstore: &'tcx CStore, @@ -445,7 +464,7 @@ impl<'a, 'tcx> CompileState<'a, 'tcx> { krate: Some(krate), cstore: Some(cstore), out_file: out_file.as_ref().map(|s| &**s), - ..CompileState::empty(input, session, out_dir) + ..CompileState::empty(input, session, out_dir, gcx_ptr) } } @@ -453,6 +472,7 @@ impl<'a, 'tcx> CompileState<'a, 'tcx> { input: &'a Input, session: &'tcx Session, out_dir: &'a Option, + gcx_ptr: Arc>, out_file: &'a Option, cstore: &'tcx CStore, expanded_crate: &'a ast::Crate, @@ -463,7 +483,7 @@ impl<'a, 'tcx> CompileState<'a, 'tcx> { cstore: Some(cstore), expanded_crate: Some(expanded_crate), out_file: out_file.as_ref().map(|s| &**s), - ..CompileState::empty(input, session, out_dir) + ..CompileState::empty(input, session, out_dir, gcx_ptr) } } @@ -471,6 +491,7 @@ impl<'a, 'tcx> CompileState<'a, 'tcx> { input: &'a Input, session: &'tcx Session, out_dir: &'a Option, + gcx_ptr: Arc>, out_file: &'a Option, arenas: &'tcx AllArenas<'tcx>, cstore: &'tcx CStore, @@ -493,7 +514,7 @@ impl<'a, 'tcx> CompileState<'a, 'tcx> { hir_crate: Some(hir_crate), output_filenames: Some(output_filenames), out_file: out_file.as_ref().map(|s| &**s), - ..CompileState::empty(input, session, out_dir) + ..CompileState::empty(input, session, out_dir, gcx_ptr) } } @@ -501,6 +522,7 @@ impl<'a, 'tcx> CompileState<'a, 'tcx> { input: &'a Input, session: &'tcx Session, out_dir: &'a Option, + gcx_ptr: Arc>, out_file: &'a Option, krate: Option<&'a ast::Crate>, hir_crate: &'a hir::Crate, @@ -515,7 +537,7 @@ impl<'a, 'tcx> CompileState<'a, 'tcx> { hir_crate: Some(hir_crate), crate_name: Some(crate_name), out_file: out_file.as_ref().map(|s| &**s), - ..CompileState::empty(input, session, out_dir) + ..CompileState::empty(input, session, out_dir, gcx_ptr) } } @@ -523,11 +545,12 @@ impl<'a, 'tcx> CompileState<'a, 'tcx> { input: &'a Input, session: &'tcx Session, out_dir: &'a Option, + gcx_ptr: Arc>, out_file: &'a Option, ) -> Self { CompileState { out_file: out_file.as_ref().map(|s| &**s), - ..CompileState::empty(input, session, out_dir) + ..CompileState::empty(input, session, out_dir, gcx_ptr) } } } @@ -1053,6 +1076,7 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>( arenas: &'tcx AllArenas<'tcx>, name: &str, output_filenames: &OutputFilenames, + gcx_ptr: Arc>, f: F, ) -> Result where @@ -1104,6 +1128,7 @@ where name, tx, 
output_filenames, + gcx_ptr, |tcx| { // Do some initialization of the DepGraph that can only be done with the // tcx available. diff --git a/src/librustc_driver/lib.rs b/src/librustc_driver/lib.rs index b424dd3249118..f65ac3a6ac371 100644 --- a/src/librustc_driver/lib.rs +++ b/src/librustc_driver/lib.rs @@ -900,6 +900,7 @@ impl<'a> CompilerCalls<'a> for RustcDefaultCalls { state.arenas.unwrap(), state.output_filenames.unwrap(), opt_uii.clone(), + state.gcx_ptr.clone(), state.out_file); }; } else { diff --git a/src/librustc_driver/pretty.rs b/src/librustc_driver/pretty.rs index 234ac31f5a430..01d159187e6dd 100644 --- a/src/librustc_driver/pretty.rs +++ b/src/librustc_driver/pretty.rs @@ -46,6 +46,7 @@ use std::option; use std::path::Path; use std::str::FromStr; use std::mem; +use std::sync::{Arc, Mutex}; use rustc::hir::map as hir_map; use rustc::hir::map::blocks; @@ -206,6 +207,7 @@ impl PpSourceMode { resolutions: &Resolutions, arenas: &'tcx AllArenas<'tcx>, output_filenames: &OutputFilenames, + gcx_ptr: Arc>, id: &str, f: F) -> A @@ -240,6 +242,7 @@ impl PpSourceMode { arenas, id, output_filenames, + gcx_ptr, |tcx, _, _, _| { let empty_tables = ty::TypeckTables::empty(None); let annotation = TypedAnnotation { @@ -924,6 +927,7 @@ pub fn print_after_hir_lowering<'tcx, 'a: 'tcx>(sess: &'a Session, arenas: &'tcx AllArenas<'tcx>, output_filenames: &OutputFilenames, opt_uii: Option, + gcx_ptr: Arc>, ofile: Option<&Path>) { if ppm.needs_analysis() { print_with_analysis(sess, @@ -936,6 +940,7 @@ pub fn print_after_hir_lowering<'tcx, 'a: 'tcx>(sess: &'a Session, output_filenames, ppm, opt_uii, + gcx_ptr, ofile); return; } @@ -972,6 +977,7 @@ pub fn print_after_hir_lowering<'tcx, 'a: 'tcx>(sess: &'a Session, resolutions, arenas, output_filenames, + gcx_ptr, crate_name, move |annotation, krate| { debug!("pretty printing source code {:?}", s); @@ -996,6 +1002,7 @@ pub fn print_after_hir_lowering<'tcx, 'a: 'tcx>(sess: &'a Session, resolutions, arenas, output_filenames, + gcx_ptr, crate_name, move |_annotation, krate| { debug!("pretty printing source code {:?}", s); @@ -1012,6 +1019,7 @@ pub fn print_after_hir_lowering<'tcx, 'a: 'tcx>(sess: &'a Session, resolutions, arenas, output_filenames, + gcx_ptr, crate_name, move |annotation, _| { debug!("pretty printing source code {:?}", s); @@ -1046,6 +1054,7 @@ pub fn print_after_hir_lowering<'tcx, 'a: 'tcx>(sess: &'a Session, resolutions, arenas, output_filenames, + gcx_ptr, crate_name, move |_annotation, _krate| { debug!("pretty printing source code {:?}", s); @@ -1078,6 +1087,7 @@ fn print_with_analysis<'tcx, 'a: 'tcx>(sess: &'a Session, output_filenames: &OutputFilenames, ppm: PpMode, uii: Option, + gcx_ptr: Arc>, ofile: Option<&Path>) { let nodeid = if let Some(uii) = uii { debug!("pretty printing for {:?}", uii); @@ -1101,6 +1111,7 @@ fn print_with_analysis<'tcx, 'a: 'tcx>(sess: &'a Session, arenas, crate_name, output_filenames, + gcx_ptr, |tcx, _, _, _| { match ppm { PpmMir | PpmMirCFG => { From 354b7ac3b6704a1ae477eaa36378e59d6dfdc487 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20K=C3=A5re=20Alsaker?= Date: Thu, 15 Mar 2018 10:14:32 +0100 Subject: [PATCH 19/42] misc --- src/librustc/Cargo.toml | 3 +++ src/librustc/lib.rs | 7 +++++++ src/librustc_driver/lib.rs | 3 +++ 3 files changed, 13 insertions(+) diff --git a/src/librustc/Cargo.toml b/src/librustc/Cargo.toml index e44661c786b95..692a6e150f178 100644 --- a/src/librustc/Cargo.toml +++ b/src/librustc/Cargo.toml @@ -17,6 +17,9 @@ jobserver = "0.1" lazy_static = "1.0.0" log = { version = "0.4", 
features = ["release_max_level_info", "std"] } proc_macro = { path = "../libproc_macro" } +rayon = { git = "https://github.com/Zoxc/rayon.git", branch = "fiber" } +rayon-core = { git = "https://github.com/Zoxc/rayon.git", branch = "fiber", features=["tlv"] } +scoped-tls = { version = "0.1.1", features = ["nightly"] } rustc_apfloat = { path = "../librustc_apfloat" } rustc_back = { path = "../librustc_back" } rustc_const_math = { path = "../librustc_const_math" } diff --git a/src/librustc/lib.rs b/src/librustc/lib.rs index 8aee10a71a3f1..7cdee74b1507b 100644 --- a/src/librustc/lib.rs +++ b/src/librustc/lib.rs @@ -41,6 +41,7 @@ html_root_url = "https://doc.rust-lang.org/nightly/")] #![feature(asm)] +#![feature(attr_literals)] #![feature(box_patterns)] #![feature(box_syntax)] #![feature(const_fn)] @@ -50,6 +51,7 @@ #![feature(entry_or_default)] #![feature(from_ref)] #![feature(fs_read_write)] +#![feature(iterator_step_by)] #![cfg_attr(windows, feature(libc))] #![feature(macro_lifetime_matcher)] #![feature(macro_vis_matcher)] @@ -60,6 +62,7 @@ #![feature(quote)] #![feature(refcell_replace_swap)] #![feature(rustc_diagnostic_macros)] +#![feature(set_stdio)] #![feature(slice_patterns)] #![feature(slice_sort_by_cached_key)] #![feature(specialization)] @@ -69,6 +72,7 @@ #![feature(catch_expr)] #![feature(test)] #![feature(inclusive_range_fields)] +#![feature(vec_remove_item)] #![recursion_limit="512"] @@ -90,9 +94,12 @@ extern crate rustc_back; extern crate serialize; extern crate rustc_const_math; extern crate rustc_errors as errors; +extern crate rayon; +extern crate rayon_core; #[macro_use] extern crate log; #[macro_use] extern crate syntax; extern crate syntax_pos; +#[macro_use] extern crate scoped_tls; extern crate jobserver; extern crate proc_macro; diff --git a/src/librustc_driver/lib.rs b/src/librustc_driver/lib.rs index f65ac3a6ac371..a1add93808ca3 100644 --- a/src/librustc_driver/lib.rs +++ b/src/librustc_driver/lib.rs @@ -35,6 +35,8 @@ extern crate graphviz; extern crate env_logger; #[cfg(unix)] extern crate libc; +extern crate rayon; +extern crate rayon_core; extern crate rustc; extern crate rustc_allocator; extern crate rustc_back; @@ -53,6 +55,7 @@ extern crate rustc_save_analysis; extern crate rustc_traits; extern crate rustc_trans_utils; extern crate rustc_typeck; +extern crate scoped_tls; extern crate serialize; #[macro_use] extern crate log; From 16038117781cc226eabf651f26e767f9ed61592b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20K=C3=A5re=20Alsaker?= Date: Thu, 15 Mar 2018 10:15:19 +0100 Subject: [PATCH 20/42] rayon abs --- src/librustc_data_structures/Cargo.toml | 1 + src/librustc_data_structures/lib.rs | 1 + src/librustc_data_structures/sync.rs | 44 ++++++++++++++++++++++++- 3 files changed, 45 insertions(+), 1 deletion(-) diff --git a/src/librustc_data_structures/Cargo.toml b/src/librustc_data_structures/Cargo.toml index e1f0a74fc683d..e1dd321fee21d 100644 --- a/src/librustc_data_structures/Cargo.toml +++ b/src/librustc_data_structures/Cargo.toml @@ -15,6 +15,7 @@ serialize = { path = "../libserialize" } cfg-if = "0.1.2" stable_deref_trait = "1.0.0" parking_lot_core = "0.2.8" +rayon = { git = "https://github.com/Zoxc/rayon.git", branch = "fiber" } [dependencies.parking_lot] version = "0.5" diff --git a/src/librustc_data_structures/lib.rs b/src/librustc_data_structures/lib.rs index ba1d73dc268df..422eb1fe42aa7 100644 --- a/src/librustc_data_structures/lib.rs +++ b/src/librustc_data_structures/lib.rs @@ -44,6 +44,7 @@ extern crate parking_lot; #[macro_use] extern crate 
cfg_if; extern crate stable_deref_trait; +extern crate rayon; pub use rustc_serialize::hex::ToHex; diff --git a/src/librustc_data_structures/sync.rs b/src/librustc_data_structures/sync.rs index 3b7d6efbdae1e..3661763133014 100644 --- a/src/librustc_data_structures/sync.rs +++ b/src/librustc_data_structures/sync.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -//! This mdoule defines types which are thread safe if cfg!(parallel_queries) is true. +//! This module defines types which are thread safe if cfg!(parallel_queries) is true. //! //! `Lrc` is an alias of either Rc or Arc. //! @@ -40,6 +40,29 @@ use std; use std::ops::{Deref, DerefMut}; use owning_ref::{Erased, OwningRef}; +pub fn serial_join(oper_a: A, oper_b: B) -> (RA, RB) + where A: FnOnce() -> RA, + B: FnOnce() -> RB +{ + (oper_a(), oper_b()) +} + +pub struct SerialScope; + +impl SerialScope { + pub fn spawn(&self, f: F) + where F: FnOnce(&SerialScope) + { + f(self) + } +} + +pub fn serial_scope(f: F) -> R + where F: FnOnce(&SerialScope) -> R +{ + f(&SerialScope) +} + cfg_if! { if #[cfg(not(parallel_queries))] { pub auto trait Send {} @@ -55,9 +78,19 @@ cfg_if! { } } + pub use self::serial_join as join; + pub use self::serial_scope as scope; + + pub use std::iter::Iterator as ParallelIterator; + + pub fn par_iter(t: T) -> T::IntoIter { + t.into_iter() + } + pub type MetadataRef = OwningRef, [u8]>; pub use std::rc::Rc as Lrc; + pub use std::rc::Weak as Weak; pub use std::cell::Ref as ReadGuard; pub use std::cell::RefMut as WriteGuard; pub use std::cell::RefMut as LockGuard; @@ -160,6 +193,7 @@ cfg_if! { pub use parking_lot::MutexGuard as LockGuard; pub use std::sync::Arc as Lrc; + pub use std::sync::Weak as Weak; pub use self::Lock as MTLock; @@ -167,6 +201,14 @@ cfg_if! 
{ use parking_lot::RwLock as InnerRwLock; use std::thread; + pub use rayon::{join, scope}; + + pub use rayon::iter::ParallelIterator; + use rayon::iter::IntoParallelIterator; + + pub fn par_iter(t: T) -> T::Iter { + t.into_par_iter() + } pub type MetadataRef = OwningRef, [u8]>; From 34098deedfb0fe8574c9a18c7242673c6c6daa79 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20K=C3=A5re=20Alsaker?= Date: Thu, 15 Mar 2018 10:16:14 +0100 Subject: [PATCH 21/42] thread pool --- src/librustc_driver/driver.rs | 104 ++++++++++++++++++++++++++++++++++ 1 file changed, 104 insertions(+) diff --git a/src/librustc_driver/driver.rs b/src/librustc_driver/driver.rs index 2e93fc47cbefe..f746dcf45e813 100644 --- a/src/librustc_driver/driver.rs +++ b/src/librustc_driver/driver.rs @@ -65,6 +65,84 @@ use pretty::ReplaceBodyWithLoop; use profile; +#[cfg(not(parallel_queries))] +pub fn spawn_thread_pool>) -> R, R>(_: &Session, f: F) -> R { + use rustc::util::common::THREAD_INDEX; + + THREAD_INDEX.set(&0, || { + f(Arc::new(Mutex::new(0))) + }) +} + +#[cfg(parallel_queries)] +pub fn spawn_thread_pool>) -> R, R>(sess: &Session, f: F) -> R { + use syntax; + use syntax_pos; + use scoped_tls::ScopedKey; + use rayon::{Configuration, ThreadPool}; + use rayon_core; + use rustc::util::common::THREAD_INDEX; + + let gcx_ptr = Arc::new(Mutex::new(0)); + + let arg_gcx_ptr = gcx_ptr.clone(); + + let config = Configuration::new().num_threads(sess.query_threads()) + .deadlock_handler(move || { + ty::maps::deadlock(*gcx_ptr.lock().unwrap()); + }).stack_size(16 * 1024 * 1024); + + let with_pool = move |pool: &ThreadPool| { + pool.with_global_registry(|| { + THREAD_INDEX.set(&0, || { + f(arg_gcx_ptr) + }) + }) + }; + + fn try_with(key: &'static ScopedKey, f: F) -> R + where F: FnOnce(Option<&T>) -> R + { + if key.is_set() { + key.with(|v| f(Some(v))) + } else { + f(None) + } + } + + fn maybe_set(key: &'static ScopedKey, + value: Option<&T>, f: F) -> R + where F: FnOnce() -> R + { + if let Some(v) = value { + key.set(v, f) + } else { + f() + } + } + + try_with(&syntax::GLOBALS, |syntax_globals| { + try_with(&syntax_pos::GLOBALS, |syntax_pos_globals| { + let main_handler = move |worker: &mut FnMut()| { + let idx = unsafe { + 1 + (*rayon_core::registry::WorkerThread::current()).index() + }; + THREAD_INDEX.set(&idx, || { + maybe_set(&syntax::GLOBALS, syntax_globals, || { + maybe_set(&syntax_pos::GLOBALS, syntax_pos_globals, || { + ty::tls::with_thread_locals(|| { + worker() + }) + }) + }) + }) + }; + + ThreadPool::scoped_pool(config, main_handler, with_pool).unwrap() + }) + }) +} + pub fn compile_input( trans: Box, sess: &Session, @@ -75,6 +153,32 @@ pub fn compile_input( output: &Option, addl_plugins: Option>, control: &CompileController, +) -> CompileResult { + spawn_thread_pool(sess, |gcx_ptr| { + compile_input_impl(trans, + sess, + cstore, + input_path, + input, + outdir, + output, + addl_plugins, + gcx_ptr, + control) + }) +} + +fn compile_input_impl( + trans: Box, + sess: &Session, + cstore: &CStore, + input_path: &Option, + input: &Input, + outdir: &Option, + output: &Option, + addl_plugins: Option>, + gcx_ptr: Arc>, + control: &CompileController, ) -> CompileResult { macro_rules! 
controller_entry_point { ($point: ident, $tsess: expr, $make_state: expr, $phase_result: expr) => {{ From 82f6eb2a309bba6f4c8ffa8cefba63f2985bd071 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20K=C3=A5re=20Alsaker?= Date: Thu, 15 Mar 2018 10:17:32 +0100 Subject: [PATCH 22/42] misc --- src/librustc_driver/Cargo.toml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/librustc_driver/Cargo.toml b/src/librustc_driver/Cargo.toml index 3bff79ed3a6fc..5800fcb0d6012 100644 --- a/src/librustc_driver/Cargo.toml +++ b/src/librustc_driver/Cargo.toml @@ -13,6 +13,9 @@ arena = { path = "../libarena" } graphviz = { path = "../libgraphviz" } log = "0.4" env_logger = { version = "0.5", default-features = false } +rayon = { git = "https://github.com/Zoxc/rayon.git", branch = "fiber" } +scoped-tls = { version = "0.1.1", features = ["nightly"] } +rayon-core = { git = "https://github.com/Zoxc/rayon.git", branch = "fiber" } rustc = { path = "../librustc" } rustc_allocator = { path = "../librustc_allocator" } rustc_back = { path = "../librustc_back" } From 86327d438b5b45cbf8dc61e8db3698825d700aa1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20K=C3=A5re=20Alsaker?= Date: Thu, 7 Dec 2017 17:23:15 +0100 Subject: [PATCH 23/42] Parallelize passes --- src/librustc/hir/itemlikevisit.rs | 11 +++++++++++ src/librustc/hir/mod.rs | 7 +++++++ src/librustc/middle/intrinsicck.rs | 6 ++---- src/librustc/middle/stability.rs | 7 ++++--- src/librustc/traits/mod.rs | 3 +-- src/librustc/traits/select.rs | 5 ++--- src/librustc/traits/structural_impls.rs | 4 ++-- src/librustc/ty/structural_impls.rs | 11 +++++++++++ src/librustc_mir/hair/pattern/check_match.rs | 3 ++- src/librustc_passes/lib.rs | 2 ++ src/librustc_passes/rvalue_promotion.rs | 5 +---- src/librustc_trans/lib.rs | 2 ++ src/librustc_trans_utils/symbol_names_test.rs | 2 ++ src/librustc_typeck/check/mod.rs | 4 ++-- src/librustc_typeck/check/wfcheck.rs | 1 + src/librustc_typeck/check_unused.rs | 14 +++++++------- src/librustc_typeck/collect.rs | 12 +----------- src/librustc_typeck/outlives/test.rs | 14 ++++++-------- 18 files changed, 66 insertions(+), 47 deletions(-) diff --git a/src/librustc/hir/itemlikevisit.rs b/src/librustc/hir/itemlikevisit.rs index a62000e10c79f..bbf3fb3eb68c3 100644 --- a/src/librustc/hir/itemlikevisit.rs +++ b/src/librustc/hir/itemlikevisit.rs @@ -101,6 +101,17 @@ pub trait IntoVisitor<'hir> { fn into_visitor(&self) -> Self::Visitor; } +#[derive(Clone)] +pub struct ClonableVisitor(pub V); + +impl<'hir, V: Visitor<'hir> + Clone> IntoVisitor<'hir> for ClonableVisitor { + type Visitor = V; + + fn into_visitor(&self) -> V { + self.clone().0 + } +} + pub struct ParDeepVisitor(pub V); impl<'hir, V> ParItemLikeVisitor<'hir> for ParDeepVisitor diff --git a/src/librustc/hir/mod.rs b/src/librustc/hir/mod.rs index a4d803704485a..5ae52d02ef8b8 100644 --- a/src/librustc/hir/mod.rs +++ b/src/librustc/hir/mod.rs @@ -746,6 +746,13 @@ impl Crate { }); } + pub fn par_deep_visit_items<'hir, V>(&'hir self, visitor: V) + where V: intravisit::Visitor<'hir> + Clone + Sync + Send + { + let visitor = itemlikevisit::ClonableVisitor(visitor); + self.par_visit_all_item_likes(&itemlikevisit::ParDeepVisitor(visitor)); + } + pub fn body(&self, id: BodyId) -> &Body { &self.bodies[&id] } diff --git a/src/librustc/middle/intrinsicck.rs b/src/librustc/middle/intrinsicck.rs index 0a4e5094cde77..d36ba7fb0042b 100644 --- a/src/librustc/middle/intrinsicck.rs +++ b/src/librustc/middle/intrinsicck.rs @@ -19,12 +19,10 @@ use hir::intravisit::{self, Visitor, NestedVisitorMap}; use 
hir; pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { - let mut visitor = ItemVisitor { - tcx, - }; - tcx.hir.krate().visit_all_item_likes(&mut visitor.as_deep_visitor()); + tcx.hir.krate().par_deep_visit_items(ItemVisitor { tcx }); } +#[derive(Clone)] struct ItemVisitor<'a, 'tcx: 'a> { tcx: TyCtxt<'a, 'tcx, 'tcx> } diff --git a/src/librustc/middle/stability.rs b/src/librustc/middle/stability.rs index 328b2db2b5828..c8ca7e352f13d 100644 --- a/src/librustc/middle/stability.rs +++ b/src/librustc/middle/stability.rs @@ -324,6 +324,7 @@ impl<'a, 'tcx> Visitor<'tcx> for Annotator<'a, 'tcx> { } } +#[derive(Clone)] struct MissingStabilityAnnotations<'a, 'tcx: 'a> { tcx: TyCtxt<'a, 'tcx, 'tcx>, access_levels: &'a AccessLevels, @@ -466,8 +467,7 @@ impl<'a, 'tcx> Index<'tcx> { /// Cross-references the feature names of unstable APIs with enabled /// features and possibly prints errors. pub fn check_unstable_api_usage<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { - let mut checker = Checker { tcx: tcx }; - tcx.hir.krate().visit_all_item_likes(&mut checker.as_deep_visitor()); + tcx.hir.krate().par_deep_visit_items(Checker { tcx: tcx }); } /// Check whether an item marked with `deprecated(since="X")` is currently @@ -494,6 +494,7 @@ pub fn deprecation_in_effect(since: &str) -> bool { } } +#[derive(Clone)] struct Checker<'a, 'tcx: 'a> { tcx: TyCtxt<'a, 'tcx, 'tcx>, } @@ -807,7 +808,7 @@ pub fn check_unused_or_stable_features<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { }; missing.check_missing_stability(ast::CRATE_NODE_ID, krate.span); intravisit::walk_crate(&mut missing, krate); - krate.visit_all_item_likes(&mut missing.as_deep_visitor()); + krate.par_deep_visit_items(missing); } let ref declared_lib_features = tcx.features().declared_lib_features; diff --git a/src/librustc/traits/mod.rs b/src/librustc/traits/mod.rs index 8d2398d34090d..ae29e6d07ca79 100644 --- a/src/librustc/traits/mod.rs +++ b/src/librustc/traits/mod.rs @@ -28,7 +28,6 @@ use ty::error::{ExpectedFound, TypeError}; use infer::{InferCtxt}; use rustc_data_structures::sync::Lrc; -use std::rc::Rc; use syntax::ast; use syntax_pos::{Span, DUMMY_SP}; @@ -239,7 +238,7 @@ pub struct DerivedObligationCause<'tcx> { parent_trait_ref: ty::PolyTraitRef<'tcx>, /// The parent trait had this cause - parent_code: Rc> + parent_code: Lrc> } pub type Obligations<'tcx, O> = Vec>; diff --git a/src/librustc/traits/select.rs b/src/librustc/traits/select.rs index 45262fd73f3bb..6aa40fc42b83b 100644 --- a/src/librustc/traits/select.rs +++ b/src/librustc/traits/select.rs @@ -44,13 +44,12 @@ use ty::relate::TypeRelation; use middle::lang_items; use mir::interpret::{GlobalId}; -use rustc_data_structures::sync::Lock; +use rustc_data_structures::sync::{Lrc, Lock}; use rustc_data_structures::bitvec::BitVector; use std::iter; use std::cmp; use std::fmt; use std::mem; -use std::rc::Rc; use syntax::abi::Abi; use hir; use util::nodemap::{FxHashMap, FxHashSet}; @@ -3314,7 +3313,7 @@ impl<'tcx> TraitObligation<'tcx> { if obligation.recursion_depth >= 0 { let derived_cause = DerivedObligationCause { parent_trait_ref: obligation.predicate.to_poly_trait_ref(), - parent_code: Rc::new(obligation.cause.code.clone()) + parent_code: Lrc::new(obligation.cause.code.clone()) }; let derived_code = variant(derived_cause); ObligationCause::new(obligation.cause.span, obligation.cause.body_id, derived_code) diff --git a/src/librustc/traits/structural_impls.rs b/src/librustc/traits/structural_impls.rs index 31c5bf1bbad84..67acc947c89a5 100644 --- a/src/librustc/traits/structural_impls.rs 
+++ b/src/librustc/traits/structural_impls.rs @@ -15,7 +15,7 @@ use ty::{self, Lift, TyCtxt}; use ty::fold::{TypeFoldable, TypeFolder, TypeVisitor}; use std::fmt; -use std::rc::Rc; +use rustc_data_structures::sync::Lrc; // structural impls for the structs in traits @@ -253,7 +253,7 @@ impl<'a, 'tcx> Lift<'tcx> for traits::DerivedObligationCause<'a> { tcx.lift(&*self.parent_code).map(|code| { traits::DerivedObligationCause { parent_trait_ref: trait_ref, - parent_code: Rc::new(code) + parent_code: Lrc::new(code) } }) }) diff --git a/src/librustc/ty/structural_impls.rs b/src/librustc/ty/structural_impls.rs index 7b4b7082bb6ce..142747643474a 100644 --- a/src/librustc/ty/structural_impls.rs +++ b/src/librustc/ty/structural_impls.rs @@ -22,6 +22,7 @@ use rustc_data_structures::sync::Lrc; use mir::interpret; use std::rc::Rc; +use std::sync::Arc; /////////////////////////////////////////////////////////////////////////// // Atomic structs @@ -687,6 +688,16 @@ EnumTypeFoldableImpl! { } where T: TypeFoldable<'tcx> } +impl<'tcx, T: TypeFoldable<'tcx>> TypeFoldable<'tcx> for Arc { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { + Arc::new((**self).fold_with(folder)) + } + + fn super_visit_with>(&self, visitor: &mut V) -> bool { + (**self).visit_with(visitor) + } +} + impl<'tcx, T: TypeFoldable<'tcx>> TypeFoldable<'tcx> for Rc { fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { Rc::new((**self).fold_with(folder)) diff --git a/src/librustc_mir/hair/pattern/check_match.rs b/src/librustc_mir/hair/pattern/check_match.rs index d924baaf00521..439587a8e6043 100644 --- a/src/librustc_mir/hair/pattern/check_match.rs +++ b/src/librustc_mir/hair/pattern/check_match.rs @@ -37,6 +37,7 @@ use syntax::ast; use syntax::ptr::P; use syntax_pos::{Span, DUMMY_SP}; +#[derive(Clone)] struct OuterVisitor<'a, 'tcx: 'a> { tcx: TyCtxt<'a, 'tcx, 'tcx> } impl<'a, 'tcx> Visitor<'tcx> for OuterVisitor<'a, 'tcx> { @@ -52,7 +53,7 @@ impl<'a, 'tcx> Visitor<'tcx> for OuterVisitor<'a, 'tcx> { } pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { - tcx.hir.krate().visit_all_item_likes(&mut OuterVisitor { tcx: tcx }.as_deep_visitor()); + tcx.hir.krate().par_deep_visit_items(OuterVisitor { tcx: tcx }); tcx.sess.abort_if_errors(); } diff --git a/src/librustc_passes/lib.rs b/src/librustc_passes/lib.rs index e65c9de8df1ea..870547f49e7b2 100644 --- a/src/librustc_passes/lib.rs +++ b/src/librustc_passes/lib.rs @@ -20,6 +20,8 @@ #![feature(rustc_diagnostic_macros)] +#![recursion_limit="256"] + #[macro_use] extern crate rustc; extern crate rustc_mir; diff --git a/src/librustc_passes/rvalue_promotion.rs b/src/librustc_passes/rvalue_promotion.rs index c5d2f0041a0f2..eacf1dc841b8d 100644 --- a/src/librustc_passes/rvalue_promotion.rs +++ b/src/librustc_passes/rvalue_promotion.rs @@ -50,10 +50,7 @@ pub fn provide(providers: &mut Providers) { } pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { - for &body_id in &tcx.hir.krate().body_ids { - let def_id = tcx.hir.body_owner_def_id(body_id); - tcx.const_is_rvalue_promotable_to_static(def_id); - } + tcx.par_body_owners(|def_id| { tcx.const_is_rvalue_promotable_to_static(def_id); }); tcx.sess.abort_if_errors(); } diff --git a/src/librustc_trans/lib.rs b/src/librustc_trans/lib.rs index 49d0f638f2061..a3de1e03e28b8 100644 --- a/src/librustc_trans/lib.rs +++ b/src/librustc_trans/lib.rs @@ -30,6 +30,8 @@ #![feature(optin_builtin_traits)] #![feature(inclusive_range_fields)] +#![recursion_limit="256"] + use 
rustc::dep_graph::WorkProduct; use syntax_pos::symbol::Symbol; diff --git a/src/librustc_trans_utils/symbol_names_test.rs b/src/librustc_trans_utils/symbol_names_test.rs index 47bbd67fb5c70..773663819249e 100644 --- a/src/librustc_trans_utils/symbol_names_test.rs +++ b/src/librustc_trans_utils/symbol_names_test.rs @@ -33,10 +33,12 @@ pub fn report_symbol_names<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { tcx.dep_graph.with_ignore(|| { let mut visitor = SymbolNamesTest { tcx: tcx }; + // FIXME: Try parallel version tcx.hir.krate().visit_all_item_likes(&mut visitor); }) } +#[derive(Clone)] struct SymbolNamesTest<'a, 'tcx:'a> { tcx: TyCtxt<'a, 'tcx, 'tcx>, } diff --git a/src/librustc_typeck/check/mod.rs b/src/librustc_typeck/check/mod.rs index ab0fa77787f20..a0f238cf259e0 100644 --- a/src/librustc_typeck/check/mod.rs +++ b/src/librustc_typeck/check/mod.rs @@ -691,8 +691,8 @@ impl<'a, 'tcx> ParItemLikeVisitor<'tcx> for CheckItemTypesVisitor<'a, 'tcx> { pub fn check_wf_new<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Result<(), ErrorReported> { tcx.sess.track_errors(|| { - let mut visit = wfcheck::CheckTypeWellFormedVisitor::new(tcx); - tcx.hir.krate().visit_all_item_likes(&mut visit.as_deep_visitor()); + let visit = wfcheck::CheckTypeWellFormedVisitor::new(tcx); + tcx.hir.krate().par_deep_visit_items(visit); }) } diff --git a/src/librustc_typeck/check/wfcheck.rs b/src/librustc_typeck/check/wfcheck.rs index 6348f3861770f..312f2858f9083 100644 --- a/src/librustc_typeck/check/wfcheck.rs +++ b/src/librustc_typeck/check/wfcheck.rs @@ -660,6 +660,7 @@ fn reject_shadowing_type_parameters(tcx: TyCtxt, def_id: DefId) { } } +#[derive(Clone)] pub struct CheckTypeWellFormedVisitor<'a, 'tcx: 'a> { tcx: TyCtxt<'a, 'tcx, 'tcx>, } diff --git a/src/librustc_typeck/check_unused.rs b/src/librustc_typeck/check_unused.rs index f2f1e2938cb12..638f601024034 100644 --- a/src/librustc_typeck/check_unused.rs +++ b/src/librustc_typeck/check_unused.rs @@ -15,7 +15,7 @@ use syntax::ast; use syntax_pos::{Span, DUMMY_SP}; use rustc::hir::def_id::LOCAL_CRATE; -use rustc::hir::itemlikevisit::ItemLikeVisitor; +use rustc::hir::itemlikevisit::ParItemLikeVisitor; use rustc::hir; use rustc::util::nodemap::DefIdSet; @@ -45,8 +45,8 @@ impl<'a, 'tcx> CheckVisitor<'a, 'tcx> { } } -impl<'a, 'tcx, 'v> ItemLikeVisitor<'v> for CheckVisitor<'a, 'tcx> { - fn visit_item(&mut self, item: &hir::Item) { +impl<'a, 'tcx, 'v> ParItemLikeVisitor<'v> for CheckVisitor<'a, 'tcx> { + fn visit_item(&self, item: &hir::Item) { if item.vis == hir::Public || item.span == DUMMY_SP { return; } @@ -55,10 +55,10 @@ impl<'a, 'tcx, 'v> ItemLikeVisitor<'v> for CheckVisitor<'a, 'tcx> { } } - fn visit_trait_item(&mut self, _trait_item: &hir::TraitItem) { + fn visit_trait_item(&self, _trait_item: &hir::TraitItem) { } - fn visit_impl_item(&mut self, _impl_item: &hir::ImplItem) { + fn visit_impl_item(&self, _impl_item: &hir::ImplItem) { } } @@ -71,8 +71,8 @@ pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { used_trait_imports.extend(imports.iter()); } - let mut visitor = CheckVisitor { tcx, used_trait_imports }; - tcx.hir.krate().visit_all_item_likes(&mut visitor); + let visitor = CheckVisitor { tcx, used_trait_imports }; + tcx.hir.krate().par_visit_all_item_likes(&visitor); for &(def_id, span) in tcx.maybe_unused_extern_crates(LOCAL_CRATE).iter() { // The `def_id` here actually was calculated during resolution (at least diff --git a/src/librustc_typeck/collect.rs b/src/librustc_typeck/collect.rs index 7542f33739e2d..df5d1a014f4dc 100644 --- 
a/src/librustc_typeck/collect.rs +++ b/src/librustc_typeck/collect.rs @@ -49,7 +49,6 @@ use syntax_pos::{Span, DUMMY_SP}; use rustc::hir::{self, map as hir_map, TransFnAttrs, TransFnAttrFlags, Unsafety}; use rustc::hir::intravisit::{self, Visitor, NestedVisitorMap}; -use rustc::hir::itemlikevisit::{IntoVisitor, ParDeepVisitor}; use rustc::hir::def::{Def, CtorKind}; use rustc::hir::def_id::{DefId, LOCAL_CRATE}; @@ -57,8 +56,7 @@ use rustc::hir::def_id::{DefId, LOCAL_CRATE}; // Main entry point pub fn collect_item_types<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { - let visitor = CollectItemTypesVisitor { tcx: tcx }; - tcx.hir.krate().par_visit_all_item_likes(&ParDeepVisitor(visitor)); + tcx.hir.krate().par_deep_visit_items(CollectItemTypesVisitor { tcx: tcx }); } pub fn provide(providers: &mut Providers) { @@ -103,14 +101,6 @@ struct CollectItemTypesVisitor<'a, 'tcx: 'a> { tcx: TyCtxt<'a, 'tcx, 'tcx> } -impl<'a, 'tcx: 'a> IntoVisitor<'tcx> for CollectItemTypesVisitor<'a, 'tcx> { - type Visitor = Self; - - fn into_visitor(&self) -> Self { - self.clone() - } -} - impl<'a, 'tcx> Visitor<'tcx> for CollectItemTypesVisitor<'a, 'tcx> { fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> { NestedVisitorMap::OnlyBodies(&self.tcx.hir) diff --git a/src/librustc_typeck/outlives/test.rs b/src/librustc_typeck/outlives/test.rs index c3c2ae667ddd9..bdb0999cbeecb 100644 --- a/src/librustc_typeck/outlives/test.rs +++ b/src/librustc_typeck/outlives/test.rs @@ -9,21 +9,19 @@ // except according to those terms. use rustc::hir; -use rustc::hir::itemlikevisit::ItemLikeVisitor; +use rustc::hir::itemlikevisit::ParItemLikeVisitor; use rustc::ty::TyCtxt; pub fn test_inferred_outlives<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { - tcx.hir - .krate() - .visit_all_item_likes(&mut OutlivesTest { tcx }); + tcx.hir.krate().par_visit_all_item_likes(&mut OutlivesTest { tcx }); } struct OutlivesTest<'a, 'tcx: 'a> { tcx: TyCtxt<'a, 'tcx, 'tcx>, } -impl<'a, 'tcx> ItemLikeVisitor<'tcx> for OutlivesTest<'a, 'tcx> { - fn visit_item(&mut self, item: &'tcx hir::Item) { +impl<'a, 'tcx> ParItemLikeVisitor<'tcx> for OutlivesTest<'a, 'tcx> { + fn visit_item(&self, item: &'tcx hir::Item) { let item_def_id = self.tcx.hir.local_def_id(item.id); // For unit testing: check for a special "rustc_outlives" @@ -40,6 +38,6 @@ impl<'a, 'tcx> ItemLikeVisitor<'tcx> for OutlivesTest<'a, 'tcx> { } } - fn visit_trait_item(&mut self, _: &'tcx hir::TraitItem) {} - fn visit_impl_item(&mut self, _: &'tcx hir::ImplItem) {} + fn visit_trait_item(&self, _: &'tcx hir::TraitItem) { } + fn visit_impl_item(&self, _: &'tcx hir::ImplItem) { } } From 7b9b9e7ba8d1cf00f4c53a12f8e68ea3f9c60d5a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20K=C3=A5re=20Alsaker?= Date: Tue, 12 Dec 2017 17:35:25 +0100 Subject: [PATCH 24/42] Parallelize trans --- src/librustc/Cargo.toml | 1 + src/librustc/lib.rs | 1 + src/librustc/session/config.rs | 6 + src/librustc/session/mod.rs | 6 + src/librustc_trans/Cargo.toml | 2 +- src/librustc_trans/back/lto.rs | 7 + src/librustc_trans/back/write.rs | 235 ++++++++++++++----------------- src/librustc_trans/base.rs | 164 +++++++++++++-------- src/librustc_trans/lib.rs | 4 +- 9 files changed, 230 insertions(+), 196 deletions(-) diff --git a/src/librustc/Cargo.toml b/src/librustc/Cargo.toml index 692a6e150f178..fb10a26b5f8c4 100644 --- a/src/librustc/Cargo.toml +++ b/src/librustc/Cargo.toml @@ -28,6 +28,7 @@ rustc_errors = { path = "../librustc_errors" } serialize = { path = "../libserialize" } syntax = { path = "../libsyntax" } 
syntax_pos = { path = "../libsyntax_pos" } +num_cpus = "1.0" backtrace = "0.3.3" byteorder = { version = "1.1", features = ["i128"]} diff --git a/src/librustc/lib.rs b/src/librustc/lib.rs index 7cdee74b1507b..d3fde014698de 100644 --- a/src/librustc/lib.rs +++ b/src/librustc/lib.rs @@ -83,6 +83,7 @@ extern crate fmt_macros; extern crate getopts; extern crate graphviz; #[macro_use] extern crate lazy_static; +extern crate num_cpus; #[cfg(windows)] extern crate libc; #[cfg(windows)] diff --git a/src/librustc/session/config.rs b/src/librustc/session/config.rs index 2e6689efee572..b873ab1f750ce 100644 --- a/src/librustc/session/config.rs +++ b/src/librustc/session/config.rs @@ -1132,6 +1132,8 @@ options! {DebuggingOptions, DebuggingSetter, basic_debugging_options, "print the AST as JSON and halt"), query_threads: Option = (None, parse_opt_uint, [UNTRACKED], "execute queries on a thread pool with N threads"), + codegen_threads: Option = (None, parse_opt_uint, [UNTRACKED], + "execute code generation work with N threads"), ast_json_noexpand: bool = (false, parse_bool, [UNTRACKED], "print the pre-expansion AST as JSON and halt"), ls: bool = (false, parse_bool, [UNTRACKED], @@ -1861,6 +1863,10 @@ pub fn build_session_options_and_crate_config( ); } + if debugging_opts.codegen_threads == Some(0) { + early_error(error_format, "Value for codegen threads must be a positive nonzero integer"); + } + if debugging_opts.query_threads.unwrap_or(1) > 1 && debugging_opts.fuel.is_some() { early_error( error_format, diff --git a/src/librustc/session/mod.rs b/src/librustc/session/mod.rs index 2993234f26625..2cd225186ba65 100644 --- a/src/librustc/session/mod.rs +++ b/src/librustc/session/mod.rs @@ -865,6 +865,12 @@ impl Session { self.opts.debugging_opts.query_threads.unwrap_or(1) } + /// Returns the number of codegen threads that should be used for this + /// compilation + pub fn codegen_threads(&self) -> usize { + self.opts.debugging_opts.codegen_threads.unwrap_or(::num_cpus::get()) + } + /// Returns the number of codegen units that should be used for this /// compilation pub fn codegen_units(&self) -> usize { diff --git a/src/librustc_trans/Cargo.toml b/src/librustc_trans/Cargo.toml index 176fd86f29ddc..79f44fa89d407 100644 --- a/src/librustc_trans/Cargo.toml +++ b/src/librustc_trans/Cargo.toml @@ -16,7 +16,7 @@ flate2 = "1.0" jobserver = "0.1.5" libc = "0.2" log = "0.4" -num_cpus = "1.0" +rayon = { git = "https://github.com/Zoxc/rayon.git", branch = "fiber" } rustc = { path = "../librustc" } rustc-demangle = "0.1.4" rustc_allocator = { path = "../librustc_allocator" } diff --git a/src/librustc_trans/back/lto.rs b/src/librustc_trans/back/lto.rs index 2a473f1ecbcc5..4b1f57dfd0f0e 100644 --- a/src/librustc_trans/back/lto.rs +++ b/src/librustc_trans/back/lto.rs @@ -42,6 +42,7 @@ pub fn crate_type_allows_lto(crate_type: config::CrateType) -> bool { } } +#[derive(Debug)] pub(crate) enum LtoModuleTranslation { Fat { module: Option, @@ -506,6 +507,7 @@ fn run_pass_manager(cgcx: &CodegenContext, debug!("lto done"); } +#[derive(Debug)] pub enum SerializedModule { Local(ModuleBuffer), FromRlib(Vec), @@ -520,6 +522,7 @@ impl SerializedModule { } } +#[derive(Debug)] pub struct ModuleBuffer(*mut llvm::ModuleBuffer); unsafe impl Send for ModuleBuffer {} @@ -547,11 +550,13 @@ impl Drop for ModuleBuffer { } } +#[derive(Debug)] pub struct ThinModule { shared: Arc, idx: usize, } +#[derive(Debug)] struct ThinShared { data: ThinData, thin_buffers: Vec, @@ -559,6 +564,7 @@ struct ThinShared { module_names: Vec, } +#[derive(Debug)] 
struct ThinData(*mut llvm::ThinLTOData); unsafe impl Send for ThinData {} @@ -572,6 +578,7 @@ impl Drop for ThinData { } } +#[derive(Debug)] pub struct ThinBuffer(*mut llvm::ThinLTOBuffer); unsafe impl Send for ThinBuffer {} diff --git a/src/librustc_trans/back/write.rs b/src/librustc_trans/back/write.rs index f501b1739eb9c..1d580e53c4b8e 100644 --- a/src/librustc_trans/back/write.rs +++ b/src/librustc_trans/back/write.rs @@ -51,6 +51,7 @@ use std::ffi::{CString, CStr}; use std::fs; use std::io::{self, Write}; use std::mem; +use std::cmp; use std::path::{Path, PathBuf}; use std::str; use std::sync::Arc; @@ -58,6 +59,7 @@ use std::sync::mpsc::{channel, Sender, Receiver}; use std::slice; use std::time::Instant; use std::thread; +use std::panic; use libc::{c_uint, c_void, c_char, size_t}; pub const RELOC_MODEL_ARGS : [(&'static str, llvm::RelocMode); 7] = [ @@ -372,7 +374,7 @@ pub struct CodegenContext { debuginfo: config::DebugInfoLevel, // Number of cgus excluding the allocator/metadata modules - pub total_cgus: usize, + pub user_cgus: usize, // Handler to use for diagnostics produced during codegen. pub diag_emitter: SharedEmitter, // LLVM passes added by plugins. @@ -902,6 +904,7 @@ pub fn start_async_translation(tcx: TyCtxt, link: LinkMeta, metadata: EncodedMetadata, coordinator_receive: Receiver>, + user_cgus: usize, total_cgus: usize) -> OngoingCrateTranslation { let sess = tcx.sess; @@ -1016,12 +1019,16 @@ pub fn start_async_translation(tcx: TyCtxt, let (shared_emitter, shared_emitter_main) = SharedEmitter::new(); let (trans_worker_send, trans_worker_receive) = channel(); + let trans_worker_send_2 = trans_worker_send.clone(); + + let start = Instant::now(); let coordinator_thread = start_executing_work(tcx, &crate_info, shared_emitter, trans_worker_send, coordinator_receive, + user_cgus, total_cgus, client, time_graph.clone(), @@ -1036,10 +1043,11 @@ pub fn start_async_translation(tcx: TyCtxt, windows_subsystem, linker_info, crate_info, - + start, time_graph, coordinator_send: tcx.tx_to_llvm_workers.lock().clone(), trans_worker_receive, + trans_worker_send: trans_worker_send_2, shared_emitter_main, future: coordinator_thread, output_filenames: tcx.output_filenames(LOCAL_CRATE), @@ -1226,6 +1234,7 @@ pub(crate) fn dump_incremental_data(trans: &CrateTranslation) { trans.modules.len()); } +#[derive(Debug)] enum WorkItem { Optimize(ModuleTranslation), LTO(lto::LtoModuleTranslation), @@ -1403,6 +1412,7 @@ enum Message { cost: u64, }, TranslationComplete, + TranslationPanic(Box), TranslateItem, } @@ -1412,18 +1422,12 @@ struct Diagnostic { lvl: Level, } -#[derive(PartialEq, Clone, Copy, Debug)] -enum MainThreadWorkerState { - Idle, - Translating, - LLVMing, -} - fn start_executing_work(tcx: TyCtxt, crate_info: &CrateInfo, shared_emitter: SharedEmitter, trans_worker_send: Sender, coordinator_receive: Receiver>, + user_cgus: usize, total_cgus: usize, jobserver: Client, time_graph: Option, @@ -1480,6 +1484,8 @@ fn start_executing_work(tcx: TyCtxt, each_linked_rlib_for_lto.push((cnum, path.to_path_buf())); })); + + let max_workers = sess.codegen_threads(); let assembler_cmd = if modules_config.no_integrated_as { // HACK: currently we use linker (gcc) as our assembler let (name, mut cmd) = get_linker(sess); @@ -1514,12 +1520,13 @@ fn start_executing_work(tcx: TyCtxt, metadata_module_config: metadata_config, allocator_module_config: allocator_config, tm_factory: target_machine_factory(tcx.sess, false), - total_cgus, + user_cgus, msvc_imps_needed: msvc_imps_needed(tcx), target_pointer_width: 
tcx.sess.target.target.target_pointer_width.clone(), debuginfo: tcx.sess.opts.debuginfo, assembler_cmd, }; + let mut main_thread_capacity = sess.query_threads(); // This is the "main loop" of parallel work happening for parallel codegen. // It's here that we manage parallelism, schedule work, and work with @@ -1660,7 +1667,6 @@ fn start_executing_work(tcx: TyCtxt, // We pretend to be within the top-level LLVM time-passes task here: set_time_depth(1); - let max_workers = ::num_cpus::get(); let mut worker_id_counter = 0; let mut free_worker_ids = Vec::new(); let mut get_worker_id = |free_worker_ids: &mut Vec| { @@ -1681,66 +1687,37 @@ fn start_executing_work(tcx: TyCtxt, let mut needs_lto = Vec::new(); let mut started_lto = false; - // This flag tracks whether all items have gone through translations - let mut translation_done = false; - // This is the queue of LLVM work items that still need processing. let mut work_items = Vec::<(WorkItem, u64)>::new(); // This are the Jobserver Tokens we currently hold. Does not include // the implicit Token the compiler process owns no matter what. let mut tokens = Vec::new(); + let mut available_tokens = 1; + let mut requested_tokens = 0; - let mut main_thread_worker_state = MainThreadWorkerState::Idle; let mut running = 0; + let mut translated_cgus = 0; + let mut translation_cgu_queue = total_cgus; + let mut llvm_start_time = None; // Run the message loop while there's still anything that needs message // processing: - while !translation_done || + while translated_cgus < total_cgus || work_items.len() > 0 || running > 0 || - needs_lto.len() > 0 || - main_thread_worker_state != MainThreadWorkerState::Idle { - - // While there are still CGUs to be translated, the coordinator has - // to decide how to utilize the compiler processes implicit Token: - // For translating more CGU or for running them through LLVM. - if !translation_done { - if main_thread_worker_state == MainThreadWorkerState::Idle { - if !queue_full_enough(work_items.len(), running, max_workers) { - // The queue is not full enough, translate more items: - if let Err(_) = trans_worker_send.send(Message::TranslateItem) { - panic!("Could not send Message::TranslateItem to main thread") - } - main_thread_worker_state = MainThreadWorkerState::Translating; - } else { - // The queue is full enough to not let the worker - // threads starve. Use the implicit Token to do some - // LLVM work too. - let (item, _) = work_items.pop() - .expect("queue empty - queue_full_enough() broken?"); - let cgcx = CodegenContext { - worker: get_worker_id(&mut free_worker_ids), - .. cgcx.clone() - }; - maybe_start_llvm_timer(cgcx.config(item.kind()), - &mut llvm_start_time); - main_thread_worker_state = MainThreadWorkerState::LLVMing; - spawn_work(cgcx, item); - } - } - } else { + needs_lto.len() > 0 { + if translated_cgus == total_cgus { // If we've finished everything related to normal translation // then it must be the case that we've got some LTO work to do. 
// Perform the serial work here of figuring out what we're // going to LTO and then push a bunch of work items onto our // queue to do LTO if work_items.len() == 0 && - running == 0 && - main_thread_worker_state == MainThreadWorkerState::Idle { - assert!(!started_lto); + running == 0 { + assert!(!started_lto); // CHECK THIS assert!(needs_lto.len() > 0); started_lto = true; let modules = mem::replace(&mut needs_lto, Vec::new()); @@ -1749,49 +1726,23 @@ fn start_executing_work(tcx: TyCtxt, .binary_search_by_key(&cost, |&(_, cost)| cost) .unwrap_or_else(|e| e); work_items.insert(insertion_index, (work, cost)); - helper.request_token(); } } + } - // In this branch, we know that everything has been translated, - // so it's just a matter of determining whether the implicit - // Token is free to use for LLVM work. - match main_thread_worker_state { - MainThreadWorkerState::Idle => { - if let Some((item, _)) = work_items.pop() { - let cgcx = CodegenContext { - worker: get_worker_id(&mut free_worker_ids), - .. cgcx.clone() - }; - maybe_start_llvm_timer(cgcx.config(item.kind()), - &mut llvm_start_time); - main_thread_worker_state = MainThreadWorkerState::LLVMing; - spawn_work(cgcx, item); - } else { - // There is no unstarted work, so let the main thread - // take over for a running worker. Otherwise the - // implicit token would just go to waste. - // We reduce the `running` counter by one. The - // `tokens.truncate()` below will take care of - // giving the Token back. - debug_assert!(running > 0); - running -= 1; - main_thread_worker_state = MainThreadWorkerState::LLVMing; - } - } - MainThreadWorkerState::Translating => { - bug!("trans worker should not be translating after \ - translation was already completed") - } - MainThreadWorkerState::LLVMing => { - // Already making good use of that token - } - } + // Request more tokens if there is more parallel work to do + let translation_work = cmp::min(main_thread_capacity, translation_cgu_queue); + let parallel_work = cmp::min(max_workers, work_items.len() + translation_work); + while parallel_work > (available_tokens + requested_tokens) { + helper.request_token(); + requested_tokens += 1; } + // Give priority to LLVM work, since when that ends, we can free memory. + // Spin up what work we can, only doing this while we've got available // parallelism slots and work left to spawn. - while work_items.len() > 0 && running < tokens.len() { + while work_items.len() > 0 && running < available_tokens { let (item, _) = work_items.pop().unwrap(); maybe_start_llvm_timer(cgcx.config(item.kind()), @@ -1806,8 +1757,28 @@ fn start_executing_work(tcx: TyCtxt, running += 1; } + // Then we try to translate CGUs with free tokens + + // While there are still CGUs to be translated, the coordinator has + // to decide how to utilize the compiler processes implicit Token: + // For translating more CGU or for running them through LLVM. 
+ while main_thread_capacity > 0 && + running < available_tokens && + translation_cgu_queue > 0 { + if let Err(_) = trans_worker_send.send(Message::TranslateItem) { + panic!("Could not send Message::TranslateItem to main thread") + } + running += 1; + main_thread_capacity -= 1; + translation_cgu_queue -= 1; + } + // Relinquish accidentally acquired extra tokens - tokens.truncate(running); + if running > 0 { + tokens.truncate(running - 1); + } else { + tokens.truncate(0); + } let msg = coordinator_receive.recv().unwrap(); match *msg.downcast::().ok().unwrap() { @@ -1818,15 +1789,8 @@ fn start_executing_work(tcx: TyCtxt, match token { Ok(token) => { tokens.push(token); - - if main_thread_worker_state == MainThreadWorkerState::LLVMing { - // If the main thread token is used for LLVM work - // at the moment, we turn that thread into a regular - // LLVM worker thread, so the main thread is free - // to react to translation demand. - main_thread_worker_state = MainThreadWorkerState::Idle; - running += 1; - } + available_tokens += 1; + requested_tokens -= 1; } Err(e) => { let msg = &format!("failed to acquire jobserver token: {}", e); @@ -1837,6 +1801,10 @@ fn start_executing_work(tcx: TyCtxt, } } + Message::TranslationComplete => { + assert_eq!(translated_cgus, total_cgus); + } + Message::TranslationDone { llvm_work_item, cost } => { // We keep the queue sorted by estimated processing cost, // so that more expensive items are processed earlier. This @@ -1853,17 +1821,9 @@ fn start_executing_work(tcx: TyCtxt, }; work_items.insert(insertion_index, (llvm_work_item, cost)); - helper.request_token(); - assert_eq!(main_thread_worker_state, - MainThreadWorkerState::Translating); - main_thread_worker_state = MainThreadWorkerState::Idle; - } - - Message::TranslationComplete => { - translation_done = true; - assert_eq!(main_thread_worker_state, - MainThreadWorkerState::Translating); - main_thread_worker_state = MainThreadWorkerState::Idle; + translated_cgus += 1; + running -= 1; + main_thread_capacity += 1; } // If a thread exits successfully then we drop a token associated @@ -1875,11 +1835,7 @@ fn start_executing_work(tcx: TyCtxt, // Note that if the thread failed that means it panicked, so we // abort immediately. Message::Done { result: Ok(compiled_module), worker_id } => { - if main_thread_worker_state == MainThreadWorkerState::LLVMing { - main_thread_worker_state = MainThreadWorkerState::Idle; - } else { - running -= 1; - } + running -= 1; free_worker_ids.push(worker_id); @@ -1899,11 +1855,7 @@ fn start_executing_work(tcx: TyCtxt, } Message::NeedsLTO { result, worker_id } => { assert!(!started_lto); - if main_thread_worker_state == MainThreadWorkerState::LLVMing { - main_thread_worker_state = MainThreadWorkerState::Idle; - } else { - running -= 1; - } + running -= 1; free_worker_ids.push(worker_id); needs_lto.push(result); @@ -1913,8 +1865,9 @@ fn start_executing_work(tcx: TyCtxt, // Exit the coordinator thread return Err(()) } - Message::TranslateItem => { - bug!("the coordinator should not receive translation requests") + Message::TranslateItem | + Message::TranslationPanic(..) => { + bug!("the coordinator should not receive translation requests or panics") } } } @@ -1944,16 +1897,6 @@ fn start_executing_work(tcx: TyCtxt, }) }); - // A heuristic that determines if we have enough LLVM WorkItems in the - // queue so that the main thread can do LLVM work instead of translation - fn queue_full_enough(items_in_queue: usize, - workers_running: usize, - max_workers: usize) -> bool { - // Tune me, plz. 
- items_in_queue > 0 && - items_in_queue >= max_workers.saturating_sub(workers_running / 2) - } - fn maybe_start_llvm_timer(config: &ModuleConfig, llvm_start_time: &mut Option) { // We keep track of the -Ztime-passes output manually, @@ -2240,15 +2183,19 @@ impl SharedEmitterMain { } } +pub struct TransWorkerSender(Sender); + pub struct OngoingCrateTranslation { crate_name: Symbol, link: LinkMeta, metadata: EncodedMetadata, windows_subsystem: Option, + start: Instant, linker_info: LinkerInfo, crate_info: CrateInfo, time_graph: Option, coordinator_send: Sender>, + trans_worker_send: Sender, trans_worker_receive: Receiver, shared_emitter_main: SharedEmitterMain, future: thread::JoinHandle>, @@ -2269,6 +2216,11 @@ impl OngoingCrateTranslation { } }; + let total_time = Instant::now().duration_since(self.start); + print_time_passes_entry(sess.time_passes(), + "Translation + LLVM passes", + total_time); + sess.abort_if_errors(); if let Some(time_graph) = self.time_graph { @@ -2315,12 +2267,28 @@ impl OngoingCrateTranslation { submit_translated_module_to_llvm(tcx, mtrans, cost); } - pub fn translation_finished(&self, tcx: TyCtxt) { - self.wait_for_signal_to_translate_item(); - self.check_for_errors(tcx.sess); + pub fn translation_finished(&self) { + // See if there are any pending panics + while let Ok(msg) = self.trans_worker_receive.try_recv() { + match msg { + Message::TranslationPanic(panic) => { + panic::resume_unwind(panic) + } + _ => (), + } + } + drop(self.coordinator_send.send(Box::new(Message::TranslationComplete))); } + pub fn translation_panic(panic: Box, sender: TransWorkerSender) { + drop(sender.0.send(Message::TranslationPanic(panic))); + } + + pub fn trans_worker_sender(&self) -> TransWorkerSender { + TransWorkerSender(self.trans_worker_send.clone()) + } + pub fn check_for_errors(&self, sess: &Session) { self.shared_emitter_main.check(sess, false); } @@ -2330,6 +2298,9 @@ impl OngoingCrateTranslation { Ok(Message::TranslateItem) => { // Nothing to do } + Ok(Message::TranslationPanic(panic)) => { + panic::resume_unwind(panic) + } Ok(_) => panic!("unexpected message"), Err(_) => { // One of the LLVM threads must have panicked, fall through so diff --git a/src/librustc_trans/base.rs b/src/librustc_trans/base.rs index 1da6f25fd639a..0d49a52acdcfa 100644 --- a/src/librustc_trans/base.rs +++ b/src/librustc_trans/base.rs @@ -34,6 +34,7 @@ use back::write::{self, OngoingCrateTranslation, create_target_machine}; use llvm::{ContextRef, ModuleRef, ValueRef, Vector, get_param}; use llvm; use metadata; +use rustc_data_structures::sync::{scope, Lock}; use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE}; use rustc::middle::lang_items::StartFnLangItem; use rustc::middle::weak_lang_items; @@ -85,6 +86,7 @@ use std::time::{Instant, Duration}; use std::i32; use std::cmp; use std::sync::mpsc; +use std::panic; use syntax_pos::Span; use syntax_pos::symbol::InternedString; use syntax::attr; @@ -758,10 +760,11 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, link_meta, metadata, rx, + 0, 1); ongoing_translation.submit_pre_translated_module_to_llvm(tcx, metadata_module); - ongoing_translation.translation_finished(tcx); + ongoing_translation.translation_finished(); assert_and_save_dep_graph(tcx); @@ -787,13 +790,16 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, } } + let total_cgus = codegen_units.len() + tcx.sess.allocator_kind.get().is_some() as usize + 1; + let ongoing_translation = write::start_async_translation( tcx, time_graph.clone(), link_meta, metadata, rx, - 
codegen_units.len()); + codegen_units.len(), + total_cgus); // Translate an allocator shim, if any let allocator_module = if let Some(kind) = *tcx.sess.allocator_kind.get() { @@ -835,80 +841,114 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, codegen_units }; - let mut total_trans_time = Duration::new(0, 0); - let mut all_stats = Stats::default(); + let total_trans_time = Lock::new(Duration::new(0, 0)); + let all_stats = Lock::new(Stats::default()); + + scope(|scope| { + for cgu in codegen_units.into_iter() { + ongoing_translation.wait_for_signal_to_translate_item(); + ongoing_translation.check_for_errors(tcx.sess); + + // First, if incremental compilation is enabled, we try to re-use the + // codegen unit from the cache. + if tcx.dep_graph.is_fully_enabled() { + let cgu_id = cgu.work_product_id(); + + // Check whether there is a previous work-product we can + // re-use. Not only must the file exist, and the inputs not + // be dirty, but the hash of the symbols we will generate must + // be the same. + if let Some(buf) = tcx.dep_graph.previous_work_product(&cgu_id) { + let dep_node = &DepNode::new(tcx, + DepConstructor::CompileCodegenUnit(cgu.name().clone())); + + // We try to mark the DepNode::CompileCodegenUnit green. If we + // succeed it means that none of the dependencies has changed + // and we can safely re-use. + if let Some(dep_node_index) = tcx.dep_graph + .try_mark_green(tcx, dep_node) { + // Append ".rs" to LLVM module identifier. + // + // LLVM code generator emits a ".file filename" directive + // for ELF backends. Value of the "filename" is set as the + // LLVM module identifier. Due to a LLVM MC bug[1], LLVM + // crashes if the module identifier is same as other symbols + // such as a function name in the module. + // 1. http://llvm.org/bugs/show_bug.cgi?id=11479 + let llmod_id = format!("{}.rs", cgu.name()); + + let module = ModuleTranslation { + name: cgu.name().to_string(), + source: ModuleSource::Preexisting(buf), + kind: ModuleKind::Regular, + llmod_id, + }; + tcx.dep_graph.mark_loaded_from_cache(dep_node_index, true); + write::submit_translated_module_to_llvm(tcx, module, 0); + // Continue to next cgu, this one is done. + continue + } + } else { + // This can happen if files were deleted from the cache + // directory for some reason. We just re-compile then. + } + } - for cgu in codegen_units.into_iter() { - ongoing_translation.wait_for_signal_to_translate_item(); - ongoing_translation.check_for_errors(tcx.sess); + let cgu_name = *cgu.name(); + let all_stats = &all_stats; + let total_trans_time = &total_trans_time; + let time_graph = time_graph.as_ref(); + let sender = ongoing_translation.trans_worker_sender(); + + scope.spawn(move |_| { + let _timing_guard = time_graph.map(|time_graph| { + time_graph.start(write::TRANS_WORKER_TIMELINE, + write::TRANS_WORK_PACKAGE_KIND, + &format!("codegen {}", cgu_name)) + }); + + let cgu_name = cgu_name; + let start_time = Instant::now(); + + #[cfg(parallel_queries)] + { + let result = panic::catch_unwind(panic::AssertUnwindSafe(|| { + let result = tcx.compile_codegen_unit(cgu_name); + all_stats.lock().extend(result); + })); + + if let Err(panic) = result { + OngoingCrateTranslation::translation_panic(panic, sender); + } + } - // First, if incremental compilation is enabled, we try to re-use the - // codegen unit from the cache. - if tcx.dep_graph.is_fully_enabled() { - let cgu_id = cgu.work_product_id(); - - // Check whether there is a previous work-product we can - // re-use. 
Not only must the file exist, and the inputs not - // be dirty, but the hash of the symbols we will generate must - // be the same. - if let Some(buf) = tcx.dep_graph.previous_work_product(&cgu_id) { - let dep_node = &DepNode::new(tcx, - DepConstructor::CompileCodegenUnit(cgu.name().clone())); - - // We try to mark the DepNode::CompileCodegenUnit green. If we - // succeed it means that none of the dependencies has changed - // and we can safely re-use. - if let Some(dep_node_index) = tcx.dep_graph.try_mark_green(tcx, dep_node) { - // Append ".rs" to LLVM module identifier. - // - // LLVM code generator emits a ".file filename" directive - // for ELF backends. Value of the "filename" is set as the - // LLVM module identifier. Due to a LLVM MC bug[1], LLVM - // crashes if the module identifier is same as other symbols - // such as a function name in the module. - // 1. http://llvm.org/bugs/show_bug.cgi?id=11479 - let llmod_id = format!("{}.rs", cgu.name()); - - let module = ModuleTranslation { - name: cgu.name().to_string(), - source: ModuleSource::Preexisting(buf), - kind: ModuleKind::Regular, - llmod_id, - }; - tcx.dep_graph.mark_loaded_from_cache(dep_node_index, true); - write::submit_translated_module_to_llvm(tcx, module, 0); - // Continue to next cgu, this one is done. - continue + #[cfg(not(parallel_queries))] + { + let result = tcx.compile_codegen_unit(cgu_name); + all_stats.lock().extend(result); } - } else { - // This can happen if files were deleted from the cache - // directory for some reason. We just re-compile then. - } - } - let _timing_guard = time_graph.as_ref().map(|time_graph| { - time_graph.start(write::TRANS_WORKER_TIMELINE, - write::TRANS_WORK_PACKAGE_KIND, - &format!("codegen {}", cgu.name())) - }); - let start_time = Instant::now(); - all_stats.extend(tcx.compile_codegen_unit(*cgu.name())); - total_trans_time += start_time.elapsed(); - ongoing_translation.check_for_errors(tcx.sess); - } + let mut total_trans_time = total_trans_time.lock(); + *total_trans_time += start_time.elapsed(); + }); + } + }); - ongoing_translation.translation_finished(tcx); + ongoing_translation.translation_finished(); + ongoing_translation.check_for_errors(tcx.sess); // Since the main thread is sometimes blocked during trans, we keep track // -Ztime-passes output manually. print_time_passes_entry(tcx.sess.time_passes(), - "translate to LLVM IR", - total_trans_time); + "translate to LLVM IR (total time in all threads)", + total_trans_time.into_inner()); if tcx.sess.opts.incremental.is_some() { ::rustc_incremental::assert_module_sources::assert_module_sources(tcx); } + let mut all_stats = all_stats.into_inner(); + symbol_names_test::report_symbol_names(tcx); if tcx.sess.trans_stats() { diff --git a/src/librustc_trans/lib.rs b/src/librustc_trans/lib.rs index a3de1e03e28b8..394088bdf7c7c 100644 --- a/src/librustc_trans/lib.rs +++ b/src/librustc_trans/lib.rs @@ -39,9 +39,9 @@ use syntax_pos::symbol::Symbol; extern crate bitflags; extern crate flate2; extern crate libc; +extern crate rayon; #[macro_use] extern crate rustc; extern crate jobserver; -extern crate num_cpus; extern crate rustc_mir; extern crate rustc_allocator; extern crate rustc_apfloat; @@ -272,6 +272,7 @@ pub fn __rustc_codegen_backend() -> Box { LlvmTransCrate::new() } +#[derive(Debug)] struct ModuleTranslation { /// The name of the module. 
When the crate may be saved between /// compilations, incremental compilation requires that name be @@ -348,6 +349,7 @@ struct CompiledModule { bytecode_compressed: Option, } +#[derive(Debug)] enum ModuleSource { /// Copy the `.o` files or whatever from the incr. comp. directory. Preexisting(WorkProduct), From 7903e7df72aa7dc6150d3ea667884d17adf55087 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20K=C3=A5re=20Alsaker?= Date: Sat, 16 Dec 2017 17:40:43 +0100 Subject: [PATCH 25/42] Disable tests --- src/bootstrap/builder.rs | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/bootstrap/builder.rs b/src/bootstrap/builder.rs index 7ac9b146fe57e..367e2038f4fdc 100644 --- a/src/bootstrap/builder.rs +++ b/src/bootstrap/builder.rs @@ -311,7 +311,7 @@ impl<'a> Builder<'a> { tool::RustInstaller, tool::Cargo, tool::Rls, tool::Rustdoc, tool::Clippy, native::Llvm, tool::Rustfmt, tool::Miri, native::Lld), Kind::Check => describe!(check::Std, check::Test, check::Rustc, check::CodegenBackend), - Kind::Test => describe!(test::Tidy, test::Bootstrap, test::Ui, test::RunPass, + Kind::Test => describe!(test::Tidy, test::Bootstrap/**/, test::Ui, test::RunPass, test::CompileFail, test::ParseFail, test::RunFail, test::RunPassValgrind, test::MirOpt, test::Codegen, test::CodegenUnits, test::Incremental, test::Debuginfo, test::UiFullDeps, test::RunPassFullDeps, test::RunFailFullDeps, @@ -324,8 +324,9 @@ impl<'a> Builder<'a> { test::Nomicon, test::Reference, test::RustdocBook, test::RustByExample, test::TheBook, test::UnstableBook, test::Rustfmt, test::Miri, test::Clippy, test::RustdocJS, test::RustdocTheme, + test::RustdocUi, // Run run-make last, since these won't pass without make on Windows - test::RunMake, test::RustdocUi), + test::RunMake/**/), Kind::Bench => describe!(test::Crate, test::CrateLibrustc), Kind::Doc => describe!(doc::UnstableBook, doc::UnstableBookGen, doc::TheBook, doc::Standalone, doc::Std, doc::Test, doc::WhitelistedRustc, doc::Rustc, From 3100379a628e39555353739c97ee90b83cc8fb18 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20K=C3=A5re=20Alsaker?= Date: Wed, 24 Jan 2018 09:59:08 +0100 Subject: [PATCH 26/42] Save query results and the dep graph in parallel --- src/librustc_incremental/lib.rs | 2 ++ src/librustc_incremental/persist/save.rs | 22 ++++++++++++++-------- 2 files changed, 16 insertions(+), 8 deletions(-) diff --git a/src/librustc_incremental/lib.rs b/src/librustc_incremental/lib.rs index a5e07bcec24bb..fd9ee96277212 100644 --- a/src/librustc_incremental/lib.rs +++ b/src/librustc_incremental/lib.rs @@ -17,6 +17,8 @@ #![feature(fs_read_write)] #![feature(specialization)] +#![recursion_limit="256"] + extern crate graphviz; #[macro_use] extern crate rustc; extern crate rustc_data_structures; diff --git a/src/librustc_incremental/persist/save.rs b/src/librustc_incremental/persist/save.rs index e524fcecf9094..c69b23dfb20ef 100644 --- a/src/librustc_incremental/persist/save.rs +++ b/src/librustc_incremental/persist/save.rs @@ -13,6 +13,7 @@ use rustc::session::Session; use rustc::ty::TyCtxt; use rustc::util::common::time; use rustc_data_structures::fx::FxHashMap; +use rustc_data_structures::sync::join; use rustc_serialize::Encodable as RustcEncodable; use rustc_serialize::opaque::Encoder; use std::io::{self, Cursor}; @@ -33,23 +34,28 @@ pub fn save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { return; } - time(sess, "persist query result cache", || { - save_in(sess, - query_cache_path(sess), - |e| encode_query_cache(tcx, e)); - }); + let query_cache_path = 
query_cache_path(sess); + let dep_graph_path = dep_graph_path(sess); - if tcx.sess.opts.debugging_opts.incremental_queries { + join(move || { + if tcx.sess.opts.debugging_opts.incremental_queries { + time(sess, "persist query result cache", || { + save_in(sess, + query_cache_path, + |e| encode_query_cache(tcx, e)); + }); + } + }, || { time(sess, "persist dep-graph", || { save_in(sess, - dep_graph_path(sess), + dep_graph_path, |e| { time(sess, "encode dep-graph", || { encode_dep_graph(tcx, e) }) }); }); - } + }); dirty_clean::check_dirty_clean_annotations(tcx); }) From 851ee7496084dc365e40a81f6a3158d7e55d4315 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20K=C3=A5re=20Alsaker?= Date: Sun, 21 Jan 2018 18:27:18 +0100 Subject: [PATCH 27/42] Force parallel queries --- src/bootstrap/config.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/bootstrap/config.rs b/src/bootstrap/config.rs index 1b4b2c5fb2a54..f2c4857a9902e 100644 --- a/src/bootstrap/config.rs +++ b/src/bootstrap/config.rs @@ -518,7 +518,7 @@ impl Config { set(&mut config.test_miri, rust.test_miri); set(&mut config.wasm_syscall, rust.wasm_syscall); set(&mut config.lld_enabled, rust.lld); - config.rustc_parallel_queries = rust.experimental_parallel_queries.unwrap_or(false); + config.rustc_parallel_queries = rust.experimental_parallel_queries.unwrap_or(true); config.rustc_default_linker = rust.default_linker.clone(); config.musl_root = rust.musl_root.clone().map(PathBuf::from); config.save_toolstates = rust.save_toolstates.clone().map(PathBuf::from); From b1e83a561372266be66601d9c5915d7037b18599 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20K=C3=A5re=20Alsaker?= Date: Sun, 21 Jan 2018 19:46:14 +0100 Subject: [PATCH 28/42] Parallelize trans item collection --- src/librustc_mir/lib.rs | 3 ++ src/librustc_mir/monomorphize/collector.rs | 42 ++++++++++++---------- 2 files changed, 27 insertions(+), 18 deletions(-) diff --git a/src/librustc_mir/lib.rs b/src/librustc_mir/lib.rs index de3063a575673..9d6831cd9bff0 100644 --- a/src/librustc_mir/lib.rs +++ b/src/librustc_mir/lib.rs @@ -35,7 +35,10 @@ Rust MIR: a lowered representation of Rust. Also: an experiment! 
#![feature(crate_visibility_modifier)] #![cfg_attr(stage0, feature(try_trait))] +#![recursion_limit="256"] + extern crate arena; + #[macro_use] extern crate bitflags; #[macro_use] extern crate log; diff --git a/src/librustc_mir/monomorphize/collector.rs b/src/librustc_mir/monomorphize/collector.rs index 008165f33b2bb..9027b433f00a4 100644 --- a/src/librustc_mir/monomorphize/collector.rs +++ b/src/librustc_mir/monomorphize/collector.rs @@ -207,10 +207,12 @@ use rustc::mir::interpret::GlobalId; use monomorphize::{self, Instance}; use rustc::util::nodemap::{FxHashSet, FxHashMap, DefIdMap}; +use rustc::util::common::time; use monomorphize::item::{MonoItemExt, DefPathBasedNames, InstantiationMode}; use rustc_data_structures::bitvec::BitVector; +use rustc_data_structures::sync::{ParallelIterator, par_iter, Lock}; #[derive(PartialEq, Eq, Hash, Clone, Copy, Debug)] pub enum MonoItemCollectionMode { @@ -298,22 +300,26 @@ pub fn collect_crate_mono_items<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, mode: MonoItemCollectionMode) -> (FxHashSet<MonoItem<'tcx>>, InliningMap<'tcx>) { - let roots = collect_roots(tcx, mode); + let roots = time(tcx.sess, "collecting roots", || { + collect_roots(tcx, mode) + }); debug!("Building mono item graph, beginning at roots"); - let mut visited = FxHashSet(); - let mut recursion_depths = DefIdMap(); - let mut inlining_map = InliningMap::new(); - - for root in roots { - collect_items_rec(tcx, - root, - &mut visited, - &mut recursion_depths, - &mut inlining_map); - } + let visited = Lock::new(FxHashSet()); + let inlining_map = Lock::new(InliningMap::new()); + + time(tcx.sess, "collecting mono items", || { + par_iter(roots).for_each(|root| { + let mut recursion_depths = DefIdMap(); + collect_items_rec(tcx, + root, + &visited, + &mut recursion_depths, + &inlining_map); + }); + }); - (visited, inlining_map) + (visited.into_inner(), inlining_map.into_inner()) } // Find all non-generic items by walking the HIR. These items serve as roots to @@ -354,10 +360,10 @@ fn collect_roots<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, // Collect all monomorphized items reachable from `starting_point` fn collect_items_rec<'a, 'tcx: 'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>, starting_point: MonoItem<'tcx>, - visited: &mut FxHashSet<MonoItem<'tcx>>, + visited: &Lock<FxHashSet<MonoItem<'tcx>>>, recursion_depths: &mut DefIdMap<usize>, - inlining_map: &mut InliningMap<'tcx>) { - if !visited.insert(starting_point.clone()) { + inlining_map: &Lock<InliningMap<'tcx>>) { + if !visited.lock().insert(starting_point.clone()) { // We've been here already, no need to search again. 
return; } @@ -425,7 +431,7 @@ fn collect_items_rec<'a, 'tcx: 'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>, fn record_accesses<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, caller: MonoItem<'tcx>, callees: &[MonoItem<'tcx>], - inlining_map: &mut InliningMap<'tcx>) { + inlining_map: &Lock>) { let is_inlining_candidate = |mono_item: &MonoItem<'tcx>| { mono_item.instantiation_mode(tcx) == InstantiationMode::LocalCopy }; @@ -435,7 +441,7 @@ fn record_accesses<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, (*mono_item, is_inlining_candidate(mono_item)) }); - inlining_map.record_accesses(caller, accesses); + inlining_map.lock().record_accesses(caller, accesses); } fn check_recursion_limit<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, From 9626d4e291dd9028bf49b7f812db1366f7d2a6f9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20K=C3=A5re=20Alsaker?= Date: Mon, 22 Jan 2018 14:31:23 +0100 Subject: [PATCH 29/42] Have thread-local GlobalArenas --- src/librustc/ty/context.rs | 23 ++++++++++++----------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git a/src/librustc/ty/context.rs b/src/librustc/ty/context.rs index 1544dc45cbc75..9c5da53b7c27b 100644 --- a/src/librustc/ty/context.rs +++ b/src/librustc/ty/context.rs @@ -78,18 +78,19 @@ use syntax::codemap::MultiSpan; use syntax::feature_gate; use syntax::symbol::{Symbol, keywords, InternedString}; use syntax_pos::Span; +use util::common::ThreadLocal; use hir; pub struct AllArenas<'tcx> { - pub global: GlobalArenas<'tcx>, + pub global: ThreadLocal>, pub interner: SyncDroplessArena, } impl<'tcx> AllArenas<'tcx> { pub fn new() -> Self { AllArenas { - global: GlobalArenas::new(), + global: ThreadLocal::new(|| GlobalArenas::new()), interner: SyncDroplessArena::new(), } } @@ -853,7 +854,7 @@ impl<'a, 'gcx, 'tcx> Deref for TyCtxt<'a, 'gcx, 'tcx> { } pub struct GlobalCtxt<'tcx> { - global_arenas: &'tcx GlobalArenas<'tcx>, + global_arenas: &'tcx ThreadLocal>, global_interners: CtxtInterners<'tcx>, cstore: &'tcx (dyn CrateStore + Sync), @@ -1054,23 +1055,23 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } pub fn alloc_generics(self, generics: ty::Generics) -> &'gcx ty::Generics { - self.global_arenas.generics.alloc(generics) + self.global_arenas.current().generics.alloc(generics) } pub fn alloc_steal_mir(self, mir: Mir<'gcx>) -> &'gcx Steal> { - self.global_arenas.steal_mir.alloc(Steal::new(mir)) + self.global_arenas.current().steal_mir.alloc(Steal::new(mir)) } pub fn alloc_mir(self, mir: Mir<'gcx>) -> &'gcx Mir<'gcx> { - self.global_arenas.mir.alloc(mir) + self.global_arenas.current().mir.alloc(mir) } pub fn alloc_tables(self, tables: ty::TypeckTables<'gcx>) -> &'gcx ty::TypeckTables<'gcx> { - self.global_arenas.tables.alloc(tables) + self.global_arenas.current().tables.alloc(tables) } pub fn alloc_trait_def(self, def: ty::TraitDef) -> &'gcx ty::TraitDef { - self.global_arenas.trait_def.alloc(def) + self.global_arenas.current().trait_def.alloc(def) } pub fn alloc_adt_def(self, @@ -1080,7 +1081,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { repr: ReprOptions) -> &'gcx ty::AdtDef { let def = ty::AdtDef::new(self, did, kind, variants, repr); - self.global_arenas.adt_def.alloc(def) + self.global_arenas.current().adt_def.alloc(def) } pub fn alloc_byte_array(self, bytes: &[u8]) -> &'gcx [u8] { @@ -1118,7 +1119,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { return alloc; } - let interned = self.global_arenas.const_allocs.alloc(alloc); + let interned = self.global_arenas.current().const_allocs.alloc(alloc); if let Some(prev) = allocs.replace(interned) { bug!("Tried to overwrite 
interned Allocation: {:#?}", prev) } @@ -1164,7 +1165,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { return layout; } - let interned = self.global_arenas.layout.alloc(layout); + let interned = self.global_arenas.current().layout.alloc(layout); if let Some(prev) = layout_interner.replace(interned) { bug!("Tried to overwrite interned Layout: {:?}", prev) } From cee2dbbcd6d7f00e9a5acd9a04eb6f3c46907dea Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20K=C3=A5re=20Alsaker?= Date: Fri, 9 Feb 2018 10:33:29 +0100 Subject: [PATCH 30/42] Force 8 query threads --- src/librustc/session/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/librustc/session/mod.rs b/src/librustc/session/mod.rs index 2cd225186ba65..65ee22d3af235 100644 --- a/src/librustc/session/mod.rs +++ b/src/librustc/session/mod.rs @@ -862,7 +862,7 @@ impl Session { /// Returns the number of query threads that should be used for this /// compilation pub fn query_threads(&self) -> usize { - self.opts.debugging_opts.query_threads.unwrap_or(1) + self.opts.debugging_opts.query_threads.unwrap_or(1)/*8*/ } /// Returns the number of codegen threads that should be used for this From 5381ed9acc0074ecfd5fda7a5bf6f914308a801d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20K=C3=A5re=20Alsaker?= Date: Wed, 14 Mar 2018 21:25:52 +0100 Subject: [PATCH 31/42] Add crates to whitelist --- src/tools/tidy/src/deps.rs | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/src/tools/tidy/src/deps.rs b/src/tools/tidy/src/deps.rs index 63f40110225ec..3c8fc0e00d24b 100644 --- a/src/tools/tidy/src/deps.rs +++ b/src/tools/tidy/src/deps.rs @@ -58,6 +58,7 @@ static WHITELIST_CRATES: &'static [CrateVersion] = &[ static WHITELIST: &'static [Crate] = &[ Crate("aho-corasick"), Crate("ar"), + Crate("arrayvec"), Crate("atty"), Crate("backtrace"), Crate("backtrace-sys"), @@ -66,8 +67,15 @@ static WHITELIST: &'static [Crate] = &[ Crate("cc"), Crate("cfg-if"), Crate("cmake"), + Crate("coco"), + Crate("context"), + Crate("crossbeam-channel"), + Crate("crossbeam-epoch"), + Crate("crossbeam-utils"), + Crate("either"), Crate("ena"), Crate("env_logger"), + Crate("gcc"), Crate("filetime"), Crate("flate2"), Crate("fuchsia-zircon"), @@ -80,13 +88,17 @@ static WHITELIST: &'static [Crate] = &[ Crate("log"), Crate("log_settings"), Crate("memchr"), + Crate("memoffset"), Crate("miniz-sys"), Crate("num_cpus"), + Crate("nodrop"), Crate("owning_ref"), Crate("parking_lot"), Crate("parking_lot_core"), Crate("quick-error"), Crate("rand"), + Crate("rayon"), + Crate("rayon-core"), Crate("redox_syscall"), Crate("redox_termios"), Crate("regex"), @@ -94,6 +106,7 @@ static WHITELIST: &'static [Crate] = &[ Crate("remove_dir_all"), Crate("rustc-demangle"), Crate("scoped-tls"), + Crate("scopeguard"), Crate("smallvec"), Crate("stable_deref_trait"), Crate("tempdir"), From f0ed8cb16d397ebd4148d6c57a25e1b46e68c8e8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20K=C3=A5re=20Alsaker?= Date: Fri, 6 Apr 2018 14:27:08 +0200 Subject: [PATCH 32/42] fix proc_macro docs --- src/libproc_macro/lib.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/libproc_macro/lib.rs b/src/libproc_macro/lib.rs index e171216523a1e..6b591ca558583 100644 --- a/src/libproc_macro/lib.rs +++ b/src/libproc_macro/lib.rs @@ -38,6 +38,8 @@ #![feature(lang_items)] #![feature(optin_builtin_traits)] +#![recursion_limit="256"] + extern crate syntax; extern crate syntax_pos; extern crate rustc_errors; From ff9baa8c45db13001879dbfd3b4d9bf40d6fb85f Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?John=20K=C3=A5re=20Alsaker?= Date: Tue, 17 Apr 2018 16:53:56 +0200 Subject: [PATCH 33/42] Add Cargo.lock --- src/Cargo.lock | 117 ++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 116 insertions(+), 1 deletion(-) diff --git a/src/Cargo.lock b/src/Cargo.lock index 4f979b7e9f497..ff9205c48ec66 100644 --- a/src/Cargo.lock +++ b/src/Cargo.lock @@ -331,6 +331,15 @@ dependencies = [ "cc 1.0.9 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "coco" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "either 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)", + "scopeguard 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "commoncrypto" version = "0.2.0" @@ -395,6 +404,17 @@ dependencies = [ name = "completion" version = "0.1.0" +[[package]] +name = "context" +version = "2.0.0" +source = "git+https://github.com/Zoxc/context-rs.git#f932ce270044194fec0572f719ab903af0248c0d" +dependencies = [ + "gcc 0.3.54 (registry+https://github.com/rust-lang/crates.io-index)", + "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.40 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "core" version = "0.0.0" @@ -436,6 +456,16 @@ name = "crossbeam" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "crossbeam-channel" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "crossbeam-epoch 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "crossbeam-utils 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "parking_lot 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "crossbeam-deque" version = "0.2.0" @@ -445,6 +475,19 @@ dependencies = [ "crossbeam-utils 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "crossbeam-epoch" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "arrayvec 0.4.7 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", + "crossbeam-utils 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)", + "memoffset 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "scopeguard 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "crossbeam-epoch" version = "0.3.1" @@ -733,6 +776,11 @@ name = "futures" version = "0.1.20" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "gcc" +version = "0.3.54" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "getopts" version = "0.2.17" @@ -1131,6 +1179,11 @@ dependencies = [ "libc 0.2.40 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "memoffset" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "memoffset" version = "0.2.1" @@ -1274,6 +1327,15 @@ dependencies = [ "unwind 0.0.0", ] +[[package]] +name = "parking_lot" +version = "0.4.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "owning_ref 0.3.3 
(registry+https://github.com/rust-lang/crates.io-index)", + "parking_lot_core 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "parking_lot" version = "0.5.4" @@ -1494,6 +1556,15 @@ dependencies = [ "winapi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "rayon" +version = "0.9.0" +source = "git+https://github.com/Zoxc/rayon.git?branch=fiber#c258be9089f826891439ce1c1cb1d57d434dc9fe" +dependencies = [ + "either 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)", + "rayon-core 1.3.0 (git+https://github.com/Zoxc/rayon.git?branch=fiber)", +] + [[package]] name = "rayon" version = "1.0.1" @@ -1503,6 +1574,25 @@ dependencies = [ "rayon-core 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "rayon-core" +version = "1.3.0" +source = "git+https://github.com/Zoxc/rayon.git?branch=fiber#c258be9089f826891439ce1c1cb1d57d434dc9fe" +dependencies = [ + "coco 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", + "context 2.0.0 (git+https://github.com/Zoxc/context-rs.git)", + "crossbeam-channel 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", + "gcc 0.3.54 (registry+https://github.com/rust-lang/crates.io-index)", + "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.40 (registry+https://github.com/rust-lang/crates.io-index)", + "num_cpus 1.8.0 (registry+https://github.com/rust-lang/crates.io-index)", + "parking_lot 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", + "rand 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)", + "scoped-tls 0.1.0 (git+https://github.com/Zoxc/scoped-tls.git)", + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "rayon-core" version = "1.4.0" @@ -1675,18 +1765,24 @@ dependencies = [ "fmt_macros 0.0.0", "graphviz 0.0.0", "jobserver 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)", + "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", + "num_cpus 1.8.0 (registry+https://github.com/rust-lang/crates.io-index)", "proc_macro 0.0.0", + "rayon 0.9.0 (git+https://github.com/Zoxc/rayon.git?branch=fiber)", + "rayon-core 1.3.0 (git+https://github.com/Zoxc/rayon.git?branch=fiber)", "rustc_apfloat 0.0.0", "rustc_back 0.0.0", "rustc_const_math 0.0.0", "rustc_data_structures 0.0.0", "rustc_errors 0.0.0", + "scoped-tls 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "serialize 0.0.0", "syntax 0.0.0", "syntax_pos 0.0.0", "tempdir 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1857,6 +1953,7 @@ dependencies = [ "log 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot_core 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", + "rayon 0.9.0 (git+https://github.com/Zoxc/rayon.git?branch=fiber)", "serialize 0.0.0", "stable_deref_trait 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1870,6 +1967,8 @@ dependencies = [ "env_logger 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)", "graphviz 0.0.0", "log 0.4.1 
(registry+https://github.com/rust-lang/crates.io-index)", + "rayon 0.9.0 (git+https://github.com/Zoxc/rayon.git?branch=fiber)", + "rayon-core 1.3.0 (git+https://github.com/Zoxc/rayon.git?branch=fiber)", "rustc 0.0.0", "rustc_allocator 0.0.0", "rustc_back 0.0.0", @@ -1888,6 +1987,7 @@ dependencies = [ "rustc_traits 0.0.0", "rustc_trans_utils 0.0.0", "rustc_typeck 0.0.0", + "scoped-tls 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "serialize 0.0.0", "syntax 0.0.0", "syntax_ext 0.0.0", @@ -2096,7 +2196,7 @@ dependencies = [ "jobserver 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.40 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", - "num_cpus 1.8.0 (registry+https://github.com/rust-lang/crates.io-index)", + "rayon 0.9.0 (git+https://github.com/Zoxc/rayon.git?branch=fiber)", "rustc 0.0.0", "rustc-demangle 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", "rustc_allocator 0.0.0", @@ -2220,6 +2320,11 @@ dependencies = [ "winapi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "scoped-tls" +version = "0.1.0" +source = "git+https://github.com/Zoxc/scoped-tls.git#0327b5c7f98633d698a1a2fee41077da2316fd21" + [[package]] name = "scoped-tls" version = "0.1.1" @@ -2887,13 +2992,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index" "checksum chrono 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ba5f60682a4c264e7f8d77b82e7788938a76befdf949d4a98026d19099c9d873" "checksum clap 2.31.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f0f16b89cbb9ee36d87483dc939fe9f1e13c05898d56d7b230a0d4dff033a536" "checksum cmake 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)" = "56d741ea7a69e577f6d06b36b7dff4738f680593dc27a701ffa8506b73ce28bb" +"checksum coco 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "04d5eef9c9354cbb35d5069c39054c657469d2aa7789d4c71d0a6b686dc48bea" "checksum commoncrypto 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d056a8586ba25a1e4d61cb090900e495952c7886786fc55f909ab2f819b69007" "checksum commoncrypto-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1fed34f46747aa73dfaa578069fd8279d2818ade2b55f38f22a9401c7f4083e2" "checksum compiletest_rs 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "608d9d3ccc45b63bf337d2ff5e65def5a5a52c187122232509f6b72707f61b1b" +"checksum context 2.0.0 (git+https://github.com/Zoxc/context-rs.git)" = "" "checksum core-foundation 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "286e0b41c3a20da26536c6000a280585d519fd07b3956b43aed8a79e9edce980" "checksum core-foundation-sys 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "716c271e8613ace48344f723b60b900a93150271e5be206212d052bbc0883efa" "checksum crossbeam 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "24ce9782d4d5c53674646a6a4c1863a21a8fc0cb649b3c94dfc16e45071dea19" +"checksum crossbeam-channel 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9d7b07a3084d8718d95338443d5a46aab38ce16d5f991d4027a0906b369f70a3" "checksum crossbeam-deque 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f739f8c5363aca78cfb059edf753d8f0d36908c348f3d8d1503f03d8b75d9cf3" +"checksum crossbeam-epoch 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9898f21d6d647793e163c804944941fb19aecd1f4a1a4c254bbb0bee15ccdea5" "checksum crossbeam-epoch 0.3.1 
(registry+https://github.com/rust-lang/crates.io-index)" = "927121f5407de9956180ff5e936fe3cf4324279280001cd56b669d28ee7e9150" "checksum crossbeam-utils 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "2760899e32a1d58d5abb31129f8fae5de75220bc2176e77ff7c627ae45c918d9" "checksum crypto-hash 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "09de9ee0fc255ace04c7fa0763c9395a945c37c8292bb554f8d48361d1dcf1b4" @@ -2924,6 +3033,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" "checksum fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7" "checksum futf 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "51f93f3de6ba1794dcd5810b3546d004600a59a98266487c8407bc4b24e398f3" "checksum futures 0.1.20 (registry+https://github.com/rust-lang/crates.io-index)" = "0c5a3176836efa0b37f0e321b86672dfada1564aeb516fbed67b7c24050a0263" +"checksum gcc 0.3.54 (registry+https://github.com/rust-lang/crates.io-index)" = "5e33ec290da0d127825013597dbdfc28bee4964690c7ce1166cbc2a7bd08b1bb" "checksum getopts 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)" = "b900c08c1939860ce8b54dc6a89e26e00c04c380fd0e09796799bd7f12861e05" "checksum git2 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0f41c0035c37ec11ed3f1e1946a76070b0c740393687e9a9c7612f6a709036b3" "checksum git2-curl 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b502f6b1b467957403d168f0039e0c46fa6a1220efa2adaef25d5b267b5fe024" @@ -2962,6 +3072,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" "checksum matches 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "100aabe6b8ff4e4a7e32c1c13523379802df0772b82466207ac25b013f193376" "checksum mdbook 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "326d0861da5681a13c19a00952a56c254dd04f00eb944e506fdb36e93ae6f1ca" "checksum memchr 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "796fba70e76612589ed2ce7f45282f5af869e0fdd7cc6199fa1aa1f1d591ba9d" +"checksum memoffset 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e163e5baece1a039e71e75b074de17a9b4114982aa109921fc20253bdf91a53c" "checksum memoffset 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0f9dc261e2b62d7a622bf416ea3c5245cdd5d9a7fcc428c0d06804dfce1775b3" "checksum miniz-sys 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "609ce024854aeb19a0ef7567d348aaa5a746b32fb72e336df7fcc16869d7e2b4" "checksum miow 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9224c91f82b3c47cf53dcf78dfaa20d6888fbcc5d272d5f2fcdf8a697f3c987d" @@ -2977,6 +3088,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" "checksum openssl-sys 0.9.27 (registry+https://github.com/rust-lang/crates.io-index)" = "d6fdc5c4a02e69ce65046f1763a0181107038e02176233acb0b3351d7cc588f9" "checksum ordermap 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "a86ed3f5f244b372d6b1a00b72ef7f8876d0bc6a78a4c9985c53614041512063" "checksum owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "cdf84f41639e037b484f93433aa3897863b561ed65c6e59c7073d7c561710f37" +"checksum parking_lot 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)" = "149d8f5b97f3c1133e3cfcd8886449959e856b557ff281e292b733d7c69e005e" "checksum parking_lot 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)" = 
"9fd9d732f2de194336fb02fe11f9eed13d9e76f13f4315b4d88a14ca411750cd" "checksum parking_lot_core 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)" = "538ef00b7317875071d5e00f603f24d16f0b474c1a5fc0ccb8b454ca72eafa79" "checksum percent-encoding 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "31010dd2e1ac33d5b46a5b413495239882813e0369f8ed8a5e266f173602f831" @@ -3002,7 +3114,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" "checksum radix_trie 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "03d0d770481e8af620ca61d3d304bf014f965d7f78e923dc58545e6a545070a9" "checksum rand 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)" = "15a732abf9d20f0ad8eeb6f909bf6868722d9a06e1e50802b6a70351f40b4eb1" "checksum rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "eba5f8cb59cc50ed56be8880a5c7b496bfd9bd26394e176bc67884094145c2c5" +"checksum rayon 0.9.0 (git+https://github.com/Zoxc/rayon.git?branch=fiber)" = "" "checksum rayon 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "80e811e76f1dbf68abf87a759083d34600017fc4e10b6bd5ad84a700f9dba4b1" +"checksum rayon-core 1.3.0 (git+https://github.com/Zoxc/rayon.git?branch=fiber)" = "" "checksum rayon-core 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9d24ad214285a7729b174ed6d3bcfcb80177807f959d95fafd5bfc5c4f201ac8" "checksum redox_syscall 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)" = "0d92eecebad22b767915e4d529f89f28ee96dbbf5a4810d2b844373f136417fd" "checksum redox_termios 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7e891cfe48e9100a70a3b6eb652fef28920c117d366339687bd5576160db0f76" @@ -3025,6 +3139,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" "checksum rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)" = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda" "checksum same-file 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "cfb6eded0b06a0b512c8ddbcf04089138c9b4362c2f696f3c3d76039d68f3637" "checksum schannel 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "fbaffce35eb61c5b00846e73128b0cd62717e7c0ec46abbec132370d013975b4" +"checksum scoped-tls 0.1.0 (git+https://github.com/Zoxc/scoped-tls.git)" = "" "checksum scoped-tls 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8674d439c964889e2476f474a3bf198cc9e199e77499960893bac5de7e9218a4" "checksum scopeguard 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "94258f53601af11e6a49f722422f6e3425c52b06245a5cf9bc09908b174f5e27" "checksum semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403" From 3898012bd89eedcc95976761d287805407870388 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20K=C3=A5re=20Alsaker?= Date: Tue, 17 Apr 2018 17:36:53 +0200 Subject: [PATCH 34/42] Fix tests --- src/test/compile-fail/issue-20413.rs | 1 + src/test/compile-fail/issue-21946.rs | 1 + src/test/compile-fail/issue-23122-1.rs | 1 + src/test/compile-fail/issue-23122-2.rs | 1 + src/test/ui/cycle-trait-supertrait-indirect.stderr | 5 ----- src/test/ui/impl-trait/auto-trait-leak.stderr | 1 - src/test/ui/span/issue-35987.rs | 1 + src/test/ui/span/issue-35987.stderr | 13 +++++++++++-- 8 files changed, 16 insertions(+), 8 deletions(-) diff --git a/src/test/compile-fail/issue-20413.rs b/src/test/compile-fail/issue-20413.rs index a48c03aa178c3..a19584f80973f 100644 --- 
a/src/test/compile-fail/issue-20413.rs +++ b/src/test/compile-fail/issue-20413.rs @@ -18,6 +18,7 @@ struct NoData; impl Foo for T where NoData: Foo { //~^ ERROR: overflow evaluating the requirement fn answer(self) { + //~^ ERROR: overflow evaluating the requirement let val: NoData = NoData; } } diff --git a/src/test/compile-fail/issue-21946.rs b/src/test/compile-fail/issue-21946.rs index 0d652be5c2611..d93b9caa20a91 100644 --- a/src/test/compile-fail/issue-21946.rs +++ b/src/test/compile-fail/issue-21946.rs @@ -17,6 +17,7 @@ struct FooStruct; impl Foo for FooStruct { //~^ ERROR overflow evaluating the requirement `::A` type A = ::A; + //~^ ERROR overflow evaluating the requirement `::A` } fn main() {} diff --git a/src/test/compile-fail/issue-23122-1.rs b/src/test/compile-fail/issue-23122-1.rs index 36d8450848d05..7d591c1bad02e 100644 --- a/src/test/compile-fail/issue-23122-1.rs +++ b/src/test/compile-fail/issue-23122-1.rs @@ -17,6 +17,7 @@ struct GetNext { t: T } impl Next for GetNext { //~^ ERROR overflow evaluating the requirement type Next = as Next>::Next; + //~^ ERROR overflow evaluating the requirement } fn main() {} diff --git a/src/test/compile-fail/issue-23122-2.rs b/src/test/compile-fail/issue-23122-2.rs index faaf78f894b05..21d5d5cbd0beb 100644 --- a/src/test/compile-fail/issue-23122-2.rs +++ b/src/test/compile-fail/issue-23122-2.rs @@ -17,6 +17,7 @@ struct GetNext { t: T } impl Next for GetNext { //~^ ERROR overflow evaluating the requirement type Next = as Next>::Next; + //~^ ERROR overflow evaluating the requirement } fn main() {} diff --git a/src/test/ui/cycle-trait-supertrait-indirect.stderr b/src/test/ui/cycle-trait-supertrait-indirect.stderr index 85681b478e21d..ae1d164bc9b93 100644 --- a/src/test/ui/cycle-trait-supertrait-indirect.stderr +++ b/src/test/ui/cycle-trait-supertrait-indirect.stderr @@ -10,11 +10,6 @@ note: ...which requires computing the supertraits of `C`... LL | trait C: B { } | ^^^^^^^^^^ = note: ...which again requires computing the supertraits of `B`, completing the cycle -note: cycle used when computing the supertraits of `A` - --> $DIR/cycle-trait-supertrait-indirect.rs:14:1 - | -LL | trait A: B { - | ^^^^^^^^^^ error: aborting due to previous error diff --git a/src/test/ui/impl-trait/auto-trait-leak.stderr b/src/test/ui/impl-trait/auto-trait-leak.stderr index 3b20451b10215..2eeec7cf6fd49 100644 --- a/src/test/ui/impl-trait/auto-trait-leak.stderr +++ b/src/test/ui/impl-trait/auto-trait-leak.stderr @@ -50,7 +50,6 @@ note: ...which requires processing `cycle1::{{impl-Trait}}`... 
LL | fn cycle1() -> impl Clone { | ^^^^^^^^^^ = note: ...which again requires processing `cycle1`, completing the cycle -note: cycle used when type-checking all item bodies error: aborting due to 3 previous errors diff --git a/src/test/ui/span/issue-35987.rs b/src/test/ui/span/issue-35987.rs index 19e05f33825f7..5bd516fa46ab7 100644 --- a/src/test/ui/span/issue-35987.rs +++ b/src/test/ui/span/issue-35987.rs @@ -17,6 +17,7 @@ impl Add for Foo { type Output = usize; fn add(self, rhs: Self) -> Self::Output { + //~^ ERROR ambiguous associated type unimplemented!(); } } diff --git a/src/test/ui/span/issue-35987.stderr b/src/test/ui/span/issue-35987.stderr index 1dd45bb1e5efe..b53b514f8a61d 100644 --- a/src/test/ui/span/issue-35987.stderr +++ b/src/test/ui/span/issue-35987.stderr @@ -8,6 +8,15 @@ help: possible better candidate is found in another module, you can import it in LL | use std::ops::Add; | -error: aborting due to previous error +error[E0223]: ambiguous associated type + --> $DIR/issue-35987.rs:19:32 + | +LL | fn add(self, rhs: Self) -> Self::Output { + | ^^^^^^^^^^^^ ambiguous associated type + | + = note: specify the type using the syntax ` as Trait>::Output` + +error: aborting due to 2 previous errors -For more information about this error, try `rustc --explain E0404`. +Some errors occurred: E0223, E0404. +For more information about an error, try `rustc --explain E0223`. From dd92f61180268d28c479bc0089d25c955a10776b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20K=C3=A5re=20Alsaker?= Date: Tue, 17 Apr 2018 17:50:15 +0200 Subject: [PATCH 35/42] Show backtraces and crashes for rustc --- src/bootstrap/bin/rustc.rs | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/src/bootstrap/bin/rustc.rs b/src/bootstrap/bin/rustc.rs index b6ae824c37601..5fd54dec525d7 100644 --- a/src/bootstrap/bin/rustc.rs +++ b/src/bootstrap/bin/rustc.rs @@ -38,6 +38,19 @@ use std::str::FromStr; use std::time::Instant; fn main() { + // Show crash dialog + #[cfg(windows)] + { + extern "system" { + fn SetErrorMode(mode: u32) -> u32; + } + const SEM_NOGPFAULTERRORBOX: u32 = 0x0002; + unsafe { + let mode = SetErrorMode(0) & !SEM_NOGPFAULTERRORBOX; + SetErrorMode(mode); + } + } + let mut args = env::args_os().skip(1).collect::>(); // Append metadata suffix for internal crates. See the corresponding entry @@ -100,6 +113,7 @@ fn main() { dylib_path.insert(0, PathBuf::from(&libdir)); let mut cmd = Command::new(rustc); + cmd.env("RUST_BACKTRACE", "1"); cmd.args(&args) .arg("--cfg") .arg(format!("stage{}", stage)) From 3bec83836cd9ee27923f6be0eeab76b70ab72d1d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20K=C3=A5re=20Alsaker?= Date: Tue, 17 Apr 2018 20:09:40 +0200 Subject: [PATCH 36/42] Fix optimization_fuel --- src/librustc/session/mod.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/librustc/session/mod.rs b/src/librustc/session/mod.rs index 65ee22d3af235..6757283f020d7 100644 --- a/src/librustc/session/mod.rs +++ b/src/librustc/session/mod.rs @@ -835,10 +835,10 @@ impl Session { /// We want to know if we're allowed to do an optimization for crate foo from -z fuel=foo=n. /// This expends fuel if applicable, and records fuel if applicable. 
pub fn consider_optimizing String>(&self, crate_name: &str, msg: T) -> bool { - assert!(self.query_threads() == 1); let mut ret = true; match self.optimization_fuel_crate { Some(ref c) if c == crate_name => { + assert!(self.query_threads() == 1); let fuel = self.optimization_fuel_limit.get(); ret = fuel != 0; if fuel == 0 && !self.out_of_fuel.get() { @@ -852,6 +852,7 @@ impl Session { } match self.print_fuel_crate { Some(ref c) if c == crate_name => { + assert!(self.query_threads() == 1); self.print_fuel.set(self.print_fuel.get() + 1); } _ => {} From d918e342dfb7ed0558b0efcfa2f4af8f2c31ece0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20K=C3=A5re=20Alsaker?= Date: Tue, 17 Apr 2018 21:53:57 +0200 Subject: [PATCH 37/42] Force 8 query threads --- src/librustc/session/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/librustc/session/mod.rs b/src/librustc/session/mod.rs index 6757283f020d7..cf0865f27e6a9 100644 --- a/src/librustc/session/mod.rs +++ b/src/librustc/session/mod.rs @@ -863,7 +863,7 @@ impl Session { /// Returns the number of query threads that should be used for this /// compilation pub fn query_threads(&self) -> usize { - self.opts.debugging_opts.query_threads.unwrap_or(1)/*8*/ + 8 } /// Returns the number of codegen threads that should be used for this From 7ddc1fb23cb8e9ccf0a132326dda6ff482819fbc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20K=C3=A5re=20Alsaker?= Date: Wed, 18 Apr 2018 02:06:03 +0200 Subject: [PATCH 38/42] Revert "Force 8 query threads" --- src/librustc/session/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/librustc/session/mod.rs b/src/librustc/session/mod.rs index cf0865f27e6a9..6757283f020d7 100644 --- a/src/librustc/session/mod.rs +++ b/src/librustc/session/mod.rs @@ -863,7 +863,7 @@ impl Session { /// Returns the number of query threads that should be used for this /// compilation pub fn query_threads(&self) -> usize { - 8 + self.opts.debugging_opts.query_threads.unwrap_or(1)/*8*/ } /// Returns the number of codegen threads that should be used for this From 8a1d1de9dcfdd83bd10ec62fd79e7a115af3b272 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20K=C3=A5re=20Alsaker?= Date: Wed, 18 Apr 2018 04:35:40 +0200 Subject: [PATCH 39/42] wip --- src/librustc/ty/maps/config.rs | 2 +- src/librustc/ty/maps/plumbing.rs | 435 +++++++++++++++---------------- 2 files changed, 206 insertions(+), 231 deletions(-) diff --git a/src/librustc/ty/maps/config.rs b/src/librustc/ty/maps/config.rs index 664c84f598660..a0dc7fa52fdff 100644 --- a/src/librustc/ty/maps/config.rs +++ b/src/librustc/ty/maps/config.rs @@ -23,7 +23,7 @@ use syntax_pos::symbol::InternedString; pub trait QueryConfig { type Key: Eq + Hash + Clone; - type Value; + type Value: Clone; } pub(super) trait QueryDescription<'tcx>: QueryConfig { diff --git a/src/librustc/ty/maps/plumbing.rs b/src/librustc/ty/maps/plumbing.rs index 5be571c29e2fc..55a7f8cd259c2 100644 --- a/src/librustc/ty/maps/plumbing.rs +++ b/src/librustc/ty/maps/plumbing.rs @@ -15,18 +15,32 @@ use dep_graph::{DepNodeIndex, DepNode, DepKind, DepNodeColor}; use errors::DiagnosticBuilder; use errors::Level; +use errors::Diagnostic; use ty::tls; use ty::{TyCtxt}; use ty::maps::Query; use ty::maps::config::QueryDescription; -use ty::maps::job::{QueryResult, QueryInfo}; +use ty::maps::job::{QueryJob, QueryResult, QueryInfo}; use ty::item_path; +use util::common::{profq_msg, ProfileQueriesMsg}; + use rustc_data_structures::fx::{FxHashMap}; -use rustc_data_structures::sync::LockGuard; +use 
rustc_data_structures::sync::{Lrc, Lock, LockGuard}; use std::marker::PhantomData; +use std::mem; +use std::ptr; +use std::collections::hash_map::Entry; use syntax_pos::Span; +use std::panic; + +#[cfg(parallel_queries)] +use rayon_core; + +#[cfg(not(parallel_queries))] +use errors::FatalError; + pub(super) struct QueryMap<'tcx, D: QueryDescription<'tcx>> { phantom: PhantomData<(D, &'tcx ())>, pub(super) map: FxHashMap>>, @@ -62,6 +76,157 @@ pub(super) trait GetCacheInternal<'tcx>: QueryDescription<'tcx> + Sized { -> LockGuard<'a, QueryMap<'tcx, Self>>; } +// If enabled, send a message to the profile-queries thread +macro_rules! profq_msg { + ($tcx:expr, $msg:expr) => { + if cfg!(debug_assertions) { + if $tcx.sess.profile_queries() { + profq_msg($tcx.sess, $msg) + } + } + } +} + +// If enabled, format a key using its debug string, which can be +// expensive to compute (in terms of time). +macro_rules! profq_key { + ($tcx:expr, $key:expr) => { + if cfg!(debug_assertions) { + if $tcx.sess.profile_queries_and_keys() { + Some(format!("{:?}", $key)) + } else { None } + } else { None } + } +} + +pub(super) struct JobOwner<'a, 'tcx: 'a, Q: QueryDescription<'tcx> + 'a> { + map: &'a Lock>, + key: Q::Key, + job: Lrc>, +} + +impl<'a, 'tcx, Q: QueryDescription<'tcx>> JobOwner<'a, 'tcx, Q> { + /// Either get the lock of the query map, allowing us to + /// start executing the query, or it returns with the result of the query. + /// If the query already executed and panicked, this will fatal error / silently panic + pub(super) fn try_get( + tcx: TyCtxt<'a, 'tcx, '_>, + map: &'a Lock>, + span: Span, + query: Query<'tcx>, + key: &Q::Key, + ) -> TryGetJob<'a, 'tcx, Q> { + loop { + let mut lock = map.borrow_mut(); + let job = match lock.map.entry((*key).clone()) { + Entry::Occupied(entry) => { + match *entry.get() { + QueryResult::Started(ref job) => job.clone(), + QueryResult::Complete(ref value) => { + profq_msg!(tcx, ProfileQueriesMsg::CacheHit); + let result = Ok((value.value.clone(), value.index)); + return TryGetJob::JobCompleted(result); + }, + QueryResult::Poisoned => { + #[cfg(not(parallel_queries))] + { + FatalError.raise(); + } + #[cfg(parallel_queries)] + { + panic::resume_unwind(Box::new(rayon_core::PoisonedJob)) + } + }, + } + } + Entry::Vacant(entry) => { + // No job entry for this query. Return a new one to be started later + return tls::with_related_context(tcx, |icx| { + let info = QueryInfo { + span, + query, + }; + let job = Lrc::new(QueryJob::new(info, icx.query.clone())); + let owner = JobOwner { + map, + job: job.clone(), + key: (*key).clone(), + }; + entry.insert(QueryResult::Started(job)); + TryGetJob::NotYetStarted(owner) + }) + } + }; + mem::drop(lock); + + if let Err(cycle) = job.await(tcx, span) { + return TryGetJob::JobCompleted(Err(cycle)); + } + } + } + + pub(super) fn complete(self, result: &Q::Value, dep_node_index: DepNodeIndex) { + // We can move out of `self` here because we `mem::forget` it below + let key = unsafe { ptr::read(&self.key) }; + let job = unsafe { ptr::read(&self.job) }; + let map = self.map; + + // Forget ourself so our destructor won't poison the query + mem::forget(self); + + let value = QueryValue::new(result.clone(), dep_node_index); + map.borrow_mut().map.insert(key, QueryResult::Complete(value)); + + job.signal_complete(); + } + + /// Creates a job for the query and updates the query map indicating that it started. + /// Then it changes ImplicitCtxt to point to the new query job while it executes. 
+ /// If the query panics, this updates the query map to indicate so. + pub(super) fn start<'lcx, F, R>( + &self, + tcx: TyCtxt<'_, 'tcx, 'lcx>, + compute: F) + -> Result<(R, Vec), CycleError<'tcx>> + where + F: for<'b> FnOnce(TyCtxt<'b, 'tcx, 'lcx>) -> R + { + // The TyCtxt stored in TLS has the same global interner lifetime + // as `tcx`, so we use `with_related_context` to relate the 'gcx lifetimes + // when accessing the ImplicitCtxt + let r = tls::with_related_context(tcx, move |icx| { + // Update the ImplicitCtxt to point to our new query job + let icx = tls::ImplicitCtxt { + tcx, + query: Some(self.job.clone()), + layout_depth: icx.layout_depth, + task: icx.task, + waiter_cycle: None, + }; + + // Use the ImplicitCtxt while we execute the query + tls::enter_context(&icx, |icx| { + compute(icx.tcx) + }) + }); + + // Extract the diagnostic from the job + let diagnostics = mem::replace(&mut *self.job.diagnostics.lock(), Vec::new()); + + Ok((r, diagnostics)) + } +} + +impl<'a, 'tcx, Q: QueryDescription<'tcx>> Drop for JobOwner<'a, 'tcx, Q> { + fn drop(&mut self) { + // Poison the query so jobs waiting on it panic + self.map.borrow_mut().map.insert(self.key.clone(), QueryResult::Poisoned); + // Also signal the completion of the job, so waiters + // will continue execution + self.job.signal_complete(); + } +} + #[derive(Clone)] pub struct CycleError<'tcx> { /// The query and related span which uses the cycle @@ -70,14 +235,14 @@ pub struct CycleError<'tcx> { } /// The result of `try_get_lock` -pub(super) enum TryGetLock<'a, 'tcx: 'a, T, D: QueryDescription<'tcx> + 'a> { +pub(super) enum TryGetJob<'a, 'tcx: 'a, D: QueryDescription<'tcx> + 'a> { /// The query is not yet started. Contains a guard to the map eventually used to start it. - NotYetStarted(LockGuard<'a, QueryMap<'tcx, D>>), + NotYetStarted(JobOwner<'a, 'tcx, D>), /// The query was already completed. /// Returns the result of the query and its dep node index /// if it succeeded or a cycle error if it failed - JobCompleted(Result<(T, DepNodeIndex), CycleError<'tcx>>), + JobCompleted(Result<(D::Value, DepNodeIndex), CycleError<'tcx>>), } impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { @@ -182,29 +347,6 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } } -// If enabled, send a message to the profile-queries thread -macro_rules! profq_msg { - ($tcx:expr, $msg:expr) => { - if cfg!(debug_assertions) { - if $tcx.sess.profile_queries() { - profq_msg($tcx.sess, $msg) - } - } - } -} - -// If enabled, format a key using its debug string, which can be -// expensive to compute (in terms of time). -macro_rules! profq_key { - ($tcx:expr, $key:expr) => { - if cfg!(debug_assertions) { - if $tcx.sess.profile_queries_and_keys() { - Some(format!("{:?}", $key)) - } else { None } - } else { None } - } -} - macro_rules! handle_cycle_error { ([][$this: expr]) => {{ Value::from_cycle_error($this.global_tcx()) @@ -225,13 +367,7 @@ macro_rules! define_maps { use dep_graph::DepNodeIndex; use std::mem; - use errors::Diagnostic; - #[cfg(not(parallel_queries))] - use errors::FatalError; use rustc_data_structures::sync::{Lock, LockGuard}; - use rustc_data_structures::OnDrop; - use std::panic; - use rayon_core; use { rustc_data_structures::stable_hasher::HashStable, rustc_data_structures::stable_hasher::StableHasherResult, @@ -357,52 +493,6 @@ macro_rules! define_maps { DepNode::new(tcx, $node(*key)) } - /// Either get the lock of the query map, allowing us to - /// start executing the query, or it returns with the result of the query. 
- /// If the query already executed and panicked, this will fatal error / silently panic - fn try_get_lock( - tcx: TyCtxt<'a, $tcx, 'lcx>, - span: Span, - key: &$K - ) -> TryGetLock<'a, $tcx, $V, Self> - { - loop { - let lock = tcx.maps.$name.borrow_mut(); - let job = if let Some(value) = lock.map.get(key) { - match *value { - QueryResult::Started(ref job) => Some(job.clone()), - QueryResult::Complete(ref value) => { - profq_msg!(tcx, ProfileQueriesMsg::CacheHit); - let result = Ok(((&value.value).clone(), value.index)); - return TryGetLock::JobCompleted(result); - }, - QueryResult::Poisoned => { - #[cfg(not(parallel_queries))] - { - FatalError.raise(); - } - #[cfg(parallel_queries)] - { - panic::resume_unwind(Box::new(rayon_core::PoisonedJob)) - } - }, - } - } else { - None - }; - let job = if let Some(job) = job { - job - } else { - return TryGetLock::NotYetStarted(lock); - }; - mem::drop(lock); - - if let Err(cycle) = job.await(tcx, span) { - return TryGetLock::JobCompleted(Err(cycle)); - } - } - } - fn try_get_with(tcx: TyCtxt<'a, $tcx, 'lcx>, span: Span, key: $K) @@ -420,29 +510,22 @@ macro_rules! define_maps { ) ); - /// Get the lock used to start the query or - /// return the result of the completed query - macro_rules! get_lock_or_return { - () => {{ - match Self::try_get_lock(tcx, span, &key) { - TryGetLock::NotYetStarted(lock) => lock, - TryGetLock::JobCompleted(result) => { - return result.map(|(v, index)| { - tcx.dep_graph.read_index(index); - v - }) - } - } - }} - } - - let mut lock = get_lock_or_return!(); + let query = Query::$name(Clone::clone(&key)); + let job = match JobOwner::try_get(tcx, &tcx.maps.$name, span, query, &key) { + TryGetJob::NotYetStarted(job) => job, + TryGetJob::JobCompleted(result) => { + return result.map(|(v, index)| { + tcx.dep_graph.read_index(index); + v + }) + } + }; // Fast path for when incr. comp. is off. `to_dep_node` is // expensive for some DepKinds. if !tcx.dep_graph.is_fully_enabled() { let null_dep_node = DepNode::new_no_params(::dep_graph::DepKind::Null); - return Self::force_with_lock(tcx, key, span, lock, null_dep_node) + return Self::force_with_job(tcx, key, job, null_dep_node) .map(|(v, _)| v); } @@ -451,48 +534,37 @@ macro_rules! define_maps { if dep_node.kind.is_anon() { profq_msg!(tcx, ProfileQueriesMsg::ProviderBegin); - let res = Self::start_job(tcx, span, key, lock, |tcx| { + let res = job.start(tcx, |tcx| { tcx.dep_graph.with_anon_task(dep_node.kind, || { Self::compute_result(tcx.global_tcx(), key) }) })?; profq_msg!(tcx, ProfileQueriesMsg::ProviderEnd); - let (((result, dep_node_index), diagnostics), job) = res; + let ((result, dep_node_index), diagnostics) = res; tcx.dep_graph.read_index(dep_node_index); tcx.on_disk_query_result_cache .store_diagnostics_for_anon_node(dep_node_index, diagnostics); - let value = QueryValue::new(Clone::clone(&result), dep_node_index); - - tcx.maps - .$name - .borrow_mut() - .map - .insert(key, QueryResult::Complete(value)); - - job.signal_complete(); + job.complete(&result, dep_node_index); return Ok(result); } if !dep_node.kind.is_input() { - // try_mark_green_and_read may force queries. 
So we must drop our lock here - mem::drop(lock); if let Some(dep_node_index) = tcx.try_mark_green_and_read(&dep_node) { profq_msg!(tcx, ProfileQueriesMsg::CacheHit); return Self::load_from_disk_and_cache_in_memory(tcx, key, - span, + job, dep_node_index, &dep_node) } - lock = get_lock_or_return!(); } - match Self::force_with_lock(tcx, key, span, lock, dep_node) { + match Self::force_with_job(tcx, key, job, dep_node) { Ok((result, dep_node_index)) => { tcx.dep_graph.read_index(dep_node_index); Ok(result) @@ -525,76 +597,6 @@ macro_rules! define_maps { } } - /// Creates a job for the query and updates the query map indicating that it started. - /// Then it changes ImplicitCtxt to point to the new query job while it executes. - /// If the query panics, this updates the query map to indicate so. - fn start_job(tcx: TyCtxt<'_, $tcx, 'lcx>, - span: Span, - key: $K, - mut map: LockGuard<'_, QueryMap<$tcx, Self>>, - compute: F) - -> Result<((R, Vec), Lrc>), CycleError<$tcx>> - where F: for<'b> FnOnce(TyCtxt<'b, $tcx, 'lcx>) -> R - { - let query = Query::$name(Clone::clone(&key)); - - let entry = QueryInfo { - span, - query, - }; - - // The TyCtxt stored in TLS has the same global interner lifetime - // as `tcx`, so we use `with_related_context` to relate the 'gcx lifetimes - // when accessing the ImplicitCtxt - let (r, job) = ty::tls::with_related_context(tcx, move |icx| { - let job = Lrc::new(QueryJob::new(entry, icx.query.clone())); - - // Store the job in the query map and drop the lock to allow - // others to wait it - map.map.entry(key).or_insert(QueryResult::Started(job.clone())); - mem::drop(map); - - let r = { - let on_drop = OnDrop(|| { - // Poison the query so jobs waiting on it panic - tcx.maps - .$name - .borrow_mut() - .map - .insert(key, QueryResult::Poisoned); - // Also signal the completion of the job, so waiters - // will continue execution - job.signal_complete(); - }); - - // Update the ImplicitCtxt to point to our new query job - let icx = ty::tls::ImplicitCtxt { - tcx, - query: Some(job.clone()), - layout_depth: icx.layout_depth, - task: icx.task, - waiter_cycle: None, - }; - - // Use the ImplicitCtxt while we execute the query - let r = ty::tls::enter_context(&icx, |icx| { - compute(icx.tcx) - }); - - mem::forget(on_drop); - - r - }; - - (r, job) - }); - - // Extract the diagnostic from the job - let diagnostics: Vec<_> = mem::replace(&mut *job.diagnostics.lock(), Vec::new()); - - Ok(((r, diagnostics), job)) - } - fn compute_result(tcx: TyCtxt<'a, $tcx, 'lcx>, key: $K) -> $V { let provider = tcx.maps.providers[key.map_crate()].$name; provider(tcx.global_tcx(), key) @@ -602,7 +604,7 @@ macro_rules! define_maps { fn load_from_disk_and_cache_in_memory(tcx: TyCtxt<'a, $tcx, 'lcx>, key: $K, - span: Span, + job: JobOwner<'a, $tcx, Self>, dep_node_index: DepNodeIndex, dep_node: &DepNode) -> Result<$V, CycleError<$tcx>> @@ -632,8 +634,8 @@ macro_rules! define_maps { None }; - let (result, job) = if let Some(result) = result { - (result, None) + let result = if let Some(result) = result { + result } else { // We could not load a result from the on-disk cache, so // recompute. @@ -641,18 +643,14 @@ macro_rules! define_maps { // The diagnostics for this query have already been // promoted to the current session during // try_mark_green(), so we can ignore them here. 
- let ((result, _), job) = Self::start_job(tcx, - span, - key, - tcx.maps.$name.borrow_mut(), - |tcx| { + let (result, _) = job.start(tcx, |tcx| { // The dep-graph for this computation is already in // place tcx.dep_graph.with_ignore(|| { Self::compute_result(tcx, key) }) })?; - (result, Some(job)) + result }; // If -Zincremental-verify-ich is specified, re-hash results from @@ -685,15 +683,7 @@ macro_rules! define_maps { tcx.dep_graph.mark_loaded_from_cache(dep_node_index, true); } - let value = QueryValue::new(Clone::clone(&result), dep_node_index); - - tcx.maps - .$name - .borrow_mut() - .map - .insert(key, QueryResult::Complete(value)); - - job.map(|j| j.signal_complete()); + job.complete(&result, dep_node_index); Ok(result) } @@ -706,21 +696,20 @@ macro_rules! define_maps { -> Result<($V, DepNodeIndex), CycleError<$tcx>> { // We may be concurrently trying both execute and force a query // Ensure that only one of them runs the query - let lock = match Self::try_get_lock(tcx, span, &key) { - TryGetLock::NotYetStarted(lock) => lock, - TryGetLock::JobCompleted(result) => return result, + let query = Query::$name(Clone::clone(&key)); + let job = match JobOwner::try_get(tcx, &tcx.maps.$name, span, query, &key) { + TryGetJob::NotYetStarted(job) => job, + TryGetJob::JobCompleted(result) => return result, }; - Self::force_with_lock(tcx, - key, - span, - lock, - dep_node) + Self::force_with_job(tcx, + key, + job, + dep_node) } - fn force_with_lock(tcx: TyCtxt<'a, $tcx, 'lcx>, + fn force_with_job(tcx: TyCtxt<'a, $tcx, 'lcx>, key: $K, - span: Span, - map: LockGuard<'_, QueryMap<$tcx, Self>>, + job: JobOwner<'_, $tcx, Self>, dep_node: DepNode) -> Result<($V, DepNodeIndex), CycleError<$tcx>> { // If the following assertion triggers, it can have two reasons: @@ -735,11 +724,7 @@ macro_rules! define_maps { key, dep_node); profq_msg!(tcx, ProfileQueriesMsg::ProviderBegin); - let res = Self::start_job(tcx, - span, - key, - map, - |tcx| { + let res = job.start(tcx, |tcx| { if dep_node.kind.is_eval_always() { tcx.dep_graph.with_eval_always_task(dep_node, tcx, @@ -754,7 +739,7 @@ macro_rules! define_maps { })?; profq_msg!(tcx, ProfileQueriesMsg::ProviderEnd); - let (((result, dep_node_index), diagnostics), job) = res; + let ((result, dep_node_index), diagnostics) = res; if tcx.sess.opts.debugging_opts.query_dep_graph { tcx.dep_graph.mark_loaded_from_cache(dep_node_index, false); @@ -765,17 +750,7 @@ macro_rules! define_maps { .store_diagnostics(dep_node_index, diagnostics); } - let value = QueryValue::new(Clone::clone(&result), dep_node_index); - - tcx.maps - .$name - .borrow_mut() - .map - .insert(key, QueryResult::Complete(value)); - - let job: Lrc = job; - - job.signal_complete(); + job.complete(&result, dep_node_index); Ok((result, dep_node_index)) } From 1336fb0d16f496512610aa33524ac8b30e2ecb82 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20K=C3=A5re=20Alsaker?= Date: Wed, 18 Apr 2018 22:00:39 +0200 Subject: [PATCH 40/42] wip --- src/librustc/ty/maps/plumbing.rs | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/src/librustc/ty/maps/plumbing.rs b/src/librustc/ty/maps/plumbing.rs index 55a7f8cd259c2..5c0f97c5194f2 100644 --- a/src/librustc/ty/maps/plumbing.rs +++ b/src/librustc/ty/maps/plumbing.rs @@ -99,6 +99,8 @@ macro_rules! profq_key { } } +/// A type representing the responsibility to execute the job in the `job` field. +/// This will poison the relevant query if dropped. 
pub(super) struct JobOwner<'a, 'tcx: 'a, Q: QueryDescription<'tcx> + 'a> { map: &'a Lock>, key: Q::Key, @@ -165,6 +167,8 @@ impl<'a, 'tcx, Q: QueryDescription<'tcx>> JobOwner<'a, 'tcx, Q> { } } + /// Completes the query by updating the query map with the `result`, + /// signals the waiter and forgets the JobOwner, so it won't poison the query pub(super) fn complete(self, result: &Q::Value, dep_node_index: DepNodeIndex) { // We can move out of `self` here because we `mem::forget` it below let key = unsafe { ptr::read(&self.key) }; @@ -180,14 +184,14 @@ impl<'a, 'tcx, Q: QueryDescription<'tcx>> JobOwner<'a, 'tcx, Q> { job.signal_complete(); } - /// Creates a job for the query and updates the query map indicating that it started. - /// Then it changes ImplicitCtxt to point to the new query job while it executes. - /// If the query panics, this updates the query map to indicate so. + /// Executes a job by changing the ImplicitCtxt to point to the + /// new query job while it executes. It returns the diagnostics + /// captured during execution and the actual result. pub(super) fn start<'lcx, F, R>( &self, tcx: TyCtxt<'_, 'tcx, 'lcx>, compute: F) - -> Result<(R, Vec), CycleError<'tcx>> + -> (R, Vec) where F: for<'b> FnOnce(TyCtxt<'b, 'tcx, 'lcx>) -> R { @@ -538,7 +542,7 @@ macro_rules! define_maps { tcx.dep_graph.with_anon_task(dep_node.kind, || { Self::compute_result(tcx.global_tcx(), key) }) - })?; + }); profq_msg!(tcx, ProfileQueriesMsg::ProviderEnd); let ((result, dep_node_index), diagnostics) = res; @@ -649,7 +653,7 @@ macro_rules! define_maps { tcx.dep_graph.with_ignore(|| { Self::compute_result(tcx, key) }) - })?; + }); result }; @@ -736,7 +740,7 @@ macro_rules! define_maps { key, Self::compute_result) } - })?; + }); profq_msg!(tcx, ProfileQueriesMsg::ProviderEnd); let ((result, dep_node_index), diagnostics) = res; From 64895eef5bd8784409728240866fb81d34837322 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20K=C3=A5re=20Alsaker?= Date: Thu, 19 Apr 2018 19:22:47 +0200 Subject: [PATCH 41/42] Disable tests --- src/bootstrap/builder.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/bootstrap/builder.rs b/src/bootstrap/builder.rs index 367e2038f4fdc..9bf3eb9d13aa0 100644 --- a/src/bootstrap/builder.rs +++ b/src/bootstrap/builder.rs @@ -311,7 +311,7 @@ impl<'a> Builder<'a> { tool::RustInstaller, tool::Cargo, tool::Rls, tool::Rustdoc, tool::Clippy, native::Llvm, tool::Rustfmt, tool::Miri, native::Lld), Kind::Check => describe!(check::Std, check::Test, check::Rustc, check::CodegenBackend), - Kind::Test => describe!(test::Tidy, test::Bootstrap/**/, test::Ui, test::RunPass, + Kind::Test => describe!(test::Tidy, test::Bootstrap/*, test::Ui, test::RunPass, test::CompileFail, test::ParseFail, test::RunFail, test::RunPassValgrind, test::MirOpt, test::Codegen, test::CodegenUnits, test::Incremental, test::Debuginfo, test::UiFullDeps, test::RunPassFullDeps, test::RunFailFullDeps, @@ -326,7 +326,7 @@ impl<'a> Builder<'a> { test::Rustfmt, test::Miri, test::Clippy, test::RustdocJS, test::RustdocTheme, test::RustdocUi, // Run run-make last, since these won't pass without make on Windows - test::RunMake/**/), + test::RunMake*/), Kind::Bench => describe!(test::Crate, test::CrateLibrustc), Kind::Doc => describe!(doc::UnstableBook, doc::UnstableBookGen, doc::TheBook, doc::Standalone, doc::Std, doc::Test, doc::WhitelistedRustc, doc::Rustc, From fc85d9b268666e078c2b0f83ea4a58cdd049f3e8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20K=C3=A5re=20Alsaker?= Date: Thu, 19 Apr 2018 19:42:15 
+0200
Subject: [PATCH 42/42] fix

---
 src/librustc/ty/maps/plumbing.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/librustc/ty/maps/plumbing.rs b/src/librustc/ty/maps/plumbing.rs
index 5c0f97c5194f2..5c9f66bb4ec77 100644
--- a/src/librustc/ty/maps/plumbing.rs
+++ b/src/librustc/ty/maps/plumbing.rs
@@ -217,7 +217,7 @@ impl<'a, 'tcx, Q: QueryDescription<'tcx>> JobOwner<'a, 'tcx, Q> {
         // Extract the diagnostic from the job
         let diagnostics = mem::replace(&mut *self.job.diagnostics.lock(), Vec::new());

-        Ok((r, diagnostics))
+        (r, diagnostics)
     }
 }
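The claim-or-wait loop in `JobOwner::try_get` above is the core of the new scheme: the first thread to find no entry for a key installs a `QueryResult::Started` job and becomes responsible for computing it, while any thread that finds a `Started` entry drops the map lock and blocks on that job before re-checking the map. The stand-alone sketch below reproduces only that loop with standard-library types; `Entry`, `Claim`, `claim_or_wait` and `publish` are illustrative names rather than compiler code, a `Condvar` stands in for `QueryJob::await`/`signal_complete`, and cycle detection, poisoning and diagnostics are left out.

use std::collections::HashMap;
use std::sync::{Arc, Condvar, Mutex};

type Job = Arc<(Mutex<bool>, Condvar)>; // "done" flag plus a way to wake waiters

enum Entry {
    Started(Job),  // another thread is computing the value; wait on its job
    Complete(u64), // cached result
}

enum Claim {
    MustCompute(Job), // this thread won the race: compute, then publish
    Ready(u64),       // the value was already cached
}

fn claim_or_wait(map: &Mutex<HashMap<String, Entry>>, key: &str) -> Claim {
    loop {
        let job = {
            let mut m = map.lock().unwrap();
            match m.get(key) {
                Some(Entry::Complete(v)) => return Claim::Ready(*v),
                Some(Entry::Started(job)) => job.clone(),
                None => {
                    let job: Job = Arc::new((Mutex::new(false), Condvar::new()));
                    m.insert(key.to_owned(), Entry::Started(job.clone()));
                    return Claim::MustCompute(job);
                }
            }
        }; // map lock released here, before blocking, like `mem::drop(lock)` in try_get
        let (done, cvar) = &*job;
        let mut finished = done.lock().unwrap();
        while !*finished {
            finished = cvar.wait(finished).unwrap();
        }
        // Re-check the map: the winner has stored Entry::Complete by now.
    }
}

// Winner side: publish the result, then wake everyone blocked on the job.
fn publish(map: &Mutex<HashMap<String, Entry>>, key: &str, job: &Job, value: u64) {
    map.lock().unwrap().insert(key.to_owned(), Entry::Complete(value));
    *job.0.lock().unwrap() = true;
    job.1.notify_all();
}

fn main() {
    let map = Mutex::new(HashMap::new());
    if let Claim::MustCompute(job) = claim_or_wait(&map, "typeck(foo)") {
        publish(&map, "typeck(foo)", &job, 42);
    }
    assert!(matches!(claim_or_wait(&map, "typeck(foo)"), Claim::Ready(42)));
}

Dropping the map lock before blocking matters: waiters must not hold the lock that the winning thread needs in order to publish its result.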
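`JobOwner::complete` and the `Drop` impl above are the other half of the pattern: a finished owner stores its value and defuses the destructor with `ptr::read` plus `mem::forget`, while an owner that is dropped without completing (for instance because the provider panicked) poisons its entry so waiters do not hang on a value that will never arrive. A minimal version of that poison-on-drop idea follows, under the same caveat: `Slot`, `Owner`, `claim` and `finish` are made-up names, and the real code also signals waiting jobs and hands back the captured diagnostics.

use std::collections::HashMap;
use std::mem;
use std::ptr;
use std::sync::Mutex;

enum Slot {
    Started,       // an Owner exists and is computing the value
    Complete(u64), // the finished value
    Poisoned,      // the Owner was dropped before finishing (e.g. a panic)
}

struct Owner<'a> {
    map: &'a Mutex<HashMap<String, Slot>>,
    key: String,
}

impl<'a> Owner<'a> {
    fn claim(map: &'a Mutex<HashMap<String, Slot>>, key: &str) -> Owner<'a> {
        map.lock().unwrap().insert(key.to_owned(), Slot::Started);
        Owner { map, key: key.to_owned() }
    }

    // Mirrors JobOwner::complete: move the key out with ptr::read, then
    // mem::forget(self) so the Drop impl below can never overwrite the result.
    fn finish(self, value: u64) {
        let key = unsafe { ptr::read(&self.key) };
        let map = self.map;
        mem::forget(self);
        map.lock().unwrap().insert(key, Slot::Complete(value));
    }
}

impl<'a> Drop for Owner<'a> {
    // Runs only if finish() was never called; waiters see Poisoned instead of hanging.
    fn drop(&mut self) {
        self.map.lock().unwrap().insert(self.key.clone(), Slot::Poisoned);
    }
}

fn main() {
    let map = Mutex::new(HashMap::new());
    Owner::claim(&map, "typeck(foo)").finish(42);
    assert!(matches!(map.lock().unwrap()["typeck(foo)"], Slot::Complete(42)));

    // A panicking "provider" never calls finish(), so the destructor poisons the entry.
    let _ = std::panic::catch_unwind(|| {
        let _owner = Owner::claim(&map, "typeck(bar)");
        panic!("provider failed");
    });
    assert!(matches!(map.lock().unwrap()["typeck(bar)"], Slot::Poisoned));
}

Keeping completion and poisoning mutually exclusive is the point of the `mem::forget`: once the result has been stored, the destructor must never run and replace it with `Poisoned`.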