diff --git a/src/Cargo.lock b/src/Cargo.lock index 0df5afe0b165c..7a629727442da 100644 --- a/src/Cargo.lock +++ b/src/Cargo.lock @@ -67,6 +67,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "arena" version = "0.0.0" +dependencies = [ + "rustc_data_structures 0.0.0", +] [[package]] name = "atty" @@ -444,7 +447,7 @@ dependencies = [ "advapi32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "commoncrypto 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "hex 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", - "openssl 0.9.22 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl 0.9.23 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -480,7 +483,7 @@ dependencies = [ "curl-sys 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.34 (registry+https://github.com/rust-lang/crates.io-index)", "openssl-probe 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "openssl-sys 0.9.22 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-sys 0.9.23 (registry+https://github.com/rust-lang/crates.io-index)", "socket2 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -493,7 +496,7 @@ dependencies = [ "cc 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.34 (registry+https://github.com/rust-lang/crates.io-index)", "libz-sys 1.0.18 (registry+https://github.com/rust-lang/crates.io-index)", - "openssl-sys 0.9.22 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-sys 0.9.23 (registry+https://github.com/rust-lang/crates.io-index)", "pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", "vcpkg 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", @@ -731,7 +734,7 @@ dependencies = [ "libc 0.2.34 (registry+https://github.com/rust-lang/crates.io-index)", "libgit2-sys 0.6.16 (registry+https://github.com/rust-lang/crates.io-index)", "openssl-probe 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "openssl-sys 0.9.22 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-sys 0.9.23 (registry+https://github.com/rust-lang/crates.io-index)", "url 1.6.0 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -956,6 +959,11 @@ name = "lazy_static" version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "lazy_static" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "lazycell" version = "0.5.1" @@ -984,7 +992,7 @@ dependencies = [ "libc 0.2.34 (registry+https://github.com/rust-lang/crates.io-index)", "libssh2-sys 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", "libz-sys 1.0.18 (registry+https://github.com/rust-lang/crates.io-index)", - "openssl-sys 0.9.22 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-sys 0.9.23 (registry+https://github.com/rust-lang/crates.io-index)", "pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -996,7 +1004,7 @@ dependencies = [ "cmake 0.1.28 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.34 (registry+https://github.com/rust-lang/crates.io-index)", "libz-sys 1.0.18 
(registry+https://github.com/rust-lang/crates.io-index)", - "openssl-sys 0.9.22 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-sys 0.9.23 (registry+https://github.com/rust-lang/crates.io-index)", "pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1227,14 +1235,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "openssl" -version = "0.9.22" +version = "0.9.23" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bitflags 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)", "foreign-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.34 (registry+https://github.com/rust-lang/crates.io-index)", - "openssl-sys 0.9.22 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-sys 0.9.23 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1244,7 +1252,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "openssl-sys" -version = "0.9.22" +version = "0.9.23" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cc 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1263,6 +1271,14 @@ dependencies = [ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "owning_ref" +version = "0.3.3" +source = "git+https://github.com/Zoxc/owning-ref-rs.git#88a3b66552608ccb0348657f7a56218c72e15a6e" +dependencies = [ + "stable_deref_trait 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "owning_ref" version = "0.3.3" @@ -1289,6 +1305,27 @@ dependencies = [ "unwind 0.0.0", ] +[[package]] +name = "parking_lot" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", + "parking_lot_core 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "parking_lot_core" +version = "0.2.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.34 (registry+https://github.com/rust-lang/crates.io-index)", + "rand 0.3.18 (registry+https://github.com/rust-lang/crates.io-index)", + "smallvec 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "percent-encoding" version = "1.0.1" @@ -1347,6 +1384,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" name = "proc_macro" version = "0.0.0" dependencies = [ + "rustc_data_structures 0.0.0", "rustc_errors 0.0.0", "syntax 0.0.0", "syntax_pos 0.0.0", @@ -1604,11 +1642,12 @@ dependencies = [ "graphviz 0.0.0", "jobserver 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", - "owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", + "owning_ref 0.3.3 (git+https://github.com/Zoxc/owning-ref-rs.git)", "rustc_back 0.0.0", "rustc_const_math 0.0.0", "rustc_data_structures 0.0.0", "rustc_errors 0.0.0", + "scoped-tls 0.1.0 (git+https://github.com/Zoxc/scoped-tls.git)", "serialize 0.0.0", "syntax 
0.0.0", "syntax_pos 0.0.0", @@ -1687,6 +1726,7 @@ dependencies = [ "graphviz 0.0.0", "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", "rustc 0.0.0", + "rustc_data_structures 0.0.0", "rustc_errors 0.0.0", "rustc_mir 0.0.0", "syntax 0.0.0", @@ -1727,7 +1767,10 @@ dependencies = [ name = "rustc_data_structures" version = "0.0.0" dependencies = [ + "cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", + "owning_ref 0.3.3 (git+https://github.com/Zoxc/owning-ref-rs.git)", + "parking_lot 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)", "serialize 0.0.0", ] @@ -1740,7 +1783,7 @@ dependencies = [ "env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", "graphviz 0.0.0", "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", - "owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", + "owning_ref 0.3.3 (git+https://github.com/Zoxc/owning-ref-rs.git)", "rustc 0.0.0", "rustc_allocator 0.0.0", "rustc_back 0.0.0", @@ -1827,7 +1870,7 @@ version = "0.0.0" dependencies = [ "flate2 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", - "owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", + "owning_ref 0.3.3 (git+https://github.com/Zoxc/owning-ref-rs.git)", "proc_macro 0.0.0", "rustc 0.0.0", "rustc_back 0.0.0", @@ -1875,6 +1918,7 @@ dependencies = [ "rustc 0.0.0", "rustc_const_eval 0.0.0", "rustc_const_math 0.0.0", + "rustc_data_structures 0.0.0", "rustc_errors 0.0.0", "syntax 0.0.0", "syntax_pos 0.0.0", @@ -1900,6 +1944,7 @@ name = "rustc_privacy" version = "0.0.0" dependencies = [ "rustc 0.0.0", + "rustc_data_structures 0.0.0", "rustc_typeck 0.0.0", "syntax 0.0.0", "syntax_pos 0.0.0", @@ -1943,7 +1988,7 @@ dependencies = [ "jobserver 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", "num_cpus 1.7.0 (registry+https://github.com/rust-lang/crates.io-index)", - "owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", + "owning_ref 0.3.3 (git+https://github.com/Zoxc/owning-ref-rs.git)", "rustc 0.0.0", "rustc-demangle 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", "rustc_allocator 0.0.0", @@ -1970,7 +2015,7 @@ dependencies = [ "ar 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "flate2 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", - "owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", + "owning_ref 0.3.3 (git+https://github.com/Zoxc/owning-ref-rs.git)", "rustc 0.0.0", "rustc_back 0.0.0", "rustc_data_structures 0.0.0", @@ -2055,6 +2100,11 @@ dependencies = [ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "scoped-tls" +version = "0.1.0" +source = "git+https://github.com/Zoxc/scoped-tls.git#324e723273d0304abd421032fc96044d72c4d34a" + [[package]] name = "scoped-tls" version = "0.1.0" @@ -2179,6 +2229,11 @@ name = "smallvec" version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "smallvec" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "socket2" version = "0.2.4" @@ -2296,6 +2351,7 @@ dependencies = [ "rustc_cratesio_shim 0.0.0", 
"rustc_data_structures 0.0.0", "rustc_errors 0.0.0", + "scoped-tls 0.1.0 (git+https://github.com/Zoxc/scoped-tls.git)", "serialize 0.0.0", "syntax_pos 0.0.0", ] @@ -2306,6 +2362,7 @@ version = "0.0.0" dependencies = [ "fmt_macros 0.0.0", "proc_macro 0.0.0", + "rustc_data_structures 0.0.0", "rustc_errors 0.0.0", "syntax 0.0.0", "syntax_pos 0.0.0", @@ -2316,6 +2373,7 @@ name = "syntax_pos" version = "0.0.0" dependencies = [ "rustc_data_structures 0.0.0", + "scoped-tls 0.1.0 (git+https://github.com/Zoxc/scoped-tls.git)", "serialize 0.0.0", "unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2743,6 +2801,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" "checksum kuchiki 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e03098e8e719c92b7794515dfd5c1724e2b12f5ce1788e61cfa4663f82eba8d8" "checksum languageserver-types 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)" = "773e175c945800aeea4c21c04090bcb9db987b1a566ad9c6f569972299950e3e" "checksum lazy_static 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "76f033c7ad61445c5b347c7382dd1237847eb1bce590fe50365dcb33d546be73" +"checksum lazy_static 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c8f31047daa365f19be14b47c29df4f7c3b581832407daabe6ae77397619237d" "checksum lazycell 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3b585b7a6811fb03aa10e74b278a0f00f8dd9b45dc681f148bb29fa5cb61859b" "checksum libc 0.2.34 (registry+https://github.com/rust-lang/crates.io-index)" = "36fbc8a8929c632868295d0178dd8f63fc423fd7537ad0738372bd010b3ac9b0" "checksum libgit2-sys 0.6.16 (registry+https://github.com/rust-lang/crates.io-index)" = "6f74b4959cef96898f5123148724fc7dee043b9a6b99f219d948851bfbe53cb2" @@ -2770,11 +2829,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index" "checksum num-traits 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)" = "cacfcab5eb48250ee7d0c7896b51a2c5eec99c1feea5f32025635f5ae4b00070" "checksum num_cpus 1.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "514f0d73e64be53ff320680ca671b64fe3fb91da01e1ae2ddc99eb51d453b20d" "checksum open 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c281318d992e4432cfa799969467003d05921582a7489a8325e37f8a450d5113" -"checksum openssl 0.9.22 (registry+https://github.com/rust-lang/crates.io-index)" = "419ef26bb651d72b6c5a603bcc4e4856a362460e62352dfffa53de91d2e81181" +"checksum openssl 0.9.23 (registry+https://github.com/rust-lang/crates.io-index)" = "169a4b9160baf9b9b1ab975418c673686638995ba921683a7f1e01470dcb8854" "checksum openssl-probe 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d98df0270d404ccd3c050a41d579c52d1db15375168bb3471e04ec0f5f378daf" -"checksum openssl-sys 0.9.22 (registry+https://github.com/rust-lang/crates.io-index)" = "5483bdc56756041ba6aa37c9cb59cc2219f012a2a1377d97ad35556ac6676ee7" +"checksum openssl-sys 0.9.23 (registry+https://github.com/rust-lang/crates.io-index)" = "2200ffec628e3f14c39fc0131a301db214f1a7d584e36507ee8700b0c7fb7a46" "checksum os_pipe 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "998bfbb3042e715190fe2a41abfa047d7e8cb81374d2977d7f100eacd8619cb1" +"checksum owning_ref 0.3.3 (git+https://github.com/Zoxc/owning-ref-rs.git)" = "" "checksum owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "cdf84f41639e037b484f93433aa3897863b561ed65c6e59c7073d7c561710f37" +"checksum parking_lot 0.5.3 
(registry+https://github.com/rust-lang/crates.io-index)" = "3e7f7c9857874e54afeb950eebeae662b1e51a2493666d2ea4c0a5d91dcf0412" +"checksum parking_lot_core 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "6c677d78851950b3aec390e681a411f78cc250cba277d4f578758a377f727970" "checksum percent-encoding 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "31010dd2e1ac33d5b46a5b413495239882813e0369f8ed8a5e266f173602f831" "checksum pest 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "0a6dda33d67c26f0aac90d324ab2eb7239c819fc7b2552fe9faa4fe88441edc8" "checksum phf 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "cb325642290f28ee14d8c6201159949a872f220c62af6e110a56ea914fbe42fc" @@ -2808,6 +2870,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" "checksum rustc-demangle 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "aee45432acc62f7b9a108cc054142dac51f979e69e71ddce7d6fc7adf29e817e" "checksum rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)" = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda" "checksum same-file 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "d931a44fdaa43b8637009e7632a02adc4f2b2e0733c08caa4cf00e8da4a117a7" +"checksum scoped-tls 0.1.0 (git+https://github.com/Zoxc/scoped-tls.git)" = "" "checksum scoped-tls 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f417c22df063e9450888a7561788e9bd46d3bb3c1466435b4eccb903807f147d" "checksum scopeguard 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "59a076157c1e2dc561d8de585151ee6965d910dd4dcb5dabb7ae3e83981a6c57" "checksum scopeguard 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "94258f53601af11e6a49f722422f6e3425c52b06245a5cf9bc09908b174f5e27" @@ -2824,6 +2887,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" "checksum shell-escape 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "dd5cc96481d54583947bfe88bf30c23d53f883c6cd0145368b69989d97b84ef8" "checksum siphasher 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "0df90a788073e8d0235a67e50441d47db7c8ad9debd91cbf43736a2a92d36537" "checksum smallvec 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "4f8266519bc1d17d0b5b16f6c21295625d562841c708f6376f49028a43e9c11e" +"checksum smallvec 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "44db0ecb22921ef790d17ae13a3f6d15784183ff5f2a01aa32098c7498d2b4b9" "checksum socket2 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)" = "36b4896961171cd3317c7e9603d88f379f8c6e45342212235d356496680c68fd" "checksum stable_deref_trait 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "15132e0e364248108c5e2c02e3ab539be8d6f5d52a01ca9bbf27ed657316f02b" "checksum string_cache 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "413fc7852aeeb5472f1986ef755f561ddf0c789d3d796e65f0b6fe293ecd4ef8" diff --git a/src/bootstrap/bin/rustc.rs b/src/bootstrap/bin/rustc.rs index 631c9f72f3500..e1bcc15ca4feb 100644 --- a/src/bootstrap/bin/rustc.rs +++ b/src/bootstrap/bin/rustc.rs @@ -262,6 +262,10 @@ fn main() { } } + if env::var_os("RUSTC_PARALLEL_QUERIES").is_some() { + cmd.arg("--cfg").arg("parallel_queries"); + } + let color = match env::var("RUSTC_COLOR") { Ok(s) => usize::from_str(&s).expect("RUSTC_COLOR should be an integer"), Err(_) => 0, diff --git a/src/bootstrap/compile.rs b/src/bootstrap/compile.rs index db013691bb1b8..4c76230ced8bc 100644 --- 
a/src/bootstrap/compile.rs +++ b/src/bootstrap/compile.rs @@ -561,6 +561,9 @@ pub fn rustc_cargo(build: &Build, if let Some(ref s) = build.config.rustc_default_linker { cargo.env("CFG_DEFAULT_LINKER", s); } + if build.config.rustc_parallel_queries { + cargo.env("RUSTC_PARALLEL_QUERIES", "1"); + } } #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] diff --git a/src/bootstrap/config.rs b/src/bootstrap/config.rs index 9dd37d8e4560c..87b1db33a7a85 100644 --- a/src/bootstrap/config.rs +++ b/src/bootstrap/config.rs @@ -87,6 +87,7 @@ pub struct Config { pub rust_debuginfo_lines: bool, pub rust_debuginfo_only_std: bool, pub rust_rpath: bool, + pub rustc_parallel_queries: bool, pub rustc_default_linker: Option, pub rust_optimize_tests: bool, pub rust_debuginfo_tests: bool, @@ -266,6 +267,7 @@ struct Rust { debuginfo: Option, debuginfo_lines: Option, debuginfo_only_std: Option, + experimental_parallel_queries: Option, debug_jemalloc: Option, use_jemalloc: Option, backtrace: Option, @@ -474,6 +476,7 @@ impl Config { set(&mut config.rust_dist_src, rust.dist_src); set(&mut config.quiet_tests, rust.quiet_tests); set(&mut config.test_miri, rust.test_miri); + config.rustc_parallel_queries = rust.experimental_parallel_queries.unwrap_or(false); config.rustc_default_linker = rust.default_linker.clone(); config.musl_root = rust.musl_root.clone().map(PathBuf::from); config.save_toolstates = rust.save_toolstates.clone().map(PathBuf::from); diff --git a/src/libarena/Cargo.toml b/src/libarena/Cargo.toml index b53c0a2f48bf7..e2af67dd92861 100644 --- a/src/libarena/Cargo.toml +++ b/src/libarena/Cargo.toml @@ -7,3 +7,6 @@ version = "0.0.0" name = "arena" path = "lib.rs" crate-type = ["dylib"] + +[dependencies] +rustc_data_structures = { path = "../librustc_data_structures" } \ No newline at end of file diff --git a/src/libarena/lib.rs b/src/libarena/lib.rs index 2be7b1bc2e17c..6c1a8caf5b049 100644 --- a/src/libarena/lib.rs +++ b/src/libarena/lib.rs @@ -33,6 +33,9 @@ #![allow(deprecated)] extern crate alloc; +extern crate rustc_data_structures; + +use rustc_data_structures::sync::MTLock; use std::cell::{Cell, RefCell}; use std::cmp; @@ -46,6 +49,10 @@ use alloc::raw_vec::RawVec; /// An arena that can hold objects of only one type. pub struct TypedArena { + lock: MTLock>, +} + +struct TypedArenaInner { /// A pointer to the next object to be allocated. ptr: Cell<*mut T>, @@ -109,38 +116,102 @@ impl TypedArenaChunk { const PAGE: usize = 4096; +impl TypedArenaInner { + /// Grows the arena. + #[inline(never)] + #[cold] + fn grow(&self, n: usize) { + unsafe { + let mut chunks = self.chunks.borrow_mut(); + let (chunk, mut new_capacity); + if let Some(last_chunk) = chunks.last_mut() { + let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize; + let currently_used_cap = used_bytes / mem::size_of::(); + if last_chunk.storage.reserve_in_place(currently_used_cap, n) { + self.end.set(last_chunk.end()); + return; + } else { + new_capacity = last_chunk.storage.cap(); + loop { + new_capacity = new_capacity.checked_mul(2).unwrap(); + if new_capacity >= currently_used_cap + n { + break; + } + } + } + } else { + let elem_size = cmp::max(1, mem::size_of::()); + new_capacity = cmp::max(n, PAGE / elem_size); + } + chunk = TypedArenaChunk::::new(new_capacity); + self.ptr.set(chunk.start()); + self.end.set(chunk.end()); + chunks.push(chunk); + } + } + + // Drops the contents of the last chunk. The last chunk is partially empty, unlike all other + // chunks. 
+ fn clear_last_chunk(&self, last_chunk: &mut TypedArenaChunk) { + // Determine how much was filled. + let start = last_chunk.start() as usize; + // We obtain the value of the pointer to the first uninitialized element. + let end = self.ptr.get() as usize; + // We then calculate the number of elements to be dropped in the last chunk, + // which is the filled area's length. + let diff = if mem::size_of::() == 0 { + // `T` is ZST. It can't have a drop flag, so the value here doesn't matter. We get + // the number of zero-sized values in the last and only chunk, just out of caution. + // Recall that `end` was incremented for each allocated value. + end - start + } else { + (end - start) / mem::size_of::() + }; + // Pass that to the `destroy` method. + unsafe { + last_chunk.destroy(diff); + } + // Reset the chunk. + self.ptr.set(last_chunk.start()); + } +} + impl TypedArena { /// Creates a new `TypedArena`. #[inline] pub fn new() -> TypedArena { TypedArena { - // We set both `ptr` and `end` to 0 so that the first call to - // alloc() will trigger a grow(). - ptr: Cell::new(0 as *mut T), - end: Cell::new(0 as *mut T), - chunks: RefCell::new(vec![]), - _own: PhantomData, + lock: MTLock::new(TypedArenaInner { + // We set both `ptr` and `end` to 0 so that the first call to + // alloc() will trigger a grow(). + ptr: Cell::new(0 as *mut T), + end: Cell::new(0 as *mut T), + chunks: RefCell::new(vec![]), + _own: PhantomData, + }) } } /// Allocates an object in the `TypedArena`, returning a reference to it. #[inline] pub fn alloc(&self, object: T) -> &mut T { - if self.ptr == self.end { - self.grow(1) + let this = self.lock.lock(); + + if this.ptr == this.end { + this.grow(1) } unsafe { if mem::size_of::() == 0 { - self.ptr.set(intrinsics::arith_offset(self.ptr.get() as *mut u8, 1) as *mut T); + this.ptr.set(intrinsics::arith_offset(this.ptr.get() as *mut u8, 1) as *mut T); let ptr = mem::align_of::() as *mut T; // Don't drop the object. This `write` is equivalent to `forget`. ptr::write(ptr, object); &mut *ptr } else { - let ptr = self.ptr.get(); + let ptr = this.ptr.get(); // Advance the pointer. - self.ptr.set(self.ptr.get().offset(1)); + this.ptr.set(this.ptr.get().offset(1)); // Write into uninitialized memory. ptr::write(ptr, object); &mut *ptr @@ -160,61 +231,32 @@ impl TypedArena { assert!(mem::size_of::() != 0); assert!(slice.len() != 0); - let available_capacity_bytes = self.end.get() as usize - self.ptr.get() as usize; + let this = self.lock.lock(); + + let available_capacity_bytes = this.end.get() as usize - this.ptr.get() as usize; let at_least_bytes = slice.len() * mem::size_of::(); if available_capacity_bytes < at_least_bytes { - self.grow(slice.len()); + this.grow(slice.len()); } unsafe { - let start_ptr = self.ptr.get(); + let start_ptr = this.ptr.get(); let arena_slice = slice::from_raw_parts_mut(start_ptr, slice.len()); - self.ptr.set(start_ptr.offset(arena_slice.len() as isize)); + this.ptr.set(start_ptr.offset(arena_slice.len() as isize)); arena_slice.copy_from_slice(slice); arena_slice } } - /// Grows the arena. 
- #[inline(never)] - #[cold] - fn grow(&self, n: usize) { - unsafe { - let mut chunks = self.chunks.borrow_mut(); - let (chunk, mut new_capacity); - if let Some(last_chunk) = chunks.last_mut() { - let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize; - let currently_used_cap = used_bytes / mem::size_of::(); - if last_chunk.storage.reserve_in_place(currently_used_cap, n) { - self.end.set(last_chunk.end()); - return; - } else { - new_capacity = last_chunk.storage.cap(); - loop { - new_capacity = new_capacity.checked_mul(2).unwrap(); - if new_capacity >= currently_used_cap + n { - break; - } - } - } - } else { - let elem_size = cmp::max(1, mem::size_of::()); - new_capacity = cmp::max(n, PAGE / elem_size); - } - chunk = TypedArenaChunk::::new(new_capacity); - self.ptr.set(chunk.start()); - self.end.set(chunk.end()); - chunks.push(chunk); - } - } - /// Clears the arena. Deallocates all but the longest chunk which may be reused. pub fn clear(&mut self) { + let this = self.lock.lock(); + unsafe { // Clear the last chunk, which is partially filled. - let mut chunks_borrow = self.chunks.borrow_mut(); + let mut chunks_borrow = this.chunks.borrow_mut(); if let Some(mut last_chunk) = chunks_borrow.pop() { - self.clear_last_chunk(&mut last_chunk); + this.clear_last_chunk(&mut last_chunk); // If `T` is ZST, code below has no effect. for mut chunk in chunks_borrow.drain(..) { let cap = chunk.storage.cap(); @@ -224,41 +266,18 @@ impl TypedArena { } } } - - // Drops the contents of the last chunk. The last chunk is partially empty, unlike all other - // chunks. - fn clear_last_chunk(&self, last_chunk: &mut TypedArenaChunk) { - // Determine how much was filled. - let start = last_chunk.start() as usize; - // We obtain the value of the pointer to the first uninitialized element. - let end = self.ptr.get() as usize; - // We then calculate the number of elements to be dropped in the last chunk, - // which is the filled area's length. - let diff = if mem::size_of::() == 0 { - // `T` is ZST. It can't have a drop flag, so the value here doesn't matter. We get - // the number of zero-sized values in the last and only chunk, just out of caution. - // Recall that `end` was incremented for each allocated value. - end - start - } else { - (end - start) / mem::size_of::() - }; - // Pass that to the `destroy` method. - unsafe { - last_chunk.destroy(diff); - } - // Reset the chunk. - self.ptr.set(last_chunk.start()); - } } unsafe impl<#[may_dangle] T> Drop for TypedArena { fn drop(&mut self) { + let this = self.lock.get_mut(); + unsafe { // Determine how much was filled. - let mut chunks_borrow = self.chunks.borrow_mut(); + let mut chunks_borrow = this.chunks.borrow_mut(); if let Some(mut last_chunk) = chunks_borrow.pop() { // Drop the contents of the last chunk. - self.clear_last_chunk(&mut last_chunk); + this.clear_last_chunk(&mut last_chunk); // The last chunk will be dropped. Destroy all other chunks. for chunk in chunks_borrow.iter_mut() { let cap = chunk.storage.cap(); @@ -270,9 +289,13 @@ unsafe impl<#[may_dangle] T> Drop for TypedArena { } } -unsafe impl Send for TypedArena {} +unsafe impl Send for TypedArenaInner {} pub struct DroplessArena { + lock: MTLock, +} + +struct DroplessArenaInner { /// A pointer to the next object to be allocated. 
ptr: Cell<*mut u8>, @@ -284,26 +307,9 @@ pub struct DroplessArena { chunks: RefCell>>, } -impl DroplessArena { - pub fn new() -> DroplessArena { - DroplessArena { - ptr: Cell::new(0 as *mut u8), - end: Cell::new(0 as *mut u8), - chunks: RefCell::new(vec![]), - } - } - - pub fn in_arena(&self, ptr: *const T) -> bool { - let ptr = ptr as *const u8 as *mut u8; - for chunk in &*self.chunks.borrow() { - if chunk.start() <= ptr && ptr < chunk.end() { - return true; - } - } - - false - } +unsafe impl Send for DroplessArenaInner {} +impl DroplessArenaInner { fn align_for(&self) { let align = mem::align_of::(); let final_address = ((self.ptr.get() as usize) + align - 1) & !(align - 1); @@ -341,6 +347,31 @@ impl DroplessArena { chunks.push(chunk); } } +} + +impl DroplessArena { + pub fn new() -> DroplessArena { + DroplessArena { + lock: MTLock::new(DroplessArenaInner { + ptr: Cell::new(0 as *mut u8), + end: Cell::new(0 as *mut u8), + chunks: RefCell::new(vec![]), + }) + } + } + + pub fn in_arena(&self, ptr: *const T) -> bool { + let this = self.lock.lock(); + + let ptr = ptr as *const u8 as *mut u8; + for chunk in &*this.chunks.borrow() { + if chunk.start() <= ptr && ptr < chunk.end() { + return true; + } + } + + false + } #[inline] pub fn alloc(&self, object: T) -> &mut T { @@ -348,16 +379,18 @@ impl DroplessArena { assert!(!mem::needs_drop::()); assert!(mem::size_of::() != 0); - self.align_for::(); - let future_end = intrinsics::arith_offset(self.ptr.get(), mem::size_of::() as isize); - if (future_end as *mut u8) >= self.end.get() { - self.grow::(1) + let this = self.lock.lock(); + + this.align_for::(); + let future_end = intrinsics::arith_offset(this.ptr.get(), mem::size_of::() as isize); + if (future_end as *mut u8) >= this.end.get() { + this.grow::(1) } - let ptr = self.ptr.get(); + let ptr = this.ptr.get(); // Set the pointer past ourselves - self.ptr.set(intrinsics::arith_offset( - self.ptr.get(), mem::size_of::() as isize + this.ptr.set(intrinsics::arith_offset( + this.ptr.get(), mem::size_of::() as isize ) as *mut u8); // Write into uninitialized memory. 
ptr::write(ptr as *mut T, object); @@ -377,19 +410,22 @@ impl DroplessArena { assert!(!mem::needs_drop::()); assert!(mem::size_of::() != 0); assert!(slice.len() != 0); - self.align_for::(); + + let this = self.lock.lock(); + + this.align_for::(); let future_end = unsafe { - intrinsics::arith_offset(self.ptr.get(), (slice.len() * mem::size_of::()) as isize) + intrinsics::arith_offset(this.ptr.get(), (slice.len() * mem::size_of::()) as isize) }; - if (future_end as *mut u8) >= self.end.get() { - self.grow::(slice.len()); + if (future_end as *mut u8) >= this.end.get() { + this.grow::(slice.len()); } unsafe { - let arena_slice = slice::from_raw_parts_mut(self.ptr.get() as *mut T, slice.len()); - self.ptr.set(intrinsics::arith_offset( - self.ptr.get(), (slice.len() * mem::size_of::()) as isize + let arena_slice = slice::from_raw_parts_mut(this.ptr.get() as *mut T, slice.len()); + this.ptr.set(intrinsics::arith_offset( + this.ptr.get(), (slice.len() * mem::size_of::()) as isize ) as *mut u8); arena_slice.copy_from_slice(slice); arena_slice @@ -415,7 +451,8 @@ mod tests { #[test] pub fn test_unused() { let arena: TypedArena = TypedArena::new(); - assert!(arena.chunks.borrow().is_empty()); + let lock = arena.lock.lock(); + assert!(lock.chunks.borrow().is_empty()); } #[test] diff --git a/src/libproc_macro/Cargo.toml b/src/libproc_macro/Cargo.toml index cfd83e348a8e2..c1b2622520b11 100644 --- a/src/libproc_macro/Cargo.toml +++ b/src/libproc_macro/Cargo.toml @@ -11,3 +11,4 @@ crate-type = ["dylib"] syntax = { path = "../libsyntax" } syntax_pos = { path = "../libsyntax_pos" } rustc_errors = { path = "../librustc_errors" } +rustc_data_structures = { path = "../librustc_data_structures" } diff --git a/src/libproc_macro/lib.rs b/src/libproc_macro/lib.rs index 4a6841aedca12..4306e9bc322b6 100644 --- a/src/libproc_macro/lib.rs +++ b/src/libproc_macro/lib.rs @@ -43,6 +43,7 @@ extern crate syntax; extern crate syntax_pos; extern crate rustc_errors; +extern crate rustc_data_structures; mod diagnostic; @@ -50,7 +51,7 @@ mod diagnostic; pub use diagnostic::{Diagnostic, Level}; use std::{ascii, fmt, iter}; -use std::rc::Rc; +use rustc_data_structures::sync::Lrc; use std::str::FromStr; use syntax::ast; @@ -275,7 +276,7 @@ pub struct LineColumn { #[unstable(feature = "proc_macro", issue = "38356")] #[derive(Clone)] pub struct SourceFile { - filemap: Rc, + filemap: Lrc, } impl SourceFile { @@ -325,7 +326,7 @@ impl fmt::Debug for SourceFile { #[unstable(feature = "proc_macro", issue = "38356")] impl PartialEq for SourceFile { fn eq(&self, other: &Self) -> bool { - Rc::ptr_eq(&self.filemap, &other.filemap) + Lrc::ptr_eq(&self.filemap, &other.filemap) } } diff --git a/src/librustc/Cargo.toml b/src/librustc/Cargo.toml index 0b62e1bd5afbf..101b472e22ac1 100644 --- a/src/librustc/Cargo.toml +++ b/src/librustc/Cargo.toml @@ -15,7 +15,8 @@ fmt_macros = { path = "../libfmt_macros" } graphviz = { path = "../libgraphviz" } jobserver = "0.1" log = "0.3" -owning_ref = "0.3.3" +scoped-tls = { git = "https://github.com/Zoxc/scoped-tls.git", features=["nightly"] } +owning_ref = { git = "https://github.com/Zoxc/owning-ref-rs.git" } rustc_back = { path = "../librustc_back" } rustc_const_math = { path = "../librustc_const_math" } rustc_data_structures = { path = "../librustc_data_structures" } diff --git a/src/librustc/dep_graph/graph.rs b/src/librustc/dep_graph/graph.rs index 96d6b0f79cfff..6dd525f79580c 100644 --- a/src/librustc/dep_graph/graph.rs +++ b/src/librustc/dep_graph/graph.rs @@ -13,10 +13,9 @@ use 
rustc_data_structures::stable_hasher::{HashStable, StableHasher, StableHashingContextProvider}; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use rustc_data_structures::indexed_vec::{Idx, IndexVec}; -use std::cell::{Ref, RefCell}; +use rustc_data_structures::sync::{Lrc, RwLock, ReadGuard, Lock}; use std::env; use std::hash::Hash; -use std::rc::Rc; use ty::TyCtxt; use util::common::{ProfileQueriesMsg, profq_msg}; @@ -32,7 +31,7 @@ use super::prev::PreviousDepGraph; #[derive(Clone)] pub struct DepGraph { - data: Option>, + data: Option>, // At the moment we are using DepNode as key here. In the future it might // be possible to use an IndexVec here. At the moment there @@ -41,7 +40,7 @@ pub struct DepGraph { // we need to have a dep-graph to generate DepNodeIndices. // - The architecture is still in flux and it's not clear what how to best // implement things. - fingerprints: Rc>> + fingerprints: Lrc>> } @@ -71,50 +70,50 @@ struct DepGraphData { /// tracking. The `current` field is the dependency graph of only the /// current compilation session: We don't merge the previous dep-graph into /// current one anymore. - current: RefCell, + current: Lock, /// The dep-graph from the previous compilation session. It contains all /// nodes and edges as well as all fingerprints of nodes that have them. previous: PreviousDepGraph, - colors: RefCell>, + colors: Lock>, /// When we load, there may be `.o` files, cached mir, or other such /// things available to us. If we find that they are not dirty, we /// load the path to the file storing those work-products here into /// this map. We can later look for and extract that data. - previous_work_products: RefCell>, + previous_work_products: RwLock>, /// Work-products that we generate in this run. - work_products: RefCell>, + work_products: RwLock>, - dep_node_debug: RefCell>, + dep_node_debug: Lock>, // Used for testing, only populated when -Zquery-dep-graph is specified. - loaded_from_cache: RefCell>, + loaded_from_cache: Lock>, } impl DepGraph { pub fn new(prev_graph: PreviousDepGraph) -> DepGraph { DepGraph { - data: Some(Rc::new(DepGraphData { - previous_work_products: RefCell::new(FxHashMap()), - work_products: RefCell::new(FxHashMap()), - dep_node_debug: RefCell::new(FxHashMap()), - current: RefCell::new(CurrentDepGraph::new()), + data: Some(Lrc::new(DepGraphData { + previous_work_products: RwLock::new(FxHashMap()), + work_products: RwLock::new(FxHashMap()), + dep_node_debug: Lock::new(FxHashMap()), + current: Lock::new(CurrentDepGraph::new()), previous: prev_graph, - colors: RefCell::new(FxHashMap()), - loaded_from_cache: RefCell::new(FxHashMap()), + colors: Lock::new(FxHashMap()), + loaded_from_cache: Lock::new(FxHashMap()), })), - fingerprints: Rc::new(RefCell::new(FxHashMap())), + fingerprints: Lrc::new(Lock::new(FxHashMap())), } } pub fn new_disabled() -> DepGraph { DepGraph { data: None, - fingerprints: Rc::new(RefCell::new(FxHashMap())), + fingerprints: Lrc::new(Lock::new(FxHashMap())), } } @@ -196,8 +195,8 @@ impl DepGraph { cx: C, arg: A, task: fn(C, A) -> R, - push: fn(&RefCell, DepNode), - pop: fn(&RefCell, DepNode) -> DepNodeIndex) + push: fn(&Lock, DepNode), + pop: fn(&Lock, DepNode) -> DepNodeIndex) -> (R, DepNodeIndex) where C: DepGraphSafe + StableHashingContextProvider, R: HashStable, @@ -384,13 +383,13 @@ impl DepGraph { /// Access the map of work-products created during this run. Only /// used during saving of the dep-graph. 
- pub fn work_products(&self) -> Ref> { + pub fn work_products(&self) -> ReadGuard> { self.data.as_ref().unwrap().work_products.borrow() } /// Access the map of work-products created during the cached run. Only /// used during saving of the dep-graph. - pub fn previous_work_products(&self) -> Ref> { + pub fn previous_work_products(&self) -> ReadGuard> { self.data.as_ref().unwrap().previous_work_products.borrow() } diff --git a/src/librustc/dep_graph/raii.rs b/src/librustc/dep_graph/raii.rs index 5728bcc7d2771..8bb8840836020 100644 --- a/src/librustc/dep_graph/raii.rs +++ b/src/librustc/dep_graph/raii.rs @@ -10,14 +10,14 @@ use super::graph::CurrentDepGraph; -use std::cell::RefCell; +use rustc_data_structures::sync::Lock; pub struct IgnoreTask<'graph> { - graph: &'graph RefCell, + graph: &'graph Lock, } impl<'graph> IgnoreTask<'graph> { - pub(super) fn new(graph: &'graph RefCell) -> IgnoreTask<'graph> { + pub(super) fn new(graph: &'graph Lock) -> IgnoreTask<'graph> { graph.borrow_mut().push_ignore(); IgnoreTask { graph, diff --git a/src/librustc/hir/map/mod.rs b/src/librustc/hir/map/mod.rs index 8969528dd1949..b67ae712dc651 100644 --- a/src/librustc/hir/map/mod.rs +++ b/src/librustc/hir/map/mod.rs @@ -29,9 +29,10 @@ use hir::print::Nested; use util::nodemap::{DefIdMap, FxHashMap}; use arena::TypedArena; -use std::cell::RefCell; use std::io; +use rustc_data_structures::sync::Lock; + pub mod blocks; mod collector; mod def_collector; @@ -255,7 +256,7 @@ pub struct Map<'hir> { definitions: &'hir Definitions, /// Bodies inlined from other crates are cached here. - inlined_bodies: RefCell>, + inlined_bodies: Lock>, /// The reverse mapping of `node_to_hir_id`. hir_to_node_id: FxHashMap, @@ -1090,7 +1091,7 @@ pub fn map_crate<'hir>(sess: &::session::Session, map, hir_to_node_id, definitions, - inlined_bodies: RefCell::new(DefIdMap()), + inlined_bodies: Lock::new(DefIdMap()), }; hir_id_validator::check_crate(&map); diff --git a/src/librustc/ich/caching_codemap_view.rs b/src/librustc/ich/caching_codemap_view.rs index e393459027859..e5bf384d253c5 100644 --- a/src/librustc/ich/caching_codemap_view.rs +++ b/src/librustc/ich/caching_codemap_view.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
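The recurring pattern in the hunks above (DepGraph, hir::map, and the arenas earlier) is swapping single-threaded interior mutability (Rc, RefCell, Cell) for the Lrc, Lock, RwLock and MTLock wrappers from rustc_data_structures::sync, so the compiler keeps its cheap single-threaded behavior by default and only pays for real synchronization when built with --cfg parallel_queries. A minimal sketch of that idea follows; the module and method names mirror the aliases used in the diff, but the real sync module's definitions may differ in detail.

#[cfg(not(parallel_queries))]
pub mod sync {
    // Default build: plain reference counting and RefCell-style borrows.
    pub use std::rc::Rc as Lrc;
    use std::cell::{RefCell, RefMut};

    pub struct Lock<T>(RefCell<T>);

    impl<T> Lock<T> {
        pub fn new(inner: T) -> Self { Lock(RefCell::new(inner)) }
        pub fn borrow_mut(&self) -> RefMut<T> { self.0.borrow_mut() }
    }
}

#[cfg(parallel_queries)]
pub mod sync {
    // Parallel build: the same API backed by atomic reference counting
    // and a real mutex, so query threads can share the data safely.
    pub use std::sync::Arc as Lrc;
    use std::sync::{Mutex, MutexGuard};

    pub struct Lock<T>(Mutex<T>);

    impl<T> Lock<T> {
        pub fn new(inner: T) -> Self { Lock(Mutex::new(inner)) }
        pub fn borrow_mut(&self) -> MutexGuard<T> { self.0.lock().unwrap() }
    }
}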
-use std::rc::Rc; +use rustc_data_structures::sync::Lrc; use syntax::codemap::CodeMap; use syntax_pos::{BytePos, FileMap}; @@ -18,7 +18,7 @@ struct CacheEntry { line_number: usize, line_start: BytePos, line_end: BytePos, - file: Rc, + file: Lrc, file_index: usize, } @@ -51,7 +51,7 @@ impl<'cm> CachingCodemapView<'cm> { pub fn byte_pos_to_line_and_col(&mut self, pos: BytePos) - -> Option<(Rc, usize, BytePos)> { + -> Option<(Lrc, usize, BytePos)> { self.time_stamp += 1; // Check if the position is in one of the cached lines @@ -78,11 +78,9 @@ impl<'cm> CachingCodemapView<'cm> { // If the entry doesn't point to the correct file, fix it up if pos < cache_entry.file.start_pos || pos >= cache_entry.file.end_pos { let file_valid; - let files = self.codemap.files(); - - if files.len() > 0 { + if self.codemap.files().len() > 0 { let file_index = self.codemap.lookup_filemap_idx(pos); - let file = files[file_index].clone(); + let file = self.codemap.files()[file_index].clone(); if pos >= file.start_pos && pos < file.end_pos { cache_entry.file = file; diff --git a/src/librustc/ich/hcx.rs b/src/librustc/ich/hcx.rs index 0ef42177c14a8..5ecd537ad32b6 100644 --- a/src/librustc/ich/hcx.rs +++ b/src/librustc/ich/hcx.rs @@ -19,7 +19,6 @@ use session::Session; use std::cmp::Ord; use std::hash as std_hash; -use std::cell::RefCell; use std::collections::HashMap; use syntax::ast; @@ -35,8 +34,10 @@ use rustc_data_structures::stable_hasher::{HashStable, StableHashingContextProvi use rustc_data_structures::accumulate_vec::AccumulateVec; use rustc_data_structures::fx::FxHashSet; -thread_local!(static IGNORED_ATTR_NAMES: RefCell> = - RefCell::new(FxHashSet())); +pub fn compute_ignored_attr_names() -> FxHashSet { + debug_assert!(ich::IGNORED_ATTRIBUTES.len() > 0); + ich::IGNORED_ATTRIBUTES.iter().map(|&s| Symbol::intern(s)).collect() +} /// This is the context state available during incr. comp. hashing. It contains /// enough information to transform DefIds and HirIds into stable DefPaths (i.e. 
@@ -89,15 +90,6 @@ impl<'gcx> StableHashingContext<'gcx> { -> Self { let hash_spans_initial = !sess.opts.debugging_opts.incremental_ignore_spans; - debug_assert!(ich::IGNORED_ATTRIBUTES.len() > 0); - IGNORED_ATTR_NAMES.with(|names| { - let mut names = names.borrow_mut(); - if names.is_empty() { - names.extend(ich::IGNORED_ATTRIBUTES.iter() - .map(|&s| Symbol::intern(s))); - } - }); - StableHashingContext { sess, body_resolver: BodyResolver(krate), @@ -185,9 +177,7 @@ impl<'gcx> StableHashingContext<'gcx> { #[inline] pub fn is_ignored_attr(&self, name: Symbol) -> bool { - IGNORED_ATTR_NAMES.with(|names| { - names.borrow().contains(&name) - }) + self.sess.ignored_attr_names.contains(&name) } pub fn hash_hir_item_like(&mut self, f: F) { diff --git a/src/librustc/ich/mod.rs b/src/librustc/ich/mod.rs index cbd76ee14db38..ce1bd07b14ce0 100644 --- a/src/librustc/ich/mod.rs +++ b/src/librustc/ich/mod.rs @@ -13,7 +13,7 @@ pub use self::fingerprint::Fingerprint; pub use self::caching_codemap_view::CachingCodemapView; pub use self::hcx::{StableHashingContext, NodeIdHashingMode, - hash_stable_trait_impls}; + hash_stable_trait_impls, compute_ignored_attr_names}; mod fingerprint; mod caching_codemap_view; mod hcx; diff --git a/src/librustc/lib.rs b/src/librustc/lib.rs index 06c9995663e68..5d5e18ac88aa8 100644 --- a/src/librustc/lib.rs +++ b/src/librustc/lib.rs @@ -86,7 +86,7 @@ extern crate rustc_errors as errors; #[macro_use] extern crate syntax; extern crate syntax_pos; extern crate jobserver; - +#[macro_use] extern crate scoped_tls; extern crate serialize as rustc_serialize; // used by deriving // Note that librustc doesn't actually depend on these crates, see the note in diff --git a/src/librustc/lint/context.rs b/src/librustc/lint/context.rs index 75cd230e1e5e2..aafbd2dcd3b65 100644 --- a/src/librustc/lint/context.rs +++ b/src/librustc/lint/context.rs @@ -27,6 +27,7 @@ use self::TargetLint::*; use std::slice; +use rustc_data_structures::sync::{RwLock, ReadGuard}; use lint::{EarlyLintPassObject, LateLintPassObject}; use lint::{Level, Lint, LintId, LintPass, LintBuffer}; use lint::levels::{LintLevelSets, LintLevelsBuilder}; @@ -39,7 +40,6 @@ use ty::layout::{LayoutError, LayoutOf, TyLayout}; use util::nodemap::FxHashMap; use std::default::Default as StdDefault; -use std::cell::{Ref, RefCell}; use syntax::ast; use syntax_pos::{MultiSpan, Span}; use errors::DiagnosticBuilder; @@ -77,7 +77,7 @@ pub struct LintStore { pub struct LintSession<'a, PassObject> { /// Reference to the store of registered lints. - lints: Ref<'a, LintStore>, + lints: ReadGuard<'a, LintStore>, /// Trait objects for each lint pass. passes: Option>, @@ -317,7 +317,7 @@ impl<'a, PassObject: LintPassObject> LintSession<'a, PassObject> { /// Creates a new `LintSession`, by moving out the `LintStore`'s initial /// lint levels and pass objects. These can be restored using the `restore` /// method. - fn new(store: &'a RefCell) -> LintSession<'a, PassObject> { + fn new(store: &'a RwLock) -> LintSession<'a, PassObject> { let mut s = store.borrow_mut(); let passes = PassObject::take_passes(&mut *s); drop(s); @@ -328,7 +328,7 @@ impl<'a, PassObject: LintPassObject> LintSession<'a, PassObject> { } /// Restores the levels back to the original lint store. 
- fn restore(self, store: &RefCell) { + fn restore(self, store: &RwLock) { drop(self.lints); let mut s = store.borrow_mut(); PassObject::restore_passes(&mut *s, self.passes); diff --git a/src/librustc/lint/mod.rs b/src/librustc/lint/mod.rs index 906cae53710ff..ad8c3aaf957dc 100644 --- a/src/librustc/lint/mod.rs +++ b/src/librustc/lint/mod.rs @@ -31,7 +31,7 @@ pub use self::Level::*; pub use self::LintSource::*; -use std::rc::Rc; +use rustc_data_structures::sync::{Send, Sync, Lrc}; use errors::{DiagnosticBuilder, DiagnosticId}; use hir::def_id::{CrateNum, LOCAL_CRATE}; @@ -257,8 +257,8 @@ pub trait EarlyLintPass: LintPass { } /// A lint pass boxed up as a trait object. -pub type EarlyLintPassObject = Box; -pub type LateLintPassObject = Box LateLintPass<'a, 'tcx> + 'static>; +pub type EarlyLintPassObject = Box; +pub type LateLintPassObject = Box LateLintPass<'a, 'tcx> + Send + Sync + 'static>; /// Identifies a lint known to the compiler. #[derive(Clone, Copy, Debug)] @@ -479,7 +479,7 @@ pub fn struct_lint_level<'a>(sess: &'a Session, } fn lint_levels<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, cnum: CrateNum) - -> Rc + -> Lrc { assert_eq!(cnum, LOCAL_CRATE); let mut builder = LintLevelMapBuilder { @@ -492,7 +492,7 @@ fn lint_levels<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, cnum: CrateNum) intravisit::walk_crate(builder, krate); }); - Rc::new(builder.levels.build_map()) + Lrc::new(builder.levels.build_map()) } struct LintLevelMapBuilder<'a, 'tcx: 'a> { diff --git a/src/librustc/middle/cstore.rs b/src/librustc/middle/cstore.rs index 4be23fb711d77..750beb1151faa 100644 --- a/src/librustc/middle/cstore.rs +++ b/src/librustc/middle/cstore.rs @@ -37,13 +37,12 @@ use util::nodemap::NodeSet; use std::any::Any; use std::collections::BTreeMap; use std::path::{Path, PathBuf}; -use std::rc::Rc; -use owning_ref::ErasedBoxRef; use syntax::ast; use syntax::ext::base::SyntaxExtension; use syntax::symbol::Symbol; use syntax_pos::Span; use rustc_back::target::Target; +use rustc_data_structures::sync::{MetadataRef, Lrc}; pub use self::NativeLibraryKind::*; @@ -139,7 +138,7 @@ pub struct NativeLibrary { pub enum LoadedMacro { MacroDef(ast::Item), - ProcMacro(Rc), + ProcMacro(Lrc), } #[derive(Copy, Clone, Debug)] @@ -187,11 +186,11 @@ pub trait MetadataLoader { fn get_rlib_metadata(&self, target: &Target, filename: &Path) - -> Result, String>; + -> Result; fn get_dylib_metadata(&self, target: &Target, filename: &Path) - -> Result, String>; + -> Result; } #[derive(Clone)] @@ -206,7 +205,7 @@ pub struct ExternConstBody<'tcx> { #[derive(Clone)] pub struct ExternBodyNestedBodies { - pub nested_bodies: Rc>, + pub nested_bodies: Lrc>, // It would require a lot of infrastructure to enable stable-hashing Bodies // from other crates, so we hash on export and just store the fingerprint @@ -225,7 +224,7 @@ pub struct ExternBodyNestedBodies { /// (it'd break incremental compilation) and should only be called pre-HIR (e.g. 
/// during resolve) pub trait CrateStore { - fn crate_data_as_rc_any(&self, krate: CrateNum) -> Rc; + fn crate_data_as_rc_any(&self, krate: CrateNum) -> Lrc; // access to the metadata loader fn metadata_loader(&self) -> &MetadataLoader; @@ -234,7 +233,7 @@ pub trait CrateStore { fn def_key(&self, def: DefId) -> DefKey; fn def_path(&self, def: DefId) -> hir_map::DefPath; fn def_path_hash(&self, def: DefId) -> hir_map::DefPathHash; - fn def_path_table(&self, cnum: CrateNum) -> Rc; + fn def_path_table(&self, cnum: CrateNum) -> Lrc; // "queries" used in resolve that aren't tracked for incremental compilation fn visibility_untracked(&self, def: DefId) -> ty::Visibility; @@ -297,7 +296,7 @@ pub struct DummyCrateStore; #[allow(unused_variables)] impl CrateStore for DummyCrateStore { - fn crate_data_as_rc_any(&self, krate: CrateNum) -> Rc + fn crate_data_as_rc_any(&self, krate: CrateNum) -> Lrc { bug!("crate_data_as_rc_any") } // item info fn visibility_untracked(&self, def: DefId) -> ty::Visibility { bug!("visibility") } @@ -325,7 +324,7 @@ impl CrateStore for DummyCrateStore { fn def_path_hash(&self, def: DefId) -> hir_map::DefPathHash { bug!("def_path_hash") } - fn def_path_table(&self, cnum: CrateNum) -> Rc { + fn def_path_table(&self, cnum: CrateNum) -> Lrc { bug!("def_path_table") } fn struct_field_names_untracked(&self, def: DefId) -> Vec { @@ -397,7 +396,7 @@ pub fn used_crates(tcx: TyCtxt, prefer: LinkagePreference) }) .collect::>(); let mut ordering = tcx.postorder_cnums(LOCAL_CRATE); - Rc::make_mut(&mut ordering).reverse(); + Lrc::make_mut(&mut ordering).reverse(); libs.sort_by_key(|&(a, _)| { ordering.iter().position(|x| *x == a) }); diff --git a/src/librustc/middle/expr_use_visitor.rs b/src/librustc/middle/expr_use_visitor.rs index 9018b9fe590b2..6f709c4395058 100644 --- a/src/librustc/middle/expr_use_visitor.rs +++ b/src/librustc/middle/expr_use_visitor.rs @@ -27,7 +27,7 @@ use middle::region; use ty::{self, TyCtxt, adjustment}; use hir::{self, PatKind}; -use std::rc::Rc; +use rustc_data_structures::sync::Lrc; use syntax::ast; use syntax::ptr::P; use syntax_pos::Span; @@ -279,7 +279,7 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx, 'tcx> { param_env: ty::ParamEnv<'tcx>, region_scope_tree: &'a region::ScopeTree, tables: &'a ty::TypeckTables<'tcx>, - rvalue_promotable_map: Option>) + rvalue_promotable_map: Option>) -> Self { ExprUseVisitor { diff --git a/src/librustc/middle/mem_categorization.rs b/src/librustc/middle/mem_categorization.rs index 0d4429de22a84..ce396276c98e2 100644 --- a/src/librustc/middle/mem_categorization.rs +++ b/src/librustc/middle/mem_categorization.rs @@ -85,6 +85,7 @@ use syntax::ast; use syntax_pos::Span; use std::fmt; +use rustc_data_structures::sync::Lrc; use std::rc::Rc; use util::nodemap::ItemLocalSet; @@ -286,7 +287,7 @@ pub struct MemCategorizationContext<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { pub tcx: TyCtxt<'a, 'gcx, 'tcx>, pub region_scope_tree: &'a region::ScopeTree, pub tables: &'a ty::TypeckTables<'tcx>, - rvalue_promotable_map: Option>, + rvalue_promotable_map: Option>, infcx: Option<&'a InferCtxt<'a, 'gcx, 'tcx>>, } @@ -395,7 +396,7 @@ impl<'a, 'tcx> MemCategorizationContext<'a, 'tcx, 'tcx> { pub fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>, region_scope_tree: &'a region::ScopeTree, tables: &'a ty::TypeckTables<'tcx>, - rvalue_promotable_map: Option>) + rvalue_promotable_map: Option>) -> MemCategorizationContext<'a, 'tcx, 'tcx> { MemCategorizationContext { tcx, diff --git a/src/librustc/middle/reachable.rs b/src/librustc/middle/reachable.rs index 
d5f26d1117c5b..b724c23a07e17 100644 --- a/src/librustc/middle/reachable.rs +++ b/src/librustc/middle/reachable.rs @@ -18,7 +18,7 @@ use hir::map as hir_map; use hir::def::Def; use hir::def_id::{DefId, CrateNum}; -use std::rc::Rc; +use rustc_data_structures::sync::Lrc; use ty::{self, TyCtxt}; use ty::maps::Providers; use middle::privacy; @@ -378,7 +378,7 @@ impl<'a, 'tcx: 'a> ItemLikeVisitor<'tcx> for CollectPrivateImplItemsVisitor<'a, // We introduce a new-type here, so we can have a specialized HashStable // implementation for it. #[derive(Clone)] -pub struct ReachableSet(pub Rc); +pub struct ReachableSet(pub Lrc); fn reachable_set<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, crate_num: CrateNum) -> ReachableSet { @@ -426,7 +426,7 @@ fn reachable_set<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, crate_num: CrateNum) -> reachable_context.propagate(); // Return the set of reachable symbols. - ReachableSet(Rc::new(reachable_context.reachable_symbols)) + ReachableSet(Lrc::new(reachable_context.reachable_symbols)) } pub fn provide(providers: &mut Providers) { diff --git a/src/librustc/middle/recursion_limit.rs b/src/librustc/middle/recursion_limit.rs index 6c87f750376fa..85592353c8977 100644 --- a/src/librustc/middle/recursion_limit.rs +++ b/src/librustc/middle/recursion_limit.rs @@ -18,7 +18,7 @@ use session::Session; use syntax::ast; -use std::cell::Cell; +use rustc_data_structures::sync::LockCell; pub fn update_limits(sess: &Session, krate: &ast::Crate) { update_limit(sess, krate, &sess.recursion_limit, "recursion_limit", @@ -27,7 +27,7 @@ pub fn update_limits(sess: &Session, krate: &ast::Crate) { "type length limit"); } -fn update_limit(sess: &Session, krate: &ast::Crate, limit: &Cell, +fn update_limit(sess: &Session, krate: &ast::Crate, limit: &LockCell, name: &str, description: &str) { for attr in &krate.attrs { if !attr.check_name(name) { diff --git a/src/librustc/middle/region.rs b/src/librustc/middle/region.rs index d3aa80e5585e2..d1feb8e9a07de 100644 --- a/src/librustc/middle/region.rs +++ b/src/librustc/middle/region.rs @@ -20,7 +20,7 @@ use ty; use std::fmt; use std::mem; -use std::rc::Rc; +use rustc_data_structures::sync::Lrc; use syntax::codemap; use syntax::ast; use syntax_pos::{Span, DUMMY_SP}; @@ -1350,7 +1350,7 @@ impl<'a, 'tcx> Visitor<'tcx> for RegionResolutionVisitor<'a, 'tcx> { } fn region_scope_tree<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) - -> Rc + -> Lrc { let closure_base_def_id = tcx.closure_base_def_id(def_id); if closure_base_def_id != def_id { @@ -1392,7 +1392,7 @@ fn region_scope_tree<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) ScopeTree::default() }; - Rc::new(scope_tree) + Lrc::new(scope_tree) } pub fn provide(providers: &mut Providers) { diff --git a/src/librustc/mir/cache.rs b/src/librustc/mir/cache.rs index efc2f647cfdf5..8fed58a178862 100644 --- a/src/librustc/mir/cache.rs +++ b/src/librustc/mir/cache.rs @@ -8,8 +8,8 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
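Simple Cell-based counters follow the same scheme: recursion_limit above, and later the Session fields (next_node_id, optimization_fuel_limit, has_global_allocator, and others), move from Cell to LockCell. A rough sketch of what such a cell can look like, assuming the same cfg switch as the sync sketch above; this is illustrative, not the actual rustc_data_structures::sync::LockCell definition.

#[cfg(not(parallel_queries))]
use std::cell::Cell;
#[cfg(parallel_queries)]
use std::sync::Mutex;

// Single-threaded build: LockCell is just a Cell with get/set.
#[cfg(not(parallel_queries))]
pub struct LockCell<T>(Cell<T>);

#[cfg(not(parallel_queries))]
impl<T: Copy> LockCell<T> {
    pub fn new(value: T) -> Self { LockCell(Cell::new(value)) }
    pub fn get(&self) -> T { self.0.get() }
    pub fn set(&self, value: T) { self.0.set(value) }
}

// Parallel build: the same get/set API, but every access takes a mutex
// so that queries running on multiple threads can share the Session.
#[cfg(parallel_queries)]
pub struct LockCell<T>(Mutex<T>);

#[cfg(parallel_queries)]
impl<T: Copy> LockCell<T> {
    pub fn new(value: T) -> Self { LockCell(Mutex::new(value)) }
    pub fn get(&self) -> T { *self.0.lock().unwrap() }
    pub fn set(&self, value: T) { *self.0.lock().unwrap() = value; }
}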
-use std::cell::{Ref, RefCell}; use rustc_data_structures::indexed_vec::IndexVec; +use rustc_data_structures::sync::{RwLock, ReadGuard}; use rustc_data_structures::stable_hasher::{HashStable, StableHasher, StableHasherResult}; use ich::StableHashingContext; @@ -19,7 +19,7 @@ use rustc_serialize as serialize; #[derive(Clone, Debug)] pub struct Cache { - predecessors: RefCell>>> + predecessors: RwLock>>> } @@ -46,7 +46,7 @@ impl<'gcx> HashStable> for Cache { impl Cache { pub fn new() -> Self { Cache { - predecessors: RefCell::new(None) + predecessors: RwLock::new(None) } } @@ -55,12 +55,12 @@ impl Cache { *self.predecessors.borrow_mut() = None; } - pub fn predecessors(&self, mir: &Mir) -> Ref>> { + pub fn predecessors(&self, mir: &Mir) -> ReadGuard>> { if self.predecessors.borrow().is_none() { *self.predecessors.borrow_mut() = Some(calculate_predecessors(mir)); } - Ref::map(self.predecessors.borrow(), |p| p.as_ref().unwrap()) + ReadGuard::map(self.predecessors.borrow(), |p| p.as_ref().unwrap()) } } diff --git a/src/librustc/mir/mod.rs b/src/librustc/mir/mod.rs index 64e601ab1e734..d9d5b13b4ab18 100644 --- a/src/librustc/mir/mod.rs +++ b/src/librustc/mir/mod.rs @@ -14,6 +14,7 @@ use graphviz::IntoCow; use middle::const_val::ConstVal; use middle::region; use rustc_const_math::{ConstUsize, ConstInt, ConstMathErr}; +use rustc_data_structures::sync::{Lrc}; use rustc_data_structures::indexed_vec::{IndexVec, Idx}; use rustc_data_structures::control_flow_graph::dominators::{Dominators, dominators}; use rustc_data_structures::control_flow_graph::{GraphPredecessors, GraphSuccessors}; @@ -29,11 +30,10 @@ use std::slice; use hir::{self, InlineAsm}; use std::ascii; use std::borrow::{Cow}; -use std::cell::Ref; +use rustc_data_structures::sync::ReadGuard; use std::fmt::{self, Debug, Formatter, Write}; use std::{iter, u32}; use std::ops::{Index, IndexMut}; -use std::rc::Rc; use std::vec::IntoIter; use syntax::ast::{self, Name}; use syntax::symbol::InternedString; @@ -181,13 +181,13 @@ impl<'tcx> Mir<'tcx> { } #[inline] - pub fn predecessors(&self) -> Ref>> { + pub fn predecessors(&self) -> ReadGuard>> { self.cache.predecessors(self) } #[inline] - pub fn predecessors_for(&self, bb: BasicBlock) -> Ref> { - Ref::map(self.predecessors(), |p| &p[bb]) + pub fn predecessors_for(&self, bb: BasicBlock) -> ReadGuard> { + ReadGuard::map(self.predecessors(), |p| &p[bb]) } #[inline] @@ -1777,10 +1777,10 @@ pub struct UnsafetyViolation { #[derive(Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)] pub struct UnsafetyCheckResult { /// Violations that are propagated *upwards* from this function - pub violations: Rc<[UnsafetyViolation]>, + pub violations: Lrc<[UnsafetyViolation]>, /// unsafe blocks in this function, along with whether they are used. This is /// used for the "unused_unsafe" lint. - pub unsafe_blocks: Rc<[(ast::NodeId, bool)]>, + pub unsafe_blocks: Lrc<[(ast::NodeId, bool)]>, } /// The layout of generator state diff --git a/src/librustc/session/config.rs b/src/librustc/session/config.rs index 0dcd3e8081080..a7592ca238ca3 100644 --- a/src/librustc/session/config.rs +++ b/src/librustc/session/config.rs @@ -1046,6 +1046,8 @@ options! 
{DebuggingOptions, DebuggingSetter, basic_debugging_options, "prints the llvm optimization passes being run"), ast_json: bool = (false, parse_bool, [UNTRACKED], "print the AST as JSON and halt"), + query_threads: Option = (None, parse_opt_uint, [UNTRACKED], + "execute queries on a thread pool with N threads"), ast_json_noexpand: bool = (false, parse_bool, [UNTRACKED], "print the pre-expansion AST as JSON and halt"), ls: bool = (false, parse_bool, [UNTRACKED], @@ -1638,6 +1640,10 @@ pub fn build_session_options_and_crate_config(matches: &getopts::Matches) } } + if debugging_opts.query_threads == Some(0) { + early_error(error_format, "Value for query threads must be a positive nonzero integer"); + } + if codegen_units == Some(0) { early_error(error_format, "Value for codegen units must be a positive nonzero integer"); } @@ -2095,6 +2101,7 @@ mod tests { use super::{OutputType, OutputTypes, Externs}; use rustc_back::{PanicStrategy, RelroLevel}; use syntax::symbol::Symbol; + use syntax; fn optgroups() -> getopts::Options { let mut opts = getopts::Options::new(); @@ -2115,41 +2122,45 @@ mod tests { // When the user supplies --test we should implicitly supply --cfg test #[test] fn test_switch_implies_cfg_test() { - let matches = - &match optgroups().parse(&["--test".to_string()]) { - Ok(m) => m, - Err(f) => panic!("test_switch_implies_cfg_test: {}", f) - }; - let registry = errors::registry::Registry::new(&[]); - let (sessopts, cfg) = build_session_options_and_crate_config(matches); - let sess = build_session(sessopts, None, registry); - let cfg = build_configuration(&sess, cfg); - assert!(cfg.contains(&(Symbol::intern("test"), None))); + syntax::with_globals(&syntax::Globals::new(), || { + let matches = + &match optgroups().parse(&["--test".to_string()]) { + Ok(m) => m, + Err(f) => panic!("test_switch_implies_cfg_test: {}", f) + }; + let registry = errors::registry::Registry::new(&[]); + let (sessopts, cfg) = build_session_options_and_crate_config(matches); + let sess = build_session(sessopts, None, registry); + let cfg = build_configuration(&sess, cfg); + assert!(cfg.contains(&(Symbol::intern("test"), None))); + }); } // When the user supplies --test and --cfg test, don't implicitly add // another --cfg test #[test] fn test_switch_implies_cfg_test_unless_cfg_test() { - let matches = - &match optgroups().parse(&["--test".to_string(), "--cfg=test".to_string()]) { - Ok(m) => m, - Err(f) => { - panic!("test_switch_implies_cfg_test_unless_cfg_test: {}", f) - } - }; - let registry = errors::registry::Registry::new(&[]); - let (sessopts, cfg) = build_session_options_and_crate_config(matches); - let sess = build_session(sessopts, None, registry); - let cfg = build_configuration(&sess, cfg); - let mut test_items = cfg.iter().filter(|&&(name, _)| name == "test"); - assert!(test_items.next().is_some()); - assert!(test_items.next().is_none()); + syntax::with_globals(&syntax::Globals::new(), || { + let matches = + &match optgroups().parse(&["--test".to_string(), "--cfg=test".to_string()]) { + Ok(m) => m, + Err(f) => { + panic!("test_switch_implies_cfg_test_unless_cfg_test: {}", f) + } + }; + let registry = errors::registry::Registry::new(&[]); + let (sessopts, cfg) = build_session_options_and_crate_config(matches); + let sess = build_session(sessopts, None, registry); + let cfg = build_configuration(&sess, cfg); + let mut test_items = cfg.iter().filter(|&&(name, _)| name == "test"); + assert!(test_items.next().is_some()); + assert!(test_items.next().is_none()); + }); } #[test] fn test_can_print_warnings() 
{ - { + syntax::with_globals(&syntax::Globals::new(), || { let matches = optgroups().parse(&[ "-Awarnings".to_string() ]).unwrap(); @@ -2157,9 +2168,9 @@ mod tests { let (sessopts, _) = build_session_options_and_crate_config(&matches); let sess = build_session(sessopts, None, registry); assert!(!sess.diagnostic().flags.can_emit_warnings); - } + }); - { + syntax::with_globals(&syntax::Globals::new(), || { let matches = optgroups().parse(&[ "-Awarnings".to_string(), "-Dwarnings".to_string() @@ -2168,9 +2179,9 @@ mod tests { let (sessopts, _) = build_session_options_and_crate_config(&matches); let sess = build_session(sessopts, None, registry); assert!(sess.diagnostic().flags.can_emit_warnings); - } + }); - { + syntax::with_globals(&syntax::Globals::new(), || { let matches = optgroups().parse(&[ "-Adead_code".to_string() ]).unwrap(); @@ -2178,7 +2189,7 @@ mod tests { let (sessopts, _) = build_session_options_and_crate_config(&matches); let sess = build_session(sessopts, None, registry); assert!(sess.diagnostic().flags.can_emit_warnings); - } + }); } #[test] diff --git a/src/librustc/session/mod.rs b/src/librustc/session/mod.rs index df5805bacd41a..d4cba3fe6b5f0 100644 --- a/src/librustc/session/mod.rs +++ b/src/librustc/session/mod.rs @@ -14,6 +14,7 @@ pub use self::code_stats::{SizeKind, TypeSizeInfo, VariantInfo}; use hir::def_id::{CrateNum, DefIndex}; use ich::Fingerprint; +use ich; use lint; use middle::allocator::AllocatorKind; use middle::dependency_format; @@ -22,12 +23,16 @@ use session::config::DebugInfoLevel; use ty::tls; use util::nodemap::{FxHashMap, FxHashSet}; use util::common::{duration_to_secs_str, ErrorReported}; +use util::common::ProfileQueriesMsg; + +use rustc_data_structures::sync::{self, Lrc, RwLock, Lock, LockCell, ReadGuard}; use syntax::ast::NodeId; use errors::{self, DiagnosticBuilder, DiagnosticId}; use errors::emitter::{Emitter, EmitterWriter}; use syntax::json::JsonEmitter; use syntax::feature_gate; +use syntax::symbol::Symbol; use syntax::parse; use syntax::parse::ParseSess; use syntax::{ast, codemap}; @@ -39,15 +44,14 @@ use rustc_back::target::Target; use rustc_data_structures::flock; use jobserver::Client; -use std::cell::{self, Cell, RefCell}; use std::collections::HashMap; use std::env; use std::fmt; use std::io::Write; use std::path::{Path, PathBuf}; -use std::rc::Rc; use std::sync::{Once, ONCE_INIT}; use std::time::Duration; +use std::sync::mpsc; mod code_stats; pub mod config; @@ -62,10 +66,10 @@ pub struct Session { pub opts: config::Options, pub parse_sess: ParseSess, /// For a library crate, this is always none - pub entry_fn: RefCell>, - pub entry_type: Cell>, - pub plugin_registrar_fn: Cell>, - pub derive_registrar_fn: Cell>, + pub entry_fn: Lock>, + pub entry_type: LockCell>, + pub plugin_registrar_fn: LockCell>, + pub derive_registrar_fn: LockCell>, pub default_sysroot: Option, /// The name of the root source file of the crate, in the local file system. /// `None` means that there is no source file. @@ -73,88 +77,94 @@ pub struct Session { /// The directory the compiler has been executed in plus a flag indicating /// if the value stored here has been affected by path remapping. pub working_dir: (String, bool), - pub lint_store: RefCell, - pub buffered_lints: RefCell>, + pub lint_store: RwLock, + pub buffered_lints: Lock>, /// Set of (DiagnosticId, Option, message) tuples tracking /// (sub)diagnostics that have been set once, but should not be set again, /// in order to avoid redundantly verbose output (Issue #24690, #44953). 
- pub one_time_diagnostics: RefCell, String)>>, - pub plugin_llvm_passes: RefCell>, - pub plugin_attributes: RefCell>, - pub crate_types: RefCell>, - pub dependency_formats: RefCell, + pub one_time_diagnostics: Lock, String)>>, + pub plugin_llvm_passes: Lock>, + pub plugin_attributes: Lock>, + pub crate_types: RwLock>, + pub dependency_formats: Lock, /// The crate_disambiguator is constructed out of all the `-C metadata` /// arguments passed to the compiler. Its value together with the crate-name /// forms a unique global identifier for the crate. It is used to allow /// multiple crates with the same name to coexist. See the /// trans::back::symbol_names module for more information. - pub crate_disambiguator: RefCell>, - pub features: RefCell, + pub crate_disambiguator: Lock>, + pub features: RwLock, /// The maximum recursion limit for potentially infinitely recursive /// operations such as auto-dereference and monomorphization. - pub recursion_limit: Cell, + pub recursion_limit: LockCell, /// The maximum length of types during monomorphization. - pub type_length_limit: Cell, + pub type_length_limit: LockCell, /// The metadata::creader module may inject an allocator/panic_runtime /// dependency if it didn't already find one, and this tracks what was /// injected. - pub injected_allocator: Cell>, - pub allocator_kind: Cell>, - pub injected_panic_runtime: Cell>, + pub injected_allocator: LockCell>, + pub allocator_kind: LockCell>, + pub injected_panic_runtime: LockCell>, /// Map from imported macro spans (which consist of /// the localized span for the macro body) to the /// macro name and definition span in the source crate. - pub imported_macro_spans: RefCell>, + pub imported_macro_spans: Lock>, + + incr_comp_session: RwLock, + + /// A cache of attributes ignored by StableHashingContext + pub ignored_attr_names: FxHashSet, - incr_comp_session: RefCell, + /// Used by -Z profile-queries in util::common + pub profile_channel: Lock>>, /// Some measurements that are being gathered during compilation. pub perf_stats: PerfStats, /// Data about code being compiled, gathered during compilation. - pub code_stats: RefCell, + pub code_stats: Lock, - next_node_id: Cell, + next_node_id: LockCell, /// If -zfuel=crate=n is specified, Some(crate). optimization_fuel_crate: Option, /// If -zfuel=crate=n is specified, initially set to n. Otherwise 0. - optimization_fuel_limit: Cell, + optimization_fuel_limit: LockCell, /// We're rejecting all further optimizations. - out_of_fuel: Cell, + out_of_fuel: LockCell, // The next two are public because the driver needs to read them. /// If -zprint-fuel=crate, Some(crate). pub print_fuel_crate: Option, /// Always set to zero and incremented so that we can print fuel expended by a crate. - pub print_fuel: Cell, + pub print_fuel: LockCell, /// Loaded up early on in the initialization of this `Session` to avoid /// false positives about a job server in our environment. pub jobserver_from_env: Option, /// Metadata about the allocators for the current crate being compiled - pub has_global_allocator: Cell, + pub has_global_allocator: LockCell, } pub struct PerfStats { /// The accumulated time needed for computing the SVH of the crate - pub svh_time: Cell, + pub svh_time: LockCell, /// The accumulated time spent on computing incr. comp. hashes - pub incr_comp_hashes_time: Cell, + pub incr_comp_hashes_time: LockCell, /// The number of incr. comp. 
hash computations performed - pub incr_comp_hashes_count: Cell, + pub incr_comp_hashes_count: LockCell, /// The number of bytes hashed when computing ICH values - pub incr_comp_bytes_hashed: Cell, + pub incr_comp_bytes_hashed: LockCell, /// The accumulated time spent on computing symbol hashes - pub symbol_hash_time: Cell, + pub symbol_hash_time: LockCell, /// The accumulated time spent decoding def path tables from metadata - pub decode_def_path_tables_time: Cell, + pub decode_def_path_tables_time: LockCell, } /// Enum to support dispatch of one-time diagnostics (in Session.diag_once) @@ -577,9 +587,9 @@ impl Session { }; } - pub fn incr_comp_session_dir(&self) -> cell::Ref { + pub fn incr_comp_session_dir(&self) -> ReadGuard { let incr_comp_session = self.incr_comp_session.borrow(); - cell::Ref::map(incr_comp_session, |incr_comp_session| { + ReadGuard::map(incr_comp_session, |incr_comp_session| { match *incr_comp_session { IncrCompSession::NotInitialized => { bug!("Trying to get session directory from IncrCompSession `{:?}`", @@ -594,7 +604,7 @@ impl Session { }) } - pub fn incr_comp_session_dir_opt(&self) -> Option> { + pub fn incr_comp_session_dir_opt(&self) -> Option> { if self.opts.incremental.is_some() { Some(self.incr_comp_session_dir()) } else { @@ -646,6 +656,12 @@ impl Session { ret } + /// Returns the number of query threads that should be used for this + /// compilation + pub fn query_threads(&self) -> usize { + self.opts.debugging_opts.query_threads.unwrap_or(1) + } + /// Returns the number of codegen units that should be used for this /// compilation pub fn codegen_units(&self) -> usize { @@ -754,14 +770,14 @@ pub fn build_session(sopts: config::Options, build_session_with_codemap(sopts, local_crate_source_file, registry, - Rc::new(codemap::CodeMap::new(file_path_mapping)), + Lrc::new(codemap::CodeMap::new(file_path_mapping)), None) } pub fn build_session_with_codemap(sopts: config::Options, local_crate_source_file: Option, registry: errors::registry::Registry, - codemap: Rc, + codemap: Lrc, emitter_dest: Option>) -> Session { // FIXME: This is not general enough to make the warning lint completely override @@ -781,7 +797,7 @@ pub fn build_session_with_codemap(sopts: config::Options, let external_macro_backtrace = sopts.debugging_opts.external_macro_backtrace; - let emitter: Box = match (sopts.error_format, emitter_dest) { + let emitter: Box = match (sopts.error_format, emitter_dest) { (config::ErrorOutputType::HumanReadable(color_config), None) => { Box::new(EmitterWriter::stderr(color_config, Some(codemap.clone()), false)) } @@ -821,7 +837,7 @@ pub fn build_session_with_codemap(sopts: config::Options, pub fn build_session_(sopts: config::Options, local_crate_source_file: Option, span_diagnostic: errors::Handler, - codemap: Rc) + codemap: Lrc) -> Session { let host = match Target::search(config::host_triple()) { Ok(t) => t, @@ -844,10 +860,10 @@ pub fn build_session_(sopts: config::Options, }); let optimization_fuel_crate = sopts.debugging_opts.fuel.as_ref().map(|i| i.0.clone()); - let optimization_fuel_limit = Cell::new(sopts.debugging_opts.fuel.as_ref() + let optimization_fuel_limit = LockCell::new(sopts.debugging_opts.fuel.as_ref() .map(|i| i.1).unwrap_or(0)); let print_fuel_crate = sopts.debugging_opts.print_fuel.clone(); - let print_fuel = Cell::new(0); + let print_fuel = LockCell::new(0); let working_dir = match env::current_dir() { Ok(dir) => dir.to_string_lossy().into_owned(), @@ -863,44 +879,46 @@ pub fn build_session_(sopts: config::Options, opts: sopts, 
parse_sess: p_s, // For a library crate, this is always none - entry_fn: RefCell::new(None), - entry_type: Cell::new(None), - plugin_registrar_fn: Cell::new(None), - derive_registrar_fn: Cell::new(None), + entry_fn: Lock::new(None), + entry_type: LockCell::new(None), + plugin_registrar_fn: LockCell::new(None), + derive_registrar_fn: LockCell::new(None), default_sysroot, local_crate_source_file, working_dir, - lint_store: RefCell::new(lint::LintStore::new()), - buffered_lints: RefCell::new(Some(lint::LintBuffer::new())), - one_time_diagnostics: RefCell::new(FxHashSet()), - plugin_llvm_passes: RefCell::new(Vec::new()), - plugin_attributes: RefCell::new(Vec::new()), - crate_types: RefCell::new(Vec::new()), - dependency_formats: RefCell::new(FxHashMap()), - crate_disambiguator: RefCell::new(None), - features: RefCell::new(feature_gate::Features::new()), - recursion_limit: Cell::new(64), - type_length_limit: Cell::new(1048576), - next_node_id: Cell::new(NodeId::new(1)), - injected_allocator: Cell::new(None), - allocator_kind: Cell::new(None), - injected_panic_runtime: Cell::new(None), - imported_macro_spans: RefCell::new(HashMap::new()), - incr_comp_session: RefCell::new(IncrCompSession::NotInitialized), + lint_store: RwLock::new(lint::LintStore::new()), + buffered_lints: Lock::new(Some(lint::LintBuffer::new())), + one_time_diagnostics: Lock::new(FxHashSet()), + plugin_llvm_passes: Lock::new(Vec::new()), + plugin_attributes: Lock::new(Vec::new()), + crate_types: RwLock::new(Vec::new()), + dependency_formats: Lock::new(FxHashMap()), + crate_disambiguator: Lock::new(None), + features: RwLock::new(feature_gate::Features::new()), + recursion_limit: LockCell::new(64), + type_length_limit: LockCell::new(1048576), + next_node_id: LockCell::new(NodeId::new(1)), + injected_allocator: LockCell::new(None), + allocator_kind: LockCell::new(None), + injected_panic_runtime: LockCell::new(None), + imported_macro_spans: Lock::new(HashMap::new()), + incr_comp_session: RwLock::new(IncrCompSession::NotInitialized), + ignored_attr_names: ich::compute_ignored_attr_names(), + profile_channel: Lock::new(None), perf_stats: PerfStats { - svh_time: Cell::new(Duration::from_secs(0)), - incr_comp_hashes_time: Cell::new(Duration::from_secs(0)), - incr_comp_hashes_count: Cell::new(0), - incr_comp_bytes_hashed: Cell::new(0), - symbol_hash_time: Cell::new(Duration::from_secs(0)), - decode_def_path_tables_time: Cell::new(Duration::from_secs(0)), + svh_time: LockCell::new(Duration::from_secs(0)), + incr_comp_hashes_time: LockCell::new(Duration::from_secs(0)), + incr_comp_hashes_count: LockCell::new(0), + incr_comp_bytes_hashed: LockCell::new(0), + symbol_hash_time: LockCell::new(Duration::from_secs(0)), + decode_def_path_tables_time: LockCell::new(Duration::from_secs(0)), }, - code_stats: RefCell::new(CodeStats::new()), + code_stats: Lock::new(CodeStats::new()), optimization_fuel_crate, optimization_fuel_limit, print_fuel_crate, print_fuel, - out_of_fuel: Cell::new(false), + out_of_fuel: LockCell::new(false), // Note that this is unsafe because it may misinterpret file descriptors // on Unix as jobserver file descriptors. We hopefully execute this near // the beginning of the process though to ensure we don't get false @@ -918,7 +936,7 @@ pub fn build_session_(sopts: config::Options, }); (*GLOBAL_JOBSERVER).clone() }, - has_global_allocator: Cell::new(false), + has_global_allocator: LockCell::new(false), }; sess @@ -971,7 +989,7 @@ pub enum IncrCompSession { } pub fn early_error(output: config::ErrorOutputType, msg: &str) -> ! 
{ - let emitter: Box = match output { + let emitter: Box = match output { config::ErrorOutputType::HumanReadable(color_config) => { Box::new(EmitterWriter::stderr(color_config, None, false)) } @@ -986,7 +1004,7 @@ pub fn early_error(output: config::ErrorOutputType, msg: &str) -> ! { } pub fn early_warn(output: config::ErrorOutputType, msg: &str) { - let emitter: Box = match output { + let emitter: Box = match output { config::ErrorOutputType::HumanReadable(color_config) => { Box::new(EmitterWriter::stderr(color_config, None, false)) } diff --git a/src/librustc/traits/mod.rs b/src/librustc/traits/mod.rs index 94605d895a554..c7bef6c6e2c6b 100644 --- a/src/librustc/traits/mod.rs +++ b/src/librustc/traits/mod.rs @@ -25,6 +25,7 @@ use ty::{self, AdtKind, Ty, TyCtxt, TypeFoldable, ToPredicate}; use ty::error::{ExpectedFound, TypeError}; use infer::{InferCtxt}; +use rustc_data_structures::sync::Lrc; use std::rc::Rc; use syntax::ast; use syntax_pos::{Span, DUMMY_SP}; @@ -693,11 +694,11 @@ pub fn normalize_and_test_predicates<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, fn vtable_methods<'a, 'tcx>( tcx: TyCtxt<'a, 'tcx, 'tcx>, trait_ref: ty::PolyTraitRef<'tcx>) - -> Rc)>>> + -> Lrc)>>> { debug!("vtable_methods({:?})", trait_ref); - Rc::new( + Lrc::new( supertraits(tcx, trait_ref).flat_map(move |trait_ref| { let trait_methods = tcx.associated_items(trait_ref.def_id()) .filter(|item| item.kind == ty::AssociatedKind::Method); diff --git a/src/librustc/traits/select.rs b/src/librustc/traits/select.rs index e70de0e566e41..c724db448a8eb 100644 --- a/src/librustc/traits/select.rs +++ b/src/librustc/traits/select.rs @@ -41,10 +41,10 @@ use ty::fast_reject; use ty::relate::TypeRelation; use middle::lang_items; +use rustc_data_structures::sync::Lock; use rustc_data_structures::bitvec::BitVector; use rustc_data_structures::snapshot_vec::{SnapshotVecDelegate, SnapshotVec}; use std::iter; -use std::cell::RefCell; use std::cmp; use std::fmt; use std::marker::PhantomData; @@ -148,7 +148,7 @@ struct TraitObligationStack<'prev, 'tcx: 'prev> { #[derive(Clone)] pub struct SelectionCache<'tcx> { - hashmap: RefCell, + hashmap: Lock, WithDepNode>>>>, } @@ -413,7 +413,7 @@ impl EvaluationResult { #[derive(Clone)] pub struct EvaluationCache<'tcx> { - hashmap: RefCell, WithDepNode>> + hashmap: Lock, WithDepNode>> } impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { @@ -3303,7 +3303,7 @@ impl<'tcx> TraitObligation<'tcx> { impl<'tcx> SelectionCache<'tcx> { pub fn new() -> SelectionCache<'tcx> { SelectionCache { - hashmap: RefCell::new(FxHashMap()) + hashmap: Lock::new(FxHashMap()) } } } @@ -3311,7 +3311,7 @@ impl<'tcx> SelectionCache<'tcx> { impl<'tcx> EvaluationCache<'tcx> { pub fn new() -> EvaluationCache<'tcx> { EvaluationCache { - hashmap: RefCell::new(FxHashMap()) + hashmap: Lock::new(FxHashMap()) } } } diff --git a/src/librustc/traits/specialize/mod.rs b/src/librustc/traits/specialize/mod.rs index 6a96d01d5f92a..37bb3776fb212 100644 --- a/src/librustc/traits/specialize/mod.rs +++ b/src/librustc/traits/specialize/mod.rs @@ -28,7 +28,7 @@ use traits::{self, Reveal, ObligationCause}; use traits::select::IntercrateAmbiguityCause; use ty::{self, TyCtxt, TypeFoldable}; use syntax_pos::DUMMY_SP; -use std::rc::Rc; +use rustc_data_structures::sync::Lrc; use lint; @@ -306,7 +306,7 @@ impl SpecializesCache { // Query provider for `specialization_graph_of`. 
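Editor's note: the `traits::select` hunks just above swap the `RefCell` interiors of `SelectionCache` and `EvaluationCache` for the new `Lock` type so the caches can be shared between query threads. A minimal standalone sketch of that pattern, using `std::sync::Mutex` and `HashMap` in place of the compiler's `Lock` and `FxHashMap` (the names below are illustrative only, not rustc's):

use std::collections::HashMap;
use std::hash::Hash;
use std::sync::Mutex;

struct Cache<K, V> {
    map: Mutex<HashMap<K, V>>,
}

impl<K: Hash + Eq, V: Clone> Cache<K, V> {
    fn new() -> Self {
        Cache { map: Mutex::new(HashMap::new()) }
    }

    fn insert(&self, key: K, value: V) {
        // A RefCell would use borrow_mut() here and panic on reentrant use;
        // the mutex simply serializes concurrent writers.
        self.map.lock().unwrap().insert(key, value);
    }

    fn get(&self, key: &K) -> Option<V> {
        self.map.lock().unwrap().get(key).cloned()
    }
}

fn main() {
    let cache = Cache::new();
    cache.insert("obligation", true);
    assert_eq!(cache.get(&"obligation"), Some(true));
}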
pub(super) fn specialization_graph_provider<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, trait_id: DefId) - -> Rc { + -> Lrc { let mut sg = specialization_graph::Graph::new(); let mut trait_impls = Vec::new(); @@ -386,7 +386,7 @@ pub(super) fn specialization_graph_provider<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx } } - Rc::new(sg) + Lrc::new(sg) } /// Recovers the "impl X for Y" signature from `impl_def_id` and returns it as a diff --git a/src/librustc/traits/specialize/specialization_graph.rs b/src/librustc/traits/specialize/specialization_graph.rs index 834389e5d009c..e55d4614a2ee3 100644 --- a/src/librustc/traits/specialize/specialization_graph.rs +++ b/src/librustc/traits/specialize/specialization_graph.rs @@ -17,7 +17,7 @@ use rustc_data_structures::stable_hasher::{HashStable, StableHasher, use traits; use ty::{self, TyCtxt, TypeFoldable}; use ty::fast_reject::{self, SimplifiedType}; -use std::rc::Rc; +use rustc_data_structures::sync::Lrc; use syntax::ast::Name; use util::nodemap::{DefIdMap, FxHashMap}; @@ -330,7 +330,7 @@ impl<'a, 'gcx, 'tcx> Node { pub struct Ancestors { trait_def_id: DefId, - specialization_graph: Rc, + specialization_graph: Lrc, current_source: Option, } diff --git a/src/librustc/ty/context.rs b/src/librustc/ty/context.rs index ce05acb01b001..c6a0a73664e15 100644 --- a/src/librustc/ty/context.rs +++ b/src/librustc/ty/context.rs @@ -54,16 +54,15 @@ use rustc_data_structures::stable_hasher::{HashStable, hash_stable_hashmap, use arena::{TypedArena, DroplessArena}; use rustc_const_math::{ConstInt, ConstUsize}; use rustc_data_structures::indexed_vec::IndexVec; +use rustc_data_structures::sync::{Sync, Lrc, Lock, LockCell}; use std::any::Any; use std::borrow::Borrow; -use std::cell::{Cell, RefCell}; use std::cmp::Ordering; use std::collections::hash_map::{self, Entry}; use std::hash::{Hash, Hasher}; use std::mem; use std::ops::Deref; use std::iter; -use std::rc::Rc; use std::sync::mpsc; use std::sync::Arc; use syntax::abi; @@ -75,6 +74,20 @@ use syntax_pos::Span; use hir; +pub struct AllArenas<'tcx> { + pub global: GlobalArenas<'tcx>, + pub interner: DroplessArena, +} + +impl<'tcx> AllArenas<'tcx> { + pub fn new() -> Self { + AllArenas { + global: GlobalArenas::new(), + interner: DroplessArena::new(), + } + } +} + /// Internal storage pub struct GlobalArenas<'tcx> { // internings @@ -109,26 +122,26 @@ pub struct CtxtInterners<'tcx> { /// Specifically use a speedy hash algorithm for these hash sets, /// they're accessed quite often. 
- type_: RefCell>>>, - type_list: RefCell>>>>, - substs: RefCell>>>, - region: RefCell>>, - existential_predicates: RefCell>>>>, - predicates: RefCell>>>>, - const_: RefCell>>>, + type_: Lock>>>, + type_list: Lock>>>>, + substs: Lock>>>, + region: Lock>>, + existential_predicates: Lock>>>>, + predicates: Lock>>>>, + const_: Lock>>>, } impl<'gcx: 'tcx, 'tcx> CtxtInterners<'tcx> { fn new(arena: &'tcx DroplessArena) -> CtxtInterners<'tcx> { CtxtInterners { - arena, - type_: RefCell::new(FxHashSet()), - type_list: RefCell::new(FxHashSet()), - substs: RefCell::new(FxHashSet()), - region: RefCell::new(FxHashSet()), - existential_predicates: RefCell::new(FxHashSet()), - predicates: RefCell::new(FxHashSet()), - const_: RefCell::new(FxHashSet()), + arena: arena, + type_: Lock::new(FxHashSet()), + type_list: Lock::new(FxHashSet()), + substs: Lock::new(FxHashSet()), + region: Lock::new(FxHashSet()), + existential_predicates: Lock::new(FxHashSet()), + predicates: Lock::new(FxHashSet()), + const_: Lock::new(FxHashSet()), } } @@ -379,9 +392,9 @@ pub struct TypeckTables<'tcx> { /// Set of trait imports actually used in the method resolution. /// This is used for warning unused imports. During type - /// checking, this `Rc` should not be cloned: it must have a ref-count + /// checking, this `Lrc` should not be cloned: it must have a ref-count /// of 1 so that we can insert things into the set mutably. - pub used_trait_imports: Rc, + pub used_trait_imports: Lrc, /// If any errors occurred while type-checking this body, /// this field will be set to `true`. @@ -408,7 +421,7 @@ impl<'tcx> TypeckTables<'tcx> { liberated_fn_sigs: ItemLocalMap(), fru_field_types: ItemLocalMap(), cast_kinds: ItemLocalMap(), - used_trait_imports: Rc::new(DefIdSet()), + used_trait_imports: Lrc::new(DefIdSet()), tainted_by_errors: false, free_region_map: FreeRegionMap::new(), } @@ -779,7 +792,7 @@ pub struct GlobalCtxt<'tcx> { global_arenas: &'tcx GlobalArenas<'tcx>, global_interners: CtxtInterners<'tcx>, - cstore: &'tcx CrateStore, + cstore: &'tcx (CrateStore + Sync), pub sess: &'tcx Session, @@ -796,11 +809,11 @@ pub struct GlobalCtxt<'tcx> { /// Map indicating what traits are in scope for places where this /// is relevant; generated by resolve. trait_map: FxHashMap>>>>, + Lrc>>>>, /// Export map produced by name resolution. - export_map: FxHashMap>>, + export_map: FxHashMap>>, named_region_map: NamedRegionMap, @@ -815,14 +828,14 @@ pub struct GlobalCtxt<'tcx> { // Records the free variables refrenced by every closure // expression. Do not track deps for this, just recompute it from // scratch every time. - freevars: FxHashMap>>, + freevars: FxHashMap>>, maybe_unused_trait_imports: FxHashSet, maybe_unused_extern_crates: Vec<(DefId, Span)>, // Internal cache for metadata decoding. No need to track deps on this. - pub rcache: RefCell>>, + pub rcache: Lock>>, /// Caches the results of trait selection. This cache is used /// for things that do not have to do with the parameters in scope. @@ -841,21 +854,21 @@ pub struct GlobalCtxt<'tcx> { pub data_layout: TargetDataLayout, /// Used to prevent layout from recursing too deeply. - pub layout_depth: Cell, + pub layout_depth: LockCell, /// Map from function to the `#[derive]` mode that it's defining. Only used /// by `proc-macro` crates. - pub derive_macros: RefCell>, + pub derive_macros: Lock>, - stability_interner: RefCell>, + stability_interner: Lock>, - layout_interner: RefCell>, + layout_interner: Lock>, /// A vector of every trait accessible in the whole crate /// (i.e. 
including those from subcrates). This is used only for /// error reporting, and so is lazily initialized and generally - /// shouldn't taint the common path (hence the RefCell). - pub all_traits: RefCell>>, + /// shouldn't taint the common path (hence the Lock). + pub all_traits: Lock>>, /// A general purpose channel to throw data out the back towards LLVM worker /// threads. @@ -863,7 +876,7 @@ pub struct GlobalCtxt<'tcx> { /// This is intended to only get used during the trans phase of the compiler /// when satisfying the query for a particular codegen unit. Internally in /// the query it'll send data along this channel to get processed later. - pub tx_to_llvm_workers: mpsc::Sender>, + pub tx_to_llvm_workers: Lock>>, output_filenames: Arc, } @@ -980,11 +993,10 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { /// value (types, substs, etc.) can only be used while `ty::tls` has a valid /// reference to the context, to allow formatting values that need it. pub fn create_and_enter(s: &'tcx Session, - cstore: &'tcx CrateStore, + cstore: &'tcx (CrateStore + Sync), local_providers: ty::maps::Providers<'tcx>, extern_providers: ty::maps::Providers<'tcx>, - arenas: &'tcx GlobalArenas<'tcx>, - arena: &'tcx DroplessArena, + arenas: &'tcx AllArenas<'tcx>, resolutions: ty::Resolutions, named_region_map: resolve_lifetime::NamedRegionMap, hir: hir_map::Map<'tcx>, @@ -996,7 +1008,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { where F: for<'b> FnOnce(TyCtxt<'b, 'tcx, 'tcx>) -> R { let data_layout = TargetDataLayout::parse(s); - let interners = CtxtInterners::new(arena); + let interners = CtxtInterners::new(&arenas.interner); let common_types = CommonTypes::new(&interners); let dep_graph = hir.dep_graph.clone(); let max_cnum = cstore.crates_untracked().iter().map(|c| c.as_usize()).max().unwrap_or(0); @@ -1004,7 +1016,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { providers[LOCAL_CRATE] = local_providers; let def_path_hash_to_def_id = if s.opts.build_dep_graph() { - let upstream_def_path_tables: Vec<(CrateNum, Rc<_>)> = cstore + let upstream_def_path_tables: Vec<(CrateNum, Lrc<_>)> = cstore .crates_untracked() .iter() .map(|&cnum| (cnum, cstore.def_path_table(cnum))) @@ -1039,37 +1051,39 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { for (k, v) in resolutions.trait_map { let hir_id = hir.node_to_hir_id(k); let map = trait_map.entry(hir_id.owner) - .or_insert_with(|| Rc::new(FxHashMap())); - Rc::get_mut(map).unwrap() + .or_insert_with(|| Lrc::new(FxHashMap())); + Lrc::get_mut(map).unwrap() .insert(hir_id.local_id, - Rc::new(StableVec::new(v))); + Lrc::new(StableVec::new(v))); } let mut defs = FxHashMap(); for (k, v) in named_region_map.defs { let hir_id = hir.node_to_hir_id(k); let map = defs.entry(hir_id.owner) - .or_insert_with(|| Rc::new(FxHashMap())); - Rc::get_mut(map).unwrap().insert(hir_id.local_id, v); + .or_insert_with(|| Lrc::new(FxHashMap())); + Lrc::get_mut(map).unwrap().insert(hir_id.local_id, v); } let mut late_bound = FxHashMap(); for k in named_region_map.late_bound { let hir_id = hir.node_to_hir_id(k); let map = late_bound.entry(hir_id.owner) - .or_insert_with(|| Rc::new(FxHashSet())); - Rc::get_mut(map).unwrap().insert(hir_id.local_id); + .or_insert_with(|| Lrc::new(FxHashSet())); + Lrc::get_mut(map).unwrap().insert(hir_id.local_id); } let mut object_lifetime_defaults = FxHashMap(); for (k, v) in named_region_map.object_lifetime_defaults { let hir_id = hir.node_to_hir_id(k); let map = object_lifetime_defaults.entry(hir_id.owner) - .or_insert_with(|| Rc::new(FxHashMap())); - 
Rc::get_mut(map).unwrap().insert(hir_id.local_id, Rc::new(v)); + .or_insert_with(|| Lrc::new(FxHashMap())); + Lrc::get_mut(map).unwrap().insert(hir_id.local_id, Lrc::new(v)); } + ::rustc_data_structures::sync::assert_sync::(); + tls::enter_global(GlobalCtxt { sess: s, cstore, - global_arenas: arenas, + global_arenas: &arenas.global, global_interners: interners, dep_graph: dep_graph.clone(), on_disk_query_result_cache, @@ -1081,10 +1095,10 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { }, trait_map, export_map: resolutions.export_map.into_iter().map(|(k, v)| { - (k, Rc::new(v)) + (k, Lrc::new(v)) }).collect(), freevars: resolutions.freevars.into_iter().map(|(k, v)| { - (hir.local_def_id(k), Rc::new(v)) + (hir.local_def_id(k), Lrc::new(v)) }).collect(), maybe_unused_trait_imports: resolutions.maybe_unused_trait_imports @@ -1099,17 +1113,17 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { hir, def_path_hash_to_def_id, maps: maps::Maps::new(providers), - rcache: RefCell::new(FxHashMap()), + rcache: Lock::new(FxHashMap()), selection_cache: traits::SelectionCache::new(), evaluation_cache: traits::EvaluationCache::new(), crate_name: Symbol::intern(crate_name), data_layout, - layout_interner: RefCell::new(FxHashSet()), - layout_depth: Cell::new(0), - derive_macros: RefCell::new(NodeMap()), - stability_interner: RefCell::new(FxHashSet()), - all_traits: RefCell::new(None), - tx_to_llvm_workers: tx, + layout_interner: Lock::new(FxHashSet()), + layout_depth: LockCell::new(0), + derive_macros: Lock::new(NodeMap()), + stability_interner: Lock::new(FxHashSet()), + all_traits: Lock::new(None), + tx_to_llvm_workers: Lock::new(tx), output_filenames: Arc::new(output_filenames.clone()), }, f) } @@ -1119,15 +1133,15 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { self.sess.consider_optimizing(&cname, msg) } - pub fn lang_items(self) -> Rc { + pub fn lang_items(self) -> Lrc { self.get_lang_items(LOCAL_CRATE) } - pub fn stability(self) -> Rc> { + pub fn stability(self) -> Lrc> { self.stability_index(LOCAL_CRATE) } - pub fn crates(self) -> Rc> { + pub fn crates(self) -> Lrc> { self.all_crate_nums(LOCAL_CRATE) } @@ -1188,7 +1202,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { // Note that this is *untracked* and should only be used within the query // system if the result is otherwise tracked through queries - pub fn crate_data_as_rc_any(self, cnum: CrateNum) -> Rc { + pub fn crate_data_as_rc_any(self, cnum: CrateNum) -> Lrc { self.cstore.crate_data_as_rc_any(cnum) } @@ -2108,7 +2122,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { lint::struct_lint_level(self.sess, lint, level, src, None, msg) } - pub fn in_scope_traits(self, id: HirId) -> Option>> { + pub fn in_scope_traits(self, id: HirId) -> Option>> { self.in_scope_traits_map(id.owner) .and_then(|map| map.get(&id.local_id).cloned()) } @@ -2125,7 +2139,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } pub fn object_lifetime_defaults(self, id: HirId) - -> Option>> + -> Option>> { self.object_lifetime_defaults_map(id.owner) .and_then(|map| map.get(&id.local_id).cloned()) @@ -2177,12 +2191,12 @@ impl InternIteratorElement for Result { } struct NamedRegionMap { - defs: FxHashMap>>, - late_bound: FxHashMap>>, + defs: FxHashMap>>, + late_bound: FxHashMap>>, object_lifetime_defaults: FxHashMap< DefIndex, - Rc>>>, + Lrc>>>, >, } @@ -2211,7 +2225,7 @@ pub fn provide(providers: &mut ty::maps::Providers) { // Once red/green incremental compilation lands we should be able to // remove this because while the crate changes often the lint level map // will change 
rarely. - tcx.dep_graph.with_ignore(|| Rc::new(middle::lang_items::collect(tcx))) + tcx.dep_graph.with_ignore(|| Lrc::new(middle::lang_items::collect(tcx))) }; providers.freevars = |tcx, id| tcx.gcx.freevars.get(&id).cloned(); providers.maybe_unused_trait_import = |tcx, id| { @@ -2219,12 +2233,12 @@ pub fn provide(providers: &mut ty::maps::Providers) { }; providers.maybe_unused_extern_crates = |tcx, cnum| { assert_eq!(cnum, LOCAL_CRATE); - Rc::new(tcx.maybe_unused_extern_crates.clone()) + Lrc::new(tcx.maybe_unused_extern_crates.clone()) }; providers.stability_index = |tcx, cnum| { assert_eq!(cnum, LOCAL_CRATE); - Rc::new(stability::Index::new(tcx)) + Lrc::new(stability::Index::new(tcx)) }; providers.lookup_stability = |tcx, id| { assert_eq!(id.krate, LOCAL_CRATE); @@ -2242,11 +2256,11 @@ pub fn provide(providers: &mut ty::maps::Providers) { }; providers.all_crate_nums = |tcx, cnum| { assert_eq!(cnum, LOCAL_CRATE); - Rc::new(tcx.cstore.crates_untracked()) + Lrc::new(tcx.cstore.crates_untracked()) }; providers.postorder_cnums = |tcx, cnum| { assert_eq!(cnum, LOCAL_CRATE); - Rc::new(tcx.cstore.postorder_cnums_untracked()) + Lrc::new(tcx.cstore.postorder_cnums_untracked()) }; providers.output_filenames = |tcx, cnum| { assert_eq!(cnum, LOCAL_CRATE); diff --git a/src/librustc/ty/maps/mod.rs b/src/librustc/ty/maps/mod.rs index 848d2a0a7def7..2cbdfe87967d2 100644 --- a/src/librustc/ty/maps/mod.rs +++ b/src/librustc/ty/maps/mod.rs @@ -46,7 +46,7 @@ use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use rustc_data_structures::stable_hasher::StableVec; use std::ops::Deref; -use std::rc::Rc; +use rustc_data_structures::sync::Lrc; use std::sync::Arc; use syntax_pos::{Span, DUMMY_SP}; use syntax_pos::symbol::InternedString; @@ -117,17 +117,17 @@ define_maps! { <'tcx> /// Get a map with the variance of every item; use `item_variance` /// instead. - [] fn crate_variances: crate_variances(CrateNum) -> Rc, + [] fn crate_variances: crate_variances(CrateNum) -> Lrc, /// Maps from def-id of a type or region parameter to its /// (inferred) variance. - [] fn variances_of: ItemVariances(DefId) -> Rc>, + [] fn variances_of: ItemVariances(DefId) -> Lrc>, /// Maps from def-id of a type to its (inferred) outlives. [] fn inferred_outlives_of: InferredOutlivesOf(DefId) -> Vec>, /// Maps from an impl/trait def-id to a list of the def-ids of its items - [] fn associated_item_def_ids: AssociatedItemDefIds(DefId) -> Rc>, + [] fn associated_item_def_ids: AssociatedItemDefIds(DefId) -> Lrc>, /// Maps from a trait item to the trait item "descriptor" [] fn associated_item: AssociatedItems(DefId) -> ty::AssociatedItem, @@ -138,17 +138,17 @@ define_maps! { <'tcx> /// Maps a DefId of a type to a list of its inherent impls. /// Contains implementations of methods that are inherent to a type. /// Methods in these implementations don't need to be exported. - [] fn inherent_impls: InherentImpls(DefId) -> Rc>, + [] fn inherent_impls: InherentImpls(DefId) -> Lrc>, /// Set of all the def-ids in this crate that have MIR associated with /// them. This includes all the body owners, but also things like struct /// constructors. - [] fn mir_keys: mir_keys(CrateNum) -> Rc, + [] fn mir_keys: mir_keys(CrateNum) -> Lrc, /// Maps DefId's that have an associated Mir to the result /// of the MIR qualify_consts pass. The actual meaning of /// the value isn't known except to the pass itself. 
- [] fn mir_const_qualif: MirConstQualif(DefId) -> (u8, Rc>), + [] fn mir_const_qualif: MirConstQualif(DefId) -> (u8, Lrc>), /// Fetch the MIR for a given def-id right after it's built - this includes /// unreachable code. @@ -183,13 +183,13 @@ define_maps! { <'tcx> [] fn typeck_tables_of: TypeckTables(DefId) -> &'tcx ty::TypeckTables<'tcx>, - [] fn used_trait_imports: UsedTraitImports(DefId) -> Rc, + [] fn used_trait_imports: UsedTraitImports(DefId) -> Lrc, [] fn has_typeck_tables: HasTypeckTables(DefId) -> bool, [] fn coherent_trait: coherent_trait_dep_node((CrateNum, DefId)) -> (), - [] fn borrowck: BorrowCheck(DefId) -> Rc, + [] fn borrowck: BorrowCheck(DefId) -> Lrc, /// Borrow checks the function body. If this is a closure, returns /// additional requirements that the closure's creator must verify. @@ -214,13 +214,13 @@ define_maps! { <'tcx> -> Result<(), ErrorReported>, /// Performs the privacy check and computes "access levels". - [] fn privacy_access_levels: PrivacyAccessLevels(CrateNum) -> Rc, + [] fn privacy_access_levels: PrivacyAccessLevels(CrateNum) -> Lrc, [] fn reachable_set: reachability_dep_node(CrateNum) -> ReachableSet, /// Per-body `region::ScopeTree`. The `DefId` should be the owner-def-id for the body; /// in the case of closures, this will be redirected to the enclosing function. - [] fn region_scope_tree: RegionScopeTree(DefId) -> Rc, + [] fn region_scope_tree: RegionScopeTree(DefId) -> Lrc, [] fn mir_shims: mir_shim_dep_node(ty::InstanceDef<'tcx>) -> &'tcx mir::Mir<'tcx>, @@ -231,22 +231,22 @@ define_maps! { <'tcx> [] fn def_span: DefSpan(DefId) -> Span, [] fn lookup_stability: LookupStability(DefId) -> Option<&'tcx attr::Stability>, [] fn lookup_deprecation_entry: LookupDeprecationEntry(DefId) -> Option, - [] fn item_attrs: ItemAttrs(DefId) -> Rc<[ast::Attribute]>, + [] fn item_attrs: ItemAttrs(DefId) -> Lrc<[ast::Attribute]>, [] fn fn_arg_names: FnArgNames(DefId) -> Vec, [] fn impl_parent: ImplParent(DefId) -> Option, [] fn trait_of_item: TraitOfItem(DefId) -> Option, [] fn is_exported_symbol: IsExportedSymbol(DefId) -> bool, [] fn item_body_nested_bodies: ItemBodyNestedBodies(DefId) -> ExternBodyNestedBodies, [] fn const_is_rvalue_promotable_to_static: ConstIsRvaluePromotableToStatic(DefId) -> bool, - [] fn rvalue_promotable_map: RvaluePromotableMap(DefId) -> Rc, + [] fn rvalue_promotable_map: RvaluePromotableMap(DefId) -> Lrc, [] fn is_mir_available: IsMirAvailable(DefId) -> bool, [] fn vtable_methods: vtable_methods_node(ty::PolyTraitRef<'tcx>) - -> Rc)>>>, + -> Lrc)>>>, [] fn trans_fulfill_obligation: fulfill_obligation_dep_node( (ty::ParamEnv<'tcx>, ty::PolyTraitRef<'tcx>)) -> Vtable<'tcx, ()>, - [] fn trait_impls_of: TraitImpls(DefId) -> Rc, - [] fn specialization_graph_of: SpecializationGraph(DefId) -> Rc, + [] fn trait_impls_of: TraitImpls(DefId) -> Lrc, + [] fn specialization_graph_of: SpecializationGraph(DefId) -> Lrc, [] fn is_object_safe: ObjectSafety(DefId) -> bool, // Get the ParameterEnvironment for a given item; this environment @@ -268,7 +268,7 @@ define_maps! { <'tcx> ty::layout::LayoutError<'tcx>>, [] fn dylib_dependency_formats: DylibDepFormats(CrateNum) - -> Rc>, + -> Lrc>, [] fn is_panic_runtime: IsPanicRuntime(CrateNum) -> bool, [] fn is_compiler_builtins: IsCompilerBuiltins(CrateNum) -> bool, @@ -278,17 +278,17 @@ define_maps! 
{ <'tcx> [] fn panic_strategy: GetPanicStrategy(CrateNum) -> PanicStrategy, [] fn is_no_builtins: IsNoBuiltins(CrateNum) -> bool, - [] fn extern_crate: ExternCrate(DefId) -> Rc>, + [] fn extern_crate: ExternCrate(DefId) -> Lrc>, [] fn specializes: specializes_node((DefId, DefId)) -> bool, [] fn in_scope_traits_map: InScopeTraits(DefIndex) - -> Option>>>>, - [] fn module_exports: ModuleExports(DefId) -> Option>>, - [] fn lint_levels: lint_levels_node(CrateNum) -> Rc, + -> Option>>>>, + [] fn module_exports: ModuleExports(DefId) -> Option>>, + [] fn lint_levels: lint_levels_node(CrateNum) -> Lrc, [] fn impl_defaultness: ImplDefaultness(DefId) -> hir::Defaultness, - [] fn exported_symbol_ids: ExportedSymbolIds(CrateNum) -> Rc, - [] fn native_libraries: NativeLibraries(CrateNum) -> Rc>, + [] fn exported_symbol_ids: ExportedSymbolIds(CrateNum) -> Lrc, + [] fn native_libraries: NativeLibraries(CrateNum) -> Lrc>, [] fn plugin_registrar_fn: PluginRegistrarFn(CrateNum) -> Option, [] fn derive_registrar_fn: DeriveRegistrarFn(CrateNum) -> Option, [] fn crate_disambiguator: CrateDisambiguator(CrateNum) -> CrateDisambiguator, @@ -296,46 +296,46 @@ define_maps! { <'tcx> [] fn original_crate_name: OriginalCrateName(CrateNum) -> Symbol, [] fn implementations_of_trait: implementations_of_trait_node((CrateNum, DefId)) - -> Rc>, + -> Lrc>, [] fn all_trait_implementations: AllTraitImplementations(CrateNum) - -> Rc>, + -> Lrc>, [] fn is_dllimport_foreign_item: IsDllimportForeignItem(DefId) -> bool, [] fn is_statically_included_foreign_item: IsStaticallyIncludedForeignItem(DefId) -> bool, [] fn native_library_kind: NativeLibraryKind(DefId) -> Option, - [] fn link_args: link_args_node(CrateNum) -> Rc>, + [] fn link_args: link_args_node(CrateNum) -> Lrc>, [] fn named_region_map: NamedRegion(DefIndex) -> - Option>>, + Option>>, [] fn is_late_bound_map: IsLateBound(DefIndex) -> - Option>>, + Option>>, [] fn object_lifetime_defaults_map: ObjectLifetimeDefaults(DefIndex) - -> Option>>>>, + -> Option>>>>, [] fn visibility: Visibility(DefId) -> ty::Visibility, [] fn dep_kind: DepKind(CrateNum) -> DepKind, [] fn crate_name: CrateName(CrateNum) -> Symbol, - [] fn item_children: ItemChildren(DefId) -> Rc>, + [] fn item_children: ItemChildren(DefId) -> Lrc>, [] fn extern_mod_stmt_cnum: ExternModStmtCnum(DefId) -> Option, - [] fn get_lang_items: get_lang_items_node(CrateNum) -> Rc, - [] fn defined_lang_items: DefinedLangItems(CrateNum) -> Rc>, - [] fn missing_lang_items: MissingLangItems(CrateNum) -> Rc>, + [] fn get_lang_items: get_lang_items_node(CrateNum) -> Lrc, + [] fn defined_lang_items: DefinedLangItems(CrateNum) -> Lrc>, + [] fn missing_lang_items: MissingLangItems(CrateNum) -> Lrc>, [] fn extern_const_body: ExternConstBody(DefId) -> ExternConstBody<'tcx>, [] fn visible_parent_map: visible_parent_map_node(CrateNum) - -> Rc>, + -> Lrc>, [] fn missing_extern_crate_item: MissingExternCrateItem(CrateNum) -> bool, - [] fn used_crate_source: UsedCrateSource(CrateNum) -> Rc, - [] fn postorder_cnums: postorder_cnums_node(CrateNum) -> Rc>, + [] fn used_crate_source: UsedCrateSource(CrateNum) -> Lrc, + [] fn postorder_cnums: postorder_cnums_node(CrateNum) -> Lrc>, - [] fn freevars: Freevars(DefId) -> Option>>, + [] fn freevars: Freevars(DefId) -> Option>>, [] fn maybe_unused_trait_import: MaybeUnusedTraitImport(DefId) -> bool, [] fn maybe_unused_extern_crates: maybe_unused_extern_crates_node(CrateNum) - -> Rc>, + -> Lrc>, - [] fn stability_index: stability_index_node(CrateNum) -> Rc>, - [] fn all_crate_nums: 
all_crate_nums_node(CrateNum) -> Rc>, + [] fn stability_index: stability_index_node(CrateNum) -> Lrc>, + [] fn all_crate_nums: all_crate_nums_node(CrateNum) -> Lrc>, [] fn exported_symbols: ExportedSymbols(CrateNum) -> Arc, SymbolExportLevel)>>, diff --git a/src/librustc/ty/maps/on_disk_cache.rs b/src/librustc/ty/maps/on_disk_cache.rs index 079b518efd898..105b1703fa213 100644 --- a/src/librustc/ty/maps/on_disk_cache.rs +++ b/src/librustc/ty/maps/on_disk_cache.rs @@ -17,14 +17,13 @@ use hir::map::definitions::DefPathHash; use ich::CachingCodemapView; use mir; use rustc_data_structures::fx::FxHashMap; +use rustc_data_structures::sync::{Lrc, Lock}; use rustc_data_structures::indexed_vec::{IndexVec, Idx}; use rustc_serialize::{Decodable, Decoder, Encodable, Encoder, opaque, SpecializedDecoder, SpecializedEncoder, UseSpecializedDecodable, UseSpecializedEncodable}; use session::{CrateDisambiguator, Session}; -use std::cell::RefCell; use std::mem; -use std::rc::Rc; use syntax::ast::NodeId; use syntax::codemap::{CodeMap, StableFilemapId}; use syntax_pos::{BytePos, Span, DUMMY_SP, FileMap}; @@ -56,17 +55,17 @@ pub struct OnDiskCache<'sess> { // This field collects all Diagnostics emitted during the current // compilation session. - current_diagnostics: RefCell>>, + current_diagnostics: Lock>>, prev_cnums: Vec<(u32, String, CrateDisambiguator)>, - cnum_map: RefCell>>>, + cnum_map: Lock>>>, codemap: &'sess CodeMap, file_index_to_stable_id: FxHashMap, // These two fields caches that are populated lazily during decoding. - file_index_to_file: RefCell>>, - synthetic_expansion_infos: RefCell>, + file_index_to_file: Lock>>, + synthetic_expansion_infos: Lock>, // A map from dep-node to the position of the cached query result in // `serialized_data`. @@ -132,14 +131,14 @@ impl<'sess> OnDiskCache<'sess> { OnDiskCache { serialized_data: data, file_index_to_stable_id: footer.file_index_to_stable_id, - file_index_to_file: RefCell::new(FxHashMap()), + file_index_to_file: Lock::new(FxHashMap()), prev_cnums: footer.prev_cnums, - cnum_map: RefCell::new(None), + cnum_map: Lock::new(None), codemap: sess.codemap(), - current_diagnostics: RefCell::new(FxHashMap()), + current_diagnostics: Lock::new(FxHashMap()), query_result_index: footer.query_result_index.into_iter().collect(), prev_diagnostics_index: footer.diagnostics_index.into_iter().collect(), - synthetic_expansion_infos: RefCell::new(FxHashMap()), + synthetic_expansion_infos: Lock::new(FxHashMap()), } } @@ -147,14 +146,14 @@ impl<'sess> OnDiskCache<'sess> { OnDiskCache { serialized_data: Vec::new(), file_index_to_stable_id: FxHashMap(), - file_index_to_file: RefCell::new(FxHashMap()), + file_index_to_file: Lock::new(FxHashMap()), prev_cnums: vec![], - cnum_map: RefCell::new(None), + cnum_map: Lock::new(None), codemap, - current_diagnostics: RefCell::new(FxHashMap()), + current_diagnostics: Lock::new(FxHashMap()), query_result_index: FxHashMap(), prev_diagnostics_index: FxHashMap(), - synthetic_expansion_infos: RefCell::new(FxHashMap()), + synthetic_expansion_infos: Lock::new(FxHashMap()), } } @@ -356,9 +355,9 @@ impl<'sess> OnDiskCache<'sess> { opaque: opaque::Decoder::new(&self.serialized_data[..], pos.to_usize()), codemap: self.codemap, cnum_map: cnum_map.as_ref().unwrap(), - file_index_to_file: &mut file_index_to_file, + file_index_to_file: &mut *file_index_to_file, file_index_to_stable_id: &self.file_index_to_stable_id, - synthetic_expansion_infos: &mut synthetic_expansion_infos, + synthetic_expansion_infos: &mut *synthetic_expansion_infos, }; match 
decode_tagged(&mut decoder, dep_node_index) { @@ -418,12 +417,12 @@ struct CacheDecoder<'a, 'tcx: 'a, 'x> { codemap: &'x CodeMap, cnum_map: &'x IndexVec>, synthetic_expansion_infos: &'x mut FxHashMap, - file_index_to_file: &'x mut FxHashMap>, + file_index_to_file: &'x mut FxHashMap>, file_index_to_stable_id: &'x FxHashMap, } impl<'a, 'tcx, 'x> CacheDecoder<'a, 'tcx, 'x> { - fn file_index_to_file(&mut self, index: FileMapIndex) -> Rc { + fn file_index_to_file(&mut self, index: FileMapIndex) -> Lrc { let CacheDecoder { ref mut file_index_to_file, ref file_index_to_stable_id, @@ -696,7 +695,7 @@ struct CacheEncoder<'enc, 'a, 'tcx, E> impl<'enc, 'a, 'tcx, E> CacheEncoder<'enc, 'a, 'tcx, E> where E: 'enc + ty_codec::TyEncoder { - fn filemap_index(&mut self, filemap: Rc) -> FileMapIndex { + fn filemap_index(&mut self, filemap: Lrc) -> FileMapIndex { self.file_to_file_index[&(&*filemap as *const FileMap)] } diff --git a/src/librustc/ty/maps/plumbing.rs b/src/librustc/ty/maps/plumbing.rs index ec6d190b8bde9..8a4fa933be20d 100644 --- a/src/librustc/ty/maps/plumbing.rs +++ b/src/librustc/ty/maps/plumbing.rs @@ -20,7 +20,7 @@ use ty::maps::config::QueryDescription; use ty::item_path; use rustc_data_structures::fx::{FxHashMap}; -use std::cell::{Ref, RefMut}; +use rustc_data_structures::sync::LockGuard; use std::marker::PhantomData; use std::mem; use syntax_pos::Span; @@ -57,12 +57,12 @@ impl<'tcx, M: QueryDescription<'tcx>> QueryMap<'tcx, M> { pub(super) trait GetCacheInternal<'tcx>: QueryDescription<'tcx> + Sized { fn get_cache_internal<'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>) - -> Ref<'a, QueryMap<'tcx, Self>>; + -> LockGuard<'a, QueryMap<'tcx, Self>>; } pub(super) struct CycleError<'a, 'tcx: 'a> { span: Span, - cycle: RefMut<'a, [(Span, Query<'tcx>)]>, + cycle: LockGuard<'a, [(Span, Query<'tcx>)]>, } impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { @@ -112,7 +112,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { .find(|&(_, &(_, ref q))| *q == query) { return Err(CycleError { span, - cycle: RefMut::map(stack, |stack| &mut stack[i..]) + cycle: LockGuard::map(stack, |stack| &mut stack[i..]) }); } stack.push((span, query)); @@ -189,7 +189,7 @@ macro_rules! define_maps { [$($modifiers:tt)*] fn $name:ident: $node:ident($K:ty) -> $V:ty,)*) => { use dep_graph::DepNodeIndex; - use std::cell::RefCell; + use rustc_data_structures::sync::{Lock, LockGuard}; define_map_struct! { tcx: $tcx, @@ -201,8 +201,8 @@ macro_rules! define_maps { -> Self { Maps { providers, - query_stack: RefCell::new(vec![]), - $($name: RefCell::new(QueryMap::new())),* + query_stack: Lock::new(vec![]), + $($name: Lock::new(QueryMap::new())),* } } } @@ -250,7 +250,7 @@ macro_rules! define_maps { impl<$tcx> GetCacheInternal<$tcx> for queries::$name<$tcx> { fn get_cache_internal<'a>(tcx: TyCtxt<'a, $tcx, $tcx>) - -> ::std::cell::Ref<'a, QueryMap<$tcx, Self>> { + -> LockGuard<'a, QueryMap<$tcx, Self>> { tcx.maps.$name.borrow() } } @@ -586,8 +586,8 @@ macro_rules! 
define_map_struct { input: ($(([$(modifiers:tt)*] [$($attr:tt)*] [$name:ident]))*)) => { pub struct Maps<$tcx> { providers: IndexVec>, - query_stack: RefCell)>>, - $($(#[$attr])* $name: RefCell>>,)* + query_stack: Lock)>>, + $($(#[$attr])* $name: Lock>>,)* } }; } diff --git a/src/librustc/ty/mod.rs b/src/librustc/ty/mod.rs index afe999cede70d..13af8dcf4636c 100644 --- a/src/librustc/ty/mod.rs +++ b/src/librustc/ty/mod.rs @@ -42,7 +42,7 @@ use std::fmt; use std::hash::{Hash, Hasher}; use std::iter::FromIterator; use std::ops::Deref; -use std::rc::Rc; +use rustc_data_structures::sync::Lrc; use std::slice; use std::vec::IntoIter; use std::mem; @@ -77,7 +77,7 @@ pub use self::sty::TypeVariants::*; pub use self::binding::BindingMode; pub use self::binding::BindingMode::*; -pub use self::context::{TyCtxt, GlobalArenas, tls, keep_local}; +pub use self::context::{TyCtxt, GlobalArenas, AllArenas, tls, keep_local}; pub use self::context::{Lift, TypeckTables}; pub use self::instance::{Instance, InstanceDef}; @@ -124,7 +124,7 @@ mod sty; /// *on-demand* infrastructure. #[derive(Clone)] pub struct CrateAnalysis { - pub access_levels: Rc, + pub access_levels: Lrc, pub name: String, pub glob_map: Option, } @@ -327,10 +327,10 @@ pub struct CrateVariancesMap { /// For each item with generics, maps to a vector of the variance /// of its generics. If an item has no generics, it will have no /// entry. - pub variances: FxHashMap>>, + pub variances: FxHashMap>>, /// An empty vector, useful for cloning. - pub empty_variance: Rc>, + pub empty_variance: Lrc>, } impl Variance { @@ -2067,7 +2067,7 @@ impl BorrowKind { #[derive(Debug, Clone)] pub enum Attributes<'gcx> { - Owned(Rc<[ast::Attribute]>), + Owned(Lrc<[ast::Attribute]>), Borrowed(&'gcx [ast::Attribute]) } @@ -2552,7 +2552,7 @@ fn adt_dtorck_constraint<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, fn associated_item_def_ids<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) - -> Rc> { + -> Lrc> { let id = tcx.hir.as_local_node_id(def_id).unwrap(); let item = tcx.hir.expect_item(id); let vec: Vec<_> = match item.node { @@ -2570,7 +2570,7 @@ fn associated_item_def_ids<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, } _ => span_bug!(item.span, "associated_item_def_ids: not impl or trait") }; - Rc::new(vec) + Lrc::new(vec) } fn def_span<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> Span { @@ -2660,7 +2660,7 @@ pub fn provide(providers: &mut ty::maps::Providers) { /// (constructing this map requires touching the entire crate). #[derive(Clone, Debug)] pub struct CrateInherentImpls { - pub inherent_impls: DefIdMap>>, + pub inherent_impls: DefIdMap>>, } /// A set of constraints that need to be satisfied in order for diff --git a/src/librustc/ty/steal.rs b/src/librustc/ty/steal.rs index 0b0818888812f..08f48dfb6d924 100644 --- a/src/librustc/ty/steal.rs +++ b/src/librustc/ty/steal.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use std::cell::{Ref, RefCell}; +use rustc_data_structures::sync::{RwLock, ReadGuard}; use std::mem; /// The `Steal` struct is intended to used as the value for a query. @@ -32,18 +32,18 @@ use std::mem; /// /// FIXME(#41710) -- what is the best way to model linear queries? 
pub struct Steal { - value: RefCell> + value: RwLock> } impl Steal { pub fn new(value: T) -> Self { Steal { - value: RefCell::new(Some(value)) + value: RwLock::new(Some(value)) } } - pub fn borrow(&self) -> Ref { - Ref::map(self.value.borrow(), |opt| match *opt { + pub fn borrow(&self) -> ReadGuard { + ReadGuard::map(self.value.borrow(), |opt| match *opt { None => bug!("attempted to read from stolen value"), Some(ref v) => v }) diff --git a/src/librustc/ty/trait_def.rs b/src/librustc/ty/trait_def.rs index 0fbf9f1bd587b..62d3c8dc87da3 100644 --- a/src/librustc/ty/trait_def.rs +++ b/src/librustc/ty/trait_def.rs @@ -20,7 +20,7 @@ use ty::{Ty, TyCtxt}; use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::stable_hasher::{HashStable, StableHasher, StableHasherResult}; -use std::rc::Rc; +use rustc_data_structures::sync::Lrc; /// A trait's definition with type information. pub struct TraitDef { @@ -142,7 +142,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { // Query provider for `trait_impls_of`. pub(super) fn trait_impls_of_provider<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, trait_id: DefId) - -> Rc { + -> Lrc { let mut remote_impls = Vec::new(); // Traits defined in the current crate can't have impls in upstream @@ -180,7 +180,7 @@ pub(super) fn trait_impls_of_provider<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, } } - Rc::new(TraitImpls { + Lrc::new(TraitImpls { blanket_impls: blanket_impls, non_blanket_impls: non_blanket_impls, }) diff --git a/src/librustc/util/common.rs b/src/librustc/util/common.rs index 76d3494dbf082..77b1326eed7f9 100644 --- a/src/librustc/util/common.rs +++ b/src/librustc/util/common.rs @@ -10,6 +10,8 @@ #![allow(non_camel_case_types)] +use rustc_data_structures::sync::{Lock, LockCell}; + use std::cell::{RefCell, Cell}; use std::collections::HashMap; use std::ffi::CString; @@ -35,7 +37,7 @@ pub struct ErrorReported; thread_local!(static TIME_DEPTH: Cell = Cell::new(0)); /// Initialized for -Z profile-queries -thread_local!(static PROFQ_CHAN: RefCell>> = RefCell::new(None)); +scoped_thread_local!(pub static PROFQ_CHAN: Lock>>); /// Parameters to the `Dump` variant of type `ProfileQueriesMsg`. 
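Editor's note: the `Steal::borrow` change just above keeps the existing `Ref::map` shape but routes it through the new `ReadGuard` alias. A standalone sketch of that guard-mapping idea with plain `std::cell` types (`panic!` standing in for `bug!`; the struct below imitates `Steal` for illustration and is not the compiler's type):

use std::cell::{Ref, RefCell};

struct Steal<T> {
    value: RefCell<Option<T>>,
}

impl<T> Steal<T> {
    fn new(value: T) -> Self {
        Steal { value: RefCell::new(Some(value)) }
    }

    // Map a guard over the whole Option down to a reference to its contents,
    // while keeping the underlying borrow alive for as long as the guard lives.
    fn borrow(&self) -> Ref<T> {
        Ref::map(self.value.borrow(), |opt| match *opt {
            Some(ref v) => v,
            None => panic!("attempted to read from stolen value"),
        })
    }
}

fn main() {
    let mir = Steal::new(vec![1, 2, 3]);
    assert_eq!(mir.borrow().len(), 3);
}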
#[derive(Clone,Debug)] @@ -77,7 +79,7 @@ pub enum ProfileQueriesMsg { /// If enabled, send a message to the profile-queries thread pub fn profq_msg(msg: ProfileQueriesMsg) { - PROFQ_CHAN.with(|sender|{ + PROFQ_CHAN.with(|sender| { if let Some(s) = sender.borrow().as_ref() { s.send(msg).unwrap() } else { @@ -93,7 +95,7 @@ pub fn profq_msg(msg: ProfileQueriesMsg) { /// Set channel for profile queries channel pub fn profq_set_chan(s: Sender) -> bool { - PROFQ_CHAN.with(|chan|{ + PROFQ_CHAN.with(|chan| { if chan.borrow().is_none() { *chan.borrow_mut() = Some(s); true @@ -205,7 +207,7 @@ pub fn to_readable_str(mut val: usize) -> String { groups.join("_") } -pub fn record_time(accu: &Cell, f: F) -> T where +pub fn record_time(accu: &LockCell, f: F) -> T where F: FnOnce() -> T, { let start = Instant::now(); diff --git a/src/librustc_borrowck/Cargo.toml b/src/librustc_borrowck/Cargo.toml index 25f02537490fa..abc1fc759f696 100644 --- a/src/librustc_borrowck/Cargo.toml +++ b/src/librustc_borrowck/Cargo.toml @@ -17,3 +17,4 @@ graphviz = { path = "../libgraphviz" } rustc = { path = "../librustc" } rustc_mir = { path = "../librustc_mir" } rustc_errors = { path = "../librustc_errors" } +rustc_data_structures = { path = "../librustc_data_structures" } \ No newline at end of file diff --git a/src/librustc_borrowck/borrowck/mod.rs b/src/librustc_borrowck/borrowck/mod.rs index 40837c5e8d699..dcfdaeea64b57 100644 --- a/src/librustc_borrowck/borrowck/mod.rs +++ b/src/librustc_borrowck/borrowck/mod.rs @@ -44,6 +44,7 @@ use rustc::util::nodemap::FxHashSet; use std::cell::RefCell; use std::fmt; use std::rc::Rc; +use rustc_data_structures::sync::Lrc; use std::hash::{Hash, Hasher}; use syntax::ast; use syntax_pos::{MultiSpan, Span}; @@ -86,7 +87,7 @@ pub struct AnalysisData<'a, 'tcx: 'a> { } fn borrowck<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, owner_def_id: DefId) - -> Rc + -> Lrc { debug!("borrowck(body_owner_def_id={:?})", owner_def_id); @@ -99,7 +100,7 @@ fn borrowck<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, owner_def_id: DefId) // those things (notably the synthesized constructors from // tuple structs/variants) do not have an associated body // and do not need borrowchecking. - return Rc::new(BorrowCheckResult { + return Lrc::new(BorrowCheckResult { used_mut_nodes: FxHashSet(), }) } @@ -127,7 +128,7 @@ fn borrowck<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, owner_def_id: DefId) // Note that `mir_validated` is a "stealable" result; the // thief, `optimized_mir()`, forces borrowck, so we know that // is not yet stolen. - tcx.mir_validated(owner_def_id).borrow(); + let _guard = tcx.mir_validated(owner_def_id).borrow(); // option dance because you can't capture an uninitialized variable // by mut-ref. 
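Editor's note: the librustc_borrowck hunk above now binds the guard returned by `mir_validated(...).borrow()` instead of discarding it, presumably so the read borrow stays alive rather than being released at the end of the statement. With lock-backed guards the borrow lasts exactly as long as the guard value lives, so binding versus discarding becomes significant. A small std-only illustration of that rule:

use std::sync::RwLock;

fn main() {
    let mir = RwLock::new(Some(vec![1, 2, 3]));

    // Temporary guard: the read lock is released at the end of this statement.
    mir.read().unwrap();

    // Bound guard: the read lock stays held until `_guard` is dropped,
    // so no writer can take the value away in the meantime.
    let _guard = mir.read().unwrap();
    assert!(_guard.is_some());

    drop(_guard);
    *mir.write().unwrap() = None; // a writer can acquire the lock again now
}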
@@ -145,7 +146,7 @@ fn borrowck<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, owner_def_id: DefId) } unused::check(&mut bccx, body); - Rc::new(BorrowCheckResult { + Lrc::new(BorrowCheckResult { used_mut_nodes: bccx.used_mut_nodes.into_inner(), }) } @@ -243,7 +244,7 @@ pub struct BorrowckCtxt<'a, 'tcx: 'a> { // Some in `borrowck_fn` and cleared later tables: &'a ty::TypeckTables<'tcx>, - region_scope_tree: Rc, + region_scope_tree: Lrc, owner_def_id: DefId, diff --git a/src/librustc_borrowck/lib.rs b/src/librustc_borrowck/lib.rs index be173db23a52a..2bdee3198f22a 100644 --- a/src/librustc_borrowck/lib.rs +++ b/src/librustc_borrowck/lib.rs @@ -23,6 +23,7 @@ extern crate syntax; extern crate syntax_pos; extern crate rustc_errors as errors; +extern crate rustc_data_structures; // for "clarity", rename the graphviz crate to dot; graphviz within `borrowck` // refers to the borrowck-specific graphviz adapter traits. diff --git a/src/librustc_data_structures/Cargo.toml b/src/librustc_data_structures/Cargo.toml index 343b1ed68b804..7973ece2cbc26 100644 --- a/src/librustc_data_structures/Cargo.toml +++ b/src/librustc_data_structures/Cargo.toml @@ -11,3 +11,9 @@ crate-type = ["dylib"] [dependencies] log = "0.3" serialize = { path = "../libserialize" } +cfg-if = "0.1.2" +owning_ref = { git = "https://github.com/Zoxc/owning-ref-rs.git" } + +[dependencies.parking_lot] +version = "0.5" +features = ["nightly"] \ No newline at end of file diff --git a/src/librustc_data_structures/indexed_vec.rs b/src/librustc_data_structures/indexed_vec.rs index e2f50c8c8891b..a1e48eedfbaf5 100644 --- a/src/librustc_data_structures/indexed_vec.rs +++ b/src/librustc_data_structures/indexed_vec.rs @@ -327,7 +327,7 @@ macro_rules! newtype_index { #[derive(Clone, PartialEq, Eq)] pub struct IndexVec { pub raw: Vec, - _marker: PhantomData + _marker: PhantomData } impl serialize::Encodable for IndexVec { diff --git a/src/librustc_data_structures/lib.rs b/src/librustc_data_structures/lib.rs index 8862ba3545eba..ff4ec79a452a8 100644 --- a/src/librustc_data_structures/lib.rs +++ b/src/librustc_data_structures/lib.rs @@ -31,7 +31,10 @@ #![feature(i128)] #![feature(conservative_impl_trait)] #![feature(specialization)] +#![feature(optin_builtin_traits)] #![feature(underscore_lifetimes)] +#![feature(macro_vis_matcher)] +#![feature(allow_internal_unstable)] #![cfg_attr(unix, feature(libc))] #![cfg_attr(test, feature(test))] @@ -42,6 +45,10 @@ extern crate log; extern crate serialize as rustc_serialize; // used by deriving #[cfg(unix)] extern crate libc; +extern crate parking_lot; +#[macro_use] +extern crate cfg_if; +extern crate owning_ref; pub use rustc_serialize::hex::ToHex; @@ -67,6 +74,7 @@ pub mod tuple_slice; pub mod veccell; pub mod control_flow_graph; pub mod flock; +pub mod sync; // See comments in src/librustc/lib.rs #[doc(hidden)] diff --git a/src/librustc_data_structures/sync.rs b/src/librustc_data_structures/sync.rs new file mode 100644 index 0000000000000..44a6d8c060833 --- /dev/null +++ b/src/librustc_data_structures/sync.rs @@ -0,0 +1,373 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! This mdoule defines types which are thread safe if cfg!(parallel_queries) is true. +//! +//! `Lrc` is an alias of either Rc or Arc. 
+//! +//! `Lock` is a mutex. +//! It internally uses `parking_lot::Mutex` if cfg!(parallel_queries) is true, +//! `RefCell` otherwise. +//! +//! `RwLock` is a read-write lock. +//! It internally uses `parking_lot::RwLock` if cfg!(parallel_queries) is true, +//! `RefCell` otherwise. +//! +//! `LockCell` is a thread safe version of `Cell`, with `set` and `get` operations. +//! It can never deadlock. It uses `Cell` when +//! cfg!(parallel_queries) is false, otherwise it is a `Lock`. +//! +//! `MTLock` is a mutex which disappears if cfg!(parallel_queries) is false. +//! +//! `rustc_erase_owner!` erases a OwningRef owner into Erased or Erased + Send + Sync +//! depending on the value of cfg!(parallel_queries). + +use std::cmp::Ordering; +use std::fmt::Debug; +use std::fmt::Formatter; +use std::fmt; +use owning_ref::{Erased, OwningRef}; + +cfg_if! { + if #[cfg(not(parallel_queries))] { + pub auto trait Send {} + pub auto trait Sync {} + + impl Send for T {} + impl Sync for T {} + + #[macro_export] + macro_rules! rustc_erase_owner { + ($v:expr) => { + $v.erase_owner() + } + } + + pub type MetadataRef = OwningRef, [u8]>; + + pub use std::rc::Rc as Lrc; + pub use std::cell::Ref as ReadGuard; + pub use std::cell::RefMut as WriteGuard; + pub use std::cell::RefMut as LockGuard; + + pub use std::cell::RefCell as RwLock; + use std::cell::RefCell as InnerLock; + + use std::cell::Cell; + + #[derive(Debug)] + pub struct MTLock(T); + + impl MTLock { + #[inline(always)] + pub fn new(inner: T) -> Self { + MTLock(inner) + } + + #[inline(always)] + pub fn into_inner(self) -> T { + self.0 + } + + #[inline(always)] + pub fn get_mut(&mut self) -> &mut T { + &mut self.0 + } + + #[inline(always)] + pub fn lock(&self) -> &T { + &self.0 + } + + #[inline(always)] + pub fn borrow(&self) -> &T { + &self.0 + } + + #[inline(always)] + pub fn borrow_mut(&self) -> &T { + &self.0 + } + } + + // FIXME: Probably a bad idea (in the threaded case) + impl Clone for MTLock { + #[inline] + fn clone(&self) -> Self { + MTLock(self.0.clone()) + } + } + + pub struct LockCell(Cell); + + impl LockCell { + #[inline(always)] + pub fn new(inner: T) -> Self { + LockCell(Cell::new(inner)) + } + + #[inline(always)] + pub fn into_inner(self) -> T { + self.0.into_inner() + } + + #[inline(always)] + pub fn set(&self, new_inner: T) { + self.0.set(new_inner); + } + + #[inline(always)] + pub fn get(&self) -> T where T: Copy { + self.0.get() + } + + #[inline(always)] + pub fn set_mut(&mut self, new_inner: T) { + self.0.set(new_inner); + } + + #[inline(always)] + pub fn get_mut(&mut self) -> T where T: Copy { + self.0.get() + } + } + + impl LockCell> { + #[inline(always)] + pub fn take(&self) -> Option { + unsafe { (*self.0.as_ptr()).take() } + } + } + } else { + pub use std::marker::Send as Send; + pub use std::marker::Sync as Sync; + + pub use parking_lot::RwLockReadGuard as ReadGuard; + pub use parking_lot::RwLockWriteGuard as WriteGuard; + + pub use parking_lot::MutexGuard as LockGuard; + + use parking_lot; + + pub use std::sync::Arc as Lrc; + + pub use self::Lock as MTLock; + + use parking_lot::Mutex as InnerLock; + + pub type MetadataRef = OwningRef, [u8]>; + + /// This makes locks panic if they are already held. + /// It is only useful when you are running in a single thread + const ERROR_CHECKING: bool = false; + + #[macro_export] + macro_rules! 
rustc_erase_owner { + ($v:expr) => {{ + let v = $v; + ::rustc_data_structures::sync::assert_send_sync_val(&v); + v.erase_send_sync_owner() + }} + } + + pub struct LockCell(Lock); + + impl LockCell { + #[inline(always)] + pub fn new(inner: T) -> Self { + LockCell(Lock::new(inner)) + } + + #[inline(always)] + pub fn into_inner(self) -> T { + self.0.into_inner() + } + + #[inline(always)] + pub fn set(&self, new_inner: T) { + *self.0.lock() = new_inner; + } + + #[inline(always)] + pub fn get(&self) -> T where T: Copy { + *self.0.lock() + } + + #[inline(always)] + pub fn set_mut(&mut self, new_inner: T) { + *self.0.get_mut() = new_inner; + } + + #[inline(always)] + pub fn get_mut(&mut self) -> T where T: Copy { + *self.0.get_mut() + } + } + + impl LockCell> { + #[inline(always)] + pub fn take(&self) -> Option { + self.0.lock().take() + } + } + + #[derive(Debug)] + pub struct RwLock(parking_lot::RwLock); + + impl RwLock { + #[inline(always)] + pub fn new(inner: T) -> Self { + RwLock(parking_lot::RwLock::new(inner)) + } + + #[inline(always)] + pub fn borrow(&self) -> ReadGuard { + if ERROR_CHECKING { + self.0.try_read().expect("lock was already held") + } else { + self.0.read() + } + } + + #[inline(always)] + pub fn borrow_mut(&self) -> WriteGuard { + if ERROR_CHECKING { + self.0.try_write().expect("lock was already held") + } else { + self.0.write() + } + } + } + + // FIXME: Probably a bad idea + impl Clone for RwLock { + #[inline] + fn clone(&self) -> Self { + RwLock::new(self.borrow().clone()) + } + } + } +} + +pub fn assert_sync() {} +pub fn assert_send_sync_val(_t: &T) {} + +impl Debug for LockCell { + fn fmt(&self, f: &mut Formatter) -> fmt::Result { + f.debug_struct("LockCell") + .field("value", &self.get()) + .finish() + } +} + +impl Default for LockCell { + /// Creates a `LockCell`, with the `Default` value for T. 
+ #[inline] + fn default() -> LockCell { + LockCell::new(Default::default()) + } +} + +impl PartialEq for LockCell { + #[inline] + fn eq(&self, other: &LockCell) -> bool { + self.get() == other.get() + } +} + +impl Eq for LockCell {} + +impl PartialOrd for LockCell { + #[inline] + fn partial_cmp(&self, other: &LockCell) -> Option { + self.get().partial_cmp(&other.get()) + } + + #[inline] + fn lt(&self, other: &LockCell) -> bool { + self.get() < other.get() + } + + #[inline] + fn le(&self, other: &LockCell) -> bool { + self.get() <= other.get() + } + + #[inline] + fn gt(&self, other: &LockCell) -> bool { + self.get() > other.get() + } + + #[inline] + fn ge(&self, other: &LockCell) -> bool { + self.get() >= other.get() + } +} + +impl Ord for LockCell { + #[inline] + fn cmp(&self, other: &LockCell) -> Ordering { + self.get().cmp(&other.get()) + } +} + +#[derive(Debug)] +pub struct Lock(InnerLock); + +impl Lock { + #[inline(always)] + pub fn new(inner: T) -> Self { + Lock(InnerLock::new(inner)) + } + + #[inline(always)] + pub fn into_inner(self) -> T { + self.0.into_inner() + } + + #[inline(always)] + pub fn get_mut(&mut self) -> &mut T { + self.0.get_mut() + } + + #[cfg(parallel_queries)] + #[inline(always)] + pub fn lock(&self) -> LockGuard { + if ERROR_CHECKING { + self.0.try_lock().expect("lock was already held") + } else { + self.0.lock() + } + } + + #[cfg(not(parallel_queries))] + #[inline(always)] + pub fn lock(&self) -> LockGuard { + self.0.borrow_mut() + } + + #[inline(always)] + pub fn borrow(&self) -> LockGuard { + self.lock() + } + + #[inline(always)] + pub fn borrow_mut(&self) -> LockGuard { + self.lock() + } +} + +// FIXME: Probably a bad idea +impl Clone for Lock { + #[inline] + fn clone(&self) -> Self { + Lock::new(self.borrow().clone()) + } +} diff --git a/src/librustc_data_structures/transitive_relation.rs b/src/librustc_data_structures/transitive_relation.rs index ba7ab0c07c66a..9ba6871a55aa7 100644 --- a/src/librustc_data_structures/transitive_relation.rs +++ b/src/librustc_data_structures/transitive_relation.rs @@ -10,16 +10,16 @@ use bitvec::BitMatrix; use fx::FxHashMap; +use sync::Lock; use rustc_serialize::{Encodable, Encoder, Decodable, Decoder}; use stable_hasher::{HashStable, StableHasher, StableHasherResult}; -use std::cell::RefCell; use std::fmt::Debug; use std::hash::Hash; use std::mem; -#[derive(Clone, Debug)] -pub struct TransitiveRelation { +#[derive(Debug, Clone)] +pub struct TransitiveRelation { // List of elements. This is used to map from a T to a usize. elements: Vec, @@ -32,14 +32,14 @@ pub struct TransitiveRelation { // This is a cached transitive closure derived from the edges. // Currently, we build it lazilly and just throw out any existing - // copy whenever a new edge is added. (The RefCell is to permit + // copy whenever a new edge is added. (The LockCell is to permit // the lazy computation.) This is kind of silly, except for the // fact its size is tied to `self.elements.len()`, so I wanted to // wait before building it up to avoid reallocating as new edges // are added with new elements. Perhaps better would be to ask the // user for a batch of edges to minimize this effect, but I // already wrote the code this way. 
:P -nmatsakis - closure: RefCell>, + closure: Lock>, } #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable, Debug)] @@ -51,13 +51,13 @@ struct Edge { target: Index, } -impl TransitiveRelation { +impl TransitiveRelation { pub fn new() -> TransitiveRelation { TransitiveRelation { elements: vec![], map: FxHashMap(), edges: vec![], - closure: RefCell::new(None), + closure: Lock::new(None), } } @@ -72,7 +72,7 @@ impl TransitiveRelation { fn add_index(&mut self, a: T) -> Index { let &mut TransitiveRelation { ref mut elements, - ref closure, + ref mut closure, ref mut map, .. } = self; @@ -82,7 +82,7 @@ impl TransitiveRelation { elements.push(a); // if we changed the dimensions, clear the cache - *closure.borrow_mut() = None; + *closure.get_mut() = None; Index(elements.len() - 1) }) @@ -122,7 +122,7 @@ impl TransitiveRelation { self.edges.push(edge); // added an edge, clear the cache - *self.closure.borrow_mut() = None; + *self.closure.get_mut() = None; } } @@ -443,7 +443,7 @@ impl Decodable for TransitiveRelation .enumerate() .map(|(index, elem)| (elem.clone(), Index(index))) .collect(); - Ok(TransitiveRelation { elements, edges, map, closure: RefCell::new(None) }) + Ok(TransitiveRelation { elements, edges, map, closure: Lock::new(None) }) }) } } diff --git a/src/librustc_driver/Cargo.toml b/src/librustc_driver/Cargo.toml index d6155f53485e3..96c121b337b50 100644 --- a/src/librustc_driver/Cargo.toml +++ b/src/librustc_driver/Cargo.toml @@ -12,7 +12,7 @@ crate-type = ["dylib"] arena = { path = "../libarena" } graphviz = { path = "../libgraphviz" } log = { version = "0.3", features = ["release_max_level_info"] } -owning_ref = "0.3.3" +owning_ref = { git = "https://github.com/Zoxc/owning-ref-rs.git" } env_logger = { version = "0.4", default-features = false } rustc = { path = "../librustc" } rustc_allocator = { path = "../librustc_allocator" } diff --git a/src/librustc_driver/driver.rs b/src/librustc_driver/driver.rs index b0f61e9a19177..e41b61fed3239 100644 --- a/src/librustc_driver/driver.rs +++ b/src/librustc_driver/driver.rs @@ -22,7 +22,7 @@ use rustc::lint; use rustc::middle::{self, stability, reachable}; use rustc::middle::cstore::CrateStore; use rustc::middle::privacy::AccessLevels; -use rustc::ty::{self, TyCtxt, Resolutions, GlobalArenas}; +use rustc::ty::{self, TyCtxt, Resolutions, AllArenas}; use rustc::traits; use rustc::util::common::{ErrorReported, time}; use rustc_allocator as allocator; @@ -51,7 +51,8 @@ use std::fs; use std::io::{self, Write}; use std::iter; use std::path::{Path, PathBuf}; -use std::rc::Rc; +use rustc_data_structures::sync::{Sync, Lrc}; +use rustc::util::common::PROFQ_CHAN; use std::sync::mpsc; use syntax::{ast, diagnostics, visit}; use syntax::attr; @@ -61,7 +62,6 @@ use syntax::parse::{self, PResult}; use syntax::util::node_count::NodeCounter; use syntax; use syntax_ext; -use arena::DroplessArena; use derive_registrar; use pretty::ReplaceBodyWithLoop; @@ -75,6 +75,18 @@ pub fn compile_input(sess: &Session, output: &Option, addl_plugins: Option>, control: &CompileController) -> CompileResult { + PROFQ_CHAN.set(&sess.profile_channel, || { + compile_input_impl(sess, cstore, input, outdir, output, addl_plugins, control) + }) +} + +fn compile_input_impl(sess: &Session, + cstore: &CStore, + input: &Input, + outdir: &Option, + output: &Option, + addl_plugins: Option>, + control: &CompileController) -> CompileResult { use rustc::session::config::CrateType; macro_rules! 
controller_entry_point { @@ -168,8 +180,7 @@ pub fn compile_input(sess: &Session, return Ok(()) } - let arena = DroplessArena::new(); - let arenas = GlobalArenas::new(); + let arenas = AllArenas::new(); // Construct the HIR map let hir_map = time(sess.time_passes(), @@ -184,7 +195,6 @@ pub fn compile_input(sess: &Session, sess, outdir, output, - &arena, &arenas, &cstore, &hir_map, @@ -214,7 +224,6 @@ pub fn compile_input(sess: &Session, hir_map, analysis, resolutions, - &arena, &arenas, &crate_name, &outputs, @@ -408,8 +417,7 @@ pub struct CompileState<'a, 'tcx: 'a> { pub output_filenames: Option<&'a OutputFilenames>, pub out_dir: Option<&'a Path>, pub out_file: Option<&'a Path>, - pub arena: Option<&'tcx DroplessArena>, - pub arenas: Option<&'tcx GlobalArenas<'tcx>>, + pub arenas: Option<&'tcx AllArenas<'tcx>>, pub expanded_crate: Option<&'a ast::Crate>, pub hir_crate: Option<&'a hir::Crate>, pub hir_map: Option<&'a hir_map::Map<'tcx>>, @@ -429,7 +437,6 @@ impl<'a, 'tcx> CompileState<'a, 'tcx> { session, out_dir: out_dir.as_ref().map(|s| &**s), out_file: None, - arena: None, arenas: None, krate: None, registry: None, @@ -484,8 +491,7 @@ impl<'a, 'tcx> CompileState<'a, 'tcx> { session: &'tcx Session, out_dir: &'a Option, out_file: &'a Option, - arena: &'tcx DroplessArena, - arenas: &'tcx GlobalArenas<'tcx>, + arenas: &'tcx AllArenas<'tcx>, cstore: &'tcx CStore, hir_map: &'a hir_map::Map<'tcx>, analysis: &'a ty::CrateAnalysis, @@ -497,7 +503,6 @@ impl<'a, 'tcx> CompileState<'a, 'tcx> { -> Self { CompileState { crate_name: Some(crate_name), - arena: Some(arena), arenas: Some(arenas), cstore: Some(cstore), hir_map: Some(hir_map), @@ -909,7 +914,7 @@ pub fn phase_2_configure_and_expand(sess: &Session, expanded_crate: krate, defs: resolver.definitions, analysis: ty::CrateAnalysis { - access_levels: Rc::new(AccessLevels::default()), + access_levels: Lrc::new(AccessLevels::default()), name: crate_name.to_string(), glob_map: if resolver.make_glob_map { Some(resolver.glob_map) } else { None }, }, @@ -951,12 +956,11 @@ pub fn default_provide_extern(providers: &mut ty::maps::Providers) { /// structures carrying the results of the analysis. pub fn phase_3_run_analysis_passes<'tcx, F, R>(control: &CompileController, sess: &'tcx Session, - cstore: &'tcx CrateStore, + cstore: &'tcx (CrateStore + Sync), hir_map: hir_map::Map<'tcx>, mut analysis: ty::CrateAnalysis, resolutions: Resolutions, - arena: &'tcx DroplessArena, - arenas: &'tcx GlobalArenas<'tcx>, + arenas: &'tcx AllArenas<'tcx>, name: &str, output_filenames: &OutputFilenames, f: F) @@ -1020,7 +1024,6 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(control: &CompileController, local_providers, extern_providers, arenas, - arena, resolutions, named_region_map, hir_map, diff --git a/src/librustc_driver/lib.rs b/src/librustc_driver/lib.rs index d9b67e2d27f0c..94b18a69282de 100644 --- a/src/librustc_driver/lib.rs +++ b/src/librustc_driver/lib.rs @@ -25,6 +25,8 @@ #![feature(rustc_diagnostic_macros)] #![feature(set_stdio)] +#![recursion_limit="256"] + extern crate arena; extern crate getopts; extern crate graphviz; @@ -89,9 +91,9 @@ use std::io::{self, Read, Write}; use std::iter::repeat; use std::path::PathBuf; use std::process::{self, Command, Stdio}; -use std::rc::Rc; use std::str; use std::sync::{Arc, Mutex}; +use rustc_data_structures::sync::Lrc; use std::thread; use syntax::ast; @@ -187,9 +189,20 @@ mod rustc_trans { // The FileLoader provides a way to load files from sources other than the file system. 
pub fn run_compiler<'a>(args: &[String], callbacks: &mut CompilerCalls<'a>, - file_loader: Option>, + file_loader: Option>, emitter_dest: Option>) -> (CompileResult, Option) +{ + syntax::with_globals(&syntax::Globals::new(), || { + run_compiler_impl(args, callbacks, file_loader, emitter_dest) + }) +} + +fn run_compiler_impl<'a>(args: &[String], + callbacks: &mut CompilerCalls<'a>, + file_loader: Option>, + emitter_dest: Option>) + -> (CompileResult, Option) { macro_rules! do_or_return {($expr: expr, $sess: expr) => { match $expr { @@ -227,10 +240,10 @@ pub fn run_compiler<'a>(args: &[String], }, }; - let cstore = Rc::new(CStore::new(DefaultTransCrate::metadata_loader())); + let cstore = CStore::new(DefaultTransCrate::metadata_loader()); let loader = file_loader.unwrap_or(box RealFileLoader); - let codemap = Rc::new(CodeMap::with_file_loader(loader, sopts.file_path_mapping())); + let codemap = Lrc::new(CodeMap::with_file_loader(loader, sopts.file_path_mapping())); let mut sess = session::build_session_with_codemap( sopts, input_file_path, descriptions, codemap, emitter_dest, ); @@ -243,7 +256,7 @@ pub fn run_compiler<'a>(args: &[String], do_or_return!(callbacks.late_callback(&matches, &sess, - &*cstore, + &cstore, &input, &odir, &ofile), Some(sess)); @@ -579,7 +592,6 @@ impl<'a> CompilerCalls<'a> for RustcDefaultCalls { &state.expanded_crate.take().unwrap(), state.crate_name.unwrap(), ppm, - state.arena.unwrap(), state.arenas.unwrap(), state.output_filenames.unwrap(), opt_uii.clone(), @@ -1188,7 +1200,9 @@ pub fn in_rustc_thread(f: F) -> Result> cfg = cfg.stack_size(STACK_SIZE); } - let thread = cfg.spawn(f); + let thread = cfg.spawn(|| { + syntax::with_globals(&syntax::Globals::new(), || f()) + }); thread.unwrap().join() } diff --git a/src/librustc_driver/pretty.rs b/src/librustc_driver/pretty.rs index d930739c9f014..e02a709315490 100644 --- a/src/librustc_driver/pretty.rs +++ b/src/librustc_driver/pretty.rs @@ -17,7 +17,8 @@ use self::NodesMatchingUII::*; use {abort_on_err, driver}; -use rustc::ty::{self, TyCtxt, GlobalArenas, Resolutions}; +use rustc_data_structures::sync::Sync; +use rustc::ty::{self, TyCtxt, Resolutions, AllArenas}; use rustc::cfg; use rustc::cfg::graphviz::LabelledCFG; use rustc::middle::cstore::CrateStore; @@ -51,8 +52,6 @@ use rustc::hir::map::blocks; use rustc::hir; use rustc::hir::print as pprust_hir; -use arena::DroplessArena; - #[derive(Copy, Clone, PartialEq, Debug)] pub enum PpSourceMode { PpmNormal, @@ -201,12 +200,11 @@ impl PpSourceMode { } fn call_with_pp_support_hir<'tcx, A, F>(&self, sess: &'tcx Session, - cstore: &'tcx CrateStore, + cstore: &'tcx (CrateStore + Sync), hir_map: &hir_map::Map<'tcx>, analysis: &ty::CrateAnalysis, resolutions: &Resolutions, - arena: &'tcx DroplessArena, - arenas: &'tcx GlobalArenas<'tcx>, + arenas: &'tcx AllArenas<'tcx>, output_filenames: &OutputFilenames, id: &str, f: F) @@ -237,7 +235,6 @@ impl PpSourceMode { hir_map.clone(), analysis.clone(), resolutions.clone(), - arena, arenas, id, output_filenames, @@ -900,7 +897,7 @@ pub fn print_after_parsing(sess: &Session, } pub fn print_after_hir_lowering<'tcx, 'a: 'tcx>(sess: &'a Session, - cstore: &'tcx CrateStore, + cstore: &'tcx (CrateStore + Sync), hir_map: &hir_map::Map<'tcx>, analysis: &ty::CrateAnalysis, resolutions: &Resolutions, @@ -908,8 +905,7 @@ pub fn print_after_hir_lowering<'tcx, 'a: 'tcx>(sess: &'a Session, krate: &ast::Crate, crate_name: &str, ppm: PpMode, - arena: &'tcx DroplessArena, - arenas: &'tcx GlobalArenas<'tcx>, + arenas: &'tcx AllArenas<'tcx>, 
output_filenames: &OutputFilenames, opt_uii: Option, ofile: Option<&Path>) { @@ -920,7 +916,6 @@ pub fn print_after_hir_lowering<'tcx, 'a: 'tcx>(sess: &'a Session, analysis, resolutions, crate_name, - arena, arenas, output_filenames, ppm, @@ -959,7 +954,6 @@ pub fn print_after_hir_lowering<'tcx, 'a: 'tcx>(sess: &'a Session, hir_map, analysis, resolutions, - arena, arenas, output_filenames, crate_name, @@ -984,7 +978,6 @@ pub fn print_after_hir_lowering<'tcx, 'a: 'tcx>(sess: &'a Session, hir_map, analysis, resolutions, - arena, arenas, output_filenames, crate_name, @@ -1001,7 +994,6 @@ pub fn print_after_hir_lowering<'tcx, 'a: 'tcx>(sess: &'a Session, hir_map, analysis, resolutions, - arena, arenas, output_filenames, crate_name, @@ -1036,7 +1028,6 @@ pub fn print_after_hir_lowering<'tcx, 'a: 'tcx>(sess: &'a Session, hir_map, analysis, resolutions, - arena, arenas, output_filenames, crate_name, @@ -1062,13 +1053,12 @@ pub fn print_after_hir_lowering<'tcx, 'a: 'tcx>(sess: &'a Session, // with a different callback than the standard driver, so that isn't easy. // Instead, we call that function ourselves. fn print_with_analysis<'tcx, 'a: 'tcx>(sess: &'a Session, - cstore: &'a CrateStore, + cstore: &'a (CrateStore + Sync), hir_map: &hir_map::Map<'tcx>, analysis: &ty::CrateAnalysis, resolutions: &Resolutions, crate_name: &str, - arena: &'tcx DroplessArena, - arenas: &'tcx GlobalArenas<'tcx>, + arenas: &'tcx AllArenas<'tcx>, output_filenames: &OutputFilenames, ppm: PpMode, uii: Option, @@ -1090,7 +1080,6 @@ fn print_with_analysis<'tcx, 'a: 'tcx>(sess: &'a Session, hir_map.clone(), analysis.clone(), resolutions.clone(), - arena, arenas, crate_name, output_filenames, diff --git a/src/librustc_driver/test.rs b/src/librustc_driver/test.rs index 0818b929ee7ad..c0f1a052022be 100644 --- a/src/librustc_driver/test.rs +++ b/src/librustc_driver/test.rs @@ -32,6 +32,8 @@ use rustc::session::{self, config}; use rustc::session::config::{OutputFilenames, OutputTypes}; use rustc_trans_utils::trans_crate::TransCrate; use std::rc::Rc; +use rustc_data_structures::sync::{Send, Lrc}; +use syntax; use syntax::ast; use syntax::abi::Abi; use syntax::codemap::{CodeMap, FilePathMapping}; @@ -41,7 +43,6 @@ use errors::{Level, DiagnosticBuilder}; use syntax::feature_gate::UnstableFeatures; use syntax::symbol::Symbol; use syntax_pos::DUMMY_SP; -use arena::DroplessArena; use rustc::hir; @@ -97,9 +98,19 @@ fn errors(msgs: &[&str]) -> (Box, usize) { } fn test_env(source_string: &str, - (emitter, expected_err_count): (Box, usize), + args: (Box, usize), body: F) where F: FnOnce(Env) +{ + syntax::with_globals(&syntax::Globals::new(), || { + test_env_impl(source_string, args, body) + }); +} + +fn test_env_impl(source_string: &str, + (emitter, expected_err_count): (Box, usize), + body: F) + where F: FnOnce(Env) { let mut options = config::basic_options(); options.debugging_opts.verbose = true; @@ -110,7 +121,7 @@ fn test_env(source_string: &str, let sess = session::build_session_(options, None, diagnostic_handler, - Rc::new(CodeMap::new(FilePathMapping::empty()))); + Lrc::new(CodeMap::new(FilePathMapping::empty()))); rustc_trans::init(&sess); rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess)); let input = config::Input::Str { @@ -132,8 +143,7 @@ fn test_env(source_string: &str, .expect("phase 2 aborted") }; - let arena = DroplessArena::new(); - let arenas = ty::GlobalArenas::new(); + let arenas = ty::AllArenas::new(); let hir_map = hir_map::map_crate(&sess, &*cstore, &mut hir_forest, &defs); // run just 
enough stuff to build a tcx: @@ -151,7 +161,6 @@ fn test_env(source_string: &str, ty::maps::Providers::default(), ty::maps::Providers::default(), &arenas, - &arena, resolutions, named_region_map.unwrap(), hir_map, diff --git a/src/librustc_errors/emitter.rs b/src/librustc_errors/emitter.rs index 16bbd755b8869..dfc97d155ecbb 100644 --- a/src/librustc_errors/emitter.rs +++ b/src/librustc_errors/emitter.rs @@ -12,14 +12,14 @@ use self::Destination::*; use syntax_pos::{DUMMY_SP, FileMap, Span, MultiSpan}; -use {Level, CodeSuggestion, DiagnosticBuilder, SubDiagnostic, CodeMapper, DiagnosticId}; +use {Level, CodeSuggestion, DiagnosticBuilder, SubDiagnostic, CodeMapperDyn, DiagnosticId}; use snippet::{Annotation, AnnotationType, Line, MultilineAnnotation, StyledString, Style}; use styled_buffer::StyledBuffer; +use rustc_data_structures::sync::Lrc; use std::borrow::Cow; use std::io::prelude::*; use std::io; -use std::rc::Rc; use term; use std::collections::HashMap; use std::cmp::min; @@ -103,19 +103,19 @@ impl ColorConfig { pub struct EmitterWriter { dst: Destination, - cm: Option>, + cm: Option>, short_message: bool, } struct FileWithAnnotatedLines { - file: Rc, + file: Lrc, lines: Vec, multiline_depth: usize, } impl EmitterWriter { pub fn stderr(color_config: ColorConfig, - code_map: Option>, + code_map: Option>, short_message: bool) -> EmitterWriter { if color_config.use_color() { @@ -135,7 +135,7 @@ impl EmitterWriter { } pub fn new(dst: Box, - code_map: Option>, + code_map: Option>, short_message: bool) -> EmitterWriter { EmitterWriter { @@ -147,7 +147,7 @@ impl EmitterWriter { fn preprocess_annotations(&mut self, msp: &MultiSpan) -> Vec { fn add_annotation_to_file(file_vec: &mut Vec, - file: Rc, + file: Lrc, line_index: usize, ann: Annotation) { @@ -279,7 +279,7 @@ impl EmitterWriter { fn render_source_line(&self, buffer: &mut StyledBuffer, - file: Rc, + file: Lrc, line: &Line, width_offset: usize, code_offset: usize) -> Vec<(usize, Style)> { @@ -1104,8 +1104,6 @@ impl EmitterWriter { level: &Level, max_line_num_len: usize) -> io::Result<()> { - use std::borrow::Borrow; - if let Some(ref cm) = self.cm { let mut buffer = StyledBuffer::new(); @@ -1119,7 +1117,7 @@ impl EmitterWriter { Some(Style::HeaderMsg)); // Render the replacements for each suggestion - let suggestions = suggestion.splice_lines(cm.borrow()); + let suggestions = suggestion.splice_lines(&**cm); let mut row_num = 2; for &(ref complete, ref parts) in suggestions.iter().take(MAX_SUGGESTIONS) { diff --git a/src/librustc_errors/lib.rs b/src/librustc_errors/lib.rs index 605cfc5ed127e..18612c5afe056 100644 --- a/src/librustc_errors/lib.rs +++ b/src/librustc_errors/lib.rs @@ -33,14 +33,15 @@ use self::Level::*; use emitter::{Emitter, EmitterWriter}; +use rustc_data_structures::sync::{Lrc, Lock, LockCell, Send, Sync}; use rustc_data_structures::fx::FxHashSet; use rustc_data_structures::stable_hasher::StableHasher; use std::borrow::Cow; -use std::cell::{RefCell, Cell}; use std::mem; -use std::rc::Rc; use std::{error, fmt}; +use std::sync::atomic::AtomicUsize; +use std::sync::atomic::Ordering::SeqCst; mod diagnostic; mod diagnostic_builder; @@ -92,6 +93,8 @@ pub struct SubstitutionPart { pub snippet: String, } +pub type CodeMapperDyn = CodeMapper + Send + Sync; + pub trait CodeMapper { fn lookup_char_pos(&self, pos: BytePos) -> Loc; fn span_to_lines(&self, sp: Span) -> FileLinesResult; @@ -99,12 +102,13 @@ pub trait CodeMapper { fn span_to_filename(&self, sp: Span) -> FileName; fn merge_spans(&self, sp_lhs: Span, sp_rhs: Span) -> 
Option; fn call_span_if_macro(&self, sp: Span) -> Span; - fn ensure_filemap_source_present(&self, file_map: Rc) -> bool; + fn ensure_filemap_source_present(&self, file_map: Lrc) -> bool; } impl CodeSuggestion { /// Returns the assembled code suggestions and whether they should be shown with an underline. - pub fn splice_lines(&self, cm: &CodeMapper) -> Vec<(String, Vec)> { + pub fn splice_lines(&self, cm: &CodeMapperDyn) + -> Vec<(String, Vec)> { use syntax_pos::{CharPos, Loc, Pos}; fn push_trailing(buf: &mut String, @@ -235,16 +239,16 @@ pub use diagnostic_builder::DiagnosticBuilder; pub struct Handler { pub flags: HandlerFlags, - err_count: Cell, - emitter: RefCell>, - continue_after_error: Cell, - delayed_span_bug: RefCell>, - tracked_diagnostics: RefCell>>, + err_count: AtomicUsize, + emitter: Lock>, + continue_after_error: LockCell, + delayed_span_bug: Lock>, + tracked_diagnostics: Lock>>, // This set contains a hash of every diagnostic that has been emitted by // this handler. These hashes is used to avoid emitting the same error // twice. - emitted_diagnostics: RefCell>, + emitted_diagnostics: Lock>, } #[derive(Default)] @@ -258,7 +262,7 @@ impl Handler { pub fn with_tty_emitter(color_config: ColorConfig, can_emit_warnings: bool, treat_err_as_bug: bool, - cm: Option>) + cm: Option>) -> Handler { Handler::with_tty_emitter_and_flags( color_config, @@ -271,7 +275,7 @@ impl Handler { } pub fn with_tty_emitter_and_flags(color_config: ColorConfig, - cm: Option>, + cm: Option>, flags: HandlerFlags) -> Handler { let emitter = Box::new(EmitterWriter::stderr(color_config, cm, false)); @@ -280,7 +284,7 @@ impl Handler { pub fn with_emitter(can_emit_warnings: bool, treat_err_as_bug: bool, - e: Box) + e: Box) -> Handler { Handler::with_emitter_and_flags( e, @@ -291,15 +295,15 @@ impl Handler { }) } - pub fn with_emitter_and_flags(e: Box, flags: HandlerFlags) -> Handler { + pub fn with_emitter_and_flags(e: Box, flags: HandlerFlags) -> Handler { Handler { flags, - err_count: Cell::new(0), - emitter: RefCell::new(e), - continue_after_error: Cell::new(true), - delayed_span_bug: RefCell::new(None), - tracked_diagnostics: RefCell::new(None), - emitted_diagnostics: RefCell::new(FxHashSet()), + err_count: AtomicUsize::new(0), + emitter: Lock::new(e), + continue_after_error: LockCell::new(true), + delayed_span_bug: Lock::new(None), + tracked_diagnostics: Lock::new(None), + emitted_diagnostics: Lock::new(FxHashSet()), } } @@ -310,7 +314,7 @@ impl Handler { // NOTE: DO NOT call this function from rustc, as it relies on `err_count` being non-zero // if an error happened to avoid ICEs. This function should only be called from tools. 
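The `Handler` fields above trade `Cell` and `RefCell` for `AtomicUsize`, `Lock` and `LockCell` because the single-threaded cell types are not `Sync`; the method bodies that follow below switch to atomic operations accordingly. A stripped-down sketch of just the error counter, not the actual rustc type:

```rust
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering::SeqCst;

// Sketch only: the real Handler also keeps the emitter, delayed bugs and
// the emitted-diagnostics set behind Lock/LockCell.
struct Handler {
    err_count: AtomicUsize,
}

impl Handler {
    fn bump_err_count(&self) {
        // &self is enough: fetch_add mutates through a shared reference.
        self.err_count.fetch_add(1, SeqCst);
    }

    fn err_count(&self) -> usize {
        self.err_count.load(SeqCst)
    }

    fn has_errors(&self) -> bool {
        self.err_count() > 0
    }
}

fn main() {
    let handler = Handler { err_count: AtomicUsize::new(0) };
    handler.bump_err_count();
    assert!(handler.has_errors() && handler.err_count() == 1);
}
```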
pub fn reset_err_count(&self) { - self.err_count.set(0); + self.err_count.store(0, SeqCst); } pub fn struct_dummy<'a>(&'a self) -> DiagnosticBuilder<'a> { @@ -506,19 +510,19 @@ impl Handler { fn bump_err_count(&self) { self.panic_if_treat_err_as_bug(); - self.err_count.set(self.err_count.get() + 1); + self.err_count.fetch_add(1, SeqCst); } pub fn err_count(&self) -> usize { - self.err_count.get() + self.err_count.load(SeqCst) } pub fn has_errors(&self) -> bool { - self.err_count.get() > 0 + self.err_count() > 0 } pub fn abort_if_errors(&self) { let s; - match self.err_count.get() { + match self.err_count() { 0 => { if let Some(bug) = self.delayed_span_bug.borrow_mut().take() { DiagnosticBuilder::new_diagnostic(self, bug).emit(); @@ -527,7 +531,7 @@ impl Handler { } 1 => s = "aborting due to previous error".to_string(), _ => { - s = format!("aborting due to {} previous errors", self.err_count.get()); + s = format!("aborting due to {} previous errors", self.err_count()); } } diff --git a/src/librustc_metadata/Cargo.toml b/src/librustc_metadata/Cargo.toml index 40b75be36fefb..4b6c2bffd7d45 100644 --- a/src/librustc_metadata/Cargo.toml +++ b/src/librustc_metadata/Cargo.toml @@ -11,7 +11,7 @@ crate-type = ["dylib"] [dependencies] flate2 = "0.2" log = "0.3" -owning_ref = "0.3.3" +owning_ref = { git = "https://github.com/Zoxc/owning-ref-rs.git" } proc_macro = { path = "../libproc_macro" } rustc = { path = "../librustc" } rustc_back = { path = "../librustc_back" } diff --git a/src/librustc_metadata/creader.rs b/src/librustc_metadata/creader.rs index 264c15bcd0b76..187e1425a08d6 100644 --- a/src/librustc_metadata/creader.rs +++ b/src/librustc_metadata/creader.rs @@ -14,6 +14,7 @@ use cstore::{self, CStore, CrateSource, MetadataBlob}; use locator::{self, CratePaths}; use native_libs::relevant_lib; use schema::CrateRoot; +use rustc_data_structures::sync::{Lrc, RwLock, Lock, LockCell}; use rustc::hir::def_id::{CrateNum, DefIndex, CRATE_DEF_INDEX}; use rustc::hir::svh::Svh; @@ -29,10 +30,8 @@ use rustc::util::common::record_time; use rustc::util::nodemap::FxHashSet; use rustc::hir::map::Definitions; -use std::cell::{RefCell, Cell}; use std::ops::Deref; use std::path::PathBuf; -use std::rc::Rc; use std::{cmp, fs}; use syntax::ast; @@ -87,7 +86,7 @@ struct ExtensionCrate { } enum PMDSource { - Registered(Rc), + Registered(Lrc), Owned(Library), } @@ -230,7 +229,7 @@ impl<'a> CrateLoader<'a> { span: Span, lib: Library, dep_kind: DepKind) - -> (CrateNum, Rc) { + -> (CrateNum, Lrc) { info!("register crate `extern crate {} as {}`", name, ident); let crate_root = lib.metadata.get_root(); self.verify_no_symbol_conflicts(span, &crate_root); @@ -272,8 +271,8 @@ impl<'a> CrateLoader<'a> { let mut cmeta = cstore::CrateMetadata { name, - extern_crate: Cell::new(None), - def_path_table: Rc::new(def_path_table), + extern_crate: LockCell::new(None), + def_path_table: Lrc::new(def_path_table), exported_symbols, trait_impls, proc_macros: crate_root.macro_derive_registrar.map(|_| { @@ -281,11 +280,11 @@ impl<'a> CrateLoader<'a> { }), root: crate_root, blob: metadata, - cnum_map: RefCell::new(cnum_map), + cnum_map: Lock::new(cnum_map), cnum, - codemap_import_info: RefCell::new(vec![]), - attribute_cache: RefCell::new([Vec::new(), Vec::new()]), - dep_kind: Cell::new(dep_kind), + codemap_import_info: RwLock::new(vec![]), + attribute_cache: Lock::new([Vec::new(), Vec::new()]), + dep_kind: LockCell::new(dep_kind), source: cstore::CrateSource { dylib, rlib, @@ -310,7 +309,7 @@ impl<'a> CrateLoader<'a> { 
cmeta.dllimport_foreign_items = dllimports; - let cmeta = Rc::new(cmeta); + let cmeta = Lrc::new(cmeta); self.cstore.set_crate_data(cnum, cmeta.clone()); (cnum, cmeta) } @@ -323,7 +322,7 @@ impl<'a> CrateLoader<'a> { span: Span, path_kind: PathKind, mut dep_kind: DepKind) - -> (CrateNum, Rc) { + -> (CrateNum, Lrc) { info!("resolving crate `extern crate {} as {}`", name, ident); let result = if let Some(cnum) = self.existing_match(name, hash, path_kind) { LoadResult::Previous(cnum) @@ -549,7 +548,7 @@ impl<'a> CrateLoader<'a> { /// custom derive (and other macro-1.1 style features) are implemented via /// executables and custom IPC. fn load_derive_macros(&mut self, root: &CrateRoot, dylib: Option, span: Span) - -> Vec<(ast::Name, Rc)> { + -> Vec<(ast::Name, Lrc)> { use std::{env, mem}; use proc_macro::TokenStream; use proc_macro::__internal::Registry; @@ -578,7 +577,7 @@ impl<'a> CrateLoader<'a> { mem::transmute::<*mut u8, fn(&mut Registry)>(sym) }; - struct MyRegistrar(Vec<(ast::Name, Rc)>); + struct MyRegistrar(Vec<(ast::Name, Lrc)>); impl Registry for MyRegistrar { fn register_custom_derive(&mut self, @@ -588,7 +587,7 @@ impl<'a> CrateLoader<'a> { let attrs = attributes.iter().cloned().map(Symbol::intern).collect::>(); let derive = ProcMacroDerive::new(expand, attrs.clone()); let derive = SyntaxExtension::ProcMacroDerive(Box::new(derive), attrs); - self.0.push((Symbol::intern(trait_name), Rc::new(derive))); + self.0.push((Symbol::intern(trait_name), Lrc::new(derive))); } fn register_attr_proc_macro(&mut self, @@ -597,7 +596,7 @@ impl<'a> CrateLoader<'a> { let expand = SyntaxExtension::AttrProcMacro( Box::new(AttrProcMacro { inner: expand }) ); - self.0.push((Symbol::intern(name), Rc::new(expand))); + self.0.push((Symbol::intern(name), Lrc::new(expand))); } fn register_bang_proc_macro(&mut self, @@ -606,7 +605,7 @@ impl<'a> CrateLoader<'a> { let expand = SyntaxExtension::ProcMacro( Box::new(BangProcMacro { inner: expand }) ); - self.0.push((Symbol::intern(name), Rc::new(expand))); + self.0.push((Symbol::intern(name), Lrc::new(expand))); } } diff --git a/src/librustc_metadata/cstore.rs b/src/librustc_metadata/cstore.rs index 3a4ba6768a716..8132f8cf6212c 100644 --- a/src/librustc_metadata/cstore.rs +++ b/src/librustc_metadata/cstore.rs @@ -22,9 +22,7 @@ use rustc_back::PanicStrategy; use rustc_data_structures::indexed_vec::IndexVec; use rustc::util::nodemap::{FxHashMap, FxHashSet, NodeMap}; -use std::cell::{RefCell, Cell}; -use std::rc::Rc; -use owning_ref::ErasedBoxRef; +use rustc_data_structures::sync::{Sync, Lrc, RwLock, Lock, LockCell}; use syntax::{ast, attr}; use syntax::ext::base::SyntaxExtension; use syntax::symbol::Symbol; @@ -42,7 +40,9 @@ pub use cstore_impl::{provide, provide_extern}; // own crate numbers. pub type CrateNumMap = IndexVec; -pub struct MetadataBlob(pub ErasedBoxRef<[u8]>); +pub use rustc_data_structures::sync::MetadataRef; + +pub struct MetadataBlob(pub MetadataRef); /// Holds information about a syntax_pos::FileMap imported from another crate. /// See `imported_filemaps()` for more information. @@ -52,7 +52,7 @@ pub struct ImportedFileMap { /// The end of this FileMap within the codemap of its original crate pub original_end_pos: syntax_pos::BytePos, /// The imported FileMap's representation within the local codemap - pub translated_filemap: Rc, + pub translated_filemap: Lrc, } pub struct CrateMetadata { @@ -61,13 +61,13 @@ pub struct CrateMetadata { /// Information about the extern crate that caused this crate to /// be loaded. 
If this is `None`, then the crate was injected /// (e.g., by the allocator) - pub extern_crate: Cell>, + pub extern_crate: LockCell>, pub blob: MetadataBlob, - pub cnum_map: RefCell, + pub cnum_map: Lock, pub cnum: CrateNum, - pub codemap_import_info: RefCell>, - pub attribute_cache: RefCell<[Vec>>; 2]>, + pub codemap_import_info: RwLock>, + pub attribute_cache: Lock<[Vec>>; 2]>, pub root: schema::CrateRoot, @@ -76,32 +76,32 @@ pub struct CrateMetadata { /// hashmap, which gives the reverse mapping. This allows us to /// quickly retrace a `DefPath`, which is needed for incremental /// compilation support. - pub def_path_table: Rc, + pub def_path_table: Lrc, pub exported_symbols: FxHashSet, pub trait_impls: FxHashMap<(u32, DefIndex), schema::LazySeq>, - pub dep_kind: Cell, + pub dep_kind: LockCell, pub source: CrateSource, - pub proc_macros: Option)>>, + pub proc_macros: Option)>>, // Foreign items imported from a dylib (Windows only) pub dllimport_foreign_items: FxHashSet, } pub struct CStore { - metas: RefCell>>, + metas: RwLock>>, /// Map from NodeId's of local extern crate statements to crate numbers - extern_mod_crate_map: RefCell>, - pub metadata_loader: Box, + extern_mod_crate_map: Lock>, + pub metadata_loader: Box, } impl CStore { - pub fn new(metadata_loader: Box) -> CStore { + pub fn new(metadata_loader: Box) -> CStore { CStore { - metas: RefCell::new(FxHashMap()), - extern_mod_crate_map: RefCell::new(FxHashMap()), + metas: RwLock::new(FxHashMap()), + extern_mod_crate_map: Lock::new(FxHashMap()), metadata_loader, } } @@ -110,16 +110,16 @@ impl CStore { CrateNum::new(self.metas.borrow().len() + 1) } - pub fn get_crate_data(&self, cnum: CrateNum) -> Rc { + pub fn get_crate_data(&self, cnum: CrateNum) -> Lrc { self.metas.borrow().get(&cnum).unwrap().clone() } - pub fn set_crate_data(&self, cnum: CrateNum, data: Rc) { + pub fn set_crate_data(&self, cnum: CrateNum, data: Lrc) { self.metas.borrow_mut().insert(cnum, data); } pub fn iter_crate_data(&self, mut i: I) - where I: FnMut(CrateNum, &Rc) + where I: FnMut(CrateNum, &Lrc) { for (&k, v) in self.metas.borrow().iter() { i(k, v); diff --git a/src/librustc_metadata/cstore_impl.rs b/src/librustc_metadata/cstore_impl.rs index 911b4dac4e13a..fbf5565da6a77 100644 --- a/src/librustc_metadata/cstore_impl.rs +++ b/src/librustc_metadata/cstore_impl.rs @@ -30,7 +30,7 @@ use rustc::hir::map::definitions::DefPathTable; use rustc::util::nodemap::{NodeSet, DefIdMap}; use std::any::Any; -use std::rc::Rc; +use rustc_data_structures::sync::Lrc; use syntax::ast; use syntax::attr; @@ -111,12 +111,12 @@ provide! { <'tcx> tcx, def_id, other, cdata, let _ = cdata; tcx.calculate_dtor(def_id, &mut |_,_| Ok(())) } - variances_of => { Rc::new(cdata.get_item_variances(def_id.index)) } + variances_of => { Lrc::new(cdata.get_item_variances(def_id.index)) } associated_item_def_ids => { let mut result = vec![]; cdata.each_child_of_item(def_id.index, |child| result.push(child.def.def_id()), tcx.sess); - Rc::new(result) + Lrc::new(result) } associated_item => { cdata.get_associated_item(def_id.index) } impl_trait_ref => { cdata.get_impl_trait(def_id.index, tcx) } @@ -136,11 +136,11 @@ provide! 
{ <'tcx> tcx, def_id, other, cdata, mir } mir_const_qualif => { - (cdata.mir_const_qualif(def_id.index), Rc::new(IdxSetBuf::new_empty(0))) + (cdata.mir_const_qualif(def_id.index), Lrc::new(IdxSetBuf::new_empty(0))) } typeck_tables_of => { cdata.item_body_tables(def_id.index, tcx) } fn_sig => { cdata.fn_sig(def_id.index, tcx) } - inherent_impls => { Rc::new(cdata.get_inherent_implementations_for_type(def_id.index)) } + inherent_impls => { Lrc::new(cdata.get_inherent_implementations_for_type(def_id.index)) } is_const_fn => { cdata.is_const_fn(def_id.index) } is_foreign_item => { cdata.is_foreign_item(def_id.index) } is_auto_impl => { cdata.is_auto_impl(def_id.index) } @@ -169,18 +169,18 @@ provide! { <'tcx> tcx, def_id, other, cdata, } is_mir_available => { cdata.is_item_mir_available(def_id.index) } - dylib_dependency_formats => { Rc::new(cdata.get_dylib_dependency_formats()) } + dylib_dependency_formats => { Lrc::new(cdata.get_dylib_dependency_formats()) } is_panic_runtime => { cdata.is_panic_runtime(tcx.sess) } is_compiler_builtins => { cdata.is_compiler_builtins(tcx.sess) } has_global_allocator => { cdata.has_global_allocator() } is_sanitizer_runtime => { cdata.is_sanitizer_runtime(tcx.sess) } is_profiler_runtime => { cdata.is_profiler_runtime(tcx.sess) } panic_strategy => { cdata.panic_strategy() } - extern_crate => { Rc::new(cdata.extern_crate.get()) } + extern_crate => { Lrc::new(cdata.extern_crate.get()) } is_no_builtins => { cdata.is_no_builtins(tcx.sess) } impl_defaultness => { cdata.get_impl_defaultness(def_id.index) } - exported_symbol_ids => { Rc::new(cdata.get_exported_symbols()) } - native_libraries => { Rc::new(cdata.get_native_libraries(tcx.sess)) } + exported_symbol_ids => { Lrc::new(cdata.get_exported_symbols()) } + native_libraries => { Lrc::new(cdata.get_native_libraries(tcx.sess)) } plugin_registrar_fn => { cdata.root.plugin_registrar_fn.map(|index| { DefId { krate: def_id.krate, index } @@ -199,13 +199,13 @@ provide! { <'tcx> tcx, def_id, other, cdata, let mut result = vec![]; let filter = Some(other); cdata.get_implementations_for_trait(filter, &mut result); - Rc::new(result) + Lrc::new(result) } all_trait_implementations => { let mut result = vec![]; cdata.get_implementations_for_trait(None, &mut result); - Rc::new(result) + Lrc::new(result) } is_dllimport_foreign_item => { @@ -217,10 +217,10 @@ provide! { <'tcx> tcx, def_id, other, cdata, item_children => { let mut result = vec![]; cdata.each_child_of_item(def_id.index, |child| result.push(child), tcx.sess); - Rc::new(result) + Lrc::new(result) } - defined_lang_items => { Rc::new(cdata.get_lang_items()) } - missing_lang_items => { Rc::new(cdata.get_missing_lang_items()) } + defined_lang_items => { Lrc::new(cdata.get_lang_items()) } + missing_lang_items => { Lrc::new(cdata.get_missing_lang_items()) } extern_const_body => { debug!("item_body({:?}): inlining item", def_id); @@ -234,7 +234,7 @@ provide! 
{ <'tcx> tcx, def_id, other, cdata, } } - used_crate_source => { Rc::new(cdata.source.clone()) } + used_crate_source => { Lrc::new(cdata.source.clone()) } has_copy_closures => { cdata.has_copy_closures(tcx.sess) } has_clone_closures => { cdata.has_clone_closures(tcx.sess) } @@ -276,11 +276,11 @@ pub fn provide<'tcx>(providers: &mut Providers<'tcx>) { }, native_libraries: |tcx, cnum| { assert_eq!(cnum, LOCAL_CRATE); - Rc::new(native_libs::collect(tcx)) + Lrc::new(native_libs::collect(tcx)) }, link_args: |tcx, cnum| { assert_eq!(cnum, LOCAL_CRATE); - Rc::new(link_args::collect(tcx)) + Lrc::new(link_args::collect(tcx)) }, // Returns a map from a sufficiently visible external item (i.e. an @@ -337,7 +337,7 @@ pub fn provide<'tcx>(providers: &mut Providers<'tcx>) { } } - Rc::new(visible_parent_map) + Lrc::new(visible_parent_map) }, ..*providers @@ -345,7 +345,7 @@ pub fn provide<'tcx>(providers: &mut Providers<'tcx>) { } impl CrateStore for cstore::CStore { - fn crate_data_as_rc_any(&self, krate: CrateNum) -> Rc { + fn crate_data_as_rc_any(&self, krate: CrateNum) -> Lrc { self.get_crate_data(krate) } @@ -418,7 +418,7 @@ impl CrateStore for cstore::CStore { self.get_crate_data(def.krate).def_path_hash(def.index) } - fn def_path_table(&self, cnum: CrateNum) -> Rc { + fn def_path_table(&self, cnum: CrateNum) -> Lrc { self.get_crate_data(cnum).def_path_table.clone() } @@ -442,7 +442,7 @@ impl CrateStore for cstore::CStore { } else if data.name == "proc_macro" && self.get_crate_data(id.krate).item_name(id.index) == "quote" { let ext = SyntaxExtension::ProcMacro(Box::new(::proc_macro::__internal::Quoter)); - return LoadedMacro::ProcMacro(Rc::new(ext)); + return LoadedMacro::ProcMacro(Lrc::new(ext)); } let (name, def) = data.get_macro(id.index); diff --git a/src/librustc_metadata/decoder.rs b/src/librustc_metadata/decoder.rs index 3be99e9722361..eea6c9375a248 100644 --- a/src/librustc_metadata/decoder.rs +++ b/src/librustc_metadata/decoder.rs @@ -13,6 +13,7 @@ use cstore::{self, CrateMetadata, MetadataBlob, NativeLibrary}; use schema::*; +use rustc_data_structures::sync::{Lrc, ReadGuard}; use rustc::hir::map::{DefKey, DefPath, DefPathData, DefPathHash}; use rustc::hir; use rustc::middle::cstore::{LinkagePreference, ExternConstBody, @@ -28,11 +29,9 @@ use rustc::ty::codec::TyDecoder; use rustc::util::nodemap::DefIdSet; use rustc::mir::Mir; -use std::cell::Ref; use std::collections::BTreeMap; use std::io; use std::mem; -use std::rc::Rc; use std::u32; use rustc_serialize::{Decodable, Decoder, SpecializedDecoder, opaque}; @@ -760,12 +759,12 @@ impl<'a, 'tcx> CrateMetadata { .map(|body| (body.id(), body)) .collect(); ExternBodyNestedBodies { - nested_bodies: Rc::new(nested_bodies), + nested_bodies: Lrc::new(nested_bodies), fingerprint: ast.stable_bodies_hash, } } else { ExternBodyNestedBodies { - nested_bodies: Rc::new(BTreeMap::new()), + nested_bodies: Lrc::new(BTreeMap::new()), fingerprint: Fingerprint::zero(), } } @@ -855,11 +854,11 @@ impl<'a, 'tcx> CrateMetadata { } } - pub fn get_item_attrs(&self, node_id: DefIndex, sess: &Session) -> Rc<[ast::Attribute]> { + pub fn get_item_attrs(&self, node_id: DefIndex, sess: &Session) -> Lrc<[ast::Attribute]> { let (node_as, node_index) = (node_id.address_space().index(), node_id.as_array_index()); if self.is_proc_macro(node_id) { - return Rc::new([]); + return Lrc::new([]); } if let Some(&Some(ref val)) = @@ -875,7 +874,7 @@ impl<'a, 'tcx> CrateMetadata { if def_key.disambiguated_data.data == DefPathData::StructCtor { item = self.entry(def_key.parent.unwrap()); 
} - let result: Rc<[ast::Attribute]> = Rc::from(self.get_attributes(&item, sess)); + let result: Lrc<[ast::Attribute]> = Lrc::from(self.get_attributes(&item, sess)); let vec_ = &mut self.attribute_cache.borrow_mut()[node_as]; if vec_.len() < node_index + 1 { vec_.resize(node_index + 1, None); @@ -1093,7 +1092,7 @@ impl<'a, 'tcx> CrateMetadata { /// for items inlined from other crates. pub fn imported_filemaps(&'a self, local_codemap: &codemap::CodeMap) - -> Ref<'a, Vec> { + -> ReadGuard<'a, Vec> { { let filemaps = self.codemap_import_info.borrow(); if !filemaps.is_empty() { diff --git a/src/librustc_metadata/encoder.rs b/src/librustc_metadata/encoder.rs index 6cfa324797c5d..aaab8818cec8b 100644 --- a/src/librustc_metadata/encoder.rs +++ b/src/librustc_metadata/encoder.rs @@ -33,7 +33,7 @@ use rustc_serialize::{Encodable, Encoder, SpecializedEncoder, opaque}; use std::io::prelude::*; use std::io::Cursor; use std::path::Path; -use std::rc::Rc; +use rustc_data_structures::sync::Lrc; use std::u32; use syntax::ast::{self, CRATE_NODE_ID}; use syntax::codemap::Spanned; @@ -292,7 +292,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { .to_string_lossy() .into_owned(); adapted.name = abs_path; - Rc::new(adapted) + Lrc::new(adapted) } }) .collect::>(); diff --git a/src/librustc_metadata/lib.rs b/src/librustc_metadata/lib.rs index 6c1ca36232307..a100e79aecbae 100644 --- a/src/librustc_metadata/lib.rs +++ b/src/librustc_metadata/lib.rs @@ -23,7 +23,10 @@ #![feature(specialization)] #![feature(rustc_private)] +#![recursion_limit="256"] + extern crate libc; + #[macro_use] extern crate log; #[macro_use] @@ -39,6 +42,7 @@ extern crate proc_macro; #[macro_use] extern crate rustc; extern crate rustc_back; +#[macro_use] extern crate rustc_data_structures; mod diagnostics; diff --git a/src/librustc_metadata/locator.rs b/src/librustc_metadata/locator.rs index 8abccb503d6fa..a4ab3a66a52fc 100644 --- a/src/librustc_metadata/locator.rs +++ b/src/librustc_metadata/locator.rs @@ -219,7 +219,7 @@ //! no means all of the necessary details. Take a look at the rest of //! metadata::locator or metadata::creader for all the juicy details! 
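The locator changes below replace the hard-coded `ErasedBoxRef<[u8]>` with the `MetadataRef` alias and funnel owner erasure through `rustc_erase_owner!`, which additionally asserts `Send + Sync` when `parallel_queries` is enabled. A minimal sketch of what the non-parallel expansion boils down to with `owning_ref` (the `load_blob` helper is invented for illustration):

```rust
extern crate owning_ref;

use owning_ref::{ErasedBoxRef, OwningRef};

// Take ownership of the raw bytes, then erase the concrete owner type so
// callers only ever see a borrowed `[u8]`, mirroring the rmeta branch below.
fn load_blob(bytes: Vec<u8>) -> ErasedBoxRef<[u8]> {
    OwningRef::new(bytes) // OwningRef<Vec<u8>, [u8]>
        .map_owner_box()  // box the owner so its type can be erased
        .erase_owner()    // OwningRef<Box<Erased>, [u8]>
}

fn main() {
    let blob = load_blob(b"rust metadata".to_vec());
    assert_eq!(&blob[..4], b"rust");
}
```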
-use cstore::MetadataBlob; +use cstore::{MetadataRef, MetadataBlob}; use creader::Library; use schema::{METADATA_HEADER, rustc_version}; @@ -243,7 +243,7 @@ use std::path::{Path, PathBuf}; use std::time::Instant; use flate2::read::DeflateDecoder; -use owning_ref::{ErasedBoxRef, OwningRef}; +use owning_ref::OwningRef; pub struct CrateMismatch { path: PathBuf, @@ -842,7 +842,7 @@ fn get_metadata_section_imp(target: &Target, if !filename.exists() { return Err(format!("no such file: '{}'", filename.display())); } - let raw_bytes: ErasedBoxRef<[u8]> = match flavor { + let raw_bytes: MetadataRef = match flavor { CrateFlavor::Rlib => loader.get_rlib_metadata(target, filename)?, CrateFlavor::Dylib => { let buf = loader.get_dylib_metadata(target, filename)?; @@ -862,7 +862,7 @@ fn get_metadata_section_imp(target: &Target, match DeflateDecoder::new(compressed_bytes).read_to_end(&mut inflated) { Ok(_) => { let buf = unsafe { OwningRef::new_assert_stable_address(inflated) }; - buf.map_owner_box().erase_owner() + rustc_erase_owner!(buf.map_owner_box()) } Err(_) => { return Err(format!("failed to decompress metadata: {}", filename.display())); @@ -875,7 +875,7 @@ fn get_metadata_section_imp(target: &Target, let mut buf = vec![]; file.read_to_end(&mut buf).map_err(|_| format!("failed to read rmeta metadata: '{}'", filename.display()))?; - OwningRef::new(buf).map_owner_box().erase_owner() + rustc_erase_owner!(OwningRef::new(buf).map_owner_box()) } }; let blob = MetadataBlob(raw_bytes); diff --git a/src/librustc_mir/dataflow/impls/borrows.rs b/src/librustc_mir/dataflow/impls/borrows.rs index c27cb43eff77b..55105a7fcfb8b 100644 --- a/src/librustc_mir/dataflow/impls/borrows.rs +++ b/src/librustc_mir/dataflow/impls/borrows.rs @@ -21,6 +21,7 @@ use rustc::util::nodemap::{FxHashMap, FxHashSet}; use rustc_data_structures::bitslice::{BitwiseOperator}; use rustc_data_structures::indexed_set::{IdxSet}; use rustc_data_structures::indexed_vec::{IndexVec}; +use rustc_data_structures::sync::Lrc; use dataflow::{BitDenotation, BlockSets, DataflowOperator}; pub use dataflow::indexes::BorrowIndex; @@ -30,7 +31,6 @@ use borrow_check::nll::ToRegionVid; use syntax_pos::Span; use std::fmt; -use std::rc::Rc; // `Borrows` maps each dataflow bit to an `Rvalue::Ref`, which can be // uniquely identified in the MIR by the `Location` of the assigment @@ -38,7 +38,7 @@ use std::rc::Rc; pub struct Borrows<'a, 'gcx: 'tcx, 'tcx: 'a> { tcx: TyCtxt<'a, 'gcx, 'tcx>, mir: &'a Mir<'tcx>, - scope_tree: Rc, + scope_tree: Lrc, root_scope: Option, borrows: IndexVec>, location_map: FxHashMap, diff --git a/src/librustc_mir/hair/cx/mod.rs b/src/librustc_mir/hair/cx/mod.rs index 306b41714a553..c331eef6f49ec 100644 --- a/src/librustc_mir/hair/cx/mod.rs +++ b/src/librustc_mir/hair/cx/mod.rs @@ -30,7 +30,7 @@ use syntax::ast; use syntax::symbol::Symbol; use rustc::hir; use rustc_const_math::{ConstInt, ConstUsize}; -use std::rc::Rc; +use rustc_data_structures::sync::Lrc; #[derive(Clone)] pub struct Cx<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> { @@ -43,7 +43,7 @@ pub struct Cx<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> { /// Identity `Substs` for use with const-evaluation. 
pub identity_substs: &'gcx Substs<'gcx>, - pub region_scope_tree: Rc, + pub region_scope_tree: Lrc, pub tables: &'a ty::TypeckTables<'gcx>, /// This is `Constness::Const` if we are compiling a `static`, diff --git a/src/librustc_mir/transform/check_unsafety.rs b/src/librustc_mir/transform/check_unsafety.rs index 7833f4bbac7aa..257e2dd081acd 100644 --- a/src/librustc_mir/transform/check_unsafety.rs +++ b/src/librustc_mir/transform/check_unsafety.rs @@ -10,6 +10,7 @@ use rustc_data_structures::fx::FxHashSet; use rustc_data_structures::indexed_vec::IndexVec; +use rustc_data_structures::sync::Lrc; use rustc::ty::maps::Providers; use rustc::ty::{self, TyCtxt}; @@ -22,7 +23,6 @@ use rustc::mir::visit::{PlaceContext, Visitor}; use syntax::ast; use syntax::symbol::Symbol; -use std::rc::Rc; use util; pub struct UnsafetyChecker<'a, 'tcx: 'a> { @@ -327,8 +327,8 @@ fn unsafety_check_result<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) ClearCrossCrate::Clear => { debug!("unsafety_violations: {:?} - remote, skipping", def_id); return UnsafetyCheckResult { - violations: Rc::new([]), - unsafe_blocks: Rc::new([]) + violations: Lrc::new([]), + unsafe_blocks: Lrc::new([]) } } }; diff --git a/src/librustc_mir/transform/mod.rs b/src/librustc_mir/transform/mod.rs index fb9daf07c71dc..4ce514216e016 100644 --- a/src/librustc_mir/transform/mod.rs +++ b/src/librustc_mir/transform/mod.rs @@ -17,8 +17,8 @@ use rustc::ty::steal::Steal; use rustc::hir; use rustc::hir::intravisit::{self, Visitor, NestedVisitorMap}; use rustc::util::nodemap::DefIdSet; +use rustc_data_structures::sync::Lrc; use std::borrow::Cow; -use std::rc::Rc; use syntax::ast; use syntax_pos::Span; @@ -66,7 +66,7 @@ fn is_mir_available<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> boo /// Finds the full set of def-ids within the current crate that have /// MIR associated with them. fn mir_keys<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, krate: CrateNum) - -> Rc { + -> Lrc { assert_eq!(krate, LOCAL_CRATE); let mut set = DefIdSet(); @@ -101,7 +101,7 @@ fn mir_keys<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, krate: CrateNum) set: &mut set, }.as_deep_visitor()); - Rc::new(set) + Lrc::new(set) } fn mir_built<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> &'tcx Steal> { diff --git a/src/librustc_mir/transform/qualify_consts.rs b/src/librustc_mir/transform/qualify_consts.rs index 2b2323928efba..cfd7cf0313da6 100644 --- a/src/librustc_mir/transform/qualify_consts.rs +++ b/src/librustc_mir/transform/qualify_consts.rs @@ -34,7 +34,7 @@ use syntax::feature_gate::UnstableFeatures; use syntax_pos::{Span, DUMMY_SP}; use std::fmt; -use std::rc::Rc; +use rustc_data_structures::sync::Lrc; use std::usize; use transform::{MirPass, MirSource}; @@ -296,7 +296,7 @@ impl<'a, 'tcx> Qualifier<'a, 'tcx, 'tcx> { } /// Qualify a whole const, static initializer or const fn. 
- fn qualify_const(&mut self) -> (Qualif, Rc>) { + fn qualify_const(&mut self) -> (Qualif, Lrc>) { debug!("qualifying {} {:?}", self.mode, self.def_id); let mir = self.mir; @@ -411,7 +411,7 @@ impl<'a, 'tcx> Qualifier<'a, 'tcx, 'tcx> { } } - (self.qualif, Rc::new(promoted_temps)) + (self.qualif, Lrc::new(promoted_temps)) } } @@ -945,7 +945,7 @@ pub fn provide(providers: &mut Providers) { fn mir_const_qualif<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) - -> (u8, Rc>) { + -> (u8, Lrc>) { // NB: This `borrow()` is guaranteed to be valid (i.e., the value // cannot yet be stolen), because `mir_validated()`, which steals // from `mir_const(), forces this query to execute before @@ -954,7 +954,7 @@ fn mir_const_qualif<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, if mir.return_ty().references_error() { tcx.sess.delay_span_bug(mir.span, "mir_const_qualif: Mir had errors"); - return (Qualif::NOT_CONST.bits(), Rc::new(IdxSetBuf::new_empty(0))); + return (Qualif::NOT_CONST.bits(), Lrc::new(IdxSetBuf::new_empty(0))); } let mut qualifier = Qualifier::new(tcx, def_id, mir, Mode::Const); diff --git a/src/librustc_passes/Cargo.toml b/src/librustc_passes/Cargo.toml index d2560c2f8203f..c2e5e369a8fcb 100644 --- a/src/librustc_passes/Cargo.toml +++ b/src/librustc_passes/Cargo.toml @@ -13,6 +13,7 @@ log = "0.3" rustc = { path = "../librustc" } rustc_const_eval = { path = "../librustc_const_eval" } rustc_const_math = { path = "../librustc_const_math" } +rustc_data_structures = { path = "../librustc_data_structures" } syntax = { path = "../libsyntax" } syntax_pos = { path = "../libsyntax_pos" } rustc_errors = { path = "../librustc_errors" } diff --git a/src/librustc_passes/consts.rs b/src/librustc_passes/consts.rs index 776b5f3c984f1..2de875982882c 100644 --- a/src/librustc_passes/consts.rs +++ b/src/librustc_passes/consts.rs @@ -45,7 +45,7 @@ use rustc::util::common::ErrorReported; use rustc::util::nodemap::{ItemLocalSet, NodeSet}; use rustc::lint::builtin::CONST_ERR; use rustc::hir::{self, PatKind, RangeEnd}; -use std::rc::Rc; +use rustc_data_structures::sync::Lrc; use syntax::ast; use syntax_pos::{Span, DUMMY_SP}; use rustc::hir::intravisit::{self, Visitor, NestedVisitorMap}; @@ -83,7 +83,7 @@ fn const_is_rvalue_promotable_to_static<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, fn rvalue_promotable_map<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) - -> Rc + -> Lrc { let outer_def_id = tcx.closure_base_def_id(def_id); if outer_def_id != def_id { @@ -108,7 +108,7 @@ fn rvalue_promotable_map<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, let body_id = tcx.hir.body_owned_by(node_id); visitor.visit_nested_body(body_id); - Rc::new(visitor.result) + Lrc::new(visitor.result) } struct CheckCrateVisitor<'a, 'tcx: 'a> { diff --git a/src/librustc_passes/lib.rs b/src/librustc_passes/lib.rs index 9a150abea6691..9e7f2e142a360 100644 --- a/src/librustc_passes/lib.rs +++ b/src/librustc_passes/lib.rs @@ -25,6 +25,7 @@ extern crate rustc; extern crate rustc_const_eval; extern crate rustc_const_math; +extern crate rustc_data_structures; #[macro_use] extern crate log; diff --git a/src/librustc_privacy/Cargo.toml b/src/librustc_privacy/Cargo.toml index c65312e9a8337..62eab40f3ec9a 100644 --- a/src/librustc_privacy/Cargo.toml +++ b/src/librustc_privacy/Cargo.toml @@ -13,3 +13,4 @@ rustc = { path = "../librustc" } rustc_typeck = { path = "../librustc_typeck" } syntax = { path = "../libsyntax" } syntax_pos = { path = "../libsyntax_pos" } +rustc_data_structures = { path = "../librustc_data_structures" } \ No newline at end of file diff 
--git a/src/librustc_privacy/lib.rs b/src/librustc_privacy/lib.rs index 74d92ce1c3e62..fe2895e433dbb 100644 --- a/src/librustc_privacy/lib.rs +++ b/src/librustc_privacy/lib.rs @@ -19,6 +19,7 @@ #[macro_use] extern crate syntax; extern crate rustc_typeck; extern crate syntax_pos; +extern crate rustc_data_structures; use rustc::hir::{self, PatKind}; use rustc::hir::def::Def; @@ -37,7 +38,7 @@ use syntax_pos::Span; use std::cmp; use std::mem::replace; -use std::rc::Rc; +use rustc_data_structures::sync::Lrc; mod diagnostics; @@ -1582,13 +1583,13 @@ pub fn provide(providers: &mut Providers) { }; } -pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Rc { +pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Lrc { tcx.privacy_access_levels(LOCAL_CRATE) } fn privacy_access_levels<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, krate: CrateNum) - -> Rc { + -> Lrc { assert_eq!(krate, LOCAL_CRATE); let krate = tcx.hir.krate(); @@ -1661,7 +1662,7 @@ fn privacy_access_levels<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, krate.visit_all_item_likes(&mut DeepVisitor::new(&mut visitor)); } - Rc::new(visitor.access_levels) + Lrc::new(visitor.access_levels) } __build_diagnostic_array! { librustc_privacy, DIAGNOSTICS } diff --git a/src/librustc_resolve/build_reduced_graph.rs b/src/librustc_resolve/build_reduced_graph.rs index afca6ea2c0751..f2528eee96e75 100644 --- a/src/librustc_resolve/build_reduced_graph.rs +++ b/src/librustc_resolve/build_reduced_graph.rs @@ -27,7 +27,7 @@ use rustc::hir::def_id::{BUILTIN_MACROS_CRATE, CRATE_DEF_INDEX, LOCAL_CRATE, Def use rustc::ty; use std::cell::Cell; -use std::rc::Rc; +use rustc_data_structures::sync::Lrc; use syntax::ast::{Name, Ident}; use syntax::attr; @@ -569,7 +569,7 @@ impl<'a> Resolver<'a> { } } - pub fn get_macro(&mut self, def: Def) -> Rc { + pub fn get_macro(&mut self, def: Def) -> Lrc { let def_id = match def { Def::Macro(def_id, ..) 
=> def_id, _ => panic!("Expected Def::Macro(..)"), @@ -583,7 +583,7 @@ impl<'a> Resolver<'a> { LoadedMacro::ProcMacro(ext) => return ext, }; - let ext = Rc::new(macro_rules::compile(&self.session.parse_sess, + let ext = Lrc::new(macro_rules::compile(&self.session.parse_sess, &self.session.features, ¯o_def)); self.macro_map.insert(def_id, ext.clone()); diff --git a/src/librustc_resolve/lib.rs b/src/librustc_resolve/lib.rs index 49c452cddb2cd..67bbd83829304 100644 --- a/src/librustc_resolve/lib.rs +++ b/src/librustc_resolve/lib.rs @@ -67,7 +67,7 @@ use std::cmp; use std::collections::BTreeSet; use std::fmt; use std::mem::replace; -use std::rc::Rc; +use rustc_data_structures::sync::Lrc; use resolve_imports::{ImportDirective, ImportDirectiveSubclass, NameResolution, ImportResolver}; use macros::{InvocationData, LegacyBinding, LegacyScope, MacroBinding}; @@ -1101,7 +1101,7 @@ impl<'a> NameBinding<'a> { } } - fn get_macro(&self, resolver: &mut Resolver<'a>) -> Rc { + fn get_macro(&self, resolver: &mut Resolver<'a>) -> Lrc { resolver.get_macro(self.def_ignoring_ambiguity()) } @@ -1302,7 +1302,7 @@ pub struct Resolver<'a> { macro_names: FxHashSet, global_macros: FxHashMap>, lexical_macro_resolutions: Vec<(Ident, &'a Cell>)>, - macro_map: FxHashMap>, + macro_map: FxHashMap>, macro_defs: FxHashMap, local_macro_def_scopes: FxHashMap>, macro_exports: Vec, diff --git a/src/librustc_resolve/macros.rs b/src/librustc_resolve/macros.rs index 3d1d7c0c48a1e..f40e656d87276 100644 --- a/src/librustc_resolve/macros.rs +++ b/src/librustc_resolve/macros.rs @@ -40,7 +40,7 @@ use syntax_pos::{Span, DUMMY_SP}; use std::cell::Cell; use std::mem; -use std::rc::Rc; +use rustc_data_structures::sync::Lrc; #[derive(Clone)] pub struct InvocationData<'a> { @@ -185,7 +185,7 @@ impl<'a> base::Resolver for Resolver<'a> { invocation.expansion.set(visitor.legacy_scope); } - fn add_builtin(&mut self, ident: ast::Ident, ext: Rc) { + fn add_builtin(&mut self, ident: ast::Ident, ext: Lrc) { let def_id = DefId { krate: BUILTIN_MACROS_CRATE, index: DefIndex::new(self.macro_map.len()), @@ -292,7 +292,7 @@ impl<'a> base::Resolver for Resolver<'a> { } fn resolve_invoc(&mut self, invoc: &mut Invocation, scope: Mark, force: bool) - -> Result>, Determinacy> { + -> Result>, Determinacy> { let def = match invoc.kind { InvocationKind::Attr { attr: None, .. 
} => return Ok(None), _ => self.resolve_invoc_to_def(invoc, scope, force)?, @@ -312,7 +312,7 @@ impl<'a> base::Resolver for Resolver<'a> { } fn resolve_macro(&mut self, scope: Mark, path: &ast::Path, kind: MacroKind, force: bool) - -> Result, Determinacy> { + -> Result, Determinacy> { self.resolve_macro_to_def(scope, path, kind, force).map(|def| { self.unused_macros.remove(&def.def_id()); self.get_macro(def) @@ -732,7 +732,7 @@ impl<'a> Resolver<'a> { } let def_id = self.definitions.local_def_id(item.id); - let ext = Rc::new(macro_rules::compile(&self.session.parse_sess, + let ext = Lrc::new(macro_rules::compile(&self.session.parse_sess, &self.session.features, item)); self.macro_map.insert(def_id, ext); diff --git a/src/librustc_trans/Cargo.toml b/src/librustc_trans/Cargo.toml index d8318ea808221..5456fc5a91e47 100644 --- a/src/librustc_trans/Cargo.toml +++ b/src/librustc_trans/Cargo.toml @@ -15,7 +15,7 @@ flate2 = "0.2" jobserver = "0.1.5" log = "0.3" num_cpus = "1.0" -owning_ref = "0.3.3" +owning_ref = { git = "https://github.com/Zoxc/owning-ref-rs.git" } rustc = { path = "../librustc" } rustc-demangle = "0.1.4" rustc_allocator = { path = "../librustc_allocator" } diff --git a/src/librustc_trans/back/symbol_export.rs b/src/librustc_trans/back/symbol_export.rs index fa6fe2e9e93ef..121361a9ef48e 100644 --- a/src/librustc_trans/back/symbol_export.rs +++ b/src/librustc_trans/back/symbol_export.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use std::rc::Rc; +use rustc_data_structures::sync::Lrc; use std::sync::Arc; use base; @@ -64,7 +64,7 @@ pub fn crates_export_threshold(crate_types: &[config::CrateType]) pub fn provide(providers: &mut Providers) { providers.exported_symbol_ids = |tcx, cnum| { let export_threshold = threshold(tcx); - Rc::new(tcx.exported_symbols(cnum) + Lrc::new(tcx.exported_symbols(cnum) .iter() .filter_map(|&(_, id, level)| { id.and_then(|id| { diff --git a/src/librustc_trans/back/write.rs b/src/librustc_trans/back/write.rs index cb883e0349f31..c6ee8534015b7 100644 --- a/src/librustc_trans/back/write.rs +++ b/src/librustc_trans/back/write.rs @@ -982,7 +982,7 @@ pub fn start_async_translation(tcx: TyCtxt, crate_info, time_graph, - coordinator_send: tcx.tx_to_llvm_workers.clone(), + coordinator_send: tcx.tx_to_llvm_workers.lock().clone(), trans_worker_receive, shared_emitter_main, future: coordinator_thread, @@ -1352,7 +1352,7 @@ fn start_executing_work(tcx: TyCtxt, metadata_config: Arc, allocator_config: Arc) -> thread::JoinHandle> { - let coordinator_send = tcx.tx_to_llvm_workers.clone(); + let coordinator_send = tcx.tx_to_llvm_workers.lock().clone(); let mut exported_symbols = FxHashMap(); exported_symbols.insert(LOCAL_CRATE, tcx.exported_symbols(LOCAL_CRATE)); for &cnum in tcx.crates().iter() { @@ -2257,7 +2257,7 @@ pub fn submit_translated_module_to_llvm(tcx: TyCtxt, mtrans: ModuleTranslation, cost: u64) { let llvm_work_item = WorkItem::Optimize(mtrans); - drop(tcx.tx_to_llvm_workers.send(Box::new(Message::TranslationDone { + drop(tcx.tx_to_llvm_workers.lock().send(Box::new(Message::TranslationDone { llvm_work_item, cost, }))); diff --git a/src/librustc_trans/lib.rs b/src/librustc_trans/lib.rs index 3c2e56bf2a127..a0799358b8800 100644 --- a/src/librustc_trans/lib.rs +++ b/src/librustc_trans/lib.rs @@ -49,7 +49,7 @@ extern crate rustc_apfloat; extern crate rustc_back; extern crate rustc_binaryen; extern crate rustc_const_math; -extern crate rustc_data_structures; +#[macro_use] extern crate 
rustc_data_structures; extern crate rustc_demangle; extern crate rustc_incremental; extern crate rustc_llvm as llvm; @@ -73,8 +73,8 @@ pub use llvm_util::{init, target_features, print_version, print_passes, print, e use std::any::Any; use std::path::PathBuf; -use std::rc::Rc; use std::sync::mpsc; +use rustc_data_structures::sync::{self, Lrc}; use rustc::dep_graph::DepGraph; use rustc::hir::def_id::CrateNum; @@ -160,7 +160,7 @@ impl rustc_trans_utils::trans_crate::TransCrate for LlvmTransCrate { type OngoingCrateTranslation = back::write::OngoingCrateTranslation; type TranslatedCrate = CrateTranslation; - fn metadata_loader() -> Box { + fn metadata_loader() -> Box { box metadata::LlvmMetadataLoader } @@ -321,11 +321,11 @@ pub struct CrateInfo { profiler_runtime: Option, sanitizer_runtime: Option, is_no_builtins: FxHashSet, - native_libraries: FxHashMap>>, + native_libraries: FxHashMap>>, crate_name: FxHashMap, - used_libraries: Rc>, - link_args: Rc>, - used_crate_source: FxHashMap>, + used_libraries: Lrc>, + link_args: Lrc>, + used_crate_source: FxHashMap>, used_crates_static: Vec<(CrateNum, LibSource)>, used_crates_dynamic: Vec<(CrateNum, LibSource)>, } diff --git a/src/librustc_trans/metadata.rs b/src/librustc_trans/metadata.rs index 883808c59091a..3ada8d1671135 100644 --- a/src/librustc_trans/metadata.rs +++ b/src/librustc_trans/metadata.rs @@ -15,22 +15,64 @@ use llvm; use llvm::{False, ObjectFile, mk_section_iter}; use llvm::archive_ro::ArchiveRO; -use owning_ref::{ErasedBoxRef, OwningRef}; +use owning_ref::{OwningRef}; use std::path::Path; use std::ptr; +use std::thread; use std::slice; +use std::ops::Deref; + +pub use rustc_data_structures::sync::MetadataRef; pub const METADATA_FILENAME: &str = "rust.metadata.bin"; +/// This is a way to send erased LLVM object across thread. +/// It is used for metadata which gets stored in the global context. +/// We must ensure that the metadata gets dropped in the same thread that allocated it. +// FIXME: How to achieve the above? We probably need to create metadata on the main thread. +struct SameThread { + obj: T, + thread_id: thread::ThreadId, +} + +impl SameThread { + fn new(obj: T) -> Self { + SameThread { + obj, + thread_id: thread::current().id() + } + } +} + +impl Deref for SameThread { + type Target = T; + + fn deref(&self) -> &T { + assert!(thread::current().id() == self.thread_id); + &self.obj + } +} + +// This is safe since there isn't a way to access +// the inner `obj` on a different thread without panicking +unsafe impl Send for SameThread {} +unsafe impl Sync for SameThread {} + +impl Drop for SameThread { + fn drop(&mut self) { + assert!(thread::current().id() == self.thread_id); + } +} + pub struct LlvmMetadataLoader; impl MetadataLoader for LlvmMetadataLoader { - fn get_rlib_metadata(&self, _: &Target, filename: &Path) -> Result, String> { + fn get_rlib_metadata(&self, _: &Target, filename: &Path) -> Result { // Use ArchiveRO for speed here, it's backed by LLVM and uses mmap // internally to read the file. We also avoid even using a memcpy by // just keeping the archive along while the metadata is in use. 
let archive = ArchiveRO::open(filename) - .map(|ar| OwningRef::new(box ar)) + .map(|ar| OwningRef::new(box SameThread::new(ar))) .map_err(|e| { debug!("llvm didn't like `{}`: {}", filename.display(), e); format!("failed to read rlib metadata in '{}': {}", filename.display(), e) @@ -47,13 +89,13 @@ impl MetadataLoader for LlvmMetadataLoader { filename.display()) }) })?; - Ok(buf.erase_owner()) + Ok(rustc_erase_owner!(buf)) } fn get_dylib_metadata(&self, target: &Target, filename: &Path) - -> Result, String> { + -> Result { unsafe { let buf = common::path2cstr(filename); let mb = llvm::LLVMRustCreateMemoryBufferWithContentsOfFile(buf.as_ptr()); @@ -61,11 +103,11 @@ impl MetadataLoader for LlvmMetadataLoader { return Err(format!("error reading library: '{}'", filename.display())); } let of = ObjectFile::new(mb) - .map(|of| OwningRef::new(box of)) + .map(|of| OwningRef::new(box SameThread::new(of))) .ok_or_else(|| format!("provided path not an object file: '{}'", filename.display()))?; let buf = of.try_map(|of| search_meta_section(of, target, filename))?; - Ok(buf.erase_owner()) + Ok(rustc_erase_owner!(buf)) } } } diff --git a/src/librustc_trans_utils/Cargo.toml b/src/librustc_trans_utils/Cargo.toml index 7d9d7cea9335d..855fabe580aaf 100644 --- a/src/librustc_trans_utils/Cargo.toml +++ b/src/librustc_trans_utils/Cargo.toml @@ -12,7 +12,7 @@ test = false [dependencies] ar = "0.3.0" flate2 = "0.2" -owning_ref = "0.3.3" +owning_ref = { git = "https://github.com/Zoxc/owning-ref-rs.git" } log = "0.3" syntax = { path = "../libsyntax" } diff --git a/src/librustc_trans_utils/lib.rs b/src/librustc_trans_utils/lib.rs index d6f8707b8747a..d6866127e1e56 100644 --- a/src/librustc_trans_utils/lib.rs +++ b/src/librustc_trans_utils/lib.rs @@ -36,7 +36,7 @@ extern crate log; #[macro_use] extern crate rustc; extern crate rustc_back; -extern crate rustc_data_structures; +#[macro_use] extern crate rustc_data_structures; extern crate syntax; extern crate syntax_pos; diff --git a/src/librustc_trans_utils/trans_crate.rs b/src/librustc_trans_utils/trans_crate.rs index 645898601614b..a794e39a39b32 100644 --- a/src/librustc_trans_utils/trans_crate.rs +++ b/src/librustc_trans_utils/trans_crate.rs @@ -28,7 +28,7 @@ use std::fs::File; use std::path::Path; use std::sync::mpsc; -use owning_ref::{ErasedBoxRef, OwningRef}; +use owning_ref::OwningRef; use ar::{Archive, Builder, Header}; use flate2::Compression; use flate2::write::DeflateEncoder; @@ -39,18 +39,21 @@ use rustc::session::Session; use rustc::session::config::{CrateType, OutputFilenames}; use rustc::ty::TyCtxt; use rustc::ty::maps::Providers; -use rustc::middle::cstore::EncodedMetadata; +use rustc::middle::cstore::{MetadataLoader, EncodedMetadata}; use rustc::middle::cstore::MetadataLoader as MetadataLoaderTrait; use rustc::dep_graph::{DepGraph, DepNode, DepKind}; use rustc_back::target::Target; use link::{build_link_meta, out_filename}; +use rustc_data_structures::sync::Sync; + +pub use rustc_data_structures::sync::MetadataRef; pub trait TransCrate { type MetadataLoader: MetadataLoaderTrait; type OngoingCrateTranslation; type TranslatedCrate; - fn metadata_loader() -> Box; + fn metadata_loader() -> Box; fn provide(_providers: &mut Providers); fn provide_extern(_providers: &mut Providers); fn trans_crate<'a, 'tcx>( @@ -73,7 +76,7 @@ impl TransCrate for DummyTransCrate { type OngoingCrateTranslation = (); type TranslatedCrate = (); - fn metadata_loader() -> Box { + fn metadata_loader() -> Box { box DummyMetadataLoader(()) } @@ -116,7 +119,7 @@ impl 
MetadataLoaderTrait for DummyMetadataLoader { &self, _target: &Target, _filename: &Path - ) -> Result, String> { + ) -> Result { bug!("DummyMetadataLoader::get_rlib_metadata"); } @@ -124,7 +127,7 @@ impl MetadataLoaderTrait for DummyMetadataLoader { &self, _target: &Target, _filename: &Path - ) -> Result, String> { + ) -> Result { bug!("DummyMetadataLoader::get_dylib_metadata"); } } @@ -132,7 +135,7 @@ impl MetadataLoaderTrait for DummyMetadataLoader { pub struct NoLlvmMetadataLoader; impl MetadataLoaderTrait for NoLlvmMetadataLoader { - fn get_rlib_metadata(&self, _: &Target, filename: &Path) -> Result, String> { + fn get_rlib_metadata(&self, _: &Target, filename: &Path) -> Result { let file = File::open(filename) .map_err(|e| format!("metadata file open err: {:?}", e))?; let mut archive = Archive::new(file); @@ -144,7 +147,7 @@ impl MetadataLoaderTrait for NoLlvmMetadataLoader { let mut buf = Vec::new(); io::copy(&mut entry, &mut buf).unwrap(); let buf: OwningRef, [u8]> = OwningRef::new(buf).into(); - return Ok(buf.map_owner_box().erase_owner()); + return Ok(rustc_erase_owner!(buf.map_owner_box())); } } @@ -155,7 +158,7 @@ impl MetadataLoaderTrait for NoLlvmMetadataLoader { &self, _target: &Target, _filename: &Path, - ) -> Result, String> { + ) -> Result { // FIXME: Support reading dylibs from llvm enabled rustc self.get_rlib_metadata(_target, _filename) } @@ -181,7 +184,7 @@ impl TransCrate for MetadataOnlyTransCrate { type OngoingCrateTranslation = OngoingCrateTranslation; type TranslatedCrate = TranslatedCrate; - fn metadata_loader() -> Box { + fn metadata_loader() -> Box { box NoLlvmMetadataLoader } diff --git a/src/librustc_typeck/check/generator_interior.rs b/src/librustc_typeck/check/generator_interior.rs index af1297697c241..268930c05b141 100644 --- a/src/librustc_typeck/check/generator_interior.rs +++ b/src/librustc_typeck/check/generator_interior.rs @@ -18,14 +18,14 @@ use rustc::hir::intravisit::{self, Visitor, NestedVisitorMap}; use rustc::hir::{self, Pat, PatKind, Expr}; use rustc::middle::region; use rustc::ty::Ty; -use std::rc::Rc; +use rustc_data_structures::sync::Lrc; use super::FnCtxt; use util::nodemap::FxHashMap; struct InteriorVisitor<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { fcx: &'a FnCtxt<'a, 'gcx, 'tcx>, types: FxHashMap, usize>, - region_scope_tree: Rc, + region_scope_tree: Lrc, expr_count: usize, } diff --git a/src/librustc_typeck/check/method/mod.rs b/src/librustc_typeck/check/method/mod.rs index 58d72e37d51cf..b87b61bbaf688 100644 --- a/src/librustc_typeck/check/method/mod.rs +++ b/src/librustc_typeck/check/method/mod.rs @@ -25,7 +25,7 @@ use syntax_pos::Span; use rustc::hir; -use std::rc::Rc; +use rustc_data_structures::sync::Lrc; pub use self::MethodError::*; pub use self::CandidateSource::*; @@ -165,7 +165,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { if let Some(import_id) = pick.import_id { let import_def_id = self.tcx.hir.local_def_id(import_id); debug!("used_trait_import: {:?}", import_def_id); - Rc::get_mut(&mut self.tables.borrow_mut().used_trait_imports) + Lrc::get_mut(&mut self.tables.borrow_mut().used_trait_imports) .unwrap().insert(import_def_id); } @@ -364,7 +364,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { if let Some(import_id) = pick.import_id { let import_def_id = self.tcx.hir.local_def_id(import_id); debug!("used_trait_import: {:?}", import_def_id); - Rc::get_mut(&mut self.tables.borrow_mut().used_trait_imports) + Lrc::get_mut(&mut self.tables.borrow_mut().used_trait_imports) .unwrap().insert(import_def_id); } diff --git 
a/src/librustc_typeck/check/method/suggest.rs b/src/librustc_typeck/check/method/suggest.rs index b3a7c32140b2e..10efc6dc33d37 100644 --- a/src/librustc_typeck/check/method/suggest.rs +++ b/src/librustc_typeck/check/method/suggest.rs @@ -21,6 +21,7 @@ use namespace::Namespace; use rustc::traits::{Obligation, SelectionContext}; use util::nodemap::FxHashSet; +use rustc_data_structures::sync::LockGuard; use syntax::ast; use errors::DiagnosticBuilder; use syntax_pos::Span; @@ -29,7 +30,6 @@ use rustc::hir; use rustc::hir::print; use rustc::infer::type_variable::TypeVariableOrigin; -use std::cell; use std::cmp::Ordering; use super::{MethodError, NoMatchData, CandidateSource}; @@ -631,7 +631,7 @@ pub fn all_traits<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>) -> AllTraits<'a> } pub struct AllTraits<'a> { - borrow: cell::Ref<'a, Option>, + borrow: LockGuard<'a, Option>, idx: usize, } diff --git a/src/librustc_typeck/check/mod.rs b/src/librustc_typeck/check/mod.rs index 24ffc0ca542cf..c05420c82e783 100644 --- a/src/librustc_typeck/check/mod.rs +++ b/src/librustc_typeck/check/mod.rs @@ -109,7 +109,7 @@ use util::common::{ErrorReported, indenter}; use util::nodemap::{DefIdMap, DefIdSet, FxHashMap, NodeMap}; use std::cell::{Cell, RefCell, Ref, RefMut}; -use std::rc::Rc; +use rustc_data_structures::sync::Lrc; use std::collections::hash_map::Entry; use std::cmp; use std::fmt::Display; @@ -833,7 +833,7 @@ fn has_typeck_tables<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, fn used_trait_imports<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) - -> Rc { + -> Lrc { tcx.typeck_tables_of(def_id).used_trait_imports.clone() } diff --git a/src/librustc_typeck/check/regionck.rs b/src/librustc_typeck/check/regionck.rs index 7ef6027772be2..46aa8df23dc76 100644 --- a/src/librustc_typeck/check/regionck.rs +++ b/src/librustc_typeck/check/regionck.rs @@ -97,7 +97,7 @@ use rustc::ty::outlives::Component; use std::mem; use std::ops::Deref; -use std::rc::Rc; +use rustc_data_structures::sync::Lrc; use syntax::ast; use syntax_pos::Span; use rustc::hir::intravisit::{self, Visitor, NestedVisitorMap}; @@ -190,7 +190,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { pub struct RegionCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { pub fcx: &'a FnCtxt<'a, 'gcx, 'tcx>, - pub region_scope_tree: Rc, + pub region_scope_tree: Lrc, outlives_environment: OutlivesEnvironment<'tcx>, diff --git a/src/librustc_typeck/check/writeback.rs b/src/librustc_typeck/check/writeback.rs index 1052f031bbf14..6ec7df7ae0f33 100644 --- a/src/librustc_typeck/check/writeback.rs +++ b/src/librustc_typeck/check/writeback.rs @@ -24,7 +24,7 @@ use rustc::util::nodemap::{DefIdSet, FxHashMap}; use syntax::ast; use syntax_pos::Span; use std::mem; -use std::rc::Rc; +use rustc_data_structures::sync::Lrc; /////////////////////////////////////////////////////////////////////////// // Entry point @@ -49,7 +49,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { wbcx.visit_free_region_map(); let used_trait_imports = mem::replace(&mut self.tables.borrow_mut().used_trait_imports, - Rc::new(DefIdSet())); + Lrc::new(DefIdSet())); debug!("used_trait_imports({:?}) = {:?}", item_def_id, used_trait_imports); wbcx.tables.used_trait_imports = used_trait_imports; diff --git a/src/librustc_typeck/coherence/inherent_impls.rs b/src/librustc_typeck/coherence/inherent_impls.rs index 569b6a2febb45..89f1b8fe4316e 100644 --- a/src/librustc_typeck/coherence/inherent_impls.rs +++ b/src/librustc_typeck/coherence/inherent_impls.rs @@ -24,7 +24,7 @@ use rustc::hir::itemlikevisit::ItemLikeVisitor; use 
rustc::ty::{self, CrateInherentImpls, TyCtxt}; use rustc::util::nodemap::DefIdMap; -use std::rc::Rc; +use rustc_data_structures::sync::Lrc; use syntax::ast; use syntax_pos::Span; @@ -48,7 +48,7 @@ pub fn crate_inherent_impls<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, /// On-demand query: yields a vector of the inherent impls for a specific type. pub fn inherent_impls<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, ty_def_id: DefId) - -> Rc> { + -> Lrc> { assert!(ty_def_id.is_local()); // NB. Until we adopt the red-green dep-tracking algorithm (see @@ -67,7 +67,7 @@ pub fn inherent_impls<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, // [the plan]: https://github.com/rust-lang/rust-roadmap/issues/4 thread_local! { - static EMPTY_DEF_ID_VEC: Rc> = Rc::new(vec![]) + static EMPTY_DEF_ID_VEC: Lrc> = Lrc::new(vec![]) } let result = tcx.dep_graph.with_ignore(|| { @@ -296,11 +296,11 @@ impl<'a, 'tcx> InherentCollect<'a, 'tcx> { let impl_def_id = self.tcx.hir.local_def_id(item.id); let mut rc_vec = self.impls_map.inherent_impls .entry(def_id) - .or_insert_with(|| Rc::new(vec![])); + .or_insert_with(|| Lrc::new(vec![])); // At this point, there should not be any clones of the - // `Rc`, so we can still safely push into it in place: - Rc::get_mut(&mut rc_vec).unwrap().push(impl_def_id); + // `Lrc`, so we can still safely push into it in place: + Lrc::get_mut(&mut rc_vec).unwrap().push(impl_def_id); } else { struct_span_err!(self.tcx.sess, item.span, diff --git a/src/librustc_typeck/variance/mod.rs b/src/librustc_typeck/variance/mod.rs index 418d2b9467096..2392104424355 100644 --- a/src/librustc_typeck/variance/mod.rs +++ b/src/librustc_typeck/variance/mod.rs @@ -17,7 +17,7 @@ use rustc::hir; use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE}; use rustc::ty::{self, CrateVariancesMap, TyCtxt}; use rustc::ty::maps::Providers; -use std::rc::Rc; +use rustc_data_structures::sync::Lrc; /// Defines the `TermsContext` basically houses an arena where we can /// allocate terms. @@ -44,16 +44,16 @@ pub fn provide(providers: &mut Providers) { } fn crate_variances<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, crate_num: CrateNum) - -> Rc { + -> Lrc { assert_eq!(crate_num, LOCAL_CRATE); let mut arena = arena::TypedArena::new(); let terms_cx = terms::determine_parameters_to_be_inferred(tcx, &mut arena); let constraints_cx = constraints::add_constraints_from_crate(terms_cx); - Rc::new(solve::solve_constraints(constraints_cx)) + Lrc::new(solve::solve_constraints(constraints_cx)) } fn variances_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, item_def_id: DefId) - -> Rc> { + -> Lrc> { let id = tcx.hir.as_local_node_id(item_def_id).expect("expected local def-id"); let unsupported = || { // Variance not relevant. 
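The hunks up to this point all lean on the same two building blocks from `rustc_data_structures::sync`: `Lrc` replaces `std::rc::Rc`, and `Lock`/`LockGuard` replace `RefCell`/`cell::Ref`. The following is a minimal sketch of what such cfg-switched wrappers could look like; the `parallel_queries` cfg name and the exact method set are assumptions made here for illustration, not the actual contents of that module.

// Illustrative sketch only (assumed cfg name and method set); the real
// rustc_data_structures::sync module is richer than this. The point is that
// `Lrc` and `Lock` compile down to the old single-threaded types unless the
// compiler is built for parallel queries, which is why the mechanical
// Rc -> Lrc and RefCell -> Lock substitutions in the hunks above are cheap.
use std::cell::{Ref, RefCell, RefMut};

// `Lrc<T>`: reference counting that can be switched to atomic reference counting.
#[cfg(not(parallel_queries))]
pub type Lrc<T> = std::rc::Rc<T>;
#[cfg(parallel_queries)]
pub type Lrc<T> = std::sync::Arc<T>;

// `Lock<T>`: RefCell-backed here, mutex-backed in a parallel build. Offering both
// `borrow()`-style and `lock()`-style accessors is what lets the diff write
// `self.files.borrow()` in CodeMap and `tcx.tx_to_llvm_workers.lock()` in trans
// against the same wrapper type.
#[cfg(not(parallel_queries))]
pub struct Lock<T>(RefCell<T>);

#[cfg(not(parallel_queries))]
impl<T> Lock<T> {
    pub fn new(value: T) -> Self {
        Lock(RefCell::new(value))
    }
    pub fn borrow(&self) -> Ref<'_, T> {
        self.0.borrow()
    }
    pub fn borrow_mut(&self) -> RefMut<'_, T> {
        self.0.borrow_mut()
    }
    pub fn lock(&self) -> RefMut<'_, T> {
        self.0.borrow_mut()
    }
}

#[cfg(not(parallel_queries))]
pub type LockGuard<'a, T> = RefMut<'a, T>;

In a single-threaded build a sketch like this is essentially zero-cost relative to the old code, which is presumably why the substitution can be made wholesale across librustc_mir, librustc_typeck, librustc_trans and the other crates touched above.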
diff --git a/src/librustc_typeck/variance/solve.rs b/src/librustc_typeck/variance/solve.rs index 434e8ce148f3b..340a7b1d08ede 100644 --- a/src/librustc_typeck/variance/solve.rs +++ b/src/librustc_typeck/variance/solve.rs @@ -18,7 +18,7 @@ use rustc::hir::def_id::DefId; use rustc::ty; use rustc_data_structures::fx::FxHashMap; -use std::rc::Rc; +use rustc_data_structures::sync::Lrc; use super::constraints::*; use super::terms::*; @@ -51,7 +51,7 @@ pub fn solve_constraints(constraints_cx: ConstraintContext) -> ty::CrateVariance }; solutions_cx.solve(); let variances = solutions_cx.create_map(); - let empty_variance = Rc::new(Vec::new()); + let empty_variance = Lrc::new(Vec::new()); ty::CrateVariancesMap { variances, empty_variance } } @@ -88,7 +88,7 @@ impl<'a, 'tcx> SolveContext<'a, 'tcx> { } } - fn create_map(&self) -> FxHashMap>> { + fn create_map(&self) -> FxHashMap>> { let tcx = self.terms_cx.tcx; let solutions = &self.solutions; @@ -109,7 +109,7 @@ impl<'a, 'tcx> SolveContext<'a, 'tcx> { } } - (def_id, Rc::new(variances)) + (def_id, Lrc::new(variances)) }).collect() } diff --git a/src/librustdoc/clean/cfg.rs b/src/librustdoc/clean/cfg.rs index 5eb3e38d5b371..b29ccde18598e 100644 --- a/src/librustdoc/clean/cfg.rs +++ b/src/librustdoc/clean/cfg.rs @@ -398,6 +398,7 @@ mod test { use syntax::ast::*; use syntax::codemap::dummy_spanned; use syntax_pos::DUMMY_SP; + use syntax::{Globals, with_globals}; fn word_cfg(s: &str) -> Cfg { Cfg::Cfg(Symbol::intern(s), None) @@ -409,479 +410,494 @@ mod test { #[test] fn test_cfg_not() { - assert_eq!(!Cfg::False, Cfg::True); - assert_eq!(!Cfg::True, Cfg::False); - assert_eq!(!word_cfg("test"), Cfg::Not(Box::new(word_cfg("test")))); - assert_eq!( - !Cfg::All(vec![word_cfg("a"), word_cfg("b")]), - Cfg::Not(Box::new(Cfg::All(vec![word_cfg("a"), word_cfg("b")]))) - ); - assert_eq!( - !Cfg::Any(vec![word_cfg("a"), word_cfg("b")]), - Cfg::Not(Box::new(Cfg::Any(vec![word_cfg("a"), word_cfg("b")]))) - ); - assert_eq!(!Cfg::Not(Box::new(word_cfg("test"))), word_cfg("test")); + with_globals(&Globals::new(), || { + assert_eq!(!Cfg::False, Cfg::True); + assert_eq!(!Cfg::True, Cfg::False); + assert_eq!(!word_cfg("test"), Cfg::Not(Box::new(word_cfg("test")))); + assert_eq!( + !Cfg::All(vec![word_cfg("a"), word_cfg("b")]), + Cfg::Not(Box::new(Cfg::All(vec![word_cfg("a"), word_cfg("b")]))) + ); + assert_eq!( + !Cfg::Any(vec![word_cfg("a"), word_cfg("b")]), + Cfg::Not(Box::new(Cfg::Any(vec![word_cfg("a"), word_cfg("b")]))) + ); + assert_eq!(!Cfg::Not(Box::new(word_cfg("test"))), word_cfg("test")); + }) } #[test] fn test_cfg_and() { - let mut x = Cfg::False; - x &= Cfg::True; - assert_eq!(x, Cfg::False); - - x = word_cfg("test"); - x &= Cfg::False; - assert_eq!(x, Cfg::False); - - x = word_cfg("test2"); - x &= Cfg::True; - assert_eq!(x, word_cfg("test2")); - - x = Cfg::True; - x &= word_cfg("test3"); - assert_eq!(x, word_cfg("test3")); - - x &= word_cfg("test4"); - assert_eq!(x, Cfg::All(vec![word_cfg("test3"), word_cfg("test4")])); - - x &= word_cfg("test5"); - assert_eq!(x, Cfg::All(vec![word_cfg("test3"), word_cfg("test4"), word_cfg("test5")])); - - x &= Cfg::All(vec![word_cfg("test6"), word_cfg("test7")]); - assert_eq!(x, Cfg::All(vec![ - word_cfg("test3"), - word_cfg("test4"), - word_cfg("test5"), - word_cfg("test6"), - word_cfg("test7"), - ])); - - let mut y = Cfg::Any(vec![word_cfg("a"), word_cfg("b")]); - y &= x; - assert_eq!(y, Cfg::All(vec![ - word_cfg("test3"), - word_cfg("test4"), - word_cfg("test5"), - word_cfg("test6"), - word_cfg("test7"), - 
Cfg::Any(vec![word_cfg("a"), word_cfg("b")]), - ])); - - assert_eq!( - word_cfg("a") & word_cfg("b") & word_cfg("c"), - Cfg::All(vec![word_cfg("a"), word_cfg("b"), word_cfg("c")]) - ); + with_globals(&Globals::new(), || { + let mut x = Cfg::False; + x &= Cfg::True; + assert_eq!(x, Cfg::False); + + x = word_cfg("test"); + x &= Cfg::False; + assert_eq!(x, Cfg::False); + + x = word_cfg("test2"); + x &= Cfg::True; + assert_eq!(x, word_cfg("test2")); + + x = Cfg::True; + x &= word_cfg("test3"); + assert_eq!(x, word_cfg("test3")); + + x &= word_cfg("test4"); + assert_eq!(x, Cfg::All(vec![word_cfg("test3"), word_cfg("test4")])); + + x &= word_cfg("test5"); + assert_eq!(x, Cfg::All(vec![word_cfg("test3"), word_cfg("test4"), word_cfg("test5")])); + + x &= Cfg::All(vec![word_cfg("test6"), word_cfg("test7")]); + assert_eq!(x, Cfg::All(vec![ + word_cfg("test3"), + word_cfg("test4"), + word_cfg("test5"), + word_cfg("test6"), + word_cfg("test7"), + ])); + + let mut y = Cfg::Any(vec![word_cfg("a"), word_cfg("b")]); + y &= x; + assert_eq!(y, Cfg::All(vec![ + word_cfg("test3"), + word_cfg("test4"), + word_cfg("test5"), + word_cfg("test6"), + word_cfg("test7"), + Cfg::Any(vec![word_cfg("a"), word_cfg("b")]), + ])); + + assert_eq!( + word_cfg("a") & word_cfg("b") & word_cfg("c"), + Cfg::All(vec![word_cfg("a"), word_cfg("b"), word_cfg("c")]) + ); + }) } #[test] fn test_cfg_or() { - let mut x = Cfg::True; - x |= Cfg::False; - assert_eq!(x, Cfg::True); - - x = word_cfg("test"); - x |= Cfg::True; - assert_eq!(x, Cfg::True); - - x = word_cfg("test2"); - x |= Cfg::False; - assert_eq!(x, word_cfg("test2")); - - x = Cfg::False; - x |= word_cfg("test3"); - assert_eq!(x, word_cfg("test3")); - - x |= word_cfg("test4"); - assert_eq!(x, Cfg::Any(vec![word_cfg("test3"), word_cfg("test4")])); - - x |= word_cfg("test5"); - assert_eq!(x, Cfg::Any(vec![word_cfg("test3"), word_cfg("test4"), word_cfg("test5")])); - - x |= Cfg::Any(vec![word_cfg("test6"), word_cfg("test7")]); - assert_eq!(x, Cfg::Any(vec![ - word_cfg("test3"), - word_cfg("test4"), - word_cfg("test5"), - word_cfg("test6"), - word_cfg("test7"), - ])); - - let mut y = Cfg::All(vec![word_cfg("a"), word_cfg("b")]); - y |= x; - assert_eq!(y, Cfg::Any(vec![ - word_cfg("test3"), - word_cfg("test4"), - word_cfg("test5"), - word_cfg("test6"), - word_cfg("test7"), - Cfg::All(vec![word_cfg("a"), word_cfg("b")]), - ])); - - assert_eq!( - word_cfg("a") | word_cfg("b") | word_cfg("c"), - Cfg::Any(vec![word_cfg("a"), word_cfg("b"), word_cfg("c")]) - ); + with_globals(&Globals::new(), || { + let mut x = Cfg::True; + x |= Cfg::False; + assert_eq!(x, Cfg::True); + + x = word_cfg("test"); + x |= Cfg::True; + assert_eq!(x, Cfg::True); + + x = word_cfg("test2"); + x |= Cfg::False; + assert_eq!(x, word_cfg("test2")); + + x = Cfg::False; + x |= word_cfg("test3"); + assert_eq!(x, word_cfg("test3")); + + x |= word_cfg("test4"); + assert_eq!(x, Cfg::Any(vec![word_cfg("test3"), word_cfg("test4")])); + + x |= word_cfg("test5"); + assert_eq!(x, Cfg::Any(vec![word_cfg("test3"), word_cfg("test4"), word_cfg("test5")])); + + x |= Cfg::Any(vec![word_cfg("test6"), word_cfg("test7")]); + assert_eq!(x, Cfg::Any(vec![ + word_cfg("test3"), + word_cfg("test4"), + word_cfg("test5"), + word_cfg("test6"), + word_cfg("test7"), + ])); + + let mut y = Cfg::All(vec![word_cfg("a"), word_cfg("b")]); + y |= x; + assert_eq!(y, Cfg::Any(vec![ + word_cfg("test3"), + word_cfg("test4"), + word_cfg("test5"), + word_cfg("test6"), + word_cfg("test7"), + Cfg::All(vec![word_cfg("a"), word_cfg("b")]), + ])); + + 
assert_eq!( + word_cfg("a") | word_cfg("b") | word_cfg("c"), + Cfg::Any(vec![word_cfg("a"), word_cfg("b"), word_cfg("c")]) + ); + }) } #[test] fn test_parse_ok() { - let mi = MetaItem { - name: Symbol::intern("all"), - node: MetaItemKind::Word, - span: DUMMY_SP, - }; - assert_eq!(Cfg::parse(&mi), Ok(word_cfg("all"))); - - let mi = MetaItem { - name: Symbol::intern("all"), - node: MetaItemKind::NameValue(dummy_spanned(LitKind::Str( - Symbol::intern("done"), - StrStyle::Cooked, - ))), - span: DUMMY_SP, - }; - assert_eq!(Cfg::parse(&mi), Ok(name_value_cfg("all", "done"))); - - let mi = MetaItem { - name: Symbol::intern("all"), - node: MetaItemKind::List(vec![ - dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { - name: Symbol::intern("a"), - node: MetaItemKind::Word, - span: DUMMY_SP, - })), - dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { - name: Symbol::intern("b"), - node: MetaItemKind::Word, - span: DUMMY_SP, - })), - ]), - span: DUMMY_SP, - }; - assert_eq!(Cfg::parse(&mi), Ok(word_cfg("a") & word_cfg("b"))); - - let mi = MetaItem { - name: Symbol::intern("any"), - node: MetaItemKind::List(vec![ - dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { - name: Symbol::intern("a"), - node: MetaItemKind::Word, - span: DUMMY_SP, - })), - dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { - name: Symbol::intern("b"), - node: MetaItemKind::Word, - span: DUMMY_SP, - })), - ]), - span: DUMMY_SP, - }; - assert_eq!(Cfg::parse(&mi), Ok(word_cfg("a") | word_cfg("b"))); - - let mi = MetaItem { - name: Symbol::intern("not"), - node: MetaItemKind::List(vec![ - dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { - name: Symbol::intern("a"), - node: MetaItemKind::Word, - span: DUMMY_SP, - })), - ]), - span: DUMMY_SP, - }; - assert_eq!(Cfg::parse(&mi), Ok(!word_cfg("a"))); - - let mi = MetaItem { - name: Symbol::intern("not"), - node: MetaItemKind::List(vec![ - dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { - name: Symbol::intern("any"), - node: MetaItemKind::List(vec![ - dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { - name: Symbol::intern("a"), - node: MetaItemKind::Word, - span: DUMMY_SP, - })), - dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { - name: Symbol::intern("all"), - node: MetaItemKind::List(vec![ - dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { - name: Symbol::intern("b"), - node: MetaItemKind::Word, - span: DUMMY_SP, - })), - dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { - name: Symbol::intern("c"), - node: MetaItemKind::Word, - span: DUMMY_SP, - })), - ]), - span: DUMMY_SP, - })), - ]), - span: DUMMY_SP, - })), - ]), - span: DUMMY_SP, - }; - assert_eq!(Cfg::parse(&mi), Ok(!(word_cfg("a") | (word_cfg("b") & word_cfg("c"))))); - - let mi = MetaItem { - name: Symbol::intern("all"), - node: MetaItemKind::List(vec![ - dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { - name: Symbol::intern("a"), - node: MetaItemKind::Word, - span: DUMMY_SP, - })), - dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { - name: Symbol::intern("b"), - node: MetaItemKind::Word, - span: DUMMY_SP, - })), - dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { - name: Symbol::intern("c"), - node: MetaItemKind::Word, - span: DUMMY_SP, - })), - ]), - span: DUMMY_SP, - }; - assert_eq!(Cfg::parse(&mi), Ok(word_cfg("a") & word_cfg("b") & word_cfg("c"))); + with_globals(&Globals::new(), || { + let mi = MetaItem { + name: Symbol::intern("all"), + node: MetaItemKind::Word, + span: DUMMY_SP, + }; + assert_eq!(Cfg::parse(&mi), Ok(word_cfg("all"))); + + let mi = MetaItem { + 
name: Symbol::intern("all"), + node: MetaItemKind::NameValue(dummy_spanned(LitKind::Str( + Symbol::intern("done"), + StrStyle::Cooked, + ))), + span: DUMMY_SP, + }; + assert_eq!(Cfg::parse(&mi), Ok(name_value_cfg("all", "done"))); + + let mi = MetaItem { + name: Symbol::intern("all"), + node: MetaItemKind::List(vec![ + dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { + name: Symbol::intern("a"), + node: MetaItemKind::Word, + span: DUMMY_SP, + })), + dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { + name: Symbol::intern("b"), + node: MetaItemKind::Word, + span: DUMMY_SP, + })), + ]), + span: DUMMY_SP, + }; + assert_eq!(Cfg::parse(&mi), Ok(word_cfg("a") & word_cfg("b"))); + + let mi = MetaItem { + name: Symbol::intern("any"), + node: MetaItemKind::List(vec![ + dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { + name: Symbol::intern("a"), + node: MetaItemKind::Word, + span: DUMMY_SP, + })), + dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { + name: Symbol::intern("b"), + node: MetaItemKind::Word, + span: DUMMY_SP, + })), + ]), + span: DUMMY_SP, + }; + assert_eq!(Cfg::parse(&mi), Ok(word_cfg("a") | word_cfg("b"))); + + let mi = MetaItem { + name: Symbol::intern("not"), + node: MetaItemKind::List(vec![ + dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { + name: Symbol::intern("a"), + node: MetaItemKind::Word, + span: DUMMY_SP, + })), + ]), + span: DUMMY_SP, + }; + assert_eq!(Cfg::parse(&mi), Ok(!word_cfg("a"))); + + let mi = MetaItem { + name: Symbol::intern("not"), + node: MetaItemKind::List(vec![ + dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { + name: Symbol::intern("any"), + node: MetaItemKind::List(vec![ + dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { + name: Symbol::intern("a"), + node: MetaItemKind::Word, + span: DUMMY_SP, + })), + dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { + name: Symbol::intern("all"), + node: MetaItemKind::List(vec![ + dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { + name: Symbol::intern("b"), + node: MetaItemKind::Word, + span: DUMMY_SP, + })), + dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { + name: Symbol::intern("c"), + node: MetaItemKind::Word, + span: DUMMY_SP, + })), + ]), + span: DUMMY_SP, + })), + ]), + span: DUMMY_SP, + })), + ]), + span: DUMMY_SP, + }; + assert_eq!(Cfg::parse(&mi), Ok(!(word_cfg("a") | (word_cfg("b") & word_cfg("c"))))); + + let mi = MetaItem { + name: Symbol::intern("all"), + node: MetaItemKind::List(vec![ + dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { + name: Symbol::intern("a"), + node: MetaItemKind::Word, + span: DUMMY_SP, + })), + dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { + name: Symbol::intern("b"), + node: MetaItemKind::Word, + span: DUMMY_SP, + })), + dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { + name: Symbol::intern("c"), + node: MetaItemKind::Word, + span: DUMMY_SP, + })), + ]), + span: DUMMY_SP, + }; + assert_eq!(Cfg::parse(&mi), Ok(word_cfg("a") & word_cfg("b") & word_cfg("c"))); + }) } #[test] fn test_parse_err() { - let mi = MetaItem { - name: Symbol::intern("foo"), - node: MetaItemKind::NameValue(dummy_spanned(LitKind::Bool(false))), - span: DUMMY_SP, - }; - assert!(Cfg::parse(&mi).is_err()); - - let mi = MetaItem { - name: Symbol::intern("not"), - node: MetaItemKind::List(vec![ - dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { - name: Symbol::intern("a"), - node: MetaItemKind::Word, - span: DUMMY_SP, - })), - dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { - name: Symbol::intern("b"), - node: MetaItemKind::Word, - span: 
DUMMY_SP, - })), - ]), - span: DUMMY_SP, - }; - assert!(Cfg::parse(&mi).is_err()); - - let mi = MetaItem { - name: Symbol::intern("not"), - node: MetaItemKind::List(vec![]), - span: DUMMY_SP, - }; - assert!(Cfg::parse(&mi).is_err()); - - let mi = MetaItem { - name: Symbol::intern("foo"), - node: MetaItemKind::List(vec![ - dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { - name: Symbol::intern("a"), - node: MetaItemKind::Word, - span: DUMMY_SP, - })), - ]), - span: DUMMY_SP, - }; - assert!(Cfg::parse(&mi).is_err()); - - let mi = MetaItem { - name: Symbol::intern("all"), - node: MetaItemKind::List(vec![ - dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { - name: Symbol::intern("foo"), - node: MetaItemKind::List(vec![]), - span: DUMMY_SP, - })), - dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { - name: Symbol::intern("b"), - node: MetaItemKind::Word, - span: DUMMY_SP, - })), - ]), - span: DUMMY_SP, - }; - assert!(Cfg::parse(&mi).is_err()); - - let mi = MetaItem { - name: Symbol::intern("any"), - node: MetaItemKind::List(vec![ - dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { - name: Symbol::intern("a"), - node: MetaItemKind::Word, - span: DUMMY_SP, - })), - dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { - name: Symbol::intern("foo"), - node: MetaItemKind::List(vec![]), - span: DUMMY_SP, - })), - ]), - span: DUMMY_SP, - }; - assert!(Cfg::parse(&mi).is_err()); - - let mi = MetaItem { - name: Symbol::intern("not"), - node: MetaItemKind::List(vec![ - dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { - name: Symbol::intern("foo"), - node: MetaItemKind::List(vec![]), - span: DUMMY_SP, - })), - ]), - span: DUMMY_SP, - }; - assert!(Cfg::parse(&mi).is_err()); + with_globals(&Globals::new(), || { + let mi = MetaItem { + name: Symbol::intern("foo"), + node: MetaItemKind::NameValue(dummy_spanned(LitKind::Bool(false))), + span: DUMMY_SP, + }; + assert!(Cfg::parse(&mi).is_err()); + + let mi = MetaItem { + name: Symbol::intern("not"), + node: MetaItemKind::List(vec![ + dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { + name: Symbol::intern("a"), + node: MetaItemKind::Word, + span: DUMMY_SP, + })), + dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { + name: Symbol::intern("b"), + node: MetaItemKind::Word, + span: DUMMY_SP, + })), + ]), + span: DUMMY_SP, + }; + assert!(Cfg::parse(&mi).is_err()); + + let mi = MetaItem { + name: Symbol::intern("not"), + node: MetaItemKind::List(vec![]), + span: DUMMY_SP, + }; + assert!(Cfg::parse(&mi).is_err()); + + let mi = MetaItem { + name: Symbol::intern("foo"), + node: MetaItemKind::List(vec![ + dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { + name: Symbol::intern("a"), + node: MetaItemKind::Word, + span: DUMMY_SP, + })), + ]), + span: DUMMY_SP, + }; + assert!(Cfg::parse(&mi).is_err()); + + let mi = MetaItem { + name: Symbol::intern("all"), + node: MetaItemKind::List(vec![ + dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { + name: Symbol::intern("foo"), + node: MetaItemKind::List(vec![]), + span: DUMMY_SP, + })), + dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { + name: Symbol::intern("b"), + node: MetaItemKind::Word, + span: DUMMY_SP, + })), + ]), + span: DUMMY_SP, + }; + assert!(Cfg::parse(&mi).is_err()); + + let mi = MetaItem { + name: Symbol::intern("any"), + node: MetaItemKind::List(vec![ + dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { + name: Symbol::intern("a"), + node: MetaItemKind::Word, + span: DUMMY_SP, + })), + dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { + name: Symbol::intern("foo"), 
+ node: MetaItemKind::List(vec![]), + span: DUMMY_SP, + })), + ]), + span: DUMMY_SP, + }; + assert!(Cfg::parse(&mi).is_err()); + + let mi = MetaItem { + name: Symbol::intern("not"), + node: MetaItemKind::List(vec![ + dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { + name: Symbol::intern("foo"), + node: MetaItemKind::List(vec![]), + span: DUMMY_SP, + })), + ]), + span: DUMMY_SP, + }; + assert!(Cfg::parse(&mi).is_err()); + }) } #[test] fn test_render_short_html() { - assert_eq!( - word_cfg("unix").render_short_html(), - "Unix" - ); - assert_eq!( - name_value_cfg("target_os", "macos").render_short_html(), - "macOS" - ); - assert_eq!( - name_value_cfg("target_pointer_width", "16").render_short_html(), - "16-bit" - ); - assert_eq!( - name_value_cfg("target_endian", "little").render_short_html(), - "Little-endian" - ); - assert_eq!( - (!word_cfg("windows")).render_short_html(), - "Non-Windows" - ); - assert_eq!( - (word_cfg("unix") & word_cfg("windows")).render_short_html(), - "Unix and Windows" - ); - assert_eq!( - (word_cfg("unix") | word_cfg("windows")).render_short_html(), - "Unix or Windows" - ); - assert_eq!( - ( - word_cfg("unix") & word_cfg("windows") & word_cfg("debug_assertions") - ).render_short_html(), - "Unix and Windows and debug-assertions enabled" - ); - assert_eq!( - ( - word_cfg("unix") | word_cfg("windows") | word_cfg("debug_assertions") - ).render_short_html(), - "Unix or Windows or debug-assertions enabled" - ); - assert_eq!( - ( - !(word_cfg("unix") | word_cfg("windows") | word_cfg("debug_assertions")) - ).render_short_html(), - "Neither Unix nor Windows nor debug-assertions enabled" - ); - assert_eq!( - ( - (word_cfg("unix") & name_value_cfg("target_arch", "x86_64")) | - (word_cfg("windows") & name_value_cfg("target_pointer_width", "64")) - ).render_short_html(), - "Unix and x86-64, or Windows and 64-bit" - ); - assert_eq!( - (!(word_cfg("unix") & word_cfg("windows"))).render_short_html(), - "Not (Unix and Windows)" - ); - assert_eq!( - ( - (word_cfg("debug_assertions") | word_cfg("windows")) & word_cfg("unix") - ).render_short_html(), - "(Debug-assertions enabled or Windows) and Unix" - ); + with_globals(&Globals::new(), || { + assert_eq!( + word_cfg("unix").render_short_html(), + "Unix" + ); + assert_eq!( + name_value_cfg("target_os", "macos").render_short_html(), + "macOS" + ); + assert_eq!( + name_value_cfg("target_pointer_width", "16").render_short_html(), + "16-bit" + ); + assert_eq!( + name_value_cfg("target_endian", "little").render_short_html(), + "Little-endian" + ); + assert_eq!( + (!word_cfg("windows")).render_short_html(), + "Non-Windows" + ); + assert_eq!( + (word_cfg("unix") & word_cfg("windows")).render_short_html(), + "Unix and Windows" + ); + assert_eq!( + (word_cfg("unix") | word_cfg("windows")).render_short_html(), + "Unix or Windows" + ); + assert_eq!( + ( + word_cfg("unix") & word_cfg("windows") & word_cfg("debug_assertions") + ).render_short_html(), + "Unix and Windows and debug-assertions enabled" + ); + assert_eq!( + ( + word_cfg("unix") | word_cfg("windows") | word_cfg("debug_assertions") + ).render_short_html(), + "Unix or Windows or debug-assertions enabled" + ); + assert_eq!( + ( + !(word_cfg("unix") | word_cfg("windows") | word_cfg("debug_assertions")) + ).render_short_html(), + "Neither Unix nor Windows nor debug-assertions enabled" + ); + assert_eq!( + ( + (word_cfg("unix") & name_value_cfg("target_arch", "x86_64")) | + (word_cfg("windows") & name_value_cfg("target_pointer_width", "64")) + ).render_short_html(), + "Unix and x86-64, or 
Windows and 64-bit" + ); + assert_eq!( + (!(word_cfg("unix") & word_cfg("windows"))).render_short_html(), + "Not (Unix and Windows)" + ); + assert_eq!( + ( + (word_cfg("debug_assertions") | word_cfg("windows")) & word_cfg("unix") + ).render_short_html(), + "(Debug-assertions enabled or Windows) and Unix" + ); + }) } #[test] fn test_render_long_html() { - assert_eq!( - word_cfg("unix").render_long_html(), - "This is supported on Unix only." - ); - assert_eq!( - name_value_cfg("target_os", "macos").render_long_html(), - "This is supported on macOS only." - ); - assert_eq!( - name_value_cfg("target_pointer_width", "16").render_long_html(), - "This is supported on 16-bit only." - ); - assert_eq!( - name_value_cfg("target_endian", "little").render_long_html(), - "This is supported on little-endian only." - ); - assert_eq!( - (!word_cfg("windows")).render_long_html(), - "This is supported on non-Windows only." - ); - assert_eq!( - (word_cfg("unix") & word_cfg("windows")).render_long_html(), - "This is supported on Unix and Windows only." - ); - assert_eq!( - (word_cfg("unix") | word_cfg("windows")).render_long_html(), - "This is supported on Unix or Windows only." - ); - assert_eq!( - ( - word_cfg("unix") & word_cfg("windows") & word_cfg("debug_assertions") - ).render_long_html(), - "This is supported on Unix and Windows and debug-assertions enabled \ - only." - ); - assert_eq!( - ( - word_cfg("unix") | word_cfg("windows") | word_cfg("debug_assertions") - ).render_long_html(), - "This is supported on Unix or Windows or debug-assertions enabled \ - only." - ); - assert_eq!( - ( - !(word_cfg("unix") | word_cfg("windows") | word_cfg("debug_assertions")) - ).render_long_html(), - "This is supported on neither Unix nor Windows nor debug-assertions \ - enabled." - ); - assert_eq!( - ( - (word_cfg("unix") & name_value_cfg("target_arch", "x86_64")) | - (word_cfg("windows") & name_value_cfg("target_pointer_width", "64")) - ).render_long_html(), - "This is supported on Unix and x86-64, or Windows and 64-bit only." - ); - assert_eq!( - (!(word_cfg("unix") & word_cfg("windows"))).render_long_html(), - "This is supported on not (Unix and Windows)." - ); - assert_eq!( - ( - (word_cfg("debug_assertions") | word_cfg("windows")) & word_cfg("unix") - ).render_long_html(), - "This is supported on (debug-assertions enabled or Windows) and Unix \ - only." - ); + with_globals(&Globals::new(), || { + assert_eq!( + word_cfg("unix").render_long_html(), + "This is supported on Unix only." + ); + assert_eq!( + name_value_cfg("target_os", "macos").render_long_html(), + "This is supported on macOS only." + ); + assert_eq!( + name_value_cfg("target_pointer_width", "16").render_long_html(), + "This is supported on 16-bit only." + ); + assert_eq!( + name_value_cfg("target_endian", "little").render_long_html(), + "This is supported on little-endian only." + ); + assert_eq!( + (!word_cfg("windows")).render_long_html(), + "This is supported on non-Windows only." + ); + assert_eq!( + (word_cfg("unix") & word_cfg("windows")).render_long_html(), + "This is supported on Unix and Windows only." + ); + assert_eq!( + (word_cfg("unix") | word_cfg("windows")).render_long_html(), + "This is supported on Unix or Windows only." + ); + assert_eq!( + ( + word_cfg("unix") & word_cfg("windows") & word_cfg("debug_assertions") + ).render_long_html(), + "This is supported on Unix and Windows and debug-assertions enabled\ + only." 
+ ); + assert_eq!( + ( + word_cfg("unix") | word_cfg("windows") | word_cfg("debug_assertions") + ).render_long_html(), + "This is supported on Unix or Windows or debug-assertions enabled\ + only." + ); + assert_eq!( + ( + !(word_cfg("unix") | word_cfg("windows") | word_cfg("debug_assertions")) + ).render_long_html(), + "This is supported on neither Unix nor Windows nor debug-assertions \ + enabled." + ); + assert_eq!( + ( + (word_cfg("unix") & name_value_cfg("target_arch", "x86_64")) | + (word_cfg("windows") & name_value_cfg("target_pointer_width", "64")) + ).render_long_html(), + "This is supported on Unix and x86-64, or Windows and 64-bit \ + only." + ); + assert_eq!( + (!(word_cfg("unix") & word_cfg("windows"))).render_long_html(), + "This is supported on not (Unix and Windows)." + ); + assert_eq!( + ( + (word_cfg("debug_assertions") | word_cfg("windows")) & word_cfg("unix") + ).render_long_html(), + "This is supported on (debug-assertions enabled or Windows) and Unix\ + only." + ); + }) } } diff --git a/src/librustdoc/clean/inline.rs b/src/librustdoc/clean/inline.rs index 16b5216d58d43..95def83cb53ec 100644 --- a/src/librustdoc/clean/inline.rs +++ b/src/librustdoc/clean/inline.rs @@ -13,7 +13,7 @@ use std::collections::BTreeMap; use std::io; use std::iter::once; -use std::rc::Rc; +use rustc_data_structures::sync::Lrc; use syntax::ast; use rustc::hir; @@ -403,7 +403,7 @@ fn build_module(cx: &DocContext, did: DefId) -> clean::Module { } struct InlinedConst { - nested_bodies: Rc> + nested_bodies: Lrc> } impl hir::print::PpAnn for InlinedConst { diff --git a/src/librustdoc/core.rs b/src/librustdoc/core.rs index 456a00947ae0a..6e48469dc2ecb 100644 --- a/src/librustdoc/core.rs +++ b/src/librustdoc/core.rs @@ -14,7 +14,7 @@ use rustc::session::{self, config}; use rustc::hir::def_id::DefId; use rustc::hir::def::Def; use rustc::middle::privacy::AccessLevels; -use rustc::ty::{self, TyCtxt, GlobalArenas}; +use rustc::ty::{self, TyCtxt, AllArenas}; use rustc::hir::map as hir_map; use rustc::lint; use rustc::util::nodemap::FxHashMap; @@ -31,13 +31,13 @@ use errors::emitter::ColorConfig; use std::cell::{RefCell, Cell}; use std::mem; use std::rc::Rc; +use rustc_data_structures::sync::Lrc; use std::path::PathBuf; use visit_ast::RustdocVisitor; use clean; use clean::Clean; use html::render::RenderInfo; -use arena::DroplessArena; pub use rustc::session::config::Input; pub use rustc::session::search_paths::SearchPaths; @@ -135,7 +135,7 @@ pub fn run_core(search_paths: SearchPaths, ..config::basic_options().clone() }; - let codemap = Rc::new(codemap::CodeMap::new(sessopts.file_path_mapping())); + let codemap = Lrc::new(codemap::CodeMap::new(sessopts.file_path_mapping())); let diagnostic_handler = errors::Handler::with_tty_emitter(ColorConfig::Auto, true, false, @@ -170,8 +170,7 @@ pub fn run_core(search_paths: SearchPaths, abort_on_err(result, &sess) }; - let arena = DroplessArena::new(); - let arenas = GlobalArenas::new(); + let arenas = AllArenas::new(); let hir_map = hir_map::map_crate(&sess, &*cstore, &mut hir_forest, &defs); let output_filenames = driver::build_output_filenames(&input, &None, @@ -185,7 +184,6 @@ pub fn run_core(search_paths: SearchPaths, hir_map, analysis, resolutions, - &arena, &arenas, &name, &output_filenames, diff --git a/src/librustdoc/lib.rs b/src/librustdoc/lib.rs index f0bb87015f805..cd5cff952cfaf 100644 --- a/src/librustdoc/lib.rs +++ b/src/librustdoc/lib.rs @@ -25,6 +25,8 @@ #![feature(unicode)] #![feature(vec_remove_item)] +#![recursion_limit="256"] + extern crate arena; 
extern crate getopts; extern crate env_logger; @@ -105,7 +107,9 @@ pub fn main() { const STACK_SIZE: usize = 32_000_000; // 32MB env_logger::init().unwrap(); let res = std::thread::Builder::new().stack_size(STACK_SIZE).spawn(move || { - get_args().map(|args| main_args(&args)).unwrap_or(1) + syntax::with_globals(&syntax::Globals::new(), move || { + get_args().map(|args| main_args(&args)).unwrap_or(1) + }) }).unwrap().join().unwrap_or(101); process::exit(res as i32); } @@ -264,7 +268,7 @@ pub fn usage(argv0: &str) { println!("{}", options.usage(&format!("{} [options] ", argv0))); } -pub fn main_args(args: &[String]) -> isize { +fn main_args(args: &[String]) -> isize { let mut options = getopts::Options::new(); for option in opts() { (option.apply)(&mut options); diff --git a/src/librustdoc/test.rs b/src/librustdoc/test.rs index 74a16cb867d74..57611161a8d22 100644 --- a/src/librustdoc/test.rs +++ b/src/librustdoc/test.rs @@ -18,6 +18,7 @@ use std::panic::{self, AssertUnwindSafe}; use std::process::Command; use std::rc::Rc; use std::str; +use rustc_data_structures::sync::Lrc; use std::sync::{Arc, Mutex}; use testing; @@ -77,7 +78,7 @@ pub fn run(input: &str, ..config::basic_options().clone() }; - let codemap = Rc::new(CodeMap::new(sessopts.file_path_mapping())); + let codemap = Lrc::new(CodeMap::new(sessopts.file_path_mapping())); let handler = errors::Handler::with_tty_emitter(ColorConfig::Auto, true, false, @@ -235,7 +236,7 @@ fn run_test(test: &str, cratename: &str, filename: &str, cfgs: Vec, libs } } let data = Arc::new(Mutex::new(Vec::new())); - let codemap = Rc::new(CodeMap::new(sessopts.file_path_mapping())); + let codemap = Lrc::new(CodeMap::new(sessopts.file_path_mapping())); let emitter = errors::emitter::EmitterWriter::new(box Sink(data.clone()), Some(codemap.clone()), false); @@ -450,7 +451,7 @@ pub struct Collector { opts: TestOptions, maybe_sysroot: Option, position: Span, - codemap: Option>, + codemap: Option>, filename: Option, // to be removed when hoedown will be removed as well pub render_type: RenderType, @@ -460,7 +461,7 @@ pub struct Collector { impl Collector { pub fn new(cratename: String, cfgs: Vec, libs: SearchPaths, externs: Externs, use_headers: bool, opts: TestOptions, maybe_sysroot: Option, - codemap: Option>, filename: Option, + codemap: Option>, filename: Option, render_type: RenderType, linker: Option) -> Collector { Collector { tests: Vec::new(), diff --git a/src/libserialize/collection_impls.rs b/src/libserialize/collection_impls.rs index d8ae9729224d7..de7eebe7bf3fb 100644 --- a/src/libserialize/collection_impls.rs +++ b/src/libserialize/collection_impls.rs @@ -15,6 +15,7 @@ use std::hash::{Hash, BuildHasher}; use {Decodable, Encodable, Decoder, Encoder}; use std::collections::{LinkedList, VecDeque, BTreeMap, BTreeSet, HashMap, HashSet}; use std::rc::Rc; +use std::sync::Arc; impl< T: Encodable @@ -218,3 +219,26 @@ impl Decodable for Rc<[T]> { }) } } + +impl Encodable for Arc<[T]> { + fn encode(&self, s: &mut E) -> Result<(), E::Error> { + s.emit_seq(self.len(), |s| { + for (index, e) in self.iter().enumerate() { + s.emit_seq_elt(index, |s| e.encode(s))?; + } + Ok(()) + }) + } +} + +impl Decodable for Arc<[T]> { + fn decode(d: &mut D) -> Result, D::Error> { + d.read_seq(|d, len| { + let mut vec = Vec::with_capacity(len); + for index in 0..len { + vec.push(d.read_seq_elt(index, |d| Decodable::decode(d))?); + } + Ok(vec.into()) + }) + } +} \ No newline at end of file diff --git a/src/libsyntax/Cargo.toml b/src/libsyntax/Cargo.toml index 
fb1f300f63cc3..24fda5a051f1a 100644 --- a/src/libsyntax/Cargo.toml +++ b/src/libsyntax/Cargo.toml @@ -12,6 +12,7 @@ crate-type = ["dylib"] bitflags = "1.0" serialize = { path = "../libserialize" } log = "0.3" +scoped-tls = { git = "https://github.com/Zoxc/scoped-tls.git", features=["nightly"] } syntax_pos = { path = "../libsyntax_pos" } rustc_cratesio_shim = { path = "../librustc_cratesio_shim" } rustc_errors = { path = "../librustc_errors" } diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index 3c1d6ea18f7c2..add9373aa82d4 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -29,7 +29,7 @@ use tokenstream::{ThinTokenStream, TokenStream}; use serialize::{self, Encoder, Decoder}; use std::collections::HashSet; use std::fmt; -use std::rc::Rc; +use rustc_data_structures::sync::Lrc; use std::u32; #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)] @@ -1119,7 +1119,7 @@ pub enum LitKind { /// A string literal (`"foo"`) Str(Symbol, StrStyle), /// A byte string (`b"foo"`) - ByteStr(Rc>), + ByteStr(Lrc>), /// A byte char (`b'f'`) Byte(u8), /// A character literal (`'a'`) diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs index e5e95002e1079..48047a63b696d 100644 --- a/src/libsyntax/attr.rs +++ b/src/libsyntax/attr.rs @@ -30,15 +30,10 @@ use ptr::P; use symbol::Symbol; use tokenstream::{TokenStream, TokenTree, Delimited}; use util::ThinVec; +use GLOBALS; -use std::cell::{RefCell, Cell}; use std::iter; -thread_local! { - static USED_ATTRS: RefCell> = RefCell::new(Vec::new()); - static KNOWN_ATTRS: RefCell> = RefCell::new(Vec::new()); -} - enum AttrError { MultipleItem(Name), UnknownMetaItem(Name), @@ -65,22 +60,24 @@ fn handle_errors(diag: &Handler, span: Span, error: AttrError) { pub fn mark_used(attr: &Attribute) { debug!("Marking {:?} as used.", attr); let AttrId(id) = attr.id; - USED_ATTRS.with(|slot| { + GLOBALS.with(|globals| { + let mut slot = globals.used_attrs.lock(); let idx = (id / 64) as usize; let shift = id % 64; - if slot.borrow().len() <= idx { - slot.borrow_mut().resize(idx + 1, 0); + if slot.len() <= idx { + slot.resize(idx + 1, 0); } - slot.borrow_mut()[idx] |= 1 << shift; + slot[idx] |= 1 << shift; }); } pub fn is_used(attr: &Attribute) -> bool { let AttrId(id) = attr.id; - USED_ATTRS.with(|slot| { + GLOBALS.with(|globals| { + let slot = globals.used_attrs.lock(); let idx = (id / 64) as usize; let shift = id % 64; - slot.borrow().get(idx).map(|bits| bits & (1 << shift) != 0) + slot.get(idx).map(|bits| bits & (1 << shift) != 0) .unwrap_or(false) }) } @@ -88,22 +85,24 @@ pub fn is_used(attr: &Attribute) -> bool { pub fn mark_known(attr: &Attribute) { debug!("Marking {:?} as known.", attr); let AttrId(id) = attr.id; - KNOWN_ATTRS.with(|slot| { + GLOBALS.with(|globals| { + let mut slot = globals.known_attrs.lock(); let idx = (id / 64) as usize; let shift = id % 64; - if slot.borrow().len() <= idx { - slot.borrow_mut().resize(idx + 1, 0); + if slot.len() <= idx { + slot.resize(idx + 1, 0); } - slot.borrow_mut()[idx] |= 1 << shift; + slot[idx] |= 1 << shift; }); } pub fn is_known(attr: &Attribute) -> bool { let AttrId(id) = attr.id; - KNOWN_ATTRS.with(|slot| { + GLOBALS.with(|globals| { + let slot = globals.known_attrs.lock(); let idx = (id / 64) as usize; let shift = id % 64; - slot.borrow().get(idx).map(|bits| bits & (1 << shift) != 0) + slot.get(idx).map(|bits| bits & (1 << shift) != 0) .unwrap_or(false) }) } @@ -419,16 +418,14 @@ pub fn mk_spanned_word_item(sp: Span, name: Name) -> MetaItem { MetaItem { span: sp, name: name, node: 
MetaItemKind::Word } } +pub fn mk_attr_id() -> AttrId { + use std::sync::atomic::AtomicUsize; + use std::sync::atomic::Ordering; + static NEXT_ATTR_ID: AtomicUsize = AtomicUsize::new(0); -thread_local! { static NEXT_ATTR_ID: Cell = Cell::new(0) } - -pub fn mk_attr_id() -> AttrId { - let id = NEXT_ATTR_ID.with(|slot| { - let r = slot.get(); - slot.set(r + 1); - r - }); + let id = NEXT_ATTR_ID.fetch_add(1, Ordering::SeqCst); + assert!(id != ::std::usize::MAX); AttrId(id) } diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs index 3906ed431ce20..3d8adb3fa3a9d 100644 --- a/src/libsyntax/codemap.rs +++ b/src/libsyntax/codemap.rs @@ -24,10 +24,9 @@ pub use self::ExpnFormat::*; use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::stable_hasher::StableHasher; -use std::cell::{RefCell, Ref}; +use rustc_data_structures::sync::{Lrc, Lock, LockGuard}; use std::hash::Hash; use std::path::{Path, PathBuf}; -use std::rc::Rc; use std::env; use std::fs; @@ -125,32 +124,32 @@ impl StableFilemapId { // pub struct CodeMap { - pub(super) files: RefCell>>, - file_loader: Box, + pub(super) files: Lock>>, + file_loader: Box, // This is used to apply the file path remapping as specified via // -Zremap-path-prefix to all FileMaps allocated within this CodeMap. path_mapping: FilePathMapping, - stable_id_to_filemap: RefCell>>, + stable_id_to_filemap: Lock>>, } impl CodeMap { pub fn new(path_mapping: FilePathMapping) -> CodeMap { CodeMap { - files: RefCell::new(Vec::new()), + files: Lock::new(Vec::new()), file_loader: Box::new(RealFileLoader), path_mapping, - stable_id_to_filemap: RefCell::new(FxHashMap()), + stable_id_to_filemap: Lock::new(FxHashMap()), } } - pub fn with_file_loader(file_loader: Box, + pub fn with_file_loader(file_loader: Box, path_mapping: FilePathMapping) -> CodeMap { CodeMap { - files: RefCell::new(Vec::new()), - file_loader, + files: Lock::new(Vec::new()), + file_loader: file_loader, path_mapping, - stable_id_to_filemap: RefCell::new(FxHashMap()), + stable_id_to_filemap: Lock::new(FxHashMap()), } } @@ -162,16 +161,16 @@ impl CodeMap { self.file_loader.file_exists(path) } - pub fn load_file(&self, path: &Path) -> io::Result> { + pub fn load_file(&self, path: &Path) -> io::Result> { let src = self.file_loader.read_file(path)?; Ok(self.new_filemap(path.to_str().unwrap().to_string(), src)) } - pub fn files(&self) -> Ref>> { + pub fn files(&self) -> LockGuard>> { self.files.borrow() } - pub fn filemap_by_stable_id(&self, stable_id: StableFilemapId) -> Option> { + pub fn filemap_by_stable_id(&self, stable_id: StableFilemapId) -> Option> { self.stable_id_to_filemap.borrow().get(&stable_id).map(|fm| fm.clone()) } @@ -187,7 +186,7 @@ impl CodeMap { /// Creates a new filemap without setting its line information. If you don't /// intend to set the line information yourself, you should use new_filemap_and_lines. - pub fn new_filemap(&self, filename: FileName, src: String) -> Rc { + pub fn new_filemap(&self, filename: FileName, src: String) -> Lrc { let start_pos = self.next_start_pos(); let mut files = self.files.borrow_mut(); @@ -199,7 +198,7 @@ impl CodeMap { let unmapped_path = PathBuf::from(filename.clone()); let (filename, was_remapped) = self.path_mapping.map_prefix(filename); - let filemap = Rc::new(FileMap::new( + let filemap = Lrc::new(FileMap::new( filename, was_remapped, unmapped_path, @@ -217,7 +216,7 @@ impl CodeMap { } /// Creates a new filemap and sets its line information. 
- pub fn new_filemap_and_lines(&self, filename: &str, src: &str) -> Rc { + pub fn new_filemap_and_lines(&self, filename: &str, src: &str) -> Lrc { let fm = self.new_filemap(filename.to_string(), src.to_owned()); let mut byte_pos: u32 = fm.start_pos.0; for line in src.lines() { @@ -244,7 +243,7 @@ impl CodeMap { mut file_local_lines: Vec, mut file_local_multibyte_chars: Vec, mut file_local_non_narrow_chars: Vec) - -> Rc { + -> Lrc { let start_pos = self.next_start_pos(); let mut files = self.files.borrow_mut(); @@ -263,19 +262,19 @@ impl CodeMap { *swc = *swc + start_pos; } - let filemap = Rc::new(FileMap { + let filemap = Lrc::new(FileMap { name: filename, name_was_remapped, unmapped_path: None, crate_of_origin, src: None, src_hash, - external_src: RefCell::new(ExternalSource::AbsentOk), + external_src: Lock::new(ExternalSource::AbsentOk), start_pos, end_pos, - lines: RefCell::new(file_local_lines), - multibyte_chars: RefCell::new(file_local_multibyte_chars), - non_narrow_chars: RefCell::new(file_local_non_narrow_chars), + lines: Lock::new(file_local_lines), + multibyte_chars: Lock::new(file_local_multibyte_chars), + non_narrow_chars: Lock::new(file_local_non_narrow_chars), }); files.push(filemap.clone()); @@ -358,7 +357,7 @@ impl CodeMap { } // If the relevant filemap is empty, we don't return a line number. - pub fn lookup_line(&self, pos: BytePos) -> Result> { + pub fn lookup_line(&self, pos: BytePos) -> Result> { let idx = self.lookup_filemap_idx(pos); let files = self.files.borrow(); @@ -561,7 +560,7 @@ impl CodeMap { self.span_until_char(sp, '{') } - pub fn get_filemap(&self, filename: &str) -> Option> { + pub fn get_filemap(&self, filename: &str) -> Option> { for fm in self.files.borrow().iter() { if filename == fm.name { return Some(fm.clone()); @@ -658,7 +657,7 @@ impl CodeMapper for CodeMap { } sp } - fn ensure_filemap_source_present(&self, file_map: Rc) -> bool { + fn ensure_filemap_source_present(&self, file_map: Lrc) -> bool { file_map.add_external_src( || self.file_loader.read_file(Path::new(&file_map.name)).ok() ) @@ -709,7 +708,7 @@ impl FilePathMapping { mod tests { use super::*; use std::borrow::Cow; - use std::rc::Rc; + use rustc_data_structures::sync::Lrc; #[test] fn t1 () { @@ -930,7 +929,7 @@ mod tests { /// `substring` in `source_text`. trait CodeMapExtension { fn span_substr(&self, - file: &Rc, + file: &Lrc, source_text: &str, substring: &str, n: usize) @@ -939,7 +938,7 @@ mod tests { impl CodeMapExtension for CodeMap { fn span_substr(&self, - file: &Rc, + file: &Lrc, source_text: &str, substring: &str, n: usize) diff --git a/src/libsyntax/diagnostics/plugin.rs b/src/libsyntax/diagnostics/plugin.rs index 5224f52c49629..a192e154ff018 100644 --- a/src/libsyntax/diagnostics/plugin.rs +++ b/src/libsyntax/diagnostics/plugin.rs @@ -8,7 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use std::cell::RefCell; use std::collections::BTreeMap; use std::env; @@ -30,12 +29,6 @@ pub use errors::*; // Maximum width of any line in an extended error description (inclusive). const MAX_DESCRIPTION_WIDTH: usize = 80; -thread_local! { - static REGISTERED_DIAGNOSTICS: RefCell = { - RefCell::new(BTreeMap::new()) - } -} - /// Error information type. pub struct ErrorInfo { pub description: Option, @@ -45,14 +38,6 @@ pub struct ErrorInfo { /// Mapping from error codes to metadata. 
pub type ErrorMap = BTreeMap; -fn with_registered_diagnostics(f: F) -> T where - F: FnOnce(&mut ErrorMap) -> T, -{ - REGISTERED_DIAGNOSTICS.with(move |slot| { - f(&mut *slot.borrow_mut()) - }) -} - pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt, span: Span, token_tree: &[TokenTree]) @@ -62,27 +47,27 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt, _ => unreachable!() }; - with_registered_diagnostics(|diagnostics| { - match diagnostics.get_mut(&code.name) { - // Previously used errors. - Some(&mut ErrorInfo { description: _, use_site: Some(previous_span) }) => { - ecx.struct_span_warn(span, &format!( - "diagnostic code {} already used", code - )).span_note(previous_span, "previous invocation") - .emit(); - } - // Newly used errors. - Some(ref mut info) => { - info.use_site = Some(span); - } - // Unregistered errors. - None => { - ecx.span_err(span, &format!( - "used diagnostic code {} not registered", code - )); - } + let mut diagnostics = ecx.parse_sess.registered_diagnostics.lock(); + match diagnostics.get_mut(&code.name) { + // Previously used errors. + Some(&mut ErrorInfo { description: _, use_site: Some(previous_span) }) => { + ecx.struct_span_warn(span, &format!( + "diagnostic code {} already used", code + )).span_note(previous_span, "previous invocation") + .emit(); } - }); + // Newly used errors. + Some(ref mut info) => { + info.use_site = Some(span); + } + // Unregistered errors. + None => { + ecx.span_err(span, &format!( + "used diagnostic code {} not registered", code + )); + } + } + MacEager::expr(ecx.expr_tuple(span, Vec::new())) } @@ -131,17 +116,17 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt, } }); // Add the error to the map. - with_registered_diagnostics(|diagnostics| { - let info = ErrorInfo { - description, - use_site: None - }; - if diagnostics.insert(code.name, info).is_some() { - ecx.span_err(span, &format!( - "diagnostic code {} already registered", code - )); - } - }); + let mut diagnostics = ecx.parse_sess.registered_diagnostics.lock(); + let info = ErrorInfo { + description, + use_site: None + }; + if diagnostics.insert(code.name, info).is_some() { + ecx.span_err(span, &format!( + "diagnostic code {} already registered", code + )); + } + let sym = Ident::with_empty_ctxt(Symbol::gensym(&format!( "__register_diagnostic_{}", code ))); @@ -173,18 +158,17 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt, // Output error metadata to `tmp/extended-errors//.json` if let Ok(target_triple) = env::var("CFG_COMPILER_HOST_TRIPLE") { - with_registered_diagnostics(|diagnostics| { - if let Err(e) = output_metadata(ecx, - &target_triple, - &crate_name.name.as_str(), - diagnostics) { - ecx.span_bug(span, &format!( - "error writing metadata for triple `{}` and crate `{}`, error: {}, \ - cause: {:?}", - target_triple, crate_name, e.description(), e.cause() - )); - } - }); + let diagnostics = ecx.parse_sess.registered_diagnostics.lock(); + if let Err(e) = output_metadata(ecx, + &target_triple, + &crate_name.name.as_str(), + &*diagnostics) { + ecx.span_bug(span, &format!( + "error writing metadata for triple `{}` and crate `{}`, error: {}, \ + cause: {:?}", + target_triple, crate_name, e.description(), e.cause() + )); + } } else { ecx.span_err(span, &format!( "failed to write metadata for crate `{}` because $CFG_COMPILER_HOST_TRIPLE is not set", @@ -192,19 +176,19 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt, } // Construct the output expression. 
- let (count, expr) = - with_registered_diagnostics(|diagnostics| { - let descriptions: Vec> = - diagnostics.iter().filter_map(|(&code, info)| { - info.description.map(|description| { - ecx.expr_tuple(span, vec![ - ecx.expr_str(span, code), - ecx.expr_str(span, description) - ]) - }) - }).collect(); - (descriptions.len(), ecx.expr_vec(span, descriptions)) - }); + let (count, expr) = { + let diagnostics = ecx.parse_sess.registered_diagnostics.lock(); + let descriptions: Vec> = + diagnostics.iter().filter_map(|(&code, info)| { + info.description.map(|description| { + ecx.expr_tuple(span, vec![ + ecx.expr_str(span, code), + ecx.expr_str(span, description) + ]) + }) + }).collect(); + (descriptions.len(), ecx.expr_vec(span, descriptions)) + }; let static_ = ecx.lifetime(span, Ident::from_str("'static")); let ty_str = ecx.ty_rptr( diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index 6c96692f719ff..b90a8cd43f662 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -27,6 +27,7 @@ use util::small_vector::SmallVector; use std::collections::HashMap; use std::path::PathBuf; use std::rc::Rc; +use rustc_data_structures::sync::{Send, Sync, Lrc}; use std::default::Default; use tokenstream::{self, TokenStream}; @@ -506,7 +507,6 @@ pub enum MacroKind { Derive, } -/// An enum representing the different kinds of syntax extensions. pub enum SyntaxExtension { /// A syntax extension that is attached to an item and creates new items /// based upon it. @@ -514,26 +514,26 @@ pub enum SyntaxExtension { /// `#[derive(...)]` is a `MultiItemDecorator`. /// /// Prefer ProcMacro or MultiModifier since they are more flexible. - MultiDecorator(Box), + MultiDecorator(Box), /// A syntax extension that is attached to an item and modifies it /// in-place. Also allows decoration, i.e., creating new items. - MultiModifier(Box), + MultiModifier(Box), /// A function-like procedural macro. TokenStream -> TokenStream. - ProcMacro(Box), + ProcMacro(Box), /// An attribute-like procedural macro. TokenStream, TokenStream -> TokenStream. /// The first TokenSteam is the attribute, the second is the annotated item. /// Allows modification of the input items and adding new items, similar to /// MultiModifier, but uses TokenStreams, rather than AST nodes. - AttrProcMacro(Box), + AttrProcMacro(Box), /// A normal, function-like syntax extension. /// /// `bytes!` is a `NormalTT`. NormalTT { - expander: Box, + expander: Box, def_info: Option<(ast::NodeId, Span)>, /// Whether the contents of the macro can /// directly use `#[unstable]` things (true == yes). @@ -546,13 +546,13 @@ pub enum SyntaxExtension { /// A function-like syntax extension that has an extra ident before /// the block. /// - IdentTT(Box, Option, bool), + IdentTT(Box, Option, bool), /// An attribute-like procedural macro. TokenStream -> TokenStream. /// The input is the annotated item. /// Allows generating code to implement a Trait for a given struct /// or enum item. - ProcMacroDerive(Box, Vec /* inert attribute names */), + ProcMacroDerive(Box, Vec /* inert attribute names */), /// An attribute-like procedural macro that derives a builtin trait. BuiltinDerive(BuiltinDeriveFn), @@ -560,7 +560,7 @@ pub enum SyntaxExtension { /// A declarative macro, e.g. `macro m() {}`. /// /// The second element is the definition site span. 
- DeclMacro(Box, Option<(ast::NodeId, Span)>), + DeclMacro(Box, Option<(ast::NodeId, Span)>), } impl SyntaxExtension { @@ -602,15 +602,15 @@ pub trait Resolver { fn is_whitelisted_legacy_custom_derive(&self, name: Name) -> bool; fn visit_expansion(&mut self, mark: Mark, expansion: &Expansion, derives: &[Mark]); - fn add_builtin(&mut self, ident: ast::Ident, ext: Rc); + fn add_builtin(&mut self, ident: ast::Ident, ext: Lrc); fn resolve_imports(&mut self); // Resolves attribute and derive legacy macros from `#![plugin(..)]`. fn find_legacy_attr_invoc(&mut self, attrs: &mut Vec) -> Option; fn resolve_invoc(&mut self, invoc: &mut Invocation, scope: Mark, force: bool) - -> Result>, Determinacy>; + -> Result>, Determinacy>; fn resolve_macro(&mut self, scope: Mark, path: &ast::Path, kind: MacroKind, force: bool) - -> Result, Determinacy>; + -> Result, Determinacy>; fn check_unused_macros(&self); } @@ -629,16 +629,16 @@ impl Resolver for DummyResolver { fn is_whitelisted_legacy_custom_derive(&self, _name: Name) -> bool { false } fn visit_expansion(&mut self, _invoc: Mark, _expansion: &Expansion, _derives: &[Mark]) {} - fn add_builtin(&mut self, _ident: ast::Ident, _ext: Rc) {} + fn add_builtin(&mut self, _ident: ast::Ident, _ext: Lrc) {} fn resolve_imports(&mut self) {} fn find_legacy_attr_invoc(&mut self, _attrs: &mut Vec) -> Option { None } fn resolve_invoc(&mut self, _invoc: &mut Invocation, _scope: Mark, _force: bool) - -> Result>, Determinacy> { + -> Result>, Determinacy> { Err(Determinacy::Determined) } fn resolve_macro(&mut self, _scope: Mark, _path: &ast::Path, _kind: MacroKind, - _force: bool) -> Result, Determinacy> { + _force: bool) -> Result, Determinacy> { Err(Determinacy::Determined) } fn check_unused_macros(&self) {} diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 0d1b1c65a2934..46dd8e78a8b7f 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -39,6 +39,7 @@ use std::fs::File; use std::io::Read; use std::mem; use std::rc::Rc; +use rustc_data_structures::sync::Lrc; macro_rules! expansions { ($($kind:ident: $ty:ty [$($vec:ident, $ty_elt:ty)*], $kind_name:expr, .$make:ident, @@ -444,7 +445,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { } } - fn expand_invoc(&mut self, invoc: Invocation, ext: Rc) -> Expansion { + fn expand_invoc(&mut self, invoc: Invocation, ext: Lrc) -> Expansion { let result = match invoc.kind { InvocationKind::Bang { .. } => self.expand_bang_invoc(invoc, ext), InvocationKind::Attr { .. } => self.expand_attr_invoc(invoc, ext), @@ -468,7 +469,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { result } - fn expand_attr_invoc(&mut self, invoc: Invocation, ext: Rc) -> Expansion { + fn expand_attr_invoc(&mut self, invoc: Invocation, ext: Lrc) -> Expansion { let Invocation { expansion_kind: kind, .. } = invoc; let (attr, item) = match invoc.kind { InvocationKind::Attr { attr, item, .. } => (attr.unwrap(), item), @@ -523,7 +524,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { } /// Expand a macro invocation. Returns the result of expansion. - fn expand_bang_invoc(&mut self, invoc: Invocation, ext: Rc) -> Expansion { + fn expand_bang_invoc(&mut self, invoc: Invocation, ext: Lrc) -> Expansion { let (mark, kind) = (invoc.expansion_data.mark, invoc.expansion_kind); let (mac, ident, span) = match invoc.kind { InvocationKind::Bang { mac, ident, span } => (mac, ident, span), @@ -649,7 +650,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { } /// Expand a derive invocation. Returns the result of expansion. 
- fn expand_derive_invoc(&mut self, invoc: Invocation, ext: Rc) -> Expansion { + fn expand_derive_invoc(&mut self, invoc: Invocation, ext: Lrc) -> Expansion { let Invocation { expansion_kind: kind, .. } = invoc; let (path, item) = match invoc.kind { InvocationKind::Derive { path, item } => (path, item), diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs index 86657e675b2de..e3e41e99a5c5c 100644 --- a/src/libsyntax/ext/source_util.rs +++ b/src/libsyntax/ext/source_util.rs @@ -24,7 +24,7 @@ use util::small_vector::SmallVector; use std::fs::File; use std::io::prelude::*; use std::path::{Path, PathBuf}; -use std::rc::Rc; +use rustc_data_structures::sync::Lrc; // These macros all relate to the file system; they either return // the column/row/filename of the expression, or they include @@ -185,7 +185,7 @@ pub fn expand_include_bytes(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::Toke let filename = format!("{}", file.display()); cx.codemap().new_filemap_and_lines(&filename, ""); - base::MacEager::expr(cx.expr_lit(sp, ast::LitKind::ByteStr(Rc::new(bytes)))) + base::MacEager::expr(cx.expr_lit(sp, ast::LitKind::ByteStr(Lrc::new(bytes)))) } } } diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 5e58f003c2be7..823ca1b025de8 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -8,6 +8,8 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +use rustc_data_structures::sync::RwLock; + use {ast, attr}; use syntax_pos::{Span, DUMMY_SP}; use ext::base::{DummyResult, ExtCtxt, MacResult, SyntaxExtension}; @@ -26,10 +28,10 @@ use parse::token::Token::*; use symbol::Symbol; use tokenstream::{TokenStream, TokenTree}; -use std::cell::RefCell; use std::collections::HashMap; use std::collections::hash_map::Entry; -use std::rc::Rc; + +use rustc_data_structures::sync::Lrc; pub struct ParserAnyMacro<'a> { parser: Parser<'a>, @@ -183,7 +185,7 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt, // Holy self-referential! /// Converts a `macro_rules!` invocation into a syntax extension. -pub fn compile(sess: &ParseSess, features: &RefCell, def: &ast::Item) -> SyntaxExtension { +pub fn compile(sess: &ParseSess, features: &RwLock, def: &ast::Item) -> SyntaxExtension { let lhs_nm = ast::Ident::with_empty_ctxt(Symbol::gensym("lhs")); let rhs_nm = ast::Ident::with_empty_ctxt(Symbol::gensym("rhs")); @@ -199,7 +201,7 @@ pub fn compile(sess: &ParseSess, features: &RefCell, def: &ast::Item) // ...quasiquoting this would be nice. 
// These spans won't matter, anyways let argument_gram = vec![ - quoted::TokenTree::Sequence(DUMMY_SP, Rc::new(quoted::SequenceRepetition { + quoted::TokenTree::Sequence(DUMMY_SP, Lrc::new(quoted::SequenceRepetition { tts: vec![ quoted::TokenTree::MetaVarDecl(DUMMY_SP, lhs_nm, ast::Ident::from_str("tt")), quoted::TokenTree::Token(DUMMY_SP, token::FatArrow), @@ -210,7 +212,7 @@ pub fn compile(sess: &ParseSess, features: &RefCell, def: &ast::Item) num_captures: 2, })), // to phase into semicolon-termination instead of semicolon-separation - quoted::TokenTree::Sequence(DUMMY_SP, Rc::new(quoted::SequenceRepetition { + quoted::TokenTree::Sequence(DUMMY_SP, Lrc::new(quoted::SequenceRepetition { tts: vec![quoted::TokenTree::Token(DUMMY_SP, token::Semi)], separator: None, op: quoted::KleeneOp::ZeroOrMore, @@ -293,7 +295,7 @@ pub fn compile(sess: &ParseSess, features: &RefCell, def: &ast::Item) } fn check_lhs_nt_follows(sess: &ParseSess, - features: &RefCell, + features: &RwLock, attrs: &[ast::Attribute], lhs: "ed::TokenTree) -> bool { // lhs is going to be like TokenTree::Delimited(...), where the @@ -350,7 +352,7 @@ fn check_rhs(sess: &ParseSess, rhs: "ed::TokenTree) -> bool { } fn check_matcher(sess: &ParseSess, - features: &RefCell, + features: &RwLock, attrs: &[ast::Attribute], matcher: &[quoted::TokenTree]) -> bool { let first_sets = FirstSets::new(matcher); @@ -598,7 +600,7 @@ impl TokenSet { // Requires that `first_sets` is pre-computed for `matcher`; // see `FirstSets::new`. fn check_matcher_core(sess: &ParseSess, - features: &RefCell, + features: &RwLock, attrs: &[ast::Attribute], first_sets: &FirstSets, matcher: &[quoted::TokenTree], @@ -865,7 +867,7 @@ fn is_in_follow(tok: "ed::TokenTree, frag: &str) -> Result, + features: &RwLock, attrs: &[ast::Attribute], tok: "ed::TokenTree) -> Result<(), String> { debug!("has_legal_fragment_specifier({:?})", tok); @@ -880,7 +882,7 @@ fn has_legal_fragment_specifier(sess: &ParseSess, } fn is_legal_fragment_specifier(sess: &ParseSess, - features: &RefCell, + features: &RwLock, attrs: &[ast::Attribute], frag_name: &str, frag_span: Span) -> bool { diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs index 0e21e3f6b0010..1a9cee768c340 100644 --- a/src/libsyntax/ext/tt/quoted.rs +++ b/src/libsyntax/ext/tt/quoted.rs @@ -16,7 +16,7 @@ use symbol::keywords; use syntax_pos::{DUMMY_SP, Span, BytePos}; use tokenstream; -use std::rc::Rc; +use rustc_data_structures::sync::Lrc; #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct Delimited { @@ -77,9 +77,9 @@ pub enum KleeneOp { #[derive(Debug, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)] pub enum TokenTree { Token(Span, token::Token), - Delimited(Span, Rc), + Delimited(Span, Lrc), /// A kleene-style repetition sequence - Sequence(Span, Rc), + Sequence(Span, Lrc), /// E.g. `$var` MetaVar(Span, ast::Ident), /// E.g. `$var:expr`. This is only used in the left hand side of MBE macros. 
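Editor's note (illustrative, not part of the patch): the hunks above and below swap `std::rc::Rc` for `Lrc` and `RefCell` for `Lock` from `rustc_data_structures::sync`. A minimal single-threaded sketch of the kind of abstraction this presumes is given here; the real `sync` module's types and methods may differ, and a parallel build would back the same API with `Arc` and genuine locks.

    // Sketch only -- stand-ins for the `Lrc`/`Lock` aliases the diff imports.
    use std::cell::{Ref, RefCell, RefMut};
    use std::rc::Rc;

    /// Single-threaded stand-in for `sync::Lock`: `.lock()` is just `borrow_mut()`.
    pub struct Lock<T>(RefCell<T>);

    impl<T> Lock<T> {
        pub fn new(inner: T) -> Lock<T> { Lock(RefCell::new(inner)) }
        pub fn lock(&self) -> RefMut<T> { self.0.borrow_mut() }
        pub fn borrow(&self) -> Ref<T> { self.0.borrow() }
        pub fn borrow_mut(&self) -> RefMut<T> { self.0.borrow_mut() }
    }

    /// Single-threaded stand-in for `sync::Lrc`; a parallel build would use `Arc`.
    pub type Lrc<T> = Rc<T>;
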
@@ -189,7 +189,7 @@ fn parse_tree(tree: tokenstream::TokenTree, let sequence = parse(delimited.tts.into(), expect_matchers, sess); let (separator, op) = parse_sep_and_kleene_op(trees, span, sess); let name_captures = macro_parser::count_names(&sequence); - TokenTree::Sequence(span, Rc::new(SequenceRepetition { + TokenTree::Sequence(span, Lrc::new(SequenceRepetition { tts: sequence, separator, op, @@ -215,7 +215,7 @@ fn parse_tree(tree: tokenstream::TokenTree, }, tokenstream::TokenTree::Token(span, tok) => TokenTree::Token(span, tok), tokenstream::TokenTree::Delimited(span, delimited) => { - TokenTree::Delimited(span, Rc::new(Delimited { + TokenTree::Delimited(span, Lrc::new(Delimited { delim: delimited.delim, tts: parse(delimited.tts.into(), expect_matchers, sess), })) diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index d51b0d0ae3e93..7883c4bbc1648 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -20,6 +20,7 @@ use tokenstream::{TokenStream, TokenTree, Delimited}; use util::small_vector::SmallVector; use std::rc::Rc; +use rustc_data_structures::sync::Lrc; use std::mem; use std::ops::Add; use std::collections::HashMap; @@ -27,12 +28,12 @@ use std::collections::HashMap; // An iterator over the token trees in a delimited token tree (`{ ... }`) or a sequence (`$(...)`). enum Frame { Delimited { - forest: Rc, + forest: Lrc, idx: usize, span: Span, }, Sequence { - forest: Rc, + forest: Lrc, idx: usize, sep: Option, }, @@ -40,7 +41,7 @@ enum Frame { impl Frame { fn new(tts: Vec) -> Frame { - let forest = Rc::new(quoted::Delimited { delim: token::NoDelim, tts: tts }); + let forest = Lrc::new(quoted::Delimited { delim: token::NoDelim, tts: tts }); Frame::Delimited { forest: forest, idx: 0, span: DUMMY_SP } } } diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index 1a92f057e5e87..eba91c8f06dad 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -29,7 +29,7 @@ use tokenstream::*; use util::small_vector::SmallVector; use util::move_map::MoveMap; -use std::rc::Rc; +use rustc_data_structures::sync::Lrc; pub trait Folder : Sized { // Any additions to this trait should happen in form @@ -573,7 +573,7 @@ pub fn noop_fold_token(t: token::Token, fld: &mut T) -> token::Token token::Ident(id) => token::Ident(fld.fold_ident(id)), token::Lifetime(id) => token::Lifetime(fld.fold_ident(id)), token::Interpolated(nt) => { - let nt = match Rc::try_unwrap(nt) { + let nt = match Lrc::try_unwrap(nt) { Ok(nt) => nt, Err(nt) => (*nt).clone(), }; @@ -1363,6 +1363,7 @@ mod tests { use util::parser_testing::{string_to_crate, matches_codepattern}; use print::pprust; use fold; + use {Globals, with_globals}; use super::*; // this version doesn't care about getting comments or docstrings in. 
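Editor's note (illustrative, not part of the patch): the test hunks below wrap each body in `with_globals(&Globals::new(), || ...)`. This is a small self-contained sketch of the scoped thread-local pattern those tests rely on, assuming only the `scoped_tls` crate the patch adds as a dependency; the `Globals` struct here is a stand-in, not the real `syntax::Globals`.

    // Standalone sketch of the scoped-TLS pattern adopted by the tests below.
    #[macro_use]
    extern crate scoped_tls;

    use std::cell::RefCell;

    // Stand-in for the per-session state; the real `Globals` holds the attr
    // bitsets and the syntax_pos interner state.
    struct Globals {
        used_attrs: RefCell<Vec<u64>>,
    }

    scoped_thread_local!(static GLOBALS: Globals);

    fn with_globals<R, F: FnOnce() -> R>(globals: &Globals, f: F) -> R {
        // Install `globals` for the duration of `f`; nested code reaches it via
        // `GLOBALS.with(...)` instead of a process-wide `thread_local!`.
        GLOBALS.set(globals, f)
    }

    fn main() {
        let globals = Globals { used_attrs: RefCell::new(Vec::new()) };
        with_globals(&globals, || {
            GLOBALS.with(|g| g.used_attrs.borrow_mut().push(0b1));
            assert_eq!(GLOBALS.with(|g| g.used_attrs.borrow().len()), 1);
        });
    }
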
@@ -1400,28 +1401,32 @@ mod tests { // make sure idents get transformed everywhere #[test] fn ident_transformation () { - let mut zz_fold = ToZzIdentFolder; - let ast = string_to_crate( - "#[a] mod b {fn c (d : e, f : g) {h!(i,j,k);l;m}}".to_string()); - let folded_crate = zz_fold.fold_crate(ast); - assert_pred!( - matches_codepattern, - "matches_codepattern", - pprust::to_string(|s| fake_print_crate(s, &folded_crate)), - "#[zz]mod zz{fn zz(zz:zz,zz:zz){zz!(zz,zz,zz);zz;zz}}".to_string()); + with_globals(&Globals::new(), || { + let mut zz_fold = ToZzIdentFolder; + let ast = string_to_crate( + "#[a] mod b {fn c (d : e, f : g) {h!(i,j,k);l;m}}".to_string()); + let folded_crate = zz_fold.fold_crate(ast); + assert_pred!( + matches_codepattern, + "matches_codepattern", + pprust::to_string(|s| fake_print_crate(s, &folded_crate)), + "#[zz]mod zz{fn zz(zz:zz,zz:zz){zz!(zz,zz,zz);zz;zz}}".to_string()); + }) } // even inside macro defs.... #[test] fn ident_transformation_in_defs () { - let mut zz_fold = ToZzIdentFolder; - let ast = string_to_crate( - "macro_rules! a {(b $c:expr $(d $e:token)f+ => \ - (g $(d $d $e)+))} ".to_string()); - let folded_crate = zz_fold.fold_crate(ast); - assert_pred!( - matches_codepattern, - "matches_codepattern", - pprust::to_string(|s| fake_print_crate(s, &folded_crate)), - "macro_rules! zz((zz$zz:zz$(zz $zz:zz)zz+=>(zz$(zz$zz$zz)+)));".to_string()); + with_globals(&Globals::new(), || { + let mut zz_fold = ToZzIdentFolder; + let ast = string_to_crate( + "macro_rules! a {(b $c:expr $(d $e:token)f+ => \ + (g $(d $d $e)+))} ".to_string()); + let folded_crate = zz_fold.fold_crate(ast); + assert_pred!( + matches_codepattern, + "matches_codepattern", + pprust::to_string(|s| fake_print_crate(s, &folded_crate)), + "macro_rules! zz((zz$zz:zz$(zz $zz:zz)zz+=>(zz$(zz$zz$zz)+)));".to_string()); + }) } } diff --git a/src/libsyntax/json.rs b/src/libsyntax/json.rs index 80ac0cb4faf7d..e1f5cdb09435f 100644 --- a/src/libsyntax/json.rs +++ b/src/libsyntax/json.rs @@ -26,7 +26,7 @@ use errors::{DiagnosticBuilder, SubDiagnostic, CodeSuggestion, CodeMapper}; use errors::DiagnosticId; use errors::emitter::{Emitter, EmitterWriter}; -use std::rc::Rc; +use rustc_data_structures::sync::{self, Sync, Lrc}; use std::io::{self, Write}; use std::vec; use std::sync::{Arc, Mutex}; @@ -36,13 +36,13 @@ use rustc_serialize::json::{as_json, as_pretty_json}; pub struct JsonEmitter { dst: Box, registry: Option, - cm: Rc, + cm: Lrc, pretty: bool, } impl JsonEmitter { pub fn stderr(registry: Option, - code_map: Rc, + code_map: Lrc, pretty: bool) -> JsonEmitter { JsonEmitter { dst: Box::new(io::stderr()), @@ -54,12 +54,12 @@ impl JsonEmitter { pub fn basic(pretty: bool) -> JsonEmitter { let file_path_mapping = FilePathMapping::empty(); - JsonEmitter::stderr(None, Rc::new(CodeMap::new(file_path_mapping)), pretty) + JsonEmitter::stderr(None, Lrc::new(CodeMap::new(file_path_mapping)), pretty) } pub fn new(dst: Box, registry: Option, - code_map: Rc, + code_map: Lrc, pretty: bool) -> JsonEmitter { JsonEmitter { dst, diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs index 9e4f134e2bd56..0ddf7122e9227 100644 --- a/src/libsyntax/lib.rs +++ b/src/libsyntax/lib.rs @@ -23,6 +23,9 @@ #![feature(unicode)] #![feature(rustc_diagnostic_macros)] #![feature(i128_type)] +#![feature(const_atomic_usize_new)] + +#![recursion_limit="256"] // See librustc_cratesio_shim/Cargo.toml for a comment explaining this. 
#[allow(unused_extern_crates)] @@ -35,9 +38,12 @@ extern crate std_unicode; pub extern crate rustc_errors as errors; extern crate syntax_pos; extern crate rustc_data_structures; +#[macro_use] extern crate scoped_tls; extern crate serialize as rustc_serialize; // used by deriving +use rustc_data_structures::sync::Lock; + // A variant of 'try!' that panics on an Err. This is used as a crutch on the // way towards a non-panic!-prone parser. It should be used for fatal parsing // errors; eventually we plan to convert all code using panictry to just use @@ -68,6 +74,32 @@ macro_rules! unwrap_or { } } +pub struct Globals { + used_attrs: Lock>, + known_attrs: Lock>, + syntax_pos_globals: syntax_pos::Globals, +} + +impl Globals { + pub fn new() -> Globals { + Globals { + used_attrs: Lock::new(Vec::new()), + known_attrs: Lock::new(Vec::new()), + syntax_pos_globals: syntax_pos::Globals::new(), + } + } +} + +pub fn with_globals(t: &Globals, f: F) -> R + where F: FnOnce() -> R +{ + GLOBALS.set(t, || { + syntax_pos::GLOBALS.set(&t.syntax_pos_globals, f) + }) +} + +scoped_thread_local!(static GLOBALS: Globals); + #[macro_use] pub mod diagnostics { #[macro_use] diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index d9c33fa50bd89..21c91333d4618 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -20,7 +20,7 @@ use std_unicode::property::Pattern_White_Space; use std::borrow::Cow; use std::char; use std::mem::replace; -use std::rc::Rc; +use rustc_data_structures::sync::Lrc; pub mod comments; mod tokentrees; @@ -48,7 +48,7 @@ pub struct StringReader<'a> { pub col: CharPos, /// The current character (which has been read from self.pos) pub ch: Option, - pub filemap: Rc, + pub filemap: Lrc, /// If Some, stop reading the source at this position (inclusive). pub terminator: Option, /// Whether to record new-lines and multibyte chars in filemap. @@ -61,7 +61,7 @@ pub struct StringReader<'a> { pub fatal_errs: Vec>, // cache a direct reference to the source text, so that we don't have to // retrieve it via `self.filemap.src.as_ref().unwrap()` all the time. - source_text: Rc, + source_text: Lrc, /// Stack of open delimiters and their spans. Used for error message. 
token: token::Token, span: Span, @@ -152,13 +152,13 @@ impl<'a> StringReader<'a> { impl<'a> StringReader<'a> { /// For comments.rs, which hackily pokes into next_pos and ch - pub fn new_raw(sess: &'a ParseSess, filemap: Rc) -> Self { + pub fn new_raw(sess: &'a ParseSess, filemap: Lrc) -> Self { let mut sr = StringReader::new_raw_internal(sess, filemap); sr.bump(); sr } - fn new_raw_internal(sess: &'a ParseSess, filemap: Rc) -> Self { + fn new_raw_internal(sess: &'a ParseSess, filemap: Lrc) -> Self { if filemap.src.is_none() { sess.span_diagnostic.bug(&format!("Cannot lex filemap without source: {}", filemap.name)); @@ -187,7 +187,7 @@ impl<'a> StringReader<'a> { } } - pub fn new(sess: &'a ParseSess, filemap: Rc) -> Self { + pub fn new(sess: &'a ParseSess, filemap: Lrc) -> Self { let mut sr = StringReader::new_raw(sess, filemap); if sr.advance_token().is_err() { sr.emit_fatal_errors(); @@ -1711,12 +1711,13 @@ mod tests { use errors; use feature_gate::UnstableFeatures; use parse::token; - use std::cell::RefCell; use std::collections::HashSet; use std::io; - use std::rc::Rc; + use rustc_data_structures::sync::{Lrc, Lock}; + use diagnostics::plugin::ErrorMap; + use {Globals, with_globals}; - fn mk_sess(cm: Rc) -> ParseSess { + fn mk_sess(cm: Lrc) -> ParseSess { let emitter = errors::emitter::EmitterWriter::new(Box::new(io::sink()), Some(cm.clone()), false); @@ -1724,9 +1725,10 @@ mod tests { span_diagnostic: errors::Handler::with_emitter(true, false, Box::new(emitter)), unstable_features: UnstableFeatures::from_environment(), config: CrateConfig::new(), - included_mod_stack: RefCell::new(Vec::new()), + included_mod_stack: Lock::new(Vec::new()), code_map: cm, - missing_fragment_specifiers: RefCell::new(HashSet::new()), + missing_fragment_specifiers: Lock::new(HashSet::new()), + registered_diagnostics: Lock::new(ErrorMap::new()), } } @@ -1741,33 +1743,35 @@ mod tests { #[test] fn t1() { - let cm = Rc::new(CodeMap::new(FilePathMapping::empty())); - let sh = mk_sess(cm.clone()); - let mut string_reader = setup(&cm, - &sh, - "/* my source file */ fn main() { println!(\"zebra\"); }\n" - .to_string()); - let id = Ident::from_str("fn"); - assert_eq!(string_reader.next_token().tok, token::Comment); - assert_eq!(string_reader.next_token().tok, token::Whitespace); - let tok1 = string_reader.next_token(); - let tok2 = TokenAndSpan { - tok: token::Ident(id), - sp: Span::new(BytePos(21), BytePos(23), NO_EXPANSION), - }; - assert_eq!(tok1, tok2); - assert_eq!(string_reader.next_token().tok, token::Whitespace); - // the 'main' id is already read: - assert_eq!(string_reader.pos.clone(), BytePos(28)); - // read another token: - let tok3 = string_reader.next_token(); - let tok4 = TokenAndSpan { - tok: token::Ident(Ident::from_str("main")), - sp: Span::new(BytePos(24), BytePos(28), NO_EXPANSION), - }; - assert_eq!(tok3, tok4); - // the lparen is already read: - assert_eq!(string_reader.pos.clone(), BytePos(29)) + with_globals(&Globals::new(), || { + let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let sh = mk_sess(cm.clone()); + let mut string_reader = setup(&cm, + &sh, + "/* my source file */ fn main() { println!(\"zebra\"); }\n" + .to_string()); + let id = Ident::from_str("fn"); + assert_eq!(string_reader.next_token().tok, token::Comment); + assert_eq!(string_reader.next_token().tok, token::Whitespace); + let tok1 = string_reader.next_token(); + let tok2 = TokenAndSpan { + tok: token::Ident(id), + sp: Span::new(BytePos(21), BytePos(23), NO_EXPANSION), + }; + assert_eq!(tok1, tok2); + 
assert_eq!(string_reader.next_token().tok, token::Whitespace); + // the 'main' id is already read: + assert_eq!(string_reader.pos.clone(), BytePos(28)); + // read another token: + let tok3 = string_reader.next_token(); + let tok4 = TokenAndSpan { + tok: token::Ident(Ident::from_str("main")), + sp: Span::new(BytePos(24), BytePos(28), NO_EXPANSION), + }; + assert_eq!(tok3, tok4); + // the lparen is already read: + assert_eq!(string_reader.pos.clone(), BytePos(29)) + }) } // check that the given reader produces the desired stream @@ -1785,113 +1789,133 @@ mod tests { #[test] fn doublecolonparsing() { - let cm = Rc::new(CodeMap::new(FilePathMapping::empty())); - let sh = mk_sess(cm.clone()); - check_tokenization(setup(&cm, &sh, "a b".to_string()), - vec![mk_ident("a"), token::Whitespace, mk_ident("b")]); + with_globals(&Globals::new(), || { + let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let sh = mk_sess(cm.clone()); + check_tokenization(setup(&cm, &sh, "a b".to_string()), + vec![mk_ident("a"), token::Whitespace, mk_ident("b")]); + }) } #[test] fn dcparsing_2() { - let cm = Rc::new(CodeMap::new(FilePathMapping::empty())); - let sh = mk_sess(cm.clone()); - check_tokenization(setup(&cm, &sh, "a::b".to_string()), - vec![mk_ident("a"), token::ModSep, mk_ident("b")]); + with_globals(&Globals::new(), || { + let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let sh = mk_sess(cm.clone()); + check_tokenization(setup(&cm, &sh, "a::b".to_string()), + vec![mk_ident("a"), token::ModSep, mk_ident("b")]); + }) } #[test] fn dcparsing_3() { - let cm = Rc::new(CodeMap::new(FilePathMapping::empty())); - let sh = mk_sess(cm.clone()); - check_tokenization(setup(&cm, &sh, "a ::b".to_string()), - vec![mk_ident("a"), token::Whitespace, token::ModSep, mk_ident("b")]); + with_globals(&Globals::new(), || { + let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let sh = mk_sess(cm.clone()); + check_tokenization(setup(&cm, &sh, "a ::b".to_string()), + vec![mk_ident("a"), token::Whitespace, token::ModSep, mk_ident("b")]); + }) } #[test] fn dcparsing_4() { - let cm = Rc::new(CodeMap::new(FilePathMapping::empty())); - let sh = mk_sess(cm.clone()); - check_tokenization(setup(&cm, &sh, "a:: b".to_string()), - vec![mk_ident("a"), token::ModSep, token::Whitespace, mk_ident("b")]); + with_globals(&Globals::new(), || { + let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let sh = mk_sess(cm.clone()); + check_tokenization(setup(&cm, &sh, "a:: b".to_string()), + vec![mk_ident("a"), token::ModSep, token::Whitespace, mk_ident("b")]); + }) } #[test] fn character_a() { - let cm = Rc::new(CodeMap::new(FilePathMapping::empty())); - let sh = mk_sess(cm.clone()); - assert_eq!(setup(&cm, &sh, "'a'".to_string()).next_token().tok, - token::Literal(token::Char(Symbol::intern("a")), None)); + with_globals(&Globals::new(), || { + let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let sh = mk_sess(cm.clone()); + assert_eq!(setup(&cm, &sh, "'a'".to_string()).next_token().tok, + token::Literal(token::Char(Symbol::intern("a")), None)); + }) } #[test] fn character_space() { - let cm = Rc::new(CodeMap::new(FilePathMapping::empty())); - let sh = mk_sess(cm.clone()); - assert_eq!(setup(&cm, &sh, "' '".to_string()).next_token().tok, - token::Literal(token::Char(Symbol::intern(" ")), None)); + with_globals(&Globals::new(), || { + let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let sh = mk_sess(cm.clone()); + assert_eq!(setup(&cm, &sh, "' '".to_string()).next_token().tok, + 
token::Literal(token::Char(Symbol::intern(" ")), None)); + }) } #[test] fn character_escaped() { - let cm = Rc::new(CodeMap::new(FilePathMapping::empty())); - let sh = mk_sess(cm.clone()); - assert_eq!(setup(&cm, &sh, "'\\n'".to_string()).next_token().tok, - token::Literal(token::Char(Symbol::intern("\\n")), None)); + with_globals(&Globals::new(), || { + let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let sh = mk_sess(cm.clone()); + assert_eq!(setup(&cm, &sh, "'\\n'".to_string()).next_token().tok, + token::Literal(token::Char(Symbol::intern("\\n")), None)); + }) } #[test] fn lifetime_name() { - let cm = Rc::new(CodeMap::new(FilePathMapping::empty())); - let sh = mk_sess(cm.clone()); - assert_eq!(setup(&cm, &sh, "'abc".to_string()).next_token().tok, - token::Lifetime(Ident::from_str("'abc"))); + with_globals(&Globals::new(), || { + let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let sh = mk_sess(cm.clone()); + assert_eq!(setup(&cm, &sh, "'abc".to_string()).next_token().tok, + token::Lifetime(Ident::from_str("'abc"))); + }) } #[test] fn raw_string() { - let cm = Rc::new(CodeMap::new(FilePathMapping::empty())); - let sh = mk_sess(cm.clone()); - assert_eq!(setup(&cm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string()) - .next_token() - .tok, - token::Literal(token::StrRaw(Symbol::intern("\"#a\\b\x00c\""), 3), None)); + with_globals(&Globals::new(), || { + let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let sh = mk_sess(cm.clone()); + assert_eq!(setup(&cm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string()) + .next_token() + .tok, + token::Literal(token::StrRaw(Symbol::intern("\"#a\\b\x00c\""), 3), None)); + }) } #[test] fn literal_suffixes() { - let cm = Rc::new(CodeMap::new(FilePathMapping::empty())); - let sh = mk_sess(cm.clone()); - macro_rules! test { - ($input: expr, $tok_type: ident, $tok_contents: expr) => {{ - assert_eq!(setup(&cm, &sh, format!("{}suffix", $input)).next_token().tok, - token::Literal(token::$tok_type(Symbol::intern($tok_contents)), - Some(Symbol::intern("suffix")))); - // with a whitespace separator: - assert_eq!(setup(&cm, &sh, format!("{} suffix", $input)).next_token().tok, - token::Literal(token::$tok_type(Symbol::intern($tok_contents)), - None)); - }} - } - - test!("'a'", Char, "a"); - test!("b'a'", Byte, "a"); - test!("\"a\"", Str_, "a"); - test!("b\"a\"", ByteStr, "a"); - test!("1234", Integer, "1234"); - test!("0b101", Integer, "0b101"); - test!("0xABC", Integer, "0xABC"); - test!("1.0", Float, "1.0"); - test!("1.0e10", Float, "1.0e10"); - - assert_eq!(setup(&cm, &sh, "2us".to_string()).next_token().tok, - token::Literal(token::Integer(Symbol::intern("2")), - Some(Symbol::intern("us")))); - assert_eq!(setup(&cm, &sh, "r###\"raw\"###suffix".to_string()).next_token().tok, - token::Literal(token::StrRaw(Symbol::intern("raw"), 3), - Some(Symbol::intern("suffix")))); - assert_eq!(setup(&cm, &sh, "br###\"raw\"###suffix".to_string()).next_token().tok, - token::Literal(token::ByteStrRaw(Symbol::intern("raw"), 3), - Some(Symbol::intern("suffix")))); + with_globals(&Globals::new(), || { + let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let sh = mk_sess(cm.clone()); + macro_rules! 
test { + ($input: expr, $tok_type: ident, $tok_contents: expr) => {{ + assert_eq!(setup(&cm, &sh, format!("{}suffix", $input)).next_token().tok, + token::Literal(token::$tok_type(Symbol::intern($tok_contents)), + Some(Symbol::intern("suffix")))); + // with a whitespace separator: + assert_eq!(setup(&cm, &sh, format!("{} suffix", $input)).next_token().tok, + token::Literal(token::$tok_type(Symbol::intern($tok_contents)), + None)); + }} + } + + test!("'a'", Char, "a"); + test!("b'a'", Byte, "a"); + test!("\"a\"", Str_, "a"); + test!("b\"a\"", ByteStr, "a"); + test!("1234", Integer, "1234"); + test!("0b101", Integer, "0b101"); + test!("0xABC", Integer, "0xABC"); + test!("1.0", Float, "1.0"); + test!("1.0e10", Float, "1.0e10"); + + assert_eq!(setup(&cm, &sh, "2us".to_string()).next_token().tok, + token::Literal(token::Integer(Symbol::intern("2")), + Some(Symbol::intern("us")))); + assert_eq!(setup(&cm, &sh, "r###\"raw\"###suffix".to_string()).next_token().tok, + token::Literal(token::StrRaw(Symbol::intern("raw"), 3), + Some(Symbol::intern("suffix")))); + assert_eq!(setup(&cm, &sh, "br###\"raw\"###suffix".to_string()).next_token().tok, + token::Literal(token::ByteStrRaw(Symbol::intern("raw"), 3), + Some(Symbol::intern("suffix")))); + }) } #[test] @@ -1903,27 +1927,31 @@ mod tests { #[test] fn nested_block_comments() { - let cm = Rc::new(CodeMap::new(FilePathMapping::empty())); - let sh = mk_sess(cm.clone()); - let mut lexer = setup(&cm, &sh, "/* /* */ */'a'".to_string()); - match lexer.next_token().tok { - token::Comment => {} - _ => panic!("expected a comment!"), - } - assert_eq!(lexer.next_token().tok, - token::Literal(token::Char(Symbol::intern("a")), None)); + with_globals(&Globals::new(), || { + let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let sh = mk_sess(cm.clone()); + let mut lexer = setup(&cm, &sh, "/* /* */ */'a'".to_string()); + match lexer.next_token().tok { + token::Comment => {} + _ => panic!("expected a comment!"), + } + assert_eq!(lexer.next_token().tok, + token::Literal(token::Char(Symbol::intern("a")), None)); + }) } #[test] fn crlf_comments() { - let cm = Rc::new(CodeMap::new(FilePathMapping::empty())); - let sh = mk_sess(cm.clone()); - let mut lexer = setup(&cm, &sh, "// test\r\n/// test\r\n".to_string()); - let comment = lexer.next_token(); - assert_eq!(comment.tok, token::Comment); - assert_eq!((comment.sp.lo(), comment.sp.hi()), (BytePos(0), BytePos(7))); - assert_eq!(lexer.next_token().tok, token::Whitespace); - assert_eq!(lexer.next_token().tok, - token::DocComment(Symbol::intern("/// test"))); + with_globals(&Globals::new(), || { + let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let sh = mk_sess(cm.clone()); + let mut lexer = setup(&cm, &sh, "// test\r\n/// test\r\n".to_string()); + let comment = lexer.next_token(); + assert_eq!(comment.tok, token::Comment); + assert_eq!((comment.sp.lo(), comment.sp.hi()), (BytePos(0), BytePos(7))); + assert_eq!(lexer.next_token().tok, token::Whitespace); + assert_eq!(lexer.next_token().tok, + token::DocComment(Symbol::intern("/// test"))); + }) } } diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index c679efd41ea46..dc33ddf8ed15c 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -10,6 +10,7 @@ //! 
The main parser interface +use rustc_data_structures::sync::{Lrc, Lock}; use ast::{self, CrateConfig}; use codemap::{CodeMap, FilePathMapping}; use syntax_pos::{self, Span, FileMap, NO_EXPANSION}; @@ -20,12 +21,11 @@ use ptr::P; use str::char_at; use symbol::Symbol; use tokenstream::{TokenStream, TokenTree}; +use diagnostics::plugin::ErrorMap; -use std::cell::RefCell; use std::collections::HashSet; use std::iter; use std::path::{Path, PathBuf}; -use std::rc::Rc; use std::str; pub type PResult<'a, T> = Result>; @@ -46,15 +46,17 @@ pub struct ParseSess { pub span_diagnostic: Handler, pub unstable_features: UnstableFeatures, pub config: CrateConfig, - pub missing_fragment_specifiers: RefCell>, + pub missing_fragment_specifiers: Lock>, + /// The registered diagnostics codes + pub registered_diagnostics: Lock, /// Used to determine and report recursive mod inclusions - included_mod_stack: RefCell>, - code_map: Rc, + included_mod_stack: Lock>, // FIXME: Should be a temporary thread local thing + code_map: Lrc, } impl ParseSess { pub fn new(file_path_mapping: FilePathMapping) -> Self { - let cm = Rc::new(CodeMap::new(file_path_mapping)); + let cm = Lrc::new(CodeMap::new(file_path_mapping)); let handler = Handler::with_tty_emitter(ColorConfig::Auto, true, false, @@ -62,13 +64,14 @@ impl ParseSess { ParseSess::with_span_handler(handler, cm) } - pub fn with_span_handler(handler: Handler, code_map: Rc) -> ParseSess { + pub fn with_span_handler(handler: Handler, code_map: Lrc) -> ParseSess { ParseSess { span_diagnostic: handler, unstable_features: UnstableFeatures::from_environment(), config: HashSet::new(), - missing_fragment_specifiers: RefCell::new(HashSet::new()), - included_mod_stack: RefCell::new(vec![]), + missing_fragment_specifiers: Lock::new(HashSet::new()), + registered_diagnostics: Lock::new(ErrorMap::new()), + included_mod_stack: Lock::new(vec![]), code_map, } } @@ -176,7 +179,7 @@ pub fn new_sub_parser_from_file<'a>(sess: &'a ParseSess, } /// Given a filemap and config, return a parser -pub fn filemap_to_parser(sess: & ParseSess, filemap: Rc, ) -> Parser { +pub fn filemap_to_parser(sess: & ParseSess, filemap: Lrc) -> Parser { let end_pos = filemap.end_pos; let mut parser = stream_to_parser(sess, filemap_to_stream(sess, filemap, None)); @@ -199,7 +202,7 @@ pub fn new_parser_from_tts(sess: &ParseSess, tts: Vec) -> Parser { /// Given a session and a path and an optional span (for error reporting), /// add the path to the session's codemap and return the new filemap. 
fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option) - -> Rc { + -> Lrc { match sess.codemap().load_file(path) { Ok(filemap) => filemap, Err(e) => { @@ -213,7 +216,7 @@ fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option) } /// Given a filemap, produce a sequence of token-trees -pub fn filemap_to_stream(sess: &ParseSess, filemap: Rc, override_span: Option) +pub fn filemap_to_stream(sess: &ParseSess, filemap: Lrc, override_span: Option) -> TokenStream { let mut srdr = lexer::StringReader::new(sess, filemap); srdr.override_span = override_span; @@ -415,7 +418,7 @@ pub fn lit_token(lit: token::Lit, suf: Option, diag: Option<(Span, &Hand (true, Some(LitKind::ByteStr(byte_str_lit(&i.as_str())))) } token::ByteStrRaw(i, _) => { - (true, Some(LitKind::ByteStr(Rc::new(i.to_string().into_bytes())))) + (true, Some(LitKind::ByteStr(Lrc::new(i.to_string().into_bytes())))) } } } @@ -489,7 +492,7 @@ pub fn byte_lit(lit: &str) -> (u8, usize) { } } -pub fn byte_str_lit(lit: &str) -> Rc> { +pub fn byte_str_lit(lit: &str) -> Lrc> { let mut res = Vec::with_capacity(lit.len()); // FIXME #8372: This could be a for-loop if it didn't borrow the iterator @@ -546,7 +549,7 @@ pub fn byte_str_lit(lit: &str) -> Rc> { } } - Rc::new(res) + Lrc::new(res) } pub fn integer_lit(s: &str, suffix: Option, diag: Option<(Span, &Handler)>) @@ -669,6 +672,7 @@ mod tests { use util::parser_testing::{string_to_stream, string_to_parser}; use util::parser_testing::{string_to_expr, string_to_item, string_to_stmt}; use util::ThinVec; + use {Globals, with_globals}; // produce a syntax_pos::span fn sp(a: u32, b: u32) -> Span { @@ -680,156 +684,170 @@ mod tests { } #[test] fn path_exprs_1() { - assert!(string_to_expr("a".to_string()) == - P(ast::Expr{ - id: ast::DUMMY_NODE_ID, - node: ast::ExprKind::Path(None, ast::Path { + with_globals(&Globals::new(), || { + assert!(string_to_expr("a".to_string()) == + P(ast::Expr{ + id: ast::DUMMY_NODE_ID, + node: ast::ExprKind::Path(None, ast::Path { + span: sp(0, 1), + segments: vec![str2seg("a", 0, 1)], + }), span: sp(0, 1), - segments: vec![str2seg("a", 0, 1)], - }), - span: sp(0, 1), - attrs: ThinVec::new(), - })) + attrs: ThinVec::new(), + })) + }) } #[test] fn path_exprs_2 () { - assert!(string_to_expr("::a::b".to_string()) == - P(ast::Expr { - id: ast::DUMMY_NODE_ID, - node: ast::ExprKind::Path(None, ast::Path { + with_globals(&Globals::new(), || { + assert!(string_to_expr("::a::b".to_string()) == + P(ast::Expr { + id: ast::DUMMY_NODE_ID, + node: ast::ExprKind::Path(None, ast::Path { + span: sp(0, 6), + segments: vec![ast::PathSegment::crate_root(sp(0, 2)), + str2seg("a", 2, 3), + str2seg("b", 5, 6)] + }), span: sp(0, 6), - segments: vec![ast::PathSegment::crate_root(sp(0, 2)), - str2seg("a", 2, 3), - str2seg("b", 5, 6)] - }), - span: sp(0, 6), - attrs: ThinVec::new(), - })) + attrs: ThinVec::new(), + })) + }) } #[should_panic] #[test] fn bad_path_expr_1() { - string_to_expr("::abc::def::return".to_string()); + with_globals(&Globals::new(), || { + string_to_expr("::abc::def::return".to_string()); + }) } // check the token-tree-ization of macros #[test] fn string_to_tts_macro () { - let tts: Vec<_> = - string_to_stream("macro_rules! 
zip (($a)=>($a))".to_string()).trees().collect(); - let tts: &[TokenTree] = &tts[..]; - - match (tts.len(), tts.get(0), tts.get(1), tts.get(2), tts.get(3)) { - ( - 4, - Some(&TokenTree::Token(_, token::Ident(name_macro_rules))), - Some(&TokenTree::Token(_, token::Not)), - Some(&TokenTree::Token(_, token::Ident(name_zip))), - Some(&TokenTree::Delimited(_, ref macro_delimed)), - ) - if name_macro_rules.name == "macro_rules" - && name_zip.name == "zip" => { - let tts = ¯o_delimed.stream().trees().collect::>(); - match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) { - ( - 3, - Some(&TokenTree::Delimited(_, ref first_delimed)), - Some(&TokenTree::Token(_, token::FatArrow)), - Some(&TokenTree::Delimited(_, ref second_delimed)), - ) - if macro_delimed.delim == token::Paren => { - let tts = &first_delimed.stream().trees().collect::>(); - match (tts.len(), tts.get(0), tts.get(1)) { - ( - 2, - Some(&TokenTree::Token(_, token::Dollar)), - Some(&TokenTree::Token(_, token::Ident(ident))), - ) - if first_delimed.delim == token::Paren && ident.name == "a" => {}, - _ => panic!("value 3: {:?}", *first_delimed), - } - let tts = &second_delimed.stream().trees().collect::>(); - match (tts.len(), tts.get(0), tts.get(1)) { - ( - 2, - Some(&TokenTree::Token(_, token::Dollar)), - Some(&TokenTree::Token(_, token::Ident(ident))), - ) - if second_delimed.delim == token::Paren - && ident.name == "a" => {}, - _ => panic!("value 4: {:?}", *second_delimed), - } - }, - _ => panic!("value 2: {:?}", *macro_delimed), - } - }, - _ => panic!("value: {:?}",tts), - } + with_globals(&Globals::new(), || { + let tts: Vec<_> = + string_to_stream("macro_rules! zip (($a)=>($a))".to_string()).trees().collect(); + let tts: &[TokenTree] = &tts[..]; + + match (tts.len(), tts.get(0), tts.get(1), tts.get(2), tts.get(3)) { + ( + 4, + Some(&TokenTree::Token(_, token::Ident(name_macro_rules))), + Some(&TokenTree::Token(_, token::Not)), + Some(&TokenTree::Token(_, token::Ident(name_zip))), + Some(&TokenTree::Delimited(_, ref macro_delimed)), + ) + if name_macro_rules.name == "macro_rules" + && name_zip.name == "zip" => { + let tts = ¯o_delimed.stream().trees().collect::>(); + match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) { + ( + 3, + Some(&TokenTree::Delimited(_, ref first_delimed)), + Some(&TokenTree::Token(_, token::FatArrow)), + Some(&TokenTree::Delimited(_, ref second_delimed)), + ) + if macro_delimed.delim == token::Paren => { + let tts = &first_delimed.stream().trees().collect::>(); + match (tts.len(), tts.get(0), tts.get(1)) { + ( + 2, + Some(&TokenTree::Token(_, token::Dollar)), + Some(&TokenTree::Token(_, token::Ident(ident))), + ) + if first_delimed.delim == token::Paren && ident.name == "a" => {}, + _ => panic!("value 3: {:?}", *first_delimed), + } + let tts = &second_delimed.stream().trees().collect::>(); + match (tts.len(), tts.get(0), tts.get(1)) { + ( + 2, + Some(&TokenTree::Token(_, token::Dollar)), + Some(&TokenTree::Token(_, token::Ident(ident))), + ) + if second_delimed.delim == token::Paren + && ident.name == "a" => {}, + _ => panic!("value 4: {:?}", *second_delimed), + } + }, + _ => panic!("value 2: {:?}", *macro_delimed), + } + }, + _ => panic!("value: {:?}",tts), + } + }) } #[test] fn string_to_tts_1() { - let tts = string_to_stream("fn a (b : i32) { b; }".to_string()); - - let expected = TokenStream::concat(vec![ - TokenTree::Token(sp(0, 2), token::Ident(Ident::from_str("fn"))).into(), - TokenTree::Token(sp(3, 4), token::Ident(Ident::from_str("a"))).into(), - TokenTree::Delimited( - sp(5, 14), - 
tokenstream::Delimited { - delim: token::DelimToken::Paren, - tts: TokenStream::concat(vec![ - TokenTree::Token(sp(6, 7), token::Ident(Ident::from_str("b"))).into(), - TokenTree::Token(sp(8, 9), token::Colon).into(), - TokenTree::Token(sp(10, 13), token::Ident(Ident::from_str("i32"))).into(), - ]).into(), - }).into(), - TokenTree::Delimited( - sp(15, 21), - tokenstream::Delimited { - delim: token::DelimToken::Brace, - tts: TokenStream::concat(vec![ - TokenTree::Token(sp(17, 18), token::Ident(Ident::from_str("b"))).into(), - TokenTree::Token(sp(18, 19), token::Semi).into(), - ]).into(), - }).into() - ]); - - assert_eq!(tts, expected); + with_globals(&Globals::new(), || { + let tts = string_to_stream("fn a (b : i32) { b; }".to_string()); + + let expected = TokenStream::concat(vec![ + TokenTree::Token(sp(0, 2), token::Ident(Ident::from_str("fn"))).into(), + TokenTree::Token(sp(3, 4), token::Ident(Ident::from_str("a"))).into(), + TokenTree::Delimited( + sp(5, 14), + tokenstream::Delimited { + delim: token::DelimToken::Paren, + tts: TokenStream::concat(vec![ + TokenTree::Token(sp(6, 7), token::Ident(Ident::from_str("b"))).into(), + TokenTree::Token(sp(8, 9), token::Colon).into(), + TokenTree::Token(sp(10, 13), + token::Ident(Ident::from_str("i32"))).into(), + ]).into(), + }).into(), + TokenTree::Delimited( + sp(15, 21), + tokenstream::Delimited { + delim: token::DelimToken::Brace, + tts: TokenStream::concat(vec![ + TokenTree::Token(sp(17, 18), token::Ident(Ident::from_str("b"))).into(), + TokenTree::Token(sp(18, 19), token::Semi).into(), + ]).into(), + }).into() + ]); + + assert_eq!(tts, expected); + }) } #[test] fn ret_expr() { - assert!(string_to_expr("return d".to_string()) == - P(ast::Expr{ - id: ast::DUMMY_NODE_ID, - node:ast::ExprKind::Ret(Some(P(ast::Expr{ + with_globals(&Globals::new(), || { + assert!(string_to_expr("return d".to_string()) == + P(ast::Expr{ id: ast::DUMMY_NODE_ID, - node:ast::ExprKind::Path(None, ast::Path{ - span: sp(7, 8), - segments: vec![str2seg("d", 7, 8)], - }), - span:sp(7,8), + node:ast::ExprKind::Ret(Some(P(ast::Expr{ + id: ast::DUMMY_NODE_ID, + node:ast::ExprKind::Path(None, ast::Path{ + span: sp(7, 8), + segments: vec![str2seg("d", 7, 8)], + }), + span:sp(7,8), + attrs: ThinVec::new(), + }))), + span:sp(0,8), attrs: ThinVec::new(), - }))), - span:sp(0,8), - attrs: ThinVec::new(), - })) + })) + }) } #[test] fn parse_stmt_1 () { - assert!(string_to_stmt("b;".to_string()) == - Some(ast::Stmt { - node: ast::StmtKind::Expr(P(ast::Expr { - id: ast::DUMMY_NODE_ID, - node: ast::ExprKind::Path(None, ast::Path { - span:sp(0,1), - segments: vec![str2seg("b", 0, 1)], - }), - span: sp(0,1), - attrs: ThinVec::new()})), - id: ast::DUMMY_NODE_ID, - span: sp(0,1)})) - + with_globals(&Globals::new(), || { + assert!(string_to_stmt("b;".to_string()) == + Some(ast::Stmt { + node: ast::StmtKind::Expr(P(ast::Expr { + id: ast::DUMMY_NODE_ID, + node: ast::ExprKind::Path(None, ast::Path { + span:sp(0,1), + segments: vec![str2seg("b", 0, 1)], + }), + span: sp(0,1), + attrs: ThinVec::new()})), + id: ast::DUMMY_NODE_ID, + span: sp(0,1)})) + }) } fn parser_done(p: Parser){ @@ -837,120 +855,128 @@ mod tests { } #[test] fn parse_ident_pat () { - let sess = ParseSess::new(FilePathMapping::empty()); - let mut parser = string_to_parser(&sess, "b".to_string()); - assert!(panictry!(parser.parse_pat()) - == P(ast::Pat{ - id: ast::DUMMY_NODE_ID, - node: PatKind::Ident(ast::BindingMode::ByValue(ast::Mutability::Immutable), - Spanned{ span:sp(0, 1), - node: Ident::from_str("b") - }, - None), - span: 
sp(0,1)})); - parser_done(parser); + with_globals(&Globals::new(), || { + let sess = ParseSess::new(FilePathMapping::empty()); + let mut parser = string_to_parser(&sess, "b".to_string()); + assert!(panictry!(parser.parse_pat()) + == P(ast::Pat{ + id: ast::DUMMY_NODE_ID, + node: PatKind::Ident(ast::BindingMode::ByValue(ast::Mutability::Immutable), + Spanned{ span:sp(0, 1), + node: Ident::from_str("b") + }, + None), + span: sp(0,1)})); + parser_done(parser); + }) } // check the contents of the tt manually: #[test] fn parse_fundecl () { - // this test depends on the intern order of "fn" and "i32" - let item = string_to_item("fn a (b : i32) { b; }".to_string()).map(|m| { - m.map(|mut m| { - m.tokens = None; - m - }) - }); - assert_eq!(item, - Some( - P(ast::Item{ident:Ident::from_str("a"), - attrs:Vec::new(), - id: ast::DUMMY_NODE_ID, - tokens: None, - node: ast::ItemKind::Fn(P(ast::FnDecl { - inputs: vec![ast::Arg{ - ty: P(ast::Ty{id: ast::DUMMY_NODE_ID, - node: ast::TyKind::Path(None, ast::Path{ - span:sp(10,13), - segments: vec![str2seg("i32", 10, 13)], + with_globals(&Globals::new(), || { + // this test depends on the intern order of "fn" and "i32" + let item = string_to_item("fn a (b : i32) { b; }".to_string()).map(|m| { + m.map(|mut m| { + m.tokens = None; + m + }) + }); + assert_eq!(item, + Some( + P(ast::Item{ident:Ident::from_str("a"), + attrs:Vec::new(), + id: ast::DUMMY_NODE_ID, + tokens: None, + node: ast::ItemKind::Fn(P(ast::FnDecl { + inputs: vec![ast::Arg{ + ty: P(ast::Ty{id: ast::DUMMY_NODE_ID, + node: ast::TyKind::Path(None, ast::Path{ + span:sp(10,13), + segments: vec![str2seg("i32", 10, 13)], + }), + span:sp(10,13) }), - span:sp(10,13) - }), - pat: P(ast::Pat { - id: ast::DUMMY_NODE_ID, - node: PatKind::Ident( - ast::BindingMode::ByValue( - ast::Mutability::Immutable), - Spanned{ - span: sp(6,7), - node: Ident::from_str("b")}, - None - ), - span: sp(6,7) - }), - id: ast::DUMMY_NODE_ID - }], - output: ast::FunctionRetTy::Default(sp(15, 15)), - variadic: false - }), - ast::Unsafety::Normal, - Spanned { - span: sp(0,2), - node: ast::Constness::NotConst, - }, - Abi::Rust, - ast::Generics{ // no idea on either of these: - lifetimes: Vec::new(), - ty_params: Vec::new(), - where_clause: ast::WhereClause { + pat: P(ast::Pat { id: ast::DUMMY_NODE_ID, - predicates: Vec::new(), + node: PatKind::Ident( + ast::BindingMode::ByValue( + ast::Mutability::Immutable), + Spanned{ + span: sp(6,7), + node: Ident::from_str("b")}, + None + ), + span: sp(6,7) + }), + id: ast::DUMMY_NODE_ID + }], + output: ast::FunctionRetTy::Default(sp(15, 15)), + variadic: false + }), + ast::Unsafety::Normal, + Spanned { + span: sp(0,2), + node: ast::Constness::NotConst, + }, + Abi::Rust, + ast::Generics{ // no idea on either of these: + lifetimes: Vec::new(), + ty_params: Vec::new(), + where_clause: ast::WhereClause { + id: ast::DUMMY_NODE_ID, + predicates: Vec::new(), + span: syntax_pos::DUMMY_SP, + }, span: syntax_pos::DUMMY_SP, }, - span: syntax_pos::DUMMY_SP, - }, - P(ast::Block { - stmts: vec![ast::Stmt { - node: ast::StmtKind::Semi(P(ast::Expr{ + P(ast::Block { + stmts: vec![ast::Stmt { + node: ast::StmtKind::Semi(P(ast::Expr{ + id: ast::DUMMY_NODE_ID, + node: ast::ExprKind::Path(None, + ast::Path{ + span:sp(17,18), + segments: vec![str2seg("b", 17, 18)], + }), + span: sp(17,18), + attrs: ThinVec::new()})), id: ast::DUMMY_NODE_ID, - node: ast::ExprKind::Path(None, - ast::Path{ - span:sp(17,18), - segments: vec![str2seg("b", 17, 18)], - }), - span: sp(17,18), - attrs: ThinVec::new()})), + span: sp(17,19)}], 
id: ast::DUMMY_NODE_ID, - span: sp(17,19)}], - id: ast::DUMMY_NODE_ID, - rules: ast::BlockCheckMode::Default, // no idea - span: sp(15,21), - })), - vis: ast::Visibility::Inherited, - span: sp(0,21)}))); + rules: ast::BlockCheckMode::Default, // no idea + span: sp(15,21), + })), + vis: ast::Visibility::Inherited, + span: sp(0,21)}))); + }) } #[test] fn parse_use() { - let use_s = "use foo::bar::baz;"; - let vitem = string_to_item(use_s.to_string()).unwrap(); - let vitem_s = item_to_string(&vitem); - assert_eq!(&vitem_s[..], use_s); - - let use_s = "use foo::bar as baz;"; - let vitem = string_to_item(use_s.to_string()).unwrap(); - let vitem_s = item_to_string(&vitem); - assert_eq!(&vitem_s[..], use_s); + with_globals(&Globals::new(), || { + let use_s = "use foo::bar::baz;"; + let vitem = string_to_item(use_s.to_string()).unwrap(); + let vitem_s = item_to_string(&vitem); + assert_eq!(&vitem_s[..], use_s); + + let use_s = "use foo::bar as baz;"; + let vitem = string_to_item(use_s.to_string()).unwrap(); + let vitem_s = item_to_string(&vitem); + assert_eq!(&vitem_s[..], use_s); + }) } #[test] fn parse_extern_crate() { - let ex_s = "extern crate foo;"; - let vitem = string_to_item(ex_s.to_string()).unwrap(); - let vitem_s = item_to_string(&vitem); - assert_eq!(&vitem_s[..], ex_s); - - let ex_s = "extern crate foo as bar;"; - let vitem = string_to_item(ex_s.to_string()).unwrap(); - let vitem_s = item_to_string(&vitem); - assert_eq!(&vitem_s[..], ex_s); + with_globals(&Globals::new(), || { + let ex_s = "extern crate foo;"; + let vitem = string_to_item(ex_s.to_string()).unwrap(); + let vitem_s = item_to_string(&vitem); + assert_eq!(&vitem_s[..], ex_s); + + let ex_s = "extern crate foo as bar;"; + let vitem = string_to_item(ex_s.to_string()).unwrap(); + let vitem_s = item_to_string(&vitem); + assert_eq!(&vitem_s[..], ex_s); + }) } fn get_spans_of_pat_idents(src: &str) -> Vec { @@ -977,31 +1003,36 @@ mod tests { } #[test] fn span_of_self_arg_pat_idents_are_correct() { - - let srcs = ["impl z { fn a (&self, &myarg: i32) {} }", - "impl z { fn a (&mut self, &myarg: i32) {} }", - "impl z { fn a (&'a self, &myarg: i32) {} }", - "impl z { fn a (self, &myarg: i32) {} }", - "impl z { fn a (self: Foo, &myarg: i32) {} }", - ]; - - for &src in &srcs { - let spans = get_spans_of_pat_idents(src); - let (lo, hi) = (spans[0].lo(), spans[0].hi()); - assert!("self" == &src[lo.to_usize()..hi.to_usize()], - "\"{}\" != \"self\". src=\"{}\"", - &src[lo.to_usize()..hi.to_usize()], src) - } + with_globals(&Globals::new(), || { + + let srcs = ["impl z { fn a (&self, &myarg: i32) {} }", + "impl z { fn a (&mut self, &myarg: i32) {} }", + "impl z { fn a (&'a self, &myarg: i32) {} }", + "impl z { fn a (self, &myarg: i32) {} }", + "impl z { fn a (self: Foo, &myarg: i32) {} }", + ]; + + for &src in &srcs { + let spans = get_spans_of_pat_idents(src); + let (lo, hi) = (spans[0].lo(), spans[0].hi()); + assert!("self" == &src[lo.to_usize()..hi.to_usize()], + "\"{}\" != \"self\". src=\"{}\"", + &src[lo.to_usize()..hi.to_usize()], src) + } + }) } #[test] fn parse_exprs () { - // just make sure that they parse.... - string_to_expr("3 + 4".to_string()); - string_to_expr("a::z.froob(b,&(987+3))".to_string()); + with_globals(&Globals::new(), || { + // just make sure that they parse.... 
+ string_to_expr("3 + 4".to_string()); + string_to_expr("a::z.froob(b,&(987+3))".to_string()); + }) } #[test] fn attrs_fix_bug () { - string_to_item("pub fn mk_file_writer(path: &Path, flags: &[FileFlag]) + with_globals(&Globals::new(), || { + string_to_item("pub fn mk_file_writer(path: &Path, flags: &[FileFlag]) -> Result, String> { #[cfg(windows)] fn wb() -> c_int { @@ -1013,49 +1044,54 @@ mod tests { let mut fflags: c_int = wb(); }".to_string()); + }) } #[test] fn crlf_doc_comments() { - let sess = ParseSess::new(FilePathMapping::empty()); - - let name = "".to_string(); - let source = "/// doc comment\r\nfn foo() {}".to_string(); - let item = parse_item_from_source_str(name.clone(), source, &sess) - .unwrap().unwrap(); - let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap(); - assert_eq!(doc, "/// doc comment"); - - let source = "/// doc comment\r\n/// line 2\r\nfn foo() {}".to_string(); - let item = parse_item_from_source_str(name.clone(), source, &sess) - .unwrap().unwrap(); - let docs = item.attrs.iter().filter(|a| a.path == "doc") - .map(|a| a.value_str().unwrap().to_string()).collect::>(); - let b: &[_] = &["/// doc comment".to_string(), "/// line 2".to_string()]; - assert_eq!(&docs[..], b); - - let source = "/** doc comment\r\n * with CRLF */\r\nfn foo() {}".to_string(); - let item = parse_item_from_source_str(name, source, &sess).unwrap().unwrap(); - let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap(); - assert_eq!(doc, "/** doc comment\n * with CRLF */"); + with_globals(&Globals::new(), || { + let sess = ParseSess::new(FilePathMapping::empty()); + + let name = "".to_string(); + let source = "/// doc comment\r\nfn foo() {}".to_string(); + let item = parse_item_from_source_str(name.clone(), source, &sess) + .unwrap().unwrap(); + let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap(); + assert_eq!(doc, "/// doc comment"); + + let source = "/// doc comment\r\n/// line 2\r\nfn foo() {}".to_string(); + let item = parse_item_from_source_str(name.clone(), source, &sess) + .unwrap().unwrap(); + let docs = item.attrs.iter().filter(|a| a.path == "doc") + .map(|a| a.value_str().unwrap().to_string()).collect::>(); + let b: &[_] = &["/// doc comment".to_string(), "/// line 2".to_string()]; + assert_eq!(&docs[..], b); + + let source = "/** doc comment\r\n * with CRLF */\r\nfn foo() {}".to_string(); + let item = parse_item_from_source_str(name, source, &sess).unwrap().unwrap(); + let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap(); + assert_eq!(doc, "/** doc comment\n * with CRLF */"); + }) } #[test] fn ttdelim_span() { - let sess = ParseSess::new(FilePathMapping::empty()); - let expr = parse::parse_expr_from_source_str("foo".to_string(), - "foo!( fn main() { body } )".to_string(), &sess).unwrap(); - - let tts: Vec<_> = match expr.node { - ast::ExprKind::Mac(ref mac) => mac.node.stream().trees().collect(), - _ => panic!("not a macro"), - }; + with_globals(&Globals::new(), || { + let sess = ParseSess::new(FilePathMapping::empty()); + let expr = parse::parse_expr_from_source_str("foo".to_string(), + "foo!( fn main() { body } )".to_string(), &sess).unwrap(); + + let tts: Vec<_> = match expr.node { + ast::ExprKind::Mac(ref mac) => mac.node.stream().trees().collect(), + _ => panic!("not a macro"), + }; - let span = tts.iter().rev().next().unwrap().span(); + let span = tts.iter().rev().next().unwrap().span(); - match sess.codemap().span_to_snippet(span) { - Ok(s) => assert_eq!(&s[..], "{ body }"), - Err(_) => panic!("could not get snippet"), 
- } + match sess.codemap().span_to_snippet(span) { + Ok(s) => assert_eq!(&s[..], "{ body }"), + Err(_) => panic!("could not get snippet"), + } + }) } // This tests that when parsing a string (rather than a file) we don't try @@ -1063,17 +1099,19 @@ mod tests { // See `recurse_into_file_modules` in the parser. #[test] fn out_of_line_mod() { - let sess = ParseSess::new(FilePathMapping::empty()); - let item = parse_item_from_source_str( - "foo".to_owned(), - "mod foo { struct S; mod this_does_not_exist; }".to_owned(), - &sess, - ).unwrap().unwrap(); - - if let ast::ItemKind::Mod(ref m) = item.node { - assert!(m.items.len() == 2); - } else { - panic!(); - } + with_globals(&Globals::new(), || { + let sess = ParseSess::new(FilePathMapping::empty()); + let item = parse_item_from_source_str( + "foo".to_owned(), + "mod foo { struct S; mod this_does_not_exist; }".to_owned(), + &sess, + ).unwrap().unwrap(); + + if let ast::ItemKind::Mod(ref m) = item.node { + assert!(m.items.len() == 2); + } else { + panic!(); + } + }) } } diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 26f39f608807d..63699c09ae70a 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -25,9 +25,8 @@ use syntax_pos::{self, Span}; use tokenstream::{TokenStream, TokenTree}; use tokenstream; -use std::cell::Cell; use std::{cmp, fmt}; -use std::rc::Rc; +use rustc_data_structures::sync::{Lrc, LockCell}; #[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)] pub enum BinOpToken { @@ -179,7 +178,7 @@ pub enum Token { // The `LazyTokenStream` is a pure function of the `Nonterminal`, // and so the `LazyTokenStream` can be ignored by Eq, Hash, etc. - Interpolated(Rc<(Nonterminal, LazyTokenStream)>), + Interpolated(Lrc<(Nonterminal, LazyTokenStream)>), // Can be expanded into several tokens. /// Doc comment DocComment(ast::Name), @@ -199,7 +198,7 @@ pub enum Token { impl Token { pub fn interpolated(nt: Nonterminal) -> Token { - Token::Interpolated(Rc::new((nt, LazyTokenStream::new()))) + Token::Interpolated(Lrc::new((nt, LazyTokenStream::new()))) } /// Returns `true` if the token starts with '>'. 
@@ -560,13 +559,13 @@ pub fn is_op(tok: &Token) -> bool { } } -pub struct LazyTokenStream(Cell>); +pub struct LazyTokenStream(LockCell>); impl Clone for LazyTokenStream { fn clone(&self) -> Self { let opt_stream = self.0.take(); self.0.set(opt_stream.clone()); - LazyTokenStream(Cell::new(opt_stream)) + LazyTokenStream(LockCell::new(opt_stream)) } } @@ -585,7 +584,7 @@ impl fmt::Debug for LazyTokenStream { impl LazyTokenStream { pub fn new() -> Self { - LazyTokenStream(Cell::new(None)) + LazyTokenStream(LockCell::new(None)) } pub fn force TokenStream>(&self, f: F) -> TokenStream { diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index a2d3ed4deb652..1de5ac2abdc57 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -3145,36 +3145,41 @@ mod tests { use ast; use codemap; use syntax_pos; + use {Globals, with_globals}; #[test] fn test_fun_to_string() { - let abba_ident = ast::Ident::from_str("abba"); + with_globals(&Globals::new(), || { + let abba_ident = ast::Ident::from_str("abba"); - let decl = ast::FnDecl { - inputs: Vec::new(), - output: ast::FunctionRetTy::Default(syntax_pos::DUMMY_SP), - variadic: false - }; - let generics = ast::Generics::default(); - assert_eq!(fun_to_string(&decl, ast::Unsafety::Normal, - ast::Constness::NotConst, - abba_ident, &generics), - "fn abba()"); + let decl = ast::FnDecl { + inputs: Vec::new(), + output: ast::FunctionRetTy::Default(syntax_pos::DUMMY_SP), + variadic: false + }; + let generics = ast::Generics::default(); + assert_eq!(fun_to_string(&decl, ast::Unsafety::Normal, + ast::Constness::NotConst, + abba_ident, &generics), + "fn abba()"); + }) } #[test] fn test_variant_to_string() { - let ident = ast::Ident::from_str("principal_skinner"); - - let var = codemap::respan(syntax_pos::DUMMY_SP, ast::Variant_ { - name: ident, - attrs: Vec::new(), - // making this up as I go.... ? - data: ast::VariantData::Unit(ast::DUMMY_NODE_ID), - disr_expr: None, - }); - - let varstr = variant_to_string(&var); - assert_eq!(varstr, "principal_skinner"); + with_globals(&Globals::new(), || { + let ident = ast::Ident::from_str("principal_skinner"); + + let var = codemap::respan(syntax_pos::DUMMY_SP, ast::Variant_ { + name: ident, + attrs: Vec::new(), + // making this up as I go.... ? + data: ast::VariantData::Unit(ast::DUMMY_NODE_ID), + disr_expr: None, + }); + + let varstr = variant_to_string(&var); + assert_eq!(varstr, "principal_skinner"); + }) } } diff --git a/src/libsyntax/test.rs b/src/libsyntax/test.rs index a4ac5826f99c1..db6d0d74cea3e 100644 --- a/src/libsyntax/test.rs +++ b/src/libsyntax/test.rs @@ -21,7 +21,6 @@ use std::mem; use std::vec; use attr::{self, HasAttrs}; use syntax_pos::{self, DUMMY_SP, NO_EXPANSION, Span, FileMap, BytePos}; -use std::rc::Rc; use codemap::{self, CodeMap, ExpnInfo, NameAndSpan, MacroAttribute, dummy_spanned}; use errors; diff --git a/src/libsyntax/test_snippet.rs b/src/libsyntax/test_snippet.rs index a29250ea5f19f..7b2eb77a46cc4 100644 --- a/src/libsyntax/test_snippet.rs +++ b/src/libsyntax/test_snippet.rs @@ -13,10 +13,11 @@ use errors::Handler; use errors::emitter::EmitterWriter; use std::io; use std::io::prelude::*; -use std::rc::Rc; +use rustc_data_structures::sync::Lrc; use std::str; use std::sync::{Arc, Mutex}; use syntax_pos::{BytePos, NO_EXPANSION, Span, MultiSpan}; +use {Globals, with_globals}; /// Identify a position in the text by the Nth occurrence of a string. 
struct Position { @@ -45,36 +46,38 @@ impl Write for Shared { } fn test_harness(file_text: &str, span_labels: Vec, expected_output: &str) { - let output = Arc::new(Mutex::new(Vec::new())); + with_globals(&Globals::new(), || { + let output = Arc::new(Mutex::new(Vec::new())); - let code_map = Rc::new(CodeMap::new(FilePathMapping::empty())); - code_map.new_filemap_and_lines("test.rs", &file_text); + let code_map = Lrc::new(CodeMap::new(FilePathMapping::empty())); + code_map.new_filemap_and_lines("test.rs", &file_text); - let primary_span = make_span(&file_text, &span_labels[0].start, &span_labels[0].end); - let mut msp = MultiSpan::from_span(primary_span); - for span_label in span_labels { - let span = make_span(&file_text, &span_label.start, &span_label.end); - msp.push_span_label(span, span_label.label.to_string()); - println!("span: {:?} label: {:?}", span, span_label.label); - println!("text: {:?}", code_map.span_to_snippet(span)); - } + let primary_span = make_span(&file_text, &span_labels[0].start, &span_labels[0].end); + let mut msp = MultiSpan::from_span(primary_span); + for span_label in span_labels { + let span = make_span(&file_text, &span_label.start, &span_label.end); + msp.push_span_label(span, span_label.label.to_string()); + println!("span: {:?} label: {:?}", span, span_label.label); + println!("text: {:?}", code_map.span_to_snippet(span)); + } - let emitter = EmitterWriter::new(Box::new(Shared { data: output.clone() }), - Some(code_map.clone()), - false); - let handler = Handler::with_emitter(true, false, Box::new(emitter)); - handler.span_err(msp, "foo"); + let emitter = EmitterWriter::new(Box::new(Shared { data: output.clone() }), + Some(code_map.clone()), + false); + let handler = Handler::with_emitter(true, false, Box::new(emitter)); + handler.span_err(msp, "foo"); - assert!(expected_output.chars().next() == Some('\n'), - "expected output should begin with newline"); - let expected_output = &expected_output[1..]; + assert!(expected_output.chars().next() == Some('\n'), + "expected output should begin with newline"); + let expected_output = &expected_output[1..]; - let bytes = output.lock().unwrap(); - let actual_output = str::from_utf8(&bytes).unwrap(); - println!("expected output:\n------\n{}------", expected_output); - println!("actual output:\n------\n{}------", actual_output); + let bytes = output.lock().unwrap(); + let actual_output = str::from_utf8(&bytes).unwrap(); + println!("expected output:\n------\n{}------", expected_output); + println!("actual output:\n------\n{}------", actual_output); - assert!(expected_output == actual_output) + assert!(expected_output == actual_output) + }) } fn make_span(file_text: &str, start: &Position, end: &Position) -> Span { diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs index 870f54e4396af..8d9c5329c0813 100644 --- a/src/libsyntax/tokenstream.rs +++ b/src/libsyntax/tokenstream.rs @@ -593,6 +593,7 @@ impl Hash for ThinTokenStream { mod tests { use super::*; use syntax::ast::Ident; + use {Globals, with_globals}; use syntax_pos::{Span, BytePos, NO_EXPANSION}; use parse::token::Token; use util::parser_testing::string_to_stream; @@ -607,66 +608,82 @@ mod tests { #[test] fn test_concat() { - let test_res = string_to_ts("foo::bar::baz"); - let test_fst = string_to_ts("foo::bar"); - let test_snd = string_to_ts("::baz"); - let eq_res = TokenStream::concat(vec![test_fst, test_snd]); - assert_eq!(test_res.trees().count(), 5); - assert_eq!(eq_res.trees().count(), 5); - assert_eq!(test_res.eq_unspanned(&eq_res), true); 
+ with_globals(&Globals::new(), || { + let test_res = string_to_ts("foo::bar::baz"); + let test_fst = string_to_ts("foo::bar"); + let test_snd = string_to_ts("::baz"); + let eq_res = TokenStream::concat(vec![test_fst, test_snd]); + assert_eq!(test_res.trees().count(), 5); + assert_eq!(eq_res.trees().count(), 5); + assert_eq!(test_res.eq_unspanned(&eq_res), true); + }) } #[test] fn test_to_from_bijection() { - let test_start = string_to_ts("foo::bar(baz)"); - let test_end = test_start.trees().collect(); - assert_eq!(test_start, test_end) + with_globals(&Globals::new(), || { + let test_start = string_to_ts("foo::bar(baz)"); + let test_end = test_start.trees().collect(); + assert_eq!(test_start, test_end) + }) } #[test] fn test_eq_0() { - let test_res = string_to_ts("foo"); - let test_eqs = string_to_ts("foo"); - assert_eq!(test_res, test_eqs) + with_globals(&Globals::new(), || { + let test_res = string_to_ts("foo"); + let test_eqs = string_to_ts("foo"); + assert_eq!(test_res, test_eqs) + }) } #[test] fn test_eq_1() { - let test_res = string_to_ts("::bar::baz"); - let test_eqs = string_to_ts("::bar::baz"); - assert_eq!(test_res, test_eqs) + with_globals(&Globals::new(), || { + let test_res = string_to_ts("::bar::baz"); + let test_eqs = string_to_ts("::bar::baz"); + assert_eq!(test_res, test_eqs) + }) } #[test] fn test_eq_3() { - let test_res = string_to_ts(""); - let test_eqs = string_to_ts(""); - assert_eq!(test_res, test_eqs) + with_globals(&Globals::new(), || { + let test_res = string_to_ts(""); + let test_eqs = string_to_ts(""); + assert_eq!(test_res, test_eqs) + }) } #[test] fn test_diseq_0() { - let test_res = string_to_ts("::bar::baz"); - let test_eqs = string_to_ts("bar::baz"); - assert_eq!(test_res == test_eqs, false) + with_globals(&Globals::new(), || { + let test_res = string_to_ts("::bar::baz"); + let test_eqs = string_to_ts("bar::baz"); + assert_eq!(test_res == test_eqs, false) + }) } #[test] fn test_diseq_1() { - let test_res = string_to_ts("(bar,baz)"); - let test_eqs = string_to_ts("bar,baz"); - assert_eq!(test_res == test_eqs, false) + with_globals(&Globals::new(), || { + let test_res = string_to_ts("(bar,baz)"); + let test_eqs = string_to_ts("bar,baz"); + assert_eq!(test_res == test_eqs, false) + }) } #[test] fn test_is_empty() { - let test0: TokenStream = Vec::::new().into_iter().collect(); - let test1: TokenStream = - TokenTree::Token(sp(0, 1), Token::Ident(Ident::from_str("a"))).into(); - let test2 = string_to_ts("foo(bar::baz)"); - - assert_eq!(test0.is_empty(), true); - assert_eq!(test1.is_empty(), false); - assert_eq!(test2.is_empty(), false); + with_globals(&Globals::new(), || { + let test0: TokenStream = Vec::::new().into_iter().collect(); + let test1: TokenStream = + TokenTree::Token(sp(0, 1), Token::Ident(Ident::from_str("a"))).into(); + let test2 = string_to_ts("foo(bar::baz)"); + + assert_eq!(test0.is_empty(), true); + assert_eq!(test1.is_empty(), false); + assert_eq!(test2.is_empty(), false); + }) } } diff --git a/src/libsyntax/util/rc_slice.rs b/src/libsyntax/util/rc_slice.rs index d6939d71129e4..520b7a48e3025 100644 --- a/src/libsyntax/util/rc_slice.rs +++ b/src/libsyntax/util/rc_slice.rs @@ -10,14 +10,14 @@ use std::fmt; use std::ops::{Deref, Range}; -use std::rc::Rc; +use rustc_data_structures::sync::Lrc; use rustc_data_structures::stable_hasher::{StableHasher, StableHasherResult, HashStable}; #[derive(Clone)] pub struct RcSlice { - data: Rc>, + data: Lrc>, offset: u32, len: u32, } @@ -27,7 +27,7 @@ impl RcSlice { RcSlice { offset: 0, len: vec.len() as u32, 
- data: Rc::new(vec.into_boxed_slice()), + data: Lrc::new(vec.into_boxed_slice()), } } diff --git a/src/libsyntax_ext/Cargo.toml b/src/libsyntax_ext/Cargo.toml index 1c4702402886d..d8eeb5ed2554a 100644 --- a/src/libsyntax_ext/Cargo.toml +++ b/src/libsyntax_ext/Cargo.toml @@ -14,3 +14,4 @@ proc_macro = { path = "../libproc_macro" } rustc_errors = { path = "../librustc_errors" } syntax = { path = "../libsyntax" } syntax_pos = { path = "../libsyntax_pos" } +rustc_data_structures = { path = "../librustc_data_structures" } \ No newline at end of file diff --git a/src/libsyntax_ext/deriving/mod.rs b/src/libsyntax_ext/deriving/mod.rs index ccf3d5502341f..307ef0eed8ea1 100644 --- a/src/libsyntax_ext/deriving/mod.rs +++ b/src/libsyntax_ext/deriving/mod.rs @@ -10,7 +10,7 @@ //! The compiler code necessary to implement the `#[derive]` extensions. -use std::rc::Rc; +use rustc_data_structures::sync::Lrc; use syntax::ast; use syntax::ext::base::{Annotatable, ExtCtxt, SyntaxExtension, Resolver}; use syntax::ext::build::AstBuilder; @@ -81,7 +81,7 @@ macro_rules! derive_traits { $( resolver.add_builtin( ast::Ident::with_empty_ctxt(Symbol::intern($name)), - Rc::new(SyntaxExtension::BuiltinDerive($func)) + Lrc::new(SyntaxExtension::BuiltinDerive($func)) ); )* } diff --git a/src/libsyntax_ext/lib.rs b/src/libsyntax_ext/lib.rs index 42bbb4ae0cbee..a33d0bef4751c 100644 --- a/src/libsyntax_ext/lib.rs +++ b/src/libsyntax_ext/lib.rs @@ -22,6 +22,7 @@ extern crate fmt_macros; extern crate syntax; extern crate syntax_pos; extern crate proc_macro; +extern crate rustc_data_structures; extern crate rustc_errors as errors; mod asm; @@ -43,7 +44,7 @@ pub mod deriving; pub mod proc_macro_impl; -use std::rc::Rc; +use rustc_data_structures::sync::Lrc; use syntax::ast; use syntax::ext::base::{MacroExpanderFn, NormalTT, NamedSyntaxExtension}; use syntax::symbol::Symbol; @@ -54,7 +55,7 @@ pub fn register_builtins(resolver: &mut syntax::ext::base::Resolver, deriving::register_builtin_derives(resolver); let mut register = |name, ext| { - resolver.add_builtin(ast::Ident::with_empty_ctxt(name), Rc::new(ext)); + resolver.add_builtin(ast::Ident::with_empty_ctxt(name), Lrc::new(ext)); }; macro_rules! register { diff --git a/src/libsyntax_pos/Cargo.toml b/src/libsyntax_pos/Cargo.toml index aad2155157d84..af02f246f1de9 100644 --- a/src/libsyntax_pos/Cargo.toml +++ b/src/libsyntax_pos/Cargo.toml @@ -11,4 +11,5 @@ crate-type = ["dylib"] [dependencies] serialize = { path = "../libserialize" } rustc_data_structures = { path = "../librustc_data_structures" } +scoped-tls = { git = "https://github.com/Zoxc/scoped-tls.git", features=["nightly"] } unicode-width = "0.1.4" diff --git a/src/libsyntax_pos/hygiene.rs b/src/libsyntax_pos/hygiene.rs index 9358e654a9fc8..b2d33780363fd 100644 --- a/src/libsyntax_pos/hygiene.rs +++ b/src/libsyntax_pos/hygiene.rs @@ -15,11 +15,11 @@ //! and definition contexts*. J. Funct. Program. 22, 2 (March 2012), 181-216. //! 
DOI=10.1017/S0956796812000093 http://dx.doi.org/10.1017/S0956796812000093 +use GLOBALS; use Span; use symbol::{Ident, Symbol}; use serialize::{Encodable, Decodable, Encoder, Decoder}; -use std::cell::RefCell; use std::collections::HashMap; use std::fmt; @@ -106,7 +106,7 @@ impl Mark { } } -struct HygieneData { +pub struct HygieneData { marks: Vec, syntax_contexts: Vec, markings: HashMap<(SyntaxContext, Mark), SyntaxContext>, @@ -114,7 +114,7 @@ struct HygieneData { } impl HygieneData { - fn new() -> Self { + pub fn new() -> Self { HygieneData { marks: vec![MarkData::default()], syntax_contexts: vec![SyntaxContextData::default()], @@ -124,10 +124,7 @@ impl HygieneData { } fn with T>(f: F) -> T { - thread_local! { - static HYGIENE_DATA: RefCell = RefCell::new(HygieneData::new()); - } - HYGIENE_DATA.with(|data| f(&mut *data.borrow_mut())) + GLOBALS.with(|globals| f(&mut *globals.hygiene_data.borrow_mut())) } } diff --git a/src/libsyntax_pos/lib.rs b/src/libsyntax_pos/lib.rs index ec652b5607ec4..d1bf942c1d7b6 100644 --- a/src/libsyntax_pos/lib.rs +++ b/src/libsyntax_pos/lib.rs @@ -27,18 +27,21 @@ #![feature(specialization)] use std::borrow::Cow; -use std::cell::{Cell, RefCell}; +use std::cell::Cell; use std::cmp::{self, Ordering}; use std::fmt; use std::hash::Hasher; use std::ops::{Add, Sub}; use std::path::PathBuf; -use std::rc::Rc; use rustc_data_structures::stable_hasher::StableHasher; +use rustc_data_structures::sync::{Lrc, Lock}; extern crate rustc_data_structures; +#[macro_use] +extern crate scoped_tls; + use serialize::{Encodable, Decodable, Encoder, Decoder}; extern crate serialize; @@ -54,6 +57,24 @@ pub use span_encoding::{Span, DUMMY_SP}; pub mod symbol; +pub struct Globals { + symbol_interner: Lock, + span_interner: Lock, + hygiene_data: Lock, +} + +impl Globals { + pub fn new() -> Globals { + Globals { + symbol_interner: Lock::new(symbol::Interner::fresh()), + span_interner: Lock::new(span_encoding::SpanInterner::default()), + hygiene_data: Lock::new(hygiene::HygieneData::new()), + } + } +} + +scoped_thread_local!(pub static GLOBALS: Globals); + pub type FileName = String; /// Spans represent a region of code, used for error reporting. Positions in spans @@ -92,10 +113,6 @@ impl SpanData { } } -// The interner in thread-local, so `Span` shouldn't move between threads. -impl !Send for Span {} -impl !Sync for Span {} - impl PartialOrd for Span { fn partial_cmp(&self, rhs: &Self) -> Option { PartialOrd::partial_cmp(&self.data(), &rhs.data()) @@ -604,22 +621,22 @@ pub struct FileMap { /// Indicates which crate this FileMap was imported from. pub crate_of_origin: u32, /// The complete source code - pub src: Option>, + pub src: Option>, /// The source code's hash pub src_hash: u128, /// The external source code (used for external crates, which will have a `None` /// value as `self.src`. 
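The hunks above and below replace the per-thread Rc/RefCell state with Lrc and Lock from rustc_data_structures::sync. Those types are not shown in this patch; the single-threaded stand-ins below are only a sketch consistent with the two call styles that appear here (borrow_mut() in hygiene.rs, lock() in span_encoding.rs and symbol.rs further down). The names, internals, and cfg strategy are assumptions, not the crate's real code.

    use std::cell::{RefCell, RefMut};
    use std::rc::Rc;

    // Hypothetical single-threaded stand-ins: a parallel build would swap
    // Rc for Arc and RefCell for a real lock behind the same interface.
    pub type Lrc<T> = Rc<T>;

    pub struct Lock<T>(RefCell<T>);

    impl<T> Lock<T> {
        pub fn new(v: T) -> Self { Lock(RefCell::new(v)) }
        // Both spellings appear in this patch; in a single-threaded build
        // they can do the same thing.
        pub fn borrow_mut(&self) -> RefMut<T> { self.0.borrow_mut() }
        pub fn lock(&self) -> RefMut<T> { self.0.borrow_mut() }
    }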
- pub external_src: RefCell, + pub external_src: Lock, /// The start position of this source in the CodeMap pub start_pos: BytePos, /// The end position of this source in the CodeMap pub end_pos: BytePos, /// Locations of lines beginnings in the source code - pub lines: RefCell>, + pub lines: Lock>, /// Locations of multi-byte characters in the source code - pub multibyte_chars: RefCell>, + pub multibyte_chars: Lock>, /// Width of characters that are not narrow in the source code - pub non_narrow_chars: RefCell>, + pub non_narrow_chars: Lock>, } impl Encodable for FileMap { @@ -742,10 +759,10 @@ impl Decodable for FileMap { end_pos, src: None, src_hash, - external_src: RefCell::new(ExternalSource::AbsentOk), - lines: RefCell::new(lines), - multibyte_chars: RefCell::new(multibyte_chars), - non_narrow_chars: RefCell::new(non_narrow_chars) + external_src: Lock::new(ExternalSource::AbsentOk), + lines: Lock::new(lines), + multibyte_chars: Lock::new(multibyte_chars), + non_narrow_chars: Lock::new(non_narrow_chars) }) }) } @@ -776,14 +793,14 @@ impl FileMap { name_was_remapped, unmapped_path: Some(unmapped_path), crate_of_origin: 0, - src: Some(Rc::new(src)), + src: Some(Lrc::new(src)), src_hash, - external_src: RefCell::new(ExternalSource::Unneeded), + external_src: Lock::new(ExternalSource::Unneeded), start_pos, end_pos: Pos::from_usize(end_pos), - lines: RefCell::new(Vec::new()), - multibyte_chars: RefCell::new(Vec::new()), - non_narrow_chars: RefCell::new(Vec::new()), + lines: Lock::new(Vec::new()), + multibyte_chars: Lock::new(Vec::new()), + non_narrow_chars: Lock::new(Vec::new()), } } @@ -1039,7 +1056,7 @@ impl Sub for CharPos { #[derive(Debug, Clone)] pub struct Loc { /// Information about the original source - pub file: Rc, + pub file: Lrc, /// The (1-based) line number pub line: usize, /// The (0-based) column offset @@ -1056,14 +1073,14 @@ pub struct LocWithOpt { pub filename: FileName, pub line: usize, pub col: CharPos, - pub file: Option>, + pub file: Option>, } // used to be structural records. Better names, anyone? #[derive(Debug)] -pub struct FileMapAndLine { pub fm: Rc, pub line: usize } +pub struct FileMapAndLine { pub fm: Lrc, pub line: usize } #[derive(Debug)] -pub struct FileMapAndBytePos { pub fm: Rc, pub pos: BytePos } +pub struct FileMapAndBytePos { pub fm: Lrc, pub pos: BytePos } #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub struct LineInfo { @@ -1078,7 +1095,7 @@ pub struct LineInfo { } pub struct FileLines { - pub file: Rc, + pub file: Lrc, pub lines: Vec } diff --git a/src/libsyntax_pos/span_encoding.rs b/src/libsyntax_pos/span_encoding.rs index b23e40ce7a932..16a59649038f8 100644 --- a/src/libsyntax_pos/span_encoding.rs +++ b/src/libsyntax_pos/span_encoding.rs @@ -14,12 +14,11 @@ // The encoding format for inline spans were obtained by optimizing over crates in rustc/libstd. // See https://internals.rust-lang.org/t/rfc-compiler-refactoring-spans/1357/28 +use GLOBALS; use {BytePos, SpanData}; use hygiene::SyntaxContext; use rustc_data_structures::fx::FxHashMap; -use std::cell::RefCell; - /// A compressed span. /// Contains either fields of `SpanData` inline if they are small, or index into span interner. /// The primary goal of `Span` is to be as small as possible and fit into other structures @@ -112,7 +111,7 @@ fn decode(span: Span) -> SpanData { } #[derive(Default)] -struct SpanInterner { +pub struct SpanInterner { spans: FxHashMap, span_data: Vec, } @@ -135,11 +134,8 @@ impl SpanInterner { } } -// If an interner exists in TLS, return it. 
Otherwise, prepare a fresh one. +// If an interner exists, return it. Otherwise, prepare a fresh one. #[inline] fn with_span_interner T>(f: F) -> T { - thread_local!(static INTERNER: RefCell = { - RefCell::new(SpanInterner::default()) - }); - INTERNER.with(|interner| f(&mut *interner.borrow_mut())) + GLOBALS.with(|globals| f(&mut *globals.span_interner.lock())) } diff --git a/src/libsyntax_pos/symbol.rs b/src/libsyntax_pos/symbol.rs index aafdd696b747d..7f57f95863795 100644 --- a/src/libsyntax_pos/symbol.rs +++ b/src/libsyntax_pos/symbol.rs @@ -13,9 +13,9 @@ //! type, and vice versa. use hygiene::SyntaxContext; +use GLOBALS; use serialize::{Decodable, Decoder, Encodable, Encoder}; -use std::cell::RefCell; use std::collections::HashMap; use std::fmt; @@ -83,10 +83,6 @@ impl Decodable for Ident { #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct Symbol(u32); -// The interner in thread-local, so `Symbol` shouldn't move between threads. -impl !Send for Symbol { } -impl !Sync for Symbol { } - impl Symbol { /// Maps a string to its interned representation. pub fn intern(string: &str) -> Self { @@ -247,7 +243,7 @@ macro_rules! declare_keywords {( } impl Interner { - fn fresh() -> Self { + pub fn fresh() -> Self { Interner::prefill(&[$($string,)*]) } } @@ -330,12 +326,10 @@ declare_keywords! { (60, Union, "union") } -// If an interner exists in TLS, return it. Otherwise, prepare a fresh one. +// If an interner exists, return it. Otherwise, prepare a fresh one. +#[inline] fn with_interner T>(f: F) -> T { - thread_local!(static INTERNER: RefCell = { - RefCell::new(Interner::fresh()) - }); - INTERNER.with(|interner| f(&mut *interner.borrow_mut())) + GLOBALS.with(|globals| f(&mut *globals.symbol_interner.lock())) } /// Represents a string stored in the thread-local interner. 
Because the @@ -388,8 +382,6 @@ impl<'a> ::std::cmp::PartialEq for &'a String { } } -impl !Send for InternedString { } - impl ::std::ops::Deref for InternedString { type Target = str; fn deref(&self) -> &str { self.string } @@ -422,6 +414,7 @@ impl Encodable for InternedString { #[cfg(test)] mod tests { use super::*; + use Globals; #[test] fn interner_tests() { @@ -444,7 +437,9 @@ mod tests { #[test] fn without_first_quote_test() { - let i = Ident::from_str("'break"); - assert_eq!(i.without_first_quote().name, keywords::Break.name()); + GLOBALS.set(&Globals::new(), || { + let i = Ident::from_str("'break"); + assert_eq!(i.without_first_quote().name, keywords::Break.name()); + }); } } diff --git a/src/test/run-fail-fulldeps/qquote.rs b/src/test/run-fail-fulldeps/qquote.rs index 27b9e27be4385..c36341b149828 100644 --- a/src/test/run-fail-fulldeps/qquote.rs +++ b/src/test/run-fail-fulldeps/qquote.rs @@ -24,6 +24,10 @@ use syntax::symbol::Symbol; use syntax_pos::DUMMY_SP; fn main() { + syntax::with_globals(&syntax::Globals::new(), || run()); +} + +fn run() { let ps = syntax::parse::ParseSess::new(codemap::FilePathMapping::empty()); let mut resolver = syntax::ext::base::DummyResolver; let mut cx = syntax::ext::base::ExtCtxt::new( diff --git a/src/test/run-make/issue-19371/foo.rs b/src/test/run-make/issue-19371/foo.rs index 4db027aaeef71..c5023d15c531f 100644 --- a/src/test/run-make/issue-19371/foo.rs +++ b/src/test/run-make/issue-19371/foo.rs @@ -19,13 +19,12 @@ extern crate rustc_trans; extern crate syntax; use rustc::session::{build_session, Session}; -use rustc::session::config::{basic_options, build_configuration, Input, +use rustc::session::config::{basic_options, Input, OutputType, OutputTypes}; use rustc_driver::driver::{compile_input, CompileController, anon_src}; use rustc_metadata::cstore::CStore; use rustc_errors::registry::Registry; -use std::collections::HashSet; use std::path::PathBuf; use std::rc::Rc; @@ -68,8 +67,10 @@ fn basic_sess(sysroot: PathBuf) -> (Session, Rc) { } fn compile(code: String, output: PathBuf, sysroot: PathBuf) { - let (sess, cstore) = basic_sess(sysroot); - let control = CompileController::basic(); - let input = Input::Str { name: anon_src(), input: code }; - let _ = compile_input(&sess, &cstore, &input, &None, &Some(output), None, &control); + syntax::with_globals(&syntax::Globals::new(), || { + let (sess, cstore) = basic_sess(sysroot); + let control = CompileController::basic(); + let input = Input::Str { name: anon_src(), input: code }; + let _ = compile_input(&sess, &cstore, &input, &None, &Some(output), None, &control); + }); } diff --git a/src/test/run-pass-fulldeps/ast_stmt_expr_attr.rs b/src/test/run-pass-fulldeps/ast_stmt_expr_attr.rs index fc031f4a310c2..61eee9301ba80 100644 --- a/src/test/run-pass-fulldeps/ast_stmt_expr_attr.rs +++ b/src/test/run-pass-fulldeps/ast_stmt_expr_attr.rs @@ -115,6 +115,10 @@ fn reject_stmt_parse(es: &str) { } fn main() { + syntax::with_globals(&syntax::Globals::new(), || run()); +} + +fn run() { let both = &["#[attr]", "#![attr]"]; let outer = &["#[attr]"]; let none = &[]; diff --git a/src/test/run-pass-fulldeps/issue-35829.rs b/src/test/run-pass-fulldeps/issue-35829.rs index f17a0494a69c4..e27aa34af6577 100644 --- a/src/test/run-pass-fulldeps/issue-35829.rs +++ b/src/test/run-pass-fulldeps/issue-35829.rs @@ -13,19 +13,23 @@ #![feature(quote, rustc_private)] extern crate syntax; +extern crate rustc_data_structures; use syntax::ext::base::{ExtCtxt, DummyResolver}; use syntax::ext::expand::ExpansionConfig; use 
syntax::parse::ParseSess; use syntax::codemap::{FilePathMapping, dummy_spanned}; use syntax::print::pprust::expr_to_string; -use syntax::ast::{Expr, ExprKind, LitKind, StrStyle, RangeLimits}; -use syntax::symbol::Symbol; +use syntax::ast::{ExprKind, LitKind, RangeLimits}; use syntax::ptr::P; -use std::rc::Rc; +use rustc_data_structures::sync::Lrc; fn main() { + syntax::with_globals(&syntax::Globals::new(), || run()); +} + +fn run() { let parse_sess = ParseSess::new(FilePathMapping::empty()); let exp_cfg = ExpansionConfig::default("issue_35829".to_owned()); let mut resolver = DummyResolver; @@ -33,12 +37,12 @@ fn main() { // check byte string let byte_string = quote_expr!(&cx, b"one"); - let byte_string_lit_kind = LitKind::ByteStr(Rc::new(b"one".to_vec())); + let byte_string_lit_kind = LitKind::ByteStr(Lrc::new(b"one".to_vec())); assert_eq!(byte_string.node, ExprKind::Lit(P(dummy_spanned(byte_string_lit_kind)))); // check raw byte string let raw_byte_string = quote_expr!(&cx, br###"#"two"#"###); - let raw_byte_string_lit_kind = LitKind::ByteStr(Rc::new(b"#\"two\"#".to_vec())); + let raw_byte_string_lit_kind = LitKind::ByteStr(Lrc::new(b"#\"two\"#".to_vec())); assert_eq!(raw_byte_string.node, ExprKind::Lit(P(dummy_spanned(raw_byte_string_lit_kind)))); // check dotdoteq diff --git a/src/test/run-pass-fulldeps/pprust-expr-roundtrip.rs b/src/test/run-pass-fulldeps/pprust-expr-roundtrip.rs index 456088b2c5285..5c96a23610b8b 100644 --- a/src/test/run-pass-fulldeps/pprust-expr-roundtrip.rs +++ b/src/test/run-pass-fulldeps/pprust-expr-roundtrip.rs @@ -216,8 +216,11 @@ impl Folder for AddParens { } } - fn main() { + syntax::with_globals(&syntax::Globals::new(), || run()); +} + +fn run() { let ps = ParseSess::new(FilePathMapping::empty()); iter_exprs(2, &mut |e| { diff --git a/src/test/run-pass-fulldeps/qquote.rs b/src/test/run-pass-fulldeps/qquote.rs index 949aa8a9518c0..3fcc7b6baec74 100644 --- a/src/test/run-pass-fulldeps/qquote.rs +++ b/src/test/run-pass-fulldeps/qquote.rs @@ -21,6 +21,10 @@ use syntax::symbol::Symbol; use syntax_pos::DUMMY_SP; fn main() { + syntax::with_globals(&syntax::Globals::new(), || run()); +} + +fn run() { let ps = syntax::parse::ParseSess::new(FilePathMapping::empty()); let mut resolver = syntax::ext::base::DummyResolver; let mut cx = syntax::ext::base::ExtCtxt::new( diff --git a/src/tools/error_index_generator/main.rs b/src/tools/error_index_generator/main.rs index ca383b5add011..4f7474a9275b2 100644 --- a/src/tools/error_index_generator/main.rs +++ b/src/tools/error_index_generator/main.rs @@ -205,7 +205,10 @@ fn main() { *slot.borrow_mut() = Some((None, String::from("https://play.rust-lang.org/"))); }); let (format, dst) = parse_args(); - if let Err(e) = main_with_result(format, &dst) { + let result = syntax::with_globals(&syntax::Globals::new(), move || { + main_with_result(format, &dst) + }); + if let Err(e) = result { panic!("{}", e.description()); } }
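Taken together, the rule this patch establishes is that nothing may create a Span, Symbol, or SyntaxContext until a Globals value has been installed. The run-pass tests and error_index_generator above all adopt the same pattern; a hedged usage sketch of it follows (nightly-only, #![feature(rustc_private)], and the ParseSess construction is just an illustration taken from the qquote test):

    #![feature(rustc_private)]
    extern crate syntax;

    use syntax::codemap::FilePathMapping;
    use syntax::parse::ParseSess;

    fn main() {
        // Install the global interners for the whole program, mirroring the
        // tests patched above; with_globals returns whatever `run` returns.
        syntax::with_globals(&syntax::Globals::new(), || run())
    }

    fn run() {
        // Only inside the closure is it safe to intern symbols or spans.
        let _sess = ParseSess::new(FilePathMapping::empty());
    }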