diff --git a/src/bootstrap/lib.rs b/src/bootstrap/lib.rs
index cbdb174c02d97..39d7ea922bced 100644
--- a/src/bootstrap/lib.rs
+++ b/src/bootstrap/lib.rs
@@ -1080,6 +1080,10 @@ impl Build {
     /// done. The file is updated immediately after this function completes.
     pub fn save_toolstate(&self, tool: &str, state: ToolState) {
         if let Some(ref path) = self.config.save_toolstates {
+            if let Some(parent) = path.parent() {
+                // Ensure the parent directory always exists
+                t!(std::fs::create_dir_all(parent));
+            }
             let mut file = t!(fs::OpenOptions::new()
                 .create(true)
                 .read(true)
diff --git a/src/ci/azure-pipelines/auto.yml b/src/ci/azure-pipelines/auto.yml
index 24b07a1b7c950..271c32585449e 100644
--- a/src/ci/azure-pipelines/auto.yml
+++ b/src/ci/azure-pipelines/auto.yml
@@ -140,6 +140,7 @@ jobs:
         IMAGE: x86_64-gnu-aux
       x86_64-gnu-tools:
         IMAGE: x86_64-gnu-tools
+        DEPLOY_TOOLSTATES_JSON: toolstates-linux.json
       x86_64-gnu-debug:
         IMAGE: x86_64-gnu-debug
       x86_64-gnu-nopt:
@@ -262,8 +263,9 @@ jobs:
       # MSVC tools tests
       x86_64-msvc-tools:
         MSYS_BITS: 64
-        SCRIPT: src/ci/docker/x86_64-gnu-tools/checktools.sh x.py /tmp/toolstates.json windows
-        RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-msvc --save-toolstates=/tmp/toolstates.json
+        SCRIPT: src/ci/docker/x86_64-gnu-tools/checktools.sh x.py /tmp/toolstate/toolstates.json windows
+        RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-msvc --save-toolstates=/tmp/toolstate/toolstates.json
+        DEPLOY_TOOLSTATES_JSON: toolstates-windows.json
 
       # 32/64-bit MinGW builds.
       #
diff --git a/src/ci/azure-pipelines/steps/run.yml b/src/ci/azure-pipelines/steps/run.yml
index b8e32cf2cdfe3..cef2d235602f1 100644
--- a/src/ci/azure-pipelines/steps/run.yml
+++ b/src/ci/azure-pipelines/steps/run.yml
@@ -183,37 +183,21 @@ steps:
   condition: and(succeeded(), not(variables.SKIP_JOB))
   displayName: Run build
 
-# If we're a deploy builder, use the `aws` command to publish everything to our
-# bucket.
-- bash: |
-    set -e
-    source src/ci/shared.sh
-    if [ "$AGENT_OS" = "Linux" ]; then
-        rm -rf obj/build/dist/doc
-        upload_dir=obj/build/dist
-    else
-        rm -rf build/dist/doc
-        upload_dir=build/dist
-    fi
-    ls -la $upload_dir
-    deploy_dir=rustc-builds
-    if [ "$DEPLOY_ALT" == "1" ]; then
-        deploy_dir=rustc-builds-alt
-    fi
-    retry aws s3 cp --no-progress --recursive --acl public-read ./$upload_dir s3://$DEPLOY_BUCKET/$deploy_dir/$BUILD_SOURCEVERSION
+- bash: src/ci/scripts/upload-artifacts.sh
   env:
     AWS_ACCESS_KEY_ID: $(UPLOAD_AWS_ACCESS_KEY_ID)
     AWS_SECRET_ACCESS_KEY: $(UPLOAD_AWS_SECRET_ACCESS_KEY)
-  condition: and(succeeded(), not(variables.SKIP_JOB), or(eq(variables.DEPLOY, '1'), eq(variables.DEPLOY_ALT, '1')))
   displayName: Upload artifacts
-
-# Upload CPU usage statistics that we've been gathering this whole time. Always
-# execute this step in case we want to inspect failed builds, but don't let
-# errors here ever fail the build since this is just informational.
-- bash: aws s3 cp --acl public-read cpu-usage.csv s3://$DEPLOY_BUCKET/rustc-builds/$BUILD_SOURCEVERSION/cpu-$CI_JOB_NAME.csv
-  env:
-    AWS_ACCESS_KEY_ID: $(UPLOAD_AWS_ACCESS_KEY_ID)
-    AWS_SECRET_ACCESS_KEY: $(UPLOAD_AWS_SECRET_ACCESS_KEY)
-  condition: variables['UPLOAD_AWS_SECRET_ACCESS_KEY']
-  continueOnError: true
-  displayName: Upload CPU usage statistics
+  # Adding a condition on DEPLOY=1 or DEPLOY_ALT=1 is not needed as all deploy
+  # builders *should* have the AWS credentials available. Still, explicitly
+  # adding the condition is helpful as this way CI will not silently skip
+  # deploying artifacts from a dist builder if the variables are misconfigured,
+  # erroring about invalid credentials instead.
+  condition: |
+    and(
+      succeeded(), not(variables.SKIP_JOB),
+      or(
+        variables.UPLOAD_AWS_SECRET_ACCESS_KEY,
+        eq(variables.DEPLOY, '1'), eq(variables.DEPLOY_ALT, '1')
+      )
+    )
diff --git a/src/ci/docker/run.sh b/src/ci/docker/run.sh
index 415d6b63eb8dc..cdafcbadc9ec7 100755
--- a/src/ci/docker/run.sh
+++ b/src/ci/docker/run.sh
@@ -106,6 +106,7 @@ fi
 mkdir -p $HOME/.cargo
 mkdir -p $objdir/tmp
 mkdir -p $objdir/cores
+mkdir -p /tmp/toolstate
 
 args=
 if [ "$SCCACHE_BUCKET" != "" ]; then
@@ -156,6 +157,7 @@ else
   args="$args --volume $objdir:/checkout/obj"
   args="$args --volume $HOME/.cargo:/cargo"
   args="$args --volume $HOME/rustsrc:$HOME/rustsrc"
+  args="$args --volume /tmp/toolstate:/tmp/toolstate"
   args="$args --env LOCAL_USER_ID=`id -u`"
 fi
 
diff --git a/src/ci/docker/x86_64-gnu-tools/Dockerfile b/src/ci/docker/x86_64-gnu-tools/Dockerfile
index 8035195c6ed0a..7687a6ca23e18 100644
--- a/src/ci/docker/x86_64-gnu-tools/Dockerfile
+++ b/src/ci/docker/x86_64-gnu-tools/Dockerfile
@@ -26,5 +26,5 @@ ENV CHECK_LINKS 1
 
 ENV RUST_CONFIGURE_ARGS \
   --build=x86_64-unknown-linux-gnu \
-  --save-toolstates=/tmp/toolstates.json
-ENV SCRIPT /tmp/checktools.sh ../x.py /tmp/toolstates.json linux
+  --save-toolstates=/tmp/toolstate/toolstates.json
+ENV SCRIPT /tmp/checktools.sh ../x.py /tmp/toolstate/toolstates.json linux
diff --git a/src/ci/docker/x86_64-gnu-tools/checktools.sh b/src/ci/docker/x86_64-gnu-tools/checktools.sh
index 4243effdf9b4b..ebb8c0bda53ee 100755
--- a/src/ci/docker/x86_64-gnu-tools/checktools.sh
+++ b/src/ci/docker/x86_64-gnu-tools/checktools.sh
@@ -3,7 +3,7 @@
 set -eu
 
 X_PY="$1"
-TOOLSTATE_FILE="$(realpath $2)"
+TOOLSTATE_FILE="$(realpath -m $2)"
 OS="$3"
 COMMIT="$(git rev-parse HEAD)"
 CHANGED_FILES="$(git diff --name-status HEAD HEAD^)"
@@ -13,6 +13,7 @@ SIX_WEEK_CYCLE="$(( ($(date +%s) / 86400 - 20) % 42 ))"
 # The Wednesday after this has value 0.
 # We track this value to prevent regressing tools in the last week of the 6-week cycle.
 
+mkdir -p "$(dirname $TOOLSTATE_FILE)"
 touch "$TOOLSTATE_FILE"
 
 # Try to test all the tools and store the build/test success in the TOOLSTATE_FILE
diff --git a/src/ci/scripts/install-msys2-packages.sh b/src/ci/scripts/install-msys2-packages.sh
index 375f13f30b3ed..36d9202f7a2d3 100755
--- a/src/ci/scripts/install-msys2-packages.sh
+++ b/src/ci/scripts/install-msys2-packages.sh
@@ -8,14 +8,6 @@ source "$(cd "$(dirname "$0")" && pwd)/../shared.sh"
 if isWindows; then
     pacman -S --noconfirm --needed base-devel ca-certificates make diffutils tar
 
-    # FIXME(#65767): workaround msys bug, step 2
-    arch=i686
-    if [ "$MSYS_BITS" = "64" ]; then
-        arch=x86_64
-    fi
-    pacman -U --noconfirm --noprogressbar mingw-w64-$arch-ca-certificates-20180409-1-any.pkg.tar.xz
-    rm mingw-w64-$arch-ca-certificates-20180409-1-any.pkg.tar.xz
-
     # Make sure we use the native python interpreter instead of some msys equivalent
     # one way or another. The msys interpreters seem to have weird path conversions
     # baked in which break LLVM's build system one way or another, so let's use the
diff --git a/src/ci/scripts/install-msys2.sh b/src/ci/scripts/install-msys2.sh
index 8b631192ea2b4..ce37c3b146977 100755
--- a/src/ci/scripts/install-msys2.sh
+++ b/src/ci/scripts/install-msys2.sh
@@ -1,5 +1,4 @@
 #!/bin/bash
-# ignore-tidy-linelength
 # Download and install MSYS2, needed primarily for the test suite (run-make) but
 # also used by the MinGW toolchain for assembling things.
 #
@@ -13,13 +12,6 @@ IFS=$'\n\t'
 source "$(cd "$(dirname "$0")" && pwd)/../shared.sh"
 
 if isWindows; then
-    # FIXME(#65767): workaround msys bug, step 1
-    arch=i686
-    if [ "$MSYS_BITS" = "64" ]; then
-        arch=x86_64
-    fi
-    curl -O "${MIRRORS_BASE}/msys2-repo/mingw/$arch/mingw-w64-$arch-ca-certificates-20180409-1-any.pkg.tar.xz"
-
     choco install msys2 --params="/InstallDir:${SYSTEM_WORKFOLDER}/msys2 /NoPath" -y --no-progress
 
     mkdir -p "${SYSTEM_WORKFOLDER}/msys2/home/${USERNAME}"
diff --git a/src/ci/scripts/upload-artifacts.sh b/src/ci/scripts/upload-artifacts.sh
new file mode 100755
index 0000000000000..312ec9d805012
--- /dev/null
+++ b/src/ci/scripts/upload-artifacts.sh
@@ -0,0 +1,41 @@
+#!/bin/bash
+# Upload all the artifacts to our S3 bucket. All the files inside ${upload_dir}
+# will be uploaded to the deploy bucket and eventually signed and released in
+# static.rust-lang.org.
+
+set -euo pipefail
+IFS=$'\n\t'
+
+source "$(cd "$(dirname "$0")" && pwd)/../shared.sh"
+
+upload_dir="$(mktemp -d)"
+
+# Release tarballs produced by a dist builder.
+if [[ "${DEPLOY-0}" -eq "1" ]] || [[ "${DEPLOY_ALT-0}" -eq "1" ]]; then
+    dist_dir=build/dist
+    if isLinux; then
+        dist_dir=obj/build/dist
+    fi
+    rm -rf "${dist_dir}/doc"
+    cp -r "${dist_dir}"/* "${upload_dir}"
+fi
+
+# CPU usage statistics.
+cp cpu-usage.csv "${upload_dir}/cpu-${CI_JOB_NAME}.csv"
+
+# Toolstate data.
+if [[ -n "${DEPLOY_TOOLSTATES_JSON+x}" ]]; then + cp /tmp/toolstate/toolstates.json "${upload_dir}/${DEPLOY_TOOLSTATES_JSON}" +fi + +echo "Files that will be uploaded:" +ls -lah "${upload_dir}" +echo + +deploy_dir="rustc-builds" +if [[ "${DEPLOY_ALT-0}" -eq "1" ]]; then + deploy_dir="rustc-builds-alt" +fi +deploy_url="s3://${DEPLOY_BUCKET}/${deploy_dir}/$(ciCommit)" + +retry aws s3 cp --no-progress --recursive --acl public-read "${upload_dir}" "${deploy_url}" diff --git a/src/ci/shared.sh b/src/ci/shared.sh index 37e45b5639dc9..718a5379ae558 100644 --- a/src/ci/shared.sh +++ b/src/ci/shared.sh @@ -46,6 +46,10 @@ function getCIBranch { echo "$BUILD_SOURCEBRANCHNAME" } +function ciCommit { + echo "${BUILD_SOURCEVERSION}" +} + function ciCommandAddPath { if [[ $# -ne 1 ]]; then echo "usage: $0 " diff --git a/src/libcore/lib.rs b/src/libcore/lib.rs index 0819969b93360..1b67b05c73021 100644 --- a/src/libcore/lib.rs +++ b/src/libcore/lib.rs @@ -133,6 +133,7 @@ #[allow(unused)] use prelude::v1::*; +#[cfg(not(test))] // See #65860 #[macro_use] mod macros; @@ -180,10 +181,14 @@ pub mod hint; /* Core language traits */ +#[cfg(not(test))] // See #65860 pub mod marker; pub mod ops; +#[cfg(not(test))] // See #65860 pub mod cmp; +#[cfg(not(test))] // See #65860 pub mod clone; +#[cfg(not(test))] // See #65860 pub mod default; pub mod convert; pub mod borrow; @@ -191,6 +196,7 @@ pub mod borrow; /* Core types and methods on primitives */ pub mod any; +#[cfg(not(test))] // See #65860 pub mod array; pub mod ascii; pub mod sync; @@ -198,7 +204,9 @@ pub mod cell; pub mod char; pub mod panic; pub mod panicking; +#[cfg(not(test))] // See #65860 pub mod pin; +#[cfg(not(test))] // See #65860 pub mod iter; pub mod option; pub mod raw; @@ -206,14 +214,18 @@ pub mod result; pub mod ffi; pub mod slice; +#[cfg(not(test))] // See #65860 pub mod str; +#[cfg(not(test))] // See #65860 pub mod hash; +#[cfg(not(test))] // See #65860 pub mod fmt; pub mod time; pub mod unicode; /* Async */ +#[cfg(not(test))] // See #65860 pub mod future; pub mod task; diff --git a/src/librustc/traits/object_safety.rs b/src/librustc/traits/object_safety.rs index e42c3a63541cc..8ded1417ee570 100644 --- a/src/librustc/traits/object_safety.rs +++ b/src/librustc/traits/object_safety.rs @@ -520,9 +520,11 @@ impl<'tcx> TyCtxt<'tcx> { /// a pointer. /// /// In practice, we cannot use `dyn Trait` explicitly in the obligation because it would result - /// in a new check that `Trait` is object safe, creating a cycle. So instead, we fudge a little - /// by introducing a new type parameter `U` such that `Self: Unsize` and `U: Trait + ?Sized`, - /// and use `U` in place of `dyn Trait`. Written as a chalk-style query: + /// in a new check that `Trait` is object safe, creating a cycle (until object_safe_for_dispatch + /// is stabilized, see tracking issue https://github.com/rust-lang/rust/issues/43561). + /// Instead, we fudge a little by introducing a new type parameter `U` such that + /// `Self: Unsize` and `U: Trait + ?Sized`, and use `U` in place of `dyn Trait`. + /// Written as a chalk-style query: /// /// forall (U: Trait + ?Sized) { /// if (Self: Unsize) { @@ -556,8 +558,8 @@ impl<'tcx> TyCtxt<'tcx> { // the type `U` in the query // use a bogus type parameter to mimick a forall(U) query using u32::MAX for now. - // FIXME(mikeyhew) this is a total hack, and we should replace it when real forall queries - // are implemented + // FIXME(mikeyhew) this is a total hack. 
Once object_safe_for_dispatch is stabilized, we can + // replace this with `dyn Trait` let unsized_self_ty: Ty<'tcx> = self.mk_ty_param( ::std::u32::MAX, Symbol::intern("RustaceansAreAwesome"), diff --git a/src/librustc_data_structures/sync.rs b/src/librustc_data_structures/sync.rs index f09474ff4d344..d111471f53d7d 100644 --- a/src/librustc_data_structures/sync.rs +++ b/src/librustc_data_structures/sync.rs @@ -497,13 +497,15 @@ impl Once { /// If the value was already initialized the closure is not called and `false` is returned, /// otherwise if the value from the closure initializes the inner value, `true` is returned #[inline] - pub fn init_locking T>(&self, f: F) -> bool { - let mut lock = self.0.lock(); - if lock.is_some() { - return false; + pub fn init_locking T>(&self, f: F) -> &T { + { + let mut lock = self.0.lock(); + if lock.is_none() { + *lock = Some(f()); + } } - *lock = Some(f()); - true + + self.borrow() } /// Tries to initialize the inner value by calling the closure without ensuring that no-one diff --git a/src/librustc_metadata/creader.rs b/src/librustc_metadata/creader.rs index 234a5395047c6..540b06b3a8be9 100644 --- a/src/librustc_metadata/creader.rs +++ b/src/librustc_metadata/creader.rs @@ -3,7 +3,7 @@ use crate::cstore::{self, CStore, MetadataBlob}; use crate::locator::{self, CratePaths}; use crate::schema::{CrateRoot, CrateDep}; -use rustc_data_structures::sync::{RwLock, Lock, AtomicCell}; +use rustc_data_structures::sync::{Lock, Once, AtomicCell}; use rustc::hir::def_id::CrateNum; use rustc_data_structures::svh::Svh; @@ -249,7 +249,7 @@ impl<'a> CrateLoader<'a> { cnum_map, cnum, dependencies: Lock::new(dependencies), - source_map_import_info: RwLock::new(vec![]), + source_map_import_info: Once::new(), alloc_decoding_state: AllocDecodingState::new(interpret_alloc_index), dep_kind: Lock::new(dep_kind), source, diff --git a/src/librustc_metadata/cstore.rs b/src/librustc_metadata/cstore.rs index 6b06cf575edcf..8dfc921c95b3d 100644 --- a/src/librustc_metadata/cstore.rs +++ b/src/librustc_metadata/cstore.rs @@ -9,7 +9,7 @@ use rustc::middle::cstore::{CrateSource, DepKind, ExternCrate}; use rustc::mir::interpret::AllocDecodingState; use rustc_index::vec::IndexVec; use rustc::util::nodemap::FxHashMap; -use rustc_data_structures::sync::{Lrc, RwLock, Lock, MetadataRef, AtomicCell}; +use rustc_data_structures::sync::{Lrc, Lock, MetadataRef, Once, AtomicCell}; use syntax::ast; use syntax::edition::Edition; use syntax_expand::base::SyntaxExtension; @@ -62,7 +62,7 @@ crate struct CrateMetadata { /// Proc macro descriptions for this crate, if it's a proc macro crate. crate raw_proc_macros: Option<&'static [ProcMacro]>, /// Source maps for code from the crate. - crate source_map_import_info: RwLock>, + crate source_map_import_info: Once>, /// Used for decoding interpret::AllocIds in a cached & thread-safe manner. crate alloc_decoding_state: AllocDecodingState, /// The `DepNodeIndex` of the `DepNode` representing this upstream crate. 
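A note on the `Once::init_locking` change above: the method now returns a shared reference to the initialized value instead of a `bool`, which is what lets `imported_source_files` in the next file return `&[cstore::ImportedSourceFile]` directly rather than a `ReadGuard`. The same pattern can be sketched against today's `std::sync::OnceLock` instead of `rustc_data_structures::sync::Once`; the `CACHE`/`expensive`/`cached` names below are made up for illustration and are not part of the patch:

```rust
use std::sync::OnceLock;

fn expensive() -> Vec<u32> {
    // Stand-in for decoding the external source map: this runs at most once.
    vec![1, 2, 3]
}

fn cached() -> &'static [u32] {
    static CACHE: OnceLock<Vec<u32>> = OnceLock::new();
    // The first caller runs the closure and stores the result; every caller,
    // including the first, gets a plain shared reference back.
    CACHE.get_or_init(expensive)
}

fn main() {
    let first = cached();
    let second = cached();
    assert!(std::ptr::eq(first, second)); // same allocation, initialized once
}
```

Returning `&T` also removes the "borrow again after initializing" dance the old `RwLock`-based decoder code needed, as its own FIXME comment noted.
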
diff --git a/src/librustc_metadata/decoder.rs b/src/librustc_metadata/decoder.rs
index 0e6ecbbf0176a..c5954e1ea1d98 100644
--- a/src/librustc_metadata/decoder.rs
+++ b/src/librustc_metadata/decoder.rs
@@ -5,7 +5,7 @@ use crate::schema::*;
 use crate::table::{FixedSizeEncoding, PerDefTable};
 
 use rustc_index::vec::IndexVec;
-use rustc_data_structures::sync::{Lrc, ReadGuard};
+use rustc_data_structures::sync::Lrc;
 use rustc::hir::map::{DefKey, DefPath, DefPathData, DefPathHash};
 use rustc::hir;
 use rustc::middle::cstore::{LinkagePreference, NativeLibrary, ForeignModule};
@@ -664,7 +664,7 @@ impl<'a, 'tcx> CrateMetadata {
         tcx: TyCtxt<'tcx>,
     ) -> ty::GenericPredicates<'tcx> {
         self.root.per_def.predicates.get(self, item_id).unwrap().decode((self, tcx))
-}
+    }
 
     crate fn get_predicates_defined_on(
         &self,
@@ -1290,87 +1290,68 @@ impl<'a, 'tcx> CrateMetadata {
     fn imported_source_files(
         &'a self,
         local_source_map: &source_map::SourceMap,
-    ) -> ReadGuard<'a, Vec<cstore::ImportedSourceFile>> {
-        {
-            let source_files = self.source_map_import_info.borrow();
-            if !source_files.is_empty() {
-                return source_files;
-            }
-        }
-
-        // Lock the source_map_import_info to ensure this only happens once
-        let mut source_map_import_info = self.source_map_import_info.borrow_mut();
-
-        if !source_map_import_info.is_empty() {
-            drop(source_map_import_info);
-            return self.source_map_import_info.borrow();
-        }
-
-        let external_source_map = self.root.source_map.decode(self);
-
-        let imported_source_files = external_source_map.map(|source_file_to_import| {
-            // We can't reuse an existing SourceFile, so allocate a new one
-            // containing the information we need.
-            let syntax_pos::SourceFile { name,
-                                         name_was_remapped,
-                                         src_hash,
-                                         start_pos,
-                                         end_pos,
-                                         mut lines,
-                                         mut multibyte_chars,
-                                         mut non_narrow_chars,
-                                         mut normalized_pos,
-                                         name_hash,
-                                         .. } = source_file_to_import;
-
-            let source_length = (end_pos - start_pos).to_usize();
-
-            // Translate line-start positions and multibyte character
-            // position into frame of reference local to file.
-            // `SourceMap::new_imported_source_file()` will then translate those
-            // coordinates to their new global frame of reference when the
-            // offset of the SourceFile is known.
-            for pos in &mut lines {
-                *pos = *pos - start_pos;
-            }
-            for mbc in &mut multibyte_chars {
-                mbc.pos = mbc.pos - start_pos;
-            }
-            for swc in &mut non_narrow_chars {
-                *swc = *swc - start_pos;
-            }
-            for np in &mut normalized_pos {
-                np.pos = np.pos - start_pos;
-            }
-
-            let local_version = local_source_map.new_imported_source_file(name,
-                                                                          name_was_remapped,
-                                                                          self.cnum.as_u32(),
-                                                                          src_hash,
-                                                                          name_hash,
-                                                                          source_length,
-                                                                          lines,
-                                                                          multibyte_chars,
-                                                                          non_narrow_chars,
-                                                                          normalized_pos);
-            debug!("CrateMetaData::imported_source_files alloc \
-                    source_file {:?} original (start_pos {:?} end_pos {:?}) \
-                    translated (start_pos {:?} end_pos {:?})",
-                   local_version.name, start_pos, end_pos,
-                   local_version.start_pos, local_version.end_pos);
-
-            cstore::ImportedSourceFile {
-                original_start_pos: start_pos,
-                original_end_pos: end_pos,
-                translated_source_file: local_version,
-            }
-        }).collect();
-
-        *source_map_import_info = imported_source_files;
-        drop(source_map_import_info);
+    ) -> &[cstore::ImportedSourceFile] {
+        self.source_map_import_info.init_locking(|| {
+            let external_source_map = self.root.source_map.decode(self);
+
+            external_source_map.map(|source_file_to_import| {
+                // We can't reuse an existing SourceFile, so allocate a new one
+                // containing the information we need.
+                let syntax_pos::SourceFile { name,
+                                             name_was_remapped,
+                                             src_hash,
+                                             start_pos,
+                                             end_pos,
+                                             mut lines,
+                                             mut multibyte_chars,
+                                             mut non_narrow_chars,
+                                             mut normalized_pos,
+                                             name_hash,
+                                             .. } = source_file_to_import;
+
+                let source_length = (end_pos - start_pos).to_usize();
+
+                // Translate line-start positions and multibyte character
+                // position into frame of reference local to file.
+                // `SourceMap::new_imported_source_file()` will then translate those
+                // coordinates to their new global frame of reference when the
+                // offset of the SourceFile is known.
+                for pos in &mut lines {
+                    *pos = *pos - start_pos;
+                }
+                for mbc in &mut multibyte_chars {
+                    mbc.pos = mbc.pos - start_pos;
+                }
+                for swc in &mut non_narrow_chars {
+                    *swc = *swc - start_pos;
+                }
+                for np in &mut normalized_pos {
+                    np.pos = np.pos - start_pos;
+                }
 
-        // This shouldn't borrow twice, but there is no way to downgrade RefMut to Ref.
-        self.source_map_import_info.borrow()
+                let local_version = local_source_map.new_imported_source_file(name,
+                                                                              name_was_remapped,
+                                                                              self.cnum.as_u32(),
+                                                                              src_hash,
+                                                                              name_hash,
+                                                                              source_length,
+                                                                              lines,
+                                                                              multibyte_chars,
+                                                                              non_narrow_chars,
+                                                                              normalized_pos);
+                debug!("CrateMetaData::imported_source_files alloc \
+                        source_file {:?} original (start_pos {:?} end_pos {:?}) \
+                        translated (start_pos {:?} end_pos {:?})",
+                       local_version.name, start_pos, end_pos,
+                       local_version.start_pos, local_version.end_pos);
+
+                cstore::ImportedSourceFile {
+                    original_start_pos: start_pos,
+                    original_end_pos: end_pos,
+                    translated_source_file: local_version,
+                }
+            }).collect()
+        })
     }
 
     /// Get the `DepNodeIndex` corresponding this crate. The result of this
diff --git a/src/librustc_mir/interpret/operand.rs b/src/librustc_mir/interpret/operand.rs
index 4fd5e6a543551..ae23971849eea 100644
--- a/src/librustc_mir/interpret/operand.rs
+++ b/src/librustc_mir/interpret/operand.rs
@@ -19,7 +19,7 @@ use super::{
 };
 pub use rustc::mir::interpret::ScalarMaybeUndef;
 
-/// A `Value` represents a single immediate self-contained Rust value.
+/// An `Immediate` represents a single immediate self-contained Rust value.
 ///
 /// For optimization of a few very common cases, there is also a representation for a pair of
 /// primitive values (`ScalarPair`). It allows Miri to avoid making allocations for checked binary
diff --git a/src/librustc_resolve/error_codes.rs b/src/librustc_resolve/error_codes.rs
index 9937f27931fb7..b14913cd4fdde 100644
--- a/src/librustc_resolve/error_codes.rs
+++ b/src/librustc_resolve/error_codes.rs
@@ -1823,6 +1823,33 @@ trait Hello {
 ```
 "##,
 
+E0577: r##"
+Something other than a module was found in visibility scope.
+
+Erroneous code example:
+
+```compile_fail,E0577,edition2018
+pub struct Sea;
+
+pub (in crate::Sea) struct Shark; // error!
+
+fn main() {}
+```
+
+`Sea` is not a module, therefore it is invalid to use it in a visibility path.
+To fix this error we need to ensure `Sea` is a module.
+
+Please note that the visibility scope can only be applied on ancestors!
+
+```edition2018
+pub mod Sea {
+    pub (in crate::Sea) struct Shark; // ok!
+}
+
+fn main() {}
+```
+"##,
+
 E0603: r##"
 A private item was used outside its scope.
 
@@ -1990,6 +2017,5 @@ fn main() {}
 //  E0427, merged into 530
 //  E0467, removed
 //  E0470, removed
-    E0577,
     E0578,
 }
diff --git a/src/test/ui/resolve/resolve-bad-visibility.stderr b/src/test/ui/resolve/resolve-bad-visibility.stderr
index 43af38cf491e3..197ecf0cb0039 100644
--- a/src/test/ui/resolve/resolve-bad-visibility.stderr
+++ b/src/test/ui/resolve/resolve-bad-visibility.stderr
@@ -30,5 +30,5 @@ LL | pub(in too_soon) struct H;
 
 error: aborting due to 5 previous errors
 
-Some errors have detailed explanations: E0433, E0742.
+Some errors have detailed explanations: E0433, E0577, E0742.
 For more information about an error, try `rustc --explain E0433`.
diff --git a/src/test/ui/span/visibility-ty-params.stderr b/src/test/ui/span/visibility-ty-params.stderr
index c2f0711b0c866..d3fa1d7732e72 100644
--- a/src/test/ui/span/visibility-ty-params.stderr
+++ b/src/test/ui/span/visibility-ty-params.stderr
@@ -18,3 +18,4 @@ LL | m!{ m<> }
 
 error: aborting due to 3 previous errors
 
+For more information about this error, try `rustc --explain E0577`.
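The toolstate changes above all serve one goal: the JSON file now lives under `/tmp/toolstate/`, so both bootstrap and the shell scripts create that directory on demand (`create_dir_all` in `save_toolstate`, `realpath -m` plus `mkdir -p "$(dirname ...)"` in `checktools.sh`). A minimal standalone sketch of the same guard in Rust follows; the `save_state` helper and the demo path are hypothetical and not part of the patch:

```rust
use std::fs::{self, OpenOptions};
use std::io::Write;
use std::path::Path;

// Hypothetical helper mirroring the guard added to `Build::save_toolstate`:
// make sure the parent directory exists before opening the file for append,
// so a path like /tmp/toolstate/toolstates.json works on a fresh builder.
fn save_state(path: &Path, entry: &str) -> std::io::Result<()> {
    if let Some(parent) = path.parent() {
        fs::create_dir_all(parent)?; // no-op if the directory already exists
    }
    let mut file = OpenOptions::new().create(true).append(true).open(path)?;
    writeln!(file, "{}", entry)
}

fn main() -> std::io::Result<()> {
    let path = std::env::temp_dir().join("toolstate-demo").join("toolstates.json");
    save_state(&path, r#"{"clippy-driver": "test-pass"}"#)
}
```

Without the guard, the first write to `/tmp/toolstate/toolstates.json` would fail simply because the directory does not exist yet.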