diff --git a/rls/src/actions/diagnostics.rs b/rls/src/actions/diagnostics.rs index 0cd3a2a6512..18acc7f3e86 100644 --- a/rls/src/actions/diagnostics.rs +++ b/rls/src/actions/diagnostics.rs @@ -1,18 +1,8 @@ -// Copyright 2018 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - //! Conversion of raw rustc-emitted JSON messages into LSP diagnostics. //! //! Data definitions for diagnostics can be found in the Rust compiler for: -//! 1. Internal diagnostics at src/librustc_errors/diagnostic.rs. -//! 2. Emitted JSON format at src/libsyntax/json.rs. +//! 1. Internal diagnostics at `src/librustc_errors/diagnostic.rs`. +//! 2. Emitted JSON format at `src/libsyntax/json.rs`. use std::collections::HashMap; use std::iter; @@ -98,7 +88,8 @@ pub fn parse_diagnostics( let mut diagnostics = HashMap::new(); - // If the client doesn't support related information, emit separate diagnostics for secondary spans + // If the client doesn't support related information, emit separate diagnostics for + // secondary spans. let diagnostic_spans = if related_information_support { &primaries } else { &message.spans }; for (path, diagnostic) in diagnostic_spans.iter().map(|span| { @@ -131,8 +122,8 @@ pub fn parse_diagnostics( let rls_span = { let mut span = span; - // if span points to a macro, search through the expansions - // for a more useful source location + // If span points to a macro, search through the expansions + // for a more useful source location. while span.file_name.ends_with(" macros>") && span.expansion.is_some() { span = &span.expansion.as_ref().unwrap().span; } @@ -244,8 +235,8 @@ fn make_suggestions<'a>( .collect(); // Suggestions are displayed at primary span, so if the change is somewhere - // else, be sure to specify that - // TODO: In theory this can even point to different files - does that happen in practice? + // else, be sure to specify that. + // TODO: In theory this can even point to different files -- does that happen in practice? for suggestion in &mut suggestions { if !suggestion.range.is_within(&primary_range) { let line = suggestion.range.start.line + 1; // as 1-based @@ -275,7 +266,7 @@ fn label_suggestion(span: &DiagnosticSpan, label: &str) -> Option { trait IsWithin { /// Returns whether `other` is considered within `self` - /// note: a thing should be 'within' itself + /// NOTE: a thing should be 'within' itself. fn is_within(&self, other: &Self) -> bool; } @@ -304,9 +295,9 @@ impl IsWithin for Range { } } -/// Tests for formatted messages from the compilers json output -/// run cargo with `--message-format=json` to generate the json for new tests and add .json -/// message files to '$FIXTURES_DIR/compiler_message/' +/// Tests for formatted messages from the compiler's JSON output. +/// Runs cargo with `--message-format=json` to generate the JSON for new tests and add JSON +/// message files to the `$FIXTURES_DIR/compiler_message/` directory. #[cfg(test)] mod diagnostic_message_test { use super::*; @@ -332,7 +323,7 @@ mod diagnostic_message_test { pub(super) trait FileDiagnosticTestExt { fn single_file_results(&self) -> &Vec<(Diagnostic, Vec)>; - /// Returns (primary message, secondary messages) + /// Returns `(primary message, secondary messages)`. 
fn to_messages(&self) -> Vec<(String, Vec)>; fn to_primary_messages(&self) -> Vec; fn to_secondary_messages(&self) -> Vec; @@ -419,7 +410,7 @@ mod diagnostic_message_test { assert_eq!(messages[0].1, vec!["consider giving `v` a type", "cannot infer type for `T`"]); // Check if we don't emit related information if it's not supported and - // if secondary spans are emitted as separate diagnostics + // if secondary spans are emitted as separate diagnostics. let messages = parse_compiler_message( &read_fixture("compiler_message/type-annotations-needed.json"), false, @@ -477,7 +468,7 @@ mod diagnostic_message_test { cannot borrow mutably", ); - // note: consider message becomes a suggestion + // NOTE: 'consider' message becomes a suggestion. assert_eq!( messages[0].1, vec!["consider changing this to `mut string`", "cannot borrow mutably",] @@ -675,7 +666,7 @@ help: consider borrowing here: `&string`"#, } } -/// Tests for creating suggestions from the compilers json output +/// Tests for creating suggestions from the compilers JSON output. #[cfg(test)] mod diagnostic_suggestion_test { use self::diagnostic_message_test::*; diff --git a/rls/src/actions/format.rs b/rls/src/actions/format.rs index d66939c05a1..2976e91a96f 100644 --- a/rls/src/actions/format.rs +++ b/rls/src/actions/format.rs @@ -1,14 +1,4 @@ -// Copyright 2018 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! Code formatting using Rustfmt - by default using statically-linked one or +//! Code formatting using Rustfmt -- by default using statically-linked one or //! possibly running Rustfmt binary specified by the user. use std::env::temp_dir; @@ -22,12 +12,12 @@ use rand::{distributions, thread_rng, Rng}; use rustfmt_nightly::{Config, Input, Session}; use serde_json; -/// Specified which `rustfmt` to use. +/// Specifies which `rustfmt` to use. #[derive(Clone)] pub enum Rustfmt { - /// (Path to external `rustfmt`, cwd where it should be spawned at) + /// `(path to external `rustfmt`, current working directory to spawn at)` External(PathBuf, PathBuf), - /// Statically linked `rustfmt` + /// Statically linked `rustfmt`. Internal, } @@ -90,7 +80,8 @@ fn format_internal(input: String, config: Config) -> Result { match session.format(Input::Text(input)) { Ok(report) => { - // Session::format returns Ok even if there are any errors, i.e., parsing errors. + // `Session::format` returns `Ok` even if there are any errors, i.e., parsing + // errors. if session.has_operational_errors() || session.has_parsing_errors() { debug!("reformat: format_input failed: has errors, report = {}", report); diff --git a/rls/src/actions/hover.rs b/rls/src/actions/hover.rs index c57dddfcd8a..82f1551cdae 100644 --- a/rls/src/actions/hover.rs +++ b/rls/src/actions/hover.rs @@ -1,21 +1,7 @@ -// Copyright 2017 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. 
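The `IsWithin` trait in diagnostics.rs above encodes range containment with the convention that a range counts as being within itself. A minimal, self-contained sketch of that convention follows; `Pos` and `TextRange` are made-up stand-ins for the LSP/rls-span types the RLS actually uses, so this is an illustration rather than the real implementation.

```rust
// Derived ordering is lexicographic: first by line, then by character.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
struct Pos {
    line: u64,
    character: u64,
}

struct TextRange {
    start: Pos,
    end: Pos,
}

impl TextRange {
    /// Returns `true` if `other` starts no earlier and ends no later than
    /// `self`; by this definition a range is always within itself.
    fn contains(&self, other: &TextRange) -> bool {
        self.start <= other.start && other.end <= self.end
    }
}

fn main() {
    let primary = TextRange {
        start: Pos { line: 3, character: 0 },
        end: Pos { line: 5, character: 10 },
    };
    let suggestion = TextRange {
        start: Pos { line: 4, character: 2 },
        end: Pos { line: 4, character: 8 },
    };

    assert!(primary.contains(&primary)); // a thing is 'within' itself
    assert!(primary.contains(&suggestion));
    assert!(!suggestion.contains(&primary));
}
```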
- -use crate::actions::format::Rustfmt; -use crate::actions::requests; -use crate::actions::InitActionContext; -use crate::config::FmtConfig; -use crate::lsp_data::*; -use crate::server::ResponseError; +use std::path::{Path, PathBuf}; use home; +use log::*; use racer; use rls_analysis::{Def, DefKind}; use rls_span::{Column, Range, Row, Span, ZeroIndexed}; @@ -23,8 +9,12 @@ use rls_vfs::{self as vfs, Vfs}; use rustfmt_nightly::NewlineStyle; use serde_derive::{Deserialize, Serialize}; -use log::*; -use std::path::{Path, PathBuf}; +use crate::actions::format::Rustfmt; +use crate::actions::requests; +use crate::actions::InitActionContext; +use crate::config::FmtConfig; +use crate::lsp_data::*; +use crate::server::ResponseError; #[derive(Debug, Deserialize, Serialize, PartialEq, Eq)] pub struct Tooltip { @@ -63,7 +53,7 @@ pub fn process_docs(docs: &str) -> String { line.to_string() }; - // Racer sometimes pulls out comment block headers from the standard library + // Racer sometimes pulls out comment block headers from the standard library. let ignore_slashes = line.starts_with("////"); let maybe_attribute = trimmed.starts_with("#[") || trimmed.starts_with("#!["); @@ -124,7 +114,7 @@ pub fn extract_docs( let attr_start = line.starts_with("#[") || line.starts_with("#!["); if attr_start && line.ends_with(']') && !hit_top { - // Ignore single line attributes + // Ignore single-line attributes. trace!( "extract_docs: ignoring single-line attribute, next_row: {:?}, up: {}", next_row, @@ -134,7 +124,7 @@ pub fn extract_docs( } // Continue with the next line when transitioning out of a - // multi-line attribute + // multi-line attribute. if attr_start || (line.ends_with(']') && !line.starts_with("//")) { in_meta = !in_meta; if !in_meta && !hit_top { @@ -148,7 +138,7 @@ pub fn extract_docs( } if !hit_top && in_meta { - // Ignore milti-line attributes + // Ignore milti-line attributes. trace!( "extract_docs: ignoring multi-line attribute, next_row: {:?}, up: {}, in_meta: {}", next_row, @@ -181,7 +171,7 @@ pub fn extract_docs( } if hit_top { - // The top of the file was reached + // The top of the file was reached. debug!( "extract_docs: bailing out: prev_row == next_row; next_row = {:?}, up = {}", next_row, up @@ -224,8 +214,7 @@ fn extract_and_process_docs(vfs: &Vfs, file: &Path, row_start: Row) .and_then(empty_to_none) } -/// Extracts a function, method, struct, enum, or trait declaration -/// from source. +/// Extracts a function, method, struct, enum, or trait declaration from source. pub fn extract_decl( vfs: &Vfs, file: &Path, @@ -328,10 +317,10 @@ fn tooltip_struct_enum_union_trait( let vfs = ctx.vfs.clone(); let fmt_config = ctx.fmt_config(); - // We hover often so use the in-process one to speed things up + // We hover often, so use the in-process one to speed things up. let fmt = Rustfmt::Internal; - // fallback in case source extration fails + // Fallback in case source extration fails. let the_type = || match def.kind { DefKind::Struct => format!("struct {}", def.name), DefKind::Enum => format!("enum {}", def.name), @@ -383,7 +372,7 @@ fn tooltip_function_method( let vfs = ctx.vfs.clone(); let fmt_config = ctx.fmt_config(); - // We hover often so use the in-process one to speed things up + // We hover often, so use the in-process one to speed things up. let fmt = Rustfmt::Internal; let the_type = || { @@ -471,7 +460,7 @@ fn empty_to_none(s: String) -> Option { } } -/// Extract and process source documentation for the give `def`. 
+/// Extracts and processes source documentation for the give `def`. fn def_docs(def: &Def, vfs: &Vfs) -> Option { let save_analysis_docs = || empty_to_none(def.docs.trim().into()); extract_and_process_docs(&vfs, def.span.file.as_ref(), def.span.range.row_start) @@ -555,7 +544,7 @@ fn skip_path_components>( }) } -/// Collapse parent directory references inside of paths. +/// Collapses parent directory references inside of paths. /// /// # Example /// @@ -636,12 +625,12 @@ fn racer_match_to_def(ctx: &InitActionContext, m: &racer::Match) -> Option let contextstr_path = PathBuf::from(&contextstr); let contextstr_path = collapse_parents(contextstr_path); - // Tidy up the module path - // Skips toolchains/$TOOLCHAIN/lib/rustlib/src/rust/src + // Tidy up the module path. + // Skips `toolchains/$TOOLCHAIN/lib/rustlib/src/rust/src`. skip_path_components(&contextstr_path, rustup_home, 7) - // Skips /registry/src/github.com-1ecc6299db9ec823/ + // Skips `/registry/src/github.com-1ecc6299db9ec823/`. .or_else(|| skip_path_components(&contextstr_path, cargo_home, 3)) - // Make the path relative to the root of the project, if possible + // Make the path relative to the root of the project, if possible. .or_else(|| { contextstr_path.strip_prefix(&ctx.current_project).ok().map(|x| x.to_owned()) }) @@ -692,7 +681,7 @@ fn racer_match_to_def(ctx: &InitActionContext, m: &racer::Match) -> Option }) } -/// Use racer to synthesize a `Def` for the given `span`. If no appropriate +/// Uses racer to synthesize a `Def` for the given `span`. If no appropriate /// match is found with coordinates, `None` is returned. fn racer_def(ctx: &InitActionContext, span: &Span) -> Option { let vfs = ctx.vfs.clone(); @@ -721,7 +710,7 @@ fn racer_def(ctx: &InitActionContext, span: &Span) -> Option { let racer_match = racer::find_definition(file_path, location, &session); trace!("racer_def: match: {:?}", racer_match); racer_match - // Avoid creating tooltip text that is exactly the item being hovered over + // Avoid creating tooltip text that is exactly the item being hovered over. .filter(|m| name.as_ref().map(|name| name != &m.contextstr).unwrap_or(true)) .and_then(|m| racer_match_to_def(ctx, &m)) }); @@ -741,7 +730,7 @@ fn format_object(rustfmt: Rustfmt, fmt_config: &FmtConfig, the_type: String) -> config.set().newline_style(NewlineStyle::Unix); let trimmed = the_type.trim(); - // Normalize the ending for rustfmt + // Normalize the ending for rustfmt. let object = if trimmed.ends_with(')') { format!("{};", trimmed) } else if trimmed.ends_with('}') || trimmed.ends_with(';') { @@ -765,7 +754,7 @@ fn format_object(rustfmt: Rustfmt, fmt_config: &FmtConfig, the_type: String) -> }; // If it's a tuple, remove the trailing ';' and hide non-pub components - // for pub types + // for pub types. let result = if formatted.trim().ends_with(';') { let mut decl = formatted.trim().trim_end_matches(';'); if let (Some(pos), true) = (decl.rfind('('), decl.ends_with(')')) { @@ -789,11 +778,11 @@ fn format_object(rustfmt: Rustfmt, fmt_config: &FmtConfig, the_type: String) -> decl.to_string() } } else { - // not a tuple + // Not a tuple. decl.into() } } else { - // not a tuple or unit struct + // Not a tuple or unit struct. formatted }; @@ -865,8 +854,7 @@ pub fn tooltip( let racer_fallback_enabled = ctx.config.lock().unwrap().racer_completion; - // Fallback to racer if the def was not available and - // racer is enabled. + // Fallback to racer if the def was not available and racer is enabled. 
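hover.rs above tidies racer-provided paths with `skip_path_components` and `collapse_parents` before displaying them. The following is a rough sketch of what collapsing `..` (and `.`) components can look like using `std::path::Component`; it is illustrative only and, for simplicity, silently drops a leading `..` that would escape the root.

```rust
use std::path::{Component, PathBuf};

// Rough sketch of collapsing parent-directory references ("..") in a path,
// in the spirit of `collapse_parents` above. For each ".." it pops the
// previously pushed component; "." components are ignored.
fn collapse_parents(path: PathBuf) -> PathBuf {
    let mut out = PathBuf::new();
    for component in path.components() {
        match component {
            Component::ParentDir => {
                // Drop the last pushed component, if any.
                out.pop();
            }
            Component::CurDir => {}
            other => out.push(other.as_os_str()),
        }
    }
    out
}

fn main() {
    let p = PathBuf::from("/home/user/.rustup/toolchains/stable/lib/../lib/rustlib/./src");
    assert_eq!(
        collapse_parents(p),
        PathBuf::from("/home/user/.rustup/toolchains/stable/lib/rustlib/src")
    );
}
```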
let hover_span_def = hover_span_def.or_else(|e| { debug!("tooltip: racer_fallback_enabled: {}", racer_fallback_enabled); if racer_fallback_enabled { diff --git a/rls/src/actions/mod.rs b/rls/src/actions/mod.rs index e21dd86d2d8..b839b8b99f6 100644 --- a/rls/src/actions/mod.rs +++ b/rls/src/actions/mod.rs @@ -365,12 +365,12 @@ impl InitActionContext { self.build_queue.block_on_build(); } - /// Returns true if there are no builds pending or in progress. + /// Returns `true` if there are no builds pending or in progress. fn build_ready(&self) -> bool { self.build_queue.build_ready() } - /// Returns true if there are no builds or post-build (analysis) tasks pending + /// Returns `true` if there are no builds or post-build (analysis) tasks pending /// or in progress. fn analysis_ready(&self) -> bool { self.active_build_count.load(Ordering::SeqCst) == 0 diff --git a/rls/src/actions/notifications.rs b/rls/src/actions/notifications.rs index 485bf8c49eb..c0801019f30 100644 --- a/rls/src/actions/notifications.rs +++ b/rls/src/actions/notifications.rs @@ -1,13 +1,3 @@ -// Copyright 2017 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - //! One-way notifications that the RLS receives from the client. use crate::actions::{FileWatch, InitActionContext, VersionOrdering}; diff --git a/rls/src/actions/post_build.rs b/rls/src/actions/post_build.rs index 6b2d6f4e6f1..3af0a7c8930 100644 --- a/rls/src/actions/post_build.rs +++ b/rls/src/actions/post_build.rs @@ -1,13 +1,3 @@ -// Copyright 2017 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - //! Post-build processing of data. #![allow(missing_docs)] diff --git a/rls/src/actions/progress.rs b/rls/src/actions/progress.rs index e87fc9a4175..1617bdb6e47 100644 --- a/rls/src/actions/progress.rs +++ b/rls/src/actions/progress.rs @@ -1,13 +1,3 @@ -// Copyright 2018 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - use std::sync::atomic::{AtomicUsize, Ordering}; use crate::lsp_data::{ @@ -17,20 +7,20 @@ use crate::server::{Notification, Output}; use lazy_static::lazy_static; use lsp_types::notification::{PublishDiagnostics, ShowMessage}; -/// Trait for communication of build progress back to the client. +/// Communication of build progress back to the client. pub trait ProgressNotifier: Send { fn notify_begin_progress(&self); fn notify_progress(&self, update: ProgressUpdate); fn notify_end_progress(&self); } -/// Kinds of progress updates +/// Kinds of progress updates. pub enum ProgressUpdate { Message(String), Percentage(f64), } -/// Trait for communication of diagnostics (i.e. 
build results) back to the rest of +/// Trait for communication of diagnostics (i.e., build results) back to the rest of /// the RLS (and on to the client). // This trait only really exists to work around the object safety rules (Output // is not object-safe). @@ -41,9 +31,9 @@ pub trait DiagnosticsNotifier: Send { fn notify_end_diagnostics(&self); } -/// Generate a new progress params with a unique ID and the given title. +/// Generates a new progress params with a unique ID and the given title. fn new_progress_params(title: String) -> ProgressParams { - // counter to generate unique ID for each chain-of-progress notifications. + // Counter to generate unique IDs for each chain-of-progress notification. lazy_static! { static ref PROGRESS_ID_COUNTER: AtomicUsize = { AtomicUsize::new(0) }; } @@ -61,7 +51,7 @@ fn new_progress_params(title: String) -> ProgressParams { /// the same instance is used for the entirety of one single build. pub struct BuildProgressNotifier { out: O, - // these params are used as a template and are cloned for each + // These params are used as a template and are cloned for each // message that is actually notified. progress_params: ProgressParams, } @@ -95,7 +85,7 @@ impl ProgressNotifier for BuildProgressNotifier { /// Notifier of diagnostics after the build has completed. pub struct BuildDiagnosticsNotifier { out: O, - // these params are used as a template and are cloned for each + // These params are used as a template, and are cloned for each // message that is actually notified. progress_params: ProgressParams, } diff --git a/rls/src/actions/requests.rs b/rls/src/actions/requests.rs index f141fda67f7..3a0575bcdc6 100644 --- a/rls/src/actions/requests.rs +++ b/rls/src/actions/requests.rs @@ -1,19 +1,14 @@ -// Copyright 2017 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - //! Requests that the RLS can respond to. 
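progress.rs above derives a unique ID for each chain of progress notifications from a `lazy_static` `AtomicUsize` counter. A minimal sketch of just that pattern, using the same `lazy_static` crate the diff already depends on; the `ProgressParams` construction is omitted, and the `progress_` prefix plus the `next_progress_id` name are assumptions made here, not RLS API.

```rust
use std::sync::atomic::{AtomicUsize, Ordering};

use lazy_static::lazy_static;

lazy_static! {
    // Counter shared by all progress chains; each chain gets a fresh ID.
    static ref PROGRESS_ID_COUNTER: AtomicUsize = AtomicUsize::new(0);
}

/// Returns a process-unique ID such as "progress_0", "progress_1", ...
/// (the "progress_" prefix is an assumption for this sketch).
fn next_progress_id() -> String {
    let id = PROGRESS_ID_COUNTER.fetch_add(1, Ordering::SeqCst);
    format!("progress_{}", id)
}

fn main() {
    let a = next_progress_id();
    let b = next_progress_id();
    assert_ne!(a, b);
    println!("{} {}", a, b);
}
```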
-use crate::actions::InitActionContext; +use std::collections::HashMap; +use std::path::Path; +use std::sync::atomic::Ordering; + use itertools::Itertools; +use jsonrpc_core::types::ErrorCode; use log::{debug, trace, warn}; use racer; +use rls_analysis::SymbolQuery; use rls_data as data; use rls_span as span; use rls_vfs::FileContents; @@ -22,16 +17,12 @@ use serde_derive::{Deserialize, Serialize}; use serde_json; use url::Url; +use crate::actions::InitActionContext; use crate::actions::hover; use crate::actions::run::collect_run_actions; use crate::build::Edition; use crate::lsp_data; use crate::lsp_data::*; -use crate::server; -use crate::server::{Ack, Output, Request, RequestAction, ResponseError, ResponseWithMessage}; -use jsonrpc_core::types::ErrorCode; -use rls_analysis::SymbolQuery; - use crate::lsp_data::request::ApplyWorkspaceEdit; pub use crate::lsp_data::request::{ CodeActionRequest as CodeAction, CodeLensRequest, Completion, @@ -40,20 +31,18 @@ pub use crate::lsp_data::request::{ HoverRequest as Hover, RangeFormatting, References, Rename, ResolveCompletionItem as ResolveCompletion, WorkspaceSymbol, }; +use crate::server; +use crate::server::{Ack, Output, Request, RequestAction, ResponseError, ResponseWithMessage}; -use std::collections::HashMap; -use std::path::Path; -use std::sync::atomic::Ordering; - -/// Represent the result of a deglob action for a single wildcard import. +/// The result of a deglob action for a single wildcard import. /// /// The `location` is the position of the wildcard. /// `new_text` is the text which should replace the wildcard. #[derive(Debug, Deserialize, Serialize)] pub struct DeglobResult { - /// Location of the "*" character in a wildcard import + /// The `Location` of the "*" character in a wildcard import. pub location: Location, - /// Replacement text + /// The replacement text. pub new_text: String, } @@ -492,15 +481,15 @@ fn apply_deglobs( Ok(ApplyWorkspaceEditParams { edit }) } -/// Create `CodeActions` for fixes suggested by the compiler -/// the results are appended to `code_actions_result` +/// Creates `CodeAction`s for fixes suggested by the compiler. +/// The results are appended to `code_actions_result`. fn make_suggestion_fix_actions( params: &::Params, file_path: &Path, ctx: &InitActionContext, code_actions_result: &mut ::Response, ) { - // search for compiler suggestions + // Search for compiler suggestions. if let Some(results) = ctx.previous_build_results.lock().unwrap().get(file_path) { let suggestions = results .iter() @@ -520,40 +509,39 @@ fn make_suggestion_fix_actions( } } -/// Create `CodeActions` for performing deglobbing when a wildcard import is found -/// the results are appended to `code_actions_result` +/// Creates `CodeAction`s for performing deglobbing when a wildcard import is found. +/// The results are appended to `code_actions_result`. fn make_deglob_actions( params: &::Params, file_path: &Path, ctx: &InitActionContext, code_actions_result: &mut ::Response, ) { - // search for a glob in the line + // Search for a glob in the line. if let Ok(line) = ctx.vfs.load_line(file_path, ls_util::range_to_rls(params.range).row_start) { let span = Location::new(params.text_document.uri.clone(), params.range); - // for all indices which are a `*` - // check if we can deglob them - // this handles badly formatted text containing multiple "use"s in one line + // For all indices that are a `*`, check if we can deglob them. + // This handles badly-formatted text containing multiple `use`s in one line. 
let deglob_results: Vec<_> = line .char_indices() .filter(|&(_, chr)| chr == '*') .filter_map(|(index, _)| { - // map the indices to `Span`s + // Map the indices to `Span`s. let mut span = ls_util::location_to_rls(&span).unwrap(); span.range.col_start = span::Column::new_zero_indexed(index as u32); span.range.col_end = span::Column::new_zero_indexed(index as u32 + 1); - // load the deglob type information + // Load the deglob type information. ctx.analysis.show_type(&span).ok().map(|ty| (ty, span)) }) .map(|(mut deglob_str, span)| { - // Handle multiple imports from one * + // Handle multiple imports from one `*`. if deglob_str.contains(',') || deglob_str.is_empty() { deglob_str = format!("{{{}}}", sort_deglob_str(°lob_str)); } - // Build result + // Build result. let deglob_result = DeglobResult { location: ls_util::rls_to_location(&span), new_text: deglob_str, @@ -582,7 +570,7 @@ fn sort_deglob_str(s: &str) -> String { substrings.sort_by(|a, b| { use std::cmp::Ordering; - // Algorithm taken from rustfmt (rustfmt/src/imports.rs) + // Algorithm taken from rustfmt (`rustfmt/src/imports.rs`). let is_upper_snake_case = |s: &str| s.chars().all(|c| c.is_uppercase() || c == '_' || c.is_numeric()); @@ -740,7 +728,7 @@ fn reformat( .map_err(|msg| ResponseError::Message(ErrorCode::InternalError, msg))?; // Note that we don't need to update the VFS, the client echos back the - // change to us when it applies the returned TextEdit. + // change to us when it applies the returned `TextEdit`. if !ctx.quiescent.load(Ordering::SeqCst) { return Err(ResponseError::Message( @@ -762,9 +750,9 @@ impl RequestAction for ResolveCompletion { } fn handle(_: InitActionContext, params: Self::Params) -> Result { - // currently, we safely ignore this as a pass-through since we fully handle - // textDocument/completion. In the future, we may want to use this method as a - // way to more lazily fill out completion information + // Currently, we safely ignore this as a pass-through since we fully handle + // `textDocument/completion`. In the future, we may want to use this method as a + // way to more lazily fill out completion information. Ok(params) } } diff --git a/rls/src/actions/work_pool.rs b/rls/src/actions/work_pool.rs index a3fa5749967..5246bbd8b0f 100644 --- a/rls/src/actions/work_pool.rs +++ b/rls/src/actions/work_pool.rs @@ -7,8 +7,8 @@ use std::time::{Duration, Instant}; use std::{fmt, panic}; /// Description of work on the request work pool. Equality implies two pieces of work are the same -/// kind of thing. The `str` should be human readable for logging, ie the language server protocol -/// request message name or similar. +/// kind of thing. The `str` should be human readable for logging (e.g., the language server +/// protocol request message name or similar). #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct WorkDescription(pub &'static str); diff --git a/rls/src/build/cargo.rs b/rls/src/build/cargo.rs index 3f8d7eda882..3ebb3480720 100644 --- a/rls/src/build/cargo.rs +++ b/rls/src/build/cargo.rs @@ -1,12 +1,13 @@ -// Copyright 2017 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. 
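`make_deglob_actions` above scans a line for `*` characters and builds a one-character replacement range for each before asking the analysis for the expanded import. Below is a self-contained sketch of just the scanning step; `DeglobCandidate` and `glob_candidates` are invented names, and the analysis lookup (`show_type`) is left out.

```rust
/// A made-up stand-in for the `DeglobResult` built in `make_deglob_actions`.
#[derive(Debug, PartialEq)]
struct DeglobCandidate {
    /// Zero-indexed column range of the `*` character.
    col_start: u32,
    col_end: u32,
}

/// Finds every `*` in `line` and returns a one-character-wide range for each,
/// mirroring the `char_indices` + `filter` scan shown above. This copes with
/// badly formatted lines containing several `use ...::*;` items.
fn glob_candidates(line: &str) -> Vec<DeglobCandidate> {
    line.char_indices()
        .filter(|&(_, chr)| chr == '*')
        .map(|(index, _)| DeglobCandidate {
            col_start: index as u32,
            col_end: index as u32 + 1,
        })
        .collect()
}

fn main() {
    let line = "use foo::*; use bar::*;";
    let found = glob_candidates(line);
    assert_eq!(
        found,
        vec![
            DeglobCandidate { col_start: 9, col_end: 10 },
            DeglobCandidate { col_start: 21, col_end: 22 },
        ]
    );
}
```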
+use std::collections::{BTreeMap, HashMap, HashSet}; +use std::env; +use std::ffi::OsString; +use std::fmt::{self, Write}; +use std::fs::{read_dir, remove_file}; +use std::path::{Path, PathBuf}; +use std::sync::atomic::{AtomicBool, Ordering}; +use std::sync::mpsc::Sender; +use std::sync::{Arc, Mutex}; +use std::thread; use cargo::core::compiler::{BuildConfig, CompileMode, Context, Executor, Unit}; use cargo::core::resolver::ResolveError; @@ -19,6 +20,9 @@ use cargo::util::{ ConfigValue, ProcessBuilder, }; use failure::{self, format_err, Fail}; +use log::{debug, trace, warn}; +use rls_data::Analysis; +use rls_vfs::Vfs; use serde_json; use crate::actions::progress::ProgressUpdate; @@ -28,20 +32,6 @@ use crate::build::plan::{BuildPlan, Crate}; use crate::build::{BufWriter, BuildResult, CompilationContext, Internals, PackageArg}; use crate::config::Config; use crate::lsp_data::{Position, Range}; -use log::{debug, trace, warn}; -use rls_data::Analysis; -use rls_vfs::Vfs; - -use std::collections::{BTreeMap, HashMap, HashSet}; -use std::env; -use std::ffi::OsString; -use std::fmt::{self, Write}; -use std::fs::{read_dir, remove_file}; -use std::path::{Path, PathBuf}; -use std::sync::atomic::{AtomicBool, Ordering}; -use std::sync::mpsc::Sender; -use std::sync::{Arc, Mutex}; -use std::thread; // Runs an in-process instance of Cargo. pub(super) fn cargo( @@ -114,8 +104,8 @@ fn run_cargo( progress_sender: Sender, ) -> Result { // Lock early to guarantee synchronized access to env var for the scope of Cargo routine. - // Additionally we need to pass inner lock to RlsExecutor, since it needs to hand it down - // during exec() callback when calling linked compiler in parallel, for which we need to + // Additionally we need to pass inner lock to `RlsExecutor`, since it needs to hand it down + // during `exec()` callback when calling linked compiler in parallel, for which we need to // guarantee consistent environment variables. let (lock_guard, inner_lock) = env_lock.lock(); let restore_env = Environment::push_with_lock(&HashMap::new(), None, lock_guard); @@ -181,8 +171,8 @@ fn run_cargo_ws( PackageArg::Packages(pkgs) => (false, pkgs.into_iter().collect()), }; - // TODO: It might be feasible to keep this CargoOptions structure cached and regenerate - // it on every relevant configuration change + // TODO: it might be feasible to keep this `CargoOptions` structure cached and regenerate + // it on every relevant configuration change. let (opts, rustflags, clear_env_rust_log, cfg_test) = { // We mustn't lock configuration for the whole build process let rls_config = rls_config.lock().unwrap(); @@ -193,7 +183,9 @@ fn run_cargo_ws( for package in &packages { if ws.members().find(|x| *x.name() == *package).is_none() { - warn!("cargo - couldn't find member package `{}` specified in `analyze_package` configuration", package); + warn!("couldn't find member package `{}` specified in `analyze_package` \ + configuration", + package); } } @@ -204,9 +196,9 @@ fn run_cargo_ws( let pkg_names = spec.to_package_id_specs(&ws)?.iter().map(|pkg_spec| pkg_spec.name().to_owned()).collect(); - trace!("Specified packages to be built by Cargo: {:#?}", pkg_names); + trace!("specified packages to be built by Cargo: {:#?}", pkg_names); - // Since Cargo build routine will try to regenerate the unit dep graph, + // Since the Cargo build routine will try to regenerate the unit dep graph, // we need to clear the existing dep graph. 
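`run_cargo` above takes an environment lock and an `Environment::push_with_lock` handle so that variables such as `RUSTFLAGS` are set only for the duration of the Cargo routine and restored afterwards. The guard below is a hypothetical simplification of that idea (no locking, plain `std::env`), not the RLS `Environment` type; mutating process-wide variables like this is only sound when access is serialized, which is exactly what the lock in the real code provides.

```rust
use std::env;

/// Hypothetical RAII guard: sets a variable for the scope of the guard and
/// restores the previous value (or removes the variable) on drop.
struct ScopedEnv {
    key: String,
    previous: Option<String>,
}

impl ScopedEnv {
    fn push_var(key: &str, value: Option<&str>) -> ScopedEnv {
        let previous = env::var(key).ok();
        match value {
            Some(v) => env::set_var(key, v),
            None => env::remove_var(key),
        }
        ScopedEnv { key: key.to_owned(), previous }
    }
}

impl Drop for ScopedEnv {
    fn drop(&mut self) {
        match &self.previous {
            Some(v) => env::set_var(&self.key, v),
            None => env::remove_var(&self.key),
        }
    }
}

fn main() {
    {
        let _rustflags = ScopedEnv::push_var("RUSTFLAGS", Some("--cfg test"));
        // ... run the in-process Cargo routine here ...
        assert_eq!(env::var("RUSTFLAGS").as_deref(), Ok("--cfg test"));
    }
    // The previous value is restored (or the variable removed) once the
    // guard is dropped.
}
```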
compilation_cx.lock().unwrap().build_plan = BuildPlan::Cargo(CargoPlan::with_packages(manifest_path, pkg_names)); @@ -217,9 +209,10 @@ fn run_cargo_ws( opts.lib, opts.bin, opts.bins, - // TODO: Support more crate target types + // TODO: support more crate target types. Vec::new(), - cfg_test, // Check all integration tests under tests/ + // Check all integration tests under `tests/`. + cfg_test, Vec::new(), false, Vec::new(), @@ -239,7 +232,7 @@ fn run_cargo_ws( }; // Create a custom environment for running cargo, the environment is reset - // afterwards automatically + // afterwards automatically. restore_env.push_var("RUSTFLAGS", &Some(rustflags.into())); if clear_env_rust_log { @@ -265,22 +258,22 @@ fn run_cargo_ws( match compile_with_exec(&ws, &compile_opts, &exec) { Ok(_) => { trace!( - "Created build plan after Cargo compilation routine: {:?}", + "created build plan after Cargo compilation routine: {:?}", compilation_cx.lock().unwrap().build_plan ); } Err(e) => { if !reached_primary.load(Ordering::SeqCst) { - debug!("Error running compile_with_exec: {:?}", e); + debug!("error running `compile_with_exec`: {:?}", e); return Err(e); } else { - warn!("Ignoring error running compile_with_exec: {:?}", e); + warn!("ignoring error running `compile_with_exec`: {:?}", e); } } } if !reached_primary.load(Ordering::SeqCst) { - return Err(format_err!("Error compiling dependent crate")); + return Err(format_err!("error compiling dependent crate")); } Ok(compilation_cx @@ -301,10 +294,10 @@ struct RlsExecutor { vfs: Arc, analysis: Arc>>, /// Packages which are directly a member of the workspace, for which - /// analysis and diagnostics will be provided + /// analysis and diagnostics will be provided. member_packages: Mutex>, input_files: Arc>>>, - /// JSON compiler messages emitted for each primary compiled crate + /// JSON compiler messages emitted for each primary compiled crate. compiler_messages: Arc>>, progress_sender: Mutex>, /// Set to true if attempt to compile a primary crate. If we don't track @@ -344,7 +337,7 @@ impl RlsExecutor { } } - /// Returns whether a given package is a primary one (every member of the + /// Returns `true if a given package is a primary one (every member of the /// workspace is considered as such). Used to determine whether the RLS /// should cache invocations for these packages and rebuild them on changes. fn is_primary_package(&self, id: PackageId) -> bool { @@ -362,7 +355,7 @@ impl Executor for RlsExecutor { let plan = compilation_cx .build_plan .as_cargo_mut() - .expect("Build plan should be properly initialized before running Cargo"); + .expect("build plan should be properly initialized before running Cargo"); let only_primary = |unit: &Unit<'_>| self.is_primary_package(unit.pkg.package_id()); @@ -374,11 +367,11 @@ impl Executor for RlsExecutor { // workspace, even if it's not dirty at a time, to cache compiler // invocations in the build plan. // We only do a cargo build if we want to force rebuild the last - // crate (e.g., because some args changed). Therefore we should + // crate (e.g., because some args changed). Therefore, we should // always force rebuild the primary crate. let id = unit.pkg.package_id(); - // FIXME build scripts - this will force rebuild build scripts as - // well as the primary crate. But this is not too bad - it means + // FIXME: build scripts -- this will force rebuild build scripts as + // well as the primary crate. But this is not too bad -- it means // we will rarely rebuild more than we have to. 
self.is_primary_package(id) } @@ -413,7 +406,7 @@ impl Executor for RlsExecutor { } else { crate_name.clone() })) - .expect("Failed to send progress update"); + .expect("failed to send progress update"); } let out_dir = parse_arg(cargo_args, "--out-dir").expect("no out-dir in rustc command line"); @@ -428,26 +421,26 @@ impl Executor for RlsExecutor { && name.ends_with(".json") { if let Err(e) = remove_file(entry.path()) { - debug!("Error deleting file, {}: {}", name, e); + debug!("error deleting file, {}: {}", name, e); } } } } // Prepare our own call to `rustc` as follows: - // 1. Use $RUSTC wrapper if specified, otherwise use RLS executable + // 1. Use `$RUSTC` wrapper if specified, otherwise use RLS executable // as an rustc shim (needed to distribute via the stable channel) // 2. For non-primary packages or build scripts, execute the call // 3. Otherwise, we'll want to use the compilation to drive the analysis: - // i. Modify arguments to account for the RLS settings (e.g. - // compiling under cfg(test) mode or passing a custom sysroot) + // i. Modify arguments to account for the RLS settings (e.g., + // compiling under `cfg(test)` mode or passing a custom sysroot), // ii. Execute the call and store the final args/envs to be used for - // later in-process execution of the compiler + // later in-process execution of the compiler. let mut cmd = cargo_cmd.clone(); // RLS executable can be spawned in a different directory than the one // that Cargo was spawned in, so be sure to use absolute RLS path (which - // env::current_exe() returns) for the shim. + // `env::current_exe()` returns) for the shim. let rustc_shim = env::var("RUSTC") .ok() .or_else(|| env::current_exe().ok().and_then(|x| x.to_str().map(String::from))) @@ -461,7 +454,7 @@ impl Executor for RlsExecutor { let envs = cargo_cmd.get_envs().clone(); let sysroot = super::rustc::current_sysroot() - .expect("need to specify SYSROOT env var or use rustup or multirust"); + .expect("need to specify `SYSROOT` env var or use rustup or multirust"); { let config = self.config.lock().unwrap(); @@ -513,18 +506,18 @@ impl Executor for RlsExecutor { self.reached_primary.store(true, Ordering::SeqCst); - // Cache executed command for the build plan + // Cache executed command for the build plan. { let mut cx = self.compilation_cx.lock().unwrap(); let plan = cx.build_plan.as_cargo_mut().unwrap(); plan.cache_compiler_job(id, target, mode, &cmd); } - // Prepare modified cargo-generated args/envs for future rustc calls + // Prepare modified cargo-generated args/envs for future rustc calls. let rustc = cargo_cmd.get_program().to_owned().into_string().unwrap(); args.insert(0, rustc); - // Store the modified cargo-generated args/envs for future rustc calls + // Store the modified cargo-generated args/envs for future rustc calls. { let mut compilation_cx = self.compilation_cx.lock().unwrap(); compilation_cx.needs_rebuild = false; @@ -550,7 +543,7 @@ impl Executor for RlsExecutor { self.compiler_messages.lock().unwrap().append(&mut messages); self.analysis.lock().unwrap().append(&mut analysis); - // Cache calculated input files for a given rustc invocation + // Cache calculated input files for a given rustc invocation. 
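The executor above extracts `--out-dir` from the Cargo-generated rustc command line via `parse_arg(cargo_args, "--out-dir")`, whose body is not part of this diff. The helper below is a guess at a minimal equivalent that handles both `--flag value` and `--flag=value`; treat the exact semantics as an assumption rather than the actual RLS behavior.

```rust
use std::ffi::OsString;

/// Returns the value of `arg` from a rustc-style argument list, accepting
/// both `--flag value` and `--flag=value`. A sketch, not the RLS `parse_arg`.
fn parse_arg(args: &[OsString], arg: &str) -> Option<String> {
    let mut iter = args.iter().filter_map(|a| a.to_str());
    while let Some(a) = iter.next() {
        if a == arg {
            // Space-separated form: the value is the next argument.
            return iter.next().map(str::to_owned);
        }
        if let Some(value) = a.strip_prefix(arg).and_then(|rest| rest.strip_prefix('=')) {
            // `--flag=value` form.
            return Some(value.to_owned());
        }
    }
    None
}

fn main() {
    let args: Vec<OsString> =
        ["--crate-name", "foo", "--out-dir", "/tmp/target/debug/deps", "--emit=metadata"]
            .iter()
            .map(|s| OsString::from(*s))
            .collect();
    assert_eq!(parse_arg(&args, "--out-dir").as_deref(), Some("/tmp/target/debug/deps"));
    assert_eq!(parse_arg(&args, "--emit").as_deref(), Some("metadata"));
    assert_eq!(parse_arg(&args, "--sysroot"), None);
}
```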
{ let mut cx = self.compilation_cx.lock().unwrap(); let plan = cx.build_plan.as_cargo_mut().unwrap(); @@ -629,7 +622,7 @@ fn prepare_cargo_rustflags(config: &Config) -> String { dedup_flags(&flags) } -/// Construct a cargo configuration for the given build and target directories +/// Constructs a cargo configuration for the given build and target directories /// and shell. pub fn make_cargo_config( build_dir: &Path, @@ -694,14 +687,13 @@ fn parse_arg(args: &[OsString], arg: &str) -> Option { None } -/// `flag_str` is a string of command line args for Rust. This function removes any -/// duplicate flags. +/// Removes any duplicate flags from `flag_str` (a string of command line args for Rust). fn dedup_flags(flag_str: &str) -> String { - // The basic strategy here is that we split flag_str into a set of keys and - // values and dedup any duplicate keys, using the last value in flag_str. + // The basic strategy here is that we split `flag_str` into a set of keys and + // values and dedup any duplicate keys, using the last value in `flag_str`. // This is a bit complicated because of the variety of ways args can be specified. - // Retain flags order to prevent complete project rebuild due to RUSTFLAGS fingerprint change + // Retain flags order to prevent complete project rebuild due to `RUSTFLAGS` fingerprint change. let mut flags = BTreeMap::new(); let mut bits = flag_str.split_whitespace().peekable(); @@ -718,8 +710,7 @@ fn dedup_flags(flag_str: &str) -> String { if bit.starts_with('-') { if bit.contains('=') { - // Split only on the first equals sign (there may be - // more than one) + // Split only on the first equals sign (there may be more than one). let bits: Vec<_> = bit.splitn(2, '=').collect(); assert!(bits.len() == 2); flags.insert(bits[0].to_owned() + "=", bits[1].to_owned()); @@ -758,7 +749,7 @@ fn dedup_flags(flag_str: &str) -> String { #[derive(Debug)] pub struct ManifestAwareError { cause: failure::Error, - /// Path to a manifest file within the project that seems the closest to the error's origin + /// The path to a manifest file within the project that seems the closest to the error's origin. nearest_project_manifest: PathBuf, manifest_error_range: Range, } @@ -767,17 +758,17 @@ impl ManifestAwareError { fn new(cause: failure::Error, root_manifest: &Path, ws: Option<&Workspace<'_>>) -> Self { let project_dir = root_manifest.parent().unwrap(); let mut err_path = root_manifest; - // cover whole manifest if we haven't any better idea. + // Cover whole manifest if we haven't any better idea. let mut err_range = Range { start: Position::new(0, 0), end: Position::new(9999, 0) }; if let Some(manifest_err) = cause.downcast_ref::() { - // Scan through any manifest errors to pin the error more precisely + // Scan through any manifest errors to pin the error more precisely. let is_project_manifest = |path: &PathBuf| path.is_file() && path.starts_with(project_dir); let last_cause = manifest_err.manifest_causes().last().unwrap_or(manifest_err); if is_project_manifest(last_cause.manifest_path()) { - // manifest with the issue is inside the project + // Manifest with the issue is inside the project. err_path = last_cause.manifest_path().as_path(); if let Some((line, col)) = (last_cause as &dyn Fail) .iter_chain() @@ -785,7 +776,7 @@ impl ManifestAwareError { .next() .and_then(|e| e.line_col()) { - // Use toml deserialize error position + // Use TOML deserializiation error position. 
err_range.start = Position::new(line as _, col as _); err_range.end = Position::new(line as _, col as u64 + 1); } @@ -795,12 +786,12 @@ impl ManifestAwareError { .filter(|e| is_project_manifest(e.manifest_path())) .last(); if let Some(nearest) = nearest_cause { - // not the root cause, but the nearest manifest to it in the project + // Not the root cause, but the nearest manifest to it in the project. err_path = nearest.manifest_path().as_path(); } } } else if let (Some(ws), Some(resolve_err)) = (ws, cause.downcast_ref::()) { - // if the resolve error leads to a workspace member we should use that manifest + // If the resolve error leads to a workspace member, we should use that manifest. if let Some(member) = resolve_err .package_path() .iter() @@ -849,7 +840,7 @@ mod test { assert!(result.matches("foo").count() == 2); assert!(result.matches("bar").count() == 1); - // These should dedup. + // These should get deduplicated. assert!(dedup_flags("-Zfoo -Zfoo") == " -Zfoo"); assert!(dedup_flags("-Zfoo -Zfoo -Zfoo") == " -Zfoo"); let result = dedup_flags("-Zfoo -Zfoo -Zbar"); diff --git a/rls/src/build/cargo_plan.rs b/rls/src/build/cargo_plan.rs index cd6ba8f2889..92235850157 100644 --- a/rls/src/build/cargo_plan.rs +++ b/rls/src/build/cargo_plan.rs @@ -1,13 +1,3 @@ -// Copyright 2017 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - //! This contains a build plan that is created during the Cargo build routine //! and stored afterwards, which can be later queried, given a list of dirty //! files, to retrieve a queue of compiler calls to be invoked (including @@ -41,8 +31,8 @@ use crate::build::rustc::src_path; use crate::build::PackageArg; /// Main key type by which `Unit`s will be distinguished in the build plan. -/// In Target we're mostly interested in TargetKind (Lib, Bin, ...) and name -/// (e.g. we can have 2 binary targets with different names). +/// In `Target` we're mostly interested in `TargetKind` (Lib, Bin, ...) and name +/// (e.g., we can have 2 binary targets with different names). pub(crate) type UnitKey = (PackageId, Target, CompileMode); /// Holds the information how exactly the build will be performed for a given @@ -77,13 +67,13 @@ impl CargoPlan { CargoPlan { built_packages: pkgs, ..Self::with_manifest(manifest_path) } } - /// Returns whether a build plan has cached compiler invocations and dep - /// graph so it's at all able to return a job queue via `prepare_work`. + /// Returns `true` if a build plan has cached compiler invocations and dep + /// graph, so it's possibly able to return a job queue via `prepare_work`. pub(crate) fn is_ready(&self) -> bool { !self.compiler_jobs.is_empty() } - /// Cache a given compiler invocation in `ProcessBuilder` for a given + /// Caches a given compiler invocation in `ProcessBuilder` for a given /// `PackageId` and `TargetKind` in `Target`, to be used when processing /// cached build plan. pub(crate) fn cache_compiler_job( @@ -120,7 +110,7 @@ impl CargoPlan { let unit_key = (id, target.clone(), mode); trace!("Caching these files: {:#?} for {:?} key", &input_files, &unit_key); - // Create reverse file -> unit mapping (to be used for dirty unit calculation) + // Create reverse file -> unit mapping (to be used for dirty unit calculation). 
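`dedup_flags` above (with its tests a little before this point) keeps the last value for each flag key so a repeated `RUSTFLAGS` entry does not change the fingerprint, e.g. `dedup_flags("-Zfoo -Zfoo") == " -Zfoo"`. The sketch below reproduces only the "last value wins" core for `-Kfoo` and `--key=value` shapes; the real function also handles space-separated values such as `-L dir` and preserves flag order more carefully, which is omitted here.

```rust
use std::collections::BTreeMap;

/// Simplified "last value wins" dedup for flags shaped like `-Kfoo` or
/// `--key=value`. Treat this purely as an illustration of the idea above.
fn dedup_flags_simple(flag_str: &str) -> String {
    let mut flags: BTreeMap<String, String> = BTreeMap::new();
    for bit in flag_str.split_whitespace() {
        // Split only on the first '=' so values may themselves contain '='.
        if let Some((key, value)) = bit.split_once('=') {
            flags.insert(format!("{}=", key), value.to_owned());
        } else {
            flags.insert(bit.to_owned(), String::new());
        }
    }
    let mut result = String::new();
    for (key, value) in &flags {
        result.push(' ');
        result.push_str(key);
        result.push_str(value);
    }
    result
}

fn main() {
    // Duplicates collapse to a single occurrence, as in the tests above.
    assert_eq!(dedup_flags_simple("-Zfoo -Zfoo"), " -Zfoo");
    // The last value for a given key wins.
    assert_eq!(dedup_flags_simple("--cfg=a --cfg=b"), " --cfg=b");
}
```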
for file in &input_files { self.file_key_mapping.entry(file.to_path_buf()).or_default().insert(unit_key.clone()); } @@ -128,7 +118,7 @@ impl CargoPlan { self.input_files.insert(unit_key, input_files); } - /// Emplace a given `Unit`, along with its `Unit` dependencies (recursively) + /// Places a given `Unit`, along with its `Unit` dependencies (recursively) /// into the dependency graph as long as the passed `Unit` isn't filtered /// out by the `filter` closure. pub(crate) fn emplace_dep_with_filter( @@ -184,10 +174,10 @@ impl CargoPlan { } } - /// TODO: Improve detecting dirty crate targets for a set of dirty file paths. + /// TODO: improve detecting dirty crate targets for a set of dirty file paths. /// This uses a lousy heuristic of checking path prefix for a given crate /// target to determine whether a given unit (crate target) is dirty. This - /// can easily backfire, e.g. when build script is under src/. Any change + /// can easily backfire, e.g., when build script is under `src/`. Any change /// to a file under src/ would imply the build script is always dirty, so we /// never do work and always offload to Cargo in such case. /// Because of that, build scripts are checked separately and only other @@ -234,7 +224,7 @@ impl CargoPlan { .take_while(|&(x, y)| x == y) .count() }; - // Since a package can correspond to many units (e.g. compiled + // Since a package can correspond to many units (e.g., compiled // as a regular binary or a test harness for unit tests), we // collect every unit having the longest path prefix. let max_matching_prefix = other_targets @@ -267,7 +257,7 @@ impl CargoPlan { fn transitive_dirty_units(&self, dirties: &HashSet) -> HashSet { let mut transitive = dirties.clone(); // Walk through a rev dep graph using a stack of nodes to collect - // transitively every dirty node + // transitively every dirty node. let mut to_process: Vec<_> = dirties.iter().cloned().collect(); while let Some(top) = to_process.pop() { if transitive.get(&top).is_some() { @@ -275,7 +265,7 @@ impl CargoPlan { } transitive.insert(top.clone()); - // Process every dirty rev dep of the processed node + // Process every dirty rev dep of the processed node. let dirty_rev_deps = self .rev_dep_graph .get(&top) @@ -299,9 +289,9 @@ impl CargoPlan { self.rev_dep_graph .iter() - // Remove nodes that are not dirty + // Remove nodes that are not dirty. .filter(|&(unit, _)| dirties.contains(unit)) - // Retain only dirty dependencies of the ones that are dirty + // Retain only dirty dependencies of the ones that are dirty. .map(|(k, deps)| { (k.clone(), deps.iter().cloned().filter(|d| dirties.contains(d)).collect()) }) @@ -355,7 +345,7 @@ impl CargoPlan { let needed_packages = self.built_packages.union(&dirty_packages).cloned().collect(); // We modified a file from a packages, that are not included in the - // cached build plan - run Cargo to recreate the build plan including them + // cached build plan -- run Cargo to recreate the build plan including them. if needs_more_packages { return WorkStatus::NeedsCargo(PackageArg::Packages(needed_packages)); } @@ -380,7 +370,7 @@ impl CargoPlan { // It is possible that we want a job which is not in our cache (compiler_jobs), // for example we might be building a workspace with an error in a crate and later - // crates within the crate that depend on the error-ing one have never been built. + // crates within the crate that depend on the erroring one have never been built. 
// In that case we need to build from scratch so that everything is in our cache, or // we cope with the error. In the error case, jobs will be None. match jobs { @@ -417,7 +407,7 @@ impl PackageMap { } } - // Find each package in the workspace and record the root directory and package name. + // Finds each package in the workspace and record the root directory and package name. fn discover_package_paths(manifest_path: &Path) -> HashMap { trace!("read metadata {:?}", manifest_path); cargo_metadata::MetadataCommand::new() @@ -440,7 +430,7 @@ impl PackageMap { modified_files.iter().filter_map(|p| self.map(p.as_ref())).collect() } - // Map a file to the package which it belongs to. + // Maps a file to the package which it belongs to. // We do this by walking up the directory tree from `path` until we get to // one of the recorded package root directories. fn map(&self, path: &Path) -> Option { @@ -526,9 +516,9 @@ impl BuildGraph for CargoPlan { fn add>(&mut self, unit: T, deps: Vec) { let unit = unit.into(); - // Units can depend on others with different Targets or Profiles - // (e.g. different `run_custom_build`) despite having the same UnitKey. - // We coalesce them here while creating the UnitKey dep graph. + // Units can depend on others with different `Target`s or `Profile`s + // (e.g., different `run_custom_build`) despite having the same `UnitKey`. + // We coalesce them here while creating the `UnitKey` dep graph. // TODO: Are we sure? Can we figure that out? let deps = deps.into_iter().map(|d| d.into()).filter(|dep| unit.key() != dep.key()); @@ -539,7 +529,7 @@ impl BuildGraph for CargoPlan { self.units.entry(dep.key()).or_insert(dep); } - // We expect these entries to be present for each unit in the graph + // We expect these entries to be present for each unit in the graph. self.dep_graph.entry(unit.key()).or_insert_with(HashSet::new); self.rev_dep_graph.entry(unit.key()).or_insert_with(HashSet::new); diff --git a/rls/src/build/environment.rs b/rls/src/build/environment.rs index b30fa58727d..889f5beac86 100644 --- a/rls/src/build/environment.rs +++ b/rls/src/build/environment.rs @@ -1,13 +1,3 @@ -// Copyright 2017 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - use lazy_static::lazy_static; use std::collections::HashMap; use std::env; diff --git a/rls/src/build/external.rs b/rls/src/build/external.rs index 540dc209407..257f30628e5 100644 --- a/rls/src/build/external.rs +++ b/rls/src/build/external.rs @@ -1,13 +1,3 @@ -// Copyright 2018 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - //! Contains data and logic for executing builds specified externally (rather //! than executing and intercepting Cargo calls). //! 
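`transitive_dirty_units` in cargo_plan.rs above collects, with a stack over the reverse dependency graph, every unit that transitively depends on a dirty one. The same traversal is sketched below over a plain `HashMap`/`HashSet` graph, with string unit keys purely for illustration.

```rust
use std::collections::{HashMap, HashSet};

/// Given a reverse dependency graph (unit -> units that depend on it) and a
/// set of dirty units, returns the dirty units plus everything that
/// transitively depends on them, mirroring `transitive_dirty_units` above.
fn transitive_dirty_units<'a>(
    rev_dep_graph: &HashMap<&'a str, HashSet<&'a str>>,
    dirties: &HashSet<&'a str>,
) -> HashSet<&'a str> {
    let mut transitive = HashSet::new();
    let mut to_process: Vec<&str> = dirties.iter().cloned().collect();
    while let Some(top) = to_process.pop() {
        if !transitive.insert(top) {
            // Already visited.
            continue;
        }
        // Queue every reverse dependency of the processed node.
        if let Some(rev_deps) = rev_dep_graph.get(top) {
            to_process.extend(rev_deps.iter().cloned());
        }
    }
    transitive
}

fn main() {
    // lib <- bin, lib <- tests; bin and tests have no reverse deps.
    let mut rev: HashMap<&str, HashSet<&str>> = HashMap::new();
    rev.insert("lib", ["bin", "tests"].iter().cloned().collect());
    rev.insert("bin", HashSet::new());
    rev.insert("tests", HashSet::new());

    let dirty: HashSet<&str> = ["lib"].iter().cloned().collect();
    let result = transitive_dirty_units(&rev, &dirty);

    let expected: HashSet<&str> = ["lib", "bin", "tests"].iter().cloned().collect();
    assert_eq!(result, expected);
}
```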
@@ -52,9 +42,9 @@ fn cmd_line_to_command>(cmd_line: &S, cwd: &Path) -> Result>( cmd_line: S, @@ -84,7 +74,7 @@ pub(super) fn build_with_external_cmd>( .lines() .filter_map(|res| res.ok()) .map(PathBuf::from) - // Relative paths are relative to build command, not RLS itself (cwd may be different) + // Relative paths are relative to build command, not RLS itself (CWD may be different). .map(|path| if !path.is_absolute() { build_dir.join(path) } else { path }); let analyses = match read_analysis_files(files) { @@ -168,7 +158,7 @@ fn plan_from_analysis(analysis: &[Analysis], build_dir: &Path) -> Result, } @@ -188,11 +178,12 @@ pub(crate) struct RawInvocation { #[derive(Clone, Debug)] pub(crate) struct Invocation { - deps: Vec, // FIXME: Use arena and store refs instead for ergonomics + // FIXME: use arena and store refs instead for ergonomics. + deps: Vec, outputs: Vec, links: BTreeMap, command: ProcessBuilder, - // Parsed data + // Parsed data. src_path: Option, } @@ -214,7 +205,7 @@ impl BuildKey for Invocation { self.command.get_program().hash(&mut hash); let /*mut*/ args = self.command.get_args().to_owned(); - // args.sort(); // TODO: Parse 2-part args (e.g. ["--extern", "a=b"]) + // args.sort(); // TODO: parse 2-part args (e.g., `["--extern", "a=b"]`) args.hash(&mut hash); let mut envs: Vec<_> = self.command.get_envs().iter().collect(); envs.sort(); @@ -268,7 +259,7 @@ impl ExternalPlan { } pub(crate) fn try_from_raw(build_dir: &Path, raw: RawPlan) -> Result { - // Sanity check, each dependency (index) has to be inside the build plan + // Sanity check: each dependency (index) has to be inside the build plan. if raw .invocations .iter() @@ -320,8 +311,8 @@ impl BuildGraph for ExternalPlan { self.units.entry(unit.key()).or_insert(unit); } - // FIXME: Change associating files with units by their path but rather - // include file inputs in the build plan or call rustc with --emit=dep-info + // FIXME: change associating files with units by their path but rather + // include file inputs in the build plan or call rustc with `--emit=dep-info`. fn dirties>(&self, modified: &[T]) -> Vec<&Self::Unit> { let mut results = HashSet::::new(); @@ -332,17 +323,17 @@ impl BuildGraph for ExternalPlan { assert!(a.is_absolute() && b.is_absolute()); a.components().zip(b.components()).take_while(|&(x, y)| x == y).count() }; - // Since a package can correspond to many units (e.g. compiled + // Since a package can correspond to many units (e.g., compiled // as a regular binary or a test harness for unit tests), we // collect every unit having the longest path prefix. let matching_units: Vec<(&_, usize)> = self .units .values() // For `rustc dir/some.rs` we'll consider every changed files - // under dir/ as relevant + // under dir/ as relevant. .map(|unit| (unit, unit.src_path.as_ref().and_then(|src| src.parent()))) .filter_map(|(unit, src)| src.map(|src| (unit, src))) - // Discard units that are in a different directory subtree + // Discard units that are in a different directory subtree. .filter_map(|(unit, src)| { let matching = matching_prefix_components(modified, &src); if matching >= src.components().count() { @@ -354,7 +345,7 @@ impl BuildGraph for ExternalPlan { .collect(); // Changing files in the same directory might affect multiple units - // (e.g. multiple crate binaries, their unit test harness), so + // (e.g., multiple crate binaries, their unit test harness), so // treat all of them as dirty. 
if let Some(max_prefix) = matching_units.iter().map(|(_, p)| p).max() { let dirty_keys = matching_units @@ -554,9 +545,9 @@ mod tests { to_paths(&["/my/repo/build.rs", "/my/repo/src/lib.rs"]).sorted(), ); - // TODO: Test on non-trivial input, use Iterator::position if - // nondeterminate order wrt hashing is a problem - // Jobs that have to run first are *last* in the topological sorting here + // TODO: test on non-trivial input; `use Iterator::position` if + // non-determinate order w.r.t. hashing is a problem. + // Jobs that have to run first are *last* in the topological sorting here. let topo_units = plan.topological_sort(units_to_rebuild); assert_eq!(paths(&topo_units), to_paths(&["/my/repo/src/lib.rs", "/my/repo/build.rs"]),) } diff --git a/rls/src/build/mod.rs b/rls/src/build/mod.rs index 6b1025d166d..57518c566db 100644 --- a/rls/src/build/mod.rs +++ b/rls/src/build/mod.rs @@ -1,27 +1,5 @@ -// Copyright 2016-2017 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - //! Running builds as-needed for the server to answer questions. -use self::environment::EnvironmentLock; -use self::plan::{BuildGraph, BuildPlan, WorkStatus}; - -use crate::actions::post_build::PostBuildHandler; -use crate::actions::progress::{ProgressNotifier, ProgressUpdate}; -use crate::config::Config; -use crate::lsp_data::Range; -use failure; -use log::{debug, info, trace}; -use rls_data::Analysis; -use rls_vfs::Vfs; - use std::collections::{HashMap, HashSet}; use std::io::{self, Write}; use std::mem; @@ -32,6 +10,17 @@ use std::sync::{Arc, Mutex, RwLock}; use std::thread; use std::time::{Duration, Instant}; +use failure; +use log::{debug, info, trace}; +use rls_data::Analysis; +use rls_vfs::Vfs; + +use crate::actions::post_build::PostBuildHandler; +use crate::actions::progress::{ProgressNotifier, ProgressUpdate}; +use crate::config::Config; +use crate::lsp_data::Range; +use self::environment::EnvironmentLock; +use self::plan::{BuildGraph, BuildPlan, WorkStatus}; pub use self::plan::{Crate, Edition}; mod cargo; @@ -70,7 +59,7 @@ mod rustc; #[derive(Clone)] pub struct BuildQueue { internals: Arc, - // The build queue - we only have one low and one high priority build waiting. + // The build queue -- we only have one low and one high priority build waiting. // (low, high) priority builds. // This lock should only be held transiently. queued: Arc>, @@ -124,7 +113,8 @@ pub enum BuildResult { /// Priority for a build request. #[derive(Clone, Copy, Debug, Eq, PartialEq)] pub enum BuildPriority { - /// Run this build as soon as possible (e.g., on save or explicit build request) (not currently used). + /// Run this build as soon as possible (e.g., on save or explicit build request). + /// (Not currently used.) Immediate, /// Immediate, plus re-run Cargo. Cargo, @@ -147,7 +137,7 @@ struct CompilationContext { cwd: Option, /// The build directory is supplied by the client and passed to Cargo. build_dir: Option, - /// Whether needs to perform a Cargo rebuild + /// `true` if we need to perform a Cargo rebuild. needs_rebuild: bool, /// Build plan, which should know all the inter-package/target dependencies /// along with args/envs. 
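Both plan implementations above decide which crate targets a modified file belongs to by counting shared leading path components and keeping the units with the longest match. A sketch of just that comparison; `common_prefix_components` is a name made up here, and the example paths mirror the test fixtures used above.

```rust
use std::path::Path;

/// Counts how many leading components two absolute paths share; the unit
/// whose source directory scores highest is the one considered dirty.
fn common_prefix_components(a: &Path, b: &Path) -> usize {
    assert!(a.is_absolute() && b.is_absolute());
    a.components().zip(b.components()).take_while(|(x, y)| x == y).count()
}

fn main() {
    let modified = Path::new("/my/repo/src/lib.rs");

    let lib_src_dir = Path::new("/my/repo/src");
    let build_script_dir = Path::new("/my/repo");

    let lib_score = common_prefix_components(modified, lib_src_dir);
    let build_rs_score = common_prefix_components(modified, build_script_dir);

    // `src/lib.rs` matches the library target rooted at `src/` more closely
    // than the build script rooted at the repository top.
    assert!(lib_score > build_rs_score);
    assert_eq!(lib_score, 4); // "/", "my", "repo", "src"
}
```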
@@ -203,7 +193,7 @@ impl Build { } } - // True if the build is waiting and where it should be impossible for one to + // Returns `true` if the build is waiting and where it should be impossible for one to // be in progress. fn is_pending_fresh(&self) -> bool { match *self { @@ -222,7 +212,7 @@ impl Build { } impl BuildQueue { - /// Construct a new build queue. + /// Constructs a new build queue. pub fn new(vfs: Arc, config: Arc>) -> BuildQueue { BuildQueue { internals: Arc::new(Internals::new(vfs, config)), @@ -253,7 +243,7 @@ impl BuildQueue { /// ## Implementation /// /// This layer of the build queue is single-threaded and we aim to return - /// quickly. A single build thread is spawned to do any building (we never + /// quickly. A single build thread is spawned to do any building (we never /// do parallel builds so that we don't hog the CPU, we might want to change /// that in the future). /// @@ -296,7 +286,7 @@ impl BuildQueue { } } - /// Block until any currently queued builds are complete. + /// Blocks until any currently queued builds are complete. /// /// Since an incoming build can squash a pending or executing one, we wait /// for all builds to complete, i.e., if any build is running when called, @@ -317,7 +307,7 @@ impl BuildQueue { } } - /// Essentially this is !'would_block' (see `block_on_build`). If this is + /// Essentially this is the opposite of 'would block' (see `block_on_build`). If this is /// true, then it is safe to rely on data from the build. pub fn build_ready(&self) -> bool { !self.internals.building.load(Ordering::SeqCst) @@ -384,17 +374,17 @@ impl BuildQueue { } } - // channel to get progress updates out for the async build + // Channel to get progress updates out for the async build. let (progress_sender, progress_receiver) = channel::(); - // notifier of window/progress + // Notifier of window/progress. let notifier = build.notifier; - // use this thread to propagate the progress messages until the sender is dropped. + // Use this thread to propagate the progress messages until the sender is dropped. let progress_thread = thread::Builder::new() .name("progress-notifier".into()) .spawn(move || { - // window/progress notification that we are about to build + // Window/progress notification that we are about to build. notifier.notify_begin_progress(); while let Ok(progress) = progress_receiver.recv() { notifier.notify_progress(progress); @@ -507,7 +497,7 @@ impl Internals { // changing project), we must do a cargo build of the whole project. // Otherwise we just use rustc directly. // - // The 'full cargo build' is a `cargo check` customised and run + // The 'full Cargo build' is a `cargo check` customised and run // in-process. Cargo will shell out to call rustc (this means the // the compiler available at runtime must match the compiler linked to // the RLS). All but the last crate are built as normal, we intercept @@ -520,7 +510,7 @@ impl Internals { // disk). // If the build plan has already been cached, use it, unless Cargo - // has to be specifically rerun (e.g. when build scripts changed) + // has to be specifically rerun (e.g., when build scripts changed). let work = { let modified: Vec<_> = self.dirty_files.lock().unwrap().keys().cloned().collect(); @@ -540,19 +530,19 @@ impl Internals { cx.build_plan = BuildPlan::External(plan); // Since we don't support diagnostics in external // builds it might be worth rerunning the commands - // ourselves again to get both analysis *and* diagnostics + // ourselves again to get both analysis *and* diagnostics. 
return result; } }, } - // Fall back to Cargo + // Fall back to Cargo. } else { - // Cargo plan is recreated and `needs_rebuild` reset if we run cargo::cargo(). + // Cargo plan is recreated and `needs_rebuild` reset if we run `cargo::cargo()`. match cx.build_plan { BuildPlan::External(_) => WorkStatus::NeedsCargo(PackageArg::Default), BuildPlan::Cargo(ref plan) => { match plan.prepare_work(&modified) { - // Don't reuse the plan if we need to rebuild + // Don't reuse the plan if we need to rebuild. WorkStatus::Execute(_) if needs_rebuild => { WorkStatus::NeedsCargo(PackageArg::Default) } @@ -562,7 +552,7 @@ impl Internals { } } }; - trace!("Specified work: {:#?}", work); + trace!("specified work: {:#?}", work); let result = match work { WorkStatus::NeedsCargo(package_arg) => cargo::cargo(self, package_arg, progress_sender), @@ -580,8 +570,7 @@ impl Internals { /// Returns a pre-build wait time facilitating build debouncing. /// - /// Uses client configured value, or attempts to infer an appropriate - /// duration. + /// Uses client configured value, or attempts to infer an appropriate duration. fn build_wait(&self) -> Duration { self.config.lock().unwrap().wait_to_build.map(Duration::from_millis).unwrap_or_else(|| { match *self.last_build_duration.read().unwrap() { @@ -616,22 +605,22 @@ impl Write for BufWriter { fn auto_tune_build_wait_no_config() { let i = Internals::new(Arc::new(Vfs::new()), Arc::default()); - // Pessimistic if no information + // Pessimistic if no information. assert_eq!(i.build_wait(), Duration::from_millis(1500)); - // very fast builds like hello world + // Very fast builds like hello world. *i.last_build_duration.write().unwrap() = Some(Duration::from_millis(70)); assert_eq!(i.build_wait(), Duration::from_millis(0)); - // pretty fast builds should have a minimally impacting debounce for typing + // Somewhat fast builds should have a minimally impacting debounce for typing. *i.last_build_duration.write().unwrap() = Some(Duration::from_millis(850)); assert_eq!(i.build_wait(), Duration::from_millis(200)); - // medium builds should have a medium debounce time + // Medium builds should have a medium debounce time. *i.last_build_duration.write().unwrap() = Some(Duration::from_secs(4)); assert_eq!(i.build_wait(), Duration::from_millis(500)); - // slow builds ... lets wait just a bit longer, maybe they'll type something else? + // Slow builds. Lets wait just a bit longer, maybe they'll type something else? *i.last_build_duration.write().unwrap() = Some(Duration::from_secs(12)); assert_eq!(i.build_wait(), Duration::from_millis(1500)); } @@ -641,7 +630,7 @@ fn dont_auto_tune_build_wait_configured() { let i = Internals::new(Arc::new(Vfs::new()), Arc::default()); i.config.lock().unwrap().wait_to_build = Some(350); - // Always use configured build wait if available + // Always use configured build wait if available. assert_eq!(i.build_wait(), Duration::from_millis(350)); *i.last_build_duration.write().unwrap() = Some(Duration::from_millis(70)); diff --git a/rls/src/build/rustc.rs b/rls/src/build/rustc.rs index 86505063137..0f29e99c319 100644 --- a/rls/src/build/rustc.rs +++ b/rls/src/build/rustc.rs @@ -1,18 +1,5 @@ -// Copyright 2017 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. 
-use log::trace; -use rls_data::Analysis; -use rls_vfs::Vfs; - // FIXME: switch to something more ergonomic here, once available. -// (currently there is no way to opt into sysroot crates w/o `extern crate`) +// (Currently, there is no way to opt into sysroot crates without `extern crate`.) #[allow(unused_extern_crates)] extern crate getopts; #[allow(unused_extern_crates)] @@ -33,19 +20,6 @@ extern crate rustc_resolve; extern crate rustc_save_analysis; #[allow(unused_extern_crates)] extern crate syntax; -use self::rustc::session::config::Input; -use self::rustc::session::Session; -use self::rustc_driver::driver::CompileController; -use self::rustc_driver::{run, run_compiler, CompilerCalls, RustcDefaultCalls}; -use self::rustc_save_analysis as save; -use self::rustc_save_analysis::CallbackHandler; -use self::syntax::edition::Edition as RustcEdition; -use self::syntax::source_map::{FileLoader, RealFileLoader}; - -use crate::build::environment::{Environment, EnvironmentLockFacade}; -use crate::build::plan::{Crate, Edition}; -use crate::build::{BufWriter, BuildResult}; -use crate::config::{ClippyPreference, Config}; use std::collections::{HashMap, HashSet}; use std::env; @@ -55,7 +29,24 @@ use std::path::{Path, PathBuf}; use std::process::Command; use std::sync::{Arc, Mutex}; -// Runs a single instance of rustc. Runs in-process. +use log::trace; +use rls_data::Analysis; +use rls_vfs::Vfs; + +use crate::build::environment::{Environment, EnvironmentLockFacade}; +use crate::build::{BufWriter, BuildResult}; +use crate::build::plan::{Crate, Edition}; +use crate::config::{ClippyPreference, Config}; +use self::rustc_driver::driver::CompileController; +use self::rustc_driver::{run, run_compiler, CompilerCalls, RustcDefaultCalls}; +use self::rustc_save_analysis as save; +use self::rustc_save_analysis::CallbackHandler; +use self::rustc::session::Session; +use self::rustc::session::config::{self, ErrorOutputType, Input}; +use self::syntax::source_map::{FileLoader, RealFileLoader}; +use self::syntax::edition::Edition as RustcEdition; + +// Runs a single instance of Rustc (in-process). pub(crate) fn rustc( vfs: &Vfs, args: &[String], @@ -113,8 +104,8 @@ pub(crate) fn rustc( let controller = Box::new(RlsRustcCalls::new(Arc::clone(&analysis), Arc::clone(&input_files), clippy_pref)); - // rustc explicitly panics in run_compiler() on compile failure, regardless - // if it encounters an ICE (internal compiler error) or not. + // rustc explicitly panics in `run_compiler()` on compile failure, regardless + // of whether it encounters an ICE (internal compiler error) or not. // TODO: Change librustc_driver behaviour to distinguish between ICEs and // regular compilation failure with errors? let result = ::std::panic::catch_unwind(|| { @@ -129,8 +120,8 @@ pub(crate) fn rustc( }) }); - // FIXME(#25) given that we are running the compiler directly, there is no need - // to serialize the error messages - we should pass them in memory. + // FIXME(#25): given that we are running the compiler directly, there is no need + // to serialize the error messages -- we should pass them in memory. let err_buf = Arc::try_unwrap(err_buf).unwrap().into_inner().unwrap(); let err_buf = String::from_utf8(err_buf).unwrap(); let stderr_json_msgs: Vec<_> = err_buf.lines().map(String::from).collect(); diff --git a/rls/src/cmd.rs b/rls/src/cmd.rs index b6892dc9070..84263e4e7e8 100644 --- a/rls/src/cmd.rs +++ b/rls/src/cmd.rs @@ -1,13 +1,3 @@ -// Copyright 2017 The Rust Project Developers. 
See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - //! This module presents the RLS as a command line interface, it takes simple //! versions of commands, turns them into messages the RLS will understand, runs //! the RLS as usual and prints the JSON result back on the command line. @@ -47,7 +37,7 @@ macro_rules! print_verb { } } -/// Run the RLS in command line mode. +/// Runs the RLS in command line mode. pub fn run() { let sender = init(); diff --git a/rls/src/concurrency.rs b/rls/src/concurrency.rs index 848f9a7e193..49b423e876b 100644 --- a/rls/src/concurrency.rs +++ b/rls/src/concurrency.rs @@ -87,7 +87,7 @@ impl Drop for ConcurrentJob { // so we use uninhabited enum as a message type enum Never {} -/// Nonblocking +/// Non-blocking. fn is_closed(chan: &Receiver) -> bool { select! { recv(chan) -> msg => match msg { diff --git a/rls/src/config.rs b/rls/src/config.rs index 2e02156e75f..343202528e7 100644 --- a/rls/src/config.rs +++ b/rls/src/config.rs @@ -1,13 +1,3 @@ -// Copyright 2017 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - //! Configuration for the workspace that RLS is operating within and options for //! tweaking the RLS's behavior itself. @@ -127,21 +117,23 @@ pub struct Config { pub unstable_features: bool, pub wait_to_build: Option, pub show_warnings: bool, - /// Clear the RUST_LOG env variable before calling rustc/cargo? Default: true + /// `true` to clear the `RUST_LOG` env variable before calling rustc/cargo. + /// Default: `true`. pub clear_env_rust_log: bool, - /// Build the project only when a file got saved and not on file change. Default: false + /// `true` to build the project only when a file is saved and not on file change. + /// Default: `false`. pub build_on_save: bool, pub use_crate_blacklist: bool, - /// Cargo target dir. If set overrides the default one. + /// The Cargo target directory. If set, overrides the default one. pub target_dir: Inferrable>, pub features: Vec, pub all_features: bool, pub no_default_features: bool, pub jobs: Option, pub all_targets: bool, - /// Enable use of racer for `textDocument/completion` requests. + /// Enables use of Racer for `textDocument/completion` requests. /// - /// Enabled also enables racer fallbacks for hover & go-to-definition functionality + /// Enabling this also enables Racer fallbacks for hover and go-to-definition functionality /// if rustc analysis should fail. pub racer_completion: bool, #[serde(deserialize_with = "deserialize_clippy_preference")] @@ -155,15 +147,15 @@ pub struct Config { pub full_docs: Inferrable, /// Show additional context in hover tooltips when available. This is often the type /// local variable declaration. When set to false, the content is only available when - /// holding the `ctrl` key in some editors. + /// holding the `Ctrl` key in some editors. pub show_hover_context: bool, /// Use provided rustfmt binary instead of the statically linked one. - /// (requires unstable features) + /// (requires unstable features).
pub rustfmt_path: Option, /// EXPERIMENTAL (needs unstable features) /// If set, executes a given program responsible for rebuilding save-analysis /// to be loaded by the RLS. The program given should output a list of - /// resulting .json files on stdout. + /// resulting JSON files on stdout. pub build_command: Option, } @@ -279,7 +271,7 @@ impl Config { } } - /// Is this config incomplete, and needs additional values to be inferred? + /// Checks if this config is incomplete and needs additional values to be inferred. pub fn needs_inference(&self) -> bool { self.build_bin.is_none() || self.build_lib.is_none() || self.target_dir.is_none() } @@ -288,7 +280,7 @@ impl Config { /// Specifically, this: /// - detects correct `target/` build directory used by Cargo, if not specified. pub fn infer_defaults(&mut self, project_dir: &Path) -> CargoResult<()> { - // Note that this may not be equal build_dir when inside a workspace member + // Note that this may not be equal to `build_dir` when inside a workspace member. let manifest_path = important_paths::find_root_manifest_for_wd(project_dir)?; trace!("root manifest_path: {:?}", &manifest_path); @@ -301,7 +293,7 @@ impl Config { // Constructing a `Workspace` also probes the filesystem and detects where to place the // build artifacts. We need to rely on Cargo's behaviour directly not to possibly place our - // own artifacts somewhere else (e.g. when analyzing only a single crate in a workspace) + // own artifacts somewhere else (e.g., when analyzing only a single crate in a workspace). match self.target_dir { // We require an absolute path, so adjust a relative one if it's passed. Inferrable::Specified(Some(ref mut path)) if path.is_relative() => { @@ -326,11 +318,11 @@ impl Config { #[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)] pub enum ClippyPreference { - /// Disable clippy + /// Disable clippy. Off, - /// Enable clippy, but "allow" clippy lints (ie require "warn" override) + /// Enable clippy, but `allow` clippy lints (i.e., require `warn` override). OptIn, - /// Enable clippy + /// Enable clippy. On, } @@ -348,7 +340,7 @@ impl FromStr for ClippyPreference { } } -/// Permissive custom deserialization for `ClippyPreference` using `FromStr` +/// Permissive custom deserialization for `ClippyPreference` using `FromStr`. fn deserialize_clippy_preference<'de, T, D>(deserializer: D) -> Result where T: Deserialize<'de> + FromStr, @@ -372,16 +364,16 @@ where deserializer.deserialize_any(ClippyPrefDeserializer(PhantomData)) } -/// A rustfmt config (typically specified via rustfmt.toml) /// The `FmtConfig` is not an exact translation of the config -/// rustfmt generates from the user's toml file, since when -/// using rustfmt with rls certain configuration options are -/// always used. See `FmtConfig::set_rls_options` +/// A Rustfmt config (typically specified via `rustfmt.toml`). /// The `FmtConfig` is not an exact translation of the config +/// Rustfmt generates from the user's TOML file, since when +/// using Rustfmt with RLS, certain configuration options are +/// always used. See `FmtConfig::set_rls_options`. pub struct FmtConfig(RustfmtConfig); impl FmtConfig { /// Look for `.rustmt.toml` or `rustfmt.toml` in `path`, falling back - /// to the default config if neither exist + /// to the default config if neither exists.
pub fn from(path: &Path) -> FmtConfig { struct NullOptions; @@ -402,13 +394,13 @@ impl FmtConfig { FmtConfig::default() } - /// Return an immutable borrow of the config, will always - /// have any relevant rls specific options set + /// Returns an immutable borrow of the config; will always + /// have any relevant RLS specific options set. pub fn get_rustfmt_config(&self) -> &RustfmtConfig { &self.0 } - // options that are always used when formatting with rls + // Options that are always used when formatting with RLS. fn set_rls_options(&mut self) { self.0.set().skip_children(true); self.0.set().emit_mode(EmitMode::Stdout); diff --git a/rls/src/lsp_data.rs b/rls/src/lsp_data.rs index aa9e9f08101..1ce0b1a42d1 100644 --- a/rls/src/lsp_data.rs +++ b/rls/src/lsp_data.rs @@ -4,6 +4,9 @@ use std::error::Error; use std::fmt; use std::path::PathBuf; +pub use lsp_types::notification::Notification as LSPNotification; +pub use lsp_types::request::Request as LSPRequest; +pub use lsp_types::*; use racer; use rls_analysis::DefKind; use rls_span as span; @@ -13,11 +16,7 @@ use url::Url; use crate::actions::hover; use crate::config; -pub use lsp_types::notification::Notification as LSPNotification; -pub use lsp_types::request::Request as LSPRequest; -pub use lsp_types::*; - -/// Errors that can occur when parsing a file URI. +/// An error that can occur when parsing a file URI. #[derive(Debug)] pub enum UrlFileParseError { /// The URI scheme is not `file`. @@ -44,7 +43,7 @@ where } } -/// Parse the given URI into a `PathBuf`. +/// Parses the given URI into a `PathBuf`. pub fn parse_file_path(uri: &Url) -> Result { if uri.scheme() == "file" { uri.to_file_path().map_err(|_err| UrlFileParseError::InvalidFilePath) @@ -53,7 +52,7 @@ pub fn parse_file_path(uri: &Url) -> Result { } } -/// Create an edit for the given location and text. +/// Creates an edit for the given location and text. pub fn make_workspace_edit(location: Location, new_text: String) -> WorkspaceEdit { let changes = vec![(location.uri, vec![TextEdit { range: location.range, new_text }])] .into_iter() @@ -67,14 +66,14 @@ pub mod ls_util { use super::*; use crate::Span; - /// Convert a language server protocol range into an RLS range. - /// NOTE: This does not translate LSP UTF-16 code units offsets into Unicode + /// Converts a language server protocol range into an RLS range. + /// NOTE: this does not translate LSP UTF-16 code units offsets into Unicode /// Scalar Value offsets as expected by RLS/Rust. pub fn range_to_rls(r: Range) -> span::Range { span::Range::from_positions(position_to_rls(r.start), position_to_rls(r.end)) } - /// Convert a language server protocol position into an RLS position. + /// Converts a language server protocol position into an RLS position. pub fn position_to_rls(p: Position) -> span::Position { span::Position::new( span::Row::new_zero_indexed(p.line as u32), @@ -82,20 +81,20 @@ pub mod ls_util { ) } - /// Convert a language server protocol location into an RLS span. + /// Converts a language server protocol location into an RLS span. pub fn location_to_rls( l: &Location, ) -> Result, UrlFileParseError> { parse_file_path(&l.uri).map(|path| Span::from_range(range_to_rls(l.range), path)) } - /// Convert an RLS span into a language server protocol location. + /// Converts an RLS span into a language server protocol location. pub fn rls_to_location(span: &Span) -> Location { - // An RLS span has the same info as an LSP Location + // An RLS span has the same info as an LSP `Location`. 
Location { uri: Url::from_file_path(&span.file).unwrap(), range: rls_to_range(span.range) } } - /// Convert an RLS location into a language server protocol location. + /// Converts an RLS location into a language server protocol location. pub fn rls_location_to_location(l: &span::Location) -> Location { Location { uri: Url::from_file_path(&l.file).unwrap(), @@ -103,12 +102,12 @@ pub mod ls_util { } } - /// Convert an RLS range into a language server protocol range. + /// Converts an RLS range into a language server protocol range. pub fn rls_to_range(r: span::Range) -> Range { Range { start: rls_to_position(r.start()), end: rls_to_position(r.end()) } } - /// Convert an RLS position into a language server protocol range. + /// Converts an RLS position into a language server protocol range. pub fn rls_to_position(p: span::Position) -> Position { Position { line: p.row.0.into(), character: p.col.0.into() } } @@ -132,17 +131,17 @@ pub mod ls_util { .last() .expect("String is not empty.") .chars() - // LSP uses UTF-16 code units offset + // LSP uses UTF-16 code units offset. .map(|chr| chr.len_utf16() as u64) .sum() }; - // range is zero-based and the end position is exclusive + // Range is zero-based and end position is exclusive. Range { start: Position::new(0, 0), end: Position::new(line_count, col) } } } } -/// Convert an RLS def-kind to a language server protocol symbol-kind. +/// Converts an RLS def-kind to a language server protocol symbol-kind. pub fn source_kind_from_def_kind(k: DefKind) -> SymbolKind { match k { DefKind::Enum | DefKind::Union => SymbolKind::Enum, @@ -160,7 +159,7 @@ pub fn source_kind_from_def_kind(k: DefKind) -> SymbolKind { } } -/// What kind of completion is this racer match type? +/// Indicates the kind of completion for this racer match type. pub fn completion_kind_from_match_type(m: racer::MatchType) -> CompletionItemKind { match m { racer::MatchType::Crate | racer::MatchType::Module => CompletionItemKind::Module, @@ -191,7 +190,7 @@ pub fn completion_kind_from_match_type(m: racer::MatchType) -> CompletionItemKin } } -/// Convert a racer match into an RLS completion. +/// Converts a racer match into an RLS completion. pub fn completion_item_from_racer_match(m: &racer::Match) -> CompletionItem { let mut item = CompletionItem::new_simple(m.matchstr.clone(), m.contextstr.clone()); item.kind = Some(completion_kind_from_match_type(m.mtype.clone())); @@ -208,9 +207,9 @@ pub fn completion_item_from_racer_match(m: &racer::Match) -> CompletionItem { /* ------ Extension methods for JSON-RPC protocol types ------ */ -/// Provide additional methods for the remote `Range` type +/// Provides additional methods for the remote `Range` type. pub trait RangeExt { - /// Do both Ranges overlap? + /// `true` if both `Range`s overlap. fn overlaps(&self, other: &Self) -> bool; } @@ -220,7 +219,7 @@ impl RangeExt for Range { } } -/// `DidChangeConfigurationParams.settings` payload reading the { rust: {...} } bit. +/// `DidChangeConfigurationParams.settings` payload reading the `{ rust: {...} }` bit. #[derive(Debug, Deserialize)] pub struct ChangeConfigSettings { pub rust: config::Config, @@ -263,7 +262,7 @@ impl ChangeConfigSettings { #[derive(Debug, Deserialize)] #[serde(default, rename_all = "camelCase")] pub struct InitializationOptions { - /// Should the build not be triggered immediately after receiving `initialize` + /// `true` if build should not be triggered immediately after receiving `initialize`. 
pub omit_init_build: bool, pub cmd_run: bool, /// `DidChangeConfigurationParams.settings` payload for upfront configuration. @@ -315,9 +314,9 @@ pub struct ClientCapabilities { impl ClientCapabilities { pub fn new(params: &lsp_types::InitializeParams) -> ClientCapabilities { - // lsp_types::ClientCapabilities is a rather awkward object to use internally - // (for instance it doesn't Clone). Instead we pick out the bits of it that we - // are going to handle into ClientCapabilities. The upside of + // `lsp_types::ClientCapabilities` is a rather awkward object to use internally + // (for instance, it doesn't `Clone`). Instead we pick out the bits of it that we + // are going to handle into `ClientCapabilities`. The upside of // using this very simple struct is that it can be kept thread safe // without mutex locking it on every request. let code_completion_has_snippet_support = params @@ -356,7 +355,7 @@ impl LSPNotification for DiagnosticsBegin { } /// Custom LSP notification sent to client indicating that data processing started -/// by a `rustDocument`/diagnosticsBegin` has ended. +/// by a `rustDocument/diagnosticsBegin` has ended. /// For each `diagnosticsBegin` message, there is a single `diagnosticsEnd` message. /// This means that for multiple active `diagnosticsBegin` messages, there will /// be sent multiple `diagnosticsEnd` notifications. @@ -388,10 +387,8 @@ impl notification::Notification for Progress { const METHOD: &'static str = NOTIFICATION__Progress; } -/** - * The progress notification is sent from the server to the client to ask the client - * to indicate progress. - */ +// The progress notification is sent from the server to the client to ask the client +// to indicate progress. #[allow(non_upper_case_globals)] pub const NOTIFICATION__Progress: &str = "window/progress"; diff --git a/rls/src/main.rs b/rls/src/main.rs index 94700f5326e..be8b567b996 100644 --- a/rls/src/main.rs +++ b/rls/src/main.rs @@ -20,8 +20,7 @@ use std::sync::Arc; const RUSTC_WRAPPER_ENV_VAR: &str = "RUSTC_WRAPPER"; -/// The main entry point to the RLS. Parses CLI arguments and then runs the -/// server. +/// The main entry point to the RLS. Parses CLI arguments and then runs the server. pub fn main() { let exit_code = main_inner(); std::process::exit(exit_code); diff --git a/rls/src/server/dispatch.rs b/rls/src/server/dispatch.rs index cd22c114f36..ef3b60dab20 100644 --- a/rls/src/server/dispatch.rs +++ b/rls/src/server/dispatch.rs @@ -1,14 +1,10 @@ -// Copyright 2017 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. +use std::sync::mpsc; +use std::thread; +use std::time::Duration; + +use jsonrpc_core::types::ErrorCode; +use log::debug; -use super::requests::*; use crate::actions::work_pool; use crate::actions::work_pool::WorkDescription; use crate::actions::InitActionContext; @@ -18,11 +14,8 @@ use crate::server; use crate::server::io::Output; use crate::server::message::ResponseError; use crate::server::{Request, Response}; -use jsonrpc_core::types::ErrorCode; -use log::debug; -use std::sync::mpsc; -use std::thread; -use std::time::Duration; + +use super::requests::*; /// Timeout time for request responses. By default a LSP client request not /// responded to after this duration will return a fallback response. 
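Aside (illustration only, not part of the patch): `DEFAULT_REQUEST_TIMEOUT` above bounds how long a dispatched request may run before its fallback response is used. A minimal sketch of that timeout-with-fallback pattern; `run_with_fallback` and its channel setup are illustrative assumptions, not the actual `work_pool`/`Dispatcher` API:

use std::sync::mpsc;
use std::thread;
use std::time::Duration;

// Illustrative only: run `work` on a separate thread and return `fallback`
// if it does not produce a result within `timeout`.
fn run_with_fallback<T: Send + 'static>(
    work: impl FnOnce() -> T + Send + 'static,
    fallback: T,
    timeout: Duration,
) -> T {
    let (tx, rx) = mpsc::channel();
    thread::spawn(move || {
        // The receiver may already have timed out and hung up; ignore that.
        let _ = tx.send(work());
    });
    rx.recv_timeout(timeout).unwrap_or(fallback)
}

fn main() {
    let answer = run_with_fallback(|| "real response", "fallback response", Duration::from_millis(10));
    println!("{}", answer);
}

As the macro below notes, the real dispatcher additionally re-checks the elapsed time on the worker thread before starting expensive work that has already timed out.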
@@ -36,7 +29,8 @@ pub const DEFAULT_REQUEST_TIMEOUT: Duration = Duration::from_millis(3_600_000); /// Macro enum `DispatchRequest` packing in various similar `Request` types macro_rules! define_dispatch_request_enum { ($($request_type:ident),*$(,)*) => { - #[allow(clippy::large_enum_variant)] // seems ok for a short lived macro-enum + // Seems ok for a short-lived macro-enum. + #[allow(clippy::large_enum_variant)] pub(crate) enum DispatchRequest { $( $request_type(Request<$request_type>), @@ -60,10 +54,10 @@ macro_rules! define_dispatch_request_enum { let timeout = $request_type::timeout(); let receiver = work_pool::receive_from_thread(move || { - // checking timeout here can prevent starting expensive work that has - // already timed out due to previous long running requests + // Checking timeout here can prevent starting expensive work that has + // already timed out due to previous long running requests. // Note: done here on the threadpool as pool scheduling may incur - // a further delay + // a further delay. if received.elapsed() >= timeout { $request_type::fallback_response() } @@ -117,7 +111,7 @@ pub(crate) struct Dispatcher { } impl Dispatcher { - /// Creates a new `Dispatcher` starting a new thread and channel + /// Creates a new `Dispatcher` starting a new thread and channel. pub(crate) fn new(out: O) -> Self { let (sender, receiver) = mpsc::channel::<(DispatchRequest, InitActionContext, JobToken)>(); @@ -134,7 +128,7 @@ impl Dispatcher { Self { sender } } - /// Sends a request to the dispatch-worker thread, does not block + /// Sends a request to the dispatch-worker thread; does not block. pub(crate) fn dispatch>( &mut self, request: R, @@ -143,26 +137,26 @@ impl Dispatcher { let (job, token) = ConcurrentJob::new(); ctx.add_job(job); if let Err(err) = self.sender.send((request.into(), ctx, token)) { - debug!("Failed to dispatch request: {:?}", err); + debug!("failed to dispatch request: {:?}", err); } } } -/// Stdin-nonblocking request logic designed to be packed into a `DispatchRequest` +/// Stdin-non-blocking request logic designed to be packed into a `DispatchRequest` /// and handled on the `WORK_POOL` via a `Dispatcher`. pub trait RequestAction: LSPRequest { - /// Serializable response type + /// Serializable response type. type Response: server::Response + Send; - /// Max duration this request should finish within, also see `fallback_response()` + /// Max duration this request should finish within; also see `fallback_response()`. fn timeout() -> Duration { DEFAULT_REQUEST_TIMEOUT } - /// Returns a response used in timeout scenarios + /// Returns a response used in timeout scenarios. fn fallback_response() -> Result; - /// Request processing logic + /// Request processing logic. fn handle( ctx: InitActionContext, params: Self::Params, diff --git a/rls/src/server/io.rs b/rls/src/server/io.rs index 9b1f6fab1d1..aadc0ab9b14 100644 --- a/rls/src/server/io.rs +++ b/rls/src/server/io.rs @@ -1,13 +1,3 @@ -// Copyright 2017 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - use log::{debug, trace}; use serde; use serde_json; @@ -22,7 +12,7 @@ use std::sync::Arc; use jsonrpc_core::{self as jsonrpc, response, version, Id}; -/// A trait for anything that can read language server input messages. 
+/// Anything that can read language server input messages. pub trait MessageReader { /// Read the next input message. fn read_message(&self) -> Option; @@ -50,7 +40,7 @@ impl MessageReader for StdioMsgReader { // The input is expected to provide a message as described by "Base Protocol" of Language Server // Protocol. fn read_message(input: &mut R) -> Result { - // Read in the "Content-Length: xx" part + // Read in the "Content-Length: xx" part. let mut size: Option = None; loop { let mut buffer = String::new(); @@ -71,7 +61,7 @@ fn read_message(input: &mut R) -> Result { let res: Vec<&str> = buffer.split(' ').collect(); - // Make sure header is valid + // Make sure header is valid. if res.len() != 2 { return Err(io::Error::new( io::ErrorKind::InvalidData, @@ -116,29 +106,28 @@ fn read_message(input: &mut R) -> Result { String::from_utf8(content).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e)) } -/// Anything that can send notifications and responses to a language server -/// client. +/// Anything that can send notifications and responses to a language server client. pub trait Output: Sync + Send + Clone + 'static { - /// Send a response string along the output. + /// Sends a response string along the output. fn response(&self, output: String); - /// Get a new unique ID. + /// Gets a new unique ID. fn provide_id(&self) -> RequestId; - /// Notify the client of a failure. + /// Notifies the client of a failure. fn failure(&self, id: jsonrpc::Id, error: jsonrpc::Error) { let response = response::Failure { jsonrpc: Some(version::Version::V2), id, error }; self.response(serde_json::to_string(&response).unwrap()); } - /// Notify the client of a failure with the given diagnostic message. + /// Notifies the client of a failure with the given diagnostic message. fn failure_message>(&self, id: RequestId, code: jsonrpc::ErrorCode, msg: M) { let error = jsonrpc::Error { code, message: msg.into(), data: None }; self.failure(Id::from(&id), error); } - /// Send a successful response or notification along the output. + /// Sends a successful response or notification along the output. fn success(&self, id: RequestId, data: &D) { let data = match serde_json::to_string(data) { Ok(data) => data, @@ -159,7 +148,7 @@ pub trait Output: Sync + Send + Clone + 'static { self.response(output); } - /// Send a notification along the output. + /// Sends a notification along the output. fn notify(&self, notification: Notification) where A: LSPNotification, @@ -186,7 +175,7 @@ pub(super) struct StdioOutput { } impl StdioOutput { - /// Construct a new `stdout` output. + /// Constructs a new `stdout` output. pub(crate) fn new() -> StdioOutput { StdioOutput { next_id: Arc::new(AtomicU64::new(1)) } } diff --git a/rls/src/server/message.rs b/rls/src/server/message.rs index 91fba10859f..e7df6dfd7f5 100644 --- a/rls/src/server/message.rs +++ b/rls/src/server/message.rs @@ -1,16 +1,9 @@ -// Copyright 2017 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! Traits and structs for message handling - -use crate::actions::InitActionContext; +//! Traits and structs for message handling. 
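Aside (illustration only, not part of the patch): `read_message` above parses the LSP "Base Protocol" framing -- a `Content-Length` header, a blank line, then exactly that many bytes of JSON. A sketch of the writing side under that assumption; `frame_message` is a hypothetical helper, not an RLS function:

// Illustrative only: frame a serialized JSON-RPC message the way an LSP client
// or server writes it to the wire. `Content-Length` counts bytes, not chars.
fn frame_message(json_body: &str) -> String {
    format!("Content-Length: {}\r\n\r\n{}", json_body.len(), json_body)
}

fn main() {
    let framed = frame_message(r#"{"jsonrpc":"2.0","method":"initialized","params":{}}"#);
    print!("{}", framed);
}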
+ +use std::fmt; +use std::marker::PhantomData; +use std::time::Instant; + use jsonrpc_core::{self as jsonrpc, Id}; use log::debug; use lsp_types::notification::ShowMessage; @@ -20,14 +13,10 @@ use serde::Deserialize; use serde_derive::Serialize; use serde_json; -use crate::actions::ActionContext; +use crate::actions::{ActionContext, InitActionContext}; use crate::lsp_data::{LSPNotification, LSPRequest, MessageType, ShowMessageParams, WorkspaceEdit}; use crate::server::io::Output; -use std::fmt; -use std::marker::PhantomData; -use std::time::Instant; - /// A response that just acknowledges receipt of its request. #[derive(Debug, Serialize)] pub struct Ack; @@ -38,7 +27,7 @@ pub struct NoResponse; /// A response to some request. pub trait Response { - /// Send the response along the given output. + /// Sends the response along the given output. fn send(self, id: RequestId, out: &O); } @@ -52,12 +41,12 @@ impl Response for R { } } -/// Wrapper for a response error +/// Wrapper for a response error. #[derive(Debug)] pub enum ResponseError { - /// Error with no special response to the client + /// Error with no special response to the client. Empty, - /// Error with a response to the client + /// Error with a response to the client. Message(jsonrpc::ErrorCode, String), } @@ -76,7 +65,7 @@ pub enum ResponseWithMessage { Warn(String), } -/// A response which has a default value. +/// A response that has a default value. pub trait DefaultResponse: Response + ::serde::Serialize + fmt::Debug { fn default() -> Self; } @@ -107,15 +96,15 @@ impl DefaultResponse for WorkspaceEdit { /// An action taken in response to some notification from the client. /// Blocks stdin whilst being handled. pub trait BlockingNotificationAction: LSPNotification { - /// Handle this notification. + /// Handles this notification. fn handle(_: Self::Params, _: &mut InitActionContext, _: O) -> Result<(), ()>; } -/// A request that blocks stdin whilst being handled +/// A request that blocks stdin whilst being handled. pub trait BlockingRequestAction: LSPRequest { type Response: Response + fmt::Debug; - /// Handle request and return its response. Output is also provided for additional messaging. + /// Handles request and returns its response. Output is also provided for additional messaging. fn handle( id: RequestId, params: Self::Params, @@ -126,7 +115,7 @@ pub trait BlockingRequestAction: LSPRequest { /// A request ID as defined by language server protocol. /// -/// It only describes valid request ids - a case for notification (where id is not specified) is +/// It only describes valid request IDs -- a case for notification (where `id` is not specified) is /// not included here. #[derive(Debug, PartialEq, Clone, Hash, Eq)] pub enum RequestId { @@ -154,7 +143,7 @@ impl<'a> From<&'a RequestId> for Id { /// A request that gets JSON serialized in the language server protocol. pub struct Request { - /// The unique request id. + /// The unique request ID. pub id: RequestId, /// The time the request was received / processed by the main stdin reading thread. pub received: Instant, @@ -198,7 +187,7 @@ where let params = match serde_json::to_value(&request.params).unwrap() { params @ serde_json::Value::Array(_) | params @ serde_json::Value::Object(_) | - // Internally we represent missing params by Null + // We represent missing params internally by Null. 
params @ serde_json::Value::Null => params, _ => unreachable!("Bad parameter type found for {:?} request", method), }; @@ -218,7 +207,7 @@ where let params = match serde_json::to_value(&notification.params).unwrap() { params @ serde_json::Value::Array(_) | params @ serde_json::Value::Object(_) | - // Internally we represent missing params by Null + // We represent missing params internally by Null. params @ serde_json::Value::Null => params, _ => unreachable!("Bad parameter type found for {:?} request", method), }; @@ -293,7 +282,7 @@ impl RawMessage { let params = R::Params::deserialize(&self.params) .or_else(|e| { - // Avoid tedious type errors trying to deserialize `()` + // Avoid tedious type errors trying to deserialize `()`. if std::mem::size_of::() == 0 { R::Params::deserialize(&serde_json::Value::Null).map_err(|_| e) } else { @@ -331,28 +320,29 @@ impl RawMessage { let ls_command: serde_json::Value = serde_json::from_str(msg).map_err(|_| jsonrpc::Error::parse_error())?; - // Per JSON-RPC/LSP spec, Requests must have id, whereas Notifications can't + // Per JSON-RPC/LSP spec, Requests must have an ID, whereas Notifications cannot have one. let id = ls_command .get("id") .map_or(Id::Null, |id| serde_json::from_value(id.to_owned()).unwrap()); let method = match ls_command.get("method") { Some(method) => method, - // No method means this is a response to one of our requests. FIXME: we should - // confirm these, but currently just ignore them. + // No method means this is a response to one of our requests. + // FIXME: we should confirm these, but currently just ignore them. None => return Ok(None), }; let method = method.as_str().ok_or_else(jsonrpc::Error::invalid_request)?.to_owned(); - // Representing internally a missing parameter as Null instead of None, + // Representing a missing parameter as Null internally instead of `None`, // (Null being unused value of param by the JSON-RPC 2.0 spec) - // to unify the type handling – now the parameter type implements Deserialize. + // in order to unify the type handling -- now the parameter type implements + // `Deserialize`. let params = match ls_command.get("params").map(|p| p.to_owned()) { Some(params @ serde_json::Value::Object(..)) | Some(params @ serde_json::Value::Array(..)) => params, // Null as input value is not allowed by JSON-RPC 2.0, - // but including it for robustness + // but including it for robustness. Some(serde_json::Value::Null) | None => serde_json::Value::Null, _ => return Err(jsonrpc::Error::invalid_request()), }; @@ -361,8 +351,8 @@ impl RawMessage { } } -// Added so we can prepend with extra constant "jsonrpc": "2.0" key. -// Should be resolved once https://github.com/serde-rs/serde/issues/760 is fixed. +// Added so we can prepend with extra constant `"jsonrpc": "2.0"` key. +// Should be resolved once <https://github.com/serde-rs/serde/issues/760> is fixed. impl Serialize for RawMessage { fn serialize(&self, serializer: S) -> Result { let serialize_id = match self.id { @@ -409,7 +399,7 @@ mod test { assert_eq!(notification._action, expected._action); } - // http://www.jsonrpc.org/specification#request_object + // See <http://www.jsonrpc.org/specification#request_object>. #[test] fn raw_message_parses_valid_jsonrpc_request_with_string_id() { let raw_json = json!({ @@ -422,7 +412,7 @@ mod test { let expected_msg = RawMessage { method: "someRpcCall".to_owned(), id: Id::Str("abc".to_owned()), - // Internally missing parameters are represented as null + // Missing parameters are represented internally as Null.
params: serde_json::Value::Null, }; assert_eq!(expected_msg, RawMessage::try_parse(&raw_json).unwrap().unwrap()); @@ -440,7 +430,7 @@ mod test { let expected_msg = RawMessage { method: "someRpcCall".to_owned(), id: Id::Num(1), - // Internally missing parameters are represented as null + // Missing parameters are represented internally as Null. params: serde_json::Value::Null, }; assert_eq!(expected_msg, RawMessage::try_parse(&raw_json).unwrap().unwrap()); diff --git a/rls/src/server/mod.rs b/rls/src/server/mod.rs index e89c1efc74c..7debcdcd8c3 100644 --- a/rls/src/server/mod.rs +++ b/rls/src/server/mod.rs @@ -1,13 +1,3 @@ -// Copyright 2017 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - //! Implementation of the server loop, and traits for extending server //! interactions (for example, to add support for handling new types of //! requests). @@ -50,7 +40,7 @@ mod message; const NOT_INITIALIZED_CODE: ErrorCode = ErrorCode::ServerError(-32002); -/// Run the Rust Language Server. +/// Runs the Rust Language Server. pub fn run_server(analysis: Arc, vfs: Arc) -> i32 { debug!("Language Server starting up. Version: {}", version()); let service = LsService::new( @@ -75,7 +65,7 @@ impl BlockingRequestAction for ShutdownRequest { _out: O, ) -> Result { if let Ok(ctx) = ctx.inited() { - // Currently we don't perform an explicit cleanup, other than storing state + // Currently we don't perform an explicit clean-up, other than storing state. ctx.shut_down.store(true, Ordering::SeqCst); Ok(Ack) } else { @@ -151,7 +141,7 @@ impl BlockingRequestAction for InitializeRequest { if ctx.inited().is_ok() { return Err(ResponseError::Message( - // No code in the spec, just use some number + // No code in the spec; just use some number. ErrorCode::ServerError(123), "Already received an initialize request".to_owned(), )); @@ -162,8 +152,8 @@ impl BlockingRequestAction for InitializeRequest { let result = InitializeResult { capabilities: server_caps(ctx) }; - // send response early before `ctx.init` to enforce - // initialize-response-before-all-other-messages constraint + // Send response early before `ctx.init` to enforce + // initialize-response-before-all-other-messages constraint. result.send(id, &out); let capabilities = lsp_data::ClientCapabilities::new(¶ms); @@ -182,7 +172,7 @@ pub struct LsService { } impl LsService { - /// Construct a new language server service. + /// Constructs a new language server service. pub fn new( analysis: Arc, vfs: Arc, @@ -200,7 +190,7 @@ impl LsService { } } - /// Run this language service. + /// Runs this language service. pub fn run(mut self) -> i32 { loop { match self.handle_message() { @@ -241,7 +231,8 @@ impl LsService { <$br_action as LSPRequest>::METHOD => { let request: Request<$br_action> = msg.parse_as_request()?; - // block until all nonblocking requests have been handled ensuring ordering + // Block until all non-blocking requests have been handled ensuring + // ordering. self.wait_for_concurrent_jobs(); let req_id = request.id.clone(); @@ -329,7 +320,7 @@ impl LsService { Ok(()) } - /// Read a message from the language server reader input and handle it with + /// Reads a message from the language server reader input and handle it with /// the appropriate action. 
Returns a `ServerStateChange` that describes how /// the service should proceed now that the message has been handled. pub fn handle_message(&mut self) -> ServerStateChange { @@ -357,7 +348,7 @@ impl LsService { trace!("Parsed message `{:?}`", raw_message); // If we're in shutdown mode, ignore any messages other than 'exit'. - // This is not actually in the spec, I'm not sure we should do this, + // This is not actually in the spec; I'm not sure we should do this, // but it kinda makes sense. { let shutdown_mode = match self.ctx { @@ -391,11 +382,10 @@ impl LsService { } } -/// How should the server proceed? +/// Indicates how the server should proceed. #[derive(Eq, PartialEq, Debug, Clone, Copy)] pub enum ServerStateChange { - /// Continue serving responses to requests and sending notifications to the - /// client. + /// Continue serving responses to requests and sending notifications to the client. Continue, /// Stop the server. Break { exit_code: i32 }, @@ -511,7 +501,7 @@ mod test { assert_eq!(get_root_path(&params), root_path); } - /// Some clients send empty object params for void params requests (see #1038) + /// Some clients send empty object params for void params requests (see issue #1038). #[test] fn parse_shutdown_object_params() { let raw = RawMessage::try_parse( diff --git a/tests/fixtures/deglob/src/main.rs b/tests/fixtures/deglob/src/main.rs index 478226972dc..721c8674c1d 100644 --- a/tests/fixtures/deglob/src/main.rs +++ b/tests/fixtures/deglob/src/main.rs @@ -1,13 +1,3 @@ -// Copyright 2017 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - // single wildcard import // imports two values use std::io::*; diff --git a/tests/fixtures/find_impls/src/main.rs b/tests/fixtures/find_impls/src/main.rs index ee0b0f5f659..0812b576a19 100644 --- a/tests/fixtures/find_impls/src/main.rs +++ b/tests/fixtures/find_impls/src/main.rs @@ -1,12 +1,3 @@ -// Copyright 2017 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. #![allow(dead_code)] #[derive(PartialEq)] diff --git a/tests/fixtures/hover/src/lib.rs b/tests/fixtures/hover/src/lib.rs index c862e1bdcc9..763f266e441 100644 --- a/tests/fixtures/hover/src/lib.rs +++ b/tests/fixtures/hover/src/lib.rs @@ -1,12 +1,3 @@ -// Copyright 2017 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. #![allow(dead_code, unused_imports)] extern crate fnv; diff --git a/tests/fixtures/hover/src/test_extract_decl.rs b/tests/fixtures/hover/src/test_extract_decl.rs index b832f089887..449a263b018 100644 --- a/tests/fixtures/hover/src/test_extract_decl.rs +++ b/tests/fixtures/hover/src/test_extract_decl.rs @@ -1,13 +1,3 @@ -// Copyright 2017 The Rust Project Developers.
See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - pub fn foo() -> Foo { Foo { t: 1 } } diff --git a/tests/fixtures/hover/src/test_extract_decl_multiline_empty_function.rs b/tests/fixtures/hover/src/test_extract_decl_multiline_empty_function.rs index a98e942c510..d4c7ed50114 100644 --- a/tests/fixtures/hover/src/test_extract_decl_multiline_empty_function.rs +++ b/tests/fixtures/hover/src/test_extract_decl_multiline_empty_function.rs @@ -1,23 +1,3 @@ -// Copyright 2017 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -// Copyright 2017 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - /// Do nothing with matices. fn matrix_something( _a_matrix: [[f32; 4]; 4], diff --git a/tests/fixtures/hover/src/test_extract_docs_attributes.rs b/tests/fixtures/hover/src/test_extract_docs_attributes.rs index bef8c0b5692..b3519078f2b 100644 --- a/tests/fixtures/hover/src/test_extract_docs_attributes.rs +++ b/tests/fixtures/hover/src/test_extract_docs_attributes.rs @@ -1,13 +1,3 @@ -// Copyright 2017 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - /// Begin multiline attribute /// /// Cras malesuada mattis massa quis ornare. Suspendisse in ex maximus, diff --git a/tests/fixtures/hover/src/test_extract_docs_comment_block.rs b/tests/fixtures/hover/src/test_extract_docs_comment_block.rs index 1e39007e787..cfe9d8b1619 100644 --- a/tests/fixtures/hover/src/test_extract_docs_comment_block.rs +++ b/tests/fixtures/hover/src/test_extract_docs_comment_block.rs @@ -1,13 +1,3 @@ -// Copyright 2017 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - //////////////////////////////////////////////////////////////////////////////// // Free functions //////////////////////////////////////////////////////////////////////////////// diff --git a/tests/fixtures/hover/src/test_extract_docs_empty_line_before_decl.rs b/tests/fixtures/hover/src/test_extract_docs_empty_line_before_decl.rs index 4e020317862..4c36260142b 100644 --- a/tests/fixtures/hover/src/test_extract_docs_empty_line_before_decl.rs +++ b/tests/fixtures/hover/src/test_extract_docs_empty_line_before_decl.rs @@ -1,13 +1,3 @@ -// Copyright 2017 The Rust Project Developers. 
See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - /// Begin empty before decl /// /// Cras malesuada mattis massa quis ornare. Suspendisse in ex maximus, diff --git a/tests/fixtures/hover/src/test_extract_docs_module_docs.rs b/tests/fixtures/hover/src/test_extract_docs_module_docs.rs index 35b48a96f0d..a2739d20bec 100644 --- a/tests/fixtures/hover/src/test_extract_docs_module_docs.rs +++ b/tests/fixtures/hover/src/test_extract_docs_module_docs.rs @@ -1,13 +1,3 @@ -// Copyright 2017 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - //! Begin module docs //! //! Lorem ipsum dolor sit amet, consectetur adipiscing elit. Maecenas diff --git a/tests/fixtures/hover/src/test_extract_docs_module_docs_with_attribute.rs b/tests/fixtures/hover/src/test_extract_docs_module_docs_with_attribute.rs index 65a178d95f5..001c9716c7b 100644 --- a/tests/fixtures/hover/src/test_extract_docs_module_docs_with_attribute.rs +++ b/tests/fixtures/hover/src/test_extract_docs_module_docs_with_attribute.rs @@ -1,13 +1,3 @@ -// Copyright 2017 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - //! Begin module docs //! //! Lorem ipsum dolor sit amet, consectetur adipiscing elit. Maecenas diff --git a/tests/fixtures/hover/src/test_tooltip_01.rs b/tests/fixtures/hover/src/test_tooltip_01.rs index 5320ce41cb7..c755dbf3b33 100644 --- a/tests/fixtures/hover/src/test_tooltip_01.rs +++ b/tests/fixtures/hover/src/test_tooltip_01.rs @@ -1,13 +1,3 @@ -// Copyright 2017 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - /// Foo enum #[derive(Copy, Clone)] pub enum Foo { diff --git a/tests/fixtures/hover/src/test_tooltip_mod.rs b/tests/fixtures/hover/src/test_tooltip_mod.rs index 08f8a1c35ac..c8add03b585 100644 --- a/tests/fixtures/hover/src/test_tooltip_mod.rs +++ b/tests/fixtures/hover/src/test_tooltip_mod.rs @@ -1,13 +1,3 @@ -// Copyright 2017 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - //! Begin module docs //! //! Lorem ipsum dolor sit amet, consectetur adipiscing elit. 
Maecenas diff --git a/tests/fixtures/hover/src/test_tooltip_mod_use.rs b/tests/fixtures/hover/src/test_tooltip_mod_use.rs index a0c7e88944d..dc788c1d0d9 100644 --- a/tests/fixtures/hover/src/test_tooltip_mod_use.rs +++ b/tests/fixtures/hover/src/test_tooltip_mod_use.rs @@ -1,13 +1,3 @@ -// Copyright 2017 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - use test_tooltip_mod; use test_tooltip_mod::First; use test_tooltip_mod::sub_module; \ No newline at end of file diff --git a/tests/fixtures/hover/src/test_tooltip_mod_use_external.rs b/tests/fixtures/hover/src/test_tooltip_mod_use_external.rs index 6cfacd00fd5..9b1593f51a5 100644 --- a/tests/fixtures/hover/src/test_tooltip_mod_use_external.rs +++ b/tests/fixtures/hover/src/test_tooltip_mod_use_external.rs @@ -1,13 +1,3 @@ -// Copyright 2017 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - use fnv; use fnv::FnvHasher; diff --git a/tests/fixtures/hover/src/test_tooltip_std.rs b/tests/fixtures/hover/src/test_tooltip_std.rs index 44eb9fc2cef..1c5b70b8724 100644 --- a/tests/fixtures/hover/src/test_tooltip_std.rs +++ b/tests/fixtures/hover/src/test_tooltip_std.rs @@ -1,13 +1,3 @@ -// Copyright 2017 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - // Spot check several stdlib items and verify that the the doc_url // is correctly included for traits. The tests around the stdlib // are subject to breakage due to changes in docs, so these tests diff --git a/tests/fixtures/lens_run/src/main.rs b/tests/fixtures/lens_run/src/main.rs index ae6f2cfb85e..2b24e927d1e 100644 --- a/tests/fixtures/lens_run/src/main.rs +++ b/tests/fixtures/lens_run/src/main.rs @@ -1,13 +1,3 @@ -// Copyright 2017 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - pub fn main() { } diff --git a/tests/fixtures/reformat/src/main.rs b/tests/fixtures/reformat/src/main.rs index 8caef324630..3aeec5620da 100644 --- a/tests/fixtures/reformat/src/main.rs +++ b/tests/fixtures/reformat/src/main.rs @@ -1,12 +1,2 @@ -// Copyright 2017 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. 
- pub mod foo ; pub fn main() { let world = "world"; println!("Hello, {}!", world); } diff --git a/tests/fixtures/reformat_with_range/src/main.rs b/tests/fixtures/reformat_with_range/src/main.rs index 01b26cb6928..ab9675ab5ef 100644 --- a/tests/fixtures/reformat_with_range/src/main.rs +++ b/tests/fixtures/reformat_with_range/src/main.rs @@ -1,13 +1,3 @@ -// Copyright 2017 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - pub fn main() { let world1 = "world"; println!("Hello, {}!", world1);