diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index fb8e0c92d33..1af14608446 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -499,7 +499,7 @@ jobs: toolchain: ${{ env.RUST_VERSION }} - uses: Swatinem/rust-cache@v2 - name: Run sway-lsp tests sequentially - run: cargo test --locked --release -p sway-lsp -- --test-threads=1 + run: cargo test --locked --release -p sway-lsp -- --nocapture --test-threads=1 cargo-test-workspace: runs-on: ubuntu-latest steps: diff --git a/Cargo.lock b/Cargo.lock index ff35c75828e..85636de0283 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6731,6 +6731,7 @@ dependencies = [ "assert-json-diff", "futures", "lsp-types", + "rand", "serde", "serde_json", "tokio", diff --git a/sway-core/src/build_config.rs b/sway-core/src/build_config.rs index 375a45d2208..7462bdd7612 100644 --- a/sway-core/src/build_config.rs +++ b/sway-core/src/build_config.rs @@ -1,5 +1,5 @@ use serde::{Deserialize, Deserializer, Serialize}; -use std::{path::PathBuf, sync::Arc}; +use std::{collections::BTreeMap, path::PathBuf, sync::Arc}; use strum::{Display, EnumString}; #[derive( @@ -217,6 +217,9 @@ pub struct LspConfig { // // This is set to false if compilation was triggered by a didSave or didOpen LSP event. pub optimized_build: bool, + // The value of the `version` field in the `DidChangeTextDocumentParams` struct. + // This is used to determine if the file has been modified since the last compilation. + pub file_versions: BTreeMap>, } #[cfg(test)] diff --git a/sway-core/src/lib.rs b/sway-core/src/lib.rs index 943f95d525b..20b7037f516 100644 --- a/sway-core/src/lib.rs +++ b/sway-core/src/lib.rs @@ -110,6 +110,7 @@ pub fn parse( config.build_target, config.include_tests, config.experimental, + config.lsp_mode.as_ref(), ) .map( |ParsedModuleTree { @@ -254,6 +255,7 @@ fn parse_submodules( build_target: BuildTarget, include_tests: bool, experimental: ExperimentalFlags, + lsp_mode: Option<&LspConfig>, ) -> Submodules { // Assume the happy path, so there'll be as many submodules as dependencies, but no more. let mut submods = Vec::with_capacity(module.submodules().count()); @@ -287,6 +289,7 @@ fn parse_submodules( build_target, include_tests, experimental, + lsp_mode, ) { if !matches!(kind, parsed::TreeType::Library) { let source_id = engines.se().get_source_id(submod_path.as_ref()); @@ -340,6 +343,7 @@ fn parse_module_tree( build_target: BuildTarget, include_tests: bool, experimental: ExperimentalFlags, + lsp_mode: Option<&LspConfig>, ) -> Result { let query_engine = engines.qe(); @@ -359,6 +363,7 @@ fn parse_module_tree( build_target, include_tests, experimental, + lsp_mode, ); // Convert from the raw parsed module to the `ParseTree` ready for type-check. 
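Taken together, the `build_config.rs` and `lib.rs` hunks above thread a per-file version map from the language server into the parser. Below is a minimal sketch of that map, assuming it is keyed by `PathBuf` with `Option<u64>` versions (matching the `as u64` cast and `PathBuf` keys used later in the diff); the `bump_version`/`is_dirty` helpers are hypothetical and only illustrate the intended semantics:

```rust
use std::collections::BTreeMap;
use std::path::{Path, PathBuf};

/// Field names mirror the new `LspConfig`; the helper methods are
/// illustrative only, not part of the compiler API.
#[derive(Debug, Default, Clone)]
pub struct LspConfig {
    pub optimized_build: bool,
    /// `None` means the editor has not reported a version for this file,
    /// so the on-disk contents are still authoritative.
    pub file_versions: BTreeMap<PathBuf, Option<u64>>,
}

impl LspConfig {
    /// Record the latest `DidChangeTextDocumentParams.version` for a file.
    pub fn bump_version(&mut self, path: PathBuf, version: u64) {
        self.file_versions.insert(path, Some(version));
    }

    /// A file is "dirty" if the editor has reported at least one change for it.
    pub fn is_dirty(&self, path: &Path) -> bool {
        matches!(self.file_versions.get(path), Some(Some(_)))
    }
}

fn main() {
    let mut cfg = LspConfig::default();
    cfg.bump_version(PathBuf::from("src/main.sw"), 7);
    assert!(cfg.is_dirty(Path::new("src/main.sw")));
    assert!(!cfg.is_dirty(Path::new("src/lib.sw")));
}
```

Inside the compiler the map rides along on `config.lsp_mode`, which is why `parse_module_tree` can stamp each `ModuleCacheEntry` with the version that was current when the module was parsed.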
@@ -403,57 +408,73 @@ fn parse_module_tree( .ok() .and_then(|m| m.modified().ok()); let dependencies = submodules.into_iter().map(|s| s.path).collect::>(); - let parsed_module_tree = ParsedModuleTree { - tree_type: kind, - lexed_module: lexed, - parse_module: parsed, - }; + let version = lsp_mode + .and_then(|lsp| lsp.file_versions.get(path.as_ref()).copied()) + .unwrap_or(None); let cache_entry = ModuleCacheEntry { path, modified_time, hash, dependencies, include_tests, + version, }; query_engine.insert_parse_module_cache_entry(cache_entry); - Ok(parsed_module_tree) + Ok(ParsedModuleTree { + tree_type: kind, + lexed_module: lexed, + parse_module: parsed, + }) } fn is_parse_module_cache_up_to_date( engines: &Engines, path: &Arc, include_tests: bool, + build_config: Option<&BuildConfig>, ) -> bool { let query_engine = engines.qe(); let key = ModuleCacheKey::new(path.clone(), include_tests); let entry = query_engine.get_parse_module_cache_entry(&key); match entry { Some(entry) => { - let modified_time = std::fs::metadata(path.as_path()) - .ok() - .and_then(|m| m.modified().ok()); - // Let's check if we can re-use the dependency information - // we got from the cache, which is only true if the file hasn't been - // modified since or if its hash is the same. - let cache_up_to_date = entry.modified_time == modified_time || { - let src = std::fs::read_to_string(path.as_path()).unwrap(); - - let mut hasher = DefaultHasher::new(); - src.hash(&mut hasher); - let hash = hasher.finish(); - - hash == entry.hash - }; + // we got from the cache. + let cache_up_to_date = build_config + .as_ref() + .and_then(|x| x.lsp_mode.as_ref()) + .and_then(|lsp| { + // First try to get the file version from lsp if it exists + lsp.file_versions.get(path.as_ref()) + }) + .map_or_else( + || { + // Otherwise we can safely read the file from disk here, as the LSP has not modified it, or we are not in LSP mode. + // Check if the file has been modified or if its hash is the same as the last compilation + let modified_time = std::fs::metadata(path.as_path()) + .ok() + .and_then(|m| m.modified().ok()); + entry.modified_time == modified_time || { + let src = std::fs::read_to_string(path.as_path()).unwrap(); + let mut hasher = DefaultHasher::new(); + src.hash(&mut hasher); + let hash = hasher.finish(); + hash == entry.hash + } + }, + |version| { + // The cache is invalid if the lsp version is greater than the last compilation + !version.map_or(false, |v| v > entry.version.unwrap_or(0)) + }, + ); // Look at the dependencies recursively to make sure they have not been // modified either. if cache_up_to_date { - entry - .dependencies - .iter() - .all(|path| is_parse_module_cache_up_to_date(engines, path, include_tests)) + entry.dependencies.iter().all(|path| { + is_parse_module_cache_up_to_date(engines, path, include_tests, build_config) + }) } else { false } @@ -528,7 +549,6 @@ pub fn parsed_to_ast( package_name, build_config, ); - check_should_abort(handler, retrigger_compilation.clone())?; // Only clear the parsed AST nodes if we are running a regular compilation pipeline. @@ -681,16 +701,14 @@ pub fn compile_to_ast( retrigger_compilation: Option>, ) -> Result { check_should_abort(handler, retrigger_compilation.clone())?; - let query_engine = engines.qe(); let mut metrics = PerformanceData::default(); - if let Some(config) = build_config { let path = config.canonical_root_module(); let include_tests = config.include_tests; // Check if we can re-use the data in the cache. 
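The reworked cache check prefers the editor-reported version over disk state: if the LSP tracks the file, no I/O is needed; otherwise it falls back to the old mtime-then-hash comparison. A standalone sketch of that decision, with `CacheEntry` standing in for `ModuleCacheEntry` and the version type assumed to be `u64`:

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
use std::path::Path;
use std::time::SystemTime;

/// Trimmed-down stand-in for `ModuleCacheEntry`: only the fields the
/// freshness check needs.
struct CacheEntry {
    modified_time: Option<SystemTime>,
    hash: u64,
    version: Option<u64>,
}

/// The decision made in `is_parse_module_cache_up_to_date`:
/// `lsp_version` is `None` when the file is not tracked by the LSP at all,
/// `Some(None)` when it is open but has no reported edits, and `Some(Some(v))`
/// once the editor has sent change events.
fn cache_up_to_date(entry: &CacheEntry, lsp_version: Option<Option<u64>>, path: &Path) -> bool {
    match lsp_version {
        // The LSP owns this buffer: the cache is stale only if the editor's
        // version is newer than the one recorded at the last compilation.
        Some(version) => !version.map_or(false, |v| v > entry.version.unwrap_or(0)),
        // Not in LSP mode (or file not tracked): it is safe to read the disk,
        // so compare the mtime first and fall back to hashing the source.
        None => {
            let modified_time = std::fs::metadata(path).ok().and_then(|m| m.modified().ok());
            entry.modified_time == modified_time || {
                let src = std::fs::read_to_string(path).unwrap_or_default();
                let mut hasher = DefaultHasher::new();
                src.hash(&mut hasher);
                hasher.finish() == entry.hash
            }
        }
    }
}

fn main() {
    let entry = CacheEntry { modified_time: None, hash: 0, version: Some(3) };
    // Editor still on version 3 -> reuse the cached module.
    assert!(cache_up_to_date(&entry, Some(Some(3)), Path::new("src/main.sw")));
    // Editor reported version 4 -> the cached parse is stale.
    assert!(!cache_up_to_date(&entry, Some(Some(4)), Path::new("src/main.sw")));
    // Open but never edited -> up to date without touching the disk.
    assert!(cache_up_to_date(&entry, Some(None), Path::new("src/main.sw")));
}
```

A tracked file with no reported edits (`Some(None)`) is treated as up to date without reading the disk, which is what lets unchanged modules skip re-parsing entirely while the LSP owns the buffers.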
- if is_parse_module_cache_up_to_date(engines, &path, include_tests) { + if is_parse_module_cache_up_to_date(engines, &path, include_tests, build_config) { let mut entry = query_engine.get_programs_cache_entry(&path).unwrap(); entry.programs.metrics.reused_modules += 1; diff --git a/sway-core/src/query_engine/mod.rs b/sway-core/src/query_engine/mod.rs index 9b329279a7c..074091cfcb0 100644 --- a/sway-core/src/query_engine/mod.rs +++ b/sway-core/src/query_engine/mod.rs @@ -32,6 +32,7 @@ pub struct ModuleCacheEntry { pub hash: u64, pub dependencies: Vec, pub include_tests: bool, + pub version: Option, } pub type ModuleCacheMap = HashMap; @@ -45,19 +46,11 @@ pub struct ProgramsCacheEntry { pub type ProgramsCacheMap = HashMap; -#[derive(Debug, Default)] +#[derive(Debug, Default, Clone)] pub struct QueryEngine { - parse_module_cache: RwLock, - programs_cache: RwLock, -} - -impl Clone for QueryEngine { - fn clone(&self) -> Self { - Self { - parse_module_cache: RwLock::new(self.parse_module_cache.read().unwrap().clone()), - programs_cache: RwLock::new(self.programs_cache.read().unwrap().clone()), - } - } + // We want the below types wrapped in Arcs to optimize cloning from LSP. + parse_module_cache: Arc>, + programs_cache: Arc>, } impl QueryEngine { diff --git a/sway-core/src/semantic_analysis/module.rs b/sway-core/src/semantic_analysis/module.rs index 5158858e7ad..a98c36717c4 100644 --- a/sway-core/src/semantic_analysis/module.rs +++ b/sway-core/src/semantic_analysis/module.rs @@ -303,6 +303,7 @@ impl ty::TyModule { )?; let mut all_nodes = Self::type_check_nodes(handler, ctx.by_ref(), ordered_nodes)?; + let submodules = submodules_res?; let fallback_fn = collect_fallback_fn(&all_nodes, engines, handler)?; diff --git a/sway-lsp/benches/lsp_benchmarks/compile.rs b/sway-lsp/benches/lsp_benchmarks/compile.rs index 322ff2ac77e..6916582e271 100644 --- a/sway-lsp/benches/lsp_benchmarks/compile.rs +++ b/sway-lsp/benches/lsp_benchmarks/compile.rs @@ -15,6 +15,7 @@ fn benchmarks(c: &mut Criterion) { let uri = Url::from_file_path(super::benchmark_dir().join("src/main.sw")).unwrap(); let mut lsp_mode = Some(sway_core::LspConfig { optimized_build: false, + file_versions: Default::default(), }); c.bench_function("compile", |b| { b.iter(|| { diff --git a/sway-lsp/benches/lsp_benchmarks/mod.rs b/sway-lsp/benches/lsp_benchmarks/mod.rs index 8a9d45d32b8..4afb85d0c45 100644 --- a/sway-lsp/benches/lsp_benchmarks/mod.rs +++ b/sway-lsp/benches/lsp_benchmarks/mod.rs @@ -14,6 +14,7 @@ pub async fn compile_test_project() -> (Url, Arc) { let session = Arc::new(Session::new()); let lsp_mode = Some(sway_core::LspConfig { optimized_build: false, + file_versions: Default::default(), }); // Load the test project let uri = Url::from_file_path(benchmark_dir().join("src/main.sw")).unwrap(); diff --git a/sway-lsp/src/config.rs b/sway-lsp/src/config.rs index 799504dfd83..0a3a5597462 100644 --- a/sway-lsp/src/config.rs +++ b/sway-lsp/src/config.rs @@ -16,6 +16,8 @@ pub struct Config { pub on_enter: OnEnterConfig, #[serde(default, skip_serializing)] trace: TraceConfig, + #[serde(default)] + pub garbage_collection: GarbageCollectionConfig, } #[derive(Clone, Debug, PartialEq, Eq, Deserialize, Default)] @@ -53,7 +55,26 @@ impl Default for DiagnosticConfig { } } -// Options for confguring server logging. +// Options for configuring garbage collection. 
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct GarbageCollectionConfig { + pub gc_enabled: bool, + pub gc_frequency: i32, +} + +impl Default for GarbageCollectionConfig { + fn default() -> Self { + Self { + gc_enabled: true, + // Garbage collection is fairly expsensive so we default to only clear on every 3rd keystroke. + // Waiting too long to clear can cause a stack overflow to occur. + gc_frequency: 3, + } + } +} + +// Options for configuring server logging. #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] pub struct LoggingConfig { #[serde(with = "LevelFilterDef")] diff --git a/sway-lsp/src/core/session.rs b/sway-lsp/src/core/session.rs index ee46a1401e1..d0d9e7ed5ee 100644 --- a/sway-lsp/src/core/session.rs +++ b/sway-lsp/src/core/session.rs @@ -259,6 +259,7 @@ impl Session { path: uri.path().to_string(), err: err.to_string(), })?; + Ok(()) } @@ -372,7 +373,7 @@ type CompileResults = (Vec, Vec); pub fn traverse( results: Vec<(Option, Handler)>, - engines: &Engines, + engines_clone: &Engines, session: Arc, ) -> Result, LanguageServerError> { session.token_map.clear(); @@ -399,6 +400,18 @@ pub fn traverse( session.metrics.insert(source_id, metrics.clone()); } + let engines_ref = session.engines.read(); + // Check if the cached AST was returned by the compiler for the users workspace. + // If it was, then we need to use the original engines for traversal. + // + // This is due to the garbage collector removing types from the engines_clone + // and they have not been re-added due to compilation being skipped. + let engines = if i == results_len - 1 && metrics.reused_modules > 0 { + &*engines_ref + } else { + engines_clone + }; + // Get a reference to the typed program AST. 
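For reference, the new `GarbageCollectionConfig` round-trips through serde with camelCase keys, so clients set `gcEnabled`/`gcFrequency` in their configuration. A small round-trip sketch, assuming `serde` with derive and `serde_json` are available as they are elsewhere in the workspace:

```rust
use serde::{Deserialize, Serialize};

#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct GarbageCollectionConfig {
    pub gc_enabled: bool,
    pub gc_frequency: i32,
}

impl Default for GarbageCollectionConfig {
    fn default() -> Self {
        Self {
            gc_enabled: true,
            // GC is expensive, so only collect on every 3rd keystroke by default.
            gc_frequency: 3,
        }
    }
}

fn main() {
    // Clients configure the server with camelCase keys.
    let json = r#"{ "gcEnabled": true, "gcFrequency": 1 }"#;
    let cfg: GarbageCollectionConfig = serde_json::from_str(json).unwrap();
    assert_eq!(cfg.gc_frequency, 1);

    // The defaults match the behaviour that used to be hard-coded.
    let default = serde_json::to_string(&GarbageCollectionConfig::default()).unwrap();
    assert_eq!(default, r#"{"gcEnabled":true,"gcFrequency":3}"#);
}
```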
let typed_program = typed .as_ref() diff --git a/sway-lsp/src/handlers/notification.rs b/sway-lsp/src/handlers/notification.rs index be2e97211fa..6b610cbc6a3 100644 --- a/sway-lsp/src/handlers/notification.rs +++ b/sway-lsp/src/handlers/notification.rs @@ -10,7 +10,11 @@ use lsp_types::{ DidChangeTextDocumentParams, DidChangeWatchedFilesParams, DidOpenTextDocumentParams, DidSaveTextDocumentParams, FileChangeType, Url, }; -use std::sync::{atomic::Ordering, Arc}; +use std::{ + collections::BTreeMap, + path::PathBuf, + sync::{atomic::Ordering, Arc}, +}; pub async fn handle_did_open_text_document( state: &ServerState, @@ -32,6 +36,8 @@ pub async fn handle_did_open_text_document( uri: Some(uri.clone()), version: None, optimized_build: false, + gc_options: state.config.read().garbage_collection.clone(), + file_versions: BTreeMap::new(), })); state.is_compiling.store(true, Ordering::SeqCst); @@ -49,6 +55,7 @@ fn send_new_compilation_request( uri: &Url, version: Option, optimized_build: bool, + file_versions: BTreeMap>, ) { if state.is_compiling.load(Ordering::SeqCst) { // If we are already compiling, then we need to retrigger compilation @@ -71,6 +78,8 @@ fn send_new_compilation_request( uri: Some(uri.clone()), version, optimized_build, + gc_options: state.config.read().garbage_collection.clone(), + file_versions, })); } @@ -89,16 +98,36 @@ pub async fn handle_did_change_text_document( session .write_changes_to_file(&uri, params.content_changes) .await?; + + let file_versions = file_versions(&session, &uri, Some(params.text_document.version as u64)); send_new_compilation_request( state, session.clone(), &uri, Some(params.text_document.version), true, + file_versions, ); Ok(()) } +fn file_versions( + session: &Session, + uri: &Url, + version: Option, +) -> BTreeMap> { + let mut file_versions = BTreeMap::new(); + for item in session.documents.iter() { + let path = PathBuf::from(item.key()); + if path == uri.to_file_path().unwrap() { + file_versions.insert(path, version); + } else { + file_versions.insert(path, None); + } + } + file_versions +} + pub(crate) async fn handle_did_save_text_document( state: &ServerState, params: DidSaveTextDocumentParams, @@ -109,7 +138,8 @@ pub(crate) async fn handle_did_save_text_document( .uri_and_session_from_workspace(¶ms.text_document.uri) .await?; session.sync.resync()?; - send_new_compilation_request(state, session.clone(), &uri, None, false); + let file_versions = file_versions(&session, &uri, None); + send_new_compilation_request(state, session.clone(), &uri, None, false, file_versions); state.wait_for_parsing().await; state .publish_diagnostics(uri, params.text_document.uri, session) diff --git a/sway-lsp/src/server_state.rs b/sway-lsp/src/server_state.rs index 55ed137efcf..6b77d0b01ae 100644 --- a/sway-lsp/src/server_state.rs +++ b/sway-lsp/src/server_state.rs @@ -1,11 +1,10 @@ //! The context or environment in which the language server functions. 
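The `file_versions` helper in `notification.rs` walks every open document in the session and only attaches a version to the one that triggered the notification. The same logic is sketched below with the `Session` and `Url` plumbing replaced by plain paths so it can be read and run in isolation:

```rust
use std::collections::BTreeMap;
use std::path::{Path, PathBuf};

/// `open_documents` stands in for `session.documents`; the real helper takes
/// the `Session` and a `Url` instead of plain paths.
fn file_versions(
    open_documents: &[PathBuf],
    changed: &Path,
    version: Option<u64>,
) -> BTreeMap<PathBuf, Option<u64>> {
    let mut versions = BTreeMap::new();
    for path in open_documents {
        let v = if path == changed { version } else { None };
        versions.insert(path.clone(), v);
    }
    versions
}

fn main() {
    let docs = vec![PathBuf::from("src/main.sw"), PathBuf::from("src/lib.sw")];
    let versions = file_versions(&docs, Path::new("src/main.sw"), Some(5));

    // Only the edited file carries the editor's version.
    assert_eq!(versions[Path::new("src/main.sw")], Some(5));
    assert_eq!(versions[Path::new("src/lib.sw")], None);
}
```

Every other open file is mapped to `None`, which the cache check treats as "no pending edits" for that module.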
use crate::{ - config::{Config, Warnings}, + config::{Config, GarbageCollectionConfig, Warnings}, core::session::{self, Session}, error::{DirectoryError, DocumentError, LanguageServerError}, - utils::debug, - utils::keyword_docs::KeywordDocs, + utils::{debug, keyword_docs::KeywordDocs}, }; use crossbeam_channel::{Receiver, Sender}; use dashmap::DashMap; @@ -13,7 +12,7 @@ use forc_pkg::manifest::GenericManifestFile; use forc_pkg::PackageManifestFile; use lsp_types::{Diagnostic, Url}; use parking_lot::RwLock; -use std::process::Command; +use std::{collections::BTreeMap, process::Command}; use std::{ mem, path::PathBuf, @@ -29,7 +28,7 @@ use tower_lsp::{jsonrpc, Client}; /// `ServerState` is the primary mutable state of the language server pub struct ServerState { pub(crate) client: Option, - pub(crate) config: Arc>, + pub config: Arc>, pub(crate) keyword_docs: Arc, pub(crate) sessions: Arc, pub(crate) retrigger_compilation: Arc, @@ -86,6 +85,8 @@ pub struct CompilationContext { pub uri: Option, pub version: Option, pub optimized_build: bool, + pub gc_options: GarbageCollectionConfig, + pub file_versions: BTreeMap>, } impl ServerState { @@ -119,9 +120,10 @@ impl ServerState { let mut engines_clone = session.engines.read().clone(); if let Some(version) = ctx.version { - // Garbage collection is fairly expsensive so we only clear on every 3rd keystroke. - // Waiting too long to clear can cause a stack overflow to occur. - if version % 3 == 0 { + // Perform garbage collection at configured intervals if enabled to manage memory usage. + if ctx.gc_options.gc_enabled + && version % ctx.gc_options.gc_frequency == 0 + { // Call this on the engines clone so we don't clear types that are still in use // and might be needed in the case cancel compilation was triggered. if let Err(err) = session.garbage_collect(&mut engines_clone) { @@ -135,6 +137,7 @@ impl ServerState { let lsp_mode = Some(LspConfig { optimized_build: ctx.optimized_build, + file_versions: ctx.file_versions, }); // Set the is_compiling flag to true so that the wait_for_parsing function knows that we are compiling @@ -148,7 +151,26 @@ impl ServerState { experimental, ) { Ok(_) => { - mem::swap(&mut *session.engines.write(), &mut engines_clone); + if let Ok(path) = uri.to_file_path() { + let path = Arc::new(path); + let source_id = + session.engines.read().se().get_source_id(&path); + let metrics = session + .metrics + .get(&source_id) + .expect("metrics not found for source_id"); + // It's very important to check if the workspace AST was reused to determine if we need to overwrite the engines. + // Because the engines_clone has garbage collection applied. If the workspace AST was reused, we need to keep the old engines + // as the engines_clone might have cleared some types that are still in use. + if metrics.reused_modules == 0 { + // The compiler did not reuse the workspace AST. + // We need to overwrite the old engines with the engines clone. 
+ mem::swap( + &mut *session.engines.write(), + &mut engines_clone, + ); + } + } *last_compilation_state.write() = LastCompilationState::Success; } Err(_err) => { diff --git a/sway-lsp/tests/fixtures/garbage_collection/storage_contract/.gitignore b/sway-lsp/tests/fixtures/garbage_collection/storage_contract/.gitignore new file mode 100644 index 00000000000..77d3844f58c --- /dev/null +++ b/sway-lsp/tests/fixtures/garbage_collection/storage_contract/.gitignore @@ -0,0 +1,2 @@ +out +target diff --git a/sway-lsp/tests/fixtures/garbage_collection/storage_contract/Forc.toml b/sway-lsp/tests/fixtures/garbage_collection/storage_contract/Forc.toml new file mode 100644 index 00000000000..96a747b5cca --- /dev/null +++ b/sway-lsp/tests/fixtures/garbage_collection/storage_contract/Forc.toml @@ -0,0 +1,7 @@ +[project] +authors = ["Fuel Labs "] +entry = "main.sw" +license = "Apache-2.0" +name = "storage_contract" + +[dependencies] diff --git a/sway-lsp/tests/fixtures/garbage_collection/storage_contract/src/main.sw b/sway-lsp/tests/fixtures/garbage_collection/storage_contract/src/main.sw new file mode 100644 index 00000000000..2cb02993474 --- /dev/null +++ b/sway-lsp/tests/fixtures/garbage_collection/storage_contract/src/main.sw @@ -0,0 +1,57 @@ +contract; + +use std::{ + bytes::Bytes, + hash::*, + storage::storage_string::*, + storage::storage_vec::*, + string::String, +}; + +storage { + msgs: StorageMap = StorageMap:: {}, + msgs_sender: StorageMap = StorageMap:: {}, + ids: StorageVec = StorageVec:: {}, +} + +abi Thread { + #[storage(read, write)] + fn insert_msg(id: b256, msg: String) -> b256; + + #[storage(read)] + fn get_ids() -> Vec; + + #[storage(read)] + fn get_msg(id: b256) -> String; + + #[storage(read)] + fn get_sender(id: b256) -> Address; +} + +impl Thread for Contract { + #[storage(read, write)] + fn insert_msg(id: b256, msg: String) -> b256 { + let key_of_string = storage.msgs.get(id); + key_of_string.write_slice(msg); + let sender = msg_sender().unwrap(); + storage.msgs_sender.insert(id, sender); + storage.ids.push(id); + id + } + + #[storage(read)] + fn get_ids() -> Vec { + storage.ids.load_vec() + } + + #[storage(read)] + fn get_msg(id: b256) -> String { + storage.msgs.get(id).read_slice().unwrap() + } + + #[storage(read)] + fn get_sender(id: b256) -> Address { + let id = storage.msgs_sender.get(id).try_read().unwrap(); + id.as_address().unwrap() + } +} \ No newline at end of file diff --git a/sway-lsp/tests/integration/lsp.rs b/sway-lsp/tests/integration/lsp.rs index f4d10b59927..e0d063871be 100644 --- a/sway-lsp/tests/integration/lsp.rs +++ b/sway-lsp/tests/integration/lsp.rs @@ -92,29 +92,24 @@ pub(crate) async fn did_change_request( service: &mut LspService, uri: &Url, version: i32, + params: Option, ) -> Request { - let params = json!({ - "textDocument": { - "uri": uri, - "version": version, - }, - "contentChanges": [ - { - "range": { - "start": { - "line": 1, - "character": 0 - }, - "end": { - "line": 1, - "character": 0 - } - }, - "rangeLength": 0, - "text": "\n", - } - ] + let params = params.unwrap_or_else(|| { + create_did_change_params( + uri, + version, + Position { + line: 1, + character: 0, + }, + Position { + line: 1, + character: 0, + }, + 0, + ) }); + let params: serde_json::value::Value = serde_json::to_value(params).unwrap(); let did_change = Request::build("textDocument/didChange") .params(params) .finish(); @@ -553,3 +548,23 @@ pub(crate) async fn rename_request<'a>( let worspace_edit = request::handle_rename(server, params).await.unwrap(); worspace_edit.unwrap() } + 
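Two decisions in `server_state.rs` are easy to lose in the hunk above: when to garbage-collect the cloned engines, and whether the collected clone may replace the session's engines afterwards. A condensed sketch of both, with stand-in types for `Engines`, the GC options, and the per-file metrics:

```rust
use std::mem;

/// Stand-ins for the pieces the compilation thread looks at; the real types
/// are `GarbageCollectionConfig`, `Engines`, and the per-file `PerformanceData`.
struct GcOptions { gc_enabled: bool, gc_frequency: i32 }
#[derive(Debug, PartialEq)]
struct Engines { label: &'static str }
struct Metrics { reused_modules: u64 }

/// Collect only when enabled and only on every `gc_frequency`-th reported
/// document version (keystroke).
/// Note: a `gc_frequency` of 0 would panic here (remainder by zero).
fn should_garbage_collect(version: i32, gc: &GcOptions) -> bool {
    gc.gc_enabled && version % gc.gc_frequency == 0
}

/// The garbage-collected clone only becomes the authoritative `Engines` if the
/// compiler really rebuilt the workspace AST. If the cached AST was reused,
/// the clone may be missing types the cache still refers to, so the original
/// engines are kept.
fn adopt_engines_if_rebuilt(current: &mut Engines, mut gc_clone: Engines, metrics: &Metrics) {
    if metrics.reused_modules == 0 {
        mem::swap(current, &mut gc_clone);
    }
}

fn main() {
    let gc = GcOptions { gc_enabled: true, gc_frequency: 3 };
    let collected: Vec<i32> = (1..=9).filter(|v| should_garbage_collect(*v, &gc)).collect();
    assert_eq!(collected, vec![3, 6, 9]); // every third keystroke

    let mut engines = Engines { label: "original" };
    // Cached AST reused: keep the original engines.
    adopt_engines_if_rebuilt(&mut engines, Engines { label: "gc clone" }, &Metrics { reused_modules: 1 });
    assert_eq!(engines, Engines { label: "original" });
    // Workspace AST rebuilt: the garbage-collected clone takes over.
    adopt_engines_if_rebuilt(&mut engines, Engines { label: "gc clone" }, &Metrics { reused_modules: 0 });
    assert_eq!(engines, Engines { label: "gc clone" });
}
```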
+pub fn create_did_change_params( + uri: &Url, + version: i32, + start: Position, + end: Position, + range_length: u32, +) -> DidChangeTextDocumentParams { + DidChangeTextDocumentParams { + text_document: VersionedTextDocumentIdentifier { + uri: uri.clone(), + version, + }, + content_changes: vec![TextDocumentContentChangeEvent { + range: Some(Range { start, end }), + range_length: Some(range_length), + text: "\n".into(), + }], + } +} diff --git a/sway-lsp/tests/lib.rs b/sway-lsp/tests/lib.rs index 11b4a08a19f..91d5ac31e55 100644 --- a/sway-lsp/tests/lib.rs +++ b/sway-lsp/tests/lib.rs @@ -9,8 +9,9 @@ use sway_lsp::{ }; use sway_lsp_test_utils::{ assert_server_requests, dir_contains_forc_manifest, doc_comments_dir, e2e_language_dir, - e2e_test_dir, generic_impl_self_dir, get_fixture, load_sway_example, runnables_test_dir, - self_impl_reassignment_dir, sway_workspace_dir, test_fixtures_dir, + e2e_test_dir, generic_impl_self_dir, get_fixture, load_sway_example, random_delay, + runnables_test_dir, self_impl_reassignment_dir, setup_panic_hook, sway_workspace_dir, + test_fixtures_dir, }; use tower_lsp::LspService; @@ -154,7 +155,7 @@ fn did_change() { run_async!({ let (mut service, _) = LspService::new(ServerState::new); let uri = init_and_open(&mut service, doc_comments_dir().join("src/main.sw")).await; - let _ = lsp::did_change_request(&mut service, &uri, 1).await; + let _ = lsp::did_change_request(&mut service, &uri, 1, None).await; service.inner().wait_for_parsing().await; shutdown_and_exit(&mut service).await; }); @@ -167,7 +168,7 @@ fn did_cache_test() { .custom_method("sway/metrics", ServerState::metrics) .finish(); let uri = init_and_open(&mut service, doc_comments_dir().join("src/main.sw")).await; - let _ = lsp::did_change_request(&mut service, &uri, 1).await; + let _ = lsp::did_change_request(&mut service, &uri, 1, None).await; service.inner().wait_for_parsing().await; let metrics = lsp::metrics_request(&mut service, &uri).await; assert!(metrics.len() >= 2); @@ -191,7 +192,7 @@ fn did_change_stress_test() { let uri = init_and_open(&mut service, bench_dir.join("src/main.sw")).await; let times = 400; for version in 0..times { - let _ = lsp::did_change_request(&mut service, &uri, version + 1).await; + let _ = lsp::did_change_request(&mut service, &uri, version + 1, None).await; if version == 0 { service.inner().wait_for_parsing().await; } @@ -211,12 +212,7 @@ fn did_change_stress_test_random_wait() { run_async!({ let test_duration = tokio::time::Duration::from_secs(5 * 60); // 5 minutes timeout let test_future = async { - std::env::set_var("RUST_BACKTRACE", "1"); - let default_panic = std::panic::take_hook(); - std::panic::set_hook(Box::new(move |panic_info| { - default_panic(panic_info); // Print the panic message - std::process::exit(1); - })); + setup_panic_hook(); let (mut service, _) = LspService::new(ServerState::new); let example_dir = sway_workspace_dir() .join(e2e_language_dir()) @@ -225,7 +221,7 @@ fn did_change_stress_test_random_wait() { let times = 60; for version in 0..times { //eprintln!("version: {}", version); - let _ = lsp::did_change_request(&mut service, &uri, version + 1).await; + let _ = lsp::did_change_request(&mut service, &uri, version + 1, None).await; if version == 0 { service.inner().wait_for_parsing().await; } @@ -255,6 +251,77 @@ fn did_change_stress_test_random_wait() { }); } +fn garbage_collection_runner(path: PathBuf) { + run_async!({ + setup_panic_hook(); + let (mut service, _) = LspService::new(ServerState::new); + // set the garbage collection frequency 
to 1 + service + .inner() + .config + .write() + .garbage_collection + .gc_frequency = 1; + let uri = init_and_open(&mut service, path).await; + let times = 60; + for version in 1..times { + //eprintln!("version: {}", version); + let params = if rand::random::() % 3 < 1 { + // enter keypress at line 20 + lsp::create_did_change_params( + &uri, + version, + Position { + line: 20, + character: 0, + }, + Position { + line: 20, + character: 0, + }, + 0, + ) + } else { + // backspace keypress at line 21 + lsp::create_did_change_params( + &uri, + version, + Position { + line: 20, + character: 0, + }, + Position { + line: 21, + character: 0, + }, + 1, + ) + }; + let _ = lsp::did_change_request(&mut service, &uri, version, Some(params)).await; + if version == 0 { + service.inner().wait_for_parsing().await; + } + // wait for a random amount of time to simulate typing + random_delay().await; + } + shutdown_and_exit(&mut service).await; + }); +} + +#[test] +fn garbage_collection_storage() { + let p = sway_workspace_dir() + .join("sway-lsp/tests/fixtures/garbage_collection/storage_contract") + .join("src/main.sw"); + garbage_collection_runner(p); +} + +#[test] +fn garbage_collection_paths() { + let p = test_fixtures_dir().join("tokens/paths/src/main.sw"); + garbage_collection_runner(p); +} + #[test] fn lsp_syncs_with_workspace_edits() { run_async!({ @@ -270,7 +337,7 @@ fn lsp_syncs_with_workspace_edits() { def_path: uri.as_str(), }; lsp::definition_check(service.inner(), &go_to).await; - let _ = lsp::did_change_request(&mut service, &uri, 1).await; + let _ = lsp::did_change_request(&mut service, &uri, 1, None).await; service.inner().wait_for_parsing().await; go_to.def_line = 20; lsp::definition_check_with_req_offset(service.inner(), &mut go_to, 45, 24).await; diff --git a/sway-lsp/tests/utils/Cargo.toml b/sway-lsp/tests/utils/Cargo.toml index 56552d29f23..1901ec8c701 100644 --- a/sway-lsp/tests/utils/Cargo.toml +++ b/sway-lsp/tests/utils/Cargo.toml @@ -13,6 +13,7 @@ repository.workspace = true assert-json-diff = "2.0" futures = { version = "0.3", default-features = false, features = ["std", "async-await"] } lsp-types = { version = "0.94", features = ["proposed"] } +rand = "0.8" serde = { version = "1.0", features = ["derive"] } serde_json = "1.0.60" tokio = { version = "1.3", features = ["io-std", "io-util", "macros", "net", "rt-multi-thread", "sync", "time"] } diff --git a/sway-lsp/tests/utils/src/lib.rs b/sway-lsp/tests/utils/src/lib.rs index a4b6720b6a9..3124481ca06 100644 --- a/sway-lsp/tests/utils/src/lib.rs +++ b/sway-lsp/tests/utils/src/lib.rs @@ -1,6 +1,7 @@ use assert_json_diff::assert_json_include; use futures::StreamExt; use lsp_types::Url; +use rand::Rng; use serde_json::Value; use std::{ env, fs, @@ -113,3 +114,43 @@ pub async fn assert_server_requests( } }) } + +/// Introduces a random delay between 1 to 30 milliseconds with a chance of additional longer delays based on predefined probabilities. +pub async fn random_delay() { + // wait for a random amount of time between 1-30ms + tokio::time::sleep(tokio::time::Duration::from_millis( + rand::thread_rng().gen_range(1..=30), + )) + .await; + + // there is a 10% chance that a longer 100-800ms wait will be added + if rand::thread_rng().gen_ratio(1, 10) { + tokio::time::sleep(tokio::time::Duration::from_millis( + rand::thread_rng().gen_range(100..=1200), + )) + .await; + } + // 20% chance to introduce a longer delay of 200 to 1500 milliseconds. 
+    if rand::thread_rng().gen_ratio(2, 10) {
+        tokio::time::sleep(tokio::time::Duration::from_millis(
+            rand::thread_rng().gen_range(400..=2800),
+        ))
+        .await;
+    }
+}
+
+/// Sets up the environment and a custom panic hook to print panic information and exit the program.
+pub fn setup_panic_hook() {
+    // Enable backtrace to get more information about panic
+    std::env::set_var("RUST_BACKTRACE", "1");
+
+    // Take the default panic hook
+    let default_panic = std::panic::take_hook();
+
+    // Set a custom panic hook
+    std::panic::set_hook(Box::new(move |panic_info| {
+        // Invoke the default panic hook to print the panic message
+        default_panic(panic_info);
+        std::process::exit(1);
+    }));
+}
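The garbage-collection stress tests drive the server with synthetic edits built by `create_did_change_params` and spaced out by `random_delay`. The sketch below shows what those simulated keypresses look like on the wire, assuming `lsp-types` 0.94, `rand`, and `serde_json` as in the test-utils crate; `simulated_keypress` is a hypothetical wrapper, the real tests send the params through `lsp::did_change_request`:

```rust
use lsp_types::{
    DidChangeTextDocumentParams, Position, Range, TextDocumentContentChangeEvent, Url,
    VersionedTextDocumentIdentifier,
};
use rand::Rng;

/// Same shape as the `create_did_change_params` helper: a single change event
/// that writes "\n" over the given range.
fn did_change_params(
    uri: &Url,
    version: i32,
    start: Position,
    end: Position,
    range_length: u32,
) -> DidChangeTextDocumentParams {
    DidChangeTextDocumentParams {
        text_document: VersionedTextDocumentIdentifier { uri: uri.clone(), version },
        content_changes: vec![TextDocumentContentChangeEvent {
            range: Some(Range { start, end }),
            range_length: Some(range_length),
            text: "\n".into(),
        }],
    }
}

/// Roughly one third of the iterations simulate an "enter" keypress at line 20
/// (zero-length range); the rest simulate a "backspace" that collapses lines
/// 20-21 (range length 1), mirroring the mix used by `garbage_collection_runner`.
fn simulated_keypress(uri: &Url, version: i32) -> DidChangeTextDocumentParams {
    let line = 20;
    if rand::thread_rng().gen_range(0..3) < 1 {
        did_change_params(
            uri,
            version,
            Position { line, character: 0 },
            Position { line, character: 0 },
            0,
        )
    } else {
        did_change_params(
            uri,
            version,
            Position { line, character: 0 },
            Position { line: line + 1, character: 0 },
            1,
        )
    }
}

fn main() {
    let uri = Url::from_file_path("/tmp/storage_contract/src/main.sw").unwrap();
    let params = simulated_keypress(&uri, 1);
    println!("{}", serde_json::to_string_pretty(&params).unwrap());
}
```

Sleeping a jittered interval between keypresses (via `random_delay`) is what surfaces races between cancellation, garbage collection, and cache reuse in these tests.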