From 1f35c8d09df9612e23d62a472d49c7021202711d Mon Sep 17 00:00:00 2001 From: Junseong Park Date: Sun, 22 Sep 2024 17:47:07 +0900 Subject: [PATCH 001/228] Fix tooltip of `always_treat_brackets_as_autoclosed` (#18191) Fixed a bug where the `always_treat_brackets_as_autoclosed` option would not display the message in the tooltip that appears when hovering. Release Notes: - N/A --- crates/language/src/language_settings.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/crates/language/src/language_settings.rs b/crates/language/src/language_settings.rs index 82d4208aae6eb..735a9a60f87fa 100644 --- a/crates/language/src/language_settings.rs +++ b/crates/language/src/language_settings.rs @@ -325,11 +325,11 @@ pub struct LanguageSettingsContent { /// /// Default: true pub use_auto_surround: Option, - // Controls how the editor handles the autoclosed characters. - // When set to `false`(default), skipping over and auto-removing of the closing characters - // happen only for auto-inserted characters. - // Otherwise(when `true`), the closing characters are always skipped over and auto-removed - // no matter how they were inserted. + /// Controls how the editor handles the autoclosed characters. + /// When set to `false`(default), skipping over and auto-removing of the closing characters + /// happen only for auto-inserted characters. + /// Otherwise(when `true`), the closing characters are always skipped over and auto-removed + /// no matter how they were inserted. /// /// Default: false pub always_treat_brackets_as_autoclosed: Option, From e7fcf83ce8d88ca36d2aa7fe8fc017c308aaf138 Mon Sep 17 00:00:00 2001 From: Junseong Park Date: Sun, 22 Sep 2024 17:48:52 +0900 Subject: [PATCH 002/228] docs: Fix misordered headings (#18192) 1. Raised the `Indent Guides` heading to level 2, which is completely unrelated to `Git`. 2. the `Git` heading now only contains `Git Gutter` and `Inline Git Blame` as subheadings. 3. The `Indent Guides` heading is now located directly after the `Git` heading. Release Notes: - N/A --- docs/src/configuring-zed.md | 88 ++++++++++++++++++------------------- 1 file changed, 44 insertions(+), 44 deletions(-) diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index 7cc6a4a8cb02c..518dbb7f38a83 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -857,7 +857,50 @@ To interpret all `.c` files as C++, files called `MyLockFile` as TOML and files } ``` -### Indent Guides +### Inline Git Blame + +- Description: Whether or not to show git blame information inline, on the currently focused line. +- Setting: `inline_blame` +- Default: + +```json +{ + "git": { + "inline_blame": { + "enabled": true + } + } +} +``` + +**Options** + +1. Disable inline git blame: + +```json +{ + "git": { + "inline_blame": { + "enabled": false + } + } +} +``` + +2. Only show inline git blame after a delay (that starts after cursor stops moving): + +```json +{ + "git": { + "inline_blame": { + "enabled": true, + "delay_ms": 500 + } + } +} +``` + +## Indent Guides - Description: Configuration related to indent guides. Indent guides can be configured separately for each language. - Setting: `indent_guides` @@ -926,49 +969,6 @@ To interpret all `.c` files as C++, files called `MyLockFile` as TOML and files } ``` -### Inline Git Blame - -- Description: Whether or not to show git blame information inline, on the currently focused line. 
-- Setting: `inline_blame` -- Default: - -```json -{ - "git": { - "inline_blame": { - "enabled": true - } - } -} -``` - -**Options** - -1. Disable inline git blame: - -```json -{ - "git": { - "inline_blame": { - "enabled": false - } - } -} -``` - -2. Only show inline git blame after a delay (that starts after cursor stops moving): - -```json -{ - "git": { - "inline_blame": { - "enabled": true, - "delay_ms": 500 - } - } -} -``` - ## Hard Tabs - Description: Whether to indent lines using tab characters or multiple spaces. From 37c93d8fead2f33ed444c1ee8efd303a2b5a4c8c Mon Sep 17 00:00:00 2001 From: Junseong Park Date: Sun, 22 Sep 2024 18:09:35 +0900 Subject: [PATCH 003/228] docs: Add missing `base_keymap` option in `configuring-zed.md` (#18190) Added `base_keymap`, an option that works in the editor but is missing from the documentation. Release Notes: - N/A --- assets/settings/default.json | 8 ++++-- docs/src/configuring-zed.md | 56 ++++++++++++++++++++++++++++++++++++ 2 files changed, 61 insertions(+), 3 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index 8424c5733d81b..e04ab90f217cd 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -15,9 +15,11 @@ // text editor: // // 1. "VSCode" - // 2. "JetBrains" - // 3. "SublimeText" - // 4. "Atom" + // 2. "Atom" + // 3. "JetBrains" + // 4. "None" + // 5. "SublimeText" + // 6. "TextMate" "base_keymap": "VSCode", // Features that can be globally enabled or disabled "features": { diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index 518dbb7f38a83..5d9a2843edf4a 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -93,6 +93,62 @@ Extensions that provide language servers may also provide default settings for t `boolean` values +## Base Keymap + +- Description: Base key bindings scheme. Base keymaps can be overridden with user keymaps. +- Setting: `base_keymap` +- Default: `VSCode` + +**Options** + +1. VSCode + +```json +{ + "base_keymap": "VSCode" +} +``` + +2. Atom + +```json +{ + "base_keymap": "Atom" +} +``` + +3. JetBrains + +```json +{ + "base_keymap": "JetBrains" +} +``` + +4. None + +```json +{ + "base_keymap": "None" +} +``` + +5. SublimeText + +```json +{ + "base_keymap": "SublimeText" +} +``` + +6. TextMate + +```json +{ + "base_keymap": "TextMate" +} +``` + ## Buffer Font Family - Description: The name of a font to use for rendering text in the editor. From 0f4ebdfbca721614f3cadafc3b44e4fbf099afda Mon Sep 17 00:00:00 2001 From: Junseong Park Date: Sun, 22 Sep 2024 18:15:13 +0900 Subject: [PATCH 004/228] docs: Add missing `ui_font_size` option in `configuring-zed.md` (#18189) Added `ui_font_size`, an option that works in the editor but is missing from the documentation. Release Notes: - N/A --- docs/src/configuring-zed.md | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index 5d9a2843edf4a..c0aa4c513a5f3 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -215,7 +215,7 @@ For example, to use `Nerd Font` as a fallback, add the following to your setting **Options** -`integer` values +`integer` values from `6` to `100` pixels (inclusive) ## Buffer Font Weight @@ -2184,6 +2184,16 @@ Float values between `0.0` and `0.9`, where: } ``` +## UI Font Size + +- Description: The default font size for text in the UI. 
+- Setting: `ui_font_size` +- Default: `16` + +**Options** + +`integer` values from `6` to `100` pixels (inclusive) + ## An example configuration: ```json From 75cb199a54666032e7a62dfb64739283556ae96c Mon Sep 17 00:00:00 2001 From: CharlesChen0823 Date: Mon, 23 Sep 2024 00:50:51 +0800 Subject: [PATCH 005/228] project: Fix typo error cause remove worktree not stop lsp (#18198) Release Notes: - N/A --- crates/project/src/project.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index bd9c17ecb29b9..78584cbae0c65 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -2006,7 +2006,7 @@ impl Project { cx.emit(Event::LanguageServerAdded(*language_server_id)) } LspStoreEvent::LanguageServerRemoved(language_server_id) => { - cx.emit(Event::LanguageServerAdded(*language_server_id)) + cx.emit(Event::LanguageServerRemoved(*language_server_id)) } LspStoreEvent::LanguageServerLog(server_id, log_type, string) => cx.emit( Event::LanguageServerLog(*server_id, log_type.clone(), string.clone()), From bb7d9d35256825c9b022a3c89c556c1521664c8d Mon Sep 17 00:00:00 2001 From: Junseong Park Date: Mon, 23 Sep 2024 12:26:01 +0900 Subject: [PATCH 006/228] docs: Remove `default_dock_anchor` in `configuring-zed.md` (#18210) Removed the deprecated option `default_dock_anchor` in `configuring-zed.md` Note: https://zed.dev/blog/new-panel-system Release Notes: - N/A --- docs/src/configuring-zed.md | 6 ------ 1 file changed, 6 deletions(-) diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index c0aa4c513a5f3..7837044a60a66 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -381,12 +381,6 @@ List of `string` values "cursor_shape": "hollow" ``` -## Default Dock Anchor - -- Description: The default anchor for new docks. -- Setting: `default_dock_anchor` -- Default: `bottom` - **Options** 1. 
Position the dock attached to the bottom of the workspace: `bottom` From 05d18321db59539b56520d25f2ee95850ad911fd Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Mon, 23 Sep 2024 12:53:57 +0300 Subject: [PATCH 007/228] Resolve completions properly (#18212) Related to https://github.com/rust-lang/rust-analyzer/pull/18167 * Declare more completion item fields in the client completion resolve capabilities * Do resolve completions even if their docs are present * Instead, do not resolve completions that could not be resolved when handling the remote client resolve requests * Do replace the old lsp completion data with the resolved one Release Notes: - Improved completion resolve mechanism --- crates/lsp/src/lsp.rs | 8 ++++++- crates/project/src/lsp_store.rs | 37 ++++++++++++++++++++++++--------- crates/proto/proto/zed.proto | 1 + 3 files changed, 35 insertions(+), 11 deletions(-) diff --git a/crates/lsp/src/lsp.rs b/crates/lsp/src/lsp.rs index 21671cd0b1326..c2a5951de7210 100644 --- a/crates/lsp/src/lsp.rs +++ b/crates/lsp/src/lsp.rs @@ -615,8 +615,14 @@ impl LanguageServer { snippet_support: Some(true), resolve_support: Some(CompletionItemCapabilityResolveSupport { properties: vec![ - "documentation".to_string(), "additionalTextEdits".to_string(), + "command".to_string(), + "detail".to_string(), + "documentation".to_string(), + "filterText".to_string(), + "labelDetails".to_string(), + "tags".to_string(), + "textEdit".to_string(), ], }), insert_replace_support: Some(true), diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 6a3788c879316..95ca84236001c 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -1615,10 +1615,6 @@ impl LspStore { let (server_id, completion) = { let completions_guard = completions.read(); let completion = &completions_guard[completion_index]; - if completion.documentation.is_some() { - continue; - } - did_resolve = true; let server_id = completion.server_id; let completion = completion.lsp_completion.clone(); @@ -1643,10 +1639,6 @@ impl LspStore { let (server_id, completion) = { let completions_guard = completions.read(); let completion = &completions_guard[completion_index]; - if completion.documentation.is_some() { - continue; - } - let server_id = completion.server_id; let completion = completion.lsp_completion.clone(); @@ -1743,6 +1735,10 @@ impl LspStore { completion.lsp_completion.insert_text_format = completion_item.insert_text_format; } } + + let mut completions = completions.write(); + let completion = &mut completions[completion_index]; + completion.lsp_completion = completion_item; } #[allow(clippy::too_many_arguments)] @@ -1771,6 +1767,10 @@ impl LspStore { else { return; }; + let Some(lsp_completion) = serde_json::from_slice(&response.lsp_completion).log_err() + else { + return; + }; let documentation = if response.documentation.is_empty() { Documentation::Undocumented @@ -1787,6 +1787,7 @@ impl LspStore { let mut completions = completions.write(); let completion = &mut completions[completion_index]; completion.documentation = Some(documentation); + completion.lsp_completion = lsp_completion; let old_range = response .old_start @@ -4192,17 +4193,32 @@ impl LspStore { let lsp_completion = serde_json::from_slice(&envelope.payload.lsp_completion)?; let completion = this - .read_with(&cx, |this, _| { + .read_with(&cx, |this, cx| { let id = LanguageServerId(envelope.payload.language_server_id as usize); let Some(server) = this.language_server_for_id(id) else { return Err(anyhow!("No language 
server {id}")); }; - Ok(server.request::(lsp_completion)) + Ok(cx.background_executor().spawn(async move { + let can_resolve = server + .capabilities() + .completion_provider + .as_ref() + .and_then(|options| options.resolve_provider) + .unwrap_or(false); + if can_resolve { + server + .request::(lsp_completion) + .await + } else { + anyhow::Ok(lsp_completion) + } + })) })?? .await?; let mut documentation_is_markdown = false; + let lsp_completion = serde_json::to_string(&completion)?.into_bytes(); let documentation = match completion.documentation { Some(lsp::Documentation::String(text)) => text, @@ -4244,6 +4260,7 @@ impl LspStore { old_start, old_end, new_text, + lsp_completion, }) } diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index a886b2185556f..a18bbe8ecf514 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -1219,6 +1219,7 @@ message ResolveCompletionDocumentationResponse { Anchor old_start = 3; Anchor old_end = 4; string new_text = 5; + bytes lsp_completion = 6; } message ResolveInlayHint { From 8a36278c9590664e881dda454ccfa7685eb5b761 Mon Sep 17 00:00:00 2001 From: moshyfawn Date: Mon, 23 Sep 2024 08:59:45 -0400 Subject: [PATCH 008/228] docs: Fix long code blocks overflow (#18208) Closes #18207 Release Notes: - N/A | Before | After | |--------|-------| | image | image | --- docs/theme/highlight.css | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/theme/highlight.css b/docs/theme/highlight.css index 9d8f39d9030b2..9bd80f351612a 100644 --- a/docs/theme/highlight.css +++ b/docs/theme/highlight.css @@ -12,6 +12,7 @@ .hljs { color: #24292e; background: #ffffff; + overflow-x: auto; } .hljs-doctag, From d784e720274b2a9ced94aa6fcc703f53db132163 Mon Sep 17 00:00:00 2001 From: Charlie Egan Date: Mon, 23 Sep 2024 14:38:54 +0100 Subject: [PATCH 009/228] docs: Add Rego language (#18217) Release Notes: - N/A --------- Signed-off-by: Charlie Egan Co-authored-by: Charlie Egan Co-authored-by: Marshall Bowers --- docs/src/SUMMARY.md | 1 + docs/src/languages.md | 1 + docs/src/languages/rego.md | 38 ++++++++++++++++++++++++++++++++++++++ 3 files changed, 40 insertions(+) create mode 100644 docs/src/languages/rego.md diff --git a/docs/src/SUMMARY.md b/docs/src/SUMMARY.md index 8bb8035c61743..bb0c9d79f590c 100644 --- a/docs/src/SUMMARY.md +++ b/docs/src/SUMMARY.md @@ -93,6 +93,7 @@ - [PureScript](./languages/purescript.md) - [Python](./languages/python.md) - [R](./languages/r.md) +- [Rego](./languages/rego.md) - [ReStructuredText](./languages/rst.md) - [Racket](./languages/racket.md) - [Roc](./languages/roc.md) diff --git a/docs/src/languages.md b/docs/src/languages.md index 7ec586f1f598f..4bc6e7d3d732a 100644 --- a/docs/src/languages.md +++ b/docs/src/languages.md @@ -45,6 +45,7 @@ Zed supports hundreds of programming languages and text formats. Some work out-o - [PureScript](./languages/purescript.md) - [Python](./languages/python.md) - [R](./languages/r.md) +- [Rego](./languages/rego.md) - [ReStructuredText](./languages/rst.md) - [Racket](./languages/racket.md) - [Roc](./languages/roc.md) diff --git a/docs/src/languages/rego.md b/docs/src/languages/rego.md new file mode 100644 index 0000000000000..3709c6a1feeb2 --- /dev/null +++ b/docs/src/languages/rego.md @@ -0,0 +1,38 @@ +# Rego + +Rego language support in Zed is provided by the community-maintained [Rego extension](https://github.com/StyraInc/zed-rego). 
+ +- Tree Sitter: [FallenAngel97/tree-sitter-rego](https://github.com/FallenAngel97/tree-sitter-rego) +- Language Server: [StyraInc/regal](https://github.com/StyraInc/regal) + +## Installation + +The extensions is largely based on the [Regal](https://docs.styra.com/regal/language-server) language server which should be installed to make use of the extension. Read the [getting started](https://docs.styra.com/regal#getting-started) instructions for more information. + +## Configuration + +The extension's behavior is configured in the `.regal/config.yaml` file. The following is an example configuration which disables the `todo-comment` rule, customizes the `line-length` rule, and ignores test files for the `opa-fmt` rule: + +```yaml +rules: + style: + todo-comment: + # don't report on todo comments + level: ignore + line-length: + # custom rule configuration + max-line-length: 100 + # warn on too long lines, but don't fail + level: warning + opa-fmt: + # not needed as error is the default, but + # being explicit won't hurt + level: error + # files can be ignored for any individual rule + # in this example, test files are ignored + ignore: + files: + - "*_test.rego" +``` + +Read Regal's [configuration documentation](https://docs.styra.com/regal#configuration) for more information. From 2ff8dde925b75d62f030755843cd93c402a41022 Mon Sep 17 00:00:00 2001 From: jvmncs <7891333+jvmncs@users.noreply.github.com> Date: Mon, 23 Sep 2024 10:16:15 -0400 Subject: [PATCH 010/228] Use fenix toolchain in nix shell (#18227) In #17974 we explicitly depend on rustc/cargo for the nix devShell, however the fenix overlay that contains the latest stable versions was not being applied to that shell. This led to the shell inheriting whatever rustc/cargo was on nixos-unstable from nixpkgs, which sometimes lags behind. This change fixes that, and also restructures the flake to ensure that all outputs rely on the overlaid `pkgs`. 
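The gist of the restructuring, condensed from the `flake.nix` diff below (names match the diff; the list form of `overlays` here is just shorthand for `builtins.attrValues overlays`):

```nix
# Sketch: build one nixpkgs instance with the fenix overlay (plus a small
# overlay exposing `rustToolchain`) applied, and derive every flake output
# -- packages, devShells, overlays -- from that same instance.
mkPkgs = system:
  import nixpkgs {
    inherit system;
    overlays = [
      fenix.overlays.default
      (final: prev: { rustToolchain = final.fenix.stable.toolchain; })
    ];
  };
```

With that in place, `nix develop` picks up `rustToolchain` from fenix instead of whatever rustc/cargo nixpkgs currently ships.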
Release Notes: - N/A --- flake.lock | 18 ++++++++--------- flake.nix | 56 +++++++++++++++++++++++++++------------------------ nix/shell.nix | 3 +-- 3 files changed, 40 insertions(+), 37 deletions(-) diff --git a/flake.lock b/flake.lock index a5b7a7a6ae9c4..5666e73569f7d 100644 --- a/flake.lock +++ b/flake.lock @@ -2,11 +2,11 @@ "nodes": { "crane": { "locked": { - "lastModified": 1725409566, - "narHash": "sha256-PrtLmqhM6UtJP7v7IGyzjBFhbG4eOAHT6LPYOFmYfbk=", + "lastModified": 1727060013, + "narHash": "sha256-/fC5YlJy4IoAW9GhkJiwyzk0K/gQd9Qi4rRcoweyG9E=", "owner": "ipetkov", "repo": "crane", - "rev": "7e4586bad4e3f8f97a9271def747cf58c4b68f3c", + "rev": "6b40cc876c929bfe1e3a24bf538ce3b5622646ba", "type": "github" }, "original": { @@ -23,11 +23,11 @@ "rust-analyzer-src": "rust-analyzer-src" }, "locked": { - "lastModified": 1726813972, - "narHash": "sha256-t6turZgoSAVgj7hn5mxzNlLOeVeZvymFo8+ymB52q34=", + "lastModified": 1727073227, + "narHash": "sha256-1kmkEQmFfGVuPBasqSZrNThqyMDV1SzTalQdRZxtDRs=", "owner": "nix-community", "repo": "fenix", - "rev": "251caeafc75b710282ee7e375800f75f4c8c5727", + "rev": "88cc292eb3c689073c784d6aecc0edbd47e12881", "type": "github" }, "original": { @@ -53,11 +53,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1726642912, - "narHash": "sha256-wiZzKGHRAhItEuoE599Wm3ic+Lg/NykuBvhb+awf7N8=", + "lastModified": 1726937504, + "narHash": "sha256-bvGoiQBvponpZh8ClUcmJ6QnsNKw0EMrCQJARK3bI1c=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "395c52d142ec1df377acd67db6d4a22950b02a98", + "rev": "9357f4f23713673f310988025d9dc261c20e70c6", "type": "github" }, "original": { diff --git a/flake.nix b/flake.nix index 7d1410ac7c20b..2ee86c446685f 100644 --- a/flake.nix +++ b/flake.nix @@ -17,27 +17,34 @@ fenix, ... }: let - forAllSystems = function: - nixpkgs.lib.genAttrs [ - "x86_64-linux" - "aarch64-linux" - ] (system: - function (import nixpkgs { - inherit system; - overlays = [fenix.overlays.default]; - })); - in { - packages = forAllSystems (pkgs: let - craneLib = (crane.mkLib pkgs).overrideToolchain (p: p.fenix.stable.toolchain); - rustPlatform = pkgs.makeRustPlatform { - inherit (pkgs.fenix.stable.toolchain) cargo rustc; + systems = ["x86_64-linux" "aarch64-linux"]; + + overlays = { + fenix = fenix.overlays.default; + rust-toolchain = final: prev: { + rustToolchain = final.fenix.stable.toolchain; }; - nightlyBuild = pkgs.callPackage ./nix/build.nix { - inherit craneLib rustPlatform; + zed-editor = final: prev: { + zed-editor = final.callPackage ./nix/build.nix { + craneLib = (crane.mkLib final).overrideToolchain final.rustToolchain; + rustPlatform = final.makeRustPlatform { + inherit (final.rustToolchain) cargo rustc; + }; + }; + }; + }; + + mkPkgs = system: + import nixpkgs { + inherit system; + overlays = builtins.attrValues overlays; }; - in { - zed-editor = nightlyBuild; - default = nightlyBuild; + + forAllSystems = f: nixpkgs.lib.genAttrs systems (system: f (mkPkgs system)); + in { + packages = forAllSystems (pkgs: { + zed-editor = pkgs.zed-editor; + default = pkgs.zed-editor; }); devShells = forAllSystems (pkgs: { @@ -46,13 +53,10 @@ formatter = forAllSystems (pkgs: pkgs.alejandra); - overlays.default = final: prev: { - zed-editor = final.callPackage ./nix/build.nix { - craneLib = (crane.mkLib final).overrideToolchain (p: p.fenix.stable.toolchain); - rustPlatform = final.makeRustPlatform { - inherit (final.fenix.stable.toolchain) cargo rustc; - }; + overlays = + overlays + // { + default = nixpkgs.lib.composeManyExtensions (builtins.attrValues overlays); }; - }; }; } 
diff --git a/nix/shell.nix b/nix/shell.nix index 476374b67ef09..e0b4018778c87 100644 --- a/nix/shell.nix +++ b/nix/shell.nix @@ -20,8 +20,7 @@ in wayland xorg.libxcb vulkan-loader - rustc - cargo + rustToolchain ]; in pkgs.mkShell.override {inherit stdenv;} { From 35a80f07e02054b281a946ead549d24499dcfcec Mon Sep 17 00:00:00 2001 From: Nathan Lovato <12694995+NathanLovato@users.noreply.github.com> Date: Mon, 23 Sep 2024 17:01:32 +0200 Subject: [PATCH 011/228] docs: Split vim mode documentation into two pages, edit for clarity (#17614) Closes #17215 Release Notes: - N/A --- This PR builds upon the vim mode documentation page and aims bring the following improvements: - Separate vim mode-specific configuration from introducing vim mode. - Reformat some lists of provided commands and keymaps from code blocks to sub-sections containing tables. - Flesh out the text a little bit to make it more explicit in some parts. - Generally format notes and a couple of other things closer to some other docs pages. Checking the diff doesn't give a good idea of the changes, so here are some before after images for quick examples of the kinds of changes brought by this PR. **Introducing the key differences of Zed's vim mode** Before ![2024-09-09_22-12](https://github.com/user-attachments/assets/447418cb-a6e6-4f9c-8d4b-6d941126979e) After ![2024-09-09_22-16](https://github.com/user-attachments/assets/be69f2d9-c3ae-4b34-978a-344130bee37c) --- **Zed-specific vim key bindings** Before ![2024-09-09_22-17](https://github.com/user-attachments/assets/88fdc512-a50b-487d-85d1-5988f15c2a6f) After ![2024-09-09_22-18](https://github.com/user-attachments/assets/3b77c2f6-0ffa-4afc-a86d-1210ac706c8c) --- docs/src/SUMMARY.md | 2 +- docs/src/vim.md | 607 ++++++++++++++++++++++++++------------------ 2 files changed, 366 insertions(+), 243 deletions(-) diff --git a/docs/src/SUMMARY.md b/docs/src/SUMMARY.md index bb0c9d79f590c..f0e4784f89cd9 100644 --- a/docs/src/SUMMARY.md +++ b/docs/src/SUMMARY.md @@ -17,7 +17,7 @@ - [Snippets](./snippets.md) - [Themes](./themes.md) -- [Vim](./vim.md) +- [Vim Mode](./vim.md) # Using Zed diff --git a/docs/src/vim.md b/docs/src/vim.md index 777534813f965..8bfa6aa73f612 100644 --- a/docs/src/vim.md +++ b/docs/src/vim.md @@ -1,14 +1,35 @@ # Vim Mode -Zed includes a Vim emulation layer known as "vim mode". On this page, you will learn how to turn Zed's vim mode on or off, what tools and commands are available, and how to customize keybindings. +Zed includes a Vim emulation layer known as "vim mode". On this page, you will learn how to turn Zed's vim mode on or off, what tools and commands Zed provides to help you navigate and edit your code, and generally how to make the most of vim mode in Zed. -## Philosophy +You'll learn how to: -Vim mode tries to offer a familiar experience to Vim users: it replicates the behavior of motions and commands precisely when it makes sense and uses Zed-specific functionality to provide an editing experience that "just works" without requiring configuration on your part. This includes support for semantic navigation, multiple cursors, or other features usually provided by plugins like surrounding text. 
+- Understand the core differences between Zed's vim mode and traditional Vim +- Enable or disable vim mode +- Make the most of Zed-specific features within vim mode +- Customize vim mode key bindings +- Configure vim mode settings + +Whether you're new to vim mode or an experienced Vim user looking to optimize your Zed experience, this guide will help you harness the full power of modal editing in Zed. + +## Zed's vim mode design + +Vim mode tries to offer a familiar experience to Vim users: it replicates the behavior of motions and commands precisely when it makes sense and uses Zed-specific functionality to provide an editing experience that "just works" without requiring configuration on your part. + +This includes support for semantic navigation, multiple cursors, or other features usually provided by plugins like surrounding text. So, Zed's vim mode does not replicate Vim one-to-one, but it meshes Vim's modal design with Zed's modern features to provide a more fluid experience. It's also configurable, so you can add your own key bindings or override the defaults. -> **Note:** The foundations of Zed's vim mode should already cover many use cases, and we're always looking to improve it. If you find missing features that you rely on in your workflow, please [file an issue](https://github.com/zed-industries/zed/issues). +### Core differences + +There are four types of features in vim mode that use Zed's core functionality, leading to some differences in behavior: + +1. **Motions**: vim mode uses Zed's semantic parsing to tune the behavior of motions per language. For example, in Rust, jumping to matching bracket with `%` works with the pipe character `|`. In JavaScript, `w` considers `$` to be a word character. +2. **Visual block selections**: vim mode uses Zed's multiple cursor to emulate visual block selections, making block selections a lot more flexible. For example, anything you insert after a block selection updates on every line in real-time, and you can add or remove cursors anytime. +3. **Macros**: vim mode uses Zed's recording system for vim macros. So, you can capture and replay more complex actions, like autocompletion. +4. **Search and replace**: vim mode uses Zed's search system, so, the syntax for regular expressions is slightly different compared to Vim. [Head to the Regex differences section](#regex-differences) for details. + +> **Note:** The foundations of Zed's vim mode should already cover many use cases, and we're always looking to improve it. If you find missing features that you rely on in your workflow, please [file an issue on GitHub](https://github.com/zed-industries/zed/issues). ## Enabling and disabling vim mode @@ -16,136 +37,351 @@ When you first open Zed, you'll see a checkbox on the welcome screen that allows If you missed this, you can toggle vim mode on or off anytime by opening the command palette and using the workspace command `toggle vim mode`. +> **Note**: This command toggles the following property in your user settings: +> +> ```json +> { +> "vim_mode": true +> } +> ``` + ## Zed-specific features Zed is built on a modern foundation that (among other things) uses tree-sitter and language servers to understand the content of the file you're editing and supports multiple cursors out of the box. Vim mode has several "core Zed" key bindings that will help you make the most of Zed's specific feature set. +### Language server + +The following commands use the language server to help you navigate and refactor your code. 
+ +| Command | Default Shortcut | +| ---------------------------------------- | ---------------- | +| Go to definition | `g d` | +| Go to declaration | `g D` | +| Go to type definition | `g y` | +| Go to implementation | `g I` | +| Rename (change definition) | `c d` | +| Go to All references to the current word | `g A` | +| Find symbol in current file | `g s` | +| Find symbol in entire project | `g S` | +| Go to next diagnostic | `g ]` or `] d` | +| Go to previous diagnostic | `g [` or `[ d` | +| Show inline error (hover) | `g h` | +| Open the code actions menu | `g .` | + +### Git + +| Command | Default Shortcut | +| ------------------------- | ---------------- | +| Go to next git change | `] c` | +| Go to previous git change | `[ c` | + +### Treesitter + +Treesitter is a powerful tool that Zed uses to understand the structure of your code. These commands help you navigate your code semantically. + +| Command | Default Shortcut | +| ---------------------------- | ---------------- | +| Select a smaller syntax node | `] x` | +| Select a larger syntax node | `[ x` | + +### Multi cursor + +These commands help you manage multiple cursors in Zed. + +| Command | Default Shortcut | +| ------------------------------------------------------------ | ---------------- | +| Add a cursor selecting the next copy of the current word | `g l` | +| Add a cursor selecting the previous copy of the current word | `g L` | +| Skip latest word selection, and add next | `g >` | +| Skip latest word selection, and add previous | `g <` | +| Add a visual selection for every copy of the current word | `g a` | + +### Pane management + +These commands open new panes or jump to specific panes. + +| Command | Default Shortcut | +| ------------------------------------------ | ------------------ | +| Open a project-wide search | `g /` | +| Open the current search excerpt | `g ` | +| Open the current search excerpt in a split | ` ` | +| Go to definition in a split | ` g d` | +| Go to type definition in a split | ` g D` | + +### In insert mode + +The following commands help you bring up Zed's completion menu, request a suggestion from GitHub Copilot, or open the inline AI assistant without leaving insert mode. + +| Command | Default Shortcut | +| ---------------------------------------------------------------------------- | ---------------- | +| Open the completion menu | `ctrl-x ctrl-o` | +| Request GitHub Copilot suggestion (requires GitHub Copilot to be configured) | `ctrl-x ctrl-c` | +| Open the inline AI assistant (requires a configured assistant) | `ctrl-x ctrl-a` | +| Open the code actions menu | `ctrl-x ctrl-l` | +| Hides all suggestions | `ctrl-x ctrl-z` | + +### Supported plugins + +Zed's vim mode includes some features that are usually provided by very popular plugins in the Vim ecosystem: + +- You can surround text objects with `ys` (yank surround), change surrounding with `cs`, and delete surrounding with `ds`. +- You can comment and uncomment selections with `gc` in visual mode and `gcc` in normal mode. +- The project panel supports many shortcuts modeled after the Vim plugin `netrw`: navigation with `hjkl`, open file with `o`, open file in a new tab with `t`, etc. +- You can add key bindings to your keymap to navigate "camelCase" names. [Head down to the Optional key bindings](#optional-key-bindings) section to learn how. + +## Command palette + +Vim mode allows you to open Zed's command palette with `:`. You can then type to access any usual Zed command. 
Additionally, vim mode adds aliases for popular Vim commands to ensure your muscle memory transfers to Zed. For example, you can write `:w` or `:write` to save the file. + +Below, you'll find tables listing the commands you can use in the command palette. We put optional characters in square brackets to indicate that you can omit them. + +> **Note**: We don't emulate the full power of Vim's command line yet. In particular, commands currently do not support arguments. Please [file issues on GitHub](https://github.com/zed-industries/zed) as you find things that are missing from the command palette. + +### File and window management + +This table shows commands for managing windows, tabs, and panes. As commands don't support arguments currently, you cannot specify a filename when saving or creating a new file. + +| Command | Description | +| -------------- | ---------------------------------------------------- | +| `:w[rite][!]` | Save the current file | +| `:wq[!]` | Save the file and close the buffer | +| `:q[uit][!]` | Close the buffer | +| `:wa[ll][!]` | Save all open files | +| `:wqa[ll][!]` | Save all open files and close all buffers | +| `:qa[ll][!]` | Close all buffers | +| `:[e]x[it][!]` | Close the buffer | +| `:up[date]` | Save the current file | +| `:cq` | Quit completely (close all running instances of Zed) | +| `:vs[plit]` | Split the pane vertically | +| `:sp[lit]` | Split the pane horizontally | +| `:new` | Create a new file in a horizontal split | +| `:vne[w]` | Create a new file in a vertical split | +| `:tabedit` | Create a new file in a new tab | +| `:tabnew` | Create a new file in a new tab | +| `:tabn[ext]` | Go to the next tab | +| `:tabp[rev]` | Go to previous tab | +| `:tabc[lose]` | Close the current tab | + +> **Note:** The `!` character is used to force the command to execute without saving changes or prompting before overwriting a file. + +### Ex commands + +These ex commands open Zed's various panels and windows. + +| Command | Default Shortcut | +| ---------------------------- | ---------------- | +| Open the project panel | `:E[xplore]` | +| Open the collaboration panel | `:C[ollab]` | +| Open the chat panel | `:Ch[at]` | +| Open the AI panel | `:A[I]` | +| Open the notifications panel | `:No[tif]` | +| Open the feedback window | `:fe[edback]` | +| Open the diagnostics window | `:cl[ist]` | +| Open the terminal | `:te[rm]` | +| Open the extensions window | `:Ext[ensions]` | + +### Navigating diagnostics + +These commands navigate diagnostics. + +| Command | Description | +| ------------------------ | ------------------------------ | +| `:cn[ext]` or `:ln[ext]` | Go to the next diagnostic | +| `:cp[rev]` or `:lp[rev]` | Go to the previous diagnostics | +| `:cc` or `:ll` | Open the errors page | + +### Git + +These commands interact with the version control system git. + +| Command | Description | +| --------------- | ------------------------------------------------------- | +| `:dif[fupdate]` | View the diff under the cursor (`d o` in normal mode) | +| `:rev[ert]` | Revert the diff under the cursor (`d p` in normal mode) | + +### Jump + +These commands jump to specific positions in the file. + +| Command | Description | +| ------------------- | ----------------------------------- | +| `:` | Jump to a line number | +| `:$` | Jump to the end of the file | +| `:/foo` and `:?foo` | Jump to next/prev line matching foo | + +### Replacement + +This command replaces text. It emulates the substitute command in vim. 
The substitute command uses regular expressions, and Zed uses a slightly different syntax than vim. You can learn more about Zed's syntax below, [in the regex differences section](#regex-differences). Also, by default, Zed always replaces all occurrences of the search pattern in the current line. + +| Command | Description | +| -------------------- | --------------------------------- | +| `:[range]s/foo/bar/` | Replace instances of foo with bar | + +### Editing + +These commands help you edit text. + +| Command | Description | +| ----------------- | ------------------------------------------------------- | +| `:j[oin]` | Join the current line | +| `:d[elete][l][p]` | Delete the current line | +| `:s[ort] [i]` | Sort the current selection (with i, case-insensitively) | +| `:y[ank]` | Yank (copy) the current selection or line | + +### Command mnemonics + +As any Zed command is available, you may find that it's helpful to remember mnemonics that run the correct command. For example: + +- `:diffs` for "toggle all hunk diffs" +- `:cpp` for "copy path to file" +- `:crp` for "copy relative path" +- `:reveal` for "reveal in finder" +- `:zlog` for "open zed log" +- `:clank` for "cancel language server work" + +## Customizing key bindings + +In this section, we'll learn how to customize the key bindings of Zed's vim mode. You'll learn: + +- How to select the correct context for your new key bindings. +- Useful contexts for vim mode key bindings. +- Common key bindings to customize for extra productivity. + +### Selecting the correct context + +Zed's key bindings are evaluated only when the `"context"` property matches your location in the editor. For example, if you add key bindings to the `"Editor"` context, they will only work when you're editing a file. If you add key bindings to the `"Workspace"` context, they will work everywhere in Zed. Here's an example of a key binding that saves when you're editing a file: + +```json +{ + "context": "Editor", + "bindings": { + "ctrl-s": "file::Save" + } +} ``` -# Language server -g d Go to definition -g D Go to declaration -g y Go to type definition -g I Go to implementation - -c d Rename (change definition) -g A Go to All references to the current word - -g s Find symbol in current file -g S Find symbol in entire project - -g ] Go to next diagnostic -g [ Go to previous diagnostic -] d Go to next diagnostic -[ d Go to previous diagnostic -g h Show inline error (hover) -g . Open the code actions menu - -# Git -] c Go to next git change -[ c Go to previous git change - -# Treesitter -] x Select a smaller syntax node -[ x Select a larger syntax node - -# Multi cursor -g l Add a visual selection for the next copy of the current word -g L The same, but backwards -g > Skip latest word selection, and add next. 
-g < The same, but backwards -g a Add a visual selection for every copy of the current word - -# Pane management -g / Open a project-wide search -g Open the current search excerpt - Open the current search excerpt in a split - g d Go to definition in a split - g D Go to type definition in a split - -# Insert mode -ctrl-x ctrl-o Open the completion menu -ctrl-x ctrl-c Request GitHub Copilot suggestion (if configured) -ctrl-x ctrl-a Open the inline AI assistant (if configured) -ctrl-x ctrl-l Open the code actions menu -ctrl-x ctrl-z Hides all suggestions - -# Ex commands -:E[xplore] Open the project panel -:C[ollab] Open the collaboration panel -:Ch[at] Open the chat panel -:A[I] Open the AI panel -:No[tif] Open the notifications panel -:fe[edback] Open the feedback window -:cl[ist] Open the diagnostics window -:te[rm] Open the terminal -:Ext[ensions] Open the extensions window + +Contexts are nested, so when you're editing a file, the context is the `"Editor"` context, which is inside the `"Pane"` context, which is inside the `"Workspace"` context. That's why any key bindings you add to the `"Workspace"` context will work when you're editing a file. Here's an example: + +```json +// This key binding will work when you're editing a file. It comes built into Zed by default as the workspace: save command. +{ + "context": "Workspace", + "bindings": { + "ctrl-s": "file::Save" + } +} ``` -Vim mode uses Zed to define concepts like "brackets" (for the `%` key) and "words" (for motions like `w` and `e`). This does lead to some differences, but they are mostly positive. For example `%` considers `|` to be a bracket in languages like Rust; and `w` considers `$` to be a word-character in languages like JavaScript. +Contexts are expressions. They support boolean operators like `&&` (and) and `||` (or). For example, you can use the context `"Editor && vim_mode == normal"` to create key bindings that only work when you're editing a file _and_ you're in vim's normal mode. -Vim mode emulates visual block mode using Zed's multiple cursor support. This again leads to some differences, but is much more powerful. +Vim mode adds several contexts to the `"Editor"` context: -Vim's macro support (`q` and `@`) is implemented using Zed's actions. This lets us support recording and replaying of autocompleted code, etc. Unlike Vim, Zed does not re-use the yank registers for recording macros, they are two separate namespaces. +| Operator | Description | +| -------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| VimControl | Indicates that vim keybindings should work. Currently an alias for `vim_mode == normal \|\| vim_mode == visual \|\| vim_mode == operator`, but the definition may change over time | +| vim_mode == normal | Normal mode | +| vim_mode == visual | Visual mode | +| vim_mode == insert | Insert mode | +| vim_mode == replace | Replace mode | +| vim_mode == waiting | Waiting for an arbitrary key (e.g., after typing `f` or `t`) | +| vim_mode == operator | Waiting for another binding to trigger (e.g., after typing `c` or `d`) | +| vim_operator | Set to `none` unless `vim_mode == operator`, in which case it is set to the current operator's default keybinding (e.g., after typing `d`, `vim_operator == d`) | -Finally, vim mode's search and replace functionality is backed by Zed's. 
This means that the pattern syntax is slightly different, see the section on [Regex differences](#regex-differences) for details. +> **Note**: Contexts are matched only on one level at a time. So it is possible to use the expression `"Editor && vim_mode == normal"`, but `"Workspace && vim_mode == normal"` will never match because we set the vim context at the `"Editor"` level. -## Custom key bindings +### Useful contexts for vim mode key bindings -You can edit your personal key bindings with `:keymap`. -For vim-specific shortcuts, you may find the following template a good place to start. +Here's a template with useful vim mode contexts to help you customize your vim mode key bindings. You can copy it and integrate it into your user keymap. ```json [ { "context": "VimControl && !menu", "bindings": { - // put key-bindings here if you want them to work in normal & visual mode + // Put key bindings here if you want them to work in normal & visual mode. } }, { "context": "vim_mode == normal && !menu", "bindings": { - // "shift-y": ["workspace::SendKeystrokes", "y $"] // use nvim's Y behavior + // "shift-y": ["workspace::SendKeystrokes", "y $"] // Use neovim's yank behavior: yank to end of line. } }, { "context": "vim_mode == insert", "bindings": { - // "j k": "vim::NormalBefore" // remap jk in insert mode to escape. + // "j k": "vim::NormalBefore" // In insert mode, make jk escape to normal mode. } }, { "context": "EmptyPane || SharedScreen", "bindings": { - // put key-bindings here (in addition to above) if you want them to - // work when no editor exists + // Put key bindings here (in addition to the context above) if you want them to + // work when no editor exists. // "space f": "file_finder::Toggle" } } ] ``` -If you would like to emulate vim's `map` (`nmap` etc.) commands you can bind to the [`workspace::SendKeystrokes`](./key-bindings.md#remapping-keys) action in the correct context. +> **Note**: If you would like to emulate Vim's `map` commands (`nmap`, etc.), you can use the action `workspace::SendKeystrokes` in the correct context. -Check out the [bindings that are enabled by default in vim mode](https://github.com/zed-industries/zed/blob/main/assets/keymaps/vim.json). +### Optional key bindings -### Contexts +By default, you can navigate between the different files open in the editor with shortcuts like `ctrl+w` followed by one of `hjkl` to move to the left, down, up, or right, respectively. -Zed's keyboard bindings are evaluated only when the `"context"` matches the location you are in on the screen. Locations are nested, so when you're editing, you're in the `"Workspace"` location, which is at the top, containing a `"Pane"` that contains an `"Editor"`. +But you cannot use the same shortcuts to move between all the editor docks (the terminal, project panel, assistant panel, ...). If you want to use the same shortcuts to navigate to the docks, you can add the following key bindings to your user keymap. -Contexts are matched only on one level at a time. So, it is possible to combine `Editor && vim_mode == normal`, but `Workspace && vim_mode == normal` will never match because we set the vim context at the `Editor` level. +```json +{ + "context": "Dock", + "bindings": { + "ctrl-w h": ["workspace::ActivatePaneInDirection", "Left"], + "ctrl-w l": ["workspace::ActivatePaneInDirection", "Right"], + "ctrl-w k": ["workspace::ActivatePaneInDirection", "Up"], + "ctrl-w j": ["workspace::ActivatePaneInDirection", "Down"] + // ... 
or other keybindings + } +} +``` -Vim mode adds several contexts to the `Editor`: +Subword motion, which allows you to navigate and select individual words in camelCase or snake_case, is not enabled by default. To enable it, add these bindings to your keymap. -- `vim_mode` is similar to, but not identical to, the current mode. It starts as one of `normal`, `visual`, `insert` or `replace` (depending on your mode). If you are mid-way through typing a sequence, `vim_mode` will be either `waiting` if it's waiting for an arbitrary key (for example after typing `f` or `t`), or `operator` if it's waiting for another binding to trigger (for example after typing `c` or `d`). -- `vim_operator` is set to `none` unless `vim_mode == operator` in which case it is set to the current operator's default keybinding (for example after typing `d`, `vim_operator == d`). -- `"VimControl"` indicates that vim keybindings should work. It is currently an alias for `vim_mode == normal || vim_mode == visual || vim_mode == operator`, but the definition may change over time. +```json +[ + { + "context": "VimControl && !menu && vim_mode != operator", + "bindings": { + "w": "vim::NextSubwordStart", + "b": "vim::PreviousSubwordStart", + "e": "vim::NextSubwordEnd", + "g e": "vim::PreviousSubwordEnd" + } + } +] +``` + +Vim mode comes with shortcuts to surround the selection in normal mode (`ys`), but it doesn't have a shortcut to add surrounds in visual mode. By default, `shift-s` substitutes the selection (erases the text and enters insert mode). To use `shift-s` to add surrounds in visual mode, you can add the following object to your keymap. + +```json +{ + "context": "vim_mode == visual", + "bindings": { + "shift-s": [ + "vim::PushOperator", + { + "AddSurrounds": {} + } + ] + } +} +``` ### Restoring common text editing keybindings -If you're using vim mode on Linux or Windows, you may find it overrides keybindings you can't live without: Ctrl+v to copy, Ctrl+f to search, etc. You can restore them by copying this data into your keymap: +If you're using vim mode on Linux or Windows, you may find it overrides keybindings you can't live without: `ctrl+v` to copy, `ctrl+f` to search, etc. You can restore them by copying this data into your keymap: ```json { @@ -162,109 +398,39 @@ If you're using vim mode on Linux or Windows, you may find it overrides keybindi }, ``` -## Command palette - -Vim mode allows you to enable Zed’s command palette with `:`. This means that you can use vim's command palette to run any action that Zed supports. - -Additionally, vim mode contains a number of aliases for popular Vim commands to ensure that muscle memory works. For example, `:w` will save the file. - -We do not (yet) emulate the full power of Vim’s command line, in particular, we do not support arguments to commands yet. Please [file issues on GitHub](https://github.com/zed-industries/zed) as you find things that are missing from the command palette. - -As mentioned above, one thing to be aware of is that the regex engine is slightly different from vim's in `:%s/a/b`. - -Currently supported Vim-specific commands: +## Changing vim mode settings -``` -# window management -:w[rite][!], :wq[!], :q[uit][!], :wa[ll][!], :wqa[ll][!], :qa[ll][!], :[e]x[it][!], :up[date] - to save/close tab(s) and pane(s) (no filename is supported yet) -:cq - to quit completely. 
-:vs[plit], :sp[lit] - to split vertically/horizontally (no filename is supported yet) -:new, :vne[w] - to create a new file in a new pane above or to the left -:tabedit, :tabnew - to create a new file in a new tab. -:tabn[ext], :tabp[rev] - to go to previous/next tabs -:tabc[lose] - to close the current tab - -# navigating diagnostics -:cn[ext], :cp[rev], :ln[ext], :lp[rev] - to go to the next/prev diagnostics -:cc, :ll - to open the errors page - -# handling git diff -:dif[fupdate] - to view the diff under the cursor ("d o" in normal mode) -:rev[ert] - to revert the diff under the cursor ("d p" in normal mode) - -# jump to position -: - to jump to a line number -:$ - to jump to the end of the file -:/foo and :?foo - to jump to next/prev line matching foo - -# replacement (/g is always assumed and Zed uses different regex syntax to vim) -:[range]s/foo/bar/ - to replace instances of foo with bar - -# editing -:j[oin] - to join the current line (no range is yet supported) -:d[elete][l][p] - to delete the current line (no range is yet supported) -:s[ort] [i] - to sort the current selection (with i, case-insensitively) -:y[ank] -``` - -As any Zed command is available, you may find that it's helpful to remember mnemonics that run the correct command. For example: +You can change the following settings to modify vim mode's behavior: -``` -:diffs Toggle all Hunk [Diffs] -:cpp [C]o[p]y [P]ath to file -:crp [C]opy [r]elative [P]ath -:reveal [Reveal] in finder -:zlog Open [Z]ed Log -:clank [C]ancel [lan]guage server work[k] -``` +| Property | Description | Default Value | +| ---------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------- | +| use_system_clipboard | Determines how system clipboard is used:
• "always": use for all operations<br> • "never": only use when explicitly specified<br> • "on_yank": use for yank operations
| "always" | +| use_multiline_find | If `true`, `f` and `t` motions extend across multiple lines. | false | +| use_smartcase_find | If `true`, `f` and `t` motions are case-insensitive when the target letter is lowercase. | false | +| toggle_relative_line_numbers | If `true`, line numbers are relative in normal mode and absolute in insert mode, giving you the best of both options. | false | +| custom_digraphs | An object that allows you to add custom digraphs. Read below for an example. | {} | -## Settings - -Vim mode is not enabled by default. To enable vim mode, you need to add the following configuration to your settings file: +Here's an example of adding a digraph for the zombie emoji. This allows you to type `ctrl-k f z` to insert a zombie emoji. You can add as many digraphs as you like. ```json { - "vim_mode": true + "vim": { + "custom_digraphs": { + "fz": "🧟‍♀️" + } + } } ``` -Alternatively, you can enable vim mode by running the `toggle vim mode` command from the command palette. - -Some vim settings are available to modify the default vim behavior: +Here's an example of these settings changed: ```json { "vim": { - // "always": use system clipboard when no register is specified - // "never": don't use system clipboard unless "+ or "* is specified - // "on_yank": use system clipboard for yank operations when no register is specified - "use_system_clipboard": "always", - // Let `f` and `t` motions extend across multiple lines + "use_system_clipboard": "never", "use_multiline_find": true, - // Let `f` and `t` motions match case insensitively if the target is lowercase "use_smartcase_find": true, - // Use relative line numbers in normal mode, absolute in insert mode - // c.f. https://github.com/jeffkreeftmeijer/vim-numbertoggle "toggle_relative_line_numbers": true, - // Add custom digraphs (e.g. ctrl-k f z will insert a zombie emoji) "custom_digraphs": { "fz": "🧟‍♀️" } @@ -272,22 +438,36 @@ Some vim settings are available to modify the default vim behavior: } ``` -There are also a few Zed settings that you may also enjoy if you use vim mode: +## Useful core Zed settings for vim mode + +Here are a few general Zed settings that can help you fine-tune your Vim experience: + +| Property | Description | Default Value | +| ----------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------- | +| cursor_blink | If `true`, the cursor blinks. | `true` | +| relative_line_numbers | If `true`, line numbers in the left gutter are relative to the cursor. | `true` | +| scrollbar | Object that controls the scrollbar display. Set to `{ "show": "never" }` to hide the scroll bar. | `{ "show": "always" }` | +| scroll_beyond_last_line | If set to `"one_page"`, allows scrolling up to one page beyond the last line. Set to `"off"` to prevent this behavior. | `"one_page"` | +| vertical_scroll_margin | The number of lines to keep above or below the cursor when scrolling. Set to `0` to allow the cursor to go up to the edges of the screen vertically. | `3` | +| gutter.line_numbers | Controls the display of line numbers in the gutter. Set the `"line_numbers"` property to `false` to hide line numbers. | `true` | +| command_aliases | Object that defines aliases for commands in the command palette. You can use it to define shortcut names for commands you use often. Read below for examples. 
| `{}` | + +Here's an example of these settings changed: ```json { - // disable cursor blink + // Disable cursor blink "cursor_blink": false, - // use relative line numbers + // Use relative line numbers "relative_line_numbers": true, - // hide the scroll bar + // Hide the scroll bar "scrollbar": { "show": "never" }, - // prevent the buffer from scrolling beyond the last line + // Prevent the buffer from scrolling beyond the last line "scroll_beyond_last_line": "off", - // allow cursor to reach edges of screen + // Allow the cursor to reach the edges of the screen "vertical_scroll_margin": 0, "gutter": { - // disable line numbers completely: + // Disable line numbers completely: "line_numbers": false }, "command_aliases": { @@ -298,74 +478,17 @@ There are also a few Zed settings that you may also enjoy if you use vim mode: } ``` -If you want to navigate between the editor and docks (terminal, project panel, AI assistant panel, etc...), just like you navigate between splits, you can use the following key bindings: - -```json -{ - "context": "Dock", - "bindings": { - "ctrl-w h": ["workspace::ActivatePaneInDirection", "Left"], - "ctrl-w l": ["workspace::ActivatePaneInDirection", "Right"], - "ctrl-w k": ["workspace::ActivatePaneInDirection", "Up"], - "ctrl-w j": ["workspace::ActivatePaneInDirection", "Down"] - // ... or other keybindings - } -} -``` - -Subword motion is not enabled by default. To enable it, add these bindings to your keymap. - -```json -[ - { - "context": "VimControl && !menu && vim_mode != operator", - "bindings": { - "w": "vim::NextSubwordStart", - "b": "vim::PreviousSubwordStart", - "e": "vim::NextSubwordEnd", - "g e": "vim::PreviousSubwordEnd" - } - } -] -``` - -Surrounding the selection in visual mode is also not enabled by default (`shift-s` normally behaves like `c`). To enable it, add the following to your keymap. - -```json -{ - "context": "vim_mode == visual", - "bindings": { - "shift-s": [ - "vim::PushOperator", - { - "AddSurrounds": {} - } - ] - } -} -``` - -## Supported plugins - -Zed has nascent support for some Vim plugins: - -- From `vim-surround`, `ys`, `cs` and `ds` work. Though you cannot add new HTML tags yet. -- From `vim-commentary`, `gc` in visual mode and `gcc` in normal mode. Though you cannot operate on arbitrary objects yet. -- From `netrw`, most keybindings are supported in the project panel. -- From `vim-spider`/`CamelCaseMotion` you can use subword motions as described above. +The `command_aliases` property is a single object that maps keys or key sequences to vim mode commands. The example above defines multiple aliases: `W` for `w`, `Wq` for `wq`, and `Q` for `q`. ## Regex differences -Zed uses a different regular expression engine from Vim. This means that you will have to use a different syntax for some things. - -Notably: +Zed uses a different regular expression engine from Vim. This means that you will have to use a different syntax in some cases. Here are the most common differences: -- Vim uses `\(` and `\)` to represent capture groups, in Zed these are `(` and `)`. -- On the flip side, `(` and `)` represent literal parentheses, but in Zed these must be escaped to `\(` and `\)`. -- When replacing, Vim uses `\0` to represent the entire match, in Zed this is `$0`, same for numbered capture groups `\1` -> `$1`. 
-- Vim uses `/g` to indicate "all matches on one line", in Zed this is implied -- Vim uses `/i` to indicate "case-insensitive", in Zed you can either use `(?i)` at the start of the pattern or toggle case-sensitivity with `cmd-option-c`. +- **Capture groups**: Vim uses `\(` and `\)` to represent capture groups, in Zed these are `(` and `)`. On the flip side, in Vim, `(` and `)` represent literal parentheses, but in Zed these must be escaped to `\(` and `\)`. +- **Matches**: When replacing, Vim uses the backslash character followed by a number to represent a matched capture group. For example, `\1`. Zed uses the dollar sign instead. So, when in Vim you use `\0` to represent the entire match, in Zed the syntax is `$0` instead. Same for numbered capture groups: `\1` in Vim is `$1` in Zed. +- **Global option**: By default, in Vim, regex searches only match the first occurrence on a line, and you append `/g` at the end of your query to find all matches. In Zed, regex searches are global by default. +- **Case sensitivity**: Vim uses `/i` to indicate a case-insensitive search. In Zed you can either write `(?i)` at the start of the pattern or toggle case-sensitivity with the shortcut {#kb search::ToggleCaseSensitive}. -To help with the transition, the command palette will fix parentheses and replace groups for you when you run `:%s//`. So `%s:/\(a\)(b)/\1/` will be converted into a search for "(a)\(b\)" and a replacement of "$1". +> **Note**: To help with the transition, the command palette will fix parentheses and replace groups for you when you write a Vim-style substitute command, `:%s//`. So, Zed will convert `%s:/\(a\)(b)/\1/` into a search for "(a)\(b\)" and a replacement of "$1". For the full syntax supported by Zed's regex engine [see the regex crate documentation](https://docs.rs/regex/latest/regex/#syntax). From a36706aed6e7f582f731a4f33ef3b056dac25f36 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Mon, 23 Sep 2024 09:11:58 -0600 Subject: [PATCH 012/228] Fix up/down project_id confusion (#18099) Release Notes: - ssh remoting: Fix LSP queries run over collab --- crates/project/src/lsp_store.rs | 137 +++++++++++-------- crates/project/src/project.rs | 19 +-- crates/project/src/worktree_store.rs | 103 +++++++++----- crates/remote_server/src/headless_project.rs | 2 +- 4 files changed, 161 insertions(+), 100 deletions(-) diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 95ca84236001c..4506fcc6feb43 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -50,7 +50,7 @@ use parking_lot::{Mutex, RwLock}; use postage::watch; use rand::prelude::*; -use rpc::AnyProtoClient; +use rpc::{proto::SSH_PROJECT_ID, AnyProtoClient}; use serde::Serialize; use settings::{Settings, SettingsLocation, SettingsStore}; use sha2::{Digest, Sha256}; @@ -132,6 +132,7 @@ impl LocalLspStore { pub struct RemoteLspStore { upstream_client: AnyProtoClient, + upstream_project_id: u64, } impl RemoteLspStore {} @@ -164,8 +165,7 @@ impl LspStoreMode { pub struct LspStore { mode: LspStoreMode, - downstream_client: Option, - project_id: u64, + downstream_client: Option<(AnyProtoClient, u64)>, nonce: u128, buffer_store: Model, worktree_store: Model, @@ -302,14 +302,16 @@ impl LspStore { } } - pub fn upstream_client(&self) -> Option { + pub fn upstream_client(&self) -> Option<(AnyProtoClient, u64)> { match &self.mode { LspStoreMode::Ssh(SshLspStore { upstream_client, .. - }) - | LspStoreMode::Remote(RemoteLspStore { - upstream_client, .. 
- }) => Some(upstream_client.clone()), + }) => Some((upstream_client.clone(), SSH_PROJECT_ID)), + LspStoreMode::Remote(RemoteLspStore { + upstream_client, + upstream_project_id, + .. + }) => Some((upstream_client.clone(), *upstream_project_id)), LspStoreMode::Local(_) => None, } } @@ -374,7 +376,6 @@ impl LspStore { }), }), downstream_client: None, - project_id: 0, buffer_store, worktree_store, languages: languages.clone(), @@ -395,10 +396,11 @@ impl LspStore { &self, buffer: Model, client: AnyProtoClient, + upstream_project_id: u64, request: R, cx: &mut ModelContext<'_, LspStore>, ) -> Task::Response>> { - let message = request.to_proto(self.project_id, buffer.read(cx)); + let message = request.to_proto(upstream_project_id, buffer.read(cx)); cx.spawn(move |this, cx| async move { let response = client.request(message).await?; let this = this.upgrade().context("project dropped")?; @@ -413,7 +415,6 @@ impl LspStore { worktree_store: Model, languages: Arc, upstream_client: AnyProtoClient, - project_id: u64, cx: &mut ModelContext, ) -> Self { cx.subscribe(&buffer_store, Self::on_buffer_store_event) @@ -429,7 +430,6 @@ impl LspStore { current_lsp_settings: Default::default(), }), downstream_client: None, - project_id, buffer_store, worktree_store, languages: languages.clone(), @@ -461,9 +461,11 @@ impl LspStore { .detach(); Self { - mode: LspStoreMode::Remote(RemoteLspStore { upstream_client }), + mode: LspStoreMode::Remote(RemoteLspStore { + upstream_client, + upstream_project_id: project_id, + }), downstream_client: None, - project_id, buffer_store, worktree_store, languages: languages.clone(), @@ -768,13 +770,13 @@ impl LspStore { } pub(crate) fn send_diagnostic_summaries(&self, worktree: &mut Worktree) { - if let Some(client) = self.downstream_client.clone() { + if let Some((client, downstream_project_id)) = self.downstream_client.clone() { if let Some(summaries) = self.diagnostic_summaries.get(&worktree.id()) { for (path, summaries) in summaries { for (&server_id, summary) in summaries { client .send(proto::UpdateDiagnosticSummary { - project_id: self.project_id, + project_id: downstream_project_id, worktree_id: worktree.id().to_proto(), summary: Some(summary.to_proto(server_id, path)), }) @@ -798,8 +800,14 @@ impl LspStore { { let buffer = buffer_handle.read(cx); - if let Some(upstream_client) = self.upstream_client() { - return self.send_lsp_proto_request(buffer_handle, upstream_client, request, cx); + if let Some((upstream_client, upstream_project_id)) = self.upstream_client() { + return self.send_lsp_proto_request( + buffer_handle, + upstream_client, + upstream_project_id, + request, + cx, + ); } let language_server = match server { @@ -1077,9 +1085,9 @@ impl LspStore { push_to_history: bool, cx: &mut ModelContext, ) -> Task> { - if let Some(upstream_client) = self.upstream_client() { + if let Some((upstream_client, project_id)) = self.upstream_client() { let request = proto::ApplyCodeAction { - project_id: self.project_id, + project_id, buffer_id: buffer_handle.read(cx).remote_id().into(), action: Some(Self::serialize_code_action(&action)), }; @@ -1163,9 +1171,9 @@ impl LspStore { server_id: LanguageServerId, cx: &mut ModelContext, ) -> Task> { - if let Some(upstream_client) = self.upstream_client() { + if let Some((upstream_client, project_id)) = self.upstream_client() { let request = proto::ResolveInlayHint { - project_id: self.project_id, + project_id, buffer_id: buffer_handle.read(cx).remote_id().into(), language_server_id: server_id.0 as u64, hint: 
Some(InlayHints::project_to_proto_hint(hint.clone())), @@ -1274,9 +1282,9 @@ impl LspStore { trigger: String, cx: &mut ModelContext, ) -> Task>> { - if let Some(client) = self.upstream_client() { + if let Some((client, project_id)) = self.upstream_client() { let request = proto::OnTypeFormatting { - project_id: self.project_id, + project_id, buffer_id: buffer.read(cx).remote_id().into(), position: Some(serialize_anchor(&position)), trigger, @@ -1424,11 +1432,11 @@ impl LspStore { range: Range, cx: &mut ModelContext, ) -> Task> { - if let Some(upstream_client) = self.upstream_client() { + if let Some((upstream_client, project_id)) = self.upstream_client() { let request_task = upstream_client.request(proto::MultiLspQuery { buffer_id: buffer_handle.read(cx).remote_id().into(), version: serialize_version(&buffer_handle.read(cx).version()), - project_id: self.project_id, + project_id, strategy: Some(proto::multi_lsp_query::Strategy::All( proto::AllLanguageServers {}, )), @@ -1437,7 +1445,7 @@ impl LspStore { range: range.clone(), kinds: None, } - .to_proto(self.project_id, buffer_handle.read(cx)), + .to_proto(project_id, buffer_handle.read(cx)), )), }); let buffer = buffer_handle.clone(); @@ -1504,10 +1512,11 @@ impl LspStore { ) -> Task>> { let language_registry = self.languages.clone(); - if let Some(upstream_client) = self.upstream_client() { + if let Some((upstream_client, project_id)) = self.upstream_client() { let task = self.send_lsp_proto_request( buffer.clone(), upstream_client, + project_id, GetCompletions { position, context }, cx, ); @@ -1603,14 +1612,13 @@ impl LspStore { ) -> Task> { let client = self.upstream_client(); let language_registry = self.languages.clone(); - let project_id = self.project_id; let buffer_id = buffer.read(cx).remote_id(); let buffer_snapshot = buffer.read(cx).snapshot(); cx.spawn(move |this, cx| async move { let mut did_resolve = false; - if let Some(client) = client { + if let Some((client, project_id)) = client { for completion_index in completion_indices { let (server_id, completion) = { let completions_guard = completions.read(); @@ -1811,8 +1819,7 @@ impl LspStore { let buffer = buffer_handle.read(cx); let buffer_id = buffer.remote_id(); - if let Some(client) = self.upstream_client() { - let project_id = self.project_id; + if let Some((client, project_id)) = self.upstream_client() { cx.spawn(move |_, mut cx| async move { let response = client .request(proto::ApplyCompletionAdditionalEdits { @@ -1927,9 +1934,9 @@ impl LspStore { let buffer_id = buffer.remote_id().into(); let lsp_request = InlayHints { range }; - if let Some(client) = self.upstream_client() { + if let Some((client, project_id)) = self.upstream_client() { let request = proto::InlayHints { - project_id: self.project_id, + project_id, buffer_id, start: Some(serialize_anchor(&range_start)), end: Some(serialize_anchor(&range_end)), @@ -1977,16 +1984,16 @@ impl LspStore { ) -> Task> { let position = position.to_point_utf16(buffer.read(cx)); - if let Some(client) = self.upstream_client() { + if let Some((client, upstream_project_id)) = self.upstream_client() { let request_task = client.request(proto::MultiLspQuery { buffer_id: buffer.read(cx).remote_id().into(), version: serialize_version(&buffer.read(cx).version()), - project_id: self.project_id, + project_id: upstream_project_id, strategy: Some(proto::multi_lsp_query::Strategy::All( proto::AllLanguageServers {}, )), request: Some(proto::multi_lsp_query::Request::GetSignatureHelp( - GetSignatureHelp { position }.to_proto(self.project_id, 
buffer.read(cx)), + GetSignatureHelp { position }.to_proto(upstream_project_id, buffer.read(cx)), )), }); let buffer = buffer.clone(); @@ -2049,16 +2056,16 @@ impl LspStore { position: PointUtf16, cx: &mut ModelContext, ) -> Task> { - if let Some(client) = self.upstream_client() { + if let Some((client, upstream_project_id)) = self.upstream_client() { let request_task = client.request(proto::MultiLspQuery { buffer_id: buffer.read(cx).remote_id().into(), version: serialize_version(&buffer.read(cx).version()), - project_id: self.project_id, + project_id: upstream_project_id, strategy: Some(proto::multi_lsp_query::Strategy::All( proto::AllLanguageServers {}, )), request: Some(proto::multi_lsp_query::Request::GetHover( - GetHover { position }.to_proto(self.project_id, buffer.read(cx)), + GetHover { position }.to_proto(upstream_project_id, buffer.read(cx)), )), }); let buffer = buffer.clone(); @@ -2123,9 +2130,9 @@ impl LspStore { pub fn symbols(&self, query: &str, cx: &mut ModelContext) -> Task>> { let language_registry = self.languages.clone(); - if let Some(upstream_client) = self.upstream_client().as_ref() { + if let Some((upstream_client, project_id)) = self.upstream_client().as_ref() { let request = upstream_client.request(proto::GetProjectSymbols { - project_id: self.project_id, + project_id: *project_id, query: query.to_string(), }); cx.foreground_executor().spawn(async move { @@ -2598,8 +2605,7 @@ impl LspStore { downstream_client: AnyProtoClient, _: &mut ModelContext, ) { - self.project_id = project_id; - self.downstream_client = Some(downstream_client.clone()); + self.downstream_client = Some((downstream_client.clone(), project_id)); for (server_id, status) in &self.language_server_statuses { downstream_client @@ -2857,10 +2863,10 @@ impl LspStore { } if !old_summary.is_empty() || !new_summary.is_empty() { - if let Some(downstream_client) = &self.downstream_client { + if let Some((downstream_client, project_id)) = &self.downstream_client { downstream_client .send(proto::UpdateDiagnosticSummary { - project_id: self.project_id, + project_id: *project_id, worktree_id: worktree_id.to_proto(), summary: Some(proto::DiagnosticSummary { path: worktree_path.to_string_lossy().to_string(), @@ -2881,9 +2887,9 @@ impl LspStore { symbol: &Symbol, cx: &mut ModelContext, ) -> Task>> { - if let Some(client) = self.upstream_client() { + if let Some((client, project_id)) = self.upstream_client() { let request = client.request(proto::OpenBufferForSymbol { - project_id: self.project_id, + project_id, symbol: Some(Self::serialize_symbol(symbol)), }); cx.spawn(move |this, mut cx| async move { @@ -3184,6 +3190,17 @@ impl LspStore { envelope: TypedEnvelope, mut cx: AsyncAppContext, ) -> Result { + let response_from_ssh = this.update(&mut cx, |this, _| { + let ssh = this.as_ssh()?; + let mut payload = envelope.payload.clone(); + payload.project_id = SSH_PROJECT_ID; + + Some(ssh.upstream_client.request(payload)) + })?; + if let Some(response_from_ssh) = response_from_ssh { + return response_from_ssh.await; + } + let sender_id = envelope.original_sender_id().unwrap_or_default(); let buffer_id = BufferId::new(envelope.payload.buffer_id)?; let version = deserialize_version(&envelope.payload.version); @@ -4779,10 +4796,11 @@ impl LspStore { // TODO: We should use `adapter` here instead of reaching through the `CachedLspAdapter`. 
let lsp_adapter = adapter.adapter.clone(); - let project_id = self.project_id; + let Some((upstream_client, project_id)) = self.upstream_client() else { + return; + }; let worktree_id = worktree.read(cx).id().to_proto(); - let upstream_client = ssh.upstream_client.clone(); - let name = adapter.name(); + let name = adapter.name().to_string(); let Some(available_language) = self.languages.available_language_for_name(&language) else { log::error!("failed to find available language {language}"); @@ -5165,12 +5183,11 @@ impl LspStore { } }); - let project_id = self.project_id; for (worktree_id, summaries) in self.diagnostic_summaries.iter_mut() { summaries.retain(|path, summaries_by_server_id| { if summaries_by_server_id.remove(&server_id).is_some() { - if let Some(downstream_client) = self.downstream_client.clone() { - downstream_client + if let Some((client, project_id)) = self.downstream_client.clone() { + client .send(proto::UpdateDiagnosticSummary { project_id, worktree_id: worktree_id.to_proto(), @@ -5236,9 +5253,9 @@ impl LspStore { buffers: impl IntoIterator>, cx: &mut ModelContext, ) { - if let Some(client) = self.upstream_client() { + if let Some((client, project_id)) = self.upstream_client() { let request = client.request(proto::RestartLanguageServers { - project_id: self.project_id, + project_id, buffer_ids: buffers .into_iter() .map(|b| b.read(cx).remote_id().to_proto()) @@ -5694,9 +5711,9 @@ impl LspStore { async move { this.update(&mut cx, |this, cx| { cx.emit(LspStoreEvent::RefreshInlayHints); - this.downstream_client.as_ref().map(|client| { + this.downstream_client.as_ref().map(|(client, project_id)| { client.send(proto::RefreshInlayHints { - project_id: this.project_id, + project_id: *project_id, }) }) })? @@ -6073,9 +6090,9 @@ impl LspStore { cx.emit(LspStoreEvent::LanguageServerAdded(server_id)); - if let Some(downstream_client) = self.downstream_client.as_ref() { + if let Some((downstream_client, project_id)) = self.downstream_client.as_ref() { downstream_client.send(proto::StartLanguageServer { - project_id: self.project_id, + project_id: *project_id, server: Some(proto::LanguageServer { id: server_id.0 as u64, name: language_server.name().to_string(), diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 78584cbae0c65..0c54a16187a4a 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -625,7 +625,7 @@ impl Project { let snippets = SnippetProvider::new(fs.clone(), BTreeSet::from_iter([global_snippets_dir]), cx); - let worktree_store = cx.new_model(|_| WorktreeStore::new(None, false, fs.clone())); + let worktree_store = cx.new_model(|_| WorktreeStore::local(false, fs.clone())); cx.subscribe(&worktree_store, Self::on_worktree_store_event) .detach(); @@ -722,7 +722,7 @@ impl Project { SnippetProvider::new(fs.clone(), BTreeSet::from_iter([global_snippets_dir]), cx); let worktree_store = - cx.new_model(|_| WorktreeStore::new(Some(ssh.clone().into()), false, fs.clone())); + cx.new_model(|_| WorktreeStore::remote(false, ssh.clone().into(), 0, None)); cx.subscribe(&worktree_store, Self::on_worktree_store_event) .detach(); @@ -744,7 +744,6 @@ impl Project { worktree_store.clone(), languages.clone(), ssh.clone().into(), - 0, cx, ) }); @@ -874,11 +873,15 @@ impl Project { let role = response.payload.role(); let worktree_store = cx.new_model(|_| { - let mut store = WorktreeStore::new(Some(client.clone().into()), true, fs.clone()); - if let Some(dev_server_project_id) = response.payload.dev_server_project_id { - 
store.set_dev_server_project_id(DevServerProjectId(dev_server_project_id)); - } - store + WorktreeStore::remote( + true, + client.clone().into(), + response.payload.project_id, + response + .payload + .dev_server_project_id + .map(DevServerProjectId), + ) })?; let buffer_store = cx.new_model(|cx| BufferStore::new(worktree_store.clone(), Some(remote_id), cx))?; diff --git a/crates/project/src/worktree_store.rs b/crates/project/src/worktree_store.rs index 5c3b2a00a9866..9f25572fc7ec0 100644 --- a/crates/project/src/worktree_store.rs +++ b/crates/project/src/worktree_store.rs @@ -36,19 +36,27 @@ struct MatchingEntry { respond: oneshot::Sender, } +enum WorktreeStoreState { + Local { + fs: Arc, + }, + Remote { + dev_server_project_id: Option, + upstream_client: AnyProtoClient, + upstream_project_id: u64, + }, +} + pub struct WorktreeStore { next_entry_id: Arc, - upstream_client: Option, - downstream_client: Option, - remote_id: u64, - dev_server_project_id: Option, + downstream_client: Option<(AnyProtoClient, u64)>, retain_worktrees: bool, worktrees: Vec, worktrees_reordered: bool, #[allow(clippy::type_complexity)] loading_worktrees: HashMap, Shared, Arc>>>>, - fs: Arc, + state: WorktreeStoreState, } pub enum WorktreeStoreEvent { @@ -69,27 +77,37 @@ impl WorktreeStore { client.add_model_request_handler(Self::handle_expand_project_entry); } - pub fn new( - upstream_client: Option, - retain_worktrees: bool, - fs: Arc, - ) -> Self { + pub fn local(retain_worktrees: bool, fs: Arc) -> Self { Self { next_entry_id: Default::default(), loading_worktrees: Default::default(), - dev_server_project_id: None, downstream_client: None, worktrees: Vec::new(), worktrees_reordered: false, retain_worktrees, - remote_id: 0, - upstream_client, - fs, + state: WorktreeStoreState::Local { fs }, } } - pub fn set_dev_server_project_id(&mut self, id: DevServerProjectId) { - self.dev_server_project_id = Some(id); + pub fn remote( + retain_worktrees: bool, + upstream_client: AnyProtoClient, + upstream_project_id: u64, + dev_server_project_id: Option, + ) -> Self { + Self { + next_entry_id: Default::default(), + loading_worktrees: Default::default(), + downstream_client: None, + worktrees: Vec::new(), + worktrees_reordered: false, + retain_worktrees, + state: WorktreeStoreState::Remote { + upstream_client, + upstream_project_id, + dev_server_project_id, + }, + } } /// Iterates through all worktrees, including ones that don't appear in the project panel @@ -159,14 +177,28 @@ impl WorktreeStore { ) -> Task>> { let path: Arc = abs_path.as_ref().into(); if !self.loading_worktrees.contains_key(&path) { - let task = if let Some(client) = self.upstream_client.clone() { - if let Some(dev_server_project_id) = self.dev_server_project_id { - self.create_dev_server_worktree(client, dev_server_project_id, abs_path, cx) - } else { - self.create_ssh_worktree(client, abs_path, visible, cx) + let task = match &self.state { + WorktreeStoreState::Remote { + upstream_client, + dev_server_project_id, + .. 
+ } => { + if let Some(dev_server_project_id) = dev_server_project_id { + self.create_dev_server_worktree( + upstream_client.clone(), + *dev_server_project_id, + abs_path, + cx, + ) + } else if upstream_client.is_via_collab() { + Task::ready(Err(Arc::new(anyhow!("cannot create worktrees via collab")))) + } else { + self.create_ssh_worktree(upstream_client.clone(), abs_path, visible, cx) + } + } + WorktreeStoreState::Local { fs } => { + self.create_local_worktree(fs.clone(), abs_path, visible, cx) } - } else { - self.create_local_worktree(abs_path, visible, cx) }; self.loading_worktrees.insert(path.clone(), task.shared()); @@ -236,11 +268,11 @@ impl WorktreeStore { fn create_local_worktree( &mut self, + fs: Arc, abs_path: impl AsRef, visible: bool, cx: &mut ModelContext, ) -> Task, Arc>> { - let fs = self.fs.clone(); let next_entry_id = self.next_entry_id.clone(); let path: Arc = abs_path.as_ref().into(); @@ -374,6 +406,17 @@ impl WorktreeStore { self.worktrees_reordered = worktrees_reordered; } + fn upstream_client(&self) -> Option<(AnyProtoClient, u64)> { + match &self.state { + WorktreeStoreState::Remote { + upstream_client, + upstream_project_id, + .. + } => Some((upstream_client.clone(), *upstream_project_id)), + WorktreeStoreState::Local { .. } => None, + } + } + pub fn set_worktrees_from_proto( &mut self, worktrees: Vec, @@ -389,8 +432,8 @@ impl WorktreeStore { }) .collect::>(); - let client = self - .upstream_client + let (client, project_id) = self + .upstream_client() .clone() .ok_or_else(|| anyhow!("invalid project"))?; @@ -408,7 +451,7 @@ impl WorktreeStore { self.worktrees.push(handle); } else { self.add( - &Worktree::remote(self.remote_id, replica_id, worktree, client.clone(), cx), + &Worktree::remote(project_id, replica_id, worktree, client.clone(), cx), cx, ); } @@ -477,10 +520,9 @@ impl WorktreeStore { } pub fn send_project_updates(&mut self, cx: &mut ModelContext) { - let Some(downstream_client) = self.downstream_client.clone() else { + let Some((downstream_client, project_id)) = self.downstream_client.clone() else { return; }; - let project_id = self.remote_id; let update = proto::UpdateProject { project_id, @@ -549,8 +591,7 @@ impl WorktreeStore { cx: &mut ModelContext, ) { self.retain_worktrees = true; - self.remote_id = remote_id; - self.downstream_client = Some(downsteam_client); + self.downstream_client = Some((downsteam_client, remote_id)); // When shared, retain all worktrees for worktree_handle in self.worktrees.iter_mut() { diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index 9d5c26d6c7ce1..0d644a64a6aa7 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -45,7 +45,7 @@ impl HeadlessProject { let languages = Arc::new(LanguageRegistry::new(cx.background_executor().clone())); let worktree_store = cx.new_model(|cx| { - let mut store = WorktreeStore::new(None, true, fs.clone()); + let mut store = WorktreeStore::local(true, fs.clone()); store.shared(SSH_PROJECT_ID, session.clone().into(), cx); store }); From bc751d6c1994634cea98bb855ec2981d3a976d8c Mon Sep 17 00:00:00 2001 From: Boris Verkhovskiy Date: Mon, 23 Sep 2024 10:03:55 -0600 Subject: [PATCH 013/228] Don't highlight Python import names as type (#17984) Works on #14892 Follow up to #17473 --- crates/languages/src/python/highlights.scm | 18 ------------------ 1 file changed, 18 deletions(-) diff --git a/crates/languages/src/python/highlights.scm b/crates/languages/src/python/highlights.scm index 
df6b60466cd2a..3255677bedc42 100644 --- a/crates/languages/src/python/highlights.scm +++ b/crates/languages/src/python/highlights.scm @@ -2,24 +2,6 @@ (attribute attribute: (identifier) @property) (type (identifier) @type) -; Module imports - -(import_statement - (dotted_name (identifier) @type)) - -(import_statement - (aliased_import - name: (dotted_name (identifier) @type) - alias: (identifier) @type)) - -(import_from_statement - (dotted_name (identifier) @type)) - -(import_from_statement - (aliased_import - name: (dotted_name (identifier) @type) - alias: (identifier) @type)) - ; Function calls (decorator) @function From 3c95a64a23c96303b864335ec55c3ec93ca0e414 Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Mon, 23 Sep 2024 12:11:26 -0400 Subject: [PATCH 014/228] Add a rather-conservative stale issue action in dry-run mode (#18233) Zed is becoming more popular and our issue tracker is only growing larger and larger. I realize that a stale issue action can be controversial, but the way we currently manage issues hasn't scaled well and it will only get worse. We need some crowd-sourced system. Let's ask those who have opened issues if their issues are still valid. This is rather conservative and only targets bugs and crashes. I'll run it in debug mode, report the results, and enable it if it feels right. We can always turn this off if users end up really not liking it. My original rules were: ```txt If an issue is old enough (12 months or older) AND if there are no recent comments from the team (last dev comment is older than 6 months) AND it has less than X upvotes (5) AND it does not have an open PR linked to it AND is a "defect" or "panic / crash" AND does not have a "ignore top-ranking issues" label AND was not opened by a org member AND is open AND is issue (not a pull request) THEN close the issue with a kind message. ``` But only some of these were actually supported in the configuration. Release Notes: - N/A --- .github/workflows/close_stale_issues.yml | 28 ++++++++++++++++++++++++ 1 file changed, 28 insertions(+) create mode 100644 .github/workflows/close_stale_issues.yml diff --git a/.github/workflows/close_stale_issues.yml b/.github/workflows/close_stale_issues.yml new file mode 100644 index 0000000000000..240403169c471 --- /dev/null +++ b/.github/workflows/close_stale_issues.yml @@ -0,0 +1,28 @@ +name: "Close Stale Issues" +on: + schedule: + - cron: "0 1 * * *" + workflow_dispatch: + +jobs: + stale: + runs-on: ubuntu-latest + steps: + - uses: actions/stale@v9 + with: + repo-token: ${{ secrets.GITHUB_TOKEN }} + stale-issue-message: > + Hi there! 👋 + + We're working to clean up our issue tracker by closing older issues that might not be relevant anymore. Are you able to reproduce this issue in the latest version of Zed? If so, please let us know by commenting on this issue and we will keep it open; otherwise, we'll close it in a week. Feel free to open a new issue if you're seeing this message after the issue has been closed. + + Thanks for your help! + close-issue-message: "This issue was closed due to inactivity; feel free to open a new issue if you're still experiencing this problem!" + days-before-stale: 365 + days-before-close: 7 + only-issue-labels: "defect,panic / crash" + operations-per-run: 100 + ascending: true + enable-statistics: true + debug-only: true + stale-issue-label: "stale" From 20826336d9c2815da327ead99006b28dc8800082 Mon Sep 17 00:00:00 2001 From: "Joseph T. 
Lyons" Date: Mon, 23 Sep 2024 12:15:33 -0400 Subject: [PATCH 015/228] update stale issue configuration to use `any-of-issue-labels` (#18236) Release Notes: - N/A --- .github/workflows/close_stale_issues.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/close_stale_issues.yml b/.github/workflows/close_stale_issues.yml index 240403169c471..1cac6450e8605 100644 --- a/.github/workflows/close_stale_issues.yml +++ b/.github/workflows/close_stale_issues.yml @@ -20,7 +20,7 @@ jobs: close-issue-message: "This issue was closed due to inactivity; feel free to open a new issue if you're still experiencing this problem!" days-before-stale: 365 days-before-close: 7 - only-issue-labels: "defect,panic / crash" + any-of-issue-labels: "defect,panic / crash" operations-per-run: 100 ascending: true enable-statistics: true From 65bb989c61e90271e544566b7999feaa9e8ff105 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Mon, 23 Sep 2024 12:16:51 -0400 Subject: [PATCH 016/228] gpui: Update doc comment for `SharedString::new_static` (#18234) This PR updates the doc comment for `SharedString::new_static`. Release Notes: - N/A --- crates/gpui/src/shared_string.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/gpui/src/shared_string.rs b/crates/gpui/src/shared_string.rs index f5aef6adf80be..e1fd4f1a5caa9 100644 --- a/crates/gpui/src/shared_string.rs +++ b/crates/gpui/src/shared_string.rs @@ -10,9 +10,9 @@ use util::arc_cow::ArcCow; pub struct SharedString(ArcCow<'static, str>); impl SharedString { - /// creates a static SharedString - pub const fn new_static(s: &'static str) -> Self { - Self(ArcCow::Borrowed(s)) + /// Creates a static [`SharedString`] from a `&'static str`. + pub const fn new_static(str: &'static str) -> Self { + Self(ArcCow::Borrowed(str)) } } From 11953bbc16c12c61363f6e15d023c6ff9488114a Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Mon, 23 Sep 2024 12:24:49 -0400 Subject: [PATCH 017/228] Disable debug mode for stale issue action (#18237) Release Notes: - N/A --- .github/workflows/close_stale_issues.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/close_stale_issues.yml b/.github/workflows/close_stale_issues.yml index 1cac6450e8605..be4f6f4af04ba 100644 --- a/.github/workflows/close_stale_issues.yml +++ b/.github/workflows/close_stale_issues.yml @@ -24,5 +24,4 @@ jobs: operations-per-run: 100 ascending: true enable-statistics: true - debug-only: true stale-issue-label: "stale" From 1efe87029bffc2b23784247db3a851dc11ba9ae8 Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Mon, 23 Sep 2024 12:32:31 -0400 Subject: [PATCH 018/228] Update stale issues configuration to use 180 days (#18238) Release Notes: - N/A --- .github/workflows/close_stale_issues.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/close_stale_issues.yml b/.github/workflows/close_stale_issues.yml index be4f6f4af04ba..afc28ec180b15 100644 --- a/.github/workflows/close_stale_issues.yml +++ b/.github/workflows/close_stale_issues.yml @@ -18,7 +18,7 @@ jobs: Thanks for your help! close-issue-message: "This issue was closed due to inactivity; feel free to open a new issue if you're still experiencing this problem!" 
- days-before-stale: 365 + days-before-stale: 180 days-before-close: 7 any-of-issue-labels: "defect,panic / crash" operations-per-run: 100 From 7051bc00c2fe8d7407480a805e950cb73343bb45 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Mon, 23 Sep 2024 11:40:34 -0600 Subject: [PATCH 019/228] Add "Fix with Assistant" code action on lines with diagnostics (#18163) Release Notes: - Added a new "Fix with Assistant" action on code with errors or warnings. --------- Co-authored-by: Nathan --- Cargo.lock | 1 + crates/assistant/Cargo.toml | 1 + crates/assistant/src/inline_assistant.rs | 195 ++++++++++++++++-- crates/assistant/src/workflow.rs | 1 + .../remote_editing_collaboration_tests.rs | 1 + crates/editor/src/editor.rs | 169 +++++++++++---- crates/gpui/src/executor.rs | 6 +- crates/multi_buffer/src/multi_buffer.rs | 67 ++++++ crates/project/src/lsp_store.rs | 34 +-- crates/project/src/project.rs | 2 +- crates/project/src/project_tests.rs | 3 +- crates/search/src/project_search.rs | 2 +- crates/workspace/src/workspace.rs | 8 +- 13 files changed, 418 insertions(+), 72 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c0f6751b895e2..e345736295613 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -404,6 +404,7 @@ dependencies = [ "language_model", "languages", "log", + "lsp", "markdown", "menu", "multi_buffer", diff --git a/crates/assistant/Cargo.toml b/crates/assistant/Cargo.toml index 9f715d822474d..9e61eee18aaf8 100644 --- a/crates/assistant/Cargo.toml +++ b/crates/assistant/Cargo.toml @@ -51,6 +51,7 @@ indoc.workspace = true language.workspace = true language_model.workspace = true log.workspace = true +lsp.workspace = true markdown.workspace = true menu.workspace = true multi_buffer.workspace = true diff --git a/crates/assistant/src/inline_assistant.rs b/crates/assistant/src/inline_assistant.rs index f2428c3a2e94c..9c117e66653e9 100644 --- a/crates/assistant/src/inline_assistant.rs +++ b/crates/assistant/src/inline_assistant.rs @@ -12,8 +12,9 @@ use editor::{ BlockContext, BlockDisposition, BlockProperties, BlockStyle, CustomBlockId, RenderBlock, ToDisplayPoint, }, - Anchor, AnchorRangeExt, Editor, EditorElement, EditorEvent, EditorMode, EditorStyle, - ExcerptRange, GutterDimensions, MultiBuffer, MultiBufferSnapshot, ToOffset, ToPoint, + Anchor, AnchorRangeExt, CodeActionProvider, Editor, EditorElement, EditorEvent, EditorMode, + EditorStyle, ExcerptId, ExcerptRange, GutterDimensions, MultiBuffer, MultiBufferSnapshot, + ToOffset as _, ToPoint, }; use feature_flags::{FeatureFlagAppExt as _, ZedPro}; use fs::Fs; @@ -35,6 +36,7 @@ use language_model::{ }; use multi_buffer::MultiBufferRow; use parking_lot::Mutex; +use project::{CodeAction, ProjectTransaction}; use rope::Rope; use settings::{Settings, SettingsStore}; use smol::future::FutureExt; @@ -49,10 +51,11 @@ use std::{ time::{Duration, Instant}, }; use terminal_view::terminal_panel::TerminalPanel; +use text::{OffsetRangeExt, ToPoint as _}; use theme::ThemeSettings; use ui::{prelude::*, CheckboxWithLabel, IconButtonShape, Popover, Tooltip}; use util::{RangeExt, ResultExt}; -use workspace::{notifications::NotificationId, Toast, Workspace}; +use workspace::{notifications::NotificationId, ItemHandle, Toast, Workspace}; pub fn init( fs: Arc, @@ -129,8 +132,10 @@ impl InlineAssistant { } pub fn register_workspace(&mut self, workspace: &View, cx: &mut WindowContext) { - cx.subscribe(workspace, |_, event, cx| { - Self::update_global(cx, |this, cx| this.handle_workspace_event(event, cx)); + cx.subscribe(workspace, |workspace, event, cx| { + 
Self::update_global(cx, |this, cx| { + this.handle_workspace_event(workspace, event, cx) + }); }) .detach(); @@ -150,19 +155,49 @@ impl InlineAssistant { .detach(); } - fn handle_workspace_event(&mut self, event: &workspace::Event, cx: &mut WindowContext) { - // When the user manually saves an editor, automatically accepts all finished transformations. - if let workspace::Event::UserSavedItem { item, .. } = event { - if let Some(editor) = item.upgrade().and_then(|item| item.act_as::(cx)) { - if let Some(editor_assists) = self.assists_by_editor.get(&editor.downgrade()) { - for assist_id in editor_assists.assist_ids.clone() { - let assist = &self.assists[&assist_id]; - if let CodegenStatus::Done = assist.codegen.read(cx).status(cx) { - self.finish_assist(assist_id, false, cx) + fn handle_workspace_event( + &mut self, + workspace: View, + event: &workspace::Event, + cx: &mut WindowContext, + ) { + match event { + workspace::Event::UserSavedItem { item, .. } => { + // When the user manually saves an editor, automatically accepts all finished transformations. + if let Some(editor) = item.upgrade().and_then(|item| item.act_as::(cx)) { + if let Some(editor_assists) = self.assists_by_editor.get(&editor.downgrade()) { + for assist_id in editor_assists.assist_ids.clone() { + let assist = &self.assists[&assist_id]; + if let CodegenStatus::Done = assist.codegen.read(cx).status(cx) { + self.finish_assist(assist_id, false, cx) + } } } } } + workspace::Event::ItemAdded { item } => { + self.register_workspace_item(&workspace, item.as_ref(), cx); + } + _ => (), + } + } + + fn register_workspace_item( + &mut self, + workspace: &View, + item: &dyn ItemHandle, + cx: &mut WindowContext, + ) { + if let Some(editor) = item.act_as::(cx) { + editor.update(cx, |editor, cx| { + editor.push_code_action_provider( + Arc::new(AssistantCodeActionProvider { + editor: cx.view().downgrade(), + workspace: workspace.downgrade(), + }), + cx, + ); + }); } } @@ -332,6 +367,7 @@ impl InlineAssistant { mut range: Range, initial_prompt: String, initial_transaction_id: Option, + focus: bool, workspace: Option>, assistant_panel: Option<&View>, cx: &mut WindowContext, @@ -404,6 +440,11 @@ impl InlineAssistant { assist_group.assist_ids.push(assist_id); editor_assists.assist_ids.push(assist_id); self.assist_groups.insert(assist_group_id, assist_group); + + if focus { + self.focus_assist(assist_id, cx); + } + assist_id } @@ -3289,6 +3330,132 @@ where } } +struct AssistantCodeActionProvider { + editor: WeakView, + workspace: WeakView, +} + +impl CodeActionProvider for AssistantCodeActionProvider { + fn code_actions( + &self, + buffer: &Model, + range: Range, + cx: &mut WindowContext, + ) -> Task>> { + let snapshot = buffer.read(cx).snapshot(); + let mut range = range.to_point(&snapshot); + + // Expand the range to line boundaries. 
+ range.start.column = 0; + range.end.column = snapshot.line_len(range.end.row); + + let mut has_diagnostics = false; + for diagnostic in snapshot.diagnostics_in_range::<_, Point>(range.clone(), false) { + range.start = cmp::min(range.start, diagnostic.range.start); + range.end = cmp::max(range.end, diagnostic.range.end); + has_diagnostics = true; + } + if has_diagnostics { + if let Some(symbols_containing_start) = snapshot.symbols_containing(range.start, None) { + if let Some(symbol) = symbols_containing_start.last() { + range.start = cmp::min(range.start, symbol.range.start.to_point(&snapshot)); + range.end = cmp::max(range.end, symbol.range.end.to_point(&snapshot)); + } + } + + if let Some(symbols_containing_end) = snapshot.symbols_containing(range.end, None) { + if let Some(symbol) = symbols_containing_end.last() { + range.start = cmp::min(range.start, symbol.range.start.to_point(&snapshot)); + range.end = cmp::max(range.end, symbol.range.end.to_point(&snapshot)); + } + } + + Task::ready(Ok(vec![CodeAction { + server_id: language::LanguageServerId(0), + range: snapshot.anchor_before(range.start)..snapshot.anchor_after(range.end), + lsp_action: lsp::CodeAction { + title: "Fix with Assistant".into(), + ..Default::default() + }, + }])) + } else { + Task::ready(Ok(Vec::new())) + } + } + + fn apply_code_action( + &self, + buffer: Model, + action: CodeAction, + excerpt_id: ExcerptId, + _push_to_history: bool, + cx: &mut WindowContext, + ) -> Task> { + let editor = self.editor.clone(); + let workspace = self.workspace.clone(); + cx.spawn(|mut cx| async move { + let editor = editor.upgrade().context("editor was released")?; + let range = editor + .update(&mut cx, |editor, cx| { + editor.buffer().update(cx, |multibuffer, cx| { + let buffer = buffer.read(cx); + let multibuffer_snapshot = multibuffer.read(cx); + + let old_context_range = + multibuffer_snapshot.context_range_for_excerpt(excerpt_id)?; + let mut new_context_range = old_context_range.clone(); + if action + .range + .start + .cmp(&old_context_range.start, buffer) + .is_lt() + { + new_context_range.start = action.range.start; + } + if action.range.end.cmp(&old_context_range.end, buffer).is_gt() { + new_context_range.end = action.range.end; + } + drop(multibuffer_snapshot); + + if new_context_range != old_context_range { + multibuffer.resize_excerpt(excerpt_id, new_context_range, cx); + } + + let multibuffer_snapshot = multibuffer.read(cx); + Some( + multibuffer_snapshot + .anchor_in_excerpt(excerpt_id, action.range.start)? + ..multibuffer_snapshot + .anchor_in_excerpt(excerpt_id, action.range.end)?, + ) + }) + })? 
+ .context("invalid range")?; + let assistant_panel = workspace.update(&mut cx, |workspace, cx| { + workspace + .panel::(cx) + .context("assistant panel was released") + })??; + + cx.update_global(|assistant: &mut InlineAssistant, cx| { + let assist_id = assistant.suggest_assist( + &editor, + range, + "Fix Diagnostics".into(), + None, + true, + Some(workspace), + Some(&assistant_panel), + cx, + ); + assistant.start_assist(assist_id, cx); + })?; + + Ok(ProjectTransaction::default()) + }) + } +} + fn prefixes(text: &str) -> impl Iterator { (0..text.len() - 1).map(|ix| &text[..ix + 1]) } diff --git a/crates/assistant/src/workflow.rs b/crates/assistant/src/workflow.rs index 75c65ed0a78e4..8a770e21aa7ca 100644 --- a/crates/assistant/src/workflow.rs +++ b/crates/assistant/src/workflow.rs @@ -187,6 +187,7 @@ impl WorkflowSuggestion { suggestion_range, initial_prompt, initial_transaction_id, + false, Some(workspace.clone()), Some(assistant_panel), cx, diff --git a/crates/collab/src/tests/remote_editing_collaboration_tests.rs b/crates/collab/src/tests/remote_editing_collaboration_tests.rs index cdcf69cf7e9ac..a81166bb00cee 100644 --- a/crates/collab/src/tests/remote_editing_collaboration_tests.rs +++ b/crates/collab/src/tests/remote_editing_collaboration_tests.rs @@ -53,6 +53,7 @@ async fn test_sharing_an_ssh_remote_project( let (project_a, worktree_id) = client_a .build_ssh_project("/code/project1", client_ssh, cx_a) .await; + executor.run_until_parked(); // User A shares the remote project. let active_call_a = cx_a.read(ActiveCall::global); diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index b1a3d95a0da78..cbc272d995213 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -68,7 +68,7 @@ use element::LineWithInvisibles; pub use element::{ CursorLayout, EditorElement, HighlightedRange, HighlightedRangeLine, PointForPosition, }; -use futures::FutureExt; +use futures::{future, FutureExt}; use fuzzy::{StringMatch, StringMatchCandidate}; use git::blame::GitBlame; use git::diff_hunk_to_display; @@ -569,8 +569,8 @@ pub struct Editor { find_all_references_task_sources: Vec, next_completion_id: CompletionId, completion_documentation_pre_resolve_debounce: DebouncedDelay, - available_code_actions: Option<(Location, Arc<[CodeAction]>)>, - code_actions_task: Option>, + available_code_actions: Option<(Location, Arc<[AvailableCodeAction]>)>, + code_actions_task: Option>>, document_highlights_task: Option>, linked_editing_range_task: Option>>, linked_edit_ranges: linked_editing_ranges::LinkedEditingRanges, @@ -590,6 +590,7 @@ pub struct Editor { gutter_hovered: bool, hovered_link_state: Option, inline_completion_provider: Option, + code_action_providers: Vec>, active_inline_completion: Option, // enable_inline_completions is a switch that Vim can use to disable // inline completions based on its mode. 
@@ -1360,10 +1361,16 @@ impl CompletionsMenu { } } +struct AvailableCodeAction { + excerpt_id: ExcerptId, + action: CodeAction, + provider: Arc, +} + #[derive(Clone)] struct CodeActionContents { tasks: Option>, - actions: Option>, + actions: Option>, } impl CodeActionContents { @@ -1395,9 +1402,11 @@ impl CodeActionContents { .map(|(kind, task)| CodeActionsItem::Task(kind.clone(), task.clone())) }) .chain(self.actions.iter().flat_map(|actions| { - actions - .iter() - .map(|action| CodeActionsItem::CodeAction(action.clone())) + actions.iter().map(|available| CodeActionsItem::CodeAction { + excerpt_id: available.excerpt_id, + action: available.action.clone(), + provider: available.provider.clone(), + }) })) } fn get(&self, index: usize) -> Option { @@ -1410,10 +1419,13 @@ impl CodeActionContents { .cloned() .map(|(kind, task)| CodeActionsItem::Task(kind, task)) } else { - actions - .get(index - tasks.templates.len()) - .cloned() - .map(CodeActionsItem::CodeAction) + actions.get(index - tasks.templates.len()).map(|available| { + CodeActionsItem::CodeAction { + excerpt_id: available.excerpt_id, + action: available.action.clone(), + provider: available.provider.clone(), + } + }) } } (Some(tasks), None) => tasks @@ -1421,7 +1433,15 @@ impl CodeActionContents { .get(index) .cloned() .map(|(kind, task)| CodeActionsItem::Task(kind, task)), - (None, Some(actions)) => actions.get(index).cloned().map(CodeActionsItem::CodeAction), + (None, Some(actions)) => { + actions + .get(index) + .map(|available| CodeActionsItem::CodeAction { + excerpt_id: available.excerpt_id, + action: available.action.clone(), + provider: available.provider.clone(), + }) + } (None, None) => None, } } @@ -1431,7 +1451,11 @@ impl CodeActionContents { #[derive(Clone)] enum CodeActionsItem { Task(TaskSourceKind, ResolvedTask), - CodeAction(CodeAction), + CodeAction { + excerpt_id: ExcerptId, + action: CodeAction, + provider: Arc, + }, } impl CodeActionsItem { @@ -1442,14 +1466,14 @@ impl CodeActionsItem { Some(task) } fn as_code_action(&self) -> Option<&CodeAction> { - let Self::CodeAction(action) = self else { + let Self::CodeAction { action, .. } = self else { return None; }; Some(action) } fn label(&self) -> String { match self { - Self::CodeAction(action) => action.lsp_action.title.clone(), + Self::CodeAction { action, .. } => action.lsp_action.title.clone(), Self::Task(_, task) => task.resolved_label.clone(), } } @@ -1588,7 +1612,9 @@ impl CodeActionsMenu { .enumerate() .max_by_key(|(_, action)| match action { CodeActionsItem::Task(_, task) => task.resolved_label.chars().count(), - CodeActionsItem::CodeAction(action) => action.lsp_action.title.chars().count(), + CodeActionsItem::CodeAction { action, .. 
} => { + action.lsp_action.title.chars().count() + } }) .map(|(ix, _)| ix), ) @@ -1864,6 +1890,11 @@ impl Editor { None }; + let mut code_action_providers = Vec::new(); + if let Some(project) = project.clone() { + code_action_providers.push(Arc::new(project) as Arc<_>); + } + let mut this = Self { focus_handle, show_cursor_when_unfocused: false, @@ -1915,6 +1946,7 @@ impl Editor { next_completion_id: 0, completion_documentation_pre_resolve_debounce: DebouncedDelay::new(), next_inlay_id: 0, + code_action_providers, available_code_actions: Default::default(), code_actions_task: Default::default(), document_highlights_task: Default::default(), @@ -4553,7 +4585,7 @@ impl Editor { let action = action.clone(); cx.spawn(|editor, mut cx| async move { while let Some(prev_task) = task { - prev_task.await; + prev_task.await.log_err(); task = editor.update(&mut cx, |this, _| this.code_actions_task.take())?; } @@ -4727,17 +4759,16 @@ impl Editor { Some(Task::ready(Ok(()))) }) } - CodeActionsItem::CodeAction(action) => { - let apply_code_actions = workspace - .read(cx) - .project() - .clone() - .update(cx, |project, cx| { - project.apply_code_action(buffer, action, true, cx) - }); + CodeActionsItem::CodeAction { + excerpt_id, + action, + provider, + } => { + let apply_code_action = + provider.apply_code_action(buffer, action, excerpt_id, true, cx); let workspace = workspace.downgrade(); Some(cx.spawn(|editor, cx| async move { - let project_transaction = apply_code_actions.await?; + let project_transaction = apply_code_action.await?; Self::open_project_transaction( &editor, workspace, @@ -4835,8 +4866,16 @@ impl Editor { Ok(()) } + pub fn push_code_action_provider( + &mut self, + provider: Arc, + cx: &mut ViewContext, + ) { + self.code_action_providers.push(provider); + self.refresh_code_actions(cx); + } + fn refresh_code_actions(&mut self, cx: &mut ViewContext) -> Option<()> { - let project = self.project.clone()?; let buffer = self.buffer.read(cx); let newest_selection = self.selections.newest_anchor().clone(); let (start_buffer, start) = buffer.text_anchor_for_position(newest_selection.start, cx)?; @@ -4850,13 +4889,30 @@ impl Editor { .timer(CODE_ACTIONS_DEBOUNCE_TIMEOUT) .await; - let actions = if let Ok(code_actions) = project.update(&mut cx, |project, cx| { - project.code_actions(&start_buffer, start..end, cx) - }) { - code_actions.await - } else { - Vec::new() - }; + let (providers, tasks) = this.update(&mut cx, |this, cx| { + let providers = this.code_action_providers.clone(); + let tasks = this + .code_action_providers + .iter() + .map(|provider| provider.code_actions(&start_buffer, start..end, cx)) + .collect::>(); + (providers, tasks) + })?; + + let mut actions = Vec::new(); + for (provider, provider_actions) in + providers.into_iter().zip(future::join_all(tasks).await) + { + if let Some(provider_actions) = provider_actions.log_err() { + actions.extend(provider_actions.into_iter().map(|action| { + AvailableCodeAction { + excerpt_id: newest_selection.start.excerpt_id, + action, + provider: provider.clone(), + } + })); + } + } this.update(&mut cx, |this, cx| { this.available_code_actions = if actions.is_empty() { @@ -4872,7 +4928,6 @@ impl Editor { }; cx.notify(); }) - .log_err(); })); None } @@ -9685,7 +9740,7 @@ impl Editor { }) .context("location tasks preparation")?; - let locations = futures::future::join_all(location_tasks) + let locations = future::join_all(location_tasks) .await .into_iter() .filter_map(|location| location.transpose()) @@ -12574,6 +12629,48 @@ pub trait 
CompletionProvider { } } +pub trait CodeActionProvider { + fn code_actions( + &self, + buffer: &Model, + range: Range, + cx: &mut WindowContext, + ) -> Task>>; + + fn apply_code_action( + &self, + buffer_handle: Model, + action: CodeAction, + excerpt_id: ExcerptId, + push_to_history: bool, + cx: &mut WindowContext, + ) -> Task>; +} + +impl CodeActionProvider for Model { + fn code_actions( + &self, + buffer: &Model, + range: Range, + cx: &mut WindowContext, + ) -> Task>> { + self.update(cx, |project, cx| project.code_actions(buffer, range, cx)) + } + + fn apply_code_action( + &self, + buffer_handle: Model, + action: CodeAction, + _excerpt_id: ExcerptId, + push_to_history: bool, + cx: &mut WindowContext, + ) -> Task> { + self.update(cx, |project, cx| { + project.apply_code_action(buffer_handle, action, push_to_history, cx) + }) + } +} + fn snippet_completions( project: &Project, buffer: &Model, diff --git a/crates/gpui/src/executor.rs b/crates/gpui/src/executor.rs index b909e63271c06..3035892d7a17f 100644 --- a/crates/gpui/src/executor.rs +++ b/crates/gpui/src/executor.rs @@ -407,7 +407,11 @@ impl BackgroundExecutor { /// How many CPUs are available to the dispatcher. pub fn num_cpus(&self) -> usize { - num_cpus::get() + #[cfg(any(test, feature = "test-support"))] + return 4; + + #[cfg(not(any(test, feature = "test-support")))] + return num_cpus::get(); } /// Whether we're on the main thread. diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index d406f9bfaf6ac..0df196bb9829d 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -1810,6 +1810,69 @@ impl MultiBuffer { self.as_singleton().unwrap().read(cx).is_parsing() } + pub fn resize_excerpt( + &mut self, + id: ExcerptId, + range: Range, + cx: &mut ModelContext, + ) { + self.sync(cx); + + let snapshot = self.snapshot(cx); + let locator = snapshot.excerpt_locator_for_id(id); + let mut new_excerpts = SumTree::default(); + let mut cursor = snapshot.excerpts.cursor::<(Option<&Locator>, usize)>(&()); + let mut edits = Vec::>::new(); + + let prefix = cursor.slice(&Some(locator), Bias::Left, &()); + new_excerpts.append(prefix, &()); + + let mut excerpt = cursor.item().unwrap().clone(); + let old_text_len = excerpt.text_summary.len; + + excerpt.range.context.start = range.start; + excerpt.range.context.end = range.end; + excerpt.max_buffer_row = range.end.to_point(&excerpt.buffer).row; + + excerpt.text_summary = excerpt + .buffer + .text_summary_for_range(excerpt.range.context.clone()); + + let new_start_offset = new_excerpts.summary().text.len; + let old_start_offset = cursor.start().1; + let edit = Edit { + old: old_start_offset..old_start_offset + old_text_len, + new: new_start_offset..new_start_offset + excerpt.text_summary.len, + }; + + if let Some(last_edit) = edits.last_mut() { + if last_edit.old.end == edit.old.start { + last_edit.old.end = edit.old.end; + last_edit.new.end = edit.new.end; + } else { + edits.push(edit); + } + } else { + edits.push(edit); + } + + new_excerpts.push(excerpt, &()); + + cursor.next(&()); + + new_excerpts.append(cursor.suffix(&()), &()); + + drop(cursor); + self.snapshot.borrow_mut().excerpts = new_excerpts; + + self.subscriptions.publish_mut(edits); + cx.emit(Event::Edited { + singleton_buffer_edited: false, + }); + cx.emit(Event::ExcerptsExpanded { ids: vec![id] }); + cx.notify(); + } + pub fn expand_excerpts( &mut self, ids: impl IntoIterator, @@ -3139,6 +3202,10 @@ impl MultiBufferSnapshot { None } + pub fn 
context_range_for_excerpt(&self, excerpt_id: ExcerptId) -> Option> { + Some(self.excerpt(excerpt_id)?.range.context.clone()) + } + pub fn can_resolve(&self, anchor: &Anchor) -> bool { if anchor.excerpt_id == ExcerptId::min() || anchor.excerpt_id == ExcerptId::max() { true diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 4506fcc6feb43..b2920bc791c47 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -1431,7 +1431,7 @@ impl LspStore { buffer_handle: &Model, range: Range, cx: &mut ModelContext, - ) -> Task> { + ) -> Task>> { if let Some((upstream_client, project_id)) = self.upstream_client() { let request_task = upstream_client.request(proto::MultiLspQuery { buffer_id: buffer_handle.read(cx).remote_id().into(), @@ -1451,14 +1451,11 @@ impl LspStore { let buffer = buffer_handle.clone(); cx.spawn(|weak_project, cx| async move { let Some(project) = weak_project.upgrade() else { - return Vec::new(); + return Ok(Vec::new()); }; - join_all( - request_task - .await - .log_err() - .map(|response| response.responses) - .unwrap_or_default() + let responses = request_task.await?.responses; + let actions = join_all( + responses .into_iter() .filter_map(|lsp_response| match lsp_response.response? { proto::lsp_response::Response::GetCodeActionsResponse(response) => { @@ -1470,7 +1467,7 @@ impl LspStore { } }) .map(|code_actions_response| { - let response = GetCodeActions { + GetCodeActions { range: range.clone(), kinds: None, } @@ -1479,14 +1476,17 @@ impl LspStore { project.clone(), buffer.clone(), cx.clone(), - ); - async move { response.await.log_err().unwrap_or_default() } + ) }), ) - .await - .into_iter() - .flatten() - .collect() + .await; + + Ok(actions + .into_iter() + .collect::>>>()? + .into_iter() + .flatten() + .collect()) }) } else { let all_actions_task = self.request_multiple_lsp_locally( @@ -1498,7 +1498,9 @@ impl LspStore { }, cx, ); - cx.spawn(|_, _| async move { all_actions_task.await.into_iter().flatten().collect() }) + cx.spawn( + |_, _| async move { Ok(all_actions_task.await.into_iter().flatten().collect()) }, + ) } } diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 0c54a16187a4a..b1347c6d063f2 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -3247,7 +3247,7 @@ impl Project { buffer_handle: &Model, range: Range, cx: &mut ModelContext, - ) -> Task> { + ) -> Task>> { let buffer = buffer_handle.read(cx); let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end); self.lsp_store.update(cx, |lsp_store, cx| { diff --git a/crates/project/src/project_tests.rs b/crates/project/src/project_tests.rs index d0d67f0cda4a4..a7d2e6766c233 100644 --- a/crates/project/src/project_tests.rs +++ b/crates/project/src/project_tests.rs @@ -2708,7 +2708,7 @@ async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) { .next() .await; - let action = actions.await[0].clone(); + let action = actions.await.unwrap()[0].clone(); let apply = project.update(cx, |project, cx| { project.apply_code_action(buffer.clone(), action, true, cx) }); @@ -5046,6 +5046,7 @@ async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) { vec!["TailwindServer code action", "TypeScriptServer code action"], code_actions_task .await + .unwrap() .into_iter() .map(|code_action| code_action.lsp_action.title) .sorted() diff --git a/crates/search/src/project_search.rs b/crates/search/src/project_search.rs index fac3c55bf4550..d5b719a657628 100644 --- 
a/crates/search/src/project_search.rs +++ b/crates/search/src/project_search.rs @@ -2745,7 +2745,7 @@ pub mod tests { search_view .results_editor .update(cx, |editor, cx| editor.display_text(cx)), - "\n\n\nconst ONE: usize = 1;\n\n\n\n\nconst TWO: usize = one::ONE + one::ONE;\n", + "\n\n\nconst TWO: usize = one::ONE + one::ONE;\n\n\n\n\nconst ONE: usize = 1;\n", "New search in directory should have a filter that matches a certain directory" ); }) diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 1fbeab38a2e8b..92bfc8c5c56d4 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -675,7 +675,9 @@ impl DelayedDebouncedEditAction { pub enum Event { PaneAdded(View), PaneRemoved, - ItemAdded, + ItemAdded { + item: Box, + }, ItemRemoved, ActiveItemChanged, UserSavedItem { @@ -2984,7 +2986,9 @@ impl Workspace { match event { pane::Event::AddItem { item } => { item.added_to_pane(self, pane, cx); - cx.emit(Event::ItemAdded); + cx.emit(Event::ItemAdded { + item: item.boxed_clone(), + }); } pane::Event::Split(direction) => { self.split_and_clone(pane, *direction, cx); From 1ff10b71c8ea9cae6263225445e35c68ab0808be Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=8B=90=E7=8B=B8?= <134658521+Huliiiiii@users.noreply.github.com> Date: Tue, 24 Sep 2024 03:39:01 +0800 Subject: [PATCH 020/228] lua: Add auto-close for single quote strings and highlight escape sequences (#18199) - Add auto close to single quote string - Add syntax highlights to escape sequence --- extensions/lua/languages/lua/config.toml | 2 +- extensions/lua/languages/lua/highlights.scm | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/extensions/lua/languages/lua/config.toml b/extensions/lua/languages/lua/config.toml index 6c3aee09ea71b..7ec8ef2f03f54 100644 --- a/extensions/lua/languages/lua/config.toml +++ b/extensions/lua/languages/lua/config.toml @@ -8,6 +8,6 @@ brackets = [ { start = "[", end = "]", close = true, newline = true }, { start = "(", end = ")", close = true, newline = true }, { start = "\"", end = "\"", close = true, newline = false, not_in = ["string"] }, - { start = "'", end = "'", close = false, newline = false, not_in = ["string"] }, + { start = "'", end = "'", close = true, newline = false, not_in = ["string"] }, ] collapsed_placeholder = "--[ ... ]--" diff --git a/extensions/lua/languages/lua/highlights.scm b/extensions/lua/languages/lua/highlights.scm index 98e2c2eaff638..7b0b8364ea2d3 100644 --- a/extensions/lua/languages/lua/highlights.scm +++ b/extensions/lua/languages/lua/highlights.scm @@ -196,3 +196,4 @@ (number) @number (string) @string +(escape_sequence) @string.escape From e95e1c9ae5bd94ffaa595b8e56a727802441457d Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Mon, 23 Sep 2024 19:45:00 +0000 Subject: [PATCH 021/228] Add '?plain=1' to Permalinks on GitLab/GitHub for md files (#18241) Improve our Permalinks to markdown files. GitHub/GitLab supports the same URL syntax. 
--- crates/git_hosting_providers/src/providers/github.rs | 3 +++ crates/git_hosting_providers/src/providers/gitlab.rs | 3 +++ 2 files changed, 6 insertions(+) diff --git a/crates/git_hosting_providers/src/providers/github.rs b/crates/git_hosting_providers/src/providers/github.rs index be46b51ddf7bd..77eaa80961e61 100644 --- a/crates/git_hosting_providers/src/providers/github.rs +++ b/crates/git_hosting_providers/src/providers/github.rs @@ -145,6 +145,9 @@ impl GitHostingProvider for Github { .base_url() .join(&format!("{owner}/{repo}/blob/{sha}/{path}")) .unwrap(); + if path.ends_with(".md") { + permalink.set_query(Some("plain=1")); + } permalink.set_fragment( selection .map(|selection| self.line_fragment(&selection)) diff --git a/crates/git_hosting_providers/src/providers/gitlab.rs b/crates/git_hosting_providers/src/providers/gitlab.rs index ccb8a7280a2c6..36ee214cf9d47 100644 --- a/crates/git_hosting_providers/src/providers/gitlab.rs +++ b/crates/git_hosting_providers/src/providers/gitlab.rs @@ -65,6 +65,9 @@ impl GitHostingProvider for Gitlab { .base_url() .join(&format!("{owner}/{repo}/-/blob/{sha}/{path}")) .unwrap(); + if path.ends_with(".md") { + permalink.set_query(Some("plain=1")); + } permalink.set_fragment( selection .map(|selection| self.line_fragment(&selection)) From e4080ef565f71c15a4cd89f6b0d565b82ec53f7d Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Mon, 23 Sep 2024 14:33:28 -0600 Subject: [PATCH 022/228] Move formatting to LSP store (#18242) Release Notes: - ssh-remoting: Fixed format on save --------- Co-authored-by: Mikayla --- .../src/activity_indicator.rs | 2 +- crates/collab/src/tests/integration_tests.rs | 4 +- crates/editor/src/editor.rs | 4 +- crates/editor/src/items.rs | 4 +- crates/project/src/lsp_store.rs | 649 +++++++++++++++++- crates/project/src/prettier_store.rs | 8 +- crates/project/src/project.rs | 639 +---------------- crates/project/src/project_tests.rs | 2 +- 8 files changed, 655 insertions(+), 657 deletions(-) diff --git a/crates/activity_indicator/src/activity_indicator.rs b/crates/activity_indicator/src/activity_indicator.rs index a9ae7d075d10c..fee0ef73f7bee 100644 --- a/crates/activity_indicator/src/activity_indicator.rs +++ b/crates/activity_indicator/src/activity_indicator.rs @@ -280,7 +280,7 @@ impl ActivityIndicator { } // Show any formatting failure - if let Some(failure) = self.project.read(cx).last_formatting_failure() { + if let Some(failure) = self.project.read(cx).last_formatting_failure(cx) { return Some(Content { icon: Some( Icon::new(IconName::Warning) diff --git a/crates/collab/src/tests/integration_tests.rs b/crates/collab/src/tests/integration_tests.rs index 51593e081e46c..d5cef3589cce3 100644 --- a/crates/collab/src/tests/integration_tests.rs +++ b/crates/collab/src/tests/integration_tests.rs @@ -28,8 +28,8 @@ use live_kit_client::MacOSDisplay; use lsp::LanguageServerId; use parking_lot::Mutex; use project::{ - search::SearchQuery, search::SearchResult, DiagnosticSummary, FormatTrigger, HoverBlockKind, - Project, ProjectPath, + lsp_store::FormatTrigger, search::SearchQuery, search::SearchResult, DiagnosticSummary, + HoverBlockKind, Project, ProjectPath, }; use rand::prelude::*; use serde_json::json; diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index cbc272d995213..dc536471023f0 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -122,8 +122,8 @@ use ordered_float::OrderedFloat; use parking_lot::{Mutex, RwLock}; use project::project_settings::{GitGutterSetting, 
ProjectSettings}; use project::{ - CodeAction, Completion, CompletionIntent, FormatTrigger, Item, Location, Project, ProjectPath, - ProjectTransaction, TaskSourceKind, + lsp_store::FormatTrigger, CodeAction, Completion, CompletionIntent, Item, Location, Project, + ProjectPath, ProjectTransaction, TaskSourceKind, }; use rand::prelude::*; use rpc::{proto::*, ErrorExt}; diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index 1d301f2ee68cd..b3f4cc813fe8a 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -20,8 +20,8 @@ use language::{ }; use multi_buffer::AnchorRangeExt; use project::{ - project_settings::ProjectSettings, search::SearchQuery, FormatTrigger, Item as _, Project, - ProjectPath, + lsp_store::FormatTrigger, project_settings::ProjectSettings, search::SearchQuery, Item as _, + Project, ProjectPath, }; use rpc::proto::{self, update_view, PeerId}; use settings::Settings; diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index b2920bc791c47..6673f9da1ddd7 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -1,5 +1,6 @@ use crate::{ buffer_store::{BufferStore, BufferStoreEvent}, + deserialize_code_actions, environment::ProjectEnvironment, lsp_command::{self, *}, lsp_ext_command, @@ -19,7 +20,7 @@ use futures::{ future::{join_all, BoxFuture, Shared}, select, stream::FuturesUnordered, - Future, FutureExt, StreamExt, + AsyncWriteExt, Future, FutureExt, StreamExt, }; use globset::{Glob, GlobSet, GlobSetBuilder}; use gpui::{ @@ -29,12 +30,13 @@ use gpui::{ use http_client::{AsyncBody, HttpClient, Request, Response, Uri}; use language::{ language_settings::{ - all_language_settings, language_settings, AllLanguageSettings, LanguageSettings, + all_language_settings, language_settings, AllLanguageSettings, FormatOnSave, Formatter, + LanguageSettings, SelectedFormatter, }, markdown, point_to_lsp, prepare_completion_documentation, proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version}, range_from_lsp, Bias, Buffer, BufferSnapshot, CachedLspAdapter, CodeLabel, Diagnostic, - DiagnosticEntry, DiagnosticSet, Documentation, File as _, Language, LanguageConfig, + DiagnosticEntry, DiagnosticSet, Diff, Documentation, File as _, Language, LanguageConfig, LanguageMatcher, LanguageName, LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, LspAdapterDelegate, Patch, PendingLanguageServer, PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction, Unclipped, @@ -90,12 +92,38 @@ const SERVER_REINSTALL_DEBOUNCE_TIMEOUT: Duration = Duration::from_secs(1); const SERVER_LAUNCHING_BEFORE_SHUTDOWN_TIMEOUT: Duration = Duration::from_secs(5); pub const SERVER_PROGRESS_THROTTLE_TIMEOUT: Duration = Duration::from_millis(100); +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum FormatTrigger { + Save, + Manual, +} + +// Currently, formatting operations are represented differently depending on +// whether they come from a language server or an external command. 
+#[derive(Debug)] +pub enum FormatOperation { + Lsp(Vec<(Range, String)>), + External(Diff), + Prettier(Diff), +} + +impl FormatTrigger { + fn from_proto(value: i32) -> FormatTrigger { + match value { + 0 => FormatTrigger::Save, + 1 => FormatTrigger::Manual, + _ => FormatTrigger::Save, + } + } +} + pub struct LocalLspStore { http_client: Option>, environment: Model, fs: Arc, yarn: Model, pub language_servers: HashMap, + buffers_being_formatted: HashSet, last_workspace_edits_by_language_server: HashMap, language_server_watched_paths: HashMap>, language_server_watcher_registrations: @@ -104,6 +132,7 @@ pub struct LocalLspStore { HashMap)>, prettier_store: Model, current_lsp_settings: HashMap, + last_formatting_failure: Option, _subscription: gpui::Subscription, } @@ -128,6 +157,485 @@ impl LocalLspStore { futures::future::join_all(shutdown_futures).await; } } + async fn format_locally( + lsp_store: WeakModel, + mut buffers_with_paths: Vec<(Model, Option)>, + push_to_history: bool, + trigger: FormatTrigger, + mut cx: AsyncAppContext, + ) -> anyhow::Result { + // Do not allow multiple concurrent formatting requests for the + // same buffer. + lsp_store.update(&mut cx, |this, cx| { + let this = this.as_local_mut().unwrap(); + buffers_with_paths.retain(|(buffer, _)| { + this.buffers_being_formatted + .insert(buffer.read(cx).remote_id()) + }); + })?; + + let _cleanup = defer({ + let this = lsp_store.clone(); + let mut cx = cx.clone(); + let buffers = &buffers_with_paths; + move || { + this.update(&mut cx, |this, cx| { + let this = this.as_local_mut().unwrap(); + for (buffer, _) in buffers { + this.buffers_being_formatted + .remove(&buffer.read(cx).remote_id()); + } + }) + .ok(); + } + }); + + let mut project_transaction = ProjectTransaction::default(); + for (buffer, buffer_abs_path) in &buffers_with_paths { + let (primary_adapter_and_server, adapters_and_servers) = + lsp_store.update(&mut cx, |lsp_store, cx| { + let buffer = buffer.read(cx); + + let adapters_and_servers = lsp_store + .language_servers_for_buffer(buffer, cx) + .map(|(adapter, lsp)| (adapter.clone(), lsp.clone())) + .collect::>(); + + let primary_adapter = lsp_store + .primary_language_server_for_buffer(buffer, cx) + .map(|(adapter, lsp)| (adapter.clone(), lsp.clone())); + + (primary_adapter, adapters_and_servers) + })?; + + let settings = buffer.update(&mut cx, |buffer, cx| { + language_settings(buffer.language(), buffer.file(), cx).clone() + })?; + + let remove_trailing_whitespace = settings.remove_trailing_whitespace_on_save; + let ensure_final_newline = settings.ensure_final_newline_on_save; + + // First, format buffer's whitespace according to the settings. + let trailing_whitespace_diff = if remove_trailing_whitespace { + Some( + buffer + .update(&mut cx, |b, cx| b.remove_trailing_whitespace(cx))? + .await, + ) + } else { + None + }; + let whitespace_transaction_id = buffer.update(&mut cx, |buffer, cx| { + buffer.finalize_last_transaction(); + buffer.start_transaction(); + if let Some(diff) = trailing_whitespace_diff { + buffer.apply_diff(diff, cx); + } + if ensure_final_newline { + buffer.ensure_final_newline(cx); + } + buffer.end_transaction(cx) + })?; + + // Apply the `code_actions_on_format` before we run the formatter. 
+ let code_actions = deserialize_code_actions(&settings.code_actions_on_format); + #[allow(clippy::nonminimal_bool)] + if !code_actions.is_empty() + && !(trigger == FormatTrigger::Save && settings.format_on_save == FormatOnSave::Off) + { + LspStore::execute_code_actions_on_servers( + &lsp_store, + &adapters_and_servers, + code_actions, + buffer, + push_to_history, + &mut project_transaction, + &mut cx, + ) + .await?; + } + + // Apply language-specific formatting using either the primary language server + // or external command. + // Except for code actions, which are applied with all connected language servers. + let primary_language_server = + primary_adapter_and_server.map(|(_adapter, server)| server.clone()); + let server_and_buffer = primary_language_server + .as_ref() + .zip(buffer_abs_path.as_ref()); + + let prettier_settings = buffer.read_with(&cx, |buffer, cx| { + language_settings(buffer.language(), buffer.file(), cx) + .prettier + .clone() + })?; + + let mut format_operations: Vec = vec![]; + { + match trigger { + FormatTrigger::Save => { + match &settings.format_on_save { + FormatOnSave::Off => { + // nothing + } + FormatOnSave::On => { + match &settings.formatter { + SelectedFormatter::Auto => { + // do the auto-format: prefer prettier, fallback to primary language server + let diff = { + if prettier_settings.allowed { + Self::perform_format( + &Formatter::Prettier, + server_and_buffer, + lsp_store.clone(), + buffer, + buffer_abs_path, + &settings, + &adapters_and_servers, + push_to_history, + &mut project_transaction, + &mut cx, + ) + .await + } else { + Self::perform_format( + &Formatter::LanguageServer { name: None }, + server_and_buffer, + lsp_store.clone(), + buffer, + buffer_abs_path, + &settings, + &adapters_and_servers, + push_to_history, + &mut project_transaction, + &mut cx, + ) + .await + } + } + .log_err() + .flatten(); + if let Some(op) = diff { + format_operations.push(op); + } + } + SelectedFormatter::List(formatters) => { + for formatter in formatters.as_ref() { + let diff = Self::perform_format( + formatter, + server_and_buffer, + lsp_store.clone(), + buffer, + buffer_abs_path, + &settings, + &adapters_and_servers, + push_to_history, + &mut project_transaction, + &mut cx, + ) + .await + .log_err() + .flatten(); + if let Some(op) = diff { + format_operations.push(op); + } + + // format with formatter + } + } + } + } + FormatOnSave::List(formatters) => { + for formatter in formatters.as_ref() { + let diff = Self::perform_format( + formatter, + server_and_buffer, + lsp_store.clone(), + buffer, + buffer_abs_path, + &settings, + &adapters_and_servers, + push_to_history, + &mut project_transaction, + &mut cx, + ) + .await + .log_err() + .flatten(); + if let Some(op) = diff { + format_operations.push(op); + } + } + } + } + } + FormatTrigger::Manual => { + match &settings.formatter { + SelectedFormatter::Auto => { + // do the auto-format: prefer prettier, fallback to primary language server + let diff = { + if prettier_settings.allowed { + Self::perform_format( + &Formatter::Prettier, + server_and_buffer, + lsp_store.clone(), + buffer, + buffer_abs_path, + &settings, + &adapters_and_servers, + push_to_history, + &mut project_transaction, + &mut cx, + ) + .await + } else { + Self::perform_format( + &Formatter::LanguageServer { name: None }, + server_and_buffer, + lsp_store.clone(), + buffer, + buffer_abs_path, + &settings, + &adapters_and_servers, + push_to_history, + &mut project_transaction, + &mut cx, + ) + .await + } + } + .log_err() + .flatten(); + + if let 
Some(op) = diff { + format_operations.push(op) + } + } + SelectedFormatter::List(formatters) => { + for formatter in formatters.as_ref() { + // format with formatter + let diff = Self::perform_format( + formatter, + server_and_buffer, + lsp_store.clone(), + buffer, + buffer_abs_path, + &settings, + &adapters_and_servers, + push_to_history, + &mut project_transaction, + &mut cx, + ) + .await + .log_err() + .flatten(); + if let Some(op) = diff { + format_operations.push(op); + } + } + } + } + } + } + } + + buffer.update(&mut cx, |b, cx| { + // If the buffer had its whitespace formatted and was edited while the language-specific + // formatting was being computed, avoid applying the language-specific formatting, because + // it can't be grouped with the whitespace formatting in the undo history. + if let Some(transaction_id) = whitespace_transaction_id { + if b.peek_undo_stack() + .map_or(true, |e| e.transaction_id() != transaction_id) + { + format_operations.clear(); + } + } + + // Apply any language-specific formatting, and group the two formatting operations + // in the buffer's undo history. + for operation in format_operations { + match operation { + FormatOperation::Lsp(edits) => { + b.edit(edits, None, cx); + } + FormatOperation::External(diff) => { + b.apply_diff(diff, cx); + } + FormatOperation::Prettier(diff) => { + b.apply_diff(diff, cx); + } + } + + if let Some(transaction_id) = whitespace_transaction_id { + b.group_until_transaction(transaction_id); + } else if let Some(transaction) = project_transaction.0.get(buffer) { + b.group_until_transaction(transaction.id) + } + } + + if let Some(transaction) = b.finalize_last_transaction().cloned() { + if !push_to_history { + b.forget_transaction(transaction.id); + } + project_transaction.0.insert(buffer.clone(), transaction); + } + })?; + } + + Ok(project_transaction) + } + + #[allow(clippy::too_many_arguments)] + async fn perform_format( + formatter: &Formatter, + primary_server_and_buffer: Option<(&Arc, &PathBuf)>, + lsp_store: WeakModel, + buffer: &Model, + buffer_abs_path: &Option, + settings: &LanguageSettings, + adapters_and_servers: &[(Arc, Arc)], + push_to_history: bool, + transaction: &mut ProjectTransaction, + cx: &mut AsyncAppContext, + ) -> Result, anyhow::Error> { + let result = match formatter { + Formatter::LanguageServer { name } => { + if let Some((language_server, buffer_abs_path)) = primary_server_and_buffer { + let language_server = if let Some(name) = name { + adapters_and_servers + .iter() + .find_map(|(adapter, server)| { + adapter.name.0.as_ref().eq(name.as_str()).then_some(server) + }) + .unwrap_or(language_server) + } else { + language_server + }; + + Some(FormatOperation::Lsp( + LspStore::format_via_lsp( + &lsp_store, + buffer, + buffer_abs_path, + language_server, + settings, + cx, + ) + .await + .context("failed to format via language server")?, + )) + } else { + None + } + } + Formatter::Prettier => { + let prettier = lsp_store.update(cx, |lsp_store, _cx| { + lsp_store.prettier_store().unwrap().downgrade() + })?; + prettier_store::format_with_prettier(&prettier, buffer, cx) + .await + .transpose() + .ok() + .flatten() + } + Formatter::External { command, arguments } => { + let buffer_abs_path = buffer_abs_path.as_ref().map(|path| path.as_path()); + Self::format_via_external_command(buffer, buffer_abs_path, command, arguments, cx) + .await + .context(format!( + "failed to format via external command {:?}", + command + ))? 
+ .map(FormatOperation::External) + } + Formatter::CodeActions(code_actions) => { + let code_actions = deserialize_code_actions(code_actions); + if !code_actions.is_empty() { + LspStore::execute_code_actions_on_servers( + &lsp_store, + adapters_and_servers, + code_actions, + buffer, + push_to_history, + transaction, + cx, + ) + .await?; + } + None + } + }; + anyhow::Ok(result) + } + + async fn format_via_external_command( + buffer: &Model, + buffer_abs_path: Option<&Path>, + command: &str, + arguments: &[String], + cx: &mut AsyncAppContext, + ) -> Result> { + let working_dir_path = buffer.update(cx, |buffer, cx| { + let file = File::from_dyn(buffer.file())?; + let worktree = file.worktree.read(cx); + let mut worktree_path = worktree.abs_path().to_path_buf(); + if worktree.root_entry()?.is_file() { + worktree_path.pop(); + } + Some(worktree_path) + })?; + + let mut child = smol::process::Command::new(command); + #[cfg(target_os = "windows")] + { + use smol::process::windows::CommandExt; + child.creation_flags(windows::Win32::System::Threading::CREATE_NO_WINDOW.0); + } + + if let Some(working_dir_path) = working_dir_path { + child.current_dir(working_dir_path); + } + + let mut child = child + .args(arguments.iter().map(|arg| { + if let Some(buffer_abs_path) = buffer_abs_path { + arg.replace("{buffer_path}", &buffer_abs_path.to_string_lossy()) + } else { + arg.replace("{buffer_path}", "Untitled") + } + })) + .stdin(smol::process::Stdio::piped()) + .stdout(smol::process::Stdio::piped()) + .stderr(smol::process::Stdio::piped()) + .spawn()?; + + let stdin = child + .stdin + .as_mut() + .ok_or_else(|| anyhow!("failed to acquire stdin"))?; + let text = buffer.update(cx, |buffer, _| buffer.as_rope().clone())?; + for chunk in text.chunks() { + stdin.write_all(chunk.as_bytes()).await?; + } + stdin.flush().await?; + + let output = child.output().await?; + if !output.status.success() { + return Err(anyhow!( + "command failed with exit code {:?}:\nstdout: {}\nstderr: {}", + output.status.code(), + String::from_utf8_lossy(&output.stdout), + String::from_utf8_lossy(&output.stderr), + )); + } + + let stdout = String::from_utf8(output.stdout)?; + Ok(Some( + buffer + .update(cx, |buffer, cx| buffer.diff(stdout, cx))? 
+ .await, + )) + } } pub struct RemoteLspStore { @@ -221,8 +729,6 @@ pub enum LspStoreEvent { edits: Vec<(lsp::Range, Snippet)>, most_recent_edit: clock::Lamport, }, - StartFormattingLocalBuffer(BufferId), - FinishFormattingLocalBuffer(BufferId), } #[derive(Clone, Debug, Serialize)] @@ -251,6 +757,7 @@ impl LspStore { client.add_model_message_handler(Self::handle_start_language_server); client.add_model_message_handler(Self::handle_update_language_server); client.add_model_message_handler(Self::handle_update_diagnostic_summary); + client.add_model_request_handler(Self::handle_format_buffers); client.add_model_request_handler(Self::handle_resolve_completion_documentation); client.add_model_request_handler(Self::handle_apply_code_action); client.add_model_request_handler(Self::handle_inlay_hints); @@ -366,6 +873,8 @@ impl LspStore { language_server_watched_paths: Default::default(), language_server_watcher_registrations: Default::default(), current_lsp_settings: ProjectSettings::get_global(cx).lsp.clone(), + buffers_being_formatted: Default::default(), + last_formatting_failure: None, prettier_store, environment, http_client, @@ -387,6 +896,7 @@ impl LspStore { diagnostic_summaries: Default::default(), diagnostics: Default::default(), active_entry: None, + _maintain_workspace_config: Self::maintain_workspace_config(cx), _maintain_buffer_languages: Self::maintain_buffer_languages(languages.clone(), cx), } @@ -1276,7 +1786,7 @@ impl LspStore { } fn apply_on_type_formatting( - &self, + &mut self, buffer: Model, position: Anchor, trigger: String, @@ -1298,25 +1808,18 @@ impl LspStore { .map(language::proto::deserialize_transaction) .transpose() }) - } else { + } else if let Some(local) = self.as_local_mut() { + let buffer_id = buffer.read(cx).remote_id(); + local.buffers_being_formatted.insert(buffer_id); cx.spawn(move |this, mut cx| async move { - // Do not allow multiple concurrent formatting requests for the - // same buffer. - this.update(&mut cx, |_, cx| { - cx.emit(LspStoreEvent::StartFormattingLocalBuffer( - buffer.read(cx).remote_id(), - )); - })?; - let _cleanup = defer({ let this = this.clone(); let mut cx = cx.clone(); - let closure_buffer = buffer.clone(); move || { - this.update(&mut cx, |_, cx| { - cx.emit(LspStoreEvent::FinishFormattingLocalBuffer( - closure_buffer.read(cx).remote_id(), - )) + this.update(&mut cx, |this, _| { + if let Some(local) = this.as_local_mut() { + local.buffers_being_formatted.remove(&buffer_id); + } }) .ok(); } @@ -1333,6 +1836,8 @@ impl LspStore { })? 
.await }) + } else { + Task::ready(Err(anyhow!("No upstream client or local language server"))) } } @@ -4708,6 +5213,110 @@ impl LspStore { .map(language::proto::serialize_transaction), }) } + pub fn last_formatting_failure(&self) -> Option<&str> { + self.as_local() + .and_then(|local| local.last_formatting_failure.as_deref()) + } + + pub fn format( + &mut self, + buffers: HashSet>, + push_to_history: bool, + trigger: FormatTrigger, + cx: &mut ModelContext, + ) -> Task> { + if let Some(_) = self.as_local() { + let buffers_with_paths = buffers + .into_iter() + .map(|buffer_handle| { + let buffer = buffer_handle.read(cx); + let buffer_abs_path = File::from_dyn(buffer.file()) + .and_then(|file| file.as_local().map(|f| f.abs_path(cx))); + (buffer_handle, buffer_abs_path) + }) + .collect::>(); + + cx.spawn(move |lsp_store, mut cx| async move { + let result = LocalLspStore::format_locally( + lsp_store.clone(), + buffers_with_paths, + push_to_history, + trigger, + cx.clone(), + ) + .await; + + lsp_store.update(&mut cx, |lsp_store, _| { + let local = lsp_store.as_local_mut().unwrap(); + match &result { + Ok(_) => local.last_formatting_failure = None, + Err(error) => { + local.last_formatting_failure.replace(error.to_string()); + } + } + })?; + + result + }) + } else if let Some((client, project_id)) = self.upstream_client() { + cx.spawn(move |this, mut cx| async move { + let response = client + .request(proto::FormatBuffers { + project_id, + trigger: trigger as i32, + buffer_ids: buffers + .iter() + .map(|buffer| { + buffer.update(&mut cx, |buffer, _| buffer.remote_id().into()) + }) + .collect::>()?, + }) + .await? + .transaction + .ok_or_else(|| anyhow!("missing transaction"))?; + BufferStore::deserialize_project_transaction( + this.read_with(&cx, |this, _| this.buffer_store.downgrade())?, + response, + push_to_history, + cx, + ) + .await + }) + } else { + Task::ready(Ok(ProjectTransaction::default())) + } + } + + async fn handle_format_buffers( + this: Model, + envelope: TypedEnvelope, + mut cx: AsyncAppContext, + ) -> Result { + let sender_id = envelope.original_sender_id().unwrap_or_default(); + let format = this.update(&mut cx, |this, cx| { + let mut buffers = HashSet::default(); + for buffer_id in &envelope.payload.buffer_ids { + let buffer_id = BufferId::new(*buffer_id)?; + buffers.insert(this.buffer_store.read(cx).get_existing(buffer_id)?); + } + let trigger = FormatTrigger::from_proto(envelope.payload.trigger); + Ok::<_, anyhow::Error>(this.format(buffers, false, trigger, cx)) + })??; + + let project_transaction = format.await?; + let project_transaction = this.update(&mut cx, |this, cx| { + this.buffer_store.update(cx, |buffer_store, cx| { + buffer_store.serialize_project_transaction_for_peer( + project_transaction, + sender_id, + cx, + ) + }) + })?; + Ok(proto::FormatBuffersResponse { + transaction: Some(project_transaction), + }) + } fn language_settings<'a>( &'a self, diff --git a/crates/project/src/prettier_store.rs b/crates/project/src/prettier_store.rs index 75d70c1d3f72f..82bd8464b2e53 100644 --- a/crates/project/src/prettier_store.rs +++ b/crates/project/src/prettier_store.rs @@ -25,8 +25,8 @@ use smol::stream::StreamExt; use util::{ResultExt, TryFutureExt}; use crate::{ - worktree_store::WorktreeStore, File, FormatOperation, PathChange, ProjectEntryId, Worktree, - WorktreeId, + lsp_store::WorktreeId, worktree_store::WorktreeStore, File, PathChange, ProjectEntryId, + Worktree, }; pub struct PrettierStore { @@ -644,7 +644,7 @@ pub(super) async fn format_with_prettier( 
prettier_store: &WeakModel, buffer: &Model, cx: &mut AsyncAppContext, -) -> Option> { +) -> Option> { let prettier_instance = prettier_store .update(cx, |prettier_store, cx| { prettier_store.prettier_instance_for_buffer(buffer, cx) @@ -671,7 +671,7 @@ pub(super) async fn format_with_prettier( let format_result = prettier .format(buffer, buffer_path, cx) .await - .map(FormatOperation::Prettier) + .map(crate::lsp_store::FormatOperation::Prettier) .with_context(|| format!("{} failed to format buffer", prettier_description)); Some(format_result) diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index b1347c6d063f2..dc9337674b7eb 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -31,7 +31,7 @@ pub use environment::ProjectEnvironment; use futures::{ channel::mpsc::{self, UnboundedReceiver}, future::try_join_all, - AsyncWriteExt, StreamExt, + StreamExt, }; use git::{blame::Blame, repository::GitRepository}; @@ -41,17 +41,14 @@ use gpui::{ }; use itertools::Itertools; use language::{ - language_settings::{ - language_settings, FormatOnSave, Formatter, InlayHintKind, LanguageSettings, - SelectedFormatter, - }, + language_settings::InlayHintKind, proto::{ deserialize_anchor, serialize_anchor, serialize_line_ending, serialize_version, split_operations, }, Buffer, BufferEvent, CachedLspAdapter, Capability, CodeLabel, ContextProvider, DiagnosticEntry, - Diff, Documentation, File as _, Language, LanguageRegistry, LanguageServerName, PointUtf16, - ToOffset, ToPointUtf16, Transaction, Unclipped, + Documentation, File as _, Language, LanguageRegistry, LanguageServerName, PointUtf16, ToOffset, + ToPointUtf16, Transaction, Unclipped, }; use lsp::{CompletionContext, DocumentHighlightKind, LanguageServer, LanguageServerId}; use lsp_command::*; @@ -84,7 +81,7 @@ use task::{ }; use terminals::Terminals; use text::{Anchor, BufferId}; -use util::{defer, paths::compare_paths, ResultExt as _}; +use util::{paths::compare_paths, ResultExt as _}; use worktree::{CreatedEntry, Snapshot, Traversal}; use worktree_store::{WorktreeStore, WorktreeStoreEvent}; @@ -164,8 +161,6 @@ pub struct Project { search_included_history: SearchHistory, search_excluded_history: SearchHistory, snippets: Model, - last_formatting_failure: Option, - buffers_being_formatted: HashSet, environment: Model, settings_observer: Model, } @@ -477,31 +472,6 @@ impl Hover { } } -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub enum FormatTrigger { - Save, - Manual, -} - -// Currently, formatting operations are represented differently depending on -// whether they come from a language server or an external command. 
-#[derive(Debug)] -enum FormatOperation { - Lsp(Vec<(Range, String)>), - External(Diff), - Prettier(Diff), -} - -impl FormatTrigger { - fn from_proto(value: i32) -> FormatTrigger { - match value { - 0 => FormatTrigger::Save, - 1 => FormatTrigger::Manual, - _ => FormatTrigger::Save, - } - } -} - enum EntitySubscription { Project(PendingEntitySubscription), BufferStore(PendingEntitySubscription), @@ -591,7 +561,7 @@ impl Project { client.add_model_message_handler(Self::handle_update_worktree); client.add_model_request_handler(Self::handle_reload_buffers); client.add_model_request_handler(Self::handle_synchronize_buffers); - client.add_model_request_handler(Self::handle_format_buffers); + client.add_model_request_handler(Self::handle_search_project); client.add_model_request_handler(Self::handle_search_candidate_buffers); client.add_model_request_handler(Self::handle_open_buffer_by_id); @@ -695,8 +665,7 @@ impl Project { search_history: Self::new_search_history(), environment, remotely_created_models: Default::default(), - last_formatting_failure: None, - buffers_being_formatted: Default::default(), + search_included_history: Self::new_search_history(), search_excluded_history: Self::new_search_history(), } @@ -779,8 +748,7 @@ impl Project { search_history: Self::new_search_history(), environment, remotely_created_models: Default::default(), - last_formatting_failure: None, - buffers_being_formatted: Default::default(), + search_included_history: Self::new_search_history(), search_excluded_history: Self::new_search_history(), }; @@ -967,8 +935,6 @@ impl Project { search_excluded_history: Self::new_search_history(), environment: ProjectEnvironment::new(&worktree_store, None, cx), remotely_created_models: Arc::new(Mutex::new(RemotelyCreatedModels::default())), - last_formatting_failure: None, - buffers_being_formatted: Default::default(), }; this.set_role(role, cx); for worktree in worktrees { @@ -2061,12 +2027,6 @@ impl Project { cx.emit(Event::SnippetEdit(*buffer_id, edits.clone())) } } - LspStoreEvent::StartFormattingLocalBuffer(buffer_id) => { - self.buffers_being_formatted.insert(*buffer_id); - } - LspStoreEvent::FinishFormattingLocalBuffer(buffer_id) => { - self.buffers_being_formatted.remove(buffer_id); - } } } @@ -2352,8 +2312,8 @@ impl Project { self.lsp_store.read(cx).language_server_statuses() } - pub fn last_formatting_failure(&self) -> Option<&str> { - self.last_formatting_failure.as_deref() + pub fn last_formatting_failure<'a>(&self, cx: &'a AppContext) -> Option<&'a str> { + self.lsp_store.read(cx).last_formatting_failure() } pub fn update_diagnostics( @@ -2455,558 +2415,12 @@ impl Project { &mut self, buffers: HashSet>, push_to_history: bool, - trigger: FormatTrigger, + trigger: lsp_store::FormatTrigger, cx: &mut ModelContext, ) -> Task> { - if self.is_local_or_ssh() { - let buffers_with_paths = buffers - .into_iter() - .map(|buffer_handle| { - let buffer = buffer_handle.read(cx); - let buffer_abs_path = File::from_dyn(buffer.file()) - .and_then(|file| file.as_local().map(|f| f.abs_path(cx))); - (buffer_handle, buffer_abs_path) - }) - .collect::>(); - - cx.spawn(move |project, mut cx| async move { - let result = Self::format_locally( - project.clone(), - buffers_with_paths, - push_to_history, - trigger, - cx.clone(), - ) - .await; - - project.update(&mut cx, |project, _| match &result { - Ok(_) => project.last_formatting_failure = None, - Err(error) => { - project.last_formatting_failure.replace(error.to_string()); - } - })?; - - result - }) - } else { - let remote_id = 
self.remote_id(); - let client = self.client.clone(); - cx.spawn(move |this, mut cx| async move { - if let Some(project_id) = remote_id { - let response = client - .request(proto::FormatBuffers { - project_id, - trigger: trigger as i32, - buffer_ids: buffers - .iter() - .map(|buffer| { - buffer.update(&mut cx, |buffer, _| buffer.remote_id().into()) - }) - .collect::>()?, - }) - .await? - .transaction - .ok_or_else(|| anyhow!("missing transaction"))?; - BufferStore::deserialize_project_transaction( - this.read_with(&cx, |this, _| this.buffer_store.downgrade())?, - response, - push_to_history, - cx, - ) - .await - } else { - Ok(ProjectTransaction::default()) - } - }) - } - } - - async fn format_locally( - project: WeakModel, - mut buffers_with_paths: Vec<(Model, Option)>, - push_to_history: bool, - trigger: FormatTrigger, - mut cx: AsyncAppContext, - ) -> anyhow::Result { - // Do not allow multiple concurrent formatting requests for the - // same buffer. - let lsp_store = project.update(&mut cx, |this, cx| { - buffers_with_paths.retain(|(buffer, _)| { - this.buffers_being_formatted - .insert(buffer.read(cx).remote_id()) - }); - this.lsp_store.downgrade() - })?; - - let _cleanup = defer({ - let this = project.clone(); - let mut cx = cx.clone(); - let buffers = &buffers_with_paths; - move || { - this.update(&mut cx, |this, cx| { - for (buffer, _) in buffers { - this.buffers_being_formatted - .remove(&buffer.read(cx).remote_id()); - } - }) - .ok(); - } - }); - - let mut project_transaction = ProjectTransaction::default(); - for (buffer, buffer_abs_path) in &buffers_with_paths { - let (primary_adapter_and_server, adapters_and_servers) = - project.update(&mut cx, |project, cx| { - let buffer = buffer.read(cx); - - let adapters_and_servers = project - .language_servers_for_buffer(buffer, cx) - .map(|(adapter, lsp)| (adapter.clone(), lsp.clone())) - .collect::>(); - - let primary_adapter = project - .lsp_store - .read(cx) - .primary_language_server_for_buffer(buffer, cx) - .map(|(adapter, lsp)| (adapter.clone(), lsp.clone())); - - (primary_adapter, adapters_and_servers) - })?; - - let settings = buffer.update(&mut cx, |buffer, cx| { - language_settings(buffer.language(), buffer.file(), cx).clone() - })?; - - let remove_trailing_whitespace = settings.remove_trailing_whitespace_on_save; - let ensure_final_newline = settings.ensure_final_newline_on_save; - - // First, format buffer's whitespace according to the settings. - let trailing_whitespace_diff = if remove_trailing_whitespace { - Some( - buffer - .update(&mut cx, |b, cx| b.remove_trailing_whitespace(cx))? - .await, - ) - } else { - None - }; - let whitespace_transaction_id = buffer.update(&mut cx, |buffer, cx| { - buffer.finalize_last_transaction(); - buffer.start_transaction(); - if let Some(diff) = trailing_whitespace_diff { - buffer.apply_diff(diff, cx); - } - if ensure_final_newline { - buffer.ensure_final_newline(cx); - } - buffer.end_transaction(cx) - })?; - - // Apply the `code_actions_on_format` before we run the formatter. 
- let code_actions = deserialize_code_actions(&settings.code_actions_on_format); - #[allow(clippy::nonminimal_bool)] - if !code_actions.is_empty() - && !(trigger == FormatTrigger::Save && settings.format_on_save == FormatOnSave::Off) - { - LspStore::execute_code_actions_on_servers( - &lsp_store, - &adapters_and_servers, - code_actions, - buffer, - push_to_history, - &mut project_transaction, - &mut cx, - ) - .await?; - } - - // Apply language-specific formatting using either the primary language server - // or external command. - // Except for code actions, which are applied with all connected language servers. - let primary_language_server = - primary_adapter_and_server.map(|(_adapter, server)| server.clone()); - let server_and_buffer = primary_language_server - .as_ref() - .zip(buffer_abs_path.as_ref()); - - let prettier_settings = buffer.read_with(&cx, |buffer, cx| { - language_settings(buffer.language(), buffer.file(), cx) - .prettier - .clone() - })?; - - let mut format_operations: Vec = vec![]; - { - match trigger { - FormatTrigger::Save => { - match &settings.format_on_save { - FormatOnSave::Off => { - // nothing - } - FormatOnSave::On => { - match &settings.formatter { - SelectedFormatter::Auto => { - // do the auto-format: prefer prettier, fallback to primary language server - let diff = { - if prettier_settings.allowed { - Self::perform_format( - &Formatter::Prettier, - server_and_buffer, - project.clone(), - buffer, - buffer_abs_path, - &settings, - &adapters_and_servers, - push_to_history, - &mut project_transaction, - &mut cx, - ) - .await - } else { - Self::perform_format( - &Formatter::LanguageServer { name: None }, - server_and_buffer, - project.clone(), - buffer, - buffer_abs_path, - &settings, - &adapters_and_servers, - push_to_history, - &mut project_transaction, - &mut cx, - ) - .await - } - } - .log_err() - .flatten(); - if let Some(op) = diff { - format_operations.push(op); - } - } - SelectedFormatter::List(formatters) => { - for formatter in formatters.as_ref() { - let diff = Self::perform_format( - formatter, - server_and_buffer, - project.clone(), - buffer, - buffer_abs_path, - &settings, - &adapters_and_servers, - push_to_history, - &mut project_transaction, - &mut cx, - ) - .await - .log_err() - .flatten(); - if let Some(op) = diff { - format_operations.push(op); - } - - // format with formatter - } - } - } - } - FormatOnSave::List(formatters) => { - for formatter in formatters.as_ref() { - let diff = Self::perform_format( - formatter, - server_and_buffer, - project.clone(), - buffer, - buffer_abs_path, - &settings, - &adapters_and_servers, - push_to_history, - &mut project_transaction, - &mut cx, - ) - .await - .log_err() - .flatten(); - if let Some(op) = diff { - format_operations.push(op); - } - } - } - } - } - FormatTrigger::Manual => { - match &settings.formatter { - SelectedFormatter::Auto => { - // do the auto-format: prefer prettier, fallback to primary language server - let diff = { - if prettier_settings.allowed { - Self::perform_format( - &Formatter::Prettier, - server_and_buffer, - project.clone(), - buffer, - buffer_abs_path, - &settings, - &adapters_and_servers, - push_to_history, - &mut project_transaction, - &mut cx, - ) - .await - } else { - Self::perform_format( - &Formatter::LanguageServer { name: None }, - server_and_buffer, - project.clone(), - buffer, - buffer_abs_path, - &settings, - &adapters_and_servers, - push_to_history, - &mut project_transaction, - &mut cx, - ) - .await - } - } - .log_err() - .flatten(); - - if let Some(op) = diff { - 
format_operations.push(op) - } - } - SelectedFormatter::List(formatters) => { - for formatter in formatters.as_ref() { - // format with formatter - let diff = Self::perform_format( - formatter, - server_and_buffer, - project.clone(), - buffer, - buffer_abs_path, - &settings, - &adapters_and_servers, - push_to_history, - &mut project_transaction, - &mut cx, - ) - .await - .log_err() - .flatten(); - if let Some(op) = diff { - format_operations.push(op); - } - } - } - } - } - } - } - - buffer.update(&mut cx, |b, cx| { - // If the buffer had its whitespace formatted and was edited while the language-specific - // formatting was being computed, avoid applying the language-specific formatting, because - // it can't be grouped with the whitespace formatting in the undo history. - if let Some(transaction_id) = whitespace_transaction_id { - if b.peek_undo_stack() - .map_or(true, |e| e.transaction_id() != transaction_id) - { - format_operations.clear(); - } - } - - // Apply any language-specific formatting, and group the two formatting operations - // in the buffer's undo history. - for operation in format_operations { - match operation { - FormatOperation::Lsp(edits) => { - b.edit(edits, None, cx); - } - FormatOperation::External(diff) => { - b.apply_diff(diff, cx); - } - FormatOperation::Prettier(diff) => { - b.apply_diff(diff, cx); - } - } - - if let Some(transaction_id) = whitespace_transaction_id { - b.group_until_transaction(transaction_id); - } else if let Some(transaction) = project_transaction.0.get(buffer) { - b.group_until_transaction(transaction.id) - } - } - - if let Some(transaction) = b.finalize_last_transaction().cloned() { - if !push_to_history { - b.forget_transaction(transaction.id); - } - project_transaction.0.insert(buffer.clone(), transaction); - } - })?; - } - - Ok(project_transaction) - } - - #[allow(clippy::too_many_arguments)] - async fn perform_format( - formatter: &Formatter, - primary_server_and_buffer: Option<(&Arc, &PathBuf)>, - project: WeakModel, - buffer: &Model, - buffer_abs_path: &Option, - settings: &LanguageSettings, - adapters_and_servers: &[(Arc, Arc)], - push_to_history: bool, - transaction: &mut ProjectTransaction, - cx: &mut AsyncAppContext, - ) -> Result, anyhow::Error> { - let result = match formatter { - Formatter::LanguageServer { name } => { - if let Some((language_server, buffer_abs_path)) = primary_server_and_buffer { - let language_server = if let Some(name) = name { - adapters_and_servers - .iter() - .find_map(|(adapter, server)| { - adapter.name.0.as_ref().eq(name.as_str()).then_some(server) - }) - .unwrap_or(language_server) - } else { - language_server - }; - - let lsp_store = project.update(cx, |p, _| p.lsp_store.downgrade())?; - Some(FormatOperation::Lsp( - LspStore::format_via_lsp( - &lsp_store, - buffer, - buffer_abs_path, - language_server, - settings, - cx, - ) - .await - .context("failed to format via language server")?, - )) - } else { - None - } - } - Formatter::Prettier => { - let prettier = project.update(cx, |project, cx| { - project - .lsp_store - .read(cx) - .prettier_store() - .unwrap() - .downgrade() - })?; - prettier_store::format_with_prettier(&prettier, buffer, cx) - .await - .transpose() - .ok() - .flatten() - } - Formatter::External { command, arguments } => { - let buffer_abs_path = buffer_abs_path.as_ref().map(|path| path.as_path()); - Self::format_via_external_command(buffer, buffer_abs_path, command, arguments, cx) - .await - .context(format!( - "failed to format via external command {:?}", - command - ))? 
- .map(FormatOperation::External) - } - Formatter::CodeActions(code_actions) => { - let code_actions = deserialize_code_actions(code_actions); - let lsp_store = project.update(cx, |p, _| p.lsp_store.downgrade())?; - if !code_actions.is_empty() { - LspStore::execute_code_actions_on_servers( - &lsp_store, - adapters_and_servers, - code_actions, - buffer, - push_to_history, - transaction, - cx, - ) - .await?; - } - None - } - }; - anyhow::Ok(result) - } - - async fn format_via_external_command( - buffer: &Model, - buffer_abs_path: Option<&Path>, - command: &str, - arguments: &[String], - cx: &mut AsyncAppContext, - ) -> Result> { - let working_dir_path = buffer.update(cx, |buffer, cx| { - let file = File::from_dyn(buffer.file())?; - let worktree = file.worktree.read(cx); - let mut worktree_path = worktree.abs_path().to_path_buf(); - if worktree.root_entry()?.is_file() { - worktree_path.pop(); - } - Some(worktree_path) - })?; - - let mut child = smol::process::Command::new(command); - #[cfg(target_os = "windows")] - { - use smol::process::windows::CommandExt; - child.creation_flags(windows::Win32::System::Threading::CREATE_NO_WINDOW.0); - } - - if let Some(working_dir_path) = working_dir_path { - child.current_dir(working_dir_path); - } - - let mut child = child - .args(arguments.iter().map(|arg| { - if let Some(buffer_abs_path) = buffer_abs_path { - arg.replace("{buffer_path}", &buffer_abs_path.to_string_lossy()) - } else { - arg.replace("{buffer_path}", "Untitled") - } - })) - .stdin(smol::process::Stdio::piped()) - .stdout(smol::process::Stdio::piped()) - .stderr(smol::process::Stdio::piped()) - .spawn()?; - - let stdin = child - .stdin - .as_mut() - .ok_or_else(|| anyhow!("failed to acquire stdin"))?; - let text = buffer.update(cx, |buffer, _| buffer.as_rope().clone())?; - for chunk in text.chunks() { - stdin.write_all(chunk.as_bytes()).await?; - } - stdin.flush().await?; - - let output = child.output().await?; - if !output.status.success() { - return Err(anyhow!( - "command failed with exit code {:?}:\nstdout: {}\nstderr: {}", - output.status.code(), - String::from_utf8_lossy(&output.stdout), - String::from_utf8_lossy(&output.stderr), - )); - } - - let stdout = String::from_utf8(output.stdout)?; - Ok(Some( - buffer - .update(cx, |buffer, cx| buffer.diff(stdout, cx))? 
- .await, - )) + self.lsp_store.update(cx, |lsp_store, cx| { + lsp_store.format(buffers, push_to_history, trigger, cx) + }) } #[inline(never)] @@ -4210,31 +3624,6 @@ impl Project { Ok(response) } - async fn handle_format_buffers( - this: Model, - envelope: TypedEnvelope, - mut cx: AsyncAppContext, - ) -> Result { - let sender_id = envelope.original_sender_id()?; - let format = this.update(&mut cx, |this, cx| { - let mut buffers = HashSet::default(); - for buffer_id in &envelope.payload.buffer_ids { - let buffer_id = BufferId::new(*buffer_id)?; - buffers.insert(this.buffer_store.read(cx).get_existing(buffer_id)?); - } - let trigger = FormatTrigger::from_proto(envelope.payload.trigger); - Ok::<_, anyhow::Error>(this.format(buffers, false, trigger, cx)) - })??; - - let project_transaction = format.await?; - let project_transaction = this.update(&mut cx, |this, cx| { - this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx) - })?; - Ok(proto::FormatBuffersResponse { - transaction: Some(project_transaction), - }) - } - async fn handle_task_context_for_location( project: Model, envelope: TypedEnvelope, diff --git a/crates/project/src/project_tests.rs b/crates/project/src/project_tests.rs index a7d2e6766c233..9e58caa244243 100644 --- a/crates/project/src/project_tests.rs +++ b/crates/project/src/project_tests.rs @@ -4,7 +4,7 @@ use futures::{future, StreamExt}; use gpui::{AppContext, SemanticVersion, UpdateGlobal}; use http_client::Url; use language::{ - language_settings::{AllLanguageSettings, LanguageSettingsContent}, + language_settings::{language_settings, AllLanguageSettings, LanguageSettingsContent}, tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticSet, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding, OffsetRangeExt, Point, ToPoint, }; From 3ba071b993099cdd9365f2223dd41fabf26df266 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Mon, 23 Sep 2024 15:28:04 -0600 Subject: [PATCH 023/228] Allow using system node (#18172) Release Notes: - (Potentially breaking change) Zed will now use the node installed on your $PATH (if it is more recent than v18) instead of downloading its own. You can disable the new behavior with `{"node": {"disable_path_lookup": true}}` in your settings. We do not yet use system/project-local node_modules. 
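As a rough illustration of the lookup order described above (a sketch under assumptions, not the actual `node_runtime` implementation; the helper shape and the exact reading of "more recent than v18" are assumptions):

```rust
// Sketch only: prefer a sufficiently new `node` found on $PATH unless the user
// opted out via `node.disable_path_lookup`; otherwise fall back to the
// Zed-managed download. The `>= 18` cutoff is an assumed reading of the note.
fn choose_node(path_node_major_version: Option<u32>, disable_path_lookup: bool) -> &'static str {
    match path_node_major_version {
        Some(major) if !disable_path_lookup && major >= 18 => "use node from $PATH",
        _ => "download and use Zed's managed node",
    }
}

fn main() {
    assert_eq!(choose_node(Some(20), false), "use node from $PATH");
    assert_eq!(choose_node(Some(16), false), "download and use Zed's managed node");
    assert_eq!(choose_node(Some(20), true), "download and use Zed's managed node");
}
```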
--------- Co-authored-by: Mikayla --- Cargo.lock | 4 + assets/settings/default.json | 15 + crates/collab/src/tests/test_server.rs | 6 +- crates/copilot/src/copilot.rs | 12 +- crates/evals/src/eval.rs | 4 +- crates/extension/src/extension_store.rs | 4 +- crates/extension/src/extension_store_test.rs | 6 +- crates/extension/src/wasm_host.rs | 4 +- crates/headless/src/headless.rs | 2 +- crates/http_client/src/http_client.rs | 29 + crates/language/src/language.rs | 1 + crates/languages/src/css.rs | 10 +- crates/languages/src/json.rs | 10 +- crates/languages/src/lib.rs | 6 +- crates/languages/src/python.rs | 10 +- crates/languages/src/tailwind.rs | 10 +- crates/languages/src/typescript.rs | 24 +- crates/languages/src/vtsls.rs | 10 +- crates/languages/src/yaml.rs | 10 +- crates/markdown/examples/markdown.rs | 4 +- crates/markdown/examples/markdown_as_child.rs | 4 +- crates/node_runtime/Cargo.toml | 2 + crates/node_runtime/src/node_runtime.rs | 646 +++++++++++------- crates/prettier/src/prettier.rs | 4 +- crates/project/src/lsp_store.rs | 33 +- crates/project/src/prettier_store.rs | 18 +- crates/project/src/project.rs | 12 +- crates/project/src/project_settings.rs | 15 + crates/remote_server/src/headless_project.rs | 4 +- .../remote_server/src/remote_editing_tests.rs | 4 +- crates/workspace/src/workspace.rs | 10 +- crates/zed/Cargo.toml | 2 + crates/zed/src/main.rs | 32 +- crates/zed/src/zed.rs | 2 +- 34 files changed, 596 insertions(+), 373 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e345736295613..894dd00f6d7f7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7190,6 +7190,7 @@ dependencies = [ "async-std", "async-tar", "async-trait", + "async-watch", "async_zip", "futures 0.3.30", "http_client", @@ -7202,6 +7203,7 @@ dependencies = [ "tempfile", "util", "walkdir", + "which 6.0.3", "windows 0.58.0", ] @@ -14393,6 +14395,7 @@ dependencies = [ "ashpd", "assets", "assistant", + "async-watch", "audio", "auto_update", "backtrace", @@ -14466,6 +14469,7 @@ dependencies = [ "session", "settings", "settings_ui", + "shellexpand 2.1.2", "simplelog", "smol", "snippet_provider", diff --git a/assets/settings/default.json b/assets/settings/default.json index e04ab90f217cd..3e8d3c8c70dd7 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -771,6 +771,21 @@ "pyrightconfig.json" ] }, + /// By default use a recent system version of node, or install our own. + /// You can override this to use a version of node that is not in $PATH with: + /// { + /// "node": { + /// "node_path": "/path/to/node" + /// "npm_path": "/path/to/npm" (defaults to node_path/../npm) + /// } + /// } + /// or to ensure Zed always downloads and installs an isolated version of node: + /// { + /// "node": { + /// "disable_path_lookup": true + /// } + /// NOTE: changing this setting currently requires restarting Zed. + "node": {}, // The extensions that Zed should automatically install on startup. 
// // If you don't want any of these extensions, add this field to your settings diff --git a/crates/collab/src/tests/test_server.rs b/crates/collab/src/tests/test_server.rs index 1421e4c7f7aed..6f07d76b0b26b 100644 --- a/crates/collab/src/tests/test_server.rs +++ b/crates/collab/src/tests/test_server.rs @@ -21,7 +21,7 @@ use git::GitHostingProviderRegistry; use gpui::{BackgroundExecutor, Context, Model, Task, TestAppContext, View, VisualTestContext}; use http_client::FakeHttpClient; use language::LanguageRegistry; -use node_runtime::FakeNodeRuntime; +use node_runtime::NodeRuntime; use notifications::NotificationStore; use parking_lot::Mutex; use project::{Project, WorktreeId}; @@ -278,7 +278,7 @@ impl TestServer { languages: language_registry, fs: fs.clone(), build_window_options: |_, _| Default::default(), - node_runtime: FakeNodeRuntime::new(), + node_runtime: NodeRuntime::unavailable(), session, }); @@ -408,7 +408,7 @@ impl TestServer { languages: language_registry, fs: fs.clone(), build_window_options: |_, _| Default::default(), - node_runtime: FakeNodeRuntime::new(), + node_runtime: NodeRuntime::unavailable(), session, }); diff --git a/crates/copilot/src/copilot.rs b/crates/copilot/src/copilot.rs index cdbe65ba1dcca..a1fd7a9bb9668 100644 --- a/crates/copilot/src/copilot.rs +++ b/crates/copilot/src/copilot.rs @@ -57,7 +57,7 @@ pub fn init( new_server_id: LanguageServerId, fs: Arc, http: Arc, - node_runtime: Arc, + node_runtime: NodeRuntime, cx: &mut AppContext, ) { copilot_chat::init(fs, http.clone(), cx); @@ -302,7 +302,7 @@ pub struct Completion { pub struct Copilot { http: Arc, - node_runtime: Arc, + node_runtime: NodeRuntime, server: CopilotServer, buffers: HashSet>, server_id: LanguageServerId, @@ -334,7 +334,7 @@ impl Copilot { fn start( new_server_id: LanguageServerId, http: Arc, - node_runtime: Arc, + node_runtime: NodeRuntime, cx: &mut ModelContext, ) -> Self { let mut this = Self { @@ -392,7 +392,7 @@ impl Copilot { #[cfg(any(test, feature = "test-support"))] pub fn fake(cx: &mut gpui::TestAppContext) -> (Model, lsp::FakeLanguageServer) { use lsp::FakeLanguageServer; - use node_runtime::FakeNodeRuntime; + use node_runtime::NodeRuntime; let (server, fake_server) = FakeLanguageServer::new( LanguageServerId(0), @@ -406,7 +406,7 @@ impl Copilot { cx.to_async(), ); let http = http_client::FakeHttpClient::create(|_| async { unreachable!() }); - let node_runtime = FakeNodeRuntime::new(); + let node_runtime = NodeRuntime::unavailable(); let this = cx.new_model(|cx| Self { server_id: LanguageServerId(0), http: http.clone(), @@ -425,7 +425,7 @@ impl Copilot { async fn start_language_server( new_server_id: LanguageServerId, http: Arc, - node_runtime: Arc, + node_runtime: NodeRuntime, this: WeakModel, mut cx: AsyncAppContext, ) { diff --git a/crates/evals/src/eval.rs b/crates/evals/src/eval.rs index e2c8b42644a31..899d821053711 100644 --- a/crates/evals/src/eval.rs +++ b/crates/evals/src/eval.rs @@ -9,7 +9,7 @@ use git::GitHostingProviderRegistry; use gpui::{AsyncAppContext, BackgroundExecutor, Context, Model}; use http_client::{HttpClient, Method}; use language::LanguageRegistry; -use node_runtime::FakeNodeRuntime; +use node_runtime::NodeRuntime; use open_ai::OpenAiEmbeddingModel; use project::Project; use semantic_index::{ @@ -292,7 +292,7 @@ async fn run_evaluation( let user_store = cx .new_model(|cx| UserStore::new(client.clone(), cx)) .unwrap(); - let node_runtime = Arc::new(FakeNodeRuntime {}); + let node_runtime = NodeRuntime::unavailable(); let evaluations = 
fs::read(&evaluations_path).expect("failed to read evaluations.json"); let evaluations: Vec = serde_json::from_slice(&evaluations).unwrap(); diff --git a/crates/extension/src/extension_store.rs b/crates/extension/src/extension_store.rs index 8dbd618a25784..5f9fbffb11b2e 100644 --- a/crates/extension/src/extension_store.rs +++ b/crates/extension/src/extension_store.rs @@ -177,7 +177,7 @@ actions!(zed, [ReloadExtensions]); pub fn init( fs: Arc, client: Arc, - node_runtime: Arc, + node_runtime: NodeRuntime, language_registry: Arc, theme_registry: Arc, cx: &mut AppContext, @@ -228,7 +228,7 @@ impl ExtensionStore { http_client: Arc, builder_client: Arc, telemetry: Option>, - node_runtime: Arc, + node_runtime: NodeRuntime, language_registry: Arc, theme_registry: Arc, slash_command_registry: Arc, diff --git a/crates/extension/src/extension_store_test.rs b/crates/extension/src/extension_store_test.rs index 4bdafaa32c2af..126e6b2cfbdad 100644 --- a/crates/extension/src/extension_store_test.rs +++ b/crates/extension/src/extension_store_test.rs @@ -15,7 +15,7 @@ use http_client::{FakeHttpClient, Response}; use indexed_docs::IndexedDocsRegistry; use isahc_http_client::IsahcHttpClient; use language::{LanguageMatcher, LanguageRegistry, LanguageServerBinaryStatus, LanguageServerName}; -use node_runtime::FakeNodeRuntime; +use node_runtime::NodeRuntime; use parking_lot::Mutex; use project::{Project, DEFAULT_COMPLETION_CONTEXT}; use release_channel::AppVersion; @@ -264,7 +264,7 @@ async fn test_extension_store(cx: &mut TestAppContext) { let slash_command_registry = SlashCommandRegistry::new(); let indexed_docs_registry = Arc::new(IndexedDocsRegistry::new(cx.executor())); let snippet_registry = Arc::new(SnippetRegistry::new()); - let node_runtime = FakeNodeRuntime::new(); + let node_runtime = NodeRuntime::unavailable(); let store = cx.new_model(|cx| { ExtensionStore::new( @@ -490,7 +490,7 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) { let slash_command_registry = SlashCommandRegistry::new(); let indexed_docs_registry = Arc::new(IndexedDocsRegistry::new(cx.executor())); let snippet_registry = Arc::new(SnippetRegistry::new()); - let node_runtime = FakeNodeRuntime::new(); + let node_runtime = NodeRuntime::unavailable(); let mut status_updates = language_registry.language_server_binary_statuses(); diff --git a/crates/extension/src/wasm_host.rs b/crates/extension/src/wasm_host.rs index 039f2d923b0d2..b3fd13a5bada3 100644 --- a/crates/extension/src/wasm_host.rs +++ b/crates/extension/src/wasm_host.rs @@ -33,7 +33,7 @@ pub(crate) struct WasmHost { engine: Engine, release_channel: ReleaseChannel, http_client: Arc, - node_runtime: Arc, + node_runtime: NodeRuntime, pub(crate) language_registry: Arc, fs: Arc, pub(crate) work_dir: PathBuf, @@ -80,7 +80,7 @@ impl WasmHost { pub fn new( fs: Arc, http_client: Arc, - node_runtime: Arc, + node_runtime: NodeRuntime, language_registry: Arc, work_dir: PathBuf, cx: &mut AppContext, diff --git a/crates/headless/src/headless.rs b/crates/headless/src/headless.rs index a5504500da017..1405577643606 100644 --- a/crates/headless/src/headless.rs +++ b/crates/headless/src/headless.rs @@ -25,7 +25,7 @@ pub struct DevServer { } pub struct AppState { - pub node_runtime: Arc, + pub node_runtime: NodeRuntime, pub user_store: Model, pub languages: Arc, pub fs: Arc, diff --git a/crates/http_client/src/http_client.rs b/crates/http_client/src/http_client.rs index d78b2dd23c7f7..c0630151519c5 100644 --- a/crates/http_client/src/http_client.rs +++ 
b/crates/http_client/src/http_client.rs @@ -264,6 +264,35 @@ pub fn read_proxy_from_env() -> Option { None } +pub struct BlockedHttpClient; + +impl HttpClient for BlockedHttpClient { + fn send( + &self, + _req: Request, + ) -> BoxFuture<'static, Result, anyhow::Error>> { + Box::pin(async { + Err(std::io::Error::new( + std::io::ErrorKind::PermissionDenied, + "BlockedHttpClient disallowed request", + ) + .into()) + }) + } + + fn proxy(&self) -> Option<&Uri> { + None + } + + fn send_with_redirect_policy( + &self, + req: Request, + _: bool, + ) -> BoxFuture<'static, Result, anyhow::Error>> { + self.send(req) + } +} + #[cfg(feature = "test-support")] type FakeHttpHandler = Box< dyn Fn(Request) -> BoxFuture<'static, Result, anyhow::Error>> diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 29a7ac1860b0c..166d846f86e8b 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -564,6 +564,7 @@ async fn try_fetch_server_binary let name = adapter.name(); log::info!("fetching latest version of language server {:?}", name.0); delegate.update_status(name.clone(), LanguageServerBinaryStatus::CheckingForUpdate); + let latest_version = adapter .fetch_latest_server_version(delegate.as_ref()) .await?; diff --git a/crates/languages/src/css.rs b/crates/languages/src/css.rs index cf259d69d321a..7b7e9ae77f06f 100644 --- a/crates/languages/src/css.rs +++ b/crates/languages/src/css.rs @@ -22,11 +22,11 @@ fn server_binary_arguments(server_path: &Path) -> Vec { } pub struct CssLspAdapter { - node: Arc, + node: NodeRuntime, } impl CssLspAdapter { - pub fn new(node: Arc) -> Self { + pub fn new(node: NodeRuntime) -> Self { CssLspAdapter { node } } } @@ -81,14 +81,14 @@ impl LspAdapter for CssLspAdapter { container_dir: PathBuf, _: &dyn LspAdapterDelegate, ) -> Option { - get_cached_server_binary(container_dir, &*self.node).await + get_cached_server_binary(container_dir, &self.node).await } async fn installation_test_binary( &self, container_dir: PathBuf, ) -> Option { - get_cached_server_binary(container_dir, &*self.node).await + get_cached_server_binary(container_dir, &self.node).await } async fn initialization_options( @@ -103,7 +103,7 @@ impl LspAdapter for CssLspAdapter { async fn get_cached_server_binary( container_dir: PathBuf, - node: &dyn NodeRuntime, + node: &NodeRuntime, ) -> Option { maybe!(async { let mut last_version_dir = None; diff --git a/crates/languages/src/json.rs b/crates/languages/src/json.rs index 6b5f74c2634b4..44cc68387676e 100644 --- a/crates/languages/src/json.rs +++ b/crates/languages/src/json.rs @@ -59,13 +59,13 @@ fn server_binary_arguments(server_path: &Path) -> Vec { } pub struct JsonLspAdapter { - node: Arc, + node: NodeRuntime, languages: Arc, workspace_config: OnceLock, } impl JsonLspAdapter { - pub fn new(node: Arc, languages: Arc) -> Self { + pub fn new(node: NodeRuntime, languages: Arc) -> Self { Self { node, languages, @@ -183,14 +183,14 @@ impl LspAdapter for JsonLspAdapter { container_dir: PathBuf, _: &dyn LspAdapterDelegate, ) -> Option { - get_cached_server_binary(container_dir, &*self.node).await + get_cached_server_binary(container_dir, &self.node).await } async fn installation_test_binary( &self, container_dir: PathBuf, ) -> Option { - get_cached_server_binary(container_dir, &*self.node).await + get_cached_server_binary(container_dir, &self.node).await } async fn initialization_options( @@ -226,7 +226,7 @@ impl LspAdapter for JsonLspAdapter { async fn get_cached_server_binary( container_dir: PathBuf, - node: &dyn 
NodeRuntime, + node: &NodeRuntime, ) -> Option { maybe!(async { let mut last_version_dir = None; diff --git a/crates/languages/src/lib.rs b/crates/languages/src/lib.rs index 0a3fc71d08961..7435ddb13196d 100644 --- a/crates/languages/src/lib.rs +++ b/crates/languages/src/lib.rs @@ -30,11 +30,7 @@ mod yaml; #[exclude = "*.rs"] struct LanguageDir; -pub fn init( - languages: Arc, - node_runtime: Arc, - cx: &mut AppContext, -) { +pub fn init(languages: Arc, node_runtime: NodeRuntime, cx: &mut AppContext) { languages.register_native_grammars([ ("bash", tree_sitter_bash::LANGUAGE), ("c", tree_sitter_c::LANGUAGE), diff --git a/crates/languages/src/python.rs b/crates/languages/src/python.rs index 0dce8fb661761..75f124489c382 100644 --- a/crates/languages/src/python.rs +++ b/crates/languages/src/python.rs @@ -26,13 +26,13 @@ fn server_binary_arguments(server_path: &Path) -> Vec { } pub struct PythonLspAdapter { - node: Arc, + node: NodeRuntime, } impl PythonLspAdapter { const SERVER_NAME: LanguageServerName = LanguageServerName::new_static("pyright"); - pub fn new(node: Arc) -> Self { + pub fn new(node: NodeRuntime) -> Self { PythonLspAdapter { node } } } @@ -94,14 +94,14 @@ impl LspAdapter for PythonLspAdapter { container_dir: PathBuf, _: &dyn LspAdapterDelegate, ) -> Option { - get_cached_server_binary(container_dir, &*self.node).await + get_cached_server_binary(container_dir, &self.node).await } async fn installation_test_binary( &self, container_dir: PathBuf, ) -> Option { - get_cached_server_binary(container_dir, &*self.node).await + get_cached_server_binary(container_dir, &self.node).await } async fn process_completions(&self, items: &mut [lsp::CompletionItem]) { @@ -198,7 +198,7 @@ impl LspAdapter for PythonLspAdapter { async fn get_cached_server_binary( container_dir: PathBuf, - node: &dyn NodeRuntime, + node: &NodeRuntime, ) -> Option { let server_path = container_dir.join(SERVER_PATH); if server_path.exists() { diff --git a/crates/languages/src/tailwind.rs b/crates/languages/src/tailwind.rs index e3e17a8fa72eb..62d967d6a4a26 100644 --- a/crates/languages/src/tailwind.rs +++ b/crates/languages/src/tailwind.rs @@ -28,14 +28,14 @@ fn server_binary_arguments(server_path: &Path) -> Vec { } pub struct TailwindLspAdapter { - node: Arc, + node: NodeRuntime, } impl TailwindLspAdapter { const SERVER_NAME: LanguageServerName = LanguageServerName::new_static("tailwindcss-language-server"); - pub fn new(node: Arc) -> Self { + pub fn new(node: NodeRuntime) -> Self { TailwindLspAdapter { node } } } @@ -122,14 +122,14 @@ impl LspAdapter for TailwindLspAdapter { container_dir: PathBuf, _: &dyn LspAdapterDelegate, ) -> Option { - get_cached_server_binary(container_dir, &*self.node).await + get_cached_server_binary(container_dir, &self.node).await } async fn installation_test_binary( &self, container_dir: PathBuf, ) -> Option { - get_cached_server_binary(container_dir, &*self.node).await + get_cached_server_binary(container_dir, &self.node).await } async fn initialization_options( @@ -198,7 +198,7 @@ impl LspAdapter for TailwindLspAdapter { async fn get_cached_server_binary( container_dir: PathBuf, - node: &dyn NodeRuntime, + node: &NodeRuntime, ) -> Option { maybe!(async { let mut last_version_dir = None; diff --git a/crates/languages/src/typescript.rs b/crates/languages/src/typescript.rs index b09216c970369..25a97c8014d0a 100644 --- a/crates/languages/src/typescript.rs +++ b/crates/languages/src/typescript.rs @@ -65,7 +65,7 @@ fn eslint_server_binary_arguments(server_path: &Path) -> Vec { } pub struct 
TypeScriptLspAdapter { - node: Arc, + node: NodeRuntime, } impl TypeScriptLspAdapter { @@ -73,7 +73,7 @@ impl TypeScriptLspAdapter { const NEW_SERVER_PATH: &'static str = "node_modules/typescript-language-server/lib/cli.mjs"; const SERVER_NAME: LanguageServerName = LanguageServerName::new_static("typescript-language-server"); - pub fn new(node: Arc) -> Self { + pub fn new(node: NodeRuntime) -> Self { TypeScriptLspAdapter { node } } async fn tsdk_path(adapter: &Arc) -> &'static str { @@ -161,14 +161,14 @@ impl LspAdapter for TypeScriptLspAdapter { container_dir: PathBuf, _: &dyn LspAdapterDelegate, ) -> Option { - get_cached_ts_server_binary(container_dir, &*self.node).await + get_cached_ts_server_binary(container_dir, &self.node).await } async fn installation_test_binary( &self, container_dir: PathBuf, ) -> Option { - get_cached_ts_server_binary(container_dir, &*self.node).await + get_cached_ts_server_binary(container_dir, &self.node).await } fn code_action_kinds(&self) -> Option> { @@ -264,7 +264,7 @@ impl LspAdapter for TypeScriptLspAdapter { async fn get_cached_ts_server_binary( container_dir: PathBuf, - node: &dyn NodeRuntime, + node: &NodeRuntime, ) -> Option { maybe!(async { let old_server_path = container_dir.join(TypeScriptLspAdapter::OLD_SERVER_PATH); @@ -293,7 +293,7 @@ async fn get_cached_ts_server_binary( } pub struct EsLintLspAdapter { - node: Arc, + node: NodeRuntime, } impl EsLintLspAdapter { @@ -310,7 +310,7 @@ impl EsLintLspAdapter { const FLAT_CONFIG_FILE_NAMES: &'static [&'static str] = &["eslint.config.js", "eslint.config.mjs", "eslint.config.cjs"]; - pub fn new(node: Arc) -> Self { + pub fn new(node: NodeRuntime) -> Self { EsLintLspAdapter { node } } } @@ -476,11 +476,11 @@ impl LspAdapter for EsLintLspAdapter { } self.node - .run_npm_subcommand(Some(&repo_root), "install", &[]) + .run_npm_subcommand(&repo_root, "install", &[]) .await?; self.node - .run_npm_subcommand(Some(&repo_root), "run-script", &["compile"]) + .run_npm_subcommand(&repo_root, "run-script", &["compile"]) .await?; } @@ -496,20 +496,20 @@ impl LspAdapter for EsLintLspAdapter { container_dir: PathBuf, _: &dyn LspAdapterDelegate, ) -> Option { - get_cached_eslint_server_binary(container_dir, &*self.node).await + get_cached_eslint_server_binary(container_dir, &self.node).await } async fn installation_test_binary( &self, container_dir: PathBuf, ) -> Option { - get_cached_eslint_server_binary(container_dir, &*self.node).await + get_cached_eslint_server_binary(container_dir, &self.node).await } } async fn get_cached_eslint_server_binary( container_dir: PathBuf, - node: &dyn NodeRuntime, + node: &NodeRuntime, ) -> Option { maybe!(async { // This is unfortunate but we don't know what the version is to build a path directly diff --git a/crates/languages/src/vtsls.rs b/crates/languages/src/vtsls.rs index 5ec31213840bb..3c1cf0fcbe151 100644 --- a/crates/languages/src/vtsls.rs +++ b/crates/languages/src/vtsls.rs @@ -20,13 +20,13 @@ fn typescript_server_binary_arguments(server_path: &Path) -> Vec { } pub struct VtslsLspAdapter { - node: Arc, + node: NodeRuntime, } impl VtslsLspAdapter { const SERVER_PATH: &'static str = "node_modules/@vtsls/language-server/bin/vtsls.js"; - pub fn new(node: Arc) -> Self { + pub fn new(node: NodeRuntime) -> Self { VtslsLspAdapter { node } } async fn tsdk_path(adapter: &Arc) -> &'static str { @@ -154,14 +154,14 @@ impl LspAdapter for VtslsLspAdapter { container_dir: PathBuf, _: &dyn LspAdapterDelegate, ) -> Option { - get_cached_ts_server_binary(container_dir, &*self.node).await + 
get_cached_ts_server_binary(container_dir, &self.node).await } async fn installation_test_binary( &self, container_dir: PathBuf, ) -> Option { - get_cached_ts_server_binary(container_dir, &*self.node).await + get_cached_ts_server_binary(container_dir, &self.node).await } fn code_action_kinds(&self) -> Option> { @@ -298,7 +298,7 @@ impl LspAdapter for VtslsLspAdapter { async fn get_cached_ts_server_binary( container_dir: PathBuf, - node: &dyn NodeRuntime, + node: &NodeRuntime, ) -> Option { maybe!(async { let server_path = container_dir.join(VtslsLspAdapter::SERVER_PATH); diff --git a/crates/languages/src/yaml.rs b/crates/languages/src/yaml.rs index 583961f4b1c0e..32ca73168ab2d 100644 --- a/crates/languages/src/yaml.rs +++ b/crates/languages/src/yaml.rs @@ -26,12 +26,12 @@ fn server_binary_arguments(server_path: &Path) -> Vec { } pub struct YamlLspAdapter { - node: Arc, + node: NodeRuntime, } impl YamlLspAdapter { const SERVER_NAME: LanguageServerName = LanguageServerName::new_static("yaml-language-server"); - pub fn new(node: Arc) -> Self { + pub fn new(node: NodeRuntime) -> Self { YamlLspAdapter { node } } } @@ -117,14 +117,14 @@ impl LspAdapter for YamlLspAdapter { container_dir: PathBuf, _: &dyn LspAdapterDelegate, ) -> Option { - get_cached_server_binary(container_dir, &*self.node).await + get_cached_server_binary(container_dir, &self.node).await } async fn installation_test_binary( &self, container_dir: PathBuf, ) -> Option { - get_cached_server_binary(container_dir, &*self.node).await + get_cached_server_binary(container_dir, &self.node).await } async fn workspace_configuration( @@ -157,7 +157,7 @@ impl LspAdapter for YamlLspAdapter { async fn get_cached_server_binary( container_dir: PathBuf, - node: &dyn NodeRuntime, + node: &NodeRuntime, ) -> Option { maybe!(async { let mut last_version_dir = None; diff --git a/crates/markdown/examples/markdown.rs b/crates/markdown/examples/markdown.rs index c2f3ab8158609..0514ebcf4e1e5 100644 --- a/crates/markdown/examples/markdown.rs +++ b/crates/markdown/examples/markdown.rs @@ -2,7 +2,7 @@ use assets::Assets; use gpui::{prelude::*, rgb, App, KeyBinding, StyleRefinement, View, WindowOptions}; use language::{language_settings::AllLanguageSettings, LanguageRegistry}; use markdown::{Markdown, MarkdownStyle}; -use node_runtime::FakeNodeRuntime; +use node_runtime::NodeRuntime; use settings::SettingsStore; use std::sync::Arc; use theme::LoadThemes; @@ -102,7 +102,7 @@ pub fn main() { }); cx.bind_keys([KeyBinding::new("cmd-c", markdown::Copy, None)]); - let node_runtime = FakeNodeRuntime::new(); + let node_runtime = NodeRuntime::unavailable(); theme::init(LoadThemes::JustBase, cx); let language_registry = LanguageRegistry::new(cx.background_executor().clone()); diff --git a/crates/markdown/examples/markdown_as_child.rs b/crates/markdown/examples/markdown_as_child.rs index 829e69436a6be..3700e64364dfd 100644 --- a/crates/markdown/examples/markdown_as_child.rs +++ b/crates/markdown/examples/markdown_as_child.rs @@ -2,7 +2,7 @@ use assets::Assets; use gpui::*; use language::{language_settings::AllLanguageSettings, LanguageRegistry}; use markdown::{Markdown, MarkdownStyle}; -use node_runtime::FakeNodeRuntime; +use node_runtime::NodeRuntime; use settings::SettingsStore; use std::sync::Arc; use theme::LoadThemes; @@ -28,7 +28,7 @@ pub fn main() { }); cx.bind_keys([KeyBinding::new("cmd-c", markdown::Copy, None)]); - let node_runtime = FakeNodeRuntime::new(); + let node_runtime = NodeRuntime::unavailable(); let language_registry = 
Arc::new(LanguageRegistry::new(cx.background_executor().clone())); languages::init(language_registry.clone(), node_runtime, cx); theme::init(LoadThemes::JustBase, cx); diff --git a/crates/node_runtime/Cargo.toml b/crates/node_runtime/Cargo.toml index b7aee583360cb..d852b7ebdf9aa 100644 --- a/crates/node_runtime/Cargo.toml +++ b/crates/node_runtime/Cargo.toml @@ -18,6 +18,7 @@ test-support = ["tempfile"] [dependencies] anyhow.workspace = true async-compression.workspace = true +async-watch.workspace = true async-tar.workspace = true async-trait.workspace = true async_zip.workspace = true @@ -32,6 +33,7 @@ smol.workspace = true tempfile = { workspace = true, optional = true } util.workspace = true walkdir = "2.5.0" +which.workspace = true [target.'cfg(windows)'.dependencies] async-std = { version = "1.12.0", features = ["unstable"] } diff --git a/crates/node_runtime/src/node_runtime.rs b/crates/node_runtime/src/node_runtime.rs index 4aa65ab6db804..72c74ce7cf983 100644 --- a/crates/node_runtime/src/node_runtime.rs +++ b/crates/node_runtime/src/node_runtime.rs @@ -5,7 +5,7 @@ pub use archive::extract_zip; use async_compression::futures::bufread::GzipDecoder; use async_tar::Archive; use futures::AsyncReadExt; -use http_client::HttpClient; +use http_client::{HttpClient, Uri}; use semver::Version; use serde::Deserialize; use smol::io::BufReader; @@ -23,60 +23,166 @@ use util::ResultExt; #[cfg(windows)] use smol::process::windows::CommandExt; -const VERSION: &str = "v22.5.1"; +#[derive(Clone, Debug, Default, Eq, PartialEq)] +pub struct NodeBinaryOptions { + pub allow_path_lookup: bool, + pub allow_binary_download: bool, + pub use_paths: Option<(PathBuf, PathBuf)>, +} -#[cfg(not(windows))] -const NODE_PATH: &str = "bin/node"; -#[cfg(windows)] -const NODE_PATH: &str = "node.exe"; +#[derive(Clone)] +pub struct NodeRuntime(Arc>); -#[cfg(not(windows))] -const NPM_PATH: &str = "bin/npm"; -#[cfg(windows)] -const NPM_PATH: &str = "node_modules/npm/bin/npm-cli.js"; - -enum ArchiveType { - TarGz, - Zip, +struct NodeRuntimeState { + http: Arc, + instance: Option>, + last_options: Option, + options: async_watch::Receiver>, } -#[derive(Debug, Deserialize)] -#[serde(rename_all = "kebab-case")] -pub struct NpmInfo { - #[serde(default)] - dist_tags: NpmInfoDistTags, - versions: Vec, -} +impl NodeRuntime { + pub fn new( + http: Arc, + options: async_watch::Receiver>, + ) -> Self { + NodeRuntime(Arc::new(Mutex::new(NodeRuntimeState { + http, + instance: None, + last_options: None, + options, + }))) + } -#[derive(Debug, Deserialize, Default)] -pub struct NpmInfoDistTags { - latest: Option, -} + pub fn unavailable() -> Self { + NodeRuntime(Arc::new(Mutex::new(NodeRuntimeState { + http: Arc::new(http_client::BlockedHttpClient), + instance: None, + last_options: None, + options: async_watch::channel(Some(NodeBinaryOptions::default())).1, + }))) + } -#[async_trait::async_trait] -pub trait NodeRuntime: Send + Sync { - async fn binary_path(&self) -> Result; - async fn node_environment_path(&self) -> Result; + async fn instance(&self) -> Result> { + let mut state = self.0.lock().await; - async fn run_npm_subcommand( + while state.options.borrow().is_none() { + state.options.changed().await?; + } + let options = state.options.borrow().clone().unwrap(); + if state.last_options.as_ref() != Some(&options) { + state.instance.take(); + } + if let Some(instance) = state.instance.as_ref() { + return Ok(instance.boxed_clone()); + } + + if let Some((node, npm)) = options.use_paths.as_ref() { + let instance = 
SystemNodeRuntime::new(node.clone(), npm.clone()).await?; + state.instance = Some(instance.boxed_clone()); + return Ok(instance); + } + + if options.allow_path_lookup { + if let Some(instance) = SystemNodeRuntime::detect().await { + state.instance = Some(instance.boxed_clone()); + return Ok(instance); + } + } + + let instance = if options.allow_binary_download { + ManagedNodeRuntime::install_if_needed(&state.http).await? + } else { + Box::new(UnavailableNodeRuntime) + }; + + state.instance = Some(instance.boxed_clone()); + return Ok(instance); + } + + pub async fn binary_path(&self) -> Result { + self.instance().await?.binary_path() + } + + pub async fn run_npm_subcommand( &self, - directory: Option<&Path>, + directory: &Path, subcommand: &str, args: &[&str], - ) -> Result; - - async fn npm_package_latest_version(&self, name: &str) -> Result; - - async fn npm_install_packages(&self, directory: &Path, packages: &[(&str, &str)]) - -> Result<()>; + ) -> Result { + let http = self.0.lock().await.http.clone(); + self.instance() + .await? + .run_npm_subcommand(Some(directory), http.proxy(), subcommand, args) + .await + } - async fn npm_package_installed_version( + pub async fn npm_package_installed_version( &self, local_package_directory: &Path, name: &str, - ) -> Result>; + ) -> Result> { + self.instance() + .await? + .npm_package_installed_version(local_package_directory, name) + .await + } - async fn should_install_npm_package( + pub async fn npm_package_latest_version(&self, name: &str) -> Result { + let http = self.0.lock().await.http.clone(); + let output = self + .instance() + .await? + .run_npm_subcommand( + None, + http.proxy(), + "info", + &[ + name, + "--json", + "--fetch-retry-mintimeout", + "2000", + "--fetch-retry-maxtimeout", + "5000", + "--fetch-timeout", + "5000", + ], + ) + .await?; + + let mut info: NpmInfo = serde_json::from_slice(&output.stdout)?; + info.dist_tags + .latest + .or_else(|| info.versions.pop()) + .ok_or_else(|| anyhow!("no version found for npm package {}", name)) + } + + pub async fn npm_install_packages( + &self, + directory: &Path, + packages: &[(&str, &str)], + ) -> Result<()> { + let packages: Vec<_> = packages + .iter() + .map(|(name, version)| format!("{name}@{version}")) + .collect(); + + let mut arguments: Vec<_> = packages.iter().map(|p| p.as_str()).collect(); + arguments.extend_from_slice(&[ + "--save-exact", + "--fetch-retry-mintimeout", + "2000", + "--fetch-retry-maxtimeout", + "5000", + "--fetch-timeout", + "5000", + ]); + + self.run_npm_subcommand(directory, "install", &arguments) + .await?; + Ok(()) + } + + pub async fn should_install_npm_package( &self, package_name: &str, local_executable_path: &Path, @@ -110,21 +216,78 @@ pub trait NodeRuntime: Send + Sync { } } -pub struct RealNodeRuntime { - http: Arc, - installation_lock: Mutex<()>, +enum ArchiveType { + TarGz, + Zip, } -impl RealNodeRuntime { - pub fn new(http: Arc) -> Arc { - Arc::new(RealNodeRuntime { - http, - installation_lock: Mutex::new(()), - }) +#[derive(Debug, Deserialize)] +#[serde(rename_all = "kebab-case")] +pub struct NpmInfo { + #[serde(default)] + dist_tags: NpmInfoDistTags, + versions: Vec, +} + +#[derive(Debug, Deserialize, Default)] +pub struct NpmInfoDistTags { + latest: Option, +} + +#[async_trait::async_trait] +trait NodeRuntimeTrait: Send + Sync { + fn boxed_clone(&self) -> Box; + fn binary_path(&self) -> Result; + + async fn run_npm_subcommand( + &self, + directory: Option<&Path>, + proxy: Option<&Uri>, + subcommand: &str, + args: &[&str], + ) -> Result; + + async 
fn npm_package_installed_version( + &self, + local_package_directory: &Path, + name: &str, + ) -> Result>; +} + +#[derive(Clone)] +struct ManagedNodeRuntime { + installation_path: PathBuf, +} + +impl ManagedNodeRuntime { + const VERSION: &str = "v22.5.1"; + + #[cfg(not(windows))] + const NODE_PATH: &str = "bin/node"; + #[cfg(windows)] + const NODE_PATH: &str = "node.exe"; + + #[cfg(not(windows))] + const NPM_PATH: &str = "bin/npm"; + #[cfg(windows)] + const NPM_PATH: &str = "node_modules/npm/bin/npm-cli.js"; + + async fn node_environment_path(&self) -> Result { + let node_binary = self.installation_path.join(Self::NODE_PATH); + let mut env_path = vec![node_binary + .parent() + .expect("invalid node binary path") + .to_path_buf()]; + + if let Some(existing_path) = std::env::var_os("PATH") { + let mut paths = std::env::split_paths(&existing_path).collect::>(); + env_path.append(&mut paths); + } + + std::env::join_paths(env_path).context("failed to create PATH env variable") } - async fn install_if_needed(&self) -> Result { - let _lock = self.installation_lock.lock().await; + async fn install_if_needed(http: &Arc) -> Result> { log::info!("Node runtime install_if_needed"); let os = match consts::OS { @@ -140,11 +303,12 @@ impl RealNodeRuntime { other => bail!("Running on unsupported architecture: {other}"), }; - let folder_name = format!("node-{VERSION}-{os}-{arch}"); + let version = Self::VERSION; + let folder_name = format!("node-{version}-{os}-{arch}"); let node_containing_dir = paths::support_dir().join("node"); let node_dir = node_containing_dir.join(folder_name); - let node_binary = node_dir.join(NODE_PATH); - let npm_file = node_dir.join(NPM_PATH); + let node_binary = node_dir.join(Self::NODE_PATH); + let npm_file = node_dir.join(Self::NPM_PATH); let mut command = Command::new(&node_binary); @@ -177,16 +341,16 @@ impl RealNodeRuntime { other => bail!("Running on unsupported os: {other}"), }; + let version = Self::VERSION; let file_name = format!( - "node-{VERSION}-{os}-{arch}.{extension}", + "node-{version}-{os}-{arch}.{extension}", extension = match archive_type { ArchiveType::TarGz => "tar.gz", ArchiveType::Zip => "zip", } ); - let url = format!("https://nodejs.org/dist/{VERSION}/{file_name}"); - let mut response = self - .http + let url = format!("https://nodejs.org/dist/{version}/{file_name}"); + let mut response = http .get(&url, Default::default(), true) .await .context("error downloading Node binary tarball")?; @@ -207,43 +371,32 @@ impl RealNodeRuntime { _ = fs::write(node_dir.join("blank_user_npmrc"), []).await; _ = fs::write(node_dir.join("blank_global_npmrc"), []).await; - anyhow::Ok(node_dir) + anyhow::Ok(Box::new(ManagedNodeRuntime { + installation_path: node_dir, + })) } } #[async_trait::async_trait] -impl NodeRuntime for RealNodeRuntime { - async fn binary_path(&self) -> Result { - let installation_path = self.install_if_needed().await?; - Ok(installation_path.join(NODE_PATH)) +impl NodeRuntimeTrait for ManagedNodeRuntime { + fn boxed_clone(&self) -> Box { + Box::new(self.clone()) } - async fn node_environment_path(&self) -> Result { - let installation_path = self.install_if_needed().await?; - let node_binary = installation_path.join(NODE_PATH); - let mut env_path = vec![node_binary - .parent() - .expect("invalid node binary path") - .to_path_buf()]; - - if let Some(existing_path) = std::env::var_os("PATH") { - let mut paths = std::env::split_paths(&existing_path).collect::>(); - env_path.append(&mut paths); - } - - Ok(std::env::join_paths(env_path).context("failed to 
create PATH env variable")?) + fn binary_path(&self) -> Result { + Ok(self.installation_path.join(Self::NODE_PATH)) } async fn run_npm_subcommand( &self, directory: Option<&Path>, + proxy: Option<&Uri>, subcommand: &str, args: &[&str], ) -> Result { let attempt = || async move { - let installation_path = self.install_if_needed().await?; - let node_binary = installation_path.join(NODE_PATH); - let npm_file = installation_path.join(NPM_PATH); + let node_binary = self.installation_path.join(Self::NODE_PATH); + let npm_file = self.installation_path.join(Self::NPM_PATH); let env_path = self.node_environment_path().await?; if smol::fs::metadata(&node_binary).await.is_err() { @@ -258,54 +411,17 @@ impl NodeRuntime for RealNodeRuntime { command.env_clear(); command.env("PATH", env_path); command.arg(npm_file).arg(subcommand); - command.args(["--cache".into(), installation_path.join("cache")]); + command.args(["--cache".into(), self.installation_path.join("cache")]); command.args([ "--userconfig".into(), - installation_path.join("blank_user_npmrc"), + self.installation_path.join("blank_user_npmrc"), ]); command.args([ "--globalconfig".into(), - installation_path.join("blank_global_npmrc"), + self.installation_path.join("blank_global_npmrc"), ]); command.args(args); - - if let Some(directory) = directory { - command.current_dir(directory); - command.args(["--prefix".into(), directory.to_path_buf()]); - } - - if let Some(proxy) = self.http.proxy() { - // Map proxy settings from `http://localhost:10809` to `http://127.0.0.1:10809` - // NodeRuntime without environment information can not parse `localhost` - // correctly. - // TODO: map to `[::1]` if we are using ipv6 - let proxy = proxy - .to_string() - .to_ascii_lowercase() - .replace("localhost", "127.0.0.1"); - - command.args(["--proxy", &proxy]); - } - - #[cfg(windows)] - { - // SYSTEMROOT is a critical environment variables for Windows. - if let Some(val) = std::env::var("SYSTEMROOT") - .context("Missing environment variable: SYSTEMROOT!") - .log_err() - { - command.env("SYSTEMROOT", val); - } - // Without ComSpec, the post-install will always fail. 
- if let Some(val) = std::env::var("ComSpec") - .context("Missing environment variable: ComSpec!") - .log_err() - { - command.env("ComSpec", val); - } - command.creation_flags(windows::Win32::System::Threading::CREATE_NO_WINDOW.0); - } - + configure_npm_command(&mut command, directory, proxy); command.output().await.map_err(|e| anyhow!("{e}")) }; @@ -332,182 +448,228 @@ impl NodeRuntime for RealNodeRuntime { output.map_err(|e| anyhow!("{e}")) } - - async fn npm_package_latest_version(&self, name: &str) -> Result { - let output = self - .run_npm_subcommand( - None, - "info", - &[ - name, - "--json", - "--fetch-retry-mintimeout", - "2000", - "--fetch-retry-maxtimeout", - "5000", - "--fetch-timeout", - "5000", - ], - ) - .await?; - - let mut info: NpmInfo = serde_json::from_slice(&output.stdout)?; - info.dist_tags - .latest - .or_else(|| info.versions.pop()) - .ok_or_else(|| anyhow!("no version found for npm package {}", name)) - } - async fn npm_package_installed_version( &self, local_package_directory: &Path, name: &str, ) -> Result> { - let mut package_json_path = local_package_directory.to_owned(); - package_json_path.extend(["node_modules", name, "package.json"]); - - let mut file = match fs::File::open(package_json_path).await { - Ok(file) => file, - Err(err) => { - if err.kind() == io::ErrorKind::NotFound { - return Ok(None); - } + read_package_installed_version(local_package_directory.join("node_modules"), name).await + } +} - Err(err)? - } - }; +#[derive(Clone)] +pub struct SystemNodeRuntime { + node: PathBuf, + npm: PathBuf, + global_node_modules: PathBuf, + scratch_dir: PathBuf, +} - #[derive(Deserialize)] - struct PackageJson { - version: String, +impl SystemNodeRuntime { + const MIN_VERSION: semver::Version = Version::new(18, 0, 0); + async fn new(node: PathBuf, npm: PathBuf) -> Result> { + let output = Command::new(&node) + .arg("--version") + .output() + .await + .with_context(|| format!("running node from {:?}", node))?; + if !output.status.success() { + anyhow::bail!( + "failed to run node --version. stdout: {}, stderr: {}", + String::from_utf8_lossy(&output.stdout), + String::from_utf8_lossy(&output.stderr), + ); + } + let version_str = String::from_utf8_lossy(&output.stdout); + let version = semver::Version::parse(version_str.trim().trim_start_matches('v'))?; + if version < Self::MIN_VERSION { + anyhow::bail!( + "node at {} is too old. 
want: {}, got: {}", + node.to_string_lossy(), + Self::MIN_VERSION, + version + ) } - let mut contents = String::new(); - file.read_to_string(&mut contents).await?; - let package_json: PackageJson = serde_json::from_str(&contents)?; - Ok(Some(package_json.version)) - } - - async fn npm_install_packages( - &self, - directory: &Path, - packages: &[(&str, &str)], - ) -> Result<()> { - let packages: Vec<_> = packages - .iter() - .map(|(name, version)| format!("{name}@{version}")) - .collect(); + let scratch_dir = paths::support_dir().join("node"); + fs::create_dir(&scratch_dir).await.ok(); + fs::create_dir(scratch_dir.join("cache")).await.ok(); + fs::write(scratch_dir.join("blank_user_npmrc"), []) + .await + .ok(); + fs::write(scratch_dir.join("blank_global_npmrc"), []) + .await + .ok(); - let mut arguments: Vec<_> = packages.iter().map(|p| p.as_str()).collect(); - arguments.extend_from_slice(&[ - "--save-exact", - "--fetch-retry-mintimeout", - "2000", - "--fetch-retry-maxtimeout", - "5000", - "--fetch-timeout", - "5000", - ]); + let mut this = Self { + node, + npm, + global_node_modules: PathBuf::default(), + scratch_dir, + }; + let output = this.run_npm_subcommand(None, None, "root", &["-g"]).await?; + this.global_node_modules = + PathBuf::from(String::from_utf8_lossy(&output.stdout).to_string()); - self.run_npm_subcommand(Some(directory), "install", &arguments) - .await?; - Ok(()) + Ok(Box::new(this)) } -} - -pub struct FakeNodeRuntime; -impl FakeNodeRuntime { - pub fn new() -> Arc { - Arc::new(Self) + async fn detect() -> Option> { + let node = which::which("node").ok()?; + let npm = which::which("npm").ok()?; + Self::new(node, npm).await.log_err() } } #[async_trait::async_trait] -impl NodeRuntime for FakeNodeRuntime { - async fn binary_path(&self) -> anyhow::Result { - unreachable!() +impl NodeRuntimeTrait for SystemNodeRuntime { + fn boxed_clone(&self) -> Box { + Box::new(self.clone()) } - async fn node_environment_path(&self) -> anyhow::Result { - unreachable!() + fn binary_path(&self) -> Result { + Ok(self.node.clone()) } async fn run_npm_subcommand( &self, - _: Option<&Path>, + directory: Option<&Path>, + proxy: Option<&Uri>, subcommand: &str, args: &[&str], ) -> anyhow::Result { - unreachable!("Should not run npm subcommand '{subcommand}' with args {args:?}") - } + let mut command = Command::new(self.node.clone()); + command + .env_clear() + .env("PATH", std::env::var_os("PATH").unwrap_or_default()) + .arg(self.npm.clone()) + .arg(subcommand) + .args(["--cache".into(), self.scratch_dir.join("cache")]) + .args([ + "--userconfig".into(), + self.scratch_dir.join("blank_user_npmrc"), + ]) + .args([ + "--globalconfig".into(), + self.scratch_dir.join("blank_global_npmrc"), + ]) + .args(args); + configure_npm_command(&mut command, directory, proxy); + let output = command.output().await?; + if !output.status.success() { + return Err(anyhow!( + "failed to execute npm {subcommand} subcommand:\nstdout: {:?}\nstderr: {:?}", + String::from_utf8_lossy(&output.stdout), + String::from_utf8_lossy(&output.stderr) + )); + } - async fn npm_package_latest_version(&self, name: &str) -> anyhow::Result { - unreachable!("Should not query npm package '{name}' for latest version") + Ok(output) } async fn npm_package_installed_version( &self, - _local_package_directory: &Path, + local_package_directory: &Path, name: &str, ) -> Result> { - unreachable!("Should not query npm package '{name}' for installed version") - } - - async fn npm_install_packages( - &self, - _: &Path, - packages: &[(&str, &str)], - ) -> 
anyhow::Result<()> { - unreachable!("Should not install packages {packages:?}") + read_package_installed_version(local_package_directory.join("node_modules"), name).await + // todo: allow returning a globally installed version (requires callers not to hard-code the path) } } -// TODO: Remove this when headless binary can run node -pub struct DummyNodeRuntime; +async fn read_package_installed_version( + node_module_directory: PathBuf, + name: &str, +) -> Result> { + let package_json_path = node_module_directory.join(name).join("package.json"); + + let mut file = match fs::File::open(package_json_path).await { + Ok(file) => file, + Err(err) => { + if err.kind() == io::ErrorKind::NotFound { + return Ok(None); + } + + Err(err)? + } + }; -impl DummyNodeRuntime { - pub fn new() -> Arc { - Arc::new(Self) + #[derive(Deserialize)] + struct PackageJson { + version: String, } + + let mut contents = String::new(); + file.read_to_string(&mut contents).await?; + let package_json: PackageJson = serde_json::from_str(&contents)?; + Ok(Some(package_json.version)) } +pub struct UnavailableNodeRuntime; + #[async_trait::async_trait] -impl NodeRuntime for DummyNodeRuntime { - async fn binary_path(&self) -> anyhow::Result { - anyhow::bail!("Dummy Node Runtime") +impl NodeRuntimeTrait for UnavailableNodeRuntime { + fn boxed_clone(&self) -> Box { + Box::new(UnavailableNodeRuntime) } - - async fn node_environment_path(&self) -> anyhow::Result { - anyhow::bail!("Dummy node runtime") + fn binary_path(&self) -> Result { + bail!("binary_path: no node runtime available") } async fn run_npm_subcommand( &self, _: Option<&Path>, - _subcommand: &str, - _args: &[&str], + _: Option<&Uri>, + _: &str, + _: &[&str], ) -> anyhow::Result { - anyhow::bail!("Dummy node runtime") - } - - async fn npm_package_latest_version(&self, _name: &str) -> anyhow::Result { - anyhow::bail!("Dummy node runtime") + bail!("run_npm_subcommand: no node runtime available") } async fn npm_package_installed_version( &self, _local_package_directory: &Path, - _name: &str, + _: &str, ) -> Result> { - anyhow::bail!("Dummy node runtime") + bail!("npm_package_installed_version: no node runtime available") } +} - async fn npm_install_packages( - &self, - _: &Path, - _packages: &[(&str, &str)], - ) -> anyhow::Result<()> { - anyhow::bail!("Dummy node runtime") +fn configure_npm_command(command: &mut Command, directory: Option<&Path>, proxy: Option<&Uri>) { + if let Some(directory) = directory { + command.current_dir(directory); + command.args(["--prefix".into(), directory.to_path_buf()]); + } + + if let Some(proxy) = proxy { + // Map proxy settings from `http://localhost:10809` to `http://127.0.0.1:10809` + // NodeRuntime without environment information can not parse `localhost` + // correctly. + // TODO: map to `[::1]` if we are using ipv6 + let proxy = proxy + .to_string() + .to_ascii_lowercase() + .replace("localhost", "127.0.0.1"); + + command.args(["--proxy", &proxy]); + } + + #[cfg(windows)] + { + // SYSTEMROOT is a critical environment variables for Windows. + if let Some(val) = std::env::var("SYSTEMROOT") + .context("Missing environment variable: SYSTEMROOT!") + .log_err() + { + command.env("SYSTEMROOT", val); + } + // Without ComSpec, the post-install will always fail. 
+ if let Some(val) = std::env::var("ComSpec") + .context("Missing environment variable: ComSpec!") + .log_err() + { + command.env("ComSpec", val); + } + command.creation_flags(windows::Win32::System::Threading::CREATE_NO_WINDOW.0); } } diff --git a/crates/prettier/src/prettier.rs b/crates/prettier/src/prettier.rs index 59ed915453996..012beb3fd7ab2 100644 --- a/crates/prettier/src/prettier.rs +++ b/crates/prettier/src/prettier.rs @@ -138,7 +138,7 @@ impl Prettier { pub async fn start( _: LanguageServerId, prettier_dir: PathBuf, - _: Arc, + _: NodeRuntime, _: AsyncAppContext, ) -> anyhow::Result { Ok(Self::Test(TestPrettier { @@ -151,7 +151,7 @@ impl Prettier { pub async fn start( server_id: LanguageServerId, prettier_dir: PathBuf, - node: Arc, + node: NodeRuntime, cx: AsyncAppContext, ) -> anyhow::Result { use lsp::LanguageServerBinary; diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 6673f9da1ddd7..6c71d4baebf56 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -17,7 +17,7 @@ use async_trait::async_trait; use client::{proto, TypedEnvelope}; use collections::{btree_map, BTreeMap, HashMap, HashSet}; use futures::{ - future::{join_all, BoxFuture, Shared}, + future::{join_all, Shared}, select, stream::FuturesUnordered, AsyncWriteExt, Future, FutureExt, StreamExt, @@ -27,7 +27,7 @@ use gpui::{ AppContext, AsyncAppContext, Context, Entity, EventEmitter, Model, ModelContext, PromptLevel, Task, WeakModel, }; -use http_client::{AsyncBody, HttpClient, Request, Response, Uri}; +use http_client::{BlockedHttpClient, HttpClient}; use language::{ language_settings::{ all_language_settings, language_settings, AllLanguageSettings, FormatOnSave, Formatter, @@ -7979,35 +7979,6 @@ impl LspAdapterDelegate for LocalLspAdapterDelegate { } } -struct BlockedHttpClient; - -impl HttpClient for BlockedHttpClient { - fn send( - &self, - _req: Request, - ) -> BoxFuture<'static, Result, anyhow::Error>> { - Box::pin(async { - Err(std::io::Error::new( - std::io::ErrorKind::PermissionDenied, - "ssh host blocked http connection", - ) - .into()) - }) - } - - fn proxy(&self) -> Option<&Uri> { - None - } - - fn send_with_redirect_policy( - &self, - req: Request, - _: bool, - ) -> BoxFuture<'static, Result, anyhow::Error>> { - self.send(req) - } -} - struct SshLspAdapterDelegate { lsp_store: WeakModel, worktree: worktree::Snapshot, diff --git a/crates/project/src/prettier_store.rs b/crates/project/src/prettier_store.rs index 82bd8464b2e53..65e2aa2e7673e 100644 --- a/crates/project/src/prettier_store.rs +++ b/crates/project/src/prettier_store.rs @@ -30,7 +30,7 @@ use crate::{ }; pub struct PrettierStore { - node: Arc, + node: NodeRuntime, fs: Arc, languages: Arc, worktree_store: Model, @@ -52,7 +52,7 @@ impl EventEmitter for PrettierStore {} impl PrettierStore { pub fn new( - node: Arc, + node: NodeRuntime, fs: Arc, languages: Arc, worktree_store: Model, @@ -212,7 +212,7 @@ impl PrettierStore { } fn start_prettier( - node: Arc, + node: NodeRuntime, prettier_dir: PathBuf, worktree_id: Option, cx: &mut ModelContext, @@ -241,7 +241,7 @@ impl PrettierStore { } fn start_default_prettier( - node: Arc, + node: NodeRuntime, worktree_id: Option, cx: &mut ModelContext, ) -> Task> { @@ -749,7 +749,7 @@ impl DefaultPrettier { pub fn prettier_task( &mut self, - node: &Arc, + node: &NodeRuntime, worktree_id: Option, cx: &mut ModelContext, ) -> Option>> { @@ -767,7 +767,7 @@ impl DefaultPrettier { impl PrettierInstance { pub fn prettier_task( &mut self, - node: &Arc, + 
node: &NodeRuntime, prettier_dir: Option<&Path>, worktree_id: Option, cx: &mut ModelContext, @@ -786,7 +786,7 @@ impl PrettierInstance { None => match prettier_dir { Some(prettier_dir) => { let new_task = PrettierStore::start_prettier( - Arc::clone(node), + node.clone(), prettier_dir.to_path_buf(), worktree_id, cx, @@ -797,7 +797,7 @@ impl PrettierInstance { } None => { self.attempt += 1; - let node = Arc::clone(node); + let node = node.clone(); cx.spawn(|prettier_store, mut cx| async move { prettier_store .update(&mut cx, |_, cx| { @@ -818,7 +818,7 @@ impl PrettierInstance { async fn install_prettier_packages( fs: &dyn Fs, plugins_to_install: HashSet>, - node: Arc, + node: NodeRuntime, ) -> anyhow::Result<()> { let packages_to_versions = future::try_join_all( plugins_to_install diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index dc9337674b7eb..0015af380292e 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -153,7 +153,7 @@ pub struct Project { git_diff_debouncer: DebouncedDelay, remotely_created_models: Arc>, terminals: Terminals, - node: Option>, + node: Option, tasks: Model, hosted_project_id: Option, dev_server_project_id: Option, @@ -579,7 +579,7 @@ impl Project { pub fn local( client: Arc, - node: Arc, + node: NodeRuntime, user_store: Model, languages: Arc, fs: Arc, @@ -675,7 +675,7 @@ impl Project { pub fn ssh( ssh: Arc, client: Arc, - node: Arc, + node: NodeRuntime, user_store: Model, languages: Arc, fs: Arc, @@ -1064,7 +1064,7 @@ impl Project { .update(|cx| { Project::local( client, - node_runtime::FakeNodeRuntime::new(), + node_runtime::NodeRuntime::unavailable(), user_store, Arc::new(languages), fs, @@ -1104,7 +1104,7 @@ impl Project { let project = cx.update(|cx| { Project::local( client, - node_runtime::FakeNodeRuntime::new(), + node_runtime::NodeRuntime::unavailable(), user_store, Arc::new(languages), fs, @@ -1157,7 +1157,7 @@ impl Project { self.user_store.clone() } - pub fn node_runtime(&self) -> Option<&Arc> { + pub fn node_runtime(&self) -> Option<&NodeRuntime> { self.node.as_ref() } diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index 904efe0a6b01f..d6f5600a551ef 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -34,6 +34,10 @@ pub struct ProjectSettings { #[serde(default)] pub git: GitSettings, + /// Configuration for Node-related features + #[serde(default)] + pub node: NodeBinarySettings, + /// Configuration for how direnv configuration should be loaded #[serde(default)] pub load_direnv: DirenvSettings, @@ -43,6 +47,17 @@ pub struct ProjectSettings { pub session: SessionSettings, } +#[derive(Debug, Clone, Default, PartialEq, Serialize, Deserialize, JsonSchema)] +pub struct NodeBinarySettings { + /// The path to the node binary + pub path: Option, + /// The path to the npm binary Zed should use (defaults to .path/../npm) + pub npm_path: Option, + /// If disabled, zed will download its own copy of node. 
+ #[serde(default)] + pub disable_path_lookup: Option, +} + #[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema)] #[serde(rename_all = "snake_case")] pub enum DirenvSettings { diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index 0d644a64a6aa7..87c9583077c4d 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -2,7 +2,7 @@ use anyhow::{anyhow, Result}; use fs::Fs; use gpui::{AppContext, AsyncAppContext, Context, Model, ModelContext}; use language::{proto::serialize_operation, Buffer, BufferEvent, LanguageRegistry}; -use node_runtime::DummyNodeRuntime; +use node_runtime::NodeRuntime; use project::{ buffer_store::{BufferStore, BufferStoreEvent}, project_settings::SettingsObserver, @@ -57,7 +57,7 @@ impl HeadlessProject { }); let prettier_store = cx.new_model(|cx| { PrettierStore::new( - DummyNodeRuntime::new(), + NodeRuntime::unavailable(), fs.clone(), languages.clone(), worktree_store.clone(), diff --git a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs index b5ab1c40070a0..ba59d310c81b8 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -9,7 +9,7 @@ use language::{ Buffer, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageRegistry, LanguageServerName, }; use lsp::{CompletionContext, CompletionResponse, CompletionTriggerKind}; -use node_runtime::FakeNodeRuntime; +use node_runtime::NodeRuntime; use project::{ search::{SearchQuery, SearchResult}, Project, @@ -502,7 +502,7 @@ fn build_project(ssh: Arc, cx: &mut TestAppContext) -> Model, pub fs: Arc, pub build_window_options: fn(Option, &mut AppContext) -> WindowOptions, - pub node_runtime: Arc, + pub node_runtime: NodeRuntime, pub session: Model, } @@ -590,7 +590,7 @@ impl AppState { #[cfg(any(test, feature = "test-support"))] pub fn test(cx: &mut AppContext) -> Arc { - use node_runtime::FakeNodeRuntime; + use node_runtime::NodeRuntime; use session::Session; use settings::SettingsStore; use ui::Context as _; @@ -619,7 +619,7 @@ impl AppState { languages, user_store, workspace_store, - node_runtime: FakeNodeRuntime::new(), + node_runtime: NodeRuntime::unavailable(), build_window_options: |_, _| Default::default(), session, }) @@ -4418,7 +4418,7 @@ impl Workspace { #[cfg(any(test, feature = "test-support"))] pub fn test_new(project: Model, cx: &mut ViewContext) -> Self { - use node_runtime::FakeNodeRuntime; + use node_runtime::NodeRuntime; use session::Session; let client = project.read(cx).client(); @@ -4434,7 +4434,7 @@ impl Workspace { user_store, fs: project.read(cx).fs().clone(), build_window_options: |_, _| Default::default(), - node_runtime: FakeNodeRuntime::new(), + node_runtime: NodeRuntime::unavailable(), session, }); let workspace = Self::new(Default::default(), project, app_state, cx); diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index ad2e7cd48c67d..65724480f6233 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -19,6 +19,7 @@ activity_indicator.workspace = true anyhow.workspace = true assets.workspace = true assistant.workspace = true +async-watch.workspace = true audio.workspace = true auto_update.workspace = true backtrace = "0.3" @@ -92,6 +93,7 @@ serde_json.workspace = true session.workspace = true settings.workspace = true settings_ui.workspace = true +shellexpand.workspace = true simplelog.workspace = true smol.workspace = true 
snippet_provider.workspace = true diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index d3eb97c9aa506..309931f616352 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -29,8 +29,9 @@ use language::LanguageRegistry; use log::LevelFilter; use assets::Assets; -use node_runtime::RealNodeRuntime; +use node_runtime::{NodeBinaryOptions, NodeRuntime}; use parking_lot::Mutex; +use project::project_settings::ProjectSettings; use recent_projects::open_ssh_project; use release_channel::{AppCommitSha, AppVersion}; use session::{AppSession, Session}; @@ -43,7 +44,7 @@ use std::{ env, fs::OpenOptions, io::{IsTerminal, Write}, - path::Path, + path::{Path, PathBuf}, process, sync::Arc, }; @@ -477,7 +478,32 @@ fn main() { let mut languages = LanguageRegistry::new(cx.background_executor().clone()); languages.set_language_server_download_dir(paths::languages_dir().clone()); let languages = Arc::new(languages); - let node_runtime = RealNodeRuntime::new(client.http_client()); + let (tx, rx) = async_watch::channel(None); + cx.observe_global::(move |cx| { + let settings = &ProjectSettings::get_global(cx).node; + let options = NodeBinaryOptions { + allow_path_lookup: !settings.disable_path_lookup.unwrap_or_default(), + // TODO: Expose this setting + allow_binary_download: true, + use_paths: settings.path.as_ref().map(|node_path| { + let node_path = PathBuf::from(shellexpand::tilde(node_path).as_ref()); + let npm_path = settings + .npm_path + .as_ref() + .map(|path| PathBuf::from(shellexpand::tilde(&path).as_ref())); + ( + node_path.clone(), + npm_path.unwrap_or_else(|| { + let base_path = PathBuf::new(); + node_path.parent().unwrap_or(&base_path).join("npm") + }), + ) + }), + }; + tx.send(Some(options)).log_err(); + }) + .detach(); + let node_runtime = NodeRuntime::new(client.http_client(), rx); language::init(cx); languages::init(languages.clone(), node_runtime.clone(), cx); diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index b0e023f42e0a0..8f4f1af24331c 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -3365,7 +3365,7 @@ mod tests { cx.set_global(settings); let languages = LanguageRegistry::test(cx.executor()); let languages = Arc::new(languages); - let node_runtime = node_runtime::FakeNodeRuntime::new(); + let node_runtime = node_runtime::NodeRuntime::unavailable(); cx.update(|cx| { languages::init(languages.clone(), node_runtime, cx); }); From d989183f94725f4b2f42c6e7db79e37e0fdbddd5 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Mon, 23 Sep 2024 16:21:24 -0600 Subject: [PATCH 024/228] Remove `Debug` constraint on `SumTree` (and its related traits/structs) (#18248) Release Notes: - N/A Co-authored-by: Nathan --- crates/editor/src/selections_collection.rs | 4 +- crates/sum_tree/src/cursor.rs | 6 +-- crates/sum_tree/src/sum_tree.rs | 53 ++++++++++++++++++++-- crates/sum_tree/src/tree_map.rs | 34 +++++++------- 4 files changed, 69 insertions(+), 28 deletions(-) diff --git a/crates/editor/src/selections_collection.rs b/crates/editor/src/selections_collection.rs index 35df9c1b53f72..c85e60fdaa92e 100644 --- a/crates/editor/src/selections_collection.rs +++ b/crates/editor/src/selections_collection.rs @@ -109,7 +109,7 @@ impl SelectionsCollection { pub fn all<'a, D>(&self, cx: &AppContext) -> Vec> where - D: 'a + TextDimension + Ord + Sub + std::fmt::Debug, + D: 'a + TextDimension + Ord + Sub, { let disjoint_anchors = &self.disjoint; let mut disjoint = @@ -850,7 +850,7 @@ pub(crate) fn resolve_multiple<'a, D, I>( snapshot: &MultiBufferSnapshot, ) -> 
impl 'a + Iterator> where - D: TextDimension + Ord + Sub + std::fmt::Debug, + D: TextDimension + Ord + Sub, I: 'a + IntoIterator>, { let (to_summarize, selections) = selections.into_iter().tee(); diff --git a/crates/sum_tree/src/cursor.rs b/crates/sum_tree/src/cursor.rs index 6da43a8de5ce3..773e7db88bad3 100644 --- a/crates/sum_tree/src/cursor.rs +++ b/crates/sum_tree/src/cursor.rs @@ -431,11 +431,9 @@ where aggregate: &mut dyn SeekAggregate<'a, T>, cx: &::Context, ) -> bool { - debug_assert!( + assert!( target.cmp(&self.position, cx) >= Ordering::Equal, - "cannot seek backward from {:?} to {:?}", - self.position, - target + "cannot seek backward", ); if !self.did_seek { diff --git a/crates/sum_tree/src/sum_tree.rs b/crates/sum_tree/src/sum_tree.rs index ca351d67cea76..965413d3190aa 100644 --- a/crates/sum_tree/src/sum_tree.rs +++ b/crates/sum_tree/src/sum_tree.rs @@ -34,7 +34,7 @@ pub trait KeyedItem: Item { /// /// Each Summary type can have multiple [`Dimensions`] that it measures, /// which can be used to navigate the tree -pub trait Summary: Clone + fmt::Debug { +pub trait Summary: Clone { type Context; fn zero(cx: &Self::Context) -> Self; @@ -49,7 +49,7 @@ pub trait Summary: Clone + fmt::Debug { /// # Example: /// Zed's rope has a `TextSummary` type that summarizes lines, characters, and bytes. /// Each of these are different dimensions we may want to seek to -pub trait Dimension<'a, S: Summary>: Clone + fmt::Debug { +pub trait Dimension<'a, S: Summary>: Clone { fn zero(cx: &S::Context) -> Self; fn add_summary(&mut self, summary: &'a S, cx: &S::Context); @@ -71,7 +71,7 @@ impl<'a, T: Summary> Dimension<'a, T> for T { } } -pub trait SeekTarget<'a, S: Summary, D: Dimension<'a, S>>: fmt::Debug { +pub trait SeekTarget<'a, S: Summary, D: Dimension<'a, S>> { fn cmp(&self, cursor_location: &D, cx: &S::Context) -> Ordering; } @@ -173,9 +173,19 @@ impl Bias { /// The maximum number of items per node is `TREE_BASE * 2`. /// /// Any [`Dimension`] supported by the [`Summary`] type can be used to seek to a specific location in the tree. -#[derive(Debug, Clone)] +#[derive(Clone)] pub struct SumTree(Arc>); +impl fmt::Debug for SumTree +where + T: fmt::Debug + Item, + T::Summary: fmt::Debug, +{ + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_tuple("SumTree").field(&self.0).finish() + } +} + impl SumTree { pub fn new(cx: &::Context) -> Self { SumTree(Arc::new(Node::Leaf { @@ -763,7 +773,7 @@ where } } -#[derive(Clone, Debug)] +#[derive(Clone)] pub enum Node { Internal { height: u8, @@ -778,6 +788,39 @@ pub enum Node { }, } +impl fmt::Debug for Node +where + T: Item + fmt::Debug, + T::Summary: fmt::Debug, +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Node::Internal { + height, + summary, + child_summaries, + child_trees, + } => f + .debug_struct("Internal") + .field("height", height) + .field("summary", summary) + .field("child_summaries", child_summaries) + .field("child_trees", child_trees) + .finish(), + Node::Leaf { + summary, + items, + item_summaries, + } => f + .debug_struct("Leaf") + .field("summary", summary) + .field("items", items) + .field("item_summaries", item_summaries) + .finish(), + } + } +} + impl Node { fn is_leaf(&self) -> bool { matches!(self, Node::Leaf { .. 
}) diff --git a/crates/sum_tree/src/tree_map.rs b/crates/sum_tree/src/tree_map.rs index 72465b1a99cab..b7eadb566d3ed 100644 --- a/crates/sum_tree/src/tree_map.rs +++ b/crates/sum_tree/src/tree_map.rs @@ -5,8 +5,8 @@ use crate::{Bias, Dimension, Edit, Item, KeyedItem, SeekTarget, SumTree, Summary #[derive(Clone, PartialEq, Eq)] pub struct TreeMap(SumTree>) where - K: Clone + Debug + Ord, - V: Clone + Debug; + K: Clone + Ord, + V: Clone; #[derive(Clone, Debug, PartialEq, Eq)] pub struct MapEntry { @@ -35,9 +35,9 @@ impl<'a, K> Default for MapKeyRef<'a, K> { #[derive(Clone)] pub struct TreeSet(TreeMap) where - K: Clone + Debug + Ord; + K: Clone + Ord; -impl TreeMap { +impl TreeMap { pub fn from_ordered_entries(entries: impl IntoIterator) -> Self { let tree = SumTree::from_iter( entries @@ -172,7 +172,7 @@ impl TreeMap { } } -impl Debug for TreeMap +impl Debug for TreeMap where K: Clone + Debug + Ord, V: Clone + Debug, @@ -185,7 +185,7 @@ where #[derive(Debug)] struct MapSeekTargetAdaptor<'a, T>(&'a T); -impl<'a, K: Debug + Clone + Ord, T: MapSeekTarget> SeekTarget<'a, MapKey, MapKeyRef<'a, K>> +impl<'a, K: Clone + Ord, T: MapSeekTarget> SeekTarget<'a, MapKey, MapKeyRef<'a, K>> for MapSeekTargetAdaptor<'_, T> { fn cmp(&self, cursor_location: &MapKeyRef, _: &()) -> Ordering { @@ -197,11 +197,11 @@ impl<'a, K: Debug + Clone + Ord, T: MapSeekTarget> SeekTarget<'a, MapKey, } } -pub trait MapSeekTarget: Debug { +pub trait MapSeekTarget { fn cmp_cursor(&self, cursor_location: &K) -> Ordering; } -impl MapSeekTarget for K { +impl MapSeekTarget for K { fn cmp_cursor(&self, cursor_location: &K) -> Ordering { self.cmp(cursor_location) } @@ -209,8 +209,8 @@ impl MapSeekTarget for K { impl Default for TreeMap where - K: Clone + Debug + Ord, - V: Clone + Debug, + K: Clone + Ord, + V: Clone, { fn default() -> Self { Self(Default::default()) @@ -219,7 +219,7 @@ where impl Item for MapEntry where - K: Clone + Debug + Ord, + K: Clone + Ord, V: Clone, { type Summary = MapKey; @@ -231,7 +231,7 @@ where impl KeyedItem for MapEntry where - K: Clone + Debug + Ord, + K: Clone + Ord, V: Clone, { type Key = MapKey; @@ -243,7 +243,7 @@ where impl Summary for MapKey where - K: Clone + Debug, + K: Clone, { type Context = (); @@ -258,7 +258,7 @@ where impl<'a, K> Dimension<'a, MapKey> for MapKeyRef<'a, K> where - K: Clone + Debug + Ord, + K: Clone + Ord, { fn zero(_cx: &()) -> Self { Default::default() @@ -271,7 +271,7 @@ where impl<'a, K> SeekTarget<'a, MapKey, MapKeyRef<'a, K>> for MapKeyRef<'_, K> where - K: Clone + Debug + Ord, + K: Clone + Ord, { fn cmp(&self, cursor_location: &MapKeyRef, _: &()) -> Ordering { Ord::cmp(&self.0, &cursor_location.0) @@ -280,7 +280,7 @@ where impl Default for TreeSet where - K: Clone + Debug + Ord, + K: Clone + Ord, { fn default() -> Self { Self(Default::default()) @@ -289,7 +289,7 @@ where impl TreeSet where - K: Clone + Debug + Ord, + K: Clone + Ord, { pub fn from_ordered_entries(entries: impl IntoIterator) -> Self { Self(TreeMap::from_ordered_entries( From 20c06545b6c9d51e6329f3194bc89123b7f7f9f4 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Mon, 23 Sep 2024 15:47:25 -0700 Subject: [PATCH 025/228] SSH Remoting: Add the ability to resolve file paths on the remote host (#18250) Release Notes: - N/A --- crates/project/src/project.rs | 67 +++++++++++-------- crates/proto/proto/zed.proto | 15 ++++- crates/proto/src/proto.rs | 8 ++- crates/remote_server/src/headless_project.rs | 17 +++++ .../remote_server/src/remote_editing_tests.rs | 45 ++++++++++++- 5 files changed, 119 insertions(+), 
33 deletions(-) diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 0015af380292e..199b5a8f5c575 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -3037,15 +3037,11 @@ impl Project { buffer: &Model, cx: &mut ModelContext, ) -> Task> { - // TODO: ssh based remoting. - if self.ssh_session.is_some() { - return Task::ready(None); - } - - if self.is_local_or_ssh() { - let expanded = PathBuf::from(shellexpand::tilde(&path).into_owned()); + let path_buf = PathBuf::from(path); + if path_buf.is_absolute() || path.starts_with("~") { + if self.is_local() { + let expanded = PathBuf::from(shellexpand::tilde(&path).into_owned()); - if expanded.is_absolute() { let fs = self.fs.clone(); cx.background_executor().spawn(async move { let path = expanded.as_path(); @@ -3053,16 +3049,24 @@ impl Project { exists.then(|| ResolvedPath::AbsPath(expanded)) }) + } else if let Some(ssh_session) = self.ssh_session.as_ref() { + let request = ssh_session.request(proto::CheckFileExists { + project_id: SSH_PROJECT_ID, + path: path.to_string(), + }); + cx.background_executor().spawn(async move { + let response = request.await.log_err()?; + if response.exists { + Some(ResolvedPath::AbsPath(PathBuf::from(response.path))) + } else { + None + } + }) } else { - self.resolve_path_in_worktrees(expanded, buffer, cx) - } - } else { - let path = PathBuf::from(path); - if path.is_absolute() || path.starts_with("~") { return Task::ready(None); } - - self.resolve_path_in_worktrees(path, buffer, cx) + } else { + self.resolve_path_in_worktrees(path_buf, buffer, cx) } } @@ -4016,17 +4020,7 @@ impl Project { } pub fn worktree_metadata_protos(&self, cx: &AppContext) -> Vec { - self.worktrees(cx) - .map(|worktree| { - let worktree = worktree.read(cx); - proto::WorktreeMetadata { - id: worktree.id().to_proto(), - root_name: worktree.root_name().into(), - visible: worktree.is_visible(), - abs_path: worktree.abs_path().to_string_lossy().into(), - } - }) - .collect() + self.worktree_store.read(cx).worktree_metadata_protos(cx) } fn set_worktrees_from_proto( @@ -4035,10 +4029,9 @@ impl Project { cx: &mut ModelContext, ) -> Result<()> { cx.notify(); - let result = self.worktree_store.update(cx, |worktree_store, cx| { + self.worktree_store.update(cx, |worktree_store, cx| { worktree_store.set_worktrees_from_proto(worktrees, self.replica_id(), cx) - }); - result + }) } fn set_collaborators_from_proto( @@ -4547,6 +4540,22 @@ pub enum ResolvedPath { AbsPath(PathBuf), } +impl ResolvedPath { + pub fn abs_path(&self) -> Option<&Path> { + match self { + Self::AbsPath(path) => Some(path.as_path()), + _ => None, + } + } + + pub fn project_path(&self) -> Option<&ProjectPath> { + match self { + Self::ProjectPath(path) => Some(&path), + _ => None, + } + } +} + impl Item for Buffer { fn try_open( project: &Model, diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index a18bbe8ecf514..475ed139edfb8 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -293,7 +293,10 @@ message Envelope { TryExec try_exec = 252; ReadTextFile read_text_file = 253; - ReadTextFileResponse read_text_file_response = 254; // current max + ReadTextFileResponse read_text_file_response = 254; + + CheckFileExists check_file_exists = 255; + CheckFileExistsResponse check_file_exists_response = 256; // current max } reserved 158 to 161; @@ -2574,3 +2577,13 @@ message TryExec { message TryExecResponse { string text = 1; } + +message CheckFileExists { + uint64 project_id = 1; + string path 
= 2; +} + +message CheckFileExistsResponse { + bool exists = 1; + string path = 2; +} diff --git a/crates/proto/src/proto.rs b/crates/proto/src/proto.rs index b5a00d16704c4..4146a47409ad7 100644 --- a/crates/proto/src/proto.rs +++ b/crates/proto/src/proto.rs @@ -372,7 +372,9 @@ messages!( (ShellEnvResponse, Foreground), (TryExec, Foreground), (ReadTextFile, Foreground), - (ReadTextFileResponse, Foreground) + (ReadTextFileResponse, Foreground), + (CheckFileExists, Background), + (CheckFileExistsResponse, Background) ); request_messages!( @@ -501,6 +503,7 @@ request_messages!( (ShellEnv, ShellEnvResponse), (ReadTextFile, ReadTextFileResponse), (TryExec, Ack), + (CheckFileExists, CheckFileExistsResponse) ); entity_messages!( @@ -578,7 +581,8 @@ entity_messages!( WhichCommand, ShellEnv, TryExec, - ReadTextFile + ReadTextFile, + CheckFileExists, ); entity_messages!( diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index 87c9583077c4d..043f7e95ee026 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -108,6 +108,7 @@ impl HeadlessProject { session.subscribe_to_entity(SSH_PROJECT_ID, &settings_observer); client.add_request_handler(cx.weak_model(), Self::handle_list_remote_directory); + client.add_request_handler(cx.weak_model(), Self::handle_check_file_exists); client.add_model_request_handler(Self::handle_add_worktree); client.add_model_request_handler(Self::handle_open_buffer_by_path); @@ -298,4 +299,20 @@ impl HeadlessProject { } Ok(proto::ListRemoteDirectoryResponse { entries }) } + + pub async fn handle_check_file_exists( + this: Model, + envelope: TypedEnvelope, + cx: AsyncAppContext, + ) -> Result { + let fs = cx.read_model(&this, |this, _| this.fs.clone())?; + let expanded = shellexpand::tilde(&envelope.payload.path).to_string(); + + let exists = fs.is_file(&PathBuf::from(expanded.clone())).await; + + Ok(proto::CheckFileExistsResponse { + exists, + path: expanded, + }) + } } diff --git a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs index ba59d310c81b8..18eb12b445b97 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -12,7 +12,7 @@ use lsp::{CompletionContext, CompletionResponse, CompletionTriggerKind}; use node_runtime::NodeRuntime; use project::{ search::{SearchQuery, SearchResult}, - Project, + Project, ProjectPath, }; use remote::SshSession; use serde_json::json; @@ -440,6 +440,49 @@ async fn test_remote_lsp(cx: &mut TestAppContext, server_cx: &mut TestAppContext }) } +#[gpui::test] +async fn test_remote_resolve_file_path(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { + let (project, _headless, _fs) = init_test(cx, server_cx).await; + let (worktree, _) = project + .update(cx, |project, cx| { + project.find_or_create_worktree("/code/project1", true, cx) + }) + .await + .unwrap(); + + let worktree_id = cx.update(|cx| worktree.read(cx).id()); + + let buffer = project + .update(cx, |project, cx| { + project.open_buffer((worktree_id, Path::new("src/lib.rs")), cx) + }) + .await + .unwrap(); + + let path = project + .update(cx, |project, cx| { + project.resolve_existing_file_path("/code/project1/README.md", &buffer, cx) + }) + .await + .unwrap(); + assert_eq!( + path.abs_path().unwrap().to_string_lossy(), + "/code/project1/README.md" + ); + + let path = project + .update(cx, |project, cx| { + project.resolve_existing_file_path("../README.md", 
&buffer, cx) + }) + .await + .unwrap(); + + assert_eq!( + path.project_path().unwrap().clone(), + ProjectPath::from((worktree_id, "README.md")) + ); +} + fn init_logger() { if std::env::var("RUST_LOG").is_ok() { env_logger::try_init().ok(); From 6b56530a4ab0b45d072ce5fe2c19e10e8cd3f58b Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Mon, 23 Sep 2024 23:53:28 +0000 Subject: [PATCH 026/228] lua: Bump to v0.1.0 (#18246) This PR bumps the Lua extension to v0.1.0 - https://github.com/zed-industries/zed/pull/18199 - https://github.com/zed-industries/zed/pull/16955 --- Cargo.lock | 2 +- extensions/lua/Cargo.toml | 2 +- extensions/lua/extension.toml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 894dd00f6d7f7..09a68973340a5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -14621,7 +14621,7 @@ dependencies = [ [[package]] name = "zed_lua" -version = "0.0.3" +version = "0.1.0" dependencies = [ "zed_extension_api 0.1.0", ] diff --git a/extensions/lua/Cargo.toml b/extensions/lua/Cargo.toml index ace7f4700caeb..f577ce18712c4 100644 --- a/extensions/lua/Cargo.toml +++ b/extensions/lua/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "zed_lua" -version = "0.0.3" +version = "0.1.0" edition = "2021" publish = false license = "Apache-2.0" diff --git a/extensions/lua/extension.toml b/extensions/lua/extension.toml index cd00bbc7c1ad7..82026f48ba90d 100644 --- a/extensions/lua/extension.toml +++ b/extensions/lua/extension.toml @@ -1,7 +1,7 @@ id = "lua" name = "Lua" description = "Lua support." -version = "0.0.3" +version = "0.1.0" schema_version = 1 authors = ["Max Brunsfeld "] repository = "https://github.com/zed-industries/zed" From dbc325ea12b7c06183149e472008f67a2ce0ce5f Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Tue, 24 Sep 2024 09:52:20 +0200 Subject: [PATCH 027/228] vtsls: Move all default configuration to workspace_configuration (#18259) This fixes https://github.com/zed-industries/zed/issues/18014 by fixing the regression that was introduced in https://github.com/zed-industries/zed/pull/17757. In short: after digging into the `vtsls` code, it looks like it essentially doesn't need any `initialization_options`, it's all workspace configuration, since it tries to use the built-in settings from VS Code. I tested the completions, the inlay hints, the max memory - all of it now works after moving to `workspace_configuration`. Closes #18014. Release Notes: - Fixed `vtsls` being initialized the wrong way, which would mean the wrong options were used to enable completions or inlay hints. 
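For anyone who needs to tweak these defaults: since everything now flows through the workspace configuration and user-provided settings are merged on top of it (via `merge_json_value_into` in the diff below), an override can go under `lsp.vtsls.settings` in `settings.json`. A minimal sketch — the memory limit value here is only an illustration, not a recommendation:

```json
{
  "lsp": {
    "vtsls": {
      "settings": {
        "typescript": {
          "tsserver": { "maxTsServerMemory": 16384 }
        }
      }
    }
  }
}
```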
--- crates/languages/src/vtsls.rs | 43 ++++++--------- docs/src/languages/typescript.md | 93 +++++++++++++++++++++++++++----- 2 files changed, 96 insertions(+), 40 deletions(-) diff --git a/crates/languages/src/vtsls.rs b/crates/languages/src/vtsls.rs index 3c1cf0fcbe151..de6d575a8ee9f 100644 --- a/crates/languages/src/vtsls.rs +++ b/crates/languages/src/vtsls.rs @@ -6,14 +6,14 @@ use language::{LanguageServerName, LspAdapter, LspAdapterDelegate}; use lsp::{CodeActionKind, LanguageServerBinary}; use node_runtime::NodeRuntime; use project::{lsp_store::language_server_settings, project_settings::BinarySettings}; -use serde_json::{json, Value}; +use serde_json::Value; use std::{ any::Any, ffi::OsString, path::{Path, PathBuf}, sync::Arc, }; -use util::{maybe, ResultExt}; +use util::{maybe, merge_json_value_into, ResultExt}; fn typescript_server_binary_arguments(server_path: &Path) -> Vec { vec![server_path.into(), "--stdio".into()] @@ -212,11 +212,12 @@ impl LspAdapter for VtslsLspAdapter { }) } - async fn initialization_options( + async fn workspace_configuration( self: Arc, - adapter: &Arc, - ) -> Result> { - let tsdk_path = Self::tsdk_path(adapter).await; + delegate: &Arc, + cx: &mut AsyncAppContext, + ) -> Result { + let tsdk_path = Self::tsdk_path(delegate).await; let config = serde_json::json!({ "tsdk": tsdk_path, "suggest": { @@ -243,10 +244,13 @@ impl LspAdapter for VtslsLspAdapter { "enumMemberValues": { "enabled": true } - } + }, + "tsserver": { + "maxTsServerMemory": 8092 + }, }); - Ok(Some(json!({ + let mut default_workspace_configuration = serde_json::json!({ "typescript": config, "javascript": config, "vtsls": { @@ -258,33 +262,18 @@ impl LspAdapter for VtslsLspAdapter { }, "autoUseWorkspaceTsdk": true } - }))) - } + }); - async fn workspace_configuration( - self: Arc, - delegate: &Arc, - cx: &mut AsyncAppContext, - ) -> Result { let override_options = cx.update(|cx| { language_server_settings(delegate.as_ref(), &SERVER_NAME, cx) .and_then(|s| s.settings.clone()) })?; - if let Some(options) = override_options { - return Ok(options); + if let Some(override_options) = override_options { + merge_json_value_into(override_options, &mut default_workspace_configuration) } - let config = serde_json::json!({ - "tsserver": { - "maxTsServerMemory": 8092 - }, - }); - - Ok(serde_json::json!({ - "typescript": config, - "javascript": config - })) + Ok(default_workspace_configuration) } fn language_ids(&self) -> HashMap { diff --git a/docs/src/languages/typescript.md b/docs/src/languages/typescript.md index 080d41efb33c2..fa9827cb06f89 100644 --- a/docs/src/languages/typescript.md +++ b/docs/src/languages/typescript.md @@ -68,21 +68,25 @@ Prettier will also be used for TypeScript files by default. To disable this: Zed sets the following initialization options to make the language server send back inlay hints (that is, when Zed has inlay hints enabled in the settings). -You can override these settings in your configuration file: +You can override these settings in your Zed settings file. 
+ +When using `typescript-language-server`: ```json -"lsp": { - "$LANGUAGE_SERVER_NAME": { - "initialization_options": { - "preferences": { - "includeInlayParameterNameHints": "all", - "includeInlayParameterNameHintsWhenArgumentMatchesName": true, - "includeInlayFunctionParameterTypeHints": true, - "includeInlayVariableTypeHints": true, - "includeInlayVariableTypeHintsWhenTypeMatchesName": true, - "includeInlayPropertyDeclarationTypeHints": true, - "includeInlayFunctionLikeReturnTypeHints": true, - "includeInlayEnumMemberValueHints": true, +{ + "lsp": { + "typescript-language-server": { + "initialization_options": { + "preferences": { + "includeInlayParameterNameHints": "all", + "includeInlayParameterNameHintsWhenArgumentMatchesName": true, + "includeInlayFunctionParameterTypeHints": true, + "includeInlayVariableTypeHints": true, + "includeInlayVariableTypeHintsWhenTypeMatchesName": true, + "includeInlayPropertyDeclarationTypeHints": true, + "includeInlayFunctionLikeReturnTypeHints": true, + "includeInlayEnumMemberValueHints": true + } } } } @@ -91,6 +95,69 @@ You can override these settings in your configuration file: See [typescript-language-server inlayhints documentation](https://github.com/typescript-language-server/typescript-language-server?tab=readme-ov-file#inlay-hints-textdocumentinlayhint) for more information. +When using `vtsls`: + +```json +{ + "lsp": { + "vtsls": { + "settings": { + // For JavaScript: + "javascript": { + "inlayHints": { + "parameterNames": { + "enabled": "all", + "suppressWhenArgumentMatchesName": false + }, + "parameterTypes": { + "enabled": true + }, + "variableTypes": { + "enabled": true, + "suppressWhenTypeMatchesName": true + }, + "propertyDeclarationTypes": { + "enabled": true + }, + "functionLikeReturnTypes": { + "enabled": true + }, + "enumMemberValues": { + "enabled": true + } + } + }, + // For TypeScript: + "typescript": { + "inlayHints": { + "parameterNames": { + "enabled": "all", + "suppressWhenArgumentMatchesName": false + }, + "parameterTypes": { + "enabled": true + }, + "variableTypes": { + "enabled": true, + "suppressWhenTypeMatchesName": true + }, + "propertyDeclarationTypes": { + "enabled": true + }, + "functionLikeReturnTypes": { + "enabled": true + }, + "enumMemberValues": { + "enabled": true + } + } + } + } + } + } +} +``` + ## See also - [Zed Yarn documentation](./yarn.md) for a walkthrough of configuring your project to use Yarn. From 399e094f021561a51e3e2ff76993bfb0cef0a5c2 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Tue, 24 Sep 2024 15:36:05 +0300 Subject: [PATCH 028/228] Properly use default search options in the buffer search bar (#18271) Only replace current search options if the search was dismissed and the new options are different from the default ones. 
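For reference, the default options being restored here come from the `search` block of the user settings (the same fields as the `SearchSettings` used in the tests below). A minimal sketch of pinning them explicitly, assuming the top-level `search` key in `settings.json`:

```json
{
  "search": {
    "whole_word": false,
    "case_sensitive": false,
    "include_ignored": false,
    "regex": false
  }
}
```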
Follow-up of https://github.com/zed-industries/zed/pull/17179 Closes https://github.com/zed-industries/zed/issues/18166 Release Notes: - Fixed buffer search options toggling unexpectedly on redeploys ([#18166](https://github.com/zed-industries/zed/issues/18166)) --- crates/search/src/buffer_search.rs | 132 +++++++++++++++++++++++++++-- 1 file changed, 126 insertions(+), 6 deletions(-) diff --git a/crates/search/src/buffer_search.rs b/crates/search/src/buffer_search.rs index 3a7cccfbb916c..1c37bfd481dcb 100644 --- a/crates/search/src/buffer_search.rs +++ b/crates/search/src/buffer_search.rs @@ -87,6 +87,7 @@ pub struct BufferSearchBar { pending_search: Option>, search_options: SearchOptions, default_options: SearchOptions, + configured_options: SearchOptions, query_contains_error: bool, dismissed: bool, search_history: SearchHistory, @@ -517,6 +518,7 @@ impl BufferSearchBar { active_match_index: None, searchable_items_with_matches: Default::default(), default_options: search_options, + configured_options: search_options, search_options, pending_search: None, query_contains_error: false, @@ -605,10 +607,11 @@ impl BufferSearchBar { return false; }; - self.default_options = SearchOptions::from_settings(&EditorSettings::get_global(cx).search); - - if self.default_options != self.search_options { - self.search_options = self.default_options; + self.configured_options = + SearchOptions::from_settings(&EditorSettings::get_global(cx).search); + if self.dismissed && self.configured_options != self.default_options { + self.search_options = self.configured_options; + self.default_options = self.configured_options; } self.dismissed = false; @@ -627,6 +630,7 @@ impl BufferSearchBar { .map(SearchableItemHandle::supported_options) .unwrap_or_default() } + pub fn search_suggested(&mut self, cx: &mut ViewContext) { let search = self .query_suggestion(cx) @@ -1195,10 +1199,11 @@ mod tests { use std::ops::Range; use super::*; - use editor::{display_map::DisplayRow, DisplayPoint, Editor, MultiBuffer}; - use gpui::{Context, Hsla, TestAppContext, VisualTestContext}; + use editor::{display_map::DisplayRow, DisplayPoint, Editor, MultiBuffer, SearchSettings}; + use gpui::{Context, Hsla, TestAppContext, UpdateGlobal, VisualTestContext}; use language::{Buffer, Point}; use project::Project; + use settings::SettingsStore; use smol::stream::StreamExt as _; use unindent::Unindent as _; @@ -2320,4 +2325,119 @@ mod tests { assert!(display_points_of(editor.all_text_background_highlights(cx)).is_empty(),); }); } + + #[gpui::test] + async fn test_search_options_changes(cx: &mut TestAppContext) { + let (_editor, search_bar, cx) = init_test(cx); + update_search_settings( + SearchSettings { + whole_word: false, + case_sensitive: false, + include_ignored: false, + regex: false, + }, + cx, + ); + + let deploy = Deploy { + focus: true, + replace_enabled: false, + selection_search_enabled: true, + }; + + search_bar.update(cx, |search_bar, cx| { + assert_eq!( + search_bar.search_options, + SearchOptions::NONE, + "Should have no search options enabled by default" + ); + search_bar.toggle_search_option(SearchOptions::WHOLE_WORD, cx); + assert_eq!( + search_bar.search_options, + SearchOptions::WHOLE_WORD, + "Should enable the option toggled" + ); + assert!( + !search_bar.dismissed, + "Search bar should be present and visible" + ); + search_bar.deploy(&deploy, cx); + assert_eq!( + search_bar.configured_options, + SearchOptions::NONE, + "Should have configured search options matching the settings" + ); + assert_eq!( + 
search_bar.search_options, + SearchOptions::WHOLE_WORD, + "After (re)deploying, the option should still be enabled" + ); + + search_bar.dismiss(&Dismiss, cx); + search_bar.deploy(&deploy, cx); + assert_eq!( + search_bar.search_options, + SearchOptions::NONE, + "After hiding and showing the search bar, default options should be used" + ); + + search_bar.toggle_search_option(SearchOptions::REGEX, cx); + search_bar.toggle_search_option(SearchOptions::WHOLE_WORD, cx); + assert_eq!( + search_bar.search_options, + SearchOptions::REGEX | SearchOptions::WHOLE_WORD, + "Should enable the options toggled" + ); + assert!( + !search_bar.dismissed, + "Search bar should be present and visible" + ); + }); + + update_search_settings( + SearchSettings { + whole_word: false, + case_sensitive: true, + include_ignored: false, + regex: false, + }, + cx, + ); + search_bar.update(cx, |search_bar, cx| { + assert_eq!( + search_bar.search_options, + SearchOptions::REGEX | SearchOptions::WHOLE_WORD, + "Should have no search options enabled by default" + ); + + search_bar.deploy(&deploy, cx); + assert_eq!( + search_bar.configured_options, + SearchOptions::CASE_SENSITIVE, + "Should have configured search options matching the settings" + ); + assert_eq!( + search_bar.search_options, + SearchOptions::REGEX | SearchOptions::WHOLE_WORD, + "Toggling a non-dismissed search bar with custom options should not change the default options" + ); + search_bar.dismiss(&Dismiss, cx); + search_bar.deploy(&deploy, cx); + assert_eq!( + search_bar.search_options, + SearchOptions::CASE_SENSITIVE, + "After hiding and showing the search bar, default options should be used" + ); + }); + } + + fn update_search_settings(search_settings: SearchSettings, cx: &mut TestAppContext) { + cx.update(|cx| { + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings::(cx, |settings| { + settings.search = Some(search_settings); + }); + }); + }); + } } From f019ad563f643a03e83083f04f23d38d62dd1181 Mon Sep 17 00:00:00 2001 From: Boris Verkhovskiy Date: Tue, 24 Sep 2024 06:46:06 -0600 Subject: [PATCH 029/228] Don't highlight Python function arguments as variables (#18252) Works on - #14892 Follow up to - #17473 - https://github.com/zed-industries/zed/pull/17984#issuecomment-2369815207 Release Notes: - N/A --- crates/languages/src/python/highlights.scm | 1 - 1 file changed, 1 deletion(-) diff --git a/crates/languages/src/python/highlights.scm b/crates/languages/src/python/highlights.scm index 3255677bedc42..5edbefa7be747 100644 --- a/crates/languages/src/python/highlights.scm +++ b/crates/languages/src/python/highlights.scm @@ -1,4 +1,3 @@ -(parameter (identifier) @variable) (attribute attribute: (identifier) @property) (type (identifier) @type) From 93a4295f66c5a4c393e861deac7dcfb8c8dd45d2 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Tue, 24 Sep 2024 15:03:22 +0200 Subject: [PATCH 030/228] project search: Fix search results not being highlighted (#18273) Closes #18254 Closes #18219 Closes #17690 This fixes the project search not highlighting all results. The problem was relatively simple, even though it took a while to find it: we inserted multiple excerpts concurrently and the order in the multi-buffer ended up being wrong. Sorting the resulting `match_ranges` fixed the problem, but as it turns out, we can do a better job by moving the concurrency into the method on the MultiBuffer. Performance is the same, but now the problem is fixed. 
Release Notes: - Fixed search results in project-wide search not being highlighted consistently and navigation sometimes being broken (#18254, #18219, #17690) --------- Co-authored-by: Bennet --- crates/multi_buffer/src/multi_buffer.rs | 220 +++++++++++++++--------- crates/search/src/project_search.rs | 73 +++----- 2 files changed, 163 insertions(+), 130 deletions(-) diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index 0df196bb9829d..828b39967d9e1 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -5,7 +5,7 @@ use anyhow::{anyhow, Result}; use clock::ReplicaId; use collections::{BTreeMap, Bound, HashMap, HashSet}; use futures::{channel::mpsc, SinkExt}; -use gpui::{AppContext, EntityId, EventEmitter, Model, ModelContext}; +use gpui::{AppContext, EntityId, EventEmitter, Model, ModelContext, Task}; use itertools::Itertools; use language::{ language_settings::{language_settings, LanguageSettings}, @@ -1130,66 +1130,6 @@ impl MultiBuffer { } } - pub fn stream_excerpts_with_context_lines( - &mut self, - buffer: Model, - ranges: Vec>, - context_line_count: u32, - cx: &mut ModelContext, - ) -> mpsc::Receiver> { - let (buffer_id, buffer_snapshot) = - buffer.update(cx, |buffer, _| (buffer.remote_id(), buffer.snapshot())); - - let (mut tx, rx) = mpsc::channel(256); - cx.spawn(move |this, mut cx| async move { - let mut excerpt_ranges = Vec::new(); - let mut range_counts = Vec::new(); - cx.background_executor() - .scoped(|scope| { - scope.spawn(async { - let (ranges, counts) = - build_excerpt_ranges(&buffer_snapshot, &ranges, context_line_count); - excerpt_ranges = ranges; - range_counts = counts; - }); - }) - .await; - - let mut ranges = ranges.into_iter(); - let mut range_counts = range_counts.into_iter(); - for excerpt_ranges in excerpt_ranges.chunks(100) { - let excerpt_ids = match this.update(&mut cx, |this, cx| { - this.push_excerpts(buffer.clone(), excerpt_ranges.iter().cloned(), cx) - }) { - Ok(excerpt_ids) => excerpt_ids, - Err(_) => return, - }; - - for (excerpt_id, range_count) in excerpt_ids.into_iter().zip(range_counts.by_ref()) - { - for range in ranges.by_ref().take(range_count) { - let start = Anchor { - buffer_id: Some(buffer_id), - excerpt_id, - text_anchor: range.start, - }; - let end = Anchor { - buffer_id: Some(buffer_id), - excerpt_id, - text_anchor: range.end, - }; - if tx.send(start..end).await.is_err() { - break; - } - } - } - } - }) - .detach(); - - rx - } - pub fn push_excerpts( &mut self, buffer: Model, @@ -1239,6 +1179,91 @@ impl MultiBuffer { anchor_ranges } + pub fn push_multiple_excerpts_with_context_lines( + &mut self, + buffers_with_ranges: Vec<(Model, Vec>)>, + context_line_count: u32, + cx: &mut ModelContext, + ) -> Task>> { + use futures::StreamExt; + + let (excerpt_ranges_tx, mut excerpt_ranges_rx) = mpsc::channel(256); + + let mut buffer_ids = Vec::with_capacity(buffers_with_ranges.len()); + + for (buffer, ranges) in buffers_with_ranges { + let (buffer_id, buffer_snapshot) = + buffer.update(cx, |buffer, _| (buffer.remote_id(), buffer.snapshot())); + + buffer_ids.push(buffer_id); + + cx.background_executor() + .spawn({ + let mut excerpt_ranges_tx = excerpt_ranges_tx.clone(); + + async move { + let (excerpt_ranges, counts) = + build_excerpt_ranges(&buffer_snapshot, &ranges, context_line_count); + excerpt_ranges_tx + .send((buffer_id, buffer.clone(), ranges, excerpt_ranges, counts)) + .await + .ok(); + } + }) + .detach() + } + + cx.spawn(move |this, mut cx| async move { 
+ let mut results_by_buffer_id = HashMap::default(); + while let Some((buffer_id, buffer, ranges, excerpt_ranges, range_counts)) = + excerpt_ranges_rx.next().await + { + results_by_buffer_id + .insert(buffer_id, (buffer, ranges, excerpt_ranges, range_counts)); + } + + let mut multi_buffer_ranges = Vec::default(); + 'outer: for buffer_id in buffer_ids { + let Some((buffer, ranges, excerpt_ranges, range_counts)) = + results_by_buffer_id.remove(&buffer_id) + else { + continue; + }; + + let mut ranges = ranges.into_iter(); + let mut range_counts = range_counts.into_iter(); + for excerpt_ranges in excerpt_ranges.chunks(100) { + let excerpt_ids = match this.update(&mut cx, |this, cx| { + this.push_excerpts(buffer.clone(), excerpt_ranges.iter().cloned(), cx) + }) { + Ok(excerpt_ids) => excerpt_ids, + Err(_) => continue 'outer, + }; + + for (excerpt_id, range_count) in + excerpt_ids.into_iter().zip(range_counts.by_ref()) + { + for range in ranges.by_ref().take(range_count) { + let start = Anchor { + buffer_id: Some(buffer_id), + excerpt_id, + text_anchor: range.start, + }; + let end = Anchor { + buffer_id: Some(buffer_id), + excerpt_id, + text_anchor: range.end, + }; + multi_buffer_ranges.push(start..end); + } + } + } + } + + multi_buffer_ranges + }) + } + pub fn insert_excerpts_after( &mut self, prev_excerpt_id: ExcerptId, @@ -5052,7 +5077,6 @@ where #[cfg(test)] mod tests { use super::*; - use futures::StreamExt; use gpui::{AppContext, Context, TestAppContext}; use language::{Buffer, Rope}; use parking_lot::RwLock; @@ -5601,41 +5625,67 @@ mod tests { ); } - #[gpui::test] - async fn test_stream_excerpts_with_context_lines(cx: &mut TestAppContext) { - let buffer = cx.new_model(|cx| Buffer::local(sample_text(20, 3, 'a'), cx)); - let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); - let anchor_ranges = multibuffer.update(cx, |multibuffer, cx| { - let snapshot = buffer.read(cx); - let ranges = vec![ - snapshot.anchor_before(Point::new(3, 2))..snapshot.anchor_before(Point::new(4, 2)), - snapshot.anchor_before(Point::new(7, 1))..snapshot.anchor_before(Point::new(7, 3)), - snapshot.anchor_before(Point::new(15, 0)) - ..snapshot.anchor_before(Point::new(15, 0)), - ]; - multibuffer.stream_excerpts_with_context_lines(buffer.clone(), ranges, 2, cx) - }); + #[gpui::test(iterations = 100)] + async fn test_push_multiple_excerpts_with_context_lines(cx: &mut TestAppContext) { + let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text(20, 3, 'a'), cx)); + let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(15, 4, 'a'), cx)); + let snapshot_1 = buffer_1.update(cx, |buffer, _| buffer.snapshot()); + let snapshot_2 = buffer_2.update(cx, |buffer, _| buffer.snapshot()); + let ranges_1 = vec![ + snapshot_1.anchor_before(Point::new(3, 2))..snapshot_1.anchor_before(Point::new(4, 2)), + snapshot_1.anchor_before(Point::new(7, 1))..snapshot_1.anchor_before(Point::new(7, 3)), + snapshot_1.anchor_before(Point::new(15, 0)) + ..snapshot_1.anchor_before(Point::new(15, 0)), + ]; + let ranges_2 = vec![ + snapshot_2.anchor_before(Point::new(2, 1))..snapshot_2.anchor_before(Point::new(3, 1)), + snapshot_2.anchor_before(Point::new(10, 0)) + ..snapshot_2.anchor_before(Point::new(10, 2)), + ]; - let anchor_ranges = anchor_ranges.collect::>().await; + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); + let anchor_ranges = multibuffer + .update(cx, |multibuffer, cx| { + multibuffer.push_multiple_excerpts_with_context_lines( + vec![(buffer_1.clone(), ranges_1), (buffer_2.clone(), 
ranges_2)], + 2, + cx, + ) + }) + .await; let snapshot = multibuffer.update(cx, |multibuffer, cx| multibuffer.snapshot(cx)); assert_eq!( snapshot.text(), concat!( - "bbb\n", // + "bbb\n", // buffer_1 "ccc\n", // - "ddd\n", // - "eee\n", // + "ddd\n", // <-- excerpt 1 + "eee\n", // <-- excerpt 1 "fff\n", // "ggg\n", // - "hhh\n", // + "hhh\n", // <-- excerpt 2 "iii\n", // "jjj\n", // + // "nnn\n", // "ooo\n", // - "ppp\n", // + "ppp\n", // <-- excerpt 3 "qqq\n", // - "rrr", // + "rrr\n", // + // + "aaaa\n", // buffer 2 + "bbbb\n", // + "cccc\n", // <-- excerpt 4 + "dddd\n", // <-- excerpt 4 + "eeee\n", // + "ffff\n", // + // + "iiii\n", // + "jjjj\n", // + "kkkk\n", // <-- excerpt 5 + "llll\n", // + "mmmm", // ) ); @@ -5647,7 +5697,9 @@ mod tests { vec![ Point::new(2, 2)..Point::new(3, 2), Point::new(6, 1)..Point::new(6, 3), - Point::new(11, 0)..Point::new(11, 0) + Point::new(11, 0)..Point::new(11, 0), + Point::new(16, 1)..Point::new(17, 1), + Point::new(22, 0)..Point::new(22, 2) ] ); } diff --git a/crates/search/src/project_search.rs b/crates/search/src/project_search.rs index d5b719a657628..ea94d27daf61d 100644 --- a/crates/search/src/project_search.rs +++ b/crates/search/src/project_search.rs @@ -263,54 +263,35 @@ impl ProjectSearch { let mut limit_reached = false; while let Some(results) = matches.next().await { - let tasks = results - .into_iter() - .map(|result| { - let this = this.clone(); - - cx.spawn(|mut cx| async move { - match result { - project::search::SearchResult::Buffer { buffer, ranges } => { - let mut match_ranges_rx = - this.update(&mut cx, |this, cx| { - this.excerpts.update(cx, |excerpts, cx| { - excerpts.stream_excerpts_with_context_lines( - buffer, - ranges, - editor::DEFAULT_MULTIBUFFER_CONTEXT, - cx, - ) - }) - })?; - - let mut match_ranges = vec![]; - while let Some(range) = match_ranges_rx.next().await { - match_ranges.push(range); - } - anyhow::Ok((match_ranges, false)) - } - project::search::SearchResult::LimitReached => { - anyhow::Ok((vec![], true)) - } - } - }) - }) - .collect::>(); - - let result_ranges = futures::future::join_all(tasks).await; - let mut combined_ranges = vec![]; - for (ranges, result_limit_reached) in result_ranges.into_iter().flatten() { - combined_ranges.extend(ranges); - if result_limit_reached { - limit_reached = result_limit_reached; + let mut buffers_with_ranges = Vec::with_capacity(results.len()); + for result in results { + match result { + project::search::SearchResult::Buffer { buffer, ranges } => { + buffers_with_ranges.push((buffer, ranges)); + } + project::search::SearchResult::LimitReached => { + limit_reached = true; + } } } + + let match_ranges = this + .update(&mut cx, |this, cx| { + this.excerpts.update(cx, |excerpts, cx| { + excerpts.push_multiple_excerpts_with_context_lines( + buffers_with_ranges, + editor::DEFAULT_MULTIBUFFER_CONTEXT, + cx, + ) + }) + }) + .ok()? 
+ .await; + this.update(&mut cx, |this, cx| { - if !combined_ranges.is_empty() { - this.no_results = Some(false); - this.match_ranges.extend(combined_ranges); - cx.notify(); - } + this.no_results = Some(false); + this.match_ranges.extend(match_ranges); + cx.notify(); }) .ok()?; } @@ -2745,7 +2726,7 @@ pub mod tests { search_view .results_editor .update(cx, |editor, cx| editor.display_text(cx)), - "\n\n\nconst TWO: usize = one::ONE + one::ONE;\n\n\n\n\nconst ONE: usize = 1;\n", + "\n\n\nconst ONE: usize = 1;\n\n\n\n\nconst TWO: usize = one::ONE + one::ONE;\n", "New search in directory should have a filter that matches a certain directory" ); }) From 336b4a5690cee0714ae704348cb4944544fcdc99 Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Tue, 24 Sep 2024 09:15:25 -0400 Subject: [PATCH 031/228] Tweak close stale issues configuration (#18275) Release Notes: - N/A --- .github/workflows/close_stale_issues.yml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/.github/workflows/close_stale_issues.yml b/.github/workflows/close_stale_issues.yml index afc28ec180b15..be69ee2682cfc 100644 --- a/.github/workflows/close_stale_issues.yml +++ b/.github/workflows/close_stale_issues.yml @@ -18,10 +18,14 @@ jobs: Thanks for your help! close-issue-message: "This issue was closed due to inactivity; feel free to open a new issue if you're still experiencing this problem!" + # We will increase `days-before-stale` to 365 on or after Jan 24th, + # 2024. This date marks one year since migrating issues from + # 'community' to 'zed' repository. The migration added activity to all + # issues, preventing 365 days from working until then. days-before-stale: 180 days-before-close: 7 any-of-issue-labels: "defect,panic / crash" - operations-per-run: 100 + operations-per-run: 200 ascending: true enable-statistics: true stale-issue-label: "stale" From 3a2f0653d16bdbacf2e090c15bcf424d96de9e64 Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Tue, 24 Sep 2024 09:44:27 -0400 Subject: [PATCH 032/228] Bump operations-per-run parameter in stale issues action (#18276) Release Notes: - N/A --- .github/workflows/close_stale_issues.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/close_stale_issues.yml b/.github/workflows/close_stale_issues.yml index be69ee2682cfc..1f287fb5e3d4b 100644 --- a/.github/workflows/close_stale_issues.yml +++ b/.github/workflows/close_stale_issues.yml @@ -25,7 +25,7 @@ jobs: days-before-stale: 180 days-before-close: 7 any-of-issue-labels: "defect,panic / crash" - operations-per-run: 200 + operations-per-run: 1000 ascending: true enable-statistics: true stale-issue-label: "stale" From 437bcc0ce6a270487fe6b2e2c42117433abe0946 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Tue, 24 Sep 2024 16:46:11 +0200 Subject: [PATCH 033/228] ssh project: Handle multiple paths and worktrees correctly (#18277) This makes SSH projects work with `ssh_connections` that have multiple paths: ```json { "ssh_connections": [ { "host": "127.0.0.1", "projects": [ { "paths": [ "/Users/thorstenball/work/projs/go-proj", "/Users/thorstenball/work/projs/rust-proj" ] } ] } ] } ``` @ConradIrwin @mikayla-maki since this wasn't really released yet, we didn't create a full-on migration, so old ssh projects that were already serialized need to either be manually deleted from the database, or the whole local DB wiped. 
Release Notes: - N/A --------- Co-authored-by: Bennet --- crates/recent_projects/src/recent_projects.rs | 8 +-- crates/workspace/src/persistence.rs | 64 ++++++++++--------- crates/workspace/src/persistence/model.rs | 42 +++++++----- crates/workspace/src/workspace.rs | 8 +-- 4 files changed, 66 insertions(+), 56 deletions(-) diff --git a/crates/recent_projects/src/recent_projects.rs b/crates/recent_projects/src/recent_projects.rs index cb3d3ab65950f..20393d63e1a3d 100644 --- a/crates/recent_projects/src/recent_projects.rs +++ b/crates/recent_projects/src/recent_projects.rs @@ -268,7 +268,7 @@ impl PickerDelegate for RecentProjectsDelegate { .as_ref() .map(|port| port.to_string()) .unwrap_or_default(), - ssh_project.path, + ssh_project.paths.join(","), ssh_project .user .as_ref() @@ -403,7 +403,7 @@ impl PickerDelegate for RecentProjectsDelegate { password: None, }; - let paths = vec![PathBuf::from(ssh_project.path.clone())]; + let paths = ssh_project.paths.iter().map(PathBuf::from).collect(); cx.spawn(|_, mut cx| async move { open_ssh_project(connection_options, paths, app_state, open_options, &mut cx).await @@ -460,9 +460,7 @@ impl PickerDelegate for RecentProjectsDelegate { .filter_map(|i| paths.paths().get(*i).cloned()) .collect(), ), - SerializedWorkspaceLocation::Ssh(ssh_project) => { - Arc::new(vec![PathBuf::from(ssh_project.ssh_url())]) - } + SerializedWorkspaceLocation::Ssh(ssh_project) => Arc::new(ssh_project.ssh_urls()), SerializedWorkspaceLocation::DevServer(dev_server_project) => { Arc::new(vec![PathBuf::from(format!( "{}:{}", diff --git a/crates/workspace/src/persistence.rs b/crates/workspace/src/persistence.rs index 034328a30b2ff..3a0b8eabb90e6 100644 --- a/crates/workspace/src/persistence.rs +++ b/crates/workspace/src/persistence.rs @@ -366,6 +366,9 @@ define_connection! { ); ALTER TABLE workspaces ADD COLUMN ssh_project_id INTEGER REFERENCES ssh_projects(id) ON DELETE CASCADE; ), + sql!( + ALTER TABLE ssh_projects RENAME COLUMN path TO paths; + ), ]; } @@ -769,39 +772,40 @@ impl WorkspaceDb { &self, host: String, port: Option, - path: String, + paths: Vec, user: Option, ) -> Result { + let paths = serde_json::to_string(&paths)?; if let Some(project) = self - .get_ssh_project(host.clone(), port, path.clone(), user.clone()) + .get_ssh_project(host.clone(), port, paths.clone(), user.clone()) .await? { Ok(project) } else { - self.insert_ssh_project(host, port, path, user) + self.insert_ssh_project(host, port, paths, user) .await? .ok_or_else(|| anyhow!("failed to insert ssh project")) } } query! { - async fn get_ssh_project(host: String, port: Option, path: String, user: Option) -> Result> { - SELECT id, host, port, path, user + async fn get_ssh_project(host: String, port: Option, paths: String, user: Option) -> Result> { + SELECT id, host, port, paths, user FROM ssh_projects - WHERE host IS ? AND port IS ? AND path IS ? AND user IS ? + WHERE host IS ? AND port IS ? AND paths IS ? AND user IS ? LIMIT 1 } } query! { - async fn insert_ssh_project(host: String, port: Option, path: String, user: Option) -> Result> { + async fn insert_ssh_project(host: String, port: Option, paths: String, user: Option) -> Result> { INSERT INTO ssh_projects( host, port, - path, + paths, user ) VALUES (?1, ?2, ?3, ?4) - RETURNING id, host, port, path, user + RETURNING id, host, port, paths, user } } @@ -840,7 +844,7 @@ impl WorkspaceDb { query! 
{ fn ssh_projects() -> Result> { - SELECT id, host, port, path, user + SELECT id, host, port, paths, user FROM ssh_projects } } @@ -1656,45 +1660,45 @@ mod tests { async fn test_get_or_create_ssh_project() { let db = WorkspaceDb(open_test_db("test_get_or_create_ssh_project").await); - let (host, port, path, user) = ( + let (host, port, paths, user) = ( "example.com".to_string(), Some(22_u16), - "/home/user".to_string(), + vec!["/home/user".to_string(), "/etc/nginx".to_string()], Some("user".to_string()), ); let project = db - .get_or_create_ssh_project(host.clone(), port, path.clone(), user.clone()) + .get_or_create_ssh_project(host.clone(), port, paths.clone(), user.clone()) .await .unwrap(); assert_eq!(project.host, host); - assert_eq!(project.path, path); + assert_eq!(project.paths, paths); assert_eq!(project.user, user); // Test that calling the function again with the same parameters returns the same project let same_project = db - .get_or_create_ssh_project(host.clone(), port, path.clone(), user.clone()) + .get_or_create_ssh_project(host.clone(), port, paths.clone(), user.clone()) .await .unwrap(); assert_eq!(project.id, same_project.id); // Test with different parameters - let (host2, path2, user2) = ( + let (host2, paths2, user2) = ( "otherexample.com".to_string(), - "/home/otheruser".to_string(), + vec!["/home/otheruser".to_string()], Some("otheruser".to_string()), ); let different_project = db - .get_or_create_ssh_project(host2.clone(), None, path2.clone(), user2.clone()) + .get_or_create_ssh_project(host2.clone(), None, paths2.clone(), user2.clone()) .await .unwrap(); assert_ne!(project.id, different_project.id); assert_eq!(different_project.host, host2); - assert_eq!(different_project.path, path2); + assert_eq!(different_project.paths, paths2); assert_eq!(different_project.user, user2); } @@ -1702,25 +1706,25 @@ mod tests { async fn test_get_or_create_ssh_project_with_null_user() { let db = WorkspaceDb(open_test_db("test_get_or_create_ssh_project_with_null_user").await); - let (host, port, path, user) = ( + let (host, port, paths, user) = ( "example.com".to_string(), None, - "/home/user".to_string(), + vec!["/home/user".to_string()], None, ); let project = db - .get_or_create_ssh_project(host.clone(), port, path.clone(), None) + .get_or_create_ssh_project(host.clone(), port, paths.clone(), None) .await .unwrap(); assert_eq!(project.host, host); - assert_eq!(project.path, path); + assert_eq!(project.paths, paths); assert_eq!(project.user, None); // Test that calling the function again with the same parameters returns the same project let same_project = db - .get_or_create_ssh_project(host.clone(), port, path.clone(), user.clone()) + .get_or_create_ssh_project(host.clone(), port, paths.clone(), user.clone()) .await .unwrap(); @@ -1735,32 +1739,32 @@ mod tests { ( "example.com".to_string(), None, - "/home/user".to_string(), + vec!["/home/user".to_string()], None, ), ( "anotherexample.com".to_string(), Some(123_u16), - "/home/user2".to_string(), + vec!["/home/user2".to_string()], Some("user2".to_string()), ), ( "yetanother.com".to_string(), Some(345_u16), - "/home/user3".to_string(), + vec!["/home/user3".to_string(), "/proc/1234/exe".to_string()], None, ), ]; - for (host, port, path, user) in projects.iter() { + for (host, port, paths, user) in projects.iter() { let project = db - .get_or_create_ssh_project(host.clone(), *port, path.clone(), user.clone()) + .get_or_create_ssh_project(host.clone(), *port, paths.clone(), user.clone()) .await .unwrap(); assert_eq!(&project.host, 
host); assert_eq!(&project.port, port); - assert_eq!(&project.path, path); + assert_eq!(&project.paths, paths); assert_eq!(&project.user, user); } diff --git a/crates/workspace/src/persistence/model.rs b/crates/workspace/src/persistence/model.rs index 0ad3fa5e606e5..7528e4c3934c5 100644 --- a/crates/workspace/src/persistence/model.rs +++ b/crates/workspace/src/persistence/model.rs @@ -26,24 +26,29 @@ pub struct SerializedSshProject { pub id: SshProjectId, pub host: String, pub port: Option, - pub path: String, + pub paths: Vec, pub user: Option, } impl SerializedSshProject { - pub fn ssh_url(&self) -> String { - let mut result = String::from("ssh://"); - if let Some(user) = &self.user { - result.push_str(user); - result.push('@'); - } - result.push_str(&self.host); - if let Some(port) = &self.port { - result.push(':'); - result.push_str(&port.to_string()); - } - result.push_str(&self.path); - result + pub fn ssh_urls(&self) -> Vec { + self.paths + .iter() + .map(|path| { + let mut result = String::new(); + if let Some(user) = &self.user { + result.push_str(user); + result.push('@'); + } + result.push_str(&self.host); + if let Some(port) = &self.port { + result.push(':'); + result.push_str(&port.to_string()); + } + result.push_str(path); + PathBuf::from(result) + }) + .collect() } } @@ -58,7 +63,8 @@ impl Bind for &SerializedSshProject { let next_index = statement.bind(&self.id.0, start_index)?; let next_index = statement.bind(&self.host, next_index)?; let next_index = statement.bind(&self.port, next_index)?; - let next_index = statement.bind(&self.path, next_index)?; + let raw_paths = serde_json::to_string(&self.paths)?; + let next_index = statement.bind(&raw_paths, next_index)?; statement.bind(&self.user, next_index) } } @@ -68,7 +74,9 @@ impl Column for SerializedSshProject { let id = statement.column_int64(start_index)?; let host = statement.column_text(start_index + 1)?.to_string(); let (port, _) = Option::::column(statement, start_index + 2)?; - let path = statement.column_text(start_index + 3)?.to_string(); + let raw_paths = statement.column_text(start_index + 3)?.to_string(); + let paths: Vec = serde_json::from_str(&raw_paths)?; + let (user, _) = Option::::column(statement, start_index + 4)?; Ok(( @@ -76,7 +84,7 @@ impl Column for SerializedSshProject { id: SshProjectId(id as u64), host, port, - path, + paths, user, }, start_index + 5, diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index b732eb5bc70c1..98f793c234aae 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -5516,14 +5516,14 @@ pub fn open_ssh_project( cx: &mut AppContext, ) -> Task> { cx.spawn(|mut cx| async move { - // TODO: Handle multiple paths - let path = paths.iter().next().cloned().unwrap_or_default(); - let serialized_ssh_project = persistence::DB .get_or_create_ssh_project( connection_options.host.clone(), connection_options.port, - path.to_string_lossy().to_string(), + paths + .iter() + .map(|path| path.to_string_lossy().to_string()) + .collect::>(), connection_options.username.clone(), ) .await?; From e87d6da2a67c46839c32512c1287ed7a435e2d0d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastijan=20Kelneri=C4=8D?= Date: Tue, 24 Sep 2024 16:49:07 +0200 Subject: [PATCH 034/228] Implement grapheme support for supermaven completions (#18279) Closes [#18278](https://github.com/zed-industries/zed/issues/18278) Release Notes: - Fixed a panic when graphemes are included in supermaven completions --- Cargo.lock | 1 + crates/supermaven/Cargo.toml | 1 + 
.../src/supermaven_completion_provider.rs | 22 ++++++++++--------- 3 files changed, 14 insertions(+), 10 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 09a68973340a5..9c6d2fb7b9b84 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -11006,6 +11006,7 @@ dependencies = [ "text", "theme", "ui", + "unicode-segmentation", "util", "windows 0.58.0", ] diff --git a/crates/supermaven/Cargo.toml b/crates/supermaven/Cargo.toml index b8f85c0f05618..e04d0ef51bbf8 100644 --- a/crates/supermaven/Cargo.toml +++ b/crates/supermaven/Cargo.toml @@ -29,6 +29,7 @@ supermaven_api.workspace = true smol.workspace = true text.workspace = true ui.workspace = true +unicode-segmentation.workspace = true util.workspace = true [target.'cfg(target_os = "windows")'.dependencies] diff --git a/crates/supermaven/src/supermaven_completion_provider.rs b/crates/supermaven/src/supermaven_completion_provider.rs index 261ce372d9f71..2a7fc31c0db8f 100644 --- a/crates/supermaven/src/supermaven_completion_provider.rs +++ b/crates/supermaven/src/supermaven_completion_provider.rs @@ -12,6 +12,7 @@ use std::{ time::Duration, }; use text::{ToOffset, ToPoint}; +use unicode_segmentation::UnicodeSegmentation; pub const DEBOUNCE_TIMEOUT: Duration = Duration::from_millis(75); @@ -54,33 +55,34 @@ fn completion_state_from_diff( ) -> CompletionProposal { let buffer_text = snapshot .text_for_range(delete_range.clone()) - .collect::() - .chars() - .collect::>(); + .collect::(); let mut inlays: Vec = Vec::new(); - let completion = completion_text.chars().collect::>(); + let completion_graphemes: Vec<&str> = completion_text.graphemes(true).collect(); + let buffer_graphemes: Vec<&str> = buffer_text.graphemes(true).collect(); let mut offset = position.to_offset(&snapshot); let mut i = 0; let mut j = 0; - while i < completion.len() && j < buffer_text.len() { + while i < completion_graphemes.len() && j < buffer_graphemes.len() { // find the next instance of the buffer text in the completion text. - let k = completion[i..].iter().position(|c| *c == buffer_text[j]); + let k = completion_graphemes[i..] + .iter() + .position(|c| *c == buffer_graphemes[j]); match k { Some(k) => { if k != 0 { // the range from the current position to item is an inlay. inlays.push(InlayProposal::Suggestion( snapshot.anchor_after(offset), - completion_text[i..i + k].into(), + completion_graphemes[i..i + k].join("").into(), )); } i += k + 1; j += 1; - offset.add_assign(1); + offset.add_assign(buffer_graphemes[j - 1].len()); } None => { // there are no more matching completions, so drop the remaining @@ -90,11 +92,11 @@ fn completion_state_from_diff( } } - if j == buffer_text.len() && i < completion.len() { + if j == buffer_graphemes.len() && i < completion_graphemes.len() { // there is leftover completion text, so drop it as an inlay. inlays.push(InlayProposal::Suggestion( snapshot.anchor_after(offset), - completion_text[i..completion_text.len()].into(), + completion_graphemes[i..].join("").into(), )); } From 2470db490115322720f85e2ad57e922de930ee1b Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Tue, 24 Sep 2024 18:21:26 +0300 Subject: [PATCH 035/228] Reuse buffer search queries on tab switch (#18281) Before this change, with a large chunk of text as a search query (N*10^5 in my experiments) and the buffer search bar visible, switching between editor tabs was very slow, even if the editors were N*10^2 lines long. The slow switch was caused by Zed always re-creating the Aho-Corasick queries, which is now reused. 
Release Notes: - Improved buffer search performance when switching tabs Co-authored-by: Piotr Osiewicz --- crates/search/src/buffer_search.rs | 98 +++++++++++++++++------------- 1 file changed, 55 insertions(+), 43 deletions(-) diff --git a/crates/search/src/buffer_search.rs b/crates/search/src/buffer_search.rs index 1c37bfd481dcb..9ba7dfd7965b1 100644 --- a/crates/search/src/buffer_search.rs +++ b/crates/search/src/buffer_search.rs @@ -440,7 +440,7 @@ impl ToolbarItemView for BufferSearchBar { )); self.active_searchable_item = Some(searchable_item_handle); - drop(self.update_matches(cx)); + drop(self.update_matches(true, cx)); if !self.dismissed { return ToolbarItemLocation::Secondary; } @@ -701,7 +701,8 @@ impl BufferSearchBar { cx: &mut ViewContext, ) -> oneshot::Receiver<()> { let options = options.unwrap_or(self.default_options); - if query != self.query(cx) || self.search_options != options { + let updated = query != self.query(cx) || self.search_options != options; + if updated { self.query_editor.update(cx, |query_editor, cx| { query_editor.buffer().update(cx, |query_buffer, cx| { let len = query_buffer.len(cx); @@ -712,7 +713,7 @@ impl BufferSearchBar { self.clear_matches(cx); cx.notify(); } - self.update_matches(cx) + self.update_matches(!updated, cx) } fn render_search_option_button( @@ -738,7 +739,7 @@ impl BufferSearchBar { ) { self.search_options.toggle(search_option); self.default_options = self.search_options; - drop(self.update_matches(cx)); + drop(self.update_matches(false, cx)); cx.notify(); } @@ -841,7 +842,7 @@ impl BufferSearchBar { editor::EditorEvent::Edited { .. } => { self.smartcase(cx); self.clear_matches(cx); - let search = self.update_matches(cx); + let search = self.update_matches(false, cx); let width = editor.update(cx, |editor, cx| { let text_layout_details = editor.text_layout_details(cx); @@ -879,7 +880,7 @@ impl BufferSearchBar { fn on_active_searchable_item_event(&mut self, event: &SearchEvent, cx: &mut ViewContext) { match event { SearchEvent::MatchesInvalidated => { - drop(self.update_matches(cx)); + drop(self.update_matches(false, cx)); } SearchEvent::ActiveMatchChanged => self.update_match_index(cx), } @@ -897,7 +898,7 @@ impl BufferSearchBar { if let Some(active_item) = self.active_searchable_item.as_mut() { self.selection_search_enabled = !self.selection_search_enabled; active_item.toggle_filtered_search_ranges(self.selection_search_enabled, cx); - drop(self.update_matches(cx)); + drop(self.update_matches(false, cx)); cx.notify(); } } @@ -937,7 +938,11 @@ impl BufferSearchBar { .extend(active_item_matches); } - fn update_matches(&mut self, cx: &mut ViewContext) -> oneshot::Receiver<()> { + fn update_matches( + &mut self, + reuse_existing_query: bool, + cx: &mut ViewContext, + ) -> oneshot::Receiver<()> { let (done_tx, done_rx) = oneshot::channel(); let query = self.query(cx); self.pending_search.take(); @@ -949,44 +954,51 @@ impl BufferSearchBar { let _ = done_tx.send(()); cx.notify(); } else { - let query: Arc<_> = if self.search_options.contains(SearchOptions::REGEX) { - match SearchQuery::regex( - query, - self.search_options.contains(SearchOptions::WHOLE_WORD), - self.search_options.contains(SearchOptions::CASE_SENSITIVE), - false, - Default::default(), - Default::default(), - None, - ) { - Ok(query) => query.with_replacement(self.replacement(cx)), - Err(_) => { - self.query_contains_error = true; - self.clear_active_searchable_item_matches(cx); - cx.notify(); - return done_rx; - } - } + let query: Arc<_> = if let Some(search) = + 
self.active_search.take().filter(|_| reuse_existing_query) + { + search } else { - match SearchQuery::text( - query, - self.search_options.contains(SearchOptions::WHOLE_WORD), - self.search_options.contains(SearchOptions::CASE_SENSITIVE), - false, - Default::default(), - Default::default(), - None, - ) { - Ok(query) => query.with_replacement(self.replacement(cx)), - Err(_) => { - self.query_contains_error = true; - self.clear_active_searchable_item_matches(cx); - cx.notify(); - return done_rx; + if self.search_options.contains(SearchOptions::REGEX) { + match SearchQuery::regex( + query, + self.search_options.contains(SearchOptions::WHOLE_WORD), + self.search_options.contains(SearchOptions::CASE_SENSITIVE), + false, + Default::default(), + Default::default(), + None, + ) { + Ok(query) => query.with_replacement(self.replacement(cx)), + Err(_) => { + self.query_contains_error = true; + self.clear_active_searchable_item_matches(cx); + cx.notify(); + return done_rx; + } + } + } else { + match SearchQuery::text( + query, + self.search_options.contains(SearchOptions::WHOLE_WORD), + self.search_options.contains(SearchOptions::CASE_SENSITIVE), + false, + Default::default(), + Default::default(), + None, + ) { + Ok(query) => query.with_replacement(self.replacement(cx)), + Err(_) => { + self.query_contains_error = true; + self.clear_active_searchable_item_matches(cx); + cx.notify(); + return done_rx; + } } } - } - .into(); + .into() + }; + self.active_search = Some(query.clone()); let query_text = query.as_str().to_string(); From 21be70f278acc2818c628bb0e8d33c8648655e34 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 24 Sep 2024 11:40:08 -0400 Subject: [PATCH 036/228] Improve diff hunks (#18283) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR improves the display of diff hunks: - Deleted hunks now show a regular line indicator in the gutter when expanded - The rounding on the diff indicators in the gutter has been removed. We also did some refactoring to ensure the sizing of the diff indicators in the gutter were consistent. #### Collapsed Screenshot 2024-09-24 at 11 13 26 AM #### Expanded Screenshot 2024-09-24 at 11 13 35 AM Release Notes: - Improved the appearance of diff hunks in the editor. 
--------- Co-authored-by: Max --- crates/editor/src/editor.rs | 5 -- crates/editor/src/element.rs | 120 +++++++++++++++++++++++++-------- crates/editor/src/git.rs | 4 +- crates/editor/src/hunk_diff.rs | 77 +++++++++------------ 4 files changed, 124 insertions(+), 82 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index dc536471023f0..a32910e78ab97 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -12473,11 +12473,6 @@ impl Editor { Some(gpui::Point::new(source_x, source_y)) } - fn gutter_bounds(&self) -> Option> { - let bounds = self.last_bounds?; - Some(element::gutter_bounds(bounds, self.gutter_dimensions)) - } - pub fn has_active_completions_menu(&self) -> bool { self.context_menu.read().as_ref().map_or(false, |menu| { menu.visible() && matches!(menu, ContextMenu::Completions(_)) diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index d4075431ff602..3be71aeefba94 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -1269,6 +1269,7 @@ impl EditorElement { line_height: Pixels, gutter_hitbox: &Hitbox, display_rows: Range, + anchor_range: Range, snapshot: &EditorSnapshot, cx: &mut WindowContext, ) -> Vec<(DisplayDiffHunk, Option)> { @@ -1289,30 +1290,84 @@ impl EditorElement { .git .git_gutter .unwrap_or_default(); - let display_hunks = buffer_snapshot - .git_diff_hunks_in_range(buffer_start_row..buffer_end_row) - .map(|hunk| diff_hunk_to_display(&hunk, snapshot)) - .dedup() - .map(|hunk| match git_gutter_setting { - GitGutterSetting::TrackedFiles => { - let hitbox = match hunk { - DisplayDiffHunk::Unfolded { .. } => { - let hunk_bounds = Self::diff_hunk_bounds( - snapshot, - line_height, - gutter_hitbox.bounds, - &hunk, - ); - Some(cx.insert_hitbox(hunk_bounds, true)) + + self.editor.update(cx, |editor, cx| { + let expanded_hunks = &editor.expanded_hunks.hunks; + let expanded_hunks_start_ix = expanded_hunks + .binary_search_by(|hunk| { + hunk.hunk_range + .end + .cmp(&anchor_range.start, &buffer_snapshot) + .then(Ordering::Less) + }) + .unwrap_err(); + let mut expanded_hunks = expanded_hunks[expanded_hunks_start_ix..].iter().peekable(); + + let display_hunks = buffer_snapshot + .git_diff_hunks_in_range(buffer_start_row..buffer_end_row) + .filter_map(|hunk| { + let mut display_hunk = diff_hunk_to_display(&hunk, snapshot); + + if let DisplayDiffHunk::Unfolded { + multi_buffer_range, + status, + .. + } = &mut display_hunk + { + let mut is_expanded = false; + while let Some(expanded_hunk) = expanded_hunks.peek() { + match expanded_hunk + .hunk_range + .start + .cmp(&multi_buffer_range.start, &buffer_snapshot) + { + Ordering::Less => { + expanded_hunks.next(); + } + Ordering::Equal => { + is_expanded = true; + break; + } + Ordering::Greater => { + break; + } + } } - DisplayDiffHunk::Folded { .. } => None, - }; - (hunk, hitbox) - } - GitGutterSetting::Hide => (hunk, None), - }) - .collect(); - display_hunks + match status { + DiffHunkStatus::Added => {} + DiffHunkStatus::Modified => {} + DiffHunkStatus::Removed => { + if is_expanded { + return None; + } + } + } + } + + Some(display_hunk) + }) + .dedup() + .map(|hunk| match git_gutter_setting { + GitGutterSetting::TrackedFiles => { + let hitbox = match hunk { + DisplayDiffHunk::Unfolded { .. } => { + let hunk_bounds = Self::diff_hunk_bounds( + snapshot, + line_height, + gutter_hitbox.bounds, + &hunk, + ); + Some(cx.insert_hitbox(hunk_bounds, true)) + } + DisplayDiffHunk::Folded { .. 
} => None, + }; + (hunk, hitbox) + } + GitGutterSetting::Hide => (hunk, None), + }) + .collect(); + display_hunks + }) } #[allow(clippy::too_many_arguments)] @@ -3187,7 +3242,7 @@ impl EditorElement { Some(( hunk_bounds, cx.theme().status().modified, - Corners::all(1. * line_height), + Corners::all(px(0.)), )) } DisplayDiffHunk::Unfolded { status, .. } => { @@ -3195,12 +3250,12 @@ impl EditorElement { DiffHunkStatus::Added => ( hunk_hitbox.bounds, cx.theme().status().created, - Corners::all(0.05 * line_height), + Corners::all(px(0.)), ), DiffHunkStatus::Modified => ( hunk_hitbox.bounds, cx.theme().status().modified, - Corners::all(0.05 * line_height), + Corners::all(px(0.)), ), DiffHunkStatus::Removed => ( Bounds::new( @@ -3244,7 +3299,7 @@ impl EditorElement { let start_y = display_row.as_f32() * line_height - scroll_top; let end_y = start_y + line_height; - let width = 0.275 * line_height; + let width = Self::diff_hunk_strip_width(line_height); let highlight_origin = gutter_bounds.origin + point(px(0.), start_y); let highlight_size = size(width, end_y - start_y); Bounds::new(highlight_origin, highlight_size) @@ -3277,7 +3332,7 @@ impl EditorElement { let start_y = start_row.as_f32() * line_height - scroll_top; let end_y = end_row_in_current_excerpt.as_f32() * line_height - scroll_top; - let width = 0.275 * line_height; + let width = Self::diff_hunk_strip_width(line_height); let highlight_origin = gutter_bounds.origin + point(px(0.), start_y); let highlight_size = size(width, end_y - start_y); Bounds::new(highlight_origin, highlight_size) @@ -3289,7 +3344,7 @@ impl EditorElement { let start_y = row.as_f32() * line_height - offset - scroll_top; let end_y = start_y + line_height; - let width = 0.35 * line_height; + let width = (0.35 * line_height).floor(); let highlight_origin = gutter_bounds.origin + point(px(0.), start_y); let highlight_size = size(width, end_y - start_y); Bounds::new(highlight_origin, highlight_size) @@ -3298,6 +3353,12 @@ impl EditorElement { } } + /// Returns the width of the diff strip that will be displayed in the gutter. + pub(super) fn diff_hunk_strip_width(line_height: Pixels) -> Pixels { + // We floor the value to prevent pixel rounding. 
+ (0.275 * line_height).floor() + } + fn paint_gutter_indicators(&self, layout: &mut EditorLayout, cx: &mut WindowContext) { cx.paint_layer(layout.gutter_hitbox.bounds, |cx| { cx.with_element_namespace("gutter_fold_toggles", |cx| { @@ -5158,6 +5219,7 @@ impl Element for EditorElement { line_height, &gutter_hitbox, start_row..end_row, + start_anchor..end_anchor, &snapshot, cx, ); diff --git a/crates/editor/src/git.rs b/crates/editor/src/git.rs index 79b78d5d14848..fb18ca45a2a2f 100644 --- a/crates/editor/src/git.rs +++ b/crates/editor/src/git.rs @@ -90,8 +90,8 @@ pub fn diff_hunk_to_display( let hunk_end_row = hunk.row_range.end.max(hunk.row_range.start); let hunk_end_point = Point::new(hunk_end_row.0, 0); - let multi_buffer_start = snapshot.buffer_snapshot.anchor_after(hunk_start_point); - let multi_buffer_end = snapshot.buffer_snapshot.anchor_before(hunk_end_point); + let multi_buffer_start = snapshot.buffer_snapshot.anchor_before(hunk_start_point); + let multi_buffer_end = snapshot.buffer_snapshot.anchor_after(hunk_end_point); let end = hunk_end_point.to_display_point(snapshot).row(); DisplayDiffHunk::Unfolded { diff --git a/crates/editor/src/hunk_diff.rs b/crates/editor/src/hunk_diff.rs index 917d07ec4ee85..90836cee51683 100644 --- a/crates/editor/src/hunk_diff.rs +++ b/crates/editor/src/hunk_diff.rs @@ -14,8 +14,8 @@ use multi_buffer::{ use settings::SettingsStore; use text::{BufferId, Point}; use ui::{ - div, h_flex, rems, v_flex, ActiveTheme, Context as _, ContextMenu, InteractiveElement, - IntoElement, ParentElement, Pixels, Styled, ViewContext, VisualContext, + prelude::*, ActiveTheme, ContextMenu, InteractiveElement, IntoElement, ParentElement, Pixels, + Styled, ViewContext, VisualContext, }; use util::{debug_panic, RangeExt}; @@ -38,7 +38,7 @@ pub(super) struct HoveredHunk { #[derive(Debug, Default)] pub(super) struct ExpandedHunks { - hunks: Vec, + pub(crate) hunks: Vec, diff_base: HashMap, hunk_update_tasks: HashMap, Task<()>>, } @@ -414,39 +414,22 @@ impl Editor { style: BlockStyle::Flex, disposition: BlockDisposition::Above, render: Box::new(move |cx| { - let Some(gutter_bounds) = editor.read(cx).gutter_bounds() else { - return div().into_any_element(); - }; - let (gutter_dimensions, hunk_bounds, close_button) = - editor.update(cx.context, |editor, cx| { - let editor_snapshot = editor.snapshot(cx); - let hunk_display_range = hunk - .multi_buffer_range - .clone() - .to_display_points(&editor_snapshot); - let gutter_dimensions = editor.gutter_dimensions; - let hunk_bounds = EditorElement::diff_hunk_bounds( - &editor_snapshot, - cx.line_height(), - gutter_bounds, - &DisplayDiffHunk::Unfolded { - diff_base_byte_range: hunk.diff_base_byte_range.clone(), - multi_buffer_range: hunk.multi_buffer_range.clone(), - display_row_range: hunk_display_range.start.row() - ..hunk_display_range.end.row(), - status: hunk.status, - }, - ); - - let close_button = editor.close_hunk_diff_button( - hunk.clone(), - hunk_display_range.start.row(), - cx, - ); - (gutter_dimensions, hunk_bounds, close_button) - }); - let click_editor = editor.clone(); - let clicked_hunk = hunk.clone(); + let width = EditorElement::diff_hunk_strip_width(cx.line_height()); + let gutter_dimensions = editor.read(cx.context).gutter_dimensions; + + let close_button = editor.update(cx.context, |editor, cx| { + let editor_snapshot = editor.snapshot(cx); + let hunk_display_range = hunk + .multi_buffer_range + .clone() + .to_display_points(&editor_snapshot); + editor.close_hunk_diff_button( + hunk.clone(), + 
hunk_display_range.start.row(), + cx, + ) + }); + h_flex() .id("gutter with editor") .bg(deleted_hunk_color) @@ -461,27 +444,29 @@ impl Editor { .child( h_flex() .id("gutter hunk") + .bg(cx.theme().status().deleted) .pl(gutter_dimensions.margin + gutter_dimensions .git_blame_entries_width .unwrap_or_default()) - .max_w(hunk_bounds.size.width) - .min_w(hunk_bounds.size.width) + .max_w(width) + .min_w(width) .size_full() .cursor(CursorStyle::PointingHand) .on_mouse_down(MouseButton::Left, { - let click_hunk = hunk.clone(); - move |e, cx| { - let modifiers = e.modifiers; + let editor = editor.clone(); + let hunk = hunk.clone(); + move |event, cx| { + let modifiers = event.modifiers; if modifiers.control || modifiers.platform { - click_editor.update(cx, |editor, cx| { - editor.toggle_hovered_hunk(&click_hunk, cx); + editor.update(cx, |editor, cx| { + editor.toggle_hovered_hunk(&hunk, cx); }); } else { - click_editor.update(cx, |editor, cx| { + editor.update(cx, |editor, cx| { editor.open_hunk_context_menu( - clicked_hunk.clone(), - e.position, + hunk.clone(), + event.position, cx, ); }); From 5e62bbfd29172c966c4a9e494d0063acdba639b9 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Tue, 24 Sep 2024 09:44:53 -0600 Subject: [PATCH 037/228] Run system npm directly (#18280) Release Notes: - N/A --- crates/node_runtime/src/node_runtime.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/crates/node_runtime/src/node_runtime.rs b/crates/node_runtime/src/node_runtime.rs index 72c74ce7cf983..9507eb7536485 100644 --- a/crates/node_runtime/src/node_runtime.rs +++ b/crates/node_runtime/src/node_runtime.rs @@ -538,11 +538,10 @@ impl NodeRuntimeTrait for SystemNodeRuntime { subcommand: &str, args: &[&str], ) -> anyhow::Result { - let mut command = Command::new(self.node.clone()); + let mut command = Command::new(self.npm.clone()); command .env_clear() .env("PATH", std::env::var_os("PATH").unwrap_or_default()) - .arg(self.npm.clone()) .arg(subcommand) .args(["--cache".into(), self.scratch_dir.join("cache")]) .args([ From 0e86ba0983bfa186030996c62e0163778665bfef Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Tue, 24 Sep 2024 10:13:53 -0600 Subject: [PATCH 038/228] Fix get_cached_binary for eslint (#18286) Release Notes: - Fixed running ESLint offline. --- crates/language/src/language.rs | 3 +- crates/languages/src/typescript.rs | 46 +++++++++++++----------------- 2 files changed, 22 insertions(+), 27 deletions(-) diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 166d846f86e8b..d70650cf44935 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -384,8 +384,9 @@ pub trait LspAdapter: 'static + Send + Sync { .await { log::info!( - "failed to fetch newest version of language server {:?}. falling back to using {:?}", + "failed to fetch newest version of language server {:?}. 
error: {:?}, falling back to using {:?}", self.name(), + error, prev_downloaded_binary.path ); binary = Ok(prev_downloaded_binary); diff --git a/crates/languages/src/typescript.rs b/crates/languages/src/typescript.rs index 25a97c8014d0a..b7eb21132d52c 100644 --- a/crates/languages/src/typescript.rs +++ b/crates/languages/src/typescript.rs @@ -297,7 +297,8 @@ pub struct EsLintLspAdapter { } impl EsLintLspAdapter { - const CURRENT_VERSION: &'static str = "release/2.4.4"; + const CURRENT_VERSION: &'static str = "2.4.4"; + const CURRENT_VERSION_TAG_NAME: &'static str = "release/2.4.4"; #[cfg(not(windows))] const GITHUB_ASSET_KIND: AssetKind = AssetKind::TarGz; @@ -313,6 +314,10 @@ impl EsLintLspAdapter { pub fn new(node: NodeRuntime) -> Self { EsLintLspAdapter { node } } + + fn build_destination_path(container_dir: &Path) -> PathBuf { + container_dir.join(format!("vscode-eslint-{}", Self::CURRENT_VERSION)) + } } #[async_trait(?Send)] @@ -413,7 +418,7 @@ impl LspAdapter for EsLintLspAdapter { ) -> Result> { let url = build_asset_url( "microsoft/vscode-eslint", - Self::CURRENT_VERSION, + Self::CURRENT_VERSION_TAG_NAME, Self::GITHUB_ASSET_KIND, )?; @@ -430,7 +435,7 @@ impl LspAdapter for EsLintLspAdapter { delegate: &dyn LspAdapterDelegate, ) -> Result { let version = version.downcast::().unwrap(); - let destination_path = container_dir.join(format!("vscode-eslint-{}", version.name)); + let destination_path = Self::build_destination_path(&container_dir); let server_path = destination_path.join(Self::SERVER_PATH); if fs::metadata(&server_path).await.is_err() { @@ -496,38 +501,27 @@ impl LspAdapter for EsLintLspAdapter { container_dir: PathBuf, _: &dyn LspAdapterDelegate, ) -> Option { - get_cached_eslint_server_binary(container_dir, &self.node).await + let server_path = + Self::build_destination_path(&container_dir).join(EsLintLspAdapter::SERVER_PATH); + Some(LanguageServerBinary { + path: self.node.binary_path().await.ok()?, + env: None, + arguments: eslint_server_binary_arguments(&server_path), + }) } async fn installation_test_binary( &self, container_dir: PathBuf, ) -> Option { - get_cached_eslint_server_binary(container_dir, &self.node).await - } -} - -async fn get_cached_eslint_server_binary( - container_dir: PathBuf, - node: &NodeRuntime, -) -> Option { - maybe!(async { - // This is unfortunate but we don't know what the version is to build a path directly - let mut dir = fs::read_dir(&container_dir).await?; - let first = dir.next().await.ok_or(anyhow!("missing first file"))??; - if !first.file_type().await?.is_dir() { - return Err(anyhow!("First entry is not a directory")); - } - let server_path = first.path().join(EsLintLspAdapter::SERVER_PATH); - - Ok(LanguageServerBinary { - path: node.binary_path().await?, + let server_path = + Self::build_destination_path(&container_dir).join(EsLintLspAdapter::SERVER_PATH); + Some(LanguageServerBinary { + path: self.node.binary_path().await.ok()?, env: None, arguments: eslint_server_binary_arguments(&server_path), }) - }) - .await - .log_err() + } } #[cfg(target_os = "windows")] From b69c6ee7dfdcffd94024491776c68189a9e9d6ea Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Tue, 24 Sep 2024 10:17:43 -0600 Subject: [PATCH 039/228] Exclude initialization failed errors from slack (#18232) Release Notes: - N/A --- crates/collab/src/api/events.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/crates/collab/src/api/events.rs b/crates/collab/src/api/events.rs index f8ae53201304f..1632c2d798ef1 100644 --- a/crates/collab/src/api/events.rs +++ 
b/crates/collab/src/api/events.rs @@ -369,6 +369,10 @@ fn report_to_slack(panic: &Panic) -> bool { return false; } + if panic.payload.contains("ERROR_INITIALIZATION_FAILED") { + return false; + } + if panic .payload .contains("GPU has crashed, and no debug information is available") From 4a4d8c1cabcf8f898cdba496e08b2b27a4386922 Mon Sep 17 00:00:00 2001 From: "Sergio C." Date: Tue, 24 Sep 2024 13:21:57 -0300 Subject: [PATCH 040/228] vim: Add ability to spawn multicursors at beginning/end of line (#18183) Closes #17842 Release Notes: - Added the ability to spawn multiple cursors through the g-A and g-I motions while in visual select mode. --- assets/keymaps/vim.json | 2 + crates/vim/src/visual.rs | 85 +++++++++++++++++++++++++++++++++++++++- 2 files changed, 86 insertions(+), 1 deletion(-) diff --git a/assets/keymaps/vim.json b/assets/keymaps/vim.json index 8d933f19afb1d..6656ea0ddf22c 100644 --- a/assets/keymaps/vim.json +++ b/assets/keymaps/vim.json @@ -292,6 +292,8 @@ "g ctrl-x": ["vim::Decrement", { "step": true }], "shift-i": "vim::InsertBefore", "shift-a": "vim::InsertAfter", + "g I": "vim::VisualInsertFirstNonWhiteSpace", + "g A": "vim::VisualInsertEndOfLine", "shift-j": "vim::JoinLines", "r": ["vim::PushOperator", "Replace"], "ctrl-c": ["vim::SwitchMode", "Normal"], diff --git a/crates/vim/src/visual.rs b/crates/vim/src/visual.rs index 55dc7885200b0..1503eaac1b6b4 100644 --- a/crates/vim/src/visual.rs +++ b/crates/vim/src/visual.rs @@ -15,7 +15,7 @@ use util::ResultExt; use workspace::searchable::Direction; use crate::{ - motion::{start_of_line, Motion}, + motion::{first_non_whitespace, next_line_end, start_of_line, Motion}, object::Object, state::{Mode, Operator}, Vim, @@ -37,6 +37,8 @@ actions!( SelectNextMatch, SelectPreviousMatch, RestoreVisualSelection, + VisualInsertEndOfLine, + VisualInsertFirstNonWhiteSpace, ] ); @@ -51,6 +53,8 @@ pub fn register(editor: &mut Editor, cx: &mut ViewContext) { vim.toggle_mode(Mode::VisualBlock, cx) }); Vim::action(editor, cx, Vim::other_end); + Vim::action(editor, cx, Vim::visual_insert_end_of_line); + Vim::action(editor, cx, Vim::visual_insert_first_non_white_space); Vim::action(editor, cx, |vim, _: &VisualDelete, cx| { vim.record_current_action(cx); vim.visual_delete(false, cx); @@ -374,6 +378,39 @@ impl Vim { } } + fn visual_insert_end_of_line(&mut self, _: &VisualInsertEndOfLine, cx: &mut ViewContext) { + self.update_editor(cx, |_, editor, cx| { + editor.split_selection_into_lines(&Default::default(), cx); + editor.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.move_cursors_with(|map, cursor, _| { + (next_line_end(map, cursor, 1), SelectionGoal::None) + }); + }); + }); + + self.switch_mode(Mode::Insert, false, cx); + } + + fn visual_insert_first_non_white_space( + &mut self, + _: &VisualInsertFirstNonWhiteSpace, + cx: &mut ViewContext, + ) { + self.update_editor(cx, |_, editor, cx| { + editor.split_selection_into_lines(&Default::default(), cx); + editor.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.move_cursors_with(|map, cursor, _| { + ( + first_non_whitespace(map, false, cursor), + SelectionGoal::None, + ) + }); + }); + }); + + self.switch_mode(Mode::Insert, false, cx); + } + fn toggle_mode(&mut self, mode: Mode, cx: &mut ViewContext) { if self.mode == mode { self.switch_mode(Mode::Normal, false, cx); @@ -714,6 +751,52 @@ mod test { ˇ"}); } + #[gpui::test] + async fn test_visual_insert_first_non_whitespace(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + + cx.set_state( + indoc! 
{ + "«The quick brown + fox jumps over + the lazy dogˇ»" + }, + Mode::Visual, + ); + cx.simulate_keystrokes("g I"); + cx.assert_state( + indoc! { + "ˇThe quick brown + ˇfox jumps over + ˇthe lazy dog" + }, + Mode::Insert, + ); + } + + #[gpui::test] + async fn test_visual_insert_end_of_line(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + + cx.set_state( + indoc! { + "«The quick brown + fox jumps over + the lazy dogˇ»" + }, + Mode::Visual, + ); + cx.simulate_keystrokes("g A"); + cx.assert_state( + indoc! { + "The quick brownˇ + fox jumps overˇ + the lazy dogˇ" + }, + Mode::Insert, + ); + } + #[gpui::test] async fn test_enter_visual_line_mode(cx: &mut gpui::TestAppContext) { let mut cx = NeovimBackedTestContext::new(cx).await; From fd07fef4dbc475381706c338e9600bd099af162d Mon Sep 17 00:00:00 2001 From: ClanEver <562211524@qq.com> Date: Wed, 25 Sep 2024 00:27:29 +0800 Subject: [PATCH 041/228] Fix proxy settings retrieval on startup (#18171) Closes https://github.com/zed-industries/zed/issues/18155 Release Notes: - N/A --- crates/zed/src/main.rs | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 309931f616352..e3fe2baefa9b6 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -443,6 +443,8 @@ fn main() { AppCommitSha::set_global(AppCommitSha(build_sha.into()), cx); } settings::init(cx); + handle_settings_file_changes(user_settings_file_rx, cx, handle_settings_changed); + handle_keymap_file_changes(user_keymap_file_rx, cx, handle_keymap_changed); client::init_settings(cx); let user_agent = format!( "Zed/{} ({}; {})", @@ -470,9 +472,6 @@ fn main() { OpenListener::set_global(cx, open_listener.clone()); - handle_settings_file_changes(user_settings_file_rx, cx, handle_settings_changed); - handle_keymap_file_changes(user_keymap_file_rx, cx, handle_keymap_changed); - let client = Client::production(cx); cx.set_http_client(client.http_client().clone()); let mut languages = LanguageRegistry::new(cx.background_executor().clone()); From 8a7ef4db59668f9dda841a9550abd1cc78651fa3 Mon Sep 17 00:00:00 2001 From: John Cummings Date: Tue, 24 Sep 2024 11:17:17 -0600 Subject: [PATCH 042/228] ollama: Add max tokens for qwen2.5-coder (#18290) --- crates/ollama/src/ollama.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/crates/ollama/src/ollama.rs b/crates/ollama/src/ollama.rs index 51c4829048814..e592bfa17717d 100644 --- a/crates/ollama/src/ollama.rs +++ b/crates/ollama/src/ollama.rs @@ -82,7 +82,8 @@ fn get_max_tokens(name: &str) -> usize { "llama3" | "gemma2" | "gemma" | "codegemma" | "starcoder" | "aya" => 8192, "codellama" | "starcoder2" => 16384, "mistral" | "codestral" | "mixstral" | "llava" | "qwen2" | "dolphin-mixtral" => 32768, - "llama3.1" | "phi3" | "phi3.5" | "command-r" | "deepseek-coder-v2" | "yi-coder" => 128000, + "llama3.1" | "phi3" | "phi3.5" | "command-r" | "deepseek-coder-v2" | "yi-coder" + | "qwen2.5-coder" => 128000, _ => DEFAULT_TOKENS, } .clamp(1, MAXIMUM_TOKENS) From f39e54decc613314b1302cfbb1d581a056a6eb68 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Tue, 24 Sep 2024 12:23:39 -0600 Subject: [PATCH 043/228] Start work on reload buffers (#18245) Release Notes: - Fixed: ssh-remote reload buffers --------- Co-authored-by: Mikayla --- crates/project/src/buffer_store.rs | 309 +++++++++++++----- crates/project/src/project.rs | 139 +------- crates/remote_server/src/headless_project.rs | 3 +- .../remote_server/src/remote_editing_tests.rs | 82 
++++- 4 files changed, 325 insertions(+), 208 deletions(-) diff --git a/crates/project/src/buffer_store.rs b/crates/project/src/buffer_store.rs index 0045aba2e89ec..b69679d6ac3b3 100644 --- a/crates/project/src/buffer_store.rs +++ b/crates/project/src/buffer_store.rs @@ -14,7 +14,10 @@ use gpui::{ }; use http_client::Url; use language::{ - proto::{deserialize_line_ending, deserialize_version, serialize_version, split_operations}, + proto::{ + deserialize_line_ending, deserialize_version, serialize_line_ending, serialize_version, + split_operations, + }, Buffer, BufferEvent, Capability, File as _, Language, Operation, }; use rpc::{proto, AnyProtoClient, ErrorExt as _, TypedEnvelope}; @@ -29,9 +32,8 @@ use worktree::{ /// A set of open buffers. pub struct BufferStore { - downstream_client: Option, - remote_id: Option, - #[allow(unused)] + state: BufferStoreState, + downstream_client: Option<(AnyProtoClient, u64)>, worktree_store: Model, opened_buffers: HashMap, local_buffer_ids_by_path: HashMap, @@ -44,12 +46,11 @@ pub struct BufferStore { loading_remote_buffers_by_id: HashMap>, remote_buffer_listeners: HashMap, anyhow::Error>>>>, - shared_buffers: HashMap>, + shared_buffers: HashMap>>, } enum OpenBuffer { - Strong(Model), - Weak(WeakModel), + Buffer(WeakModel), Operations(Vec), } @@ -62,6 +63,15 @@ pub enum BufferStoreEvent { }, } +enum BufferStoreState { + Remote { + shared_with_me: HashSet>, + upstream_client: AnyProtoClient, + project_id: u64, + }, + Local {}, +} + #[derive(Default, Debug)] pub struct ProjectTransaction(pub HashMap, language::Transaction>); @@ -75,17 +85,36 @@ impl BufferStore { client.add_model_message_handler(Self::handle_update_diff_base); client.add_model_request_handler(Self::handle_save_buffer); client.add_model_request_handler(Self::handle_blame_buffer); + client.add_model_request_handler(Self::handle_reload_buffers); } /// Creates a buffer store, optionally retaining its buffers. - /// - /// If `retain_buffers` is `true`, then buffers are owned by the buffer store - /// and won't be released unless they are explicitly removed, or `retain_buffers` - /// is set to `false` via `set_retain_buffers`. Otherwise, buffers are stored as - /// weak handles. 
- pub fn new( + pub fn local(worktree_store: Model, cx: &mut ModelContext) -> Self { + cx.subscribe(&worktree_store, |this, _, event, cx| { + if let WorktreeStoreEvent::WorktreeAdded(worktree) = event { + this.subscribe_to_worktree(worktree, cx); + } + }) + .detach(); + + Self { + state: BufferStoreState::Local {}, + downstream_client: None, + worktree_store, + opened_buffers: Default::default(), + remote_buffer_listeners: Default::default(), + loading_remote_buffers_by_id: Default::default(), + local_buffer_ids_by_path: Default::default(), + local_buffer_ids_by_entry_id: Default::default(), + loading_buffers_by_path: Default::default(), + shared_buffers: Default::default(), + } + } + + pub fn remote( worktree_store: Model, - remote_id: Option, + upstream_client: AnyProtoClient, + remote_id: u64, cx: &mut ModelContext, ) -> Self { cx.subscribe(&worktree_store, |this, _, event, cx| { @@ -96,7 +125,11 @@ impl BufferStore { .detach(); Self { - remote_id, + state: BufferStoreState::Remote { + shared_with_me: Default::default(), + upstream_client, + project_id: remote_id, + }, downstream_client: None, worktree_store, opened_buffers: Default::default(), @@ -288,16 +321,14 @@ impl BufferStore { buffer.set_diff_base(diff_base.clone(), cx); buffer.remote_id().to_proto() }); - if let Some(project_id) = this.remote_id { - if let Some(client) = &this.downstream_client { - client - .send(proto::UpdateDiffBase { - project_id, - buffer_id, - diff_base, - }) - .log_err(); - } + if let Some((client, project_id)) = &this.downstream_client { + client + .send(proto::UpdateDiffBase { + project_id: *project_id, + buffer_id, + diff_base, + }) + .log_err(); } } }) @@ -496,8 +527,8 @@ impl BufferStore { let new_file = save.await?; let mtime = new_file.mtime; this.update(&mut cx, |this, cx| { - if let Some(downstream_client) = this.downstream_client.as_ref() { - let project_id = this.remote_id.unwrap_or(0); + if let Some((downstream_client, project_id)) = this.downstream_client.as_ref() { + let project_id = *project_id; if has_changed_file { downstream_client .send(proto::UpdateBufferFile { @@ -620,11 +651,7 @@ impl BufferStore { fn add_buffer(&mut self, buffer: Model, cx: &mut ModelContext) -> Result<()> { let remote_id = buffer.read(cx).remote_id(); let is_remote = buffer.read(cx).replica_id() != 0; - let open_buffer = if self.remote_id.is_some() { - OpenBuffer::Strong(buffer.clone()) - } else { - OpenBuffer::Weak(buffer.downgrade()) - }; + let open_buffer = OpenBuffer::Buffer(buffer.downgrade()); let handle = cx.handle().downgrade(); buffer.update(cx, move |_, cx| { @@ -768,8 +795,7 @@ impl BufferStore { } pub fn disconnected_from_host(&mut self, cx: &mut AppContext) { - self.downstream_client.take(); - self.set_remote_id(None, cx); + self.drop_unnecessary_buffers(cx); for buffer in self.buffers() { buffer.update(cx, |buffer, cx| { @@ -786,32 +812,20 @@ impl BufferStore { &mut self, remote_id: u64, downstream_client: AnyProtoClient, - cx: &mut AppContext, + _cx: &mut AppContext, ) { - self.downstream_client = Some(downstream_client); - self.set_remote_id(Some(remote_id), cx); + self.downstream_client = Some((downstream_client, remote_id)); } pub fn unshared(&mut self, _cx: &mut ModelContext) { - self.remote_id.take(); + self.downstream_client.take(); + self.forget_shared_buffers(); } - fn set_remote_id(&mut self, remote_id: Option, cx: &mut AppContext) { - self.remote_id = remote_id; + fn drop_unnecessary_buffers(&mut self, cx: &mut AppContext) { for open_buffer in self.opened_buffers.values_mut() { - if 
remote_id.is_some() { - if let OpenBuffer::Weak(buffer) = open_buffer { - if let Some(buffer) = buffer.upgrade() { - *open_buffer = OpenBuffer::Strong(buffer); - } - } - } else { - if let Some(buffer) = open_buffer.upgrade() { - buffer.update(cx, |buffer, _| buffer.give_up_waiting()); - } - if let OpenBuffer::Strong(buffer) = open_buffer { - *open_buffer = OpenBuffer::Weak(buffer.downgrade()); - } + if let Some(buffer) = open_buffer.upgrade() { + buffer.update(cx, |buffer, _| buffer.give_up_waiting()); } } } @@ -881,8 +895,26 @@ impl BufferStore { event: &BufferEvent, cx: &mut ModelContext, ) { - if event == &BufferEvent::FileHandleChanged { - self.buffer_changed_file(buffer, cx); + match event { + BufferEvent::FileHandleChanged => { + self.buffer_changed_file(buffer, cx); + } + BufferEvent::Reloaded => { + let Some((downstream_client, project_id)) = self.downstream_client.as_ref() else { + return; + }; + let buffer = buffer.read(cx); + downstream_client + .send(proto::BufferReloaded { + project_id: *project_id, + buffer_id: buffer.remote_id().to_proto(), + version: serialize_version(&buffer.version()), + mtime: buffer.saved_mtime().map(|t| t.into()), + line_ending: serialize_line_ending(buffer.line_ending()) as i32, + }) + .log_err(); + } + _ => {} } } @@ -986,16 +1018,14 @@ impl BufferStore { } } - if let Some(project_id) = self.remote_id { - if let Some(client) = &self.downstream_client { - client - .send(proto::UpdateBufferFile { - project_id, - buffer_id: buffer_id.to_proto(), - file: Some(new_file.to_proto(cx)), - }) - .ok(); - } + if let Some((client, project_id)) = &self.downstream_client { + client + .send(proto::UpdateBufferFile { + project_id: *project_id, + buffer_id: buffer_id.to_proto(), + file: Some(new_file.to_proto(cx)), + }) + .ok(); } buffer.file_updated(Arc::new(new_file), cx); @@ -1050,11 +1080,8 @@ impl BufferStore { this.update(&mut cx, |this, cx| { match this.opened_buffers.entry(buffer_id) { hash_map::Entry::Occupied(mut e) => match e.get_mut() { - OpenBuffer::Strong(buffer) => { - buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx)); - } OpenBuffer::Operations(operations) => operations.extend_from_slice(&ops), - OpenBuffer::Weak(buffer) => { + OpenBuffer::Buffer(buffer) => { if let Some(buffer) = buffer.upgrade() { buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx)); } @@ -1090,7 +1117,7 @@ impl BufferStore { self.shared_buffers .entry(guest_id) .or_default() - .insert(buffer_id); + .insert(buffer.clone()); let buffer = buffer.read(cx); response.buffers.push(proto::BufferVersion { @@ -1230,6 +1257,19 @@ impl BufferStore { } } else if chunk.is_last { self.loading_remote_buffers_by_id.remove(&buffer_id); + // retain buffers sent by peers to avoid races. + match &mut self.state { + BufferStoreState::Remote { + ref mut shared_with_me, + upstream_client, + .. 
+ } => { + if upstream_client.is_via_collab() { + shared_with_me.insert(buffer.clone()); + } + } + _ => {} + } self.add_buffer(buffer, cx)?; } } @@ -1303,7 +1343,10 @@ impl BufferStore { let (buffer, project_id) = this.update(&mut cx, |this, _| { anyhow::Ok(( this.get_existing(buffer_id)?, - this.remote_id.context("project is not shared")?, + this.downstream_client + .as_ref() + .map(|(_, project_id)| *project_id) + .context("project is not shared")?, )) })??; buffer @@ -1340,12 +1383,14 @@ impl BufferStore { let peer_id = envelope.sender_id; let buffer_id = BufferId::new(envelope.payload.buffer_id)?; this.update(&mut cx, |this, _| { - if let Some(shared) = this.shared_buffers.get_mut(&peer_id) { - if shared.remove(&buffer_id) { - if shared.is_empty() { - this.shared_buffers.remove(&peer_id); + if let Some(buffer) = this.get(buffer_id) { + if let Some(shared) = this.shared_buffers.get_mut(&peer_id) { + if shared.remove(&buffer) { + if shared.is_empty() { + this.shared_buffers.remove(&peer_id); + } + return; } - return; } }; debug_panic!( @@ -1429,6 +1474,98 @@ impl BufferStore { } } + pub fn reload_buffers( + &self, + buffers: HashSet>, + push_to_history: bool, + cx: &mut ModelContext, + ) -> Task> { + let mut local_buffers = Vec::new(); + let mut remote_buffers = Vec::new(); + for buffer_handle in buffers { + let buffer = buffer_handle.read(cx); + if buffer.is_dirty() { + if let Some(file) = File::from_dyn(buffer.file()) { + if file.is_local() { + local_buffers.push(buffer_handle); + } else { + remote_buffers.push(buffer_handle); + } + } + } + } + + let client = self.upstream_client(); + + cx.spawn(move |this, mut cx| async move { + let mut project_transaction = ProjectTransaction::default(); + if let Some((client, project_id)) = client { + let response = client + .request(proto::ReloadBuffers { + project_id, + buffer_ids: remote_buffers + .iter() + .filter_map(|buffer| { + buffer + .update(&mut cx, |buffer, _| buffer.remote_id().into()) + .ok() + }) + .collect(), + }) + .await? + .transaction + .ok_or_else(|| anyhow!("missing transaction"))?; + BufferStore::deserialize_project_transaction( + this, + response, + push_to_history, + cx.clone(), + ) + .await?; + } + + for buffer in local_buffers { + let transaction = buffer + .update(&mut cx, |buffer, cx| buffer.reload(cx))? 
+ .await?; + buffer.update(&mut cx, |buffer, cx| { + if let Some(transaction) = transaction { + if !push_to_history { + buffer.forget_transaction(transaction.id); + } + project_transaction.0.insert(cx.handle(), transaction); + } + })?; + } + + Ok(project_transaction) + }) + } + + async fn handle_reload_buffers( + this: Model, + envelope: TypedEnvelope, + mut cx: AsyncAppContext, + ) -> Result { + let sender_id = envelope.original_sender_id().unwrap_or_default(); + let reload = this.update(&mut cx, |this, cx| { + let mut buffers = HashSet::default(); + for buffer_id in &envelope.payload.buffer_ids { + let buffer_id = BufferId::new(*buffer_id)?; + buffers.insert(this.get_existing(buffer_id)?); + } + Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx)) + })??; + + let project_transaction = reload.await?; + let project_transaction = this.update(&mut cx, |this, cx| { + this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx) + })?; + Ok(proto::ReloadBuffersResponse { + transaction: Some(project_transaction), + }) + } + pub fn create_buffer_for_peer( &mut self, buffer: &Model, @@ -1440,12 +1577,12 @@ impl BufferStore { .shared_buffers .entry(peer_id) .or_default() - .insert(buffer_id) + .insert(buffer.clone()) { return Task::ready(Ok(())); } - let Some((client, project_id)) = self.downstream_client.clone().zip(self.remote_id) else { + let Some((client, project_id)) = self.downstream_client.clone() else { return Task::ready(Ok(())); }; @@ -1492,6 +1629,17 @@ impl BufferStore { }) } + pub fn upstream_client(&self) -> Option<(AnyProtoClient, u64)> { + match &self.state { + BufferStoreState::Remote { + upstream_client, + project_id, + .. + } => Some((upstream_client.clone(), *project_id)), + BufferStoreState::Local { .. } => None, + } + } + pub fn forget_shared_buffers(&mut self) { self.shared_buffers.clear(); } @@ -1506,7 +1654,7 @@ impl BufferStore { } } - pub fn shared_buffers(&self) -> &HashMap> { + pub fn shared_buffers(&self) -> &HashMap>> { &self.shared_buffers } @@ -1572,8 +1720,7 @@ impl BufferStore { impl OpenBuffer { fn upgrade(&self) -> Option> { match self { - OpenBuffer::Strong(handle) => Some(handle.clone()), - OpenBuffer::Weak(handle) => handle.upgrade(), + OpenBuffer::Buffer(handle) => handle.upgrade(), OpenBuffer::Operations(_) => None, } } diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 199b5a8f5c575..454a7586c8856 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -42,10 +42,7 @@ use gpui::{ use itertools::Itertools; use language::{ language_settings::InlayHintKind, - proto::{ - deserialize_anchor, serialize_anchor, serialize_line_ending, serialize_version, - split_operations, - }, + proto::{deserialize_anchor, serialize_anchor, split_operations}, Buffer, BufferEvent, CachedLspAdapter, Capability, CodeLabel, ContextProvider, DiagnosticEntry, Documentation, File as _, Language, LanguageRegistry, LanguageServerName, PointUtf16, ToOffset, ToPointUtf16, Transaction, Unclipped, @@ -559,7 +556,6 @@ impl Project { client.add_model_message_handler(Self::handle_unshare_project); client.add_model_request_handler(Self::handle_update_buffer); client.add_model_message_handler(Self::handle_update_worktree); - client.add_model_request_handler(Self::handle_reload_buffers); client.add_model_request_handler(Self::handle_synchronize_buffers); client.add_model_request_handler(Self::handle_search_project); @@ -599,8 +595,7 @@ impl Project { cx.subscribe(&worktree_store, Self::on_worktree_store_event) 
.detach(); - let buffer_store = - cx.new_model(|cx| BufferStore::new(worktree_store.clone(), None, cx)); + let buffer_store = cx.new_model(|cx| BufferStore::local(worktree_store.clone(), cx)); cx.subscribe(&buffer_store, Self::on_buffer_store_event) .detach(); @@ -695,8 +690,14 @@ impl Project { cx.subscribe(&worktree_store, Self::on_worktree_store_event) .detach(); - let buffer_store = - cx.new_model(|cx| BufferStore::new(worktree_store.clone(), None, cx)); + let buffer_store = cx.new_model(|cx| { + BufferStore::remote( + worktree_store.clone(), + ssh.clone().into(), + SSH_PROJECT_ID, + cx, + ) + }); cx.subscribe(&buffer_store, Self::on_buffer_store_event) .detach(); @@ -851,8 +852,9 @@ impl Project { .map(DevServerProjectId), ) })?; - let buffer_store = - cx.new_model(|cx| BufferStore::new(worktree_store.clone(), Some(remote_id), cx))?; + let buffer_store = cx.new_model(|cx| { + BufferStore::remote(worktree_store.clone(), client.clone().into(), remote_id, cx) + })?; let lsp_store = cx.new_model(|cx| { let mut lsp_store = LspStore::new_remote( @@ -2167,23 +2169,6 @@ impl Project { .ok(); } - BufferEvent::Reloaded => { - if self.is_local_or_ssh() { - if let Some(project_id) = self.remote_id() { - let buffer = buffer.read(cx); - self.client - .send(proto::BufferReloaded { - project_id, - buffer_id: buffer.remote_id().to_proto(), - version: serialize_version(&buffer.version()), - mtime: buffer.saved_mtime().map(|t| t.into()), - line_ending: serialize_line_ending(buffer.line_ending()) as i32, - }) - .log_err(); - } - } - } - _ => {} } @@ -2347,67 +2332,8 @@ impl Project { push_to_history: bool, cx: &mut ModelContext, ) -> Task> { - let mut local_buffers = Vec::new(); - let mut remote_buffers = None; - for buffer_handle in buffers { - let buffer = buffer_handle.read(cx); - if buffer.is_dirty() { - if let Some(file) = File::from_dyn(buffer.file()) { - if file.is_local() { - local_buffers.push(buffer_handle); - } else { - remote_buffers.get_or_insert(Vec::new()).push(buffer_handle); - } - } - } - } - - let remote_buffers = self.remote_id().zip(remote_buffers); - let client = self.client.clone(); - - cx.spawn(move |this, mut cx| async move { - let mut project_transaction = ProjectTransaction::default(); - - if let Some((project_id, remote_buffers)) = remote_buffers { - let response = client - .request(proto::ReloadBuffers { - project_id, - buffer_ids: remote_buffers - .iter() - .filter_map(|buffer| { - buffer - .update(&mut cx, |buffer, _| buffer.remote_id().into()) - .ok() - }) - .collect(), - }) - .await? - .transaction - .ok_or_else(|| anyhow!("missing transaction"))?; - BufferStore::deserialize_project_transaction( - this.read_with(&cx, |this, _| this.buffer_store.downgrade())?, - response, - push_to_history, - cx.clone(), - ) - .await?; - } - - for buffer in local_buffers { - let transaction = buffer - .update(&mut cx, |buffer, cx| buffer.reload(cx))? - .await?; - buffer.update(&mut cx, |buffer, cx| { - if let Some(transaction) = transaction { - if !push_to_history { - buffer.forget_transaction(transaction.id); - } - project_transaction.0.insert(cx.handle(), transaction); - } - })?; - } - - Ok(project_transaction) + self.buffer_store.update(cx, |buffer_store, cx| { + buffer_store.reload_buffers(buffers, push_to_history, cx) }) } @@ -3589,30 +3515,6 @@ impl Project { })? 
} - async fn handle_reload_buffers( - this: Model, - envelope: TypedEnvelope, - mut cx: AsyncAppContext, - ) -> Result { - let sender_id = envelope.original_sender_id()?; - let reload = this.update(&mut cx, |this, cx| { - let mut buffers = HashSet::default(); - for buffer_id in &envelope.payload.buffer_ids { - let buffer_id = BufferId::new(*buffer_id)?; - buffers.insert(this.buffer_store.read(cx).get_existing(buffer_id)?); - } - Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx)) - })??; - - let project_transaction = reload.await?; - let project_transaction = this.update(&mut cx, |this, cx| { - this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx) - })?; - Ok(proto::ReloadBuffersResponse { - transaction: Some(project_transaction), - }) - } - async fn handle_synchronize_buffers( this: Model, envelope: TypedEnvelope, @@ -3896,17 +3798,6 @@ impl Project { })? } - fn serialize_project_transaction_for_peer( - &mut self, - project_transaction: ProjectTransaction, - peer_id: proto::PeerId, - cx: &mut AppContext, - ) -> proto::ProjectTransaction { - self.buffer_store.update(cx, |buffer_store, cx| { - buffer_store.serialize_project_transaction_for_peer(project_transaction, peer_id, cx) - }) - } - fn create_buffer_for_peer( &mut self, buffer: &Model, diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index 043f7e95ee026..0af0d6bb1570d 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -50,8 +50,7 @@ impl HeadlessProject { store }); let buffer_store = cx.new_model(|cx| { - let mut buffer_store = - BufferStore::new(worktree_store.clone(), Some(SSH_PROJECT_ID), cx); + let mut buffer_store = BufferStore::local(worktree_store.clone(), cx); buffer_store.shared(SSH_PROJECT_ID, session.clone().into(), cx); buffer_store }); diff --git a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs index 18eb12b445b97..eca65f1349845 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -7,6 +7,7 @@ use http_client::FakeHttpClient; use language::{ language_settings::{all_language_settings, AllLanguageSettings}, Buffer, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageRegistry, LanguageServerName, + LineEnding, }; use lsp::{CompletionContext, CompletionResponse, CompletionTriggerKind}; use node_runtime::NodeRuntime; @@ -18,7 +19,10 @@ use remote::SshSession; use serde_json::json; use settings::{Settings, SettingsLocation, SettingsStore}; use smol::stream::StreamExt; -use std::{path::Path, sync::Arc}; +use std::{ + path::{Path, PathBuf}, + sync::Arc, +}; #[gpui::test] async fn test_basic_remote_editing(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { @@ -440,6 +444,54 @@ async fn test_remote_lsp(cx: &mut TestAppContext, server_cx: &mut TestAppContext }) } +#[gpui::test] +async fn test_remote_reload(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { + let (project, _headless, fs) = init_test(cx, server_cx).await; + let (worktree, _) = project + .update(cx, |project, cx| { + project.find_or_create_worktree("/code/project1", true, cx) + }) + .await + .unwrap(); + + let worktree_id = cx.update(|cx| worktree.read(cx).id()); + + let buffer = project + .update(cx, |project, cx| { + project.open_buffer((worktree_id, Path::new("src/lib.rs")), cx) + }) + .await + .unwrap(); + buffer.update(cx, |buffer, cx| { + buffer.edit([(0..0, "a")], None, 
cx); + }); + + fs.save( + &PathBuf::from("/code/project1/src/lib.rs"), + &("bloop".to_string().into()), + LineEnding::Unix, + ) + .await + .unwrap(); + + cx.run_until_parked(); + cx.update(|cx| { + assert!(buffer.read(cx).has_conflict()); + }); + + project + .update(cx, |project, cx| { + project.reload_buffers([buffer.clone()].into_iter().collect(), false, cx) + }) + .await + .unwrap(); + cx.run_until_parked(); + + cx.update(|cx| { + assert!(!buffer.read(cx).has_conflict()); + }); +} + #[gpui::test] async fn test_remote_resolve_file_path(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { let (project, _headless, _fs) = init_test(cx, server_cx).await; @@ -483,6 +535,34 @@ async fn test_remote_resolve_file_path(cx: &mut TestAppContext, server_cx: &mut ); } +#[gpui::test(iterations = 10)] +async fn test_canceling_buffer_opening(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { + let (project, _headless, _fs) = init_test(cx, server_cx).await; + let (worktree, _) = project + .update(cx, |project, cx| { + project.find_or_create_worktree("/code/project1", true, cx) + }) + .await + .unwrap(); + let worktree_id = worktree.read_with(cx, |tree, _| tree.id()); + + // Open a buffer on the client but cancel after a random amount of time. + let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, "src/lib.rs"), cx)); + cx.executor().simulate_random_delay().await; + drop(buffer); + + // Try opening the same buffer again as the client, and ensure we can + // still do it despite the cancellation above. + let buffer = project + .update(cx, |p, cx| p.open_buffer((worktree_id, "src/lib.rs"), cx)) + .await + .unwrap(); + + buffer.read_with(cx, |buf, _| { + assert_eq!(buf.text(), "fn one() -> usize { 1 }") + }); +} + fn init_logger() { if std::env::var("RUST_LOG").is_ok() { env_logger::try_init().ok(); From 96068584362a43ef4dd20055f5c8b3c9710066fd Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 24 Sep 2024 12:08:22 -0700 Subject: [PATCH 044/228] Fix unnecessarily-specific struct pattern in rust outline query (#18297) Fixes https://github.com/zed-industries/zed/issues/18294 Release Notes: - Fixed a recent regression where tuple and unit structs were omitted from the outline view in Rust (#18294). --- crates/languages/src/rust/outline.scm | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/crates/languages/src/rust/outline.scm b/crates/languages/src/rust/outline.scm index 98892ce93cfb3..3012995e2a7f2 100644 --- a/crates/languages/src/rust/outline.scm +++ b/crates/languages/src/rust/outline.scm @@ -4,8 +4,7 @@ (struct_item (visibility_modifier)? @context "struct" @context - name: (_) @name - body: (_ "{" @open (_)* "}" @close)) @item + name: (_) @name) @item (enum_item (visibility_modifier)? @context From 87ac4cff60c05bac9d118df25b13987df6f1a5bd Mon Sep 17 00:00:00 2001 From: "Joseph T. 
Lyons" Date: Tue, 24 Sep 2024 15:42:26 -0400 Subject: [PATCH 045/228] Update close_stale_issues configuration (#18298) @notpeter and I decided on these things: - Give users 10 days to respond - Only run once a week: Tuesday @7AM ET Release Notes: - N/A --- .github/workflows/close_stale_issues.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/close_stale_issues.yml b/.github/workflows/close_stale_issues.yml index 1f287fb5e3d4b..2d4085524bb8d 100644 --- a/.github/workflows/close_stale_issues.yml +++ b/.github/workflows/close_stale_issues.yml @@ -1,7 +1,7 @@ name: "Close Stale Issues" on: schedule: - - cron: "0 1 * * *" + - cron: "0 11 * * 2" workflow_dispatch: jobs: @@ -14,7 +14,7 @@ jobs: stale-issue-message: > Hi there! 👋 - We're working to clean up our issue tracker by closing older issues that might not be relevant anymore. Are you able to reproduce this issue in the latest version of Zed? If so, please let us know by commenting on this issue and we will keep it open; otherwise, we'll close it in a week. Feel free to open a new issue if you're seeing this message after the issue has been closed. + We're working to clean up our issue tracker by closing older issues that might not be relevant anymore. Are you able to reproduce this issue in the latest version of Zed? If so, please let us know by commenting on this issue and we will keep it open; otherwise, we'll close it in 10 days. Feel free to open a new issue if you're seeing this message after the issue has been closed. Thanks for your help! close-issue-message: "This issue was closed due to inactivity; feel free to open a new issue if you're still experiencing this problem!" @@ -23,7 +23,7 @@ jobs: # 'community' to 'zed' repository. The migration added activity to all # issues, preventing 365 days from working until then. days-before-stale: 180 - days-before-close: 7 + days-before-close: 10 any-of-issue-labels: "defect,panic / crash" operations-per-run: 1000 ascending: true From 692590bff435da3bdb7a0bd1bf9f139c3bdc6eb1 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 24 Sep 2024 15:44:55 -0400 Subject: [PATCH 046/228] collab: Fix GitHub user retrieval in seed script (#18296) This PR fixes the GitHub user retrieval in the database seed script. The users returned from the [list users](https://docs.github.com/en/rest/users/users?apiVersion=2022-11-28#list-users) endpoint don't have a `created_at` timestamp, so we need to fetch them individually. I want to rework this further at a later date, this is just a bandaid to get things working again. Release Notes: - N/A --- crates/collab/src/db/queries/users.rs | 6 ++++ crates/collab/src/seed.rs | 43 +++++++++++++++++++++++---- 2 files changed, 44 insertions(+), 5 deletions(-) diff --git a/crates/collab/src/db/queries/users.rs b/crates/collab/src/db/queries/users.rs index b755476e338b6..4443d751542b5 100644 --- a/crates/collab/src/db/queries/users.rs +++ b/crates/collab/src/db/queries/users.rs @@ -298,6 +298,12 @@ impl Database { result } + /// Returns all feature flags. + pub async fn list_feature_flags(&self) -> Result> { + self.transaction(|tx| async move { Ok(feature_flag::Entity::find().all(&*tx).await?) }) + .await + } + /// Creates a new feature flag. 
pub async fn create_user_flag(&self, flag: &str, enabled_for_all: bool) -> Result { self.transaction(|tx| async move { diff --git a/crates/collab/src/seed.rs b/crates/collab/src/seed.rs index 15aa9d159183f..035d58109b596 100644 --- a/crates/collab/src/seed.rs +++ b/crates/collab/src/seed.rs @@ -16,13 +16,23 @@ struct GithubUser { created_at: DateTime, } +/// A GitHub user returned from the [List users](https://docs.github.com/en/rest/users/users?apiVersion=2022-11-28#list-users) endpoint. +/// +/// Notably, this data type does not have the `created_at` field. +#[derive(Debug, Deserialize)] +struct ListGithubUser { + id: i32, + login: String, + email: Option, +} + #[derive(Deserialize)] struct SeedConfig { - // Which users to create as admins. + /// Which users to create as admins. admins: Vec, - // Which channels to create (all admins are invited to all channels) + /// Which channels to create (all admins are invited to all channels). channels: Vec, - // Number of random users to create from the Github API + /// Number of random users to create from the Github API. number_of_users: Option, } @@ -47,11 +57,21 @@ pub async fn seed(config: &Config, db: &Database, force: bool) -> anyhow::Result let flag_names = ["remoting", "language-models"]; let mut flags = Vec::new(); + let existing_feature_flags = db.list_feature_flags().await?; + for flag_name in flag_names { + if existing_feature_flags + .iter() + .any(|flag| flag.flag == flag_name) + { + log::info!("Flag {flag_name:?} already exists"); + continue; + } + let flag = db .create_user_flag(flag_name, false) .await - .unwrap_or_else(|_| panic!("failed to create flag: '{flag_name}'")); + .unwrap_or_else(|err| panic!("failed to create flag: '{flag_name}': {err}")); flags.push(flag); } @@ -121,9 +141,19 @@ pub async fn seed(config: &Config, db: &Database, force: bool) -> anyhow::Result if let Some(last_user_id) = last_user_id { write!(&mut uri, "&since={}", last_user_id).unwrap(); } - let users = fetch_github::>(&client, &uri).await; + let users = fetch_github::>(&client, &uri).await; for github_user in users { + log::info!("Seeding {:?} from GitHub", github_user.login); + + // Fetch the user to get their `created_at` timestamp, since it + // isn't on the list response. + let github_user: GithubUser = fetch_github( + &client, + &format!("https://api.github.com/user/{}", github_user.id), + ) + .await; + last_user_id = Some(github_user.id); user_count += 1; let user = db @@ -143,6 +173,9 @@ pub async fn seed(config: &Config, db: &Database, force: bool) -> anyhow::Result flag, user.id ))?; } + + // Sleep to avoid getting rate-limited by GitHub. + tokio::time::sleep(std::time::Duration::from_millis(250)).await; } } } From d2ffad0f34322d9a332860e566f63aec01a83fe7 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 24 Sep 2024 16:35:09 -0400 Subject: [PATCH 047/228] collab: Seed GitHub users from static data (#18301) This PR updates the collab seed script to seed the GitHub users from a set of static data. This removes the need to hit the GitHub API to retrieve these users. 
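For reference, the core of the new seeding path is roughly the following (a condensed sketch of the code added to `crates/collab/src/seed.rs` in this patch; error handling and the per-user feature-flag loop are omitted):

```rust
// Read the static user list that now ships with the repo and upsert each entry.
// Condensed from the seed.rs changes below; not a drop-in replacement.
let github_users_filepath = seed_path.parent().unwrap().join("seed/github_users.json");
let github_users: Vec<GithubUser> =
    serde_json::from_str(&fs::read_to_string(github_users_filepath)?)?;

for github_user in github_users {
    log::info!("Seeding {:?} from GitHub", github_user.login);

    db.get_or_create_user_by_github_account(
        &github_user.login,
        github_user.id,
        github_user.email.as_deref(),
        github_user.created_at,
        None,
    )
    .await
    .expect("failed to insert user");
}
```

Because the data is static, the script no longer needs pagination, rate-limit sleeps, or the extra per-user GitHub request to recover `created_at`.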
Release Notes: - N/A --- crates/collab/README.md | 3 +- crates/collab/seed.default.json | 3 +- crates/collab/seed/github_users.json | 602 +++++++++++++++++++++++++++ crates/collab/src/seed.rs | 89 ++-- 4 files changed, 630 insertions(+), 67 deletions(-) create mode 100644 crates/collab/seed/github_users.json diff --git a/crates/collab/README.md b/crates/collab/README.md index 345e82aefed78..5aa964ee792fe 100644 --- a/crates/collab/README.md +++ b/crates/collab/README.md @@ -23,8 +23,7 @@ To use a different set of admin users, create `crates/collab/seed.json`. ```json { "admins": ["yourgithubhere"], - "channels": ["zed"], - "number_of_users": 20 + "channels": ["zed"] } ``` diff --git a/crates/collab/seed.default.json b/crates/collab/seed.default.json index 1abec644beed9..dee924e103d62 100644 --- a/crates/collab/seed.default.json +++ b/crates/collab/seed.default.json @@ -8,6 +8,5 @@ "JosephTLyons", "rgbkrk" ], - "channels": ["zed"], - "number_of_users": 100 + "channels": ["zed"] } diff --git a/crates/collab/seed/github_users.json b/crates/collab/seed/github_users.json new file mode 100644 index 0000000000000..88acd6aa54a70 --- /dev/null +++ b/crates/collab/seed/github_users.json @@ -0,0 +1,602 @@ +[ + { + "id": 1, + "login": "mojombo", + "email": "tom@mojombo.com", + "created_at": "2007-10-20T05:24:19Z" + }, + { + "id": 2, + "login": "defunkt", + "email": null, + "created_at": "2007-10-20T05:24:19Z" + }, + { + "id": 3, + "login": "pjhyett", + "email": "pj@hyett.com", + "created_at": "2008-01-07T17:54:22Z" + }, + { + "id": 4, + "login": "wycats", + "email": "wycats@gmail.com", + "created_at": "2008-01-12T05:38:33Z" + }, + { + "id": 5, + "login": "ezmobius", + "email": null, + "created_at": "2008-01-12T07:51:46Z" + }, + { + "id": 6, + "login": "ivey", + "email": "ivey@gweezlebur.com", + "created_at": "2008-01-12T15:15:00Z" + }, + { + "id": 7, + "login": "evanphx", + "email": "evan@phx.io", + "created_at": "2008-01-12T16:46:24Z" + }, + { + "id": 17, + "login": "vanpelt", + "email": "vanpelt@wandb.com", + "created_at": "2008-01-13T05:57:18Z" + }, + { + "id": 18, + "login": "wayneeseguin", + "email": "wayneeseguin@gmail.com", + "created_at": "2008-01-13T06:02:21Z" + }, + { + "id": 19, + "login": "brynary", + "email": null, + "created_at": "2008-01-13T10:19:47Z" + }, + { + "id": 20, + "login": "kevinclark", + "email": "kevin.clark@gmail.com", + "created_at": "2008-01-13T18:33:26Z" + }, + { + "id": 21, + "login": "technoweenie", + "email": "technoweenie@hey.com", + "created_at": "2008-01-14T04:33:35Z" + }, + { + "id": 22, + "login": "macournoyer", + "email": "macournoyer@gmail.com", + "created_at": "2008-01-14T10:49:35Z" + }, + { + "id": 23, + "login": "takeo", + "email": "toby@takeo.email", + "created_at": "2008-01-14T11:25:49Z" + }, + { + "id": 25, + "login": "caged", + "email": "encytemedia@gmail.com", + "created_at": "2008-01-15T04:47:24Z" + }, + { + "id": 26, + "login": "topfunky", + "email": null, + "created_at": "2008-01-15T05:40:05Z" + }, + { + "id": 27, + "login": "anotherjesse", + "email": "anotherjesse@gmail.com", + "created_at": "2008-01-15T07:49:30Z" + }, + { + "id": 28, + "login": "roland", + "email": null, + "created_at": "2008-01-15T08:12:51Z" + }, + { + "id": 29, + "login": "lukas", + "email": "lukas@wandb.com", + "created_at": "2008-01-15T12:50:02Z" + }, + { + "id": 30, + "login": "fanvsfan", + "email": null, + "created_at": "2008-01-15T14:15:23Z" + }, + { + "id": 31, + "login": "tomtt", + "email": null, + "created_at": "2008-01-15T15:44:31Z" + }, + { + "id": 32, + "login": 
"railsjitsu", + "email": null, + "created_at": "2008-01-16T04:57:23Z" + }, + { + "id": 34, + "login": "nitay", + "email": null, + "created_at": "2008-01-18T14:09:11Z" + }, + { + "id": 35, + "login": "kevwil", + "email": null, + "created_at": "2008-01-19T05:50:12Z" + }, + { + "id": 36, + "login": "KirinDave", + "email": null, + "created_at": "2008-01-19T08:01:02Z" + }, + { + "id": 37, + "login": "jamesgolick", + "email": "jamesgolick@gmail.com", + "created_at": "2008-01-19T22:52:30Z" + }, + { + "id": 38, + "login": "atmos", + "email": "atmos@atmos.org", + "created_at": "2008-01-22T09:14:11Z" + }, + { + "id": 44, + "login": "errfree", + "email": null, + "created_at": "2008-01-24T02:08:37Z" + }, + { + "id": 45, + "login": "mojodna", + "email": null, + "created_at": "2008-01-24T04:40:22Z" + }, + { + "id": 46, + "login": "bmizerany", + "email": "blake.mizerany@gmail.com", + "created_at": "2008-01-24T04:44:30Z" + }, + { + "id": 47, + "login": "jnewland", + "email": "jesse@jnewland.com", + "created_at": "2008-01-25T02:28:12Z" + }, + { + "id": 48, + "login": "joshknowles", + "email": "joshknowles@gmail.com", + "created_at": "2008-01-25T21:30:42Z" + }, + { + "id": 49, + "login": "hornbeck", + "email": "hornbeck@gmail.com", + "created_at": "2008-01-25T21:49:23Z" + }, + { + "id": 50, + "login": "jwhitmire", + "email": "jeff@jwhitmire.com", + "created_at": "2008-01-25T22:07:48Z" + }, + { + "id": 51, + "login": "elbowdonkey", + "email": null, + "created_at": "2008-01-25T22:08:20Z" + }, + { + "id": 52, + "login": "reinh", + "email": null, + "created_at": "2008-01-25T22:16:29Z" + }, + { + "id": 53, + "login": "knzai", + "email": "git@knz.ai", + "created_at": "2008-01-25T22:33:10Z" + }, + { + "id": 68, + "login": "bs", + "email": "yap@bri.tt", + "created_at": "2008-01-27T01:46:29Z" + }, + { + "id": 69, + "login": "rsanheim", + "email": null, + "created_at": "2008-01-27T07:09:47Z" + }, + { + "id": 70, + "login": "schacon", + "email": "schacon@gmail.com", + "created_at": "2008-01-27T17:19:28Z" + }, + { + "id": 71, + "login": "uggedal", + "email": null, + "created_at": "2008-01-27T22:18:57Z" + }, + { + "id": 72, + "login": "bruce", + "email": "brwcodes@gmail.com", + "created_at": "2008-01-28T07:16:45Z" + }, + { + "id": 73, + "login": "sam", + "email": "ssmoot@gmail.com", + "created_at": "2008-01-28T19:01:26Z" + }, + { + "id": 74, + "login": "mmower", + "email": "self@mattmower.com", + "created_at": "2008-01-28T19:47:50Z" + }, + { + "id": 75, + "login": "abhay", + "email": null, + "created_at": "2008-01-28T21:08:23Z" + }, + { + "id": 76, + "login": "rabble", + "email": "evan@protest.net", + "created_at": "2008-01-28T23:27:02Z" + }, + { + "id": 77, + "login": "benburkert", + "email": "ben@benburkert.com", + "created_at": "2008-01-28T23:44:14Z" + }, + { + "id": 78, + "login": "indirect", + "email": "andre@arko.net", + "created_at": "2008-01-29T07:59:27Z" + }, + { + "id": 79, + "login": "fearoffish", + "email": "me@fearof.fish", + "created_at": "2008-01-29T08:43:10Z" + }, + { + "id": 80, + "login": "ry", + "email": "ry@tinyclouds.org", + "created_at": "2008-01-29T08:50:34Z" + }, + { + "id": 81, + "login": "engineyard", + "email": null, + "created_at": "2008-01-29T09:51:30Z" + }, + { + "id": 82, + "login": "jsierles", + "email": null, + "created_at": "2008-01-29T11:10:25Z" + }, + { + "id": 83, + "login": "tweibley", + "email": null, + "created_at": "2008-01-29T13:52:07Z" + }, + { + "id": 84, + "login": "peimei", + "email": "james@railsjitsu.com", + "created_at": "2008-01-29T15:44:11Z" + }, + { + "id": 85, + 
"login": "brixen", + "email": "brixen@gmail.com", + "created_at": "2008-01-29T16:47:55Z" + }, + { + "id": 87, + "login": "tmornini", + "email": null, + "created_at": "2008-01-29T18:43:39Z" + }, + { + "id": 88, + "login": "outerim", + "email": "lee@outerim.com", + "created_at": "2008-01-29T18:48:32Z" + }, + { + "id": 89, + "login": "daksis", + "email": null, + "created_at": "2008-01-29T19:18:16Z" + }, + { + "id": 90, + "login": "sr", + "email": "me@simonrozet.com", + "created_at": "2008-01-29T20:37:53Z" + }, + { + "id": 91, + "login": "lifo", + "email": null, + "created_at": "2008-01-29T23:09:30Z" + }, + { + "id": 92, + "login": "rsl", + "email": "sconds@gmail.com", + "created_at": "2008-01-29T23:13:36Z" + }, + { + "id": 93, + "login": "imownbey", + "email": null, + "created_at": "2008-01-29T23:13:44Z" + }, + { + "id": 94, + "login": "dylanegan", + "email": null, + "created_at": "2008-01-29T23:15:18Z" + }, + { + "id": 95, + "login": "jm", + "email": "jeremymcanally@gmail.com", + "created_at": "2008-01-29T23:15:32Z" + }, + { + "id": 100, + "login": "kmarsh", + "email": "kevin.marsh@gmail.com", + "created_at": "2008-01-29T23:48:24Z" + }, + { + "id": 101, + "login": "jvantuyl", + "email": "jayson@aggressive.ly", + "created_at": "2008-01-30T01:11:50Z" + }, + { + "id": 102, + "login": "BrianTheCoder", + "email": "wbsmith83@gmail.com", + "created_at": "2008-01-30T02:22:32Z" + }, + { + "id": 103, + "login": "freeformz", + "email": "freeformz@gmail.com", + "created_at": "2008-01-30T06:19:57Z" + }, + { + "id": 104, + "login": "hassox", + "email": "dneighman@gmail.com", + "created_at": "2008-01-30T06:31:06Z" + }, + { + "id": 105, + "login": "automatthew", + "email": "automatthew@gmail.com", + "created_at": "2008-01-30T19:00:58Z" + }, + { + "id": 106, + "login": "queso", + "email": "Joshua.owens@gmail.com", + "created_at": "2008-01-30T19:48:45Z" + }, + { + "id": 107, + "login": "lancecarlson", + "email": null, + "created_at": "2008-01-30T19:53:29Z" + }, + { + "id": 108, + "login": "drnic", + "email": "drnicwilliams@gmail.com", + "created_at": "2008-01-30T23:19:18Z" + }, + { + "id": 109, + "login": "lukesutton", + "email": null, + "created_at": "2008-01-31T04:01:02Z" + }, + { + "id": 110, + "login": "danwrong", + "email": null, + "created_at": "2008-01-31T08:51:31Z" + }, + { + "id": 111, + "login": "HamptonMakes", + "email": "hampton@hamptoncatlin.com", + "created_at": "2008-01-31T17:03:51Z" + }, + { + "id": 112, + "login": "jfrost", + "email": null, + "created_at": "2008-01-31T22:14:27Z" + }, + { + "id": 113, + "login": "mattetti", + "email": null, + "created_at": "2008-01-31T22:56:31Z" + }, + { + "id": 114, + "login": "ctennis", + "email": "c@leb.tennis", + "created_at": "2008-01-31T23:43:14Z" + }, + { + "id": 115, + "login": "lawrencepit", + "email": "lawrence.pit@gmail.com", + "created_at": "2008-01-31T23:57:16Z" + }, + { + "id": 116, + "login": "marcjeanson", + "email": "github@marcjeanson.com", + "created_at": "2008-02-01T01:27:19Z" + }, + { + "id": 117, + "login": "grempe", + "email": null, + "created_at": "2008-02-01T04:12:42Z" + }, + { + "id": 118, + "login": "peterc", + "email": "git@peterc.org", + "created_at": "2008-02-02T01:00:36Z" + }, + { + "id": 119, + "login": "ministrycentered", + "email": null, + "created_at": "2008-02-02T03:50:26Z" + }, + { + "id": 120, + "login": "afarnham", + "email": null, + "created_at": "2008-02-02T05:11:03Z" + }, + { + "id": 121, + "login": "up_the_irons", + "email": null, + "created_at": "2008-02-02T10:59:51Z" + }, + { + "id": 122, + "login": "cristibalan", + 
"email": "cristibalan@gmail.com", + "created_at": "2008-02-02T11:29:45Z" + }, + { + "id": 123, + "login": "heavysixer", + "email": null, + "created_at": "2008-02-02T15:06:53Z" + }, + { + "id": 124, + "login": "brosner", + "email": "brosner@gmail.com", + "created_at": "2008-02-02T19:03:54Z" + }, + { + "id": 125, + "login": "danielmorrison", + "email": "daniel@collectiveidea.com", + "created_at": "2008-02-02T19:46:35Z" + }, + { + "id": 126, + "login": "danielharan", + "email": "chebuctonian@gmail.com", + "created_at": "2008-02-02T21:42:21Z" + }, + { + "id": 127, + "login": "kvnsmth", + "email": null, + "created_at": "2008-02-02T22:00:03Z" + }, + { + "id": 128, + "login": "collectiveidea", + "email": "info@collectiveidea.com", + "created_at": "2008-02-02T22:34:46Z" + }, + { + "id": 129, + "login": "canadaduane", + "email": "duane.johnson@gmail.com", + "created_at": "2008-02-02T23:25:39Z" + }, + { + "id": 130, + "login": "corasaurus-hex", + "email": "cora@sutton.me", + "created_at": "2008-02-03T04:20:22Z" + }, + { + "id": 131, + "login": "dstrelau", + "email": null, + "created_at": "2008-02-03T14:59:12Z" + }, + { + "id": 132, + "login": "sunny", + "email": "sunny@sunfox.org", + "created_at": "2008-02-03T15:43:43Z" + }, + { + "id": 133, + "login": "dkubb", + "email": "github@dan.kubb.ca", + "created_at": "2008-02-03T20:40:13Z" + }, + { + "id": 134, + "login": "jnicklas", + "email": "jonas@jnicklas.com", + "created_at": "2008-02-03T20:43:50Z" + }, + { + "id": 135, + "login": "richcollins", + "email": "richcollins@gmail.com", + "created_at": "2008-02-03T21:11:25Z" + } +] diff --git a/crates/collab/src/seed.rs b/crates/collab/src/seed.rs index 035d58109b596..5de6515ae3ac8 100644 --- a/crates/collab/src/seed.rs +++ b/crates/collab/src/seed.rs @@ -4,10 +4,13 @@ use anyhow::Context; use chrono::{DateTime, Utc}; use db::Database; use serde::{de::DeserializeOwned, Deserialize}; -use std::{fmt::Write, fs, path::Path}; +use std::{fs, path::Path}; use crate::Config; +/// A GitHub user. +/// +/// This representation corresponds to the entries in the `seed/github_users.json` file. #[derive(Debug, Deserialize)] struct GithubUser { id: i32, @@ -16,24 +19,12 @@ struct GithubUser { created_at: DateTime, } -/// A GitHub user returned from the [List users](https://docs.github.com/en/rest/users/users?apiVersion=2022-11-28#list-users) endpoint. -/// -/// Notably, this data type does not have the `created_at` field. -#[derive(Debug, Deserialize)] -struct ListGithubUser { - id: i32, - login: String, - email: Option, -} - #[derive(Deserialize)] struct SeedConfig { /// Which users to create as admins. admins: Vec, /// Which channels to create (all admins are invited to all channels). channels: Vec, - /// Number of random users to create from the Github API. - number_of_users: Option, } pub async fn seed(config: &Config, db: &Database, force: bool) -> anyhow::Result<()> { @@ -126,57 +117,29 @@ pub async fn seed(config: &Config, db: &Database, force: bool) -> anyhow::Result } } - // TODO: Fix this later - if let Some(number_of_users) = seed_config.number_of_users { - // Fetch 100 other random users from GitHub and insert them into the database - // (for testing autocompleters, etc.) 
- let mut user_count = db - .get_all_users(0, 200) + let github_users_filepath = seed_path.parent().unwrap().join("seed/github_users.json"); + let github_users: Vec = + serde_json::from_str(&fs::read_to_string(github_users_filepath)?)?; + + for github_user in github_users { + log::info!("Seeding {:?} from GitHub", github_user.login); + + let user = db + .get_or_create_user_by_github_account( + &github_user.login, + github_user.id, + github_user.email.as_deref(), + github_user.created_at, + None, + ) .await - .expect("failed to load users from db") - .len(); - let mut last_user_id = None; - while user_count < number_of_users { - let mut uri = "https://api.github.com/users?per_page=100".to_string(); - if let Some(last_user_id) = last_user_id { - write!(&mut uri, "&since={}", last_user_id).unwrap(); - } - let users = fetch_github::>(&client, &uri).await; - - for github_user in users { - log::info!("Seeding {:?} from GitHub", github_user.login); - - // Fetch the user to get their `created_at` timestamp, since it - // isn't on the list response. - let github_user: GithubUser = fetch_github( - &client, - &format!("https://api.github.com/user/{}", github_user.id), - ) - .await; - - last_user_id = Some(github_user.id); - user_count += 1; - let user = db - .get_or_create_user_by_github_account( - &github_user.login, - github_user.id, - github_user.email.as_deref(), - github_user.created_at, - None, - ) - .await - .expect("failed to insert user"); - - for flag in &flags { - db.add_user_flag(user.id, *flag).await.context(format!( - "Unable to enable flag '{}' for user '{}'", - flag, user.id - ))?; - } - - // Sleep to avoid getting rate-limited by GitHub. - tokio::time::sleep(std::time::Duration::from_millis(250)).await; - } + .expect("failed to insert user"); + + for flag in &flags { + db.add_user_flag(user.id, *flag).await.context(format!( + "Unable to enable flag '{}' for user '{}'", + flag, user.id + ))?; } } From 2d71c36ad3ec7b4b000c6144089c7a2294d0a19c Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Wed, 25 Sep 2024 00:29:56 +0300 Subject: [PATCH 048/228] Allow clearning activity indicators on click (#18305) All indicators without the click action are now could be hidden with a click. Sometimes, I see a few language server updates statuses get stuck due to npm desperately attempting to access its registry (3 times per each package, with the timeout a bit under 1 minute per each request). So, while the message seems stuck, npm desperately tries to do some work in the background. https://docs.npmjs.com/cli/v10/using-npm/config has options for timeouts & retries for __package fetching__ but that does not include the actual __registry access attempts__. It's unclear how to proceed with npm on this case now, but at least we should allow hiding these redundant messages. 
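The change below implements this by giving the previously inert "downloading" / "checking for updates" statuses an `on_click` handler that drops the affected statuses, and by folding the server names into the displayed message. As a rough sketch of that pattern — `Indicator`, `Status`, `Content`, `join_names`, and `downloading_content` are simplified stand-ins invented for illustration, not the real `ActivityIndicator`/gpui types — the shape is roughly:

```rust
use std::sync::Arc;

#[derive(Clone, PartialEq)]
struct LanguageServerName(Arc<str>);

struct Status {
    name: LanguageServerName,
}

struct Indicator {
    statuses: Vec<Status>,
}

struct Content {
    message: String,
    // `None` used to mean "clicking does nothing"; now most statuses get a handler.
    on_click: Option<Arc<dyn Fn(&mut Indicator)>>,
}

/// Join server names into "a, b, c", mirroring the fold used in the diff.
fn join_names(names: &[LanguageServerName]) -> String {
    names
        .iter()
        .map(|name| name.0.as_ref())
        .fold(String::new(), |mut acc, s| {
            if !acc.is_empty() {
                acc.push_str(", ");
            }
            acc.push_str(s);
            acc
        })
}

fn downloading_content(downloading: Vec<LanguageServerName>) -> Content {
    let message = format!("Downloading {}...", join_names(&downloading));
    // Clicking now removes the stuck statuses instead of doing nothing.
    let on_click: Arc<dyn Fn(&mut Indicator)> = Arc::new(move |indicator| {
        indicator
            .statuses
            .retain(|status| !downloading.contains(&status.name));
    });
    Content {
        message,
        on_click: Some(on_click),
    }
}

fn main() {
    let name = LanguageServerName(Arc::from("rust-analyzer"));
    let mut indicator = Indicator {
        statuses: vec![Status { name: name.clone() }],
    };
    let content = downloading_content(vec![name]);
    println!("{}", content.message);
    if let Some(on_click) = content.on_click.as_deref() {
        on_click(&mut indicator); // simulate the user clicking the indicator
    }
    assert!(indicator.statuses.is_empty());
}
```

In the actual diff the handlers also call `dismiss_error_message`, and the fold replaces the earlier `join(", ")` because the status lists now hold `LanguageServerName` values rather than borrowed strings.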
Release Notes: - Improved activity indicators' UX by allowing more of them to be hidden on click --- .../src/activity_indicator.rs | 72 +++++++++++++++---- 1 file changed, 59 insertions(+), 13 deletions(-) diff --git a/crates/activity_indicator/src/activity_indicator.rs b/crates/activity_indicator/src/activity_indicator.rs index fee0ef73f7bee..52e6acc393d29 100644 --- a/crates/activity_indicator/src/activity_indicator.rs +++ b/crates/activity_indicator/src/activity_indicator.rs @@ -227,10 +227,10 @@ impl ActivityIndicator { for status in &self.statuses { match status.status { LanguageServerBinaryStatus::CheckingForUpdate => { - checking_for_update.push(status.name.0.as_ref()) + checking_for_update.push(status.name.clone()) } - LanguageServerBinaryStatus::Downloading => downloading.push(status.name.0.as_ref()), - LanguageServerBinaryStatus::Failed { .. } => failed.push(status.name.0.as_ref()), + LanguageServerBinaryStatus::Downloading => downloading.push(status.name.clone()), + LanguageServerBinaryStatus::Failed { .. } => failed.push(status.name.clone()), LanguageServerBinaryStatus::None => {} } } @@ -242,8 +242,24 @@ impl ActivityIndicator { .size(IconSize::Small) .into_any_element(), ), - message: format!("Downloading {}...", downloading.join(", "),), - on_click: None, + message: format!( + "Downloading {}...", + downloading.iter().map(|name| name.0.as_ref()).fold( + String::new(), + |mut acc, s| { + if !acc.is_empty() { + acc.push_str(", "); + } + acc.push_str(s); + acc + } + ) + ), + on_click: Some(Arc::new(move |this, cx| { + this.statuses + .retain(|status| !downloading.contains(&status.name)); + this.dismiss_error_message(&DismissErrorMessage, cx) + })), }); } @@ -256,9 +272,22 @@ impl ActivityIndicator { ), message: format!( "Checking for updates to {}...", - checking_for_update.join(", "), + checking_for_update.iter().map(|name| name.0.as_ref()).fold( + String::new(), + |mut acc, s| { + if !acc.is_empty() { + acc.push_str(", "); + } + acc.push_str(s); + acc + } + ), ), - on_click: None, + on_click: Some(Arc::new(move |this, cx| { + this.statuses + .retain(|status| !checking_for_update.contains(&status.name)); + this.dismiss_error_message(&DismissErrorMessage, cx) + })), }); } @@ -271,7 +300,16 @@ impl ActivityIndicator { ), message: format!( "Failed to download {}. 
Click to show error.", - failed.join(", "), + failed + .iter() + .map(|name| name.0.as_ref()) + .fold(String::new(), |mut acc, s| { + if !acc.is_empty() { + acc.push_str(", "); + } + acc.push_str(s); + acc + }), ), on_click: Some(Arc::new(|this, cx| { this.show_error_message(&Default::default(), cx) @@ -304,7 +342,9 @@ impl ActivityIndicator { .into_any_element(), ), message: "Checking for Zed updates…".to_string(), - on_click: None, + on_click: Some(Arc::new(|this, cx| { + this.dismiss_error_message(&DismissErrorMessage, cx) + })), }), AutoUpdateStatus::Downloading => Some(Content { icon: Some( @@ -313,7 +353,9 @@ impl ActivityIndicator { .into_any_element(), ), message: "Downloading Zed update…".to_string(), - on_click: None, + on_click: Some(Arc::new(|this, cx| { + this.dismiss_error_message(&DismissErrorMessage, cx) + })), }), AutoUpdateStatus::Installing => Some(Content { icon: Some( @@ -322,7 +364,9 @@ impl ActivityIndicator { .into_any_element(), ), message: "Installing Zed update…".to_string(), - on_click: None, + on_click: Some(Arc::new(|this, cx| { + this.dismiss_error_message(&DismissErrorMessage, cx) + })), }), AutoUpdateStatus::Updated { binary_path } => Some(Content { icon: None, @@ -342,7 +386,7 @@ impl ActivityIndicator { ), message: "Auto update failed".to_string(), on_click: Some(Arc::new(|this, cx| { - this.dismiss_error_message(&Default::default(), cx) + this.dismiss_error_message(&DismissErrorMessage, cx) })), }), AutoUpdateStatus::Idle => None, @@ -360,7 +404,9 @@ impl ActivityIndicator { .into_any_element(), ), message: format!("Updating {extension_id} extension…"), - on_click: None, + on_click: Some(Arc::new(|this, cx| { + this.dismiss_error_message(&DismissErrorMessage, cx) + })), }); } } From 5045f984a90dd26ba0c0e2fc8ce6cbab70ba5b75 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Tue, 24 Sep 2024 15:37:09 -0600 Subject: [PATCH 049/228] Tidy up some broken menu items (#18306) Release Notes: - ssh-remoting: Don't show "reveal in finder" in menu --- crates/outline_panel/src/outline_panel.rs | 6 ++++-- crates/project_panel/src/project_panel.rs | 9 ++++++--- 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/crates/outline_panel/src/outline_panel.rs b/crates/outline_panel/src/outline_panel.rs index 73570dd5afc77..da66ca40313d8 100644 --- a/crates/outline_panel/src/outline_panel.rs +++ b/crates/outline_panel/src/outline_panel.rs @@ -3906,9 +3906,11 @@ impl Render for OutlinePanel { .on_action(cx.listener(Self::toggle_active_editor_pin)) .on_action(cx.listener(Self::unfold_directory)) .on_action(cx.listener(Self::fold_directory)) - .when(project.is_local_or_ssh(), |el| { + .when(project.is_local(), |el| { el.on_action(cx.listener(Self::reveal_in_finder)) - .on_action(cx.listener(Self::open_in_terminal)) + }) + .when(project.is_local_or_ssh(), |el| { + el.on_action(cx.listener(Self::open_in_terminal)) }) .on_mouse_down( MouseButton::Right, diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index cd4196dbc67b1..8e741134f0e44 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -484,6 +484,7 @@ impl ProjectPanel { let worktree_id = worktree.id(); let is_read_only = project.is_read_only(); let is_remote = project.is_via_collab() && project.dev_server_project_id().is_none(); + let is_local = project.is_local(); let context_menu = ContextMenu::build(cx, |menu, cx| { menu.context(self.focus_handle.clone()).map(|menu| { @@ -495,13 +496,15 @@ impl ProjectPanel { 
menu.action("New File", Box::new(NewFile)) .action("New Folder", Box::new(NewDirectory)) .separator() - .when(cfg!(target_os = "macos"), |menu| { + .when(is_local && cfg!(target_os = "macos"), |menu| { menu.action("Reveal in Finder", Box::new(RevealInFileManager)) }) - .when(cfg!(not(target_os = "macos")), |menu| { + .when(is_local && cfg!(not(target_os = "macos")), |menu| { menu.action("Reveal in File Manager", Box::new(RevealInFileManager)) }) - .action("Open in Default App", Box::new(OpenWithSystem)) + .when(is_local, |menu| { + menu.action("Open in Default App", Box::new(OpenWithSystem)) + }) .action("Open in Terminal", Box::new(OpenInTerminal)) .when(is_dir, |menu| { menu.separator() From da1ef13442e095414e23db86623a5b5acd117cd3 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 24 Sep 2024 14:39:44 -0700 Subject: [PATCH 050/228] Fix detection that a diff hunk is expanded (#18302) Release Notes: - N/A --------- Co-authored-by: Marshall --- crates/collab/src/tests/editor_tests.rs | 5 +---- crates/editor/src/element.rs | 6 +++++- crates/editor/src/hunk_diff.rs | 8 ++++++-- 3 files changed, 12 insertions(+), 7 deletions(-) diff --git a/crates/collab/src/tests/editor_tests.rs b/crates/collab/src/tests/editor_tests.rs index 7fb1a49f870d9..121c93656305a 100644 --- a/crates/collab/src/tests/editor_tests.rs +++ b/crates/collab/src/tests/editor_tests.rs @@ -2214,10 +2214,7 @@ struct Row10;"#}; let snapshot = editor.snapshot(cx); let all_hunks = editor_hunks(editor, &snapshot, cx); let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(5)..=DisplayRow(5)] - ); + assert_eq!(expanded_hunks_background_highlights(editor, cx), Vec::new()); assert_eq!( all_hunks, vec![( diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 3be71aeefba94..31e4efb83b60e 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -1335,7 +1335,11 @@ impl EditorElement { } match status { DiffHunkStatus::Added => {} - DiffHunkStatus::Modified => {} + DiffHunkStatus::Modified => { + if is_expanded { + *status = DiffHunkStatus::Added; + } + } DiffHunkStatus::Removed => { if is_expanded { return None; diff --git a/crates/editor/src/hunk_diff.rs b/crates/editor/src/hunk_diff.rs index 90836cee51683..2f7bb49e85349 100644 --- a/crates/editor/src/hunk_diff.rs +++ b/crates/editor/src/hunk_diff.rs @@ -279,8 +279,12 @@ impl Editor { ..Point::new(remaining_hunk.row_range.end.0, 0); hunks_to_expand.push(HoveredHunk { status: hunk_status(&remaining_hunk), - multi_buffer_range: remaining_hunk_point_range - .to_anchors(&snapshot.buffer_snapshot), + multi_buffer_range: snapshot + .buffer_snapshot + .anchor_before(remaining_hunk_point_range.start) + ..snapshot + .buffer_snapshot + .anchor_after(remaining_hunk_point_range.end), diff_base_byte_range: remaining_hunk.diff_base_byte_range.clone(), }); } From c4e0f5e0ee83e02567a5512b0f5fafef49225e66 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Tue, 24 Sep 2024 15:52:30 -0600 Subject: [PATCH 051/228] Rebuild buffer store to be aware of remote/local distinction (#18303) Release Notes: - N/A --------- Co-authored-by: Mikayla --- .../remote_editing_collaboration_tests.rs | 27 +- crates/project/src/buffer_store.rs | 1981 ++++++++++------- crates/project/src/lsp_command.rs | 23 +- crates/project/src/lsp_store.rs | 33 +- crates/project/src/project.rs | 18 +- .../remote_server/src/remote_editing_tests.rs | 1 + 6 files changed, 1192 insertions(+), 891 
deletions(-) diff --git a/crates/collab/src/tests/remote_editing_collaboration_tests.rs b/crates/collab/src/tests/remote_editing_collaboration_tests.rs index a81166bb00cee..bad5ef9053ce7 100644 --- a/crates/collab/src/tests/remote_editing_collaboration_tests.rs +++ b/crates/collab/src/tests/remote_editing_collaboration_tests.rs @@ -3,6 +3,7 @@ use call::ActiveCall; use fs::{FakeFs, Fs as _}; use gpui::{Context as _, TestAppContext}; use language::language_settings::all_language_settings; +use project::ProjectPath; use remote::SshSession; use remote_server::HeadlessProject; use serde_json::json; @@ -108,14 +109,36 @@ async fn test_sharing_an_ssh_remote_project( }); project_b - .update(cx_b, |project, cx| project.save_buffer(buffer_b, cx)) + .update(cx_b, |project, cx| { + project.save_buffer_as( + buffer_b.clone(), + ProjectPath { + worktree_id: worktree_id.to_owned(), + path: Arc::from(Path::new("src/renamed.rs")), + }, + cx, + ) + }) .await .unwrap(); assert_eq!( remote_fs - .load("/code/project1/src/lib.rs".as_ref()) + .load("/code/project1/src/renamed.rs".as_ref()) .await .unwrap(), "fn one() -> usize { 100 }" ); + cx_b.run_until_parked(); + cx_b.update(|cx| { + assert_eq!( + buffer_b + .read(cx) + .file() + .unwrap() + .path() + .to_string_lossy() + .to_string(), + "src/renamed.rs".to_string() + ); + }); } diff --git a/crates/project/src/buffer_store.rs b/crates/project/src/buffer_store.rs index b69679d6ac3b3..aa86a8f7e256e 100644 --- a/crates/project/src/buffer_store.rs +++ b/crates/project/src/buffer_store.rs @@ -10,7 +10,8 @@ use fs::Fs; use futures::{channel::oneshot, stream::FuturesUnordered, StreamExt}; use git::blame::Blame; use gpui::{ - AppContext, AsyncAppContext, Context as _, EventEmitter, Model, ModelContext, Task, WeakModel, + AppContext, AsyncAppContext, Context as _, EventEmitter, Model, ModelContext, Subscription, + Task, WeakModel, }; use http_client::Url; use language::{ @@ -25,27 +26,72 @@ use smol::channel::Receiver; use std::{io, path::Path, str::FromStr as _, sync::Arc, time::Instant}; use text::BufferId; use util::{debug_panic, maybe, ResultExt as _, TryFutureExt}; -use worktree::{ - File, PathChange, ProjectEntryId, RemoteWorktree, UpdatedGitRepositoriesSet, Worktree, - WorktreeId, -}; +use worktree::{File, PathChange, ProjectEntryId, UpdatedGitRepositoriesSet, Worktree, WorktreeId}; -/// A set of open buffers. 
-pub struct BufferStore { - state: BufferStoreState, - downstream_client: Option<(AnyProtoClient, u64)>, +trait BufferStoreImpl { + fn open_buffer( + &self, + path: Arc, + worktree: Model, + cx: &mut ModelContext, + ) -> Task>>; + + fn save_buffer( + &self, + buffer: Model, + cx: &mut ModelContext, + ) -> Task>; + + fn save_buffer_as( + &self, + buffer: Model, + path: ProjectPath, + cx: &mut ModelContext, + ) -> Task>; + + fn create_buffer(&self, cx: &mut ModelContext) -> Task>>; + + fn reload_buffers( + &self, + buffers: Vec>, + push_to_history: bool, + cx: &mut ModelContext, + ) -> Task>; + + fn as_remote(&self) -> Option>; + fn as_local(&self) -> Option>; +} + +struct RemoteBufferStore { + shared_with_me: HashSet>, + upstream_client: AnyProtoClient, + project_id: u64, + loading_remote_buffers_by_id: HashMap>, + remote_buffer_listeners: + HashMap, anyhow::Error>>>>, worktree_store: Model, - opened_buffers: HashMap, + buffer_store: WeakModel, +} + +struct LocalBufferStore { local_buffer_ids_by_path: HashMap, local_buffer_ids_by_entry_id: HashMap, + buffer_store: WeakModel, + worktree_store: Model, + _subscription: Subscription, +} + +/// A set of open buffers. +pub struct BufferStore { + state: Box, #[allow(clippy::type_complexity)] loading_buffers_by_path: HashMap< ProjectPath, postage::watch::Receiver, Arc>>>, >, - loading_remote_buffers_by_id: HashMap>, - remote_buffer_listeners: - HashMap, anyhow::Error>>>>, + worktree_store: Model, + opened_buffers: HashMap, + downstream_client: Option<(AnyProtoClient, u64)>, shared_buffers: HashMap>>, } @@ -63,19 +109,858 @@ pub enum BufferStoreEvent { }, } -enum BufferStoreState { - Remote { - shared_with_me: HashSet>, - upstream_client: AnyProtoClient, - project_id: u64, - }, - Local {}, +#[derive(Default, Debug)] +pub struct ProjectTransaction(pub HashMap, language::Transaction>); + +impl EventEmitter for BufferStore {} + +impl RemoteBufferStore { + pub fn wait_for_remote_buffer( + &mut self, + id: BufferId, + cx: &mut AppContext, + ) -> Task>> { + let buffer_store = self.buffer_store.clone(); + let (tx, rx) = oneshot::channel(); + self.remote_buffer_listeners.entry(id).or_default().push(tx); + + cx.spawn(|cx| async move { + if let Some(buffer) = buffer_store + .read_with(&cx, |buffer_store, _| buffer_store.get(id)) + .ok() + .flatten() + { + return Ok(buffer); + } + + cx.background_executor() + .spawn(async move { rx.await? }) + .await + }) + } + + fn save_remote_buffer( + &self, + buffer_handle: Model, + new_path: Option, + cx: &ModelContext, + ) -> Task> { + let buffer = buffer_handle.read(cx); + let buffer_id = buffer.remote_id().into(); + let version = buffer.version(); + let rpc = self.upstream_client.clone(); + let project_id = self.project_id; + cx.spawn(move |_, mut cx| async move { + let response = rpc + .request(proto::SaveBuffer { + project_id, + buffer_id, + new_path, + version: serialize_version(&version), + }) + .await?; + let version = deserialize_version(&response.version); + let mtime = response.mtime.map(|mtime| mtime.into()); + + buffer_handle.update(&mut cx, |buffer, cx| { + buffer.did_save(version.clone(), mtime, cx); + })?; + + Ok(()) + }) + } + + pub fn handle_create_buffer_for_peer( + &mut self, + envelope: TypedEnvelope, + replica_id: u16, + capability: Capability, + cx: &mut ModelContext, + ) -> Result>> { + match envelope + .payload + .variant + .ok_or_else(|| anyhow!("missing variant"))? 
+ { + proto::create_buffer_for_peer::Variant::State(mut state) => { + let buffer_id = BufferId::new(state.id)?; + + let buffer_result = maybe!({ + let mut buffer_file = None; + if let Some(file) = state.file.take() { + let worktree_id = worktree::WorktreeId::from_proto(file.worktree_id); + let worktree = self + .worktree_store + .read(cx) + .worktree_for_id(worktree_id, cx) + .ok_or_else(|| { + anyhow!("no worktree found for id {}", file.worktree_id) + })?; + buffer_file = Some(Arc::new(File::from_proto(file, worktree.clone(), cx)?) + as Arc); + } + Buffer::from_proto(replica_id, capability, state, buffer_file) + }); + + match buffer_result { + Ok(buffer) => { + let buffer = cx.new_model(|_| buffer); + self.loading_remote_buffers_by_id.insert(buffer_id, buffer); + } + Err(error) => { + if let Some(listeners) = self.remote_buffer_listeners.remove(&buffer_id) { + for listener in listeners { + listener.send(Err(anyhow!(error.cloned()))).ok(); + } + } + } + } + } + proto::create_buffer_for_peer::Variant::Chunk(chunk) => { + let buffer_id = BufferId::new(chunk.buffer_id)?; + let buffer = self + .loading_remote_buffers_by_id + .get(&buffer_id) + .cloned() + .ok_or_else(|| { + anyhow!( + "received chunk for buffer {} without initial state", + chunk.buffer_id + ) + })?; + + let result = maybe!({ + let operations = chunk + .operations + .into_iter() + .map(language::proto::deserialize_operation) + .collect::>>()?; + buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx)); + anyhow::Ok(()) + }); + + if let Err(error) = result { + self.loading_remote_buffers_by_id.remove(&buffer_id); + if let Some(listeners) = self.remote_buffer_listeners.remove(&buffer_id) { + for listener in listeners { + listener.send(Err(error.cloned())).ok(); + } + } + } else if chunk.is_last { + self.loading_remote_buffers_by_id.remove(&buffer_id); + if self.upstream_client.is_via_collab() { + // retain buffers sent by peers to avoid races. + self.shared_with_me.insert(buffer.clone()); + } + + if let Some(senders) = self.remote_buffer_listeners.remove(&buffer_id) { + for sender in senders { + sender.send(Ok(buffer.clone())).ok(); + } + } + return Ok(Some(buffer)); + } + } + } + return Ok(None); + } + + pub fn incomplete_buffer_ids(&self) -> Vec { + self.loading_remote_buffers_by_id + .keys() + .copied() + .collect::>() + } + + pub fn deserialize_project_transaction( + &self, + message: proto::ProjectTransaction, + push_to_history: bool, + cx: &mut ModelContext, + ) -> Task> { + cx.spawn(|this, mut cx| async move { + let mut project_transaction = ProjectTransaction::default(); + for (buffer_id, transaction) in message.buffer_ids.into_iter().zip(message.transactions) + { + let buffer_id = BufferId::new(buffer_id)?; + let buffer = this + .update(&mut cx, |this, cx| { + this.wait_for_remote_buffer(buffer_id, cx) + })? + .await?; + let transaction = language::proto::deserialize_transaction(transaction)?; + project_transaction.0.insert(buffer, transaction); + } + + for (buffer, transaction) in &project_transaction.0 { + buffer + .update(&mut cx, |buffer, _| { + buffer.wait_for_edits(transaction.edit_ids.iter().copied()) + })? 
+ .await?; + + if push_to_history { + buffer.update(&mut cx, |buffer, _| { + buffer.push_transaction(transaction.clone(), Instant::now()); + })?; + } + } + + Ok(project_transaction) + }) + } } -#[derive(Default, Debug)] -pub struct ProjectTransaction(pub HashMap, language::Transaction>); +impl BufferStoreImpl for Model { + fn as_remote(&self) -> Option> { + Some(self.clone()) + } + + fn as_local(&self) -> Option> { + None + } + + fn save_buffer( + &self, + buffer: Model, + cx: &mut ModelContext, + ) -> Task> { + self.update(cx, |this, cx| { + this.save_remote_buffer(buffer.clone(), None, cx) + }) + } + fn save_buffer_as( + &self, + buffer: Model, + path: ProjectPath, + cx: &mut ModelContext, + ) -> Task> { + self.update(cx, |this, cx| { + this.save_remote_buffer(buffer, Some(path.to_proto()), cx) + }) + } + + fn open_buffer( + &self, + path: Arc, + worktree: Model, + cx: &mut ModelContext, + ) -> Task>> { + self.update(cx, |this, cx| { + let worktree_id = worktree.read(cx).id().to_proto(); + let project_id = this.project_id; + let client = this.upstream_client.clone(); + let path_string = path.clone().to_string_lossy().to_string(); + cx.spawn(move |this, mut cx| async move { + let response = client + .request(proto::OpenBufferByPath { + project_id, + worktree_id, + path: path_string, + }) + .await?; + let buffer_id = BufferId::new(response.buffer_id)?; + + let buffer = this + .update(&mut cx, { + |this, cx| this.wait_for_remote_buffer(buffer_id, cx) + })? + .await?; + + Ok(buffer) + }) + }) + } + + fn create_buffer(&self, cx: &mut ModelContext) -> Task>> { + self.update(cx, |this, cx| { + let create = this.upstream_client.request(proto::OpenNewBuffer { + project_id: this.project_id, + }); + cx.spawn(|this, mut cx| async move { + let response = create.await?; + let buffer_id = BufferId::new(response.buffer_id)?; + + this.update(&mut cx, |this, cx| { + this.wait_for_remote_buffer(buffer_id, cx) + })? + .await + }) + }) + } + + fn reload_buffers( + &self, + buffers: Vec>, + push_to_history: bool, + cx: &mut ModelContext, + ) -> Task> { + self.update(cx, |this, cx| { + let request = this.upstream_client.request(proto::ReloadBuffers { + project_id: this.project_id, + buffer_ids: buffers + .iter() + .map(|buffer| buffer.read(cx).remote_id().to_proto()) + .collect(), + }); + + cx.spawn(|this, mut cx| async move { + let response = request + .await? + .transaction + .ok_or_else(|| anyhow!("missing transaction"))?; + this.update(&mut cx, |this, cx| { + this.deserialize_project_transaction(response, push_to_history, cx) + })? 
+ .await + }) + }) + } +} + +impl LocalBufferStore { + fn save_local_buffer( + &self, + buffer_handle: Model, + worktree: Model, + path: Arc, + mut has_changed_file: bool, + cx: &mut ModelContext, + ) -> Task> { + let buffer = buffer_handle.read(cx); + + let text = buffer.as_rope().clone(); + let line_ending = buffer.line_ending(); + let version = buffer.version(); + let buffer_id = buffer.remote_id(); + if buffer.file().is_some_and(|file| !file.is_created()) { + has_changed_file = true; + } + + let save = worktree.update(cx, |worktree, cx| { + worktree.write_file(path.as_ref(), text, line_ending, cx) + }); + + cx.spawn(move |this, mut cx| async move { + let new_file = save.await?; + let mtime = new_file.mtime; + this.update(&mut cx, |this, cx| { + if let Some((downstream_client, project_id)) = this.downstream_client(cx) { + if has_changed_file { + downstream_client + .send(proto::UpdateBufferFile { + project_id, + buffer_id: buffer_id.to_proto(), + file: Some(language::File::to_proto(&*new_file, cx)), + }) + .log_err(); + } + downstream_client + .send(proto::BufferSaved { + project_id, + buffer_id: buffer_id.to_proto(), + version: serialize_version(&version), + mtime: mtime.map(|time| time.into()), + }) + .log_err(); + } + })?; + buffer_handle.update(&mut cx, |buffer, cx| { + if has_changed_file { + buffer.file_updated(new_file, cx); + } + buffer.did_save(version.clone(), mtime, cx); + }) + }) + } + + fn subscribe_to_worktree(&mut self, worktree: &Model, cx: &mut ModelContext) { + cx.subscribe(worktree, |this, worktree, event, cx| { + if worktree.read(cx).is_local() { + match event { + worktree::Event::UpdatedEntries(changes) => { + this.local_worktree_entries_changed(&worktree, changes, cx); + } + worktree::Event::UpdatedGitRepositories(updated_repos) => { + this.local_worktree_git_repos_changed(worktree.clone(), updated_repos, cx) + } + _ => {} + } + } + }) + .detach(); + } + + fn local_worktree_entries_changed( + &mut self, + worktree_handle: &Model, + changes: &[(Arc, ProjectEntryId, PathChange)], + cx: &mut ModelContext, + ) { + let snapshot = worktree_handle.read(cx).snapshot(); + for (path, entry_id, _) in changes { + self.local_worktree_entry_changed(*entry_id, path, worktree_handle, &snapshot, cx); + } + } + + fn local_worktree_git_repos_changed( + &mut self, + worktree_handle: Model, + changed_repos: &UpdatedGitRepositoriesSet, + cx: &mut ModelContext, + ) { + debug_assert!(worktree_handle.read(cx).is_local()); + let Some(buffer_store) = self.buffer_store.upgrade() else { + return; + }; + + // Identify the loading buffers whose containing repository that has changed. + let (future_buffers, current_buffers) = buffer_store.update(cx, |buffer_store, cx| { + let future_buffers = buffer_store + .loading_buffers() + .filter_map(|(project_path, receiver)| { + if project_path.worktree_id != worktree_handle.read(cx).id() { + return None; + } + let path = &project_path.path; + changed_repos + .iter() + .find(|(work_dir, _)| path.starts_with(work_dir))?; + let path = path.clone(); + Some(async move { + BufferStore::wait_for_loading_buffer(receiver) + .await + .ok() + .map(|buffer| (buffer, path)) + }) + }) + .collect::>(); + + // Identify the current buffers whose containing repository has changed. 
+ let current_buffers = buffer_store + .buffers() + .filter_map(|buffer| { + let file = File::from_dyn(buffer.read(cx).file())?; + if file.worktree != worktree_handle { + return None; + } + changed_repos + .iter() + .find(|(work_dir, _)| file.path.starts_with(work_dir))?; + Some((buffer, file.path.clone())) + }) + .collect::>(); + (future_buffers, current_buffers) + }); + + if future_buffers.len() + current_buffers.len() == 0 { + return; + } + + cx.spawn(move |this, mut cx| async move { + // Wait for all of the buffers to load. + let future_buffers = future_buffers.collect::>().await; + + // Reload the diff base for every buffer whose containing git repository has changed. + let snapshot = + worktree_handle.update(&mut cx, |tree, _| tree.as_local().unwrap().snapshot())?; + let diff_bases_by_buffer = cx + .background_executor() + .spawn(async move { + let mut diff_base_tasks = future_buffers + .into_iter() + .flatten() + .chain(current_buffers) + .filter_map(|(buffer, path)| { + let (repo_entry, local_repo_entry) = snapshot.repo_for_path(&path)?; + let relative_path = repo_entry.relativize(&snapshot, &path).ok()?; + Some(async move { + let base_text = + local_repo_entry.repo().load_index_text(&relative_path); + Some((buffer, base_text)) + }) + }) + .collect::>(); + + let mut diff_bases = Vec::with_capacity(diff_base_tasks.len()); + while let Some(diff_base) = diff_base_tasks.next().await { + if let Some(diff_base) = diff_base { + diff_bases.push(diff_base); + } + } + diff_bases + }) + .await; + + this.update(&mut cx, |this, cx| { + // Assign the new diff bases on all of the buffers. + for (buffer, diff_base) in diff_bases_by_buffer { + let buffer_id = buffer.update(cx, |buffer, cx| { + buffer.set_diff_base(diff_base.clone(), cx); + buffer.remote_id().to_proto() + }); + if let Some((client, project_id)) = &this.downstream_client(cx) { + client + .send(proto::UpdateDiffBase { + project_id: *project_id, + buffer_id, + diff_base, + }) + .log_err(); + } + } + }) + }) + .detach_and_log_err(cx); + } + + fn local_worktree_entry_changed( + &mut self, + entry_id: ProjectEntryId, + path: &Arc, + worktree: &Model, + snapshot: &worktree::Snapshot, + cx: &mut ModelContext, + ) -> Option<()> { + let project_path = ProjectPath { + worktree_id: snapshot.id(), + path: path.clone(), + }; + let buffer_id = match self.local_buffer_ids_by_entry_id.get(&entry_id) { + Some(&buffer_id) => buffer_id, + None => self.local_buffer_ids_by_path.get(&project_path).copied()?, + }; + let buffer = self + .buffer_store + .update(cx, |buffer_store, _| { + if let Some(buffer) = buffer_store.get(buffer_id) { + Some(buffer) + } else { + buffer_store.opened_buffers.remove(&buffer_id); + None + } + }) + .ok() + .flatten(); + let buffer = if let Some(buffer) = buffer { + buffer + } else { + self.local_buffer_ids_by_path.remove(&project_path); + self.local_buffer_ids_by_entry_id.remove(&entry_id); + return None; + }; + + let events = buffer.update(cx, |buffer, cx| { + let file = buffer.file()?; + let old_file = File::from_dyn(Some(file))?; + if old_file.worktree != *worktree { + return None; + } + + let new_file = if let Some(entry) = old_file + .entry_id + .and_then(|entry_id| snapshot.entry_for_id(entry_id)) + { + File { + is_local: true, + entry_id: Some(entry.id), + mtime: entry.mtime, + path: entry.path.clone(), + worktree: worktree.clone(), + is_deleted: false, + is_private: entry.is_private, + } + } else if let Some(entry) = snapshot.entry_for_path(old_file.path.as_ref()) { + File { + is_local: true, + entry_id: 
Some(entry.id), + mtime: entry.mtime, + path: entry.path.clone(), + worktree: worktree.clone(), + is_deleted: false, + is_private: entry.is_private, + } + } else { + File { + is_local: true, + entry_id: old_file.entry_id, + path: old_file.path.clone(), + mtime: old_file.mtime, + worktree: worktree.clone(), + is_deleted: true, + is_private: old_file.is_private, + } + }; + + if new_file == *old_file { + return None; + } + + let mut events = Vec::new(); + if new_file.path != old_file.path { + self.local_buffer_ids_by_path.remove(&ProjectPath { + path: old_file.path.clone(), + worktree_id: old_file.worktree_id(cx), + }); + self.local_buffer_ids_by_path.insert( + ProjectPath { + worktree_id: new_file.worktree_id(cx), + path: new_file.path.clone(), + }, + buffer_id, + ); + events.push(BufferStoreEvent::BufferChangedFilePath { + buffer: cx.handle(), + old_file: buffer.file().cloned(), + }); + } + + if new_file.entry_id != old_file.entry_id { + if let Some(entry_id) = old_file.entry_id { + self.local_buffer_ids_by_entry_id.remove(&entry_id); + } + if let Some(entry_id) = new_file.entry_id { + self.local_buffer_ids_by_entry_id + .insert(entry_id, buffer_id); + } + } + + if let Some((client, project_id)) = &self.downstream_client(cx) { + client + .send(proto::UpdateBufferFile { + project_id: *project_id, + buffer_id: buffer_id.to_proto(), + file: Some(new_file.to_proto(cx)), + }) + .ok(); + } + + buffer.file_updated(Arc::new(new_file), cx); + Some(events) + })?; + self.buffer_store + .update(cx, |_buffer_store, cx| { + for event in events { + cx.emit(event); + } + }) + .log_err()?; + + None + } + + fn downstream_client(&self, cx: &AppContext) -> Option<(AnyProtoClient, u64)> { + self.buffer_store + .upgrade()? + .read(cx) + .downstream_client + .clone() + } + + fn buffer_changed_file(&mut self, buffer: Model, cx: &mut AppContext) -> Option<()> { + let file = File::from_dyn(buffer.read(cx).file())?; + + let remote_id = buffer.read(cx).remote_id(); + if let Some(entry_id) = file.entry_id { + match self.local_buffer_ids_by_entry_id.get(&entry_id) { + Some(_) => { + return None; + } + None => { + self.local_buffer_ids_by_entry_id + .insert(entry_id, remote_id); + } + } + }; + self.local_buffer_ids_by_path.insert( + ProjectPath { + worktree_id: file.worktree_id(cx), + path: file.path.clone(), + }, + remote_id, + ); + + Some(()) + } +} + +impl BufferStoreImpl for Model { + fn as_remote(&self) -> Option> { + None + } + + fn as_local(&self) -> Option> { + Some(self.clone()) + } + + fn save_buffer( + &self, + buffer: Model, + cx: &mut ModelContext, + ) -> Task> { + self.update(cx, |this, cx| { + let Some(file) = File::from_dyn(buffer.read(cx).file()) else { + return Task::ready(Err(anyhow!("buffer doesn't have a file"))); + }; + let worktree = file.worktree.clone(); + this.save_local_buffer(buffer, worktree, file.path.clone(), false, cx) + }) + } + + fn save_buffer_as( + &self, + buffer: Model, + path: ProjectPath, + cx: &mut ModelContext, + ) -> Task> { + self.update(cx, |this, cx| { + let Some(worktree) = this + .worktree_store + .read(cx) + .worktree_for_id(path.worktree_id, cx) + else { + return Task::ready(Err(anyhow!("no such worktree"))); + }; + this.save_local_buffer(buffer, worktree, path.path.clone(), true, cx) + }) + } + + fn open_buffer( + &self, + path: Arc, + worktree: Model, + cx: &mut ModelContext, + ) -> Task>> { + let buffer_store = cx.weak_model(); + self.update(cx, |_, cx| { + let load_buffer = worktree.update(cx, |worktree, cx| { + let load_file = worktree.load_file(path.as_ref(), cx); + 
let reservation = cx.reserve_model(); + let buffer_id = BufferId::from(reservation.entity_id().as_non_zero_u64()); + cx.spawn(move |_, mut cx| async move { + let loaded = load_file.await?; + let text_buffer = cx + .background_executor() + .spawn(async move { text::Buffer::new(0, buffer_id, loaded.text) }) + .await; + cx.insert_model(reservation, |_| { + Buffer::build( + text_buffer, + loaded.diff_base, + Some(loaded.file), + Capability::ReadWrite, + ) + }) + }) + }); + + cx.spawn(move |this, mut cx| async move { + let buffer = match load_buffer.await { + Ok(buffer) => Ok(buffer), + Err(error) if is_not_found_error(&error) => cx.new_model(|cx| { + let buffer_id = BufferId::from(cx.entity_id().as_non_zero_u64()); + let text_buffer = text::Buffer::new(0, buffer_id, "".into()); + Buffer::build( + text_buffer, + None, + Some(Arc::new(File { + worktree, + path, + mtime: None, + entry_id: None, + is_local: true, + is_deleted: false, + is_private: false, + })), + Capability::ReadWrite, + ) + }), + Err(e) => Err(e), + }?; + this.update(&mut cx, |this, cx| { + buffer_store.update(cx, |buffer_store, cx| { + buffer_store.add_buffer(buffer.clone(), cx) + })??; + let buffer_id = buffer.read(cx).remote_id(); + if let Some(file) = File::from_dyn(buffer.read(cx).file()) { + this.local_buffer_ids_by_path.insert( + ProjectPath { + worktree_id: file.worktree_id(cx), + path: file.path.clone(), + }, + buffer_id, + ); + + if let Some(entry_id) = file.entry_id { + this.local_buffer_ids_by_entry_id + .insert(entry_id, buffer_id); + } + } + + anyhow::Ok(()) + })??; + + Ok(buffer) + }) + }) + } + + fn create_buffer(&self, cx: &mut ModelContext) -> Task>> { + let handle = self.clone(); + cx.spawn(|buffer_store, mut cx| async move { + let buffer = cx.new_model(|cx| { + Buffer::local("", cx).with_language(language::PLAIN_TEXT.clone(), cx) + })?; + buffer_store.update(&mut cx, |buffer_store, cx| { + buffer_store.add_buffer(buffer.clone(), cx).log_err(); + let buffer_id = buffer.read(cx).remote_id(); + handle.update(cx, |this, cx| { + if let Some(file) = File::from_dyn(buffer.read(cx).file()) { + this.local_buffer_ids_by_path.insert( + ProjectPath { + worktree_id: file.worktree_id(cx), + path: file.path.clone(), + }, + buffer_id, + ); + + if let Some(entry_id) = file.entry_id { + this.local_buffer_ids_by_entry_id + .insert(entry_id, buffer_id); + } + } + }); + })?; + Ok(buffer) + }) + } + + fn reload_buffers( + &self, + buffers: Vec>, + push_to_history: bool, + cx: &mut ModelContext, + ) -> Task> { + cx.spawn(move |_, mut cx| async move { + let mut project_transaction = ProjectTransaction::default(); + for buffer in buffers { + let transaction = buffer + .update(&mut cx, |buffer, cx| buffer.reload(cx))? + .await?; + buffer.update(&mut cx, |buffer, cx| { + if let Some(transaction) = transaction { + if !push_to_history { + buffer.forget_transaction(transaction.id); + } + project_transaction.0.insert(cx.handle(), transaction); + } + })?; + } -impl EventEmitter for BufferStore {} + Ok(project_transaction) + }) + } +} impl BufferStore { pub fn init(client: &AnyProtoClient) { @@ -90,24 +975,31 @@ impl BufferStore { /// Creates a buffer store, optionally retaining its buffers. 
pub fn local(worktree_store: Model, cx: &mut ModelContext) -> Self { - cx.subscribe(&worktree_store, |this, _, event, cx| { - if let WorktreeStoreEvent::WorktreeAdded(worktree) = event { - this.subscribe_to_worktree(worktree, cx); - } - }) - .detach(); - + let this = cx.weak_model(); Self { - state: BufferStoreState::Local {}, + state: Box::new(cx.new_model(|cx| { + let subscription = cx.subscribe( + &worktree_store, + |this: &mut LocalBufferStore, _, event, cx| { + if let WorktreeStoreEvent::WorktreeAdded(worktree) = event { + this.subscribe_to_worktree(worktree, cx); + } + }, + ); + + LocalBufferStore { + local_buffer_ids_by_path: Default::default(), + local_buffer_ids_by_entry_id: Default::default(), + buffer_store: this, + worktree_store: worktree_store.clone(), + _subscription: subscription, + } + })), downstream_client: None, - worktree_store, opened_buffers: Default::default(), - remote_buffer_listeners: Default::default(), - loading_remote_buffers_by_id: Default::default(), - local_buffer_ids_by_path: Default::default(), - local_buffer_ids_by_entry_id: Default::default(), - loading_buffers_by_path: Default::default(), shared_buffers: Default::default(), + loading_buffers_by_path: Default::default(), + worktree_store, } } @@ -117,28 +1009,22 @@ impl BufferStore { remote_id: u64, cx: &mut ModelContext, ) -> Self { - cx.subscribe(&worktree_store, |this, _, event, cx| { - if let WorktreeStoreEvent::WorktreeAdded(worktree) = event { - this.subscribe_to_worktree(worktree, cx); - } - }) - .detach(); - + let this = cx.weak_model(); Self { - state: BufferStoreState::Remote { + state: Box::new(cx.new_model(|_| RemoteBufferStore { shared_with_me: Default::default(), - upstream_client, + loading_remote_buffers_by_id: Default::default(), + remote_buffer_listeners: Default::default(), project_id: remote_id, - }, + upstream_client, + worktree_store: worktree_store.clone(), + buffer_store: this, + })), downstream_client: None, - worktree_store, opened_buffers: Default::default(), - remote_buffer_listeners: Default::default(), - loading_remote_buffers_by_id: Default::default(), - local_buffer_ids_by_path: Default::default(), - local_buffer_ids_by_entry_id: Default::default(), loading_buffers_by_path: Default::default(), shared_buffers: Default::default(), + worktree_store, } } @@ -171,18 +1057,13 @@ impl BufferStore { entry.insert(rx.clone()); let project_path = project_path.clone(); - let load_buffer = match worktree.read(cx) { - Worktree::Local(_) => { - self.open_local_buffer_internal(project_path.path.clone(), worktree, cx) - } - Worktree::Remote(tree) => { - self.open_remote_buffer_internal(&project_path.path, tree, cx) - } - }; + let load_buffer = self + .state + .open_buffer(project_path.path.clone(), worktree, cx); cx.spawn(move |this, mut cx| async move { let load_result = load_buffer.await; - *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| { + *tx.borrow_mut() = Some(this.update(&mut cx, |this, _cx| { // Record the fact that the buffer is no longer loading. 
this.loading_buffers_by_path.remove(&project_path); let buffer = load_result.map_err(Arc::new)?; @@ -201,391 +1082,32 @@ impl BufferStore { .map_err(|e| e.cloned()) }) } - - fn subscribe_to_worktree(&mut self, worktree: &Model, cx: &mut ModelContext) { - cx.subscribe(worktree, |this, worktree, event, cx| { - if worktree.read(cx).is_local() { - match event { - worktree::Event::UpdatedEntries(changes) => { - this.local_worktree_entries_changed(&worktree, changes, cx); - } - worktree::Event::UpdatedGitRepositories(updated_repos) => { - this.local_worktree_git_repos_changed(worktree.clone(), updated_repos, cx) - } - _ => {} - } - } - }) - .detach(); - } - - fn local_worktree_entries_changed( - &mut self, - worktree_handle: &Model, - changes: &[(Arc, ProjectEntryId, PathChange)], - cx: &mut ModelContext, - ) { - let snapshot = worktree_handle.read(cx).snapshot(); - for (path, entry_id, _) in changes { - self.local_worktree_entry_changed(*entry_id, path, worktree_handle, &snapshot, cx); - } - } - - fn local_worktree_git_repos_changed( - &mut self, - worktree_handle: Model, - changed_repos: &UpdatedGitRepositoriesSet, - cx: &mut ModelContext, - ) { - debug_assert!(worktree_handle.read(cx).is_local()); - - // Identify the loading buffers whose containing repository that has changed. - let future_buffers = self - .loading_buffers() - .filter_map(|(project_path, receiver)| { - if project_path.worktree_id != worktree_handle.read(cx).id() { - return None; - } - let path = &project_path.path; - changed_repos - .iter() - .find(|(work_dir, _)| path.starts_with(work_dir))?; - let path = path.clone(); - Some(async move { - Self::wait_for_loading_buffer(receiver) - .await - .ok() - .map(|buffer| (buffer, path)) - }) - }) - .collect::>(); - - // Identify the current buffers whose containing repository has changed. - let current_buffers = self - .buffers() - .filter_map(|buffer| { - let file = File::from_dyn(buffer.read(cx).file())?; - if file.worktree != worktree_handle { - return None; - } - changed_repos - .iter() - .find(|(work_dir, _)| file.path.starts_with(work_dir))?; - Some((buffer, file.path.clone())) - }) - .collect::>(); - - if future_buffers.len() + current_buffers.len() == 0 { - return; - } - - cx.spawn(move |this, mut cx| async move { - // Wait for all of the buffers to load. - let future_buffers = future_buffers.collect::>().await; - - // Reload the diff base for every buffer whose containing git repository has changed. - let snapshot = - worktree_handle.update(&mut cx, |tree, _| tree.as_local().unwrap().snapshot())?; - let diff_bases_by_buffer = cx - .background_executor() - .spawn(async move { - let mut diff_base_tasks = future_buffers - .into_iter() - .flatten() - .chain(current_buffers) - .filter_map(|(buffer, path)| { - let (repo_entry, local_repo_entry) = snapshot.repo_for_path(&path)?; - let relative_path = repo_entry.relativize(&snapshot, &path).ok()?; - Some(async move { - let base_text = - local_repo_entry.repo().load_index_text(&relative_path); - Some((buffer, base_text)) - }) - }) - .collect::>(); - - let mut diff_bases = Vec::with_capacity(diff_base_tasks.len()); - while let Some(diff_base) = diff_base_tasks.next().await { - if let Some(diff_base) = diff_base { - diff_bases.push(diff_base); - } - } - diff_bases - }) - .await; - - this.update(&mut cx, |this, cx| { - // Assign the new diff bases on all of the buffers. 
- for (buffer, diff_base) in diff_bases_by_buffer { - let buffer_id = buffer.update(cx, |buffer, cx| { - buffer.set_diff_base(diff_base.clone(), cx); - buffer.remote_id().to_proto() - }); - if let Some((client, project_id)) = &this.downstream_client { - client - .send(proto::UpdateDiffBase { - project_id: *project_id, - buffer_id, - diff_base, - }) - .log_err(); - } - } - }) - }) - .detach_and_log_err(cx); - } - - fn open_local_buffer_internal( - &mut self, - path: Arc, - worktree: Model, - cx: &mut ModelContext, - ) -> Task>> { - let load_buffer = worktree.update(cx, |worktree, cx| { - let load_file = worktree.load_file(path.as_ref(), cx); - let reservation = cx.reserve_model(); - let buffer_id = BufferId::from(reservation.entity_id().as_non_zero_u64()); - cx.spawn(move |_, mut cx| async move { - let loaded = load_file.await?; - let text_buffer = cx - .background_executor() - .spawn(async move { text::Buffer::new(0, buffer_id, loaded.text) }) - .await; - cx.insert_model(reservation, |_| { - Buffer::build( - text_buffer, - loaded.diff_base, - Some(loaded.file), - Capability::ReadWrite, - ) - }) - }) - }); - - cx.spawn(move |this, mut cx| async move { - let buffer = match load_buffer.await { - Ok(buffer) => Ok(buffer), - Err(error) if is_not_found_error(&error) => cx.new_model(|cx| { - let buffer_id = BufferId::from(cx.entity_id().as_non_zero_u64()); - let text_buffer = text::Buffer::new(0, buffer_id, "".into()); - Buffer::build( - text_buffer, - None, - Some(Arc::new(File { - worktree, - path, - mtime: None, - entry_id: None, - is_local: true, - is_deleted: false, - is_private: false, - })), - Capability::ReadWrite, - ) - }), - Err(e) => Err(e), - }?; - this.update(&mut cx, |this, cx| { - this.add_buffer(buffer.clone(), cx).log_err(); - })?; - Ok(buffer) - }) - } - - fn open_remote_buffer_internal( - &self, - path: &Arc, - worktree: &RemoteWorktree, - cx: &ModelContext, - ) -> Task>> { - let worktree_id = worktree.id().to_proto(); - let project_id = worktree.project_id(); - let client = worktree.client(); - let path_string = path.clone().to_string_lossy().to_string(); - cx.spawn(move |this, mut cx| async move { - let response = client - .request(proto::OpenBufferByPath { - project_id, - worktree_id, - path: path_string, - }) - .await?; - let buffer_id = BufferId::new(response.buffer_id)?; - this.update(&mut cx, |this, cx| { - this.wait_for_remote_buffer(buffer_id, cx) - })? - .await - }) - } - - pub fn create_buffer( - &mut self, - remote_client: Option<(AnyProtoClient, u64)>, - cx: &mut ModelContext, - ) -> Task>> { - if let Some((remote_client, project_id)) = remote_client { - let create = remote_client.request(proto::OpenNewBuffer { project_id }); - cx.spawn(|this, mut cx| async move { - let response = create.await?; - let buffer_id = BufferId::new(response.buffer_id)?; - - this.update(&mut cx, |this, cx| { - this.wait_for_remote_buffer(buffer_id, cx) - })? 
- .await - }) - } else { - Task::ready(Ok(self.create_local_buffer("", None, cx))) - } - } - - pub fn create_local_buffer( - &mut self, - text: &str, - language: Option>, - cx: &mut ModelContext, - ) -> Model { - let buffer = cx.new_model(|cx| { - Buffer::local(text, cx) - .with_language(language.unwrap_or_else(|| language::PLAIN_TEXT.clone()), cx) - }); - self.add_buffer(buffer.clone(), cx).log_err(); - buffer - } - - pub fn save_buffer( - &mut self, - buffer: Model, - cx: &mut ModelContext, - ) -> Task> { - let Some(file) = File::from_dyn(buffer.read(cx).file()) else { - return Task::ready(Err(anyhow!("buffer doesn't have a file"))); - }; - match file.worktree.read(cx) { - Worktree::Local(_) => { - self.save_local_buffer(file.worktree.clone(), buffer, file.path.clone(), false, cx) - } - Worktree::Remote(tree) => self.save_remote_buffer(buffer, None, tree, cx), - } - } - - pub fn save_buffer_as( - &mut self, - buffer: Model, - path: ProjectPath, - cx: &mut ModelContext, - ) -> Task> { - let Some(worktree) = self - .worktree_store - .read(cx) - .worktree_for_id(path.worktree_id, cx) - else { - return Task::ready(Err(anyhow!("no such worktree"))); - }; - - let old_file = buffer.read(cx).file().cloned(); - - let task = match worktree.read(cx) { - Worktree::Local(_) => { - self.save_local_buffer(worktree, buffer.clone(), path.path, true, cx) - } - Worktree::Remote(tree) => { - self.save_remote_buffer(buffer.clone(), Some(path.to_proto()), tree, cx) - } - }; - cx.spawn(|this, mut cx| async move { - task.await?; - this.update(&mut cx, |_, cx| { - cx.emit(BufferStoreEvent::BufferChangedFilePath { buffer, old_file }); - }) - }) + + pub fn create_buffer(&mut self, cx: &mut ModelContext) -> Task>> { + self.state.create_buffer(cx) } - fn save_local_buffer( - &self, - worktree: Model, - buffer_handle: Model, - path: Arc, - mut has_changed_file: bool, + pub fn save_buffer( + &mut self, + buffer: Model, cx: &mut ModelContext, ) -> Task> { - let buffer = buffer_handle.read(cx); - let text = buffer.as_rope().clone(); - let line_ending = buffer.line_ending(); - let version = buffer.version(); - let buffer_id = buffer.remote_id(); - if buffer.file().is_some_and(|file| !file.is_created()) { - has_changed_file = true; - } - - let save = worktree.update(cx, |worktree, cx| { - worktree.write_file(path.as_ref(), text, line_ending, cx) - }); - - cx.spawn(move |this, mut cx| async move { - let new_file = save.await?; - let mtime = new_file.mtime; - this.update(&mut cx, |this, cx| { - if let Some((downstream_client, project_id)) = this.downstream_client.as_ref() { - let project_id = *project_id; - if has_changed_file { - downstream_client - .send(proto::UpdateBufferFile { - project_id, - buffer_id: buffer_id.to_proto(), - file: Some(language::File::to_proto(&*new_file, cx)), - }) - .log_err(); - } - downstream_client - .send(proto::BufferSaved { - project_id, - buffer_id: buffer_id.to_proto(), - version: serialize_version(&version), - mtime: mtime.map(|time| time.into()), - }) - .log_err(); - } - })?; - buffer_handle.update(&mut cx, |buffer, cx| { - if has_changed_file { - buffer.file_updated(new_file, cx); - } - buffer.did_save(version.clone(), mtime, cx); - }) - }) + self.state.save_buffer(buffer, cx) } - fn save_remote_buffer( - &self, - buffer_handle: Model, - new_path: Option, - tree: &RemoteWorktree, - cx: &ModelContext, + pub fn save_buffer_as( + &mut self, + buffer: Model, + path: ProjectPath, + cx: &mut ModelContext, ) -> Task> { - let buffer = buffer_handle.read(cx); - let buffer_id = 
buffer.remote_id().into(); - let version = buffer.version(); - let rpc = tree.client(); - let project_id = tree.project_id(); - cx.spawn(move |_, mut cx| async move { - let response = rpc - .request(proto::SaveBuffer { - project_id, - buffer_id, - new_path, - version: serialize_version(&version), - }) - .await?; - let version = deserialize_version(&response.version); - let mtime = response.mtime.map(|mtime| mtime.into()); - - buffer_handle.update(&mut cx, |buffer, cx| { - buffer.did_save(version.clone(), mtime, cx); - })?; - - Ok(()) + let old_file = buffer.read(cx).file().cloned(); + let task = self.state.save_buffer_as(buffer.clone(), path, cx); + cx.spawn(|this, mut cx| async move { + task.await?; + this.update(&mut cx, |_, cx| { + cx.emit(BufferStoreEvent::BufferChangedFilePath { buffer, old_file }); + }) }) } @@ -684,29 +1206,6 @@ impl BufferStore { } } - if let Some(senders) = self.remote_buffer_listeners.remove(&remote_id) { - for sender in senders { - sender.send(Ok(buffer.clone())).ok(); - } - } - - if let Some(file) = File::from_dyn(buffer.read(cx).file()) { - if file.is_local { - self.local_buffer_ids_by_path.insert( - ProjectPath { - worktree_id: file.worktree_id(cx), - path: file.path.clone(), - }, - remote_id, - ); - - if let Some(entry_id) = file.entry_id { - self.local_buffer_ids_by_entry_id - .insert(entry_id, remote_id); - } - } - } - cx.subscribe(&buffer, Self::on_buffer_event).detach(); cx.emit(BufferStoreEvent::BufferAdded(buffer)); Ok(()) @@ -753,23 +1252,20 @@ impl BufferStore { .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id)) } - pub fn get_possibly_incomplete(&self, buffer_id: BufferId) -> Option> { - self.get(buffer_id) - .or_else(|| self.loading_remote_buffers_by_id.get(&buffer_id).cloned()) - } - - pub fn wait_for_remote_buffer( - &mut self, - id: BufferId, - cx: &mut AppContext, - ) -> Task>> { - let buffer = self.get(id); - if let Some(buffer) = buffer { - return Task::ready(Ok(buffer)); - } - let (tx, rx) = oneshot::channel(); - self.remote_buffer_listeners.entry(id).or_default().push(tx); - cx.background_executor().spawn(async move { rx.await? }) + pub fn get_possibly_incomplete( + &self, + buffer_id: BufferId, + cx: &AppContext, + ) -> Option> { + self.get(buffer_id).or_else(|| { + self.state.as_remote().and_then(|remote| { + remote + .read(cx) + .loading_remote_buffers_by_id + .get(&buffer_id) + .cloned() + }) + }) } pub fn buffer_version_info( @@ -787,15 +1283,19 @@ impl BufferStore { }) .collect(); let incomplete_buffer_ids = self - .loading_remote_buffers_by_id - .keys() - .copied() - .collect::>(); + .state + .as_remote() + .map(|remote| remote.read(cx).incomplete_buffer_ids()) + .unwrap_or_default(); (buffers, incomplete_buffer_ids) } pub fn disconnected_from_host(&mut self, cx: &mut AppContext) { - self.drop_unnecessary_buffers(cx); + for open_buffer in self.opened_buffers.values_mut() { + if let Some(buffer) = open_buffer.upgrade() { + buffer.update(cx, |buffer, _| buffer.give_up_waiting()); + } + } for buffer in self.buffers() { buffer.update(cx, |buffer, cx| { @@ -803,9 +1303,13 @@ impl BufferStore { }); } - // Wake up all futures currently waiting on a buffer to get opened, - // to give them a chance to fail now that we've disconnected. - self.remote_buffer_listeners.clear(); + if let Some(remote) = self.state.as_remote() { + remote.update(cx, |remote, _| { + // Wake up all futures currently waiting on a buffer to get opened, + // to give them a chance to fail now that we've disconnected. 
+ remote.remote_buffer_listeners.clear() + }) + } } pub fn shared( @@ -822,14 +1326,6 @@ impl BufferStore { self.forget_shared_buffers(); } - fn drop_unnecessary_buffers(&mut self, cx: &mut AppContext) { - for open_buffer in self.opened_buffers.values_mut() { - if let Some(buffer) = open_buffer.upgrade() { - buffer.update(cx, |buffer, _| buffer.give_up_waiting()); - } - } - } - pub fn discard_incomplete(&mut self) { self.opened_buffers .retain(|_, buffer| !matches!(buffer, OpenBuffer::Operations(_))); @@ -897,7 +1393,11 @@ impl BufferStore { ) { match event { BufferEvent::FileHandleChanged => { - self.buffer_changed_file(buffer, cx); + if let Some(local) = self.state.as_local() { + local.update(cx, |local, cx| { + local.buffer_changed_file(buffer, cx); + }) + } } BufferEvent::Reloaded => { let Some((downstream_client, project_id)) = self.downstream_client.as_ref() else { @@ -905,164 +1405,17 @@ impl BufferStore { }; let buffer = buffer.read(cx); downstream_client - .send(proto::BufferReloaded { - project_id: *project_id, - buffer_id: buffer.remote_id().to_proto(), - version: serialize_version(&buffer.version()), - mtime: buffer.saved_mtime().map(|t| t.into()), - line_ending: serialize_line_ending(buffer.line_ending()) as i32, - }) - .log_err(); - } - _ => {} - } - } - - fn local_worktree_entry_changed( - &mut self, - entry_id: ProjectEntryId, - path: &Arc, - worktree: &Model, - snapshot: &worktree::Snapshot, - cx: &mut ModelContext, - ) -> Option<()> { - let project_path = ProjectPath { - worktree_id: snapshot.id(), - path: path.clone(), - }; - let buffer_id = match self.local_buffer_ids_by_entry_id.get(&entry_id) { - Some(&buffer_id) => buffer_id, - None => self.local_buffer_ids_by_path.get(&project_path).copied()?, - }; - let buffer = if let Some(buffer) = self.get(buffer_id) { - buffer - } else { - self.opened_buffers.remove(&buffer_id); - self.local_buffer_ids_by_path.remove(&project_path); - self.local_buffer_ids_by_entry_id.remove(&entry_id); - return None; - }; - - let events = buffer.update(cx, |buffer, cx| { - let file = buffer.file()?; - let old_file = File::from_dyn(Some(file))?; - if old_file.worktree != *worktree { - return None; - } - - let new_file = if let Some(entry) = old_file - .entry_id - .and_then(|entry_id| snapshot.entry_for_id(entry_id)) - { - File { - is_local: true, - entry_id: Some(entry.id), - mtime: entry.mtime, - path: entry.path.clone(), - worktree: worktree.clone(), - is_deleted: false, - is_private: entry.is_private, - } - } else if let Some(entry) = snapshot.entry_for_path(old_file.path.as_ref()) { - File { - is_local: true, - entry_id: Some(entry.id), - mtime: entry.mtime, - path: entry.path.clone(), - worktree: worktree.clone(), - is_deleted: false, - is_private: entry.is_private, - } - } else { - File { - is_local: true, - entry_id: old_file.entry_id, - path: old_file.path.clone(), - mtime: old_file.mtime, - worktree: worktree.clone(), - is_deleted: true, - is_private: old_file.is_private, - } - }; - - if new_file == *old_file { - return None; - } - - let mut events = Vec::new(); - if new_file.path != old_file.path { - self.local_buffer_ids_by_path.remove(&ProjectPath { - path: old_file.path.clone(), - worktree_id: old_file.worktree_id(cx), - }); - self.local_buffer_ids_by_path.insert( - ProjectPath { - worktree_id: new_file.worktree_id(cx), - path: new_file.path.clone(), - }, - buffer_id, - ); - events.push(BufferStoreEvent::BufferChangedFilePath { - buffer: cx.handle(), - old_file: buffer.file().cloned(), - }); - } - - if new_file.entry_id != 
old_file.entry_id { - if let Some(entry_id) = old_file.entry_id { - self.local_buffer_ids_by_entry_id.remove(&entry_id); - } - if let Some(entry_id) = new_file.entry_id { - self.local_buffer_ids_by_entry_id - .insert(entry_id, buffer_id); - } - } - - if let Some((client, project_id)) = &self.downstream_client { - client - .send(proto::UpdateBufferFile { + .send(proto::BufferReloaded { project_id: *project_id, - buffer_id: buffer_id.to_proto(), - file: Some(new_file.to_proto(cx)), + buffer_id: buffer.remote_id().to_proto(), + version: serialize_version(&buffer.version()), + mtime: buffer.saved_mtime().map(|t| t.into()), + line_ending: serialize_line_ending(buffer.line_ending()) as i32, }) - .ok(); + .log_err(); } - - buffer.file_updated(Arc::new(new_file), cx); - Some(events) - })?; - - for event in events { - cx.emit(event); + _ => {} } - - None - } - - fn buffer_changed_file(&mut self, buffer: Model, cx: &mut AppContext) -> Option<()> { - let file = File::from_dyn(buffer.read(cx).file())?; - - let remote_id = buffer.read(cx).remote_id(); - if let Some(entry_id) = file.entry_id { - match self.local_buffer_ids_by_entry_id.get(&entry_id) { - Some(_) => { - return None; - } - None => { - self.local_buffer_ids_by_entry_id - .insert(entry_id, remote_id); - } - } - }; - self.local_buffer_ids_by_path.insert( - ProjectPath { - worktree_id: file.worktree_id(cx), - path: file.path.clone(), - }, - remote_id, - ); - - Some(()) } pub async fn handle_update_buffer( @@ -1186,93 +1539,14 @@ impl BufferStore { capability: Capability, cx: &mut ModelContext, ) -> Result<()> { - match envelope - .payload - .variant - .ok_or_else(|| anyhow!("missing variant"))? - { - proto::create_buffer_for_peer::Variant::State(mut state) => { - let buffer_id = BufferId::new(state.id)?; - - let buffer_result = maybe!({ - let mut buffer_file = None; - if let Some(file) = state.file.take() { - let worktree_id = worktree::WorktreeId::from_proto(file.worktree_id); - let worktree = self - .worktree_store - .read(cx) - .worktree_for_id(worktree_id, cx) - .ok_or_else(|| { - anyhow!("no worktree found for id {}", file.worktree_id) - })?; - buffer_file = Some(Arc::new(File::from_proto(file, worktree.clone(), cx)?) 
- as Arc); - } - Buffer::from_proto(replica_id, capability, state, buffer_file) - }); - - match buffer_result { - Ok(buffer) => { - let buffer = cx.new_model(|_| buffer); - self.loading_remote_buffers_by_id.insert(buffer_id, buffer); - } - Err(error) => { - if let Some(listeners) = self.remote_buffer_listeners.remove(&buffer_id) { - for listener in listeners { - listener.send(Err(anyhow!(error.cloned()))).ok(); - } - } - } - } - } - proto::create_buffer_for_peer::Variant::Chunk(chunk) => { - let buffer_id = BufferId::new(chunk.buffer_id)?; - let buffer = self - .loading_remote_buffers_by_id - .get(&buffer_id) - .cloned() - .ok_or_else(|| { - anyhow!( - "received chunk for buffer {} without initial state", - chunk.buffer_id - ) - })?; - - let result = maybe!({ - let operations = chunk - .operations - .into_iter() - .map(language::proto::deserialize_operation) - .collect::>>()?; - buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx)); - anyhow::Ok(()) - }); + let Some(remote) = self.state.as_remote() else { + return Err(anyhow!("buffer store is not a remote")); + }; - if let Err(error) = result { - self.loading_remote_buffers_by_id.remove(&buffer_id); - if let Some(listeners) = self.remote_buffer_listeners.remove(&buffer_id) { - for listener in listeners { - listener.send(Err(error.cloned())).ok(); - } - } - } else if chunk.is_last { - self.loading_remote_buffers_by_id.remove(&buffer_id); - // retain buffers sent by peers to avoid races. - match &mut self.state { - BufferStoreState::Remote { - ref mut shared_with_me, - upstream_client, - .. - } => { - if upstream_client.is_via_collab() { - shared_with_me.insert(buffer.clone()); - } - } - _ => {} - } - self.add_buffer(buffer, cx)?; - } - } + if let Some(buffer) = remote.update(cx, |remote, cx| { + remote.handle_create_buffer_for_peer(envelope, replica_id, capability, cx) + })? { + self.add_buffer(buffer, cx)?; } Ok(()) @@ -1288,7 +1562,7 @@ impl BufferStore { this.update(&mut cx, |this, cx| { let payload = envelope.payload.clone(); - if let Some(buffer) = this.get_possibly_incomplete(buffer_id) { + if let Some(buffer) = this.get_possibly_incomplete(buffer_id, cx) { let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?; let worktree = this .worktree_store @@ -1313,6 +1587,15 @@ impl BufferStore { cx.emit(BufferStoreEvent::BufferChangedFilePath { buffer, old_file }); } } + if let Some((downstream_client, project_id)) = this.downstream_client.as_ref() { + downstream_client + .send(proto::UpdateBufferFile { + project_id: *project_id, + buffer_id: buffer_id.into(), + file: envelope.payload.file, + }) + .log_err(); + } Ok(()) })? } @@ -1325,11 +1608,20 @@ impl BufferStore { this.update(&mut cx, |this, cx| { let buffer_id = envelope.payload.buffer_id; let buffer_id = BufferId::new(buffer_id)?; - if let Some(buffer) = this.get_possibly_incomplete(buffer_id) { + if let Some(buffer) = this.get_possibly_incomplete(buffer_id, cx) { buffer.update(cx, |buffer, cx| { - buffer.set_diff_base(envelope.payload.diff_base, cx) + buffer.set_diff_base(envelope.payload.diff_base.clone(), cx) }); } + if let Some((downstream_client, project_id)) = this.downstream_client.as_ref() { + downstream_client + .send(proto::UpdateDiffBase { + project_id: *project_id, + buffer_id: buffer_id.into(), + diff_base: envelope.payload.diff_base, + }) + .log_err(); + } Ok(()) })? 
} @@ -1408,13 +1700,24 @@ impl BufferStore { ) -> Result<()> { let buffer_id = BufferId::new(envelope.payload.buffer_id)?; let version = deserialize_version(&envelope.payload.version); - let mtime = envelope.payload.mtime.map(|time| time.into()); - this.update(&mut cx, |this, cx| { - if let Some(buffer) = this.get_possibly_incomplete(buffer_id) { + let mtime = envelope.payload.mtime.clone().map(|time| time.into()); + this.update(&mut cx, move |this, cx| { + if let Some(buffer) = this.get_possibly_incomplete(buffer_id, cx) { buffer.update(cx, |buffer, cx| { buffer.did_save(version, mtime, cx); }); } + + if let Some((downstream_client, project_id)) = this.downstream_client.as_ref() { + downstream_client + .send(proto::BufferSaved { + project_id: *project_id, + buffer_id: buffer_id.into(), + mtime: envelope.payload.mtime, + version: envelope.payload.version, + }) + .log_err(); + } }) } @@ -1425,17 +1728,29 @@ impl BufferStore { ) -> Result<()> { let buffer_id = BufferId::new(envelope.payload.buffer_id)?; let version = deserialize_version(&envelope.payload.version); - let mtime = envelope.payload.mtime.map(|time| time.into()); + let mtime = envelope.payload.mtime.clone().map(|time| time.into()); let line_ending = deserialize_line_ending( proto::LineEnding::from_i32(envelope.payload.line_ending) .ok_or_else(|| anyhow!("missing line ending"))?, ); this.update(&mut cx, |this, cx| { - if let Some(buffer) = this.get_possibly_incomplete(buffer_id) { + if let Some(buffer) = this.get_possibly_incomplete(buffer_id, cx) { buffer.update(cx, |buffer, cx| { buffer.did_reload(version, line_ending, mtime, cx); }); } + + if let Some((downstream_client, project_id)) = this.downstream_client.as_ref() { + downstream_client + .send(proto::BufferReloaded { + project_id: *project_id, + buffer_id: buffer_id.into(), + mtime: envelope.payload.mtime, + version: envelope.payload.version, + line_ending: envelope.payload.line_ending, + }) + .log_err(); + } }) } @@ -1480,66 +1795,14 @@ impl BufferStore { push_to_history: bool, cx: &mut ModelContext, ) -> Task> { - let mut local_buffers = Vec::new(); - let mut remote_buffers = Vec::new(); - for buffer_handle in buffers { - let buffer = buffer_handle.read(cx); - if buffer.is_dirty() { - if let Some(file) = File::from_dyn(buffer.file()) { - if file.is_local() { - local_buffers.push(buffer_handle); - } else { - remote_buffers.push(buffer_handle); - } - } - } + let buffers: Vec> = buffers + .into_iter() + .filter(|buffer| buffer.read(cx).is_dirty()) + .collect(); + if buffers.is_empty() { + return Task::ready(Ok(ProjectTransaction::default())); } - - let client = self.upstream_client(); - - cx.spawn(move |this, mut cx| async move { - let mut project_transaction = ProjectTransaction::default(); - if let Some((client, project_id)) = client { - let response = client - .request(proto::ReloadBuffers { - project_id, - buffer_ids: remote_buffers - .iter() - .filter_map(|buffer| { - buffer - .update(&mut cx, |buffer, _| buffer.remote_id().into()) - .ok() - }) - .collect(), - }) - .await? - .transaction - .ok_or_else(|| anyhow!("missing transaction"))?; - BufferStore::deserialize_project_transaction( - this, - response, - push_to_history, - cx.clone(), - ) - .await?; - } - - for buffer in local_buffers { - let transaction = buffer - .update(&mut cx, |buffer, cx| buffer.reload(cx))? 
- .await?; - buffer.update(&mut cx, |buffer, cx| { - if let Some(transaction) = transaction { - if !push_to_history { - buffer.forget_transaction(transaction.id); - } - project_transaction.0.insert(cx.handle(), transaction); - } - })?; - } - - Ok(project_transaction) - }) + self.state.reload_buffers(buffers, push_to_history, cx) } async fn handle_reload_buffers( @@ -1629,17 +1892,6 @@ impl BufferStore { }) } - pub fn upstream_client(&self) -> Option<(AnyProtoClient, u64)> { - match &self.state { - BufferStoreState::Remote { - upstream_client, - project_id, - .. - } => Some((upstream_client.clone(), *project_id)), - BufferStoreState::Local { .. } => None, - } - } - pub fn forget_shared_buffers(&mut self) { self.shared_buffers.clear(); } @@ -1658,6 +1910,72 @@ impl BufferStore { &self.shared_buffers } + pub fn create_local_buffer( + &mut self, + text: &str, + language: Option>, + cx: &mut ModelContext, + ) -> Model { + let buffer = cx.new_model(|cx| { + Buffer::local(text, cx) + .with_language(language.unwrap_or_else(|| language::PLAIN_TEXT.clone()), cx) + }); + + self.add_buffer(buffer.clone(), cx).log_err(); + let buffer_id = buffer.read(cx).remote_id(); + + let local = self + .state + .as_local() + .expect("local-only method called in a non-local context"); + local.update(cx, |this, cx| { + if let Some(file) = File::from_dyn(buffer.read(cx).file()) { + this.local_buffer_ids_by_path.insert( + ProjectPath { + worktree_id: file.worktree_id(cx), + path: file.path.clone(), + }, + buffer_id, + ); + + if let Some(entry_id) = file.entry_id { + this.local_buffer_ids_by_entry_id + .insert(entry_id, buffer_id); + } + } + }); + buffer + } + + pub fn deserialize_project_transaction( + &mut self, + message: proto::ProjectTransaction, + push_to_history: bool, + cx: &mut ModelContext, + ) -> Task> { + if let Some(remote) = self.state.as_remote() { + remote.update(cx, |remote, cx| { + remote.deserialize_project_transaction(message, push_to_history, cx) + }) + } else { + debug_panic!("not a remote buffer store"); + Task::ready(Err(anyhow!("not a remote buffer store"))) + } + } + + pub fn wait_for_remote_buffer( + &self, + id: BufferId, + cx: &mut AppContext, + ) -> Task>> { + if let Some(remote) = self.state.as_remote() { + remote.update(cx, |remote, cx| remote.wait_for_remote_buffer(id, cx)) + } else { + debug_panic!("not a remote buffer store"); + Task::ready(Err(anyhow!("not a remote buffer store"))) + } + } + pub fn serialize_project_transaction_for_peer( &mut self, project_transaction: ProjectTransaction, @@ -1680,41 +1998,6 @@ impl BufferStore { } serialized_transaction } - - pub async fn deserialize_project_transaction( - this: WeakModel, - message: proto::ProjectTransaction, - push_to_history: bool, - mut cx: AsyncAppContext, - ) -> Result { - let mut project_transaction = ProjectTransaction::default(); - for (buffer_id, transaction) in message.buffer_ids.into_iter().zip(message.transactions) { - let buffer_id = BufferId::new(buffer_id)?; - let buffer = this - .update(&mut cx, |this, cx| { - this.wait_for_remote_buffer(buffer_id, cx) - })? - .await?; - let transaction = language::proto::deserialize_transaction(transaction)?; - project_transaction.0.insert(buffer, transaction); - } - - for (buffer, transaction) in &project_transaction.0 { - buffer - .update(&mut cx, |buffer, _| { - buffer.wait_for_edits(transaction.edit_ids.iter().copied()) - })? 
- .await?; - - if push_to_history { - buffer.update(&mut cx, |buffer, _| { - buffer.push_transaction(transaction.clone(), Instant::now()); - })?; - } - } - - Ok(project_transaction) - } } impl OpenBuffer { diff --git a/crates/project/src/lsp_command.rs b/crates/project/src/lsp_command.rs index 2b7b10d9b369a..96eb327e8c434 100644 --- a/crates/project/src/lsp_command.rs +++ b/crates/project/src/lsp_command.rs @@ -1,10 +1,9 @@ mod signature_help; use crate::{ - buffer_store::BufferStore, lsp_store::LspStore, CodeAction, CoreCompletion, DocumentHighlight, - Hover, HoverBlock, HoverBlockKind, InlayHint, InlayHintLabel, InlayHintLabelPart, - InlayHintLabelPartTooltip, InlayHintTooltip, Location, LocationLink, MarkupContent, - ProjectTransaction, ResolveState, + lsp_store::LspStore, CodeAction, CoreCompletion, DocumentHighlight, Hover, HoverBlock, + HoverBlockKind, InlayHint, InlayHintLabel, InlayHintLabelPart, InlayHintLabelPartTooltip, + InlayHintTooltip, Location, LocationLink, MarkupContent, ProjectTransaction, ResolveState, }; use anyhow::{anyhow, Context, Result}; use async_trait::async_trait; @@ -417,18 +416,18 @@ impl LspCommand for PerformRename { message: proto::PerformRenameResponse, lsp_store: Model, _: Model, - cx: AsyncAppContext, + mut cx: AsyncAppContext, ) -> Result { let message = message .transaction .ok_or_else(|| anyhow!("missing transaction"))?; - BufferStore::deserialize_project_transaction( - lsp_store.read_with(&cx, |lsp_store, _| lsp_store.buffer_store().downgrade())?, - message, - self.push_to_history, - cx, - ) - .await + lsp_store + .update(&mut cx, |lsp_store, cx| { + lsp_store.buffer_store().update(cx, |buffer_store, cx| { + buffer_store.deserialize_project_transaction(message, self.push_to_history, cx) + }) + })? + .await } fn buffer_id_from_proto(message: &proto::PerformRename) -> Result { diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 6c71d4baebf56..8d859c091bfe9 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -1601,19 +1601,19 @@ impl LspStore { buffer_id: buffer_handle.read(cx).remote_id().into(), action: Some(Self::serialize_code_action(&action)), }; - cx.spawn(move |this, cx| async move { + let buffer_store = self.buffer_store(); + cx.spawn(move |_, mut cx| async move { let response = upstream_client .request(request) .await? .transaction .ok_or_else(|| anyhow!("missing transaction"))?; - BufferStore::deserialize_project_transaction( - this.read_with(&cx, |this, _| this.buffer_store.downgrade())?, - response, - push_to_history, - cx, - ) - .await + + buffer_store + .update(&mut cx, |buffer_store, cx| { + buffer_store.deserialize_project_transaction(response, push_to_history, cx) + })? + .await }) } else { let buffer = buffer_handle.read(cx); @@ -5062,6 +5062,7 @@ impl LspStore { .spawn(this.languages.language_for_name(language_name.0.as_ref())) .detach(); + // host let adapter = this.languages.get_or_register_lsp_adapter( language_name.clone(), server_name.clone(), @@ -5259,7 +5260,8 @@ impl LspStore { result }) } else if let Some((client, project_id)) = self.upstream_client() { - cx.spawn(move |this, mut cx| async move { + let buffer_store = self.buffer_store(); + cx.spawn(move |_, mut cx| async move { let response = client .request(proto::FormatBuffers { project_id, @@ -5274,13 +5276,12 @@ impl LspStore { .await? 
.transaction .ok_or_else(|| anyhow!("missing transaction"))?; - BufferStore::deserialize_project_transaction( - this.read_with(&cx, |this, _| this.buffer_store.downgrade())?, - response, - push_to_history, - cx, - ) - .await + + buffer_store + .update(&mut cx, |buffer_store, cx| { + buffer_store.deserialize_project_transaction(response, push_to_history, cx) + })? + .await }) } else { Task::ready(Ok(ProjectTransaction::default())) diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 454a7586c8856..fe4d2d6b01545 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -1667,16 +1667,8 @@ impl Project { } pub fn create_buffer(&mut self, cx: &mut ModelContext) -> Task>> { - self.buffer_store.update(cx, |buffer_store, cx| { - buffer_store.create_buffer( - if self.is_via_collab() { - Some((self.client.clone().into(), self.remote_id().unwrap())) - } else { - None - }, - cx, - ) - }) + self.buffer_store + .update(cx, |buffer_store, cx| buffer_store.create_buffer(cx)) } pub fn create_local_buffer( @@ -1685,7 +1677,7 @@ impl Project { language: Option>, cx: &mut ModelContext, ) -> Model { - if self.is_via_collab() { + if self.is_via_collab() || self.is_via_ssh() { panic!("called create_local_buffer on a remote project") } self.buffer_store.update(cx, |buffer_store, cx| { @@ -3770,7 +3762,9 @@ impl Project { envelope: TypedEnvelope, mut cx: AsyncAppContext, ) -> Result { - let buffer = this.update(&mut cx, |this, cx| this.create_local_buffer("", None, cx))?; + let buffer = this + .update(&mut cx, |this, cx| this.create_buffer(cx))? + .await?; let peer_id = envelope.original_sender_id()?; Project::respond_to_open_buffer_request(this, buffer, peer_id, &mut cx) diff --git a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs index eca65f1349845..084fcf9929f01 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -56,6 +56,7 @@ async fn test_basic_remote_editing(cx: &mut TestAppContext, server_cx: &mut Test }) .await .unwrap(); + buffer.update(cx, |buffer, cx| { assert_eq!(buffer.text(), "fn one() -> usize { 1 }"); assert_eq!( From fdb03d30587d3269fbb76a9f44a6f08a7f51df97 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 24 Sep 2024 15:16:27 -0700 Subject: [PATCH 052/228] Move DisplayDiffHunk into hunk_diff module (#18307) Release Notes: - N/A Co-authored-by: Marshall --- crates/editor/src/editor.rs | 3 +- crates/editor/src/element.rs | 7 +- crates/editor/src/git.rs | 308 ----------------------------- crates/editor/src/hunk_diff.rs | 340 ++++++++++++++++++++++++++++++--- 4 files changed, 320 insertions(+), 338 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index a32910e78ab97..316d945ca4df6 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -71,7 +71,6 @@ pub use element::{ use futures::{future, FutureExt}; use fuzzy::{StringMatch, StringMatchCandidate}; use git::blame::GitBlame; -use git::diff_hunk_to_display; use gpui::{ div, impl_actions, point, prelude::*, px, relative, size, uniform_list, Action, AnyElement, AppContext, AsyncWindowContext, AvailableSpace, BackgroundExecutor, Bounds, ClipboardEntry, @@ -84,8 +83,8 @@ use gpui::{ }; use highlight_matching_bracket::refresh_matching_bracket_highlights; use hover_popover::{hide_hover, HoverState}; -use hunk_diff::ExpandedHunks; pub(crate) use hunk_diff::HoveredHunk; +use hunk_diff::{diff_hunk_to_display, ExpandedHunks}; 
use indent_guides::ActiveIndentGuidesState; use inlay_hint_cache::{InlayHintCache, InlaySplice, InvalidationStrategy}; pub use inline_completion_provider::*; diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 31e4efb83b60e..cf8edb67dccbc 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -7,14 +7,11 @@ use crate::{ CurrentLineHighlight, DoubleClickInMultibuffer, MultiCursorModifier, ScrollBeyondLastLine, ShowScrollbar, }, - git::{ - blame::{CommitDetails, GitBlame}, - diff_hunk_to_display, DisplayDiffHunk, - }, + git::blame::{CommitDetails, GitBlame}, hover_popover::{ self, hover_at, HOVER_POPOVER_GAP, MIN_POPOVER_CHARACTER_WIDTH, MIN_POPOVER_LINE_HEIGHT, }, - hunk_diff::ExpandedHunk, + hunk_diff::{diff_hunk_to_display, DisplayDiffHunk, ExpandedHunk}, hunk_status, items::BufferSearchHighlights, mouse_context_menu::{self, MenuPosition, MouseContextMenu}, diff --git a/crates/editor/src/git.rs b/crates/editor/src/git.rs index fb18ca45a2a2f..080babe4c682a 100644 --- a/crates/editor/src/git.rs +++ b/crates/editor/src/git.rs @@ -1,309 +1 @@ pub mod blame; - -use std::ops::Range; - -use git::diff::DiffHunkStatus; -use language::Point; -use multi_buffer::{Anchor, MultiBufferDiffHunk}; - -use crate::{ - display_map::{DisplaySnapshot, ToDisplayPoint}, - hunk_status, AnchorRangeExt, DisplayRow, -}; - -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum DisplayDiffHunk { - Folded { - display_row: DisplayRow, - }, - - Unfolded { - diff_base_byte_range: Range, - display_row_range: Range, - multi_buffer_range: Range, - status: DiffHunkStatus, - }, -} - -impl DisplayDiffHunk { - pub fn start_display_row(&self) -> DisplayRow { - match self { - &DisplayDiffHunk::Folded { display_row } => display_row, - DisplayDiffHunk::Unfolded { - display_row_range, .. - } => display_row_range.start, - } - } - - pub fn contains_display_row(&self, display_row: DisplayRow) -> bool { - let range = match self { - &DisplayDiffHunk::Folded { display_row } => display_row..=display_row, - - DisplayDiffHunk::Unfolded { - display_row_range, .. 
- } => display_row_range.start..=display_row_range.end, - }; - - range.contains(&display_row) - } -} - -pub fn diff_hunk_to_display( - hunk: &MultiBufferDiffHunk, - snapshot: &DisplaySnapshot, -) -> DisplayDiffHunk { - let hunk_start_point = Point::new(hunk.row_range.start.0, 0); - let hunk_start_point_sub = Point::new(hunk.row_range.start.0.saturating_sub(1), 0); - let hunk_end_point_sub = Point::new( - hunk.row_range - .end - .0 - .saturating_sub(1) - .max(hunk.row_range.start.0), - 0, - ); - - let status = hunk_status(hunk); - let is_removal = status == DiffHunkStatus::Removed; - - let folds_start = Point::new(hunk.row_range.start.0.saturating_sub(2), 0); - let folds_end = Point::new(hunk.row_range.end.0 + 2, 0); - let folds_range = folds_start..folds_end; - - let containing_fold = snapshot.folds_in_range(folds_range).find(|fold| { - let fold_point_range = fold.range.to_point(&snapshot.buffer_snapshot); - let fold_point_range = fold_point_range.start..=fold_point_range.end; - - let folded_start = fold_point_range.contains(&hunk_start_point); - let folded_end = fold_point_range.contains(&hunk_end_point_sub); - let folded_start_sub = fold_point_range.contains(&hunk_start_point_sub); - - (folded_start && folded_end) || (is_removal && folded_start_sub) - }); - - if let Some(fold) = containing_fold { - let row = fold.range.start.to_display_point(snapshot).row(); - DisplayDiffHunk::Folded { display_row: row } - } else { - let start = hunk_start_point.to_display_point(snapshot).row(); - - let hunk_end_row = hunk.row_range.end.max(hunk.row_range.start); - let hunk_end_point = Point::new(hunk_end_row.0, 0); - - let multi_buffer_start = snapshot.buffer_snapshot.anchor_before(hunk_start_point); - let multi_buffer_end = snapshot.buffer_snapshot.anchor_after(hunk_end_point); - let end = hunk_end_point.to_display_point(snapshot).row(); - - DisplayDiffHunk::Unfolded { - display_row_range: start..end, - multi_buffer_range: multi_buffer_start..multi_buffer_end, - status, - diff_base_byte_range: hunk.diff_base_byte_range.clone(), - } - } -} - -#[cfg(test)] -mod tests { - use crate::Point; - use crate::{editor_tests::init_test, hunk_status}; - use gpui::{Context, TestAppContext}; - use language::Capability::ReadWrite; - use multi_buffer::{ExcerptRange, MultiBuffer, MultiBufferRow}; - use project::{FakeFs, Project}; - use unindent::Unindent; - #[gpui::test] - async fn test_diff_hunks_in_range(cx: &mut TestAppContext) { - use git::diff::DiffHunkStatus; - init_test(cx, |_| {}); - - let fs = FakeFs::new(cx.background_executor.clone()); - let project = Project::test(fs, [], cx).await; - - // buffer has two modified hunks with two rows each - let buffer_1 = project.update(cx, |project, cx| { - project.create_local_buffer( - " - 1.zero - 1.ONE - 1.TWO - 1.three - 1.FOUR - 1.FIVE - 1.six - " - .unindent() - .as_str(), - None, - cx, - ) - }); - buffer_1.update(cx, |buffer, cx| { - buffer.set_diff_base( - Some( - " - 1.zero - 1.one - 1.two - 1.three - 1.four - 1.five - 1.six - " - .unindent(), - ), - cx, - ); - }); - - // buffer has a deletion hunk and an insertion hunk - let buffer_2 = project.update(cx, |project, cx| { - project.create_local_buffer( - " - 2.zero - 2.one - 2.two - 2.three - 2.four - 2.five - 2.six - " - .unindent() - .as_str(), - None, - cx, - ) - }); - buffer_2.update(cx, |buffer, cx| { - buffer.set_diff_base( - Some( - " - 2.zero - 2.one - 2.one-and-a-half - 2.two - 2.three - 2.four - 2.six - " - .unindent(), - ), - cx, - ); - }); - - cx.background_executor.run_until_parked(); - - let 
multibuffer = cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(ReadWrite); - multibuffer.push_excerpts( - buffer_1.clone(), - [ - // excerpt ends in the middle of a modified hunk - ExcerptRange { - context: Point::new(0, 0)..Point::new(1, 5), - primary: Default::default(), - }, - // excerpt begins in the middle of a modified hunk - ExcerptRange { - context: Point::new(5, 0)..Point::new(6, 5), - primary: Default::default(), - }, - ], - cx, - ); - multibuffer.push_excerpts( - buffer_2.clone(), - [ - // excerpt ends at a deletion - ExcerptRange { - context: Point::new(0, 0)..Point::new(1, 5), - primary: Default::default(), - }, - // excerpt starts at a deletion - ExcerptRange { - context: Point::new(2, 0)..Point::new(2, 5), - primary: Default::default(), - }, - // excerpt fully contains a deletion hunk - ExcerptRange { - context: Point::new(1, 0)..Point::new(2, 5), - primary: Default::default(), - }, - // excerpt fully contains an insertion hunk - ExcerptRange { - context: Point::new(4, 0)..Point::new(6, 5), - primary: Default::default(), - }, - ], - cx, - ); - multibuffer - }); - - let snapshot = multibuffer.read_with(cx, |b, cx| b.snapshot(cx)); - - assert_eq!( - snapshot.text(), - " - 1.zero - 1.ONE - 1.FIVE - 1.six - 2.zero - 2.one - 2.two - 2.one - 2.two - 2.four - 2.five - 2.six" - .unindent() - ); - - let expected = [ - ( - DiffHunkStatus::Modified, - MultiBufferRow(1)..MultiBufferRow(2), - ), - ( - DiffHunkStatus::Modified, - MultiBufferRow(2)..MultiBufferRow(3), - ), - //TODO: Define better when and where removed hunks show up at range extremities - ( - DiffHunkStatus::Removed, - MultiBufferRow(6)..MultiBufferRow(6), - ), - ( - DiffHunkStatus::Removed, - MultiBufferRow(8)..MultiBufferRow(8), - ), - ( - DiffHunkStatus::Added, - MultiBufferRow(10)..MultiBufferRow(11), - ), - ]; - - assert_eq!( - snapshot - .git_diff_hunks_in_range(MultiBufferRow(0)..MultiBufferRow(12)) - .map(|hunk| (hunk_status(&hunk), hunk.row_range)) - .collect::>(), - &expected, - ); - - assert_eq!( - snapshot - .git_diff_hunks_in_range_rev(MultiBufferRow(0)..MultiBufferRow(12)) - .map(|hunk| (hunk_status(&hunk), hunk.row_range)) - .collect::>(), - expected - .iter() - .rev() - .cloned() - .collect::>() - .as_slice(), - ); - } -} diff --git a/crates/editor/src/hunk_diff.rs b/crates/editor/src/hunk_diff.rs index 2f7bb49e85349..67e8a25df58c5 100644 --- a/crates/editor/src/hunk_diff.rs +++ b/crates/editor/src/hunk_diff.rs @@ -1,18 +1,16 @@ -use std::{ - ops::{Range, RangeInclusive}, - sync::Arc, -}; - use collections::{hash_map, HashMap, HashSet}; use git::diff::DiffHunkStatus; use gpui::{Action, AppContext, CursorStyle, Hsla, Model, MouseButton, Subscription, Task, View}; -use language::Buffer; +use language::{Buffer, BufferId, Point}; use multi_buffer::{ Anchor, AnchorRangeExt, ExcerptRange, MultiBuffer, MultiBufferDiffHunk, MultiBufferRow, MultiBufferSnapshot, ToPoint, }; use settings::SettingsStore; -use text::{BufferId, Point}; +use std::{ + ops::{Range, RangeInclusive}, + sync::Arc, +}; use ui::{ prelude::*, ActiveTheme, ContextMenu, InteractiveElement, IntoElement, ParentElement, Pixels, Styled, ViewContext, VisualContext, @@ -20,13 +18,11 @@ use ui::{ use util::{debug_panic, RangeExt}; use crate::{ - editor_settings::CurrentLineHighlight, - git::{diff_hunk_to_display, DisplayDiffHunk}, - hunk_status, hunks_for_selections, - mouse_context_menu::MouseContextMenu, - BlockDisposition, BlockProperties, BlockStyle, CustomBlockId, DiffRowHighlight, Editor, - EditorElement, EditorSnapshot, 
ExpandAllHunkDiffs, RangeToAnchorExt, RevertFile, - RevertSelectedHunks, ToDisplayPoint, ToggleHunkDiff, + editor_settings::CurrentLineHighlight, hunk_status, hunks_for_selections, + mouse_context_menu::MouseContextMenu, BlockDisposition, BlockProperties, BlockStyle, + CustomBlockId, DiffRowHighlight, DisplayRow, DisplaySnapshot, Editor, EditorElement, + EditorSnapshot, ExpandAllHunkDiffs, RangeToAnchorExt, RevertFile, RevertSelectedHunks, + ToDisplayPoint, ToggleHunkDiff, }; #[derive(Debug, Clone)] @@ -43,12 +39,35 @@ pub(super) struct ExpandedHunks { hunk_update_tasks: HashMap, Task<()>>, } +#[derive(Debug, Clone)] +pub(super) struct ExpandedHunk { + pub block: Option, + pub hunk_range: Range, + pub diff_base_byte_range: Range, + pub status: DiffHunkStatus, + pub folded: bool, +} + #[derive(Debug)] struct DiffBaseBuffer { buffer: Model, diff_base_version: usize, } +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum DisplayDiffHunk { + Folded { + display_row: DisplayRow, + }, + + Unfolded { + diff_base_byte_range: Range, + display_row_range: Range, + multi_buffer_range: Range, + status: DiffHunkStatus, + }, +} + impl ExpandedHunks { pub fn hunks(&self, include_folded: bool) -> impl Iterator { self.hunks @@ -57,15 +76,6 @@ impl ExpandedHunks { } } -#[derive(Debug, Clone)] -pub(super) struct ExpandedHunk { - pub block: Option, - pub hunk_range: Range, - pub diff_base_byte_range: Range, - pub status: DiffHunkStatus, - pub folded: bool, -} - impl Editor { pub(super) fn open_hunk_context_menu( &mut self, @@ -883,3 +893,287 @@ fn to_inclusive_row_range( let new_range = point_range.to_anchors(&snapshot.buffer_snapshot); new_range.start..=new_range.end } + +impl DisplayDiffHunk { + pub fn start_display_row(&self) -> DisplayRow { + match self { + &DisplayDiffHunk::Folded { display_row } => display_row, + DisplayDiffHunk::Unfolded { + display_row_range, .. + } => display_row_range.start, + } + } + + pub fn contains_display_row(&self, display_row: DisplayRow) -> bool { + let range = match self { + &DisplayDiffHunk::Folded { display_row } => display_row..=display_row, + + DisplayDiffHunk::Unfolded { + display_row_range, .. 
+ } => display_row_range.start..=display_row_range.end, + }; + + range.contains(&display_row) + } +} + +pub fn diff_hunk_to_display( + hunk: &MultiBufferDiffHunk, + snapshot: &DisplaySnapshot, +) -> DisplayDiffHunk { + let hunk_start_point = Point::new(hunk.row_range.start.0, 0); + let hunk_start_point_sub = Point::new(hunk.row_range.start.0.saturating_sub(1), 0); + let hunk_end_point_sub = Point::new( + hunk.row_range + .end + .0 + .saturating_sub(1) + .max(hunk.row_range.start.0), + 0, + ); + + let status = hunk_status(hunk); + let is_removal = status == DiffHunkStatus::Removed; + + let folds_start = Point::new(hunk.row_range.start.0.saturating_sub(2), 0); + let folds_end = Point::new(hunk.row_range.end.0 + 2, 0); + let folds_range = folds_start..folds_end; + + let containing_fold = snapshot.folds_in_range(folds_range).find(|fold| { + let fold_point_range = fold.range.to_point(&snapshot.buffer_snapshot); + let fold_point_range = fold_point_range.start..=fold_point_range.end; + + let folded_start = fold_point_range.contains(&hunk_start_point); + let folded_end = fold_point_range.contains(&hunk_end_point_sub); + let folded_start_sub = fold_point_range.contains(&hunk_start_point_sub); + + (folded_start && folded_end) || (is_removal && folded_start_sub) + }); + + if let Some(fold) = containing_fold { + let row = fold.range.start.to_display_point(snapshot).row(); + DisplayDiffHunk::Folded { display_row: row } + } else { + let start = hunk_start_point.to_display_point(snapshot).row(); + + let hunk_end_row = hunk.row_range.end.max(hunk.row_range.start); + let hunk_end_point = Point::new(hunk_end_row.0, 0); + + let multi_buffer_start = snapshot.buffer_snapshot.anchor_before(hunk_start_point); + let multi_buffer_end = snapshot.buffer_snapshot.anchor_after(hunk_end_point); + let end = hunk_end_point.to_display_point(snapshot).row(); + + DisplayDiffHunk::Unfolded { + display_row_range: start..end, + multi_buffer_range: multi_buffer_start..multi_buffer_end, + status, + diff_base_byte_range: hunk.diff_base_byte_range.clone(), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::{editor_tests::init_test, hunk_status}; + use gpui::{Context, TestAppContext}; + use language::Capability::ReadWrite; + use multi_buffer::{ExcerptRange, MultiBuffer, MultiBufferRow}; + use project::{FakeFs, Project}; + use unindent::Unindent as _; + + #[gpui::test] + async fn test_diff_hunks_in_range(cx: &mut TestAppContext) { + use git::diff::DiffHunkStatus; + init_test(cx, |_| {}); + + let fs = FakeFs::new(cx.background_executor.clone()); + let project = Project::test(fs, [], cx).await; + + // buffer has two modified hunks with two rows each + let buffer_1 = project.update(cx, |project, cx| { + project.create_local_buffer( + " + 1.zero + 1.ONE + 1.TWO + 1.three + 1.FOUR + 1.FIVE + 1.six + " + .unindent() + .as_str(), + None, + cx, + ) + }); + buffer_1.update(cx, |buffer, cx| { + buffer.set_diff_base( + Some( + " + 1.zero + 1.one + 1.two + 1.three + 1.four + 1.five + 1.six + " + .unindent(), + ), + cx, + ); + }); + + // buffer has a deletion hunk and an insertion hunk + let buffer_2 = project.update(cx, |project, cx| { + project.create_local_buffer( + " + 2.zero + 2.one + 2.two + 2.three + 2.four + 2.five + 2.six + " + .unindent() + .as_str(), + None, + cx, + ) + }); + buffer_2.update(cx, |buffer, cx| { + buffer.set_diff_base( + Some( + " + 2.zero + 2.one + 2.one-and-a-half + 2.two + 2.three + 2.four + 2.six + " + .unindent(), + ), + cx, + ); + }); + + cx.background_executor.run_until_parked(); + + let 
multibuffer = cx.new_model(|cx| { + let mut multibuffer = MultiBuffer::new(ReadWrite); + multibuffer.push_excerpts( + buffer_1.clone(), + [ + // excerpt ends in the middle of a modified hunk + ExcerptRange { + context: Point::new(0, 0)..Point::new(1, 5), + primary: Default::default(), + }, + // excerpt begins in the middle of a modified hunk + ExcerptRange { + context: Point::new(5, 0)..Point::new(6, 5), + primary: Default::default(), + }, + ], + cx, + ); + multibuffer.push_excerpts( + buffer_2.clone(), + [ + // excerpt ends at a deletion + ExcerptRange { + context: Point::new(0, 0)..Point::new(1, 5), + primary: Default::default(), + }, + // excerpt starts at a deletion + ExcerptRange { + context: Point::new(2, 0)..Point::new(2, 5), + primary: Default::default(), + }, + // excerpt fully contains a deletion hunk + ExcerptRange { + context: Point::new(1, 0)..Point::new(2, 5), + primary: Default::default(), + }, + // excerpt fully contains an insertion hunk + ExcerptRange { + context: Point::new(4, 0)..Point::new(6, 5), + primary: Default::default(), + }, + ], + cx, + ); + multibuffer + }); + + let snapshot = multibuffer.read_with(cx, |b, cx| b.snapshot(cx)); + + assert_eq!( + snapshot.text(), + " + 1.zero + 1.ONE + 1.FIVE + 1.six + 2.zero + 2.one + 2.two + 2.one + 2.two + 2.four + 2.five + 2.six" + .unindent() + ); + + let expected = [ + ( + DiffHunkStatus::Modified, + MultiBufferRow(1)..MultiBufferRow(2), + ), + ( + DiffHunkStatus::Modified, + MultiBufferRow(2)..MultiBufferRow(3), + ), + //TODO: Define better when and where removed hunks show up at range extremities + ( + DiffHunkStatus::Removed, + MultiBufferRow(6)..MultiBufferRow(6), + ), + ( + DiffHunkStatus::Removed, + MultiBufferRow(8)..MultiBufferRow(8), + ), + ( + DiffHunkStatus::Added, + MultiBufferRow(10)..MultiBufferRow(11), + ), + ]; + + assert_eq!( + snapshot + .git_diff_hunks_in_range(MultiBufferRow(0)..MultiBufferRow(12)) + .map(|hunk| (hunk_status(&hunk), hunk.row_range)) + .collect::>(), + &expected, + ); + + assert_eq!( + snapshot + .git_diff_hunks_in_range_rev(MultiBufferRow(0)..MultiBufferRow(12)) + .map(|hunk| (hunk_status(&hunk), hunk.row_range)) + .collect::>(), + expected + .iter() + .rev() + .cloned() + .collect::>() + .as_slice(), + ); + } +} From d33600525ee372813293e803d6aa0f2fa7d50fcb Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Tue, 24 Sep 2024 16:23:08 -0600 Subject: [PATCH 053/228] ssh remoting: Fix cmd-o (#18308) Release Notes: - ssh-remoting: Cmd-O now correctly opens files on the remote host --------- Co-authored-by: Mikayla --- crates/assistant/src/context_store.rs | 6 -- .../random_project_collaboration_tests.rs | 11 ++- crates/editor/src/editor.rs | 2 +- crates/feedback/src/feedback_modal.rs | 69 ++++++++----------- crates/file_finder/src/file_finder.rs | 2 +- crates/language_tools/src/lsp_log.rs | 4 +- crates/outline_panel/src/outline_panel.rs | 2 +- crates/project/src/project.rs | 46 +++++++------ crates/project_panel/src/project_panel.rs | 5 +- crates/tasks_ui/src/lib.rs | 2 +- crates/tasks_ui/src/modal.rs | 2 +- crates/terminal_view/src/terminal_panel.rs | 2 +- crates/title_bar/src/collab.rs | 8 +-- crates/workspace/src/workspace.rs | 12 ++-- crates/zed/src/zed.rs | 2 +- 15 files changed, 84 insertions(+), 91 deletions(-) diff --git a/crates/assistant/src/context_store.rs b/crates/assistant/src/context_store.rs index f57a2fbca613c..f4f03dda377ba 100644 --- a/crates/assistant/src/context_store.rs +++ b/crates/assistant/src/context_store.rs @@ -357,9 +357,6 @@ impl ContextStore { let 
Some(project_id) = project.remote_id() else { return Task::ready(Err(anyhow!("project was not remote"))); }; - if project.is_local_or_ssh() { - return Task::ready(Err(anyhow!("cannot create remote contexts as the host"))); - } let replica_id = project.replica_id(); let capability = project.capability(); @@ -488,9 +485,6 @@ impl ContextStore { let Some(project_id) = project.remote_id() else { return Task::ready(Err(anyhow!("project was not remote"))); }; - if project.is_local_or_ssh() { - return Task::ready(Err(anyhow!("cannot open remote contexts as the host"))); - } if let Some(context) = self.loaded_context_for_id(&context_id, cx) { return Task::ready(Ok(context)); diff --git a/crates/collab/src/tests/random_project_collaboration_tests.rs b/crates/collab/src/tests/random_project_collaboration_tests.rs index 831114ba1a0c9..19d37f8786be6 100644 --- a/crates/collab/src/tests/random_project_collaboration_tests.rs +++ b/crates/collab/src/tests/random_project_collaboration_tests.rs @@ -298,8 +298,7 @@ impl RandomizedTest for ProjectCollaborationTest { continue; }; let project_root_name = root_name_for_project(&project, cx); - let is_local = - project.read_with(cx, |project, _| project.is_local_or_ssh()); + let is_local = project.read_with(cx, |project, _| project.is_local()); let worktree = project.read_with(cx, |project, cx| { project .worktrees(cx) @@ -335,7 +334,7 @@ impl RandomizedTest for ProjectCollaborationTest { continue; }; let project_root_name = root_name_for_project(&project, cx); - let is_local = project.read_with(cx, |project, _| project.is_local_or_ssh()); + let is_local = project.read_with(cx, |project, _| project.is_local()); match rng.gen_range(0..100_u32) { // Manipulate an existing buffer @@ -1256,7 +1255,7 @@ impl RandomizedTest for ProjectCollaborationTest { let buffers = client.buffers().clone(); for (guest_project, guest_buffers) in &buffers { let project_id = if guest_project.read_with(client_cx, |project, _| { - project.is_local_or_ssh() || project.is_disconnected() + project.is_local() || project.is_disconnected() }) { continue; } else { @@ -1560,9 +1559,7 @@ async fn ensure_project_shared( let first_root_name = root_name_for_project(project, cx); let active_call = cx.read(ActiveCall::global); if active_call.read_with(cx, |call, _| call.room().is_some()) - && project.read_with(cx, |project, _| { - project.is_local_or_ssh() && !project.is_shared() - }) + && project.read_with(cx, |project, _| project.is_local() && !project.is_shared()) { match active_call .update(cx, |call, cx| call.share_project(project.clone(), cx)) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 316d945ca4df6..b54889dc0d8c2 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -11819,7 +11819,7 @@ impl Editor { .filter_map(|buffer| { let buffer = buffer.read(cx); let language = buffer.language()?; - if project.is_local_or_ssh() + if project.is_local() && project.language_servers_for_buffer(buffer, cx).count() == 0 { None diff --git a/crates/feedback/src/feedback_modal.rs b/crates/feedback/src/feedback_modal.rs index a4a07ad2ad44d..4762b228d3e44 100644 --- a/crates/feedback/src/feedback_modal.rs +++ b/crates/feedback/src/feedback_modal.rs @@ -18,8 +18,7 @@ use regex::Regex; use serde_derive::Serialize; use ui::{prelude::*, Button, ButtonStyle, IconPosition, Tooltip}; use util::ResultExt; -use workspace::notifications::NotificationId; -use workspace::{DismissDecision, ModalView, Toast, Workspace}; +use workspace::{DismissDecision, ModalView, 
Workspace}; use crate::{system_specs::SystemSpecs, GiveFeedback, OpenZedRepo}; @@ -120,44 +119,34 @@ impl FeedbackModal { pub fn register(workspace: &mut Workspace, cx: &mut ViewContext) { let _handle = cx.view().downgrade(); workspace.register_action(move |workspace, _: &GiveFeedback, cx| { - let markdown = workspace - .app_state() - .languages - .language_for_name("Markdown"); - - let project = workspace.project().clone(); - let is_local_project = project.read(cx).is_local_or_ssh(); - - if !is_local_project { - struct FeedbackInRemoteProject; - - workspace.show_toast( - Toast::new( - NotificationId::unique::(), - "You can only submit feedback in your own project.", - ), - cx, - ); - return; - } - - let system_specs = SystemSpecs::new(cx); - cx.spawn(|workspace, mut cx| async move { - let markdown = markdown.await.log_err(); - let buffer = project.update(&mut cx, |project, cx| { - project.create_local_buffer("", markdown, cx) - })?; - let system_specs = system_specs.await; - - workspace.update(&mut cx, |workspace, cx| { - workspace.toggle_modal(cx, move |cx| { - FeedbackModal::new(system_specs, project, buffer, cx) - }); - })?; - - anyhow::Ok(()) - }) - .detach_and_log_err(cx); + workspace + .with_local_workspace(cx, |workspace, cx| { + let markdown = workspace + .app_state() + .languages + .language_for_name("Markdown"); + + let project = workspace.project().clone(); + + let system_specs = SystemSpecs::new(cx); + cx.spawn(|workspace, mut cx| async move { + let markdown = markdown.await.log_err(); + let buffer = project.update(&mut cx, |project, cx| { + project.create_local_buffer("", markdown, cx) + })?; + let system_specs = system_specs.await; + + workspace.update(&mut cx, |workspace, cx| { + workspace.toggle_modal(cx, move |cx| { + FeedbackModal::new(system_specs, project, buffer, cx) + }); + })?; + + anyhow::Ok(()) + }) + .detach_and_log_err(cx); + }) + .detach_and_log_err(cx); }); } diff --git a/crates/file_finder/src/file_finder.rs b/crates/file_finder/src/file_finder.rs index 4c3f92d3c156a..726a8bcb5e698 100644 --- a/crates/file_finder/src/file_finder.rs +++ b/crates/file_finder/src/file_finder.rs @@ -884,7 +884,7 @@ impl PickerDelegate for FileFinderDelegate { project .worktree_for_id(history_item.project.worktree_id, cx) .is_some() - || (project.is_local_or_ssh() && history_item.absolute.is_some()) + || (project.is_local() && history_item.absolute.is_some()) }), self.currently_opened_path.as_ref(), None, diff --git a/crates/language_tools/src/lsp_log.rs b/crates/language_tools/src/lsp_log.rs index bde5fe9b199e8..d8fe3aa51840e 100644 --- a/crates/language_tools/src/lsp_log.rs +++ b/crates/language_tools/src/lsp_log.rs @@ -184,7 +184,7 @@ pub fn init(cx: &mut AppContext) { cx.observe_new_views(move |workspace: &mut Workspace, cx| { let project = workspace.project(); - if project.read(cx).is_local_or_ssh() { + if project.read(cx).is_local() { log_store.update(cx, |store, cx| { store.add_project(project, cx); }); @@ -193,7 +193,7 @@ pub fn init(cx: &mut AppContext) { let log_store = log_store.clone(); workspace.register_action(move |workspace, _: &OpenLanguageServerLogs, cx| { let project = workspace.project().read(cx); - if project.is_local_or_ssh() { + if project.is_local() { workspace.add_item_to_active_pane( Box::new(cx.new_view(|cx| { LspLogView::new(workspace.project().clone(), log_store.clone(), cx) diff --git a/crates/outline_panel/src/outline_panel.rs b/crates/outline_panel/src/outline_panel.rs index da66ca40313d8..4944f770e73a6 100644 --- 
a/crates/outline_panel/src/outline_panel.rs +++ b/crates/outline_panel/src/outline_panel.rs @@ -3909,7 +3909,7 @@ impl Render for OutlinePanel { .when(project.is_local(), |el| { el.on_action(cx.listener(Self::reveal_in_finder)) }) - .when(project.is_local_or_ssh(), |el| { + .when(project.is_local() || project.is_via_ssh(), |el| { el.on_action(cx.listener(Self::open_in_terminal)) }) .on_mouse_down( diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index fe4d2d6b01545..5a9b235d91cdb 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -487,7 +487,7 @@ impl DirectoryLister { pub fn is_local(&self, cx: &AppContext) -> bool { match self { DirectoryLister::Local(_) => true, - DirectoryLister::Project(project) => project.read(cx).is_local_or_ssh(), + DirectoryLister::Project(project) => project.read(cx).is_local(), } } @@ -1199,7 +1199,13 @@ impl Project { self.dev_server_project_id } - pub fn supports_remote_terminal(&self, cx: &AppContext) -> bool { + pub fn supports_terminal(&self, cx: &AppContext) -> bool { + if self.is_local() { + return true; + } + if self.is_via_ssh() { + return true; + } let Some(id) = self.dev_server_project_id else { return false; }; @@ -1213,10 +1219,6 @@ impl Project { } pub fn ssh_connection_string(&self, cx: &ModelContext) -> Option { - if self.is_local_or_ssh() { - return None; - } - let dev_server_id = self.dev_server_project_id()?; dev_server_projects::Store::global(cx) .read(cx) @@ -1643,13 +1645,6 @@ impl Project { } } - pub fn is_local_or_ssh(&self) -> bool { - match &self.client_state { - ProjectClientState::Local | ProjectClientState::Shared { .. } => true, - ProjectClientState::Remote { .. } => false, - } - } - pub fn is_via_ssh(&self) -> bool { match &self.client_state { ProjectClientState::Local | ProjectClientState::Shared { .. 
} => { @@ -1735,7 +1730,7 @@ impl Project { ) -> Task>> { if let Some(buffer) = self.buffer_for_id(id, cx) { Task::ready(Ok(buffer)) - } else if self.is_local_or_ssh() { + } else if self.is_local() || self.is_via_ssh() { Task::ready(Err(anyhow!("buffer {} does not exist", id))) } else if let Some(project_id) = self.remote_id() { let request = self.client.request(proto::OpenBufferById { @@ -1857,7 +1852,7 @@ impl Project { let mut changes = rx.ready_chunks(MAX_BATCH_SIZE); while let Some(changes) = changes.next().await { - let is_local = this.update(&mut cx, |this, _| this.is_local_or_ssh())?; + let is_local = this.update(&mut cx, |this, _| this.is_local())?; for change in changes { match change { @@ -2001,7 +1996,7 @@ impl Project { language_server_id, message, } => { - if self.is_local_or_ssh() { + if self.is_local() { self.enqueue_buffer_ordered_message( BufferOrderedMessage::LanguageServerUpdate { language_server_id: *language_server_id, @@ -3039,8 +3034,19 @@ impl Project { query: String, cx: &mut ModelContext, ) -> Task>> { - if self.is_local_or_ssh() { + if self.is_local() { DirectoryLister::Local(self.fs.clone()).list_directory(query, cx) + } else if let Some(session) = self.ssh_session.as_ref() { + let request = proto::ListRemoteDirectory { + dev_server_id: SSH_PROJECT_ID, + path: query, + }; + + let response = session.request(request); + cx.background_executor().spawn(async move { + let response = response.await?; + Ok(response.entries.into_iter().map(PathBuf::from).collect()) + }) } else if let Some(dev_server) = self.dev_server_project_id().and_then(|id| { dev_server_projects::Store::global(cx) .read(cx) @@ -3317,7 +3323,7 @@ impl Project { mut cx: AsyncAppContext, ) -> Result<()> { this.update(&mut cx, |this, cx| { - if this.is_local_or_ssh() { + if this.is_local() || this.is_via_ssh() { this.unshare(cx)?; } else { this.disconnected_from_host(cx); @@ -3995,7 +4001,7 @@ impl Project { location: Location, cx: &mut ModelContext<'_, Project>, ) -> Task> { - if self.is_local_or_ssh() { + if self.is_local() { let (worktree_id, worktree_abs_path) = if let Some(worktree) = self.task_worktree(cx) { ( Some(worktree.read(cx).id()), @@ -4081,7 +4087,7 @@ impl Project { location: Option, cx: &mut ModelContext, ) -> Task>> { - if self.is_local_or_ssh() { + if self.is_local() { let (file, language) = location .map(|location| { let buffer = location.buffer.read(cx); diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index 8e741134f0e44..6958bfb3318e2 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -2722,11 +2722,14 @@ impl Render for ProjectPanel { } })) }) - .when(project.is_local_or_ssh(), |el| { + .when(project.is_local(), |el| { el.on_action(cx.listener(Self::reveal_in_finder)) .on_action(cx.listener(Self::open_system)) .on_action(cx.listener(Self::open_in_terminal)) }) + .when(project.is_via_ssh(), |el| { + el.on_action(cx.listener(Self::open_in_terminal)) + }) .on_mouse_down( MouseButton::Right, cx.listener(move |this, event: &MouseDownEvent, cx| { diff --git a/crates/tasks_ui/src/lib.rs b/crates/tasks_ui/src/lib.rs index 4ea4a8fa2ccdc..fd14f9aaef2a4 100644 --- a/crates/tasks_ui/src/lib.rs +++ b/crates/tasks_ui/src/lib.rs @@ -94,7 +94,7 @@ fn toggle_modal(workspace: &mut Workspace, cx: &mut ViewContext<'_, Workspace>) workspace .update(&mut cx, |workspace, cx| { if workspace.project().update(cx, |project, cx| { - project.is_local_or_ssh() || 
project.ssh_connection_string(cx).is_some() + project.is_local() || project.ssh_connection_string(cx).is_some() }) { workspace.toggle_modal(cx, |cx| { TasksModal::new(project, task_context, workspace_handle, cx) diff --git a/crates/tasks_ui/src/modal.rs b/crates/tasks_ui/src/modal.rs index 931a0b09c365f..662e3f11fd05c 100644 --- a/crates/tasks_ui/src/modal.rs +++ b/crates/tasks_ui/src/modal.rs @@ -225,7 +225,7 @@ impl PickerDelegate for TasksModalDelegate { if project.is_via_collab() && ssh_connection_string.is_none() { Task::ready((Vec::new(), Vec::new())) } else { - let remote_templates = if project.is_local_or_ssh() { + let remote_templates = if project.is_local() { None } else { project diff --git a/crates/terminal_view/src/terminal_panel.rs b/crates/terminal_view/src/terminal_panel.rs index f745fbe348ba4..72f8606fa2176 100644 --- a/crates/terminal_view/src/terminal_panel.rs +++ b/crates/terminal_view/src/terminal_panel.rs @@ -144,7 +144,7 @@ impl TerminalPanel { cx.subscribe(&pane, Self::handle_pane_event), ]; let project = workspace.project().read(cx); - let enabled = project.is_local_or_ssh() || project.supports_remote_terminal(cx); + let enabled = project.supports_terminal(cx); let this = Self { pane, fs: workspace.app_state().fs.clone(), diff --git a/crates/title_bar/src/collab.rs b/crates/title_bar/src/collab.rs index 1f052e1a5e61a..e9f89643d5729 100644 --- a/crates/title_bar/src/collab.rs +++ b/crates/title_bar/src/collab.rs @@ -284,14 +284,14 @@ impl TitleBar { let room = room.read(cx); let project = self.project.read(cx); - let is_local = project.is_local_or_ssh(); let is_dev_server_project = project.dev_server_project_id().is_some(); - let is_shared = (is_local || is_dev_server_project) && project.is_shared(); + let is_shared = project.is_shared(); let is_muted = room.is_muted(); let is_deafened = room.is_deafened().unwrap_or(false); let is_screen_sharing = room.is_screen_sharing(); let can_use_microphone = room.can_use_microphone(); - let can_share_projects = room.can_share_projects(); + let can_share_projects = room.can_share_projects() + && (is_dev_server_project || project.is_local() || project.is_via_ssh()); let platform_supported = match self.platform_style { PlatformStyle::Mac => true, PlatformStyle::Linux | PlatformStyle::Windows => false, @@ -299,7 +299,7 @@ impl TitleBar { let mut children = Vec::new(); - if (is_local || is_dev_server_project) && can_share_projects { + if can_share_projects { children.push( Button::new( "toggle_sharing", diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 98f793c234aae..4290e12105a3c 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -1891,7 +1891,11 @@ impl Workspace { directories: true, multiple: true, }, - DirectoryLister::Local(self.app_state.fs.clone()), + if self.project.read(cx).is_via_ssh() { + DirectoryLister::Project(self.project.clone()) + } else { + DirectoryLister::Local(self.app_state.fs.clone()) + }, cx, ); @@ -3956,7 +3960,7 @@ impl Workspace { fn local_paths(&self, cx: &AppContext) -> Option>> { let project = self.project().read(cx); - if project.is_local_or_ssh() { + if project.is_local() { Some( project .visible_worktrees(cx) @@ -5160,7 +5164,7 @@ async fn join_channel_internal( return None; } - if (project.is_local_or_ssh() || is_dev_server) + if (project.is_local() || project.is_via_ssh() || is_dev_server) && project.visible_worktrees(cx).any(|tree| { tree.read(cx) .root_entry() @@ -5314,7 +5318,7 @@ pub fn local_workspace_windows(cx: 
&AppContext) -> Vec> .filter(|workspace| { workspace .read(cx) - .is_ok_and(|workspace| workspace.project.read(cx).is_local_or_ssh()) + .is_ok_and(|workspace| workspace.project.read(cx).is_local()) }) .collect() } diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 8f4f1af24331c..c631c01f99a1a 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -230,7 +230,7 @@ pub fn initialize_workspace( let project = workspace.project().clone(); if project.update(cx, |project, cx| { - project.is_local_or_ssh() || project.ssh_connection_string(cx).is_some() + project.is_local() || project.is_via_ssh() || project.ssh_connection_string(cx).is_some() }) { project.update(cx, |project, cx| { let fs = app_state.fs.clone(); From 9a8601227d99ad7a8b123a1470a89615919ca43e Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Tue, 24 Sep 2024 22:23:32 +0000 Subject: [PATCH 054/228] docs: Add example of TOML/taplo LSP settings (#18293) --- docs/src/languages/toml.md | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/docs/src/languages/toml.md b/docs/src/languages/toml.md index a4aa8436ff089..3f33925a281b6 100644 --- a/docs/src/languages/toml.md +++ b/docs/src/languages/toml.md @@ -18,3 +18,15 @@ include = ["Cargo.toml", "some_directory/**/*.toml"] align_entries = true reorder_keys = true ``` + +Alternatively, you can pass taplo configuration options via [Zed LSP Settings](../configuring-zed.md#lsp) + +```json + "lsp": { + "taplo": { + "settings": { + "array_auto_collapse": false + } + } + } +``` From e9bc9ed5d568997026f3ef0d015cc75880e1b5fd Mon Sep 17 00:00:00 2001 From: CharlesChen0823 Date: Wed, 25 Sep 2024 16:00:17 +0800 Subject: [PATCH 055/228] remote_server: Fix opening a new remote project not refreshing the project panel (#18262) Currently, when open new remote project, project_panel not refresh, we must `ctrl-p` and select an file to refresh the project_panel. After that, project_panel will refresh when remote project window active. Release Notes: - Fixed remote projects not restoring previous locations and not refreshing the project panel on open. --- crates/workspace/src/workspace.rs | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 4290e12105a3c..c7ba4ae3faa89 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -5607,6 +5607,9 @@ pub fn join_dev_server_project( }) }); + let serialized_workspace: Option = + persistence::DB.workspace_for_dev_server_project(dev_server_project_id); + let workspace = if let Some(existing_workspace) = existing_workspace { existing_workspace } else { @@ -5620,10 +5623,7 @@ pub fn join_dev_server_project( ) .await?; - let serialized_workspace: Option = - persistence::DB.workspace_for_dev_server_project(dev_server_project_id); - - let workspace_id = if let Some(serialized_workspace) = serialized_workspace { + let workspace_id = if let Some(ref serialized_workspace) = serialized_workspace { serialized_workspace.id } else { persistence::DB.next_id().await? @@ -5650,10 +5650,13 @@ pub fn join_dev_server_project( } }; - workspace.update(&mut cx, |_, cx| { - cx.activate(true); - cx.activate_window(); - })?; + workspace + .update(&mut cx, |_, cx| { + cx.activate(true); + cx.activate_window(); + open_items(serialized_workspace, vec![], app_state, cx) + })? 
+ .await?; anyhow::Ok(workspace) }) From fc9db97ac73e56288077b486e1d3d3618d5ee80e Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Wed, 25 Sep 2024 10:02:35 +0200 Subject: [PATCH 056/228] client: Remove unused `fs` dependency (#18324) CI bot notified me about that in https://github.com/zed-industries/zed/pull/18323 Release Notes: - N/A --- Cargo.lock | 1 - crates/client/Cargo.toml | 1 - 2 files changed, 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9c6d2fb7b9b84..f1bc684401cb9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2409,7 +2409,6 @@ dependencies = [ "cocoa 0.26.0", "collections", "feature_flags", - "fs", "futures 0.3.30", "gpui", "http_client", diff --git a/crates/client/Cargo.toml b/crates/client/Cargo.toml index 8ae4f15c9796b..dd420bbbe6318 100644 --- a/crates/client/Cargo.toml +++ b/crates/client/Cargo.toml @@ -23,7 +23,6 @@ chrono = { workspace = true, features = ["serde"] } clock.workspace = true collections.workspace = true feature_flags.workspace = true -fs.workspace = true futures.workspace = true gpui.workspace = true http_client.workspace = true From eb71d2f1a8a606ac467e56c8d22e2d46818f87cf Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Wed, 25 Sep 2024 10:03:10 +0200 Subject: [PATCH 057/228] zig: Fix highlighting of keywords like `orelse`, `or`, `and` (#18323) This changes the Zig highlights.scm to tag all keywords with `@keyword` and not with `@keyword.`, so the highlighting works properly. Closes #9355 Release Notes: - N/A Demo: ![screenshot-2024-09-25-09 32 20@2x](https://github.com/user-attachments/assets/567b8817-a522-4741-af7f-dcb1a79ddd40) --- extensions/zig/languages/zig/highlights.scm | 78 +++++++-------------- 1 file changed, 24 insertions(+), 54 deletions(-) diff --git a/extensions/zig/languages/zig/highlights.scm b/extensions/zig/languages/zig/highlights.scm index 152a66b670b44..aea2d34addb82 100644 --- a/extensions/zig/languages/zig/highlights.scm +++ b/extensions/zig/languages/zig/highlights.scm @@ -103,6 +103,7 @@ field_constant: (IDENTIFIER) @constant (BlockLabel (IDENTIFIER) @tag) [ + "fn" "asm" "defer" "errdefer" @@ -112,84 +113,53 @@ field_constant: (IDENTIFIER) @constant "enum" "opaque" "error" -] @keyword - -[ + "try" + "catch" + "for" + "while" + "break" + "continue" + "const" + "var" + "volatile" + "allowzero" + "noalias" + "addrspace" + "align" + "callconv" + "linksection" + "comptime" + "export" + "extern" + "inline" + "noinline" + "packed" + "pub" + "threadlocal" "async" "await" "suspend" "nosuspend" "resume" -] @keyword.coroutine - -[ - "fn" -] @keyword - -[ "and" "or" "orelse" -] @operator - -[ "return" -] @keyword.return - -[ "if" "else" "switch" -] @keyword.control - -[ - "for" - "while" - "break" - "continue" ] @keyword [ "usingnamespace" ] @constant -[ - "try" - "catch" -] @keyword - [ "anytype" "anyframe" (BuildinTypeExpr) ] @type -[ - "const" - "var" - "volatile" - "allowzero" - "noalias" -] @keyword - -[ - "addrspace" - "align" - "callconv" - "linksection" -] @keyword.storage - -[ - "comptime" - "export" - "extern" - "inline" - "noinline" - "packed" - "pub" - "threadlocal" -] @keyword - [ "null" "unreachable" From 7bb510971a0cc59b89ed5ed51cbacbf29f365c06 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Wed, 25 Sep 2024 10:26:00 +0200 Subject: [PATCH 058/228] file picker: Use muted color for file icons (#18325) I think they were too much in-your-face. Muted looks better. 
Before: ![image](https://github.com/user-attachments/assets/73d6171a-6276-4450-acfb-52cd44fdfe59) After: ![image](https://github.com/user-attachments/assets/1d5f4524-b0b9-4ba6-ab66-5eaf619e58f9) Release Notes: - N/A --- crates/file_finder/src/file_finder.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/crates/file_finder/src/file_finder.rs b/crates/file_finder/src/file_finder.rs index 726a8bcb5e698..1a65bd352d61d 100644 --- a/crates/file_finder/src/file_finder.rs +++ b/crates/file_finder/src/file_finder.rs @@ -1070,7 +1070,9 @@ impl PickerDelegate for FileFinderDelegate { self.labels_for_match(path_match, cx, ix); let file_icon = if settings.file_icons { - FileIcons::get_icon(Path::new(&file_name), cx).map(Icon::from_path) + FileIcons::get_icon(Path::new(&file_name), cx) + .map(Icon::from_path) + .map(|icon| icon.color(Color::Muted)) } else { None }; From 623a6eca75cb941ea7a368e133097605882efbb9 Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Wed, 25 Sep 2024 11:34:27 +0200 Subject: [PATCH 059/228] git: Do not rescan .git on fsmonitor events (#18326) Fixes #16404 by ignoring events coming from .git/fsmonitor--daemon/cookies subdirectory. Closes #16404 Release Notes: - Improved performance in repositories using Git fsmonitor--daemon feature. --- crates/git/src/git.rs | 3 +++ crates/worktree/src/worktree.rs | 27 +++++++++++++++++++++++++-- 2 files changed, 28 insertions(+), 2 deletions(-) diff --git a/crates/git/src/git.rs b/crates/git/src/git.rs index 20629899e8c0c..fb204fba8266a 100644 --- a/crates/git/src/git.rs +++ b/crates/git/src/git.rs @@ -18,6 +18,9 @@ pub mod repository; pub mod status; pub static DOT_GIT: LazyLock<&'static OsStr> = LazyLock::new(|| OsStr::new(".git")); +pub static COOKIES: LazyLock<&'static OsStr> = LazyLock::new(|| OsStr::new("cookies")); +pub static FSMONITOR_DAEMON: LazyLock<&'static OsStr> = + LazyLock::new(|| OsStr::new("fsmonitor--daemon")); pub static GITIGNORE: LazyLock<&'static OsStr> = LazyLock::new(|| OsStr::new(".gitignore")); #[derive(Clone, Copy, Eq, Hash, PartialEq)] diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index d8555b71a4f67..550843e51e448 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -22,7 +22,7 @@ use fuzzy::CharBag; use git::{ repository::{GitFileStatus, GitRepository, RepoPath}, status::GitStatus, - DOT_GIT, GITIGNORE, + COOKIES, DOT_GIT, FSMONITOR_DAEMON, GITIGNORE, }; use gpui::{ AppContext, AsyncAppContext, BackgroundExecutor, Context, EventEmitter, Model, ModelContext, @@ -3707,9 +3707,32 @@ impl BackgroundScanner { let snapshot = &self.state.lock().snapshot; { let mut is_git_related = false; + + // We don't want to trigger .git rescan for events within .git/fsmonitor--daemon/cookies directory. 
+ #[derive(PartialEq)] + enum FsMonitorParseState { + Cookies, + FsMonitor + } + let mut fsmonitor_parse_state = None; if let Some(dot_git_dir) = abs_path .ancestors() - .find(|ancestor| ancestor.file_name() == Some(*DOT_GIT)) + .find(|ancestor| { + let file_name = ancestor.file_name(); + if file_name == Some(*COOKIES) { + fsmonitor_parse_state = Some(FsMonitorParseState::Cookies); + false + } else if fsmonitor_parse_state == Some(FsMonitorParseState::Cookies) && file_name == Some(*FSMONITOR_DAEMON) { + fsmonitor_parse_state = Some(FsMonitorParseState::FsMonitor); + false + } else if fsmonitor_parse_state != Some(FsMonitorParseState::FsMonitor) && file_name == Some(*DOT_GIT) { + true + } else { + fsmonitor_parse_state.take(); + false + } + + }) { let dot_git_path = dot_git_dir .strip_prefix(&root_canonical_path) From 9d197ddc99b3b6e4c85f481cc45b0d33c170a494 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Wed, 25 Sep 2024 12:03:24 +0200 Subject: [PATCH 060/228] ssh remoting: Fix SSH connection not being closed (#18329) This fixes the `SshSession` being leaked. There were two leaks: 1. `Arc` itself got leaked into the `SettingsObserver` that lives as long as the application. Fixed with a weak reference. 2. The two tasks spawned by an `SshSession` had a circular dependency and didn't exit while the other one was running. Fixed by fixing (1) and then attaching one of the tasks to the `SshSession`, which means it gets dropped with the session itself, which leads the other task to error and exit. Co-authored-by: Bennet Release Notes: - N/A --------- Co-authored-by: Bennet --- crates/project/src/project_settings.rs | 13 +++++--- crates/recent_projects/src/recent_projects.rs | 2 +- crates/remote/src/ssh_session.rs | 31 ++++++++++++++----- crates/rpc/src/proto_client.rs | 20 +++++++++++- crates/worktree/src/worktree.rs | 2 +- 5 files changed, 53 insertions(+), 15 deletions(-) diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index d6f5600a551ef..68593f8fab052 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -334,17 +334,20 @@ impl SettingsObserver { .log_err(); } + let weak_client = ssh.downgrade(); cx.observe_global::(move |_, cx| { let new_settings = cx.global::().raw_user_settings(); if &settings != new_settings { settings = new_settings.clone() } if let Some(content) = serde_json::to_string(&settings).log_err() { - ssh.send(proto::UpdateUserSettings { - project_id: 0, - content, - }) - .log_err(); + if let Some(ssh) = weak_client.upgrade() { + ssh.send(proto::UpdateUserSettings { + project_id: 0, + content, + }) + .log_err(); + } } }) .detach(); diff --git a/crates/recent_projects/src/recent_projects.rs b/crates/recent_projects/src/recent_projects.rs index 20393d63e1a3d..570e9a565c28e 100644 --- a/crates/recent_projects/src/recent_projects.rs +++ b/crates/recent_projects/src/recent_projects.rs @@ -509,7 +509,7 @@ impl PickerDelegate for RecentProjectsDelegate { .color(Color::Muted) .into_any_element() } - SerializedWorkspaceLocation::Ssh(_) => Icon::new(IconName::Screen) + SerializedWorkspaceLocation::Ssh(_) => Icon::new(IconName::Server) .color(Color::Muted) .into_any_element(), SerializedWorkspaceLocation::DevServer(_) => { diff --git a/crates/remote/src/ssh_session.rs b/crates/remote/src/ssh_session.rs index 2bd18aa37e19d..9d9d916f19b45 100644 --- a/crates/remote/src/ssh_session.rs +++ b/crates/remote/src/ssh_session.rs @@ -11,7 +11,7 @@ use futures::{ future::BoxFuture, select_biased, 
AsyncReadExt as _, AsyncWriteExt as _, Future, FutureExt as _, StreamExt as _, }; -use gpui::{AppContext, AsyncAppContext, Model, SemanticVersion}; +use gpui::{AppContext, AsyncAppContext, Model, SemanticVersion, Task}; use parking_lot::Mutex; use rpc::{ proto::{self, build_typed_envelope, Envelope, EnvelopedMessage, PeerId, RequestMessage}, @@ -51,6 +51,7 @@ pub struct SshSession { spawn_process_tx: mpsc::UnboundedSender, client_socket: Option, state: Mutex, // Lock + _io_task: Option>>, } struct SshClientState { @@ -173,8 +174,7 @@ impl SshSession { let mut child_stdout = remote_server_child.stdout.take().unwrap(); let mut child_stdin = remote_server_child.stdin.take().unwrap(); - let executor = cx.background_executor().clone(); - executor.clone().spawn(async move { + let io_task = cx.background_executor().spawn(async move { let mut stdin_buffer = Vec::new(); let mut stdout_buffer = Vec::new(); let mut stderr_buffer = Vec::new(); @@ -264,9 +264,18 @@ impl SshSession { } } } - }).detach(); + }); - cx.update(|cx| Self::new(incoming_rx, outgoing_tx, spawn_process_tx, Some(socket), cx)) + cx.update(|cx| { + Self::new( + incoming_rx, + outgoing_tx, + spawn_process_tx, + Some(socket), + Some(io_task), + cx, + ) + }) } pub fn server( @@ -275,7 +284,7 @@ impl SshSession { cx: &AppContext, ) -> Arc { let (tx, _rx) = mpsc::unbounded(); - Self::new(incoming_rx, outgoing_tx, tx, None, cx) + Self::new(incoming_rx, outgoing_tx, tx, None, None, cx) } #[cfg(any(test, feature = "test-support"))] @@ -293,6 +302,7 @@ impl SshSession { client_to_server_tx, tx.clone(), None, // todo() + None, cx, ) }), @@ -302,6 +312,7 @@ impl SshSession { server_to_client_tx, tx.clone(), None, + None, cx, ) }), @@ -313,6 +324,7 @@ impl SshSession { outgoing_tx: mpsc::UnboundedSender, spawn_process_tx: mpsc::UnboundedSender, client_socket: Option, + io_task: Option>>, cx: &AppContext, ) -> Arc { let this = Arc::new(Self { @@ -322,13 +334,18 @@ impl SshSession { spawn_process_tx, client_socket, state: Default::default(), + _io_task: io_task, }); cx.spawn(|cx| { - let this = this.clone(); + let this = Arc::downgrade(&this); async move { let peer_id = PeerId { owner_id: 0, id: 0 }; while let Some(incoming) = incoming_rx.next().await { + let Some(this) = this.upgrade() else { + return anyhow::Ok(()); + }; + if let Some(request_id) = incoming.responding_to { let request_id = MessageId(request_id); let sender = this.response_channels.lock().remove(&request_id); diff --git a/crates/rpc/src/proto_client.rs b/crates/rpc/src/proto_client.rs index 88099102765ed..56b13688bad2b 100644 --- a/crates/rpc/src/proto_client.rs +++ b/crates/rpc/src/proto_client.rs @@ -10,11 +10,29 @@ use proto::{ error::ErrorExt as _, AnyTypedEnvelope, EntityMessage, Envelope, EnvelopedMessage, RequestMessage, TypedEnvelope, }; -use std::{any::TypeId, sync::Arc}; +use std::{ + any::TypeId, + sync::{Arc, Weak}, +}; #[derive(Clone)] pub struct AnyProtoClient(Arc); +impl AnyProtoClient { + pub fn downgrade(&self) -> AnyWeakProtoClient { + AnyWeakProtoClient(Arc::downgrade(&self.0)) + } +} + +#[derive(Clone)] +pub struct AnyWeakProtoClient(Weak); + +impl AnyWeakProtoClient { + pub fn upgrade(&self) -> Option { + self.0.upgrade().map(AnyProtoClient) + } +} + pub trait ProtoClient: Send + Sync { fn request( &self, diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index 550843e51e448..f91a832b80d78 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -472,7 +472,7 @@ impl Worktree { disconnected: false, }; 
- // Apply updates to a separate snapshto in a background task, then + // Apply updates to a separate snapshot in a background task, then // send them to a foreground task which updates the model. cx.background_executor() .spawn(async move { From a6cb17fb51bd3dca2a8c68bacfc8384234c10105 Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Wed, 25 Sep 2024 12:27:57 +0200 Subject: [PATCH 061/228] chore: Fix violations of `elided_named_lifetimes` (#18330) I compile Zed from nightly build pretty often and I've noticed that we're getting a few hits on new rustc lint: https://github.com/rust-lang/rust/pull/129207 Release Notes: - N/A --- crates/editor/src/display_map/crease_map.rs | 2 +- crates/editor/src/editor.rs | 2 +- crates/language/src/syntax_map.rs | 6 +++--- crates/project/src/project.rs | 2 +- crates/sum_tree/src/sum_tree.rs | 4 ++-- crates/sum_tree/src/tree_map.rs | 2 +- crates/workspace/src/workspace.rs | 2 +- 7 files changed, 10 insertions(+), 10 deletions(-) diff --git a/crates/editor/src/display_map/crease_map.rs b/crates/editor/src/display_map/crease_map.rs index bfc9c7d1a4ffb..c3f2b0061ac73 100644 --- a/crates/editor/src/display_map/crease_map.rs +++ b/crates/editor/src/display_map/crease_map.rs @@ -69,7 +69,7 @@ impl CreaseSnapshot { &'a self, range: Range, snapshot: &'a MultiBufferSnapshot, - ) -> impl '_ + Iterator { + ) -> impl 'a + Iterator { let start = snapshot.anchor_before(Point::new(range.start.0, 0)); let mut cursor = self.creases.cursor::(snapshot); cursor.seek(&start, Bias::Left, snapshot); diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index b54889dc0d8c2..ad5cd24d73ac4 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -11515,7 +11515,7 @@ impl Editor { &'a self, position: Anchor, buffer: &'a MultiBufferSnapshot, - ) -> impl 'a + Iterator> { + ) -> impl 'a + Iterator> { let read_highlights = self .background_highlights .get(&TypeId::of::()) diff --git a/crates/language/src/syntax_map.rs b/crates/language/src/syntax_map.rs index daae54fb4da62..55177f79620db 100644 --- a/crates/language/src/syntax_map.rs +++ b/crates/language/src/syntax_map.rs @@ -794,7 +794,7 @@ impl SyntaxSnapshot { range: Range, buffer: &'a BufferSnapshot, query: fn(&Grammar) -> Option<&Query>, - ) -> SyntaxMapCaptures { + ) -> SyntaxMapCaptures<'a> { SyntaxMapCaptures::new( range.clone(), buffer.as_rope(), @@ -808,7 +808,7 @@ impl SyntaxSnapshot { range: Range, buffer: &'a BufferSnapshot, query: fn(&Grammar) -> Option<&Query>, - ) -> SyntaxMapMatches { + ) -> SyntaxMapMatches<'a> { SyntaxMapMatches::new( range.clone(), buffer.as_rope(), @@ -828,7 +828,7 @@ impl SyntaxSnapshot { range: Range, buffer: &'a BufferSnapshot, include_hidden: bool, - ) -> impl 'a + Iterator { + ) -> impl 'a + Iterator> { let start_offset = range.start.to_offset(buffer); let end_offset = range.end.to_offset(buffer); let start = buffer.anchor_before(start_offset); diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 5a9b235d91cdb..ee7f93a4f933c 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -3954,7 +3954,7 @@ impl Project { pub fn supplementary_language_servers<'a>( &'a self, cx: &'a AppContext, - ) -> impl '_ + Iterator { + ) -> impl 'a + Iterator { self.lsp_store.read(cx).supplementary_language_servers() } diff --git a/crates/sum_tree/src/sum_tree.rs b/crates/sum_tree/src/sum_tree.rs index 965413d3190aa..7013dc66fd1ed 100644 --- a/crates/sum_tree/src/sum_tree.rs +++ 
b/crates/sum_tree/src/sum_tree.rs @@ -345,7 +345,7 @@ impl SumTree { Iter::new(self) } - pub fn cursor<'a, S>(&'a self, cx: &::Context) -> Cursor + pub fn cursor<'a, S>(&'a self, cx: &::Context) -> Cursor<'a, T, S> where S: Dimension<'a, T::Summary>, { @@ -358,7 +358,7 @@ impl SumTree { &'a self, cx: &::Context, filter_node: F, - ) -> FilterCursor + ) -> FilterCursor<'a, F, T, U> where F: FnMut(&T::Summary) -> bool, U: Dimension<'a, T::Summary>, diff --git a/crates/sum_tree/src/tree_map.rs b/crates/sum_tree/src/tree_map.rs index b7eadb566d3ed..c57226b681432 100644 --- a/crates/sum_tree/src/tree_map.rs +++ b/crates/sum_tree/src/tree_map.rs @@ -105,7 +105,7 @@ impl TreeMap { cursor.item().map(|item| (&item.key, &item.value)) } - pub fn iter_from<'a>(&'a self, from: &'a K) -> impl Iterator + '_ { + pub fn iter_from<'a>(&'a self, from: &'a K) -> impl Iterator + 'a { let mut cursor = self.0.cursor::>(&()); let from_key = MapKeyRef(Some(from)); cursor.seek(&from_key, Bias::Left, &()); diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index c7ba4ae3faa89..4d656294703d4 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -2119,7 +2119,7 @@ impl Workspace { pub fn items<'a>( &'a self, cx: &'a AppContext, - ) -> impl 'a + Iterator> { + ) -> impl 'a + Iterator> { self.panes.iter().flat_map(|pane| pane.read(cx).items()) } From 300bf87f77d3ae4eea93affc088b7f6b4979a277 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Wed, 25 Sep 2024 12:45:53 +0200 Subject: [PATCH 062/228] ssh remoting: Kill SSH master process when dropping client (#18331) This was a process leak. Since we use `.spawn()`, the process continued to run in the background, even if our `SshClientState` was dropped. Means we need to manually clean it up. 
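The diff below addresses this with a `Drop` implementation that kills the master process. For illustration only, here is a minimal self-contained sketch of the same cleanup pattern using `std::process` (the real code holds an async/smol child and a different struct name; `sleep` is just a stand-in command): dropping the owning value explicitly terminates the spawned child instead of leaving it running.

```rust
use std::io;
use std::process::{Child, Command};

/// Owns a spawned helper process and guarantees it is terminated
/// when the owner is dropped.
struct OwnedChild {
    child: Child,
}

impl OwnedChild {
    fn spawn(program: &str, args: &[&str]) -> io::Result<Self> {
        // `spawn` starts the process and returns immediately; dropping the
        // returned `Child` handle alone does NOT kill the process.
        let child = Command::new(program).args(args).spawn()?;
        Ok(Self { child })
    }
}

impl Drop for OwnedChild {
    fn drop(&mut self) {
        // Explicitly terminate the child so it cannot outlive its owner.
        if let Err(error) = self.child.kill() {
            eprintln!("failed to kill child process: {error}");
        }
        // Reap the exit status so no zombie process is left behind.
        let _ = self.child.wait();
    }
}

fn main() -> io::Result<()> {
    {
        let _proc = OwnedChild::spawn("sleep", &["1000"])?;
        // ... use the process here ...
    } // `_proc` is dropped here and the child is killed.
    Ok(())
}
```

Tying the child's lifetime to the owning value means no separate shutdown call can be forgotten.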
Release Notes: - N/A Co-authored-by: Bennet --- crates/remote/src/ssh_session.rs | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/crates/remote/src/ssh_session.rs b/crates/remote/src/ssh_session.rs index 9d9d916f19b45..06a7f810e6721 100644 --- a/crates/remote/src/ssh_session.rs +++ b/crates/remote/src/ssh_session.rs @@ -56,7 +56,7 @@ pub struct SshSession { struct SshClientState { socket: SshSocket, - _master_process: process::Child, + master_process: process::Child, _temp_dir: TempDir, } @@ -593,7 +593,7 @@ impl SshClientState { connection_options, socket_path, }, - _master_process: master_process, + master_process, _temp_dir: temp_dir, }) } @@ -716,6 +716,14 @@ impl SshClientState { } } +impl Drop for SshClientState { + fn drop(&mut self) { + if let Err(error) = self.master_process.kill() { + log::error!("failed to kill SSH master process: {}", error); + } + } +} + impl SshSocket { fn ssh_command>(&self, program: S) -> process::Command { let mut command = process::Command::new("ssh"); From 4e2ae06ca6c467e5ff50600bb85cf7452d46ee92 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Wed, 25 Sep 2024 12:59:22 +0200 Subject: [PATCH 063/228] recent project: Fix highlighting for matches in SSH projs (#18332) Release Notes: - N/A Co-authored-by: Bennet --- crates/recent_projects/src/recent_projects.rs | 26 +++++-------------- 1 file changed, 7 insertions(+), 19 deletions(-) diff --git a/crates/recent_projects/src/recent_projects.rs b/crates/recent_projects/src/recent_projects.rs index 570e9a565c28e..f73e7069d48a9 100644 --- a/crates/recent_projects/src/recent_projects.rs +++ b/crates/recent_projects/src/recent_projects.rs @@ -259,23 +259,12 @@ impl PickerDelegate for RecentProjectsDelegate { dev_server_project.paths.join("") ) } - SerializedWorkspaceLocation::Ssh(ssh_project) => { - format!( - "{}{}{}{}", - ssh_project.host, - ssh_project - .port - .as_ref() - .map(|port| port.to_string()) - .unwrap_or_default(), - ssh_project.paths.join(","), - ssh_project - .user - .as_ref() - .map(|user| user.to_string()) - .unwrap_or_default() - ) - } + SerializedWorkspaceLocation::Ssh(ssh_project) => ssh_project + .ssh_urls() + .iter() + .map(|path| path.to_string_lossy().to_string()) + .collect::>() + .join(""), }; StringMatchCandidate::new(id, combined_string) @@ -458,6 +447,7 @@ impl PickerDelegate for RecentProjectsDelegate { .order() .iter() .filter_map(|i| paths.paths().get(*i).cloned()) + .map(|path| path.compact()) .collect(), ), SerializedWorkspaceLocation::Ssh(ssh_project) => Arc::new(ssh_project.ssh_urls()), @@ -473,7 +463,6 @@ impl PickerDelegate for RecentProjectsDelegate { let (match_labels, paths): (Vec<_>, Vec<_>) = paths .iter() .map(|path| { - let path = path.compact(); let highlighted_text = highlights_for_path(path.as_ref(), &hit.positions, path_start_offset); @@ -704,7 +693,6 @@ fn highlights_for_path( }, ) } - impl RecentProjectsDelegate { fn delete_recent_project(&self, ix: usize, cx: &mut ViewContext>) { if let Some(selected_match) = self.matches.get(ix) { From ccc871c44c3085eef65bf4bcc3603b938691e557 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Wed, 25 Sep 2024 13:41:18 +0200 Subject: [PATCH 064/228] ssh remoting: Expand tilde on host side (#18333) --- crates/project/src/worktree_store.rs | 5 +++-- crates/recent_projects/src/ssh_connections.rs | 9 ++++++++- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/crates/project/src/worktree_store.rs b/crates/project/src/worktree_store.rs index 9f25572fc7ec0..4b1764c3a87fa 100644 --- 
a/crates/project/src/worktree_store.rs +++ b/crates/project/src/worktree_store.rs @@ -221,10 +221,11 @@ impl WorktreeStore { ) -> Task, Arc>> { let mut abs_path = abs_path.as_ref().to_string_lossy().to_string(); // If we start with `/~` that means the ssh path was something like `ssh://user@host/~/home-dir-folder/` - // in which case want to strip the leading the `/` and expand the tilde. + // in which case want to strip the leading the `/`. + // On the host-side, the `~` will get expanded. // That's what git does too: https://github.com/libgit2/libgit2/issues/3345#issuecomment-127050850 if abs_path.starts_with("/~") { - abs_path = shellexpand::tilde(&abs_path[1..]).to_string(); + abs_path = abs_path[1..].to_string(); } let root_name = PathBuf::from(abs_path.clone()) .file_name() diff --git a/crates/recent_projects/src/ssh_connections.rs b/crates/recent_projects/src/ssh_connections.rs index ad23a5c8963b4..1722c58f07539 100644 --- a/crates/recent_projects/src/ssh_connections.rs +++ b/crates/recent_projects/src/ssh_connections.rs @@ -327,7 +327,14 @@ impl SshClientDelegate { cx, ) .await - .map_err(|e| anyhow::anyhow!("failed to download remote server binary: {}", e))?; + .map_err(|e| { + anyhow::anyhow!( + "failed to download remote server binary (os: {}, arch: {}): {}", + platform.os, + platform.arch, + e + ) + })?; Ok((binary_path, version)) } From 59dc3985a1afa338720912734972f600178a8a85 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Wed, 25 Sep 2024 11:41:35 +0000 Subject: [PATCH 065/228] Detect txt files as Plain Text (#18334) --- assets/settings/default.json | 1 + 1 file changed, 1 insertion(+) diff --git a/assets/settings/default.json b/assets/settings/default.json index 3e8d3c8c70dd7..61239b002bedc 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -762,6 +762,7 @@ // } // "file_types": { + "Plain Text": ["txt"], "JSON": ["flake.lock"], "JSONC": [ "**/.zed/**/*.json", From 500c3c54a64df3b119e1bd8b0a63822f45d2f4c9 Mon Sep 17 00:00:00 2001 From: Joseph T Lyons Date: Wed, 25 Sep 2024 11:02:40 -0400 Subject: [PATCH 066/228] v0.156.x dev --- Cargo.lock | 2 +- crates/zed/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index f1bc684401cb9..41b2d6d452af5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -14388,7 +14388,7 @@ dependencies = [ [[package]] name = "zed" -version = "0.155.0" +version = "0.156.0" dependencies = [ "activity_indicator", "anyhow", diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index 65724480f6233..eb8f45d92e476 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -2,7 +2,7 @@ description = "The fast, collaborative code editor." 
edition = "2021" name = "zed" -version = "0.155.0" +version = "0.156.0" publish = false license = "GPL-3.0-or-later" authors = ["Zed Team "] From bbf5ed2ba158b5a3cf36d4cb83df4ec471728248 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Wed, 25 Sep 2024 09:42:07 -0600 Subject: [PATCH 067/228] Fix collab filtering panics better (#18344) Release Notes: - N/A --- crates/collab/src/api/events.rs | 24 +++++++++++------------- 1 file changed, 11 insertions(+), 13 deletions(-) diff --git a/crates/collab/src/api/events.rs b/crates/collab/src/api/events.rs index 1632c2d798ef1..377741f434c2f 100644 --- a/crates/collab/src/api/events.rs +++ b/crates/collab/src/api/events.rs @@ -364,21 +364,19 @@ pub async fn post_panic( } fn report_to_slack(panic: &Panic) -> bool { - if panic.os_name == "Linux" { - if panic.payload.contains("ERROR_SURFACE_LOST_KHR") { - return false; - } + if panic.payload.contains("ERROR_SURFACE_LOST_KHR") { + return false; + } - if panic.payload.contains("ERROR_INITIALIZATION_FAILED") { - return false; - } + if panic.payload.contains("ERROR_INITIALIZATION_FAILED") { + return false; + } - if panic - .payload - .contains("GPU has crashed, and no debug information is available") - { - return false; - } + if panic + .payload + .contains("GPU has crashed, and no debug information is available") + { + return false; } true From 9300dbc83494d2dbcadd9dcd4373a30bfe53a6e4 Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Wed, 25 Sep 2024 12:04:17 -0400 Subject: [PATCH 068/228] Fix typo (#18345) Release Notes: - N/A --- crates/vim/src/command.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/vim/src/command.rs b/crates/vim/src/command.rs index 67a674afa6f12..49e739faadfea 100644 --- a/crates/vim/src/command.rs +++ b/crates/vim/src/command.rs @@ -757,7 +757,7 @@ mod test { cx.simulate_shared_keystrokes(": j enter").await; - // hack: our cursor positionining after a join command is wrong + // hack: our cursor positioning after a join command is wrong cx.simulate_shared_keystrokes("^").await; cx.shared_state().await.assert_eq(indoc! { "ˇa b From 19162c316083890f999ef6d33e877856a4235df6 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Wed, 25 Sep 2024 18:08:34 +0200 Subject: [PATCH 069/228] ssh remoting: Show error message if project path does not exist (#18343) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This now shows an error message if you try open a project over SSH that doesn't exist. If it's a possible file-path though, it acts like Zed's `cli` and opens the file so that it can be created. 
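The host-side resolution added to `headless_project.rs` below boils down to: canonicalize the requested path, and if that fails, canonicalize the parent directory and re-attach the file name, so a not-yet-existing file inside an existing directory is still accepted. A simplified, synchronous sketch of that fallback (using `std::fs` directly rather than Zed's async `Fs` trait, and skipping the home-directory special case):

```rust
use std::fs;
use std::io;
use std::path::{Path, PathBuf};

/// Resolve a project path, allowing the final component to not exist yet.
/// If the path itself cannot be canonicalized, fall back to canonicalizing
/// its parent and re-attaching the file name; error out only when the
/// parent directory is missing too.
fn resolve_project_path(path: &Path) -> io::Result<PathBuf> {
    match fs::canonicalize(path) {
        Ok(canonical) => Ok(canonical),
        Err(original_err) => {
            let parent = path.parent().ok_or(original_err)?;
            let file_name = path.file_name().ok_or_else(|| {
                io::Error::new(io::ErrorKind::InvalidInput, "path has no file name")
            })?;
            // Fails (and surfaces an error to the caller) when the parent
            // directory does not exist either.
            let canonical_parent = fs::canonicalize(parent)?;
            Ok(canonical_parent.join(file_name))
        }
    }
}

fn main() {
    // A file that doesn't exist yet, inside a directory that does: accepted.
    println!("{:?}", resolve_project_path(Path::new("./new-file.txt")));
    // A file inside a missing directory: returns an error instead.
    println!("{:?}", resolve_project_path(Path::new("./no-such-dir/new-file.txt")));
}
```

The command-line examples below show the resulting behavior.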
- Works: `cargo run ssh://127.0.0.1/~/folder-exists/file-does-not-exist` — this will open `file-does-not-exist` - Shows error: `cargo run ssh://127.0.0.1/~/folder-does-not-exist/file-does-not-exist` — this will show an error Release Notes: - N/A Co-authored-by: Bennet Co-authored-by: Conrad --- crates/project/src/worktree_store.rs | 4 +-- crates/remote_server/Cargo.toml | 1 + crates/remote_server/src/headless_project.rs | 25 ++++++++++++++- crates/workspace/src/workspace.rs | 33 +++++++++++++++----- 4 files changed, 53 insertions(+), 10 deletions(-) diff --git a/crates/project/src/worktree_store.rs b/crates/project/src/worktree_store.rs index 4b1764c3a87fa..e445eab2dd639 100644 --- a/crates/project/src/worktree_store.rs +++ b/crates/project/src/worktree_store.rs @@ -18,7 +18,7 @@ use gpui::{ use postage::oneshot; use rpc::{ proto::{self, SSH_PROJECT_ID}, - AnyProtoClient, TypedEnvelope, + AnyProtoClient, ErrorExt, TypedEnvelope, }; use smol::{ channel::{Receiver, Sender}, @@ -207,7 +207,7 @@ impl WorktreeStore { cx.background_executor().spawn(async move { match task.await { Ok(worktree) => Ok(worktree), - Err(err) => Err(anyhow!("{}", err)), + Err(err) => Err((*err).cloned()), } }) } diff --git a/crates/remote_server/Cargo.toml b/crates/remote_server/Cargo.toml index ed12b41167cc2..64db2616e9b2f 100644 --- a/crates/remote_server/Cargo.toml +++ b/crates/remote_server/Cargo.toml @@ -22,6 +22,7 @@ test-support = ["fs/test-support"] [dependencies] anyhow.workspace = true +client.workspace = true env_logger.workspace = true fs.workspace = true futures.workspace = true diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index 0af0d6bb1570d..84fb22b282d37 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -189,11 +189,34 @@ impl HeadlessProject { message: TypedEnvelope, mut cx: AsyncAppContext, ) -> Result { + use client::ErrorCodeExt; let path = shellexpand::tilde(&message.payload.path).to_string(); + + let fs = this.read_with(&mut cx, |this, _| this.fs.clone())?; + let path = PathBuf::from(path); + + let canonicalized = match fs.canonicalize(&path).await { + Ok(path) => path, + Err(e) => { + let mut parent = path + .parent() + .ok_or(e) + .map_err(|_| anyhow!("{:?} does not exist", path))?; + if parent == Path::new("") { + parent = util::paths::home_dir(); + } + let parent = fs.canonicalize(parent).await.map_err(|_| { + anyhow!(proto::ErrorCode::DevServerProjectPathDoesNotExist + .with_tag("path", &path.to_string_lossy().as_ref())) + })?; + parent.join(path.file_name().unwrap()) + } + }; + let worktree = this .update(&mut cx.clone(), |this, _| { Worktree::local( - Path::new(&path), + Arc::from(canonicalized), true, this.fs.clone(), this.next_entry_id.clone(), diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 4d656294703d4..cec913851f04d 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -5544,12 +5544,21 @@ pub fn open_ssh_project( ) })?; + let mut project_paths_to_open = vec![]; + let mut project_path_errors = vec![]; + for path in paths { - project - .update(&mut cx, |project, cx| { - project.find_or_create_worktree(&path, true, cx) - })? - .await?; + let result = cx + .update(|cx| Workspace::project_path_for_path(project.clone(), &path, true, cx))? 
+ .await; + match result { + Ok((_, project_path)) => { + project_paths_to_open.push((path.clone(), Some(project_path))); + } + Err(error) => { + project_path_errors.push(error); + } + }; } let serialized_workspace = @@ -5576,11 +5585,21 @@ pub fn open_ssh_project( .update(&mut cx, |_, cx| { cx.activate_window(); - open_items(serialized_workspace, vec![], app_state, cx) + open_items(serialized_workspace, project_paths_to_open, app_state, cx) })? .await?; - Ok(()) + window.update(&mut cx, |workspace, cx| { + for error in project_path_errors { + if error.error_code() == proto::ErrorCode::DevServerProjectPathDoesNotExist { + if let Some(path) = error.error_tag("path") { + workspace.show_error(&anyhow!("'{path}' does not exist"), cx) + } + } else { + workspace.show_error(&error, cx) + } + } + }) }) } From 1f54fde4d2338730eecd46501688b8e777c7bb5c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=8B=90=E7=8B=B8?= <134658521+Huliiiiii@users.noreply.github.com> Date: Thu, 26 Sep 2024 01:29:02 +0800 Subject: [PATCH 070/228] toml: Add highlight for escape sequences (#18346) --- extensions/toml/languages/toml/highlights.scm | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/extensions/toml/languages/toml/highlights.scm b/extensions/toml/languages/toml/highlights.scm index 04d83b545925d..4be265cce74b3 100644 --- a/extensions/toml/languages/toml/highlights.scm +++ b/extensions/toml/languages/toml/highlights.scm @@ -9,9 +9,10 @@ (boolean) @constant (comment) @comment -(string) @string (integer) @number (float) @number +(string) @string +(escape_sequence) @string.escape (offset_date_time) @string.special (local_date_time) @string.special (local_date) @string.special From dc48af0ca1d5297fac94c7d02bb858d564a6542b Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Wed, 25 Sep 2024 11:45:56 -0600 Subject: [PATCH 071/228] lsp: Remove reinstall, update config (#18318) Release Notes: - Fixed overriding the path of a language server binary for all language servers. `{"lsp":{"":{"binary":{"path": "_"}}}}` will now work for all language servers including those defined by extensions. - (breaking change) To disable finding lsp adapters in your path, you must now specify `{"lsp":{"":{"binary":{"ignore_system_version": true}}}}`. Previously this was `{"lsp":{"":{"binary":{"path_lookup": false}}}}`. Note that this setting still does not apply to extensions. - Removed automatic reinstallation of language servers. 
(It mostly didn't work) --------- Co-authored-by: Mikayla --- assets/settings/default.json | 2 +- .../src/activity_indicator.rs | 2 +- crates/extension/src/extension_lsp_adapter.rs | 15 +- crates/language/src/language.rs | 73 +- crates/language/src/language_registry.rs | 190 ++---- crates/languages/src/c.rs | 54 +- crates/languages/src/css.rs | 7 - crates/languages/src/go.rs | 54 +- crates/languages/src/json.rs | 19 - crates/languages/src/python.rs | 7 - crates/languages/src/rust.rs | 104 +-- crates/languages/src/tailwind.rs | 39 -- crates/languages/src/typescript.rs | 20 - crates/languages/src/vtsls.rs | 50 +- crates/languages/src/yaml.rs | 38 -- crates/lsp/src/lsp.rs | 9 + crates/project/src/lsp_store.rs | 628 ++++++++---------- crates/project/src/project.rs | 8 - crates/project/src/project_settings.rs | 4 +- crates/zed/src/main.rs | 2 +- docs/src/languages/rust.md | 4 +- 21 files changed, 397 insertions(+), 932 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index 61239b002bedc..cf0de6a5e7f9a 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -783,7 +783,7 @@ /// or to ensure Zed always downloads and installs an isolated version of node: /// { /// "node": { - /// "disable_path_lookup": true + /// "ignore_system_version": true, /// } /// NOTE: changing this setting currently requires restarting Zed. "node": {}, diff --git a/crates/activity_indicator/src/activity_indicator.rs b/crates/activity_indicator/src/activity_indicator.rs index 52e6acc393d29..ace972bf87718 100644 --- a/crates/activity_indicator/src/activity_indicator.rs +++ b/crates/activity_indicator/src/activity_indicator.rs @@ -299,7 +299,7 @@ impl ActivityIndicator { .into_any_element(), ), message: format!( - "Failed to download {}. Click to show error.", + "Failed to run {}. 
Click to show error.", failed .iter() .map(|name| name.0.as_ref()) diff --git a/crates/extension/src/extension_lsp_adapter.rs b/crates/extension/src/extension_lsp_adapter.rs index d6125241f11a3..25179acec69ed 100644 --- a/crates/extension/src/extension_lsp_adapter.rs +++ b/crates/extension/src/extension_lsp_adapter.rs @@ -10,16 +10,11 @@ use gpui::AsyncAppContext; use language::{ CodeLabel, HighlightId, Language, LanguageServerName, LspAdapter, LspAdapterDelegate, }; -use lsp::{CodeActionKind, LanguageServerBinary}; +use lsp::{CodeActionKind, LanguageServerBinary, LanguageServerBinaryOptions}; use serde::Serialize; use serde_json::Value; use std::ops::Range; -use std::{ - any::Any, - path::{Path, PathBuf}, - pin::Pin, - sync::Arc, -}; +use std::{any::Any, path::PathBuf, pin::Pin, sync::Arc}; use util::{maybe, ResultExt}; use wasmtime_wasi::WasiView as _; @@ -38,8 +33,8 @@ impl LspAdapter for ExtensionLspAdapter { fn get_language_server_command<'a>( self: Arc, - _: Option>, delegate: Arc, + _: LanguageServerBinaryOptions, _: futures::lock::MutexGuard<'a, Option>, _: &'a mut AsyncAppContext, ) -> Pin>>> { @@ -124,10 +119,6 @@ impl LspAdapter for ExtensionLspAdapter { unreachable!("get_language_server_command is overridden") } - async fn installation_test_binary(&self, _: PathBuf) -> Option { - None - } - fn code_action_kinds(&self) -> Option> { let code_action_kinds = self .extension diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index d70650cf44935..4c75ef4eeb38d 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -29,7 +29,7 @@ use gpui::{AppContext, AsyncAppContext, Model, SharedString, Task}; pub use highlight_map::HighlightMap; use http_client::HttpClient; pub use language_registry::LanguageName; -use lsp::{CodeActionKind, LanguageServerBinary}; +use lsp::{CodeActionKind, LanguageServerBinary, LanguageServerBinaryOptions}; use parking_lot::Mutex; use regex::Regex; use schemars::{ @@ -69,7 +69,7 @@ pub use buffer::*; pub use diagnostic_set::DiagnosticEntry; pub use language_registry::{ AvailableLanguage, LanguageNotFound, LanguageQueries, LanguageRegistry, - LanguageServerBinaryStatus, PendingLanguageServer, QUERY_FILENAME_PREFIXES, + LanguageServerBinaryStatus, QUERY_FILENAME_PREFIXES, }; pub use lsp::LanguageServerId; pub use outline::*; @@ -249,28 +249,17 @@ impl CachedLspAdapter { pub async fn get_language_server_command( self: Arc, - container_dir: Option>, delegate: Arc, + binary_options: LanguageServerBinaryOptions, cx: &mut AsyncAppContext, ) -> Result { let cached_binary = self.cached_binary.lock().await; self.adapter .clone() - .get_language_server_command(container_dir, delegate, cached_binary, cx) + .get_language_server_command(delegate, binary_options, cached_binary, cx) .await } - pub fn can_be_reinstalled(&self) -> bool { - self.adapter.can_be_reinstalled() - } - - pub async fn installation_test_binary( - &self, - container_dir: PathBuf, - ) -> Option { - self.adapter.installation_test_binary(container_dir).await - } - pub fn code_action_kinds(&self) -> Option> { self.adapter.code_action_kinds() } @@ -322,6 +311,7 @@ pub trait LspAdapterDelegate: Send + Sync { fn worktree_id(&self) -> WorktreeId; fn worktree_root_path(&self) -> &Path; fn update_status(&self, language: LanguageServerName, status: LanguageServerBinaryStatus); + async fn language_server_download_dir(&self, name: &LanguageServerName) -> Option>; async fn which(&self, command: &OsStr) -> Option; async fn shell_env(&self) -> HashMap; @@ -335,8 
+325,8 @@ pub trait LspAdapter: 'static + Send + Sync { fn get_language_server_command<'a>( self: Arc, - container_dir: Option>, delegate: Arc, + binary_options: LanguageServerBinaryOptions, mut cached_binary: futures::lock::MutexGuard<'a, Option>, cx: &'a mut AsyncAppContext, ) -> Pin>>> { @@ -352,30 +342,30 @@ pub trait LspAdapter: 'static + Send + Sync { // We only want to cache when we fall back to the global one, // because we don't want to download and overwrite our global one // for each worktree we might have open. - if let Some(binary) = self.check_if_user_installed(delegate.as_ref(), cx).await { - log::info!( - "found user-installed language server for {}. path: {:?}, arguments: {:?}", - self.name().0, - binary.path, - binary.arguments - ); - return Ok(binary); + if binary_options.allow_path_lookup { + if let Some(binary) = self.check_if_user_installed(delegate.as_ref(), cx).await { + log::info!( + "found user-installed language server for {}. path: {:?}, arguments: {:?}", + self.name().0, + binary.path, + binary.arguments + ); + return Ok(binary); + } + } + + if !binary_options.allow_binary_download { + return Err(anyhow!("downloading language servers disabled")); } if let Some(cached_binary) = cached_binary.as_ref() { return Ok(cached_binary.clone()); } - let Some(container_dir) = container_dir else { + let Some(container_dir) = delegate.language_server_download_dir(&self.name()).await else { anyhow::bail!("cannot download language servers for remotes (yet)") }; - if !container_dir.exists() { - smol::fs::create_dir_all(&container_dir) - .await - .context("failed to create container directory")?; - } - let mut binary = try_fetch_server_binary(self.as_ref(), &delegate, container_dir.to_path_buf(), cx).await; if let Err(error) = binary.as_ref() { @@ -443,21 +433,6 @@ pub trait LspAdapter: 'static + Send + Sync { delegate: &dyn LspAdapterDelegate, ) -> Option; - /// Returns `true` if a language server can be reinstalled. - /// - /// If language server initialization fails, a reinstallation will be attempted unless the value returned from this method is `false`. - /// - /// Implementations that rely on software already installed on user's system - /// should have [`can_be_reinstalled`](Self::can_be_reinstalled) return `false`. - fn can_be_reinstalled(&self) -> bool { - true - } - - async fn installation_test_binary( - &self, - container_dir: PathBuf, - ) -> Option; - fn process_diagnostics(&self, _: &mut lsp::PublishDiagnosticsParams) {} /// Post-processes completions provided by the language server. 
@@ -1711,8 +1686,8 @@ impl LspAdapter for FakeLspAdapter { fn get_language_server_command<'a>( self: Arc, - _: Option>, _: Arc, + _: LanguageServerBinaryOptions, _: futures::lock::MutexGuard<'a, Option>, _: &'a mut AsyncAppContext, ) -> Pin>>> { @@ -1743,10 +1718,6 @@ impl LspAdapter for FakeLspAdapter { unreachable!(); } - async fn installation_test_binary(&self, _: PathBuf) -> Option { - unreachable!(); - } - fn process_diagnostics(&self, _: &mut lsp::PublishDiagnosticsParams) {} fn disk_based_diagnostic_sources(&self) -> Vec { diff --git a/crates/language/src/language_registry.rs b/crates/language/src/language_registry.rs index e264517d5b030..880ae3b6115c3 100644 --- a/crates/language/src/language_registry.rs +++ b/crates/language/src/language_registry.rs @@ -4,18 +4,17 @@ use crate::{ }, task_context::ContextProvider, with_parser, CachedLspAdapter, File, Language, LanguageConfig, LanguageId, LanguageMatcher, - LanguageServerName, LspAdapter, LspAdapterDelegate, PLAIN_TEXT, + LanguageServerName, LspAdapter, PLAIN_TEXT, }; use anyhow::{anyhow, Context, Result}; use collections::{hash_map, HashMap, HashSet}; use futures::{ channel::{mpsc, oneshot}, - future::Shared, Future, }; use globset::GlobSet; -use gpui::{AppContext, BackgroundExecutor, Task}; +use gpui::{AppContext, BackgroundExecutor}; use lsp::LanguageServerId; use parking_lot::{Mutex, RwLock}; use postage::watch; @@ -118,12 +117,6 @@ pub enum LanguageServerBinaryStatus { Failed { error: String }, } -pub struct PendingLanguageServer { - pub server_id: LanguageServerId, - pub task: Task)>>, - pub container_dir: Option>, -} - #[derive(Clone)] pub struct AvailableLanguage { id: LanguageId, @@ -882,123 +875,53 @@ impl LanguageRegistry { self.lsp_binary_status_tx.send(server_name, status); } - #[allow(clippy::too_many_arguments)] - pub fn create_pending_language_server( - self: &Arc, - stderr_capture: Arc>>, - _language_name_for_tests: LanguageName, - adapter: Arc, - root_path: Arc, - delegate: Arc, - project_environment: Shared>>>, - cx: &mut AppContext, - ) -> Option { - let server_id = self.state.write().next_language_server_id(); - log::info!( - "attempting to start language server {:?}, path: {root_path:?}, id: {server_id}", - adapter.name.0 - ); + pub fn next_language_server_id(&self) -> LanguageServerId { + self.state.write().next_language_server_id() + } - let container_dir: Option> = self - .language_server_download_dir + pub fn language_server_download_dir(&self, name: &LanguageServerName) -> Option> { + self.language_server_download_dir .as_ref() - .map(|dir| Arc::from(dir.join(adapter.name.0.as_ref()))); - let root_path = root_path.clone(); - let this = Arc::downgrade(self); - - let task = cx.spawn({ - let container_dir = container_dir.clone(); - move |mut cx| async move { - let project_environment = project_environment.await; - - let binary_result = adapter - .clone() - .get_language_server_command(container_dir, delegate.clone(), &mut cx) - .await; - - delegate.update_status(adapter.name.clone(), LanguageServerBinaryStatus::None); - - let mut binary = binary_result?; - - // If we do have a project environment (either by spawning a shell in in the project directory - // or by getting it from the CLI) and the language server command itself - // doesn't have an environment (which it would have, if it was found in $PATH), then - // we use the project environment. 
- if binary.env.is_none() && project_environment.is_some() { - log::info!( - "using project environment for language server {:?}, id: {server_id}", - adapter.name.0 - ); - binary.env = project_environment.clone(); - } - - let options = adapter - .adapter - .clone() - .initialization_options(&delegate) - .await?; + .map(|dir| Arc::from(dir.join(name.0.as_ref()))) + } - #[cfg(any(test, feature = "test-support"))] - if true { - if let Some(this) = this.upgrade() { - if let Some(fake_entry) = this - .state - .write() - .fake_server_entries - .get_mut(&adapter.name) - { - let (server, mut fake_server) = lsp::FakeLanguageServer::new( - server_id, - binary, - adapter.name.0.to_string(), - fake_entry.capabilities.clone(), - cx.clone(), - ); - fake_entry._server = Some(fake_server.clone()); - - if let Some(initializer) = &fake_entry.initializer { - initializer(&mut fake_server); - } + #[cfg(any(test, feature = "test-support"))] + pub fn create_fake_language_server( + &self, + server_id: LanguageServerId, + name: &LanguageServerName, + binary: lsp::LanguageServerBinary, + cx: gpui::AsyncAppContext, + ) -> Option { + let mut state = self.state.write(); + let fake_entry = state.fake_server_entries.get_mut(&name)?; + let (server, mut fake_server) = lsp::FakeLanguageServer::new( + server_id, + binary, + name.0.to_string(), + fake_entry.capabilities.clone(), + cx.clone(), + ); + fake_entry._server = Some(fake_server.clone()); - let tx = fake_entry.tx.clone(); - cx.background_executor() - .spawn(async move { - if fake_server - .try_receive_notification::( - ) - .await - .is_some() - { - tx.unbounded_send(fake_server.clone()).ok(); - } - }) - .detach(); + if let Some(initializer) = &fake_entry.initializer { + initializer(&mut fake_server); + } - return Ok((server, options)); - } - } + let tx = fake_entry.tx.clone(); + cx.background_executor() + .spawn(async move { + if fake_server + .try_receive_notification::() + .await + .is_some() + { + tx.unbounded_send(fake_server.clone()).ok(); } + }) + .detach(); - drop(this); - Ok(( - lsp::LanguageServer::new( - stderr_capture, - server_id, - binary, - &root_path, - adapter.code_action_kinds(), - cx, - )?, - options, - )) - } - }); - - Some(PendingLanguageServer { - server_id, - task, - container_dir, - }) + Some(server) } pub fn language_server_binary_statuses( @@ -1007,29 +930,16 @@ impl LanguageRegistry { self.lsp_binary_status_tx.subscribe() } - pub fn delete_server_container( - &self, - adapter: Arc, - cx: &mut AppContext, - ) -> Task<()> { + pub async fn delete_server_container(&self, name: LanguageServerName) { log::info!("deleting server container"); + let Some(dir) = self.language_server_download_dir(&name) else { + return; + }; - let download_dir = self - .language_server_download_dir - .clone() - .expect("language server download directory has not been assigned before deleting server container"); - - cx.spawn(|_| async move { - let container_dir = download_dir.join(adapter.name.0.as_ref()); - smol::fs::remove_dir_all(container_dir) - .await - .context("server container removal") - .log_err(); - }) - } - - pub fn next_language_server_id(&self) -> LanguageServerId { - self.state.write().next_language_server_id() + smol::fs::remove_dir_all(dir) + .await + .context("server container removal") + .log_err(); } } diff --git a/crates/languages/src/c.rs b/crates/languages/src/c.rs index 8a04e0aae6f4e..28a12b5310f38 100644 --- a/crates/languages/src/c.rs +++ b/crates/languages/src/c.rs @@ -5,7 +5,6 @@ use gpui::AsyncAppContext; use 
http_client::github::{latest_github_release, GitHubLspBinaryVersion}; pub use language::*; use lsp::LanguageServerBinary; -use project::{lsp_store::language_server_settings, project_settings::BinarySettings}; use smol::fs::{self, File}; use std::{any::Any, env::consts, path::PathBuf, sync::Arc}; use util::{fs::remove_matching, maybe, ResultExt}; @@ -25,41 +24,14 @@ impl super::LspAdapter for CLspAdapter { async fn check_if_user_installed( &self, delegate: &dyn LspAdapterDelegate, - cx: &AsyncAppContext, + _: &AsyncAppContext, ) -> Option { - let configured_binary = cx.update(|cx| { - language_server_settings(delegate, &Self::SERVER_NAME, cx) - .and_then(|s| s.binary.clone()) - }); - - match configured_binary { - Ok(Some(BinarySettings { - path: Some(path), - arguments, - .. - })) => Some(LanguageServerBinary { - path: path.into(), - arguments: arguments - .unwrap_or_default() - .iter() - .map(|arg| arg.into()) - .collect(), - env: None, - }), - Ok(Some(BinarySettings { - path_lookup: Some(false), - .. - })) => None, - _ => { - let env = delegate.shell_env().await; - let path = delegate.which(Self::SERVER_NAME.as_ref()).await?; - Some(LanguageServerBinary { - path, - arguments: vec![], - env: Some(env), - }) - } - } + let path = delegate.which(Self::SERVER_NAME.as_ref()).await?; + Some(LanguageServerBinary { + path, + arguments: vec![], + env: None, + }) } async fn fetch_latest_server_version( @@ -141,18 +113,6 @@ impl super::LspAdapter for CLspAdapter { get_cached_server_binary(container_dir).await } - async fn installation_test_binary( - &self, - container_dir: PathBuf, - ) -> Option { - get_cached_server_binary(container_dir) - .await - .map(|mut binary| { - binary.arguments = vec!["--help".into()]; - binary - }) - } - async fn label_for_completion( &self, completion: &lsp::CompletionItem, diff --git a/crates/languages/src/css.rs b/crates/languages/src/css.rs index 7b7e9ae77f06f..b4e5feaab76c9 100644 --- a/crates/languages/src/css.rs +++ b/crates/languages/src/css.rs @@ -84,13 +84,6 @@ impl LspAdapter for CssLspAdapter { get_cached_server_binary(container_dir, &self.node).await } - async fn installation_test_binary( - &self, - container_dir: PathBuf, - ) -> Option { - get_cached_server_binary(container_dir, &self.node).await - } - async fn initialization_options( self: Arc, _: &Arc, diff --git a/crates/languages/src/go.rs b/crates/languages/src/go.rs index a1a996c066ee4..135c080e00a14 100644 --- a/crates/languages/src/go.rs +++ b/crates/languages/src/go.rs @@ -6,7 +6,6 @@ use gpui::{AppContext, AsyncAppContext, Task}; use http_client::github::latest_github_release; pub use language::*; use lsp::LanguageServerBinary; -use project::{lsp_store::language_server_settings, project_settings::BinarySettings}; use regex::Regex; use serde_json::json; use smol::{fs, process}; @@ -68,41 +67,14 @@ impl super::LspAdapter for GoLspAdapter { async fn check_if_user_installed( &self, delegate: &dyn LspAdapterDelegate, - cx: &AsyncAppContext, + _: &AsyncAppContext, ) -> Option { - let configured_binary = cx.update(|cx| { - language_server_settings(delegate, &Self::SERVER_NAME, cx) - .and_then(|s| s.binary.clone()) - }); - - match configured_binary { - Ok(Some(BinarySettings { - path: Some(path), - arguments, - .. - })) => Some(LanguageServerBinary { - path: path.into(), - arguments: arguments - .unwrap_or_default() - .iter() - .map(|arg| arg.into()) - .collect(), - env: None, - }), - Ok(Some(BinarySettings { - path_lookup: Some(false), - .. 
- })) => None, - _ => { - let env = delegate.shell_env().await; - let path = delegate.which(Self::SERVER_NAME.as_ref()).await?; - Some(LanguageServerBinary { - path, - arguments: server_binary_arguments(), - env: Some(env), - }) - } - } + let path = delegate.which(Self::SERVER_NAME.as_ref()).await?; + Some(LanguageServerBinary { + path, + arguments: server_binary_arguments(), + env: None, + }) } fn will_fetch_server( @@ -214,18 +186,6 @@ impl super::LspAdapter for GoLspAdapter { get_cached_server_binary(container_dir).await } - async fn installation_test_binary( - &self, - container_dir: PathBuf, - ) -> Option { - get_cached_server_binary(container_dir) - .await - .map(|mut binary| { - binary.arguments = vec!["--help".into()]; - binary - }) - } - async fn initialization_options( self: Arc, _: &Arc, diff --git a/crates/languages/src/json.rs b/crates/languages/src/json.rs index 44cc68387676e..95c4070b13a33 100644 --- a/crates/languages/src/json.rs +++ b/crates/languages/src/json.rs @@ -186,13 +186,6 @@ impl LspAdapter for JsonLspAdapter { get_cached_server_binary(container_dir, &self.node).await } - async fn installation_test_binary( - &self, - container_dir: PathBuf, - ) -> Option { - get_cached_server_binary(container_dir, &self.node).await - } - async fn initialization_options( self: Arc, _: &Arc, @@ -374,18 +367,6 @@ impl LspAdapter for NodeVersionAdapter { ) -> Option { get_cached_version_server_binary(container_dir).await } - - async fn installation_test_binary( - &self, - container_dir: PathBuf, - ) -> Option { - get_cached_version_server_binary(container_dir) - .await - .map(|mut binary| { - binary.arguments = vec!["--version".into()]; - binary - }) - } } async fn get_cached_version_server_binary(container_dir: PathBuf) -> Option { diff --git a/crates/languages/src/python.rs b/crates/languages/src/python.rs index 75f124489c382..964abf42b525f 100644 --- a/crates/languages/src/python.rs +++ b/crates/languages/src/python.rs @@ -97,13 +97,6 @@ impl LspAdapter for PythonLspAdapter { get_cached_server_binary(container_dir, &self.node).await } - async fn installation_test_binary( - &self, - container_dir: PathBuf, - ) -> Option { - get_cached_server_binary(container_dir, &self.node).await - } - async fn process_completions(&self, items: &mut [lsp::CompletionItem]) { // Pyright assigns each completion item a `sortText` of the form `XX.YYYY.name`. 
// Where `XX` is the sorting category, `YYYY` is based on most recent usage, diff --git a/crates/languages/src/rust.rs b/crates/languages/src/rust.rs index eebd573a7e25f..0d644e1bfef24 100644 --- a/crates/languages/src/rust.rs +++ b/crates/languages/src/rust.rs @@ -8,7 +8,6 @@ use http_client::github::{latest_github_release, GitHubLspBinaryVersion}; pub use language::*; use language_settings::all_language_settings; use lsp::LanguageServerBinary; -use project::{lsp_store::language_server_settings, project_settings::BinarySettings}; use regex::Regex; use smol::fs::{self, File}; use std::{ @@ -37,77 +36,34 @@ impl LspAdapter for RustLspAdapter { async fn check_if_user_installed( &self, delegate: &dyn LspAdapterDelegate, - cx: &AsyncAppContext, + _: &AsyncAppContext, ) -> Option { - let configured_binary = cx - .update(|cx| { - language_server_settings(delegate, &Self::SERVER_NAME, cx) - .and_then(|s| s.binary.clone()) + let path = delegate.which("rust-analyzer".as_ref()).await?; + let env = delegate.shell_env().await; + + // It is surprisingly common for ~/.cargo/bin/rust-analyzer to be a symlink to + // /usr/bin/rust-analyzer that fails when you run it; so we need to test it. + log::info!("found rust-analyzer in PATH. trying to run `rust-analyzer --help`"); + let result = delegate + .try_exec(LanguageServerBinary { + path: path.clone(), + arguments: vec!["--help".into()], + env: Some(env.clone()), }) - .ok()?; - - let (path, env, arguments) = match configured_binary { - // If nothing is configured, or path_lookup explicitly enabled, - // we lookup the binary in the path. - None - | Some(BinarySettings { - path: None, - path_lookup: Some(true), - .. - }) - | Some(BinarySettings { - path: None, - path_lookup: None, - .. - }) => { - let path = delegate.which("rust-analyzer".as_ref()).await; - let env = delegate.shell_env().await; - - if let Some(path) = path { - // It is surprisingly common for ~/.cargo/bin/rust-analyzer to be a symlink to - // /usr/bin/rust-analyzer that fails when you run it; so we need to test it. - log::info!("found rust-analyzer in PATH. trying to run `rust-analyzer --help`"); - match delegate - .try_exec(LanguageServerBinary { - path: path.clone(), - arguments: vec!["--help".into()], - env: Some(env.clone()), - }) - .await - { - Ok(()) => (Some(path), Some(env), None), - Err(err) => { - log::error!("failed to run rust-analyzer after detecting it in PATH: binary: {:?}: {}", path, err); - (None, None, None) - } - } - } else { - (None, None, None) - } - } - // Otherwise, we use the configured binary. 
- Some(BinarySettings { - path: Some(path), - arguments, - path_lookup, - }) => { - if path_lookup.is_some() { - log::warn!("Both `path` and `path_lookup` are set, ignoring `path_lookup`"); - } - (Some(path.into()), None, arguments) - } - - _ => (None, None, None), - }; + .await; + if let Err(err) = result { + log::error!( + "failed to run rust-analyzer after detecting it in PATH: binary: {:?}: {}", + path, + err + ); + return None; + } - path.map(|path| LanguageServerBinary { + Some(LanguageServerBinary { path, - env, - arguments: arguments - .unwrap_or_default() - .iter() - .map(|arg| arg.into()) - .collect(), + env: Some(env), + arguments: vec![], }) } @@ -186,18 +142,6 @@ impl LspAdapter for RustLspAdapter { get_cached_server_binary(container_dir).await } - async fn installation_test_binary( - &self, - container_dir: PathBuf, - ) -> Option { - get_cached_server_binary(container_dir) - .await - .map(|mut binary| { - binary.arguments = vec!["--help".into()]; - binary - }) - } - fn disk_based_diagnostic_sources(&self) -> Vec { vec!["rustc".into()] } diff --git a/crates/languages/src/tailwind.rs b/crates/languages/src/tailwind.rs index 62d967d6a4a26..4ed5c742a9fc8 100644 --- a/crates/languages/src/tailwind.rs +++ b/crates/languages/src/tailwind.rs @@ -46,38 +46,6 @@ impl LspAdapter for TailwindLspAdapter { Self::SERVER_NAME.clone() } - async fn check_if_user_installed( - &self, - delegate: &dyn LspAdapterDelegate, - cx: &AsyncAppContext, - ) -> Option { - let configured_binary = cx - .update(|cx| { - language_server_settings(delegate, &Self::SERVER_NAME, cx) - .and_then(|s| s.binary.clone()) - }) - .ok()??; - - let path = if let Some(configured_path) = configured_binary.path.map(PathBuf::from) { - configured_path - } else { - self.node.binary_path().await.ok()? 
- }; - - let arguments = configured_binary - .arguments - .unwrap_or_default() - .iter() - .map(|arg| arg.into()) - .collect(); - - Some(LanguageServerBinary { - path, - arguments, - env: None, - }) - } - async fn fetch_latest_server_version( &self, _: &dyn LspAdapterDelegate, @@ -125,13 +93,6 @@ impl LspAdapter for TailwindLspAdapter { get_cached_server_binary(container_dir, &self.node).await } - async fn installation_test_binary( - &self, - container_dir: PathBuf, - ) -> Option { - get_cached_server_binary(container_dir, &self.node).await - } - async fn initialization_options( self: Arc, _: &Arc, diff --git a/crates/languages/src/typescript.rs b/crates/languages/src/typescript.rs index b7eb21132d52c..cfd7e04bc6417 100644 --- a/crates/languages/src/typescript.rs +++ b/crates/languages/src/typescript.rs @@ -164,13 +164,6 @@ impl LspAdapter for TypeScriptLspAdapter { get_cached_ts_server_binary(container_dir, &self.node).await } - async fn installation_test_binary( - &self, - container_dir: PathBuf, - ) -> Option { - get_cached_ts_server_binary(container_dir, &self.node).await - } - fn code_action_kinds(&self) -> Option> { Some(vec![ CodeActionKind::QUICKFIX, @@ -509,19 +502,6 @@ impl LspAdapter for EsLintLspAdapter { arguments: eslint_server_binary_arguments(&server_path), }) } - - async fn installation_test_binary( - &self, - container_dir: PathBuf, - ) -> Option { - let server_path = - Self::build_destination_path(&container_dir).join(EsLintLspAdapter::SERVER_PATH); - Some(LanguageServerBinary { - path: self.node.binary_path().await.ok()?, - env: None, - arguments: eslint_server_binary_arguments(&server_path), - }) - } } #[cfg(target_os = "windows")] diff --git a/crates/languages/src/vtsls.rs b/crates/languages/src/vtsls.rs index de6d575a8ee9f..ff8637dc28dbd 100644 --- a/crates/languages/src/vtsls.rs +++ b/crates/languages/src/vtsls.rs @@ -5,7 +5,7 @@ use gpui::AsyncAppContext; use language::{LanguageServerName, LspAdapter, LspAdapterDelegate}; use lsp::{CodeActionKind, LanguageServerBinary}; use node_runtime::NodeRuntime; -use project::{lsp_store::language_server_settings, project_settings::BinarySettings}; +use project::lsp_store::language_server_settings; use serde_json::Value; use std::{ any::Any, @@ -71,40 +71,15 @@ impl LspAdapter for VtslsLspAdapter { async fn check_if_user_installed( &self, delegate: &dyn LspAdapterDelegate, - cx: &AsyncAppContext, + _: &AsyncAppContext, ) -> Option { - let configured_binary = cx.update(|cx| { - language_server_settings(delegate, &SERVER_NAME, cx).and_then(|s| s.binary.clone()) - }); - - match configured_binary { - Ok(Some(BinarySettings { - path: Some(path), - arguments, - .. - })) => Some(LanguageServerBinary { - path: path.into(), - arguments: arguments - .unwrap_or_default() - .iter() - .map(|arg| arg.into()) - .collect(), - env: None, - }), - Ok(Some(BinarySettings { - path_lookup: Some(false), - .. 
- })) => None, - _ => { - let env = delegate.shell_env().await; - let path = delegate.which(SERVER_NAME.as_ref()).await?; - Some(LanguageServerBinary { - path: path.clone(), - arguments: typescript_server_binary_arguments(&path), - env: Some(env), - }) - } - } + let env = delegate.shell_env().await; + let path = delegate.which(SERVER_NAME.as_ref()).await?; + Some(LanguageServerBinary { + path: path.clone(), + arguments: typescript_server_binary_arguments(&path), + env: Some(env), + }) } async fn fetch_server_binary( @@ -157,13 +132,6 @@ impl LspAdapter for VtslsLspAdapter { get_cached_ts_server_binary(container_dir, &self.node).await } - async fn installation_test_binary( - &self, - container_dir: PathBuf, - ) -> Option { - get_cached_ts_server_binary(container_dir, &self.node).await - } - fn code_action_kinds(&self) -> Option> { Some(vec![ CodeActionKind::QUICKFIX, diff --git a/crates/languages/src/yaml.rs b/crates/languages/src/yaml.rs index 32ca73168ab2d..642d6c030ac91 100644 --- a/crates/languages/src/yaml.rs +++ b/crates/languages/src/yaml.rs @@ -42,37 +42,6 @@ impl LspAdapter for YamlLspAdapter { Self::SERVER_NAME.clone() } - async fn check_if_user_installed( - &self, - delegate: &dyn LspAdapterDelegate, - cx: &AsyncAppContext, - ) -> Option { - let configured_binary = cx - .update(|cx| { - language_server_settings(delegate, &Self::SERVER_NAME, cx) - .and_then(|s| s.binary.clone()) - }) - .ok()??; - - let path = if let Some(configured_path) = configured_binary.path.map(PathBuf::from) { - configured_path - } else { - self.node.binary_path().await.ok()? - }; - - let arguments = configured_binary - .arguments - .unwrap_or_default() - .iter() - .map(|arg| arg.into()) - .collect(); - Some(LanguageServerBinary { - path, - arguments, - env: None, - }) - } - async fn fetch_latest_server_version( &self, _: &dyn LspAdapterDelegate, @@ -120,13 +89,6 @@ impl LspAdapter for YamlLspAdapter { get_cached_server_binary(container_dir, &self.node).await } - async fn installation_test_binary( - &self, - container_dir: PathBuf, - ) -> Option { - get_cached_server_binary(container_dir, &self.node).await - } - async fn workspace_configuration( self: Arc, delegate: &Arc, diff --git a/crates/lsp/src/lsp.rs b/crates/lsp/src/lsp.rs index c2a5951de7210..e380da052ddc9 100644 --- a/crates/lsp/src/lsp.rs +++ b/crates/lsp/src/lsp.rs @@ -64,6 +64,15 @@ pub struct LanguageServerBinary { pub env: Option>, } +/// Configures the search (and installation) of language servers. +#[derive(Debug, Clone, Deserialize)] +pub struct LanguageServerBinaryOptions { + /// Whether the adapter should look at the users system + pub allow_path_lookup: bool, + /// Whether the adapter should download its own version + pub allow_binary_download: bool, +} + /// A running language server process. 
pub struct LanguageServer { server_id: LanguageServerId, diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 8d859c091bfe9..21d5de53e6be2 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -37,16 +37,16 @@ use language::{ proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version}, range_from_lsp, Bias, Buffer, BufferSnapshot, CachedLspAdapter, CodeLabel, Diagnostic, DiagnosticEntry, DiagnosticSet, Diff, Documentation, File as _, Language, LanguageConfig, - LanguageMatcher, LanguageName, LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, - LspAdapterDelegate, Patch, PendingLanguageServer, PointUtf16, TextBufferSnapshot, ToOffset, - ToPointUtf16, Transaction, Unclipped, + LanguageMatcher, LanguageName, LanguageRegistry, LanguageServerBinaryStatus, + LanguageServerName, LocalFile, LspAdapter, LspAdapterDelegate, Patch, PointUtf16, + TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction, Unclipped, }; use lsp::{ CodeActionKind, CompletionContext, DiagnosticSeverity, DiagnosticTag, DidChangeWatchedFilesRegistrationOptions, Edit, FileSystemWatcher, InsertTextFormat, - LanguageServer, LanguageServerBinary, LanguageServerId, LspRequestFuture, MessageActionItem, - MessageType, OneOf, ServerHealthStatus, ServerStatus, SymbolKind, TextEdit, Url, - WorkDoneProgressCancelParams, WorkspaceFolder, + LanguageServer, LanguageServerBinary, LanguageServerBinaryOptions, LanguageServerId, + LspRequestFuture, MessageActionItem, MessageType, OneOf, ServerHealthStatus, ServerStatus, + SymbolKind, TextEdit, Url, WorkDoneProgressCancelParams, WorkspaceFolder, }; use parking_lot::{Mutex, RwLock}; use postage::watch; @@ -67,9 +67,8 @@ use std::{ iter, mem, ops::{ControlFlow, Range}, path::{self, Path, PathBuf}, - process::Stdio, str, - sync::{atomic::Ordering::SeqCst, Arc}, + sync::Arc, time::{Duration, Instant}, }; use text::{Anchor, BufferId, LineEnding}; @@ -87,8 +86,6 @@ pub use worktree::{ FS_WATCH_LATENCY, }; -const MAX_SERVER_REINSTALL_ATTEMPT_COUNT: u64 = 4; -const SERVER_REINSTALL_DEBOUNCE_TIMEOUT: Duration = Duration::from_secs(1); const SERVER_LAUNCHING_BEFORE_SHUTDOWN_TIMEOUT: Duration = Duration::from_secs(5); pub const SERVER_PROGRESS_THROTTLE_TIMEOUT: Duration = Duration::from_millis(100); @@ -157,6 +154,7 @@ impl LocalLspStore { futures::future::join_all(shutdown_futures).await; } } + async fn format_locally( lsp_store: WeakModel, mut buffers_with_paths: Vec<(Model, Option)>, @@ -1471,7 +1469,7 @@ impl LspStore { } for (worktree_id, adapter_name) in language_servers_to_stop { - self.stop_language_server(worktree_id, adapter_name, cx) + self.stop_local_language_server(worktree_id, adapter_name, cx) .detach(); } @@ -1488,7 +1486,7 @@ impl LspStore { // Restart all language servers with changed initialization options. 
for (worktree, language) in language_servers_to_restart { - self.restart_language_servers(worktree, language, cx); + self.restart_local_language_servers(worktree, language, cx); } cx.notify(); @@ -3028,7 +3026,7 @@ impl LspStore { }) } - pub fn primary_language_server_for_buffer<'a>( + fn primary_language_server_for_buffer<'a>( &'a self, buffer: &'a Buffer, cx: &'a AppContext, @@ -3328,7 +3326,7 @@ impl LspStore { Ok(()) } - pub fn update_worktree_diagnostics( + fn update_worktree_diagnostics( &mut self, worktree_id: WorktreeId, server_id: LanguageServerId, @@ -5405,9 +5403,6 @@ impl LspStore { language_registry: self.languages.clone(), }) as Arc; - // TODO: We should use `adapter` here instead of reaching through the `CachedLspAdapter`. - let lsp_adapter = adapter.adapter.clone(); - let Some((upstream_client, project_id)) = self.upstream_client() else { return; }; @@ -5419,17 +5414,11 @@ impl LspStore { return; }; - let task = cx.spawn(|_, cx| async move { - let user_binary_task = lsp_adapter.check_if_user_installed(delegate.as_ref(), &cx); - let binary = match user_binary_task.await { - Some(binary) => binary, - None => { - return Err(anyhow!( - "Downloading language server for ssh host is not supported yet" - )) - } - }; + let user_binary_task = + self.get_language_server_binary(adapter.clone(), delegate.clone(), false, cx); + let task = cx.spawn(|_, _| async move { + let binary = user_binary_task.await?; let name = adapter.name(); let code_action_kinds = adapter .adapter @@ -5481,6 +5470,73 @@ impl LspStore { .detach(); } + fn get_language_server_binary( + &self, + adapter: Arc, + delegate: Arc, + allow_binary_download: bool, + cx: &mut ModelContext, + ) -> Task> { + let settings = ProjectSettings::get( + Some(SettingsLocation { + worktree_id: delegate.worktree_id(), + path: Path::new(""), + }), + cx, + ) + .lsp + .get(&adapter.name) + .and_then(|s| s.binary.clone()); + + if settings.as_ref().is_some_and(|b| b.path.is_some()) { + let settings = settings.unwrap(); + return cx.spawn(|_, _| async move { + Ok(LanguageServerBinary { + path: PathBuf::from(&settings.path.unwrap()), + env: Some(delegate.shell_env().await), + arguments: settings + .arguments + .unwrap_or_default() + .iter() + .map(Into::into) + .collect(), + }) + }); + } + let lsp_binary_options = LanguageServerBinaryOptions { + allow_path_lookup: !settings + .as_ref() + .and_then(|b| b.ignore_system_version) + .unwrap_or_default(), + allow_binary_download, + }; + cx.spawn(|_, mut cx| async move { + let binary_result = adapter + .clone() + .get_language_server_command(delegate.clone(), lsp_binary_options, &mut cx) + .await; + + delegate.update_status(adapter.name.clone(), LanguageServerBinaryStatus::None); + + let mut binary = binary_result?; + if let Some(arguments) = settings.and_then(|b| b.arguments) { + binary.arguments = arguments.into_iter().map(Into::into).collect(); + } + + // If we do have a project environment (either by spawning a shell in in the project directory + // or by getting it from the CLI) and the language server command itself + // doesn't have an environment, then we use the project environment. 
+ if binary.env.is_none() { + log::info!( + "using project environment for language server {:?}", + adapter.name() + ); + binary.env = Some(delegate.shell_env().await); + } + Ok(binary) + }) + } + fn start_language_server( &mut self, worktree_handle: &Model, @@ -5496,6 +5552,7 @@ impl LspStore { let worktree_id = worktree.id(); let worktree_path = worktree.abs_path(); let key = (worktree_id, adapter.name.clone()); + if self.language_server_ids.contains_key(&key) { return; } @@ -5505,31 +5562,6 @@ impl LspStore { return; } - if adapter.reinstall_attempt_count.load(SeqCst) > MAX_SERVER_REINSTALL_ATTEMPT_COUNT { - return; - } - - let local = self.as_local().unwrap(); - - let stderr_capture = Arc::new(Mutex::new(Some(String::new()))); - let lsp_adapter_delegate = LocalLspAdapterDelegate::for_local(self, worktree_handle, cx); - let project_environment = local.environment.update(cx, |environment, cx| { - environment.get_environment(Some(worktree_id), Some(worktree_path.clone()), cx) - }); - - let pending_server = match self.languages.create_pending_language_server( - stderr_capture.clone(), - language.clone(), - adapter.clone(), - Arc::clone(&worktree_path), - lsp_adapter_delegate.clone(), - project_environment, - cx, - ) { - Some(pending_server) => pending_server, - None => return, - }; - let project_settings = ProjectSettings::get( Some(SettingsLocation { worktree_id, @@ -5537,76 +5569,146 @@ impl LspStore { }), cx, ); - - // We need some on the SSH client, and some on SSH host let lsp = project_settings.lsp.get(&adapter.name); let override_options = lsp.and_then(|s| s.initialization_options.clone()); - let server_id = pending_server.server_id; - let container_dir = pending_server.container_dir.clone(); - let state = LanguageServerState::Starting({ + let stderr_capture = Arc::new(Mutex::new(Some(String::new()))); + let delegate = LocalLspAdapterDelegate::for_local(self, worktree_handle, cx) + as Arc; + + let server_id = self.languages.next_language_server_id(); + let root_path = worktree_path.clone(); + log::info!( + "attempting to start language server {:?}, path: {root_path:?}, id: {server_id}", + adapter.name.0 + ); + + let binary = self.get_language_server_binary(adapter.clone(), delegate.clone(), true, cx); + + let pending_server = cx.spawn({ let adapter = adapter.clone(); + let stderr_capture = stderr_capture.clone(); + + move |_lsp_store, cx| async move { + let binary = binary.await?; + + #[cfg(any(test, feature = "test-support"))] + if let Some(server) = _lsp_store + .update(&mut cx.clone(), |this, cx| { + this.languages.create_fake_language_server( + server_id, + &adapter.name, + binary.clone(), + cx.to_async(), + ) + }) + .ok() + .flatten() + { + return Ok(server); + } + + lsp::LanguageServer::new( + stderr_capture, + server_id, + binary, + &root_path, + adapter.code_action_kinds(), + cx, + ) + } + }); + + let state = LanguageServerState::Starting({ let server_name = adapter.name.0.clone(); + let delegate = delegate as Arc; let language = language.clone(); let key = key.clone(); + let adapter = adapter.clone(); cx.spawn(move |this, mut cx| async move { - let result = Self::setup_and_insert_language_server( - this.clone(), - lsp_adapter_delegate, - override_options, - pending_server, - adapter.clone(), - language.clone(), - server_id, - key, - &mut cx, - ) - .await; + let result = { + let delegate = delegate.clone(); + let adapter = adapter.clone(); + let this = this.clone(); + let mut cx = cx.clone(); + async move { + let language_server = pending_server.await?; - match result { - 
Ok(server) => { - stderr_capture.lock().take(); - server - } + let workspace_config = adapter + .adapter + .clone() + .workspace_configuration(&delegate, &mut cx) + .await?; - Err(err) => { - log::error!("failed to start language server {server_name:?}: {err}"); - log::error!("server stderr: {:?}", stderr_capture.lock().take()); + let mut initialization_options = adapter + .adapter + .clone() + .initialization_options(&(delegate)) + .await?; - let this = this.upgrade()?; - let container_dir = container_dir?; + Self::setup_lsp_messages(this.clone(), &language_server, delegate, adapter); - let attempt_count = adapter.reinstall_attempt_count.fetch_add(1, SeqCst); - if attempt_count >= MAX_SERVER_REINSTALL_ATTEMPT_COUNT { - let max = MAX_SERVER_REINSTALL_ATTEMPT_COUNT; - log::error!("Hit {max} reinstallation attempts for {server_name:?}"); - return None; + match (&mut initialization_options, override_options) { + (Some(initialization_options), Some(override_options)) => { + merge_json_value_into(override_options, initialization_options); + } + (None, override_options) => initialization_options = override_options, + _ => {} } - log::info!( - "retrying installation of language server {server_name:?} in {}s", - SERVER_REINSTALL_DEBOUNCE_TIMEOUT.as_secs() - ); - cx.background_executor() - .timer(SERVER_REINSTALL_DEBOUNCE_TIMEOUT) - .await; + let language_server = cx + .update(|cx| language_server.initialize(initialization_options, cx))? + .await + .inspect_err(|_| { + if let Some(this) = this.upgrade() { + this.update(&mut cx, |_, cx| { + cx.emit(LspStoreEvent::LanguageServerRemoved(server_id)) + }) + .ok(); + } + })?; - let installation_test_binary = adapter - .installation_test_binary(container_dir.to_path_buf()) - .await; + language_server + .notify::( + lsp::DidChangeConfigurationParams { + settings: workspace_config, + }, + ) + .ok(); - this.update(&mut cx, |_, cx| { - Self::check_errored_server( + anyhow::Ok(language_server) + } + } + .await; + + match result { + Ok(server) => { + this.update(&mut cx, |this, mut cx| { + this.insert_newly_running_language_server( language, adapter, + server.clone(), server_id, - installation_test_binary, - cx, - ) + key, + &mut cx, + ); }) .ok(); + stderr_capture.lock().take(); + Some(server) + } + Err(err) => { + let log = stderr_capture.lock().take().unwrap_or_default(); + delegate.update_status( + adapter.name(), + LanguageServerBinaryStatus::Failed { + error: format!("{err}\n-- stderr--\n{}", log), + }, + ); + log::error!("Failed to start language server {server_name:?}: {err}"); + log::error!("server stderr: {:?}", log); None } } @@ -5620,109 +5722,6 @@ impl LspStore { self.language_server_ids.insert(key, server_id); } - #[allow(clippy::too_many_arguments)] - async fn setup_and_insert_language_server( - this: WeakModel, - delegate: Arc, - override_initialization_options: Option, - pending_server: PendingLanguageServer, - adapter: Arc, - language: LanguageName, - server_id: LanguageServerId, - key: (WorktreeId, LanguageServerName), - cx: &mut AsyncAppContext, - ) -> Result>> { - let language_server = Self::setup_pending_language_server( - this.clone(), - override_initialization_options, - pending_server, - delegate, - adapter.clone(), - server_id, - cx, - ) - .await?; - - let this = match this.upgrade() { - Some(this) => this, - None => return Err(anyhow!("failed to upgrade project handle")), - }; - - this.update(cx, |this, cx| { - this.insert_newly_running_language_server( - language, - adapter, - language_server.clone(), - server_id, - key, - cx, - ) - 
})??; - - Ok(Some(language_server)) - } - - fn reinstall_language_server( - &mut self, - language: LanguageName, - adapter: Arc, - server_id: LanguageServerId, - cx: &mut ModelContext, - ) -> Option> { - log::info!("beginning to reinstall server"); - - if let Some(local) = self.as_local_mut() { - let existing_server = match local.language_servers.remove(&server_id) { - Some(LanguageServerState::Running { server, .. }) => Some(server), - _ => None, - }; - - self.worktree_store.update(cx, |store, cx| { - for worktree in store.worktrees() { - let key = (worktree.read(cx).id(), adapter.name.clone()); - self.language_server_ids.remove(&key); - } - }); - - Some(cx.spawn(move |this, mut cx| async move { - if let Some(task) = existing_server.and_then(|server| server.shutdown()) { - log::info!("shutting down existing server"); - task.await; - } - - // TODO: This is race-safe with regards to preventing new instances from - // starting while deleting, but existing instances in other projects are going - // to be very confused and messed up - let Some(task) = this - .update(&mut cx, |this, cx| { - this.languages.delete_server_container(adapter.clone(), cx) - }) - .log_err() - else { - return; - }; - task.await; - - this.update(&mut cx, |this, cx| { - for worktree in this.worktree_store.read(cx).worktrees().collect::>() { - this.start_language_server( - &worktree, - adapter.clone(), - language.clone(), - cx, - ); - } - }) - .ok(); - })) - } else if let Some(_ssh_store) = self.as_ssh() { - // TODO - None - } else { - None - } - } - async fn shutdown_language_server( server_state: Option, name: LanguageServerName, @@ -5761,7 +5760,7 @@ impl LspStore { // Returns a list of all of the worktrees which no longer have a language server and the root path // for the stopped server - pub fn stop_language_server( + fn stop_local_language_server( &mut self, worktree_id: WorktreeId, adapter_name: LanguageServerName, @@ -5877,7 +5876,6 @@ impl LspStore { .spawn(request) .detach_and_log_err(cx); } else { - #[allow(clippy::mutable_key_type)] let language_server_lookup_info: HashSet<(Model, LanguageName)> = buffers .into_iter() .filter_map(|buffer| { @@ -5893,12 +5891,12 @@ impl LspStore { .collect(); for (worktree, language) in language_server_lookup_info { - self.restart_language_servers(worktree, language, cx); + self.restart_local_language_servers(worktree, language, cx); } } } - pub fn restart_language_servers( + fn restart_local_language_servers( &mut self, worktree: Model, language: LanguageName, @@ -5912,7 +5910,8 @@ impl LspStore { .lsp_adapters(&language) .iter() .map(|adapter| { - let stop_task = self.stop_language_server(worktree_id, adapter.name.clone(), cx); + let stop_task = + self.stop_local_language_server(worktree_id, adapter.name.clone(), cx); (stop_task, adapter.name.clone()) }) .collect::>(); @@ -5951,93 +5950,14 @@ impl LspStore { .detach(); } - fn check_errored_server( - language: LanguageName, - adapter: Arc, - server_id: LanguageServerId, - installation_test_binary: Option, - cx: &mut ModelContext, - ) { - if !adapter.can_be_reinstalled() { - log::info!( - "Validation check requested for {:?} but it cannot be reinstalled", - adapter.name.0 - ); - return; - } - - cx.spawn(move |this, mut cx| async move { - log::info!("About to spawn test binary"); - - // A lack of test binary counts as a failure - let process = installation_test_binary.and_then(|binary| { - smol::process::Command::new(&binary.path) - .current_dir(&binary.path) - .args(binary.arguments) - .stdin(Stdio::piped()) - 
.stdout(Stdio::piped()) - .stderr(Stdio::inherit()) - .kill_on_drop(true) - .spawn() - .ok() - }); - - const PROCESS_TIMEOUT: Duration = Duration::from_secs(5); - let mut timeout = cx.background_executor().timer(PROCESS_TIMEOUT).fuse(); - - let mut errored = false; - if let Some(mut process) = process { - futures::select! { - status = process.status().fuse() => match status { - Ok(status) => errored = !status.success(), - Err(_) => errored = true, - }, - - _ = timeout => { - log::info!("test binary time-ed out, this counts as a success"); - _ = process.kill(); - } - } - } else { - log::warn!("test binary failed to launch"); - errored = true; - } - - if errored { - log::warn!("test binary check failed"); - let task = this - .update(&mut cx, move |this, cx| { - this.reinstall_language_server(language, adapter, server_id, cx) - }) - .ok() - .flatten(); - - if let Some(task) = task { - task.await; - } - } - }) - .detach(); - } - - async fn setup_pending_language_server( + fn setup_lsp_messages( this: WeakModel, - override_options: Option, - pending_server: PendingLanguageServer, + language_server: &LanguageServer, delegate: Arc, adapter: Arc, - server_id: LanguageServerId, - cx: &mut AsyncAppContext, - ) -> Result> { - let workspace_config = adapter - .adapter - .clone() - .workspace_configuration(&delegate, cx) - .await?; - // This has to come from the server - let (language_server, mut initialization_options) = pending_server.task.await?; - + ) { let name = language_server.name(); + let server_id = language_server.server_id(); language_server .on_notification::({ let adapter = adapter.clone(); @@ -6091,7 +6011,6 @@ impl LspStore { }) .detach(); - let id = language_server.server_id(); language_server .on_request::({ let this = this.clone(); @@ -6099,7 +6018,7 @@ impl LspStore { let this = this.clone(); async move { let Some(server) = - this.update(&mut cx, |this, _| this.language_server_for_id(id))? + this.update(&mut cx, |this, _| this.language_server_for_id(server_id))? else { return Ok(None); }; @@ -6375,9 +6294,6 @@ impl LspStore { }) .detach(); - let disk_based_diagnostics_progress_token = - adapter.disk_based_diagnostics_progress_token.clone(); - language_server .on_notification::({ let this = this.clone(); @@ -6448,6 +6364,10 @@ impl LspStore { } }) .detach(); + + let disk_based_diagnostics_progress_token = + adapter.disk_based_diagnostics_progress_token.clone(); + language_server .on_notification::({ let this = this.clone(); @@ -6502,36 +6422,6 @@ impl LspStore { } }) .detach(); - - match (&mut initialization_options, override_options) { - (Some(initialization_options), Some(override_options)) => { - merge_json_value_into(override_options, initialization_options); - } - (None, override_options) => initialization_options = override_options, - _ => {} - } - - let language_server = cx - .update(|cx| language_server.initialize(initialization_options, cx))? - .await - .inspect_err(|_| { - if let Some(this) = this.upgrade() { - this.update(cx, |_, cx| { - cx.emit(LspStoreEvent::LanguageServerRemoved(server_id)) - }) - .ok(); - } - })?; - - language_server - .notify::( - lsp::DidChangeConfigurationParams { - settings: workspace_config, - }, - ) - .ok(); - - Ok(language_server) } pub fn update_diagnostics( @@ -6664,7 +6554,7 @@ impl LspStore { server_id: LanguageServerId, key: (WorktreeId, LanguageServerName), cx: &mut ModelContext, - ) -> Result<()> { + ) { // If the language server for this key doesn't match the server id, don't store the // server. 
Which will cause it to be dropped, killing the process if self @@ -6673,7 +6563,7 @@ impl LspStore { .map(|id| id != &server_id) .unwrap_or(false) { - return Ok(()); + return; } // Update language_servers collection with Running variant of LanguageServerState @@ -6703,13 +6593,15 @@ impl LspStore { cx.emit(LspStoreEvent::LanguageServerAdded(server_id)); if let Some((downstream_client, project_id)) = self.downstream_client.as_ref() { - downstream_client.send(proto::StartLanguageServer { - project_id: *project_id, - server: Some(proto::LanguageServer { - id: server_id.0 as u64, - name: language_server.name().to_string(), - }), - })?; + downstream_client + .send(proto::StartLanguageServer { + project_id: *project_id, + server: Some(proto::LanguageServer { + id: server_id.0 as u64, + name: language_server.name().to_string(), + }), + }) + .log_err(); } // Tell the language server about every open buffer in the worktree that matches the language. @@ -6756,16 +6648,18 @@ impl LspStore { let version = snapshot.version; let initial_snapshot = &snapshot.snapshot; let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap(); - language_server.notify::( - lsp::DidOpenTextDocumentParams { - text_document: lsp::TextDocumentItem::new( - uri, - adapter.language_id(&language.name()), - version, - initial_snapshot.text(), - ), - }, - )?; + language_server + .notify::( + lsp::DidOpenTextDocumentParams { + text_document: lsp::TextDocumentItem::new( + uri, + adapter.language_id(&language.name()), + version, + initial_snapshot.text(), + ), + }, + ) + .log_err(); buffer_handle.update(cx, |buffer, cx| { buffer.set_completion_triggers( @@ -6779,11 +6673,9 @@ impl LspStore { ) }); } - anyhow::Ok(()) - })?; + }); cx.notify(); - Ok(()) } fn buffer_snapshot_for_lsp_version( @@ -6878,7 +6770,7 @@ impl LspStore { }) } - pub fn register_supplementary_language_server( + fn register_supplementary_language_server( &mut self, id: LanguageServerId, name: LanguageServerName, @@ -6893,7 +6785,7 @@ impl LspStore { } } - pub fn unregister_supplementary_language_server( + fn unregister_supplementary_language_server( &mut self, id: LanguageServerId, cx: &mut ModelContext, @@ -7807,11 +7699,8 @@ impl LspAdapter for SshLspAdapter { ) -> Result { anyhow::bail!("SshLspAdapter does not support fetch_server_binary") } - - async fn installation_test_binary(&self, _: PathBuf) -> Option { - None - } } + pub fn language_server_settings<'a, 'b: 'a>( delegate: &'a dyn LspAdapterDelegate, language: &LanguageServerName, @@ -7855,22 +7744,6 @@ impl LocalLspAdapterDelegate { Self::new(lsp_store, worktree, http_client, local.fs.clone(), cx) } - // fn for_ssh( - // lsp_store: &LspStore, - // worktree: &Model, - // upstream_client: AnyProtoClient, - // cx: &mut ModelContext, - // ) -> Arc { - // Self::new( - // lsp_store, - // worktree, - // Arc::new(BlockedHttpClient), - // None, - // Some(upstream_client), - // cx, - // ) - // } - pub fn new( lsp_store: &LspStore, worktree: &Model, @@ -7972,6 +7845,19 @@ impl LspAdapterDelegate for LocalLspAdapterDelegate { .update_lsp_status(server_name, status); } + async fn language_server_download_dir(&self, name: &LanguageServerName) -> Option> { + let dir = self.language_registry.language_server_download_dir(name)?; + + if !dir.exists() { + smol::fs::create_dir_all(&dir) + .await + .context("failed to create container directory") + .log_err()?; + } + + Some(dir) + } + async fn read_text_file(&self, path: PathBuf) -> Result { if self.worktree.entry_for_path(&path).is_none() { return Err(anyhow!("no such path 
{path:?}")); @@ -8056,6 +7942,10 @@ impl LspAdapterDelegate for SshLspAdapterDelegate { Ok(()) } + async fn language_server_download_dir(&self, _: &LanguageServerName) -> Option> { + None + } + fn update_status( &self, server_name: LanguageServerName, diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index ee7f93a4f933c..8d95c8f2f1823 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -3958,14 +3958,6 @@ impl Project { self.lsp_store.read(cx).supplementary_language_servers() } - pub fn language_server_adapter_for_id( - &self, - id: LanguageServerId, - cx: &AppContext, - ) -> Option> { - self.lsp_store.read(cx).language_server_adapter_for_id(id) - } - pub fn language_server_for_id( &self, id: LanguageServerId, diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index 68593f8fab052..706d3afdce45e 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -55,7 +55,7 @@ pub struct NodeBinarySettings { pub npm_path: Option, /// If disabled, zed will download its own copy of node. #[serde(default)] - pub disable_path_lookup: Option, + pub ignore_system_version: Option, } #[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema)] @@ -143,7 +143,7 @@ const fn true_value() -> bool { pub struct BinarySettings { pub path: Option, pub arguments: Option>, - pub path_lookup: Option, + pub ignore_system_version: Option, } #[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)] diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index e3fe2baefa9b6..0f37e06f438f9 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -481,7 +481,7 @@ fn main() { cx.observe_global::(move |cx| { let settings = &ProjectSettings::get_global(cx).node; let options = NodeBinaryOptions { - allow_path_lookup: !settings.disable_path_lookup.unwrap_or_default(), + allow_path_lookup: !settings.ignore_system_version.unwrap_or_default(), // TODO: Expose this setting allow_binary_download: true, use_paths: settings.path.as_ref().map(|node_path| { diff --git a/docs/src/languages/rust.md b/docs/src/languages/rust.md index 233c378dae06d..02e90d60a403b 100644 --- a/docs/src/languages/rust.md +++ b/docs/src/languages/rust.md @@ -64,14 +64,14 @@ You can configure which `rust-analyzer` binary Zed should use. By default, Zed will try to find a `rust-analyzer` in your `$PATH` and try to use that. If that binary successfully executes `rust-analyzer --help`, it's used. Otherwise, Zed will fall back to installing its own `rust-analyzer` version and using that. -If you want to disable Zed looking for a `rust-analyzer` binary, you can set `path_lookup` to `false` in your `settings.json`: +If you want to disable Zed looking for a `rust-analyzer` binary, you can set `ignore_system_version` to `true` in your `settings.json`: ```json { "lsp": { "rust-analyzer": { "binary": { - "path_lookup": false + "ignore_system_version": true } } } From 1eddd2f38d844f50af4ff0e76ddab63687519d7f Mon Sep 17 00:00:00 2001 From: Richard Feldman Date: Wed, 25 Sep 2024 15:21:00 -0400 Subject: [PATCH 072/228] Fix file descriptors leak in evals (#18351) Fixes an issue where evals were hitting "too many open files" errors because we were adding (and detaching) new directory watches for each project. Now we add those watches globally/at the worktree level, and we store the tasks so they stop watching on drop. 
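In short, the leak goes away by keeping the spawned watch tasks instead of detaching them. A minimal sketch of that pattern (an illustrative `Watcher` type rather than the exact `SnippetProvider` code; it assumes gpui's usual semantics that a `Task` is cancelled when dropped unless it is explicitly detached):

```rust
use anyhow::Result;
use gpui::{ModelContext, Task};

struct Watcher {
    // Holding the task handles ties each watch to the owner's lifetime:
    // dropping `Watcher` drops the tasks, the watches stop, and their
    // file descriptors are released.
    watch_tasks: Vec<Task<Result<()>>>,
}

impl Watcher {
    fn watch_directory(&mut self, cx: &mut ModelContext<Self>) {
        // Before this change the spawned task was `.detach()`ed, so every
        // project that set up a watch kept it alive for the rest of the process.
        self.watch_tasks.push(cx.spawn(|_this, _cx| async move {
            // ...poll fs events here and update the model...
            Ok(())
        }));
    }
}
```

The actual change below applies this to `SnippetProvider::watch_directory` and introduces a single `GlobalSnippetWatcher` so the global snippets directory is watched once per process instead of once per project.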
Release Notes: - N/A --------- Co-authored-by: Max Co-authored-by: Piotr Co-authored-by: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> --- Cargo.lock | 1 + .../src/platform/mac/attributed_string.rs | 4 +- crates/project/src/project.rs | 10 +--- crates/snippet_provider/Cargo.toml | 1 + crates/snippet_provider/src/lib.rs | 55 +++++++++++++++---- 5 files changed, 49 insertions(+), 22 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 41b2d6d452af5..26b979ccf72aa 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -10498,6 +10498,7 @@ dependencies = [ "futures 0.3.30", "gpui", "parking_lot", + "paths", "serde", "serde_json", "snippet", diff --git a/crates/gpui/src/platform/mac/attributed_string.rs b/crates/gpui/src/platform/mac/attributed_string.rs index 663ce67d4cd41..3f1185bc145cf 100644 --- a/crates/gpui/src/platform/mac/attributed_string.rs +++ b/crates/gpui/src/platform/mac/attributed_string.rs @@ -70,9 +70,7 @@ mod tests { unsafe { let image: id = msg_send![class!(NSImage), alloc]; - image.initWithContentsOfFile_( - NSString::alloc(nil).init_str("/Users/rtfeldman/Downloads/test.jpeg"), - ); + image.initWithContentsOfFile_(NSString::alloc(nil).init_str("test.jpeg")); let _size = image.size(); let string = NSString::alloc(nil).init_str("Test String"); diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 8d95c8f2f1823..10fd88f286d99 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -587,10 +587,7 @@ impl Project { cx.spawn(move |this, cx| Self::send_buffer_ordered_messages(this, rx, cx)) .detach(); let tasks = Inventory::new(cx); - let global_snippets_dir = paths::config_dir().join("snippets"); - let snippets = - SnippetProvider::new(fs.clone(), BTreeSet::from_iter([global_snippets_dir]), cx); - + let snippets = SnippetProvider::new(fs.clone(), BTreeSet::from_iter([]), cx); let worktree_store = cx.new_model(|_| WorktreeStore::local(false, fs.clone())); cx.subscribe(&worktree_store, Self::on_worktree_store_event) .detach(); @@ -875,9 +872,8 @@ impl Project { let this = cx.new_model(|cx| { let replica_id = response.payload.replica_id as ReplicaId; let tasks = Inventory::new(cx); - let global_snippets_dir = paths::config_dir().join("snippets"); - let snippets = - SnippetProvider::new(fs.clone(), BTreeSet::from_iter([global_snippets_dir]), cx); + + let snippets = SnippetProvider::new(fs.clone(), BTreeSet::from_iter([]), cx); let mut worktrees = Vec::new(); for worktree in response.payload.worktrees { diff --git a/crates/snippet_provider/Cargo.toml b/crates/snippet_provider/Cargo.toml index 75b7210a7afa5..95ab19ebb6f99 100644 --- a/crates/snippet_provider/Cargo.toml +++ b/crates/snippet_provider/Cargo.toml @@ -15,6 +15,7 @@ fs.workspace = true futures.workspace = true gpui.workspace = true parking_lot.workspace = true +paths.workspace = true serde.workspace = true serde_json.workspace = true snippet.workspace = true diff --git a/crates/snippet_provider/src/lib.rs b/crates/snippet_provider/src/lib.rs index 17d615866a430..a18f9ff1b6f89 100644 --- a/crates/snippet_provider/src/lib.rs +++ b/crates/snippet_provider/src/lib.rs @@ -130,8 +130,29 @@ async fn initial_scan( pub struct SnippetProvider { fs: Arc, snippets: HashMap>>>, + watch_tasks: Vec>>, } +// Watches global snippet directory, is created just once and reused across multiple projects +struct GlobalSnippetWatcher(Model); + +impl GlobalSnippetWatcher { + fn new(fs: Arc, cx: &mut AppContext) -> Self { + let global_snippets_dir = paths::config_dir().join("snippets"); + let 
provider = cx.new_model(|_cx| SnippetProvider { + fs, + snippets: Default::default(), + watch_tasks: vec![], + }); + provider.update(cx, |this, cx| { + this.watch_directory(&global_snippets_dir, cx) + }); + Self(provider) + } +} + +impl gpui::Global for GlobalSnippetWatcher {} + impl SnippetProvider { pub fn new( fs: Arc, @@ -139,29 +160,29 @@ impl SnippetProvider { cx: &mut AppContext, ) -> Model { cx.new_model(move |cx| { + if !cx.has_global::() { + let global_watcher = GlobalSnippetWatcher::new(fs.clone(), cx); + cx.set_global(global_watcher); + } let mut this = Self { fs, + watch_tasks: Vec::new(), snippets: Default::default(), }; - let mut task_handles = vec![]; for dir in dirs_to_watch { - task_handles.push(this.watch_directory(&dir, cx)); + this.watch_directory(&dir, cx); } - cx.spawn(|_, _| async move { - futures::future::join_all(task_handles).await; - }) - .detach(); this }) } /// Add directory to be watched for content changes - fn watch_directory(&mut self, path: &Path, cx: &mut ModelContext) -> Task> { + fn watch_directory(&mut self, path: &Path, cx: &mut ModelContext) { let path: Arc = Arc::from(path); - cx.spawn(|this, mut cx| async move { + self.watch_tasks.push(cx.spawn(|this, mut cx| async move { let fs = this.update(&mut cx, |this, _| this.fs.clone())?; let watched_path = path.clone(); let watcher = fs.watch(&watched_path, Duration::from_secs(1)); @@ -177,10 +198,10 @@ impl SnippetProvider { .await?; } Ok(()) - }) + })); } - fn lookup_snippets<'a>( + fn lookup_snippets<'a, const LOOKUP_GLOBALS: bool>( &'a self, language: &'a SnippetKind, cx: &AppContext, @@ -193,6 +214,16 @@ impl SnippetProvider { .into_iter() .flat_map(|(_, snippets)| snippets.into_iter()) .collect(); + if LOOKUP_GLOBALS { + if let Some(global_watcher) = cx.try_global::() { + user_snippets.extend( + global_watcher + .0 + .read(cx) + .lookup_snippets::(language, cx), + ); + } + } let Some(registry) = SnippetRegistry::try_global(cx) else { return user_snippets; @@ -205,11 +236,11 @@ impl SnippetProvider { } pub fn snippets_for(&self, language: SnippetKind, cx: &AppContext) -> Vec> { - let mut requested_snippets = self.lookup_snippets(&language, cx); + let mut requested_snippets = self.lookup_snippets::(&language, cx); if language.is_some() { // Look up global snippets as well. - requested_snippets.extend(self.lookup_snippets(&None, cx)); + requested_snippets.extend(self.lookup_snippets::(&None, cx)); } requested_snippets } From dc7c49bd0b386e6303472542a44aaeba0c2c0526 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 25 Sep 2024 15:25:57 -0400 Subject: [PATCH 073/228] Pin actions/stale action to 28ca103 (#18356) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [actions/stale](https://redirect.github.com/actions/stale) | action | pinDigest | -> `28ca103` | --- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. 
--- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- .github/workflows/close_stale_issues.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/close_stale_issues.yml b/.github/workflows/close_stale_issues.yml index 2d4085524bb8d..bbafb6c9851c4 100644 --- a/.github/workflows/close_stale_issues.yml +++ b/.github/workflows/close_stale_issues.yml @@ -8,7 +8,7 @@ jobs: stale: runs-on: ubuntu-latest steps: - - uses: actions/stale@v9 + - uses: actions/stale@28ca1036281a5e5922ead5184a1bbf96e5fc984e # v9 with: repo-token: ${{ secrets.GITHUB_TOKEN }} stale-issue-message: > From ae6a3d15af5814eaa5602c23314499b73a8329e4 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Wed, 25 Sep 2024 12:45:41 -0700 Subject: [PATCH 074/228] Make python run local worktree LSPs (#18353) Release Notes: - Python: made it possible to use locally installed `pyright` if available --------- Co-authored-by: conrad --- crates/language/src/language.rs | 4 +++ crates/languages/src/python.rs | 21 +++++++++++ crates/node_runtime/src/node_runtime.rs | 3 +- crates/project/src/lsp_store.rs | 46 +++++++++++++++++++++++++ 4 files changed, 73 insertions(+), 1 deletion(-) diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 4c75ef4eeb38d..fad799da19898 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -313,6 +313,10 @@ pub trait LspAdapterDelegate: Send + Sync { fn update_status(&self, language: LanguageServerName, status: LanguageServerBinaryStatus); async fn language_server_download_dir(&self, name: &LanguageServerName) -> Option>; + async fn npm_package_installed_version( + &self, + package_name: &str, + ) -> Result>; async fn which(&self, command: &OsStr) -> Option; async fn shell_env(&self) -> HashMap; async fn read_text_file(&self, path: PathBuf) -> Result; diff --git a/crates/languages/src/python.rs b/crates/languages/src/python.rs index 964abf42b525f..4b5fe3d277cd3 100644 --- a/crates/languages/src/python.rs +++ b/crates/languages/src/python.rs @@ -20,6 +20,7 @@ use task::{TaskTemplate, TaskTemplates, VariableName}; use util::ResultExt; const SERVER_PATH: &str = "node_modules/pyright/langserver.index.js"; +const NODE_MODULE_RELATIVE_SERVER_PATH: &str = "pyright/langserver.index.js"; fn server_binary_arguments(server_path: &Path) -> Vec { vec![server_path.into(), "--stdio".into()] @@ -43,6 +44,26 @@ impl LspAdapter for PythonLspAdapter { Self::SERVER_NAME.clone() } + async fn check_if_user_installed( + &self, + delegate: &dyn LspAdapterDelegate, + _: &AsyncAppContext, + ) -> Option { + let node = delegate.which("node".as_ref()).await?; + let (node_modules_path, _) = delegate + .npm_package_installed_version(Self::SERVER_NAME.as_ref()) + .await + .log_err()??; + + let path = node_modules_path.join(NODE_MODULE_RELATIVE_SERVER_PATH); + + Some(LanguageServerBinary { + path: node, + env: None, + arguments: server_binary_arguments(&path), + }) + } + async fn fetch_latest_server_version( &self, _: &dyn LspAdapterDelegate, diff --git a/crates/node_runtime/src/node_runtime.rs b/crates/node_runtime/src/node_runtime.rs index 9507eb7536485..0f0512c65ee71 100644 --- a/crates/node_runtime/src/node_runtime.rs +++ b/crates/node_runtime/src/node_runtime.rs @@ -177,6 +177,7 @@ impl NodeRuntime { "5000", ]); + // This is also wrong because the directory is wrong. 
self.run_npm_subcommand(directory, "install", &arguments) .await?; Ok(()) @@ -576,7 +577,7 @@ impl NodeRuntimeTrait for SystemNodeRuntime { } } -async fn read_package_installed_version( +pub async fn read_package_installed_version( node_module_directory: PathBuf, name: &str, ) -> Result> { diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 21d5de53e6be2..bef57bafb48cf 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -48,6 +48,7 @@ use lsp::{ LspRequestFuture, MessageActionItem, MessageType, OneOf, ServerHealthStatus, ServerStatus, SymbolKind, TextEdit, Url, WorkDoneProgressCancelParams, WorkspaceFolder, }; +use node_runtime::read_package_installed_version; use parking_lot::{Mutex, RwLock}; use postage::watch; use rand::prelude::*; @@ -7801,6 +7802,44 @@ impl LspAdapterDelegate for LocalLspAdapterDelegate { task.await.unwrap_or_default() } + async fn npm_package_installed_version( + &self, + package_name: &str, + ) -> Result> { + let local_package_directory = self.worktree_root_path(); + let node_modules_directory = local_package_directory.join("node_modules"); + + if let Some(version) = + read_package_installed_version(node_modules_directory.clone(), package_name).await? + { + return Ok(Some((node_modules_directory, version))); + } + let Some(npm) = self.which("npm".as_ref()).await else { + log::warn!( + "Failed to find npm executable for {:?}", + local_package_directory + ); + return Ok(None); + }; + + let env = self.shell_env().await; + let output = smol::process::Command::new(&npm) + .args(["root", "-g"]) + .envs(env) + .current_dir(local_package_directory) + .output() + .await?; + let global_node_modules = + PathBuf::from(String::from_utf8_lossy(&output.stdout).to_string()); + + if let Some(version) = + read_package_installed_version(global_node_modules.clone(), package_name).await? + { + return Ok(Some((global_node_modules, version))); + } + return Ok(None); + } + #[cfg(not(target_os = "windows"))] async fn which(&self, command: &OsStr) -> Option { let worktree_abs_path = self.worktree.abs_path(); @@ -7883,6 +7922,13 @@ impl LspAdapterDelegate for SshLspAdapterDelegate { .ok(); } + async fn npm_package_installed_version( + &self, + _package_name: &str, + ) -> Result> { + Ok(None) + } + fn http_client(&self) -> Arc { Arc::new(BlockedHttpClient) } From 21a023980d39b0113d39331942eb05f365af8bc0 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 25 Sep 2024 12:50:38 -0700 Subject: [PATCH 075/228] Expand git diffs when clicking the gutter strip, display their controls in a block above (#18313) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Todo: * [x] Tooltips for hunk buttons * [x] Buttons to go to next and previous hunk * [x] Ellipsis button that opens a context menu with `Revert all` /cc @iamnbutler @danilo-leal for design 👀 Release Notes: - Changed the behavior of the git gutter so that diff hunk are expanded immediately when clicking the gutter, and hunk controls are displayed above the hunk. 
--------- Co-authored-by: Marshall Co-authored-by: Marshall Bowers --- crates/collab/src/tests/editor_tests.rs | 289 +----------- crates/editor/src/editor.rs | 126 ++--- crates/editor/src/editor_tests.rs | 211 ++++----- crates/editor/src/element.rs | 95 +--- crates/editor/src/hunk_diff.rs | 597 +++++++++++++++--------- 5 files changed, 570 insertions(+), 748 deletions(-) diff --git a/crates/collab/src/tests/editor_tests.rs b/crates/collab/src/tests/editor_tests.rs index 121c93656305a..d2835edc619e2 100644 --- a/crates/collab/src/tests/editor_tests.rs +++ b/crates/collab/src/tests/editor_tests.rs @@ -7,18 +7,12 @@ use collections::HashMap; use editor::{ actions::{ ConfirmCodeAction, ConfirmCompletion, ConfirmRename, ContextMenuFirst, Redo, Rename, - RevertSelectedHunks, ToggleCodeActions, Undo, - }, - display_map::DisplayRow, - test::{ - editor_hunks, - editor_test_context::{AssertionContextManager, EditorTestContext}, - expanded_hunks, expanded_hunks_background_highlights, + ToggleCodeActions, Undo, }, + test::editor_test_context::{AssertionContextManager, EditorTestContext}, Editor, }; use futures::StreamExt; -use git::diff::DiffHunkStatus; use gpui::{TestAppContext, UpdateGlobal, VisualContext, VisualTestContext}; use indoc::indoc; use language::{ @@ -1970,285 +1964,6 @@ async fn test_inlay_hint_refresh_is_forwarded( }); } -#[gpui::test] -async fn test_multiple_hunk_types_revert(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) { - let mut server = TestServer::start(cx_a.executor()).await; - let client_a = server.create_client(cx_a, "user_a").await; - let client_b = server.create_client(cx_b, "user_b").await; - server - .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) - .await; - let active_call_a = cx_a.read(ActiveCall::global); - let active_call_b = cx_b.read(ActiveCall::global); - - cx_a.update(editor::init); - cx_b.update(editor::init); - - client_a.language_registry().add(rust_lang()); - client_b.language_registry().add(rust_lang()); - - let base_text = indoc! 
{r#"struct Row; -struct Row1; -struct Row2; - -struct Row4; -struct Row5; -struct Row6; - -struct Row8; -struct Row9; -struct Row10;"#}; - - client_a - .fs() - .insert_tree( - "/a", - json!({ - "main.rs": base_text, - }), - ) - .await; - let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await; - active_call_a - .update(cx_a, |call, cx| call.set_location(Some(&project_a), cx)) - .await - .unwrap(); - let project_id = active_call_a - .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) - .await - .unwrap(); - - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; - active_call_b - .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx)) - .await - .unwrap(); - - let (workspace_a, cx_a) = client_a.build_workspace(&project_a, cx_a); - let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); - - let editor_a = workspace_a - .update(cx_a, |workspace, cx| { - workspace.open_path((worktree_id, "main.rs"), None, true, cx) - }) - .await - .unwrap() - .downcast::() - .unwrap(); - - let editor_b = workspace_b - .update(cx_b, |workspace, cx| { - workspace.open_path((worktree_id, "main.rs"), None, true, cx) - }) - .await - .unwrap() - .downcast::() - .unwrap(); - - let mut editor_cx_a = EditorTestContext { - cx: cx_a.clone(), - window: cx_a.handle(), - editor: editor_a, - assertion_cx: AssertionContextManager::new(), - }; - let mut editor_cx_b = EditorTestContext { - cx: cx_b.clone(), - window: cx_b.handle(), - editor: editor_b, - assertion_cx: AssertionContextManager::new(), - }; - - // host edits the file, that differs from the base text, producing diff hunks - editor_cx_a.set_state(indoc! {r#"struct Row; - struct Row0.1; - struct Row0.2; - struct Row1; - - struct Row4; - struct Row5444; - struct Row6; - - struct Row9; - struct Row1220;ˇ"#}); - editor_cx_a.update_editor(|editor, cx| { - editor - .buffer() - .read(cx) - .as_singleton() - .unwrap() - .update(cx, |buffer, cx| { - buffer.set_diff_base(Some(base_text.into()), cx); - }); - }); - editor_cx_b.update_editor(|editor, cx| { - editor - .buffer() - .read(cx) - .as_singleton() - .unwrap() - .update(cx, |buffer, cx| { - buffer.set_diff_base(Some(base_text.into()), cx); - }); - }); - cx_a.executor().run_until_parked(); - cx_b.executor().run_until_parked(); - - // the client selects a range in the updated buffer, expands it to see the diff for each hunk in the selection - // the host does not see the diffs toggled - editor_cx_b.set_selections_state(indoc! 
{r#"«ˇstruct Row; - struct Row0.1; - struct Row0.2; - struct Row1; - - struct Row4; - struct Row5444; - struct Row6; - - struct R»ow9; - struct Row1220;"#}); - editor_cx_b - .update_editor(|editor, cx| editor.toggle_hunk_diff(&editor::actions::ToggleHunkDiff, cx)); - cx_a.executor().run_until_parked(); - cx_b.executor().run_until_parked(); - editor_cx_a.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!(expanded_hunks_background_highlights(editor, cx), Vec::new()); - assert_eq!( - all_hunks, - vec![ - ( - "".to_string(), - DiffHunkStatus::Added, - DisplayRow(1)..DisplayRow(3) - ), - ( - "struct Row2;\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(4)..DisplayRow(4) - ), - ( - "struct Row5;\n".to_string(), - DiffHunkStatus::Modified, - DisplayRow(6)..DisplayRow(7) - ), - ( - "struct Row8;\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(9)..DisplayRow(9) - ), - ( - "struct Row10;".to_string(), - DiffHunkStatus::Modified, - DisplayRow(10)..DisplayRow(10), - ), - ] - ); - assert_eq!(all_expanded_hunks, Vec::new()); - }); - editor_cx_b.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(1)..=DisplayRow(2), DisplayRow(8)..=DisplayRow(8)], - ); - assert_eq!( - all_hunks, - vec![ - ( - "".to_string(), - DiffHunkStatus::Added, - DisplayRow(1)..DisplayRow(3) - ), - ( - "struct Row2;\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(5)..DisplayRow(5) - ), - ( - "struct Row5;\n".to_string(), - DiffHunkStatus::Modified, - DisplayRow(8)..DisplayRow(9) - ), - ( - "struct Row8;\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(12)..DisplayRow(12) - ), - ( - "struct Row10;".to_string(), - DiffHunkStatus::Modified, - DisplayRow(13)..DisplayRow(13), - ), - ] - ); - assert_eq!(all_expanded_hunks, &all_hunks[..all_hunks.len() - 1]); - }); - - // the client reverts the hunks, removing the expanded diffs too - // both host and the client observe the reverted state (with one hunk left, not covered by client's selection) - editor_cx_b.update_editor(|editor, cx| { - editor.revert_selected_hunks(&RevertSelectedHunks, cx); - }); - cx_a.executor().run_until_parked(); - cx_b.executor().run_until_parked(); - editor_cx_a.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!(expanded_hunks_background_highlights(editor, cx), Vec::new()); - assert_eq!( - all_hunks, - vec![( - "struct Row10;".to_string(), - DiffHunkStatus::Modified, - DisplayRow(10)..DisplayRow(10), - )] - ); - assert_eq!(all_expanded_hunks, Vec::new()); - }); - editor_cx_b.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!(expanded_hunks_background_highlights(editor, cx), Vec::new()); - assert_eq!( - all_hunks, - vec![( - "struct Row10;".to_string(), - DiffHunkStatus::Modified, - DisplayRow(10)..DisplayRow(10), - )] - ); - assert_eq!(all_expanded_hunks, Vec::new()); - }); - editor_cx_a.assert_editor_state(indoc! 
{r#"struct Row; - struct Row1; - struct Row2; - - struct Row4; - struct Row5; - struct Row6; - - struct Row8; - struct Row9; - struct Row1220;ˇ"#}); - editor_cx_b.assert_editor_state(indoc! {r#"«ˇstruct Row; - struct Row1; - struct Row2; - - struct Row4; - struct Row5; - struct Row6; - - struct Row8; - struct R»ow9; - struct Row1220;"#}); -} - #[gpui::test(iterations = 10)] async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) { let mut server = TestServer::start(cx_a.executor()).await; diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index ad5cd24d73ac4..78c8ba6920337 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -154,7 +154,7 @@ use theme::{ }; use ui::{ h_flex, prelude::*, ButtonSize, ButtonStyle, Disclosure, IconButton, IconName, IconSize, - ListItem, Popover, Tooltip, + ListItem, Popover, PopoverMenuHandle, Tooltip, }; use util::{defer, maybe, post_inc, RangeExt, ResultExt, TryFutureExt}; use workspace::item::{ItemHandle, PreviewTabsSettings}; @@ -562,6 +562,7 @@ pub struct Editor { nav_history: Option, context_menu: RwLock>, mouse_context_menu: Option, + hunk_controls_menu_handle: PopoverMenuHandle, completion_tasks: Vec<(CompletionId, Task>)>, signature_help_state: SignatureHelpState, auto_signature_help: Option, @@ -1938,6 +1939,7 @@ impl Editor { nav_history: None, context_menu: RwLock::new(None), mouse_context_menu: None, + hunk_controls_menu_handle: PopoverMenuHandle::default(), completion_tasks: Default::default(), signature_help_state: SignatureHelpState::default(), auto_signature_help: None, @@ -5383,23 +5385,6 @@ impl Editor { })) } - fn close_hunk_diff_button( - &self, - hunk: HoveredHunk, - row: DisplayRow, - cx: &mut ViewContext, - ) -> IconButton { - IconButton::new( - ("close_hunk_diff_indicator", row.0 as usize), - ui::IconName::Close, - ) - .shape(ui::IconButtonShape::Square) - .icon_size(IconSize::XSmall) - .icon_color(Color::Muted) - .tooltip(|cx| Tooltip::for_action("Close hunk diff", &ToggleHunkDiff, cx)) - .on_click(cx.listener(move |editor, _e, cx| editor.toggle_hovered_hunk(&hunk, cx))) - } - pub fn context_menu_visible(&self) -> bool { self.context_menu .read() @@ -9335,32 +9320,42 @@ impl Editor { } } - fn go_to_hunk(&mut self, _: &GoToHunk, cx: &mut ViewContext) { + fn go_to_next_hunk(&mut self, _: &GoToHunk, cx: &mut ViewContext) { let snapshot = self .display_map .update(cx, |display_map, cx| display_map.snapshot(cx)); let selection = self.selections.newest::(cx); + self.go_to_hunk_after_position(&snapshot, selection.head(), cx); + } - if !self.seek_in_direction( - &snapshot, - selection.head(), + fn go_to_hunk_after_position( + &mut self, + snapshot: &DisplaySnapshot, + position: Point, + cx: &mut ViewContext<'_, Editor>, + ) -> Option { + if let Some(hunk) = self.go_to_next_hunk_in_direction( + snapshot, + position, false, - snapshot.buffer_snapshot.git_diff_hunks_in_range( - MultiBufferRow(selection.head().row + 1)..MultiBufferRow::MAX, - ), + snapshot + .buffer_snapshot + .git_diff_hunks_in_range(MultiBufferRow(position.row + 1)..MultiBufferRow::MAX), cx, ) { - let wrapped_point = Point::zero(); - self.seek_in_direction( - &snapshot, - wrapped_point, - true, - snapshot.buffer_snapshot.git_diff_hunks_in_range( - MultiBufferRow(wrapped_point.row + 1)..MultiBufferRow::MAX, - ), - cx, - ); + return Some(hunk); } + + let wrapped_point = Point::zero(); + self.go_to_next_hunk_in_direction( + snapshot, + wrapped_point, + true, + 
snapshot.buffer_snapshot.git_diff_hunks_in_range( + MultiBufferRow(wrapped_point.row + 1)..MultiBufferRow::MAX, + ), + cx, + ) } fn go_to_prev_hunk(&mut self, _: &GoToPrevHunk, cx: &mut ViewContext) { @@ -9369,52 +9364,65 @@ impl Editor { .update(cx, |display_map, cx| display_map.snapshot(cx)); let selection = self.selections.newest::(cx); - if !self.seek_in_direction( - &snapshot, - selection.head(), + self.go_to_hunk_before_position(&snapshot, selection.head(), cx); + } + + fn go_to_hunk_before_position( + &mut self, + snapshot: &DisplaySnapshot, + position: Point, + cx: &mut ViewContext<'_, Editor>, + ) -> Option { + if let Some(hunk) = self.go_to_next_hunk_in_direction( + snapshot, + position, false, - snapshot.buffer_snapshot.git_diff_hunks_in_range_rev( - MultiBufferRow(0)..MultiBufferRow(selection.head().row), - ), + snapshot + .buffer_snapshot + .git_diff_hunks_in_range_rev(MultiBufferRow(0)..MultiBufferRow(position.row)), cx, ) { - let wrapped_point = snapshot.buffer_snapshot.max_point(); - self.seek_in_direction( - &snapshot, - wrapped_point, - true, - snapshot.buffer_snapshot.git_diff_hunks_in_range_rev( - MultiBufferRow(0)..MultiBufferRow(wrapped_point.row), - ), - cx, - ); + return Some(hunk); } + + let wrapped_point = snapshot.buffer_snapshot.max_point(); + self.go_to_next_hunk_in_direction( + snapshot, + wrapped_point, + true, + snapshot + .buffer_snapshot + .git_diff_hunks_in_range_rev(MultiBufferRow(0)..MultiBufferRow(wrapped_point.row)), + cx, + ) } - fn seek_in_direction( + fn go_to_next_hunk_in_direction( &mut self, snapshot: &DisplaySnapshot, initial_point: Point, is_wrapped: bool, hunks: impl Iterator, cx: &mut ViewContext, - ) -> bool { + ) -> Option { let display_point = initial_point.to_display_point(snapshot); let mut hunks = hunks - .map(|hunk| diff_hunk_to_display(&hunk, snapshot)) - .filter(|hunk| is_wrapped || !hunk.contains_display_row(display_point.row())) + .map(|hunk| (diff_hunk_to_display(&hunk, snapshot), hunk)) + .filter(|(display_hunk, _)| { + is_wrapped || !display_hunk.contains_display_row(display_point.row()) + }) .dedup(); - if let Some(hunk) = hunks.next() { + if let Some((display_hunk, hunk)) = hunks.next() { self.change_selections(Some(Autoscroll::fit()), cx, |s| { - let row = hunk.start_display_row(); + let row = display_hunk.start_display_row(); let point = DisplayPoint::new(row, 0); s.select_display_ranges([point..point]); }); - true + Some(hunk) } else { - false + None } } diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 5927c22cb0843..de1b12abe0077 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -9623,7 +9623,7 @@ async fn go_to_hunk(executor: BackgroundExecutor, cx: &mut gpui::TestAppContext) cx.update_editor(|editor, cx| { //Wrap around the bottom of the buffer for _ in 0..3 { - editor.go_to_hunk(&GoToHunk, cx); + editor.go_to_next_hunk(&GoToHunk, cx); } }); @@ -9709,7 +9709,7 @@ async fn go_to_hunk(executor: BackgroundExecutor, cx: &mut gpui::TestAppContext) //Make sure that the fold only gets one hunk for _ in 0..4 { - editor.go_to_hunk(&GoToHunk, cx); + editor.go_to_next_hunk(&GoToHunk, cx); } }); @@ -11226,7 +11226,7 @@ async fn test_toggle_hunk_diff(executor: BackgroundExecutor, cx: &mut gpui::Test cx.update_editor(|editor, cx| { for _ in 0..4 { - editor.go_to_hunk(&GoToHunk, cx); + editor.go_to_next_hunk(&GoToHunk, cx); editor.toggle_hunk_diff(&ToggleHunkDiff, cx); } }); @@ -11249,18 +11249,13 @@ async fn test_toggle_hunk_diff(executor: 
BackgroundExecutor, cx: &mut gpui::Test let snapshot = editor.snapshot(cx); let all_hunks = editor_hunks(editor, &snapshot, cx); let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(1)..=DisplayRow(1), DisplayRow(7)..=DisplayRow(7), DisplayRow(9)..=DisplayRow(9)], - "After expanding, all git additions should be highlighted for Modified (split into added and removed) and Added hunks" - ); assert_eq!( all_hunks, vec![ - ("use some::mod;\n".to_string(), DiffHunkStatus::Modified, DisplayRow(1)..DisplayRow(2)), - ("const A: u32 = 42;\n".to_string(), DiffHunkStatus::Removed, DisplayRow(4)..DisplayRow(4)), - (" println!(\"hello\");\n".to_string(), DiffHunkStatus::Modified, DisplayRow(7)..DisplayRow(8)), - ("".to_string(), DiffHunkStatus::Added, DisplayRow(9)..DisplayRow(10)), + ("use some::mod;\n".to_string(), DiffHunkStatus::Modified, DisplayRow(2)..DisplayRow(3)), + ("const A: u32 = 42;\n".to_string(), DiffHunkStatus::Removed, DisplayRow(6)..DisplayRow(6)), + (" println!(\"hello\");\n".to_string(), DiffHunkStatus::Modified, DisplayRow(10)..DisplayRow(11)), + ("".to_string(), DiffHunkStatus::Added, DisplayRow(13)..DisplayRow(14)), ], "After expanding, all hunks' display rows should have shifted by the amount of deleted lines added \ (from modified and removed hunks)" @@ -11269,6 +11264,11 @@ async fn test_toggle_hunk_diff(executor: BackgroundExecutor, cx: &mut gpui::Test all_hunks, all_expanded_hunks, "Editor hunks should not change and all be expanded" ); + assert_eq!( + expanded_hunks_background_highlights(editor, cx), + vec![DisplayRow(2)..=DisplayRow(2), DisplayRow(10)..=DisplayRow(10), DisplayRow(13)..=DisplayRow(13)], + "After expanding, all git additions should be highlighted for Modified (split into added and removed) and Added hunks" + ); }); cx.update_editor(|editor, cx| { @@ -11311,7 +11311,7 @@ async fn test_toggled_diff_base_change( const B: u32 = 42; const C: u32 = 42; - fn main(ˇ) { + fn main() { println!("hello"); println!("world"); @@ -11356,9 +11356,9 @@ async fn test_toggled_diff_base_change( DisplayRow(3)..DisplayRow(3) ), ( - "fn main(ˇ) {\n println!(\"hello\");\n".to_string(), + " println!(\"hello\");\n".to_string(), DiffHunkStatus::Modified, - DisplayRow(5)..DisplayRow(7) + DisplayRow(6)..DisplayRow(7) ), ( "".to_string(), @@ -11390,22 +11390,18 @@ async fn test_toggled_diff_base_change( "# .unindent(), ); + cx.update_editor(|editor, cx| { let snapshot = editor.snapshot(cx); let all_hunks = editor_hunks(editor, &snapshot, cx); let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(9)..=DisplayRow(10), DisplayRow(13)..=DisplayRow(14)], - "After expanding, all git additions should be highlighted for Modified (split into added and removed) and Added hunks" - ); assert_eq!( all_hunks, vec![ - ("use some::mod1;\n".to_string(), DiffHunkStatus::Removed, DisplayRow(1)..DisplayRow(1)), - ("const B: u32 = 42;\n".to_string(), DiffHunkStatus::Removed, DisplayRow(5)..DisplayRow(5)), - ("fn main(ˇ) {\n println!(\"hello\");\n".to_string(), DiffHunkStatus::Modified, DisplayRow(9)..DisplayRow(11)), - ("".to_string(), DiffHunkStatus::Added, DisplayRow(13)..DisplayRow(15)), + ("use some::mod1;\n".to_string(), DiffHunkStatus::Removed, DisplayRow(2)..DisplayRow(2)), + ("const B: u32 = 42;\n".to_string(), DiffHunkStatus::Removed, DisplayRow(7)..DisplayRow(7)), + (" println!(\"hello\");\n".to_string(), 
DiffHunkStatus::Modified, DisplayRow(12)..DisplayRow(13)), + ("".to_string(), DiffHunkStatus::Added, DisplayRow(16)..DisplayRow(18)), ], "After expanding, all hunks' display rows should have shifted by the amount of deleted lines added \ (from modified and removed hunks)" @@ -11414,6 +11410,11 @@ async fn test_toggled_diff_base_change( all_hunks, all_expanded_hunks, "Editor hunks should not change and all be expanded" ); + assert_eq!( + expanded_hunks_background_highlights(editor, cx), + vec![DisplayRow(12)..=DisplayRow(12), DisplayRow(16)..=DisplayRow(17)], + "After expanding, all git additions should be highlighted for Modified (split into added and removed) and Added hunks" + ); }); cx.set_diff_base(Some("new diff base!")); @@ -11459,7 +11460,7 @@ async fn test_fold_unfold_diff(executor: BackgroundExecutor, cx: &mut gpui::Test const B: u32 = 42; const C: u32 = 42; - fn main(ˇ) { + fn main() { println!("hello"); println!("world"); @@ -11520,9 +11521,9 @@ async fn test_fold_unfold_diff(executor: BackgroundExecutor, cx: &mut gpui::Test DisplayRow(3)..DisplayRow(3) ), ( - "fn main(ˇ) {\n println!(\"hello\");\n".to_string(), + " println!(\"hello\");\n".to_string(), DiffHunkStatus::Modified, - DisplayRow(5)..DisplayRow(7) + DisplayRow(6)..DisplayRow(7) ), ( "".to_string(), @@ -11576,50 +11577,50 @@ async fn test_fold_unfold_diff(executor: BackgroundExecutor, cx: &mut gpui::Test let snapshot = editor.snapshot(cx); let all_hunks = editor_hunks(editor, &snapshot, cx); let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - vec![ - DisplayRow(9)..=DisplayRow(10), - DisplayRow(13)..=DisplayRow(14), - DisplayRow(19)..=DisplayRow(19) - ] - ); assert_eq!( all_hunks, vec![ ( "use some::mod1;\n".to_string(), DiffHunkStatus::Removed, - DisplayRow(1)..DisplayRow(1) + DisplayRow(2)..DisplayRow(2) ), ( "const B: u32 = 42;\n".to_string(), DiffHunkStatus::Removed, - DisplayRow(5)..DisplayRow(5) + DisplayRow(7)..DisplayRow(7) ), ( - "fn main(ˇ) {\n println!(\"hello\");\n".to_string(), + " println!(\"hello\");\n".to_string(), DiffHunkStatus::Modified, - DisplayRow(9)..DisplayRow(11) + DisplayRow(12)..DisplayRow(13) ), ( "".to_string(), DiffHunkStatus::Added, - DisplayRow(13)..DisplayRow(15) + DisplayRow(16)..DisplayRow(18) ), ( "".to_string(), DiffHunkStatus::Added, - DisplayRow(19)..DisplayRow(20) + DisplayRow(23)..DisplayRow(24) ), ( "fn another2() {\n".to_string(), DiffHunkStatus::Removed, - DisplayRow(23)..DisplayRow(23) + DisplayRow(28)..DisplayRow(28) ), ], ); assert_eq!(all_hunks, all_expanded_hunks); + assert_eq!( + expanded_hunks_background_highlights(editor, cx), + vec![ + DisplayRow(12)..=DisplayRow(12), + DisplayRow(16)..=DisplayRow(17), + DisplayRow(23)..=DisplayRow(23) + ] + ); }); cx.update_editor(|editor, cx| editor.fold_selected_ranges(&FoldSelectedRanges, cx)); @@ -11653,11 +11654,6 @@ async fn test_fold_unfold_diff(executor: BackgroundExecutor, cx: &mut gpui::Test let snapshot = editor.snapshot(cx); let all_hunks = editor_hunks(editor, &snapshot, cx); let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(0)..=DisplayRow(0), DisplayRow(5)..=DisplayRow(5)], - "Only one hunk is left not folded, its highlight should be visible" - ); assert_eq!( all_hunks, vec![ @@ -11672,7 +11668,7 @@ async fn test_fold_unfold_diff(executor: BackgroundExecutor, cx: &mut gpui::Test DisplayRow(0)..DisplayRow(0) ), ( - "fn main(ˇ) {\n 
println!(\"hello\");\n".to_string(), + " println!(\"hello\");\n".to_string(), DiffHunkStatus::Modified, DisplayRow(0)..DisplayRow(0) ), @@ -11684,12 +11680,12 @@ async fn test_fold_unfold_diff(executor: BackgroundExecutor, cx: &mut gpui::Test ( "".to_string(), DiffHunkStatus::Added, - DisplayRow(5)..DisplayRow(6) + DisplayRow(6)..DisplayRow(7) ), ( "fn another2() {\n".to_string(), DiffHunkStatus::Removed, - DisplayRow(9)..DisplayRow(9) + DisplayRow(11)..DisplayRow(11) ), ], "Hunk list should still return shifted folded hunks" @@ -11700,16 +11696,21 @@ async fn test_fold_unfold_diff(executor: BackgroundExecutor, cx: &mut gpui::Test ( "".to_string(), DiffHunkStatus::Added, - DisplayRow(5)..DisplayRow(6) + DisplayRow(6)..DisplayRow(7) ), ( "fn another2() {\n".to_string(), DiffHunkStatus::Removed, - DisplayRow(9)..DisplayRow(9) + DisplayRow(11)..DisplayRow(11) ), ], "Only non-folded hunks should be left expanded" ); + assert_eq!( + expanded_hunks_background_highlights(editor, cx), + vec![DisplayRow(0)..=DisplayRow(0), DisplayRow(6)..=DisplayRow(6)], + "Only one hunk is left not folded, its highlight should be visible" + ); }); cx.update_editor(|editor, cx| { @@ -11746,51 +11747,51 @@ async fn test_fold_unfold_diff(executor: BackgroundExecutor, cx: &mut gpui::Test let snapshot = editor.snapshot(cx); let all_hunks = editor_hunks(editor, &snapshot, cx); let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - vec![ - DisplayRow(9)..=DisplayRow(10), - DisplayRow(13)..=DisplayRow(14), - DisplayRow(19)..=DisplayRow(19) - ], - "After unfolding, all hunk diffs should be visible again" - ); assert_eq!( all_hunks, vec![ ( "use some::mod1;\n".to_string(), DiffHunkStatus::Removed, - DisplayRow(1)..DisplayRow(1) + DisplayRow(2)..DisplayRow(2) ), ( "const B: u32 = 42;\n".to_string(), DiffHunkStatus::Removed, - DisplayRow(5)..DisplayRow(5) + DisplayRow(7)..DisplayRow(7) ), ( - "fn main(ˇ) {\n println!(\"hello\");\n".to_string(), + " println!(\"hello\");\n".to_string(), DiffHunkStatus::Modified, - DisplayRow(9)..DisplayRow(11) + DisplayRow(12)..DisplayRow(13) ), ( "".to_string(), DiffHunkStatus::Added, - DisplayRow(13)..DisplayRow(15) + DisplayRow(16)..DisplayRow(18) ), ( "".to_string(), DiffHunkStatus::Added, - DisplayRow(19)..DisplayRow(20) + DisplayRow(23)..DisplayRow(24) ), ( "fn another2() {\n".to_string(), DiffHunkStatus::Removed, - DisplayRow(23)..DisplayRow(23) + DisplayRow(28)..DisplayRow(28) ), ], ); assert_eq!(all_hunks, all_expanded_hunks); + assert_eq!( + expanded_hunks_background_highlights(editor, cx), + vec![ + DisplayRow(12)..=DisplayRow(12), + DisplayRow(16)..=DisplayRow(17), + DisplayRow(23)..=DisplayRow(23) + ], + "After unfolding, all hunk diffs should be visible again" + ); }); } @@ -11940,17 +11941,17 @@ async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut gpui::TestAppContext) ( "bbbb\n".to_string(), DiffHunkStatus::Removed, - DisplayRow(5)..DisplayRow(5), + DisplayRow(6)..DisplayRow(6), ), ( "nnnn\n".to_string(), DiffHunkStatus::Modified, - DisplayRow(23)..DisplayRow(24), + DisplayRow(25)..DisplayRow(26), ), ( "".to_string(), DiffHunkStatus::Added, - DisplayRow(43)..DisplayRow(44), + DisplayRow(46)..DisplayRow(47), ), ]; @@ -11975,8 +11976,8 @@ async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut gpui::TestAppContext) assert_eq!( expanded_hunks_background_highlights(editor, cx), vec![ - DisplayRow(23)..=DisplayRow(23), - DisplayRow(43)..=DisplayRow(43) + DisplayRow(25)..=DisplayRow(25), + 
DisplayRow(46)..=DisplayRow(46) ], ); assert_eq!(all_hunks, expected_all_hunks_shifted); @@ -12007,8 +12008,8 @@ async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut gpui::TestAppContext) assert_eq!( expanded_hunks_background_highlights(editor, cx), vec![ - DisplayRow(23)..=DisplayRow(23), - DisplayRow(43)..=DisplayRow(43) + DisplayRow(25)..=DisplayRow(25), + DisplayRow(46)..=DisplayRow(46) ], ); assert_eq!(all_hunks, expected_all_hunks_shifted); @@ -12116,12 +12117,12 @@ async fn test_edits_around_toggled_additions( vec![( "".to_string(), DiffHunkStatus::Added, - DisplayRow(4)..DisplayRow(7) + DisplayRow(5)..DisplayRow(8) )] ); assert_eq!( expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(4)..=DisplayRow(6)] + vec![DisplayRow(5)..=DisplayRow(7)] ); assert_eq!(all_hunks, all_expanded_hunks); }); @@ -12156,12 +12157,12 @@ async fn test_edits_around_toggled_additions( vec![( "".to_string(), DiffHunkStatus::Added, - DisplayRow(4)..DisplayRow(8) + DisplayRow(5)..DisplayRow(9) )] ); assert_eq!( expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(4)..=DisplayRow(6)], + vec![DisplayRow(5)..=DisplayRow(7)], "Edited hunk should have one more line added" ); assert_eq!( @@ -12201,12 +12202,12 @@ async fn test_edits_around_toggled_additions( vec![( "".to_string(), DiffHunkStatus::Added, - DisplayRow(4)..DisplayRow(9) + DisplayRow(5)..DisplayRow(10) )] ); assert_eq!( expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(4)..=DisplayRow(6)], + vec![DisplayRow(5)..=DisplayRow(7)], "Edited hunk should have one more line added" ); assert_eq!(all_hunks, all_expanded_hunks); @@ -12245,12 +12246,12 @@ async fn test_edits_around_toggled_additions( vec![( "".to_string(), DiffHunkStatus::Added, - DisplayRow(4)..DisplayRow(8) + DisplayRow(5)..DisplayRow(9) )] ); assert_eq!( expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(4)..=DisplayRow(6)], + vec![DisplayRow(5)..=DisplayRow(7)], "Deleting a line should shrint the hunk" ); assert_eq!( @@ -12293,12 +12294,12 @@ async fn test_edits_around_toggled_additions( vec![( "".to_string(), DiffHunkStatus::Added, - DisplayRow(5)..DisplayRow(6) + DisplayRow(6)..DisplayRow(7) )] ); assert_eq!( expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(5)..=DisplayRow(5)] + vec![DisplayRow(6)..=DisplayRow(6)] ); assert_eq!(all_hunks, all_expanded_hunks); }); @@ -12335,7 +12336,7 @@ async fn test_edits_around_toggled_additions( ( "const A: u32 = 42;\n".to_string(), DiffHunkStatus::Removed, - DisplayRow(2)..DisplayRow(2) + DisplayRow(3)..DisplayRow(3) ) ] ); @@ -12349,7 +12350,7 @@ async fn test_edits_around_toggled_additions( vec![( "const A: u32 = 42;\n".to_string(), DiffHunkStatus::Removed, - DisplayRow(2)..DisplayRow(2) + DisplayRow(3)..DisplayRow(3) )], "Should open hunks that were adjacent to the stale addition one" ); @@ -12445,7 +12446,7 @@ async fn test_edits_around_toggled_deletions( vec![( "const A: u32 = 42;\n".to_string(), DiffHunkStatus::Removed, - DisplayRow(4)..DisplayRow(4) + DisplayRow(5)..DisplayRow(5) )] ); assert_eq!(all_hunks, all_expanded_hunks); @@ -12485,7 +12486,7 @@ async fn test_edits_around_toggled_deletions( vec![( "const A: u32 = 42;\nconst B: u32 = 42;\n".to_string(), DiffHunkStatus::Removed, - DisplayRow(5)..DisplayRow(5) + DisplayRow(6)..DisplayRow(6) )] ); assert_eq!(all_hunks, all_expanded_hunks); @@ -12520,7 +12521,7 @@ async fn test_edits_around_toggled_deletions( vec![( "const A: u32 = 42;\nconst B: u32 = 42;\nconst C: u32 = 42;\n".to_string(), 
DiffHunkStatus::Removed, - DisplayRow(6)..DisplayRow(6) + DisplayRow(7)..DisplayRow(7) )] ); assert_eq!(all_hunks, all_expanded_hunks); @@ -12554,12 +12555,12 @@ async fn test_edits_around_toggled_deletions( vec![( "const A: u32 = 42;\nconst B: u32 = 42;\nconst C: u32 = 42;\n\n".to_string(), DiffHunkStatus::Modified, - DisplayRow(7)..DisplayRow(8) + DisplayRow(8)..DisplayRow(9) )] ); assert_eq!( expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(7)..=DisplayRow(7)], + vec![DisplayRow(8)..=DisplayRow(8)], "Modified expanded hunks should display additions and highlight their background" ); assert_eq!(all_hunks, all_expanded_hunks); @@ -12653,14 +12654,14 @@ async fn test_edits_around_toggled_modifications( let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); assert_eq!( expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(6)..=DisplayRow(6)], + vec![DisplayRow(7)..=DisplayRow(7)], ); assert_eq!( all_hunks, vec![( "const C: u32 = 42;\n".to_string(), DiffHunkStatus::Modified, - DisplayRow(6)..DisplayRow(7) + DisplayRow(7)..DisplayRow(8) )] ); assert_eq!(all_hunks, all_expanded_hunks); @@ -12696,7 +12697,7 @@ async fn test_edits_around_toggled_modifications( let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); assert_eq!( expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(6)..=DisplayRow(6)], + vec![DisplayRow(7)..=DisplayRow(7)], "Modified hunk should grow highlighted lines on more text additions" ); assert_eq!( @@ -12704,7 +12705,7 @@ async fn test_edits_around_toggled_modifications( vec![( "const C: u32 = 42;\n".to_string(), DiffHunkStatus::Modified, - DisplayRow(6)..DisplayRow(9) + DisplayRow(7)..DisplayRow(10) )] ); assert_eq!(all_hunks, all_expanded_hunks); @@ -12742,14 +12743,14 @@ async fn test_edits_around_toggled_modifications( let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); assert_eq!( expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(6)..=DisplayRow(8)], + vec![DisplayRow(7)..=DisplayRow(9)], ); assert_eq!( all_hunks, vec![( "const B: u32 = 42;\nconst C: u32 = 42;\n".to_string(), DiffHunkStatus::Modified, - DisplayRow(6)..DisplayRow(9) + DisplayRow(7)..DisplayRow(10) )], "Modified hunk should grow deleted lines on text deletions above" ); @@ -12786,7 +12787,7 @@ async fn test_edits_around_toggled_modifications( let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); assert_eq!( expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(6)..=DisplayRow(9)], + vec![DisplayRow(7)..=DisplayRow(10)], "Modified hunk should grow deleted lines on text modifications above" ); assert_eq!( @@ -12794,7 +12795,7 @@ async fn test_edits_around_toggled_modifications( vec![( "const A: u32 = 42;\nconst B: u32 = 42;\nconst C: u32 = 42;\n".to_string(), DiffHunkStatus::Modified, - DisplayRow(6)..DisplayRow(10) + DisplayRow(7)..DisplayRow(11) )] ); assert_eq!(all_hunks, all_expanded_hunks); @@ -12830,7 +12831,7 @@ async fn test_edits_around_toggled_modifications( let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); assert_eq!( expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(6)..=DisplayRow(8)], + vec![DisplayRow(7)..=DisplayRow(9)], "Modified hunk should grow shrink lines on modification lines removal" ); assert_eq!( @@ -12838,7 +12839,7 @@ async fn test_edits_around_toggled_modifications( vec![( "const A: u32 = 42;\nconst B: u32 = 42;\nconst C: u32 = 42;\n".to_string(), DiffHunkStatus::Modified, - DisplayRow(6)..DisplayRow(9) + 
DisplayRow(7)..DisplayRow(10) )] ); assert_eq!(all_hunks, all_expanded_hunks); @@ -12880,7 +12881,7 @@ async fn test_edits_around_toggled_modifications( "const A: u32 = 42;\nconst B: u32 = 42;\nconst C: u32 = 42;\nconst D: u32 = 42;\n" .to_string(), DiffHunkStatus::Removed, - DisplayRow(7)..DisplayRow(7) + DisplayRow(8)..DisplayRow(8) )] ); assert_eq!(all_hunks, all_expanded_hunks); @@ -12974,14 +12975,14 @@ async fn test_multiple_expanded_hunks_merge( let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); assert_eq!( expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(6)..=DisplayRow(6)], + vec![DisplayRow(7)..=DisplayRow(7)], ); assert_eq!( all_hunks, vec![( "const C: u32 = 42;\n".to_string(), DiffHunkStatus::Modified, - DisplayRow(6)..DisplayRow(7) + DisplayRow(7)..DisplayRow(8) )] ); assert_eq!(all_hunks, all_expanded_hunks); diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index cf8edb67dccbc..9fe05bc4f2606 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -11,7 +11,7 @@ use crate::{ hover_popover::{ self, hover_at, HOVER_POPOVER_GAP, MIN_POPOVER_CHARACTER_WIDTH, MIN_POPOVER_LINE_HEIGHT, }, - hunk_diff::{diff_hunk_to_display, DisplayDiffHunk, ExpandedHunk}, + hunk_diff::{diff_hunk_to_display, DisplayDiffHunk}, hunk_status, items::BufferSearchHighlights, mouse_context_menu::{self, MenuPosition, MouseContextMenu}, @@ -20,8 +20,8 @@ use crate::{ DocumentHighlightRead, DocumentHighlightWrite, Editor, EditorMode, EditorSettings, EditorSnapshot, EditorStyle, ExpandExcerpts, FocusedBlock, GutterDimensions, HalfPageDown, HalfPageUp, HandleInput, HoveredCursor, HoveredHunk, LineDown, LineUp, OpenExcerpts, PageDown, - PageUp, Point, RangeToAnchorExt, RowExt, RowRangeExt, SelectPhase, Selection, SoftWrap, - ToPoint, CURSORS_VISIBLE_FOR, MAX_LINE_LEN, + PageUp, Point, RowExt, RowRangeExt, SelectPhase, Selection, SoftWrap, ToPoint, + CURSORS_VISIBLE_FOR, MAX_LINE_LEN, }; use client::ParticipantIndex; use collections::{BTreeMap, HashMap}; @@ -302,7 +302,7 @@ impl EditorElement { } register_action(view, cx, Editor::go_to_diagnostic); register_action(view, cx, Editor::go_to_prev_diagnostic); - register_action(view, cx, Editor::go_to_hunk); + register_action(view, cx, Editor::go_to_next_hunk); register_action(view, cx, Editor::go_to_prev_hunk); register_action(view, cx, |editor, a, cx| { editor.go_to_definition(a, cx).detach_and_log_err(cx); @@ -489,28 +489,7 @@ impl EditorElement { let mut modifiers = event.modifiers; if let Some(hovered_hunk) = hovered_hunk { - if modifiers.control || modifiers.platform { - editor.toggle_hovered_hunk(&hovered_hunk, cx); - } else { - let display_range = hovered_hunk - .multi_buffer_range - .clone() - .to_display_points(&position_map.snapshot); - let hunk_bounds = Self::diff_hunk_bounds( - &position_map.snapshot, - position_map.line_height, - gutter_hitbox.bounds, - &DisplayDiffHunk::Unfolded { - diff_base_byte_range: hovered_hunk.diff_base_byte_range.clone(), - display_row_range: display_range.start.row()..display_range.end.row(), - multi_buffer_range: hovered_hunk.multi_buffer_range.clone(), - status: hovered_hunk.status, - }, - ); - if hunk_bounds.contains(&event.position) { - editor.open_hunk_context_menu(hovered_hunk, event.position, cx); - } - } + editor.toggle_hovered_hunk(&hovered_hunk, cx); cx.notify(); return; } else if gutter_hitbox.is_hovered(cx) { @@ -1303,13 +1282,13 @@ impl EditorElement { let display_hunks = buffer_snapshot 
.git_diff_hunks_in_range(buffer_start_row..buffer_end_row) .filter_map(|hunk| { - let mut display_hunk = diff_hunk_to_display(&hunk, snapshot); + let display_hunk = diff_hunk_to_display(&hunk, snapshot); if let DisplayDiffHunk::Unfolded { multi_buffer_range, status, .. - } = &mut display_hunk + } = &display_hunk { let mut is_expanded = false; while let Some(expanded_hunk) = expanded_hunks.peek() { @@ -1332,11 +1311,7 @@ impl EditorElement { } match status { DiffHunkStatus::Added => {} - DiffHunkStatus::Modified => { - if is_expanded { - *status = DiffHunkStatus::Added; - } - } + DiffHunkStatus::Modified => {} DiffHunkStatus::Removed => { if is_expanded { return None; @@ -3371,9 +3346,6 @@ impl EditorElement { for test_indicator in layout.test_indicators.iter_mut() { test_indicator.paint(cx); } - for close_indicator in layout.close_indicators.iter_mut() { - close_indicator.paint(cx); - } if let Some(indicator) = layout.code_actions_indicator.as_mut() { indicator.paint(cx); @@ -4159,46 +4131,6 @@ impl EditorElement { + 1; self.column_pixels(digit_count, cx) } - - #[allow(clippy::too_many_arguments)] - fn layout_hunk_diff_close_indicators( - &self, - line_height: Pixels, - scroll_pixel_position: gpui::Point, - gutter_dimensions: &GutterDimensions, - gutter_hitbox: &Hitbox, - rows_with_hunk_bounds: &HashMap>, - expanded_hunks_by_rows: HashMap, - cx: &mut WindowContext, - ) -> Vec { - self.editor.update(cx, |editor, cx| { - expanded_hunks_by_rows - .into_iter() - .map(|(display_row, hunk)| { - let button = editor.close_hunk_diff_button( - HoveredHunk { - multi_buffer_range: hunk.hunk_range, - status: hunk.status, - diff_base_byte_range: hunk.diff_base_byte_range, - }, - display_row, - cx, - ); - - prepaint_gutter_button( - button, - display_row, - line_height, - gutter_dimensions, - scroll_pixel_position, - gutter_hitbox, - rows_with_hunk_bounds, - cx, - ) - }) - .collect() - }) - } } #[allow(clippy::too_many_arguments)] @@ -5549,15 +5481,6 @@ impl Element for EditorElement { } else { Vec::new() }; - let close_indicators = self.layout_hunk_diff_close_indicators( - line_height, - scroll_pixel_position, - &gutter_dimensions, - &gutter_hitbox, - &rows_with_hunk_bounds, - expanded_add_hunks_by_rows, - cx, - ); self.layout_signature_help( &hitbox, @@ -5670,7 +5593,6 @@ impl Element for EditorElement { selections, mouse_context_menu, test_indicators, - close_indicators, code_actions_indicator, gutter_fold_toggles, crease_trailers, @@ -5812,7 +5734,6 @@ pub struct EditorLayout { selections: Vec<(PlayerColor, Vec)>, code_actions_indicator: Option, test_indicators: Vec, - close_indicators: Vec, gutter_fold_toggles: Vec>, crease_trailers: Vec>, mouse_context_menu: Option, diff --git a/crates/editor/src/hunk_diff.rs b/crates/editor/src/hunk_diff.rs index 67e8a25df58c5..4fa1f10a8a17c 100644 --- a/crates/editor/src/hunk_diff.rs +++ b/crates/editor/src/hunk_diff.rs @@ -1,28 +1,26 @@ use collections::{hash_map, HashMap, HashSet}; use git::diff::DiffHunkStatus; -use gpui::{Action, AppContext, CursorStyle, Hsla, Model, MouseButton, Subscription, Task, View}; +use gpui::{Action, AnchorCorner, AppContext, CursorStyle, Hsla, Model, MouseButton, Task, View}; use language::{Buffer, BufferId, Point}; use multi_buffer::{ Anchor, AnchorRangeExt, ExcerptRange, MultiBuffer, MultiBufferDiffHunk, MultiBufferRow, MultiBufferSnapshot, ToPoint, }; -use settings::SettingsStore; use std::{ ops::{Range, RangeInclusive}, sync::Arc, }; use ui::{ - prelude::*, ActiveTheme, ContextMenu, InteractiveElement, IntoElement, 
ParentElement, Pixels, - Styled, ViewContext, VisualContext, + prelude::*, ActiveTheme, ContextMenu, IconButtonShape, InteractiveElement, IntoElement, + ParentElement, PopoverMenu, Styled, Tooltip, ViewContext, VisualContext, }; -use util::{debug_panic, RangeExt}; +use util::RangeExt; use crate::{ - editor_settings::CurrentLineHighlight, hunk_status, hunks_for_selections, - mouse_context_menu::MouseContextMenu, BlockDisposition, BlockProperties, BlockStyle, - CustomBlockId, DiffRowHighlight, DisplayRow, DisplaySnapshot, Editor, EditorElement, - EditorSnapshot, ExpandAllHunkDiffs, RangeToAnchorExt, RevertFile, RevertSelectedHunks, - ToDisplayPoint, ToggleHunkDiff, + editor_settings::CurrentLineHighlight, hunk_status, hunks_for_selections, BlockDisposition, + BlockProperties, BlockStyle, CustomBlockId, DiffRowHighlight, DisplayRow, DisplaySnapshot, + Editor, EditorElement, EditorSnapshot, ExpandAllHunkDiffs, GoToHunk, GoToPrevHunk, + RangeToAnchorExt, RevertFile, RevertSelectedHunks, ToDisplayPoint, ToggleHunkDiff, }; #[derive(Debug, Clone)] @@ -41,7 +39,7 @@ pub(super) struct ExpandedHunks { #[derive(Debug, Clone)] pub(super) struct ExpandedHunk { - pub block: Option, + pub blocks: Vec, pub hunk_range: Range, pub diff_base_byte_range: Range, pub status: DiffHunkStatus, @@ -77,85 +75,6 @@ impl ExpandedHunks { } impl Editor { - pub(super) fn open_hunk_context_menu( - &mut self, - hovered_hunk: HoveredHunk, - clicked_point: gpui::Point, - cx: &mut ViewContext, - ) { - let focus_handle = self.focus_handle.clone(); - let expanded = self - .expanded_hunks - .hunks(false) - .any(|expanded_hunk| expanded_hunk.hunk_range == hovered_hunk.multi_buffer_range); - let editor_handle = cx.view().clone(); - let editor_snapshot = self.snapshot(cx); - let start_point = self - .to_pixel_point(hovered_hunk.multi_buffer_range.start, &editor_snapshot, cx) - .unwrap_or(clicked_point); - let end_point = self - .to_pixel_point(hovered_hunk.multi_buffer_range.start, &editor_snapshot, cx) - .unwrap_or(clicked_point); - let norm = - |a: gpui::Point, b: gpui::Point| (a.x - b.x).abs() + (a.y - b.y).abs(); - let closest_source = if norm(start_point, clicked_point) < norm(end_point, clicked_point) { - hovered_hunk.multi_buffer_range.start - } else { - hovered_hunk.multi_buffer_range.end - }; - - self.mouse_context_menu = MouseContextMenu::pinned_to_editor( - self, - closest_source, - clicked_point, - ContextMenu::build(cx, move |menu, _| { - menu.on_blur_subscription(Subscription::new(|| {})) - .context(focus_handle) - .entry( - if expanded { - "Collapse Hunk" - } else { - "Expand Hunk" - }, - Some(ToggleHunkDiff.boxed_clone()), - { - let editor = editor_handle.clone(); - let hunk = hovered_hunk.clone(); - move |cx| { - editor.update(cx, |editor, cx| { - editor.toggle_hovered_hunk(&hunk, cx); - }); - } - }, - ) - .entry("Revert Hunk", Some(RevertSelectedHunks.boxed_clone()), { - let editor = editor_handle.clone(); - let hunk = hovered_hunk.clone(); - move |cx| { - let multi_buffer = editor.read(cx).buffer().clone(); - let multi_buffer_snapshot = multi_buffer.read(cx).snapshot(cx); - let mut revert_changes = HashMap::default(); - if let Some(hunk) = - crate::hunk_diff::to_diff_hunk(&hunk, &multi_buffer_snapshot) - { - Editor::prepare_revert_change( - &mut revert_changes, - &multi_buffer, - &hunk, - cx, - ); - } - if !revert_changes.is_empty() { - editor.update(cx, |editor, cx| editor.revert(revert_changes, cx)); - } - } - }) - .action("Revert File", RevertFile.boxed_clone()) - }), - cx, - ) - } - pub(super) fn 
toggle_hovered_hunk( &mut self, hovered_hunk: &HoveredHunk, @@ -264,7 +183,8 @@ impl Editor { break; } else if expanded_hunk_row_range == hunk_to_toggle_row_range { highlights_to_remove.push(expanded_hunk.hunk_range.clone()); - blocks_to_remove.extend(expanded_hunk.block); + blocks_to_remove + .extend(expanded_hunk.blocks.iter().copied()); hunks_to_toggle.next(); retain = false; break; @@ -371,9 +291,17 @@ impl Editor { Err(ix) => ix, }; - let block = match hunk.status { + let blocks; + match hunk.status { DiffHunkStatus::Removed => { - self.insert_deleted_text_block(diff_base_buffer, deleted_text_lines, hunk, cx) + blocks = self.insert_blocks( + [ + self.hunk_header_block(&hunk, cx), + Self::deleted_text_block(hunk, diff_base_buffer, deleted_text_lines, cx), + ], + None, + cx, + ); } DiffHunkStatus::Added => { self.highlight_rows::( @@ -382,7 +310,7 @@ impl Editor { false, cx, ); - None + blocks = self.insert_blocks([self.hunk_header_block(&hunk, cx)], None, cx); } DiffHunkStatus::Modified => { self.highlight_rows::( @@ -391,13 +319,20 @@ impl Editor { false, cx, ); - self.insert_deleted_text_block(diff_base_buffer, deleted_text_lines, hunk, cx) + blocks = self.insert_blocks( + [ + self.hunk_header_block(&hunk, cx), + Self::deleted_text_block(hunk, diff_base_buffer, deleted_text_lines, cx), + ], + None, + cx, + ); } }; self.expanded_hunks.hunks.insert( block_insert_index, ExpandedHunk { - block, + blocks, hunk_range: hunk_start..hunk_end, status: hunk.status, folded: false, @@ -408,109 +343,368 @@ impl Editor { Some(()) } - fn insert_deleted_text_block( - &mut self, - diff_base_buffer: Model, - deleted_text_height: u32, + fn hunk_header_block( + &self, hunk: &HoveredHunk, - cx: &mut ViewContext<'_, Self>, - ) -> Option { - let deleted_hunk_color = deleted_hunk_color(cx); - let (editor_height, editor_with_deleted_text) = - editor_with_deleted_text(diff_base_buffer, deleted_hunk_color, hunk, cx); - let editor = cx.view().clone(); - let hunk = hunk.clone(); - let height = editor_height.max(deleted_text_height); - let mut new_block_ids = self.insert_blocks( - Some(BlockProperties { - position: hunk.multi_buffer_range.start, - height, - style: BlockStyle::Flex, - disposition: BlockDisposition::Above, - render: Box::new(move |cx| { - let width = EditorElement::diff_hunk_strip_width(cx.line_height()); - let gutter_dimensions = editor.read(cx.context).gutter_dimensions; - - let close_button = editor.update(cx.context, |editor, cx| { - let editor_snapshot = editor.snapshot(cx); - let hunk_display_range = hunk - .multi_buffer_range - .clone() - .to_display_points(&editor_snapshot); - editor.close_hunk_diff_button( - hunk.clone(), - hunk_display_range.start.row(), - cx, - ) - }); + cx: &mut ViewContext<'_, Editor>, + ) -> BlockProperties { + let border_color = cx.theme().colors().border_disabled; + let gutter_color = match hunk.status { + DiffHunkStatus::Added => cx.theme().status().created, + DiffHunkStatus::Modified => cx.theme().status().modified, + DiffHunkStatus::Removed => cx.theme().status().deleted, + }; + + BlockProperties { + position: hunk.multi_buffer_range.start, + height: 1, + style: BlockStyle::Sticky, + disposition: BlockDisposition::Above, + priority: 0, + render: Box::new({ + let editor = cx.view().clone(); + let hunk = hunk.clone(); + move |cx| { + let hunk_controls_menu_handle = + editor.read(cx).hunk_controls_menu_handle.clone(); h_flex() - .id("gutter with editor") - .bg(deleted_hunk_color) - .h(height as f32 * cx.line_height()) + .id(cx.block_id) .w_full() + 
.h(cx.line_height()) + .child( + div() + .id("gutter-strip") + .w(EditorElement::diff_hunk_strip_width(cx.line_height())) + .h_full() + .bg(gutter_color) + .cursor(CursorStyle::PointingHand) + .on_click({ + let editor = editor.clone(); + let hunk = hunk.clone(); + move |_event, cx| { + editor.update(cx, |editor, cx| { + editor.toggle_hovered_hunk(&hunk, cx); + }); + } + }), + ) .child( h_flex() - .id("gutter") - .max_w(gutter_dimensions.full_width()) - .min_w(gutter_dimensions.full_width()) .size_full() + .justify_between() + .border_t_1() + .border_color(border_color) .child( h_flex() - .id("gutter hunk") - .bg(cx.theme().status().deleted) - .pl(gutter_dimensions.margin - + gutter_dimensions - .git_blame_entries_width - .unwrap_or_default()) - .max_w(width) - .min_w(width) - .size_full() - .cursor(CursorStyle::PointingHand) - .on_mouse_down(MouseButton::Left, { - let editor = editor.clone(); - let hunk = hunk.clone(); - move |event, cx| { - let modifiers = event.modifiers; - if modifiers.control || modifiers.platform { - editor.update(cx, |editor, cx| { - editor.toggle_hovered_hunk(&hunk, cx); - }); - } else { - editor.update(cx, |editor, cx| { - editor.open_hunk_context_menu( - hunk.clone(), - event.position, + .gap_2() + .pl_6() + .child( + IconButton::new("next-hunk", IconName::ArrowDown) + .shape(IconButtonShape::Square) + .icon_size(IconSize::Small) + .tooltip({ + let focus_handle = editor.focus_handle(cx); + move |cx| { + Tooltip::for_action_in( + "Next Hunk", + &GoToHunk, + &focus_handle, cx, - ); - }); - } - } - }), + ) + } + }) + .on_click({ + let editor = editor.clone(); + let hunk = hunk.clone(); + move |_event, cx| { + editor.update(cx, |editor, cx| { + let snapshot = editor.snapshot(cx); + let position = hunk + .multi_buffer_range + .end + .to_point( + &snapshot.buffer_snapshot, + ); + if let Some(hunk) = editor + .go_to_hunk_after_position( + &snapshot, position, cx, + ) + { + let multi_buffer_start = snapshot + .buffer_snapshot + .anchor_before(Point::new( + hunk.row_range.start.0, + 0, + )); + let multi_buffer_end = snapshot + .buffer_snapshot + .anchor_after(Point::new( + hunk.row_range.end.0, + 0, + )); + editor.expand_diff_hunk( + None, + &HoveredHunk { + multi_buffer_range: + multi_buffer_start + ..multi_buffer_end, + status: hunk_status(&hunk), + diff_base_byte_range: hunk + .diff_base_byte_range, + }, + cx, + ); + } + }); + } + }), + ) + .child( + IconButton::new("prev-hunk", IconName::ArrowUp) + .shape(IconButtonShape::Square) + .icon_size(IconSize::Small) + .tooltip({ + let focus_handle = editor.focus_handle(cx); + move |cx| { + Tooltip::for_action_in( + "Previous Hunk", + &GoToPrevHunk, + &focus_handle, + cx, + ) + } + }) + .on_click({ + let editor = editor.clone(); + let hunk = hunk.clone(); + move |_event, cx| { + editor.update(cx, |editor, cx| { + let snapshot = editor.snapshot(cx); + let position = hunk + .multi_buffer_range + .start + .to_point( + &snapshot.buffer_snapshot, + ); + let hunk = editor + .go_to_hunk_before_position( + &snapshot, position, cx, + ); + if let Some(hunk) = hunk { + let multi_buffer_start = snapshot + .buffer_snapshot + .anchor_before(Point::new( + hunk.row_range.start.0, + 0, + )); + let multi_buffer_end = snapshot + .buffer_snapshot + .anchor_after(Point::new( + hunk.row_range.end.0, + 0, + )); + editor.expand_diff_hunk( + None, + &HoveredHunk { + multi_buffer_range: + multi_buffer_start + ..multi_buffer_end, + status: hunk_status(&hunk), + diff_base_byte_range: hunk + .diff_base_byte_range, + }, + cx, + ); + } + }); + } + }), + ), 
) .child( - v_flex() - .size_full() - .pt(rems(0.25)) - .justify_start() - .child(close_button), + h_flex() + .gap_2() + .pr_6() + .child({ + let focus = editor.focus_handle(cx); + PopoverMenu::new("hunk-controls-dropdown") + .trigger( + IconButton::new( + "toggle_editor_selections_icon", + IconName::EllipsisVertical, + ) + .shape(IconButtonShape::Square) + .icon_size(IconSize::Small) + .style(ButtonStyle::Subtle) + .selected( + hunk_controls_menu_handle.is_deployed(), + ) + .when( + !hunk_controls_menu_handle.is_deployed(), + |this| { + this.tooltip(|cx| { + Tooltip::text("Hunk Controls", cx) + }) + }, + ), + ) + .anchor(AnchorCorner::TopRight) + .with_handle(hunk_controls_menu_handle) + .menu(move |cx| { + let focus = focus.clone(); + let menu = + ContextMenu::build(cx, move |menu, _| { + menu.context(focus.clone()).action( + "Discard All", + RevertFile.boxed_clone(), + ) + }); + Some(menu) + }) + }) + .child( + IconButton::new("discard", IconName::RotateCcw) + .shape(IconButtonShape::Square) + .icon_size(IconSize::Small) + .tooltip({ + let focus_handle = editor.focus_handle(cx); + move |cx| { + Tooltip::for_action_in( + "Discard Hunk", + &RevertSelectedHunks, + &focus_handle, + cx, + ) + } + }) + .on_click({ + let editor = editor.clone(); + let hunk = hunk.clone(); + move |_event, cx| { + let multi_buffer = + editor.read(cx).buffer().clone(); + let multi_buffer_snapshot = + multi_buffer.read(cx).snapshot(cx); + let mut revert_changes = HashMap::default(); + if let Some(hunk) = + crate::hunk_diff::to_diff_hunk( + &hunk, + &multi_buffer_snapshot, + ) + { + Editor::prepare_revert_change( + &mut revert_changes, + &multi_buffer, + &hunk, + cx, + ); + } + if !revert_changes.is_empty() { + editor.update(cx, |editor, cx| { + editor.revert(revert_changes, cx) + }); + } + } + }), + ) + .child( + IconButton::new("collapse", IconName::Close) + .shape(IconButtonShape::Square) + .icon_size(IconSize::Small) + .tooltip({ + let focus_handle = editor.focus_handle(cx); + move |cx| { + Tooltip::for_action_in( + "Collapse Hunk", + &ToggleHunkDiff, + &focus_handle, + cx, + ) + } + }) + .on_click({ + let editor = editor.clone(); + let hunk = hunk.clone(); + move |_event, cx| { + editor.update(cx, |editor, cx| { + editor.toggle_hovered_hunk(&hunk, cx); + }); + } + }), + ), ), ) - .child(editor_with_deleted_text.clone()) .into_any_element() - }), - priority: 0, + } + }), + } + } + + fn deleted_text_block( + hunk: &HoveredHunk, + diff_base_buffer: Model, + deleted_text_height: u32, + cx: &mut ViewContext<'_, Editor>, + ) -> BlockProperties { + let gutter_color = match hunk.status { + DiffHunkStatus::Added => unreachable!(), + DiffHunkStatus::Modified => cx.theme().status().modified, + DiffHunkStatus::Removed => cx.theme().status().deleted, + }; + let deleted_hunk_color = deleted_hunk_color(cx); + let (editor_height, editor_with_deleted_text) = + editor_with_deleted_text(diff_base_buffer, deleted_hunk_color, hunk, cx); + let editor = cx.view().clone(); + let hunk = hunk.clone(); + let height = editor_height.max(deleted_text_height); + BlockProperties { + position: hunk.multi_buffer_range.start, + height, + style: BlockStyle::Flex, + disposition: BlockDisposition::Above, + priority: 0, + render: Box::new(move |cx| { + let width = EditorElement::diff_hunk_strip_width(cx.line_height()); + let gutter_dimensions = editor.read(cx.context).gutter_dimensions; + + h_flex() + .id(cx.block_id) + .bg(deleted_hunk_color) + .h(height as f32 * cx.line_height()) + .w_full() + .child( + h_flex() + .id("gutter") + 
.max_w(gutter_dimensions.full_width()) + .min_w(gutter_dimensions.full_width()) + .size_full() + .child( + h_flex() + .id("gutter hunk") + .bg(gutter_color) + .pl(gutter_dimensions.margin + + gutter_dimensions + .git_blame_entries_width + .unwrap_or_default()) + .max_w(width) + .min_w(width) + .size_full() + .cursor(CursorStyle::PointingHand) + .on_mouse_down(MouseButton::Left, { + let editor = editor.clone(); + let hunk = hunk.clone(); + move |_event, cx| { + editor.update(cx, |editor, cx| { + editor.toggle_hovered_hunk(&hunk, cx); + }); + } + }), + ), + ) + .child(editor_with_deleted_text.clone()) + .into_any_element() }), - None, - cx, - ); - if new_block_ids.len() == 1 { - new_block_ids.pop() - } else { - debug_panic!( - "Inserted one editor block but did not receive exactly one block id: {new_block_ids:?}" - ); - None } } @@ -521,7 +715,7 @@ impl Editor { .expanded_hunks .hunks .drain(..) - .filter_map(|expanded_hunk| expanded_hunk.block) + .flat_map(|expanded_hunk| expanded_hunk.blocks.into_iter()) .collect::>(); if to_remove.is_empty() { false @@ -603,7 +797,7 @@ impl Editor { expanded_hunk.folded = true; highlights_to_remove .push(expanded_hunk.hunk_range.clone()); - if let Some(block) = expanded_hunk.block.take() { + for block in expanded_hunk.blocks.drain(..) { blocks_to_remove.insert(block); } break; @@ -650,7 +844,7 @@ impl Editor { } } if !retain { - blocks_to_remove.extend(expanded_hunk.block); + blocks_to_remove.extend(expanded_hunk.blocks.drain(..)); highlights_to_remove.push(expanded_hunk.hunk_range.clone()); } retain @@ -749,7 +943,7 @@ fn added_hunk_color(cx: &AppContext) -> Hsla { } fn deleted_hunk_color(cx: &AppContext) -> Hsla { - let mut deleted_color = cx.theme().status().git().deleted; + let mut deleted_color = cx.theme().status().deleted; deleted_color.fade_out(0.7); deleted_color } @@ -788,32 +982,15 @@ fn editor_with_deleted_text( false, cx, ); - - let subscription_editor = parent_editor.clone(); - editor._subscriptions.extend([ - cx.on_blur(&editor.focus_handle, |editor, cx| { - editor.set_current_line_highlight(Some(CurrentLineHighlight::None)); + editor.set_current_line_highlight(Some(CurrentLineHighlight::None)); + editor + ._subscriptions + .extend([cx.on_blur(&editor.focus_handle, |editor, cx| { editor.change_selections(None, cx, |s| { s.try_cancel(); }); - cx.notify(); - }), - cx.on_focus(&editor.focus_handle, move |editor, cx| { - let restored_highlight = if let Some(parent_editor) = subscription_editor.upgrade() - { - parent_editor.read(cx).current_line_highlight - } else { - None - }; - editor.set_current_line_highlight(restored_highlight); - cx.notify(); - }), - cx.observe_global::(|editor, cx| { - if !editor.is_focused(cx) { - editor.set_current_line_highlight(Some(CurrentLineHighlight::None)); - } - }), - ]); + })]); + let parent_editor_for_reverts = parent_editor.clone(); let original_multi_buffer_range = hunk.multi_buffer_range.clone(); let diff_base_range = hunk.diff_base_byte_range.clone(); From 4b4565fb7afaa7ab2c8d2058c924120fda911311 Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Wed, 25 Sep 2024 22:55:36 +0200 Subject: [PATCH 076/228] assistant: Enable assistant panel/inline assists in ssh remote projects (#18367) Release Notes: - ssh remoting: Enable assistant panel and inline assists (running on client) --- crates/assistant/src/assistant_panel.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/crates/assistant/src/assistant_panel.rs 
b/crates/assistant/src/assistant_panel.rs index 22237eeb07927..c7a06b428b9f1 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -960,7 +960,8 @@ impl AssistantPanel { } fn new_context(&mut self, cx: &mut ViewContext) -> Option> { - if self.project.read(cx).is_via_collab() { + let project = self.project.read(cx); + if project.is_via_collab() && project.dev_server_project_id().is_none() { let task = self .context_store .update(cx, |store, cx| store.create_remote_context(cx)); From 7398f795e3fde21b8c1a6a40cd67c0b1854ed60c Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Wed, 25 Sep 2024 22:01:12 +0000 Subject: [PATCH 077/228] Ollama llama3.2 default context size (#18366) Release Notes: - Ollama: Added llama3.2 support --- crates/ollama/src/ollama.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/ollama/src/ollama.rs b/crates/ollama/src/ollama.rs index e592bfa17717d..84404afce13b1 100644 --- a/crates/ollama/src/ollama.rs +++ b/crates/ollama/src/ollama.rs @@ -83,7 +83,7 @@ fn get_max_tokens(name: &str) -> usize { "codellama" | "starcoder2" => 16384, "mistral" | "codestral" | "mixstral" | "llava" | "qwen2" | "dolphin-mixtral" => 32768, "llama3.1" | "phi3" | "phi3.5" | "command-r" | "deepseek-coder-v2" | "yi-coder" - | "qwen2.5-coder" => 128000, + | "llama3.2" | "qwen2.5-coder" => 128000, _ => DEFAULT_TOKENS, } .clamp(1, MAXIMUM_TOKENS) From 40408e731e859ecaf03919aaa32f22ff41869522 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Wed, 25 Sep 2024 22:01:33 +0000 Subject: [PATCH 078/228] Fix sending alt-enter in terminal (#18363) --- crates/terminal/src/mappings/keys.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/crates/terminal/src/mappings/keys.rs b/crates/terminal/src/mappings/keys.rs index e760db3616fea..2d4fe4c62e97b 100644 --- a/crates/terminal/src/mappings/keys.rs +++ b/crates/terminal/src/mappings/keys.rs @@ -51,6 +51,7 @@ pub fn to_esc_str(keystroke: &Keystroke, mode: &TermMode, alt_is_meta: bool) -> ("escape", AlacModifiers::None) => Some("\x1b".to_string()), ("enter", AlacModifiers::None) => Some("\x0d".to_string()), ("enter", AlacModifiers::Shift) => Some("\x0d".to_string()), + ("enter", AlacModifiers::Alt) => Some("\x1b\x0d".to_string()), ("backspace", AlacModifiers::None) => Some("\x7f".to_string()), //Interesting escape codes ("tab", AlacModifiers::Shift) => Some("\x1b[Z".to_string()), From 64532e94e456dd4595897c96a0d06a7e22379add Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Wed, 25 Sep 2024 16:29:04 -0600 Subject: [PATCH 079/228] Move adapters to remote (#18359) Release Notes: - ssh remoting: run LSP Adapters on host --------- Co-authored-by: Mikayla --- Cargo.lock | 1 + crates/languages/Cargo.toml | 54 ++- crates/languages/src/lib.rs | 17 +- crates/project/src/lsp_store.rs | 442 +------------------ crates/project/src/project.rs | 3 +- crates/proto/proto/zed.proto | 74 +--- crates/proto/src/proto.rs | 18 - crates/remote_server/Cargo.toml | 1 + crates/remote_server/src/headless_project.rs | 12 +- crates/zed/Cargo.toml | 2 +- 10 files changed, 76 insertions(+), 548 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 26b979ccf72aa..0b3ee53e9aa85 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9122,6 +9122,7 @@ dependencies = [ "gpui", "http_client", "language", + "languages", "log", "lsp", "node_runtime", diff --git a/crates/languages/Cargo.toml b/crates/languages/Cargo.toml index 33be1a9809601..5cb5455dd168d 100644 --- a/crates/languages/Cargo.toml +++ b/crates/languages/Cargo.toml @@ -10,6 +10,25 
@@ workspace = true [features] test-support = [] +load-grammars = [ + "tree-sitter-bash", + "tree-sitter-c", + "tree-sitter-cpp", + "tree-sitter-css", + "tree-sitter-go", + "tree-sitter-go-mod", + "tree-sitter-gowork", + "tree-sitter-jsdoc", + "tree-sitter-json", + "tree-sitter-md", + "protols-tree-sitter-proto", + "tree-sitter-python", + "tree-sitter-regex", + "tree-sitter-rust", + "tree-sitter-typescript", + "tree-sitter-yaml", + "tree-sitter" +] [dependencies] anyhow.workspace = true @@ -36,25 +55,26 @@ settings.workspace = true smol.workspace = true task.workspace = true toml.workspace = true -tree-sitter-bash.workspace = true -tree-sitter-c.workspace = true -tree-sitter-cpp.workspace = true -tree-sitter-css.workspace = true -tree-sitter-go.workspace = true -tree-sitter-go-mod.workspace = true -tree-sitter-gowork.workspace = true -tree-sitter-jsdoc.workspace = true -tree-sitter-json.workspace = true -tree-sitter-md.workspace = true -protols-tree-sitter-proto.workspace = true -tree-sitter-python.workspace = true -tree-sitter-regex.workspace = true -tree-sitter-rust.workspace = true -tree-sitter-typescript.workspace = true -tree-sitter-yaml.workspace = true -tree-sitter.workspace = true util.workspace = true +tree-sitter-bash = {workspace = true, optional = true} +tree-sitter-c = {workspace = true, optional = true} +tree-sitter-cpp = {workspace = true, optional = true} +tree-sitter-css = {workspace = true, optional = true} +tree-sitter-go = {workspace = true, optional = true} +tree-sitter-go-mod = {workspace = true, optional = true} +tree-sitter-gowork = {workspace = true, optional = true} +tree-sitter-jsdoc = {workspace = true, optional = true} +tree-sitter-json = {workspace = true, optional = true} +tree-sitter-md = {workspace = true, optional = true} +protols-tree-sitter-proto = {workspace = true, optional = true} +tree-sitter-python = {workspace = true, optional = true} +tree-sitter-regex = {workspace = true, optional = true} +tree-sitter-rust = {workspace = true, optional = true} +tree-sitter-typescript = {workspace = true, optional = true} +tree-sitter-yaml = {workspace = true, optional = true} +tree-sitter = {workspace = true, optional = true} + [dev-dependencies] text.workspace = true theme = { workspace = true, features = ["test-support"] } diff --git a/crates/languages/src/lib.rs b/crates/languages/src/lib.rs index 7435ddb13196d..295df6e419b7e 100644 --- a/crates/languages/src/lib.rs +++ b/crates/languages/src/lib.rs @@ -31,6 +31,7 @@ mod yaml; struct LanguageDir; pub fn init(languages: Arc, node_runtime: NodeRuntime, cx: &mut AppContext) { + #[cfg(feature = "load-grammars")] languages.register_native_grammars([ ("bash", tree_sitter_bash::LANGUAGE), ("c", tree_sitter_c::LANGUAGE), @@ -282,9 +283,21 @@ fn load_config(name: &str) -> LanguageConfig { ) .unwrap(); - ::toml::from_str(&config_toml) + #[allow(unused_mut)] + let mut config: LanguageConfig = ::toml::from_str(&config_toml) .with_context(|| format!("failed to load config.toml for language {name:?}")) - .unwrap() + .unwrap(); + + #[cfg(not(feature = "load-grammars"))] + { + config = LanguageConfig { + name: config.name, + matcher: config.matcher, + ..Default::default() + } + } + + config } fn load_queries(name: &str) -> LanguageQueries { diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index bef57bafb48cf..a4a13b296ed5c 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -36,10 +36,10 @@ use language::{ markdown, point_to_lsp, prepare_completion_documentation, 
proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version}, range_from_lsp, Bias, Buffer, BufferSnapshot, CachedLspAdapter, CodeLabel, Diagnostic, - DiagnosticEntry, DiagnosticSet, Diff, Documentation, File as _, Language, LanguageConfig, - LanguageMatcher, LanguageName, LanguageRegistry, LanguageServerBinaryStatus, - LanguageServerName, LocalFile, LspAdapter, LspAdapterDelegate, Patch, PointUtf16, - TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction, Unclipped, + DiagnosticEntry, DiagnosticSet, Diff, Documentation, File as _, Language, LanguageName, + LanguageRegistry, LanguageServerBinaryStatus, LanguageServerName, LocalFile, LspAdapter, + LspAdapterDelegate, Patch, PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction, + Unclipped, }; use lsp::{ CodeActionKind, CompletionContext, DiagnosticSeverity, DiagnosticTag, @@ -53,7 +53,7 @@ use parking_lot::{Mutex, RwLock}; use postage::watch; use rand::prelude::*; -use rpc::{proto::SSH_PROJECT_ID, AnyProtoClient}; +use rpc::AnyProtoClient; use serde::Serialize; use settings::{Settings, SettingsLocation, SettingsStore}; use sha2::{Digest, Sha256}; @@ -644,16 +644,15 @@ pub struct RemoteLspStore { impl RemoteLspStore {} -pub struct SshLspStore { - upstream_client: AnyProtoClient, - current_lsp_settings: HashMap, -} +// pub struct SshLspStore { +// upstream_client: AnyProtoClient, +// current_lsp_settings: HashMap, +// } #[allow(clippy::large_enum_variant)] pub enum LspStoreMode { Local(LocalLspStore), // ssh host and collab host Remote(RemoteLspStore), // collab guest - Ssh(SshLspStore), // ssh client } impl LspStoreMode { @@ -661,10 +660,6 @@ impl LspStoreMode { matches!(self, LspStoreMode::Local(_)) } - fn is_ssh(&self) -> bool { - matches!(self, LspStoreMode::Ssh(_)) - } - fn is_remote(&self) -> bool { matches!(self, LspStoreMode::Remote(_)) } @@ -787,13 +782,6 @@ impl LspStore { } } - pub fn as_ssh(&self) -> Option<&SshLspStore> { - match &self.mode { - LspStoreMode::Ssh(ssh_lsp_store) => Some(ssh_lsp_store), - _ => None, - } - } - pub fn as_local(&self) -> Option<&LocalLspStore> { match &self.mode { LspStoreMode::Local(local_lsp_store) => Some(local_lsp_store), @@ -810,9 +798,6 @@ impl LspStore { pub fn upstream_client(&self) -> Option<(AnyProtoClient, u64)> { match &self.mode { - LspStoreMode::Ssh(SshLspStore { - upstream_client, .. - }) => Some((upstream_client.clone(), SSH_PROJECT_ID)), LspStoreMode::Remote(RemoteLspStore { upstream_client, upstream_project_id, @@ -827,11 +812,7 @@ impl LspStore { new_settings: HashMap, ) -> Option> { match &mut self.mode { - LspStoreMode::Ssh(SshLspStore { - current_lsp_settings, - .. - }) - | LspStoreMode::Local(LocalLspStore { + LspStoreMode::Local(LocalLspStore { current_lsp_settings, .. 
}) => { @@ -919,43 +900,6 @@ impl LspStore { }) } - pub fn new_ssh( - buffer_store: Model, - worktree_store: Model, - languages: Arc, - upstream_client: AnyProtoClient, - cx: &mut ModelContext, - ) -> Self { - cx.subscribe(&buffer_store, Self::on_buffer_store_event) - .detach(); - cx.subscribe(&worktree_store, Self::on_worktree_store_event) - .detach(); - cx.observe_global::(Self::on_settings_changed) - .detach(); - - Self { - mode: LspStoreMode::Ssh(SshLspStore { - upstream_client, - current_lsp_settings: Default::default(), - }), - downstream_client: None, - buffer_store, - worktree_store, - languages: languages.clone(), - language_server_ids: Default::default(), - language_server_statuses: Default::default(), - nonce: StdRng::from_entropy().gen(), - buffer_snapshots: Default::default(), - next_diagnostic_group_id: Default::default(), - diagnostic_summaries: Default::default(), - - diagnostics: Default::default(), - active_entry: None, - _maintain_workspace_config: Self::maintain_workspace_config(cx), - _maintain_buffer_languages: Self::maintain_buffer_languages(languages.clone(), cx), - } - } - pub fn new_remote( buffer_store: Model, worktree_store: Model, @@ -3697,11 +3641,11 @@ impl LspStore { mut cx: AsyncAppContext, ) -> Result { let response_from_ssh = this.update(&mut cx, |this, _| { - let ssh = this.as_ssh()?; + let (upstream_client, project_id) = this.upstream_client()?; let mut payload = envelope.payload.clone(); - payload.project_id = SSH_PROJECT_ID; + payload.project_id = project_id; - Some(ssh.upstream_client.request(payload)) + Some(upstream_client.request(payload)) })?; if let Some(response_from_ssh) = response_from_ssh { return response_from_ssh.await; @@ -5009,165 +4953,6 @@ impl LspStore { Ok(proto::Ack {}) } - pub async fn handle_create_language_server( - this: Model, - envelope: TypedEnvelope, - mut cx: AsyncAppContext, - ) -> Result { - let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id); - let server_name = LanguageServerName::from_proto(envelope.payload.name); - - let binary = envelope - .payload - .binary - .ok_or_else(|| anyhow!("missing binary"))?; - let binary = LanguageServerBinary { - path: PathBuf::from(binary.path), - env: None, - arguments: binary.arguments.into_iter().map(Into::into).collect(), - }; - let language = envelope - .payload - .language - .ok_or_else(|| anyhow!("missing language"))?; - let language_name = LanguageName::from_proto(language.name); - let matcher: LanguageMatcher = serde_json::from_str(&language.matcher)?; - - this.update(&mut cx, |this, cx| { - let Some(worktree) = this - .worktree_store - .read(cx) - .worktree_for_id(worktree_id, cx) - else { - return Err(anyhow!("worktree not found")); - }; - - this.languages - .register_language(language_name.clone(), None, matcher.clone(), { - let language_name = language_name.clone(); - move || { - Ok(( - LanguageConfig { - name: language_name.clone(), - matcher: matcher.clone(), - ..Default::default() - }, - Default::default(), - Default::default(), - )) - } - }); - cx.background_executor() - .spawn(this.languages.language_for_name(language_name.0.as_ref())) - .detach(); - - // host - let adapter = this.languages.get_or_register_lsp_adapter( - language_name.clone(), - server_name.clone(), - || { - Arc::new(SshLspAdapter::new( - server_name, - binary, - envelope.payload.initialization_options, - envelope.payload.code_action_kinds, - )) - }, - ); - - this.start_language_server(&worktree, adapter, language_name, cx); - Ok(()) - })??; - Ok(proto::Ack {}) - } - - pub async fn 
handle_which_command( - this: Model, - envelope: TypedEnvelope, - mut cx: AsyncAppContext, - ) -> Result { - let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id); - let command = PathBuf::from(envelope.payload.command); - let response = this - .update(&mut cx, |this, cx| { - let worktree = this.worktree_for_id(worktree_id, cx)?; - let delegate = LocalLspAdapterDelegate::for_local(this, &worktree, cx); - anyhow::Ok( - cx.spawn(|_, _| async move { delegate.which(command.as_os_str()).await }), - ) - })?? - .await; - - Ok(proto::WhichCommandResponse { - path: response.map(|path| path.to_string_lossy().to_string()), - }) - } - - pub async fn handle_shell_env( - this: Model, - envelope: TypedEnvelope, - mut cx: AsyncAppContext, - ) -> Result { - let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id); - let response = this - .update(&mut cx, |this, cx| { - let worktree = this.worktree_for_id(worktree_id, cx)?; - let delegate = LocalLspAdapterDelegate::for_local(this, &worktree, cx); - anyhow::Ok(cx.spawn(|_, _| async move { delegate.shell_env().await })) - })?? - .await; - - Ok(proto::ShellEnvResponse { - env: response.into_iter().collect(), - }) - } - pub async fn handle_try_exec( - this: Model, - envelope: TypedEnvelope, - mut cx: AsyncAppContext, - ) -> Result { - let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id); - let binary = envelope - .payload - .binary - .ok_or_else(|| anyhow!("missing binary"))?; - let binary = LanguageServerBinary { - path: PathBuf::from(binary.path), - env: None, - arguments: binary.arguments.into_iter().map(Into::into).collect(), - }; - this.update(&mut cx, |this, cx| { - let worktree = this.worktree_for_id(worktree_id, cx)?; - let delegate = LocalLspAdapterDelegate::for_local(this, &worktree, cx); - anyhow::Ok(cx.spawn(|_, _| async move { delegate.try_exec(binary).await })) - })?? - .await?; - - Ok(proto::Ack {}) - } - - pub async fn handle_read_text_file( - this: Model, - envelope: TypedEnvelope, - mut cx: AsyncAppContext, - ) -> Result { - let path = envelope - .payload - .path - .ok_or_else(|| anyhow!("missing path"))?; - let worktree_id = WorktreeId::from_proto(path.worktree_id); - let path = PathBuf::from(path.path); - let response = this - .update(&mut cx, |this, cx| { - let worktree = this.worktree_for_id(worktree_id, cx)?; - let delegate = LocalLspAdapterDelegate::for_local(this, &worktree, cx); - anyhow::Ok(cx.spawn(|_, _| async move { delegate.read_text_file(path).await })) - })?? 
- .await?; - - Ok(proto::ReadTextFileResponse { text: response }) - } - async fn handle_apply_additional_edits_for_completion( this: Model, envelope: TypedEnvelope, @@ -5388,89 +5173,6 @@ impl LspStore { .reorder_language_servers(&language, enabled_lsp_adapters); } - fn start_language_server_on_ssh_host( - &mut self, - worktree: &Model, - adapter: Arc, - language: LanguageName, - cx: &mut ModelContext, - ) { - let ssh = self.as_ssh().unwrap(); - - let delegate = Arc::new(SshLspAdapterDelegate { - lsp_store: cx.handle().downgrade(), - worktree: worktree.read(cx).snapshot(), - upstream_client: ssh.upstream_client.clone(), - language_registry: self.languages.clone(), - }) as Arc; - - let Some((upstream_client, project_id)) = self.upstream_client() else { - return; - }; - let worktree_id = worktree.read(cx).id().to_proto(); - let name = adapter.name().to_string(); - - let Some(available_language) = self.languages.available_language_for_name(&language) else { - log::error!("failed to find available language {language}"); - return; - }; - - let user_binary_task = - self.get_language_server_binary(adapter.clone(), delegate.clone(), false, cx); - - let task = cx.spawn(|_, _| async move { - let binary = user_binary_task.await?; - let name = adapter.name(); - let code_action_kinds = adapter - .adapter - .code_action_kinds() - .map(|kinds| serde_json::to_string(&kinds)) - .transpose()?; - let get_options = adapter.adapter.clone().initialization_options(&delegate); - let initialization_options = get_options - .await? - .map(|options| serde_json::to_string(&options)) - .transpose()?; - - let language_server_command = proto::LanguageServerCommand { - path: binary.path.to_string_lossy().to_string(), - arguments: binary - .arguments - .iter() - .map(|args| args.to_string_lossy().to_string()) - .collect(), - env: binary.env.unwrap_or_default().into_iter().collect(), - }; - - upstream_client - .request(proto::CreateLanguageServer { - project_id, - worktree_id, - name: name.0.to_string(), - binary: Some(language_server_command), - initialization_options, - code_action_kinds, - language: Some(proto::AvailableLanguage { - name: language.to_proto(), - matcher: serde_json::to_string(&available_language.matcher())?, - }), - }) - .await - }); - cx.spawn(|this, mut cx| async move { - if let Err(e) = task.await { - this.update(&mut cx, |_this, cx| { - cx.emit(LspStoreEvent::Notification(format!( - "failed to start {}: {}", - name, e - ))) - }) - .ok(); - } - }) - .detach(); - } - fn get_language_server_binary( &self, adapter: Arc, @@ -5558,11 +5260,6 @@ impl LspStore { return; } - if self.mode.is_ssh() { - self.start_language_server_on_ssh_host(worktree_handle, adapter, language, cx); - return; - } - let project_settings = ProjectSettings::get( Some(SettingsLocation { worktree_id, @@ -5852,9 +5549,6 @@ impl LspStore { } else { Task::ready(Vec::new()) } - } else if self.mode.is_ssh() { - // TODO ssh - Task::ready(Vec::new()) } else { Task::ready(Vec::new()) } @@ -7905,116 +7599,6 @@ impl LspAdapterDelegate for LocalLspAdapterDelegate { } } -struct SshLspAdapterDelegate { - lsp_store: WeakModel, - worktree: worktree::Snapshot, - upstream_client: AnyProtoClient, - language_registry: Arc, -} - -#[async_trait] -impl LspAdapterDelegate for SshLspAdapterDelegate { - fn show_notification(&self, message: &str, cx: &mut AppContext) { - self.lsp_store - .update(cx, |_, cx| { - cx.emit(LspStoreEvent::Notification(message.to_owned())) - }) - .ok(); - } - - async fn npm_package_installed_version( - &self, - _package_name: &str, - 
) -> Result> { - Ok(None) - } - - fn http_client(&self) -> Arc { - Arc::new(BlockedHttpClient) - } - - fn worktree_id(&self) -> WorktreeId { - self.worktree.id() - } - - fn worktree_root_path(&self) -> &Path { - self.worktree.abs_path().as_ref() - } - - async fn shell_env(&self) -> HashMap { - use rpc::proto::SSH_PROJECT_ID; - - self.upstream_client - .request(proto::ShellEnv { - project_id: SSH_PROJECT_ID, - worktree_id: self.worktree_id().to_proto(), - }) - .await - .map(|response| response.env.into_iter().collect()) - .unwrap_or_default() - } - - async fn which(&self, command: &OsStr) -> Option { - use rpc::proto::SSH_PROJECT_ID; - - self.upstream_client - .request(proto::WhichCommand { - project_id: SSH_PROJECT_ID, - worktree_id: self.worktree_id().to_proto(), - command: command.to_string_lossy().to_string(), - }) - .await - .log_err() - .and_then(|response| response.path) - .map(PathBuf::from) - } - - async fn try_exec(&self, command: LanguageServerBinary) -> Result<()> { - self.upstream_client - .request(proto::TryExec { - project_id: rpc::proto::SSH_PROJECT_ID, - worktree_id: self.worktree.id().to_proto(), - binary: Some(proto::LanguageServerCommand { - path: command.path.to_string_lossy().to_string(), - arguments: command - .arguments - .into_iter() - .map(|s| s.to_string_lossy().to_string()) - .collect(), - env: command.env.unwrap_or_default().into_iter().collect(), - }), - }) - .await?; - Ok(()) - } - - async fn language_server_download_dir(&self, _: &LanguageServerName) -> Option> { - None - } - - fn update_status( - &self, - server_name: LanguageServerName, - status: language::LanguageServerBinaryStatus, - ) { - self.language_registry - .update_lsp_status(server_name, status); - } - - async fn read_text_file(&self, path: PathBuf) -> Result { - self.upstream_client - .request(proto::ReadTextFile { - project_id: rpc::proto::SSH_PROJECT_ID, - path: Some(proto::ProjectPath { - worktree_id: self.worktree.id().to_proto(), - path: path.to_string_lossy().to_string(), - }), - }) - .await - .map(|r| r.text) - } -} - async fn populate_labels_for_symbols( symbols: Vec, language_registry: &Arc, diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 10fd88f286d99..c3b3c383c11ff 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -706,11 +706,12 @@ impl Project { let environment = ProjectEnvironment::new(&worktree_store, None, cx); let lsp_store = cx.new_model(|cx| { - LspStore::new_ssh( + LspStore::new_remote( buffer_store.clone(), worktree_store.clone(), languages.clone(), ssh.clone().into(), + SSH_PROJECT_ID, cx, ) }); diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index 475ed139edfb8..d81ef35f6bffb 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -283,18 +283,6 @@ message Envelope { CloseBuffer close_buffer = 245; UpdateUserSettings update_user_settings = 246; - CreateLanguageServer create_language_server = 247; - - WhichCommand which_command = 248; - WhichCommandResponse which_command_response = 249; - - ShellEnv shell_env = 250; - ShellEnvResponse shell_env_response = 251; - - TryExec try_exec = 252; - ReadTextFile read_text_file = 253; - ReadTextFileResponse read_text_file_response = 254; - CheckFileExists check_file_exists = 255; CheckFileExistsResponse check_file_exists_response = 256; // current max } @@ -302,6 +290,7 @@ message Envelope { reserved 158 to 161; reserved 166 to 169; reserved 224 to 229; + reserved 247 to 254; } // Messages @@ -2517,67 +2506,6 @@ message 
UpdateUserSettings { string content = 2; } -message LanguageServerCommand { - string path = 1; - repeated string arguments = 2; - map env = 3; -} - -message AvailableLanguage { - string name = 7; - string matcher = 8; -} - -message CreateLanguageServer { - uint64 project_id = 1; - uint64 worktree_id = 2; - string name = 3; - - LanguageServerCommand binary = 4; - optional string initialization_options = 5; - optional string code_action_kinds = 6; - - AvailableLanguage language = 7; -} - -message WhichCommand { - uint64 project_id = 1; - uint64 worktree_id = 2; - string command = 3; -} - -message WhichCommandResponse { - optional string path = 1; -} - -message ShellEnv { - uint64 project_id = 1; - uint64 worktree_id = 2; -} - -message ShellEnvResponse { - map env = 1; -} - -message ReadTextFile { - uint64 project_id = 1; - ProjectPath path = 2; -} - -message ReadTextFileResponse { - string text = 1; -} - -message TryExec { - uint64 project_id = 1; - uint64 worktree_id = 2; - LanguageServerCommand binary = 3; -} - -message TryExecResponse { - string text = 1; -} - message CheckFileExists { uint64 project_id = 1; string path = 2; diff --git a/crates/proto/src/proto.rs b/crates/proto/src/proto.rs index 4146a47409ad7..799d51defec71 100644 --- a/crates/proto/src/proto.rs +++ b/crates/proto/src/proto.rs @@ -365,14 +365,6 @@ messages!( (FindSearchCandidatesResponse, Background), (CloseBuffer, Foreground), (UpdateUserSettings, Foreground), - (CreateLanguageServer, Foreground), - (WhichCommand, Foreground), - (WhichCommandResponse, Foreground), - (ShellEnv, Foreground), - (ShellEnvResponse, Foreground), - (TryExec, Foreground), - (ReadTextFile, Foreground), - (ReadTextFileResponse, Foreground), (CheckFileExists, Background), (CheckFileExistsResponse, Background) ); @@ -498,11 +490,6 @@ request_messages!( (SynchronizeContexts, SynchronizeContextsResponse), (LspExtSwitchSourceHeader, LspExtSwitchSourceHeaderResponse), (AddWorktree, AddWorktreeResponse), - (CreateLanguageServer, Ack), - (WhichCommand, WhichCommandResponse), - (ShellEnv, ShellEnvResponse), - (ReadTextFile, ReadTextFileResponse), - (TryExec, Ack), (CheckFileExists, CheckFileExistsResponse) ); @@ -577,11 +564,6 @@ entity_messages!( SynchronizeContexts, LspExtSwitchSourceHeader, UpdateUserSettings, - CreateLanguageServer, - WhichCommand, - ShellEnv, - TryExec, - ReadTextFile, CheckFileExists, ); diff --git a/crates/remote_server/Cargo.toml b/crates/remote_server/Cargo.toml index 64db2616e9b2f..b15970042d0f7 100644 --- a/crates/remote_server/Cargo.toml +++ b/crates/remote_server/Cargo.toml @@ -39,6 +39,7 @@ shellexpand.workspace = true smol.workspace = true worktree.workspace = true language.workspace = true +languages.workspace = true util.workspace = true [dev-dependencies] diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index 84fb22b282d37..4b13938d8ca2f 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -44,6 +44,10 @@ impl HeadlessProject { pub fn new(session: Arc, fs: Arc, cx: &mut ModelContext) -> Self { let languages = Arc::new(LanguageRegistry::new(cx.background_executor().clone())); + let node_runtime = NodeRuntime::unavailable(); + + languages::init(languages.clone(), node_runtime.clone(), cx); + let worktree_store = cx.new_model(|cx| { let mut store = WorktreeStore::local(true, fs.clone()); store.shared(SSH_PROJECT_ID, session.clone().into(), cx); @@ -56,7 +60,7 @@ impl HeadlessProject { }); let prettier_store = 
cx.new_model(|cx| { PrettierStore::new( - NodeRuntime::unavailable(), + node_runtime, fs.clone(), languages.clone(), worktree_store.clone(), @@ -116,12 +120,6 @@ impl HeadlessProject { client.add_model_request_handler(BufferStore::handle_update_buffer); client.add_model_message_handler(BufferStore::handle_close_buffer); - client.add_model_request_handler(LspStore::handle_create_language_server); - client.add_model_request_handler(LspStore::handle_which_command); - client.add_model_request_handler(LspStore::handle_shell_env); - client.add_model_request_handler(LspStore::handle_try_exec); - client.add_model_request_handler(LspStore::handle_read_text_file); - BufferStore::init(&client); WorktreeStore::init(&client); SettingsObserver::init(&client); diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index eb8f45d92e476..897e0e9a28bca 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -64,7 +64,7 @@ language.workspace = true language_model.workspace = true language_selector.workspace = true language_tools.workspace = true -languages.workspace = true +languages = {workspace = true, features = ["load-grammars"] } libc.workspace = true log.workspace = true markdown_preview.workspace = true From 3161aedcb02e5e18bc802ffa38504909490938c6 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Wed, 25 Sep 2024 16:03:08 -0700 Subject: [PATCH 080/228] Fix broken collaboration UI from #18308 (#18372) Fixes a bug introduced by #18308, that caused the call controls to render incorrectly. Release Notes: - N/A --- crates/title_bar/src/collab.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/crates/title_bar/src/collab.rs b/crates/title_bar/src/collab.rs index e9f89643d5729..2f53458905035 100644 --- a/crates/title_bar/src/collab.rs +++ b/crates/title_bar/src/collab.rs @@ -284,14 +284,14 @@ impl TitleBar { let room = room.read(cx); let project = self.project.read(cx); + let is_local = project.is_local() || project.is_via_ssh(); let is_dev_server_project = project.dev_server_project_id().is_some(); - let is_shared = project.is_shared(); + let is_shared = (is_local || is_dev_server_project) && project.is_shared(); let is_muted = room.is_muted(); let is_deafened = room.is_deafened().unwrap_or(false); let is_screen_sharing = room.is_screen_sharing(); let can_use_microphone = room.can_use_microphone(); - let can_share_projects = room.can_share_projects() - && (is_dev_server_project || project.is_local() || project.is_via_ssh()); + let can_share_projects = room.can_share_projects(); let platform_supported = match self.platform_style { PlatformStyle::Mac => true, PlatformStyle::Linux | PlatformStyle::Windows => false, @@ -299,7 +299,7 @@ impl TitleBar { let mut children = Vec::new(); - if can_share_projects { + if (is_local || is_dev_server_project) && can_share_projects { children.push( Button::new( "toggle_sharing", From 6167688a63eed63791814e39b3b8fd1a10da0e9b Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 25 Sep 2024 16:33:00 -0700 Subject: [PATCH 081/228] Proposed changes editor features (#18373) This PR adds some more functionality to the Proposed Changes Editor view, which we'll be using in https://github.com/zed-industries/zed/pull/18240 for allowing the assistant to propose changes to a set of buffers. 
* Add an `Apply All` button, and fully implement applying of changes to the base buffer * Make the proposed changes editor searchable * Fix a bug in branch buffers' diff state management Release Notes: - N/A --- crates/editor/src/editor.rs | 4 +- crates/editor/src/proposed_changes_editor.rs | 84 +++++++++++- crates/language/src/buffer.rs | 130 ++++++++++++++----- crates/language/src/buffer_tests.rs | 96 ++++++++------ crates/zed/src/zed.rs | 3 + 5 files changed, 241 insertions(+), 76 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 78c8ba6920337..23448b43a7cc7 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -98,7 +98,9 @@ use language::{ }; use language::{point_to_lsp, BufferRow, CharClassifier, Runnable, RunnableRange}; use linked_editing_ranges::refresh_linked_ranges; -use proposed_changes_editor::{ProposedChangesBuffer, ProposedChangesEditor}; +pub use proposed_changes_editor::{ + ProposedChangesBuffer, ProposedChangesEditor, ProposedChangesEditorToolbar, +}; use similar::{ChangeTag, TextDiff}; use task::{ResolvedTask, TaskTemplate, TaskVariables}; diff --git a/crates/editor/src/proposed_changes_editor.rs b/crates/editor/src/proposed_changes_editor.rs index 3979e558a4236..ec0c05d88382c 100644 --- a/crates/editor/src/proposed_changes_editor.rs +++ b/crates/editor/src/proposed_changes_editor.rs @@ -6,10 +6,13 @@ use language::{Buffer, BufferEvent, Capability}; use multi_buffer::{ExcerptRange, MultiBuffer}; use project::Project; use smol::stream::StreamExt; -use std::{ops::Range, time::Duration}; +use std::{any::TypeId, ops::Range, time::Duration}; use text::ToOffset; use ui::prelude::*; -use workspace::Item; +use workspace::{ + searchable::SearchableItemHandle, Item, ItemHandle as _, ToolbarItemEvent, ToolbarItemLocation, + ToolbarItemView, +}; pub struct ProposedChangesEditor { editor: View, @@ -23,6 +26,10 @@ pub struct ProposedChangesBuffer { pub ranges: Vec>, } +pub struct ProposedChangesEditorToolbar { + current_editor: Option>, +} + impl ProposedChangesEditor { pub fn new( buffers: Vec>, @@ -96,6 +103,17 @@ impl ProposedChangesEditor { self.recalculate_diffs_tx.unbounded_send(buffer).ok(); } } + + fn apply_all_changes(&self, cx: &mut ViewContext) { + let buffers = self.editor.read(cx).buffer.read(cx).all_buffers(); + for branch_buffer in buffers { + if let Some(base_buffer) = branch_buffer.read(cx).diff_base_buffer() { + base_buffer.update(cx, |base_buffer, cx| { + base_buffer.merge(&branch_buffer, None, cx) + }); + } + } + } } impl Render for ProposedChangesEditor { @@ -122,4 +140,66 @@ impl Item for ProposedChangesEditor { fn tab_content_text(&self, _cx: &WindowContext) -> Option { Some("Proposed changes".into()) } + + fn as_searchable(&self, _: &View) -> Option> { + Some(Box::new(self.editor.clone())) + } + + fn act_as_type<'a>( + &'a self, + type_id: TypeId, + self_handle: &'a View, + _: &'a AppContext, + ) -> Option { + if type_id == TypeId::of::() { + Some(self_handle.to_any()) + } else if type_id == TypeId::of::() { + Some(self.editor.to_any()) + } else { + None + } + } +} + +impl ProposedChangesEditorToolbar { + pub fn new() -> Self { + Self { + current_editor: None, + } + } + + fn get_toolbar_item_location(&self) -> ToolbarItemLocation { + if self.current_editor.is_some() { + ToolbarItemLocation::PrimaryRight + } else { + ToolbarItemLocation::Hidden + } + } +} + +impl Render for ProposedChangesEditorToolbar { + fn render(&mut self, _cx: &mut ViewContext) -> impl IntoElement { + let editor = 
self.current_editor.clone(); + Button::new("apply-changes", "Apply All").on_click(move |_, cx| { + if let Some(editor) = &editor { + editor.update(cx, |editor, cx| { + editor.apply_all_changes(cx); + }); + } + }) + } +} + +impl EventEmitter for ProposedChangesEditorToolbar {} + +impl ToolbarItemView for ProposedChangesEditorToolbar { + fn set_active_pane_item( + &mut self, + active_pane_item: Option<&dyn workspace::ItemHandle>, + _cx: &mut ViewContext, + ) -> workspace::ToolbarItemLocation { + self.current_editor = + active_pane_item.and_then(|item| item.downcast::()); + self.get_toolbar_item_location() + } } diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 5735ee961651a..7abc9b8dba146 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -87,7 +87,11 @@ pub type BufferRow = u32; #[derive(Clone)] enum BufferDiffBase { Git(Rope), - PastBufferVersion(Model, BufferSnapshot), + PastBufferVersion { + buffer: Model, + rope: Rope, + operations_to_ignore: Vec, + }, } /// An in-memory representation of a source code file, including its text, @@ -795,19 +799,15 @@ impl Buffer { let this = cx.handle(); cx.new_model(|cx| { let mut branch = Self { - diff_base: Some(BufferDiffBase::PastBufferVersion( - this.clone(), - self.snapshot(), - )), + diff_base: Some(BufferDiffBase::PastBufferVersion { + buffer: this.clone(), + rope: self.as_rope().clone(), + operations_to_ignore: Vec::new(), + }), language: self.language.clone(), has_conflict: self.has_conflict, has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()), - _subscriptions: vec![cx.subscribe(&this, |branch: &mut Self, _, event, cx| { - if let BufferEvent::Operation { operation, .. } = event { - branch.apply_ops([operation.clone()], cx); - branch.diff_base_version += 1; - } - })], + _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)], ..Self::build( self.text.branch(), None, @@ -823,18 +823,74 @@ impl Buffer { }) } - pub fn merge(&mut self, branch: &Model, cx: &mut ModelContext) { - let branch = branch.read(cx); - let edits = branch - .edits_since::(&self.version) - .map(|edit| { - ( - edit.old, - branch.text_for_range(edit.new).collect::(), + /// Applies all of the changes in `branch` buffer that intersect the given `range` + /// to this buffer. + pub fn merge( + &mut self, + branch: &Model, + range: Option>, + cx: &mut ModelContext, + ) { + let edits = branch.read_with(cx, |branch, _| { + branch + .edits_since_in_range::( + &self.version, + range.unwrap_or(Anchor::MIN..Anchor::MAX), ) - }) - .collect::>(); - self.edit(edits, None, cx); + .map(|edit| { + ( + edit.old, + branch.text_for_range(edit.new).collect::(), + ) + }) + .collect::>() + }); + let operation = self.edit(edits, None, cx); + + // Prevent this operation from being reapplied to the branch. + branch.update(cx, |branch, cx| { + if let Some(BufferDiffBase::PastBufferVersion { + operations_to_ignore, + .. + }) = &mut branch.diff_base + { + operations_to_ignore.extend(operation); + } + cx.emit(BufferEvent::Edited) + }); + } + + fn on_base_buffer_event( + &mut self, + _: Model, + event: &BufferEvent, + cx: &mut ModelContext, + ) { + if let BufferEvent::Operation { operation, .. } = event { + if let Some(BufferDiffBase::PastBufferVersion { + operations_to_ignore, + .. 
+ }) = &mut self.diff_base + { + let mut is_ignored = false; + if let Operation::Buffer(text::Operation::Edit(buffer_operation)) = &operation { + operations_to_ignore.retain(|operation_to_ignore| { + match buffer_operation.timestamp.cmp(&operation_to_ignore) { + Ordering::Less => true, + Ordering::Equal => { + is_ignored = true; + false + } + Ordering::Greater => false, + } + }); + } + if !is_ignored { + self.apply_ops([operation.clone()], cx); + self.diff_base_version += 1; + } + } + } } #[cfg(test)] @@ -1017,9 +1073,8 @@ impl Buffer { /// Returns the current diff base, see [Buffer::set_diff_base]. pub fn diff_base(&self) -> Option<&Rope> { match self.diff_base.as_ref()? { - BufferDiffBase::Git(rope) => Some(rope), - BufferDiffBase::PastBufferVersion(_, buffer_snapshot) => { - Some(buffer_snapshot.as_rope()) + BufferDiffBase::Git(rope) | BufferDiffBase::PastBufferVersion { rope, .. } => { + Some(rope) } } } @@ -1050,29 +1105,36 @@ impl Buffer { self.diff_base_version } + pub fn diff_base_buffer(&self) -> Option> { + match self.diff_base.as_ref()? { + BufferDiffBase::Git(_) => None, + BufferDiffBase::PastBufferVersion { buffer, .. } => Some(buffer.clone()), + } + } + /// Recomputes the diff. pub fn recalculate_diff(&mut self, cx: &mut ModelContext) -> Option> { - let diff_base_rope = match self.diff_base.as_mut()? { + let diff_base_rope = match self.diff_base.as_ref()? { BufferDiffBase::Git(rope) => rope.clone(), - BufferDiffBase::PastBufferVersion(base_buffer, base_buffer_snapshot) => { - let new_base_snapshot = base_buffer.read(cx).snapshot(); - *base_buffer_snapshot = new_base_snapshot; - base_buffer_snapshot.as_rope().clone() - } + BufferDiffBase::PastBufferVersion { buffer, .. } => buffer.read(cx).as_rope().clone(), }; - let snapshot = self.snapshot(); + let snapshot = self.snapshot(); let mut diff = self.git_diff.clone(); let diff = cx.background_executor().spawn(async move { diff.update(&diff_base_rope, &snapshot).await; - diff + (diff, diff_base_rope) }); Some(cx.spawn(|this, mut cx| async move { - let buffer_diff = diff.await; + let (buffer_diff, diff_base_rope) = diff.await; this.update(&mut cx, |this, cx| { this.git_diff = buffer_diff; this.non_text_state_update_count += 1; + if let Some(BufferDiffBase::PastBufferVersion { rope, .. }) = &mut this.diff_base { + *rope = diff_base_rope; + cx.emit(BufferEvent::DiffBaseChanged); + } cx.emit(BufferEvent::DiffUpdated); }) .ok(); diff --git a/crates/language/src/buffer_tests.rs b/crates/language/src/buffer_tests.rs index 1335a94dd0313..49cc31067b93a 100644 --- a/crates/language/src/buffer_tests.rs +++ b/crates/language/src/buffer_tests.rs @@ -2413,80 +2413,98 @@ fn test_branch_and_merge(cx: &mut TestAppContext) { }); // Edits to the branch are not applied to the base. - branch_buffer.update(cx, |buffer, cx| { - buffer.edit( - [(Point::new(1, 0)..Point::new(1, 0), "ONE_POINT_FIVE\n")], + branch_buffer.update(cx, |branch_buffer, cx| { + branch_buffer.edit( + [ + (Point::new(1, 0)..Point::new(1, 0), "1.5\n"), + (Point::new(2, 0)..Point::new(2, 5), "THREE"), + ], None, cx, ) }); branch_buffer.read_with(cx, |branch_buffer, cx| { assert_eq!(base_buffer.read(cx).text(), "one\ntwo\nthree\n"); - assert_eq!(branch_buffer.text(), "one\nONE_POINT_FIVE\ntwo\nthree\n"); + assert_eq!(branch_buffer.text(), "one\n1.5\ntwo\nTHREE\n"); }); + // The branch buffer maintains a diff with respect to its base buffer. 
+ start_recalculating_diff(&branch_buffer, cx); + cx.run_until_parked(); + assert_diff_hunks( + &branch_buffer, + cx, + &[(1..2, "", "1.5\n"), (3..4, "three\n", "THREE\n")], + ); + // Edits to the base are applied to the branch. base_buffer.update(cx, |buffer, cx| { buffer.edit([(Point::new(0, 0)..Point::new(0, 0), "ZERO\n")], None, cx) }); branch_buffer.read_with(cx, |branch_buffer, cx| { assert_eq!(base_buffer.read(cx).text(), "ZERO\none\ntwo\nthree\n"); - assert_eq!( - branch_buffer.text(), - "ZERO\none\nONE_POINT_FIVE\ntwo\nthree\n" - ); + assert_eq!(branch_buffer.text(), "ZERO\none\n1.5\ntwo\nTHREE\n"); }); - assert_diff_hunks(&branch_buffer, cx, &[(2..3, "", "ONE_POINT_FIVE\n")]); + // Until the git diff recalculation is complete, the git diff references + // the previous content of the base buffer, so that it stays in sync. + start_recalculating_diff(&branch_buffer, cx); + assert_diff_hunks( + &branch_buffer, + cx, + &[(2..3, "", "1.5\n"), (4..5, "three\n", "THREE\n")], + ); + cx.run_until_parked(); + assert_diff_hunks( + &branch_buffer, + cx, + &[(2..3, "", "1.5\n"), (4..5, "three\n", "THREE\n")], + ); // Edits to any replica of the base are applied to the branch. base_buffer_replica.update(cx, |buffer, cx| { - buffer.edit( - [(Point::new(2, 0)..Point::new(2, 0), "TWO_POINT_FIVE\n")], - None, - cx, - ) + buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "2.5\n")], None, cx) }); branch_buffer.read_with(cx, |branch_buffer, cx| { - assert_eq!( - base_buffer.read(cx).text(), - "ZERO\none\ntwo\nTWO_POINT_FIVE\nthree\n" - ); - assert_eq!( - branch_buffer.text(), - "ZERO\none\nONE_POINT_FIVE\ntwo\nTWO_POINT_FIVE\nthree\n" - ); + assert_eq!(base_buffer.read(cx).text(), "ZERO\none\ntwo\n2.5\nthree\n"); + assert_eq!(branch_buffer.text(), "ZERO\none\n1.5\ntwo\n2.5\nTHREE\n"); }); // Merging the branch applies all of its changes to the base. 
base_buffer.update(cx, |base_buffer, cx| { - base_buffer.merge(&branch_buffer, cx); + base_buffer.merge(&branch_buffer, None, cx); + }); + + branch_buffer.update(cx, |branch_buffer, cx| { assert_eq!( - base_buffer.text(), - "ZERO\none\nONE_POINT_FIVE\ntwo\nTWO_POINT_FIVE\nthree\n" + base_buffer.read(cx).text(), + "ZERO\none\n1.5\ntwo\n2.5\nTHREE\n" ); + assert_eq!(branch_buffer.text(), "ZERO\none\n1.5\ntwo\n2.5\nTHREE\n"); }); } +fn start_recalculating_diff(buffer: &Model, cx: &mut TestAppContext) { + buffer + .update(cx, |buffer, cx| buffer.recalculate_diff(cx).unwrap()) + .detach(); +} + +#[track_caller] fn assert_diff_hunks( buffer: &Model, cx: &mut TestAppContext, expected_hunks: &[(Range, &str, &str)], ) { - buffer - .update(cx, |buffer, cx| buffer.recalculate_diff(cx).unwrap()) - .detach(); - cx.executor().run_until_parked(); - - buffer.read_with(cx, |buffer, _| { - let snapshot = buffer.snapshot(); - assert_hunks( - snapshot.git_diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX), - &snapshot, - &buffer.diff_base().unwrap().to_string(), - expected_hunks, - ); - }); + let (snapshot, diff_base) = buffer.read_with(cx, |buffer, _| { + (buffer.snapshot(), buffer.diff_base().unwrap().to_string()) + }); + assert_hunks( + snapshot.git_diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX), + &snapshot, + &diff_base, + expected_hunks, + ); } #[gpui::test(iterations = 100)] diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index c631c01f99a1a..4dc378a755bda 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -14,6 +14,7 @@ use breadcrumbs::Breadcrumbs; use client::ZED_URL_SCHEME; use collections::VecDeque; use command_palette_hooks::CommandPaletteFilter; +use editor::ProposedChangesEditorToolbar; use editor::{scroll::Autoscroll, Editor, MultiBuffer}; use feature_flags::FeatureFlagAppExt; use gpui::{ @@ -582,6 +583,8 @@ fn initialize_pane(workspace: &mut Workspace, pane: &View, cx: &mut ViewCo let buffer_search_bar = cx.new_view(search::BufferSearchBar::new); toolbar.add_item(buffer_search_bar.clone(), cx); + let proposed_change_bar = cx.new_view(|_| ProposedChangesEditorToolbar::new()); + toolbar.add_item(proposed_change_bar, cx); let quick_action_bar = cx.new_view(|cx| QuickActionBar::new(buffer_search_bar, workspace, cx)); toolbar.add_item(quick_action_bar, cx); From b701eab44f0728d90d2d65b3bba20263e16897ad Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Thu, 26 Sep 2024 01:31:17 -0600 Subject: [PATCH 082/228] Avoid unwrap in file finder (#18374) Release Notes: - Fixed a (rare) panic in file finder --------- Co-authored-by: Kirill Bulatov --- crates/file_finder/src/file_finder.rs | 28 +++++++++++++++------------ 1 file changed, 16 insertions(+), 12 deletions(-) diff --git a/crates/file_finder/src/file_finder.rs b/crates/file_finder/src/file_finder.rs index 1a65bd352d61d..f63c499ee84c7 100644 --- a/crates/file_finder/src/file_finder.rs +++ b/crates/file_finder/src/file_finder.rs @@ -394,7 +394,7 @@ fn matching_history_items<'a>( .chars(), ), }; - candidates_paths.insert(Arc::clone(&found_path.project.path), found_path); + candidates_paths.insert(&found_path.project, found_path); Some((found_path.project.worktree_id, candidate)) }) .fold( @@ -419,17 +419,21 @@ fn matching_history_items<'a>( max_results, ) .into_iter() - .map(|path_match| { - let (_, found_path) = candidates_paths - .remove_entry(&path_match.path) - .expect("candidate info not found"); - ( - Arc::clone(&path_match.path), - Match::History { - path: found_path.clone(), - panel_match: 
Some(ProjectPanelOrdMatch(path_match)), - }, - ) + .filter_map(|path_match| { + candidates_paths + .remove_entry(&ProjectPath { + worktree_id: WorktreeId::from_usize(path_match.worktree_id), + path: Arc::clone(&path_match.path), + }) + .map(|(_, found_path)| { + ( + Arc::clone(&path_match.path), + Match::History { + path: found_path.clone(), + panel_match: Some(ProjectPanelOrdMatch(path_match)), + }, + ) + }) }), ); } From 2d2e20f9d426709f8cebfc7321866880834db4a3 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Thu, 26 Sep 2024 11:07:07 +0200 Subject: [PATCH 083/228] editor: Fix cursor shape not restoring when setting removed (#18379) Closes #18119 Release Notes: - Fixed the cursor shape in the editor not changing back to default when `{"cursor_shape": "..."}` setting is removed. (Does not apply to Vim mode.) --- crates/editor/src/editor.rs | 18 +++++++++++++----- crates/vim/src/vim.rs | 7 +++++++ 2 files changed, 20 insertions(+), 5 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 23448b43a7cc7..6e5543132c8e7 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -11925,12 +11925,19 @@ impl Editor { )), cx, ); - let editor_settings = EditorSettings::get_global(cx); - if let Some(cursor_shape) = editor_settings.cursor_shape { - self.cursor_shape = cursor_shape; + + let old_cursor_shape = self.cursor_shape; + + { + let editor_settings = EditorSettings::get_global(cx); + self.scroll_manager.vertical_scroll_margin = editor_settings.vertical_scroll_margin; + self.show_breadcrumbs = editor_settings.toolbar.breadcrumbs; + self.cursor_shape = editor_settings.cursor_shape.unwrap_or_default(); + } + + if old_cursor_shape != self.cursor_shape { + cx.emit(EditorEvent::CursorShapeChanged); } - self.scroll_manager.vertical_scroll_margin = editor_settings.vertical_scroll_margin; - self.show_breadcrumbs = editor_settings.toolbar.breadcrumbs; let project_settings = ProjectSettings::get_global(cx); self.serialize_dirty_buffers = project_settings.session.restore_unsaved_buffers; @@ -13127,6 +13134,7 @@ pub enum EditorEvent { TransactionBegun { transaction_id: clock::Lamport, }, + CursorShapeChanged, } impl EventEmitter for Editor {} diff --git a/crates/vim/src/vim.rs b/crates/vim/src/vim.rs index 701972c19bb61..06116bff99de4 100644 --- a/crates/vim/src/vim.rs +++ b/crates/vim/src/vim.rs @@ -389,6 +389,7 @@ impl Vim { } EditorEvent::Edited { .. 
} => self.push_to_change_list(cx), EditorEvent::FocusedIn => self.sync_vim_settings(cx), + EditorEvent::CursorShapeChanged => self.cursor_shape_changed(cx), _ => {} } } @@ -679,6 +680,12 @@ impl Vim { }); } + fn cursor_shape_changed(&mut self, cx: &mut ViewContext) { + self.update_editor(cx, |vim, editor, cx| { + editor.set_cursor_shape(vim.cursor_shape(), cx); + }); + } + fn update_editor( &mut self, cx: &mut ViewContext, From b9b689d3221b6bcbea349b98a480d8e8f87fa802 Mon Sep 17 00:00:00 2001 From: "Hyunmin Woo (Hanul)" Date: Thu, 26 Sep 2024 19:24:29 +0900 Subject: [PATCH 084/228] Fix Typo in rust language guide (#18383) Release Notes: - N/A --- docs/src/languages/rust.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/src/languages/rust.md b/docs/src/languages/rust.md index 02e90d60a403b..330b5fa9d0151 100644 --- a/docs/src/languages/rust.md +++ b/docs/src/languages/rust.md @@ -23,8 +23,8 @@ The following configuration can be used to change the inlay hint settings for `r "inlayHints": { "maxLength": null, "lifetimeElisionHints": { - "enable": "skip_trivial" - "useParameterNames": true, + "enable": "skip_trivial", + "useParameterNames": true }, "closureReturnTypeHints": { "enable": "always" From 140d70289e54328509f59d9de2fefd8e8b35bec0 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Thu, 26 Sep 2024 12:26:58 +0200 Subject: [PATCH 085/228] Avoid panic by only restoring workspace if UI has launched (#18386) This should fix the `unregistered setting type workspace::workspace_settings::WorkspaceSettings` panic that came from inside `restorable_workspace_locations`. We tracked it down to a possible scenario (we can't recreate it though) in which `app.on_reopen` is called before the app has finished launching. In any case, this check makes sense, because we only want to restore a workspace in case the whole app has launched with a UI. Release Notes: - N/A Co-authored-by: Bennet --- crates/zed/src/main.rs | 23 +++++++++++++++-------- 1 file changed, 15 insertions(+), 8 deletions(-) diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 0f37e06f438f9..186805d12cd01 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -425,15 +425,22 @@ fn main() { app.on_reopen(move |cx| { if let Some(app_state) = AppState::try_global(cx).and_then(|app_state| app_state.upgrade()) { - cx.spawn({ - let app_state = app_state.clone(); - |mut cx| async move { - if let Err(e) = restore_or_create_workspace(app_state, &mut cx).await { - fail_to_open_window_async(e, &mut cx) + let ui_has_launched = cx + .try_global::() + .map(|mode| matches!(mode, AppMode::Ui)) + .unwrap_or(false); + + if ui_has_launched { + cx.spawn({ + let app_state = app_state.clone(); + |mut cx| async move { + if let Err(e) = restore_or_create_workspace(app_state, &mut cx).await { + fail_to_open_window_async(e, &mut cx) + } } - } - }) - .detach(); + }) + .detach(); + } } }); From 3f415f3587b12cc4745300e4c129f252649356e2 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Thu, 26 Sep 2024 12:27:08 +0200 Subject: [PATCH 086/228] Fix `use_on_type_format` setting being unused per language (#18387) Before this change, `use_on_type_format` would only have an effect when defined on a global level in our settings. But our default.json settings would also document that it's used in language settings, i.e.: ```json { "languages": { "C": { "use_on_type_format": false }, "C++": { "use_on_type_format": false } } } ``` But this did **not** work. With the change, it now works globally and per-language. 
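
For readability, here is the heart of the change, condensed from the diff below (an excerpt, not standalone code): the editor now consults the resolved per-language settings before issuing the on-type-formatting request.

```rust
// Condensed from `Editor::trigger_on_type_formatting` in this patch: resolve the
// settings for the language at the cursor position, then bail out if the option
// is disabled there.
let settings = language_settings::language_settings(
    buffer.read(cx).language_at(buffer_position).as_ref(),
    buffer.read(cx).file(),
    cx,
);
if !settings.use_on_type_format {
    return None; // respects both the global and the per-language configuration
}
```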
Release Notes: - Fixed `use_on_type_format` setting not working when defined inside `"languages"` in the settings. This change will now change the default behavior for C, C++, and Markdown, by turning language server's `OnTypeFormatting` completions off by default. Co-authored-by: Bennet --- crates/editor/src/editor.rs | 11 ++++++++++- crates/editor/src/editor_settings.rs | 6 ------ crates/language/src/language_settings.rs | 9 +++++++++ 3 files changed, 19 insertions(+), 7 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 6e5543132c8e7..730482b123bf7 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -3442,7 +3442,7 @@ impl Editor { s.select(new_selections) }); - if !bracket_inserted && EditorSettings::get_global(cx).use_on_type_format { + if !bracket_inserted { if let Some(on_type_format_task) = this.trigger_on_type_formatting(text.to_string(), cx) { @@ -4191,6 +4191,15 @@ impl Editor { .read(cx) .text_anchor_for_position(position, cx)?; + let settings = language_settings::language_settings( + buffer.read(cx).language_at(buffer_position).as_ref(), + buffer.read(cx).file(), + cx, + ); + if !settings.use_on_type_format { + return None; + } + // OnTypeFormatting returns a list of edits, no need to pass them between Zed instances, // hence we do LSP request & edit on host side only — add formats to host's history. let push_to_lsp_host_history = true; diff --git a/crates/editor/src/editor_settings.rs b/crates/editor/src/editor_settings.rs index d651e76c2c2e7..9137629241468 100644 --- a/crates/editor/src/editor_settings.rs +++ b/crates/editor/src/editor_settings.rs @@ -13,7 +13,6 @@ pub struct EditorSettings { pub show_completions_on_input: bool, pub show_completion_documentation: bool, pub completion_documentation_secondary_query_debounce: u64, - pub use_on_type_format: bool, pub toolbar: Toolbar, pub scrollbar: Scrollbar, pub gutter: Gutter, @@ -209,11 +208,6 @@ pub struct EditorSettingsContent { /// /// Default: 300 ms pub completion_documentation_secondary_query_debounce: Option, - /// Whether to use additional LSP queries to format (and amend) the code after - /// every "trigger" symbol input, defined by LSP server capabilities. - /// - /// Default: true - pub use_on_type_format: Option, /// Toolbar related settings pub toolbar: Option, /// Scrollbar related settings diff --git a/crates/language/src/language_settings.rs b/crates/language/src/language_settings.rs index 735a9a60f87fa..f830c5f25c308 100644 --- a/crates/language/src/language_settings.rs +++ b/crates/language/src/language_settings.rs @@ -113,6 +113,9 @@ pub struct LanguageSettings { pub use_autoclose: bool, /// Whether to automatically surround text with brackets. pub use_auto_surround: bool, + /// Whether to use additional LSP queries to format (and amend) the code after + /// every "trigger" symbol input, defined by LSP server capabilities. + pub use_on_type_format: bool, // Controls how the editor handles the autoclosed characters. pub always_treat_brackets_as_autoclosed: bool, /// Which code actions to run on save @@ -333,6 +336,11 @@ pub struct LanguageSettingsContent { /// /// Default: false pub always_treat_brackets_as_autoclosed: Option, + /// Whether to use additional LSP queries to format (and amend) the code after + /// every "trigger" symbol input, defined by LSP server capabilities. + /// + /// Default: true + pub use_on_type_format: Option, /// Which code actions to run on save after the formatter. /// These are not run if formatting is off. 
/// @@ -1045,6 +1053,7 @@ fn merge_settings(settings: &mut LanguageSettings, src: &LanguageSettingsContent merge(&mut settings.soft_wrap, src.soft_wrap); merge(&mut settings.use_autoclose, src.use_autoclose); merge(&mut settings.use_auto_surround, src.use_auto_surround); + merge(&mut settings.use_on_type_format, src.use_on_type_format); merge( &mut settings.always_treat_brackets_as_autoclosed, src.always_treat_brackets_as_autoclosed, From 31902a1b73ce99934b8580cdacc2dd51eb87a046 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Thu, 26 Sep 2024 12:52:56 +0200 Subject: [PATCH 087/228] Remove leftover println statements (#18389) Remove some leftover println statements from #17644 Release Notes: - N/A --- crates/vim/src/normal/increment.rs | 2 -- 1 file changed, 2 deletions(-) diff --git a/crates/vim/src/normal/increment.rs b/crates/vim/src/normal/increment.rs index 6d66e380c30b8..b0501eeef7da3 100644 --- a/crates/vim/src/normal/increment.rs +++ b/crates/vim/src/normal/increment.rs @@ -213,8 +213,6 @@ fn find_number( begin = Some(offset); } num.push(ch); - println!("pushing {}", ch); - println!(); } else if begin.is_some() { end = Some(offset); break; From db92a31067c8a6e6d889a63326decdd42f9de6c2 Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Thu, 26 Sep 2024 13:18:50 +0200 Subject: [PATCH 088/228] lsp: Do not notify all language servers on file save (#17756) This is not an ideal solution to https://github.com/fasterthanlime/zed-diags-readme, but current status quo is not great either; we were just going through all of the language servers and notifying them, whereas we should ideally do it based on a glob. /cc @fasterthanlime Release Notes: - N/A --- crates/project/src/lsp_store.rs | 16 +++++++++ crates/project/src/project_tests.rs | 55 ++++++++++++++++++++++++----- 2 files changed, 63 insertions(+), 8 deletions(-) diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index a4a13b296ed5c..37922b7c2ee03 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -2892,11 +2892,27 @@ impl LspStore { let file = File::from_dyn(buffer.read(cx).file())?; let worktree_id = file.worktree_id(cx); let abs_path = file.as_local()?.abs_path(cx); + let worktree_path = file.as_local()?.path(); let text_document = lsp::TextDocumentIdentifier { uri: lsp::Url::from_file_path(abs_path).log_err()?, }; + let watched_paths_for_server = &self.as_local()?.language_server_watched_paths; for server in self.language_servers_for_worktree(worktree_id) { + let should_notify = maybe!({ + Some( + watched_paths_for_server + .get(&server.server_id())? + .read(cx) + .worktree_paths + .get(&worktree_id)? + .is_match(worktree_path), + ) + }) + .unwrap_or_default(); + if !should_notify { + continue; + } if let Some(include_text) = include_text(server.as_ref()) { let text = if include_text { Some(buffer.read(cx).text()) diff --git a/crates/project/src/project_tests.rs b/crates/project/src/project_tests.rs index 9e58caa244243..dd14ccd60f4e9 100644 --- a/crates/project/src/project_tests.rs +++ b/crates/project/src/project_tests.rs @@ -386,6 +386,34 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { // A server is started up, and it is notified about Rust files. 
let mut fake_rust_server = fake_rust_servers.next().await.unwrap(); + fake_rust_server + .request::(lsp::RegistrationParams { + registrations: vec![lsp::Registration { + id: Default::default(), + method: "workspace/didChangeWatchedFiles".to_string(), + register_options: serde_json::to_value( + lsp::DidChangeWatchedFilesRegistrationOptions { + watchers: vec![ + lsp::FileSystemWatcher { + glob_pattern: lsp::GlobPattern::String( + "/the-root/Cargo.toml".to_string(), + ), + kind: None, + }, + lsp::FileSystemWatcher { + glob_pattern: lsp::GlobPattern::String( + "/the-root/*.rs".to_string(), + ), + kind: None, + }, + ], + }, + ) + .ok(), + }], + }) + .await + .unwrap(); assert_eq!( fake_rust_server .receive_notification::() @@ -433,6 +461,24 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { // A json language server is started up and is only notified about the json buffer. let mut fake_json_server = fake_json_servers.next().await.unwrap(); + fake_json_server + .request::(lsp::RegistrationParams { + registrations: vec![lsp::Registration { + id: Default::default(), + method: "workspace/didChangeWatchedFiles".to_string(), + register_options: serde_json::to_value( + lsp::DidChangeWatchedFilesRegistrationOptions { + watchers: vec![lsp::FileSystemWatcher { + glob_pattern: lsp::GlobPattern::String("/the-root/*.json".to_string()), + kind: None, + }], + }, + ) + .ok(), + }], + }) + .await + .unwrap(); assert_eq!( fake_json_server .receive_notification::() @@ -483,7 +529,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { ) ); - // Save notifications are reported to all servers. + // Save notifications are reported only to servers that signed up for a given extension. project .update(cx, |project, cx| project.save_buffer(toml_buffer, cx)) .await @@ -495,13 +541,6 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { .text_document, lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()) ); - assert_eq!( - fake_json_server - .receive_notification::() - .await - .text_document, - lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()) - ); // Renames are reported only to servers matching the buffer's language. 
fs.rename(

From 1deed247eb37080b6db5d84ed5054214691554e9 Mon Sep 17 00:00:00 2001
From: Taras Martyniuk
Date: Thu, 26 Sep 2024 15:36:58 +0300
Subject: [PATCH 089/228] terraform: Bump to v0.1.1 (#18382)

This PR bumps the Terraform extension to v0.1.1
- https://github.com/zed-industries/zed/pull/17200

Release Notes:

- N/A
---
 Cargo.lock                          | 2 +-
 extensions/terraform/Cargo.toml     | 2 +-
 extensions/terraform/extension.toml | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index 0b3ee53e9aa85..5138d59e27728 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -14687,7 +14687,7 @@ dependencies = [
 [[package]]
 name = "zed_terraform"
-version = "0.1.0"
+version = "0.1.1"
 dependencies = [
  "zed_extension_api 0.1.0",
 ]
diff --git a/extensions/terraform/Cargo.toml b/extensions/terraform/Cargo.toml
index 7892b68466cc6..56ae621e167ef 100644
--- a/extensions/terraform/Cargo.toml
+++ b/extensions/terraform/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "zed_terraform"
-version = "0.1.0"
+version = "0.1.1"
 edition = "2021"
 publish = false
 license = "Apache-2.0"
diff --git a/extensions/terraform/extension.toml b/extensions/terraform/extension.toml
index 80fe03fc04431..fc96f773e9b23 100644
--- a/extensions/terraform/extension.toml
+++ b/extensions/terraform/extension.toml
@@ -1,7 +1,7 @@
 id = "terraform"
 name = "Terraform"
 description = "Terraform support."
-version = "0.1.0"
+version = "0.1.1"
 schema_version = 1
 authors = ["Caius Durling ", "Daniel Banck "]
 repository = "https://github.com/zed-industries/zed"

From 1a4f9b289130593d77db0759be738624203731e1 Mon Sep 17 00:00:00 2001
From: Galen Elias
Date: Thu, 26 Sep 2024 06:30:06 -0700
Subject: [PATCH 090/228] Fix minimum gutter line number spacing (#18021)

I was inspecting how Zed did the layout in the editor, specifically for the
gutter, and noticed that `em_width * X` is being used as the 'width of X
consecutive characters'. However, that math didn't work for me, because em_width doesn't
`git_blame_entries_width`) https://github.com/user-attachments/assets/f2a28cd5-9bb6-4109-bf41-1838e56a75f9 Release Notes: - Fix a slight gutter flicker when going over 999 lines --- crates/editor/src/editor.rs | 3 ++- crates/editor/src/element.rs | 16 ++++++++++++++-- 2 files changed, 16 insertions(+), 3 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 730482b123bf7..102e94f1abb41 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -12956,6 +12956,7 @@ impl EditorSnapshot { font_id: FontId, font_size: Pixels, em_width: Pixels, + em_advance: Pixels, max_line_number_width: Pixels, cx: &AppContext, ) -> GutterDimensions { @@ -12976,7 +12977,7 @@ impl EditorSnapshot { .unwrap_or(gutter_settings.line_numbers); let line_gutter_width = if show_line_numbers { // Avoid flicker-like gutter resizes when the line number gains another digit and only resize the gutter on files with N*10^5 lines. - let min_width_for_number_on_gutter = em_width * 4.0; + let min_width_for_number_on_gutter = em_advance * 4.0; max_line_number_width.max(min_width_for_number_on_gutter) } else { 0.0.into() diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 9fe05bc4f2606..f5db7b94ba8ad 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -4970,6 +4970,7 @@ impl Element for EditorElement { font_id, font_size, em_width, + em_advance, self.max_line_number_width(&snapshot, cx), cx, ); @@ -6283,10 +6284,21 @@ fn compute_auto_height_layout( .unwrap() .size .width; + let em_advance = cx + .text_system() + .advance(font_id, font_size, 'm') + .unwrap() + .width; let mut snapshot = editor.snapshot(cx); - let gutter_dimensions = - snapshot.gutter_dimensions(font_id, font_size, em_width, max_line_number_width, cx); + let gutter_dimensions = snapshot.gutter_dimensions( + font_id, + font_size, + em_width, + em_advance, + max_line_number_width, + cx, + ); editor.gutter_dimensions = gutter_dimensions; let text_width = width - gutter_dimensions.width; From 7eea1a6f51b11bd56150203809f21da7e8b5530d Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Thu, 26 Sep 2024 15:47:14 +0200 Subject: [PATCH 091/228] git blame gutter: Use smallest possible space (#18145) Before: ![screenshot-2024-09-26-15 00 20@2x](https://github.com/user-attachments/assets/f6706325-5bef-404e-a0b4-63a5121969fa) After: ![screenshot-2024-09-26-15 02 24@2x](https://github.com/user-attachments/assets/739d0831-0b4a-457f-917e-10f3a662e74d) Release Notes: - Improved the git blame gutter to take up only the space required to display the longest git author name in the current file. 
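
In rough terms, the gutter now reserves space based on the widest blame entry actually present in the file rather than the previous fixed 53-character allowance. A condensed sketch of the sizing logic, excerpted and simplified from the diff below (not standalone code):

```rust
// Simplified from `EditorSnapshot::gutter_dimensions` in this patch: reserve only as
// many character advances as the longest author name (capped), plus fixed-width parts.
let git_blame_entries_width = git_blame_gutter_max_author_length.map(|max_author_length| {
    let max_char_count = max_author_length
        .min(GIT_BLAME_MAX_AUTHOR_CHARS_DISPLAYED) // cap very long author names
        + 7   // commit sha
        + 14  // relative timestamp ("60 minutes ago")
        + 4;  // gaps and margins
    // em_advance, unlike em_width, accounts for inter-character spacing
    em_advance * max_char_count
});
```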
--------- Co-authored-by: Bennet Bo Fenner --- crates/editor/src/editor.rs | 35 ++++++++++++++++++++++++++++------ crates/editor/src/element.rs | 31 +++++++++++++++--------------- crates/editor/src/git/blame.rs | 21 ++++++++++++++++++++ 3 files changed, 65 insertions(+), 22 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 102e94f1abb41..54d23a8219d4f 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -663,7 +663,7 @@ pub struct EditorSnapshot { show_git_diff_gutter: Option, show_code_actions: Option, show_runnables: Option, - render_git_blame_gutter: bool, + git_blame_gutter_max_author_length: Option, pub display_snapshot: DisplaySnapshot, pub placeholder_text: Option>, is_focused: bool, @@ -673,7 +673,7 @@ pub struct EditorSnapshot { gutter_hovered: bool, } -const GIT_BLAME_GUTTER_WIDTH_CHARS: f32 = 53.; +const GIT_BLAME_MAX_AUTHOR_CHARS_DISPLAYED: usize = 20; #[derive(Default, Debug, Clone, Copy)] pub struct GutterDimensions { @@ -2211,6 +2211,19 @@ impl Editor { } pub fn snapshot(&mut self, cx: &mut WindowContext) -> EditorSnapshot { + let git_blame_gutter_max_author_length = self + .render_git_blame_gutter(cx) + .then(|| { + if let Some(blame) = self.blame.as_ref() { + let max_author_length = + blame.update(cx, |blame, cx| blame.max_author_length(cx)); + Some(max_author_length) + } else { + None + } + }) + .flatten(); + EditorSnapshot { mode: self.mode, show_gutter: self.show_gutter, @@ -2218,7 +2231,7 @@ impl Editor { show_git_diff_gutter: self.show_git_diff_gutter, show_code_actions: self.show_code_actions, show_runnables: self.show_runnables, - render_git_blame_gutter: self.render_git_blame_gutter(cx), + git_blame_gutter_max_author_length, display_snapshot: self.display_map.update(cx, |map, cx| map.snapshot(cx)), scroll_anchor: self.scroll_manager.anchor(), ongoing_scroll: self.scroll_manager.ongoing_scroll(), @@ -12989,9 +13002,19 @@ impl EditorSnapshot { let show_runnables = self.show_runnables.unwrap_or(gutter_settings.runnables); - let git_blame_entries_width = self - .render_git_blame_gutter - .then_some(em_width * GIT_BLAME_GUTTER_WIDTH_CHARS); + let git_blame_entries_width = + self.git_blame_gutter_max_author_length + .map(|max_author_length| { + // Length of the author name, but also space for the commit hash, + // the spacing and the timestamp. 
+ let max_char_count = max_author_length + .min(GIT_BLAME_MAX_AUTHOR_CHARS_DISPLAYED) + + 7 // length of commit sha + + 14 // length of max relative timestamp ("60 minutes ago") + + 4; // gaps and margins + + em_advance * max_char_count + }); let mut left_padding = git_blame_entries_width.unwrap_or(Pixels::ZERO); left_padding += if show_code_actions || show_runnables { diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index f5db7b94ba8ad..6f30062d47ec7 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -21,7 +21,7 @@ use crate::{ EditorSnapshot, EditorStyle, ExpandExcerpts, FocusedBlock, GutterDimensions, HalfPageDown, HalfPageUp, HandleInput, HoveredCursor, HoveredHunk, LineDown, LineUp, OpenExcerpts, PageDown, PageUp, Point, RowExt, RowRangeExt, SelectPhase, Selection, SoftWrap, ToPoint, - CURSORS_VISIBLE_FOR, MAX_LINE_LEN, + CURSORS_VISIBLE_FOR, GIT_BLAME_MAX_AUTHOR_CHARS_DISPLAYED, MAX_LINE_LEN, }; use client::ParticipantIndex; use collections::{BTreeMap, HashMap}; @@ -1445,7 +1445,7 @@ impl EditorElement { AvailableSpace::MaxContent }; let scroll_top = scroll_position.y * line_height; - let start_x = em_width * 1; + let start_x = em_width; let mut last_used_color: Option<(PlayerColor, Oid)> = None; @@ -4228,7 +4228,7 @@ fn render_blame_entry( let short_commit_id = blame_entry.sha.display_short(); let author_name = blame_entry.author.as_deref().unwrap_or(""); - let name = util::truncate_and_trailoff(author_name, 20); + let name = util::truncate_and_trailoff(author_name, GIT_BLAME_MAX_AUTHOR_CHARS_DISPLAYED); let details = blame.read(cx).details_for_entry(&blame_entry); @@ -4240,22 +4240,21 @@ fn render_blame_entry( h_flex() .w_full() + .justify_between() .font_family(style.text.font().family) .line_height(style.text.line_height) .id(("blame", ix)) - .children([ - div() - .text_color(sha_color.cursor) - .child(short_commit_id) - .mr_2(), - div() - .w_full() - .h_flex() - .justify_between() - .text_color(cx.theme().status().hint) - .child(name) - .child(relative_timestamp), - ]) + .text_color(cx.theme().status().hint) + .pr_2() + .gap_2() + .child( + h_flex() + .items_center() + .gap_2() + .child(div().text_color(sha_color.cursor).child(short_commit_id)) + .child(name), + ) + .child(relative_timestamp) .on_mouse_down(MouseButton::Right, { let blame_entry = blame_entry.clone(); let details = details.clone(); diff --git a/crates/editor/src/git/blame.rs b/crates/editor/src/git/blame.rs index 775cbcc379e12..733d42d0c57dd 100644 --- a/crates/editor/src/git/blame.rs +++ b/crates/editor/src/git/blame.rs @@ -207,6 +207,27 @@ impl GitBlame { }) } + pub fn max_author_length(&mut self, cx: &mut ModelContext) -> usize { + self.sync(cx); + + let mut max_author_length = 0; + + for entry in self.entries.iter() { + let author_len = entry + .blame + .as_ref() + .and_then(|entry| entry.author.as_ref()) + .map(|author| author.len()); + if let Some(author_len) = author_len { + if author_len > max_author_length { + max_author_length = author_len; + } + } + } + + max_author_length + } + pub fn blur(&mut self, _: &mut ModelContext) { self.focused = false; } From f143396825c89b96076087ff80630d0b50eeb6cb Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Thu, 26 Sep 2024 16:24:11 +0200 Subject: [PATCH 092/228] ssh: Do not require user to be signed in to use ssh remoting (#18396) Fixes #18392 Closes #18392 Release Notes: - N/A --- crates/recent_projects/src/dev_servers.rs | 127 +++++++++------------- 1 file changed, 
51 insertions(+), 76 deletions(-) diff --git a/crates/recent_projects/src/dev_servers.rs b/crates/recent_projects/src/dev_servers.rs index af5f51f14fca2..2038d069b4c68 100644 --- a/crates/recent_projects/src/dev_servers.rs +++ b/crates/recent_projects/src/dev_servers.rs @@ -40,7 +40,6 @@ use ui::{ }; use ui_input::{FieldLabelLayout, TextField}; use util::ResultExt; -use workspace::notifications::NotifyResultExt; use workspace::OpenOptions; use workspace::{notifications::DetachAndPromptErr, AppState, ModalView, Workspace, WORKSPACE_DB}; @@ -1133,7 +1132,8 @@ impl DevServerProjects { let dev_server_id = state.dev_server_id; let access_token = state.access_token.clone(); let ssh_prompt = state.ssh_prompt.clone(); - let use_direct_ssh = SshSettings::get_global(cx).use_direct_ssh(); + let use_direct_ssh = SshSettings::get_global(cx).use_direct_ssh() + || Client::global(cx).status().borrow().is_signed_out(); let mut kind = state.kind; if use_direct_ssh && kind == NewServerKind::LegacySSH { @@ -1407,7 +1407,6 @@ impl DevServerProjects { is_creating = Some(*creating); creating_dev_server = Some(*dev_server_id); }; - let is_signed_out = Client::global(cx).status().borrow().is_signed_out(); Modal::new("remote-projects", Some(self.scroll_handle.clone())) .header( @@ -1415,82 +1414,58 @@ impl DevServerProjects { .show_dismiss_button(true) .child(Headline::new("Remote Projects (alpha)").size(HeadlineSize::Small)), ) - .when(is_signed_out, |modal| { - modal - .section(Section::new().child(div().child(Label::new( - "To continue with the remote development features, you need to sign in to Zed.", - )))) - .footer( - ModalFooter::new().end_slot( - Button::new("sign_in", "Sign in with GitHub") - .icon(IconName::Github) - .icon_position(IconPosition::Start) - .full_width() - .on_click(cx.listener(|_, _, cx| { - let client = Client::global(cx).clone(); - cx.spawn(|_, mut cx| async move { - client - .authenticate_and_connect(true, &cx) - .await - .notify_async_err(&mut cx); - }) - .detach(); - cx.emit(gpui::DismissEvent); - })), - ), - ) - }) - .when(!is_signed_out, |modal| { - modal.section( - Section::new().child( - div().child( - List::new() - .empty_message("No dev servers registered yet.") - .header(Some( - ListHeader::new("Connections").end_slot( - Button::new("register-dev-server-button", "Connect New Server") - .icon(IconName::Plus) - .icon_position(IconPosition::Start) - .icon_color(Color::Muted) - .on_click(cx.listener(|this, _, cx| { - this.mode = Mode::CreateDevServer( - CreateDevServer { - kind: if SshSettings::get_global(cx).use_direct_ssh() { NewServerKind::DirectSSH } else { NewServerKind::LegacySSH }, - ..Default::default() - } - ); - this.dev_server_name_input.update( - cx, - |text_field, cx| { - text_field.editor().update( - cx, - |editor, cx| { - editor.set_text("", cx); - }, - ); - }, - ); - cx.notify(); - })), - ), - )) - .children(ssh_connections.iter().cloned().enumerate().map(|(ix, connection)| { + .section( + Section::new().child( + div().child( + List::new() + .empty_message("No dev servers registered yet.") + .header(Some( + ListHeader::new("Connections").end_slot( + Button::new("register-dev-server-button", "Connect New Server") + .icon(IconName::Plus) + .icon_position(IconPosition::Start) + .icon_color(Color::Muted) + .on_click(cx.listener(|this, _, cx| { + this.mode = Mode::CreateDevServer(CreateDevServer { + kind: if SshSettings::get_global(cx) + .use_direct_ssh() + { + NewServerKind::DirectSSH + } else { + NewServerKind::LegacySSH + }, + ..Default::default() + }); + 
this.dev_server_name_input.update( + cx, + |text_field, cx| { + text_field.editor().update(cx, |editor, cx| { + editor.set_text("", cx); + }); + }, + ); + cx.notify(); + })), + ), + )) + .children(ssh_connections.iter().cloned().enumerate().map( + |(ix, connection)| { self.render_ssh_connection(ix, connection, cx) .into_any_element() - })) - .children(dev_servers.iter().map(|dev_server| { - let creating = if creating_dev_server == Some(dev_server.id) { - is_creating - } else { - None - }; - self.render_dev_server(dev_server, creating, cx) - .into_any_element() - })), - ), + }, + )) + .children(dev_servers.iter().map(|dev_server| { + let creating = if creating_dev_server == Some(dev_server.id) { + is_creating + } else { + None + }; + self.render_dev_server(dev_server, creating, cx) + .into_any_element() + })), ), - ) - }) + ), + ) } } From de1889d6a864a1add60d63d08dd5d293f74a340d Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 26 Sep 2024 08:49:50 -0600 Subject: [PATCH 093/228] Update Rust crate async-trait to v0.1.83 (#18364) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [async-trait](https://redirect.github.com/dtolnay/async-trait) | workspace.dependencies | patch | `0.1.82` -> `0.1.83` | --- ### Release Notes
dtolnay/async-trait (async-trait)

### [`v0.1.83`](https://redirect.github.com/dtolnay/async-trait/releases/tag/0.1.83)

[Compare Source](https://redirect.github.com/dtolnay/async-trait/compare/0.1.82...0.1.83)

- Prevent needless_arbitrary_self_type lint being produced in generated code ([#278](https://redirect.github.com/dtolnay/async-trait/issues/278))
--- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 5138d59e27728..4826b312f103e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -895,9 +895,9 @@ dependencies = [ [[package]] name = "async-trait" -version = "0.1.82" +version = "0.1.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a27b8a3a6e1a44fa4c8baf1f653e4172e81486d4941f2237e20dc2d0cf4ddff1" +checksum = "721cae7de5c34fbb2acd27e21e6d2cf7b886dce0c27388d46c4e6c47ea4318dd" dependencies = [ "proc-macro2", "quote", From 82eb753b31426da8d5b9aacdadf0512678f7b1d5 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 26 Sep 2024 11:24:01 -0400 Subject: [PATCH 094/228] Update actions/setup-node digest to 0a44ba7 (#18357) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [actions/setup-node](https://redirect.github.com/actions/setup-node) | action | digest | `1e60f62` -> `0a44ba7` | --- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. 
--- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- .github/actions/run_tests/action.yml | 2 +- .github/workflows/ci.yml | 2 +- .github/workflows/danger.yml | 2 +- .github/workflows/randomized_tests.yml | 2 +- .github/workflows/release_nightly.yml | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/actions/run_tests/action.yml b/.github/actions/run_tests/action.yml index 815953398ba5b..07284e2f5854a 100644 --- a/.github/actions/run_tests/action.yml +++ b/.github/actions/run_tests/action.yml @@ -10,7 +10,7 @@ runs: cargo install cargo-nextest - name: Install Node - uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4 + uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4 with: node-version: "18" diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index f059b47004012..07e5499d5eb76 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -172,7 +172,7 @@ jobs: DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }} steps: - name: Install Node - uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4 + uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4 with: node-version: "18" diff --git a/.github/workflows/danger.yml b/.github/workflows/danger.yml index 8ff35b9e26da5..0278bbce02154 100644 --- a/.github/workflows/danger.yml +++ b/.github/workflows/danger.yml @@ -21,7 +21,7 @@ jobs: version: 9 - name: Setup Node - uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4 + uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4 with: node-version: "20" cache: "pnpm" diff --git a/.github/workflows/randomized_tests.yml b/.github/workflows/randomized_tests.yml index 57f43d4961f3d..947b5059bd712 100644 --- a/.github/workflows/randomized_tests.yml +++ b/.github/workflows/randomized_tests.yml @@ -22,7 +22,7 @@ jobs: - buildjet-16vcpu-ubuntu-2204 steps: - name: Install Node - uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4 + uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4 with: node-version: "18" diff --git a/.github/workflows/release_nightly.yml b/.github/workflows/release_nightly.yml index 2b973dcddc3d6..4e8a257bdd1bb 100644 --- a/.github/workflows/release_nightly.yml +++ b/.github/workflows/release_nightly.yml @@ -70,7 +70,7 @@ jobs: ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }} steps: - name: Install Node - uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4 + uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4 with: node-version: "18" From e5bbd378a61ed719a8635c49bafdfb103d1b33d8 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 26 Sep 2024 11:44:38 -0400 Subject: [PATCH 095/228] Update Rust crate cargo_toml to v0.20.5 (#18365) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [cargo_toml](https://lib.rs/cargo_toml) ([source](https://gitlab.com/lib.rs/cargo_toml)) | workspace.dependencies | patch | `0.20.4` -> `0.20.5` | --- ### Release Notes
lib.rs/cargo_toml (cargo_toml)

### [`v0.20.5`](https://gitlab.com/lib.rs/cargo_toml/compare/v0.20.4...v0.20.5)

[Compare Source](https://gitlab.com/lib.rs/cargo_toml/compare/v0.20.4...v0.20.5)
--- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4826b312f103e..94b8205329d34 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2087,9 +2087,9 @@ dependencies = [ [[package]] name = "cargo_toml" -version = "0.20.4" +version = "0.20.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad639525b1c67b6a298f378417b060fbc04618bea559482a8484381cce27d965" +checksum = "88da5a13c620b4ca0078845707ea9c3faf11edbc3ffd8497d11d686211cd1ac0" dependencies = [ "serde", "toml 0.8.19", From 84a6ded657ecf8ced4fce6ad0e6485200a2a4c47 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 26 Sep 2024 11:52:12 -0400 Subject: [PATCH 096/228] Update Rust crate clap to v4.5.18 (#18369) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [clap](https://redirect.github.com/clap-rs/clap) | workspace.dependencies | patch | `4.5.17` -> `4.5.18` | --- ### Release Notes
clap-rs/clap (clap)

### [`v4.5.18`](https://redirect.github.com/clap-rs/clap/blob/HEAD/CHANGELOG.md#4518---2024-09-20)

[Compare Source](https://redirect.github.com/clap-rs/clap/compare/v4.5.17...v4.5.18)

##### Features

- *(builder)* Expose `Arg::get_display_order` and `Command::get_display_order`
--- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 94b8205329d34..7cd40d4226d63 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2283,9 +2283,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.17" +version = "4.5.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e5a21b8495e732f1b3c364c9949b201ca7bae518c502c80256c96ad79eaf6ac" +checksum = "b0956a43b323ac1afaffc053ed5c4b7c1f1800bacd1683c353aabbb752515dd3" dependencies = [ "clap_builder", "clap_derive", @@ -2293,9 +2293,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.17" +version = "4.5.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8cf2dd12af7a047ad9d6da2b6b249759a22a7abc0f474c1dae1777afa4b21a73" +checksum = "4d72166dd41634086d5803a47eb71ae740e61d84709c36f3c34110173db3961b" dependencies = [ "anstream", "anstyle", @@ -2315,9 +2315,9 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.5.13" +version = "4.5.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "501d359d5f3dcaf6ecdeee48833ae73ec6e42723a1e52419c79abf9507eec0a0" +checksum = "4ac6a0c7b1a9e9a5186361f67dfa1b88213572f427fb9ab038efb2bd8c582dab" dependencies = [ "heck 0.5.0", "proc-macro2", @@ -6478,7 +6478,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4" dependencies = [ "cfg-if", - "windows-targets 0.48.5", + "windows-targets 0.52.6", ] [[package]] @@ -13536,7 +13536,7 @@ version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ - "windows-sys 0.48.0", + "windows-sys 0.59.0", ] [[package]] From c7a79cfc02bcacfb3983a9d8b3ad15bb06d33380 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 26 Sep 2024 12:16:49 -0400 Subject: [PATCH 097/228] Update Rust crate libc to v0.2.159 (#18370) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [libc](https://redirect.github.com/rust-lang/libc) | workspace.dependencies | patch | `0.2.158` -> `0.2.159` | --- ### Release Notes
rust-lang/libc (libc)

### [`v0.2.159`](https://redirect.github.com/rust-lang/libc/releases/tag/0.2.159)

[Compare Source](https://redirect.github.com/rust-lang/libc/compare/0.2.158...0.2.159)

##### Added

- Android: add more `AT_*` constants in [#3779](https://redirect.github.com/rust-lang/libc/pull/3779)
- Apple: add missing `NOTE_*` constants in [#3883](https://redirect.github.com/rust-lang/libc/pull/3883)
- Hermit: add missing error numbers in [#3858](https://redirect.github.com/rust-lang/libc/pull/3858)
- Hurd: add `__timeval` for 64-bit support in [#3786](https://redirect.github.com/rust-lang/libc/pull/3786)
- Linux: add `epoll_pwait2` in [#3868](https://redirect.github.com/rust-lang/libc/pull/3868)
- Linux: add `mq_notify` in [#3849](https://redirect.github.com/rust-lang/libc/pull/3849)
- Linux: add missing `NFT_CT_*` constants in [#3844](https://redirect.github.com/rust-lang/libc/pull/3844)
- Linux: add the `fchmodat2` syscall in [#3588](https://redirect.github.com/rust-lang/libc/pull/3588)
- Linux: add the `mseal` syscall in [#3798](https://redirect.github.com/rust-lang/libc/pull/3798)
- OpenBSD: add `sendmmsg` and `recvmmsg` in [#3831](https://redirect.github.com/rust-lang/libc/pull/3831)
- Unix: add `IN6ADDR_ANY_INIT` and `IN6ADDR_LOOPBACK_INIT` in [#3693](https://redirect.github.com/rust-lang/libc/pull/3693)
- VxWorks: add `S_ISVTX` in [#3768](https://redirect.github.com/rust-lang/libc/pull/3768)
- VxWorks: add `vxCpuLib` and `taskLib` functions [#3861](https://redirect.github.com/rust-lang/libc/pull/3861)
- WASIp2: add definitions for `std::net` support in [#3892](https://redirect.github.com/rust-lang/libc/pull/3892)

##### Fixed

- Correctly handle version checks when `clippy-driver` is used [#3893](https://redirect.github.com/rust-lang/libc/pull/3893)

##### Changed

- EspIdf: change signal constants to c_int in [#3895](https://redirect.github.com/rust-lang/libc/pull/3895)
- HorizonOS: update network definitions in [#3863](https://redirect.github.com/rust-lang/libc/pull/3863)
- Linux: combine `ioctl` APIs in [#3722](https://redirect.github.com/rust-lang/libc/pull/3722)
- WASI: enable CI testing in [#3869](https://redirect.github.com/rust-lang/libc/pull/3869)
- WASIp2: enable CI testing in [#3870](https://redirect.github.com/rust-lang/libc/pull/3870)
--- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 7cd40d4226d63..68dad1f74613f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6434,9 +6434,9 @@ checksum = "03087c2bad5e1034e8cace5926dec053fb3790248370865f5117a7d0213354c8" [[package]] name = "libc" -version = "0.2.158" +version = "0.2.159" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8adc4bb1803a324070e64a98ae98f38934d91957a99cfb3a43dcbc01bc56439" +checksum = "561d97a539a36e26a9a5fad1ea11a3039a67714694aaa379433e580854bc3dc5" [[package]] name = "libdbus-sys" From 11058765bec3bb1e0510254904276d29d4fc31f1 Mon Sep 17 00:00:00 2001 From: thataboy Date: Thu, 26 Sep 2024 09:48:23 -0700 Subject: [PATCH 098/228] Add ability to separately set background color for highlighted brackets (#17566) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes https://github.com/zed-industries/zed/issues/16380 Currently brackets are highlighted with `editor.document_highlight.read_background`. This commit adds a separate `editor.document_highlight.bracket_background` theme setting so bracket highlights can be made more prominent without doing the same to other highlights, making the display too busy. (My own theme) https://github.com/user-attachments/assets/29a8c05e-2f1a-4c16-9be8-a4b4cb143548 I set defaults for light and dark theme that I hope are sensible and not too obnoxious, but noticeable so people can change it if they don't like it. Release Notes: - Added `editor.document_highlight.bracket_background` field to the theme to set background color of highlighted brackets. - This will fall back to `editor.document_highlight.read_background`, if not set. 
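For illustration only, a minimal sketch of how a user could pick up the new field from settings. The `experimental.theme_overrides` wrapper and the hex color values are assumptions for the example, not part of this change; the two `editor.document_highlight.*` keys are the actual theme fields involved:

```json
{
  // Illustrative override block; the color values are placeholders.
  "experimental.theme_overrides": {
    // Pre-existing field, which also serves as the fallback for brackets.
    "editor.document_highlight.read_background": "#4f5b6633",
    // New field from this change: background for highlighted matching brackets.
    "editor.document_highlight.bracket_background": "#3fb95040"
  }
}
```

Because the schema falls back to `editor.document_highlight.read_background` when the new key is absent, themes that do not define it keep their previous bracket appearance.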
Screenshot 2024-09-08 at 8 46 57 AM Screenshot 2024-09-08 at 9 03 27 AM --------- Co-authored-by: Marshall Bowers --- .../editor/src/highlight_matching_bracket.rs | 2 +- crates/theme/src/default_colors.rs | 2 ++ crates/theme/src/one_themes.rs | 1 + crates/theme/src/schema.rs | 21 +++++++++++++++---- crates/theme/src/styles/colors.rs | 4 ++++ 5 files changed, 25 insertions(+), 5 deletions(-) diff --git a/crates/editor/src/highlight_matching_bracket.rs b/crates/editor/src/highlight_matching_bracket.rs index 67915d4d7b499..f63b363f34f47 100644 --- a/crates/editor/src/highlight_matching_bracket.rs +++ b/crates/editor/src/highlight_matching_bracket.rs @@ -32,7 +32,7 @@ pub fn refresh_matching_bracket_highlights(editor: &mut Editor, cx: &mut ViewCon opening_range.to_anchors(&snapshot.buffer_snapshot), closing_range.to_anchors(&snapshot.buffer_snapshot), ], - |theme| theme.editor_document_highlight_read_background, + |theme| theme.editor_document_highlight_bracket_background, cx, ) } diff --git a/crates/theme/src/default_colors.rs b/crates/theme/src/default_colors.rs index 4def0bb8d74d6..a7521bd374d1c 100644 --- a/crates/theme/src/default_colors.rs +++ b/crates/theme/src/default_colors.rs @@ -80,6 +80,7 @@ impl ThemeColors { editor_indent_guide_active: neutral().light_alpha().step_6(), editor_document_highlight_read_background: neutral().light_alpha().step_3(), editor_document_highlight_write_background: neutral().light_alpha().step_4(), + editor_document_highlight_bracket_background: green().light_alpha().step_5(), terminal_background: neutral().light().step_1(), terminal_foreground: black().light().step_12(), terminal_bright_foreground: black().light().step_11(), @@ -179,6 +180,7 @@ impl ThemeColors { editor_indent_guide_active: neutral().dark_alpha().step_6(), editor_document_highlight_read_background: neutral().dark_alpha().step_4(), editor_document_highlight_write_background: neutral().dark_alpha().step_4(), + editor_document_highlight_bracket_background: green().dark_alpha().step_6(), terminal_background: neutral().dark().step_1(), terminal_ansi_background: neutral().dark().step_1(), terminal_foreground: white().dark().step_12(), diff --git a/crates/theme/src/one_themes.rs b/crates/theme/src/one_themes.rs index 69e69ce23dc8d..50a4184e8bc93 100644 --- a/crates/theme/src/one_themes.rs +++ b/crates/theme/src/one_themes.rs @@ -102,6 +102,7 @@ pub(crate) fn one_dark() -> Theme { 0.2, ), editor_document_highlight_write_background: gpui::red(), + editor_document_highlight_bracket_background: gpui::green(), terminal_background: bg, // todo("Use one colors for terminal") diff --git a/crates/theme/src/schema.rs b/crates/theme/src/schema.rs index 0229b1ea98d59..91863061236f2 100644 --- a/crates/theme/src/schema.rs +++ b/crates/theme/src/schema.rs @@ -413,6 +413,12 @@ pub struct ThemeColorsContent { #[serde(rename = "editor.document_highlight.write_background")] pub editor_document_highlight_write_background: Option, + /// Highlighted brackets background color. + /// + /// Matching brackets in the cursor scope are highlighted with this background color. + #[serde(rename = "editor.document_highlight.bracket_background")] + pub editor_document_highlight_bracket_background: Option, + /// Terminal background color. 
#[serde(rename = "terminal.background")] pub terminal_background: Option, @@ -540,6 +546,10 @@ impl ThemeColorsContent { .border .as_ref() .and_then(|color| try_parse_color(color).ok()); + let editor_document_highlight_read_background = self + .editor_document_highlight_read_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()); ThemeColorsRefinement { border, border_variant: self @@ -784,14 +794,17 @@ impl ThemeColorsContent { .editor_indent_guide_active .as_ref() .and_then(|color| try_parse_color(color).ok()), - editor_document_highlight_read_background: self - .editor_document_highlight_read_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()), + editor_document_highlight_read_background, editor_document_highlight_write_background: self .editor_document_highlight_write_background .as_ref() .and_then(|color| try_parse_color(color).ok()), + editor_document_highlight_bracket_background: self + .editor_document_highlight_bracket_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()) + // Fall back to `editor.document_highlight.read_background`, for backwards compatibility. + .or(editor_document_highlight_read_background), terminal_background: self .terminal_background .as_ref() diff --git a/crates/theme/src/styles/colors.rs b/crates/theme/src/styles/colors.rs index 0b37be09923c7..225275f37b619 100644 --- a/crates/theme/src/styles/colors.rs +++ b/crates/theme/src/styles/colors.rs @@ -171,6 +171,10 @@ pub struct ThemeColors { /// special attention. Usually a document highlight is visualized by changing /// the background color of its range. pub editor_document_highlight_write_background: Hsla, + /// Highlighted brackets background color. + /// + /// Matching brackets in the cursor scope are highlighted with this background color. + pub editor_document_highlight_bracket_background: Hsla, // === // Terminal From 71da81c74326f1f9763803a6d1cd776b48b58125 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Thu, 26 Sep 2024 12:03:57 -0700 Subject: [PATCH 099/228] SSH Remoting: Fix bugs in worktree syncing (#18406) Release Notes: - N/A --------- Co-authored-by: conrad --- crates/collab/src/db/ids.rs | 1 + crates/collab/src/db/queries/projects.rs | 2 +- .../collab/src/tests/channel_buffer_tests.rs | 2 +- crates/collab/src/tests/editor_tests.rs | 30 ++++---- crates/collab/src/tests/following_tests.rs | 16 ++-- crates/collab/src/tests/integration_tests.rs | 74 +++++++++---------- .../remote_editing_collaboration_tests.rs | 25 ++++++- crates/collab/src/tests/test_server.rs | 2 +- crates/project/src/worktree_store.rs | 32 ++++---- .../remote_server/src/remote_editing_tests.rs | 42 +++++++++++ crates/worktree/src/worktree.rs | 7 +- script/zed-local | 16 ++-- 12 files changed, 157 insertions(+), 92 deletions(-) diff --git a/crates/collab/src/db/ids.rs b/crates/collab/src/db/ids.rs index 1434bc07cf6c3..9bf767329d002 100644 --- a/crates/collab/src/db/ids.rs +++ b/crates/collab/src/db/ids.rs @@ -32,6 +32,7 @@ macro_rules! id_type { #[allow(unused)] #[allow(missing_docs)] pub fn from_proto(value: u64) -> Self { + debug_assert!(value != 0); Self(value as i32) } diff --git a/crates/collab/src/db/queries/projects.rs b/crates/collab/src/db/queries/projects.rs index b514d4bb03601..8091c6620570f 100644 --- a/crates/collab/src/db/queries/projects.rs +++ b/crates/collab/src/db/queries/projects.rs @@ -285,7 +285,7 @@ impl Database { ) .one(&*tx) .await? 
- .ok_or_else(|| anyhow!("no such project"))?; + .ok_or_else(|| anyhow!("no such project: {project_id}"))?; // Update metadata. worktree::Entity::update(worktree::ActiveModel { diff --git a/crates/collab/src/tests/channel_buffer_tests.rs b/crates/collab/src/tests/channel_buffer_tests.rs index 1ba41c45bb606..b5bfd0f03b9ec 100644 --- a/crates/collab/src/tests/channel_buffer_tests.rs +++ b/crates/collab/src/tests/channel_buffer_tests.rs @@ -246,7 +246,7 @@ async fn test_channel_notes_participant_indices( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); // Clients A and B open the same file. diff --git a/crates/collab/src/tests/editor_tests.rs b/crates/collab/src/tests/editor_tests.rs index d2835edc619e2..f9bc21efb1abd 100644 --- a/crates/collab/src/tests/editor_tests.rs +++ b/crates/collab/src/tests/editor_tests.rs @@ -76,7 +76,7 @@ async fn test_host_disconnect( .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; cx_a.background_executor.run_until_parked(); assert!(worktree_a.read_with(cx_a, |tree, _| tree.has_update_observer())); @@ -192,7 +192,7 @@ async fn test_newline_above_or_below_does_not_move_guest_cursor( .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; // Open a buffer as client A let buffer_a = project_a @@ -308,7 +308,7 @@ async fn test_collaborating_with_completion(cx_a: &mut TestAppContext, cx_b: &mu .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; // Open a file in an editor as the guest. let buffer_b = project_b @@ -565,7 +565,7 @@ async fn test_collaborating_with_code_actions( .unwrap(); // Join the project as client B. 
- let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); let editor_b = workspace_b .update(cx_b, |workspace, cx| { @@ -780,7 +780,7 @@ async fn test_collaborating_with_renames(cx_a: &mut TestAppContext, cx_b: &mut T .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); let editor_b = workspace_b @@ -1030,7 +1030,7 @@ async fn test_language_server_statuses(cx_a: &mut TestAppContext, cx_b: &mut Tes .await .unwrap(); executor.run_until_parked(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; project_b.read_with(cx_b, |project, cx| { let status = project.language_server_statuses(cx).next().unwrap().1; @@ -1126,9 +1126,7 @@ async fn test_share_project( .await .unwrap(); let client_b_peer_id = client_b.peer_id().unwrap(); - let project_b = client_b - .build_dev_server_project(initial_project.id, cx_b) - .await; + let project_b = client_b.join_remote_project(initial_project.id, cx_b).await; let replica_id_b = project_b.read_with(cx_b, |project, _| project.replica_id()); @@ -1230,9 +1228,7 @@ async fn test_share_project( .update(cx_c, |call, cx| call.accept_incoming(cx)) .await .unwrap(); - let _project_c = client_c - .build_dev_server_project(initial_project.id, cx_c) - .await; + let _project_c = client_c.join_remote_project(initial_project.id, cx_c).await; // Client B closes the editor, and client A sees client B's selections removed. cx_b.update(move |_| drop(editor_b)); @@ -1291,7 +1287,7 @@ async fn test_on_input_format_from_host_to_guest( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; // Open a file in an editor as the host. let buffer_a = project_a @@ -1411,7 +1407,7 @@ async fn test_on_input_format_from_guest_to_host( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; // Open a file in an editor as the guest. let buffer_b = project_b @@ -1574,7 +1570,7 @@ async fn test_mutual_editor_inlay_hint_cache_update( .unwrap(); // Client B joins the project - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; active_call_b .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx)) .await @@ -1836,7 +1832,7 @@ async fn test_inlay_hint_refresh_is_forwarded( .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; active_call_b .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx)) .await @@ -2050,7 +2046,7 @@ async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestA .unwrap(); // Join the project as client B. 
- let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); let editor_b = workspace_b .update(cx_b, |workspace, cx| { diff --git a/crates/collab/src/tests/following_tests.rs b/crates/collab/src/tests/following_tests.rs index 9a39d6f3eb2e7..5e9c001491c6c 100644 --- a/crates/collab/src/tests/following_tests.rs +++ b/crates/collab/src/tests/following_tests.rs @@ -74,7 +74,7 @@ async fn test_basic_following( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; active_call_b .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx)) .await @@ -162,7 +162,7 @@ async fn test_basic_following( executor.run_until_parked(); let active_call_c = cx_c.read(ActiveCall::global); - let project_c = client_c.build_dev_server_project(project_id, cx_c).await; + let project_c = client_c.join_remote_project(project_id, cx_c).await; let (workspace_c, cx_c) = client_c.build_workspace(&project_c, cx_c); active_call_c .update(cx_c, |call, cx| call.set_location(Some(&project_c), cx)) @@ -175,7 +175,7 @@ async fn test_basic_following( cx_d.executor().run_until_parked(); let active_call_d = cx_d.read(ActiveCall::global); - let project_d = client_d.build_dev_server_project(project_id, cx_d).await; + let project_d = client_d.join_remote_project(project_id, cx_d).await; let (workspace_d, cx_d) = client_d.build_workspace(&project_d, cx_d); active_call_d .update(cx_d, |call, cx| call.set_location(Some(&project_d), cx)) @@ -569,7 +569,7 @@ async fn test_following_tab_order( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; active_call_b .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx)) .await @@ -686,7 +686,7 @@ async fn test_peers_following_each_other(cx_a: &mut TestAppContext, cx_b: &mut T .unwrap(); // Client B joins the project. 
- let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; active_call_b .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx)) .await @@ -1199,7 +1199,7 @@ async fn test_auto_unfollowing(cx_a: &mut TestAppContext, cx_b: &mut TestAppCont .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; active_call_b .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx)) .await @@ -1335,7 +1335,7 @@ async fn test_peers_simultaneously_following_each_other( .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); executor.run_until_parked(); @@ -1685,7 +1685,7 @@ async fn test_following_into_excluded_file( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; active_call_b .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx)) .await diff --git a/crates/collab/src/tests/integration_tests.rs b/crates/collab/src/tests/integration_tests.rs index d5cef3589cce3..afc3e7cfb84ee 100644 --- a/crates/collab/src/tests/integration_tests.rs +++ b/crates/collab/src/tests/integration_tests.rs @@ -1372,7 +1372,7 @@ async fn test_unshare_project( .unwrap(); let worktree_a = project_a.read_with(cx_a, |project, cx| project.worktrees(cx).next().unwrap()); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; executor.run_until_parked(); assert!(worktree_a.read_with(cx_a, |tree, _| tree.has_update_observer())); @@ -1392,7 +1392,7 @@ async fn test_unshare_project( assert!(project_b.read_with(cx_b, |project, _| project.is_disconnected())); // Client C opens the project. - let project_c = client_c.build_dev_server_project(project_id, cx_c).await; + let project_c = client_c.join_remote_project(project_id, cx_c).await; // When client A unshares the project, client C's project becomes read-only. 
project_a @@ -1409,7 +1409,7 @@ async fn test_unshare_project( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_c2 = client_c.build_dev_server_project(project_id, cx_c).await; + let project_c2 = client_c.join_remote_project(project_id, cx_c).await; executor.run_until_parked(); assert!(worktree_a.read_with(cx_a, |tree, _| tree.has_update_observer())); @@ -1514,9 +1514,9 @@ async fn test_project_reconnect( .await .unwrap(); - let project_b1 = client_b.build_dev_server_project(project1_id, cx_b).await; - let project_b2 = client_b.build_dev_server_project(project2_id, cx_b).await; - let project_b3 = client_b.build_dev_server_project(project3_id, cx_b).await; + let project_b1 = client_b.join_remote_project(project1_id, cx_b).await; + let project_b2 = client_b.join_remote_project(project2_id, cx_b).await; + let project_b3 = client_b.join_remote_project(project3_id, cx_b).await; executor.run_until_parked(); let worktree1_id = worktree_a1.read_with(cx_a, |worktree, _| { @@ -2310,8 +2310,8 @@ async fn test_propagate_saves_and_fs_changes( .unwrap(); // Join that worktree as clients B and C. - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; - let project_c = client_c.build_dev_server_project(project_id, cx_c).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; + let project_c = client_c.join_remote_project(project_id, cx_c).await; let worktree_b = project_b.read_with(cx_b, |p, cx| p.worktrees(cx).next().unwrap()); @@ -2535,7 +2535,7 @@ async fn test_git_diff_base_change( .await .unwrap(); - let project_remote = client_b.build_dev_server_project(project_id, cx_b).await; + let project_remote = client_b.join_remote_project(project_id, cx_b).await; let diff_base = " one @@ -2791,7 +2791,7 @@ async fn test_git_branch_name( .await .unwrap(); - let project_remote = client_b.build_dev_server_project(project_id, cx_b).await; + let project_remote = client_b.join_remote_project(project_id, cx_b).await; client_a .fs() .set_branch_name(Path::new("/dir/.git"), Some("branch-1")); @@ -2836,7 +2836,7 @@ async fn test_git_branch_name( assert_branch(Some("branch-2"), project, cx) }); - let project_remote_c = client_c.build_dev_server_project(project_id, cx_c).await; + let project_remote_c = client_c.join_remote_project(project_id, cx_c).await; executor.run_until_parked(); project_remote_c.read_with(cx_c, |project, cx| { @@ -2891,7 +2891,7 @@ async fn test_git_status_sync( .await .unwrap(); - let project_remote = client_b.build_dev_server_project(project_id, cx_b).await; + let project_remote = client_b.join_remote_project(project_id, cx_b).await; // Wait for it to catch up to the new status executor.run_until_parked(); @@ -2967,7 +2967,7 @@ async fn test_git_status_sync( }); // And synchronization while joining - let project_remote_c = client_c.build_dev_server_project(project_id, cx_c).await; + let project_remote_c = client_c.join_remote_project(project_id, cx_c).await; executor.run_until_parked(); project_remote_c.read_with(cx_c, |project, cx| { @@ -3015,7 +3015,7 @@ async fn test_fs_operations( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; let worktree_a = project_a.read_with(cx_a, |project, cx| project.worktrees(cx).next().unwrap()); let worktree_b = project_b.read_with(cx_b, |project, cx| 
project.worktrees(cx).next().unwrap()); @@ -3316,7 +3316,7 @@ async fn test_local_settings( executor.run_until_parked(); // As client B, join that project and observe the local settings. - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; let worktree_b = project_b.read_with(cx_b, |project, cx| project.worktrees(cx).next().unwrap()); executor.run_until_parked(); @@ -3439,7 +3439,7 @@ async fn test_buffer_conflict_after_save( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; // Open a buffer as client B let buffer_b = project_b @@ -3503,7 +3503,7 @@ async fn test_buffer_reloading( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; // Open a buffer as client B let buffer_b = project_b @@ -3557,7 +3557,7 @@ async fn test_editing_while_guest_opens_buffer( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; // Open a buffer as client A let buffer_a = project_a @@ -3605,7 +3605,7 @@ async fn test_leaving_worktree_while_opening_buffer( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; // See that a guest has joined as client A. executor.run_until_parked(); @@ -3652,7 +3652,7 @@ async fn test_canceling_buffer_opening( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; let buffer_a = project_a .update(cx_a, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)) @@ -3709,8 +3709,8 @@ async fn test_leaving_project( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b1 = client_b.build_dev_server_project(project_id, cx_b).await; - let project_c = client_c.build_dev_server_project(project_id, cx_c).await; + let project_b1 = client_b.join_remote_project(project_id, cx_b).await; + let project_c = client_c.join_remote_project(project_id, cx_c).await; // Client A sees that a guest has joined. executor.run_until_parked(); @@ -3751,7 +3751,7 @@ async fn test_leaving_project( }); // Client B re-joins the project and can open buffers as before. - let project_b2 = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b2 = client_b.join_remote_project(project_id, cx_b).await; executor.run_until_parked(); project_a.read_with(cx_a, |project, _| { @@ -3927,7 +3927,7 @@ async fn test_collaborating_with_diagnostics( ); // Join the worktree as client B. - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; // Wait for server to see the diagnostics update. 
executor.run_until_parked(); @@ -3952,7 +3952,7 @@ async fn test_collaborating_with_diagnostics( }); // Join project as client C and observe the diagnostics. - let project_c = client_c.build_dev_server_project(project_id, cx_c).await; + let project_c = client_c.join_remote_project(project_id, cx_c).await; executor.run_until_parked(); let project_c_diagnostic_summaries = Rc::new(RefCell::new(project_c.read_with(cx_c, |project, cx| { @@ -4160,7 +4160,7 @@ async fn test_collaborating_with_lsp_progress_updates_and_diagnostics_ordering( .unwrap(); // Join the project as client B and open all three files. - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; let guest_buffers = futures::future::try_join_all(file_names.iter().map(|file_name| { project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, file_name), cx)) })) @@ -4266,7 +4266,7 @@ async fn test_reloading_buffer_manually( .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx)); let buffer_b = cx_b.executor().spawn(open_buffer).await.unwrap(); @@ -4364,7 +4364,7 @@ async fn test_formatting_buffer( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx)); let buffer_b = cx_b.executor().spawn(open_buffer).await.unwrap(); @@ -4486,7 +4486,7 @@ async fn test_prettier_formatting_buffer( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.ts"), cx)); let buffer_b = cx_b.executor().spawn(open_buffer).await.unwrap(); @@ -4599,7 +4599,7 @@ async fn test_definition( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; // Open the file on client B. let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx)); @@ -4744,7 +4744,7 @@ async fn test_references( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; // Open the file on client B. let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "one.rs"), cx)); @@ -4901,7 +4901,7 @@ async fn test_project_search( .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; // Perform a search as the guest. 
let mut results = HashMap::default(); @@ -4991,7 +4991,7 @@ async fn test_document_highlights( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; // Open the file on client B. let open_b = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "main.rs"), cx)); @@ -5109,7 +5109,7 @@ async fn test_lsp_hover( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; // Open the file as the guest let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "main.rs"), cx)); @@ -5286,7 +5286,7 @@ async fn test_project_symbols( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; // Cause the language server to start. let open_buffer_task = @@ -5381,7 +5381,7 @@ async fn test_open_buffer_while_getting_definition_pointing_to_it( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; let open_buffer_task = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx)); let buffer_b1 = cx_b.executor().spawn(open_buffer_task).await.unwrap(); @@ -6470,7 +6470,7 @@ async fn test_context_collaboration_with_reconnect( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; // Client A sees that a guest has joined. executor.run_until_parked(); diff --git a/crates/collab/src/tests/remote_editing_collaboration_tests.rs b/crates/collab/src/tests/remote_editing_collaboration_tests.rs index bad5ef9053ce7..a9cc32c1dd3a6 100644 --- a/crates/collab/src/tests/remote_editing_collaboration_tests.rs +++ b/crates/collab/src/tests/remote_editing_collaboration_tests.rs @@ -9,7 +9,7 @@ use remote_server::HeadlessProject; use serde_json::json; use std::{path::Path, sync::Arc}; -#[gpui::test] +#[gpui::test(iterations = 10)] async fn test_sharing_an_ssh_remote_project( cx_a: &mut TestAppContext, cx_b: &mut TestAppContext, @@ -54,9 +54,8 @@ async fn test_sharing_an_ssh_remote_project( let (project_a, worktree_id) = client_a .build_ssh_project("/code/project1", client_ssh, cx_a) .await; - executor.run_until_parked(); - // User A shares the remote project. + // While the SSH worktree is being scanned, user A shares the remote project. let active_call_a = cx_a.read(ActiveCall::global); let project_id = active_call_a .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) @@ -64,12 +63,30 @@ async fn test_sharing_an_ssh_remote_project( .unwrap(); // User B joins the project. 
- let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; let worktree_b = project_b .update(cx_b, |project, cx| project.worktree_for_id(worktree_id, cx)) .unwrap(); + let worktree_a = project_a + .update(cx_a, |project, cx| project.worktree_for_id(worktree_id, cx)) + .unwrap(); + executor.run_until_parked(); + + worktree_a.update(cx_a, |worktree, _cx| { + assert_eq!( + worktree.paths().map(Arc::as_ref).collect::>(), + vec![ + Path::new(".zed"), + Path::new(".zed/settings.json"), + Path::new("README.md"), + Path::new("src"), + Path::new("src/lib.rs"), + ] + ); + }); + worktree_b.update(cx_b, |worktree, _cx| { assert_eq!( worktree.paths().map(Arc::as_ref).collect::>(), diff --git a/crates/collab/src/tests/test_server.rs b/crates/collab/src/tests/test_server.rs index 6f07d76b0b26b..94c7d3907ff4f 100644 --- a/crates/collab/src/tests/test_server.rs +++ b/crates/collab/src/tests/test_server.rs @@ -921,7 +921,7 @@ impl TestClient { }) } - pub async fn build_dev_server_project( + pub async fn join_remote_project( &self, host_project_id: u64, guest_cx: &mut TestAppContext, diff --git a/crates/project/src/worktree_store.rs b/crates/project/src/worktree_store.rs index e445eab2dd639..1fc04a0d0b4f4 100644 --- a/crates/project/src/worktree_store.rs +++ b/crates/project/src/worktree_store.rs @@ -204,8 +204,11 @@ impl WorktreeStore { self.loading_worktrees.insert(path.clone(), task.shared()); } let task = self.loading_worktrees.get(&path).unwrap().clone(); - cx.background_executor().spawn(async move { - match task.await { + cx.spawn(|this, mut cx| async move { + let result = task.await; + this.update(&mut cx, |this, _| this.loading_worktrees.remove(&path)) + .ok(); + match result { Ok(worktree) => Ok(worktree), Err(err) => Err((*err).cloned()), } @@ -219,7 +222,8 @@ impl WorktreeStore { visible: bool, cx: &mut ModelContext, ) -> Task, Arc>> { - let mut abs_path = abs_path.as_ref().to_string_lossy().to_string(); + let path_key: Arc = abs_path.as_ref().into(); + let mut abs_path = path_key.clone().to_string_lossy().to_string(); // If we start with `/~` that means the ssh path was something like `ssh://user@host/~/home-dir-folder/` // in which case want to strip the leading the `/`. // On the host-side, the `~` will get expanded. 
@@ -261,8 +265,9 @@ impl WorktreeStore { ) })?; - this.update(&mut cx, |this, cx| this.add(&worktree, cx))?; - + this.update(&mut cx, |this, cx| { + this.add(&worktree, cx); + })?; Ok(worktree) }) } @@ -280,10 +285,6 @@ impl WorktreeStore { cx.spawn(move |this, mut cx| async move { let worktree = Worktree::local(path.clone(), visible, fs, next_entry_id, &mut cx).await; - this.update(&mut cx, |project, _| { - project.loading_worktrees.remove(&path); - })?; - let worktree = worktree?; this.update(&mut cx, |this, cx| this.add(&worktree, cx))?; @@ -317,7 +318,7 @@ impl WorktreeStore { }); let abs_path = abs_path.as_ref().to_path_buf(); - cx.spawn(move |project, mut cx| async move { + cx.spawn(move |project, cx| async move { let (tx, rx) = futures::channel::oneshot::channel(); let tx = RefCell::new(Some(tx)); let Some(project) = project.upgrade() else { @@ -339,14 +340,10 @@ impl WorktreeStore { request.await?; let worktree = rx.await.map_err(|e| anyhow!(e))?; drop(observer); - project.update(&mut cx, |project, _| { - project.loading_worktrees.remove(&path); - })?; Ok(worktree) }) } - #[track_caller] pub fn add(&mut self, worktree: &Model, cx: &mut ModelContext) { let worktree_id = worktree.read(cx).id(); debug_assert!(self.worktrees().all(|w| w.read(cx).id() != worktree_id)); @@ -553,9 +550,12 @@ impl WorktreeStore { let client = client.clone(); async move { if client.is_via_collab() { - client.request(update).map(|result| result.is_ok()).await + client + .request(update) + .map(|result| result.log_err().is_some()) + .await } else { - client.send(update).is_ok() + client.send(update).log_err().is_some() } } } diff --git a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs index 084fcf9929f01..892063942754c 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -564,6 +564,48 @@ async fn test_canceling_buffer_opening(cx: &mut TestAppContext, server_cx: &mut }); } +#[gpui::test] +async fn test_adding_then_removing_then_adding_worktrees( + cx: &mut TestAppContext, + server_cx: &mut TestAppContext, +) { + let (project, _headless, _fs) = init_test(cx, server_cx).await; + let (_worktree, _) = project + .update(cx, |project, cx| { + project.find_or_create_worktree("/code/project1", true, cx) + }) + .await + .unwrap(); + + let (worktree_2, _) = project + .update(cx, |project, cx| { + project.find_or_create_worktree("/code/project2", true, cx) + }) + .await + .unwrap(); + let worktree_id_2 = worktree_2.read_with(cx, |tree, _| tree.id()); + + project.update(cx, |project, cx| project.remove_worktree(worktree_id_2, cx)); + + let (worktree_2, _) = project + .update(cx, |project, cx| { + project.find_or_create_worktree("/code/project2", true, cx) + }) + .await + .unwrap(); + + cx.run_until_parked(); + worktree_2.update(cx, |worktree, _cx| { + assert!(worktree.is_visible()); + let entries = worktree.entries(true, 0).collect::>(); + assert_eq!(entries.len(), 2); + assert_eq!( + entries[1].path.to_string_lossy().to_string(), + "README.md".to_string() + ) + }) +} + fn init_logger() { if std::env::var("RUST_LOG").is_ok() { env_logger::try_init().ok(); diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index f91a832b80d78..d81c91132b9d3 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -1826,10 +1826,13 @@ impl RemoteWorktree { let initial_update = self .snapshot .build_initial_update(project_id, self.id().to_proto()); - self.updates_tx = 
Some(tx); + self.update_observer = Some(tx); cx.spawn(|this, mut cx| async move { let mut update = initial_update; loop { + // SSH projects use a special project ID of 0, and we need to + // remap it to the correct one here. + update.project_id = project_id; if !callback(update).await { break; } @@ -1841,7 +1844,7 @@ impl RemoteWorktree { } this.update(&mut cx, |this, _| { let this = this.as_remote_mut().unwrap(); - this.updates_tx.take(); + this.update_observer.take(); }) }) .detach(); diff --git a/script/zed-local b/script/zed-local index c3dfb2879d175..9ec9b24af7509 100755 --- a/script/zed-local +++ b/script/zed-local @@ -9,12 +9,18 @@ SUMMARY Each instance of Zed will be signed in as a different user specified in either \`.admins.json\` or \`.admins.default.json\`. + All arguments after the initial options will be passed through to the first + instance of Zed. This can be used to test SSH remoting along with collab, like + so: + + $ script/zed-local -2 ssh://your-ssh-uri-here + OPTIONS - --help Print this help message - --release Build Zed in release mode - -2, -3, -4, ... Spawn multiple Zed instances, with their windows tiled. - --top Arrange the Zed windows so they take up the top half of the screen. - --stable Use stable Zed release installed on local machine for all instances (except for the first one). + --help Print this help message + --release Build Zed in release mode + -2, -3, -4, ... Spawn multiple Zed instances, with their windows tiled. + --top Arrange the Zed windows so they take up the top half of the screen. + --stable Use stable Zed release installed on local machine for all instances (except for the first one). `.trim(); const { spawn, execSync, execFileSync } = require("child_process"); From c1a039a5d77f4ce2f23d17ad3ffa0bebd267b620 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 26 Sep 2024 12:10:39 -0700 Subject: [PATCH 100/228] Remove old project search code path, bump min-supported zed version for collaboration (#18404) Release Notes: - N/A --- crates/collab/src/rpc.rs | 44 +------------------ crates/collab/src/rpc/connection_pool.rs | 20 +++++---- crates/project/src/project.rs | 55 +++--------------------- crates/project/src/search.rs | 37 +--------------- crates/proto/proto/zed.proto | 19 +------- crates/proto/src/proto.rs | 4 -- 6 files changed, 22 insertions(+), 157 deletions(-) diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index bc0f827e78ba5..d9683fb8b366c 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -474,9 +474,6 @@ impl Server { .add_request_handler(user_handler( forward_read_only_project_request::, )) - .add_request_handler(user_handler( - forward_read_only_project_request::, - )) .add_request_handler(user_handler(forward_find_search_candidates_request)) .add_request_handler(user_handler( forward_read_only_project_request::, @@ -2298,7 +2295,7 @@ async fn list_remote_directory( let dev_server_connection_id = session .connection_pool() .await - .dev_server_connection_id_supporting(dev_server_id, ZedVersion::with_list_directory())?; + .online_dev_server_connection_id(dev_server_id)?; session .db() @@ -2337,10 +2334,7 @@ async fn update_dev_server_project( let dev_server_connection_id = session .connection_pool() .await - .dev_server_connection_id_supporting( - dev_server_project.dev_server_id, - ZedVersion::with_list_directory(), - )?; + .online_dev_server_connection_id(dev_server_project.dev_server_id)?; session.peer.send( dev_server_connection_id, @@ -2950,40 +2944,6 @@ async fn 
forward_find_search_candidates_request( .await .host_for_read_only_project_request(project_id, session.connection_id, session.user_id()) .await?; - - let host_version = session - .connection_pool() - .await - .connection(host_connection_id) - .map(|c| c.zed_version); - - if host_version.is_some_and(|host_version| host_version < ZedVersion::with_search_candidates()) - { - let query = request.query.ok_or_else(|| anyhow!("missing query"))?; - let search = proto::SearchProject { - project_id: project_id.to_proto(), - query: query.query, - regex: query.regex, - whole_word: query.whole_word, - case_sensitive: query.case_sensitive, - files_to_include: query.files_to_include, - files_to_exclude: query.files_to_exclude, - include_ignored: query.include_ignored, - }; - - let payload = session - .peer - .forward_request(session.connection_id, host_connection_id, search) - .await?; - return response.send(proto::FindSearchCandidatesResponse { - buffer_ids: payload - .locations - .into_iter() - .map(|loc| loc.buffer_id) - .collect(), - }); - } - let payload = session .peer .forward_request(session.connection_id, host_connection_id, request) diff --git a/crates/collab/src/rpc/connection_pool.rs b/crates/collab/src/rpc/connection_pool.rs index ad0131aaa18e5..96deefba7949c 100644 --- a/crates/collab/src/rpc/connection_pool.rs +++ b/crates/collab/src/rpc/connection_pool.rs @@ -32,15 +32,7 @@ impl fmt::Display for ZedVersion { impl ZedVersion { pub fn can_collaborate(&self) -> bool { - self.0 >= SemanticVersion::new(0, 134, 0) - } - - pub fn with_list_directory() -> ZedVersion { - ZedVersion(SemanticVersion::new(0, 145, 0)) - } - - pub fn with_search_candidates() -> ZedVersion { - ZedVersion(SemanticVersion::new(0, 151, 0)) + self.0 >= SemanticVersion::new(0, 151, 0) } } @@ -169,6 +161,16 @@ impl ConnectionPool { self.connected_dev_servers.get(&dev_server_id).copied() } + pub fn online_dev_server_connection_id( + &self, + dev_server_id: DevServerId, + ) -> Result { + match self.connected_dev_servers.get(&dev_server_id) { + Some(cid) => Ok(*cid), + None => Err(anyhow!(proto::ErrorCode::DevServerOffline)), + } + } + pub fn dev_server_connection_id_supporting( &self, dev_server_id: DevServerId, diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index c3b3c383c11ff..fa373af61951b 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -558,7 +558,6 @@ impl Project { client.add_model_message_handler(Self::handle_update_worktree); client.add_model_request_handler(Self::handle_synchronize_buffers); - client.add_model_request_handler(Self::handle_search_project); client.add_model_request_handler(Self::handle_search_candidate_buffers); client.add_model_request_handler(Self::handle_open_buffer_by_id); client.add_model_request_handler(Self::handle_open_buffer_by_path); @@ -2692,9 +2691,9 @@ impl Project { let (result_tx, result_rx) = smol::channel::unbounded(); let matching_buffers_rx = if query.is_opened_only() { - self.sort_candidate_buffers(&query, cx) + self.sort_search_candidates(&query, cx) } else { - self.search_for_candidate_buffers(&query, MAX_SEARCH_RESULT_FILES + 1, cx) + self.find_search_candidate_buffers(&query, MAX_SEARCH_RESULT_FILES + 1, cx) }; cx.spawn(|_, cx| async move { @@ -2757,7 +2756,7 @@ impl Project { result_rx } - fn search_for_candidate_buffers( + fn find_search_candidate_buffers( &mut self, query: &SearchQuery, limit: usize, @@ -2769,11 +2768,11 @@ impl Project { buffer_store.find_search_candidates(query, limit, fs, cx) }) } else { - 
self.search_for_candidate_buffers_remote(query, limit, cx) + self.find_search_candidates_remote(query, limit, cx) } } - fn sort_candidate_buffers( + fn sort_search_candidates( &mut self, search_query: &SearchQuery, cx: &mut ModelContext, @@ -2815,7 +2814,7 @@ impl Project { rx } - fn search_for_candidate_buffers_remote( + fn find_search_candidates_remote( &mut self, query: &SearchQuery, limit: usize, @@ -3656,46 +3655,6 @@ impl Project { Ok(proto::TaskTemplatesResponse { templates }) } - async fn handle_search_project( - this: Model, - envelope: TypedEnvelope, - mut cx: AsyncAppContext, - ) -> Result { - let peer_id = envelope.original_sender_id()?; - let query = SearchQuery::from_proto_v1(envelope.payload)?; - let mut result = this.update(&mut cx, |this, cx| this.search(query, cx))?; - - cx.spawn(move |mut cx| async move { - let mut locations = Vec::new(); - let mut limit_reached = false; - while let Some(result) = result.next().await { - match result { - SearchResult::Buffer { buffer, ranges } => { - for range in ranges { - let start = serialize_anchor(&range.start); - let end = serialize_anchor(&range.end); - let buffer_id = this.update(&mut cx, |this, cx| { - this.create_buffer_for_peer(&buffer, peer_id, cx).into() - })?; - locations.push(proto::Location { - buffer_id, - start: Some(start), - end: Some(end), - }); - } - } - SearchResult::LimitReached => limit_reached = true, - } - } - Ok(proto::SearchProjectResponse { - locations, - limit_reached, - // will restart - }) - }) - .await - } - async fn handle_search_candidate_buffers( this: Model, envelope: TypedEnvelope, @@ -3709,7 +3668,7 @@ impl Project { .ok_or_else(|| anyhow!("missing query field"))?, )?; let mut results = this.update(&mut cx, |this, cx| { - this.search_for_candidate_buffers(&query, message.limit as _, cx) + this.find_search_candidate_buffers(&query, message.limit as _, cx) })?; let mut response = proto::FindSearchCandidatesResponse { diff --git a/crates/project/src/search.rs b/crates/project/src/search.rs index d0e435aa13541..4205f3173039c 100644 --- a/crates/project/src/search.rs +++ b/crates/project/src/search.rs @@ -147,30 +147,6 @@ impl SearchQuery { }) } - pub fn from_proto_v1(message: proto::SearchProject) -> Result { - if message.regex { - Self::regex( - message.query, - message.whole_word, - message.case_sensitive, - message.include_ignored, - deserialize_path_matches(&message.files_to_include)?, - deserialize_path_matches(&message.files_to_exclude)?, - None, - ) - } else { - Self::text( - message.query, - message.whole_word, - message.case_sensitive, - message.include_ignored, - deserialize_path_matches(&message.files_to_include)?, - deserialize_path_matches(&message.files_to_exclude)?, - None, - ) - } - } - pub fn from_proto(message: proto::SearchQuery) -> Result { if message.regex { Self::regex( @@ -194,6 +170,7 @@ impl SearchQuery { ) } } + pub fn with_replacement(mut self, new_replacement: String) -> Self { match self { Self::Text { @@ -209,18 +186,6 @@ impl SearchQuery { } } } - pub fn to_protov1(&self, project_id: u64) -> proto::SearchProject { - proto::SearchProject { - project_id, - query: self.as_str().to_string(), - regex: self.is_regex(), - whole_word: self.whole_word(), - case_sensitive: self.case_sensitive(), - include_ignored: self.include_ignored(), - files_to_include: self.files_to_include().sources().join(","), - files_to_exclude: self.files_to_exclude().sources().join(","), - } - } pub fn to_proto(&self) -> proto::SearchQuery { proto::SearchQuery { diff --git a/crates/proto/proto/zed.proto 
b/crates/proto/proto/zed.proto index d81ef35f6bffb..07f64557f47e1 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -108,8 +108,6 @@ message Envelope { PrepareRenameResponse prepare_rename_response = 84; PerformRename perform_rename = 85; PerformRenameResponse perform_rename_response = 86; - SearchProject search_project = 87; - SearchProjectResponse search_project_response = 88; UpdateContacts update_contacts = 89; UpdateInviteInfo update_invite_info = 90; @@ -287,6 +285,7 @@ message Envelope { CheckFileExistsResponse check_file_exists_response = 256; // current max } + reserved 87 to 88; reserved 158 to 161; reserved 166 to 169; reserved 224 to 229; @@ -1238,22 +1237,6 @@ message PerformRenameResponse { ProjectTransaction transaction = 2; } -message SearchProject { - uint64 project_id = 1; - string query = 2; - bool regex = 3; - bool whole_word = 4; - bool case_sensitive = 5; - string files_to_include = 6; - string files_to_exclude = 7; - bool include_ignored = 8; -} - -message SearchProjectResponse { - repeated Location locations = 1; - bool limit_reached = 2; -} - message SearchQuery { string query = 2; bool regex = 3; diff --git a/crates/proto/src/proto.rs b/crates/proto/src/proto.rs index 799d51defec71..fe1725e0d1c96 100644 --- a/crates/proto/src/proto.rs +++ b/crates/proto/src/proto.rs @@ -279,8 +279,6 @@ messages!( (SaveBuffer, Foreground), (SetChannelMemberRole, Foreground), (SetChannelVisibility, Foreground), - (SearchProject, Background), - (SearchProjectResponse, Background), (SendChannelMessage, Background), (SendChannelMessageResponse, Background), (ShareProject, Foreground), @@ -454,7 +452,6 @@ request_messages!( (RespondToChannelInvite, Ack), (RespondToContactRequest, Ack), (SaveBuffer, BufferSaved), - (SearchProject, SearchProjectResponse), (FindSearchCandidates, FindSearchCandidatesResponse), (SendChannelMessage, SendChannelMessageResponse), (SetChannelMemberRole, Ack), @@ -541,7 +538,6 @@ entity_messages!( ResolveCompletionDocumentation, ResolveInlayHint, SaveBuffer, - SearchProject, StartLanguageServer, SynchronizeBuffers, TaskContextForLocation, From e28496d4e2ef581def4854b1e7c4df8cbb542251 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Thu, 26 Sep 2024 14:01:05 -0600 Subject: [PATCH 101/228] Stop leaking isahc assumption (#18408) Users of our http_client crate knew they were interacting with isahc as they set its extensions on the request. This change adds our own equivalents for their APIs in preparation for changing the default http client. 
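
To make the shape of the new API concrete, here is a rough sketch of a call site using the
client-agnostic helpers (the imports and error handling are assumptions for illustration; only
`read_timeout`, `follow_redirects`, and `RedirectPolicy` come from this change):

```rust
use std::time::Duration;

use http_client::{AsyncBody, HttpRequestExt, Method, RedirectPolicy, Request};

// Hypothetical call site: timeouts and redirect handling are configured via
// the new `HttpRequestExt` helpers instead of isahc's `Configurable` methods.
fn build_request(url: &str, body: String) -> anyhow::Result<Request<AsyncBody>> {
    let request = Request::builder()
        .method(Method::POST)
        .uri(url)
        // Stands in for isahc's `low_speed_timeout(100, duration)`.
        .read_timeout(Duration::from_secs(30))
        // Stands in for isahc's `RedirectPolicy::Follow`.
        .follow_redirects(RedirectPolicy::FollowAll)
        .header("Content-Type", "application/json")
        .body(AsyncBody::from(body))?;
    Ok(request)
}
```

Because the policy and timeout travel as request extensions, each backend (isahc today, any
future default client) can translate them into whatever its own configuration mechanism is.
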
Release Notes: - N/A --- Cargo.lock | 8 -- crates/anthropic/Cargo.toml | 1 - crates/anthropic/src/anthropic.rs | 7 +- crates/copilot/Cargo.toml | 1 - crates/copilot/src/copilot_chat.rs | 7 +- crates/extension/Cargo.toml | 1 - crates/extension/src/extension_store.rs | 2 +- .../src/wasm_host/wit/since_v0_1_0.rs | 13 +-- .../src/wasm_host/wit/since_v0_2_0.rs | 13 +-- crates/feedback/Cargo.toml | 1 - crates/feedback/src/feedback_modal.rs | 3 +- .../src/providers/codeberg.rs | 8 +- .../src/providers/github.rs | 8 +- crates/google_ai/Cargo.toml | 1 - crates/google_ai/src/google_ai.rs | 7 +- crates/gpui/src/app.rs | 3 +- crates/http_client/src/http_client.rs | 90 +++++++++++-------- .../src/isahc_http_client.rs | 26 ++++-- crates/language_model/Cargo.toml | 1 - crates/language_model/src/provider/cloud.rs | 5 +- crates/open_ai/Cargo.toml | 1 - crates/open_ai/src/open_ai.rs | 7 +- crates/zed/Cargo.toml | 1 - crates/zed/src/reliability.rs | 5 +- 24 files changed, 114 insertions(+), 106 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 68dad1f74613f..85a62c9519e01 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -245,7 +245,6 @@ dependencies = [ "chrono", "futures 0.3.30", "http_client", - "isahc", "schemars", "serde", "serde_json", @@ -2850,7 +2849,6 @@ dependencies = [ "gpui", "http_client", "indoc", - "isahc", "language", "lsp", "menu", @@ -4128,7 +4126,6 @@ dependencies = [ "gpui", "http_client", "indexed_docs", - "isahc", "isahc_http_client", "language", "log", @@ -4289,7 +4286,6 @@ dependencies = [ "gpui", "http_client", "human_bytes", - "isahc", "language", "log", "menu", @@ -5016,7 +5012,6 @@ dependencies = [ "anyhow", "futures 0.3.30", "http_client", - "isahc", "schemars", "serde", "serde_json", @@ -6288,7 +6283,6 @@ dependencies = [ "http_client", "image", "inline_completion_button", - "isahc", "language", "log", "menu", @@ -7591,7 +7585,6 @@ dependencies = [ "anyhow", "futures 0.3.30", "http_client", - "isahc", "schemars", "serde", "serde_json", @@ -14435,7 +14428,6 @@ dependencies = [ "image_viewer", "inline_completion_button", "install_cli", - "isahc", "isahc_http_client", "journal", "language", diff --git a/crates/anthropic/Cargo.toml b/crates/anthropic/Cargo.toml index 9e48ad0e57d81..ec12932fb74f1 100644 --- a/crates/anthropic/Cargo.toml +++ b/crates/anthropic/Cargo.toml @@ -20,7 +20,6 @@ anyhow.workspace = true chrono.workspace = true futures.workspace = true http_client.workspace = true -isahc.workspace = true schemars = { workspace = true, optional = true } serde.workspace = true serde_json.workspace = true diff --git a/crates/anthropic/src/anthropic.rs b/crates/anthropic/src/anthropic.rs index 91b6723e90be9..6b8972284208a 100644 --- a/crates/anthropic/src/anthropic.rs +++ b/crates/anthropic/src/anthropic.rs @@ -6,9 +6,8 @@ use std::{pin::Pin, str::FromStr}; use anyhow::{anyhow, Context, Result}; use chrono::{DateTime, Utc}; use futures::{io::BufReader, stream::BoxStream, AsyncBufReadExt, AsyncReadExt, Stream, StreamExt}; -use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest}; -use isahc::config::Configurable; -use isahc::http::{HeaderMap, HeaderValue}; +use http_client::http::{HeaderMap, HeaderValue}; +use http_client::{AsyncBody, HttpClient, HttpRequestExt, Method, Request as HttpRequest}; use serde::{Deserialize, Serialize}; use strum::{EnumIter, EnumString}; use thiserror::Error; @@ -289,7 +288,7 @@ pub async fn stream_completion_with_rate_limit_info( .header("X-Api-Key", api_key) .header("Content-Type", "application/json"); if let Some(low_speed_timeout) = 
low_speed_timeout { - request_builder = request_builder.low_speed_timeout(100, low_speed_timeout); + request_builder = request_builder.read_timeout(low_speed_timeout); } let serialized_request = serde_json::to_string(&request).context("failed to serialize request")?; diff --git a/crates/copilot/Cargo.toml b/crates/copilot/Cargo.toml index 54abbaa112060..2a54497562a24 100644 --- a/crates/copilot/Cargo.toml +++ b/crates/copilot/Cargo.toml @@ -37,7 +37,6 @@ fs.workspace = true futures.workspace = true gpui.workspace = true http_client.workspace = true -isahc.workspace = true language.workspace = true lsp.workspace = true menu.workspace = true diff --git a/crates/copilot/src/copilot_chat.rs b/crates/copilot/src/copilot_chat.rs index 5d80c89a6649d..c5ba1bfc6a589 100644 --- a/crates/copilot/src/copilot_chat.rs +++ b/crates/copilot/src/copilot_chat.rs @@ -7,8 +7,7 @@ use chrono::DateTime; use fs::Fs; use futures::{io::BufReader, stream::BoxStream, AsyncBufReadExt, AsyncReadExt, StreamExt}; use gpui::{AppContext, AsyncAppContext, Global}; -use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest}; -use isahc::config::Configurable; +use http_client::{AsyncBody, HttpClient, HttpRequestExt, Method, Request as HttpRequest}; use paths::home_dir; use serde::{Deserialize, Serialize}; use settings::watch_config_file; @@ -275,7 +274,7 @@ async fn request_api_token( .header("Accept", "application/json"); if let Some(low_speed_timeout) = low_speed_timeout { - request_builder = request_builder.low_speed_timeout(100, low_speed_timeout); + request_builder = request_builder.read_timeout(low_speed_timeout); } let request = request_builder.body(AsyncBody::empty())?; @@ -332,7 +331,7 @@ async fn stream_completion( .header("Copilot-Integration-Id", "vscode-chat"); if let Some(low_speed_timeout) = low_speed_timeout { - request_builder = request_builder.low_speed_timeout(100, low_speed_timeout); + request_builder = request_builder.read_timeout(low_speed_timeout); } let request = request_builder.body(AsyncBody::from(serde_json::to_string(&request)?))?; let mut response = client.send(request).await?; diff --git a/crates/extension/Cargo.toml b/crates/extension/Cargo.toml index edf6184d38475..6ce1bd6862a1d 100644 --- a/crates/extension/Cargo.toml +++ b/crates/extension/Cargo.toml @@ -28,7 +28,6 @@ futures.workspace = true gpui.workspace = true http_client.workspace = true indexed_docs.workspace = true -isahc.workspace = true language.workspace = true log.workspace = true lsp.workspace = true diff --git a/crates/extension/src/extension_store.rs b/crates/extension/src/extension_store.rs index 5f9fbffb11b2e..535d68326f9c3 100644 --- a/crates/extension/src/extension_store.rs +++ b/crates/extension/src/extension_store.rs @@ -664,7 +664,7 @@ impl ExtensionStore { let content_length = response .headers() - .get(isahc::http::header::CONTENT_LENGTH) + .get(http_client::http::header::CONTENT_LENGTH) .and_then(|value| value.to_str().ok()?.parse::().ok()); let mut body = BufReader::new(response.body_mut()); diff --git a/crates/extension/src/wasm_host/wit/since_v0_1_0.rs b/crates/extension/src/wasm_host/wit/since_v0_1_0.rs index 3835f58f88529..862e2e7c7f789 100644 --- a/crates/extension/src/wasm_host/wit/since_v0_1_0.rs +++ b/crates/extension/src/wasm_host/wit/since_v0_1_0.rs @@ -1,5 +1,5 @@ use crate::wasm_host::{wit::ToWasmtimeResult, WasmState}; -use ::http_client::AsyncBody; +use ::http_client::{AsyncBody, HttpRequestExt}; use ::settings::{Settings, WorktreeId}; use anyhow::{anyhow, bail, Context, Result}; use 
async_compression::futures::bufread::GzipDecoder; @@ -8,7 +8,6 @@ use async_trait::async_trait; use futures::{io::BufReader, FutureExt as _}; use futures::{lock::Mutex, AsyncReadExt}; use indexed_docs::IndexedDocsDatabase; -use isahc::config::{Configurable, RedirectPolicy}; use language::{ language_settings::AllLanguageSettings, LanguageServerBinaryStatus, LspAdapterDelegate, }; @@ -297,10 +296,12 @@ fn convert_request( let mut request = ::http_client::Request::builder() .method(::http_client::Method::from(extension_request.method)) .uri(&extension_request.url) - .redirect_policy(match extension_request.redirect_policy { - http_client::RedirectPolicy::NoFollow => RedirectPolicy::None, - http_client::RedirectPolicy::FollowLimit(limit) => RedirectPolicy::Limit(limit), - http_client::RedirectPolicy::FollowAll => RedirectPolicy::Follow, + .follow_redirects(match extension_request.redirect_policy { + http_client::RedirectPolicy::NoFollow => ::http_client::RedirectPolicy::NoFollow, + http_client::RedirectPolicy::FollowLimit(limit) => { + ::http_client::RedirectPolicy::FollowLimit(limit) + } + http_client::RedirectPolicy::FollowAll => ::http_client::RedirectPolicy::FollowAll, }); for (key, value) in &extension_request.headers { request = request.header(key, value); diff --git a/crates/extension/src/wasm_host/wit/since_v0_2_0.rs b/crates/extension/src/wasm_host/wit/since_v0_2_0.rs index eb6e1a09a2ae9..e7f5432e1d32c 100644 --- a/crates/extension/src/wasm_host/wit/since_v0_2_0.rs +++ b/crates/extension/src/wasm_host/wit/since_v0_2_0.rs @@ -1,5 +1,5 @@ use crate::wasm_host::{wit::ToWasmtimeResult, WasmState}; -use ::http_client::AsyncBody; +use ::http_client::{AsyncBody, HttpRequestExt}; use ::settings::{Settings, WorktreeId}; use anyhow::{anyhow, bail, Context, Result}; use async_compression::futures::bufread::GzipDecoder; @@ -8,7 +8,6 @@ use async_trait::async_trait; use futures::{io::BufReader, FutureExt as _}; use futures::{lock::Mutex, AsyncReadExt}; use indexed_docs::IndexedDocsDatabase; -use isahc::config::{Configurable, RedirectPolicy}; use language::{ language_settings::AllLanguageSettings, LanguageServerBinaryStatus, LspAdapterDelegate, }; @@ -213,10 +212,12 @@ fn convert_request( let mut request = ::http_client::Request::builder() .method(::http_client::Method::from(extension_request.method)) .uri(&extension_request.url) - .redirect_policy(match extension_request.redirect_policy { - http_client::RedirectPolicy::NoFollow => RedirectPolicy::None, - http_client::RedirectPolicy::FollowLimit(limit) => RedirectPolicy::Limit(limit), - http_client::RedirectPolicy::FollowAll => RedirectPolicy::Follow, + .follow_redirects(match extension_request.redirect_policy { + http_client::RedirectPolicy::NoFollow => ::http_client::RedirectPolicy::NoFollow, + http_client::RedirectPolicy::FollowLimit(limit) => { + ::http_client::RedirectPolicy::FollowLimit(limit) + } + http_client::RedirectPolicy::FollowAll => ::http_client::RedirectPolicy::FollowAll, }); for (key, value) in &extension_request.headers { request = request.header(key, value); diff --git a/crates/feedback/Cargo.toml b/crates/feedback/Cargo.toml index 83c726e3e9ab4..0447858ca53b4 100644 --- a/crates/feedback/Cargo.toml +++ b/crates/feedback/Cargo.toml @@ -23,7 +23,6 @@ editor.workspace = true futures.workspace = true gpui.workspace = true human_bytes = "0.4.1" -isahc.workspace = true http_client.workspace = true language.workspace = true log.workspace = true diff --git a/crates/feedback/src/feedback_modal.rs b/crates/feedback/src/feedback_modal.rs 
index 4762b228d3e44..5270492aee5c3 100644 --- a/crates/feedback/src/feedback_modal.rs +++ b/crates/feedback/src/feedback_modal.rs @@ -11,7 +11,6 @@ use gpui::{ PromptLevel, Render, Task, View, ViewContext, }; use http_client::HttpClient; -use isahc::Request; use language::Buffer; use project::Project; use regex::Regex; @@ -299,7 +298,7 @@ impl FeedbackModal { is_staff: is_staff.unwrap_or(false), }; let json_bytes = serde_json::to_vec(&request)?; - let request = Request::post(feedback_endpoint) + let request = http_client::http::Request::post(feedback_endpoint) .header("content-type", "application/json") .body(json_bytes.into())?; let mut response = http_client.send(request).await?; diff --git a/crates/git_hosting_providers/src/providers/codeberg.rs b/crates/git_hosting_providers/src/providers/codeberg.rs index eaadca1ecf961..3f6a016f68fd4 100644 --- a/crates/git_hosting_providers/src/providers/codeberg.rs +++ b/crates/git_hosting_providers/src/providers/codeberg.rs @@ -3,7 +3,7 @@ use std::sync::Arc; use anyhow::{bail, Context, Result}; use async_trait::async_trait; use futures::AsyncReadExt; -use http_client::{AsyncBody, HttpClient, Request}; +use http_client::{AsyncBody, HttpClient, HttpRequestExt, Request}; use serde::Deserialize; use url::Url; @@ -49,14 +49,16 @@ impl Codeberg { let url = format!("https://codeberg.org/api/v1/repos/{repo_owner}/{repo}/git/commits/{commit}"); - let mut request = Request::get(&url).header("Content-Type", "application/json"); + let mut request = Request::get(&url) + .header("Content-Type", "application/json") + .follow_redirects(http_client::RedirectPolicy::FollowAll); if let Ok(codeberg_token) = std::env::var("CODEBERG_TOKEN") { request = request.header("Authorization", format!("Bearer {}", codeberg_token)); } let mut response = client - .send_with_redirect_policy(request.body(AsyncBody::default())?, true) + .send(request.body(AsyncBody::default())?) .await .with_context(|| format!("error fetching Codeberg commit details at {:?}", url))?; diff --git a/crates/git_hosting_providers/src/providers/github.rs b/crates/git_hosting_providers/src/providers/github.rs index 77eaa80961e61..4078025fa004f 100644 --- a/crates/git_hosting_providers/src/providers/github.rs +++ b/crates/git_hosting_providers/src/providers/github.rs @@ -3,7 +3,7 @@ use std::sync::{Arc, OnceLock}; use anyhow::{bail, Context, Result}; use async_trait::async_trait; use futures::AsyncReadExt; -use http_client::{AsyncBody, HttpClient, Request}; +use http_client::{AsyncBody, HttpClient, HttpRequestExt, Request}; use regex::Regex; use serde::Deserialize; use url::Url; @@ -53,14 +53,16 @@ impl Github { ) -> Result> { let url = format!("https://api.github.com/repos/{repo_owner}/{repo}/commits/{commit}"); - let mut request = Request::get(&url).header("Content-Type", "application/json"); + let mut request = Request::get(&url) + .header("Content-Type", "application/json") + .follow_redirects(http_client::RedirectPolicy::FollowAll); if let Ok(github_token) = std::env::var("GITHUB_TOKEN") { request = request.header("Authorization", format!("Bearer {}", github_token)); } let mut response = client - .send_with_redirect_policy(request.body(AsyncBody::default())?, true) + .send(request.body(AsyncBody::default())?) 
.await .with_context(|| format!("error fetching GitHub commit details at {:?}", url))?; diff --git a/crates/google_ai/Cargo.toml b/crates/google_ai/Cargo.toml index 2a52f1968dcb6..f923e0ec91742 100644 --- a/crates/google_ai/Cargo.toml +++ b/crates/google_ai/Cargo.toml @@ -18,7 +18,6 @@ schemars = ["dep:schemars"] anyhow.workspace = true futures.workspace = true http_client.workspace = true -isahc.workspace = true schemars = { workspace = true, optional = true } serde.workspace = true serde_json.workspace = true diff --git a/crates/google_ai/src/google_ai.rs b/crates/google_ai/src/google_ai.rs index f1dcedf5b31e0..7991c67956bb8 100644 --- a/crates/google_ai/src/google_ai.rs +++ b/crates/google_ai/src/google_ai.rs @@ -2,8 +2,7 @@ mod supported_countries; use anyhow::{anyhow, Result}; use futures::{io::BufReader, stream::BoxStream, AsyncBufReadExt, AsyncReadExt, Stream, StreamExt}; -use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest}; -use isahc::config::Configurable; +use http_client::{AsyncBody, HttpClient, HttpRequestExt, Method, Request as HttpRequest}; use serde::{Deserialize, Serialize}; use std::time::Duration; @@ -30,7 +29,7 @@ pub async fn stream_generate_content( .header("Content-Type", "application/json"); if let Some(low_speed_timeout) = low_speed_timeout { - request_builder = request_builder.low_speed_timeout(100, low_speed_timeout); + request_builder = request_builder.read_timeout(low_speed_timeout); }; let request = request_builder.body(AsyncBody::from(serde_json::to_string(&request)?))?; @@ -85,7 +84,7 @@ pub async fn count_tokens( .header("Content-Type", "application/json"); if let Some(low_speed_timeout) = low_speed_timeout { - request_builder = request_builder.low_speed_timeout(100, low_speed_timeout); + request_builder = request_builder.read_timeout(low_speed_timeout); } let http_request = request_builder.body(AsyncBody::from(request))?; diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index 6cb491b100810..540e459ce1a31 100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -1524,10 +1524,9 @@ pub struct KeystrokeEvent { struct NullHttpClient; impl HttpClient for NullHttpClient { - fn send_with_redirect_policy( + fn send( &self, _req: http_client::Request, - _follow_redirects: bool, ) -> futures::future::BoxFuture< 'static, Result, anyhow::Error>, diff --git a/crates/http_client/src/http_client.rs b/crates/http_client/src/http_client.rs index c0630151519c5..2f029a1d236bb 100644 --- a/crates/http_client/src/http_client.rs +++ b/crates/http_client/src/http_client.rs @@ -10,22 +10,46 @@ use futures::future::BoxFuture; use http::request::Builder; #[cfg(feature = "test-support")] use std::fmt; -use std::sync::{Arc, Mutex}; +use std::{ + sync::{Arc, Mutex}, + time::Duration, +}; pub use url::Url; +pub struct ReadTimeout(pub Duration); +#[derive(Default, Debug, Clone)] +pub enum RedirectPolicy { + #[default] + NoFollow, + FollowLimit(u32), + FollowAll, +} +pub struct FollowRedirects(pub bool); + +pub trait HttpRequestExt { + /// Set a read timeout on the request. + /// For isahc, this is the low_speed_timeout. + /// For other clients, this is the timeout used for read calls when reading the response. + /// In all cases this prevents servers stalling completely, but allows them to send data slowly. 
+ fn read_timeout(self, timeout: Duration) -> Self; + /// Whether or not to follow redirects + fn follow_redirects(self, follow: RedirectPolicy) -> Self; +} + +impl HttpRequestExt for http::request::Builder { + fn read_timeout(self, timeout: Duration) -> Self { + self.extension(ReadTimeout(timeout)) + } + + fn follow_redirects(self, follow: RedirectPolicy) -> Self { + self.extension(follow) + } +} + pub trait HttpClient: 'static + Send + Sync { fn send( &self, req: http::Request, - ) -> BoxFuture<'static, Result, anyhow::Error>> { - self.send_with_redirect_policy(req, false) - } - - // TODO: Make a better API for this - fn send_with_redirect_policy( - &self, - req: Request, - follow_redirects: bool, ) -> BoxFuture<'static, Result, anyhow::Error>>; fn get<'a>( @@ -34,14 +58,17 @@ pub trait HttpClient: 'static + Send + Sync { body: AsyncBody, follow_redirects: bool, ) -> BoxFuture<'a, Result, anyhow::Error>> { - let request = Builder::new().uri(uri).body(body); + let request = Builder::new() + .uri(uri) + .follow_redirects(if follow_redirects { + RedirectPolicy::FollowAll + } else { + RedirectPolicy::NoFollow + }) + .body(body); match request { - Ok(request) => Box::pin(async move { - self.send_with_redirect_policy(request, follow_redirects) - .await - .map_err(Into::into) - }), + Ok(request) => Box::pin(async move { self.send(request).await.map_err(Into::into) }), Err(e) => Box::pin(async move { Err(e.into()) }), } } @@ -92,12 +119,11 @@ impl HttpClientWithProxy { } impl HttpClient for HttpClientWithProxy { - fn send_with_redirect_policy( + fn send( &self, req: Request, - follow_redirects: bool, ) -> BoxFuture<'static, Result, anyhow::Error>> { - self.client.send_with_redirect_policy(req, follow_redirects) + self.client.send(req) } fn proxy(&self) -> Option<&Uri> { @@ -106,12 +132,11 @@ impl HttpClient for HttpClientWithProxy { } impl HttpClient for Arc { - fn send_with_redirect_policy( + fn send( &self, req: Request, - follow_redirects: bool, ) -> BoxFuture<'static, Result, anyhow::Error>> { - self.client.send_with_redirect_policy(req, follow_redirects) + self.client.send(req) } fn proxy(&self) -> Option<&Uri> { @@ -218,12 +243,11 @@ impl HttpClientWithUrl { } impl HttpClient for Arc { - fn send_with_redirect_policy( + fn send( &self, req: Request, - follow_redirects: bool, ) -> BoxFuture<'static, Result, anyhow::Error>> { - self.client.send_with_redirect_policy(req, follow_redirects) + self.client.send(req) } fn proxy(&self) -> Option<&Uri> { @@ -232,12 +256,11 @@ impl HttpClient for Arc { } impl HttpClient for HttpClientWithUrl { - fn send_with_redirect_policy( + fn send( &self, req: Request, - follow_redirects: bool, ) -> BoxFuture<'static, Result, anyhow::Error>> { - self.client.send_with_redirect_policy(req, follow_redirects) + self.client.send(req) } fn proxy(&self) -> Option<&Uri> { @@ -283,14 +306,6 @@ impl HttpClient for BlockedHttpClient { fn proxy(&self) -> Option<&Uri> { None } - - fn send_with_redirect_policy( - &self, - req: Request, - _: bool, - ) -> BoxFuture<'static, Result, anyhow::Error>> { - self.send(req) - } } #[cfg(feature = "test-support")] @@ -352,10 +367,9 @@ impl fmt::Debug for FakeHttpClient { #[cfg(feature = "test-support")] impl HttpClient for FakeHttpClient { - fn send_with_redirect_policy( + fn send( &self, req: Request, - _follow_redirects: bool, ) -> BoxFuture<'static, Result, anyhow::Error>> { let future = (self.handler)(req); future diff --git a/crates/isahc_http_client/src/isahc_http_client.rs b/crates/isahc_http_client/src/isahc_http_client.rs index 
6c40b9f53b3f8..778f6a0459890 100644 --- a/crates/isahc_http_client/src/isahc_http_client.rs +++ b/crates/isahc_http_client/src/isahc_http_client.rs @@ -1,7 +1,6 @@ use std::{mem, sync::Arc, time::Duration}; use futures::future::BoxFuture; -use isahc::config::RedirectPolicy; use util::maybe; pub use isahc::config::Configurable; @@ -36,18 +35,29 @@ impl HttpClient for IsahcHttpClient { None } - fn send_with_redirect_policy( + fn send( &self, req: http_client::http::Request, - follow_redirects: bool, ) -> BoxFuture<'static, Result, anyhow::Error>> { + let redirect_policy = req + .extensions() + .get::() + .cloned() + .unwrap_or_default(); + let read_timeout = req + .extensions() + .get::() + .map(|t| t.0); let req = maybe!({ let (mut parts, body) = req.into_parts(); let mut builder = isahc::Request::builder() .method(parts.method) .uri(parts.uri) .version(parts.version); + if let Some(read_timeout) = read_timeout { + builder = builder.low_speed_timeout(100, read_timeout); + } let headers = builder.headers_mut()?; mem::swap(headers, &mut parts.headers); @@ -64,10 +74,12 @@ impl HttpClient for IsahcHttpClient { }; builder - .redirect_policy(if follow_redirects { - RedirectPolicy::Follow - } else { - RedirectPolicy::None + .redirect_policy(match redirect_policy { + http_client::RedirectPolicy::FollowAll => isahc::config::RedirectPolicy::Follow, + http_client::RedirectPolicy::FollowLimit(limit) => { + isahc::config::RedirectPolicy::Limit(limit) + } + http_client::RedirectPolicy::NoFollow => isahc::config::RedirectPolicy::None, }) .body(isahc_body) .ok() diff --git a/crates/language_model/Cargo.toml b/crates/language_model/Cargo.toml index b63428c544369..ef273ac44fca3 100644 --- a/crates/language_model/Cargo.toml +++ b/crates/language_model/Cargo.toml @@ -32,7 +32,6 @@ futures.workspace = true google_ai = { workspace = true, features = ["schemars"] } gpui.workspace = true http_client.workspace = true -isahc.workspace = true inline_completion_button.workspace = true log.workspace = true menu.workspace = true diff --git a/crates/language_model/src/provider/cloud.rs b/crates/language_model/src/provider/cloud.rs index 606a6fbacec7b..3c407b77d929d 100644 --- a/crates/language_model/src/provider/cloud.rs +++ b/crates/language_model/src/provider/cloud.rs @@ -18,8 +18,7 @@ use gpui::{ AnyElement, AnyView, AppContext, AsyncAppContext, FontWeight, Model, ModelContext, Subscription, Task, }; -use http_client::{AsyncBody, HttpClient, Method, Response}; -use isahc::config::Configurable; +use http_client::{AsyncBody, HttpClient, HttpRequestExt, Method, Response}; use schemars::JsonSchema; use serde::{de::DeserializeOwned, Deserialize, Serialize}; use serde_json::value::RawValue; @@ -396,7 +395,7 @@ impl CloudLanguageModel { let response = loop { let mut request_builder = http_client::Request::builder(); if let Some(low_speed_timeout) = low_speed_timeout { - request_builder = request_builder.low_speed_timeout(100, low_speed_timeout); + request_builder = request_builder.read_timeout(low_speed_timeout); }; let request = request_builder .method(Method::POST) diff --git a/crates/open_ai/Cargo.toml b/crates/open_ai/Cargo.toml index db9c77bac6bfe..4f729598f82cd 100644 --- a/crates/open_ai/Cargo.toml +++ b/crates/open_ai/Cargo.toml @@ -19,7 +19,6 @@ schemars = ["dep:schemars"] anyhow.workspace = true futures.workspace = true http_client.workspace = true -isahc.workspace = true schemars = { workspace = true, optional = true } serde.workspace = true serde_json.workspace = true diff --git a/crates/open_ai/src/open_ai.rs 
b/crates/open_ai/src/open_ai.rs index e67fe1af27cdb..6a24eec69610c 100644 --- a/crates/open_ai/src/open_ai.rs +++ b/crates/open_ai/src/open_ai.rs @@ -6,8 +6,7 @@ use futures::{ stream::{self, BoxStream}, AsyncBufReadExt, AsyncReadExt, Stream, StreamExt, }; -use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest}; -use isahc::config::Configurable; +use http_client::{AsyncBody, HttpClient, HttpRequestExt, Method, Request as HttpRequest}; use serde::{Deserialize, Serialize}; use serde_json::Value; use std::{ @@ -318,7 +317,7 @@ pub async fn complete( .header("Content-Type", "application/json") .header("Authorization", format!("Bearer {}", api_key)); if let Some(low_speed_timeout) = low_speed_timeout { - request_builder = request_builder.low_speed_timeout(100, low_speed_timeout); + request_builder = request_builder.read_timeout(low_speed_timeout); }; let mut request_body = request; @@ -413,7 +412,7 @@ pub async fn stream_completion( .header("Authorization", format!("Bearer {}", api_key)); if let Some(low_speed_timeout) = low_speed_timeout { - request_builder = request_builder.low_speed_timeout(100, low_speed_timeout); + request_builder = request_builder.read_timeout(low_speed_timeout); }; let request = request_builder.body(AsyncBody::from(serde_json::to_string(&request)?))?; diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index 897e0e9a28bca..5422f8b29aa98 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -57,7 +57,6 @@ http_client.workspace = true image_viewer.workspace = true inline_completion_button.workspace = true install_cli.workspace = true -isahc.workspace = true isahc_http_client.workspace = true journal.workspace = true language.workspace = true diff --git a/crates/zed/src/reliability.rs b/crates/zed/src/reliability.rs index 9e811d7c9afbb..50e5a05b823ed 100644 --- a/crates/zed/src/reliability.rs +++ b/crates/zed/src/reliability.rs @@ -4,8 +4,7 @@ use chrono::Utc; use client::telemetry; use db::kvp::KEY_VALUE_STORE; use gpui::{AppContext, SemanticVersion}; -use http_client::Method; -use isahc::config::Configurable; +use http_client::{HttpRequestExt, Method}; use http_client::{self, HttpClient, HttpClientWithUrl}; use paths::{crashes_dir, crashes_retired_dir}; @@ -491,7 +490,7 @@ async fn upload_previous_crashes( .context("error reading crash file")?; let mut request = http_client::Request::post(&crash_report_url.to_string()) - .redirect_policy(isahc::config::RedirectPolicy::Follow) + .follow_redirects(http_client::RedirectPolicy::FollowAll) .header("Content-Type", "text/plain"); if let Some((panicked_on, payload)) = most_recent_panic.as_ref() { From 48c6eb9ac7a0cebc096cf576c163d054de41c92f Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Thu, 26 Sep 2024 16:21:20 -0400 Subject: [PATCH 102/228] Add script to generate license dependencies as csv (#18411) Co-authored-by: Joseph T. 
Lyons --- .gitignore | 2 +- script/generate-licenses | 6 ++++- script/generate-licenses-csv | 23 +++++++++++++++++++ script/licenses/template.csv.hbs | 6 +++++ .../{template.hbs.md => template.md.hbs} | 0 5 files changed, 35 insertions(+), 2 deletions(-) create mode 100755 script/generate-licenses-csv create mode 100644 script/licenses/template.csv.hbs rename script/licenses/{template.hbs.md => template.md.hbs} (100%) diff --git a/.gitignore b/.gitignore index 634b73ac943cc..d19c5a102aac8 100644 --- a/.gitignore +++ b/.gitignore @@ -10,7 +10,7 @@ /crates/collab/seed.json /crates/zed/resources/flatpak/flatpak-cargo-sources.json /dev.zed.Zed*.json -/assets/*licenses.md +/assets/*licenses.* **/venv .build *.wasm diff --git a/script/generate-licenses b/script/generate-licenses index 43b2f5c4588af..9602813f0ced2 100755 --- a/script/generate-licenses +++ b/script/generate-licenses @@ -4,6 +4,7 @@ set -euo pipefail CARGO_ABOUT_VERSION="0.6.1" OUTPUT_FILE="${1:-$(pwd)/assets/licenses.md}" +TEMPLATE_FILE="script/licenses/template.md.hbs" > $OUTPUT_FILE @@ -23,7 +24,10 @@ else fi echo "Generating cargo licenses" -cargo about generate --fail -c script/licenses/zed-licenses.toml script/licenses/template.hbs.md >> $OUTPUT_FILE +cargo about generate \ + --fail \ + -c script/licenses/zed-licenses.toml \ + "${TEMPLATE_FILE}" >> $OUTPUT_FILE sed -i.bak 's/"/"/g' $OUTPUT_FILE diff --git a/script/generate-licenses-csv b/script/generate-licenses-csv new file mode 100755 index 0000000000000..63bfee83a5cbb --- /dev/null +++ b/script/generate-licenses-csv @@ -0,0 +1,23 @@ +#!/usr/bin/env bash + +set -euo pipefail + +CARGO_ABOUT_VERSION="0.6.1" +OUTPUT_FILE="${1:-$(pwd)/assets/licenses.csv}" +TEMPLATE_FILE="script/licenses/template.csv.hbs" + +if ! cargo install --list | grep "cargo-about v$CARGO_ABOUT_VERSION" > /dev/null; then + echo "Installing cargo-about@$CARGO_ABOUT_VERSION..." + cargo install "cargo-about@$CARGO_ABOUT_VERSION" +else + echo "cargo-about@$CARGO_ABOUT_VERSION is already installed." +fi + +echo "Generating cargo licenses" +cargo about generate \ + --fail \ + -c script/licenses/zed-licenses.toml \ + script/licenses/template.csv.hbs \ + | awk 'NR==1{print;next} NF{print | "sort"}' \ + > $OUTPUT_FILE +echo "Completed. See $OUTPUT_FILE" diff --git a/script/licenses/template.csv.hbs b/script/licenses/template.csv.hbs new file mode 100644 index 0000000000000..1459aa648d8da --- /dev/null +++ b/script/licenses/template.csv.hbs @@ -0,0 +1,6 @@ +Crate Name,Crate Version,License,Url +{{#each licenses}} +{{#each used_by}} +{{crate.name}},{{crate.version}},{{../name}},{{#if crate.repository}}{{crate.repository}}{{else}}https://crates.io/crates/{{crate.name}}{{/if}} +{{/each}} +{{/each}} diff --git a/script/licenses/template.hbs.md b/script/licenses/template.md.hbs similarity index 100% rename from script/licenses/template.hbs.md rename to script/licenses/template.md.hbs From c83d007138587d832c5ce3a13c8cb99913c016be Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Thu, 26 Sep 2024 23:43:58 +0300 Subject: [PATCH 103/228] Remove non-wrapping settings from the language configuration (#18412) Closes https://github.com/zed-industries/zed/issues/17736 Those are limited with 1024 symbols before wrapping still, and were introduced for git diff deleted hunks display. 
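
Soft wrapping itself is still available to anyone who wants it; it just has to be requested
explicitly now. A minimal `settings.json` sketch, using only options that are already documented
(nothing new is introduced here):

```json
{
  // Wrap lines that overflow the editor width...
  "soft_wrap": "editor_width",
  // ...or wrap at a fixed column instead:
  // "soft_wrap": "preferred_line_length",
  "preferred_line_length": 80
}
```
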
Rather than confuse people by actually wrapping anyway, this change restores the behavior from
before https://github.com/zed-industries/zed/pull/11080.

Release Notes:

- Removed confusing soft wrap option behavior ([#17736](https://github.com/zed-industries/zed/issues/17736))
---
 assets/settings/default.json             |  7 +++----
 crates/editor/src/editor.rs              | 24 +++++++++++++++++-------
 crates/editor/src/element.rs             |  6 ++----
 crates/language/src/language_settings.rs |  9 +++++----
 docs/src/configuring-zed.md              |  6 +++---
 5 files changed, 30 insertions(+), 22 deletions(-)

diff --git a/assets/settings/default.json b/assets/settings/default.json
index cf0de6a5e7f9a..b3be17ad2cedc 100644
--- a/assets/settings/default.json
+++ b/assets/settings/default.json
@@ -535,17 +535,16 @@
   // How to soft-wrap long lines of text.
   // Possible values:
   //
-  // 1. Do not soft wrap.
-  // "soft_wrap": "none",
   // 2. Prefer a single line generally, unless an overly long line is encountered.
-  // "soft_wrap": "prefer_line",
+  // "soft_wrap": "none",
+  // "soft_wrap": "prefer_line", // (deprecated, same as "none")
   // 3. Soft wrap lines that overflow the editor.
   // "soft_wrap": "editor_width",
   // 4. Soft wrap lines at the preferred line length.
   // "soft_wrap": "preferred_line_length",
   // 5. Soft wrap lines at the preferred line length or the editor width (whichever is smaller).
   // "soft_wrap": "bounded",
-  "soft_wrap": "prefer_line",
+  "soft_wrap": "none",
   // The column at which to soft-wrap lines, for buffers where soft-wrap
   // is enabled.
   "preferred_line_length": 80,
diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs
index 54d23a8219d4f..b7f825df9eec6 100644
--- a/crates/editor/src/editor.rs
+++ b/crates/editor/src/editor.rs
@@ -376,12 +376,20 @@ pub enum EditorMode {
     Full,
 }
 
-#[derive(Clone, Debug)]
+#[derive(Copy, Clone, Debug)]
 pub enum SoftWrap {
+    /// Prefer not to wrap at all.
+    ///
+    /// Note: this is currently internal, as actually limited by [`crate::MAX_LINE_LEN`] until it wraps.
+    /// The mode is used inside git diff hunks, where it currently seems more useful to not wrap as much as possible.
+    GitDiff,
+    /// Prefer a single line generally, unless an overly long line is encountered.
     None,
-    PreferLine,
+    /// Soft wrap lines that exceed the editor width.
     EditorWidth,
+    /// Soft wrap lines at the preferred line length.
     Column(u32),
+    /// Soft wrap line at the preferred line length or the editor width (whichever is smaller).
     Bounded(u32),
 }
 
@@ -1837,7 +1845,7 @@ impl Editor {
         let blink_manager = cx.new_model(|cx| BlinkManager::new(CURSOR_BLINK_INTERVAL, cx));
 
         let soft_wrap_mode_override = matches!(mode, EditorMode::SingleLine { ..
}) - .then(|| language_settings::SoftWrap::PreferLine); + .then(|| language_settings::SoftWrap::None); let mut project_subscriptions = Vec::new(); if mode == EditorMode::Full { @@ -10898,8 +10906,9 @@ impl Editor { let settings = self.buffer.read(cx).settings_at(0, cx); let mode = self.soft_wrap_mode_override.unwrap_or(settings.soft_wrap); match mode { - language_settings::SoftWrap::None => SoftWrap::None, - language_settings::SoftWrap::PreferLine => SoftWrap::PreferLine, + language_settings::SoftWrap::PreferLine | language_settings::SoftWrap::None => { + SoftWrap::None + } language_settings::SoftWrap::EditorWidth => SoftWrap::EditorWidth, language_settings::SoftWrap::PreferredLineLength => { SoftWrap::Column(settings.preferred_line_length) @@ -10947,9 +10956,10 @@ impl Editor { self.soft_wrap_mode_override.take(); } else { let soft_wrap = match self.soft_wrap_mode(cx) { - SoftWrap::None | SoftWrap::PreferLine => language_settings::SoftWrap::EditorWidth, + SoftWrap::GitDiff => return, + SoftWrap::None => language_settings::SoftWrap::EditorWidth, SoftWrap::EditorWidth | SoftWrap::Column(_) | SoftWrap::Bounded(_) => { - language_settings::SoftWrap::PreferLine + language_settings::SoftWrap::None } }; self.soft_wrap_mode_override = Some(soft_wrap); diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 6f30062d47ec7..bad16b225f329 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -4994,10 +4994,8 @@ impl Element for EditorElement { snapshot } else { let wrap_width = match editor.soft_wrap_mode(cx) { - SoftWrap::None => None, - SoftWrap::PreferLine => { - Some((MAX_LINE_LEN / 2) as f32 * em_advance) - } + SoftWrap::GitDiff => None, + SoftWrap::None => Some((MAX_LINE_LEN / 2) as f32 * em_advance), SoftWrap::EditorWidth => Some(editor_width), SoftWrap::Column(column) => Some(column as f32 * em_advance), SoftWrap::Bounded(column) => { diff --git a/crates/language/src/language_settings.rs b/crates/language/src/language_settings.rs index f830c5f25c308..2f1a7be2bf492 100644 --- a/crates/language/src/language_settings.rs +++ b/crates/language/src/language_settings.rs @@ -379,15 +379,16 @@ pub struct FeaturesContent { #[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)] #[serde(rename_all = "snake_case")] pub enum SoftWrap { - /// Do not soft wrap. + /// Prefer a single line generally, unless an overly long line is encountered. None, + /// Deprecated: use None instead. Left to avoid breakin existing users' configs. /// Prefer a single line generally, unless an overly long line is encountered. PreferLine, - /// Soft wrap lines that exceed the editor width + /// Soft wrap lines that exceed the editor width. EditorWidth, - /// Soft wrap lines at the preferred line length + /// Soft wrap lines at the preferred line length. PreferredLineLength, - /// Soft wrap line at the preferred line length or the editor width (whichever is smaller) + /// Soft wrap line at the preferred line length or the editor width (whichever is smaller). Bounded, } diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index 7837044a60a66..18d66708ad7fc 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -1357,12 +1357,12 @@ Or to set a `socks5` proxy: - Description: Whether or not to automatically wrap lines of text to fit editor / preferred width. - Setting: `soft_wrap` -- Default: `prefer_line` +- Default: `none` **Options** -1. `none` to stop the soft-wrapping -2. 
`prefer_line` to avoid wrapping generally, unless the line is too long +1. `none` to avoid wrapping generally, unless the line is too long +2. `prefer_line` (deprecated, same as `none`) 3. `editor_width` to wrap lines that overflow the editor width 4. `preferred_line_length` to wrap lines that overflow `preferred_line_length` config value From 32605e9ea4d9c1de2b23ac084d30df5c36b5f4dc Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Thu, 26 Sep 2024 20:27:49 -0600 Subject: [PATCH 104/228] Fix register selection in visual mode (#18418) Related to #12895 Release Notes: - vim: Fix register selection in visual yank --- crates/vim/src/normal/mark.rs | 1 - crates/vim/src/normal/paste.rs | 3 +++ crates/vim/test_data/test_special_registers.json | 7 +++++++ 3 files changed, 10 insertions(+), 1 deletion(-) diff --git a/crates/vim/src/normal/mark.rs b/crates/vim/src/normal/mark.rs index 787430e747e3f..743ab59ee286c 100644 --- a/crates/vim/src/normal/mark.rs +++ b/crates/vim/src/normal/mark.rs @@ -63,7 +63,6 @@ impl Vim { self.marks.insert("<".to_string(), starts); self.marks.insert(">".to_string(), ends); self.stored_visual_mode.replace((mode, reversed)); - self.clear_operator(cx); } pub fn jump(&mut self, text: Arc, line: bool, cx: &mut ViewContext) { diff --git a/crates/vim/src/normal/paste.rs b/crates/vim/src/normal/paste.rs index 05469dbf9f168..5322f913c1769 100644 --- a/crates/vim/src/normal/paste.rs +++ b/crates/vim/src/normal/paste.rs @@ -673,6 +673,9 @@ mod test { cx.simulate_shared_keystrokes("\" _ d d").await; cx.shared_register('_').await.assert_eq(""); + cx.simulate_shared_keystrokes("shift-v \" _ y w").await; + cx.shared_register('"').await.assert_eq("jumps"); + cx.shared_state().await.assert_eq(indoc! {" The quick brown the ˇlazy dog"}); diff --git a/crates/vim/test_data/test_special_registers.json b/crates/vim/test_data/test_special_registers.json index 8b6b098af659f..35f181a05c4f6 100644 --- a/crates/vim/test_data/test_special_registers.json +++ b/crates/vim/test_data/test_special_registers.json @@ -10,6 +10,13 @@ {"Key":"d"} {"Get":{"state":"The quick brown\nthe ˇlazy dog","mode":"Normal"}} {"ReadRegister":{"name":"_","value":""}} +{"Key":"shift-v"} +{"Key":"\""} +{"Key":"_"} +{"Key":"y"} +{"Key":"w"} +{"Get":{"state":"The quick brown\nthe ˇlazy dog","mode":"Normal"}} +{"ReadRegister":{"name":"\"","value":"jumps"}} {"Get":{"state":"The quick brown\nthe ˇlazy dog","mode":"Normal"}} {"Key":"\""} {"Key":"\""} From 1be3c44550012c9c7657de0450044c973f8c2d10 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Thu, 26 Sep 2024 23:52:07 -0600 Subject: [PATCH 105/228] vim: Support za (#18421) Closes #6822 Updates #5142 Release Notes: - Added new fold actions to toggle folds (`cmd-k cmd-l`), fold every fold (`cmd-k cmd-0`) unfold every fold (`cmd-k cmd-j`) to fold recursively (`cmd-k cmd-[`) and unfold recursively (`cmd-k cmd-]`). - vim: Added `za` to toggle fold under cursor. - vim: Added `zO`/`zC`/`zA` to open, close and toggle folds recursively (and fixed `zc` to not recurse into selections). - vim: Added `zR`/`zM` to open/close all folds in the buffer. 
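
The new fold commands listed above are ordinary editor actions, so they can be rebound like any
other action. A rough user `keymap.json` sketch (the context string and the key choices here are
only examples, not defaults shipped by this change):

```json
[
  {
    "context": "Editor",
    "bindings": {
      "alt-z": "editor::ToggleFold",
      "alt-shift-z": "editor::ToggleFoldRecursive",
      "alt-0": "editor::FoldAll",
      "alt-j": "editor::UnfoldAll"
    }
  }
]
```
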
--- assets/keymaps/default-linux.json | 5 + assets/keymaps/default-macos.json | 5 + assets/keymaps/vim.json | 6 ++ crates/editor/src/actions.rs | 6 ++ crates/editor/src/editor.rs | 151 +++++++++++++++++++++++++++++- crates/editor/src/editor_tests.rs | 6 +- crates/editor/src/element.rs | 6 ++ 7 files changed, 179 insertions(+), 6 deletions(-) diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index f15c4dfe22b6c..8d4871d95648d 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -310,6 +310,11 @@ "ctrl-shift-\\": "editor::MoveToEnclosingBracket", "ctrl-shift-[": "editor::Fold", "ctrl-shift-]": "editor::UnfoldLines", + "ctrl-k ctrl-l": "editor::ToggleFold", + "ctrl-k ctrl-[": "editor::FoldRecursive", + "ctrl-k ctrl-]": "editor::UnfoldRecursive", + "ctrl-k ctrl-0": "editor::FoldAll", + "ctrl-k ctrl-j": "editor::UnfoldAll", "ctrl-space": "editor::ShowCompletions", "ctrl-.": "editor::ToggleCodeActions", "alt-ctrl-r": "editor::RevealInFileManager", diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index a58112b3c0b92..a980ae14e22a9 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -347,6 +347,11 @@ "cmd-shift-\\": "editor::MoveToEnclosingBracket", "alt-cmd-[": "editor::Fold", "alt-cmd-]": "editor::UnfoldLines", + "cmd-k cmd-l": "editor::ToggleFold", + "cmd-k cmd-[": "editor::FoldRecursive", + "cmd-k cmd-]": "editor::UnfoldRecursive", + "cmd-k cmd-0": "editor::FoldAll", + "cmd-k cmd-j": "editor::UnfoldAll", "ctrl-space": "editor::ShowCompletions", "cmd-.": "editor::ToggleCodeActions", "alt-cmd-r": "editor::RevealInFileManager", diff --git a/assets/keymaps/vim.json b/assets/keymaps/vim.json index 6656ea0ddf22c..f3a088f11e5d2 100644 --- a/assets/keymaps/vim.json +++ b/assets/keymaps/vim.json @@ -132,9 +132,15 @@ "z z": "editor::ScrollCursorCenter", "z .": ["workspace::SendKeystrokes", "z z ^"], "z b": "editor::ScrollCursorBottom", + "z a": "editor::ToggleFold", + "z A": "editor::ToggleFoldRecursive", "z c": "editor::Fold", + "z C": "editor::FoldRecursive", "z o": "editor::UnfoldLines", + "z O": "editor::UnfoldRecursive", "z f": "editor::FoldSelectedRanges", + "z M": "editor::FoldAll", + "z R": "editor::UnfoldAll", "shift-z shift-q": ["pane::CloseActiveItem", { "saveIntent": "skip" }], "shift-z shift-z": ["pane::CloseActiveItem", { "saveIntent": "saveAll" }], // Count support diff --git a/crates/editor/src/actions.rs b/crates/editor/src/actions.rs index 2383c7f71af8a..b5935782580ba 100644 --- a/crates/editor/src/actions.rs +++ b/crates/editor/src/actions.rs @@ -230,7 +230,11 @@ gpui::actions!( ExpandMacroRecursively, FindAllReferences, Fold, + FoldAll, + FoldRecursive, FoldSelectedRanges, + ToggleFold, + ToggleFoldRecursive, Format, GoToDeclaration, GoToDeclarationSplit, @@ -340,7 +344,9 @@ gpui::actions!( Transpose, Undo, UndoSelection, + UnfoldAll, UnfoldLines, + UnfoldRecursive, UniqueLinesCaseInsensitive, UniqueLinesCaseSensitive, ] diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index b7f825df9eec6..44de6014ec813 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -10551,17 +10551,79 @@ impl Editor { } } - pub fn fold(&mut self, _: &actions::Fold, cx: &mut ViewContext) { - let mut fold_ranges = Vec::new(); + pub fn toggle_fold(&mut self, _: &actions::ToggleFold, cx: &mut ViewContext) { + let selection = self.selections.newest::(cx); + + let display_map = self.display_map.update(cx, |map, cx| 
map.snapshot(cx)); + let range = if selection.is_empty() { + let point = selection.head().to_display_point(&display_map); + let start = DisplayPoint::new(point.row(), 0).to_point(&display_map); + let end = DisplayPoint::new(point.row(), display_map.line_len(point.row())) + .to_point(&display_map); + start..end + } else { + selection.range() + }; + if display_map.folds_in_range(range).next().is_some() { + self.unfold_lines(&Default::default(), cx) + } else { + self.fold(&Default::default(), cx) + } + } + + pub fn toggle_fold_recursive( + &mut self, + _: &actions::ToggleFoldRecursive, + cx: &mut ViewContext, + ) { + let selection = self.selections.newest::(cx); let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + let range = if selection.is_empty() { + let point = selection.head().to_display_point(&display_map); + let start = DisplayPoint::new(point.row(), 0).to_point(&display_map); + let end = DisplayPoint::new(point.row(), display_map.line_len(point.row())) + .to_point(&display_map); + start..end + } else { + selection.range() + }; + if display_map.folds_in_range(range).next().is_some() { + self.unfold_recursive(&Default::default(), cx) + } else { + self.fold_recursive(&Default::default(), cx) + } + } + pub fn fold(&mut self, _: &actions::Fold, cx: &mut ViewContext) { + let mut fold_ranges = Vec::new(); + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); let selections = self.selections.all_adjusted(cx); + for selection in selections { let range = selection.range().sorted(); let buffer_start_row = range.start.row; - for row in (0..=range.end.row).rev() { + if range.start.row != range.end.row { + let mut found = false; + let mut row = range.start.row; + while row <= range.end.row { + if let Some((foldable_range, fold_text)) = + { display_map.foldable_range(MultiBufferRow(row)) } + { + found = true; + row = foldable_range.end.row + 1; + fold_ranges.push((foldable_range, fold_text)); + } else { + row += 1 + } + } + if found { + continue; + } + } + + for row in (0..=range.start.row).rev() { if let Some((foldable_range, fold_text)) = display_map.foldable_range(MultiBufferRow(row)) { @@ -10578,6 +10640,61 @@ impl Editor { self.fold_ranges(fold_ranges, true, cx); } + pub fn fold_all(&mut self, _: &actions::FoldAll, cx: &mut ViewContext) { + let mut fold_ranges = Vec::new(); + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + + for row in 0..display_map.max_buffer_row().0 { + if let Some((foldable_range, fold_text)) = + display_map.foldable_range(MultiBufferRow(row)) + { + fold_ranges.push((foldable_range, fold_text)); + } + } + + self.fold_ranges(fold_ranges, true, cx); + } + + pub fn fold_recursive(&mut self, _: &actions::FoldRecursive, cx: &mut ViewContext) { + let mut fold_ranges = Vec::new(); + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + let selections = self.selections.all_adjusted(cx); + + for selection in selections { + let range = selection.range().sorted(); + let buffer_start_row = range.start.row; + + if range.start.row != range.end.row { + let mut found = false; + for row in range.start.row..=range.end.row { + if let Some((foldable_range, fold_text)) = + { display_map.foldable_range(MultiBufferRow(row)) } + { + found = true; + fold_ranges.push((foldable_range, fold_text)); + } + } + if found { + continue; + } + } + + for row in (0..=range.start.row).rev() { + if let Some((foldable_range, fold_text)) = + display_map.foldable_range(MultiBufferRow(row)) + { + if 
foldable_range.end.row >= buffer_start_row { + fold_ranges.push((foldable_range, fold_text)); + } else { + break; + } + } + } + } + + self.fold_ranges(fold_ranges, true, cx); + } + pub fn fold_at(&mut self, fold_at: &FoldAt, cx: &mut ViewContext) { let buffer_row = fold_at.buffer_row; let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); @@ -10612,6 +10729,24 @@ impl Editor { self.unfold_ranges(ranges, true, true, cx); } + pub fn unfold_recursive(&mut self, _: &UnfoldRecursive, cx: &mut ViewContext) { + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + let selections = self.selections.all::(cx); + let ranges = selections + .iter() + .map(|s| { + let mut range = s.display_range(&display_map).sorted(); + *range.start.column_mut() = 0; + *range.end.column_mut() = display_map.line_len(range.end.row()); + let start = range.start.to_point(&display_map); + let end = range.end.to_point(&display_map); + start..end + }) + .collect::>(); + + self.unfold_ranges(ranges, true, true, cx); + } + pub fn unfold_at(&mut self, unfold_at: &UnfoldAt, cx: &mut ViewContext) { let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); @@ -10630,6 +10765,16 @@ impl Editor { self.unfold_ranges(std::iter::once(intersection_range), true, autoscroll, cx) } + pub fn unfold_all(&mut self, _: &actions::UnfoldAll, cx: &mut ViewContext) { + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + self.unfold_ranges( + [Point::zero()..display_map.max_point().to_point(&display_map)], + true, + true, + cx, + ); + } + pub fn fold_selected_ranges(&mut self, _: &FoldSelectedRanges, cx: &mut ViewContext) { let selections = self.selections.all::(cx); let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index de1b12abe0077..31a69918026f7 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -852,7 +852,7 @@ fn test_fold_action(cx: &mut TestAppContext) { _ = view.update(cx, |view, cx| { view.change_selections(None, cx, |s| { s.select_display_ranges([ - DisplayPoint::new(DisplayRow(8), 0)..DisplayPoint::new(DisplayRow(12), 0) + DisplayPoint::new(DisplayRow(7), 0)..DisplayPoint::new(DisplayRow(12), 0) ]); }); view.fold(&Fold, cx); @@ -940,7 +940,7 @@ fn test_fold_action_whitespace_sensitive_language(cx: &mut TestAppContext) { _ = view.update(cx, |view, cx| { view.change_selections(None, cx, |s| { s.select_display_ranges([ - DisplayPoint::new(DisplayRow(7), 0)..DisplayPoint::new(DisplayRow(10), 0) + DisplayPoint::new(DisplayRow(6), 0)..DisplayPoint::new(DisplayRow(10), 0) ]); }); view.fold(&Fold, cx); @@ -1022,7 +1022,7 @@ fn test_fold_action_multiple_line_breaks(cx: &mut TestAppContext) { _ = view.update(cx, |view, cx| { view.change_selections(None, cx, |s| { s.select_display_ranges([ - DisplayPoint::new(DisplayRow(7), 0)..DisplayPoint::new(DisplayRow(11), 0) + DisplayPoint::new(DisplayRow(6), 0)..DisplayPoint::new(DisplayRow(11), 0) ]); }); view.fold(&Fold, cx); diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index bad16b225f329..e5c067e37ec3d 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -335,8 +335,14 @@ impl EditorElement { register_action(view, cx, Editor::open_url); register_action(view, cx, Editor::open_file); register_action(view, cx, Editor::fold); + register_action(view, cx, Editor::fold_all); register_action(view, cx, Editor::fold_at); + 
register_action(view, cx, Editor::fold_recursive); + register_action(view, cx, Editor::toggle_fold); + register_action(view, cx, Editor::toggle_fold_recursive); register_action(view, cx, Editor::unfold_lines); + register_action(view, cx, Editor::unfold_recursive); + register_action(view, cx, Editor::unfold_all); register_action(view, cx, Editor::unfold_at); register_action(view, cx, Editor::fold_selected_ranges); register_action(view, cx, Editor::show_completions); From 02d05615866a35da38e31630ac41c61aaf64a59d Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Fri, 27 Sep 2024 00:36:17 -0600 Subject: [PATCH 106/228] Fix read timeout for ollama (#18417) Supercedes: #18310 Release Notes: - Fixed `low_speed_timeout_in_seconds` for Ollama --- crates/ollama/src/ollama.rs | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/crates/ollama/src/ollama.rs b/crates/ollama/src/ollama.rs index 84404afce13b1..a38b9e7a56451 100644 --- a/crates/ollama/src/ollama.rs +++ b/crates/ollama/src/ollama.rs @@ -1,6 +1,6 @@ use anyhow::{anyhow, Context, Result}; use futures::{io::BufReader, stream::BoxStream, AsyncBufReadExt, AsyncReadExt, StreamExt}; -use http_client::{http, AsyncBody, HttpClient, Method, Request as HttpRequest}; +use http_client::{http, AsyncBody, HttpClient, HttpRequestExt, Method, Request as HttpRequest}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use serde_json::{value::RawValue, Value}; @@ -262,14 +262,18 @@ pub async fn stream_chat_completion( client: &dyn HttpClient, api_url: &str, request: ChatRequest, - _: Option, + low_speed_timeout: Option, ) -> Result>> { let uri = format!("{api_url}/api/chat"); - let request_builder = http::Request::builder() + let mut request_builder = http::Request::builder() .method(Method::POST) .uri(uri) .header("Content-Type", "application/json"); + if let Some(low_speed_timeout) = low_speed_timeout { + request_builder = request_builder.read_timeout(low_speed_timeout); + } + let request = request_builder.body(AsyncBody::from(serde_json::to_string(&request)?))?; let mut response = client.send(request).await?; if response.status().is_success() { From 8559731e0d5bb570dcbb427746d10299d12e2889 Mon Sep 17 00:00:00 2001 From: CharlesChen0823 Date: Fri, 27 Sep 2024 14:55:35 +0800 Subject: [PATCH 107/228] project: Fix worktree store event missing in remote projects (#18376) Release Notes: - N/A --- crates/project/src/project.rs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index fa373af61951b..b91250e6b2c4a 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -886,6 +886,9 @@ impl Project { cx.spawn(move |this, cx| Self::send_buffer_ordered_messages(this, rx, cx)) .detach(); + cx.subscribe(&worktree_store, Self::on_worktree_store_event) + .detach(); + cx.subscribe(&buffer_store, Self::on_buffer_store_event) .detach(); cx.subscribe(&lsp_store, Self::on_lsp_store_event).detach(); From 5199135b54c98d571af8d9cc16635fd94a0b85ac Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Fri, 27 Sep 2024 09:31:45 +0200 Subject: [PATCH 108/228] ssh remoting: Show error if opening connection timed out (#18401) This shows an error if opening a connection to a remote host didn't work in the timeout of 10s (maybe we'll need to make that configurable in the future? for now it seems fine.) 
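The core of the change is to race the initial read of the SSH master process's stdout against a timer instead of blocking indefinitely. A condensed version of the helper this patch adds (imports and signature simplified slightly for illustration):

```rust
use std::time::Duration;
use smol::{io::AsyncReadExt as _, process};

// Race reading the child's stdout against a timer. If the timer fires first,
// return a TimedOut error so the UI can report "Failed to connect to host".
async fn read_with_timeout(
    stdout: &mut process::ChildStdout,
    timeout: Duration,
    output: &mut Vec<u8>,
) -> Result<(), std::io::Error> {
    smol::future::or(
        async {
            stdout.read_to_end(output).await?;
            Ok::<_, std::io::Error>(())
        },
        async {
            smol::Timer::after(timeout).await;
            Err(std::io::Error::new(
                std::io::ErrorKind::TimedOut,
                "Read operation timed out",
            ))
        },
    )
    .await
}
```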
![screenshot-2024-09-26-18 01 07@2x](https://github.com/user-attachments/assets/cbfa0e9f-9c29-4b6c-bade-07fdd7393c9d) Release Notes: - N/A --------- Co-authored-by: Bennet Co-authored-by: Conrad --- crates/recent_projects/src/ssh_connections.rs | 55 +++++++--- crates/remote/src/ssh_session.rs | 101 +++++++++++++----- 2 files changed, 115 insertions(+), 41 deletions(-) diff --git a/crates/recent_projects/src/ssh_connections.rs b/crates/recent_projects/src/ssh_connections.rs index 1722c58f07539..dd30f15f267fc 100644 --- a/crates/recent_projects/src/ssh_connections.rs +++ b/crates/recent_projects/src/ssh_connections.rs @@ -16,8 +16,9 @@ use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsSources}; use ui::{ - h_flex, v_flex, FluentBuilder as _, Icon, IconName, IconSize, InteractiveElement, IntoElement, - Label, LabelCommon, Styled, StyledExt as _, ViewContext, VisualContext, WindowContext, + h_flex, v_flex, Color, FluentBuilder as _, Icon, IconName, IconSize, InteractiveElement, + IntoElement, Label, LabelCommon, Styled, StyledExt as _, ViewContext, VisualContext, + WindowContext, }; use workspace::{AppState, ModalView, Workspace}; @@ -79,6 +80,7 @@ impl Settings for SshSettings { pub struct SshPrompt { connection_string: SharedString, status_message: Option, + error_message: Option, prompt: Option<(SharedString, oneshot::Sender>)>, editor: View, } @@ -92,6 +94,7 @@ impl SshPrompt { Self { connection_string, status_message: None, + error_message: None, prompt: None, editor: cx.new_view(Editor::single_line), } @@ -121,6 +124,11 @@ impl SshPrompt { cx.notify(); } + pub fn set_error(&mut self, error_message: String, cx: &mut ViewContext) { + self.error_message = Some(error_message.into()); + cx.notify(); + } + pub fn confirm(&mut self, cx: &mut ViewContext) { if let Some((_, tx)) = self.prompt.take() { self.editor.update(cx, |editor, cx| { @@ -140,7 +148,12 @@ impl Render for SshPrompt { .child( h_flex() .gap_2() - .child( + .child(if self.error_message.is_some() { + Icon::new(IconName::XCircle) + .size(IconSize::Medium) + .color(Color::Error) + .into_any_element() + } else { Icon::new(IconName::ArrowCircle) .size(IconSize::Medium) .with_animation( @@ -149,16 +162,21 @@ impl Render for SshPrompt { |icon, delta| { icon.transform(Transformation::rotate(percentage(delta))) }, - ), - ) + ) + .into_any_element() + }) .child( Label::new(format!("ssh {}…", self.connection_string)) .size(ui::LabelSize::Large), ), ) - .when_some(self.status_message.as_ref(), |el, status| { - el.child(Label::new(status.clone())) + .when_some(self.error_message.as_ref(), |el, error| { + el.child(Label::new(error.clone())) }) + .when( + self.error_message.is_none() && self.status_message.is_some(), + |el| el.child(Label::new(self.status_message.clone().unwrap())), + ) .when_some(self.prompt.as_ref(), |el, prompt| { el.child(Label::new(prompt.0.clone())) .child(self.editor.clone()) @@ -238,6 +256,10 @@ impl remote::SshClientDelegate for SshClientDelegate { self.update_status(status, cx) } + fn set_error(&self, error: String, cx: &mut AsyncAppContext) { + self.update_error(error, cx) + } + fn get_server_binary( &self, platform: SshPlatform, @@ -270,6 +292,16 @@ impl SshClientDelegate { .ok(); } + fn update_error(&self, error: String, cx: &mut AsyncAppContext) { + self.window + .update(cx, |_, cx| { + self.ui.update(cx, |modal, cx| { + modal.set_error(error, cx); + }) + }) + .ok(); + } + async fn get_server_binary_impl( &self, platform: SshPlatform, @@ -388,7 +420,7 @@ pub async fn 
open_ssh_project( })? }; - let result = window + let session = window .update(cx, |workspace, cx| { cx.activate_window(); workspace.toggle_modal(cx, |cx| SshConnectionModal::new(&connection_options, cx)); @@ -400,12 +432,7 @@ pub async fn open_ssh_project( .clone(); connect_over_ssh(connection_options.clone(), ui, cx) })? - .await; - - if result.is_err() { - window.update(cx, |_, cx| cx.remove_window()).ok(); - } - let session = result?; + .await?; cx.update(|cx| { workspace::open_ssh_project(window, connection_options, session, app_state, paths, cx) diff --git a/crates/remote/src/ssh_session.rs b/crates/remote/src/ssh_session.rs index 06a7f810e6721..915595fd9d295 100644 --- a/crates/remote/src/ssh_session.rs +++ b/crates/remote/src/ssh_session.rs @@ -129,6 +129,7 @@ pub trait SshClientDelegate { cx: &mut AsyncAppContext, ) -> oneshot::Receiver>; fn set_status(&self, status: Option<&str>, cx: &mut AsyncAppContext); + fn set_error(&self, error_message: String, cx: &mut AsyncAppContext); } type ResponseChannels = Mutex)>>>; @@ -208,16 +209,16 @@ impl SshSession { result = child_stdout.read(&mut stdout_buffer).fuse() => { match result { - Ok(len) => { - if len == 0 { - child_stdin.close().await?; - let status = remote_server_child.status().await?; - if !status.success() { - log::info!("channel exited with status: {status:?}"); - } - return Ok(()); + Ok(0) => { + child_stdin.close().await?; + outgoing_rx.close(); + let status = remote_server_child.status().await?; + if !status.success() { + log::error!("channel exited with status: {status:?}"); } - + return Ok(()); + } + Ok(len) => { if len < stdout_buffer.len() { child_stdout.read_exact(&mut stdout_buffer[len..]).await?; } @@ -419,8 +420,13 @@ impl SshSession { let mut response_channels_lock = self.response_channels.lock(); response_channels_lock.insert(MessageId(envelope.id), tx); drop(response_channels_lock); - self.outgoing_tx.unbounded_send(envelope).ok(); + let result = self.outgoing_tx.unbounded_send(envelope); async move { + if let Err(error) = &result { + log::error!("failed to send message: {}", error); + return Err(anyhow!("failed to send message: {}", error)); + } + let response = rx.await.context("connection lost")?.0; if let Some(proto::envelope::Payload::Error(error)) = &response.payload { return Err(RpcError::from_proto(error, type_name)); @@ -525,22 +531,25 @@ impl SshClientState { let listener = UnixListener::bind(&askpass_socket).context("failed to create askpass socket")?; - let askpass_task = cx.spawn(|mut cx| async move { - while let Ok((mut stream, _)) = listener.accept().await { - let mut buffer = Vec::new(); - let mut reader = BufReader::new(&mut stream); - if reader.read_until(b'\0', &mut buffer).await.is_err() { - buffer.clear(); - } - let password_prompt = String::from_utf8_lossy(&buffer); - if let Some(password) = delegate - .ask_password(password_prompt.to_string(), &mut cx) - .await - .context("failed to get ssh password") - .and_then(|p| p) - .log_err() - { - stream.write_all(password.as_bytes()).await.log_err(); + let askpass_task = cx.spawn({ + let delegate = delegate.clone(); + |mut cx| async move { + while let Ok((mut stream, _)) = listener.accept().await { + let mut buffer = Vec::new(); + let mut reader = BufReader::new(&mut stream); + if reader.read_until(b'\0', &mut buffer).await.is_err() { + buffer.clear(); + } + let password_prompt = String::from_utf8_lossy(&buffer); + if let Some(password) = delegate + .ask_password(password_prompt.to_string(), &mut cx) + .await + .context("failed to get ssh password") 
+ .and_then(|p| p) + .log_err() + { + stream.write_all(password.as_bytes()).await.log_err(); + } } } }); @@ -575,7 +584,22 @@ impl SshClientState { // has completed. let stdout = master_process.stdout.as_mut().unwrap(); let mut output = Vec::new(); - stdout.read_to_end(&mut output).await?; + let connection_timeout = std::time::Duration::from_secs(10); + let result = read_with_timeout(stdout, connection_timeout, &mut output).await; + if let Err(e) = result { + let error_message = if e.kind() == std::io::ErrorKind::TimedOut { + format!( + "Failed to connect to host. Timed out after {:?}.", + connection_timeout + ) + } else { + format!("Failed to connect to host: {}.", e) + }; + + delegate.set_error(error_message, cx); + return Err(e.into()); + } + drop(askpass_task); if master_process.try_status()?.is_some() { @@ -716,6 +740,29 @@ impl SshClientState { } } +#[cfg(unix)] +async fn read_with_timeout( + stdout: &mut process::ChildStdout, + timeout: std::time::Duration, + output: &mut Vec, +) -> Result<(), std::io::Error> { + smol::future::or( + async { + stdout.read_to_end(output).await?; + Ok::<_, std::io::Error>(()) + }, + async { + smol::Timer::after(timeout).await; + + Err(std::io::Error::new( + std::io::ErrorKind::TimedOut, + "Read operation timed out", + )) + }, + ) + .await +} + impl Drop for SshClientState { fn drop(&mut self) { if let Err(error) = self.master_process.kill() { From 568a21a7009ea41c93a97a3382874c3f62816889 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Fri, 27 Sep 2024 10:48:34 +0300 Subject: [PATCH 109/228] Fix the numeration in line wrap docs (#18428) Follow-up of https://github.com/zed-industries/zed/pull/18412 Release Notes: - N/A Co-authored-by: Thorsten Ball --- assets/settings/default.json | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index b3be17ad2cedc..82778dc8f6c48 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -535,14 +535,14 @@ // How to soft-wrap long lines of text. // Possible values: // - // 2. Prefer a single line generally, unless an overly long line is encountered. + // 1. Prefer a single line generally, unless an overly long line is encountered. // "soft_wrap": "none", // "soft_wrap": "prefer_line", // (deprecated, same as "none") - // 3. Soft wrap lines that overflow the editor. + // 2. Soft wrap lines that overflow the editor. // "soft_wrap": "editor_width", - // 4. Soft wrap lines at the preferred line length. + // 3. Soft wrap lines at the preferred line length. // "soft_wrap": "preferred_line_length", - // 5. Soft wrap lines at the preferred line length or the editor width (whichever is smaller). + // 4. Soft wrap lines at the preferred line length or the editor width (whichever is smaller). 
// "soft_wrap": "bounded", "soft_wrap": "none", // The column at which to soft-wrap lines, for buffers where soft-wrap From a1d2e1106e15aeb072827170f79950f6ce02fd8d Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Fri, 27 Sep 2024 10:51:49 +0200 Subject: [PATCH 110/228] assistant: Fix copy/cut not working when selection is empty (#18403) Release Notes: - Fixed copy/cut/paste not working in the assistant panel when selection was empty --- crates/assistant/src/assistant_panel.rs | 108 ++++++++++++++---------- 1 file changed, 63 insertions(+), 45 deletions(-) diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index c7a06b428b9f1..15f9e5c59dfef 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -72,6 +72,7 @@ use std::{ time::Duration, }; use terminal_view::{terminal_panel::TerminalPanel, TerminalView}; +use text::SelectionGoal; use ui::TintColor; use ui::{ prelude::*, @@ -3438,7 +3439,7 @@ impl ContextEditor { fn copy(&mut self, _: &editor::actions::Copy, cx: &mut ViewContext) { if self.editor.read(cx).selections.count() == 1 { - let (copied_text, metadata) = self.get_clipboard_contents(cx); + let (copied_text, metadata, _) = self.get_clipboard_contents(cx); cx.write_to_clipboard(ClipboardItem::new_string_with_json_metadata( copied_text, metadata, @@ -3452,11 +3453,9 @@ impl ContextEditor { fn cut(&mut self, _: &editor::actions::Cut, cx: &mut ViewContext) { if self.editor.read(cx).selections.count() == 1 { - let (copied_text, metadata) = self.get_clipboard_contents(cx); + let (copied_text, metadata, selections) = self.get_clipboard_contents(cx); self.editor.update(cx, |editor, cx| { - let selections = editor.selections.all::(cx); - editor.transact(cx, |this, cx| { this.change_selections(Some(Autoscroll::fit()), cx, |s| { s.select(selections); @@ -3476,52 +3475,71 @@ impl ContextEditor { cx.propagate(); } - fn get_clipboard_contents(&mut self, cx: &mut ViewContext) -> (String, CopyMetadata) { - let creases = self.editor.update(cx, |editor, cx| { - let selection = editor.selections.newest::(cx); - let selection_start = editor.selections.newest::(cx).start; + fn get_clipboard_contents( + &mut self, + cx: &mut ViewContext, + ) -> (String, CopyMetadata, Vec>) { + let (snapshot, selection, creases) = self.editor.update(cx, |editor, cx| { + let mut selection = editor.selections.newest::(cx); let snapshot = editor.buffer().read(cx).snapshot(cx); - editor.display_map.update(cx, |display_map, cx| { - display_map - .snapshot(cx) - .crease_snapshot - .creases_in_range( - MultiBufferRow(selection.start.row)..MultiBufferRow(selection.end.row + 1), - &snapshot, - ) - .filter_map(|crease| { - if let Some(metadata) = &crease.metadata { - let start = crease - .range - .start - .to_offset(&snapshot) - .saturating_sub(selection_start); - let end = crease - .range - .end - .to_offset(&snapshot) - .saturating_sub(selection_start); - - let range_relative_to_selection = start..end; - - if range_relative_to_selection.is_empty() { - None + + let is_entire_line = selection.is_empty() || editor.selections.line_mode; + if is_entire_line { + selection.start = Point::new(selection.start.row, 0); + selection.end = + cmp::min(snapshot.max_point(), Point::new(selection.start.row + 1, 0)); + selection.goal = SelectionGoal::None; + } + + let selection_start = snapshot.point_to_offset(selection.start); + + ( + snapshot.clone(), + selection.clone(), + editor.display_map.update(cx, |display_map, cx| { + display_map + .snapshot(cx) + 
.crease_snapshot + .creases_in_range( + MultiBufferRow(selection.start.row) + ..MultiBufferRow(selection.end.row + 1), + &snapshot, + ) + .filter_map(|crease| { + if let Some(metadata) = &crease.metadata { + let start = crease + .range + .start + .to_offset(&snapshot) + .saturating_sub(selection_start); + let end = crease + .range + .end + .to_offset(&snapshot) + .saturating_sub(selection_start); + + let range_relative_to_selection = start..end; + + if range_relative_to_selection.is_empty() { + None + } else { + Some(SelectedCreaseMetadata { + range_relative_to_selection, + crease: metadata.clone(), + }) + } } else { - Some(SelectedCreaseMetadata { - range_relative_to_selection, - crease: metadata.clone(), - }) + None } - } else { - None - } - }) - .collect::>() - }) + }) + .collect::>() + }), + ) }); + let selection = selection.map(|point| snapshot.point_to_offset(point)); let context = self.context.read(cx); - let selection = self.editor.read(cx).selections.newest::(cx); + let mut text = String::new(); for message in context.messages(cx) { if message.offset_range.start >= selection.range().end { @@ -3540,7 +3558,7 @@ impl ContextEditor { } } - (text, CopyMetadata { creases }) + (text, CopyMetadata { creases }, vec![selection]) } fn paste(&mut self, action: &editor::actions::Paste, cx: &mut ViewContext) { From 1c5d9c221a3ec66f496b762fcc720183240bd212 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Fri, 27 Sep 2024 11:06:48 +0200 Subject: [PATCH 111/228] Add missing shortcuts in tooltips (#18282) Fixes some missing shortcuts from Tooltips like the project search, buffer search, quick action bar, .... https://github.com/user-attachments/assets/d3a0160a-8d6e-4ddc-bf82-1fabeca42d59 This should hopefully help new users learn and discover some nice keyboard shortcuts Release Notes: - Display keyboard shortcuts inside tooltips in the project search, buffer search etc. 
--- assets/keymaps/default-linux.json | 2 +- assets/keymaps/default-macos.json | 2 +- crates/breadcrumbs/src/breadcrumbs.rs | 35 +++++--- .../quick_action_bar/src/quick_action_bar.rs | 15 +++- crates/search/src/buffer_search.rs | 80 ++++++++++++++++--- crates/search/src/project_search.rs | 78 ++++++++++++++++-- crates/search/src/search.rs | 5 +- crates/search/src/search_bar.rs | 5 +- crates/terminal_view/src/terminal_panel.rs | 20 ++++- 9 files changed, 199 insertions(+), 43 deletions(-) diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index 8d4871d95648d..d33df0274725a 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -196,7 +196,7 @@ } }, { - "context": "BufferSearchBar && in_replace", + "context": "BufferSearchBar && in_replace > Editor", "bindings": { "enter": "search::ReplaceNext", "ctrl-enter": "search::ReplaceAll" diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index a980ae14e22a9..b405ee1852843 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -232,7 +232,7 @@ } }, { - "context": "BufferSearchBar && in_replace", + "context": "BufferSearchBar && in_replace > Editor", "bindings": { "enter": "search::ReplaceNext", "cmd-enter": "search::ReplaceAll" diff --git a/crates/breadcrumbs/src/breadcrumbs.rs b/crates/breadcrumbs/src/breadcrumbs.rs index 93ebfa0643584..09b29c0436f6e 100644 --- a/crates/breadcrumbs/src/breadcrumbs.rs +++ b/crates/breadcrumbs/src/breadcrumbs.rs @@ -1,7 +1,7 @@ use editor::Editor; use gpui::{ - Element, EventEmitter, IntoElement, ParentElement, Render, StyledText, Subscription, - ViewContext, + Element, EventEmitter, FocusableView, IntoElement, ParentElement, Render, StyledText, + Subscription, ViewContext, }; use itertools::Itertools; use std::cmp; @@ -90,17 +90,30 @@ impl Render for Breadcrumbs { ButtonLike::new("toggle outline view") .child(breadcrumbs_stack) .style(ButtonStyle::Transparent) - .on_click(move |_, cx| { - if let Some(editor) = editor.upgrade() { - outline::toggle(editor, &editor::actions::ToggleOutline, cx) + .on_click({ + let editor = editor.clone(); + move |_, cx| { + if let Some(editor) = editor.upgrade() { + outline::toggle(editor, &editor::actions::ToggleOutline, cx) + } } }) - .tooltip(|cx| { - Tooltip::for_action( - "Show symbol outline", - &editor::actions::ToggleOutline, - cx, - ) + .tooltip(move |cx| { + if let Some(editor) = editor.upgrade() { + let focus_handle = editor.read(cx).focus_handle(cx); + Tooltip::for_action_in( + "Show symbol outline", + &editor::actions::ToggleOutline, + &focus_handle, + cx, + ) + } else { + Tooltip::for_action( + "Show symbol outline", + &editor::actions::ToggleOutline, + cx, + ) + } }), ), None => element diff --git a/crates/quick_action_bar/src/quick_action_bar.rs b/crates/quick_action_bar/src/quick_action_bar.rs index 57418b54b7e05..fb05065a19fc1 100644 --- a/crates/quick_action_bar/src/quick_action_bar.rs +++ b/crates/quick_action_bar/src/quick_action_bar.rs @@ -8,8 +8,8 @@ use editor::actions::{ use editor::{Editor, EditorSettings}; use gpui::{ - Action, AnchorCorner, ClickEvent, ElementId, EventEmitter, InteractiveElement, ParentElement, - Render, Styled, Subscription, View, ViewContext, WeakView, + Action, AnchorCorner, ClickEvent, ElementId, EventEmitter, FocusHandle, FocusableView, + InteractiveElement, ParentElement, Render, Styled, Subscription, View, ViewContext, WeakView, }; use search::{buffer_search, BufferSearchBar}; use settings::{Settings, 
SettingsStore}; @@ -110,12 +110,15 @@ impl Render for QuickActionBar { ) }; + let focus_handle = editor.read(cx).focus_handle(cx); + let search_button = editor.is_singleton(cx).then(|| { QuickActionBarButton::new( "toggle buffer search", IconName::MagnifyingGlass, !self.buffer_search_bar.read(cx).is_dismissed(), Box::new(buffer_search::Deploy::find()), + focus_handle.clone(), "Buffer Search", { let buffer_search_bar = self.buffer_search_bar.clone(); @@ -133,6 +136,7 @@ impl Render for QuickActionBar { IconName::ZedAssistant, false, Box::new(InlineAssist::default()), + focus_handle.clone(), "Inline Assist", { let workspace = self.workspace.clone(); @@ -321,6 +325,7 @@ struct QuickActionBarButton { icon: IconName, toggled: bool, action: Box, + focus_handle: FocusHandle, tooltip: SharedString, on_click: Box, } @@ -331,6 +336,7 @@ impl QuickActionBarButton { icon: IconName, toggled: bool, action: Box, + focus_handle: FocusHandle, tooltip: impl Into, on_click: impl Fn(&ClickEvent, &mut WindowContext) + 'static, ) -> Self { @@ -339,6 +345,7 @@ impl QuickActionBarButton { icon, toggled, action, + focus_handle, tooltip: tooltip.into(), on_click: Box::new(on_click), } @@ -355,7 +362,9 @@ impl RenderOnce for QuickActionBarButton { .icon_size(IconSize::Small) .style(ButtonStyle::Subtle) .selected(self.toggled) - .tooltip(move |cx| Tooltip::for_action(tooltip.clone(), &*action, cx)) + .tooltip(move |cx| { + Tooltip::for_action_in(tooltip.clone(), &*action, &self.focus_handle, cx) + }) .on_click(move |event, cx| (self.on_click)(event, cx)) } } diff --git a/crates/search/src/buffer_search.rs b/crates/search/src/buffer_search.rs index 9ba7dfd7965b1..6e660a963b987 100644 --- a/crates/search/src/buffer_search.rs +++ b/crates/search/src/buffer_search.rs @@ -13,9 +13,10 @@ use editor::{ }; use futures::channel::oneshot; use gpui::{ - actions, div, impl_actions, Action, AppContext, ClickEvent, EventEmitter, FocusableView, Hsla, - InteractiveElement as _, IntoElement, KeyContext, ParentElement as _, Render, ScrollHandle, - Styled, Subscription, Task, TextStyle, View, ViewContext, VisualContext as _, WindowContext, + actions, div, impl_actions, Action, AppContext, ClickEvent, EventEmitter, FocusHandle, + FocusableView, Hsla, InteractiveElement as _, IntoElement, KeyContext, ParentElement as _, + Render, ScrollHandle, Styled, Subscription, Task, TextStyle, View, ViewContext, + VisualContext as _, WindowContext, }; use project::{ search::SearchQuery, @@ -142,6 +143,8 @@ impl Render for BufferSearchBar { return div().id("search_bar"); } + let focus_handle = self.focus_handle(cx); + let narrow_mode = self.scroll_handle.bounds().size.width / cx.rem_size() < 340. 
/ BASE_REM_SIZE_IN_PX; let hide_inline_icons = self.editor_needed_width @@ -217,6 +220,7 @@ impl Render for BufferSearchBar { div.children(supported_options.case.then(|| { self.render_search_option_button( SearchOptions::CASE_SENSITIVE, + focus_handle.clone(), cx.listener(|this, _, cx| { this.toggle_case_sensitive(&ToggleCaseSensitive, cx) }), @@ -225,6 +229,7 @@ impl Render for BufferSearchBar { .children(supported_options.word.then(|| { self.render_search_option_button( SearchOptions::WHOLE_WORD, + focus_handle.clone(), cx.listener(|this, _, cx| { this.toggle_whole_word(&ToggleWholeWord, cx) }), @@ -233,6 +238,7 @@ impl Render for BufferSearchBar { .children(supported_options.regex.then(|| { self.render_search_option_button( SearchOptions::REGEX, + focus_handle.clone(), cx.listener(|this, _, cx| this.toggle_regex(&ToggleRegex, cx)), ) })) @@ -250,7 +256,17 @@ impl Render for BufferSearchBar { })) .selected(self.replace_enabled) .size(ButtonSize::Compact) - .tooltip(|cx| Tooltip::for_action("Toggle replace", &ToggleReplace, cx)), + .tooltip({ + let focus_handle = focus_handle.clone(); + move |cx| { + Tooltip::for_action_in( + "Toggle replace", + &ToggleReplace, + &focus_handle, + cx, + ) + } + }), ) }) .when(supported_options.selection, |this| { @@ -268,8 +284,16 @@ impl Render for BufferSearchBar { })) .selected(self.selection_search_enabled) .size(ButtonSize::Compact) - .tooltip(|cx| { - Tooltip::for_action("Toggle search selection", &ToggleSelection, cx) + .tooltip({ + let focus_handle = focus_handle.clone(); + move |cx| { + Tooltip::for_action_in( + "Toggle search selection", + &ToggleSelection, + &focus_handle, + cx, + ) + } }), ) }) @@ -280,8 +304,16 @@ impl Render for BufferSearchBar { IconButton::new("select-all", ui::IconName::SelectAll) .on_click(|_, cx| cx.dispatch_action(SelectAllMatches.boxed_clone())) .size(ButtonSize::Compact) - .tooltip(|cx| { - Tooltip::for_action("Select all matches", &SelectAllMatches, cx) + .tooltip({ + let focus_handle = focus_handle.clone(); + move |cx| { + Tooltip::for_action_in( + "Select all matches", + &SelectAllMatches, + &focus_handle, + cx, + ) + } }), ) .child(render_nav_button( @@ -289,12 +321,14 @@ impl Render for BufferSearchBar { self.active_match_index.is_some(), "Select previous match", &SelectPrevMatch, + focus_handle.clone(), )) .child(render_nav_button( ui::IconName::ChevronRight, self.active_match_index.is_some(), "Select next match", &SelectNextMatch, + focus_handle.clone(), )) .when(!narrow_mode, |this| { this.child(h_flex().ml_2().min_w(rems_from_px(40.)).child( @@ -335,8 +369,16 @@ impl Render for BufferSearchBar { .flex_none() .child( IconButton::new("search-replace-next", ui::IconName::ReplaceNext) - .tooltip(move |cx| { - Tooltip::for_action("Replace next", &ReplaceNext, cx) + .tooltip({ + let focus_handle = focus_handle.clone(); + move |cx| { + Tooltip::for_action_in( + "Replace next match", + &ReplaceNext, + &focus_handle, + cx, + ) + } }) .on_click( cx.listener(|this, _, cx| this.replace_next(&ReplaceNext, cx)), @@ -344,8 +386,16 @@ impl Render for BufferSearchBar { ) .child( IconButton::new("search-replace-all", ui::IconName::ReplaceAll) - .tooltip(move |cx| { - Tooltip::for_action("Replace all", &ReplaceAll, cx) + .tooltip({ + let focus_handle = focus_handle.clone(); + move |cx| { + Tooltip::for_action_in( + "Replace all matches", + &ReplaceAll, + &focus_handle, + cx, + ) + } }) .on_click( cx.listener(|this, _, cx| this.replace_all(&ReplaceAll, cx)), @@ -719,10 +769,11 @@ impl BufferSearchBar { fn 
render_search_option_button( &self, option: SearchOptions, + focus_handle: FocusHandle, action: impl Fn(&ClickEvent, &mut WindowContext) + 'static, ) -> impl IntoElement { let is_active = self.search_options.contains(option); - option.as_button(is_active, action) + option.as_button(is_active, focus_handle, action) } pub fn focus_editor(&mut self, _: &FocusEditor, cx: &mut ViewContext) { @@ -1122,6 +1173,7 @@ impl BufferSearchBar { }); cx.focus(handle); } + fn toggle_replace(&mut self, _: &ToggleReplace, cx: &mut ViewContext) { if self.active_searchable_item.is_some() { self.replace_enabled = !self.replace_enabled; @@ -1134,6 +1186,7 @@ impl BufferSearchBar { cx.notify(); } } + fn replace_next(&mut self, _: &ReplaceNext, cx: &mut ViewContext) { let mut should_propagate = true; if !self.dismissed && self.active_search.is_some() { @@ -1161,6 +1214,7 @@ impl BufferSearchBar { cx.stop_propagation(); } } + pub fn replace_all(&mut self, _: &ReplaceAll, cx: &mut ViewContext) { if !self.dismissed && self.active_search.is_some() { if let Some(searchable_item) = self.active_searchable_item.as_ref() { diff --git a/crates/search/src/project_search.rs b/crates/search/src/project_search.rs index ea94d27daf61d..12e6ccc12dc49 100644 --- a/crates/search/src/project_search.rs +++ b/crates/search/src/project_search.rs @@ -1551,6 +1551,7 @@ impl Render for ProjectSearchBar { return div(); }; let search = search.read(cx); + let focus_handle = search.focus_handle(cx); let query_column = h_flex() .flex_1() @@ -1571,18 +1572,21 @@ impl Render for ProjectSearchBar { h_flex() .child(SearchOptions::CASE_SENSITIVE.as_button( self.is_option_enabled(SearchOptions::CASE_SENSITIVE, cx), + focus_handle.clone(), cx.listener(|this, _, cx| { this.toggle_search_option(SearchOptions::CASE_SENSITIVE, cx); }), )) .child(SearchOptions::WHOLE_WORD.as_button( self.is_option_enabled(SearchOptions::WHOLE_WORD, cx), + focus_handle.clone(), cx.listener(|this, _, cx| { this.toggle_search_option(SearchOptions::WHOLE_WORD, cx); }), )) .child(SearchOptions::REGEX.as_button( self.is_option_enabled(SearchOptions::REGEX, cx), + focus_handle.clone(), cx.listener(|this, _, cx| { this.toggle_search_option(SearchOptions::REGEX, cx); }), @@ -1603,7 +1607,17 @@ impl Render for ProjectSearchBar { .map(|search| search.read(cx).filters_enabled) .unwrap_or_default(), ) - .tooltip(|cx| Tooltip::for_action("Toggle filters", &ToggleFilters, cx)), + .tooltip({ + let focus_handle = focus_handle.clone(); + move |cx| { + Tooltip::for_action_in( + "Toggle filters", + &ToggleFilters, + &focus_handle, + cx, + ) + } + }), ) .child( IconButton::new("project-search-toggle-replace", IconName::Replace) @@ -1616,7 +1630,17 @@ impl Render for ProjectSearchBar { .map(|search| search.read(cx).replace_enabled) .unwrap_or_default(), ) - .tooltip(|cx| Tooltip::for_action("Toggle replace", &ToggleReplace, cx)), + .tooltip({ + let focus_handle = focus_handle.clone(); + move |cx| { + Tooltip::for_action_in( + "Toggle replace", + &ToggleReplace, + &focus_handle, + cx, + ) + } + }), ), ); @@ -1650,8 +1674,16 @@ impl Render for ProjectSearchBar { }) } })) - .tooltip(|cx| { - Tooltip::for_action("Go to previous match", &SelectPrevMatch, cx) + .tooltip({ + let focus_handle = focus_handle.clone(); + move |cx| { + Tooltip::for_action_in( + "Go to previous match", + &SelectPrevMatch, + &focus_handle, + cx, + ) + } }), ) .child( @@ -1664,7 +1696,17 @@ impl Render for ProjectSearchBar { }) } })) - .tooltip(|cx| Tooltip::for_action("Go to next match", &SelectNextMatch, cx)), + .tooltip({ 
+ let focus_handle = focus_handle.clone(); + move |cx| { + Tooltip::for_action_in( + "Go to next match", + &SelectNextMatch, + &focus_handle, + cx, + ) + } + }), ) .child( h_flex() @@ -1702,6 +1744,7 @@ impl Render for ProjectSearchBar { .border_color(cx.theme().colors().border) .rounded_lg() .child(self.render_text_input(&search.replacement_editor, cx)); + let focus_handle = search.replacement_editor.read(cx).focus_handle(cx); let replace_actions = h_flex().when(search.replace_enabled, |this| { this.child( IconButton::new("project-search-replace-next", IconName::ReplaceNext) @@ -1712,7 +1755,17 @@ impl Render for ProjectSearchBar { }) } })) - .tooltip(|cx| Tooltip::for_action("Replace next match", &ReplaceNext, cx)), + .tooltip({ + let focus_handle = focus_handle.clone(); + move |cx| { + Tooltip::for_action_in( + "Replace next match", + &ReplaceNext, + &focus_handle, + cx, + ) + } + }), ) .child( IconButton::new("project-search-replace-all", IconName::ReplaceAll) @@ -1723,7 +1776,17 @@ impl Render for ProjectSearchBar { }) } })) - .tooltip(|cx| Tooltip::for_action("Replace all matches", &ReplaceAll, cx)), + .tooltip({ + let focus_handle = focus_handle.clone(); + move |cx| { + Tooltip::for_action_in( + "Replace all matches", + &ReplaceAll, + &focus_handle, + cx, + ) + } + }), ) }); h_flex() @@ -1790,6 +1853,7 @@ impl Render for ProjectSearchBar { search .search_options .contains(SearchOptions::INCLUDE_IGNORED), + focus_handle.clone(), cx.listener(|this, _, cx| { this.toggle_search_option(SearchOptions::INCLUDE_IGNORED, cx); }), diff --git a/crates/search/src/search.rs b/crates/search/src/search.rs index b99672c532d6b..d13a12576b0f9 100644 --- a/crates/search/src/search.rs +++ b/crates/search/src/search.rs @@ -1,7 +1,7 @@ use bitflags::bitflags; pub use buffer_search::BufferSearchBar; use editor::SearchSettings; -use gpui::{actions, Action, AppContext, IntoElement}; +use gpui::{actions, Action, AppContext, FocusHandle, IntoElement}; use project::search::SearchQuery; pub use project_search::ProjectSearchView; use ui::{prelude::*, Tooltip}; @@ -106,6 +106,7 @@ impl SearchOptions { pub fn as_button( &self, active: bool, + focus_handle: FocusHandle, action: impl Fn(&gpui::ClickEvent, &mut WindowContext) + 'static, ) -> impl IntoElement { IconButton::new(self.label(), self.icon()) @@ -115,7 +116,7 @@ impl SearchOptions { .tooltip({ let action = self.to_toggle_action(); let label = self.label(); - move |cx| Tooltip::for_action(label, &*action, cx) + move |cx| Tooltip::for_action_in(label, &*action, &focus_handle, cx) }) } } diff --git a/crates/search/src/search_bar.rs b/crates/search/src/search_bar.rs index 0594036c25483..102f04c4b95c2 100644 --- a/crates/search/src/search_bar.rs +++ b/crates/search/src/search_bar.rs @@ -1,4 +1,4 @@ -use gpui::{Action, IntoElement}; +use gpui::{Action, FocusHandle, IntoElement}; use ui::IconButton; use ui::{prelude::*, Tooltip}; @@ -7,12 +7,13 @@ pub(super) fn render_nav_button( active: bool, tooltip: &'static str, action: &'static dyn Action, + focus_handle: FocusHandle, ) -> impl IntoElement { IconButton::new( SharedString::from(format!("search-nav-button-{}", action.name())), icon, ) .on_click(|_, cx| cx.dispatch_action(action.boxed_clone())) - .tooltip(move |cx| Tooltip::for_action(tooltip, action, cx)) + .tooltip(move |cx| Tooltip::for_action_in(tooltip, action, &focus_handle, cx)) .disabled(!active) } diff --git a/crates/terminal_view/src/terminal_panel.rs b/crates/terminal_view/src/terminal_panel.rs index 72f8606fa2176..7d95613804414 100644 --- 
a/crates/terminal_view/src/terminal_panel.rs +++ b/crates/terminal_view/src/terminal_panel.rs @@ -166,7 +166,16 @@ impl TerminalPanel { pub fn asssistant_enabled(&mut self, enabled: bool, cx: &mut ViewContext) { self.assistant_enabled = enabled; if enabled { - self.assistant_tab_bar_button = Some(cx.new_view(|_| InlineAssistTabBarButton).into()); + let focus_handle = self + .pane + .read(cx) + .active_item() + .map(|item| item.focus_handle(cx)) + .unwrap_or(self.focus_handle(cx)); + self.assistant_tab_bar_button = Some( + cx.new_view(move |_| InlineAssistTabBarButton { focus_handle }) + .into(), + ); } else { self.assistant_tab_bar_button = None; } @@ -859,16 +868,21 @@ impl Panel for TerminalPanel { } } -struct InlineAssistTabBarButton; +struct InlineAssistTabBarButton { + focus_handle: FocusHandle, +} impl Render for InlineAssistTabBarButton { fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + let focus_handle = self.focus_handle.clone(); IconButton::new("terminal_inline_assistant", IconName::ZedAssistant) .icon_size(IconSize::Small) .on_click(cx.listener(|_, _, cx| { cx.dispatch_action(InlineAssist::default().boxed_clone()); })) - .tooltip(move |cx| Tooltip::for_action("Inline Assist", &InlineAssist::default(), cx)) + .tooltip(move |cx| { + Tooltip::for_action_in("Inline Assist", &InlineAssist::default(), &focus_handle, cx) + }) } } From 73ff8c0f1fd19ffcf93bc0bc2b8ba9d05f5eb935 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Fri, 27 Sep 2024 14:16:14 +0200 Subject: [PATCH 112/228] Fix missing tooltips for selected buttons (#18435) Reverts #13857. Hiding tooltips for selected buttons prevents tooltips like "Close x dock" from showing up, see #14938 for an example. The intention of the original PR was to hide the "Show application menu" tooltip, while the context menu is open. In order to fix this without breaking other UI elements, we track the state of the context menu using `PopoverMenuHandle` now, which allows us to prevent the tooltip from showing up while the context menu is open. 
Closes #14938 Release Notes: - Fixed an issue where some tooltips would not show up --- crates/storybook/src/story_selector.rs | 4 +++- crates/title_bar/src/application_menu.rs | 24 ++++++++++++------- .../title_bar/src/stories/application_menu.rs | 16 ++++++++++--- crates/title_bar/src/title_bar.rs | 22 ++++++++++------- .../ui/src/components/button/button_like.rs | 6 ++--- 5 files changed, 47 insertions(+), 25 deletions(-) diff --git a/crates/storybook/src/story_selector.rs b/crates/storybook/src/story_selector.rs index 881fd83f8f21b..3a1c2f56306d8 100644 --- a/crates/storybook/src/story_selector.rs +++ b/crates/storybook/src/story_selector.rs @@ -46,7 +46,9 @@ pub enum ComponentStory { impl ComponentStory { pub fn story(&self, cx: &mut WindowContext) -> AnyView { match self { - Self::ApplicationMenu => cx.new_view(|_| title_bar::ApplicationMenuStory).into(), + Self::ApplicationMenu => cx + .new_view(|cx| title_bar::ApplicationMenuStory::new(cx)) + .into(), Self::AutoHeightEditor => AutoHeightEditorStory::new(cx).into(), Self::Avatar => cx.new_view(|_| ui::AvatarStory).into(), Self::Button => cx.new_view(|_| ui::ButtonStory).into(), diff --git a/crates/title_bar/src/application_menu.rs b/crates/title_bar/src/application_menu.rs index 47d4818da5e92..13ee10c141073 100644 --- a/crates/title_bar/src/application_menu.rs +++ b/crates/title_bar/src/application_menu.rs @@ -1,16 +1,19 @@ -use ui::{prelude::*, ContextMenu, NumericStepper, PopoverMenu, Tooltip}; +use ui::{prelude::*, ContextMenu, NumericStepper, PopoverMenu, PopoverMenuHandle, Tooltip}; -#[derive(IntoElement)] -pub struct ApplicationMenu; +pub struct ApplicationMenu { + context_menu_handle: PopoverMenuHandle, +} impl ApplicationMenu { - pub fn new() -> Self { - Self + pub fn new(_: &mut ViewContext) -> Self { + Self { + context_menu_handle: PopoverMenuHandle::default(), + } } } -impl RenderOnce for ApplicationMenu { - fn render(self, _cx: &mut WindowContext) -> impl IntoElement { +impl Render for ApplicationMenu { + fn render(&mut self, _cx: &mut ViewContext) -> impl IntoElement { PopoverMenu::new("application-menu") .menu(move |cx| { ContextMenu::build(cx, move |menu, cx| { @@ -125,9 +128,12 @@ impl RenderOnce for ApplicationMenu { .trigger( IconButton::new("application-menu", ui::IconName::Menu) .style(ButtonStyle::Subtle) - .tooltip(|cx| Tooltip::text("Open Application Menu", cx)) - .icon_size(IconSize::Small), + .icon_size(IconSize::Small) + .when(!self.context_menu_handle.is_deployed(), |this| { + this.tooltip(|cx| Tooltip::text("Open Application Menu", cx)) + }), ) + .with_handle(self.context_menu_handle.clone()) .into_any_element() } } diff --git a/crates/title_bar/src/stories/application_menu.rs b/crates/title_bar/src/stories/application_menu.rs index 0b804209fd1a3..c3f8c700ae6f5 100644 --- a/crates/title_bar/src/stories/application_menu.rs +++ b/crates/title_bar/src/stories/application_menu.rs @@ -1,11 +1,21 @@ -use gpui::Render; +use gpui::{Render, View}; use story::{Story, StoryItem, StorySection}; use ui::prelude::*; use crate::application_menu::ApplicationMenu; -pub struct ApplicationMenuStory; +pub struct ApplicationMenuStory { + menu: View, +} + +impl ApplicationMenuStory { + pub fn new(cx: &mut WindowContext) -> Self { + Self { + menu: cx.new_view(ApplicationMenu::new), + } + } +} impl Render for ApplicationMenuStory { fn render(&mut self, _cx: &mut ViewContext) -> impl IntoElement { @@ -13,7 +23,7 @@ impl Render for ApplicationMenuStory { .child(Story::title_for::()) .child(StorySection::new().child(StoryItem::new( 
"Application Menu", - h_flex().child(ApplicationMenu::new()), + h_flex().child(self.menu.clone()), ))) } } diff --git a/crates/title_bar/src/title_bar.rs b/crates/title_bar/src/title_bar.rs index e2d45a923b7d0..73a82e9ee0620 100644 --- a/crates/title_bar/src/title_bar.rs +++ b/crates/title_bar/src/title_bar.rs @@ -15,7 +15,7 @@ use feature_flags::{FeatureFlagAppExt, ZedPro}; use gpui::{ actions, div, px, Action, AnyElement, AppContext, Decorations, Element, InteractiveElement, Interactivity, IntoElement, Model, MouseButton, ParentElement, Render, Stateful, - StatefulInteractiveElement, Styled, Subscription, ViewContext, VisualContext, WeakView, + StatefulInteractiveElement, Styled, Subscription, View, ViewContext, VisualContext, WeakView, }; use project::{Project, RepositoryEntry}; use recent_projects::RecentProjects; @@ -65,6 +65,7 @@ pub struct TitleBar { client: Arc, workspace: WeakView, should_move: bool, + application_menu: Option>, _subscriptions: Vec, } @@ -131,12 +132,7 @@ impl Render for TitleBar { .child( h_flex() .gap_1() - .children(match self.platform_style { - PlatformStyle::Mac => None, - PlatformStyle::Linux | PlatformStyle::Windows => { - Some(ApplicationMenu::new()) - } - }) + .when_some(self.application_menu.clone(), |this, menu| this.child(menu)) .children(self.render_project_host(cx)) .child(self.render_project_name(cx)) .children(self.render_project_branch(cx)) @@ -215,6 +211,15 @@ impl TitleBar { let user_store = workspace.app_state().user_store.clone(); let client = workspace.app_state().client.clone(); let active_call = ActiveCall::global(cx); + + let platform_style = PlatformStyle::platform(); + let application_menu = match platform_style { + PlatformStyle::Mac => None, + PlatformStyle::Linux | PlatformStyle::Windows => { + Some(cx.new_view(ApplicationMenu::new)) + } + }; + let mut subscriptions = Vec::new(); subscriptions.push( cx.observe(&workspace.weak_handle().upgrade().unwrap(), |_, _, cx| { @@ -227,9 +232,10 @@ impl TitleBar { subscriptions.push(cx.observe(&user_store, |_, _, cx| cx.notify())); Self { - platform_style: PlatformStyle::platform(), + platform_style, content: div().id(id.into()), children: SmallVec::new(), + application_menu, workspace: workspace.weak_handle(), should_move: false, project, diff --git a/crates/ui/src/components/button/button_like.rs b/crates/ui/src/components/button/button_like.rs index 625875e4c9eb9..a22c27d24176d 100644 --- a/crates/ui/src/components/button/button_like.rs +++ b/crates/ui/src/components/button/button_like.rs @@ -523,10 +523,8 @@ impl RenderOnce for ButtonLike { }) }, ) - .when(!self.selected, |this| { - this.when_some(self.tooltip, |this, tooltip| { - this.tooltip(move |cx| tooltip(cx)) - }) + .when_some(self.tooltip, |this, tooltip| { + this.tooltip(move |cx| tooltip(cx)) }) .children(self.children) } From 03c7f085812a2ffddbf0673f04804851f11230ce Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Fri, 27 Sep 2024 10:29:49 -0400 Subject: [PATCH 113/228] docs: Ollama api_url improvements (#18440) --- docs/src/assistant/configuration.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/src/assistant/configuration.md b/docs/src/assistant/configuration.md index 17b52a27d8801..9d9c62d8c6bc5 100644 --- a/docs/src/assistant/configuration.md +++ b/docs/src/assistant/configuration.md @@ -124,7 +124,7 @@ Download and install Ollama from [ollama.com/download](https://ollama.com/downlo 3. In the assistant panel, select one of the Ollama models using the model dropdown. -4. 
(Optional) Specify a [custom api_url](#custom-endpoint) or [custom `low_speed_timeout_in_seconds`](#provider-timeout) if required. +4. (Optional) Specify an [`api_url`](#custom-endpoint) or [`low_speed_timeout_in_seconds`](#provider-timeout) if required. #### Ollama Context Length {#ollama-context} @@ -138,6 +138,7 @@ Depending on your hardware or use-case you may wish to limit or increase the con { "language_models": { "ollama": { + "api_url": "http://localhost:11434", "low_speed_timeout_in_seconds": 120, "available_models": [ { From dc5ffe6994b0f2467164a7e1dd77dada96b64980 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=96mer=20Sinan=20A=C4=9Facan?= Date: Fri, 27 Sep 2024 16:55:03 +0200 Subject: [PATCH 114/228] Fix GoToDefinition changing the viewport unnecessarily (#18441) Closes #10738. Release Notes: - Fixed `GoToDefinition` changing the viewport (scrolling up/down) even when the definition is already within the viewport. ([#10738](https://github.com/zed-industries/zed/issues/10738)) --- crates/editor/src/editor.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 44de6014ec813..cfffa584b6c21 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -9697,7 +9697,7 @@ impl Editor { if Some(&target.buffer) == editor.buffer.read(cx).as_singleton().as_ref() { let buffer = target.buffer.read(cx); let range = check_multiline_range(buffer, range); - editor.change_selections(Some(Autoscroll::focused()), cx, |s| { + editor.change_selections(Some(Autoscroll::fit()), cx, |s| { s.select_ranges([range]); }); } else { From 6d4ecac6100f7908278e78cb8c5102f7f91c54c5 Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Fri, 27 Sep 2024 10:59:19 -0400 Subject: [PATCH 115/228] Add a `get-release-notes-since` script (#18445) Release Notes: - N/A --- ...-changes-since => get-pull-requests-since} | 4 +- script/get-release-notes-since | 125 ++++++++++++++++++ 2 files changed, 127 insertions(+), 2 deletions(-) rename script/{get-changes-since => get-pull-requests-since} (94%) create mode 100755 script/get-release-notes-since diff --git a/script/get-changes-since b/script/get-pull-requests-since similarity index 94% rename from script/get-changes-since rename to script/get-pull-requests-since index 3b889ce991ade..c8509480a6c5c 100755 --- a/script/get-changes-since +++ b/script/get-pull-requests-since @@ -22,12 +22,12 @@ async function main() { const startDate = new Date(process.argv[2]); const today = new Date(); - console.log(`Changes from ${startDate} to ${today}\n`); + console.log(`Pull requests from ${startDate} to ${today}\n`); let pullRequestNumbers = getPullRequestNumbers(startDate, today); // Fetch the pull requests from the GitHub API. 
- console.log("Merged Pull requests:"); + console.log("Merged pull requests:"); for (const pullRequestNumber of pullRequestNumbers) { const webURL = `https://github.com/zed-industries/zed/pull/${pullRequestNumber}`; const apiURL = `https://api.github.com/repos/zed-industries/zed/pulls/${pullRequestNumber}`; diff --git a/script/get-release-notes-since b/script/get-release-notes-since new file mode 100755 index 0000000000000..20a6fc18dee9e --- /dev/null +++ b/script/get-release-notes-since @@ -0,0 +1,125 @@ +#!/usr/bin/env node --redirect-warnings=/dev/null + +const { execFileSync } = require("child_process"); +const { GITHUB_ACCESS_TOKEN } = process.env; + +main(); + +async function main() { + const startDate = new Date(process.argv[2]); + const today = new Date(); + + console.log(`Release notes from ${startDate} to ${today}\n`); + + const releases = await getReleases(startDate, today); + const previewReleases = releases.filter((release) => + release.tagName.includes("-pre"), + ); + + const stableReleases = releases.filter( + (release) => !release.tagName.includes("-pre"), + ); + + // Filter out all preview release, as all of those changes have made it to the stable release, except for the latest preview release + const aggregatedReleases = stableReleases + .concat(previewReleases[0]) + .reverse(); + + const aggregatedReleaseTitles = aggregatedReleases + .map((release) => release.name) + .join(", "); + + console.log(); + console.log(`Release titles: ${aggregatedReleaseTitles}`); + + console.log("Release notes:"); + console.log(); + + for (const release of aggregatedReleases) { + const publishedDate = release.publishedAt.split("T")[0]; + console.log(`${release.name}: ${publishedDate}`); + console.log(); + console.log(release.description); + console.log(); + } +} + +async function getReleases(startDate, endDate) { + const query = ` + query ($owner: String!, $repo: String!, $cursor: String) { + repository(owner: $owner, name: $repo) { + releases(first: 100, orderBy: {field: CREATED_AT, direction: DESC}, after: $cursor) { + nodes { + tagName + name + createdAt + publishedAt + description + url + author { + login + } + } + pageInfo { + hasNextPage + endCursor + } + } + } + } + `; + + let allReleases = []; + let hasNextPage = true; + let cursor = null; + + while (hasNextPage) { + const response = await fetch("https://api.github.com/graphql", { + method: "POST", + headers: { + Authorization: `Bearer ${GITHUB_ACCESS_TOKEN}`, + "Content-Type": "application/json", + }, + body: JSON.stringify({ + query, + variables: { owner: "zed-industries", repo: "zed", cursor }, + }), + }); + + if (!response.ok) { + throw new Error(`HTTP error! 
status: ${response.status}`); + } + + const data = await response.json(); + + if (data.errors) { + throw new Error(`GraphQL error: ${JSON.stringify(data.errors)}`); + } + + if (!data.data || !data.data.repository || !data.data.repository.releases) { + throw new Error(`Unexpected response structure: ${JSON.stringify(data)}`); + } + + const releases = data.data.repository.releases.nodes; + allReleases = allReleases.concat(releases); + + hasNextPage = data.data.repository.releases.pageInfo.hasNextPage; + cursor = data.data.repository.releases.pageInfo.endCursor; + + lastReleaseOnPage = releases[releases.length - 1]; + + if ( + releases.length > 0 && + new Date(lastReleaseOnPage.createdAt) < startDate + ) { + break; + } + } + + const filteredReleases = allReleases.filter((release) => { + const releaseDate = new Date(release.createdAt); + return releaseDate >= startDate && releaseDate <= endDate; + }); + + return filteredReleases; +} From 1914cef0aa9621e2ea1122ff2e1db70ac5b93b48 Mon Sep 17 00:00:00 2001 From: Anthony Eid Date: Fri, 27 Sep 2024 11:11:53 -0400 Subject: [PATCH 116/228] Improve contrast for breakpoint & debug active line colors --- crates/theme/src/default_colors.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/crates/theme/src/default_colors.rs b/crates/theme/src/default_colors.rs index 6901423ca3ad3..515f85b3ee908 100644 --- a/crates/theme/src/default_colors.rs +++ b/crates/theme/src/default_colors.rs @@ -48,7 +48,7 @@ impl ThemeColors { icon_disabled: neutral().light().step_9(), icon_placeholder: neutral().light().step_10(), icon_accent: blue().light().step_11(), - debugger_accent: red().dark().step_3(), + debugger_accent: red().light().step_10(), status_bar_background: neutral().light().step_2(), title_bar_background: neutral().light().step_2(), title_bar_inactive_background: neutral().light().step_3(), @@ -72,7 +72,7 @@ impl ThemeColors { editor_subheader_background: neutral().light().step_2(), editor_active_line_background: neutral().light_alpha().step_3(), editor_highlighted_line_background: neutral().light_alpha().step_3(), - editor_debugger_active_line_background: neutral().light().step_8(), + editor_debugger_active_line_background: yellow().dark_alpha().step_3(), editor_line_number: neutral().light().step_10(), editor_active_line_number: neutral().light().step_11(), editor_invisible: neutral().light().step_10(), @@ -149,7 +149,7 @@ impl ThemeColors { icon_disabled: neutral().dark().step_9(), icon_placeholder: neutral().dark().step_10(), icon_accent: blue().dark().step_11(), - debugger_accent: red().dark().step_3(), + debugger_accent: red().light().step_10(), status_bar_background: neutral().dark().step_2(), title_bar_background: neutral().dark().step_2(), title_bar_inactive_background: neutral().dark().step_3(), @@ -172,8 +172,8 @@ impl ThemeColors { editor_gutter_background: neutral().dark().step_1(), editor_subheader_background: neutral().dark().step_3(), editor_active_line_background: neutral().dark_alpha().step_3(), - editor_highlighted_line_background: neutral().dark_alpha().step_4(), - editor_debugger_active_line_background: neutral().light_alpha().step_5(), + editor_highlighted_line_background: yellow().dark_alpha().step_4(), + editor_debugger_active_line_background: yellow().dark_alpha().step_3(), editor_line_number: neutral().dark_alpha().step_10(), editor_active_line_number: neutral().dark_alpha().step_12(), editor_invisible: neutral().dark_alpha().step_4(), From ffd1083cc185ddd13760391dc81231c18236e912 Mon Sep 17 00:00:00 2001 
From: Conrad Irwin Date: Fri, 27 Sep 2024 10:06:19 -0600 Subject: [PATCH 117/228] vim: Command selection fixes (#18424) Release Notes: - vim: Fixed cursor position after `:{range}yank`. - vim: Added `:fo[ld]`, `:foldo[pen]` and `:foldc[lose]` --- crates/vim/src/command.rs | 260 +++++++++++++++++++++++----------- crates/vim/src/indent.rs | 8 +- crates/vim/src/normal.rs | 6 +- crates/vim/src/normal/yank.rs | 32 ++++- crates/vim/src/visual.rs | 14 +- 5 files changed, 218 insertions(+), 102 deletions(-) diff --git a/crates/vim/src/command.rs b/crates/vim/src/command.rs index 49e739faadfea..605bc3a05e43c 100644 --- a/crates/vim/src/command.rs +++ b/crates/vim/src/command.rs @@ -1,4 +1,9 @@ -use std::{iter::Peekable, ops::Range, str::Chars, sync::OnceLock}; +use std::{ + iter::Peekable, + ops::{Deref, Range}, + str::Chars, + sync::OnceLock, +}; use anyhow::{anyhow, Result}; use command_palette_hooks::CommandInterceptResult; @@ -21,7 +26,7 @@ use crate::{ JoinLines, }, state::Mode, - visual::{VisualDeleteLine, VisualYankLine}, + visual::VisualDeleteLine, Vim, }; @@ -30,38 +35,55 @@ pub struct GoToLine { range: CommandRange, } -#[derive(Debug)] +#[derive(Debug, Clone, PartialEq, Deserialize)] +pub struct YankCommand { + range: CommandRange, +} + +#[derive(Debug, Clone, PartialEq, Deserialize)] pub struct WithRange { - is_count: bool, + restore_selection: bool, range: CommandRange, - action: Box, + action: WrappedAction, +} + +#[derive(Debug, Clone, PartialEq, Deserialize)] +pub struct WithCount { + count: u32, + action: WrappedAction, } +#[derive(Debug)] +struct WrappedAction(Box); + actions!(vim, [VisualCommand, CountCommand]); -impl_actions!(vim, [GoToLine, WithRange]); +impl_actions!(vim, [GoToLine, YankCommand, WithRange, WithCount]); -impl<'de> Deserialize<'de> for WithRange { +impl<'de> Deserialize<'de> for WrappedAction { fn deserialize(_: D) -> Result where D: serde::Deserializer<'de>, { - Err(serde::de::Error::custom("Cannot deserialize WithRange")) + Err(serde::de::Error::custom("Cannot deserialize WrappedAction")) } } -impl PartialEq for WithRange { +impl PartialEq for WrappedAction { fn eq(&self, other: &Self) -> bool { - self.range == other.range && self.action.partial_eq(&*other.action) + self.0.partial_eq(&*other.0) } } -impl Clone for WithRange { +impl Clone for WrappedAction { fn clone(&self) -> Self { - Self { - is_count: self.is_count, - range: self.range.clone(), - action: self.action.boxed_clone(), - } + Self(self.0.boxed_clone()) + } +} + +impl Deref for WrappedAction { + type Target = dyn Action; + fn deref(&self) -> &dyn Action { + &*self.0 } } @@ -110,13 +132,33 @@ pub fn register(editor: &mut Editor, cx: &mut ViewContext) { vim.move_cursor(Motion::StartOfDocument, Some(buffer_row.0 as usize + 1), cx); }); - Vim::action(editor, cx, |vim, action: &WithRange, cx| { - if action.is_count { - for _ in 0..action.range.as_count() { - cx.dispatch_action(action.action.boxed_clone()) + Vim::action(editor, cx, |vim, action: &YankCommand, cx| { + vim.update_editor(cx, |vim, editor, cx| { + let snapshot = editor.snapshot(cx); + if let Ok(range) = action.range.buffer_range(vim, editor, cx) { + let end = if range.end < snapshot.max_buffer_row() { + Point::new(range.end.0 + 1, 0) + } else { + snapshot.buffer_snapshot.max_point() + }; + vim.copy_ranges( + editor, + true, + true, + vec![Point::new(range.start.0, 0)..end], + cx, + ) } - return; + }); + }); + + Vim::action(editor, cx, |_, action: &WithCount, cx| { + for _ in 0..action.count { + cx.dispatch_action(action.action.boxed_clone()) 
} + }); + + Vim::action(editor, cx, |vim, action: &WithRange, cx| { let result = vim.update_editor(cx, |vim, editor, cx| { action.range.buffer_range(vim, editor, cx) }); @@ -134,31 +176,51 @@ pub fn register(editor: &mut Editor, cx: &mut ViewContext) { } Some(Ok(result)) => result, }; - vim.update_editor(cx, |_, editor, cx| { - editor.change_selections(None, cx, |s| { - let end = Point::new(range.end.0, s.buffer().line_len(range.end)); - s.select_ranges([end..Point::new(range.start.0, 0)]); + + let previous_selections = vim + .update_editor(cx, |_, editor, cx| { + let selections = action + .restore_selection + .then(|| editor.selections.disjoint_anchor_ranges()); + editor.change_selections(None, cx, |s| { + let end = Point::new(range.end.0, s.buffer().line_len(range.end)); + s.select_ranges([end..Point::new(range.start.0, 0)]); + }); + selections }) - }); + .flatten(); cx.dispatch_action(action.action.boxed_clone()); cx.defer(move |vim, cx| { vim.update_editor(cx, |_, editor, cx| { editor.change_selections(None, cx, |s| { - s.select_ranges([Point::new(range.start.0, 0)..Point::new(range.start.0, 0)]); + if let Some(previous_selections) = previous_selections { + s.select_ranges(previous_selections); + } else { + s.select_ranges([ + Point::new(range.start.0, 0)..Point::new(range.start.0, 0) + ]); + } }) }); }); }); } -#[derive(Debug, Default)] +#[derive(Default)] struct VimCommand { prefix: &'static str, suffix: &'static str, action: Option>, action_name: Option<&'static str>, bang_action: Option>, - has_range: bool, + range: Option< + Box< + dyn Fn(Box, &CommandRange) -> Option> + + Send + + Sync + + 'static, + >, + >, has_count: bool, } @@ -187,16 +249,25 @@ impl VimCommand { self } - fn range(mut self) -> Self { - self.has_range = true; + fn range( + mut self, + f: impl Fn(Box, &CommandRange) -> Option> + Send + Sync + 'static, + ) -> Self { + self.range = Some(Box::new(f)); self } + fn count(mut self) -> Self { self.has_count = true; self } - fn parse(&self, mut query: &str, cx: &AppContext) -> Option> { + fn parse( + &self, + mut query: &str, + range: &Option, + cx: &AppContext, + ) -> Option> { let has_bang = query.ends_with('!'); if has_bang { query = &query[..query.len() - 1]; @@ -207,14 +278,20 @@ impl VimCommand { return None; } - if has_bang && self.bang_action.is_some() { - Some(self.bang_action.as_ref().unwrap().boxed_clone()) + let action = if has_bang && self.bang_action.is_some() { + self.bang_action.as_ref().unwrap().boxed_clone() } else if let Some(action) = self.action.as_ref() { - Some(action.boxed_clone()) + action.boxed_clone() } else if let Some(action_name) = self.action_name { - cx.build_action(action_name, None).log_err() + cx.build_action(action_name, None).log_err()? 
} else { - None + return None; + }; + + if let Some(range) = range { + self.range.as_ref().and_then(|f| f(action, range)) + } else { + Some(action) } } @@ -405,27 +482,17 @@ impl CommandRange { } } - pub fn as_count(&self) -> u32 { + pub fn as_count(&self) -> Option { if let CommandRange { start: Position::Line { row, offset: 0 }, end: None, } = &self { - *row + Some(*row) } else { - 0 + None } } - - pub fn is_count(&self) -> bool { - matches!( - &self, - CommandRange { - start: Position::Line { row: _, offset: 0 }, - end: None - } - ) - } } fn generate_commands(_: &AppContext) -> Vec { @@ -578,18 +645,32 @@ fn generate_commands(_: &AppContext) -> Vec { VimCommand::str(("cl", "ist"), "diagnostics::Deploy"), VimCommand::new(("cc", ""), editor::actions::Hover), VimCommand::new(("ll", ""), editor::actions::Hover), - VimCommand::new(("cn", "ext"), editor::actions::GoToDiagnostic).count(), - VimCommand::new(("cp", "revious"), editor::actions::GoToPrevDiagnostic).count(), - VimCommand::new(("cN", "ext"), editor::actions::GoToPrevDiagnostic).count(), - VimCommand::new(("lp", "revious"), editor::actions::GoToPrevDiagnostic).count(), - VimCommand::new(("lN", "ext"), editor::actions::GoToPrevDiagnostic).count(), - VimCommand::new(("j", "oin"), JoinLines).range(), - VimCommand::new(("dif", "fupdate"), editor::actions::ToggleHunkDiff).range(), - VimCommand::new(("rev", "ert"), editor::actions::RevertSelectedHunks).range(), - VimCommand::new(("d", "elete"), VisualDeleteLine).range(), - VimCommand::new(("y", "ank"), VisualYankLine).range(), - VimCommand::new(("sor", "t"), SortLinesCaseSensitive).range(), - VimCommand::new(("sort i", ""), SortLinesCaseInsensitive).range(), + VimCommand::new(("cn", "ext"), editor::actions::GoToDiagnostic).range(wrap_count), + VimCommand::new(("cp", "revious"), editor::actions::GoToPrevDiagnostic).range(wrap_count), + VimCommand::new(("cN", "ext"), editor::actions::GoToPrevDiagnostic).range(wrap_count), + VimCommand::new(("lp", "revious"), editor::actions::GoToPrevDiagnostic).range(wrap_count), + VimCommand::new(("lN", "ext"), editor::actions::GoToPrevDiagnostic).range(wrap_count), + VimCommand::new(("j", "oin"), JoinLines).range(select_range), + VimCommand::new(("fo", "ld"), editor::actions::FoldSelectedRanges).range(act_on_range), + VimCommand::new(("foldo", "pen"), editor::actions::UnfoldLines) + .bang(editor::actions::UnfoldRecursive) + .range(act_on_range), + VimCommand::new(("foldc", "lose"), editor::actions::Fold) + .bang(editor::actions::FoldRecursive) + .range(act_on_range), + VimCommand::new(("dif", "fupdate"), editor::actions::ToggleHunkDiff).range(act_on_range), + VimCommand::new(("rev", "ert"), editor::actions::RevertSelectedHunks).range(act_on_range), + VimCommand::new(("d", "elete"), VisualDeleteLine).range(select_range), + VimCommand::new(("y", "ank"), gpui::NoAction).range(|_, range| { + Some( + YankCommand { + range: range.clone(), + } + .boxed_clone(), + ) + }), + VimCommand::new(("sor", "t"), SortLinesCaseSensitive).range(select_range), + VimCommand::new(("sort i", ""), SortLinesCaseInsensitive).range(select_range), VimCommand::str(("E", "xplore"), "project_panel::ToggleFocus"), VimCommand::str(("H", "explore"), "project_panel::ToggleFocus"), VimCommand::str(("L", "explore"), "project_panel::ToggleFocus"), @@ -620,6 +701,38 @@ fn commands(cx: &AppContext) -> &Vec { .0 } +fn act_on_range(action: Box, range: &CommandRange) -> Option> { + Some( + WithRange { + restore_selection: true, + range: range.clone(), + action: WrappedAction(action), + } + 
.boxed_clone(), + ) +} + +fn select_range(action: Box, range: &CommandRange) -> Option> { + Some( + WithRange { + restore_selection: false, + range: range.clone(), + action: WrappedAction(action), + } + .boxed_clone(), + ) +} + +fn wrap_count(action: Box, range: &CommandRange) -> Option> { + range.as_count().map(|count| { + WithCount { + count, + action: WrappedAction(action), + } + .boxed_clone() + }) +} + pub fn command_interceptor(mut input: &str, cx: &AppContext) -> Option { // NOTE: We also need to support passing arguments to commands like :w // (ideally with filename autocompletion). @@ -679,25 +792,12 @@ pub fn command_interceptor(mut input: &str, cx: &AppContext) -> Option) { vim.store_visual_marks(cx); vim.update_editor(cx, |vim, editor, cx| { editor.transact(cx, |editor, cx| { - let mut original_positions = vim.save_selection_starts(editor, cx); + let original_positions = vim.save_selection_starts(editor, cx); for _ in 0..count { editor.indent(&Default::default(), cx); } - vim.restore_selection_cursors(editor, cx, &mut original_positions); + vim.restore_selection_cursors(editor, cx, original_positions); }); }); if vim.mode.is_visual() { @@ -38,11 +38,11 @@ pub(crate) fn register(editor: &mut Editor, cx: &mut ViewContext) { vim.store_visual_marks(cx); vim.update_editor(cx, |vim, editor, cx| { editor.transact(cx, |editor, cx| { - let mut original_positions = vim.save_selection_starts(editor, cx); + let original_positions = vim.save_selection_starts(editor, cx); for _ in 0..count { editor.outdent(&Default::default(), cx); } - vim.restore_selection_cursors(editor, cx, &mut original_positions); + vim.restore_selection_cursors(editor, cx, original_positions); }); }); if vim.mode.is_visual() { diff --git a/crates/vim/src/normal.rs b/crates/vim/src/normal.rs index 10bf3c8e8d73b..4a4927a2fc5bf 100644 --- a/crates/vim/src/normal.rs +++ b/crates/vim/src/normal.rs @@ -395,9 +395,9 @@ impl Vim { self.store_visual_marks(cx); self.update_editor(cx, |vim, editor, cx| { editor.transact(cx, |editor, cx| { - let mut original_positions = vim.save_selection_starts(editor, cx); + let original_positions = vim.save_selection_starts(editor, cx); editor.toggle_comments(&Default::default(), cx); - vim.restore_selection_cursors(editor, cx, &mut original_positions); + vim.restore_selection_cursors(editor, cx, original_positions); }); }); if self.mode.is_visual() { @@ -467,7 +467,7 @@ impl Vim { &self, editor: &mut Editor, cx: &mut ViewContext, - positions: &mut HashMap, + mut positions: HashMap, ) { editor.change_selections(Some(Autoscroll::fit()), cx, |s| { s.move_with(|map, selection| { diff --git a/crates/vim/src/normal/yank.rs b/crates/vim/src/normal/yank.rs index 8271aa6cabc16..c176cd6ca9cf3 100644 --- a/crates/vim/src/normal/yank.rs +++ b/crates/vim/src/normal/yank.rs @@ -1,4 +1,4 @@ -use std::time::Duration; +use std::{ops::Range, time::Duration}; use crate::{ motion::Motion, @@ -73,7 +73,18 @@ impl Vim { linewise: bool, cx: &mut ViewContext, ) { - self.copy_selections_content_internal(editor, linewise, true, cx); + self.copy_ranges( + editor, + linewise, + true, + editor + .selections + .all_adjusted(cx) + .iter() + .map(|s| s.range()) + .collect(), + cx, + ) } pub fn copy_selections_content( @@ -82,17 +93,28 @@ impl Vim { linewise: bool, cx: &mut ViewContext, ) { - self.copy_selections_content_internal(editor, linewise, false, cx); + self.copy_ranges( + editor, + linewise, + false, + editor + .selections + .all_adjusted(cx) + .iter() + .map(|s| s.range()) + .collect(), + cx, + ) } - fn 
copy_selections_content_internal( + pub(crate) fn copy_ranges( &mut self, editor: &mut Editor, linewise: bool, is_yank: bool, + selections: Vec>, cx: &mut ViewContext, ) { - let selections = editor.selections.all_adjusted(cx); let buffer = editor.buffer().read(cx).snapshot(cx); let mut text = String::new(); let mut clipboard_selections = Vec::with_capacity(selections.len()); diff --git a/crates/vim/src/visual.rs b/crates/vim/src/visual.rs index 1503eaac1b6b4..72474d3ae4078 100644 --- a/crates/vim/src/visual.rs +++ b/crates/vim/src/visual.rs @@ -63,12 +63,7 @@ pub fn register(editor: &mut Editor, cx: &mut ViewContext) { vim.record_current_action(cx); vim.visual_delete(true, cx); }); - Vim::action(editor, cx, |vim, _: &VisualYank, cx| { - vim.visual_yank(false, cx) - }); - Vim::action(editor, cx, |vim, _: &VisualYankLine, cx| { - vim.visual_yank(true, cx) - }); + Vim::action(editor, cx, |vim, _: &VisualYank, cx| vim.visual_yank(cx)); Vim::action(editor, cx, Vim::select_next); Vim::action(editor, cx, Vim::select_previous); @@ -483,11 +478,10 @@ impl Vim { self.switch_mode(Mode::Normal, true, cx); } - pub fn visual_yank(&mut self, line_mode: bool, cx: &mut ViewContext) { + pub fn visual_yank(&mut self, cx: &mut ViewContext) { self.store_visual_marks(cx); self.update_editor(cx, |vim, editor, cx| { - let line_mode = line_mode || editor.selections.line_mode; - editor.selections.line_mode = line_mode; + let line_mode = editor.selections.line_mode; vim.yank_selections_content(editor, line_mode, cx); editor.change_selections(None, cx, |s| { s.move_with(|map, selection| { @@ -657,7 +651,7 @@ impl Vim { self.stop_recording(cx); self.visual_delete(false, cx) } - Some(Operator::Yank) => self.visual_yank(false, cx), + Some(Operator::Yank) => self.visual_yank(cx), _ => {} // Ignoring other operators } } From caaa9a00a987855d6f64e6ef88f70cfd2499f848 Mon Sep 17 00:00:00 2001 From: Richard Feldman Date: Fri, 27 Sep 2024 13:30:25 -0400 Subject: [PATCH 118/228] Remove Qwen2 model (#18444) Removed deprecated Qwen2 7B Instruct model from zed.dev provider (staff only). 
Release Notes: - N/A --- crates/collab/k8s/collab.template.yml | 12 --- crates/collab/src/lib.rs | 4 - crates/collab/src/llm.rs | 36 -------- crates/collab/src/llm/authorization.rs | 2 - crates/collab/src/llm/db/seed.rs | 9 -- .../collab/src/llm/db/tests/provider_tests.rs | 1 - crates/collab/src/tests/test_server.rs | 2 - .../language_model/src/model/cloud_model.rs | 27 ------ crates/language_model/src/provider/cloud.rs | 84 +------------------ crates/rpc/src/llm.rs | 1 - 10 files changed, 2 insertions(+), 176 deletions(-) diff --git a/crates/collab/k8s/collab.template.yml b/crates/collab/k8s/collab.template.yml index 7ddb871503ccc..7d4ea6eb9a3cb 100644 --- a/crates/collab/k8s/collab.template.yml +++ b/crates/collab/k8s/collab.template.yml @@ -149,18 +149,6 @@ spec: secretKeyRef: name: google-ai key: api_key - - name: RUNPOD_API_KEY - valueFrom: - secretKeyRef: - name: runpod - key: api_key - optional: true - - name: RUNPOD_API_SUMMARY_URL - valueFrom: - secretKeyRef: - name: runpod - key: summary - optional: true - name: BLOB_STORE_ACCESS_KEY valueFrom: secretKeyRef: diff --git a/crates/collab/src/lib.rs b/crates/collab/src/lib.rs index 81ff3ff21f692..6c32023a97a28 100644 --- a/crates/collab/src/lib.rs +++ b/crates/collab/src/lib.rs @@ -170,8 +170,6 @@ pub struct Config { pub anthropic_api_key: Option>, pub anthropic_staff_api_key: Option>, pub llm_closed_beta_model_name: Option>, - pub runpod_api_key: Option>, - pub runpod_api_summary_url: Option>, pub zed_client_checksum_seed: Option, pub slack_panics_webhook: Option, pub auto_join_channel_id: Option, @@ -235,8 +233,6 @@ impl Config { stripe_api_key: None, stripe_price_id: None, supermaven_admin_api_key: None, - runpod_api_key: None, - runpod_api_summary_url: None, user_backfiller_github_access_token: None, } } diff --git a/crates/collab/src/llm.rs b/crates/collab/src/llm.rs index 53f0bfdfd0130..14f10342a78dd 100644 --- a/crates/collab/src/llm.rs +++ b/crates/collab/src/llm.rs @@ -400,42 +400,6 @@ async fn perform_completion( }) .boxed() } - LanguageModelProvider::Zed => { - let api_key = state - .config - .runpod_api_key - .as_ref() - .context("no Qwen2-7B API key configured on the server")?; - let api_url = state - .config - .runpod_api_summary_url - .as_ref() - .context("no Qwen2-7B URL configured on the server")?; - let chunks = open_ai::stream_completion( - &state.http_client, - api_url, - api_key, - serde_json::from_str(params.provider_request.get())?, - None, - ) - .await?; - - chunks - .map(|event| { - event.map(|chunk| { - let input_tokens = - chunk.usage.as_ref().map_or(0, |u| u.prompt_tokens) as usize; - let output_tokens = - chunk.usage.as_ref().map_or(0, |u| u.completion_tokens) as usize; - ( - serde_json::to_vec(&chunk).unwrap(), - input_tokens, - output_tokens, - ) - }) - }) - .boxed() - } }; Ok(Response::new(Body::wrap_stream(TokenCountingStream { diff --git a/crates/collab/src/llm/authorization.rs b/crates/collab/src/llm/authorization.rs index cc345579eca22..9f82af51c39b7 100644 --- a/crates/collab/src/llm/authorization.rs +++ b/crates/collab/src/llm/authorization.rs @@ -77,7 +77,6 @@ fn authorize_access_for_country( LanguageModelProvider::Anthropic => anthropic::is_supported_country(country_code), LanguageModelProvider::OpenAi => open_ai::is_supported_country(country_code), LanguageModelProvider::Google => google_ai::is_supported_country(country_code), - LanguageModelProvider::Zed => true, }; if !is_country_supported_by_provider { Err(Error::http( @@ -213,7 +212,6 @@ mod tests { (LanguageModelProvider::Anthropic, 
"T1"), // Tor (LanguageModelProvider::OpenAi, "T1"), // Tor (LanguageModelProvider::Google, "T1"), // Tor - (LanguageModelProvider::Zed, "T1"), // Tor ]; for (provider, country_code) in cases { diff --git a/crates/collab/src/llm/db/seed.rs b/crates/collab/src/llm/db/seed.rs index 24bc224227c8d..55c6c30cd5d8b 100644 --- a/crates/collab/src/llm/db/seed.rs +++ b/crates/collab/src/llm/db/seed.rs @@ -40,15 +40,6 @@ pub async fn seed_database(_config: &Config, db: &mut LlmDatabase, _force: bool) price_per_million_input_tokens: 25, // $0.25/MTok price_per_million_output_tokens: 125, // $1.25/MTok }, - ModelParams { - provider: LanguageModelProvider::Zed, - name: "Qwen/Qwen2-7B-Instruct".into(), - max_requests_per_minute: 5, - max_tokens_per_minute: 25_000, // These are arbitrary limits we've set to cap costs; we control this number - max_tokens_per_day: 300_000, - price_per_million_input_tokens: 25, - price_per_million_output_tokens: 125, - }, ]) .await } diff --git a/crates/collab/src/llm/db/tests/provider_tests.rs b/crates/collab/src/llm/db/tests/provider_tests.rs index ef0da1c373fca..0bb55ee4b69a6 100644 --- a/crates/collab/src/llm/db/tests/provider_tests.rs +++ b/crates/collab/src/llm/db/tests/provider_tests.rs @@ -26,7 +26,6 @@ async fn test_initialize_providers(db: &mut LlmDatabase) { LanguageModelProvider::Anthropic, LanguageModelProvider::Google, LanguageModelProvider::OpenAi, - LanguageModelProvider::Zed ] ) } diff --git a/crates/collab/src/tests/test_server.rs b/crates/collab/src/tests/test_server.rs index 94c7d3907ff4f..5ff4a720741bc 100644 --- a/crates/collab/src/tests/test_server.rs +++ b/crates/collab/src/tests/test_server.rs @@ -679,8 +679,6 @@ impl TestServer { stripe_api_key: None, stripe_price_id: None, supermaven_admin_api_key: None, - runpod_api_key: None, - runpod_api_summary_url: None, user_backfiller_github_access_token: None, }, }) diff --git a/crates/language_model/src/model/cloud_model.rs b/crates/language_model/src/model/cloud_model.rs index 2ce48931f6d4d..9242f80e6e16c 100644 --- a/crates/language_model/src/model/cloud_model.rs +++ b/crates/language_model/src/model/cloud_model.rs @@ -12,7 +12,6 @@ pub enum CloudModel { Anthropic(anthropic::Model), OpenAi(open_ai::Model), Google(google_ai::Model), - Zed(ZedModel), } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, EnumIter)] @@ -21,26 +20,6 @@ pub enum ZedModel { Qwen2_7bInstruct, } -impl ZedModel { - pub fn id(&self) -> &str { - match self { - ZedModel::Qwen2_7bInstruct => "Qwen/Qwen2-7B-Instruct", - } - } - - pub fn display_name(&self) -> &str { - match self { - ZedModel::Qwen2_7bInstruct => "Qwen2 7B Instruct", - } - } - - pub fn max_token_count(&self) -> usize { - match self { - ZedModel::Qwen2_7bInstruct => 28000, - } - } -} - impl Default for CloudModel { fn default() -> Self { Self::Anthropic(anthropic::Model::default()) @@ -53,7 +32,6 @@ impl CloudModel { Self::Anthropic(model) => model.id(), Self::OpenAi(model) => model.id(), Self::Google(model) => model.id(), - Self::Zed(model) => model.id(), } } @@ -62,7 +40,6 @@ impl CloudModel { Self::Anthropic(model) => model.display_name(), Self::OpenAi(model) => model.display_name(), Self::Google(model) => model.display_name(), - Self::Zed(model) => model.display_name(), } } @@ -78,7 +55,6 @@ impl CloudModel { Self::Anthropic(model) => model.max_token_count(), Self::OpenAi(model) => model.max_token_count(), Self::Google(model) => model.max_token_count(), - Self::Zed(model) => model.max_token_count(), } } @@ -115,9 +91,6 @@ impl CloudModel { 
LanguageModelAvailability::RequiresPlan(Plan::ZedPro) } }, - Self::Zed(model) => match model { - ZedModel::Qwen2_7bInstruct => LanguageModelAvailability::RequiresPlan(Plan::ZedPro), - }, } } } diff --git a/crates/language_model/src/provider/cloud.rs b/crates/language_model/src/provider/cloud.rs index 3c407b77d929d..b81f6f9fba336 100644 --- a/crates/language_model/src/provider/cloud.rs +++ b/crates/language_model/src/provider/cloud.rs @@ -3,7 +3,7 @@ use crate::provider::anthropic::map_to_language_model_completion_events; use crate::{ settings::AllLanguageModelSettings, CloudModel, LanguageModel, LanguageModelCacheConfiguration, LanguageModelId, LanguageModelName, LanguageModelProviderId, LanguageModelProviderName, - LanguageModelProviderState, LanguageModelRequest, RateLimiter, ZedModel, + LanguageModelProviderState, LanguageModelRequest, RateLimiter, }; use anthropic::AnthropicError; use anyhow::{anyhow, Result}; @@ -219,9 +219,6 @@ impl LanguageModelProvider for CloudLanguageModelProvider { models.insert(model.id().to_string(), CloudModel::Google(model)); } } - for model in ZedModel::iter() { - models.insert(model.id().to_string(), CloudModel::Zed(model)); - } } else { models.insert( anthropic::Model::Claude3_5Sonnet.id().to_string(), @@ -472,7 +469,7 @@ impl LanguageModel for CloudLanguageModel { min_total_token: cache.min_total_token, }) } - CloudModel::OpenAi(_) | CloudModel::Google(_) | CloudModel::Zed(_) => None, + CloudModel::OpenAi(_) | CloudModel::Google(_) => None, } } @@ -502,9 +499,6 @@ impl LanguageModel for CloudLanguageModel { } .boxed() } - CloudModel::Zed(_) => { - count_open_ai_tokens(request, open_ai::Model::ThreePointFiveTurbo, cx) - } } } @@ -603,35 +597,6 @@ impl LanguageModel for CloudLanguageModel { } .boxed() } - CloudModel::Zed(model) => { - let client = self.client.clone(); - let mut request = request.into_open_ai(model.id().into(), None); - request.max_tokens = Some(4000); - let llm_api_token = self.llm_api_token.clone(); - let future = self.request_limiter.stream(async move { - let response = Self::perform_llm_completion( - client.clone(), - llm_api_token, - PerformCompletionParams { - provider: client::LanguageModelProvider::Zed, - model: request.model.clone(), - provider_request: RawValue::from_string(serde_json::to_string( - &request, - )?)?, - }, - None, - ) - .await?; - Ok(open_ai::extract_text_from_events(response_lines(response))) - }); - async move { - Ok(future - .await? - .map(|result| result.map(LanguageModelCompletionEvent::Text)) - .boxed()) - } - .boxed() - } } } @@ -735,51 +700,6 @@ impl LanguageModel for CloudLanguageModel { CloudModel::Google(_) => { future::ready(Err(anyhow!("tool use not implemented for Google AI"))).boxed() } - CloudModel::Zed(model) => { - // All Zed models are OpenAI-based at the time of writing. 
- let mut request = request.into_open_ai(model.id().into(), None); - request.tool_choice = Some(open_ai::ToolChoice::Other( - open_ai::ToolDefinition::Function { - function: open_ai::FunctionDefinition { - name: tool_name.clone(), - description: None, - parameters: None, - }, - }, - )); - request.tools = vec![open_ai::ToolDefinition::Function { - function: open_ai::FunctionDefinition { - name: tool_name.clone(), - description: Some(tool_description), - parameters: Some(input_schema), - }, - }]; - - self.request_limiter - .run(async move { - let response = Self::perform_llm_completion( - client.clone(), - llm_api_token, - PerformCompletionParams { - provider: client::LanguageModelProvider::Zed, - model: request.model.clone(), - provider_request: RawValue::from_string(serde_json::to_string( - &request, - )?)?, - }, - None, - ) - .await?; - - Ok(open_ai::extract_tool_args_from_events( - tool_name, - Box::pin(response_lines(response)), - ) - .await? - .boxed()) - }) - .boxed() - } } } } diff --git a/crates/rpc/src/llm.rs b/crates/rpc/src/llm.rs index 6cae54b3090d5..681f2d8db32d1 100644 --- a/crates/rpc/src/llm.rs +++ b/crates/rpc/src/llm.rs @@ -12,7 +12,6 @@ pub enum LanguageModelProvider { Anthropic, OpenAi, Google, - Zed, } #[derive(Debug, Serialize, Deserialize)] From c3075dfe9afc788a1a0a2b965f0eb4f9a4ba77b4 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Fri, 27 Sep 2024 11:14:28 -0700 Subject: [PATCH 119/228] Fix bugs in diff hunk highlighting (#18454) Fixes https://github.com/zed-industries/zed/issues/18405 In https://github.com/zed-industries/zed/pull/18313, we introduced a problem where git addition highlights might spuriously return when undoing certain changes. It turned out, there were already some cases where git hunk highlighting was incorrect when editing at the boundaries of expanded diff hunks. In this PR, I've introduced a test helper method for more rigorously (and readably) testing the editor's git state. You can assert about the entire state of an editor's diff decorations using a formatted diff: ```rust cx.assert_diff_hunks( r#" - use some::mod1; use some::mod2; const A: u32 = 42; - const B: u32 = 42; const C: u32 = 42; fn main() { - println!("hello"); + //println!("hello"); println!("world"); + // + // } fn another() { println!("another"); + println!("another"); } - fn another2() { println!("another2"); } "# .unindent(), ); ``` This will assert about the editor's actual row highlights, not just the editor's internal hunk-tracking state. I rewrote all of our editor diff tests to use these more high-level assertions, and it caught the new bug, as well as some pre-existing bugs in the highlighting of added content. The problem was how we *remove* highlighted rows. Previously, it relied on supplying exactly the same range as one that we had previously highlighted. I've added a `remove_highlighted_rows(ranges)` APIs which is much simpler - it clears out any row ranges that intersect the given ranges (which is all that we need for the Git diff use case). 
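As a rough illustration of that intersection semantics (a standalone sketch, not the editor code itself — it uses inclusive integer row ranges in place of `Anchor` ranges, and the `remove_intersecting` name is made up for this example):

```rust
// Keep a highlight only if it overlaps none of the removal ranges.
// Ranges are (start_row, end_row), inclusive on both ends.
fn remove_intersecting(highlights: &mut Vec<(u32, u32)>, ranges_to_remove: &[(u32, u32)]) {
    highlights.retain(|&(start, end)| {
        !ranges_to_remove
            .iter()
            .any(|&(rm_start, rm_end)| rm_start <= end && start <= rm_end)
    });
}

fn main() {
    let mut highlights = vec![(0, 2), (5, 7), (10, 12)];
    // Removing rows 6..=11 drops the two highlights it touches and leaves (0, 2) alone.
    remove_intersecting(&mut highlights, &[(6, 11)]);
    assert_eq!(highlights, vec![(0, 2)]);
}
```

The patch below implements the same idea as a `retain` sweep over the anchored row highlights, comparing positions through the buffer snapshot rather than raw row numbers.
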
Release Notes: - N/A --- Cargo.lock | 1 + crates/assistant/src/inline_assistant.rs | 4 +- crates/editor/Cargo.toml | 5 +- crates/editor/src/editor.rs | 157 +- crates/editor/src/editor_tests.rs | 1682 +++++------------ crates/editor/src/hunk_diff.rs | 45 +- crates/editor/src/test.rs | 113 -- crates/editor/src/test/editor_test_context.rs | 129 +- crates/go_to_line/src/go_to_line.rs | 2 +- crates/outline/src/outline.rs | 8 +- 10 files changed, 710 insertions(+), 1436 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 85a62c9519e01..123141d188e0e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3729,6 +3729,7 @@ dependencies = [ "multi_buffer", "ordered-float 2.10.1", "parking_lot", + "pretty_assertions", "project", "rand 0.8.5", "release_channel", diff --git a/crates/assistant/src/inline_assistant.rs b/crates/assistant/src/inline_assistant.rs index 9c117e66653e9..e2f2fa190d397 100644 --- a/crates/assistant/src/inline_assistant.rs +++ b/crates/assistant/src/inline_assistant.rs @@ -1142,7 +1142,7 @@ impl InlineAssistant { for row_range in inserted_row_ranges { editor.highlight_rows::( row_range, - Some(cx.theme().status().info_background), + cx.theme().status().info_background, false, cx, ); @@ -1209,7 +1209,7 @@ impl InlineAssistant { editor.set_show_inline_completions(Some(false), cx); editor.highlight_rows::( Anchor::min()..=Anchor::max(), - Some(cx.theme().status().deleted_background), + cx.theme().status().deleted_background, false, cx, ); diff --git a/crates/editor/Cargo.toml b/crates/editor/Cargo.toml index b6b22ef64d33f..cfd9284f80765 100644 --- a/crates/editor/Cargo.toml +++ b/crates/editor/Cargo.toml @@ -24,7 +24,8 @@ test-support = [ "workspace/test-support", "tree-sitter-rust", "tree-sitter-typescript", - "tree-sitter-html" + "tree-sitter-html", + "unindent", ] [dependencies] @@ -54,6 +55,7 @@ markdown.workspace = true multi_buffer.workspace = true ordered-float.workspace = true parking_lot.workspace = true +pretty_assertions.workspace = true project.workspace = true rand.workspace = true rpc.workspace = true @@ -74,6 +76,7 @@ theme.workspace = true tree-sitter-html = { workspace = true, optional = true } tree-sitter-rust = { workspace = true, optional = true } tree-sitter-typescript = { workspace = true, optional = true } +unindent = { workspace = true, optional = true } ui.workspace = true url.workspace = true util.workspace = true diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index cfffa584b6c21..48785dbaa55cf 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -822,7 +822,7 @@ impl SelectionHistory { struct RowHighlight { index: usize, range: RangeInclusive, - color: Option, + color: Hsla, should_autoscroll: bool, } @@ -11500,41 +11500,125 @@ impl Editor { } } - /// Adds or removes (on `None` color) a highlight for the rows corresponding to the anchor range given. - /// On matching anchor range, replaces the old highlight; does not clear the other existing highlights. - /// If multiple anchor ranges will produce highlights for the same row, the last range added will be used. + /// Adds a row highlight for the given range. If a row has multiple highlights, the + /// last highlight added will be used. 
pub fn highlight_rows( &mut self, - rows: RangeInclusive, - color: Option, + range: RangeInclusive, + color: Hsla, should_autoscroll: bool, cx: &mut ViewContext, ) { let snapshot = self.buffer().read(cx).snapshot(cx); let row_highlights = self.highlighted_rows.entry(TypeId::of::()).or_default(); - let existing_highlight_index = row_highlights.binary_search_by(|highlight| { - highlight - .range - .start() - .cmp(rows.start(), &snapshot) - .then(highlight.range.end().cmp(rows.end(), &snapshot)) + let ix = row_highlights.binary_search_by(|highlight| { + Ordering::Equal + .then_with(|| highlight.range.start().cmp(&range.start(), &snapshot)) + .then_with(|| highlight.range.end().cmp(&range.end(), &snapshot)) }); - match (color, existing_highlight_index) { - (Some(_), Ok(ix)) | (_, Err(ix)) => row_highlights.insert( - ix, - RowHighlight { - index: post_inc(&mut self.highlight_order), - range: rows, - should_autoscroll, - color, - }, - ), - (None, Ok(i)) => { - row_highlights.remove(i); + + if let Err(mut ix) = ix { + let index = post_inc(&mut self.highlight_order); + + // If this range intersects with the preceding highlight, then merge it with + // the preceding highlight. Otherwise insert a new highlight. + let mut merged = false; + if ix > 0 { + let prev_highlight = &mut row_highlights[ix - 1]; + if prev_highlight + .range + .end() + .cmp(&range.start(), &snapshot) + .is_ge() + { + ix -= 1; + if prev_highlight + .range + .end() + .cmp(&range.end(), &snapshot) + .is_lt() + { + prev_highlight.range = *prev_highlight.range.start()..=*range.end(); + } + merged = true; + prev_highlight.index = index; + prev_highlight.color = color; + prev_highlight.should_autoscroll = should_autoscroll; + } + } + + if !merged { + row_highlights.insert( + ix, + RowHighlight { + range: range.clone(), + index, + color, + should_autoscroll, + }, + ); + } + + // If any of the following highlights intersect with this one, merge them. + while let Some(next_highlight) = row_highlights.get(ix + 1) { + let highlight = &row_highlights[ix]; + if next_highlight + .range + .start() + .cmp(&highlight.range.end(), &snapshot) + .is_le() + { + if next_highlight + .range + .end() + .cmp(&highlight.range.end(), &snapshot) + .is_gt() + { + row_highlights[ix].range = + *highlight.range.start()..=*next_highlight.range.end(); + } + row_highlights.remove(ix + 1); + } else { + break; + } } } } + /// Remove any highlighted row ranges of the given type that intersect the + /// given ranges. + pub fn remove_highlighted_rows( + &mut self, + ranges_to_remove: Vec>, + cx: &mut ViewContext, + ) { + let snapshot = self.buffer().read(cx).snapshot(cx); + let row_highlights = self.highlighted_rows.entry(TypeId::of::()).or_default(); + let mut ranges_to_remove = ranges_to_remove.iter().peekable(); + row_highlights.retain(|highlight| { + while let Some(range_to_remove) = ranges_to_remove.peek() { + match range_to_remove.end.cmp(&highlight.range.start(), &snapshot) { + Ordering::Less => { + ranges_to_remove.next(); + } + Ordering::Equal => { + return false; + } + Ordering::Greater => { + match range_to_remove.start.cmp(&highlight.range.end(), &snapshot) { + Ordering::Less | Ordering::Equal => { + return false; + } + Ordering::Greater => break, + } + } + } + } + + true + }) + } + /// Clear all anchor ranges for a certain highlight context type, so no corresponding rows will be highlighted. 
pub fn clear_row_highlights(&mut self) { self.highlighted_rows.remove(&TypeId::of::()); @@ -11543,13 +11627,12 @@ impl Editor { /// For a highlight given context type, gets all anchor ranges that will be used for row highlighting. pub fn highlighted_rows( &self, - ) -> Option, Option<&Hsla>)>> { - Some( - self.highlighted_rows - .get(&TypeId::of::())? - .iter() - .map(|highlight| (&highlight.range, highlight.color.as_ref())), - ) + ) -> impl '_ + Iterator, Hsla)> { + self.highlighted_rows + .get(&TypeId::of::()) + .map_or(&[] as &[_], |vec| vec.as_slice()) + .iter() + .map(|highlight| (highlight.range.clone(), highlight.color)) } /// Merges all anchor ranges for all context types ever set, picking the last highlight added in case of a row conflict. @@ -11574,10 +11657,7 @@ impl Editor { used_highlight_orders.entry(row).or_insert(highlight.index); if highlight.index >= *used_index { *used_index = highlight.index; - match highlight.color { - Some(hsla) => unique_rows.insert(DisplayRow(row), hsla), - None => unique_rows.remove(&DisplayRow(row)), - }; + unique_rows.insert(DisplayRow(row), highlight.color); } } unique_rows @@ -11593,10 +11673,11 @@ impl Editor { .values() .flat_map(|highlighted_rows| highlighted_rows.iter()) .filter_map(|highlight| { - if highlight.color.is_none() || !highlight.should_autoscroll { - return None; + if highlight.should_autoscroll { + Some(highlight.range.start().to_display_point(snapshot).row()) + } else { + None } - Some(highlight.range.start().to_display_point(snapshot).row()) }) .min() } diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 31a69918026f7..b17d94a5eb0f0 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -2,9 +2,8 @@ use super::*; use crate::{ scroll::scroll_amount::ScrollAmount, test::{ - assert_text_with_selections, build_editor, editor_hunks, - editor_lsp_test_context::EditorLspTestContext, editor_test_context::EditorTestContext, - expanded_hunks, expanded_hunks_background_highlights, select_ranges, + assert_text_with_selections, build_editor, editor_lsp_test_context::EditorLspTestContext, + editor_test_context::EditorTestContext, select_ranges, }, JoinLines, }; @@ -11196,36 +11195,30 @@ async fn test_toggle_hunk_diff(executor: BackgroundExecutor, cx: &mut gpui::Test cx.set_diff_base(Some(&diff_base)); executor.run_until_parked(); - let unexpanded_hunks = vec![ - ( - "use some::mod;\n".to_string(), - DiffHunkStatus::Modified, - DisplayRow(0)..DisplayRow(1), - ), - ( - "const A: u32 = 42;\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(2)..DisplayRow(2), - ), - ( - " println!(\"hello\");\n".to_string(), - DiffHunkStatus::Modified, - DisplayRow(4)..DisplayRow(5), - ), - ( - "".to_string(), - DiffHunkStatus::Added, - DisplayRow(6)..DisplayRow(7), - ), - ]; + cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - assert_eq!(all_hunks, unexpanded_hunks); + editor.go_to_next_hunk(&GoToHunk, cx); + editor.toggle_hunk_diff(&ToggleHunkDiff, cx); }); + executor.run_until_parked(); + cx.assert_diff_hunks( + r#" + use some::modified; + + + fn main() { + - println!("hello"); + + println!("hello there"); + + println!("around the"); + println!("world"); + } + "# + .unindent(), + ); cx.update_editor(|editor, cx| { - for _ in 0..4 { + for _ in 0..3 { editor.go_to_next_hunk(&GoToHunk, cx); editor.toggle_hunk_diff(&ToggleHunkDiff, cx); } @@ -11245,57 +11238,47 @@ async fn test_toggle_hunk_diff(executor: 
BackgroundExecutor, cx: &mut gpui::Test "# .unindent(), ); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - all_hunks, - vec![ - ("use some::mod;\n".to_string(), DiffHunkStatus::Modified, DisplayRow(2)..DisplayRow(3)), - ("const A: u32 = 42;\n".to_string(), DiffHunkStatus::Removed, DisplayRow(6)..DisplayRow(6)), - (" println!(\"hello\");\n".to_string(), DiffHunkStatus::Modified, DisplayRow(10)..DisplayRow(11)), - ("".to_string(), DiffHunkStatus::Added, DisplayRow(13)..DisplayRow(14)), - ], - "After expanding, all hunks' display rows should have shifted by the amount of deleted lines added \ - (from modified and removed hunks)" - ); - assert_eq!( - all_hunks, all_expanded_hunks, - "Editor hunks should not change and all be expanded" - ); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(2)..=DisplayRow(2), DisplayRow(10)..=DisplayRow(10), DisplayRow(13)..=DisplayRow(13)], - "After expanding, all git additions should be highlighted for Modified (split into added and removed) and Added hunks" - ); - }); + + cx.assert_diff_hunks( + r#" + - use some::mod; + + use some::modified; + + - const A: u32 = 42; + + fn main() { + - println!("hello"); + + println!("hello there"); + + + println!("around the"); + println!("world"); + } + "# + .unindent(), + ); cx.update_editor(|editor, cx| { editor.cancel(&Cancel, cx); - - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - Vec::new(), - "After cancelling in editor, no git highlights should be left" - ); - assert_eq!( - all_expanded_hunks, - Vec::new(), - "After cancelling in editor, no hunks should be expanded" - ); - assert_eq!( - all_hunks, unexpanded_hunks, - "After cancelling in editor, regular hunks' coordinates should get back to normal" - ); }); + + cx.assert_diff_hunks( + r#" + use some::modified; + + + fn main() { + println!("hello there"); + + println!("around the"); + println!("world"); + } + "# + .unindent(), + ); } #[gpui::test] -async fn test_toggled_diff_base_change( +async fn test_diff_base_change_with_expanded_diff_hunks( executor: BackgroundExecutor, cx: &mut gpui::TestAppContext, ) { @@ -11339,115 +11322,78 @@ async fn test_toggled_diff_base_change( cx.set_diff_base(Some(&diff_base)); executor.run_until_parked(); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - assert_eq!( - all_hunks, - vec![ - ( - "use some::mod1;\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(0)..DisplayRow(0) - ), - ( - "const B: u32 = 42;\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(3)..DisplayRow(3) - ), - ( - " println!(\"hello\");\n".to_string(), - DiffHunkStatus::Modified, - DisplayRow(6)..DisplayRow(7) - ), - ( - "".to_string(), - DiffHunkStatus::Added, - DisplayRow(9)..DisplayRow(11) - ), - ] - ); - }); cx.update_editor(|editor, cx| { editor.expand_all_hunk_diffs(&ExpandAllHunkDiffs, cx); }); executor.run_until_parked(); - cx.assert_editor_state( - &r#" - use some::mod2; + cx.assert_diff_hunks( + r#" + - use some::mod1; + use some::mod2; + + const A: u32 = 42; + - const B: u32 = 42; + const C: u32 = 42; + + fn main() { + - println!("hello"); + + //println!("hello"); + + println!("world"); + + // + + 
// + } + "# + .unindent(), + ); - const A: u32 = 42; - const C: u32 = 42; + cx.set_diff_base(Some("new diff base!")); + executor.run_until_parked(); + cx.assert_diff_hunks( + r#" + use some::mod2; - fn main(ˇ) { - //println!("hello"); + const A: u32 = 42; + const C: u32 = 42; - println!("world"); - // - // - } + fn main() { + //println!("hello"); + + println!("world"); + // + // + } "# .unindent(), ); cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - all_hunks, - vec![ - ("use some::mod1;\n".to_string(), DiffHunkStatus::Removed, DisplayRow(2)..DisplayRow(2)), - ("const B: u32 = 42;\n".to_string(), DiffHunkStatus::Removed, DisplayRow(7)..DisplayRow(7)), - (" println!(\"hello\");\n".to_string(), DiffHunkStatus::Modified, DisplayRow(12)..DisplayRow(13)), - ("".to_string(), DiffHunkStatus::Added, DisplayRow(16)..DisplayRow(18)), - ], - "After expanding, all hunks' display rows should have shifted by the amount of deleted lines added \ - (from modified and removed hunks)" - ); - assert_eq!( - all_hunks, all_expanded_hunks, - "Editor hunks should not change and all be expanded" - ); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(12)..=DisplayRow(12), DisplayRow(16)..=DisplayRow(17)], - "After expanding, all git additions should be highlighted for Modified (split into added and removed) and Added hunks" - ); + editor.expand_all_hunk_diffs(&ExpandAllHunkDiffs, cx); }); - - cx.set_diff_base(Some("new diff base!")); executor.run_until_parked(); - - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - Vec::new(), - "After diff base is changed, old git highlights should be removed" - ); - assert_eq!( - all_expanded_hunks, - Vec::new(), - "After diff base is changed, old git hunk expansions should be removed" - ); - assert_eq!( - all_hunks, - vec![( - "new diff base!".to_string(), - DiffHunkStatus::Modified, - DisplayRow(0)..snapshot.display_snapshot.max_point().row() - )], - "After diff base is changed, hunks should update" - ); - }); + cx.assert_diff_hunks( + r#" + - new diff base! 
+ + use some::mod2; + + + + const A: u32 = 42; + + const C: u32 = 42; + + + + fn main() { + + //println!("hello"); + + + + println!("world"); + + // + + // + + } + "# + .unindent(), + ); } #[gpui::test] -async fn test_fold_unfold_diff(executor: BackgroundExecutor, cx: &mut gpui::TestAppContext) { +async fn test_fold_unfold_diff_hunk(executor: BackgroundExecutor, cx: &mut gpui::TestAppContext) { init_test(cx, |_| {}); let mut cx = EditorTestContext::new(cx).await; @@ -11504,337 +11450,138 @@ async fn test_fold_unfold_diff(executor: BackgroundExecutor, cx: &mut gpui::Test cx.set_diff_base(Some(&diff_base)); executor.run_until_parked(); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - assert_eq!( - all_hunks, - vec![ - ( - "use some::mod1;\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(0)..DisplayRow(0) - ), - ( - "const B: u32 = 42;\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(3)..DisplayRow(3) - ), - ( - " println!(\"hello\");\n".to_string(), - DiffHunkStatus::Modified, - DisplayRow(6)..DisplayRow(7) - ), - ( - "".to_string(), - DiffHunkStatus::Added, - DisplayRow(9)..DisplayRow(11) - ), - ( - "".to_string(), - DiffHunkStatus::Added, - DisplayRow(15)..DisplayRow(16) - ), - ( - "fn another2() {\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(18)..DisplayRow(18) - ), - ] - ); - }); cx.update_editor(|editor, cx| { editor.expand_all_hunk_diffs(&ExpandAllHunkDiffs, cx); }); executor.run_until_parked(); - cx.assert_editor_state( - &r#" - «use some::mod2; - const A: u32 = 42; - const C: u32 = 42; + cx.assert_diff_hunks( + r#" + - use some::mod1; + use some::mod2; - fn main() { - //println!("hello"); + const A: u32 = 42; + - const B: u32 = 42; + const C: u32 = 42; - println!("world"); - // - //ˇ» - } + fn main() { + - println!("hello"); + + //println!("hello"); - fn another() { - println!("another"); - println!("another"); - } + println!("world"); + + // + + // + } - println!("another2"); - } + fn another() { + println!("another"); + + println!("another"); + } + + - fn another2() { + println!("another2"); + } "# .unindent(), ); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - all_hunks, - vec![ - ( - "use some::mod1;\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(2)..DisplayRow(2) - ), - ( - "const B: u32 = 42;\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(7)..DisplayRow(7) - ), - ( - " println!(\"hello\");\n".to_string(), - DiffHunkStatus::Modified, - DisplayRow(12)..DisplayRow(13) - ), - ( - "".to_string(), - DiffHunkStatus::Added, - DisplayRow(16)..DisplayRow(18) - ), - ( - "".to_string(), - DiffHunkStatus::Added, - DisplayRow(23)..DisplayRow(24) - ), - ( - "fn another2() {\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(28)..DisplayRow(28) - ), - ], - ); - assert_eq!(all_hunks, all_expanded_hunks); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - vec![ - DisplayRow(12)..=DisplayRow(12), - DisplayRow(16)..=DisplayRow(17), - DisplayRow(23)..=DisplayRow(23) - ] - ); - }); + // Fold across some of the diff hunks. They should no longer appear expanded. 
cx.update_editor(|editor, cx| editor.fold_selected_ranges(&FoldSelectedRanges, cx)); cx.executor().run_until_parked(); - cx.assert_editor_state( - &r#" - «use some::mod2; - const A: u32 = 42; - const C: u32 = 42; + // Hunks are not shown if their position is within a fold + cx.assert_diff_hunks( + r#" + use some::mod2; - fn main() { - //println!("hello"); + const A: u32 = 42; + const C: u32 = 42; - println!("world"); - // - //ˇ» - } + fn main() { + //println!("hello"); - fn another() { - println!("another"); - println!("another"); - } + println!("world"); + // + // + } - println!("another2"); - } + fn another() { + println!("another"); + + println!("another"); + } + + - fn another2() { + println!("another2"); + } "# .unindent(), ); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - all_hunks, - vec![ - ( - "use some::mod1;\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(0)..DisplayRow(0) - ), - ( - "const B: u32 = 42;\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(0)..DisplayRow(0) - ), - ( - " println!(\"hello\");\n".to_string(), - DiffHunkStatus::Modified, - DisplayRow(0)..DisplayRow(0) - ), - ( - "".to_string(), - DiffHunkStatus::Added, - DisplayRow(0)..DisplayRow(1) - ), - ( - "".to_string(), - DiffHunkStatus::Added, - DisplayRow(6)..DisplayRow(7) - ), - ( - "fn another2() {\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(11)..DisplayRow(11) - ), - ], - "Hunk list should still return shifted folded hunks" - ); - assert_eq!( - all_expanded_hunks, - vec![ - ( - "".to_string(), - DiffHunkStatus::Added, - DisplayRow(6)..DisplayRow(7) - ), - ( - "fn another2() {\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(11)..DisplayRow(11) - ), - ], - "Only non-folded hunks should be left expanded" - ); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(0)..=DisplayRow(0), DisplayRow(6)..=DisplayRow(6)], - "Only one hunk is left not folded, its highlight should be visible" - ); - }); cx.update_editor(|editor, cx| { editor.select_all(&SelectAll, cx); editor.unfold_lines(&UnfoldLines, cx); }); cx.executor().run_until_parked(); - cx.assert_editor_state( - &r#" - «use some::mod2; - const A: u32 = 42; - const C: u32 = 42; + // The deletions reappear when unfolding. 
+ cx.assert_diff_hunks( + r#" + - use some::mod1; + use some::mod2; + + const A: u32 = 42; + - const B: u32 = 42; + const C: u32 = 42; + + fn main() { + - println!("hello"); + + //println!("hello"); + + println!("world"); + + // + + // + } + + fn another() { + println!("another"); + + println!("another"); + } + + - fn another2() { + println!("another2"); + } + "# + .unindent(), + ); +} - fn main() { - //println!("hello"); +#[gpui::test] +async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); - println!("world"); - // - // - } + let file_1_old = "aaa\nbbb\nccc\nddd\neee\nfff\nggg\nhhh\niii\njjj"; + let file_1_new = "aaa\nccc\nddd\neee\nfff\nggg\nhhh\niii\njjj"; + let file_2_old = "lll\nmmm\nnnn\nooo\nppp\nqqq\nrrr\nsss\nttt\nuuu"; + let file_2_new = "lll\nmmm\nNNN\nooo\nppp\nqqq\nrrr\nsss\nttt\nuuu"; + let file_3_old = "111\n222\n333\n444\n555\n777\n888\n999\n000\n!!!"; + let file_3_new = "111\n222\n333\n444\n555\n666\n777\n888\n999\n000\n!!!"; - fn another() { - println!("another"); - println!("another"); - } - - println!("another2"); - } - ˇ»"# - .unindent(), - ); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - all_hunks, - vec![ - ( - "use some::mod1;\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(2)..DisplayRow(2) - ), - ( - "const B: u32 = 42;\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(7)..DisplayRow(7) - ), - ( - " println!(\"hello\");\n".to_string(), - DiffHunkStatus::Modified, - DisplayRow(12)..DisplayRow(13) - ), - ( - "".to_string(), - DiffHunkStatus::Added, - DisplayRow(16)..DisplayRow(18) - ), - ( - "".to_string(), - DiffHunkStatus::Added, - DisplayRow(23)..DisplayRow(24) - ), - ( - "fn another2() {\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(28)..DisplayRow(28) - ), - ], - ); - assert_eq!(all_hunks, all_expanded_hunks); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - vec![ - DisplayRow(12)..=DisplayRow(12), - DisplayRow(16)..=DisplayRow(17), - DisplayRow(23)..=DisplayRow(23) - ], - "After unfolding, all hunk diffs should be visible again" - ); - }); -} - -#[gpui::test] -async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut gpui::TestAppContext) { - init_test(cx, |_| {}); - - let cols = 4; - let rows = 10; - let sample_text_1 = sample_text(rows, cols, 'a'); - assert_eq!( - sample_text_1, - "aaaa\nbbbb\ncccc\ndddd\neeee\nffff\ngggg\nhhhh\niiii\njjjj" - ); - let modified_sample_text_1 = "aaaa\ncccc\ndddd\neeee\nffff\ngggg\nhhhh\niiii\njjjj"; - let sample_text_2 = sample_text(rows, cols, 'l'); - assert_eq!( - sample_text_2, - "llll\nmmmm\nnnnn\noooo\npppp\nqqqq\nrrrr\nssss\ntttt\nuuuu" - ); - let modified_sample_text_2 = "llll\nmmmm\n1n1n1n1n1\noooo\npppp\nqqqq\nrrrr\nssss\ntttt\nuuuu"; - let sample_text_3 = sample_text(rows, cols, 'v'); - assert_eq!( - sample_text_3, - "vvvv\nwwww\nxxxx\nyyyy\nzzzz\n{{{{\n||||\n}}}}\n~~~~\n\u{7f}\u{7f}\u{7f}\u{7f}" - ); - let modified_sample_text_3 = - "vvvv\nwwww\nxxxx\nyyyy\nzzzz\n@@@@\n{{{{\n||||\n}}}}\n~~~~\n\u{7f}\u{7f}\u{7f}\u{7f}"; - let buffer_1 = cx.new_model(|cx| { - let mut buffer = Buffer::local(modified_sample_text_1.to_string(), cx); - buffer.set_diff_base(Some(sample_text_1.clone()), cx); - buffer - }); - let buffer_2 = cx.new_model(|cx| { - let mut buffer = Buffer::local(modified_sample_text_2.to_string(), cx); - buffer.set_diff_base(Some(sample_text_2.clone()), 
cx); - buffer - }); - let buffer_3 = cx.new_model(|cx| { - let mut buffer = Buffer::local(modified_sample_text_3.to_string(), cx); - buffer.set_diff_base(Some(sample_text_3.clone()), cx); - buffer - }); + let buffer_1 = cx.new_model(|cx| { + let mut buffer = Buffer::local(file_1_new.to_string(), cx); + buffer.set_diff_base(Some(file_1_old.into()), cx); + buffer + }); + let buffer_2 = cx.new_model(|cx| { + let mut buffer = Buffer::local(file_2_new.to_string(), cx); + buffer.set_diff_base(Some(file_2_old.into()), cx); + buffer + }); + let buffer_3 = cx.new_model(|cx| { + let mut buffer = Buffer::local(file_3_new.to_string(), cx); + buffer.set_diff_base(Some(file_3_old.into()), cx); + buffer + }); let multi_buffer = cx.new_model(|cx| { let mut multibuffer = MultiBuffer::new(ReadWrite); @@ -11850,7 +11597,7 @@ async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut gpui::TestAppContext) primary: None, }, ExcerptRange { - context: Point::new(9, 0)..Point::new(10, 4), + context: Point::new(9, 0)..Point::new(10, 3), primary: None, }, ], @@ -11868,7 +11615,7 @@ async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut gpui::TestAppContext) primary: None, }, ExcerptRange { - context: Point::new(9, 0)..Point::new(10, 4), + context: Point::new(9, 0)..Point::new(10, 3), primary: None, }, ], @@ -11886,7 +11633,7 @@ async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut gpui::TestAppContext) primary: None, }, ExcerptRange { - context: Point::new(9, 0)..Point::new(10, 4), + context: Point::new(9, 0)..Point::new(10, 3), primary: None, }, ], @@ -11895,143 +11642,81 @@ async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut gpui::TestAppContext) multibuffer }); - let fs = FakeFs::new(cx.executor()); - fs.insert_tree( - "/a", - json!({ - "main.rs": modified_sample_text_1, - "other.rs": modified_sample_text_2, - "lib.rs": modified_sample_text_3, - }), - ) - .await; + let editor = cx.add_window(|cx| Editor::new(EditorMode::Full, multi_buffer, None, true, cx)); + let mut cx = EditorTestContext::for_editor(editor, cx).await; + cx.run_until_parked(); - let project = Project::test(fs, ["/a".as_ref()], cx).await; - let workspace = cx.add_window(|cx| Workspace::test_new(project.clone(), cx)); - let cx = &mut VisualTestContext::from_window(*workspace.deref(), cx); - let multi_buffer_editor = cx.new_view(|cx| { - Editor::new( - EditorMode::Full, - multi_buffer, - Some(project.clone()), - true, - cx, - ) - }); - cx.executor().run_until_parked(); + cx.assert_editor_state( + &" + ˇaaa + ccc + ddd - let expected_all_hunks = vec![ - ( - "bbbb\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(4)..DisplayRow(4), - ), - ( - "nnnn\n".to_string(), - DiffHunkStatus::Modified, - DisplayRow(21)..DisplayRow(22), - ), - ( - "".to_string(), - DiffHunkStatus::Added, - DisplayRow(41)..DisplayRow(42), - ), - ]; - let expected_all_hunks_shifted = vec![ - ( - "bbbb\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(6)..DisplayRow(6), - ), - ( - "nnnn\n".to_string(), - DiffHunkStatus::Modified, - DisplayRow(25)..DisplayRow(26), - ), - ( - "".to_string(), - DiffHunkStatus::Added, - DisplayRow(46)..DisplayRow(47), - ), - ]; + ggg + hhh - multi_buffer_editor.update(cx, |editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!(expanded_hunks_background_highlights(editor, cx), Vec::new()); - assert_eq!(all_hunks, expected_all_hunks); - assert_eq!(all_expanded_hunks, Vec::new()); - }); - 
multi_buffer_editor.update(cx, |editor, cx| { + lll + mmm + NNN + + qqq + rrr + + uuu + 111 + 222 + 333 + + 666 + 777 + + 000 + !!!" + .unindent(), + ); + + cx.update_editor(|editor, cx| { editor.select_all(&SelectAll, cx); editor.toggle_hunk_diff(&ToggleHunkDiff, cx); }); cx.executor().run_until_parked(); - multi_buffer_editor.update(cx, |editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - vec![ - DisplayRow(25)..=DisplayRow(25), - DisplayRow(46)..=DisplayRow(46) - ], - ); - assert_eq!(all_hunks, expected_all_hunks_shifted); - assert_eq!(all_hunks, all_expanded_hunks); - }); - multi_buffer_editor.update(cx, |editor, cx| { - editor.toggle_hunk_diff(&ToggleHunkDiff, cx); - }); - cx.executor().run_until_parked(); - multi_buffer_editor.update(cx, |editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!(expanded_hunks_background_highlights(editor, cx), Vec::new()); - assert_eq!(all_hunks, expected_all_hunks); - assert_eq!(all_expanded_hunks, Vec::new()); - }); + cx.assert_diff_hunks( + " + aaa + - bbb + ccc + ddd - multi_buffer_editor.update(cx, |editor, cx| { - editor.toggle_hunk_diff(&ToggleHunkDiff, cx); - }); - cx.executor().run_until_parked(); - multi_buffer_editor.update(cx, |editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - vec![ - DisplayRow(25)..=DisplayRow(25), - DisplayRow(46)..=DisplayRow(46) - ], - ); - assert_eq!(all_hunks, expected_all_hunks_shifted); - assert_eq!(all_hunks, all_expanded_hunks); - }); + ggg + hhh - multi_buffer_editor.update(cx, |editor, cx| { - editor.toggle_hunk_diff(&ToggleHunkDiff, cx); - }); - cx.executor().run_until_parked(); - multi_buffer_editor.update(cx, |editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!(expanded_hunks_background_highlights(editor, cx), Vec::new()); - assert_eq!(all_hunks, expected_all_hunks); - assert_eq!(all_expanded_hunks, Vec::new()); - }); + + lll + mmm + - nnn + + NNN + + qqq + rrr + + uuu + 111 + 222 + 333 + + + 666 + 777 + + 000 + !!!" 
+ .unindent(), + ); } #[gpui::test] -async fn test_edits_around_toggled_additions( +async fn test_edits_around_expanded_insertion_hunks( executor: BackgroundExecutor, cx: &mut gpui::TestAppContext, ) { @@ -12074,71 +11759,21 @@ async fn test_edits_around_toggled_additions( cx.set_diff_base(Some(&diff_base)); executor.run_until_parked(); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - assert_eq!( - all_hunks, - vec![( - "".to_string(), - DiffHunkStatus::Added, - DisplayRow(4)..DisplayRow(7) - )] - ); - }); + cx.update_editor(|editor, cx| { editor.expand_all_hunk_diffs(&ExpandAllHunkDiffs, cx); }); executor.run_until_parked(); - cx.assert_editor_state( - &r#" - use some::mod1; - use some::mod2; - - const A: u32 = 42; - const B: u32 = 42; - const C: u32 = 42; - ˇ - - fn main() { - println!("hello"); - println!("world"); - } - "# - .unindent(), - ); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - all_hunks, - vec![( - "".to_string(), - DiffHunkStatus::Added, - DisplayRow(5)..DisplayRow(8) - )] - ); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(5)..=DisplayRow(7)] - ); - assert_eq!(all_hunks, all_expanded_hunks); - }); - - cx.update_editor(|editor, cx| editor.handle_input("const D: u32 = 42;\n", cx)); - executor.run_until_parked(); - cx.assert_editor_state( - &r#" + cx.assert_diff_hunks( + r#" use some::mod1; use some::mod2; const A: u32 = 42; - const B: u32 = 42; - const C: u32 = 42; - const D: u32 = 42; - ˇ + + const B: u32 = 42; + + const C: u32 = 42; + + fn main() { println!("hello"); @@ -12148,134 +11783,20 @@ async fn test_edits_around_toggled_additions( "# .unindent(), ); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - all_hunks, - vec![( - "".to_string(), - DiffHunkStatus::Added, - DisplayRow(5)..DisplayRow(9) - )] - ); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(5)..=DisplayRow(7)], - "Edited hunk should have one more line added" - ); - assert_eq!( - all_hunks, all_expanded_hunks, - "Expanded hunk should also grow with the addition" - ); - }); - cx.update_editor(|editor, cx| editor.handle_input("const E: u32 = 42;\n", cx)); + cx.update_editor(|editor, cx| editor.handle_input("const D: u32 = 42;\n", cx)); executor.run_until_parked(); - cx.assert_editor_state( - &r#" - use some::mod1; - use some::mod2; - - const A: u32 = 42; - const B: u32 = 42; - const C: u32 = 42; - const D: u32 = 42; - const E: u32 = 42; - ˇ - - fn main() { - println!("hello"); - println!("world"); - } - "# - .unindent(), - ); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - all_hunks, - vec![( - "".to_string(), - DiffHunkStatus::Added, - DisplayRow(5)..DisplayRow(10) - )] - ); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(5)..=DisplayRow(7)], - "Edited hunk should have one more line added" - ); - assert_eq!(all_hunks, all_expanded_hunks); - }); - - cx.update_editor(|editor, cx| { - editor.move_up(&MoveUp, cx); - editor.delete_line(&DeleteLine, cx); - 
}); - executor.run_until_parked(); - cx.assert_editor_state( - &r#" + cx.assert_diff_hunks( + r#" use some::mod1; use some::mod2; const A: u32 = 42; - const B: u32 = 42; - const C: u32 = 42; - const D: u32 = 42; - ˇ - - fn main() { - println!("hello"); - - println!("world"); - } - "# - .unindent(), - ); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - all_hunks, - vec![( - "".to_string(), - DiffHunkStatus::Added, - DisplayRow(5)..DisplayRow(9) - )] - ); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(5)..=DisplayRow(7)], - "Deleting a line should shrint the hunk" - ); - assert_eq!( - all_hunks, all_expanded_hunks, - "Expanded hunk should also shrink with the addition" - ); - }); - - cx.update_editor(|editor, cx| { - editor.move_up(&MoveUp, cx); - editor.delete_line(&DeleteLine, cx); - editor.move_up(&MoveUp, cx); - editor.delete_line(&DeleteLine, cx); - editor.move_up(&MoveUp, cx); - editor.delete_line(&DeleteLine, cx); - }); - executor.run_until_parked(); - cx.assert_editor_state( - &r#" - use some::mod1; - use some::mod2; - - const A: u32 = 42; - ˇ + + const B: u32 = 42; + + const C: u32 = 42; + + const D: u32 = 42; + + fn main() { println!("hello"); @@ -12285,148 +11806,21 @@ async fn test_edits_around_toggled_additions( "# .unindent(), ); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - all_hunks, - vec![( - "".to_string(), - DiffHunkStatus::Added, - DisplayRow(6)..DisplayRow(7) - )] - ); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(6)..=DisplayRow(6)] - ); - assert_eq!(all_hunks, all_expanded_hunks); - }); - cx.update_editor(|editor, cx| { - editor.select_up_by_lines(&SelectUpByLines { lines: 5 }, cx); - editor.delete_line(&DeleteLine, cx); - }); - executor.run_until_parked(); - cx.assert_editor_state( - &r#" - ˇ - - fn main() { - println!("hello"); - - println!("world"); - } - "# - .unindent(), - ); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - all_hunks, - vec![ - ( - "use some::mod1;\nuse some::mod2;\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(0)..DisplayRow(0) - ), - ( - "const A: u32 = 42;\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(3)..DisplayRow(3) - ) - ] - ); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - Vec::new(), - "Should close all stale expanded addition hunks" - ); - assert_eq!( - all_expanded_hunks, - vec![( - "const A: u32 = 42;\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(3)..DisplayRow(3) - )], - "Should open hunks that were adjacent to the stale addition one" - ); - }); -} - -#[gpui::test] -async fn test_edits_around_toggled_deletions( - executor: BackgroundExecutor, - cx: &mut gpui::TestAppContext, -) { - init_test(cx, |_| {}); - - let mut cx = EditorTestContext::new(cx).await; - - let diff_base = r#" - use some::mod1; - use some::mod2; - - const A: u32 = 42; - const B: u32 = 42; - const C: u32 = 42; - - - fn main() { - println!("hello"); - - println!("world"); - } - "# - .unindent(); + cx.update_editor(|editor, cx| editor.handle_input("const E: u32 = 42;\n", 
cx)); executor.run_until_parked(); - cx.set_state( - &r#" - use some::mod1; - use some::mod2; - - ˇconst B: u32 = 42; - const C: u32 = 42; - - - fn main() { - println!("hello"); - - println!("world"); - } - "# - .unindent(), - ); - cx.set_diff_base(Some(&diff_base)); - executor.run_until_parked(); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - assert_eq!( - all_hunks, - vec![( - "const A: u32 = 42;\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(3)..DisplayRow(3) - )] - ); - }); - cx.update_editor(|editor, cx| { - editor.expand_all_hunk_diffs(&ExpandAllHunkDiffs, cx); - }); - executor.run_until_parked(); - cx.assert_editor_state( - &r#" + cx.assert_diff_hunks( + r#" use some::mod1; - use some::mod2; - - ˇconst B: u32 = 42; - const C: u32 = 42; + use some::mod2; + const A: u32 = 42; + + const B: u32 = 42; + + const C: u32 = 42; + + const D: u32 = 42; + + const E: u32 = 42; + + fn main() { println!("hello"); @@ -12436,33 +11830,23 @@ async fn test_edits_around_toggled_deletions( "# .unindent(), ); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!(expanded_hunks_background_highlights(editor, cx), Vec::new()); - assert_eq!( - all_hunks, - vec![( - "const A: u32 = 42;\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(5)..DisplayRow(5) - )] - ); - assert_eq!(all_hunks, all_expanded_hunks); - }); cx.update_editor(|editor, cx| { + editor.move_up(&MoveUp, cx); editor.delete_line(&DeleteLine, cx); }); executor.run_until_parked(); - cx.assert_editor_state( - &r#" + + cx.assert_diff_hunks( + r#" use some::mod1; use some::mod2; - ˇconst C: u32 = 42; - + const A: u32 = 42; + + const B: u32 = 42; + + const C: u32 = 42; + + const D: u32 = 42; + + fn main() { println!("hello"); @@ -12472,27 +11856,13 @@ async fn test_edits_around_toggled_deletions( "# .unindent(), ); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - Vec::new(), - "Deleted hunks do not highlight current editor's background" - ); - assert_eq!( - all_hunks, - vec![( - "const A: u32 = 42;\nconst B: u32 = 42;\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(6)..DisplayRow(6) - )] - ); - assert_eq!(all_hunks, all_expanded_hunks); - }); cx.update_editor(|editor, cx| { + editor.move_up(&MoveUp, cx); + editor.delete_line(&DeleteLine, cx); + editor.move_up(&MoveUp, cx); + editor.delete_line(&DeleteLine, cx); + editor.move_up(&MoveUp, cx); editor.delete_line(&DeleteLine, cx); }); executor.run_until_parked(); @@ -12501,6 +11871,7 @@ async fn test_edits_around_toggled_deletions( use some::mod1; use some::mod2; + const A: u32 = 42; ˇ fn main() { @@ -12511,33 +11882,15 @@ async fn test_edits_around_toggled_deletions( "# .unindent(), ); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!(expanded_hunks_background_highlights(editor, cx), Vec::new()); - assert_eq!( - all_hunks, - vec![( - "const A: u32 = 42;\nconst B: u32 = 42;\nconst C: u32 = 42;\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(7)..DisplayRow(7) - )] - ); - 
assert_eq!(all_hunks, all_expanded_hunks); - }); - cx.update_editor(|editor, cx| { - editor.handle_input("replacement", cx); - }); - executor.run_until_parked(); - cx.assert_editor_state( - &r#" + cx.assert_diff_hunks( + r#" use some::mod1; use some::mod2; - replacementˇ + const A: u32 = 42; + + fn main() { println!("hello"); @@ -12546,29 +11899,29 @@ async fn test_edits_around_toggled_deletions( "# .unindent(), ); + cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - all_hunks, - vec![( - "const A: u32 = 42;\nconst B: u32 = 42;\nconst C: u32 = 42;\n\n".to_string(), - DiffHunkStatus::Modified, - DisplayRow(8)..DisplayRow(9) - )] - ); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(8)..=DisplayRow(8)], - "Modified expanded hunks should display additions and highlight their background" - ); - assert_eq!(all_hunks, all_expanded_hunks); + editor.select_up_by_lines(&SelectUpByLines { lines: 5 }, cx); + editor.delete_line(&DeleteLine, cx); }); + executor.run_until_parked(); + cx.assert_diff_hunks( + r#" + + - const A: u32 = 42; + + fn main() { + println!("hello"); + + println!("world"); + } + "# + .unindent(), + ); } #[gpui::test] -async fn test_edits_around_toggled_modifications( +async fn test_edits_around_expanded_deletion_hunks( executor: BackgroundExecutor, cx: &mut gpui::TestAppContext, ) { @@ -12583,14 +11936,14 @@ async fn test_edits_around_toggled_modifications( const A: u32 = 42; const B: u32 = 42; const C: u32 = 42; - const D: u32 = 42; fn main() { println!("hello"); println!("world"); - }"# + } + "# .unindent(); executor.run_until_parked(); cx.set_state( @@ -12598,298 +11951,165 @@ async fn test_edits_around_toggled_modifications( use some::mod1; use some::mod2; - const A: u32 = 42; - const B: u32 = 42; - const C: u32 = 43ˇ - const D: u32 = 42; + ˇconst B: u32 = 42; + const C: u32 = 42; fn main() { println!("hello"); println!("world"); - }"# + } + "# .unindent(), ); cx.set_diff_base(Some(&diff_base)); executor.run_until_parked(); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - assert_eq!( - all_hunks, - vec![( - "const C: u32 = 42;\n".to_string(), - DiffHunkStatus::Modified, - DisplayRow(5)..DisplayRow(6) - )] - ); - }); + cx.update_editor(|editor, cx| { editor.expand_all_hunk_diffs(&ExpandAllHunkDiffs, cx); }); executor.run_until_parked(); - cx.assert_editor_state( - &r#" + + cx.assert_diff_hunks( + r#" use some::mod1; use some::mod2; - const A: u32 = 42; + - const A: u32 = 42; const B: u32 = 42; - const C: u32 = 43ˇ - const D: u32 = 42; + const C: u32 = 42; fn main() { println!("hello"); println!("world"); - }"# + } + "# .unindent(), ); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(7)..=DisplayRow(7)], - ); - assert_eq!( - all_hunks, - vec![( - "const C: u32 = 42;\n".to_string(), - DiffHunkStatus::Modified, - DisplayRow(7)..DisplayRow(8) - )] - ); - assert_eq!(all_hunks, all_expanded_hunks); - }); cx.update_editor(|editor, cx| { - editor.handle_input("\nnew_line\n", cx); + editor.delete_line(&DeleteLine, cx); }); executor.run_until_parked(); cx.assert_editor_state( &r#" - use some::mod1; 
- use some::mod2; + use some::mod1; + use some::mod2; - const A: u32 = 42; - const B: u32 = 42; - const C: u32 = 43 - new_line - ˇ - const D: u32 = 42; + ˇconst C: u32 = 42; - fn main() { - println!("hello"); + fn main() { + println!("hello"); - println!("world"); - }"# + println!("world"); + } + "# .unindent(), ); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(7)..=DisplayRow(7)], - "Modified hunk should grow highlighted lines on more text additions" - ); - assert_eq!( - all_hunks, - vec![( - "const C: u32 = 42;\n".to_string(), - DiffHunkStatus::Modified, - DisplayRow(7)..DisplayRow(10) - )] - ); - assert_eq!(all_hunks, all_expanded_hunks); - }); - - cx.update_editor(|editor, cx| { - editor.move_up(&MoveUp, cx); - editor.move_up(&MoveUp, cx); - editor.move_up(&MoveUp, cx); - editor.delete_line(&DeleteLine, cx); - }); - executor.run_until_parked(); - cx.assert_editor_state( - &r#" - use some::mod1; - use some::mod2; - - const A: u32 = 42; - ˇconst C: u32 = 43 - new_line + cx.assert_diff_hunks( + r#" + use some::mod1; + use some::mod2; - const D: u32 = 42; + - const A: u32 = 42; + - const B: u32 = 42; + const C: u32 = 42; - fn main() { - println!("hello"); + fn main() { + println!("hello"); - println!("world"); - }"# + println!("world"); + } + "# .unindent(), ); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(7)..=DisplayRow(9)], - ); - assert_eq!( - all_hunks, - vec![( - "const B: u32 = 42;\nconst C: u32 = 42;\n".to_string(), - DiffHunkStatus::Modified, - DisplayRow(7)..DisplayRow(10) - )], - "Modified hunk should grow deleted lines on text deletions above" - ); - assert_eq!(all_hunks, all_expanded_hunks); - }); cx.update_editor(|editor, cx| { - editor.move_up(&MoveUp, cx); - editor.handle_input("v", cx); + editor.delete_line(&DeleteLine, cx); }); executor.run_until_parked(); cx.assert_editor_state( &r#" - use some::mod1; - use some::mod2; + use some::mod1; + use some::mod2; + + ˇ - vˇconst A: u32 = 42; - const C: u32 = 43 - new_line + fn main() { + println!("hello"); + + println!("world"); + } + "# + .unindent(), + ); + cx.assert_diff_hunks( + r#" + use some::mod1; + use some::mod2; - const D: u32 = 42; + - const A: u32 = 42; + - const B: u32 = 42; + - const C: u32 = 42; - fn main() { - println!("hello"); + fn main() { + println!("hello"); - println!("world"); - }"# + println!("world"); + } + "# .unindent(), ); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(7)..=DisplayRow(10)], - "Modified hunk should grow deleted lines on text modifications above" - ); - assert_eq!( - all_hunks, - vec![( - "const A: u32 = 42;\nconst B: u32 = 42;\nconst C: u32 = 42;\n".to_string(), - DiffHunkStatus::Modified, - DisplayRow(7)..DisplayRow(11) - )] - ); - assert_eq!(all_hunks, all_expanded_hunks); - }); cx.update_editor(|editor, cx| { - editor.move_down(&MoveDown, cx); - editor.move_down(&MoveDown, cx); - 
editor.delete_line(&DeleteLine, cx) + editor.handle_input("replacement", cx); }); executor.run_until_parked(); cx.assert_editor_state( &r#" - use some::mod1; - use some::mod2; - - vconst A: u32 = 42; - const C: u32 = 43 - ˇ - const D: u32 = 42; + use some::mod1; + use some::mod2; + replacementˇ - fn main() { - println!("hello"); + fn main() { + println!("hello"); - println!("world"); - }"# + println!("world"); + } + "# .unindent(), ); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(7)..=DisplayRow(9)], - "Modified hunk should grow shrink lines on modification lines removal" - ); - assert_eq!( - all_hunks, - vec![( - "const A: u32 = 42;\nconst B: u32 = 42;\nconst C: u32 = 42;\n".to_string(), - DiffHunkStatus::Modified, - DisplayRow(7)..DisplayRow(10) - )] - ); - assert_eq!(all_hunks, all_expanded_hunks); - }); - - cx.update_editor(|editor, cx| { - editor.move_up(&MoveUp, cx); - editor.move_up(&MoveUp, cx); - editor.select_down_by_lines(&SelectDownByLines { lines: 4 }, cx); - editor.delete_line(&DeleteLine, cx) - }); - executor.run_until_parked(); - cx.assert_editor_state( - &r#" - use some::mod1; - use some::mod2; + cx.assert_diff_hunks( + r#" + use some::mod1; + use some::mod2; - ˇ + - const A: u32 = 42; + - const B: u32 = 42; + - const C: u32 = 42; + - + + replacement - fn main() { - println!("hello"); + fn main() { + println!("hello"); - println!("world"); - }"# + println!("world"); + } + "# .unindent(), ); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - Vec::new(), - "Modified hunk should turn into a removed one on all modified lines removal" - ); - assert_eq!( - all_hunks, - vec![( - "const A: u32 = 42;\nconst B: u32 = 42;\nconst C: u32 = 42;\nconst D: u32 = 42;\n" - .to_string(), - DiffHunkStatus::Removed, - DisplayRow(8)..DisplayRow(8) - )] - ); - assert_eq!(all_hunks, all_expanded_hunks); - }); } #[gpui::test] -async fn test_multiple_expanded_hunks_merge( +async fn test_edit_after_expanded_modification_hunk( executor: BackgroundExecutor, cx: &mut gpui::TestAppContext, ) { @@ -12913,7 +12133,7 @@ async fn test_multiple_expanded_hunks_merge( println!("world"); }"# .unindent(); - executor.run_until_parked(); + cx.set_state( &r#" use some::mod1; @@ -12935,30 +12155,20 @@ async fn test_multiple_expanded_hunks_merge( cx.set_diff_base(Some(&diff_base)); executor.run_until_parked(); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - assert_eq!( - all_hunks, - vec![( - "const C: u32 = 42;\n".to_string(), - DiffHunkStatus::Modified, - DisplayRow(5)..DisplayRow(6) - )] - ); - }); cx.update_editor(|editor, cx| { editor.expand_all_hunk_diffs(&ExpandAllHunkDiffs, cx); }); executor.run_until_parked(); - cx.assert_editor_state( - &r#" + + cx.assert_diff_hunks( + r#" use some::mod1; use some::mod2; const A: u32 = 42; const B: u32 = 42; - const C: u32 = 43ˇ + - const C: u32 = 42; + + const C: u32 = 43 const D: u32 = 42; @@ -12969,47 +12179,31 @@ async fn test_multiple_expanded_hunks_merge( }"# .unindent(), ); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let 
all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(7)..=DisplayRow(7)], - ); - assert_eq!( - all_hunks, - vec![( - "const C: u32 = 42;\n".to_string(), - DiffHunkStatus::Modified, - DisplayRow(7)..DisplayRow(8) - )] - ); - assert_eq!(all_hunks, all_expanded_hunks); - }); cx.update_editor(|editor, cx| { editor.handle_input("\nnew_line\n", cx); }); executor.run_until_parked(); - cx.assert_editor_state( - &r#" - use some::mod1; - use some::mod2; - const A: u32 = 42; - const B: u32 = 42; - const C: u32 = 43 - new_line - ˇ - const D: u32 = 42; + cx.assert_diff_hunks( + r#" + use some::mod1; + use some::mod2; + + const A: u32 = 42; + const B: u32 = 42; + - const C: u32 = 42; + + const C: u32 = 43 + + new_line + + + const D: u32 = 42; - fn main() { - println!("hello"); + fn main() { + println!("hello"); - println!("world"); - }"# + println!("world"); + }"# .unindent(), ); } diff --git a/crates/editor/src/hunk_diff.rs b/crates/editor/src/hunk_diff.rs index 4fa1f10a8a17c..e819032471f44 100644 --- a/crates/editor/src/hunk_diff.rs +++ b/crates/editor/src/hunk_diff.rs @@ -19,8 +19,8 @@ use util::RangeExt; use crate::{ editor_settings::CurrentLineHighlight, hunk_status, hunks_for_selections, BlockDisposition, BlockProperties, BlockStyle, CustomBlockId, DiffRowHighlight, DisplayRow, DisplaySnapshot, - Editor, EditorElement, EditorSnapshot, ExpandAllHunkDiffs, GoToHunk, GoToPrevHunk, - RangeToAnchorExt, RevertFile, RevertSelectedHunks, ToDisplayPoint, ToggleHunkDiff, + Editor, EditorElement, EditorSnapshot, ExpandAllHunkDiffs, GoToHunk, GoToPrevHunk, RevertFile, + RevertSelectedHunks, ToDisplayPoint, ToggleHunkDiff, }; #[derive(Debug, Clone)] @@ -219,14 +219,7 @@ impl Editor { }); } - for removed_rows in highlights_to_remove { - editor.highlight_rows::( - to_inclusive_row_range(removed_rows, &snapshot), - None, - false, - cx, - ); - } + editor.remove_highlighted_rows::(highlights_to_remove, cx); editor.remove_blocks(blocks_to_remove, None, cx); for hunk in hunks_to_expand { editor.expand_diff_hunk(None, &hunk, cx); @@ -306,7 +299,7 @@ impl Editor { DiffHunkStatus::Added => { self.highlight_rows::( to_inclusive_row_range(hunk_start..hunk_end, &snapshot), - Some(added_hunk_color(cx)), + added_hunk_color(cx), false, cx, ); @@ -315,7 +308,7 @@ impl Editor { DiffHunkStatus::Modified => { self.highlight_rows::( to_inclusive_row_range(hunk_start..hunk_end, &snapshot), - Some(added_hunk_color(cx)), + added_hunk_color(cx), false, cx, ); @@ -850,14 +843,7 @@ impl Editor { retain }); - for removed_rows in highlights_to_remove { - editor.highlight_rows::( - to_inclusive_row_range(removed_rows, &snapshot), - None, - false, - cx, - ); - } + editor.remove_highlighted_rows::(highlights_to_remove, cx); editor.remove_blocks(blocks_to_remove, None, cx); if let Some(diff_base_buffer) = &diff_base_buffer { @@ -978,7 +964,7 @@ fn editor_with_deleted_text( editor.set_show_inline_completions(Some(false), cx); editor.highlight_rows::( Anchor::min()..=Anchor::max(), - Some(deleted_color), + deleted_color, false, cx, ); @@ -1060,15 +1046,16 @@ fn to_inclusive_row_range( row_range: Range, snapshot: &EditorSnapshot, ) -> RangeInclusive { - let mut display_row_range = - row_range.start.to_display_point(snapshot)..row_range.end.to_display_point(snapshot); - if display_row_range.end.row() > display_row_range.start.row() { - *display_row_range.end.row_mut() -= 1; + let mut end = 
row_range.end.to_point(&snapshot.buffer_snapshot); + if end.column == 0 && end.row > 0 { + end = Point::new( + end.row - 1, + snapshot + .buffer_snapshot + .line_len(MultiBufferRow(end.row - 1)), + ); } - let point_range = display_row_range.start.to_point(&snapshot.display_snapshot) - ..display_row_range.end.to_point(&snapshot.display_snapshot); - let new_range = point_range.to_anchors(&snapshot.buffer_snapshot); - new_range.start..=new_range.end + row_range.start..=snapshot.buffer_snapshot.anchor_after(end) } impl DisplayDiffHunk { diff --git a/crates/editor/src/test.rs b/crates/editor/src/test.rs index 50214cd723ee3..d04b266e61802 100644 --- a/crates/editor/src/test.rs +++ b/crates/editor/src/test.rs @@ -88,116 +88,3 @@ pub(crate) fn build_editor_with_project( ) -> Editor { Editor::new(EditorMode::Full, buffer, Some(project), true, cx) } - -#[cfg(any(test, feature = "test-support"))] -pub fn editor_hunks( - editor: &Editor, - snapshot: &DisplaySnapshot, - cx: &mut ViewContext<'_, Editor>, -) -> Vec<( - String, - git::diff::DiffHunkStatus, - std::ops::Range, -)> { - use multi_buffer::MultiBufferRow; - use text::Point; - - use crate::hunk_status; - - snapshot - .buffer_snapshot - .git_diff_hunks_in_range(MultiBufferRow::MIN..MultiBufferRow::MAX) - .map(|hunk| { - let display_range = Point::new(hunk.row_range.start.0, 0) - .to_display_point(snapshot) - .row() - ..Point::new(hunk.row_range.end.0, 0) - .to_display_point(snapshot) - .row(); - let (_, buffer, _) = editor - .buffer() - .read(cx) - .excerpt_containing(Point::new(hunk.row_range.start.0, 0), cx) - .expect("no excerpt for expanded buffer's hunk start"); - let diff_base = buffer - .read(cx) - .diff_base() - .expect("should have a diff base for expanded hunk") - .slice(hunk.diff_base_byte_range.clone()) - .to_string(); - (diff_base, hunk_status(&hunk), display_range) - }) - .collect() -} - -#[cfg(any(test, feature = "test-support"))] -pub fn expanded_hunks( - editor: &Editor, - snapshot: &DisplaySnapshot, - cx: &mut ViewContext<'_, Editor>, -) -> Vec<( - String, - git::diff::DiffHunkStatus, - std::ops::Range, -)> { - editor - .expanded_hunks - .hunks(false) - .map(|expanded_hunk| { - let hunk_display_range = expanded_hunk - .hunk_range - .start - .to_display_point(snapshot) - .row() - ..expanded_hunk - .hunk_range - .end - .to_display_point(snapshot) - .row(); - let (_, buffer, _) = editor - .buffer() - .read(cx) - .excerpt_containing(expanded_hunk.hunk_range.start, cx) - .expect("no excerpt for expanded buffer's hunk start"); - let diff_base = buffer - .read(cx) - .diff_base() - .expect("should have a diff base for expanded hunk") - .slice(expanded_hunk.diff_base_byte_range.clone()) - .to_string(); - (diff_base, expanded_hunk.status, hunk_display_range) - }) - .collect() -} - -#[cfg(any(test, feature = "test-support"))] -pub fn expanded_hunks_background_highlights( - editor: &mut Editor, - cx: &mut gpui::WindowContext, -) -> Vec> { - use crate::DisplayRow; - - let mut highlights = Vec::new(); - - let mut range_start = 0; - let mut previous_highlighted_row = None; - for (highlighted_row, _) in editor.highlighted_display_rows(cx) { - match previous_highlighted_row { - Some(previous_row) => { - if previous_row + 1 != highlighted_row.0 { - highlights.push(DisplayRow(range_start)..=DisplayRow(previous_row)); - range_start = highlighted_row.0; - } - } - None => { - range_start = highlighted_row.0; - } - } - previous_highlighted_row = Some(highlighted_row.0); - } - if let Some(previous_row) = previous_highlighted_row { - 
highlights.push(DisplayRow(range_start)..=DisplayRow(previous_row)); - } - - highlights -} diff --git a/crates/editor/src/test/editor_test_context.rs b/crates/editor/src/test/editor_test_context.rs index 3e4ef174d422a..2ec4f4a3b7b7b 100644 --- a/crates/editor/src/test/editor_test_context.rs +++ b/crates/editor/src/test/editor_test_context.rs @@ -1,17 +1,18 @@ use crate::{ - display_map::ToDisplayPoint, AnchorRangeExt, Autoscroll, DisplayPoint, Editor, MultiBuffer, - RowExt, + display_map::ToDisplayPoint, AnchorRangeExt, Autoscroll, DiffRowHighlight, DisplayPoint, + Editor, MultiBuffer, RowExt, }; use collections::BTreeMap; use futures::Future; +use git::diff::DiffHunkStatus; use gpui::{ AnyWindowHandle, AppContext, Keystroke, ModelContext, Pixels, Point, View, ViewContext, - VisualTestContext, + VisualTestContext, WindowHandle, }; use indoc::indoc; use itertools::Itertools; use language::{Buffer, BufferSnapshot, LanguageRegistry}; -use multi_buffer::ExcerptRange; +use multi_buffer::{ExcerptRange, ToPoint}; use parking_lot::RwLock; use project::{FakeFs, Project}; use std::{ @@ -71,6 +72,16 @@ impl EditorTestContext { } } + pub async fn for_editor(editor: WindowHandle, cx: &mut gpui::TestAppContext) -> Self { + let editor_view = editor.root_view(cx).unwrap(); + Self { + cx: VisualTestContext::from_window(*editor.deref(), cx), + window: editor.into(), + editor: editor_view, + assertion_cx: AssertionContextManager::new(), + } + } + pub fn new_multibuffer( cx: &mut gpui::TestAppContext, excerpts: [&str; COUNT], @@ -297,6 +308,76 @@ impl EditorTestContext { state_context } + #[track_caller] + pub fn assert_diff_hunks(&mut self, expected_diff: String) { + // Normalize the expected diff. If it has no diff markers, then insert blank markers + // before each line. Strip any whitespace-only lines. + let has_diff_markers = expected_diff + .lines() + .any(|line| line.starts_with("+") || line.starts_with("-")); + let expected_diff_text = expected_diff + .split('\n') + .map(|line| { + let trimmed = line.trim(); + if trimmed.is_empty() { + String::new() + } else if has_diff_markers { + line.to_string() + } else { + format!(" {line}") + } + }) + .join("\n"); + + // Read the actual diff from the editor's row highlights and block + // decorations. 
+ let actual_diff = self.editor.update(&mut self.cx, |editor, cx| { + let snapshot = editor.snapshot(cx); + let text = editor.text(cx); + let insertions = editor + .highlighted_rows::() + .map(|(range, _)| { + range.start().to_point(&snapshot.buffer_snapshot).row + ..range.end().to_point(&snapshot.buffer_snapshot).row + 1 + }) + .collect::>(); + let deletions = editor + .expanded_hunks + .hunks + .iter() + .filter_map(|hunk| { + if hunk.blocks.is_empty() { + return None; + } + let row = hunk + .hunk_range + .start + .to_point(&snapshot.buffer_snapshot) + .row; + let (_, buffer, _) = editor + .buffer() + .read(cx) + .excerpt_containing(hunk.hunk_range.start, cx) + .expect("no excerpt for expanded buffer's hunk start"); + let deleted_text = buffer + .read(cx) + .diff_base() + .expect("should have a diff base for expanded hunk") + .slice(hunk.diff_base_byte_range.clone()) + .to_string(); + if let DiffHunkStatus::Modified | DiffHunkStatus::Removed = hunk.status { + Some((row, deleted_text)) + } else { + None + } + }) + .collect::>(); + format_diff(text, deletions, insertions) + }); + + pretty_assertions::assert_eq!(actual_diff, expected_diff_text, "unexpected diff state"); + } + /// Make an assertion about the editor's text and the ranges and directions /// of its selections using a string containing embedded range markers. /// @@ -401,6 +482,46 @@ impl EditorTestContext { } } +fn format_diff( + text: String, + actual_deletions: Vec<(u32, String)>, + actual_insertions: Vec>, +) -> String { + let mut diff = String::new(); + for (row, line) in text.split('\n').enumerate() { + let row = row as u32; + if row > 0 { + diff.push('\n'); + } + if let Some(text) = actual_deletions + .iter() + .find_map(|(deletion_row, deleted_text)| { + if *deletion_row == row { + Some(deleted_text) + } else { + None + } + }) + { + for line in text.lines() { + diff.push('-'); + if !line.is_empty() { + diff.push(' '); + diff.push_str(line); + } + diff.push('\n'); + } + } + let marker = if actual_insertions.iter().any(|range| range.contains(&row)) { + "+ " + } else { + " " + }; + diff.push_str(format!("{marker}{line}").trim_end()); + } + diff +} + impl Deref for EditorTestContext { type Target = gpui::VisualTestContext; diff --git a/crates/go_to_line/src/go_to_line.rs b/crates/go_to_line/src/go_to_line.rs index 4f3e6194a022e..fd631648c2c75 100644 --- a/crates/go_to_line/src/go_to_line.rs +++ b/crates/go_to_line/src/go_to_line.rs @@ -121,7 +121,7 @@ impl GoToLine { active_editor.clear_row_highlights::(); active_editor.highlight_rows::( anchor..=anchor, - Some(cx.theme().colors().editor_highlighted_line_background), + cx.theme().colors().editor_highlighted_line_background, true, cx, ); diff --git a/crates/outline/src/outline.rs b/crates/outline/src/outline.rs index cd641636349e3..520311b6f3c62 100644 --- a/crates/outline/src/outline.rs +++ b/crates/outline/src/outline.rs @@ -144,7 +144,7 @@ impl OutlineViewDelegate { active_editor.clear_row_highlights::(); active_editor.highlight_rows::( outline_item.range.start..=outline_item.range.end, - Some(cx.theme().colors().editor_highlighted_line_background), + cx.theme().colors().editor_highlighted_line_background, true, cx, ); @@ -240,10 +240,10 @@ impl PickerDelegate for OutlineViewDelegate { self.prev_scroll_position.take(); self.active_editor.update(cx, |active_editor, cx| { - if let Some(rows) = active_editor + let highlight = active_editor .highlighted_rows::() - .and_then(|highlights| highlights.into_iter().next().map(|(rows, _)| rows.clone())) - { + .next(); + if let 
Some((rows, _)) = highlight { active_editor.change_selections(Some(Autoscroll::center()), cx, |s| { s.select_ranges([*rows.start()..*rows.start()]) }); From d5f67406b0b7eb9ffd261b79467d17c1dc28a041 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Fri, 27 Sep 2024 22:42:04 +0300 Subject: [PATCH 120/228] Install cargo-edito without extra features (#18457) https://github.com/killercup/cargo-edit/pull/907 removed the feature from the crate Release Notes: - N/A --- .github/workflows/bump_patch_version.yml | 2 +- script/lib/bump-version.sh | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/bump_patch_version.yml b/.github/workflows/bump_patch_version.yml index d05da31e6a0f6..b875ff7b22102 100644 --- a/.github/workflows/bump_patch_version.yml +++ b/.github/workflows/bump_patch_version.yml @@ -41,7 +41,7 @@ jobs: exit 1 ;; esac - which cargo-set-version > /dev/null || cargo install cargo-edit --features vendored-openssl + which cargo-set-version > /dev/null || cargo install cargo-edit output=$(cargo set-version -p zed --bump patch 2>&1 | sed 's/.* //') git commit -am "Bump to $output for @$GITHUB_ACTOR" --author "Zed Bot " git tag v${output}${tag_suffix} diff --git a/script/lib/bump-version.sh b/script/lib/bump-version.sh index 0e1dfa5131d6a..ce955369505db 100755 --- a/script/lib/bump-version.sh +++ b/script/lib/bump-version.sh @@ -12,7 +12,7 @@ if [[ -n $(git status --short --untracked-files=no) ]]; then exit 1 fi -which cargo-set-version > /dev/null || cargo install cargo-edit --features vendored-openssl +which cargo-set-version > /dev/null || cargo install cargo-edit which jq > /dev/null || brew install jq cargo set-version --package $package --bump $version_increment cargo check --quiet From 1c5be9de4e8c17419480a922f33f513a2ff197de Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Fri, 27 Sep 2024 22:02:32 +0200 Subject: [PATCH 121/228] Capitalize tooltip labels on buffer search (#18458) For consistency, as this seems to be the pattern we're using overall for labels and buttons. 
--- Release Notes: - N/A --- crates/search/src/buffer_search.rs | 14 +++++++------- crates/search/src/search.rs | 6 +++--- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/crates/search/src/buffer_search.rs b/crates/search/src/buffer_search.rs index 6e660a963b987..42b267c3c9563 100644 --- a/crates/search/src/buffer_search.rs +++ b/crates/search/src/buffer_search.rs @@ -288,7 +288,7 @@ impl Render for BufferSearchBar { let focus_handle = focus_handle.clone(); move |cx| { Tooltip::for_action_in( - "Toggle search selection", + "Toggle Search Selection", &ToggleSelection, &focus_handle, cx, @@ -308,7 +308,7 @@ impl Render for BufferSearchBar { let focus_handle = focus_handle.clone(); move |cx| { Tooltip::for_action_in( - "Select all matches", + "Select All Matches", &SelectAllMatches, &focus_handle, cx, @@ -319,14 +319,14 @@ impl Render for BufferSearchBar { .child(render_nav_button( ui::IconName::ChevronLeft, self.active_match_index.is_some(), - "Select previous match", + "Select Previous Match", &SelectPrevMatch, focus_handle.clone(), )) .child(render_nav_button( ui::IconName::ChevronRight, self.active_match_index.is_some(), - "Select next match", + "Select Next Match", &SelectNextMatch, focus_handle.clone(), )) @@ -373,7 +373,7 @@ impl Render for BufferSearchBar { let focus_handle = focus_handle.clone(); move |cx| { Tooltip::for_action_in( - "Replace next match", + "Replace Next Match", &ReplaceNext, &focus_handle, cx, @@ -390,7 +390,7 @@ impl Render for BufferSearchBar { let focus_handle = focus_handle.clone(); move |cx| { Tooltip::for_action_in( - "Replace all matches", + "Replace All Matches", &ReplaceAll, &focus_handle, cx, @@ -442,7 +442,7 @@ impl Render for BufferSearchBar { div.child( IconButton::new(SharedString::from("Close"), IconName::Close) .tooltip(move |cx| { - Tooltip::for_action("Close search bar", &Dismiss, cx) + Tooltip::for_action("Close Search Bar", &Dismiss, cx) }) .on_click(cx.listener(|this, _: &ClickEvent, cx| { this.dismiss(&Dismiss, cx) diff --git a/crates/search/src/search.rs b/crates/search/src/search.rs index d13a12576b0f9..0ceb8e710b5f4 100644 --- a/crates/search/src/search.rs +++ b/crates/search/src/search.rs @@ -53,10 +53,10 @@ bitflags! 
{ impl SearchOptions { pub fn label(&self) -> &'static str { match *self { - SearchOptions::WHOLE_WORD => "Match whole words", - SearchOptions::CASE_SENSITIVE => "Match case sensitively", + SearchOptions::WHOLE_WORD => "Match Whole Words", + SearchOptions::CASE_SENSITIVE => "Match Case Sensitively", SearchOptions::INCLUDE_IGNORED => "Also search files ignored by configuration", - SearchOptions::REGEX => "Use regular expressions", + SearchOptions::REGEX => "Use Regular Expressions", _ => panic!("{:?} is not a named SearchOption", self), } } From 689da9d0b1b8ff32ebecdd2dbe8d5c93b4c7a3bf Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Fri, 27 Sep 2024 13:13:55 -0700 Subject: [PATCH 122/228] Move git hunk controls to the left side (#18460) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ![Screenshot 2024-09-27 at 1 05 14 PM](https://github.com/user-attachments/assets/260a7d05-daa8-4a22-92bc-3b956035227f) Release Notes: - N/A --- crates/editor/src/hunk_diff.rs | 131 ++++++++++++++++----------------- 1 file changed, 64 insertions(+), 67 deletions(-) diff --git a/crates/editor/src/hunk_diff.rs b/crates/editor/src/hunk_diff.rs index e819032471f44..3e18b992c1dae 100644 --- a/crates/editor/src/hunk_diff.rs +++ b/crates/editor/src/hunk_diff.rs @@ -513,49 +513,7 @@ impl Editor { }); } }), - ), - ) - .child( - h_flex() - .gap_2() - .pr_6() - .child({ - let focus = editor.focus_handle(cx); - PopoverMenu::new("hunk-controls-dropdown") - .trigger( - IconButton::new( - "toggle_editor_selections_icon", - IconName::EllipsisVertical, - ) - .shape(IconButtonShape::Square) - .icon_size(IconSize::Small) - .style(ButtonStyle::Subtle) - .selected( - hunk_controls_menu_handle.is_deployed(), - ) - .when( - !hunk_controls_menu_handle.is_deployed(), - |this| { - this.tooltip(|cx| { - Tooltip::text("Hunk Controls", cx) - }) - }, - ), - ) - .anchor(AnchorCorner::TopRight) - .with_handle(hunk_controls_menu_handle) - .menu(move |cx| { - let focus = focus.clone(); - let menu = - ContextMenu::build(cx, move |menu, _| { - menu.context(focus.clone()).action( - "Discard All", - RevertFile.boxed_clone(), - ) - }); - Some(menu) - }) - }) + ) .child( IconButton::new("discard", IconName::RotateCcw) .shape(IconButtonShape::Square) @@ -601,31 +559,70 @@ impl Editor { } }), ) - .child( - IconButton::new("collapse", IconName::Close) - .shape(IconButtonShape::Square) - .icon_size(IconSize::Small) - .tooltip({ - let focus_handle = editor.focus_handle(cx); - move |cx| { - Tooltip::for_action_in( - "Collapse Hunk", - &ToggleHunkDiff, - &focus_handle, - cx, - ) - } - }) - .on_click({ - let editor = editor.clone(); - let hunk = hunk.clone(); - move |_event, cx| { - editor.update(cx, |editor, cx| { - editor.toggle_hovered_hunk(&hunk, cx); + .child({ + let focus = editor.focus_handle(cx); + PopoverMenu::new("hunk-controls-dropdown") + .trigger( + IconButton::new( + "toggle_editor_selections_icon", + IconName::EllipsisVertical, + ) + .shape(IconButtonShape::Square) + .icon_size(IconSize::Small) + .style(ButtonStyle::Subtle) + .selected( + hunk_controls_menu_handle.is_deployed(), + ) + .when( + !hunk_controls_menu_handle.is_deployed(), + |this| { + this.tooltip(|cx| { + Tooltip::text("Hunk Controls", cx) + }) + }, + ), + ) + .anchor(AnchorCorner::TopRight) + .with_handle(hunk_controls_menu_handle) + .menu(move |cx| { + let focus = focus.clone(); + let menu = + ContextMenu::build(cx, move |menu, _| { + menu.context(focus.clone()).action( + "Discard All", + RevertFile.boxed_clone(), + ) }); - } - }), - 
), + Some(menu) + }) + }), + ) + .child( + h_flex().gap_2().pr_6().child( + IconButton::new("collapse", IconName::Close) + .shape(IconButtonShape::Square) + .icon_size(IconSize::Small) + .tooltip({ + let focus_handle = editor.focus_handle(cx); + move |cx| { + Tooltip::for_action_in( + "Collapse Hunk", + &ToggleHunkDiff, + &focus_handle, + cx, + ) + } + }) + .on_click({ + let editor = editor.clone(); + let hunk = hunk.clone(); + move |_event, cx| { + editor.update(cx, |editor, cx| { + editor.toggle_hovered_hunk(&hunk, cx); + }); + } + }), + ), ), ) .into_any_element() From 0daa070448d3a5078cd274ddbd5ec18e425c63d3 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Fri, 27 Sep 2024 13:48:37 -0700 Subject: [PATCH 123/228] More git hunk highlighting fixes (#18459) Follow-up to https://github.com/zed-industries/zed/pull/18454 Release Notes: - N/A --- crates/assistant/src/inline_assistant.rs | 8 +-- crates/editor/src/editor.rs | 63 +++++++++---------- crates/editor/src/editor_tests.rs | 11 ++-- crates/editor/src/hunk_diff.rs | 29 ++------- crates/editor/src/test/editor_test_context.rs | 32 +++------- crates/go_to_line/src/go_to_line.rs | 22 ++++--- crates/outline/src/outline.rs | 4 +- 7 files changed, 69 insertions(+), 100 deletions(-) diff --git a/crates/assistant/src/inline_assistant.rs b/crates/assistant/src/inline_assistant.rs index e2f2fa190d397..fac70f233c656 100644 --- a/crates/assistant/src/inline_assistant.rs +++ b/crates/assistant/src/inline_assistant.rs @@ -1208,7 +1208,7 @@ impl InlineAssistant { editor.set_read_only(true); editor.set_show_inline_completions(Some(false), cx); editor.highlight_rows::( - Anchor::min()..=Anchor::max(), + Anchor::min()..Anchor::max(), cx.theme().status().deleted_background, false, cx, @@ -2557,7 +2557,7 @@ enum CodegenStatus { #[derive(Default)] struct Diff { deleted_row_ranges: Vec<(Anchor, RangeInclusive)>, - inserted_row_ranges: Vec>, + inserted_row_ranges: Vec>, } impl Diff { @@ -3103,7 +3103,7 @@ impl CodegenAlternative { new_end_row, new_snapshot.line_len(MultiBufferRow(new_end_row)), )); - self.diff.inserted_row_ranges.push(start..=end); + self.diff.inserted_row_ranges.push(start..end); new_row += lines; } } @@ -3181,7 +3181,7 @@ impl CodegenAlternative { new_end_row, new_snapshot.line_len(MultiBufferRow(new_end_row)), )); - inserted_row_ranges.push(start..=end); + inserted_row_ranges.push(start..end); new_row += line_count; } } diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 48785dbaa55cf..b604f388debd5 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -821,7 +821,7 @@ impl SelectionHistory { struct RowHighlight { index: usize, - range: RangeInclusive, + range: Range, color: Hsla, should_autoscroll: bool, } @@ -11502,9 +11502,11 @@ impl Editor { /// Adds a row highlight for the given range. If a row has multiple highlights, the /// last highlight added will be used. + /// + /// If the range ends at the beginning of a line, then that line will not be highlighted. 
pub fn highlight_rows( &mut self, - range: RangeInclusive, + range: Range, color: Hsla, should_autoscroll: bool, cx: &mut ViewContext, @@ -11513,8 +11515,8 @@ impl Editor { let row_highlights = self.highlighted_rows.entry(TypeId::of::()).or_default(); let ix = row_highlights.binary_search_by(|highlight| { Ordering::Equal - .then_with(|| highlight.range.start().cmp(&range.start(), &snapshot)) - .then_with(|| highlight.range.end().cmp(&range.end(), &snapshot)) + .then_with(|| highlight.range.start.cmp(&range.start, &snapshot)) + .then_with(|| highlight.range.end.cmp(&range.end, &snapshot)) }); if let Err(mut ix) = ix { @@ -11527,18 +11529,13 @@ impl Editor { let prev_highlight = &mut row_highlights[ix - 1]; if prev_highlight .range - .end() - .cmp(&range.start(), &snapshot) + .end + .cmp(&range.start, &snapshot) .is_ge() { ix -= 1; - if prev_highlight - .range - .end() - .cmp(&range.end(), &snapshot) - .is_lt() - { - prev_highlight.range = *prev_highlight.range.start()..=*range.end(); + if prev_highlight.range.end.cmp(&range.end, &snapshot).is_lt() { + prev_highlight.range.end = range.end; } merged = true; prev_highlight.index = index; @@ -11564,18 +11561,17 @@ impl Editor { let highlight = &row_highlights[ix]; if next_highlight .range - .start() - .cmp(&highlight.range.end(), &snapshot) + .start + .cmp(&highlight.range.end, &snapshot) .is_le() { if next_highlight .range - .end() - .cmp(&highlight.range.end(), &snapshot) + .end + .cmp(&highlight.range.end, &snapshot) .is_gt() { - row_highlights[ix].range = - *highlight.range.start()..=*next_highlight.range.end(); + row_highlights[ix].range.end = next_highlight.range.end; } row_highlights.remove(ix + 1); } else { @@ -11597,15 +11593,12 @@ impl Editor { let mut ranges_to_remove = ranges_to_remove.iter().peekable(); row_highlights.retain(|highlight| { while let Some(range_to_remove) = ranges_to_remove.peek() { - match range_to_remove.end.cmp(&highlight.range.start(), &snapshot) { - Ordering::Less => { + match range_to_remove.end.cmp(&highlight.range.start, &snapshot) { + Ordering::Less | Ordering::Equal => { ranges_to_remove.next(); } - Ordering::Equal => { - return false; - } Ordering::Greater => { - match range_to_remove.start.cmp(&highlight.range.end(), &snapshot) { + match range_to_remove.start.cmp(&highlight.range.end, &snapshot) { Ordering::Less | Ordering::Equal => { return false; } @@ -11625,9 +11618,7 @@ impl Editor { } /// For a highlight given context type, gets all anchor ranges that will be used for row highlighting. 
- pub fn highlighted_rows( - &self, - ) -> impl '_ + Iterator, Hsla)> { + pub fn highlighted_rows(&self) -> impl '_ + Iterator, Hsla)> { self.highlighted_rows .get(&TypeId::of::()) .map_or(&[] as &[_], |vec| vec.as_slice()) @@ -11650,9 +11641,17 @@ impl Editor { .fold( BTreeMap::::new(), |mut unique_rows, highlight| { - let start_row = highlight.range.start().to_display_point(&snapshot).row(); - let end_row = highlight.range.end().to_display_point(&snapshot).row(); - for row in start_row.0..=end_row.0 { + let start = highlight.range.start.to_display_point(&snapshot); + let end = highlight.range.end.to_display_point(&snapshot); + let start_row = start.row().0; + let end_row = if highlight.range.end.text_anchor != text::Anchor::MAX + && end.column() == 0 + { + end.row().0.saturating_sub(1) + } else { + end.row().0 + }; + for row in start_row..=end_row { let used_index = used_highlight_orders.entry(row).or_insert(highlight.index); if highlight.index >= *used_index { @@ -11674,7 +11673,7 @@ impl Editor { .flat_map(|highlighted_rows| highlighted_rows.iter()) .filter_map(|highlight| { if highlight.should_autoscroll { - Some(highlight.range.start().to_display_point(snapshot).row()) + Some(highlight.range.start.to_display_point(snapshot).row()) } else { None } diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index b17d94a5eb0f0..249d0a474641d 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -11832,7 +11832,6 @@ async fn test_edits_around_expanded_insertion_hunks( ); cx.update_editor(|editor, cx| { - editor.move_up(&MoveUp, cx); editor.delete_line(&DeleteLine, cx); }); executor.run_until_parked(); @@ -11846,7 +11845,7 @@ async fn test_edits_around_expanded_insertion_hunks( + const B: u32 = 42; + const C: u32 = 42; + const D: u32 = 42; - + + + const E: u32 = 42; fn main() { println!("hello"); @@ -11872,8 +11871,8 @@ async fn test_edits_around_expanded_insertion_hunks( use some::mod2; const A: u32 = 42; + const B: u32 = 42; ˇ - fn main() { println!("hello"); @@ -11889,8 +11888,8 @@ async fn test_edits_around_expanded_insertion_hunks( use some::mod2; const A: u32 = 42; + + const B: u32 = 42; - + fn main() { println!("hello"); @@ -11907,7 +11906,9 @@ async fn test_edits_around_expanded_insertion_hunks( executor.run_until_parked(); cx.assert_diff_hunks( r#" - + use some::mod1; + - use some::mod2; + - - const A: u32 = 42; fn main() { diff --git a/crates/editor/src/hunk_diff.rs b/crates/editor/src/hunk_diff.rs index 3e18b992c1dae..cf2a857b67d2f 100644 --- a/crates/editor/src/hunk_diff.rs +++ b/crates/editor/src/hunk_diff.rs @@ -6,10 +6,7 @@ use multi_buffer::{ Anchor, AnchorRangeExt, ExcerptRange, MultiBuffer, MultiBufferDiffHunk, MultiBufferRow, MultiBufferSnapshot, ToPoint, }; -use std::{ - ops::{Range, RangeInclusive}, - sync::Arc, -}; +use std::{ops::Range, sync::Arc}; use ui::{ prelude::*, ActiveTheme, ContextMenu, IconButtonShape, InteractiveElement, IntoElement, ParentElement, PopoverMenu, Styled, Tooltip, ViewContext, VisualContext, @@ -19,7 +16,7 @@ use util::RangeExt; use crate::{ editor_settings::CurrentLineHighlight, hunk_status, hunks_for_selections, BlockDisposition, BlockProperties, BlockStyle, CustomBlockId, DiffRowHighlight, DisplayRow, DisplaySnapshot, - Editor, EditorElement, EditorSnapshot, ExpandAllHunkDiffs, GoToHunk, GoToPrevHunk, RevertFile, + Editor, EditorElement, ExpandAllHunkDiffs, GoToHunk, GoToPrevHunk, RevertFile, RevertSelectedHunks, ToDisplayPoint, ToggleHunkDiff, }; @@ -298,7 +295,7 @@ impl Editor 
{ } DiffHunkStatus::Added => { self.highlight_rows::( - to_inclusive_row_range(hunk_start..hunk_end, &snapshot), + hunk_start..hunk_end, added_hunk_color(cx), false, cx, @@ -307,7 +304,7 @@ impl Editor { } DiffHunkStatus::Modified => { self.highlight_rows::( - to_inclusive_row_range(hunk_start..hunk_end, &snapshot), + hunk_start..hunk_end, added_hunk_color(cx), false, cx, @@ -960,7 +957,7 @@ fn editor_with_deleted_text( editor.set_read_only(true); editor.set_show_inline_completions(Some(false), cx); editor.highlight_rows::( - Anchor::min()..=Anchor::max(), + Anchor::min()..Anchor::max(), deleted_color, false, cx, @@ -1039,22 +1036,6 @@ fn buffer_diff_hunk( None } -fn to_inclusive_row_range( - row_range: Range, - snapshot: &EditorSnapshot, -) -> RangeInclusive { - let mut end = row_range.end.to_point(&snapshot.buffer_snapshot); - if end.column == 0 && end.row > 0 { - end = Point::new( - end.row - 1, - snapshot - .buffer_snapshot - .line_len(MultiBufferRow(end.row - 1)), - ); - } - row_range.start..=snapshot.buffer_snapshot.anchor_after(end) -} - impl DisplayDiffHunk { pub fn start_display_row(&self) -> DisplayRow { match self { diff --git a/crates/editor/src/test/editor_test_context.rs b/crates/editor/src/test/editor_test_context.rs index 2ec4f4a3b7b7b..7234d97c5b77e 100644 --- a/crates/editor/src/test/editor_test_context.rs +++ b/crates/editor/src/test/editor_test_context.rs @@ -9,7 +9,6 @@ use gpui::{ AnyWindowHandle, AppContext, Keystroke, ModelContext, Pixels, Point, View, ViewContext, VisualTestContext, WindowHandle, }; -use indoc::indoc; use itertools::Itertools; use language::{Buffer, BufferSnapshot, LanguageRegistry}; use multi_buffer::{ExcerptRange, ToPoint}; @@ -337,8 +336,9 @@ impl EditorTestContext { let insertions = editor .highlighted_rows::() .map(|(range, _)| { - range.start().to_point(&snapshot.buffer_snapshot).row - ..range.end().to_point(&snapshot.buffer_snapshot).row + 1 + let start = range.start.to_point(&snapshot.buffer_snapshot); + let end = range.end.to_point(&snapshot.buffer_snapshot); + start.row..end.row }) .collect::>(); let deletions = editor @@ -384,13 +384,8 @@ impl EditorTestContext { /// See the `util::test::marked_text_ranges` function for more information. #[track_caller] pub fn assert_editor_state(&mut self, marked_text: &str) { - let (unmarked_text, expected_selections) = marked_text_ranges(marked_text, true); - let buffer_text = self.buffer_text(); - - if buffer_text != unmarked_text { - panic!("Unmarked text doesn't match buffer text\nBuffer text: {buffer_text:?}\nUnmarked text: {unmarked_text:?}\nRaw buffer text\n{buffer_text}\nRaw unmarked text\n{unmarked_text}"); - } - + let (expected_text, expected_selections) = marked_text_ranges(marked_text, true); + pretty_assertions::assert_eq!(self.buffer_text(), expected_text, "unexpected buffer text"); self.assert_selections(expected_selections, marked_text.to_string()) } @@ -463,20 +458,11 @@ impl EditorTestContext { let actual_marked_text = generate_marked_text(&self.buffer_text(), &actual_selections, true); if expected_selections != actual_selections { - panic!( - indoc! {" - - {}Editor has unexpected selections. 
- - Expected selections: - {} - - Actual selections: - {} - "}, - self.assertion_context(), - expected_marked_text, + pretty_assertions::assert_eq!( actual_marked_text, + expected_marked_text, + "{}Editor has unexpected selections", + self.assertion_context(), ); } } diff --git a/crates/go_to_line/src/go_to_line.rs b/crates/go_to_line/src/go_to_line.rs index fd631648c2c75..0e9482b759414 100644 --- a/crates/go_to_line/src/go_to_line.rs +++ b/crates/go_to_line/src/go_to_line.rs @@ -116,11 +116,13 @@ impl GoToLine { if let Some(point) = self.point_from_query(cx) { self.active_editor.update(cx, |active_editor, cx| { let snapshot = active_editor.snapshot(cx).display_snapshot; - let point = snapshot.buffer_snapshot.clip_point(point, Bias::Left); - let anchor = snapshot.buffer_snapshot.anchor_before(point); + let start = snapshot.buffer_snapshot.clip_point(point, Bias::Left); + let end = start + Point::new(1, 0); + let start = snapshot.buffer_snapshot.anchor_before(start); + let end = snapshot.buffer_snapshot.anchor_after(end); active_editor.clear_row_highlights::(); active_editor.highlight_rows::( - anchor..=anchor, + start..end, cx.theme().colors().editor_highlighted_line_background, true, cx, @@ -244,13 +246,13 @@ mod tests { field_1: i32, // display line 3 field_2: i32, // display line 4 } // display line 5 - // display line 7 - struct Another { // display line 8 - field_1: i32, // display line 9 - field_2: i32, // display line 10 - field_3: i32, // display line 11 - field_4: i32, // display line 12 - } // display line 13 + // display line 6 + struct Another { // display line 7 + field_1: i32, // display line 8 + field_2: i32, // display line 9 + field_3: i32, // display line 10 + field_4: i32, // display line 11 + } // display line 12 "} }), ) diff --git a/crates/outline/src/outline.rs b/crates/outline/src/outline.rs index 520311b6f3c62..1d82d06ad8570 100644 --- a/crates/outline/src/outline.rs +++ b/crates/outline/src/outline.rs @@ -143,7 +143,7 @@ impl OutlineViewDelegate { self.active_editor.update(cx, |active_editor, cx| { active_editor.clear_row_highlights::(); active_editor.highlight_rows::( - outline_item.range.start..=outline_item.range.end, + outline_item.range.start..outline_item.range.end, cx.theme().colors().editor_highlighted_line_background, true, cx, @@ -245,7 +245,7 @@ impl PickerDelegate for OutlineViewDelegate { .next(); if let Some((rows, _)) = highlight { active_editor.change_selections(Some(Autoscroll::center()), cx, |s| { - s.select_ranges([*rows.start()..*rows.start()]) + s.select_ranges([rows.start..rows.start]) }); active_editor.clear_row_highlights::(); active_editor.focus(cx); From 3737d4eb4fa9e576204a367b020ba189ac4aa087 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Fri, 27 Sep 2024 23:25:02 +0200 Subject: [PATCH 124/228] Add tooltip for code actions icon button (#18461) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit I have just recently discovered this keybinding myself out of talking to folks, ha. The tooltip here might ease the discovery for other folks in the future. 
Screenshot 2024-09-27 at 11 04 28 PM --- Release Notes: - N/A --- crates/editor/src/editor.rs | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index b604f388debd5..d1ca70f705ce5 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -5368,6 +5368,19 @@ impl Editor { .icon_size(IconSize::XSmall) .icon_color(Color::Muted) .selected(is_active) + .tooltip({ + let focus_handle = self.focus_handle.clone(); + move |cx| { + Tooltip::for_action_in( + "Toggle Code Actions", + &ToggleCodeActions { + deployed_from_indicator: None, + }, + &focus_handle, + cx, + ) + } + }) .on_click(cx.listener(move |editor, _e, cx| { editor.focus(cx); editor.toggle_code_actions( From 675673ed5462711916e7824528698fc235d679b2 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Sat, 28 Sep 2024 01:45:40 +0200 Subject: [PATCH 125/228] Fine-tune hunk control spacing (#18463) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Screenshot 2024-09-28 at 1 09 35 AM --- Release Notes: - N/A --- crates/editor/src/hunk_diff.rs | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/crates/editor/src/hunk_diff.rs b/crates/editor/src/hunk_diff.rs index cf2a857b67d2f..4e7a0f05f4a25 100644 --- a/crates/editor/src/hunk_diff.rs +++ b/crates/editor/src/hunk_diff.rs @@ -338,7 +338,7 @@ impl Editor { hunk: &HoveredHunk, cx: &mut ViewContext<'_, Editor>, ) -> BlockProperties { - let border_color = cx.theme().colors().border_disabled; + let border_color = cx.theme().colors().border_variant; let gutter_color = match hunk.status { DiffHunkStatus::Added => cx.theme().status().created, DiffHunkStatus::Modified => cx.theme().status().modified, @@ -381,14 +381,15 @@ impl Editor { ) .child( h_flex() + .pl_1p5() + .pr_6() .size_full() .justify_between() .border_t_1() .border_color(border_color) .child( h_flex() - .gap_2() - .pl_6() + .gap_1() .child( IconButton::new("next-hunk", IconName::ArrowDown) .shape(IconButtonShape::Square) @@ -595,7 +596,7 @@ impl Editor { }), ) .child( - h_flex().gap_2().pr_6().child( + div().child( IconButton::new("collapse", IconName::Close) .shape(IconButtonShape::Square) .icon_size(IconSize::Small) From 1021f0e28808285e3d301fcb9ccf9af40854ea98 Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Sat, 28 Sep 2024 14:21:13 -0400 Subject: [PATCH 126/228] Show release notes locally when showing update notification (#18486) Closes https://github.com/zed-industries/zed/issues/17527 I think we are ok to switch to using the local action now. There are a few things we don't support, like media, but we don't include media directly too often, and I think this might help push the community to maybe add support for it. That being said, I updated the markdown coming back from the endpoint to include links to the web version of the release notes, so they can always hop over to that version, if they would like. https://github.com/user-attachments/assets/b4d207a7-1640-48f1-91d0-94537f74116c All forming of the Markdown happens in the endpoint, so if someone with a better eye wants to update this, you can do that here: https://github.com/zed-industries/zed.dev/blob/0e5923e3e7d1caa8b4bf32d0a7f8999b34dbe64c/src/pages/api/release_notes/v2/%5Bchannel_type%5D/%5Bversion%5D.ts#L50-L62 Release Notes: - Changed the `view the release notes` button in the update toast to trigger the local release notes action. 
--- crates/auto_update/src/auto_update.rs | 12 +++++++----- crates/auto_update/src/update_notification.rs | 19 ++++++++++++++----- 2 files changed, 21 insertions(+), 10 deletions(-) diff --git a/crates/auto_update/src/auto_update.rs b/crates/auto_update/src/auto_update.rs index 1fe89cce0f9c4..60d6369ee869c 100644 --- a/crates/auto_update/src/auto_update.rs +++ b/crates/auto_update/src/auto_update.rs @@ -345,15 +345,17 @@ pub fn notify_of_any_new_update(cx: &mut ViewContext) -> Option<()> { let should_show_notification = should_show_notification.await?; if should_show_notification { workspace.update(&mut cx, |workspace, cx| { + let workspace_handle = workspace.weak_handle(); workspace.show_notification( NotificationId::unique::(), cx, - |cx| cx.new_view(|_| UpdateNotification::new(version)), + |cx| cx.new_view(|_| UpdateNotification::new(version, workspace_handle)), ); - updater - .read(cx) - .set_should_show_update_notification(false, cx) - .detach_and_log_err(cx); + updater.update(cx, |updater, cx| { + updater + .set_should_show_update_notification(false, cx) + .detach_and_log_err(cx); + }); })?; } anyhow::Ok(()) diff --git a/crates/auto_update/src/update_notification.rs b/crates/auto_update/src/update_notification.rs index 66028c2401199..7568a0eb1a94e 100644 --- a/crates/auto_update/src/update_notification.rs +++ b/crates/auto_update/src/update_notification.rs @@ -1,13 +1,18 @@ use gpui::{ div, DismissEvent, EventEmitter, InteractiveElement, IntoElement, ParentElement, Render, - SemanticVersion, StatefulInteractiveElement, Styled, ViewContext, + SemanticVersion, StatefulInteractiveElement, Styled, ViewContext, WeakView, }; use menu::Cancel; use release_channel::ReleaseChannel; -use workspace::ui::{h_flex, v_flex, Icon, IconName, Label, StyledExt}; +use util::ResultExt; +use workspace::{ + ui::{h_flex, v_flex, Icon, IconName, Label, StyledExt}, + Workspace, +}; pub struct UpdateNotification { version: SemanticVersion, + workspace: WeakView, } impl EventEmitter for UpdateNotification {} @@ -41,7 +46,11 @@ impl Render for UpdateNotification { .child(Label::new("View the release notes")) .cursor_pointer() .on_click(cx.listener(|this, _, cx| { - crate::view_release_notes(&Default::default(), cx); + this.workspace + .update(cx, |workspace, cx| { + crate::view_release_notes_locally(workspace, cx); + }) + .log_err(); this.dismiss(&menu::Cancel, cx) })), ) @@ -49,8 +58,8 @@ impl Render for UpdateNotification { } impl UpdateNotification { - pub fn new(version: SemanticVersion) -> Self { - Self { version } + pub fn new(version: SemanticVersion, workspace: WeakView) -> Self { + Self { version, workspace } } pub fn dismiss(&mut self, _: &Cancel, cx: &mut ViewContext) { From 8aeab4800c6133f4bb51541a01acb630bae739e8 Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Sat, 28 Sep 2024 15:20:32 -0400 Subject: [PATCH 127/228] Continue to redirect to GitHub commits for nightly and dev release notes (#18487) We are now using the `view release notes locally` action when clicking on the update toast - the endpoint for this action does not currently return anything for valid for these channels, as we don't have support yet for diffing between these builds, so for now, [continue to do what the `view release notes` action did and just send the user to the commit view on GitHub](https://github.com/zed-industries/zed/blob/caffb2733f8e859637050e3708dbd10b6e409762/crates/auto_update/src/auto_update.rs#L255-L260). 
It is a bit counterintuitive to send the user to the browser when using the "local" action, but this is just a patch in the interim. If we make adjustments to our channels to keep the nightly tag stable and add some sort of unique suffix, like a timestamp, we can then adjust things to return these in the request body and show them in the editor. Release Notes: - N/A --- crates/auto_update/src/auto_update.rs | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/crates/auto_update/src/auto_update.rs b/crates/auto_update/src/auto_update.rs index 60d6369ee869c..2c93ee4171c8b 100644 --- a/crates/auto_update/src/auto_update.rs +++ b/crates/auto_update/src/auto_update.rs @@ -264,6 +264,18 @@ pub fn view_release_notes(_: &ViewReleaseNotes, cx: &mut AppContext) -> Option<( fn view_release_notes_locally(workspace: &mut Workspace, cx: &mut ViewContext) { let release_channel = ReleaseChannel::global(cx); + + let url = match release_channel { + ReleaseChannel::Nightly => Some("https://github.com/zed-industries/zed/commits/nightly/"), + ReleaseChannel::Dev => Some("https://github.com/zed-industries/zed/commits/main/"), + _ => None, + }; + + if let Some(url) = url { + cx.open_url(url); + return; + } + let version = AppVersion::global(cx).to_string(); let client = client::Client::global(cx).http_client(); From 84ce81caf155fefb60b1a26e44b14b1a28cfb2cd Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Sun, 29 Sep 2024 10:30:48 -0600 Subject: [PATCH 128/228] Pass `Summary::Context` to `Item::summarize` (#18510) We are going to use this in the multi-buffer to produce a summary for an `Excerpt` that contains a `Range`. Release Notes: - N/A Co-authored-by: Nathan --- crates/channel/src/channel_chat.rs | 2 +- crates/editor/src/display_map/block_map.rs | 2 +- crates/editor/src/display_map/crease_map.rs | 2 +- crates/editor/src/display_map/fold_map.rs | 4 ++-- crates/editor/src/display_map/inlay_map.rs | 2 +- crates/editor/src/display_map/wrap_map.rs | 2 +- crates/editor/src/git/blame.rs | 2 +- crates/git/src/diff.rs | 2 +- crates/gpui/src/elements/list.rs | 2 +- crates/language/src/diagnostic_set.rs | 2 +- crates/language/src/syntax_map.rs | 2 +- crates/multi_buffer/src/multi_buffer.rs | 4 ++-- crates/notifications/src/notification_store.rs | 2 +- crates/rope/src/rope.rs | 2 +- crates/sum_tree/src/sum_tree.rs | 12 ++++++------ crates/sum_tree/src/tree_map.rs | 2 +- crates/text/src/locator.rs | 2 +- crates/text/src/operation_queue.rs | 2 +- crates/text/src/text.rs | 6 +++--- crates/text/src/undo_map.rs | 2 +- crates/worktree/src/worktree.rs | 4 ++-- 21 files changed, 31 insertions(+), 31 deletions(-) diff --git a/crates/channel/src/channel_chat.rs b/crates/channel/src/channel_chat.rs index 1a9e46db0460a..e5b5b74c16262 100644 --- a/crates/channel/src/channel_chat.rs +++ b/crates/channel/src/channel_chat.rs @@ -808,7 +808,7 @@ pub fn mentions_to_proto(mentions: &[(Range, UserId)]) -> Vec Self::Summary { + fn summary(&self, _cx: &()) -> Self::Summary { ChannelMessageSummary { max_id: self.id, count: 1, diff --git a/crates/editor/src/display_map/block_map.rs b/crates/editor/src/display_map/block_map.rs index efa026a56c610..52e0ca2486d25 100644 --- a/crates/editor/src/display_map/block_map.rs +++ b/crates/editor/src/display_map/block_map.rs @@ -1360,7 +1360,7 @@ impl<'a> Iterator for BlockBufferRows<'a> { impl sum_tree::Item for Transform { type Summary = TransformSummary; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &()) -> Self::Summary { self.summary.clone() } } diff --git 
a/crates/editor/src/display_map/crease_map.rs b/crates/editor/src/display_map/crease_map.rs index c3f2b0061ac73..531c650c43a6e 100644 --- a/crates/editor/src/display_map/crease_map.rs +++ b/crates/editor/src/display_map/crease_map.rs @@ -291,7 +291,7 @@ impl sum_tree::Summary for ItemSummary { impl sum_tree::Item for CreaseItem { type Summary = ItemSummary; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &MultiBufferSnapshot) -> Self::Summary { ItemSummary { range: self.crease.range.clone(), } diff --git a/crates/editor/src/display_map/fold_map.rs b/crates/editor/src/display_map/fold_map.rs index 37983030b8e1a..5eb26ff969388 100644 --- a/crates/editor/src/display_map/fold_map.rs +++ b/crates/editor/src/display_map/fold_map.rs @@ -944,7 +944,7 @@ struct TransformSummary { impl sum_tree::Item for Transform { type Summary = TransformSummary; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &()) -> Self::Summary { self.summary.clone() } } @@ -1004,7 +1004,7 @@ impl Default for FoldRange { impl sum_tree::Item for Fold { type Summary = FoldSummary; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &MultiBufferSnapshot) -> Self::Summary { FoldSummary { start: self.range.start, end: self.range.end, diff --git a/crates/editor/src/display_map/inlay_map.rs b/crates/editor/src/display_map/inlay_map.rs index 712db45e3f61a..d4e39f2df9270 100644 --- a/crates/editor/src/display_map/inlay_map.rs +++ b/crates/editor/src/display_map/inlay_map.rs @@ -74,7 +74,7 @@ impl Inlay { impl sum_tree::Item for Transform { type Summary = TransformSummary; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &()) -> Self::Summary { match self { Transform::Isomorphic(summary) => TransformSummary { input: summary.clone(), diff --git a/crates/editor/src/display_map/wrap_map.rs b/crates/editor/src/display_map/wrap_map.rs index 564bba2158030..dc4d93058cdf7 100644 --- a/crates/editor/src/display_map/wrap_map.rs +++ b/crates/editor/src/display_map/wrap_map.rs @@ -917,7 +917,7 @@ impl Transform { impl sum_tree::Item for Transform { type Summary = TransformSummary; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &()) -> Self::Summary { self.summary.clone() } } diff --git a/crates/editor/src/git/blame.rs b/crates/editor/src/git/blame.rs index 733d42d0c57dd..303ead16b2231 100644 --- a/crates/editor/src/git/blame.rs +++ b/crates/editor/src/git/blame.rs @@ -29,7 +29,7 @@ pub struct GitBlameEntrySummary { impl sum_tree::Item for GitBlameEntry { type Summary = GitBlameEntrySummary; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &()) -> Self::Summary { GitBlameEntrySummary { rows: self.rows } } } diff --git a/crates/git/src/diff.rs b/crates/git/src/diff.rs index 1f7930ce1442d..baad824577d30 100644 --- a/crates/git/src/diff.rs +++ b/crates/git/src/diff.rs @@ -34,7 +34,7 @@ struct InternalDiffHunk { impl sum_tree::Item for InternalDiffHunk { type Summary = DiffHunkSummary; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &text::BufferSnapshot) -> Self::Summary { DiffHunkSummary { buffer_range: self.buffer_range.clone(), } diff --git a/crates/gpui/src/elements/list.rs b/crates/gpui/src/elements/list.rs index d03392867b5ee..6ac6d2a9bf8d7 100644 --- a/crates/gpui/src/elements/list.rs +++ b/crates/gpui/src/elements/list.rs @@ -858,7 +858,7 @@ impl Styled for List { impl sum_tree::Item for ListItem { type Summary = ListItemSummary; - fn summary(&self) -> Self::Summary { + fn summary(&self, _: &()) -> Self::Summary { match self { 
ListItem::Unmeasured { focus_handle } => ListItemSummary { count: 1, diff --git a/crates/language/src/diagnostic_set.rs b/crates/language/src/diagnostic_set.rs index c35659d9bbd4d..38d4216beeabc 100644 --- a/crates/language/src/diagnostic_set.rs +++ b/crates/language/src/diagnostic_set.rs @@ -224,7 +224,7 @@ impl DiagnosticSet { impl sum_tree::Item for DiagnosticEntry { type Summary = Summary; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &text::BufferSnapshot) -> Self::Summary { Summary { start: self.range.start, end: self.range.end, diff --git a/crates/language/src/syntax_map.rs b/crates/language/src/syntax_map.rs index 55177f79620db..8617696cc471e 100644 --- a/crates/language/src/syntax_map.rs +++ b/crates/language/src/syntax_map.rs @@ -1739,7 +1739,7 @@ impl<'a> SeekTarget<'a, SyntaxLayerSummary, SyntaxLayerSummary> impl sum_tree::Item for SyntaxLayerEntry { type Summary = SyntaxLayerSummary; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &BufferSnapshot) -> Self::Summary { SyntaxLayerSummary { min_depth: self.depth, max_depth: self.depth, diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index 828b39967d9e1..7aa733ba8fa37 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -4596,7 +4596,7 @@ impl fmt::Debug for Excerpt { impl sum_tree::Item for Excerpt { type Summary = ExcerptSummary; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &()) -> Self::Summary { let mut text = self.text_summary.clone(); if self.has_trailing_newline { text += TextSummary::from("\n"); @@ -4613,7 +4613,7 @@ impl sum_tree::Item for Excerpt { impl sum_tree::Item for ExcerptIdMapping { type Summary = ExcerptId; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &()) -> Self::Summary { self.id } } diff --git a/crates/notifications/src/notification_store.rs b/crates/notifications/src/notification_store.rs index 48fcb5dfbb830..5c3de53ee1895 100644 --- a/crates/notifications/src/notification_store.rs +++ b/crates/notifications/src/notification_store.rs @@ -455,7 +455,7 @@ impl EventEmitter for NotificationStore {} impl sum_tree::Item for NotificationEntry { type Summary = NotificationSummary; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &()) -> Self::Summary { NotificationSummary { max_id: self.id, count: 1, diff --git a/crates/rope/src/rope.rs b/crates/rope/src/rope.rs index 56fe7fc054447..68ff7d5c6956a 100644 --- a/crates/rope/src/rope.rs +++ b/crates/rope/src/rope.rs @@ -1159,7 +1159,7 @@ impl Chunk { impl sum_tree::Item for Chunk { type Summary = ChunkSummary; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &()) -> Self::Summary { ChunkSummary::from(self.0.as_str()) } } diff --git a/crates/sum_tree/src/sum_tree.rs b/crates/sum_tree/src/sum_tree.rs index 7013dc66fd1ed..fbfe3b06f3ab4 100644 --- a/crates/sum_tree/src/sum_tree.rs +++ b/crates/sum_tree/src/sum_tree.rs @@ -20,7 +20,7 @@ pub const TREE_BASE: usize = 6; pub trait Item: Clone { type Summary: Summary; - fn summary(&self) -> Self::Summary; + fn summary(&self, cx: &::Context) -> Self::Summary; } /// An [`Item`] whose summary has a specific key that can be used to identify it @@ -211,7 +211,7 @@ impl SumTree { while iter.peek().is_some() { let items: ArrayVec = iter.by_ref().take(2 * TREE_BASE).collect(); let item_summaries: ArrayVec = - items.iter().map(|item| item.summary()).collect(); + items.iter().map(|item| item.summary(cx)).collect(); let mut summary = 
item_summaries[0].clone(); for item_summary in &item_summaries[1..] { @@ -281,7 +281,7 @@ impl SumTree { .map(|items| { let items: ArrayVec = items.into_iter().collect(); let item_summaries: ArrayVec = - items.iter().map(|item| item.summary()).collect(); + items.iter().map(|item| item.summary(cx)).collect(); let mut summary = item_summaries[0].clone(); for item_summary in &item_summaries[1..] { ::add_summary(&mut summary, item_summary, cx); @@ -405,7 +405,7 @@ impl SumTree { if let Some((item, item_summary)) = items.last_mut().zip(item_summaries.last_mut()) { (f)(item); - *item_summary = item.summary(); + *item_summary = item.summary(cx); *summary = sum(item_summaries.iter(), cx); Some(summary.clone()) } else { @@ -461,7 +461,7 @@ impl SumTree { } pub fn push(&mut self, item: T, cx: &::Context) { - let summary = item.summary(); + let summary = item.summary(cx); self.append( SumTree(Arc::new(Node::Leaf { summary: summary.clone(), @@ -1352,7 +1352,7 @@ mod tests { impl Item for u8 { type Summary = IntegersSummary; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &()) -> Self::Summary { IntegersSummary { count: 1, sum: *self as usize, diff --git a/crates/sum_tree/src/tree_map.rs b/crates/sum_tree/src/tree_map.rs index c57226b681432..9a4d952e93f22 100644 --- a/crates/sum_tree/src/tree_map.rs +++ b/crates/sum_tree/src/tree_map.rs @@ -224,7 +224,7 @@ where { type Summary = MapKey; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &()) -> Self::Summary { self.key() } } diff --git a/crates/text/src/locator.rs b/crates/text/src/locator.rs index 7afc16f581549..e38b9b58af22c 100644 --- a/crates/text/src/locator.rs +++ b/crates/text/src/locator.rs @@ -69,7 +69,7 @@ impl Default for Locator { impl sum_tree::Item for Locator { type Summary = Locator; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &()) -> Self::Summary { self.clone() } } diff --git a/crates/text/src/operation_queue.rs b/crates/text/src/operation_queue.rs index c7964f62674ca..52b534a41b739 100644 --- a/crates/text/src/operation_queue.rs +++ b/crates/text/src/operation_queue.rs @@ -107,7 +107,7 @@ impl<'a> Dimension<'a, OperationSummary> for OperationKey { impl Item for OperationItem { type Summary = OperationSummary; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &()) -> Self::Summary { OperationSummary { key: OperationKey::new(self.0.lamport_timestamp()), len: 1, diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index 8bdc9fdb03d89..80eafcf4eba2d 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -2617,7 +2617,7 @@ impl Fragment { impl sum_tree::Item for Fragment { type Summary = FragmentSummary; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &Option) -> Self::Summary { let mut max_version = clock::Global::new(); max_version.observe(self.timestamp); for deletion in &self.deletions { @@ -2688,7 +2688,7 @@ impl Default for FragmentSummary { impl sum_tree::Item for InsertionFragment { type Summary = InsertionFragmentKey; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &()) -> Self::Summary { InsertionFragmentKey { timestamp: self.timestamp, split_offset: self.split_offset, @@ -2700,7 +2700,7 @@ impl sum_tree::KeyedItem for InsertionFragment { type Key = InsertionFragmentKey; fn key(&self) -> Self::Key { - sum_tree::Item::summary(self) + sum_tree::Item::summary(self, &()) } } diff --git a/crates/text/src/undo_map.rs b/crates/text/src/undo_map.rs index 4e670fd456068..ed363cfc6b6d7 100644 --- 
a/crates/text/src/undo_map.rs +++ b/crates/text/src/undo_map.rs @@ -11,7 +11,7 @@ struct UndoMapEntry { impl sum_tree::Item for UndoMapEntry { type Summary = UndoMapKey; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &()) -> Self::Summary { self.key } } diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index d81c91132b9d3..a3daf8ea2eade 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -3339,7 +3339,7 @@ impl EntryKind { impl sum_tree::Item for Entry { type Summary = EntrySummary; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &()) -> Self::Summary { let non_ignored_count = if self.is_ignored || self.is_external { 0 } else { @@ -3434,7 +3434,7 @@ struct PathEntry { impl sum_tree::Item for PathEntry { type Summary = PathEntrySummary; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &()) -> Self::Summary { PathEntrySummary { max_id: self.id } } } From 5f35fa5d92d45269bf311f1aa5e9c1703e54ffc4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Thorben=20Kr=C3=B6ger?= Date: Sun, 29 Sep 2024 19:54:09 +0200 Subject: [PATCH 129/228] Associate `uv.lock` files with TOML (#18426) The `uv` python package manager uses the TOML for it's `uv.lock` file, see https://docs.astral.sh/uv/guides/projects/#uvlock. Ref #7808 Release Notes: - associate `uv.lock` files with the TOML language --- assets/settings/default.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index 82778dc8f6c48..2a11a85d4efae 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -769,7 +769,8 @@ "**/Zed/**/*.json", "tsconfig.json", "pyrightconfig.json" - ] + ], + "TOML": ["uv.lock"] }, /// By default use a recent system version of node, or install our own. /// You can override this to use a version of node that is not in $PATH with: From 250f2e76eb6f97fe097ee39863c621335762c102 Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Mon, 30 Sep 2024 08:05:51 +0200 Subject: [PATCH 130/228] tasks: Display runnables at the start of folds (#18526) Release Notes: - Fixed task indicators not showing up at the starts of folds. --- crates/editor/src/element.rs | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index e5c067e37ec3d..1c35fa6bcdd4e 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -1646,7 +1646,16 @@ impl EditorElement { return None; } if snapshot.is_line_folded(multibuffer_row) { - return None; + // Skip folded indicators, unless it's the starting line of a fold. + if multibuffer_row + .0 + .checked_sub(1) + .map_or(false, |previous_row| { + snapshot.is_line_folded(MultiBufferRow(previous_row)) + }) + { + return None; + } } let button = editor.render_run_indicator( &self.style, From 77df7e56f7852aaa121cafb0bc0596c9b43d3680 Mon Sep 17 00:00:00 2001 From: Tom Wieczorek Date: Mon, 30 Sep 2024 08:34:41 +0200 Subject: [PATCH 131/228] settings: Make external formatter arguments optional (#18340) If specifying a formatter in the settings like this: "languages": { "foo": { "formatter": { "external": { "command": "/path/to/foo-formatter" } } } } Zed will show an error like this: Invalid user settings file data did not match any variant of untagged enum SingleOrVec This is because the arguments are not optional. 
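For readability, that settings snippet is just an `external` formatter entry with a `command` and no `arguments`, wrapped here in a top-level object as it would appear in `settings.json`:

```json
{
  "languages": {
    "foo": {
      "formatter": {
        "external": {
          // Note: no "arguments" key — before this change, omitting it made
          // the whole settings file fail to deserialize with the error above.
          "command": "/path/to/foo-formatter"
        }
      }
    }
  }
}
```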
The error is hard to understand, so let's make the arguments actually optional, which makes the above settings snippet valid. Release Notes: - Make external formatter arguments optional --- crates/collab/src/tests/integration_tests.rs | 2 +- crates/language/src/language_settings.rs | 2 +- crates/project/src/lsp_store.rs | 31 +++++++++++++------- 3 files changed, 22 insertions(+), 13 deletions(-) diff --git a/crates/collab/src/tests/integration_tests.rs b/crates/collab/src/tests/integration_tests.rs index afc3e7cfb84ee..615ad52e2ef36 100644 --- a/crates/collab/src/tests/integration_tests.rs +++ b/crates/collab/src/tests/integration_tests.rs @@ -4409,7 +4409,7 @@ async fn test_formatting_buffer( file.defaults.formatter = Some(SelectedFormatter::List(FormatterList( vec![Formatter::External { command: "awk".into(), - arguments: vec!["{sub(/two/,\"{buffer_path}\")}1".to_string()].into(), + arguments: Some(vec!["{sub(/two/,\"{buffer_path}\")}1".to_string()].into()), }] .into(), ))); diff --git a/crates/language/src/language_settings.rs b/crates/language/src/language_settings.rs index 2f1a7be2bf492..d610ab09865ce 100644 --- a/crates/language/src/language_settings.rs +++ b/crates/language/src/language_settings.rs @@ -661,7 +661,7 @@ pub enum Formatter { /// The external program to run. command: Arc, /// The arguments to pass to the program. - arguments: Arc<[String]>, + arguments: Option>, }, /// Files should be formatted using code actions executed by language servers. CodeActions(HashMap), diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 37922b7c2ee03..a3763810e1a26 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -539,13 +539,19 @@ impl LocalLspStore { } Formatter::External { command, arguments } => { let buffer_abs_path = buffer_abs_path.as_ref().map(|path| path.as_path()); - Self::format_via_external_command(buffer, buffer_abs_path, command, arguments, cx) - .await - .context(format!( - "failed to format via external command {:?}", - command - ))? - .map(FormatOperation::External) + Self::format_via_external_command( + buffer, + buffer_abs_path, + command, + arguments.as_deref(), + cx, + ) + .await + .context(format!( + "failed to format via external command {:?}", + command + ))? 
+ .map(FormatOperation::External) } Formatter::CodeActions(code_actions) => { let code_actions = deserialize_code_actions(code_actions); @@ -571,7 +577,7 @@ impl LocalLspStore { buffer: &Model, buffer_abs_path: Option<&Path>, command: &str, - arguments: &[String], + arguments: Option<&[String]>, cx: &mut AsyncAppContext, ) -> Result> { let working_dir_path = buffer.update(cx, |buffer, cx| { @@ -595,14 +601,17 @@ impl LocalLspStore { child.current_dir(working_dir_path); } - let mut child = child - .args(arguments.iter().map(|arg| { + if let Some(arguments) = arguments { + child.args(arguments.iter().map(|arg| { if let Some(buffer_abs_path) = buffer_abs_path { arg.replace("{buffer_path}", &buffer_abs_path.to_string_lossy()) } else { arg.replace("{buffer_path}", "Untitled") } - })) + })); + } + + let mut child = child .stdin(smol::process::Stdio::piped()) .stdout(smol::process::Stdio::piped()) .stderr(smol::process::Stdio::piped()) From e39695bf1c16b298552336fc2c063f4a88a46a45 Mon Sep 17 00:00:00 2001 From: Maksim Bondarenkov <119937608+ognevny@users.noreply.github.com> Date: Mon, 30 Sep 2024 09:38:49 +0300 Subject: [PATCH 132/228] docs: Update msys2 section in development/windows (#18385) merge after https://packages.msys2.org/packages/mingw-w64-clang-x86_64-zed is available. alternatively you can check the [queue](https://packages.msys2.org/queue) for build status Zed now compiles and runs under msys2/CLANG64 environment, so change the docs to give the users a choice of their environment Release Notes: - N/A --- docs/src/development/windows.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/src/development/windows.md b/docs/src/development/windows.md index 86eb21965e550..67808186d1961 100644 --- a/docs/src/development/windows.md +++ b/docs/src/development/windows.md @@ -63,16 +63,16 @@ cargo test --workspace ## Installing from msys2 -[MSYS2](https://msys2.org/) distribution provides Zed as a package [mingw-w64-zed](https://packages.msys2.org/base/mingw-w64-zed). To download the prebuilt binary, run +[MSYS2](https://msys2.org/) distribution provides Zed as a package [mingw-w64-zed](https://packages.msys2.org/base/mingw-w64-zed). The package is available for UCRT64 and CLANG64. To download it, run ```sh pacman -Syu -pacman -S mingw-w64-ucrt-x86_64-zed +pacman -S $MINGW_PACKAGE_PREFIX-zed ``` -then you can run `zed` in a UCRT64 shell. +then you can run `zed` in a shell. -You can see the [build script](https://github.com/msys2/MINGW-packages/blob/master/mingw-w64-zed/PKGBUILD) for more details. +You can see the [build script](https://github.com/msys2/MINGW-packages/blob/master/mingw-w64-zed/PKGBUILD) for more details on build process. > Please, report any issue in [msys2/MINGW-packages/issues](https://github.com/msys2/MINGW-packages/issues?q=is%3Aissue+is%3Aopen+zed) first. 
From 5b40debb5f042a1d4230ceb5f012e7d09a785fb2 Mon Sep 17 00:00:00 2001 From: Stanislav Alekseev <43210583+WeetHet@users.noreply.github.com> Date: Mon, 30 Sep 2024 09:54:22 +0300 Subject: [PATCH 133/228] Don't stop loading the env if direnv call fails (#18473) Before this we we would stop loading the environment if the call to direnv failed, which is not necessary in any way cc @mrnugget Release Notes: - Fixed the environment not loading if `direnv` mode is set to `direct` and `.envrc` is not allowed --- crates/project/src/environment.rs | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/crates/project/src/environment.rs b/crates/project/src/environment.rs index 641ad206c05b2..23d23c9dc6bb0 100644 --- a/crates/project/src/environment.rs +++ b/crates/project/src/environment.rs @@ -198,8 +198,9 @@ async fn load_shell_environment( anyhow::ensure!( direnv_output.status.success(), - "direnv exited with error {:?}", - direnv_output.status + "direnv exited with error {:?}. Stderr:\n{}", + direnv_output.status, + String::from_utf8_lossy(&direnv_output.stderr) ); let output = String::from_utf8_lossy(&direnv_output.stdout); @@ -214,7 +215,7 @@ async fn load_shell_environment( let direnv_environment = match load_direnv { DirenvSettings::ShellHook => None, - DirenvSettings::Direct => load_direnv_environment(dir).await?, + DirenvSettings::Direct => load_direnv_environment(dir).await.log_err().flatten(), } .unwrap_or(HashMap::default()); From 898d48a57497011f6b96c98ddee35b511398fbd9 Mon Sep 17 00:00:00 2001 From: Sylvain Brunerie Date: Mon, 30 Sep 2024 09:02:12 +0200 Subject: [PATCH 134/228] php: Add syntax highlighting inside heredoc strings (#18368) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PHP heredoc strings make it easy to define string literals over multiple lines: ```php $someString = << Date: Mon, 30 Sep 2024 10:09:13 +0300 Subject: [PATCH 135/228] gpui: Fix blur region on Plasma/Wayland (#18465) Once again aping after what winit does - since we always want to have the whole window blurred there is apparently no need to specify a blur region at all. Rounded corners would be the exception, but that is not possible with the current protocol (it is planned for the vendor-neutral version though!) This eliminates the problem where only a fixed region of the window would get blurred if the window was resized to be larger than at launch. Also a drive-by comment grammar fix :wink: Release Notes: - Fixed blur region handling on Plasma/Wayland --- crates/gpui/src/platform/linux/wayland/window.rs | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/crates/gpui/src/platform/linux/wayland/window.rs b/crates/gpui/src/platform/linux/wayland/window.rs index a0520f6b7056e..329b6c185fbfe 100644 --- a/crates/gpui/src/platform/linux/wayland/window.rs +++ b/crates/gpui/src/platform/linux/wayland/window.rs @@ -1046,8 +1046,8 @@ fn update_window(mut state: RefMut) { && state.decorations == WindowDecorations::Server { // Promise the compositor that this region of the window surface - // contains no transparent pixels. This allows the compositor to - // do skip whatever is behind the surface for better performance. + // contains no transparent pixels. This allows the compositor to skip + // updating whatever is behind the surface for better performance. 
state.surface.set_opaque_region(Some(®ion)); } else { state.surface.set_opaque_region(None); @@ -1057,7 +1057,6 @@ fn update_window(mut state: RefMut) { if state.background_appearance == WindowBackgroundAppearance::Blurred { if state.blur.is_none() { let blur = blur_manager.create(&state.surface, &state.globals.qh, ()); - blur.set_region(Some(®ion)); state.blur = Some(blur); } state.blur.as_ref().unwrap().commit(); From ed5eb725f93305642f02b10676ce56c8a7779d5d Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Mon, 30 Sep 2024 11:25:11 +0300 Subject: [PATCH 136/228] Improve language server log view split ergonomics (#18527) Allows to split log view, and opens it split on the right, same as the syntax tree view. Release Notes: - Improved language server log panel split ergonomics --- crates/language_tools/src/lsp_log.rs | 28 ++++++++++++++++++++++++---- 1 file changed, 24 insertions(+), 4 deletions(-) diff --git a/crates/language_tools/src/lsp_log.rs b/crates/language_tools/src/lsp_log.rs index d8fe3aa51840e..aee39ff0a088e 100644 --- a/crates/language_tools/src/lsp_log.rs +++ b/crates/language_tools/src/lsp_log.rs @@ -17,7 +17,7 @@ use ui::{prelude::*, Button, Checkbox, ContextMenu, Label, PopoverMenu, Selectio use workspace::{ item::{Item, ItemHandle}, searchable::{SearchEvent, SearchableItem, SearchableItemHandle}, - ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView, Workspace, + SplitDirection, ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView, Workspace, WorkspaceId, }; const SEND_LINE: &str = "// Send:"; @@ -194,12 +194,11 @@ pub fn init(cx: &mut AppContext) { workspace.register_action(move |workspace, _: &OpenLanguageServerLogs, cx| { let project = workspace.project().read(cx); if project.is_local() { - workspace.add_item_to_active_pane( + workspace.split_item( + SplitDirection::Right, Box::new(cx.new_view(|cx| { LspLogView::new(workspace.project().clone(), log_store.clone(), cx) })), - None, - true, cx, ); } @@ -912,6 +911,27 @@ impl Item for LspLogView { fn as_searchable(&self, handle: &View) -> Option> { Some(Box::new(handle.clone())) } + + fn clone_on_split( + &self, + _workspace_id: Option, + cx: &mut ViewContext, + ) -> Option> + where + Self: Sized, + { + Some(cx.new_view(|cx| { + let mut new_view = Self::new(self.project.clone(), self.log_store.clone(), cx); + if let Some(server_id) = self.current_server_id { + match self.active_entry_kind { + LogKind::Rpc => new_view.show_rpc_trace_for_server(server_id, cx), + LogKind::Trace => new_view.show_trace_for_server(server_id, cx), + LogKind::Logs => new_view.show_logs_for_server(server_id, cx), + } + } + new_view + })) + } } impl SearchableItem for LspLogView { From 1f72069b42543d380be892c415b7433519375771 Mon Sep 17 00:00:00 2001 From: VacheDesNeiges <33199153+VacheDesNeiges@users.noreply.github.com> Date: Mon, 30 Sep 2024 10:27:30 +0200 Subject: [PATCH 137/228] Improve C++ Tree-sitter queries (#18016) I made a few tree-sitter queries for improving the highlighting of C++. There is one query that I'm not totally certain about and would appreciate some feedback on it, the one that concerns attributes. Many editor only highlight the identifier as a keyword (This is the behavior implemented in this commit), while others, for example the tree-sitter plugin for neovim, tags the entire attribute for highlighting (double brackets included). I don't know which one is preferable. 
Here are screenshots of the two versions: ![image](https://github.com/user-attachments/assets/4e1b92c8-adc7-4900-a5b1-dc43c98f4c67) ![image](https://github.com/user-attachments/assets/290a13e3-5cb3-45cb-b6d9-3dc3e6a8af2d) Release Notes: - Fixed C++ attributes identifiers being wrongly highlighed through the tag "variable" - C++ attribute identifiers (nodiscard,deprecated, noreturn, etc.. ) are now highlighted through the tag "keyword" - Changed C++ primitives types (void, bool, int, size_t, etc.. ) to no longer be highlighted with the tag "keyword", they can now be highlighted by the tag "type.primitive". - Added a tag "concept" for highlighting C++ concept identifiers. (This tag name has been chosen to be the same than the one returned by clangd's semantic tokens) --- crates/languages/src/cpp/highlights.scm | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/crates/languages/src/cpp/highlights.scm b/crates/languages/src/cpp/highlights.scm index 4c9db569285e3..2df9ec2923be3 100644 --- a/crates/languages/src/cpp/highlights.scm +++ b/crates/languages/src/cpp/highlights.scm @@ -2,6 +2,10 @@ (field_identifier) @property (namespace_identifier) @namespace +(concept_definition + (identifier) @concept) + + (call_expression function: (qualified_identifier name: (identifier) @function)) @@ -64,6 +68,14 @@ (auto) @type (type_identifier) @type +type :(primitive_type) @type.primitive + +(requires_clause + constraint: (template_type + name: (type_identifier) @concept)) + +(attribute + name: (identifier) @keyword) ((identifier) @constant (#match? @constant "^_*[A-Z][A-Z\\d_]*$")) @@ -119,7 +131,6 @@ "using" "virtual" "while" - (primitive_type) (sized_type_specifier) (storage_class_specifier) (type_qualifier) From 707ccb04d2eb15129dce19e12388c1e5321aef57 Mon Sep 17 00:00:00 2001 From: Patrick MARIE Date: Mon, 30 Sep 2024 10:27:47 +0200 Subject: [PATCH 138/228] Restore paste on middle-click on linux (#18503) This is a partial revert of e6c1c51b37a, which removed the middle-click pasting on linux (both x11 & wayland). It also restores the `middle_click_paste` option behavior which became unexistent. Release Notes: - Restore Linux middle-click pasting. 
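For anyone looking for the knob this restores: the change below reads `middle_click_paste` from `EditorSettings`, so it should be possible to opt out via `settings.json`. A minimal sketch, assuming the option keeps its current top-level name and boolean form:

```json
{
  // Assumed spelling, taken from `EditorSettings::get_global(cx).middle_click_paste`
  // in the diff below; set to false to keep middle-click from pasting the
  // primary selection on Linux.
  "middle_click_paste": false
}
```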
--- crates/editor/src/element.rs | 29 ++++++++++++++++++++++++----- 1 file changed, 24 insertions(+), 5 deletions(-) diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 1c35fa6bcdd4e..98a5ff7f4dff5 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -636,11 +636,30 @@ impl EditorElement { cx.stop_propagation(); } else if end_selection && pending_nonempty_selections { cx.stop_propagation(); - } else if cfg!(target_os = "linux") - && event.button == MouseButton::Middle - && (!text_hitbox.is_hovered(cx) || editor.read_only(cx)) - { - return; + } else if cfg!(target_os = "linux") && event.button == MouseButton::Middle { + if !text_hitbox.is_hovered(cx) || editor.read_only(cx) { + return; + } + + #[cfg(target_os = "linux")] + if EditorSettings::get_global(cx).middle_click_paste { + if let Some(text) = cx.read_from_primary().and_then(|item| item.text()) { + let point_for_position = + position_map.point_for_position(text_hitbox.bounds, event.position); + let position = point_for_position.previous_valid; + + editor.select( + SelectPhase::Begin { + position, + add: false, + click_count: 1, + }, + cx, + ); + editor.insert(&text, cx); + } + cx.stop_propagation() + } } } From 57ad5778fad8cbdeada7815ca9c58fec63db90d8 Mon Sep 17 00:00:00 2001 From: 0hDEADBEAF <0xdeadbeaf@tutamail.com> Date: Mon, 30 Sep 2024 10:34:44 +0200 Subject: [PATCH 139/228] Add a way to explicitly specify RC toolkit path (#18402) Closes #18393 Release Notes: - Added a `ZED_RC_TOOLKIT_PATH` env variable so `winresource` crate can fetch the RC executable path correctly on some configurations --- crates/zed/build.rs | 7 +++++++ docs/src/development/windows.md | 27 +++++++++++++++++++++++++++ 2 files changed, 34 insertions(+) diff --git a/crates/zed/build.rs b/crates/zed/build.rs index c0d5c418750a4..3013773f91834 100644 --- a/crates/zed/build.rs +++ b/crates/zed/build.rs @@ -56,6 +56,13 @@ fn main() { println!("cargo:rerun-if-changed={}", icon.display()); let mut res = winresource::WindowsResource::new(); + + // Depending on the security applied to the computer, winresource might fail + // fetching the RC path. Therefore, we add a way to explicitly specify the + // toolkit path, allowing winresource to use a valid RC path. + if let Some(explicit_rc_toolkit_path) = std::env::var("ZED_RC_TOOLKIT_PATH").ok() { + res.set_toolkit_path(explicit_rc_toolkit_path.as_str()); + } res.set_icon(icon.to_str().unwrap()); res.set("FileDescription", "Zed"); res.set("ProductName", "Zed"); diff --git a/docs/src/development/windows.md b/docs/src/development/windows.md index 67808186d1961..03e8cae66b86d 100644 --- a/docs/src/development/windows.md +++ b/docs/src/development/windows.md @@ -93,3 +93,30 @@ This error can happen if you are using the "rust-lld.exe" linker. Consider tryin If you are using a global config, consider moving the Zed repository to a nested directory and add a `.cargo/config.toml` with a custom linker config in the parent directory. 
See this issue for more information [#12041](https://github.com/zed-industries/zed/issues/12041) + +### Invalid RC path selected + +Sometimes, depending on the security rules applied to your laptop, you may get the following error while compiling Zed: + +``` +error: failed to run custom build command for `zed(C:\Users\USER\src\zed\crates\zed)` + +Caused by: + process didn't exit successfully: `C:\Users\USER\src\zed\target\debug\build\zed-b24f1e9300107efc\build-script-build` (exit code: 1) + --- stdout + cargo:rerun-if-changed=../../.git/logs/HEAD + cargo:rustc-env=ZED_COMMIT_SHA=25e2e9c6727ba9b77415588cfa11fd969612adb7 + cargo:rustc-link-arg=/stack:8388608 + cargo:rerun-if-changed=resources/windows/app-icon.ico + package.metadata.winresource does not exist + Selected RC path: 'bin\x64\rc.exe' + + --- stderr + The system cannot find the path specified. (os error 3) +warning: build failed, waiting for other jobs to finish... +``` + +In order to fix this issue, you can manually set the `ZED_RC_TOOLKIT_PATH` environment variable to the RC toolkit path. Usually, you can set it to: +`C:\Program Files (x86)\Windows Kits\10\bin\\x64`. + +See this [issue](https://github.com/zed-industries/zed/issues/18393) for more information. From 533416c5a96d3201dfd5e639a6a2d8fc9798a6fb Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Mon, 30 Sep 2024 12:38:57 +0200 Subject: [PATCH 140/228] terminal: Make CursorShape configurable (#18530) This builds on top of @Yevgen's #15840 and combines it with the settings names introduced in #17572. Closes #4731. Release Notes: - Added a setting for the terminal's default cursor shape. The setting is `{"terminal": {"cursor_shape": "block"}}``. Possible values: `block`, `bar`, `hollow`, `underline`. Demo: https://github.com/user-attachments/assets/96ed28c2-c222-436b-80cb-7cd63eeb47dd --- assets/settings/default.json | 12 +++++++ crates/project/src/terminals.rs | 1 + crates/terminal/src/terminal.rs | 13 +++++-- crates/terminal/src/terminal_settings.rs | 43 +++++++++++++++++++++++ crates/terminal_view/src/terminal_view.rs | 23 ++++++++++-- 5 files changed, 88 insertions(+), 4 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index 2a11a85d4efae..ba95c2cfcdaaa 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -671,6 +671,18 @@ // 3. Always blink the cursor, ignoring the terminal mode // "blinking": "on", "blinking": "terminal_controlled", + // Default cursor shape for the terminal. + // 1. A block that surrounds the following character + // "block" + // 2. A vertical bar + // "bar" + // 3. An underline that runs along the following character + // "underscore" + // 4. A box drawn around the following character + // "hollow" + // + // Default: not set, defaults to "block" + "cursor_shape": null, // Set whether Alternate Scroll mode (code: ?1007) is active by default. // Alternate Scroll mode converts mouse scroll events into up / down key // presses when in the alternate screen (e.g. 
when running applications diff --git a/crates/project/src/terminals.rs b/crates/project/src/terminals.rs index 136842d158604..ababb3261b337 100644 --- a/crates/project/src/terminals.rs +++ b/crates/project/src/terminals.rs @@ -216,6 +216,7 @@ impl Project { shell, env, Some(settings.blinking), + settings.cursor_shape.unwrap_or_default(), settings.alternate_scroll, settings.max_scroll_history_lines, window, diff --git a/crates/terminal/src/terminal.rs b/crates/terminal/src/terminal.rs index 8f8982f02c470..f9767b07d1261 100644 --- a/crates/terminal/src/terminal.rs +++ b/crates/terminal/src/terminal.rs @@ -18,7 +18,9 @@ use alacritty_terminal::{ Config, RenderableCursor, TermMode, }, tty::{self}, - vte::ansi::{ClearMode, Handler, NamedPrivateMode, PrivateMode}, + vte::ansi::{ + ClearMode, CursorStyle as AlacCursorStyle, Handler, NamedPrivateMode, PrivateMode, + }, Term, }; use anyhow::{bail, Result}; @@ -40,7 +42,7 @@ use serde::{Deserialize, Serialize}; use settings::Settings; use smol::channel::{Receiver, Sender}; use task::{HideStrategy, Shell, TaskId}; -use terminal_settings::{AlternateScroll, TerminalBlink, TerminalSettings}; +use terminal_settings::{AlternateScroll, CursorShape, TerminalBlink, TerminalSettings}; use theme::{ActiveTheme, Theme}; use util::truncate_and_trailoff; @@ -314,6 +316,7 @@ impl TerminalBuilder { shell: Shell, mut env: HashMap, blink_settings: Option, + cursor_shape: CursorShape, alternate_scroll: AlternateScroll, max_scroll_history_lines: Option, window: AnyWindowHandle, @@ -353,6 +356,7 @@ impl TerminalBuilder { // Setup Alacritty's env, which modifies the current process's environment alacritty_terminal::tty::setup_env(); + let default_cursor_style = AlacCursorStyle::from(cursor_shape); let scrolling_history = if task.is_some() { // Tasks like `cargo build --all` may produce a lot of output, ergo allow maximum scrolling. // After the task finishes, we do not allow appending to that terminal, so small tasks output should not @@ -365,6 +369,7 @@ impl TerminalBuilder { }; let config = Config { scrolling_history, + default_cursor_style, ..Config::default() }; @@ -951,6 +956,10 @@ impl Terminal { &self.last_content } + pub fn set_cursor_shape(&mut self, cursor_shape: CursorShape) { + self.term.lock().set_cursor_style(Some(cursor_shape.into())); + } + pub fn total_lines(&self) -> usize { let term = self.term.clone(); let terminal = term.lock_unfair(); diff --git a/crates/terminal/src/terminal_settings.rs b/crates/terminal/src/terminal_settings.rs index 4051caf864d25..956cde19d90ed 100644 --- a/crates/terminal/src/terminal_settings.rs +++ b/crates/terminal/src/terminal_settings.rs @@ -1,3 +1,6 @@ +use alacritty_terminal::vte::ansi::{ + CursorShape as AlacCursorShape, CursorStyle as AlacCursorStyle, +}; use collections::HashMap; use gpui::{ px, AbsoluteLength, AppContext, FontFallbacks, FontFeatures, FontWeight, Pixels, SharedString, @@ -32,6 +35,7 @@ pub struct TerminalSettings { pub font_weight: Option, pub line_height: TerminalLineHeight, pub env: HashMap, + pub cursor_shape: Option, pub blinking: TerminalBlink, pub alternate_scroll: AlternateScroll, pub option_as_meta: bool, @@ -129,6 +133,11 @@ pub struct TerminalSettingsContent { /// /// Default: {} pub env: Option>, + /// Default cursor shape for the terminal. + /// Can be "bar", "block", "underscore", or "hollow". + /// + /// Default: None + pub cursor_shape: Option, /// Sets the cursor blinking behavior in the terminal. 
/// /// Default: terminal_controlled @@ -282,3 +291,37 @@ pub struct ToolbarContent { /// Default: true pub title: Option, } + +#[derive(Clone, Copy, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)] +#[serde(rename_all = "snake_case")] +pub enum CursorShape { + /// Cursor is a block like `█`. + #[default] + Block, + /// Cursor is an underscore like `_`. + Underline, + /// Cursor is a vertical bar like `⎸`. + Bar, + /// Cursor is a hollow box like `▯`. + Hollow, +} + +impl From for AlacCursorShape { + fn from(value: CursorShape) -> Self { + match value { + CursorShape::Block => AlacCursorShape::Block, + CursorShape::Underline => AlacCursorShape::Underline, + CursorShape::Bar => AlacCursorShape::Beam, + CursorShape::Hollow => AlacCursorShape::HollowBlock, + } + } +} + +impl From for AlacCursorStyle { + fn from(value: CursorShape) -> Self { + AlacCursorStyle { + shape: value.into(), + blinking: false, + } + } +} diff --git a/crates/terminal_view/src/terminal_view.rs b/crates/terminal_view/src/terminal_view.rs index e0b92035d107b..f7b38e3f5ca3e 100644 --- a/crates/terminal_view/src/terminal_view.rs +++ b/crates/terminal_view/src/terminal_view.rs @@ -19,7 +19,7 @@ use terminal::{ index::Point, term::{search::RegexSearch, TermMode}, }, - terminal_settings::{TerminalBlink, TerminalSettings, WorkingDirectory}, + terminal_settings::{CursorShape, TerminalBlink, TerminalSettings, WorkingDirectory}, Clear, Copy, Event, MaybeNavigationTarget, Paste, ScrollLineDown, ScrollLineUp, ScrollPageDown, ScrollPageUp, ScrollToBottom, ScrollToTop, ShowCharacterPalette, TaskStatus, Terminal, TerminalSize, @@ -102,6 +102,7 @@ pub struct TerminalView { //Currently using iTerm bell, show bell emoji in tab until input is received has_bell: bool, context_menu: Option<(View, gpui::Point, Subscription)>, + cursor_shape: CursorShape, blink_state: bool, blinking_on: bool, blinking_paused: bool, @@ -171,6 +172,9 @@ impl TerminalView { let focus_out = cx.on_focus_out(&focus_handle, |terminal_view, _event, cx| { terminal_view.focus_out(cx); }); + let cursor_shape = TerminalSettings::get_global(cx) + .cursor_shape + .unwrap_or_default(); Self { terminal, @@ -178,6 +182,7 @@ impl TerminalView { has_bell: false, focus_handle, context_menu: None, + cursor_shape, blink_state: true, blinking_on: false, blinking_paused: false, @@ -255,6 +260,16 @@ impl TerminalView { fn settings_changed(&mut self, cx: &mut ViewContext) { let settings = TerminalSettings::get_global(cx); self.show_title = settings.toolbar.title; + + let new_cursor_shape = settings.cursor_shape.unwrap_or_default(); + let old_cursor_shape = self.cursor_shape; + if old_cursor_shape != new_cursor_shape { + self.cursor_shape = new_cursor_shape; + self.terminal.update(cx, |term, _| { + term.set_cursor_shape(self.cursor_shape); + }); + } + cx.notify(); } @@ -903,7 +918,10 @@ impl TerminalView { } fn focus_in(&mut self, cx: &mut ViewContext) { - self.terminal.read(cx).focus_in(); + self.terminal.update(cx, |terminal, _| { + terminal.set_cursor_shape(self.cursor_shape); + terminal.focus_in(); + }); self.blink_cursors(self.blink_epoch, cx); cx.invalidate_character_coordinates(); cx.notify(); @@ -912,6 +930,7 @@ impl TerminalView { fn focus_out(&mut self, cx: &mut ViewContext) { self.terminal.update(cx, |terminal, _| { terminal.focus_out(); + terminal.set_cursor_shape(CursorShape::Hollow); }); cx.notify(); } From 65f6a7e5bc56daa33470fcfdcfb5fa7080c18b13 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Mon, 30 Sep 2024 12:39:11 +0200 Subject: [PATCH 141/228] 
linux/x11: Give title bar inactive bg on mouse down (#18529) This fixes something that I felt was off for a while. Previously, when you'd click on the titlebar to move the window, the titlebar would only change its background once the moving starts, but not on mouse-down. That felt really off, since the moving is down with mouse-down and move, so I think giving the user feedback about the mouse-down event makes more sense. I know there's a subjectivity to this change, so I'm ready to hear other opinions, but for now I want to go with this. Release Notes: - N/A --- crates/title_bar/src/title_bar.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/title_bar/src/title_bar.rs b/crates/title_bar/src/title_bar.rs index 73a82e9ee0620..d6cc839cfdb7b 100644 --- a/crates/title_bar/src/title_bar.rs +++ b/crates/title_bar/src/title_bar.rs @@ -76,7 +76,7 @@ impl Render for TitleBar { let supported_controls = cx.window_controls(); let decorations = cx.window_decorations(); let titlebar_color = if cfg!(target_os = "linux") { - if cx.is_window_active() { + if cx.is_window_active() && !self.should_move { cx.theme().colors().title_bar_background } else { cx.theme().colors().title_bar_inactive_background From 8ae74bc6df9f4480d70202a0dbc4d975d25158af Mon Sep 17 00:00:00 2001 From: wannacu <76616478+wannacu@users.noreply.github.com> Date: Mon, 30 Sep 2024 18:57:59 +0800 Subject: [PATCH 142/228] gpui: Fix pre-edit position after applying scale factor (#18214) before: ![image](https://github.com/user-attachments/assets/20590089-3333-4ca8-a371-b07acfbe43f9) after: ![image](https://github.com/user-attachments/assets/2d25623e-0602-4d24-b563-64e1d2ec3492) Release Notes: - N/A --- crates/gpui/src/geometry.rs | 6 ++++++ crates/gpui/src/platform.rs | 6 +++--- crates/gpui/src/platform/linux/wayland/client.rs | 4 ++-- crates/gpui/src/platform/linux/wayland/window.rs | 4 ++-- crates/gpui/src/platform/linux/x11/client.rs | 5 +++-- crates/gpui/src/platform/linux/x11/window.rs | 8 ++++---- crates/gpui/src/platform/mac/window.rs | 7 ++++--- crates/gpui/src/platform/test/window.rs | 6 +++--- crates/gpui/src/platform/windows/window.rs | 2 +- crates/gpui/src/window.rs | 4 +++- 10 files changed, 31 insertions(+), 21 deletions(-) diff --git a/crates/gpui/src/geometry.rs b/crates/gpui/src/geometry.rs index 8de9e6f009d73..9b9b169804391 100644 --- a/crates/gpui/src/geometry.rs +++ b/crates/gpui/src/geometry.rs @@ -2612,6 +2612,12 @@ impl From for f64 { } } +impl From for u32 { + fn from(pixels: ScaledPixels) -> Self { + pixels.0 as u32 + } +} + /// Represents a length in rems, a unit based on the font-size of the window, which can be assigned with [`WindowContext::set_rem_size`][set_rem_size]. /// /// Rems are used for defining lengths that are scalable and consistent across different UI elements. 
diff --git a/crates/gpui/src/platform.rs b/crates/gpui/src/platform.rs index 680c813078b86..b5d6ae312783f 100644 --- a/crates/gpui/src/platform.rs +++ b/crates/gpui/src/platform.rs @@ -23,8 +23,8 @@ use crate::{ point, Action, AnyWindowHandle, AppContext, AsyncWindowContext, BackgroundExecutor, Bounds, DevicePixels, DispatchEventResult, Font, FontId, FontMetrics, FontRun, ForegroundExecutor, GPUSpecs, GlyphId, ImageSource, Keymap, LineLayout, Pixels, PlatformInput, Point, - RenderGlyphParams, RenderImage, RenderImageParams, RenderSvgParams, Scene, SharedString, Size, - SvgSize, Task, TaskLabel, WindowContext, DEFAULT_WINDOW_SIZE, + RenderGlyphParams, RenderImage, RenderImageParams, RenderSvgParams, ScaledPixels, Scene, + SharedString, Size, SvgSize, Task, TaskLabel, WindowContext, DEFAULT_WINDOW_SIZE, }; use anyhow::Result; use async_task::Runnable; @@ -381,7 +381,7 @@ pub(crate) trait PlatformWindow: HasWindowHandle + HasDisplayHandle { fn set_client_inset(&self, _inset: Pixels) {} fn gpu_specs(&self) -> Option; - fn update_ime_position(&self, _bounds: Bounds); + fn update_ime_position(&self, _bounds: Bounds); #[cfg(any(test, feature = "test-support"))] fn as_test(&mut self) -> Option<&mut TestWindow> { diff --git a/crates/gpui/src/platform/linux/wayland/client.rs b/crates/gpui/src/platform/linux/wayland/client.rs index 57c43a7e46a0c..ba4971b63af6d 100644 --- a/crates/gpui/src/platform/linux/wayland/client.rs +++ b/crates/gpui/src/platform/linux/wayland/client.rs @@ -84,7 +84,7 @@ use crate::{ use crate::{ AnyWindowHandle, CursorStyle, DisplayId, KeyDownEvent, KeyUpEvent, Keystroke, Modifiers, ModifiersChangedEvent, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, - NavigationDirection, Pixels, PlatformDisplay, PlatformInput, Point, ScrollDelta, + NavigationDirection, Pixels, PlatformDisplay, PlatformInput, Point, ScaledPixels, ScrollDelta, ScrollWheelEvent, TouchPhase, }; use crate::{LinuxCommon, WindowParams}; @@ -313,7 +313,7 @@ impl WaylandClientStatePtr { } } - pub fn update_ime_position(&self, bounds: Bounds) { + pub fn update_ime_position(&self, bounds: Bounds) { let client = self.get_client(); let mut state = client.borrow_mut(); if state.composing || state.text_input.is_none() || state.pre_edit_text.is_some() { diff --git a/crates/gpui/src/platform/linux/wayland/window.rs b/crates/gpui/src/platform/linux/wayland/window.rs index 329b6c185fbfe..1ca358edecd0a 100644 --- a/crates/gpui/src/platform/linux/wayland/window.rs +++ b/crates/gpui/src/platform/linux/wayland/window.rs @@ -26,7 +26,7 @@ use crate::platform::{PlatformAtlas, PlatformInputHandler, PlatformWindow}; use crate::scene::Scene; use crate::{ px, size, AnyWindowHandle, Bounds, Decorations, GPUSpecs, Globals, Modifiers, Output, Pixels, - PlatformDisplay, PlatformInput, Point, PromptLevel, ResizeEdge, Size, Tiling, + PlatformDisplay, PlatformInput, Point, PromptLevel, ResizeEdge, ScaledPixels, Size, Tiling, WaylandClientStatePtr, WindowAppearance, WindowBackgroundAppearance, WindowBounds, WindowControls, WindowDecorations, WindowParams, }; @@ -1010,7 +1010,7 @@ impl PlatformWindow for WaylandWindow { } } - fn update_ime_position(&self, bounds: Bounds) { + fn update_ime_position(&self, bounds: Bounds) { let state = self.borrow(); state.client.update_ime_position(bounds); } diff --git a/crates/gpui/src/platform/linux/x11/client.rs b/crates/gpui/src/platform/linux/x11/client.rs index d6c806236a9e4..7f5342a50360e 100644 --- a/crates/gpui/src/platform/linux/x11/client.rs +++ 
b/crates/gpui/src/platform/linux/x11/client.rs @@ -38,7 +38,8 @@ use crate::platform::{LinuxCommon, PlatformWindow}; use crate::{ modifiers_from_xinput_info, point, px, AnyWindowHandle, Bounds, ClipboardItem, CursorStyle, DisplayId, FileDropEvent, Keystroke, Modifiers, ModifiersChangedEvent, Pixels, Platform, - PlatformDisplay, PlatformInput, Point, ScrollDelta, Size, TouchPhase, WindowParams, X11Window, + PlatformDisplay, PlatformInput, Point, ScaledPixels, ScrollDelta, Size, TouchPhase, + WindowParams, X11Window, }; use super::{button_of_key, modifiers_from_state, pressed_button_from_mask}; @@ -188,7 +189,7 @@ impl X11ClientStatePtr { } } - pub fn update_ime_position(&self, bounds: Bounds) { + pub fn update_ime_position(&self, bounds: Bounds) { let client = self.get_client(); let mut state = client.0.borrow_mut(); if state.composing || state.ximc.is_none() { diff --git a/crates/gpui/src/platform/linux/x11/window.rs b/crates/gpui/src/platform/linux/x11/window.rs index b0cf82d605fff..62b895d01f426 100644 --- a/crates/gpui/src/platform/linux/x11/window.rs +++ b/crates/gpui/src/platform/linux/x11/window.rs @@ -4,9 +4,9 @@ use crate::{ platform::blade::{BladeRenderer, BladeSurfaceConfig}, px, size, AnyWindowHandle, Bounds, Decorations, DevicePixels, ForegroundExecutor, GPUSpecs, Modifiers, Pixels, PlatformAtlas, PlatformDisplay, PlatformInput, PlatformInputHandler, - PlatformWindow, Point, PromptLevel, ResizeEdge, Scene, Size, Tiling, WindowAppearance, - WindowBackgroundAppearance, WindowBounds, WindowDecorations, WindowKind, WindowParams, - X11ClientStatePtr, + PlatformWindow, Point, PromptLevel, ResizeEdge, ScaledPixels, Scene, Size, Tiling, + WindowAppearance, WindowBackgroundAppearance, WindowBounds, WindowDecorations, WindowKind, + WindowParams, X11ClientStatePtr, }; use blade_graphics as gpu; @@ -1412,7 +1412,7 @@ impl PlatformWindow for X11Window { } } - fn update_ime_position(&self, bounds: Bounds) { + fn update_ime_position(&self, bounds: Bounds) { let mut state = self.0.state.borrow_mut(); let client = state.client.clone(); drop(state); diff --git a/crates/gpui/src/platform/mac/window.rs b/crates/gpui/src/platform/mac/window.rs index bf8369069731f..885c3565ccc89 100644 --- a/crates/gpui/src/platform/mac/window.rs +++ b/crates/gpui/src/platform/mac/window.rs @@ -3,8 +3,9 @@ use crate::{ platform::PlatformInputHandler, point, px, size, AnyWindowHandle, Bounds, DisplayLink, ExternalPaths, FileDropEvent, ForegroundExecutor, KeyDownEvent, Keystroke, Modifiers, ModifiersChangedEvent, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, Pixels, - PlatformAtlas, PlatformDisplay, PlatformInput, PlatformWindow, Point, PromptLevel, Size, Timer, - WindowAppearance, WindowBackgroundAppearance, WindowBounds, WindowKind, WindowParams, + PlatformAtlas, PlatformDisplay, PlatformInput, PlatformWindow, Point, PromptLevel, + ScaledPixels, Size, Timer, WindowAppearance, WindowBackgroundAppearance, WindowBounds, + WindowKind, WindowParams, }; use block::ConcreteBlock; use cocoa::{ @@ -1119,7 +1120,7 @@ impl PlatformWindow for MacWindow { None } - fn update_ime_position(&self, _bounds: Bounds) { + fn update_ime_position(&self, _bounds: Bounds) { unsafe { let input_context: id = msg_send![class!(NSTextInputContext), currentInputContext]; let _: () = msg_send![input_context, invalidateCharacterCoordinates]; diff --git a/crates/gpui/src/platform/test/window.rs b/crates/gpui/src/platform/test/window.rs index 1464dd8e73b38..ebbf6ecc12b46 100644 --- a/crates/gpui/src/platform/test/window.rs +++ 
b/crates/gpui/src/platform/test/window.rs @@ -1,8 +1,8 @@ use crate::{ AnyWindowHandle, AtlasKey, AtlasTextureId, AtlasTile, Bounds, DispatchEventResult, GPUSpecs, Pixels, PlatformAtlas, PlatformDisplay, PlatformInput, PlatformInputHandler, PlatformWindow, - Point, Size, TestPlatform, TileId, WindowAppearance, WindowBackgroundAppearance, WindowBounds, - WindowParams, + Point, ScaledPixels, Size, TestPlatform, TileId, WindowAppearance, WindowBackgroundAppearance, + WindowBounds, WindowParams, }; use collections::HashMap; use parking_lot::Mutex; @@ -274,7 +274,7 @@ impl PlatformWindow for TestWindow { unimplemented!() } - fn update_ime_position(&self, _bounds: Bounds) {} + fn update_ime_position(&self, _bounds: Bounds) {} fn gpu_specs(&self) -> Option { None diff --git a/crates/gpui/src/platform/windows/window.rs b/crates/gpui/src/platform/windows/window.rs index e2cfb38afd4df..d5ea3be6cac5e 100644 --- a/crates/gpui/src/platform/windows/window.rs +++ b/crates/gpui/src/platform/windows/window.rs @@ -685,7 +685,7 @@ impl PlatformWindow for WindowsWindow { Some(self.0.state.borrow().renderer.gpu_specs()) } - fn update_ime_position(&self, _bounds: Bounds) { + fn update_ime_position(&self, _bounds: Bounds) { // todo(windows) } } diff --git a/crates/gpui/src/window.rs b/crates/gpui/src/window.rs index 57066b0ce64c5..af968c5a2c123 100644 --- a/crates/gpui/src/window.rs +++ b/crates/gpui/src/window.rs @@ -3610,7 +3610,9 @@ impl<'a> WindowContext<'a> { self.on_next_frame(|cx| { if let Some(mut input_handler) = cx.window.platform_window.take_input_handler() { if let Some(bounds) = input_handler.selected_bounds(cx) { - cx.window.platform_window.update_ime_position(bounds); + cx.window + .platform_window + .update_ime_position(bounds.scale(cx.scale_factor())); } cx.window.platform_window.set_input_handler(input_handler); } From e64a86ce9fdf2ee72cf8cc6572295bf3b372b7e1 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Mon, 30 Sep 2024 15:28:46 +0300 Subject: [PATCH 143/228] Fix a typo in the multi buffers documentation (#18535) Closes https://github.com/zed-industries/zed/issues/18533 Release Notes: - N/A --- docs/src/multibuffers.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/multibuffers.md b/docs/src/multibuffers.md index 46bfb1d93f7e4..7d9f4cafc4ab3 100644 --- a/docs/src/multibuffers.md +++ b/docs/src/multibuffers.md @@ -31,7 +31,7 @@ To start a search run the `pane: Toggle Search` command (`cmd-shift-f` on macOS, ## Diagnostics -If you have a language server installed, the diagnostics pane can show you all errors across your project. You can open it by clicking on the icon in the status bar, or running the `diagnostcs: Deploy` command` ('cmd-shift-m` on macOS, `ctrl-shift-m` on Windows/Linux, or `:clist` in Vim mode). +If you have a language server installed, the diagnostics pane can show you all errors across your project. You can open it by clicking on the icon in the status bar, or running the `diagnostics: Deploy` command` ('cmd-shift-m` on macOS, `ctrl-shift-m` on Windows/Linux, or `:clist` in Vim mode). ## Find References From 215bce19748652820e01eecff6a1052586f357ae Mon Sep 17 00:00:00 2001 From: Stanislav Alekseev <43210583+WeetHet@users.noreply.github.com> Date: Mon, 30 Sep 2024 16:35:36 +0300 Subject: [PATCH 144/228] Make direct direnv loading default (#18536) I've been running with direct direnv loading for a while now and haven't experienced any significant issues other than #18473. 
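For context, "direct" mode asks direnv itself for the environment instead of relying on a shell hook; conceptually it boils down to running the command the setting description refers to (illustrative invocation only, not the exact code path Zed uses):

```sh
# Print, as JSON, the environment direnv would apply for the current directory.
direnv export json
```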
Making it default would make direnv integration more reliable and consistent. I've also updated the docs a bit to ensure that they represent current status of direnv integration Release Notes: - Made direnv integration use direct (`direnv export json`) mode by default instead of relying on a shell hook, improving consistency and reliability of direnv detection --- assets/settings/default.json | 10 ++++------ crates/project/src/project_settings.rs | 5 +---- docs/src/configuring-zed.md | 6 ++++-- 3 files changed, 9 insertions(+), 12 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index ba95c2cfcdaaa..f6c498e0278e6 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -599,13 +599,11 @@ } }, // Configuration for how direnv configuration should be loaded. May take 2 values: - // 1. Load direnv configuration through the shell hook, works for POSIX shells and fish. - // "load_direnv": "shell_hook" - // 2. Load direnv configuration using `direnv export json` directly. - // This can help with some shells that otherwise would not detect - // the direnv environment, such as nushell or elvish. + // 1. Load direnv configuration using `direnv export json` directly. // "load_direnv": "direct" - "load_direnv": "shell_hook", + // 2. Load direnv configuration through the shell hook, works for POSIX shells and fish. + // "load_direnv": "shell_hook" + "load_direnv": "direct", "inline_completions": { // A list of globs representing files that inline completions should be disabled for. "disabled_globs": [".env"] diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index 706d3afdce45e..d794563672ed0 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -62,12 +62,9 @@ pub struct NodeBinarySettings { #[serde(rename_all = "snake_case")] pub enum DirenvSettings { /// Load direnv configuration through a shell hook - #[default] ShellHook, /// Load direnv configuration directly using `direnv export json` - /// - /// Warning: This option is experimental and might cause some inconsistent behavior compared to using the shell hook. - /// If it does, please report it to GitHub + #[default] Direct, } diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index 18d66708ad7fc..ad6a628ed0880 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -267,12 +267,14 @@ left and right padding of the central pane from the workspace when the centered ## Direnv Integration -- Description: Settings for [direnv](https://direnv.net/) integration. Requires `direnv` to be installed. `direnv` integration currently only means that the environment variables set by a `direnv` configuration can be used to detect some language servers in `$PATH` instead of installing them. +- Description: Settings for [direnv](https://direnv.net/) integration. Requires `direnv` to be installed. + `direnv` integration make it possible to use the environment variables set by a `direnv` configuration to detect some language servers in `$PATH` instead of installing them. + It also allows for those environment variables to be used in tasks. 
- Setting: `load_direnv` - Default: ```json -"load_direnv": "shell_hook" +"load_direnv": "direct" ``` **Options** From 69e698c3be73b6e31605abd317b835f63670172c Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Mon, 30 Sep 2024 15:36:35 +0200 Subject: [PATCH 145/228] terminal: Fix blinking settings & blinking with custom shape (#18538) This is a follow-up to #18530 thanks to this comment here: https://github.com/zed-industries/zed/pull/18530#issuecomment-2382870564 In short: it fixes the `blinking` setting and the `cursor_shape` setting as it relates to blinking. Turns out our `blinking` setting was always the wrong value when using `terminal_controlled` and the terminal _would_ control the blinking. Example script to test with: ```bash echo -e "0 normal \x1b[\x30 q"; sleep 2 echo -e "1 blink block \x1b[\x31 q"; sleep 2 echo -e "2 solid block \x1b[\x32 q"; sleep 2 echo -e "3 blink under \x1b[\x33 q"; sleep 2 echo -e "4 solid under \x1b[\x34 q"; sleep 2 echo -e "5 blink vert \x1b[\x35 q"; sleep 2 echo -e "6 solid vert \x1b[\x36 q"; sleep 2 echo -e "0 normal \x1b[\x30 q"; sleep 2 echo -e "color \x1b]12;#00ff00\x1b\\"; sleep 2 echo -e "reset \x1b]112\x1b\\ \x1b[\x30 q" ``` Before the changes in here, this script would set the cursor shape and the blinking, but the blinking boolean would always be wrong. This change here makes sure that it works consistently: - `terminal.cursor_shape` only controls the *default* shape of the terminal, not the blinking. - `terminal.blinking = on` means that it's *always* blinking, regardless of what terminal programs want - `terminal.blinking = off` means that it's *never* blinking, regardless of what terminal programs want - `terminal.blinking = terminal_controlled (default)` means that it's blinking depending on what terminal programs want. when a terminal program resets the cursor to default, it sets it back to `terminal.cursor_shape` if that is set. Release Notes: - Fixed the behavior of `{"terminal": {"blinking": "[on|off|terminal_controlled]"}` to work correctly and to work correctly when custom `cursor_shape` is set. - `terminal.cursor_shape` only controls the *default* shape of the terminal, not the blinking. - `terminal.blinking = on` means that it's *always* blinking, regardless of what terminal programs want - `terminal.blinking = off` means that it's *never* blinking, regardless of what terminal programs want - `terminal.blinking = terminal_controlled (default)` means that it's blinking depending on what terminal programs want. when a terminal program resets the cursor to default, it sets it back to `terminal.cursor_shape` if that is set. 
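Taken together, the two settings discussed above live side by side in the user settings file. A minimal illustrative sketch (assuming `block` is one of the supported `cursor_shape` values; these are not new defaults introduced by this patch):

```json
{
  "terminal": {
    "cursor_shape": "block",
    "blinking": "terminal_controlled"
  }
}
```

With `terminal_controlled`, DECSCUSR escape sequences such as `\x1b[1 q` (blinking block) from the test script above toggle blinking; with `on` or `off`, the blink state stays fixed regardless of what the terminal program requests.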
Demo: https://github.com/user-attachments/assets/b3fbeafd-ad58-41c8-9c07-1f03bc31771f Co-authored-by: Bennet --- crates/project/src/terminals.rs | 1 - crates/terminal/src/terminal.rs | 21 ++++++++++----------- crates/terminal_view/src/terminal_view.rs | 19 ++++++++++++++----- 3 files changed, 24 insertions(+), 17 deletions(-) diff --git a/crates/project/src/terminals.rs b/crates/project/src/terminals.rs index ababb3261b337..54dd48cf433ff 100644 --- a/crates/project/src/terminals.rs +++ b/crates/project/src/terminals.rs @@ -215,7 +215,6 @@ impl Project { spawn_task, shell, env, - Some(settings.blinking), settings.cursor_shape.unwrap_or_default(), settings.alternate_scroll, settings.max_scroll_history_lines, diff --git a/crates/terminal/src/terminal.rs b/crates/terminal/src/terminal.rs index f9767b07d1261..b51308df37884 100644 --- a/crates/terminal/src/terminal.rs +++ b/crates/terminal/src/terminal.rs @@ -42,7 +42,7 @@ use serde::{Deserialize, Serialize}; use settings::Settings; use smol::channel::{Receiver, Sender}; use task::{HideStrategy, Shell, TaskId}; -use terminal_settings::{AlternateScroll, CursorShape, TerminalBlink, TerminalSettings}; +use terminal_settings::{AlternateScroll, CursorShape, TerminalSettings}; use theme::{ActiveTheme, Theme}; use util::truncate_and_trailoff; @@ -102,7 +102,7 @@ pub enum Event { CloseTerminal, Bell, Wakeup, - BlinkChanged, + BlinkChanged(bool), SelectionsChanged, NewNavigationTarget(Option), Open(MaybeNavigationTarget), @@ -315,7 +315,6 @@ impl TerminalBuilder { task: Option, shell: Shell, mut env: HashMap, - blink_settings: Option, cursor_shape: CursorShape, alternate_scroll: AlternateScroll, max_scroll_history_lines: Option, @@ -378,16 +377,11 @@ impl TerminalBuilder { let (events_tx, events_rx) = unbounded(); //Set up the terminal... let mut term = Term::new( - config, + config.clone(), &TerminalSize::default(), ZedListener(events_tx.clone()), ); - //Start off blinking if we need to - if let Some(TerminalBlink::On) = blink_settings { - term.set_private_mode(PrivateMode::Named(NamedPrivateMode::BlinkingCursor)); - } - //Alacritty defaults to alternate scrolling being on, so we just need to turn it off. if let AlternateScroll::Off = alternate_scroll { term.unset_private_mode(PrivateMode::Named(NamedPrivateMode::AlternateScroll)); @@ -437,6 +431,7 @@ impl TerminalBuilder { pty_tx: Notifier(pty_tx), completion_tx, term, + term_config: config, events: VecDeque::with_capacity(10), //Should never get this high. 
last_content: Default::default(), last_mouse: None, @@ -588,6 +583,7 @@ pub struct Terminal { pty_tx: Notifier, completion_tx: Sender<()>, term: Arc>>, + term_config: Config, events: VecDeque, /// This is only used for mouse mode cell change detection last_mouse: Option<(AlacPoint, AlacDirection)>, @@ -672,7 +668,9 @@ impl Terminal { self.write_to_pty(format(self.last_content.size.into())) } AlacTermEvent::CursorBlinkingChange => { - cx.emit(Event::BlinkChanged); + let terminal = self.term.lock(); + let blinking = terminal.cursor_style().blinking; + cx.emit(Event::BlinkChanged(blinking)); } AlacTermEvent::Bell => { cx.emit(Event::Bell); @@ -957,7 +955,8 @@ impl Terminal { } pub fn set_cursor_shape(&mut self, cursor_shape: CursorShape) { - self.term.lock().set_cursor_style(Some(cursor_shape.into())); + self.term_config.default_cursor_style = cursor_shape.into(); + self.term.lock().set_options(self.term_config.clone()); } pub fn total_lines(&self) -> usize { diff --git a/crates/terminal_view/src/terminal_view.rs b/crates/terminal_view/src/terminal_view.rs index f7b38e3f5ca3e..ce65be30c6d4f 100644 --- a/crates/terminal_view/src/terminal_view.rs +++ b/crates/terminal_view/src/terminal_view.rs @@ -104,7 +104,7 @@ pub struct TerminalView { context_menu: Option<(View, gpui::Point, Subscription)>, cursor_shape: CursorShape, blink_state: bool, - blinking_on: bool, + blinking_terminal_enabled: bool, blinking_paused: bool, blink_epoch: usize, can_navigate_to_selected_word: bool, @@ -184,7 +184,7 @@ impl TerminalView { context_menu: None, cursor_shape, blink_state: true, - blinking_on: false, + blinking_terminal_enabled: false, blinking_paused: false, blink_epoch: 0, can_navigate_to_selected_word: false, @@ -434,7 +434,6 @@ impl TerminalView { pub fn should_show_cursor(&self, focused: bool, cx: &mut gpui::ViewContext) -> bool { //Don't blink the cursor when not focused, blinking is disabled, or paused if !focused - || !self.blinking_on || self.blinking_paused || self .terminal @@ -450,7 +449,10 @@ impl TerminalView { //If the user requested to never blink, don't blink it. TerminalBlink::Off => true, //If the terminal is controlling it, check terminal mode - TerminalBlink::TerminalControlled | TerminalBlink::On => self.blink_state, + TerminalBlink::TerminalControlled => { + !self.blinking_terminal_enabled || self.blink_state + } + TerminalBlink::On => self.blink_state, } } @@ -642,7 +644,14 @@ fn subscribe_for_terminal_events( cx.emit(Event::Wakeup); } - Event::BlinkChanged => this.blinking_on = !this.blinking_on, + Event::BlinkChanged(blinking) => { + if matches!( + TerminalSettings::get_global(cx).blinking, + TerminalBlink::TerminalControlled + ) { + this.blinking_terminal_enabled = *blinking; + } + } Event::TitleChanged => { cx.emit(ItemEvent::UpdateTab); From 053e31994fd5ffc99cd0a59404fb5c4fac4fd3c5 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Mon, 30 Sep 2024 18:33:20 +0200 Subject: [PATCH 146/228] Fine-tune hunk controls block (#18543) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR changes the undo icon and adds a background color so that indent lines don't bleed through the control block. 
Screenshot 2024-09-30 at 5 38 44 PM Release Notes: - N/A --- crates/editor/src/hunk_diff.rs | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/crates/editor/src/hunk_diff.rs b/crates/editor/src/hunk_diff.rs index 4e7a0f05f4a25..2ccd60c668de7 100644 --- a/crates/editor/src/hunk_diff.rs +++ b/crates/editor/src/hunk_diff.rs @@ -360,8 +360,11 @@ impl Editor { h_flex() .id(cx.block_id) - .w_full() .h(cx.line_height()) + .w_full() + .border_t_1() + .border_color(border_color) + .bg(cx.theme().colors().editor_background) .child( div() .id("gutter-strip") @@ -381,12 +384,10 @@ impl Editor { ) .child( h_flex() - .pl_1p5() + .pl_2() .pr_6() .size_full() .justify_between() - .border_t_1() - .border_color(border_color) .child( h_flex() .gap_1() @@ -513,7 +514,7 @@ impl Editor { }), ) .child( - IconButton::new("discard", IconName::RotateCcw) + IconButton::new("discard", IconName::Undo) .shape(IconButtonShape::Square) .icon_size(IconSize::Small) .tooltip({ From 74cba2407f3c5999ab27597dde1643e7e748db8e Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Mon, 30 Sep 2024 16:14:26 -0400 Subject: [PATCH 147/228] ci: Move collab to Dockerfile-collab (#18515) This makes it possible to have multiple Dockerfiles, each with their own `.dockerignore`. Previously any docker builds would always include anything inside `.dockerignore`. I believe this feature may require `export DOCKER_BUILDKIT=1` but we use that in CI already. --- .github/workflows/deploy_collab.yml | 6 +++++- .zed/settings.json | 4 ++++ Dockerfile => Dockerfile-collab | 0 .dockerignore => Dockerfile-collab.dockerignore | 0 4 files changed, 9 insertions(+), 1 deletion(-) rename Dockerfile => Dockerfile-collab (100%) rename .dockerignore => Dockerfile-collab.dockerignore (100%) diff --git a/.github/workflows/deploy_collab.yml b/.github/workflows/deploy_collab.yml index c4193adcd2434..1e6e6cf2803e2 100644 --- a/.github/workflows/deploy_collab.yml +++ b/.github/workflows/deploy_collab.yml @@ -76,7 +76,11 @@ jobs: clean: false - name: Build docker image - run: docker build . --build-arg GITHUB_SHA=$GITHUB_SHA --tag registry.digitalocean.com/zed/collab:$GITHUB_SHA + run: | + docker build -f Dockerfile-collab \ + --build-arg GITHUB_SHA=$GITHUB_SHA \ + --tag registry.digitalocean.com/zed/collab:$GITHUB_SHA \ + . 
- name: Publish docker image run: docker push registry.digitalocean.com/zed/collab:${GITHUB_SHA} diff --git a/.zed/settings.json b/.zed/settings.json index 176fd33a9b966..41adfdbf591d3 100644 --- a/.zed/settings.json +++ b/.zed/settings.json @@ -38,6 +38,10 @@ } } }, + "file_types": { + "Dockerfile": ["Dockerfile*[!dockerignore]"], + "Git Ignore": ["dockerignore"] + }, "hard_tabs": false, "formatter": "auto", "remove_trailing_whitespace_on_save": true, diff --git a/Dockerfile b/Dockerfile-collab similarity index 100% rename from Dockerfile rename to Dockerfile-collab diff --git a/.dockerignore b/Dockerfile-collab.dockerignore similarity index 100% rename from .dockerignore rename to Dockerfile-collab.dockerignore From 09424edc3510b3b6004f93f7c7f3c5e173a4c5fa Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Mon, 30 Sep 2024 16:17:21 -0400 Subject: [PATCH 148/228] ci: Add script/determine-release-channel (#18476) - Refactor duplicated inline script from ci.yml to `script/determine-release-channel` - Remove references to non-existent '-nightly' release tags Release Notes: - N/A --- .github/workflows/ci.yml | 79 ++++---------------------------- script/bump-zed-patch-version | 5 +- script/determine-release-channel | 32 +++++++++++++ 3 files changed, 43 insertions(+), 73 deletions(-) create mode 100755 script/determine-release-channel diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 07e5499d5eb76..ca0d8bb9100fd 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -192,29 +192,12 @@ jobs: - name: Determine version and release channel if: ${{ startsWith(github.ref, 'refs/tags/v') }} run: | - set -eu - - version=$(script/get-crate-version zed) - channel=$(cat crates/zed/RELEASE_CHANNEL) - echo "Publishing version: ${version} on release channel ${channel}" - echo "RELEASE_CHANNEL=${channel}" >> $GITHUB_ENV - - expected_tag_name="" - case ${channel} in - stable) - expected_tag_name="v${version}";; - preview) - expected_tag_name="v${version}-pre";; - nightly) - expected_tag_name="v${version}-nightly";; - *) - echo "can't publish a release on channel ${channel}" - exit 1;; - esac - if [[ $GITHUB_REF_NAME != $expected_tag_name ]]; then - echo "invalid release tag ${GITHUB_REF_NAME}. expected ${expected_tag_name}" - exit 1 - fi + # This exports RELEASE_CHANNEL into env (GITHUB_ENV) + script/determine-release-channel + + - name: Draft release notes + if: ${{ startsWith(github.ref, 'refs/tags/v') }} + run: | mkdir -p target/ # Ignore any errors that occur while drafting release notes to not fail the build. script/draft-release-notes "$version" "$channel" > target/release-notes.md || true @@ -289,29 +272,8 @@ jobs: - name: Determine version and release channel if: ${{ startsWith(github.ref, 'refs/tags/v') }} run: | - set -eu - - version=$(script/get-crate-version zed) - channel=$(cat crates/zed/RELEASE_CHANNEL) - echo "Publishing version: ${version} on release channel ${channel}" - echo "RELEASE_CHANNEL=${channel}" >> $GITHUB_ENV - - expected_tag_name="" - case ${channel} in - stable) - expected_tag_name="v${version}";; - preview) - expected_tag_name="v${version}-pre";; - nightly) - expected_tag_name="v${version}-nightly";; - *) - echo "can't publish a release on channel ${channel}" - exit 1;; - esac - if [[ $GITHUB_REF_NAME != $expected_tag_name ]]; then - echo "invalid release tag ${GITHUB_REF_NAME}. 
expected ${expected_tag_name}" - exit 1 - fi + # This exports RELEASE_CHANNEL into env (GITHUB_ENV) + script/determine-release-channel - name: Create Linux .tar.gz bundle run: script/bundle-linux @@ -357,29 +319,8 @@ jobs: - name: Determine version and release channel if: ${{ startsWith(github.ref, 'refs/tags/v') }} run: | - set -eu - - version=$(script/get-crate-version zed) - channel=$(cat crates/zed/RELEASE_CHANNEL) - echo "Publishing version: ${version} on release channel ${channel}" - echo "RELEASE_CHANNEL=${channel}" >> $GITHUB_ENV - - expected_tag_name="" - case ${channel} in - stable) - expected_tag_name="v${version}";; - preview) - expected_tag_name="v${version}-pre";; - nightly) - expected_tag_name="v${version}-nightly";; - *) - echo "can't publish a release on channel ${channel}" - exit 1;; - esac - if [[ $GITHUB_REF_NAME != $expected_tag_name ]]; then - echo "invalid release tag ${GITHUB_REF_NAME}. expected ${expected_tag_name}" - exit 1 - fi + # This exports RELEASE_CHANNEL into env (GITHUB_ENV) + script/determine-release-channel - name: Create and upload Linux .tar.gz bundle run: script/bundle-linux diff --git a/script/bump-zed-patch-version b/script/bump-zed-patch-version index e00e747aa32da..b52feff72f1cb 100755 --- a/script/bump-zed-patch-version +++ b/script/bump-zed-patch-version @@ -9,11 +9,8 @@ case $channel in preview) tag_suffix="-pre" ;; - nightly) - tag_suffix="-nightly" - ;; *) - echo "this must be run on either of stable|preview|nightly release branches" >&2 + echo "this must be run on either of stable|preview release branches" >&2 exit 1 ;; esac diff --git a/script/determine-release-channel b/script/determine-release-channel new file mode 100755 index 0000000000000..ae49fbf1e5d07 --- /dev/null +++ b/script/determine-release-channel @@ -0,0 +1,32 @@ +#!/usr/bin/env bash + +set -euo pipefail + +if [ -z "${GITHUB_ACTIONS-}" ]; then + echo "Error: This script must be run in a GitHub Actions environment" + exit 1 +elif [ -z "${GITHUB_REF-}" ]; then + # This should be the release tag 'v0.x.x' + echo "Error: GITHUB_REF is not set" + exit 1 +fi + +version=$(script/get-crate-version zed) +channel=$(cat crates/zed/RELEASE_CHANNEL) +echo "Publishing version: ${version} on release channel ${channel}" +echo "RELEASE_CHANNEL=${channel}" >> $GITHUB_ENV + +expected_tag_name="" +case ${channel} in +stable) + expected_tag_name="v${version}";; +preview) + expected_tag_name="v${version}-pre";; +*) + echo "can't publish a release on channel ${channel}" + exit 1;; +esac +if [[ $GITHUB_REF_NAME != $expected_tag_name ]]; then + echo "invalid release tag ${GITHUB_REF_NAME}. expected ${expected_tag_name}" + exit 1 +fi From 432de00e894e6a104fa69e9146081b159de2dc43 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Mon, 30 Sep 2024 17:02:19 -0400 Subject: [PATCH 149/228] ci: Use BuildJet Ubuntu 20.04 runners for better glibc compatibility (#18442) Use BuildJet Ubuntu 20.04 runners. - Linux arm64 unchanged (glibc >= 2.35) - Linux x64 glibc requirement becomes to >= 2.31 (from glibc >= 2.35). Note: Ubuntu 20.04 repo cmake (3.16.3) is normally too old to build Zed, but `ubuntu-2004` [includes cmake 3.30.3](https://github.com/actions/runner-images/blob/main/images/ubuntu/Ubuntu2004-Readme.md#tools). 
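For anyone checking whether a machine meets those glibc requirements, the version can be confirmed from a shell (illustrative commands; output format varies by distro):

```sh
# Either of these prints the system glibc version on glibc-based distros.
ldd --version | head -n 1
getconf GNU_LIBC_VERSION
```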
--- .github/workflows/ci.yml | 4 +-- .github/workflows/release_nightly.yml | 4 +-- docs/src/linux.md | 6 ++--- script/install-mold | 37 +++++++++++++++++++++++++++ script/linux | 14 ++++++++-- 5 files changed, 56 insertions(+), 9 deletions(-) create mode 100755 script/install-mold diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ca0d8bb9100fd..ef1570fbf2121 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -254,7 +254,7 @@ jobs: timeout-minutes: 60 name: Create a Linux bundle runs-on: - - buildjet-16vcpu-ubuntu-2204 + - buildjet-16vcpu-ubuntu-2004 if: ${{ startsWith(github.ref, 'refs/tags/v') || contains(github.event.pull_request.labels.*.name, 'run-bundling') }} needs: [linux_tests] env: @@ -267,7 +267,7 @@ jobs: clean: false - name: Install Linux dependencies - run: ./script/linux + run: ./script/linux && ./script/install-mold 2.34.0 - name: Determine version and release channel if: ${{ startsWith(github.ref, 'refs/tags/v') }} diff --git a/.github/workflows/release_nightly.yml b/.github/workflows/release_nightly.yml index 4e8a257bdd1bb..349d14f990fcb 100644 --- a/.github/workflows/release_nightly.yml +++ b/.github/workflows/release_nightly.yml @@ -100,7 +100,7 @@ jobs: name: Create a Linux *.tar.gz bundle for x86 if: github.repository_owner == 'zed-industries' runs-on: - - buildjet-16vcpu-ubuntu-2204 + - buildjet-16vcpu-ubuntu-2004 needs: tests env: DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }} @@ -117,7 +117,7 @@ jobs: run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH - name: Install Linux dependencies - run: ./script/linux + run: ./script/linux && ./script/install-mold 2.34.0 - name: Limit target directory size run: script/clear-target-dir-if-larger-than 100 diff --git a/docs/src/linux.md b/docs/src/linux.md index 17334c325ca30..3bba9c8f93aaa 100644 --- a/docs/src/linux.md +++ b/docs/src/linux.md @@ -16,7 +16,7 @@ The Zed installed by the script works best on systems that: - have a Vulkan compatible GPU available (for example Linux on an M-series macBook) - have a system-wide glibc (NixOS and Alpine do not by default) - - x86_64 (Intel/AMD): glibc version >= 2.35 (Ubuntu 22 and newer) + - x86_64 (Intel/AMD): glibc version >= 2.29 (Ubuntu 20 and newer) - aarch64 (ARM): glibc version >= 2.35 (Ubuntu 22 and newer) Both Nix and Alpine have third-party Zed packages available (though they are currently a few weeks out of date). If you'd like to use our builds they do work if you install a glibc compatibility layer. On NixOS you can try [nix-ld](https://github.com/Mic92/nix-ld), and on Alpine [gcompat](https://wiki.alpinelinux.org/wiki/Running_glibc_programs). @@ -24,8 +24,8 @@ Both Nix and Alpine have third-party Zed packages available (though they are cur You will need to build from source for: - architectures other than 64-bit Intel or 64-bit ARM (for example a 32-bit or RISC-V machine) -- Amazon Linux -- Rocky Linux 9.3 +- Redhat Enterprise Linux 8.x, Rocky Linux 8, AlmaLinux 8, Amazon Linux 2 on all architectures +- Redhat Enterprise Linux 9.x, Rocky Linux 9.3, AlmaLinux 8, Amazon Linux 2023 on aarch64 (x86_x64 OK) ## Other ways to install Zed on Linux diff --git a/script/install-mold b/script/install-mold new file mode 100755 index 0000000000000..1c451b7269061 --- /dev/null +++ b/script/install-mold @@ -0,0 +1,37 @@ +#!/usr/bin/env bash + +# Install `mold` official binaries from GitHub Releases. 
+# +# Adapted from the official rui314/setup-mold@v1 action to: +# * use environment variables instead of action inputs +# * remove make-default support +# * use curl instead of wget +# * support doas for sudo +# * support redhat systems +# See: https://github.com/rui314/setup-mold/blob/main/action.yml + +set -euo pipefail + +MOLD_VERSION="${MOLD_VERSION:-${1:-}}" +if [ "$(uname -s)" != "Linux" ]; then + echo "Error: This script is intended for Linux systems only." + exit 1 +elif [ -z "$MOLD_VERSION" ]; then + echo "Usage: $0 2.34.0" + exit 1 +elif [ -e /usr/local/bin/mold ]; then + echo "Warning: existing mold found at /usr/local/bin/mold. Skipping installation." + exit 0 +fi + +if [ "$(whoami)" = root ]; then SUDO=; else SUDO="$(command -v sudo || command -v doas || true)"; fi + +MOLD_REPO="${MOLD_REPO:-https://github.com/rui314/mold}" +MOLD_URL="${MOLD_URL:-$MOLD_REPO}/releases/download/v$MOLD_VERSION/mold-$MOLD_VERSION-$(uname -m)-linux.tar.gz" + +echo "Downloading from $MOLD_URL" +curl --location --show-error --output - --retry 3 --retry-delay 5 "$MOLD_URL" \ + | $SUDO tar -C /usr/local --strip-components=1 --no-overwrite-dir -xzf - + +# Note this binary depends on the system libatomic.so.1 which is usually +# provided as a dependency of gcc so it should be available on most systems. diff --git a/script/linux b/script/linux index eca3bf7f7d249..189407a5107b8 100755 --- a/script/linux +++ b/script/linux @@ -20,19 +20,29 @@ if [[ -n $apt ]]; then libwayland-dev libxkbcommon-x11-dev libssl-dev - libstdc++-12-dev libzstd-dev libvulkan1 libgit2-dev make cmake clang - mold jq gettext-base elfutils libsqlite3-dev ) + # Ubuntu 20.04 / Debian Bullseye (including CI for release) + if grep -q "bullseye" /etc/debian_version; then + deps+=( + libstdc++-10-dev + ) + else + deps+=( + libstdc++-12-dev + mold + ) + fi + $maysudo "$apt" update $maysudo "$apt" install -y "${deps[@]}" exit 0 From 3010dfe038dce73a4d5b809b6d0949e723c76835 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Mon, 30 Sep 2024 17:46:21 -0400 Subject: [PATCH 150/228] Support More Linux (#18480) - Add `script/build-docker` - Add `script/install-cmake` - Add `script/install-mold` - Improve `script/linux` - Add missing dependencies: `jq`, `git`, `tar`, `gzip` as required. - Add check for mold - Fix Redhat 8.x derivatives (RHEL, Centos, Almalinux, Rocky, Oracle, Amazon) - Fix perl libs to be Fedora only - Install the best `libstdc++` available on apt distros - ArchLinux: run `pacman -Syu` to update repos before installing. 
- Should work on Raspbian (untested) This make it possible to test builds on other distros using docker: ``` ./script/build-docker amazonlinux:2023 ``` --- Dockerfile-distros | 26 +++++++ Dockerfile-distros.dockerignore | 2 + docs/src/development/macos.md | 6 ++ script/build-docker | 25 +++++++ script/install-cmake | 77 +++++++++++++++++++++ script/install-mold | 2 +- script/linux | 116 +++++++++++++++++++++++--------- 7 files changed, 220 insertions(+), 34 deletions(-) create mode 100644 Dockerfile-distros create mode 100644 Dockerfile-distros.dockerignore create mode 100755 script/build-docker create mode 100755 script/install-cmake diff --git a/Dockerfile-distros b/Dockerfile-distros new file mode 100644 index 0000000000000..c8a98d2f7db9b --- /dev/null +++ b/Dockerfile-distros @@ -0,0 +1,26 @@ +# syntax=docker/dockerfile:1 + +ARG BASE_IMAGE +FROM ${BASE_IMAGE} +WORKDIR /app +ARG TZ=Etc/UTC \ + LANG=C.UTF-8 \ + LC_ALL=C.UTF-8 \ + DEBIAN_FRONTEND=noninteractive +ENV CARGO_TERM_COLOR=always + +COPY script/linux script/ +RUN ./script/linux +COPY script/install-mold script/install-cmake script/ +RUN ./script/install-mold "2.34.0" +RUN ./script/install-cmake "3.30.4" + +COPY . . + +# When debugging, make these into individual RUN statements. +# Cleanup to avoid saving big layers we aren't going to use. +RUN . "$HOME/.cargo/env" \ + && cargo fetch \ + && cargo build \ + && cargo run -- --help \ + && cargo clean --quiet diff --git a/Dockerfile-distros.dockerignore b/Dockerfile-distros.dockerignore new file mode 100644 index 0000000000000..de70e0d16772e --- /dev/null +++ b/Dockerfile-distros.dockerignore @@ -0,0 +1,2 @@ +**/target +**/node_modules diff --git a/docs/src/development/macos.md b/docs/src/development/macos.md index 1407b0f610dc6..c5cbf56e15f1a 100644 --- a/docs/src/development/macos.md +++ b/docs/src/development/macos.md @@ -35,6 +35,12 @@ Clone down the [Zed repository](https://github.com/zed-industries/zed). brew install cmake ``` +- (Optional) Install `mold` to speed up link times + + ```sh + brew install mold + ``` + ## Backend Dependencies If you are developing collaborative features of Zed, you'll need to install the dependencies of zed's `collab` server: diff --git a/script/build-docker b/script/build-docker new file mode 100755 index 0000000000000..c5ea294c73420 --- /dev/null +++ b/script/build-docker @@ -0,0 +1,25 @@ +#!/usr/bin/env bash + +# Use a docker BASE_IMAGE to test building Zed. +# e.g: ./script/bundle-docker ubuntu:20.04 +# +# Increasing resources available to podman may speed this up: +# podman machine stop +# podman machine set --memory 16384 --cpus 8 --disk-size 200 +# podman machine start + +set -euo pipefail + +BASE_IMAGE=${BASE_IMAGE:-${1:-}} +if [ -z "$BASE_IMAGE" ]; then + echo "Usage: $0 BASE_IMAGE" >&2 + exit 1 +fi + +export DOCKER_BUILDKIT=1 +cd "$(dirname "$0")/.." + +podman build . \ + -f Dockerfile-distros \ + -t many \ + --build-arg BASE_IMAGE="$BASE_IMAGE" diff --git a/script/install-cmake b/script/install-cmake new file mode 100755 index 0000000000000..71b5aaeeef2ed --- /dev/null +++ b/script/install-cmake @@ -0,0 +1,77 @@ +#!/usr/bin/env bash +# +# This script installs an up-to-date version of CMake. +# +# For MacOS use Homebrew to install the latest version. +# +# For Ubuntu use the official KitWare Apt repository with backports. +# See: https://apt.kitware.com/ +# +# For other systems (RHEL 8.x, 9.x, AmazonLinux, SUSE, Fedora, Arch, etc) +# use the official CMake installer script from KitWare. 
+# +# Note this is similar to how GitHub Actions runners install cmake: +# https://github.com/actions/runner-images/blob/main/images/ubuntu/scripts/build/install-cmake.sh +# +# Upstream: 3.30.4 (2024-09-27) + +set -euo pipefail + + +if [[ "$(uname -s)" == "darwin" ]]; then + brew --version >/dev/null \ + || echo "Error: Homebrew is required to install cmake on MacOS." && exit 1 + echo "Installing cmake via Homebrew (can't pin to old versions)." + brew install cmake + exit 0 +elif [ "$(uname -s)" != "Linux" ]; then + echo "Error: This script is intended for MacOS/Linux systems only." + exit 1 +elif [ -z "${1:-}" ]; then + echo "Usage: $0 [3.30.4]" + exit 1 +fi +CMAKE_VERSION="${CMAKE_VERSION:-${1:-3.30.4}}" + +if [ "$(whoami)" = root ]; then SUDO=; else SUDO="$(command -v sudo || command -v doas || true)"; fi + +if cmake --version | grep -q "$CMAKE_VERSION"; then + echo "CMake $CMAKE_VERSION is already installed." + exit 0 +elif [ -e /usr/local/bin/cmake ]; then + echo "Warning: existing cmake found at /usr/local/bin/cmake. Skipping installation." + exit 0 +elif [ -e /etc/apt/sources.list.d/kitware.list ]; then + echo "Warning: existing KitWare repository found. Skipping installation." + exit 0 +elif [ -e /etc/lsb-release ] && grep -qP 'DISTRIB_ID=Ubuntu' /etc/lsb-release; then + curl -fsSL https://apt.kitware.com/keys/kitware-archive-latest.asc \ + | $SUDO gpg --dearmor - \ + | $SUDO tee /usr/share/keyrings/kitware-archive-keyring.gpg >/dev/null + echo "deb [signed-by=/usr/share/keyrings/kitware-archive-keyring.gpg] https://apt.kitware.com/ubuntu/ $(lsb_release -cs) main" \ + | $SUDO tee /etc/apt/sources.list.d/kitware.list >/dev/null + $SUDO apt-get update + $SUDO apt-get install -y kitware-archive-keyring cmake==$CMAKE_VERSION +else + arch="$(uname -m)" + if [ "$arch" != "x86_64" ] && [ "$arch" != "aarch64" ]; then + echo "Error. Only x86_64 and aarch64 are supported." 
+ exit 1 + fi + tempdir=$(mktemp -d) + pushd "$tempdir" + CMAKE_REPO="https://github.com/Kitware/CMake" + CMAKE_INSTALLER="cmake-$CMAKE_VERSION-linux-$arch.sh" + curl -fsSL --output cmake-$CMAKE_VERSION-SHA-256.txt \ + "$CMAKE_REPO/releases/download/v$CMAKE_VERSION/cmake-$CMAKE_VERSION-SHA-256.txt" + curl -fsSL --output $CMAKE_INSTALLER \ + "$CMAKE_REPO/releases/download/v$CMAKE_VERSION/cmake-$CMAKE_VERSION-linux-$arch.sh" + # workaround for old versions of sha256sum not having --ignore-missing + grep -F "cmake-$CMAKE_VERSION-linux-$arch.sh" "cmake-$CMAKE_VERSION-SHA-256.txt" \ + | sha256sum -c \ + | grep -qP "^${CMAKE_INSTALLER}: OK" + chmod +x cmake-$CMAKE_VERSION-linux-$arch.sh + $SUDO ./cmake-$CMAKE_VERSION-linux-$arch.sh --prefix=/usr/local --skip-license + popd + rm -rf "$tempdir" +fi diff --git a/script/install-mold b/script/install-mold index 1c451b7269061..fca8d73f21a10 100755 --- a/script/install-mold +++ b/script/install-mold @@ -30,7 +30,7 @@ MOLD_REPO="${MOLD_REPO:-https://github.com/rui314/mold}" MOLD_URL="${MOLD_URL:-$MOLD_REPO}/releases/download/v$MOLD_VERSION/mold-$MOLD_VERSION-$(uname -m)-linux.tar.gz" echo "Downloading from $MOLD_URL" -curl --location --show-error --output - --retry 3 --retry-delay 5 "$MOLD_URL" \ +curl -fsSL --output - "$MOLD_URL" \ | $SUDO tar -C /usr/local --strip-components=1 --no-overwrite-dir -xzf - # Note this binary depends on the system libatomic.so.1 which is usually diff --git a/script/linux b/script/linux index 189407a5107b8..540bf063ec92e 100755 --- a/script/linux +++ b/script/linux @@ -1,15 +1,25 @@ #!/usr/bin/env bash -set -ex +set -xeuo pipefail -# install the wasm toolchain -which rustup > /dev/null 2>&1 || curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y +# if root or if sudo/unavailable, define an empty variable +if [ "$(id -u)" -eq 0 ] +then maysudo='' +else maysudo="$(command -v sudo || command -v doas || true)" +fi -# if sudo is not installed, define an empty alias -maysudo=$(command -v sudo || command -v doas || true) +function finalize { + # after packages install (curl, etc), get the rust toolchain + which rustup > /dev/null 2>&1 || curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y + # verify the mold situation + if ! command -v mold >/dev/null 2>&1; then + echo "Warning: Mold binaries are unavailable on your system." >&2 + echo " Builds will be slower without mold. Try: scripts/install-mold" >&2 + fi + echo "Finished installing Linux dependencies with script/linux" +} -# Ubuntu, Debian, etc. -# https://packages.ubuntu.com/ +# Ubuntu, Debian, Mint, Kali, Pop!_OS, Raspbian, etc. apt=$(command -v apt-get || true) if [[ -n $apt ]]; then deps=( @@ -27,58 +37,88 @@ if [[ -n $apt ]]; then cmake clang jq + git + curl gettext-base elfutils libsqlite3-dev ) - # Ubuntu 20.04 / Debian Bullseye (including CI for release) - if grep -q "bullseye" /etc/debian_version; then - deps+=( - libstdc++-10-dev - ) - else - deps+=( - libstdc++-12-dev - mold - ) + if (grep -qP 'PRETTY_NAME="(.+24\.04)' /etc/os-release); then + deps+=( mold libstdc++-14-dev ) + elif (grep -qP 'PRETTY_NAME="((Debian|Raspbian).+12|.+22\.04)' /etc/os-release); then + deps+=( mold libstdc++-12-dev ) + elif (grep -qP 'PRETTY_NAME="((Debian|Raspbian).+11|.+20\.04)' /etc/os-release); then + deps+=( libstdc++-10-dev ) fi $maysudo "$apt" update $maysudo "$apt" install -y "${deps[@]}" + finalize exit 0 fi -# Fedora, CentOS, RHEL, etc. -# https://packages.fedoraproject.org/ +# Fedora, CentOS, RHEL, Alma, Amazon 2023, Oracle, etc. 
dnf=$(command -v dnf || true) -if [[ -n $dnf ]]; then +# Old Redhat (yum only): Amazon Linux 2, Oracle Linux 7, etc. +yum=$(command -v yum || true) + +if [[ -n $dnf ]] || [[ -n $yum ]]; then + pkg_cmd="${dnf:-${yum}}" deps=( gcc - g++ clang cmake - mold alsa-lib-devel fontconfig-devel wayland-devel libxkbcommon-x11-devel openssl-devel libzstd-devel - # Perl dependencies are needed for openssl-sys crate see https://docs.rs/openssl/latest/openssl/ - perl-FindBin - perl-IPC-Cmd - perl-File-Compare - perl-File-Copy vulkan-loader sqlite-devel + jq + git + tar ) + # perl used for building openssl-sys crate. See: https://docs.rs/openssl/latest/openssl/ + if grep -qP '^ID="(fedora)' /etc/os-release; then + deps+=( + perl-FindBin + perl-IPC-Cmd + perl-File-Compare + perl-File-Copy + mold + ) + elif grep grep -qP '^ID="(rhel|rocky|alma|centos|ol)' /etc/os-release; then + deps+=( perl-interpreter ) + fi - # libxkbcommon-x11-devel is in the crb repo on RHEL and CentOS, not needed for Fedora - if ! grep -q "Fedora" /etc/redhat-release; then - $maysudo "$dnf" config-manager --set-enabled crb + # gcc-c++ is g++ on RHEL8 and 8.x clones + if grep -qP '^ID="(rhel|rocky|alma|centos|ol)' /etc/os-release \ + && grep -qP '^VERSION_ID="8' /etc/os-release; then + deps+=( gcc-c++ ) + else + deps+=( g++ ) + fi + + # libxkbcommon-x11-devel is in a non-default repo on RHEL 8.x/9.x (except on AmazonLinux) + if grep -qP '^VERSION_ID="(8|9)' && grep -qP '^ID="(rhel|rocky|centos|alma|ol)' /etc/os-release; then + $maysudo dnf install -y 'dnf-command(config-manager)' + if grep -qP '^PRETTY_NAME="(AlmaLinux 8|Rocky Linux 8)' /etc/os-release; then + $maysudo dnf config-manager --set-enabled powertools + elif grep -qP '^PRETTY_NAME="((AlmaLinux|Rocky|CentOS Stream) 9|Red Hat.+(8|9))' /etc/os-release; then + $maysudo dnf config-manager --set-enabled crb + elif grep -qP '^PRETTY_NAME="Oracle Linux Server 8' /etc/os-release; then + $maysudo dnf config-manager --set-enabled ol8_codeready_builder + elif grep -qP '^PRETTY_NAME="Oracle Linux Server 9' /etc/os-release; then + $maysudo dnf config-manager --set-enabled ol9_codeready_builder + else + echo "Unexpected distro" && grep 'PRETTY_NAME' /etc/os-release && exit 1 + fi fi - $maysudo "$dnf" install -y "${deps[@]}" + $maysudo $pkg_cmd install -y "${deps[@]}" + finalize exit 0 fi @@ -99,10 +139,14 @@ if [[ -n $zyp ]]; then openssl-devel libzstd-devel libvulkan1 - mold sqlite3-devel + jq + git + tar + gzip ) $maysudo "$zyp" install -y "${deps[@]}" + finalize exit 0 fi @@ -125,8 +169,10 @@ if [[ -n $pacman ]]; then mold sqlite jq + git ) - $maysudo "$pacman" -S --needed --noconfirm "${deps[@]}" + $maysudo "$pacman" -Syu --needed --noconfirm "${deps[@]}" + finalize exit 0 fi @@ -153,6 +199,7 @@ if [[ -n $xbps ]]; then sqlite-devel ) $maysudo "$xbps" -Syu "${deps[@]}" + finalize exit 0 fi @@ -162,6 +209,7 @@ emerge=$(command -v emerge || true) if [[ -n $emerge ]]; then deps=( app-arch/zstd + app-misc/jq dev-libs/openssl dev-libs/wayland dev-util/cmake @@ -174,7 +222,9 @@ if [[ -n $emerge ]]; then dev-db/sqlite ) $maysudo "$emerge" -u "${deps[@]}" + finalize exit 0 fi echo "Unsupported Linux distribution in script/linux" +exit 1 From eb9fd62a90d9b8825e7280f1e27775f6892c3924 Mon Sep 17 00:00:00 2001 From: Andrey Arutiunian <110744283+andarut@users.noreply.github.com> Date: Tue, 1 Oct 2024 01:50:30 +0300 Subject: [PATCH 151/228] Fix rendering of markdown tables (#18315) - Closes: https://github.com/zed-industries/zed/issues/11024 ## Release Notes: - Improved Markdown Preview rendering of 
tables ## Before: ![image](https://github.com/user-attachments/assets/25f05604-38a9-4bde-901c-6d53a5d9d94d) Screenshot 2024-09-25 at 05 47 19 ## Now: ![image](https://github.com/user-attachments/assets/ce06f045-d0db-4b8c-a1fc-2811d35f2683) Screenshot 2024-09-25 at 05 47 48 ## Note: I'm not a Rust programmer and this is my first PR in Zed (because i just want to fix this, so i can view my notes in Markdown in Zed, not slow Visual Studio Code) - so there may be errors. I'm open for critic a --- .../markdown_preview/src/markdown_renderer.rs | 56 ++++++++++++++++--- 1 file changed, 48 insertions(+), 8 deletions(-) diff --git a/crates/markdown_preview/src/markdown_renderer.rs b/crates/markdown_preview/src/markdown_renderer.rs index ad169f036b9ce..8bab51b2c3f21 100644 --- a/crates/markdown_preview/src/markdown_renderer.rs +++ b/crates/markdown_preview/src/markdown_renderer.rs @@ -6,8 +6,8 @@ use crate::markdown_elements::{ }; use gpui::{ div, px, rems, AbsoluteLength, AnyElement, DefiniteLength, Div, Element, ElementId, - HighlightStyle, Hsla, InteractiveText, IntoElement, Keystroke, Modifiers, ParentElement, - SharedString, Styled, StyledText, TextStyle, WeakView, WindowContext, + HighlightStyle, Hsla, InteractiveText, IntoElement, Keystroke, Length, Modifiers, + ParentElement, SharedString, Styled, StyledText, TextStyle, WeakView, WindowContext, }; use settings::Settings; use std::{ @@ -16,7 +16,7 @@ use std::{ }; use theme::{ActiveTheme, SyntaxTheme, ThemeSettings}; use ui::{ - h_flex, v_flex, Checkbox, FluentBuilder, InteractiveElement, LinkPreview, Selection, + h_flex, relative, v_flex, Checkbox, FluentBuilder, InteractiveElement, LinkPreview, Selection, StatefulInteractiveElement, Tooltip, }; use workspace::Workspace; @@ -231,12 +231,48 @@ fn render_markdown_list_item( } fn render_markdown_table(parsed: &ParsedMarkdownTable, cx: &mut RenderContext) -> AnyElement { - let header = render_markdown_table_row(&parsed.header, &parsed.column_alignments, true, cx); + let mut max_lengths: Vec = vec![0; parsed.header.children.len()]; + + for (index, cell) in parsed.header.children.iter().enumerate() { + let length = cell.contents.len(); + max_lengths[index] = length; + } + + for row in &parsed.body { + for (index, cell) in row.children.iter().enumerate() { + let length = cell.contents.len(); + if length > max_lengths[index] { + max_lengths[index] = length; + } + } + } + + let total_max_length: usize = max_lengths.iter().sum(); + let max_column_widths: Vec = max_lengths + .iter() + .map(|&length| length as f32 / total_max_length as f32) + .collect(); + + let header = render_markdown_table_row( + &parsed.header, + &parsed.column_alignments, + &max_column_widths, + true, + cx, + ); let body: Vec = parsed .body .iter() - .map(|row| render_markdown_table_row(row, &parsed.column_alignments, false, cx)) + .map(|row| { + render_markdown_table_row( + row, + &parsed.column_alignments, + &max_column_widths, + false, + cx, + ) + }) .collect(); cx.with_common_p(v_flex()) @@ -249,14 +285,15 @@ fn render_markdown_table(parsed: &ParsedMarkdownTable, cx: &mut RenderContext) - fn render_markdown_table_row( parsed: &ParsedMarkdownTableRow, alignments: &Vec, + max_column_widths: &Vec, is_header: bool, cx: &mut RenderContext, ) -> AnyElement { let mut items = vec![]; - for cell in &parsed.children { + for (index, cell) in parsed.children.iter().enumerate() { let alignment = alignments - .get(items.len()) + .get(index) .copied() .unwrap_or(ParsedMarkdownTableAlignment::None); @@ -268,8 +305,11 @@ fn 
render_markdown_table_row( ParsedMarkdownTableAlignment::Right => v_flex().items_end(), }; + let max_width = max_column_widths.get(index).unwrap_or(&0.0); + let mut cell = container - .w_full() + .w(Length::Definite(relative(*max_width))) + .h_full() .child(contents) .px_2() .py_1() From 837756198f91e8b33238a0d186d9a4685e4d5b08 Mon Sep 17 00:00:00 2001 From: maan2003 <49202620+maan2003@users.noreply.github.com> Date: Tue, 1 Oct 2024 04:55:32 +0530 Subject: [PATCH 152/228] linux/wayland: Add support for pasting images (#17671) Release Notes: - You can now paste images into the Assistant Panel to include them as context on Linux wayland --- crates/gpui/src/platform/linux/platform.rs | 14 +- .../gpui/src/platform/linux/wayland/client.rs | 9 +- .../src/platform/linux/wayland/clipboard.rs | 148 ++++++++++++------ 3 files changed, 106 insertions(+), 65 deletions(-) diff --git a/crates/gpui/src/platform/linux/platform.rs b/crates/gpui/src/platform/linux/platform.rs index a0bd6b1d33d17..67f1a43cbe322 100644 --- a/crates/gpui/src/platform/linux/platform.rs +++ b/crates/gpui/src/platform/linux/platform.rs @@ -603,17 +603,11 @@ pub(super) fn get_xkb_compose_state(cx: &xkb::Context) -> Option Result { +pub(super) unsafe fn read_fd(mut fd: FileDescriptor) -> Result> { let mut file = File::from_raw_fd(fd.as_raw_fd()); - - let mut buffer = String::new(); - file.read_to_string(&mut buffer)?; - - // Normalize the text to unix line endings, otherwise - // copying from eg: firefox inserts a lot of blank - // lines, and that is super annoying. - let result = buffer.replace("\r\n", "\n"); - Ok(result) + let mut buffer = Vec::new(); + file.read_to_end(&mut buffer)?; + Ok(buffer) } impl CursorStyle { diff --git a/crates/gpui/src/platform/linux/wayland/client.rs b/crates/gpui/src/platform/linux/wayland/client.rs index ba4971b63af6d..f7ade828615f4 100644 --- a/crates/gpui/src/platform/linux/wayland/client.rs +++ b/crates/gpui/src/platform/linux/wayland/client.rs @@ -1799,10 +1799,11 @@ impl Dispatch for WaylandClientStatePtr { let fd = pipe.read; drop(pipe.write); - let read_task = state - .common - .background_executor - .spawn(async { unsafe { read_fd(fd) } }); + let read_task = state.common.background_executor.spawn(async { + let buffer = unsafe { read_fd(fd)? 
}; + let text = String::from_utf8(buffer)?; + anyhow::Ok(text) + }); let this = this.clone(); state diff --git a/crates/gpui/src/platform/linux/wayland/clipboard.rs b/crates/gpui/src/platform/linux/wayland/clipboard.rs index 26b5256bddcd4..b3ec52d20515f 100644 --- a/crates/gpui/src/platform/linux/wayland/clipboard.rs +++ b/crates/gpui/src/platform/linux/wayland/clipboard.rs @@ -6,10 +6,14 @@ use std::{ use calloop::{LoopHandle, PostAction}; use filedescriptor::Pipe; +use strum::IntoEnumIterator; use wayland_client::{protocol::wl_data_offer::WlDataOffer, Connection}; use wayland_protocols::wp::primary_selection::zv1::client::zwp_primary_selection_offer_v1::ZwpPrimarySelectionOfferV1; -use crate::{platform::linux::platform::read_fd, ClipboardItem, WaylandClientStatePtr}; +use crate::{ + hash, platform::linux::platform::read_fd, ClipboardEntry, ClipboardItem, Image, ImageFormat, + WaylandClientStatePtr, +}; pub(crate) const TEXT_MIME_TYPE: &str = "text/plain;charset=utf-8"; pub(crate) const FILE_LIST_MIME_TYPE: &str = "text/uri-list"; @@ -33,14 +37,30 @@ pub(crate) struct Clipboard { current_primary_offer: Option>, } +pub(crate) trait ReceiveData { + fn receive_data(&self, mime_type: String, fd: BorrowedFd<'_>); +} + +impl ReceiveData for WlDataOffer { + fn receive_data(&self, mime_type: String, fd: BorrowedFd<'_>) { + self.receive(mime_type, fd); + } +} + +impl ReceiveData for ZwpPrimarySelectionOfferV1 { + fn receive_data(&self, mime_type: String, fd: BorrowedFd<'_>) { + self.receive(mime_type, fd); + } +} + #[derive(Clone, Debug)] /// Wrapper for `WlDataOffer` and `ZwpPrimarySelectionOfferV1`, used to help track mime types. -pub(crate) struct DataOffer { +pub(crate) struct DataOffer { pub inner: T, mime_types: Vec, } -impl DataOffer { +impl DataOffer { pub fn new(offer: T) -> Self { Self { inner: offer, @@ -52,17 +72,71 @@ impl DataOffer { self.mime_types.push(mime_type) } - pub fn has_mime_type(&self, mime_type: &str) -> bool { + fn has_mime_type(&self, mime_type: &str) -> bool { self.mime_types.iter().any(|t| t == mime_type) } - pub fn find_text_mime_type(&self) -> Option { - for offered_mime_type in &self.mime_types { - if let Some(offer_text_mime_type) = ALLOWED_TEXT_MIME_TYPES - .into_iter() - .find(|text_mime_type| text_mime_type == offered_mime_type) - { - return Some(offer_text_mime_type.to_owned()); + fn read_bytes(&self, connection: &Connection, mime_type: &str) -> Option> { + let pipe = Pipe::new().unwrap(); + self.inner.receive_data(mime_type.to_string(), unsafe { + BorrowedFd::borrow_raw(pipe.write.as_raw_fd()) + }); + let fd = pipe.read; + drop(pipe.write); + + connection.flush().unwrap(); + + match unsafe { read_fd(fd) } { + Ok(bytes) => Some(bytes), + Err(err) => { + log::error!("error reading clipboard pipe: {err:?}"); + None + } + } + } + + fn read_text(&self, connection: &Connection) -> Option { + let mime_type = self.mime_types.iter().find(|&mime_type| { + ALLOWED_TEXT_MIME_TYPES + .iter() + .any(|&allowed| allowed == mime_type) + })?; + let bytes = self.read_bytes(connection, mime_type)?; + let text_content = match String::from_utf8(bytes) { + Ok(content) => content, + Err(e) => { + log::error!("Failed to convert clipboard content to UTF-8: {}", e); + return None; + } + }; + + // Normalize the text to unix line endings, otherwise + // copying from eg: firefox inserts a lot of blank + // lines, and that is super annoying. 
+ let result = text_content.replace("\r\n", "\n"); + Some(ClipboardItem::new_string(result)) + } + + fn read_image(&self, connection: &Connection) -> Option { + for format in ImageFormat::iter() { + let mime_type = match format { + ImageFormat::Png => "image/png", + ImageFormat::Jpeg => "image/jpeg", + ImageFormat::Webp => "image/webp", + ImageFormat::Gif => "image/gif", + ImageFormat::Svg => "image/svg+xml", + ImageFormat::Bmp => "image/bmp", + ImageFormat::Tiff => "image/tiff", + }; + if !self.has_mime_type(mime_type) { + continue; + } + + if let Some(bytes) = self.read_bytes(connection, mime_type) { + let id = hash(&bytes); + return Some(ClipboardItem { + entries: vec![ClipboardEntry::Image(Image { format, bytes, id })], + }); } } None @@ -128,7 +202,7 @@ impl Clipboard { } pub fn read(&mut self) -> Option { - let offer = self.current_offer.clone()?; + let offer = self.current_offer.as_ref()?; if let Some(cached) = self.cached_read.clone() { return Some(cached); } @@ -137,30 +211,16 @@ impl Clipboard { return self.contents.clone(); } - let mime_type = offer.find_text_mime_type()?; - let pipe = Pipe::new().unwrap(); - offer.inner.receive(mime_type, unsafe { - BorrowedFd::borrow_raw(pipe.write.as_raw_fd()) - }); - let fd = pipe.read; - drop(pipe.write); - - self.connection.flush().unwrap(); + let item = offer + .read_text(&self.connection) + .or_else(|| offer.read_image(&self.connection))?; - match unsafe { read_fd(fd) } { - Ok(v) => { - self.cached_read = Some(ClipboardItem::new_string(v)); - self.cached_read.clone() - } - Err(err) => { - log::error!("error reading clipboard pipe: {err:?}"); - None - } - } + self.cached_read = Some(item.clone()); + Some(item) } pub fn read_primary(&mut self) -> Option { - let offer = self.current_primary_offer.clone()?; + let offer = self.current_primary_offer.as_ref()?; if let Some(cached) = self.cached_primary_read.clone() { return Some(cached); } @@ -169,26 +229,12 @@ impl Clipboard { return self.primary_contents.clone(); } - let mime_type = offer.find_text_mime_type()?; - let pipe = Pipe::new().unwrap(); - offer.inner.receive(mime_type, unsafe { - BorrowedFd::borrow_raw(pipe.write.as_raw_fd()) - }); - let fd = pipe.read; - drop(pipe.write); - - self.connection.flush().unwrap(); + let item = offer + .read_text(&self.connection) + .or_else(|| offer.read_image(&self.connection))?; - match unsafe { read_fd(fd) } { - Ok(v) => { - self.cached_primary_read = Some(ClipboardItem::new_string(v.clone())); - self.cached_primary_read.clone() - } - Err(err) => { - log::error!("error reading clipboard pipe: {err:?}"); - None - } - } + self.cached_primary_read = Some(item.clone()); + Some(item) } fn send_internal(&self, fd: OwnedFd, bytes: Vec) { From ecb7144b9571d8f99fb0cef19ae2ce554d2862ce Mon Sep 17 00:00:00 2001 From: Junkui Zhang <364772080@qq.com> Date: Tue, 1 Oct 2024 07:26:20 +0800 Subject: [PATCH 153/228] windows: Fix can not set folder for `FileSaveDialog` (#17708) Closes #17622 Closes #17682 The story here is that `SHCreateItemFromParsingName` dose not accept UNC path. 
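Background note, not part of the patch itself: on Windows, `std::fs::canonicalize` returns extended-length paths prefixed with `\\?\`, and that prefix is what the shell API rejects. A minimal sketch of the idea behind the fix (hypothetical helper name):

```rust
use std::path::Path;

// Turn a canonicalized Windows path such as `\\?\C:\projects\zed`
// into the plain `C:\projects\zed` form that shell dialogs accept.
fn strip_extended_length_prefix(directory: &Path) -> Option<String> {
    let full = directory.canonicalize().ok()?;
    Some(full.to_string_lossy().trim_start_matches("\\\\?\\").to_string())
}
```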
Video: https://github.com/user-attachments/assets/f4f7f671-5ab5-4965-9158-e7a79ac02654 Release Notes: - N/A --- crates/gpui/src/platform/windows/platform.rs | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/crates/gpui/src/platform/windows/platform.rs b/crates/gpui/src/platform/windows/platform.rs index d9f08c2247adf..a900d0114bb23 100644 --- a/crates/gpui/src/platform/windows/platform.rs +++ b/crates/gpui/src/platform/windows/platform.rs @@ -664,10 +664,11 @@ fn file_save_dialog(directory: PathBuf) -> Result> { let dialog: IFileSaveDialog = unsafe { CoCreateInstance(&FileSaveDialog, None, CLSCTX_ALL)? }; if !directory.to_string_lossy().is_empty() { if let Some(full_path) = directory.canonicalize().log_err() { - let full_path = full_path.to_string_lossy().to_string(); - if !full_path.is_empty() { + let full_path = full_path.to_string_lossy(); + let full_path_str = full_path.trim_start_matches("\\\\?\\"); + if !full_path_str.is_empty() { let path_item: IShellItem = - unsafe { SHCreateItemFromParsingName(&HSTRING::from(&full_path), None)? }; + unsafe { SHCreateItemFromParsingName(&HSTRING::from(full_path_str), None)? }; unsafe { dialog.SetFolder(&path_item).log_err() }; } } From 77506afd8314a8b300971b4ac444955bad654652 Mon Sep 17 00:00:00 2001 From: Junkui Zhang <364772080@qq.com> Date: Tue, 1 Oct 2024 07:29:23 +0800 Subject: [PATCH 154/228] windows: Implement copy/paste images (#17852) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit **Clipboard Behavior on Windows Under This PR:** | User Action | Zed’s Behavior | | ------------------- | -------------------------------------------------- | | Paste PNG | Worked | | Paste JPEG | Worked | | Paste WebP | Worked, but not in the way you expect (see Issue section below) | | Paste GIF | Partially worked (see Issue section below) | | Paste SVG | Partially worked (see Issue section below) | | Paste BMP | Worked, but not in the way you expect (see Issue section below) | | Paste TIFF | Worked, but not in the way you expect (see Issue section below) | | Paste Files | Worked, same behavior as macOS | | Copy image in Zed | Not tested, as I couldn’t find a way to copy images | --- **Differences Between the Windows and macOS Clipboard** The clipboard functionality on Windows differs significantly from macOS. On macOS, there can be multiple items in the clipboard, whereas, on Windows, the clipboard holds only a single item. You can retrieve different formats from the clipboard, but they are all just different representations of the same item. For example, when you copy a JPG image from Microsoft Word, the clipboard will contain data in several formats: - Microsoft Office proprietary data - JPG format data - PNG format data - SVG format data Please note that these formats all represent the same image, just in different formats. This is due to compatibility concerns on Windows, as various applications support different formats. Ideally, multiple formats should be placed on the clipboard to support more software. However, in general, supporting PNG will cover 99% of software, like Chrome, which only supports PNG and BMP formats. Additionally, since the clipboard on Windows only contains a single item, special handling is required when copying multiple objects, such as text and images. 
For instance, if you copy both text and an image simultaneously in Microsoft Word, Microsoft places the following data on the clipboard: - Microsoft Office proprietary data containing a lot of content such as text fonts, sizes, italics, positioning, image size, content, etc. - RTF data representing the above content in RTF format - HTML data representing the content in HTML format - Plain text data Therefore, for the current `ClipboardItem` implementation, if there are multiple `ClipboardEntry` objects to be placed on the clipboard, RTF or HTML formats are required. This PR does not support this scenario, and only supports copying or pasting a single item from the clipboard. --- **Known Issues** - **WebP, BMP, TIFF**: These formats are not explicitly supported in this PR. However, as mentioned earlier, in most cases, there are corresponding PNG format data on the clipboard. This PR retrieves data via PNG format, so users copying images in these formats from other sources will still see the images displayed correctly. - **GIF**: In this PR, GIFs are displayed, but for GIF images with multiple frames, the image will not animate and will freeze on a single frame. Since I observed the same behavior on macOS, I believe this is not an issue with this PR. - **SVG**: In this PR, only the top-left corner of the SVG image is displayed. Again, I observed the same behavior on macOS, so I believe this issue is not specific to this PR. --- I hope this provides a clearer understanding. Any feedback or suggestions on how to improve this are welcome. Release Notes: - N/A --- crates/gpui/src/platform/windows.rs | 2 + crates/gpui/src/platform/windows/clipboard.rs | 366 ++++++++++++++++++ crates/gpui/src/platform/windows/platform.rs | 140 +------ crates/gpui/src/platform/windows/window.rs | 23 +- crates/gpui/src/platform/windows/wrapper.rs | 34 +- 5 files changed, 408 insertions(+), 157 deletions(-) create mode 100644 crates/gpui/src/platform/windows/clipboard.rs diff --git a/crates/gpui/src/platform/windows.rs b/crates/gpui/src/platform/windows.rs index aa484eccdc29c..84cf107c70516 100644 --- a/crates/gpui/src/platform/windows.rs +++ b/crates/gpui/src/platform/windows.rs @@ -1,3 +1,4 @@ +mod clipboard; mod direct_write; mod dispatcher; mod display; @@ -8,6 +9,7 @@ mod util; mod window; mod wrapper; +pub(crate) use clipboard::*; pub(crate) use direct_write::*; pub(crate) use dispatcher::*; pub(crate) use display::*; diff --git a/crates/gpui/src/platform/windows/clipboard.rs b/crates/gpui/src/platform/windows/clipboard.rs new file mode 100644 index 0000000000000..7e607b6a8c775 --- /dev/null +++ b/crates/gpui/src/platform/windows/clipboard.rs @@ -0,0 +1,366 @@ +use std::sync::LazyLock; + +use anyhow::Result; +use collections::{FxHashMap, FxHashSet}; +use itertools::Itertools; +use util::ResultExt; +use windows::Win32::{ + Foundation::HANDLE, + System::{ + DataExchange::{ + CloseClipboard, CountClipboardFormats, EmptyClipboard, EnumClipboardFormats, + GetClipboardData, GetClipboardFormatNameW, IsClipboardFormatAvailable, OpenClipboard, + RegisterClipboardFormatW, SetClipboardData, + }, + Memory::{GlobalAlloc, GlobalLock, GlobalUnlock, GMEM_MOVEABLE}, + Ole::{CF_HDROP, CF_UNICODETEXT}, + }, + UI::Shell::{DragQueryFileW, HDROP}, +}; +use windows_core::PCWSTR; + +use crate::{ + hash, ClipboardEntry, ClipboardItem, ClipboardString, Image, ImageFormat, SmartGlobal, +}; + +// https://learn.microsoft.com/en-us/windows/win32/api/shellapi/nf-shellapi-dragqueryfilew +const DRAGDROP_GET_FILES_COUNT: u32 = 0xFFFFFFFF; + +// 
Clipboard formats +static CLIPBOARD_HASH_FORMAT: LazyLock = + LazyLock::new(|| register_clipboard_format(windows::core::w!("GPUI internal text hash"))); +static CLIPBOARD_METADATA_FORMAT: LazyLock = + LazyLock::new(|| register_clipboard_format(windows::core::w!("GPUI internal metadata"))); +static CLIPBOARD_SVG_FORMAT: LazyLock = + LazyLock::new(|| register_clipboard_format(windows::core::w!("image/svg+xml"))); +static CLIPBOARD_GIF_FORMAT: LazyLock = + LazyLock::new(|| register_clipboard_format(windows::core::w!("GIF"))); +static CLIPBOARD_PNG_FORMAT: LazyLock = + LazyLock::new(|| register_clipboard_format(windows::core::w!("PNG"))); +static CLIPBOARD_JPG_FORMAT: LazyLock = + LazyLock::new(|| register_clipboard_format(windows::core::w!("JFIF"))); + +// Helper maps and sets +static FORMATS_MAP: LazyLock> = LazyLock::new(|| { + let mut formats_map = FxHashMap::default(); + formats_map.insert(CF_UNICODETEXT.0 as u32, ClipboardFormatType::Text); + formats_map.insert(*CLIPBOARD_PNG_FORMAT, ClipboardFormatType::Image); + formats_map.insert(*CLIPBOARD_GIF_FORMAT, ClipboardFormatType::Image); + formats_map.insert(*CLIPBOARD_JPG_FORMAT, ClipboardFormatType::Image); + formats_map.insert(*CLIPBOARD_SVG_FORMAT, ClipboardFormatType::Image); + formats_map.insert(CF_HDROP.0 as u32, ClipboardFormatType::Files); + formats_map +}); +static FORMATS_SET: LazyLock> = LazyLock::new(|| { + let mut formats_map = FxHashSet::default(); + formats_map.insert(CF_UNICODETEXT.0 as u32); + formats_map.insert(*CLIPBOARD_PNG_FORMAT); + formats_map.insert(*CLIPBOARD_GIF_FORMAT); + formats_map.insert(*CLIPBOARD_JPG_FORMAT); + formats_map.insert(*CLIPBOARD_SVG_FORMAT); + formats_map.insert(CF_HDROP.0 as u32); + formats_map +}); +static IMAGE_FORMATS_MAP: LazyLock> = LazyLock::new(|| { + let mut formats_map = FxHashMap::default(); + formats_map.insert(*CLIPBOARD_PNG_FORMAT, ImageFormat::Png); + formats_map.insert(*CLIPBOARD_GIF_FORMAT, ImageFormat::Gif); + formats_map.insert(*CLIPBOARD_JPG_FORMAT, ImageFormat::Jpeg); + formats_map.insert(*CLIPBOARD_SVG_FORMAT, ImageFormat::Svg); + formats_map +}); + +#[derive(Debug, Clone, Copy)] +enum ClipboardFormatType { + Text, + Image, + Files, +} + +pub(crate) fn write_to_clipboard(item: ClipboardItem) { + write_to_clipboard_inner(item).log_err(); + unsafe { CloseClipboard().log_err() }; +} + +pub(crate) fn read_from_clipboard() -> Option { + let result = read_from_clipboard_inner(); + unsafe { CloseClipboard().log_err() }; + result +} + +pub(crate) fn with_file_names(hdrop: HDROP, mut f: F) +where + F: FnMut(String), +{ + let file_count = unsafe { DragQueryFileW(hdrop, DRAGDROP_GET_FILES_COUNT, None) }; + for file_index in 0..file_count { + let filename_length = unsafe { DragQueryFileW(hdrop, file_index, None) } as usize; + let mut buffer = vec![0u16; filename_length + 1]; + let ret = unsafe { DragQueryFileW(hdrop, file_index, Some(buffer.as_mut_slice())) }; + if ret == 0 { + log::error!("unable to read file name"); + continue; + } + if let Some(file_name) = String::from_utf16(&buffer[0..filename_length]).log_err() { + f(file_name); + } + } +} + +fn register_clipboard_format(format: PCWSTR) -> u32 { + let ret = unsafe { RegisterClipboardFormatW(format) }; + if ret == 0 { + panic!( + "Error when registering clipboard format: {}", + std::io::Error::last_os_error() + ); + } + ret +} + +#[inline] +fn format_to_type(item_format: u32) -> &'static ClipboardFormatType { + FORMATS_MAP.get(&item_format).unwrap() +} + +// Currently, we only write the first item. 
+fn write_to_clipboard_inner(item: ClipboardItem) -> Result<()> { + unsafe { + OpenClipboard(None)?; + EmptyClipboard()?; + } + match item.entries().first() { + Some(entry) => match entry { + ClipboardEntry::String(string) => { + write_string_to_clipboard(string)?; + } + ClipboardEntry::Image(image) => { + write_image_to_clipboard(image)?; + } + }, + None => { + // Writing an empty list of entries just clears the clipboard. + } + } + Ok(()) +} + +fn write_string_to_clipboard(item: &ClipboardString) -> Result<()> { + let encode_wide = item.text.encode_utf16().chain(Some(0)).collect_vec(); + set_data_to_clipboard(&encode_wide, CF_UNICODETEXT.0 as u32)?; + + if let Some(metadata) = item.metadata.as_ref() { + let hash_result = { + let hash = ClipboardString::text_hash(&item.text); + hash.to_ne_bytes() + }; + let encode_wide = + unsafe { std::slice::from_raw_parts(hash_result.as_ptr().cast::(), 4) }; + set_data_to_clipboard(encode_wide, *CLIPBOARD_HASH_FORMAT)?; + + let metadata_wide = metadata.encode_utf16().chain(Some(0)).collect_vec(); + set_data_to_clipboard(&metadata_wide, *CLIPBOARD_METADATA_FORMAT)?; + } + Ok(()) +} + +fn set_data_to_clipboard(data: &[T], format: u32) -> Result<()> { + unsafe { + let global = GlobalAlloc(GMEM_MOVEABLE, std::mem::size_of_val(data))?; + let handle = GlobalLock(global); + std::ptr::copy_nonoverlapping(data.as_ptr(), handle as _, data.len()); + let _ = GlobalUnlock(global); + SetClipboardData(format, HANDLE(global.0))?; + } + Ok(()) +} + +// Here writing PNG to the clipboard to better support other apps. For more info, please ref to +// the PR. +fn write_image_to_clipboard(item: &Image) -> Result<()> { + match item.format { + ImageFormat::Svg => set_data_to_clipboard(item.bytes(), *CLIPBOARD_SVG_FORMAT)?, + ImageFormat::Gif => { + set_data_to_clipboard(item.bytes(), *CLIPBOARD_GIF_FORMAT)?; + let png_bytes = convert_image_to_png_format(item.bytes(), ImageFormat::Gif)?; + set_data_to_clipboard(&png_bytes, *CLIPBOARD_PNG_FORMAT)?; + } + ImageFormat::Png => { + set_data_to_clipboard(item.bytes(), *CLIPBOARD_PNG_FORMAT)?; + let png_bytes = convert_image_to_png_format(item.bytes(), ImageFormat::Png)?; + set_data_to_clipboard(&png_bytes, *CLIPBOARD_PNG_FORMAT)?; + } + ImageFormat::Jpeg => { + set_data_to_clipboard(item.bytes(), *CLIPBOARD_JPG_FORMAT)?; + let png_bytes = convert_image_to_png_format(item.bytes(), ImageFormat::Jpeg)?; + set_data_to_clipboard(&png_bytes, *CLIPBOARD_PNG_FORMAT)?; + } + other => { + log::warn!( + "Clipboard unsupported image format: {:?}, convert to PNG instead.", + item.format + ); + let png_bytes = convert_image_to_png_format(item.bytes(), other)?; + set_data_to_clipboard(&png_bytes, *CLIPBOARD_PNG_FORMAT)?; + } + } + Ok(()) +} + +fn convert_image_to_png_format(bytes: &[u8], image_format: ImageFormat) -> Result> { + let image = image::load_from_memory_with_format(bytes, image_format.into())?; + let mut output_buf = Vec::new(); + image.write_to( + &mut std::io::Cursor::new(&mut output_buf), + image::ImageFormat::Png, + )?; + Ok(output_buf) +} + +fn read_from_clipboard_inner() -> Option { + unsafe { OpenClipboard(None) }.log_err()?; + with_best_match_format(|item_format| match format_to_type(item_format) { + ClipboardFormatType::Text => read_string_from_clipboard(), + ClipboardFormatType::Image => read_image_from_clipboard(item_format), + ClipboardFormatType::Files => read_files_from_clipboard(), + }) +} + +// Here, we enumerate all formats on the clipboard and find the first one that we can process. 
+// The reason we don't use `GetPriorityClipboardFormat` is that it sometimes returns the +// wrong format. +// For instance, when copying a JPEG image from Microsoft Word, there may be several formats +// on the clipboard: Jpeg, Png, Svg. +// If we use `GetPriorityClipboardFormat`, it will return Svg, which is not what we want. +fn with_best_match_format(f: F) -> Option +where + F: Fn(u32) -> Option, +{ + let count = unsafe { CountClipboardFormats() }; + let mut clipboard_format = 0; + for _ in 0..count { + clipboard_format = unsafe { EnumClipboardFormats(clipboard_format) }; + let Some(item_format) = FORMATS_SET.get(&clipboard_format) else { + continue; + }; + if let Some(entry) = f(*item_format) { + return Some(ClipboardItem { + entries: vec![entry], + }); + } + } + // log the formats that we don't support yet. + { + clipboard_format = 0; + for _ in 0..count { + clipboard_format = unsafe { EnumClipboardFormats(clipboard_format) }; + let mut buffer = [0u16; 64]; + unsafe { GetClipboardFormatNameW(clipboard_format, &mut buffer) }; + let format_name = String::from_utf16_lossy(&buffer); + log::warn!( + "Try to paste with unsupported clipboard format: {}, {}.", + clipboard_format, + format_name + ); + } + } + None +} + +fn read_string_from_clipboard() -> Option { + let text = { + let global = SmartGlobal::from_raw_ptr( + unsafe { GetClipboardData(CF_UNICODETEXT.0 as u32).log_err() }?.0, + ); + let text = PCWSTR(global.lock() as *const u16); + String::from_utf16_lossy(unsafe { text.as_wide() }) + }; + let Some(hash) = read_hash_from_clipboard() else { + return Some(ClipboardEntry::String(ClipboardString::new(text))); + }; + let Some(metadata) = read_metadata_from_clipboard() else { + return Some(ClipboardEntry::String(ClipboardString::new(text))); + }; + if hash == ClipboardString::text_hash(&text) { + Some(ClipboardEntry::String(ClipboardString { + text, + metadata: Some(metadata), + })) + } else { + Some(ClipboardEntry::String(ClipboardString::new(text))) + } +} + +fn read_hash_from_clipboard() -> Option { + if unsafe { IsClipboardFormatAvailable(*CLIPBOARD_HASH_FORMAT).is_err() } { + return None; + } + let global = + SmartGlobal::from_raw_ptr(unsafe { GetClipboardData(*CLIPBOARD_HASH_FORMAT).log_err() }?.0); + let raw_ptr = global.lock() as *const u16; + let hash_bytes: [u8; 8] = unsafe { + std::slice::from_raw_parts(raw_ptr.cast::(), 8) + .to_vec() + .try_into() + .log_err() + }?; + Some(u64::from_ne_bytes(hash_bytes)) +} + +fn read_metadata_from_clipboard() -> Option { + unsafe { IsClipboardFormatAvailable(*CLIPBOARD_METADATA_FORMAT).log_err()? 
}; + let global = SmartGlobal::from_raw_ptr( + unsafe { GetClipboardData(*CLIPBOARD_METADATA_FORMAT).log_err() }?.0, + ); + let text = PCWSTR(global.lock() as *const u16); + Some(String::from_utf16_lossy(unsafe { text.as_wide() })) +} + +fn read_image_from_clipboard(format: u32) -> Option { + let image_format = format_number_to_image_format(format)?; + read_image_for_type(format, *image_format) +} + +#[inline] +fn format_number_to_image_format(format_number: u32) -> Option<&'static ImageFormat> { + IMAGE_FORMATS_MAP.get(&format_number) +} + +fn read_image_for_type(format_number: u32, format: ImageFormat) -> Option { + let global = SmartGlobal::from_raw_ptr(unsafe { GetClipboardData(format_number).log_err() }?.0); + let image_ptr = global.lock(); + let iamge_size = global.size(); + let bytes = + unsafe { std::slice::from_raw_parts(image_ptr as *mut u8 as _, iamge_size).to_vec() }; + let id = hash(&bytes); + Some(ClipboardEntry::Image(Image { format, bytes, id })) +} + +fn read_files_from_clipboard() -> Option { + let global = + SmartGlobal::from_raw_ptr(unsafe { GetClipboardData(CF_HDROP.0 as u32).log_err() }?.0); + let hdrop = HDROP(global.lock()); + let mut filenames = String::new(); + with_file_names(hdrop, |file_name| { + filenames.push_str(&file_name); + }); + Some(ClipboardEntry::String(ClipboardString { + text: filenames, + metadata: None, + })) +} + +impl From for image::ImageFormat { + fn from(value: ImageFormat) -> Self { + match value { + ImageFormat::Png => image::ImageFormat::Png, + ImageFormat::Jpeg => image::ImageFormat::Jpeg, + ImageFormat::Webp => image::ImageFormat::WebP, + ImageFormat::Gif => image::ImageFormat::Gif, + // ImageFormat::Svg => todo!(), + ImageFormat::Bmp => image::ImageFormat::Bmp, + ImageFormat::Tiff => image::ImageFormat::Tiff, + _ => unreachable!(), + } + } +} diff --git a/crates/gpui/src/platform/windows/platform.rs b/crates/gpui/src/platform/windows/platform.rs index a900d0114bb23..30e7c402d26d8 100644 --- a/crates/gpui/src/platform/windows/platform.rs +++ b/crates/gpui/src/platform/windows/platform.rs @@ -17,24 +17,12 @@ use windows::{ core::*, Win32::{ Foundation::*, - Globalization::u_memcpy, Graphics::{ Gdi::*, Imaging::{CLSID_WICImagingFactory, IWICImagingFactory}, }, Security::Credentials::*, - System::{ - Com::*, - DataExchange::{ - CloseClipboard, EmptyClipboard, GetClipboardData, OpenClipboard, - RegisterClipboardFormatW, SetClipboardData, - }, - LibraryLoader::*, - Memory::{GlobalAlloc, GlobalLock, GlobalUnlock, GMEM_MOVEABLE}, - Ole::*, - SystemInformation::*, - Threading::*, - }, + System::{Com::*, LibraryLoader::*, Ole::*, SystemInformation::*, Threading::*}, UI::{Input::KeyboardAndMouse::*, Shell::*, WindowsAndMessaging::*}, }, UI::ViewManagement::UISettings, @@ -52,8 +40,6 @@ pub(crate) struct WindowsPlatform { background_executor: BackgroundExecutor, foreground_executor: ForegroundExecutor, text_system: Arc, - clipboard_hash_format: u32, - clipboard_metadata_format: u32, windows_version: WindowsVersion, bitmap_factory: ManuallyDrop, validation_number: usize, @@ -108,9 +94,6 @@ impl WindowsPlatform { let icon = load_icon().unwrap_or_default(); let state = RefCell::new(WindowsPlatformState::new()); let raw_window_handles = RwLock::new(SmallVec::new()); - let clipboard_hash_format = register_clipboard_format(CLIPBOARD_HASH_FORMAT).unwrap(); - let clipboard_metadata_format = - register_clipboard_format(CLIPBOARD_METADATA_FORMAT).unwrap(); let windows_version = WindowsVersion::new().expect("Error retrieve windows version"); let 
validation_number = rand::random::(); @@ -123,8 +106,6 @@ impl WindowsPlatform { background_executor, foreground_executor, text_system, - clipboard_hash_format, - clipboard_metadata_format, windows_version, bitmap_factory, validation_number, @@ -487,15 +468,11 @@ impl Platform for WindowsPlatform { } fn write_to_clipboard(&self, item: ClipboardItem) { - write_to_clipboard( - item, - self.clipboard_hash_format, - self.clipboard_metadata_format, - ); + write_to_clipboard(item); } fn read_from_clipboard(&self) -> Option { - read_from_clipboard(self.clipboard_hash_format, self.clipboard_metadata_format) + read_from_clipboard() } fn write_credentials(&self, url: &str, username: &str, password: &[u8]) -> Task> { @@ -725,117 +702,6 @@ fn should_auto_hide_scrollbars() -> Result { Ok(ui_settings.AutoHideScrollBars()?) } -fn register_clipboard_format(format: PCWSTR) -> Result { - let ret = unsafe { RegisterClipboardFormatW(format) }; - if ret == 0 { - Err(anyhow::anyhow!( - "Error when registering clipboard format: {}", - std::io::Error::last_os_error() - )) - } else { - Ok(ret) - } -} - -fn write_to_clipboard(item: ClipboardItem, hash_format: u32, metadata_format: u32) { - write_to_clipboard_inner(item, hash_format, metadata_format).log_err(); - unsafe { CloseClipboard().log_err() }; -} - -fn write_to_clipboard_inner( - item: ClipboardItem, - hash_format: u32, - metadata_format: u32, -) -> Result<()> { - unsafe { - OpenClipboard(None)?; - EmptyClipboard()?; - let encode_wide = item - .text() - .unwrap_or_default() - .encode_utf16() - .chain(Some(0)) - .collect_vec(); - set_data_to_clipboard(&encode_wide, CF_UNICODETEXT.0 as u32)?; - - if let Some((metadata, text)) = item.metadata().zip(item.text()) { - let hash_result = { - let hash = ClipboardString::text_hash(&text); - hash.to_ne_bytes() - }; - let encode_wide = std::slice::from_raw_parts(hash_result.as_ptr().cast::(), 4); - set_data_to_clipboard(encode_wide, hash_format)?; - - let metadata_wide = metadata.encode_utf16().chain(Some(0)).collect_vec(); - set_data_to_clipboard(&metadata_wide, metadata_format)?; - } - } - Ok(()) -} - -fn set_data_to_clipboard(data: &[u16], format: u32) -> Result<()> { - unsafe { - let global = GlobalAlloc(GMEM_MOVEABLE, data.len() * 2)?; - let handle = GlobalLock(global); - u_memcpy(handle as _, data.as_ptr(), data.len() as _); - let _ = GlobalUnlock(global); - SetClipboardData(format, HANDLE(global.0))?; - } - Ok(()) -} - -fn read_from_clipboard(hash_format: u32, metadata_format: u32) -> Option { - let result = read_from_clipboard_inner(hash_format, metadata_format).log_err(); - unsafe { CloseClipboard().log_err() }; - result -} - -fn read_from_clipboard_inner(hash_format: u32, metadata_format: u32) -> Result { - unsafe { - OpenClipboard(None)?; - let text = { - let handle = GetClipboardData(CF_UNICODETEXT.0 as u32)?; - let text = PCWSTR(handle.0 as *const u16); - String::from_utf16_lossy(text.as_wide()) - }; - let Some(hash) = read_hash_from_clipboard(hash_format) else { - return Ok(ClipboardItem::new_string(text)); - }; - let Some(metadata) = read_metadata_from_clipboard(metadata_format) else { - return Ok(ClipboardItem::new_string(text)); - }; - if hash == ClipboardString::text_hash(&text) { - Ok(ClipboardItem::new_string_with_metadata(text, metadata)) - } else { - Ok(ClipboardItem::new_string(text)) - } - } -} - -fn read_hash_from_clipboard(hash_format: u32) -> Option { - unsafe { - let handle = GetClipboardData(hash_format).log_err()?; - let raw_ptr = handle.0 as *const u16; - let hash_bytes: [u8; 8] = 
std::slice::from_raw_parts(raw_ptr.cast::(), 8) - .to_vec() - .try_into() - .log_err()?; - Some(u64::from_ne_bytes(hash_bytes)) - } -} - -fn read_metadata_from_clipboard(metadata_format: u32) -> Option { - unsafe { - let handle = GetClipboardData(metadata_format).log_err()?; - let text = PCWSTR(handle.0 as *const u16); - Some(String::from_utf16_lossy(text.as_wide())) - } -} - -// clipboard -pub const CLIPBOARD_HASH_FORMAT: PCWSTR = windows::core::w!("zed-text-hash"); -pub const CLIPBOARD_METADATA_FORMAT: PCWSTR = windows::core::w!("zed-metadata"); - #[cfg(test)] mod tests { use crate::{ClipboardItem, Platform, WindowsPlatform}; diff --git a/crates/gpui/src/platform/windows/window.rs b/crates/gpui/src/platform/windows/window.rs index d5ea3be6cac5e..b212a03a98148 100644 --- a/crates/gpui/src/platform/windows/window.rs +++ b/crates/gpui/src/platform/windows/window.rs @@ -735,23 +735,11 @@ impl IDropTarget_Impl for WindowsDragDropHandler_Impl { } let hdrop = idata.u.hGlobal.0 as *mut HDROP; let mut paths = SmallVec::<[PathBuf; 2]>::new(); - let file_count = DragQueryFileW(*hdrop, DRAGDROP_GET_FILES_COUNT, None); - for file_index in 0..file_count { - let filename_length = DragQueryFileW(*hdrop, file_index, None) as usize; - let mut buffer = vec![0u16; filename_length + 1]; - let ret = DragQueryFileW(*hdrop, file_index, Some(buffer.as_mut_slice())); - if ret == 0 { - log::error!("unable to read file name"); - continue; + with_file_names(*hdrop, |file_name| { + if let Some(path) = PathBuf::from_str(&file_name).log_err() { + paths.push(path); } - if let Some(file_name) = - String::from_utf16(&buffer[0..filename_length]).log_err() - { - if let Some(path) = PathBuf::from_str(&file_name).log_err() { - paths.push(path); - } - } - } + }); ReleaseStgMedium(&mut idata); let mut cursor_position = POINT { x: pt.x, y: pt.y }; ScreenToClient(self.0.hwnd, &mut cursor_position) @@ -1069,9 +1057,6 @@ fn calculate_client_rect( } } -// https://learn.microsoft.com/en-us/windows/win32/api/shellapi/nf-shellapi-dragqueryfilew -const DRAGDROP_GET_FILES_COUNT: u32 = 0xFFFFFFFF; - mod windows_renderer { use std::{num::NonZeroIsize, sync::Arc}; diff --git a/crates/gpui/src/platform/windows/wrapper.rs b/crates/gpui/src/platform/windows/wrapper.rs index 6015dffdab299..e6e645e61aebd 100644 --- a/crates/gpui/src/platform/windows/wrapper.rs +++ b/crates/gpui/src/platform/windows/wrapper.rs @@ -1,6 +1,11 @@ use std::ops::Deref; -use windows::Win32::{Foundation::HANDLE, UI::WindowsAndMessaging::HCURSOR}; +use util::ResultExt; +use windows::Win32::{ + Foundation::{HANDLE, HGLOBAL}, + System::Memory::{GlobalLock, GlobalSize, GlobalUnlock}, + UI::WindowsAndMessaging::HCURSOR, +}; #[derive(Debug, Clone, Copy)] pub(crate) struct SafeHandle { @@ -45,3 +50,30 @@ impl Deref for SafeCursor { &self.raw } } + +#[derive(Debug, Clone)] +pub(crate) struct SmartGlobal { + raw: HGLOBAL, +} + +impl SmartGlobal { + pub(crate) fn from_raw_ptr(ptr: *mut std::ffi::c_void) -> Self { + Self { raw: HGLOBAL(ptr) } + } + + pub(crate) fn lock(&self) -> *mut std::ffi::c_void { + unsafe { GlobalLock(self.raw) } + } + + pub(crate) fn size(&self) -> usize { + unsafe { GlobalSize(self.raw) } + } +} + +impl Drop for SmartGlobal { + fn drop(&mut self) { + unsafe { + GlobalUnlock(self.raw).log_err(); + } + } +} From 938a0679c004be4ae2b6c1e897de75e4b4e7aa24 Mon Sep 17 00:00:00 2001 From: Jason Lee Date: Tue, 1 Oct 2024 07:39:19 +0800 Subject: [PATCH 155/228] gpui: Fix img element to auto size when only have width or height (#17994) Release Notes: - N/A --- We may 
only want to set the height of an image to limit the size and make the width adaptive. In HTML, we will only set width or height, and the other side will adapt and maintain the original image ratio. I changed this because I had a logo image that only to be limited in height, and then I found that setting the height of the `img` alone would not display correctly. I also tried to set `ObjectFit` in this Demo, but it seems that none of them can achieve the same effect as "After". ## Before before 2024-09-18 164029 ## After after 2024-09-18 172003 --- crates/gpui/examples/image/image.rs | 54 +++++++++++++++++++++-------- crates/gpui/src/elements/img.rs | 36 +++++++++++++------ 2 files changed, 65 insertions(+), 25 deletions(-) diff --git a/crates/gpui/examples/image/image.rs b/crates/gpui/examples/image/image.rs index 157dbdf70f1af..24a94bf746ec4 100644 --- a/crates/gpui/examples/image/image.rs +++ b/crates/gpui/examples/image/image.rs @@ -69,25 +69,51 @@ struct ImageShowcase { impl Render for ImageShowcase { fn render(&mut self, _cx: &mut ViewContext) -> impl IntoElement { div() - .flex() - .flex_row() .size_full() + .flex() + .flex_col() .justify_center() .items_center() .gap_8() .bg(rgb(0xFFFFFF)) - .child(ImageContainer::new( - "Image loaded from a local file", - self.local_resource.clone(), - )) - .child(ImageContainer::new( - "Image loaded from a remote resource", - self.remote_resource.clone(), - )) - .child(ImageContainer::new( - "Image loaded from an asset", - self.asset_resource.clone(), - )) + .child( + div() + .flex() + .flex_row() + .justify_center() + .items_center() + .gap_8() + .child(ImageContainer::new( + "Image loaded from a local file", + self.local_resource.clone(), + )) + .child(ImageContainer::new( + "Image loaded from a remote resource", + self.remote_resource.clone(), + )) + .child(ImageContainer::new( + "Image loaded from an asset", + self.asset_resource.clone(), + )), + ) + .child( + div() + .flex() + .flex_row() + .gap_8() + .child( + div() + .flex_col() + .child("Auto Width") + .child(img("https://picsum.photos/800/400").h(px(180.))), + ) + .child( + div() + .flex_col() + .child("Auto Height") + .child(img("https://picsum.photos/480/640").w(px(180.))), + ), + ) } } diff --git a/crates/gpui/src/elements/img.rs b/crates/gpui/src/elements/img.rs index 63236d5309f14..58ee639265ec1 100644 --- a/crates/gpui/src/elements/img.rs +++ b/crates/gpui/src/elements/img.rs @@ -1,7 +1,7 @@ use crate::{ px, AbsoluteLength, AppContext, Asset, Bounds, DefiniteLength, Element, ElementId, GlobalElementId, Hitbox, Image, InteractiveElement, Interactivity, IntoElement, LayoutId, - Length, ObjectFit, Pixels, RenderImage, SharedString, SharedUri, Size, StyleRefinement, Styled, + Length, ObjectFit, Pixels, RenderImage, SharedString, SharedUri, StyleRefinement, Styled, SvgSize, UriOrPath, WindowContext, }; use futures::{AsyncReadExt, Future}; @@ -187,16 +187,30 @@ impl Element for Img { let image_size = data.size(frame_index); - if let (Length::Auto, Length::Auto) = (style.size.width, style.size.height) - { - style.size = Size { - width: Length::Definite(DefiniteLength::Absolute( - AbsoluteLength::Pixels(px(image_size.width.0 as f32)), - )), - height: Length::Definite(DefiniteLength::Absolute( - AbsoluteLength::Pixels(px(image_size.height.0 as f32)), - )), - } + if let Length::Auto = style.size.width { + style.size.width = match style.size.height { + Length::Definite(DefiniteLength::Absolute( + AbsoluteLength::Pixels(height), + )) => Length::Definite( + px(image_size.width.0 as f32 * 
height.0 + / image_size.height.0 as f32) + .into(), + ), + _ => Length::Definite(px(image_size.width.0 as f32).into()), + }; + } + + if let Length::Auto = style.size.height { + style.size.height = match style.size.width { + Length::Definite(DefiniteLength::Absolute( + AbsoluteLength::Pixels(width), + )) => Length::Definite( + px(image_size.height.0 as f32 * width.0 + / image_size.width.0 as f32) + .into(), + ), + _ => Length::Definite(px(image_size.height.0 as f32).into()), + }; } if global_id.is_some() && data.frame_count() > 1 { From a752bbcee83f9cf853082e91c693f7bde197eb0f Mon Sep 17 00:00:00 2001 From: Patrick MARIE Date: Tue, 1 Oct 2024 01:51:05 +0200 Subject: [PATCH 156/228] Fix linux double click (#18504) Closes #17573 Release Notes: - Check that double clicks on Linux are triggered by same button. --- crates/gpui/src/platform/linux/wayland/client.rs | 7 +++++++ crates/gpui/src/platform/linux/x11/client.rs | 10 ++++++++-- 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/crates/gpui/src/platform/linux/wayland/client.rs b/crates/gpui/src/platform/linux/wayland/client.rs index f7ade828615f4..4b7816a73ac36 100644 --- a/crates/gpui/src/platform/linux/wayland/client.rs +++ b/crates/gpui/src/platform/linux/wayland/client.rs @@ -236,6 +236,7 @@ pub struct DragState { } pub struct ClickState { + last_mouse_button: Option, last_click: Instant, last_location: Point, current_count: usize, @@ -535,6 +536,7 @@ impl WaylandClient { }, click: ClickState { last_click: Instant::now(), + last_mouse_button: None, last_location: Point::default(), current_count: 0, }, @@ -1524,6 +1526,10 @@ impl Dispatch for WaylandClientStatePtr { let click_elapsed = state.click.last_click.elapsed(); if click_elapsed < DOUBLE_CLICK_INTERVAL + && state + .click + .last_mouse_button + .is_some_and(|prev_button| prev_button == button) && is_within_click_distance( state.click.last_location, state.mouse_location.unwrap(), @@ -1535,6 +1541,7 @@ impl Dispatch for WaylandClientStatePtr { } state.click.last_click = Instant::now(); + state.click.last_mouse_button = Some(button); state.click.last_location = state.mouse_location.unwrap(); state.button_pressed = Some(button); diff --git a/crates/gpui/src/platform/linux/x11/client.rs b/crates/gpui/src/platform/linux/x11/client.rs index 7f5342a50360e..5339cc95fd1b1 100644 --- a/crates/gpui/src/platform/linux/x11/client.rs +++ b/crates/gpui/src/platform/linux/x11/client.rs @@ -37,8 +37,8 @@ use crate::platform::linux::LinuxClient; use crate::platform::{LinuxCommon, PlatformWindow}; use crate::{ modifiers_from_xinput_info, point, px, AnyWindowHandle, Bounds, ClipboardItem, CursorStyle, - DisplayId, FileDropEvent, Keystroke, Modifiers, ModifiersChangedEvent, Pixels, Platform, - PlatformDisplay, PlatformInput, Point, ScaledPixels, ScrollDelta, Size, TouchPhase, + DisplayId, FileDropEvent, Keystroke, Modifiers, ModifiersChangedEvent, MouseButton, Pixels, + Platform, PlatformDisplay, PlatformInput, Point, ScaledPixels, ScrollDelta, Size, TouchPhase, WindowParams, X11Window, }; @@ -122,6 +122,7 @@ pub struct X11ClientState { pub(crate) event_loop: Option>, pub(crate) last_click: Instant, + pub(crate) last_mouse_button: Option, pub(crate) last_location: Point, pub(crate) current_count: usize, @@ -404,6 +405,7 @@ impl X11Client { loop_handle: handle, common, last_click: Instant::now(), + last_mouse_button: None, last_location: Point::new(px(0.0), px(0.0)), current_count: 0, scale_factor, @@ -952,6 +954,9 @@ impl X11Client { let click_elapsed = state.last_click.elapsed(); if 
click_elapsed < DOUBLE_CLICK_INTERVAL + && state + .last_mouse_button + .is_some_and(|prev_button| prev_button == button) && is_within_click_distance(state.last_location, position) { state.current_count += 1; @@ -960,6 +965,7 @@ impl X11Client { } state.last_click = Instant::now(); + state.last_mouse_button = Some(button); state.last_location = position; let current_count = state.current_count; From 1d2172aba8c7718d36d363e8d4e7f8b1a3ad1cec Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Mon, 30 Sep 2024 21:07:10 -0400 Subject: [PATCH 157/228] docs: Correct glibc requirements (#18554) --- docs/src/linux.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/linux.md b/docs/src/linux.md index 3bba9c8f93aaa..33d12d0a8ca02 100644 --- a/docs/src/linux.md +++ b/docs/src/linux.md @@ -16,7 +16,7 @@ The Zed installed by the script works best on systems that: - have a Vulkan compatible GPU available (for example Linux on an M-series macBook) - have a system-wide glibc (NixOS and Alpine do not by default) - - x86_64 (Intel/AMD): glibc version >= 2.29 (Ubuntu 20 and newer) + - x86_64 (Intel/AMD): glibc version >= 2.31 (Ubuntu 20 and newer) - aarch64 (ARM): glibc version >= 2.35 (Ubuntu 22 and newer) Both Nix and Alpine have third-party Zed packages available (though they are currently a few weeks out of date). If you'd like to use our builds they do work if you install a glibc compatibility layer. On NixOS you can try [nix-ld](https://github.com/Mic92/nix-ld), and on Alpine [gcompat](https://wiki.alpinelinux.org/wiki/Running_glibc_programs). From 39be9e5949483e8964322d9878e7d5cb794872cb Mon Sep 17 00:00:00 2001 From: Jason Lee Date: Tue, 1 Oct 2024 09:25:02 +0800 Subject: [PATCH 158/228] gpui: Fix `show: false` support on Windows to create an invisible window (#18161) Release Notes: - N/A - The `show` of WindowOptions is valid on macOS but not on Windows, this changes to fix it to support create an invisible window. 
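For reference, the invisible-window case in the new `window.rs` example below boils down to the caller passing `show: false`; a trimmed sketch (the view name is made up, everything else mirrors the example):

```rust
use gpui::*;

struct Invisible;

impl Render for Invisible {
    fn render(&mut self, _cx: &mut ViewContext<Self>) -> impl IntoElement {
        div().size_full().bg(rgb(0xffffff)).child("never shown")
    }
}

fn main() {
    App::new().run(|cx: &mut AppContext| {
        // `show: false` now creates the window without ever presenting it,
        // matching the existing macOS behaviour.
        cx.open_window(
            WindowOptions {
                show: false,
                ..Default::default()
            },
            |cx| cx.new_view(|_cx| Invisible),
        )
        .unwrap();
    });
}
```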
```bash cargo run -p gpui --example window ``` ## Before https://github.com/user-attachments/assets/4157bdaa-39a7-44df-bbdc-30b00e9c61e9 ## After https://github.com/user-attachments/assets/d48fa524-0caa-4f87-932d-01d7a468c488 https://github.com/user-attachments/assets/dd052f15-c8db-4a2a-a6af-a7c0ffecca84 --- crates/gpui/examples/window.rs | 168 +++++++++++++++++++++ crates/gpui/src/platform/mac/window.rs | 2 +- crates/gpui/src/platform/windows/window.rs | 13 +- 3 files changed, 180 insertions(+), 3 deletions(-) create mode 100644 crates/gpui/examples/window.rs diff --git a/crates/gpui/examples/window.rs b/crates/gpui/examples/window.rs new file mode 100644 index 0000000000000..0f0d4287da723 --- /dev/null +++ b/crates/gpui/examples/window.rs @@ -0,0 +1,168 @@ +use gpui::*; +use prelude::FluentBuilder as _; + +struct SubWindow { + custom_titlebar: bool, +} + +fn button(text: &str, on_click: impl Fn(&mut WindowContext) + 'static) -> impl IntoElement { + div() + .id(SharedString::from(text.to_string())) + .flex_none() + .px_2() + .bg(rgb(0xf7f7f7)) + .active(|this| this.opacity(0.85)) + .border_1() + .border_color(rgb(0xe0e0e0)) + .rounded_md() + .cursor_pointer() + .child(text.to_string()) + .on_click(move |_, cx| on_click(cx)) +} + +impl Render for SubWindow { + fn render(&mut self, _: &mut ViewContext) -> impl IntoElement { + div() + .flex() + .flex_col() + .bg(rgb(0xffffff)) + .size_full() + .gap_2() + .when(self.custom_titlebar, |cx| { + cx.child( + div() + .flex() + .h(px(32.)) + .px_4() + .bg(gpui::blue()) + .text_color(gpui::white()) + .w_full() + .child( + div() + .flex() + .items_center() + .justify_center() + .size_full() + .child("Custom Titlebar"), + ), + ) + }) + .child( + div() + .p_8() + .gap_2() + .child("SubWindow") + .child(button("Close", |cx| { + cx.remove_window(); + })), + ) + } +} + +struct WindowDemo {} + +impl Render for WindowDemo { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + let window_bounds = + WindowBounds::Windowed(Bounds::centered(None, size(px(300.0), px(300.0)), cx)); + + div() + .p_4() + .flex() + .flex_wrap() + .bg(rgb(0xffffff)) + .size_full() + .justify_center() + .items_center() + .gap_2() + .child(button("Normal", move |cx| { + cx.open_window( + WindowOptions { + window_bounds: Some(window_bounds), + ..Default::default() + }, + |cx| { + cx.new_view(|_cx| SubWindow { + custom_titlebar: false, + }) + }, + ) + .unwrap(); + })) + .child(button("Popup", move |cx| { + cx.open_window( + WindowOptions { + window_bounds: Some(window_bounds), + kind: WindowKind::PopUp, + ..Default::default() + }, + |cx| { + cx.new_view(|_cx| SubWindow { + custom_titlebar: false, + }) + }, + ) + .unwrap(); + })) + .child(button("Custom Titlebar", move |cx| { + cx.open_window( + WindowOptions { + titlebar: None, + window_bounds: Some(window_bounds), + ..Default::default() + }, + |cx| { + cx.new_view(|_cx| SubWindow { + custom_titlebar: true, + }) + }, + ) + .unwrap(); + })) + .child(button("Invisible", move |cx| { + cx.open_window( + WindowOptions { + show: false, + window_bounds: Some(window_bounds), + ..Default::default() + }, + |cx| { + cx.new_view(|_cx| SubWindow { + custom_titlebar: false, + }) + }, + ) + .unwrap(); + })) + .child(button("Unmovable", move |cx| { + cx.open_window( + WindowOptions { + is_movable: false, + titlebar: None, + window_bounds: Some(window_bounds), + ..Default::default() + }, + |cx| { + cx.new_view(|_cx| SubWindow { + custom_titlebar: false, + }) + }, + ) + .unwrap(); + })) + } +} + +fn main() { + App::new().run(|cx: &mut 
AppContext| { + let bounds = Bounds::centered(None, size(px(800.0), px(600.0)), cx); + cx.open_window( + WindowOptions { + window_bounds: Some(WindowBounds::Windowed(bounds)), + ..Default::default() + }, + |cx| cx.new_view(|_cx| WindowDemo {}), + ) + .unwrap(); + }); +} diff --git a/crates/gpui/src/platform/mac/window.rs b/crates/gpui/src/platform/mac/window.rs index 885c3565ccc89..5f9ee43dec48f 100644 --- a/crates/gpui/src/platform/mac/window.rs +++ b/crates/gpui/src/platform/mac/window.rs @@ -707,7 +707,7 @@ impl MacWindow { } } - if focus { + if focus && show { native_window.makeKeyAndOrderFront_(nil); } else if show { native_window.orderFront_(nil); diff --git a/crates/gpui/src/platform/windows/window.rs b/crates/gpui/src/platform/windows/window.rs index b212a03a98148..d7b9a469b7d4e 100644 --- a/crates/gpui/src/platform/windows/window.rs +++ b/crates/gpui/src/platform/windows/window.rs @@ -287,7 +287,7 @@ impl WindowsWindow { .map(|title| title.as_ref()) .unwrap_or(""), ); - let (dwexstyle, dwstyle) = if params.kind == WindowKind::PopUp { + let (dwexstyle, mut dwstyle) = if params.kind == WindowKind::PopUp { (WS_EX_TOOLWINDOW, WINDOW_STYLE(0x0)) } else { ( @@ -295,6 +295,10 @@ impl WindowsWindow { WS_THICKFRAME | WS_SYSMENU | WS_MAXIMIZEBOX | WS_MINIMIZEBOX, ) }; + if !params.show { + dwstyle |= WS_MINIMIZE; + } + let hinstance = get_module_handle(); let display = if let Some(display_id) = params.display_id { // if we obtain a display_id, then this ID must be valid. @@ -357,7 +361,12 @@ impl WindowsWindow { drop(lock); SetWindowPlacement(raw_hwnd, &placement)?; } - unsafe { ShowWindow(raw_hwnd, SW_SHOW).ok()? }; + + if params.show { + unsafe { ShowWindow(raw_hwnd, SW_SHOW).ok()? }; + } else { + unsafe { ShowWindow(raw_hwnd, SW_HIDE).ok()? }; + } Ok(Self(state_ptr)) } From 8d795ff882ec6ee6eb40346ee4fbcba88e6e6b6d Mon Sep 17 00:00:00 2001 From: Alvaro Parker <64918109+AlvaroParker@users.noreply.github.com> Date: Tue, 1 Oct 2024 03:04:35 -0300 Subject: [PATCH 159/228] Fix file watching for symlinks (#17609) Closes #17605 Watches for target paths if file watched is a symlink in Linux. 
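Stripped of the `Fs` plumbing, the approach is to resolve the symlink's target up front and register watches for the target (and its parent) alongside the original path. A rough sketch using the `notify` crate directly (which `fs` already depends on), with a hypothetical watched path:

```rust
use notify::{recommended_watcher, RecursiveMode, Watcher};
use std::path::Path;

fn main() -> notify::Result<()> {
    // Hypothetical path that is a symlink to the real settings file.
    let watched = Path::new("/home/me/.config/zed/settings.json");

    let mut watcher = recommended_watcher(|res: notify::Result<notify::Event>| {
        if let Ok(event) = res {
            // On Linux the reported paths can point at the symlink's target,
            // so matching only against the watched path would drop events.
            println!("{:?}: {:?}", event.kind, event.paths);
        }
    })?;

    // Watch the parent of the path (so creation is seen) as before...
    watcher.watch(watched.parent().unwrap(), RecursiveMode::NonRecursive)?;

    // ...and, if it is a symlink, also watch the resolved target and its parent.
    if let Ok(target) = std::fs::read_link(watched) {
        watcher.watch(&target, RecursiveMode::NonRecursive)?;
        if let Some(parent) = target.parent() {
            watcher.watch(parent, RecursiveMode::NonRecursive)?;
        }
    }

    std::thread::sleep(std::time::Duration::from_secs(60));
    Ok(())
}
```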
This will check if the generated `notify::Event` has any paths matching the `root_path` and if the file is a symlink it will also check if the path matches the `target_root_path` (the path that the symlink is pointing to) Release Notes: - Added file watching for symlinks --- crates/fs/src/fs.rs | 84 +++++++++++++++++++++++++++++---------------- 1 file changed, 54 insertions(+), 30 deletions(-) diff --git a/crates/fs/src/fs.rs b/crates/fs/src/fs.rs index b649831fd2de6..7064448e16829 100644 --- a/crates/fs/src/fs.rs +++ b/crates/fs/src/fs.rs @@ -587,38 +587,54 @@ impl Fs for RealFs { let pending_paths: Arc>> = Default::default(); let root_path = path.to_path_buf(); - watcher::global(|g| { - let tx = tx.clone(); - let pending_paths = pending_paths.clone(); - g.add(move |event: ¬ify::Event| { - let kind = match event.kind { - EventKind::Create(_) => Some(PathEventKind::Created), - EventKind::Modify(_) => Some(PathEventKind::Changed), - EventKind::Remove(_) => Some(PathEventKind::Removed), - _ => None, - }; - let mut paths = event - .paths - .iter() - .filter_map(|path| { - path.starts_with(&root_path).then(|| PathEvent { - path: path.clone(), - kind, + // Check if root path is a symlink + let target_path = self.read_link(&path).await.ok(); + + watcher::global({ + let target_path = target_path.clone(); + |g| { + let tx = tx.clone(); + let pending_paths = pending_paths.clone(); + g.add(move |event: ¬ify::Event| { + let kind = match event.kind { + EventKind::Create(_) => Some(PathEventKind::Created), + EventKind::Modify(_) => Some(PathEventKind::Changed), + EventKind::Remove(_) => Some(PathEventKind::Removed), + _ => None, + }; + let mut paths = event + .paths + .iter() + .filter_map(|path| { + if let Some(target) = target_path.clone() { + if path.starts_with(target) { + return Some(PathEvent { + path: path.clone(), + kind, + }); + } + } else if path.starts_with(&root_path) { + return Some(PathEvent { + path: path.clone(), + kind, + }); + } + None }) - }) - .collect::>(); - - if !paths.is_empty() { - paths.sort(); - let mut pending_paths = pending_paths.lock(); - if pending_paths.is_empty() { - tx.try_send(()).ok(); + .collect::>(); + + if !paths.is_empty() { + paths.sort(); + let mut pending_paths = pending_paths.lock(); + if pending_paths.is_empty() { + tx.try_send(()).ok(); + } + util::extend_sorted(&mut *pending_paths, paths, usize::MAX, |a, b| { + a.path.cmp(&b.path) + }); } - util::extend_sorted(&mut *pending_paths, paths, usize::MAX, |a, b| { - a.path.cmp(&b.path) - }); - } - }) + }) + } }) .log_err(); @@ -626,6 +642,14 @@ impl Fs for RealFs { watcher.add(path).ok(); // Ignore "file doesn't exist error" and rely on parent watcher. + // Check if path is a symlink and follow the target parent + if let Some(target) = target_path { + watcher.add(&target).ok(); + if let Some(parent) = target.parent() { + watcher.add(parent).log_err(); + } + } + // watch the parent dir so we can tell when settings.json is created if let Some(parent) = path.parent() { watcher.add(parent).log_err(); From 72be8c5d145d0f7554fd7cb8d3ee3c6c4e99423d Mon Sep 17 00:00:00 2001 From: Jason Lee Date: Tue, 1 Oct 2024 14:20:24 +0800 Subject: [PATCH 160/228] gpui: Fix `hide`, `activate` method on Windows to hide/show application (#18164) Release Notes: - N/A Continue #18161 to fix `cx.hide`, `cx.activate` method on Windows to hide/show application. 
## After https://github.com/user-attachments/assets/fe0070f9-7844-4c2a-b859-3e22ee4b8d22 --------- Co-authored-by: Mikayla Maki --- crates/gpui/examples/window.rs | 12 ++++++++++ crates/gpui/src/platform/windows/platform.rs | 24 ++++++++++++++++---- 2 files changed, 32 insertions(+), 4 deletions(-) diff --git a/crates/gpui/examples/window.rs b/crates/gpui/examples/window.rs index 0f0d4287da723..78a47782c956a 100644 --- a/crates/gpui/examples/window.rs +++ b/crates/gpui/examples/window.rs @@ -150,6 +150,18 @@ impl Render for WindowDemo { ) .unwrap(); })) + .child(button("Hide Application", |cx| { + cx.hide(); + + // Restore the application after 3 seconds + cx.spawn(|mut cx| async move { + Timer::after(std::time::Duration::from_secs(3)).await; + cx.update(|cx| { + cx.activate(false); + }) + }) + .detach(); + })) } } diff --git a/crates/gpui/src/platform/windows/platform.rs b/crates/gpui/src/platform/windows/platform.rs index 30e7c402d26d8..7f6677973b2fb 100644 --- a/crates/gpui/src/platform/windows/platform.rs +++ b/crates/gpui/src/platform/windows/platform.rs @@ -33,6 +33,8 @@ use crate::*; pub(crate) struct WindowsPlatform { state: RefCell, raw_window_handles: RwLock>, + // The window handles that are hided by `hide` method. + hidden_windows: RwLock>, // The below members will never change throughout the entire lifecycle of the app. icon: HICON, main_receiver: flume::Receiver, @@ -100,6 +102,7 @@ impl WindowsPlatform { Self { state, raw_window_handles, + hidden_windows: RwLock::new(SmallVec::new()), icon, main_receiver, dispatch_event, @@ -295,12 +298,25 @@ impl Platform for WindowsPlatform { } } - // todo(windows) - fn activate(&self, _ignoring_other_apps: bool) {} + fn activate(&self, _ignoring_other_apps: bool) { + let mut state = self.hidden_windows.write(); + state.iter().for_each(|handle| unsafe { + ShowWindow(*handle, SW_SHOW).ok().log_err(); + }); + state.clear(); + } - // todo(windows) fn hide(&self) { - unimplemented!() + let mut state = self.hidden_windows.write(); + self.raw_window_handles + .read() + .iter() + .for_each(|handle| unsafe { + if IsWindowVisible(*handle).as_bool() { + state.push(*handle); + ShowWindow(*handle, SW_HIDE).ok().log_err(); + } + }); } // todo(windows) From 527c9097f848feef78cfb6cfd36d32eb048e6bb5 Mon Sep 17 00:00:00 2001 From: Michael Sloan Date: Tue, 1 Oct 2024 01:14:40 -0600 Subject: [PATCH 161/228] linux: Various X11 scroll improvements (#18484) Closes #14089, #14416, #15970, #17230, #18485 Release Notes: - Fixed some cases where Linux X11 mouse scrolling doesn't work at all (#14089, ##15970, #17230) - Fixed handling of switching between Linux X11 devices used for scrolling (#14416, #18485) Change details: Also includes the commit from PR #18317 so I don't have to deal with merge conflicts. * Now uses valuator info from slave pointers rather than master. This hopefully fixes remaining cases where scrolling is fully broken. https://github.com/zed-industries/zed/issues/14089, https://github.com/zed-industries/zed/issues/15970, https://github.com/zed-industries/zed/issues/17230 * Per-device recording of "last scroll position" used to calculate deltas. This meant that swithing scroll devices would cause a sudden jump of scroll position, often to the beginning or end of the file (https://github.com/zed-industries/zed/issues/14416). * Re-queries device metadata when devices change, so that newly plugged in devices will work, and re-use of device-ids don't use old metadata with a new device. 
* xinput 2 documentation describes support for multiple master devices. I believe this implementation will support that, since now it just uses `DeviceInfo` from slave devices. The concept of master devices is only used in registering for events. * Uses popcount+bit masking to resolve axis indexes, instead of iterating bit indices. --------- Co-authored-by: Thorsten Ball --- crates/gpui/src/platform/linux/platform.rs | 2 +- .../gpui/src/platform/linux/wayland/client.rs | 8 +- crates/gpui/src/platform/linux/x11/client.rs | 406 ++++++++++++------ crates/gpui/src/platform/linux/x11/event.rs | 116 ++++- crates/gpui/src/platform/linux/x11/window.rs | 19 +- 5 files changed, 408 insertions(+), 143 deletions(-) diff --git a/crates/gpui/src/platform/linux/platform.rs b/crates/gpui/src/platform/linux/platform.rs index 67f1a43cbe322..6e09badb493a6 100644 --- a/crates/gpui/src/platform/linux/platform.rs +++ b/crates/gpui/src/platform/linux/platform.rs @@ -45,7 +45,7 @@ use crate::{ use super::x11::X11Client; -pub(crate) const SCROLL_LINES: f64 = 3.0; +pub(crate) const SCROLL_LINES: f32 = 3.0; // Values match the defaults on GTK. // Taken from https://github.com/GNOME/gtk/blob/main/gtk/gtksettings.c#L320 diff --git a/crates/gpui/src/platform/linux/wayland/client.rs b/crates/gpui/src/platform/linux/wayland/client.rs index 4b7816a73ac36..f0015a7e5820b 100644 --- a/crates/gpui/src/platform/linux/wayland/client.rs +++ b/crates/gpui/src/platform/linux/wayland/client.rs @@ -1634,10 +1634,10 @@ impl Dispatch for WaylandClientStatePtr { let scroll_delta = state.discrete_scroll_delta.get_or_insert(point(0.0, 0.0)); match axis { wl_pointer::Axis::VerticalScroll => { - scroll_delta.y += discrete as f32 * axis_modifier * SCROLL_LINES as f32; + scroll_delta.y += discrete as f32 * axis_modifier * SCROLL_LINES; } wl_pointer::Axis::HorizontalScroll => { - scroll_delta.x += discrete as f32 * axis_modifier * SCROLL_LINES as f32; + scroll_delta.x += discrete as f32 * axis_modifier * SCROLL_LINES; } _ => unreachable!(), } @@ -1662,10 +1662,10 @@ impl Dispatch for WaylandClientStatePtr { let wheel_percent = value120 as f32 / 120.0; match axis { wl_pointer::Axis::VerticalScroll => { - scroll_delta.y += wheel_percent * axis_modifier * SCROLL_LINES as f32; + scroll_delta.y += wheel_percent * axis_modifier * SCROLL_LINES; } wl_pointer::Axis::HorizontalScroll => { - scroll_delta.x += wheel_percent * axis_modifier * SCROLL_LINES as f32; + scroll_delta.x += wheel_percent * axis_modifier * SCROLL_LINES; } _ => unreachable!(), } diff --git a/crates/gpui/src/platform/linux/x11/client.rs b/crates/gpui/src/platform/linux/x11/client.rs index 5339cc95fd1b1..459f2045bb732 100644 --- a/crates/gpui/src/platform/linux/x11/client.rs +++ b/crates/gpui/src/platform/linux/x11/client.rs @@ -1,6 +1,6 @@ use core::str; use std::cell::RefCell; -use std::collections::HashSet; +use std::collections::{BTreeMap, HashSet}; use std::ops::Deref; use std::path::PathBuf; use std::rc::{Rc, Weak}; @@ -42,7 +42,10 @@ use crate::{ WindowParams, X11Window, }; -use super::{button_of_key, modifiers_from_state, pressed_button_from_mask}; +use super::{ + button_or_scroll_from_event_detail, get_valuator_axis_index, modifiers_from_state, + pressed_button_from_mask, ButtonOrScroll, ScrollDirection, +}; use super::{X11Display, X11WindowStatePtr, XcbAtoms}; use super::{XimCallbackEvent, XimHandler}; use crate::platform::linux::platform::{DOUBLE_CLICK_INTERVAL, SCROLL_LINES}; @@ -51,7 +54,15 @@ use crate::platform::linux::{ get_xkb_compose_state, is_within_click_distance, 
open_uri_internal, reveal_path_internal, }; -pub(super) const XINPUT_MASTER_DEVICE: u16 = 1; +/// Value for DeviceId parameters which selects all devices. +pub(crate) const XINPUT_ALL_DEVICES: xinput::DeviceId = 0; + +/// Value for DeviceId parameters which selects all device groups. Events that +/// occur within the group are emitted by the group itself. +/// +/// In XInput 2's interface, these are referred to as "master devices", but that +/// terminology is both archaic and unclear. +pub(crate) const XINPUT_ALL_DEVICE_GROUPS: xinput::DeviceId = 1; pub(crate) struct WindowRef { window: X11WindowStatePtr, @@ -117,6 +128,26 @@ pub struct Xdnd { position: Point, } +#[derive(Debug)] +struct PointerDeviceState { + horizontal: ScrollAxisState, + vertical: ScrollAxisState, +} + +#[derive(Debug, Default)] +struct ScrollAxisState { + /// Valuator number for looking up this axis's scroll value. + valuator_number: Option, + /// Conversion factor from scroll units to lines. + multiplier: f32, + /// Last scroll value for calculating scroll delta. + /// + /// This gets set to `None` whenever it might be invalid - when devices change or when window focus changes. + /// The logic errs on the side of invalidating this, since the consequence is just skipping the delta of one scroll event. + /// The consequence of not invalidating it can be large invalid deltas, which are much more user visible. + scroll_value: Option, +} + pub struct X11ClientState { pub(crate) loop_handle: LoopHandle<'static, X11Client>, pub(crate) event_loop: Option>, @@ -152,9 +183,7 @@ pub struct X11ClientState { pub(crate) cursor_styles: HashMap, pub(crate) cursor_cache: HashMap, - pub(crate) scroll_class_data: Vec, - pub(crate) scroll_x: Option, - pub(crate) scroll_y: Option, + pointer_device_states: BTreeMap, pub(crate) common: LinuxCommon, pub(crate) clipboard: x11_clipboard::Clipboard, @@ -266,31 +295,21 @@ impl X11Client { .prefetch_extension_information(xinput::X11_EXTENSION_NAME) .unwrap(); + // Announce to X server that XInput up to 2.1 is supported. To increase this to 2.2 and + // beyond, support for touch events would need to be added. let xinput_version = xcb_connection - .xinput_xi_query_version(2, 0) + .xinput_xi_query_version(2, 1) .unwrap() .reply() .unwrap(); + // XInput 1.x is not supported. assert!( xinput_version.major_version >= 2, - "XInput Extension v2 not supported." + "XInput version >= 2 required." 
); - let master_device_query = xcb_connection - .xinput_xi_query_device(XINPUT_MASTER_DEVICE) - .unwrap() - .reply() - .unwrap(); - let scroll_class_data = master_device_query - .infos - .iter() - .find(|info| info.type_ == xinput::DeviceType::MASTER_POINTER) - .unwrap() - .classes - .iter() - .filter_map(|class| class.data.as_scroll()) - .map(|class| *class) - .collect::>(); + let pointer_device_states = + get_new_pointer_device_states(&xcb_connection, &BTreeMap::new()); let atoms = XcbAtoms::new(&xcb_connection).unwrap().reply().unwrap(); @@ -434,9 +453,7 @@ impl X11Client { cursor_styles: HashMap::default(), cursor_cache: HashMap::default(), - scroll_class_data, - scroll_x: None, - scroll_y: None, + pointer_device_states, clipboard, clipboard_item: None, @@ -950,35 +967,56 @@ impl X11Client { window.handle_ime_commit(text); state = self.0.borrow_mut(); } - if let Some(button) = button_of_key(event.detail.try_into().unwrap()) { - let click_elapsed = state.last_click.elapsed(); - - if click_elapsed < DOUBLE_CLICK_INTERVAL - && state - .last_mouse_button - .is_some_and(|prev_button| prev_button == button) - && is_within_click_distance(state.last_location, position) - { - state.current_count += 1; - } else { - state.current_count = 1; - } - - state.last_click = Instant::now(); - state.last_mouse_button = Some(button); - state.last_location = position; - let current_count = state.current_count; + match button_or_scroll_from_event_detail(event.detail) { + Some(ButtonOrScroll::Button(button)) => { + let click_elapsed = state.last_click.elapsed(); + if click_elapsed < DOUBLE_CLICK_INTERVAL + && state + .last_mouse_button + .is_some_and(|prev_button| prev_button == button) + && is_within_click_distance(state.last_location, position) + { + state.current_count += 1; + } else { + state.current_count = 1; + } - drop(state); - window.handle_input(PlatformInput::MouseDown(crate::MouseDownEvent { - button, - position, - modifiers, - click_count: current_count, - first_mouse: false, - })); - } else { - log::warn!("Unknown button press: {event:?}"); + state.last_click = Instant::now(); + state.last_mouse_button = Some(button); + state.last_location = position; + let current_count = state.current_count; + + drop(state); + window.handle_input(PlatformInput::MouseDown(crate::MouseDownEvent { + button, + position, + modifiers, + click_count: current_count, + first_mouse: false, + })); + } + Some(ButtonOrScroll::Scroll(direction)) => { + drop(state); + // Emulated scroll button presses are sent simultaneously with smooth scrolling XinputMotion events. + // Since handling those events does the scrolling, they are skipped here. 
+ if !event + .flags + .contains(xinput::PointerEventFlags::POINTER_EMULATED) + { + let scroll_delta = match direction { + ScrollDirection::Up => Point::new(0.0, SCROLL_LINES), + ScrollDirection::Down => Point::new(0.0, -SCROLL_LINES), + ScrollDirection::Left => Point::new(SCROLL_LINES, 0.0), + ScrollDirection::Right => Point::new(-SCROLL_LINES, 0.0), + }; + window.handle_input(PlatformInput::ScrollWheel( + make_scroll_wheel_event(position, scroll_delta, modifiers), + )); + } + } + None => { + log::error!("Unknown x11 button: {}", event.detail); + } } } Event::XinputButtonRelease(event) => { @@ -991,15 +1029,19 @@ impl X11Client { px(event.event_x as f32 / u16::MAX as f32 / state.scale_factor), px(event.event_y as f32 / u16::MAX as f32 / state.scale_factor), ); - if let Some(button) = button_of_key(event.detail.try_into().unwrap()) { - let click_count = state.current_count; - drop(state); - window.handle_input(PlatformInput::MouseUp(crate::MouseUpEvent { - button, - position, - modifiers, - click_count, - })); + match button_or_scroll_from_event_detail(event.detail) { + Some(ButtonOrScroll::Button(button)) => { + let click_count = state.current_count; + drop(state); + window.handle_input(PlatformInput::MouseUp(crate::MouseUpEvent { + button, + position, + modifiers, + click_count, + })); + } + Some(ButtonOrScroll::Scroll(_)) => {} + None => {} } } Event::XinputMotion(event) => { @@ -1014,12 +1056,6 @@ impl X11Client { state.modifiers = modifiers; drop(state); - let axisvalues = event - .axisvalues - .iter() - .map(|axisvalue| fp3232_to_f32(*axisvalue)) - .collect::>(); - if event.valuator_mask[0] & 3 != 0 { window.handle_input(PlatformInput::MouseMove(crate::MouseMoveEvent { position, @@ -1028,64 +1064,17 @@ impl X11Client { })); } - let mut valuator_idx = 0; - let scroll_class_data = self.0.borrow().scroll_class_data.clone(); - for shift in 0..32 { - if (event.valuator_mask[0] >> shift) & 1 == 0 { - continue; - } - - for scroll_class in &scroll_class_data { - if scroll_class.scroll_type == xinput::ScrollType::HORIZONTAL - && scroll_class.number == shift - { - let new_scroll = axisvalues[valuator_idx] - / fp3232_to_f32(scroll_class.increment) - * SCROLL_LINES as f32; - let old_scroll = self.0.borrow().scroll_x; - self.0.borrow_mut().scroll_x = Some(new_scroll); - - if let Some(old_scroll) = old_scroll { - let delta_scroll = old_scroll - new_scroll; - window.handle_input(PlatformInput::ScrollWheel( - crate::ScrollWheelEvent { - position, - delta: ScrollDelta::Lines(Point::new(delta_scroll, 0.0)), - modifiers, - touch_phase: TouchPhase::default(), - }, - )); - } - } else if scroll_class.scroll_type == xinput::ScrollType::VERTICAL - && scroll_class.number == shift - { - // the `increment` is the valuator delta equivalent to one positive unit of scrolling. Here that means SCROLL_LINES lines. 
- let new_scroll = axisvalues[valuator_idx] - / fp3232_to_f32(scroll_class.increment) - * SCROLL_LINES as f32; - let old_scroll = self.0.borrow().scroll_y; - self.0.borrow_mut().scroll_y = Some(new_scroll); - - if let Some(old_scroll) = old_scroll { - let delta_scroll = old_scroll - new_scroll; - let (x, y) = if !modifiers.shift { - (0.0, delta_scroll) - } else { - (delta_scroll, 0.0) - }; - window.handle_input(PlatformInput::ScrollWheel( - crate::ScrollWheelEvent { - position, - delta: ScrollDelta::Lines(Point::new(x, y)), - modifiers, - touch_phase: TouchPhase::default(), - }, - )); - } - } + state = self.0.borrow_mut(); + if let Some(mut pointer) = state.pointer_device_states.get_mut(&event.sourceid) { + let scroll_delta = get_scroll_delta_and_update_state(&mut pointer, &event); + drop(state); + if let Some(scroll_delta) = scroll_delta { + window.handle_input(PlatformInput::ScrollWheel(make_scroll_wheel_event( + position, + scroll_delta, + modifiers, + ))); } - - valuator_idx += 1; } } Event::XinputEnter(event) if event.mode == xinput::NotifyMode::NORMAL => { @@ -1095,10 +1084,10 @@ impl X11Client { state.mouse_focused_window = Some(event.event); } Event::XinputLeave(event) if event.mode == xinput::NotifyMode::NORMAL => { - self.0.borrow_mut().scroll_x = None; // Set last scroll to `None` so that a large delta isn't created if scrolling is done outside the window (the valuator is global) - self.0.borrow_mut().scroll_y = None; - let mut state = self.0.borrow_mut(); + + // Set last scroll values to `None` so that a large delta isn't created if scrolling is done outside the window (the valuator is global) + reset_all_pointer_device_scroll_positions(&mut state.pointer_device_states); state.mouse_focused_window = None; let pressed_button = pressed_button_from_mask(event.buttons[0]); let position = point( @@ -1117,6 +1106,26 @@ impl X11Client { })); window.set_hovered(false); } + Event::XinputHierarchy(event) => { + let mut state = self.0.borrow_mut(); + // Temporarily use `state.pointer_device_states` to only store pointers that still have valid scroll values. + // Any change to a device invalidates its scroll values. + for info in event.infos { + if is_pointer_device(info.type_) { + state.pointer_device_states.remove(&info.deviceid); + } + } + state.pointer_device_states = get_new_pointer_device_states( + &state.xcb_connection, + &state.pointer_device_states, + ); + } + Event::XinputDeviceChanged(event) => { + let mut state = self.0.borrow_mut(); + if let Some(mut pointer) = state.pointer_device_states.get_mut(&event.sourceid) { + reset_pointer_device_scroll_positions(&mut pointer); + } + } _ => {} }; @@ -1742,3 +1751,142 @@ fn xdnd_send_status( .send_event(false, target, EventMask::default(), message) .unwrap(); } + +/// Recomputes `pointer_device_states` by querying all pointer devices. +/// When a device is present in `scroll_values_to_preserve`, its value for `ScrollAxisState.scroll_value` is used. 
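+/// Preserving a device's previous scroll value keeps its deltas continuous across the rebuild,
+/// so the next motion event from an unchanged device does not have its delta skipped.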
+fn get_new_pointer_device_states( + xcb_connection: &XCBConnection, + scroll_values_to_preserve: &BTreeMap, +) -> BTreeMap { + let devices_query_result = xcb_connection + .xinput_xi_query_device(XINPUT_ALL_DEVICES) + .unwrap() + .reply() + .unwrap(); + + let mut pointer_device_states = BTreeMap::new(); + pointer_device_states.extend( + devices_query_result + .infos + .iter() + .filter(|info| is_pointer_device(info.type_)) + .filter_map(|info| { + let scroll_data = info + .classes + .iter() + .filter_map(|class| class.data.as_scroll()) + .map(|class| *class) + .rev() + .collect::>(); + let old_state = scroll_values_to_preserve.get(&info.deviceid); + let old_horizontal = old_state.map(|state| &state.horizontal); + let old_vertical = old_state.map(|state| &state.vertical); + let horizontal = scroll_data + .iter() + .find(|data| data.scroll_type == xinput::ScrollType::HORIZONTAL) + .map(|data| scroll_data_to_axis_state(data, old_horizontal)); + let vertical = scroll_data + .iter() + .find(|data| data.scroll_type == xinput::ScrollType::VERTICAL) + .map(|data| scroll_data_to_axis_state(data, old_vertical)); + if horizontal.is_none() && vertical.is_none() { + None + } else { + Some(( + info.deviceid, + PointerDeviceState { + horizontal: horizontal.unwrap_or_else(Default::default), + vertical: vertical.unwrap_or_else(Default::default), + }, + )) + } + }), + ); + if pointer_device_states.is_empty() { + log::error!("Found no xinput mouse pointers."); + } + return pointer_device_states; +} + +/// Returns true if the device is a pointer device. Does not include pointer device groups. +fn is_pointer_device(type_: xinput::DeviceType) -> bool { + type_ == xinput::DeviceType::SLAVE_POINTER +} + +fn scroll_data_to_axis_state( + data: &xinput::DeviceClassDataScroll, + old_axis_state_with_valid_scroll_value: Option<&ScrollAxisState>, +) -> ScrollAxisState { + ScrollAxisState { + valuator_number: Some(data.number), + multiplier: SCROLL_LINES / fp3232_to_f32(data.increment), + scroll_value: old_axis_state_with_valid_scroll_value.and_then(|state| state.scroll_value), + } +} + +fn reset_all_pointer_device_scroll_positions( + pointer_device_states: &mut BTreeMap, +) { + pointer_device_states + .iter_mut() + .for_each(|(_, device_state)| reset_pointer_device_scroll_positions(device_state)); +} + +fn reset_pointer_device_scroll_positions(pointer: &mut PointerDeviceState) { + pointer.horizontal.scroll_value = None; + pointer.vertical.scroll_value = None; +} + +/// Returns the scroll delta for a smooth scrolling motion event, or `None` if no scroll data is present. 
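+/// Each axis delta is the difference between the device's last recorded absolute scroll value
+/// and the value carried by this event, scaled by that axis's multiplier.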
+fn get_scroll_delta_and_update_state( + pointer: &mut PointerDeviceState, + event: &xinput::MotionEvent, +) -> Option> { + let delta_x = get_axis_scroll_delta_and_update_state(event, &mut pointer.horizontal); + let delta_y = get_axis_scroll_delta_and_update_state(event, &mut pointer.vertical); + if delta_x.is_some() || delta_y.is_some() { + Some(Point::new(delta_x.unwrap_or(0.0), delta_y.unwrap_or(0.0))) + } else { + None + } +} + +fn get_axis_scroll_delta_and_update_state( + event: &xinput::MotionEvent, + axis: &mut ScrollAxisState, +) -> Option { + let axis_index = get_valuator_axis_index(&event.valuator_mask, axis.valuator_number?)?; + if let Some(axis_value) = event.axisvalues.get(axis_index) { + let new_scroll = fp3232_to_f32(*axis_value); + let delta_scroll = axis + .scroll_value + .map(|old_scroll| (old_scroll - new_scroll) * axis.multiplier); + axis.scroll_value = Some(new_scroll); + delta_scroll + } else { + log::error!("Encountered invalid XInput valuator_mask, scrolling may not work properly."); + None + } +} + +fn make_scroll_wheel_event( + position: Point, + scroll_delta: Point, + modifiers: Modifiers, +) -> crate::ScrollWheelEvent { + // When shift is held down, vertical scrolling turns into horizontal scrolling. + let delta = if modifiers.shift { + Point { + x: scroll_delta.y, + y: 0.0, + } + } else { + scroll_delta + }; + crate::ScrollWheelEvent { + position, + delta: ScrollDelta::Lines(delta), + modifiers, + touch_phase: TouchPhase::default(), + } +} diff --git a/crates/gpui/src/platform/linux/x11/event.rs b/crates/gpui/src/platform/linux/x11/event.rs index 18ec392fc657e..cd4cef24a33f3 100644 --- a/crates/gpui/src/platform/linux/x11/event.rs +++ b/crates/gpui/src/platform/linux/x11/event.rs @@ -5,13 +5,29 @@ use x11rb::protocol::{ use crate::{Modifiers, MouseButton, NavigationDirection}; -pub(crate) fn button_of_key(detail: xproto::Button) -> Option { +pub(crate) enum ButtonOrScroll { + Button(MouseButton), + Scroll(ScrollDirection), +} + +pub(crate) enum ScrollDirection { + Up, + Down, + Left, + Right, +} + +pub(crate) fn button_or_scroll_from_event_detail(detail: u32) -> Option { Some(match detail { - 1 => MouseButton::Left, - 2 => MouseButton::Middle, - 3 => MouseButton::Right, - 8 => MouseButton::Navigate(NavigationDirection::Back), - 9 => MouseButton::Navigate(NavigationDirection::Forward), + 1 => ButtonOrScroll::Button(MouseButton::Left), + 2 => ButtonOrScroll::Button(MouseButton::Middle), + 3 => ButtonOrScroll::Button(MouseButton::Right), + 4 => ButtonOrScroll::Scroll(ScrollDirection::Up), + 5 => ButtonOrScroll::Scroll(ScrollDirection::Down), + 6 => ButtonOrScroll::Scroll(ScrollDirection::Left), + 7 => ButtonOrScroll::Scroll(ScrollDirection::Right), + 8 => ButtonOrScroll::Button(MouseButton::Navigate(NavigationDirection::Back)), + 9 => ButtonOrScroll::Button(MouseButton::Navigate(NavigationDirection::Forward)), _ => return None, }) } @@ -48,3 +64,91 @@ pub(crate) fn pressed_button_from_mask(button_mask: u32) -> Option return None; }) } + +pub(crate) fn get_valuator_axis_index( + valuator_mask: &Vec, + valuator_number: u16, +) -> Option { + // XInput valuator masks have a 1 at the bit indexes corresponding to each + // valuator present in this event's axisvalues. Axisvalues is ordered from + // lowest valuator number to highest, so counting bits before the 1 bit for + // this valuator yields the index in axisvalues. 
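+    //
+    // For example, a mask of [0b1010] contains valuators 1 and 3; for valuator_number 3 there is
+    // one set bit below it (bit 1), so its value is read from axisvalues[1].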
+ if bit_is_set_in_vec(&valuator_mask, valuator_number) { + Some(popcount_upto_bit_index(&valuator_mask, valuator_number) as usize) + } else { + None + } +} + +/// Returns the number of 1 bits in `bit_vec` for all bits where `i < bit_index`. +fn popcount_upto_bit_index(bit_vec: &Vec, bit_index: u16) -> u32 { + let array_index = bit_index as usize / 32; + let popcount: u32 = bit_vec + .get(array_index) + .map_or(0, |bits| keep_bits_upto(*bits, bit_index % 32).count_ones()); + if array_index == 0 { + popcount + } else { + // Valuator numbers over 32 probably never occur for scroll position, but may as well + // support it. + let leading_popcount: u32 = bit_vec + .iter() + .take(array_index) + .map(|bits| bits.count_ones()) + .sum(); + popcount + leading_popcount + } +} + +fn bit_is_set_in_vec(bit_vec: &Vec, bit_index: u16) -> bool { + let array_index = bit_index as usize / 32; + bit_vec + .get(array_index) + .map_or(false, |bits| bit_is_set(*bits, bit_index % 32)) +} + +fn bit_is_set(bits: u32, bit_index: u16) -> bool { + bits & (1 << bit_index) != 0 +} + +/// Sets every bit with `i >= bit_index` to 0. +fn keep_bits_upto(bits: u32, bit_index: u16) -> u32 { + if bit_index == 0 { + 0 + } else if bit_index >= 32 { + u32::MAX + } else { + bits & ((1 << bit_index) - 1) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_get_valuator_axis_index() { + assert!(get_valuator_axis_index(&vec![0b11], 0) == Some(0)); + assert!(get_valuator_axis_index(&vec![0b11], 1) == Some(1)); + assert!(get_valuator_axis_index(&vec![0b11], 2) == None); + + assert!(get_valuator_axis_index(&vec![0b100], 0) == None); + assert!(get_valuator_axis_index(&vec![0b100], 1) == None); + assert!(get_valuator_axis_index(&vec![0b100], 2) == Some(0)); + assert!(get_valuator_axis_index(&vec![0b100], 3) == None); + + assert!(get_valuator_axis_index(&vec![0b1010, 0], 0) == None); + assert!(get_valuator_axis_index(&vec![0b1010, 0], 1) == Some(0)); + assert!(get_valuator_axis_index(&vec![0b1010, 0], 2) == None); + assert!(get_valuator_axis_index(&vec![0b1010, 0], 3) == Some(1)); + + assert!(get_valuator_axis_index(&vec![0b1010, 0b1], 0) == None); + assert!(get_valuator_axis_index(&vec![0b1010, 0b1], 1) == Some(0)); + assert!(get_valuator_axis_index(&vec![0b1010, 0b1], 2) == None); + assert!(get_valuator_axis_index(&vec![0b1010, 0b1], 3) == Some(1)); + assert!(get_valuator_axis_index(&vec![0b1010, 0b1], 32) == Some(2)); + assert!(get_valuator_axis_index(&vec![0b1010, 0b1], 33) == None); + + assert!(get_valuator_axis_index(&vec![0b1010, 0b101], 34) == Some(3)); + } +} diff --git a/crates/gpui/src/platform/linux/x11/window.rs b/crates/gpui/src/platform/linux/x11/window.rs index 62b895d01f426..2884c7ea91a51 100644 --- a/crates/gpui/src/platform/linux/x11/window.rs +++ b/crates/gpui/src/platform/linux/x11/window.rs @@ -29,7 +29,7 @@ use std::{ sync::Arc, }; -use super::{X11Display, XINPUT_MASTER_DEVICE}; +use super::{X11Display, XINPUT_ALL_DEVICES, XINPUT_ALL_DEVICE_GROUPS}; x11rb::atom_manager! 
{ pub XcbAtoms: AtomsCookie { XA_ATOM, @@ -475,7 +475,7 @@ impl X11WindowState { .xinput_xi_select_events( x_window, &[xinput::EventMask { - deviceid: XINPUT_MASTER_DEVICE, + deviceid: XINPUT_ALL_DEVICE_GROUPS, mask: vec![ xinput::XIEventMask::MOTION | xinput::XIEventMask::BUTTON_PRESS @@ -487,6 +487,19 @@ impl X11WindowState { ) .unwrap(); + xcb_connection + .xinput_xi_select_events( + x_window, + &[xinput::EventMask { + deviceid: XINPUT_ALL_DEVICES, + mask: vec![ + xinput::XIEventMask::HIERARCHY, + xinput::XIEventMask::DEVICE_CHANGED, + ], + }], + ) + .unwrap(); + xcb_connection.flush().unwrap(); let raw = RawWindow { @@ -1253,7 +1266,7 @@ impl PlatformWindow for X11Window { self.0.x_window, state.atoms._GTK_SHOW_WINDOW_MENU, [ - XINPUT_MASTER_DEVICE as u32, + XINPUT_ALL_DEVICE_GROUPS as u32, coords.dst_x as u32, coords.dst_y as u32, 0, From 7ce8797d78794f5a53e7a7d113e4c14a65e6297f Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Tue, 1 Oct 2024 12:16:44 +0200 Subject: [PATCH 162/228] ssh remoting: Add infrastructure to handle reconnects (#18572) This restructures the code in `remote` so that it's easier to replace the current SSH connection with a new one in case of disconnects/reconnects. Right now, it successfully reconnects, BUT we're still missing the big piece on the server-side: keeping the server process alive and reconnecting to the same process that keeps the project-state. Release Notes: - N/A --------- Co-authored-by: Bennet --- .../remote_editing_collaboration_tests.rs | 4 +- crates/collab/src/tests/test_server.rs | 4 +- crates/project/src/project.rs | 83 +- crates/project/src/terminals.rs | 8 +- crates/recent_projects/src/ssh_connections.rs | 6 +- crates/remote/src/remote.rs | 2 +- crates/remote/src/ssh_session.rs | 826 +++++++++++------- crates/remote_server/src/headless_project.rs | 4 +- crates/remote_server/src/main.rs | 5 +- .../remote_server/src/remote_editing_tests.rs | 11 +- crates/workspace/src/workspace.rs | 4 +- 11 files changed, 559 insertions(+), 398 deletions(-) diff --git a/crates/collab/src/tests/remote_editing_collaboration_tests.rs b/crates/collab/src/tests/remote_editing_collaboration_tests.rs index a9cc32c1dd3a6..7de50511ea276 100644 --- a/crates/collab/src/tests/remote_editing_collaboration_tests.rs +++ b/crates/collab/src/tests/remote_editing_collaboration_tests.rs @@ -4,7 +4,7 @@ use fs::{FakeFs, Fs as _}; use gpui::{Context as _, TestAppContext}; use language::language_settings::all_language_settings; use project::ProjectPath; -use remote::SshSession; +use remote::SshRemoteClient; use remote_server::HeadlessProject; use serde_json::json; use std::{path::Path, sync::Arc}; @@ -24,7 +24,7 @@ async fn test_sharing_an_ssh_remote_project( .await; // Set up project on remote FS - let (client_ssh, server_ssh) = SshSession::fake(cx_a, server_cx); + let (client_ssh, server_ssh) = SshRemoteClient::fake(cx_a, server_cx); let remote_fs = FakeFs::new(server_cx.executor()); remote_fs .insert_tree( diff --git a/crates/collab/src/tests/test_server.rs b/crates/collab/src/tests/test_server.rs index 5ff4a720741bc..5e7d935c36438 100644 --- a/crates/collab/src/tests/test_server.rs +++ b/crates/collab/src/tests/test_server.rs @@ -25,7 +25,7 @@ use node_runtime::NodeRuntime; use notifications::NotificationStore; use parking_lot::Mutex; use project::{Project, WorktreeId}; -use remote::SshSession; +use remote::SshRemoteClient; use rpc::{ proto::{self, ChannelRole}, RECEIVE_TIMEOUT, @@ -835,7 +835,7 @@ impl TestClient { pub async fn build_ssh_project( &self, root_path: impl 
AsRef, - ssh: Arc, + ssh: Arc, cx: &mut TestAppContext, ) -> (Model, WorktreeId) { let project = cx.update(|cx| { diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index b91250e6b2c4a..dadbd394bbf9b 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -54,7 +54,7 @@ use parking_lot::{Mutex, RwLock}; use paths::{local_tasks_file_relative_path, local_vscode_tasks_file_relative_path}; pub use prettier_store::PrettierStore; use project_settings::{ProjectSettings, SettingsObserver, SettingsObserverEvent}; -use remote::SshSession; +use remote::SshRemoteClient; use rpc::{proto::SSH_PROJECT_ID, AnyProtoClient, ErrorCode}; use search::{SearchInputKind, SearchQuery, SearchResult}; use search_history::SearchHistory; @@ -138,7 +138,7 @@ pub struct Project { join_project_response_message_id: u32, user_store: Model, fs: Arc, - ssh_session: Option>, + ssh_client: Option>, client_state: ProjectClientState, collaborators: HashMap, client_subscriptions: Vec, @@ -643,7 +643,7 @@ impl Project { user_store, settings_observer, fs, - ssh_session: None, + ssh_client: None, buffers_needing_diff: Default::default(), git_diff_debouncer: DebouncedDelay::new(), terminals: Terminals { @@ -664,7 +664,7 @@ impl Project { } pub fn ssh( - ssh: Arc, + ssh: Arc, client: Arc, node: NodeRuntime, user_store: Model, @@ -682,14 +682,14 @@ impl Project { SnippetProvider::new(fs.clone(), BTreeSet::from_iter([global_snippets_dir]), cx); let worktree_store = - cx.new_model(|_| WorktreeStore::remote(false, ssh.clone().into(), 0, None)); + cx.new_model(|_| WorktreeStore::remote(false, ssh.to_proto_client(), 0, None)); cx.subscribe(&worktree_store, Self::on_worktree_store_event) .detach(); let buffer_store = cx.new_model(|cx| { BufferStore::remote( worktree_store.clone(), - ssh.clone().into(), + ssh.to_proto_client(), SSH_PROJECT_ID, cx, ) @@ -698,7 +698,7 @@ impl Project { .detach(); let settings_observer = cx.new_model(|cx| { - SettingsObserver::new_ssh(ssh.clone().into(), worktree_store.clone(), cx) + SettingsObserver::new_ssh(ssh.to_proto_client(), worktree_store.clone(), cx) }); cx.subscribe(&settings_observer, Self::on_settings_observer_event) .detach(); @@ -709,7 +709,7 @@ impl Project { buffer_store.clone(), worktree_store.clone(), languages.clone(), - ssh.clone().into(), + ssh.to_proto_client(), SSH_PROJECT_ID, cx, ) @@ -733,7 +733,7 @@ impl Project { user_store, settings_observer, fs, - ssh_session: Some(ssh.clone()), + ssh_client: Some(ssh.clone()), buffers_needing_diff: Default::default(), git_diff_debouncer: DebouncedDelay::new(), terminals: Terminals { @@ -751,7 +751,7 @@ impl Project { search_excluded_history: Self::new_search_history(), }; - let client: AnyProtoClient = ssh.clone().into(); + let client: AnyProtoClient = ssh.to_proto_client(); ssh.subscribe_to_entity(SSH_PROJECT_ID, &cx.handle()); ssh.subscribe_to_entity(SSH_PROJECT_ID, &this.buffer_store); @@ -907,7 +907,7 @@ impl Project { user_store: user_store.clone(), snippets, fs, - ssh_session: None, + ssh_client: None, settings_observer: settings_observer.clone(), client_subscriptions: Default::default(), _subscriptions: vec![cx.on_release(Self::release)], @@ -1230,7 +1230,7 @@ impl Project { match self.client_state { ProjectClientState::Remote { replica_id, .. 
} => replica_id, _ => { - if self.ssh_session.is_some() { + if self.ssh_client.is_some() { 1 } else { 0 @@ -1638,7 +1638,7 @@ impl Project { pub fn is_local(&self) -> bool { match &self.client_state { ProjectClientState::Local | ProjectClientState::Shared { .. } => { - self.ssh_session.is_none() + self.ssh_client.is_none() } ProjectClientState::Remote { .. } => false, } @@ -1647,7 +1647,7 @@ impl Project { pub fn is_via_ssh(&self) -> bool { match &self.client_state { ProjectClientState::Local | ProjectClientState::Shared { .. } => { - self.ssh_session.is_some() + self.ssh_client.is_some() } ProjectClientState::Remote { .. } => false, } @@ -1933,8 +1933,9 @@ impl Project { } BufferStoreEvent::BufferChangedFilePath { .. } => {} BufferStoreEvent::BufferDropped(buffer_id) => { - if let Some(ref ssh_session) = self.ssh_session { - ssh_session + if let Some(ref ssh_client) = self.ssh_client { + ssh_client + .to_proto_client() .send(proto::CloseBuffer { project_id: 0, buffer_id: buffer_id.to_proto(), @@ -2139,13 +2140,14 @@ impl Project { } => { let operation = language::proto::serialize_operation(operation); - if let Some(ssh) = &self.ssh_session { - ssh.send(proto::UpdateBuffer { - project_id: 0, - buffer_id: buffer_id.to_proto(), - operations: vec![operation.clone()], - }) - .ok(); + if let Some(ssh) = &self.ssh_client { + ssh.to_proto_client() + .send(proto::UpdateBuffer { + project_id: 0, + buffer_id: buffer_id.to_proto(), + operations: vec![operation.clone()], + }) + .ok(); } self.enqueue_buffer_ordered_message(BufferOrderedMessage::Operation { @@ -2825,14 +2827,13 @@ impl Project { ) -> Receiver> { let (tx, rx) = smol::channel::unbounded(); - let (client, remote_id): (AnyProtoClient, _) = - if let Some(ssh_session) = self.ssh_session.clone() { - (ssh_session.into(), 0) - } else if let Some(remote_id) = self.remote_id() { - (self.client.clone().into(), remote_id) - } else { - return rx; - }; + let (client, remote_id): (AnyProtoClient, _) = if let Some(ssh_client) = &self.ssh_client { + (ssh_client.to_proto_client(), 0) + } else if let Some(remote_id) = self.remote_id() { + (self.client.clone().into(), remote_id) + } else { + return rx; + }; let request = client.request(proto::FindSearchCandidates { project_id: remote_id, @@ -2961,11 +2962,13 @@ impl Project { exists.then(|| ResolvedPath::AbsPath(expanded)) }) - } else if let Some(ssh_session) = self.ssh_session.as_ref() { - let request = ssh_session.request(proto::CheckFileExists { - project_id: SSH_PROJECT_ID, - path: path.to_string(), - }); + } else if let Some(ssh_client) = self.ssh_client.as_ref() { + let request = ssh_client + .to_proto_client() + .request(proto::CheckFileExists { + project_id: SSH_PROJECT_ID, + path: path.to_string(), + }); cx.background_executor().spawn(async move { let response = request.await.log_err()?; if response.exists { @@ -3035,13 +3038,13 @@ impl Project { ) -> Task>> { if self.is_local() { DirectoryLister::Local(self.fs.clone()).list_directory(query, cx) - } else if let Some(session) = self.ssh_session.as_ref() { + } else if let Some(session) = self.ssh_client.as_ref() { let request = proto::ListRemoteDirectory { dev_server_id: SSH_PROJECT_ID, path: query, }; - let response = session.request(request); + let response = session.to_proto_client().request(request); cx.background_executor().spawn(async move { let response = response.await?; Ok(response.entries.into_iter().map(PathBuf::from).collect()) @@ -3465,11 +3468,11 @@ impl Project { cx: AsyncAppContext, ) -> Result { let buffer_store = this.read_with(&cx, 
|this, cx| { - if let Some(ssh) = &this.ssh_session { + if let Some(ssh) = &this.ssh_client { let mut payload = envelope.payload.clone(); payload.project_id = 0; cx.background_executor() - .spawn(ssh.request(payload)) + .spawn(ssh.to_proto_client().request(payload)) .detach_and_log_err(cx); } this.buffer_store.clone() diff --git a/crates/project/src/terminals.rs b/crates/project/src/terminals.rs index 54dd48cf433ff..7175b75e22a32 100644 --- a/crates/project/src/terminals.rs +++ b/crates/project/src/terminals.rs @@ -67,8 +67,12 @@ impl Project { } fn ssh_command(&self, cx: &AppContext) -> Option { - if let Some(ssh_session) = self.ssh_session.as_ref() { - return Some(SshCommand::Direct(ssh_session.ssh_args())); + if let Some(args) = self + .ssh_client + .as_ref() + .and_then(|session| session.ssh_args()) + { + return Some(SshCommand::Direct(args)); } let dev_server_project_id = self.dev_server_project_id()?; diff --git a/crates/recent_projects/src/ssh_connections.rs b/crates/recent_projects/src/ssh_connections.rs index dd30f15f267fc..d0fffc031f0bf 100644 --- a/crates/recent_projects/src/ssh_connections.rs +++ b/crates/recent_projects/src/ssh_connections.rs @@ -11,7 +11,7 @@ use gpui::{ Transformation, View, }; use release_channel::{AppVersion, ReleaseChannel}; -use remote::{SshConnectionOptions, SshPlatform, SshSession}; +use remote::{SshConnectionOptions, SshPlatform, SshRemoteClient}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsSources}; @@ -376,12 +376,12 @@ pub fn connect_over_ssh( connection_options: SshConnectionOptions, ui: View, cx: &mut WindowContext, -) -> Task>> { +) -> Task>> { let window = cx.window_handle(); let known_password = connection_options.password.clone(); cx.spawn(|mut cx| async move { - remote::SshSession::client( + remote::SshRemoteClient::new( connection_options, Arc::new(SshClientDelegate { window, diff --git a/crates/remote/src/remote.rs b/crates/remote/src/remote.rs index 23f798c1914db..c3d9e8f9cc125 100644 --- a/crates/remote/src/remote.rs +++ b/crates/remote/src/remote.rs @@ -2,4 +2,4 @@ pub mod json_log; pub mod protocol; pub mod ssh_session; -pub use ssh_session::{SshClientDelegate, SshConnectionOptions, SshPlatform, SshSession}; +pub use ssh_session::{SshClientDelegate, SshConnectionOptions, SshPlatform, SshRemoteClient}; diff --git a/crates/remote/src/ssh_session.rs b/crates/remote/src/ssh_session.rs index 915595fd9d295..fe1e42fe96630 100644 --- a/crates/remote/src/ssh_session.rs +++ b/crates/remote/src/ssh_session.rs @@ -7,19 +7,23 @@ use crate::{ use anyhow::{anyhow, Context as _, Result}; use collections::HashMap; use futures::{ - channel::{mpsc, oneshot}, + channel::{ + mpsc::{self, UnboundedReceiver, UnboundedSender}, + oneshot, + }, future::BoxFuture, - select_biased, AsyncReadExt as _, AsyncWriteExt as _, Future, FutureExt as _, StreamExt as _, + select_biased, AsyncReadExt as _, AsyncWriteExt as _, Future, FutureExt as _, SinkExt, + StreamExt as _, }; use gpui::{AppContext, AsyncAppContext, Model, SemanticVersion, Task}; use parking_lot::Mutex; use rpc::{ proto::{self, build_typed_envelope, Envelope, EnvelopedMessage, PeerId, RequestMessage}, - EntityMessageSubscriber, ProtoClient, ProtoMessageHandlerSet, RpcError, + AnyProtoClient, EntityMessageSubscriber, ProtoClient, ProtoMessageHandlerSet, RpcError, }; use smol::{ fs, - process::{self, Stdio}, + process::{self, Child, Stdio}, }; use std::{ any::TypeId, @@ -44,22 +48,6 @@ pub struct SshSocket { socket_path: PathBuf, } -pub struct SshSession { 
- next_message_id: AtomicU32, - response_channels: ResponseChannels, // Lock - outgoing_tx: mpsc::UnboundedSender, - spawn_process_tx: mpsc::UnboundedSender, - client_socket: Option, - state: Mutex, // Lock - _io_task: Option>>, -} - -struct SshClientState { - socket: SshSocket, - master_process: process::Child, - _temp_dir: TempDir, -} - #[derive(Debug, Clone, PartialEq, Eq)] pub struct SshConnectionOptions { pub host: String, @@ -105,18 +93,13 @@ impl SshConnectionOptions { } } -struct SpawnRequest { - command: String, - process_tx: oneshot::Sender, -} - #[derive(Copy, Clone, Debug)] pub struct SshPlatform { pub os: &'static str, pub arch: &'static str, } -pub trait SshClientDelegate { +pub trait SshClientDelegate: Send + Sync { fn ask_password( &self, prompt: String, @@ -132,48 +115,249 @@ pub trait SshClientDelegate { fn set_error(&self, error_message: String, cx: &mut AsyncAppContext); } -type ResponseChannels = Mutex)>>>; +impl SshSocket { + fn ssh_command>(&self, program: S) -> process::Command { + let mut command = process::Command::new("ssh"); + self.ssh_options(&mut command) + .arg(self.connection_options.ssh_url()) + .arg(program); + command + } + + fn ssh_options<'a>(&self, command: &'a mut process::Command) -> &'a mut process::Command { + command + .stdin(Stdio::piped()) + .stdout(Stdio::piped()) + .stderr(Stdio::piped()) + .args(["-o", "ControlMaster=no", "-o"]) + .arg(format!("ControlPath={}", self.socket_path.display())) + } + + fn ssh_args(&self) -> Vec { + vec![ + "-o".to_string(), + "ControlMaster=no".to_string(), + "-o".to_string(), + format!("ControlPath={}", self.socket_path.display()), + self.connection_options.ssh_url(), + ] + } +} -impl SshSession { - pub async fn client( +async fn run_cmd(command: &mut process::Command) -> Result { + let output = command.output().await?; + if output.status.success() { + Ok(String::from_utf8_lossy(&output.stdout).to_string()) + } else { + Err(anyhow!( + "failed to run command: {}", + String::from_utf8_lossy(&output.stderr) + )) + } +} +#[cfg(unix)] +async fn read_with_timeout( + stdout: &mut process::ChildStdout, + timeout: std::time::Duration, + output: &mut Vec, +) -> Result<(), std::io::Error> { + smol::future::or( + async { + stdout.read_to_end(output).await?; + Ok::<_, std::io::Error>(()) + }, + async { + smol::Timer::after(timeout).await; + + Err(std::io::Error::new( + std::io::ErrorKind::TimedOut, + "Read operation timed out", + )) + }, + ) + .await +} + +struct ChannelForwarder { + quit_tx: UnboundedSender<()>, + forwarding_task: Task<(UnboundedSender, UnboundedReceiver)>, +} + +impl ChannelForwarder { + fn new( + mut incoming_tx: UnboundedSender, + mut outgoing_rx: UnboundedReceiver, + cx: &mut AsyncAppContext, + ) -> (Self, UnboundedSender, UnboundedReceiver) { + let (quit_tx, mut quit_rx) = mpsc::unbounded::<()>(); + + let (proxy_incoming_tx, mut proxy_incoming_rx) = mpsc::unbounded::(); + let (mut proxy_outgoing_tx, proxy_outgoing_rx) = mpsc::unbounded::(); + + let forwarding_task = cx.background_executor().spawn(async move { + loop { + select_biased! 
{ + _ = quit_rx.next().fuse() => { + break; + }, + incoming_envelope = proxy_incoming_rx.next().fuse() => { + if let Some(envelope) = incoming_envelope { + if incoming_tx.send(envelope).await.is_err() { + break; + } + } else { + break; + } + } + outgoing_envelope = outgoing_rx.next().fuse() => { + if let Some(envelope) = outgoing_envelope { + if proxy_outgoing_tx.send(envelope).await.is_err() { + break; + } + } else { + break; + } + } + } + } + + (incoming_tx, outgoing_rx) + }); + + ( + Self { + forwarding_task, + quit_tx, + }, + proxy_incoming_tx, + proxy_outgoing_rx, + ) + } + + async fn into_channels(mut self) -> (UnboundedSender, UnboundedReceiver) { + let _ = self.quit_tx.send(()).await; + self.forwarding_task.await + } +} + +struct SshRemoteClientState { + ssh_connection: SshRemoteConnection, + delegate: Arc, + forwarder: ChannelForwarder, + _multiplex_task: Task>, +} + +pub struct SshRemoteClient { + client: Arc, + inner_state: Arc>>, +} + +impl SshRemoteClient { + pub async fn new( connection_options: SshConnectionOptions, delegate: Arc, cx: &mut AsyncAppContext, ) -> Result> { - let client_state = SshClientState::new(connection_options, delegate.clone(), cx).await?; + let (outgoing_tx, outgoing_rx) = mpsc::unbounded::(); + let (incoming_tx, incoming_rx) = mpsc::unbounded::(); - let platform = client_state.query_platform().await?; - let (local_binary_path, version) = delegate.get_server_binary(platform, cx).await??; - let remote_binary_path = delegate.remote_server_binary_path(cx)?; - client_state - .ensure_server_binary( - &delegate, - &local_binary_path, - &remote_binary_path, - version, + let client = cx.update(|cx| ChannelClient::new(incoming_rx, outgoing_tx, cx))?; + let this = Arc::new(Self { + client, + inner_state: Arc::new(Mutex::new(None)), + }); + + let inner_state = { + let (proxy, proxy_incoming_tx, proxy_outgoing_rx) = + ChannelForwarder::new(incoming_tx, outgoing_rx, cx); + + let (ssh_connection, ssh_process) = + Self::establish_connection(connection_options.clone(), delegate.clone(), cx) + .await?; + + let multiplex_task = Self::multiplex( + this.clone(), + ssh_process, + proxy_incoming_tx, + proxy_outgoing_rx, cx, - ) - .await?; + ); - let (spawn_process_tx, mut spawn_process_rx) = mpsc::unbounded::(); - let (outgoing_tx, mut outgoing_rx) = mpsc::unbounded::(); - let (incoming_tx, incoming_rx) = mpsc::unbounded::(); + SshRemoteClientState { + ssh_connection, + delegate, + forwarder: proxy, + _multiplex_task: multiplex_task, + } + }; - let socket = client_state.socket.clone(); - run_cmd(socket.ssh_command(&remote_binary_path).arg("version")).await?; + this.inner_state.lock().replace(inner_state); - let mut remote_server_child = socket - .ssh_command(format!( - "RUST_LOG={} RUST_BACKTRACE={} {:?} run", - std::env::var("RUST_LOG").unwrap_or_default(), - std::env::var("RUST_BACKTRACE").unwrap_or_default(), - remote_binary_path, - )) - .spawn() - .context("failed to spawn remote server")?; - let mut child_stderr = remote_server_child.stderr.take().unwrap(); - let mut child_stdout = remote_server_child.stdout.take().unwrap(); - let mut child_stdin = remote_server_child.stdin.take().unwrap(); + Ok(this) + } + + fn reconnect(this: Arc, cx: &mut AsyncAppContext) -> Result<()> { + let Some(state) = this.inner_state.lock().take() else { + return Err(anyhow!("reconnect is already in progress")); + }; + + let SshRemoteClientState { + mut ssh_connection, + delegate, + forwarder: proxy, + _multiplex_task, + } = state; + drop(_multiplex_task); + + cx.spawn(|mut cx| async move { + 
let (incoming_tx, outgoing_rx) = proxy.into_channels().await; + + ssh_connection.master_process.kill()?; + ssh_connection + .master_process + .status() + .await + .context("Failed to kill ssh process")?; + + let connection_options = ssh_connection.socket.connection_options.clone(); + + let (ssh_connection, ssh_process) = + Self::establish_connection(connection_options, delegate.clone(), &mut cx).await?; + + let (proxy, proxy_incoming_tx, proxy_outgoing_rx) = + ChannelForwarder::new(incoming_tx, outgoing_rx, &mut cx); + + let inner_state = SshRemoteClientState { + ssh_connection, + delegate, + forwarder: proxy, + _multiplex_task: Self::multiplex( + this.clone(), + ssh_process, + proxy_incoming_tx, + proxy_outgoing_rx, + &mut cx, + ), + }; + this.inner_state.lock().replace(inner_state); + + anyhow::Ok(()) + }) + .detach(); + + anyhow::Ok(()) + } + + fn multiplex( + this: Arc, + mut ssh_process: Child, + incoming_tx: UnboundedSender, + mut outgoing_rx: UnboundedReceiver, + cx: &mut AsyncAppContext, + ) -> Task> { + let mut child_stderr = ssh_process.stderr.take().unwrap(); + let mut child_stdout = ssh_process.stdout.take().unwrap(); + let mut child_stdin = ssh_process.stdin.take().unwrap(); let io_task = cx.background_executor().spawn(async move { let mut stdin_buffer = Vec::new(); @@ -194,27 +378,15 @@ impl SshSession { write_message(&mut child_stdin, &mut stdin_buffer, outgoing).await?; } - request = spawn_process_rx.next().fuse() => { - let Some(request) = request else { - return Ok(()); - }; - - log::info!("spawn process: {:?}", request.command); - let child = client_state.socket - .ssh_command(&request.command) - .spawn() - .context("failed to create channel")?; - request.process_tx.send(child).ok(); - } - result = child_stdout.read(&mut stdout_buffer).fuse() => { match result { Ok(0) => { child_stdin.close().await?; outgoing_rx.close(); - let status = remote_server_child.status().await?; + let status = ssh_process.status().await?; if !status.success() { - log::error!("channel exited with status: {status:?}"); + log::error!("ssh process exited with status: {status:?}"); + return Err(anyhow!("ssh process exited with non-zero status code: {:?}", status.code())); } return Ok(()); } @@ -267,239 +439,112 @@ impl SshSession { } }); - cx.update(|cx| { - Self::new( - incoming_rx, - outgoing_tx, - spawn_process_tx, - Some(socket), - Some(io_task), - cx, - ) - }) - } + cx.spawn(|mut cx| async move { + let result = io_task.await; - pub fn server( - incoming_rx: mpsc::UnboundedReceiver, - outgoing_tx: mpsc::UnboundedSender, - cx: &AppContext, - ) -> Arc { - let (tx, _rx) = mpsc::unbounded(); - Self::new(incoming_rx, outgoing_tx, tx, None, None, cx) - } - - #[cfg(any(test, feature = "test-support"))] - pub fn fake( - client_cx: &mut gpui::TestAppContext, - server_cx: &mut gpui::TestAppContext, - ) -> (Arc, Arc) { - let (server_to_client_tx, server_to_client_rx) = mpsc::unbounded(); - let (client_to_server_tx, client_to_server_rx) = mpsc::unbounded(); - let (tx, _rx) = mpsc::unbounded(); - ( - client_cx.update(|cx| { - Self::new( - server_to_client_rx, - client_to_server_tx, - tx.clone(), - None, // todo() - None, - cx, - ) - }), - server_cx.update(|cx| { - Self::new( - client_to_server_rx, - server_to_client_tx, - tx.clone(), - None, - None, - cx, - ) - }), - ) - } - - fn new( - mut incoming_rx: mpsc::UnboundedReceiver, - outgoing_tx: mpsc::UnboundedSender, - spawn_process_tx: mpsc::UnboundedSender, - client_socket: Option, - io_task: Option>>, - cx: &AppContext, - ) -> Arc { - let this = 
Arc::new(Self { - next_message_id: AtomicU32::new(0), - response_channels: ResponseChannels::default(), - outgoing_tx, - spawn_process_tx, - client_socket, - state: Default::default(), - _io_task: io_task, - }); - - cx.spawn(|cx| { - let this = Arc::downgrade(&this); - async move { - let peer_id = PeerId { owner_id: 0, id: 0 }; - while let Some(incoming) = incoming_rx.next().await { - let Some(this) = this.upgrade() else { - return anyhow::Ok(()); - }; - - if let Some(request_id) = incoming.responding_to { - let request_id = MessageId(request_id); - let sender = this.response_channels.lock().remove(&request_id); - if let Some(sender) = sender { - let (tx, rx) = oneshot::channel(); - if incoming.payload.is_some() { - sender.send((incoming, tx)).ok(); - } - rx.await.ok(); - } - } else if let Some(envelope) = - build_typed_envelope(peer_id, Instant::now(), incoming) - { - let type_name = envelope.payload_type_name(); - if let Some(future) = ProtoMessageHandlerSet::handle_message( - &this.state, - envelope, - this.clone().into(), - cx.clone(), - ) { - log::debug!("ssh message received. name:{type_name}"); - match future.await { - Ok(_) => { - log::debug!("ssh message handled. name:{type_name}"); - } - Err(error) => { - log::error!( - "error handling message. type:{type_name}, error:{error}", - ); - } - } - } else { - log::error!("unhandled ssh message name:{type_name}"); - } - } - } - anyhow::Ok(()) + if let Err(error) = result { + log::warn!("ssh io task died with error: {:?}. reconnecting...", error); + Self::reconnect(this, &mut cx).ok(); } - }) - .detach(); - - this - } - pub fn request( - &self, - payload: T, - ) -> impl 'static + Future> { - log::debug!("ssh request start. name:{}", T::NAME); - let response = self.request_dynamic(payload.into_envelope(0, None, None), T::NAME); - async move { - let response = response.await?; - log::debug!("ssh request finish. 
name:{}", T::NAME); - T::Response::from_envelope(response) - .ok_or_else(|| anyhow!("received a response of the wrong type")) - } - } - - pub fn send(&self, payload: T) -> Result<()> { - log::debug!("ssh send name:{}", T::NAME); - self.send_dynamic(payload.into_envelope(0, None, None)) + Ok(()) + }) } - pub fn request_dynamic( - &self, - mut envelope: proto::Envelope, - type_name: &'static str, - ) -> impl 'static + Future> { - envelope.id = self.next_message_id.fetch_add(1, SeqCst); - let (tx, rx) = oneshot::channel(); - let mut response_channels_lock = self.response_channels.lock(); - response_channels_lock.insert(MessageId(envelope.id), tx); - drop(response_channels_lock); - let result = self.outgoing_tx.unbounded_send(envelope); - async move { - if let Err(error) = &result { - log::error!("failed to send message: {}", error); - return Err(anyhow!("failed to send message: {}", error)); - } - - let response = rx.await.context("connection lost")?.0; - if let Some(proto::envelope::Payload::Error(error)) = &response.payload { - return Err(RpcError::from_proto(error, type_name)); - } - Ok(response) - } - } + async fn establish_connection( + connection_options: SshConnectionOptions, + delegate: Arc, + cx: &mut AsyncAppContext, + ) -> Result<(SshRemoteConnection, Child)> { + let ssh_connection = + SshRemoteConnection::new(connection_options, delegate.clone(), cx).await?; - pub fn send_dynamic(&self, mut envelope: proto::Envelope) -> Result<()> { - envelope.id = self.next_message_id.fetch_add(1, SeqCst); - self.outgoing_tx.unbounded_send(envelope)?; - Ok(()) - } + let platform = ssh_connection.query_platform().await?; + let (local_binary_path, version) = delegate.get_server_binary(platform, cx).await??; + let remote_binary_path = delegate.remote_server_binary_path(cx)?; + ssh_connection + .ensure_server_binary( + &delegate, + &local_binary_path, + &remote_binary_path, + version, + cx, + ) + .await?; - pub fn subscribe_to_entity(&self, remote_id: u64, entity: &Model) { - let id = (TypeId::of::(), remote_id); + let socket = ssh_connection.socket.clone(); + run_cmd(socket.ssh_command(&remote_binary_path).arg("version")).await?; - let mut state = self.state.lock(); - if state.entities_by_type_and_remote_id.contains_key(&id) { - panic!("already subscribed to entity"); - } + let ssh_process = socket + .ssh_command(format!( + "RUST_LOG={} RUST_BACKTRACE={} {:?} run", + std::env::var("RUST_LOG").unwrap_or_default(), + std::env::var("RUST_BACKTRACE").unwrap_or_default(), + remote_binary_path, + )) + .spawn() + .context("failed to spawn remote server")?; - state.entities_by_type_and_remote_id.insert( - id, - EntityMessageSubscriber::Entity { - handle: entity.downgrade().into(), - }, - ); + Ok((ssh_connection, ssh_process)) } - pub async fn spawn_process(&self, command: String) -> process::Child { - let (process_tx, process_rx) = oneshot::channel(); - self.spawn_process_tx - .unbounded_send(SpawnRequest { - command, - process_tx, - }) - .ok(); - process_rx.await.unwrap() + pub fn subscribe_to_entity(&self, remote_id: u64, entity: &Model) { + self.client.subscribe_to_entity(remote_id, entity); } - pub fn ssh_args(&self) -> Vec { - self.client_socket.as_ref().unwrap().ssh_args() + pub fn ssh_args(&self) -> Option> { + let state = self.inner_state.lock(); + state + .as_ref() + .map(|state| state.ssh_connection.socket.ssh_args()) } -} -impl ProtoClient for SshSession { - fn request( - &self, - envelope: proto::Envelope, - request_type: &'static str, - ) -> BoxFuture<'static, Result> { - 
self.request_dynamic(envelope, request_type).boxed() + pub fn to_proto_client(&self) -> AnyProtoClient { + self.client.clone().into() } - fn send(&self, envelope: proto::Envelope, _message_type: &'static str) -> Result<()> { - self.send_dynamic(envelope) - } + #[cfg(any(test, feature = "test-support"))] + pub fn fake( + client_cx: &mut gpui::TestAppContext, + server_cx: &mut gpui::TestAppContext, + ) -> (Arc, Arc) { + let (server_to_client_tx, server_to_client_rx) = mpsc::unbounded(); + let (client_to_server_tx, client_to_server_rx) = mpsc::unbounded(); - fn send_response(&self, envelope: Envelope, _message_type: &'static str) -> anyhow::Result<()> { - self.send_dynamic(envelope) + ( + client_cx.update(|cx| { + let client = ChannelClient::new(server_to_client_rx, client_to_server_tx, cx); + Arc::new(Self { + client, + inner_state: Arc::new(Mutex::new(None)), + }) + }), + server_cx.update(|cx| ChannelClient::new(client_to_server_rx, server_to_client_tx, cx)), + ) } +} - fn message_handler_set(&self) -> &Mutex { - &self.state +impl From for AnyProtoClient { + fn from(client: SshRemoteClient) -> Self { + AnyProtoClient::new(client.client.clone()) } +} - fn is_via_collab(&self) -> bool { - false +struct SshRemoteConnection { + socket: SshSocket, + master_process: process::Child, + _temp_dir: TempDir, +} + +impl Drop for SshRemoteConnection { + fn drop(&mut self) { + if let Err(error) = self.master_process.kill() { + log::error!("failed to kill SSH master process: {}", error); + } } } -impl SshClientState { +impl SshRemoteConnection { #[cfg(not(unix))] async fn new( _connection_options: SshConnectionOptions, @@ -740,74 +785,181 @@ impl SshClientState { } } -#[cfg(unix)] -async fn read_with_timeout( - stdout: &mut process::ChildStdout, - timeout: std::time::Duration, - output: &mut Vec, -) -> Result<(), std::io::Error> { - smol::future::or( - async { - stdout.read_to_end(output).await?; - Ok::<_, std::io::Error>(()) - }, - async { - smol::Timer::after(timeout).await; +type ResponseChannels = Mutex)>>>; - Err(std::io::Error::new( - std::io::ErrorKind::TimedOut, - "Read operation timed out", - )) - }, - ) - .await +pub struct ChannelClient { + next_message_id: AtomicU32, + outgoing_tx: mpsc::UnboundedSender, + response_channels: ResponseChannels, // Lock + message_handlers: Mutex, // Lock } -impl Drop for SshClientState { - fn drop(&mut self) { - if let Err(error) = self.master_process.kill() { - log::error!("failed to kill SSH master process: {}", error); +impl ChannelClient { + pub fn new( + incoming_rx: mpsc::UnboundedReceiver, + outgoing_tx: mpsc::UnboundedSender, + cx: &AppContext, + ) -> Arc { + let this = Arc::new(Self { + outgoing_tx, + next_message_id: AtomicU32::new(0), + response_channels: ResponseChannels::default(), + message_handlers: Default::default(), + }); + + Self::start_handling_messages(this.clone(), incoming_rx, cx); + + this + } + + fn start_handling_messages( + this: Arc, + mut incoming_rx: mpsc::UnboundedReceiver, + cx: &AppContext, + ) { + cx.spawn(|cx| { + let this = Arc::downgrade(&this); + async move { + let peer_id = PeerId { owner_id: 0, id: 0 }; + while let Some(incoming) = incoming_rx.next().await { + let Some(this) = this.upgrade() else { + return anyhow::Ok(()); + }; + + if let Some(request_id) = incoming.responding_to { + let request_id = MessageId(request_id); + let sender = this.response_channels.lock().remove(&request_id); + if let Some(sender) = sender { + let (tx, rx) = oneshot::channel(); + if incoming.payload.is_some() { + sender.send((incoming, 
tx)).ok(); + } + rx.await.ok(); + } + } else if let Some(envelope) = + build_typed_envelope(peer_id, Instant::now(), incoming) + { + let type_name = envelope.payload_type_name(); + if let Some(future) = ProtoMessageHandlerSet::handle_message( + &this.message_handlers, + envelope, + this.clone().into(), + cx.clone(), + ) { + log::debug!("ssh message received. name:{type_name}"); + match future.await { + Ok(_) => { + log::debug!("ssh message handled. name:{type_name}"); + } + Err(error) => { + log::error!( + "error handling message. type:{type_name}, error:{error}", + ); + } + } + } else { + log::error!("unhandled ssh message name:{type_name}"); + } + } + } + anyhow::Ok(()) + } + }) + .detach(); + } + + pub fn subscribe_to_entity(&self, remote_id: u64, entity: &Model) { + let id = (TypeId::of::(), remote_id); + + let mut message_handlers = self.message_handlers.lock(); + if message_handlers + .entities_by_type_and_remote_id + .contains_key(&id) + { + panic!("already subscribed to entity"); } + + message_handlers.entities_by_type_and_remote_id.insert( + id, + EntityMessageSubscriber::Entity { + handle: entity.downgrade().into(), + }, + ); } -} -impl SshSocket { - fn ssh_command>(&self, program: S) -> process::Command { - let mut command = process::Command::new("ssh"); - self.ssh_options(&mut command) - .arg(self.connection_options.ssh_url()) - .arg(program); - command + pub fn request( + &self, + payload: T, + ) -> impl 'static + Future> { + log::debug!("ssh request start. name:{}", T::NAME); + let response = self.request_dynamic(payload.into_envelope(0, None, None), T::NAME); + async move { + let response = response.await?; + log::debug!("ssh request finish. name:{}", T::NAME); + T::Response::from_envelope(response) + .ok_or_else(|| anyhow!("received a response of the wrong type")) + } } - fn ssh_options<'a>(&self, command: &'a mut process::Command) -> &'a mut process::Command { - command - .stdin(Stdio::piped()) - .stdout(Stdio::piped()) - .stderr(Stdio::piped()) - .args(["-o", "ControlMaster=no", "-o"]) - .arg(format!("ControlPath={}", self.socket_path.display())) + pub fn send(&self, payload: T) -> Result<()> { + log::debug!("ssh send name:{}", T::NAME); + self.send_dynamic(payload.into_envelope(0, None, None)) } - fn ssh_args(&self) -> Vec { - vec![ - "-o".to_string(), - "ControlMaster=no".to_string(), - "-o".to_string(), - format!("ControlPath={}", self.socket_path.display()), - self.connection_options.ssh_url(), - ] + pub fn request_dynamic( + &self, + mut envelope: proto::Envelope, + type_name: &'static str, + ) -> impl 'static + Future> { + envelope.id = self.next_message_id.fetch_add(1, SeqCst); + let (tx, rx) = oneshot::channel(); + let mut response_channels_lock = self.response_channels.lock(); + response_channels_lock.insert(MessageId(envelope.id), tx); + drop(response_channels_lock); + let result = self.outgoing_tx.unbounded_send(envelope); + async move { + if let Err(error) = &result { + log::error!("failed to send message: {}", error); + return Err(anyhow!("failed to send message: {}", error)); + } + + let response = rx.await.context("connection lost")?.0; + if let Some(proto::envelope::Payload::Error(error)) = &response.payload { + return Err(RpcError::from_proto(error, type_name)); + } + Ok(response) + } + } + + pub fn send_dynamic(&self, mut envelope: proto::Envelope) -> Result<()> { + envelope.id = self.next_message_id.fetch_add(1, SeqCst); + self.outgoing_tx.unbounded_send(envelope)?; + Ok(()) } } -async fn run_cmd(command: &mut process::Command) -> Result { - let output 
= command.output().await?; - if output.status.success() { - Ok(String::from_utf8_lossy(&output.stdout).to_string()) - } else { - Err(anyhow!( - "failed to run command: {}", - String::from_utf8_lossy(&output.stderr) - )) +impl ProtoClient for ChannelClient { + fn request( + &self, + envelope: proto::Envelope, + request_type: &'static str, + ) -> BoxFuture<'static, Result> { + self.request_dynamic(envelope, request_type).boxed() + } + + fn send(&self, envelope: proto::Envelope, _message_type: &'static str) -> Result<()> { + self.send_dynamic(envelope) + } + + fn send_response(&self, envelope: Envelope, _message_type: &'static str) -> anyhow::Result<()> { + self.send_dynamic(envelope) + } + + fn message_handler_set(&self) -> &Mutex { + &self.message_handlers + } + + fn is_via_collab(&self) -> bool { + false } } diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index 4b13938d8ca2f..39540b04e04cb 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -10,7 +10,7 @@ use project::{ worktree_store::WorktreeStore, LspStore, LspStoreEvent, PrettierStore, ProjectPath, WorktreeId, }; -use remote::SshSession; +use remote::ssh_session::ChannelClient; use rpc::{ proto::{self, SSH_PEER_ID, SSH_PROJECT_ID}, AnyProtoClient, TypedEnvelope, @@ -41,7 +41,7 @@ impl HeadlessProject { project::Project::init_settings(cx); } - pub fn new(session: Arc, fs: Arc, cx: &mut ModelContext) -> Self { + pub fn new(session: Arc, fs: Arc, cx: &mut ModelContext) -> Self { let languages = Arc::new(LanguageRegistry::new(cx.background_executor().clone())); let node_runtime = NodeRuntime::unavailable(); diff --git a/crates/remote_server/src/main.rs b/crates/remote_server/src/main.rs index 908a0a89b6273..73b8a91da1876 100644 --- a/crates/remote_server/src/main.rs +++ b/crates/remote_server/src/main.rs @@ -6,7 +6,6 @@ use gpui::Context as _; use remote::{ json_log::LogRecord, protocol::{read_message, write_message}, - SshSession, }; use remote_server::HeadlessProject; use smol::{io::AsyncWriteExt, stream::StreamExt as _, Async}; @@ -24,6 +23,8 @@ fn main() { #[cfg(not(windows))] fn main() { + use remote::ssh_session::ChannelClient; + env_logger::builder() .format(|buf, record| { serde_json::to_writer(&mut *buf, &LogRecord::new(record))?; @@ -55,7 +56,7 @@ fn main() { let mut stdin = Async::new(io::stdin()).unwrap(); let mut stdout = Async::new(io::stdout()).unwrap(); - let session = SshSession::server(incoming_rx, outgoing_tx, cx); + let session = ChannelClient::new(incoming_rx, outgoing_tx, cx); let project = cx.new_model(|cx| { HeadlessProject::new( session.clone(), diff --git a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs index 892063942754c..960b7c248c0e2 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -15,7 +15,7 @@ use project::{ search::{SearchQuery, SearchResult}, Project, ProjectPath, }; -use remote::SshSession; +use remote::SshRemoteClient; use serde_json::json; use settings::{Settings, SettingsLocation, SettingsStore}; use smol::stream::StreamExt; @@ -616,7 +616,7 @@ async fn init_test( cx: &mut TestAppContext, server_cx: &mut TestAppContext, ) -> (Model, Model, Arc) { - let (client_ssh, server_ssh) = SshSession::fake(cx, server_cx); + let (ssh_remote_client, ssh_server_client) = SshRemoteClient::fake(cx, server_cx); init_logger(); let fs = FakeFs::new(server_cx.executor()); @@ -642,8 
+642,9 @@ async fn init_test( ); server_cx.update(HeadlessProject::init); - let headless = server_cx.new_model(|cx| HeadlessProject::new(server_ssh, fs.clone(), cx)); - let project = build_project(client_ssh, cx); + let headless = + server_cx.new_model(|cx| HeadlessProject::new(ssh_server_client, fs.clone(), cx)); + let project = build_project(ssh_remote_client, cx); project .update(cx, { @@ -654,7 +655,7 @@ async fn init_test( (project, headless, fs) } -fn build_project(ssh: Arc, cx: &mut TestAppContext) -> Model { +fn build_project(ssh: Arc, cx: &mut TestAppContext) -> Model { cx.update(|cx| { let settings_store = SettingsStore::test(cx); cx.set_global(settings_store); diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index cec913851f04d..b668a5802c3b1 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -61,7 +61,7 @@ use postage::stream::Stream; use project::{ DirectoryLister, Project, ProjectEntryId, ProjectPath, ResolvedPath, Worktree, WorktreeId, }; -use remote::{SshConnectionOptions, SshSession}; +use remote::{SshConnectionOptions, SshRemoteClient}; use serde::Deserialize; use session::AppSession; use settings::{InvalidSettingsError, Settings}; @@ -5514,7 +5514,7 @@ pub fn join_hosted_project( pub fn open_ssh_project( window: WindowHandle, connection_options: SshConnectionOptions, - session: Arc, + session: Arc, app_state: Arc, paths: Vec, cx: &mut AppContext, From 6336248c1a90d23f08678bf6d30fd84bc1638625 Mon Sep 17 00:00:00 2001 From: Junkui Zhang <364772080@qq.com> Date: Tue, 1 Oct 2024 18:58:40 +0800 Subject: [PATCH 163/228] windows: Revert "Fix `hide`, `activate` method on Windows to hide/show application" (#18571) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR reverts the changes introduced via #18164. As shown in the video below, once you `hide` the app, there is essentially no way to bring it back. I must emphasize that the window logic on Windows is entirely different from macOS. On macOS, when you `hide` an app, its icon always remains visible in the dock, and you can always bring the hidden app back by clicking that icon. However, on Windows, there is no such mechanism—the app is literally hidden. I think the `hide` feature should be macOS-only. https://github.com/user-attachments/assets/65c8a007-eedb-4444-9499-787b50f2d1e9 Release Notes: - N/A --- crates/gpui/src/platform/windows/platform.rs | 24 ++------------------ 1 file changed, 2 insertions(+), 22 deletions(-) diff --git a/crates/gpui/src/platform/windows/platform.rs b/crates/gpui/src/platform/windows/platform.rs index 7f6677973b2fb..e90b2c6ef1bed 100644 --- a/crates/gpui/src/platform/windows/platform.rs +++ b/crates/gpui/src/platform/windows/platform.rs @@ -33,8 +33,6 @@ use crate::*; pub(crate) struct WindowsPlatform { state: RefCell, raw_window_handles: RwLock>, - // The window handles that are hided by `hide` method. - hidden_windows: RwLock>, // The below members will never change throughout the entire lifecycle of the app. 
icon: HICON, main_receiver: flume::Receiver, @@ -102,7 +100,6 @@ impl WindowsPlatform { Self { state, raw_window_handles, - hidden_windows: RwLock::new(SmallVec::new()), icon, main_receiver, dispatch_event, @@ -298,26 +295,9 @@ impl Platform for WindowsPlatform { } } - fn activate(&self, _ignoring_other_apps: bool) { - let mut state = self.hidden_windows.write(); - state.iter().for_each(|handle| unsafe { - ShowWindow(*handle, SW_SHOW).ok().log_err(); - }); - state.clear(); - } + fn activate(&self, _ignoring_other_apps: bool) {} - fn hide(&self) { - let mut state = self.hidden_windows.write(); - self.raw_window_handles - .read() - .iter() - .for_each(|handle| unsafe { - if IsWindowVisible(*handle).as_bool() { - state.push(*handle); - ShowWindow(*handle, SW_HIDE).ok().log_err(); - } - }); - } + fn hide(&self) {} // todo(windows) fn hide_other_apps(&self) { From 1be24f77396aaade46739218e70cb059491aaedf Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Tue, 1 Oct 2024 09:31:03 -0400 Subject: [PATCH 164/228] Rename proto language to Proto (#18559) All the other languages are capitalized. Proto should be too. --- crates/languages/src/proto/config.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/languages/src/proto/config.toml b/crates/languages/src/proto/config.toml index b8bccfd39b6a3..6d25c23da5dfa 100644 --- a/crates/languages/src/proto/config.toml +++ b/crates/languages/src/proto/config.toml @@ -1,4 +1,4 @@ -name = "proto" +name = "Proto" grammar = "proto" path_suffixes = ["proto"] line_comments = ["// "] From 68d6177d370defa04be8f989240ab9e8a8e8c79f Mon Sep 17 00:00:00 2001 From: pantheraleo-7 <159872817+pantheraleo-7@users.noreply.github.com> Date: Tue, 1 Oct 2024 20:39:34 +0530 Subject: [PATCH 165/228] docs: Correct typo in `configuring-zed.md` (#18580) Release Notes: - N/A --- docs/src/configuring-zed.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index ad6a628ed0880..1e531f7c744d1 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -1736,7 +1736,7 @@ See Buffer Font Features ## Terminal: Detect Virtual Environments {#terminal-detect_venv} -- Description: Activate the [Python Virtual Environment](https://docs.python.org/3/library/venv.html), if one is found, in the terminal's working directory (as resolved by the working_directory and automatically activating the virtual environemtn +- Description: Activate the [Python Virtual Environment](https://docs.python.org/3/library/venv.html), if one is found, in the terminal's working directory (as resolved by the working_directory and automatically activating the virtual environment. - Setting: `detect_venv` - Default: From 051627c4493b5f5446cb3957576be89899b1d386 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Tue, 1 Oct 2024 18:32:16 +0300 Subject: [PATCH 166/228] Project panel horizontal scrollbar (#18513) image https://github.com/user-attachments/assets/734f1f52-70d9-4308-b1fc-36c7cfd4dd76 Closes https://github.com/zed-industries/zed/issues/7001 Closes https://github.com/zed-industries/zed/issues/4427 Part of https://github.com/zed-industries/zed/issues/15324 Part of https://github.com/zed-industries/zed/issues/14551 * Adjusts a `UniformList` to have a horizontal sizing behavior: the old mode forced all items to have the size of the list exactly. 
A new mode (with corresponding `ListItems` having `overflow_x` enabled) lays out the uniform list elements with width of its widest element, setting the same width to the list itself too. * Using the new behavior, adds a new scrollbar into the project panel and enhances its file name editor to scroll it during editing of long file names * Also restyles the scrollbar a bit, making it narrower and removing its background * Changes the project_panel.scrollbar.show settings to accept `null` and be `null` by default, to inherit `editor`'s scrollbar settings. All editor scrollbar settings are supported now. Release Notes: - Added a horizontal scrollbar to project panel ([#7001](https://github.com/zed-industries/zed/issues/7001)) ([#4427](https://github.com/zed-industries/zed/issues/4427)) --------- Co-authored-by: Piotr Osiewicz --- assets/settings/default.json | 14 +- crates/editor/src/editor.rs | 2 +- crates/gpui/src/elements/div.rs | 19 +- crates/gpui/src/elements/list.rs | 10 + crates/gpui/src/elements/uniform_list.rs | 96 ++++- crates/gpui/src/style.rs | 3 + crates/language/src/language_settings.rs | 2 +- crates/project_panel/src/project_panel.rs | 335 +++++++++++++++--- .../src/project_panel_settings.rs | 22 +- crates/project_panel/src/scrollbar.rs | 192 +++++++--- crates/ui/src/components/list/list_item.rs | 15 +- docs/src/configuring-zed.md | 6 +- 12 files changed, 567 insertions(+), 149 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index f6c498e0278e6..133ff9451d027 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -356,9 +356,19 @@ /// Scrollbar-related settings "scrollbar": { /// When to show the scrollbar in the project panel. + /// This setting can take four values: /// - /// Default: always - "show": "always" + /// 1. null (default): Inherit editor settings + /// 2. Show the scrollbar if there's important information or + /// follow the system's configured behavior (default): + /// "auto" + /// 3. Match the system's configured behavior: + /// "system" + /// 4. Always show the scrollbar: + /// "always" + /// 5. 
Never show the scrollbar: + /// "never" + "show": null } }, "outline_panel": { diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index d1ca70f705ce5..61a47d7f631fd 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -61,7 +61,7 @@ use debounced_delay::DebouncedDelay; use display_map::*; pub use display_map::{DisplayPoint, FoldPlaceholder}; pub use editor_settings::{ - CurrentLineHighlight, EditorSettings, ScrollBeyondLastLine, SearchSettings, + CurrentLineHighlight, EditorSettings, ScrollBeyondLastLine, SearchSettings, ShowScrollbar, }; pub use editor_settings_controls::*; use element::LineWithInvisibles; diff --git a/crates/gpui/src/elements/div.rs b/crates/gpui/src/elements/div.rs index 4e92f7f82c4cb..284e574627631 100644 --- a/crates/gpui/src/elements/div.rs +++ b/crates/gpui/src/elements/div.rs @@ -2057,6 +2057,7 @@ impl Interactivity { fn paint_scroll_listener(&self, hitbox: &Hitbox, style: &Style, cx: &mut WindowContext) { if let Some(scroll_offset) = self.scroll_offset.clone() { let overflow = style.overflow; + let allow_concurrent_scroll = style.allow_concurrent_scroll; let line_height = cx.line_height(); let hitbox = hitbox.clone(); cx.on_mouse_event(move |event: &ScrollWheelEvent, phase, cx| { @@ -2065,27 +2066,31 @@ impl Interactivity { let old_scroll_offset = *scroll_offset; let delta = event.delta.pixel_delta(line_height); + let mut delta_x = Pixels::ZERO; if overflow.x == Overflow::Scroll { - let mut delta_x = Pixels::ZERO; if !delta.x.is_zero() { delta_x = delta.x; } else if overflow.y != Overflow::Scroll { delta_x = delta.y; } - - scroll_offset.x += delta_x; } - + let mut delta_y = Pixels::ZERO; if overflow.y == Overflow::Scroll { - let mut delta_y = Pixels::ZERO; if !delta.y.is_zero() { delta_y = delta.y; } else if overflow.x != Overflow::Scroll { delta_y = delta.x; } - - scroll_offset.y += delta_y; } + if !allow_concurrent_scroll && !delta_x.is_zero() && !delta_y.is_zero() { + if delta_x.abs() > delta_y.abs() { + delta_y = Pixels::ZERO; + } else { + delta_x = Pixels::ZERO; + } + } + scroll_offset.y += delta_y; + scroll_offset.x += delta_x; cx.stop_propagation(); if *scroll_offset != old_scroll_offset { diff --git a/crates/gpui/src/elements/list.rs b/crates/gpui/src/elements/list.rs index 6ac6d2a9bf8d7..d77c91e6552d3 100644 --- a/crates/gpui/src/elements/list.rs +++ b/crates/gpui/src/elements/list.rs @@ -89,6 +89,16 @@ pub enum ListSizingBehavior { Auto, } +/// The horizontal sizing behavior to apply during layout. +#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub enum ListHorizontalSizingBehavior { + /// List items' width can never exceed the width of the list. + #[default] + FitList, + /// List items' width may go over the width of the list, if any item is wider. + Unconstrained, +} + struct LayoutItemsResponse { max_item_width: Pixels, scroll_top: ListOffset, diff --git a/crates/gpui/src/elements/uniform_list.rs b/crates/gpui/src/elements/uniform_list.rs index 4dc2f5335dedd..54297d1214f3f 100644 --- a/crates/gpui/src/elements/uniform_list.rs +++ b/crates/gpui/src/elements/uniform_list.rs @@ -5,8 +5,8 @@ //! elements with uniform height. 
use crate::{ - point, px, size, AnyElement, AvailableSpace, Bounds, ContentMask, Element, ElementId, - GlobalElementId, Hitbox, InteractiveElement, Interactivity, IntoElement, LayoutId, + point, size, AnyElement, AvailableSpace, Bounds, ContentMask, Element, ElementId, + GlobalElementId, Hitbox, InteractiveElement, Interactivity, IntoElement, IsZero, LayoutId, ListSizingBehavior, Pixels, Render, ScrollHandle, Size, StyleRefinement, Styled, View, ViewContext, WindowContext, }; @@ -14,6 +14,8 @@ use smallvec::SmallVec; use std::{cell::RefCell, cmp, ops::Range, rc::Rc}; use taffy::style::Overflow; +use super::ListHorizontalSizingBehavior; + /// uniform_list provides lazy rendering for a set of items that are of uniform height. /// When rendered into a container with overflow-y: hidden and a fixed (or max) height, /// uniform_list will only render the visible subset of items. @@ -57,6 +59,7 @@ where }, scroll_handle: None, sizing_behavior: ListSizingBehavior::default(), + horizontal_sizing_behavior: ListHorizontalSizingBehavior::default(), } } @@ -69,11 +72,11 @@ pub struct UniformList { interactivity: Interactivity, scroll_handle: Option, sizing_behavior: ListSizingBehavior, + horizontal_sizing_behavior: ListHorizontalSizingBehavior, } /// Frame state used by the [UniformList]. pub struct UniformListFrameState { - item_size: Size, items: SmallVec<[AnyElement; 32]>, } @@ -87,7 +90,18 @@ pub struct UniformListScrollHandle(pub Rc>); pub struct UniformListScrollState { pub base_handle: ScrollHandle, pub deferred_scroll_to_item: Option, - pub last_item_height: Option, + /// Size of the item, captured during last layout. + pub last_item_size: Option, +} + +#[derive(Copy, Clone, Debug, Default)] +/// The size of the item and its contents. +pub struct ItemSize { + /// The size of the item. + pub item: Size, + /// The size of the item's contents, which may be larger than the item itself, + /// if the item was bounded by a parent element. 
+ pub contents: Size, } impl UniformListScrollHandle { @@ -96,7 +110,7 @@ impl UniformListScrollHandle { Self(Rc::new(RefCell::new(UniformListScrollState { base_handle: ScrollHandle::new(), deferred_scroll_to_item: None, - last_item_height: None, + last_item_size: None, }))) } @@ -170,7 +184,6 @@ impl Element for UniformList { ( layout_id, UniformListFrameState { - item_size, items: SmallVec::new(), }, ) @@ -193,17 +206,30 @@ impl Element for UniformList { - point(border.right + padding.right, border.bottom + padding.bottom), ); + let can_scroll_horizontally = matches!( + self.horizontal_sizing_behavior, + ListHorizontalSizingBehavior::Unconstrained + ); + + let longest_item_size = self.measure_item(None, cx); + let content_width = if can_scroll_horizontally { + padded_bounds.size.width.max(longest_item_size.width) + } else { + padded_bounds.size.width + }; let content_size = Size { - width: padded_bounds.size.width, - height: frame_state.item_size.height * self.item_count + padding.top + padding.bottom, + width: content_width, + height: longest_item_size.height * self.item_count + padding.top + padding.bottom, }; let shared_scroll_offset = self.interactivity.scroll_offset.clone().unwrap(); - - let item_height = self.measure_item(Some(padded_bounds.size.width), cx).height; + let item_height = longest_item_size.height; let shared_scroll_to_item = self.scroll_handle.as_mut().and_then(|handle| { let mut handle = handle.0.borrow_mut(); - handle.last_item_height = Some(item_height); + handle.last_item_size = Some(ItemSize { + item: padded_bounds.size, + contents: content_size, + }); handle.deferred_scroll_to_item.take() }); @@ -228,12 +254,19 @@ impl Element for UniformList { if self.item_count > 0 { let content_height = item_height * self.item_count + padding.top + padding.bottom; - let min_scroll_offset = padded_bounds.size.height - content_height; - let is_scrolled = scroll_offset.y != px(0.); + let is_scrolled_vertically = !scroll_offset.y.is_zero(); + let min_vertical_scroll_offset = padded_bounds.size.height - content_height; + if is_scrolled_vertically && scroll_offset.y < min_vertical_scroll_offset { + shared_scroll_offset.borrow_mut().y = min_vertical_scroll_offset; + scroll_offset.y = min_vertical_scroll_offset; + } - if is_scrolled && scroll_offset.y < min_scroll_offset { - shared_scroll_offset.borrow_mut().y = min_scroll_offset; - scroll_offset.y = min_scroll_offset; + let content_width = content_size.width + padding.left + padding.right; + let is_scrolled_horizontally = + can_scroll_horizontally && !scroll_offset.x.is_zero(); + if is_scrolled_horizontally && content_width <= padded_bounds.size.width { + shared_scroll_offset.borrow_mut().x = Pixels::ZERO; + scroll_offset.x = Pixels::ZERO; } if let Some(ix) = shared_scroll_to_item { @@ -263,9 +296,17 @@ impl Element for UniformList { cx.with_content_mask(Some(content_mask), |cx| { for (mut item, ix) in items.into_iter().zip(visible_range) { let item_origin = padded_bounds.origin - + point(px(0.), item_height * ix + scroll_offset.y + padding.top); + + point( + scroll_offset.x + padding.left, + item_height * ix + scroll_offset.y + padding.top, + ); + let available_width = if can_scroll_horizontally { + padded_bounds.size.width + scroll_offset.x.abs() + } else { + padded_bounds.size.width + }; let available_space = size( - AvailableSpace::Definite(padded_bounds.size.width), + AvailableSpace::Definite(available_width), AvailableSpace::Definite(item_height), ); item.layout_as_root(available_space, cx); @@ -318,6 +359,25 @@ impl 
UniformList { self } + /// Sets the horizontal sizing behavior, controlling the way list items laid out horizontally. + /// With [`ListHorizontalSizingBehavior::Unconstrained`] behavior, every item and the list itself will + /// have the size of the widest item and lay out pushing the `end_slot` to the right end. + pub fn with_horizontal_sizing_behavior( + mut self, + behavior: ListHorizontalSizingBehavior, + ) -> Self { + self.horizontal_sizing_behavior = behavior; + match behavior { + ListHorizontalSizingBehavior::FitList => { + self.interactivity.base_style.overflow.x = None; + } + ListHorizontalSizingBehavior::Unconstrained => { + self.interactivity.base_style.overflow.x = Some(Overflow::Scroll); + } + } + self + } + fn measure_item(&self, list_width: Option, cx: &mut WindowContext) -> Size { if self.item_count == 0 { return Size::default(); diff --git a/crates/gpui/src/style.rs b/crates/gpui/src/style.rs index c3148fcfa8b4b..455a2e162d563 100644 --- a/crates/gpui/src/style.rs +++ b/crates/gpui/src/style.rs @@ -156,6 +156,8 @@ pub struct Style { pub overflow: Point, /// How much space (in points) should be reserved for the scrollbars of `Overflow::Scroll` and `Overflow::Auto` nodes. pub scrollbar_width: f32, + /// Whether both x and y axis should be scrollable at the same time. + pub allow_concurrent_scroll: bool, // Position properties /// What should the `position` value of this struct use as a base offset? @@ -667,6 +669,7 @@ impl Default for Style { x: Overflow::Visible, y: Overflow::Visible, }, + allow_concurrent_scroll: false, scrollbar_width: 0.0, position: Position::Relative, inset: Edges::auto(), diff --git a/crates/language/src/language_settings.rs b/crates/language/src/language_settings.rs index d610ab09865ce..de37e52290bf4 100644 --- a/crates/language/src/language_settings.rs +++ b/crates/language/src/language_settings.rs @@ -381,7 +381,7 @@ pub struct FeaturesContent { pub enum SoftWrap { /// Prefer a single line generally, unless an overly long line is encountered. None, - /// Deprecated: use None instead. Left to avoid breakin existing users' configs. + /// Deprecated: use None instead. Left to avoid breaking existing users' configs. /// Prefer a single line generally, unless an overly long line is encountered. PreferLine, /// Soft wrap lines that exceed the editor width. 
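For illustration, a minimal sketch of how a view might opt into the new horizontal sizing behavior added in the `uniform_list` hunk above. `FilePanel`, its fields, and the closure body are placeholders rather than code from this patch; the builder calls mirror the project panel changes later in this commit.

```rust
use gpui::{
    div, uniform_list, IntoElement, ListHorizontalSizingBehavior, ListSizingBehavior,
    ParentElement, Render, Styled, UniformListScrollHandle, ViewContext,
};

// Hypothetical view: a panel whose rows can be wider than the panel itself.
struct FilePanel {
    item_count: usize,
    scroll_handle: UniformListScrollHandle,
}

impl Render for FilePanel {
    fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
        uniform_list(cx.view().clone(), "entries", self.item_count, |_, range, _| {
            // With `Unconstrained` horizontal sizing, rows are laid out at the
            // width of the widest item instead of being clipped to the list width.
            range
                .map(|ix| div().child(format!("entry {ix}")))
                .collect::<Vec<_>>()
        })
        .size_full()
        .with_sizing_behavior(ListSizingBehavior::Infer)
        .with_horizontal_sizing_behavior(ListHorizontalSizingBehavior::Unconstrained)
        .track_scroll(self.scroll_handle.clone())
    }
}
```

The `FitList` variant keeps the previous behavior, where every row is forced to the list's own width, so existing callers are unaffected unless they opt in.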
diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index 6958bfb3318e2..53b274ee6fa96 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -8,20 +8,22 @@ use db::kvp::KEY_VALUE_STORE; use editor::{ items::entry_git_aware_label_color, scroll::{Autoscroll, ScrollbarAutoHide}, - Editor, + Editor, EditorEvent, EditorSettings, ShowScrollbar, }; use file_icons::FileIcons; -use anyhow::{anyhow, Result}; +use anyhow::{anyhow, Context as _, Result}; use collections::{hash_map, BTreeSet, HashMap}; +use core::f32; use git::repository::GitFileStatus; use gpui::{ actions, anchored, deferred, div, impl_actions, px, uniform_list, Action, AnyElement, AppContext, AssetSource, AsyncWindowContext, ClipboardItem, DismissEvent, Div, DragMoveEvent, - EventEmitter, ExternalPaths, FocusHandle, FocusableView, InteractiveElement, KeyContext, - ListSizingBehavior, Model, MouseButton, MouseDownEvent, ParentElement, Pixels, Point, - PromptLevel, Render, Stateful, Styled, Subscription, Task, UniformListScrollHandle, View, - ViewContext, VisualContext as _, WeakView, WindowContext, + Entity, EventEmitter, ExternalPaths, FocusHandle, FocusableView, InteractiveElement, + KeyContext, ListHorizontalSizingBehavior, ListSizingBehavior, Model, MouseButton, + MouseDownEvent, ParentElement, Pixels, Point, PromptLevel, Render, Stateful, Styled, + Subscription, Task, UniformListScrollHandle, View, ViewContext, VisualContext as _, WeakView, + WindowContext, }; use indexmap::IndexMap; use menu::{Confirm, SelectFirst, SelectLast, SelectNext, SelectPrev}; @@ -29,7 +31,7 @@ use project::{ relativize_path, Entry, EntryKind, Fs, Project, ProjectEntryId, ProjectPath, Worktree, WorktreeId, }; -use project_panel_settings::{ProjectPanelDockPosition, ProjectPanelSettings, ShowScrollbar}; +use project_panel_settings::{ProjectPanelDockPosition, ProjectPanelSettings}; use serde::{Deserialize, Serialize}; use std::{ cell::{Cell, OnceCell}, @@ -80,8 +82,10 @@ pub struct ProjectPanel { width: Option, pending_serialization: Task>, show_scrollbar: bool, - scrollbar_drag_thumb_offset: Rc>>, + vertical_scrollbar_drag_thumb_offset: Rc>>, + horizontal_scrollbar_drag_thumb_offset: Rc>>, hide_scrollbar_task: Option>, + max_width_item_index: Option, } #[derive(Clone, Debug)] @@ -90,6 +94,8 @@ struct EditState { entry_id: ProjectEntryId, is_new_entry: bool, is_dir: bool, + is_symlink: bool, + depth: usize, processing_filename: Option, } @@ -254,23 +260,26 @@ impl ProjectPanel { let filename_editor = cx.new_view(Editor::single_line); - cx.subscribe(&filename_editor, |this, _, event, cx| match event { - editor::EditorEvent::BufferEdited - | editor::EditorEvent::SelectionsChanged { .. } => { - this.autoscroll(cx); - } - editor::EditorEvent::Blurred => { - if this - .edit_state - .as_ref() - .map_or(false, |state| state.processing_filename.is_none()) - { - this.edit_state = None; - this.update_visible_entries(None, cx); + cx.subscribe( + &filename_editor, + |project_panel, _, editor_event, cx| match editor_event { + EditorEvent::BufferEdited | EditorEvent::SelectionsChanged { .. 
} => { + project_panel.autoscroll(cx); } - } - _ => {} - }) + EditorEvent::Blurred => { + if project_panel + .edit_state + .as_ref() + .map_or(false, |state| state.processing_filename.is_none()) + { + project_panel.edit_state = None; + project_panel.update_visible_entries(None, cx); + cx.notify(); + } + } + _ => {} + }, + ) .detach(); cx.observe_global::(|_, cx| { @@ -311,7 +320,9 @@ impl ProjectPanel { pending_serialization: Task::ready(None), show_scrollbar: !Self::should_autohide_scrollbar(cx), hide_scrollbar_task: None, - scrollbar_drag_thumb_offset: Default::default(), + vertical_scrollbar_drag_thumb_offset: Default::default(), + horizontal_scrollbar_drag_thumb_offset: Default::default(), + max_width_item_index: None, }; this.update_visible_entries(None, cx); @@ -827,7 +838,7 @@ impl ProjectPanel { Some(cx.spawn(|project_panel, mut cx| async move { let new_entry = edit_task.await; project_panel.update(&mut cx, |project_panel, cx| { - project_panel.edit_state.take(); + project_panel.edit_state = None; cx.notify(); })?; @@ -970,6 +981,8 @@ impl ProjectPanel { is_new_entry: true, is_dir, processing_filename: None, + is_symlink: false, + depth: 0, }); self.filename_editor.update(cx, |editor, cx| { editor.clear(cx); @@ -992,6 +1005,7 @@ impl ProjectPanel { leaf_entry_id } } + fn rename(&mut self, _: &Rename, cx: &mut ViewContext) { if let Some(SelectedEntry { worktree_id, @@ -1007,6 +1021,8 @@ impl ProjectPanel { is_new_entry: false, is_dir: entry.is_dir(), processing_filename: None, + is_symlink: entry.is_symlink, + depth: 0, }); let file_name = entry .path @@ -1750,6 +1766,7 @@ impl ProjectPanel { let old_ancestors = std::mem::take(&mut self.ancestors); self.visible_entries.clear(); + let mut max_width_item = None; for worktree in project.visible_worktrees(cx) { let snapshot = worktree.read(cx).snapshot(); let worktree_id = snapshot.id(); @@ -1805,6 +1822,12 @@ impl ProjectPanel { .get(&entry.id) .map(|ancestor| ancestor.current_ancestor_depth) .unwrap_or_default(); + if let Some(edit_state) = &mut self.edit_state { + if edit_state.entry_id == entry.id { + edit_state.is_symlink = entry.is_symlink; + edit_state.depth = depth; + } + } let mut ancestors = std::mem::take(&mut auto_folded_ancestors); if ancestors.len() > 1 { ancestors.reverse(); @@ -1837,6 +1860,78 @@ impl ProjectPanel { is_fifo: entry.is_fifo, }); } + let worktree_abs_path = worktree.read(cx).abs_path(); + let (depth, path) = if Some(entry) == worktree.read(cx).root_entry() { + let Some(path_name) = worktree_abs_path + .file_name() + .with_context(|| { + format!("Worktree abs path has no file name, root entry: {entry:?}") + }) + .log_err() + else { + continue; + }; + let path = Arc::from(Path::new(path_name)); + let depth = 0; + (depth, path) + } else if entry.is_file() { + let Some(path_name) = entry + .path + .file_name() + .with_context(|| format!("Non-root entry has no file name: {entry:?}")) + .log_err() + else { + continue; + }; + let path = Arc::from(Path::new(path_name)); + let depth = entry.path.ancestors().count() - 1; + (depth, path) + } else { + let path = self + .ancestors + .get(&entry.id) + .and_then(|ancestors| { + let outermost_ancestor = ancestors.ancestors.last()?; + let root_folded_entry = worktree + .read(cx) + .entry_for_id(*outermost_ancestor)? 
+ .path + .as_ref(); + entry + .path + .strip_prefix(root_folded_entry) + .ok() + .and_then(|suffix| { + let full_path = Path::new(root_folded_entry.file_name()?); + Some(Arc::::from(full_path.join(suffix))) + }) + }) + .unwrap_or_else(|| entry.path.clone()); + let depth = path + .strip_prefix(worktree_abs_path) + .map(|suffix| suffix.components().count()) + .unwrap_or_default(); + (depth, path) + }; + let width_estimate = item_width_estimate( + depth, + path.to_string_lossy().chars().count(), + entry.is_symlink, + ); + + match max_width_item.as_mut() { + Some((id, worktree_id, width)) => { + if *width < width_estimate { + *id = entry.id; + *worktree_id = worktree.read(cx).id(); + *width = width_estimate; + } + } + None => { + max_width_item = Some((entry.id, worktree.read(cx).id(), width_estimate)) + } + } + if expanded_dir_ids.binary_search(&entry.id).is_err() && entry_iter.advance_to_sibling() { @@ -1851,6 +1946,22 @@ impl ProjectPanel { .push((worktree_id, visible_worktree_entries, OnceCell::new())); } + if let Some((project_entry_id, worktree_id, _)) = max_width_item { + let mut visited_worktrees_length = 0; + let index = self.visible_entries.iter().find_map(|(id, entries, _)| { + if worktree_id == *id { + entries + .iter() + .position(|entry| entry.id == project_entry_id) + } else { + visited_worktrees_length += entries.len(); + None + } + }); + if let Some(index) = index { + self.max_width_item_index = Some(visited_worktrees_length + index); + } + } if let Some((worktree_id, entry_id)) = new_selected_entry { self.selection = Some(SelectedEntry { worktree_id, @@ -2474,7 +2585,8 @@ impl ProjectPanel { cx.stop_propagation(); this.deploy_context_menu(event.position, entry_id, cx); }, - )), + )) + .overflow_x(), ) .border_1() .border_r_2() @@ -2498,22 +2610,19 @@ impl ProjectPanel { ) } - fn render_scrollbar( - &self, - items_count: usize, - cx: &mut ViewContext, - ) -> Option> { - let settings = ProjectPanelSettings::get_global(cx); - if settings.scrollbar.show == ShowScrollbar::Never { + fn render_vertical_scrollbar(&self, cx: &mut ViewContext) -> Option> { + if !Self::should_show_scrollbar(cx) { return None; } let scroll_handle = self.scroll_handle.0.borrow(); - - let height = scroll_handle - .last_item_height - .filter(|_| self.show_scrollbar || self.scrollbar_drag_thumb_offset.get().is_some())?; - - let total_list_length = height.0 as f64 * items_count as f64; + let total_list_length = scroll_handle + .last_item_size + .filter(|_| { + self.show_scrollbar || self.vertical_scrollbar_drag_thumb_offset.get().is_some() + })? 
+ .contents + .height + .0 as f64; let current_offset = scroll_handle.base_handle.offset().y.0.min(0.).abs() as f64; let mut percentage = current_offset / total_list_length; let end_offset = (current_offset + scroll_handle.base_handle.bounds().size.height.0 as f64) @@ -2536,7 +2645,7 @@ impl ProjectPanel { Some( div() .occlude() - .id("project-panel-scroll") + .id("project-panel-vertical-scroll") .on_mouse_move(cx.listener(|_, _, cx| { cx.notify(); cx.stop_propagation() @@ -2550,7 +2659,7 @@ impl ProjectPanel { .on_mouse_up( MouseButton::Left, cx.listener(|this, _, cx| { - if this.scrollbar_drag_thumb_offset.get().is_none() + if this.vertical_scrollbar_drag_thumb_offset.get().is_none() && !this.focus_handle.contains_focused(cx) { this.hide_scrollbar(cx); @@ -2565,21 +2674,101 @@ impl ProjectPanel { })) .h_full() .absolute() - .right_0() - .top_0() - .bottom_0() + .right_1() + .top_1() + .bottom_1() .w(px(12.)) .cursor_default() - .child(ProjectPanelScrollbar::new( + .child(ProjectPanelScrollbar::vertical( percentage as f32..end_offset as f32, self.scroll_handle.clone(), - self.scrollbar_drag_thumb_offset.clone(), - cx.view().clone().into(), - items_count, + self.vertical_scrollbar_drag_thumb_offset.clone(), + cx.view().entity_id(), )), ) } + fn render_horizontal_scrollbar(&self, cx: &mut ViewContext) -> Option> { + if !Self::should_show_scrollbar(cx) { + return None; + } + let scroll_handle = self.scroll_handle.0.borrow(); + let longest_item_width = scroll_handle + .last_item_size + .filter(|_| { + self.show_scrollbar || self.horizontal_scrollbar_drag_thumb_offset.get().is_some() + }) + .filter(|size| size.contents.width > size.item.width)? + .contents + .width + .0 as f64; + let current_offset = scroll_handle.base_handle.offset().x.0.min(0.).abs() as f64; + let mut percentage = current_offset / longest_item_width; + let end_offset = (current_offset + scroll_handle.base_handle.bounds().size.width.0 as f64) + / longest_item_width; + // Uniform scroll handle might briefly report an offset greater than the length of a list; + // in such case we'll adjust the starting offset as well to keep the scrollbar thumb length stable. + let overshoot = (end_offset - 1.).clamp(0., 1.); + if overshoot > 0. 
{ + percentage -= overshoot; + } + const MINIMUM_SCROLLBAR_PERCENTAGE_WIDTH: f64 = 0.005; + if percentage + MINIMUM_SCROLLBAR_PERCENTAGE_WIDTH > 1.0 || end_offset > longest_item_width + { + return None; + } + if longest_item_width < scroll_handle.base_handle.bounds().size.width.0 as f64 { + return None; + } + let end_offset = end_offset.clamp(percentage + MINIMUM_SCROLLBAR_PERCENTAGE_WIDTH, 1.); + Some( + div() + .occlude() + .id("project-panel-horizontal-scroll") + .on_mouse_move(cx.listener(|_, _, cx| { + cx.notify(); + cx.stop_propagation() + })) + .on_hover(|_, cx| { + cx.stop_propagation(); + }) + .on_any_mouse_down(|_, cx| { + cx.stop_propagation(); + }) + .on_mouse_up( + MouseButton::Left, + cx.listener(|this, _, cx| { + if this.horizontal_scrollbar_drag_thumb_offset.get().is_none() + && !this.focus_handle.contains_focused(cx) + { + this.hide_scrollbar(cx); + cx.notify(); + } + + cx.stop_propagation(); + }), + ) + .on_scroll_wheel(cx.listener(|_, _, cx| { + cx.notify(); + })) + .w_full() + .absolute() + .right_1() + .left_1() + .bottom_1() + .h(px(12.)) + .cursor_default() + .when(self.width.is_some(), |this| { + this.child(ProjectPanelScrollbar::horizontal( + percentage as f32..end_offset as f32, + self.scroll_handle.clone(), + self.horizontal_scrollbar_drag_thumb_offset.clone(), + cx.view().entity_id(), + )) + }), + ) + } + fn dispatch_context(&self, cx: &ViewContext) -> KeyContext { let mut dispatch_context = KeyContext::new_with_defaults(); dispatch_context.add("ProjectPanel"); @@ -2595,9 +2784,32 @@ impl ProjectPanel { dispatch_context } + fn should_show_scrollbar(cx: &AppContext) -> bool { + let show = ProjectPanelSettings::get_global(cx) + .scrollbar + .show + .unwrap_or_else(|| EditorSettings::get_global(cx).scrollbar.show); + match show { + ShowScrollbar::Auto => true, + ShowScrollbar::System => true, + ShowScrollbar::Always => true, + ShowScrollbar::Never => false, + } + } + fn should_autohide_scrollbar(cx: &AppContext) -> bool { - cx.try_global::() - .map_or_else(|| cx.should_auto_hide_scrollbars(), |autohide| autohide.0) + let show = ProjectPanelSettings::get_global(cx) + .scrollbar + .show + .unwrap_or_else(|| EditorSettings::get_global(cx).scrollbar.show); + match show { + ShowScrollbar::Auto => true, + ShowScrollbar::System => cx + .try_global::() + .map_or_else(|| cx.should_auto_hide_scrollbars(), |autohide| autohide.0), + ShowScrollbar::Always => false, + ShowScrollbar::Never => true, + } } fn hide_scrollbar(&mut self, cx: &mut ViewContext) { @@ -2623,7 +2835,7 @@ impl ProjectPanel { project: Model, entry_id: ProjectEntryId, skip_ignored: bool, - cx: &mut ViewContext<'_, ProjectPanel>, + cx: &mut ViewContext<'_, Self>, ) { if let Some(worktree) = project.read(cx).worktree_for_entry(entry_id, cx) { let worktree = worktree.read(cx); @@ -2645,13 +2857,22 @@ impl ProjectPanel { } } +fn item_width_estimate(depth: usize, item_text_chars: usize, is_symlink: bool) -> usize { + const ICON_SIZE_FACTOR: usize = 2; + let mut item_width = depth * ICON_SIZE_FACTOR + item_text_chars; + if is_symlink { + item_width += ICON_SIZE_FACTOR; + } + item_width +} + impl Render for ProjectPanel { fn render(&mut self, cx: &mut gpui::ViewContext) -> impl IntoElement { let has_worktree = !self.visible_entries.is_empty(); let project = self.project.read(cx); if has_worktree { - let items_count = self + let item_count = self .visible_entries .iter() .map(|(_, worktree_entries, _)| worktree_entries.len()) @@ -2742,7 +2963,7 @@ impl Render for ProjectPanel { ) .track_focus(&self.focus_handle) 
.child( - uniform_list(cx.view().clone(), "entries", items_count, { + uniform_list(cx.view().clone(), "entries", item_count, { |this, range, cx| { let mut items = Vec::with_capacity(range.end - range.start); this.for_each_visible_entry(range, cx, |id, details, cx| { @@ -2753,9 +2974,12 @@ impl Render for ProjectPanel { }) .size_full() .with_sizing_behavior(ListSizingBehavior::Infer) + .with_horizontal_sizing_behavior(ListHorizontalSizingBehavior::Unconstrained) + .with_width_from_item(self.max_width_item_index) .track_scroll(self.scroll_handle.clone()), ) - .children(self.render_scrollbar(items_count, cx)) + .children(self.render_vertical_scrollbar(cx)) + .children(self.render_horizontal_scrollbar(cx)) .children(self.context_menu.as_ref().map(|(menu, position, _)| { deferred( anchored() @@ -2934,6 +3158,7 @@ mod tests { use serde_json::json; use settings::SettingsStore; use std::path::{Path, PathBuf}; + use ui::Context; use workspace::{ item::{Item, ProjectItem}, register_project_item, AppState, diff --git a/crates/project_panel/src/project_panel_settings.rs b/crates/project_panel/src/project_panel_settings.rs index 4d73ae92456da..0114b3968d4db 100644 --- a/crates/project_panel/src/project_panel_settings.rs +++ b/crates/project_panel/src/project_panel_settings.rs @@ -1,3 +1,4 @@ +use editor::ShowScrollbar; use gpui::Pixels; use schemars::JsonSchema; use serde_derive::{Deserialize, Serialize}; @@ -24,33 +25,20 @@ pub struct ProjectPanelSettings { pub scrollbar: ScrollbarSettings, } -/// When to show the scrollbar in the project panel. -/// -/// Default: always -#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] -#[serde(rename_all = "snake_case")] -pub enum ShowScrollbar { - #[default] - /// Always show the scrollbar. - Always, - /// Never show the scrollbar. - Never, -} - #[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] pub struct ScrollbarSettings { /// When to show the scrollbar in the project panel. /// - /// Default: always - pub show: ShowScrollbar, + /// Default: inherits editor scrollbar settings + pub show: Option, } #[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] pub struct ScrollbarSettingsContent { /// When to show the scrollbar in the project panel. /// - /// Default: always - pub show: Option, + /// Default: inherits editor scrollbar settings + pub show: Option>, } #[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)] diff --git a/crates/project_panel/src/scrollbar.rs b/crates/project_panel/src/scrollbar.rs index 0da9db7cb794f..cb7b15386c207 100644 --- a/crates/project_panel/src/scrollbar.rs +++ b/crates/project_panel/src/scrollbar.rs @@ -1,34 +1,54 @@ use std::{cell::Cell, ops::Range, rc::Rc}; use gpui::{ - point, AnyView, Bounds, ContentMask, Hitbox, MouseDownEvent, MouseMoveEvent, MouseUpEvent, - ScrollWheelEvent, Style, UniformListScrollHandle, + point, quad, Bounds, ContentMask, Corners, Edges, EntityId, Hitbox, Hsla, MouseDownEvent, + MouseMoveEvent, MouseUpEvent, ScrollWheelEvent, Style, UniformListScrollHandle, }; use ui::{prelude::*, px, relative, IntoElement}; +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub(crate) enum ScrollbarKind { + Horizontal, + Vertical, +} + pub(crate) struct ProjectPanelScrollbar { thumb: Range, scroll: UniformListScrollHandle, // If Some(), there's an active drag, offset by percentage from the top of thumb. 
scrollbar_drag_state: Rc>>, - item_count: usize, - view: AnyView, + kind: ScrollbarKind, + parent_id: EntityId, } impl ProjectPanelScrollbar { - pub(crate) fn new( + pub(crate) fn vertical( thumb: Range, scroll: UniformListScrollHandle, scrollbar_drag_state: Rc>>, - view: AnyView, - item_count: usize, + parent_id: EntityId, ) -> Self { Self { thumb, scroll, scrollbar_drag_state, - item_count, - view, + kind: ScrollbarKind::Vertical, + parent_id, + } + } + + pub(crate) fn horizontal( + thumb: Range, + scroll: UniformListScrollHandle, + scrollbar_drag_state: Rc>>, + parent_id: EntityId, + ) -> Self { + Self { + thumb, + scroll, + scrollbar_drag_state, + kind: ScrollbarKind::Horizontal, + parent_id, } } } @@ -50,8 +70,14 @@ impl gpui::Element for ProjectPanelScrollbar { let mut style = Style::default(); style.flex_grow = 1.; style.flex_shrink = 1.; - style.size.width = px(12.).into(); - style.size.height = relative(1.).into(); + if self.kind == ScrollbarKind::Vertical { + style.size.width = px(12.).into(); + style.size.height = relative(1.).into(); + } else { + style.size.width = relative(1.).into(); + style.size.height = px(12.).into(); + } + (cx.request_layout(style, None), ()) } @@ -77,25 +103,65 @@ impl gpui::Element for ProjectPanelScrollbar { ) { cx.with_content_mask(Some(ContentMask { bounds }), |cx| { let colors = cx.theme().colors(); - let scrollbar_background = colors.scrollbar_track_background; let thumb_background = colors.scrollbar_thumb_background; - cx.paint_quad(gpui::fill(bounds, scrollbar_background)); - - let thumb_offset = self.thumb.start * bounds.size.height; - let thumb_end = self.thumb.end * bounds.size.height; + let is_vertical = self.kind == ScrollbarKind::Vertical; + let extra_padding = px(5.0); + let padded_bounds = if is_vertical { + Bounds::from_corners( + bounds.origin + point(Pixels::ZERO, extra_padding), + bounds.lower_right() - point(Pixels::ZERO, extra_padding * 3), + ) + } else { + Bounds::from_corners( + bounds.origin + point(extra_padding, Pixels::ZERO), + bounds.lower_right() - point(extra_padding * 3, Pixels::ZERO), + ) + }; - let thumb_percentage_size = self.thumb.end - self.thumb.start; - let thumb_bounds = { - let thumb_upper_left = point(bounds.origin.x, bounds.origin.y + thumb_offset); + let mut thumb_bounds = if is_vertical { + let thumb_offset = self.thumb.start * padded_bounds.size.height; + let thumb_end = self.thumb.end * padded_bounds.size.height; + let thumb_upper_left = point( + padded_bounds.origin.x, + padded_bounds.origin.y + thumb_offset, + ); let thumb_lower_right = point( - bounds.origin.x + bounds.size.width, - bounds.origin.y + thumb_end, + padded_bounds.origin.x + padded_bounds.size.width, + padded_bounds.origin.y + thumb_end, ); Bounds::from_corners(thumb_upper_left, thumb_lower_right) + } else { + let thumb_offset = self.thumb.start * padded_bounds.size.width; + let thumb_end = self.thumb.end * padded_bounds.size.width; + let thumb_upper_left = point( + padded_bounds.origin.x + thumb_offset, + padded_bounds.origin.y, + ); + let thumb_lower_right = point( + padded_bounds.origin.x + thumb_end, + padded_bounds.origin.y + padded_bounds.size.height, + ); + Bounds::from_corners(thumb_upper_left, thumb_lower_right) + }; + let corners = if is_vertical { + thumb_bounds.size.width /= 1.5; + Corners::all(thumb_bounds.size.width / 2.0) + } else { + thumb_bounds.size.height /= 1.5; + Corners::all(thumb_bounds.size.height / 2.0) }; - cx.paint_quad(gpui::fill(thumb_bounds, thumb_background)); + cx.paint_quad(quad( + thumb_bounds, + corners, + 
thumb_background, + Edges::default(), + Hsla::transparent_black(), + )); + let scroll = self.scroll.clone(); - let item_count = self.item_count; + let kind = self.kind; + let thumb_percentage_size = self.thumb.end - self.thumb.start; + cx.on_mouse_event({ let scroll = self.scroll.clone(); let is_dragging = self.scrollbar_drag_state.clone(); @@ -103,20 +169,37 @@ impl gpui::Element for ProjectPanelScrollbar { if phase.bubble() && bounds.contains(&event.position) { if !thumb_bounds.contains(&event.position) { let scroll = scroll.0.borrow(); - if let Some(last_height) = scroll.last_item_height { - let max_offset = item_count as f32 * last_height; - let percentage = - (event.position.y - bounds.origin.y) / bounds.size.height; - - let percentage = percentage.min(1. - thumb_percentage_size); - scroll - .base_handle - .set_offset(point(px(0.), -max_offset * percentage)); + if let Some(item_size) = scroll.last_item_size { + match kind { + ScrollbarKind::Horizontal => { + let percentage = (event.position.x - bounds.origin.x) + / bounds.size.width; + let max_offset = item_size.contents.width; + let percentage = percentage.min(1. - thumb_percentage_size); + scroll.base_handle.set_offset(point( + -max_offset * percentage, + scroll.base_handle.offset().y, + )); + } + ScrollbarKind::Vertical => { + let percentage = (event.position.y - bounds.origin.y) + / bounds.size.height; + let max_offset = item_size.contents.height; + let percentage = percentage.min(1. - thumb_percentage_size); + scroll.base_handle.set_offset(point( + scroll.base_handle.offset().x, + -max_offset * percentage, + )); + } + } } } else { - let thumb_top_offset = - (event.position.y - thumb_bounds.origin.y) / bounds.size.height; - is_dragging.set(Some(thumb_top_offset)); + let thumb_offset = if is_vertical { + (event.position.y - thumb_bounds.origin.y) / bounds.size.height + } else { + (event.position.x - thumb_bounds.origin.x) / bounds.size.width + }; + is_dragging.set(Some(thumb_offset)); } } } @@ -127,6 +210,7 @@ impl gpui::Element for ProjectPanelScrollbar { if phase.bubble() && bounds.contains(&event.position) { let scroll = scroll.0.borrow_mut(); let current_offset = scroll.base_handle.offset(); + scroll .base_handle .set_offset(current_offset + event.delta.pixel_delta(cx.line_height())); @@ -134,19 +218,39 @@ impl gpui::Element for ProjectPanelScrollbar { } }); let drag_state = self.scrollbar_drag_state.clone(); - let view_id = self.view.entity_id(); + let view_id = self.parent_id; + let kind = self.kind; cx.on_mouse_event(move |event: &MouseMoveEvent, _, cx| { if let Some(drag_state) = drag_state.get().filter(|_| event.dragging()) { let scroll = scroll.0.borrow(); - if let Some(last_height) = scroll.last_item_height { - let max_offset = item_count as f32 * last_height; - let percentage = - (event.position.y - bounds.origin.y) / bounds.size.height - drag_state; + if let Some(item_size) = scroll.last_item_size { + match kind { + ScrollbarKind::Horizontal => { + let max_offset = item_size.contents.width; + let percentage = (event.position.x - bounds.origin.x) + / bounds.size.width + - drag_state; + + let percentage = percentage.min(1. - thumb_percentage_size); + scroll.base_handle.set_offset(point( + -max_offset * percentage, + scroll.base_handle.offset().y, + )); + } + ScrollbarKind::Vertical => { + let max_offset = item_size.contents.height; + let percentage = (event.position.y - bounds.origin.y) + / bounds.size.height + - drag_state; + + let percentage = percentage.min(1. 
- thumb_percentage_size); + scroll.base_handle.set_offset(point( + scroll.base_handle.offset().x, + -max_offset * percentage, + )); + } + }; - let percentage = percentage.min(1. - thumb_percentage_size); - scroll - .base_handle - .set_offset(point(px(0.), -max_offset * percentage)); cx.notify(view_id); } } else { diff --git a/crates/ui/src/components/list/list_item.rs b/crates/ui/src/components/list/list_item.rs index e1c90894fdd3d..e13fb8ef265eb 100644 --- a/crates/ui/src/components/list/list_item.rs +++ b/crates/ui/src/components/list/list_item.rs @@ -36,6 +36,7 @@ pub struct ListItem { on_secondary_mouse_down: Option>, children: SmallVec<[AnyElement; 2]>, selectable: bool, + overflow_x: bool, } impl ListItem { @@ -58,6 +59,7 @@ impl ListItem { tooltip: None, children: SmallVec::new(), selectable: true, + overflow_x: false, } } @@ -131,6 +133,11 @@ impl ListItem { self.end_hover_slot = end_hover_slot.into().map(IntoElement::into_any_element); self } + + pub fn overflow_x(mut self) -> Self { + self.overflow_x = true; + self + } } impl Disableable for ListItem { @@ -239,7 +246,13 @@ impl RenderOnce for ListItem { .flex_shrink_0() .flex_basis(relative(0.25)) .gap(Spacing::Small.rems(cx)) - .overflow_hidden() + .map(|list_content| { + if self.overflow_x { + list_content + } else { + list_content.overflow_hidden() + } + }) .children(self.start_slot) .children(self.children), ) diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index 1e531f7c744d1..fbd5fa53cfbd8 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -1954,7 +1954,7 @@ Run the `theme selector: toggle` action in the command palette to see a current "auto_reveal_entries": true, "auto_fold_dirs": true, "scrollbar": { - "show": "always" + "show": null } } } @@ -2074,13 +2074,13 @@ Run the `theme selector: toggle` action in the command palette to see a current ### Scrollbar -- Description: Scrollbar related settings. Possible values: "always", "never". +- Description: Scrollbar related settings. Possible values: null, "auto", "system", "always", "never". Inherits editor settings when absent, see its description for more details. - Setting: `scrollbar` - Default: ```json "scrollbar": { - "show": "always" + "show": null } ``` From 280b8a89ea132f225b249ce743839c0cab96b05b Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 1 Oct 2024 12:40:18 -0400 Subject: [PATCH 167/228] editor: Allow opening excerpts from proposed changes editor (#18591) This PR adds the ability to open excerpts in the base buffer from the proposed changes editor. 
Release Notes: - N/A --- crates/editor/src/proposed_changes_editor.rs | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/crates/editor/src/proposed_changes_editor.rs b/crates/editor/src/proposed_changes_editor.rs index ec0c05d88382c..a791e87e62053 100644 --- a/crates/editor/src/proposed_changes_editor.rs +++ b/crates/editor/src/proposed_changes_editor.rs @@ -11,7 +11,7 @@ use text::ToOffset; use ui::prelude::*; use workspace::{ searchable::SearchableItemHandle, Item, ItemHandle as _, ToolbarItemEvent, ToolbarItemLocation, - ToolbarItemView, + ToolbarItemView, Workspace, }; pub struct ProposedChangesEditor { @@ -159,6 +159,11 @@ impl Item for ProposedChangesEditor { None } } + + fn added_to_workspace(&mut self, workspace: &mut Workspace, cx: &mut ViewContext) { + self.editor + .update(cx, |editor, cx| editor.added_to_workspace(workspace, cx)); + } } impl ProposedChangesEditorToolbar { From eb962b7bfc15dd77c702cfcbf7dc32585e89749a Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 1 Oct 2024 13:05:50 -0400 Subject: [PATCH 168/228] editor: Include proposed changes editor in navigation history (#18593) This PR makes it so the proposed changes editor works with the workspace navigation history. This allows for easily navigating back to the proposed changes editor after opening one of the excerpts into the base buffer. Release Notes: - N/A --- crates/editor/src/proposed_changes_editor.rs | 22 +++++++++++++++++++- 1 file changed, 21 insertions(+), 1 deletion(-) diff --git a/crates/editor/src/proposed_changes_editor.rs b/crates/editor/src/proposed_changes_editor.rs index a791e87e62053..0666346e48776 100644 --- a/crates/editor/src/proposed_changes_editor.rs +++ b/crates/editor/src/proposed_changes_editor.rs @@ -161,8 +161,28 @@ impl Item for ProposedChangesEditor { } fn added_to_workspace(&mut self, workspace: &mut Workspace, cx: &mut ViewContext) { + self.editor.update(cx, |editor, cx| { + Item::added_to_workspace(editor, workspace, cx) + }); + } + + fn deactivated(&mut self, cx: &mut ViewContext) { + self.editor.update(cx, Item::deactivated); + } + + fn navigate(&mut self, data: Box, cx: &mut ViewContext) -> bool { self.editor - .update(cx, |editor, cx| editor.added_to_workspace(workspace, cx)); + .update(cx, |editor, cx| Item::navigate(editor, data, cx)) + } + + fn set_nav_history( + &mut self, + nav_history: workspace::ItemNavHistory, + cx: &mut ViewContext, + ) { + self.editor.update(cx, |editor, cx| { + Item::set_nav_history(editor, nav_history, cx) + }); } } From d14e36b3231071d2da7492bc89e12b6f964fee3a Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 1 Oct 2024 11:07:52 -0600 Subject: [PATCH 169/228] Add an apply button to hunks in proposed changes editor (#18592) Release Notes: - N/A --------- Co-authored-by: Antonio Co-authored-by: Nathan --- crates/editor/src/actions.rs | 1 + crates/editor/src/editor.rs | 14 + crates/editor/src/element.rs | 1 + crates/editor/src/hunk_diff.rs | 272 +++++++++++-------- crates/editor/src/proposed_changes_editor.rs | 52 ++-- crates/language/src/buffer.rs | 55 ++-- crates/language/src/buffer_tests.rs | 16 +- 7 files changed, 248 insertions(+), 163 deletions(-) diff --git a/crates/editor/src/actions.rs b/crates/editor/src/actions.rs index b5935782580ba..502b70361b4f8 100644 --- a/crates/editor/src/actions.rs +++ b/crates/editor/src/actions.rs @@ -193,6 +193,7 @@ gpui::actions!( AcceptPartialInlineCompletion, AddSelectionAbove, AddSelectionBelow, + ApplyDiffHunk, Backspace, Cancel, CancelLanguageServerWork, diff --git 
a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 61a47d7f631fd..b43433e3f41e6 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -6205,6 +6205,20 @@ impl Editor { } } + fn apply_selected_diff_hunks(&mut self, _: &ApplyDiffHunk, cx: &mut ViewContext) { + let snapshot = self.buffer.read(cx).snapshot(cx); + let hunks = hunks_for_selections(&snapshot, &self.selections.disjoint_anchors()); + self.transact(cx, |editor, cx| { + for hunk in hunks { + if let Some(buffer) = editor.buffer.read(cx).buffer(hunk.buffer_id) { + buffer.update(cx, |buffer, cx| { + buffer.merge_into_base(Some(hunk.buffer_range.to_offset(buffer)), cx); + }); + } + } + }); + } + pub fn open_active_item_in_terminal(&mut self, _: &OpenInTerminal, cx: &mut ViewContext) { if let Some(working_directory) = self.active_excerpt(cx).and_then(|(_, buffer, _)| { let project_path = buffer.read(cx).project_path(cx)?; diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 98a5ff7f4dff5..8a0735354720d 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -436,6 +436,7 @@ impl EditorElement { register_action(view, cx, Editor::accept_inline_completion); register_action(view, cx, Editor::revert_file); register_action(view, cx, Editor::revert_selected_hunks); + register_action(view, cx, Editor::apply_selected_diff_hunks); register_action(view, cx, Editor::open_active_item_in_terminal) } diff --git a/crates/editor/src/hunk_diff.rs b/crates/editor/src/hunk_diff.rs index 2ccd60c668de7..c8caa30b59c49 100644 --- a/crates/editor/src/hunk_diff.rs +++ b/crates/editor/src/hunk_diff.rs @@ -14,9 +14,9 @@ use ui::{ use util::RangeExt; use crate::{ - editor_settings::CurrentLineHighlight, hunk_status, hunks_for_selections, BlockDisposition, - BlockProperties, BlockStyle, CustomBlockId, DiffRowHighlight, DisplayRow, DisplaySnapshot, - Editor, EditorElement, ExpandAllHunkDiffs, GoToHunk, GoToPrevHunk, RevertFile, + editor_settings::CurrentLineHighlight, hunk_status, hunks_for_selections, ApplyDiffHunk, + BlockDisposition, BlockProperties, BlockStyle, CustomBlockId, DiffRowHighlight, DisplayRow, + DisplaySnapshot, Editor, EditorElement, ExpandAllHunkDiffs, GoToHunk, GoToPrevHunk, RevertFile, RevertSelectedHunks, ToDisplayPoint, ToggleHunkDiff, }; @@ -238,19 +238,14 @@ impl Editor { cx: &mut ViewContext<'_, Editor>, ) -> Option<()> { let multi_buffer_snapshot = self.buffer().read(cx).snapshot(cx); - let multi_buffer_row_range = hunk - .multi_buffer_range - .start - .to_point(&multi_buffer_snapshot) - ..hunk.multi_buffer_range.end.to_point(&multi_buffer_snapshot); - let hunk_start = hunk.multi_buffer_range.start; - let hunk_end = hunk.multi_buffer_range.end; + let hunk_range = hunk.multi_buffer_range.clone(); + let hunk_point_range = hunk_range.to_point(&multi_buffer_snapshot); let buffer = self.buffer().clone(); let snapshot = self.snapshot(cx); let (diff_base_buffer, deleted_text_lines) = buffer.update(cx, |buffer, cx| { - let hunk = buffer_diff_hunk(&snapshot.buffer_snapshot, multi_buffer_row_range.clone())?; - let mut buffer_ranges = buffer.range_to_buffer_ranges(multi_buffer_row_range, cx); + let hunk = buffer_diff_hunk(&snapshot.buffer_snapshot, hunk_point_range.clone())?; + let mut buffer_ranges = buffer.range_to_buffer_ranges(hunk_point_range, cx); if buffer_ranges.len() == 1 { let (buffer, _, _) = buffer_ranges.pop()?; let diff_base_buffer = diff_base_buffer @@ -275,7 +270,7 @@ impl Editor { probe .hunk_range .start - .cmp(&hunk_start, &multi_buffer_snapshot) + 
.cmp(&hunk_range.start, &multi_buffer_snapshot) }) { Ok(_already_present) => return None, Err(ix) => ix, @@ -295,7 +290,7 @@ impl Editor { } DiffHunkStatus::Added => { self.highlight_rows::( - hunk_start..hunk_end, + hunk_range.clone(), added_hunk_color(cx), false, cx, @@ -304,7 +299,7 @@ impl Editor { } DiffHunkStatus::Modified => { self.highlight_rows::( - hunk_start..hunk_end, + hunk_range.clone(), added_hunk_color(cx), false, cx, @@ -323,7 +318,7 @@ impl Editor { block_insert_index, ExpandedHunk { blocks, - hunk_range: hunk_start..hunk_end, + hunk_range, status: hunk.status, folded: false, diff_base_byte_range: hunk.diff_base_byte_range.clone(), @@ -333,11 +328,47 @@ impl Editor { Some(()) } + fn apply_changes_in_range( + &mut self, + range: Range, + cx: &mut ViewContext<'_, Editor>, + ) -> Option<()> { + let (buffer, range, _) = self + .buffer + .read(cx) + .range_to_buffer_ranges(range, cx) + .into_iter() + .next()?; + + buffer.update(cx, |branch_buffer, cx| { + branch_buffer.merge_into_base(Some(range), cx); + }); + + None + } + + pub(crate) fn apply_all_changes(&self, cx: &mut ViewContext) { + let buffers = self.buffer.read(cx).all_buffers(); + for branch_buffer in buffers { + branch_buffer.update(cx, |branch_buffer, cx| { + branch_buffer.merge_into_base(None, cx); + }); + } + } + fn hunk_header_block( &self, hunk: &HoveredHunk, cx: &mut ViewContext<'_, Editor>, ) -> BlockProperties { + let is_branch_buffer = self + .buffer + .read(cx) + .point_to_buffer_offset(hunk.multi_buffer_range.start, cx) + .map_or(false, |(buffer, _, _)| { + buffer.read(cx).diff_base_buffer().is_some() + }); + let border_color = cx.theme().colors().border_variant; let gutter_color = match hunk.status { DiffHunkStatus::Added => cx.theme().status().created, @@ -388,6 +419,10 @@ impl Editor { .pr_6() .size_full() .justify_between() + .border_t_1() + .pl_6() + .pr_6() + .border_color(border_color) .child( h_flex() .gap_1() @@ -411,43 +446,10 @@ impl Editor { let hunk = hunk.clone(); move |_event, cx| { editor.update(cx, |editor, cx| { - let snapshot = editor.snapshot(cx); - let position = hunk - .multi_buffer_range - .end - .to_point( - &snapshot.buffer_snapshot, - ); - if let Some(hunk) = editor - .go_to_hunk_after_position( - &snapshot, position, cx, - ) - { - let multi_buffer_start = snapshot - .buffer_snapshot - .anchor_before(Point::new( - hunk.row_range.start.0, - 0, - )); - let multi_buffer_end = snapshot - .buffer_snapshot - .anchor_after(Point::new( - hunk.row_range.end.0, - 0, - )); - editor.expand_diff_hunk( - None, - &HoveredHunk { - multi_buffer_range: - multi_buffer_start - ..multi_buffer_end, - status: hunk_status(&hunk), - diff_base_byte_range: hunk - .diff_base_byte_range, - }, - cx, - ); - } + editor.go_to_subsequent_hunk( + hunk.multi_buffer_range.end, + cx, + ); }); } }), @@ -472,43 +474,10 @@ impl Editor { let hunk = hunk.clone(); move |_event, cx| { editor.update(cx, |editor, cx| { - let snapshot = editor.snapshot(cx); - let position = hunk - .multi_buffer_range - .start - .to_point( - &snapshot.buffer_snapshot, - ); - let hunk = editor - .go_to_hunk_before_position( - &snapshot, position, cx, - ); - if let Some(hunk) = hunk { - let multi_buffer_start = snapshot - .buffer_snapshot - .anchor_before(Point::new( - hunk.row_range.start.0, - 0, - )); - let multi_buffer_end = snapshot - .buffer_snapshot - .anchor_after(Point::new( - hunk.row_range.end.0, - 0, - )); - editor.expand_diff_hunk( - None, - &HoveredHunk { - multi_buffer_range: - multi_buffer_start - ..multi_buffer_end, - status: 
hunk_status(&hunk), - diff_base_byte_range: hunk - .diff_base_byte_range, - }, - cx, - ); - } + editor.go_to_preceding_hunk( + hunk.multi_buffer_range.start, + cx, + ); }); } }), @@ -558,6 +527,36 @@ impl Editor { } }), ) + .when(is_branch_buffer, |this| { + this.child( + IconButton::new("apply", IconName::Check) + .shape(IconButtonShape::Square) + .icon_size(IconSize::Small) + .tooltip({ + let focus_handle = editor.focus_handle(cx); + move |cx| { + Tooltip::for_action_in( + "Apply Hunk", + &ApplyDiffHunk, + &focus_handle, + cx, + ) + } + }) + .on_click({ + let editor = editor.clone(); + let hunk = hunk.clone(); + move |_event, cx| { + editor.update(cx, |editor, cx| { + editor.apply_changes_in_range( + hunk.multi_buffer_range.clone(), + cx, + ); + }); + } + }), + ) + }) .child({ let focus = editor.focus_handle(cx); PopoverMenu::new("hunk-controls-dropdown") @@ -597,31 +596,29 @@ impl Editor { }), ) .child( - div().child( - IconButton::new("collapse", IconName::Close) - .shape(IconButtonShape::Square) - .icon_size(IconSize::Small) - .tooltip({ - let focus_handle = editor.focus_handle(cx); - move |cx| { - Tooltip::for_action_in( - "Collapse Hunk", - &ToggleHunkDiff, - &focus_handle, - cx, - ) - } - }) - .on_click({ - let editor = editor.clone(); - let hunk = hunk.clone(); - move |_event, cx| { - editor.update(cx, |editor, cx| { - editor.toggle_hovered_hunk(&hunk, cx); - }); - } - }), - ), + IconButton::new("collapse", IconName::Close) + .shape(IconButtonShape::Square) + .icon_size(IconSize::Small) + .tooltip({ + let focus_handle = editor.focus_handle(cx); + move |cx| { + Tooltip::for_action_in( + "Collapse Hunk", + &ToggleHunkDiff, + &focus_handle, + cx, + ) + } + }) + .on_click({ + let editor = editor.clone(); + let hunk = hunk.clone(); + move |_event, cx| { + editor.update(cx, |editor, cx| { + editor.toggle_hovered_hunk(&hunk, cx); + }); + } + }), ), ) .into_any_element() @@ -876,6 +873,51 @@ impl Editor { } }) } + + fn go_to_subsequent_hunk(&mut self, position: Anchor, cx: &mut ViewContext) { + let snapshot = self.snapshot(cx); + let position = position.to_point(&snapshot.buffer_snapshot); + if let Some(hunk) = self.go_to_hunk_after_position(&snapshot, position, cx) { + let multi_buffer_start = snapshot + .buffer_snapshot + .anchor_before(Point::new(hunk.row_range.start.0, 0)); + let multi_buffer_end = snapshot + .buffer_snapshot + .anchor_after(Point::new(hunk.row_range.end.0, 0)); + self.expand_diff_hunk( + None, + &HoveredHunk { + multi_buffer_range: multi_buffer_start..multi_buffer_end, + status: hunk_status(&hunk), + diff_base_byte_range: hunk.diff_base_byte_range, + }, + cx, + ); + } + } + + fn go_to_preceding_hunk(&mut self, position: Anchor, cx: &mut ViewContext) { + let snapshot = self.snapshot(cx); + let position = position.to_point(&snapshot.buffer_snapshot); + let hunk = self.go_to_hunk_before_position(&snapshot, position, cx); + if let Some(hunk) = hunk { + let multi_buffer_start = snapshot + .buffer_snapshot + .anchor_before(Point::new(hunk.row_range.start.0, 0)); + let multi_buffer_end = snapshot + .buffer_snapshot + .anchor_after(Point::new(hunk.row_range.end.0, 0)); + self.expand_diff_hunk( + None, + &HoveredHunk { + multi_buffer_range: multi_buffer_start..multi_buffer_end, + status: hunk_status(&hunk), + diff_base_byte_range: hunk.diff_base_byte_range, + }, + cx, + ); + } + } } fn to_diff_hunk( diff --git a/crates/editor/src/proposed_changes_editor.rs b/crates/editor/src/proposed_changes_editor.rs index 0666346e48776..62e37bc677f5f 100644 --- 
a/crates/editor/src/proposed_changes_editor.rs +++ b/crates/editor/src/proposed_changes_editor.rs @@ -18,7 +18,7 @@ pub struct ProposedChangesEditor { editor: View, _subscriptions: Vec, _recalculate_diffs_task: Task>, - recalculate_diffs_tx: mpsc::UnboundedSender>, + recalculate_diffs_tx: mpsc::UnboundedSender, } pub struct ProposedChangesBuffer { @@ -30,6 +30,11 @@ pub struct ProposedChangesEditorToolbar { current_editor: Option>, } +struct RecalculateDiff { + buffer: Model, + debounce: bool, +} + impl ProposedChangesEditor { pub fn new( buffers: Vec>, @@ -63,16 +68,18 @@ impl ProposedChangesEditor { recalculate_diffs_tx, _recalculate_diffs_task: cx.spawn(|_, mut cx| async move { let mut buffers_to_diff = HashSet::default(); - while let Some(buffer) = recalculate_diffs_rx.next().await { - buffers_to_diff.insert(buffer); + while let Some(mut recalculate_diff) = recalculate_diffs_rx.next().await { + buffers_to_diff.insert(recalculate_diff.buffer); - loop { + while recalculate_diff.debounce { cx.background_executor() .timer(Duration::from_millis(250)) .await; let mut had_further_changes = false; - while let Ok(next_buffer) = recalculate_diffs_rx.try_next() { - buffers_to_diff.insert(next_buffer?); + while let Ok(next_recalculate_diff) = recalculate_diffs_rx.try_next() { + let next_recalculate_diff = next_recalculate_diff?; + recalculate_diff.debounce &= next_recalculate_diff.debounce; + buffers_to_diff.insert(next_recalculate_diff.buffer); had_further_changes = true; } if !had_further_changes { @@ -99,19 +106,24 @@ impl ProposedChangesEditor { event: &BufferEvent, _cx: &mut ViewContext, ) { - if let BufferEvent::Edited = event { - self.recalculate_diffs_tx.unbounded_send(buffer).ok(); - } - } - - fn apply_all_changes(&self, cx: &mut ViewContext) { - let buffers = self.editor.read(cx).buffer.read(cx).all_buffers(); - for branch_buffer in buffers { - if let Some(base_buffer) = branch_buffer.read(cx).diff_base_buffer() { - base_buffer.update(cx, |base_buffer, cx| { - base_buffer.merge(&branch_buffer, None, cx) - }); + match event { + BufferEvent::Operation { .. } => { + self.recalculate_diffs_tx + .unbounded_send(RecalculateDiff { + buffer, + debounce: true, + }) + .ok(); + } + BufferEvent::DiffBaseChanged => { + self.recalculate_diffs_tx + .unbounded_send(RecalculateDiff { + buffer, + debounce: false, + }) + .ok(); } + _ => (), } } } @@ -208,7 +220,9 @@ impl Render for ProposedChangesEditorToolbar { Button::new("apply-changes", "Apply All").on_click(move |_, cx| { if let Some(editor) = &editor { editor.update(cx, |editor, cx| { - editor.apply_all_changes(cx); + editor.editor.update(cx, |editor, cx| { + editor.apply_all_changes(cx); + }) }); } }) diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 7abc9b8dba146..8afc4d389db7f 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -62,7 +62,7 @@ pub use text::{ use theme::SyntaxTheme; #[cfg(any(test, feature = "test-support"))] use util::RandomCharIter; -use util::RangeExt; +use util::{debug_panic, RangeExt}; #[cfg(any(test, feature = "test-support"))] pub use {tree_sitter_rust, tree_sitter_typescript}; @@ -823,40 +823,41 @@ impl Buffer { }) } - /// Applies all of the changes in `branch` buffer that intersect the given `range` - /// to this buffer. 
- pub fn merge( - &mut self, - branch: &Model, - range: Option>, - cx: &mut ModelContext, - ) { - let edits = branch.read_with(cx, |branch, _| { - branch - .edits_since_in_range::( - &self.version, - range.unwrap_or(Anchor::MIN..Anchor::MAX), - ) - .map(|edit| { - ( - edit.old, - branch.text_for_range(edit.new).collect::(), - ) + /// Applies all of the changes in this buffer that intersect the given `range` + /// to its base buffer. This buffer must be a branch buffer to call this method. + pub fn merge_into_base(&mut self, range: Option>, cx: &mut ModelContext) { + let Some(base_buffer) = self.diff_base_buffer() else { + debug_panic!("not a branch buffer"); + return; + }; + + base_buffer.update(cx, |base_buffer, cx| { + let edits = self + .edits_since::(&base_buffer.version) + .filter_map(|edit| { + if range + .as_ref() + .map_or(true, |range| range.overlaps(&edit.new)) + { + Some((edit.old, self.text_for_range(edit.new).collect::())) + } else { + None + } }) - .collect::>() - }); - let operation = self.edit(edits, None, cx); + .collect::>(); + + let operation = base_buffer.edit(edits, None, cx); - // Prevent this operation from being reapplied to the branch. - branch.update(cx, |branch, cx| { + // Prevent this operation from being reapplied to the branch. if let Some(BufferDiffBase::PastBufferVersion { operations_to_ignore, .. - }) = &mut branch.diff_base + }) = &mut self.diff_base { operations_to_ignore.extend(operation); } - cx.emit(BufferEvent::Edited) + + cx.emit(BufferEvent::DiffBaseChanged); }); } diff --git a/crates/language/src/buffer_tests.rs b/crates/language/src/buffer_tests.rs index 49cc31067b93a..da53d5a7637b9 100644 --- a/crates/language/src/buffer_tests.rs +++ b/crates/language/src/buffer_tests.rs @@ -2471,8 +2471,8 @@ fn test_branch_and_merge(cx: &mut TestAppContext) { }); // Merging the branch applies all of its changes to the base. - base_buffer.update(cx, |base_buffer, cx| { - base_buffer.merge(&branch_buffer, None, cx); + branch_buffer.update(cx, |branch_buffer, cx| { + branch_buffer.merge_into_base(None, cx); }); branch_buffer.update(cx, |branch_buffer, cx| { @@ -2484,6 +2484,18 @@ fn test_branch_and_merge(cx: &mut TestAppContext) { }); } +#[gpui::test] +fn test_merge_into_base(cx: &mut AppContext) { + init_settings(cx, |_| {}); + let base = cx.new_model(|cx| Buffer::local("abcdefghijk", cx)); + let branch = base.update(cx, |buffer, cx| buffer.branch(cx)); + branch.update(cx, |branch, cx| { + branch.edit([(0..3, "ABC"), (7..9, "HI")], None, cx); + branch.merge_into_base(Some(5..8), cx); + }); + assert_eq!(base.read(cx).text(), "abcdefgHIjk"); +} + fn start_recalculating_diff(buffer: &Model, cx: &mut TestAppContext) { buffer .update(cx, |buffer, cx| buffer.recalculate_diff(cx).unwrap()) From 9b148f3dcc5e4281bfadd515efd817bcdbf21bc3 Mon Sep 17 00:00:00 2001 From: Junkui Zhang <364772080@qq.com> Date: Wed, 2 Oct 2024 01:32:31 +0800 Subject: [PATCH 170/228] Limit the value can be set for font weight (#18594) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes #18531 This PR limits the range of values that can be set for `FontWeight`. Since any value less than 1.0 or greater than 999.9 causes Zed to crash on Windows, I’ve restricted `FontWeight` to this range. 
I could apply this constraint only on Windows, but considering the documentation at https://zed.dev/docs/configuring-zed#buffer-font-weight indicates that `FontWeight` should be between 100 and 900, I thought it might be a good idea to apply this restriction in the settings. Release Notes: - Changed `ui_font_weight` and `buffer_font_weight` settings to require values to be between `100` and `950` (inclusive). --------- Co-authored-by: Marshall Bowers --- crates/theme/src/settings.rs | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/crates/theme/src/settings.rs b/crates/theme/src/settings.rs index 7fa9a870de559..86383cec8ea07 100644 --- a/crates/theme/src/settings.rs +++ b/crates/theme/src/settings.rs @@ -520,6 +520,10 @@ pub fn reset_ui_font_size(cx: &mut AppContext) { } } +fn clamp_font_weight(weight: f32) -> FontWeight { + FontWeight(weight.clamp(100., 950.)) +} + impl settings::Settings for ThemeSettings { const KEY: Option<&'static str> = None; @@ -579,7 +583,7 @@ impl settings::Settings for ThemeSettings { this.buffer_font.fallbacks = Some(FontFallbacks::from_fonts(value)); } if let Some(value) = value.buffer_font_weight { - this.buffer_font.weight = FontWeight(value); + this.buffer_font.weight = clamp_font_weight(value); } if let Some(value) = value.ui_font_family.clone() { @@ -592,7 +596,7 @@ impl settings::Settings for ThemeSettings { this.ui_font.fallbacks = Some(FontFallbacks::from_fonts(value)); } if let Some(value) = value.ui_font_weight { - this.ui_font.weight = FontWeight(value); + this.ui_font.weight = clamp_font_weight(value); } if let Some(value) = &value.theme { From 7dcb0de28cb3abf482b81e2821332cf234891cda Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 1 Oct 2024 12:58:12 -0600 Subject: [PATCH 171/228] Keep all hunks expanded in proposed change editor (#18598) Also, fix visual bug when pressing escape with a non-empty selection in a deleted text block. 
Release Notes: - N/A Co-authored-by: Antonio --- crates/editor/src/editor.rs | 2 +- crates/editor/src/hunk_diff.rs | 440 +++++++++++-------- crates/editor/src/proposed_changes_editor.rs | 7 +- 3 files changed, 259 insertions(+), 190 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index b43433e3f41e6..54a1318bdfe52 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -3059,7 +3059,7 @@ impl Editor { } pub fn cancel(&mut self, _: &Cancel, cx: &mut ViewContext) { - if self.clear_clicked_diff_hunks(cx) { + if self.clear_expanded_diff_hunks(cx) { cx.notify(); return; } diff --git a/crates/editor/src/hunk_diff.rs b/crates/editor/src/hunk_diff.rs index c8caa30b59c49..ff3451fc9216b 100644 --- a/crates/editor/src/hunk_diff.rs +++ b/crates/editor/src/hunk_diff.rs @@ -32,6 +32,7 @@ pub(super) struct ExpandedHunks { pub(crate) hunks: Vec, diff_base: HashMap, hunk_update_tasks: HashMap, Task<()>>, + expand_all: bool, } #[derive(Debug, Clone)] @@ -72,6 +73,10 @@ impl ExpandedHunks { } impl Editor { + pub fn set_expand_all_diff_hunks(&mut self) { + self.expanded_hunks.expand_all = true; + } + pub(super) fn toggle_hovered_hunk( &mut self, hovered_hunk: &HoveredHunk, @@ -133,6 +138,10 @@ impl Editor { hunks_to_toggle: Vec, cx: &mut ViewContext, ) { + if self.expanded_hunks.expand_all { + return; + } + let previous_toggle_task = self.expanded_hunks.hunk_update_tasks.remove(&None); let new_toggle_task = cx.spawn(move |editor, mut cx| async move { if let Some(task) = previous_toggle_task { @@ -426,62 +435,64 @@ impl Editor { .child( h_flex() .gap_1() - .child( - IconButton::new("next-hunk", IconName::ArrowDown) - .shape(IconButtonShape::Square) - .icon_size(IconSize::Small) - .tooltip({ - let focus_handle = editor.focus_handle(cx); - move |cx| { - Tooltip::for_action_in( - "Next Hunk", - &GoToHunk, - &focus_handle, - cx, - ) - } - }) - .on_click({ - let editor = editor.clone(); - let hunk = hunk.clone(); - move |_event, cx| { - editor.update(cx, |editor, cx| { - editor.go_to_subsequent_hunk( - hunk.multi_buffer_range.end, + .when(!is_branch_buffer, |row| { + row.child( + IconButton::new("next-hunk", IconName::ArrowDown) + .shape(IconButtonShape::Square) + .icon_size(IconSize::Small) + .tooltip({ + let focus_handle = editor.focus_handle(cx); + move |cx| { + Tooltip::for_action_in( + "Next Hunk", + &GoToHunk, + &focus_handle, cx, - ); - }); - } - }), - ) - .child( - IconButton::new("prev-hunk", IconName::ArrowUp) - .shape(IconButtonShape::Square) - .icon_size(IconSize::Small) - .tooltip({ - let focus_handle = editor.focus_handle(cx); - move |cx| { - Tooltip::for_action_in( - "Previous Hunk", - &GoToPrevHunk, - &focus_handle, - cx, - ) - } - }) - .on_click({ - let editor = editor.clone(); - let hunk = hunk.clone(); - move |_event, cx| { - editor.update(cx, |editor, cx| { - editor.go_to_preceding_hunk( - hunk.multi_buffer_range.start, + ) + } + }) + .on_click({ + let editor = editor.clone(); + let hunk = hunk.clone(); + move |_event, cx| { + editor.update(cx, |editor, cx| { + editor.go_to_subsequent_hunk( + hunk.multi_buffer_range.end, + cx, + ); + }); + } + }), + ) + .child( + IconButton::new("prev-hunk", IconName::ArrowUp) + .shape(IconButtonShape::Square) + .icon_size(IconSize::Small) + .tooltip({ + let focus_handle = editor.focus_handle(cx); + move |cx| { + Tooltip::for_action_in( + "Previous Hunk", + &GoToPrevHunk, + &focus_handle, cx, - ); - }); - } - }), - ) + ) + } + }) + .on_click({ + let editor = editor.clone(); + let hunk = hunk.clone(); + 
move |_event, cx| { + editor.update(cx, |editor, cx| { + editor.go_to_preceding_hunk( + hunk.multi_buffer_range.start, + cx, + ); + }); + } + }), + ) + }) .child( IconButton::new("discard", IconName::Undo) .shape(IconButtonShape::Square) @@ -527,99 +538,115 @@ impl Editor { } }), ) - .when(is_branch_buffer, |this| { - this.child( - IconButton::new("apply", IconName::Check) - .shape(IconButtonShape::Square) - .icon_size(IconSize::Small) - .tooltip({ - let focus_handle = editor.focus_handle(cx); - move |cx| { - Tooltip::for_action_in( - "Apply Hunk", - &ApplyDiffHunk, - &focus_handle, - cx, - ) - } - }) - .on_click({ - let editor = editor.clone(); - let hunk = hunk.clone(); - move |_event, cx| { - editor.update(cx, |editor, cx| { - editor.apply_changes_in_range( - hunk.multi_buffer_range.clone(), + .map(|this| { + if is_branch_buffer { + this.child( + IconButton::new("apply", IconName::Check) + .shape(IconButtonShape::Square) + .icon_size(IconSize::Small) + .tooltip({ + let focus_handle = + editor.focus_handle(cx); + move |cx| { + Tooltip::for_action_in( + "Apply Hunk", + &ApplyDiffHunk, + &focus_handle, cx, - ); - }); - } - }), - ) - }) - .child({ - let focus = editor.focus_handle(cx); - PopoverMenu::new("hunk-controls-dropdown") - .trigger( - IconButton::new( - "toggle_editor_selections_icon", - IconName::EllipsisVertical, - ) - .shape(IconButtonShape::Square) - .icon_size(IconSize::Small) - .style(ButtonStyle::Subtle) - .selected( - hunk_controls_menu_handle.is_deployed(), - ) - .when( - !hunk_controls_menu_handle.is_deployed(), - |this| { - this.tooltip(|cx| { - Tooltip::text("Hunk Controls", cx) - }) - }, - ), + ) + } + }) + .on_click({ + let editor = editor.clone(); + let hunk = hunk.clone(); + move |_event, cx| { + editor.update(cx, |editor, cx| { + editor.apply_changes_in_range( + hunk.multi_buffer_range + .clone(), + cx, + ); + }); + } + }), ) - .anchor(AnchorCorner::TopRight) - .with_handle(hunk_controls_menu_handle) - .menu(move |cx| { - let focus = focus.clone(); - let menu = - ContextMenu::build(cx, move |menu, _| { - menu.context(focus.clone()).action( - "Discard All", - RevertFile.boxed_clone(), + } else { + this.child({ + let focus = editor.focus_handle(cx); + PopoverMenu::new("hunk-controls-dropdown") + .trigger( + IconButton::new( + "toggle_editor_selections_icon", + IconName::EllipsisVertical, ) - }); - Some(menu) + .shape(IconButtonShape::Square) + .icon_size(IconSize::Small) + .style(ButtonStyle::Subtle) + .selected( + hunk_controls_menu_handle + .is_deployed(), + ) + .when( + !hunk_controls_menu_handle + .is_deployed(), + |this| { + this.tooltip(|cx| { + Tooltip::text( + "Hunk Controls", + cx, + ) + }) + }, + ), + ) + .anchor(AnchorCorner::TopRight) + .with_handle(hunk_controls_menu_handle) + .menu(move |cx| { + let focus = focus.clone(); + let menu = ContextMenu::build( + cx, + move |menu, _| { + menu.context(focus.clone()) + .action( + "Discard All", + RevertFile + .boxed_clone(), + ) + }, + ); + Some(menu) + }) }) - }), - ) - .child( - IconButton::new("collapse", IconName::Close) - .shape(IconButtonShape::Square) - .icon_size(IconSize::Small) - .tooltip({ - let focus_handle = editor.focus_handle(cx); - move |cx| { - Tooltip::for_action_in( - "Collapse Hunk", - &ToggleHunkDiff, - &focus_handle, - cx, - ) - } - }) - .on_click({ - let editor = editor.clone(); - let hunk = hunk.clone(); - move |_event, cx| { - editor.update(cx, |editor, cx| { - editor.toggle_hovered_hunk(&hunk, cx); - }); } }), - ), + ) + .when(!is_branch_buffer, |div| { + div.child( + 
IconButton::new("collapse", IconName::Close) + .shape(IconButtonShape::Square) + .icon_size(IconSize::Small) + .tooltip({ + let focus_handle = editor.focus_handle(cx); + move |cx| { + Tooltip::for_action_in( + "Collapse Hunk", + &ToggleHunkDiff, + &focus_handle, + cx, + ) + } + }) + .on_click({ + let editor = editor.clone(); + let hunk = hunk.clone(); + move |_event, cx| { + editor.update(cx, |editor, cx| { + editor.toggle_hovered_hunk(&hunk, cx); + }); + } + }), + ) + }), ) .into_any_element() } @@ -694,7 +721,10 @@ impl Editor { } } - pub(super) fn clear_clicked_diff_hunks(&mut self, cx: &mut ViewContext<'_, Editor>) -> bool { + pub(super) fn clear_expanded_diff_hunks(&mut self, cx: &mut ViewContext<'_, Editor>) -> bool { + if self.expanded_hunks.expand_all { + return false; + } self.expanded_hunks.hunk_update_tasks.clear(); self.clear_row_highlights::(); let to_remove = self @@ -798,33 +828,43 @@ impl Editor { status, } => { let hunk_display_range = display_row_range; + if expanded_hunk_display_range.start > hunk_display_range.end { recalculated_hunks.next(); - continue; - } else if expanded_hunk_display_range.end - < hunk_display_range.start - { - break; - } else { - if !expanded_hunk.folded - && expanded_hunk_display_range == hunk_display_range - && expanded_hunk.status == hunk_status(buffer_hunk) - && expanded_hunk.diff_base_byte_range - == buffer_hunk.diff_base_byte_range - { - recalculated_hunks.next(); - retain = true; - } else { + if editor.expanded_hunks.expand_all { hunks_to_reexpand.push(HoveredHunk { status, multi_buffer_range, diff_base_byte_range, }); } + continue; + } + + if expanded_hunk_display_range.end + < hunk_display_range.start + { break; } + + if !expanded_hunk.folded + && expanded_hunk_display_range == hunk_display_range + && expanded_hunk.status == hunk_status(buffer_hunk) + && expanded_hunk.diff_base_byte_range + == buffer_hunk.diff_base_byte_range + { + recalculated_hunks.next(); + retain = true; + } else { + hunks_to_reexpand.push(HoveredHunk { + status, + multi_buffer_range, + diff_base_byte_range, + }); + } + break; } } } @@ -836,6 +876,26 @@ impl Editor { retain }); + if editor.expanded_hunks.expand_all { + for hunk in recalculated_hunks { + match diff_hunk_to_display(&hunk, &snapshot) { + DisplayDiffHunk::Folded { .. } => {} + DisplayDiffHunk::Unfolded { + diff_base_byte_range, + multi_buffer_range, + status, + .. 
+ } => { + hunks_to_reexpand.push(HoveredHunk { + status, + multi_buffer_range, + diff_base_byte_range, + }); + } + } + } + } + editor.remove_highlighted_rows::(highlights_to_remove, cx); editor.remove_blocks(blocks_to_remove, None, cx); @@ -1000,13 +1060,15 @@ fn editor_with_deleted_text( editor.scroll_manager.set_forbid_vertical_scroll(true); editor.set_read_only(true); editor.set_show_inline_completions(Some(false), cx); - editor.highlight_rows::( + + enum DeletedBlockRowHighlight {} + editor.highlight_rows::( Anchor::min()..Anchor::max(), deleted_color, false, cx, ); - editor.set_current_line_highlight(Some(CurrentLineHighlight::None)); + editor.set_current_line_highlight(Some(CurrentLineHighlight::None)); // editor ._subscriptions .extend([cx.on_blur(&editor.focus_handle, |editor, cx| { @@ -1015,37 +1077,41 @@ fn editor_with_deleted_text( }); })]); - let parent_editor_for_reverts = parent_editor.clone(); let original_multi_buffer_range = hunk.multi_buffer_range.clone(); let diff_base_range = hunk.diff_base_byte_range.clone(); editor - .register_action::(move |_, cx| { - parent_editor_for_reverts - .update(cx, |editor, cx| { - let Some((buffer, original_text)) = - editor.buffer().update(cx, |buffer, cx| { - let (_, buffer, _) = buffer - .excerpt_containing(original_multi_buffer_range.start, cx)?; - let original_text = - buffer.read(cx).diff_base()?.slice(diff_base_range.clone()); - Some((buffer, Arc::from(original_text.to_string()))) - }) - else { - return; - }; - buffer.update(cx, |buffer, cx| { - buffer.edit( - Some(( - original_multi_buffer_range.start.text_anchor - ..original_multi_buffer_range.end.text_anchor, - original_text, - )), - None, - cx, - ) - }); - }) - .ok(); + .register_action::({ + let parent_editor = parent_editor.clone(); + move |_, cx| { + parent_editor + .update(cx, |editor, cx| { + let Some((buffer, original_text)) = + editor.buffer().update(cx, |buffer, cx| { + let (_, buffer, _) = buffer.excerpt_containing( + original_multi_buffer_range.start, + cx, + )?; + let original_text = + buffer.read(cx).diff_base()?.slice(diff_base_range.clone()); + Some((buffer, Arc::from(original_text.to_string()))) + }) + else { + return; + }; + buffer.update(cx, |buffer, cx| { + buffer.edit( + Some(( + original_multi_buffer_range.start.text_anchor + ..original_multi_buffer_range.end.text_anchor, + original_text, + )), + None, + cx, + ) + }); + }) + .ok(); + } }) .detach(); let hunk = hunk.clone(); diff --git a/crates/editor/src/proposed_changes_editor.rs b/crates/editor/src/proposed_changes_editor.rs index 62e37bc677f5f..8c8aa710a2f72 100644 --- a/crates/editor/src/proposed_changes_editor.rs +++ b/crates/editor/src/proposed_changes_editor.rs @@ -63,8 +63,11 @@ impl ProposedChangesEditor { let (recalculate_diffs_tx, mut recalculate_diffs_rx) = mpsc::unbounded(); Self { - editor: cx - .new_view(|cx| Editor::for_multibuffer(multibuffer.clone(), project, true, cx)), + editor: cx.new_view(|cx| { + let mut editor = Editor::for_multibuffer(multibuffer.clone(), project, true, cx); + editor.set_expand_all_diff_hunks(); + editor + }), recalculate_diffs_tx, _recalculate_diffs_task: cx.spawn(|_, mut cx| async move { let mut buffers_to_diff = HashSet::default(); From 563a1dcbab67b58a6c9cf10fe4dea35d862376d1 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 1 Oct 2024 12:58:21 -0600 Subject: [PATCH 172/228] Fix panic when opening proposed changes editor with reversed ranges (#18599) Closes https://github.com/zed-industries/zed/issues/18589 Release Notes: - N/A Co-authored-by: Antonio --- 
crates/editor/src/editor.rs | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 54a1318bdfe52..ddc7de4e41a46 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -12257,12 +12257,9 @@ impl Editor { let buffer = self.buffer.read(cx); let mut new_selections_by_buffer = HashMap::default(); for selection in self.selections.all::(cx) { - for (buffer, mut range, _) in + for (buffer, range, _) in buffer.range_to_buffer_ranges(selection.start..selection.end, cx) { - if selection.reversed { - mem::swap(&mut range.start, &mut range.end); - } let mut range = range.to_point(buffer.read(cx)); range.start.column = 0; range.end.column = buffer.read(cx).line_len(range.end.row); From e80cbab93f0b30ce6619f9b37636410bc8caab5d Mon Sep 17 00:00:00 2001 From: Roman Zipp Date: Wed, 2 Oct 2024 13:03:23 +0200 Subject: [PATCH 173/228] Fix docs `format_on_save` value is not a boolean (#18619) Fixed [Configuring Languages](https://zed.dev/docs/configuring-languages) docs using boolean value for `format_on_save` option although it accepts string values of `"on"` or `"off"` Details: The documentation on [configuring languages](https://zed.dev/docs/configuring-languages) states the use of boolean values for the `format_on_save` option although the [configuration reference](https://zed.dev/docs/configuring-zed#format-on-save) only allows the usage of string values `"on"` or `"off"`. In fact using boolean values will not work and won't translate to `on` or `off` Release Notes: - N/A --- docs/src/configuring-languages.md | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/src/configuring-languages.md b/docs/src/configuring-languages.md index 65e9f822b9478..fc6de9efeab63 100644 --- a/docs/src/configuring-languages.md +++ b/docs/src/configuring-languages.md @@ -33,7 +33,7 @@ Here's an example of language-specific settings: "Python": { "tab_size": 4, "formatter": "language_server", - "format_on_save": true + "format_on_save": "on" }, "JavaScript": { "tab_size": 2, @@ -209,11 +209,11 @@ Zed supports both built-in and external formatters. Configure formatters globall "arguments": ["--stdin-filepath", "{buffer_path}"] } }, - "format_on_save": true + "format_on_save": "on" }, "Rust": { "formatter": "language_server", - "format_on_save": true + "format_on_save": "on" } } ``` @@ -225,7 +225,7 @@ To disable formatting for a specific language: ```json "languages": { "Markdown": { - "format_on_save": false + "format_on_save": "off" } } ``` @@ -276,7 +276,7 @@ Zed allows you to run both formatting and linting on save. 
Here's an example tha "code_actions_on_format": { "source.fixAll.eslint": true }, - "format_on_save": true + "format_on_save": "on" } } ``` From b3cdd2ccff82d0a3d2e33171e22838402f8f3f79 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Wed, 2 Oct 2024 13:21:19 +0200 Subject: [PATCH 174/228] ssh remoting: Fix ssh process not being cleaned up when connection is closed (#18623) We introduced a memory leak in #18572, which meant that `Drop` was never called on `SshRemoteConnection`, meaning that the ssh process kept running Co-Authored-by: Thorsten Release Notes: - N/A --------- Co-authored-by: Thorsten --- crates/remote/src/ssh_session.rs | 28 +++++++++++++++------------- 1 file changed, 15 insertions(+), 13 deletions(-) diff --git a/crates/remote/src/ssh_session.rs b/crates/remote/src/ssh_session.rs index fe1e42fe96630..6bca9938baac7 100644 --- a/crates/remote/src/ssh_session.rs +++ b/crates/remote/src/ssh_session.rs @@ -31,7 +31,7 @@ use std::{ path::{Path, PathBuf}, sync::{ atomic::{AtomicU32, Ordering::SeqCst}, - Arc, + Arc, Weak, }, time::Instant, }; @@ -244,12 +244,12 @@ struct SshRemoteClientState { ssh_connection: SshRemoteConnection, delegate: Arc, forwarder: ChannelForwarder, - _multiplex_task: Task>, + multiplex_task: Task>, } pub struct SshRemoteClient { client: Arc, - inner_state: Arc>>, + inner_state: Mutex>, } impl SshRemoteClient { @@ -264,7 +264,7 @@ impl SshRemoteClient { let client = cx.update(|cx| ChannelClient::new(incoming_rx, outgoing_tx, cx))?; let this = Arc::new(Self { client, - inner_state: Arc::new(Mutex::new(None)), + inner_state: Mutex::new(None), }); let inner_state = { @@ -276,7 +276,7 @@ impl SshRemoteClient { .await?; let multiplex_task = Self::multiplex( - this.clone(), + Arc::downgrade(&this), ssh_process, proxy_incoming_tx, proxy_outgoing_rx, @@ -287,7 +287,7 @@ impl SshRemoteClient { ssh_connection, delegate, forwarder: proxy, - _multiplex_task: multiplex_task, + multiplex_task, } }; @@ -305,9 +305,9 @@ impl SshRemoteClient { mut ssh_connection, delegate, forwarder: proxy, - _multiplex_task, + multiplex_task, } = state; - drop(_multiplex_task); + drop(multiplex_task); cx.spawn(|mut cx| async move { let (incoming_tx, outgoing_rx) = proxy.into_channels().await; @@ -331,8 +331,8 @@ impl SshRemoteClient { ssh_connection, delegate, forwarder: proxy, - _multiplex_task: Self::multiplex( - this.clone(), + multiplex_task: Self::multiplex( + Arc::downgrade(&this), ssh_process, proxy_incoming_tx, proxy_outgoing_rx, @@ -349,7 +349,7 @@ impl SshRemoteClient { } fn multiplex( - this: Arc, + this: Weak, mut ssh_process: Child, incoming_tx: UnboundedSender, mut outgoing_rx: UnboundedReceiver, @@ -444,7 +444,9 @@ impl SshRemoteClient { if let Err(error) = result { log::warn!("ssh io task died with error: {:?}. 
reconnecting...", error); - Self::reconnect(this, &mut cx).ok(); + if let Some(this) = this.upgrade() { + Self::reconnect(this, &mut cx).ok(); + } } Ok(()) @@ -516,7 +518,7 @@ impl SshRemoteClient { let client = ChannelClient::new(server_to_client_rx, client_to_server_tx, cx); Arc::new(Self { client, - inner_state: Arc::new(Mutex::new(None)), + inner_state: Mutex::new(None), }) }), server_cx.update(|cx| ChannelClient::new(client_to_server_rx, server_to_client_tx, cx)), From 0ee1d7ab2634521e11cb7f221074b0287763359d Mon Sep 17 00:00:00 2001 From: loczek <30776250+loczek@users.noreply.github.com> Date: Wed, 2 Oct 2024 13:27:16 +0200 Subject: [PATCH 175/228] Add snippet commands (#18453) Closes #17860 Closes #15403 Release Notes: - Added `snippets: configure snippets` command to create and modify snippets - Added `snippets: open folder` command for opening the `~/.config/zed/snippets` directory https://github.com/user-attachments/assets/fd9e664c-44b1-49bf-87a8-42b9e516f12f --- Cargo.lock | 15 ++ Cargo.toml | 2 + crates/snippets_ui/Cargo.toml | 22 +++ crates/snippets_ui/LICENSE-GPL | 1 + crates/snippets_ui/src/snippets_ui.rs | 226 ++++++++++++++++++++++++++ crates/zed/Cargo.toml | 1 + crates/zed/src/main.rs | 1 + 7 files changed, 268 insertions(+) create mode 100644 crates/snippets_ui/Cargo.toml create mode 120000 crates/snippets_ui/LICENSE-GPL create mode 100644 crates/snippets_ui/src/snippets_ui.rs diff --git a/Cargo.lock b/Cargo.lock index 123141d188e0e..7c92ef0f5257c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -10500,6 +10500,20 @@ dependencies = [ "util", ] +[[package]] +name = "snippets_ui" +version = "0.1.0" +dependencies = [ + "fuzzy", + "gpui", + "language", + "paths", + "picker", + "ui", + "util", + "workspace", +] + [[package]] name = "socket2" version = "0.4.10" @@ -14468,6 +14482,7 @@ dependencies = [ "simplelog", "smol", "snippet_provider", + "snippets_ui", "supermaven", "sysinfo", "tab_switcher", diff --git a/Cargo.toml b/Cargo.toml index c72fec020fe67..1ef14dae70c20 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -99,6 +99,7 @@ members = [ "crates/settings_ui", "crates/snippet", "crates/snippet_provider", + "crates/snippets_ui", "crates/sqlez", "crates/sqlez_macros", "crates/story", @@ -275,6 +276,7 @@ settings = { path = "crates/settings" } settings_ui = { path = "crates/settings_ui" } snippet = { path = "crates/snippet" } snippet_provider = { path = "crates/snippet_provider" } +snippets_ui = { path = "crates/snippets_ui" } sqlez = { path = "crates/sqlez" } sqlez_macros = { path = "crates/sqlez_macros" } story = { path = "crates/story" } diff --git a/crates/snippets_ui/Cargo.toml b/crates/snippets_ui/Cargo.toml new file mode 100644 index 0000000000000..da9eff4ae55ba --- /dev/null +++ b/crates/snippets_ui/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "snippets_ui" +version = "0.1.0" +edition = "2021" +publish = false +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +path = "src/snippets_ui.rs" + +[dependencies] +fuzzy.workspace = true +gpui.workspace = true +language.workspace = true +paths.workspace = true +picker.workspace = true +ui.workspace = true +util.workspace = true +workspace.workspace = true diff --git a/crates/snippets_ui/LICENSE-GPL b/crates/snippets_ui/LICENSE-GPL new file mode 120000 index 0000000000000..89e542f750cd3 --- /dev/null +++ b/crates/snippets_ui/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/snippets_ui/src/snippets_ui.rs b/crates/snippets_ui/src/snippets_ui.rs new file mode 100644 index 
0000000000000..c8ab6febdaa07 --- /dev/null +++ b/crates/snippets_ui/src/snippets_ui.rs @@ -0,0 +1,226 @@ +use fuzzy::{match_strings, StringMatch, StringMatchCandidate}; +use gpui::{ + actions, AppContext, DismissEvent, EventEmitter, FocusableView, ParentElement, Render, Styled, + View, ViewContext, VisualContext, WeakView, +}; +use language::LanguageRegistry; +use paths::config_dir; +use picker::{Picker, PickerDelegate}; +use std::{borrow::Borrow, fs, sync::Arc}; +use ui::{prelude::*, HighlightedLabel, ListItem, ListItemSpacing, WindowContext}; +use util::ResultExt; +use workspace::{notifications::NotifyResultExt, ModalView, Workspace}; + +actions!(snippets, [ConfigureSnippets, OpenFolder]); + +pub fn init(cx: &mut AppContext) { + cx.observe_new_views(register).detach(); +} + +fn register(workspace: &mut Workspace, _: &mut ViewContext) { + workspace.register_action(configure_snippets); + workspace.register_action(open_folder); +} + +fn configure_snippets( + workspace: &mut Workspace, + _: &ConfigureSnippets, + cx: &mut ViewContext, +) { + let language_registry = workspace.app_state().languages.clone(); + let workspace_handle = workspace.weak_handle(); + + workspace.toggle_modal(cx, move |cx| { + ScopeSelector::new(language_registry, workspace_handle, cx) + }); +} + +fn open_folder(workspace: &mut Workspace, _: &OpenFolder, cx: &mut ViewContext) { + fs::create_dir_all(config_dir().join("snippets")).notify_err(workspace, cx); + cx.open_with_system(config_dir().join("snippets").borrow()); +} + +pub struct ScopeSelector { + picker: View>, +} + +impl ScopeSelector { + fn new( + language_registry: Arc, + workspace: WeakView, + cx: &mut ViewContext, + ) -> Self { + let delegate = + ScopeSelectorDelegate::new(workspace, cx.view().downgrade(), language_registry); + + let picker = cx.new_view(|cx| Picker::uniform_list(delegate, cx)); + + Self { picker } + } +} + +impl ModalView for ScopeSelector {} + +impl EventEmitter for ScopeSelector {} + +impl FocusableView for ScopeSelector { + fn focus_handle(&self, cx: &AppContext) -> gpui::FocusHandle { + self.picker.focus_handle(cx) + } +} + +impl Render for ScopeSelector { + fn render(&mut self, _cx: &mut ViewContext) -> impl IntoElement { + v_flex().w(rems(34.)).child(self.picker.clone()) + } +} + +pub struct ScopeSelectorDelegate { + workspace: WeakView, + scope_selector: WeakView, + language_registry: Arc, + candidates: Vec, + matches: Vec, + selected_index: usize, +} + +impl ScopeSelectorDelegate { + fn new( + workspace: WeakView, + scope_selector: WeakView, + language_registry: Arc, + ) -> Self { + let candidates = Vec::from(["Global".to_string()]).into_iter(); + let languages = language_registry.language_names().into_iter(); + + let candidates = candidates + .chain(languages) + .enumerate() + .map(|(candidate_id, name)| StringMatchCandidate::new(candidate_id, name)) + .collect::>(); + + Self { + workspace, + scope_selector, + language_registry, + candidates, + matches: vec![], + selected_index: 0, + } + } +} + +impl PickerDelegate for ScopeSelectorDelegate { + type ListItem = ListItem; + + fn placeholder_text(&self, _: &mut WindowContext) -> Arc { + "Select snippet scope...".into() + } + + fn match_count(&self) -> usize { + self.matches.len() + } + + fn confirm(&mut self, _: bool, cx: &mut ViewContext>) { + if let Some(mat) = self.matches.get(self.selected_index) { + let scope_name = self.candidates[mat.candidate_id].string.clone(); + let language = self.language_registry.language_for_name(&scope_name); + + if let Some(workspace) = 
self.workspace.upgrade() { + cx.spawn(|_, mut cx| async move { + let scope = match scope_name.as_str() { + "Global" => "snippets".to_string(), + _ => language.await?.lsp_id(), + }; + + workspace.update(&mut cx, |workspace, cx| { + workspace + .open_abs_path( + config_dir().join("snippets").join(scope + ".json"), + false, + cx, + ) + .detach(); + }) + }) + .detach_and_log_err(cx); + }; + } + self.dismissed(cx); + } + + fn dismissed(&mut self, cx: &mut ViewContext>) { + self.scope_selector + .update(cx, |_, cx| cx.emit(DismissEvent)) + .log_err(); + } + + fn selected_index(&self) -> usize { + self.selected_index + } + + fn set_selected_index(&mut self, ix: usize, _: &mut ViewContext>) { + self.selected_index = ix; + } + + fn update_matches( + &mut self, + query: String, + cx: &mut ViewContext>, + ) -> gpui::Task<()> { + let background = cx.background_executor().clone(); + let candidates = self.candidates.clone(); + cx.spawn(|this, mut cx| async move { + let matches = if query.is_empty() { + candidates + .into_iter() + .enumerate() + .map(|(index, candidate)| StringMatch { + candidate_id: index, + string: candidate.string, + positions: Vec::new(), + score: 0.0, + }) + .collect() + } else { + match_strings( + &candidates, + &query, + false, + 100, + &Default::default(), + background, + ) + .await + }; + + this.update(&mut cx, |this, cx| { + let delegate = &mut this.delegate; + delegate.matches = matches; + delegate.selected_index = delegate + .selected_index + .min(delegate.matches.len().saturating_sub(1)); + cx.notify(); + }) + .log_err(); + }) + } + + fn render_match( + &self, + ix: usize, + selected: bool, + _: &mut ViewContext>, + ) -> Option { + let mat = &self.matches[ix]; + let label = mat.string.clone(); + + Some( + ListItem::new(ix) + .inset(true) + .spacing(ListItemSpacing::Sparse) + .selected(selected) + .child(HighlightedLabel::new(label, mat.positions.clone())), + ) + } +} diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index 5422f8b29aa98..775a59e475cb4 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -96,6 +96,7 @@ shellexpand.workspace = true simplelog.workspace = true smol.workspace = true snippet_provider.workspace = true +snippets_ui.workspace = true supermaven.workspace = true sysinfo.workspace = true tab_switcher.workspace = true diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 186805d12cd01..06f1d926aea37 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -256,6 +256,7 @@ fn init_ui( project_panel::init(Assets, cx); outline_panel::init(Assets, cx); tasks_ui::init(cx); + snippets_ui::init(cx); channel::init(&app_state.client.clone(), app_state.user_store.clone(), cx); search::init(cx); vim::init(cx); From fd94c2b3fd5ceac67ff852d1781b4c707f5ed487 Mon Sep 17 00:00:00 2001 From: Patrick <39161540+patrickJramos@users.noreply.github.com> Date: Wed, 2 Oct 2024 08:44:42 -0300 Subject: [PATCH 176/228] Keep tab position when closing tabs (#18168) - Closes #18036 Release Notes: - N/A --- crates/workspace/src/pane.rs | 24 ++++++++++-------------- 1 file changed, 10 insertions(+), 14 deletions(-) diff --git a/crates/workspace/src/pane.rs b/crates/workspace/src/pane.rs index 82300690e7dbe..3e228b7b1602b 100644 --- a/crates/workspace/src/pane.rs +++ b/crates/workspace/src/pane.rs @@ -1407,17 +1407,13 @@ impl Pane { self.pinned_tab_count -= 1; } if item_index == self.active_item_index { - let index_to_activate = self - .activation_history - .pop() - .and_then(|last_activated_item| { - 
self.items.iter().enumerate().find_map(|(index, item)| { - (item.item_id() == last_activated_item.entity_id).then_some(index) - }) - }) - // We didn't have a valid activation history entry, so fallback - // to activating the item to the left - .unwrap_or_else(|| item_index.min(self.items.len()).saturating_sub(1)); + self.activation_history.pop(); + + let index_to_activate = if item_index + 1 < self.items.len() { + item_index + 1 + } else { + item_index.saturating_sub(1) + }; let should_activate = activate_pane || self.has_focus(cx); if self.items.len() == 1 && should_activate { @@ -3320,7 +3316,7 @@ mod tests { .unwrap() .await .unwrap(); - assert_item_labels(&pane, ["A", "B*", "C", "D"], cx); + assert_item_labels(&pane, ["A", "B", "C*", "D"], cx); pane.update(cx, |pane, cx| pane.activate_item(3, false, false, cx)); assert_item_labels(&pane, ["A", "B", "C", "D*"], cx); @@ -3331,7 +3327,7 @@ mod tests { .unwrap() .await .unwrap(); - assert_item_labels(&pane, ["A", "B*", "C"], cx); + assert_item_labels(&pane, ["A", "B", "C*"], cx); pane.update(cx, |pane, cx| { pane.close_active_item(&CloseActiveItem { save_intent: None }, cx) @@ -3339,7 +3335,7 @@ mod tests { .unwrap() .await .unwrap(); - assert_item_labels(&pane, ["A", "C*"], cx); + assert_item_labels(&pane, ["A", "B*"], cx); pane.update(cx, |pane, cx| { pane.close_active_item(&CloseActiveItem { save_intent: None }, cx) From e01bc6765db7352559d20d3b0f18b124dc4707d8 Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Wed, 2 Oct 2024 13:45:07 +0200 Subject: [PATCH 177/228] editor: Fix "Reveal in File Manager" not working with multibuffers (#18626) Additionally, mark context menu entry as disabled when the action would fail (untitled buffer, collab sessions). Supersedes #18584 Release Notes: - Fixed "Reveal in Finder/File Manager", "Copy Path", "Copy Relative Path" and "Copy file location" actions not working with multibuffers. --- crates/editor/src/editor.rs | 40 ++++++++++++------------- crates/editor/src/mouse_context_menu.rs | 18 +++++++---- 2 files changed, 33 insertions(+), 25 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index ddc7de4e41a46..525a94f2582ab 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -11257,30 +11257,32 @@ impl Editor { None } + fn target_file<'a>(&self, cx: &'a AppContext) -> Option<&'a dyn language::LocalFile> { + self.active_excerpt(cx)? 
+ .1 + .read(cx) + .file() + .and_then(|f| f.as_local()) + } + pub fn reveal_in_finder(&mut self, _: &RevealInFileManager, cx: &mut ViewContext) { - if let Some(buffer) = self.buffer().read(cx).as_singleton() { - if let Some(file) = buffer.read(cx).file().and_then(|f| f.as_local()) { - cx.reveal_path(&file.abs_path(cx)); - } + if let Some(target) = self.target_file(cx) { + cx.reveal_path(&target.abs_path(cx)); } } pub fn copy_path(&mut self, _: &CopyPath, cx: &mut ViewContext) { - if let Some(buffer) = self.buffer().read(cx).as_singleton() { - if let Some(file) = buffer.read(cx).file().and_then(|f| f.as_local()) { - if let Some(path) = file.abs_path(cx).to_str() { - cx.write_to_clipboard(ClipboardItem::new_string(path.to_string())); - } + if let Some(file) = self.target_file(cx) { + if let Some(path) = file.abs_path(cx).to_str() { + cx.write_to_clipboard(ClipboardItem::new_string(path.to_string())); } } } pub fn copy_relative_path(&mut self, _: &CopyRelativePath, cx: &mut ViewContext) { - if let Some(buffer) = self.buffer().read(cx).as_singleton() { - if let Some(file) = buffer.read(cx).file().and_then(|f| f.as_local()) { - if let Some(path) = file.path().to_str() { - cx.write_to_clipboard(ClipboardItem::new_string(path.to_string())); - } + if let Some(file) = self.target_file(cx) { + if let Some(path) = file.path().to_str() { + cx.write_to_clipboard(ClipboardItem::new_string(path.to_string())); } } } @@ -11491,12 +11493,10 @@ impl Editor { } pub fn copy_file_location(&mut self, _: &CopyFileLocation, cx: &mut ViewContext) { - if let Some(buffer) = self.buffer().read(cx).as_singleton() { - if let Some(file) = buffer.read(cx).file().and_then(|f| f.as_local()) { - if let Some(path) = file.path().to_str() { - let selection = self.selections.newest::(cx).start.row + 1; - cx.write_to_clipboard(ClipboardItem::new_string(format!("{path}:{selection}"))); - } + if let Some(file) = self.target_file(cx) { + if let Some(path) = file.path().to_str() { + let selection = self.selections.newest::(cx).start.row + 1; + cx.write_to_clipboard(ClipboardItem::new_string(format!("{path}:{selection}"))); } } } diff --git a/crates/editor/src/mouse_context_menu.rs b/crates/editor/src/mouse_context_menu.rs index 239d7955a056b..936d95dccbf8c 100644 --- a/crates/editor/src/mouse_context_menu.rs +++ b/crates/editor/src/mouse_context_menu.rs @@ -158,6 +158,12 @@ pub fn deploy_context_menu( } let focus = cx.focused(); + let has_reveal_target = editor.target_file(cx).is_some(); + let reveal_in_finder_label = if cfg!(target_os = "macos") { + "Reveal in Finder" + } else { + "Reveal in File Manager" + }; ui::ContextMenu::build(cx, |menu, _cx| { let builder = menu .on_blur_subscription(Subscription::new(|| {})) @@ -180,11 +186,13 @@ pub fn deploy_context_menu( .action("Copy", Box::new(Copy)) .action("Paste", Box::new(Paste)) .separator() - .when(cfg!(target_os = "macos"), |builder| { - builder.action("Reveal in Finder", Box::new(RevealInFileManager)) - }) - .when(cfg!(not(target_os = "macos")), |builder| { - builder.action("Reveal in File Manager", Box::new(RevealInFileManager)) + .map(|builder| { + if has_reveal_target { + builder.action(reveal_in_finder_label, Box::new(RevealInFileManager)) + } else { + builder + .disabled_action(reveal_in_finder_label, Box::new(RevealInFileManager)) + } }) .action("Open in Terminal", Box::new(OpenInTerminal)) .action("Copy Permalink", Box::new(CopyPermalinkToLine)); From 82d3fcdf4b97566357633801df047027240aa286 Mon Sep 17 00:00:00 2001 From: Roy Williams Date: Wed, 2 Oct 2024 09:29:11 -0400 
Subject: [PATCH 178/228] Tweak assistant prompt to only fix diagnostic issues when requested to do so (#18596) Release Notes: - Assistant: Make the model less likely to incorporate diagnostic information when not requested to fix any issues. ![CleanShot 2024-10-01 at 13 44 08](https://github.com/user-attachments/assets/f0e9a132-6cac-4dc6-889f-467e59ec8bbc) --- assets/prompts/content_prompt.hbs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/assets/prompts/content_prompt.hbs b/assets/prompts/content_prompt.hbs index e944e230f56f9..c029f84b24c36 100644 --- a/assets/prompts/content_prompt.hbs +++ b/assets/prompts/content_prompt.hbs @@ -50,6 +50,9 @@ And here's the section to rewrite based on that prompt again for reference: {{#if diagnostic_errors}} {{#each diagnostic_errors}} + +Below are the diagnostic errors visible to the user. If the user requests problems to be fixed, use this information, but do not try to fix these errors if the user hasn't asked you to. + {{line_number}} {{error_message}} From 8a18c94f33fc36c89016c8f606a20632ce63b472 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Wed, 2 Oct 2024 15:35:50 +0200 Subject: [PATCH 179/228] Make slash command descriptions consistent (#18595) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR adds a description constant in most of the slash command files so that both the editor _and_ footer pickers use the same string. In terms of copywriting, I did some tweaking to reduce the longer ones a bit. Also standardized them all to use sentence case, as opposed to each instance using a different convention. The editor picker needs more work, though, given the arguments and descriptions are being cut at the moment. This should happen in a follow-up! 
Screenshot 2024-10-01 at 7 25 19 PM --- Release Notes: - N/A --------- Co-authored-by: Marshall Bowers --- crates/assistant/src/slash_command/auto_command.rs | 4 ++-- crates/assistant/src/slash_command/delta_command.rs | 4 ++-- crates/assistant/src/slash_command/diagnostics_command.rs | 2 +- crates/assistant/src/slash_command/fetch_command.rs | 4 ++-- crates/assistant/src/slash_command/file_command.rs | 4 ++-- crates/assistant/src/slash_command/now_command.rs | 4 ++-- crates/assistant/src/slash_command/project_command.rs | 4 ++-- crates/assistant/src/slash_command/prompt_command.rs | 4 ++-- crates/assistant/src/slash_command/search_command.rs | 4 ++-- crates/assistant/src/slash_command/symbols_command.rs | 4 ++-- crates/assistant/src/slash_command/tab_command.rs | 4 ++-- crates/assistant/src/slash_command/terminal_command.rs | 4 ++-- crates/assistant/src/slash_command/workflow_command.rs | 4 ++-- crates/assistant/src/slash_command_picker.rs | 6 ++++-- 14 files changed, 29 insertions(+), 27 deletions(-) diff --git a/crates/assistant/src/slash_command/auto_command.rs b/crates/assistant/src/slash_command/auto_command.rs index 14cee29682098..14bbb7c8412b4 100644 --- a/crates/assistant/src/slash_command/auto_command.rs +++ b/crates/assistant/src/slash_command/auto_command.rs @@ -31,11 +31,11 @@ impl SlashCommand for AutoCommand { } fn description(&self) -> String { - "Automatically infer what context to add, based on your prompt".into() + "Automatically infer what context to add".into() } fn menu_text(&self) -> String { - "Automatically Infer Context".into() + self.description() } fn label(&self, cx: &AppContext) -> CodeLabel { diff --git a/crates/assistant/src/slash_command/delta_command.rs b/crates/assistant/src/slash_command/delta_command.rs index 6a66ad3f09aa9..6f697ecbb9bcb 100644 --- a/crates/assistant/src/slash_command/delta_command.rs +++ b/crates/assistant/src/slash_command/delta_command.rs @@ -19,11 +19,11 @@ impl SlashCommand for DeltaSlashCommand { } fn description(&self) -> String { - "re-insert changed files".into() + "Re-insert changed files".into() } fn menu_text(&self) -> String { - "Re-insert Changed Files".into() + self.description() } fn requires_argument(&self) -> bool { diff --git a/crates/assistant/src/slash_command/diagnostics_command.rs b/crates/assistant/src/slash_command/diagnostics_command.rs index 3f79c01675031..146a4e5d366dd 100644 --- a/crates/assistant/src/slash_command/diagnostics_command.rs +++ b/crates/assistant/src/slash_command/diagnostics_command.rs @@ -95,7 +95,7 @@ impl SlashCommand for DiagnosticsSlashCommand { } fn menu_text(&self) -> String { - "Insert Diagnostics".into() + self.description() } fn requires_argument(&self) -> bool { diff --git a/crates/assistant/src/slash_command/fetch_command.rs b/crates/assistant/src/slash_command/fetch_command.rs index 23d3c884a8ec2..3a01bb645a36b 100644 --- a/crates/assistant/src/slash_command/fetch_command.rs +++ b/crates/assistant/src/slash_command/fetch_command.rs @@ -104,11 +104,11 @@ impl SlashCommand for FetchSlashCommand { } fn description(&self) -> String { - "insert URL contents".into() + "Insert fetched URL contents".into() } fn menu_text(&self) -> String { - "Insert fetched URL contents".into() + self.description() } fn requires_argument(&self) -> bool { diff --git a/crates/assistant/src/slash_command/file_command.rs b/crates/assistant/src/slash_command/file_command.rs index 260c6b0e2a084..6da56d064178a 100644 --- a/crates/assistant/src/slash_command/file_command.rs +++ 
b/crates/assistant/src/slash_command/file_command.rs @@ -110,11 +110,11 @@ impl SlashCommand for FileSlashCommand { } fn description(&self) -> String { - "insert file".into() + "Insert file".into() } fn menu_text(&self) -> String { - "Insert File".into() + self.description() } fn requires_argument(&self) -> bool { diff --git a/crates/assistant/src/slash_command/now_command.rs b/crates/assistant/src/slash_command/now_command.rs index eb0ca926f015b..221ba05cafc62 100644 --- a/crates/assistant/src/slash_command/now_command.rs +++ b/crates/assistant/src/slash_command/now_command.rs @@ -19,11 +19,11 @@ impl SlashCommand for NowSlashCommand { } fn description(&self) -> String { - "insert the current date and time".into() + "Insert current date and time".into() } fn menu_text(&self) -> String { - "Insert Current Date and Time".into() + self.description() } fn requires_argument(&self) -> bool { diff --git a/crates/assistant/src/slash_command/project_command.rs b/crates/assistant/src/slash_command/project_command.rs index 197e91d91adda..58fef8f338771 100644 --- a/crates/assistant/src/slash_command/project_command.rs +++ b/crates/assistant/src/slash_command/project_command.rs @@ -47,11 +47,11 @@ impl SlashCommand for ProjectSlashCommand { } fn description(&self) -> String { - "Generate semantic searches based on the current context".into() + "Generate a semantic search based on context".into() } fn menu_text(&self) -> String { - "Project Context".into() + self.description() } fn requires_argument(&self) -> bool { diff --git a/crates/assistant/src/slash_command/prompt_command.rs b/crates/assistant/src/slash_command/prompt_command.rs index effbcc0f90ce8..978c6d7504cae 100644 --- a/crates/assistant/src/slash_command/prompt_command.rs +++ b/crates/assistant/src/slash_command/prompt_command.rs @@ -16,11 +16,11 @@ impl SlashCommand for PromptSlashCommand { } fn description(&self) -> String { - "insert prompt from library".into() + "Insert prompt from library".into() } fn menu_text(&self) -> String { - "Insert Prompt from Library".into() + self.description() } fn requires_argument(&self) -> bool { diff --git a/crates/assistant/src/slash_command/search_command.rs b/crates/assistant/src/slash_command/search_command.rs index f0f3ee3d25c66..c7183e95bbc85 100644 --- a/crates/assistant/src/slash_command/search_command.rs +++ b/crates/assistant/src/slash_command/search_command.rs @@ -34,11 +34,11 @@ impl SlashCommand for SearchSlashCommand { } fn description(&self) -> String { - "semantic search".into() + "Search your project semantically".into() } fn menu_text(&self) -> String { - "Semantic Search".into() + self.description() } fn requires_argument(&self) -> bool { diff --git a/crates/assistant/src/slash_command/symbols_command.rs b/crates/assistant/src/slash_command/symbols_command.rs index 1cf8536c0dbfe..887b57ba9956c 100644 --- a/crates/assistant/src/slash_command/symbols_command.rs +++ b/crates/assistant/src/slash_command/symbols_command.rs @@ -17,11 +17,11 @@ impl SlashCommand for OutlineSlashCommand { } fn description(&self) -> String { - "insert symbols for active tab".into() + "Insert symbols for active tab".into() } fn menu_text(&self) -> String { - "Insert Symbols for Active Tab".into() + self.description() } fn complete_argument( diff --git a/crates/assistant/src/slash_command/tab_command.rs b/crates/assistant/src/slash_command/tab_command.rs index bdf8450d43be8..0bff4730d8e5c 100644 --- a/crates/assistant/src/slash_command/tab_command.rs +++ b/crates/assistant/src/slash_command/tab_command.rs @@ 
-24,11 +24,11 @@ impl SlashCommand for TabSlashCommand { } fn description(&self) -> String { - "insert open tabs (active tab by default)".to_owned() + "Insert open tabs (active tab by default)".to_owned() } fn menu_text(&self) -> String { - "Insert Open Tabs".to_owned() + self.description() } fn requires_argument(&self) -> bool { diff --git a/crates/assistant/src/slash_command/terminal_command.rs b/crates/assistant/src/slash_command/terminal_command.rs index 1d0293c235d44..1d4959fb19957 100644 --- a/crates/assistant/src/slash_command/terminal_command.rs +++ b/crates/assistant/src/slash_command/terminal_command.rs @@ -29,11 +29,11 @@ impl SlashCommand for TerminalSlashCommand { } fn description(&self) -> String { - "insert terminal output".into() + "Insert terminal output".into() } fn menu_text(&self) -> String { - "Insert Terminal Output".into() + self.description() } fn requires_argument(&self) -> bool { diff --git a/crates/assistant/src/slash_command/workflow_command.rs b/crates/assistant/src/slash_command/workflow_command.rs index c66dd9bebff14..071b4feaf436e 100644 --- a/crates/assistant/src/slash_command/workflow_command.rs +++ b/crates/assistant/src/slash_command/workflow_command.rs @@ -29,11 +29,11 @@ impl SlashCommand for WorkflowSlashCommand { } fn description(&self) -> String { - "insert a prompt that opts into the edit workflow".into() + "Insert prompt to opt into the edit workflow".into() } fn menu_text(&self) -> String { - "Insert Workflow Prompt".into() + self.description() } fn requires_argument(&self) -> bool { diff --git a/crates/assistant/src/slash_command_picker.rs b/crates/assistant/src/slash_command_picker.rs index 4b57dcfb3306c..58023848b0e50 100644 --- a/crates/assistant/src/slash_command_picker.rs +++ b/crates/assistant/src/slash_command_picker.rs @@ -184,7 +184,7 @@ impl PickerDelegate for SlashCommandDelegate { h_flex() .group(format!("command-entry-label-{ix}")) .w_full() - .min_w(px(220.)) + .min_w(px(250.)) .child( v_flex() .child( @@ -203,7 +203,9 @@ impl PickerDelegate for SlashCommandDelegate { div() .font_buffer(cx) .child( - Label::new(args).size(LabelSize::Small), + Label::new(args) + .size(LabelSize::Small) + .color(Color::Muted), ) .visible_on_hover(format!( "command-entry-label-{ix}" From 21336eb12491e3b505437c8d3fa99e7aaf64460f Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Wed, 2 Oct 2024 10:10:53 -0400 Subject: [PATCH 180/228] docs: Add note about forking the extensions repo to a personal GitHub account (#18631) This PR adds a note to the docs encouraging folks to fork the `zed-industries/extensions` repo to a personal GitHub account rather than a GitHub organization, as this makes life easier for everyone. Release Notes: - N/A --- docs/src/extensions/developing-extensions.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/src/extensions/developing-extensions.md b/docs/src/extensions/developing-extensions.md index c1330a4c49eef..503d253fc33bb 100644 --- a/docs/src/extensions/developing-extensions.md +++ b/docs/src/extensions/developing-extensions.md @@ -86,6 +86,8 @@ If you already have a published extension with the same name installed, your dev To publish an extension, open a PR to [the `zed-industries/extensions` repo](https://github.com/zed-industries/extensions). +> Note: It is very helpful if you fork the `zed-industries/extensions` repo to a personal GitHub account instead of a GitHub organization, as this allows Zed staff to push any needed changes to your PR to expedite the publishing process. 
+ In your PR, do the following: 1. Add your extension as a Git submodule within the `extensions/` directory From cfd61f933773ff152d3520cba84f6ac69453c7bf Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Wed, 2 Oct 2024 10:38:23 -0400 Subject: [PATCH 181/228] Clean up formatting in `Cargo.toml` (#18632) This PR cleans up some formatting in some `Cargo.toml` files. Release Notes: - N/A --- crates/languages/Cargo.toml | 35 +++++++++++++++++------------------ crates/util/Cargo.toml | 8 ++++---- crates/worktree/Cargo.toml | 8 ++++---- crates/zed/Cargo.toml | 2 +- 4 files changed, 26 insertions(+), 27 deletions(-) diff --git a/crates/languages/Cargo.toml b/crates/languages/Cargo.toml index 5cb5455dd168d..19842efac20cf 100644 --- a/crates/languages/Cargo.toml +++ b/crates/languages/Cargo.toml @@ -46,6 +46,7 @@ lsp.workspace = true node_runtime.workspace = true paths.workspace = true project.workspace = true +protols-tree-sitter-proto = { workspace = true, optional = true } regex.workspace = true rope.workspace = true rust-embed.workspace = true @@ -55,26 +56,24 @@ settings.workspace = true smol.workspace = true task.workspace = true toml.workspace = true +tree-sitter = { workspace = true, optional = true } +tree-sitter-bash = { workspace = true, optional = true } +tree-sitter-c = { workspace = true, optional = true } +tree-sitter-cpp = { workspace = true, optional = true } +tree-sitter-css = { workspace = true, optional = true } +tree-sitter-go = { workspace = true, optional = true } +tree-sitter-go-mod = { workspace = true, optional = true } +tree-sitter-gowork = { workspace = true, optional = true } +tree-sitter-jsdoc = { workspace = true, optional = true } +tree-sitter-json = { workspace = true, optional = true } +tree-sitter-md = { workspace = true, optional = true } +tree-sitter-python = { workspace = true, optional = true } +tree-sitter-regex = { workspace = true, optional = true } +tree-sitter-rust = { workspace = true, optional = true } +tree-sitter-typescript = { workspace = true, optional = true } +tree-sitter-yaml = { workspace = true, optional = true } util.workspace = true -tree-sitter-bash = {workspace = true, optional = true} -tree-sitter-c = {workspace = true, optional = true} -tree-sitter-cpp = {workspace = true, optional = true} -tree-sitter-css = {workspace = true, optional = true} -tree-sitter-go = {workspace = true, optional = true} -tree-sitter-go-mod = {workspace = true, optional = true} -tree-sitter-gowork = {workspace = true, optional = true} -tree-sitter-jsdoc = {workspace = true, optional = true} -tree-sitter-json = {workspace = true, optional = true} -tree-sitter-md = {workspace = true, optional = true} -protols-tree-sitter-proto = {workspace = true, optional = true} -tree-sitter-python = {workspace = true, optional = true} -tree-sitter-regex = {workspace = true, optional = true} -tree-sitter-rust = {workspace = true, optional = true} -tree-sitter-typescript = {workspace = true, optional = true} -tree-sitter-yaml = {workspace = true, optional = true} -tree-sitter = {workspace = true, optional = true} - [dev-dependencies] text.workspace = true theme = { workspace = true, features = ["test-support"] } diff --git a/crates/util/Cargo.toml b/crates/util/Cargo.toml index 6257ffd64269f..58c4686bf9052 100644 --- a/crates/util/Cargo.toml +++ b/crates/util/Cargo.toml @@ -17,19 +17,19 @@ test-support = ["tempfile", "git2", "rand"] [dependencies] anyhow.workspace = true +async-fs.workspace = true collections.workspace = true dirs.workspace = true +futures-lite.workspace = true 
futures.workspace = true git2 = { workspace = true, optional = true } globset.workspace = true log.workspace = true -rand = {workspace = true, optional = true} +rand = { workspace = true, optional = true } regex.workspace = true rust-embed.workspace = true serde.workspace = true serde_json.workspace = true -async-fs.workspace = true -futures-lite.workspace = true take-until = "0.2.0" tempfile = { workspace = true, optional = true } unicase.workspace = true @@ -39,5 +39,5 @@ tendril = "0.4.3" [dev-dependencies] git2.workspace = true -tempfile.workspace = true rand.workspace = true +tempfile.workspace = true diff --git a/crates/worktree/Cargo.toml b/crates/worktree/Cargo.toml index 1186c988adb17..41221d7b6e818 100644 --- a/crates/worktree/Cargo.toml +++ b/crates/worktree/Cargo.toml @@ -48,12 +48,12 @@ text.workspace = true util.workspace = true [dev-dependencies] -clock = {workspace = true, features = ["test-support"]} +clock = { workspace = true, features = ["test-support"] } collections = { workspace = true, features = ["test-support"] } env_logger.workspace = true git2.workspace = true -gpui = {workspace = true, features = ["test-support"]} +gpui = { workspace = true, features = ["test-support"] } http_client.workspace = true -rand.workspace = true -settings = {workspace = true, features = ["test-support"]} pretty_assertions.workspace = true +rand.workspace = true +settings = { workspace = true, features = ["test-support"] } diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index 775a59e475cb4..04e2be7ed0266 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -63,7 +63,7 @@ language.workspace = true language_model.workspace = true language_selector.workspace = true language_tools.workspace = true -languages = {workspace = true, features = ["load-grammars"] } +languages = { workspace = true, features = ["load-grammars"] } libc.workspace = true log.workspace = true markdown_preview.workspace = true From 028d7a624f24a143858b316bee7edc69911918ee Mon Sep 17 00:00:00 2001 From: Joseph T Lyons Date: Wed, 2 Oct 2024 11:03:57 -0400 Subject: [PATCH 182/228] v0.157.x dev --- Cargo.lock | 2 +- crates/zed/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 7c92ef0f5257c..b69e4541cc9e2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -14398,7 +14398,7 @@ dependencies = [ [[package]] name = "zed" -version = "0.156.0" +version = "0.157.0" dependencies = [ "activity_indicator", "anyhow", diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index 04e2be7ed0266..e22f75f5bb3e5 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -2,7 +2,7 @@ description = "The fast, collaborative code editor." edition = "2021" name = "zed" -version = "0.156.0" +version = "0.157.0" publish = false license = "GPL-3.0-or-later" authors = ["Zed Team "] From 2cd12f84def5bf4a05ca4e19138d33e4e1d6073c Mon Sep 17 00:00:00 2001 From: Victor Roetman Date: Wed, 2 Oct 2024 12:18:41 -0400 Subject: [PATCH 183/228] docs: Add FIPS mode error to Linux troubleshooting (#18407) - Closes: #18335 Update linux.md with a workaround for the ``` crypto/fips/fips.c:154: OpenSSL internal error: FATAL FIPS SELFTEST FAILURE ``` error when using bundled libssl and libcrypto. 
Co-authored-by: Peter Tripp --- docs/src/linux.md | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/docs/src/linux.md b/docs/src/linux.md index 33d12d0a8ca02..4abd7de8ba8eb 100644 --- a/docs/src/linux.md +++ b/docs/src/linux.md @@ -144,3 +144,20 @@ If you are seeing "too many open files" then first try `sysctl fs.inotify`. - You should see that `max_user_watches` is 8000 or higher (you can change the limit with `sudo sysctl fs.inotify.max_user_watches=64000`). Zed needs one watch per directory in all your open projects + one per git repository + a handful more for settings, themes, keymaps, extensions. It is also possible that you are running out of file descriptors. You can check the limits with `ulimit` and update them by editing `/etc/security/limits.conf`. + +### FIPS Mode OpenSSL internal error {#fips} + +If your machine is running in FIPS mode (`cat /proc/sys/crypto/fips_enabled` is set to `1`) Zed may fail to start and output the following when launched with `zed --foreground`: + +``` +crypto/fips/fips.c:154: OpenSSL internal error: FATAL FIPS SELFTEST FAILURE +``` + +As a workaround, remove the bundled `libssl` and `libcrypto` libraries from the `zed.app/lib` directory: + +``` +rm ~/.local/zed.app/lib/libssl.so.1.1 +rm ~/.local/zed.app/lib/libcrypto.so.1.1 +``` + +This will force zed to fallback to the system `libssl` and `libcrypto` libraries. From 167af4bc1d3acf3bc2b7d624983ce52602d9bc08 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Wed, 2 Oct 2024 12:33:13 -0400 Subject: [PATCH 184/228] Use `const` over `static` for string literals (#18635) I noticed a few places where we were storing `&'static str`s in `static`s instead of `const`s. This PR updates them to use `const`. Release Notes: - N/A --- crates/client/src/client.rs | 2 +- crates/collab/src/api/events.rs | 2 +- crates/editor/src/clangd_ext.rs | 2 +- crates/editor/src/rust_analyzer_ext.rs | 2 +- crates/gpui/src/taffy.rs | 2 +- crates/task/src/vscode_format.rs | 4 ++-- crates/worktree/src/worktree.rs | 2 +- crates/zed/src/reliability.rs | 2 +- 8 files changed, 9 insertions(+), 9 deletions(-) diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index 48bd646d8aa70..d565d620c3c20 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -1752,7 +1752,7 @@ impl CredentialsProvider for KeychainCredentialsProvider { } /// prefix for the zed:// url scheme -pub static ZED_URL_SCHEME: &str = "zed"; +pub const ZED_URL_SCHEME: &str = "zed"; /// Parses the given link into a Zed link. 
/// diff --git a/crates/collab/src/api/events.rs b/crates/collab/src/api/events.rs index 377741f434c2f..bbfa69c0b8f70 100644 --- a/crates/collab/src/api/events.rs +++ b/crates/collab/src/api/events.rs @@ -23,7 +23,7 @@ use telemetry_events::{ }; use uuid::Uuid; -static CRASH_REPORTS_BUCKET: &str = "zed-crash-reports"; +const CRASH_REPORTS_BUCKET: &str = "zed-crash-reports"; pub fn router() -> Router { Router::new() diff --git a/crates/editor/src/clangd_ext.rs b/crates/editor/src/clangd_ext.rs index 2f0f7aaee47e7..501f81b1073df 100644 --- a/crates/editor/src/clangd_ext.rs +++ b/crates/editor/src/clangd_ext.rs @@ -9,7 +9,7 @@ use crate::lsp_ext::find_specific_language_server_in_selection; use crate::{element::register_action, Editor, SwitchSourceHeader}; -static CLANGD_SERVER_NAME: &str = "clangd"; +const CLANGD_SERVER_NAME: &str = "clangd"; fn is_c_language(language: &Language) -> bool { return language.name() == "C++".into() || language.name() == "C".into(); diff --git a/crates/editor/src/rust_analyzer_ext.rs b/crates/editor/src/rust_analyzer_ext.rs index db17eaab282fa..fa39e5c9d49ce 100644 --- a/crates/editor/src/rust_analyzer_ext.rs +++ b/crates/editor/src/rust_analyzer_ext.rs @@ -10,7 +10,7 @@ use crate::{ ExpandMacroRecursively, }; -static RUST_ANALYZER_NAME: &str = "rust-analyzer"; +const RUST_ANALYZER_NAME: &str = "rust-analyzer"; fn is_rust_language(language: &Language) -> bool { language.name() == "Rust".into() diff --git a/crates/gpui/src/taffy.rs b/crates/gpui/src/taffy.rs index a6a61031c9ce1..a80c734b81bbf 100644 --- a/crates/gpui/src/taffy.rs +++ b/crates/gpui/src/taffy.rs @@ -24,7 +24,7 @@ pub struct TaffyLayoutEngine { nodes_to_measure: FxHashMap, } -static EXPECT_MESSAGE: &str = "we should avoid taffy layout errors by construction if possible"; +const EXPECT_MESSAGE: &str = "we should avoid taffy layout errors by construction if possible"; impl TaffyLayoutEngine { pub fn new() -> Self { diff --git a/crates/task/src/vscode_format.rs b/crates/task/src/vscode_format.rs index 74be56b5b1769..c150ee807fb25 100644 --- a/crates/task/src/vscode_format.rs +++ b/crates/task/src/vscode_format.rs @@ -200,7 +200,7 @@ mod tests { #[test] fn can_deserialize_ts_tasks() { - static TYPESCRIPT_TASKS: &str = include_str!("../test_data/typescript.json"); + const TYPESCRIPT_TASKS: &str = include_str!("../test_data/typescript.json"); let vscode_definitions: VsCodeTaskFile = serde_json_lenient::from_str(TYPESCRIPT_TASKS).unwrap(); @@ -290,7 +290,7 @@ mod tests { #[test] fn can_deserialize_rust_analyzer_tasks() { - static RUST_ANALYZER_TASKS: &str = include_str!("../test_data/rust-analyzer.json"); + const RUST_ANALYZER_TASKS: &str = include_str!("../test_data/rust-analyzer.json"); let vscode_definitions: VsCodeTaskFile = serde_json_lenient::from_str(RUST_ANALYZER_TASKS).unwrap(); let expected = vec![ diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index a3daf8ea2eade..40cd465d9ba7d 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -351,7 +351,7 @@ pub enum Event { DeletedEntry(ProjectEntryId), } -static EMPTY_PATH: &str = ""; +const EMPTY_PATH: &str = ""; impl EventEmitter for Worktree {} diff --git a/crates/zed/src/reliability.rs b/crates/zed/src/reliability.rs index 50e5a05b823ed..b40bbc78bdb47 100644 --- a/crates/zed/src/reliability.rs +++ b/crates/zed/src/reliability.rs @@ -441,7 +441,7 @@ async fn upload_previous_panics( Ok::<_, anyhow::Error>(most_recent_panic) } -static LAST_CRASH_UPLOADED: &str = "LAST_CRASH_UPLOADED"; +const 
LAST_CRASH_UPLOADED: &str = "LAST_CRASH_UPLOADED"; /// upload crashes from apple's diagnostic reports to our server. /// (only if telemetry is enabled) From 845991c0e59a34e2d98300237956ede553c44289 Mon Sep 17 00:00:00 2001 From: Junseong Park Date: Thu, 3 Oct 2024 01:35:35 +0900 Subject: [PATCH 185/228] docs: Add missing UI font settings to "Configuring Zed" (#18267) - Add missing `ui_font` options in `configuring-zed.md` Release Notes: - N/A --------- Co-authored-by: Marshall Bowers --- docs/src/configuring-zed.md | 68 +++++++++++++++++++++++++++++++++++++ 1 file changed, 68 insertions(+) diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index fbd5fa53cfbd8..230255597e35c 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -2180,6 +2180,64 @@ Float values between `0.0` and `0.9`, where: } ``` +## UI Font Family + +- Description: The name of the font to use for text in the UI. +- Setting: `ui_font_family` +- Default: `Zed Plex Sans` + +**Options** + +The name of any font family installed on the system. + +## UI Font Features + +- Description: The OpenType features to enable for text in the UI. +- Setting: `ui_font_features` +- Default: `null` +- Platform: macOS and Windows. + +**Options** + +Zed supports all OpenType features that can be enabled or disabled for a given UI font, as well as setting values for font features. + +For example, to disable font ligatures, add the following to your settings: + +```json +{ + "ui_font_features": { + "calt": false + } +} +``` + +You can also set other OpenType features, like setting `cv01` to `7`: + +```json +{ + "ui_font_features": { + "cv01": 7 + } +} +``` + +## UI Font Fallbacks + +- Description: The font fallbacks to use for text in the UI. +- Setting: `ui_font_fallbacks` +- Default: `null` +- Platform: macOS and Windows. + +**Options** + +For example, to use `Nerd Font` as a fallback, add the following to your settings: + +```json +{ + "ui_font_fallbacks": ["Nerd Font"] +} +``` + ## UI Font Size - Description: The default font size for text in the UI. @@ -2190,6 +2248,16 @@ Float values between `0.0` and `0.9`, where: `integer` values from `6` to `100` pixels (inclusive) +## UI Font Weight + +- Description: The default font weight for text in the UI. +- Setting: `ui_font_weight` +- Default: `400` + +**Options** + +`integer` values between `100` and `900` + ## An example configuration: ```json From 5aaaed52fc46fdc3029133fac4f96a7652681ea9 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Wed, 2 Oct 2024 18:57:03 +0200 Subject: [PATCH 186/228] Adjust spacing and sizing of buffer search bar icon buttons (#18638) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR mostly makes all of the search bar icon buttons all squared and adjusts the spacing between them, as well as the additional input that appears when you toggle the "Replace all" action. 
Screenshot 2024-10-02 at 6 08 30 PM --- Release Notes: - N/A --- crates/search/src/buffer_search.rs | 133 +++++++++++++++------------- crates/search/src/project_search.rs | 2 +- crates/search/src/search.rs | 3 +- crates/search/src/search_bar.rs | 3 +- 4 files changed, 76 insertions(+), 65 deletions(-) diff --git a/crates/search/src/buffer_search.rs b/crates/search/src/buffer_search.rs index 42b267c3c9563..5846a6efc51dd 100644 --- a/crates/search/src/buffer_search.rs +++ b/crates/search/src/buffer_search.rs @@ -27,7 +27,7 @@ use settings::Settings; use std::sync::Arc; use theme::ThemeSettings; -use ui::{h_flex, prelude::*, IconButton, IconName, Tooltip, BASE_REM_SIZE_IN_PX}; +use ui::{h_flex, prelude::*, IconButton, IconButtonShape, IconName, Tooltip, BASE_REM_SIZE_IN_PX}; use util::ResultExt; use workspace::{ item::ItemHandle, @@ -200,7 +200,7 @@ impl Render for BufferSearchBar { }; let search_line = h_flex() - .mb_1() + .gap_2() .child( h_flex() .id("editor-scroll") @@ -208,7 +208,6 @@ impl Render for BufferSearchBar { .flex_1() .h_8() .px_2() - .mr_2() .py_1() .border_1() .border_color(editor_border) @@ -244,66 +243,70 @@ impl Render for BufferSearchBar { })) }), ) - .when(supported_options.replacement, |this| { - this.child( - IconButton::new("buffer-search-bar-toggle-replace-button", IconName::Replace) - .style(ButtonStyle::Subtle) - .when(self.replace_enabled, |button| { - button.style(ButtonStyle::Filled) - }) - .on_click(cx.listener(|this, _: &ClickEvent, cx| { - this.toggle_replace(&ToggleReplace, cx); - })) - .selected(self.replace_enabled) - .size(ButtonSize::Compact) - .tooltip({ - let focus_handle = focus_handle.clone(); - move |cx| { - Tooltip::for_action_in( - "Toggle replace", - &ToggleReplace, - &focus_handle, - cx, - ) - } - }), - ) - }) - .when(supported_options.selection, |this| { - this.child( - IconButton::new( - "buffer-search-bar-toggle-search-selection-button", - IconName::SearchSelection, - ) - .style(ButtonStyle::Subtle) - .when(self.selection_search_enabled, |button| { - button.style(ButtonStyle::Filled) - }) - .on_click(cx.listener(|this, _: &ClickEvent, cx| { - this.toggle_selection(&ToggleSelection, cx); - })) - .selected(self.selection_search_enabled) - .size(ButtonSize::Compact) - .tooltip({ - let focus_handle = focus_handle.clone(); - move |cx| { - Tooltip::for_action_in( - "Toggle Search Selection", - &ToggleSelection, - &focus_handle, - cx, - ) - } - }), - ) - }) .child( h_flex() .flex_none() + .gap_0p5() + .when(supported_options.replacement, |this| { + this.child( + IconButton::new( + "buffer-search-bar-toggle-replace-button", + IconName::Replace, + ) + .style(ButtonStyle::Subtle) + .shape(IconButtonShape::Square) + .when(self.replace_enabled, |button| { + button.style(ButtonStyle::Filled) + }) + .on_click(cx.listener(|this, _: &ClickEvent, cx| { + this.toggle_replace(&ToggleReplace, cx); + })) + .selected(self.replace_enabled) + .tooltip({ + let focus_handle = focus_handle.clone(); + move |cx| { + Tooltip::for_action_in( + "Toggle Replace", + &ToggleReplace, + &focus_handle, + cx, + ) + } + }), + ) + }) + .when(supported_options.selection, |this| { + this.child( + IconButton::new( + "buffer-search-bar-toggle-search-selection-button", + IconName::SearchSelection, + ) + .style(ButtonStyle::Subtle) + .shape(IconButtonShape::Square) + .when(self.selection_search_enabled, |button| { + button.style(ButtonStyle::Filled) + }) + .on_click(cx.listener(|this, _: &ClickEvent, cx| { + this.toggle_selection(&ToggleSelection, cx); + })) + 
.selected(self.selection_search_enabled) + .tooltip({ + let focus_handle = focus_handle.clone(); + move |cx| { + Tooltip::for_action_in( + "Toggle Search Selection", + &ToggleSelection, + &focus_handle, + cx, + ) + } + }), + ) + }) .child( IconButton::new("select-all", ui::IconName::SelectAll) .on_click(|_, cx| cx.dispatch_action(SelectAllMatches.boxed_clone())) - .size(ButtonSize::Compact) + .shape(IconButtonShape::Square) .tooltip({ let focus_handle = focus_handle.clone(); move |cx| { @@ -332,11 +335,13 @@ impl Render for BufferSearchBar { )) .when(!narrow_mode, |this| { this.child(h_flex().ml_2().min_w(rems_from_px(40.)).child( - Label::new(match_text).color(if self.active_match_index.is_some() { - Color::Default - } else { - Color::Disabled - }), + Label::new(match_text).size(LabelSize::Small).color( + if self.active_match_index.is_some() { + Color::Default + } else { + Color::Disabled + }, + ), )) }), ); @@ -367,8 +372,10 @@ impl Render for BufferSearchBar { .child( h_flex() .flex_none() + .gap_0p5() .child( IconButton::new("search-replace-next", ui::IconName::ReplaceNext) + .shape(IconButtonShape::Square) .tooltip({ let focus_handle = focus_handle.clone(); move |cx| { @@ -386,6 +393,7 @@ impl Render for BufferSearchBar { ) .child( IconButton::new("search-replace-all", ui::IconName::ReplaceAll) + .shape(IconButtonShape::Square) .tooltip({ let focus_handle = focus_handle.clone(); move |cx| { @@ -441,6 +449,7 @@ impl Render for BufferSearchBar { .when(!narrow_mode, |div| { div.child( IconButton::new(SharedString::from("Close"), IconName::Close) + .shape(IconButtonShape::Square) .tooltip(move |cx| { Tooltip::for_action("Close Search Bar", &Dismiss, cx) }) diff --git a/crates/search/src/project_search.rs b/crates/search/src/project_search.rs index 12e6ccc12dc49..693d4b265867f 100644 --- a/crates/search/src/project_search.rs +++ b/crates/search/src/project_search.rs @@ -1634,7 +1634,7 @@ impl Render for ProjectSearchBar { let focus_handle = focus_handle.clone(); move |cx| { Tooltip::for_action_in( - "Toggle replace", + "Toggle Replace", &ToggleReplace, &focus_handle, cx, diff --git a/crates/search/src/search.rs b/crates/search/src/search.rs index 0ceb8e710b5f4..60ff80834feed 100644 --- a/crates/search/src/search.rs +++ b/crates/search/src/search.rs @@ -5,7 +5,7 @@ use gpui::{actions, Action, AppContext, FocusHandle, IntoElement}; use project::search::SearchQuery; pub use project_search::ProjectSearchView; use ui::{prelude::*, Tooltip}; -use ui::{ButtonStyle, IconButton}; +use ui::{ButtonStyle, IconButton, IconButtonShape}; use workspace::notifications::NotificationId; use workspace::{Toast, Workspace}; @@ -112,6 +112,7 @@ impl SearchOptions { IconButton::new(self.label(), self.icon()) .on_click(action) .style(ButtonStyle::Subtle) + .shape(IconButtonShape::Square) .selected(active) .tooltip({ let action = self.to_toggle_action(); diff --git a/crates/search/src/search_bar.rs b/crates/search/src/search_bar.rs index 102f04c4b95c2..080679663d0ba 100644 --- a/crates/search/src/search_bar.rs +++ b/crates/search/src/search_bar.rs @@ -1,6 +1,6 @@ use gpui::{Action, FocusHandle, IntoElement}; -use ui::IconButton; use ui::{prelude::*, Tooltip}; +use ui::{IconButton, IconButtonShape}; pub(super) fn render_nav_button( icon: ui::IconName, @@ -13,6 +13,7 @@ pub(super) fn render_nav_button( SharedString::from(format!("search-nav-button-{}", action.name())), icon, ) + .shape(IconButtonShape::Square) .on_click(|_, cx| cx.dispatch_action(action.boxed_clone())) .tooltip(move |cx| 
Tooltip::for_action_in(tooltip, action, &focus_handle, cx)) .disabled(!active) From a5f50e5c1e7fc982fad3bc700e55aee3243791f1 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Wed, 2 Oct 2024 18:57:20 +0200 Subject: [PATCH 187/228] Tweak warning diagnostic toggle (#18637) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR adds color to the warning diagnostic toggle, so that, if it's turned on, the warning icon is yellow. And, in the opposite case, it's muted. | Turned on | Turned off | |--------|--------| | Screenshot 2024-10-02 at 6 08 30 PM | Screenshot 2024-10-02 at 6 08 36 PM | --- Release Notes: - N/A --- crates/diagnostics/src/toolbar_controls.rs | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/crates/diagnostics/src/toolbar_controls.rs b/crates/diagnostics/src/toolbar_controls.rs index b546db50a064b..0d3000814262a 100644 --- a/crates/diagnostics/src/toolbar_controls.rs +++ b/crates/diagnostics/src/toolbar_controls.rs @@ -1,7 +1,7 @@ use crate::ProjectDiagnosticsEditor; use gpui::{EventEmitter, ParentElement, Render, View, ViewContext, WeakView}; use ui::prelude::*; -use ui::{IconButton, IconName, Tooltip}; +use ui::{IconButton, IconButtonShape, IconName, Tooltip}; use workspace::{item::ItemHandle, ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView}; pub struct ToolbarControls { @@ -33,11 +33,19 @@ impl Render for ToolbarControls { "Include Warnings" }; + let warning_color = if include_warnings { + Color::Warning + } else { + Color::Muted + }; + h_flex() + .gap_1() .when(has_stale_excerpts, |div| { div.child( IconButton::new("update-excerpts", IconName::Update) .icon_color(Color::Info) + .shape(IconButtonShape::Square) .disabled(is_updating) .tooltip(move |cx| Tooltip::text("Update excerpts", cx)) .on_click(cx.listener(|this, _, cx| { @@ -51,6 +59,8 @@ impl Render for ToolbarControls { }) .child( IconButton::new("toggle-warnings", IconName::Warning) + .icon_color(warning_color) + .shape(IconButtonShape::Square) .tooltip(move |cx| Tooltip::text(tooltip, cx)) .on_click(cx.listener(|this, _, cx| { if let Some(editor) = this.editor() { From 209ebb0c65bc0ba56e4e0bad1a7b7e475414082b Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Wed, 2 Oct 2024 10:44:16 -0700 Subject: [PATCH 188/228] Revert "Fix blurry cursor on Wayland at a scale other than 100%" (#18642) Closes #17771 Reverts zed-industries/zed#17496 This PR turns out to need more work than I thought when I merged it. 
Release Notes: - Linux: Fix a bug where the cursor would be the wrong size on Wayland --- .../gpui/src/platform/linux/wayland/client.rs | 3 +- .../gpui/src/platform/linux/wayland/cursor.rs | 33 ++++--------------- 2 files changed, 8 insertions(+), 28 deletions(-) diff --git a/crates/gpui/src/platform/linux/wayland/client.rs b/crates/gpui/src/platform/linux/wayland/client.rs index f0015a7e5820b..ab87bb20242ed 100644 --- a/crates/gpui/src/platform/linux/wayland/client.rs +++ b/crates/gpui/src/platform/linux/wayland/client.rs @@ -477,8 +477,7 @@ impl WaylandClient { .as_ref() .map(|primary_selection_manager| primary_selection_manager.get_device(&seat, &qh, ())); - // FIXME: Determine the scaling factor dynamically by the compositor - let mut cursor = Cursor::new(&conn, &globals, 24, 2); + let mut cursor = Cursor::new(&conn, &globals, 24); handle .insert_source(XDPEventSource::new(&common.background_executor), { diff --git a/crates/gpui/src/platform/linux/wayland/cursor.rs b/crates/gpui/src/platform/linux/wayland/cursor.rs index ea29eee73c71f..6a527650429a4 100644 --- a/crates/gpui/src/platform/linux/wayland/cursor.rs +++ b/crates/gpui/src/platform/linux/wayland/cursor.rs @@ -11,7 +11,6 @@ pub(crate) struct Cursor { theme_name: Option, surface: WlSurface, size: u32, - scale: u32, shm: WlShm, connection: Connection, } @@ -24,7 +23,7 @@ impl Drop for Cursor { } impl Cursor { - pub fn new(connection: &Connection, globals: &Globals, size: u32, scale: u32) -> Self { + pub fn new(connection: &Connection, globals: &Globals, size: u32) -> Self { Self { theme: CursorTheme::load(&connection, globals.shm.clone(), size).log_err(), theme_name: None, @@ -32,7 +31,6 @@ impl Cursor { shm: globals.shm.clone(), connection: connection.clone(), size, - scale, } } @@ -40,18 +38,14 @@ impl Cursor { if let Some(size) = size { self.size = size; } - if let Some(theme) = CursorTheme::load_from_name( - &self.connection, - self.shm.clone(), - theme_name, - self.size * self.scale, - ) - .log_err() + if let Some(theme) = + CursorTheme::load_from_name(&self.connection, self.shm.clone(), theme_name, self.size) + .log_err() { self.theme = Some(theme); self.theme_name = Some(theme_name.to_string()); } else if let Some(theme) = - CursorTheme::load(&self.connection, self.shm.clone(), self.size * self.scale).log_err() + CursorTheme::load(&self.connection, self.shm.clone(), self.size).log_err() { self.theme = Some(theme); self.theme_name = None; @@ -97,22 +91,9 @@ impl Cursor { let (width, height) = buffer.dimensions(); let (hot_x, hot_y) = buffer.hotspot(); - let scaled_width = width / self.scale; - let scaled_height = height / self.scale; - let scaled_hot_x = hot_x / self.scale; - let scaled_hot_y = hot_y / self.scale; - - self.surface.set_buffer_scale(self.scale as i32); - - wl_pointer.set_cursor( - serial_id, - Some(&self.surface), - scaled_hot_x as i32, - scaled_hot_y as i32, - ); + wl_pointer.set_cursor(serial_id, Some(&self.surface), hot_x as i32, hot_y as i32); self.surface.attach(Some(&buffer), 0, 0); - self.surface - .damage(0, 0, scaled_width as i32, scaled_height as i32); + self.surface.damage(0, 0, width as i32, height as i32); self.surface.commit(); } } else { From 0e8276560f2e9de2dd1783ef8d9e208d01dada44 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Wed, 2 Oct 2024 14:10:19 -0400 Subject: [PATCH 189/228] language: Update buffer doc comments (#18646) This PR updates the doc comments in `buffer.rs` to use the standard style for linking to other items. 
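For reference, the "standard style" here is rustdoc's intra-doc link written with inline code formatting. A minimal before/after pair, matching the hunks below:

```rust
/// Before: bare brackets link, but render without code styling.
/// Returns the [Capability] of this buffer.

/// After: backticks inside the brackets produce a code-styled intra-doc link.
/// Returns the [`Capability`] of this buffer.
```
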
Release Notes: - N/A --- crates/language/src/buffer.rs | 45 ++++++++++++++++++----------------- 1 file changed, 23 insertions(+), 22 deletions(-) diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 8afc4d389db7f..20ecd9594b9d4 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -73,7 +73,7 @@ pub use lsp::DiagnosticSeverity; /// a diff against the contents of its file. pub static BUFFER_DIFF_TASK: LazyLock = LazyLock::new(TaskLabel::new); -/// Indicate whether a [Buffer] has permissions to edit. +/// Indicate whether a [`Buffer`] has permissions to edit. #[derive(PartialEq, Clone, Copy, Debug)] pub enum Capability { /// The buffer is a mutable replica. @@ -211,7 +211,7 @@ pub struct Diagnostic { /// /// When a language server produces a diagnostic with /// one or more associated diagnostics, those diagnostics are all - /// assigned a single group id. + /// assigned a single group ID. pub group_id: usize, /// Whether this diagnostic is the primary diagnostic for its group. /// @@ -718,7 +718,7 @@ impl Buffer { self } - /// Returns the [Capability] of this buffer. + /// Returns the [`Capability`] of this buffer. pub fn capability(&self) -> Capability { self.capability } @@ -728,7 +728,7 @@ impl Buffer { self.capability == Capability::ReadOnly } - /// Builds a [Buffer] with the given underlying [TextBuffer], diff base, [File] and [Capability]. + /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`]. pub fn build( buffer: TextBuffer, diff_base: Option, @@ -941,7 +941,7 @@ impl Buffer { self.syntax_map.lock().language_registry() } - /// Assign the buffer a new [Capability]. + /// Assign the buffer a new [`Capability`]. pub fn set_capability(&mut self, capability: Capability, cx: &mut ModelContext) { self.capability = capability; cx.emit(BufferEvent::CapabilityChanged) @@ -1032,7 +1032,7 @@ impl Buffer { cx.notify(); } - /// Updates the [File] backing this buffer. This should be called when + /// Updates the [`File`] backing this buffer. This should be called when /// the file has changed or has been deleted. pub fn file_updated(&mut self, new_file: Arc, cx: &mut ModelContext) { let mut file_changed = false; @@ -1071,7 +1071,7 @@ impl Buffer { } } - /// Returns the current diff base, see [Buffer::set_diff_base]. + /// Returns the current diff base, see [`Buffer::set_diff_base`]. pub fn diff_base(&self) -> Option<&Rope> { match self.diff_base.as_ref()? { BufferDiffBase::Git(rope) | BufferDiffBase::PastBufferVersion { rope, .. } => { @@ -1142,12 +1142,12 @@ impl Buffer { })) } - /// Returns the primary [Language] assigned to this [Buffer]. + /// Returns the primary [`Language`] assigned to this [`Buffer`]. pub fn language(&self) -> Option<&Arc> { self.language.as_ref() } - /// Returns the [Language] at the given location. + /// Returns the [`Language`] at the given location. pub fn language_at(&self, position: D) -> Option> { let offset = position.to_offset(self); self.syntax_map @@ -2730,6 +2730,7 @@ impl BufferSnapshot { .collect(); (captures, highlight_maps) } + /// Iterates over chunks of text in the given range of the buffer. Text is chunked /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also /// returned in chunks where each chunk has a single syntax highlighting style and @@ -2781,12 +2782,12 @@ impl BufferSnapshot { .last() } - /// Returns the main [Language] + /// Returns the main [`Language`]. 
pub fn language(&self) -> Option<&Arc> { self.language.as_ref() } - /// Returns the [Language] at the given location. + /// Returns the [`Language`] at the given location. pub fn language_at(&self, position: D) -> Option<&Arc> { self.syntax_layer_at(position) .map(|info| info.language) @@ -2806,7 +2807,7 @@ impl BufferSnapshot { CharClassifier::new(self.language_scope_at(point)) } - /// Returns the [LanguageScope] at the given location. + /// Returns the [`LanguageScope`] at the given location. pub fn language_scope_at(&self, position: D) -> Option { let offset = position.to_offset(self); let mut scope = None; @@ -2961,7 +2962,7 @@ impl BufferSnapshot { /// Returns the outline for the buffer. /// - /// This method allows passing an optional [SyntaxTheme] to + /// This method allows passing an optional [`SyntaxTheme`] to /// syntax-highlight the returned symbols. pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option> { self.outline_items_containing(0..self.len(), true, theme) @@ -2970,7 +2971,7 @@ impl BufferSnapshot { /// Returns all the symbols that contain the given position. /// - /// This method allows passing an optional [SyntaxTheme] to + /// This method allows passing an optional [`SyntaxTheme`] to /// syntax-highlight the returned symbols. pub fn symbols_containing( &self, @@ -3213,7 +3214,7 @@ impl BufferSnapshot { } /// For each grammar in the language, runs the provided - /// [tree_sitter::Query] against the given range. + /// [`tree_sitter::Query`] against the given range. pub fn matches( &self, range: Range, @@ -3774,7 +3775,7 @@ impl BufferSnapshot { }) } - /// Whether the buffer contains any git changes. + /// Whether the buffer contains any Git changes. pub fn has_git_diff(&self) -> bool { !self.git_diff.is_empty() } @@ -3856,7 +3857,7 @@ impl BufferSnapshot { } /// Returns all the diagnostic groups associated with the given - /// language server id. If no language server id is provided, + /// language server ID. If no language server ID is provided, /// all diagnostics groups are returned. pub fn diagnostic_groups( &self, @@ -4239,7 +4240,7 @@ impl Default for Diagnostic { } impl IndentSize { - /// Returns an [IndentSize] representing the given spaces. + /// Returns an [`IndentSize`] representing the given spaces. pub fn spaces(len: u32) -> Self { Self { len, @@ -4247,7 +4248,7 @@ impl IndentSize { } } - /// Returns an [IndentSize] representing a tab. + /// Returns an [`IndentSize`] representing a tab. pub fn tab() -> Self { Self { len: 1, @@ -4255,12 +4256,12 @@ impl IndentSize { } } - /// An iterator over the characters represented by this [IndentSize]. + /// An iterator over the characters represented by this [`IndentSize`]. pub fn chars(&self) -> impl Iterator { iter::repeat(self.char()).take(self.len as usize) } - /// The character representation of this [IndentSize]. + /// The character representation of this [`IndentSize`]. pub fn char(&self) -> char { match self.kind { IndentKind::Space => ' ', @@ -4268,7 +4269,7 @@ impl IndentSize { } } - /// Consumes the current [IndentSize] and returns a new one that has + /// Consumes the current [`IndentSize`] and returns a new one that has /// been shrunk or enlarged by the given size along the given direction. 
pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self { match direction { From 7c4615519befe8c35b25e22620d45b07b4b9c401 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Wed, 2 Oct 2024 14:23:59 -0400 Subject: [PATCH 190/228] editor: Ensure proposed changes editor is syntax-highlighted when opened (#18648) This PR fixes an issue where the proposed changes editor would not have any syntax highlighting until a modification was made. When creating the branch buffer we reparse the buffer to rebuild the syntax map. Release Notes: - N/A --- crates/language/src/buffer.rs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 20ecd9594b9d4..1f4c56ecc86ff 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -819,6 +819,9 @@ impl Buffer { branch.set_language_registry(language_registry); } + // Reparse the branch buffer so that we get syntax highlighting immediately. + branch.reparse(cx); + branch }) } From 778dedec6c07bca9803c2d6b84ea7c3be7f6fe7e Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Wed, 2 Oct 2024 22:00:40 +0300 Subject: [PATCH 191/228] Prepare to sync other kinds of settings (#18616) This PR does not change how things work for settings, but lays the ground work for the future functionality. After this change, Zed is prepared to sync more than just `settings.json` files from local worktree and user config. * ssh tasks Part of this work is to streamline the task sync mechanism. Instead of having an extra set of requests to fetch the task contents from the server (as remote-via-collab does now and does not cover all sync cases), we want to reuse the existing mechanism for synchronizing user and local settings. * editorconfig Part of the task is to sync .editorconfig file changes to everyone which involves sending and storing those configs. Both ssh (and remove-over-collab) .zed/tasks.json and .editorconfig files behave similar to .zed/settings.json local files: they belong to a certain path in a certain worktree; may update over time, changing Zed's functionality; can be merged hierarchically. Settings sync follows the same "config file changed -> send to watchers -> parse and merge locally and on watchers" path that's needed for both new kinds of files, ergo the messaging layer is extended to send more types of settings for future watch & parse and merge impls to follow. 
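As a rough sketch of the intended reuse (illustrative only — the `.zed/tasks.json` watcher does not exist yet, and `worktree_id`, `directory_path`, and `tasks_json` are assumed bindings), a future tasks watcher could feed the same extended store API that the settings watcher uses in the diff below:

```rust
// Hypothetical future call site: forward changed `.zed/tasks.json` contents
// through the same path used today for `.zed/settings.json`, tagged with the
// new kind discriminator introduced by this change.
cx.update_global::<SettingsStore, _>(|store, cx| {
    store.set_local_settings(
        worktree_id,                // worktree containing the file
        directory_path.clone(),     // directory that holds `.zed/tasks.json`
        LocalSettingsKind::Tasks,   // new per-file kind (Settings | Tasks | Editorconfig)
        Some(tasks_json.as_str()),  // raw file contents; `None` on deletion
        cx,
    )
})?;
```
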
Release Notes: - N/A --- .../20221109000000_test_schema.sql | 1 + ...20241002120231_add_local_settings_kind.sql | 1 + crates/collab/src/db.rs | 20 +++ crates/collab/src/db/queries/projects.rs | 9 ++ crates/collab/src/db/queries/rooms.rs | 1 + .../src/db/tables/worktree_settings_file.rs | 16 ++ crates/collab/src/rpc.rs | 2 + crates/collab/src/tests/integration_tests.rs | 44 +++++- crates/project/src/project_settings.rs | 46 +++++- crates/proto/proto/zed.proto | 13 ++ crates/settings/src/settings.rs | 3 +- crates/settings/src/settings_store.rs | 140 +++++++++++------- 12 files changed, 222 insertions(+), 74 deletions(-) create mode 100644 crates/collab/migrations/20241002120231_add_local_settings_kind.sql diff --git a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql index 5c2c3961600ac..5764aceea5fc4 100644 --- a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql +++ b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql @@ -112,6 +112,7 @@ CREATE TABLE "worktree_settings_files" ( "worktree_id" INTEGER NOT NULL, "path" VARCHAR NOT NULL, "content" TEXT, + "kind" VARCHAR, PRIMARY KEY(project_id, worktree_id, path), FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE ); diff --git a/crates/collab/migrations/20241002120231_add_local_settings_kind.sql b/crates/collab/migrations/20241002120231_add_local_settings_kind.sql new file mode 100644 index 0000000000000..aec4ffb8f8519 --- /dev/null +++ b/crates/collab/migrations/20241002120231_add_local_settings_kind.sql @@ -0,0 +1 @@ +ALTER TABLE "worktree_settings_files" ADD COLUMN "kind" VARCHAR; diff --git a/crates/collab/src/db.rs b/crates/collab/src/db.rs index 5c30a85738924..f717566824e7b 100644 --- a/crates/collab/src/db.rs +++ b/crates/collab/src/db.rs @@ -35,6 +35,7 @@ use std::{ }; use time::PrimitiveDateTime; use tokio::sync::{Mutex, OwnedMutexGuard}; +use worktree_settings_file::LocalSettingsKind; #[cfg(test)] pub use tests::TestDb; @@ -766,6 +767,7 @@ pub struct Worktree { pub struct WorktreeSettingsFile { pub path: String, pub content: String, + pub kind: LocalSettingsKind, } pub struct NewExtensionVersion { @@ -783,3 +785,21 @@ pub struct ExtensionVersionConstraints { pub schema_versions: RangeInclusive, pub wasm_api_versions: RangeInclusive, } + +impl LocalSettingsKind { + pub fn from_proto(proto_kind: proto::LocalSettingsKind) -> Self { + match proto_kind { + proto::LocalSettingsKind::Settings => Self::Settings, + proto::LocalSettingsKind::Tasks => Self::Tasks, + proto::LocalSettingsKind::Editorconfig => Self::Editorconfig, + } + } + + pub fn to_proto(&self) -> proto::LocalSettingsKind { + match self { + Self::Settings => proto::LocalSettingsKind::Settings, + Self::Tasks => proto::LocalSettingsKind::Tasks, + Self::Editorconfig => proto::LocalSettingsKind::Editorconfig, + } + } +} diff --git a/crates/collab/src/db/queries/projects.rs b/crates/collab/src/db/queries/projects.rs index 8091c6620570f..ceac78203d9a1 100644 --- a/crates/collab/src/db/queries/projects.rs +++ b/crates/collab/src/db/queries/projects.rs @@ -1,3 +1,4 @@ +use anyhow::Context as _; use util::ResultExt; use super::*; @@ -527,6 +528,12 @@ impl Database { connection: ConnectionId, ) -> Result>> { let project_id = ProjectId::from_proto(update.project_id); + let kind = match update.kind { + Some(kind) => proto::LocalSettingsKind::from_i32(kind) + .with_context(|| format!("unknown worktree settings kind: {kind}"))?, + None => 
proto::LocalSettingsKind::Settings, + }; + let kind = LocalSettingsKind::from_proto(kind); self.project_transaction(project_id, |tx| async move { // Ensure the update comes from the host. let project = project::Entity::find_by_id(project_id) @@ -543,6 +550,7 @@ impl Database { worktree_id: ActiveValue::Set(update.worktree_id as i64), path: ActiveValue::Set(update.path.clone()), content: ActiveValue::Set(content.clone()), + kind: ActiveValue::Set(kind), }) .on_conflict( OnConflict::columns([ @@ -800,6 +808,7 @@ impl Database { worktree.settings_files.push(WorktreeSettingsFile { path: db_settings_file.path, content: db_settings_file.content, + kind: db_settings_file.kind, }); } } diff --git a/crates/collab/src/db/queries/rooms.rs b/crates/collab/src/db/queries/rooms.rs index 635e2d232f087..baba0f2cf9d7c 100644 --- a/crates/collab/src/db/queries/rooms.rs +++ b/crates/collab/src/db/queries/rooms.rs @@ -735,6 +735,7 @@ impl Database { worktree.settings_files.push(WorktreeSettingsFile { path: db_settings_file.path, content: db_settings_file.content, + kind: db_settings_file.kind, }); } } diff --git a/crates/collab/src/db/tables/worktree_settings_file.rs b/crates/collab/src/db/tables/worktree_settings_file.rs index 92348c1ec9436..71f7b73fc1c39 100644 --- a/crates/collab/src/db/tables/worktree_settings_file.rs +++ b/crates/collab/src/db/tables/worktree_settings_file.rs @@ -11,9 +11,25 @@ pub struct Model { #[sea_orm(primary_key)] pub path: String, pub content: String, + pub kind: LocalSettingsKind, } #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] pub enum Relation {} impl ActiveModelBehavior for ActiveModel {} + +#[derive( + Copy, Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, Default, Hash, serde::Serialize, +)] +#[sea_orm(rs_type = "String", db_type = "String(StringLen::None)")] +#[serde(rename_all = "snake_case")] +pub enum LocalSettingsKind { + #[default] + #[sea_orm(string_value = "settings")] + Settings, + #[sea_orm(string_value = "tasks")] + Tasks, + #[sea_orm(string_value = "editorconfig")] + Editorconfig, +} diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index d9683fb8b366c..5f21df4ab9dbc 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -1739,6 +1739,7 @@ fn notify_rejoined_projects( worktree_id: worktree.id, path: settings_file.path, content: Some(settings_file.content), + kind: Some(settings_file.kind.to_proto().into()), }, )?; } @@ -2220,6 +2221,7 @@ fn join_project_internal( worktree_id: worktree.id, path: settings_file.path, content: Some(settings_file.content), + kind: Some(proto::update_user_settings::Kind::Settings.into()), }, )?; } diff --git a/crates/collab/src/tests/integration_tests.rs b/crates/collab/src/tests/integration_tests.rs index 615ad52e2ef36..2859113634417 100644 --- a/crates/collab/src/tests/integration_tests.rs +++ b/crates/collab/src/tests/integration_tests.rs @@ -33,7 +33,7 @@ use project::{ }; use rand::prelude::*; use serde_json::json; -use settings::SettingsStore; +use settings::{LocalSettingsKind, SettingsStore}; use std::{ cell::{Cell, RefCell}, env, future, mem, @@ -3327,8 +3327,16 @@ async fn test_local_settings( .local_settings(worktree_b.read(cx).id()) .collect::>(), &[ - (Path::new("").into(), r#"{"tab_size":2}"#.to_string()), - (Path::new("a").into(), r#"{"tab_size":8}"#.to_string()), + ( + Path::new("").into(), + LocalSettingsKind::Settings, + r#"{"tab_size":2}"#.to_string() + ), + ( + Path::new("a").into(), + LocalSettingsKind::Settings, + r#"{"tab_size":8}"#.to_string() + ), ] ) }); @@ 
-3346,8 +3354,16 @@ async fn test_local_settings( .local_settings(worktree_b.read(cx).id()) .collect::>(), &[ - (Path::new("").into(), r#"{}"#.to_string()), - (Path::new("a").into(), r#"{"tab_size":8}"#.to_string()), + ( + Path::new("").into(), + LocalSettingsKind::Settings, + r#"{}"#.to_string() + ), + ( + Path::new("a").into(), + LocalSettingsKind::Settings, + r#"{"tab_size":8}"#.to_string() + ), ] ) }); @@ -3375,8 +3391,16 @@ async fn test_local_settings( .local_settings(worktree_b.read(cx).id()) .collect::>(), &[ - (Path::new("a").into(), r#"{"tab_size":8}"#.to_string()), - (Path::new("b").into(), r#"{"tab_size":4}"#.to_string()), + ( + Path::new("a").into(), + LocalSettingsKind::Settings, + r#"{"tab_size":8}"#.to_string() + ), + ( + Path::new("b").into(), + LocalSettingsKind::Settings, + r#"{"tab_size":4}"#.to_string() + ), ] ) }); @@ -3406,7 +3430,11 @@ async fn test_local_settings( store .local_settings(worktree_b.read(cx).id()) .collect::>(), - &[(Path::new("a").into(), r#"{"hard_tabs":true}"#.to_string()),] + &[( + Path::new("a").into(), + LocalSettingsKind::Settings, + r#"{"hard_tabs":true}"#.to_string() + ),] ) }); } diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index d794563672ed0..87150587b3607 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -1,3 +1,4 @@ +use anyhow::Context; use collections::HashMap; use fs::Fs; use gpui::{AppContext, AsyncAppContext, BorrowAppContext, EventEmitter, Model, ModelContext}; @@ -6,7 +7,7 @@ use paths::local_settings_file_relative_path; use rpc::{proto, AnyProtoClient, TypedEnvelope}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{InvalidSettingsError, Settings, SettingsSources, SettingsStore}; +use settings::{InvalidSettingsError, LocalSettingsKind, Settings, SettingsSources, SettingsStore}; use std::{ path::{Path, PathBuf}, sync::Arc, @@ -266,13 +267,14 @@ impl SettingsObserver { let store = cx.global::(); for worktree in self.worktree_store.read(cx).worktrees() { let worktree_id = worktree.read(cx).id().to_proto(); - for (path, content) in store.local_settings(worktree.read(cx).id()) { + for (path, kind, content) in store.local_settings(worktree.read(cx).id()) { downstream_client .send(proto::UpdateWorktreeSettings { project_id, worktree_id, path: path.to_string_lossy().into(), content: Some(content), + kind: Some(local_settings_kind_to_proto(kind).into()), }) .log_err(); } @@ -288,6 +290,11 @@ impl SettingsObserver { envelope: TypedEnvelope, mut cx: AsyncAppContext, ) -> anyhow::Result<()> { + let kind = match envelope.payload.kind { + Some(kind) => proto::LocalSettingsKind::from_i32(kind) + .with_context(|| format!("unknown kind {kind}"))?, + None => proto::LocalSettingsKind::Settings, + }; this.update(&mut cx, |this, cx| { let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id); let Some(worktree) = this @@ -297,10 +304,12 @@ impl SettingsObserver { else { return; }; + this.update_settings( worktree, [( PathBuf::from(&envelope.payload.path).into(), + local_settings_kind_from_proto(kind), envelope.payload.content, )], cx, @@ -327,6 +336,7 @@ impl SettingsObserver { ssh.send(proto::UpdateUserSettings { project_id: 0, content, + kind: Some(proto::LocalSettingsKind::Settings.into()), }) .log_err(); } @@ -342,6 +352,7 @@ impl SettingsObserver { ssh.send(proto::UpdateUserSettings { project_id: 0, content, + kind: Some(proto::LocalSettingsKind::Settings.into()), }) .log_err(); } @@ -397,6 +408,7 @@ impl 
SettingsObserver { settings_contents.push(async move { ( settings_dir, + LocalSettingsKind::Settings, if removed { None } else { @@ -413,15 +425,15 @@ impl SettingsObserver { let worktree = worktree.clone(); cx.spawn(move |this, cx| async move { - let settings_contents: Vec<(Arc, _)> = + let settings_contents: Vec<(Arc, _, _)> = futures::future::join_all(settings_contents).await; cx.update(|cx| { this.update(cx, |this, cx| { this.update_settings( worktree, - settings_contents - .into_iter() - .map(|(path, content)| (path, content.and_then(|c| c.log_err()))), + settings_contents.into_iter().map(|(path, kind, content)| { + (path, kind, content.and_then(|c| c.log_err())) + }), cx, ) }) @@ -433,17 +445,18 @@ impl SettingsObserver { fn update_settings( &mut self, worktree: Model, - settings_contents: impl IntoIterator, Option)>, + settings_contents: impl IntoIterator, LocalSettingsKind, Option)>, cx: &mut ModelContext, ) { let worktree_id = worktree.read(cx).id(); let remote_worktree_id = worktree.read(cx).id(); let result = cx.update_global::>(|store, cx| { - for (directory, file_content) in settings_contents { + for (directory, kind, file_content) in settings_contents { store.set_local_settings( worktree_id, directory.clone(), + kind, file_content.as_deref(), cx, )?; @@ -455,6 +468,7 @@ impl SettingsObserver { worktree_id: remote_worktree_id.to_proto(), path: directory.to_string_lossy().into_owned(), content: file_content, + kind: Some(local_settings_kind_to_proto(kind).into()), }) .log_err(); } @@ -481,3 +495,19 @@ impl SettingsObserver { } } } + +pub fn local_settings_kind_from_proto(kind: proto::LocalSettingsKind) -> LocalSettingsKind { + match kind { + proto::LocalSettingsKind::Settings => LocalSettingsKind::Settings, + proto::LocalSettingsKind::Tasks => LocalSettingsKind::Tasks, + proto::LocalSettingsKind::Editorconfig => LocalSettingsKind::Editorconfig, + } +} + +pub fn local_settings_kind_to_proto(kind: LocalSettingsKind) -> proto::LocalSettingsKind { + match kind { + LocalSettingsKind::Settings => proto::LocalSettingsKind::Settings, + LocalSettingsKind::Tasks => proto::LocalSettingsKind::Tasks, + LocalSettingsKind::Editorconfig => proto::LocalSettingsKind::Editorconfig, + } +} diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index 07f64557f47e1..f6e9645e9c11a 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -642,6 +642,13 @@ message UpdateWorktreeSettings { uint64 worktree_id = 2; string path = 3; optional string content = 4; + optional LocalSettingsKind kind = 5; +} + +enum LocalSettingsKind { + Settings = 0; + Tasks = 1; + Editorconfig = 2; } message CreateProjectEntry { @@ -2487,6 +2494,12 @@ message AddWorktreeResponse { message UpdateUserSettings { uint64 project_id = 1; string content = 2; + optional Kind kind = 3; + + enum Kind { + Settings = 0; + Tasks = 1; + } } message CheckFileExists { diff --git a/crates/settings/src/settings.rs b/crates/settings/src/settings.rs index f1f8591bba452..2ed01dc7c722f 100644 --- a/crates/settings/src/settings.rs +++ b/crates/settings/src/settings.rs @@ -14,7 +14,8 @@ pub use json_schema::*; pub use keymap_file::KeymapFile; pub use settings_file::*; pub use settings_store::{ - InvalidSettingsError, Settings, SettingsLocation, SettingsSources, SettingsStore, + InvalidSettingsError, LocalSettingsKind, Settings, SettingsLocation, SettingsSources, + SettingsStore, }; #[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)] diff --git a/crates/settings/src/settings_store.rs 
b/crates/settings/src/settings_store.rs index 20bf52f2c57ef..445420c1db8a7 100644 --- a/crates/settings/src/settings_store.rs +++ b/crates/settings/src/settings_store.rs @@ -157,13 +157,14 @@ pub struct SettingsLocation<'a> { pub path: &'a Path, } -/// A set of strongly-typed setting values defined via multiple JSON files. +/// A set of strongly-typed setting values defined via multiple config files. pub struct SettingsStore { setting_values: HashMap>, raw_default_settings: serde_json::Value, raw_user_settings: serde_json::Value, raw_extension_settings: serde_json::Value, - raw_local_settings: BTreeMap<(WorktreeId, Arc), serde_json::Value>, + raw_local_settings: + BTreeMap<(WorktreeId, Arc), HashMap>, tab_size_callback: Option<( TypeId, Box Option + Send + Sync + 'static>, @@ -174,6 +175,13 @@ pub struct SettingsStore { >, } +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] +pub enum LocalSettingsKind { + Settings, + Tasks, + Editorconfig, +} + impl Global for SettingsStore {} #[derive(Debug)] @@ -520,19 +528,21 @@ impl SettingsStore { pub fn set_local_settings( &mut self, root_id: WorktreeId, - path: Arc, + directory_path: Arc, + kind: LocalSettingsKind, settings_content: Option<&str>, cx: &mut AppContext, ) -> Result<()> { + let raw_local_settings = self + .raw_local_settings + .entry((root_id, directory_path.clone())) + .or_default(); if settings_content.is_some_and(|content| !content.is_empty()) { - self.raw_local_settings.insert( - (root_id, path.clone()), - parse_json_with_comments(settings_content.unwrap())?, - ); + raw_local_settings.insert(kind, parse_json_with_comments(settings_content.unwrap())?); } else { - self.raw_local_settings.remove(&(root_id, path.clone())); + raw_local_settings.remove(&kind); } - self.recompute_values(Some((root_id, &path)), cx)?; + self.recompute_values(Some((root_id, &directory_path)), cx)?; Ok(()) } @@ -553,7 +563,8 @@ impl SettingsStore { /// Add or remove a set of local settings via a JSON string. pub fn clear_local_settings(&mut self, root_id: WorktreeId, cx: &mut AppContext) -> Result<()> { - self.raw_local_settings.retain(|k, _| k.0 != root_id); + self.raw_local_settings + .retain(|(worktree_id, _), _| worktree_id != &root_id); self.recompute_values(Some((root_id, "".as_ref())), cx)?; Ok(()) } @@ -561,7 +572,7 @@ impl SettingsStore { pub fn local_settings( &self, root_id: WorktreeId, - ) -> impl '_ + Iterator, String)> { + ) -> impl '_ + Iterator, LocalSettingsKind, String)> { self.raw_local_settings .range( (root_id, Path::new("").into()) @@ -570,7 +581,12 @@ impl SettingsStore { Path::new("").into(), ), ) - .map(|((_, path), content)| (path.clone(), serde_json::to_string(content).unwrap())) + .flat_map(|((_, path), content)| { + content.iter().filter_map(|(&kind, raw_content)| { + let parsed_content = serde_json::to_string(raw_content).log_err()?; + Some((path.clone(), kind, parsed_content)) + }) + }) } pub fn json_schema( @@ -739,56 +755,63 @@ impl SettingsStore { // Reload the local values for the setting. paths_stack.clear(); project_settings_stack.clear(); - for ((root_id, path), local_settings) in &self.raw_local_settings { - // Build a stack of all of the local values for that setting. 
- while let Some(prev_entry) = paths_stack.last() { - if let Some((prev_root_id, prev_path)) = prev_entry { - if root_id != prev_root_id || !path.starts_with(prev_path) { - paths_stack.pop(); - project_settings_stack.pop(); - continue; + for ((root_id, directory_path), local_settings) in &self.raw_local_settings { + if let Some(local_settings) = local_settings.get(&LocalSettingsKind::Settings) { + // Build a stack of all of the local values for that setting. + while let Some(prev_entry) = paths_stack.last() { + if let Some((prev_root_id, prev_path)) = prev_entry { + if root_id != prev_root_id || !directory_path.starts_with(prev_path) { + paths_stack.pop(); + project_settings_stack.pop(); + continue; + } } + break; } - break; - } - match setting_value.deserialize_setting(local_settings) { - Ok(local_settings) => { - paths_stack.push(Some((*root_id, path.as_ref()))); - project_settings_stack.push(local_settings); - - // If a local settings file changed, then avoid recomputing local - // settings for any path outside of that directory. - if changed_local_path.map_or( - false, - |(changed_root_id, changed_local_path)| { - *root_id != changed_root_id || !path.starts_with(changed_local_path) - }, - ) { - continue; - } - - if let Some(value) = setting_value - .load_setting( - SettingsSources { - default: &default_settings, - extensions: extension_settings.as_ref(), - user: user_settings.as_ref(), - release_channel: release_channel_settings.as_ref(), - project: &project_settings_stack.iter().collect::>(), + match setting_value.deserialize_setting(local_settings) { + Ok(local_settings) => { + paths_stack.push(Some((*root_id, directory_path.as_ref()))); + project_settings_stack.push(local_settings); + + // If a local settings file changed, then avoid recomputing local + // settings for any path outside of that directory. 
+ if changed_local_path.map_or( + false, + |(changed_root_id, changed_local_path)| { + *root_id != changed_root_id + || !directory_path.starts_with(changed_local_path) }, - cx, - ) - .log_err() - { - setting_value.set_local_value(*root_id, path.clone(), value); + ) { + continue; + } + + if let Some(value) = setting_value + .load_setting( + SettingsSources { + default: &default_settings, + extensions: extension_settings.as_ref(), + user: user_settings.as_ref(), + release_channel: release_channel_settings.as_ref(), + project: &project_settings_stack.iter().collect::>(), + }, + cx, + ) + .log_err() + { + setting_value.set_local_value( + *root_id, + directory_path.clone(), + value, + ); + } + } + Err(error) => { + return Err(anyhow!(InvalidSettingsError::LocalSettings { + path: directory_path.join(local_settings_file_relative_path()), + message: error.to_string() + })); } - } - Err(error) => { - return Err(anyhow!(InvalidSettingsError::LocalSettings { - path: path.join(local_settings_file_relative_path()), - message: error.to_string() - })); } } } @@ -1201,6 +1224,7 @@ mod tests { .set_local_settings( WorktreeId::from_usize(1), Path::new("/root1").into(), + LocalSettingsKind::Settings, Some(r#"{ "user": { "staff": true } }"#), cx, ) @@ -1209,6 +1233,7 @@ mod tests { .set_local_settings( WorktreeId::from_usize(1), Path::new("/root1/subdir").into(), + LocalSettingsKind::Settings, Some(r#"{ "user": { "name": "Jane Doe" } }"#), cx, ) @@ -1218,6 +1243,7 @@ mod tests { .set_local_settings( WorktreeId::from_usize(1), Path::new("/root2").into(), + LocalSettingsKind::Settings, Some(r#"{ "user": { "age": 42 }, "key2": "b" }"#), cx, ) From f809787275850a70a45a30bf7b72ae5c9dd547b2 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 2 Oct 2024 15:23:22 -0400 Subject: [PATCH 192/228] Update cloudflare/wrangler-action digest to 168bc28 (#18651) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [cloudflare/wrangler-action](https://redirect.github.com/cloudflare/wrangler-action) | action | digest | `f84a562` -> `168bc28` | --- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. 
--- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- .github/workflows/deploy_cloudflare.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/deploy_cloudflare.yml b/.github/workflows/deploy_cloudflare.yml index 5cf4d6fd13140..5c09c29b0f44f 100644 --- a/.github/workflows/deploy_cloudflare.yml +++ b/.github/workflows/deploy_cloudflare.yml @@ -36,28 +36,28 @@ jobs: mdbook build ./docs --dest-dir=../target/deploy/docs/ - name: Deploy Docs - uses: cloudflare/wrangler-action@f84a562284fc78278ff9052435d9526f9c718361 # v3 + uses: cloudflare/wrangler-action@168bc28b7078db16f6f1ecc26477fc2248592143 # v3 with: apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} command: pages deploy target/deploy --project-name=docs - name: Deploy Install - uses: cloudflare/wrangler-action@f84a562284fc78278ff9052435d9526f9c718361 # v3 + uses: cloudflare/wrangler-action@168bc28b7078db16f6f1ecc26477fc2248592143 # v3 with: apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} command: r2 object put -f script/install.sh zed-open-source-website-assets/install.sh - name: Deploy Docs Workers - uses: cloudflare/wrangler-action@f84a562284fc78278ff9052435d9526f9c718361 # v3 + uses: cloudflare/wrangler-action@168bc28b7078db16f6f1ecc26477fc2248592143 # v3 with: apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} command: deploy .cloudflare/docs-proxy/src/worker.js - name: Deploy Install Workers - uses: cloudflare/wrangler-action@f84a562284fc78278ff9052435d9526f9c718361 # v3 + uses: cloudflare/wrangler-action@168bc28b7078db16f6f1ecc26477fc2248592143 # v3 with: apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} From 3a5deb5c6fc7ae1354cc5c59773055e85519a3cc Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Thu, 3 Oct 2024 01:00:48 +0530 Subject: [PATCH 193/228] Replace isahc with async ureq (#18414) REplace isahc with ureq everywhere gpui is used. This should allow us to make http requests without libssl; and avoid a long-tail of panics caused by ishac. 
Release Notes: - (potentially breaking change) updated our http client --------- Co-authored-by: Mikayla --- Cargo.lock | 602 ++++++++++++------ Cargo.toml | 17 +- crates/client/Cargo.toml | 3 +- crates/client/src/client.rs | 27 +- crates/collab/Cargo.toml | 2 +- crates/collab/src/api/events.rs | 40 +- crates/collab/src/llm.rs | 12 +- crates/collab/src/rpc.rs | 6 +- crates/evals/Cargo.toml | 2 +- crates/evals/src/eval.rs | 7 +- crates/extension/Cargo.toml | 4 +- crates/extension/src/extension_builder.rs | 2 +- crates/extension/src/extension_store_test.rs | 52 +- crates/extension_cli/Cargo.toml | 2 +- crates/extension_cli/src/main.rs | 9 +- crates/http_client/Cargo.toml | 4 +- crates/http_client/src/http_client.rs | 29 +- crates/isahc_http_client/LICENSE-APACHE | 1 - .../src/isahc_http_client.rs | 105 --- crates/live_kit_server/Cargo.toml | 2 +- crates/reqwest_client/Cargo.toml | 31 + crates/reqwest_client/LICENSE-GPL | 1 + crates/reqwest_client/examples/client.rs | 16 + crates/reqwest_client/src/reqwest_client.rs | 232 +++++++ crates/semantic_index/Cargo.toml | 2 +- crates/semantic_index/examples/index.rs | 7 +- .../Cargo.toml | 16 +- crates/ureq_client/LICENSE-GPL | 1 + crates/ureq_client/examples/client.rs | 24 + crates/ureq_client/src/ureq_client.rs | 187 ++++++ crates/vim/Cargo.toml | 2 +- crates/zed/Cargo.toml | 2 +- crates/zed/src/main.rs | 10 +- 33 files changed, 1063 insertions(+), 396 deletions(-) delete mode 120000 crates/isahc_http_client/LICENSE-APACHE delete mode 100644 crates/isahc_http_client/src/isahc_http_client.rs create mode 100644 crates/reqwest_client/Cargo.toml create mode 120000 crates/reqwest_client/LICENSE-GPL create mode 100644 crates/reqwest_client/examples/client.rs create mode 100644 crates/reqwest_client/src/reqwest_client.rs rename crates/{isahc_http_client => ureq_client}/Cargo.toml (52%) create mode 120000 crates/ureq_client/LICENSE-GPL create mode 100644 crates/ureq_client/examples/client.rs create mode 100644 crates/ureq_client/src/ureq_client.rs diff --git a/Cargo.lock b/Cargo.lock index b69e4541cc9e2..a96e59df34c1d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -846,8 +846,8 @@ dependencies = [ "chrono", "futures-util", "http-types", - "hyper", - "hyper-rustls", + "hyper 0.14.30", + "hyper-rustls 0.24.2", "serde", "serde_json", "serde_path_to_error", @@ -880,15 +880,14 @@ checksum = "8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de" [[package]] name = "async-tls" -version = "0.12.0" +version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cfeefd0ca297cbbb3bd34fd6b228401c2a5177038257afd751bc29f0a2da4795" +checksum = "b2ae3c9eba89d472a0e4fe1dea433df78fbbe63d2b764addaf2ba3a6bde89a5e" dependencies = [ "futures-core", "futures-io", - "rustls 0.20.9", + "rustls 0.21.12", "rustls-pemfile 1.0.4", - "webpki", "webpki-roots 0.22.6", ] @@ -905,9 +904,9 @@ dependencies = [ [[package]] name = "async-tungstenite" -version = "0.23.0" +version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1e9efbe14612da0a19fb983059a0b621e9cf6225d7018ecab4f9988215540dc" +checksum = "90e661b6cb0a6eb34d02c520b052daa3aa9ac0cc02495c9d066bbce13ead132b" dependencies = [ "async-std", "async-tls", @@ -915,7 +914,7 @@ dependencies = [ "futures-util", "log", "pin-project-lite", - "tungstenite 0.20.1", + "tungstenite 0.24.0", ] [[package]] @@ -1064,7 +1063,7 @@ dependencies = [ "fastrand 2.1.1", "hex", "http 0.2.12", - "ring 0.17.8", + "ring", "time", "tokio", "tracing", @@ -1233,7 +1232,7 @@ 
dependencies = [ "once_cell", "p256", "percent-encoding", - "ring 0.17.8", + "ring", "sha2", "subtle", "time", @@ -1336,13 +1335,13 @@ dependencies = [ "aws-smithy-types", "bytes 1.7.1", "fastrand 2.1.1", - "h2", + "h2 0.3.26", "http 0.2.12", "http-body 0.4.6", "http-body 1.0.1", "httparse", - "hyper", - "hyper-rustls", + "hyper 0.14.30", + "hyper-rustls 0.24.2", "once_cell", "pin-project-lite", "pin-utils", @@ -1432,7 +1431,7 @@ dependencies = [ "headers", "http 0.2.12", "http-body 0.4.6", - "hyper", + "hyper 0.14.30", "itoa", "matchit", "memchr", @@ -1445,7 +1444,7 @@ dependencies = [ "serde_path_to_error", "serde_urlencoded", "sha1", - "sync_wrapper", + "sync_wrapper 0.1.2", "tokio", "tokio-tungstenite 0.20.1", "tower", @@ -1584,7 +1583,7 @@ dependencies = [ "proc-macro2", "quote", "regex", - "rustc-hash", + "rustc-hash 1.1.0", "shlex", "syn 2.0.76", ] @@ -1604,7 +1603,7 @@ dependencies = [ "proc-macro2", "quote", "regex", - "rustc-hash", + "rustc-hash 1.1.0", "shlex", "syn 2.0.76", ] @@ -2100,12 +2099,6 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" -[[package]] -name = "castaway" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2698f953def977c68f935bb0dfa959375ad4638570e969e2f1e9f433cbf1af6" - [[package]] name = "cbc" version = "0.1.2" @@ -2363,8 +2356,8 @@ dependencies = [ "clickhouse-derive", "clickhouse-rs-cityhash-sys", "futures 0.3.30", - "hyper", - "hyper-tls", + "hyper 0.14.30", + "hyper-tls 0.5.0", "lz4", "sealed", "serde", @@ -2402,6 +2395,7 @@ dependencies = [ "anyhow", "async-native-tls", "async-recursion 0.3.2", + "async-tls", "async-tungstenite", "chrono", "clock", @@ -2419,8 +2413,6 @@ dependencies = [ "rand 0.8.5", "release_channel", "rpc", - "rustls 0.20.9", - "rustls-native-certs 0.8.0", "schemars", "serde", "serde_json", @@ -2567,9 +2559,8 @@ dependencies = [ "headless", "hex", "http_client", - "hyper", + "hyper 0.14.30", "indoc", - "isahc_http_client", "jsonwebtoken", "language", "language_model", @@ -2593,7 +2584,8 @@ dependencies = [ "release_channel", "remote", "remote_server", - "reqwest", + "reqwest 0.11.27", + "reqwest_client", "rpc", "rustc-demangle", "scrypt", @@ -2677,7 +2669,7 @@ dependencies = [ name = "collections" version = "0.1.0" dependencies = [ - "rustc-hash", + "rustc-hash 1.1.0", ] [[package]] @@ -2995,7 +2987,7 @@ dependencies = [ "log", "rangemap", "rayon", - "rustc-hash", + "rustc-hash 1.1.0", "rustybuzz", "self_cell", "swash", @@ -3085,7 +3077,7 @@ dependencies = [ "hashbrown 0.14.5", "log", "regalloc2", - "rustc-hash", + "rustc-hash 1.1.0", "smallvec", "target-lexicon", ] @@ -3341,36 +3333,6 @@ dependencies = [ "windows-sys 0.59.0", ] -[[package]] -name = "curl" -version = "0.4.46" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e2161dd6eba090ff1594084e95fd67aeccf04382ffea77999ea94ed42ec67b6" -dependencies = [ - "curl-sys", - "libc", - "openssl-probe", - "openssl-sys", - "schannel", - "socket2 0.5.7", - "windows-sys 0.52.0", -] - -[[package]] -name = "curl-sys" -version = "0.4.74+curl-8.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8af10b986114528fcdc4b63b6f5f021b7057618411046a4de2ba0f0149a097bf" -dependencies = [ - "cc", - "libc", - "libz-sys", - "openssl-sys", - "pkg-config", - "vcpkg", - "windows-sys 0.52.0", -] - [[package]] name = "cursor-icon" version = "1.1.0" @@ -4032,7 +3994,6 @@ dependencies = [ 
"git", "gpui", "http_client", - "isahc_http_client", "language", "languages", "node_runtime", @@ -4043,6 +4004,7 @@ dependencies = [ "serde_json", "settings", "smol", + "ureq_client", ] [[package]] @@ -4127,7 +4089,6 @@ dependencies = [ "gpui", "http_client", "indexed_docs", - "isahc_http_client", "language", "log", "lsp", @@ -4136,6 +4097,7 @@ dependencies = [ "paths", "project", "release_channel", + "reqwest_client", "schemars", "semantic_version", "serde", @@ -4145,8 +4107,10 @@ dependencies = [ "snippet_provider", "task", "theme", + "tokio", "toml 0.8.19", "ui", + "ureq_client", "url", "util", "wasm-encoder 0.215.0", @@ -4166,9 +4130,9 @@ dependencies = [ "env_logger", "extension", "fs", - "isahc_http_client", "language", "log", + "reqwest_client", "rpc", "serde", "serde_json", @@ -4415,7 +4379,7 @@ dependencies = [ "futures-core", "futures-sink", "nanorand", - "spin 0.9.8", + "spin", ] [[package]] @@ -5181,6 +5145,25 @@ dependencies = [ "tracing", ] +[[package]] +name = "h2" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "524e8ac6999421f49a846c2d4411f337e53497d8ec55d67753beffa43c5d9205" +dependencies = [ + "atomic-waker", + "bytes 1.7.1", + "fnv", + "futures-core", + "futures-sink", + "http 1.1.0", + "indexmap 2.4.0", + "slab", + "tokio", + "tokio-util", + "tracing", +] + [[package]] name = "half" version = "2.4.1" @@ -5561,8 +5544,10 @@ dependencies = [ "anyhow", "derive_more", "futures 0.3.30", - "http 0.2.12", + "http 1.1.0", "log", + "rustls 0.21.12", + "rustls-native-certs 0.8.0", "serde", "serde_json", "smol", @@ -5603,7 +5588,7 @@ dependencies = [ "futures-channel", "futures-core", "futures-util", - "h2", + "h2 0.3.26", "http 0.2.12", "http-body 0.4.6", "httparse", @@ -5617,6 +5602,26 @@ dependencies = [ "want", ] +[[package]] +name = "hyper" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50dfd22e0e76d0f662d429a5f80fcaf3855009297eab6a0a9f8543834744ba05" +dependencies = [ + "bytes 1.7.1", + "futures-channel", + "futures-util", + "h2 0.4.6", + "http 1.1.0", + "http-body 1.0.1", + "httparse", + "itoa", + "pin-project-lite", + "smallvec", + "tokio", + "want", +] + [[package]] name = "hyper-rustls" version = "0.24.2" @@ -5625,12 +5630,29 @@ checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" dependencies = [ "futures-util", "http 0.2.12", - "hyper", + "hyper 0.14.30", "log", "rustls 0.21.12", "rustls-native-certs 0.6.3", "tokio", - "tokio-rustls", + "tokio-rustls 0.24.1", +] + +[[package]] +name = "hyper-rustls" +version = "0.27.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08afdbb5c31130e3034af566421053ab03787c640246a446327f550d11bcb333" +dependencies = [ + "futures-util", + "http 1.1.0", + "hyper 1.4.1", + "hyper-util", + "rustls 0.23.13", + "rustls-pki-types", + "tokio", + "tokio-rustls 0.26.0", + "tower-service", ] [[package]] @@ -5640,12 +5662,47 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" dependencies = [ "bytes 1.7.1", - "hyper", + "hyper 0.14.30", "native-tls", "tokio", "tokio-native-tls", ] +[[package]] +name = "hyper-tls" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" +dependencies = [ + "bytes 1.7.1", + "http-body-util", + "hyper 1.4.1", + "hyper-util", + "native-tls", + "tokio", + 
"tokio-native-tls", + "tower-service", +] + +[[package]] +name = "hyper-util" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41296eb09f183ac68eec06e03cdbea2e759633d4067b2f6552fc2e009bcad08b" +dependencies = [ + "bytes 1.7.1", + "futures-channel", + "futures-util", + "http 1.1.0", + "http-body 1.0.1", + "hyper 1.4.1", + "pin-project-lite", + "socket2 0.5.7", + "tokio", + "tower-service", + "tracing", +] + [[package]] name = "iana-time-zone" version = "0.1.60" @@ -6013,44 +6070,6 @@ version = "1.70.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" -[[package]] -name = "isahc" -version = "1.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "334e04b4d781f436dc315cb1e7515bd96826426345d498149e4bde36b67f8ee9" -dependencies = [ - "async-channel 1.9.0", - "castaway", - "crossbeam-utils", - "curl", - "curl-sys", - "encoding_rs", - "event-listener 2.5.3", - "futures-lite 1.13.0", - "http 0.2.12", - "log", - "mime", - "once_cell", - "polling 2.8.0", - "slab", - "sluice", - "tracing", - "tracing-futures", - "url", - "waker-fn", -] - -[[package]] -name = "isahc_http_client" -version = "0.1.0" -dependencies = [ - "anyhow", - "futures 0.3.30", - "http_client", - "isahc", - "util", -] - [[package]] name = "itertools" version = "0.10.5" @@ -6155,7 +6174,7 @@ dependencies = [ "base64 0.21.7", "js-sys", "pem", - "ring 0.17.8", + "ring", "serde", "serde_json", "simple_asn1", @@ -6406,7 +6425,7 @@ version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" dependencies = [ - "spin 0.9.8", + "spin", ] [[package]] @@ -6601,7 +6620,7 @@ dependencies = [ "prost", "prost-build", "prost-types", - "reqwest", + "reqwest 0.12.8", "serde", ] @@ -7085,7 +7104,7 @@ dependencies = [ "hexf-parse", "indexmap 2.4.0", "log", - "rustc-hash", + "rustc-hash 1.1.0", "spirv", "termcolor", "thiserror", @@ -8742,6 +8761,54 @@ dependencies = [ "zed_actions", ] +[[package]] +name = "quinn" +version = "0.11.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c7c5fdde3cdae7203427dc4f0a68fe0ed09833edc525a03456b153b79828684" +dependencies = [ + "bytes 1.7.1", + "pin-project-lite", + "quinn-proto", + "quinn-udp", + "rustc-hash 2.0.0", + "rustls 0.23.13", + "socket2 0.5.7", + "thiserror", + "tokio", + "tracing", +] + +[[package]] +name = "quinn-proto" +version = "0.11.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fadfaed2cd7f389d0161bb73eeb07b7b78f8691047a6f3e73caaeae55310a4a6" +dependencies = [ + "bytes 1.7.1", + "rand 0.8.5", + "ring", + "rustc-hash 2.0.0", + "rustls 0.23.13", + "slab", + "thiserror", + "tinyvec", + "tracing", +] + +[[package]] +name = "quinn-udp" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fe68c2e9e1a1234e218683dbdf9f9dfcb094113c5ac2b938dfcb9bab4c4140b" +dependencies = [ + "libc", + "once_cell", + "socket2 0.5.7", + "tracing", + "windows-sys 0.59.0", +] + [[package]] name = "quote" version = "1.0.37" @@ -9019,7 +9086,7 @@ checksum = "ad156d539c879b7a24a363a2016d77961786e71f48f2e2fc8302a92abd2429a6" dependencies = [ "hashbrown 0.13.2", "log", - "rustc-hash", + "rustc-hash 1.1.0", "slice-group-by", "smallvec", ] @@ -9196,11 +9263,11 @@ dependencies = [ "encoding_rs", "futures-core", "futures-util", - "h2", + "h2 0.3.26", "http 
0.2.12", "http-body 0.4.6", - "hyper", - "hyper-tls", + "hyper 0.14.30", + "hyper-tls 0.5.0", "ipnet", "js-sys", "log", @@ -9213,8 +9280,8 @@ dependencies = [ "serde", "serde_json", "serde_urlencoded", - "sync_wrapper", - "system-configuration", + "sync_wrapper 0.1.2", + "system-configuration 0.5.1", "tokio", "tokio-native-tls", "tower-service", @@ -9225,6 +9292,68 @@ dependencies = [ "winreg 0.50.0", ] +[[package]] +name = "reqwest" +version = "0.12.8" +source = "git+https://github.com/zed-industries/reqwest.git?rev=fd110f6998da16bbca97b6dddda9be7827c50e29#fd110f6998da16bbca97b6dddda9be7827c50e29" +dependencies = [ + "base64 0.22.1", + "bytes 1.7.1", + "encoding_rs", + "futures-core", + "futures-util", + "h2 0.4.6", + "http 1.1.0", + "http-body 1.0.1", + "http-body-util", + "hyper 1.4.1", + "hyper-rustls 0.27.3", + "hyper-tls 0.6.0", + "hyper-util", + "ipnet", + "js-sys", + "log", + "mime", + "native-tls", + "once_cell", + "percent-encoding", + "pin-project-lite", + "quinn", + "rustls 0.23.13", + "rustls-pemfile 2.1.3", + "rustls-pki-types", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper 1.0.1", + "system-configuration 0.6.1", + "tokio", + "tokio-native-tls", + "tokio-rustls 0.26.0", + "tokio-util", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm-streams", + "web-sys", + "windows-registry", +] + +[[package]] +name = "reqwest_client" +version = "0.1.0" +dependencies = [ + "anyhow", + "bytes 1.7.1", + "futures 0.3.30", + "http_client", + "reqwest 0.12.8", + "serde", + "smol", + "tokio", +] + [[package]] name = "resvg" version = "0.41.0" @@ -9273,21 +9402,6 @@ dependencies = [ "util", ] -[[package]] -name = "ring" -version = "0.16.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" -dependencies = [ - "cc", - "libc", - "once_cell", - "spin 0.5.2", - "untrusted 0.7.1", - "web-sys", - "winapi", -] - [[package]] name = "ring" version = "0.17.8" @@ -9298,8 +9412,8 @@ dependencies = [ "cfg-if", "getrandom 0.2.15", "libc", - "spin 0.9.8", - "untrusted 0.9.0", + "spin", + "untrusted", "windows-sys 0.52.0", ] @@ -9455,7 +9569,7 @@ dependencies = [ "futures 0.3.30", "glob", "rand 0.8.5", - "ring 0.17.8", + "ring", "serde", "serde_json", "shellexpand 3.1.0", @@ -9527,6 +9641,12 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" +[[package]] +name = "rustc-hash" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "583034fd73374156e66797ed8e5b0d5690409c9226b22d87cb7f19821c05d152" + [[package]] name = "rustc_version" version = "0.4.1" @@ -9578,26 +9698,28 @@ dependencies = [ [[package]] name = "rustls" -version = "0.20.9" +version = "0.21.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b80e3dec595989ea8510028f30c408a4630db12c9cbb8de34203b89d6577e99" +checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e" dependencies = [ "log", - "ring 0.16.20", + "ring", + "rustls-webpki 0.101.7", "sct", - "webpki", ] [[package]] name = "rustls" -version = "0.21.12" +version = "0.23.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e" +checksum = "f2dabaac7466917e566adb06783a81ca48944c6898a1b08b9374106dd671f4c8" dependencies = [ - "log", - "ring 0.17.8", - 
"rustls-webpki", - "sct", + "once_cell", + "ring", + "rustls-pki-types", + "rustls-webpki 0.102.8", + "subtle", + "zeroize", ] [[package]] @@ -9656,8 +9778,19 @@ version = "0.101.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" dependencies = [ - "ring 0.17.8", - "untrusted 0.9.0", + "ring", + "untrusted", +] + +[[package]] +name = "rustls-webpki" +version = "0.102.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64ca1bc8749bd4cf37b5ce386cc146580777b4e8572c7b97baf22c83f444bee9" +dependencies = [ + "ring", + "rustls-pki-types", + "untrusted", ] [[package]] @@ -9771,8 +9904,8 @@ version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" dependencies = [ - "ring 0.17.8", - "untrusted 0.9.0", + "ring", + "untrusted", ] [[package]] @@ -9968,7 +10101,6 @@ dependencies = [ "gpui", "heed", "http_client", - "isahc_http_client", "language", "language_model", "languages", @@ -9986,6 +10118,7 @@ dependencies = [ "tree-sitter", "ui", "unindent", + "ureq_client", "util", "workspace", "worktree", @@ -10418,17 +10551,6 @@ dependencies = [ "version_check", ] -[[package]] -name = "sluice" -version = "0.5.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d7400c0eff44aa2fcb5e31a5f24ba9716ed90138769e4977a2ba6014ae63eb5" -dependencies = [ - "async-channel 1.9.0", - "futures-core", - "futures-io", -] - [[package]] name = "smallvec" version = "1.13.2" @@ -10543,12 +10665,6 @@ dependencies = [ "smallvec", ] -[[package]] -name = "spin" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" - [[package]] name = "spin" version = "0.9.8" @@ -11178,6 +11294,15 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" +[[package]] +name = "sync_wrapper" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7065abeca94b6a8a577f9bd45aa0867a2238b74e8eb67cf10d492bc39351394" +dependencies = [ + "futures-core", +] + [[package]] name = "synchronoise" version = "1.0.1" @@ -11218,7 +11343,18 @@ checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" dependencies = [ "bitflags 1.3.2", "core-foundation 0.9.4", - "system-configuration-sys", + "system-configuration-sys 0.5.0", +] + +[[package]] +name = "system-configuration" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b" +dependencies = [ + "bitflags 2.6.0", + "core-foundation 0.9.4", + "system-configuration-sys 0.6.0", ] [[package]] @@ -11231,6 +11367,16 @@ dependencies = [ "libc", ] +[[package]] +name = "system-configuration-sys" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e1d1b10ced5ca923a1fcb8d03e96b8d3268065d724548c0211415ff6ac6bac4" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "system-deps" version = "6.2.2" @@ -11607,7 +11753,7 @@ dependencies = [ "fancy-regex", "lazy_static", "parking_lot", - "rustc-hash", + "rustc-hash 1.1.0", ] [[package]] @@ -11822,6 +11968,17 @@ dependencies = [ "tokio", ] +[[package]] +name = "tokio-rustls" +version 
= "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c7bc40d0e5a97695bb96e27995cd3a08538541b0a846f65bba7a359f36700d4" +dependencies = [ + "rustls 0.23.13", + "rustls-pki-types", + "tokio", +] + [[package]] name = "tokio-socks" version = "0.5.2" @@ -11871,9 +12028,9 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.7.11" +version = "0.7.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cf6b47b3771c49ac75ad09a6162f53ad4b8088b76ac60e8ec1455b31a189fe1" +checksum = "61e7c3654c13bcd040d4a03abee2c75b1d14a37b423cf5a813ceae1cc903ec6a" dependencies = [ "bytes 1.7.1", "futures-core", @@ -12055,16 +12212,6 @@ dependencies = [ "valuable", ] -[[package]] -name = "tracing-futures" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97d095ae15e245a057c8e8451bab9b3ee1e1f68e9ba2b4fbc18d0ac5237835f2" -dependencies = [ - "pin-project", - "tracing", -] - [[package]] name = "tracing-log" version = "0.2.0" @@ -12371,6 +12518,24 @@ dependencies = [ "utf-8", ] +[[package]] +name = "tungstenite" +version = "0.24.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18e5b8366ee7a95b16d32197d0b2604b43a0be89dc5fac9f8e96ccafbaedda8a" +dependencies = [ + "byteorder", + "bytes 1.7.1", + "data-encoding", + "http 1.1.0", + "httparse", + "log", + "rand 0.8.5", + "sha1", + "thiserror", + "utf-8", +] + [[package]] name = "typeid" version = "1.0.2" @@ -12531,15 +12696,40 @@ checksum = "e1766d682d402817b5ac4490b3c3002d91dfa0d22812f341609f97b08757359c" [[package]] name = "untrusted" -version = "0.7.1" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] -name = "untrusted" -version = "0.9.0" +name = "ureq" +version = "2.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" +checksum = "f8cdd25c339e200129fe4de81451814e5228c9b771d57378817d6117cc2b3f97" +dependencies = [ + "base64 0.21.7", + "flate2", + "log", + "once_cell", + "rustls 0.21.12", + "rustls-webpki 0.101.7", + "url", + "webpki-roots 0.25.4", +] + +[[package]] +name = "ureq_client" +version = "0.1.0" +dependencies = [ + "anyhow", + "futures 0.3.30", + "gpui", + "http_client", + "parking_lot", + "serde", + "smol", + "ureq", + "util", +] [[package]] name = "url" @@ -12844,7 +13034,7 @@ dependencies = [ "futures-util", "headers", "http 0.2.12", - "hyper", + "hyper 0.14.30", "log", "mime", "mime_guess", @@ -12980,6 +13170,19 @@ dependencies = [ "wasmparser 0.201.0", ] +[[package]] +name = "wasm-streams" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b65dc4c90b63b118468cf747d8bf3566c1913ef60be765b5730ead9e0a3ba129" +dependencies = [ + "futures-util", + "js-sys", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + [[package]] name = "wasmparser" version = "0.201.0" @@ -13395,8 +13598,8 @@ version = "0.22.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ed63aea5ce73d0ff405984102c42de94fc55a6b75765d621c65262469b3c9b53" dependencies = [ - "ring 0.17.8", - "untrusted 0.9.0", + "ring", + "untrusted", ] [[package]] @@ -13653,6 +13856,17 @@ dependencies = [ "syn 2.0.76", ] +[[package]] +name = "windows-registry" +version = "0.2.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "e400001bb720a623c1c69032f8e3e4cf09984deec740f007dd2b03ec864804b0" +dependencies = [ + "windows-result 0.2.0", + "windows-strings", + "windows-targets 0.52.6", +] + [[package]] name = "windows-result" version = "0.1.2" @@ -14443,7 +14657,6 @@ dependencies = [ "image_viewer", "inline_completion_button", "install_cli", - "isahc_http_client", "journal", "language", "language_model", @@ -14496,6 +14709,7 @@ dependencies = [ "tree-sitter-md", "tree-sitter-rust", "ui", + "ureq_client", "url", "urlencoding", "util", diff --git a/Cargo.toml b/Cargo.toml index 1ef14dae70c20..fea528db5b89e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -7,6 +7,7 @@ members = [ "crates/assistant", "crates/assistant_slash_command", "crates/assistant_tool", + "crates/ureq_client", "crates/audio", "crates/auto_update", "crates/breadcrumbs", @@ -52,7 +53,6 @@ members = [ "crates/indexed_docs", "crates/inline_completion_button", "crates/install_cli", - "crates/isahc_http_client", "crates/journal", "crates/language", "crates/language_model", @@ -87,6 +87,7 @@ members = [ "crates/release_channel", "crates/remote", "crates/remote_server", + "crates/reqwest_client", "crates/repl", "crates/rich_text", "crates/rope", @@ -186,6 +187,8 @@ assets = { path = "crates/assets" } assistant = { path = "crates/assistant" } assistant_slash_command = { path = "crates/assistant_slash_command" } assistant_tool = { path = "crates/assistant_tool" } +ureq_client = { path = "crates/ureq_client" } +async-compat = { version = "0.2.1" } audio = { path = "crates/audio" } auto_update = { path = "crates/auto_update" } breadcrumbs = { path = "crates/breadcrumbs" } @@ -228,7 +231,6 @@ image_viewer = { path = "crates/image_viewer" } indexed_docs = { path = "crates/indexed_docs" } inline_completion_button = { path = "crates/inline_completion_button" } install_cli = { path = "crates/install_cli" } -isahc_http_client = { path = "crates/isahc_http_client" } journal = { path = "crates/journal" } language = { path = "crates/language" } language_model = { path = "crates/language_model" } @@ -265,6 +267,7 @@ release_channel = { path = "crates/release_channel" } remote = { path = "crates/remote" } remote_server = { path = "crates/remote_server" } repl = { path = "crates/repl" } +reqwest_client = { path = "crates/reqwest_client" } rich_text = { path = "crates/rich_text" } rope = { path = "crates/rope" } rpc = { path = "crates/rpc" } @@ -325,7 +328,7 @@ async-pipe = { git = "https://github.com/zed-industries/async-pipe-rs", rev = "8 async-recursion = "1.0.0" async-tar = "0.5.0" async-trait = "0.1" -async-tungstenite = "0.23" +async-tungstenite = "0.28" async-watch = "0.3.1" async_zip = { version = "0.0.17", features = ["deflate", "deflate64"] } base64 = "0.22" @@ -364,10 +367,7 @@ ignore = "0.4.22" image = "0.25.1" indexmap = { version = "1.6.2", features = ["serde"] } indoc = "2" -# We explicitly disable http2 support in isahc. 
-isahc = { version = "1.7.2", default-features = false, features = [ - "text-decoding", -] } + itertools = "0.13.0" jsonwebtoken = "9.3" libc = "0.2" @@ -392,13 +392,14 @@ pulldown-cmark = { version = "0.12.0", default-features = false } rand = "0.8.5" regex = "1.5" repair_json = "0.1.0" +reqwest = { git = "https://github.com/zed-industries/reqwest.git", rev = "fd110f6998da16bbca97b6dddda9be7827c50e29" } rsa = "0.9.6" runtimelib = { version = "0.15", default-features = false, features = [ "async-dispatcher-runtime", ] } rustc-demangle = "0.1.23" rust-embed = { version = "8.4", features = ["include-exclude"] } -rustls = "0.20.3" +rustls = "0.21.12" rustls-native-certs = "0.8.0" schemars = { version = "0.8", features = ["impl_json_schema"] } semver = "1.0" diff --git a/crates/client/Cargo.toml b/crates/client/Cargo.toml index dd420bbbe6318..c3fbea1f98a88 100644 --- a/crates/client/Cargo.toml +++ b/crates/client/Cargo.toml @@ -18,6 +18,7 @@ test-support = ["clock/test-support", "collections/test-support", "gpui/test-sup [dependencies] anyhow.workspace = true async-recursion = "0.3" +async-tls = "0.13" async-tungstenite = { workspace = true, features = ["async-std", "async-tls"] } chrono = { workspace = true, features = ["serde"] } clock.workspace = true @@ -34,8 +35,6 @@ postage.workspace = true rand.workspace = true release_channel.workspace = true rpc = { workspace = true, features = ["gpui"] } -rustls.workspace = true -rustls-native-certs.workspace = true schemars.workspace = true serde.workspace = true serde_json.workspace = true diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index d565d620c3c20..819bd7551f596 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -1023,7 +1023,7 @@ impl Client { &self, http: Arc, release_channel: Option, - ) -> impl Future> { + ) -> impl Future> { #[cfg(any(test, feature = "test-support"))] let url_override = self.rpc_url.read().clone(); @@ -1117,7 +1117,7 @@ impl Client { // for us from the RPC URL. // // Among other things, it will generate and set a `Sec-WebSocket-Key` header for us. - let mut request = rpc_url.into_client_request()?; + let mut request = IntoClientRequest::into_client_request(rpc_url.as_str())?; // We then modify the request to add our desired headers. 
let request_headers = request.headers_mut(); @@ -1137,30 +1137,13 @@ impl Client { match url_scheme { Https => { - let client_config = { - let mut root_store = rustls::RootCertStore::empty(); - - let root_certs = rustls_native_certs::load_native_certs(); - for error in root_certs.errors { - log::warn!("error loading native certs: {:?}", error); - } - root_store.add_parsable_certificates( - &root_certs - .certs - .into_iter() - .map(|cert| cert.as_ref().to_owned()) - .collect::>(), - ); - rustls::ClientConfig::builder() - .with_safe_defaults() - .with_root_certificates(root_store) - .with_no_client_auth() - }; let (stream, _) = async_tungstenite::async_tls::client_async_tls_with_connector( request, stream, - Some(client_config.into()), + Some(async_tls::TlsConnector::from( + http_client::TLS_CONFIG.clone(), + )), ) .await?; Ok(Connection::new( diff --git a/crates/collab/Cargo.toml b/crates/collab/Cargo.toml index ad43d2d1f0cf5..7d4c5d0c706b7 100644 --- a/crates/collab/Cargo.toml +++ b/crates/collab/Cargo.toml @@ -37,7 +37,7 @@ futures.workspace = true google_ai.workspace = true hex.workspace = true http_client.workspace = true -isahc_http_client.workspace = true +reqwest_client.workspace = true jsonwebtoken.workspace = true live_kit_server.workspace = true log.workspace = true diff --git a/crates/collab/src/api/events.rs b/crates/collab/src/api/events.rs index bbfa69c0b8f70..dd1370e886644 100644 --- a/crates/collab/src/api/events.rs +++ b/crates/collab/src/api/events.rs @@ -674,7 +674,7 @@ pub struct EditorEventRow { copilot_enabled_for_language: bool, historical_event: bool, architecture: String, - is_staff: Option, + is_staff: bool, major: Option, minor: Option, patch: Option, @@ -708,7 +708,7 @@ impl EditorEventRow { installation_id: body.installation_id.clone().unwrap_or_default(), session_id: body.session_id.clone(), metrics_id: body.metrics_id.clone().unwrap_or_default(), - is_staff: body.is_staff, + is_staff: body.is_staff.unwrap_or_default(), time: time.timestamp_millis(), operation: event.operation, file_extension: event.file_extension.unwrap_or_default(), @@ -741,7 +741,7 @@ pub struct InlineCompletionEventRow { region_code: String, city: String, time: i64, - is_staff: Option, + is_staff: bool, major: Option, minor: Option, patch: Option, @@ -772,7 +772,7 @@ impl InlineCompletionEventRow { os_version: body.os_version.clone().unwrap_or_default(), installation_id: body.installation_id.clone().unwrap_or_default(), session_id: body.session_id.clone(), - is_staff: body.is_staff, + is_staff: body.is_staff.unwrap_or_default(), time: time.timestamp_millis(), file_extension: event.file_extension.unwrap_or_default(), signed_in: wrapper.signed_in, @@ -800,7 +800,7 @@ pub struct CallEventRow { // ClientEventBase installation_id: String, session_id: Option, - is_staff: Option, + is_staff: bool, time: i64, // CallEventRow @@ -832,7 +832,7 @@ impl CallEventRow { os_version: body.os_version.clone().unwrap_or_default(), installation_id: body.installation_id.clone().unwrap_or_default(), session_id: body.session_id.clone(), - is_staff: body.is_staff, + is_staff: body.is_staff.unwrap_or_default(), time: time.timestamp_millis(), operation: event.operation, room_id: event.room_id, @@ -856,7 +856,7 @@ pub struct AssistantEventRow { // ClientEventBase installation_id: Option, session_id: Option, - is_staff: Option, + is_staff: bool, time: i64, // AssistantEventRow @@ -891,7 +891,7 @@ impl AssistantEventRow { os_version: body.os_version.clone().unwrap_or_default(), installation_id: 
body.installation_id.clone(), session_id: body.session_id.clone(), - is_staff: body.is_staff, + is_staff: body.is_staff.unwrap_or_default(), time: time.timestamp_millis(), conversation_id: event.conversation_id.unwrap_or_default(), kind: event.kind.to_string(), @@ -909,7 +909,7 @@ impl AssistantEventRow { pub struct CpuEventRow { installation_id: Option, session_id: Option, - is_staff: Option, + is_staff: bool, usage_as_percentage: f32, core_count: u32, app_version: String, @@ -947,7 +947,7 @@ impl CpuEventRow { os_version: body.os_version.clone().unwrap_or_default(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), - is_staff: body.is_staff, + is_staff: body.is_staff.unwrap_or_default(), time: time.timestamp_millis(), usage_as_percentage: event.usage_as_percentage, core_count: event.core_count, @@ -970,7 +970,7 @@ pub struct MemoryEventRow { // ClientEventBase installation_id: Option, session_id: Option, - is_staff: Option, + is_staff: bool, time: i64, // MemoryEventRow @@ -1001,7 +1001,7 @@ impl MemoryEventRow { os_version: body.os_version.clone().unwrap_or_default(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), - is_staff: body.is_staff, + is_staff: body.is_staff.unwrap_or_default(), time: time.timestamp_millis(), memory_in_bytes: event.memory_in_bytes, virtual_memory_in_bytes: event.virtual_memory_in_bytes, @@ -1024,7 +1024,7 @@ pub struct AppEventRow { // ClientEventBase installation_id: Option, session_id: Option, - is_staff: Option, + is_staff: bool, time: i64, // AppEventRow @@ -1054,7 +1054,7 @@ impl AppEventRow { os_version: body.os_version.clone().unwrap_or_default(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), - is_staff: body.is_staff, + is_staff: body.is_staff.unwrap_or_default(), time: time.timestamp_millis(), operation: event.operation, } @@ -1076,7 +1076,7 @@ pub struct SettingEventRow { // ClientEventBase installation_id: Option, session_id: Option, - is_staff: Option, + is_staff: bool, time: i64, // SettingEventRow setting: String, @@ -1106,7 +1106,7 @@ impl SettingEventRow { os_version: body.os_version.clone().unwrap_or_default(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), - is_staff: body.is_staff, + is_staff: body.is_staff.unwrap_or_default(), time: time.timestamp_millis(), setting: event.setting, value: event.value, @@ -1129,7 +1129,7 @@ pub struct ExtensionEventRow { // ClientEventBase installation_id: Option, session_id: Option, - is_staff: Option, + is_staff: bool, time: i64, // ExtensionEventRow @@ -1164,7 +1164,7 @@ impl ExtensionEventRow { os_version: body.os_version.clone().unwrap_or_default(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), - is_staff: body.is_staff, + is_staff: body.is_staff.unwrap_or_default(), time: time.timestamp_millis(), extension_id: event.extension_id, extension_version: event.version, @@ -1198,7 +1198,7 @@ pub struct ReplEventRow { // ClientEventBase installation_id: Option, session_id: Option, - is_staff: Option, + is_staff: bool, time: i64, // ReplEventRow @@ -1230,7 +1230,7 @@ impl ReplEventRow { os_version: body.os_version.clone().unwrap_or_default(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), - is_staff: body.is_staff, + is_staff: body.is_staff.unwrap_or_default(), time: time.timestamp_millis(), kernel_language: event.kernel_language, kernel_status: event.kernel_status, diff --git 
a/crates/collab/src/llm.rs b/crates/collab/src/llm.rs index 14f10342a78dd..2d040cfa28e1a 100644 --- a/crates/collab/src/llm.rs +++ b/crates/collab/src/llm.rs @@ -22,7 +22,8 @@ use chrono::{DateTime, Duration, Utc}; use collections::HashMap; use db::{usage_measure::UsageMeasure, ActiveUserCount, LlmDatabase}; use futures::{Stream, StreamExt as _}; -use isahc_http_client::IsahcHttpClient; + +use reqwest_client::ReqwestClient; use rpc::ListModelsResponse; use rpc::{ proto::Plan, LanguageModelProvider, PerformCompletionParams, EXPIRED_LLM_TOKEN_HEADER_NAME, @@ -43,7 +44,7 @@ pub struct LlmState { pub config: Config, pub executor: Executor, pub db: Arc, - pub http_client: IsahcHttpClient, + pub http_client: ReqwestClient, pub clickhouse_client: Option, active_user_count_by_model: RwLock, ActiveUserCount)>>, @@ -69,11 +70,8 @@ impl LlmState { let db = Arc::new(db); let user_agent = format!("Zed Server/{}", env!("CARGO_PKG_VERSION")); - let http_client = IsahcHttpClient::builder() - .default_header("User-Agent", user_agent) - .build() - .map(IsahcHttpClient::from) - .context("failed to construct http client")?; + let http_client = + ReqwestClient::user_agent(&user_agent).context("failed to construct http client")?; let this = Self { executor, diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index 5f21df4ab9dbc..27c95a5b44e1a 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -36,8 +36,8 @@ use collections::{HashMap, HashSet}; pub use connection_pool::{ConnectionPool, ZedVersion}; use core::fmt::{self, Debug, Formatter}; use http_client::HttpClient; -use isahc_http_client::IsahcHttpClient; use open_ai::{OpenAiEmbeddingModel, OPEN_AI_API_URL}; +use reqwest_client::ReqwestClient; use sha2::Digest; use supermaven_api::{CreateExternalUserRequest, SupermavenAdminApi}; @@ -954,8 +954,8 @@ impl Server { tracing::info!("connection opened"); let user_agent = format!("Zed Server/{}", env!("CARGO_PKG_VERSION")); - let http_client = match IsahcHttpClient::builder().default_header("User-Agent", user_agent).build() { - Ok(http_client) => Arc::new(IsahcHttpClient::from(http_client)), + let http_client = match ReqwestClient::user_agent(&user_agent) { + Ok(http_client) => Arc::new(http_client), Err(error) => { tracing::error!(?error, "failed to create HTTP client"); return; diff --git a/crates/evals/Cargo.toml b/crates/evals/Cargo.toml index 400ab139aa2e4..52af0ce446f91 100644 --- a/crates/evals/Cargo.toml +++ b/crates/evals/Cargo.toml @@ -16,6 +16,7 @@ path = "src/eval.rs" [dependencies] clap.workspace = true anyhow.workspace = true +ureq_client.workspace = true client.workspace = true clock.workspace = true collections.workspace = true @@ -24,7 +25,6 @@ feature_flags.workspace = true fs.workspace = true git.workspace = true gpui.workspace = true -isahc_http_client.workspace = true language.workspace = true languages.workspace = true http_client.workspace = true diff --git a/crates/evals/src/eval.rs b/crates/evals/src/eval.rs index 899d821053711..e2dc5c8e03a2f 100644 --- a/crates/evals/src/eval.rs +++ b/crates/evals/src/eval.rs @@ -32,6 +32,7 @@ use std::{ Arc, }, }; +use ureq_client::UreqClient; const CODESEARCH_NET_DIR: &'static str = "target/datasets/code-search-net"; const EVAL_REPOS_DIR: &'static str = "target/datasets/eval-repos"; @@ -100,7 +101,11 @@ fn main() -> Result<()> { gpui::App::headless().run(move |cx| { let executor = cx.background_executor().clone(); - let client = isahc_http_client::IsahcHttpClient::new(None, None); + let client = Arc::new(UreqClient::new( + 
None, + "Zed LLM evals".to_string(), + executor.clone(), + )); cx.set_http_client(client.clone()); match cli.command { Commands::Fetch {} => { diff --git a/crates/extension/Cargo.toml b/crates/extension/Cargo.toml index 6ce1bd6862a1d..9fea3a768a0c0 100644 --- a/crates/extension/Cargo.toml +++ b/crates/extension/Cargo.toml @@ -56,10 +56,12 @@ task.workspace = true serde_json_lenient.workspace = true [dev-dependencies] -isahc_http_client.workspace = true +ureq_client.workspace = true ctor.workspace = true env_logger.workspace = true parking_lot.workspace = true +reqwest_client.workspace = true +tokio.workspace = true fs = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] } diff --git a/crates/extension/src/extension_builder.rs b/crates/extension/src/extension_builder.rs index 7380e699f9e71..876d0336dc707 100644 --- a/crates/extension/src/extension_builder.rs +++ b/crates/extension/src/extension_builder.rs @@ -25,7 +25,7 @@ use wit_component::ComponentEncoder; /// Once Rust 1.78 is released, there will be a `wasm32-wasip2` target available, so we will /// not need the adapter anymore. const RUST_TARGET: &str = "wasm32-wasip1"; -const WASI_ADAPTER_URL: &str = +pub const WASI_ADAPTER_URL: &str = "https://github.com/bytecodealliance/wasmtime/releases/download/v18.0.2/wasi_snapshot_preview1.reactor.wasm"; /// Compiling Tree-sitter parsers from C to WASM requires Clang 17, and a WASM build of libc diff --git a/crates/extension/src/extension_store_test.rs b/crates/extension/src/extension_store_test.rs index 126e6b2cfbdad..7a3c645e041a1 100644 --- a/crates/extension/src/extension_store_test.rs +++ b/crates/extension/src/extension_store_test.rs @@ -1,3 +1,4 @@ +use crate::extension_builder::WASI_ADAPTER_URL; use crate::extension_manifest::SchemaVersion; use crate::extension_settings::ExtensionSettings; use crate::{ @@ -11,14 +12,14 @@ use collections::BTreeMap; use fs::{FakeFs, Fs, RealFs}; use futures::{io::BufReader, AsyncReadExt, StreamExt}; use gpui::{Context, SemanticVersion, TestAppContext}; -use http_client::{FakeHttpClient, Response}; +use http_client::{AsyncBody, FakeHttpClient, HttpClient, Response}; use indexed_docs::IndexedDocsRegistry; -use isahc_http_client::IsahcHttpClient; use language::{LanguageMatcher, LanguageRegistry, LanguageServerBinaryStatus, LanguageServerName}; use node_runtime::NodeRuntime; use parking_lot::Mutex; use project::{Project, DEFAULT_COMPLETION_CONTEXT}; use release_channel::AppVersion; +use reqwest_client::ReqwestClient; use serde_json::json; use settings::{Settings as _, SettingsStore}; use snippet_provider::SnippetRegistry; @@ -28,6 +29,7 @@ use std::{ sync::Arc, }; use theme::ThemeRegistry; +use ureq_client::UreqClient; use util::test::temp_tree; #[cfg(test)] @@ -576,7 +578,7 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) { std::env::consts::ARCH ) }); - let builder_client = IsahcHttpClient::new(None, Some(user_agent)); + let builder_client = Arc::new(UreqClient::new(None, user_agent, cx.executor().clone())); let extension_store = cx.new_model(|cx| { ExtensionStore::new( @@ -769,6 +771,50 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) { assert!(fs.metadata(&expected_server_path).await.unwrap().is_none()); } +#[gpui::test] +async fn test_wasi_adapter_download(cx: &mut TestAppContext) { + let client = Arc::new(UreqClient::new( + None, + "zed-test-wasi-adapter-download".to_string(), + cx.executor().clone(), + )); + + let mut response = client + 
.get(WASI_ADAPTER_URL, AsyncBody::default(), true) + .await + .unwrap(); + + let mut content = Vec::new(); + let mut body = BufReader::new(response.body_mut()); + body.read_to_end(&mut content).await.unwrap(); + + assert!(wasmparser::Parser::is_core_wasm(&content)); + assert_eq!(content.len(), 96801); // Determined by downloading this to my computer + wit_component::ComponentEncoder::default() + .adapter("wasi_snapshot_preview1", &content) + .unwrap(); +} + +#[tokio::test] +async fn test_wasi_adapter_download_tokio() { + let client = Arc::new(ReqwestClient::new()); + + let mut response = client + .get(WASI_ADAPTER_URL, AsyncBody::default(), true) + .await + .unwrap(); + + let mut content = Vec::new(); + let mut body = BufReader::new(response.body_mut()); + body.read_to_end(&mut content).await.unwrap(); + + assert!(wasmparser::Parser::is_core_wasm(&content)); + assert_eq!(content.len(), 96801); // Determined by downloading this to my computer + wit_component::ComponentEncoder::default() + .adapter("wasi_snapshot_preview1", &content) + .unwrap(); +} + fn init_test(cx: &mut TestAppContext) { cx.update(|cx| { let store = SettingsStore::test(cx); diff --git a/crates/extension_cli/Cargo.toml b/crates/extension_cli/Cargo.toml index bc649d8e04989..3e109a0036b2f 100644 --- a/crates/extension_cli/Cargo.toml +++ b/crates/extension_cli/Cargo.toml @@ -18,7 +18,7 @@ clap = { workspace = true, features = ["derive"] } env_logger.workspace = true extension = { workspace = true, features = ["no-webrtc"] } fs.workspace = true -isahc_http_client.workspace = true +reqwest_client.workspace = true language.workspace = true log.workspace = true rpc.workspace = true diff --git a/crates/extension_cli/src/main.rs b/crates/extension_cli/src/main.rs index 6eaebca2f0e9b..dd6f221378119 100644 --- a/crates/extension_cli/src/main.rs +++ b/crates/extension_cli/src/main.rs @@ -13,8 +13,8 @@ use extension::{ extension_builder::{CompileExtensionOptions, ExtensionBuilder}, ExtensionManifest, }; -use isahc_http_client::IsahcHttpClient; use language::LanguageConfig; +use reqwest_client::ReqwestClient; use theme::ThemeRegistry; use tree_sitter::{Language, Query, WasmStore}; @@ -66,12 +66,7 @@ async fn main() -> Result<()> { std::env::consts::OS, std::env::consts::ARCH ); - let http_client = Arc::new( - IsahcHttpClient::builder() - .default_header("User-Agent", user_agent) - .build() - .map(IsahcHttpClient::from)?, - ); + let http_client = Arc::new(ReqwestClient::user_agent(&user_agent)?); let builder = ExtensionBuilder::new(http_client, scratch_dir); builder diff --git a/crates/http_client/Cargo.toml b/crates/http_client/Cargo.toml index 0244ac41042b6..52c2947b8a7e4 100644 --- a/crates/http_client/Cargo.toml +++ b/crates/http_client/Cargo.toml @@ -16,7 +16,9 @@ path = "src/http_client.rs" doctest = true [dependencies] -http = "0.2" +http = "1.1" +rustls.workspace = true +rustls-native-certs.workspace = true anyhow.workspace = true derive_more.workspace = true futures.workspace = true diff --git a/crates/http_client/src/http_client.rs b/crates/http_client/src/http_client.rs index 2f029a1d236bb..015c73a448c5b 100644 --- a/crates/http_client/src/http_client.rs +++ b/crates/http_client/src/http_client.rs @@ -11,13 +11,21 @@ use http::request::Builder; #[cfg(feature = "test-support")] use std::fmt; use std::{ - sync::{Arc, Mutex}, + sync::{Arc, LazyLock, Mutex}, time::Duration, }; pub use url::Url; +#[derive(Clone)] pub struct ReadTimeout(pub Duration); -#[derive(Default, Debug, Clone)] +impl Default for ReadTimeout { + fn default() 
-> Self { + Self(Duration::from_secs(5)) + } +} + +#[derive(Default, Debug, Clone, PartialEq, Eq, Hash)] + pub enum RedirectPolicy { #[default] NoFollow, @@ -26,6 +34,23 @@ pub enum RedirectPolicy { } pub struct FollowRedirects(pub bool); +pub static TLS_CONFIG: LazyLock> = LazyLock::new(|| { + let mut root_store = rustls::RootCertStore::empty(); + + let root_certs = rustls_native_certs::load_native_certs(); + for error in root_certs.errors { + log::warn!("error loading native certs: {:?}", error); + } + root_store.add_parsable_certificates(&root_certs.certs); + + Arc::new( + rustls::ClientConfig::builder() + .with_safe_defaults() + .with_root_certificates(root_store) + .with_no_client_auth(), + ) +}); + pub trait HttpRequestExt { /// Set a read timeout on the request. /// For isahc, this is the low_speed_timeout. diff --git a/crates/isahc_http_client/LICENSE-APACHE b/crates/isahc_http_client/LICENSE-APACHE deleted file mode 120000 index 1cd601d0a3aff..0000000000000 --- a/crates/isahc_http_client/LICENSE-APACHE +++ /dev/null @@ -1 +0,0 @@ -../../LICENSE-APACHE \ No newline at end of file diff --git a/crates/isahc_http_client/src/isahc_http_client.rs b/crates/isahc_http_client/src/isahc_http_client.rs deleted file mode 100644 index 778f6a0459890..0000000000000 --- a/crates/isahc_http_client/src/isahc_http_client.rs +++ /dev/null @@ -1,105 +0,0 @@ -use std::{mem, sync::Arc, time::Duration}; - -use futures::future::BoxFuture; -use util::maybe; - -pub use isahc::config::Configurable; -pub struct IsahcHttpClient(isahc::HttpClient); - -pub use http_client::*; - -impl IsahcHttpClient { - pub fn new(proxy: Option, user_agent: Option) -> Arc { - let mut builder = isahc::HttpClient::builder() - .connect_timeout(Duration::from_secs(5)) - .low_speed_timeout(100, Duration::from_secs(5)) - .proxy(proxy.clone()); - if let Some(agent) = user_agent { - builder = builder.default_header("User-Agent", agent); - } - Arc::new(IsahcHttpClient(builder.build().unwrap())) - } - pub fn builder() -> isahc::HttpClientBuilder { - isahc::HttpClientBuilder::new() - } -} - -impl From for IsahcHttpClient { - fn from(client: isahc::HttpClient) -> Self { - Self(client) - } -} - -impl HttpClient for IsahcHttpClient { - fn proxy(&self) -> Option<&Uri> { - None - } - - fn send( - &self, - req: http_client::http::Request, - ) -> BoxFuture<'static, Result, anyhow::Error>> - { - let redirect_policy = req - .extensions() - .get::() - .cloned() - .unwrap_or_default(); - let read_timeout = req - .extensions() - .get::() - .map(|t| t.0); - let req = maybe!({ - let (mut parts, body) = req.into_parts(); - let mut builder = isahc::Request::builder() - .method(parts.method) - .uri(parts.uri) - .version(parts.version); - if let Some(read_timeout) = read_timeout { - builder = builder.low_speed_timeout(100, read_timeout); - } - - let headers = builder.headers_mut()?; - mem::swap(headers, &mut parts.headers); - - let extensions = builder.extensions_mut()?; - mem::swap(extensions, &mut parts.extensions); - - let isahc_body = match body.0 { - http_client::Inner::Empty => isahc::AsyncBody::empty(), - http_client::Inner::AsyncReader(reader) => isahc::AsyncBody::from_reader(reader), - http_client::Inner::SyncReader(reader) => { - isahc::AsyncBody::from_bytes_static(reader.into_inner()) - } - }; - - builder - .redirect_policy(match redirect_policy { - http_client::RedirectPolicy::FollowAll => isahc::config::RedirectPolicy::Follow, - http_client::RedirectPolicy::FollowLimit(limit) => { - isahc::config::RedirectPolicy::Limit(limit) - } - 
http_client::RedirectPolicy::NoFollow => isahc::config::RedirectPolicy::None, - }) - .body(isahc_body) - .ok() - }); - - let client = self.0.clone(); - - Box::pin(async move { - match req { - Some(req) => client - .send_async(req) - .await - .map_err(Into::into) - .map(|response| { - let (parts, body) = response.into_parts(); - let body = http_client::AsyncBody::from_reader(body); - http_client::Response::from_parts(parts, body) - }), - None => Err(anyhow::anyhow!("Request was malformed")), - } - }) - } -} diff --git a/crates/live_kit_server/Cargo.toml b/crates/live_kit_server/Cargo.toml index bad4c5a05f475..4b4b5e13dad43 100644 --- a/crates/live_kit_server/Cargo.toml +++ b/crates/live_kit_server/Cargo.toml @@ -20,7 +20,7 @@ jsonwebtoken.workspace = true log.workspace = true prost.workspace = true prost-types.workspace = true -reqwest = "0.11" +reqwest.workspace = true serde.workspace = true [build-dependencies] diff --git a/crates/reqwest_client/Cargo.toml b/crates/reqwest_client/Cargo.toml new file mode 100644 index 0000000000000..d39319125299f --- /dev/null +++ b/crates/reqwest_client/Cargo.toml @@ -0,0 +1,31 @@ +[package] +name = "reqwest_client" +version = "0.1.0" +edition = "2021" +publish = false +license = "Apache-2.0" + +[lints] +workspace = true + +[features] +test-support = [] + +[lib] +path = "src/reqwest_client.rs" +doctest = true + +[[example]] +name = "client" +path = "examples/client.rs" + +[dependencies] +anyhow.workspace = true +futures.workspace = true +serde.workspace = true +smol.workspace = true +http_client.workspace = true +tokio.workspace = true +bytes = "1.0" + +reqwest = { workspace = true, features = ["rustls-tls-manual-roots", "stream"] } diff --git a/crates/reqwest_client/LICENSE-GPL b/crates/reqwest_client/LICENSE-GPL new file mode 120000 index 0000000000000..89e542f750cd3 --- /dev/null +++ b/crates/reqwest_client/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/reqwest_client/examples/client.rs b/crates/reqwest_client/examples/client.rs new file mode 100644 index 0000000000000..1f50d21e4edc2 --- /dev/null +++ b/crates/reqwest_client/examples/client.rs @@ -0,0 +1,16 @@ +use futures::AsyncReadExt as _; +use http_client::AsyncBody; +use http_client::HttpClient; +use reqwest_client::ReqwestClient; + +#[tokio::main] +async fn main() { + let resp = ReqwestClient::new() + .get("http://zed.dev", AsyncBody::empty(), true) + .await + .unwrap(); + + let mut body = String::new(); + resp.into_body().read_to_string(&mut body).await.unwrap(); + println!("{}", &body); +} diff --git a/crates/reqwest_client/src/reqwest_client.rs b/crates/reqwest_client/src/reqwest_client.rs new file mode 100644 index 0000000000000..6e84c58954ab1 --- /dev/null +++ b/crates/reqwest_client/src/reqwest_client.rs @@ -0,0 +1,232 @@ +use std::{borrow::Cow, io::Read, pin::Pin, task::Poll}; + +use anyhow::anyhow; +use bytes::{BufMut, Bytes, BytesMut}; +use futures::{AsyncRead, TryStreamExt}; +use http_client::{http, AsyncBody, ReadTimeout}; +use reqwest::header::{HeaderMap, HeaderValue}; +use smol::future::FutureExt; + +const DEFAULT_CAPACITY: usize = 4096; + +pub struct ReqwestClient { + client: reqwest::Client, +} + +impl ReqwestClient { + pub fn new() -> Self { + Self { + client: reqwest::Client::new(), + } + } + + pub fn user_agent(agent: &str) -> anyhow::Result { + let mut map = HeaderMap::new(); + map.insert(http::header::USER_AGENT, HeaderValue::from_str(agent)?); + Ok(Self { + client: reqwest::Client::builder().default_headers(map).build()?, + }) + } 
+} + +impl From for ReqwestClient { + fn from(client: reqwest::Client) -> Self { + Self { client } + } +} + +// This struct is essentially a re-implementation of +// https://docs.rs/tokio-util/0.7.12/tokio_util/io/struct.ReaderStream.html +// except outside of Tokio's aegis +struct ReaderStream { + reader: Option>>, + buf: BytesMut, + capacity: usize, +} + +impl ReaderStream { + fn new(reader: Pin>) -> Self { + Self { + reader: Some(reader), + buf: BytesMut::new(), + capacity: DEFAULT_CAPACITY, + } + } +} + +impl futures::Stream for ReaderStream { + type Item = std::io::Result; + + fn poll_next( + mut self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> Poll> { + let mut this = self.as_mut(); + + let mut reader = match this.reader.take() { + Some(r) => r, + None => return Poll::Ready(None), + }; + + if this.buf.capacity() == 0 { + let capacity = this.capacity; + this.buf.reserve(capacity); + } + + match poll_read_buf(&mut reader, cx, &mut this.buf) { + Poll::Pending => Poll::Pending, + Poll::Ready(Err(err)) => { + self.reader = None; + + Poll::Ready(Some(Err(err))) + } + Poll::Ready(Ok(0)) => { + self.reader = None; + Poll::Ready(None) + } + Poll::Ready(Ok(_)) => { + let chunk = this.buf.split(); + self.reader = Some(reader); + Poll::Ready(Some(Ok(chunk.freeze()))) + } + } + } +} + +/// Implementation from https://docs.rs/tokio-util/0.7.12/src/tokio_util/util/poll_buf.rs.html +/// Specialized for this use case +pub fn poll_read_buf( + io: &mut Pin>, + cx: &mut std::task::Context<'_>, + buf: &mut BytesMut, +) -> Poll> { + if !buf.has_remaining_mut() { + return Poll::Ready(Ok(0)); + } + + let n = { + let dst = buf.chunk_mut(); + + // Safety: `chunk_mut()` returns a `&mut UninitSlice`, and `UninitSlice` is a + // transparent wrapper around `[MaybeUninit]`. + let dst = unsafe { &mut *(dst as *mut _ as *mut [std::mem::MaybeUninit]) }; + let mut buf = tokio::io::ReadBuf::uninit(dst); + let ptr = buf.filled().as_ptr(); + let unfilled_portion = buf.initialize_unfilled(); + // SAFETY: Pin projection + let io_pin = unsafe { Pin::new_unchecked(io) }; + std::task::ready!(io_pin.poll_read(cx, unfilled_portion)?); + + // Ensure the pointer does not change from under us + assert_eq!(ptr, buf.filled().as_ptr()); + buf.filled().len() + }; + + // Safety: This is guaranteed to be the number of initialized (and read) + // bytes due to the invariants provided by `ReadBuf::filled`. 
+ unsafe { + buf.advance_mut(n); + } + + Poll::Ready(Ok(n)) +} + +enum WrappedBodyInner { + None, + SyncReader(std::io::Cursor>), + Stream(ReaderStream), +} + +struct WrappedBody(WrappedBodyInner); + +impl WrappedBody { + fn new(body: AsyncBody) -> Self { + match body.0 { + http_client::Inner::Empty => Self(WrappedBodyInner::None), + http_client::Inner::SyncReader(cursor) => Self(WrappedBodyInner::SyncReader(cursor)), + http_client::Inner::AsyncReader(pin) => { + Self(WrappedBodyInner::Stream(ReaderStream::new(pin))) + } + } + } +} + +impl futures::stream::Stream for WrappedBody { + type Item = Result; + + fn poll_next( + mut self: std::pin::Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll> { + match &mut self.0 { + WrappedBodyInner::None => Poll::Ready(None), + WrappedBodyInner::SyncReader(cursor) => { + let mut buf = Vec::new(); + match cursor.read_to_end(&mut buf) { + Ok(_) => { + return Poll::Ready(Some(Ok(Bytes::from(buf)))); + } + Err(e) => return Poll::Ready(Some(Err(e))), + } + } + WrappedBodyInner::Stream(stream) => { + // SAFETY: Pin projection + let stream = unsafe { Pin::new_unchecked(stream) }; + futures::Stream::poll_next(stream, cx) + } + } + } +} + +impl http_client::HttpClient for ReqwestClient { + fn proxy(&self) -> Option<&http::Uri> { + None + } + + fn send( + &self, + req: http::Request, + ) -> futures::future::BoxFuture< + 'static, + Result, anyhow::Error>, + > { + let (parts, body) = req.into_parts(); + + let mut request = self.client.request(parts.method, parts.uri.to_string()); + + request = request.headers(parts.headers); + + if let Some(redirect_policy) = parts.extensions.get::() { + request = request.redirect_policy(match redirect_policy { + http_client::RedirectPolicy::NoFollow => reqwest::redirect::Policy::none(), + http_client::RedirectPolicy::FollowLimit(limit) => { + reqwest::redirect::Policy::limited(*limit as usize) + } + http_client::RedirectPolicy::FollowAll => reqwest::redirect::Policy::limited(100), + }); + } + + if let Some(ReadTimeout(timeout)) = parts.extensions.get::() { + request = request.timeout(*timeout); + } + + let body = WrappedBody::new(body); + let request = request.body(reqwest::Body::wrap_stream(body)); + + async move { + let response = request.send().await.map_err(|e| anyhow!(e))?; + let status = response.status(); + let mut builder = http::Response::builder().status(status.as_u16()); + for (name, value) in response.headers() { + builder = builder.header(name, value); + } + let bytes = response.bytes_stream(); + let bytes = bytes + .map_err(|e| futures::io::Error::new(futures::io::ErrorKind::Other, e)) + .into_async_read(); + let body = http_client::AsyncBody::from_reader(bytes); + builder.body(body).map_err(|e| anyhow!(e)) + } + .boxed() + } +} diff --git a/crates/semantic_index/Cargo.toml b/crates/semantic_index/Cargo.toml index 691d6e57f6d44..8842093f7857f 100644 --- a/crates/semantic_index/Cargo.toml +++ b/crates/semantic_index/Cargo.toml @@ -51,7 +51,7 @@ workspace.workspace = true worktree.workspace = true [dev-dependencies] -isahc_http_client.workspace = true +ureq_client.workspace = true env_logger.workspace = true client = { workspace = true, features = ["test-support"] } fs = { workspace = true, features = ["test-support"] } diff --git a/crates/semantic_index/examples/index.rs b/crates/semantic_index/examples/index.rs index c5c2c633a1060..1ebed4c17f3bd 100644 --- a/crates/semantic_index/examples/index.rs +++ b/crates/semantic_index/examples/index.rs @@ -2,7 +2,6 @@ use client::Client; use 
futures::channel::oneshot; use gpui::App; use http_client::HttpClientWithUrl; -use isahc_http_client::IsahcHttpClient; use language::language_settings::AllLanguageSettings; use project::Project; use semantic_index::{OpenAiEmbeddingModel, OpenAiEmbeddingProvider, SemanticDb}; @@ -29,7 +28,11 @@ fn main() { let clock = Arc::new(FakeSystemClock::default()); let http = Arc::new(HttpClientWithUrl::new( - IsahcHttpClient::new(None, None), + Arc::new(ureq_client::UreqClient::new( + None, + "Zed semantic index example".to_string(), + cx.background_executor().clone(), + )), "http://localhost:11434", None, )); diff --git a/crates/isahc_http_client/Cargo.toml b/crates/ureq_client/Cargo.toml similarity index 52% rename from crates/isahc_http_client/Cargo.toml rename to crates/ureq_client/Cargo.toml index 82f7621bf8cac..a14419a2261a0 100644 --- a/crates/isahc_http_client/Cargo.toml +++ b/crates/ureq_client/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "isahc_http_client" +name = "ureq_client" version = "0.1.0" edition = "2021" publish = false @@ -12,11 +12,21 @@ workspace = true test-support = [] [lib] -path = "src/isahc_http_client.rs" +path = "src/ureq_client.rs" +doctest = true + +[[example]] +name = "client" +path = "examples/client.rs" [dependencies] anyhow.workspace = true futures.workspace = true +serde.workspace = true +smol.workspace = true +gpui.workspace = true http_client.workspace = true -isahc.workspace = true util.workspace = true +parking_lot.workspace = true + +ureq = "=2.9.1" diff --git a/crates/ureq_client/LICENSE-GPL b/crates/ureq_client/LICENSE-GPL new file mode 120000 index 0000000000000..89e542f750cd3 --- /dev/null +++ b/crates/ureq_client/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/ureq_client/examples/client.rs b/crates/ureq_client/examples/client.rs new file mode 100644 index 0000000000000..c5caae40dac9e --- /dev/null +++ b/crates/ureq_client/examples/client.rs @@ -0,0 +1,24 @@ +use futures::AsyncReadExt; +use http_client::{AsyncBody, HttpClient}; +use ureq_client::UreqClient; + +fn main() { + gpui::App::headless().run(|cx| { + println!("{:?}", std::thread::current().id()); + cx.spawn(|cx| async move { + let resp = UreqClient::new( + None, + "Conrad's bot".to_string(), + cx.background_executor().clone(), + ) + .get("http://zed.dev", AsyncBody::empty(), true) + .await + .unwrap(); + + let mut body = String::new(); + resp.into_body().read_to_string(&mut body).await.unwrap(); + println!("{}", body); + }) + .detach(); + }) +} diff --git a/crates/ureq_client/src/ureq_client.rs b/crates/ureq_client/src/ureq_client.rs new file mode 100644 index 0000000000000..8951e80ac2df5 --- /dev/null +++ b/crates/ureq_client/src/ureq_client.rs @@ -0,0 +1,187 @@ +use std::collections::HashMap; +use std::io::Read; +use std::sync::Arc; +use std::time::Duration; +use std::{pin::Pin, task::Poll}; + +use anyhow::Error; +use futures::channel::mpsc; +use futures::future::BoxFuture; +use futures::{AsyncRead, SinkExt, StreamExt}; +use http_client::{http, AsyncBody, HttpClient, RedirectPolicy, Uri}; +use smol::future::FutureExt; +use util::ResultExt; + +pub struct UreqClient { + // Note in ureq 2.x the options are stored on the Agent. + // In ureq 3.x we'll be able to set these on the request. + // In practice it's probably "fine" to have many clients, the number of distinct options + // is low; and most requests to the same connection will have the same options so the + // connection pool will work. 
+ clients: Arc>>, + proxy_url: Option, + proxy: Option, + user_agent: String, + background_executor: gpui::BackgroundExecutor, +} + +impl UreqClient { + pub fn new( + proxy_url: Option, + user_agent: String, + background_executor: gpui::BackgroundExecutor, + ) -> Self { + Self { + clients: Arc::default(), + proxy_url: proxy_url.clone(), + proxy: proxy_url.and_then(|url| ureq::Proxy::new(url.to_string()).log_err()), + user_agent, + background_executor, + } + } + + fn agent_for(&self, redirect_policy: RedirectPolicy, timeout: Duration) -> ureq::Agent { + let mut clients = self.clients.lock(); + // in case our assumption of distinct options is wrong, we'll sporadically clean it out. + if clients.len() > 50 { + clients.clear() + } + + clients + .entry((timeout, redirect_policy.clone())) + .or_insert_with(|| { + let mut builder = ureq::AgentBuilder::new() + .timeout_connect(Duration::from_secs(5)) + .timeout_read(timeout) + .timeout_write(timeout) + .user_agent(&self.user_agent) + .tls_config(http_client::TLS_CONFIG.clone()) + .redirects(match redirect_policy { + RedirectPolicy::NoFollow => 0, + RedirectPolicy::FollowLimit(limit) => limit, + RedirectPolicy::FollowAll => 100, + }); + if let Some(proxy) = &self.proxy { + builder = builder.proxy(proxy.clone()); + } + builder.build() + }) + .clone() + } +} +impl HttpClient for UreqClient { + fn proxy(&self) -> Option<&Uri> { + self.proxy_url.as_ref() + } + + fn send( + &self, + request: http::Request, + ) -> BoxFuture<'static, Result, Error>> { + let agent = self.agent_for( + request + .extensions() + .get::() + .cloned() + .unwrap_or_default(), + request + .extensions() + .get::() + .cloned() + .unwrap_or_default() + .0, + ); + let mut req = agent.request(&request.method().as_ref(), &request.uri().to_string()); + for (name, value) in request.headers().into_iter() { + req = req.set(name.as_str(), value.to_str().unwrap()); + } + let body = request.into_body(); + let executor = self.background_executor.clone(); + + self.background_executor + .spawn(async move { + let response = req.send(body)?; + + let mut builder = http::Response::builder() + .status(response.status()) + .version(http::Version::HTTP_11); + for name in response.headers_names() { + if let Some(value) = response.header(&name) { + builder = builder.header(name, value); + } + } + + let body = AsyncBody::from_reader(UreqResponseReader::new(executor, response)); + let http_response = builder.body(body)?; + + Ok(http_response) + }) + .boxed() + } +} + +struct UreqResponseReader { + receiver: mpsc::Receiver>>, + buffer: Vec, + idx: usize, + _task: gpui::Task<()>, +} + +impl UreqResponseReader { + fn new(background_executor: gpui::BackgroundExecutor, response: ureq::Response) -> Self { + let (mut sender, receiver) = mpsc::channel(1); + let mut reader = response.into_reader(); + let task = background_executor.spawn(async move { + let mut buffer = vec![0; 8192]; + loop { + let n = match reader.read(&mut buffer) { + Ok(0) => break, + Ok(n) => n, + Err(e) => { + let _ = sender.send(Err(e)).await; + break; + } + }; + let _ = sender.send(Ok(buffer[..n].to_vec())).await; + } + }); + + UreqResponseReader { + _task: task, + receiver, + buffer: Vec::new(), + idx: 0, + } + } +} + +impl AsyncRead for UreqResponseReader { + fn poll_read( + mut self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + buf: &mut [u8], + ) -> Poll> { + if self.buffer.is_empty() { + match self.receiver.poll_next_unpin(cx) { + Poll::Ready(Some(Ok(data))) => self.buffer = data, + Poll::Ready(Some(Err(e))) => { + return 
Poll::Ready(Err(e)); + } + Poll::Ready(None) => { + return Poll::Ready(Ok(0)); + } + Poll::Pending => { + return Poll::Pending; + } + } + } + let n = std::cmp::min(buf.len(), self.buffer.len() - self.idx); + buf[..n].copy_from_slice(&self.buffer[self.idx..self.idx + n]); + self.idx += n; + if self.idx == self.buffer.len() { + self.buffer.clear(); + self.idx = 0; + } + Poll::Ready(Ok(n)) + } +} diff --git a/crates/vim/Cargo.toml b/crates/vim/Cargo.toml index dcbf2e8b597a3..99394b7922c4f 100644 --- a/crates/vim/Cargo.toml +++ b/crates/vim/Cargo.toml @@ -17,7 +17,7 @@ neovim = ["nvim-rs", "async-compat", "async-trait", "tokio"] [dependencies] anyhow.workspace = true -async-compat = { version = "0.2.1", "optional" = true } +async-compat = { workspace = true, "optional" = true } async-trait = { workspace = true, "optional" = true } collections.workspace = true command_palette.workspace = true diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index e22f75f5bb3e5..ac73bf15eea18 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -57,7 +57,7 @@ http_client.workspace = true image_viewer.workspace = true inline_completion_button.workspace = true install_cli.workspace = true -isahc_http_client.workspace = true +ureq_client.workspace = true journal.workspace = true language.workspace = true language_model.workspace = true diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 06f1d926aea37..adb5feb9fe5bd 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -24,9 +24,9 @@ use gpui::{ UpdateGlobal as _, VisualContext, }; use http_client::{read_proxy_from_env, Uri}; -use isahc_http_client::IsahcHttpClient; use language::LanguageRegistry; use log::LevelFilter; +use ureq_client::UreqClient; use assets::Assets; use node_runtime::{NodeBinaryOptions, NodeRuntime}; @@ -334,9 +334,7 @@ fn main() { log::info!("========== starting zed =========="); - let app = App::new() - .with_assets(Assets) - .with_http_client(IsahcHttpClient::new(None, None)); + let app = App::new().with_assets(Assets); let system_id = app.background_executor().block(system_id()).ok(); let installation_id = app.background_executor().block(installation_id()).ok(); @@ -470,8 +468,8 @@ fn main() { .ok() }) .or_else(read_proxy_from_env); - let http = IsahcHttpClient::new(proxy_url, Some(user_agent)); - cx.set_http_client(http); + let http = UreqClient::new(proxy_url, user_agent, cx.background_executor().clone()); + cx.set_http_client(Arc::new(http)); ::set_global(fs.clone(), cx); From 9565a90528056988a402280c9303f55843bc63fb Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Wed, 2 Oct 2024 16:10:25 -0400 Subject: [PATCH 194/228] collab: Revert changes to Clickhouse event rows (#18654) This PR reverts the changes to the Clickhouse event rows that were included in https://github.com/zed-industries/zed/pull/18414. The changes don't seem to be correct, as they make the row structs differ from the underlying table schema. 
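To make the mismatch concrete — a minimal sketch, assuming the usual `Option<T>` ↔ `Nullable(T)` mapping in the ClickHouse driver; the column type shown here is an assumption, not the real events table schema:

```rust
// Sketch only: `Nullable(Bool)` is an assumed column type, not the actual schema.
// If the table declares `is_staff Nullable(Bool)`, the row struct must keep the Option:
struct EditorEventRow {
    is_staff: Option<bool>, // a missing value round-trips as NULL
}

// The reverted change had flattened this to
//
//     is_staff: bool, // filled via body.is_staff.unwrap_or_default()
//
// which silently turns "unknown" into `false` and no longer lines up with a Nullable column.
```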
Release Notes: - N/A --- crates/collab/src/api/events.rs | 40 ++++++++++++++++----------------- 1 file changed, 20 insertions(+), 20 deletions(-) diff --git a/crates/collab/src/api/events.rs b/crates/collab/src/api/events.rs index dd1370e886644..bbfa69c0b8f70 100644 --- a/crates/collab/src/api/events.rs +++ b/crates/collab/src/api/events.rs @@ -674,7 +674,7 @@ pub struct EditorEventRow { copilot_enabled_for_language: bool, historical_event: bool, architecture: String, - is_staff: bool, + is_staff: Option, major: Option, minor: Option, patch: Option, @@ -708,7 +708,7 @@ impl EditorEventRow { installation_id: body.installation_id.clone().unwrap_or_default(), session_id: body.session_id.clone(), metrics_id: body.metrics_id.clone().unwrap_or_default(), - is_staff: body.is_staff.unwrap_or_default(), + is_staff: body.is_staff, time: time.timestamp_millis(), operation: event.operation, file_extension: event.file_extension.unwrap_or_default(), @@ -741,7 +741,7 @@ pub struct InlineCompletionEventRow { region_code: String, city: String, time: i64, - is_staff: bool, + is_staff: Option, major: Option, minor: Option, patch: Option, @@ -772,7 +772,7 @@ impl InlineCompletionEventRow { os_version: body.os_version.clone().unwrap_or_default(), installation_id: body.installation_id.clone().unwrap_or_default(), session_id: body.session_id.clone(), - is_staff: body.is_staff.unwrap_or_default(), + is_staff: body.is_staff, time: time.timestamp_millis(), file_extension: event.file_extension.unwrap_or_default(), signed_in: wrapper.signed_in, @@ -800,7 +800,7 @@ pub struct CallEventRow { // ClientEventBase installation_id: String, session_id: Option, - is_staff: bool, + is_staff: Option, time: i64, // CallEventRow @@ -832,7 +832,7 @@ impl CallEventRow { os_version: body.os_version.clone().unwrap_or_default(), installation_id: body.installation_id.clone().unwrap_or_default(), session_id: body.session_id.clone(), - is_staff: body.is_staff.unwrap_or_default(), + is_staff: body.is_staff, time: time.timestamp_millis(), operation: event.operation, room_id: event.room_id, @@ -856,7 +856,7 @@ pub struct AssistantEventRow { // ClientEventBase installation_id: Option, session_id: Option, - is_staff: bool, + is_staff: Option, time: i64, // AssistantEventRow @@ -891,7 +891,7 @@ impl AssistantEventRow { os_version: body.os_version.clone().unwrap_or_default(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), - is_staff: body.is_staff.unwrap_or_default(), + is_staff: body.is_staff, time: time.timestamp_millis(), conversation_id: event.conversation_id.unwrap_or_default(), kind: event.kind.to_string(), @@ -909,7 +909,7 @@ impl AssistantEventRow { pub struct CpuEventRow { installation_id: Option, session_id: Option, - is_staff: bool, + is_staff: Option, usage_as_percentage: f32, core_count: u32, app_version: String, @@ -947,7 +947,7 @@ impl CpuEventRow { os_version: body.os_version.clone().unwrap_or_default(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), - is_staff: body.is_staff.unwrap_or_default(), + is_staff: body.is_staff, time: time.timestamp_millis(), usage_as_percentage: event.usage_as_percentage, core_count: event.core_count, @@ -970,7 +970,7 @@ pub struct MemoryEventRow { // ClientEventBase installation_id: Option, session_id: Option, - is_staff: bool, + is_staff: Option, time: i64, // MemoryEventRow @@ -1001,7 +1001,7 @@ impl MemoryEventRow { os_version: body.os_version.clone().unwrap_or_default(), installation_id: body.installation_id.clone(), 
session_id: body.session_id.clone(), - is_staff: body.is_staff.unwrap_or_default(), + is_staff: body.is_staff, time: time.timestamp_millis(), memory_in_bytes: event.memory_in_bytes, virtual_memory_in_bytes: event.virtual_memory_in_bytes, @@ -1024,7 +1024,7 @@ pub struct AppEventRow { // ClientEventBase installation_id: Option, session_id: Option, - is_staff: bool, + is_staff: Option, time: i64, // AppEventRow @@ -1054,7 +1054,7 @@ impl AppEventRow { os_version: body.os_version.clone().unwrap_or_default(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), - is_staff: body.is_staff.unwrap_or_default(), + is_staff: body.is_staff, time: time.timestamp_millis(), operation: event.operation, } @@ -1076,7 +1076,7 @@ pub struct SettingEventRow { // ClientEventBase installation_id: Option, session_id: Option, - is_staff: bool, + is_staff: Option, time: i64, // SettingEventRow setting: String, @@ -1106,7 +1106,7 @@ impl SettingEventRow { os_version: body.os_version.clone().unwrap_or_default(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), - is_staff: body.is_staff.unwrap_or_default(), + is_staff: body.is_staff, time: time.timestamp_millis(), setting: event.setting, value: event.value, @@ -1129,7 +1129,7 @@ pub struct ExtensionEventRow { // ClientEventBase installation_id: Option, session_id: Option, - is_staff: bool, + is_staff: Option, time: i64, // ExtensionEventRow @@ -1164,7 +1164,7 @@ impl ExtensionEventRow { os_version: body.os_version.clone().unwrap_or_default(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), - is_staff: body.is_staff.unwrap_or_default(), + is_staff: body.is_staff, time: time.timestamp_millis(), extension_id: event.extension_id, extension_version: event.version, @@ -1198,7 +1198,7 @@ pub struct ReplEventRow { // ClientEventBase installation_id: Option, session_id: Option, - is_staff: bool, + is_staff: Option, time: i64, // ReplEventRow @@ -1230,7 +1230,7 @@ impl ReplEventRow { os_version: body.os_version.clone().unwrap_or_default(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), - is_staff: body.is_staff.unwrap_or_default(), + is_staff: body.is_staff, time: time.timestamp_millis(), kernel_language: event.kernel_language, kernel_status: event.kernel_status, From 6f4385e73741b0cba6cb3028ecd8d4d76086ba4b Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Wed, 2 Oct 2024 16:26:48 -0400 Subject: [PATCH 195/228] Sort dependencies in `Cargo.toml` files (#18657) This PR sorts the dependencies in various `Cargo.toml` files after #18414. 
Release Notes: - N/A --- Cargo.toml | 24 ++++++++++++------------ crates/collab/Cargo.toml | 12 ++++++------ crates/evals/Cargo.toml | 12 ++++++------ crates/extension/Cargo.toml | 17 ++++++++--------- crates/extension_cli/Cargo.toml | 2 +- crates/http_client/Cargo.toml | 6 +++--- crates/reqwest_client/Cargo.toml | 4 ++-- crates/semantic_index/Cargo.toml | 18 +++++++++--------- crates/ureq_client/Cargo.toml | 7 +++---- crates/vim/Cargo.toml | 10 ++++------ crates/zed/Cargo.toml | 2 +- 11 files changed, 55 insertions(+), 59 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index fea528db5b89e..8feb93a57856a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -7,7 +7,6 @@ members = [ "crates/assistant", "crates/assistant_slash_command", "crates/assistant_tool", - "crates/ureq_client", "crates/audio", "crates/auto_update", "crates/breadcrumbs", @@ -87,8 +86,8 @@ members = [ "crates/release_channel", "crates/remote", "crates/remote_server", - "crates/reqwest_client", "crates/repl", + "crates/reqwest_client", "crates/rich_text", "crates/rope", "crates/rpc", @@ -123,6 +122,7 @@ members = [ "crates/ui", "crates/ui_input", "crates/ui_macros", + "crates/ureq_client", "crates/util", "crates/vcs_menu", "crates/vim", @@ -176,6 +176,7 @@ members = [ default-members = ["crates/zed"] [workspace.dependencies] + # # Workspace member crates # @@ -187,8 +188,6 @@ assets = { path = "crates/assets" } assistant = { path = "crates/assistant" } assistant_slash_command = { path = "crates/assistant_slash_command" } assistant_tool = { path = "crates/assistant_tool" } -ureq_client = { path = "crates/ureq_client" } -async-compat = { version = "0.2.1" } audio = { path = "crates/audio" } auto_update = { path = "crates/auto_update" } breadcrumbs = { path = "crates/breadcrumbs" } @@ -223,7 +222,6 @@ go_to_line = { path = "crates/go_to_line" } google_ai = { path = "crates/google_ai" } gpui = { path = "crates/gpui" } gpui_macros = { path = "crates/gpui_macros" } -handlebars = "4.3" headless = { path = "crates/headless" } html_to_markdown = { path = "crates/html_to_markdown" } http_client = { path = "crates/http_client" } @@ -302,6 +300,7 @@ title_bar = { path = "crates/title_bar" } ui = { path = "crates/ui" } ui_input = { path = "crates/ui_input" } ui_macros = { path = "crates/ui_macros" } +ureq_client = { path = "crates/ureq_client" } util = { path = "crates/util" } vcs_menu = { path = "crates/vcs_menu" } vim = { path = "crates/vim" } @@ -321,6 +320,7 @@ any_vec = "0.14" anyhow = "1.0.86" arrayvec = { version = "0.7.4", features = ["serde"] } ashpd = "0.9.1" +async-compat = "0.2.1" async-compression = { version = "0.4", features = ["gzip", "futures-io"] } async-dispatcher = "0.1" async-fs = "1.6" @@ -359,15 +359,15 @@ futures-batch = "0.6.1" futures-lite = "1.13" git2 = { version = "0.19", default-features = false } globset = "0.4" +handlebars = "4.3" heed = { version = "0.20.1", features = ["read-txn-no-tls"] } hex = "0.4.3" -hyper = "0.14" html5ever = "0.27.0" +hyper = "0.14" ignore = "0.4.22" image = "0.25.1" indexmap = { version = "1.6.2", features = ["serde"] } indoc = "2" - itertools = "0.13.0" jsonwebtoken = "9.3" libc = "0.2" @@ -382,17 +382,18 @@ ordered-float = "2.1.1" palette = { version = "0.7.5", default-features = false, features = ["std"] } parking_lot = "0.12.1" pathdiff = "0.2" -profiling = "1" postage = { version = "0.5", features = ["futures-traits"] } pretty_assertions = "1.3.0" +profiling = "1" prost = "0.9" prost-build = "0.9" prost-types = "0.9" +protols-tree-sitter-proto = { git = 
"https://github.com/zed-industries/tree-sitter-proto", rev = "0848bd30a64be48772e15fbb9d5ba8c0cc5772ad" } pulldown-cmark = { version = "0.12.0", default-features = false } rand = "0.8.5" regex = "1.5" repair_json = "0.1.0" -reqwest = { git = "https://github.com/zed-industries/reqwest.git", rev = "fd110f6998da16bbca97b6dddda9be7827c50e29" } +reqwest = { git = "https://github.com/zed-industries/reqwest.git", rev = "fd110f6998da16bbca97b6dddda9be7827c50e29" } rsa = "0.9.6" runtimelib = { version = "0.15", default-features = false, features = [ "async-dispatcher-runtime", @@ -453,15 +454,14 @@ tree-sitter-html = "0.20" tree-sitter-jsdoc = "0.23" tree-sitter-json = "0.23" tree-sitter-md = { git = "https://github.com/zed-industries/tree-sitter-markdown", rev = "4cfa6aad6b75052a5077c80fd934757d9267d81b" } -protols-tree-sitter-proto = { git = "https://github.com/zed-industries/tree-sitter-proto", rev = "0848bd30a64be48772e15fbb9d5ba8c0cc5772ad" } tree-sitter-python = "0.23" tree-sitter-regex = "0.23" tree-sitter-ruby = "0.23" tree-sitter-rust = "0.23" tree-sitter-typescript = "0.23" -tree-sitter-yaml = { git = "https://github.com/zed-industries/tree-sitter-yaml", rev = "baff0b51c64ef6a1fb1f8390f3ad6015b83ec13a" } -unindent = "0.1.7" +tree-sitter-yaml = { git = "https://github.com/zed-industries/tree-sitter-yaml", rev = "baff0b51c64ef6a1fb1f8390f3ad6015b83ec13a" } unicase = "2.6" +unindent = "0.1.7" unicode-segmentation = "1.10" url = "2.2" uuid = { version = "1.1.2", features = ["v4", "v5", "serde"] } diff --git a/crates/collab/Cargo.toml b/crates/collab/Cargo.toml index 7d4c5d0c706b7..de7a3c621465e 100644 --- a/crates/collab/Cargo.toml +++ b/crates/collab/Cargo.toml @@ -28,8 +28,8 @@ axum = { version = "0.6", features = ["json", "headers", "ws"] } axum-extra = { version = "0.4", features = ["erased-json"] } base64.workspace = true chrono.workspace = true -clock.workspace = true clickhouse.workspace = true +clock.workspace = true collections.workspace = true dashmap.workspace = true envy = "0.4.2" @@ -37,19 +37,19 @@ futures.workspace = true google_ai.workspace = true hex.workspace = true http_client.workspace = true -reqwest_client.workspace = true jsonwebtoken.workspace = true live_kit_server.workspace = true log.workspace = true nanoid.workspace = true open_ai.workspace = true -supermaven_api.workspace = true parking_lot.workspace = true prometheus = "0.13" prost.workspace = true rand.workspace = true reqwest = { version = "0.11", features = ["json"] } +reqwest_client.workspace = true rpc.workspace = true +rustc-demangle.workspace = true scrypt = "0.11" sea-orm = { version = "1.1.0-rc.1", features = ["sqlx-postgres", "postgres-array", "runtime-tokio-rustls", "with-uuid"] } semantic_version.workspace = true @@ -61,7 +61,7 @@ sha2.workspace = true sqlx = { version = "0.8", features = ["runtime-tokio-rustls", "postgres", "json", "time", "uuid", "any"] } strum.workspace = true subtle.workspace = true -rustc-demangle.workspace = true +supermaven_api.workspace = true telemetry_events.workspace = true text.workspace = true thiserror.workspace = true @@ -85,6 +85,7 @@ client = { workspace = true, features = ["test-support"] } collab_ui = { workspace = true, features = ["test-support"] } collections = { workspace = true, features = ["test-support"] } ctor.workspace = true +dev_server_projects.workspace = true editor = { workspace = true, features = ["test-support"] } env_logger.workspace = true file_finder.workspace = true @@ -92,6 +93,7 @@ fs = { workspace = true, features = ["test-support"] } git = { 
workspace = true, features = ["test-support"] } git_hosting_providers.workspace = true gpui = { workspace = true, features = ["test-support"] } +headless.workspace = true hyper.workspace = true indoc.workspace = true language = { workspace = true, features = ["test-support"] } @@ -108,7 +110,6 @@ recent_projects = { workspace = true } release_channel.workspace = true remote = { workspace = true, features = ["test-support"] } remote_server.workspace = true -dev_server_projects.workspace = true rpc = { workspace = true, features = ["test-support"] } sea-orm = { version = "1.1.0-rc.1", features = ["sqlx-sqlite"] } serde_json.workspace = true @@ -120,7 +121,6 @@ unindent.workspace = true util.workspace = true workspace = { workspace = true, features = ["test-support"] } worktree = { workspace = true, features = ["test-support"] } -headless.workspace = true [package.metadata.cargo-machete] ignored = ["async-stripe"] diff --git a/crates/evals/Cargo.toml b/crates/evals/Cargo.toml index 52af0ce446f91..2697b768453f1 100644 --- a/crates/evals/Cargo.toml +++ b/crates/evals/Cargo.toml @@ -14,9 +14,8 @@ name = "eval" path = "src/eval.rs" [dependencies] -clap.workspace = true anyhow.workspace = true -ureq_client.workspace = true +clap.workspace = true client.workspace = true clock.workspace = true collections.workspace = true @@ -25,14 +24,15 @@ feature_flags.workspace = true fs.workspace = true git.workspace = true gpui.workspace = true +http_client.workspace = true language.workspace = true languages.workspace = true -http_client.workspace = true +node_runtime.workspace = true open_ai.workspace = true project.workspace = true -settings.workspace = true +semantic_index.workspace = true serde.workspace = true serde_json.workspace = true +settings.workspace = true smol.workspace = true -semantic_index.workspace = true -node_runtime.workspace = true +ureq_client.workspace = true diff --git a/crates/extension/Cargo.toml b/crates/extension/Cargo.toml index 9fea3a768a0c0..2b1d6193f8669 100644 --- a/crates/extension/Cargo.toml +++ b/crates/extension/Cargo.toml @@ -39,32 +39,31 @@ schemars.workspace = true semantic_version.workspace = true serde.workspace = true serde_json.workspace = true +serde_json_lenient.workspace = true settings.workspace = true snippet_provider.workspace = true +task.workspace = true theme.workspace = true toml.workspace = true ui.workspace = true url.workspace = true util.workspace = true wasm-encoder.workspace = true -wasmtime.workspace = true -wasmtime-wasi.workspace = true wasmparser.workspace = true +wasmtime-wasi.workspace = true +wasmtime.workspace = true wit-component.workspace = true workspace.workspace = true -task.workspace = true -serde_json_lenient.workspace = true [dev-dependencies] -ureq_client.workspace = true ctor.workspace = true env_logger.workspace = true -parking_lot.workspace = true -reqwest_client.workspace = true -tokio.workspace = true - fs = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] } language = { workspace = true, features = ["test-support"] } +parking_lot.workspace = true project = { workspace = true, features = ["test-support"] } +reqwest_client.workspace = true +tokio.workspace = true +ureq_client.workspace = true workspace = { workspace = true, features = ["test-support"] } diff --git a/crates/extension_cli/Cargo.toml b/crates/extension_cli/Cargo.toml index 3e109a0036b2f..6de3e858d4fff 100644 --- a/crates/extension_cli/Cargo.toml +++ b/crates/extension_cli/Cargo.toml @@ -18,9 +18,9 @@ clap = { 
workspace = true, features = ["derive"] } env_logger.workspace = true extension = { workspace = true, features = ["no-webrtc"] } fs.workspace = true -reqwest_client.workspace = true language.workspace = true log.workspace = true +reqwest_client.workspace = true rpc.workspace = true serde.workspace = true serde_json.workspace = true diff --git a/crates/http_client/Cargo.toml b/crates/http_client/Cargo.toml index 52c2947b8a7e4..e8585cff9820f 100644 --- a/crates/http_client/Cargo.toml +++ b/crates/http_client/Cargo.toml @@ -16,13 +16,13 @@ path = "src/http_client.rs" doctest = true [dependencies] -http = "1.1" -rustls.workspace = true -rustls-native-certs.workspace = true anyhow.workspace = true derive_more.workspace = true futures.workspace = true +http = "1.1" log.workspace = true +rustls-native-certs.workspace = true +rustls.workspace = true serde.workspace = true serde_json.workspace = true smol.workspace = true diff --git a/crates/reqwest_client/Cargo.toml b/crates/reqwest_client/Cargo.toml index d39319125299f..060a382d72ee4 100644 --- a/crates/reqwest_client/Cargo.toml +++ b/crates/reqwest_client/Cargo.toml @@ -21,11 +21,11 @@ path = "examples/client.rs" [dependencies] anyhow.workspace = true +bytes = "1.0" futures.workspace = true +http_client.workspace = true serde.workspace = true smol.workspace = true -http_client.workspace = true tokio.workspace = true -bytes = "1.0" reqwest = { workspace = true, features = ["rustls-tls-manual-roots", "stream"] } diff --git a/crates/semantic_index/Cargo.toml b/crates/semantic_index/Cargo.toml index 8842093f7857f..508e64ffea362 100644 --- a/crates/semantic_index/Cargo.toml +++ b/crates/semantic_index/Cargo.toml @@ -26,42 +26,42 @@ clock.workspace = true collections.workspace = true feature_flags.workspace = true fs.workspace = true -futures.workspace = true futures-batch.workspace = true +futures.workspace = true gpui.workspace = true +heed.workspace = true +http_client.workspace = true language.workspace = true language_model.workspace = true log.workspace = true -heed.workspace = true -http_client.workspace = true open_ai.workspace = true parking_lot.workspace = true project.workspace = true -settings.workspace = true serde.workspace = true serde_json.workspace = true +settings.workspace = true sha2.workspace = true smol.workspace = true theme.workspace = true tree-sitter.workspace = true ui. workspace = true -util. workspace = true unindent.workspace = true +util. 
workspace = true workspace.workspace = true worktree.workspace = true [dev-dependencies] -ureq_client.workspace = true -env_logger.workspace = true client = { workspace = true, features = ["test-support"] } +env_logger.workspace = true fs = { workspace = true, features = ["test-support"] } futures.workspace = true gpui = { workspace = true, features = ["test-support"] } +http_client = { workspace = true, features = ["test-support"] } language = { workspace = true, features = ["test-support"] } languages.workspace = true project = { workspace = true, features = ["test-support"] } tempfile.workspace = true +ureq_client.workspace = true util = { workspace = true, features = ["test-support"] } -worktree = { workspace = true, features = ["test-support"] } workspace = { workspace = true, features = ["test-support"] } -http_client = { workspace = true, features = ["test-support"] } +worktree = { workspace = true, features = ["test-support"] } diff --git a/crates/ureq_client/Cargo.toml b/crates/ureq_client/Cargo.toml index a14419a2261a0..757ba010946c7 100644 --- a/crates/ureq_client/Cargo.toml +++ b/crates/ureq_client/Cargo.toml @@ -22,11 +22,10 @@ path = "examples/client.rs" [dependencies] anyhow.workspace = true futures.workspace = true -serde.workspace = true -smol.workspace = true gpui.workspace = true http_client.workspace = true -util.workspace = true parking_lot.workspace = true - +serde.workspace = true +smol.workspace = true ureq = "=2.9.1" +util.workspace = true diff --git a/crates/vim/Cargo.toml b/crates/vim/Cargo.toml index 99394b7922c4f..bb347f49b79d8 100644 --- a/crates/vim/Cargo.toml +++ b/crates/vim/Cargo.toml @@ -28,10 +28,9 @@ itertools.workspace = true language.workspace = true log.workspace = true multi_buffer.workspace = true -nvim-rs = { git = "https://github.com/KillTheMule/nvim-rs", branch = "master", features = [ - "use_tokio", -], optional = true } +nvim-rs = { git = "https://github.com/KillTheMule/nvim-rs", branch = "master", features = ["use_tokio"], optional = true } regex.workspace = true +schemars.workspace = true search.workspace = true serde.workspace = true serde_derive.workspace = true @@ -39,21 +38,20 @@ serde_json.workspace = true settings.workspace = true tokio = { version = "1.15", "optional" = true } ui.workspace = true +util.workspace = true workspace.workspace = true zed_actions.workspace = true -schemars.workspace = true -util.workspace = true [dev-dependencies] command_palette.workspace = true editor = { workspace = true, features = ["test-support"] } futures.workspace = true gpui = { workspace = true, features = ["test-support"] } -release_channel.workspace = true indoc.workspace = true language = { workspace = true, features = ["test-support"] } lsp = { workspace = true, features = ["test-support"] } parking_lot.workspace = true +release_channel.workspace = true settings.workspace = true util = { workspace = true, features = ["test-support"] } workspace = { workspace = true, features = ["test-support"] } diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index ac73bf15eea18..e340e176a7414 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -57,7 +57,6 @@ http_client.workspace = true image_viewer.workspace = true inline_completion_button.workspace = true install_cli.workspace = true -ureq_client.workspace = true journal.workspace = true language.workspace = true language_model.workspace = true @@ -108,6 +107,7 @@ theme.workspace = true theme_selector.workspace = true time.workspace = true ui.workspace = true +ureq_client.workspace 
= true url.workspace = true urlencoding = "2.1.2" util.workspace = true From e2d613a803e0f04c6365d832e5300059ab493ae3 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 2 Oct 2024 17:39:32 -0400 Subject: [PATCH 196/228] Update Rust crate clap to v4.5.19 (#18660) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [clap](https://redirect.github.com/clap-rs/clap) | workspace.dependencies | patch | `4.5.18` -> `4.5.19` | --- ### Release Notes
clap-rs/clap (clap) ### [`v4.5.19`](https://redirect.github.com/clap-rs/clap/blob/HEAD/CHANGELOG.md#4519---2024-10-01) [Compare Source](https://redirect.github.com/clap-rs/clap/compare/v4.5.18...v4.5.19) ##### Internal - Update dependencies
--- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a96e59df34c1d..187d0c92c806c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2275,9 +2275,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.18" +version = "4.5.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0956a43b323ac1afaffc053ed5c4b7c1f1800bacd1683c353aabbb752515dd3" +checksum = "7be5744db7978a28d9df86a214130d106a89ce49644cbc4e3f0c22c3fba30615" dependencies = [ "clap_builder", "clap_derive", @@ -2285,9 +2285,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.18" +version = "4.5.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d72166dd41634086d5803a47eb71ae740e61d84709c36f3c34110173db3961b" +checksum = "a5fbc17d3ef8278f55b282b2a2e75ae6f6c7d4bb70ed3d0382375104bfafdb4b" dependencies = [ "anstream", "anstyle", @@ -11571,12 +11571,12 @@ dependencies = [ [[package]] name = "terminal_size" -version = "0.3.0" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21bebf2b7c9e0a515f6e0f8c51dc0f8e4696391e6f1ff30379559f8365fb0df7" +checksum = "4f599bd7ca042cfdf8f4512b277c02ba102247820f9d9d4a9f521f496751a6ef" dependencies = [ "rustix 0.38.35", - "windows-sys 0.48.0", + "windows-sys 0.59.0", ] [[package]] From 19b186671b85cb587c187bd33b1ccead43abe49a Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Thu, 3 Oct 2024 00:35:56 +0200 Subject: [PATCH 197/228] ssh: Add session state indicator to title bar (#18645) ![image](https://github.com/user-attachments/assets/0ed6f59c-e0e7-49e6-8db7-f09ec5cdf653) The indicator turns yellow when ssh client is trying to reconnect. Note that the state tracking is probably not ideal (we'll see how it pans out once we start dog-fooding), but at the very least "green=good" should be a decent mental model for now. 
Release Notes: - N/A --- crates/project/src/project.rs | 9 +++- crates/recent_projects/src/ssh_connections.rs | 1 + crates/remote/src/ssh_session.rs | 16 +++++-- crates/title_bar/src/title_bar.rs | 45 ++++++++++++++++++- 4 files changed, 66 insertions(+), 5 deletions(-) diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index dadbd394bbf9b..59c2c895cddcb 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -1217,7 +1217,10 @@ impl Project { server.ssh_connection_string.is_some() } - pub fn ssh_connection_string(&self, cx: &ModelContext) -> Option { + pub fn ssh_connection_string(&self, cx: &AppContext) -> Option { + if let Some(ssh_state) = &self.ssh_client { + return Some(ssh_state.connection_string().into()); + } let dev_server_id = self.dev_server_project_id()?; dev_server_projects::Store::global(cx) .read(cx) @@ -1226,6 +1229,10 @@ impl Project { .clone() } + pub fn ssh_is_connected(&self) -> Option { + Some(!self.ssh_client.as_ref()?.is_reconnect_underway()) + } + pub fn replica_id(&self) -> ReplicaId { match self.client_state { ProjectClientState::Remote { replica_id, .. } => replica_id, diff --git a/crates/recent_projects/src/ssh_connections.rs b/crates/recent_projects/src/ssh_connections.rs index d0fffc031f0bf..1aff16a4a44f7 100644 --- a/crates/recent_projects/src/ssh_connections.rs +++ b/crates/recent_projects/src/ssh_connections.rs @@ -317,6 +317,7 @@ impl SshClientDelegate { if release_channel == ReleaseChannel::Dev && platform.arch == std::env::consts::ARCH && platform.os == std::env::consts::OS + && false { use smol::process::{Command, Stdio}; diff --git a/crates/remote/src/ssh_session.rs b/crates/remote/src/ssh_session.rs index 6bca9938baac7..89ec5db949aa7 100644 --- a/crates/remote/src/ssh_session.rs +++ b/crates/remote/src/ssh_session.rs @@ -36,6 +36,7 @@ use std::{ time::Instant, }; use tempfile::TempDir; +use util::maybe; #[derive( Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, serde::Serialize, serde::Deserialize, @@ -48,7 +49,7 @@ pub struct SshSocket { socket_path: PathBuf, } -#[derive(Debug, Clone, PartialEq, Eq)] +#[derive(Debug, Default, Clone, PartialEq, Eq)] pub struct SshConnectionOptions { pub host: String, pub username: Option, @@ -250,6 +251,7 @@ struct SshRemoteClientState { pub struct SshRemoteClient { client: Arc, inner_state: Mutex>, + connection_options: SshConnectionOptions, } impl SshRemoteClient { @@ -265,6 +267,7 @@ impl SshRemoteClient { let this = Arc::new(Self { client, inner_state: Mutex::new(None), + connection_options: connection_options.clone(), }); let inner_state = { @@ -272,8 +275,7 @@ impl SshRemoteClient { ChannelForwarder::new(incoming_tx, outgoing_rx, cx); let (ssh_connection, ssh_process) = - Self::establish_connection(connection_options.clone(), delegate.clone(), cx) - .await?; + Self::establish_connection(connection_options, delegate.clone(), cx).await?; let multiplex_task = Self::multiplex( Arc::downgrade(&this), @@ -505,6 +507,13 @@ impl SshRemoteClient { self.client.clone().into() } + pub fn connection_string(&self) -> String { + self.connection_options.connection_string() + } + + pub fn is_reconnect_underway(&self) -> bool { + maybe!({ Some(self.inner_state.try_lock()?.is_none()) }).unwrap_or_default() + } #[cfg(any(test, feature = "test-support"))] pub fn fake( client_cx: &mut gpui::TestAppContext, @@ -519,6 +528,7 @@ impl SshRemoteClient { Arc::new(Self { client, inner_state: Mutex::new(None), + connection_options: SshConnectionOptions::default(), }) }), 
server_cx.update(|cx| ChannelClient::new(client_to_server_rx, server_to_client_tx, cx)), diff --git a/crates/title_bar/src/title_bar.rs b/crates/title_bar/src/title_bar.rs index d6cc839cfdb7b..81f908ce79790 100644 --- a/crates/title_bar/src/title_bar.rs +++ b/crates/title_bar/src/title_bar.rs @@ -18,7 +18,7 @@ use gpui::{ StatefulInteractiveElement, Styled, Subscription, View, ViewContext, VisualContext, WeakView, }; use project::{Project, RepositoryEntry}; -use recent_projects::RecentProjects; +use recent_projects::{OpenRemote, RecentProjects}; use rpc::proto::{self, DevServerStatus}; use smallvec::SmallVec; use std::sync::Arc; @@ -262,6 +262,46 @@ impl TitleBar { self } + fn render_ssh_project_host(&self, cx: &mut ViewContext) -> Option { + let host = self.project.read(cx).ssh_connection_string(cx)?; + let meta = SharedString::from(format!("Connected to: {host}")); + let indicator_color = if self.project.read(cx).ssh_is_connected()? { + Color::Success + } else { + Color::Warning + }; + let indicator = div() + .absolute() + .w_1_4() + .h_1_4() + .right_0p5() + .bottom_0p5() + .p_1() + .rounded_2xl() + .bg(indicator_color.color(cx)); + + Some( + div() + .child( + IconButton::new("ssh-server-icon", IconName::Server) + .tooltip(move |cx| { + Tooltip::with_meta( + "Remote Project", + Some(&OpenRemote), + meta.clone(), + cx, + ) + }) + .shape(ui::IconButtonShape::Square) + .on_click(|_, cx| { + cx.dispatch_action(OpenRemote.boxed_clone()); + }), + ) + .child(indicator) + .into_any_element(), + ) + } + pub fn render_project_host(&self, cx: &mut ViewContext) -> Option { if let Some(dev_server) = self.project @@ -296,6 +336,9 @@ impl TitleBar { .into_any_element(), ); } + if self.project.read(cx).is_via_ssh() { + return self.render_ssh_project_host(cx); + } if self.project.read(cx).is_disconnected() { return Some( From c48d4dbc6bad8127d6992fea3fbf4c3091dc9650 Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Wed, 2 Oct 2024 22:06:07 -0400 Subject: [PATCH 198/228] Add basic outline panel docs (#18674) Bandaid to: https://github.com/zed-industries/zed/issues/18672 Release Notes: - Added basic outline panel docs --- docs/src/SUMMARY.md | 1 + docs/src/outline-panel.md | 26 ++++++++++++++++++++++++++ 2 files changed, 27 insertions(+) create mode 100644 docs/src/outline-panel.md diff --git a/docs/src/SUMMARY.md b/docs/src/SUMMARY.md index f0e4784f89cd9..e7d837e479bce 100644 --- a/docs/src/SUMMARY.md +++ b/docs/src/SUMMARY.md @@ -22,6 +22,7 @@ # Using Zed - [Multibuffers](./multibuffers.md) +- [Outline Panel](./outline-panel.md) - [Code Completions](./completions.md) - [Channels](./channels.md) - [Collaboration](./collaboration.md) diff --git a/docs/src/outline-panel.md b/docs/src/outline-panel.md new file mode 100644 index 0000000000000..ee654514b25c2 --- /dev/null +++ b/docs/src/outline-panel.md @@ -0,0 +1,26 @@ +# Outline Panel + +In addition to the modal outline (`cmd-shift-o`), Zed offers an outline panel. The outline panel can be deployed via `cmd-shift-b`, or via the `Outline Panel` button in the status bar. + +When viewing a "singleton" buffer, the outline panel works similarly to that of the outline modal - it displays the outline of the current buffer's symbols, as reported by tree-sitter. Clicking on an entry allows you to jump to the associated section in the file. The outline view will also automatically scroll to the section associated with the current cursor position within the file. 
+ +![Using the outline panel in a singleton buffer](https://zed.dev/img/outline-panel/singleton.png) + +The outline panel truly excels when used with multi-buffers. Here are some examples of its versatility: + +1. Project Search Results: + +Get an overview of search results across your project. +![Using the outline panel in a project search multi-buffer](https://zed.dev/img/outline-panel/project-search.png) + +2. Project Diagnostics: + +View a summary of all errors and warnings reported by the language server. +![Using the outline panel while viewing project diagnostics multi-buffer](https://zed.dev/img/outline-panel/project-diagnostics.png) + +3. Find All References: + +Quickly navigate through all references when using the `editor: find all references` action. +![Using the outline panel while viewing `find all references` multi-buffer](https://zed.dev/img/outline-panel/find-all-references.png) + +The outline view provides a great way to quickly navigate to specific parts of your code and helps you maintain context when working with large result sets in multi-buffers. From df21fe174d91cd15ee984de3c78f018b347ad8e9 Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Wed, 2 Oct 2024 22:16:56 -0400 Subject: [PATCH 199/228] Add command palette action name to outline panel docs (#18678) Release Notes: - N/A --- docs/src/outline-panel.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/outline-panel.md b/docs/src/outline-panel.md index ee654514b25c2..998f310076f01 100644 --- a/docs/src/outline-panel.md +++ b/docs/src/outline-panel.md @@ -1,6 +1,6 @@ # Outline Panel -In addition to the modal outline (`cmd-shift-o`), Zed offers an outline panel. The outline panel can be deployed via `cmd-shift-b`, or via the `Outline Panel` button in the status bar. +In addition to the modal outline (`cmd-shift-o`), Zed offers an outline panel. The outline panel can be deployed via `cmd-shift-b` (`outline panel: toggle focus` via the command palette), or by clicking the `Outline Panel` button in the status bar. When viewing a "singleton" buffer, the outline panel works similarly to that of the outline modal - it displays the outline of the current buffer's symbols, as reported by tree-sitter. Clicking on an entry allows you to jump to the associated section in the file. The outline view will also automatically scroll to the section associated with the current cursor position within the file. From 9cd42427d88afad3423fa2546ed656839295cf3f Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 2 Oct 2024 23:28:00 -0400 Subject: [PATCH 200/228] Update Rust crate thiserror to v1.0.64 (#18677) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [thiserror](https://redirect.github.com/dtolnay/thiserror) | workspace.dependencies | patch | `1.0.63` -> `1.0.64` | --- ### Release Notes
dtolnay/thiserror (thiserror) ### [`v1.0.64`](https://redirect.github.com/dtolnay/thiserror/releases/tag/1.0.64) [Compare Source](https://redirect.github.com/dtolnay/thiserror/compare/1.0.63...1.0.64) - Exclude derived impls from coverage instrumentation ([#​322](https://redirect.github.com/dtolnay/thiserror/issues/322), thanks [@​oxalica](https://redirect.github.com/oxalica))
--- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 187d0c92c806c..821bedbec040f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -11702,18 +11702,18 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.63" +version = "1.0.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0342370b38b6a11b6cc11d6a805569958d54cfa061a29969c3b5ce2ea405724" +checksum = "d50af8abc119fb8bb6dbabcfa89656f46f84aa0ac7688088608076ad2b459a84" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.63" +version = "1.0.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261" +checksum = "08904e7672f5eb876eaaf87e0ce17857500934f4981c4a0ab2b4aa98baac7fc3" dependencies = [ "proc-macro2", "quote", From 1e8297a469a4c922c23d6c6912d13832a4a09b4b Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Thu, 3 Oct 2024 15:38:42 +0300 Subject: [PATCH 201/228] Remove a debug dev config line (#18689) Follow-up of https://github.com/zed-industries/zed/pull/18645 Release Notes: - N/A --- crates/recent_projects/src/ssh_connections.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/crates/recent_projects/src/ssh_connections.rs b/crates/recent_projects/src/ssh_connections.rs index 1aff16a4a44f7..d0fffc031f0bf 100644 --- a/crates/recent_projects/src/ssh_connections.rs +++ b/crates/recent_projects/src/ssh_connections.rs @@ -317,7 +317,6 @@ impl SshClientDelegate { if release_channel == ReleaseChannel::Dev && platform.arch == std::env::consts::ARCH && platform.os == std::env::consts::OS - && false { use smol::process::{Command, Stdio}; From dc85378b9679253c03d5a11a8c5f0f3ae0d641d7 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Thu, 3 Oct 2024 16:23:56 +0200 Subject: [PATCH 202/228] Clean up style properties on hunk controls (#18639) This PR removes some duplicate style properties on the hunk controls, namely padding, border, and background color. 
Release Notes: - N/A --- crates/editor/src/hunk_diff.rs | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/crates/editor/src/hunk_diff.rs b/crates/editor/src/hunk_diff.rs index ff3451fc9216b..7fbb07ae35c79 100644 --- a/crates/editor/src/hunk_diff.rs +++ b/crates/editor/src/hunk_diff.rs @@ -379,6 +379,7 @@ impl Editor { }); let border_color = cx.theme().colors().border_variant; + let bg_color = cx.theme().colors().editor_background; let gutter_color = match hunk.status { DiffHunkStatus::Added => cx.theme().status().created, DiffHunkStatus::Modified => cx.theme().status().modified, @@ -394,6 +395,7 @@ impl Editor { render: Box::new({ let editor = cx.view().clone(); let hunk = hunk.clone(); + move |cx| { let hunk_controls_menu_handle = editor.read(cx).hunk_controls_menu_handle.clone(); @@ -404,7 +406,7 @@ impl Editor { .w_full() .border_t_1() .border_color(border_color) - .bg(cx.theme().colors().editor_background) + .bg(bg_color) .child( div() .id("gutter-strip") @@ -424,14 +426,9 @@ impl Editor { ) .child( h_flex() - .pl_2() - .pr_6() + .px_6() .size_full() .justify_between() - .border_t_1() - .pl_6() - .pr_6() - .border_color(border_color) .child( h_flex() .gap_1() @@ -608,7 +605,7 @@ impl Editor { move |menu, _| { menu.context(focus.clone()) .action( - "Discard All", + "Discard All Hunks", RevertFile .boxed_clone(), ) From 773ad6bfd154b9f14b9b2b7009fd5d53926f9e3f Mon Sep 17 00:00:00 2001 From: Nate Butler Date: Thu, 3 Oct 2024 10:27:19 -0400 Subject: [PATCH 203/228] Document the `theme` crate (#18690) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR enables required documentation for the `theme` crate starts on documenting it. The end goal is to have all meaningful documentation in the crate filled out – However I'm not sure that just adding `#![deny(missing_docs)]` to the whole crate is the right approach. I don't know that having 200+ "The color of the _ color" field docs is useful however–In the short term I've excluded some of the modules that contain structs with a ton of fields (`colors, `status`, etc.) until we decide what the right solution here is. Next steps are to clean up the crate, removing unused modules or those with low usage in favor of other approaches. Changes in this PR: - Enable the `deny(missing_docs)` lint for the `theme` crate - Start documenting a subset of the crate. - Enable `#![allow(missing_docs)]` for some modules. 
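To visualize the lint layout described above — a rough sketch, where the module name is illustrative; in this PR the actual opt-outs are the `#![allow(missing_docs)]` lines added to `scale.rs` and `schema.rs`:

```rust
// lib.rs — require docs crate-wide…
#![deny(missing_docs)]

/// Field-heavy color definitions (docs deferred for now).
pub mod colors {
    // …while exempting individual modules until they are documented properly.
    #![allow(missing_docs)]
}
```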
Release Notes: - N/A --- crates/theme/src/default_colors.rs | 15 +++-- crates/theme/src/default_theme.rs | 3 + crates/theme/src/font_family_cache.rs | 3 + crates/theme/src/registry.rs | 11 ++++ crates/theme/src/scale.rs | 1 + crates/theme/src/schema.rs | 2 + crates/theme/src/settings.rs | 70 +++++++++++++++++++++- crates/theme/src/styles/accents.rs | 3 + crates/theme/src/styles/colors.rs | 2 + crates/theme/src/styles/players.rs | 2 + crates/theme/src/styles/status.rs | 2 + crates/theme/src/styles/stories/color.rs | 1 + crates/theme/src/styles/stories/players.rs | 1 + crates/theme/src/styles/syntax.rs | 2 + crates/theme/src/styles/system.rs | 2 + crates/theme/src/theme.rs | 41 ++++++++++++- 16 files changed, 152 insertions(+), 9 deletions(-) diff --git a/crates/theme/src/default_colors.rs b/crates/theme/src/default_colors.rs index a7521bd374d1c..49c216c0e07e7 100644 --- a/crates/theme/src/default_colors.rs +++ b/crates/theme/src/default_colors.rs @@ -8,12 +8,13 @@ pub(crate) fn neutral() -> ColorScaleSet { sand() } -// Note: We aren't currently making use of the default colors, as all of the -// themes have a value set for each color. -// -// We'll need to revisit these once we're ready to launch user themes, which may -// not specify a value for each color (and thus should fall back to the defaults). +/// The default colors for the theme. +/// +/// Themes that do not specify all colors are refined off of these defaults. impl ThemeColors { + /// Returns the default colors for light themes. + /// + /// Themes that do not specify all colors are refined off of these defaults. pub fn light() -> Self { let system = SystemColors::default(); @@ -114,6 +115,9 @@ impl ThemeColors { } } + /// Returns the default colors for dark themes. + /// + /// Themes that do not specify all colors are refined off of these defaults. pub fn dark() -> Self { let system = SystemColors::default(); @@ -247,6 +251,7 @@ impl TryFrom for ColorScaleSet { } } +/// Color scales used to build the default themes. pub fn default_color_scales() -> ColorScales { ColorScales { gray: gray(), diff --git a/crates/theme/src/default_theme.rs b/crates/theme/src/default_theme.rs index 6722b847badb9..5806340916533 100644 --- a/crates/theme/src/default_theme.rs +++ b/crates/theme/src/default_theme.rs @@ -45,6 +45,9 @@ pub(crate) fn zed_pro_moonlight() -> Theme { } } +/// Returns the Zed Pro theme family. +/// +/// Note: To be removed until the theme is implemented. pub fn zed_pro_family() -> ThemeFamily { ThemeFamily { id: "zed_pro".to_string(), diff --git a/crates/theme/src/font_family_cache.rs b/crates/theme/src/font_family_cache.rs index c9583b9e8d3bc..755d829902a48 100644 --- a/crates/theme/src/font_family_cache.rs +++ b/crates/theme/src/font_family_cache.rs @@ -25,14 +25,17 @@ struct GlobalFontFamilyCache(Arc); impl Global for GlobalFontFamilyCache {} impl FontFamilyCache { + /// Initializes the global font family cache. pub fn init_global(cx: &mut AppContext) { cx.default_global::(); } + /// Returns the global font family cache. pub fn global(cx: &AppContext) -> Arc { GlobalFontFamilyCache::global(cx).0.clone() } + /// Returns the list of font families. 
pub fn list_font_families(&self, cx: &AppContext) -> Vec { if self.state.read().loaded_at.is_some() { return self.state.read().font_families.clone(); diff --git a/crates/theme/src/registry.rs b/crates/theme/src/registry.rs index b70377f4408b9..a77ab141a6547 100644 --- a/crates/theme/src/registry.rs +++ b/crates/theme/src/registry.rs @@ -17,9 +17,12 @@ use crate::{ ThemeStyles, }; +/// The metadata for a theme. #[derive(Debug, Clone)] pub struct ThemeMeta { + /// The name of the theme. pub name: SharedString, + /// The appearance of the theme. pub appearance: Appearance, } @@ -38,6 +41,7 @@ struct ThemeRegistryState { themes: HashMap>, } +/// The registry for themes. pub struct ThemeRegistry { state: RwLock, assets: Box, @@ -61,6 +65,7 @@ impl ThemeRegistry { cx.set_global(GlobalThemeRegistry(Arc::new(ThemeRegistry::new(assets)))); } + /// Creates a new [`ThemeRegistry`] with the given [`AssetSource`]. pub fn new(assets: Box) -> Self { let registry = Self { state: RwLock::new(ThemeRegistryState { @@ -99,6 +104,7 @@ impl ThemeRegistry { } } + /// Inserts user themes into the registry. pub fn insert_user_themes(&self, themes: impl IntoIterator) { self.insert_themes(themes.into_iter().map(|user_theme| { let mut theme_colors = match user_theme.appearance { @@ -185,16 +191,19 @@ impl ThemeRegistry { .retain(|name, _| !themes_to_remove.contains(name)) } + /// Removes all themes from the registry. pub fn clear(&mut self) { self.state.write().themes.clear(); } + /// Returns the names of all themes in the registry. pub fn list_names(&self, _staff: bool) -> Vec { let mut names = self.state.read().themes.keys().cloned().collect::>(); names.sort(); names } + /// Returns the metadata of all themes in the registry. pub fn list(&self, _staff: bool) -> Vec { self.state .read() @@ -207,6 +216,7 @@ impl ThemeRegistry { .collect() } + /// Returns the theme with the given name. pub fn get(&self, name: &str) -> Result> { self.state .read() @@ -261,6 +271,7 @@ impl ThemeRegistry { Ok(()) } + /// Asynchronously reads the user theme from the specified path. pub async fn read_user_theme(theme_path: &Path, fs: Arc) -> Result { let reader = fs.open_sync(theme_path).await?; let theme_family: ThemeFamilyContent = serde_json_lenient::from_reader(reader)?; diff --git a/crates/theme/src/scale.rs b/crates/theme/src/scale.rs index 1146090edcc1e..a70dcb9789a3c 100644 --- a/crates/theme/src/scale.rs +++ b/crates/theme/src/scale.rs @@ -1,3 +1,4 @@ +#![allow(missing_docs)] use gpui::{AppContext, Hsla, SharedString}; use crate::{ActiveTheme, Appearance}; diff --git a/crates/theme/src/schema.rs b/crates/theme/src/schema.rs index 91863061236f2..af334d8aed54b 100644 --- a/crates/theme/src/schema.rs +++ b/crates/theme/src/schema.rs @@ -1,3 +1,5 @@ +#![allow(missing_docs)] + use anyhow::Result; use gpui::{FontStyle, FontWeight, HighlightStyle, Hsla, WindowBackgroundAppearance}; use indexmap::IndexMap; diff --git a/crates/theme/src/settings.rs b/crates/theme/src/settings.rs index 86383cec8ea07..0c8ea782cd608 100644 --- a/crates/theme/src/settings.rs +++ b/crates/theme/src/settings.rs @@ -35,6 +35,9 @@ const MIN_LINE_HEIGHT: f32 = 1.0; Deserialize, JsonSchema, )] + +/// Specifies the density of the UI. +/// Note: This setting is still experimental. See [this tracking issue](https://github.com/zed-industries/zed/issues/18078) #[serde(rename_all = "snake_case")] pub enum UiDensity { /// A denser UI with tighter spacing and smaller elements. 
@@ -50,6 +53,8 @@ pub enum UiDensity { } impl UiDensity { + /// The spacing ratio of a given density. + /// TODO: Standardize usage throughout the app or remove pub fn spacing_ratio(self) -> f32 { match self { UiDensity::Compact => 0.75, @@ -80,17 +85,43 @@ impl From for String { } } +/// Customizable settings for the UI and theme system. #[derive(Clone)] pub struct ThemeSettings { + /// The UI font size. Determines the size of text in the UI, + /// as well as the size of a [gpui::Rems] unit. + /// + /// Changing this will impact the size of all UI elements. pub ui_font_size: Pixels, + /// The font used for UI elements. pub ui_font: Font, - pub buffer_font: Font, + /// The font size used for buffers, and the terminal. + /// + /// The terminal font size can be overridden using it's own setting. pub buffer_font_size: Pixels, + /// The font used for buffers, and the terminal. + /// + /// The terminal font family can be overridden using it's own setting. + pub buffer_font: Font, + /// The line height for buffers, and the terminal. + /// + /// Changing this may affect the spacing of some UI elements. + /// + /// The terminal font family can be overridden using it's own setting. pub buffer_line_height: BufferLineHeight, + /// The current theme selection. + /// TODO: Document this further pub theme_selection: Option, + /// The active theme. pub active_theme: Arc, + /// Manual overrides for the active theme. + /// + /// Note: This setting is still experimental. See [this tracking issue](https://github.com/zed-industries/zed/issues/18078) pub theme_overrides: Option, + /// The density of the UI. + /// Note: This setting is still experimental. See [this tracking issue]( pub ui_density: UiDensity, + /// The amount of fading applied to unnecessary code. pub unnecessary_code_fade: f32, } @@ -181,15 +212,21 @@ pub(crate) struct AdjustedUiFontSize(Pixels); impl Global for AdjustedUiFontSize {} +/// Represents the selection of a theme, which can be either static or dynamic. #[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)] #[serde(untagged)] pub enum ThemeSelection { + /// A static theme selection, represented by a single theme name. Static(#[schemars(schema_with = "theme_name_ref")] String), + /// A dynamic theme selection, which can change based the [ThemeMode]. Dynamic { + /// The mode used to determine which theme to use. #[serde(default)] mode: ThemeMode, + /// The theme to use for light mode. #[schemars(schema_with = "theme_name_ref")] light: String, + /// The theme to use for dark mode. #[schemars(schema_with = "theme_name_ref")] dark: String, }, @@ -199,6 +236,12 @@ fn theme_name_ref(_: &mut SchemaGenerator) -> Schema { Schema::new_ref("#/definitions/ThemeName".into()) } +// TODO: Rename ThemeMode -> ThemeAppearanceMode +/// The mode use to select a theme. +/// +/// `Light` and `Dark` will select their respective themes. +/// +/// `System` will select the theme based on the system's appearance. #[derive(Debug, PartialEq, Eq, Clone, Copy, Default, Serialize, Deserialize, JsonSchema)] #[serde(rename_all = "snake_case")] pub enum ThemeMode { @@ -214,6 +257,7 @@ pub enum ThemeMode { } impl ThemeSelection { + /// Returns the theme name for the selected [ThemeMode]. pub fn theme(&self, system_appearance: Appearance) -> &str { match self { Self::Static(theme) => theme, @@ -228,6 +272,7 @@ impl ThemeSelection { } } + /// Returns the [ThemeMode] for the [ThemeSelection]. 
pub fn mode(&self) -> Option { match self { ThemeSelection::Static(_) => None, @@ -327,6 +372,7 @@ impl ThemeSettingsContent { } } + /// Sets the mode for the theme. pub fn set_mode(&mut self, mode: ThemeMode) { if let Some(selection) = self.theme.as_mut() { match selection { @@ -355,16 +401,23 @@ impl ThemeSettingsContent { } } +/// The buffer's line height. #[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, JsonSchema, Default)] #[serde(rename_all = "snake_case")] pub enum BufferLineHeight { + /// A less dense line height. #[default] Comfortable, + /// The default line height. Standard, + /// A custom line height. + /// + /// A line height of 1.0 is the height of the buffer's font size. Custom(f32), } impl BufferLineHeight { + /// Returns the value of the line height. pub fn value(&self) -> f32 { match self { BufferLineHeight::Comfortable => 1.618, @@ -375,12 +428,15 @@ impl BufferLineHeight { } impl ThemeSettings { + /// Returns the [AdjustedBufferFontSize]. pub fn buffer_font_size(&self, cx: &AppContext) -> Pixels { cx.try_global::() .map_or(self.buffer_font_size, |size| size.0) .max(MIN_FONT_SIZE) } + // TODO: Rename: `line_height` -> `buffer_line_height` + /// Returns the buffer's line height. pub fn line_height(&self) -> f32 { f32::max(self.buffer_line_height.value(), MIN_LINE_HEIGHT) } @@ -433,6 +489,7 @@ impl ThemeSettings { } } +/// Observe changes to the adjusted buffer font size. pub fn observe_buffer_font_size_adjustment( cx: &mut ViewContext, f: impl 'static + Fn(&mut V, &mut ViewContext), @@ -440,6 +497,7 @@ pub fn observe_buffer_font_size_adjustment( cx.observe_global::(f) } +/// Sets the adjusted buffer font size. pub fn adjusted_font_size(size: Pixels, cx: &mut AppContext) -> Pixels { if let Some(AdjustedBufferFontSize(adjusted_size)) = cx.try_global::() { let buffer_font_size = ThemeSettings::get_global(cx).buffer_font_size; @@ -451,12 +509,14 @@ pub fn adjusted_font_size(size: Pixels, cx: &mut AppContext) -> Pixels { .max(MIN_FONT_SIZE) } +/// Returns the adjusted buffer font size. pub fn get_buffer_font_size(cx: &AppContext) -> Pixels { let buffer_font_size = ThemeSettings::get_global(cx).buffer_font_size; cx.try_global::() .map_or(buffer_font_size, |adjusted_size| adjusted_size.0) } +/// Adjusts the buffer font size. pub fn adjust_buffer_font_size(cx: &mut AppContext, f: fn(&mut Pixels)) { let buffer_font_size = ThemeSettings::get_global(cx).buffer_font_size; let mut adjusted_size = cx @@ -469,10 +529,12 @@ pub fn adjust_buffer_font_size(cx: &mut AppContext, f: fn(&mut Pixels)) { cx.refresh(); } +/// Returns whether the buffer font size has been adjusted. pub fn has_adjusted_buffer_font_size(cx: &mut AppContext) -> bool { cx.has_global::() } +/// Resets the buffer font size to the default value. pub fn reset_buffer_font_size(cx: &mut AppContext) { if cx.has_global::() { cx.remove_global::(); @@ -480,6 +542,8 @@ pub fn reset_buffer_font_size(cx: &mut AppContext) { } } +// TODO: Make private, change usages to use `get_ui_font_size` instead. +#[allow(missing_docs)] pub fn setup_ui_font(cx: &mut WindowContext) -> gpui::Font { let (ui_font, ui_font_size) = { let theme_settings = ThemeSettings::get_global(cx); @@ -491,12 +555,14 @@ pub fn setup_ui_font(cx: &mut WindowContext) -> gpui::Font { ui_font } +/// Gets the adjusted UI font size. pub fn get_ui_font_size(cx: &AppContext) -> Pixels { let ui_font_size = ThemeSettings::get_global(cx).ui_font_size; cx.try_global::() .map_or(ui_font_size, |adjusted_size| adjusted_size.0) } +/// Sets the adjusted UI font size. 
pub fn adjust_ui_font_size(cx: &mut AppContext, f: fn(&mut Pixels)) { let ui_font_size = ThemeSettings::get_global(cx).ui_font_size; let mut adjusted_size = cx @@ -509,10 +575,12 @@ pub fn adjust_ui_font_size(cx: &mut AppContext, f: fn(&mut Pixels)) { cx.refresh(); } +/// Returns whether the UI font size has been adjusted. pub fn has_adjusted_ui_font_size(cx: &mut AppContext) -> bool { cx.has_global::() } +/// Resets the UI font size to the default value. pub fn reset_ui_font_size(cx: &mut AppContext) { if cx.has_global::() { cx.remove_global::(); diff --git a/crates/theme/src/styles/accents.rs b/crates/theme/src/styles/accents.rs index dfcd19911b26f..e4d7f03cf6220 100644 --- a/crates/theme/src/styles/accents.rs +++ b/crates/theme/src/styles/accents.rs @@ -20,6 +20,7 @@ impl Default for AccentColors { } impl AccentColors { + /// Returns the set of dark accent colors. pub fn dark() -> Self { Self(vec![ blue().dark().step_9(), @@ -38,6 +39,7 @@ impl AccentColors { ]) } + /// Returns the set of light accent colors. pub fn light() -> Self { Self(vec![ blue().light().step_9(), @@ -58,6 +60,7 @@ impl AccentColors { } impl AccentColors { + /// Returns the color for the given index. pub fn color_for_index(&self, index: u32) -> Hsla { self.0[index as usize % self.0.len()] } diff --git a/crates/theme/src/styles/colors.rs b/crates/theme/src/styles/colors.rs index 225275f37b619..881a68334dcf6 100644 --- a/crates/theme/src/styles/colors.rs +++ b/crates/theme/src/styles/colors.rs @@ -1,3 +1,5 @@ +#![allow(missing_docs)] + use gpui::{Hsla, WindowBackgroundAppearance}; use refineable::Refineable; use std::sync::Arc; diff --git a/crates/theme/src/styles/players.rs b/crates/theme/src/styles/players.rs index e80c7161b15b1..130721033239c 100644 --- a/crates/theme/src/styles/players.rs +++ b/crates/theme/src/styles/players.rs @@ -1,3 +1,5 @@ +#![allow(missing_docs)] + use gpui::Hsla; use serde_derive::Deserialize; diff --git a/crates/theme/src/styles/status.rs b/crates/theme/src/styles/status.rs index 854b876ac20b3..84afae701d0f0 100644 --- a/crates/theme/src/styles/status.rs +++ b/crates/theme/src/styles/status.rs @@ -1,3 +1,5 @@ +#![allow(missing_docs)] + use gpui::Hsla; use refineable::Refineable; diff --git a/crates/theme/src/styles/stories/color.rs b/crates/theme/src/styles/stories/color.rs index 90e84bcf0f2de..8e6c86ba762ba 100644 --- a/crates/theme/src/styles/stories/color.rs +++ b/crates/theme/src/styles/stories/color.rs @@ -4,6 +4,7 @@ use story::Story; use crate::{default_color_scales, ColorScaleStep}; +/// The story showcasing all the default color scales pub struct ColorsStory; impl Render for ColorsStory { diff --git a/crates/theme/src/styles/stories/players.rs b/crates/theme/src/styles/stories/players.rs index 2b356670bf66a..0d50c6edc9afc 100644 --- a/crates/theme/src/styles/stories/players.rs +++ b/crates/theme/src/styles/stories/players.rs @@ -3,6 +3,7 @@ use story::Story; use crate::{ActiveTheme, PlayerColors}; +/// The story showcasing the player colors pub struct PlayerStory; impl Render for PlayerStory { diff --git a/crates/theme/src/styles/syntax.rs b/crates/theme/src/styles/syntax.rs index 8016445c16c6a..0a97ff77f23c8 100644 --- a/crates/theme/src/styles/syntax.rs +++ b/crates/theme/src/styles/syntax.rs @@ -1,3 +1,5 @@ +#![allow(missing_docs)] + use std::sync::Arc; use gpui::{HighlightStyle, Hsla}; diff --git a/crates/theme/src/styles/system.rs b/crates/theme/src/styles/system.rs index aeb0865155d68..54e892b79c49f 100644 --- a/crates/theme/src/styles/system.rs +++ 
b/crates/theme/src/styles/system.rs @@ -1,3 +1,5 @@ +#![allow(missing_docs)] + use gpui::{hsla, Hsla}; #[derive(Clone)] diff --git a/crates/theme/src/theme.rs b/crates/theme/src/theme.rs index af38c9efc6f7e..a6ca59d734e24 100644 --- a/crates/theme/src/theme.rs +++ b/crates/theme/src/theme.rs @@ -1,3 +1,5 @@ +#![deny(missing_docs)] + //! # Theme //! //! This crate provides the theme system for Zed. @@ -10,6 +12,9 @@ mod default_colors; mod default_theme; mod font_family_cache; mod one_themes; +/// A prelude for working with the theme system. +/// +/// TODO: remove this. This only publishes default colors. pub mod prelude; mod registry; mod scale; @@ -35,16 +40,22 @@ use gpui::{ }; use serde::Deserialize; +/// Defines window border radius for platforms that use client side decorations. +pub const CLIENT_SIDE_DECORATION_ROUNDING: Pixels = px(10.0); +/// Defines window shadow size for platforms that use client side decorations. +pub const CLIENT_SIDE_DECORATION_SHADOW: Pixels = px(10.0); + +/// The appearance of the theme. #[derive(Debug, PartialEq, Clone, Copy, Deserialize)] pub enum Appearance { + /// A light appearance. Light, + /// A dark appearance. Dark, } -pub const CLIENT_SIDE_DECORATION_ROUNDING: Pixels = px(10.0); -pub const CLIENT_SIDE_DECORATION_SHADOW: Pixels = px(10.0); - impl Appearance { + /// Returns whether the appearance is light. pub fn is_light(&self) -> bool { match self { Self::Light => true, @@ -62,6 +73,7 @@ impl From for Appearance { } } +/// Which themes should be loaded. This is used primarlily for testing. pub enum LoadThemes { /// Only load the base theme. /// @@ -72,6 +84,7 @@ pub enum LoadThemes { All(Box), } +/// Initialize the theme system. pub fn init(themes_to_load: LoadThemes, cx: &mut AppContext) { let (assets, load_user_themes) = match themes_to_load { LoadThemes::JustBase => (Box::new(()) as Box, false), @@ -97,7 +110,9 @@ pub fn init(themes_to_load: LoadThemes, cx: &mut AppContext) { .detach(); } +/// Implementing this trait allows accessing the active theme. pub trait ActiveTheme { + /// Returns the active theme. fn theme(&self) -> &Arc; } @@ -107,21 +122,39 @@ impl ActiveTheme for AppContext { } } +/// A theme family is a grouping of themes under a single name. +/// +/// For example, the "One" theme family contains the "One Light" and "One Dark" themes. +/// +/// It can also be used to package themes with many variants. +/// +/// For example, the "Atelier" theme family contains "Cave", "Dune", "Estuary", "Forest", "Heath", etc. pub struct ThemeFamily { + /// The unique identifier for the theme family. pub id: String, + /// The name of the theme family. This will be displayed in the UI, such as when adding or removing a theme family. pub name: SharedString, + /// The author of the theme family. pub author: SharedString, + /// The [Theme]s in the family. pub themes: Vec, + /// The color scales used by the themes in the family. + /// Note: This will be removed in the future. pub scales: ColorScales, } impl ThemeFamily {} +/// A theme is the primary mechanism for defining the appearance of the UI. #[derive(Clone)] pub struct Theme { + /// The unique identifier for the theme. pub id: String, + /// The name of the theme. pub name: SharedString, + /// The appearance of the theme (light or dark). pub appearance: Appearance, + /// The colors and other styles for the theme. pub styles: ThemeStyles, } @@ -181,6 +214,8 @@ impl Theme { } } +/// Compounds a color with an alpha value. +/// TODO: Replace this with a method on Hsla. 
pub fn color_alpha(color: Hsla, alpha: f32) -> Hsla { let mut color = color; color.a = alpha; From 29796aa4128f917c8e3056d04f3801b2eac657d6 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 3 Oct 2024 11:14:22 -0400 Subject: [PATCH 204/228] Update Rust crate serde_json to v1.0.128 (#18669) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [serde_json](https://redirect.github.com/serde-rs/json) | dependencies | patch | `1.0.127` -> `1.0.128` | | [serde_json](https://redirect.github.com/serde-rs/json) | workspace.dependencies | patch | `1.0.127` -> `1.0.128` | --- ### Release Notes
serde-rs/json (serde_json)

### [`v1.0.128`](https://redirect.github.com/serde-rs/json/releases/tag/1.0.128)

[Compare Source](https://redirect.github.com/serde-rs/json/compare/1.0.127...1.0.128)

- Support serializing maps containing 128-bit integer keys to serde_json::Value ([#1188](https://redirect.github.com/serde-rs/json/issues/1188), thanks [@Mrreadiness](https://redirect.github.com/Mrreadiness))
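A small illustration of the change noted above (my own sketch, not code from this repository or from serde_json itself): a map with 128-bit integer keys can now be converted to `serde_json::Value`, with the keys stringified as JSON requires.

```rust
use std::collections::BTreeMap;

fn main() -> serde_json::Result<()> {
    let mut map: BTreeMap<u128, &str> = BTreeMap::new();
    map.insert(u128::MAX, "largest");

    // Before 1.0.128 this returned an error because the `Value` map-key
    // serializer had no i128/u128 support; now the key becomes a string.
    let value = serde_json::to_value(&map)?;
    println!("{value}"); // {"340282366920938463463374607431768211455":"largest"}
    Ok(())
}
```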
--- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about these updates again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 821bedbec040f..a94de65bc9896 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -10194,9 +10194,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.127" +version = "1.0.128" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8043c06d9f82bd7271361ed64f415fe5e12a77fdb52e573e7f06a516dea329ad" +checksum = "6ff5456707a1de34e7e37f2a6fd3d3f808c318259cbd01ab6377795054b483d8" dependencies = [ "indexmap 2.4.0", "itoa", From ddcd45bb457215afcd74f3d2bf995c7687604ff1 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Thu, 3 Oct 2024 17:27:42 +0200 Subject: [PATCH 205/228] docs: Add tweaks to the outline panel page (#18697) Thought we could be extra clear here with the meaning of "singleton buffers". Release Notes: - N/A --- docs/src/outline-panel.md | 13 +++++++++---- docs/theme/css/general.css | 1 + docs/theme/css/variables.css | 4 ++++ 3 files changed, 14 insertions(+), 4 deletions(-) diff --git a/docs/src/outline-panel.md b/docs/src/outline-panel.md index 998f310076f01..bc743596d6bcb 100644 --- a/docs/src/outline-panel.md +++ b/docs/src/outline-panel.md @@ -2,25 +2,30 @@ In addition to the modal outline (`cmd-shift-o`), Zed offers an outline panel. The outline panel can be deployed via `cmd-shift-b` (`outline panel: toggle focus` via the command palette), or by clicking the `Outline Panel` button in the status bar. -When viewing a "singleton" buffer, the outline panel works similarly to that of the outline modal - it displays the outline of the current buffer's symbols, as reported by tree-sitter. Clicking on an entry allows you to jump to the associated section in the file. The outline view will also automatically scroll to the section associated with the current cursor position within the file. +When viewing a "singleton" buffer (i.e., a single file on a tab), the outline panel works similarly to that of the outline modal-it displays the outline of the current buffer's symbols, as reported by tree-sitter. Clicking on an entry allows you to jump to the associated section in the file. The outline view will also automatically scroll to the section associated with the current cursor position within the file. ![Using the outline panel in a singleton buffer](https://zed.dev/img/outline-panel/singleton.png) +## Usage with multibuffers + The outline panel truly excels when used with multi-buffers. Here are some examples of its versatility: -1. Project Search Results: +### Project Search Results Get an overview of search results across your project. + ![Using the outline panel in a project search multi-buffer](https://zed.dev/img/outline-panel/project-search.png) -2. Project Diagnostics: +### Project Diagnostics View a summary of all errors and warnings reported by the language server. 
+ ![Using the outline panel while viewing project diagnostics multi-buffer](https://zed.dev/img/outline-panel/project-diagnostics.png) -3. Find All References: +### Find All References Quickly navigate through all references when using the `editor: find all references` action. + ![Using the outline panel while viewing `find all references` multi-buffer](https://zed.dev/img/outline-panel/find-all-references.png) The outline view provides a great way to quickly navigate to specific parts of your code and helps you maintain context when working with large result sets in multi-buffers. diff --git a/docs/theme/css/general.css b/docs/theme/css/general.css index b422890751c50..d1b8e9b92653e 100644 --- a/docs/theme/css/general.css +++ b/docs/theme/css/general.css @@ -182,6 +182,7 @@ h6:target::before { .content img, .content video { max-width: 100%; + background-color: var(--media-bg); border: 1px solid; border-color: var(--border); border-radius: 8px; diff --git a/docs/theme/css/variables.css b/docs/theme/css/variables.css index a7c0ed7114b40..55ae4a427da26 100644 --- a/docs/theme/css/variables.css +++ b/docs/theme/css/variables.css @@ -26,6 +26,8 @@ --border-light: hsl(220, 13%, 90%); --border-hover: hsl(220, 13%, 70%); + --media-bg: hsl(50, 25%, 92%); + --sidebar-fg: hsl(0, 0%, 0%); --sidebar-non-existant: #aaaaaa; --sidebar-active: hsl(220, 93%, 42%); @@ -98,6 +100,8 @@ --border-light: hsl(220, 13%, 90%); --border-hover: hsl(220, 13%, 40%); + --media-bg: hsl(220, 13%, 8%); + --sidebar-bg: hsl(220, 13%, 10%); --sidebar-fg: hsl(220, 14%, 71%); --sidebar-non-existant: #505254; From ded3d3fc14a438f606d5e8b03eaf92310723fce4 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 3 Oct 2024 11:29:29 -0400 Subject: [PATCH 206/228] Update Python to v3.12.7 (#18652) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [python](https://redirect.github.com/containerbase/python-prebuild) | dependencies | patch | `3.12.6` -> `3.12.7` | --- ### Release Notes
containerbase/python-prebuild (python)

### [`v3.12.7`](https://redirect.github.com/containerbase/python-prebuild/releases/tag/3.12.7)

[Compare Source](https://redirect.github.com/containerbase/python-prebuild/compare/3.12.6...3.12.7)

##### Bug Fixes

- **deps:** update dependency python to v3.12.7
--- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- script/update_top_ranking_issues/poetry.lock | 4 ++-- script/update_top_ranking_issues/pyproject.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/script/update_top_ranking_issues/poetry.lock b/script/update_top_ranking_issues/poetry.lock index a85844e645f10..c151c8bc1ddd7 100644 --- a/script/update_top_ranking_issues/poetry.lock +++ b/script/update_top_ranking_issues/poetry.lock @@ -529,5 +529,5 @@ files = [ [metadata] lock-version = "2.0" -python-versions = "3.12.6" -content-hash = "7827704e06a8c195297507e0d05e7a7c3843ed299bd353f31570ee4c435c6896" +python-versions = "3.12.7" +content-hash = "809bd421af8a34dd500ba704d954ae8e1f6edf15b6af74a0d3fda987b69c8cbe" diff --git a/script/update_top_ranking_issues/pyproject.toml b/script/update_top_ranking_issues/pyproject.toml index 15d8346bb9910..cd5cd2cc2ed27 100644 --- a/script/update_top_ranking_issues/pyproject.toml +++ b/script/update_top_ranking_issues/pyproject.toml @@ -8,7 +8,7 @@ readme = "README.md" [tool.poetry.dependencies] mypy = "1.6.0" PyGithub = "1.55" -python = "3.12.6" +python = "3.12.7" pytz = "2022.1" typer = "0.9.0" types-pytz = "2023.3.1.1" From f7b3680e4dbd664dc72344f67b957907260bc2d7 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 3 Oct 2024 11:32:04 -0400 Subject: [PATCH 207/228] Update Rust crate pretty_assertions to v1.4.1 (#18668) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [pretty_assertions](https://redirect.github.com/rust-pretty-assertions/rust-pretty-assertions) | workspace.dependencies | patch | `1.4.0` -> `1.4.1` | --- ### Release Notes
rust-pretty-assertions/rust-pretty-assertions (pretty_assertions)

### [`v1.4.1`](https://redirect.github.com/rust-pretty-assertions/rust-pretty-assertions/blob/HEAD/CHANGELOG.md#v141)

[Compare Source](https://redirect.github.com/rust-pretty-assertions/rust-pretty-assertions/compare/v1.4.0...v1.4.1)

#### Fixed

- Show feature-flagged code in documentation. Thanks to [@sandydoo](https://redirect.github.com/sandydoo) for the fix! ([#130](https://redirect.github.com/rust-pretty-assertions/rust-pretty-assertions/pull/130))

#### Internal

- Bump `yansi` version to `1.x`. Thanks to [@SergioBenitez](https://redirect.github.com/SergioBenitez) for the update, and maintaining this library! ([#121](https://redirect.github.com/rust-pretty-assertions/rust-pretty-assertions/pull/121))
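For context, an illustrative snippet of what this dev-dependency is used for (not tied to any particular Zed test): `pretty_assertions` provides drop-in replacements for the standard assertion macros that print a readable diff on failure.

```rust
use pretty_assertions::assert_eq;

fn main() {
    let expected = vec!["alpha", "beta", "gamma"];
    let actual = vec!["alpha", "beta", "gamma"];
    // On a mismatch this prints a line-by-line diff of the two values
    // instead of the plain `left != right` dump from `std::assert_eq!`.
    assert_eq!(expected, actual);
}
```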
--- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a94de65bc9896..16597cd5d2f12 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8370,9 +8370,9 @@ dependencies = [ [[package]] name = "pretty_assertions" -version = "1.4.0" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af7cee1a6c8a5b9208b3cb1061f10c0cb689087b3d8ce85fb9d2dd7a29b6ba66" +checksum = "3ae130e2f271fbc2ac3a40fb1d07180839cdbbe443c7a27e1e3c13c5cac0116d" dependencies = [ "diff", "yansi", @@ -14527,9 +14527,9 @@ dependencies = [ [[package]] name = "yansi" -version = "0.5.1" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec" +checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049" [[package]] name = "yazi" From a9f816d5fb143380a8c3876e7a31e2c10826bb45 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Thu, 3 Oct 2024 12:38:51 -0400 Subject: [PATCH 208/228] telemetry_events: Update crate-level docs (#18703) This PR updates the `telemetry_events` crate to use module-level documentation for its crate-level docs. Release Notes: - N/A --- crates/telemetry_events/src/telemetry_events.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/crates/telemetry_events/src/telemetry_events.rs b/crates/telemetry_events/src/telemetry_events.rs index d6e737b929e1f..f7b18523df17e 100644 --- a/crates/telemetry_events/src/telemetry_events.rs +++ b/crates/telemetry_events/src/telemetry_events.rs @@ -1,4 +1,5 @@ -/// Please see: [Telemetry in Zed](https://zed.dev/docs/telemetry) for additional documentation. +//! See [Telemetry in Zed](https://zed.dev/docs/telemetry) for additional information. + use semantic_version::SemanticVersion; use serde::{Deserialize, Serialize}; use std::{fmt::Display, sync::Arc, time::Duration}; From 8c95b8d89a9c7eb1a21fb88565806f2d780e45f8 Mon Sep 17 00:00:00 2001 From: Nate Butler Date: Thu, 3 Oct 2024 13:17:31 -0400 Subject: [PATCH 209/228] `theme` crate spring cleaning (#18695) This PR does some spring cleaning on the `theme` crate: - Removed two unused stories and the story dep - Removed the `one` theme family (from the `theme` crate, not the app), this is now `zed_default_themes`. - This will hopefully remove some confusion caused by this theme we started in rust but didn't end up using - Removed `theme::prelude` (it just re-exported scale colors, which we don't use outside `theme`) - Removed completely unused `zed_pro` themes (we started on these during the gpui2 port and didn't finish them.) 
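A self-contained sketch of the fallback pattern this cleanup keeps. The types below are simplified stand-ins, not the actual `theme` crate API; the idea mirrors the `zed_default_themes()` registration and the `zed_default_dark()` fallback visible in the hunks below.

```rust
use std::collections::HashMap;

#[derive(Clone, Debug, PartialEq)]
struct Theme {
    name: &'static str,
}

fn zed_default_dark() -> Theme {
    Theme { name: "Zed Default Dark" }
}

struct ThemeRegistry {
    themes: HashMap<&'static str, Theme>,
}

impl ThemeRegistry {
    fn new() -> Self {
        // The fallback theme is registered unconditionally at construction
        // time, so tests always have a theme to load.
        let fallback = zed_default_dark();
        let mut themes = HashMap::new();
        themes.insert(fallback.name, fallback);
        Self { themes }
    }

    fn get_or_default(&self, name: &str) -> &Theme {
        self.themes
            .get(name)
            .unwrap_or_else(|| &self.themes[zed_default_dark().name])
    }
}

fn main() {
    let registry = ThemeRegistry::new();
    // A theme that was never registered resolves to the built-in default.
    assert_eq!(registry.get_or_default("One Dark").name, "Zed Default Dark");
}
```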
Release Notes: - N/A --------- Co-authored-by: Marshall Bowers --- Cargo.lock | 1 - crates/theme/Cargo.toml | 2 - crates/theme/src/default_theme.rs | 71 --------- .../src/{one_themes.rs => fallback_themes.rs} | 18 +-- crates/theme/src/prelude.rs | 6 - crates/theme/src/registry.rs | 9 +- crates/theme/src/settings.rs | 4 +- crates/theme/src/styles.rs | 6 - crates/theme/src/styles/stories/color.rs | 40 ----- crates/theme/src/styles/stories/mod.rs | 5 - crates/theme/src/styles/stories/players.rs | 143 ------------------ crates/theme/src/theme.rs | 8 +- crates/theme/theme.md | 15 -- crates/theme/util/hex_to_hsla.py | 35 ----- 14 files changed, 15 insertions(+), 348 deletions(-) delete mode 100644 crates/theme/src/default_theme.rs rename crates/theme/src/{one_themes.rs => fallback_themes.rs} (96%) delete mode 100644 crates/theme/src/prelude.rs delete mode 100644 crates/theme/src/styles/stories/color.rs delete mode 100644 crates/theme/src/styles/stories/mod.rs delete mode 100644 crates/theme/src/styles/stories/players.rs delete mode 100644 crates/theme/theme.md delete mode 100644 crates/theme/util/hex_to_hsla.py diff --git a/Cargo.lock b/Cargo.lock index 16597cd5d2f12..0a14f04f892b6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -11655,7 +11655,6 @@ dependencies = [ "serde_json_lenient", "serde_repr", "settings", - "story", "util", "uuid", ] diff --git a/crates/theme/Cargo.toml b/crates/theme/Cargo.toml index 934faa1186286..b751bea727c75 100644 --- a/crates/theme/Cargo.toml +++ b/crates/theme/Cargo.toml @@ -10,7 +10,6 @@ workspace = true [features] default = [] -stories = ["dep:story"] test-support = ["gpui/test-support", "fs/test-support", "settings/test-support"] [lib] @@ -36,7 +35,6 @@ serde_json.workspace = true serde_json_lenient.workspace = true serde_repr.workspace = true settings.workspace = true -story = { workspace = true, optional = true } util.workspace = true uuid.workspace = true diff --git a/crates/theme/src/default_theme.rs b/crates/theme/src/default_theme.rs deleted file mode 100644 index 5806340916533..0000000000000 --- a/crates/theme/src/default_theme.rs +++ /dev/null @@ -1,71 +0,0 @@ -use std::sync::Arc; - -use gpui::WindowBackgroundAppearance; - -use crate::AccentColors; - -use crate::{ - default_color_scales, - one_themes::{one_dark, one_family}, - Appearance, PlayerColors, StatusColors, SyntaxTheme, SystemColors, Theme, ThemeColors, - ThemeFamily, ThemeStyles, -}; - -fn zed_pro_daylight() -> Theme { - Theme { - id: "zed_pro_daylight".to_string(), - name: "Zed Pro Daylight".into(), - appearance: Appearance::Light, - styles: ThemeStyles { - window_background_appearance: WindowBackgroundAppearance::Opaque, - system: SystemColors::default(), - colors: ThemeColors::light(), - status: StatusColors::light(), - player: PlayerColors::light(), - syntax: Arc::new(SyntaxTheme::default()), - accents: AccentColors::light(), - }, - } -} - -pub(crate) fn zed_pro_moonlight() -> Theme { - Theme { - id: "zed_pro_moonlight".to_string(), - name: "Zed Pro Moonlight".into(), - appearance: Appearance::Dark, - styles: ThemeStyles { - window_background_appearance: WindowBackgroundAppearance::Opaque, - system: SystemColors::default(), - colors: ThemeColors::dark(), - status: StatusColors::dark(), - player: PlayerColors::dark(), - syntax: Arc::new(SyntaxTheme::default()), - accents: AccentColors::dark(), - }, - } -} - -/// Returns the Zed Pro theme family. -/// -/// Note: To be removed until the theme is implemented. 
-pub fn zed_pro_family() -> ThemeFamily { - ThemeFamily { - id: "zed_pro".to_string(), - name: "Zed Pro".into(), - author: "Zed Team".into(), - themes: vec![zed_pro_daylight(), zed_pro_moonlight()], - scales: default_color_scales(), - } -} - -impl Default for ThemeFamily { - fn default() -> Self { - one_family() - } -} - -impl Default for Theme { - fn default() -> Self { - one_dark() - } -} diff --git a/crates/theme/src/one_themes.rs b/crates/theme/src/fallback_themes.rs similarity index 96% rename from crates/theme/src/one_themes.rs rename to crates/theme/src/fallback_themes.rs index 50a4184e8bc93..553c75623381d 100644 --- a/crates/theme/src/one_themes.rs +++ b/crates/theme/src/fallback_themes.rs @@ -7,21 +7,21 @@ use crate::{ SystemColors, Theme, ThemeColors, ThemeFamily, ThemeStyles, }; -// Note: This theme family is not the one you see in Zed at the moment. -// This is a from-scratch rebuild that Nate started work on. We currently -// only use this in the tests, and the One family from the `themes/` directory -// is what gets loaded into Zed when running it. -pub fn one_family() -> ThemeFamily { +/// The default theme family for Zed. +/// +/// This is used to construct the default theme fallback values, as well as to +/// have a theme available at compile time for tests. +pub fn zed_default_themes() -> ThemeFamily { ThemeFamily { - id: "one".to_string(), - name: "One".into(), + id: "zed-default".to_string(), + name: "Zed Default".into(), author: "".into(), - themes: vec![one_dark()], + themes: vec![zed_default_dark()], scales: default_color_scales(), } } -pub(crate) fn one_dark() -> Theme { +pub(crate) fn zed_default_dark() -> Theme { let bg = hsla(215. / 360., 12. / 100., 15. / 100., 1.); let editor = hsla(220. / 360., 12. / 100., 18. / 100., 1.); let elevated_surface = hsla(225. / 360., 12. / 100., 17. / 100., 1.); diff --git a/crates/theme/src/prelude.rs b/crates/theme/src/prelude.rs deleted file mode 100644 index e8e2378249fd4..0000000000000 --- a/crates/theme/src/prelude.rs +++ /dev/null @@ -1,6 +0,0 @@ -#[allow(unused)] -pub(crate) use crate::default_colors::{ - amber, black, blue, bronze, brown, crimson, cyan, gold, grass, gray, green, indigo, iris, jade, - lime, mauve, mint, olive, orange, pink, plum, purple, red, ruby, sage, sand, sky, slate, teal, - tomato, violet, white, yellow, -}; diff --git a/crates/theme/src/registry.rs b/crates/theme/src/registry.rs index a77ab141a6547..771511973f574 100644 --- a/crates/theme/src/registry.rs +++ b/crates/theme/src/registry.rs @@ -74,12 +74,9 @@ impl ThemeRegistry { assets, }; - // We're loading our new versions of the One themes by default, as - // we need them to be loaded for tests. - // - // These themes will get overwritten when `load_user_themes` is called - // when Zed starts, so the One variants used will be the ones ported from Zed1. - registry.insert_theme_families([crate::one_themes::one_family()]); + // We're loading the Zed default theme, as we need a theme to be loaded + // for tests. 
+ registry.insert_theme_families([crate::fallback_themes::zed_default_themes()]); registry } diff --git a/crates/theme/src/settings.rs b/crates/theme/src/settings.rs index 0c8ea782cd608..d126ec058c2c6 100644 --- a/crates/theme/src/settings.rs +++ b/crates/theme/src/settings.rs @@ -1,4 +1,4 @@ -use crate::one_themes::one_dark; +use crate::fallback_themes::zed_default_dark; use crate::{Appearance, SyntaxTheme, Theme, ThemeRegistry, ThemeStyleContent}; use anyhow::Result; use derive_more::{Deref, DerefMut}; @@ -629,7 +629,7 @@ impl settings::Settings for ThemeSettings { theme_selection: defaults.theme.clone(), active_theme: themes .get(defaults.theme.as_ref().unwrap().theme(*system_appearance)) - .or(themes.get(&one_dark().name)) + .or(themes.get(&zed_default_dark().name)) .unwrap(), theme_overrides: None, ui_density: defaults.ui_density.unwrap_or(UiDensity::Default), diff --git a/crates/theme/src/styles.rs b/crates/theme/src/styles.rs index 137603113aadc..da22f8de1f5f1 100644 --- a/crates/theme/src/styles.rs +++ b/crates/theme/src/styles.rs @@ -5,15 +5,9 @@ mod status; mod syntax; mod system; -#[cfg(feature = "stories")] -mod stories; - pub use accents::*; pub use colors::*; pub use players::*; pub use status::*; pub use syntax::*; pub use system::*; - -#[cfg(feature = "stories")] -pub use stories::*; diff --git a/crates/theme/src/styles/stories/color.rs b/crates/theme/src/styles/stories/color.rs deleted file mode 100644 index 8e6c86ba762ba..0000000000000 --- a/crates/theme/src/styles/stories/color.rs +++ /dev/null @@ -1,40 +0,0 @@ -use gpui::prelude::*; -use gpui::{div, px, ViewContext}; -use story::Story; - -use crate::{default_color_scales, ColorScaleStep}; - -/// The story showcasing all the default color scales -pub struct ColorsStory; - -impl Render for ColorsStory { - fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { - let color_scales = default_color_scales(); - - Story::container().child(Story::title("Colors")).child( - div() - .id("colors") - .flex() - .flex_col() - .gap_1() - .overflow_y_scroll() - .text_color(gpui::white()) - .children(color_scales.into_iter().map(|scale| { - div() - .flex() - .child( - div() - .w(px(75.)) - .line_height(px(24.)) - .child(scale.name().clone()), - ) - .child( - div().flex().gap_1().children( - ColorScaleStep::ALL - .map(|step| div().flex().size_6().bg(scale.step(cx, step))), - ), - ) - })), - ) - } -} diff --git a/crates/theme/src/styles/stories/mod.rs b/crates/theme/src/styles/stories/mod.rs deleted file mode 100644 index af6af965484ad..0000000000000 --- a/crates/theme/src/styles/stories/mod.rs +++ /dev/null @@ -1,5 +0,0 @@ -mod color; -mod players; - -pub use color::*; -pub use players::*; diff --git a/crates/theme/src/styles/stories/players.rs b/crates/theme/src/styles/stories/players.rs deleted file mode 100644 index 0d50c6edc9afc..0000000000000 --- a/crates/theme/src/styles/stories/players.rs +++ /dev/null @@ -1,143 +0,0 @@ -use gpui::{div, img, px, IntoElement, ParentElement, Render, Styled, ViewContext}; -use story::Story; - -use crate::{ActiveTheme, PlayerColors}; - -/// The story showcasing the player colors -pub struct PlayerStory; - -impl Render for PlayerStory { - fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { - Story::container().child( - div() - .flex() - .flex_col() - .gap_4() - .child(Story::title_for::()) - .child(Story::label("Player Colors")) - .child( - div() - .flex() - .flex_col() - .gap_1() - .child( - div().flex().gap_1().children( - cx.theme() - .players() - .0 - .clone() - .iter_mut() - 
.map(|player| div().w_8().h_8().rounded_md().bg(player.cursor)), - ), - ) - .child( - div().flex().gap_1().children( - cx.theme().players().0.clone().iter_mut().map(|player| { - div().w_8().h_8().rounded_md().bg(player.background) - }), - ), - ) - .child( - div().flex().gap_1().children( - cx.theme().players().0.clone().iter_mut().map(|player| { - div().w_8().h_8().rounded_md().bg(player.selection) - }), - ), - ), - ) - .child(Story::label("Avatar Rings")) - .child(div().flex().gap_1().children( - cx.theme().players().0.clone().iter_mut().map(|player| { - div() - .my_1() - .rounded_full() - .border_2() - .border_color(player.cursor) - .child( - img("https://avatars.githubusercontent.com/u/1714999?v=4") - .rounded_full() - .size_6() - .bg(gpui::red()), - ) - }), - )) - .child(Story::label("Player Backgrounds")) - .child(div().flex().gap_1().children( - cx.theme().players().0.clone().iter_mut().map(|player| { - div() - .my_1() - .rounded_xl() - .flex() - .items_center() - .h_8() - .py_0p5() - .px_1p5() - .bg(player.background) - .child( - div() - .relative() - .mx_neg_1() - .rounded_full() - .border_2() - .border_color(player.background) - .size(px(28.)) - .child( - img("https://avatars.githubusercontent.com/u/1714999?v=4") - .rounded_full() - .size(px(24.)) - .bg(gpui::red()), - ), - ) - .child( - div() - .relative() - .mx_neg_1() - .rounded_full() - .border_2() - .border_color(player.background) - .size(px(28.)) - .child( - img("https://avatars.githubusercontent.com/u/1714999?v=4") - .rounded_full() - .size(px(24.)) - .bg(gpui::red()), - ), - ) - .child( - div() - .relative() - .mx_neg_1() - .rounded_full() - .border_2() - .border_color(player.background) - .size(px(28.)) - .child( - img("https://avatars.githubusercontent.com/u/1714999?v=4") - .rounded_full() - .size(px(24.)) - .bg(gpui::red()), - ), - ) - }), - )) - .child(Story::label("Player Selections")) - .child(div().flex().flex_col().gap_px().children( - cx.theme().players().0.clone().iter_mut().map(|player| { - div() - .flex() - .child( - div() - .flex() - .flex_none() - .rounded_sm() - .px_0p5() - .text_color(cx.theme().colors().text) - .bg(player.selection) - .child("The brown fox jumped over the lazy dog."), - ) - .child(div().flex_1()) - }), - )), - ) - } -} diff --git a/crates/theme/src/theme.rs b/crates/theme/src/theme.rs index a6ca59d734e24..d4436e5329527 100644 --- a/crates/theme/src/theme.rs +++ b/crates/theme/src/theme.rs @@ -9,13 +9,8 @@ //! A theme is a collection of colors used to build a consistent appearance for UI components across the application. mod default_colors; -mod default_theme; +mod fallback_themes; mod font_family_cache; -mod one_themes; -/// A prelude for working with the theme system. -/// -/// TODO: remove this. This only publishes default colors. -pub mod prelude; mod registry; mod scale; mod schema; @@ -26,7 +21,6 @@ use std::sync::Arc; use ::settings::{Settings, SettingsStore}; pub use default_colors::*; -pub use default_theme::*; pub use font_family_cache::*; pub use registry::*; pub use scale::*; diff --git a/crates/theme/theme.md b/crates/theme/theme.md deleted file mode 100644 index 4cb19eb3c4ebe..0000000000000 --- a/crates/theme/theme.md +++ /dev/null @@ -1,15 +0,0 @@ -# Theme - -This crate provides the theme system for Zed. - -## Overview - -A theme is a collection of colors used to build a consistent appearance for UI components across the application. -To produce a theme in Zed, - -A theme is made of two parts: A [ThemeFamily] and one or more [Theme]s. 
- -// -A [ThemeFamily] contains metadata like theme name, author, and theme-specific [ColorScales] as well as a series of themes. - -- [ThemeColors] - A set of colors that are used to style the UI. Refer to the [ThemeColors] documentation for more information. diff --git a/crates/theme/util/hex_to_hsla.py b/crates/theme/util/hex_to_hsla.py deleted file mode 100644 index 17faa186d8c6e..0000000000000 --- a/crates/theme/util/hex_to_hsla.py +++ /dev/null @@ -1,35 +0,0 @@ -import colorsys -import sys - -def hex_to_rgb(hex): - hex = hex.lstrip('#') - if len(hex) == 8: # 8 digit hex color - r, g, b, a = (int(hex[i:i+2], 16) for i in (0, 2, 4, 6)) - return r, g, b, a / 255.0 - else: # 6 digit hex color - return tuple(int(hex[i:i+2], 16) for i in (0, 2, 4)) + (1.0,) - -def rgb_to_hsla(rgb): - h, l, s = colorsys.rgb_to_hls(rgb[0]/255.0, rgb[1]/255.0, rgb[2]/255.0) - a = rgb[3] # alpha value - return (round(h * 360, 1), round(s * 100, 1), round(l * 100, 1), round(a, 3)) - -def hex_to_hsla(hex): - return rgb_to_hsla(hex_to_rgb(hex)) - -if len(sys.argv) != 2: - print("Usage: python util/hex_to_hsla.py <6 or 8 digit hex color or comma-separated list of colors>") -else: - input_arg = sys.argv[1] - if ',' in input_arg: # comma-separated list of colors - hex_colors = input_arg.split(',') - hslas = [] # output array - for hex_color in hex_colors: - hex_color = hex_color.strip("'\" ") - h, s, l, a = hex_to_hsla(hex_color) - hslas.append(f"hsla({h} / 360., {s} / 100., {l} / 100., {a})") - print(hslas) - else: # single color - hex_color = input_arg.strip("'\"") - h, s, l, a = hex_to_hsla(hex_color) - print(f"hsla({h} / 360., {s} / 100., {l} / 100., {a})") From cddd7875a42cb017b2751e5b6639ba322f6629df Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Thu, 3 Oct 2024 13:37:43 -0400 Subject: [PATCH 210/228] Extract Protocol Buffers support into an extension (#18704) This PR extracts the Protocol Buffers support into an extension. Release Notes: - Removed built-in support for Protocol Buffers, in favor of making it available as an extension. The Protocol Buffers extension will be suggested for download when you open a `.proto` file. 
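To make the suggestion mechanism in the release note concrete, a minimal, self-contained sketch. Only the table shape and the `("proto", &["proto"])` entry come from the `extension_suggest.rs` hunk below; the lookup helper and `main` are illustrative.

```rust
// Maps extension IDs to the file suffixes that should trigger a
// "download this extension?" suggestion. Trimmed to a few entries.
const SUGGESTIONS_BY_EXTENSION_ID: &[(&str, &[&str])] = &[
    ("prisma", &["prisma"]),
    ("proto", &["proto"]),
    ("purescript", &["purs"]),
];

// Illustrative helper: find which extension to suggest for a file suffix.
fn suggested_extension(file_extension: &str) -> Option<&'static str> {
    SUGGESTIONS_BY_EXTENSION_ID
        .iter()
        .find(|(_, suffixes)| suffixes.contains(&file_extension))
        .map(|(id, _)| *id)
}

fn main() {
    // Opening `service.proto` should now suggest the `proto` extension.
    assert_eq!(suggested_extension("proto"), Some("proto"));
    assert_eq!(suggested_extension("unknown"), None);
}
```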
--- Cargo.lock | 10 ---------- Cargo.toml | 1 - crates/extensions_ui/src/extension_suggest.rs | 1 + crates/languages/Cargo.toml | 2 -- crates/languages/src/lib.rs | 4 +--- extensions/proto/extension.toml | 11 +++++++++++ .../proto/languages}/proto/config.toml | 0 .../proto/languages}/proto/highlights.scm | 0 .../proto/languages}/proto/outline.scm | 0 9 files changed, 13 insertions(+), 16 deletions(-) create mode 100644 extensions/proto/extension.toml rename {crates/languages/src => extensions/proto/languages}/proto/config.toml (100%) rename {crates/languages/src => extensions/proto/languages}/proto/highlights.scm (100%) rename {crates/languages/src => extensions/proto/languages}/proto/outline.scm (100%) diff --git a/Cargo.lock b/Cargo.lock index 0a14f04f892b6..12b38967cc3fb 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6386,7 +6386,6 @@ dependencies = [ "node_runtime", "paths", "project", - "protols-tree-sitter-proto", "regex", "rope", "rust-embed", @@ -8644,15 +8643,6 @@ version = "2.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "106dd99e98437432fed6519dedecfade6a06a73bb7b2a1e019fdd2bee5778d94" -[[package]] -name = "protols-tree-sitter-proto" -version = "0.2.0" -source = "git+https://github.com/zed-industries/tree-sitter-proto?rev=0848bd30a64be48772e15fbb9d5ba8c0cc5772ad#0848bd30a64be48772e15fbb9d5ba8c0cc5772ad" -dependencies = [ - "cc", - "tree-sitter-language", -] - [[package]] name = "psm" version = "0.1.21" diff --git a/Cargo.toml b/Cargo.toml index 8feb93a57856a..a23663f5c818f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -388,7 +388,6 @@ profiling = "1" prost = "0.9" prost-build = "0.9" prost-types = "0.9" -protols-tree-sitter-proto = { git = "https://github.com/zed-industries/tree-sitter-proto", rev = "0848bd30a64be48772e15fbb9d5ba8c0cc5772ad" } pulldown-cmark = { version = "0.12.0", default-features = false } rand = "0.8.5" regex = "1.5" diff --git a/crates/extensions_ui/src/extension_suggest.rs b/crates/extensions_ui/src/extension_suggest.rs index 89f51fdebcfee..ed003f25b7f23 100644 --- a/crates/extensions_ui/src/extension_suggest.rs +++ b/crates/extensions_ui/src/extension_suggest.rs @@ -54,6 +54,7 @@ const SUGGESTIONS_BY_EXTENSION_ID: &[(&str, &[&str])] = &[ ("ocaml", &["ml", "mli"]), ("php", &["php"]), ("prisma", &["prisma"]), + ("proto", &["proto"]), ("purescript", &["purs"]), ("r", &["r", "R"]), ("racket", &["rkt"]), diff --git a/crates/languages/Cargo.toml b/crates/languages/Cargo.toml index 19842efac20cf..dc7868ea86dda 100644 --- a/crates/languages/Cargo.toml +++ b/crates/languages/Cargo.toml @@ -21,7 +21,6 @@ load-grammars = [ "tree-sitter-jsdoc", "tree-sitter-json", "tree-sitter-md", - "protols-tree-sitter-proto", "tree-sitter-python", "tree-sitter-regex", "tree-sitter-rust", @@ -46,7 +45,6 @@ lsp.workspace = true node_runtime.workspace = true paths.workspace = true project.workspace = true -protols-tree-sitter-proto = { workspace = true, optional = true } regex.workspace = true rope.workspace = true rust-embed.workspace = true diff --git a/crates/languages/src/lib.rs b/crates/languages/src/lib.rs index 295df6e419b7e..374b32c0ac349 100644 --- a/crates/languages/src/lib.rs +++ b/crates/languages/src/lib.rs @@ -45,7 +45,6 @@ pub fn init(languages: Arc, node_runtime: NodeRuntime, cx: &mu ("jsonc", tree_sitter_json::LANGUAGE), ("markdown", tree_sitter_md::LANGUAGE), ("markdown-inline", tree_sitter_md::INLINE_LANGUAGE), - ("proto", protols_tree_sitter_proto::LANGUAGE), ("python", tree_sitter_python::LANGUAGE), ("regex", 
tree_sitter_regex::LANGUAGE), ("rust", tree_sitter_rust::LANGUAGE), @@ -183,7 +182,6 @@ pub fn init(languages: Arc, node_runtime: NodeRuntime, cx: &mu "yaml", vec![Arc::new(yaml::YamlLspAdapter::new(node_runtime.clone()))] ); - language!("proto"); // Register globally available language servers. // @@ -277,7 +275,7 @@ pub fn language(name: &str, grammar: tree_sitter::Language) -> Arc { fn load_config(name: &str) -> LanguageConfig { let config_toml = String::from_utf8( LanguageDir::get(&format!("{}/config.toml", name)) - .unwrap() + .unwrap_or_else(|| panic!("missing config for language {:?}", name)) .data .to_vec(), ) diff --git a/extensions/proto/extension.toml b/extensions/proto/extension.toml new file mode 100644 index 0000000000000..a49ba7a4c4a7e --- /dev/null +++ b/extensions/proto/extension.toml @@ -0,0 +1,11 @@ +id = "proto" +name = "Proto" +description = "Protocol Buffers support." +version = "0.1.0" +schema_version = 1 +authors = ["Zed Industries "] +repository = "https://github.com/zed-industries/zed" + +[grammars.proto] +repository = "https://github.com/zed-industries/tree-sitter-proto" +commit = "0848bd30a64be48772e15fbb9d5ba8c0cc5772ad" diff --git a/crates/languages/src/proto/config.toml b/extensions/proto/languages/proto/config.toml similarity index 100% rename from crates/languages/src/proto/config.toml rename to extensions/proto/languages/proto/config.toml diff --git a/crates/languages/src/proto/highlights.scm b/extensions/proto/languages/proto/highlights.scm similarity index 100% rename from crates/languages/src/proto/highlights.scm rename to extensions/proto/languages/proto/highlights.scm diff --git a/crates/languages/src/proto/outline.scm b/extensions/proto/languages/proto/outline.scm similarity index 100% rename from crates/languages/src/proto/outline.scm rename to extensions/proto/languages/proto/outline.scm From cac98b7bbff89e6b0c7f632940ce55597fa1c55c Mon Sep 17 00:00:00 2001 From: Jordan Pittman Date: Thu, 3 Oct 2024 14:38:17 -0400 Subject: [PATCH 211/228] Show color swatches for LSP completions (#18665) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes #11991 Release Notes: - Added support for color swatches for language server completions. Screenshot 2024-10-02 at 19 02 22 Screenshot 2024-10-02 at 19 02 48 This implementation is mostly a port of the VSCode version of the ColorExtractor. It seems reasonable the we should support _at least_ what VSCode does for detecting color swatches from LSP completions. This implementation could definitely be better perf-wise by writing a dedicated color parser. I also think it would be neat if, in the future, Zed handled _more_ color formats — especially wide-gamut colors. 
There are a few differences to the regexes in the VSCode implementation, but mainly to simplify the implementation: - The hex vs rgb/hsl regexes were split into two parts - The rgb/hsl regexes allow 3 or 4 color components whether hsla/rgba or not and the parsing implementation accepts/rejects colors as needed --------- Co-authored-by: Marshall Bowers --- crates/editor/src/editor.rs | 5 + crates/project/src/color_extractor.rs | 297 ++++++++++++++++++++++++++ crates/project/src/project.rs | 17 +- typos.toml | 4 +- 4 files changed, 320 insertions(+), 3 deletions(-) create mode 100644 crates/project/src/color_extractor.rs diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 525a94f2582ab..e2355461046e3 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -1228,6 +1228,10 @@ impl CompletionsMenu { None }; + let color_swatch = completion + .color() + .map(|color| div().size_4().bg(color).rounded(px(2.))); + div().min_w(px(220.)).max_w(px(540.)).child( ListItem::new(mat.candidate_id) .inset(true) @@ -1243,6 +1247,7 @@ impl CompletionsMenu { task.detach_and_log_err(cx) } })) .start_slot::
(color_swatch) .child(h_flex().overflow_hidden().child(completion_label)) .end_slot::