diff --git a/.stylua.toml b/.stylua.toml
deleted file mode 100644
index 020ce912..00000000
--- a/.stylua.toml
+++ /dev/null
@@ -1,5 +0,0 @@
-column_width = 100
-indent_type = "Spaces"
-indent_width = 2
-[sort_requires]
-enabled = true
diff --git a/README.md b/README.md
index e98b783f..0cedbf60 100644
--- a/README.md
+++ b/README.md
@@ -32,7 +32,7 @@ Use the
- Chat buffer
+ Chat buffer
Code author
Code advisor
@@ -56,12 +56,13 @@ Use the
**Note**: You only need to call the `setup` function if you wish to change any of the defaults.
+
+<details>
+  <summary>Click to see the default configuration</summary>
```lua
{
- api_key = "OPENAI_API_KEY", -- Your OpenAI API key
- org_api_key = "OPENAI_ORG_KEY", -- Your organisation OpenAI API key
+ api_key = "OPENAI_API_KEY", -- Your API key
+ org_api_key = "OPENAI_ORG_KEY", -- Your organisation API key
base_url = "https://api.openai.com", -- The URL to use for the API requests
ai_settings = {
-- Default settings for the Completions API
@@ -105,43 +110,97 @@ The plugin comes with the following defaults:
user = nil,
},
conversations = {
- auto_save = true, -- Once a conversation is created/loaded, automatically save it
- save_dir = vim.fn.stdpath("data") .. "/codecompanion/conversations",
+ save_dir = vim.fn.stdpath("data") .. "/codecompanion/conversations", -- Path to save conversations to
+ },
+ display = {
+ action_palette = {
+ width = 95,
+ height = 10,
+ },
+ chat = { -- Options for the chat strategy
+ type = "buffer", -- buffer|float
+ show_settings = false, -- Show the model settings in the chat window
+ float = {
+ border = "single",
+ max_height = 0,
+ max_width = 0,
+ padding = 1,
+ },
+ },
+ win_options = {
+ cursorcolumn = false,
+ cursorline = false,
+ foldcolumn = "0",
+ linebreak = true,
+ list = false,
+ signcolumn = "no",
+ spell = false,
+ wrap = true,
+ },
},
- display = { -- How to display `advisor` outputs
- type = "popup", -- popup|split
- split = "horizontal" -- horizontal|vertical
- height = 0.7,
- width = 0.8,
+ keymaps = {
+ [""] = "keymaps.close", -- Close the chat buffer
+ ["q"] = "keymaps.cancel_request", -- Cancel the currently streaming request
+ ["gc"] = "keymaps.clear", -- Clear the contents of the chat
+ ["ga"] = "keymaps.codeblock", -- Insert a codeblock in the chat
+ ["gs"] = "keymaps.save_conversation", -- Save the current chat as a conversation
+ ["]"] = "keymaps.next", -- Move to the next header in the chat
+ ["["] = "keymaps.previous", -- Move to the previous header in the chat
},
- log_level = "ERROR", -- One of: TRACE, DEBUG, ERROR
- send_code = true, -- Send your code to OpenAI
- show_token_count = true, -- Show the token count for the current chat
- use_default_actions = true, -- The actions that appear in the action palette
+ log_level = "ERROR", -- TRACE|DEBUG|ERROR
+ send_code = true, -- Send code context to the API?
+ show_token_count = true, -- Show the token count for the current chat?
+ use_default_actions = true, -- Use the default actions in the action palette?
+}
+```
+
+</details>
+
+### Edgy.nvim Configuration
+
+The author recommends pairing with [edgy.nvim](https://github.com/folke/edgy.nvim) for a Copilot Chat-like experience:
+
+```lua
+{
+ "folke/edgy.nvim",
+ event = "VeryLazy",
+ init = function()
+ vim.opt.laststatus = 3
+ vim.opt.splitkeep = "screen"
+ end,
+ opts = {
+ right = {
+ { ft = "codecompanion", title = "Code Companion Chat", size = { width = 0.45 } },
+ }
+ }
}
```
-Modify these settings via the `opts` table in Lazy.nvim or by calling the `require("codecompanion").setup()` function in Packer.
+### Highlight Groups
+
+The plugin sets a number of highlights during setup:
-> **Note**: The `send_code` option can prevent any visual selections from being sent to OpenAI for processing as part of any `advisor` or `author` actions
+- `CodeCompanionTokens` - Virtual text showing the token count when in a chat buffer
+- `CodeCompanionVirtualText` - All other virtual text in the chat buffer
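
Both of these link to `Comment` by default, so they can be overridden after calling `setup`. A minimal sketch (the target groups below are arbitrary stand-ins):

```lua
-- Restyle the plugin's virtual text; swap in any highlight groups you prefer
vim.api.nvim_set_hl(0, "CodeCompanionTokens", { link = "DiagnosticVirtualTextInfo" })
vim.api.nvim_set_hl(0, "CodeCompanionVirtualText", { link = "NonText" })
```
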
## :rocket: Usage
The plugin has a number of commands:
- `CodeCompanionChat` - To open up a new chat buffer
-- `CodeCompanionActions` - To open up the action selector window
-- `CodeCompanionSaveConversationAs` - Saves a chat buffer as a conversation
+- `CodeCompanionToggle` - Toggle a chat buffer
+- `CodeCompanionActions` - To open up the action palette window
-They can be assigned to keymaps with:
+For an optimum workflow, the plugin author recommends the following keymaps:
```lua
vim.api.nvim_set_keymap("n", "", "CodeCompanionActions", { noremap = true, silent = true })
vim.api.nvim_set_keymap("v", "", "CodeCompanionActions", { noremap = true, silent = true })
-vim.api.nvim_set_keymap("n", "a", "CodeCompanionChat", { noremap = true, silent = true })
+vim.api.nvim_set_keymap("n", "a", "CodeCompanionToggle", { noremap = true, silent = true })
+vim.api.nvim_set_keymap("v", "a", "CodeCompanionToggle", { noremap = true, silent = true })
```
-> **Note**: For some actions, visual mode allows your selection to be sent directly to the chat buffer or OpenAI themselves (in the case of `author` actions).
+> **Note**: For some actions, visual mode allows your selection to be sent directly to the chat buffer or the API itself (in the case of `author` actions).
### The Action Palette
@@ -171,15 +230,27 @@ Or, if you wish to turn off the default actions, set `use_default_actions = fals
-The Chat Buffer is where you can converse with OpenAI, directly from Neovim. It behaves as a regular markdown buffer with some clever additions. When the buffer is written, autocmds trigger the sending of its content to the OpenAI API in the form of prompts. These prompts are segmented by H1 headers into `user` and `assistant` (see OpenAI's [Chat Completions API](https://platform.openai.com/docs/guides/text-generation/chat-completions-api) for more on this). When a response is received, it is then streamed back into the buffer. The result is that you experience the feel of conversing with ChatGPT, from within Neovim.
+The chat buffer is where you can converse with your GenAI API, directly from Neovim. It behaves as a regular markdown buffer with some clever additions. When the buffer is written (or "saved"), autocmds trigger the sending of its content to the API, in the form of prompts. These prompts are segmented by H1 headers: `user` and `assistant` (see OpenAI's [Chat Completions API](https://platform.openai.com/docs/guides/text-generation/chat-completions-api) for more on this). When a response is received, it is then streamed back into the buffer. The result is that you experience the feel of conversing with GenAI, from within Neovim.
-> **Note**: You can cancel a request at any point by pressing `q`.
+#### Keymaps
-At the very top of the Chat Buffer are the parameters which can be changed to affect the API's response back to you. You can find more detail about them by moving the cursor over them or referring to the [Chat Completions reference guide](https://platform.openai.com/docs/api-reference/chat). The parameters can be tweaked and modified throughout the conversation.
+When in the chat buffer, there are a number of keymaps available to you (which can be changed in the config):
-Chat Buffers are not automatically saved owing to them being an `acwrite` buftype (see `:h buftype`). However, the plugin allows for this via the notion of Conversations. Simply run `:CodeCompanionSaveConversationAs` in the chat buffer you wish to save. Conversations can then be restored via the Action Palette and the _Load conversations_ actions.
+- `<C-c>` - Close the buffer
+- `q` - Cancel streaming from the API
+- `gc` - Clear the buffer's contents
+- `ga` - Add a codeblock
+- `gs` - Save the chat as a conversation
+- `]` - Move to the next header in the buffer
+- `[` - Move to the previous header in the buffer
-> **Note**: When a conversation is saved or loaded it will automatically save any changes.
+#### Conversations
+
+Chat buffers are not automatically saved to disk, owing to them being an `acwrite` buftype (see `:h buftype`). However, the plugin allows you to save them as Conversations by pressing `gs` in the buffer. Conversations can then be restored via the Action Palette and the _Load conversations_ action.
+
+#### Settings
+
+If `display.chat.show_settings` is set to `true`, at the very top of the chat buffer will be the parameters which can be changed to affect the API's response back to you. This enables fine-tuning and parameter tweaking throughout the chat. You can find more detail about them by moving the cursor over them or referring to the [Chat Completions reference guide](https://platform.openai.com/docs/api-reference/chat) if you're using OpenAI.
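
As a minimal sketch, enabling this from your plugin config looks like the following (assuming the usual deep-merge of user options into the defaults listed above):

```lua
require("codecompanion").setup({
  display = {
    chat = {
      show_settings = true, -- render the YAML settings block at the top of chat buffers
    },
  },
})
```
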
### In-Built Actions
@@ -201,9 +272,9 @@ This action utilises the `author` strategy. This action can be useful for genera
#### Code advisor
-As the name suggests, this action provides advice on a visual selection of code and utilises the `advisor` strategy. It uses the `display` configuration option to output the response from OpenAI into a split or a popup. Inevitably, the response back from OpenAI may lead to more questions. Pressing `c` in the advisor buffer will take the conversation to a chat buffer. Pressing `q` will close the buffer.
+As the name suggests, this action provides advice on a visual selection of code and utilises the `advisor` strategy. The response from the API is output into a chat buffer which follows the `display.chat` settings in your configuration.
-> **Note**: For some users, the sending of code to OpenAI may not be an option. In those instances, you can set `send_code = false` in your config.
+> **Note**: For some users, the sending of code to the GenAI may not be an option. In those instances, you can set `send_code = false` in your config.
#### LSP assistant
@@ -213,13 +284,22 @@ Taken from the fantastic [Wtf.nvim](https://github.com/piersolenski/wtf.nvim) pl
### Hooks / User events
-The plugin fires events at the start and the conclusion of an API request. A user can hook into these as follows:
+The plugin fires the following events during its lifecycle:
+
+- `CodeCompanionRequest` - Fired during the API request. Outputs `data.status` with a value of `started` or `finished`
+- `CodeCompanionConversation` - Fired after a conversation has been saved to disk
+- `CodeCompanionChat` - Fired at various points during the chat buffer's lifecycle (see the sketch after this list). Comes with the following attributes:
+ - `data.action = close_buffer` - For when a chat buffer has been permanently closed
+ - `data.action = hide_buffer` - For when a chat buffer is now hidden
+ - `data.action = show_buffer` - For when a chat buffer is now visible after being hidden
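
The `CodeCompanionChat` event can, for example, be used to react to a chat buffer being hidden. A minimal sketch (the callback body is purely illustrative):

```lua
vim.api.nvim_create_autocmd("User", {
  pattern = "CodeCompanionChat",
  callback = function(event)
    if event.data.action == "hide_buffer" then
      -- The buffer can be brought back later with :CodeCompanionToggle
      vim.notify("Chat buffer " .. event.data.buf .. " is now hidden")
    end
  end,
})
```
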
+
+Events can be hooked into as follows:
```lua
local group = vim.api.nvim_create_augroup("CodeCompanionHooks", {})
vim.api.nvim_create_autocmd({ "User" }, {
- pattern = "CodeCompanion",
+ pattern = "CodeCompanionRequest",
group = group,
callback = function(request)
print(request.data.status) -- outputs "started" or "finished"
@@ -234,13 +314,13 @@ vim.api.nvim_create_autocmd({ "User" }, {
If you use the fantastic [Heirline.nvim](https://github.com/rebelot/heirline.nvim) plugin, consider the following snippet to display an icon in the statusline whilst CodeCompanion is speaking to the LLM:
```lua
-local OpenAI = {
+local GenAI = {
static = {
processing = false,
},
update = {
"User",
- pattern = "CodeCompanion",
+ pattern = "CodeCompanionRequest",
callback = function(self, args)
self.processing = (args.data.status == "started")
vim.cmd("redrawstatus")
diff --git a/doc/codecompanion.txt b/doc/codecompanion.txt
index 786d50f7..0f14dffe 100644
--- a/doc/codecompanion.txt
+++ b/doc/codecompanion.txt
@@ -23,7 +23,7 @@ REQUIREMENTS *codecompanion-requirements*
- An API key from OpenAI (get one here <https://platform.openai.com/account/api-keys>)
- The `curl` library installed
-- Neovim 0.9.0 or greater
+- Neovim 0.9.2 or greater
INSTALLATION *codecompanion-installation*
@@ -36,12 +36,13 @@ INSTALLATION *codecompanion-installation*
{
"olimorris/codecompanion.nvim",
dependencies = {
+ "nvim-treesitter/nvim-treesitter",
{
"stevearc/dressing.nvim", -- Optional: Improves the default Neovim UI
opts = {},
},
},
- cmd = { "CodeCompanionChat", "CodeCompanionActions" },
+ cmd = { "CodeCompanionToggle", "CodeCompanionActions" },
config = true
}
@@ -52,6 +53,7 @@ INSTALLATION *codecompanion-installation*
require("codecompanion").setup()
end,
requires = {
+ "nvim-treesitter/nvim-treesitter",
"stevearc/dressing.nvim"
}
})
@@ -60,12 +62,15 @@ INSTALLATION *codecompanion-installation*
CONFIGURATION *codecompanion-configuration*
-The plugin comes with the following defaults:
+
+ **Note**You only need to call the `setup` function if you wish to change
+ any of the defaults.
+Click to see the default configuration ~
>lua
{
- api_key = "OPENAI_API_KEY", -- Your OpenAI API key
- org_api_key = "OPENAI_ORG_KEY", -- Your organisation OpenAI API key
+ api_key = "OPENAI_API_KEY", -- Your API key
+ org_api_key = "OPENAI_ORG_KEY", -- Your organisation API key
base_url = "https://api.openai.com", -- The URL to use for the API requests
ai_settings = {
-- Default settings for the Completions API
@@ -85,48 +90,94 @@ The plugin comes with the following defaults:
user = nil,
},
conversations = {
- auto_save = true, -- Once a conversation is created/loaded, automatically save it
- save_dir = vim.fn.stdpath("data") .. "/codecompanion/conversations",
+ save_dir = vim.fn.stdpath("data") .. "/codecompanion/conversations", -- Path to save conversations to
+ },
+ display = {
+ action_palette = {
+ width = 95,
+ height = 10,
+ },
+ chat = { -- Options for the chat strategy
+ type = "buffer", -- buffer|float
+ show_settings = false, -- Show the model settings in the chat window
+ float = {
+ border = "single",
+ max_height = 0,
+ max_width = 0,
+ padding = 1,
+ },
+ },
+ win_options = {
+ cursorcolumn = false,
+ cursorline = false,
+ foldcolumn = "0",
+ linebreak = true,
+ list = false,
+ signcolumn = "no",
+ spell = false,
+ wrap = true,
+ },
},
- display = { -- How to display `advisor` outputs
- type = "popup", -- popup|split
- split = "horizontal" -- horizontal|vertical
- height = 0.7,
- width = 0.8,
+ keymaps = {
+ [""] = "keymaps.close", -- Close the chat (can be toggled back)
+ ["q"] = "keymaps.cancel_request", -- Cancel the currently streaming request
+ ["gd"] = "keymaps.delete", -- Delete the chat permanently (cannot be toggled)
+ ["gc"] = "keymaps.clear", -- Clear the contents of the chat
+ ["ga"] = "keymaps.codeblock", -- Insert a codeblock in the chat
+ ["gs"] = "keymaps.save_conversation", -- Save the current chat as a conversation
+ ["]"] = "keymaps.next", -- Move to the next header in the chat
+ ["["] = "keymaps.previous", -- Move to the previous header in the chat
},
- log_level = "ERROR", -- One of: TRACE, DEBUG, ERROR
- send_code = true, -- Send your code to OpenAI
- show_token_count = true, -- Show the token count for the current chat
- use_default_actions = true, -- The actions that appear in the action palette
+ log_level = "ERROR", -- TRACE|DEBUG|ERROR
+ send_code = true, -- Send code context to the API?
+ show_token_count = true, -- Show the token count for the current chat?
+ use_default_actions = true, -- Use the default actions in the action palette?
}
<
-Modify these settings via the `opts` table in Lazy.nvim or by calling the
-`require("codecompanion").setup()` function in Packer.
+EDGY.NVIM CONFIGURATION ~
+
+The author recommends pairing with edgy.nvim
+<https://github.com/folke/edgy.nvim> for a Copilot Chat-like experience:
+
+>lua
+ {
+ "folke/edgy.nvim",
+ event = "VeryLazy",
+ init = function()
+ vim.opt.laststatus = 3
+ vim.opt.splitkeep = "screen"
+ end,
+ opts = {
+ right = {
+ { ft = "codecompanion", title = "Code Companion Chat", size = { width = 0.45 } },
+ }
+ }
+ }
+<
- **Note**The `send_code` option can prevent any visual selections from being
- sent to OpenAI for processing as part of any `advisor` or `author` actions
USAGE *codecompanion-usage*
The plugin has a number of commands:
- `CodeCompanionChat` - To open up a new chat buffer
-- `CodeCompanionActions` - To open up the action selector window
-- `CodeCompanionSaveConversationAs` - Saves a chat buffer as a conversation
+- `CodeCompanionToggle` - Toggle a chat buffer
+- `CodeCompanionActions` - To open up the action palette window
-They can be assigned to keymaps with:
+For an optimum workflow, the plugin author recommends the following keymaps:
>lua
vim.api.nvim_set_keymap("n", "", "CodeCompanionActions", { noremap = true, silent = true })
vim.api.nvim_set_keymap("v", "", "CodeCompanionActions", { noremap = true, silent = true })
- vim.api.nvim_set_keymap("n", "a", "CodeCompanionChat", { noremap = true, silent = true })
+ vim.api.nvim_set_keymap("n", "a", "CodeCompanionToggle", { noremap = true, silent = true })
+ vim.api.nvim_set_keymap("v", "a", "CodeCompanionToggle", { noremap = true, silent = true })
<
**Note**For some actions, visual mode allows your selection to be sent directly
- to the chat buffer or OpenAI themselves in the case of `author` actions
+ to the chat buffer or the API itself (in the case of `author` actions).
THE ACTION PALETTE ~
@@ -157,30 +208,48 @@ false` in your config.
THE CHAT BUFFER ~
-The Chat Buffer is where you can converse with OpenAI, directly from Neovim. It
-behaves as a regular markdown buffer with some clever additions. When the
-buffer is written, autocmds trigger the sending of its content to the OpenAI
-API in the form of prompts. These prompts are segmented by H1 headers into
-`user` and `assistant` (see OpenAI’s Chat Completions API
+The chat buffer is where you can converse with your GenAI API, directly from
+Neovim. It behaves as a regular markdown buffer with some clever additions.
+When the buffer is written (or "saved"), autocmds trigger the sending of its
+content to the API, in the form of prompts. These prompts are segmented by H1
+headers: `user` and `assistant` (see OpenAI’s Chat Completions API
<https://platform.openai.com/docs/guides/text-generation/chat-completions-api>
for more on this). When a response is received, it is then streamed back into
the buffer. The result is that you experience the feel of conversing with
-ChatGPT, from within Neovim.
+GenAI, from within Neovim.
+
+
+KEYMAPS
+
+When in the chat buffer, there are a number of keymaps available to you (which
+can be changed in the config):
+
+- `<C-c>` - Close/hide the buffer (can be toggled back)
+- `q` - Cancel streaming from the API
+- `gd` - Delete the buffer (cannot be toggled back)
+- `gc` - Clear the buffer’s contents
+- `ga` - Add a codeblock
+- `gs` - Save the chat as a conversation
+- `]` - Move to the next header in the buffer
+- `[` - Move to the previous header in the buffer
- **Note**You can cancel a request at any point by pressing `q`.
-At the very top of the Chat Buffer are the parameters which can be changed to
-affect the API’s response back to you. You can find more detail about them by
-moving the cursor over them or referring to the Chat Completions reference
-guide . The parameters can
-be tweaked and modified throughout the conversation.
+CONVERSATIONS
-Chat Buffers are not automatically saved into sessions owing to them being an
-`acwrite` buftype (|buftype|). However the plugin allows for this via the
-notion of Conversations. Simply run `:CodeCompanionSaveConversationAs` in the
-buffer you wish to save. Conversations can then be restored via the Action
-Palette and the _Load conversations_ actions. When a conversation is saved or
-loaded it will automatically save to disk.
+Chat buffers are not automatically saved to disk, owing to them being an
+`acwrite` buftype (see |buftype|). However, the plugin allows you to save them
+as Conversations by pressing `gs` in the buffer. Conversations can then be
+restored via the Action Palette and the _Load conversations_ action.
+
+
+SETTINGS
+
+If `display.chat.show_settings` is set to `true`, at the very top of the chat
+buffer will be the parameters which can be changed to affect the API’s
+response back to you. This enables fine-tuning and parameter tweaking
+throughout the chat. You can find more detail about them by moving the cursor
+over them or referring to the Chat Completions reference guide
+<https://platform.openai.com/docs/api-reference/chat> if you’re using OpenAI.
IN-BUILT ACTIONS ~
@@ -218,15 +287,12 @@ initated in, or, if run from a terminal prompt, to write commands.
CODE ADVISOR
As the name suggests, this action provides advice on a visual selection of code
-and utilises the `advisor` strategy. It uses the `display` configuration option
-to output the response from OpenAI into a split or a popup. Inevitably, the
-response back from OpenAI may lead to more questions. Pressing `c` in the
-advisor buffer will take the conversation to a chat buffer. Pressing `q` will
-close the buffer.
+and utilises the `advisor` strategy. The response from the API is output into a
+chat buffer which follows the `display.chat` settings in your configuration.
- **Note**For some users, the sending of code to OpenAI may not be an option. In
- those instances, you can set `send_code = false` in your config.
+ **Note**For some users, the sending of code to the GenAI may not be an option.
+ In those instances, you can set `send_code = false` in your config.
LSP ASSISTANT
@@ -242,8 +308,12 @@ HELPERS *codecompanion-helpers*
HOOKS / USER EVENTS ~
-The plugin fires events at the start and the conclusion of an API request. A
-user can hook into these as follows:
+The plugin fires the following events during its lifecycle:
+
+- `CodeCompanionRequest` - Fired during the API request. Outputs `data.status` with a value of `started` or `finished`
+- `CodeCompanionConversation` - Fired after a conversation has been saved to disk
+
+Events can be hooked into as follows:
>lua
local group = vim.api.nvim_create_augroup("CodeCompanionHooks", {})
@@ -268,13 +338,13 @@ snippet to display an icon in the statusline whilst CodeCompanion is speaking
to the LLM:
>lua
- local OpenAI = {
+ local GenAI = {
static = {
processing = false,
},
update = {
"User",
- pattern = "CodeCompanion",
+ pattern = "CodeCompanionRequest",
callback = function(self, args)
self.processing = (args.data.status == "started")
vim.cmd("redrawstatus")
diff --git a/lua/codecompanion/actions.lua b/lua/codecompanion/actions.lua
index 462d8885..e811be38 100644
--- a/lua/codecompanion/actions.lua
+++ b/lua/codecompanion/actions.lua
@@ -22,7 +22,11 @@ M.validate = function(items, context)
local mode = context.mode:lower()
for _, item in ipairs(items) do
- if item.opts and item.opts.modes then
+ if item.condition and type(item.condition) == "function" then
+ if item.condition(context) then
+ table.insert(validated_items, item)
+ end
+ elseif item.opts and item.opts.modes then
if utils.contains(item.opts.modes, mode) then
table.insert(validated_items, item)
end
@@ -38,7 +42,8 @@ M.static.actions = {
{
name = "Chat",
strategy = "chat",
- description = "Open a chat buffer to converse with OpenAI",
+ description = "Open/restore a chat buffer to converse with your GenAI",
+ type = nil,
prompts = {
n = function()
return require("codecompanion").chat()
@@ -62,6 +67,46 @@ M.static.actions = {
},
},
},
+ {
+ name = "Open chats",
+ strategy = "chat",
+ description = "Your currently open chats",
+ condition = function()
+ return _G.codecompanion_chats and utils.count(_G.codecompanion_chats) > 0
+ end,
+ picker = {
+ prompt = "Select a chat",
+ items = function()
+ local ui = require("codecompanion.utils.ui")
+ local chats = {}
+
+ for bufnr, chat in pairs(_G.codecompanion_chats) do
+ table.insert(chats, {
+ name = chat.name,
+ strategy = "chat",
+ description = chat.description,
+ callback = function()
+ _G.codecompanion_chats[bufnr] = nil
+
+ local winid = 0
+ if config.options.display.chat.type == "float" then
+ winid = ui.open_float(bufnr, {
+ display = config.options.display.chat.float,
+ })
+ else
+ vim.api.nvim_set_current_buf(bufnr)
+ end
+
+ ui.set_options(config.options.display.win_options, winid)
+ ui.buf_scroll_to_end(bufnr)
+ end,
+ })
+ end
+
+ return chats
+ end,
+ },
+ },
{
name = "Chat as ...",
strategy = "chat",
@@ -73,6 +118,7 @@ M.static.actions = {
name = "JavaScript",
strategy = "chat",
description = "Chat as a senior JavaScript developer",
+ type = "javascript",
prompts = {
{
role = "system",
@@ -93,7 +139,7 @@ M.static.actions = {
condition = function(context)
return not context.is_visual
end,
- content = "",
+ content = "\n \n",
},
},
},
@@ -101,6 +147,7 @@ M.static.actions = {
name = "Lua",
strategy = "chat",
description = "Chat as a senior Lua developer",
+ type = "lua",
prompts = {
{
role = "system",
@@ -121,7 +168,7 @@ M.static.actions = {
condition = function(context)
return not context.is_visual
end,
- content = "",
+ content = "\n \n",
},
},
},
@@ -129,6 +176,7 @@ M.static.actions = {
name = "PHP",
strategy = "chat",
description = "Chat as a senior PHP developer",
+ type = "php",
prompts = {
{
role = "system",
@@ -149,7 +197,7 @@ M.static.actions = {
condition = function(context)
return not context.is_visual
end,
- content = "",
+ content = "\n \n",
},
},
},
@@ -157,6 +205,7 @@ M.static.actions = {
name = "Python",
strategy = "chat",
description = "Chat as a senior Python developer",
+ type = "python",
prompts = {
{
role = "system",
@@ -177,7 +226,7 @@ M.static.actions = {
condition = function(context)
return not context.is_visual
end,
- content = "",
+ content = "\n \n",
},
},
},
@@ -185,6 +234,7 @@ M.static.actions = {
name = "Ruby",
strategy = "chat",
description = "Chat as a senior Ruby developer",
+ type = "ruby",
prompts = {
{
role = "system",
@@ -205,7 +255,7 @@ M.static.actions = {
condition = function(context)
return not context.is_visual
end,
- content = "",
+ content = "\n \n",
},
},
},
@@ -215,7 +265,7 @@ M.static.actions = {
{
name = "Code author",
strategy = "author",
- description = "Get OpenAI to write/refactor code for you",
+ description = "Get GenAI to write/refactor code for you",
opts = {
model = config.options.ai_settings.models.author,
user_input = true,
@@ -261,7 +311,7 @@ M.static.actions = {
{
name = "LSP assistant",
strategy = "advisor",
- description = "Get help from OpenAI to fix LSP diagnostics",
+ description = "Get help from GenAI to fix LSP diagnostics",
opts = {
model = config.options.ai_settings.models.advisor,
modes = { "v" },
diff --git a/lua/codecompanion/client.lua b/lua/codecompanion/client.lua
index cbe0b20f..1552afa6 100644
--- a/lua/codecompanion/client.lua
+++ b/lua/codecompanion/client.lua
@@ -1,16 +1,48 @@
local config = require("codecompanion.config")
local log = require("codecompanion.utils.log")
+local schema = require("codecompanion.schema")
_G.codecompanion_jobs = {}
+---@param status string
+local function fire_autocmd(status)
+ vim.api.nvim_exec_autocmds("User", { pattern = "CodeCompanionRequest", data = { status = status } })
+end
+
+---@param bufnr? number
+---@param handler? table
+local function start_request(bufnr, handler)
+ if bufnr and handler then
+ _G.codecompanion_jobs[bufnr] = {
+ status = "running",
+ handler = handler,
+ }
+ end
+ fire_autocmd("started")
+end
+
+---@param bufnr? number
+---@param opts? table
+local function close_request(bufnr, opts)
+ if bufnr then
+ if opts and opts.shutdown then
+ _G.codecompanion_jobs[bufnr].handler:shutdown()
+ end
+ _G.codecompanion_jobs[bufnr] = nil
+ end
+ fire_autocmd("finished")
+end
+
---@class CodeCompanion.Client
---@field secret_key string
---@field organization nil|string
+---@field settings nil|table
local Client = {}
---@class CodeCompanion.ClientArgs
---@field secret_key string
---@field organization nil|string
+---@field settings nil|table
---@param args CodeCompanion.ClientArgs
---@return CodeCompanion.Client
@@ -18,20 +50,36 @@ function Client.new(args)
return setmetatable({
secret_key = args.secret_key,
organization = args.organization,
+ settings = args.settings or schema.get_default(schema.static.client_settings, args.settings),
}, { __index = Client })
end
+---@param client CodeCompanion.Client
+---@return table
+local function headers(client)
+ local group = {
+ content_type = "application/json",
+ Authorization = "Bearer " .. client.secret_key,
+ OpenAI_Organization = client.organization,
+ }
+
+ log:trace("Request Headers: %s", group)
+
+ return group
+end
+
---@param code integer
---@param stdout string
+---@param settings table
---@return nil|string
---@return nil|any
-local function parse_response(code, stdout)
+local function parse_response(code, stdout, settings)
if code ~= 0 then
log:error("Error: %s", stdout)
return string.format("Error: %s", stdout)
end
- local ok, data = pcall(vim.json.decode, stdout, { luanil = { object = true } })
+ local ok, data = pcall(settings.decode, stdout, { luanil = { object = true } })
if not ok then
log:error("Error malformed json: %s", data)
return string.format("Error malformed json: %s", data)
@@ -48,181 +96,102 @@ end
---@param url string
---@param payload table
---@param cb fun(err: nil|string, response: nil|table)
----@return integer The job ID
function Client:call(url, payload, cb)
cb = log:wrap_cb(cb, "Response error: %s")
- local cmd = {
- "curl",
- url,
- "-H",
- "Content-Type: application/json",
- "-H",
- string.format("Authorization: Bearer %s", self.secret_key),
- }
- if self.organization then
- table.insert(cmd, "-H")
- table.insert(cmd, string.format("OpenAI-Organization: %s", self.organization))
- end
- log:trace("request command: %s", cmd)
- table.insert(cmd, "-d")
- table.insert(cmd, vim.json.encode(payload))
- log:trace("request payload: %s", payload)
- local stdout = ""
-
- vim.api.nvim_exec_autocmds("User", { pattern = "CodeCompanion", data = { status = "started" } })
-
- local jid = vim.fn.jobstart(cmd, {
- stdout_buffered = true,
- on_stdout = function(_, output)
- stdout = table.concat(output, "\n")
- end,
- on_exit = vim.schedule_wrap(function(_, code)
- log:trace("response: %s", stdout)
- local err, data = parse_response(code, stdout)
- if err then
- cb(err)
- else
- cb(nil, data)
- end
-
- vim.api.nvim_exec_autocmds("User", { pattern = "CodeCompanion", data = { status = "finished" } })
- end),
- })
-
- if jid == 0 then
- cb("Passed invalid arguments to curl")
- elseif jid == -1 then
- cb("'curl' is not executable")
- end
-
- return jid
-end
-local function get_stdout_line_iter()
- local pending = ""
- return function(data)
- local ret = {}
- for i, chunk in ipairs(data) do
- if i == 1 then
- if chunk == "" then
- table.insert(ret, pending)
- pending = ""
+ local handler = self.settings.request({
+ url = url,
+ raw = { "--no-buffer" },
+ headers = headers(self),
+ body = self.settings.encode(payload),
+ callback = function(out)
+ if out.exit then
+ local err, data = parse_response(out.exit, out.body, self.settings)
+ if err then
+ self.settings.schedule(function()
+ cb(err)
+ log:error("Error: %s", err)
+ close_request()
+ end)
else
- pending = pending .. chunk
- end
- else
- if data[1] ~= "" then
- table.insert(ret, pending)
+ self.settings.schedule(function()
+ cb(nil, data)
+ log:trace("Response: %s", data)
+ close_request()
+ end)
end
- pending = chunk
end
- end
- return ret
- end
+ end,
+ on_error = function(err, _, _)
+ log:error("Error: %s", err)
+ close_request()
+ end,
+ })
+
+ log:trace("Request: %s", handler.args)
+ start_request()
end
---@param url string
---@param payload table
---@param bufnr number
---@param cb fun(err: nil|string, chunk: nil|table, done: nil|boolean) Will be called multiple times until done is true
----@return integer The job ID
+---@return nil
function Client:stream_call(url, payload, bufnr, cb)
cb = log:wrap_cb(cb, "Response error: %s")
- payload.stream = true
- local cmd = {
- "curl",
- url,
- "-H",
- "Content-Type: application/json",
- "-H",
- string.format("Authorization: Bearer %s", self.secret_key),
- }
- if self.organization then
- table.insert(cmd, "-H")
- table.insert(cmd, string.format("OpenAI-Organization: %s", self.organization))
- end
- log:trace("stream request command: %s", cmd)
- table.insert(cmd, "-d")
- table.insert(cmd, vim.json.encode(payload))
- log:trace("stream request payload: %s", payload)
- local line_iter = get_stdout_line_iter()
- local stdout = ""
- local done = false
- local found_any_stream = false
-
- vim.api.nvim_exec_autocmds("User", { pattern = "CodeCompanion", data = { status = "started" } })
-
- local jid = vim.fn.jobstart(cmd, {
- on_stdout = function(_, output)
- if done then
- return
- end
- if not found_any_stream then
- stdout = stdout .. table.concat(output, "\n")
- end
- for _, line in ipairs(line_iter(output)) do
- log:trace("stream response line: %s", line)
- if vim.startswith(line, "data: ") then
- found_any_stream = true
- local chunk = line:sub(7)
-
- if chunk == "[DONE]" then
- return cb(nil, nil, true)
- end
- if _G.codecompanion_jobs[bufnr].status == "stopping" then
- done = true
- vim.fn.jobstop(_G.codecompanion_jobs[bufnr].jid)
- _G.codecompanion_jobs[bufnr] = nil
+ local handler = self.settings.request({
+ url = url,
+ raw = { "--no-buffer" },
+ headers = headers(self),
+ body = self.settings.encode(payload),
+ stream = function(_, chunk)
+ chunk = chunk:sub(7)
+
+ if chunk ~= "" then
+ if chunk == "[DONE]" then
+ self.settings.schedule(function()
+ close_request(bufnr)
return cb(nil, nil, true)
- end
-
- local ok, data = pcall(vim.json.decode, chunk, { luanil = { object = true } })
- if not ok then
- done = true
- log:error("Error malformed json: %s", data)
- return cb(string.format("Error malformed json: %s", data))
- end
-
- -- Check if the token limit has been reached
- log:debug("Finish Reason: %s", data.choices[1].finish_reason)
- if data.choices[1].finish_reason == "length" then
- log:debug("Token limit reached")
- done = true
- return cb("[CodeCompanion.nvim]\nThe token limit for the current chat has been reached")
- end
-
- cb(nil, data)
- end
- end
- end,
- on_exit = function(_, code)
- vim.api.nvim_exec_autocmds("User", { pattern = "CodeCompanion", data = { status = "finished" } })
-
- if not found_any_stream then
- local err, data = parse_response(code, stdout)
- if err then
- cb(err)
+ end)
else
- cb(nil, data, true)
+ self.settings.schedule(function()
+ if _G.codecompanion_jobs[bufnr] and _G.codecompanion_jobs[bufnr].status == "stopping" then
+ close_request(bufnr, { shutdown = true })
+ return cb(nil, nil, true)
+ end
+
+ local ok, data = pcall(self.settings.decode, chunk, { luanil = { object = true } })
+
+ if not ok then
+ log:error("Error malformed json: %s", data)
+ close_request(bufnr)
+ return cb(string.format("Error malformed json: %s", data))
+ end
+
+ if data.choices[1].finish_reason then
+ log:debug("Finish Reason: %s", data.choices[1].finish_reason)
+ end
+
+ if data.choices[1].finish_reason == "length" then
+ log:debug("Token limit reached")
+ close_request(bufnr)
+ return cb("[CodeCompanion.nvim]\nThe token limit for the current chat has been reached")
+ end
+
+ cb(nil, data)
+ end)
end
end
end,
+ on_error = function(err, _, _)
+ close_request(bufnr)
+ log:error("Error: %s", err)
+ end,
})
- if jid == 0 then
- cb("Passed invalid arguments to curl")
- elseif jid == -1 then
- cb("'curl' is not executable")
- else
- _G.codecompanion_jobs[bufnr] = {
- jid = jid,
- status = "running",
- strategy = "chat",
- }
- end
-
- return jid
+ log:trace("Stream Request: %s", handler.args)
+ start_request(bufnr, handler)
end
---@class CodeCompanion.ChatMessage
@@ -247,17 +216,17 @@ end
---@param args CodeCompanion.ChatArgs
---@param cb fun(err: nil|string, response: nil|table)
----@return integer
+---@return nil
function Client:chat(args, cb)
- args.stream = false
return self:call(config.options.base_url .. "/v1/chat/completions", args, cb)
end
---@param args CodeCompanion.ChatArgs
---@param bufnr integer
---@param cb fun(err: nil|string, chunk: nil|table, done: nil|boolean) Will be called multiple times until done is true
----@return integer
+---@return nil
function Client:stream_chat(args, bufnr, cb)
+ args.stream = true
return self:stream_call(config.options.base_url .. "/v1/chat/completions", args, bufnr, cb)
end
diff --git a/lua/codecompanion/config.lua b/lua/codecompanion/config.lua
index d710c9cb..cace34ba 100644
--- a/lua/codecompanion/config.lua
+++ b/lua/codecompanion/config.lua
@@ -20,18 +20,48 @@ local defaults = {
user = nil,
},
conversations = {
- auto_save = true,
save_dir = vim.fn.stdpath("data") .. "/codecompanion/conversations",
},
display = {
- type = "popup",
- split = "horizontal",
- height = 0.7,
- width = 0.8,
+ action_palette = {
+ width = 95,
+ height = 10,
+ },
+ chat = {
+ type = "float",
+ show_settings = false,
+ float = {
+ border = "single",
+ buflisted = false,
+ max_height = 0,
+ max_width = 0,
+ padding = 1,
+ },
+ },
+ win_options = {
+ cursorcolumn = false,
+ cursorline = false,
+ foldcolumn = "0",
+ linebreak = true,
+ list = false,
+ signcolumn = "no",
+ spell = false,
+ wrap = true,
+ },
+ },
+ keymaps = {
+ [""] = "keymaps.close",
+ ["q"] = "keymaps.cancel_request",
+ ["gc"] = "keymaps.clear",
+ ["ga"] = "keymaps.codeblock",
+ ["gs"] = "keymaps.save_conversation",
+ ["]"] = "keymaps.next",
+ ["["] = "keymaps.previous",
},
- log_level = "TRACE",
+ log_level = "ERROR",
send_code = true,
show_token_count = true,
+ silence_notifications = false,
use_default_actions = true,
}
@@ -57,6 +87,8 @@ M.setup = function(opts)
},
}))
+ vim.treesitter.language.register("markdown", "codecompanion")
+
local diagnostic_config = {
underline = false,
virtual_text = {
diff --git a/lua/codecompanion/init.lua b/lua/codecompanion/init.lua
index 2b4ec179..cdb5da28 100644
--- a/lua/codecompanion/init.lua
+++ b/lua/codecompanion/init.lua
@@ -1,6 +1,6 @@
-local Client = require("codecompanion.client")
local config = require("codecompanion.config")
-local utils = require("codecompanion.utils.util")
+local ui = require("codecompanion.utils.ui")
+local util = require("codecompanion.utils.util")
local M = {}
@@ -16,11 +16,15 @@ local function get_client()
)
return nil
end
+
+ local Client = require("codecompanion.client")
+
_client = Client.new({
secret_key = secret_key,
organization = os.getenv(config.options.org_api_key),
})
end
+
return _client
end
@@ -36,15 +40,52 @@ M.chat = function()
return
end
+ local context = util.get_context(vim.api.nvim_get_current_buf())
+
local Chat = require("codecompanion.strategy.chat")
local chat = Chat.new({
client = client,
+ context = context,
})
vim.api.nvim_win_set_buf(0, chat.bufnr)
- utils.scroll_to_end(0)
+ ui.scroll_to_end(0)
+end
- vim.bo[chat.bufnr].filetype = "markdown"
+M.toggle = function()
+ local function buf_toggle(buf, action)
+ if action == "show" then
+ if config.options.display.chat.type == "float" then
+ ui.open_float(buf, {
+ display = config.options.display.chat.float,
+ })
+ else
+ vim.cmd("buffer " .. buf)
+ end
+ elseif action == "hide" then
+ if config.options.display.chat.type == "float" then
+ vim.cmd("hide")
+ else
+ -- Show the previous buffer
+ vim.cmd("buffer " .. vim.fn.buf("#"))
+ end
+ end
+ end
+
+ local function fire_event(status, buf)
+ return vim.api.nvim_exec_autocmds("User", { pattern = "CodeCompanionChat", data = { action = status, buf = buf } })
+ end
+
+ if vim.bo.filetype == "codecompanion" then
+ local buf = vim.api.nvim_get_current_buf()
+ buf_toggle(buf, "hide")
+ fire_event("hide_buffer", buf)
+ elseif _G.codecompanion_last_chat_buffer then
+ buf_toggle(_G.codecompanion_last_chat_buffer, "show")
+ fire_event("show_buffer")
+ else
+ M.chat()
+ end
end
local _cached_actions = {}
@@ -55,7 +96,7 @@ M.actions = function()
end
local actions = require("codecompanion.actions")
- local context = utils.get_context(vim.api.nvim_get_current_buf())
+ local context = util.get_context(vim.api.nvim_get_current_buf())
local function picker(items, opts, callback)
opts = opts or {}
@@ -64,6 +105,8 @@ M.actions = function()
require("codecompanion.utils.ui").selector(items, {
prompt = opts.prompt,
+ width = config.options.display.action_palette.width,
+ height = config.options.display.action_palette.height,
format = function(item)
local formatted_item = {}
for _, column in ipairs(opts.columns) do
@@ -81,7 +124,15 @@ M.actions = function()
prompt = item.picker.prompt,
columns = item.picker.columns,
}
- picker(item.picker.items, picker_opts, selection)
+ return picker(item.picker.items, picker_opts, selection)
+ elseif item.picker and type(item.picker.items) == "function" then
+ local picker_opts = {
+ prompt = item.picker.prompt,
+ columns = item.picker.columns,
+ }
+ picker(item.picker.items(), picker_opts, selection)
+ elseif item and type(item.callback) == "function" then
+ return item.callback(selection)
else
local Strategy = require("codecompanion.strategy")
return Strategy.new({
@@ -120,6 +171,7 @@ end
---@param opts nil|table
M.setup = function(opts)
vim.api.nvim_set_hl(0, "CodeCompanionTokens", { link = "Comment", default = true })
+ vim.api.nvim_set_hl(0, "CodeCompanionVirtualText", { link = "Comment", default = true })
config.setup(opts)
end
diff --git a/lua/codecompanion/keymaps.lua b/lua/codecompanion/keymaps.lua
new file mode 100644
index 00000000..abecfb48
--- /dev/null
+++ b/lua/codecompanion/keymaps.lua
@@ -0,0 +1,98 @@
+local config = require("codecompanion.config")
+local ts = require("codecompanion.utils.ts")
+
+local M = {}
+
+M.close = {
+ desc = "Close the chat window",
+ callback = function(args)
+ vim.api.nvim_exec_autocmds(
+ "User",
+ { pattern = "CodeCompanionChat", data = { action = "close_buffer", buf = args.bufnr } }
+ )
+ end,
+}
+
+M.cancel_request = {
+ desc = "Cancel the current request",
+ callback = function(args)
+ if _G.codecompanion_jobs[args.bufnr] == nil then
+ return
+ end
+ _G.codecompanion_jobs[args.bufnr].status = "stopping"
+ end,
+}
+
+M.save_conversation = {
+ desc = "Save the chat as a conversation",
+ callback = function(args)
+ local chat = require("codecompanion.strategy.chat")
+ local conversation = require("codecompanion.strategy.conversation").new({})
+
+ if args.conversation then
+ conversation.filename = args.conversation
+ conversation:save(args.bufnr, chat.buf_get_messages(args.bufnr))
+
+ if config.options.silence_notifications then
+ return
+ end
+
+ return vim.notify("[CodeCompanion.nvim]\nConversation has been saved", vim.log.levels.INFO)
+ end
+
+ vim.ui.input({ prompt = "Conversation Name" }, function(filename)
+ if not filename then
+ return
+ end
+ conversation.filename = filename
+ conversation:save(args.bufnr, chat.buf_get_messages(args.bufnr))
+ args.conversation = filename
+ end)
+ end,
+}
+
+M.clear = {
+ desc = "Clear the current chat",
+ callback = function(args)
+ local ns_id = vim.api.nvim_create_namespace("CodeCompanionTokens")
+ vim.api.nvim_buf_clear_namespace(args.bufnr, ns_id, 0, -1)
+
+ vim.api.nvim_buf_set_lines(args.bufnr, 0, -1, false, {})
+ end,
+}
+
+M.codeblock = {
+ desc = "Insert a codeblock",
+ callback = function(args)
+ local bufnr = vim.api.nvim_get_current_buf()
+ local cursor_pos = vim.api.nvim_win_get_cursor(0)
+ local line = cursor_pos[1]
+
+ args.type = args.type or ""
+
+ local codeblock = {
+ "```" .. args.type,
+ "",
+ "```",
+ }
+
+ vim.api.nvim_buf_set_lines(bufnr, line - 1, line, false, codeblock)
+ vim.api.nvim_win_set_cursor(0, { line + 1, vim.fn.indent(line) })
+ end,
+}
+
+M.next = {
+ desc = "Go to the next message",
+ callback = function()
+ ts.goto_heading("next", 1)
+ end,
+}
+
+M.previous = {
+ desc = "Go to the previous message",
+ callback = function()
+ ts.goto_heading("prev", 1)
+ end,
+}
+
+return M
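
The chat buffer resolves the strings in `config.options.keymaps` (such as `"keymaps.close"`) against this module via `keys.set_keymaps`. A minimal sketch of pointing an extra key at one of these callbacks from a user's `setup` call (the chosen key is arbitrary; under a deep merge the defaults stay in place):

```lua
require("codecompanion").setup({
  keymaps = {
    ["<C-q>"] = "keymaps.close", -- also close the chat buffer with <C-q>
  },
})
```
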
diff --git a/lua/codecompanion/schema.lua b/lua/codecompanion/schema.lua
index 34f868fd..196e1506 100644
--- a/lua/codecompanion/schema.lua
+++ b/lua/codecompanion/schema.lua
@@ -1,4 +1,5 @@
local config = require("codecompanion.config")
+local curl = require("plenary.curl")
local M = {}
@@ -227,4 +228,11 @@ M.static.chat_settings = {
},
}
+M.static.client_settings = {
+ request = { default = curl.post },
+ encode = { default = vim.json.encode },
+ decode = { default = vim.json.decode },
+ schedule = { default = vim.schedule },
+}
+
return M
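
The `client_settings` defaults above (plenary's `curl.post`, `vim.json`, and `vim.schedule`) are what `Client.new` falls back to when no `settings` table is passed, which keeps the HTTP layer easy to stub. A minimal sketch of constructing a client with injected doubles, e.g. in a test (the fake request and response are purely illustrative):

```lua
local Client = require("codecompanion.client")

local client = Client.new({
  secret_key = "sk-test",
  settings = {
    -- Stand-in for plenary.curl's post(): invoke the callback immediately
    request = function(opts)
      opts.callback({ exit = 0, body = vim.json.encode({ choices = {} }) })
      return { args = "stubbed request" }
    end,
    encode = vim.json.encode,
    decode = vim.json.decode,
    schedule = function(fn) fn() end, -- run scheduled callbacks synchronously
  },
})
```
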
diff --git a/lua/codecompanion/strategy.lua b/lua/codecompanion/strategy.lua
index c09f2b92..9fa306b8 100644
--- a/lua/codecompanion/strategy.lua
+++ b/lua/codecompanion/strategy.lua
@@ -69,6 +69,7 @@ function Strategy:chat()
return require("codecompanion.strategy.chat").new({
client = self.client,
+ type = self.selected.type,
messages = messages,
show_buffer = true,
})
diff --git a/lua/codecompanion/strategy/advisor.lua b/lua/codecompanion/strategy/advisor.lua
index 65774f19..4c3bf2b7 100644
--- a/lua/codecompanion/strategy/advisor.lua
+++ b/lua/codecompanion/strategy/advisor.lua
@@ -87,16 +87,13 @@ function Advisor:execute(user_input)
content = "",
})
- if config.options.display == "chat" then
- return require("codecompanion.strategy.chat").new({
- client = self.client,
- messages = messages,
- show_buffer = true,
- })
- else
- local response = data.choices[1].message.content
- return require("codecompanion.utils.ui").display(config.options.display, response, messages, self.client)
- end
+ require("codecompanion.strategy.chat").new({
+ client = self.client,
+ messages = messages,
+ show_buffer = true,
+ })
+
+ require("codecompanion.utils.ts").goto_heading("prev", 1)
end)
end
diff --git a/lua/codecompanion/strategy/chat.lua b/lua/codecompanion/strategy/chat.lua
index 8ffeea12..2a318317 100644
--- a/lua/codecompanion/strategy/chat.lua
+++ b/lua/codecompanion/strategy/chat.lua
@@ -1,9 +1,12 @@
local config = require("codecompanion.config")
local log = require("codecompanion.utils.log")
local schema = require("codecompanion.schema")
-local util = require("codecompanion.utils.util")
+local ui = require("codecompanion.utils.ui")
+local utils = require("codecompanion.utils.util")
local yaml = require("codecompanion.utils.yaml")
+local api = vim.api
+
local yaml_query = [[
(block_mapping_pair
key: (_) @key
@@ -21,16 +24,30 @@ local chat_query = [[
)
]]
+local config_settings = {}
---@param bufnr integer
---@return table
local function parse_settings(bufnr)
+ if config_settings[bufnr] then
+ return config_settings[bufnr]
+ end
+
+ if not config.options.display.chat.show_settings then
+ config_settings[bufnr] = vim.deepcopy(config.options.ai_settings)
+ config_settings[bufnr].model = config_settings[bufnr].models.chat
+ config_settings[bufnr].models = nil
+
+ log:trace("Using the settings from the user's config: %s", config_settings[bufnr])
+ return config_settings[bufnr]
+ end
+
+ local settings = {}
local parser = vim.treesitter.get_parser(bufnr, "yaml")
local query = vim.treesitter.query.parse("yaml", yaml_query)
local root = parser:parse()[1]:root()
pcall(vim.tbl_add_reverse_lookup, query.captures)
- local settings = {}
for _, match in query:iter_matches(root, bufnr) do
local key = vim.treesitter.get_node_text(match[query.captures.key], bufnr)
local value = vim.treesitter.get_node_text(match[query.captures.value], bufnr)
@@ -82,16 +99,20 @@ end
---@param bufnr integer
---@param settings CodeCompanion.ChatSettings
---@param messages CodeCompanion.ChatMessage[]
-local function render_messages(bufnr, settings, messages)
- -- Put the settings at the top of the buffer
- local lines = { "---" }
- local keys = schema.get_ordered_keys(schema.static.chat_settings)
- for _, key in ipairs(keys) do
- table.insert(lines, string.format("%s: %s", key, yaml.encode(settings[key])))
- end
+---@param context table
+local function render_messages(bufnr, settings, messages, context)
+ local lines = {}
+ if config.options.display.chat.show_settings then
+ -- Put the settings at the top of the buffer
+ lines = { "---" }
+ local keys = schema.get_ordered_keys(schema.static.chat_settings)
+ for _, key in ipairs(keys) do
+ table.insert(lines, string.format("%s: %s", key, yaml.encode(settings[key])))
+ end
- table.insert(lines, "---")
- table.insert(lines, "")
+ table.insert(lines, "---")
+ table.insert(lines, "")
+ end
-- Put the messages in the buffer
for i, message in ipairs(messages) do
@@ -105,9 +126,18 @@ local function render_messages(bufnr, settings, messages)
end
end
+ if context and context.is_visual then
+ table.insert(lines, "")
+ table.insert(lines, "```" .. context.filetype)
+ for _, line in ipairs(context.lines) do
+ table.insert(lines, line)
+ end
+ table.insert(lines, "```")
+ end
+
local modifiable = vim.bo[bufnr].modifiable
vim.bo[bufnr].modifiable = true
- vim.api.nvim_buf_set_lines(bufnr, 0, -1, true, lines)
+ api.nvim_buf_set_lines(bufnr, 0, -1, false, lines)
vim.bo[bufnr].modified = false
vim.bo[bufnr].modifiable = modifiable
end
@@ -118,57 +148,6 @@ local display_tokens = function(bufnr)
end
end
----@param bufnr number
----@param conversation CodeCompanion.Conversation
-local function create_conversation_autocmds(bufnr, conversation)
- if config.options.conversations.auto_save then
- local group = vim.api.nvim_create_augroup("CodeCompanionConversations", {})
-
- local function save()
- vim.schedule(function()
- conversation:save(bufnr, parse_messages_buffer(bufnr))
- end)
- end
-
- vim.api.nvim_create_autocmd("InsertLeave", {
- buffer = bufnr,
- group = group,
- callback = function()
- log:trace("Conversation automatically saved")
- save()
- end,
- })
- vim.api.nvim_create_autocmd({ "User" }, {
- group = group,
- pattern = "CodeCompanion",
- callback = function(request)
- if request.buf == bufnr and request.data.status == "finished" then
- log:trace("Conversation automatically saved")
- save()
- end
- end,
- })
- end
-end
-
----@param bufnr number
-local function create_conversation_commands(bufnr)
- local conversation = require("codecompanion.strategy.conversation").new({})
-
- vim.api.nvim_buf_create_user_command(bufnr, "CodeCompanionConversationSaveAs", function()
- vim.ui.input({ prompt = "Conversation Name" }, function(filename)
- if not filename then
- return
- end
- conversation.filename = filename
- conversation:save(bufnr, parse_messages_buffer(bufnr))
- create_conversation_autocmds(bufnr, conversation)
- end)
- end, { desc = "Save the conversation" })
-
- -- Create manual save
-end
-
---@type table
local chatmap = {}
@@ -177,12 +156,12 @@ local function watch_cursor()
if cursor_moved_autocmd then
return
end
- cursor_moved_autocmd = vim.api.nvim_create_autocmd({ "CursorMoved", "BufEnter" }, {
+ cursor_moved_autocmd = api.nvim_create_autocmd({ "CursorMoved", "BufEnter" }, {
desc = "Show line information in a Code Companion buffer",
callback = function(args)
local chat = chatmap[args.buf]
if chat then
- if vim.api.nvim_win_get_buf(0) == args.buf then
+ if api.nvim_win_get_buf(0) == args.buf then
chat:on_cursor_moved()
end
end
@@ -190,31 +169,20 @@ local function watch_cursor()
})
end
-local registered_cmp = false
-
----@class CodeCompanion.Chat
----@field client CodeCompanion.Client
----@field bufnr integer
----@field settings CodeCompanion.ChatSettings
-local Chat = {}
-
----@class CodeCompanion.ChatArgs
----@field client CodeCompanion.Client
----@field messages nil|CodeCompanion.ChatMessage[]
----@field show_buffer nil|boolean
----@field conversation nil|CodeCompanion.Conversation
----@field settings nil|CodeCompanion.ChatSettings
+_G.codecompanion_chats = {}
----@param args CodeCompanion.ChatArgs
-function Chat.new(args)
- local bufnr = vim.api.nvim_create_buf(true, false)
- vim.api.nvim_buf_set_name(bufnr, string.format("[OpenAI Chat] %d", math.random(10000000)))
+local registered_cmp = false
- vim.bo[bufnr].filetype = "markdown"
- vim.bo[bufnr].buftype = "acwrite"
- vim.b[bufnr].codecompanion_type = "chat"
+---@param bufnr number
+---@param args table
+local function chat_autocmds(bufnr, args)
+ local aug = api.nvim_create_augroup("CodeCompanion", {
+ clear = false,
+ })
- vim.api.nvim_create_autocmd("BufWriteCmd", {
+ -- Submit the chat
+ api.nvim_create_autocmd("BufWriteCmd", {
+ group = aug,
buffer = bufnr,
callback = function()
local chat = chatmap[bufnr]
@@ -225,41 +193,51 @@ function Chat.new(args)
end
end,
})
- vim.api.nvim_create_autocmd("InsertLeave", {
- buffer = bufnr,
- callback = function()
- local settings = parse_settings(bufnr)
- local errors = schema.validate(schema.static.chat_settings, settings)
- local node = settings.__ts_node
- local items = {}
- if errors and node then
- for child in node:iter_children() do
- assert(child:type() == "block_mapping_pair")
- local key = vim.treesitter.get_node_text(child:named_child(0), bufnr)
- if errors[key] then
- local lnum, col, end_lnum, end_col = child:range()
- table.insert(items, {
- lnum = lnum,
- col = col,
- end_lnum = end_lnum,
- end_col = end_col,
- severity = vim.diagnostic.severity.ERROR,
- message = errors[key],
- })
+
+ if config.options.display.chat.show_settings then
+ -- Virtual text for the settings
+ api.nvim_create_autocmd("InsertLeave", {
+ group = aug,
+ buffer = bufnr,
+ callback = function()
+ local settings = parse_settings(bufnr)
+ local errors = schema.validate(schema.static.chat_settings, settings)
+ local node = settings.__ts_node
+ local items = {}
+ if errors and node then
+ for child in node:iter_children() do
+ assert(child:type() == "block_mapping_pair")
+ local key = vim.treesitter.get_node_text(child:named_child(0), bufnr)
+ if errors[key] then
+ local lnum, col, end_lnum, end_col = child:range()
+ table.insert(items, {
+ lnum = lnum,
+ col = col,
+ end_lnum = end_lnum,
+ end_col = end_col,
+ severity = vim.diagnostic.severity.ERROR,
+ message = errors[key],
+ })
+ end
end
end
- end
- vim.diagnostic.set(config.ERROR_NS, bufnr, items)
- end,
- })
-
- watch_cursor()
+ vim.diagnostic.set(config.ERROR_NS, bufnr, items)
+ end,
+ })
+ end
+ -- Enable cmp and add virtual text to the empty buffer
local bufenter_autocmd
- bufenter_autocmd = vim.api.nvim_create_autocmd("BufEnter", {
- callback = function(params)
- if params.buf ~= bufnr then
- return
+ bufenter_autocmd = api.nvim_create_autocmd("BufEnter", {
+ group = aug,
+ buffer = bufnr,
+ callback = function()
+ if #_G.codecompanion_chats == 0 then
+ local ns_id = api.nvim_create_namespace("CodeCompanionChatVirtualText")
+ api.nvim_buf_set_extmark(bufnr, ns_id, api.nvim_buf_line_count(bufnr) - 1, 0, {
+ virt_text = { { "Save the buffer to send a message to OpenAI...", "CodeCompanionVirtualText" } },
+ virt_text_pos = "eol",
+ })
end
local has_cmp, cmp = pcall(require, "cmp")
@@ -275,34 +253,142 @@ function Chat.new(args)
},
})
end
- vim.api.nvim_del_autocmd(bufenter_autocmd)
+ api.nvim_del_autocmd(bufenter_autocmd)
+ end,
+ })
+
+ -- Clear the virtual text when the user starts typing
+ if #_G.codecompanion_chats == 0 then
+ local insertenter_autocmd
+ insertenter_autocmd = api.nvim_create_autocmd("InsertEnter", {
+ group = aug,
+ buffer = bufnr,
+ callback = function()
+ local ns_id = api.nvim_create_namespace("CodeCompanionChatVirtualText")
+ api.nvim_buf_clear_namespace(bufnr, ns_id, 0, -1)
+
+ api.nvim_del_autocmd(insertenter_autocmd)
+ end,
+ })
+ end
+
+ -- Handle toggling the buffer and chat window
+ api.nvim_create_autocmd("User", {
+ desc = "Store the current chat buffer",
+ group = aug,
+ pattern = "CodeCompanionChat",
+ callback = function(request)
+ if request.data.buf ~= bufnr or request.data.action ~= "hide_buffer" then
+ return
+ end
+
+ _G.codecompanion_last_chat_buffer = bufnr
+
+ if _G.codecompanion_chats[bufnr] == nil then
+ local description
+ local _, messages = parse_messages_buffer(bufnr)
+
+ if messages[1] and messages[1].content then
+ description = messages[1].content
+ else
+ description = "[No messages]"
+ end
+
+ _G.codecompanion_chats[bufnr] = {
+ name = "Chat " .. utils.count(_G.codecompanion_chats) + 1,
+ description = description,
+ }
+ end
+ end,
+ })
+
+ api.nvim_create_autocmd("User", {
+ desc = "Remove the chat buffer from the stored chats",
+ group = aug,
+ pattern = "CodeCompanionChat",
+ callback = function(request)
+ if request.data.buf ~= bufnr or request.data.action ~= "close_buffer" then
+ return
+ end
+
+ if _G.codecompanion_last_chat_buffer == bufnr then
+ _G.codecompanion_last_chat_buffer = nil
+ end
+
+ _G.codecompanion_chats[bufnr] = nil
+ vim.cmd("bd!")
end,
})
+end
- local settings = schema.get_default(schema.static.chat_settings, args.settings)
+---@class CodeCompanion.Chat
+---@field client CodeCompanion.Client
+---@field bufnr integer
+---@field settings CodeCompanion.ChatSettings
+local Chat = {}
+
+---@class CodeCompanion.ChatArgs
+---@field client CodeCompanion.Client
+---@field context table
+---@field messages nil|CodeCompanion.ChatMessage[]
+---@field show_buffer nil|boolean
+---@field settings nil|CodeCompanion.ChatSettings
+---@field type nil|string
+---@field conversation nil|string
+
+---@param args CodeCompanion.ChatArgs
+function Chat.new(args)
+ local bufnr
+ local winid
+
+ if config.options.display.chat.type == "float" then
+ bufnr = api.nvim_create_buf(false, false)
+ else
+ bufnr = api.nvim_create_buf(true, false)
+ winid = api.nvim_get_current_win()
+ end
+
+ api.nvim_buf_set_name(bufnr, string.format("[CodeCompanion] %d", math.random(10000000)))
+ api.nvim_buf_set_option(bufnr, "buftype", "acwrite")
+ api.nvim_buf_set_option(bufnr, "filetype", "codecompanion")
+ api.nvim_buf_set_option(bufnr, "syntax", "markdown")
+ vim.b[bufnr].codecompanion_type = "chat"
+
+ watch_cursor()
+ chat_autocmds(bufnr, args)
+
+ local settings = args.settings or schema.get_default(schema.static.chat_settings, args.settings)
local self = setmetatable({
- client = args.client,
bufnr = bufnr,
- settings = settings,
+ client = args.client,
+ context = args.context,
conversation = args.conversation,
+ settings = settings,
+ type = args.type,
}, { __index = Chat })
chatmap[bufnr] = self
- render_messages(bufnr, settings, args.messages or {})
+
+ local keys = require("codecompanion.utils.keymaps")
+ keys.set_keymaps(config.options.keymaps, bufnr, self)
+
+ render_messages(bufnr, settings, args.messages or {}, args.context or {})
display_tokens(bufnr)
- vim.api.nvim_buf_set_option(bufnr, "wrap", true)
- if args.show_buffer then
- vim.api.nvim_set_current_buf(bufnr)
- util.buf_scroll_to_end(bufnr)
+ if config.options.display.chat.type == "float" then
+ winid = ui.open_float(bufnr, {
+ display = config.options.display.chat.float,
+ })
end
- if self.conversation then
- create_conversation_autocmds(bufnr, self.conversation)
+ if config.options.display.chat.type == "buffer" and args.show_buffer then
+ api.nvim_set_current_buf(bufnr)
end
- create_conversation_commands(bufnr)
+ ui.set_options(config.options.display.win_options, winid)
+ vim.cmd("setlocal formatoptions-=t")
+ ui.buf_scroll_to_end(bufnr)
return self
end
@@ -310,15 +396,12 @@ end
function Chat:submit()
local settings, messages = parse_messages_buffer(self.bufnr)
+ if not messages or #messages == 0 then
+ return
+ end
+
vim.bo[self.bufnr].modified = false
vim.bo[self.bufnr].modifiable = false
- vim.api.nvim_buf_set_keymap(self.bufnr, "n", "q", "", {
- noremap = true,
- silent = true,
- callback = function()
- _G.codecompanion_jobs[self.bufnr].status = "stopping"
- end,
- })
local function finalize()
vim.bo[self.bufnr].modified = false
@@ -326,19 +409,20 @@ function Chat:submit()
end
local function render_buffer()
- local line_count = vim.api.nvim_buf_line_count(self.bufnr)
- local current_line = vim.api.nvim_win_get_cursor(0)[1]
+ local line_count = api.nvim_buf_line_count(self.bufnr)
+ local current_line = api.nvim_win_get_cursor(0)[1]
local cursor_moved = current_line == line_count
- render_messages(self.bufnr, settings, messages)
+ render_messages(self.bufnr, settings, messages, {})
- if cursor_moved and util.buf_is_active(self.bufnr) then
- util.buf_scroll_to_end()
+ if cursor_moved and ui.buf_is_active(self.bufnr) then
+ ui.buf_scroll_to_end()
end
end
local new_message = messages[#messages]
- if new_message.role == "user" and new_message.content == "" then
+
+ if new_message and new_message.role == "user" and new_message.content == "" then
return finalize()
end
@@ -369,7 +453,6 @@ function Chat:submit()
end
if done then
- vim.api.nvim_buf_del_keymap(self.bufnr, "n", "q")
table.insert(messages, { role = "user", content = "" })
render_buffer()
display_tokens(self.bufnr)
@@ -424,7 +507,7 @@ end
function Chat:complete(request, callback)
local items = {}
- local cursor = vim.api.nvim_win_get_cursor(0)
+ local cursor = api.nvim_win_get_cursor(0)
local key_name, node = self:_get_settings_key({ pos = { cursor[1] - 1, 1 } })
if not key_name or not node then
callback({ items = items, isIncomplete = false })
@@ -448,9 +531,19 @@ end
---@return nil|CodeCompanion.Chat
function Chat.buf_get_chat(bufnr)
if not bufnr or bufnr == 0 then
- bufnr = vim.api.nvim_get_current_buf()
+ bufnr = api.nvim_get_current_buf()
end
return chatmap[bufnr]
end
+---@param bufnr nil|integer
+---@return table settings
+---@return CodeCompanion.ChatMessage[] messages
+function Chat.buf_get_messages(bufnr)
+ if not bufnr or bufnr == 0 then
+ bufnr = api.nvim_get_current_buf()
+ end
+ return parse_messages_buffer(bufnr)
+end
+
return Chat
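
For reference, a hypothetical construction call using the `CodeCompanion.ChatArgs` fields declared above; `client` is assumed to be a configured `CodeCompanion.Client` and `setup()` to have been run:

```lua
-- Hypothetical sketch only: field names are taken from the ChatArgs annotations.
local Chat = require("codecompanion.strategy.chat")

local chat = Chat.new({
  client = client,    -- assumed: an existing CodeCompanion.Client
  context = {},       -- editor context captured by the caller
  messages = {},      -- optional seed messages for the buffer
  show_buffer = true, -- focus the buffer when display.chat.type == "buffer"
})
```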
diff --git a/lua/codecompanion/strategy/conversation.lua b/lua/codecompanion/strategy/conversation.lua
index 2b75446a..b40a4308 100644
--- a/lua/codecompanion/strategy/conversation.lua
+++ b/lua/codecompanion/strategy/conversation.lua
@@ -1,4 +1,4 @@
-local chat = require("codecompanion.strategy.chat")
+local Chat = require("codecompanion.strategy.chat")
local config = require("codecompanion.config")
local log = require("codecompanion.utils.log")
@@ -12,7 +12,7 @@ end
---@param bufnr number
---@param name string
local function rename_buffer(bufnr, name)
- vim.api.nvim_buf_set_name(bufnr, "[OpenAI Chat] " .. name .. ".md")
+ vim.api.nvim_buf_set_name(bufnr, "[CodeCompanion Chat] " .. name .. ".md")
end
---@class CodeCompanion.Conversation
@@ -53,6 +53,7 @@ local function save(filename, bufnr, conversation)
log:debug('Conversation: "%s.json" saved', filename)
file:write(vim.json.encode(conversation))
file:close()
+ vim.api.nvim_exec_autocmds("User", { pattern = "CodeCompanionConversation", data = { status = "finished" } })
else
log:debug("Conversation could not be saved. Error: %s", err)
vim.notify("[CodeCompanion.nvim]\nCannot save conversation: " .. err, vim.log.levels.ERROR)
@@ -123,16 +124,15 @@ function Conversation:load(client, opts)
self.filename = opts.filename
local content = vim.fn.json_decode(table.concat(vim.fn.readfile(opts.path), "\n"))
- local chat_buf = chat.new({
+ local chat_buf = Chat.new({
client = client,
- settings = content.settings,
+ conversation = self.filename,
messages = content.messages,
+ settings = content.settings,
show_buffer = true,
- conversation = self,
})
rename_buffer(chat_buf.bufnr, opts.filename)
- vim.api.nvim_buf_set_option(chat_buf.bufnr, "wrap", true)
end
return Conversation
diff --git a/lua/codecompanion/utils/keymaps.lua b/lua/codecompanion/utils/keymaps.lua
new file mode 100644
index 00000000..f2c004f5
--- /dev/null
+++ b/lua/codecompanion/utils/keymaps.lua
@@ -0,0 +1,54 @@
+-- Taken from:
+-- https://github.com/stevearc/oil.nvim/blob/master/lua/oil/keymap_util.lua
+
+local keymaps = require("codecompanion.keymaps")
+
+local M = {}
+
+---@param rhs string|table|fun()
+---@return string|fun() rhs
+---@return table opts
+---@return string|nil mode
+local function resolve(rhs)
+ if type(rhs) == "string" and vim.startswith(rhs, "keymaps.") then
+ return resolve(keymaps[vim.split(rhs, ".", { plain = true })[2]])
+ elseif type(rhs) == "table" then
+ local opts = vim.deepcopy(rhs)
+ local callback = opts.callback
+ local mode = opts.mode
+ if type(rhs.callback) == "string" then
+ local action_opts, action_mode
+ callback, action_opts, action_mode = resolve(rhs.callback)
+ opts = vim.tbl_extend("keep", opts, action_opts)
+ mode = mode or action_mode
+ end
+ opts.callback = nil
+ opts.mode = nil
+ return callback, opts, mode
+ else
+ return rhs, {}
+ end
+end
+
+---@param keymaps table
+---@param bufnr integer
+---@param data? table
+M.set_keymaps = function(keymaps, bufnr, data)
+ for k, v in pairs(keymaps) do
+ local rhs, opts, mode = resolve(v)
+ if rhs then
+ local callback
+ if type(rhs) == "function" then
+ callback = function()
+ rhs(data or {})
+ end
+ else
+ callback = rhs
+ end
+ opts = vim.tbl_extend("keep", opts, { buffer = bufnr })
+ vim.keymap.set(mode or "", k, callback, opts)
+ end
+ end
+end
+
+return M
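
As a usage note, the chat buffer earlier in this diff consumes the module as follows; `config`, `bufnr` and `chat` stand in for the corresponding locals in `strategy/chat.lua`:

```lua
-- "keymaps.*" strings from the user's config are resolved against
-- lua/codecompanion/keymaps.lua before being registered as buffer-local
-- mappings via vim.keymap.set().
local keys = require("codecompanion.utils.keymaps")
keys.set_keymaps(config.options.keymaps, bufnr, chat)
```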
diff --git a/lua/codecompanion/utils/tokens.lua b/lua/codecompanion/utils/tokens.lua
index f9a3daa5..a7d889d0 100644
--- a/lua/codecompanion/utils/tokens.lua
+++ b/lua/codecompanion/utils/tokens.lua
@@ -73,7 +73,7 @@ end
function M.display_tokens(bufnr)
bufnr = bufnr or vim.api.nvim_get_current_buf()
- local ns_id = vim.api.nvim_create_namespace("CodeCompanion")
+ local ns_id = vim.api.nvim_create_namespace("CodeCompanionTokens")
local parser = vim.treesitter.get_parser(bufnr, "markdown", {})
local tree = parser:parse()[1]
diff --git a/lua/codecompanion/utils/ts.lua b/lua/codecompanion/utils/ts.lua
new file mode 100644
index 00000000..ff0f0f19
--- /dev/null
+++ b/lua/codecompanion/utils/ts.lua
@@ -0,0 +1,54 @@
+local ts_parsers = require("nvim-treesitter.parsers")
+local ts_utils = require("nvim-treesitter.ts_utils")
+
+local M = {}
+
+---Move the cursor to the nth markdown heading before or after the current line.
+---@param direction "next"|"prev"
+---@param count integer
+function M.goto_heading(direction, count)
+ local bufnr = vim.api.nvim_get_current_buf()
+ local cursor = vim.api.nvim_win_get_cursor(0)
+ local current_row = cursor[1] - 1
+
+ local parser = ts_parsers.get_parser(bufnr, "markdown")
+ local root_tree = parser:parse()[1]:root()
+
+ local query = vim.treesitter.query.parse("markdown", [[(atx_heading) @heading]])
+
+ local from_row, to_row, found_headings
+ if direction == "next" then
+ from_row = current_row + 1
+ to_row = -1 -- End of document
+ found_headings = {}
+ for id, node in query:iter_captures(root_tree, bufnr, from_row, to_row) do
+ if query.captures[id] == "heading" then
+ local node_start, _, _, _ = node:range()
+ if node_start >= from_row then
+ table.insert(found_headings, node) -- Collect valid headings in a table
+ if #found_headings == count then -- Check if we have reached the desired count
+ ts_utils.goto_node(found_headings[count], false, true)
+ return
+ end
+ end
+ end
+ end
+ elseif direction == "prev" then
+ from_row = 0
+ to_row = current_row
+ found_headings = {}
+ for id, node in query:iter_captures(root_tree, bufnr, from_row, to_row) do
+ if query.captures[id] == "heading" then
+ local _, _, node_end, _ = node:range()
+ if node_end < current_row then
+ table.insert(found_headings, node)
+ end
+ end
+ end
+ if #found_headings >= count then
+ ts_utils.goto_node(found_headings[#found_headings - count + 1], false, true)
+ end
+ end
+
+  -- If the requested heading was not found, the cursor stays at its current
+  -- position; wrapping around or signaling an error could be added here.
+end
+
+return M
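
A hypothetical direct invocation, since in the plugin these jumps are presumably driven by the chat buffer's next/previous keymaps:

```lua
-- Hypothetical usage: jump forward two markdown headings, then back one.
local ts = require("codecompanion.utils.ts")
ts.goto_heading("next", 2)
ts.goto_heading("prev", 1)
```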
diff --git a/lua/codecompanion/utils/ui.lua b/lua/codecompanion/utils/ui.lua
index 2804bd40..b9ab1efb 100644
--- a/lua/codecompanion/utils/ui.lua
+++ b/lua/codecompanion/utils/ui.lua
@@ -1,7 +1,64 @@
local log = require("codecompanion.utils.log")
+local api = vim.api
+
local M = {}
+---@param bufnr number
+---@return boolean
+M.buf_is_active = function(bufnr)
+ return api.nvim_get_current_buf() == bufnr
+end
+
+---@param bufnr nil|integer
+---@return integer[]
+M.buf_list_wins = function(bufnr)
+ local wins = {}
+
+ if not bufnr or bufnr == 0 then
+ bufnr = api.nvim_get_current_buf()
+ end
+
+ for _, winid in ipairs(api.nvim_list_wins()) do
+ if api.nvim_win_is_valid(winid) and api.nvim_win_get_buf(winid) == bufnr then
+ table.insert(wins, winid)
+ end
+ end
+
+ return wins
+end
+
+---@param winid? number
+M.scroll_to_end = function(winid)
+ winid = winid or 0
+ local bufnr = api.nvim_win_get_buf(winid)
+ local lnum = api.nvim_buf_line_count(bufnr)
+ local last_line = api.nvim_buf_get_lines(bufnr, -2, -1, true)[1]
+ api.nvim_win_set_cursor(winid, { lnum, api.nvim_strwidth(last_line) })
+end
+
+---@param bufnr nil|integer
+M.buf_scroll_to_end = function(bufnr)
+ for _, winid in ipairs(M.buf_list_wins(bufnr or 0)) do
+ M.scroll_to_end(winid)
+ end
+end
+
+---Source: https://github.com/stevearc/oil.nvim/blob/dd432e76d01eda08b8658415588d011009478469/lua/oil/layout.lua#L22C8-L22C8
+---@return integer
+M.get_editor_height = function()
+ local editor_height = vim.o.lines - vim.o.cmdheight
+ -- Subtract 1 if tabline is visible
+ if vim.o.showtabline == 2 or (vim.o.showtabline == 1 and #api.nvim_list_tabpages() > 1) then
+ editor_height = editor_height - 1
+ end
+ -- Subtract 1 if statusline is visible
+ if vim.o.laststatus >= 2 or (vim.o.laststatus == 1 and #api.nvim_tabpage_list_wins(0) > 1) then
+ editor_height = editor_height - 1
+ end
+ return editor_height
+end
+
local function get_max_lengths(items, format)
local max_lengths = {}
for _, item in ipairs(items) do
@@ -53,6 +110,12 @@ function M.selector(items, opts)
vim.ui.select(items, {
prompt = opts.prompt,
kind = "codecompanion.nvim",
+ telescope = require("telescope.themes").get_cursor({
+ layout_config = {
+ width = opts.width,
+ height = opts.height,
+ },
+ }),
format_item = function(item)
local formatted = opts.format(item)
return pad_item(formatted, max_lengths)
@@ -66,152 +129,48 @@ function M.selector(items, opts)
end)
end
----@param win number
----@param bufnr number|nil
-local function close(win, bufnr)
- vim.api.nvim_win_close(win, true)
- if bufnr then
- vim.api.nvim_buf_delete(bufnr, { force = true })
- end
-end
-
-local function set_keymaps(win, bufnr, client, conversation)
- vim.api.nvim_buf_set_keymap(bufnr, "n", "q", "", {
- noremap = true,
- silent = true,
- callback = function()
- pcall(close, win, bufnr)
- end,
- })
-
- vim.api.nvim_buf_set_keymap(bufnr, "n", "c", "", {
- noremap = true,
- silent = true,
- callback = function()
- pcall(close, win, bufnr)
- return require("codecompanion.strategy.chat").new({
- client = client,
- messages = conversation,
- show_buffer = true,
- })
- end,
- })
-end
-
----@param size number
----@param max_value number
-local function calculate_split_size(size, max_value)
- if type(size) == "number" then
- if size < 1 then
- -- Treat as a percentage
- return math.floor(max_value * size)
- else
- -- Treat as an absolute value
- return math.floor(size)
- end
- else
- error("Size must be a number")
+---@param opts table
+---@param winid number
+function M.set_options(opts, winid)
+ for k, v in pairs(opts) do
+ api.nvim_set_option_value(k, v, { scope = "local", win = winid })
end
end
+---@param bufnr number
---@param opts table
----@param response string
----@param conversation table
----@param client CodeCompanion.Client
-local function split(opts, response, conversation, client)
- if not response or response == "" then
- return
+function M.open_float(bufnr, opts)
+ local total_width = vim.o.columns
+ local total_height = M.get_editor_height()
+ local width = total_width - 2 * opts.display.padding
+ if opts.display.border ~= "none" then
+ width = width - 2 -- The border consumes 1 col on each side
end
-
- local buf = vim.api.nvim_create_buf(false, true)
-
- vim.api.nvim_buf_set_option(buf, "filetype", "markdown")
- vim.api.nvim_buf_set_option(buf, "buftype", "nofile")
- vim.api.nvim_buf_set_lines(buf, 0, -1, false, vim.split(response, "\n"))
- vim.api.nvim_buf_set_option(buf, "modifiable", false)
-
- local win_original = vim.api.nvim_get_current_win()
- -- Determine split dimensions
- local win_height = vim.api.nvim_win_get_height(win_original)
- local win_width = vim.api.nvim_win_get_width(win_original)
- local split_height, split_width
-
- -- Size calculation based on opts
- if opts.split == "horizontal" then
- split_height = calculate_split_size(opts.height or 0.5, win_height)
- elseif opts.split == "vertical" then
- split_width = calculate_split_size(opts.width or 0.5, win_width)
- else
- error("Invalid split option. Use 'vertical' or 'horizontal'.")
- return
+ if opts.display.max_width > 0 then
+ width = math.min(width, opts.display.max_width)
end
- -- Creating the split
- local win_new
- if opts.split == "vertical" then
- vim.cmd("vsplit")
- win_new = vim.api.nvim_get_current_win()
- if split_width then
- vim.api.nvim_win_set_width(win_new, split_width)
- end
- elseif opts.split == "horizontal" then
- vim.cmd("split")
- win_new = vim.api.nvim_get_current_win()
- if split_height then
- vim.api.nvim_win_set_height(win_new, split_height)
- end
+ local height = total_height - 2 * opts.display.padding
+ if opts.display.max_height > 0 then
+ height = math.min(height, opts.display.max_height)
end
- vim.api.nvim_win_set_buf(win_new, buf)
- vim.api.nvim_win_set_option(win_new, "wrap", true)
-
- set_keymaps(win_new, buf, client, conversation)
-
- -- Return focus to original window
- vim.api.nvim_set_current_win(win_original)
-end
+ local row = math.floor((total_height - height) / 2)
+ local col = math.floor((total_width - width) / 2) - 1 -- adjust for border width
----@param opts table
----@param response string
----@param conversation table
----@param client CodeCompanion.Client
-local function popup(opts, response, conversation, client)
- local buf = vim.api.nvim_create_buf(false, true)
-
- vim.api.nvim_buf_set_option(buf, "filetype", "markdown")
- vim.api.nvim_buf_set_lines(buf, 0, -1, false, vim.split(response, "\n"))
- vim.api.nvim_buf_set_option(buf, "modifiable", false)
- vim.api.nvim_buf_set_option(buf, "buftype", "nofile")
-
- local win_width = math.floor(vim.o.columns * (opts.width or 0.8))
- local win_height = math.floor(vim.o.lines * (opts.height or 0.8))
- local row = math.floor((vim.o.lines - win_height) / 2)
- local col = math.floor((vim.o.columns - win_width) / 2)
-
- local win = vim.api.nvim_open_win(buf, true, {
+ local winid = api.nvim_open_win(bufnr, true, {
relative = "editor",
- width = win_width,
- height = win_height,
+ width = width,
+ height = height,
row = row,
col = col,
- border = "single",
- style = "minimal",
- noautocmd = true,
+ border = opts.display.border,
+ zindex = 45,
+ title = "Code Companion",
+ title_pos = "center",
})
- set_keymaps(win, buf, client, conversation)
- vim.api.nvim_win_set_option(win, "wrap", true)
-end
-
----@param opts table
----@param response string
----@param client CodeCompanion.Client
-function M.display(opts, response, conversation, client)
- if opts.type == "split" then
- split(opts, response, conversation, client)
- elseif opts.type == "popup" then
- popup(opts, response, conversation, client)
- end
+ return winid
end
return M
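
A minimal sketch of how the chat strategy above drives these helpers when `display.chat.type == "float"`; `bufnr` is assumed to be the chat buffer, and the `display` values are illustrative, matching the shape that `open_float` reads:

```lua
local ui = require("codecompanion.utils.ui")

-- Illustrative values only; the real ones come from config.options.display.
local winid = ui.open_float(bufnr, {
  display = { border = "single", max_height = 0, max_width = 0, padding = 1 },
})
ui.set_options({ wrap = true, linebreak = true }, winid)
ui.buf_scroll_to_end(bufnr)
```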
diff --git a/lua/codecompanion/utils/util.lua b/lua/codecompanion/utils/util.lua
index 88c488e2..5b7dd089 100644
--- a/lua/codecompanion/utils/util.lua
+++ b/lua/codecompanion/utils/util.lua
@@ -2,6 +2,16 @@ local M = {}
local ESC_FEEDKEY = vim.api.nvim_replace_termcodes("<Esc>", true, false, true)
+---Count the number of entries in a table, including non-sequential keys.
+---@param table table
+---@return integer
+M.count = function(table)
+ local count = 0
+ for _ in pairs(table) do
+ count = count + 1
+ end
+
+ return count
+end
+
---@param table table
---@param value string
M.contains = function(table, value)
@@ -13,42 +23,6 @@ M.contains = function(table, value)
return false
end
----@param bufnr number
-M.buf_is_active = function(bufnr)
- return vim.api.nvim_get_current_buf() == bufnr
-end
-
----@param winid? number
-M.scroll_to_end = function(winid)
- winid = winid or 0
- local bufnr = vim.api.nvim_win_get_buf(winid)
- local lnum = vim.api.nvim_buf_line_count(bufnr)
- local last_line = vim.api.nvim_buf_get_lines(bufnr, -2, -1, true)[1]
- vim.api.nvim_win_set_cursor(winid, { lnum, vim.api.nvim_strwidth(last_line) })
-end
-
----@param bufnr nil|integer
-M.buf_scroll_to_end = function(bufnr)
- for _, winid in ipairs(M.buf_list_wins(bufnr or 0)) do
- M.scroll_to_end(winid)
- end
-end
-
----@param bufnr nil|integer
----@return integer[]
-M.buf_list_wins = function(bufnr)
- local ret = {}
- if not bufnr or bufnr == 0 then
- bufnr = vim.api.nvim_get_current_buf()
- end
- for _, winid in ipairs(vim.api.nvim_list_wins()) do
- if vim.api.nvim_win_is_valid(winid) and vim.api.nvim_win_get_buf(winid) == bufnr then
- table.insert(ret, winid)
- end
- end
- return ret
-end
-
M._noop = function() end
---@param name string
diff --git a/lua/spec/codecompanion/client_spec.lua b/lua/spec/codecompanion/client_spec.lua
index 053423d9..7973ef79 100644
--- a/lua/spec/codecompanion/client_spec.lua
+++ b/lua/spec/codecompanion/client_spec.lua
@@ -1,53 +1,48 @@
-local mock = require("luassert.mock")
+local assert = require("luassert")
+local codecompanion = require("codecompanion")
local stub = require("luassert.stub")
-local match = require("luassert.match")
-local spy = require("luassert.spy")
-local Client = require("codecompanion.client")
-
-local function setup(opts)
- require("codecompanion").setup(opts)
-end
+local schema
+local Client
describe("Client", function()
- it("should call API correctly when chat is invoked", function()
- local fn_mock = mock(vim.fn, true)
- local log_mock = mock(require("codecompanion.utils.log"), true)
- local autocmds_spy = spy.on(vim.api, "nvim_exec_autocmds")
-
- local jobstart_stub = stub(fn_mock, "jobstart", function(_, opts)
- local stdout_response = { vim.json.encode("SOME JSON RESPONSE") }
-
- if opts.on_stdout then
- opts.on_stdout(nil, stdout_response)
- end
-
- local exit_code = 0
- if opts.on_exit then
- opts.on_exit(nil, exit_code)
- end
+ before_each(function()
+ codecompanion.setup()
+ schema = require("codecompanion.schema")
+ Client = require("codecompanion.client") -- Now that setup has been called, we can require the client
+ end)
- return 1
- end)
+ after_each(function()
+ schema.static.client_settings = nil
+ _G.codecompanion_jobs = nil
+ end)
- setup({
- base_url = "https://api.example.com",
+ it("stream_call should work with mocked dependencies", function()
+ local mock_request = stub.new().returns({ args = "mocked args" })
+ local mock_encode = stub.new().returns("{}")
+ local mock_decode = stub.new().returns({ choices = { { finish_reason = nil } } })
+ local mock_schedule = stub.new().returns(1)
+
+ -- Mock globals
+ _G.codecompanion_jobs = {}
+
+ schema.static.client_settings = {
+ request = { default = mock_request },
+ encode = { default = mock_encode },
+ decode = { default = mock_decode },
+ schedule = { default = mock_schedule },
+ }
+
+ local client = Client.new({
+ secret_key = "fake_key",
+ organization = "fake_org",
})
- local client = Client.new({ secret_key = "TEST_SECRET_KEY" })
- local cb_stub = stub.new()
-
- client:chat({ messages = { { role = "user", content = "hello" } } }, cb_stub)
-
- assert.stub(jobstart_stub).was_called(1)
- assert.stub(jobstart_stub).was_called_with(match.is_table(), match.is_table())
+ local cb = stub.new()
- -- It's only called once as the jobstart_stub is stubbed to not fire an event
- assert.spy(autocmds_spy).was_called(1)
+ client:stream_chat({}, 0, cb)
- autocmds_spy:revert()
- jobstart_stub:revert()
- mock.revert(fn_mock)
- mock.revert(log_mock)
+ assert.stub(mock_request).was_called()
+ -- assert.stub(cb).was_called()
end)
end)
diff --git a/plugin/codecompanion.lua b/plugin/codecompanion.lua
index eb66e814..1c5ab1d5 100644
--- a/plugin/codecompanion.lua
+++ b/plugin/codecompanion.lua
@@ -16,4 +16,8 @@ vim.api.nvim_create_user_command("CodeCompanionActions", function()
codecompanion.actions()
end, { desc = "", range = true })
+vim.api.nvim_create_user_command("CodeCompanionToggle", function()
+ codecompanion.toggle()
+end, { desc = "" })
+
vim.g.loaded_codecompanion = true
diff --git a/stylua.toml b/stylua.toml
index 0f900306..febcadbc 100644
--- a/stylua.toml
+++ b/stylua.toml
@@ -1,3 +1,5 @@
+column_width = 120
indent_type = "Spaces"
indent_width = 2
-column_width = 120
+[sort_requires]
+enabled = true