
Commit e8a6afa

feat(context): rewrite context system with input and resolving
Now contexts are defined in configuration with input and resolve functions. This allows for a more flexible and extensible context system, and makes it more robust and easier to understand.

The changes include:

- Add context inputs (with `:` after context) for additional parameters
- Add proper model selection (with `$` prefix)
- Move context resolution logic to config.lua
- Extract git diff to context.gitdiff
- Simplify outline/files logic, make it more modular
- Improve pattern matching for context/model/agent parsing
- Add truncation for large selections

Signed-off-by: Tomas Slusny <[email protected]>
1 parent 33c350e commit e8a6afa

7 files changed: +273, -133 lines changed

README.md

Lines changed: 42 additions & 12 deletions
@@ -153,6 +153,7 @@ What is 1 + 11
 ### Models

 You can list available models with `:CopilotChatModels` command. Model determines the AI model used for the chat.
+You can set the model in the prompt by using `$` followed by the model name.
 Default models are:

 - `gpt-4o` - This is the default Copilot Chat model. It is a versatile, multimodal model that excels in both text and image processing and is designed to provide fast, reliable responses. It also has superior performance in non-English languages. Gpt-4o is hosted on Azure.
@@ -176,11 +177,14 @@ You can install more agents from [here](https://github.com/marketplace?type=apps

 Contexts are used to determine the context of the chat.
 You can set the context in the prompt by using `#` followed by the context name.
-Supported contexts are:
+If context supports input, you can set the input in the prompt by using `:` followed by the input (or pressing `complete` key after `:`).
+Default contexts are:

+- `buffer` - Includes only the current buffer in chat context. Supports input.
 - `buffers` - Includes all open buffers in chat context
-- `buffer` - Includes only the current buffer in chat context
-- `files` - Includes all non-hidden filenames in the current workspace in chat context
+- `file` - Includes content of provided file in chat context. Supports input.
+- `files` - Includes all non-hidden filenames in the current workspace in chat context. Supports input.
+- `git` - Includes current git diff in chat context. Supports input.

 ### API

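The two hunks above add the `$model` and `#context:input` prompt syntax. As a usage sketch only, assuming the `ask()` Lua API accepts the same inline prompt syntax as the chat window, a single question could combine all of them:

```lua
-- `#git:staged` pulls the staged diff into context, `$gpt-4o` selects the model,
-- `@copilot` selects the agent (names assumed to be available in this setup).
require('CopilotChat').ask('#git:staged $gpt-4o @copilot Summarize the staged changes.')
```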
@@ -261,17 +265,16 @@ Also see [here](/lua/CopilotChat/config.lua):
   proxy = nil, -- [protocol://]host[:port] Use this proxy
   allow_insecure = false, -- Allow insecure server connections

-  system_prompt = prompts.COPILOT_INSTRUCTIONS, -- System prompt to use
-  model = 'gpt-4o', -- Default model to use, see ':CopilotChatModels' for available models
+  system_prompt = prompts.COPILOT_INSTRUCTIONS, -- System prompt to use (can be specified manually in prompt via /).
+  model = 'gpt-4o', -- Default model to use, see ':CopilotChatModels' for available models (can be specified manually in prompt via $).
   agent = 'copilot', -- Default agent to use, see ':CopilotChatAgents' for available agents (can be specified manually in prompt via @).
-  context = nil, -- Default context to use, 'buffers', 'buffer', 'files' or none (can be specified manually in prompt via #).
+  context = nil, -- Default context to use (can be specified manually in prompt via #).
   temperature = 0.1, -- GPT result temperature

   question_header = '## User ', -- Header to use for user questions
   answer_header = '## Copilot ', -- Header to use for AI answers
   error_header = '## Error ', -- Header to use for errors
   separator = '───', -- Separator to use in chat
-  highlight_headers = true, -- Highlight headers in chat, disable if using markdown renderers (like render-markdown.nvim)

   show_folds = true, -- Shows folds for sections in chat
   show_help = true, -- Shows help message as virtual lines when waiting for user input
@@ -280,6 +283,7 @@ Also see [here](/lua/CopilotChat/config.lua):
   insert_at_end = false, -- Move cursor to end of buffer when inserting text
   clear_chat_on_new_prompt = false, -- Clears chat on every new prompt
   highlight_selection = true, -- Highlight selection in the source buffer when in the chat window
+  highlight_headers = true, -- Highlight headers in chat, disable if using markdown renderers (like render-markdown.nvim)

   history_path = vim.fn.stdpath('data') .. '/copilotchat_history', -- Default path to stored history
   callback = nil, -- Callback to use when ask response is received
@@ -289,16 +293,43 @@ Also see [here](/lua/CopilotChat/config.lua):
     return select.visual(source) or select.buffer(source)
   end,

+  -- default contexts
+  contexts = {
+    buffer = {
+      -- see config.lua for implementation
+      input = function(callback) end,
+      resolve = function(input, source) end,
+    },
+    buffers = {
+      -- see config.lua for implementation
+      resolve = function(input, source) end,
+    },
+    file = {
+      -- see config.lua for implementation
+      input = function(callback) end,
+      resolve = function(input, source) end,
+    },
+    files = {
+      -- see config.lua for implementation
+      input = function(callback) end,
+      resolve = function(input, source) end,
+    },
+    git = {
+      -- see config.lua for implementation
+      input = function(callback) end,
+      resolve = function(input, source) end,
+    },
+  },
+
   -- default prompts
   prompts = {
     Explain = {
       prompt = '> /COPILOT_EXPLAIN\n\nWrite an explanation for the selected code and diagnostics as paragraphs of text.',
     },
     Review = {
+      -- see config.lua for implementation
       prompt = '> /COPILOT_REVIEW\n\nReview the selected code.',
-      callback = function(response, source)
-        -- see config.lua for implementation
-      end,
+      callback = function(response, source) end,
     },
     Fix = {
       prompt = '> /COPILOT_GENERATE\n\nThere is a problem in this code. Rewrite the code to show it with the bug fixed.',
@@ -313,8 +344,7 @@ Also see [here](/lua/CopilotChat/config.lua):
       prompt = '> /COPILOT_GENERATE\n\nPlease generate tests for my code.',
     },
     Commit = {
-      prompt = 'Write commit message for the change with commitizen convention. Make sure the title has maximum 50 characters and message is wrapped at 72 characters. Wrap the whole message in code block with language gitcommit.',
-      selection = select.gitdiff,
+      prompt = '> #git:staged\n\nWrite commit message for the change with commitizen convention. Make sure the title has maximum 50 characters and message is wrapped at 72 characters. Wrap the whole message in code block with language gitcommit.',
     },
   },

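Since the README's sample config only stubs out the default contexts, here is a rough sketch of what registering a custom context with its own input could look like under the new scheme. The `cmd` context name, its input prompt, and the embed fields `content`, `filename`, and `filetype` are assumptions for illustration, not part of this commit:

```lua
require('CopilotChat').setup({
  contexts = {
    -- Hypothetical custom context, referenced in a prompt as `#cmd`
    -- (optionally with `:` input, exact input parsing is an assumption).
    cmd = {
      description = 'Includes output of a shell command in chat context. Supports input.',
      input = function(callback)
        vim.ui.input({ prompt = 'Enter a command> ' }, callback)
      end,
      resolve = function(input)
        local output = vim.fn.system(input or 'git status')
        -- Embed field names are assumed; the annotated type is CopilotChat.copilot.embed.
        return {
          { content = output, filename = 'command_output', filetype = 'text' },
        }
      end,
    },
  },
})
```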
lua/CopilotChat/config.lua

Lines changed: 83 additions & 6 deletions
@@ -1,4 +1,5 @@
 local prompts = require('CopilotChat.prompts')
+local context = require('CopilotChat.context')
 local select = require('CopilotChat.select')

 --- @class CopilotChat.config.source
@@ -23,6 +24,11 @@ local select = require('CopilotChat.select')
 ---@field end_row number?
 ---@field end_col number?

+---@class CopilotChat.config.context
+---@field description string?
+---@field input fun(callback: fun(input: string?))?
+---@field resolve fun(input: string?, source: CopilotChat.config.source):table<CopilotChat.copilot.embed>
+
 ---@class CopilotChat.config.prompt
 ---@field prompt string?
 ---@field description string?
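Read together, the new annotations describe a context as a table with an optional `description`, an optional asynchronous `input` collector, and a `resolve` function that maps the collected input plus the source to a list of embeds. A minimal conforming value might look like this sketch (the single `content` field on the returned embed is an assumption):

```lua
---@type CopilotChat.config.context
local register_context = {
  description = 'Includes the contents of the unnamed register in chat context.',
  -- No `input` function: this context takes no `:` parameter.
  resolve = function()
    return {
      { content = vim.fn.getreg('"') }, -- embed shape assumed
    }
  end,
}
```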
@@ -83,9 +89,11 @@ local select = require('CopilotChat.select')
 ---@field auto_insert_mode boolean?
 ---@field clear_chat_on_new_prompt boolean?
 ---@field highlight_selection boolean?
+---@field highlight_headers boolean?
 ---@field history_path string?
 ---@field callback fun(response: string, source: CopilotChat.config.source)?
 ---@field selection nil|fun(source: CopilotChat.config.source):CopilotChat.config.selection?
+---@field contexts table<string, CopilotChat.config.context>?
 ---@field prompts table<string, CopilotChat.config.prompt|string>?
 ---@field window CopilotChat.config.window?
 ---@field mappings CopilotChat.config.mappings?
@@ -95,17 +103,16 @@ return {
   proxy = nil, -- [protocol://]host[:port] Use this proxy
   allow_insecure = false, -- Allow insecure server connections

-  system_prompt = prompts.COPILOT_INSTRUCTIONS, -- System prompt to use
-  model = 'gpt-4o', -- Default model to use, see ':CopilotChatModels' for available models
+  system_prompt = prompts.COPILOT_INSTRUCTIONS, -- System prompt to use (can be specified manually in prompt via /).
+  model = 'gpt-4o', -- Default model to use, see ':CopilotChatModels' for available models (can be specified manually in prompt via $).
   agent = 'copilot', -- Default agent to use, see ':CopilotChatAgents' for available agents (can be specified manually in prompt via @).
-  context = nil, -- Default context to use, 'buffers', 'buffer', 'files' or none (can be specified manually in prompt via #).
+  context = nil, -- Default context to use (can be specified manually in prompt via #).
   temperature = 0.1, -- GPT result temperature

   question_header = '## User ', -- Header to use for user questions
   answer_header = '## Copilot ', -- Header to use for AI answers
   error_header = '## Error ', -- Header to use for errors
   separator = '───', -- Separator to use in chat
-  highlight_headers = true, -- Highlight headers in chat, disable if using markdown renderers (like render-markdown.nvim)

   show_folds = true, -- Shows folds for sections in chat
   show_help = true, -- Shows help message as virtual lines when waiting for user input
@@ -114,6 +121,7 @@ return {
   insert_at_end = false, -- Move cursor to end of buffer when inserting text
   clear_chat_on_new_prompt = false, -- Clears chat on every new prompt
   highlight_selection = true, -- Highlight selection
+  highlight_headers = true, -- Highlight headers in chat, disable if using markdown renderers (like render-markdown.nvim)

   history_path = vim.fn.stdpath('data') .. '/copilotchat_history', -- Default path to stored history
   callback = nil, -- Callback to use when ask response is received
@@ -123,6 +131,76 @@ return {
     return select.visual(source) or select.buffer(source)
   end,

+  -- default contexts
+  contexts = {
+    buffer = {
+      description = 'Includes only the current buffer in chat context. Supports input.',
+      input = function(callback)
+        vim.ui.select(vim.api.nvim_list_bufs(), {
+          prompt = 'Select a buffer> ',
+        }, callback)
+      end,
+      resolve = function(input, source)
+        return {
+          context.outline(input and tonumber(input) or source.bufnr),
+        }
+      end,
+    },
+    buffers = {
+      description = 'Includes all open buffers in chat context.',
+      resolve = function()
+        return vim.tbl_map(
+          context.outline,
+          vim.tbl_filter(function(b)
+            return vim.api.nvim_buf_is_loaded(b) and vim.fn.buflisted(b) == 1
+          end, vim.api.nvim_list_bufs())
+        )
+      end,
+    },
+    file = {
+      description = 'Includes content of provided file in chat context. Supports input.',
+      input = function(callback)
+        local files = vim.tbl_filter(function(file)
+          return vim.fn.isdirectory(file) == 0
+        end, vim.fn.glob('**/*', false, true))
+
+        vim.ui.select(files, {
+          prompt = 'Select a file> ',
+        }, callback)
+      end,
+      resolve = function(input)
+        return {
+          context.file(input),
+        }
+      end,
+    },
+    files = {
+      description = 'Includes all non-hidden filenames in the current workspace in chat context. Supports input.',
+      input = function(callback)
+        vim.ui.input({
+          prompt = 'Enter a file pattern> ',
+          default = '**/*',
+        }, callback)
+      end,
+      resolve = function(input)
+        return context.files(input)
+      end,
+    },
+    git = {
+      description = 'Includes current git diff in chat context. Supports input.',
+      input = function(callback)
+        vim.ui.select({ 'unstaged', 'staged' }, {
+          prompt = 'Select diff type> ',
+        }, callback)
+      end,
+      resolve = function(input, source)
+        return {
+          context.gitdiff(input, source.bufnr),
+        }
+      end,
+    },
+  },
+
   -- default prompts
   prompts = {
     Explain = {
@@ -183,8 +261,7 @@ return {
       prompt = '> /COPILOT_GENERATE\n\nPlease generate tests for my code.',
     },
     Commit = {
-      prompt = 'Write commit message for the change with commitizen convention. Make sure the title has maximum 50 characters and message is wrapped at 72 characters. Wrap the whole message in code block with language gitcommit.',
-      selection = select.gitdiff,
+      prompt = '> #git:staged\n\nWrite commit message for the change with commitizen convention. Make sure the title has maximum 50 characters and message is wrapped at 72 characters. Wrap the whole message in code block with language gitcommit.',
     },
   },

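Because the Commit prompt now goes through the `git` context instead of `select.gitdiff`, pointing it at the unstaged diff should just be a matter of overriding the context reference in one's own config. A sketch, assuming prompt overrides merge with the defaults as before; `unstaged` is one of the two inputs offered by the default `git` context:

```lua
require('CopilotChat').setup({
  prompts = {
    Commit = {
      -- Same instructions as the default prompt, but resolved against the unstaged diff.
      prompt = '> #git:unstaged\n\nWrite commit message for the change with commitizen convention. '
        .. 'Make sure the title has maximum 50 characters and message is wrapped at 72 characters. '
        .. 'Wrap the whole message in code block with language gitcommit.',
    },
  },
})
```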