Nvim: new plugins + some disabled ones

Some experiments with AI
This commit is contained in:
Robert Kmieć
2024-08-06 23:55:21 +02:00
parent 0daaf4add5
commit 801ba0dfdf
18 changed files with 736 additions and 89 deletions

View File

@@ -1,10 +1,23 @@
return {
{ 'tzachar/cmp-ai', dependencies = 'nvim-lua/plenary.nvim', enabled = false},
{ 'petertriho/cmp-git', enabled = false},
{ 'JoseConseco/cmp-ai', dependencies = 'nvim-lua/plenary.nvim', enabled = false},
{ 'petertriho/cmp-git', enabled = true},
{
"L3MON4D3/LuaSnip",
-- follow latest release.
version = "v2.*", -- Replace <CurrentMajor> by the latest released major (first number of latest release)
dependencies = {
--'saadparwaiz1/cmp_luasnip'
},
-- install jsregexp (optional!).
build = "make install_jsregexp",
config = function()
local ls = require("luasnip")
end
},
{
'hrsh7th/nvim-cmp',
dependencies = {
'dcampos/cmp-snippy',
"L3MON4D3/LuaSnip",
--'mstanciu552/cmp-matlab',
'petertriho/cmp-git',
'hrsh7th/cmp-buffer',
@@ -13,44 +26,44 @@ return {
'hrsh7th/cmp-nvim-lsp-signature-help',
'hrsh7th/cmp-path',
'hrsh7th/cmp-calc',
'tzachar/cmp-ai'
'JoseConseco/cmp-ai'
},
event = "InsertEnter",
config = function()
local cmp = require'cmp'
local snippy = require'snippy'
local cmp_ai = require('cmp_ai.config')
--local cmp_ai = require('cmp_ai.config')
local luasnip = require('luasnip')
cmp_ai:setup({
max_lines = 100,
provider = 'Ollama',
provider_options = {
model = 'deepseek-coder:6.7b',
base_url = 'http://batman.local:11434/api/generate',
prompt = function(lines_before, lines_after)
-- prompt depends on the model you use. Here is an example for deepseek coder
return '<PRE> ' .. lines_before .. ' <SUF>' .. lines_after .. ' <MID>' -- for codellama
end,
},
debounce_delay = 600, -- ms llama may be GPU hungry, wait x ms after last key input, before sending request to it
notify = true,
notify_callback = function(msg)
vim.notify(msg)
end,
run_on_every_keystroke = true,
ignored_file_types = {
-- default is not to ignore
-- uncomment to ignore in lua:
-- lua = true
},
})
-- cmp_ai:setup({
-- max_lines = 100,
-- provider = 'Ollama',
-- provider_options = {
-- model = 'codellama',
-- base_url = 'http://batman.local:11434/api/generate',
-- prompt = function(lines_before, lines_after)
-- -- prompt depends on the model you use. Here is an example for deepseek coder
-- return '<PRE> ' .. lines_before .. ' <SUF>' .. lines_after .. ' <MID>' -- for codellama
-- end,
-- },
-- debounce_delay = 600, -- ms llama may be GPU hungry, wait x ms after last key input, before sending request to it
-- notify = true,
-- notify_callback = function(msg)
-- vim.notify(msg)
-- end,
-- run_on_every_keystroke = true,
-- ignored_file_types = {
-- -- default is not to ignore
-- -- uncomment to ignore in lua:
-- -- lua = true
-- },
-- })
local default_cmp_sources = cmp.config.sources({
{ name = 'nvim_lsp' },
{ name = 'nvim_lsp_signature_help' },
{ name = 'path' },
{ name = 'snippy' },
{ name = 'luasnip' },
{ name = 'calc' },
--{ name = 'cmp_git' },
--{ name = 'cmp_ai' },
@@ -82,7 +95,7 @@ return {
cmp.setup({
snippet = {
expand = function(args)
snippy.expand_snippet(args.body)
luasnip.lsp_expand(args.body)
end,
},
window = {
@@ -99,22 +112,26 @@ return {
['<C-f>'] = cmp.mapping.scroll_docs(4),
['<C-Space>'] = cmp.mapping.complete(),
['<C-e>'] = cmp.mapping.abort(),
['<C-y>'] = cmp.mapping.confirm({ select = true }),
['<C-y>'] = cmp.mapping(function(fallback)
cmp.confirm({ select = true })
end)
--cmp.mapping.confirm({ select = true }),
}),
sources = cmp.config.sources({
{ name = 'nvim_lsp' },
{ name = 'nvim_lsp_signature_help' },
{ name = 'path' },
{ name = 'snippy' },
{ name = 'luasnip' },
{ name = 'calc' },
{ name = 'cmp_git' },
}, {
{ name = 'buffer', keyword_length = 5, max_item_count = 10, priority = -5 },
}),
sources = sources,
--sources = sources,
sorting = {
priority_weight = 2,
comparators = {
require('cmp_ai.compare'),
--require('cmp_ai.compare'),
cmp.config.compare.offset,
cmp.config.compare.exact,
cmp.config.compare.recently_used,
@@ -153,6 +170,6 @@ return {
require("cmp_git").setup()
end,
enabled = false
enabled = true
}
}