
plugins/ollama: migrate to mkNeovimPlugin

Austin Horstman 2025-10-06 14:46:34 -05:00
parent a68291151c
commit 9bdedc0510
3 changed files with 100 additions and 255 deletions
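The user-visible effect of the migration: the plugin's bespoke camelCase options move under the freeform `settings` attribute set that `mkNeovimPlugin` forwards to `require('ollama').setup()`, using ollama.nvim's own snake_case keys; the old option names keep working through the rename aliases declared in `deprecations.nix` below. A minimal before/after sketch of a user configuration (illustrative values, not taken from this commit):

```nix
{
  # Before this commit: bespoke nixvim options with camelCase names.
  plugins.ollama = {
    enable = true;
    model = "mistral";
    serve.onStart = true;
  };
}
```

```nix
{
  # After this commit: a freeform `settings` set, passed through to
  # require('ollama').setup() as-is, so keys use the plugin's snake_case.
  plugins.ollama = {
    enable = true;
    settings = {
      model = "mistral";
      serve.on_start = true;
    };
  };
}
```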


@@ -1,232 +1,46 @@
-{
-  lib,
-  helpers,
-  config,
-  pkgs,
-  ...
-}:
-with lib;
-let
-  cfg = config.plugins.ollama;
-
-  actionOptionType =
-    with lib.types;
-    oneOf [
-      rawLua
-      (enum [
-        "display"
-        "replace"
-        "insert"
-        "display_replace"
-        "display_insert"
-        "display_prompt"
-      ])
-      (submodule {
-        options = {
-          fn = helpers.mkNullOrStrLuaFnOr (enum [ false ]) ''
-            fun(prompt: table): Ollama.PromptActionResponseCallback
-
-            Example:
-            ```lua
-            function(prompt)
-              -- This function is called when the prompt is selected
-              -- just before sending the prompt to the LLM.
-              -- Useful for setting up UI or other state.
-
-              -- Return a function that will be used as a callback
-              -- when a response is received.
-              ---@type Ollama.PromptActionResponseCallback
-              return function(body, job)
-                -- body is a table of the json response
-                -- body.response is the response text received
-                -- job is the plenary.job object when opts.stream = true
-                -- job is nil otherwise
-              end
-            end
-            ```
-          '';
-
-          opts = {
-            stream = helpers.defaultNullOpts.mkBool false ''
-              Whether to stream the response.
-            '';
-          };
-        };
-      })
-    ];
-in
-{
-  meta.maintainers = [ maintainers.GaetanLepage ];
-
-  options.plugins.ollama = lib.nixvim.plugins.neovim.extraOptionsOptions // {
-    enable = mkEnableOption "ollama.nvim";
-
-    package = lib.mkPackageOption pkgs "ollama.nvim" {
-      default = [
-        "vimPlugins"
-        "ollama-nvim"
-      ];
-    };
-
-    model = helpers.defaultNullOpts.mkStr "mistral" ''
-      The default model to use.
-    '';
-
-    prompts =
-      let
-        promptOptions = {
-          prompt = mkOption {
-            type = with lib.types; maybeRaw str;
-            description = ''
-              The prompt to send to the model.
-
-              Replaces the following tokens:
-              - `$input`: The input from the user
-              - `$sel`: The currently selected text
-              - `$ftype`: The filetype of the current buffer
-              - `$fname`: The filename of the current buffer
-              - `$buf`: The contents of the current buffer
-              - `$line`: The current line in the buffer
-              - `$lnum`: The current line number in the buffer
-            '';
-          };
-
-          inputLabel = helpers.defaultNullOpts.mkStr "> " ''
-            The label to use for an input field.
-          '';
-
-          action = helpers.mkNullOrOption actionOptionType ''
-            How to handle the output.
-
-            See [here](https://github.com/nomnivore/ollama.nvim/tree/main#actions) for more details.
-
-            Defaults to the value of `plugins.ollama.action`.
-          '';
-
-          model = helpers.mkNullOrStr ''
-            The model to use for this prompt.
-
-            Defaults to the value of `plugins.ollama.model`.
-          '';
-
-          extract =
-            helpers.defaultNullOpts.mkNullable (with lib.types; maybeRaw (either str (enum [ false ])))
-              "```$ftype\n(.-)```"
-              ''
-                A `string.match` pattern to use for an Action to extract the output from the response
-                (Insert/Replace).
-              '';
-
-          options = helpers.mkNullOrOption (with types; attrsOf anything) ''
-            Additional model parameters, such as temperature, listed in the documentation for the [Modelfile](https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values).
-          '';
-
-          system = helpers.mkNullOrStr ''
-            The SYSTEM instruction specifies the system prompt to be used in the Modelfile template,
-            if applicable.
-            (overrides what's in the Modelfile).
-          '';
-
-          format = helpers.defaultNullOpts.mkEnumFirstDefault [ "json" ] ''
-            The format to return a response in.
-            Currently the only accepted value is `"json"`.
-          '';
-        };
-
-        processPrompt =
-          prompt:
-          if isAttrs prompt then
-            {
-              inherit (prompt) prompt;
-              input_label = prompt.inputLabel;
-              inherit (prompt)
-                action
-                model
-                extract
-                options
-                system
-                format
-                ;
-            }
-          else
-            prompt;
-      in
-      mkOption {
-        type = with types; attrsOf (either (submodule { options = promptOptions; }) (enum [ false ]));
-        default = { };
-        apply = v: mapAttrs (_: processPrompt) v;
-        description = ''
-          A table of prompts to use for each model.
-
-          Default prompts are defined [here](https://github.com/nomnivore/ollama.nvim/blob/main/lua/ollama/prompts.lua).
-        '';
-      };
-
-    action = helpers.defaultNullOpts.mkNullable actionOptionType "display" ''
-      How to handle prompt outputs when not specified by prompt.
-
-      See [here](https://github.com/nomnivore/ollama.nvim/tree/main#actions) for more details.
-    '';
-
-    url = helpers.defaultNullOpts.mkStr "http://127.0.0.1:11434" ''
-      The url to use to connect to the ollama server.
-    '';
-
-    serve = {
-      onStart = helpers.defaultNullOpts.mkBool false ''
-        Whether to start the ollama server on startup.
-      '';
-
-      command = helpers.defaultNullOpts.mkStr "ollama" ''
-        The command to use to start the ollama server.
-      '';
-
-      args = helpers.defaultNullOpts.mkListOf types.str [ "serve" ] ''
-        The arguments to pass to the serve command.
-      '';
-
-      stopCommand = helpers.defaultNullOpts.mkStr "pkill" ''
-        The command to use to stop the ollama server.
-      '';
-
-      stopArgs =
-        helpers.defaultNullOpts.mkListOf types.str
-          [
-            "-SIGTERM"
-            "ollama"
-          ]
-          ''
-            The arguments to pass to the stop command.
-          '';
-    };
-  };
-
-  config = mkIf cfg.enable {
-    extraPlugins = [ cfg.package ];
-
-    extraConfigLua =
-      let
-        setupOptions =
-          with cfg;
-          {
-            inherit
-              model
-              prompts
-              action
-              url
-              ;
-            serve = with serve; {
-              on_start = onStart;
-              inherit command args;
-              stop_command = stopCommand;
-              stop_args = stopArgs;
-            };
-          }
-          // cfg.extraOptions;
-      in
-      ''
-        require('ollama').setup(${lib.nixvim.toLuaObject setupOptions})
-      '';
-  };
-}
+{
+  lib,
+  ...
+}:
+lib.nixvim.plugins.mkNeovimPlugin {
+  name = "ollama";
+  package = "ollama-nvim";
+
+  maintainers = [ lib.maintainers.GaetanLepage ];
+
+  # TODO: introduced 2025-10-06
+  inherit (import ./deprecations.nix) deprecateExtraOptions optionsRenamedToSettings;
+
+  settingsExample = {
+    model = "mistral";
+    action = "display";
+    url = "http://127.0.0.1:11434";
+    serve = {
+      on_start = false;
+      command = "ollama";
+      args = [ "serve" ];
+      stop_command = "pkill";
+      stop_args = [
+        "-SIGTERM"
+        "ollama"
+      ];
+    };
+    prompts = {
+      my-prompt = {
+        prompt = "Hello $input $sel. J'aime le fromage.";
+        input_label = "> ";
+        action = "display";
+        model = "foo";
+        extract = "```$ftype\n(.-)```";
+        options = {
+          mirostat_eta = 0.1;
+          num_thread = 8;
+          repeat_last_n = -1;
+          stop = "arrêt";
+        };
+        system = "system";
+        format = "json";
+      };
+    };
+  };
+}


@@ -0,0 +1,30 @@
+{
+  # TODO: introduced 2025-10-06
+  deprecateExtraOptions = true;
+  optionsRenamedToSettings = [
+    "model"
+    "prompts"
+    "action"
+    "url"
+    [
+      "serve"
+      "onStart"
+    ]
+    [
+      "serve"
+      "command"
+    ]
+    [
+      "serve"
+      "args"
+    ]
+    [
+      "serve"
+      "stopCommand"
+    ]
+    [
+      "serve"
+      "stopArgs"
+    ]
+  ];
+}
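Each entry in `optionsRenamedToSettings` names an old option path relative to `plugins.ollama` (a bare string for a top-level option, a list for a nested one). As I understand nixvim's `mkNeovimPlugin` machinery, each entry generates a deprecation alias from the old camelCase path to the snake_cased key under `settings`, roughly:

```nix
# Illustrative mapping only; the actual aliases are generated by nixvim:
#   plugins.ollama.model          -> plugins.ollama.settings.model
#   plugins.ollama.serve.onStart  -> plugins.ollama.settings.serve.on_start
#   plugins.ollama.serve.stopArgs -> plugins.ollama.settings.serve.stop_args
```

`deprecateExtraOptions = true` likewise redirects the old `extraOptions` escape hatch to `settings`.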


@@ -6,46 +6,47 @@
   example = {
     plugins.ollama = {
       enable = true;
-      model = "mistral";
-      prompts = {
-        # disable prompt
-        Sample_Prompt = false;
-
-        my-prompt = {
-          prompt = "Hello $input $sel. J'aime le fromage.";
-          inputLabel = "> ";
-          action = {
-            fn = ''
-              function(prompt)
-                return function(body, job)
-                end
-              end
-            '';
-            opts.stream = true;
-          };
-          model = "foo";
-          extract = "```$ftype\n(.-)```";
-          options = {
-            mirostat_eta = 0.1;
-            num_thread = 8;
-            repeat_last_n = -1;
-            stop = "arrêt";
-          };
-          system = "system";
-          format = "json";
-        };
-      };
-      action = "display";
-      url = "http://127.0.0.1:11434";
-      serve = {
-        onStart = false;
-        command = "ollama";
-        args = [ "serve" ];
-        stopCommand = "pkill";
-        stopArgs = [
-          "-SIGTERM"
-          "ollama"
-        ];
-      };
+      settings = {
+        model = "mistral";
+        prompts = {
+          # disable prompt
+          Sample_Prompt = false;
+
+          my-prompt = {
+            prompt = "Hello $input $sel. J'aime le fromage.";
+            input_label = "> ";
+            action = {
+              fn = ''
+                function(prompt)
+                  return function(body, job)
+                  end
+                end
+              '';
+              opts.stream = true;
+            };
+            model = "foo";
+            extract = "```$ftype\n(.-)```";
+            options = {
+              mirostat_eta = 0.1;
+              num_thread = 8;
+              repeat_last_n = -1;
+              stop = "arrêt";
+            };
+            system = "system";
+            format = "json";
+          };
+        };
+        action = "display";
+        url = "http://127.0.0.1:11434";
+        serve = {
+          on_start = false;
+          command = "ollama";
+          args = [ "serve" ];
+          stop_command = "pkill";
+          stop_args = [
+            "-SIGTERM"
+            "ollama"
+          ];
+        };
+      };
     };
   };