diff --git a/modules/modules.nix b/modules/modules.nix
index 5020c666e..d3d29b811 100644
--- a/modules/modules.nix
+++ b/modules/modules.nix
@@ -61,6 +61,7 @@ let
     ./programs/aerc.nix
     ./programs/aerospace.nix
     ./programs/afew.nix
+    ./programs/aichat.nix
     ./programs/alacritty.nix
     ./programs/alot.nix
     ./programs/antidote.nix
diff --git a/modules/programs/aichat.nix b/modules/programs/aichat.nix
new file mode 100644
index 000000000..38895b269
--- /dev/null
+++ b/modules/programs/aichat.nix
@@ -0,0 +1,72 @@
+{
+  config,
+  lib,
+  pkgs,
+  ...
+}:
+let
+  inherit (lib) mkIf;
+
+  cfg = config.programs.aichat;
+
+  settingsFormat = pkgs.formats.yaml { };
+
+  inherit (pkgs.stdenv.hostPlatform) isLinux isDarwin;
+in
+{
+  meta.maintainers = [
+    lib.maintainers.jaredmontoya
+  ];
+
+  options.programs.aichat = {
+    enable = lib.mkEnableOption "aichat, an All-in-one LLM CLI tool";
+
+    package = lib.mkPackageOption pkgs "aichat" { nullable = true; };
+
+    settings = lib.mkOption {
+      inherit (settingsFormat) type;
+      default = { };
+      defaultText = lib.literalExpression "{ }";
+      example = lib.literalExpression ''
+        {
+          model = "Ollama:mistral-small:latest";
+          clients = [
+            {
+              type = "openai-compatible";
+              name = "Ollama";
+              api_base = "http://localhost:11434/v1";
+              models = [
+                {
+                  name = "llama3.2:latest";
+                }
+              ];
+            }
+          ];
+        }
+      '';
+      description = ''
+        Configuration written to
+        {file}`$XDG_CONFIG_HOME/aichat/config.yaml`
+        on Linux or on Darwin if [](#opt-xdg.enable) is set, otherwise
+        {file}`~/Library/Application Support/aichat/config.yaml`.
+        See
+        <https://github.com/sigoden/aichat/blob/main/config.example.yaml>
+        for supported values.
+      '';
+    };
+  };
+
+  config = mkIf cfg.enable {
+    home.packages = mkIf (cfg.package != null) [ cfg.package ];
+
+    home.file."Library/Application Support/aichat/config.yaml" =
+      mkIf (cfg.settings != { } && (isDarwin && !config.xdg.enable))
+        {
+          source = settingsFormat.generate "aichat-config" cfg.settings;
+        };
+
+    xdg.configFile."aichat/config.yaml" = mkIf (cfg.settings != { } && (isLinux || config.xdg.enable)) {
+      source = settingsFormat.generate "aichat-config" cfg.settings;
+    };
+  };
+}
diff --git a/tests/darwinScrublist.nix b/tests/darwinScrublist.nix
index e5c4e7a0f..fff07fcf4 100644
--- a/tests/darwinScrublist.nix
+++ b/tests/darwinScrublist.nix
@@ -7,6 +7,7 @@ let
     # keep-sorted start case=no numeric=yes
     "aerc"
     "aerospace"
+    "aichat"
     "alacritty"
     "alot"
     "antidote"
diff --git a/tests/default.nix b/tests/default.nix
index 410e44997..4f4e10c19 100644
--- a/tests/default.nix
+++ b/tests/default.nix
@@ -171,6 +171,7 @@ import nmtSrc {
     ./modules/misc/specialisation
     ./modules/misc/xdg
     ./modules/programs/aerc
+    ./modules/programs/aichat
     ./modules/programs/alacritty
     ./modules/programs/alot
     ./modules/programs/antidote
diff --git a/tests/modules/programs/aichat/default.nix b/tests/modules/programs/aichat/default.nix
new file mode 100644
index 000000000..57e830967
--- /dev/null
+++ b/tests/modules/programs/aichat/default.nix
@@ -0,0 +1,3 @@
+{
+  aichat-settings = ./settings.nix;
+}
diff --git a/tests/modules/programs/aichat/settings.nix b/tests/modules/programs/aichat/settings.nix
new file mode 100644
index 000000000..899e115de
--- /dev/null
+++ b/tests/modules/programs/aichat/settings.nix
@@ -0,0 +1,26 @@
+{ ... }:
+{
+  programs.aichat = {
+    enable = true;
+    settings = {
+      model = "Ollama:mistral-small:latest";
+      clients = [
+        {
+          type = "openai-compatible";
+          name = "Ollama";
+          api_base = "http://localhost:11434/v1";
+          models = [
+            {
+              name = "llama3.2:latest";
+            }
+          ];
+        }
+      ];
+    };
+  };
+  nmt.script = ''
+    assertFileExists home-files/.config/aichat/config.yaml
+    assertFileContent home-files/.config/aichat/config.yaml \
+      ${./settings.yml}
+  '';
+}
diff --git a/tests/modules/programs/aichat/settings.yml b/tests/modules/programs/aichat/settings.yml
new file mode 100644
index 000000000..e43464d69
--- /dev/null
+++ b/tests/modules/programs/aichat/settings.yml
@@ -0,0 +1,7 @@
+clients:
+- api_base: http://localhost:11434/v1
+  models:
+  - name: llama3.2:latest
+  name: Ollama
+  type: openai-compatible
+model: Ollama:mistral-small:latest
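
For reviewers, a minimal usage sketch (not part of the patch) showing how the new option set introduced above could appear in a user's Home Manager configuration. The option paths (programs.aichat.enable, programs.aichat.settings) and the xdg.enable interaction are taken from the module itself; the Ollama client and model values are illustrative placeholders.

# Usage sketch, assuming the module as added in this diff.
{
  programs.aichat = {
    enable = true;
    settings = {
      model = "Ollama:mistral-small:latest";
      clients = [
        {
          type = "openai-compatible";
          name = "Ollama";
          api_base = "http://localhost:11434/v1";
        }
      ];
    };
  };

  # On Darwin, setting xdg.enable makes the module write to
  # $XDG_CONFIG_HOME/aichat/config.yaml instead of
  # ~/Library/Application Support/aichat/config.yaml,
  # per the mkIf branches in the config section above.
  xdg.enable = true;
}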