1
0
Fork 0
mirror of https://github.com/nix-community/home-manager.git synced 2025-11-08 19:46:05 +01:00

aichat: init (#7207)

aichat is an all-in-one CLI tool for AI interactions.
On first run it prompts you to create a config, so I made a Home Manager module to do this declaratively.
This commit is contained in:
jaredmontoya 2025-06-04 20:35:39 +02:00 committed by GitHub
parent ffab96a8b4
commit 86b95fc1ed
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
7 changed files with 111 additions and 0 deletions

View file

@ -61,6 +61,7 @@ let
./programs/aerc.nix
./programs/aerospace.nix
./programs/afew.nix
./programs/aichat.nix
./programs/alacritty.nix
./programs/alot.nix
./programs/antidote.nix

View file

@ -0,0 +1,72 @@
# Home Manager module for aichat, an all-in-one LLM CLI tool.
# Writes the declarative `settings` attrset out as aichat's YAML config,
# choosing the platform-appropriate location.
{
  config,
  lib,
  pkgs,
  ...
}:
let
  cfg = config.programs.aichat;

  # aichat's config file is plain YAML; reuse the nixpkgs YAML generator
  # both for the option type and for rendering the file.
  yamlFormat = pkgs.formats.yaml { };

  inherit (pkgs.stdenv.hostPlatform) isLinux isDarwin;

  # Only materialize a config file when the user actually set something.
  hasSettings = cfg.settings != { };
in
{
  meta.maintainers = [
    lib.maintainers.jaredmontoya
  ];

  options.programs.aichat = {
    enable = lib.mkEnableOption "aichat, an All-in-one LLM CLI tool";

    # Nullable so users can manage the binary themselves (e.g. via an overlay
    # or system package) while still using this module for configuration.
    package = lib.mkPackageOption pkgs "aichat" { nullable = true; };

    settings = lib.mkOption {
      inherit (yamlFormat) type;
      default = { };
      defaultText = lib.literalExpression "{ }";
      example = lib.literalExpression ''
        {
          model = "Ollama:mistral-small:latest";
          clients = [
            {
              type = "openai-compatible";
              name = "Ollama";
              api_base = "http://localhost:11434/v1";
              models = [
                {
                  name = "llama3.2:latest";
                }
              ];
            }
          ];
        }
      '';
      description = ''
        Configuration written to
        {file}`$XDG_CONFIG_HOME/aichat/config.yaml`
        on Linux or on Darwin if [](#opt-xdg.enable) is set, otherwise
        {file}`~/Library/Application Support/aichat/config.yaml`.
        See
        <https://github.com/sigoden/aichat/blob/main/config.example.yaml>
        for supported values.
      '';
    };
  };

  config = lib.mkIf cfg.enable {
    home.packages = lib.mkIf (cfg.package != null) [ cfg.package ];

    # Darwin default: aichat looks in ~/Library/Application Support unless
    # the user opted into XDG paths via xdg.enable.
    home.file."Library/Application Support/aichat/config.yaml" =
      lib.mkIf (hasSettings && isDarwin && !config.xdg.enable) {
        source = yamlFormat.generate "aichat-config" cfg.settings;
      };

    # Linux always uses XDG; Darwin does too when xdg.enable is set.
    xdg.configFile."aichat/config.yaml" =
      lib.mkIf (hasSettings && (isLinux || config.xdg.enable)) {
        source = yamlFormat.generate "aichat-config" cfg.settings;
      };
  };
}

View file

@ -7,6 +7,7 @@ let
# keep-sorted start case=no numeric=yes
"aerc"
"aerospace"
"aichat"
"alacritty"
"alot"
"antidote"

View file

@ -171,6 +171,7 @@ import nmtSrc {
./modules/misc/specialisation
./modules/misc/xdg
./modules/programs/aerc
./modules/programs/aichat
./modules/programs/alacritty
./modules/programs/alot
./modules/programs/antidote

View file

@ -0,0 +1,3 @@
{
aichat-settings = ./settings.nix;
}

View file

@ -0,0 +1,26 @@
# nmt test: enabling programs.aichat with a settings attrset must produce
# a YAML config at ~/.config/aichat/config.yaml that matches the expected
# fixture (./settings.yml) byte-for-byte.
{ ... }:
{
  programs.aichat = {
    enable = true;
    # Mirrors the example from the module's option documentation.
    settings = {
      model = "Ollama:mistral-small:latest";
      clients = [
        {
          type = "openai-compatible";
          name = "Ollama";
          api_base = "http://localhost:11434/v1";
          models = [
            {
              name = "llama3.2:latest";
            }
          ];
        }
      ];
    };
  };
  nmt.script = ''
    assertFileExists home-files/.config/aichat/config.yaml
    assertFileContent home-files/.config/aichat/config.yaml \
      ${./settings.yml}
  '';
}

View file

@ -0,0 +1,7 @@
clients:
- api_base: http://localhost:11434/v1
models:
- name: llama3.2:latest
name: Ollama
type: openai-compatible
model: Ollama:mistral-small:latest