llama swap
@@ -2,9 +2,8 @@
 # your system. Help is available in the configuration.nix(5) man page, on
 # https://search.nixos.org/options and in the NixOS manual (`nixos-help`).
 
-{ inputs, lib, pkgs, self, ... }:
-{
-flake.nixosModules.lily-desktop = { pkgs, ... }: {
+{ inputs, lib, pkgs, self, ... }:{
+flake.nixosModules.lily-desktop = { pkgs, ... }:{
   # Use the systemd-boot EFI boot loader.
   boot.loader = {
     systemd-boot.enable = true;
@@ -73,7 +72,8 @@ flake.nixosModules.lily-desktop = { pkgs, ... }: {
     vim
     wget
     home-manager
-  ] ++ [ self.packages.${pkgs.stdenv.hostPlatform.system}.nh ];
+    self.packages.${pkgs.stdenv.hostPlatform.system}.nh
+  ];
 
   # Some programs need SUID wrappers, can be configured further or are
   # started in user sessions.
@@ -4,6 +4,7 @@
     modules = [
       self.nixosModules.lily-desktop
       self.nixosModules.lily
+      self.nixosModules.llama
     ];
   };
 }
BIN  modules/nixosModules/.localization.nix.swp  (new file)
Binary file not shown.
29  modules/nixosModules/ai/llama.nix  (new file)
@@ -0,0 +1,29 @@
+{self, inputs, ...}: {
+  flake.nixosModules.llama = { lib, pkgs, ... }: {
+    nixpkgs.overlays = [
+      (final: prev: {
+        llama-cpp-cuda = prev.llama-cpp.override {
+          cudaSupport = true;
+          blasSupport = true;
+        };
+      })
+    ];
+    services.llama-swap = {
+      enable = true;
+      port = 9001;
+      openFirewall = true;
+      listenAddress = "0.0.0.0";
+      settings = {
+        models = {
+          "magidonia" = {
+            ttl = 3600;
+            cmd = "${pkgs.llama-cpp-cuda}/bin/llama-server --port \${PORT} -m /var/AI/Models/Chat/ggufs/Magidonia-24B-v4.3-Q4_K_M.gguf";
+          };
+        };
+      };
+    };
+    environment.systemPackages = with pkgs; [
+      llama-cpp-cuda
+    ];
+  };
+}
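For context: llama-swap is, roughly, a small proxy in front of llama-server. It launches the `cmd` configured for a model the first time a request names that model, fills in the proxy-chosen port where the command says `${PORT}`, and stops the instance again after `ttl` seconds of idle time. Below is a minimal, hypothetical sketch of how a second entry could sit next to "magidonia" inside the same `settings.models` block; the "example-7b" name and its GGUF path are invented placeholders, not files from this repository.

    # Hypothetical extra entry for services.llama-swap.settings.models;
    # the model name and the .gguf path are placeholders for illustration only.
    "example-7b" = {
      ttl = 600;  # idle seconds before llama-swap stops this llama-server (same unit as the 3600 above)
      cmd = "${pkgs.llama-cpp-cuda}/bin/llama-server --port \${PORT} -m /var/AI/Models/Chat/ggufs/Example-7B-Q4_K_M.gguf";
    };

Clients would then reach either model through the single proxy endpoint on port 9001, picking between them by the model name in the request rather than by separate ports.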