diff --git a/modules/hosts/desktop/configuration.nix b/modules/hosts/desktop/configuration.nix
index af182f1..83bd914 100644
--- a/modules/hosts/desktop/configuration.nix
+++ b/modules/hosts/desktop/configuration.nix
@@ -2,9 +2,8 @@
 # your system. Help is available in the configuration.nix(5) man page, on
 # https://search.nixos.org/options and in the NixOS manual (`nixos-help`).
 
-{ inputs, lib, pkgs, self, ... }:
-{
-flake.nixosModules.lily-desktop = { pkgs, ... }: {
+{ inputs, lib, pkgs, self, ... }:{
+flake.nixosModules.lily-desktop = { pkgs, ... }:{
   # Use the systemd-boot EFI boot loader.
   boot.loader = {
     systemd-boot.enable = true;
@@ -73,7 +72,8 @@ flake.nixosModules.lily-desktop = { pkgs, ... }: {
     vim
     wget
     home-manager
-  ] ++ [ self.packages.${pkgs.stdenv.hostPlatform.system}.nh ];
+    self.packages.${pkgs.stdenv.hostPlatform.system}.nh
+  ];
 
   # Some programs need SUID wrappers, can be configured further or are
   # started in user sessions.
diff --git a/modules/hosts/desktop/desktop.nix b/modules/hosts/desktop/desktop.nix
index 4b170b2..f70c1c3 100644
--- a/modules/hosts/desktop/desktop.nix
+++ b/modules/hosts/desktop/desktop.nix
@@ -4,6 +4,7 @@
     modules = [
       self.nixosModules.lily-desktop
       self.nixosModules.lily
+      self.nixosModules.llama
     ];
   };
 }
diff --git a/modules/nixosModules/ai/llama.nix b/modules/nixosModules/ai/llama.nix
new file mode 100644
index 0000000..336f042
--- /dev/null
+++ b/modules/nixosModules/ai/llama.nix
@@ -0,0 +1,34 @@
+# Overlay llama.cpp with CUDA support and serve models through llama-swap.
+{ self, inputs, ... }: {
+  flake.nixosModules.llama = { lib, pkgs, ... }: {
+    # Expose a CUDA-enabled llama.cpp build alongside the stock package.
+    nixpkgs.overlays = [
+      (final: prev: {
+        llama-cpp-cuda = prev.llama-cpp.override {
+          cudaSupport = true;
+          blasSupport = true;
+        };
+      })
+    ];
+    # llama-swap proxies requests, starting and stopping llama-server per model.
+    services.llama-swap = {
+      enable = true;
+      port = 9001;
+      openFirewall = true;
+      listenAddress = "0.0.0.0";
+      settings = {
+        models = {
+          "magidonia" = {
+            # Unload the model after 3600 seconds of inactivity.
+            ttl = 3600;
+            # \${PORT} is escaped so llama-swap, not Nix, substitutes it.
+            cmd = "${pkgs.llama-cpp-cuda}/bin/llama-server --port \${PORT} -m /var/AI/Models/Chat/ggufs/Magidonia-24B-v4.3-Q4_K_M.gguf";
+          };
+        };
+      };
+    };
+    environment.systemPackages = with pkgs; [
+      llama-cpp-cuda
+    ];
+  };
+}
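
Note: the "magidonia" entry above shows the general shape of a llama-swap model definition, and further models can be declared the same way. A minimal sketch, placed inside the module body where pkgs is in scope; the model name, GGUF path, and the -c (context size) flag are illustrative assumptions, not part of this change:

    services.llama-swap.settings.models."example-7b" = {
      # Hypothetical second model entry, same shape as "magidonia" above.
      ttl = 3600;
      cmd = "${pkgs.llama-cpp-cuda}/bin/llama-server --port \${PORT} -m /var/AI/Models/Chat/ggufs/Example-7B-Q4_K_M.gguf -c 8192";
    };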