Updated ai config to use 1080ti with ollama
@@ -13,9 +13,35 @@
   users.users.mindforge = import ./user.nix;

+  # Setup drivers for NVIDIA GPU
+  services.xserver = {
+    enable = false;
+    videoDrivers = [ "nvidia" ];
+  };
+
+  hardware = {
+    nvidia = {
+      open = false;
+      modesetting.enable = true;
+      powerManagement.enable = false;
+      powerManagement.finegrained = false;
+      nvidiaSettings = true;
+    };
+
+    graphics = {
+      enable = true;
+      enable32Bit = true;
+    };
+  };
+
+  services.ollama = {
+    enable = true;
+    host = "0.0.0.0";
+    acceleration = "cuda";
+    package = pkgs.ollama-cuda.override {
+      cudaArches = [
+        "61"
+      ];
+    };
+    openFirewall = true;
+    environmentVariables = {
+      OLLAMA_CONTEXT_LENGTH = "8192";
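The GTX 1080 Ti is a Pascal card with CUDA compute capability 6.1, which is what the cudaArches = [ "61" ] override restricts the ollama-cuda build to; with host = "0.0.0.0" and openFirewall = true, the Ollama API is also reachable from other machines on its default port, 11434. A minimal alternative sketch, assuming the nixpkgs cudaSupport / cudaCapabilities config options and not something this commit does, would pin the CUDA target once for all packages instead of per package:

  # Sketch only: set the CUDA compute capability globally rather than via
  # pkgs.ollama-cuda.override. "6.1" is the GTX 1080 Ti (Pascal) capability.
  nixpkgs.config = {
    allowUnfree = true;            # NVIDIA driver and CUDA toolkit are unfree
    cudaSupport = true;            # build CUDA-enabled variants where available
    cudaCapabilities = [ "6.1" ];  # only target this GPU's architecture
  };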
@@ -13,13 +13,13 @@
   boot.kernelModules = [ "kvm-intel" ];
   boot.extraModulePackages = [ ];

-  fileSystems."/" =
-    { device = "/dev/disk/by-uuid/b98eb7bb-f58c-4862-a234-1d72c9ff1187";
+  fileSystems."/" = {
+    device = "/dev/disk/by-partlabel/root";
     fsType = "ext4";
   };

-  fileSystems."/boot" =
-    { device = "/dev/disk/by-uuid/3D03-9579";
+  fileSystems."/boot" = {
+    device = "/dev/disk/by-partlabel/EFI";
     fsType = "vfat";
     options = [ "fmask=0022" "dmask=0022" ];
   };
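The mounts above now resolve through GPT partition labels (root and EFI) instead of filesystem UUIDs, so the configuration keeps working after a reformat as long as the partitions keep their labels. A hypothetical variant, not part of this commit, would key on filesystem labels (set with e2label / fatlabel) via /dev/disk/by-label instead:

  # Illustrative only: the label names "nixos" and "BOOT" are assumptions.
  fileSystems."/" = {
    device = "/dev/disk/by-label/nixos";
    fsType = "ext4";
  };

  fileSystems."/boot" = {
    device = "/dev/disk/by-label/BOOT";
    fsType = "vfat";
    options = [ "fmask=0022" "dmask=0022" ];
  };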