make ollama and open-webui available on the local network for now
This commit is contained in:
@@ -40,11 +40,13 @@ in
        OLLAMA_CONTEXT_LENGTH = "64000";
      };
      openFirewall = true;
      host = "0.0.0.0"; # don't want to make this available via load-balancer yet, so making it available on the local network
    };
    open-webui = {
      enable = true;
      port = 21212;
      openFirewall = true;
      host = "0.0.0.0"; # don't want to make this available via load-balancer yet, so making it available on the local network
    };
  };

  users.users.ollama = {
||||
Reference in New Issue
Block a user