make ollama and open-webui available on the local network for now #194
@@ -40,11 +40,13 @@ in
         OLLAMA_CONTEXT_LENGTH = "64000";
       };
       openFirewall = true;
+      host = "0.0.0.0"; # don't want to make this available via load-balancer yet, so making it available on the local network
     };
     open-webui = {
       enable = true;
       port = 21212;
       openFirewall = true;
+      host = "0.0.0.0"; # don't want to make this available via load-balancer yet, so making it available on the local network
     };
   };
   users.users.ollama = {
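
For context, a minimal sketch of what the affected services block could look like after this change. The hunk only shows a fragment, so the enclosing attribute names (services, environmentVariables) and the ollama enable line are assumptions, not part of the diff:

  # Sketch only: option names outside the hunk are assumed.
  services = {
    ollama = {
      enable = true; # assumed; not visible in the hunk
      environmentVariables = {
        OLLAMA_CONTEXT_LENGTH = "64000";
      };
      openFirewall = true;
      host = "0.0.0.0"; # listen on all interfaces so the local network can reach it (no load-balancer yet)
    };
    open-webui = {
      enable = true;
      port = 21212;
      openFirewall = true;
      host = "0.0.0.0"; # same reasoning: local network access for now
    };
  };

Binding to 0.0.0.0 together with openFirewall = true exposes both services to every interface on the host, so this is only appropriate while the machine sits on a trusted local network, as the inline comments state.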