Last active
June 27, 2024 15:53
-
-
Save logikal/02b34d1a106ee20c6dfad3e25bc0eef1 to your computer and use it in GitHub Desktop.
Docker Compose file for locally hosted LLMs (Ollama, Open WebUI, LiteLLM) behind Tailscale
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
---
# Docker Compose stack: Ollama + Open WebUI + LiteLLM, all sharing the
# network namespace of a Tailscale sidecar so the whole stack is reachable
# (only) over the tailnet under the hostname "aichat".
# Background: https://tailscale.com/blog/docker-tailscale-guide
version: "3"

services:
  # Tailscale sidecar. Every other service joins its network namespace via
  # `network_mode: service:ts-aichat-server`, so nothing is published on
  # the host — all traffic arrives over the tailnet.
  ts-aichat-server:
    image: tailscale/tailscale:latest
    container_name: ts-aichat-server
    hostname: aichat  # machine name shown on the tailnet
    environment:
      # TS_AUTHKEY (no underscore between AUTH and KEY) is the variable
      # documented for current tailscale/tailscale images.
      - TS_AUTHKEY=tskey-auth-REPLACEME
      - TS_EXTRA_ARGS=--advertise-tags=tag:container --reset
      - TS_STATE_DIR=/var/lib/tailscale
    volumes:
      # You'll probably want host directories instead of named volumes so
      # the node identity survives `docker compose down -v`.
      - ts-aichat-state:/var/lib/tailscale
      - ts-aichat-config:/config
      - /dev/net/tun:/dev/net/tun
    cap_add:
      # Docker capability names are conventionally uppercase.
      - NET_ADMIN
      - SYS_MODULE
    restart: unless-stopped

  ollama:
    image: ollama/ollama:latest
    container_name: ollama
    deploy:
      resources:
        reservations:
          devices:
            # Reserve all local NVIDIA GPUs for the container.
            - driver: nvidia
              count: all
              capabilities:
                - gpu
    volumes:
      - ollama:/root/.ollama/
    # We don't publish ports when using network_mode: service — port 11434
    # is reachable through the Tailscale sidecar only. (If you do publish
    # ports, quote the mapping: low port numbers hit YAML's sexagesimal
    # integer trap.)
    # ports:
    #   - "11434:11434"
    network_mode: service:ts-aichat-server
    depends_on:
      - ts-aichat-server
    restart: unless-stopped

  open-webui:
    image: ghcr.io/open-webui/open-webui:main
    container_name: open-webui
    environment:
      # Ollama shares this network namespace, so it is on localhost.
      # (Original left this empty; verify against your Open WebUI version
      # if you rely on its built-in default instead.)
      - OLLAMA_BASE_URL=http://localhost:11434
    volumes:
      - open-webui:/app/backend/data
    # We don't publish ports when using network_mode: service.
    # ports:
    #   - "3000:8080"
    network_mode: service:ts-aichat-server
    depends_on:
      - ts-aichat-server
    restart: unless-stopped

  litellm:
    image: ghcr.io/berriai/litellm:main-latest
    command: --api_base https://REPLACE_WITH_OLLAMA_API_BASE -m ollama/mistral-openorca
    # We don't publish ports when using network_mode: service.
    # ports:
    #   - "4000:4000"
    network_mode: service:ts-aichat-server
    depends_on:
      - ts-aichat-server
    restart: unless-stopped

# Named volumes referenced by the services above must be declared at the
# top level, or Compose rejects the file ("service refers to undefined
# volume"). The original file omitted this section entirely.
volumes:
  ts-aichat-state:
  ts-aichat-config:
  ollama:
  open-webui:
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment.