# local-llm/compose.yml
# Nareshkumar Rao 72c538272a initial
# 2025-04-16 15:08:41 +02:00

services:
  # Ollama LLM API server. Not published to the host — open-webui reaches
  # it over the compose network by service name (see OLLAMA_BASE_URL below).
  ollama:
    image: ollama/ollama:latest
    volumes:
      # Persist pulled models on the host so they survive container recreation.
      - ./models:/root/.ollama/models
    # Reserve all available NVIDIA GPUs for inference.
    # NOTE(review): requires the NVIDIA Container Toolkit on the host — confirm.
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              count: all
              capabilities: [gpu]
    # Match the restart policy of the open-webui service so the whole
    # stack comes back after reboots and daemon restarts.
    restart: unless-stopped
  # sillytavern:
  #   image: ghcr.io/sillytavern/sillytavern:latest
  #   environment:
  #     - NODE_ENV=production
  #     - FORCE_COLOR=1
  #     - SILLYTAVERN_LISTEN=true
  #     - SILLYTAVERN_WHITELISTMODE=false
  #     - SILLYTAVERN_SECURITYOVERRIDE=true
  #   ports:
  #     - "8000:8000"
  #   volumes:
  #     - "./data/sillytavern/config:/home/node/app/config"
  #     - "./data/sillytavern/data:/home/node/app/data"
  #     - "./data/sillytavern/plugins:/home/node/app/plugins"
  #     - "./data/sillytavern/extensions:/home/node/app/public/scripts/extensions/third-party"
  #   restart: unless-stopped
open-webui:
image: ghcr.io/open-webui/open-webui:main
ports:
- "3000:8080"
environment:
- OLLAMA_BASE_URL=http://ollama:11434
volumes:
- ./data/open-webui:/app/backend/data
restart: unless-stopped