-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path: docker-compose.yml
More file actions
52 lines (46 loc) · 1.78 KB
/
docker-compose.yml
File metadata and controls
52 lines (46 loc) · 1.78 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
services:
  mediasage:
    image: ecwilson/mediasage:latest
    # Alternative registries:
    # image: ghcr.io/ecwilsonaz/mediasage:latest
    # To build locally instead, comment out 'image' and uncomment 'build':
    # build: .
    container_name: mediasage
    # Optional: Run as specific user for NAS/Synology (use your PUID:PGID)
    # user: "1000:1000"
    ports:
      # Host port is overridable via HOST_PORT (defaults to 5765, same as before);
      # the container side stays fixed at 5765. Quoted to avoid YAML's
      # sexagesimal-number trap on port mappings.
      - "${HOST_PORT:-5765}:5765"
    environment:
      # Required: Plex server settings
      - PLEX_URL=${PLEX_URL}
      - PLEX_TOKEN=${PLEX_TOKEN}
      - PLEX_MUSIC_LIBRARY=${PLEX_MUSIC_LIBRARY:-Music}
      # Performance tuning
      - UVICORN_WORKERS=${UVICORN_WORKERS:-1}
      # LLM Provider (anthropic, openai, gemini, ollama, or custom)
      # Defaults to gemini. Can also be configured in the web UI.
      # - LLM_PROVIDER=gemini
      # Cloud provider API keys (set one)
      - ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY:-}
      - OPENAI_API_KEY=${OPENAI_API_KEY:-}
      - GEMINI_API_KEY=${GEMINI_API_KEY:-}
      # Optional: Override default models
      # - LLM_MODEL_ANALYSIS=gemini-2.5-flash
      # - LLM_MODEL_GENERATION=gemini-2.5-flash
      # Local LLM: Ollama (experimental)
      # - LLM_PROVIDER=ollama
      # - OLLAMA_URL=http://host.docker.internal:11434
      # - OLLAMA_CONTEXT_WINDOW=32768
      # Local LLM: Custom OpenAI-compatible API (experimental)
      # - LLM_PROVIDER=custom
      # - CUSTOM_LLM_URL=https://openrouter.ai/api/v1
      # - CUSTOM_LLM_API_KEY=sk-or-your-key
      # - CUSTOM_CONTEXT_WINDOW=32768
    volumes:
      # Persist library cache across restarts.
      # Ensure directory is writable: either set 'user' above, or run:
      #   mkdir -p ./data && chmod 777 ./data
      - ./data:/app/data
      # Optional: persist UI settings
      # - ./config.user.yaml:/app/config.user.yaml
    restart: unless-stopped