# Docker Compose stack: llama-swap (ROCm) model server behind a Caddy reverse
# proxy, published on a tailnet via a Tailscale sidecar.

# Shared logging configuration (compose extension field + YAML anchor) — the
# three services used byte-identical stanzas; the anchor merges identically.
x-logging: &default_logging
  driver: "json-file"
  options:
    mode: "non-blocking"  # don't block the app when the log buffer is full
    max-size: "10m"
    max-file: "3"

services:
  # llama-swap model server with AMD ROCm GPU access.
  app:
    build: .
    image: local/llama-swap-rocm:latest
    restart: unless-stopped
    hostname: ollama
    container_name: ollama
    volumes:
      - "/srv/docker/ollama/data/models:/models:ro"
      - "./llama-swap.yaml:/etc/llama-swap/config.yaml:ro"
    devices:
      # ROCm compute (kfd) and render (dri) nodes for GPU inference.
      - "/dev/kfd:/dev/kfd"
      - "/dev/dri:/dev/dri"
    group_add:
      - video
      - render
    networks:
      internal:
        ipv4_address: 172.23.0.5
    logging: *default_logging

  # Tailscale sidecar; reverse_proxy joins this container's network namespace.
  tailscale:
    hostname: ollama  # used as the Tailscale machine name
    image: tailscale/tailscale:latest
    restart: unless-stopped
    healthcheck:
      # "tailscale status" exits non-zero until tailscaled is up and logged in.
      # NOTE(review): timeout (5s) exceeds interval (1s); Docker allows this —
      # the next probe is scheduled only after the previous one completes.
      test: ["CMD-SHELL", "tailscale status"]
      interval: 1s
      timeout: 5s
      retries: 60
    volumes:
      # NOTE(review): tailscaled keeps its state in /var/lib/tailscale; the
      # conventional mount target is /var/lib/tailscale rather than all of
      # /var/lib — confirm before narrowing, as the state path must persist.
      - "/srv/docker/ollama/tailscale:/var/lib"
      - "/lib/modules:/lib/modules:ro"
    devices:
      - "/dev/net/tun:/dev/net/tun"
    cap_add:
      # Uppercased per Docker convention (the engine normalizes capability
      # case, so behavior is unchanged).
      - NET_ADMIN
      - SYS_MODULE
      - NET_RAW
    command: tailscaled
    networks:
      - internal
    logging: *default_logging

  # Caddy reverse proxy (namecheap DNS build); shares the tailscale netns so
  # it is reachable at the tailnet address.
  reverse_proxy:
    image: caddybuilds/caddy-namecheap:2-alpine
    restart: unless-stopped
    network_mode: service:tailscale
    volumes:
      - "./Caddyfile:/etc/caddy/Caddyfile:ro"
      - "/srv/docker/ollama/caddy/config/:/config/caddy:rw"
      - "/srv/docker/ollama/caddy/data/:/data/caddy:rw"
      - "/srv/docker/ollama/caddy/share/:/usr/share/caddy:rw"
    env_file:
      - caddy.env
    healthcheck:
      test: ["CMD", "wget", "--spider", "-q", "https://ollama.lan.poldebra.me"]
      interval: 10s
      timeout: 30s
      retries: 5
      start_period: 90s  # grace period, e.g. for initial certificate issuance
    depends_on:
      app:
        condition: service_started
      tailscale:
        condition: service_healthy
    logging: *default_logging

networks:
  internal:
    ipam:
      config:
        - subnet: 172.23.0.0/24