networks:
  assistant-net:
    driver: bridge

services:
  # anthropic-proxy: OAuth proxy for Anthropic API
  # Internal only - accessed by auth-adapter via Docker network
  # For OAuth setup, use SSH tunnel: ssh -L 4001:localhost:4001 root@SERVER
  anthropic-proxy:
    build:
      context: .
      dockerfile: Dockerfile.anthropic-proxy
    # No external port exposure - internal Docker network only
    environment:
      - PORT=4001
      - SESSION_SECRET=${ANTHROPIC_PROXY_SESSION_SECRET}
      - CLIENT_ID=9d1c250a-e61b-44d9-88ed-5944d1962f5e
      - REDIRECT_URI=https://console.anthropic.com/oauth/code/callback
      - OAUTH_BASE_URL=https://claude.ai
      - API_BASE_URL=https://api.anthropic.com/v1
    volumes:
      - anthropic-proxy-data:/root/.local/share/anthropic-proxy
    networks:
      - assistant-net
    restart: unless-stopped
    healthcheck:
      test: ['CMD', 'curl', '-f', 'http://localhost:4001/health']
      interval: 30s
      start_period: 10s
      # NOTE(review): start_interval requires Docker Engine 25+ / recent Compose
      start_interval: 2s
      timeout: 10s
      retries: 3

  # Auth adapter: translates Bearer tokens to x-api-key headers
  # LiteLLM's os.environ/ substitution doesn't work in extra_headers
  # Internal only - accessed by LiteLLM via Docker network
  auth-adapter:
    image: oven/bun:latest
    working_dir: /app
    command: ['bun', 'run', 'src/auth-adapter.ts']
    # No external port exposure - internal Docker network only
    environment:
      - ANTHROPIC_PROXY_INTERNAL_URL=http://anthropic-proxy:4001
      - AUTH_ADAPTER_PORT=4002
    volumes:
      - ./src/auth-adapter.ts:/app/src/auth-adapter.ts:ro
    networks:
      - assistant-net
    restart: unless-stopped
    depends_on:
      anthropic-proxy:
        condition: service_healthy
    healthcheck:
      test: ['CMD', 'bun', '-e', "fetch('http://localhost:4002/health').then(r => process.exit(r.ok ? 0 : 1))"]
      interval: 30s
      start_period: 10s
      start_interval: 2s
      timeout: 10s
      retries: 3

  # LiteLLM: OpenAI-compatible API that proxies to auth-adapter -> anthropic-proxy
  # Internal only - accessed by Letta and app via Docker network
  litellm:
    # Using specific version that includes fix for tools=None bug
    # https://github.com/BerriAI/litellm/commit/7c2e2111c0cc3372ca0ce911d0b6d45c22794d7f
    image: ghcr.io/berriai/litellm:litellm_embedding_header_forwarding-v1.80.9.dev6
    # No external port exposure - internal Docker network only
    env_file: .env
    volumes:
      - ./litellm-config.yaml:/app/config.yaml:ro
    command: ['--config', '/app/config.yaml', '--port', '4000']
    networks:
      - assistant-net
    restart: unless-stopped
    depends_on:
      auth-adapter:
        condition: service_healthy
    healthcheck:
      test: ['CMD', 'python', '-c', "import urllib.request; urllib.request.urlopen('http://localhost:4000/health')"]
      interval: 30s
      start_period: 10s
      start_interval: 2s
      timeout: 10s
      retries: 3

  # Letta: Agent framework with memory
  # Access via Tailscale: http://TAILSCALE_IP:8283
  # Or via Caddy (if configured): https://letta.yourdomain.com
  letta:
    image: letta/letta:latest
    ports:
      # Quoted to avoid YAML 1.1 sexagesimal parsing of host:container pairs
      - '8283:8283'
    environment:
      # Use LiteLLM as OpenAI-compatible endpoint for Claude models
      - OPENAI_API_BASE=http://litellm:4000
      - OPENAI_API_KEY=${ANTHROPIC_PROXY_SESSION_ID}
      # Password protection for remote access via Letta Cloud ADE
      # ${VAR:+true} expands to "true" only when LETTA_SERVER_PASSWORD is set
      - SECURE=${LETTA_SERVER_PASSWORD:+true}
      - LETTA_SERVER_PASSWORD=${LETTA_SERVER_PASSWORD:-}
    volumes:
      - letta-data:/var/lib/postgresql/data
    networks:
      - assistant-net
    restart: unless-stopped
    depends_on:
      litellm:
        condition: service_healthy
    healthcheck:
      test: ['CMD', 'curl', '-f', 'http://localhost:8283/v1/health']
      interval: 30s
      # Longer start_period than the other services: Letta bundles Postgres
      # and needs more time before failed probes count against retries
      start_period: 30s
      start_interval: 2s
      timeout: 10s
      retries: 3

volumes:
  letta-data:
  anthropic-proxy-data: