# Development Docker Compose stack: Postgres, NestJS server, Vite client,
# queue worker, docs site, and an optional CPU llama.cpp inference server
# (enabled via the `docker-llama` profile) with a one-shot model downloader.
services:
  db:
    image: postgres:16-alpine
    environment:
      POSTGRES_USER: ${POSTGRES_USER:-cv}
      POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-cv}
      POSTGRES_DB: ${POSTGRES_DB:-cv}
    ports:
      # Quoted to avoid YAML sexagesimal/number parsing of "host:container".
      - "${DB_PORT:-5432}:5432"
    volumes:
      - db-data:/var/lib/postgresql/data
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER:-cv} -d ${POSTGRES_DB:-cv}"]
      interval: 10s
      timeout: 5s
      retries: 3

  server:
    build:
      context: .
      dockerfile: .docker/server.Dockerfile
      target: development
      additional_contexts:
        # PROJECT_Q_PATH is mandatory — the `:?` form aborts with this message
        # when it is unset or empty.
        project-q: ${PROJECT_Q_PATH:?Set PROJECT_Q_PATH to your local project-q checkout}
        # NOTE(review): the fallback is a machine-specific path on one
        # developer's laptop — other machines must set NEST_SERVICE_LOCATOR_PATH.
        nest-service-locator: ${NEST_SERVICE_LOCATOR_PATH:-/Users/niels/Developer/riotbyte/nest-service-locator}
    environment:
      PORT: ${SERVER_PORT:-3000}
      # NOTE(review): dev-only fallback secrets below (JWT_SECRET,
      # ENCRYPTION_KEY) — real values must come from the environment in any
      # non-local deployment.
      JWT_SECRET: ${JWT_SECRET:-your-super-secret-jwt-key-here}
      JWT_ACCESS_TOKEN_EXPIRY: ${JWT_ACCESS_TOKEN_EXPIRY:-15m}
      JWT_REFRESH_TOKEN_EXPIRY: ${JWT_REFRESH_TOKEN_EXPIRY:-7d}
      DATABASE_URL: ${DATABASE_URL:-postgresql://cv:cv@db:5432/cv}
      POSTGRES_USER: ${POSTGRES_USER:-cv}
      POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-cv}
      POSTGRES_DB: ${POSTGRES_DB:-cv}
      ENCRYPTION_KEY: ${ENCRYPTION_KEY:-dev-encryption-key-32-chars-long!}
      RESEND_API_KEY: ${RESEND_API_KEY:-}
      # Default targets a llama.cpp server running natively on the host;
      # override with http://llama:8080 when using the docker-llama profile.
      LLAMA_URL: ${LLAMA_URL:-http://host.docker.internal:8080}
      AI_TIMEOUT: ${AI_TIMEOUT:-300000}
      AI_MAX_TOKENS: ${AI_MAX_TOKENS:-8192}
      PDF_OUTPUT_DIR: /app/pdf-output
    depends_on:
      db:
        condition: service_healthy
    ports:
      - "${SERVER_PORT:-3000}:3000"
    volumes:
      # Bind-mount only src/ trees for hot reload; node_modules stay in-image.
      - ./apps/server/src:/app/apps/server/src
      - ./apps/server/prisma:/app/apps/server/prisma
      - ./packages/ai-parser/src:/app/packages/ai-parser/src
      - ./packages/ai-provider/src:/app/packages/ai-provider/src
      - ./packages/auth/src:/app/packages/auth/src
      - ./packages/cv-renderer/src:/app/packages/cv-renderer/src
      - ./packages/file-upload/src:/app/packages/file-upload/src
      - ./packages/system/src:/app/packages/system/src
      # Read-only view of the worker's PDF output (worker owns the writes).
      - worker-output:/app/pdf-output:ro
    command: sh -c "cd /app/apps/server && pnpm prisma generate && pnpm prisma:deploy && pnpm dev"
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:3000/health"]
      interval: 15s
      timeout: 5s
      retries: 3
      start_period: 30s

  client:
    build:
      context: .
      dockerfile: .docker/client.Dockerfile
      target: development
    environment:
      # Browser-facing URL (host network) vs. in-network proxy target.
      VITE_SERVER_URL: ${VITE_SERVER_URL:-http://localhost:3000}
      VITE_PROXY_TARGET: http://server:3000
      VITE_DOCS_URL: ${VITE_DOCS_URL:-http://localhost:3001}
      GRAPHQL_SCHEMA_URL: ${GRAPHQL_SCHEMA_URL:-http://server:3000/graphql}
    depends_on:
      server:
        condition: service_healthy
    ports:
      - "${CLIENT_PORT:-5173}:5173"
    volumes:
      - ./apps/client/src:/app/apps/client/src
      - ./packages/routing/src:/app/packages/routing/src
      - ./packages/ui/src:/app/packages/ui/src
      - ./packages/system/src:/app/packages/system/src
    # codegen is best-effort (`|| true`) so the dev server still starts when
    # the GraphQL schema endpoint is not reachable yet.
    command: sh -c "cd /app/apps/client && (pnpm codegen || true) && pnpm dev"
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:5173"]
      interval: 15s
      timeout: 5s
      retries: 3
      start_period: 20s

  worker:
    build:
      context: .
      dockerfile: .docker/worker.Dockerfile
      target: development
      additional_contexts:
        project-q: ${PROJECT_Q_PATH:?Set PROJECT_Q_PATH to your local project-q checkout}
        # NOTE(review): machine-specific default path — see server service note.
        nest-service-locator: ${NEST_SERVICE_LOCATOR_PATH:-/Users/niels/Developer/riotbyte/nest-service-locator}
    environment:
      DATABASE_URL: ${DATABASE_URL:-postgresql://cv:cv@db:5432/cv}
      QUEUE_SCHEMA: ${QUEUE_SCHEMA:-queue}
      QUEUE_NAME: ${QUEUE_NAME:-default}
      POLL_INTERVAL_MS: ${POLL_INTERVAL_MS:-1000}
      PDF_OUTPUT_DIR: /app/pdf-output
      HEARTBEAT_FILE_PATH: /tmp/worker-heartbeat
    depends_on:
      db:
        condition: service_healthy
    volumes:
      - ./apps/worker/src:/app/apps/worker/src
      - worker-output:/app/pdf-output
    restart: unless-stopped
    healthcheck:
      # Healthy iff the heartbeat file was touched within the last minute.
      test: ["CMD-SHELL", "find /tmp/worker-heartbeat -mmin -1 | grep -q ."]
      interval: 30s
      timeout: 5s
      retries: 3
      start_period: 10s

  docs:
    build:
      context: .
      dockerfile: .docker/docs.Dockerfile
      target: development
    environment:
      VITE_CLIENT_URL: ${VITE_CLIENT_URL:-http://localhost:5173}
      VITE_SERVER_URL: ${VITE_SERVER_URL:-http://localhost:3000}
    ports:
      - "${DOCS_PORT:-3001}:3001"
    volumes:
      - ./apps/docs/src:/app/apps/docs/src
      - ./apps/docs/content:/app/apps/docs/content
      - ./packages/routing/src:/app/packages/routing/src
      - ./packages/ui/src:/app/packages/ui/src
      - ./packages/system/src:/app/packages/system/src
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:3001"]
      interval: 15s
      timeout: 5s
      retries: 3
      start_period: 20s

  # One-shot job: fetches the GGUF model into ./ai-models if missing.
  # `$$` escapes `$` from compose interpolation so the shell sees $MODEL_FILE.
  model-download:
    image: alpine:latest
    volumes:
      - ./ai-models:/models
    command: |
      sh -c '
        MODEL_FILE="/models/mistral-7b-instruct-v0.2.Q4_K_M.gguf"
        if [ -f "$$MODEL_FILE" ]; then
          echo "Model already exists, skipping download"
          exit 0
        fi
        echo "Downloading Mistral 7B model (~4.4GB)..."
        apk add --no-cache wget
        wget -c -O "$$MODEL_FILE" \
          "https://huggingface.co/TheBloke/Mistral-7B-Instruct-v0.2-GGUF/resolve/main/mistral-7b-instruct-v0.2.Q4_K_M.gguf"
        echo "Model download complete"
      '

  llama:
    # CPU-only llama.cpp server (slow: ~6 tokens/sec)
    # For GPU acceleration:
    #   - macOS: Use hybrid mode with native Metal (see DOCKER_GPU.md)
    #   - Linux: Use docker-compose.nvidia.yml for NVIDIA GPU support
    image: ghcr.io/ggml-org/llama.cpp:server
    profiles:
      - docker-llama
    ports:
      - "${LLAMA_PORT:-8080}:8080"
    volumes:
      - ./ai-models:/models
    # -ngl 0 = CPU only (offload zero layers to GPU)
    command: -m /models/mistral-7b-instruct-v0.2.Q4_K_M.gguf --port 8080 --host 0.0.0.0 -c 16384 -ngl 0
    depends_on:
      model-download:
        condition: service_completed_successfully
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8080/health"]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 30s

volumes:
  db-data:
  worker-output: