ai-hackaton-backend/docker-compose.yml


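# Docker Compose stack for the HR AI hackathon backend: PostgreSQL, Redis,
# the FastAPI backend, a Celery worker, a Caddy reverse proxy with automatic
# HTTPS, and the frontend.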
services:
  # PostgreSQL Database
  postgres:
    image: postgres:15
    environment:
      POSTGRES_DB: hr_ai
      POSTGRES_USER: hr_user
      POSTGRES_PASSWORD: hr_password
    volumes:
      - postgres_data:/var/lib/postgresql/data
    ports:
      - "5432:5432"
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U hr_user -d hr_ai"]
      interval: 30s
      timeout: 10s
      retries: 5

  # Redis for Celery and caching
  redis:
    image: redis:7-alpine
    ports:
      - "6379:6379"
    volumes:
      - redis_data:/data
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 30s
      timeout: 10s
      retries: 3

  # HR AI Backend
  backend:
    image: cr.yandex/crp9p5rtbnbop36duusi/hr-ai-backend:latest
    expose:
      - "8000"
    env_file:
      - .env
    environment:
      - DATABASE_URL=postgresql+asyncpg://hr_user:hr_password@postgres:5432/hr_ai
      - REDIS_CACHE_URL=redis
      - REDIS_CACHE_PORT=6379
      - REDIS_CACHE_DB=0
      - LIVEKIT_URL=wss://hackaton-eizc9zqk.livekit.cloud
      - LIVEKIT_API_KEY=APIeGb3j9Lwy6mK
      - LIVEKIT_API_SECRET=7nb0gKY97sZ42RneonSnsXwf3RRqL4c1JB6zgAyhqES
      - APP_ENV=development
      - DEBUG=true
    command: ["uv", "run", "fastapi", "dev", "main.py", "--host", "0.0.0.0", "--port", "8000"]
    volumes:
      - ./agent_commands:/tmp/agent_commands
      - backend_uploads:/app/uploads
      - shared_tmp:/tmp
    depends_on:
      postgres:
        condition: service_healthy
      redis:
        condition: service_healthy
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
      interval: 30s
      timeout: 10s
      retries: 3

  # Celery worker
  celery:
    image: cr.yandex/crp9p5rtbnbop36duusi/hr-ai-backend:latest
    env_file:
      - .env
    environment:
      - DATABASE_URL=postgresql+asyncpg://hr_user:hr_password@postgres:5432/hr_ai
      - REDIS_CACHE_URL=redis
      - REDIS_CACHE_PORT=6379
      - REDIS_CACHE_DB=0
      - LIVEKIT_URL=wss://hackaton-eizc9zqk.livekit.cloud
      - LIVEKIT_API_KEY=APIeGb3j9Lwy6mK
      - LIVEKIT_API_SECRET=7nb0gKY97sZ42RneonSnsXwf3RRqL4c1JB6zgAyhqES
      - APP_ENV=development
      - DEBUG=true
    container_name: celery
    restart: always
    command: ["uv", "run", "celery", "-A", "celery_worker.celery_app", "worker", "--loglevel=info"]
    volumes:
      - shared_tmp:/tmp
    depends_on:
      postgres:
        condition: service_healthy
      redis:
        condition: service_healthy

  # Caddy reverse proxy with automatic HTTPS
  caddy:
    image: caddy:2-alpine
    ports:
      - "80:80"
      - "443:443"
    volumes:
      - ./Caddyfile:/etc/caddy/Caddyfile:ro
      - caddy_data:/data
      - caddy_config:/config
    depends_on:
      - backend
    environment:
      - DOMAIN=${DOMAIN:-localhost}
    restart: unless-stopped

  # Frontend
  frontend:
    image: cr.yandex/crp9p5rtbnbop36duusi/hr-ai-frontend:latest
    platform: linux/amd64
    expose:
      - "3000"
    environment:
      - NODE_ENV=production
    restart: unless-stopped
    volumes:
      - ./.env.local:/app/.env.local:ro

volumes:
  postgres_data:
  redis_data:
  backend_uploads:
  shared_tmp:
  caddy_data:
  caddy_config:
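
# Usage sketch (assumptions, not defined in this file): with Docker Compose v2
# the stack can be started from this directory with `docker compose up -d`;
# postgres and redis come up first, and their healthchecks gate backend and
# celery via `condition: service_healthy`. The referenced ./Caddyfile, .env and
# ./.env.local are expected to sit next to this file but are not shown here.
# For example, a minimal .env could contain just `DOMAIN=hr.example.com`
# (hypothetical value), assuming the Caddyfile uses the {$DOMAIN} placeholder
# as its site address instead of the localhost fallback.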