-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathdocker-compose.yml
More file actions
130 lines (124 loc) · 3.94 KB
/
docker-compose.yml
File metadata and controls
130 lines (124 loc) · 3.94 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
---
# Docker Compose stack for SmartCache:
#   redis         - message broker / result backend for Celery
#   backend       - Django app served by Daphne (ASGI, WebSocket via Channels)
#   celery-worker - background task processing
#   celery-beat   - (optional, commented out) periodic task scheduler
#   frontend      - React app built from ./frontend, served on port 5173
services:
  # Redis - Message broker for Celery
  redis:
    image: redis:7-alpine
    container_name: smartcache-redis
    ports:
      - "6379:6379"
    volumes:
      - redis_data:/data
    # Gate dependent services on Redis actually answering PING,
    # not merely on the container having started.
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 10s
      timeout: 5s
      retries: 5

  # Django Backend (with Channels for WebSocket)
  backend:
    build:
      context: .
      dockerfile: Dockerfile
    container_name: smartcache-backend
    ports:
      - "8000:8000"
    environment:
      - DEBUG=True
      - SECRET_KEY=${SECRET_KEY:-dev-secret-key-change-in-production}
      - DATABASE_URL=sqlite:////app/db.sqlite3
      - REDIS_URL=redis://redis:6379/0
      - CELERY_BROKER_URL=redis://redis:6379/0
      - CELERY_RESULT_BACKEND=redis://redis:6379/0
      - ALLOWED_HOSTS=localhost,127.0.0.1,backend
      - CORS_ALLOWED_ORIGINS=http://localhost:5173,http://localhost:3000,http://frontend:5173
      # AWS S3 credentials (set in .env file)
      - AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID:-}
      - AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-}
      - AWS_S3_BUCKET_NAME=${AWS_S3_BUCKET_NAME:-}
      - AWS_S3_REGION=${AWS_S3_REGION:-us-east-1}
      # NewsAPI key for news sources
      - NEWSAPI_KEY=${NEWSAPI_KEY:-}
      # Ollama for AI agents (reaches the host's Ollama via host-gateway below)
      - OLLAMA_BASE_URL=http://host.docker.internal:11434
      - OLLAMA_MODEL=${OLLAMA_MODEL:-llama3.1}
    volumes:
      # Bind-mount the source tree for live-reload during development.
      - .:/app
      - static_volume:/app/staticfiles
      - media_volume:/app/media
    depends_on:
      redis:
        condition: service_healthy
    # Migrate and collect static assets before starting the ASGI server.
    command: >
      sh -c "python manage.py migrate &&
             python manage.py collectstatic --noinput &&
             daphne -b 0.0.0.0 -p 8000 smartcache.asgi:application"
    extra_hosts:
      - "host.docker.internal:host-gateway"

  # Celery Worker - Background task processing
  celery-worker:
    build:
      context: .
      dockerfile: Dockerfile
    container_name: smartcache-celery-worker
    environment:
      - DEBUG=True
      - SECRET_KEY=${SECRET_KEY:-dev-secret-key-change-in-production}
      - DATABASE_URL=sqlite:////app/db.sqlite3
      - REDIS_URL=redis://redis:6379/0
      - CELERY_BROKER_URL=redis://redis:6379/0
      - CELERY_RESULT_BACKEND=redis://redis:6379/0
      - AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID:-}
      - AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-}
      - AWS_S3_BUCKET_NAME=${AWS_S3_BUCKET_NAME:-}
      - AWS_S3_REGION=${AWS_S3_REGION:-us-east-1}
      - NEWSAPI_KEY=${NEWSAPI_KEY:-}
      - OLLAMA_BASE_URL=http://host.docker.internal:11434
      - OLLAMA_MODEL=${OLLAMA_MODEL:-llama3.1}
    volumes:
      - .:/app
      - media_volume:/app/media
    depends_on:
      redis:
        condition: service_healthy
      # NOTE(review): waits only for the backend container to start, not for
      # migrations to finish - the worker may briefly race the schema.
      backend:
        condition: service_started
    command: celery -A smartcache worker --loglevel=info --concurrency=2
    extra_hosts:
      - "host.docker.internal:host-gateway"

  # Celery Beat - Scheduled task scheduler (OPTIONAL - disable for manual control)
  # Uncomment if you want automatic hourly ETL runs
  # celery-beat:
  #   build:
  #     context: .
  #     dockerfile: Dockerfile
  #   container_name: smartcache-celery-beat
  #   environment:
  #     - DEBUG=True
  #     - SECRET_KEY=${SECRET_KEY:-dev-secret-key-change-in-production}
  #     - DATABASE_URL=sqlite:////app/db.sqlite3
  #     - REDIS_URL=redis://redis:6379/0
  #     - CELERY_BROKER_URL=redis://redis:6379/0
  #     - CELERY_RESULT_BACKEND=redis://redis:6379/0
  #   volumes:
  #     - .:/app
  #   depends_on:
  #     redis:
  #       condition: service_healthy
  #     backend:
  #       condition: service_started
  #   command: celery -A smartcache beat --loglevel=info

  # React Frontend
  frontend:
    build:
      context: ./frontend
      dockerfile: Dockerfile
    container_name: smartcache-frontend
    ports:
      # Host 5173 -> container 80 (static build served by the image's web server)
      - "5173:80"
    environment:
      # Browser-facing URLs, hence localhost rather than the service names.
      - VITE_API_URL=http://localhost:8000
      - VITE_WS_URL=ws://localhost:8000
    depends_on:
      - backend

# Named volumes persist Redis data and Django static/media across restarts.
volumes:
  redis_data:
  static_volume:
  media_volume: