# Docker Compose for running AI Service locally
# This file is optimized for local development and hybrid deployment

# NOTE(review): the top-level `version` key is obsolete under the Compose
# Specification (Docker Compose v2 ignores it with a warning); kept only for
# compatibility with legacy docker-compose v1.
version: '3.8'

services:
  # AI Service - runs on local machine
  ai-service:
    build:
      context: ./ai-service
      dockerfile: Dockerfile
    container_name: lexilingo-ai-local
    ports:
      - "8001:8001"
    environment:
      # Gemini API Key for conversational AI
      - GEMINI_API_KEY=${GEMINI_API_KEY}
      # Model cache directory
      - MODEL_CACHE_DIR=/app/models
      # Environment
      - ENVIRONMENT=production
      - LOG_LEVEL=INFO
      # Dedicated AI Redis (isolated from other services)
      - REDIS_HOST=redis-ai
      - REDIS_PORT=6379
      - REDIS_DB=0
      - REDIS_URL=redis://redis-ai:6379/0
      # Performance tuning
      - WORKERS=1
      - TIMEOUT=300
    volumes:
      # Cache downloaded models to avoid re-downloading
      - ./ai-service/models:/app/models
      - ./ai-service/data:/app/data
      - ./logs:/app/logs
    restart: unless-stopped
    # Legacy (v2 file-format) memory caps, honored by docker-compose v1.
    # NOTE(review): duplicates deploy.resources.limits.memory below — keep the
    # two values in sync when adjusting.
    mem_limit: 2g
    memswap_limit: 2g
    # Resource limits (adjust based on your machine).
    # Honored by the `docker compose` v2 CLI and in swarm mode.
    deploy:
      resources:
        limits:
          cpus: '2.0'  # Use 2 CPU cores
          memory: 2G  # Max 2GB RAM
        reservations:
          cpus: '1.0'
          memory: 1G  # Reserve 1GB RAM
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8001/health"]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 60s  # grace period before failures count (slow first start)
    networks:
      - lexilingo-network
    depends_on:
      redis-ai:
        condition: service_started

  # Redis cache dedicated for AI
  redis-ai:
    image: redis:7-alpine
    container_name: lexilingo-redis-ai-local
    ports:
      # Host 6381 -> container 6379 (leaves the default Redis port free on the host)
      - "6381:6379"
    volumes:
      - redis-ai-data:/data
    command: redis-server --appendonly yes --maxmemory 512mb --maxmemory-policy allkeys-lfu
    restart: unless-stopped
    mem_limit: 256m
    memswap_limit: 256m
    networks:
      - lexilingo-network

networks:
  lexilingo-network:
    driver: bridge

volumes:
  redis-ai-data:
    driver: local

# Usage:
# 1. Set GEMINI_API_KEY in .env file
# 2. Run: docker-compose -f docker-compose.local.yml up -d
# 3. Check logs: docker-compose -f docker-compose.local.yml logs -f ai-service
# 4. Stop: docker-compose -f docker-compose.local.yml down