-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathdocker-compose.yml
More file actions
78 lines (74 loc) · 2.64 KB
/
docker-compose.yml
File metadata and controls
78 lines (74 loc) · 2.64 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
# Engram — Server Stack
#
# Provides: PostgreSQL + pgvector, Engram Server (API + MCP SSE on single port)
# Client (hooks + MCP proxy) runs locally on each workstation.
#
# Usage:
#   cp .env.example .env   # edit with your settings
#   docker compose up -d
#
# See docs/DEPLOYMENT.md for full setup instructions.

services:
  # PostgreSQL with the pgvector extension — primary datastore for Engram.
  postgres:
    image: pgvector/pgvector:pg17
    environment:
      POSTGRES_DB: engram
      POSTGRES_USER: engram
      # Quoted so an unusual password expansion can't be re-typed by YAML.
      POSTGRES_PASSWORD: "${POSTGRES_PASSWORD:-engram}"
    volumes:
      - pgdata:/var/lib/postgresql/data
    ports:
      - "${POSTGRES_PORT:-5432}:5432"
    healthcheck:
      # Gate dependent services on the DB actually accepting connections.
      test: ["CMD-SHELL", "pg_isready -U engram -d engram"]
      interval: 10s
      timeout: 5s
      retries: 5
    restart: unless-stopped

  # Worker with integrated MCP SSE (single port, single process)
  server:
    image: ghcr.io/thebtf/engram:main
    # To build locally instead: docker compose build server
    build:
      context: .
      target: server
    ports:
      - "${WORKER_PORT:-37777}:37777"
    environment:
      # In-network DSN: talks to the `postgres` service by its Compose DNS name.
      DATABASE_DSN: "postgres://engram:${POSTGRES_PASSWORD:-engram}@postgres:5432/engram?sslmode=disable"
      ENGRAM_WORKER_HOST: "0.0.0.0"
      ENGRAM_WORKER_PORT: "37777"
      ENGRAM_API_TOKEN: "${API_TOKEN:-}"
      # Embedding backend (OpenAI-compatible); all values come from .env.
      ENGRAM_EMBEDDING_PROVIDER: "${EMBEDDING_PROVIDER:-openai}"
      ENGRAM_EMBEDDING_BASE_URL: "${EMBEDDING_BASE_URL:-}"
      ENGRAM_EMBEDDING_API_KEY: "${EMBEDDING_API_KEY:-}"
      ENGRAM_EMBEDDING_MODEL_NAME: "${EMBEDDING_MODEL_NAME:-}"
      ENGRAM_EMBEDDING_DIMENSIONS: "${EMBEDDING_DIMENSIONS:-4096}"
      ENGRAM_EMBEDDING_TRUNCATE: "${EMBEDDING_TRUNCATE:-true}"
      # LLM for observation extraction (OpenAI-compatible).
      # Nested defaults: fall back to the embedding endpoint/key when unset.
      ENGRAM_LLM_URL: "${ENGRAM_LLM_URL:-${EMBEDDING_BASE_URL:-}}"
      ENGRAM_LLM_API_KEY: "${ENGRAM_LLM_API_KEY:-${EMBEDDING_API_KEY:-}}"
      ENGRAM_LLM_MODEL: "${ENGRAM_LLM_MODEL:-gpt-4o-mini}"
      # Optional: FalkorDB graph backend
      ENGRAM_GRAPH_PROVIDER: "${GRAPH_PROVIDER:-}"
      ENGRAM_FALKORDB_ADDR: "${FALKORDB_ADDR:-}"
      ENGRAM_FALKORDB_PASSWORD: "${FALKORDB_PASSWORD:-}"
      ENGRAM_FALKORDB_GRAPH_NAME: "${FALKORDB_GRAPH_NAME:-engram}"
    depends_on:
      postgres:
        # Wait for the healthcheck above, not merely container start.
        condition: service_healthy
    restart: unless-stopped

  # Optional: FalkorDB graph database for multi-hop relation traversal.
  # Uncomment to enable graph-augmented search and relation exploration.
  # Then set GRAPH_PROVIDER=falkordb and FALKORDB_ADDR=falkordb:6379 in .env
  #
  # falkordb:
  #   image: falkordb/falkordb:latest
  #   ports:
  #     - "${FALKORDB_PORT:-6379}:6379"
  #   volumes:
  #     - falkordb_data:/data
  #   restart: unless-stopped

volumes:
  pgdata:
  # falkordb_data: