-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathdocker-compose.yml
More file actions
87 lines (86 loc) · 3.71 KB
/
docker-compose.yml
File metadata and controls
87 lines (86 loc) · 3.71 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
services:
  # ---------------------------------------------------------------------------
  # groundmemory MCP Server - primary workspace
  #
  # Usage:
  #   cp .env.example .env   # configure your embedding provider (optional)
  #   docker compose up -d
  #
  # MCP endpoint:   http://localhost:4242/mcp
  # Workspace data: ./data/default/
  # ---------------------------------------------------------------------------
  groundmemory:
    build:
      context: .
      # To include sentence-transformers (local offline embeddings, ~1 GB):
      # args:
      #   EXTRAS: local
    image: groundmemory:latest
    restart: unless-stopped
    ports:
      # Host 4242 -> container 4242. Kept quoted: unquoted colon-separated
      # digits can be misread as a base-60 number by YAML 1.1 parsers.
      - "4242:4242"
    volumes:
      # Workspace Markdown files and SQLite index are persisted here.
      # You can read and edit MEMORY.md, USER.md, etc. directly on the host.
      - ./data:/data
    env_file:
      # Copy .env.example to .env and set your values.
      # At minimum, GROUNDMEMORY_WORKSPACE controls which subdirectory is used.
      #
      # Embedding providers (set in .env - no rebuild required):
      #   BM25-only (default):   GROUNDMEMORY_EMBEDDING__PROVIDER=none
      #   OpenAI-compatible API: GROUNDMEMORY_EMBEDDING__PROVIDER=openai
      #                          GROUNDMEMORY_EMBEDDING__BASE_URL=http://...
      #                          GROUNDMEMORY_EMBEDDING__API_KEY=sk-...
      #                          GROUNDMEMORY_EMBEDDING__MODEL=text-embedding-3-small
      #   Local (sentence-transformers): requires rebuild with EXTRAS=local
      #
      # NOTE(review): the long-form entry below (path/required) needs a recent
      # Docker Compose (v2.24+); on older versions use the short form `- .env`.
      # `required: false` means a missing .env is not an error.
      - path: .env
        required: false
    # environment:
    #   GROUNDMEMORY_WORKSPACE: default
    # ---------------------------------------------------------------------------
    # Network access - commented out by default (local-only is the safe default).
    #
    # By default the server binds to 127.0.0.1 and only accepts connections from
    # the same machine. To allow access from other machines on your network:
    #
    # 1. Bind to all interfaces so Docker can route external traffic in:
    #      GROUNDMEMORY_MCP__HOST: "0.0.0.0"
    #
    # 2. Allow the Host header your client sends (your machine's LAN IP):
    #      GROUNDMEMORY_MCP__ALLOWED_HOSTS: "192.168.1.50:4242"
    #    (list multiple values separated by commas)
    #
    # 3. FORWARDED_ALLOW_IPS is NOT needed for plain LAN access (steps 1+2 above
    #    are sufficient). Only set it when a reverse proxy (nginx, Caddy, Traefik)
    #    sits in front of GroundMemory. Set it to the proxy's internal IP so
    #    uvicorn trusts the X-Forwarded-For headers that proxy sends:
    #      GROUNDMEMORY_MCP__FORWARDED_ALLOW_IPS: "172.17.0.1"
    #    (use "*" only when a trusted proxy exclusively controls all ingress)
    #
    # These variables belong under the `environment:` mapping — when enabling
    # any of them, uncomment the `environment:` key above as well.
    #
    # See DOCS.md - Network Access for the full explanation.
    # ---------------------------------------------------------------------------
    #   GROUNDMEMORY_MCP__HOST: "0.0.0.0"
    #   GROUNDMEMORY_MCP__ALLOWED_HOSTS: "192.168.1.50:4242"
    #   GROUNDMEMORY_MCP__FORWARDED_ALLOW_IPS: "172.17.0.1"

  # ---------------------------------------------------------------------------
  # Second workspace example - uncomment to run a second isolated workspace
  # on a different port (useful for separate projects or personas).
  #
  # MCP endpoint:   http://localhost:4243/mcp
  # Workspace data: ./data/personal/
  # ---------------------------------------------------------------------------
  # groundmemory-personal:
  #   build:
  #     context: .
  #   image: groundmemory:latest
  #   restart: unless-stopped
  #   ports:
  #     # Distinct host port (4243); inside the container the server still
  #     # listens on 4242.
  #     - "4243:4242"
  #   volumes:
  #     # Shares ./data with the primary service; isolation comes from the
  #     # GROUNDMEMORY_WORKSPACE subdirectory, not from a separate volume.
  #     - ./data:/data
  #   env_file:
  #     - path: .env
  #       required: false
  #   environment:
  #     GROUNDMEMORY_WORKSPACE: personal