-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path.env.example
More file actions
77 lines (62 loc) · 2.65 KB
/
.env.example
File metadata and controls
77 lines (62 loc) · 2.65 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
# FactoryLM Environment Variables
# Copy this file to .env and fill in your values
# NEVER commit .env to git (it's in .gitignore)
# =============================================================================
# TELEGRAM BOTS
# =============================================================================
# Get tokens from @BotFather on Telegram
TELEGRAM_BOT_TOKEN= # Gus (@FACTORYLM_bot / MikesOPENCLAW)
PEPPER_BOT_TOKEN= # PEPPER (@Spicyclawd_bot)
FRIDAY_BOT_TOKEN= # FRIDAY (@FRIDAY_MCU_bot)
REMOTEME_BOT_TOKEN= # RemoteMe (@JarvisMIO_bot)
# =============================================================================
# OBSERVABILITY
# =============================================================================
# Sentry - Error tracking and performance monitoring
# Get DSN from https://sentry.io -> Settings -> Projects -> Client Keys
SENTRY_DSN=
# Honeycomb - Distributed tracing (optional)
HONEYCOMB_API_KEY=
# =============================================================================
# AI PROVIDERS
# =============================================================================
# Groq - Fast inference for Whisper and LLaMA
GROQ_API_KEY=
# Anthropic - Claude API
ANTHROPIC_API_KEY=
# OpenAI - GPT-4V for vision (optional)
OPENAI_API_KEY=
# NVIDIA Cosmos Reason 2
NVIDIA_COSMOS_API_KEY= # Cloud API key from build.nvidia.com
COSMOS_NIM_URL= # Self-hosted NIM container (e.g. http://runpod-ip:8000/v1)
# vLLM (Cosmos R2 on Vast.ai via SSH tunnel)
VLLM_URL=http://localhost:8000/v1/chat/completions
VLLM_API_KEY=dummy # vLLM doesn't require auth; the router needs a non-empty value
# DeepSeek
DEEPSEEK_API_KEY=
# OpenRouter (free tier)
OPENROUTER_API_KEY=
# Cerebras
CEREBRAS_API_KEY=
# =============================================================================
# INFRASTRUCTURE
# =============================================================================
# PLC Laptop (Factory I/O + Micro 820)
MATRIX_API=http://100.72.2.99:8000
MATRIX_URL=http://localhost:8000 # Local Matrix API (same machine)
DEMO_UI=http://100.72.2.99:8080
JARVIS_NODE=http://100.72.2.99:8765
# LiteLLM Proxy (replaces custom LLM router)
LITELLM_URL=http://localhost:4000
LITELLM_KEY=sk-factorylm
# =============================================================================
# MEDIA STORAGE
# =============================================================================
# v1: Local disk at ~/.openclaw/media-inbox/ (base64 inline for files smaller than 20 MB)
# v2 (future): Cloudflare R2 for persistent URLs
# R2_ACCOUNT_ID=
# R2_ACCESS_KEY_ID=
# R2_SECRET_ACCESS_KEY=
# R2_BUCKET_NAME=factorylm-media
# Environment
ENVIRONMENT=development