forked from Chevron7Locked/kima-hub
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathDockerfile
More file actions
359 lines (308 loc) · 13.3 KB
/
Dockerfile
File metadata and controls
359 lines (308 loc) · 13.3 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
# Lidify All-in-One Docker Image (Hardened)
# Contains: Backend, Frontend, PostgreSQL, Redis, Audio Analyzer (Essentia AI)
# Usage: docker run -d -p 3030:3030 -v /path/to/music:/music lidify/lidify
# NOTE(review): node:20-slim is a floating tag; pinning a minor version or a
# digest (node:20.x.y-slim@sha256:...) would make builds reproducible.
FROM node:20-slim
# Add the PostgreSQL 16 repository (Debian Bookworm only has PG15 by default)
# and install all system dependencies in a SINGLE layer. The original split
# this across two RUNs, which permanently baked the apt list files (~tens of
# MB) into the first layer — a `rm -rf` in a later layer cannot shrink an
# already-committed one. Keeping update/install/cleanup together also avoids
# the stale-apt-cache rebuild bug (hadolint DL3009).
RUN apt-get update && apt-get install -y --no-install-recommends \
    gnupg lsb-release curl ca-certificates && \
    echo "deb http://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list && \
    curl -fsSL https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor -o /etc/apt/trusted.gpg.d/postgresql.gpg && \
    apt-get update && \
    apt-get install -y --no-install-recommends \
    postgresql-16 \
    postgresql-contrib-16 \
    redis-server \
    supervisor \
    ffmpeg \
    tini \
    openssl \
    bash \
    gosu \
    # Python for audio analyzer
    python3 \
    python3-pip \
    python3-numpy \
    # Build tools (needed for some Python packages; purged in the hardening step)
    build-essential \
    python3-dev \
    && rm -rf /var/lib/apt/lists/*
# Create application, data, and runtime directories up front.
# /run/postgresql holds PostgreSQL's unix socket; the server refuses to start
# unless it and the data directory are owned by the postgres user.
RUN mkdir -p /app/backend /app/frontend /app/audio-analyzer /app/models \
/data/postgres /data/redis /run/postgresql /var/log/supervisor \
&& chown -R postgres:postgres /data/postgres /run/postgresql
# ============================================
# AUDIO ANALYZER SETUP (Essentia AI)
# ============================================
WORKDIR /app/audio-analyzer
# Install Python dependencies for audio analysis.
# --break-system-packages is required on Debian Bookworm (PEP 668) to install
# into the system interpreter. essentia-tensorflow pulls in a TensorFlow
# runtime, which dominates this layer's size.
# NOTE(review): versions are unpinned — pin them for reproducible builds.
RUN pip3 install --no-cache-dir --break-system-packages \
essentia-tensorflow \
redis \
psycopg2-binary
# Download Essentia ML models (~200MB total) - these enable Enhanced vibe matching.
# curl's -f/--fail flag makes the build abort on HTTP errors; without it a
# 404/error page would be silently saved as a .pb "model" and only fail at
# inference time inside the running container.
# TODO(review): upstream publishes no checksums at these URLs; adding pinned
# sha256 verification would make the downloads tamper-evident.
RUN echo "Downloading Essentia ML models for Enhanced vibe matching..." && \
    # Base embedding model (required for all predictions)
    curl -fL --progress-bar -o /app/models/discogs-effnet-bs64-1.pb \
    "https://essentia.upf.edu/models/feature-extractors/discogs-effnet/discogs-effnet-bs64-1.pb" && \
    # Classification heads all share the same URL layout: moods, arousal and
    # valence (key for vibe matching), danceability, voice/instrumental.
    # `|| exit 1` propagates a mid-loop failure (a for-loop's own status only
    # reflects its final iteration).
    for m in mood_happy mood_sad mood_relaxed mood_aggressive mood_arousal mood_valence danceability voice_instrumental; do \
    curl -fL --progress-bar -o /app/models/${m}-discogs-effnet-1.pb \
    "https://essentia.upf.edu/models/classification-heads/${m}/${m}-discogs-effnet-1.pb" || exit 1; \
    done && \
    echo "ML models downloaded successfully" && \
    ls -lh /app/models/
# Copy audio analyzer script
COPY services/audio-analyzer/analyzer.py /app/audio-analyzer/
# ============================================
# BACKEND BUILD
# ============================================
WORKDIR /app/backend
# Copy dependency manifests before the source so the npm layer stays cached
# until package*.json actually changes.
COPY backend/package*.json ./
COPY backend/prisma ./prisma/
# Build-time sanity check: `ls` fails the build if the migrations directory
# was not copied (e.g. a bad .dockerignore entry).
RUN echo "=== Migrations copied ===" && ls -la prisma/migrations/ && echo "=== End migrations ==="
# Dev dependencies are intentionally kept: the backend is started with
# `npx tsx src/index.ts` at runtime (see the supervisor config below).
RUN npm ci && npm cache clean --force
RUN npx prisma generate
# Copy backend source
COPY backend/src ./src
COPY backend/docker-entrypoint.sh ./
COPY backend/healthcheck.js ./healthcheck-backend.js
# Create log directory (cache will be in /data volume)
RUN mkdir -p /app/backend/logs
# ============================================
# FRONTEND BUILD
# ============================================
WORKDIR /app/frontend
# Copy frontend package files and install dependencies (manifests first for caching)
COPY frontend/package*.json ./
RUN npm ci && npm cache clean --force
# Copy frontend source and build
COPY frontend/ ./
# Build Next.js (production). NEXT_PUBLIC_API_URL is deliberately empty —
# presumably so the frontend issues same-origin/relative API calls; confirm
# against the frontend's API client config.
ENV NEXT_PUBLIC_API_URL=
RUN npm run build
# ============================================
# SECURITY HARDENING
# ============================================
# Remove dangerous tools and build dependencies AFTER all builds are complete
# Keep: bash (supervisor), gosu (postgres user switching), python3 (audio analyzer)
# NOTE(review): `rm -f` deletes the binaries but leaves the dpkg packages
# "installed"; `apt-get purge` would be cleaner, but --auto-remove might drag
# out shared deps (e.g. ca-certificates, needed by node for TLS) — confirm
# before changing. Also note these deletions do NOT shrink the image (earlier
# layers still contain the bytes); they only remove the tools from the final
# filesystem an attacker could use.
RUN apt-get purge -y --auto-remove build-essential python3-dev 2>/dev/null || true && \
rm -f /usr/bin/wget /bin/wget 2>/dev/null || true && \
rm -f /usr/bin/curl /bin/curl 2>/dev/null || true && \
rm -f /usr/bin/nc /bin/nc /usr/bin/ncat /usr/bin/netcat 2>/dev/null || true && \
rm -f /usr/bin/ftp /usr/bin/tftp /usr/bin/telnet 2>/dev/null || true && \
rm -rf /var/lib/apt/lists/*
# ============================================
# CONFIGURATION
# ============================================
WORKDIR /app
# Copy healthcheck script (probed by HEALTHCHECK at the bottom of this file)
COPY healthcheck-prod.js /app/healthcheck.js
# Create supervisord config - logs to stdout/stderr for Docker visibility.
# Every program logs to /dev/stdout//dev/stderr with maxbytes=0 (rotation off)
# so `docker logs` captures everything. Startup is staggered two ways:
# `priority` orders launches (postgres 10 -> redis 20 -> backend 30 ->
# frontend 40 -> analyzer 50) and the `sleep` prefixes give each dependency
# time to accept connections before its consumer starts.
# NOTE(review): backend, frontend and audio-analyzer have no `user=` line and
# therefore run as root; only postgres drops privileges. Consider dedicated
# non-root service users. The hard-coded lidify:lidify DB credentials below
# are container-internal (port 5432 is not exposed) but still worth rotating.
RUN cat > /etc/supervisor/conf.d/lidify.conf << 'EOF'
[supervisord]
nodaemon=true
logfile=/dev/null
logfile_maxbytes=0
pidfile=/var/run/supervisord.pid
user=root
[program:postgres]
command=/usr/lib/postgresql/16/bin/postgres -D /data/postgres
user=postgres
autostart=true
autorestart=true
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stderr
stderr_logfile_maxbytes=0
priority=10
[program:redis]
command=/usr/bin/redis-server --dir /data/redis --appendonly yes
autostart=true
autorestart=true
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stderr
stderr_logfile_maxbytes=0
priority=20
[program:backend]
command=/bin/bash -c "sleep 5 && cd /app/backend && npx tsx src/index.ts"
autostart=true
autorestart=true
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stderr
stderr_logfile_maxbytes=0
directory=/app/backend
priority=30
[program:frontend]
command=/bin/bash -c "sleep 10 && cd /app/frontend && npm start"
autostart=true
autorestart=true
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stderr
stderr_logfile_maxbytes=0
environment=NODE_ENV="production",BACKEND_URL="http://localhost:3006",PORT="3030"
priority=40
[program:audio-analyzer]
command=/bin/bash -c "sleep 15 && cd /app/audio-analyzer && python3 analyzer.py"
autostart=true
autorestart=true
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stderr
stderr_logfile_maxbytes=0
environment=DATABASE_URL="postgresql://lidify:lidify@localhost:5432/lidify",REDIS_URL="redis://localhost:6379",MUSIC_PATH="/music",BATCH_SIZE="10",SLEEP_INTERVAL="5"
priority=50
EOF
# Fix Windows line endings in supervisor config
RUN sed -i 's/\r$//' /etc/supervisor/conf.d/lidify.conf
# Create the container startup script. It performs one-time setup as root
# (permissions, PG init, migrations, secret generation), then execs
# supervisord, which becomes the long-lived PID-1 child of tini.
RUN cat > /app/start.sh << 'EOF'
#!/bin/bash
set -e
# Security check: Warn if running internal services as root
# Note: This container runs multiple services, some require root for initial setup
# but individual services (postgres, backend processes) run as non-root users
echo ""
echo "============================================================"
echo " Lidify - Premium Self-Hosted Music Server"
echo ""
echo " Features:"
echo " - AI-Powered Vibe Matching (Essentia ML)"
echo " - Smart Playlists & Mood Detection"
echo " - High-Quality Audio Streaming"
echo ""
echo " Security:"
echo " - Hardened container (no wget/curl/nc)"
echo " - Auto-generated encryption keys"
echo "============================================================"
echo ""
# Find PostgreSQL binaries (version may vary across image rebuilds)
PG_BIN=$(find /usr/lib/postgresql -name "bin" -type d | head -1)
if [ -z "$PG_BIN" ]; then
echo "ERROR: PostgreSQL binaries not found!"
exit 1
fi
echo "Using PostgreSQL from: $PG_BIN"
# Fix permissions on data directories (may have different UID from previous container)
echo "Fixing data directory permissions..."
chown -R postgres:postgres /data/postgres /run/postgresql 2>/dev/null || true
chmod 700 /data/postgres 2>/dev/null || true
# Clean up stale PID file if exists (left behind by an unclean shutdown)
rm -f /data/postgres/postmaster.pid 2>/dev/null || true
# Initialize PostgreSQL if not already done (PG_VERSION marks an initialized cluster)
if [ ! -f /data/postgres/PG_VERSION ]; then
echo "Initializing PostgreSQL database..."
gosu postgres $PG_BIN/initdb -D /data/postgres
# Configure PostgreSQL
# NOTE(review): md5 auth from any address; port 5432 is not published outside
# the container, but scram-sha-256 is the stronger default — changing it would
# require resetting existing password hashes, so confirm before switching.
echo "host all all 0.0.0.0/0 md5" >> /data/postgres/pg_hba.conf
echo "listen_addresses='*'" >> /data/postgres/postgresql.conf
fi
# Start PostgreSQL temporarily to create database and user; -w waits until
# the server accepts connections. supervisord owns the long-lived instance.
gosu postgres $PG_BIN/pg_ctl -D /data/postgres -w start
# Create user and database if they don't exist (idempotent across restarts)
gosu postgres psql -tc "SELECT 1 FROM pg_roles WHERE rolname = 'lidify'" | grep -q 1 || \
gosu postgres psql -c "CREATE USER lidify WITH PASSWORD 'lidify';"
gosu postgres psql -tc "SELECT 1 FROM pg_database WHERE datname = 'lidify'" | grep -q 1 || \
gosu postgres psql -c "CREATE DATABASE lidify OWNER lidify;"
# Run Prisma migrations
cd /app/backend
export DATABASE_URL="postgresql://lidify:lidify@localhost:5432/lidify"
echo "Running Prisma migrations..."
ls -la prisma/migrations/ || echo "No migrations directory!"
# Check if _prisma_migrations table exists (indicates previous Prisma setup);
# -tAc prints a bare t/f, and any psql failure falls back to "f"
MIGRATIONS_EXIST=$(gosu postgres psql -d lidify -tAc "SELECT EXISTS (SELECT FROM information_schema.tables WHERE table_name = '_prisma_migrations')" 2>/dev/null || echo "f")
# Check if User table exists (indicates existing data from a pre-Prisma deploy)
USER_TABLE_EXIST=$(gosu postgres psql -d lidify -tAc "SELECT EXISTS (SELECT FROM information_schema.tables WHERE table_schema = 'public' AND table_name = 'User')" 2>/dev/null || echo "f")
# Three migration scenarios, checked in order:
# 1) migration history present -> plain deploy
# 2) data but no history       -> baseline the init migration, then deploy
# 3) fresh database            -> deploy from scratch
# Failures are warned about but never abort startup, so a running instance
# with a resolvable migration issue is preferred over a crash loop.
if [ "$MIGRATIONS_EXIST" = "t" ]; then
# Normal migration flow - migrations table exists
echo "Migration history found, running migrate deploy..."
npx prisma migrate deploy 2>&1 || {
echo "WARNING: Migration failed, but database preserved."
echo "You may need to manually resolve migration issues."
}
elif [ "$USER_TABLE_EXIST" = "t" ]; then
# Database has data but no migrations table - needs baseline
echo "Existing database detected without migration history."
echo "Creating baseline from current schema..."
# Mark the init migration as already applied (baseline)
npx prisma migrate resolve --applied 20251130000000_init 2>&1 || true
# Now run any subsequent migrations
npx prisma migrate deploy 2>&1 || {
echo "WARNING: Migration after baseline failed."
echo "Database preserved - check migration status manually."
}
else
# Fresh database - run migrations normally
echo "Fresh database detected, running initial migrations..."
npx prisma migrate deploy 2>&1 || {
echo "WARNING: Initial migration failed."
echo "Check database connection and schema."
}
fi
# Stop PostgreSQL (supervisord will start it)
gosu postgres $PG_BIN/pg_ctl -D /data/postgres -w stop
# Create persistent cache directories in /data volume
mkdir -p /data/cache/covers /data/cache/transcodes /data/secrets
# Load or generate persistent secrets (survive container recreation via /data)
if [ -f /data/secrets/session_secret ]; then
SESSION_SECRET=$(cat /data/secrets/session_secret)
echo "Loaded existing SESSION_SECRET"
else
SESSION_SECRET=$(openssl rand -hex 32)
echo "$SESSION_SECRET" > /data/secrets/session_secret
chmod 600 /data/secrets/session_secret
echo "Generated and saved new SESSION_SECRET"
fi
if [ -f /data/secrets/encryption_key ]; then
SETTINGS_ENCRYPTION_KEY=$(cat /data/secrets/encryption_key)
echo "Loaded existing SETTINGS_ENCRYPTION_KEY"
else
SETTINGS_ENCRYPTION_KEY=$(openssl rand -hex 32)
echo "$SETTINGS_ENCRYPTION_KEY" > /data/secrets/encryption_key
chmod 600 /data/secrets/encryption_key
echo "Generated and saved new SETTINGS_ENCRYPTION_KEY"
fi
# Write environment file for backend. Unquoted ENVEOF delimiter is deliberate:
# the $SESSION_SECRET / $SETTINGS_ENCRYPTION_KEY values expand here.
# NOTE(review): .env is written with default umask (likely world-readable)
# while the secret files are chmod 600 — consider chmod 600 on .env too.
cat > /app/backend/.env << ENVEOF
NODE_ENV=production
DATABASE_URL=postgresql://lidify:lidify@localhost:5432/lidify
REDIS_URL=redis://localhost:6379
PORT=3006
MUSIC_PATH=/music
TRANSCODE_CACHE_PATH=/data/cache/transcodes
SESSION_SECRET=$SESSION_SECRET
SETTINGS_ENCRYPTION_KEY=$SETTINGS_ENCRYPTION_KEY
ENVEOF
echo "Starting Lidify..."
# exec replaces this shell so supervisord receives signals directly from tini
exec /usr/bin/supervisord -c /etc/supervisor/supervisord.conf
EOF
# Fix Windows line endings (CRLF -> LF) and make executable
RUN sed -i 's/\r$//' /app/start.sh && chmod +x /app/start.sh
# Expose ports (documentation only — does not publish). 3030 is the frontend;
# the backend's 3006 stays container-internal (see supervisor BACKEND_URL).
EXPOSE 3030
# Health check using Node.js (wget/curl were removed in the hardening step)
HEALTHCHECK --interval=30s --timeout=10s --start-period=60s --retries=3 \
CMD ["node", "/app/healthcheck.js"]
# Volumes: music library (read by scanner/analyzer) and persistent state
# (postgres, redis, caches, generated secrets)
VOLUME ["/music", "/data"]
# Use tini as PID 1 for signal forwarding and zombie reaping across the
# supervisord process tree
ENTRYPOINT ["/usr/bin/tini", "--"]
CMD ["/app/start.sh"]