diff --git a/src/app/api/media/jobs/plan/route.ts b/src/app/api/media/jobs/plan/route.ts index a9c61ebf..cb833c54 100644 --- a/src/app/api/media/jobs/plan/route.ts +++ b/src/app/api/media/jobs/plan/route.ts @@ -1,6 +1,6 @@ import { NextRequest } from 'next/server'; import { streamTextFromProvider } from '@/lib/text-generator'; -import { resolveProvider } from '@/lib/provider-resolver'; +import { resolveProvider, DEFAULT_MODEL_ID } from '@/lib/provider-resolver'; import fs from 'fs'; import type { PlanMediaJobRequest } from '@/types'; @@ -62,7 +62,7 @@ export async function POST(request: NextRequest) { }); // Preserve 'env' semantics (see onboarding route for rationale) const providerId = resolved.provider?.id || 'env'; - const modelId = resolved.upstreamModel || resolved.model || session?.model || 'claude-sonnet-4-20250514'; + const modelId = resolved.upstreamModel || resolved.model || session?.model || DEFAULT_MODEL_ID; // Read document content let docContent = body.docContent || ''; diff --git a/src/app/api/skills/search/route.ts b/src/app/api/skills/search/route.ts index f49e79d2..3ef2cc9a 100644 --- a/src/app/api/skills/search/route.ts +++ b/src/app/api/skills/search/route.ts @@ -1,5 +1,6 @@ import { NextResponse } from 'next/server'; import { getActiveProvider, getSetting } from '@/lib/db'; +import { CLAUDE_MODELS } from '@/lib/provider-resolver'; interface SkillInfo { name: string; @@ -12,12 +13,10 @@ interface SearchRequest { model?: string; } -// Model alias -> full model ID -const MODEL_MAP: Record<string, string> = { - sonnet: 'claude-sonnet-4-20250514', - opus: 'claude-opus-4-20250514', - haiku: 'claude-haiku-4-20250414', -}; +// Model alias -> full model ID (derived from central CLAUDE_MODELS) +const MODEL_MAP: Record<string, string> = Object.fromEntries( + Object.entries(CLAUDE_MODELS).map(([alias, m]) => [alias, m.id]) +); interface ApiConfig { supported: boolean; diff --git a/src/app/chat/page.tsx b/src/app/chat/page.tsx index a6d66aea..4034226d 100644 --- a/src/app/chat/page.tsx
+++ b/src/app/chat/page.tsx @@ -268,7 +268,7 @@ export default function NewChatPage() { try { const statusData = JSON.parse(event.data); if (statusData.session_id) { - setStatusText(`Connected (${statusData.model || 'claude'})`); + setStatusText(`Connected (${statusData.requested_model || statusData.model || 'claude'})`); setTimeout(() => setStatusText(undefined), 2000); } else if (statusData.notification) { setStatusText(statusData.message || statusData.title || undefined); diff --git a/src/hooks/useSSEStream.ts b/src/hooks/useSSEStream.ts index 14c39fc3..be278ee4 100644 --- a/src/hooks/useSSEStream.ts +++ b/src/hooks/useSSEStream.ts @@ -203,11 +203,12 @@ function handleSSEEvent( export async function consumeSSEStream( reader: ReadableStreamDefaultReader, callbacks: SSECallbacks, -): Promise<{ accumulated: string; tokenUsage: TokenUsage | null }> { +): Promise<{ accumulated: string; tokenUsage: TokenUsage | null; receivedDone: boolean }> { const decoder = new TextDecoder(); let buffer = ''; let accumulated = ''; let tokenUsage: TokenUsage | null = null; + let receivedDone = false; const wrappedCallbacks: SSECallbacks = { ...callbacks, @@ -230,6 +231,9 @@ export async function consumeSSEStream( try { const event: SSEEvent = JSON.parse(line.slice(6)); + if (event.type === 'done') { + receivedDone = true; + } accumulated = handleSSEEvent(event, accumulated, wrappedCallbacks); } catch { // skip malformed SSE lines @@ -237,7 +241,20 @@ export async function consumeSSEStream( } } - return { accumulated, tokenUsage }; + // Flush any residual buffer in case the final chunk didn't end with \n + if (buffer.trim().startsWith('data: ')) { + try { + const event: SSEEvent = JSON.parse(buffer.trim().slice(6)); + if (event.type === 'done') { + receivedDone = true; + } + accumulated = handleSSEEvent(event, accumulated, wrappedCallbacks); + } catch { + // skip malformed residual data + } + } + + return { accumulated, tokenUsage, receivedDone }; } /** diff --git a/src/i18n/en.ts 
b/src/i18n/en.ts index d81877da..989eecaa 100644 --- a/src/i18n/en.ts +++ b/src/i18n/en.ts @@ -54,6 +54,7 @@ const en = { 'streaming.allowForSession': 'Allow for Session', 'streaming.allowed': 'Allowed', 'streaming.denied': 'Denied', + 'streaming.connectionDrop': 'Connection lost — the server stream ended unexpectedly. Claude may still be running in the background. Please try sending your message again.', // ── Chat view / session page ──────────────────────────────── 'chat.newConversation': 'New Conversation', diff --git a/src/i18n/zh.ts b/src/i18n/zh.ts index abb540f8..d740e1e4 100644 --- a/src/i18n/zh.ts +++ b/src/i18n/zh.ts @@ -51,6 +51,7 @@ const zh: Record<string, string> = { 'streaming.allowForSession': '本次会话允许', 'streaming.allowed': '已允许', 'streaming.denied': '已拒绝', + 'streaming.connectionDrop': '连接中断 — 服务器流意外结束。Claude 可能仍在后台运行,请尝试重新发送消息。', // ── Chat view / session page ──────────────────────────────── 'chat.newConversation': '新对话', diff --git a/src/lib/checkin-processor.ts b/src/lib/checkin-processor.ts index 0dee1eac..79542ca8 100644 --- a/src/lib/checkin-processor.ts +++ b/src/lib/checkin-processor.ts @@ -6,7 +6,7 @@ import fs from 'fs'; import path from 'path'; import { getSetting, getSession } from '@/lib/db'; -import { resolveProvider } from '@/lib/provider-resolver'; +import { resolveProvider, DEFAULT_MODEL_ID } from '@/lib/provider-resolver'; import { loadState, saveState, writeDailyMemory } from '@/lib/assistant-workspace'; import { getLocalDateString } from '@/lib/utils'; import { generateTextFromProvider } from '@/lib/text-generator'; @@ -70,7 +70,7 @@ export async function processCheckin( sessionModel: session?.model || undefined, }); const providerId = resolved.provider?.id || 'env'; - const model = resolved.upstreamModel || resolved.model || getSetting('default_model') || 'claude-sonnet-4-20250514'; + const model = resolved.upstreamModel || resolved.model || getSetting('default_model') || DEFAULT_MODEL_ID; const dailyMemoryPrompt = `You maintain daily memory
entries for an AI assistant. Given the user's daily check-in answers, generate a daily memory entry for ${today}. diff --git a/src/lib/claude-client.ts b/src/lib/claude-client.ts index 57345f28..e7546deb 100644 --- a/src/lib/claude-client.ts +++ b/src/lib/claude-client.ts @@ -395,6 +395,8 @@ export function streamClaude(options: ClaudeStreamOptions): ReadableStream | null = null; + return new ReadableStream({ async start(controller) { // Resolve provider via the unified resolver. The caller may pass an explicit @@ -792,6 +794,21 @@ export function streamClaude(options: ClaudeStreamOptions): ReadableStream>(); + // Server-side heartbeat: send keep_alive every 30s to prevent + // transport-level idle connection drops (Electron, OS TCP, proxies). + // This is independent of the SDK's own keep_alive messages. + heartbeatTimer = setInterval(() => { + try { + controller.enqueue(formatSSE({ type: 'keep_alive', data: '' })); + } catch { + // Controller may be closed — stop heartbeat + if (heartbeatTimer) { + clearInterval(heartbeatTimer); + heartbeatTimer = null; + } + } + }, 30_000); + for await (const message of conversation) { if (abortController?.signal.aborted) { break; @@ -1027,9 +1044,11 @@ export function streamClaude(options: ClaudeStreamOptions): ReadableStream = { - 'sonnet': 200000, - 'opus': 200000, - 'haiku': 200000, - 'claude-sonnet-4-20250514': 200000, - 'claude-opus-4-20250514': 200000, - 'claude-haiku-4-5-20251001': 200000, + // Short aliases + ...Object.fromEntries(Object.entries(CLAUDE_MODELS).map(([alias, m]) => [alias, m.contextWindow])), + // Full model IDs + ...Object.fromEntries(Object.values(CLAUDE_MODELS).map(m => [m.id, m.contextWindow])), }; export function getContextWindow(model: string): number | null { diff --git a/src/lib/model-ids.ts b/src/lib/model-ids.ts new file mode 100644 index 00000000..48d36d92 --- /dev/null +++ b/src/lib/model-ids.ts @@ -0,0 +1,17 @@ +/** + * Canonical Claude model definitions — single source of truth. 
+ * + * This file has ZERO server-side imports (no fs, no db) so it can be + * safely imported from both server code and client-side React hooks. + * + * Update these when Anthropic releases new model generations. + */ + +export const CLAUDE_MODELS = { + sonnet: { id: 'claude-sonnet-4-6', displayName: 'Sonnet 4.6', contextWindow: 200000 }, + opus: { id: 'claude-opus-4-6', displayName: 'Opus 4.6', contextWindow: 200000 }, + haiku: { id: 'claude-haiku-4-5-20251001', displayName: 'Haiku 4.5', contextWindow: 200000 }, +} as const; + +/** Default model ID used as a last-resort fallback */ +export const DEFAULT_MODEL_ID = CLAUDE_MODELS.sonnet.id; diff --git a/src/lib/onboarding-processor.ts b/src/lib/onboarding-processor.ts index 5dfb81c2..8df1661c 100644 --- a/src/lib/onboarding-processor.ts +++ b/src/lib/onboarding-processor.ts @@ -6,7 +6,7 @@ import fs from 'fs'; import path from 'path'; import { getSetting, getSession } from '@/lib/db'; -import { resolveProvider } from '@/lib/provider-resolver'; +import { resolveProvider, DEFAULT_MODEL_ID } from '@/lib/provider-resolver'; import { loadState, saveState, ensureDailyDir, generateRootDocs } from '@/lib/assistant-workspace'; import { getLocalDateString } from '@/lib/utils'; import { generateTextFromProvider } from '@/lib/text-generator'; @@ -74,7 +74,7 @@ export async function processOnboarding( sessionModel: session?.model || undefined, }); const providerId = resolved.provider?.id || 'env'; - const model = resolved.upstreamModel || resolved.model || getSetting('default_model') || 'claude-sonnet-4-20250514'; + const model = resolved.upstreamModel || resolved.model || getSetting('default_model') || DEFAULT_MODEL_ID; const soulPrompt = `Based on the following user onboarding answers, generate a concise "soul.md" file that defines an AI assistant's personality, communication style, and behavioral rules. Write in second person ("You are..."). Keep it under 2000 characters. 
Use markdown headers and bullet points.\n\n${qaText}`; diff --git a/src/lib/provider-resolver.ts b/src/lib/provider-resolver.ts index 0e59fa80..80239548 100644 --- a/src/lib/provider-resolver.ts +++ b/src/lib/provider-resolver.ts @@ -25,6 +25,11 @@ import { getModelsForProvider, } from './db'; +// Canonical model definitions live in model-ids.ts (client-safe, no fs/db imports). +// Import for local use and re-export for external consumers. +import { CLAUDE_MODELS, DEFAULT_MODEL_ID } from './model-ids'; +export { CLAUDE_MODELS, DEFAULT_MODEL_ID }; + // ── Resolution result ─────────────────────────────────────────── export interface ResolvedProvider { @@ -285,7 +290,7 @@ export function toAiSdkConfig( const catalogEntry = resolved.availableModels.find(m => m.modelId === modelOverride); modelId = catalogEntry?.upstreamModelId || modelOverride; } else { - modelId = resolved.upstreamModel || resolved.model || 'claude-sonnet-4-20250514'; + modelId = resolved.upstreamModel || resolved.model || DEFAULT_MODEL_ID; } const provider = resolved.provider; const protocol = resolved.protocol; @@ -461,11 +466,11 @@ function buildResolution( // Env mode uses short aliases (sonnet/opus/haiku) in the UI. // Map them to full Anthropic model IDs so toAiSdkConfig can resolve correctly. - const envModels: CatalogModel[] = [ - { modelId: 'sonnet', upstreamModelId: 'claude-sonnet-4-20250514', displayName: 'Sonnet 4.6' }, - { modelId: 'opus', upstreamModelId: 'claude-opus-4-20250514', displayName: 'Opus 4.6' }, - { modelId: 'haiku', upstreamModelId: 'claude-haiku-4-5-20251001', displayName: 'Haiku 4.5' }, - ]; + const envModels: CatalogModel[] = Object.entries(CLAUDE_MODELS).map(([alias, m]) => ({ + modelId: alias, + upstreamModelId: m.id, + displayName: m.displayName, + })); // Resolve upstream model from the alias table const catalogEntry = model ? 
envModels.find(m => m.modelId === model) : undefined; diff --git a/src/lib/stream-session-manager.ts b/src/lib/stream-session-manager.ts index 8371f401..34115b90 100644 --- a/src/lib/stream-session-manager.ts +++ b/src/lib/stream-session-manager.ts @@ -358,6 +358,42 @@ async function runStream(stream: ActiveStream, params: StartStreamParams): Promi }, }); + // Detect premature stream end (connection drop without server 'done' event) + if (!result.receivedDone) { + cleanupTimers(stream); + + const dropMsg = 'Connection lost — the server stream ended unexpectedly. Claude may still be running in the background. Please try sending your message again.'; + const errContent = stream.accumulatedText.trim() + ? stream.accumulatedText.trim() + `\n\n**Error:** ${dropMsg}` + : `**Error:** ${dropMsg}`; + + stream.snapshot = { + ...buildSnapshot(stream), + phase: 'error', + completedAt: Date.now(), + error: dropMsg, + finalMessageContent: errContent, + statusText: undefined, + pendingPermission: null, + permissionResolved: null, + }; + stream.accumulatedText = ''; + stream.toolUsesArray = []; + stream.toolResultsArray = []; + stream.toolOutputAccumulated = ''; + emit(stream, 'completed'); + + // Clear stale SDK session so next message starts fresh + fetch(`/api/chat/sessions/${encodeURIComponent(stream.sessionId)}`, { + method: 'PATCH', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ sdk_session_id: '' }), + }).catch(() => {}); + + scheduleGC(stream); + return; + } + // Stream completed successfully — build final message content const accumulated = result.accumulated; const finalToolUses = stream.toolUsesArray;