Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions src/app/api/media/jobs/plan/route.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import { NextRequest } from 'next/server';
import { streamTextFromProvider } from '@/lib/text-generator';
import { resolveProvider } from '@/lib/provider-resolver';
import { resolveProvider, DEFAULT_MODEL_ID } from '@/lib/provider-resolver';
import fs from 'fs';
import type { PlanMediaJobRequest } from '@/types';

Expand Down Expand Up @@ -62,7 +62,7 @@ export async function POST(request: NextRequest) {
});
// Preserve 'env' semantics (see onboarding route for rationale)
const providerId = resolved.provider?.id || 'env';
const modelId = resolved.upstreamModel || resolved.model || session?.model || 'claude-sonnet-4-20250514';
const modelId = resolved.upstreamModel || resolved.model || session?.model || DEFAULT_MODEL_ID;

// Read document content
let docContent = body.docContent || '';
Expand Down
11 changes: 5 additions & 6 deletions src/app/api/skills/search/route.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import { NextResponse } from 'next/server';
import { getActiveProvider, getSetting } from '@/lib/db';
import { CLAUDE_MODELS } from '@/lib/provider-resolver';

interface SkillInfo {
name: string;
Expand All @@ -12,12 +13,10 @@ interface SearchRequest {
model?: string;
}

// Model alias -> full model ID
const MODEL_MAP: Record<string, string> = {
sonnet: 'claude-sonnet-4-20250514',
opus: 'claude-opus-4-20250514',
haiku: 'claude-haiku-4-20250414',
};
// Model alias -> full model ID, derived from the central CLAUDE_MODELS
// table so new model generations only need to be updated in one place.
const MODEL_MAP: Record<string, string> = {};
for (const [alias, model] of Object.entries(CLAUDE_MODELS)) {
  MODEL_MAP[alias] = model.id;
}

interface ApiConfig {
supported: boolean;
Expand Down
2 changes: 1 addition & 1 deletion src/app/chat/page.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -268,7 +268,7 @@ export default function NewChatPage() {
try {
const statusData = JSON.parse(event.data);
if (statusData.session_id) {
setStatusText(`Connected (${statusData.model || 'claude'})`);
setStatusText(`Connected (${statusData.requested_model || statusData.model || 'claude'})`);
setTimeout(() => setStatusText(undefined), 2000);
} else if (statusData.notification) {
setStatusText(statusData.message || statusData.title || undefined);
Expand Down
21 changes: 19 additions & 2 deletions src/hooks/useSSEStream.ts
Original file line number Diff line number Diff line change
Expand Up @@ -203,11 +203,12 @@ function handleSSEEvent(
export async function consumeSSEStream(
reader: ReadableStreamDefaultReader<Uint8Array>,
callbacks: SSECallbacks,
): Promise<{ accumulated: string; tokenUsage: TokenUsage | null }> {
): Promise<{ accumulated: string; tokenUsage: TokenUsage | null; receivedDone: boolean }> {
const decoder = new TextDecoder();
let buffer = '';
let accumulated = '';
let tokenUsage: TokenUsage | null = null;
let receivedDone = false;

const wrappedCallbacks: SSECallbacks = {
...callbacks,
Expand All @@ -230,14 +231,30 @@ export async function consumeSSEStream(

try {
const event: SSEEvent = JSON.parse(line.slice(6));
if (event.type === 'done') {
receivedDone = true;
}
accumulated = handleSSEEvent(event, accumulated, wrappedCallbacks);
} catch {
// skip malformed SSE lines
}
}
}

return { accumulated, tokenUsage };
// Flush any residual buffer in case the final chunk didn't end with \n
if (buffer.trim().startsWith('data: ')) {
try {
const event: SSEEvent = JSON.parse(buffer.trim().slice(6));
if (event.type === 'done') {
receivedDone = true;
}
accumulated = handleSSEEvent(event, accumulated, wrappedCallbacks);
} catch {
// skip malformed residual data
}
}

return { accumulated, tokenUsage, receivedDone };
}

/**
Expand Down
1 change: 1 addition & 0 deletions src/i18n/en.ts
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,7 @@ const en = {
'streaming.allowForSession': 'Allow for Session',
'streaming.allowed': 'Allowed',
'streaming.denied': 'Denied',
'streaming.connectionDrop': 'Connection lost — the server stream ended unexpectedly. Claude may still be running in the background. Please try sending your message again.',

// ── Chat view / session page ────────────────────────────────
'chat.newConversation': 'New Conversation',
Expand Down
1 change: 1 addition & 0 deletions src/i18n/zh.ts
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,7 @@ const zh: Record<TranslationKey, string> = {
'streaming.allowForSession': '本次会话允许',
'streaming.allowed': '已允许',
'streaming.denied': '已拒绝',
'streaming.connectionDrop': '连接中断 — 服务器流意外结束。Claude 可能仍在后台运行,请尝试重新发送消息。',

// ── Chat view / session page ────────────────────────────────
'chat.newConversation': '新对话',
Expand Down
4 changes: 2 additions & 2 deletions src/lib/checkin-processor.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
import fs from 'fs';
import path from 'path';
import { getSetting, getSession } from '@/lib/db';
import { resolveProvider } from '@/lib/provider-resolver';
import { resolveProvider, DEFAULT_MODEL_ID } from '@/lib/provider-resolver';
import { loadState, saveState, writeDailyMemory } from '@/lib/assistant-workspace';
import { getLocalDateString } from '@/lib/utils';
import { generateTextFromProvider } from '@/lib/text-generator';
Expand Down Expand Up @@ -70,7 +70,7 @@ export async function processCheckin(
sessionModel: session?.model || undefined,
});
const providerId = resolved.provider?.id || 'env';
const model = resolved.upstreamModel || resolved.model || getSetting('default_model') || 'claude-sonnet-4-20250514';
const model = resolved.upstreamModel || resolved.model || getSetting('default_model') || DEFAULT_MODEL_ID;

const dailyMemoryPrompt = `You maintain daily memory entries for an AI assistant. Given the user's daily check-in answers, generate a daily memory entry for ${today}.

Expand Down
20 changes: 20 additions & 0 deletions src/lib/claude-client.ts
Original file line number Diff line number Diff line change
Expand Up @@ -395,6 +395,8 @@ export function streamClaude(options: ClaudeStreamOptions): ReadableStream<strin
context1m,
} = options;

let heartbeatTimer: ReturnType<typeof setInterval> | null = null;

return new ReadableStream<string>({
async start(controller) {
// Resolve provider via the unified resolver. The caller may pass an explicit
Expand Down Expand Up @@ -792,6 +794,21 @@ export function streamClaude(options: ClaudeStreamOptions): ReadableStream<strin
// Track pending TodoWrite tool_use_ids so we can sync after successful execution
const pendingTodoWrites = new Map<string, Array<{ content: string; status: string; activeForm?: string }>>();

// Server-side heartbeat: send keep_alive every 30s to prevent
// transport-level idle connection drops (Electron, OS TCP, proxies).
// This is independent of the SDK's own keep_alive messages.
heartbeatTimer = setInterval(() => {
try {
controller.enqueue(formatSSE({ type: 'keep_alive', data: '' }));
} catch {
// Controller may be closed — stop heartbeat
if (heartbeatTimer) {
clearInterval(heartbeatTimer);
heartbeatTimer = null;
}
}
}, 30_000);

for await (const message of conversation) {
if (abortController?.signal.aborted) {
break;
Expand Down Expand Up @@ -1027,9 +1044,11 @@ export function streamClaude(options: ClaudeStreamOptions): ReadableStream<strin
}
}

if (heartbeatTimer) { clearInterval(heartbeatTimer); heartbeatTimer = null; }
controller.enqueue(formatSSE({ type: 'done', data: '' }));
controller.close();
} catch (error) {
if (heartbeatTimer) { clearInterval(heartbeatTimer); heartbeatTimer = null; }
const rawMessage = error instanceof Error ? error.message : 'Unknown error';
// Log full error details for debugging (visible in terminal / dev tools)
console.error('[claude-client] Stream error:', {
Expand Down Expand Up @@ -1097,6 +1116,7 @@ export function streamClaude(options: ClaudeStreamOptions): ReadableStream<strin
},

cancel() {
if (heartbeatTimer) { clearInterval(heartbeatTimer); heartbeatTimer = null; }
abortController?.abort();
},
});
Expand Down
12 changes: 6 additions & 6 deletions src/lib/model-context.ts
Original file line number Diff line number Diff line change
@@ -1,10 +1,10 @@
import { CLAUDE_MODELS } from './model-ids';

export const MODEL_CONTEXT_WINDOWS: Record<string, number> = {
'sonnet': 200000,
'opus': 200000,
'haiku': 200000,
'claude-sonnet-4-20250514': 200000,
'claude-opus-4-20250514': 200000,
'claude-haiku-4-5-20251001': 200000,
// Short aliases
...Object.fromEntries(Object.entries(CLAUDE_MODELS).map(([alias, m]) => [alias, m.contextWindow])),
// Full model IDs
...Object.fromEntries(Object.values(CLAUDE_MODELS).map(m => [m.id, m.contextWindow])),
};

export function getContextWindow(model: string): number | null {
Expand Down
17 changes: 17 additions & 0 deletions src/lib/model-ids.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
/**
 * Canonical Claude model definitions — single source of truth.
 *
 * This file has ZERO server-side imports (no fs, no db) so it can be
 * safely imported from both server code and client-side React hooks.
 *
 * Keys are the short aliases ('sonnet' | 'opus' | 'haiku') used as
 * model identifiers in env mode; each entry maps an alias to its full
 * upstream Anthropic model ID, a human-readable display name, and the
 * model's context window size in tokens.
 *
 * Declared `as const` so consumers that derive types via
 * `typeof CLAUDE_MODELS` keep the literal alias keys and ID strings —
 * do not loosen this to a plain annotated object.
 *
 * Update these when Anthropic releases new model generations.
 */

export const CLAUDE_MODELS = {
  // NOTE(review): sonnet/opus use dateless IDs while haiku uses a
  // date-suffixed snapshot ID — confirm this mix is intentional.
  sonnet: { id: 'claude-sonnet-4-6', displayName: 'Sonnet 4.6', contextWindow: 200000 },
  opus: { id: 'claude-opus-4-6', displayName: 'Opus 4.6', contextWindow: 200000 },
  haiku: { id: 'claude-haiku-4-5-20251001', displayName: 'Haiku 4.5', contextWindow: 200000 },
} as const;

/** Default model ID used as a last-resort fallback when no provider,
 *  session, or settings-level model resolution produced a value. */
export const DEFAULT_MODEL_ID = CLAUDE_MODELS.sonnet.id;
4 changes: 2 additions & 2 deletions src/lib/onboarding-processor.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
import fs from 'fs';
import path from 'path';
import { getSetting, getSession } from '@/lib/db';
import { resolveProvider } from '@/lib/provider-resolver';
import { resolveProvider, DEFAULT_MODEL_ID } from '@/lib/provider-resolver';
import { loadState, saveState, ensureDailyDir, generateRootDocs } from '@/lib/assistant-workspace';
import { getLocalDateString } from '@/lib/utils';
import { generateTextFromProvider } from '@/lib/text-generator';
Expand Down Expand Up @@ -74,7 +74,7 @@ export async function processOnboarding(
sessionModel: session?.model || undefined,
});
const providerId = resolved.provider?.id || 'env';
const model = resolved.upstreamModel || resolved.model || getSetting('default_model') || 'claude-sonnet-4-20250514';
const model = resolved.upstreamModel || resolved.model || getSetting('default_model') || DEFAULT_MODEL_ID;

const soulPrompt = `Based on the following user onboarding answers, generate a concise "soul.md" file that defines an AI assistant's personality, communication style, and behavioral rules. Write in second person ("You are..."). Keep it under 2000 characters. Use markdown headers and bullet points.\n\n${qaText}`;

Expand Down
17 changes: 11 additions & 6 deletions src/lib/provider-resolver.ts
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,11 @@ import {
getModelsForProvider,
} from './db';

// Canonical model definitions live in model-ids.ts (client-safe, no fs/db imports).
// Import for local use and re-export for external consumers.
import { CLAUDE_MODELS, DEFAULT_MODEL_ID } from './model-ids';
export { CLAUDE_MODELS, DEFAULT_MODEL_ID };

// ── Resolution result ───────────────────────────────────────────

export interface ResolvedProvider {
Expand Down Expand Up @@ -285,7 +290,7 @@ export function toAiSdkConfig(
const catalogEntry = resolved.availableModels.find(m => m.modelId === modelOverride);
modelId = catalogEntry?.upstreamModelId || modelOverride;
} else {
modelId = resolved.upstreamModel || resolved.model || 'claude-sonnet-4-20250514';
modelId = resolved.upstreamModel || resolved.model || DEFAULT_MODEL_ID;
}
const provider = resolved.provider;
const protocol = resolved.protocol;
Expand Down Expand Up @@ -461,11 +466,11 @@ function buildResolution(

// Env mode uses short aliases (sonnet/opus/haiku) in the UI.
// Map them to full Anthropic model IDs so toAiSdkConfig can resolve correctly.
const envModels: CatalogModel[] = [
{ modelId: 'sonnet', upstreamModelId: 'claude-sonnet-4-20250514', displayName: 'Sonnet 4.6' },
{ modelId: 'opus', upstreamModelId: 'claude-opus-4-20250514', displayName: 'Opus 4.6' },
{ modelId: 'haiku', upstreamModelId: 'claude-haiku-4-5-20251001', displayName: 'Haiku 4.5' },
];
const envModels: CatalogModel[] = Object.entries(CLAUDE_MODELS).map(([alias, m]) => ({
modelId: alias,
upstreamModelId: m.id,
displayName: m.displayName,
}));

// Resolve upstream model from the alias table
const catalogEntry = model ? envModels.find(m => m.modelId === model) : undefined;
Expand Down
36 changes: 36 additions & 0 deletions src/lib/stream-session-manager.ts
Original file line number Diff line number Diff line change
Expand Up @@ -358,6 +358,42 @@ async function runStream(stream: ActiveStream, params: StartStreamParams): Promi
},
});

// Detect premature stream end (connection drop without server 'done' event)
if (!result.receivedDone) {
cleanupTimers(stream);

const dropMsg = 'Connection lost — the server stream ended unexpectedly. Claude may still be running in the background. Please try sending your message again.';
const errContent = stream.accumulatedText.trim()
? stream.accumulatedText.trim() + `\n\n**Error:** ${dropMsg}`
: `**Error:** ${dropMsg}`;

stream.snapshot = {
...buildSnapshot(stream),
phase: 'error',
completedAt: Date.now(),
error: dropMsg,
finalMessageContent: errContent,
statusText: undefined,
pendingPermission: null,
permissionResolved: null,
};
stream.accumulatedText = '';
stream.toolUsesArray = [];
stream.toolResultsArray = [];
stream.toolOutputAccumulated = '';
emit(stream, 'completed');

// Clear stale SDK session so next message starts fresh
fetch(`/api/chat/sessions/${encodeURIComponent(stream.sessionId)}`, {
method: 'PATCH',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ sdk_session_id: '' }),
}).catch(() => {});

scheduleGC(stream);
return;
}

// Stream completed successfully — build final message content
const accumulated = result.accumulated;
const finalToolUses = stream.toolUsesArray;
Expand Down
Loading