src/main/lib/trpc/routers/claude.ts (24 changes: 13 additions, 11 deletions)
@@ -1504,7 +1504,7 @@ ${prompt}

// When result arrives, assign the last assistant UUID to metadata
// It will be emitted as part of the merged message-metadata chunk below
if (msgAny.type === "result" && historyEnabled && lastAssistantUuid) {
if (msgAny.type === "result" && historyEnabled && lastAssistantUuid && !abortController.signal.aborted) {
metadata.sdkMessageUuid = lastAssistantUuid
}

@@ -1832,6 +1832,8 @@ ${prompt}
parts.push({ type: "text", text: currentText })
}

const savedSessionId = metadata.sessionId

if (parts.length > 0) {
const assistantMessage = {
id: crypto.randomUUID(),
@@ -1845,7 +1847,7 @@
db.update(subChats)
.set({
messages: JSON.stringify(finalMessages),
sessionId: metadata.sessionId,
sessionId: savedSessionId,
streamId: null,
updatedAt: new Date(),
})
@@ -1855,7 +1857,7 @@
// No assistant response - just clear streamId
db.update(subChats)
.set({
sessionId: metadata.sessionId,
sessionId: savedSessionId,
streamId: null,
updatedAt: new Date(),
})
@@ -1896,14 +1898,13 @@ ${prompt}
activeSessions.delete(input.subChatId)
clearPendingApprovals("Session ended.", input.subChatId)

// Save sessionId on abort so conversation can be resumed
// Clear streamId since we're no longer streaming
// Clear streamId since we're no longer streaming.
// sessionId is NOT saved here — the save block in the async function
// handles it (saves on normal completion, clears on abort). This avoids
// a redundant DB write that the cancel mutation would then overwrite.
const db = getDatabase()
db.update(subChats)
.set({
streamId: null,
...(currentSessionId && { sessionId: currentSessionId })
})
.set({ streamId: null })
.where(eq(subChats.id, input.subChatId))
.run()
}
@@ -1964,9 +1965,10 @@ ${prompt}
controller.abort()
activeSessions.delete(input.subChatId)
clearPendingApprovals("Session cancelled.", input.subChatId)
return { cancelled: true }
}
return { cancelled: false }


return { cancelled: !!controller }
}),

/**
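Taken together, the claude.ts changes make the async completion block the sole writer of sessionId, while the abort handler only clears streamId. A simplified sketch of the two paths, using the helpers and table names that appear in the diff (getDatabase, subChats, eq); this is an illustration of the intended division of responsibility, not the actual router code:

    import { eq } from "drizzle-orm"

    // Assumed app-level helpers (defined elsewhere in the repo, not in this diff).
    declare function getDatabase(): any
    declare const subChats: any

    // Completion path: persist the transcript plus the sessionId captured before
    // the save, and mark streaming as finished.
    function saveOnCompletion(
      subChatId: string,
      finalMessages: unknown[],
      savedSessionId: string | undefined,
    ) {
      const db = getDatabase()
      db.update(subChats)
        .set({
          messages: JSON.stringify(finalMessages),
          sessionId: savedSessionId,
          streamId: null,
          updatedAt: new Date(),
        })
        .where(eq(subChats.id, subChatId))
        .run()
    }

    // Abort path: only clear streamId. sessionId is left to the completion block,
    // avoiding a redundant DB write that the cancel mutation would then overwrite.
    function clearStreamOnAbort(subChatId: string) {
      const db = getDatabase()
      db.update(subChats)
        .set({ streamId: null })
        .where(eq(subChats.id, subChatId))
        .run()
    }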
src/renderer/features/agents/lib/ipc-chat-transport.ts (8 changes: 5 additions, 3 deletions)
@@ -24,6 +24,7 @@ import {
pendingUserQuestionsAtom,
} from "../atoms"
import { useAgentSubChatStore } from "../stores/sub-chat-store"
import type { AgentMessageMetadata } from "../ui/agent-message-usage"

// Error categories and their user-friendly messages
const ERROR_TOAST_CONFIG: Record<
@@ -149,11 +150,13 @@ export class IPCChatTransport implements ChatTransport<UIMessage> {
const prompt = this.extractText(lastUser)
const images = this.extractImages(lastUser)

// Get sessionId for resume
// Get sessionId for resume (server preserves sessionId on abort so
// the next message can resume with full conversation context)
const lastAssistant = [...options.messages]
.reverse()
.find((m) => m.role === "assistant")
const sessionId = (lastAssistant as any)?.metadata?.sessionId
const metadata = lastAssistant?.metadata as AgentMessageMetadata | undefined
const sessionId = metadata?.sessionId

// Read extended thinking setting dynamically (so toggle applies to existing chats)
const thinkingEnabled = appStore.get(extendedThinkingEnabledAtom)
@@ -441,7 +444,6 @@ export class IPCChatTransport implements ChatTransport<UIMessage> {
options.abortSignal?.addEventListener("abort", () => {
console.log(`[SD] R:ABORT sub=${subId} n=${chunkCount} last=${lastChunkType}`)
sub.unsubscribe()
trpcClient.claude.cancel.mutate({ subChatId: this.config.subChatId })
try {
controller.close()
} catch {
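The typed metadata read above relies on the AgentMessageMetadata type imported from ../ui/agent-message-usage, which is not part of this diff. For illustration only, the fields this PR references would look roughly like this (the real type likely carries additional usage fields):

    // Hypothetical shape for illustration; the actual AgentMessageMetadata is
    // defined in agent-message-usage and may include more fields.
    export interface AgentMessageMetadata {
      sessionId?: string
      sdkMessageUuid?: string
    }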
src/renderer/features/agents/main/active-chat.tsx (201 changes: 113 additions, 88 deletions)
@@ -222,6 +222,39 @@ function utf8ToBase64(str: string): string {
return btoa(binString)
}

/** Wait for streaming to finish by subscribing to the status store.
* Includes a 30s safety timeout — if the store never transitions to "ready",
* the promise resolves anyway to prevent hanging the UI indefinitely. */
const STREAMING_READY_TIMEOUT_MS = 30_000

function waitForStreamingReady(subChatId: string): Promise<void> {
return new Promise((resolve) => {
if (!useStreamingStatusStore.getState().isStreaming(subChatId)) {
resolve()
return
}

const timeout = setTimeout(() => {
console.warn(
`[waitForStreamingReady] Timed out after ${STREAMING_READY_TIMEOUT_MS}ms for subChat ${subChatId.slice(-8)}, proceeding anyway`
)
unsub()
resolve()
}, STREAMING_READY_TIMEOUT_MS)

const unsub = useStreamingStatusStore.subscribe(
(state) => state.statuses[subChatId],
(status) => {
if (status === "ready" || status === undefined) {
clearTimeout(timeout)
unsub()
resolve()
}
}
)
})
}

// Exploring tools - these get grouped when 2+ consecutive
const EXPLORING_TOOLS = new Set([
"tool-Read",
@@ -2326,11 +2359,6 @@ const ChatViewInner = memo(function ChatViewInner({
// Mark as manually aborted to prevent completion sound
agentChatStore.setManuallyAborted(subChatId, true)
await stopRef.current()
// Call DELETE endpoint to cancel server-side stream
await fetch(
`/api/agents/chat?id=${encodeURIComponent(subChatId)}`,
{ method: "DELETE", credentials: "include" },
)
}, [subChatId])

// Wrapper for addTextContext that handles TextSelectionSource
@@ -3078,11 +3106,6 @@ const ChatViewInner = memo(function ChatViewInner({
// Mark as manually aborted to prevent completion sound
agentChatStore.setManuallyAborted(subChatId, true)
await stop()
// Call DELETE endpoint to cancel server-side stream
await fetch(`/api/agents/chat?id=${encodeURIComponent(subChatId)}`, {
method: "DELETE",
credentials: "include",
})
}
}

@@ -3504,82 +3527,83 @@ const ChatViewInner = memo(function ChatViewInner({
const item = popItemFromQueue(subChatId, itemId)
if (!item) return

// Stop current stream if streaming and wait for status to become ready
if (isStreamingRef.current) {
await handleStop()
// Wait for status to become "ready" (max 2 seconds)
const maxWait = 2000
const pollInterval = 50
let waited = 0
while (isStreamingRef.current && waited < maxWait) {
await new Promise((resolve) => setTimeout(resolve, pollInterval))
waited += pollInterval
try {
// Stop current stream if streaming and wait for status to become ready.
// The server-side save block preserves sessionId on abort, so the next
// message can resume the session with full conversation context.
if (isStreamingRef.current) {
await handleStop()
await waitForStreamingReady(subChatId)
}
}

// Build message parts from queued item
const parts: any[] = [
...(item.images || []).map((img) => ({
type: "data-image" as const,
data: {
url: img.url,
mediaType: img.mediaType,
filename: img.filename,
base64Data: img.base64Data,
},
})),
...(item.files || []).map((f) => ({
type: "data-file" as const,
data: {
url: f.url,
mediaType: f.mediaType,
filename: f.filename,
size: f.size,
},
})),
]

// Add text contexts as mention tokens
let mentionPrefix = ""
if (item.textContexts && item.textContexts.length > 0) {
const quoteMentions = item.textContexts.map((tc) => {
const preview = tc.text.slice(0, 50).replace(/[:\[\]]/g, "") // Create and sanitize preview
const encodedText = utf8ToBase64(tc.text) // Base64 encode full text
return `@[${MENTION_PREFIXES.QUOTE}${preview}:${encodedText}]`
})
mentionPrefix = quoteMentions.join(" ") + " "
}
// Build message parts from queued item
const parts: any[] = [
...(item.images || []).map((img) => ({
type: "data-image" as const,
data: {
url: img.url,
mediaType: img.mediaType,
filename: img.filename,
base64Data: img.base64Data,
},
})),
...(item.files || []).map((f) => ({
type: "data-file" as const,
data: {
url: f.url,
mediaType: f.mediaType,
filename: f.filename,
size: f.size,
},
})),
]

// Add text contexts as mention tokens
let mentionPrefix = ""
if (item.textContexts && item.textContexts.length > 0) {
const quoteMentions = item.textContexts.map((tc) => {
const preview = tc.text.slice(0, 50).replace(/[:\[\]]/g, "") // Create and sanitize preview
const encodedText = utf8ToBase64(tc.text) // Base64 encode full text
return `@[${MENTION_PREFIXES.QUOTE}${preview}:${encodedText}]`
})
mentionPrefix = quoteMentions.join(" ") + " "
}

// Add diff text contexts as mention tokens
if (item.diffTextContexts && item.diffTextContexts.length > 0) {
const diffMentions = item.diffTextContexts.map((dtc) => {
const preview = dtc.text.slice(0, 50).replace(/[:\[\]]/g, "") // Create and sanitize preview
const encodedText = utf8ToBase64(dtc.text) // Base64 encode full text
const lineNum = dtc.lineNumber || 0
return `@[${MENTION_PREFIXES.DIFF}${dtc.filePath}:${lineNum}:${preview}:${encodedText}]`
})
mentionPrefix += diffMentions.join(" ") + " "
}
// Add diff text contexts as mention tokens
if (item.diffTextContexts && item.diffTextContexts.length > 0) {
const diffMentions = item.diffTextContexts.map((dtc) => {
const preview = dtc.text.slice(0, 50).replace(/[:\[\]]/g, "") // Create and sanitize preview
const encodedText = utf8ToBase64(dtc.text) // Base64 encode full text
const lineNum = dtc.lineNumber || 0
return `@[${MENTION_PREFIXES.DIFF}${dtc.filePath}:${lineNum}:${preview}:${encodedText}]`
})
mentionPrefix += diffMentions.join(" ") + " "
}

if (item.message || mentionPrefix) {
parts.push({ type: "text", text: mentionPrefix + (item.message || "") })
}
if (item.message || mentionPrefix) {
parts.push({ type: "text", text: mentionPrefix + (item.message || "") })
}

// Track message sent
trackMessageSent({
workspaceId: subChatId,
messageLength: item.message.length,
mode: subChatModeRef.current,
})
// Track message sent
trackMessageSent({
workspaceId: subChatId,
messageLength: item.message.length,
mode: subChatModeRef.current,
})

// Update timestamps
useAgentSubChatStore.getState().updateSubChatTimestamp(subChatId)
// Update timestamps
useAgentSubChatStore.getState().updateSubChatTimestamp(subChatId)

// Enable auto-scroll and immediately scroll to bottom
shouldAutoScrollRef.current = true
scrollToBottom()
// Enable auto-scroll and immediately scroll to bottom
shouldAutoScrollRef.current = true
scrollToBottom()

await sendMessageRef.current({ role: "user", parts })
await sendMessageRef.current({ role: "user", parts })
} catch (error) {
console.error("[handleSendFromQueue] Error sending queued message:", error)
// Requeue the item at the front so it isn't lost
useMessageQueueStore.getState().prependItem(subChatId, item)
}
}, [subChatId, popItemFromQueue, handleStop])

const handleRemoveFromQueue = useCallback((itemId: string) => {
@@ -3603,17 +3627,12 @@ const ChatViewInner = memo(function ChatViewInner({

if (!hasText && !hasImages) return

// Stop current stream if streaming and wait for status to become ready
// Stop current stream if streaming and wait for status to become ready.
// The server-side save block sets sessionId=null on abort, so the next
// message starts fresh without needing an explicit cancel mutation.
if (isStreamingRef.current) {
await handleStop()
// Wait for status to become "ready" (max 2 seconds)
const maxWait = 2000
const pollInterval = 50
let waited = 0
while (isStreamingRef.current && waited < maxWait) {
await new Promise((resolve) => setTimeout(resolve, pollInterval))
waited += pollInterval
}
await waitForStreamingReady(subChatId)
}

// Auto-restore archived workspace when sending a message
@@ -3704,7 +3723,13 @@ const ChatViewInner = memo(function ChatViewInner({
shouldAutoScrollRef.current = true
scrollToBottom()

await sendMessageRef.current({ role: "user", parts })
try {
await sendMessageRef.current({ role: "user", parts })
} catch (error) {
console.error("[handleForceSend] Error sending message:", error)
// Restore editor content so the user can retry
editorRef.current?.setValue(finalText)
}
}, [
sandboxSetupStatus,
isArchived,
src/renderer/features/agents/main/chat-input-area.tsx (7 changes: 5 additions, 2 deletions)
@@ -181,7 +181,9 @@ function arePropsEqual(prevProps: ChatInputAreaProps, nextProps: ChatInputAreaPr
prevProps.repository !== nextProps.repository ||
prevProps.sandboxId !== nextProps.sandboxId ||
prevProps.projectPath !== nextProps.projectPath ||
prevProps.isMobile !== nextProps.isMobile
prevProps.isMobile !== nextProps.isMobile ||
prevProps.queueLength !== nextProps.queueLength ||
prevProps.firstQueueItemId !== nextProps.firstQueueItemId
) {
return false
}
@@ -209,7 +211,8 @@ function arePropsEqual(prevProps: ChatInputAreaProps, nextProps: ChatInputAreaPr
prevProps.onRemovePastedText !== nextProps.onRemovePastedText ||
prevProps.onCacheFileContent !== nextProps.onCacheFileContent ||
prevProps.onInputContentChange !== nextProps.onInputContentChange ||
prevProps.onSubmitWithQuestionAnswer !== nextProps.onSubmitWithQuestionAnswer
prevProps.onSubmitWithQuestionAnswer !== nextProps.onSubmitWithQuestionAnswer ||
prevProps.onSendFromQueue !== nextProps.onSendFromQueue
) {
return false
}
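For context on the chat-input-area.tsx change: arePropsEqual is the custom comparator passed to React.memo, and any prop left out of it (like queueLength before this PR) is ignored when deciding whether to re-render, so the memoized input area could show a stale queue. A minimal sketch of the pattern with just the newly compared props (prop names taken from the diff, types assumed, component body is a placeholder):

    import { memo } from "react"

    interface ChatInputAreaProps {
      queueLength: number
      firstQueueItemId: string | null
      onSendFromQueue: (itemId: string) => void
    }

    function arePropsEqual(prev: ChatInputAreaProps, next: ChatInputAreaProps) {
      // Returning true tells React.memo to skip the re-render, so every prop that
      // affects the rendered output must be compared; a missed prop goes stale.
      return (
        prev.queueLength === next.queueLength &&
        prev.firstQueueItemId === next.firstQueueItemId &&
        prev.onSendFromQueue === next.onSendFromQueue
      )
    }

    export const ChatInputArea = memo(function ChatInputArea(
      props: ChatInputAreaProps,
    ) {
      return null // placeholder render for the sketch
    }, arePropsEqual)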