Skip to content

Commit 5e17841

Browse files
isaacbowen and claude committed
Add streaming-into-view display pacing
Text chunks are queued and released at 30-80ms intervals for a natural reading pace, matching the spirit of the lightward.com JS client's MessageStreamController (timing adapted for iOS's smaller chunks).

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
1 parent 82850cb commit 5e17841

2 files changed

Lines changed: 82 additions & 18 deletions

File tree

Lightward/Chat/ChatView.swift

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -47,7 +47,7 @@ struct ChatView: View {
4747
.frame(maxWidth: 500, alignment: .leading)
4848
}
4949
.scrollDismissesKeyboard(.interactively)
50-
.onChange(of: vm.streamingText) {
50+
.onChange(of: vm.displayedText) {
5151
withAnimation(.spring(duration: 0.3)) {
5252
proxy.scrollTo("bottom", anchor: .bottom)
5353
}

Lightward/Chat/ChatViewModel.swift

Lines changed: 81 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -8,14 +8,19 @@ final class ChatViewModel {
88
var messages: [ChatMessage] = []
99
var inputText = ""
1010
var streaming = false
11-
var streamingText = ""
11+
var displayedText = ""
1212
var error: String?
1313

1414
private let store: Store
1515
private let phoropterTrail: [String]
1616
private var hasInitiated = false
1717
private var currentTask: Task<Void, Never>?
1818

19+
// Streaming display — queue chunks, release at human reading pace
20+
private var chunkQueue: [String] = []
21+
private var fullText = ""
22+
private var displayTask: Task<Void, Never>?
23+
1924
init(store: Store, phoropterTrail: [String]) {
2025
self.store = store
2126
self.phoropterTrail = phoropterTrail
@@ -26,7 +31,6 @@ final class ChatViewModel {
2631
}
2732

2833
/// Initiates the conversation with the phoropter trajectory as context.
29-
/// Called from ChatView.onAppear so it fires when the view is actually visible.
3034
func initiateIfNeeded() {
3135
guard !hasInitiated else { return }
3236
hasInitiated = true
@@ -62,8 +66,11 @@ final class ChatViewModel {
6266

6367
private func streamResponse(chatLog: [[String: Any]]) {
6468
currentTask?.cancel()
69+
displayTask?.cancel()
6570
streaming = true
66-
streamingText = ""
71+
fullText = ""
72+
displayedText = ""
73+
chunkQueue = []
6774
error = nil
6875

6976
currentTask = Task {
@@ -75,13 +82,8 @@ final class ChatViewModel {
7582
switch event {
7683
case .text(let chunk):
7784
chunkCount += 1
78-
streamingText += chunk
79-
// Update or create the assistant message
80-
if let last = messages.indices.last, messages[last].role == .assistant {
81-
messages[last].text = streamingText
82-
} else {
83-
messages.append(ChatMessage(role: .assistant, text: streamingText))
84-
}
85+
fullText += chunk
86+
enqueueChunk(chunk)
8587

8688
case .started:
8789
Log.chat.debug("Stream: started")
@@ -91,22 +93,21 @@ final class ChatViewModel {
9193
}
9294
}
9395

94-
streaming = false
95-
Log.chat.info("Stream: complete, \(chunkCount) chunks, final length: \(self.streamingText.count)")
96+
Log.chat.info("Stream: complete, \(chunkCount) chunks, length: \(self.fullText.count)")
9697

9798
if chunkCount == 0 {
98-
Log.chat.error("Stream: completed with zero chunks — no content received")
99+
Log.chat.error("Stream: zero chunks received")
100+
streaming = false
99101
self.error = "No response received"
100102
ErrorReporter.report(category: "chat", message: "Stream completed with zero text chunks")
101-
} else if let last = messages.last, !last.text.isEmpty {
102-
store.appendMessage(last)
103103
}
104+
// streaming = false is set when the display queue drains
104105
} catch {
105106
Log.chat.error("Stream error: \(error, privacy: .public)")
107+
flushDisplay()
106108
streaming = false
107109
if !Task.isCancelled {
108-
// Remove empty assistant message on error
109-
if let last = messages.indices.last, messages[last].text.isEmpty {
110+
if messages.last?.text.isEmpty == true {
110111
messages.removeLast()
111112
}
112113
self.error = error.localizedDescription
@@ -115,4 +116,67 @@ final class ChatViewModel {
115116
}
116117
}
117118
}
119+
120+
// MARK: - Throttled display (matches JS client's MessageStreamController)
121+
122+
/// Appends a freshly received network chunk to the pending-display queue and
/// kicks off the pacing loop if it is not already running.
/// - Parameter piece: Raw text chunk exactly as received from the stream.
private func enqueueChunk(_ piece: String) {
    chunkQueue.append(piece)
    // A live loop will drain the queue on its own; only start one when idle.
    guard displayTask == nil else { return }
    startDisplayLoop()
}
128+
129+
/// Runs the paced display loop: pops queued chunks and appends them to
/// `displayedText` at 30-80ms intervals, finishing only once the network
/// stream is done AND the queue has drained.
///
/// Fix: the previous finish condition (`currentTask == nil || fullText ==
/// displayedText`) fired whenever the display merely *caught up* with a
/// still-live stream — an empty queue makes `fullText == displayedText`
/// trivially true, and `currentTask` is never set back to nil — so
/// `finishDisplay()` ran mid-stream: `streaming` flipped off early and a
/// partial message was persisted (and persisted again on each later
/// catch-up). `Task` offers no non-blocking "is finished" query, so a small
/// one-way latch is flipped by a watcher task when the stream task completes.
private func startDisplayLoop() {
    // One-way completion latch. Mutated and read from Tasks that inherit this
    // view model's isolation — NOTE(review): assumes the class is
    // @MainActor-isolated (its Tasks already mutate `messages` directly);
    // confirm against the class declaration.
    final class StreamLatch { var isFinished = false }
    let latch = StreamLatch()
    if let stream = currentTask {
        // Watcher: fires after the network task (and thus every
        // `enqueueChunk` call it makes) has completed.
        Task { await stream.value; latch.isFinished = true }
    } else {
        latch.isFinished = true
    }

    displayTask = Task {
        while !Task.isCancelled {
            guard !chunkQueue.isEmpty else {
                // Queue drained — we are done only if the stream is too.
                if latch.isFinished {
                    finishDisplay()
                    return
                }
                // Stream still live: wait a tick for more chunks.
                try? await Task.sleep(for: .milliseconds(50))
                continue
            }

            let chunk = chunkQueue.removeFirst()
            displayedText += chunk

            // Update the trailing assistant message, or create it on the
            // first displayed chunk.
            if let last = messages.indices.last, messages[last].role == .assistant {
                messages[last].text = displayedText
            } else {
                messages.append(ChatMessage(role: .assistant, text: displayedText))
            }

            // Random 30-80ms per chunk for natural reading pace (shorter
            // than the JS client's 200-400ms because iOS chunks are smaller).
            try? await Task.sleep(for: .milliseconds(Int.random(in: 30...80)))
        }
    }
}
160+
161+
/// Tears down the pacing loop and immediately shows everything received so
/// far, so no already-received text is lost (used on the error path).
private func flushDisplay() {
    displayTask?.cancel()
    displayTask = nil
    displayedText = fullText
    guard !displayedText.isEmpty else { return }
    // Write the flushed text into the trailing assistant message,
    // creating one if the stream errored before any chunk was displayed.
    if let idx = messages.indices.last, messages[idx].role == .assistant {
        messages[idx].text = displayedText
    } else {
        messages.append(ChatMessage(role: .assistant, text: displayedText))
    }
}
173+
174+
/// Tears down the pacing loop state and persists the completed assistant
/// message.
///
/// Fix: made idempotent. If the display loop is ever restarted (a late chunk
/// after an early finish) this used to run twice and call
/// `store.appendMessage` twice, persisting the same message in duplicate.
/// `streaming` is true exactly from stream start until the first finish, so
/// guarding on it ensures the save happens at most once per stream.
private func finishDisplay() {
    displayTask = nil
    guard streaming else { return }  // already finished once — don't re-save
    streaming = false
    // Save the completed message, if any content arrived.
    if let message = messages.last, !message.text.isEmpty {
        store.appendMessage(message)
    }
}
118182
}

0 commit comments

Comments
 (0)