
Commit 8100ef1

🤖 Replace time-based hack with explicit pendingStreamStart tracking
The original fix used time-based hacks (checking message age, forced re-renders with timers) to prevent retry barrier flash. This was janky and semantically incorrect.

New approach:
- Track pendingStreamStart flag in StreamingMessageAggregator
- Set true when user message is received
- Set false when stream-start arrives
- 30s timeout if stream never starts (safety net)
- hasInterruptedStream() checks this flag instead of message age

Benefits:
- Semantically correct: explicitly tracks "waiting for stream-start"
- No frontend timer hacks - state updates drive UI naturally
- Cleaner: removed ~24 LoC of hacky code, added ~60 LoC of proper state
- The 30s timeout is meaningful (truly stuck) vs arbitrary ("hide for 2s")

Changes:
- StreamingMessageAggregator: +25 LoC (flag, timer, handlers)
- WorkspaceStore: +3 LoC (expose via state)
- retryEligibility: -14 LoC (removed time checks, added flag param)
- AIView: -27 LoC (removed timer hack)
- Tests: Updated to use flag instead of timestamps

Net: +40 LoC but much cleaner architecture.
1 parent 44f2d45 commit 8100ef1
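
As a reading aid (not part of the commit), here is a minimal self-contained sketch of the lifecycle the message describes. The class name PendingStreamTracker and its method names are placeholders; the real implementation is the StreamingMessageAggregator change in the diff below.

// Minimal sketch of the pendingStreamStart lifecycle (placeholder class; the real
// logic lives in StreamingMessageAggregator below).
class PendingStreamTracker {
  private pending = false;
  private timer: ReturnType<typeof setTimeout> | null = null;

  // User message received: start waiting for stream-start.
  onUserMessage(): void {
    this.set(true);
  }

  // stream-start arrived: stop waiting; the retry barrier stays hidden.
  onStreamStart(): void {
    this.set(false);
  }

  isPending(): boolean {
    return this.pending;
  }

  private set(pending: boolean): void {
    if (this.timer) clearTimeout(this.timer);
    this.timer = null;
    this.pending = pending;
    if (pending) {
      // Safety net: if no stream-start arrives within 30s, treat the send as stuck
      // so the retry barrier becomes eligible again.
      this.timer = setTimeout(() => {
        this.pending = false;
        this.timer = null;
      }, 30_000);
    }
  }
}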

6 files changed: +62 −57 lines changed

src/components/AIView.tsx

Lines changed: 5 additions & 30 deletions

@@ -52,9 +52,6 @@ const AIViewInner: React.FC<AIViewProps> = ({
   const [activeTab, setActiveTab] = useState<TabType>("costs");
   const isReviewTabActive = activeTab === "review";

-  // Force re-check state for retry barrier (incremented to trigger re-evaluation)
-  const [, setForceRecheck] = useState(0);
-
   // Resizable sidebar for Review tab only
   // Hook encapsulates all drag logic, persistence, and constraints
   // Returns width to apply to RightSidebar and startResize for handle's onMouseDown

@@ -214,7 +211,8 @@ const AIViewInner: React.FC<AIViewProps> = ({
     currentModel: workspaceState?.currentModel ?? null,
     canInterrupt: workspaceState?.canInterrupt ?? false,
     showRetryBarrier: workspaceState
-      ? !workspaceState.canInterrupt && hasInterruptedStream(workspaceState.messages)
+      ? !workspaceState.canInterrupt &&
+        hasInterruptedStream(workspaceState.messages, workspaceState.pendingStreamStart)
       : false,
     currentWorkspaceThinking,
     setThinkingLevel,

@@ -245,30 +243,6 @@ const AIViewInner: React.FC<AIViewProps> = ({
     }
   }, [workspaceState, editingMessage]);

-  // Force re-evaluation after 2s when last message is a recent user message
-  // This ensures retry barrier appears even if stream-start never arrives
-  // Must be before early return to satisfy React Hooks rules
-  useEffect(() => {
-    if (!workspaceState) return;
-    const { messages, canInterrupt } = workspaceState;
-
-    if (messages.length === 0) return;
-    const lastMessage = messages[messages.length - 1];
-
-    if (lastMessage.type === "user" && !canInterrupt) {
-      const messageAge = Date.now() - (lastMessage.timestamp ?? 0);
-      const timeUntilCheck = Math.max(0, 2100 - messageAge); // 2.1s to ensure we're past threshold
-
-      if (timeUntilCheck > 0) {
-        const timer = setTimeout(() => {
-          // Force re-render to re-evaluate showRetryBarrier
-          setForceRecheck((prev) => prev + 1);
-        }, timeUntilCheck);
-        return () => clearTimeout(timer);
-      }
-    }
-  }, [workspaceState]);
-
   // Return early if workspace state not loaded yet
   if (!workspaceState) {
     return (

@@ -287,14 +261,15 @@ const AIViewInner: React.FC<AIViewProps> = ({
   }

   // Extract state from workspace state
-  const { messages, canInterrupt, isCompacting, loading, currentModel } = workspaceState;
+  const { messages, canInterrupt, isCompacting, loading, currentModel, pendingStreamStart } =
+    workspaceState;

   // Get active stream message ID for token counting
   const activeStreamMessageId = aggregator.getActiveStreamMessageId();

   // Track if last message was interrupted or errored (for RetryBarrier)
   // Uses same logic as useResumeManager for DRY
-  const showRetryBarrier = !canInterrupt && hasInterruptedStream(messages);
+  const showRetryBarrier = !canInterrupt && hasInterruptedStream(messages, pendingStreamStart);

   // Note: We intentionally do NOT reset autoRetry when streams start.
   // If user pressed Ctrl+C, autoRetry stays false until they manually retry.

src/hooks/useResumeManager.ts

Lines changed: 1 addition & 1 deletion

@@ -95,7 +95,7 @@ export function useResumeManager() {
     // 1. Must have interrupted stream (not currently streaming)
     if (state.canInterrupt) return false; // Currently streaming

-    if (!hasInterruptedStream(state.messages)) {
+    if (!hasInterruptedStream(state.messages, state.pendingStreamStart)) {
       return false;
     }

src/stores/WorkspaceStore.ts

Lines changed: 2 additions & 0 deletions

@@ -28,6 +28,7 @@ export interface WorkspaceState {
   currentModel: string | null;
   recencyTimestamp: number | null;
   todos: TodoItem[];
+  pendingStreamStart: boolean;
 }

 /**

@@ -345,6 +346,7 @@ export class WorkspaceStore {
         currentModel: aggregator.getCurrentModel() ?? null,
         recencyTimestamp: aggregator.getRecencyTimestamp(),
         todos: aggregator.getCurrentTodos(),
+        pendingStreamStart: aggregator.isPendingStreamStart(),
       };
     });
   }

src/utils/messages/StreamingMessageAggregator.ts

Lines changed: 36 additions & 0 deletions

@@ -60,6 +60,11 @@ export class StreamingMessageAggregator {
     timestamp: number;
   } | null = null;

+  // Track when we're waiting for stream-start after user message
+  // Prevents retry barrier flash during normal send flow
+  private pendingStreamStart = false;
+  private pendingStreamStartTimer: ReturnType<typeof setTimeout> | null = null;
+
   // Workspace creation timestamp (used for recency calculation)
   private readonly createdAt?: string;

@@ -184,6 +189,29 @@
     return this.messages.size > 0;
   }

+  isPendingStreamStart(): boolean {
+    return this.pendingStreamStart;
+  }
+
+  private setPendingStreamStart(pending: boolean): void {
+    // Clear existing timer if any
+    if (this.pendingStreamStartTimer) {
+      clearTimeout(this.pendingStreamStartTimer);
+      this.pendingStreamStartTimer = null;
+    }
+
+    this.pendingStreamStart = pending;
+
+    if (pending) {
+      // Set 30s timeout - if stream hasn't started by then, something is wrong
+      this.pendingStreamStartTimer = setTimeout(() => {
+        this.pendingStreamStart = false;
+        this.pendingStreamStartTimer = null;
+        this.invalidateCache(); // Trigger re-render to show retry barrier
+      }, 30000);
+    }
+  }
+
   getActiveStreams(): StreamingContext[] {
     return Array.from(this.activeStreams.values());
   }

@@ -254,6 +282,9 @@

   // Unified event handlers that encapsulate all complex logic
   handleStreamStart(data: StreamStartEvent): void {
+    // Clear pending stream start flag - stream has started
+    this.setPendingStreamStart(false);
+
     // Detect if this stream is compacting by checking if last user message is a compaction-request
     const messages = this.getAllMessages();
     const lastUserMsg = [...messages].reverse().find((m) => m.role === "user");

@@ -559,6 +590,11 @@

       // Now add the new message
       this.addMessage(incomingMessage);
+
+      // If this is a user message, set pendingStreamStart flag and start timeout
+      if (incomingMessage.role === "user") {
+        this.setPendingStreamStart(true);
+      }
     }
   }

src/utils/messages/retryEligibility.test.ts

Lines changed: 8 additions & 16 deletions

@@ -128,8 +128,7 @@ describe("hasInterruptedStream", () => {
     expect(hasInterruptedStream(messages)).toBe(false);
   });

-  it("returns true when last message is user message sent >2s ago (app restarted during slow model)", () => {
-    const oldTimestamp = Date.now() - 5000; // 5 seconds ago
+  it("returns true when last message is user message (app restarted during slow model)", () => {
     const messages: DisplayedMessage[] = [
       {
         type: "user",

@@ -156,14 +155,12 @@
         historyId: "user-2",
         content: "Another question",
         historySequence: 3,
-        timestamp: oldTimestamp,
       },
     ];
-    expect(hasInterruptedStream(messages)).toBe(true);
+    expect(hasInterruptedStream(messages, false)).toBe(true);
   });

-  it("returns false when last message is very recent user message (<2s)", () => {
-    const recentTimestamp = Date.now() - 500; // 500ms ago
+  it("returns false when pendingStreamStart is true", () => {
     const messages: DisplayedMessage[] = [
       {
         type: "user",

@@ -190,39 +187,34 @@
         historyId: "user-2",
         content: "Another question",
         historySequence: 3,
-        timestamp: recentTimestamp,
       },
     ];
-    expect(hasInterruptedStream(messages)).toBe(false);
+    expect(hasInterruptedStream(messages, true)).toBe(false);
   });

-  it("returns true when user message has no response sent >2s ago (slow model scenario)", () => {
-    const oldTimestamp = Date.now() - 3000; // 3 seconds ago
+  it("returns true when user message has no response (slow model scenario)", () => {
     const messages: DisplayedMessage[] = [
       {
         type: "user",
         id: "user-1",
         historyId: "user-1",
         content: "Hello",
         historySequence: 1,
-        timestamp: oldTimestamp,
       },
     ];
-    expect(hasInterruptedStream(messages)).toBe(true);
+    expect(hasInterruptedStream(messages, false)).toBe(true);
   });

-  it("returns false when user message was just sent (<2s ago)", () => {
-    const recentTimestamp = Date.now() - 1000; // 1 second ago
+  it("returns false when user message just sent and pendingStreamStart is true", () => {
     const messages: DisplayedMessage[] = [
       {
         type: "user",
         id: "user-1",
         historyId: "user-1",
         content: "Hello",
         historySequence: 1,
-        timestamp: recentTimestamp,
       },
     ];
-    expect(hasInterruptedStream(messages)).toBe(false);
+    expect(hasInterruptedStream(messages, true)).toBe(false);
   });
 });

src/utils/messages/retryEligibility.ts

Lines changed: 10 additions & 10 deletions

@@ -15,23 +15,23 @@ import type { DisplayedMessage } from "@/types/message";
  * 3. Last message is a user message (indicating we sent it but never got a response)
  *    - This handles app restarts during slow model responses (models can take 30-60s to first token)
  *    - User messages are only at the end when response hasn't started/completed
- *    - EXCEPT: Ignore very recent user messages (< 2s) to prevent flash during normal send flow
+ *    - EXCEPT: Not if pendingStreamStart is true (waiting for stream-start event)
  */
-export function hasInterruptedStream(messages: DisplayedMessage[]): boolean {
+export function hasInterruptedStream(
+  messages: DisplayedMessage[],
+  pendingStreamStart = false
+): boolean {
   if (messages.length === 0) return false;

-  const lastMessage = messages[messages.length - 1];
+  // Don't show retry barrier if we're waiting for stream-start
+  // This prevents flash during normal send flow
+  if (pendingStreamStart) return false;

-  // For user messages, check if enough time has passed to consider it truly interrupted
-  // This prevents the retry barrier from flashing briefly when sending a message
-  if (lastMessage.type === "user") {
-    const messageAge = Date.now() - (lastMessage.timestamp ?? 0);
-    const MIN_AGE_FOR_INTERRUPT = 2000; // 2 seconds
-    return messageAge >= MIN_AGE_FOR_INTERRUPT;
-  }
+  const lastMessage = messages[messages.length - 1];

   return (
     lastMessage.type === "stream-error" || // Stream errored out
+    lastMessage.type === "user" || // No response received yet (app restart during slow model)
     (lastMessage.type === "assistant" && lastMessage.isPartial === true) ||
     (lastMessage.type === "tool" && lastMessage.isPartial === true) ||
     (lastMessage.type === "reasoning" && lastMessage.isPartial === true)
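
For context, a hedged usage sketch of the new signature. The helper name shouldShowRetryBarrier and the import path for retryEligibility are assumptions; the real call sites are AIView.tsx and useResumeManager.ts above, which pass the aggregator's pendingStreamStart flag alongside the messages.

// Usage sketch (assumed import paths and helper name; mirrors the call sites above).
import { hasInterruptedStream } from "@/utils/messages/retryEligibility";
import type { DisplayedMessage } from "@/types/message";

function shouldShowRetryBarrier(
  messages: DisplayedMessage[],
  canInterrupt: boolean,
  pendingStreamStart: boolean
): boolean {
  // Hide the barrier while a stream is active, and while we are still waiting
  // for stream-start after sending a user message.
  return !canInterrupt && hasInterruptedStream(messages, pendingStreamStart);
}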
