Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -67,6 +67,7 @@ function createWorkspaceSidebarState(
awaitingUserQuestion: false,
lastAbortReason: null,
currentModel: null,
pendingStreamModel: null,
recencyTimestamp: null,
loadedSkills: [],
skillLoadErrors: [],
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,8 @@ import { cleanup, render } from "@testing-library/react";
import { installDom } from "../../../../tests/ui/dom";
import * as WorkspaceStoreModule from "@/browser/stores/WorkspaceStore";

import { formatModelDisplayName } from "@/common/utils/ai/modelDisplay";
import { getModelName } from "@/common/utils/ai/models";
import { WorkspaceStatusIndicator } from "./WorkspaceStatusIndicator";

function mockSidebarState(
Expand All @@ -16,6 +18,7 @@ function mockSidebarState(
awaitingUserQuestion: false,
lastAbortReason: null,
currentModel: null,
pendingStreamModel: null,
recencyTimestamp: null,
loadedSkills: [],
skillLoadErrors: [],
Expand Down Expand Up @@ -68,4 +71,79 @@ describe("WorkspaceStatusIndicator", () => {
expect(icon).toBeTruthy();
expect(icon?.getAttribute("class") ?? "").toContain("animate-spin");
});

test("keeps the steady streaming layout free of the transient handoff slot", () => {
// A workspace that is already streaming (canInterrupt) with no prior
// "starting" phase should use the plain steady-state layout: the
// transient spinner slot ([data-phase-slot]) must not be rendered at all.
mockSidebarState({
canInterrupt: true,
currentModel: "openai:gpt-4o-mini",
});

const view = render(
<WorkspaceStatusIndicator
workspaceId="workspace-live-stream"
fallbackModel="anthropic:claude-sonnet-4-5"
/>
);

// No handoff slot in the DOM, but the "streaming" suffix is visible.
expect(view.container.querySelector("[data-phase-slot]")).toBeNull();
expect(view.container.textContent?.toLowerCase()).toContain("streaming");
});

test("keeps the model label anchored when starting hands off to streaming", () => {
// Exercises the starting -> streaming handoff: the model label shown
// while "starting" (taken from pendingStreamModel) must stay in place —
// not swap to the fallback model — and the spinner slot must collapse
// (w-3/mr-1.5 -> w-0/mr-0) rather than disappear abruptly.
const pendingModel = "openai:gpt-4o-mini";
const fallbackModel = "anthropic:claude-sonnet-4-5";
const pendingDisplayName = formatModelDisplayName(getModelName(pendingModel));
const fallbackDisplayName = formatModelDisplayName(getModelName(fallbackModel));
// Mutable state object: the second phase of the test mutates it in place
// and re-renders, simulating the store pushing a new sidebar snapshot.
const state: WorkspaceStoreModule.WorkspaceSidebarState = {
canInterrupt: false,
isStarting: true,
awaitingUserQuestion: false,
lastAbortReason: null,
currentModel: null,
pendingStreamModel: pendingModel,
recencyTimestamp: null,
loadedSkills: [],
skillLoadErrors: [],
agentStatus: undefined,
terminalActiveCount: 0,
terminalSessionCount: 0,
};
spyOn(WorkspaceStoreModule, "useWorkspaceSidebarState").mockImplementation(() => state);

const view = render(
<WorkspaceStatusIndicator
workspaceId="workspace-phase-shift-starting"
fallbackModel={fallbackModel}
/>
);

// Helpers re-query after each render; the slot/label are identified by
// data attributes rendered by WorkspaceStatusIndicator.
const getPhaseSlot = () => view.container.querySelector("[data-phase-slot]");
const getPhaseIcon = () => getPhaseSlot()?.querySelector("svg");
const getModelDisplay = () => view.container.querySelector("[data-model-display]");

// Phase 1 ("starting"): expanded slot, spinning icon, pending model label.
expect(getPhaseSlot()?.getAttribute("class") ?? "").toContain("w-3");
expect(getPhaseSlot()?.getAttribute("class") ?? "").toContain("mr-1.5");
expect(getPhaseIcon()?.getAttribute("class") ?? "").toContain("animate-spin");
expect(getModelDisplay()?.textContent ?? "").toContain(pendingDisplayName);
expect(getModelDisplay()?.textContent ?? "").not.toContain(fallbackDisplayName);
expect(view.container.textContent?.toLowerCase()).toContain("starting");

// Transition the store snapshot to a live stream: the requested model is
// now the current model and the pending value clears.
state.isStarting = false;
state.canInterrupt = true;
state.currentModel = pendingModel;
state.pendingStreamModel = null;
view.rerender(
<WorkspaceStatusIndicator
workspaceId="workspace-phase-shift-streaming"
fallbackModel={fallbackModel}
/>
);

// Phase 2 ("streaming" right after handoff): slot collapses (width/margin
// animate to zero), spinner stops, and the SAME model label remains.
expect(getPhaseSlot()?.getAttribute("class") ?? "").toContain("w-0");
expect(getPhaseSlot()?.getAttribute("class") ?? "").toContain("mr-0");
expect(getPhaseIcon()?.getAttribute("class") ?? "").not.toContain("animate-spin");
expect(getModelDisplay()?.textContent ?? "").toContain(pendingDisplayName);
expect(getModelDisplay()?.textContent ?? "").not.toContain(fallbackDisplayName);
expect(view.container.textContent?.toLowerCase()).toContain("streaming");
});
});
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ import { useWorkspaceSidebarState } from "@/browser/stores/WorkspaceStore";
import { ModelDisplay } from "@/browser/features/Messages/ModelDisplay";
import { EmojiIcon } from "@/browser/components/icons/EmojiIcon/EmojiIcon";
import { CircleHelp, ExternalLinkIcon, Loader2 } from "lucide-react";
import { memo } from "react";
import { memo, useEffect, useRef, useState } from "react";
import { Tooltip, TooltipTrigger, TooltipContent } from "../Tooltip/Tooltip";

export const WorkspaceStatusIndicator = memo<{
Expand All @@ -12,8 +12,40 @@ export const WorkspaceStatusIndicator = memo<{
* a prop so this component doesn't need to subscribe to the full WorkspaceContext. */
isCreating?: boolean;
}>(({ workspaceId, fallbackModel, isCreating }) => {
const { canInterrupt, isStarting, awaitingUserQuestion, currentModel, agentStatus } =
useWorkspaceSidebarState(workspaceId);
const {
canInterrupt,
isStarting,
awaitingUserQuestion,
currentModel,
pendingStreamModel,
agentStatus,
} = useWorkspaceSidebarState(workspaceId);

const phase: "starting" | "streaming" | null = canInterrupt
? "streaming"
: isStarting || isCreating
? "starting"
: null;

const previousPhaseRef = useRef<typeof phase>(phase);
const [isCollapsingPhaseSlot, setIsCollapsingPhaseSlot] = useState(false);
const shouldCollapsePhaseSlot =
isCollapsingPhaseSlot || (previousPhaseRef.current === "starting" && phase === "streaming");

useEffect(() => {
const previousPhase = previousPhaseRef.current;
previousPhaseRef.current = phase;

if (previousPhase === "starting" && phase === "streaming") {
setIsCollapsingPhaseSlot(true);
const timeoutId = window.setTimeout(() => {
setIsCollapsingPhaseSlot(false);
}, 150);
return () => window.clearTimeout(timeoutId);
}

setIsCollapsingPhaseSlot(false);
}, [phase]);

// Show prompt when ask_user_question is pending - make it prominent
if (awaitingUserQuestion) {
Expand Down Expand Up @@ -67,31 +99,61 @@ export const WorkspaceStatusIndicator = memo<{
);
}

const phase: "starting" | "streaming" | null = canInterrupt
? "streaming"
: isStarting || isCreating
? "starting"
: null;

if (!phase) {
return null;
}

const modelToShow = canInterrupt ? (currentModel ?? fallbackModel) : fallbackModel;
const modelToShow =
phase === "starting"
? (pendingStreamModel ?? fallbackModel)
: (currentModel ?? pendingStreamModel ?? fallbackModel);
const suffix = phase === "starting" ? "- starting..." : "- streaming...";

if (phase === "streaming" && !shouldCollapsePhaseSlot) {
return (
<div className="text-muted flex min-w-0 items-center gap-1.5 text-xs">
{modelToShow ? (
<>
<span className="min-w-0 truncate">
<ModelDisplay modelString={modelToShow} showTooltip={false} />
</span>
<span className="shrink-0 opacity-70">{suffix}</span>
</>
) : (
<span className="min-w-0 truncate">Assistant - streaming...</span>
)}
</div>
);
}

return (
<div className="text-muted flex min-w-0 items-center gap-1.5 text-xs">
{phase === "starting" && (
<Loader2 aria-hidden="true" className="h-3 w-3 shrink-0 animate-spin opacity-70" />
<div className="text-muted flex min-w-0 items-center text-xs">
{/* Keep the old steady-state layout, but hold the spinner slot just long enough to
animate the start -> stream handoff instead of flashing the label left. */}
{(phase === "starting" || shouldCollapsePhaseSlot) && (
<span
className={
phase === "starting"
? "mr-1.5 inline-flex w-3 shrink-0 overflow-hidden opacity-100"
: "mr-0 inline-flex w-0 shrink-0 overflow-hidden opacity-0 transition-[margin,width,opacity] duration-150 ease-out"
}
data-phase-slot
>
<Loader2
aria-hidden="true"
className={
phase === "starting" ? "h-3 w-3 shrink-0 animate-spin opacity-70" : "h-3 w-3 shrink-0"
}
/>
</span>
)}
{modelToShow ? (
<>
<div className="flex min-w-0 items-center gap-1.5">
<span className="min-w-0 truncate">
<ModelDisplay modelString={modelToShow} showTooltip={false} />
</span>
<span className="shrink-0 opacity-70">{suffix}</span>
</>
</div>
) : (
<span className="min-w-0 truncate">
{phase === "starting" ? "Assistant - starting..." : "Assistant - streaming..."}
Expand Down
50 changes: 48 additions & 2 deletions src/browser/stores/WorkspaceStore.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -199,14 +199,26 @@ function createUserMessageEvent(
id: string,
text: string,
historySequence: number,
timestamp: number
timestamp: number,
requestedModel?: string
): WorkspaceChatMessage {
return {
type: "message",
id,
role: "user",
parts: [{ type: "text", text }],
metadata: { historySequence, timestamp },
metadata: {
historySequence,
timestamp,
...(requestedModel
? {
muxMetadata: {
type: "normal",
requestedModel,
},
}
: {}),
},
};
}

Expand Down Expand Up @@ -1585,6 +1597,40 @@ describe("WorkspaceStore", () => {
});
expect(sawStarting).toBe(true);
});

it("exposes the pending requested model in sidebar state during startup", async () => {
// Verifies that a user message carrying muxMetadata.requestedModel (sent
// before any stream deltas arrive) surfaces as pendingStreamModel in both
// the full workspace state and the derived sidebar state while the
// stream is in its "starting" phase.
const workspaceId = "stream-starting-pending-model-workspace";
const requestedModel = "openai:gpt-4o-mini";

mockOnChat.mockImplementation(async function* (
input?: { workspaceId: string; mode?: unknown },
options?: { signal?: AbortSignal }
): AsyncGenerator<WorkspaceChatMessage, void, unknown> {
// Other workspaces' subscriptions just park until aborted so they
// don't interfere with this test's assertions.
if (input?.workspaceId !== workspaceId) {
await waitForAbortSignal(options?.signal);
return;
}

// Replay order: catch-up marker, then the user message with the
// requested model; the generator then idles (stream never starts),
// keeping the workspace in the "starting" phase.
yield { type: "caught-up" };
await Promise.resolve();
yield createUserMessageEvent("pending-model-message", "hello", 1, 2_500, requestedModel);
await waitForAbortSignal(options?.signal);
});

createAndAddWorkspace(store, workspaceId);

// Poll until the store reflects the pending model in both views of state.
const sawPendingModel = await waitUntil(() => {
const state = store.getWorkspaceState(workspaceId);
const sidebarState = store.getWorkspaceSidebarState(workspaceId);
return (
state.isStreamStarting === true &&
state.pendingStreamModel === requestedModel &&
sidebarState.isStarting === true &&
sidebarState.pendingStreamModel === requestedModel
);
});
expect(sawPendingModel).toBe(true);
});
});

describe("history pagination", () => {
Expand Down
5 changes: 5 additions & 0 deletions src/browser/stores/WorkspaceStore.ts
Original file line number Diff line number Diff line change
Expand Up @@ -123,6 +123,9 @@ export interface WorkspaceSidebarState {
awaitingUserQuestion: boolean;
lastAbortReason: StreamAbortReasonSnapshot | null;
currentModel: string | null;
// Requested model for the pending send so the sidebar keeps the same label while
// the turn transitions from pre-stream "starting" into the live stream.
pendingStreamModel: string | null;
recencyTimestamp: number | null;
loadedSkills: LoadedSkill[];
skillLoadErrors: SkillLoadError[];
Expand Down Expand Up @@ -1651,6 +1654,7 @@ export class WorkspaceStore {
cached.awaitingUserQuestion === fullState.awaitingUserQuestion &&
cached.lastAbortReason === fullState.lastAbortReason &&
cached.currentModel === fullState.currentModel &&
cached.pendingStreamModel === fullState.pendingStreamModel &&
cached.recencyTimestamp === fullState.recencyTimestamp &&
cached.loadedSkills === fullState.loadedSkills &&
cached.skillLoadErrors === fullState.skillLoadErrors &&
Expand All @@ -1671,6 +1675,7 @@ export class WorkspaceStore {
awaitingUserQuestion: fullState.awaitingUserQuestion,
lastAbortReason: fullState.lastAbortReason,
currentModel: fullState.currentModel,
pendingStreamModel: fullState.pendingStreamModel,
recencyTimestamp: fullState.recencyTimestamp,
loadedSkills: fullState.loadedSkills,
skillLoadErrors: fullState.skillLoadErrors,
Expand Down
Loading