diff --git a/.gitignore b/.gitignore index bde1cf453..06424dbb3 100644 --- a/.gitignore +++ b/.gitignore @@ -4,5 +4,6 @@ .claude/ node_modules dist +test-servers/build .env /.playwright-mcp/ diff --git a/AGENTS.md b/AGENTS.md index 5ee9d040a..b09cc371f 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -26,8 +26,15 @@ inspector/ │ ├── react/ # React hooks over the state stores │ └── storage/ # File and remote storage adapters (Zustand middleware) ├── test-servers/ # Composable MCP test servers + fixtures used by integration tests. -│ # Aliased as `@modelcontextprotocol/inspector-test-server` -│ # in clients/web/vite.config.ts and tsconfig.test.json. +│ ├── src/ # TypeScript sources. +│ ├── build/ # Built JS (gitignored). Produced by `npm run test-servers:build` +│ │ # so integration tests can spawn the stdio server as a real +│ │ # subprocess via `node test-servers/build/test-server-stdio.js`. +│ └── tsconfig.json # tsc build config (NodeNext, outDir ./build). +│ # The Vite alias `@modelcontextprotocol/inspector-test-server` +│ # in clients/web/vite.config.ts points at build/index.js +│ # (not src/) so `getTestMcpServerPath()` returns a `.js` path. +│ # tsconfig.test.json keeps paths pointing at src for typecheck. ├── specification/ # Build specification ... ``` @@ -82,7 +89,8 @@ All work should be driven by items on the project board. - In unit tests that expect error output, suppress it from the console - Run unit tests with `npm run test` (or `npm run test:watch` during development) from `clients/web/` - Run `npm run test:coverage` to verify the per-file gate: lines ≥ 90, statements ≥ 85, functions ≥ 80, branches ≥ 50 (CI enforces this gate). Branches is intentionally relaxed because Mantine portal/media-query branches are not exercisable under happy-dom; new business-logic branches should still be covered. 
-- Test files live alongside the source as `.test.tsx` (or `.test.ts` for non-React modules) +- Run `npm run test:integration` (also from `clients/web/`) for the v1.5-ported InspectorClient + transport + auth integration suite. It runs under a separate `integration` vitest project in node env (no happy-dom) with 30s timeouts. The script builds `test-servers/` first via `tsc -p ../../test-servers --noCheck` so the stdio MCP test server can be spawned as a real subprocess. CI runs it as its own step after unit tests. +- Test files live alongside the source as `.test.tsx` (or `.test.ts` for non-React modules). v1.5-ported integration tests live under `clients/web/src/test/core/` and are wired into the `integration` project via the `integrationTests` list in `vite.config.ts`. - Use `renderWithMantine` from `src/test/renderWithMantine.tsx` to render components — it wraps in `MantineProvider` with the project theme ### Responding to Code Reviews diff --git a/clients/web/package.json b/clients/web/package.json index 68efa1e03..7052526c0 100644 --- a/clients/web/package.json +++ b/clients/web/package.json @@ -15,9 +15,12 @@ "build-storybook": "storybook build", "test": "vitest run --project=unit", "test:watch": "vitest --project=unit", - "test:coverage": "vitest run --project=unit --coverage", + "test:coverage": "npm run test-servers:build && vitest run --project=unit --project=integration --coverage", "test:storybook": "vitest run --project=storybook", - "test:storybook:watch": "vitest --project=storybook" + "test:storybook:watch": "vitest --project=storybook", + "test-servers:build": "tsc -p ../../test-servers --noCheck", + "test:integration": "npm run test-servers:build && vitest run --project=integration", + "test:integration:watch": "npm run test-servers:build && vitest --project=integration" }, "dependencies": { "@emotion/react": "^11.14.0", diff --git a/clients/web/src/test/core/auth/oauth-callback-server.test.ts 
b/clients/web/src/test/core/auth/oauth-callback-server.test.ts index 1586dd07b..0e7459faf 100644 --- a/clients/web/src/test/core/auth/oauth-callback-server.test.ts +++ b/clients/web/src/test/core/auth/oauth-callback-server.test.ts @@ -175,6 +175,50 @@ describe("OAuthCallbackServer", () => { ).rejects.toThrow(); }); + it("succeeds with 200 when no onCallback handler is configured", async () => { + server = createOAuthCallbackServer(); + const result = await server.start({ port: 0 }); + + const res = await fetch( + `http://localhost:${result.port}/oauth/callback?code=lonely`, + ); + + expect(res.status).toBe(200); + const html = await res.text(); + expect(html).toContain("OAuth complete"); + }); + + it("returns 409 when a callback arrives after handled=true but before stop completes", async () => { + let release!: () => void; + const gate = new Promise((resolve) => { + release = resolve; + }); + + server = createOAuthCallbackServer(); + const result = await server.start({ + port: 0, + // Hold onCallback open so handled=true is set but the server hasn't + // yet finished stopping — second request slips into the 409 branch. + onCallback: async () => { + await gate; + }, + }); + + const firstP = fetch( + `http://localhost:${result.port}/oauth/callback?code=first`, + ); + // Give the server time to register handled=true before second hit. 
+ await new Promise((r) => setTimeout(r, 50)); + const secondP = fetch( + `http://localhost:${result.port}/oauth/callback?code=second`, + ); + const second = await secondP; + expect(second.status).toBe(409); + release(); + const first = await firstP; + expect(first.status).toBe(200); + }); + it("onCallback rejection returns 500 and error HTML", async () => { server = createOAuthCallbackServer(); const result = await server.start({ diff --git a/clients/web/src/test/core/auth/providers.test.ts b/clients/web/src/test/core/auth/providers.test.ts index 19adfbc4d..77734cf8e 100644 --- a/clients/web/src/test/core/auth/providers.test.ts +++ b/clients/web/src/test/core/auth/providers.test.ts @@ -3,7 +3,10 @@ import { ConsoleNavigation, CallbackNavigation, } from "@inspector/core/auth/providers.js"; -import { BrowserNavigation } from "@inspector/core/auth/browser/providers.js"; +import { + BrowserNavigation, + BrowserOAuthClientProvider, +} from "@inspector/core/auth/browser/providers.js"; describe("OAuthNavigation", () => { describe("ConsoleNavigation", () => { @@ -77,4 +80,68 @@ describe("OAuthNavigation", () => { ); }); }); + + describe("BrowserOAuthClientProvider", () => { + // Cast through unknown so we can install a minimal { location } stub + // without needing the full Window surface in tests. + type GlobalWithWindow = typeof globalThis & { + window?: unknown; + sessionStorage?: Storage; + }; + const originalWindow = (global as GlobalWithWindow).window; + const originalSessionStorage = (global as GlobalWithWindow).sessionStorage; + + class MemorySessionStorage implements Storage { + private map = new Map(); + get length() { + return this.map.size; + } + key(i: number) { + return [...this.map.keys()][i] ?? null; + } + getItem(k: string) { + return this.map.get(k) ?? 
null; + } + setItem(k: string, v: string) { + this.map.set(k, v); + } + removeItem(k: string) { + this.map.delete(k); + } + clear() { + this.map.clear(); + } + } + + beforeEach(() => { + // Cast through `unknown` so we can install a minimal { location } stub + // without needing the full Window surface in tests. + (global as unknown as { window?: unknown }).window = { + location: { + origin: "http://localhost:5173", + href: "http://localhost:5173", + }, + }; + (global as GlobalWithWindow).sessionStorage = new MemorySessionStorage(); + }); + + afterEach(() => { + (global as unknown as { window?: unknown }).window = originalWindow; + (global as GlobalWithWindow).sessionStorage = originalSessionStorage; + }); + + it("constructs and exposes redirectUrl derived from window.location.origin", () => { + const provider = new BrowserOAuthClientProvider( + "https://mcp.example.com", + ); + expect(provider.redirectUrl).toBe("http://localhost:5173/oauth/callback"); + }); + + it("throws if window is undefined", () => { + (global as unknown as { window?: unknown }).window = undefined; + expect( + () => new BrowserOAuthClientProvider("https://mcp.example.com"), + ).toThrow(/requires browser environment/); + }); + }); }); diff --git a/clients/web/src/test/core/auth/state-machine.test.ts b/clients/web/src/test/core/auth/state-machine.test.ts index ac2ddd29f..a13164bbc 100644 --- a/clients/web/src/test/core/auth/state-machine.test.ts +++ b/clients/web/src/test/core/auth/state-machine.test.ts @@ -370,5 +370,72 @@ describe("OAuthStateMachine", () => { }), ); }); + + it("token_request execute throws when client information cannot be obtained", async () => { + const metadata = { + issuer: "http://localhost:3000", + authorization_endpoint: "http://localhost:3000/authorize", + token_endpoint: "http://localhost:3000/token", + response_types_supported: ["code"], + }; + const providerNoClient = { + ...mockProvider, + getServerMetadata: vi.fn(() => metadata), + clientInformation: vi.fn(async 
() => undefined), + } as unknown as BaseOAuthClientProvider; + + const tokenState: AuthGuidedState = { + ...EMPTY_GUIDED_STATE, + oauthStep: "token_request", + oauthMetadata: metadata as OAuthMetadata, + authorizationCode: "code-without-client", + }; + + await expect( + oauthTransitions.token_request.execute({ + state: tokenState, + serverUrl: "http://localhost:3000", + provider: providerNoClient, + updateState, + }), + ).rejects.toThrow("Client information not available for token exchange"); + }); + + it("complete.canTransition always returns false (terminal state)", async () => { + const result = await oauthTransitions.complete.canTransition({ + state: { ...EMPTY_GUIDED_STATE, oauthStep: "complete" }, + serverUrl: "http://localhost:3000", + provider: mockProvider, + updateState, + }); + expect(result).toBe(false); + // execute is a no-op + await expect( + oauthTransitions.complete.execute({ + state: { ...EMPTY_GUIDED_STATE, oauthStep: "complete" }, + serverUrl: "http://localhost:3000", + provider: mockProvider, + updateState, + }), + ).resolves.toBeUndefined(); + }); + + it("executeStep throws when the current step cannot transition", async () => { + // metadata_discovery.canTransition is unconditional (returns true), but + // token_request requires authorizationCode + metadata + clientInfo; an + // empty state will refuse to transition. 
+ const stateMachine = new OAuthStateMachine( + "http://localhost:3000", + mockProvider, + updateState, + ); + const blockedState: AuthGuidedState = { + ...EMPTY_GUIDED_STATE, + oauthStep: "token_request", + }; + await expect(stateMachine.executeStep(blockedState)).rejects.toThrow( + /Cannot transition from token_request/, + ); + }); }); }); diff --git a/clients/web/src/test/core/auth/storage-browser.test.ts b/clients/web/src/test/core/auth/storage-browser.test.ts index dbc493e95..f78f74b21 100644 --- a/clients/web/src/test/core/auth/storage-browser.test.ts +++ b/clients/web/src/test/core/auth/storage-browser.test.ts @@ -248,6 +248,69 @@ describe("BrowserOAuthStorage", () => { }); }); + describe("clearClientInformation", () => { + it("removes the dynamically-registered client info by default", async () => { + storage.saveClientInformation(testServerUrl, { client_id: "dyn" }); + expect(await storage.getClientInformation(testServerUrl)).toEqual({ + client_id: "dyn", + }); + storage.clearClientInformation(testServerUrl); + expect(await storage.getClientInformation(testServerUrl)).toBeUndefined(); + }); + + it("removes the preregistered client info when isPreregistered=true", async () => { + storage.savePreregisteredClientInformation(testServerUrl, { + client_id: "pre", + }); + expect(await storage.getClientInformation(testServerUrl, true)).toEqual({ + client_id: "pre", + }); + storage.clearClientInformation(testServerUrl, true); + expect( + await storage.getClientInformation(testServerUrl, true), + ).toBeUndefined(); + }); + }); + + describe("individual clear methods", () => { + it("clearTokens removes only tokens", async () => { + storage.saveTokens(testServerUrl, { + access_token: "t", + token_type: "Bearer", + }); + expect(await storage.getTokens(testServerUrl)).toBeDefined(); + storage.clearTokens(testServerUrl); + expect(await storage.getTokens(testServerUrl)).toBeUndefined(); + }); + + it("clearCodeVerifier removes only the PKCE verifier", async () => { + 
storage.saveCodeVerifier(testServerUrl, "verifier"); + expect(storage.getCodeVerifier(testServerUrl)).toBe("verifier"); + storage.clearCodeVerifier(testServerUrl); + expect(storage.getCodeVerifier(testServerUrl)).toBeUndefined(); + }); + + it("clearScope removes only the scope", async () => { + storage.saveScope(testServerUrl, "read"); + expect(storage.getScope(testServerUrl)).toBe("read"); + storage.clearScope(testServerUrl); + expect(storage.getScope(testServerUrl)).toBeUndefined(); + }); + + it("clearServerMetadata removes only the cached metadata", async () => { + const metadata: OAuthMetadata = { + issuer: "http://localhost:3000", + authorization_endpoint: "http://localhost:3000/authorize", + token_endpoint: "http://localhost:3000/token", + response_types_supported: ["code"], + }; + storage.saveServerMetadata(testServerUrl, metadata); + expect(storage.getServerMetadata(testServerUrl)).toEqual(metadata); + storage.clearServerMetadata(testServerUrl); + expect(storage.getServerMetadata(testServerUrl)).toBeNull(); + }); + }); + describe("clearServerState", () => { it("should clear all state for a server", async () => { const clientInfo: OAuthClientInformation = { diff --git a/clients/web/src/test/core/auth/storage-node.test.ts b/clients/web/src/test/core/auth/storage-node.test.ts index 860ab5bdd..1cf79d4be 100644 --- a/clients/web/src/test/core/auth/storage-node.test.ts +++ b/clients/web/src/test/core/auth/storage-node.test.ts @@ -2,6 +2,7 @@ import { describe, it, expect, beforeEach, afterEach } from "vitest"; import { NodeOAuthStorage, getOAuthStore, + clearAllOAuthClientState, } from "@inspector/core/auth/node/storage-node.js"; import type { OAuthClientInformation, @@ -289,6 +290,71 @@ describe("NodeOAuthStorage", () => { }); }); + describe("clearClientInformation", () => { + it("removes the dynamically-registered client information by default", async () => { + await storage.saveClientInformation(testServerUrl, { + client_id: "dyn", + }); + expect(await 
storage.getClientInformation(testServerUrl)).toEqual({ + client_id: "dyn", + }); + storage.clearClientInformation(testServerUrl); + expect(await storage.getClientInformation(testServerUrl)).toBeUndefined(); + }); + + it("removes the preregistered client information when isPreregistered=true", async () => { + await storage.savePreregisteredClientInformation(testServerUrl, { + client_id: "pre", + }); + expect(await storage.getClientInformation(testServerUrl, true)).toEqual({ + client_id: "pre", + }); + storage.clearClientInformation(testServerUrl, true); + expect( + await storage.getClientInformation(testServerUrl, true), + ).toBeUndefined(); + }); + }); + + describe("individual clear methods", () => { + it("clearTokens removes only tokens", async () => { + await storage.saveTokens(testServerUrl, { + access_token: "t", + token_type: "Bearer", + }); + expect(await storage.getTokens(testServerUrl)).toBeDefined(); + storage.clearTokens(testServerUrl); + expect(await storage.getTokens(testServerUrl)).toBeUndefined(); + }); + + it("clearCodeVerifier removes only the PKCE verifier", async () => { + await storage.saveCodeVerifier(testServerUrl, "verifier"); + expect(storage.getCodeVerifier(testServerUrl)).toBe("verifier"); + storage.clearCodeVerifier(testServerUrl); + expect(storage.getCodeVerifier(testServerUrl)).toBeUndefined(); + }); + + it("clearScope removes only the scope", async () => { + await storage.saveScope(testServerUrl, "read write"); + expect(storage.getScope(testServerUrl)).toBe("read write"); + storage.clearScope(testServerUrl); + expect(storage.getScope(testServerUrl)).toBeUndefined(); + }); + + it("clearServerMetadata removes only the cached metadata", async () => { + const metadata: OAuthMetadata = { + issuer: "http://localhost:3000", + authorization_endpoint: "http://localhost:3000/authorize", + token_endpoint: "http://localhost:3000/token", + response_types_supported: ["code"], + }; + await storage.saveServerMetadata(testServerUrl, metadata); + 
expect(storage.getServerMetadata(testServerUrl)).toEqual(metadata); + storage.clearServerMetadata(testServerUrl); + expect(storage.getServerMetadata(testServerUrl)).toBeNull(); + }); + }); + describe("clearServerState", () => { it("should clear all state for a server", async () => { const clientInfo: OAuthClientInformation = { @@ -481,6 +547,20 @@ describe("NodeOAuthStorage with custom storagePath", () => { } }); + it("clearAllOAuthClientState clears every server in the default store", async () => { + const defaultStore = getOAuthStore(); + defaultStore.getState().setServerState("http://server-a.test", { + tokens: { access_token: "a", token_type: "Bearer" }, + }); + defaultStore.getState().setServerState("http://server-b.test", { + tokens: { access_token: "b", token_type: "Bearer" }, + }); + clearAllOAuthClientState(); + const all = defaultStore.getState(); + expect(all.getServerState("http://server-a.test").tokens).toBeUndefined(); + expect(all.getServerState("http://server-b.test").tokens).toBeUndefined(); + }); + it("should isolate state from default store", async () => { const customPath = path.join( os.tmpdir(), diff --git a/clients/web/src/test/core/auth/storage-remote.test.ts b/clients/web/src/test/core/auth/storage-remote.test.ts new file mode 100644 index 000000000..b9075e14d --- /dev/null +++ b/clients/web/src/test/core/auth/storage-remote.test.ts @@ -0,0 +1,111 @@ +import { describe, it, expect, beforeEach, vi } from "vitest"; +import { RemoteOAuthStorage } from "@inspector/core/auth/remote/storage-remote.js"; + +const NOOP_FETCH = vi.fn( + async () => + new Response(JSON.stringify({ state: { servers: {} }, version: 0 }), { + status: 200, + }), +) as unknown as typeof fetch; + +describe("RemoteOAuthStorage (unit, mocked fetch)", () => { + let storage: RemoteOAuthStorage; + const serverUrl = "http://localhost:3000"; + + beforeEach(() => { + storage = new RemoteOAuthStorage({ + baseUrl: "http://remote.example", + storeId: 
`unit-${Math.random().toString(36).slice(2)}`, + fetchFn: NOOP_FETCH, + }); + }); + + it("getClientInformation returns undefined when nothing is stored", async () => { + expect(await storage.getClientInformation(serverUrl)).toBeUndefined(); + }); + + it("saveClientInformation + getClientInformation round-trip", async () => { + await storage.saveClientInformation(serverUrl, { client_id: "dyn" }); + expect(await storage.getClientInformation(serverUrl)).toEqual({ + client_id: "dyn", + }); + }); + + it("savePreregisteredClientInformation + getClientInformation(isPreregistered=true)", async () => { + await storage.savePreregisteredClientInformation(serverUrl, { + client_id: "pre", + }); + expect(await storage.getClientInformation(serverUrl, true)).toEqual({ + client_id: "pre", + }); + }); + + it("clearClientInformation default branch removes dynamic info", async () => { + await storage.saveClientInformation(serverUrl, { client_id: "dyn" }); + storage.clearClientInformation(serverUrl); + expect(await storage.getClientInformation(serverUrl)).toBeUndefined(); + }); + + it("clearClientInformation(isPreregistered=true) removes preregistered info", async () => { + await storage.savePreregisteredClientInformation(serverUrl, { + client_id: "pre", + }); + storage.clearClientInformation(serverUrl, true); + expect(await storage.getClientInformation(serverUrl, true)).toBeUndefined(); + }); + + it("tokens round-trip and clearTokens", async () => { + const tokens = { access_token: "t", token_type: "Bearer" }; + await storage.saveTokens(serverUrl, tokens); + expect(await storage.getTokens(serverUrl)).toEqual(tokens); + storage.clearTokens(serverUrl); + expect(await storage.getTokens(serverUrl)).toBeUndefined(); + }); + + it("codeVerifier round-trip and clearCodeVerifier", async () => { + await storage.saveCodeVerifier(serverUrl, "verifier"); + expect(storage.getCodeVerifier(serverUrl)).toBe("verifier"); + storage.clearCodeVerifier(serverUrl); + 
expect(storage.getCodeVerifier(serverUrl)).toBeUndefined(); + }); + + it("scope round-trip and clearScope", async () => { + await storage.saveScope(serverUrl, "read write"); + expect(storage.getScope(serverUrl)).toBe("read write"); + storage.clearScope(serverUrl); + expect(storage.getScope(serverUrl)).toBeUndefined(); + }); + + it("serverMetadata round-trip and clearServerMetadata", async () => { + const md = { + issuer: serverUrl, + authorization_endpoint: `${serverUrl}/authorize`, + token_endpoint: `${serverUrl}/token`, + response_types_supported: ["code"], + }; + await storage.saveServerMetadata(serverUrl, md); + expect(storage.getServerMetadata(serverUrl)).toEqual(md); + storage.clearServerMetadata(serverUrl); + expect(storage.getServerMetadata(serverUrl)).toBeNull(); + }); + + it("clear() wipes all state for a server", async () => { + await storage.saveClientInformation(serverUrl, { client_id: "x" }); + await storage.saveTokens(serverUrl, { + access_token: "t", + token_type: "Bearer", + }); + storage.clear(serverUrl); + expect(await storage.getClientInformation(serverUrl)).toBeUndefined(); + expect(await storage.getTokens(serverUrl)).toBeUndefined(); + }); + + it("default storeId is 'oauth' when omitted", () => { + const s = new RemoteOAuthStorage({ + baseUrl: "http://r.example", + fetchFn: NOOP_FETCH, + }); + // No public accessor; constructing without throwing covers the default-branch. 
+ expect(s).toBeInstanceOf(RemoteOAuthStorage); + }); +}); diff --git a/clients/web/src/test/core/auth/storage.test.ts b/clients/web/src/test/core/auth/storage.test.ts new file mode 100644 index 000000000..1cf327d6c --- /dev/null +++ b/clients/web/src/test/core/auth/storage.test.ts @@ -0,0 +1,35 @@ +import { describe, it, expect } from "vitest"; +import { + getServerSpecificKey, + OAUTH_STORAGE_KEYS, +} from "@inspector/core/auth/storage.js"; + +describe("OAuthStorage utilities", () => { + describe("getServerSpecificKey", () => { + it("prepends server URL in brackets to the base key", () => { + expect(getServerSpecificKey("mcp_tokens", "https://example.com")).toBe( + "[https://example.com] mcp_tokens", + ); + }); + + it("handles different server URLs independently", () => { + const a = getServerSpecificKey("mcp_scope", "https://a.example"); + const b = getServerSpecificKey("mcp_scope", "https://b.example"); + expect(a).not.toBe(b); + }); + }); + + describe("OAUTH_STORAGE_KEYS", () => { + it("exposes the canonical sessionStorage keys", () => { + expect(OAUTH_STORAGE_KEYS).toEqual({ + CODE_VERIFIER: "mcp_code_verifier", + TOKENS: "mcp_tokens", + CLIENT_INFORMATION: "mcp_client_information", + PREREGISTERED_CLIENT_INFORMATION: + "mcp_preregistered_client_information", + SERVER_METADATA: "mcp_server_metadata", + SCOPE: "mcp_scope", + }); + }); + }); +}); diff --git a/clients/web/src/test/core/mcp/fetchTracking.test.ts b/clients/web/src/test/core/mcp/fetchTracking.test.ts new file mode 100644 index 000000000..0c8e8e946 --- /dev/null +++ b/clients/web/src/test/core/mcp/fetchTracking.test.ts @@ -0,0 +1,165 @@ +import { describe, it, expect, vi } from "vitest"; +import { createFetchTracker } from "@inspector/core/mcp/fetchTracking.js"; +import type { FetchRequestEntryBase } from "@inspector/core/mcp/types.js"; + +describe("createFetchTracker", () => { + it("tracks a successful GET request with response body", async () => { + const baseFetch = vi.fn( + async () => + new 
Response("hello", { + status: 200, + statusText: "OK", + headers: { "content-type": "text/plain" }, + }), + ); + const tracked: FetchRequestEntryBase[] = []; + const fetcher = createFetchTracker(baseFetch as typeof fetch, { + trackRequest: (entry) => tracked.push(entry), + }); + + const res = await fetcher("https://example.com/data"); + expect(res.status).toBe(200); + expect(tracked).toHaveLength(1); + expect(tracked[0]?.method).toBe("GET"); + expect(tracked[0]?.url).toBe("https://example.com/data"); + expect(tracked[0]?.responseBody).toBe("hello"); + expect(tracked[0]?.responseStatus).toBe(200); + }); + + it("accepts URL objects and Request instances as input", async () => { + const baseFetch = vi.fn(async () => new Response("ok")); + const tracked: FetchRequestEntryBase[] = []; + const fetcher = createFetchTracker(baseFetch as typeof fetch, { + trackRequest: (entry) => tracked.push(entry), + }); + + await fetcher(new URL("https://example.com/foo")); + await fetcher( + new Request("https://example.com/bar", { + method: "POST", + body: "hello", + headers: { "x-custom": "yes" }, + }), + ); + expect(tracked).toHaveLength(2); + expect(tracked[0]?.url).toBe("https://example.com/foo"); + expect(tracked[1]?.url).toBe("https://example.com/bar"); + expect(tracked[1]?.requestHeaders["x-custom"]).toBe("yes"); + expect(tracked[1]?.requestBody).toBe("hello"); + }); + + it("falls back to String() for non-string init bodies and yields undefined when conversion throws", async () => { + const baseFetch = vi.fn(async () => new Response("ok")); + const tracked: FetchRequestEntryBase[] = []; + const fetcher = createFetchTracker(baseFetch as typeof fetch, { + trackRequest: (entry) => tracked.push(entry), + }); + + const throwingBody = { + toString() { + throw new Error("not coercible"); + }, + }; + await fetcher("https://example.com/x", { + method: "POST", + body: throwingBody as unknown as BodyInit, + }); + expect(tracked[0]?.requestBody).toBeUndefined(); + }); + + it("captures the 
error path when baseFetch throws", async () => { + const baseFetch = vi.fn(async () => { + throw new Error("network down"); + }); + const tracked: FetchRequestEntryBase[] = []; + const fetcher = createFetchTracker(baseFetch as typeof fetch, { + trackRequest: (entry) => tracked.push(entry), + }); + + await expect( + fetcher("https://example.com/fail", { method: "POST" }), + ).rejects.toThrow("network down"); + expect(tracked).toHaveLength(1); + expect(tracked[0]?.error).toBe("network down"); + expect(tracked[0]?.responseStatus).toBeUndefined(); + }); + + it("captures the error path when baseFetch throws a non-Error", async () => { + const baseFetch = vi.fn(async () => { + throw "stringly-typed"; + }); + const tracked: FetchRequestEntryBase[] = []; + const fetcher = createFetchTracker(baseFetch as typeof fetch, { + trackRequest: (entry) => tracked.push(entry), + }); + + await expect(fetcher("https://example.com/fail")).rejects.toBe( + "stringly-typed", + ); + expect(tracked[0]?.error).toBe("stringly-typed"); + }); + + it("skips body reading on event-stream responses", async () => { + const baseFetch = vi.fn( + async () => + new Response("ignored", { + headers: { "content-type": "text/event-stream" }, + }), + ); + const tracked: FetchRequestEntryBase[] = []; + const fetcher = createFetchTracker(baseFetch as typeof fetch, { + trackRequest: (entry) => tracked.push(entry), + }); + await fetcher("https://example.com/events"); + expect(tracked[0]?.responseBody).toBeUndefined(); + }); + + it("skips body reading for POST /mcp streamable responses", async () => { + const baseFetch = vi.fn(async () => new Response("streamed")); + const tracked: FetchRequestEntryBase[] = []; + const fetcher = createFetchTracker(baseFetch as typeof fetch, { + trackRequest: (entry) => tracked.push(entry), + }); + await fetcher("https://example.com/mcp", { method: "POST" }); + expect(tracked[0]?.responseBody).toBeUndefined(); + }); + + it("survives a Request whose body cannot be cloned/read", 
async () => { + const baseFetch = vi.fn(async () => new Response("ok")); + const tracked: FetchRequestEntryBase[] = []; + const fetcher = createFetchTracker(baseFetch as typeof fetch, { + trackRequest: (entry) => tracked.push(entry), + }); + + const req = new Request("https://example.com/post", { + method: "POST", + body: "payload", + }); + // Force clone() to throw, exercising the inner catch + Object.defineProperty(req, "clone", { + value: () => { + throw new Error("clone failed"); + }, + }); + await fetcher(req); + expect(tracked[0]?.requestBody).toBeUndefined(); + }); + + it("falls back to undefined when response.clone() throws", async () => { + const tracked: FetchRequestEntryBase[] = []; + const baseFetch = vi.fn(async () => { + const r = new Response("body"); + Object.defineProperty(r, "clone", { + value: () => { + throw new Error("nope"); + }, + }); + return r; + }); + const fetcher = createFetchTracker(baseFetch as typeof fetch, { + trackRequest: (entry) => tracked.push(entry), + }); + await fetcher("https://example.com/data"); + expect(tracked[0]?.responseBody).toBeUndefined(); + }); +}); diff --git a/clients/web/src/test/core/mcp/node/config.test.ts b/clients/web/src/test/core/mcp/node/config.test.ts new file mode 100644 index 000000000..cad08eb26 --- /dev/null +++ b/clients/web/src/test/core/mcp/node/config.test.ts @@ -0,0 +1,486 @@ +import { describe, it, expect, beforeEach, afterEach } from "vitest"; +import { mkdtempSync, writeFileSync, rmSync } from "node:fs"; +import { tmpdir } from "node:os"; +import { join } from "node:path"; +import { + parseKeyValuePair, + parseHeaderPair, + resolveServerConfigs, + getNamedServerConfigs, +} from "@inspector/core/mcp/node/config.js"; + +describe("parseKeyValuePair", () => { + it("returns a single-entry record on a clean key=value", () => { + expect(parseKeyValuePair("a=1")).toEqual({ a: "1" }); + }); + + it("accumulates into the previous record", () => { + const r1 = parseKeyValuePair("a=1"); + const r2 = 
parseKeyValuePair("b=2", r1); + expect(r2).toEqual({ a: "1", b: "2" }); + }); + + it("preserves later '=' inside the value", () => { + expect(parseKeyValuePair("token=abc=def")).toEqual({ token: "abc=def" }); + }); + + it("throws when the format is invalid", () => { + expect(() => parseKeyValuePair("nope")).toThrow(/Invalid parameter format/); + expect(() => parseKeyValuePair("=value")).toThrow( + /Invalid parameter format/, + ); + expect(() => parseKeyValuePair("key=")).toThrow(/Invalid parameter format/); + }); +}); + +describe("parseHeaderPair", () => { + it("splits on the first colon and trims both sides", () => { + expect(parseHeaderPair("X-Test: value")).toEqual({ + "X-Test": "value", + }); + }); + + it("supports colons inside the value", () => { + expect(parseHeaderPair("X: a:b:c")).toEqual({ X: "a:b:c" }); + }); + + it("accumulates into the previous record", () => { + const r1 = parseHeaderPair("X: 1"); + const r2 = parseHeaderPair("Y: 2", r1); + expect(r2).toEqual({ X: "1", Y: "2" }); + }); + + it("throws on missing colon or empty key/value", () => { + expect(() => parseHeaderPair("X-Test")).toThrow(/Invalid header format/); + expect(() => parseHeaderPair(": value")).toThrow(/Invalid header format/); + expect(() => parseHeaderPair("X:")).toThrow(/Invalid header format/); + }); +}); + +describe("resolveServerConfigs — single mode", () => { + let tempDir: string; + let configPath: string; + + beforeEach(() => { + tempDir = mkdtempSync(join(tmpdir(), "inspector-config-test-")); + configPath = join(tempDir, "mcp.json"); + }); + + afterEach(() => { + rmSync(tempDir, { recursive: true, force: true }); + }); + + it("builds a stdio config from positional target args", () => { + const [config] = resolveServerConfigs( + { target: ["my-server", "--flag"] }, + "single", + ); + expect(config).toMatchObject({ + type: "stdio", + command: "my-server", + args: ["--flag"], + }); + }); + + it("applies env and cwd overrides to a stdio config", () => { + const [config] = 
resolveServerConfigs( + { + target: ["cmd"], + env: { FOO: "bar" }, + cwd: "/tmp", + }, + "single", + ); + expect(config).toMatchObject({ + type: "stdio", + command: "cmd", + env: { FOO: "bar" }, + cwd: "/tmp", + }); + }); + + it("infers streamable-http from a /mcp URL", () => { + const [config] = resolveServerConfigs( + { target: ["http://example.com/mcp"] }, + "single", + ); + expect(config).toEqual({ + type: "streamable-http", + url: "http://example.com/mcp", + }); + }); + + it("infers sse from a /sse URL", () => { + const [config] = resolveServerConfigs( + { target: ["http://example.com/sse"] }, + "single", + ); + expect(config).toEqual({ + type: "sse", + url: "http://example.com/sse", + }); + }); + + it("respects an explicit --transport=streamable-http override", () => { + const [config] = resolveServerConfigs( + { + target: ["http://example.com/other"], + transport: "http", + }, + "single", + ); + expect(config).toMatchObject({ + type: "streamable-http", + url: "http://example.com/other", + }); + }); + + it("uses --server-url when no positional URL is provided", () => { + const [config] = resolveServerConfigs( + { serverUrl: "http://example.com/sse" }, + "single", + ); + expect(config).toEqual({ + type: "sse", + url: "http://example.com/sse", + }); + }); + + it("attaches headers on remote configs when provided", () => { + const [config] = resolveServerConfigs( + { + target: ["http://example.com/mcp"], + headers: { Authorization: "Bearer x" }, + }, + "single", + ); + expect(config).toMatchObject({ + type: "streamable-http", + url: "http://example.com/mcp", + headers: { Authorization: "Bearer x" }, + }); + }); + + it("attaches headers on sse configs when provided", () => { + const [config] = resolveServerConfigs( + { + target: ["http://example.com/sse"], + headers: { Authorization: "Bearer x" }, + }, + "single", + ); + expect(config).toMatchObject({ + type: "sse", + url: "http://example.com/sse", + headers: { Authorization: "Bearer x" }, + }); + }); + + 
it("rejects args passed alongside a URL target", () => { + expect(() => + resolveServerConfigs( + { target: ["http://example.com/mcp", "extra"] }, + "single", + ), + ).toThrow(/cannot be passed to a URL-based MCP server/); + }); + + it("rejects ambiguous URLs with no transport hint", () => { + expect(() => + resolveServerConfigs( + { target: ["http://example.com/unknown"] }, + "single", + ), + ).toThrow(/Transport type not specified/); + }); + + it("rejects --transport other than stdio for local commands", () => { + expect(() => + resolveServerConfigs({ target: ["cmd"], transport: "sse" }, "single"), + ).toThrow(/Only stdio transport can be used with local commands/); + }); + + it("rejects an empty target with no URL", () => { + expect(() => resolveServerConfigs({ target: [] }, "single")).toThrow( + /Target is required/, + ); + }); + + it("loads a named server from config", () => { + writeFileSync( + configPath, + JSON.stringify({ + mcpServers: { + foo: { command: "node", args: ["foo.js"] }, + }, + }), + ); + const [config] = resolveServerConfigs( + { configPath, serverName: "foo" }, + "single", + ); + expect(config).toMatchObject({ + type: "stdio", + command: "node", + args: ["foo.js"], + }); + }); + + it("auto-picks the single server from a config when --server is omitted", () => { + writeFileSync( + configPath, + JSON.stringify({ + mcpServers: { + only: { command: "node", args: ["only.js"] }, + }, + }), + ); + const [config] = resolveServerConfigs({ configPath }, "single"); + expect(config).toMatchObject({ type: "stdio", command: "node" }); + }); + + it("throws when the config has multiple servers and no --server", () => { + writeFileSync( + configPath, + JSON.stringify({ + mcpServers: { + a: { command: "a" }, + b: { command: "b" }, + }, + }), + ); + expect(() => resolveServerConfigs({ configPath }, "single")).toThrow( + /Multiple servers found/, + ); + }); + + it("throws when the config file is missing", () => { + expect(() => + resolveServerConfigs( + { 
configPath: join(tempDir, "nonexistent.json") }, + "single", + ), + ).toThrow(/Config file not found/); + }); + + it("throws when the config file has no mcpServers element", () => { + writeFileSync(configPath, JSON.stringify({})); + expect(() => resolveServerConfigs({ configPath }, "single")).toThrow( + /must contain an mcpServers element/, + ); + }); + + it("throws when the config file is malformed JSON", () => { + writeFileSync(configPath, "not json"); + expect(() => resolveServerConfigs({ configPath }, "single")).toThrow( + /Error loading configuration/, + ); + }); + + it("normalizes 'http' → 'streamable-http' in stored configs", () => { + writeFileSync( + configPath, + JSON.stringify({ + mcpServers: { + server1: { type: "http", url: "http://example.com/mcp" }, + }, + }), + ); + const [config] = resolveServerConfigs( + { configPath, serverName: "server1" }, + "single", + ); + expect(config.type).toBe("streamable-http"); + }); + + it("defaults missing type to stdio", () => { + writeFileSync( + configPath, + JSON.stringify({ + mcpServers: { + server1: { command: "echo" }, + }, + }), + ); + const [config] = resolveServerConfigs( + { configPath, serverName: "server1" }, + "single", + ); + expect(config.type).toBe("stdio"); + }); + + it("throws when the named server is missing", () => { + writeFileSync( + configPath, + JSON.stringify({ + mcpServers: { + foo: { command: "node" }, + }, + }), + ); + expect(() => + resolveServerConfigs({ configPath, serverName: "bar" }, "single"), + ).toThrow(/Server 'bar' not found/); + }); + + it("applies env/cwd/headers overrides when loading from config", () => { + writeFileSync( + configPath, + JSON.stringify({ + mcpServers: { + foo: { command: "node", args: ["foo.js"] }, + bar: { + type: "streamable-http", + url: "http://example.com/mcp", + }, + }, + }), + ); + const [stdio] = resolveServerConfigs( + { + configPath, + serverName: "foo", + env: { X: "1" }, + cwd: "/tmp", + }, + "single", + ); + expect(stdio).toMatchObject({ + type: 
"stdio", + env: { X: "1" }, + cwd: "/tmp", + }); + + const [http] = resolveServerConfigs( + { + configPath, + serverName: "bar", + headers: { Authorization: "Bearer x" }, + }, + "single", + ); + expect(http).toMatchObject({ + type: "streamable-http", + headers: { Authorization: "Bearer x" }, + }); + }); +}); + +describe("resolveServerConfigs — multi mode", () => { + let tempDir: string; + let configPath: string; + + beforeEach(() => { + tempDir = mkdtempSync(join(tmpdir(), "inspector-config-test-")); + configPath = join(tempDir, "mcp.json"); + }); + + afterEach(() => { + rmSync(tempDir, { recursive: true, force: true }); + }); + + it("returns all servers from a config file", () => { + writeFileSync( + configPath, + JSON.stringify({ + mcpServers: { + a: { command: "a" }, + b: { type: "http", url: "http://example.com/mcp" }, + }, + }), + ); + const configs = resolveServerConfigs({ configPath }, "multi"); + expect(configs).toHaveLength(2); + expect(configs[0]?.type).toBe("stdio"); + expect(configs[1]?.type).toBe("streamable-http"); + }); + + it("returns a single ad-hoc config when no config file is given", () => { + const configs = resolveServerConfigs({ target: ["cmd"] }, "multi"); + expect(configs).toHaveLength(1); + expect(configs[0]?.type).toBe("stdio"); + }); + + it("rejects ad-hoc flags alongside a config path", () => { + writeFileSync( + configPath, + JSON.stringify({ mcpServers: { a: { command: "a" } } }), + ); + expect(() => + resolveServerConfigs({ configPath, target: ["other"] }, "multi"), + ).toThrow(/do not pass --transport, --server-url/); + }); + + it("returns an empty array for an unknown mode", () => { + const configs = resolveServerConfigs( + { target: ["cmd"] }, + "unknown" as unknown as "single", + ); + expect(configs).toEqual([]); + }); +}); + +describe("getNamedServerConfigs", () => { + let tempDir: string; + let configPath: string; + + beforeEach(() => { + tempDir = mkdtempSync(join(tmpdir(), "inspector-config-test-")); + configPath = 
join(tempDir, "mcp.json"); + }); + + afterEach(() => { + rmSync(tempDir, { recursive: true, force: true }); + }); + + it("returns a named record of servers from a config file", () => { + writeFileSync( + configPath, + JSON.stringify({ + mcpServers: { + a: { command: "a" }, + b: { command: "b" }, + }, + }), + ); + const named = getNamedServerConfigs({ configPath }); + expect(Object.keys(named)).toEqual(["a", "b"]); + }); + + it("applies overrides to each server", () => { + writeFileSync( + configPath, + JSON.stringify({ + mcpServers: { + a: { command: "a" }, + b: { type: "http", url: "http://example.com/mcp" }, + }, + }), + ); + const named = getNamedServerConfigs({ + configPath, + env: { X: "1" }, + headers: { Authorization: "Bearer x" }, + }); + expect((named.a as { env: Record }).env).toEqual({ + X: "1", + }); + expect((named.b as { headers: Record }).headers).toEqual({ + Authorization: "Bearer x", + }); + }); + + it("throws when configPath is missing", () => { + expect(() => getNamedServerConfigs({})).toThrow(/Config path is required/); + }); + + it("throws when ad-hoc flags are also given", () => { + writeFileSync( + configPath, + JSON.stringify({ mcpServers: { a: { command: "a" } } }), + ); + expect(() => + getNamedServerConfigs({ configPath, target: ["other"] }), + ).toThrow(/do not pass --transport, --server-url/); + }); +}); diff --git a/clients/web/src/test/core/mcp/remote/createRemoteFetch.test.ts b/clients/web/src/test/core/mcp/remote/createRemoteFetch.test.ts new file mode 100644 index 000000000..7ae9edbf6 --- /dev/null +++ b/clients/web/src/test/core/mcp/remote/createRemoteFetch.test.ts @@ -0,0 +1,171 @@ +import { describe, it, expect, vi } from "vitest"; +import { createRemoteFetch } from "@inspector/core/mcp/remote/createRemoteFetch.js"; + +function ok(body: object): Response { + return new Response(JSON.stringify(body), { + status: 200, + headers: { "content-type": "application/json" }, + }); +} + +const standardRemoteBody = { + ok: true, + status: 
200, + statusText: "OK", + headers: { "content-type": "text/plain" }, + body: "echoed", +}; + +describe("createRemoteFetch", () => { + it("forwards a string URL + GET to /api/fetch", async () => { + const fetchFn = vi + .fn() + .mockResolvedValue(ok(standardRemoteBody)); + const remoteFetch = createRemoteFetch({ + baseUrl: "http://remote.example/", + fetchFn: fetchFn as unknown as typeof fetch, + }); + const res = await remoteFetch("http://upstream.example/data"); + expect(res.status).toBe(200); + expect(await res.text()).toBe("echoed"); + expect(fetchFn).toHaveBeenCalledTimes(1); + const init = fetchFn.mock.calls[0]?.[1]; + expect(init?.method).toBe("POST"); + const payload = JSON.parse(init?.body as string); + expect(payload.url).toBe("http://upstream.example/data"); + expect(payload.method).toBe("GET"); + }); + + it("serializes a URL object input and an explicit method override", async () => { + const fetchFn = vi + .fn() + .mockResolvedValue(ok(standardRemoteBody)); + const remoteFetch = createRemoteFetch({ + baseUrl: "http://remote.example", + fetchFn: fetchFn as unknown as typeof fetch, + }); + await remoteFetch(new URL("http://upstream.example/data"), { + method: "DELETE", + }); + const payload = JSON.parse(fetchFn.mock.calls[0]?.[1]?.body as string); + expect(payload.url).toBe("http://upstream.example/data"); + expect(payload.method).toBe("DELETE"); + }); + + it("serializes Request bodies by cloning and reading text", async () => { + const fetchFn = vi + .fn() + .mockResolvedValue(ok(standardRemoteBody)); + const remoteFetch = createRemoteFetch({ + baseUrl: "http://remote.example", + fetchFn: fetchFn as unknown as typeof fetch, + }); + const req = new Request("http://upstream.example/post", { + method: "POST", + body: "from-request", + }); + await remoteFetch(req); + const payload = JSON.parse(fetchFn.mock.calls[0]?.[1]?.body as string); + expect(payload.body).toBe("from-request"); + }); + + it("serializes URLSearchParams and FormData bodies into 
form-urlencoded strings", async () => { + // mockImplementation creates a fresh Response per call — mockResolvedValue + // returns the same instance and the second call's body is "already used". + const fetchFn = vi + .fn() + .mockImplementation(async () => ok(standardRemoteBody)); + const remoteFetch = createRemoteFetch({ + baseUrl: "http://remote.example", + fetchFn: fetchFn as unknown as typeof fetch, + }); + await remoteFetch("http://upstream.example/", { + method: "POST", + body: new URLSearchParams({ a: "1", b: "2" }), + }); + const usp = JSON.parse(fetchFn.mock.calls[0]?.[1]?.body as string); + expect(usp.body).toBe("a=1&b=2"); + + const fd = new FormData(); + fd.set("x", "10"); + fd.set("y", "20"); + await remoteFetch("http://upstream.example/", { + method: "POST", + body: fd, + }); + const fdPayload = JSON.parse(fetchFn.mock.calls[1]?.[1]?.body as string); + expect(fdPayload.body).toBe("x=10&y=20"); + }); + + it("falls back to String() for non-string, non-stream init.body", async () => { + const fetchFn = vi + .fn() + .mockResolvedValue(ok(standardRemoteBody)); + const remoteFetch = createRemoteFetch({ + baseUrl: "http://remote.example", + fetchFn: fetchFn as unknown as typeof fetch, + }); + const body = { + toString() { + return "stringified-body"; + }, + }; + await remoteFetch("http://upstream.example/", { + method: "POST", + body: body as unknown as BodyInit, + }); + const payload = JSON.parse(fetchFn.mock.calls[0]?.[1]?.body as string); + expect(payload.body).toBe("stringified-body"); + }); + + it("sets x-mcp-remote-auth when authToken is provided", async () => { + const fetchFn = vi + .fn() + .mockResolvedValue(ok(standardRemoteBody)); + const remoteFetch = createRemoteFetch({ + baseUrl: "http://remote.example", + authToken: "shh", + fetchFn: fetchFn as unknown as typeof fetch, + }); + await remoteFetch("http://upstream.example/"); + const headers = fetchFn.mock.calls[0]?.[1]?.headers as Record< + string, + string + >; + 
expect(headers["x-mcp-remote-auth"]).toBe("Bearer shh"); + }); + + it("throws when the remote returns a non-ok status", async () => { + const fetchFn = vi + .fn() + .mockResolvedValue(new Response("upstream blew up", { status: 502 })); + const remoteFetch = createRemoteFetch({ + baseUrl: "http://remote.example", + fetchFn: fetchFn as unknown as typeof fetch, + }); + await expect(remoteFetch("http://upstream.example/")).rejects.toThrow( + /Remote fetch failed \(502\): upstream blew up/, + ); + }); + + it("rebuilds a Response from the remote's deserialized payload", async () => { + const fetchFn = vi.fn().mockResolvedValue( + ok({ + ok: true, + status: 201, + statusText: "Created", + headers: { "x-test": "yes" }, + body: "hello", + }), + ); + const remoteFetch = createRemoteFetch({ + baseUrl: "http://remote.example", + fetchFn: fetchFn as unknown as typeof fetch, + }); + const res = await remoteFetch("http://upstream.example/"); + expect(res.status).toBe(201); + expect(res.statusText).toBe("Created"); + expect(res.headers.get("x-test")).toBe("yes"); + expect(await res.text()).toBe("hello"); + }); +}); diff --git a/clients/web/src/test/core/mcp/remote/createRemoteLogger.test.ts b/clients/web/src/test/core/mcp/remote/createRemoteLogger.test.ts new file mode 100644 index 000000000..e421dd0de --- /dev/null +++ b/clients/web/src/test/core/mcp/remote/createRemoteLogger.test.ts @@ -0,0 +1,75 @@ +import { describe, it, expect, vi } from "vitest"; +import { createRemoteLogger } from "@inspector/core/mcp/remote/createRemoteLogger.js"; + +describe("createRemoteLogger", () => { + it("transmits log events to /api/log with the configured auth header", async () => { + const fetchFn = vi + .fn() + .mockResolvedValue(new Response("ok", { status: 204 })); + const logger = createRemoteLogger({ + baseUrl: "http://example.com/", + authToken: "secret", + fetchFn: fetchFn as unknown as typeof fetch, + level: "info", + }); + + logger.info({ msg: "hello" }); + + // pino transmit is 
synchronous but the fetch is fire-and-forget — wait a + // tick so the promise resolves and the call records. + await new Promise((r) => setTimeout(r, 0)); + expect(fetchFn).toHaveBeenCalledTimes(1); + const [url, init] = fetchFn.mock.calls[0]!; + expect(url).toBe("http://example.com/api/log"); + expect(init?.method).toBe("POST"); + expect((init?.headers as Record<string, string>)["Content-Type"]).toBe( + "application/json", + ); + expect((init?.headers as Record<string, string>)["x-mcp-remote-auth"]).toBe( + "Bearer secret", + ); + }); + + it("omits the auth header when no token is provided and uses default level", async () => { + const fetchFn = vi.fn().mockResolvedValue(new Response("ok")); + const logger = createRemoteLogger({ + baseUrl: "http://example.com", + fetchFn: fetchFn as unknown as typeof fetch, + }); + logger.info({ msg: "no auth" }); + await new Promise((r) => setTimeout(r, 0)); + expect(fetchFn).toHaveBeenCalledTimes(1); + const init = fetchFn.mock.calls[0]?.[1]; + expect( + (init?.headers as Record<string, string>)["x-mcp-remote-auth"], + ).toBeUndefined(); + }); + + it("silently swallows fetch rejections so logging never throws", async () => { + const fetchFn = vi + .fn() + .mockRejectedValue(new Error("network")); + const logger = createRemoteLogger({ + baseUrl: "http://example.com", + fetchFn: fetchFn as unknown as typeof fetch, + }); + expect(() => logger.info({ msg: "boom" })).not.toThrow(); + await new Promise((r) => setTimeout(r, 0)); + expect(fetchFn).toHaveBeenCalledTimes(1); + }); + + it("falls back to globalThis.fetch when no fetchFn is provided", () => { + const originalFetch = globalThis.fetch; + const fetchMock = vi + .fn() + .mockResolvedValue(new Response("ok")); + globalThis.fetch = fetchMock as unknown as typeof fetch; + try { + const logger = createRemoteLogger({ baseUrl: "http://example.com" }); + logger.info({ msg: "ping" }); + } finally { + globalThis.fetch = originalFetch; + } + expect(fetchMock).toHaveBeenCalled(); + }); +}); diff --git 
a/clients/web/src/test/core/mcp/remote/remoteClientTransport.test.ts b/clients/web/src/test/core/mcp/remote/remoteClientTransport.test.ts new file mode 100644 index 000000000..17c7a1ff3 --- /dev/null +++ b/clients/web/src/test/core/mcp/remote/remoteClientTransport.test.ts @@ -0,0 +1,325 @@ +import { describe, it, expect, vi } from "vitest"; +import { RemoteClientTransport } from "@inspector/core/mcp/remote/remoteClientTransport.js"; +import type { MCPServerConfig } from "@inspector/core/mcp/types.js"; + +function sseFromString(payload: string): ReadableStream { + const encoder = new TextEncoder(); + return new ReadableStream({ + start(controller) { + controller.enqueue(encoder.encode(payload)); + controller.close(); + }, + }); +} + +function sseResponse(payload: string): Response { + return new Response(sseFromString(payload), { + status: 200, + headers: { "content-type": "text/event-stream" }, + }); +} + +const config: MCPServerConfig = { + type: "stdio", + command: "echo", + args: ["hello"], +}; + +const baseUrl = "http://remote.example/"; + +function sseStreamResponse(): Response { + const encoder = new TextEncoder(); + // Stream stays open with no events — we'll abort it when the transport closes. 
+ const stream = new ReadableStream({ + start(controller) { + controller.enqueue(encoder.encode(": keepalive\n\n")); + }, + }); + return new Response(stream, { + status: 200, + headers: { "content-type": "text/event-stream" }, + }); +} + +describe("RemoteClientTransport", () => { + it("send() throws when called before start()", async () => { + const transport = new RemoteClientTransport( + { baseUrl, fetchFn: vi.fn() as unknown as typeof fetch }, + config, + ); + await expect( + transport.send({ jsonrpc: "2.0", id: 1, method: "ping" }), + ).rejects.toThrow(/Transport not started/); + }); + + it("send() throws Transport is closed after a transport_error SSE event sets closed", async () => { + // After the SSE consumer processes a transport_error event the transport + // marks itself closed (closed=true) without unsetting _sessionId — so a + // subsequent send() must hit the "Transport is closed" branch rather than + // the "Transport not started" branch. + const fetchFn = vi + .fn() + .mockResolvedValueOnce( + new Response(JSON.stringify({ sessionId: "abc" }), { status: 200 }), + ) + .mockResolvedValueOnce( + sseResponse( + 'data: {"type":"transport_error","data":{"error":"upstream died"}}\n\n', + ), + ); + const transport = new RemoteClientTransport( + { baseUrl, fetchFn: fetchFn as unknown as typeof fetch }, + config, + ); + const onclose = vi.fn(); + transport.onclose = onclose; + await transport.start(); + // Wait for the SSE consumer to observe the transport_error event and + // fire onclose — at that point `closed` is true but `_sessionId` is still + // set, which is exactly the precondition we need for the send() branch. 
+ await vi.waitFor(() => expect(onclose).toHaveBeenCalled(), { + timeout: 1000, + interval: 10, + }); + await expect( + transport.send({ jsonrpc: "2.0", id: 1, method: "ping" }), + ).rejects.toThrow(/Transport is closed/); + }); + + it("send() throws a tagged error on non-ok response", async () => { + const fetchFn = vi + .fn() + .mockResolvedValueOnce( + new Response(JSON.stringify({ sessionId: "abc" }), { status: 200 }), + ) + .mockResolvedValueOnce(sseStreamResponse()) + .mockResolvedValueOnce(new Response("upstream blew up", { status: 502 })); + const transport = new RemoteClientTransport( + { baseUrl, fetchFn: fetchFn as unknown as typeof fetch }, + config, + ); + await transport.start(); + try { + await transport.send({ jsonrpc: "2.0", id: 7, method: "ping" }); + throw new Error("expected send() to throw"); + } catch (err) { + expect(err).toBeInstanceOf(Error); + expect((err as Error).message).toMatch(/Remote send failed \(502\)/); + expect((err as { status?: number }).status).toBe(502); + } + await transport.close(); + }); + + it("start() throws when remote returns no sessionId", async () => { + const fetchFn = vi.fn( + async () => new Response(JSON.stringify({}), { status: 200 }), + ); + const transport = new RemoteClientTransport( + { baseUrl, fetchFn: fetchFn as unknown as typeof fetch }, + config, + ); + await expect(transport.start()).rejects.toThrow(/did not return sessionId/); + }); + + it("calls onclose when the SSE stream ends without an explicit close", async () => { + // Stream that closes immediately — covers the finally branch that sets + // closed=true and fires onclose when the consumer exits cleanly. 
+ const closedStream = new ReadableStream({ + start(controller) { + controller.close(); + }, + }); + const fetchFn = vi + .fn() + .mockResolvedValueOnce( + new Response(JSON.stringify({ sessionId: "abc" }), { status: 200 }), + ) + .mockResolvedValueOnce( + new Response(closedStream, { + status: 200, + headers: { "content-type": "text/event-stream" }, + }), + ); + + const transport = new RemoteClientTransport( + { baseUrl, fetchFn: fetchFn as unknown as typeof fetch }, + config, + ); + const onclose = vi.fn(); + transport.onclose = onclose; + await transport.start(); + // The consumer runs as a fire-and-forget promise — wait for it to settle. + await vi.waitFor(() => expect(onclose).toHaveBeenCalled(), { + timeout: 1000, + interval: 10, + }); + expect((transport as unknown as { closed: boolean }).closed).toBe(true); + }); + + it("forwards a non-AbortError stream failure to onerror", async () => { + const failingStream = new ReadableStream({ + start(controller) { + controller.error(new Error("network gone")); + }, + }); + const fetchFn = vi + .fn() + .mockResolvedValueOnce( + new Response(JSON.stringify({ sessionId: "abc" }), { status: 200 }), + ) + .mockResolvedValueOnce( + new Response(failingStream, { + status: 200, + headers: { "content-type": "text/event-stream" }, + }), + ); + const transport = new RemoteClientTransport( + { baseUrl, fetchFn: fetchFn as unknown as typeof fetch }, + config, + ); + const onerror = vi.fn(); + const onclose = vi.fn(); + transport.onerror = onerror; + transport.onclose = onclose; + await transport.start(); + await vi.waitFor(() => expect(onerror).toHaveBeenCalled(), { + timeout: 1000, + interval: 10, + }); + expect(onerror.mock.calls[0]?.[0]?.message).toMatch(/network gone/); + expect(onclose).toHaveBeenCalled(); + }); + + it("dispatches transport_error events to onerror and triggers onclose", async () => { + const fetchFn = vi + .fn() + .mockResolvedValueOnce( + new Response(JSON.stringify({ sessionId: "abc" }), { status: 200 }), + 
) + .mockResolvedValueOnce( + sseResponse( + 'data: {"type":"transport_error","data":{"error":"upstream died","code":42}}\n\n', + ), + ); + const transport = new RemoteClientTransport( + { baseUrl, fetchFn: fetchFn as unknown as typeof fetch }, + config, + ); + const onerror = vi.fn(); + const onclose = vi.fn(); + transport.onerror = onerror; + transport.onclose = onclose; + await transport.start(); + await vi.waitFor(() => expect(onerror).toHaveBeenCalled(), { + timeout: 1000, + interval: 10, + }); + const err = onerror.mock.calls[0]?.[0]; + expect(err?.message).toBe("upstream died"); + expect((err as { code?: number }).code).toBe(42); + expect(onclose).toHaveBeenCalled(); + }); + + it("forwards fetch_request and stdio_log SSE events to user callbacks", async () => { + const fetchFn = vi + .fn() + .mockResolvedValueOnce( + new Response(JSON.stringify({ sessionId: "abc" }), { status: 200 }), + ) + .mockResolvedValueOnce( + sseResponse( + 'data: {"type":"fetch_request","data":{"id":"f1","timestamp":"2026-01-01T00:00:00.000Z","method":"GET","url":"http://x","requestHeaders":{}}}\n\n' + + 'data: {"type":"stdio_log","data":{"timestamp":"2026-01-01T00:00:00.000Z","message":"hello stderr"}}\n\n', + ), + ); + const fetchCalls: unknown[] = []; + const stderrCalls: unknown[] = []; + const transport = new RemoteClientTransport( + { + baseUrl, + fetchFn: fetchFn as unknown as typeof fetch, + onFetchRequest: (e) => fetchCalls.push(e), + onStderr: (e) => stderrCalls.push(e), + }, + config, + ); + await transport.start(); + await vi.waitFor( + () => { + expect(fetchCalls.length).toBeGreaterThan(0); + expect(stderrCalls.length).toBeGreaterThan(0); + }, + { timeout: 1000, interval: 10 }, + ); + const fr = fetchCalls[0] as { timestamp: Date }; + expect(fr.timestamp).toBeInstanceOf(Date); + const log = stderrCalls[0] as { message: string }; + expect(log.message).toBe("hello stderr"); + }); + + it("forwards JSON parse errors from SSE data to onerror but keeps the loop alive", async 
() => { + const fetchFn = vi + .fn() + .mockResolvedValueOnce( + new Response(JSON.stringify({ sessionId: "abc" }), { status: 200 }), + ) + .mockResolvedValueOnce( + sseResponse( + "data: this is not json\n\n" + + 'data: {"type":"message","data":{"jsonrpc":"2.0","id":1,"result":{}}}\n\n', + ), + ); + const transport = new RemoteClientTransport( + { baseUrl, fetchFn: fetchFn as unknown as typeof fetch }, + config, + ); + const onerror = vi.fn(); + const onmessage = vi.fn(); + transport.onerror = onerror; + transport.onmessage = onmessage; + await transport.start(); + await vi.waitFor( + () => { + expect(onerror).toHaveBeenCalled(); + expect(onmessage).toHaveBeenCalled(); + }, + { timeout: 1000, interval: 10 }, + ); + }); + + it("throws when start() receives an SSE response with no body", async () => { + const fetchFn = vi + .fn() + .mockResolvedValueOnce( + new Response(JSON.stringify({ sessionId: "abc" }), { status: 200 }), + ) + .mockResolvedValueOnce( + new Response(null, { + status: 200, + headers: { "content-type": "text/event-stream" }, + }), + ); + const transport = new RemoteClientTransport( + { baseUrl, fetchFn: fetchFn as unknown as typeof fetch }, + config, + ); + await expect(transport.start()).rejects.toThrow(/has no body/); + }); + + it("start() preserves status code on connect failure", async () => { + const fetchFn = vi.fn( + async () => new Response("unauthorized", { status: 401 }), + ); + const transport = new RemoteClientTransport( + { baseUrl, fetchFn: fetchFn as unknown as typeof fetch }, + config, + ); + try { + await transport.start(); + throw new Error("expected start() to throw"); + } catch (err) { + expect((err as { status?: number }).status).toBe(401); + expect((err as Error).message).toMatch(/Remote connect failed \(401\)/); + } + }); +}); diff --git a/clients/web/src/test/core/mcp/remote/sessionStorage.test.ts b/clients/web/src/test/core/mcp/remote/sessionStorage.test.ts new file mode 100644 index 000000000..e091c30d5 --- /dev/null +++ 
b/clients/web/src/test/core/mcp/remote/sessionStorage.test.ts @@ -0,0 +1,163 @@ +import { describe, it, expect, vi } from "vitest"; +import { RemoteInspectorClientStorage } from "@inspector/core/mcp/remote/sessionStorage.js"; +import type { InspectorClientSessionState } from "@inspector/core/mcp/sessionStorage.js"; + +function makeStorage(fetchFn: typeof fetch, authToken?: string) { + return new RemoteInspectorClientStorage({ + baseUrl: "http://remote.example/", + fetchFn, + authToken, + }); +} + +function sampleState(): InspectorClientSessionState { + return { + fetchRequests: [ + { + id: "1", + timestamp: new Date("2026-01-01T00:00:00Z"), + method: "GET", + url: "http://example.com/", + requestHeaders: {}, + category: "transport" as const, + }, + ], + createdAt: 1735689600000, + updatedAt: 1735689600001, + }; +} + +describe("RemoteInspectorClientStorage", () => { + it("saveSession POSTs serialized state with timestamps as ISO strings", async () => { + const fetchFn = vi + .fn() + .mockResolvedValue(new Response(null, { status: 204 })); + const storage = makeStorage(fetchFn as unknown as typeof fetch, "tok"); + + await storage.saveSession("sid", sampleState()); + expect(fetchFn).toHaveBeenCalledTimes(1); + const [url, init] = fetchFn.mock.calls[0]!; + expect(url).toBe("http://remote.example/api/storage/inspector-session-sid"); + expect(init?.method).toBe("POST"); + expect((init?.headers as Record<string, string>)["x-mcp-remote-auth"]).toBe( + "Bearer tok", + ); + const body = JSON.parse(init?.body as string); + expect(body.fetchRequests[0].timestamp).toBe("2026-01-01T00:00:00.000Z"); + }); + + it("saveSession leaves non-Date timestamps untouched", async () => { + const fetchFn = vi + .fn() + .mockResolvedValue(new Response(null, { status: 204 })); + const storage = makeStorage(fetchFn as unknown as typeof fetch); + const state = sampleState(); + // Pre-coerce to a number to exercise the ternary's else branch. 
+ state.fetchRequests[0]!.timestamp = 99 as unknown as Date; + await storage.saveSession("sid", state); + const body = JSON.parse(fetchFn.mock.calls[0]?.[1]?.body as string); + expect(body.fetchRequests[0].timestamp).toBe(99); + }); + + it("saveSession throws on non-ok response", async () => { + const fetchFn = vi.fn(async () => new Response("nope", { status: 500 })); + const storage = makeStorage(fetchFn as unknown as typeof fetch); + await expect(storage.saveSession("sid", sampleState())).rejects.toThrow( + /500 nope/, + ); + }); + + it("loadSession returns undefined on 404", async () => { + const fetchFn = vi.fn(async () => new Response(null, { status: 404 })); + const storage = makeStorage(fetchFn as unknown as typeof fetch); + expect(await storage.loadSession("missing")).toBeUndefined(); + }); + + it("loadSession throws on other error statuses", async () => { + const fetchFn = vi.fn(async () => new Response("broken", { status: 500 })); + const storage = makeStorage(fetchFn as unknown as typeof fetch); + await expect(storage.loadSession("any")).rejects.toThrow(/500 broken/); + }); + + it("loadSession converts ISO-string timestamps back into Date objects", async () => { + const fetchFn = vi.fn( + async () => + new Response( + JSON.stringify({ + fetchRequests: [ + { + id: "1", + timestamp: "2026-01-01T00:00:00.000Z", + method: "GET", + url: "http://example.com/", + requestHeaders: {}, + }, + ], + createdAt: 1735689600000, + updatedAt: 1735689600001, + }), + { + status: 200, + headers: { "content-type": "application/json" }, + }, + ), + ); + const storage = makeStorage(fetchFn as unknown as typeof fetch); + const state = await storage.loadSession("sid"); + expect(state?.fetchRequests[0]?.timestamp).toBeInstanceOf(Date); + expect((state?.fetchRequests[0]?.timestamp as Date).toISOString()).toBe( + "2026-01-01T00:00:00.000Z", + ); + }); + + it("loadSession preserves Date timestamps already provided as Date instances", async () => { + const fetchFn = vi.fn( + async () 
=> + new Response( + JSON.stringify({ + fetchRequests: [ + { + id: "1", + timestamp: new Date("2026-02-02T00:00:00Z").toISOString(), + method: "GET", + url: "http://example.com/", + requestHeaders: {}, + }, + ], + createdAt: 1, + updatedAt: 2, + }), + ), + ); + const storage = makeStorage(fetchFn as unknown as typeof fetch); + const state = await storage.loadSession("sid"); + expect((state?.fetchRequests[0]?.timestamp as Date).getTime()).toBe( + new Date("2026-02-02T00:00:00Z").getTime(), + ); + }); + + it("deleteSession tolerates 404 and rethrows on other errors", async () => { + const fetchFn = vi + .fn() + .mockResolvedValueOnce(new Response(null, { status: 204 })) + .mockResolvedValueOnce(new Response(null, { status: 404 })) + .mockResolvedValueOnce(new Response("oops", { status: 500 })); + const storage = makeStorage(fetchFn as unknown as typeof fetch); + await expect(storage.deleteSession("sid")).resolves.toBeUndefined(); + await expect(storage.deleteSession("sid")).resolves.toBeUndefined(); + await expect(storage.deleteSession("sid")).rejects.toThrow(/500 oops/); + }); + + it("does not send auth header when no token is configured", async () => { + const fetchFn = vi + .fn() + .mockResolvedValue(new Response(null, { status: 204 })); + const storage = makeStorage(fetchFn as unknown as typeof fetch); + await storage.saveSession("sid", sampleState()); + expect( + (fetchFn.mock.calls[0]?.[1]?.headers as Record<string, string>)[ + "x-mcp-remote-auth" + ], + ).toBeUndefined(); + }); +}); diff --git a/clients/web/src/test/core/remote-transport.test.ts b/clients/web/src/test/core/remote-transport.test.ts index 67fd281d2..4da591803 100644 --- a/clients/web/src/test/core/remote-transport.test.ts +++ b/clients/web/src/test/core/remote-transport.test.ts @@ -848,6 +848,100 @@ describe("Remote transport e2e", () => { expect(readAfterDeleteJson).toEqual({}); }); + it("rejects same-length tokens with mismatched content (timing-safe path)", async () => { + const { baseUrl, server, authToken 
} = await startRemoteServer(0); + remoteServer = server; + // Build a wrong token of the same length so the timing-safe compare + // (server.ts ~L174) is exercised instead of the early length check. + const wrong = authToken.split("").reverse().join(""); + const fakeSameLength = + wrong === authToken ? `${"X".repeat(authToken.length - 1)}!` : wrong; + const res = await fetch(`${baseUrl}/api/mcp/connect`, { + method: "POST", + headers: { + "Content-Type": "application/json", + "x-mcp-remote-auth": `Bearer ${fakeSameLength}`, + }, + body: JSON.stringify({ + config: { type: "sse" as const, url: "http://localhost:3000" }, + }), + }); + expect(res.status).toBe(401); + }); + + it("DELETE rejects an invalid storeId", async () => { + tempDir = mkdtempSync(join(tmpdir(), "inspector-storage-test-")); + const { baseUrl, server, authToken } = await startRemoteServer(0, { + storageDir: tempDir, + }); + remoteServer = server; + const res = await fetch(`${baseUrl}/api/storage/bad..id`, { + method: "DELETE", + headers: { "x-mcp-remote-auth": `Bearer ${authToken}` }, + }); + expect(res.status).toBe(400); + expect((await res.json()).error).toBe("Invalid storeId"); + }); + + it("POST returns 500 when the file write fails", async () => { + tempDir = mkdtempSync(join(tmpdir(), "inspector-storage-test-")); + const { baseUrl, server, authToken } = await startRemoteServer(0, { + storageDir: tempDir, + }); + remoteServer = server; + // Pre-create a *directory* at the would-be storage path so write fails. + const writeRes = await fetch(`${baseUrl}/api/storage/test-store`, { + method: "POST", + headers: { + "Content-Type": "application/json", + "x-mcp-remote-auth": `Bearer ${authToken}`, + }, + body: JSON.stringify({ ok: 1 }), + }); + // First write succeeds. + expect(writeRes.status).toBe(200); + // Replace the file with a directory of the same name, so subsequent writes + // fail at writeStoreFile. 
+ const filePath = join(tempDir!, "test-store.json"); + rmSync(filePath); + // mkdtempSync needs a parent dir; we already have tempDir. Create a dir + // at the would-be file path. + const { mkdirSync } = await import("node:fs"); + mkdirSync(filePath); + + const failRes = await fetch(`${baseUrl}/api/storage/test-store`, { + method: "POST", + headers: { + "Content-Type": "application/json", + "x-mcp-remote-auth": `Bearer ${authToken}`, + }, + body: JSON.stringify({ ok: 2 }), + }); + expect(failRes.status).toBe(500); + expect((await failRes.json()).error).toMatch(/Failed to write store/); + }); + + it("DELETE returns 500 when the file delete fails", async () => { + tempDir = mkdtempSync(join(tmpdir(), "inspector-storage-test-")); + const { baseUrl, server, authToken } = await startRemoteServer(0, { + storageDir: tempDir, + }); + remoteServer = server; + // Replace the would-be file path with a non-empty directory so + // deleteStoreFile cannot remove it cleanly. + const filePath = join(tempDir!, "test-store.json"); + const { mkdirSync, writeFileSync } = await import("node:fs"); + mkdirSync(filePath); + writeFileSync(join(filePath, "sentinel.txt"), "data"); + + const res = await fetch(`${baseUrl}/api/storage/test-store`, { + method: "DELETE", + headers: { "x-mcp-remote-auth": `Bearer ${authToken}` }, + }); + expect(res.status).toBe(500); + expect((await res.json()).error).toMatch(/Failed to delete store/); + }); + it("DELETE returns success for non-existent store", async () => { tempDir = mkdtempSync(join(tmpdir(), "inspector-storage-test-")); const { baseUrl, server, authToken } = await startRemoteServer(0, { @@ -867,6 +961,143 @@ describe("Remote transport e2e", () => { }); }); + describe("endpoint error paths", () => { + it("/api/mcp/connect returns 400 on invalid JSON body", async () => { + const { baseUrl, server, authToken } = await startRemoteServer(0); + remoteServer = server; + const res = await fetch(`${baseUrl}/api/mcp/connect`, { + method: "POST", + 
headers: { + "Content-Type": "application/json", + "x-mcp-remote-auth": `Bearer ${authToken}`, + }, + body: "not json", + }); + expect(res.status).toBe(400); + expect((await res.json()).error).toBe("Invalid JSON body"); + }); + + it("/api/mcp/connect returns 400 when config is missing", async () => { + const { baseUrl, server, authToken } = await startRemoteServer(0); + remoteServer = server; + const res = await fetch(`${baseUrl}/api/mcp/connect`, { + method: "POST", + headers: { + "Content-Type": "application/json", + "x-mcp-remote-auth": `Bearer ${authToken}`, + }, + body: JSON.stringify({}), + }); + expect(res.status).toBe(400); + expect((await res.json()).error).toBe("Missing config"); + }); + + it("/api/mcp/send returns 400 on invalid JSON body", async () => { + const { baseUrl, server, authToken } = await startRemoteServer(0); + remoteServer = server; + const res = await fetch(`${baseUrl}/api/mcp/send`, { + method: "POST", + headers: { + "Content-Type": "application/json", + "x-mcp-remote-auth": `Bearer ${authToken}`, + }, + body: "not json", + }); + expect(res.status).toBe(400); + }); + + it("/api/mcp/send returns 400 when sessionId or message is missing", async () => { + const { baseUrl, server, authToken } = await startRemoteServer(0); + remoteServer = server; + const res = await fetch(`${baseUrl}/api/mcp/send`, { + method: "POST", + headers: { + "Content-Type": "application/json", + "x-mcp-remote-auth": `Bearer ${authToken}`, + }, + body: JSON.stringify({ sessionId: "x" }), + }); + expect(res.status).toBe(400); + }); + + it("/api/mcp/send returns 404 for unknown sessionId", async () => { + const { baseUrl, server, authToken } = await startRemoteServer(0); + remoteServer = server; + const res = await fetch(`${baseUrl}/api/mcp/send`, { + method: "POST", + headers: { + "Content-Type": "application/json", + "x-mcp-remote-auth": `Bearer ${authToken}`, + }, + body: JSON.stringify({ + sessionId: "no-such-session", + message: { jsonrpc: "2.0", id: 1, method: 
"ping" }, + }), + }); + expect(res.status).toBe(404); + }); + + it("/api/mcp/events returns 400 when sessionId is missing", async () => { + const { baseUrl, server, authToken } = await startRemoteServer(0); + remoteServer = server; + const res = await fetch(`${baseUrl}/api/mcp/events`, { + headers: { "x-mcp-remote-auth": `Bearer ${authToken}` }, + }); + expect(res.status).toBe(400); + }); + + it("/api/mcp/events returns 404 for unknown sessionId", async () => { + const { baseUrl, server, authToken } = await startRemoteServer(0); + remoteServer = server; + const res = await fetch(`${baseUrl}/api/mcp/events?sessionId=missing`, { + headers: { "x-mcp-remote-auth": `Bearer ${authToken}` }, + }); + expect(res.status).toBe(404); + }); + + it("/api/mcp/disconnect returns 400 on invalid JSON body", async () => { + const { baseUrl, server, authToken } = await startRemoteServer(0); + remoteServer = server; + const res = await fetch(`${baseUrl}/api/mcp/disconnect`, { + method: "POST", + headers: { + "Content-Type": "application/json", + "x-mcp-remote-auth": `Bearer ${authToken}`, + }, + body: "not json", + }); + expect(res.status).toBe(400); + }); + + it("/api/mcp/disconnect returns 400 when sessionId is missing", async () => { + const { baseUrl, server, authToken } = await startRemoteServer(0); + remoteServer = server; + const res = await fetch(`${baseUrl}/api/mcp/disconnect`, { + method: "POST", + headers: { + "Content-Type": "application/json", + "x-mcp-remote-auth": `Bearer ${authToken}`, + }, + body: JSON.stringify({}), + }); + expect(res.status).toBe(400); + }); + + it("/api/fetch returns 400 on invalid JSON body", async () => { + const { baseUrl, server, authToken } = await startRemoteServer(0); + remoteServer = server; + const res = await fetch(`${baseUrl}/api/fetch`, { + method: "POST", + headers: { + "Content-Type": "application/json", + "x-mcp-remote-auth": `Bearer ${authToken}`, + }, + body: "not json", + }); + expect(res.status).toBe(400); + }); + }); + 
describe("Origin validation", () => { it("allows requests with valid origin", async () => { const { baseUrl, server, authToken } = await startRemoteServer(0, { diff --git a/clients/web/src/test/core/storage-adapters.test.ts b/clients/web/src/test/core/storage-adapters.test.ts index b126c349a..fc6d00b6b 100644 --- a/clients/web/src/test/core/storage-adapters.test.ts +++ b/clients/web/src/test/core/storage-adapters.test.ts @@ -50,6 +50,78 @@ async function startRemoteServer( } describe("Storage adapters", () => { + describe("createRemoteStorageAdapter (unit, mocked fetch)", () => { + function makeAdapter(fetchFn: typeof fetch, authToken?: string) { + return createRemoteStorageAdapter({ + baseUrl: "http://remote.example/", + storeId: "test", + fetchFn, + authToken, + })!; + } + + it("getItem returns null on 404 and parses stored blob otherwise", async () => { + const fetchFn = vi + .fn() + .mockResolvedValueOnce(new Response("not found", { status: 404 })) + .mockResolvedValueOnce( + new Response(JSON.stringify({ state: { hi: 1 }, version: 0 })), + ) + .mockResolvedValueOnce(new Response("{}")); + + const adapter = makeAdapter(fetchFn as typeof fetch); + expect(await adapter.getItem("anything")).toBeNull(); + const blob = await adapter.getItem("anything"); + expect(blob).toBeTruthy(); + // Empty object response → treated as "not initialized" → null. 
+ expect(await adapter.getItem("anything")).toBeNull(); + }); + + it("getItem throws on non-404 error responses", async () => { + const fetchFn = vi + .fn() + .mockResolvedValueOnce(new Response("boom", { status: 500 })); + const adapter = makeAdapter(fetchFn as typeof fetch); + await expect(adapter.getItem("x")).rejects.toThrow(/500/); + }); + + it("setItem throws when the POST fails", async () => { + const fetchFn = vi + .fn() + .mockResolvedValueOnce(new Response("nope", { status: 500 })); + const adapter = makeAdapter(fetchFn as typeof fetch); + await expect( + adapter.setItem("name", { state: { a: 1 }, version: 0 }), + ).rejects.toThrow(/500/); + }); + + it("removeItem tolerates 404 but rethrows on other failures", async () => { + const fetchFn = vi + .fn() + .mockResolvedValueOnce(new Response(null, { status: 204 })) + .mockResolvedValueOnce(new Response(null, { status: 404 })) + .mockResolvedValueOnce(new Response("boom", { status: 500 })); + const adapter = makeAdapter(fetchFn as typeof fetch); + await expect(adapter.removeItem("x")).resolves.toBeUndefined(); + await expect(adapter.removeItem("x")).resolves.toBeUndefined(); + await expect(adapter.removeItem("x")).rejects.toThrow(/500/); + }); + + it("sends the x-mcp-remote-auth header on all three verbs when authToken is set", async () => { + const fetchFn = vi + .fn() + .mockResolvedValue(new Response("{}")); + const adapter = makeAdapter(fetchFn as typeof fetch, "secret"); + await adapter.getItem("x"); + await adapter.setItem("x", { state: {}, version: 0 }); + await adapter.removeItem("x"); + for (const call of fetchFn.mock.calls) { + const headers = call[1]?.headers as Record<string, string> | undefined; + expect(headers?.["x-mcp-remote-auth"]).toBe("Bearer secret"); + } + }); + }); + describe("FileStorageAdapter", () => { let tempDir: string | null = null; @@ -146,6 +218,24 @@ describe("Storage adapters", () => { const parsed = JSON.parse(fileContent); expect(Object.keys(parsed.state.servers).length).toBe(0); }); + + 
it("removeItem deletes the underlying file", async () => { + tempDir = mkdtempSync(join(tmpdir(), "inspector-storage-test-")); + const filePath = join(tempDir!, "test-store.json"); + const storage = createFileStorageAdapter({ filePath }); + // createJSONStorage returns a PersistStorage wrapper that delegates to + // our adapter's removeItem callback (line 25 of file-storage.ts). + const store = createOAuthStore(storage); + store.getState().setServerState("https://example.com", { + tokens: { access_token: "t", token_type: "Bearer" }, + }); + await vi.waitFor(() => expect(existsSync(filePath)).toBe(true), { + timeout: 2000, + interval: 20, + }); + await storage!.removeItem("inspector-oauth-store"); + expect(existsSync(filePath)).toBe(false); + }); }); describe("RemoteStorageAdapter", () => { diff --git a/clients/web/vite.config.ts b/clients/web/vite.config.ts index 84f097345..e9b21c633 100644 --- a/clients/web/vite.config.ts +++ b/clients/web/vite.config.ts @@ -10,15 +10,86 @@ import { playwright } from '@vitest/browser-playwright'; const dirname = typeof __dirname !== 'undefined' ? __dirname : path.dirname(fileURLToPath(import.meta.url)); const repoRoot = path.resolve(dirname, '../..'); -// Aliases shared between the top-level resolve and the unit vitest project. -// Vitest projects don't inherit `resolve` from the parent, so the unit project -// redeclares them — keeping a single source here prevents the two from drifting -// (e.g. if a new core/* alias is added). +// Aliases shared between the top-level resolve and the vitest projects. +// Vitest projects don't inherit `resolve` from the parent, so the unit and +// integration projects redeclare them — keeping a single source here prevents +// them from drifting (e.g. if a new core/* alias is added). 
const sharedAliases = { '@inspector/core': path.resolve(dirname, '../../core'), - '@modelcontextprotocol/inspector-test-server': path.resolve(dirname, '../../test-servers/src/index.ts'), + // Point at the BUILT test-servers entry, not src/, so that any test which + // calls `getTestMcpServerPath()` (via `fileURLToPath(import.meta.url)`) + // resolves to a `.js` path Node can spawn directly as a subprocess. The + // integration tests build test-servers via `npm run test:integration` + // (or `npm run test-servers:build`) before running. + '@modelcontextprotocol/inspector-test-server': path.resolve(dirname, '../../test-servers/build/index.js'), }; -const sharedDedupe = ['react', 'react-dom']; +const sharedDedupe = [ + 'react', + 'react-dom', + // The SDK is installed under both clients/web/node_modules and the repo + // root's node_modules (hoisted by npm). Without dedupe, source files in + // core/ (no local node_modules) resolve to the root copy while test files + // resolve to the clients/web copy — splitting class identity and breaking + // vi.mock() / instanceof checks (see #1307). + '@modelcontextprotocol/sdk', +]; + +// Bare-module aliases needed when running tests from repoRoot (which has no +// node_modules of its own). Shared between the unit and integration projects. +// Use anchored regex `find` patterns so each package's own `exports` field +// handles subpath resolution. 
+const nodeModulesAliases = [ + { find: /^react$/, replacement: path.resolve(dirname, 'node_modules/react') }, + { find: /^pino$/, replacement: path.resolve(dirname, 'node_modules/pino') }, + { find: /^pino\/browser\.js$/, replacement: path.resolve(dirname, 'node_modules/pino/browser.js') }, + { find: /^zustand$/, replacement: path.resolve(dirname, 'node_modules/zustand') }, + { find: /^zustand\/middleware$/, replacement: path.resolve(dirname, 'node_modules/zustand/middleware.js') }, + { find: /^zustand\/vanilla$/, replacement: path.resolve(dirname, 'node_modules/zustand/vanilla.js') }, + { find: /^hono$/, replacement: path.resolve(dirname, 'node_modules/hono/dist/index.js') }, + { find: /^hono\/streaming$/, replacement: path.resolve(dirname, 'node_modules/hono/dist/helper/streaming/index.js') }, + { find: /^@hono\/node-server$/, replacement: path.resolve(dirname, 'node_modules/@hono/node-server') }, + { find: /^atomically$/, replacement: path.resolve(dirname, 'node_modules/atomically') }, + { find: /^express$/, replacement: path.resolve(dirname, 'node_modules/express') }, + { find: /^yaml$/, replacement: path.resolve(dirname, 'node_modules/yaml') }, + // Pin the SDK auth subpath so test and source resolve to the exact same + // module ID. Without this, the source's `import` from core/auth/*.ts and + // the test's `vi.mock(...)` can resolve through different cache keys in + // Vitest's transformer pipeline — the mock then fails to intercept the + // source-side import (see #1307). + { find: /^@modelcontextprotocol\/sdk\/client\/auth\.js$/, replacement: path.resolve(dirname, 'node_modules/@modelcontextprotocol/sdk/dist/esm/client/auth.js') }, +]; + +// Project resolve config shared between the unit and integration projects. +// sharedAliases come first as exact-match entries, then the bare-module +// regex aliases that node-style imports from core/ rely on. 
+const projectResolve = { + alias: [ + ...Object.entries(sharedAliases).map(([find, replacement]) => ({ find, replacement })), + ...nodeModulesAliases, + ], + dedupe: sharedDedupe, +}; + +// v1.5-ported integration tests that need a node-env vitest project — they +// spawn real HTTP/stdio servers via test-servers/, run end-to-end OAuth flows, +// talk to fs/network, or mock `@modelcontextprotocol/sdk/client/auth.js` (the +// SDK auth mock identity is lost under happy-dom + Vitest 4, but works under +// node env). Tracked in #1307. +const integrationTests = [ + 'clients/web/src/test/core/inspectorClient.test.ts', + 'clients/web/src/test/core/inspectorClient-oauth.test.ts', + 'clients/web/src/test/core/inspectorClient-oauth-e2e.test.ts', + 'clients/web/src/test/core/inspectorClient-oauth-fetchFn.test.ts', + 'clients/web/src/test/core/inspectorClient-oauth-remote-storage-e2e.test.ts', + 'clients/web/src/test/core/transport.test.ts', + 'clients/web/src/test/core/remote-transport.test.ts', + 'clients/web/src/test/core/remote-server-config.test.ts', + 'clients/web/src/test/core/storage-adapters.test.ts', + 'clients/web/src/test/core/auth/storage-node.test.ts', + 'clients/web/src/test/core/auth/oauth-callback-server.test.ts', + 'clients/web/src/test/core/auth/discovery.test.ts', + 'clients/web/src/test/core/auth/state-machine.test.ts', +]; // More info at: https://storybook.js.org/docs/next/writing-tests/integrations/vitest-addon export default defineConfig({ @@ -46,6 +117,9 @@ export default defineConfig({ 'src/utils/**/*.{ts,tsx}', path.join(repoRoot, 'core/mcp/**/*.{ts,tsx}'), path.join(repoRoot, 'core/react/**/*.{ts,tsx}'), + path.join(repoRoot, 'core/auth/**/*.{ts,tsx}'), + path.join(repoRoot, 'core/storage/**/*.{ts,tsx}'), + path.join(repoRoot, 'core/logging/**/*.{ts,tsx}'), ], exclude: [ '**/*.stories.{ts,tsx}', @@ -61,25 +135,30 @@ export default defineConfig({ path.join(repoRoot, 'core/mcp/samplingCreateMessage.ts'), path.join(repoRoot, 
'core/mcp/sessionStorage.ts'), path.join(repoRoot, 'core/mcp/inspectorClientProtocol.ts'), + path.join(repoRoot, 'core/mcp/remote/types.ts'), + // .d.ts files are declaration-only. + path.join(repoRoot, '**/*.d.ts'), + // TODO(#1310): coverage debt on two large v1.5-ported files that the + // existing v1.5 tests don't fully exercise. #1307 brought the + // integration suite back online and lifted all other v1.5 coverage + // gates; #1310 tracks filling these two: + // - inspectorClient.ts (2184 LOC) is at 73% lines despite 4005 LOC + // of integration tests; remaining gaps are scattered across OAuth, + // subscription, and error-path code (~150 lines, 107 ranges). + // - remote/node/server.ts is at 88% lines / 78% functions; gaps are + // in transport-failure / 401-propagation error paths and the + // private createTokenAuthProvider helper. + path.join(repoRoot, 'core/mcp/inspectorClient.ts'), + path.join(repoRoot, 'core/mcp/remote/node/server.ts'), // inspectorClientEventTarget.ts is types + a single empty-body class // (extends TypedEventTarget). v8/istanbul records 0 statements for it // today. TODO(#1243): drop this exclusion once the class gains real // behavior as the v1.5 InspectorClient port progresses. path.join(repoRoot, 'core/mcp/inspectorClientEventTarget.ts'), path.join(repoRoot, 'core/mcp/__tests__/**'), - // v1.5-ported runtime files (#1302) whose v1.5 tests are excluded from - // the unit project pending a node-env vitest setup. Tracked in #1307 — - // drop each entry below as the corresponding test family comes online. 
- path.join(repoRoot, 'core/mcp/inspectorClient.ts'), - path.join(repoRoot, 'core/mcp/oauthManager.ts'), - path.join(repoRoot, 'core/mcp/fetchTracking.ts'), - path.join(repoRoot, 'core/mcp/messageTrackingTransport.ts'), - path.join(repoRoot, 'core/mcp/config.ts'), - path.join(repoRoot, 'core/mcp/node/**'), - path.join(repoRoot, 'core/mcp/remote/**'), - path.join(repoRoot, 'core/auth/**'), - path.join(repoRoot, 'core/storage/**'), - path.join(repoRoot, 'core/logging/**'), + // test-servers/ is test infrastructure (composable MCP servers and + // fixtures), not application code — its build output also lives at + // test-servers/build/, which we don't want to measure either. path.join(repoRoot, 'test-servers/**'), ], thresholds: { @@ -93,37 +172,12 @@ export default defineConfig({ projects: [ { extends: true, - // Vitest projects don't inherit `resolve` from the parent, so we - // redeclare the shared aliases here. The extra `react` alias is - // required because we move the unit project root to repoRoot below - // (so vitest's coverage transformer can reach core/), and the repo - // root has no node_modules of its own — bare `react` imports from - // core/react/*.ts would otherwise fail to resolve. - resolve: { - alias: [ - // sharedAliases first as exact-match entries - ...Object.entries(sharedAliases).map(([find, replacement]) => ({ find, replacement })), - { find: /^react$/, replacement: path.resolve(dirname, 'node_modules/react') }, - // v1.5 core/ modules (#1302) import these from clients/web/node_modules, - // but the unit project runs from repoRoot (which has no node_modules of - // its own). Use anchored regex `find` patterns so the package's own - // `exports` field handles subpath resolution (otherwise a bare `hono` - // string alias would rewrite `hono/streaming` to `/streaming`, - // bypassing the exports map). 
- { find: /^pino$/, replacement: path.resolve(dirname, 'node_modules/pino') }, - { find: /^pino\/browser\.js$/, replacement: path.resolve(dirname, 'node_modules/pino/browser.js') }, - { find: /^zustand$/, replacement: path.resolve(dirname, 'node_modules/zustand') }, - { find: /^zustand\/middleware$/, replacement: path.resolve(dirname, 'node_modules/zustand/middleware.js') }, - { find: /^zustand\/vanilla$/, replacement: path.resolve(dirname, 'node_modules/zustand/vanilla.js') }, - { find: /^hono$/, replacement: path.resolve(dirname, 'node_modules/hono/dist/index.js') }, - { find: /^hono\/streaming$/, replacement: path.resolve(dirname, 'node_modules/hono/dist/helper/streaming/index.js') }, - { find: /^@hono\/node-server$/, replacement: path.resolve(dirname, 'node_modules/@hono/node-server') }, - { find: /^atomically$/, replacement: path.resolve(dirname, 'node_modules/atomically') }, - { find: /^express$/, replacement: path.resolve(dirname, 'node_modules/express') }, - { find: /^yaml$/, replacement: path.resolve(dirname, 'node_modules/yaml') }, - ], - dedupe: sharedDedupe, - }, + // Vitest projects don't inherit `resolve` from the parent. The unit + // project runs from repoRoot (so vitest's coverage transformer can + // reach core/), but repoRoot has no node_modules of its own — the + // shared regex aliases redirect bare `react`/`pino`/etc. imports + // from core/ back into clients/web/node_modules. + resolve: projectResolve, test: { name: 'unit', environment: 'happy-dom', @@ -139,32 +193,43 @@ export default defineConfig({ // consistent and avoids relying on auto-cleanup tied to Vitest's // global lifecycle hooks; cleanup is invoked manually in setup.ts. include: ['clients/web/src/**/*.test.{ts,tsx}'], - // These v1.5-ported tests need either a node-env vitest project - // (they spawn real HTTP/stdio servers via test-servers/, run - // end-to-end OAuth flows, or talk to fs/network) or substantial - // happy-dom-friendly mocks. 
Tracked in #1307 — remove each entry - // below as the corresponding test starts passing. - exclude: [ - 'clients/web/src/test/core/inspectorClient.test.ts', - 'clients/web/src/test/core/inspectorClient-oauth.test.ts', - 'clients/web/src/test/core/inspectorClient-oauth-e2e.test.ts', - 'clients/web/src/test/core/inspectorClient-oauth-fetchFn.test.ts', - 'clients/web/src/test/core/inspectorClient-oauth-remote-storage-e2e.test.ts', - 'clients/web/src/test/core/transport.test.ts', - 'clients/web/src/test/core/remote-transport.test.ts', - 'clients/web/src/test/core/remote-server-config.test.ts', - 'clients/web/src/test/core/storage-adapters.test.ts', - 'clients/web/src/test/core/auth/storage-node.test.ts', - 'clients/web/src/test/core/auth/oauth-callback-server.test.ts', - // discovery.test.ts + state-machine.test.ts mock the SDK auth - // module, but happy-dom + Vitest mock resolution drops the mock - // (real fetch fires → CORS). Excluded pending mock rework. - 'clients/web/src/test/core/auth/discovery.test.ts', - 'clients/web/src/test/core/auth/state-machine.test.ts', - ], + // Integration tests run in the integration project below (node env). + exclude: integrationTests, setupFiles: [path.join(dirname, 'src/test/setup.ts')], }, }, + { + extends: true, + // See note on the unit project: integration tests also run from + // repoRoot and import core/ modules, so they need the same alias + // setup. The shared bare-module aliases keep `pino`, `hono`, etc. + // resolving against clients/web/node_modules. + resolve: projectResolve, + test: { + name: 'integration', + environment: 'node', + // Same reason as the unit project: rooted at repoRoot so vitest + // can transform core/ modules and run tests against the source. + root: repoRoot, + include: integrationTests, + // Integration tests spawn real HTTP/stdio servers via test-servers/, + // bind sockets, run e2e OAuth flows, and exercise filesystem-backed + // storage. 30s matches the v1.5 core/vitest.config.ts. 
+ testTimeout: 30000, + hookTimeout: 30000, + // Inline the MCP SDK so vi.mock("@modelcontextprotocol/sdk/...") + // hooks the same transformed copy that source files import. + // Externalized node_modules are loaded via Node's loader and bypass + // Vitest's mock system. The explicit alias above pins the auth.js + // subpath to one canonical ID; inlining ensures the transformer + // pipeline owns the module rather than the Node loader. + server: { + deps: { + inline: [/@modelcontextprotocol\/sdk/], + }, + }, + }, + }, { extends: true, plugins: [ diff --git a/test-servers/tsconfig.json b/test-servers/tsconfig.json new file mode 100644 index 000000000..7eda8adbc --- /dev/null +++ b/test-servers/tsconfig.json @@ -0,0 +1,17 @@ +{ + "compilerOptions": { + "target": "ES2022", + "module": "NodeNext", + "moduleResolution": "NodeNext", + "outDir": "./build", + "rootDir": "./src", + "declaration": false, + "sourceMap": true, + "esModuleInterop": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "resolveJsonModule": true + }, + "include": ["src/**/*.ts"], + "exclude": ["node_modules", "build"] +}