Skip to content
Merged
59 changes: 46 additions & 13 deletions docs/guide/ai-and-mcp.md
Original file line number Diff line number Diff line change
Expand Up @@ -70,8 +70,18 @@ The extension runs an **MCP (Model Context Protocol) HTTP server** inside the VS
### Setup

1. Open the Command Palette and run: `Weapon: Install MCP server to workspace`
2. This creates `.vscode/mcp.json`:

2. If `weaponized.mcp.cli` is not set, you'll be prompted to select your AI client:
- **VSCode** — writes `.vscode/mcp.json`
- **Claude Code** — writes `.mcp.json`
- **Codex (OpenAI)** — writes `.codex/config.toml`
- **Gemini CLI** — writes `.gemini/settings.json`
- **OpenCode** — writes `.opencode.json`
3. Your choice is saved to workspace settings (`weaponized.mcp.cli`), so subsequent runs skip the picker
4. The port auto-updates on each activation if the config file exists

### Config Examples Per Client

**VSCode** (`.vscode/mcp.json`):
```json
{
"servers": {
Expand All @@ -82,28 +92,50 @@ The extension runs an **MCP (Model Context Protocol) HTTP server** inside the VS
}
```

3. VS Code's built-in MCP support (and compatible extensions) will auto-discover this configuration
4. The port auto-updates if it changes between sessions
**Claude Code** (`.mcp.json`):
```json
{
"mcpServers": {
"weaponized": {
"type": "url",
"url": "http://127.0.0.1:25789/mcp"
}
}
}
```

### Connecting External AI Clients
**Codex** (`.codex/config.toml`):
```toml
[mcp_servers.weaponized]
url = "http://127.0.0.1:25789/mcp"
```

For AI tools that support MCP (Claude Code, Cursor, Windsurf, etc.):
**Gemini CLI** (`.gemini/settings.json`):
```json
{
"mcpServers": {
"weaponized": {
"httpUrl": "http://127.0.0.1:25789/mcp"
}
}
}
```

**Claude Code:**
```bash
# Claude Code auto-discovers .vscode/mcp.json
# Or configure manually in ~/.claude/mcp_settings.json:
**OpenCode** (`.opencode.json`):
```json
{
"servers": {
"mcpServers": {
"weaponized": {
"type": "sse",
"url": "http://127.0.0.1:25789/mcp"
}
}
}
```

**Other MCP clients:**
Point the client to `http://127.0.0.1:25789/mcp` using Streamable HTTP transport.
::: tip
To switch to a different AI client, change `weaponized.mcp.cli` in your workspace settings and re-run the install command.
:::

::: warning
The MCP server binds to `127.0.0.1` (localhost only). It is not accessible from other machines on the network. The port defaults to `25789` but can be changed via `weaponized.mcp.port` setting.
Expand Down Expand Up @@ -199,3 +231,4 @@ The MCP server gives AI clients the same workspace access that CodeLens and comm
|---------|------|---------|-------------|
| `weaponized.ai.enabled` | boolean | `true` | Enable/disable both @weapon chat and MCP server |
| `weaponized.mcp.port` | integer | `25789` | MCP HTTP server port |
| `weaponized.mcp.cli` | string | `""` | Target AI CLI tool (`vscode`, `claude`, `codex`, `gemini`, `opencode`). Empty = prompt on first install |
7 changes: 7 additions & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -118,6 +118,13 @@
"format": "port",
"description": "Preferred port for the embedded MCP HTTP server (http://127.0.0.1:<port>/mcp). If the port is already in use, the OS will assign a free port automatically and mcp.json will be updated. Requires reload."
},
"weaponized.mcp.cli": {
"type": "string",
"enum": ["", "vscode", "claude", "codex", "gemini", "opencode"],
"default": "",
"description": "Target AI CLI tool for MCP config file installation. Controls which config file format is written by the Install MCP command. Leave empty to be prompted on first install.",
"scope": "resource"
},
"weaponized.lhost": {
"type": "string",
"default": "$LHOST",
Expand Down
4 changes: 2 additions & 2 deletions src/app/activate.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ import { registerDefinitionProvider } from "../features/definitions";
import { registerAIFeatures } from "../features/ai";
import {
setEmbeddedMcpServer,
autoUpdateMcpJson,
autoUpdateMcpConfig,
} from "../features/mcp/install";
import { EmbeddedMcpServer } from "../features/mcp/httpServer";
import { DEFAULT_MCP_PORT } from "../features/mcp/portManager";
Expand Down Expand Up @@ -97,7 +97,7 @@ export async function activateExtension(context: vscode.ExtensionContext) {
const port = await mcpServer.start(terminalBridge, preferredPort);
setEmbeddedMcpServer(mcpServer);
context.subscriptions.push({ dispose: () => mcpServer.stop() });
await autoUpdateMcpJson(port);
await autoUpdateMcpConfig(port);
logger.info(`Embedded MCP server started on port ${port}`);
} catch (e) {
logger.error("Failed to start embedded MCP server:", e);
Expand Down
181 changes: 181 additions & 0 deletions src/core/domain/engagement.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,181 @@
import type { Host } from "./host";
import type { UserCredential } from "./user";
import type { Finding } from "./finding";
import type { RelationshipGraph } from "./graph";

/** Aggregate engagement counts, with findings broken down by severity. */
export interface EngagementStats {
  /** Total number of hosts in the engagement. */
  totalHosts: number;
  /** Total number of user credentials. */
  totalCredentials: number;
  /** Total number of findings across all severities. */
  totalFindings: number;
  /** Findings whose severity (case-insensitive) is "critical". */
  criticalFindings: number;
  /** Findings whose severity is "high". */
  highFindings: number;
  /** Findings whose severity is "medium". */
  mediumFindings: number;
  /** Findings whose severity is "low". */
  lowFindings: number;
  /** Findings whose severity is "info". */
  infoFindings: number;
}

/** A finding enriched with its graph-derived associations */
export interface FindingAssociation {
finding: Finding;
/** Host IDs linked to this finding via wiki-links */
hosts: string[];
/** User IDs linked to this finding via wiki-links */
users: string[];
/** Service IDs linked to this finding via wiki-links */
services: string[];
/** Other finding IDs linked to this finding (attack chains) */
findings: string[];
}

/** Complete snapshot of an engagement: entities, per-finding associations, and stats. */
export interface EngagementSummary {
  /** Aggregate counts (hosts, credentials, findings by severity). */
  stats: EngagementStats;
  /** The host flagged `is_current`, or null when none is. */
  currentTarget: Host | null;
  /** The credential flagged `is_current`, or null when none is. */
  currentUser: UserCredential | null;
  /** All hosts, as provided in the input. */
  hosts: Host[];
  /** All user credentials, as provided in the input. */
  users: UserCredential[];
  /** All findings, as provided in the input. */
  findings: Finding[];
  /** Every finding with its graph-derived associations */
  findingAssociations: FindingAssociation[];
  /** Findings grouped by associated host ID */
  hostBreakdown: Record<string, Finding[]>;
  /** Findings grouped by associated user ID */
  userBreakdown: Record<string, Finding[]>;
  /** Findings with no wiki-link associations */
  unassociatedFindings: Finding[];
  /** The relationship graph the associations were derived from; null when unavailable. */
  graph: RelationshipGraph | null;
}

/** Raw inputs consumed by {@link buildEngagementSummary}. */
export interface EngagementSummaryInput {
  /** All known hosts. */
  hosts: Host[];
  /** All known user credentials. */
  users: UserCredential[];
  /** All findings. */
  findings: Finding[];
  /** Relationship graph used to derive associations; null when unavailable. */
  graph: RelationshipGraph | null;
}

/**
 * Build a full engagement snapshot from raw domain collections.
 *
 * Associations come from `graph.findingEdges`: for each finding we collect
 * the IDs of every node sharing an edge with it, bucketed by node type
 * (host / user / service / finding). Findings with no associations at all
 * are listed in `unassociatedFindings`.
 *
 * Runs in O(nodes + edges + findings) by pre-indexing edges per endpoint,
 * instead of rescanning every edge for every finding.
 *
 * @param input - Hosts, credentials, findings, and the (nullable) graph.
 * @returns The aggregated {@link EngagementSummary}.
 */
export function buildEngagementSummary(input: EngagementSummaryInput): EngagementSummary {
  const { hosts, users, findings, graph } = input;

  const currentTarget = hosts.find((h) => h.is_current) ?? null;
  const currentUser = users.find((u) => u.is_current) ?? null;

  // Severity tallies; findings with a missing or unrecognized severity are
  // counted in totals but not in any severity bucket.
  const sev = { critical: 0, high: 0, medium: 0, low: 0, info: 0 };
  for (const f of findings) {
    const s = (f.severity?.toLowerCase() ?? "") as keyof typeof sev;
    if (s in sev) {
      sev[s]++;
    }
  }

  // Node ID -> node type, used to classify the far end of each edge.
  const nodeType = new Map<string, string>();
  if (graph) {
    for (const n of graph.nodes) {
      nodeType.set(n.id, n.type);
    }
  }

  // Pre-index finding edges by endpoint so each finding's neighbours are one
  // Map lookup instead of a scan over every edge (O(F+E) vs O(F*E)).
  // Edge order is preserved, falsy endpoint IDs are skipped, and self-loops
  // are recorded once — matching the original per-finding edge walk.
  const neighbours = new Map<string, string[]>();
  if (graph) {
    const add = (from: string, to: string): void => {
      if (!to) {
        return;
      }
      const list = neighbours.get(from);
      if (list) {
        list.push(to);
      } else {
        neighbours.set(from, [to]);
      }
    };
    for (const edge of graph.findingEdges) {
      add(edge.source, edge.target);
      if (edge.source !== edge.target) {
        add(edge.target, edge.source);
      }
    }
  }

  const findingAssociations: FindingAssociation[] = [];
  const hostBreakdown: Record<string, Finding[]> = {};
  const userBreakdown: Record<string, Finding[]> = {};
  const associatedIds = new Set<string>();

  for (const f of findings) {
    const assoc: FindingAssociation = {
      finding: f,
      hosts: [],
      users: [],
      services: [],
      findings: [],
    };

    // One bucket per recognised node type; nodes of any other type
    // (or missing from the graph, defaulted to "note") are dropped.
    const buckets: Record<string, string[]> = {
      host: assoc.hosts,
      user: assoc.users,
      service: assoc.services,
      finding: assoc.findings,
    };
    for (const other of neighbours.get(f.id) ?? []) {
      const bucket = buckets[nodeType.get(other) ?? "note"];
      if (bucket && !bucket.includes(other)) {
        bucket.push(other);
      }
    }

    const hasAssociation =
      assoc.hosts.length > 0 ||
      assoc.users.length > 0 ||
      assoc.services.length > 0 ||
      assoc.findings.length > 0;
    if (hasAssociation) {
      associatedIds.add(f.id);
    }

    // Group findings under each associated host/user for the breakdowns.
    for (const h of assoc.hosts) {
      if (!hostBreakdown[h]) {
        hostBreakdown[h] = [];
      }
      hostBreakdown[h].push(f);
    }
    for (const u of assoc.users) {
      if (!userBreakdown[u]) {
        userBreakdown[u] = [];
      }
      userBreakdown[u].push(f);
    }

    findingAssociations.push(assoc);
  }

  const unassociatedFindings = findings.filter((f) => !associatedIds.has(f.id));

  const stats: EngagementStats = {
    totalHosts: hosts.length,
    totalCredentials: users.length,
    totalFindings: findings.length,
    criticalFindings: sev.critical,
    highFindings: sev.high,
    mediumFindings: sev.medium,
    lowFindings: sev.low,
    infoFindings: sev.info,
  };

  return {
    stats,
    currentTarget,
    currentUser,
    hosts,
    users,
    findings,
    findingAssociations,
    hostBreakdown,
    userBreakdown,
    unassociatedFindings,
    graph,
  };
}
3 changes: 2 additions & 1 deletion src/core/domain/graph.ts
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,9 @@ export interface GraphEdge {
export interface RelationshipGraph {
nodes: GraphNode[];
edges: GraphEdge[]; // all connections
hostEdges: GraphEdge[]; // both endpoints involve type "host"
hostEdges: GraphEdge[]; // at least one endpoint is type "host"
userEdges: GraphEdge[]; // at least one endpoint is type "user"
findingEdges: GraphEdge[]; // at least one endpoint is type "finding"
attackPath: string[]; // ordered node IDs — privilege escalation chain
mermaid: string; // pre-rendered Mermaid diagram
}
Expand Down
8 changes: 8 additions & 0 deletions src/core/domain/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,14 @@ export {
filterFindings
} from "./finding";

// Re-export the engagement-summary API. Type-only symbols go through
// `export type` so they are erasable under isolatedModules/verbatimModuleSyntax;
// the builder function is a runtime value and is re-exported separately.
export type {
  EngagementSummary,
  EngagementSummaryInput,
  EngagementStats,
  FindingAssociation
} from "./engagement";
export { buildEngagementSummary } from "./engagement";

import { UserCredential } from "./user";
import { Host } from "./host";

Expand Down
Loading
Loading