Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
24 changes: 23 additions & 1 deletion actions/setup/js/send_otlp_span.cjs
Original file line number Diff line number Diff line change
Expand Up @@ -641,7 +641,10 @@ function readLastRateLimitEntry() {
* - `GITHUB_REPOSITORY` – `owner/repo` string
*
* Runtime files read:
* - `/tmp/gh-aw/aw_info.json` – workflow/engine metadata written by the agent job
* - `/tmp/gh-aw/aw_info.json` – workflow/engine metadata written by the agent job
* - `/tmp/gh-aw/agent_usage.json` – per-type token breakdown written by parse_token_usage.cjs;
* provides `input_tokens`, `output_tokens`,
* `cache_read_tokens`, and `cache_write_tokens` counters
*
* @param {string} spanName - OTLP span name (e.g. `"gh-aw.job.conclusion"`)
* @param {{ startMs?: number }} [options]
Expand Down Expand Up @@ -724,6 +727,25 @@ async function sendJobConclusionSpan(spanName, options = {}) {
if (!isNaN(effectiveTokens) && effectiveTokens > 0) {
attributes.push(buildAttr("gh-aw.effective_tokens", effectiveTokens));
}

// Enrich span with per-type token breakdown from agent_usage.json (written by
// parse_token_usage.cjs). These four attributes enable cache-hit-rate panels,
// per-type cost attribution, and fine-grained threshold alerts in Grafana /
// Honeycomb / Datadog without requiring the step summary HTML.
const agentUsage = readJSONIfExists("/tmp/gh-aw/agent_usage.json") || {};
if (typeof agentUsage.input_tokens === "number" && agentUsage.input_tokens > 0) {
attributes.push(buildAttr("gh-aw.tokens.input", agentUsage.input_tokens));
}
if (typeof agentUsage.output_tokens === "number" && agentUsage.output_tokens > 0) {
attributes.push(buildAttr("gh-aw.tokens.output", agentUsage.output_tokens));
}
if (typeof agentUsage.cache_read_tokens === "number" && agentUsage.cache_read_tokens > 0) {
attributes.push(buildAttr("gh-aw.tokens.cache_read", agentUsage.cache_read_tokens));
}
if (typeof agentUsage.cache_write_tokens === "number" && agentUsage.cache_write_tokens > 0) {
attributes.push(buildAttr("gh-aw.tokens.cache_write", agentUsage.cache_write_tokens));
}

if (agentConclusion) {
attributes.push(buildAttr("gh-aw.agent.conclusion", agentConclusion));
}
Expand Down
108 changes: 108 additions & 0 deletions actions/setup/js/send_otlp_span.test.cjs
Original file line number Diff line number Diff line change
Expand Up @@ -2448,6 +2448,114 @@ describe("sendJobConclusionSpan", () => {
});
});

describe("token breakdown enrichment in conclusion span", () => {
  let fsReadSpy;

  /** Builds the ENOENT error thrown for any file path the test has not stubbed. */
  const enoentError = () => Object.assign(new Error("ENOENT"), { code: "ENOENT" });

  /**
   * Stubs global fetch with a successful OTLP response and points the
   * exporter at a test endpoint. Returns the fetch mock for inspection.
   */
  const stubOtlpExport = () => {
    const fetchMock = vi.fn().mockResolvedValue({ ok: true, status: 200, statusText: "OK" });
    vi.stubGlobal("fetch", fetchMock);
    process.env.OTEL_EXPORTER_OTLP_ENDPOINT = "https://traces.example.com";
    return fetchMock;
  };

  /** Makes readFileSync serve `contents` for agent_usage.json and throw ENOENT for every other path. */
  const serveAgentUsage = contents => {
    fsReadSpy.mockImplementation(filePath => {
      if (filePath === "/tmp/gh-aw/agent_usage.json") {
        return contents;
      }
      throw enoentError();
    });
  };

  /** Extracts the single exported span from the first fetch call's request body. */
  const exportedSpan = fetchMock => {
    const payload = JSON.parse(fetchMock.mock.calls[0][1].body);
    return payload.resourceSpans[0].scopeSpans[0].spans[0];
  };

  /** Flattens span attributes into a key → value lookup (int or string values). */
  const attributeMap = span =>
    Object.fromEntries(span.attributes.map(a => [a.key, a.value.intValue ?? a.value.stringValue]));

  beforeEach(() => {
    // Default: no runtime file exists anywhere on disk.
    fsReadSpy = vi.spyOn(fs, "readFileSync").mockImplementation(() => {
      throw enoentError();
    });
  });

  afterEach(() => {
    fsReadSpy.mockRestore();
  });

  it("includes all four token breakdown attributes when agent_usage.json is present", async () => {
    const fetchMock = stubOtlpExport();
    serveAgentUsage(
      JSON.stringify({ input_tokens: 48200, output_tokens: 1350, cache_read_tokens: 41000, cache_write_tokens: 3100, effective_tokens: 9800 })
    );

    await sendJobConclusionSpan("gh-aw.job.conclusion");

    const attrs = attributeMap(exportedSpan(fetchMock));
    expect(attrs["gh-aw.tokens.input"]).toBe(48200);
    expect(attrs["gh-aw.tokens.output"]).toBe(1350);
    expect(attrs["gh-aw.tokens.cache_read"]).toBe(41000);
    expect(attrs["gh-aw.tokens.cache_write"]).toBe(3100);
  });

  it("omits all token breakdown attributes when agent_usage.json is absent", async () => {
    const fetchMock = stubOtlpExport();
    // The beforeEach stub already throws ENOENT for all paths.

    await sendJobConclusionSpan("gh-aw.job.conclusion");

    const keys = exportedSpan(fetchMock).attributes.map(a => a.key);
    expect(keys).not.toContain("gh-aw.tokens.input");
    expect(keys).not.toContain("gh-aw.tokens.output");
    expect(keys).not.toContain("gh-aw.tokens.cache_read");
    expect(keys).not.toContain("gh-aw.tokens.cache_write");
  });

  it("omits a token attribute when its value is zero", async () => {
    const fetchMock = stubOtlpExport();
    serveAgentUsage(JSON.stringify({ input_tokens: 1000, output_tokens: 0, cache_read_tokens: 500, cache_write_tokens: 0 }));

    await sendJobConclusionSpan("gh-aw.job.conclusion");

    const span = exportedSpan(fetchMock);
    const attrs = attributeMap(span);
    expect(attrs["gh-aw.tokens.input"]).toBe(1000);
    expect(attrs["gh-aw.tokens.cache_read"]).toBe(500);
    const keys = span.attributes.map(a => a.key);
    expect(keys).not.toContain("gh-aw.tokens.output");
    expect(keys).not.toContain("gh-aw.tokens.cache_write");
  });

  it("omits token breakdown attributes when agent_usage.json contains invalid JSON", async () => {
    const fetchMock = stubOtlpExport();
    serveAgentUsage("not valid json");

    await sendJobConclusionSpan("gh-aw.job.conclusion");

    const keys = exportedSpan(fetchMock).attributes.map(a => a.key);
    expect(keys).not.toContain("gh-aw.tokens.input");
    expect(keys).not.toContain("gh-aw.tokens.output");
    expect(keys).not.toContain("gh-aw.tokens.cache_read");
    expect(keys).not.toContain("gh-aw.tokens.cache_write");
  });
});

describe("staged / deployment.environment", () => {
let readFileSpy;

Expand Down
Loading