Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
21 commits
Select commit Hold shift + click to select a range
2f5eb86
chore(wrangler): register RepoConfigWorkflow + RepoConfigDO bindings
Apr 20, 2026
9750bf2
test(config): add failing tests for RepoConfig schema
Apr 20, 2026
31cb612
feat(config): add sparse+resolved RepoConfig Zod schemas
Apr 20, 2026
a4f5a37
test(durable-objects): add failing RepoConfigDO get/set tests
Apr 20, 2026
599df07
feat(durable-objects): add RepoConfigDO with synchronous KV storage
Apr 20, 2026
5221c19
feat(events): add ConfigPushEvent variant to Event union
Apr 20, 2026
df45ddd
feat(webhooks): re-export PushPayload from @octokit/webhooks
Apr 20, 2026
b2b5367
feat(router): route push events on default branch to ConfigPushEvent
Apr 20, 2026
2ee9ac1
feat(workflow): reject config_push on TaskRunnerWorkflow as NonRetrya…
Apr 20, 2026
69e8954
feat(workflow): branch buildInstanceId for config_push events
Apr 20, 2026
314fb96
feat(github): add getRepoContentFile with 404-as-null semantics
Apr 20, 2026
ef698f0
test(workflow): add failing tests for runSyncRepoConfig
Apr 20, 2026
da9efc9
feat(workflow): add runSyncRepoConfig step factory
Apr 20, 2026
b7df00c
test(workflow): add failing introspection tests for RepoConfigWorkflow
Apr 20, 2026
001e83b
feat(workflow): add RepoConfigWorkflow and branch dispatch
Apr 20, 2026
3106fde
refactor(workflow): consolidate env type + loadConfig; tighten assert…
Apr 20, 2026
6d13410
test(integration): cover push event dispatch paths
Apr 20, 2026
70c82bb
test(e2e): cover push-event end-to-end dispatch
Apr 20, 2026
f114d6e
style: apply biome formatter to repo-config files and push e2e test
Apr 20, 2026
9ceca4d
chore(wrangler): regenerate types with fully-resolved RepoConfigWorkf…
Apr 20, 2026
cc532f3
refactor(durable-objects): split RepoConfigDO storage into per-field …
Apr 20, 2026
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 13 additions & 0 deletions package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@
"@octokit/graphql": "^9.0.3",
"@octokit/rest": "^22.0.1",
"@octokit/webhooks": "^14.2.0",
"smol-toml": "^1.6.1",
"zod": "^4.3.6"
},
"devDependencies": {
Expand Down
120 changes: 120 additions & 0 deletions src/config/repo-config-schema.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,120 @@
import { describe, expect, test } from "vitest";
import {
parseRepoConfigToml,
resolveRepoConfigSettings,
} from "./repo-config-schema";

describe("parseRepoConfigToml — happy paths", () => {
  test("empty string → empty sparse settings", () => {
    expect(parseRepoConfigToml("")).toEqual({});
  });

  test("whitespace only → empty sparse settings", () => {
    expect(parseRepoConfigToml(" \n\n")).toEqual({});
  });

  test("full valid TOML → sparse object with all declared fields", () => {
    const fixture = `
[sandbox]
size = "medium"
docker = true

[[sandbox.volumes]]
path = "/data"
size = "20gb"

[harness]
provider = "claude"

[[scheduled_jobs]]
name = "nightly"
branch = "main"
schedule = "0 0 * * *"
prompt = "Do the thing"
`;
    const settings = parseRepoConfigToml(fixture);
    // Every field the TOML declared is present, exactly as written.
    expect(settings.sandbox?.size).toBe("medium");
    expect(settings.sandbox?.docker).toBe(true);
    expect(settings.sandbox?.volumes?.[0]).toEqual({
      path: "/data",
      size: "20gb",
    });
    expect(settings.harness?.provider).toBe("claude");
    expect(settings.scheduled_jobs?.[0]?.name).toBe("nightly");
  });

  test("unknown keys are dropped (write-side loose-parse)", () => {
    const settings = parseRepoConfigToml(`[future_feature]\nkey = "value"`);
    expect(settings).toEqual({});
  });

  test("partial fields do not materialize defaults", () => {
    const settings = parseRepoConfigToml(`[sandbox]\ndocker = true`);
    // Only what was written is stored; read-side defaults must not leak in.
    expect(settings).toEqual({ sandbox: { docker: true } });
    expect(settings.sandbox?.size).toBeUndefined();
  });
});

describe("parseRepoConfigToml — failure paths throw NonRetryableError", () => {
  test("invalid TOML syntax", () => {
    expect(() => parseRepoConfigToml("not = toml = bad")).toThrow(
      /Invalid TOML/,
    );
  });
  test("sandbox.size out of enum", () => {
    expect(() => parseRepoConfigToml(`[sandbox]\nsize = "huge"`)).toThrow(
      /Invalid RepoConfig/,
    );
  });
  test("harness.provider out of enum", () => {
    expect(() => parseRepoConfigToml(`[harness]\nprovider = "gemini"`)).toThrow(
      /Invalid RepoConfig/,
    );
  });
  test("sandbox.volumes entry missing path", () => {
    expect(() =>
      parseRepoConfigToml(`[[sandbox.volumes]]\nsize = "10gb"`),
    ).toThrow(/Invalid RepoConfig/);
  });
  test("scheduled_jobs entry missing branch", () => {
    expect(() =>
      parseRepoConfigToml(
        `[[scheduled_jobs]]\nname = "x"\nschedule = "0 0 * * *"\nprompt = "y"`,
      ),
    ).toThrow(/Invalid RepoConfig/);
  });
  test("error messages do not include raw values (secret-leak guard)", () => {
    const leak = () =>
      parseRepoConfigToml(`[harness]\nprovider = "MY_SECRET_LEAK"`);
    // Assert the call actually throws. Without this, a bare try/catch would
    // pass vacuously when nothing is thrown — the catch body (and its
    // assertion) would simply never run.
    expect(leak).toThrow();
    try {
      leak();
    } catch (err) {
      expect((err as Error).message).not.toContain("MY_SECRET_LEAK");
    }
  });
});

describe("resolveRepoConfigSettings — defaults applied on read", () => {
  test("undefined → full defaults", () => {
    const resolved = resolveRepoConfigSettings(undefined);
    expect(resolved.sandbox.size).toBe("medium");
    expect(resolved.sandbox.docker).toBe(false);
    expect(resolved.sandbox.volumes).toEqual([]);
    expect(resolved.harness.provider).toBe("claude");
    expect(resolved.scheduled_jobs).toEqual([]);
  });

  test("empty object → full defaults", () => {
    expect(resolveRepoConfigSettings({})).toEqual({
      sandbox: { size: "medium", docker: false, volumes: [] },
      harness: { provider: "claude" },
      scheduled_jobs: [],
    });
  });

  test("volume with path-only → size defaulted to '10gb'", () => {
    const resolved = resolveRepoConfigSettings({
      sandbox: { volumes: [{ path: "/data" }] },
    });
    expect(resolved.sandbox.volumes[0]).toEqual({
      path: "/data",
      size: "10gb",
    });
  });

  test("partial override: explicit size beats default", () => {
    const resolved = resolveRepoConfigSettings({
      sandbox: { size: "large" },
    });
    expect(resolved.sandbox.size).toBe("large");
    // Sibling fields the caller did not set still pick up their defaults.
    expect(resolved.sandbox.docker).toBe(false);
  });
});
165 changes: 165 additions & 0 deletions src/config/repo-config-schema.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,165 @@
import { NonRetryableError } from "cloudflare:workflows";
import { TomlError, parse } from "smol-toml";
import { z } from "zod";

// ── Shared enums ─────────────────────────────────────────────────────────────

/** Allowed values for `sandbox.size`. */
export const SandboxSizeSchema = z.enum(["small", "medium", "large"]);

/** Allowed values for `harness.provider`. */
export const HarnessProviderSchema = z.enum(["claude", "codex"]);

// ── Sparse (stored) schemas ──────────────────────────────────────────────────
// Sparse schemas mirror what users actually wrote in TOML. No `.default()`:
// defaults are applied at read time, not write time, so we can distinguish
// "unset" from "explicitly set to the default value" when needed.

/**
 * Sparse shape for a single sandbox volume entry. `path` is required.
 * `size` is a free-form string (e.g. `"20gb"`) — no unit/format validation
 * is performed at this layer.
 */
export const StoredSandboxVolumeSchema = z.object({
  path: z.string(),
  size: z.string().optional(),
});

/** Sparse shape for the `[sandbox]` section. Every field may be absent. */
export const StoredSandboxSchema = z.object({
  size: SandboxSizeSchema.optional(),
  docker: z.boolean().optional(),
  volumes: z.array(StoredSandboxVolumeSchema).optional(),
});

/** Sparse shape for the `[harness]` section. */
export const StoredHarnessSchema = z.object({
  provider: HarnessProviderSchema.optional(),
});

/**
 * A scheduled job entry. Leaf entries — either present with all fields, or
 * absent entirely. No partial storage.
 */
export const ScheduledJobSchema = z.object({
  name: z.string(),
  branch: z.string(),
  schedule: z.string(),
  prompt: z.string(),
});

/**
 * Top-level sparse shape as stored by the DO. Unknown keys are stripped by
 * Zod's default object behavior, which is what `parseRepoConfigToml` relies
 * on for forward compatibility.
 */
export const StoredRepoConfigSettingsSchema = z.object({
  sandbox: StoredSandboxSchema.optional(),
  harness: StoredHarnessSchema.optional(),
  scheduled_jobs: z.array(ScheduledJobSchema).optional(),
});

// ── Resolved (read-side) schemas ─────────────────────────────────────────────
// Resolved schemas apply defaults on read so every consumer sees a fully
// populated object without worrying about whether a field was written.

/** Resolved volume: `size` defaults to `"10gb"` when absent. */
export const ResolvedSandboxVolumeSchema = z.object({
  path: z.string(),
  size: z.string().default("10gb"),
});

/** Resolved sandbox: size/docker/volumes all have defaults. */
export const ResolvedSandboxSchema = z.object({
  size: SandboxSizeSchema.default("medium"),
  docker: z.boolean().default(false),
  volumes: z.array(ResolvedSandboxVolumeSchema).default([]),
});

/** Resolved harness: provider defaults to `"claude"`. */
export const ResolvedHarnessSchema = z.object({
  provider: HarnessProviderSchema.default("claude"),
});

/**
 * Top-level resolved shape — always fully populated after `.parse()`.
 *
 * We use `.prefault({})` on object sub-schemas (not `.default({})`) because
 * in Zod v4, `.default(value)` bypasses validation and returns `value` as-is,
 * so inner field defaults would NOT be applied. `.prefault({})` substitutes
 * `{}` as the input and then runs it through the child schema, correctly
 * triggering each inner `.default(...)`.
 *
 * `scheduled_jobs` uses plain `.default([])` deliberately: its elements have
 * no inner defaults to trigger (see `ScheduledJobSchema` — all fields
 * required), so `.prefault` would buy nothing.
 */
export const RepoConfigSettingsSchema = z.object({
  sandbox: ResolvedSandboxSchema.prefault({}),
  harness: ResolvedHarnessSchema.prefault({}),
  scheduled_jobs: z.array(ScheduledJobSchema).default([]),
});

// ── Types ────────────────────────────────────────────────────────────────────

/** Sparse settings as stored in the DO (fields may be missing). */
export type StoredRepoConfigSettings = z.infer<
  typeof StoredRepoConfigSettingsSchema
>;

/** Fully resolved settings with defaults applied — safe to consume. */
export type RepoConfigSettings = z.infer<typeof RepoConfigSettingsSchema>;

/**
 * Common envelope for a RepoConfig record: repository identity fields plus a
 * settings payload. Parameterized over the settings shape so the stored
 * (sparse) and resolved variants share one definition and cannot drift apart
 * field-by-field.
 */
type RepoConfigEnvelope<TSettings> = {
  repositoryId: number;
  repositoryFullName: string;
  installationId: number;
  settings: TSettings;
};

/** Stored RepoConfig envelope (DO record). */
export type StoredRepoConfig = RepoConfigEnvelope<StoredRepoConfigSettings>;

/** Resolved RepoConfig envelope — defaults applied for consumers. */
export type RepoConfig = RepoConfigEnvelope<RepoConfigSettings>;

// ── Helpers ──────────────────────────────────────────────────────────────────

/**
 * Apply read-side defaults to a (possibly undefined) sparse settings object.
 * Pure — no I/O, no side effects. Safe to call inside `step.do` callbacks.
 *
 * @param stored Sparse settings from the DO, or `undefined` when the repo
 *   has never stored any config.
 * @returns Fully populated settings with every default applied.
 */
export function resolveRepoConfigSettings(
  // `?` already admits `undefined`; the former explicit `| undefined` union
  // was redundant. Callers are unaffected — the parameter type is identical.
  stored?: StoredRepoConfigSettings,
): RepoConfigSettings {
  return RepoConfigSettingsSchema.parse(stored ?? {});
}

/**
* Parse a TOML string into a sparse, validated `StoredRepoConfigSettings`.
*
* Invariants:
* - Unknown keys are silently dropped (Zod default `.strip()` behavior) so
* repos can land forward-compatible config before the server knows about
* it.
* - No defaults are materialized here — this is the write path. Defaults
* live in `resolveRepoConfigSettings`.
* - Error messages NEVER include raw input values. We assemble messages from
* `issue.path` + `issue.message` only, so a malformed value that happens
* to contain a secret cannot leak into logs or thrown exceptions.
*
* Throws `NonRetryableError` on any failure — TOML syntax or schema violation.
*/
export function parseRepoConfigToml(raw: string): StoredRepoConfigSettings {
let parsed: unknown;
try {
parsed = parse(raw);
} catch (err) {
const message = err instanceof Error ? err.message : String(err);
throw new NonRetryableError(`Invalid TOML: ${message}`);
}

const result = StoredRepoConfigSettingsSchema.safeParse(parsed);
if (!result.success) {
// Build the error message from Zod issue paths + messages ONLY — never
// include `issue.input` or any raw value. See module docstring for the
// secret-leak invariant.
const issues = result.error.issues
.map((issue) => `${issue.path.join(".")}: ${issue.message}`)
.join("; ");
throw new NonRetryableError(`Invalid RepoConfig: ${issues}`);
}

return result.data;
}
Loading
Loading