From 1dab1b471b01e769548e40b787360acba79af78c Mon Sep 17 00:00:00 2001 From: Simon Hartcher Date: Sat, 21 Mar 2026 12:04:07 +1100 Subject: [PATCH 01/18] docs: add XML documentation supplementation design spec --- ...26-03-21-xml-doc-supplementation-design.md | 102 ++++++++++++++++++ 1 file changed, 102 insertions(+) create mode 100644 docs/superpowers/specs/2026-03-21-xml-doc-supplementation-design.md diff --git a/docs/superpowers/specs/2026-03-21-xml-doc-supplementation-design.md b/docs/superpowers/specs/2026-03-21-xml-doc-supplementation-design.md new file mode 100644 index 0000000..06c59c1 --- /dev/null +++ b/docs/superpowers/specs/2026-03-21-xml-doc-supplementation-design.md @@ -0,0 +1,102 @@ +# XML Documentation Supplementation + +## Problem + +Godot's `--dump-extension-api-with-docs` JSON export does not contain all documentation. Tutorials, some GlobalScope entries, and other doc fields are only available in the XML documentation files within the Godot source tree (`doc/classes/*.xml`). + +## Solution + +Automatically fetch and parse Godot's XML documentation from the source tree, using it to supplement the existing JSON data with missing fields. + +## Design + +### Source Acquisition + +1. Run `godot --version` to get the version string (e.g., `4.6.1.stable.official.14d19694e`). +2. Parse the commit hash from the version string (`14d19694e`). +3. Download the source tarball from `https://github.com/godotengine/godot/archive/{hash}.tar.gz`. +4. Stream the tarball through gzip decompression and tar extraction, filtering for `*/doc/classes/*.xml`. +5. Write matching XML files to `~/.cache/gdoc/xml_docs/`. +6. The tarball is never written to disk -- streamed directly from HTTP through decompression and extraction. + +This runs automatically on first use alongside the existing JSON generation. `--clear-cache` clears XML docs too. 
+ +### XML Parsing + +**Dependency**: `ianprime0509/zig-xml` -- a pull/streaming XML parser targeting Zig 0.15.1, with W3C conformance testing and standard `build.zig.zon` integration. + +**New module**: `src/XmlDocParser.zig` -- parses a single Godot XML class doc file and returns supplemental data. + +Godot XML doc structure: + +```xml + + A 2D game object. + ... + + $DOCS_URL/tutorials/2d/custom_drawing_in_2d.html + + + + ... + + + +``` + +### Merge Strategy + +XML data supplements JSON data at display time, not during cache generation. The JSON and XML parsing paths remain independent. + +Merge rules: +- **Tutorials**: New field on `Entry`, rendered as a "Tutorials" section in output. +- **Missing descriptions**: If a JSON entry has no description but the XML does, use the XML description. +- **GlobalScope entries**: XML docs for classes/entries not present in the JSON are added as new entries. + +### Tar Extraction + +Uses Zig 0.15 stdlib -- no external dependency needed: +- `std.http.Client` for HTTP download +- `std.compress.gzip` for decompression +- `std.tar` for streaming extraction + +The pipeline streams download -> decompress -> extract without writing the full tarball to disk. + +### Cache Layout + +``` +~/.cache/gdoc/ +├── extension_api.json # Existing JSON dump +├── xml_docs/ # New: extracted XML files +│ ├── Node2D.xml +│ ├── GlobalScope.xml +│ └── ... +├── Node2D/ +│ └── index.md # Existing markdown cache +└── ... +``` + +Presence of `xml_docs/` serves as the sentinel for whether XML docs have been fetched. 
+ +## Changes + +### New dependency + +- `zig-xml` (`ianprime0509/zig-xml`) -- XML pull parser, 0BSD license + +### New files + +- `src/XmlDocParser.zig` -- Parses Godot XML doc files, returns supplemental data (tutorials, descriptions, GlobalScope entries) +- `src/source_fetch.zig` -- Version parsing, tarball download, streaming extraction of XML docs + +### Modified files + +- `build.zig.zon` -- Add zig-xml dependency +- `build.zig` -- Wire zig-xml into modules +- `src/DocDatabase.zig` -- Add `tutorials` field to `Entry`, possibly new `EntryKind` values for GlobalScope items +- `src/root.zig` -- Merge XML data at display time, trigger XML fetch in cache population flow +- `src/cache.zig` -- Extend cache population to include XML fetch, update sentinel check + +### No breaking changes + +Existing CLI interface unchanged. `--clear-cache` clears everything including XML docs. From e04691422b21e2924fcb158de25085868849530c Mon Sep 17 00:00:00 2001 From: Simon Hartcher Date: Sat, 21 Mar 2026 12:07:48 +1100 Subject: [PATCH 02/18] docs: address spec review feedback - Fix std.compress.gzip -> std.compress.flate reference - Add fallback for when Godot is not installed - Use tag-based tarball URL with hash fallback - Add error handling section with graceful degradation - Include modules/*/doc_classes/*.xml in tar filter - Add .complete marker for staleness/partial download detection - Specify Tutorial struct type and $DOCS_URL expansion - Clarify merge happens during cache generation, not display time - Note --godot-extension-api bypasses XML supplementation --- ...26-03-21-xml-doc-supplementation-design.md | 53 ++++++++++++++----- 1 file changed, 39 insertions(+), 14 deletions(-) diff --git a/docs/superpowers/specs/2026-03-21-xml-doc-supplementation-design.md b/docs/superpowers/specs/2026-03-21-xml-doc-supplementation-design.md index 06c59c1..23bab53 100644 --- a/docs/superpowers/specs/2026-03-21-xml-doc-supplementation-design.md +++ 
b/docs/superpowers/specs/2026-03-21-xml-doc-supplementation-design.md @@ -2,7 +2,7 @@ ## Problem -Godot's `--dump-extension-api-with-docs` JSON export does not contain all documentation. Tutorials, some GlobalScope entries, and other doc fields are only available in the XML documentation files within the Godot source tree (`doc/classes/*.xml`). +Godot's `--dump-extension-api-with-docs` JSON export does not contain all documentation. Tutorials, some GlobalScope entries, and other doc fields are only available in the XML documentation files within the Godot source tree (`doc/classes/*.xml` and `modules/*/doc_classes/*.xml`). ## Solution @@ -13,11 +13,16 @@ Automatically fetch and parse Godot's XML documentation from the source tree, us ### Source Acquisition 1. Run `godot --version` to get the version string (e.g., `4.6.1.stable.official.14d19694e`). -2. Parse the commit hash from the version string (`14d19694e`). -3. Download the source tarball from `https://github.com/godotengine/godot/archive/{hash}.tar.gz`. -4. Stream the tarball through gzip decompression and tar extraction, filtering for `*/doc/classes/*.xml`. +2. Parse the version number and commit hash. The version number (`4.6.1`) is used to construct a tag-based tarball URL; the commit hash is a fallback. +3. Download the source tarball from `https://github.com/godotengine/godot/archive/refs/tags/{version}-stable.tar.gz` (falling back to `https://github.com/godotengine/godot/archive/{hash}.tar.gz` if the tag URL fails). +4. Stream the tarball through gzip decompression and tar extraction, filtering for: + - `*/doc/classes/*.xml` (core class docs) + - `*/modules/*/doc_classes/*.xml` (module class docs, e.g., GDScript, WebSocket) 5. Write matching XML files to `~/.cache/gdoc/xml_docs/`. -6. The tarball is never written to disk -- streamed directly from HTTP through decompression and extraction. +6. Write a `.complete` marker file with the version string after successful extraction. +7. 
The tarball is never written to disk -- streamed directly from HTTP through decompression and extraction. + +**When Godot is not installed**: The existing fallback path downloads JSON from GitHub. In this case, use the latest stable release tag from the GitHub API to determine the tarball URL for XML docs. This runs automatically on first use alongside the existing JSON generation. `--clear-cache` clears XML docs too. @@ -44,23 +49,27 @@ Godot XML doc structure: ``` +**`$DOCS_URL` expansion**: Replace `$DOCS_URL` with `https://docs.godotengine.org/en/stable` when rendering tutorial links. + ### Merge Strategy -XML data supplements JSON data at display time, not during cache generation. The JSON and XML parsing paths remain independent. +XML data supplements JSON data during markdown cache generation. When generating cached markdown for a symbol, the XML file for that class is parsed and merged before writing to disk. Merge rules: -- **Tutorials**: New field on `Entry`, rendered as a "Tutorials" section in output. +- **Tutorials**: New field on `Entry` as `?[]Tutorial` where `Tutorial = struct { title: []const u8, url: []const u8 }`. Rendered as a "Tutorials" section in output. - **Missing descriptions**: If a JSON entry has no description but the XML does, use the XML description. -- **GlobalScope entries**: XML docs for classes/entries not present in the JSON are added as new entries. +- **GlobalScope entries**: XML docs for classes/entries not present in the JSON are added as new `Entry` values to the database. + +When using `--godot-extension-api` (custom JSON path), XML supplementation does not apply. ### Tar Extraction Uses Zig 0.15 stdlib -- no external dependency needed: - `std.http.Client` for HTTP download -- `std.compress.gzip` for decompression +- `std.compress.flate` with gzip container mode for decompression - `std.tar` for streaming extraction -The pipeline streams download -> decompress -> extract without writing the full tarball to disk. 
+The pipeline streams download -> decompress -> extract without writing the full tarball to disk. The full tarball is ~50-80 MB compressed; only the XML files (~5 MB) are written to disk. ### Cache Layout @@ -68,15 +77,26 @@ The pipeline streams download -> decompress -> extract without writing the full ~/.cache/gdoc/ ├── extension_api.json # Existing JSON dump ├── xml_docs/ # New: extracted XML files +│ ├── .complete # Marker file with version string │ ├── Node2D.xml -│ ├── GlobalScope.xml +│ ├── @GlobalScope.xml │ └── ... ├── Node2D/ │ └── index.md # Existing markdown cache └── ... ``` -Presence of `xml_docs/` serves as the sentinel for whether XML docs have been fetched. +**Staleness check**: On startup, compare the version in `xml_docs/.complete` against the current `godot --version`. If they differ, re-fetch XML docs. Presence of `.complete` (not just the directory) is the sentinel for a successful fetch. + +### Error Handling + +- **Version parsing failure** (unexpected format, no hash): Skip XML supplementation, proceed with JSON-only display. Log a warning. +- **Download failure** (network error, 404, rate limit): Skip XML supplementation, proceed with JSON-only display. Log a warning. +- **Partial download** (interrupted stream): No `.complete` marker is written, so next run will retry. +- **Malformed XML**: Skip that individual XML file, proceed with other files. Log which file failed. +- **Disk space**: Rely on OS write errors propagating; ~5 MB of XML is unlikely to be a concern. + +In all error cases, gdoc degrades gracefully -- XML supplementation is best-effort, and the tool remains fully functional with JSON-only data. 
## Changes @@ -94,9 +114,14 @@ Presence of `xml_docs/` serves as the sentinel for whether XML docs have been fe - `build.zig.zon` -- Add zig-xml dependency - `build.zig` -- Wire zig-xml into modules - `src/DocDatabase.zig` -- Add `tutorials` field to `Entry`, possibly new `EntryKind` values for GlobalScope items -- `src/root.zig` -- Merge XML data at display time, trigger XML fetch in cache population flow -- `src/cache.zig` -- Extend cache population to include XML fetch, update sentinel check +- `src/root.zig` -- Merge XML data during cache generation, trigger XML fetch in cache population flow +- `src/cache.zig` -- Extend cache population to include XML fetch, update sentinel/staleness check ### No breaking changes Existing CLI interface unchanged. `--clear-cache` clears everything including XML docs. + +### Known limitations + +- XML docs total ~800+ files across `doc/classes/` and `modules/*/doc_classes/`, consuming ~5 MB on disk. +- Full tarball must be streamed even though only XML files are extracted (tar is sequential). From 5a246626bfabde6e389808522214ff3459e8368f Mon Sep 17 00:00:00 2001 From: Simon Hartcher Date: Sat, 21 Mar 2026 12:22:14 +1100 Subject: [PATCH 03/18] docs: add XML doc supplementation implementation plan 10-task plan covering: zig-xml dependency, version parsing, tarball download/extraction, XML parsing, Entry tutorials field, cache integration, XML merge into DocDatabase, and e2e verification. 
--- .../2026-03-21-xml-doc-supplementation.md | 1221 +++++++++++++++++ 1 file changed, 1221 insertions(+) create mode 100644 docs/superpowers/plans/2026-03-21-xml-doc-supplementation.md diff --git a/docs/superpowers/plans/2026-03-21-xml-doc-supplementation.md b/docs/superpowers/plans/2026-03-21-xml-doc-supplementation.md new file mode 100644 index 0000000..f37879b --- /dev/null +++ b/docs/superpowers/plans/2026-03-21-xml-doc-supplementation.md @@ -0,0 +1,1221 @@ +# XML Documentation Supplementation Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Supplement Godot's JSON API docs with XML documentation from the Godot source tree, adding tutorials, missing descriptions, and GlobalScope entries. + +**Architecture:** New `source_fetch.zig` handles version parsing and tarball streaming extraction. New `XmlDocParser.zig` parses Godot XML class docs. XML data merges into `DocDatabase` entries during markdown cache generation. All errors degrade gracefully to JSON-only mode. + +**Tech Stack:** Zig 0.15.2, zig-xml (ianprime0509/zig-xml), std.tar, std.compress.flate (gzip container), std.http.Client + +**API Notes:** The Zig 0.15.2 stdlib APIs for tar, gzip, and HTTP may have different signatures than what is shown in code snippets below. Code snippets illustrate the *intent* and *data flow*; the implementer must verify exact function signatures against the Zig stdlib source (e.g., `std.tar`, `std.compress.gzip` or `std.compress.flate` with gzip container mode, `std.http.Client`) and adapt accordingly. When in doubt, check the Zig stdlib source or run `zig std` for documentation. 
+ +**Spec:** `docs/superpowers/specs/2026-03-21-xml-doc-supplementation-design.md` + +--- + +### File Structure + +| File | Responsibility | +|------|---------------| +| `src/source_fetch.zig` (create) | Parse `godot --version`, download tarball, stream-extract XML docs to cache | +| `src/XmlDocParser.zig` (create) | Parse a single Godot XML class doc file into structured data | +| `build.zig.zon` (modify) | Add zig-xml dependency | +| `build.zig` (modify) | Wire zig-xml into the gdoc module | +| `src/DocDatabase.zig` (modify) | Add `tutorials` field to `Entry` | +| `src/cache.zig` (modify) | Add XML staleness check, integrate XML fetch into cache population | +| `src/root.zig` (modify) | Merge XML data during cache generation | + +--- + +### Task 1: Add zig-xml Dependency + +**Files:** +- Modify: `build.zig.zon` +- Modify: `build.zig` + +- [ ] **Step 1: Fetch zig-xml** + +```bash +cd /home/sh/Projects/gdzig/gdoc +zig fetch --save git+https://github.com/ianprime0509/zig-xml +``` + +Expected: `build.zig.zon` updated with zig-xml dependency entry. + +- [ ] **Step 2: Wire zig-xml into build.zig** + +In `build.zig`, after the `zigdown` dependency block (line 23-26), add: + +```zig +const zig_xml = b.dependency("zig_xml", .{ + .target = target, + .optimize = optimize, +}).module("xml"); +``` + +Then add it to the `mod` imports array (line 31-35): + +```zig +.{ .name = "xml", .module = zig_xml }, +``` + +- [ ] **Step 3: Verify build compiles** + +```bash +zig build +``` + +Expected: Clean build, no errors. 
+ +- [ ] **Step 4: Commit** + +```bash +git add build.zig build.zig.zon +git commit -m "feat: add zig-xml dependency for XML doc parsing" +``` + +--- + +### Task 2: Version String Parser in source_fetch.zig + +**Files:** +- Create: `src/source_fetch.zig` + +- [ ] **Step 1: Write failing test for version parsing** + +Create `src/source_fetch.zig`: + +```zig +pub const VersionInfo = struct { + major: []const u8, + minor: []const u8, + patch: []const u8, + hash: ?[]const u8, + + /// Formats the version as "major.minor.patch" into the provided buffer. + pub fn formatVersion(self: VersionInfo, buf: []u8) ?[]const u8 { + return std.fmt.bufPrint(buf, "{s}.{s}.{s}", .{ self.major, self.minor, self.patch }) catch null; + } +}; + +/// Parses a Godot version string like "4.6.1.stable.official.14d19694e" +/// Returns the version components and optional commit hash. +pub fn parseGodotVersion(version_str: []const u8) ?VersionInfo { + _ = version_str; + return null; // TODO: implement +} + +test "parseGodotVersion parses standard version string" { + const result = parseGodotVersion("4.6.1.stable.official.14d19694e").?; + try std.testing.expectEqualStrings("4", result.major); + try std.testing.expectEqualStrings("6", result.minor); + try std.testing.expectEqualStrings("1", result.patch); + try std.testing.expectEqualStrings("14d19694e", result.hash.?); +} + +test "parseGodotVersion parses version without hash" { + const result = parseGodotVersion("4.6.1.stable.custom_build").?; + try std.testing.expectEqualStrings("4", result.major); + try std.testing.expectEqualStrings("6", result.minor); + try std.testing.expectEqualStrings("1", result.patch); + try std.testing.expect(result.hash == null); +} + +test "parseGodotVersion handles dev builds" { + const result = parseGodotVersion("4.7.0.dev.official.abc123def").?; + try std.testing.expectEqualStrings("4", result.major); + try std.testing.expectEqualStrings("7", result.minor); + try std.testing.expectEqualStrings("0", result.patch); + 
try std.testing.expectEqualStrings("abc123def", result.hash.?); +} + +test "parseGodotVersion returns null for empty string" { + try std.testing.expect(parseGodotVersion("") == null); +} + +test "parseGodotVersion returns null for malformed string" { + try std.testing.expect(parseGodotVersion("not-a-version") == null); +} + +const std = @import("std"); +``` + +- [ ] **Step 2: Run tests to verify they fail** + +```bash +zig build test 2>&1 | head -20 +``` + +Expected: Tests fail because `parseGodotVersion` returns `null`. + +- [ ] **Step 3: Implement parseGodotVersion** + +Replace the stub with: + +```zig +pub fn parseGodotVersion(version_str: []const u8) ?VersionInfo { + if (version_str.len == 0) return null; + + // Split on dots: "4.6.1.stable.official.14d19694e" + var iter = std.mem.splitScalar(u8, version_str, '.'); + const major = iter.next() orelse return null; + const minor = iter.next() orelse return null; + const patch = iter.next() orelse return null; + + // Validate major/minor/patch are numeric + for (major) |c| if (!std.ascii.isDigit(c)) return null; + for (minor) |c| if (!std.ascii.isDigit(c)) return null; + for (patch) |c| if (!std.ascii.isDigit(c)) return null; + + // Skip stability label (stable/dev/beta/rc) + _ = iter.next() orelse return VersionInfo{ + .major = major, + .minor = minor, + .patch = patch, + .hash = null, + }; + + // Next segment: "official" or "custom_build" etc. + const build_type = iter.next() orelse return VersionInfo{ + .major = major, + .minor = minor, + .patch = patch, + .hash = null, + }; + + // If build type is "official", the next segment is the commit hash + const hash: ?[]const u8 = if (std.mem.eql(u8, build_type, "official")) + iter.next() + else + null; + + return VersionInfo{ + .major = major, + .minor = minor, + .patch = patch, + .hash = hash, + }; +} +``` + +- [ ] **Step 4: Register module in build.zig** + +`source_fetch.zig` is part of the `gdoc` module. 
Since `root.zig` uses `comptime { std.testing.refAllDecls(@This()); }`, add to `src/root.zig`: + +```zig +pub const source_fetch = @import("source_fetch.zig"); +``` + +- [ ] **Step 5: Run tests to verify they pass** + +```bash +zig build test +``` + +Expected: All tests pass. + +- [ ] **Step 6: Commit** + +```bash +git add src/source_fetch.zig src/root.zig +git commit -m "feat: add Godot version string parser" +``` + +--- + +### Task 3: Run godot --version and Parse Output + +**Files:** +- Modify: `src/source_fetch.zig` + +- [ ] **Step 1: Write failing test for getGodotVersion** + +Add to `src/source_fetch.zig`: + +```zig +/// Runs `godot --version` and parses the output. +/// Returns null if godot is not installed or version can't be parsed. +pub fn getGodotVersion(allocator: Allocator) ?VersionInfo { + _ = allocator; + return null; // TODO +} + +test "getGodotVersion with fake godot script" { + const allocator = std.testing.allocator; + + var tmp_dir = std.testing.tmpDir(.{}); + defer tmp_dir.cleanup(); + + const tmp_path = try tmp_dir.dir.realpathAlloc(allocator, "."); + defer allocator.free(tmp_path); + + // Create a fake godot that outputs a version string + const script = "#!/bin/sh\necho '4.6.1.stable.official.14d19694e'"; + try tmp_dir.dir.writeFile(.{ .sub_path = "fake-godot", .data = script }); + + var file = try tmp_dir.dir.openFile("fake-godot", .{}); + try file.chmod(0o755); + file.close(); + + const fake_path = try std.fmt.allocPrint(allocator, "{s}/fake-godot", .{tmp_path}); + defer allocator.free(fake_path); + + const result = getGodotVersionFromPath(allocator, fake_path); + try std.testing.expect(result != null); + try std.testing.expectEqualStrings("14d19694e", result.?.hash.?); +} +``` + +- [ ] **Step 2: Run tests to verify they fail** + +```bash +zig build test 2>&1 | head -20 +``` + +- [ ] **Step 3: Implement getGodotVersionFromPath** + +```zig +/// Runs a godot executable at the given path with --version and parses output. 
+pub fn getGodotVersionFromPath(allocator: Allocator, godot_path: []const u8) ?VersionInfo { + const result = std.process.Child.run(.{ + .argv = &.{ godot_path, "--version" }, + .allocator = allocator, + }) catch return null; + defer allocator.free(result.stdout); + defer allocator.free(result.stderr); + + switch (result.term) { + .Exited => |code| if (code != 0) return null, + else => return null, + } + + const trimmed = std.mem.trimRight(u8, result.stdout, &std.ascii.whitespace); + return parseGodotVersion(trimmed); +} + +/// Convenience wrapper that uses "godot" from PATH. +pub fn getGodotVersion(allocator: Allocator) ?VersionInfo { + return getGodotVersionFromPath(allocator, "godot"); +} +``` + +- [ ] **Step 4: Run tests** + +```bash +zig build test +``` + +Expected: All pass. + +- [ ] **Step 5: Commit** + +```bash +git add src/source_fetch.zig +git commit -m "feat: run godot --version and parse output" +``` + +--- + +### Task 4: Tarball Download and XML Extraction + +**Files:** +- Modify: `src/source_fetch.zig` + +- [ ] **Step 1: Write the tarball URL builder** + +Add to `src/source_fetch.zig`: + +```zig +/// Builds the GitHub tarball URL for a Godot version. +/// Tries tag-based URL first (e.g., 4.6.1-stable), with hash fallback. 
+pub fn buildTarballUrl(buf: []u8, version: VersionInfo) ?[]const u8 { + const result = std.fmt.bufPrint(buf, "https://github.com/godotengine/godot/archive/refs/tags/{s}.{s}.{s}-stable.tar.gz", .{ + version.major, version.minor, version.patch, + }) catch return null; + return result; +} + +pub fn buildTarballUrlFromHash(buf: []u8, hash: []const u8) ?[]const u8 { + const result = std.fmt.bufPrint(buf, "https://github.com/godotengine/godot/archive/{s}.tar.gz", .{hash}) catch return null; + return result; +} + +test "buildTarballUrl formats tag-based URL" { + var buf: [256]u8 = undefined; + const url = buildTarballUrl(&buf, .{ + .major = "4", + .minor = "6", + .patch = "1", + .hash = "14d19694e", + }).?; + try std.testing.expectEqualStrings( + "https://github.com/godotengine/godot/archive/refs/tags/4.6.1-stable.tar.gz", + url, + ); +} + +test "buildTarballUrlFromHash formats hash-based URL" { + var buf: [256]u8 = undefined; + const url = buildTarballUrlFromHash(&buf, "14d19694e").?; + try std.testing.expectEqualStrings( + "https://github.com/godotengine/godot/archive/14d19694e.tar.gz", + url, + ); +} +``` + +- [ ] **Step 2: Run tests** + +```bash +zig build test +``` + +- [ ] **Step 3: Write the streaming extraction function** + +This is the core function that downloads a tarball and extracts XML docs. Add to `src/source_fetch.zig`: + +```zig +const Allocator = std.mem.Allocator; + +/// Downloads the Godot source tarball and extracts XML doc files. +/// Streams: HTTP -> gzip decompress -> tar extract -> filter XML files. +/// Writes extracted XML files to `xml_docs_dir`. +/// +/// **API NOTE:** The exact std.tar, std.compress.gzip, and std.http.Client +/// signatures must be verified against the Zig 0.15.2 stdlib source. +/// The pseudocode below shows the intended data flow. 
Key things to verify: +/// - gzip decompression: try `std.compress.gzip.decompress(reader)` or +/// `std.compress.flate.decompressor(.gzip, reader)` +/// - tar iteration: check `std.tar.iterator()` or `std.tar.pipeToFileSystem()` +/// - HTTP: `std.http.Client` open/send/wait or fetch API +/// - File writer: use `.writer(&buf)` then `.interface` pattern from cache.zig +pub fn fetchAndExtractXmlDocs( + allocator: Allocator, + url: []const u8, + xml_docs_dir: []const u8, +) !void { + // 1. HTTP GET the tarball URL + var client: std.http.Client = .init(allocator); + defer client.deinit(); + + // Open connection, send request, wait for response + var header_buf: [16 * 1024]u8 = undefined; + var req = try client.open(.GET, try std.Uri.parse(url), .{ + .server_header_buffer = &header_buf, + }); + defer req.deinit(); + try req.send(); + try req.wait(); + + if (req.response.status != .ok) return error.DownloadFailed; + + // 2. Pipe HTTP response reader -> gzip decompressor -> tar iterator + // Verify exact API: std.compress.gzip or std.compress.flate with gzip mode + var decompress = std.compress.gzip.decompressor(req.reader()); + + // 3. Iterate tar entries, filtering for XML doc files + var tar_iter = std.tar.iterator(decompress.reader(), .{}); + + while (try tar_iter.next()) |entry| { + const name = entry.name; + const basename = std.fs.path.basename(name); + + if (!std.mem.endsWith(u8, basename, ".xml")) continue; + + // Match: */doc/classes/*.xml and */modules/*/doc_classes/*.xml + const is_core_doc = std.mem.indexOf(u8, name, "/doc/classes/") != null; + const is_module_doc = std.mem.indexOf(u8, name, "/doc_classes/") != null; + if (!is_core_doc and !is_module_doc) continue; + + // 4. 
Write matching XML file to xml_docs_dir/ClassName.xml + const output_path = try std.fs.path.join(allocator, &.{ xml_docs_dir, basename }); + defer allocator.free(output_path); + + var output_file = try std.fs.createFileAbsolute(output_path, .{}); + defer output_file.close(); + + // Stream entry content to file using buffered writer + // Use the .writer(&buf) then .interface pattern from cache.zig + var buf: [4096]u8 = undefined; + var file_writer = output_file.writer(&buf); + var writer = &file_writer.interface; + + // Read entry content and write to file + // Exact API depends on tar entry reader interface + var read_buf: [8192]u8 = undefined; + while (true) { + const bytes_read = try entry.reader().read(&read_buf); + if (bytes_read == 0) break; + try writer.writeAll(read_buf[0..bytes_read]); + } + try writer.flush(); + } +} +``` + +**IMPORTANT for implementer:** The `std.tar`, `std.compress.gzip`/`std.compress.flate`, and `std.http.Client` APIs shown above are pseudocode illustrating the data flow. You **must** check the actual Zig 0.15.2 stdlib source for correct function signatures before coding. The streaming pipeline concept (HTTP -> gzip -> tar -> filter) is correct; only the exact API calls need verification. + +- [ ] **Step 4: Write the .complete marker function** + +```zig +/// Writes a .complete marker file with the version string. +pub fn writeCompleteMarker(allocator: Allocator, xml_docs_dir: []const u8, version_str: []const u8) !void { + const marker_path = try std.fs.path.join(allocator, &.{ xml_docs_dir, ".complete" }); + defer allocator.free(marker_path); + + var file = try std.fs.createFileAbsolute(marker_path, .{}); + defer file.close(); + + var buf: [256]u8 = undefined; + var file_writer = file.writer(&buf); + var writer = &file_writer.interface; + try writer.writeAll(version_str); + try writer.flush(); +} + +/// Reads the .complete marker and returns the version string, or null if not present. 
+pub fn readCompleteMarker(allocator: Allocator, xml_docs_dir: []const u8) ?[]const u8 { + const marker_path = std.fs.path.join(allocator, &.{ xml_docs_dir, ".complete" }) catch return null; + defer allocator.free(marker_path); + + const file = std.fs.openFileAbsolute(marker_path, .{}) catch return null; + defer file.close(); + + var buf: [256]u8 = undefined; + var file_reader = file.reader(&buf); + var reader = &file_reader.interface; + return reader.readAlloc(allocator, 256) catch null; +} +``` + +- [ ] **Step 5: Write tests for marker functions** + +```zig +test "writeCompleteMarker and readCompleteMarker round-trip" { + const allocator = std.testing.allocator; + + var tmp_dir = std.testing.tmpDir(.{}); + defer tmp_dir.cleanup(); + + const tmp_path = try tmp_dir.dir.realpathAlloc(allocator, "."); + defer allocator.free(tmp_path); + + try writeCompleteMarker(allocator, tmp_path, "4.6.1.stable.official.14d19694e"); + + const read_back = readCompleteMarker(allocator, tmp_path).?; + defer allocator.free(read_back); + + try std.testing.expectEqualStrings("4.6.1.stable.official.14d19694e", read_back); +} + +test "readCompleteMarker returns null when no marker exists" { + const allocator = std.testing.allocator; + + var tmp_dir = std.testing.tmpDir(.{}); + defer tmp_dir.cleanup(); + + const tmp_path = try tmp_dir.dir.realpathAlloc(allocator, "."); + defer allocator.free(tmp_path); + + try std.testing.expect(readCompleteMarker(allocator, tmp_path) == null); +} +``` + +- [ ] **Step 6: Run tests** + +```bash +zig build test +``` + +Expected: Marker tests pass. The `fetchAndExtractXmlDocs` function won't be unit tested (it requires network); it will be integration tested in a later task. 
+ +- [ ] **Step 7: Commit** + +```bash +git add src/source_fetch.zig +git commit -m "feat: add tarball download, XML extraction, and cache markers" +``` + +--- + +### Task 5: XML Doc Parser + +**Files:** +- Create: `src/XmlDocParser.zig` +- Modify: `src/root.zig` (add import) + +- [ ] **Step 1: Define the output data structures** + +Create `src/XmlDocParser.zig`: + +```zig +const XmlDocParser = @This(); + +pub const Tutorial = struct { + title: []const u8, + url: []const u8, +}; + +pub const MemberDoc = struct { + name: []const u8, + description: ?[]const u8 = null, +}; + +pub const ClassDoc = struct { + name: []const u8, + inherits: ?[]const u8 = null, + brief_description: ?[]const u8 = null, + description: ?[]const u8 = null, + tutorials: ?[]Tutorial = null, + methods: ?[]MemberDoc = null, + properties: ?[]MemberDoc = null, + signals: ?[]MemberDoc = null, + constants: ?[]MemberDoc = null, +}; + +const DOCS_BASE_URL = "https://docs.godotengine.org/en/stable"; + +const std = @import("std"); +const Allocator = std.mem.Allocator; +const xml = @import("xml"); +``` + +- [ ] **Step 2: Write a test with sample XML** + +Add to `src/XmlDocParser.zig`: + +```zig +/// Parses a Godot XML class documentation file. +/// All returned strings are allocated with the provided allocator. +pub fn parseClassDoc(allocator: Allocator, xml_content: []const u8) !ClassDoc { + _ = allocator; + _ = xml_content; + return error.NotImplemented; // TODO +} + +const test_xml = + \\ + \\ + \\ A 2D game object. + \\ Node2D is the base class for 2D. + \\ + \\ $DOCS_URL/tutorials/2d/custom_drawing.html + \\ https://github.com/godotengine/godot-demo-projects/tree/master/2d + \\ + \\ + \\ + \\ + \\ + \\ Multiplies the current scale by the ratio vector. + \\ + \\ + \\ + \\ + \\Position, relative to the node's parent. 
+ \\ + \\ + \\ +; + +test "parseClassDoc parses class name and inherits" { + const allocator = std.testing.allocator; + const doc = try parseClassDoc(allocator, test_xml); + defer freeClassDoc(allocator, doc); + + try std.testing.expectEqualStrings("Node2D", doc.name); + try std.testing.expectEqualStrings("CanvasItem", doc.inherits.?); +} + +test "parseClassDoc parses descriptions" { + const allocator = std.testing.allocator; + const doc = try parseClassDoc(allocator, test_xml); + defer freeClassDoc(allocator, doc); + + try std.testing.expectEqualStrings("A 2D game object.", doc.brief_description.?); + try std.testing.expectEqualStrings("Node2D is the base class for 2D.", doc.description.?); +} + +test "parseClassDoc parses tutorials with DOCS_URL expansion" { + const allocator = std.testing.allocator; + const doc = try parseClassDoc(allocator, test_xml); + defer freeClassDoc(allocator, doc); + + try std.testing.expect(doc.tutorials != null); + try std.testing.expectEqual(@as(usize, 2), doc.tutorials.?.len); + + try std.testing.expectEqualStrings("Custom drawing in 2D", doc.tutorials.?[0].title); + try std.testing.expectEqualStrings( + "https://docs.godotengine.org/en/stable/tutorials/2d/custom_drawing.html", + doc.tutorials.?[0].url, + ); + + // External URL should be left unchanged + try std.testing.expectEqualStrings( + "https://github.com/godotengine/godot-demo-projects/tree/master/2d", + doc.tutorials.?[1].url, + ); +} + +test "parseClassDoc parses methods" { + const allocator = std.testing.allocator; + const doc = try parseClassDoc(allocator, test_xml); + defer freeClassDoc(allocator, doc); + + try std.testing.expect(doc.methods != null); + try std.testing.expectEqual(@as(usize, 1), doc.methods.?.len); + try std.testing.expectEqualStrings("apply_scale", doc.methods.?[0].name); + try std.testing.expectEqualStrings("Multiplies the current scale by the ratio vector.", doc.methods.?[0].description.?); +} + +test "parseClassDoc parses properties from members 
element" { + const allocator = std.testing.allocator; + const doc = try parseClassDoc(allocator, test_xml); + defer freeClassDoc(allocator, doc); + + try std.testing.expect(doc.properties != null); + try std.testing.expectEqual(@as(usize, 1), doc.properties.?.len); + try std.testing.expectEqualStrings("position", doc.properties.?[0].name); +} + +/// Frees all memory allocated by parseClassDoc. +pub fn freeClassDoc(allocator: Allocator, doc: ClassDoc) void { + _ = allocator; + _ = doc; + // TODO: free all allocated strings and slices +} +``` + +- [ ] **Step 3: Run tests to verify they fail** + +```bash +zig build test 2>&1 | head -20 +``` + +Expected: `error.NotImplemented` + +- [ ] **Step 4: Implement parseClassDoc** + +Implement using zig-xml's pull parser API. The implementation should: + +1. Create an `xml.Reader` from the content +2. Loop through events matching element starts/ends +3. Collect text content for `brief_description`, `description` +4. Parse `class` element attributes for `name` and `inherits` +5. Parse `tutorials/link` elements, expanding `$DOCS_URL` +6. Parse `methods/method` and `members/member` elements +7. Build and return `ClassDoc` + +The exact API calls depend on zig-xml's reader interface. Consult zig-xml's README or tests for the exact method names (likely `reader.read()` returning tagged events). + +- [ ] **Step 5: Implement freeClassDoc** + +Free all allocated slices and strings in the `ClassDoc`. + +- [ ] **Step 6: Register in root.zig** + +Add to `src/root.zig`: + +```zig +pub const XmlDocParser = @import("XmlDocParser.zig"); +``` + +- [ ] **Step 7: Run tests** + +```bash +zig build test +``` + +Expected: All pass. 
+ +- [ ] **Step 8: Commit** + +```bash +git add src/XmlDocParser.zig src/root.zig +git commit -m "feat: add XML doc parser for Godot class documentation" +``` + +--- + +### Task 6: Add tutorials Field to Entry + +**Files:** +- Modify: `src/DocDatabase.zig` +- Modify: `src/DocDatabase.zig` (markdown generation) + +- [ ] **Step 1: Add tutorials field to Entry struct** + +In `src/DocDatabase.zig`, add to the `Entry` struct (after line 32): + +```zig +pub const Tutorial = struct { + title: []const u8, + url: []const u8, +}; +``` + +And add the field to `Entry` (after `members`): + +```zig +tutorials: ?[]const Tutorial = null, +``` + +- [ ] **Step 2: Update generateMarkdownForSymbol to render tutorials** + +Find the `generateMarkdownForSymbol` function in `src/DocDatabase.zig`. After the description section, add: + +```zig +if (entry.tutorials) |tutorials| { + if (tutorials.len > 0) { + try writer.writeAll("\n## Tutorials\n\n"); + for (tutorials) |tutorial| { + try writer.print("- [{s}]({s})\n", .{ tutorial.title, tutorial.url }); + } + } +} +``` + +- [ ] **Step 3: Write a snapshot test** + +Update an existing test or add a new one that includes tutorials in the entry and verifies the markdown output contains a Tutorials section. + +- [ ] **Step 4: Run tests** + +```bash +zig build test +``` + +Expected: All pass, snapshots clean. 
+ +- [ ] **Step 5: Commit** + +```bash +git add src/DocDatabase.zig +git commit -m "feat: add tutorials field to Entry and render in markdown output" +``` + +--- + +### Task 7: Integrate XML Fetch into Cache Population + +**Files:** +- Modify: `src/cache.zig` +- Modify: `src/root.zig` + +- [ ] **Step 1: Add XML docs directory helpers to cache.zig** + +Add to `src/cache.zig`: + +```zig +pub fn getXmlDocsDirInCache(allocator: Allocator, cache_dir: []const u8) ![]const u8 { + return std.fmt.allocPrint( + allocator, + "{f}", + .{std.fs.path.fmtJoin(&[_][]const u8{ cache_dir, "xml_docs" })}, + ); +} + +pub fn xmlDocsArePopulated(allocator: Allocator, cache_dir: []const u8) !bool { + const xml_dir = try getXmlDocsDirInCache(allocator, cache_dir); + defer allocator.free(xml_dir); + + const marker = source_fetch.readCompleteMarker(allocator, xml_dir); + if (marker) |m| { + allocator.free(m); + return true; + } + return false; +} +``` + +Add the import at the bottom: + +```zig +const source_fetch = @import("source_fetch.zig"); +``` + +- [ ] **Step 2: Add XML staleness check** + +Add to `src/cache.zig`: + +```zig +/// Checks if XML docs are stale by comparing cached version to current godot version. 
+pub fn xmlDocsAreStale(allocator: Allocator, cache_dir: []const u8, current_version: []const u8) !bool { + const xml_dir = try getXmlDocsDirInCache(allocator, cache_dir); + defer allocator.free(xml_dir); + + const cached_version = source_fetch.readCompleteMarker(allocator, xml_dir) orelse return true; + defer allocator.free(cached_version); + + return !std.mem.eql(u8, cached_version, current_version); +} +``` + +- [ ] **Step 3: Modify root.zig to trigger XML fetch during cache population** + +In `src/root.zig`, modify the cache population block (lines 38-50) to also fetch XML docs and check for staleness: + +```zig +const needs_full_rebuild = !try cache.cacheIsPopulated(allocator, cache_path); + +if (needs_full_rebuild) { + try cache.ensureDirectoryExists(cache_path); + try api.generateApiJsonIfNotExists(allocator, "godot", cache_path); +} + +// Fetch XML docs if missing or stale (best-effort, independent of JSON cache) +if (needs_full_rebuild or !try cache.xmlDocsArePopulated(allocator, cache_path)) { + fetchXmlDocs(allocator, cache_path); +} + +if (needs_full_rebuild) { + const json_path = try cache.getJsonCachePathInDir(allocator, cache_path); + defer allocator.free(json_path); + + const json_file = try std.fs.openFileAbsolute(json_path, .{}); + defer json_file.close(); + + var db = try DocDatabase.loadFromJsonFileLeaky(arena.allocator(), json_file); + + // Merge XML data into db before generating markdown cache + // arena.allocator() for strings that live in the DB, allocator for temporaries + mergeXmlDocs(arena.allocator(), allocator, &db, cache_path); + + try cache.generateMarkdownCache(allocator, db, cache_path); +} +``` + +- [ ] **Step 4: Implement fetchXmlDocs helper** + +Add to `src/root.zig`: + +```zig +fn fetchXmlDocs(allocator: Allocator, cache_path: []const u8) void { + const xml_dir = cache.getXmlDocsDirInCache(allocator, cache_path) catch return; + defer allocator.free(xml_dir); + + cache.ensureDirectoryExists(xml_dir) catch return; + + const 
version = source_fetch.getGodotVersion(allocator) orelse { + // TODO: When Godot is not installed, query GitHub API for latest release tag + // For now, skip XML supplementation without Godot + std.log.warn("godot not found, skipping XML doc supplementation", .{}); + return; + }; + + var url_buf: [256]u8 = undefined; + const url = source_fetch.buildTarballUrl(&url_buf, version) orelse return; + + source_fetch.fetchAndExtractXmlDocs(allocator, url, xml_dir) catch |err| { + // Try hash-based fallback URL + if (version.hash) |hash| { + var hash_url_buf: [256]u8 = undefined; + const hash_url = source_fetch.buildTarballUrlFromHash(&hash_url_buf, hash) orelse return; + source_fetch.fetchAndExtractXmlDocs(allocator, hash_url, xml_dir) catch { + std.log.warn("XML doc fetch failed ({}), proceeding without XML supplementation", .{err}); + return; + }; + } else { + std.log.warn("XML doc fetch failed ({}), proceeding without XML supplementation", .{err}); + return; + } + }; + + var version_buf: [64]u8 = undefined; + const version_str = version.formatVersion(&version_buf) orelse return; + + source_fetch.writeCompleteMarker(allocator, xml_dir, version_str) catch return; +} +``` + +- [ ] **Step 5: Implement mergeXmlDocs helper (stub for now)** + +Add to `src/root.zig`: + +```zig +fn mergeXmlDocs(arena_allocator: Allocator, tmp_allocator: Allocator, db: *DocDatabase, cache_path: []const u8) void { + _ = arena_allocator; + _ = tmp_allocator; + _ = db; + _ = cache_path; + // TODO: implement in Task 8 +} +``` + +- [ ] **Step 6: Run tests** + +```bash +zig build test +``` + +Expected: All pass. Network-dependent code is only called in the actual cache population path, not in tests. 
+ +- [ ] **Step 7: Commit** + +```bash +git add src/cache.zig src/root.zig +git commit -m "feat: integrate XML doc fetch into cache population flow" +``` + +--- + +### Task 8: Merge XML Data into DocDatabase + +**Files:** +- Modify: `src/root.zig` + +- [ ] **Step 1: Implement mergeXmlDocs** + +Replace the stub in `src/root.zig`. + +**IMPORTANT memory ownership:** `parseClassDoc` allocates strings with the provided allocator. Since these strings are stored in the `DocDatabase` (which uses an arena allocator that outlives this function), pass the arena allocator to `parseClassDoc` so the strings live as long as the DB. Do NOT free the parsed content -- the arena owns it. + +```zig +/// Merges XML documentation into the DocDatabase. +/// Uses arena_allocator for all allocations so strings live as long as the DB. +/// Uses tmp_allocator for temporary allocations (paths, etc.) that are freed immediately. +fn mergeXmlDocs(arena_allocator: Allocator, tmp_allocator: Allocator, db: *DocDatabase, cache_path: []const u8) void { + const xml_dir = cache.getXmlDocsDirInCache(tmp_allocator, cache_path) catch return; + defer tmp_allocator.free(xml_dir); + + var dir = std.fs.openDirAbsolute(xml_dir, .{ .iterate = true }) catch return; + defer dir.close(); + + var iter = dir.iterate(); + while (iter.next() catch return) |entry| { + if (!std.mem.endsWith(u8, entry.name, ".xml")) continue; + + const class_name = entry.name[0 .. 
entry.name.len - 4]; // strip .xml + + // Read XML file content (temporary -- only needed for parsing) + const xml_path = std.fs.path.join(tmp_allocator, &.{ xml_dir, entry.name }) catch continue; + defer tmp_allocator.free(xml_path); + + const content = std.fs.openFileAbsolute(xml_path, .{}) catch continue; + defer content.close(); + const xml_bytes = content.readToEndAlloc(tmp_allocator, 2 * 1024 * 1024) catch continue; + defer tmp_allocator.free(xml_bytes); + + // Parse XML -- allocate strings with arena so they outlive this function + const class_doc = XmlDocParser.parseClassDoc(arena_allocator, xml_bytes) catch |err| { + std.log.warn("failed to parse XML doc for {s}: {}", .{ class_name, err }); + continue; + }; + // Do NOT call freeClassDoc -- arena owns the memory + + // Merge tutorials into existing entry + if (class_doc.tutorials) |tutorials| { + if (db.symbols.getPtr(class_name)) |db_entry| { + if (db_entry.tutorials == null and tutorials.len > 0) { + const db_tutorials = arena_allocator.alloc(DocDatabase.Tutorial, tutorials.len) catch continue; + for (tutorials, 0..) 
|t, i| { + db_tutorials[i] = .{ .title = t.title, .url = t.url }; + } + db_entry.tutorials = db_tutorials; + } + } + } + + // Fill missing class description + if (class_doc.description) |xml_desc| { + if (db.symbols.getPtr(class_name)) |db_entry| { + if (db_entry.description == null) { + db_entry.description = xml_desc; + } + } + } + + // Helper: merge member descriptions (methods, properties, signals) + const member_lists = [_]struct { members: ?[]XmlDocParser.MemberDoc }{ + .{ .members = class_doc.methods }, + .{ .members = class_doc.properties }, + .{ .members = class_doc.signals }, + }; + + for (member_lists) |list| { + const members = list.members orelse continue; + for (members) |member| { + const member_key = std.fmt.allocPrint(tmp_allocator, "{s}.{s}", .{ class_name, member.name }) catch continue; + defer tmp_allocator.free(member_key); + + if (db.symbols.getPtr(member_key)) |db_entry| { + if (db_entry.description == null) { + db_entry.description = member.description; + } + } + } + } + + // Add GlobalScope entries not present in JSON + if (db.symbols.get(class_name) == null) { + // This class exists in XML but not in JSON -- add it + const key = std.fmt.allocPrint(arena_allocator, "{s}", .{class_name}) catch continue; + db.symbols.put(arena_allocator, key, .{ + .key = key, + .name = key, + .kind = .class, + .description = class_doc.description, + .brief_description = class_doc.brief_description, + }) catch continue; + } + } +} +``` + +- [ ] **Step 2: Write a test for mergeXmlDocs** + +```zig +test "mergeXmlDocs fills missing descriptions from XML" { + const allocator = std.testing.allocator; + + // Use an arena for DB-lifetime allocations (simulates the real flow) + var arena = std.heap.ArenaAllocator.init(allocator); + defer arena.deinit(); + + var tmp_dir = std.testing.tmpDir(.{}); + defer tmp_dir.cleanup(); + + const cache_dir = try tmp_dir.dir.realpathAlloc(allocator, "."); + defer allocator.free(cache_dir); + + // Create xml_docs dir with a test XML 
file + const xml_dir = try std.fmt.allocPrint(allocator, "{s}/xml_docs", .{cache_dir}); + defer allocator.free(xml_dir); + try std.fs.makeDirAbsolute(xml_dir); + + const xml_path = try std.fmt.allocPrint(allocator, "{s}/TestClass.xml", .{xml_dir}); + defer allocator.free(xml_path); + + const xml_content = + \\ + \\ + \\ A test. + \\ Full description from XML. + \\ + \\ https://example.com + \\ + \\ + ; + try std.fs.cwd().writeFile(.{ .sub_path = xml_path, .data = xml_content }); + + // Create a DB with TestClass but no description + var db = DocDatabase{ .symbols = .empty }; + defer db.symbols.deinit(allocator); + + try db.symbols.put(allocator, "TestClass", .{ + .key = "TestClass", + .name = "TestClass", + .kind = .class, + .description = null, // Missing - should be filled from XML + }); + + // arena.allocator() for DB-lifetime strings, allocator for temporaries + mergeXmlDocs(arena.allocator(), allocator, &db, cache_dir); + + const entry = db.symbols.get("TestClass").?; + try std.testing.expectEqualStrings("Full description from XML.", entry.description.?); + try std.testing.expect(entry.tutorials != null); +} +``` + +- [ ] **Step 3: Run tests** + +```bash +zig build test +``` + +- [ ] **Step 4: Commit** + +```bash +git add src/root.zig +git commit -m "feat: merge XML documentation data into DocDatabase entries" +``` + +--- + +### Task 9: Update clearCache and End-to-End Verification + +**Files:** +- Modify: `src/cache.zig` + +- [ ] **Step 1: Verify clearCache already handles xml_docs** + +`clearCache` deletes the entire cache directory tree (`std.fs.deleteTreeAbsolute`), so `xml_docs/` is already covered. No change needed. 
+ +- [ ] **Step 2: Manual integration test** + +```bash +# Build +zig build + +# Clear existing cache +zig-out/bin/gdoc --clear-cache + +# Look up a class (triggers JSON + XML fetch) +zig-out/bin/gdoc Node2D + +# Verify tutorials section appears +zig-out/bin/gdoc Node2D | grep -i tutorial + +# Look up a member +zig-out/bin/gdoc Node2D.position +``` + +Expected: Node2D output includes a Tutorials section with links. Properties show descriptions that may have been missing before. + +- [ ] **Step 3: Test with --godot-extension-api (should skip XML)** + +```bash +zig-out/bin/gdoc --godot-extension-api extension_api.json Node2D +``` + +Expected: Works as before, no tutorials section (XML not used in this path). + +- [ ] **Step 4: Run full test suite** + +```bash +zig build test +``` + +Expected: All tests pass. + +- [ ] **Step 5: Commit any test adjustments** + +```bash +git add src/ snapshots/ +git commit -m "test: verify end-to-end XML doc supplementation" +``` + +--- + +### Task 10: Update Snapshots + +**Files:** +- Modify: `snapshots/*.md` (as needed) + +- [ ] **Step 1: Regenerate snapshots if format changed** + +If the tutorials section changed the output format for any existing snapshot tests, update the snapshots: + +```bash +zig build test 2>&1 | grep -A5 "snapshot" +``` + +If snapshot diffs exist, review them and update: + +```bash +# Review the diffs +git diff snapshots/ + +# If changes are expected (new Tutorials section), stage them +git add snapshots/ +git commit -m "test: update snapshots for tutorials section" +``` + +- [ ] **Step 2: Final verification** + +```bash +zig build test +``` + +Expected: All green. 
From 43312ec1ab96b1b7621b18976d41903a59b492d5 Mon Sep 17 00:00:00 2001 From: Simon Hartcher Date: Sat, 21 Mar 2026 12:23:13 +1100 Subject: [PATCH 04/18] docs: require Godot to be installed for XML supplementation --- .../plans/2026-03-21-xml-doc-supplementation.md | 7 +------ .../specs/2026-03-21-xml-doc-supplementation-design.md | 2 +- 2 files changed, 2 insertions(+), 7 deletions(-) diff --git a/docs/superpowers/plans/2026-03-21-xml-doc-supplementation.md b/docs/superpowers/plans/2026-03-21-xml-doc-supplementation.md index f37879b..76b3ef0 100644 --- a/docs/superpowers/plans/2026-03-21-xml-doc-supplementation.md +++ b/docs/superpowers/plans/2026-03-21-xml-doc-supplementation.md @@ -896,12 +896,7 @@ fn fetchXmlDocs(allocator: Allocator, cache_path: []const u8) void { cache.ensureDirectoryExists(xml_dir) catch return; - const version = source_fetch.getGodotVersion(allocator) orelse { - // TODO: When Godot is not installed, query GitHub API for latest release tag - // For now, skip XML supplementation without Godot - std.log.warn("godot not found, skipping XML doc supplementation", .{}); - return; - }; + const version = source_fetch.getGodotVersion(allocator) orelse return; var url_buf: [256]u8 = undefined; const url = source_fetch.buildTarballUrl(&url_buf, version) orelse return; diff --git a/docs/superpowers/specs/2026-03-21-xml-doc-supplementation-design.md b/docs/superpowers/specs/2026-03-21-xml-doc-supplementation-design.md index 23bab53..a09fcd2 100644 --- a/docs/superpowers/specs/2026-03-21-xml-doc-supplementation-design.md +++ b/docs/superpowers/specs/2026-03-21-xml-doc-supplementation-design.md @@ -22,7 +22,7 @@ Automatically fetch and parse Godot's XML documentation from the source tree, us 6. Write a `.complete` marker file with the version string after successful extraction. 7. The tarball is never written to disk -- streamed directly from HTTP through decompression and extraction. 
-**When Godot is not installed**: The existing fallback path downloads JSON from GitHub. In this case, use the latest stable release tag from the GitHub API to determine the tarball URL for XML docs. +**Godot must be installed** for XML supplementation. If `godot --version` fails, XML docs are skipped silently. This runs automatically on first use alongside the existing JSON generation. `--clear-cache` clears XML docs too. From 20159f31a12926adefa1856092c7a746f1376033 Mon Sep 17 00:00:00 2001 From: Simon Hartcher Date: Sat, 21 Mar 2026 12:36:40 +1100 Subject: [PATCH 05/18] chore: add .worktrees to gitignore --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index dc55407..73d8e9c 100644 --- a/.gitignore +++ b/.gitignore @@ -3,3 +3,4 @@ zig-out/ *.o extension_api.json docs/plans/ +.worktrees/ From 2016f9e36943ce923b479a2cbbedf8d4b72a24f4 Mon Sep 17 00:00:00 2001 From: Simon Hartcher Date: Sat, 21 Mar 2026 12:40:27 +1100 Subject: [PATCH 06/18] feat: add zig-xml dependency for XML doc parsing --- build.zig | 6 ++++++ build.zig.zon | 4 ++++ 2 files changed, 10 insertions(+) diff --git a/build.zig b/build.zig index 7a56d22..6802a9b 100644 --- a/build.zig +++ b/build.zig @@ -25,6 +25,11 @@ pub fn build(b: *std.Build) void { .optimize = optimize, }).module("zigdown"); + const zig_xml = b.dependency("xml", .{ + .target = target, + .optimize = optimize, + }).module("xml"); + const mod = b.addModule("gdoc", .{ .root_source_file = b.path("src/root.zig"), .target = target, @@ -32,6 +37,7 @@ pub fn build(b: *std.Build) void { .{ .name = "bbcodez", .module = bbcodez }, .{ .name = "known-folders", .module = known_folders }, .{ .name = "zigdown", .module = zigdown }, + .{ .name = "xml", .module = zig_xml }, }, }); mod.addOptions("build_options", build_options); diff --git a/build.zig.zon b/build.zig.zon index 1c4347a..ef417b3 100644 --- a/build.zig.zon +++ b/build.zig.zon @@ -20,6 +20,10 @@ .url = 
"https://github.com/JacobCrabill/zigdown/archive/refs/tags/v1.2.0.tar.gz", .hash = "zigdown-1.2.0-M06JT7-lFQCrQRNGBipFjSt-qAvFSLgy8-f-D4VFAeOi", }, + .xml = .{ + .url = "git+https://github.com/ianprime0509/zig-xml#8874de5b846f4e3e806a062cd11a01b2bb90fc7a", + .hash = "xml-0.2.0-ZTbP3wE6AgCsnyZut_plzxi6WB2tzzh3kFRBOp3AL7n9", + }, }, .paths = .{ "build.zig", From 2a932f88e3d6f54d63654bce076cb4a67da48ee7 Mon Sep 17 00:00:00 2001 From: Simon Hartcher Date: Sat, 21 Mar 2026 12:41:42 +1100 Subject: [PATCH 07/18] feat: add Godot version string parser --- src/root.zig | 1 + src/source_fetch.zig | 104 +++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 105 insertions(+) create mode 100644 src/source_fetch.zig diff --git a/src/root.zig b/src/root.zig index fb7a348..9eb732c 100644 --- a/src/root.zig +++ b/src/root.zig @@ -417,6 +417,7 @@ const known_folders = @import("known-folders"); pub const DocDatabase = @import("DocDatabase.zig"); pub const cache = @import("cache.zig"); pub const api = @import("api.zig"); +pub const source_fetch = @import("source_fetch.zig"); const zigdown = @import("zigdown"); const ConsoleRenderer = zigdown.ConsoleRenderer; diff --git a/src/source_fetch.zig b/src/source_fetch.zig new file mode 100644 index 0000000..521d75e --- /dev/null +++ b/src/source_fetch.zig @@ -0,0 +1,104 @@ +pub const VersionInfo = struct { + major: []const u8, + minor: []const u8, + patch: []const u8, + hash: ?[]const u8, + + /// Formats the version as "major.minor.patch" into the provided buffer. + pub fn formatVersion(self: VersionInfo, buf: []u8) ?[]const u8 { + return std.fmt.bufPrint(buf, "{s}.{s}.{s}", .{ self.major, self.minor, self.patch }) catch null; + } +}; + +/// Parses a Godot version string like "4.6.1.stable.official.14d19694e". +/// Returns null for empty or malformed strings. 
+pub fn parseGodotVersion(version_str: []const u8) ?VersionInfo { + if (version_str.len == 0) return null; + + var it = std.mem.splitScalar(u8, version_str, '.'); + + const major = it.next() orelse return null; + const minor = it.next() orelse return null; + const patch = it.next() orelse return null; + + // Validate that major, minor, patch are numeric + for (major) |c| if (!std.ascii.isDigit(c)) return null; + for (minor) |c| if (!std.ascii.isDigit(c)) return null; + for (patch) |c| if (!std.ascii.isDigit(c)) return null; + + if (major.len == 0 or minor.len == 0 or patch.len == 0) return null; + + // 4th segment: stability label (stable/dev/beta/rc) - skip + _ = it.next() orelse return null; + + // 5th segment: build type + const build_type = it.next() orelse return null; + + // 6th segment: commit hash (only if build_type is "official") + const hash: ?[]const u8 = if (std.mem.eql(u8, build_type, "official")) + it.next() + else + null; + + return VersionInfo{ + .major = major, + .minor = minor, + .patch = patch, + .hash = hash, + }; +} + +test "parseGodotVersion: standard version with hash" { + const result = parseGodotVersion("4.6.1.stable.official.14d19694e"); + try std.testing.expect(result != null); + const v = result.?; + try std.testing.expectEqualStrings("4", v.major); + try std.testing.expectEqualStrings("6", v.minor); + try std.testing.expectEqualStrings("1", v.patch); + try std.testing.expectEqualStrings("14d19694e", v.hash.?); +} + +test "parseGodotVersion: custom build without hash" { + const result = parseGodotVersion("4.6.1.stable.custom_build"); + try std.testing.expect(result != null); + const v = result.?; + try std.testing.expectEqualStrings("4", v.major); + try std.testing.expectEqualStrings("6", v.minor); + try std.testing.expectEqualStrings("1", v.patch); + try std.testing.expect(v.hash == null); +} + +test "parseGodotVersion: dev build with hash" { + const result = parseGodotVersion("4.7.0.dev.official.abc123def"); + try 
std.testing.expect(result != null); + const v = result.?; + try std.testing.expectEqualStrings("4", v.major); + try std.testing.expectEqualStrings("7", v.minor); + try std.testing.expectEqualStrings("0", v.patch); + try std.testing.expectEqualStrings("abc123def", v.hash.?); +} + +test "parseGodotVersion: empty string returns null" { + const result = parseGodotVersion(""); + try std.testing.expect(result == null); +} + +test "parseGodotVersion: malformed string returns null" { + const result = parseGodotVersion("not-a-version"); + try std.testing.expect(result == null); +} + +test "VersionInfo.formatVersion produces correct output" { + const v = VersionInfo{ + .major = "4", + .minor = "6", + .patch = "1", + .hash = "14d19694e", + }; + var buf: [32]u8 = undefined; + const formatted = v.formatVersion(&buf); + try std.testing.expect(formatted != null); + try std.testing.expectEqualStrings("4.6.1", formatted.?); +} + +const std = @import("std"); From fa1799aad7eae14cb8e6bfb79f823aedcd9f4f88 Mon Sep 17 00:00:00 2001 From: Simon Hartcher Date: Sat, 21 Mar 2026 12:44:02 +1100 Subject: [PATCH 08/18] feat: run godot --version and parse output --- src/source_fetch.zig | 65 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 65 insertions(+) diff --git a/src/source_fetch.zig b/src/source_fetch.zig index 521d75e..17cec32 100644 --- a/src/source_fetch.zig +++ b/src/source_fetch.zig @@ -3,6 +3,13 @@ pub const VersionInfo = struct { minor: []const u8, patch: []const u8, hash: ?[]const u8, + /// When non-null, this is the allocated buffer that backs major/minor/patch/hash. + /// Call `deinit(allocator)` to free it. + backing: ?[]u8 = null, + + pub fn deinit(self: VersionInfo, allocator: Allocator) void { + if (self.backing) |buf| allocator.free(buf); + } /// Formats the version as "major.minor.patch" into the provided buffer. 
pub fn formatVersion(self: VersionInfo, buf: []u8) ?[]const u8 { @@ -12,6 +19,7 @@ pub const VersionInfo = struct { /// Parses a Godot version string like "4.6.1.stable.official.14d19694e". /// Returns null for empty or malformed strings. +/// The returned slices point into `version_str`; the caller must ensure it outlives the result. pub fn parseGodotVersion(version_str: []const u8) ?VersionInfo { if (version_str.len == 0) return null; @@ -48,6 +56,37 @@ pub fn parseGodotVersion(version_str: []const u8) ?VersionInfo { }; } +/// Runs the godot executable at `godot_path` with `--version` and parses the output. +/// Returns null if the process fails or the output is malformed. +/// The returned VersionInfo owns its backing buffer; call `result.deinit(allocator)` when done. +pub fn getGodotVersionFromPath(allocator: Allocator, godot_path: []const u8) ?VersionInfo { + const result = std.process.Child.run(.{ + .argv = &.{ godot_path, "--version" }, + .allocator = allocator, + }) catch return null; + defer allocator.free(result.stdout); + defer allocator.free(result.stderr); + + switch (result.term) { + .Exited => |code| if (code != 0) return null, + else => return null, + } + + const trimmed = std.mem.trimRight(u8, result.stdout, &std.ascii.whitespace); + const owned = allocator.dupe(u8, trimmed) catch return null; + var info = parseGodotVersion(owned) orelse { + allocator.free(owned); + return null; + }; + info.backing = owned; + return info; +} + +/// Convenience wrapper that calls `getGodotVersionFromPath` with "godot" from PATH. 
+pub fn getGodotVersion(allocator: Allocator) ?VersionInfo { + return getGodotVersionFromPath(allocator, "godot"); +} + test "parseGodotVersion: standard version with hash" { const result = parseGodotVersion("4.6.1.stable.official.14d19694e"); try std.testing.expect(result != null); @@ -101,4 +140,30 @@ test "VersionInfo.formatVersion produces correct output" { try std.testing.expectEqualStrings("4.6.1", formatted.?); } +test "getGodotVersionFromPath with fake godot script" { + const allocator = std.testing.allocator; + + var tmp_dir = std.testing.tmpDir(.{}); + defer tmp_dir.cleanup(); + + const tmp_path = try tmp_dir.dir.realpathAlloc(allocator, "."); + defer allocator.free(tmp_path); + + const script = "#!/bin/sh\necho '4.6.1.stable.official.14d19694e'"; + try tmp_dir.dir.writeFile(.{ .sub_path = "fake-godot", .data = script }); + + var file = try tmp_dir.dir.openFile("fake-godot", .{}); + try file.chmod(0o755); + file.close(); + + const fake_path = try std.fmt.allocPrint(allocator, "{s}/fake-godot", .{tmp_path}); + defer allocator.free(fake_path); + + const result = getGodotVersionFromPath(allocator, fake_path); + try std.testing.expect(result != null); + defer result.?.deinit(allocator); + try std.testing.expectEqualStrings("14d19694e", result.?.hash.?); +} + +const Allocator = std.mem.Allocator; const std = @import("std"); From dab7ae02157cda5e048daf11d64ab21d4082af78 Mon Sep 17 00:00:00 2001 From: Simon Hartcher Date: Sat, 21 Mar 2026 12:51:29 +1100 Subject: [PATCH 09/18] feat: add tarball download, XML extraction, and cache markers --- src/source_fetch.zig | 203 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 203 insertions(+) diff --git a/src/source_fetch.zig b/src/source_fetch.zig index 17cec32..74ba4a4 100644 --- a/src/source_fetch.zig +++ b/src/source_fetch.zig @@ -140,6 +140,141 @@ test "VersionInfo.formatVersion produces correct output" { try std.testing.expectEqualStrings("4.6.1", formatted.?); } +/// Builds a tarball URL for a specific 
Godot version tag. +/// Example: version 4.6.1 -> "https://github.com/godotengine/godot/archive/refs/tags/4.6.1-stable.tar.gz" +pub fn buildTarballUrl(buf: []u8, version: VersionInfo) ?[]const u8 { + return std.fmt.bufPrint(buf, "https://github.com/godotengine/godot/archive/refs/tags/{s}.{s}.{s}-stable.tar.gz", .{ + version.major, version.minor, version.patch, + }) catch null; +} + +/// Builds a tarball URL from a commit hash. +/// Example: hash "14d19694e" -> "https://github.com/godotengine/godot/archive/14d19694e.tar.gz" +pub fn buildTarballUrlFromHash(buf: []u8, hash: []const u8) ?[]const u8 { + return std.fmt.bufPrint(buf, "https://github.com/godotengine/godot/archive/{s}.tar.gz", .{hash}) catch null; +} + +/// Downloads a .tar.gz from a URL and extracts only XML doc files. +/// Matches: +/// - `*/doc/classes/*.xml` (core class docs) +/// - `*/modules/*/doc_classes/*.xml` (module class docs) +/// +/// Extracted files are written to `xml_docs_dir` with their basename only. +pub fn fetchAndExtractXmlDocs(allocator: Allocator, url: []const u8, xml_docs_dir: []const u8) !void { + cache.ensureDirectoryExists(xml_docs_dir) catch {}; + + var client: std.http.Client = .{ .allocator = allocator }; + defer client.deinit(); + + const uri = std.Uri.parse(url) catch return error.InvalidUrl; + + var redirect_buf: [2048]u8 = undefined; + var req = try client.request(.GET, uri, .{ + .redirect_behavior = std.http.Client.Request.RedirectBehavior.init(10), + }); + defer req.deinit(); + + try req.sendBodiless(); + + var response = try req.receiveHead(&redirect_buf); + + if (response.head.status != .ok) return error.HttpRequestFailed; + + // Get the raw HTTP response body reader + var transfer_buf: [8192]u8 = undefined; + const http_reader: *std.Io.Reader = response.reader(&transfer_buf); + + // Decompress gzip + var decompress_buf: [std.compress.flate.max_window_len]u8 = undefined; + var decompressor = std.compress.flate.Decompress.init(http_reader, .gzip, &decompress_buf); + + // 
Iterate tar entries + var path_buf: [std.fs.max_path_bytes]u8 = undefined; + var link_buf: [std.fs.max_path_bytes]u8 = undefined; + var tar_iter = std.tar.Iterator.init(&decompressor.reader, .{ + .file_name_buffer = &path_buf, + .link_name_buffer = &link_buf, + .diagnostics = null, + }); + + var dir = try std.fs.openDirAbsolute(xml_docs_dir, .{}); + defer dir.close(); + + while (try tar_iter.next()) |file| { + if (file.kind != .file) continue; + + const name = file.name; + if (!std.mem.endsWith(u8, name, ".xml")) continue; + + // Match */doc/classes/*.xml or */modules/*/doc_classes/*.xml + const is_core_doc = matchesPattern(name, "/doc/classes/"); + const is_module_doc = matchesPattern(name, "/doc_classes/"); + + if (!is_core_doc and !is_module_doc) continue; + + // Extract basename + const basename = std.fs.path.basename(name); + + var out_file = dir.createFile(basename, .{}) catch continue; + defer out_file.close(); + + var write_buf: [4096]u8 = undefined; + var file_writer = out_file.writer(&write_buf); + tar_iter.streamRemaining(file, &file_writer.interface) catch continue; + file_writer.interface.flush() catch continue; + } +} + +fn matchesPattern(path: []const u8, pattern: []const u8) bool { + return std.mem.indexOf(u8, path, pattern) != null; +} + +/// Writes a version string to `xml_docs_dir/.complete` as a cache marker. +pub fn writeCompleteMarker(allocator: Allocator, xml_docs_dir: []const u8, version_str: []const u8) !void { + const marker_path = try std.fmt.allocPrint(allocator, "{f}", .{ + std.fs.path.fmtJoin(&[_][]const u8{ xml_docs_dir, ".complete" }), + }); + defer allocator.free(marker_path); + + var file = try std.fs.createFileAbsolute(marker_path, .{}); + defer file.close(); + + var buf: [4096]u8 = undefined; + var file_writer = file.writer(&buf); + var writer = &file_writer.interface; + + try writer.writeAll(version_str); + try writer.flush(); +} + +/// Reads the content of `xml_docs_dir/.complete`, or returns null if not present. 
+pub fn readCompleteMarker(allocator: Allocator, xml_docs_dir: []const u8) ?[]const u8 {
+    const marker_path = std.fmt.allocPrint(allocator, "{f}", .{
+        std.fs.path.fmtJoin(&[_][]const u8{ xml_docs_dir, ".complete" }),
+    }) catch return null;
+    defer allocator.free(marker_path);
+
+    const file = std.fs.openFileAbsolute(marker_path, .{}) catch return null;
+    defer file.close();
+
+    var buf: [4096]u8 = undefined;
+    var file_reader = file.reader(&buf);
+    var reader = &file_reader.interface;
+
+    var allocating: std.Io.Writer.Allocating = .init(allocator);
+    defer allocating.deinit();
+
+    _ = reader.stream(&allocating.writer, .unlimited) catch return null;
+
+    const result = allocating.toOwnedSlice() catch return null;
+    if (result.len == 0) {
+        allocator.free(result);
+        return null;
+    }
+
+    return result;
+}
+
 test "getGodotVersionFromPath with fake godot script" {
     const allocator = std.testing.allocator;
 
@@ -165,5 +300,73 @@ test "getGodotVersionFromPath with fake godot script" {
     try std.testing.expectEqualStrings("14d19694e", result.?.hash.?);
 }
 
+test "buildTarballUrl with version 4.6.1" {
+    var buf: [256]u8 = undefined;
+    const url = buildTarballUrl(&buf, .{
+        .major = "4",
+        .minor = "6",
+        .patch = "1",
+        .hash = null,
+    });
+    try std.testing.expect(url != null);
+    try std.testing.expectEqualStrings(
+        "https://github.com/godotengine/godot/archive/refs/tags/4.6.1-stable.tar.gz",
+        url.?,
+    );
+}
+
+test "buildTarballUrlFromHash with commit hash" {
+    var buf: [256]u8 = undefined;
+    const url = buildTarballUrlFromHash(&buf, "14d19694e");
+    try std.testing.expect(url != null);
+    try std.testing.expectEqualStrings(
+        "https://github.com/godotengine/godot/archive/14d19694e.tar.gz",
+        url.?,
+    );
+}
+
+test "buildTarballUrl returns null when buffer too small" {
+    var buf: [10]u8 = undefined;
+    const url = buildTarballUrl(&buf, .{
+        .major = "4",
+        .minor = "6",
+        .patch = "1",
+        .hash = null,
+    });
+    try std.testing.expect(url == null);
+}
+
+test 
"writeCompleteMarker and readCompleteMarker round-trip" { + const allocator = std.testing.allocator; + + var tmp_dir = std.testing.tmpDir(.{}); + defer tmp_dir.cleanup(); + + const tmp_path = try tmp_dir.dir.realpathAlloc(allocator, "."); + defer allocator.free(tmp_path); + + const version_str = "4.6.1"; + try writeCompleteMarker(allocator, tmp_path, version_str); + + const result = readCompleteMarker(allocator, tmp_path); + try std.testing.expect(result != null); + defer allocator.free(result.?); + try std.testing.expectEqualStrings(version_str, result.?); +} + +test "readCompleteMarker returns null for non-existent marker" { + const allocator = std.testing.allocator; + + var tmp_dir = std.testing.tmpDir(.{}); + defer tmp_dir.cleanup(); + + const tmp_path = try tmp_dir.dir.realpathAlloc(allocator, "."); + defer allocator.free(tmp_path); + + const result = readCompleteMarker(allocator, tmp_path); + try std.testing.expect(result == null); +} + const Allocator = std.mem.Allocator; const std = @import("std"); +const cache = @import("cache.zig"); From 405172323993ec49f7c36dc62dd544df7278175a Mon Sep 17 00:00:00 2001 From: Simon Hartcher Date: Sat, 21 Mar 2026 12:55:41 +1100 Subject: [PATCH 10/18] feat: add XML doc parser for Godot class documentation --- src/XmlDocParser.zig | 336 +++++++++++++++++++++++++++++++++++++++++++ src/root.zig | 1 + 2 files changed, 337 insertions(+) create mode 100644 src/XmlDocParser.zig diff --git a/src/XmlDocParser.zig b/src/XmlDocParser.zig new file mode 100644 index 0000000..155c953 --- /dev/null +++ b/src/XmlDocParser.zig @@ -0,0 +1,336 @@ +const std = @import("std"); +const Allocator = std.mem.Allocator; +const xml = @import("xml"); + +const docs_url = "https://docs.godotengine.org/en/stable"; + +pub const Tutorial = struct { + title: []const u8, + url: []const u8, +}; + +pub const MemberDoc = struct { + name: []const u8, + description: ?[]const u8 = null, +}; + +pub const ClassDoc = struct { + name: []const u8, + inherits: ?[]const 
u8 = null,
+    brief_description: ?[]const u8 = null,
+    description: ?[]const u8 = null,
+    tutorials: ?[]Tutorial = null,
+    methods: ?[]MemberDoc = null,
+    properties: ?[]MemberDoc = null,
+    signals: ?[]MemberDoc = null,
+    constants: ?[]MemberDoc = null,
+};
+
+pub const ParseError = error{
+    MalformedXml,
+    UnexpectedElement,
+    MissingClassElement,
+    MissingNameAttribute,
+    OutOfMemory,
+    ReadFailed,
+};
+
+pub fn parseClassDoc(allocator: Allocator, xml_content: []const u8) ParseError!ClassDoc {
+    var static_reader: xml.Reader.Static = .init(allocator, xml_content, .{
+        .namespace_aware = false,
+    });
+    defer static_reader.deinit();
+    const reader = &static_reader.interface;
+
+    var doc: ClassDoc = .{ .name = "" };
+    var tutorials: std.ArrayListUnmanaged(Tutorial) = .empty;
+    defer tutorials.deinit(allocator);
+    var methods: std.ArrayListUnmanaged(MemberDoc) = .empty;
+    defer methods.deinit(allocator);
+    var properties: std.ArrayListUnmanaged(MemberDoc) = .empty;
+    defer properties.deinit(allocator);
+    var signals: std.ArrayListUnmanaged(MemberDoc) = .empty;
+    defer signals.deinit(allocator);
+    var constants: std.ArrayListUnmanaged(MemberDoc) = .empty;
+    defer constants.deinit(allocator);
+
+    var found_class = false;
+
+    while (true) {
+        const node = reader.read() catch return ParseError.MalformedXml;
+        switch (node) {
+            .eof => break,
+            .xml_declaration => continue,
+            .element_start => {
+                const name = reader.elementName();
+                if (std.mem.eql(u8, name, "class")) {
+                    found_class = true;
+                    doc.name = try getAttributeAlloc(allocator, reader, "name") orelse return ParseError.MissingNameAttribute;
+                    doc.inherits = try getAttributeAlloc(allocator, reader, "inherits");
+                } else if (std.mem.eql(u8, name, "brief_description")) {
+                    doc.brief_description = try readTextContent(allocator, reader);
+                } else if (std.mem.eql(u8, name, "description") and found_class and doc.description == null) {
+                    doc.description = try readTextContent(allocator, reader);
+                } else if (std.mem.eql(u8, name, "link")) {
+                    const 
title = try getAttributeAlloc(allocator, reader, "title") orelse try allocator.dupe(u8, ""); + const url_raw = try readTextContent(allocator, reader) orelse try allocator.dupe(u8, ""); + const url = try expandDocsUrl(allocator, url_raw); + if (url.ptr != url_raw.ptr) { + allocator.free(url_raw); + } + try tutorials.append(allocator,.{ .title = title, .url = url }); + } else if (std.mem.eql(u8, name, "method")) { + const method_name = try getAttributeAlloc(allocator, reader, "name") orelse continue; + const desc = try readNestedDescription(allocator, reader, "method"); + try methods.append(allocator,.{ .name = method_name, .description = desc }); + } else if (std.mem.eql(u8, name, "member")) { + const member_name = try getAttributeAlloc(allocator, reader, "name") orelse continue; + const desc = try readTextContent(allocator, reader); + try properties.append(allocator,.{ .name = member_name, .description = desc }); + } else if (std.mem.eql(u8, name, "signal")) { + const signal_name = try getAttributeAlloc(allocator, reader, "name") orelse continue; + const desc = try readNestedDescription(allocator, reader, "signal"); + try signals.append(allocator,.{ .name = signal_name, .description = desc }); + } else if (std.mem.eql(u8, name, "constant")) { + const constant_name = try getAttributeAlloc(allocator, reader, "name") orelse continue; + const desc = try readTextContent(allocator, reader); + try constants.append(allocator,.{ .name = constant_name, .description = desc }); + } + }, + else => continue, + } + } + + if (!found_class) return ParseError.MissingClassElement; + + doc.tutorials = if (tutorials.items.len > 0) try tutorials.toOwnedSlice(allocator) else null; + doc.methods = if (methods.items.len > 0) try methods.toOwnedSlice(allocator) else null; + doc.properties = if (properties.items.len > 0) try properties.toOwnedSlice(allocator) else null; + doc.signals = if (signals.items.len > 0) try signals.toOwnedSlice(allocator) else null; + doc.constants = if 
(constants.items.len > 0) try constants.toOwnedSlice(allocator) else null; + + return doc; +} + +pub fn freeClassDoc(allocator: Allocator, doc: ClassDoc) void { + allocator.free(doc.name); + if (doc.inherits) |s| allocator.free(s); + if (doc.brief_description) |s| allocator.free(s); + if (doc.description) |s| allocator.free(s); + + if (doc.tutorials) |tutorials| { + for (tutorials) |t| { + allocator.free(t.title); + allocator.free(t.url); + } + allocator.free(tutorials); + } + + inline for (.{ "methods", "properties", "signals", "constants" }) |field| { + if (@field(doc, field)) |members| { + for (members) |m| { + allocator.free(m.name); + if (m.description) |d| allocator.free(d); + } + allocator.free(members); + } + } +} + +fn getAttributeAlloc(allocator: Allocator, reader: *xml.Reader, name: []const u8) Allocator.Error!?[]const u8 { + const idx = reader.attributeIndex(name) orelse return null; + return try reader.attributeValueAlloc(allocator, idx); +} + +fn readTextContent(allocator: Allocator, reader: *xml.Reader) ParseError!?[]const u8 { + var text_buf: std.Io.Writer.Allocating = .init(allocator); + defer text_buf.deinit(); + + var depth: usize = 1; + while (depth > 0) { + const node = reader.read() catch return ParseError.MalformedXml; + switch (node) { + .eof => break, + .element_start => depth += 1, + .element_end => depth -= 1, + .text => { + text_buf.writer.writeAll(reader.textRaw()) catch return ParseError.OutOfMemory; + }, + else => continue, + } + } + + const written = text_buf.written(); + if (written.len == 0) return null; + + const trimmed = std.mem.trim(u8, written, " \t\r\n"); + if (trimmed.len == 0) return null; + + return try allocator.dupe(u8, trimmed); +} + +fn readNestedDescription(allocator: Allocator, reader: *xml.Reader, container_element: []const u8) ParseError!?[]const u8 { + // Read through the container element looking for a nested element. 
+ var depth: usize = 1; + while (depth > 0) { + const node = reader.read() catch return ParseError.MalformedXml; + switch (node) { + .eof => break, + .element_start => { + const name = reader.elementName(); + if (depth == 1 and std.mem.eql(u8, name, "description")) { + return try readTextContent(allocator, reader); + } + depth += 1; + }, + .element_end => { + const name = reader.elementName(); + if (depth == 1 and std.mem.eql(u8, name, container_element)) { + break; + } + depth -= 1; + }, + else => continue, + } + } + return null; +} + +fn expandDocsUrl(allocator: Allocator, url: []const u8) Allocator.Error![]const u8 { + const prefix = "$DOCS_URL"; + if (std.mem.startsWith(u8, url, prefix)) { + return try std.fmt.allocPrint(allocator, "{s}{s}", .{ docs_url, url[prefix.len..] }); + } + return url; +} + +// Tests +const test_xml = + \\ + \\ + \\ A 2D game object. + \\ Node2D is the base class for 2D. + \\ + \\ $DOCS_URL/tutorials/2d/custom_drawing.html + \\ https://github.com/godotengine/godot-demo-projects/tree/master/2d + \\ + \\ + \\ + \\ + \\ + \\ Multiplies the current scale by the ratio vector. + \\ + \\ + \\ + \\ + \\Position, relative to the node's parent. + \\ + \\ + \\ + \\ + \\ Emitted when something happens. + \\ + \\ + \\ + \\ + \\Maximum allowed value. 
+ \\ + \\ + \\ +; + +test "parses class name and inherits" { + const allocator = std.testing.allocator; + const doc = try parseClassDoc(allocator, test_xml); + defer freeClassDoc(allocator, doc); + + try std.testing.expectEqualStrings("Node2D", doc.name); + try std.testing.expectEqualStrings("CanvasItem", doc.inherits.?); +} + +test "parses brief_description and description" { + const allocator = std.testing.allocator; + const doc = try parseClassDoc(allocator, test_xml); + defer freeClassDoc(allocator, doc); + + try std.testing.expectEqualStrings("A 2D game object.", doc.brief_description.?); + try std.testing.expectEqualStrings("Node2D is the base class for 2D.", doc.description.?); +} + +test "parses tutorials with $DOCS_URL expansion" { + const allocator = std.testing.allocator; + const doc = try parseClassDoc(allocator, test_xml); + defer freeClassDoc(allocator, doc); + + const tutorials = doc.tutorials.?; + try std.testing.expectEqual(2, tutorials.len); + try std.testing.expectEqualStrings("Custom drawing in 2D", tutorials[0].title); + try std.testing.expectEqualStrings( + "https://docs.godotengine.org/en/stable/tutorials/2d/custom_drawing.html", + tutorials[0].url, + ); +} + +test "external tutorial URLs left unchanged" { + const allocator = std.testing.allocator; + const doc = try parseClassDoc(allocator, test_xml); + defer freeClassDoc(allocator, doc); + + const tutorials = doc.tutorials.?; + try std.testing.expectEqualStrings("All 2D Demos", tutorials[1].title); + try std.testing.expectEqualStrings( + "https://github.com/godotengine/godot-demo-projects/tree/master/2d", + tutorials[1].url, + ); +} + +test "parses methods with descriptions" { + const allocator = std.testing.allocator; + const doc = try parseClassDoc(allocator, test_xml); + defer freeClassDoc(allocator, doc); + + const methods = doc.methods.?; + try std.testing.expectEqual(1, methods.len); + try std.testing.expectEqualStrings("apply_scale", methods[0].name); + try 
std.testing.expectEqualStrings("Multiplies the current scale by the ratio vector.", methods[0].description.?); +} + +test "parses properties from members element" { + const allocator = std.testing.allocator; + const doc = try parseClassDoc(allocator, test_xml); + defer freeClassDoc(allocator, doc); + + const props = doc.properties.?; + try std.testing.expectEqual(1, props.len); + try std.testing.expectEqualStrings("position", props[0].name); + try std.testing.expectEqualStrings("Position, relative to the node's parent.", props[0].description.?); +} + +test "parses signals with descriptions" { + const allocator = std.testing.allocator; + const doc = try parseClassDoc(allocator, test_xml); + defer freeClassDoc(allocator, doc); + + const sigs = doc.signals.?; + try std.testing.expectEqual(1, sigs.len); + try std.testing.expectEqualStrings("some_signal", sigs[0].name); + try std.testing.expectEqualStrings("Emitted when something happens.", sigs[0].description.?); +} + +test "parses constants with descriptions" { + const allocator = std.testing.allocator; + const doc = try parseClassDoc(allocator, test_xml); + defer freeClassDoc(allocator, doc); + + const consts = doc.constants.?; + try std.testing.expectEqual(1, consts.len); + try std.testing.expectEqualStrings("MAX_VALUE", consts[0].name); + try std.testing.expectEqualStrings("Maximum allowed value.", consts[0].description.?); +} + +test "freeClassDoc doesn't leak" { + const allocator = std.testing.allocator; + const doc = try parseClassDoc(allocator, test_xml); + freeClassDoc(allocator, doc); + // testing allocator will catch leaks +} diff --git a/src/root.zig b/src/root.zig index 9eb732c..239144c 100644 --- a/src/root.zig +++ b/src/root.zig @@ -415,6 +415,7 @@ const AllocatingWriter = Writer.Allocating; const known_folders = @import("known-folders"); pub const DocDatabase = @import("DocDatabase.zig"); +pub const XmlDocParser = @import("XmlDocParser.zig"); pub const cache = @import("cache.zig"); pub const api = 
@import("api.zig"); pub const source_fetch = @import("source_fetch.zig"); From 6f74dd2ce7dec49643d285e530380e7550ebb487 Mon Sep 17 00:00:00 2001 From: Simon Hartcher Date: Sat, 21 Mar 2026 13:49:45 +1100 Subject: [PATCH 11/18] feat: add tutorials field to Entry and render in markdown output --- src/DocDatabase.zig | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/src/DocDatabase.zig b/src/DocDatabase.zig index 5257c34..72e3806 100644 --- a/src/DocDatabase.zig +++ b/src/DocDatabase.zig @@ -21,6 +21,11 @@ pub const EntryKind = enum { signal, }; +pub const Tutorial = struct { + title: []const u8, + url: []const u8, +}; + pub const Entry = struct { key: []const u8, name: []const u8, @@ -30,6 +35,7 @@ pub const Entry = struct { brief_description: ?[]const u8 = null, signature: ?[]const u8 = null, members: ?[]usize = null, + tutorials: ?[]const Tutorial = null, }; const RootState = enum { @@ -421,6 +427,15 @@ fn generateMarkdownForEntry(self: DocDatabase, allocator: Allocator, entry: Entr try writer.print("\n## Description\n\n{s}\n", .{desc}); } + if (entry.tutorials) |tutorials| { + if (tutorials.len > 0) { + try writer.writeAll("\n## Tutorials\n\n"); + for (tutorials) |tutorial| { + try writer.print("- [{s}]({s})\n", .{ tutorial.title, tutorial.url }); + } + } + } + if (entry.members) |member_indices| { try self.generateMemberListings(allocator, member_indices, writer); } From f184545294cc4d2b5f81d276122f213964632df4 Mon Sep 17 00:00:00 2001 From: Simon Hartcher Date: Sat, 21 Mar 2026 13:54:20 +1100 Subject: [PATCH 12/18] feat: integrate XML doc supplementation into cache flow --- src/cache.zig | 21 +++++++ src/root.zig | 153 +++++++++++++++++++++++++++++++++++++++++++++++++- 2 files changed, 172 insertions(+), 2 deletions(-) diff --git a/src/cache.zig b/src/cache.zig index b7592e2..a2232aa 100644 --- a/src/cache.zig +++ b/src/cache.zig @@ -144,6 +144,26 @@ pub fn cacheIsPopulated(allocator: Allocator, cache_path: []const u8) !bool { return true; } 
+pub fn getXmlDocsDirInCache(allocator: Allocator, cache_dir: []const u8) ![]const u8 { + return std.fmt.allocPrint( + allocator, + "{f}", + .{std.fs.path.fmtJoin(&[_][]const u8{ cache_dir, "xml_docs" })}, + ); +} + +pub fn xmlDocsArePopulated(allocator: Allocator, cache_dir: []const u8) !bool { + const xml_dir = try getXmlDocsDirInCache(allocator, cache_dir); + defer allocator.free(xml_dir); + + const marker = source_fetch.readCompleteMarker(allocator, xml_dir); + if (marker) |m| { + allocator.free(m); + return true; + } + return false; +} + test "getCacheDir returns cache directory path" { const allocator = std.testing.allocator; @@ -744,3 +764,4 @@ const Writer = std.Io.Writer; const known_folders = @import("known-folders"); const DocDatabase = @import("DocDatabase.zig"); +const source_fetch = @import("source_fetch.zig"); diff --git a/src/root.zig b/src/root.zig index 239144c..1304944 100644 --- a/src/root.zig +++ b/src/root.zig @@ -35,17 +35,30 @@ pub fn markdownForSymbol(allocator: Allocator, symbol: []const u8, api_json_path const cache_path = try cache.getCacheDir(allocator); defer allocator.free(cache_path); - if (!try cache.cacheIsPopulated(allocator, cache_path)) { + const needs_full_rebuild = !try cache.cacheIsPopulated(allocator, cache_path); + + if (needs_full_rebuild) { try cache.ensureDirectoryExists(cache_path); try api.generateApiJsonIfNotExists(allocator, "godot", cache_path); + } + + // Fetch XML docs if missing (best-effort) + if (needs_full_rebuild or !try cache.xmlDocsArePopulated(allocator, cache_path)) { + fetchXmlDocs(allocator, cache_path); + } + if (needs_full_rebuild) { const json_path = try cache.getJsonCachePathInDir(allocator, cache_path); defer allocator.free(json_path); const json_file = try std.fs.openFileAbsolute(json_path, .{}); defer json_file.close(); - const db = try DocDatabase.loadFromJsonFileLeaky(arena.allocator(), json_file); + var db = try DocDatabase.loadFromJsonFileLeaky(arena.allocator(), json_file); + + // Merge XML 
data into db before generating markdown cache + mergeXmlDocs(arena.allocator(), allocator, &db, cache_path); + try cache.generateMarkdownCache(allocator, db, cache_path); } @@ -401,6 +414,142 @@ test "markdownForSymbol generates markdown cache when cache is empty" { cache.clearCache(allocator) catch {}; } +fn fetchXmlDocs(allocator: Allocator, cache_path: []const u8) void { + const xml_dir = cache.getXmlDocsDirInCache(allocator, cache_path) catch return; + defer allocator.free(xml_dir); + + cache.ensureDirectoryExists(xml_dir) catch return; + + const version = source_fetch.getGodotVersion(allocator) orelse return; + defer version.deinit(allocator); + + var url_buf: [256]u8 = undefined; + const url = source_fetch.buildTarballUrl(&url_buf, version) orelse return; + + source_fetch.fetchAndExtractXmlDocs(allocator, url, xml_dir) catch |err| { + // Try hash-based fallback URL + if (version.hash) |hash| { + var hash_url_buf: [256]u8 = undefined; + const hash_url = source_fetch.buildTarballUrlFromHash(&hash_url_buf, hash) orelse return; + source_fetch.fetchAndExtractXmlDocs(allocator, hash_url, xml_dir) catch { + std.log.warn("XML doc fetch failed ({}), proceeding without XML supplementation", .{err}); + return; + }; + } else { + std.log.warn("XML doc fetch failed ({}), proceeding without XML supplementation", .{err}); + return; + } + }; + + var version_buf: [64]u8 = undefined; + const version_str = version.formatVersion(&version_buf) orelse return; + + source_fetch.writeCompleteMarker(allocator, xml_dir, version_str) catch return; +} + +fn mergeXmlDocs(arena_allocator: Allocator, tmp_allocator: Allocator, db: *DocDatabase, cache_path: []const u8) void { + const xml_dir = cache.getXmlDocsDirInCache(tmp_allocator, cache_path) catch return; + defer tmp_allocator.free(xml_dir); + + var dir = std.fs.openDirAbsolute(xml_dir, .{ .iterate = true }) catch return; + defer dir.close(); + + var iter = dir.iterate(); + while (iter.next() catch return) |entry| { + if (entry.kind != 
.file) continue; + if (!std.mem.endsWith(u8, entry.name, ".xml")) continue; + + const class_name = entry.name[0 .. entry.name.len - 4]; // strip .xml + + // Read XML file + const content = dir.readFileAlloc(tmp_allocator, entry.name, 2 * 1024 * 1024) catch continue; + defer tmp_allocator.free(content); + + // Parse with arena_allocator so strings outlive this function + const class_doc = XmlDocParser.parseClassDoc(arena_allocator, content) catch |err| { + std.log.warn("failed to parse XML doc for {s}: {}", .{ class_name, err }); + continue; + }; + // Do NOT freeClassDoc -- arena owns the memory + + // Merge tutorials + if (class_doc.tutorials) |tutorials| { + if (tutorials.len > 0) { + if (db.symbols.getPtr(class_name)) |db_entry| { + if (db_entry.tutorials == null) { + const db_tutorials = arena_allocator.alloc(DocDatabase.Tutorial, tutorials.len) catch continue; + for (tutorials, 0..) |t, i| { + db_tutorials[i] = .{ .title = t.title, .url = t.url }; + } + db_entry.tutorials = db_tutorials; + } + } + } + } + + // Fill missing class description + if (class_doc.description) |xml_desc| { + if (db.symbols.getPtr(class_name)) |db_entry| { + if (db_entry.description == null) { + db_entry.description = xml_desc; + } + } + } + + // Merge member descriptions (methods, properties, signals) + if (class_doc.methods) |members| { + for (members) |member| { + const member_key = std.fmt.allocPrint(tmp_allocator, "{s}.{s}", .{ class_name, member.name }) catch continue; + defer tmp_allocator.free(member_key); + + if (db.symbols.getPtr(member_key)) |db_entry| { + if (db_entry.description == null) { + db_entry.description = member.description; + } + } + } + } + + if (class_doc.properties) |members| { + for (members) |member| { + const member_key = std.fmt.allocPrint(tmp_allocator, "{s}.{s}", .{ class_name, member.name }) catch continue; + defer tmp_allocator.free(member_key); + + if (db.symbols.getPtr(member_key)) |db_entry| { + if (db_entry.description == null) { + 
db_entry.description = member.description; + } + } + } + } + + if (class_doc.signals) |members| { + for (members) |member| { + const member_key = std.fmt.allocPrint(tmp_allocator, "{s}.{s}", .{ class_name, member.name }) catch continue; + defer tmp_allocator.free(member_key); + + if (db.symbols.getPtr(member_key)) |db_entry| { + if (db_entry.description == null) { + db_entry.description = member.description; + } + } + } + } + + // Add entries for classes found in XML but not in JSON + if (db.symbols.get(class_name) == null) { + const key = std.fmt.allocPrint(arena_allocator, "{s}", .{class_name}) catch continue; + db.symbols.put(arena_allocator, key, .{ + .key = key, + .name = key, + .kind = .class, + .description = class_doc.description, + .brief_description = class_doc.brief_description, + }) catch continue; + } + } +} + comptime { std.testing.refAllDecls(@This()); } From 05d9e5691b0f19e960a4414d689e151b8a1a367d Mon Sep 17 00:00:00 2001 From: Simon Hartcher Date: Sat, 21 Mar 2026 16:11:08 +1100 Subject: [PATCH 13/18] test: add snapshot test for tutorials rendering --- snapshots/class_with_tutorials.md | 12 +++++++++++ src/DocDatabase.zig | 35 +++++++++++++++++++++++++++++++ 2 files changed, 47 insertions(+) create mode 100644 snapshots/class_with_tutorials.md diff --git a/snapshots/class_with_tutorials.md b/snapshots/class_with_tutorials.md new file mode 100644 index 0000000..ccc6a1a --- /dev/null +++ b/snapshots/class_with_tutorials.md @@ -0,0 +1,12 @@ +# Sprite2D + +General-purpose sprite node. + +## Description + +A node that displays a 2D texture. 
+ +## Tutorials + +- [Custom drawing in 2D](https://docs.godotengine.org/en/stable/tutorials/2d/custom_drawing_in_2d.html) +- [All 2D Demos](https://github.com/godotengine/godot-demo-projects/tree/master/2d) diff --git a/src/DocDatabase.zig b/src/DocDatabase.zig index 72e3806..ce27bf3 100644 --- a/src/DocDatabase.zig +++ b/src/DocDatabase.zig @@ -1278,6 +1278,41 @@ test "generateMarkdownForSymbol for class with members" { try writer.flush(); } +test "generateMarkdownForSymbol for class with tutorials" { + const allocator = std.testing.allocator; + + var db = DocDatabase{ + .symbols = StringArrayHashMap(Entry).empty, + }; + defer db.symbols.deinit(allocator); + + const tutorials = [_]Tutorial{ + .{ .title = "Custom drawing in 2D", .url = "https://docs.godotengine.org/en/stable/tutorials/2d/custom_drawing_in_2d.html" }, + .{ .title = "All 2D Demos", .url = "https://github.com/godotengine/godot-demo-projects/tree/master/2d" }, + }; + + const entry = Entry{ + .key = "Sprite2D", + .name = "Sprite2D", + .kind = .class, + .brief_description = "General-purpose sprite node.", + .description = "A node that displays a 2D texture.", + .tutorials = &tutorials, + }; + try db.symbols.put(allocator, "Sprite2D", entry); + + // Write snapshot + var file = try std.fs.cwd().createFile("snapshots/class_with_tutorials.md", .{}); + defer file.close(); + + var buf: [4096]u8 = undefined; + var file_writer = file.writer(&buf); + const writer = &file_writer.interface; + + try db.generateMarkdownForSymbol(allocator, "Sprite2D", writer); + try writer.flush(); +} + const std = @import("std"); const ArenaAllocator = std.heap.ArenaAllocator; const Allocator = std.mem.Allocator; From aa35794ff29bcd5240296284e6498fc8a60e7b58 Mon Sep 17 00:00:00 2001 From: Simon Hartcher Date: Sat, 21 Mar 2026 16:19:32 +1100 Subject: [PATCH 14/18] fix: skip XML fetch and merge when GDOC_NO_XML is set Add GDOC_NO_XML environment variable to disable XML doc supplementation. 
Set automatically in build.zig for test runs to avoid network I/O during zig build test. --- build.zig | 2 ++ src/root.zig | 18 ++++++++++++------ 2 files changed, 14 insertions(+), 6 deletions(-) diff --git a/build.zig b/build.zig index 6802a9b..706698a 100644 --- a/build.zig +++ b/build.zig @@ -73,12 +73,14 @@ pub fn build(b: *std.Build) void { }); const run_mod_tests = b.addRunArtifact(mod_tests); + run_mod_tests.setEnvironmentVariable("GDOC_NO_XML", "1"); const exe_tests = b.addTest(.{ .root_module = exe.root_module, }); const run_exe_tests = b.addRunArtifact(exe_tests); + run_exe_tests.setEnvironmentVariable("GDOC_NO_XML", "1"); const test_step = b.step("test", "Run tests"); test_step.dependOn(&run_mod_tests.step); diff --git a/src/root.zig b/src/root.zig index 1304944..91851d4 100644 --- a/src/root.zig +++ b/src/root.zig @@ -40,14 +40,12 @@ pub fn markdownForSymbol(allocator: Allocator, symbol: []const u8, api_json_path if (needs_full_rebuild) { try cache.ensureDirectoryExists(cache_path); try api.generateApiJsonIfNotExists(allocator, "godot", cache_path); - } - // Fetch XML docs if missing (best-effort) - if (needs_full_rebuild or !try cache.xmlDocsArePopulated(allocator, cache_path)) { - fetchXmlDocs(allocator, cache_path); - } + // Fetch XML docs if missing (best-effort, requires godot) + if (!try cache.xmlDocsArePopulated(allocator, cache_path)) { + fetchXmlDocs(allocator, cache_path); + } - if (needs_full_rebuild) { const json_path = try cache.getJsonCachePathInDir(allocator, cache_path); defer allocator.free(json_path); @@ -414,7 +412,13 @@ test "markdownForSymbol generates markdown cache when cache is empty" { cache.clearCache(allocator) catch {}; } +fn xmlSupplementationDisabled() bool { + return std.posix.getenv("GDOC_NO_XML") != null; +} + fn fetchXmlDocs(allocator: Allocator, cache_path: []const u8) void { + if (xmlSupplementationDisabled()) return; + const xml_dir = cache.getXmlDocsDirInCache(allocator, cache_path) catch return; defer 
allocator.free(xml_dir); @@ -448,6 +452,8 @@ fn fetchXmlDocs(allocator: Allocator, cache_path: []const u8) void { } fn mergeXmlDocs(arena_allocator: Allocator, tmp_allocator: Allocator, db: *DocDatabase, cache_path: []const u8) void { + if (xmlSupplementationDisabled()) return; + const xml_dir = cache.getXmlDocsDirInCache(tmp_allocator, cache_path) catch return; defer tmp_allocator.free(xml_dir); From 8c307fe7b47a6413237dcb6b085d06c7ff90e016 Mon Sep 17 00:00:00 2001 From: Simon Hartcher Date: Sat, 21 Mar 2026 16:23:12 +1100 Subject: [PATCH 15/18] chore: remove plan and spec documents --- .../2026-03-21-xml-doc-supplementation.md | 1216 ----------------- ...26-03-21-xml-doc-supplementation-design.md | 127 -- 2 files changed, 1343 deletions(-) delete mode 100644 docs/superpowers/plans/2026-03-21-xml-doc-supplementation.md delete mode 100644 docs/superpowers/specs/2026-03-21-xml-doc-supplementation-design.md diff --git a/docs/superpowers/plans/2026-03-21-xml-doc-supplementation.md b/docs/superpowers/plans/2026-03-21-xml-doc-supplementation.md deleted file mode 100644 index 76b3ef0..0000000 --- a/docs/superpowers/plans/2026-03-21-xml-doc-supplementation.md +++ /dev/null @@ -1,1216 +0,0 @@ -# XML Documentation Supplementation Implementation Plan - -> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. - -**Goal:** Supplement Godot's JSON API docs with XML documentation from the Godot source tree, adding tutorials, missing descriptions, and GlobalScope entries. - -**Architecture:** New `source_fetch.zig` handles version parsing and tarball streaming extraction. New `XmlDocParser.zig` parses Godot XML class docs. XML data merges into `DocDatabase` entries during markdown cache generation. All errors degrade gracefully to JSON-only mode. 
- -**Tech Stack:** Zig 0.15.2, zig-xml (ianprime0509/zig-xml), std.tar, std.compress.gzip, std.http.Client - -**API Notes:** The Zig 0.15.2 stdlib APIs for tar, gzip, and HTTP may have different signatures than what is shown in code snippets below. Code snippets illustrate the *intent* and *data flow*; the implementer must verify exact function signatures against the Zig stdlib source (e.g., `std.tar`, `std.compress.gzip` or `std.compress.flate` with gzip container mode, `std.http.Client`) and adapt accordingly. When in doubt, check the Zig stdlib source or run `zig std` for documentation. - -**Spec:** `docs/superpowers/specs/2026-03-21-xml-doc-supplementation-design.md` - ---- - -### File Structure - -| File | Responsibility | -|------|---------------| -| `src/source_fetch.zig` (create) | Parse `godot --version`, download tarball, stream-extract XML docs to cache | -| `src/XmlDocParser.zig` (create) | Parse a single Godot XML class doc file into structured data | -| `build.zig.zon` (modify) | Add zig-xml dependency | -| `build.zig` (modify) | Wire zig-xml into the gdoc module | -| `src/DocDatabase.zig` (modify) | Add `tutorials` field to `Entry` | -| `src/cache.zig` (modify) | Add XML staleness check, integrate XML fetch into cache population | -| `src/root.zig` (modify) | Merge XML data during cache generation | - ---- - -### Task 1: Add zig-xml Dependency - -**Files:** -- Modify: `build.zig.zon` -- Modify: `build.zig` - -- [ ] **Step 1: Fetch zig-xml** - -```bash -cd /home/sh/Projects/gdzig/gdoc -zig fetch --save git+https://github.com/ianprime0509/zig-xml -``` - -Expected: `build.zig.zon` updated with zig-xml dependency entry. 
- -- [ ] **Step 2: Wire zig-xml into build.zig** - -In `build.zig`, after the `zigdown` dependency block (line 23-26), add: - -```zig -const zig_xml = b.dependency("zig_xml", .{ - .target = target, - .optimize = optimize, -}).module("xml"); -``` - -Then add it to the `mod` imports array (line 31-35): - -```zig -.{ .name = "xml", .module = zig_xml }, -``` - -- [ ] **Step 3: Verify build compiles** - -```bash -zig build -``` - -Expected: Clean build, no errors. - -- [ ] **Step 4: Commit** - -```bash -git add build.zig build.zig.zon -git commit -m "feat: add zig-xml dependency for XML doc parsing" -``` - ---- - -### Task 2: Version String Parser in source_fetch.zig - -**Files:** -- Create: `src/source_fetch.zig` - -- [ ] **Step 1: Write failing test for version parsing** - -Create `src/source_fetch.zig`: - -```zig -pub const VersionInfo = struct { - major: []const u8, - minor: []const u8, - patch: []const u8, - hash: ?[]const u8, - - /// Formats the version as "major.minor.patch" into the provided buffer. - pub fn formatVersion(self: VersionInfo, buf: []u8) ?[]const u8 { - return std.fmt.bufPrint(buf, "{s}.{s}.{s}", .{ self.major, self.minor, self.patch }) catch null; - } -}; - -/// Parses a Godot version string like "4.6.1.stable.official.14d19694e" -/// Returns the version components and optional commit hash. 
-pub fn parseGodotVersion(version_str: []const u8) ?VersionInfo { - _ = version_str; - return null; // TODO: implement -} - -test "parseGodotVersion parses standard version string" { - const result = parseGodotVersion("4.6.1.stable.official.14d19694e").?; - try std.testing.expectEqualStrings("4", result.major); - try std.testing.expectEqualStrings("6", result.minor); - try std.testing.expectEqualStrings("1", result.patch); - try std.testing.expectEqualStrings("14d19694e", result.hash.?); -} - -test "parseGodotVersion parses version without hash" { - const result = parseGodotVersion("4.6.1.stable.custom_build").?; - try std.testing.expectEqualStrings("4", result.major); - try std.testing.expectEqualStrings("6", result.minor); - try std.testing.expectEqualStrings("1", result.patch); - try std.testing.expect(result.hash == null); -} - -test "parseGodotVersion handles dev builds" { - const result = parseGodotVersion("4.7.0.dev.official.abc123def").?; - try std.testing.expectEqualStrings("4", result.major); - try std.testing.expectEqualStrings("7", result.minor); - try std.testing.expectEqualStrings("0", result.patch); - try std.testing.expectEqualStrings("abc123def", result.hash.?); -} - -test "parseGodotVersion returns null for empty string" { - try std.testing.expect(parseGodotVersion("") == null); -} - -test "parseGodotVersion returns null for malformed string" { - try std.testing.expect(parseGodotVersion("not-a-version") == null); -} - -const std = @import("std"); -``` - -- [ ] **Step 2: Run tests to verify they fail** - -```bash -zig build test 2>&1 | head -20 -``` - -Expected: Tests fail because `parseGodotVersion` returns `null`. 
- -- [ ] **Step 3: Implement parseGodotVersion** - -Replace the stub with: - -```zig -pub fn parseGodotVersion(version_str: []const u8) ?VersionInfo { - if (version_str.len == 0) return null; - - // Split on dots: "4.6.1.stable.official.14d19694e" - var iter = std.mem.splitScalar(u8, version_str, '.'); - const major = iter.next() orelse return null; - const minor = iter.next() orelse return null; - const patch = iter.next() orelse return null; - - // Validate major/minor/patch are numeric - for (major) |c| if (!std.ascii.isDigit(c)) return null; - for (minor) |c| if (!std.ascii.isDigit(c)) return null; - for (patch) |c| if (!std.ascii.isDigit(c)) return null; - - // Skip stability label (stable/dev/beta/rc) - _ = iter.next() orelse return VersionInfo{ - .major = major, - .minor = minor, - .patch = patch, - .hash = null, - }; - - // Next segment: "official" or "custom_build" etc. - const build_type = iter.next() orelse return VersionInfo{ - .major = major, - .minor = minor, - .patch = patch, - .hash = null, - }; - - // If build type is "official", the next segment is the commit hash - const hash: ?[]const u8 = if (std.mem.eql(u8, build_type, "official")) - iter.next() - else - null; - - return VersionInfo{ - .major = major, - .minor = minor, - .patch = patch, - .hash = hash, - }; -} -``` - -- [ ] **Step 4: Register module in build.zig** - -`source_fetch.zig` is part of the `gdoc` module. Since `root.zig` uses `comptime { std.testing.refAllDecls(@This()); }`, add to `src/root.zig`: - -```zig -pub const source_fetch = @import("source_fetch.zig"); -``` - -- [ ] **Step 5: Run tests to verify they pass** - -```bash -zig build test -``` - -Expected: All tests pass. 
- -- [ ] **Step 6: Commit** - -```bash -git add src/source_fetch.zig src/root.zig -git commit -m "feat: add Godot version string parser" -``` - ---- - -### Task 3: Run godot --version and Parse Output - -**Files:** -- Modify: `src/source_fetch.zig` - -- [ ] **Step 1: Write failing test for getGodotVersion** - -Add to `src/source_fetch.zig`: - -```zig -/// Runs `godot --version` and parses the output. -/// Returns null if godot is not installed or version can't be parsed. -pub fn getGodotVersion(allocator: Allocator) ?VersionInfo { - _ = allocator; - return null; // TODO -} - -test "getGodotVersion with fake godot script" { - const allocator = std.testing.allocator; - - var tmp_dir = std.testing.tmpDir(.{}); - defer tmp_dir.cleanup(); - - const tmp_path = try tmp_dir.dir.realpathAlloc(allocator, "."); - defer allocator.free(tmp_path); - - // Create a fake godot that outputs a version string - const script = "#!/bin/sh\necho '4.6.1.stable.official.14d19694e'"; - try tmp_dir.dir.writeFile(.{ .sub_path = "fake-godot", .data = script }); - - var file = try tmp_dir.dir.openFile("fake-godot", .{}); - try file.chmod(0o755); - file.close(); - - const fake_path = try std.fmt.allocPrint(allocator, "{s}/fake-godot", .{tmp_path}); - defer allocator.free(fake_path); - - const result = getGodotVersionFromPath(allocator, fake_path); - try std.testing.expect(result != null); - try std.testing.expectEqualStrings("14d19694e", result.?.hash.?); -} -``` - -- [ ] **Step 2: Run tests to verify they fail** - -```bash -zig build test 2>&1 | head -20 -``` - -- [ ] **Step 3: Implement getGodotVersionFromPath** - -```zig -/// Runs a godot executable at the given path with --version and parses output. 
-pub fn getGodotVersionFromPath(allocator: Allocator, godot_path: []const u8) ?VersionInfo { - const result = std.process.Child.run(.{ - .argv = &.{ godot_path, "--version" }, - .allocator = allocator, - }) catch return null; - defer allocator.free(result.stdout); - defer allocator.free(result.stderr); - - switch (result.term) { - .Exited => |code| if (code != 0) return null, - else => return null, - } - - const trimmed = std.mem.trimRight(u8, result.stdout, &std.ascii.whitespace); - return parseGodotVersion(trimmed); -} - -/// Convenience wrapper that uses "godot" from PATH. -pub fn getGodotVersion(allocator: Allocator) ?VersionInfo { - return getGodotVersionFromPath(allocator, "godot"); -} -``` - -- [ ] **Step 4: Run tests** - -```bash -zig build test -``` - -Expected: All pass. - -- [ ] **Step 5: Commit** - -```bash -git add src/source_fetch.zig -git commit -m "feat: run godot --version and parse output" -``` - ---- - -### Task 4: Tarball Download and XML Extraction - -**Files:** -- Modify: `src/source_fetch.zig` - -- [ ] **Step 1: Write the tarball URL builder** - -Add to `src/source_fetch.zig`: - -```zig -/// Builds the GitHub tarball URL for a Godot version. -/// Tries tag-based URL first (e.g., v4.6.1-stable), with hash fallback. 
-pub fn buildTarballUrl(buf: []u8, version: VersionInfo) ?[]const u8 { - const result = std.fmt.bufPrint(buf, "https://github.com/godotengine/godot/archive/refs/tags/{s}.{s}.{s}-stable.tar.gz", .{ - version.major, version.minor, version.patch, - }) catch return null; - return result; -} - -pub fn buildTarballUrlFromHash(buf: []u8, hash: []const u8) ?[]const u8 { - const result = std.fmt.bufPrint(buf, "https://github.com/godotengine/godot/archive/{s}.tar.gz", .{hash}) catch return null; - return result; -} - -test "buildTarballUrl formats tag-based URL" { - var buf: [256]u8 = undefined; - const url = buildTarballUrl(&buf, .{ - .major = "4", - .minor = "6", - .patch = "1", - .hash = "14d19694e", - }).?; - try std.testing.expectEqualStrings( - "https://github.com/godotengine/godot/archive/refs/tags/4.6.1-stable.tar.gz", - url, - ); -} - -test "buildTarballUrlFromHash formats hash-based URL" { - var buf: [256]u8 = undefined; - const url = buildTarballUrlFromHash(&buf, "14d19694e").?; - try std.testing.expectEqualStrings( - "https://github.com/godotengine/godot/archive/14d19694e.tar.gz", - url, - ); -} -``` - -- [ ] **Step 2: Run tests** - -```bash -zig build test -``` - -- [ ] **Step 3: Write the streaming extraction function** - -This is the core function that downloads a tarball and extracts XML docs. Add to `src/source_fetch.zig`: - -```zig -const Allocator = std.mem.Allocator; - -/// Downloads the Godot source tarball and extracts XML doc files. -/// Streams: HTTP -> gzip decompress -> tar extract -> filter XML files. -/// Writes extracted XML files to `xml_docs_dir`. -/// -/// **API NOTE:** The exact std.tar, std.compress.gzip, and std.http.Client -/// signatures must be verified against the Zig 0.15.2 stdlib source. -/// The pseudocode below shows the intended data flow. 
Key things to verify: -/// - gzip decompression: try `std.compress.gzip.decompress(reader)` or -/// `std.compress.flate.decompressor(.gzip, reader)` -/// - tar iteration: check `std.tar.iterator()` or `std.tar.pipeToFileSystem()` -/// - HTTP: `std.http.Client` open/send/wait or fetch API -/// - File writer: use `.writer(&buf)` then `.interface` pattern from cache.zig -pub fn fetchAndExtractXmlDocs( - allocator: Allocator, - url: []const u8, - xml_docs_dir: []const u8, -) !void { - // 1. HTTP GET the tarball URL - var client: std.http.Client = .init(allocator); - defer client.deinit(); - - // Open connection, send request, wait for response - var header_buf: [16 * 1024]u8 = undefined; - var req = try client.open(.GET, try std.Uri.parse(url), .{ - .server_header_buffer = &header_buf, - }); - defer req.deinit(); - try req.send(); - try req.wait(); - - if (req.response.status != .ok) return error.DownloadFailed; - - // 2. Pipe HTTP response reader -> gzip decompressor -> tar iterator - // Verify exact API: std.compress.gzip or std.compress.flate with gzip mode - var decompress = std.compress.gzip.decompressor(req.reader()); - - // 3. Iterate tar entries, filtering for XML doc files - var tar_iter = std.tar.iterator(decompress.reader(), .{}); - - while (try tar_iter.next()) |entry| { - const name = entry.name; - const basename = std.fs.path.basename(name); - - if (!std.mem.endsWith(u8, basename, ".xml")) continue; - - // Match: */doc/classes/*.xml and */modules/*/doc_classes/*.xml - const is_core_doc = std.mem.indexOf(u8, name, "/doc/classes/") != null; - const is_module_doc = std.mem.indexOf(u8, name, "/doc_classes/") != null; - if (!is_core_doc and !is_module_doc) continue; - - // 4. 
Write matching XML file to xml_docs_dir/ClassName.xml - const output_path = try std.fs.path.join(allocator, &.{ xml_docs_dir, basename }); - defer allocator.free(output_path); - - var output_file = try std.fs.createFileAbsolute(output_path, .{}); - defer output_file.close(); - - // Stream entry content to file using buffered writer - // Use the .writer(&buf) then .interface pattern from cache.zig - var buf: [4096]u8 = undefined; - var file_writer = output_file.writer(&buf); - var writer = &file_writer.interface; - - // Read entry content and write to file - // Exact API depends on tar entry reader interface - var read_buf: [8192]u8 = undefined; - while (true) { - const bytes_read = try entry.reader().read(&read_buf); - if (bytes_read == 0) break; - try writer.writeAll(read_buf[0..bytes_read]); - } - try writer.flush(); - } -} -``` - -**IMPORTANT for implementer:** The `std.tar`, `std.compress.gzip`/`std.compress.flate`, and `std.http.Client` APIs shown above are pseudocode illustrating the data flow. You **must** check the actual Zig 0.15.2 stdlib source for correct function signatures before coding. The streaming pipeline concept (HTTP -> gzip -> tar -> filter) is correct; only the exact API calls need verification. - -- [ ] **Step 4: Write the .complete marker function** - -```zig -/// Writes a .complete marker file with the version string. -pub fn writeCompleteMarker(allocator: Allocator, xml_docs_dir: []const u8, version_str: []const u8) !void { - const marker_path = try std.fs.path.join(allocator, &.{ xml_docs_dir, ".complete" }); - defer allocator.free(marker_path); - - var file = try std.fs.createFileAbsolute(marker_path, .{}); - defer file.close(); - - var buf: [256]u8 = undefined; - var file_writer = file.writer(&buf); - var writer = &file_writer.interface; - try writer.writeAll(version_str); - try writer.flush(); -} - -/// Reads the .complete marker and returns the version string, or null if not present. 
-pub fn readCompleteMarker(allocator: Allocator, xml_docs_dir: []const u8) ?[]const u8 { - const marker_path = std.fs.path.join(allocator, &.{ xml_docs_dir, ".complete" }) catch return null; - defer allocator.free(marker_path); - - const file = std.fs.openFileAbsolute(marker_path, .{}) catch return null; - defer file.close(); - - var buf: [256]u8 = undefined; - var file_reader = file.reader(&buf); - var reader = &file_reader.interface; - return reader.readAlloc(allocator, 256) catch null; -} -``` - -- [ ] **Step 5: Write tests for marker functions** - -```zig -test "writeCompleteMarker and readCompleteMarker round-trip" { - const allocator = std.testing.allocator; - - var tmp_dir = std.testing.tmpDir(.{}); - defer tmp_dir.cleanup(); - - const tmp_path = try tmp_dir.dir.realpathAlloc(allocator, "."); - defer allocator.free(tmp_path); - - try writeCompleteMarker(allocator, tmp_path, "4.6.1.stable.official.14d19694e"); - - const read_back = readCompleteMarker(allocator, tmp_path).?; - defer allocator.free(read_back); - - try std.testing.expectEqualStrings("4.6.1.stable.official.14d19694e", read_back); -} - -test "readCompleteMarker returns null when no marker exists" { - const allocator = std.testing.allocator; - - var tmp_dir = std.testing.tmpDir(.{}); - defer tmp_dir.cleanup(); - - const tmp_path = try tmp_dir.dir.realpathAlloc(allocator, "."); - defer allocator.free(tmp_path); - - try std.testing.expect(readCompleteMarker(allocator, tmp_path) == null); -} -``` - -- [ ] **Step 6: Run tests** - -```bash -zig build test -``` - -Expected: Marker tests pass. The `fetchAndExtractXmlDocs` function won't be unit tested (it requires network); it will be integration tested in a later task. 
- -- [ ] **Step 7: Commit** - -```bash -git add src/source_fetch.zig -git commit -m "feat: add tarball download, XML extraction, and cache markers" -``` - ---- - -### Task 5: XML Doc Parser - -**Files:** -- Create: `src/XmlDocParser.zig` -- Modify: `src/root.zig` (add import) - -- [ ] **Step 1: Define the output data structures** - -Create `src/XmlDocParser.zig`: - -```zig -const XmlDocParser = @This(); - -pub const Tutorial = struct { - title: []const u8, - url: []const u8, -}; - -pub const MemberDoc = struct { - name: []const u8, - description: ?[]const u8 = null, -}; - -pub const ClassDoc = struct { - name: []const u8, - inherits: ?[]const u8 = null, - brief_description: ?[]const u8 = null, - description: ?[]const u8 = null, - tutorials: ?[]Tutorial = null, - methods: ?[]MemberDoc = null, - properties: ?[]MemberDoc = null, - signals: ?[]MemberDoc = null, - constants: ?[]MemberDoc = null, -}; - -const DOCS_BASE_URL = "https://docs.godotengine.org/en/stable"; - -const std = @import("std"); -const Allocator = std.mem.Allocator; -const xml = @import("xml"); -``` - -- [ ] **Step 2: Write a test with sample XML** - -Add to `src/XmlDocParser.zig`: - -```zig -/// Parses a Godot XML class documentation file. -/// All returned strings are allocated with the provided allocator. -pub fn parseClassDoc(allocator: Allocator, xml_content: []const u8) !ClassDoc { - _ = allocator; - _ = xml_content; - return error.NotImplemented; // TODO -} - -const test_xml = - \\ - \\ - \\ A 2D game object. - \\ Node2D is the base class for 2D. - \\ - \\ $DOCS_URL/tutorials/2d/custom_drawing.html - \\ https://github.com/godotengine/godot-demo-projects/tree/master/2d - \\ - \\ - \\ - \\ - \\ - \\ Multiplies the current scale by the ratio vector. - \\ - \\ - \\ - \\ - \\Position, relative to the node's parent. 
- \\ - \\ - \\ -; - -test "parseClassDoc parses class name and inherits" { - const allocator = std.testing.allocator; - const doc = try parseClassDoc(allocator, test_xml); - defer freeClassDoc(allocator, doc); - - try std.testing.expectEqualStrings("Node2D", doc.name); - try std.testing.expectEqualStrings("CanvasItem", doc.inherits.?); -} - -test "parseClassDoc parses descriptions" { - const allocator = std.testing.allocator; - const doc = try parseClassDoc(allocator, test_xml); - defer freeClassDoc(allocator, doc); - - try std.testing.expectEqualStrings("A 2D game object.", doc.brief_description.?); - try std.testing.expectEqualStrings("Node2D is the base class for 2D.", doc.description.?); -} - -test "parseClassDoc parses tutorials with DOCS_URL expansion" { - const allocator = std.testing.allocator; - const doc = try parseClassDoc(allocator, test_xml); - defer freeClassDoc(allocator, doc); - - try std.testing.expect(doc.tutorials != null); - try std.testing.expectEqual(@as(usize, 2), doc.tutorials.?.len); - - try std.testing.expectEqualStrings("Custom drawing in 2D", doc.tutorials.?[0].title); - try std.testing.expectEqualStrings( - "https://docs.godotengine.org/en/stable/tutorials/2d/custom_drawing.html", - doc.tutorials.?[0].url, - ); - - // External URL should be left unchanged - try std.testing.expectEqualStrings( - "https://github.com/godotengine/godot-demo-projects/tree/master/2d", - doc.tutorials.?[1].url, - ); -} - -test "parseClassDoc parses methods" { - const allocator = std.testing.allocator; - const doc = try parseClassDoc(allocator, test_xml); - defer freeClassDoc(allocator, doc); - - try std.testing.expect(doc.methods != null); - try std.testing.expectEqual(@as(usize, 1), doc.methods.?.len); - try std.testing.expectEqualStrings("apply_scale", doc.methods.?[0].name); - try std.testing.expectEqualStrings("Multiplies the current scale by the ratio vector.", doc.methods.?[0].description.?); -} - -test "parseClassDoc parses properties from members 
element" { - const allocator = std.testing.allocator; - const doc = try parseClassDoc(allocator, test_xml); - defer freeClassDoc(allocator, doc); - - try std.testing.expect(doc.properties != null); - try std.testing.expectEqual(@as(usize, 1), doc.properties.?.len); - try std.testing.expectEqualStrings("position", doc.properties.?[0].name); -} - -/// Frees all memory allocated by parseClassDoc. -pub fn freeClassDoc(allocator: Allocator, doc: ClassDoc) void { - _ = allocator; - _ = doc; - // TODO: free all allocated strings and slices -} -``` - -- [ ] **Step 3: Run tests to verify they fail** - -```bash -zig build test 2>&1 | head -20 -``` - -Expected: `error.NotImplemented` - -- [ ] **Step 4: Implement parseClassDoc** - -Implement using zig-xml's pull parser API. The implementation should: - -1. Create an `xml.Reader` from the content -2. Loop through events matching element starts/ends -3. Collect text content for `brief_description`, `description` -4. Parse `class` element attributes for `name` and `inherits` -5. Parse `tutorials/link` elements, expanding `$DOCS_URL` -6. Parse `methods/method` and `members/member` elements -7. Build and return `ClassDoc` - -The exact API calls depend on zig-xml's reader interface. Consult zig-xml's README or tests for the exact method names (likely `reader.read()` returning tagged events). - -- [ ] **Step 5: Implement freeClassDoc** - -Free all allocated slices and strings in the `ClassDoc`. - -- [ ] **Step 6: Register in root.zig** - -Add to `src/root.zig`: - -```zig -pub const XmlDocParser = @import("XmlDocParser.zig"); -``` - -- [ ] **Step 7: Run tests** - -```bash -zig build test -``` - -Expected: All pass. 
- -- [ ] **Step 8: Commit** - -```bash -git add src/XmlDocParser.zig src/root.zig -git commit -m "feat: add XML doc parser for Godot class documentation" -``` - ---- - -### Task 6: Add tutorials Field to Entry - -**Files:** -- Modify: `src/DocDatabase.zig` -- Modify: `src/DocDatabase.zig` (markdown generation) - -- [ ] **Step 1: Add tutorials field to Entry struct** - -In `src/DocDatabase.zig`, add to the `Entry` struct (after line 32): - -```zig -pub const Tutorial = struct { - title: []const u8, - url: []const u8, -}; -``` - -And add the field to `Entry` (after `members`): - -```zig -tutorials: ?[]const Tutorial = null, -``` - -- [ ] **Step 2: Update generateMarkdownForSymbol to render tutorials** - -Find the `generateMarkdownForSymbol` function in `src/DocDatabase.zig`. After the description section, add: - -```zig -if (entry.tutorials) |tutorials| { - if (tutorials.len > 0) { - try writer.writeAll("\n## Tutorials\n\n"); - for (tutorials) |tutorial| { - try writer.print("- [{s}]({s})\n", .{ tutorial.title, tutorial.url }); - } - } -} -``` - -- [ ] **Step 3: Write a snapshot test** - -Update an existing test or add a new one that includes tutorials in the entry and verifies the markdown output contains a Tutorials section. - -- [ ] **Step 4: Run tests** - -```bash -zig build test -``` - -Expected: All pass, snapshots clean. 
- -- [ ] **Step 5: Commit** - -```bash -git add src/DocDatabase.zig -git commit -m "feat: add tutorials field to Entry and render in markdown output" -``` - ---- - -### Task 7: Integrate XML Fetch into Cache Population - -**Files:** -- Modify: `src/cache.zig` -- Modify: `src/root.zig` - -- [ ] **Step 1: Add XML docs directory helpers to cache.zig** - -Add to `src/cache.zig`: - -```zig -pub fn getXmlDocsDirInCache(allocator: Allocator, cache_dir: []const u8) ![]const u8 { - return std.fmt.allocPrint( - allocator, - "{f}", - .{std.fs.path.fmtJoin(&[_][]const u8{ cache_dir, "xml_docs" })}, - ); -} - -pub fn xmlDocsArePopulated(allocator: Allocator, cache_dir: []const u8) !bool { - const xml_dir = try getXmlDocsDirInCache(allocator, cache_dir); - defer allocator.free(xml_dir); - - const marker = source_fetch.readCompleteMarker(allocator, xml_dir); - if (marker) |m| { - allocator.free(m); - return true; - } - return false; -} -``` - -Add the import at the bottom: - -```zig -const source_fetch = @import("source_fetch.zig"); -``` - -- [ ] **Step 2: Add XML staleness check** - -Add to `src/cache.zig`: - -```zig -/// Checks if XML docs are stale by comparing cached version to current godot version. 
-pub fn xmlDocsAreStale(allocator: Allocator, cache_dir: []const u8, current_version: []const u8) !bool { - const xml_dir = try getXmlDocsDirInCache(allocator, cache_dir); - defer allocator.free(xml_dir); - - const cached_version = source_fetch.readCompleteMarker(allocator, xml_dir) orelse return true; - defer allocator.free(cached_version); - - return !std.mem.eql(u8, cached_version, current_version); -} -``` - -- [ ] **Step 3: Modify root.zig to trigger XML fetch during cache population** - -In `src/root.zig`, modify the cache population block (lines 38-50) to also fetch XML docs and check for staleness: - -```zig -const needs_full_rebuild = !try cache.cacheIsPopulated(allocator, cache_path); - -if (needs_full_rebuild) { - try cache.ensureDirectoryExists(cache_path); - try api.generateApiJsonIfNotExists(allocator, "godot", cache_path); -} - -// Fetch XML docs if missing or stale (best-effort, independent of JSON cache) -if (needs_full_rebuild or !try cache.xmlDocsArePopulated(allocator, cache_path)) { - fetchXmlDocs(allocator, cache_path); -} - -if (needs_full_rebuild) { - const json_path = try cache.getJsonCachePathInDir(allocator, cache_path); - defer allocator.free(json_path); - - const json_file = try std.fs.openFileAbsolute(json_path, .{}); - defer json_file.close(); - - var db = try DocDatabase.loadFromJsonFileLeaky(arena.allocator(), json_file); - - // Merge XML data into db before generating markdown cache - // arena.allocator() for strings that live in the DB, allocator for temporaries - mergeXmlDocs(arena.allocator(), allocator, &db, cache_path); - - try cache.generateMarkdownCache(allocator, db, cache_path); -} -``` - -- [ ] **Step 4: Implement fetchXmlDocs helper** - -Add to `src/root.zig`: - -```zig -fn fetchXmlDocs(allocator: Allocator, cache_path: []const u8) void { - const xml_dir = cache.getXmlDocsDirInCache(allocator, cache_path) catch return; - defer allocator.free(xml_dir); - - cache.ensureDirectoryExists(xml_dir) catch return; - - const 
version = source_fetch.getGodotVersion(allocator) orelse return; - - var url_buf: [256]u8 = undefined; - const url = source_fetch.buildTarballUrl(&url_buf, version) orelse return; - - source_fetch.fetchAndExtractXmlDocs(allocator, url, xml_dir) catch |err| { - // Try hash-based fallback URL - if (version.hash) |hash| { - var hash_url_buf: [256]u8 = undefined; - const hash_url = source_fetch.buildTarballUrlFromHash(&hash_url_buf, hash) orelse return; - source_fetch.fetchAndExtractXmlDocs(allocator, hash_url, xml_dir) catch { - std.log.warn("XML doc fetch failed ({}), proceeding without XML supplementation", .{err}); - return; - }; - } else { - std.log.warn("XML doc fetch failed ({}), proceeding without XML supplementation", .{err}); - return; - } - }; - - var version_buf: [64]u8 = undefined; - const version_str = version.formatVersion(&version_buf) orelse return; - - source_fetch.writeCompleteMarker(allocator, xml_dir, version_str) catch return; -} -``` - -- [ ] **Step 5: Implement mergeXmlDocs helper (stub for now)** - -Add to `src/root.zig`: - -```zig -fn mergeXmlDocs(arena_allocator: Allocator, tmp_allocator: Allocator, db: *DocDatabase, cache_path: []const u8) void { - _ = arena_allocator; - _ = tmp_allocator; - _ = db; - _ = cache_path; - // TODO: implement in Task 8 -} -``` - -- [ ] **Step 6: Run tests** - -```bash -zig build test -``` - -Expected: All pass. Network-dependent code is only called in the actual cache population path, not in tests. - -- [ ] **Step 7: Commit** - -```bash -git add src/cache.zig src/root.zig -git commit -m "feat: integrate XML doc fetch into cache population flow" -``` - ---- - -### Task 8: Merge XML Data into DocDatabase - -**Files:** -- Modify: `src/root.zig` - -- [ ] **Step 1: Implement mergeXmlDocs** - -Replace the stub in `src/root.zig`. - -**IMPORTANT memory ownership:** `parseClassDoc` allocates strings with the provided allocator. 
Since these strings are stored in the `DocDatabase` (which uses an arena allocator that outlives this function), pass the arena allocator to `parseClassDoc` so the strings live as long as the DB. Do NOT free the parsed content -- the arena owns it. - -```zig -/// Merges XML documentation into the DocDatabase. -/// Uses arena_allocator for all allocations so strings live as long as the DB. -/// Uses tmp_allocator for temporary allocations (paths, etc.) that are freed immediately. -fn mergeXmlDocs(arena_allocator: Allocator, tmp_allocator: Allocator, db: *DocDatabase, cache_path: []const u8) void { - const xml_dir = cache.getXmlDocsDirInCache(tmp_allocator, cache_path) catch return; - defer tmp_allocator.free(xml_dir); - - var dir = std.fs.openDirAbsolute(xml_dir, .{ .iterate = true }) catch return; - defer dir.close(); - - var iter = dir.iterate(); - while (iter.next() catch return) |entry| { - if (!std.mem.endsWith(u8, entry.name, ".xml")) continue; - - const class_name = entry.name[0 .. 
entry.name.len - 4]; // strip .xml - - // Read XML file content (temporary -- only needed for parsing) - const xml_path = std.fs.path.join(tmp_allocator, &.{ xml_dir, entry.name }) catch continue; - defer tmp_allocator.free(xml_path); - - const content = std.fs.openFileAbsolute(xml_path, .{}) catch continue; - defer content.close(); - const xml_bytes = content.readToEndAlloc(tmp_allocator, 2 * 1024 * 1024) catch continue; - defer tmp_allocator.free(xml_bytes); - - // Parse XML -- allocate strings with arena so they outlive this function - const class_doc = XmlDocParser.parseClassDoc(arena_allocator, xml_bytes) catch |err| { - std.log.warn("failed to parse XML doc for {s}: {}", .{ class_name, err }); - continue; - }; - // Do NOT call freeClassDoc -- arena owns the memory - - // Merge tutorials into existing entry - if (class_doc.tutorials) |tutorials| { - if (db.symbols.getPtr(class_name)) |db_entry| { - if (db_entry.tutorials == null and tutorials.len > 0) { - const db_tutorials = arena_allocator.alloc(DocDatabase.Tutorial, tutorials.len) catch continue; - for (tutorials, 0..) 
|t, i| { - db_tutorials[i] = .{ .title = t.title, .url = t.url }; - } - db_entry.tutorials = db_tutorials; - } - } - } - - // Fill missing class description - if (class_doc.description) |xml_desc| { - if (db.symbols.getPtr(class_name)) |db_entry| { - if (db_entry.description == null) { - db_entry.description = xml_desc; - } - } - } - - // Helper: merge member descriptions (methods, properties, signals) - const member_lists = [_]struct { members: ?[]XmlDocParser.MemberDoc }{ - .{ .members = class_doc.methods }, - .{ .members = class_doc.properties }, - .{ .members = class_doc.signals }, - }; - - for (member_lists) |list| { - const members = list.members orelse continue; - for (members) |member| { - const member_key = std.fmt.allocPrint(tmp_allocator, "{s}.{s}", .{ class_name, member.name }) catch continue; - defer tmp_allocator.free(member_key); - - if (db.symbols.getPtr(member_key)) |db_entry| { - if (db_entry.description == null) { - db_entry.description = member.description; - } - } - } - } - - // Add GlobalScope entries not present in JSON - if (db.symbols.get(class_name) == null) { - // This class exists in XML but not in JSON -- add it - const key = std.fmt.allocPrint(arena_allocator, "{s}", .{class_name}) catch continue; - db.symbols.put(arena_allocator, key, .{ - .key = key, - .name = key, - .kind = .class, - .description = class_doc.description, - .brief_description = class_doc.brief_description, - }) catch continue; - } - } -} -``` - -- [ ] **Step 2: Write a test for mergeXmlDocs** - -```zig -test "mergeXmlDocs fills missing descriptions from XML" { - const allocator = std.testing.allocator; - - // Use an arena for DB-lifetime allocations (simulates the real flow) - var arena = std.heap.ArenaAllocator.init(allocator); - defer arena.deinit(); - - var tmp_dir = std.testing.tmpDir(.{}); - defer tmp_dir.cleanup(); - - const cache_dir = try tmp_dir.dir.realpathAlloc(allocator, "."); - defer allocator.free(cache_dir); - - // Create xml_docs dir with a test XML 
file - const xml_dir = try std.fmt.allocPrint(allocator, "{s}/xml_docs", .{cache_dir}); - defer allocator.free(xml_dir); - try std.fs.makeDirAbsolute(xml_dir); - - const xml_path = try std.fmt.allocPrint(allocator, "{s}/TestClass.xml", .{xml_dir}); - defer allocator.free(xml_path); - - const xml_content = - \\ - \\ - \\ A test. - \\ Full description from XML. - \\ - \\ https://example.com - \\ - \\ - ; - try std.fs.cwd().writeFile(.{ .sub_path = xml_path, .data = xml_content }); - - // Create a DB with TestClass but no description - var db = DocDatabase{ .symbols = .empty }; - defer db.symbols.deinit(allocator); - - try db.symbols.put(allocator, "TestClass", .{ - .key = "TestClass", - .name = "TestClass", - .kind = .class, - .description = null, // Missing - should be filled from XML - }); - - // arena.allocator() for DB-lifetime strings, allocator for temporaries - mergeXmlDocs(arena.allocator(), allocator, &db, cache_dir); - - const entry = db.symbols.get("TestClass").?; - try std.testing.expectEqualStrings("Full description from XML.", entry.description.?); - try std.testing.expect(entry.tutorials != null); -} -``` - -- [ ] **Step 3: Run tests** - -```bash -zig build test -``` - -- [ ] **Step 4: Commit** - -```bash -git add src/root.zig -git commit -m "feat: merge XML documentation data into DocDatabase entries" -``` - ---- - -### Task 9: Update clearCache and End-to-End Verification - -**Files:** -- Modify: `src/cache.zig` - -- [ ] **Step 1: Verify clearCache already handles xml_docs** - -`clearCache` deletes the entire cache directory tree (`std.fs.deleteTreeAbsolute`), so `xml_docs/` is already covered. No change needed. 
- -- [ ] **Step 2: Manual integration test** - -```bash -# Build -zig build - -# Clear existing cache -zig-out/bin/gdoc --clear-cache - -# Look up a class (triggers JSON + XML fetch) -zig-out/bin/gdoc Node2D - -# Verify tutorials section appears -zig-out/bin/gdoc Node2D | grep -i tutorial - -# Look up a member -zig-out/bin/gdoc Node2D.position -``` - -Expected: Node2D output includes a Tutorials section with links. Properties show descriptions that may have been missing before. - -- [ ] **Step 3: Test with --godot-extension-api (should skip XML)** - -```bash -zig-out/bin/gdoc --godot-extension-api extension_api.json Node2D -``` - -Expected: Works as before, no tutorials section (XML not used in this path). - -- [ ] **Step 4: Run full test suite** - -```bash -zig build test -``` - -Expected: All tests pass. - -- [ ] **Step 5: Commit any test adjustments** - -```bash -git add src/ snapshots/ -git commit -m "test: verify end-to-end XML doc supplementation" -``` - ---- - -### Task 10: Update Snapshots - -**Files:** -- Modify: `snapshots/*.md` (as needed) - -- [ ] **Step 1: Regenerate snapshots if format changed** - -If the tutorials section changed the output format for any existing snapshot tests, update the snapshots: - -```bash -zig build test 2>&1 | grep -A5 "snapshot" -``` - -If snapshot diffs exist, review them and update: - -```bash -# Review the diffs -git diff snapshots/ - -# If changes are expected (new Tutorials section), stage them -git add snapshots/ -git commit -m "test: update snapshots for tutorials section" -``` - -- [ ] **Step 2: Final verification** - -```bash -zig build test -``` - -Expected: All green. 
diff --git a/docs/superpowers/specs/2026-03-21-xml-doc-supplementation-design.md b/docs/superpowers/specs/2026-03-21-xml-doc-supplementation-design.md deleted file mode 100644 index a09fcd2..0000000 --- a/docs/superpowers/specs/2026-03-21-xml-doc-supplementation-design.md +++ /dev/null @@ -1,127 +0,0 @@ -# XML Documentation Supplementation - -## Problem - -Godot's `--dump-extension-api-with-docs` JSON export does not contain all documentation. Tutorials, some GlobalScope entries, and other doc fields are only available in the XML documentation files within the Godot source tree (`doc/classes/*.xml` and `modules/*/doc_classes/*.xml`). - -## Solution - -Automatically fetch and parse Godot's XML documentation from the source tree, using it to supplement the existing JSON data with missing fields. - -## Design - -### Source Acquisition - -1. Run `godot --version` to get the version string (e.g., `4.6.1.stable.official.14d19694e`). -2. Parse the version number and commit hash. The version number (`4.6.1`) is used to construct a tag-based tarball URL; the commit hash is a fallback. -3. Download the source tarball from `https://github.com/godotengine/godot/archive/refs/tags/{version}-stable.tar.gz` (falling back to `https://github.com/godotengine/godot/archive/{hash}.tar.gz` if the tag URL fails). -4. Stream the tarball through gzip decompression and tar extraction, filtering for: - - `*/doc/classes/*.xml` (core class docs) - - `*/modules/*/doc_classes/*.xml` (module class docs, e.g., GDScript, WebSocket) -5. Write matching XML files to `~/.cache/gdoc/xml_docs/`. -6. Write a `.complete` marker file with the version string after successful extraction. -7. The tarball is never written to disk -- streamed directly from HTTP through decompression and extraction. - -**Godot must be installed** for XML supplementation. If `godot --version` fails, XML docs are skipped silently. - -This runs automatically on first use alongside the existing JSON generation. 
`--clear-cache` clears XML docs too. - -### XML Parsing - -**Dependency**: `ianprime0509/zig-xml` -- a pull/streaming XML parser targeting Zig 0.15.1, with W3C conformance testing and standard `build.zig.zon` integration. - -**New module**: `src/XmlDocParser.zig` -- parses a single Godot XML class doc file and returns supplemental data. - -Godot XML doc structure: - -```xml - - A 2D game object. - ... - - $DOCS_URL/tutorials/2d/custom_drawing_in_2d.html - - - - ... - - - -``` - -**`$DOCS_URL` expansion**: Replace `$DOCS_URL` with `https://docs.godotengine.org/en/stable` when rendering tutorial links. - -### Merge Strategy - -XML data supplements JSON data during markdown cache generation. When generating cached markdown for a symbol, the XML file for that class is parsed and merged before writing to disk. - -Merge rules: -- **Tutorials**: New field on `Entry` as `?[]Tutorial` where `Tutorial = struct { title: []const u8, url: []const u8 }`. Rendered as a "Tutorials" section in output. -- **Missing descriptions**: If a JSON entry has no description but the XML does, use the XML description. -- **GlobalScope entries**: XML docs for classes/entries not present in the JSON are added as new `Entry` values to the database. - -When using `--godot-extension-api` (custom JSON path), XML supplementation does not apply. - -### Tar Extraction - -Uses Zig 0.15 stdlib -- no external dependency needed: -- `std.http.Client` for HTTP download -- `std.compress.flate` with gzip container mode for decompression -- `std.tar` for streaming extraction - -The pipeline streams download -> decompress -> extract without writing the full tarball to disk. The full tarball is ~50-80 MB compressed; only the XML files (~5 MB) are written to disk. - -### Cache Layout - -``` -~/.cache/gdoc/ -├── extension_api.json # Existing JSON dump -├── xml_docs/ # New: extracted XML files -│ ├── .complete # Marker file with version string -│ ├── Node2D.xml -│ ├── @GlobalScope.xml -│ └── ... 
-├── Node2D/ -│ └── index.md # Existing markdown cache -└── ... -``` - -**Staleness check**: On startup, compare the version in `xml_docs/.complete` against the current `godot --version`. If they differ, re-fetch XML docs. Presence of `.complete` (not just the directory) is the sentinel for a successful fetch. - -### Error Handling - -- **Version parsing failure** (unexpected format, no hash): Skip XML supplementation, proceed with JSON-only display. Log a warning. -- **Download failure** (network error, 404, rate limit): Skip XML supplementation, proceed with JSON-only display. Log a warning. -- **Partial download** (interrupted stream): No `.complete` marker is written, so next run will retry. -- **Malformed XML**: Skip that individual XML file, proceed with other files. Log which file failed. -- **Disk space**: Rely on OS write errors propagating; ~5 MB of XML is unlikely to be a concern. - -In all error cases, gdoc degrades gracefully -- XML supplementation is best-effort, and the tool remains fully functional with JSON-only data. - -## Changes - -### New dependency - -- `zig-xml` (`ianprime0509/zig-xml`) -- XML pull parser, 0BSD license - -### New files - -- `src/XmlDocParser.zig` -- Parses Godot XML doc files, returns supplemental data (tutorials, descriptions, GlobalScope entries) -- `src/source_fetch.zig` -- Version parsing, tarball download, streaming extraction of XML docs - -### Modified files - -- `build.zig.zon` -- Add zig-xml dependency -- `build.zig` -- Wire zig-xml into modules -- `src/DocDatabase.zig` -- Add `tutorials` field to `Entry`, possibly new `EntryKind` values for GlobalScope items -- `src/root.zig` -- Merge XML data during cache generation, trigger XML fetch in cache population flow -- `src/cache.zig` -- Extend cache population to include XML fetch, update sentinel/staleness check - -### No breaking changes - -Existing CLI interface unchanged. `--clear-cache` clears everything including XML docs. 
- -### Known limitations - -- XML docs total ~800+ files across `doc/classes/` and `modules/*/doc_classes/`, consuming ~5 MB on disk. -- Full tarball must be streamed even though only XML files are extracted (tar is sequential). From f6e19ce4a2e2d796156c9c938f4493f0e863d01f Mon Sep 17 00:00:00 2001 From: Simon Hartcher Date: Sat, 21 Mar 2026 16:35:03 +1100 Subject: [PATCH 16/18] feat: add terminal spinner during XML doc download --- src/Spinner.zig | 41 +++++++++++++++++++++++++++++++++++++++++ src/root.zig | 5 +++++ 2 files changed, 46 insertions(+) create mode 100644 src/Spinner.zig diff --git a/src/Spinner.zig b/src/Spinner.zig new file mode 100644 index 0000000..195e482 --- /dev/null +++ b/src/Spinner.zig @@ -0,0 +1,41 @@ +const std = @import("std"); + +const Spinner = @This(); + +const frames = [_][]const u8{ "⠋", "⠙", "⠹", "⠸", "⠼", "⠴", "⠦", "⠧", "⠇", "⠏" }; +const delay_ns = 80 * std.time.ns_per_ms; + +message: []const u8, +thread: ?std.Thread = null, +stop: std.atomic.Value(bool) = std.atomic.Value(bool).init(false), + +pub fn start(self: *Spinner) void { + self.stop.store(false, .release); + self.thread = std.Thread.spawn(.{}, run, .{self}) catch return; +} + +pub fn finish(self: *Spinner) void { + self.stop.store(true, .release); + if (self.thread) |t| { + t.join(); + self.thread = null; + } + const stderr = std.fs.File.stderr(); + var buf: [256]u8 = undefined; + var w = stderr.writer(&buf); + w.interface.writeAll("\r\x1b[2K") catch {}; + w.interface.flush() catch {}; +} + +fn run(self: *Spinner) void { + const stderr = std.fs.File.stderr(); + var buf: [256]u8 = undefined; + var w = stderr.writer(&buf); + var i: usize = 0; + while (!self.stop.load(.acquire)) { + w.interface.print("\r{s} {s}", .{ frames[i], self.message }) catch return; + w.interface.flush() catch return; + i = (i + 1) % frames.len; + std.Thread.sleep(delay_ns); + } +} diff --git a/src/root.zig b/src/root.zig index 91851d4..d62bf5c 100644 --- a/src/root.zig +++ b/src/root.zig @@ -430,6 
+430,10 @@ fn fetchXmlDocs(allocator: Allocator, cache_path: []const u8) void { var url_buf: [256]u8 = undefined; const url = source_fetch.buildTarballUrl(&url_buf, version) orelse return; + var spinner = Spinner{ .message = "Downloading XML docs..." }; + spinner.start(); + defer spinner.finish(); + source_fetch.fetchAndExtractXmlDocs(allocator, url, xml_dir) catch |err| { // Try hash-based fallback URL if (version.hash) |hash| { @@ -574,6 +578,7 @@ pub const XmlDocParser = @import("XmlDocParser.zig"); pub const cache = @import("cache.zig"); pub const api = @import("api.zig"); pub const source_fetch = @import("source_fetch.zig"); +const Spinner = @import("Spinner.zig"); const zigdown = @import("zigdown"); const ConsoleRenderer = zigdown.ConsoleRenderer; From a28e3cea2697c4ecbe0024296e599e91464ea924 Mon Sep 17 00:00:00 2001 From: Simon Hartcher Date: Sat, 21 Mar 2026 16:37:18 +1100 Subject: [PATCH 17/18] feat: add spinner for cache building phase --- src/Spinner.zig | 9 +++++---- src/root.zig | 5 ++++- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/src/Spinner.zig b/src/Spinner.zig index 195e482..063b5fd 100644 --- a/src/Spinner.zig +++ b/src/Spinner.zig @@ -15,11 +15,12 @@ pub fn start(self: *Spinner) void { } pub fn finish(self: *Spinner) void { + if (self.thread == null) return; + self.stop.store(true, .release); - if (self.thread) |t| { - t.join(); - self.thread = null; - } + self.thread.?.join(); + self.thread = null; + const stderr = std.fs.File.stderr(); var buf: [256]u8 = undefined; var w = stderr.writer(&buf); diff --git a/src/root.zig b/src/root.zig index d62bf5c..27e47ab 100644 --- a/src/root.zig +++ b/src/root.zig @@ -46,6 +46,10 @@ pub fn markdownForSymbol(allocator: Allocator, symbol: []const u8, api_json_path fetchXmlDocs(allocator, cache_path); } + var spinner: Spinner = .{ .message = "Building documentation cache..." 
}; + if (!xmlSupplementationDisabled()) spinner.start(); + defer spinner.finish(); + const json_path = try cache.getJsonCachePathInDir(allocator, cache_path); defer allocator.free(json_path); @@ -54,7 +58,6 @@ pub fn markdownForSymbol(allocator: Allocator, symbol: []const u8, api_json_path var db = try DocDatabase.loadFromJsonFileLeaky(arena.allocator(), json_file); - // Merge XML data into db before generating markdown cache mergeXmlDocs(arena.allocator(), allocator, &db, cache_path); try cache.generateMarkdownCache(allocator, db, cache_path); From 5b6e1dd74efa1dd8628ef24ca474410e3a3c26cc Mon Sep 17 00:00:00 2001 From: Simon Hartcher Date: Sat, 21 Mar 2026 16:48:21 +1100 Subject: [PATCH 18/18] fix: use cross-platform env var check with fixed buffer allocator --- src/root.zig | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/root.zig b/src/root.zig index 27e47ab..ab8d645 100644 --- a/src/root.zig +++ b/src/root.zig @@ -416,7 +416,9 @@ test "markdownForSymbol generates markdown cache when cache is empty" { } fn xmlSupplementationDisabled() bool { - return std.posix.getenv("GDOC_NO_XML") != null; + var buf: [256]u8 = undefined; + var fba = std.heap.FixedBufferAllocator.init(&buf); + return std.process.hasEnvVar(fba.allocator(), "GDOC_NO_XML") catch false; } fn fetchXmlDocs(allocator: Allocator, cache_path: []const u8) void {