From e7e57876c7d9df32d345293db8f857c7121e1da5 Mon Sep 17 00:00:00 2001 From: Simon Hartcher Date: Sat, 21 Mar 2026 17:38:20 +1100 Subject: [PATCH 01/19] docs: add design spec for replacing JSON with XML as primary doc source --- ...26-03-21-xml-docs-primary-source-design.md | 184 ++++++++++++++++++ 1 file changed, 184 insertions(+) create mode 100644 docs/superpowers/specs/2026-03-21-xml-docs-primary-source-design.md diff --git a/docs/superpowers/specs/2026-03-21-xml-docs-primary-source-design.md b/docs/superpowers/specs/2026-03-21-xml-docs-primary-source-design.md new file mode 100644 index 0000000..79133fa --- /dev/null +++ b/docs/superpowers/specs/2026-03-21-xml-docs-primary-source-design.md @@ -0,0 +1,184 @@ +# Replace JSON Docs with XML as Primary Source + +**Date:** 2026-03-21 +**Status:** Proposed + +## Summary + +Replace the JSON extension API (`extension_api.json` from `godot --dump-extension-api`) with Godot's XML class documentation as the sole data source for gdoc. The XML docs are the upstream source from which the JSON is generated, and they contain significantly richer information: tutorials, code examples, constructors, operators, property defaults, and method qualifiers. + +## Motivation + +The current architecture uses JSON as the primary source and XML as a supplement to fill gaps. This is backwards — the XML docs are the authoritative, hand-curated source, and the JSON is a machine-generated subset designed for GDExtension binding generators, not human documentation. JSON-only data (hashes, memory sizes, native structures) is irrelevant to a doc viewer. 
New parsing:

- **`<constructors>`** — same structure as methods (name, params, return type, description)
- **`<operators>`** — name like `operator +`, params, return type, description
- **`inherits` attribute** on `<class>` — already parsed, now stored in DocDatabase
- **Method `qualifiers` attribute** — `"virtual const"`, `"static"`, etc.
- **Return types** — `<return type="Vector2" />`
Read symbol markdown from cache +``` + +The `godot` binary is only used for `--version` (to match the tarball URL), never for `--dump-extension-api`. + +`cache.cacheIsPopulated` checks for the `xml_docs/.complete` marker plus sentinel markdown files. + +### Markdown Output Format + +With the expanded data model, generated markdown per class becomes richer: + +```markdown +# Vector2 + +*Inherits: none* + +A 2D vector using floating-point coordinates. + +## Description + +A 2-element structure that can be used to represent 2D coordinates... + +## Tutorials + +- [Math documentation index](https://docs.godotengine.org/en/stable/tutorials/math/index.html) +- [Vector math](https://docs.godotengine.org/en/stable/tutorials/math/vector_math.html) + +## Properties + +- **x: float** = `0.0` — The vector's X component. +- **y: float** = `0.0` — The vector's Y component. + +## Constructors + +- **Vector2()** — Constructs a default-initialized Vector2... +- **Vector2(from: Vector2i)** — Constructs a new Vector2 from Vector2i. +- **Vector2(x: float, y: float)** — Constructs a new Vector2... + +## Methods + +- **abs() -> Vector2** `const` — Returns a new vector with all components in absolute values. +- **angle() -> float** `const` — Returns this vector's angle... + +## Operators + +- **Vector2 * float -> Vector2** — Multiplies each component... +- **Vector2 + Vector2 -> Vector2** — Adds each component... + +## Constants + +- **ZERO = Vector2(0, 0)** — Zero vector... +- **ONE = Vector2(1, 1)** — One vector... 
+``` + +Key additions vs current output: +- Inheritance line +- Property default values +- Constructors section +- Full method signatures with params, return types, and qualifiers +- Operators section + +## Testing + +- Existing snapshot tests updated to reflect new markdown format +- New snapshots for classes with constructors/operators (e.g., Vector2) +- Unit tests for expanded XmlDocParser (constructors, operators, qualifiers, defaults, params) +- Integration test: XML dir → DocDatabase → markdown output roundtrip +- Tests that previously used inline JSON fixtures rewritten to use inline XML fixtures From 7caedaa1703385fe9c2f13548bb17d83dae6e759 Mon Sep 17 00:00:00 2001 From: Simon Hartcher Date: Sat, 21 Mar 2026 17:40:53 +1100 Subject: [PATCH 02/19] docs: address spec review feedback - Fix CLI flag name (--godot-extension-api, not --api-json) - Add DocDatabase.loadFromXmlDir specification - Add builtin class detection strategy (hardcoded Variant type list) - Add enum extraction from XML constants with enum attribute - Specify godot-not-found error behavior - Specify cache sentinel (Object/index.md) - Note --no-xml / GDOC_NO_XML removal - Clarify @GlobalScope vs @GDScript collision handling - Fix: operator already exists in EntryKind, only constructor is new - Clarify test migration: JSON fixture tests deleted, not rewritten --- ...26-03-21-xml-docs-primary-source-design.md | 38 +++++++++++++++---- 1 file changed, 31 insertions(+), 7 deletions(-) diff --git a/docs/superpowers/specs/2026-03-21-xml-docs-primary-source-design.md b/docs/superpowers/specs/2026-03-21-xml-docs-primary-source-design.md index 79133fa..986fc25 100644 --- a/docs/superpowers/specs/2026-03-21-xml-docs-primary-source-design.md +++ b/docs/superpowers/specs/2026-03-21-xml-docs-primary-source-design.md @@ -43,9 +43,15 @@ Entry { } ``` -New `EntryKind` values added: `constructor`, `operator`. 
+**Enum extraction from XML:** XML stores enums within `<constant>` elements using an `enum` attribute (e.g., `<constant name="PROCESS_MODE_INHERIT" value="0" enum="ProcessMode">`). Constants with the same `enum` attribute are grouped into enum entries with kind `enum_value`, keyed as `"ClassName.EnumName.VALUE_NAME"`.
### XmlDocParser Expansion @@ -87,13 +93,15 @@ ParamDoc { ### Removals - **`api.zig`** — entire file (runs `godot --dump-extension-api`) -- **`DocDatabase.loadFromJsonFileLeaky`** — JSON parsing logic in `DocDatabase.zig` -- **`--api-json` CLI flag** — and `api_json_path` parameter threaded through `markdownForSymbol` / `formatAndDisplay` +- **`DocDatabase.loadFromJsonFileLeaky`** — all JSON parsing logic in `DocDatabase.zig` +- **`--godot-extension-api` CLI flag** — and the `api_json_path` parameter threaded through `markdownForSymbol`, `formatAndDisplay`, and `renderWithZigdown` - **`mergeXmlDocs`** in `root.zig` — no more supplementation - **`fetchXmlDocs`** as a separate supplementation step — XML fetch becomes the main path - **`api.generateApiJsonIfNotExists`** call in cache flow - **`getJsonCachePathInDir`** and JSON-specific cache helpers in `cache.zig` - **JSON cache file** (`extension_api.json`) from cache directory +- **`--no-xml` / `GDOC_NO_XML`** — the `no_xml` field on `Config` becomes meaningless since XML is the sole source +- **Tests using JSON fixtures** — tests in `root.zig` that create inline JSON (e.g., `markdownForSymbol returns ApiFileNotFound`) are deleted, not rewritten; the JSON path no longer exists **Kept:** - **`bbcodez`** — still needed for BBCode→Markdown conversion in descriptions @@ -117,9 +125,25 @@ New: 3. Read symbol markdown from cache ``` -The `godot` binary is only used for `--version` (to match the tarball URL), never for `--dump-extension-api`. +The `godot` binary is only used for `--version` (to match the tarball URL), never for `--dump-extension-api`. If `godot` is not on PATH, the tool errors with a clear message: "godot not found. Install Godot and ensure it's on your PATH." This matches the current behavior — `godot` has always been required for JSON export too. 
+ +`cache.cacheIsPopulated` checks for the `xml_docs/.complete` marker plus the existence of at least one generated markdown directory (e.g., `Object/index.md` as the sentinel — `Object` is the root of the class hierarchy and is always present). + +### DocDatabase.loadFromXmlDir + +New entry point replacing `loadFromJsonFileLeaky`. Behavior: -`cache.cacheIsPopulated` checks for the `xml_docs/.complete` marker plus sentinel markdown files. +1. Open `xml_dir` and iterate all `.xml` files +2. For each file, call `XmlDocParser.parseClassDoc` (using an arena allocator so all strings outlive the function) +3. Create a class-level `Entry` with kind determined by the builtin list (see Data Model above) +4. For each member category (methods, properties, signals, constants, constructors, operators), create child entries keyed as `"ClassName.member_name"` +5. Build `signature` strings from parsed params and return types: + - Methods: `(param: Type, param2: Type = default) -> ReturnType` + - Properties: `: Type` (with `= default` if present) + - Constructors: `(param: Type, ...)` (name is always the class name) + - Operators: `OperatorName(other: Type) -> ReturnType` +6. For `@GlobalScope.xml` and `@GDScript.xml`, register utility functions as both qualified (`@GlobalScope.sin`) and top-level (`sin`) entries +7. 
Populate `members` index arrays on class entries pointing to their children ### Markdown Output Format @@ -181,4 +205,4 @@ Key additions vs current output: - New snapshots for classes with constructors/operators (e.g., Vector2) - Unit tests for expanded XmlDocParser (constructors, operators, qualifiers, defaults, params) - Integration test: XML dir → DocDatabase → markdown output roundtrip -- Tests that previously used inline JSON fixtures rewritten to use inline XML fixtures +- Tests using inline JSON fixtures are deleted (JSON path no longer exists); new tests use inline XML strings From 6db21913b4cdcec17882527b8e8711c8d2df9699 Mon Sep 17 00:00:00 2001 From: Simon Hartcher Date: Sat, 21 Mar 2026 17:43:16 +1100 Subject: [PATCH 03/19] docs: address round 2 spec review feedback - Fix tutorials listed as existing Entry field (it's new) - Rename utility_function to global_function (matches codebase) - Note Config.testing update for no_xml removal - Add cli/root.zig to removals list - Add markdownForSymbol signature change section - Make MemberDoc fields explicitly nullable with usage notes - Add replacement error-path test coverage --- ...26-03-21-xml-docs-primary-source-design.md | 20 ++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/docs/superpowers/specs/2026-03-21-xml-docs-primary-source-design.md b/docs/superpowers/specs/2026-03-21-xml-docs-primary-source-design.md index 986fc25..2dbe639 100644 --- a/docs/superpowers/specs/2026-03-21-xml-docs-primary-source-design.md +++ b/docs/superpowers/specs/2026-03-21-xml-docs-primary-source-design.md @@ -31,7 +31,7 @@ Entry { key // "Node2D" or "Node2D.position" name // "Node2D" or "position" kind // class, builtin_class, method, property, signal, constant, - // enum_value, constructor, operator, utility_function + // enum_value, constructor, operator, global_function inherits // "CanvasItem" (classes only) description // full BBCode description brief_description @@ -43,7 +43,7 @@ Entry { } 
``` -New fields on `Entry`: `inherits`, `qualifiers`, `default_value` (the existing struct has `key`, `name`, `kind`, `description`, `brief_description`, `signature`, `members`, `tutorials`). +New fields on `Entry`: `inherits`, `qualifiers`, `default_value`, `tutorials` (the existing struct has `key`, `name`, `kind`, `description`, `brief_description`, `signature`, `members`, `parent_index`; the `Tutorial` type exists at the `DocDatabase` level but is not yet wired into `Entry`). New `EntryKind` value: `constructor` (added to existing set which already includes `operator`). @@ -79,10 +79,10 @@ ClassDoc { MemberDoc { name, description // existing - qualifiers // new - default_value // new - return_type // new - params: []ParamDoc // new + qualifiers // new (nullable; methods/constructors/operators) + default_value // new (nullable; properties/constants) + return_type // new (nullable; not on constants) + params: ?[]ParamDoc // new (nullable; not on constants) } ParamDoc { @@ -100,7 +100,8 @@ ParamDoc { - **`api.generateApiJsonIfNotExists`** call in cache flow - **`getJsonCachePathInDir`** and JSON-specific cache helpers in `cache.zig` - **JSON cache file** (`extension_api.json`) from cache directory -- **`--no-xml` / `GDOC_NO_XML`** — the `no_xml` field on `Config` becomes meaningless since XML is the sole source +- **`--no-xml` / `GDOC_NO_XML`** — the `no_xml` field on `Config` becomes meaningless since XML is the sole source; `Config.testing` updated to remove this field +- **`--godot-extension-api` flag definition in `cli/root.zig`** — flag declaration, reading, and threading through to `formatAndDisplay` - **Tests using JSON fixtures** — tests in `root.zig` that create inline JSON (e.g., `markdownForSymbol returns ApiFileNotFound`) are deleted, not rewritten; the JSON path no longer exists **Kept:** @@ -109,6 +110,10 @@ ParamDoc { - **`XmlDocParser.zig`** — expanded - **`cache.zig`** — adapted to build from XML instead of JSON +### Function Signature Changes + 
+`markdownForSymbol`, `formatAndDisplay`, and `renderWithZigdown` all lose the `api_json_path: ?[]const u8` parameter. The JSON file path codepath (direct load from a user-provided file) is removed entirely. These functions always use the cache flow — there is no "bypass cache" mode. + ### New Cache Flow Current: `godot --dump-extension-api` → JSON → merge XML → generate markdown cache @@ -206,3 +211,4 @@ Key additions vs current output: - Unit tests for expanded XmlDocParser (constructors, operators, qualifiers, defaults, params) - Integration test: XML dir → DocDatabase → markdown output roundtrip - Tests using inline JSON fixtures are deleted (JSON path no longer exists); new tests use inline XML strings +- Replacement error-path tests: XML parse failure (malformed XML), symbol not found in XML-built database, cache directory missing/unwritable From b5f47c65104deeae0adb24882bf695fbb983b0ed Mon Sep 17 00:00:00 2001 From: Simon Hartcher Date: Sat, 21 Mar 2026 17:44:11 +1100 Subject: [PATCH 04/19] docs: drop builtin_class distinction from spec All XML elements become EntryKind.class uniformly. Constructors and operators render via their own entry kinds. 
--- .../specs/2026-03-21-xml-docs-primary-source-design.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/superpowers/specs/2026-03-21-xml-docs-primary-source-design.md b/docs/superpowers/specs/2026-03-21-xml-docs-primary-source-design.md index 2dbe639..b678ed7 100644 --- a/docs/superpowers/specs/2026-03-21-xml-docs-primary-source-design.md +++ b/docs/superpowers/specs/2026-03-21-xml-docs-primary-source-design.md @@ -30,7 +30,7 @@ The `DocDatabase.Entry` struct expands to hold everything XML provides: Entry { key // "Node2D" or "Node2D.position" name // "Node2D" or "position" - kind // class, builtin_class, method, property, signal, constant, + kind // class, method, property, signal, constant, // enum_value, constructor, operator, global_function inherits // "CanvasItem" (classes only) description // full BBCode description @@ -49,7 +49,7 @@ New `EntryKind` value: `constructor` (added to existing set which already includ The `DocDatabase` remains a flat symbol table keyed by dotted paths (`"Vector2.abs"`, `"@GlobalScope.sin"`). Utility functions from `@GlobalScope.xml` and `@GDScript.xml` are registered both under their qualified name and as top-level entries for convenience (e.g., both `"@GlobalScope.abs"` and `"abs"`). If both files define the same function name, `@GlobalScope` wins (it is the canonical source; `@GDScript` contains GDScript-specific helpers like `preload`). -**Builtin class detection:** XML docs don't distinguish builtin classes from regular classes. 
+**No builtin class distinction:** The `builtin_class` EntryKind is removed. All XML `<class>` elements become `EntryKind.class`. Builtins (Vector2, int, etc.) and regular classes (Node, Sprite2D, etc.) render identically — constructors and operators are their own entry kinds and render regardless of parent class type.
Build `signature` strings from parsed params and return types: - Methods: `(param: Type, param2: Type = default) -> ReturnType` From f5dac877883900ba20a897b9126fed721283d86b Mon Sep 17 00:00:00 2001 From: Simon Hartcher Date: Sat, 21 Mar 2026 17:48:44 +1100 Subject: [PATCH 05/19] docs: add implementation plan for XML-only doc source migration --- .../2026-03-21-xml-docs-primary-source.md | 1193 +++++++++++++++++ 1 file changed, 1193 insertions(+) create mode 100644 docs/superpowers/plans/2026-03-21-xml-docs-primary-source.md diff --git a/docs/superpowers/plans/2026-03-21-xml-docs-primary-source.md b/docs/superpowers/plans/2026-03-21-xml-docs-primary-source.md new file mode 100644 index 0000000..59b3c10 --- /dev/null +++ b/docs/superpowers/plans/2026-03-21-xml-docs-primary-source.md @@ -0,0 +1,1193 @@ +# XML Docs as Primary Source — Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Replace JSON extension API with XML class docs as the sole data source for gdoc. + +**Architecture:** Delete all JSON parsing code, expand XmlDocParser to handle constructors/operators/params/qualifiers/defaults, add `DocDatabase.loadFromXmlDir` to build the symbol table from XML files, update cache flow to skip JSON export, update markdown generation to render new fields. 
- `build.zig` — Keep `bbcodez` (per the spec's "Kept" list it is still needed for BBCode→Markdown conversion in descriptions) and keep `xml`; remove only JSON-specific wiring, and only drop `bbcodez` if an audit after the JSON code is deleted confirms it is truly unused
+ \\ + \\ + \\ +; + +test "parses method params and return type" { + const allocator = std.testing.allocator; + const doc = try parseClassDoc(allocator, test_xml_with_params); + defer freeClassDoc(allocator, doc); + + const methods = doc.methods.?; + try std.testing.expectEqual(2, methods.len); + + // First method: get_angle_to + try std.testing.expectEqualStrings("const", methods[0].qualifiers.?); + try std.testing.expectEqualStrings("float", methods[0].return_type.?); + const params0 = methods[0].params.?; + try std.testing.expectEqual(1, params0.len); + try std.testing.expectEqualStrings("point", params0[0].name); + try std.testing.expectEqualStrings("Vector2", params0[0].type); + try std.testing.expect(params0[0].default_value == null); + + // Second method: move_local_x with default param + const params1 = methods[1].params.?; + try std.testing.expectEqual(2, params1.len); + try std.testing.expectEqualStrings("false", params1[1].default_value.?); +} +``` + +- [ ] **Step 2: Run test to verify it fails** + +Run: `zig build test 2>&1 | grep "parses method params"` +Expected: Compilation error — `MemberDoc` has no field `qualifiers`, `return_type`, `params` + +- [ ] **Step 3: Add ParamDoc struct and expand MemberDoc** + +In `src/XmlDocParser.zig`, add `ParamDoc` and expand `MemberDoc`: + +```zig +pub const ParamDoc = struct { + name: []const u8, + type: []const u8, + default_value: ?[]const u8 = null, +}; + +pub const MemberDoc = struct { + name: []const u8, + description: ?[]const u8 = null, + qualifiers: ?[]const u8 = null, + default_value: ?[]const u8 = null, + return_type: ?[]const u8 = null, + params: ?[]ParamDoc = null, +}; +``` + +Update `parseClassDoc` to parse `qualifiers` attribute on ``, and parse `` and `` elements inside methods. Update `readNestedDescription` to also capture params and return type while walking the method element. + +Replace the `` handler with a call to a new `parseMethodElement` function that: +1. 
Note: The existing `test_xml` already has a member like `<member name="position" type="Vector2" default="Vector2(0, 0)">` — we just need to extract the `default` and `type` attributes.
In `parseClassDoc`, update the `<member>` handler to also read `default` and `type` attributes:
+ \\ + \\ + \\ +; + +test "parses constructors" { + const allocator = std.testing.allocator; + const doc = try parseClassDoc(allocator, test_xml_with_constructors_and_operators); + defer freeClassDoc(allocator, doc); + + const ctors = doc.constructors.?; + try std.testing.expectEqual(2, ctors.len); + try std.testing.expectEqualStrings("Vector2", ctors[0].name); + try std.testing.expect(ctors[0].params == null); + try std.testing.expectEqual(2, ctors[1].params.?.len); +} + +test "parses operators" { + const allocator = std.testing.allocator; + const doc = try parseClassDoc(allocator, test_xml_with_constructors_and_operators); + defer freeClassDoc(allocator, doc); + + const ops = doc.operators.?; + try std.testing.expectEqual(1, ops.len); + try std.testing.expectEqualStrings("operator +", ops[0].name); + try std.testing.expectEqualStrings("Vector2", ops[0].return_type.?); + try std.testing.expectEqual(1, ops[0].params.?.len); +} +``` + +- [ ] **Step 2: Run test to verify it fails** + +Run: `zig build test 2>&1 | grep "parses constructors"` +Expected: Compilation error — `ClassDoc` has no field `constructors` + +- [ ] **Step 3: Add constructors and operators to ClassDoc and parsing** + +Add fields to `ClassDoc`: +```zig +constructors: ?[]MemberDoc = null, +operators: ?[]MemberDoc = null, +``` + +Add array lists in `parseClassDoc`: +```zig +var constructors: std.ArrayListUnmanaged(MemberDoc) = .empty; +defer constructors.deinit(allocator); +var operators: std.ArrayListUnmanaged(MemberDoc) = .empty; +defer operators.deinit(allocator); +``` + +Add element handlers for `` and `` — reuse the same `parseMethodElement` function from Task 1 since they have identical XML structure (name, return, params, description). + +Update `freeClassDoc` to free constructors and operators. 
+ +Set on doc: +```zig +doc.constructors = if (constructors.items.len > 0) try constructors.toOwnedSlice(allocator) else null; +doc.operators = if (operators.items.len > 0) try operators.toOwnedSlice(allocator) else null; +``` + +- [ ] **Step 4: Run test to verify it passes** + +Run: `zig build test 2>&1 | grep -E "PASS|FAIL"` +Expected: All tests pass + +- [ ] **Step 5: Commit** + +```bash +git add src/XmlDocParser.zig +git commit -m "feat: parse constructors and operators from XML" +``` + +--- + +### Task 4: Parse constants with enum attribute from XML + +**Files:** +- Modify: `src/XmlDocParser.zig` + +- [ ] **Step 1: Write failing test for enum attribute on constants** + +```zig +const test_xml_with_enums = + \\ + \\ + \\ Base class. + \\ Base node. + \\ + \\ Ready notification. + \\ Inherits process mode. + \\ Always process. + \\ + \\ +; + +test "parses constant value and enum attribute" { + const allocator = std.testing.allocator; + const doc = try parseClassDoc(allocator, test_xml_with_enums); + defer freeClassDoc(allocator, doc); + + const consts = doc.constants.?; + try std.testing.expectEqual(3, consts.len); + + // Regular constant — no enum + try std.testing.expectEqualStrings("13", consts[0].default_value.?); + try std.testing.expect(consts[0].qualifiers == null); + + // Enum constant — enum name stored in qualifiers field + try std.testing.expectEqualStrings("0", consts[1].default_value.?); + try std.testing.expectEqualStrings("ProcessMode", consts[1].qualifiers.?); +} +``` + +Note: We reuse `qualifiers` on `MemberDoc` to store the `enum` attribute name for constants. This avoids adding yet another field just for this case. 
+ +- [ ] **Step 2: Run test to verify it fails** + +Run: `zig build test 2>&1 | grep "parses constant value"` +Expected: FAIL — `default_value` is null on constants + +- [ ] **Step 3: Update constant parsing to read value and enum attributes** + +```zig +} else if (std.mem.eql(u8, name, "constant")) { + const constant_name = try getAttributeAlloc(allocator, reader, "name") orelse continue; + const constant_value = try getAttributeAlloc(allocator, reader, "value"); + const constant_enum = try getAttributeAlloc(allocator, reader, "enum"); + const desc = try readTextContent(allocator, reader); + try constants.append(allocator, .{ + .name = constant_name, + .description = desc, + .default_value = constant_value, + .qualifiers = constant_enum, + }); +} +``` + +- [ ] **Step 4: Run test to verify it passes** + +Run: `zig build test 2>&1 | grep -E "PASS|FAIL"` +Expected: All tests pass + +- [ ] **Step 5: Commit** + +```bash +git add src/XmlDocParser.zig +git commit -m "feat: parse constant value and enum attribute from XML" +``` + +--- + +### Task 5: Update DocDatabase Entry and EntryKind + +**Files:** +- Modify: `src/DocDatabase.zig` + +- [ ] **Step 1: Write failing test for new Entry fields** + +```zig +test "Entry supports inherits, qualifiers, and default_value fields" { + const entry = Entry{ + .key = "Node2D.position", + .name = "position", + .kind = .property, + .inherits = null, + .qualifiers = null, + .default_value = "Vector2(0, 0)", + }; + try std.testing.expectEqualStrings("Vector2(0, 0)", entry.default_value.?); +} + +test "EntryKind has constructor value" { + const kind: EntryKind = .constructor; + try std.testing.expect(kind == .constructor); +} +``` + +- [ ] **Step 2: Run test to verify it fails** + +Expected: Compilation error — `Entry` has no field `inherits`, `qualifiers`, `default_value`; `EntryKind` has no field `constructor` + +- [ ] **Step 3: Update Entry struct and EntryKind enum** + +In `src/DocDatabase.zig`: + +Add to `EntryKind`: +```zig 
+constructor, +``` + +Remove from `EntryKind`: +```zig +builtin_class, +``` + +Add to `Entry`: +```zig +inherits: ?[]const u8 = null, +qualifiers: ?[]const u8 = null, +default_value: ?[]const u8 = null, +``` + +- [ ] **Step 4: Fix all compilation errors from builtin_class removal** + +Search for all references to `builtin_class` and `.builtin_class` in the codebase and change to `.class`. This includes `loadFromJsonLeaky` and tests that assert `EntryKind.builtin_class`. + +- [ ] **Step 5: Run test to verify it passes** + +Run: `zig build test 2>&1 | grep -E "PASS|FAIL"` +Expected: All tests pass + +- [ ] **Step 6: Commit** + +```bash +git add src/DocDatabase.zig +git commit -m "feat: add inherits, qualifiers, default_value to Entry; add constructor EntryKind; remove builtin_class" +``` + +--- + +### Task 6: Add DocDatabase.loadFromXmlDir + +**Files:** +- Modify: `src/DocDatabase.zig` + +- [ ] **Step 1: Write failing test for loadFromXmlDir** + +```zig +test "loadFromXmlDir parses XML files into symbol table" { + const allocator = std.testing.allocator; + + var tmp_dir = std.testing.tmpDir(.{}); + defer tmp_dir.cleanup(); + + const tmp_path = try tmp_dir.dir.realpathAlloc(allocator, "."); + defer allocator.free(tmp_path); + + // Write a minimal XML file + const xml_content = + \\ + \\ + \\ A sprite node. + \\ Displays a 2D texture. + \\ + \\ + \\ + \\ Returns true if flipped horizontally. + \\ + \\ + \\ + \\ The texture to display. 
+ \\ + \\ + ; + try tmp_dir.dir.writeFile(.{ .sub_path = "Sprite2D.xml", .data = xml_content }); + + var arena = std.heap.ArenaAllocator.init(allocator); + defer arena.deinit(); + + const db = try DocDatabase.loadFromXmlDir(arena.allocator(), allocator, tmp_path); + + // Class entry + const class_entry = db.symbols.get("Sprite2D"); + try std.testing.expect(class_entry != null); + try std.testing.expectEqual(EntryKind.class, class_entry.?.kind); + try std.testing.expectEqualStrings("Node2D", class_entry.?.inherits.?); + try std.testing.expect(class_entry.?.members != null); + + // Method entry + const method = db.symbols.get("Sprite2D.is_flipped_h"); + try std.testing.expect(method != null); + try std.testing.expectEqual(EntryKind.method, method.?.kind); + try std.testing.expect(std.mem.indexOf(u8, method.?.signature.?, "bool") != null); + + // Property entry + const prop = db.symbols.get("Sprite2D.texture"); + try std.testing.expect(prop != null); + try std.testing.expectEqual(EntryKind.property, prop.?.kind); + try std.testing.expectEqualStrings("null", prop.?.default_value.?); +} +``` + +- [ ] **Step 2: Run test to verify it fails** + +Expected: Compilation error — `DocDatabase` has no declaration `loadFromXmlDir` + +- [ ] **Step 3: Implement loadFromXmlDir** + +Add a new public function `loadFromXmlDir(arena_allocator: Allocator, tmp_allocator: Allocator, xml_dir_path: []const u8) !DocDatabase`: + +1. Open `xml_dir_path` as a directory with `.iterate = true` +2. Iterate all `.xml` files +3. For each file: + a. Read contents with `tmp_allocator`, parse with `XmlDocParser.parseClassDoc(arena_allocator, content)`, free content + b. Create class `Entry` with `kind = .class`, set `inherits`, `brief_description`, `description`, `tutorials` + c. Put class entry into `symbols` + d. 
For each member category (methods, properties, signals, constants, constructors, operators): + - Create child `Entry` with appropriate kind, dotted key (`"ClassName.member_name"`) + - Build `signature` string (see signature building rules below) + - Set `qualifiers`, `default_value` from parsed data + - Put into `symbols`, track index for parent's `members` array + e. Update class entry's `members` with collected indices +4. Handle `@GlobalScope.xml` and `@GDScript.xml`: also register their methods as top-level entries (e.g., both `"@GlobalScope.sin"` and `"sin"`). `@GlobalScope` entries take precedence. + +**Signature building rules:** +- Methods: `(param: Type, param2: Type = default) -> ReturnType` (omit `-> void`) +- Properties: `: Type` +- Constructors: `(param: Type, ...)` (name is class name, no return type shown) +- Operators: format as `(right: Type) -> ReturnType` +- Constants with value: ` = value` + +Helper function `buildSignature(allocator, member, kind) !?[]const u8` handles this. + +- [ ] **Step 4: Run test to verify it passes** + +Run: `zig build test 2>&1 | grep -E "PASS|FAIL"` +Expected: All tests pass + +- [ ] **Step 5: Write test for @GlobalScope dual-registration** + +```zig +test "loadFromXmlDir registers GlobalScope functions as top-level entries" { + const allocator = std.testing.allocator; + + var tmp_dir = std.testing.tmpDir(.{}); + defer tmp_dir.cleanup(); + + const tmp_path = try tmp_dir.dir.realpathAlloc(allocator, "."); + defer allocator.free(tmp_path); + + const xml_content = + \\ + \\ + \\ Global scope. + \\ Global functions. + \\ + \\ + \\ + \\ + \\ Returns absolute value. 
+ \\ + \\ + \\ + ; + try tmp_dir.dir.writeFile(.{ .sub_path = "@GlobalScope.xml", .data = xml_content }); + + var arena = std.heap.ArenaAllocator.init(allocator); + defer arena.deinit(); + + const db = try DocDatabase.loadFromXmlDir(arena.allocator(), allocator, tmp_path); + + // Should exist under qualified name + try std.testing.expect(db.symbols.get("@GlobalScope.abs") != null); + // Should also exist as top-level + try std.testing.expect(db.symbols.get("abs") != null); +} +``` + +- [ ] **Step 6: Run test to verify it passes** + +Run: `zig build test 2>&1 | grep -E "PASS|FAIL"` +Expected: All tests pass + +- [ ] **Step 7: Commit** + +```bash +git add src/DocDatabase.zig +git commit -m "feat: add DocDatabase.loadFromXmlDir to build symbol table from XML files" +``` + +--- + +### Task 7: Update markdown generation for new fields + +**Files:** +- Modify: `src/DocDatabase.zig` +- Update: `snapshots/*.md` + +- [ ] **Step 1: Write failing test for inheritance in markdown output** + +```zig +test "generateMarkdownForSymbol shows inheritance" { + const allocator = std.testing.allocator; + + var db = DocDatabase{ .symbols = StringArrayHashMap(Entry).empty }; + defer db.symbols.deinit(allocator); + + try db.symbols.put(allocator, "Node2D", Entry{ + .key = "Node2D", + .name = "Node2D", + .kind = .class, + .inherits = "CanvasItem", + .brief_description = "A 2D game object.", + }); + + var allocating: std.Io.Writer.Allocating = .init(allocator); + defer allocating.deinit(); + + try db.generateMarkdownForSymbol(allocator, "Node2D", &allocating.writer); + const written = allocating.written(); + + try std.testing.expect(std.mem.indexOf(u8, written, "*Inherits: CanvasItem*") != null); +} +``` + +- [ ] **Step 2: Run test to verify it fails** + +Expected: FAIL — output doesn't contain inheritance line + +- [ ] **Step 3: Update generateMarkdownForEntry** + +In `generateMarkdownForEntry`, after writing the heading, add: + +```zig +if (entry.inherits) |inherits| { + try 
writer.print("\n*Inherits: {s}*\n", .{inherits}); +} +``` + +Update `generateMemberListings` to add `constructor` and `operator` sections: + +```zig +var constructors: ArrayList(usize) = .empty; +var operators: ArrayList(usize) = .empty; +defer constructors.deinit(allocator); +defer operators.deinit(allocator); + +// In the switch: +.constructor => try constructors.append(allocator, idx), +.operator => try operators.append(allocator, idx), + +// Render sections: +try self.formatMemberSection("Constructors", constructors.items, writer); +// Render constructors BEFORE methods, operators AFTER methods +``` + +Update `formatMemberLine` to show qualifiers and default values: + +```zig +// After signature, before closing ** +if (member.qualifiers) |quals| { + try writer.print("** `{s}`", .{quals}); +} else { + try writer.writeAll("**"); +} + +// For properties with defaults: +if (member.default_value) |default| { + try writer.print(" = `{s}`", .{default}); +} +``` + +- [ ] **Step 4: Run test to verify it passes** + +Run: `zig build test 2>&1 | grep -E "PASS|FAIL"` +Expected: All tests pass + +- [ ] **Step 5: Update snapshot files** + +Run `zig build test` — snapshot tests will update the files. Verify the diffs look correct with `git diff snapshots/`. 
+ +- [ ] **Step 6: Commit** + +```bash +git add src/DocDatabase.zig snapshots/ +git commit -m "feat: render inheritance, constructors, operators, qualifiers, defaults in markdown" +``` + +--- + +### Task 8: Remove JSON parsing from DocDatabase + +**Files:** +- Modify: `src/DocDatabase.zig` +- Modify: `build.zig` + +- [ ] **Step 1: Delete all JSON parsing code** + +Remove from `src/DocDatabase.zig`: +- `RootState` enum +- `loadFromJsonFileLeaky` function +- `loadFromJsonLeaky` function +- `parseClasses` function +- `parseClass` function +- `parseEntry` function +- `parseEntryArray` function +- `nextTokenToMarkdownAlloc` function +- `bbcodeToMarkdown` function (only used by JSON parsing) +- All handler maps and handler functions (`MethodKey`, `ConstantKey`, `SignalKey`, `EnumKey`, `PropertyKey`, handler maps) +- All JSON-related imports (`Scanner`, `Reader`, `Token`) +- The `bbcodez` import +- All tests that use `loadFromJsonLeaky` or `loadFromJsonFileLeaky` (tests at lines 511-996) + +- [ ] **Step 2: Remove bbcodez from DocDatabase module imports in build.zig** + +Check if bbcodez is still used anywhere else. If only DocDatabase used it, remove from `build.zig` module imports. If `root.zig` or other files still use it for BBCode→Markdown conversion of XML descriptions, keep it. + +Look at how descriptions flow: XML descriptions contain BBCode (`[b]`, `[code]`, etc.). Currently the JSON path converts BBCode→Markdown via `bbcodeToMarkdown` during JSON parsing. With XML as source, BBCode conversion needs to happen somewhere — either in `loadFromXmlDir` when building entries, or in `generateMarkdownForEntry` when rendering. + +Decision: Keep bbcodez, move BBCode→Markdown conversion to `loadFromXmlDir` (convert descriptions as they're stored in Entry). Copy the `bbcodeToMarkdown` helper function to be used by `loadFromXmlDir`. + +- [ ] **Step 3: Run tests to verify compilation** + +Run: `zig build test 2>&1 | grep -E "PASS|FAIL"` +Expected: All remaining tests pass. 
JSON tests are gone. + +- [ ] **Step 4: Commit** + +```bash +git add src/DocDatabase.zig build.zig +git commit -m "refactor: remove all JSON parsing code from DocDatabase" +``` + +--- + +### Task 9: Remove api.zig and update Config + +**Files:** +- Delete: `src/api.zig` +- Modify: `src/Config.zig` +- Modify: `src/root.zig` (remove `pub const api` import) + +- [ ] **Step 1: Delete api.zig** + +```bash +rm src/api.zig +``` + +- [ ] **Step 2: Remove no_xml from Config** + +In `src/Config.zig`: +- Remove `no_xml: bool` field from `Config` struct +- Remove `.no_xml = hasEnv("GDOC_NO_XML")` from `init` +- Remove `.no_xml = true` from `Config.testing` +- Update the test that asserts `Config.testing.no_xml` +- Keep `hasEnv` function (may be useful later, and it's tiny) + +- [ ] **Step 3: Remove api import from root.zig** + +Remove `pub const api = @import("api.zig");` from the imports in `src/root.zig`. + +- [ ] **Step 4: Run tests to verify compilation** + +Run: `zig build test 2>&1 | grep -E "PASS|FAIL"` +Expected: All tests pass + +- [ ] **Step 5: Commit** + +```bash +git add -A +git commit -m "refactor: delete api.zig, remove no_xml from Config" +``` + +--- + +### Task 10: Update cache.zig — remove JSON helpers, update cacheIsPopulated + +**Files:** +- Modify: `src/cache.zig` + +- [ ] **Step 1: Write failing test for new cacheIsPopulated logic** + +```zig +test "cacheIsPopulated returns true when Object/index.md exists" { + const allocator = std.testing.allocator; + + var tmp_dir = std.testing.tmpDir(.{}); + defer tmp_dir.cleanup(); + + const cache_dir = try tmp_dir.dir.realpathAlloc(allocator, "."); + defer allocator.free(cache_dir); + + // Create xml_docs/.complete marker + const xml_dir = try std.fmt.allocPrint(allocator, "{s}/xml_docs", .{cache_dir}); + defer allocator.free(xml_dir); + try std.fs.makeDirAbsolute(xml_dir); + const complete_path = try std.fmt.allocPrint(allocator, "{s}/.complete", .{xml_dir}); + defer allocator.free(complete_path); + try 
std.fs.cwd().writeFile(.{ .sub_path = complete_path, .data = "4.4.1" }); + + // Create Object/index.md + const object_dir = try std.fmt.allocPrint(allocator, "{s}/Object", .{cache_dir}); + defer allocator.free(object_dir); + try std.fs.makeDirAbsolute(object_dir); + const index_path = try std.fmt.allocPrint(allocator, "{s}/index.md", .{object_dir}); + defer allocator.free(index_path); + try std.fs.cwd().writeFile(.{ .sub_path = index_path, .data = "# Object\n" }); + + const result = try cacheIsPopulated(allocator, cache_dir); + try std.testing.expect(result); +} +``` + +- [ ] **Step 2: Run test to verify it fails** + +Expected: FAIL — current implementation looks for `extension_api.json` + +- [ ] **Step 3: Update cacheIsPopulated and remove JSON helpers** + +Rewrite `cacheIsPopulated` to check for `xml_docs/.complete` marker and `Object/index.md`: + +```zig +pub fn cacheIsPopulated(allocator: Allocator, cache_path: []const u8) !bool { + // Check xml_docs/.complete marker + const xml_dir = try getXmlDocsDirInCache(allocator, cache_path); + defer allocator.free(xml_dir); + + const marker = source_fetch.readCompleteMarker(allocator, xml_dir); + if (marker) |m| { + allocator.free(m); + } else { + return false; + } + + // Check Object/index.md sentinel + const object_path = try resolveSymbolPath(allocator, cache_path, "Object"); + defer allocator.free(object_path); + + const object_file = std.fs.openFileAbsolute(object_path, .{}) catch |err| switch (err) { + error.FileNotFound => return false, + else => return err, + }; + object_file.close(); + + return true; +} +``` + +Delete `getJsonCachePathInDir`. + +Update tests that used `getJsonCachePathInDir` or checked for `extension_api.json`. 
+ +- [ ] **Step 4: Run tests to verify all pass** + +Run: `zig build test 2>&1 | grep -E "PASS|FAIL"` +Expected: All tests pass + +- [ ] **Step 5: Commit** + +```bash +git add src/cache.zig +git commit -m "refactor: update cacheIsPopulated to check XML marker + Object sentinel, remove JSON helpers" +``` + +--- + +### Task 11: Update CLI — remove --godot-extension-api flag + +**Files:** +- Modify: `src/cli/root.zig` + +- [ ] **Step 1: Remove flag and update runLookup** + +In `src/cli/root.zig`: +- Remove the `addFlag` block for `godot-extension-api` (lines 21-26) +- Remove `api_json_path_raw` and `api_json_path` variables (lines 47-48) +- Remove `api_json_path == null` from the help condition (line 54) — just check `ctx.positional_args.len == 0` +- Update `formatAndDisplay` call to remove `api_json_path` argument +- Remove the `error.ApiFileNotFound` and `error.InvalidApiJson` error handlers + +- [ ] **Step 2: Run build to verify compilation** + +Run: `zig build 2>&1` +Expected: Compilation errors in `root.zig` because `formatAndDisplay` signature hasn't changed yet. That's OK — Task 12 handles that. + +- [ ] **Step 3: Commit (can wait for Task 12)** + +Will commit together with root.zig changes. + +--- + +### Task 12: Update root.zig — remove JSON paths, simplify cache flow + +**Files:** +- Modify: `src/root.zig` + +- [ ] **Step 1: Remove api_json_path from function signatures** + +Update `markdownForSymbol`, `formatAndDisplay`, and `renderWithZigdown` to remove `api_json_path: ?[]const u8` parameter. + +- [ ] **Step 2: Remove the JSON-direct-load codepath from markdownForSymbol** + +Delete the `if (api_json_file)` branch that loads JSON directly. The function now always uses the cache flow. 
+ +- [ ] **Step 3: Simplify the cache rebuild flow** + +Replace the current cache rebuild logic with: + +```zig +if (needs_full_rebuild) { + try cache.ensureDirectoryExists(cache_path); + + // Fetch XML docs + const xml_dir = try cache.getXmlDocsDirInCache(allocator, cache_path); + defer allocator.free(xml_dir); + try cache.ensureDirectoryExists(xml_dir); + + const version = source_fetch.getGodotVersion(allocator) orelse + return error.GodotNotFound; + defer version.deinit(allocator); + + // Download and extract XML + var url_buf: [256]u8 = undefined; + const url = source_fetch.buildTarballUrl(&url_buf, version) orelse + return error.GodotNotFound; + + var spinner = Spinner{ .message = "Downloading XML docs..." }; + spinner.start(); + + source_fetch.fetchAndExtractXmlDocs(allocator, url, xml_dir) catch |err| { + if (version.hash) |hash| { + var hash_url_buf: [256]u8 = undefined; + const hash_url = source_fetch.buildTarballUrlFromHash(&hash_url_buf, hash) orelse { + spinner.finish(); + return err; + }; + source_fetch.fetchAndExtractXmlDocs(allocator, hash_url, xml_dir) catch { + spinner.finish(); + return err; + }; + } else { + spinner.finish(); + return err; + } + }; + + spinner.finish(); + + // Write version marker + var version_buf: [64]u8 = undefined; + const version_str = version.formatVersion(&version_buf) orelse return error.GodotNotFound; + try source_fetch.writeCompleteMarker(allocator, xml_dir, version_str); + + // Build database from XML + var build_spinner = Spinner{ .message = "Building documentation cache..." }; + build_spinner.start(); + defer build_spinner.finish(); + + var arena = ArenaAllocator.init(allocator); + defer arena.deinit(); + + const db = try DocDatabase.loadFromXmlDir(arena.allocator(), allocator, xml_dir); + try cache.generateMarkdownCache(allocator, db, cache_path); +} +``` + +- [ ] **Step 4: Remove mergeXmlDocs and fetchXmlDocs functions** + +Delete both functions from `root.zig`. 
+ +- [ ] **Step 5: Remove LookupError.ApiFileNotFound** + +Remove from `LookupError` error set. Add `GodotNotFound` if not already there. + +- [ ] **Step 6: Delete JSON fixture tests** + +Delete tests in `root.zig` that create inline JSON or test `api_json_path`: +- `markdownForSymbol returns ApiFileNotFound for nonexistent file` +- `markdownForSymbol returns InvalidApiJson for malformed JSON` +- `markdownForSymbol loads from custom API file and finds symbol` +- `markdownForSymbol returns SymbolNotFound when symbol doesn't exist` +- `markdownForSymbol works with relative path` +- `formatAndDisplay with markdown format produces markdown output` +- `formatAndDisplay with terminal format produces terminal output` + +Keep cache-flow tests but update them to not create `extension_api.json`. + +- [ ] **Step 7: Update imports** + +Remove `pub const api = @import("api.zig");` if not already done. + +- [ ] **Step 8: Run tests to verify everything compiles and passes** + +Run: `zig build test 2>&1 | grep -E "PASS|FAIL"` +Expected: All tests pass + +- [ ] **Step 9: Commit CLI and root.zig together** + +```bash +git add src/root.zig src/cli/root.zig +git commit -m "refactor: remove JSON paths from root.zig and CLI, simplify to XML-only cache flow" +``` + +--- + +### Task 13: Update remaining cache-flow tests + +**Files:** +- Modify: `src/root.zig` +- Modify: `src/cache.zig` + +- [ ] **Step 1: Update markdownForSymbol cache tests** + +Update `markdownForSymbol reads from markdown cache when available` — remove the `extension_api.json` creation, ensure `cacheIsPopulated` returns true by creating the new sentinels (xml_docs/.complete + Object/index.md). + +Update `markdownForSymbol generates markdown cache when cache is empty` — this test now needs XML docs in the cache instead of JSON. Create a minimal XML file in `xml_docs/` dir with a `.complete` marker, or restructure to test `loadFromXmlDir` + `generateMarkdownCache` directly. 
+ +- [ ] **Step 2: Run all tests** + +Run: `zig build test 2>&1 | grep -E "PASS|FAIL"` +Expected: All tests pass + +- [ ] **Step 3: Run the full build** + +Run: `zig build` +Expected: Clean build, no errors + +- [ ] **Step 4: Commit** + +```bash +git add src/root.zig src/cache.zig +git commit -m "test: update cache-flow tests for XML-only architecture" +``` + +--- + +### Task 14: Integration test — XML dir to markdown roundtrip + +**Files:** +- Modify: `src/DocDatabase.zig` (or `src/root.zig`) + +- [ ] **Step 1: Write integration test** + +```zig +test "XML dir to markdown roundtrip" { + const allocator = std.testing.allocator; + + var tmp_dir = std.testing.tmpDir(.{}); + defer tmp_dir.cleanup(); + + const tmp_path = try tmp_dir.dir.realpathAlloc(allocator, "."); + defer allocator.free(tmp_path); + + // Write a realistic XML doc + const xml_content = + \\ + \\ + \\ A test class. + \\ A class for testing. + \\ + \\ https://example.com + \\ + \\ + \\ + \\ + \\ Default constructor. + \\ + \\ + \\ + \\ + \\ + \\ + \\ Does a thing. + \\ + \\ + \\ + \\ Movement speed. + \\ + \\ + \\ Maximum speed. 
+ \\ + \\ + ; + + // Write XML to a subdir + const xml_dir = try std.fmt.allocPrint(allocator, "{s}/xml", .{tmp_path}); + defer allocator.free(xml_dir); + try std.fs.makeDirAbsolute(xml_dir); + const xml_path = try std.fmt.allocPrint(allocator, "{s}/TestClass.xml", .{xml_dir}); + defer allocator.free(xml_path); + try std.fs.cwd().writeFile(.{ .sub_path = xml_path, .data = xml_content }); + + // Load from XML + var arena = std.heap.ArenaAllocator.init(allocator); + defer arena.deinit(); + const db = try DocDatabase.loadFromXmlDir(arena.allocator(), allocator, xml_dir); + + // Generate markdown + const cache_dir = try std.fmt.allocPrint(allocator, "{s}/cache", .{tmp_path}); + defer allocator.free(cache_dir); + try cache.generateMarkdownCache(allocator, db, cache_dir); + + // Read back the class markdown + var output: std.Io.Writer.Allocating = .init(allocator); + defer output.deinit(); + try cache.readSymbolMarkdown(allocator, "TestClass", cache_dir, &output.writer); + const written = output.written(); + + // Verify key content + try std.testing.expect(std.mem.indexOf(u8, written, "# TestClass") != null); + try std.testing.expect(std.mem.indexOf(u8, written, "*Inherits: RefCounted*") != null); + try std.testing.expect(std.mem.indexOf(u8, written, "## Tutorials") != null); + try std.testing.expect(std.mem.indexOf(u8, written, "## Constructors") != null); + try std.testing.expect(std.mem.indexOf(u8, written, "## Methods") != null); + try std.testing.expect(std.mem.indexOf(u8, written, "## Properties") != null); + try std.testing.expect(std.mem.indexOf(u8, written, "## Constants") != null); + try std.testing.expect(std.mem.indexOf(u8, written, "do_thing") != null); + try std.testing.expect(std.mem.indexOf(u8, written, "speed") != null); +} +``` + +- [ ] **Step 2: Run test** + +Run: `zig build test 2>&1 | grep "XML dir to markdown"` +Expected: PASS + +- [ ] **Step 3: Update snapshot files** + +Run full test suite, verify snapshots: +```bash +zig build test +git diff 
snapshots/ +``` + +- [ ] **Step 4: Commit** + +```bash +git add src/ snapshots/ +git commit -m "test: add XML-to-markdown roundtrip integration test, update snapshots" +``` + +--- + +### Task 15: Final cleanup and verification + +**Files:** +- All modified files + +- [ ] **Step 1: Verify no dead code remains** + +Search for any remaining references to removed items: + +```bash +grep -rn "api_json\|extension_api\|loadFromJson\|builtin_class\|no_xml\|GDOC_NO_XML\|api\.zig\|mergeXmlDocs\|fetchXmlDocs" src/ +``` + +Expected: No matches + +- [ ] **Step 2: Run full test suite** + +```bash +zig build test +``` + +Expected: All tests pass, no warnings + +- [ ] **Step 3: Test the binary manually** + +```bash +zig build run -- --clear-cache +zig build run -- Node2D +zig build run -- Vector2 +zig build run -- sin +zig build run -- Node2D.position +``` + +Expected: Each command shows documentation with the new format (inheritance, constructors where applicable, etc.) + +- [ ] **Step 4: Final commit if any remaining changes** + +```bash +git add -A +git commit -m "chore: final cleanup for XML-only doc source migration" +``` From d904f52b3ec245ad2efb917fdb3260bacb981321 Mon Sep 17 00:00:00 2001 From: Simon Hartcher Date: Sat, 21 Mar 2026 20:00:45 +1100 Subject: [PATCH 06/19] docs: update implementation plan after review feedback - Merge Task 11 (CLI) into Task 12 (root.zig) since they can't compile independently - Move InvalidApiJson removal to Task 8 (alongside JSON deletion) - Fix error name in XML parse failure test (was referencing nonexistent error) - Add enum grouping test to Task 6 - Add operator rendering assertions to integration test - Add BBCode conversion test to Task 8 - Update signal parsing to reuse parseMethodElement for param capture - Renumber tasks 12-15 after merge --- .../2026-03-21-xml-docs-primary-source.md | 248 +++++++++++++++--- 1 file changed, 207 insertions(+), 41 deletions(-) diff --git a/docs/superpowers/plans/2026-03-21-xml-docs-primary-source.md 
b/docs/superpowers/plans/2026-03-21-xml-docs-primary-source.md index 59b3c10..a347856 100644 --- a/docs/superpowers/plans/2026-03-21-xml-docs-primary-source.md +++ b/docs/superpowers/plans/2026-03-21-xml-docs-primary-source.md @@ -21,7 +21,7 @@ - `src/cache.zig` — Remove `getJsonCachePathInDir`, update `cacheIsPopulated` to check `Object/index.md` instead of JSON file - `src/Config.zig` — Remove `no_xml` field, update `Config.testing` - `src/cli/root.zig` — Remove `--godot-extension-api` flag, update error handling -- `build.zig` — Remove `bbcodez` from module imports (it's only used by JSON parsing in DocDatabase), keep `xml` +- `build.zig` — Keep `bbcodez` (still needed for BBCode→Markdown in descriptions), keep `xml` **Delete:** - `src/api.zig` — Entire file @@ -277,7 +277,7 @@ var operators: std.ArrayListUnmanaged(MemberDoc) = .empty; defer operators.deinit(allocator); ``` -Add element handlers for `` and `` — reuse the same `parseMethodElement` function from Task 1 since they have identical XML structure (name, return, params, description). +Add element handlers for ``, ``, and `` — reuse the same `parseMethodElement` function from Task 1 since they all have identical XML structure (name, optional return, optional params, description). Update the existing `` handler to use `parseMethodElement` so signal params are captured (e.g., `child_entered_tree(node: Node)`). Update `freeClassDoc` to free constructors and operators. @@ -528,8 +528,9 @@ Add a new public function `loadFromXmlDir(arena_allocator: Allocator, tmp_alloca - Build `signature` string (see signature building rules below) - Set `qualifiers`, `default_value` from parsed data - Put into `symbols`, track index for parent's `members` array + - **Enum grouping for constants:** If a constant has an `enum` attribute (stored in `qualifiers` by the parser), key it as `"ClassName.EnumName.VALUE_NAME"` with `kind = .enum_value`. 
Constants without `enum` attribute are keyed as `"ClassName.CONSTANT_NAME"` with `kind = .constant`. e. Update class entry's `members` with collected indices -4. Handle `@GlobalScope.xml` and `@GDScript.xml`: also register their methods as top-level entries (e.g., both `"@GlobalScope.sin"` and `"sin"`). `@GlobalScope` entries take precedence. +4. Handle `@GlobalScope.xml` and `@GDScript.xml`: also register their methods as top-level entries with `kind = .global_function` (not `.method`), e.g., both `"@GlobalScope.sin"` and `"sin"`. `@GlobalScope` entries take precedence over `@GDScript`. **Signature building rules:** - Methods: `(param: Type, param2: Type = default) -> ReturnType` (omit `-> void`) @@ -545,7 +546,59 @@ Helper function `buildSignature(allocator, member, kind) !?[]const u8` handles t Run: `zig build test 2>&1 | grep -E "PASS|FAIL"` Expected: All tests pass -- [ ] **Step 5: Write test for @GlobalScope dual-registration** +- [ ] **Step 5: Write test for enum grouping in loadFromXmlDir** + +```zig +test "loadFromXmlDir groups constants with enum attribute as enum_value entries" { + const allocator = std.testing.allocator; + + var tmp_dir = std.testing.tmpDir(.{}); + defer tmp_dir.cleanup(); + + const tmp_path = try tmp_dir.dir.realpathAlloc(allocator, "."); + defer allocator.free(tmp_path); + + const xml_content = + \\ + \\ + \\ Base class. + \\ Base node. + \\ + \\ Ready. + \\ Inherits. + \\ Always. 
+ \\ + \\ + ; + try tmp_dir.dir.writeFile(.{ .sub_path = "Node.xml", .data = xml_content }); + + var arena = std.heap.ArenaAllocator.init(allocator); + defer arena.deinit(); + + const db = try DocDatabase.loadFromXmlDir(arena.allocator(), allocator, tmp_path); + + // Regular constant: keyed as ClassName.CONSTANT_NAME + const notif = db.symbols.get("Node.NOTIFICATION_READY"); + try std.testing.expect(notif != null); + try std.testing.expectEqual(EntryKind.constant, notif.?.kind); + + // Enum constant: keyed as ClassName.EnumName.VALUE_NAME + const inherit = db.symbols.get("Node.ProcessMode.PROCESS_MODE_INHERIT"); + try std.testing.expect(inherit != null); + try std.testing.expectEqual(EntryKind.enum_value, inherit.?.kind); + + const always = db.symbols.get("Node.ProcessMode.PROCESS_MODE_ALWAYS"); + try std.testing.expect(always != null); + try std.testing.expectEqual(EntryKind.enum_value, always.?.kind); +} +``` + +- [ ] **Step 6: Run test to verify it passes** + +Run: `zig build test 2>&1 | grep -E "PASS|FAIL"` +Expected: All tests pass + +- [ ] **Step 7: Write test for @GlobalScope dual-registration** ```zig test "loadFromXmlDir registers GlobalScope functions as top-level entries" { @@ -585,12 +638,12 @@ test "loadFromXmlDir registers GlobalScope functions as top-level entries" { } ``` -- [ ] **Step 6: Run test to verify it passes** +- [ ] **Step 8: Run test to verify it passes** Run: `zig build test 2>&1 | grep -E "PASS|FAIL"` Expected: All tests pass -- [ ] **Step 7: Commit** +- [ ] **Step 9: Commit** ```bash git add src/DocDatabase.zig @@ -714,10 +767,10 @@ Remove from `src/DocDatabase.zig`: - `parseEntry` function - `parseEntryArray` function - `nextTokenToMarkdownAlloc` function -- `bbcodeToMarkdown` function (only used by JSON parsing) +- `bbcodeToMarkdown` function (only used by JSON parsing — keep a copy if needed by `loadFromXmlDir` for BBCode→Markdown conversion of descriptions) - All handler maps and handler functions (`MethodKey`, `ConstantKey`, 
`SignalKey`, `EnumKey`, `PropertyKey`, handler maps) - All JSON-related imports (`Scanner`, `Reader`, `Token`) -- The `bbcodez` import +- `InvalidApiJson` from the `Error` enum (this error was only produced by JSON parsing) - All tests that use `loadFromJsonLeaky` or `loadFromJsonFileLeaky` (tests at lines 511-996) - [ ] **Step 2: Remove bbcodez from DocDatabase module imports in build.zig** @@ -728,12 +781,45 @@ Look at how descriptions flow: XML descriptions contain BBCode (`[b]`, `[code]`, Decision: Keep bbcodez, move BBCode→Markdown conversion to `loadFromXmlDir` (convert descriptions as they're stored in Entry). Copy the `bbcodeToMarkdown` helper function to be used by `loadFromXmlDir`. -- [ ] **Step 3: Run tests to verify compilation** +- [ ] **Step 3: Write test verifying BBCode conversion in loadFromXmlDir** + +```zig +test "loadFromXmlDir converts BBCode descriptions to Markdown" { + const allocator = std.testing.allocator; + + var tmp_dir = std.testing.tmpDir(.{}); + defer tmp_dir.cleanup(); + + const tmp_path = try tmp_dir.dir.realpathAlloc(allocator, "."); + defer allocator.free(tmp_path); + + const xml_content = + \\ + \\ + \\ Has [b]bold[/b] text. + \\ Uses [code]code[/code] and [i]italic[/i]. + \\ + ; + try tmp_dir.dir.writeFile(.{ .sub_path = "TestBBCode.xml", .data = xml_content }); + + var arena = std.heap.ArenaAllocator.init(allocator); + defer arena.deinit(); + + const db = try DocDatabase.loadFromXmlDir(arena.allocator(), allocator, tmp_path); + const entry = db.symbols.get("TestBBCode").?; + + // BBCode should be converted to Markdown + try std.testing.expect(std.mem.indexOf(u8, entry.brief_description.?, "**bold**") != null); + try std.testing.expect(std.mem.indexOf(u8, entry.description.?, "`code`") != null); +} +``` + +- [ ] **Step 4: Run tests to verify compilation and BBCode test passes** Run: `zig build test 2>&1 | grep -E "PASS|FAIL"` -Expected: All remaining tests pass. JSON tests are gone. +Expected: All remaining tests pass. 
JSON tests are gone. BBCode conversion test passes. -- [ ] **Step 4: Commit** +- [ ] **Step 5: Commit** ```bash git add src/DocDatabase.zig build.zig @@ -873,12 +959,13 @@ git commit -m "refactor: update cacheIsPopulated to check XML marker + Object se --- -### Task 11: Update CLI — remove --godot-extension-api flag +### Task 11: Update CLI and root.zig — remove JSON paths, simplify cache flow **Files:** - Modify: `src/cli/root.zig` +- Modify: `src/root.zig` -- [ ] **Step 1: Remove flag and update runLookup** +- [ ] **Step 1: Remove --godot-extension-api flag from CLI** In `src/cli/root.zig`: - Remove the `addFlag` block for `godot-extension-api` (lines 21-26) @@ -887,31 +974,15 @@ In `src/cli/root.zig`: - Update `formatAndDisplay` call to remove `api_json_path` argument - Remove the `error.ApiFileNotFound` and `error.InvalidApiJson` error handlers -- [ ] **Step 2: Run build to verify compilation** - -Run: `zig build 2>&1` -Expected: Compilation errors in `root.zig` because `formatAndDisplay` signature hasn't changed yet. That's OK — Task 12 handles that. - -- [ ] **Step 3: Commit (can wait for Task 12)** - -Will commit together with root.zig changes. - ---- - -### Task 12: Update root.zig — remove JSON paths, simplify cache flow - -**Files:** -- Modify: `src/root.zig` - -- [ ] **Step 1: Remove api_json_path from function signatures** +- [ ] **Step 2: Remove api_json_path from function signatures in root.zig** Update `markdownForSymbol`, `formatAndDisplay`, and `renderWithZigdown` to remove `api_json_path: ?[]const u8` parameter. -- [ ] **Step 2: Remove the JSON-direct-load codepath from markdownForSymbol** +- [ ] **Step 3: Remove the JSON-direct-load codepath from markdownForSymbol** Delete the `if (api_json_file)` branch that loads JSON directly. The function now always uses the cache flow. 
-- [ ] **Step 3: Simplify the cache rebuild flow** +- [ ] **Step 4: Simplify the cache rebuild flow** Replace the current cache rebuild logic with: @@ -973,15 +1044,16 @@ if (needs_full_rebuild) { } ``` -- [ ] **Step 4: Remove mergeXmlDocs and fetchXmlDocs functions** +- [ ] **Step 5: Remove mergeXmlDocs and fetchXmlDocs functions** Delete both functions from `root.zig`. -- [ ] **Step 5: Remove LookupError.ApiFileNotFound** +- [ ] **Step 6: Remove error types for deleted codepaths** -Remove from `LookupError` error set. Add `GodotNotFound` if not already there. +- Remove `ApiFileNotFound` from `LookupError` error set in `root.zig`. Add `GodotNotFound` if not already there. +- `InvalidApiJson` was already removed from `DocDatabase.Error` in Task 8. -- [ ] **Step 6: Delete JSON fixture tests** +- [ ] **Step 7: Delete JSON fixture tests** Delete tests in `root.zig` that create inline JSON or test `api_json_path`: - `markdownForSymbol returns ApiFileNotFound for nonexistent file` @@ -994,16 +1066,16 @@ Delete tests in `root.zig` that create inline JSON or test `api_json_path`: Keep cache-flow tests but update them to not create `extension_api.json`. -- [ ] **Step 7: Update imports** +- [ ] **Step 8: Update imports** Remove `pub const api = @import("api.zig");` if not already done. 
-- [ ] **Step 8: Run tests to verify everything compiles and passes** +- [ ] **Step 9: Run tests to verify everything compiles and passes** Run: `zig build test 2>&1 | grep -E "PASS|FAIL"` Expected: All tests pass -- [ ] **Step 9: Commit CLI and root.zig together** +- [ ] **Step 10: Commit CLI and root.zig together** ```bash git add src/root.zig src/cli/root.zig @@ -1012,7 +1084,7 @@ git commit -m "refactor: remove JSON paths from root.zig and CLI, simplify to XM --- -### Task 13: Update remaining cache-flow tests +### Task 12: Update remaining cache-flow tests **Files:** - Modify: `src/root.zig` @@ -1043,7 +1115,7 @@ git commit -m "test: update cache-flow tests for XML-only architecture" --- -### Task 14: Integration test — XML dir to markdown roundtrip +### Task 13: Integration test — XML dir to markdown roundtrip **Files:** - Modify: `src/DocDatabase.zig` (or `src/root.zig`) @@ -1085,6 +1157,13 @@ test "XML dir to markdown roundtrip" { \\ \\ Movement speed. \\ + \\ + \\ + \\ + \\ + \\ Multiplies by a scalar. + \\ + \\ \\ \\ Maximum speed. 
\\ @@ -1123,8 +1202,10 @@ test "XML dir to markdown roundtrip" { try std.testing.expect(std.mem.indexOf(u8, written, "## Methods") != null); try std.testing.expect(std.mem.indexOf(u8, written, "## Properties") != null); try std.testing.expect(std.mem.indexOf(u8, written, "## Constants") != null); + try std.testing.expect(std.mem.indexOf(u8, written, "## Operators") != null); try std.testing.expect(std.mem.indexOf(u8, written, "do_thing") != null); try std.testing.expect(std.mem.indexOf(u8, written, "speed") != null); + try std.testing.expect(std.mem.indexOf(u8, written, "operator *") != null); } ``` @@ -1150,6 +1231,91 @@ git commit -m "test: add XML-to-markdown roundtrip integration test, update snap --- +### Task 14: Error-path replacement tests + +**Files:** +- Modify: `src/DocDatabase.zig` or `src/root.zig` + +- [ ] **Step 1: Write test for XML parse failure** + +`loadFromXmlDir` should propagate zig-xml parse errors when encountering malformed XML. The specific error variant depends on what `zig-xml` returns — check the actual error set from `XmlDocParser.parseClassDoc` (likely `error.MalformedXml` or a zig-xml `SyntaxError`). Use `expectError` with the correct variant: + +```zig +test "loadFromXmlDir returns error for malformed XML" { + const allocator = std.testing.allocator; + + var tmp_dir = std.testing.tmpDir(.{}); + defer tmp_dir.cleanup(); + + const tmp_path = try tmp_dir.dir.realpathAlloc(allocator, "."); + defer allocator.free(tmp_path); + + // Write a malformed XML file + try tmp_dir.dir.writeFile(.{ .sub_path = "Bad.xml", .data = "\n + \\ + \\ Base class. + \\ Base node class. 
+ \\ + }); + + var arena = std.heap.ArenaAllocator.init(allocator); + defer arena.deinit(); + + const db = try DocDatabase.loadFromXmlDir(arena.allocator(), allocator, tmp_path); + const result = db.lookupSymbolExact("NonExistent"); + try std.testing.expectError(DocDatabase.Error.SymbolNotFound, result); +} +``` + +- [ ] **Step 3: Write test for missing cache directory** + +```zig +test "cacheIsPopulated returns false for nonexistent directory" { + const allocator = std.testing.allocator; + const result = try cache.cacheIsPopulated(allocator, "/tmp/gdoc-nonexistent-test-path"); + try std.testing.expect(!result); +} +``` + +- [ ] **Step 4: Run tests** + +Run: `zig build test 2>&1 | grep -E "PASS|FAIL"` +Expected: All tests pass + +- [ ] **Step 5: Commit** + +```bash +git add src/DocDatabase.zig src/cache.zig +git commit -m "test: add error-path tests for XML parse failure, missing symbols, missing cache" +``` + +--- + ### Task 15: Final cleanup and verification **Files:** From 23da0dce0e94ba75acc32884d90e4ab48f3b5218 Mon Sep 17 00:00:00 2001 From: Simon Hartcher Date: Sat, 21 Mar 2026 20:04:33 +1100 Subject: [PATCH 07/19] feat: parse method params, return type, and qualifiers from XML --- src/XmlDocParser.zig | 121 +++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 118 insertions(+), 3 deletions(-) diff --git a/src/XmlDocParser.zig b/src/XmlDocParser.zig index 155c953..43476c6 100644 --- a/src/XmlDocParser.zig +++ b/src/XmlDocParser.zig @@ -9,9 +9,19 @@ pub const Tutorial = struct { url: []const u8, }; +pub const ParamDoc = struct { + name: []const u8, + type: []const u8, + default_value: ?[]const u8 = null, +}; + pub const MemberDoc = struct { name: []const u8, description: ?[]const u8 = null, + qualifiers: ?[]const u8 = null, + default_value: ?[]const u8 = null, + return_type: ?[]const u8 = null, + params: ?[]ParamDoc = null, }; pub const ClassDoc = struct { @@ -80,9 +90,8 @@ pub fn parseClassDoc(allocator: Allocator, xml_content: []const u8) ParseError!C 
} try tutorials.append(allocator,.{ .title = title, .url = url }); } else if (std.mem.eql(u8, name, "method")) { - const method_name = try getAttributeAlloc(allocator, reader, "name") orelse continue; - const desc = try readNestedDescription(allocator, reader, "method"); - try methods.append(allocator,.{ .name = method_name, .description = desc }); + const method_doc = try parseMethodElement(allocator, reader); + try methods.append(allocator, method_doc); } else if (std.mem.eql(u8, name, "member")) { const member_name = try getAttributeAlloc(allocator, reader, "name") orelse continue; const desc = try readTextContent(allocator, reader); @@ -131,6 +140,17 @@ pub fn freeClassDoc(allocator: Allocator, doc: ClassDoc) void { for (members) |m| { allocator.free(m.name); if (m.description) |d| allocator.free(d); + if (m.qualifiers) |q| allocator.free(q); + if (m.default_value) |dv| allocator.free(dv); + if (m.return_type) |rt| allocator.free(rt); + if (m.params) |params| { + for (params) |p| { + allocator.free(p.name); + allocator.free(p.type); + if (p.default_value) |pdv| allocator.free(pdv); + } + allocator.free(params); + } } allocator.free(members); } @@ -196,6 +216,57 @@ fn readNestedDescription(allocator: Allocator, reader: *xml.Reader, container_el return null; } +fn parseMethodElement(allocator: Allocator, reader: *xml.Reader) ParseError!MemberDoc { + const method_name = try getAttributeAlloc(allocator, reader, "name") orelse return ParseError.MissingNameAttribute; + const qualifiers = try getAttributeAlloc(allocator, reader, "qualifiers"); + + var return_type: ?[]const u8 = null; + var description: ?[]const u8 = null; + var params: std.ArrayListUnmanaged(ParamDoc) = .empty; + defer params.deinit(allocator); + + var depth: usize = 1; + while (depth > 0) { + const node = reader.read() catch return ParseError.MalformedXml; + switch (node) { + .eof => break, + .element_start => { + const name = reader.elementName(); + if (depth == 1 and std.mem.eql(u8, name, 
"return")) { + return_type = try getAttributeAlloc(allocator, reader, "type"); + depth += 1; + } else if (depth == 1 and std.mem.eql(u8, name, "param")) { + const param_name = try getAttributeAlloc(allocator, reader, "name") orelse try allocator.dupe(u8, ""); + const param_type = try getAttributeAlloc(allocator, reader, "type") orelse try allocator.dupe(u8, ""); + const param_default = try getAttributeAlloc(allocator, reader, "default"); + try params.append(allocator, .{ + .name = param_name, + .type = param_type, + .default_value = param_default, + }); + depth += 1; + } else if (depth == 1 and std.mem.eql(u8, name, "description")) { + description = try readTextContent(allocator, reader); + } else { + depth += 1; + } + }, + .element_end => { + depth -= 1; + }, + else => continue, + } + } + + return .{ + .name = method_name, + .description = description, + .qualifiers = qualifiers, + .return_type = return_type, + .params = if (params.items.len > 0) try params.toOwnedSlice(allocator) else null, + }; +} + fn expandDocsUrl(allocator: Allocator, url: []const u8) Allocator.Error![]const u8 { const prefix = "$DOCS_URL"; if (std.mem.startsWith(u8, url, prefix)) { @@ -328,6 +399,50 @@ test "parses constants with descriptions" { try std.testing.expectEqualStrings("Maximum allowed value.", consts[0].description.?); } +const test_xml_with_params = + \\ + \\ + \\ A 2D game object. + \\ Node2D is the base class for 2D. + \\ + \\ + \\ + \\ + \\ Returns the angle between the node and the point. + \\ + \\ + \\ + \\ + \\ + \\ Applies a local translation on the X axis. 
+ \\ + \\ + \\ +; + +test "parses method params and return type" { + const allocator = std.testing.allocator; + const doc = try parseClassDoc(allocator, test_xml_with_params); + defer freeClassDoc(allocator, doc); + + const methods = doc.methods.?; + try std.testing.expectEqual(2, methods.len); + + // First method: get_angle_to + try std.testing.expectEqualStrings("const", methods[0].qualifiers.?); + try std.testing.expectEqualStrings("float", methods[0].return_type.?); + const params0 = methods[0].params.?; + try std.testing.expectEqual(1, params0.len); + try std.testing.expectEqualStrings("point", params0[0].name); + try std.testing.expectEqualStrings("Vector2", params0[0].type); + try std.testing.expect(params0[0].default_value == null); + + // Second method: move_local_x with default param + const params1 = methods[1].params.?; + try std.testing.expectEqual(2, params1.len); + try std.testing.expectEqualStrings("false", params1[1].default_value.?); +} + test "freeClassDoc doesn't leak" { const allocator = std.testing.allocator; const doc = try parseClassDoc(allocator, test_xml); From 49edb8bc4760fea9e59aaff41ae0430df314d23e Mon Sep 17 00:00:00 2001 From: Simon Hartcher Date: Sat, 21 Mar 2026 20:07:56 +1100 Subject: [PATCH 08/19] feat: parse property type and default value from XML --- src/XmlDocParser.zig | 20 +++++++++++++++++++- 1 file changed, 19 insertions(+), 1 deletion(-) diff --git a/src/XmlDocParser.zig b/src/XmlDocParser.zig index 43476c6..d4067d0 100644 --- a/src/XmlDocParser.zig +++ b/src/XmlDocParser.zig @@ -94,8 +94,15 @@ pub fn parseClassDoc(allocator: Allocator, xml_content: []const u8) ParseError!C try methods.append(allocator, method_doc); } else if (std.mem.eql(u8, name, "member")) { const member_name = try getAttributeAlloc(allocator, reader, "name") orelse continue; + const member_type = try getAttributeAlloc(allocator, reader, "type"); + const member_default = try getAttributeAlloc(allocator, reader, "default"); const desc = try 
readTextContent(allocator, reader); - try properties.append(allocator,.{ .name = member_name, .description = desc }); + try properties.append(allocator, .{ + .name = member_name, + .description = desc, + .return_type = member_type, + .default_value = member_default, + }); } else if (std.mem.eql(u8, name, "signal")) { const signal_name = try getAttributeAlloc(allocator, reader, "name") orelse continue; const desc = try readNestedDescription(allocator, reader, "signal"); @@ -377,6 +384,17 @@ test "parses properties from members element" { try std.testing.expectEqualStrings("Position, relative to the node's parent.", props[0].description.?); } +test "parses property default value" { + const allocator = std.testing.allocator; + const doc = try parseClassDoc(allocator, test_xml); + defer freeClassDoc(allocator, doc); + + const props = doc.properties.?; + try std.testing.expectEqual(1, props.len); + try std.testing.expectEqualStrings("Vector2(0, 0)", props[0].default_value.?); + try std.testing.expectEqualStrings("Vector2", props[0].return_type.?); +} + test "parses signals with descriptions" { const allocator = std.testing.allocator; const doc = try parseClassDoc(allocator, test_xml); From 158004f59c610dcc8f24652a9e2c23895cd9d17e Mon Sep 17 00:00:00 2001 From: Simon Hartcher Date: Sat, 21 Mar 2026 20:09:12 +1100 Subject: [PATCH 09/19] feat: parse constructors and operators from XML --- src/XmlDocParser.zig | 72 +++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 68 insertions(+), 4 deletions(-) diff --git a/src/XmlDocParser.zig b/src/XmlDocParser.zig index d4067d0..fa06f58 100644 --- a/src/XmlDocParser.zig +++ b/src/XmlDocParser.zig @@ -34,6 +34,8 @@ pub const ClassDoc = struct { properties: ?[]MemberDoc = null, signals: ?[]MemberDoc = null, constants: ?[]MemberDoc = null, + constructors: ?[]MemberDoc = null, + operators: ?[]MemberDoc = null, }; pub const ParseError = error{ @@ -63,6 +65,10 @@ pub fn parseClassDoc(allocator: Allocator, xml_content: []const u8) 
ParseError!C defer signals.deinit(allocator); var constants: std.ArrayListUnmanaged(MemberDoc) = .empty; defer constants.deinit(allocator); + var constructors: std.ArrayListUnmanaged(MemberDoc) = .empty; + defer constructors.deinit(allocator); + var operators: std.ArrayListUnmanaged(MemberDoc) = .empty; + defer operators.deinit(allocator); var found_class = false; @@ -104,9 +110,14 @@ pub fn parseClassDoc(allocator: Allocator, xml_content: []const u8) ParseError!C .default_value = member_default, }); } else if (std.mem.eql(u8, name, "signal")) { - const signal_name = try getAttributeAlloc(allocator, reader, "name") orelse continue; - const desc = try readNestedDescription(allocator, reader, "signal"); - try signals.append(allocator,.{ .name = signal_name, .description = desc }); + const signal_doc = try parseMethodElement(allocator, reader); + try signals.append(allocator, signal_doc); + } else if (std.mem.eql(u8, name, "constructor")) { + const ctor_doc = try parseMethodElement(allocator, reader); + try constructors.append(allocator, ctor_doc); + } else if (std.mem.eql(u8, name, "operator")) { + const op_doc = try parseMethodElement(allocator, reader); + try operators.append(allocator, op_doc); } else if (std.mem.eql(u8, name, "constant")) { const constant_name = try getAttributeAlloc(allocator, reader, "name") orelse continue; const desc = try readTextContent(allocator, reader); @@ -124,6 +135,8 @@ pub fn parseClassDoc(allocator: Allocator, xml_content: []const u8) ParseError!C doc.properties = if (properties.items.len > 0) try properties.toOwnedSlice(allocator) else null; doc.signals = if (signals.items.len > 0) try signals.toOwnedSlice(allocator) else null; doc.constants = if (constants.items.len > 0) try constants.toOwnedSlice(allocator) else null; + doc.constructors = if (constructors.items.len > 0) try constructors.toOwnedSlice(allocator) else null; + doc.operators = if (operators.items.len > 0) try operators.toOwnedSlice(allocator) else null; return doc; } 
@@ -142,7 +155,7 @@ pub fn freeClassDoc(allocator: Allocator, doc: ClassDoc) void { allocator.free(tutorials); } - inline for (.{ "methods", "properties", "signals", "constants" }) |field| { + inline for (.{ "methods", "properties", "signals", "constants", "constructors", "operators" }) |field| { if (@field(doc, field)) |members| { for (members) |m| { allocator.free(m.name); @@ -417,6 +430,57 @@ test "parses constants with descriptions" { try std.testing.expectEqualStrings("Maximum allowed value.", consts[0].description.?); } +const test_xml_with_constructors_and_operators = + \\ + \\ + \\ A 2D vector. + \\ 2D vector type. + \\ + \\ + \\ + \\ Constructs a default Vector2. + \\ + \\ + \\ + \\ + \\ + \\ Constructs from x and y. + \\ + \\ + \\ + \\ + \\ + \\ + \\ Adds two vectors. + \\ + \\ + \\ +; + +test "parses constructors" { + const allocator = std.testing.allocator; + const doc = try parseClassDoc(allocator, test_xml_with_constructors_and_operators); + defer freeClassDoc(allocator, doc); + + const ctors = doc.constructors.?; + try std.testing.expectEqual(2, ctors.len); + try std.testing.expectEqualStrings("Vector2", ctors[0].name); + try std.testing.expect(ctors[0].params == null); + try std.testing.expectEqual(2, ctors[1].params.?.len); +} + +test "parses operators" { + const allocator = std.testing.allocator; + const doc = try parseClassDoc(allocator, test_xml_with_constructors_and_operators); + defer freeClassDoc(allocator, doc); + + const ops = doc.operators.?; + try std.testing.expectEqual(1, ops.len); + try std.testing.expectEqualStrings("operator +", ops[0].name); + try std.testing.expectEqualStrings("Vector2", ops[0].return_type.?); + try std.testing.expectEqual(1, ops[0].params.?.len); +} + const test_xml_with_params = \\ \\ From 533abcf79b715c50d8cb80dc50f8e522800d5695 Mon Sep 17 00:00:00 2001 From: Simon Hartcher Date: Sat, 21 Mar 2026 20:09:46 +1100 Subject: [PATCH 10/19] feat: parse constant value and enum attribute from XML --- src/XmlDocParser.zig 
| 39 ++++++++++++++++++++++++++++++++++++++- 1 file changed, 38 insertions(+), 1 deletion(-) diff --git a/src/XmlDocParser.zig b/src/XmlDocParser.zig index fa06f58..4c89d75 100644 --- a/src/XmlDocParser.zig +++ b/src/XmlDocParser.zig @@ -120,8 +120,15 @@ pub fn parseClassDoc(allocator: Allocator, xml_content: []const u8) ParseError!C try operators.append(allocator, op_doc); } else if (std.mem.eql(u8, name, "constant")) { const constant_name = try getAttributeAlloc(allocator, reader, "name") orelse continue; + const constant_value = try getAttributeAlloc(allocator, reader, "value"); + const constant_enum = try getAttributeAlloc(allocator, reader, "enum"); const desc = try readTextContent(allocator, reader); - try constants.append(allocator,.{ .name = constant_name, .description = desc }); + try constants.append(allocator, .{ + .name = constant_name, + .description = desc, + .default_value = constant_value, + .qualifiers = constant_enum, + }); } }, else => continue, @@ -481,6 +488,36 @@ test "parses operators" { try std.testing.expectEqual(1, ops[0].params.?.len); } +const test_xml_with_enums = + \\ + \\ + \\ Base class. + \\ Base node. + \\ + \\ Ready notification. + \\ Inherits process mode. + \\ Always process. 
+ \\ + \\ +; + +test "parses constant value and enum attribute" { + const allocator = std.testing.allocator; + const doc = try parseClassDoc(allocator, test_xml_with_enums); + defer freeClassDoc(allocator, doc); + + const consts = doc.constants.?; + try std.testing.expectEqual(3, consts.len); + + // Regular constant — no enum + try std.testing.expectEqualStrings("13", consts[0].default_value.?); + try std.testing.expect(consts[0].qualifiers == null); + + // Enum constant — enum name stored in qualifiers field + try std.testing.expectEqualStrings("0", consts[1].default_value.?); + try std.testing.expectEqualStrings("ProcessMode", consts[1].qualifiers.?); +} + const test_xml_with_params = \\ \\ From f9e6846a16a94dd528518688bddbfe0fc9c4e6a7 Mon Sep 17 00:00:00 2001 From: Simon Hartcher Date: Sat, 21 Mar 2026 20:12:38 +1100 Subject: [PATCH 11/19] feat: update EntryKind and Entry for XML doc support - Replace `builtin_class` with `constructor` in EntryKind enum - Add `inherits`, `qualifiers`, and `default_value` fields to Entry - Update all `.builtin_class` references to `.class` for compilation --- src/DocDatabase.zig | 26 +++++++++++++++++++++++--- 1 file changed, 23 insertions(+), 3 deletions(-) diff --git a/src/DocDatabase.zig b/src/DocDatabase.zig index ce27bf3..0e19944 100644 --- a/src/DocDatabase.zig +++ b/src/DocDatabase.zig @@ -10,8 +10,8 @@ pub const Error = error{ }; pub const EntryKind = enum { - builtin_class, class, + constructor, method, property, constant, @@ -36,6 +36,9 @@ pub const Entry = struct { signature: ?[]const u8 = null, members: ?[]usize = null, tutorials: ?[]const Tutorial = null, + inherits: ?[]const u8 = null, + qualifiers: ?[]const u8 = null, + default_value: ?[]const u8 = null, }; const RootState = enum { @@ -80,7 +83,7 @@ pub fn loadFromJsonLeaky(arena_allocator: Allocator, scanner: *Scanner) !DocData }; switch (state) { - .builtin_classes => try db.parseClasses(.builtin_class, arena_allocator, scanner), + .builtin_classes => try 
db.parseClasses(.class, arena_allocator, scanner), .classes => try db.parseClasses(.class, arena_allocator, scanner), .utility_functions => try db.parseGlobalMethods(arena_allocator, scanner), else => continue, @@ -532,7 +535,7 @@ test "parse simple builtin class from JSON" { const entry = db.symbols.get("bool"); try std.testing.expect(entry != null); try std.testing.expectEqualStrings("bool", entry.?.name); - try std.testing.expectEqual(EntryKind.builtin_class, entry.?.kind); + try std.testing.expectEqual(EntryKind.class, entry.?.kind); } test "parse regular class from JSON" { @@ -1313,6 +1316,23 @@ test "generateMarkdownForSymbol for class with tutorials" { try writer.flush(); } +test "Entry supports inherits, qualifiers, and default_value fields" { + const entry = Entry{ + .key = "Node2D.position", + .name = "position", + .kind = .property, + .inherits = null, + .qualifiers = null, + .default_value = "Vector2(0, 0)", + }; + try std.testing.expectEqualStrings("Vector2(0, 0)", entry.default_value.?); +} + +test "EntryKind has constructor value" { + const kind: EntryKind = .constructor; + try std.testing.expect(kind == .constructor); +} + const std = @import("std"); const ArenaAllocator = std.heap.ArenaAllocator; const Allocator = std.mem.Allocator; From f64eb05b0910919ea895fc2796a09109e10dd4bc Mon Sep 17 00:00:00 2001 From: Simon Hartcher Date: Sat, 21 Mar 2026 20:17:07 +1100 Subject: [PATCH 12/19] feat: add loadFromXmlDir to build DocDatabase from XML files Add loadFromXmlDir function that iterates a directory of Godot XML doc files and builds a complete symbol table with class entries, methods, properties, signals, constants (with enum grouping), constructors, and operators. Includes signature building helpers and dual-registration of @GlobalScope/@GDScript functions as top-level entries. 
--- src/DocDatabase.zig | 448 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 448 insertions(+) diff --git a/src/DocDatabase.zig b/src/DocDatabase.zig index 0e19944..b18f3d6 100644 --- a/src/DocDatabase.zig +++ b/src/DocDatabase.zig @@ -408,6 +408,325 @@ pub fn lookupSymbolExact(self: DocDatabase, symbol: []const u8) DocDatabase.Erro return self.symbols.get(symbol) orelse return DocDatabase.Error.SymbolNotFound; } +pub fn loadFromXmlDir(arena_allocator: Allocator, tmp_allocator: Allocator, xml_dir_path: []const u8) !DocDatabase { + var db: DocDatabase = .{}; + + var dir = try std.fs.openDirAbsolute(xml_dir_path, .{ .iterate = true }); + defer dir.close(); + + // First pass: collect all files, parse them, register classes. + // We need two passes for GlobalScope precedence, but we can do it in one + // by deferring global registration. + const GlobalEntry = struct { + key: []const u8, + entry: Entry, + }; + var global_scope_entries: ArrayList(GlobalEntry) = .empty; + defer global_scope_entries.deinit(tmp_allocator); + var gdscript_entries: ArrayList(GlobalEntry) = .empty; + defer gdscript_entries.deinit(tmp_allocator); + + var iter = dir.iterate(); + while (iter.next() catch return error.ReadFailed) |dir_entry| { + if (dir_entry.kind != .file) continue; + if (!std.mem.endsWith(u8, dir_entry.name, ".xml")) continue; + + const content = dir.readFileAlloc(tmp_allocator, dir_entry.name, 2 * 1024 * 1024) catch continue; + defer tmp_allocator.free(content); + + const class_doc = XmlDocParser.parseClassDoc(arena_allocator, content) catch |err| { + const class_name = dir_entry.name[0 .. dir_entry.name.len - 4]; + parser_log.warn("failed to parse XML doc for {s}: {}", .{ class_name, err }); + continue; + }; + + // Convert tutorials + const db_tutorials: ?[]const Tutorial = if (class_doc.tutorials) |tutorials| blk: { + const result = try arena_allocator.alloc(Tutorial, tutorials.len); + for (tutorials, 0..) 
|t, i| { + result[i] = .{ .title = t.title, .url = t.url }; + } + break :blk result; + } else null; + + // Create class entry + const class_key = class_doc.name; + try db.symbols.put(arena_allocator, class_key, .{ + .key = class_key, + .name = class_key, + .kind = .class, + .description = class_doc.description, + .brief_description = class_doc.brief_description, + .inherits = class_doc.inherits, + .tutorials = db_tutorials, + }); + const class_idx = db.symbols.getIndex(class_key).?; + + var member_indices: ArrayList(usize) = .empty; + defer member_indices.deinit(tmp_allocator); + + const is_global_scope = std.mem.eql(u8, class_doc.name, "@GlobalScope"); + const is_gdscript = std.mem.eql(u8, class_doc.name, "@GDScript"); + + // Process methods + if (class_doc.methods) |methods| { + for (methods) |method| { + const sig = try buildMethodSignature(arena_allocator, method); + const dotted_key = try std.fmt.allocPrint(arena_allocator, "{s}.{s}", .{ class_doc.name, method.name }); + const child_kind: EntryKind = if (is_global_scope or is_gdscript) .global_function else .method; + const child: Entry = .{ + .key = dotted_key, + .name = method.name, + .parent_index = class_idx, + .kind = child_kind, + .description = method.description, + .signature = sig, + .qualifiers = method.qualifiers, + }; + try db.symbols.put(arena_allocator, dotted_key, child); + const child_idx = db.symbols.getIndex(dotted_key).?; + try member_indices.append(tmp_allocator, child_idx); + + // Track for top-level registration + if (is_global_scope) { + try global_scope_entries.append(tmp_allocator, .{ .key = method.name, .entry = child }); + } else if (is_gdscript) { + try gdscript_entries.append(tmp_allocator, .{ .key = method.name, .entry = child }); + } + } + } + + // Process properties + if (class_doc.properties) |properties| { + for (properties) |prop| { + const sig = try buildPropertySignature(arena_allocator, prop); + const dotted_key = try std.fmt.allocPrint(arena_allocator, "{s}.{s}", .{ 
class_doc.name, prop.name }); + const child: Entry = .{ + .key = dotted_key, + .name = prop.name, + .parent_index = class_idx, + .kind = .property, + .description = prop.description, + .signature = sig, + .default_value = prop.default_value, + }; + try db.symbols.put(arena_allocator, dotted_key, child); + const child_idx = db.symbols.getIndex(dotted_key).?; + try member_indices.append(tmp_allocator, child_idx); + } + } + + // Process signals + if (class_doc.signals) |signals| { + for (signals) |signal| { + const sig = try buildSignalSignature(arena_allocator, signal); + const dotted_key = try std.fmt.allocPrint(arena_allocator, "{s}.{s}", .{ class_doc.name, signal.name }); + const child: Entry = .{ + .key = dotted_key, + .name = signal.name, + .parent_index = class_idx, + .kind = .signal, + .description = signal.description, + .signature = sig, + }; + try db.symbols.put(arena_allocator, dotted_key, child); + const child_idx = db.symbols.getIndex(dotted_key).?; + try member_indices.append(tmp_allocator, child_idx); + } + } + + // Process constants (with enum grouping) + if (class_doc.constants) |constants| { + for (constants) |constant| { + const sig = try buildConstantSignature(arena_allocator, constant); + if (constant.qualifiers) |enum_name| { + // Enum-grouped constant: "ClassName.EnumName.VALUE_NAME" + const dotted_key = try std.fmt.allocPrint(arena_allocator, "{s}.{s}.{s}", .{ class_doc.name, enum_name, constant.name }); + const child: Entry = .{ + .key = dotted_key, + .name = constant.name, + .parent_index = class_idx, + .kind = .enum_value, + .description = constant.description, + .signature = sig, + .qualifiers = constant.qualifiers, + .default_value = constant.default_value, + }; + try db.symbols.put(arena_allocator, dotted_key, child); + const child_idx = db.symbols.getIndex(dotted_key).?; + try member_indices.append(tmp_allocator, child_idx); + } else { + // Regular constant: "ClassName.CONSTANT_NAME" + const dotted_key = try 
std.fmt.allocPrint(arena_allocator, "{s}.{s}", .{ class_doc.name, constant.name }); + const child: Entry = .{ + .key = dotted_key, + .name = constant.name, + .parent_index = class_idx, + .kind = .constant, + .description = constant.description, + .signature = sig, + .default_value = constant.default_value, + }; + try db.symbols.put(arena_allocator, dotted_key, child); + const child_idx = db.symbols.getIndex(dotted_key).?; + try member_indices.append(tmp_allocator, child_idx); + } + } + } + + // Process constructors + if (class_doc.constructors) |constructors| { + for (constructors) |ctor| { + const sig = try buildConstructorSignature(arena_allocator, ctor); + const dotted_key = try std.fmt.allocPrint(arena_allocator, "{s}.{s}", .{ class_doc.name, ctor.name }); + const child: Entry = .{ + .key = dotted_key, + .name = ctor.name, + .parent_index = class_idx, + .kind = .constructor, + .description = ctor.description, + .signature = sig, + }; + // Constructors may have duplicate keys (overloads); only keep first + if (db.symbols.get(dotted_key) == null) { + try db.symbols.put(arena_allocator, dotted_key, child); + const child_idx = db.symbols.getIndex(dotted_key).?; + try member_indices.append(tmp_allocator, child_idx); + } + } + } + + // Process operators + if (class_doc.operators) |operators| { + for (operators) |op| { + const sig = try buildOperatorSignature(arena_allocator, op); + const dotted_key = try std.fmt.allocPrint(arena_allocator, "{s}.{s}", .{ class_doc.name, op.name }); + const child: Entry = .{ + .key = dotted_key, + .name = op.name, + .parent_index = class_idx, + .kind = .operator, + .description = op.description, + .signature = sig, + }; + if (db.symbols.get(dotted_key) == null) { + try db.symbols.put(arena_allocator, dotted_key, child); + const child_idx = db.symbols.getIndex(dotted_key).?; + try member_indices.append(tmp_allocator, child_idx); + } + } + } + + // Update class entry's members + if (member_indices.items.len > 0) { + const members_slice = 
try arena_allocator.dupe(usize, member_indices.items); + var class_ptr = db.symbols.getPtr(class_key).?; + class_ptr.members = members_slice; + } + } + + // Register @GDScript functions as top-level (lower precedence) + for (gdscript_entries.items) |ge| { + if (db.symbols.get(ge.key) == null) { + var entry = ge.entry; + entry.key = try arena_allocator.dupe(u8, ge.key); + try db.symbols.put(arena_allocator, entry.key, entry); + } + } + + // Register @GlobalScope functions as top-level (higher precedence, overwrites) + for (global_scope_entries.items) |ge| { + var entry = ge.entry; + entry.key = try arena_allocator.dupe(u8, ge.key); + try db.symbols.put(arena_allocator, entry.key, entry); + } + + return db; +} + +fn buildMethodSignature(allocator: Allocator, method: XmlDocParser.MemberDoc) !?[]const u8 { + var buf: std.Io.Writer.Allocating = .init(allocator); + errdefer buf.deinit(); + + try buf.writer.writeByte('('); + try writeParams(&buf.writer, method.params); + try buf.writer.writeByte(')'); + + if (method.return_type) |rt| { + if (!std.mem.eql(u8, rt, "void")) { + try buf.writer.print(" -> {s}", .{rt}); + } + } + + return try buf.toOwnedSlice(); +} + +fn buildPropertySignature(allocator: Allocator, prop: XmlDocParser.MemberDoc) !?[]const u8 { + if (prop.return_type) |rt| { + return try std.fmt.allocPrint(allocator, ": {s}", .{rt}); + } + return null; +} + +fn buildConstructorSignature(allocator: Allocator, ctor: XmlDocParser.MemberDoc) !?[]const u8 { + var buf: std.Io.Writer.Allocating = .init(allocator); + errdefer buf.deinit(); + + try buf.writer.writeByte('('); + try writeParams(&buf.writer, ctor.params); + try buf.writer.writeByte(')'); + + return try buf.toOwnedSlice(); +} + +fn buildOperatorSignature(allocator: Allocator, op: XmlDocParser.MemberDoc) !?[]const u8 { + var buf: std.Io.Writer.Allocating = .init(allocator); + errdefer buf.deinit(); + + try buf.writer.writeByte('('); + try writeParams(&buf.writer, op.params); + try buf.writer.writeByte(')'); + 
+ if (op.return_type) |rt| { + try buf.writer.print(" -> {s}", .{rt}); + } + + return try buf.toOwnedSlice(); +} + +fn buildSignalSignature(allocator: Allocator, signal: XmlDocParser.MemberDoc) !?[]const u8 { + if (signal.params) |_| { + var buf: std.Io.Writer.Allocating = .init(allocator); + errdefer buf.deinit(); + + try buf.writer.writeByte('('); + try writeParams(&buf.writer, signal.params); + try buf.writer.writeByte(')'); + + return try buf.toOwnedSlice(); + } + return null; +} + +fn buildConstantSignature(allocator: Allocator, constant: XmlDocParser.MemberDoc) !?[]const u8 { + if (constant.default_value) |val| { + return try std.fmt.allocPrint(allocator, " = {s}", .{val}); + } + return null; +} + +fn writeParams(writer: *std.Io.Writer, params: ?[]XmlDocParser.ParamDoc) !void { + if (params) |ps| { + for (ps, 0..) |p, i| { + if (i > 0) try writer.writeAll(", "); + try writer.print("{s}: {s}", .{ p.name, p.type }); + if (p.default_value) |dv| { + try writer.print(" = {s}", .{dv}); + } + } + } +} + fn generateMarkdownForEntry(self: DocDatabase, allocator: Allocator, entry: Entry, writer: *Writer) !void { try writer.print("# {s}", .{entry.key}); @@ -1333,6 +1652,134 @@ test "EntryKind has constructor value" { try std.testing.expect(kind == .constructor); } +test "loadFromXmlDir parses XML files into symbol table" { + var arena = ArenaAllocator.init(std.testing.allocator); + defer arena.deinit(); + + var tmp_dir = std.testing.tmpDir(.{}); + defer tmp_dir.cleanup(); + + const sprite2d_xml = + \\ + \\ + \\ A 2D sprite node. + \\ Displays a 2D texture. + \\ + \\ + \\ + \\ Returns whether the sprite is flipped horizontally. + \\ + \\ + \\ + \\ The texture to display. 
+ \\ + \\ + ; + + try tmp_dir.dir.writeFile(.{ .sub_path = "Sprite2D.xml", .data = sprite2d_xml }); + + const tmp_path = try tmp_dir.dir.realpathAlloc(std.testing.allocator, "."); + defer std.testing.allocator.free(tmp_path); + + const db = try DocDatabase.loadFromXmlDir(arena.allocator(), std.testing.allocator, tmp_path); + + // Verify class entry + const class_entry = db.symbols.get("Sprite2D").?; + try std.testing.expectEqual(EntryKind.class, class_entry.kind); + try std.testing.expectEqualStrings("Node2D", class_entry.inherits.?); + try std.testing.expectEqualStrings("A 2D sprite node.", class_entry.brief_description.?); + try std.testing.expectEqualStrings("Displays a 2D texture.", class_entry.description.?); + + // Verify method entry + const method_entry = db.symbols.get("Sprite2D.is_flipped_h").?; + try std.testing.expectEqual(EntryKind.method, method_entry.kind); + try std.testing.expect(method_entry.signature != null); + try std.testing.expect(std.mem.indexOf(u8, method_entry.signature.?, "bool") != null); + try std.testing.expectEqualStrings("const", method_entry.qualifiers.?); + + // Verify property entry + const prop_entry = db.symbols.get("Sprite2D.texture").?; + try std.testing.expectEqual(EntryKind.property, prop_entry.kind); + try std.testing.expectEqualStrings("null", prop_entry.default_value.?); +} + +test "loadFromXmlDir groups constants with enum attribute as enum_value entries" { + var arena = ArenaAllocator.init(std.testing.allocator); + defer arena.deinit(); + + var tmp_dir = std.testing.tmpDir(.{}); + defer tmp_dir.cleanup(); + + const node_xml = + \\ + \\ + \\ Base class. + \\ Base node. + \\ + \\ Ready notification. + \\ Inherits process mode. + \\ Always process. 
+ \\ + \\ + ; + + try tmp_dir.dir.writeFile(.{ .sub_path = "Node.xml", .data = node_xml }); + + const tmp_path = try tmp_dir.dir.realpathAlloc(std.testing.allocator, "."); + defer std.testing.allocator.free(tmp_path); + + const db = try DocDatabase.loadFromXmlDir(arena.allocator(), std.testing.allocator, tmp_path); + + // Regular constant + const notif_entry = db.symbols.get("Node.NOTIFICATION_READY").?; + try std.testing.expectEqual(EntryKind.constant, notif_entry.kind); + + // Enum-grouped constants + const inherit_entry = db.symbols.get("Node.ProcessMode.PROCESS_MODE_INHERIT").?; + try std.testing.expectEqual(EntryKind.enum_value, inherit_entry.kind); + + const always_entry = db.symbols.get("Node.ProcessMode.PROCESS_MODE_ALWAYS").?; + try std.testing.expectEqual(EntryKind.enum_value, always_entry.kind); +} + +test "loadFromXmlDir registers GlobalScope functions as top-level entries" { + var arena = ArenaAllocator.init(std.testing.allocator); + defer arena.deinit(); + + var tmp_dir = std.testing.tmpDir(.{}); + defer tmp_dir.cleanup(); + + const global_scope_xml = + \\ + \\ + \\ Global scope. + \\ Global scope constants and functions. + \\ + \\ + \\ + \\ + \\ Returns the absolute value. 
+ \\ + \\ + \\ + ; + + try tmp_dir.dir.writeFile(.{ .sub_path = "@GlobalScope.xml", .data = global_scope_xml }); + + const tmp_path = try tmp_dir.dir.realpathAlloc(std.testing.allocator, "."); + defer std.testing.allocator.free(tmp_path); + + const db = try DocDatabase.loadFromXmlDir(arena.allocator(), std.testing.allocator, tmp_path); + + // Verify dotted key exists + const dotted_entry = db.symbols.get("@GlobalScope.abs").?; + try std.testing.expectEqual(EntryKind.global_function, dotted_entry.kind); + + // Verify top-level entry exists + const top_entry = db.symbols.get("abs").?; + try std.testing.expectEqual(EntryKind.global_function, top_entry.kind); +} + const std = @import("std"); const ArenaAllocator = std.heap.ArenaAllocator; const Allocator = std.mem.Allocator; @@ -1345,3 +1792,4 @@ const File = std.fs.File; const Writer = std.Io.Writer; const bbcodez = @import("bbcodez"); +const XmlDocParser = @import("XmlDocParser.zig"); From 221b0f4271e4de9a8fcba308cef51f92f040f564 Mon Sep 17 00:00:00 2001 From: Simon Hartcher Date: Sat, 21 Mar 2026 20:20:53 +1100 Subject: [PATCH 13/19] feat: update markdown generation for inherits, qualifiers, default values Add inheritance line after heading in generateMarkdownForEntry. Add constructor and operator lists to generateMemberListings. Show qualifiers and default values in formatMemberLine. Add test for inheritance display in generated markdown. 
--- src/DocDatabase.zig | 45 ++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 44 insertions(+), 1 deletion(-) diff --git a/src/DocDatabase.zig b/src/DocDatabase.zig index b18f3d6..7df3e61 100644 --- a/src/DocDatabase.zig +++ b/src/DocDatabase.zig @@ -736,6 +736,10 @@ fn generateMarkdownForEntry(self: DocDatabase, allocator: Allocator, entry: Entr try writer.writeByte('\n'); + if (entry.inherits) |inherits| { + try writer.print("\n*Inherits: {s}*\n", .{inherits}); + } + if (entry.parent_index) |parent_index| { const parent = self.symbols.values()[parent_index]; try writer.print("\n**Parent**: {s}\n", .{parent.name}); @@ -764,14 +768,18 @@ fn generateMarkdownForEntry(self: DocDatabase, allocator: Allocator, entry: Entr } fn generateMemberListings(self: DocDatabase, allocator: Allocator, member_indices: []usize, writer: *Writer) !void { + var constructors: ArrayList(usize) = .empty; var properties: ArrayList(usize) = .empty; var methods: ArrayList(usize) = .empty; + var operators: ArrayList(usize) = .empty; var signals: ArrayList(usize) = .empty; var constants: ArrayList(usize) = .empty; var enums: ArrayList(usize) = .empty; + defer constructors.deinit(allocator); defer properties.deinit(allocator); defer methods.deinit(allocator); + defer operators.deinit(allocator); defer signals.deinit(allocator); defer constants.deinit(allocator); defer enums.deinit(allocator); @@ -779,8 +787,10 @@ fn generateMemberListings(self: DocDatabase, allocator: Allocator, member_indice for (member_indices) |idx| { const member: Entry = self.symbols.values()[idx]; switch (member.kind) { + .constructor => try constructors.append(allocator, idx), .property => try properties.append(allocator, idx), .method => try methods.append(allocator, idx), + .operator => try operators.append(allocator, idx), .signal => try signals.append(allocator, idx), .constant => try constants.append(allocator, idx), .enum_value => try enums.append(allocator, idx), @@ -788,8 +798,10 @@ fn 
generateMemberListings(self: DocDatabase, allocator: Allocator, member_indice } } + try self.formatMemberSection("Constructors", constructors.items, writer); try self.formatMemberSection("Properties", properties.items, writer); try self.formatMemberSection("Methods", methods.items, writer); + try self.formatMemberSection("Operators", operators.items, writer); try self.formatMemberSection("Signals", signals.items, writer); try self.formatMemberSection("Constants", constants.items, writer); try self.formatMemberSection("Enums", enums.items, writer); @@ -813,7 +825,15 @@ fn formatMemberLine(self: DocDatabase, member_idx: usize, writer: *Writer) !void try writer.writeAll(sig); } - try writer.writeAll("**"); + if (member.qualifiers) |quals| { + try writer.print("** `{s}`", .{quals}); + } else { + try writer.writeAll("**"); + } + + if (member.default_value) |default| { + try writer.print(" = `{s}`", .{default}); + } if (member.brief_description) |brief| { try writer.print(" - {s}", .{brief}); @@ -1652,6 +1672,29 @@ test "EntryKind has constructor value" { try std.testing.expect(kind == .constructor); } +test "generateMarkdownForSymbol shows inheritance" { + const allocator = std.testing.allocator; + + var db = DocDatabase{ .symbols = StringArrayHashMap(Entry).empty }; + defer db.symbols.deinit(allocator); + + try db.symbols.put(allocator, "Node2D", Entry{ + .key = "Node2D", + .name = "Node2D", + .kind = .class, + .inherits = "CanvasItem", + .brief_description = "A 2D game object.", + }); + + var allocating: std.Io.Writer.Allocating = .init(allocator); + defer allocating.deinit(); + + try db.generateMarkdownForSymbol(allocator, "Node2D", &allocating.writer); + const written = allocating.written(); + + try std.testing.expect(std.mem.indexOf(u8, written, "*Inherits: CanvasItem*") != null); +} + test "loadFromXmlDir parses XML files into symbol table" { var arena = ArenaAllocator.init(std.testing.allocator); defer arena.deinit(); From 6fac96fac1d7c4eb81a79e50ef3290dc36e0cd85 
Mon Sep 17 00:00:00 2001 From: Simon Hartcher Date: Sat, 21 Mar 2026 20:25:42 +1100 Subject: [PATCH 14/19] refactor: remove all JSON parsing code from DocDatabase Delete JSON-specific functions (loadFromJsonFileLeaky, loadFromJsonLeaky, parseClass, parseEntry, parseEntryArray, etc.), handler maps, RootState enum, InvalidApiJson error, and all JSON-based tests. Add BBCode-to-Markdown conversion in loadFromXmlDir for class and member descriptions. --- src/DocDatabase.zig | 923 ++++---------------------------------------- 1 file changed, 70 insertions(+), 853 deletions(-) diff --git a/src/DocDatabase.zig b/src/DocDatabase.zig index 7df3e61..cc9490c 100644 --- a/src/DocDatabase.zig +++ b/src/DocDatabase.zig @@ -6,7 +6,6 @@ symbols: StringArrayHashMap(Entry) = .empty, pub const Error = error{ SymbolNotFound, - InvalidApiJson, }; pub const EntryKind = enum { @@ -41,106 +40,6 @@ pub const Entry = struct { default_value: ?[]const u8 = null, }; -const RootState = enum { - init, - builtin_classes, - classes, - global_constants, - global_enums, - native_structures, - singletons, - utility_functions, -}; - -pub fn loadFromJsonFileLeaky(arena_allocator: Allocator, file: File) !DocDatabase { - var buf: [4096]u8 = undefined; - var file_reader = file.reader(&buf); - const reader = &file_reader.interface; - - const file_content = try reader.readAlloc(arena_allocator, try file.getEndPos()); - defer arena_allocator.free(file_content); - - var scanner = Scanner.initCompleteInput(arena_allocator, file_content); - defer scanner.deinit(); - - return loadFromJsonLeaky(arena_allocator, &scanner) catch |err| switch (err) { - Scanner.Error.SyntaxError, Scanner.Error.UnexpectedEndOfInput => return Error.InvalidApiJson, - else => return err, - }; -} - -pub fn loadFromJsonLeaky(arena_allocator: Allocator, scanner: *Scanner) !DocDatabase { - var db = DocDatabase{}; - - while (true) { - const token = try scanner.next(); - switch (token) { - .string => |s| { - 
std.debug.assert(scanner.string_is_object_key); - const state = std.meta.stringToEnum(RootState, s) orelse { - try scanner.skipValue(); - continue; - }; - - switch (state) { - .builtin_classes => try db.parseClasses(.class, arena_allocator, scanner), - .classes => try db.parseClasses(.class, arena_allocator, scanner), - .utility_functions => try db.parseGlobalMethods(arena_allocator, scanner), - else => continue, - } - }, - .end_of_document => break, - else => {}, - } - } - - return db; -} - -fn parseGlobalMethods(self: *DocDatabase, allocator: Allocator, scanner: *Scanner) !void { - std.debug.assert(try scanner.next() == .array_begin); - - while (true) { - const token = try scanner.next(); - switch (token) { - .object_begin => { - const method = try self.parseEntry(.global_function, allocator, scanner); - try self.symbols.put(allocator, method.key, method); - }, - .array_end => break, - .end_of_document => unreachable, - else => {}, - } - } -} - -fn parseClasses(self: *DocDatabase, comptime kind: EntryKind, allocator: Allocator, scanner: *Scanner) !void { - std.debug.assert(try scanner.next() == .array_begin); - - while (true) { - const token = try scanner.next(); - switch (token) { - .object_begin => { - try self.parseClass(kind, allocator, scanner); - }, - .array_end => break, - .end_of_document => unreachable, - else => {}, - } - } -} - -const ClassKey = enum { - name, - methods, - properties, - signals, - constants, - description, - brief_description, - enums, -}; - fn bbcodeToMarkdown(allocator: Allocator, input: []const u8) ![]const u8 { var output: std.Io.Writer.Allocating = .init(allocator); @@ -152,258 +51,6 @@ fn bbcodeToMarkdown(allocator: Allocator, input: []const u8) ![]const u8 { return try output.toOwnedSlice(); } -fn parseClass(self: *DocDatabase, comptime kind: EntryKind, allocator: Allocator, scanner: *Scanner) !void { - var entry: Entry = .{ - .name = undefined, - .key = undefined, - .kind = kind, - }; - - var methods: []Entry = &.{}; - var 
properties: []Entry = &.{}; - var signals: []Entry = &.{}; - var constants: []Entry = &.{}; - var enums: []Entry = &.{}; - - while (true) { - const token = try scanner.next(); - switch (token) { - .string => |s| { - std.debug.assert(scanner.string_is_object_key); - const class_key = std.meta.stringToEnum(ClassKey, s) orelse { - try scanner.skipValue(); - continue; - }; - - switch (class_key) { - .name => { - const name = try scanner.next(); - std.debug.assert(name == .string); - - entry.name = try allocator.dupe(u8, name.string); - entry.key = entry.name; - }, - .methods => methods = try self.parseEntryArray(.method, allocator, scanner), - .properties => properties = try self.parseEntryArray(.property, allocator, scanner), - .signals => signals = try self.parseEntryArray(.signal, allocator, scanner), - .constants => constants = try self.parseEntryArray(.constant, allocator, scanner), - .enums => enums = try self.parseEntryArray(.enum_value, allocator, scanner), - .brief_description => entry.brief_description = try nextTokenToMarkdownAlloc(allocator, scanner), - .description => entry.description = try nextTokenToMarkdownAlloc(allocator, scanner), - } - }, - .object_end => break, - .end_of_document => unreachable, - else => {}, - } - } - - try self.symbols.put(allocator, entry.key, entry); - const entry_idx = self.symbols.getIndex(entry.name).?; - - const member_count = methods.len + properties.len + signals.len + constants.len + enums.len; - - var member_indices: ArrayList(usize) = .empty; - defer member_indices.deinit(allocator); - try member_indices.ensureTotalCapacity(allocator, member_count); - - try self.appendEntries(allocator, entry, entry_idx, methods, &member_indices); - try self.appendEntries(allocator, entry, entry_idx, properties, &member_indices); - try self.appendEntries(allocator, entry, entry_idx, signals, &member_indices); - try self.appendEntries(allocator, entry, entry_idx, constants, &member_indices); - try self.appendEntries(allocator, entry, 
entry_idx, enums, &member_indices); - - if (member_indices.items.len > 0) { - var entry_ptr = self.symbols.getPtr(entry.key).?; - entry_ptr.members = try member_indices.toOwnedSlice(allocator); - } -} - -fn appendEntries(self: *DocDatabase, allocator: Allocator, parent: Entry, parent_idx: usize, entries: []Entry, indices: *ArrayList(usize)) !void { - for (entries) |*property_entry| { - property_entry.parent_index = parent_idx; - property_entry.key = try std.fmt.allocPrint(allocator, "{s}.{s}", .{ parent.name, property_entry.name }); - - // store property entry in the database - try self.symbols.put(allocator, property_entry.key, property_entry.*); - - // update property index on the parent entry - const property_index = self.symbols.getIndex(property_entry.key).?; - indices.appendAssumeCapacity(property_index); - } -} - -const MethodKey = enum { - name, - description, -}; - -const PropertyKey = enum { - name, - type, - getter, - setter, - description, -}; - -const ConstantKey = enum { - name, - description, -}; - -const SignalKey = enum { - name, - description, -}; - -const EnumKey = enum { - name, - description, -}; - -const kind_key_map: std.StaticStringMap(type) = .initComptime(.{ - .{ @tagName(EntryKind.method), MethodKey }, - .{ @tagName(EntryKind.constant), ConstantKey }, - .{ @tagName(EntryKind.signal), SignalKey }, - .{ @tagName(EntryKind.enum_value), EnumKey }, - .{ @tagName(EntryKind.property), PropertyKey }, - .{ @tagName(EntryKind.global_function), MethodKey }, -}); - -const constant_handler_map: std.StaticStringMap(*const fn (Allocator, *Entry, *Scanner) anyerror!void) = .initComptime(.{ - .{ @tagName(ConstantKey.name), handleEntryName }, - .{ @tagName(ConstantKey.description), handleEntryDescription }, -}); - -const method_handler_map: std.StaticStringMap(*const fn (Allocator, *Entry, *Scanner) anyerror!void) = .initComptime(.{ - .{ @tagName(MethodKey.name), handleEntryName }, - .{ @tagName(MethodKey.description), handleEntryDescription }, -}); - 
-const signal_handler_map: std.StaticStringMap(*const fn (Allocator, *Entry, *Scanner) anyerror!void) = .initComptime(.{ - .{ @tagName(SignalKey.name), handleEntryName }, - .{ @tagName(SignalKey.description), handleEntryDescription }, -}); - -const enum_value_handler_map: std.StaticStringMap(*const fn (Allocator, *Entry, *Scanner) anyerror!void) = .initComptime(.{ - .{ @tagName(EnumKey.name), handleEntryName }, - .{ @tagName(EnumKey.description), handleEntryDescription }, -}); - -const property_handler_map: std.StaticStringMap(*const fn (Allocator, *Entry, *Scanner) anyerror!void) = .initComptime(.{ - .{ @tagName(PropertyKey.name), handleEntryName }, - .{ @tagName(PropertyKey.type), handlePropertyType }, - .{ @tagName(PropertyKey.getter), skipValue }, - .{ @tagName(PropertyKey.setter), skipValue }, - // TODO: bbcodez throws for some reason - // .{ @tagName(PropertyKey.description), handleEntryDescription }, -}); - -const kind_handler_map: std.StaticStringMap(std.StaticStringMap(*const fn (Allocator, *Entry, *Scanner) anyerror!void)) = .initComptime(.{ - .{ @tagName(EntryKind.constant), constant_handler_map }, - .{ @tagName(EntryKind.signal), signal_handler_map }, - .{ @tagName(EntryKind.enum_value), enum_value_handler_map }, - .{ @tagName(EntryKind.property), property_handler_map }, - .{ @tagName(EntryKind.method), method_handler_map }, - .{ @tagName(EntryKind.global_function), method_handler_map }, -}); - -fn handleEntryName(allocator: Allocator, entry: *Entry, scanner: *Scanner) anyerror!void { - const name = try scanner.next(); - std.debug.assert(name == .string); - - entry.name = try allocator.dupe(u8, name.string); - entry.key = entry.name; -} - -fn handlePropertyType(allocator: Allocator, entry: *Entry, scanner: *Scanner) anyerror!void { - const @"type" = try scanner.next(); - std.debug.assert(@"type" == .string); - entry.signature = try std.fmt.allocPrint(allocator, ": {s}", .{@"type".string}); -} - -fn handleEntryDescription(allocator: Allocator, entry: 
*Entry, scanner: *Scanner) anyerror!void { - entry.description = try nextTokenToMarkdownAlloc(allocator, scanner); -} - -fn skipValue(allocator: Allocator, entry: *Entry, scanner: *Scanner) anyerror!void { - _ = allocator; - _ = entry; - try scanner.skipValue(); -} - -fn parseEntry(self: *const DocDatabase, comptime kind: EntryKind, allocator: Allocator, scanner: *Scanner) !Entry { - _ = self; // autofix - - var entry: Entry = .{ - .name = undefined, - .key = undefined, - .kind = kind, - }; - - const KeyType = kind_key_map.get(@tagName(kind)) orelse @compileError("No key type found for kind: " ++ @tagName(kind)); - const handlers = kind_handler_map.get(@tagName(kind)) orelse std.debug.panic("No handlers found for kind: {}", .{kind}); - - while (true) { - const token = try scanner.next(); - switch (token) { - .string => |s| { - std.debug.assert(scanner.string_is_object_key); - const key = std.meta.stringToEnum(KeyType, s) orelse { - try scanner.skipValue(); - continue; - }; - - const handler = handlers.get(@tagName(key)) orelse { - parser_log.warn("No handler found for key: {s}.{s}", .{ @tagName(kind), @tagName(key) }); - try scanner.skipValue(); - continue; - }; - - try handler(allocator, &entry, scanner); - }, - .object_end => break, - .end_of_document => unreachable, - else => {}, - } - } - - return entry; -} - -fn parseEntryArray(self: *const DocDatabase, comptime kind: EntryKind, allocator: Allocator, scanner: *Scanner) ![]Entry { - var entries: ArrayList(Entry) = .empty; - defer entries.deinit(allocator); - - const constants_token = try scanner.next(); - std.debug.assert(constants_token == .array_begin); - - while (true) { - const constant_token = try scanner.next(); - switch (constant_token) { - .object_begin => { - try entries.append(allocator, try self.parseEntry(kind, allocator, scanner)); - }, - .array_end => break, - .end_of_document => unreachable, - else => {}, - } - } - - return entries.toOwnedSlice(allocator); -} - -fn 
nextTokenToMarkdownAlloc(allocator: Allocator, scanner: *Scanner) ![]const u8 { - const token = try scanner.nextAlloc(allocator, .alloc_if_needed); - - const value = switch (token) { - inline .string, .allocated_string => |str| str, - else => unreachable, - }; - - return try bbcodeToMarkdown(allocator, value); -} - pub fn lookupSymbolExact(self: DocDatabase, symbol: []const u8) DocDatabase.Error!Entry { return self.symbols.get(symbol) orelse return DocDatabase.Error.SymbolNotFound; } @@ -449,14 +96,24 @@ pub fn loadFromXmlDir(arena_allocator: Allocator, tmp_allocator: Allocator, xml_ break :blk result; } else null; + // Convert BBCode descriptions to Markdown + const description = if (class_doc.description) |desc| + bbcodeToMarkdown(arena_allocator, desc) catch desc + else + null; + const brief_description = if (class_doc.brief_description) |desc| + bbcodeToMarkdown(arena_allocator, desc) catch desc + else + null; + // Create class entry const class_key = class_doc.name; try db.symbols.put(arena_allocator, class_key, .{ .key = class_key, .name = class_key, .kind = .class, - .description = class_doc.description, - .brief_description = class_doc.brief_description, + .description = description, + .brief_description = brief_description, .inherits = class_doc.inherits, .tutorials = db_tutorials, }); @@ -474,12 +131,16 @@ pub fn loadFromXmlDir(arena_allocator: Allocator, tmp_allocator: Allocator, xml_ const sig = try buildMethodSignature(arena_allocator, method); const dotted_key = try std.fmt.allocPrint(arena_allocator, "{s}.{s}", .{ class_doc.name, method.name }); const child_kind: EntryKind = if (is_global_scope or is_gdscript) .global_function else .method; + const desc = if (method.description) |d| + bbcodeToMarkdown(arena_allocator, d) catch d + else + null; const child: Entry = .{ .key = dotted_key, .name = method.name, .parent_index = class_idx, .kind = child_kind, - .description = method.description, + .description = desc, .signature = sig, .qualifiers = 
method.qualifiers, }; @@ -501,12 +162,16 @@ pub fn loadFromXmlDir(arena_allocator: Allocator, tmp_allocator: Allocator, xml_ for (properties) |prop| { const sig = try buildPropertySignature(arena_allocator, prop); const dotted_key = try std.fmt.allocPrint(arena_allocator, "{s}.{s}", .{ class_doc.name, prop.name }); + const desc = if (prop.description) |d| + bbcodeToMarkdown(arena_allocator, d) catch d + else + null; const child: Entry = .{ .key = dotted_key, .name = prop.name, .parent_index = class_idx, .kind = .property, - .description = prop.description, + .description = desc, .signature = sig, .default_value = prop.default_value, }; @@ -521,12 +186,16 @@ pub fn loadFromXmlDir(arena_allocator: Allocator, tmp_allocator: Allocator, xml_ for (signals) |signal| { const sig = try buildSignalSignature(arena_allocator, signal); const dotted_key = try std.fmt.allocPrint(arena_allocator, "{s}.{s}", .{ class_doc.name, signal.name }); + const desc = if (signal.description) |d| + bbcodeToMarkdown(arena_allocator, d) catch d + else + null; const child: Entry = .{ .key = dotted_key, .name = signal.name, .parent_index = class_idx, .kind = .signal, - .description = signal.description, + .description = desc, .signature = sig, }; try db.symbols.put(arena_allocator, dotted_key, child); @@ -539,6 +208,10 @@ pub fn loadFromXmlDir(arena_allocator: Allocator, tmp_allocator: Allocator, xml_ if (class_doc.constants) |constants| { for (constants) |constant| { const sig = try buildConstantSignature(arena_allocator, constant); + const desc = if (constant.description) |d| + bbcodeToMarkdown(arena_allocator, d) catch d + else + null; if (constant.qualifiers) |enum_name| { // Enum-grouped constant: "ClassName.EnumName.VALUE_NAME" const dotted_key = try std.fmt.allocPrint(arena_allocator, "{s}.{s}.{s}", .{ class_doc.name, enum_name, constant.name }); @@ -547,7 +220,7 @@ pub fn loadFromXmlDir(arena_allocator: Allocator, tmp_allocator: Allocator, xml_ .name = constant.name, .parent_index = 
class_idx, .kind = .enum_value, - .description = constant.description, + .description = desc, .signature = sig, .qualifiers = constant.qualifiers, .default_value = constant.default_value, @@ -563,7 +236,7 @@ pub fn loadFromXmlDir(arena_allocator: Allocator, tmp_allocator: Allocator, xml_ .name = constant.name, .parent_index = class_idx, .kind = .constant, - .description = constant.description, + .description = desc, .signature = sig, .default_value = constant.default_value, }; @@ -579,12 +252,16 @@ pub fn loadFromXmlDir(arena_allocator: Allocator, tmp_allocator: Allocator, xml_ for (constructors) |ctor| { const sig = try buildConstructorSignature(arena_allocator, ctor); const dotted_key = try std.fmt.allocPrint(arena_allocator, "{s}.{s}", .{ class_doc.name, ctor.name }); + const desc = if (ctor.description) |d| + bbcodeToMarkdown(arena_allocator, d) catch d + else + null; const child: Entry = .{ .key = dotted_key, .name = ctor.name, .parent_index = class_idx, .kind = .constructor, - .description = ctor.description, + .description = desc, .signature = sig, }; // Constructors may have duplicate keys (overloads); only keep first @@ -601,12 +278,16 @@ pub fn loadFromXmlDir(arena_allocator: Allocator, tmp_allocator: Allocator, xml_ for (operators) |op| { const sig = try buildOperatorSignature(arena_allocator, op); const dotted_key = try std.fmt.allocPrint(arena_allocator, "{s}.{s}", .{ class_doc.name, op.name }); + const desc = if (op.description) |d| + bbcodeToMarkdown(arena_allocator, d) catch d + else + null; const child: Entry = .{ .key = dotted_key, .name = op.name, .parent_index = class_idx, .kind = .operator, - .description = op.description, + .description = desc, .signature = sig, }; if (db.symbols.get(dotted_key) == null) { @@ -850,493 +531,6 @@ pub fn generateMarkdownForSymbol(self: DocDatabase, allocator: Allocator, symbol try self.generateMarkdownForEntry(allocator, self.symbols.get(symbol) orelse return error.SymbolNotFound, writer); } -test "parse simple 
builtin class from JSON" { - var arena = ArenaAllocator.init(std.testing.allocator); - const allocator = arena.allocator(); - defer arena.deinit(); - - // Minimal JSON with one builtin class - const json_source = - \\{ - \\ "builtin_classes": [ - \\ { - \\ "name": "bool" - \\ } - \\ ] - \\} - ; - - var json_scanner = Scanner.initCompleteInput(allocator, json_source); - - const db = try DocDatabase.loadFromJsonLeaky(allocator, &json_scanner); - - // Verify the class was parsed - const entry = db.symbols.get("bool"); - try std.testing.expect(entry != null); - try std.testing.expectEqualStrings("bool", entry.?.name); - try std.testing.expectEqual(EntryKind.class, entry.?.kind); -} - -test "parse regular class from JSON" { - var arena = ArenaAllocator.init(std.testing.allocator); - const allocator = arena.allocator(); - defer arena.deinit(); - - const json_source = - \\{ - \\ "classes": [ - \\ { - \\ "name": "Node2D" - \\ } - \\ ] - \\} - ; - - var json_scanner = Scanner.initCompleteInput(allocator, json_source); - const db = try DocDatabase.loadFromJsonLeaky(allocator, &json_scanner); - - const entry = db.symbols.get("Node2D"); - try std.testing.expect(entry != null); - try std.testing.expectEqualStrings("Node2D", entry.?.name); - try std.testing.expectEqual(EntryKind.class, entry.?.kind); -} - -test "parse method with parent" { - var arena = ArenaAllocator.init(std.testing.allocator); - const allocator = arena.allocator(); - defer arena.deinit(); - - const json_source = - \\{ - \\ "classes": [ - \\ { - \\ "name": "Node2D", - \\ "methods": [ - \\ { - \\ "name": "get_global_position" - \\ } - \\ ] - \\ } - \\ ] - \\} - ; - - var json_scanner = Scanner.initCompleteInput(allocator, json_source); - const db = try DocDatabase.loadFromJsonLeaky(allocator, &json_scanner); - - // Verify the class exists - const class_entry = db.symbols.get("Node2D"); - try std.testing.expect(class_entry != null); - - // Verify the method exists with correct full_path - const method_entry = 
db.symbols.get("Node2D.get_global_position"); - try std.testing.expect(method_entry != null); - try std.testing.expectEqualStrings("get_global_position", method_entry.?.name); - try std.testing.expectEqualStrings("Node2D.get_global_position", method_entry.?.key); - try std.testing.expectEqual(EntryKind.method, method_entry.?.kind); - - // Verify parent points to the class - try std.testing.expect(method_entry.?.parent_index != null); - - const parent = db.symbols.values()[method_entry.?.parent_index.?]; - try std.testing.expectEqualStrings("Node2D", parent.name); -} - -test "parse utility functions as global functions" { - var arena = ArenaAllocator.init(std.testing.allocator); - const allocator = arena.allocator(); - defer arena.deinit(); - - const json_source = - \\{ - \\ "utility_functions": [ - \\ { - \\ "name": "sin" - \\ } - \\ ] - \\} - ; - - var json_scanner = Scanner.initCompleteInput(allocator, json_source); - const db = try DocDatabase.loadFromJsonLeaky(allocator, &json_scanner); - - const entry = db.symbols.get("sin"); - try std.testing.expect(entry != null); - try std.testing.expectEqualStrings("sin", entry.?.name); - try std.testing.expectEqualStrings("sin", entry.?.key); - try std.testing.expectEqual(EntryKind.global_function, entry.?.kind); - try std.testing.expect(entry.?.parent_index == null); // No parent -} - -test "class stores member indices not strings" { - var arena = ArenaAllocator.init(std.testing.allocator); - const allocator = arena.allocator(); - defer arena.deinit(); - - const json_source = - \\{ - \\ "classes": [ - \\ { - \\ "name": "Vector2", - \\ "methods": [ - \\ { - \\ "name": "normalized" - \\ }, - \\ { - \\ "name": "length" - \\ } - \\ ] - \\ } - \\ ] - \\} - ; - - var json_scanner = Scanner.initCompleteInput(allocator, json_source); - const db = try DocDatabase.loadFromJsonLeaky(allocator, &json_scanner); - - // Verify the class has members array - const class_entry = db.symbols.get("Vector2"); - try 
std.testing.expect(class_entry != null); - try std.testing.expect(class_entry.?.members != null); - - // Should have 2 members - const members = class_entry.?.members.?; - try std.testing.expectEqual(@as(usize, 2), members.len); - - // Members should be indices into the symbols array - const first_member = db.symbols.values()[members[0]]; - const second_member = db.symbols.values()[members[1]]; - - // Verify members are the methods - try std.testing.expectEqualStrings("normalized", first_member.name); - try std.testing.expectEqual(EntryKind.method, first_member.kind); - - try std.testing.expectEqualStrings("length", second_member.name); - try std.testing.expectEqual(EntryKind.method, second_member.kind); -} - -test "convert BBCode description to Markdown" { - var arena = ArenaAllocator.init(std.testing.allocator); - const allocator = arena.allocator(); - defer arena.deinit(); - - const json_source = - \\{ - \\ "classes": [ - \\ { - \\ "name": "Node2D", - \\ "brief_description": "A 2D game object with [b]position[/b] and [i]rotation[/i]." 
- \\ } - \\ ] - \\} - ; - - var json_scanner = Scanner.initCompleteInput(allocator, json_source); - const db = try DocDatabase.loadFromJsonLeaky(allocator, &json_scanner); - - const entry = db.symbols.get("Node2D"); - try std.testing.expect(entry != null); - try std.testing.expect(entry.?.brief_description != null); - - // BBCode should be converted to Markdown - // [b]text[/b] -> **text** - // [i]text[/i] -> *text* - const expected = "A 2D game object with **position** and *rotation*."; - try std.testing.expectEqualStrings(expected, entry.?.brief_description.?); -} - -test "skip unknown root-level keys like header" { - var arena = ArenaAllocator.init(std.testing.allocator); - const allocator = arena.allocator(); - defer arena.deinit(); - - // JSON with all unknown root-level keys that should be skipped - const json_source = - \\{ - \\ "header": { - \\ "version_major": 4, - \\ "version_minor": 5 - \\ }, - \\ "builtin_class_sizes": [ - \\ { - \\ "build_configuration": "float_32", - \\ "sizes": [ - \\ { - \\ "name": "bool", - \\ "size": 1 - \\ } - \\ ] - \\ } - \\ ], - \\ "builtin_class_member_offsets": [], - \\ "global_constants": [], - \\ "global_enums": [], - \\ "native_structures": [], - \\ "singletons": [], - \\ "classes": [ - \\ { - \\ "name": "Node2D" - \\ } - \\ ] - \\} - ; - - var json_scanner = Scanner.initCompleteInput(allocator, json_source); - const db = try DocDatabase.loadFromJsonLeaky(allocator, &json_scanner); - - // Should successfully parse despite unknown keys - const entry = db.symbols.get("Node2D"); - try std.testing.expect(entry != null); - try std.testing.expectEqualStrings("Node2D", entry.?.name); -} - -test "skip unknown method fields like return_type" { - var arena = ArenaAllocator.init(std.testing.allocator); - const allocator = arena.allocator(); - defer arena.deinit(); - - // JSON with method containing fields beyond just "name" - const json_source = - \\{ - \\ "utility_functions": [ - \\ { - \\ "name": "sin", - \\ "return_type": 
"float", - \\ "category": "math", - \\ "is_vararg": false, - \\ "hash": 12345, - \\ "arguments": [] - \\ } - \\ ] - \\} - ; - - var json_scanner = Scanner.initCompleteInput(allocator, json_source); - const db = try DocDatabase.loadFromJsonLeaky(allocator, &json_scanner); - - // Should successfully parse method despite unknown fields - const entry = db.symbols.get("sin"); - try std.testing.expect(entry != null); - try std.testing.expectEqualStrings("sin", entry.?.name); - try std.testing.expectEqual(EntryKind.global_function, entry.?.kind); -} - -test "skip unknown class fields like api_type" { - var arena = ArenaAllocator.init(std.testing.allocator); - const allocator = arena.allocator(); - defer arena.deinit(); - - // JSON with class containing fields beyond name/methods/brief_description - const json_source = - \\{ - \\ "classes": [ - \\ { - \\ "name": "Node2D", - \\ "api_type": "core", - \\ "inherits": "CanvasItem", - \\ "is_instantiable": true, - \\ "is_refcounted": false, - \\ "description": "A 2D game object.", - \\ "enums": [] - \\ } - \\ ] - \\} - ; - - var json_scanner = Scanner.initCompleteInput(allocator, json_source); - const db = try DocDatabase.loadFromJsonLeaky(allocator, &json_scanner); - - // Should successfully parse class despite unknown fields - const entry = db.symbols.get("Node2D"); - try std.testing.expect(entry != null); - try std.testing.expectEqualStrings("Node2D", entry.?.name); - try std.testing.expectEqual(EntryKind.class, entry.?.kind); -} - -// RED PHASE: Test parsing properties from class -test "parse class with properties array" { - var arena = ArenaAllocator.init(std.testing.allocator); - const allocator = arena.allocator(); - defer arena.deinit(); - - const json_source = - \\{ - \\ "classes": [ - \\ { - \\ "name": "Node2D", - \\ "properties": [ - \\ { - \\ "name": "position", - \\ "type": "Vector2", - \\ "setter": "set_position", - \\ "getter": "get_position" - \\ } - \\ ] - \\ } - \\ ] - \\} - ; - - var json_scanner = 
Scanner.initCompleteInput(allocator, json_source); - const db = try DocDatabase.loadFromJsonLeaky(allocator, &json_scanner); - - // Verify the class has members with the property - const class_entry = db.symbols.get("Node2D"); - try std.testing.expect(class_entry != null); - try std.testing.expect(class_entry.?.members != null); - try std.testing.expectEqual(@as(usize, 1), class_entry.?.members.?.len); - - // Verify the property entry exists - const property_entry = db.symbols.get("Node2D.position"); - try std.testing.expect(property_entry != null); - try std.testing.expectEqualStrings("position", property_entry.?.name); - try std.testing.expectEqualStrings("Node2D.position", property_entry.?.key); - try std.testing.expectEqual(EntryKind.property, property_entry.?.kind); - - // Verify property is in the class members - const member = db.symbols.values()[class_entry.?.members.?[0]]; - try std.testing.expectEqualStrings("position", member.name); - try std.testing.expectEqual(EntryKind.property, member.kind); -} - -// RED PHASE: Test parsing signals from class -test "parse class with signals array" { - var arena = ArenaAllocator.init(std.testing.allocator); - const allocator = arena.allocator(); - defer arena.deinit(); - - const json_source = - \\{ - \\ "classes": [ - \\ { - \\ "name": "Area2D", - \\ "signals": [ - \\ { - \\ "name": "body_entered", - \\ "description": "Emitted when a body enters." 
- \\ } - \\ ] - \\ } - \\ ] - \\} - ; - - var json_scanner = Scanner.initCompleteInput(allocator, json_source); - const db = try DocDatabase.loadFromJsonLeaky(allocator, &json_scanner); - - // Verify the class has members with the signal - const class_entry = db.symbols.get("Area2D"); - try std.testing.expect(class_entry != null); - try std.testing.expect(class_entry.?.members != null); - try std.testing.expectEqual(@as(usize, 1), class_entry.?.members.?.len); - - // Verify the signal entry exists - const signal_entry = db.symbols.get("Area2D.body_entered"); - try std.testing.expect(signal_entry != null); - try std.testing.expectEqualStrings("body_entered", signal_entry.?.name); - try std.testing.expectEqualStrings("Area2D.body_entered", signal_entry.?.key); - try std.testing.expectEqual(EntryKind.signal, signal_entry.?.kind); - - // Verify signal is in the class members - const member = db.symbols.values()[class_entry.?.members.?[0]]; - try std.testing.expectEqualStrings("body_entered", member.name); - try std.testing.expectEqual(EntryKind.signal, member.kind); -} - -// RED PHASE: Test parsing constants from class -test "parse class with constants array" { - var arena = ArenaAllocator.init(std.testing.allocator); - const allocator = arena.allocator(); - defer arena.deinit(); - - const json_source = - \\{ - \\ "classes": [ - \\ { - \\ "name": "Node", - \\ "constants": [ - \\ { - \\ "name": "NOTIFICATION_READY", - \\ "value": 30 - \\ } - \\ ] - \\ } - \\ ] - \\} - ; - - var json_scanner = Scanner.initCompleteInput(allocator, json_source); - const db = try DocDatabase.loadFromJsonLeaky(allocator, &json_scanner); - - // Verify the class has members with the constant - const class_entry = db.symbols.get("Node"); - try std.testing.expect(class_entry != null); - try std.testing.expect(class_entry.?.members != null); - try std.testing.expectEqual(@as(usize, 1), class_entry.?.members.?.len); - - // Verify the constant entry exists - const constant_entry = 
db.symbols.get("Node.NOTIFICATION_READY"); - try std.testing.expect(constant_entry != null); - try std.testing.expectEqualStrings("NOTIFICATION_READY", constant_entry.?.name); - try std.testing.expectEqualStrings("Node.NOTIFICATION_READY", constant_entry.?.key); - try std.testing.expectEqual(EntryKind.constant, constant_entry.?.kind); - - // Verify constant is in the class members - const member = db.symbols.values()[class_entry.?.members.?[0]]; - try std.testing.expectEqualStrings("NOTIFICATION_READY", member.name); - try std.testing.expectEqual(EntryKind.constant, member.kind); -} - -// RED PHASE: Test parsing enums from class -test "parse class with enums array" { - var arena = ArenaAllocator.init(std.testing.allocator); - const allocator = arena.allocator(); - defer arena.deinit(); - - const json_source = - \\{ - \\ "classes": [ - \\ { - \\ "name": "AESContext", - \\ "enums": [ - \\ { - \\ "name": "Mode", - \\ "is_bitfield": false, - \\ "values": [ - \\ { - \\ "name": "MODE_ECB_ENCRYPT", - \\ "value": 0 - \\ } - \\ ] - \\ } - \\ ] - \\ } - \\ ] - \\} - ; - - var json_scanner = Scanner.initCompleteInput(allocator, json_source); - const db = try DocDatabase.loadFromJsonLeaky(allocator, &json_scanner); - - // Verify the class has members with the enum - const class_entry = db.symbols.get("AESContext"); - try std.testing.expect(class_entry != null); - try std.testing.expect(class_entry.?.members != null); - try std.testing.expectEqual(@as(usize, 1), class_entry.?.members.?.len); - - // Verify the enum value entry exists - const enum_entry = db.symbols.get("AESContext.Mode"); - try std.testing.expect(enum_entry != null); - try std.testing.expectEqualStrings("Mode", enum_entry.?.name); - try std.testing.expectEqualStrings("AESContext.Mode", enum_entry.?.key); - try std.testing.expectEqual(EntryKind.enum_value, enum_entry.?.kind); - - // Verify enum value is in the class members - const member = db.symbols.values()[class_entry.?.members.?[0]]; - try 
std.testing.expectEqualStrings("Mode", member.name); - try std.testing.expectEqual(EntryKind.enum_value, member.kind); -} - // RED PHASE: Tests for DocDatabase.generateMarkdownForSymbol using snapshot testing test "generateMarkdownForSymbol for global function" { const allocator = std.testing.allocator; @@ -1823,15 +1017,38 @@ test "loadFromXmlDir registers GlobalScope functions as top-level entries" { try std.testing.expectEqual(EntryKind.global_function, top_entry.kind); } +test "loadFromXmlDir converts BBCode descriptions to Markdown" { + const allocator = std.testing.allocator; + + var tmp_dir = std.testing.tmpDir(.{}); + defer tmp_dir.cleanup(); + const tmp_path = try tmp_dir.dir.realpathAlloc(allocator, "."); + defer allocator.free(tmp_path); + + const xml_content = + \\ + \\ + \\ Has [b]bold[/b] text. + \\ Uses [code]code[/code] and [i]italic[/i]. + \\ + ; + try tmp_dir.dir.writeFile(.{ .sub_path = "TestBBCode.xml", .data = xml_content }); + + var arena = std.heap.ArenaAllocator.init(allocator); + defer arena.deinit(); + const db = try DocDatabase.loadFromXmlDir(arena.allocator(), allocator, tmp_path); + const entry = db.symbols.get("TestBBCode").?; + + // BBCode should be converted to Markdown + try std.testing.expect(std.mem.indexOf(u8, entry.brief_description.?, "**bold**") != null); + try std.testing.expect(std.mem.indexOf(u8, entry.description.?, "`code`") != null); +} + const std = @import("std"); const ArenaAllocator = std.heap.ArenaAllocator; const Allocator = std.mem.Allocator; -const Scanner = std.json.Scanner; -const Reader = std.json.Reader; -const Token = std.json.Token; const StringArrayHashMap = std.StringArrayHashMapUnmanaged; const ArrayList = std.ArrayListUnmanaged; -const File = std.fs.File; const Writer = std.Io.Writer; const bbcodez = @import("bbcodez"); From 535e9d34772a85a28556754861b417b4df21c1a7 Mon Sep 17 00:00:00 2001 From: Simon Hartcher Date: Sat, 21 Mar 2026 20:30:31 +1100 Subject: [PATCH 15/19] refactor: remove JSON codepaths, 
simplify to XML-only architecture Remove api.zig, the no_xml config field, getJsonCachePathInDir, xmlDocsArePopulated, mergeXmlDocs, fetchXmlDocs, and all JSON-direct-load codepaths. The cache rebuild flow now exclusively uses XML docs via DocDatabase.loadFromXmlDir. Update cacheIsPopulated to check xml_docs/.complete marker + Object/index.md sentinel. Remove godot-extension-api CLI flag and associated error handlers. --- src/Config.zig | 4 - src/api.zig | 112 ---------- src/cache.zig | 94 ++++----- src/cli/root.zig | 16 +- src/root.zig | 523 +++++++---------------------------------------- 5 files changed, 113 insertions(+), 636 deletions(-) delete mode 100644 src/api.zig diff --git a/src/Config.zig b/src/Config.zig index c38b759..e29c2d8 100644 --- a/src/Config.zig +++ b/src/Config.zig @@ -3,7 +3,6 @@ const known_folders = @import("known-folders"); const Config = @This(); -no_xml: bool, cache_dir: []const u8, pub fn init(allocator: std.mem.Allocator) !Config { @@ -18,7 +17,6 @@ pub fn init(allocator: std.mem.Allocator) !Config { }; return .{ - .no_xml = hasEnv("GDOC_NO_XML"), .cache_dir = cache_dir, }; } @@ -28,7 +26,6 @@ pub fn deinit(self: Config, allocator: std.mem.Allocator) void { } pub const testing: Config = .{ - .no_xml = true, .cache_dir = "/tmp/gdoc-test-cache", }; @@ -45,6 +42,5 @@ test "init" { } test "testing config" { - try std.testing.expect(Config.testing.no_xml); try std.testing.expectEqualStrings("/tmp/gdoc-test-cache", Config.testing.cache_dir); } diff --git a/src/api.zig b/src/api.zig deleted file mode 100644 index 69b7552..0000000 --- a/src/api.zig +++ /dev/null @@ -1,112 +0,0 @@ -pub fn generateApiJsonIfNotExists(allocator: Allocator, godot_path: []const u8, destination_dir: []const u8) !void { - const json_path = try cache.getJsonCachePathInDir(allocator, destination_dir); - defer allocator.free(json_path); - - if (std.fs.openFileAbsolute(json_path, .{}) catch |err| switch (err) { - error.FileNotFound => null, - else => return err, - }) 
|json_file| { - json_file.close(); - return; - } - - const result = try Child.run(.{ - .cwd = destination_dir, - .argv = &[_][]const u8{ godot_path, "--dump-extension-api-with-docs", "--headless" }, - .allocator = allocator, - }); - defer allocator.free(result.stdout); - defer allocator.free(result.stderr); - - switch (result.term) { - .Exited => |code| { - if (code != 0) { - return error.GodotExecutionFailed; - } - }, - else => return error.GodotExecutionFailed, - } -} - -// Tests for generateApiJson function - -test "generateApiJson executes godot and creates extension_api.json in cache" { - const allocator = std.testing.allocator; - - // Create a fake godot script that creates extension_api.json - // We'll use a shell script to simulate godot's behavior - var tmp_dir = std.testing.tmpDir(.{}); - defer tmp_dir.cleanup(); - - const tmp_path = try tmp_dir.dir.realpathAlloc(allocator, "."); - defer allocator.free(tmp_path); - - const fake_godot = try std.fmt.allocPrint(allocator, "{s}/fake-godot.sh", .{tmp_path}); - defer allocator.free(fake_godot); - - // Create fake godot script that writes extension_api.json - const script_content = - \\#!/bin/sh - \\echo '{"version": "test"}' > extension_api.json - ; - - try tmp_dir.dir.writeFile(.{ .sub_path = "fake-godot.sh", .data = script_content }); - - // Make it executable - var file = try tmp_dir.dir.openFile("fake-godot.sh", .{}); - try file.chmod(0o755); - file.close(); - - // Generate the API JSON - try generateApiJsonIfNotExists(allocator, fake_godot, tmp_path); - - // Verify the JSON file was created in cache directory - const json_path = try std.fmt.allocPrint(allocator, "{s}/extension_api.json", .{tmp_path}); - defer allocator.free(json_path); - - const json_data = try std.fs.cwd().readFileAlloc(allocator, json_path, 1024 * 1024); - defer allocator.free(json_data); - - // Should contain the test JSON - try std.testing.expect(std.mem.indexOf(u8, json_data, "test") != null); -} - -test "generateApiJson returns error 
when godot executable not found" { - const allocator = std.testing.allocator; - - var tmp_dir = std.testing.tmpDir(.{}); - defer tmp_dir.cleanup(); - - const tmp_path = try tmp_dir.dir.realpathAlloc(allocator, "."); - defer allocator.free(tmp_path); - - const non_existant_godot = try std.fmt.allocPrint(allocator, "{s}/godot", .{tmp_path}); - defer allocator.free(non_existant_godot); - - const result = generateApiJsonIfNotExists(allocator, non_existant_godot, tmp_path); - - try std.testing.expectError(error.FileNotFound, result); -} - -test "generateApiJson returns error on non-zero exit code" { - const allocator = std.testing.allocator; - - var tmp_dir = std.testing.tmpDir(.{}); - defer tmp_dir.cleanup(); - - const tmp_path = try tmp_dir.dir.realpathAlloc(allocator, "."); - defer allocator.free(tmp_path); - - // Use 'false' command which always exits with code 1 - const result = generateApiJsonIfNotExists(allocator, "false", tmp_path); - - try std.testing.expectError(error.GodotExecutionFailed, result); -} - -const std = @import("std"); -const Allocator = std.mem.Allocator; -const Child = std.process.Child; - -const cache = @import("cache.zig"); - -const DocDatabase = @import("DocDatabase.zig"); diff --git a/src/cache.zig b/src/cache.zig index c289711..9259417 100644 --- a/src/cache.zig +++ b/src/cache.zig @@ -17,14 +17,6 @@ pub fn ensureDirectoryExists(dir_path: []const u8) !void { defer dir.close(); } -pub fn getJsonCachePathInDir(allocator: Allocator, cache_dir: []const u8) ![]const u8 { - return std.fmt.allocPrint( - allocator, - "{f}", - .{std.fs.path.fmtJoin(&[_][]const u8{ cache_dir, "extension_api.json" })}, - ); -} - pub fn resolveSymbolPath(allocator: Allocator, cache_path: []const u8, symbol: []const u8) ![]const u8 { // dot notation if (std.mem.indexOf(u8, symbol, ".")) |dot_pos| { @@ -105,23 +97,25 @@ pub fn generateMarkdownCache(allocator: Allocator, db: DocDatabase, cache_path: } pub fn cacheIsPopulated(allocator: Allocator, cache_path: []const u8) 
!bool { - const json_file_path = try getJsonCachePathInDir(allocator, cache_path); - defer allocator.free(json_file_path); + // Check xml_docs/.complete marker + const xml_dir = try getXmlDocsDirInCache(allocator, cache_path); + defer allocator.free(xml_dir); - const json_file = std.fs.openFileAbsolute(json_file_path, .{}) catch |err| switch (err) { - error.FileNotFound => return false, - else => return err, - }; - defer json_file.close(); + if (source_fetch.readCompleteMarker(allocator, xml_dir)) |m| { + allocator.free(m); + } else { + return false; + } - const node_path = try resolveSymbolPath(allocator, cache_path, "Node"); - defer allocator.free(node_path); + // Check Object/index.md sentinel + const object_path = try resolveSymbolPath(allocator, cache_path, "Object"); + defer allocator.free(object_path); - const node_file = std.fs.openFileAbsolute(node_path, .{}) catch |err| switch (err) { + const object_file = std.fs.openFileAbsolute(object_path, .{}) catch |err| switch (err) { error.FileNotFound => return false, else => return err, }; - defer node_file.close(); + object_file.close(); return true; } @@ -134,18 +128,6 @@ pub fn getXmlDocsDirInCache(allocator: Allocator, cache_dir: []const u8) ![]cons ); } -pub fn xmlDocsArePopulated(allocator: Allocator, cache_dir: []const u8) !bool { - const xml_dir = try getXmlDocsDirInCache(allocator, cache_dir); - defer allocator.free(xml_dir); - - const marker = source_fetch.readCompleteMarker(allocator, xml_dir); - if (marker) |m| { - allocator.free(m); - return true; - } - return false; -} - test "testing config has valid cache directory" { try std.testing.expect(Config.testing.cache_dir.len > 0); try std.testing.expect(std.mem.indexOf(u8, Config.testing.cache_dir, "gdoc") != null); @@ -208,12 +190,12 @@ test "clearCache deletes cache directory" { // Ensure cache directory exists try ensureDirectoryExists(cache_dir); - // Create JSON file and markdown files - const json_path = try getJsonCachePathInDir(allocator, 
cache_dir); - defer allocator.free(json_path); + // Create a dummy file + const dummy_path = try std.fmt.allocPrint(allocator, "{s}/dummy.txt", .{cache_dir}); + defer allocator.free(dummy_path); - var json_file = try std.fs.createFileAbsolute(json_path, .{}); - json_file.close(); + var dummy_file = try std.fs.createFileAbsolute(dummy_path, .{}); + dummy_file.close(); const node_dir = try std.fmt.allocPrint(allocator, "{s}/Node", .{cache_dir}); defer allocator.free(node_dir); @@ -224,7 +206,7 @@ test "clearCache deletes cache directory" { try std.fs.cwd().writeFile(.{ .sub_path = index_path, .data = "# Node\n" }); // Verify files exist - _ = try std.fs.openFileAbsolute(json_path, .{}); + _ = try std.fs.openFileAbsolute(dummy_path, .{}); _ = try std.fs.openFileAbsolute(index_path, .{}); // Clear cache @@ -671,7 +653,7 @@ test "cacheIsPopulated returns false for nonexistent directory" { try std.testing.expect(!result); } -test "cacheIsPopulated returns true when cache has markdown files" { +test "cacheIsPopulated returns true when cache has xml marker and Object sentinel" { const allocator = std.testing.allocator; var tmp_dir = std.testing.tmpDir(.{}); @@ -680,26 +662,27 @@ test "cacheIsPopulated returns true when cache has markdown files" { const cache_dir = try tmp_dir.dir.realpathAlloc(allocator, "."); defer allocator.free(cache_dir); - // Create the JSON file (required by implementation) - const json_path = try getJsonCachePathInDir(allocator, cache_dir); - defer allocator.free(json_path); - try std.fs.cwd().writeFile(.{ .sub_path = json_path, .data = "{}" }); + // Create xml_docs/.complete marker + const xml_dir = try getXmlDocsDirInCache(allocator, cache_dir); + defer allocator.free(xml_dir); + try std.fs.makeDirAbsolute(xml_dir); + source_fetch.writeCompleteMarker(allocator, xml_dir, "4.3.stable") catch unreachable; - // Create Node symbol directory with markdown file - const node_dir = try std.fmt.allocPrint(allocator, "{s}/Node", .{cache_dir}); - defer 
allocator.free(node_dir); - try std.fs.makeDirAbsolute(node_dir); + // Create Object symbol directory with markdown file + const object_dir = try std.fmt.allocPrint(allocator, "{s}/Object", .{cache_dir}); + defer allocator.free(object_dir); + try std.fs.makeDirAbsolute(object_dir); - const index_path = try std.fmt.allocPrint(allocator, "{s}/index.md", .{node_dir}); + const index_path = try std.fmt.allocPrint(allocator, "{s}/index.md", .{object_dir}); defer allocator.free(index_path); - try std.fs.cwd().writeFile(.{ .sub_path = index_path, .data = "# Node\n" }); + try std.fs.cwd().writeFile(.{ .sub_path = index_path, .data = "# Object\n" }); - // Should return true since cache has both JSON and markdown files + // Should return true since cache has both xml marker and Object sentinel const result = try cacheIsPopulated(allocator, cache_dir); try std.testing.expect(result); } -test "cacheIsPopulated returns false when only extension_api.json exists" { +test "cacheIsPopulated returns false when only xml marker exists" { const allocator = std.testing.allocator; var tmp_dir = std.testing.tmpDir(.{}); @@ -708,12 +691,13 @@ test "cacheIsPopulated returns false when only extension_api.json exists" { const cache_dir = try tmp_dir.dir.realpathAlloc(allocator, "."); defer allocator.free(cache_dir); - // Create only the JSON file, no markdown files - const json_path = try std.fmt.allocPrint(allocator, "{s}/extension_api.json", .{cache_dir}); - defer allocator.free(json_path); - try std.fs.cwd().writeFile(.{ .sub_path = json_path, .data = "{}" }); + // Create xml_docs/.complete marker but no Object sentinel + const xml_dir = try getXmlDocsDirInCache(allocator, cache_dir); + defer allocator.free(xml_dir); + try std.fs.makeDirAbsolute(xml_dir); + source_fetch.writeCompleteMarker(allocator, xml_dir, "4.3.stable") catch unreachable; - // Should return false - JSON alone doesn't mean cache is populated + // Should return false - xml marker alone doesn't mean cache is populated const 
result = try cacheIsPopulated(allocator, cache_dir); try std.testing.expect(!result); } diff --git a/src/cli/root.zig b/src/cli/root.zig index 0546b81..93b4e27 100644 --- a/src/cli/root.zig +++ b/src/cli/root.zig @@ -18,13 +18,6 @@ pub fn build(allocator: Allocator, writer: *Writer, reader: *Reader) !*Command { .default_value = .{ .Bool = false }, }); - try root.addFlag(.{ - .name = "godot-extension-api", - .description = "Path to Godot extension_api.json file (bypasses cache)", - .type = .String, - .default_value = .{ .String = "" }, - }); - try root.addFlag(.{ .name = "output-format", .description = "Output format (markdown or terminal). Defaults to terminal for TTY, markdown otherwise.", @@ -44,14 +37,11 @@ pub fn build(allocator: Allocator, writer: *Writer, reader: *Reader) !*Command { fn runLookup(ctx: CommandContext) !void { const clear_cache = ctx.flag("clear-cache", bool); - const api_json_path_raw = ctx.flag("godot-extension-api", []const u8); - const api_json_path: ?[]const u8 = if (api_json_path_raw.len == 0) null else api_json_path_raw; - const output_format_raw = ctx.flag("output-format", []const u8); const output_format: OutputFormat = std.meta.stringToEnum(OutputFormat, output_format_raw) orelse .detect; // print help when no arguments/flags are provided - if (!clear_cache and ctx.positional_args.len == 0 and api_json_path == null) { + if (!clear_cache and ctx.positional_args.len == 0) { try ctx.command.printHelp(); return; } @@ -65,10 +55,8 @@ fn runLookup(ctx: CommandContext) !void { const symbol = ctx.getArg("symbol") orelse return; - gdoc.formatAndDisplay(ctx.allocator, symbol, api_json_path, ctx.writer, output_format, config) catch |err| switch (err) { + gdoc.formatAndDisplay(ctx.allocator, symbol, ctx.writer, output_format, config) catch |err| switch (err) { DocDatabaseError.SymbolNotFound => try ctx.writer.print("Symbol '{s}' not found.\n", .{symbol}), - error.ApiFileNotFound => try ctx.writer.print("Error: API file not found: {s}\n", 
.{api_json_path.?}), - error.InvalidApiJson => try ctx.writer.print("Error: Invalid JSON in API file: {s}\n", .{api_json_path.?}), else => return err, }; diff --git a/src/root.zig b/src/root.zig index e85784e..3e2ecf1 100644 --- a/src/root.zig +++ b/src/root.zig @@ -4,83 +4,83 @@ pub const OutputFormat = enum { detect, }; -pub const LookupError = error{ - ApiFileNotFound, -} || Writer.Error || DocDatabase.Error || File.OpenError; +pub const LookupError = Writer.Error || DocDatabase.Error || File.OpenError; -pub fn markdownForSymbol(allocator: Allocator, symbol: []const u8, api_json_path: ?[]const u8, writer: *Writer, config: *const Config) !void { +pub fn markdownForSymbol(allocator: Allocator, symbol: []const u8, writer: *Writer, config: *const Config) !void { var arena = ArenaAllocator.init(allocator); defer arena.deinit(); - const api_json_file = if (api_json_path) |path| std.fs.cwd().openFile(path, .{}) catch |err| switch (err) { - error.FileNotFound => return LookupError.ApiFileNotFound, - else => return err, - } else null; - defer if (api_json_file) |f| f.close(); - - if (api_json_file) |f| { - const db = try DocDatabase.loadFromJsonFileLeaky(arena.allocator(), f); - const entry = try db.lookupSymbolExact(symbol); - - try writer.print("# {s}\n", .{symbol}); - - if (entry.brief_description) |brief| { - try writer.print("\n{s}\n", .{brief}); - } - - if (entry.description) |desc| { - try writer.print("\n## Description\n\n{s}\n", .{desc}); - } - } else { - const cache_path = config.cache_dir; - - const needs_full_rebuild = !try cache.cacheIsPopulated(allocator, cache_path); - - if (needs_full_rebuild) { - try cache.ensureDirectoryExists(cache_path); - try api.generateApiJsonIfNotExists(allocator, "godot", cache_path); - - // Fetch XML docs if missing (best-effort, requires godot) - if (!config.no_xml and !try cache.xmlDocsArePopulated(allocator, cache_path)) { - fetchXmlDocs(allocator, cache_path); + const cache_path = config.cache_dir; + + const 
needs_full_rebuild = !try cache.cacheIsPopulated(allocator, cache_path); + + if (needs_full_rebuild) { + try cache.ensureDirectoryExists(cache_path); + + const xml_dir = try cache.getXmlDocsDirInCache(allocator, cache_path); + defer allocator.free(xml_dir); + try cache.ensureDirectoryExists(xml_dir); + + const version = source_fetch.getGodotVersion(allocator) orelse + return error.GodotNotFound; + defer version.deinit(allocator); + + var url_buf: [256]u8 = undefined; + const url = source_fetch.buildTarballUrl(&url_buf, version) orelse + return error.GodotNotFound; + + var spinner = Spinner{ .message = "Downloading XML docs..." }; + spinner.start(); + + source_fetch.fetchAndExtractXmlDocs(allocator, url, xml_dir) catch |err| { + if (version.hash) |hash| { + var hash_url_buf: [256]u8 = undefined; + const hash_url = source_fetch.buildTarballUrlFromHash(&hash_url_buf, hash) orelse { + spinner.finish(); + return err; + }; + source_fetch.fetchAndExtractXmlDocs(allocator, hash_url, xml_dir) catch { + spinner.finish(); + return err; + }; + } else { + spinner.finish(); + return err; } + }; - var spinner: Spinner = .{ .message = "Building documentation cache..." }; - if (!config.no_xml) spinner.start(); - defer spinner.finish(); - - const json_path = try cache.getJsonCachePathInDir(allocator, cache_path); - defer allocator.free(json_path); - - const json_file = try std.fs.openFileAbsolute(json_path, .{}); - defer json_file.close(); - - var db = try DocDatabase.loadFromJsonFileLeaky(arena.allocator(), json_file); + spinner.finish(); - mergeXmlDocs(arena.allocator(), allocator, &db, cache_path); + var version_buf: [64]u8 = undefined; + const version_str = version.formatVersion(&version_buf) orelse return error.GodotNotFound; + source_fetch.writeCompleteMarker(allocator, xml_dir, version_str) catch return error.GodotNotFound; - try cache.generateMarkdownCache(allocator, db, cache_path); - } + var build_spinner = Spinner{ .message = "Building documentation cache..." 
}; + build_spinner.start(); + defer build_spinner.finish(); - try cache.readSymbolMarkdown(allocator, symbol, cache_path, writer); + const db = try DocDatabase.loadFromXmlDir(arena.allocator(), allocator, xml_dir); + try cache.generateMarkdownCache(allocator, db, cache_path); } + + try cache.readSymbolMarkdown(allocator, symbol, cache_path, writer); } -pub fn formatAndDisplay(allocator: Allocator, symbol: []const u8, api_json_path: ?[]const u8, writer: *Writer, format: OutputFormat, config: *const Config) !void { +pub fn formatAndDisplay(allocator: Allocator, symbol: []const u8, writer: *Writer, format: OutputFormat, config: *const Config) !void { switch (format) { - .markdown => try markdownForSymbol(allocator, symbol, api_json_path, writer, config), - .terminal => try renderWithZigdown(allocator, symbol, api_json_path, writer, config), + .markdown => try markdownForSymbol(allocator, symbol, writer, config), + .terminal => try renderWithZigdown(allocator, symbol, writer, config), .detect => { - try formatAndDisplay(allocator, symbol, api_json_path, writer, if (File.stdout().isTty()) .terminal else .markdown, config); + try formatAndDisplay(allocator, symbol, writer, if (File.stdout().isTty()) .terminal else .markdown, config); }, } } -fn renderWithZigdown(allocator: Allocator, symbol: []const u8, api_json_path: ?[]const u8, writer: *Writer, config: *const Config) !void { +fn renderWithZigdown(allocator: Allocator, symbol: []const u8, writer: *Writer, config: *const Config) !void { var markdown_buf: AllocatingWriter = .init(allocator); defer markdown_buf.deinit(); - try markdownForSymbol(allocator, symbol, api_json_path, &markdown_buf.writer, config); + try markdownForSymbol(allocator, symbol, &markdown_buf.writer, config); const markdown = markdown_buf.written(); renderMarkdownWithZigdown(allocator, markdown, writer) catch |err| { @@ -109,141 +109,6 @@ fn renderMarkdownWithZigdown(allocator: Allocator, markdown: []const u8, writer: try 
renderer.renderBlock(parser.document); } -test "markdownForSymbol returns ApiFileNotFound for nonexistent file" { - const allocator = std.testing.allocator; - - // Create a temporary directory for test - var tmp_dir = std.testing.tmpDir(.{}); - defer tmp_dir.cleanup(); - - const tmp_path = try tmp_dir.dir.realpathAlloc(allocator, "."); - defer allocator.free(tmp_path); - - const nonexistent_path = try std.fmt.allocPrint(allocator, "{s}/nonexistent.json", .{tmp_path}); - defer allocator.free(nonexistent_path); - - // Create a discarding writer (we don't care about output for this error test) - var buf: [4096]u8 = undefined; - var discard = std.io.Writer.Discarding.init(&buf); - - const result = markdownForSymbol(allocator, "Node2D", nonexistent_path, &discard.writer, &Config.testing); - - try std.testing.expectError(LookupError.ApiFileNotFound, result); -} - -test "markdownForSymbol returns InvalidApiJson for malformed JSON" { - const allocator = std.testing.allocator; - - var tmp_dir = std.testing.tmpDir(.{}); - defer tmp_dir.cleanup(); - - const tmp_path = try tmp_dir.dir.realpathAlloc(allocator, "."); - defer allocator.free(tmp_path); - - const bad_api_path = try std.fmt.allocPrint(allocator, "{s}/bad_api.json", .{tmp_path}); - defer allocator.free(bad_api_path); - - // Write invalid JSON - try tmp_dir.dir.writeFile(.{ .sub_path = "bad_api.json", .data = "{ invalid json" }); - - var buf: [4096]u8 = undefined; - var discard = std.io.Writer.Discarding.init(&buf); - - const result = markdownForSymbol(allocator, "Node2D", bad_api_path, &discard.writer, &Config.testing); - - try std.testing.expectError(DocDatabase.Error.InvalidApiJson, result); -} - -test "markdownForSymbol loads from custom API file and finds symbol" { - const allocator = std.testing.allocator; - - var tmp_dir = std.testing.tmpDir(.{}); - defer tmp_dir.cleanup(); - - const tmp_path = try tmp_dir.dir.realpathAlloc(allocator, "."); - defer allocator.free(tmp_path); - - const api_path = try 
std.fmt.allocPrint(allocator, "{s}/test_api.json", .{tmp_path}); - defer allocator.free(api_path); - - // Write minimal valid API JSON with a test class - const test_json = - \\{"builtin_classes": [{"name": "TestClass", "brief_description": "A test class"}]} - ; - try tmp_dir.dir.writeFile(.{ .sub_path = "test_api.json", .data = test_json }); - - var allocating_writer = std.Io.Writer.Allocating.init(allocator); - defer allocating_writer.deinit(); - - // Should successfully load and display TestClass - try markdownForSymbol(allocator, "TestClass", api_path, &allocating_writer.writer, &Config.testing); - - // Verify something was written to output - const written = try allocating_writer.toOwnedSlice(); - defer allocator.free(written); - try std.testing.expect(written.len > 0); -} - -test "markdownForSymbol returns SymbolNotFound when symbol doesn't exist" { - const allocator = std.testing.allocator; - - var tmp_dir = std.testing.tmpDir(.{}); - defer tmp_dir.cleanup(); - - const tmp_path = try tmp_dir.dir.realpathAlloc(allocator, "."); - defer allocator.free(tmp_path); - - const api_path = try std.fmt.allocPrint(allocator, "{s}/test_api.json", .{tmp_path}); - defer allocator.free(api_path); - - // Write valid API JSON but without the symbol we're looking for - const test_json = - \\{"builtin_classes": [{"name": "TestClass", "brief_description": "A test class"}]} - ; - try tmp_dir.dir.writeFile(.{ .sub_path = "test_api.json", .data = test_json }); - - var buf: [4096]u8 = undefined; - var discard = std.io.Writer.Discarding.init(&buf); - - // Try to look up NonExistentClass which is not in the API - const result = markdownForSymbol(allocator, "NonExistentClass", api_path, &discard.writer, &Config.testing); - - try std.testing.expectError(DocDatabase.Error.SymbolNotFound, result); -} - -test "markdownForSymbol works with relative path" { - const allocator = std.testing.allocator; - - var tmp_dir = std.testing.tmpDir(.{}); - defer tmp_dir.cleanup(); - - // Write minimal 
valid API JSON - const test_json = - \\{"builtin_classes": [{"name": "TestClass", "brief_description": "A test class"}]} - ; - try tmp_dir.dir.writeFile(.{ .sub_path = "test_api.json", .data = test_json }); - - const tmp_path = try tmp_dir.dir.realpathAlloc(allocator, "."); - defer allocator.free(tmp_path); - - // Change to tmp directory and use relative path - const original_cwd = try std.fs.cwd().realpathAlloc(allocator, "."); - defer allocator.free(original_cwd); - defer std.posix.chdir(original_cwd) catch {}; - - try std.posix.chdir(tmp_path); - - var allocating_writer = std.Io.Writer.Allocating.init(allocator); - defer allocating_writer.deinit(); - - // Use relative path - try markdownForSymbol(allocator, "TestClass", "test_api.json", &allocating_writer.writer, &Config.testing); - - const written = try allocating_writer.toOwnedSlice(); - defer allocator.free(written); - try std.testing.expect(written.len > 0); -} - test "OutputFormat enum has markdown and terminal values" { // Test that OutputFormat enum exists with expected values const format_markdown: OutputFormat = .markdown; @@ -253,71 +118,7 @@ test "OutputFormat enum has markdown and terminal values" { try std.testing.expect(format_terminal == .terminal); } -test "formatAndDisplay with markdown format produces markdown output" { - const allocator = std.testing.allocator; - - var tmp_dir = std.testing.tmpDir(.{}); - defer tmp_dir.cleanup(); - - const tmp_path = try tmp_dir.dir.realpathAlloc(allocator, "."); - defer allocator.free(tmp_path); - - const api_path = try std.fmt.allocPrint(allocator, "{s}/test_api.json", .{tmp_path}); - defer allocator.free(api_path); - - // Write minimal valid API JSON with a test class - const test_json = - \\{"builtin_classes": [{"name": "TestClass", "brief_description": "A test class"}]} - ; - try tmp_dir.dir.writeFile(.{ .sub_path = "test_api.json", .data = test_json }); - - var allocating_writer = std.Io.Writer.Allocating.init(allocator); - defer 
allocating_writer.deinit(); - - // Call formatAndDisplay with markdown format - try formatAndDisplay(allocator, "TestClass", api_path, &allocating_writer.writer, .markdown, &Config.testing); - - const written = try allocating_writer.toOwnedSlice(); - defer allocator.free(written); - - // Verify markdown output was produced - try std.testing.expect(written.len > 0); - try std.testing.expect(std.mem.indexOf(u8, written, "TestClass") != null); -} - -test "formatAndDisplay with terminal format produces terminal output" { - const allocator = std.testing.allocator; - - var tmp_dir = std.testing.tmpDir(.{}); - defer tmp_dir.cleanup(); - - const tmp_path = try tmp_dir.dir.realpathAlloc(allocator, "."); - defer allocator.free(tmp_path); - - const api_path = try std.fmt.allocPrint(allocator, "{s}/test_api.json", .{tmp_path}); - defer allocator.free(api_path); - - // Write minimal valid API JSON with a test class - const test_json = - \\{"builtin_classes": [{"name": "TestClass", "brief_description": "A test class"}]} - ; - try tmp_dir.dir.writeFile(.{ .sub_path = "test_api.json", .data = test_json }); - - var allocating_writer = std.Io.Writer.Allocating.init(allocator); - defer allocating_writer.deinit(); - - // Call formatAndDisplay with terminal format - try formatAndDisplay(allocator, "TestClass", api_path, &allocating_writer.writer, .terminal, &Config.testing); - - const written = try allocating_writer.toOwnedSlice(); - defer allocator.free(written); - - // Verify terminal output was produced (should still contain the symbol name) - try std.testing.expect(written.len > 0); - try std.testing.expect(std.mem.indexOf(u8, written, "TestClass") != null); -} - -// Test verifies the normal cache flow when api_json_path is null +// Test verifies the normal cache flow when cache is pre-populated test "markdownForSymbol reads from markdown cache when available" { const allocator = std.testing.allocator; const cache_dir = Config.testing.cache_dir; @@ -328,15 +129,20 @@ test 
"markdownForSymbol reads from markdown cache when available" { // Ensure cache directory exists try cache.ensureDirectoryExists(cache_dir); - // Create extension_api.json in cache to prevent godot execution - // Use a different class name so it doesn't overwrite our test markdown - const json_path = try cache.getJsonCachePathInDir(allocator, cache_dir); - defer allocator.free(json_path); + // Create xml_docs/.complete marker to make cacheIsPopulated return true + const xml_dir = try cache.getXmlDocsDirInCache(allocator, cache_dir); + defer allocator.free(xml_dir); + try cache.ensureDirectoryExists(xml_dir); + try source_fetch.writeCompleteMarker(allocator, xml_dir, "4.3.stable"); + + // Create Object/index.md sentinel to make cacheIsPopulated return true + const object_dir = try std.fmt.allocPrint(allocator, "{s}/Object", .{cache_dir}); + defer allocator.free(object_dir); + try cache.ensureDirectoryExists(object_dir); - const test_json = - \\{"builtin_classes": [{"name": "DummyClass", "brief_description": "A dummy class"}]} - ; - try std.fs.cwd().writeFile(.{ .sub_path = json_path, .data = test_json }); + const object_index = try std.fmt.allocPrint(allocator, "{s}/index.md", .{object_dir}); + defer allocator.free(object_index); + try std.fs.cwd().writeFile(.{ .sub_path = object_index, .data = "# Object\n" }); // Pre-populate cache with a markdown file for TestCachedClass const testclass_dir = try std.fmt.allocPrint(allocator, "{s}/TestCachedClass", .{cache_dir}); @@ -352,8 +158,8 @@ test "markdownForSymbol reads from markdown cache when available" { var allocating_writer = std.Io.Writer.Allocating.init(allocator); defer allocating_writer.deinit(); - // Call markdownForSymbol with null api_json_path - should use cache - try markdownForSymbol(allocator, "TestCachedClass", null, &allocating_writer.writer, &Config.testing); + // Call markdownForSymbol - should use cache + try markdownForSymbol(allocator, "TestCachedClass", &allocating_writer.writer, &Config.testing); 
const written = allocating_writer.written(); @@ -364,190 +170,6 @@ test "markdownForSymbol reads from markdown cache when available" { cache.clearCache(&Config.testing) catch {}; } -// RED PHASE: Test for automatic cache population -// This test verifies that markdownForSymbol auto-generates the cache when empty -test "markdownForSymbol generates markdown cache when cache is empty" { - const allocator = std.testing.allocator; - const cache_dir = Config.testing.cache_dir; - - // Clear cache to start empty - cache.clearCache(&Config.testing) catch {}; - - // Ensure cache directory exists - try cache.ensureDirectoryExists(cache_dir); - - // Create a JSON file in the cache directory - const json_path = try cache.getJsonCachePathInDir(allocator, cache_dir); - defer allocator.free(json_path); - - const test_json = - \\{"builtin_classes": [{"name": "AutoGenClass", "brief_description": "Auto-generated from empty cache"}]} - ; - try std.fs.cwd().writeFile(.{ .sub_path = json_path, .data = test_json }); - - var allocating_writer = std.Io.Writer.Allocating.init(allocator); - defer allocating_writer.deinit(); - - // Call markdownForSymbol with null api_json_path - // Should auto-generate cache and return the symbol - try markdownForSymbol(allocator, "AutoGenClass", null, &allocating_writer.writer, &Config.testing); - - const written = allocating_writer.written(); - - // Verify it generated markdown for the symbol - try std.testing.expect(std.mem.indexOf(u8, written, "AutoGenClass") != null); - - // Verify the cache was actually created - const autogen_path = try std.fmt.allocPrint(allocator, "{s}/AutoGenClass/index.md", .{cache_dir}); - defer allocator.free(autogen_path); - - const cache_file = try std.fs.openFileAbsolute(autogen_path, .{}); - cache_file.close(); - - // Cleanup - cache.clearCache(&Config.testing) catch {}; -} - -fn fetchXmlDocs(allocator: Allocator, cache_path: []const u8) void { - const xml_dir = cache.getXmlDocsDirInCache(allocator, cache_path) catch return; 
- defer allocator.free(xml_dir); - - cache.ensureDirectoryExists(xml_dir) catch return; - - const version = source_fetch.getGodotVersion(allocator) orelse return; - defer version.deinit(allocator); - - var url_buf: [256]u8 = undefined; - const url = source_fetch.buildTarballUrl(&url_buf, version) orelse return; - - var spinner = Spinner{ .message = "Downloading XML docs..." }; - spinner.start(); - defer spinner.finish(); - - source_fetch.fetchAndExtractXmlDocs(allocator, url, xml_dir) catch |err| { - // Try hash-based fallback URL - if (version.hash) |hash| { - var hash_url_buf: [256]u8 = undefined; - const hash_url = source_fetch.buildTarballUrlFromHash(&hash_url_buf, hash) orelse return; - source_fetch.fetchAndExtractXmlDocs(allocator, hash_url, xml_dir) catch { - std.log.warn("XML doc fetch failed ({}), proceeding without XML supplementation", .{err}); - return; - }; - } else { - std.log.warn("XML doc fetch failed ({}), proceeding without XML supplementation", .{err}); - return; - } - }; - - var version_buf: [64]u8 = undefined; - const version_str = version.formatVersion(&version_buf) orelse return; - - source_fetch.writeCompleteMarker(allocator, xml_dir, version_str) catch return; -} - -fn mergeXmlDocs(arena_allocator: Allocator, tmp_allocator: Allocator, db: *DocDatabase, cache_path: []const u8) void { - const xml_dir = cache.getXmlDocsDirInCache(tmp_allocator, cache_path) catch return; - defer tmp_allocator.free(xml_dir); - - var dir = std.fs.openDirAbsolute(xml_dir, .{ .iterate = true }) catch return; - defer dir.close(); - - var iter = dir.iterate(); - while (iter.next() catch return) |entry| { - if (entry.kind != .file) continue; - if (!std.mem.endsWith(u8, entry.name, ".xml")) continue; - - const class_name = entry.name[0 .. 
entry.name.len - 4]; // strip .xml - - // Read XML file - const content = dir.readFileAlloc(tmp_allocator, entry.name, 2 * 1024 * 1024) catch continue; - defer tmp_allocator.free(content); - - // Parse with arena_allocator so strings outlive this function - const class_doc = XmlDocParser.parseClassDoc(arena_allocator, content) catch |err| { - std.log.warn("failed to parse XML doc for {s}: {}", .{ class_name, err }); - continue; - }; - // Do NOT freeClassDoc -- arena owns the memory - - // Merge tutorials - if (class_doc.tutorials) |tutorials| { - if (tutorials.len > 0) { - if (db.symbols.getPtr(class_name)) |db_entry| { - if (db_entry.tutorials == null) { - const db_tutorials = arena_allocator.alloc(DocDatabase.Tutorial, tutorials.len) catch continue; - for (tutorials, 0..) |t, i| { - db_tutorials[i] = .{ .title = t.title, .url = t.url }; - } - db_entry.tutorials = db_tutorials; - } - } - } - } - - // Fill missing class description - if (class_doc.description) |xml_desc| { - if (db.symbols.getPtr(class_name)) |db_entry| { - if (db_entry.description == null) { - db_entry.description = xml_desc; - } - } - } - - // Merge member descriptions (methods, properties, signals) - if (class_doc.methods) |members| { - for (members) |member| { - const member_key = std.fmt.allocPrint(tmp_allocator, "{s}.{s}", .{ class_name, member.name }) catch continue; - defer tmp_allocator.free(member_key); - - if (db.symbols.getPtr(member_key)) |db_entry| { - if (db_entry.description == null) { - db_entry.description = member.description; - } - } - } - } - - if (class_doc.properties) |members| { - for (members) |member| { - const member_key = std.fmt.allocPrint(tmp_allocator, "{s}.{s}", .{ class_name, member.name }) catch continue; - defer tmp_allocator.free(member_key); - - if (db.symbols.getPtr(member_key)) |db_entry| { - if (db_entry.description == null) { - db_entry.description = member.description; - } - } - } - } - - if (class_doc.signals) |members| { - for (members) |member| { - const 
member_key = std.fmt.allocPrint(tmp_allocator, "{s}.{s}", .{ class_name, member.name }) catch continue; - defer tmp_allocator.free(member_key); - - if (db.symbols.getPtr(member_key)) |db_entry| { - if (db_entry.description == null) { - db_entry.description = member.description; - } - } - } - } - - // Add entries for classes found in XML but not in JSON - if (db.symbols.get(class_name) == null) { - const key = std.fmt.allocPrint(arena_allocator, "{s}", .{class_name}) catch continue; - db.symbols.put(arena_allocator, key, .{ - .key = key, - .name = key, - .kind = .class, - .description = class_doc.description, - .brief_description = class_doc.brief_description, - }) catch continue; - } - } -} - comptime { std.testing.refAllDecls(@This()); } @@ -563,7 +185,6 @@ pub const DocDatabase = @import("DocDatabase.zig"); pub const XmlDocParser = @import("XmlDocParser.zig"); pub const cache = @import("cache.zig"); pub const Config = @import("Config.zig"); -pub const api = @import("api.zig"); pub const source_fetch = @import("source_fetch.zig"); const Spinner = @import("Spinner.zig"); From f15e99cbed82b602e5081566317d73e6497c6491 Mon Sep 17 00:00:00 2001 From: Simon Hartcher Date: Sat, 21 Mar 2026 20:32:48 +1100 Subject: [PATCH 16/19] test: add integration roundtrip test and error-path tests Add XML-to-markdown roundtrip integration test that verifies the full pipeline: XML parsing -> DocDatabase -> markdown cache -> readback. Add error-path tests for malformed XML skipping and symbol-not-found on an XML-loaded database. 
--- src/DocDatabase.zig | 128 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 128 insertions(+) diff --git a/src/DocDatabase.zig b/src/DocDatabase.zig index cc9490c..d69efc3 100644 --- a/src/DocDatabase.zig +++ b/src/DocDatabase.zig @@ -1044,6 +1044,133 @@ test "loadFromXmlDir converts BBCode descriptions to Markdown" { try std.testing.expect(std.mem.indexOf(u8, entry.description.?, "`code`") != null); } +test "XML dir to markdown roundtrip" { + const allocator = std.testing.allocator; + + var tmp_dir = std.testing.tmpDir(.{}); + defer tmp_dir.cleanup(); + + const tmp_path = try tmp_dir.dir.realpathAlloc(allocator, "."); + defer allocator.free(tmp_path); + + // Write a realistic XML doc + const xml_content = + \\ + \\ + \\ A test class. + \\ A class for testing. + \\ + \\ https://example.com + \\ + \\ + \\ + \\ + \\ Default constructor. + \\ + \\ + \\ + \\ + \\ + \\ + \\ Does a thing. + \\ + \\ + \\ + \\ Movement speed. + \\ + \\ + \\ + \\ + \\ + \\ Multiplies by a scalar. + \\ + \\ + \\ + \\ Maximum speed. 
+ \\ + \\ + ; + + // Write XML to a subdir + const xml_dir = try std.fmt.allocPrint(allocator, "{s}/xml", .{tmp_path}); + defer allocator.free(xml_dir); + try std.fs.makeDirAbsolute(xml_dir); + const xml_path = try std.fmt.allocPrint(allocator, "{s}/TestClass.xml", .{xml_dir}); + defer allocator.free(xml_path); + try std.fs.cwd().writeFile(.{ .sub_path = xml_path, .data = xml_content }); + + // Load from XML + var arena_alloc = std.heap.ArenaAllocator.init(allocator); + defer arena_alloc.deinit(); + const db = try DocDatabase.loadFromXmlDir(arena_alloc.allocator(), allocator, xml_dir); + + // Generate markdown cache + const cache_dir = try std.fmt.allocPrint(allocator, "{s}/cache", .{tmp_path}); + defer allocator.free(cache_dir); + try cache.generateMarkdownCache(allocator, db, cache_dir); + + // Read back the class markdown + var output: std.Io.Writer.Allocating = .init(allocator); + defer output.deinit(); + try cache.readSymbolMarkdown(allocator, "TestClass", cache_dir, &output.writer); + const written = output.written(); + + // Verify key content + try std.testing.expect(std.mem.indexOf(u8, written, "# TestClass") != null); + try std.testing.expect(std.mem.indexOf(u8, written, "*Inherits: RefCounted*") != null); + try std.testing.expect(std.mem.indexOf(u8, written, "## Tutorials") != null); + try std.testing.expect(std.mem.indexOf(u8, written, "## Constructors") != null); + try std.testing.expect(std.mem.indexOf(u8, written, "## Methods") != null); + try std.testing.expect(std.mem.indexOf(u8, written, "## Properties") != null); + try std.testing.expect(std.mem.indexOf(u8, written, "## Operators") != null); + try std.testing.expect(std.mem.indexOf(u8, written, "## Constants") != null); + try std.testing.expect(std.mem.indexOf(u8, written, "do_thing") != null); + try std.testing.expect(std.mem.indexOf(u8, written, "speed") != null); + try std.testing.expect(std.mem.indexOf(u8, written, "operator *") != null); +} + +test "loadFromXmlDir skips malformed XML files" { 
+ const allocator = std.testing.allocator; + + var tmp_dir = std.testing.tmpDir(.{}); + defer tmp_dir.cleanup(); + const tmp_path = try tmp_dir.dir.realpathAlloc(allocator, "."); + defer allocator.free(tmp_path); + + try tmp_dir.dir.writeFile(.{ .sub_path = "Bad.xml", .data = "\n + \\ + \\ Base class. + \\ Base node class. + \\ + }); + + var arena = std.heap.ArenaAllocator.init(allocator); + defer arena.deinit(); + + const db = try DocDatabase.loadFromXmlDir(arena.allocator(), allocator, tmp_path); + const result = db.lookupSymbolExact("NonExistent"); + try std.testing.expectError(DocDatabase.Error.SymbolNotFound, result); +} + const std = @import("std"); const ArenaAllocator = std.heap.ArenaAllocator; const Allocator = std.mem.Allocator; @@ -1052,4 +1179,5 @@ const ArrayList = std.ArrayListUnmanaged; const Writer = std.Io.Writer; const bbcodez = @import("bbcodez"); +const cache = @import("cache.zig"); const XmlDocParser = @import("XmlDocParser.zig"); From 15c39a12d35362065a685d26a1471fac4f287adb Mon Sep 17 00:00:00 2001 From: Simon Hartcher Date: Sat, 21 Mar 2026 20:35:37 +1100 Subject: [PATCH 17/19] docs: update CLAUDE.md to reflect XML-only architecture --- CLAUDE.md | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/CLAUDE.md b/CLAUDE.md index 5ae5efd..2b1cd8e 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -9,8 +9,8 @@ This file provides guidance to Claude Code (claude.ai/code) when working with co **gdoc** is a CLI documentation viewer for Godot API documentation, similar to `zigdoc`. It parses Godot's API documentation and displays it in the terminal with BBCode-to-Markdown conversion. 
Key behavior: -- Uses the user's local `godot` executable if available -- Falls back to downloading the latest API JSON from GitHub into cache if Godot is not installed +- Requires `godot` executable to determine version and fetch XML class documentation +- Downloads XML docs from GitHub, parses them, and builds a markdown cache - Converts BBCode documentation to Markdown using the `bbcodez` library for terminal display ## Build System @@ -57,11 +57,14 @@ The build system imports `bbcodez` as a dependency and makes it available to the ### Expected Data Flow 1. Parse CLI arguments for symbol lookup (e.g., `gdoc Node2D.position`) -2. Locate Godot API JSON: - - Check for local `godot` executable → run `godot --dump-extension-api` - - If not found → download from GitHub to cache directory -3. Parse JSON to find requested symbol documentation -4. Convert BBCode documentation to Markdown using `bbcodez` +2. Check if markdown cache is populated (xml_docs/.complete marker + Object/index.md sentinel) +3. If cache is empty: + - Run `godot --version` to determine Godot version + - Download XML class docs tarball from GitHub + - Parse all XML files into DocDatabase via `loadFromXmlDir` + - Convert BBCode descriptions to Markdown + - Generate markdown cache files +4. Read requested symbol's markdown from cache 5. 
Display formatted output to terminal ### Integration with bbcodez From 66684fffb77f92a243487185349eb5c8c84fa9d5 Mon Sep 17 00:00:00 2001 From: Simon Hartcher Date: Sat, 21 Mar 2026 20:37:08 +1100 Subject: [PATCH 18/19] chore: remove design docs and implementation plans --- .../2026-03-21-xml-docs-primary-source.md | 1359 ----------------- ...26-03-21-xml-docs-primary-source-design.md | 214 --- 2 files changed, 1573 deletions(-) delete mode 100644 docs/superpowers/plans/2026-03-21-xml-docs-primary-source.md delete mode 100644 docs/superpowers/specs/2026-03-21-xml-docs-primary-source-design.md diff --git a/docs/superpowers/plans/2026-03-21-xml-docs-primary-source.md b/docs/superpowers/plans/2026-03-21-xml-docs-primary-source.md deleted file mode 100644 index a347856..0000000 --- a/docs/superpowers/plans/2026-03-21-xml-docs-primary-source.md +++ /dev/null @@ -1,1359 +0,0 @@ -# XML Docs as Primary Source — Implementation Plan - -> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. - -**Goal:** Replace JSON extension API with XML class docs as the sole data source for gdoc. - -**Architecture:** Delete all JSON parsing code, expand XmlDocParser to handle constructors/operators/params/qualifiers/defaults, add `DocDatabase.loadFromXmlDir` to build the symbol table from XML files, update cache flow to skip JSON export, update markdown generation to render new fields. 
- -**Tech Stack:** Zig 0.15.1, zig-xml, bbcodez - -**Spec:** `docs/superpowers/specs/2026-03-21-xml-docs-primary-source-design.md` - ---- - -## File Map - -**Modify:** -- `src/XmlDocParser.zig` — Expand `MemberDoc` with params/qualifiers/return_type/default_value, add `ParamDoc`, parse ``, ``, ``, ``, `qualifiers`/`default` attributes -- `src/DocDatabase.zig` — Remove all JSON parsing, remove `builtin_class` from `EntryKind`, add `constructor` to `EntryKind`, add `inherits`/`qualifiers`/`default_value` to `Entry`, add `loadFromXmlDir`, update `generateMarkdownForEntry` to render new fields (inheritance, constructors, operators, qualifiers, defaults) -- `src/root.zig` — Remove `mergeXmlDocs`, `fetchXmlDocs`, `api_json_path` parameter from all functions, simplify `markdownForSymbol` to XML-only cache flow -- `src/cache.zig` — Remove `getJsonCachePathInDir`, update `cacheIsPopulated` to check `Object/index.md` instead of JSON file -- `src/Config.zig` — Remove `no_xml` field, update `Config.testing` -- `src/cli/root.zig` — Remove `--godot-extension-api` flag, update error handling -- `build.zig` — Keep `bbcodez` (still needed for BBCode→Markdown in descriptions), keep `xml` - -**Delete:** -- `src/api.zig` — Entire file - -**Update:** -- `snapshots/*.md` — Updated to reflect new markdown format -- Tests throughout — JSON fixture tests deleted, XML fixture tests added - ---- - -### Task 1: Expand XmlDocParser with ParamDoc and new MemberDoc fields - -**Files:** -- Modify: `src/XmlDocParser.zig` - -- [ ] **Step 1: Write failing test for ParamDoc parsing on methods** - -Add test to `src/XmlDocParser.zig` using XML that includes `` and `` elements inside a ``: - -```zig -const test_xml_with_params = - \\ - \\ - \\ A 2D game object. - \\ Node2D is the base class for 2D. - \\ - \\ - \\ - \\ - \\ Returns the angle between the node and the point. - \\ - \\ - \\ - \\ - \\ - \\ Applies a local translation on the X axis. 
- \\ - \\ - \\ -; - -test "parses method params and return type" { - const allocator = std.testing.allocator; - const doc = try parseClassDoc(allocator, test_xml_with_params); - defer freeClassDoc(allocator, doc); - - const methods = doc.methods.?; - try std.testing.expectEqual(2, methods.len); - - // First method: get_angle_to - try std.testing.expectEqualStrings("const", methods[0].qualifiers.?); - try std.testing.expectEqualStrings("float", methods[0].return_type.?); - const params0 = methods[0].params.?; - try std.testing.expectEqual(1, params0.len); - try std.testing.expectEqualStrings("point", params0[0].name); - try std.testing.expectEqualStrings("Vector2", params0[0].type); - try std.testing.expect(params0[0].default_value == null); - - // Second method: move_local_x with default param - const params1 = methods[1].params.?; - try std.testing.expectEqual(2, params1.len); - try std.testing.expectEqualStrings("false", params1[1].default_value.?); -} -``` - -- [ ] **Step 2: Run test to verify it fails** - -Run: `zig build test 2>&1 | grep "parses method params"` -Expected: Compilation error — `MemberDoc` has no field `qualifiers`, `return_type`, `params` - -- [ ] **Step 3: Add ParamDoc struct and expand MemberDoc** - -In `src/XmlDocParser.zig`, add `ParamDoc` and expand `MemberDoc`: - -```zig -pub const ParamDoc = struct { - name: []const u8, - type: []const u8, - default_value: ?[]const u8 = null, -}; - -pub const MemberDoc = struct { - name: []const u8, - description: ?[]const u8 = null, - qualifiers: ?[]const u8 = null, - default_value: ?[]const u8 = null, - return_type: ?[]const u8 = null, - params: ?[]ParamDoc = null, -}; -``` - -Update `parseClassDoc` to parse `qualifiers` attribute on ``, and parse `` and `` elements inside methods. Update `readNestedDescription` to also capture params and return type while walking the method element. - -Replace the `` handler with a call to a new `parseMethodElement` function that: -1. 
Reads the `name` and `qualifiers` attributes -2. Walks child elements collecting ``, ``, and `` -3. Returns a fully populated `MemberDoc` - -Update `freeClassDoc` to free the new fields on `MemberDoc` (qualifiers, default_value, return_type, params array and each param's fields). - -- [ ] **Step 4: Run test to verify it passes** - -Run: `zig build test 2>&1 | grep -E "PASS|FAIL"` -Expected: All tests pass - -- [ ] **Step 5: Commit** - -```bash -git add src/XmlDocParser.zig -git commit -m "feat: parse method params, return type, and qualifiers from XML" -``` - ---- - -### Task 2: Parse property default values from XML - -**Files:** -- Modify: `src/XmlDocParser.zig` - -- [ ] **Step 1: Write failing test for property default values** - -```zig -test "parses property default value" { - const allocator = std.testing.allocator; - const doc = try parseClassDoc(allocator, test_xml); - defer freeClassDoc(allocator, doc); - - const props = doc.properties.?; - try std.testing.expectEqual(1, props.len); - try std.testing.expectEqualStrings("Vector2(0, 0)", props[0].default_value.?); - try std.testing.expectEqualStrings("Vector2", props[0].return_type.?); -} -``` - -Note: The existing `test_xml` already has `` — we just need to extract the `default` and `type` attributes. 
- -- [ ] **Step 2: Run test to verify it fails** - -Run: `zig build test 2>&1 | grep "parses property default"` -Expected: FAIL — `default_value` is null - -- [ ] **Step 3: Update member parsing to read default and type attributes** - -In `parseClassDoc`, update the `` handler to also read `default` and `type` attributes: - -```zig -} else if (std.mem.eql(u8, name, "member")) { - const member_name = try getAttributeAlloc(allocator, reader, "name") orelse continue; - const member_type = try getAttributeAlloc(allocator, reader, "type"); - const member_default = try getAttributeAlloc(allocator, reader, "default"); - const desc = try readTextContent(allocator, reader); - try properties.append(allocator, .{ - .name = member_name, - .description = desc, - .return_type = member_type, - .default_value = member_default, - }); -} -``` - -- [ ] **Step 4: Run test to verify it passes** - -Run: `zig build test 2>&1 | grep -E "PASS|FAIL"` -Expected: All tests pass - -- [ ] **Step 5: Commit** - -```bash -git add src/XmlDocParser.zig -git commit -m "feat: parse property type and default value from XML" -``` - ---- - -### Task 3: Parse constructors and operators from XML - -**Files:** -- Modify: `src/XmlDocParser.zig` - -- [ ] **Step 1: Write failing test for constructors and operators** - -```zig -const test_xml_with_constructors_and_operators = - \\ - \\ - \\ A 2D vector. - \\ 2D vector type. - \\ - \\ - \\ - \\ Constructs a default Vector2. - \\ - \\ - \\ - \\ - \\ - \\ Constructs from x and y. - \\ - \\ - \\ - \\ - \\ - \\ - \\ Adds two vectors. 
- \\ - \\ - \\ -; - -test "parses constructors" { - const allocator = std.testing.allocator; - const doc = try parseClassDoc(allocator, test_xml_with_constructors_and_operators); - defer freeClassDoc(allocator, doc); - - const ctors = doc.constructors.?; - try std.testing.expectEqual(2, ctors.len); - try std.testing.expectEqualStrings("Vector2", ctors[0].name); - try std.testing.expect(ctors[0].params == null); - try std.testing.expectEqual(2, ctors[1].params.?.len); -} - -test "parses operators" { - const allocator = std.testing.allocator; - const doc = try parseClassDoc(allocator, test_xml_with_constructors_and_operators); - defer freeClassDoc(allocator, doc); - - const ops = doc.operators.?; - try std.testing.expectEqual(1, ops.len); - try std.testing.expectEqualStrings("operator +", ops[0].name); - try std.testing.expectEqualStrings("Vector2", ops[0].return_type.?); - try std.testing.expectEqual(1, ops[0].params.?.len); -} -``` - -- [ ] **Step 2: Run test to verify it fails** - -Run: `zig build test 2>&1 | grep "parses constructors"` -Expected: Compilation error — `ClassDoc` has no field `constructors` - -- [ ] **Step 3: Add constructors and operators to ClassDoc and parsing** - -Add fields to `ClassDoc`: -```zig -constructors: ?[]MemberDoc = null, -operators: ?[]MemberDoc = null, -``` - -Add array lists in `parseClassDoc`: -```zig -var constructors: std.ArrayListUnmanaged(MemberDoc) = .empty; -defer constructors.deinit(allocator); -var operators: std.ArrayListUnmanaged(MemberDoc) = .empty; -defer operators.deinit(allocator); -``` - -Add element handlers for ``, ``, and `` — reuse the same `parseMethodElement` function from Task 1 since they all have identical XML structure (name, optional return, optional params, description). Update the existing `` handler to use `parseMethodElement` so signal params are captured (e.g., `child_entered_tree(node: Node)`). - -Update `freeClassDoc` to free constructors and operators. 
- -Set on doc: -```zig -doc.constructors = if (constructors.items.len > 0) try constructors.toOwnedSlice(allocator) else null; -doc.operators = if (operators.items.len > 0) try operators.toOwnedSlice(allocator) else null; -``` - -- [ ] **Step 4: Run test to verify it passes** - -Run: `zig build test 2>&1 | grep -E "PASS|FAIL"` -Expected: All tests pass - -- [ ] **Step 5: Commit** - -```bash -git add src/XmlDocParser.zig -git commit -m "feat: parse constructors and operators from XML" -``` - ---- - -### Task 4: Parse constants with enum attribute from XML - -**Files:** -- Modify: `src/XmlDocParser.zig` - -- [ ] **Step 1: Write failing test for enum attribute on constants** - -```zig -const test_xml_with_enums = - \\ - \\ - \\ Base class. - \\ Base node. - \\ - \\ Ready notification. - \\ Inherits process mode. - \\ Always process. - \\ - \\ -; - -test "parses constant value and enum attribute" { - const allocator = std.testing.allocator; - const doc = try parseClassDoc(allocator, test_xml_with_enums); - defer freeClassDoc(allocator, doc); - - const consts = doc.constants.?; - try std.testing.expectEqual(3, consts.len); - - // Regular constant — no enum - try std.testing.expectEqualStrings("13", consts[0].default_value.?); - try std.testing.expect(consts[0].qualifiers == null); - - // Enum constant — enum name stored in qualifiers field - try std.testing.expectEqualStrings("0", consts[1].default_value.?); - try std.testing.expectEqualStrings("ProcessMode", consts[1].qualifiers.?); -} -``` - -Note: We reuse `qualifiers` on `MemberDoc` to store the `enum` attribute name for constants. This avoids adding yet another field just for this case. 
- -- [ ] **Step 2: Run test to verify it fails** - -Run: `zig build test 2>&1 | grep "parses constant value"` -Expected: FAIL — `default_value` is null on constants - -- [ ] **Step 3: Update constant parsing to read value and enum attributes** - -```zig -} else if (std.mem.eql(u8, name, "constant")) { - const constant_name = try getAttributeAlloc(allocator, reader, "name") orelse continue; - const constant_value = try getAttributeAlloc(allocator, reader, "value"); - const constant_enum = try getAttributeAlloc(allocator, reader, "enum"); - const desc = try readTextContent(allocator, reader); - try constants.append(allocator, .{ - .name = constant_name, - .description = desc, - .default_value = constant_value, - .qualifiers = constant_enum, - }); -} -``` - -- [ ] **Step 4: Run test to verify it passes** - -Run: `zig build test 2>&1 | grep -E "PASS|FAIL"` -Expected: All tests pass - -- [ ] **Step 5: Commit** - -```bash -git add src/XmlDocParser.zig -git commit -m "feat: parse constant value and enum attribute from XML" -``` - ---- - -### Task 5: Update DocDatabase Entry and EntryKind - -**Files:** -- Modify: `src/DocDatabase.zig` - -- [ ] **Step 1: Write failing test for new Entry fields** - -```zig -test "Entry supports inherits, qualifiers, and default_value fields" { - const entry = Entry{ - .key = "Node2D.position", - .name = "position", - .kind = .property, - .inherits = null, - .qualifiers = null, - .default_value = "Vector2(0, 0)", - }; - try std.testing.expectEqualStrings("Vector2(0, 0)", entry.default_value.?); -} - -test "EntryKind has constructor value" { - const kind: EntryKind = .constructor; - try std.testing.expect(kind == .constructor); -} -``` - -- [ ] **Step 2: Run test to verify it fails** - -Expected: Compilation error — `Entry` has no field `inherits`, `qualifiers`, `default_value`; `EntryKind` has no field `constructor` - -- [ ] **Step 3: Update Entry struct and EntryKind enum** - -In `src/DocDatabase.zig`: - -Add to `EntryKind`: -```zig 
-constructor, -``` - -Remove from `EntryKind`: -```zig -builtin_class, -``` - -Add to `Entry`: -```zig -inherits: ?[]const u8 = null, -qualifiers: ?[]const u8 = null, -default_value: ?[]const u8 = null, -``` - -- [ ] **Step 4: Fix all compilation errors from builtin_class removal** - -Search for all references to `builtin_class` and `.builtin_class` in the codebase and change to `.class`. This includes `loadFromJsonLeaky` and tests that assert `EntryKind.builtin_class`. - -- [ ] **Step 5: Run test to verify it passes** - -Run: `zig build test 2>&1 | grep -E "PASS|FAIL"` -Expected: All tests pass - -- [ ] **Step 6: Commit** - -```bash -git add src/DocDatabase.zig -git commit -m "feat: add inherits, qualifiers, default_value to Entry; add constructor EntryKind; remove builtin_class" -``` - ---- - -### Task 6: Add DocDatabase.loadFromXmlDir - -**Files:** -- Modify: `src/DocDatabase.zig` - -- [ ] **Step 1: Write failing test for loadFromXmlDir** - -```zig -test "loadFromXmlDir parses XML files into symbol table" { - const allocator = std.testing.allocator; - - var tmp_dir = std.testing.tmpDir(.{}); - defer tmp_dir.cleanup(); - - const tmp_path = try tmp_dir.dir.realpathAlloc(allocator, "."); - defer allocator.free(tmp_path); - - // Write a minimal XML file - const xml_content = - \\ - \\ - \\ A sprite node. - \\ Displays a 2D texture. - \\ - \\ - \\ - \\ Returns true if flipped horizontally. - \\ - \\ - \\ - \\ The texture to display. 
- \\ - \\ - ; - try tmp_dir.dir.writeFile(.{ .sub_path = "Sprite2D.xml", .data = xml_content }); - - var arena = std.heap.ArenaAllocator.init(allocator); - defer arena.deinit(); - - const db = try DocDatabase.loadFromXmlDir(arena.allocator(), allocator, tmp_path); - - // Class entry - const class_entry = db.symbols.get("Sprite2D"); - try std.testing.expect(class_entry != null); - try std.testing.expectEqual(EntryKind.class, class_entry.?.kind); - try std.testing.expectEqualStrings("Node2D", class_entry.?.inherits.?); - try std.testing.expect(class_entry.?.members != null); - - // Method entry - const method = db.symbols.get("Sprite2D.is_flipped_h"); - try std.testing.expect(method != null); - try std.testing.expectEqual(EntryKind.method, method.?.kind); - try std.testing.expect(std.mem.indexOf(u8, method.?.signature.?, "bool") != null); - - // Property entry - const prop = db.symbols.get("Sprite2D.texture"); - try std.testing.expect(prop != null); - try std.testing.expectEqual(EntryKind.property, prop.?.kind); - try std.testing.expectEqualStrings("null", prop.?.default_value.?); -} -``` - -- [ ] **Step 2: Run test to verify it fails** - -Expected: Compilation error — `DocDatabase` has no declaration `loadFromXmlDir` - -- [ ] **Step 3: Implement loadFromXmlDir** - -Add a new public function `loadFromXmlDir(arena_allocator: Allocator, tmp_allocator: Allocator, xml_dir_path: []const u8) !DocDatabase`: - -1. Open `xml_dir_path` as a directory with `.iterate = true` -2. Iterate all `.xml` files -3. For each file: - a. Read contents with `tmp_allocator`, parse with `XmlDocParser.parseClassDoc(arena_allocator, content)`, free content - b. Create class `Entry` with `kind = .class`, set `inherits`, `brief_description`, `description`, `tutorials` - c. Put class entry into `symbols` - d. 
For each member category (methods, properties, signals, constants, constructors, operators): - - Create child `Entry` with appropriate kind, dotted key (`"ClassName.member_name"`) - - Build `signature` string (see signature building rules below) - - Set `qualifiers`, `default_value` from parsed data - - Put into `symbols`, track index for parent's `members` array - - **Enum grouping for constants:** If a constant has an `enum` attribute (stored in `qualifiers` by the parser), key it as `"ClassName.EnumName.VALUE_NAME"` with `kind = .enum_value`. Constants without `enum` attribute are keyed as `"ClassName.CONSTANT_NAME"` with `kind = .constant`. - e. Update class entry's `members` with collected indices -4. Handle `@GlobalScope.xml` and `@GDScript.xml`: also register their methods as top-level entries with `kind = .global_function` (not `.method`), e.g., both `"@GlobalScope.sin"` and `"sin"`. `@GlobalScope` entries take precedence over `@GDScript`. - -**Signature building rules:** -- Methods: `(param: Type, param2: Type = default) -> ReturnType` (omit `-> void`) -- Properties: `: Type` -- Constructors: `(param: Type, ...)` (name is class name, no return type shown) -- Operators: format as `(right: Type) -> ReturnType` -- Constants with value: ` = value` - -Helper function `buildSignature(allocator, member, kind) !?[]const u8` handles this. - -- [ ] **Step 4: Run test to verify it passes** - -Run: `zig build test 2>&1 | grep -E "PASS|FAIL"` -Expected: All tests pass - -- [ ] **Step 5: Write test for enum grouping in loadFromXmlDir** - -```zig -test "loadFromXmlDir groups constants with enum attribute as enum_value entries" { - const allocator = std.testing.allocator; - - var tmp_dir = std.testing.tmpDir(.{}); - defer tmp_dir.cleanup(); - - const tmp_path = try tmp_dir.dir.realpathAlloc(allocator, "."); - defer allocator.free(tmp_path); - - const xml_content = - \\ - \\ - \\ Base class. - \\ Base node. - \\ - \\ Ready. - \\ Inherits. - \\ Always. 
- \\ - \\ - ; - try tmp_dir.dir.writeFile(.{ .sub_path = "Node.xml", .data = xml_content }); - - var arena = std.heap.ArenaAllocator.init(allocator); - defer arena.deinit(); - - const db = try DocDatabase.loadFromXmlDir(arena.allocator(), allocator, tmp_path); - - // Regular constant: keyed as ClassName.CONSTANT_NAME - const notif = db.symbols.get("Node.NOTIFICATION_READY"); - try std.testing.expect(notif != null); - try std.testing.expectEqual(EntryKind.constant, notif.?.kind); - - // Enum constant: keyed as ClassName.EnumName.VALUE_NAME - const inherit = db.symbols.get("Node.ProcessMode.PROCESS_MODE_INHERIT"); - try std.testing.expect(inherit != null); - try std.testing.expectEqual(EntryKind.enum_value, inherit.?.kind); - - const always = db.symbols.get("Node.ProcessMode.PROCESS_MODE_ALWAYS"); - try std.testing.expect(always != null); - try std.testing.expectEqual(EntryKind.enum_value, always.?.kind); -} -``` - -- [ ] **Step 6: Run test to verify it passes** - -Run: `zig build test 2>&1 | grep -E "PASS|FAIL"` -Expected: All tests pass - -- [ ] **Step 7: Write test for @GlobalScope dual-registration** - -```zig -test "loadFromXmlDir registers GlobalScope functions as top-level entries" { - const allocator = std.testing.allocator; - - var tmp_dir = std.testing.tmpDir(.{}); - defer tmp_dir.cleanup(); - - const tmp_path = try tmp_dir.dir.realpathAlloc(allocator, "."); - defer allocator.free(tmp_path); - - const xml_content = - \\ - \\ - \\ Global scope. - \\ Global functions. - \\ - \\ - \\ - \\ - \\ Returns absolute value. 
- \\ - \\ - \\ - ; - try tmp_dir.dir.writeFile(.{ .sub_path = "@GlobalScope.xml", .data = xml_content }); - - var arena = std.heap.ArenaAllocator.init(allocator); - defer arena.deinit(); - - const db = try DocDatabase.loadFromXmlDir(arena.allocator(), allocator, tmp_path); - - // Should exist under qualified name - try std.testing.expect(db.symbols.get("@GlobalScope.abs") != null); - // Should also exist as top-level - try std.testing.expect(db.symbols.get("abs") != null); -} -``` - -- [ ] **Step 8: Run test to verify it passes** - -Run: `zig build test 2>&1 | grep -E "PASS|FAIL"` -Expected: All tests pass - -- [ ] **Step 9: Commit** - -```bash -git add src/DocDatabase.zig -git commit -m "feat: add DocDatabase.loadFromXmlDir to build symbol table from XML files" -``` - ---- - -### Task 7: Update markdown generation for new fields - -**Files:** -- Modify: `src/DocDatabase.zig` -- Update: `snapshots/*.md` - -- [ ] **Step 1: Write failing test for inheritance in markdown output** - -```zig -test "generateMarkdownForSymbol shows inheritance" { - const allocator = std.testing.allocator; - - var db = DocDatabase{ .symbols = StringArrayHashMap(Entry).empty }; - defer db.symbols.deinit(allocator); - - try db.symbols.put(allocator, "Node2D", Entry{ - .key = "Node2D", - .name = "Node2D", - .kind = .class, - .inherits = "CanvasItem", - .brief_description = "A 2D game object.", - }); - - var allocating: std.Io.Writer.Allocating = .init(allocator); - defer allocating.deinit(); - - try db.generateMarkdownForSymbol(allocator, "Node2D", &allocating.writer); - const written = allocating.written(); - - try std.testing.expect(std.mem.indexOf(u8, written, "*Inherits: CanvasItem*") != null); -} -``` - -- [ ] **Step 2: Run test to verify it fails** - -Expected: FAIL — output doesn't contain inheritance line - -- [ ] **Step 3: Update generateMarkdownForEntry** - -In `generateMarkdownForEntry`, after writing the heading, add: - -```zig -if (entry.inherits) |inherits| { - try 
writer.print("\n*Inherits: {s}*\n", .{inherits}); -} -``` - -Update `generateMemberListings` to add `constructor` and `operator` sections: - -```zig -var constructors: ArrayList(usize) = .empty; -var operators: ArrayList(usize) = .empty; -defer constructors.deinit(allocator); -defer operators.deinit(allocator); - -// In the switch: -.constructor => try constructors.append(allocator, idx), -.operator => try operators.append(allocator, idx), - -// Render sections: -try self.formatMemberSection("Constructors", constructors.items, writer); -// Render constructors BEFORE methods, operators AFTER methods -``` - -Update `formatMemberLine` to show qualifiers and default values: - -```zig -// After signature, before closing ** -if (member.qualifiers) |quals| { - try writer.print("** `{s}`", .{quals}); -} else { - try writer.writeAll("**"); -} - -// For properties with defaults: -if (member.default_value) |default| { - try writer.print(" = `{s}`", .{default}); -} -``` - -- [ ] **Step 4: Run test to verify it passes** - -Run: `zig build test 2>&1 | grep -E "PASS|FAIL"` -Expected: All tests pass - -- [ ] **Step 5: Update snapshot files** - -Run `zig build test` — snapshot tests will update the files. Verify the diffs look correct with `git diff snapshots/`. 
- -- [ ] **Step 6: Commit** - -```bash -git add src/DocDatabase.zig snapshots/ -git commit -m "feat: render inheritance, constructors, operators, qualifiers, defaults in markdown" -``` - ---- - -### Task 8: Remove JSON parsing from DocDatabase - -**Files:** -- Modify: `src/DocDatabase.zig` -- Modify: `build.zig` - -- [ ] **Step 1: Delete all JSON parsing code** - -Remove from `src/DocDatabase.zig`: -- `RootState` enum -- `loadFromJsonFileLeaky` function -- `loadFromJsonLeaky` function -- `parseClasses` function -- `parseClass` function -- `parseEntry` function -- `parseEntryArray` function -- `nextTokenToMarkdownAlloc` function -- `bbcodeToMarkdown` function (only used by JSON parsing — keep a copy if needed by `loadFromXmlDir` for BBCode→Markdown conversion of descriptions) -- All handler maps and handler functions (`MethodKey`, `ConstantKey`, `SignalKey`, `EnumKey`, `PropertyKey`, handler maps) -- All JSON-related imports (`Scanner`, `Reader`, `Token`) -- `InvalidApiJson` from the `Error` enum (this error was only produced by JSON parsing) -- All tests that use `loadFromJsonLeaky` or `loadFromJsonFileLeaky` (tests at lines 511-996) - -- [ ] **Step 2: Remove bbcodez from DocDatabase module imports in build.zig** - -Check if bbcodez is still used anywhere else. If only DocDatabase used it, remove from `build.zig` module imports. If `root.zig` or other files still use it for BBCode→Markdown conversion of XML descriptions, keep it. - -Look at how descriptions flow: XML descriptions contain BBCode (`[b]`, `[code]`, etc.). Currently the JSON path converts BBCode→Markdown via `bbcodeToMarkdown` during JSON parsing. With XML as source, BBCode conversion needs to happen somewhere — either in `loadFromXmlDir` when building entries, or in `generateMarkdownForEntry` when rendering. - -Decision: Keep bbcodez, move BBCode→Markdown conversion to `loadFromXmlDir` (convert descriptions as they're stored in Entry). 
Copy the `bbcodeToMarkdown` helper function to be used by `loadFromXmlDir`. - -- [ ] **Step 3: Write test verifying BBCode conversion in loadFromXmlDir** - -```zig -test "loadFromXmlDir converts BBCode descriptions to Markdown" { - const allocator = std.testing.allocator; - - var tmp_dir = std.testing.tmpDir(.{}); - defer tmp_dir.cleanup(); - - const tmp_path = try tmp_dir.dir.realpathAlloc(allocator, "."); - defer allocator.free(tmp_path); - - const xml_content = - \\ - \\ - \\ Has [b]bold[/b] text. - \\ Uses [code]code[/code] and [i]italic[/i]. - \\ - ; - try tmp_dir.dir.writeFile(.{ .sub_path = "TestBBCode.xml", .data = xml_content }); - - var arena = std.heap.ArenaAllocator.init(allocator); - defer arena.deinit(); - - const db = try DocDatabase.loadFromXmlDir(arena.allocator(), allocator, tmp_path); - const entry = db.symbols.get("TestBBCode").?; - - // BBCode should be converted to Markdown - try std.testing.expect(std.mem.indexOf(u8, entry.brief_description.?, "**bold**") != null); - try std.testing.expect(std.mem.indexOf(u8, entry.description.?, "`code`") != null); -} -``` - -- [ ] **Step 4: Run tests to verify compilation and BBCode test passes** - -Run: `zig build test 2>&1 | grep -E "PASS|FAIL"` -Expected: All remaining tests pass. JSON tests are gone. BBCode conversion test passes. 
- -- [ ] **Step 5: Commit** - -```bash -git add src/DocDatabase.zig build.zig -git commit -m "refactor: remove all JSON parsing code from DocDatabase" -``` - ---- - -### Task 9: Remove api.zig and update Config - -**Files:** -- Delete: `src/api.zig` -- Modify: `src/Config.zig` -- Modify: `src/root.zig` (remove `pub const api` import) - -- [ ] **Step 1: Delete api.zig** - -```bash -rm src/api.zig -``` - -- [ ] **Step 2: Remove no_xml from Config** - -In `src/Config.zig`: -- Remove `no_xml: bool` field from `Config` struct -- Remove `.no_xml = hasEnv("GDOC_NO_XML")` from `init` -- Remove `.no_xml = true` from `Config.testing` -- Update the test that asserts `Config.testing.no_xml` -- Keep `hasEnv` function (may be useful later, and it's tiny) - -- [ ] **Step 3: Remove api import from root.zig** - -Remove `pub const api = @import("api.zig");` from the imports in `src/root.zig`. - -- [ ] **Step 4: Run tests to verify compilation** - -Run: `zig build test 2>&1 | grep -E "PASS|FAIL"` -Expected: All tests pass - -- [ ] **Step 5: Commit** - -```bash -git add -A -git commit -m "refactor: delete api.zig, remove no_xml from Config" -``` - ---- - -### Task 10: Update cache.zig — remove JSON helpers, update cacheIsPopulated - -**Files:** -- Modify: `src/cache.zig` - -- [ ] **Step 1: Write failing test for new cacheIsPopulated logic** - -```zig -test "cacheIsPopulated returns true when Object/index.md exists" { - const allocator = std.testing.allocator; - - var tmp_dir = std.testing.tmpDir(.{}); - defer tmp_dir.cleanup(); - - const cache_dir = try tmp_dir.dir.realpathAlloc(allocator, "."); - defer allocator.free(cache_dir); - - // Create xml_docs/.complete marker - const xml_dir = try std.fmt.allocPrint(allocator, "{s}/xml_docs", .{cache_dir}); - defer allocator.free(xml_dir); - try std.fs.makeDirAbsolute(xml_dir); - const complete_path = try std.fmt.allocPrint(allocator, "{s}/.complete", .{xml_dir}); - defer allocator.free(complete_path); - try std.fs.cwd().writeFile(.{ 
.sub_path = complete_path, .data = "4.4.1" }); - - // Create Object/index.md - const object_dir = try std.fmt.allocPrint(allocator, "{s}/Object", .{cache_dir}); - defer allocator.free(object_dir); - try std.fs.makeDirAbsolute(object_dir); - const index_path = try std.fmt.allocPrint(allocator, "{s}/index.md", .{object_dir}); - defer allocator.free(index_path); - try std.fs.cwd().writeFile(.{ .sub_path = index_path, .data = "# Object\n" }); - - const result = try cacheIsPopulated(allocator, cache_dir); - try std.testing.expect(result); -} -``` - -- [ ] **Step 2: Run test to verify it fails** - -Expected: FAIL — current implementation looks for `extension_api.json` - -- [ ] **Step 3: Update cacheIsPopulated and remove JSON helpers** - -Rewrite `cacheIsPopulated` to check for `xml_docs/.complete` marker and `Object/index.md`: - -```zig -pub fn cacheIsPopulated(allocator: Allocator, cache_path: []const u8) !bool { - // Check xml_docs/.complete marker - const xml_dir = try getXmlDocsDirInCache(allocator, cache_path); - defer allocator.free(xml_dir); - - const marker = source_fetch.readCompleteMarker(allocator, xml_dir); - if (marker) |m| { - allocator.free(m); - } else { - return false; - } - - // Check Object/index.md sentinel - const object_path = try resolveSymbolPath(allocator, cache_path, "Object"); - defer allocator.free(object_path); - - const object_file = std.fs.openFileAbsolute(object_path, .{}) catch |err| switch (err) { - error.FileNotFound => return false, - else => return err, - }; - object_file.close(); - - return true; -} -``` - -Delete `getJsonCachePathInDir`. - -Update tests that used `getJsonCachePathInDir` or checked for `extension_api.json`. 
- -- [ ] **Step 4: Run tests to verify all pass** - -Run: `zig build test 2>&1 | grep -E "PASS|FAIL"` -Expected: All tests pass - -- [ ] **Step 5: Commit** - -```bash -git add src/cache.zig -git commit -m "refactor: update cacheIsPopulated to check XML marker + Object sentinel, remove JSON helpers" -``` - ---- - -### Task 11: Update CLI and root.zig — remove JSON paths, simplify cache flow - -**Files:** -- Modify: `src/cli/root.zig` -- Modify: `src/root.zig` - -- [ ] **Step 1: Remove --godot-extension-api flag from CLI** - -In `src/cli/root.zig`: -- Remove the `addFlag` block for `godot-extension-api` (lines 21-26) -- Remove `api_json_path_raw` and `api_json_path` variables (lines 47-48) -- Remove `api_json_path == null` from the help condition (line 54) — just check `ctx.positional_args.len == 0` -- Update `formatAndDisplay` call to remove `api_json_path` argument -- Remove the `error.ApiFileNotFound` and `error.InvalidApiJson` error handlers - -- [ ] **Step 2: Remove api_json_path from function signatures in root.zig** - -Update `markdownForSymbol`, `formatAndDisplay`, and `renderWithZigdown` to remove `api_json_path: ?[]const u8` parameter. - -- [ ] **Step 3: Remove the JSON-direct-load codepath from markdownForSymbol** - -Delete the `if (api_json_file)` branch that loads JSON directly. The function now always uses the cache flow. 
- -- [ ] **Step 4: Simplify the cache rebuild flow** - -Replace the current cache rebuild logic with: - -```zig -if (needs_full_rebuild) { - try cache.ensureDirectoryExists(cache_path); - - // Fetch XML docs - const xml_dir = try cache.getXmlDocsDirInCache(allocator, cache_path); - defer allocator.free(xml_dir); - try cache.ensureDirectoryExists(xml_dir); - - const version = source_fetch.getGodotVersion(allocator) orelse - return error.GodotNotFound; - defer version.deinit(allocator); - - // Download and extract XML - var url_buf: [256]u8 = undefined; - const url = source_fetch.buildTarballUrl(&url_buf, version) orelse - return error.GodotNotFound; - - var spinner = Spinner{ .message = "Downloading XML docs..." }; - spinner.start(); - - source_fetch.fetchAndExtractXmlDocs(allocator, url, xml_dir) catch |err| { - if (version.hash) |hash| { - var hash_url_buf: [256]u8 = undefined; - const hash_url = source_fetch.buildTarballUrlFromHash(&hash_url_buf, hash) orelse { - spinner.finish(); - return err; - }; - source_fetch.fetchAndExtractXmlDocs(allocator, hash_url, xml_dir) catch { - spinner.finish(); - return err; - }; - } else { - spinner.finish(); - return err; - } - }; - - spinner.finish(); - - // Write version marker - var version_buf: [64]u8 = undefined; - const version_str = version.formatVersion(&version_buf) orelse return error.GodotNotFound; - try source_fetch.writeCompleteMarker(allocator, xml_dir, version_str); - - // Build database from XML - var build_spinner = Spinner{ .message = "Building documentation cache..." }; - build_spinner.start(); - defer build_spinner.finish(); - - var arena = ArenaAllocator.init(allocator); - defer arena.deinit(); - - const db = try DocDatabase.loadFromXmlDir(arena.allocator(), allocator, xml_dir); - try cache.generateMarkdownCache(allocator, db, cache_path); -} -``` - -- [ ] **Step 5: Remove mergeXmlDocs and fetchXmlDocs functions** - -Delete both functions from `root.zig`. 
- -- [ ] **Step 6: Remove error types for deleted codepaths** - -- Remove `ApiFileNotFound` from `LookupError` error set in `root.zig`. Add `GodotNotFound` if not already there. -- `InvalidApiJson` was already removed from `DocDatabase.Error` in Task 8. - -- [ ] **Step 7: Delete JSON fixture tests** - -Delete tests in `root.zig` that create inline JSON or test `api_json_path`: -- `markdownForSymbol returns ApiFileNotFound for nonexistent file` -- `markdownForSymbol returns InvalidApiJson for malformed JSON` -- `markdownForSymbol loads from custom API file and finds symbol` -- `markdownForSymbol returns SymbolNotFound when symbol doesn't exist` -- `markdownForSymbol works with relative path` -- `formatAndDisplay with markdown format produces markdown output` -- `formatAndDisplay with terminal format produces terminal output` - -Keep cache-flow tests but update them to not create `extension_api.json`. - -- [ ] **Step 8: Update imports** - -Remove `pub const api = @import("api.zig");` if not already done. - -- [ ] **Step 9: Run tests to verify everything compiles and passes** - -Run: `zig build test 2>&1 | grep -E "PASS|FAIL"` -Expected: All tests pass - -- [ ] **Step 10: Commit CLI and root.zig together** - -```bash -git add src/root.zig src/cli/root.zig -git commit -m "refactor: remove JSON paths from root.zig and CLI, simplify to XML-only cache flow" -``` - ---- - -### Task 12: Update remaining cache-flow tests - -**Files:** -- Modify: `src/root.zig` -- Modify: `src/cache.zig` - -- [ ] **Step 1: Update markdownForSymbol cache tests** - -Update `markdownForSymbol reads from markdown cache when available` — remove the `extension_api.json` creation, ensure `cacheIsPopulated` returns true by creating the new sentinels (xml_docs/.complete + Object/index.md). - -Update `markdownForSymbol generates markdown cache when cache is empty` — this test now needs XML docs in the cache instead of JSON. 
Create a minimal XML file in `xml_docs/` dir with a `.complete` marker, or restructure to test `loadFromXmlDir` + `generateMarkdownCache` directly. - -- [ ] **Step 2: Run all tests** - -Run: `zig build test 2>&1 | grep -E "PASS|FAIL"` -Expected: All tests pass - -- [ ] **Step 3: Run the full build** - -Run: `zig build` -Expected: Clean build, no errors - -- [ ] **Step 4: Commit** - -```bash -git add src/root.zig src/cache.zig -git commit -m "test: update cache-flow tests for XML-only architecture" -``` - ---- - -### Task 13: Integration test — XML dir to markdown roundtrip - -**Files:** -- Modify: `src/DocDatabase.zig` (or `src/root.zig`) - -- [ ] **Step 1: Write integration test** - -```zig -test "XML dir to markdown roundtrip" { - const allocator = std.testing.allocator; - - var tmp_dir = std.testing.tmpDir(.{}); - defer tmp_dir.cleanup(); - - const tmp_path = try tmp_dir.dir.realpathAlloc(allocator, "."); - defer allocator.free(tmp_path); - - // Write a realistic XML doc - const xml_content = - \\ - \\ - \\ A test class. - \\ A class for testing. - \\ - \\ https://example.com - \\ - \\ - \\ - \\ - \\ Default constructor. - \\ - \\ - \\ - \\ - \\ - \\ - \\ Does a thing. - \\ - \\ - \\ - \\ Movement speed. - \\ - \\ - \\ - \\ - \\ - \\ Multiplies by a scalar. - \\ - \\ - \\ - \\ Maximum speed. 
- \\ - \\ - ; - - // Write XML to a subdir - const xml_dir = try std.fmt.allocPrint(allocator, "{s}/xml", .{tmp_path}); - defer allocator.free(xml_dir); - try std.fs.makeDirAbsolute(xml_dir); - const xml_path = try std.fmt.allocPrint(allocator, "{s}/TestClass.xml", .{xml_dir}); - defer allocator.free(xml_path); - try std.fs.cwd().writeFile(.{ .sub_path = xml_path, .data = xml_content }); - - // Load from XML - var arena = std.heap.ArenaAllocator.init(allocator); - defer arena.deinit(); - const db = try DocDatabase.loadFromXmlDir(arena.allocator(), allocator, xml_dir); - - // Generate markdown - const cache_dir = try std.fmt.allocPrint(allocator, "{s}/cache", .{tmp_path}); - defer allocator.free(cache_dir); - try cache.generateMarkdownCache(allocator, db, cache_dir); - - // Read back the class markdown - var output: std.Io.Writer.Allocating = .init(allocator); - defer output.deinit(); - try cache.readSymbolMarkdown(allocator, "TestClass", cache_dir, &output.writer); - const written = output.written(); - - // Verify key content - try std.testing.expect(std.mem.indexOf(u8, written, "# TestClass") != null); - try std.testing.expect(std.mem.indexOf(u8, written, "*Inherits: RefCounted*") != null); - try std.testing.expect(std.mem.indexOf(u8, written, "## Tutorials") != null); - try std.testing.expect(std.mem.indexOf(u8, written, "## Constructors") != null); - try std.testing.expect(std.mem.indexOf(u8, written, "## Methods") != null); - try std.testing.expect(std.mem.indexOf(u8, written, "## Properties") != null); - try std.testing.expect(std.mem.indexOf(u8, written, "## Constants") != null); - try std.testing.expect(std.mem.indexOf(u8, written, "## Operators") != null); - try std.testing.expect(std.mem.indexOf(u8, written, "do_thing") != null); - try std.testing.expect(std.mem.indexOf(u8, written, "speed") != null); - try std.testing.expect(std.mem.indexOf(u8, written, "operator *") != null); -} -``` - -- [ ] **Step 2: Run test** - -Run: `zig build test 2>&1 | grep "XML 
dir to markdown"` -Expected: PASS - -- [ ] **Step 3: Update snapshot files** - -Run full test suite, verify snapshots: -```bash -zig build test -git diff snapshots/ -``` - -- [ ] **Step 4: Commit** - -```bash -git add src/ snapshots/ -git commit -m "test: add XML-to-markdown roundtrip integration test, update snapshots" -``` - ---- - -### Task 14: Error-path replacement tests - -**Files:** -- Modify: `src/DocDatabase.zig` or `src/root.zig` - -- [ ] **Step 1: Write test for XML parse failure** - -`loadFromXmlDir` should propagate zig-xml parse errors when encountering malformed XML. The specific error variant depends on what `zig-xml` returns — check the actual error set from `XmlDocParser.parseClassDoc` (likely `error.MalformedXml` or a zig-xml `SyntaxError`). Use `expectError` with the correct variant: - -```zig -test "loadFromXmlDir returns error for malformed XML" { - const allocator = std.testing.allocator; - - var tmp_dir = std.testing.tmpDir(.{}); - defer tmp_dir.cleanup(); - - const tmp_path = try tmp_dir.dir.realpathAlloc(allocator, "."); - defer allocator.free(tmp_path); - - // Write a malformed XML file - try tmp_dir.dir.writeFile(.{ .sub_path = "Bad.xml", .data = "\n - \\ - \\ Base class. - \\ Base node class. 
- \\ - }); - - var arena = std.heap.ArenaAllocator.init(allocator); - defer arena.deinit(); - - const db = try DocDatabase.loadFromXmlDir(arena.allocator(), allocator, tmp_path); - const result = db.lookupSymbolExact("NonExistent"); - try std.testing.expectError(DocDatabase.Error.SymbolNotFound, result); -} -``` - -- [ ] **Step 3: Write test for missing cache directory** - -```zig -test "cacheIsPopulated returns false for nonexistent directory" { - const allocator = std.testing.allocator; - const result = try cache.cacheIsPopulated(allocator, "/tmp/gdoc-nonexistent-test-path"); - try std.testing.expect(!result); -} -``` - -- [ ] **Step 4: Run tests** - -Run: `zig build test 2>&1 | grep -E "PASS|FAIL"` -Expected: All tests pass - -- [ ] **Step 5: Commit** - -```bash -git add src/DocDatabase.zig src/cache.zig -git commit -m "test: add error-path tests for XML parse failure, missing symbols, missing cache" -``` - ---- - -### Task 15: Final cleanup and verification - -**Files:** -- All modified files - -- [ ] **Step 1: Verify no dead code remains** - -Search for any remaining references to removed items: - -```bash -grep -rn "api_json\|extension_api\|loadFromJson\|builtin_class\|no_xml\|GDOC_NO_XML\|api\.zig\|mergeXmlDocs\|fetchXmlDocs" src/ -``` - -Expected: No matches - -- [ ] **Step 2: Run full test suite** - -```bash -zig build test -``` - -Expected: All tests pass, no warnings - -- [ ] **Step 3: Test the binary manually** - -```bash -zig build run -- --clear-cache -zig build run -- Node2D -zig build run -- Vector2 -zig build run -- sin -zig build run -- Node2D.position -``` - -Expected: Each command shows documentation with the new format (inheritance, constructors where applicable, etc.) 
- -- [ ] **Step 4: Final commit if any remaining changes** - -```bash -git add -A -git commit -m "chore: final cleanup for XML-only doc source migration" -``` diff --git a/docs/superpowers/specs/2026-03-21-xml-docs-primary-source-design.md b/docs/superpowers/specs/2026-03-21-xml-docs-primary-source-design.md deleted file mode 100644 index b678ed7..0000000 --- a/docs/superpowers/specs/2026-03-21-xml-docs-primary-source-design.md +++ /dev/null @@ -1,214 +0,0 @@ -# Replace JSON Docs with XML as Primary Source - -**Date:** 2026-03-21 -**Status:** Proposed - -## Summary - -Replace the JSON extension API (`extension_api.json` from `godot --dump-extension-api`) with Godot's XML class documentation as the sole data source for gdoc. The XML docs are the upstream source from which the JSON is generated, and they contain significantly richer information: tutorials, code examples, constructors, operators, property defaults, and method qualifiers. - -## Motivation - -The current architecture uses JSON as the primary source and XML as a supplement to fill gaps. This is backwards — the XML docs are the authoritative, hand-curated source, and the JSON is a machine-generated subset designed for GDExtension binding generators, not human documentation. JSON-only data (hashes, memory sizes, native structures) is irrelevant to a doc viewer. 
- -Key XML advantages over JSON: -- **Tutorials** with documentation links -- **Constructors** (e.g., `Vector2(x, y)`) -- **Operators** (e.g., `Vector2 * float`) -- **Property default values** (e.g., `default="Vector2(0, 0)"`) -- **Method qualifiers** (`virtual`, `const`, `static`) -- **Code examples** in `[codeblock]` sections -- **Richer descriptions** — XML is hand-written; JSON strips or abbreviates - -## Design - -### Data Model - -The `DocDatabase.Entry` struct expands to hold everything XML provides: - -``` -Entry { - key // "Node2D" or "Node2D.position" - name // "Node2D" or "position" - kind // class, method, property, signal, constant, - // enum_value, constructor, operator, global_function - inherits // "CanvasItem" (classes only) - description // full BBCode description - brief_description - signature // ": Vector2", "(x: float, y: float) -> Vector2", etc. - qualifiers // "virtual const", "static" (methods only) - default_value // "Vector2(0, 0)" (properties only) - members // indices of child entries - tutorials // [{title, url}] -} -``` - -New fields on `Entry`: `inherits`, `qualifiers`, `default_value`, `tutorials` (the existing struct has `key`, `name`, `kind`, `description`, `brief_description`, `signature`, `members`, `parent_index`; the `Tutorial` type exists at the `DocDatabase` level but is not yet wired into `Entry`). - -New `EntryKind` value: `constructor` (added to existing set which already includes `operator`). - -The `DocDatabase` remains a flat symbol table keyed by dotted paths (`"Vector2.abs"`, `"@GlobalScope.sin"`). Utility functions from `@GlobalScope.xml` and `@GDScript.xml` are registered both under their qualified name and as top-level entries for convenience (e.g., both `"@GlobalScope.abs"` and `"abs"`). If both files define the same function name, `@GlobalScope` wins (it is the canonical source; `@GDScript` contains GDScript-specific helpers like `preload`). 
- -**No builtin class distinction:** The `builtin_class` EntryKind is removed. All XML `` elements become `EntryKind.class`. Builtins (Vector2, int, etc.) and regular classes (Node, Sprite2D, etc.) render identically — constructors and operators are their own entry kinds and render regardless of parent class type. - -**Enum extraction from XML:** XML stores enums within `` elements using an `enum` attribute (e.g., ``). Constants with the same `enum` attribute are grouped into enum entries with kind `enum_value`, keyed as `"ClassName.EnumName.VALUE_NAME"`. - -### XmlDocParser Expansion - -The parser currently handles: `brief_description`, `description`, `tutorials`, `methods`, `members` (properties), `signals`, `constants`. - -New parsing: - -- **``** — same structure as methods (name, params, return type, description) -- **``** — name like `operator +`, params, return type, description -- **`inherits` attribute** on `` — already parsed, now stored in DocDatabase -- **Method `qualifiers` attribute** — `"virtual const"`, `"static"`, etc. 
-- **Property `default` attribute** — `default="Vector2(0, 0)"` -- **Method/constructor/operator params** — `name`, `type`, `default` per param -- **Return types** — `` - -Expanded structs: - -``` -ClassDoc { - name, inherits, brief_description, description, tutorials - methods, properties, signals, constants // existing - constructors // new - operators // new -} - -MemberDoc { - name, description // existing - qualifiers // new (nullable; methods/constructors/operators) - default_value // new (nullable; properties/constants) - return_type // new (nullable; not on constants) - params: ?[]ParamDoc // new (nullable; not on constants) -} - -ParamDoc { - name, type, default_value -} -``` - -### Removals - -- **`api.zig`** — entire file (runs `godot --dump-extension-api`) -- **`DocDatabase.loadFromJsonFileLeaky`** — all JSON parsing logic in `DocDatabase.zig` -- **`--godot-extension-api` CLI flag** — and the `api_json_path` parameter threaded through `markdownForSymbol`, `formatAndDisplay`, and `renderWithZigdown` -- **`mergeXmlDocs`** in `root.zig` — no more supplementation -- **`fetchXmlDocs`** as a separate supplementation step — XML fetch becomes the main path -- **`api.generateApiJsonIfNotExists`** call in cache flow -- **`getJsonCachePathInDir`** and JSON-specific cache helpers in `cache.zig` -- **JSON cache file** (`extension_api.json`) from cache directory -- **`--no-xml` / `GDOC_NO_XML`** — the `no_xml` field on `Config` becomes meaningless since XML is the sole source; `Config.testing` updated to remove this field -- **`--godot-extension-api` flag definition in `cli/root.zig`** — flag declaration, reading, and threading through to `formatAndDisplay` -- **Tests using JSON fixtures** — tests in `root.zig` that create inline JSON (e.g., `markdownForSymbol returns ApiFileNotFound`) are deleted, not rewritten; the JSON path no longer exists - -**Kept:** -- **`bbcodez`** — still needed for BBCode→Markdown conversion in descriptions -- **`source_fetch.zig`** — still 
fetches XML docs from GitHub tarballs -- **`XmlDocParser.zig`** — expanded -- **`cache.zig`** — adapted to build from XML instead of JSON - -### Function Signature Changes - -`markdownForSymbol`, `formatAndDisplay`, and `renderWithZigdown` all lose the `api_json_path: ?[]const u8` parameter. The JSON file path codepath (direct load from a user-provided file) is removed entirely. These functions always use the cache flow — there is no "bypass cache" mode. - -### New Cache Flow - -Current: `godot --dump-extension-api` → JSON → merge XML → generate markdown cache - -New: - -``` -1. Check cache populated (markdown sentinel files exist) -2. If not: - a. godot --version → get version string - b. Download source tarball from GitHub → extract XML to cache/xml_docs/ - c. Parse all XML files → build DocDatabase (new: DocDatabase.loadFromXmlDir) - d. Generate markdown cache from DocDatabase -3. Read symbol markdown from cache -``` - -The `godot` binary is only used for `--version` (to match the tarball URL), never for `--dump-extension-api`. If `godot` is not on PATH, the tool errors with a clear message: "godot not found. Install Godot and ensure it's on your PATH." This matches the current behavior — `godot` has always been required for JSON export too. - -`cache.cacheIsPopulated` checks for the `xml_docs/.complete` marker plus the existence of at least one generated markdown directory (e.g., `Object/index.md` as the sentinel — `Object` is the root of the class hierarchy and is always present). - -### DocDatabase.loadFromXmlDir - -New entry point replacing `loadFromJsonFileLeaky`. Behavior: - -1. Open `xml_dir` and iterate all `.xml` files -2. For each file, call `XmlDocParser.parseClassDoc` (using an arena allocator so all strings outlive the function) -3. Create a class-level `Entry` with kind `class` (all XML `` elements are treated uniformly) -4. 
For each member category (methods, properties, signals, constants, constructors, operators), create child entries keyed as `"ClassName.member_name"` -5. Build `signature` strings from parsed params and return types: - - Methods: `(param: Type, param2: Type = default) -> ReturnType` - - Properties: `: Type` (with `= default` if present) - - Constructors: `(param: Type, ...)` (name is always the class name) - - Operators: `OperatorName(other: Type) -> ReturnType` -6. For `@GlobalScope.xml` and `@GDScript.xml`, register utility functions as both qualified (`@GlobalScope.sin`) and top-level (`sin`) entries -7. Populate `members` index arrays on class entries pointing to their children - -### Markdown Output Format - -With the expanded data model, generated markdown per class becomes richer: - -```markdown -# Vector2 - -*Inherits: none* - -A 2D vector using floating-point coordinates. - -## Description - -A 2-element structure that can be used to represent 2D coordinates... - -## Tutorials - -- [Math documentation index](https://docs.godotengine.org/en/stable/tutorials/math/index.html) -- [Vector math](https://docs.godotengine.org/en/stable/tutorials/math/vector_math.html) - -## Properties - -- **x: float** = `0.0` — The vector's X component. -- **y: float** = `0.0` — The vector's Y component. - -## Constructors - -- **Vector2()** — Constructs a default-initialized Vector2... -- **Vector2(from: Vector2i)** — Constructs a new Vector2 from Vector2i. -- **Vector2(x: float, y: float)** — Constructs a new Vector2... - -## Methods - -- **abs() -> Vector2** `const` — Returns a new vector with all components in absolute values. -- **angle() -> float** `const` — Returns this vector's angle... - -## Operators - -- **Vector2 * float -> Vector2** — Multiplies each component... -- **Vector2 + Vector2 -> Vector2** — Adds each component... - -## Constants - -- **ZERO = Vector2(0, 0)** — Zero vector... -- **ONE = Vector2(1, 1)** — One vector... 
-``` - -Key additions vs current output: -- Inheritance line -- Property default values -- Constructors section -- Full method signatures with params, return types, and qualifiers -- Operators section - -## Testing - -- Existing snapshot tests updated to reflect new markdown format -- New snapshots for classes with constructors/operators (e.g., Vector2) -- Unit tests for expanded XmlDocParser (constructors, operators, qualifiers, defaults, params) -- Integration test: XML dir → DocDatabase → markdown output roundtrip -- Tests using inline JSON fixtures are deleted (JSON path no longer exists); new tests use inline XML strings -- Replacement error-path tests: XML parse failure (malformed XML), symbol not found in XML-built database, cache directory missing/unwritable From 5ce03b430433013873ed19f6b5b6373630b39589 Mon Sep 17 00:00:00 2001 From: Simon Hartcher Date: Sat, 21 Mar 2026 20:40:32 +1100 Subject: [PATCH 19/19] fix: use makePath for recursive directory creation in cache ensureDirectoryExists used makeDirAbsolute which only creates a single directory level. After --clear-cache removes the entire cache tree, nested paths like cache/Node2D/ fail because the parent doesn't exist. --- src/cache.zig | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/cache.zig b/src/cache.zig index 9259417..83119e3 100644 --- a/src/cache.zig +++ b/src/cache.zig @@ -9,7 +9,7 @@ pub fn clearCache(config: *const Config) !void { pub fn ensureDirectoryExists(dir_path: []const u8) !void { var dir = std.fs.openDirAbsolute(dir_path, .{}) catch |err| switch (err) { error.FileNotFound => { - try std.fs.makeDirAbsolute(dir_path); + try std.fs.cwd().makePath(dir_path); return; }, else => return err,