feat: API catalog surface, protocol v2 tooling, MCP expansion, on+re update
## Summary
Session 2026-03-23. Closes the loop between handler code and discoverability
across all three surfaces (browser, CLI, MCP agent) via compile-time inventory
registration. Adds protocol v2 update tooling, extends MCP from 21 to 29 tools,
and brings the self-description up to date.
## API Catalog Surface (#[onto_api] proc-macro)
- crates/ontoref-derive: new proc-macro crate; `#[onto_api(method, path,
description, auth, actors, params, tags)]` emits `inventory::submit!(ApiRouteEntry{...})`
at link time
- crates/ontoref-daemon/src/api_catalog.rs: `catalog()` — pure fn over
`inventory::iter::<ApiRouteEntry>()`, zero runtime allocation
- GET /api/catalog: returns full annotated HTTP surface as JSON
- templates/pages/api_catalog.html: new page with client-side filtering by
method, auth, path/description; detail panel per route (params table,
feature flag); linked from dashboard card and nav
- UI nav: "API" link (</> icon) added to mobile dropdown and desktop bar
- inventory = "0.3" added to workspace.dependencies (MIT, zero transitive deps)
## Protocol Update Mode
- reflection/modes/update_ontoref.ncl: 9-step DAG (5 detect parallel, 2 update
idempotent, 2 validate, 1 report) — brings any project from protocol v1 to v2
by adding manifest.ncl and connections.ncl if absent, scanning ADRs for
deprecated check_hint, validating with nickel export
- reflection/templates/update-ontology-prompt.md: 8-phase reusable prompt for
agent-driven ontology enrichment (infrastructure → audit → core.ncl →
state.ncl → manifest.ncl → connections.ncl → ADR migration → validation)
## CLI — describe group extensions
- reflection/bin/ontoref.nu: `describe diff [--fmt] [--file]` and
`describe api [--actor] [--tag] [--auth] [--fmt]` registered as canonical
subcommands with log-action; aliases `df` and `da` added; QUICK REFERENCE
and ALIASES sections updated
## MCP — eight new tool registrations (21 → 29 total; two new tools below)
- ontoref_api_catalog: filters catalog() output by actor/tag/auth; returns
{ routes, total } — no HTTP roundtrip, calls inventory directly
- ontoref_file_versions: reads ProjectContext.file_versions DashMap per slug;
returns BTreeMap<filename, u64> reload counters
- insert_mcp_ctx: audited and updated from 15 to 28 entries in 6 groups
- HelpTool JSON: 8 new entries (validate_adrs, validate, impact, guides,
bookmark_list, bookmark_add, api_catalog, file_versions)
- ServerHandler::get_info instructions updated to mention new tools
## Web UI — dashboard additions
- Dashboard: "API Catalog" card (9th); "Ontology File Versions" section showing
per-file reload counters from file_versions DashMap
- dashboard_mp: builds BTreeMap<String, u64> from ctx.file_versions and injects
into Tera context
## on+re update
- .ontology/core.ncl: describe-query-layer and adopt-ontoref-tooling descriptions
updated; ontoref-daemon updated ("11 pages", "29 tools", API catalog,
per-file versioning, #[onto_api]); new node api-catalog-surface (Yang/Practice)
with 3 edges; artifact_paths extended across 3 nodes
- .ontology/state.ncl: protocol-maturity blocker updated (protocol v2 complete);
self-description-coverage catalyst updated with session 2026-03-23 additions
- ADR-007: "API Surface Discoverability via #[onto_api] Proc-Macro" — Accepted
## Documentation
- README.md: crates table updated (11 pages, 29 MCP tools, ontoref-derive row);
MCP representative table expanded; API Catalog, Semantic Diff, Per-File
Versioning paragraphs added; update_ontoref onboarding section added
- CHANGELOG.md: [Unreleased] section with 4 change groups
- assets/web/src/index.html: tool counts 19→29 (EN+ES), page counts 12→11
(EN+ES), daemon description paragraph updated with API catalog + #[onto_api]
282 lines
9.5 KiB
Plaintext
#!/usr/bin/env nu
# reflection/modules/validate.nu — ADR constraint validation runner.
#
# Interprets the typed constraint_check_type ADT exported by adrs/adr-schema.ncl.
# Each constraint.check record has a `tag` discriminant; this module dispatches
# execution per variant and returns a structured result.
#
# Commands:
#   validate check-constraint <c>  — run a single constraint record
#   validate check-adr <id>        — run all constraints for one ADR
#   validate check-all             — run all constraints across all accepted ADRs
#
# Error handling: do { ... } | complete — never panics, always returns a result.

use env.nu *
|
|
use store.nu [daemon-export-safe]
|
|
|
|
# ── Internal helpers ────────────────────────────────────────────────────────

# Project root for ADR lookups: per-project override when set, otherwise the
# global ontoref root. (`default` only replaces null, so mirror that exactly.)
def adr-root []: nothing -> string {
    let override = $env.ONTOREF_PROJECT_ROOT?
    if $override == null { $env.ONTOREF_ROOT } else { $override }
}
# Every ADR definition file under <project-root>/adrs/.
def adr-files []: nothing -> list<string> {
    let pattern = ([(adr-root), "adrs", "adr-*.ncl"] | path join)
    glob $pattern
}
# Resolve a check path (may be file or directory) relative to project root.
def resolve-path [rel: string]: nothing -> string {
    (adr-root) | path join $rel
}
# Run a 'Grep check: ripgrep pattern across paths; empty/non-empty assertion.
#
# Fields read from `check`: paths (relative to project root), pattern
# (ripgrep regex), must_be_empty (bool — when true, any match is a violation).
# Returns { passed, detail }.
def run-grep [check: record]: nothing -> record {
    let paths = ($check.paths | each { |p| resolve-path $p })
    let valid_paths = ($paths | where { |p| $p | path exists })

    # No existing path at all is itself a failure, not a vacuous pass.
    if ($valid_paths | is-empty) {
        return {
            passed: false,
            detail: $"No paths exist: ($check.paths | str join ', ')"
        }
    }

    # rg exits 0 iff at least one match was found; --count-matches keeps
    # stdout small (per-file counts) for the detail message.
    let result = do {
        ^rg --no-heading --count-matches $check.pattern ...$valid_paths
    } | complete

    let has_matches = ($result.exit_code == 0)

    if $check.must_be_empty {
        {
            passed: (not $has_matches),
            # BUGFIX: the "(violation)" parenthetical must be escaped — an
            # unescaped "(" inside an interpolated string is parsed by nu as
            # a subexpression (here, a call to an unknown `violation` command).
            detail: (if $has_matches { $"Pattern found \(violation): ($result.stdout | str trim)" } else { "Pattern absent — ok" })
        }
    } else {
        {
            passed: $has_matches,
            detail: (if $has_matches { "Pattern present — ok" } else { "Pattern absent (required match missing)" })
        }
    }
}
# Run a 'Cargo check: parse Cargo.toml and verify every entry in
# forbidden_deps is absent from all dependency sections (normal, dev, build).
def run-cargo [check: record]: nothing -> record {
    let cargo_path = ([(adr-root), "crates", $check.crate, "Cargo.toml"] | path join)
    if not ($cargo_path | path exists) {
        return { passed: false, detail: $"Cargo.toml not found: ($cargo_path)" }
    }

    let manifest = (open $cargo_path)
    # Union of dependency names across the three section kinds.
    let declared = ([
        ($manifest.dependencies? | default {}),
        ($manifest."dev-dependencies"? | default {}),
        ($manifest."build-dependencies"? | default {}),
    ] | each { |section| $section | columns } | flatten)

    let offending = ($check.forbidden_deps | where { |dep| $dep in $declared })
    {
        passed: ($offending | is-empty),
        detail: (if ($offending | is-empty) { "No forbidden deps found" } else { $"Forbidden deps present: ($offending | str join ', ')" })
    }
}
# Run a 'NuCmd check: execute cmd via nu -c, assert exit code.
# expect_exit defaults to 0 when absent from the check record.
def run-nucmd [check: record]: nothing -> record {
    let expected = ($check.expect_exit? | default 0)
    let result = do { nu -c $check.cmd } | complete
    let ok = ($result.exit_code == $expected)

    let detail = if $ok {
        "Command exited as expected"
    } else {
        $"Exit ($result.exit_code) ≠ expected ($expected): ($result.stderr | str trim)"
    }
    { passed: $ok, detail: $detail }
}
# Run an 'ApiCall check: GET endpoint, navigate json_path, compare to expected.
#
# Fields read from `check`: endpoint (URL), json_path (column path into the
# JSON response), expected (value; compared after string conversion).
# Returns { passed, detail }.
def run-apicall [check: record]: nothing -> record {
    # -s silent, -f fail on HTTP >= 400 so exit_code reflects HTTP errors too.
    let result = do { ^curl -sf $check.endpoint } | complete
    if $result.exit_code != 0 {
        # BUGFIX: the literal "(exit …)" was previously unescaped inside the
        # interpolated string, so nu parsed it as a call to the `exit` builtin
        # and terminated the entire script on the curl-failure path.
        return { passed: false, detail: $"curl failed \(exit ($result.exit_code)): ($result.stderr | str trim)" }
    }

    # NOTE(review): relies on `do { <internal pipeline> } | complete` turning a
    # failed `get` into a non-zero exit_code — confirm against the pinned nu
    # version (complete's capture semantics differ across releases).
    let value = do { $result.stdout | from json | get $check.json_path } | complete
    if $value.exit_code != 0 {
        return { passed: false, detail: $"json_path '($check.json_path)' not found in response" }
    }

    # Compare as trimmed strings so numeric and string expectations both work.
    let actual = $value.stdout | str trim
    let expected = ($check.expected | into string)
    {
        passed: ($actual == $expected),
        detail: (if ($actual == $expected) { "Value matches" } else { $"Expected '($expected)', got '($actual)'" })
    }
}
# Run a 'FileExists check: assert presence or absence of a path.
# `present` defaults to true (assert existence) when absent from the record.
def run-fileexists [check: record]: nothing -> record {
    let target = (resolve-path $check.path)
    let want_present = ($check.present? | default true)
    let is_present = ($target | path exists)
    let ok = ($is_present == $want_present)

    let detail = if $ok {
        if $want_present { $"File exists: ($target)" } else { $"File absent: ($target)" }
    } else {
        if $want_present { $"File missing: ($target)" } else { $"File unexpectedly present: ($target)" }
    }
    { passed: $ok, detail: $detail }
}
# Dispatch a single constraint.check record to the appropriate runner,
# keyed on the ADT discriminant `tag`. Unknown tags fail closed.
def dispatch-check [check: record]: nothing -> record {
    let tag = $check.tag
    if $tag == "Grep" {
        run-grep $check
    } else if $tag == "Cargo" {
        run-cargo $check
    } else if $tag == "NuCmd" {
        run-nucmd $check
    } else if $tag == "ApiCall" {
        run-apicall $check
    } else if $tag == "FileExists" {
        run-fileexists $check
    } else {
        { passed: false, detail: $"Unknown check tag: ($tag)" }
    }
}
# ── Public commands ──────────────────────────────────────────────────────────

# Run a single constraint record.
# Returns { constraint_id, severity, passed, detail }.
export def "validate check-constraint" [
    constraint: record, # Constraint record from an ADR export
]: nothing -> record {
    # Legacy constraints (protocol v1) carry only a free-text check_hint;
    # they cannot be executed and are reported as failing.
    if ($constraint.check? | is-empty) {
        return {
            constraint_id: ($constraint.id? | default "unknown"),
            severity: ($constraint.severity? | default "Hard"),
            passed: false,
            detail: "No typed 'check' field — constraint uses deprecated check_hint only"
        }
    }

    let outcome = (dispatch-check $constraint.check)
    {
        constraint_id: $constraint.id,
        severity: $constraint.severity,
        passed: $outcome.passed,
        detail: $outcome.detail,
    }
}
# Run all constraints for a single ADR by id ("001", "adr-001", or "adr-001-slug").
# Returns a list of { constraint_id, severity, passed, detail }.
export def "validate check-adr" [
    id: string, # ADR id: "001", "adr-001", or full stem
    --fmt: string = "table", # Output: table | json | yaml
]: nothing -> any {
    # Accept bare numbers ("001") as well as prefixed ids ("adr-001").
    let canonical = if ($id | str starts-with "adr-") { $id } else { $"adr-($id)" }
    let matches = (glob ([(adr-root), "adrs", $"($canonical)-*.ncl"] | path join))
    if ($matches | is-empty) {
        error make { msg: $"ADR '($id)' not found in adrs/" }
    }

    let adr = (daemon-export-safe ($matches | first))
    if $adr == null {
        error make { msg: $"ADR '($id)' failed to export" }
    }

    # One result record per constraint; legacy check_hint-only constraints
    # fail with an explanatory detail instead of being skipped silently.
    let results = ($adr.constraints | each { |c|
        if ($c.check? | is-empty) {
            {
                constraint_id: $c.id,
                severity: $c.severity,
                passed: false,
                detail: "check field missing — uses deprecated check_hint"
            }
        } else {
            validate check-constraint $c
        }
    })

    if $fmt == "json" {
        $results | to json
    } else if $fmt == "yaml" {
        $results | to yaml
    } else {
        $results | table --expand
    }
}
# Run constraints across all accepted ADRs — ALL severities by default;
# pass --hard-only to restrict to Hard constraints.
# Returns { adr_id, constraint_id, severity, passed, detail } records.
# Exits non-zero (via error make) if ANY included constraint fails, Hard or
# Soft — the former header claiming "Hard only" did not match the code.
export def "validate check-all" [
    --fmt: string = "table", # Output: table | json | yaml
    --hard-only, # Include only Hard constraints (default: all)
]: nothing -> any {
    # One result record per constraint, across every exported, accepted ADR.
    let all_results = (adr-files | each { |ncl|
        let adr = (daemon-export-safe $ncl)
        # Skip files that fail to export or are not Accepted.
        # NOTE(review): relies on `return` exiting only the enclosing closure
        # (yielding [] for this file) — confirm against the pinned nu version.
        if $adr == null { return [] }
        if $adr.status != "Accepted" { return [] }

        $adr.constraints | each { |c|
            # null entries (filtered-out severities) are dropped by `compact`.
            if $hard_only and $c.severity != "Hard" { return null }
            let res = if ($c.check? | is-empty) {
                # Constraint predates the typed-check schema (protocol v1).
                { passed: false, detail: "check field missing — uses deprecated check_hint" }
            } else {
                dispatch-check $c.check
            }
            {
                adr_id: $adr.id,
                constraint_id: $c.id,
                severity: $c.severity,
                passed: $res.passed,
                detail: $res.detail,
            }
        } | compact
    } | flatten)

    let failures = ($all_results | where passed == false)
    let total = ($all_results | length)
    let n_fail = ($failures | length)

    let output = match $fmt {
        "json" => { $all_results | to json },
        "yaml" => { $all_results | to yaml },
        _ => { $all_results | table --expand },
    }

    # Print first so results are visible even when we then error out below.
    print $output

    # Non-zero exit signals CI; any failing included constraint trips it.
    if ($failures | is-not-empty) {
        error make {
            msg: $"($n_fail) of ($total) constraints failed",
        }
    }
}
# Show a summary of constraint validation state across all accepted ADRs.
# Intended for ontoref status / describe guides.
export def "validate summary" []: nothing -> record {
    # Flat list of { severity, passed } across every accepted, exportable ADR.
    let outcomes = (adr-files | each { |ncl|
        let adr = (daemon-export-safe $ncl)
        if $adr == null { return [] }
        if $adr.status != "Accepted" { return [] }

        $adr.constraints | each { |c|
            let verdict = if ($c.check? | is-empty) {
                { passed: false }
            } else {
                dispatch-check $c.check
            }
            { severity: $c.severity, passed: $verdict.passed }
        } | compact
    } | flatten)

    # Count total and passing for one severity class.
    let tally = { |sev|
        let subset = ($outcomes | where severity == $sev)
        { total: ($subset | length), passing: ($subset | where passed == true | length) }
    }
    let hard = (do $tally "Hard")
    let soft = (do $tally "Soft")
    {
        hard_total: $hard.total,
        hard_passing: $hard.passing,
        soft_total: $soft.total,
        soft_passing: $soft.passing,
    }
}