.coder/m
Some checks failed
Nickel Type Check / Nickel Type Checking (push) Has been cancelled
Rust CI / Security Audit (push) Has been cancelled
Rust CI / Check + Test + Lint (nightly) (push) Has been cancelled
Rust CI / Check + Test + Lint (stable) (push) Has been cancelled

This commit is contained in:
Jesús Pérez 2026-03-29 00:19:56 +00:00
parent 502b5f0caa
commit da083fb9ec
Signed by: jesus
GPG Key ID: 9F243E355E0BC939
51 changed files with 5953 additions and 172 deletions

5
.gitignore vendored
View File

@@ -69,3 +69,8 @@ vendordiff.patch
# Generated SBOM files
SBOM.*.json
*.sbom.json
# UnoCSS build
assets/css/node_modules/
assets/css/pnpm-lock.yaml
crates/ontoref-daemon/public/css/ontoref.css

View File

@@ -88,6 +88,7 @@
"globs": [
"**/*.md",
"!node_modules/**",
"!**/node_modules/**",
"!target/**",
"!.git/**",
"!build/**",
@@ -106,6 +107,7 @@
"assets/branding/**",
"assets/logo_prompt.md",
"node_modules/**",
"**/node_modules/**",
"target/**",
".git/**",
"build/**",

View File

@@ -82,9 +82,10 @@ let d = import "../ontology/defaults/core.ncl" in
"adrs/adr-007-api-surface-discoverability-onto-api-proc-macro.ncl",
"adrs/adr-008-ncl-first-config-validation-and-override-layer.ncl",
"adrs/adr-009-manifest-self-interrogation-layer-three-semantic-axes.ncl",
"adrs/adr-010-protocol-migration-system.ncl",
"CHANGELOG.md",
],
adrs = ["adr-001", "adr-002", "adr-003", "adr-004", "adr-005", "adr-006", "adr-007", "adr-008", "adr-009"],
adrs = ["adr-001", "adr-002", "adr-003", "adr-004", "adr-005", "adr-006", "adr-007", "adr-008", "adr-009", "adr-010"],
},
d.make_node {
@@ -151,9 +152,27 @@ let d = import "../ontology/defaults/core.ncl" in
"reflection/forms/adopt_ontoref.ncl",
"reflection/templates/adopt_ontoref.nu.j2",
"reflection/templates/update-ontology-prompt.md",
"reflection/migrations/",
],
},
d.make_node {
id = "protocol-migration-system",
name = "Protocol Migration System",
pole = 'Yang,
level = 'Practice,
description = "Progressive, ordered protocol migrations for consumer projects. Each migration is an NCL file in reflection/migrations/NNN-slug.ncl declaring id, slug, description, a typed check (FileExists | Grep | NuCmd), and instructions interpolated at runtime with project_root and project_name. Applied state is determined solely by whether the check passes — no state file, fully idempotent. NuCmd checks must be valid Nushell (no bash &&, $env.VAR not $VAR). Accessible via `ontoref migrate list/pending/show` and the interactive group dispatch. Narrows ADR instance checks to `adr-[0-9][0-9][0-9]-*.ncl` to exclude schema/template infrastructure files from pattern matching.",
invariant = false,
artifact_paths = [
"reflection/migrations/",
"reflection/modules/migrate.nu",
"reflection/nulib/interactive.nu",
"reflection/nulib/help.nu",
"reflection/bin/ontoref.nu",
],
adrs = ["adr-010"],
},
d.make_node {
id = "ontology-three-file-split",
name = "Ontology Three-File Split",
@@ -503,6 +522,16 @@ let d = import "../ontology/defaults/core.ncl" in
{ from = "manifest-self-description", to = "adr-lifecycle", kind = 'Complements, weight = 'Medium,
note = "capabilities.adrs[] creates explicit typed links from capabilities to the ADRs that formalize them — the ADR→Node linkage pattern extended to the manifest layer." },
# Protocol Migration System edges
{ from = "protocol-migration-system", to = "adopt-ontoref-tooling", kind = 'ManifestsIn, weight = 'High,
note = "Migration system is the versioned upgrade surface for adopt-ontoref-tooling — new protocol features arrive as numbered migrations, not template rewrites." },
{ from = "protocol-migration-system", to = "adr-lifecycle", kind = 'Complements, weight = 'High,
note = "Each migration check can verify ADR-level constraints are met in consumer repos — migrations and ADRs are complementary protocol enforcement layers." },
{ from = "protocol-migration-system", to = "no-enforcement", kind = 'Complements, weight = 'Medium,
note = "Migrations are advisory: `migrate pending` reports state, never applies automatically. The actor decides when to apply." },
{ from = "self-describing", to = "protocol-migration-system", kind = 'ManifestsIn, weight = 'Medium,
note = "Ontoref runs its own migration checks against itself — the migration system is self-applied." },
# Config Surface edges
{ from = "config-surface", to = "ontoref-daemon", kind = 'ManifestsIn, weight = 'High },
{ from = "config-surface", to = "ontoref-ontology-crate", kind = 'DependsOn, weight = 'High,

View File

@@ -61,6 +61,33 @@ m.make_manifest {
},
],
templates = [
m.make_template {
id = "project-full-adoption-prompt",
kind = 'AgentPrompt,
source_path = "reflection/templates/project-full-adoption-prompt.md",
description = "Master adoption prompt for new and existing projects: protocol infrastructure, ontology enrichment, config surface (nickel-validated-overrides + ConfigFields derive), API surface (#[onto_api]), and manifest self-interrogation (capabilities/requirements/critical_deps). Orchestrates all other templates.",
},
m.make_template {
id = "update-ontology-prompt",
kind = 'AgentPrompt,
source_path = "reflection/templates/update-ontology-prompt.md",
description = "8-phase ontology enrichment prompt: core.ncl nodes/edges, state.ncl dimension transitions, manifest assets, connections, ADR check_hint migration. Called from Phase 2 of project-full-adoption-prompt.",
},
m.make_template {
id = "manifest-self-interrogation-prompt",
kind = 'AgentPrompt,
source_path = "reflection/templates/manifest-self-interrogation-prompt.md",
description = "Focused prompt for populating capabilities[], requirements[], and critical_deps[] in manifest.ncl. Called from Phase 5 of project-full-adoption-prompt.",
},
m.make_template {
id = "vendor-frontend-assets-prompt",
kind = 'AgentPrompt,
source_path = "reflection/templates/vendor-frontend-assets-prompt.md",
description = "Guide for vendoring frontend JS dependencies locally: directory layout (assets/vendor/), just recipe structure (assets.just with pinned version variables), Tera template integration, CDN asset verification steps, and agent execution checklist. Reusable across any ontoref-protocol project with a static-file-serving daemon.",
},
],
consumption_modes = [
m.make_consumption_mode {
consumer = 'Developer,

View File

@@ -24,8 +24,8 @@ let d = import "../ontology/defaults/state.ncl" in
from = "adoption-tooling-complete",
to = "protocol-stable",
condition = "ADR-001 accepted, ontoref.dev published, at least two external projects consuming the protocol.",
catalyst = "First external adoption.",
blocker = "ontoref.dev not yet published; no external consumers yet. Auth model complete. Install pipeline complete. Personal/career schema layer present; content modes operational. Nu 0.111 compat fixed (ADR-006). Protocol v2 complete: manifest.ncl + connections.ncl templates, update_ontoref mode, API catalog via #[onto_api], describe diff, describe api, per-file versioning. Config surface complete (ADR-008): typed DaemonNclConfig, #[derive(ConfigFields)] inventory coherence registry, NCL contracts (LogConfig/DaemonConfig in .ontoref/contracts.ncl), override-layer mutation API, multi-consumer manifest schema. Manifest self-interrogation layer complete (ADR-009): capability_type, requirement_type (env_target: Production/Development/Both, kind: Tool/Service/EnvVar/Infrastructure), critical_dep_type — describe requirements new subcommand, describe guides extended. Syntaxis syntaxis-ontology crate has pending ES→EN migration errors.",
catalyst = "10 projects consuming the protocol: vapora, stratumiops, kogral, typedialog, secretumvault, rustelo, librecloud_renew, website-impl, jpl_ontology, provisioning. ADR-001 Accepted. Auth model, install pipeline, personal/career schemas, content modes, API catalog (#[onto_api], ADR-007), config surface (ADR-008), manifest self-interrogation (ADR-009), protocol migration system (ADR-010) all complete.",
blocker = "ontoref.dev not yet published.",
horizon = 'Months,
},
],

View File

@@ -111,23 +111,25 @@ repos:
hooks:
- id: check-added-large-files
args: ['--maxkb=1000']
exclude: ^assets/presentation/
exclude: (^assets/presentation/|node_modules/)
- id: check-case-conflict
exclude: node_modules/
- id: check-merge-conflict
exclude: node_modules/
- id: check-toml
exclude: ^assets/presentation/
exclude: (^assets/presentation/|node_modules/)
- id: check-yaml
exclude: ^(\.woodpecker/|assets/presentation/)
exclude: (^\.woodpecker/|^assets/presentation/|node_modules/)
- id: end-of-file-fixer
exclude: ^assets/presentation/
exclude: (^assets/presentation/|node_modules/)
- id: trailing-whitespace
exclude: (\.md$|^assets/presentation/)
exclude: (\.md$|^assets/presentation/|node_modules/)
- id: mixed-line-ending
exclude: ^assets/presentation/
exclude: (^assets/presentation/|node_modules/)

View File

@@ -7,6 +7,59 @@ ADRs referenced below live in `adrs/` as typed Nickel records.
## [Unreleased]
### Protocol Migration System — progressive NCL checks for consumer project upgrades (ADR-010)
Replaces the template-prompt approach with an ordered, idempotent migration system. Applied state
determined by check result alone — no state file. 6 migrations shipped; runtime ships
`migrate list/pending/show` with interactive group dispatch.
#### `reflection/migrations/` — 6 ordered migrations
- `0001-ontology-infrastructure``.ontology/manifest.ncl` and `connections.ncl` present.
- `0002-adr-typed-checks` — no `check_hint` in ADR instance files (`adr-[0-9][0-9][0-9]-*.ncl`);
check narrowed from `adrs/` broad scan to exclude schema/template infrastructure files.
- `0003-manifest-self-interrogation``capabilities[]` and `requirements[]` non-empty in manifest.ncl.
- `0004-just-convention` — justfile validates against canonical module convention (pending in this repo — documented gap).
- `0005-mode-step-schema` — all reflection mode steps declare `actor`, `on_error`, `depends_on`.
- `0006-claude-agent-entrypoint``Agent Entry-Point Protocol` section present in `.claude/CLAUDE.md`.
All NuCmd checks rewritten from bash to valid Nushell: `&&` removed, `$env.VAR` replacing `$VAR`,
no bash-style redirects. Grep checks on ADR files use `adr-[0-9][0-9][0-9]-*.ncl` glob.
#### `reflection/modules/migrate.nu` — new module
- `migrate list [--fmt] [--actor]` — all migrations with applied/pending status; JSON for agents.
- `migrate pending [--fmt] [--actor]` — pending only.
- `migrate show <id> [--fmt]` — runtime-interpolated instructions; accepts short ids (`002``0002`).
- Applied state: `run-migration-check` dispatches over `FileExists | Grep | NuCmd`.
- No state file — idempotent by construction.
#### `reflection/nulib/interactive.nu` + `help.nu``migrate` group wired
- `group-command-info``migrate` case added (list, pending, show).
- `run-group-command``migrate` dispatch added.
- `help-group``migrate` help section added; fallback "Available groups" updated.
#### `reflection/bin/ontoref.nu` — shims + aliases
- `main migrate`, `main migrate list/pending/show` added.
- Short aliases: `mg`, `mg l`, `mg p`.
#### `reflection/schemas/justfile-convention.ncl` — export fix
- Removed `Module` and `ModuleSystem` from the exported record (open contract fields with no default
value caused `nickel export` to fail). Both remain as `let` bindings for internal NCL use.
#### Ontology update
| Artifact | Change |
|----------|--------|
| `adrs/adr-010-...ncl` | Created — protocol migration system, progressive NCL checks |
| `.ontology/core.ncl` | `protocol-migration-system` node added; `adopt-ontoref-tooling` artifacts updated; `adr-lifecycle` updated with ADR-010; 4 new edges |
| `.ontology/state.ncl` | `protocol-maturity` catalyst updated (10 consumers, all features complete); blocker narrowed to `ontoref.dev not yet published` |
---
### Manifest Self-Interrogation Layer — capabilities, requirements, critical deps (ADR-009)
Three new typed arrays in `manifest_type` answering operational self-knowledge queries distinct from

View File

@@ -131,6 +131,14 @@ gate — Rust structs are contract-trusted readers with `#[serde(default)]`.
Ontoref demonstrates the pattern on itself: `.ontoref/contracts.ncl` applies `LogConfig` and
`DaemonConfig` contracts to `.ontoref/config.ncl`. ([ADR-008](adrs/adr-008-ncl-first-config-validation-and-override-layer.ncl))
**Protocol Migration System** — protocol upgrades for consumer projects expressed as ordered NCL files
in `reflection/migrations/NNN-slug.ncl`. Each migration declares a typed check (`FileExists | Grep |
NuCmd`) whose result IS the applied state — no state file, fully idempotent. `migrate list` shows all
migrations with applied/pending status; `migrate pending` lists only what is missing; `migrate show <id>`
renders runtime-interpolated instructions (project_root and project_name auto-detected). NuCmd checks are
valid Nushell (no bash `&&`, `$env.VAR` not `$VAR`). Grep checks targeting ADR files scope to
`adr-[0-9][0-9][0-9]-*.ncl` to exclude schema/template infrastructure files. ([ADR-010](adrs/adr-010-protocol-migration-system.ncl))
**Manifest Self-Interrogation** — `manifest_type` gains three typed arrays that answer self-knowledge
queries agents and operators need on cold start: `capabilities[]` (what the project does, why it was
built, how it works — with explicit `nodes[]` and `adrs[]` cross-references into the DAG),

View File

@@ -0,0 +1,84 @@
let d = import "adr-defaults.ncl" in
d.make_adr {
id = "adr-010",
title = "Protocol Migration System — Progressive NCL Checks for Consumer Project Upgrades",
status = 'Accepted,
date = "2026-03-28",
context = "As the ontoref protocol evolved (manifest.ncl self-interrogation, typed ADR checks, CLAUDE.md agent entry-point, justfile convention), the adoption tooling relied on static prompt templates with manual {placeholder} substitution. An agent or developer adopting ontoref had no machine-queryable way to know which protocol features were missing from their project, nor how to apply them in a safe, ordered sequence. The template approach produced four separate documents that drifted out of sync with the actual protocol state and required human judgement to determine which ones applied. There was no idempotency guarantee and no check mechanism — a project that had already applied a change would re-read instructions that no longer applied.",
decision = "Protocol upgrades for consumer projects are expressed as ordered NCL migration files in reflection/migrations/NNN-slug.ncl. Each migration declares: id (zero-padded 4-digit string), slug, description, a typed check record (FileExists | Grep | NuCmd), and an instructions string interpolated at runtime with project_root and project_name. Applied state is determined solely by whether the check passes — there is no state file. This makes migrations fully idempotent: running `migrate list` on an already-compliant project shows all applied with no side effects. NuCmd checks must be valid Nushell (no bash &&, $env.VAR not $VAR, no bash redirects). Grep checks targeting ADR files must use the glob pattern adrs/adr-[0-9][0-9][0-9]-*.ncl to exclude infrastructure files (adr-schema.ncl, adr-constraints.ncl, _template.ncl) that legitimately contain deprecated field names as schema definitions. The system is exposed via `ontoref migrate list`, `migrate pending`, and `migrate show <id>` — wired into the interactive group dispatch and help system. Migrations are advisory: the system reports state, never applies changes automatically.",
rationale = [
{
claim = "Check-as-source-of-truth eliminates state file drift",
detail = "Any state file recording 'migration 0003 applied' becomes stale the moment someone reverts a change, changes branches, or clones a fresh repo. The check IS the state: if the condition is satisfied, the migration is applied; if not, it is pending. This is the same principle used by database migration tools that check for a schema version column — except here the 'column' is a Nushell assertion over the project's file system. No synchronization required.",
},
{
claim = "Typed checks (FileExists | Grep | NuCmd) cover the full protocol surface",
detail = "FileExists covers structural requirements (.ontology/manifest.ncl present). Grep covers content requirements (pattern present or absent in specific files). NuCmd covers semantic requirements that require evaluation — nickel export succeeds, capabilities[] is non-empty, justfile validates. The three types compose the full assertion space without requiring a general-purpose script language in the migration definition itself.",
},
{
claim = "Ordered numbering enables dependency reasoning without a dependency graph",
detail = "Migration 0003 (manifest self-interrogation) requires migration 0001 (manifest.ncl present) to have been applied. Rather than declaring explicit depends_on edges (which require a DAG evaluator), the numeric ordering encodes the implicit prerequisite sequence. An agent applying pending migrations in order will always satisfy prerequisites before dependent checks.",
},
],
consequences = {
positive = [
"`migrate pending` gives agents and developers a single authoritative list of what is missing — no manual comparison against protocol documentation",
"Migrations are idempotent and safe to re-run: `migrate list` on a fully-adopted project is a no-op read",
"Instructions are interpolated at runtime with project_root and project_name — no manual placeholder substitution",
"New protocol features arrive as numbered migrations without touching existing template files",
"NuCmd checks encode the same typed check logic used by ADR constraints in validate.nu — consistent assertion model across the protocol",
],
negative = [
"NuCmd checks must be single-line Nushell (nu -c) — complex multi-step checks become dense; readability degrades for non-trivial assertions",
"Grep checks require knowing which files to exclude (infrastructure vs instance files); the adr-[0-9][0-9][0-9]-*.ncl pattern is a convention that authors must follow",
"Migration ordering encodes implicit dependencies — a migration that genuinely depends on two prior migrations has no way to express that formally beyond numeric sequence",
],
},
alternatives_considered = [
{
option = "Single monolithic adoption prompt template with {placeholder} substitution",
why_rejected = "Produced four separate documents (project-full-adoption-prompt.md, update-ontology-prompt.md, manifest-self-interrogation-prompt.md, vendor-frontend-assets-prompt.md) that drifted out of sync. Required manual judgement to determine which applied to a given project. No idempotency, no machine-queryable state, no ordered application guarantee. Each new protocol feature required updating multiple templates.",
},
{
option = "State file recording applied migration IDs",
why_rejected = "State files become stale on branch switches, cherry-picks, and fresh clones. They require commit discipline to keep in sync. A project where someone manually applied the changes without running the migration tool would show the migration as pending despite being satisfied — false negatives. The check-as-truth model has no false negatives by construction.",
},
{
option = "Jinja2/j2 templating for instruction rendering",
why_rejected = "The ontoref runtime already runs Nushell for all automation. Adding a j2 dependency for template rendering introduces a new tool to install, configure, and maintain. Runtime string interpolation in Nushell (str replace --all) is sufficient for the two substitution values needed (project_root, project_name) and keeps the migration runner dependency-free.",
},
],
constraints = [
{
id = "nucmd-checks-must-be-nushell",
claim = "NuCmd check cmd fields must be valid Nushell — no bash operators (&&, ||, 2>/dev/null), no $VARNAME (must be $env.VARNAME)",
scope = "reflection/migrations/*.ncl (any migration with tag = 'NuCmd)",
severity = 'Hard,
check = { tag = 'Grep, pattern = "&&|\\$[A-Z_]+[^)]", paths = ["reflection/migrations/"], must_be_empty = true },
rationale = "The migration runner executes checks via `nu -c $check.cmd`. Bash syntax in a Nu script produces parser errors that surface as false-negative check results — the migration appears pending due to a runner error, not because the condition is unmet.",
},
{
id = "grep-checks-use-instance-glob",
claim = "Grep checks targeting ADR files must scope to adrs/adr-[0-9][0-9][0-9]-*.ncl, not adrs/ or adrs/adr-*.ncl",
scope = "reflection/migrations/*.ncl (any migration with tag = 'Grep and paths containing 'adrs')",
severity = 'Soft,
check = { tag = 'Grep, pattern = "\"adrs/\"", paths = ["reflection/migrations/"], must_be_empty = true },
rationale = "adr-schema.ncl, adr-constraints.ncl, adr-defaults.ncl, and _template.ncl are infrastructure files that legitimately contain deprecated field names as schema definitions. Scanning all of adrs/ produces false positives in ontoref's own repo and in any consumer project that vendors the ADR schema files.",
},
],
related_adrs = ["adr-001", "adr-006"],
ontology_check = {
decision_string = "Protocol migrations expressed as ordered NCL files with typed idempotent checks; applied state determined by check result not state file; NuCmd checks must be valid Nushell; Grep checks on ADR files must use instance-only glob",
invariants_at_risk = ["no-enforcement", "self-describing"],
verdict = 'Safe,
},
}

964
api-catalog.json Normal file
View File

@@ -0,0 +1,964 @@
[
{
"method": "GET",
"path": "/actors",
"description": "List all registered actor sessions with their last-seen timestamp and pending notification count",
"auth": "viewer",
"actors": [
"developer",
"admin"
],
"params": [
{
"name": "project",
"kind": "string",
"constraint": "optional",
"description": "Filter by project slug"
}
],
"tags": [
"actors"
],
"feature": ""
},
{
"method": "POST",
"path": "/actors/register",
"description": "Register an actor session and receive a bearer token for subsequent calls",
"auth": "none",
"actors": [
"agent",
"developer",
"ci"
],
"params": [
{
"name": "actor",
"kind": "string",
"constraint": "required",
"description": "Actor type (agent|developer|ci|admin)"
},
{
"name": "project",
"kind": "string",
"constraint": "optional",
"description": "Project slug to associate with"
},
{
"name": "label",
"kind": "string",
"constraint": "optional",
"description": "Human label for audit trail"
}
],
"tags": [
"actors",
"auth"
],
"feature": ""
},
{
"method": "DELETE",
"path": "/actors/{token}",
"description": "Deregister an actor session and invalidate its bearer token",
"auth": "none",
"actors": [
"agent",
"developer",
"ci"
],
"params": [],
"tags": [
"actors",
"auth"
],
"feature": ""
},
{
"method": "POST",
"path": "/actors/{token}/profile",
"description": "Update actor profile metadata: display name, role, and custom context fields",
"auth": "none",
"actors": [
"agent",
"developer"
],
"params": [],
"tags": [
"actors"
],
"feature": ""
},
{
"method": "POST",
"path": "/actors/{token}/touch",
"description": "Extend actor session TTL; prevents the session from expiring due to inactivity",
"auth": "none",
"actors": [
"agent",
"developer",
"ci"
],
"params": [],
"tags": [
"actors"
],
"feature": ""
},
{
"method": "GET",
"path": "/adr/{id}",
"description": "Read a single ADR by id, exported from NCL as structured JSON",
"auth": "none",
"actors": [
"agent",
"developer"
],
"params": [
{
"name": "slug",
"kind": "string",
"constraint": "optional",
"description": "Project slug (defaults to primary)"
}
],
"tags": [
"adrs"
],
"feature": ""
},
{
"method": "GET",
"path": "/api/catalog",
"description": "Full catalog of daemon HTTP endpoints with metadata: auth, actors, params, tags",
"auth": "none",
"actors": [
"agent",
"developer",
"ci",
"admin"
],
"params": [],
"tags": [
"meta",
"catalog"
],
"feature": ""
},
{
"method": "GET",
"path": "/backlog-json",
"description": "Export the project backlog as structured JSON from reflection/backlog.ncl",
"auth": "viewer",
"actors": [
"developer",
"agent"
],
"params": [
{
"name": "slug",
"kind": "string",
"constraint": "optional",
"description": "Project slug (defaults to primary)"
}
],
"tags": [
"backlog"
],
"feature": ""
},
{
"method": "POST",
"path": "/cache/invalidate",
"description": "Invalidate one or all NCL cache entries, forcing re-export on next request",
"auth": "admin",
"actors": [
"developer",
"admin"
],
"params": [
{
"name": "file",
"kind": "string",
"constraint": "optional",
"description": "Specific file path to invalidate (omit to invalidate all)"
}
],
"tags": [
"cache"
],
"feature": ""
},
{
"method": "GET",
"path": "/cache/stats",
"description": "NCL export cache statistics: entry count, hit/miss counters",
"auth": "viewer",
"actors": [
"developer",
"admin"
],
"params": [],
"tags": [
"cache",
"meta"
],
"feature": ""
},
{
"method": "GET",
"path": "/config/cross-project",
"description": "Compare config surfaces across all registered projects: shared values, conflicts, coverage gaps",
"auth": "none",
"actors": [
"agent",
"developer"
],
"params": [],
"tags": [
"config"
],
"feature": ""
},
{
"method": "GET",
"path": "/describe/actor-init",
"description": "Minimal onboarding payload for a new actor session: what to register as and what to do first",
"auth": "none",
"actors": [
"agent"
],
"params": [
{
"name": "actor",
"kind": "string",
"constraint": "optional",
"description": "Actor type to onboard as"
},
{
"name": "slug",
"kind": "string",
"constraint": "optional",
"description": "Project slug"
}
],
"tags": [
"describe",
"actors"
],
"feature": ""
},
{
"method": "GET",
"path": "/describe/capabilities",
"description": "Available reflection modes, just recipes, Claude capabilities and CI tools for the project",
"auth": "none",
"actors": [
"agent",
"developer",
"ci"
],
"params": [
{
"name": "slug",
"kind": "string",
"constraint": "optional",
"description": "Project slug (defaults to primary)"
}
],
"tags": [
"describe"
],
"feature": ""
},
{
"method": "GET",
"path": "/describe/connections",
"description": "Cross-project connection declarations: upstream, downstream, peers with addressing",
"auth": "none",
"actors": [
"agent",
"developer"
],
"params": [
{
"name": "slug",
"kind": "string",
"constraint": "optional",
"description": "Project slug (defaults to primary)"
}
],
"tags": [
"describe",
"federation"
],
"feature": ""
},
{
"method": "GET",
"path": "/describe/guides",
"description": "Complete operational context for an actor: identity, axioms, practices, constraints, gate state, modes, actor policy, connections, content assets",
"auth": "none",
"actors": [
"agent",
"developer",
"ci"
],
"params": [
{
"name": "slug",
"kind": "string",
"constraint": "optional",
"description": "Project slug (defaults to primary)"
},
{
"name": "actor",
"kind": "string",
"constraint": "optional",
"description": "Actor context filters the policy (agent|developer|ci|admin)"
}
],
"tags": [
"describe",
"guides"
],
"feature": ""
},
{
"method": "GET",
"path": "/describe/project",
"description": "Project self-description: identity, axioms, tensions, practices, gates, ADRs, dimensions",
"auth": "none",
"actors": [
"agent",
"developer",
"ci",
"admin"
],
"params": [
{
"name": "slug",
"kind": "string",
"constraint": "optional",
"description": "Project slug (defaults to primary)"
}
],
"tags": [
"describe",
"ontology"
],
"feature": ""
},
{
"method": "GET",
"path": "/graph/impact",
"description": "BFS impact graph from an ontology node; optionally traverses cross-project connections",
"auth": "none",
"actors": [
"agent",
"developer"
],
"params": [
{
"name": "node",
"kind": "string",
"constraint": "required",
"description": "Ontology node id to start from"
},
{
"name": "depth",
"kind": "u32",
"constraint": "default=2",
"description": "Max BFS hops (capped at 5)"
},
{
"name": "include_external",
"kind": "bool",
"constraint": "default=false",
"description": "Follow connections.ncl to external projects"
},
{
"name": "slug",
"kind": "string",
"constraint": "optional",
"description": "Project slug (defaults to primary)"
}
],
"tags": [
"graph",
"federation"
],
"feature": ""
},
{
"method": "GET",
"path": "/graph/node/{id}",
"description": "Resolve a single ontology node by id from the local cache (used by federation)",
"auth": "none",
"actors": [
"agent",
"developer"
],
"params": [
{
"name": "slug",
"kind": "string",
"constraint": "optional",
"description": "Project slug (defaults to primary)"
}
],
"tags": [
"graph",
"federation"
],
"feature": ""
},
{
"method": "GET",
"path": "/health",
"description": "Daemon health check: uptime, version, feature flags, active projects",
"auth": "none",
"actors": [
"agent",
"developer",
"ci",
"admin"
],
"params": [],
"tags": [
"meta"
],
"feature": ""
},
{
"method": "POST",
"path": "/nickel/export",
"description": "Export a Nickel file to JSON, using the cache when the file is unchanged",
"auth": "viewer",
"actors": [
"developer",
"agent"
],
"params": [
{
"name": "file",
"kind": "string",
"constraint": "required",
"description": "Absolute path to the .ncl file to export"
},
{
"name": "import_path",
"kind": "string",
"constraint": "optional",
"description": "NICKEL_IMPORT_PATH override"
}
],
"tags": [
"nickel",
"cache"
],
"feature": ""
},
{
"method": "POST",
"path": "/notifications/ack",
"description": "Acknowledge one or more notifications; removes them from the pending queue",
"auth": "none",
"actors": [
"agent",
"developer",
"ci"
],
"params": [
{
"name": "token",
"kind": "string",
"constraint": "required",
"description": "Actor bearer token"
},
{
"name": "ids",
"kind": "string",
"constraint": "required",
"description": "Comma-separated notification ids to acknowledge"
}
],
"tags": [
"notifications"
],
"feature": ""
},
{
"method": "GET",
"path": "/notifications/pending",
"description": "Poll pending notifications for an actor; optionally marks them as seen",
"auth": "none",
"actors": [
"agent",
"developer",
"ci"
],
"params": [
{
"name": "token",
"kind": "string",
"constraint": "required",
"description": "Actor bearer token"
},
{
"name": "project",
"kind": "string",
"constraint": "optional",
"description": "Project slug filter"
},
{
"name": "check_only",
"kind": "bool",
"constraint": "default=false",
"description": "Return count without marking seen"
}
],
"tags": [
"notifications"
],
"feature": ""
},
{
"method": "GET",
"path": "/notifications/stream",
"description": "SSE push stream: actor subscribes once and receives notification events as they occur",
"auth": "none",
"actors": [
"agent",
"developer"
],
"params": [
{
"name": "token",
"kind": "string",
"constraint": "required",
"description": "Actor bearer token"
},
{
"name": "project",
"kind": "string",
"constraint": "optional",
"description": "Project slug filter"
}
],
"tags": [
"notifications",
"sse"
],
"feature": ""
},
{
"method": "GET",
"path": "/ontology",
"description": "List available ontology extension files beyond core, state, gate, manifest",
"auth": "none",
"actors": [
"agent",
"developer"
],
"params": [
{
"name": "slug",
"kind": "string",
"constraint": "optional",
"description": "Project slug (defaults to primary)"
}
],
"tags": [
"ontology"
],
"feature": ""
},
{
"method": "POST",
"path": "/ontology/changed",
"description": "Git hook endpoint: actor signs a file-change event it caused to suppress self-notification",
"auth": "viewer",
"actors": [
"developer",
"ci"
],
"params": [
{
"name": "token",
"kind": "string",
"constraint": "required",
"description": "Actor bearer token"
},
{
"name": "files",
"kind": "string",
"constraint": "required",
"description": "JSON array of changed file paths"
}
],
"tags": [
"ontology",
"notifications"
],
"feature": ""
},
{
"method": "GET",
"path": "/ontology/{file}",
"description": "Export a specific ontology extension file to JSON",
"auth": "none",
"actors": [
"agent",
"developer"
],
"params": [
{
"name": "slug",
"kind": "string",
"constraint": "optional",
"description": "Project slug (defaults to primary)"
}
],
"tags": [
"ontology"
],
"feature": ""
},
{
"method": "GET",
"path": "/projects",
"description": "List all registered projects with slug, root, push_only flag and import path",
"auth": "admin",
"actors": [
"admin"
],
"params": [],
"tags": [
"projects",
"registry"
],
"feature": ""
},
{
"method": "POST",
"path": "/projects",
"description": "Register a new project at runtime without daemon restart",
"auth": "admin",
"actors": [
"admin"
],
"params": [],
"tags": [
"projects",
"registry"
],
"feature": ""
},
{
"method": "DELETE",
"path": "/projects/{slug}",
"description": "Deregister a project and stop its file watcher",
"auth": "admin",
"actors": [
"admin"
],
"params": [],
"tags": [
"projects",
"registry"
],
"feature": ""
},
{
"method": "GET",
"path": "/projects/{slug}/config",
"description": "Full config export for a registered project (merged with any active overrides)",
"auth": "none",
"actors": [
"agent",
"developer"
],
"params": [
{
"name": "slug",
"kind": "string",
"constraint": "required",
"description": "Project slug"
}
],
"tags": [
"config"
],
"feature": ""
},
{
"method": "GET",
"path": "/projects/{slug}/config/coherence",
"description": "Multi-consumer coherence report: unclaimed NCL fields, consumer field mismatches",
"auth": "none",
"actors": [
"agent",
"developer"
],
"params": [
{
"name": "slug",
"kind": "string",
"constraint": "required",
"description": "Project slug"
},
{
"name": "section",
"kind": "string",
"constraint": "optional",
"description": "Filter to one section"
}
],
"tags": [
"config"
],
"feature": ""
},
{
"method": "GET",
"path": "/projects/{slug}/config/quickref",
"description": "Generated config documentation with rationales, override history, and coherence status",
"auth": "none",
"actors": [
"agent",
"developer"
],
"params": [
{
"name": "slug",
"kind": "string",
"constraint": "required",
"description": "Project slug"
},
{
"name": "section",
"kind": "string",
"constraint": "optional",
"description": "Filter to one section"
},
{
"name": "format",
"kind": "string",
"constraint": "optional",
"description": "Output format (json|markdown)"
}
],
"tags": [
"config"
],
"feature": ""
},
{
"method": "GET",
"path": "/projects/{slug}/config/schema",
"description": "Config surface schema: sections with descriptions, rationales, contracts, and declared consumers",
"auth": "none",
"actors": [
"agent",
"developer"
],
"params": [
{
"name": "slug",
"kind": "string",
"constraint": "required",
"description": "Project slug"
}
],
"tags": [
"config"
],
"feature": ""
},
{
"method": "GET",
"path": "/projects/{slug}/config/{section}",
"description": "Values for a single config section (from the merged NCL export)",
"auth": "none",
"actors": [
"agent",
"developer"
],
"params": [
{
"name": "slug",
"kind": "string",
"constraint": "required",
"description": "Project slug"
},
{
"name": "section",
"kind": "string",
"constraint": "required",
"description": "Section id"
}
],
"tags": [
"config"
],
"feature": ""
},
{
"method": "PUT",
"path": "/projects/{slug}/config/{section}",
"description": "Mutate a config section via the override layer. dry_run=true (default) returns the proposed change without writing.",
"auth": "admin",
"actors": [
"agent",
"developer"
],
"params": [
{
"name": "slug",
"kind": "string",
"constraint": "required",
"description": "Project slug"
},
{
"name": "section",
"kind": "string",
"constraint": "required",
"description": "Section id"
}
],
"tags": [
"config"
],
"feature": ""
},
{
"method": "PUT",
"path": "/projects/{slug}/keys",
"description": "Hot-rotate credentials for a project; invalidates all existing actor and UI sessions",
"auth": "admin",
"actors": [
"admin"
],
"params": [],
"tags": [
"projects",
"auth"
],
"feature": ""
},
{
"method": "GET",
"path": "/projects/{slug}/ontology/versions",
"description": "Per-file ontology change counters for a project; incremented on every cache invalidation",
"auth": "none",
"actors": [
"agent",
"developer"
],
"params": [],
"tags": [
"projects",
"ontology",
"cache"
],
"feature": ""
},
{
"method": "GET",
"path": "/qa-json",
"description": "Export the Q&A knowledge store as structured JSON from reflection/qa.ncl",
"auth": "none",
"actors": [
"agent",
"developer"
],
"params": [
{
"name": "slug",
"kind": "string",
"constraint": "optional",
"description": "Project slug (defaults to primary)"
}
],
"tags": [
"qa"
],
"feature": ""
},
{
"method": "GET",
"path": "/search",
"description": "Full-text search over ontology nodes, ADRs, practices and Q&A entries",
"auth": "none",
"actors": [
"agent",
"developer"
],
"params": [
{
"name": "q",
"kind": "string",
"constraint": "required",
"description": "Search query string"
},
{
"name": "slug",
"kind": "string",
"constraint": "optional",
"description": "Project slug (ui feature only)"
}
],
"tags": [
"search"
],
"feature": ""
},
{
"method": "POST",
"path": "/sync",
"description": "Push-based sync: remote projects POST their NCL export JSON here to update the daemon cache",
"auth": "viewer",
"actors": [
"ci",
"agent"
],
"params": [
{
"name": "slug",
"kind": "string",
"constraint": "required",
"description": "Project slug from Authorization header context"
}
],
"tags": [
"sync",
"federation"
],
"feature": ""
},
{
"method": "GET",
"path": "/validate/adrs",
"description": "Execute typed ADR constraint checks and return per-constraint pass/fail results",
"auth": "viewer",
"actors": [
"developer",
"ci",
"agent"
],
"params": [
{
"name": "slug",
"kind": "string",
"constraint": "optional",
"description": "Project slug (defaults to primary)"
}
],
"tags": [
"validate",
"adrs"
],
"feature": ""
}
]

14
assets/css/package.json Normal file
View File

@ -0,0 +1,14 @@
{
"private": true,
"type": "module",
"packageManager": "pnpm@10.7.0",
"scripts": {
"build": "unocss '../../crates/ontoref-daemon/templates/**/*.html' -o ../../crates/ontoref-daemon/public/css/ontoref.css --minify",
"watch": "unocss '../../crates/ontoref-daemon/templates/**/*.html' -o ../../crates/ontoref-daemon/public/css/ontoref.css --watch"
},
"devDependencies": {
"@unocss/cli": "^66.3.2",
"unocss": "^66.3.2",
"unocss-preset-daisy": "^7.0.0"
}
}

View File

@ -0,0 +1,11 @@
/** @type {import('tailwindcss').Config} */
// Tailwind/DaisyUI configuration for the daemon UI stylesheet.
// Scans the Rust daemon crate's HTML templates (two directories up) for
// class usage; DaisyUI supplies the component classes.
module.exports = {
  content: [
    // Templates live inside the ontoref-daemon crate.
    '../../crates/ontoref-daemon/templates/**/*.html',
  ],
  plugins: [require('daisyui')],
  daisyui: {
    themes: ['dark', 'light'], // only the two themes the UI switches between
    logs: false, // silence DaisyUI's build-time console output
  },
};

58
assets/css/uno.config.js Normal file
View File

@ -0,0 +1,58 @@
// UnoCSS build configuration for the ontoref-daemon web UI. Scans the
// daemon's HTML templates and emits one stylesheet (see the sibling
// package.json "build" / "watch" scripts for the CLI invocation).
import { defineConfig, presetUno, transformerDirectives, transformerVariantGroup } from 'unocss'
import { presetDaisy } from 'unocss-preset-daisy'
import { readFileSync } from 'fs'
import { createRequire } from 'module'

// ESM files have no require(); createRequire lets us resolve daisyui's
// packaged theme CSS to an on-disk path.
const require = createRequire(import.meta.url)
const daisyuiThemes = readFileSync(require.resolve('daisyui/dist/themes.css'), 'utf-8')

// Hand-rolled Tailwind-style preflight: box-sizing reset, font stack, and
// the element resets the templates rely on.
const basePreflight = `
*,::before,::after{box-sizing:border-box}
html{font-family:ui-sans-serif,system-ui,-apple-system,BlinkMacSystemFont,"Segoe UI",Roboto,"Helvetica Neue",Arial,"Noto Sans",sans-serif,"Apple Color Emoji","Segoe UI Emoji","Segoe UI Symbol","Noto Color Emoji";line-height:1.5;-webkit-text-size-adjust:100%;tab-size:4}
body{margin:0;padding:0;line-height:inherit}
a{color:inherit;text-decoration:inherit}
img,svg,video,canvas,audio,iframe,embed,object{display:block;vertical-align:middle}
img,video{max-width:100%;height:auto}
h1,h2,h3,h4,h5,h6{font-size:inherit;font-weight:inherit}
ol,ul{list-style:none;margin:0;padding:0}
button,input,optgroup,select,textarea{font-family:inherit;font-size:100%;line-height:inherit;color:inherit;margin:0;padding:0}
button,select{text-transform:none}
`

// DaisyUI v3 sets --btn-text-case:uppercase per [data-theme=*] in themes.css and
// drives text-transform via that variable in component styles. Both come after
// preflights, so !important is the only reliable escape.
// svg sizing is handled in base.html via higher-specificity (.btn svg.w-N) rules.
const daisyV3Overrides = `
.btn{text-transform:none!important;letter-spacing:normal!important}
`

export default defineConfig({
  // Injected in order: base reset first, then DaisyUI themes, then our overrides.
  preflights: [
    { getCSS: () => basePreflight },
    { getCSS: () => daisyuiThemes },
    { getCSS: () => daisyV3Overrides },
  ],
  content: {
    filesystem: [
      '../../crates/ontoref-daemon/templates/**/*.html',
    ],
  },
  presets: [
    presetUno(),
    presetDaisy({ themes: ['dark', 'light'] }),
  ],
  transformers: [
    transformerDirectives(),
    transformerVariantGroup(),
  ],
  // Classes assembled at runtime in JS never appear verbatim in templates,
  // so the scanner would drop them without this safelist.
  safelist: [
    // DaisyUI component classes assembled dynamically in JS (authBadge, statusBadge)
    'badge', 'badge-xs', 'badge-ghost', 'badge-info', 'badge-error',
    'badge-success', 'badge-warning', 'badge-neutral', 'badge-lg', 'badge-outline',
    'loading', 'loading-spinner', 'loading-sm',
    // Utility classes assembled dynamically in JS
    'font-mono', 'hidden', 'line-through',
    'text-orange-400', 'text-cyan-400', 'text-purple-400', 'text-yellow-400',
  ],
})

960
assets/vendor/cytoscape-navigator.js vendored Normal file
View File

@ -0,0 +1,960 @@
;(function(){ 'use strict';
// User-overridable navigator options; merged over by the caller's options
// via extend() in Navigator.prototype._init.
var defaults = {
  container: false // can be a HTML or jQuery element or jQuery selector
, viewLiveFramerate: 0 // set false to update graph pan only on drag end; set 0 to do it instantly; set a number (frames per second) to update not more than N times per second
, dblClickDelay: 200 // milliseconds
, removeCustomContainer: true // destroy the container specified by user on plugin destroy
, rerenderDelay: 500 // ms to throttle rerender updates to the panzoom for performance
};
var debounce = (function(){
/**
* lodash 3.1.1 (Custom Build) <https://lodash.com/>
* Build: `lodash modern modularize exports="npm" -o ./`
* Copyright 2012-2015 The Dojo Foundation <http://dojofoundation.org/>
* Based on Underscore.js 1.8.3 <http://underscorejs.org/LICENSE>
* Copyright 2009-2015 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors
* Available under MIT license <https://lodash.com/license>
*/
/** Used as the `TypeError` message for "Functions" methods. */
var FUNC_ERROR_TEXT = 'Expected a function';
/* Native method references for those with the same name as other `lodash` methods. */
var nativeMax = Math.max,
nativeNow = Date.now;
/**
* Gets the number of milliseconds that have elapsed since the Unix epoch
* (1 January 1970 00:00:00 UTC).
*
* @static
* @memberOf _
* @category Date
* @example
*
* _.defer(function(stamp) {
* console.log(_.now() - stamp);
* }, _.now());
* // => logs the number of milliseconds it took for the deferred function to be invoked
*/
var now = nativeNow || function() {
return new Date().getTime();
};
/**
* Creates a debounced function that delays invoking `func` until after `wait`
* milliseconds have elapsed since the last time the debounced function was
* invoked. The debounced function comes with a `cancel` method to cancel
* delayed invocations. Provide an options object to indicate that `func`
* should be invoked on the leading and/or trailing edge of the `wait` timeout.
* Subsequent calls to the debounced function return the result of the last
* `func` invocation.
*
* **Note:** If `leading` and `trailing` options are `true`, `func` is invoked
* on the trailing edge of the timeout only if the the debounced function is
* invoked more than once during the `wait` timeout.
*
* See [David Corbacho's article](http://drupalmotion.com/article/debounce-and-throttle-visual-explanation)
* for details over the differences between `_.debounce` and `_.throttle`.
*
* @static
* @memberOf _
* @category Function
* @param {Function} func The function to debounce.
* @param {number} [wait=0] The number of milliseconds to delay.
* @param {Object} [options] The options object.
* @param {boolean} [options.leading=false] Specify invoking on the leading
* edge of the timeout.
* @param {number} [options.maxWait] The maximum time `func` is allowed to be
* delayed before it's invoked.
* @param {boolean} [options.trailing=true] Specify invoking on the trailing
* edge of the timeout.
* @returns {Function} Returns the new debounced function.
* @example
*
* // avoid costly calculations while the window size is in flux
* jQuery(window).on('resize', _.debounce(calculateLayout, 150));
*
* // invoke `sendMail` when the click event is fired, debouncing subsequent calls
* jQuery('#postbox').on('click', _.debounce(sendMail, 300, {
* 'leading': true,
* 'trailing': false
* }));
*
* // ensure `batchLog` is invoked once after 1 second of debounced calls
* var source = new EventSource('/stream');
* jQuery(source).on('message', _.debounce(batchLog, 250, {
* 'maxWait': 1000
* }));
*
* // cancel a debounced call
* var todoChanges = _.debounce(batchLog, 1000);
* Object.observe(models.todo, todoChanges);
*
* Object.observe(models, function(changes) {
* if (_.find(changes, { 'user': 'todo', 'type': 'delete'})) {
* todoChanges.cancel();
* }
* }, ['delete']);
*
* // ...at some point `models.todo` is changed
* models.todo.completed = true;
*
* // ...before 1 second has passed `models.todo` is deleted
* // which cancels the debounced `todoChanges` call
* delete models.todo;
*/
function debounce(func, wait, options) {
var args,
maxTimeoutId,
result,
stamp,
thisArg,
timeoutId,
trailingCall,
lastCalled = 0,
maxWait = false,
trailing = true;
if (typeof func != 'function') {
throw new TypeError(FUNC_ERROR_TEXT);
}
wait = wait < 0 ? 0 : (+wait || 0);
if (options === true) {
var leading = true;
trailing = false;
} else if (isObject(options)) {
leading = !!options.leading;
maxWait = 'maxWait' in options && nativeMax(+options.maxWait || 0, wait);
trailing = 'trailing' in options ? !!options.trailing : trailing;
}
function cancel() {
if (timeoutId) {
clearTimeout(timeoutId);
}
if (maxTimeoutId) {
clearTimeout(maxTimeoutId);
}
lastCalled = 0;
maxTimeoutId = timeoutId = trailingCall = undefined;
}
function complete(isCalled, id) {
if (id) {
clearTimeout(id);
}
maxTimeoutId = timeoutId = trailingCall = undefined;
if (isCalled) {
lastCalled = now();
result = func.apply(thisArg, args);
if (!timeoutId && !maxTimeoutId) {
args = thisArg = undefined;
}
}
}
function delayed() {
var remaining = wait - (now() - stamp);
if (remaining <= 0 || remaining > wait) {
complete(trailingCall, maxTimeoutId);
} else {
timeoutId = setTimeout(delayed, remaining);
}
}
function maxDelayed() {
complete(trailing, timeoutId);
}
function debounced() {
args = arguments;
stamp = now();
thisArg = this;
trailingCall = trailing && (timeoutId || !leading);
if (maxWait === false) {
var leadingCall = leading && !timeoutId;
} else {
if (!maxTimeoutId && !leading) {
lastCalled = stamp;
}
var remaining = maxWait - (stamp - lastCalled),
isCalled = remaining <= 0 || remaining > maxWait;
if (isCalled) {
if (maxTimeoutId) {
maxTimeoutId = clearTimeout(maxTimeoutId);
}
lastCalled = stamp;
result = func.apply(thisArg, args);
}
else if (!maxTimeoutId) {
maxTimeoutId = setTimeout(maxDelayed, remaining);
}
}
if (isCalled && timeoutId) {
timeoutId = clearTimeout(timeoutId);
}
else if (!timeoutId && wait !== maxWait) {
timeoutId = setTimeout(delayed, wait);
}
if (leadingCall) {
isCalled = true;
result = func.apply(thisArg, args);
}
if (isCalled && !timeoutId && !maxTimeoutId) {
args = thisArg = undefined;
}
return result;
}
debounced.cancel = cancel;
return debounced;
}
/**
* Checks if `value` is the [language type](https://es5.github.io/#x8) of `Object`.
* (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`)
*
* @static
* @memberOf _
* @category Lang
* @param {*} value The value to check.
* @returns {boolean} Returns `true` if `value` is an object, else `false`.
* @example
*
* _.isObject({});
* // => true
*
* _.isObject([1, 2, 3]);
* // => true
*
* _.isObject(1);
* // => false
*/
function isObject(value) {
// Avoid a V8 JIT bug in Chrome 19-20.
// See https://code.google.com/p/v8/issues/detail?id=2291 for more details.
var type = typeof value;
return !!value && (type == 'object' || type == 'function');
}
return debounce;
})();
// ported lodash throttle function
// Throttle built on top of the local debounce: forces maxWait === wait so
// `func` fires at most once per `wait` ms. `options === false` disables the
// leading call; an options object may set `leading` / `trailing` explicitly.
var throttle = function( func, wait, options ){
  var useLeading = true;
  var useTrailing = true;
  if( options === false ){
    useLeading = false;
  } else if( typeof options === 'object' ){
    if( 'leading' in options ){ useLeading = options.leading; }
    if( 'trailing' in options ){ useTrailing = options.trailing; }
  }
  options = options || {};
  options.leading = useLeading;
  options.maxWait = wait;
  options.trailing = useTrailing;
  return debounce( func, wait, options );
};
// Navigator widget constructor; all real setup happens in prototype._init.
// NOTE(review): the first argument is actually the cytoscape core instance
// (register() passes `cy`), despite being named `element` here.
var Navigator = function ( element, options ) {
  this._init(element, options)
};
// Shallow-merge the own enumerable properties of every source argument into
// the first argument (later sources win) and return that first argument.
// Null/undefined sources are skipped, matching for-in's no-op behaviour.
var extend = function() {
  var target = arguments[0];
  for (var n = 1; n < arguments.length; n++) {
    var source = arguments[n];
    if (source != null) {
      Object.keys(source).forEach(function (key) {
        target[key] = source[key];
      });
    }
  }
  return target;
};
// Current rendered width of a DOM element, in CSS pixels.
var wid = function(elem) {
  return elem.getBoundingClientRect().width;
};
// Current rendered height of a DOM element, in CSS pixels.
var hei = function(elem) {
  return elem.getBoundingClientRect().height;
};
Navigator.prototype = {
constructor: Navigator
/****************************
Main functions
****************************/
, bb: function(){
var bb = this.cy.elements().boundingBox()
if( bb.w === 0 || bb.h === 0 ){
return {
x1: 0,
x2: Infinity,
y1: 0,
y2: Infinity,
w: Infinity,
h: Infinity
} // => hide interactive overlay
}
return bb
}
, _addCyListener: function(events, handler){
this._cyListeners.push({
events: events,
handler: handler
})
this.cy.on(events, handler)
}
, _removeCyListeners: function(){
var cy = this.cy
this._cyListeners.forEach(function(l){
cy.off(l.events, l.handler)
})
cy.offRender(this._onRenderHandler)
}
, _init: function ( cy, options ) {
this._cyListeners = []
this.$element = cy.container()
this.options = extend({}, defaults, options)
this.cy = cy
// Cache bounding box
this.boundingBox = this.bb()
// Cache sizes
this.width = wid(this.$element);
this.height = hei(this.$element)
// Init components
this._initPanel()
this._initThumbnail()
this._initView()
this._initOverlay()
}
, destroy: function () {
this._removeEventsHandling();
// If container is not created by navigator and its removal is prohibited
if (this.options.container && !this.options.removeCustomContainer) {
this.$panel.innerHTML = '';
} else {
this.$panel.parentElement.removeChild(this.$panel);
}
}
/****************************
Navigator elements functions
****************************/
/*
* Used inner attributes
*
* w {number} width
* h {number} height
*/
, _initPanel: function () {
var options = this.options
if(options.container && typeof options.container === 'string' && options.container.length > 0) {
// to not break users which gives a jquery string selector
if (options.container.indexOf('#') !== -1) {
this.$panel = document.getElementById(options.container.replace('#', ''));
} else {
this.$panel = document.getElementsByClassName(options.container.replace('.', ''))[0];
}
} else {
this.$panel = document.createElement('div');
this.$panel.className = 'cytoscape-navigator';
document.body.appendChild(this.$panel);
}
this._setupPanel()
this._addCyListener('resize', this.resize.bind(this))
}
, _setupPanel: function () {
// Cache sizes
this.panelWidth = wid(this.$panel);
this.panelHeight = hei(this.$panel);
}
/*
* Used inner attributes
*
* zoom {number}
* pan {object} - {x: 0, y: 0}
*/
, _initThumbnail: function () {
// Create thumbnail
this.$thumbnail = document.createElement('img');
// Add thumbnail canvas to the DOM
this.$panel.appendChild(this.$thumbnail);
// Setup thumbnail
this._setupThumbnailSizes()
this._setupThumbnail()
}
, _setupThumbnail: function () {
this._updateThumbnailImage()
}
, _setupThumbnailSizes: function () {
// Update bounding box cache
this.boundingBox = this.bb()
this.thumbnailZoom = Math.min(this.panelHeight / this.boundingBox.h, this.panelWidth / this.boundingBox.w)
// Used on thumbnail generation
this.thumbnailPan = {
x: (this.panelWidth - this.thumbnailZoom * (this.boundingBox.x1 + this.boundingBox.x2))/2
, y: (this.panelHeight - this.thumbnailZoom * (this.boundingBox.y1 + this.boundingBox.y2))/2
}
}
// If bounding box has changed then update sizes
// Otherwise just update the thumbnail
, _checkThumbnailSizesAndUpdate: function () {
// Cache previous values
var _zoom = this.thumbnailZoom
, _pan_x = this.thumbnailPan.x
, _pan_y = this.thumbnailPan.y
this._setupThumbnailSizes()
if (_zoom != this.thumbnailZoom || _pan_x != this.thumbnailPan.x || _pan_y != this.thumbnailPan.y) {
this._setupThumbnail()
this._setupView()
} else {
this._updateThumbnailImage()
}
}
/*
* Used inner attributes
*
* w {number} width
* h {number} height
* x {number}
* y {number}
* borderWidth {number}
* locked {boolean}
*/
, _initView: function () {
this.$view = document.createElement('div');
this.$view.className = 'cytoscape-navigatorView';
this.$panel.appendChild(this.$view)
// Compute borders
this.viewBorderTop = parseInt(this.$view.style['border-top-width'], 10) || 0;
this.viewBorderRight = parseInt(this.$view.style['border-right-width'], 10) || 0;
this.viewBorderBottom = parseInt(this.$view.style['border-bottom-width'], 10) || 0;
this.viewBorderLeft = parseInt(this.$view.style['border-left-width'], 10) || 0;
// Abstract borders
this.viewBorderHorizontal = this.viewBorderLeft + this.viewBorderRight
this.viewBorderVertical = this.viewBorderTop + this.viewBorderBottom
this._setupView()
// Hook graph zoom and pan
this._addCyListener('zoom pan', this._setupView.bind(this))
}
, _setupView: function () {
if (this.viewLocked)
return
var cyZoom = this.cy.zoom()
, cyPan = this.cy.pan()
// Horizontal computation
this.viewW = this.width / cyZoom * this.thumbnailZoom
this.viewX = -cyPan.x * this.viewW / this.width + this.thumbnailPan.x - this.viewBorderLeft
// Vertical computation
this.viewH = this.height / cyZoom * this.thumbnailZoom
this.viewY = -cyPan.y * this.viewH / this.height + this.thumbnailPan.y - this.viewBorderTop
// CSS view
this.$view.style['width'] = this.viewW + 'px';
this.$view.style['height'] = this.viewH + 'px';
this.$view.style['position'] = 'absolute';
this.$view.style['left'] = this.viewX + 'px';
this.$view.style['top'] = this.viewY + 'px';
}
/*
* Used inner attributes
*
* timeout {number} used to keep stable frame rate
* lastMoveStartTime {number}
* inMovement {boolean}
* hookPoint {object} {x: 0, y: 0}
*/
, _initOverlay: function () {
// Used to capture mouse events
this.$overlay = document.createElement('div');
this.$overlay.className = 'cytoscape-navigatorOverlay';
// Add overlay to the DOM
this.$panel.appendChild(this.$overlay)
// Init some attributes
this.overlayHookPointX = 0;
this.overlayHookPointY = 0;
// Listen for events
this._initEventsHandling()
}
/****************************
Event handling functions
****************************/
, resize: function () {
// Cache sizes
this.width = wid(this.$element);
this.height = hei(this.$element);
this._thumbnailSetup = false
this._setupPanel()
this._checkThumbnailSizesAndUpdate()
this._setupView()
}
, _initEventsHandling: function () {
var that = this
, eventsLocal = [
// Mouse events
'mousedown'
, 'mousewheel'
, 'DOMMouseScroll' // Mozilla specific event
// Touch events
, 'touchstart'
]
, eventsGlobal = [
'mouseup'
, 'mouseout'
, 'mousemove'
// Touch events
, 'touchmove'
, 'touchend'
]
// handle events and stop their propagation
var overlayListener = function (ev) {
// Touch events
if (ev.type == 'touchstart') {
// Will count as middle of View
Object.defineProperty(ev, 'offsetX', {
value: that.viewX + that.viewW / 2,
writable: true
});
Object.defineProperty(ev, 'offsetY', {
value: that.viewY + that.viewH / 2,
writable: true
});
}
// Normalize offset for browsers which do not provide that value
if (ev.offsetX === undefined || ev.offsetY === undefined) {
var rect = ev.target.getBoundingClientRect();
var targetOffset = {
top: rect.top + window.scrollY,
left: rect.left + window.scrollX,
};
Object.defineProperty(ev, 'offsetX', {
value: ev.pageX - targetOffset.left,
writable: true
});
Object.defineProperty(ev, 'offsetY', {
value: ev.pageY - targetOffset.top,
writable: true
});
}
if (ev.type == 'mousedown' || ev.type == 'touchstart') {
that._eventMoveStart(ev)
} else if (ev.type == 'mousewheel' || ev.type == 'DOMMouseScroll') {
that._eventZoom(ev)
}
// Prevent default and propagation
// Don't use peventPropagation as it breaks mouse events
return false;
};
// Hook global events
var globalListener = function (ev) {
// Do not make any computations if it is has no effect on Navigator
if (!that.overlayInMovement)
return;
// Touch events
if (ev.type == 'touchend') {
// Will count as middle of View
Object.defineProperty(ev, 'offsetX', {
value: that.viewX + that.viewW / 2,
writable: true
});
Object.defineProperty(ev, 'offsetY', {
value: that.viewY + that.viewH / 2,
writable: true
});
} else if (ev.type == 'touchmove') {
// Hack - we take in account only first touch
Object.defineProperty(ev, 'pageX', {
value: ev.originalEvent.touches[0].pageX,
writable: true
});
Object.defineProperty(ev, 'pageY', {
value: ev.originalEvent.touches[0].pageY,
writable: true
});
}
// Normalize offset for browsers which do not provide that value
if (ev.offsetX === undefined || ev.offsetY === undefined) {
var rect = ev.target.getBoundingClientRect();
var targetOffset = {
top: rect.top + window.scrollY,
left: rect.left + window.scrollX,
};
Object.defineProperty(ev, 'offsetX', {
value: ev.pageX - targetOffset.left,
writable: true
});
Object.defineProperty(ev, 'offsetY', {
value: ev.pageY - targetOffset.top,
writable: true
});
}
// Translate global events into local coordinates
if (ev.target !== that.$overlay) {
var rect = ev.target.getBoundingClientRect();
var rect2 = that.$overlay.getBoundingClientRect();
var targetOffset = {
top: rect.top + window.scrollY,
left: rect.left + window.scrollX,
};
var overlayOffset = {
top: rect2.top + window.scrollY,
left: rect2.left + window.scrollX,
};
if(targetOffset && overlayOffset) {
Object.defineProperty(ev, 'offsetX', {
value: ev.offsetX - overlayOffset.left + targetOffset.left,
writable: true
});
Object.defineProperty(ev, 'offsetY', {
value: ev.offsetY - overlayOffset.top + targetOffset.top,
writable: true
});
} else {
return false;
}
}
if (ev.type == 'mousemove' || ev.type == 'touchmove') {
that._eventMove(ev)
} else if (ev.type == 'mouseup' || ev.type == 'touchend') {
that._eventMoveEnd(ev)
}
// Prevent default and propagation
// Don't use peventPropagation as it breaks mouse events
return false;
};
for (var i = 0; i < eventsLocal.length; i++) {
this.$overlay.addEventListener(eventsLocal[i], overlayListener, false);
}
for (var i = 0; i < eventsGlobal.length; i++) {
window.addEventListener(eventsGlobal[i], globalListener, false);
}
this._removeEventsHandling = function(){
for (var i = 0; i < eventsLocal.length; i++) {
this.$overlay.removeEventListener(eventsLocal[i], overlayListener);
}
for (var i = 0; i < eventsGlobal.length; i++) {
window.removeEventListener(eventsGlobal[i], globalListener);
}
}
}
, _eventMoveStart: function (ev) {
var now = new Date().getTime()
// Check if it was double click
if (this.overlayLastMoveStartTime
&& this.overlayLastMoveStartTime + this.options.dblClickDelay > now) {
// Reset lastMoveStartTime
this.overlayLastMoveStartTime = 0
// Enable View in order to move it to the center
this.overlayInMovement = true
// Set hook point as View center
this.overlayHookPointX = this.viewW / 2
this.overlayHookPointY = this.viewH / 2
// Move View to start point
if (this.options.viewLiveFramerate !== false) {
this._eventMove({
offsetX: this.panelWidth / 2
, offsetY: this.panelHeight / 2
})
} else {
this._eventMoveEnd({
offsetX: this.panelWidth / 2
, offsetY: this.panelHeight / 2
})
}
// View should be inactive as we don't want to move it right after double click
this.overlayInMovement = false
}
// This is a single click
// Take care as single click happens before double click 2 times
else {
this.overlayLastMoveStartTime = now
this.overlayInMovement = true
// Lock view moving caused by cy events
this.viewLocked = true
// if event started in View
if (ev.offsetX >= this.viewX && ev.offsetX <= this.viewX + this.viewW
&& ev.offsetY >= this.viewY && ev.offsetY <= this.viewY + this.viewH
) {
this.overlayHookPointX = ev.offsetX - this.viewX
this.overlayHookPointY = ev.offsetY - this.viewY
}
// if event started in Thumbnail (outside of View)
else {
// Set hook point as View center
this.overlayHookPointX = this.viewW / 2
this.overlayHookPointY = this.viewH / 2
// Move View to start point
this._eventMove(ev)
}
}
}
, _eventMove: function (ev) {
var that = this
this._checkMousePosition(ev)
// break if it is useless event
if (!this.overlayInMovement) {
return;
}
// Update cache
this.viewX = ev.offsetX - this.overlayHookPointX
this.viewY = ev.offsetY - this.overlayHookPointY
// Update view position
this.$view.style['left'] = this.viewX + 'px';
this.$view.style['top'] = this.viewY + 'px';
// Move Cy
if (this.options.viewLiveFramerate !== false) {
// trigger instantly
if (this.options.viewLiveFramerate == 0) {
this._moveCy()
}
// trigger less often than frame rate
else if (!this.overlayTimeout) {
// Set a timeout for graph movement
this.overlayTimeout = setTimeout(function () {
that._moveCy()
that.overlayTimeout = false
}, 1000 / this.options.viewLiveFramerate)
}
}
}
, _checkMousePosition: function (ev) {
// If mouse in over View
if(ev.offsetX > this.viewX && ev.offsetX < this.viewX + this.viewBorderHorizontal + this.viewW
&& ev.offsetY > this.viewY && ev.offsetY < this.viewY + this.viewBorderVertical + this.viewH) {
this.$panel.classList.add('mouseover-view')
} else {
this.$panel.classList.remove('mouseover-view')
}
}
, _eventMoveEnd: function (ev) {
// Unlock view changing caused by graph events
this.viewLocked = false
// Remove class when mouse is not over Navigator
this.$panel.classList.remove('mouseover-view')
if (!this.overlayInMovement) {
return;
}
// Trigger one last move
this._eventMove(ev)
// If mode is not live then move graph on drag end
if (this.options.viewLiveFramerate === false) {
this._moveCy()
}
// Stop movement permission
this.overlayInMovement = false
}
, _eventZoom: function (ev) {
var ev2 = extend({}, ev.originalEvent);
var delta = ev.wheelDeltaY / 1000 || ev.wheelDelta / 1000 || ev.detail / -32 || ev2.wheelDeltaY / 1000 || ev2.wheelDelta / 1000 || ev2.detail / -32;
var zoomRate = Math.pow(10, delta)
, mousePosition = {
left: ev.offsetX
, top: ev.offsetY
}
if (this.cy.zoomingEnabled()) {
this._zoomCy(zoomRate, mousePosition)
}
}
, _updateThumbnailImage: function () {
var that = this;
if( this._thumbnailUpdating ){
return;
}
this._thumbnailUpdating = true;
var render = function() {
that._checkThumbnailSizesAndUpdate();
that._setupView();
var $img = that.$thumbnail;
var img = $img;
var w = that.panelWidth;
var h = that.panelHeight;
var bb = that.boundingBox;
var zoom = Math.min( w/bb.w, h/bb.h );
var png = that.cy.png({
full: true,
scale: zoom,
maxHeight: h,
maxWidth: w
});
if( png.indexOf('image/png') < 0 ){
img.removeAttribute( 'src' );
} else {
img.setAttribute( 'src', png );
}
var translate = {
x: (w - zoom*( bb.w ))/2,
y: (h - zoom*( bb.h ))/2
};
$img.style['position'] = 'absolute';
$img.style['left'] = translate.x + 'px';
$img.style['top'] = translate.y + 'px';
}
this._onRenderHandler = throttle(render, that.options.rerenderDelay)
this.cy.onRender( this._onRenderHandler )
}
/****************************
Navigator view moving
****************************/
, _moveCy: function () {
this.cy.pan({
x: -(this.viewX + this.viewBorderLeft - this.thumbnailPan.x) * this.width / this.viewW
, y: -(this.viewY + this.viewBorderLeft - this.thumbnailPan.y) * this.height / this.viewH
})
}
/**
* Zooms graph.
*
* @this {cytoscapeNavigator}
* @param {number} zoomRate The zoom rate value. 1 is 100%.
*/
, _zoomCy: function (zoomRate, zoomCenterRaw) {
var zoomCenter
, isZoomCenterInView = false
zoomCenter = {
x: this.width / 2
, y: this.height / 2
};
this.cy.zoom({
level: this.cy.zoom() * zoomRate
, renderedPosition: zoomCenter
})
}
}
// Registers the navigator extension on a cytoscape library reference.
// Safe no-op when no reference is supplied.
var register = function( cytoscape ){
  if (!cytoscape) { return; } // can't register if cytoscape unspecified
  // Expose `cy.navigator(options)` on every core instance; `this` inside
  // the extension callback is the cytoscape core object.
  cytoscape('core', 'navigator', function (options) {
    return new Navigator(this, options);
  });
};
// UMD-style export: expose the extension through whichever module system
// is present, in priority order CommonJS > AMD, then additionally
// auto-register against a global `cytoscape` if one exists (script-tag
// usage). The `typeof` guards are deliberate — they avoid ReferenceErrors
// when `module`/`define`/`cytoscape` are undeclared globals.
if (typeof module !== 'undefined' && module.exports) { // expose as a commonjs module
  module.exports = function( cytoscape ){
    register( cytoscape );
  };
} else if (typeof define !== 'undefined' && define.amd) { // expose as an amd/requirejs module
  define('cytoscape-navigator', function(){
    return register;
  });
}
if (typeof cytoscape !== 'undefined') { // expose to global cytoscape (i.e. window.cytoscape)
  register(cytoscape);
}
})();

File diff suppressed because one or more lines are too long

View File

@ -2728,7 +2728,7 @@
<span class="tech-badge">DashMap</span>
<span class="tech-badge">notify</span>
<span class="tech-badge">MCP Protocol</span>
<span class="tech-badge">D3.js</span>
<span class="tech-badge">Cytoscape.js</span>
<span class="tech-badge">Server-Sent Events</span>
<span class="tech-badge">SurrealDB</span>
<span class="tech-badge">NATS JetStream</span>

View File

@ -1,36 +1,4 @@
/// A single query/path/body parameter declared on an API route.
#[derive(serde::Serialize, Clone)]
pub struct ApiParam {
    /// Parameter name.
    pub name: &'static str,
    /// Rust-like type hint: string | u32 | bool | i64 | json.
    pub kind: &'static str,
    /// "required" | "optional" | "default=<value>"
    pub constraint: &'static str,
    /// Human-readable description of what the parameter controls.
    pub description: &'static str,
}
/// Static metadata for a daemon HTTP endpoint.
///
/// Registered at link time via [`inventory::submit!`] — generated by
/// `#[onto_api(...)]` proc-macro attribute on each handler function.
/// Collected by [`GET /api/catalog`](super::api_catalog_handler).
#[derive(serde::Serialize, Clone)]
pub struct ApiRouteEntry {
    /// HTTP method, e.g. "GET" or "POST".
    pub method: &'static str,
    /// Route path as registered with the router.
    pub path: &'static str,
    /// One-line human-readable summary of the endpoint.
    pub description: &'static str,
    /// Authentication required: "none" | "viewer" | "admin"
    pub auth: &'static str,
    /// Which actors typically call this endpoint.
    pub actors: &'static [&'static str],
    /// Declared query/path/body parameters (may be empty).
    pub params: &'static [ApiParam],
    /// Semantic grouping tags (e.g. "graph", "federation", "describe").
    pub tags: &'static [&'static str],
    /// Non-empty when the endpoint is only compiled under a feature flag.
    pub feature: &'static str,
}

// Link-time registry: every `inventory::submit!(ApiRouteEntry { .. })`
// emitted by the proc-macro lands in this collection.
inventory::collect!(ApiRouteEntry);
pub use ontoref_ontology::api::{ApiParam, ApiRouteEntry};
/// Return the full API catalog sorted by path then method.
pub fn catalog() -> Vec<&'static ApiRouteEntry> {

View File

@ -305,6 +305,12 @@ struct Cli {
#[arg(long, value_name = "PASSWORD")]
hash_password: Option<String>,
/// Print all #[onto_api] registered routes as a JSON array and exit.
/// Pipe to api-catalog.json so the ontoref UI can display this project's
/// API surface when it is registered as a non-primary slug in the daemon.
#[arg(long)]
dump_api_catalog: bool,
/// Run as an MCP server over stdin/stdout (for Claude Desktop, Cursor,
/// etc.). No HTTP server is started in this mode.
#[cfg(feature = "mcp")]
@ -448,6 +454,11 @@ async fn main() {
tracing_subscriber::fmt().with_env_filter(env_filter).init();
}
if cli.dump_api_catalog {
println!("{}", ontoref_ontology::api::dump_catalog_json());
return;
}
if let Some(ref password) = cli.hash_password {
use argon2::{
password_hash::{rand_core::OsRng, PasswordHasher, SaltString},

View File

@ -880,6 +880,84 @@ pub async fn dashboard_mp(
let adr_count = count_ncl_files(&ctx_ref.root.join("adrs"));
let mode_count = count_ncl_files(&ctx_ref.root.join("reflection").join("modes"));
// ── Project identity (same data as project_picker card) ──────────────────
let config_json = load_config_json(
&ctx_ref.root,
&ctx_ref.cache,
ctx_ref.import_path.as_deref(),
)
.await;
let card = config_json
.as_ref()
.map(extract_card_from_config)
.unwrap_or(serde_json::Value::Null);
let description = if card
.get("tagline")
.and_then(|v| v.as_str())
.unwrap_or("")
.is_empty()
{
readme_description(&ctx_ref.root)
} else {
String::new()
};
let manifest_path = ctx_ref.root.join(".ontology").join("manifest.ncl");
let (layers, op_modes, default_mode, repo_kind) = if manifest_path.exists() {
match ctx_ref
.cache
.export(&manifest_path, ctx_ref.import_path.as_deref())
.await
{
Ok((json, _)) => {
let layers: Vec<serde_json::Value> = json
.get("layers")
.and_then(|v| v.as_array())
.map(|arr| {
arr.iter()
.map(|l| {
serde_json::json!({
"id": l.get("id").and_then(|v| v.as_str()).unwrap_or(""),
"description": l.get("description").and_then(|v| v.as_str()).unwrap_or(""),
})
})
.collect()
})
.unwrap_or_default();
let op_modes: Vec<serde_json::Value> = json
.get("operational_modes")
.and_then(|v| v.as_array())
.map(|arr| {
arr.iter()
.map(|m| {
serde_json::json!({
"id": m.get("id").and_then(|v| v.as_str()).unwrap_or(""),
"description": m.get("description").and_then(|v| v.as_str()).unwrap_or(""),
})
})
.collect()
})
.unwrap_or_default();
let default_mode = json
.get("default_mode")
.and_then(|v| v.as_str())
.unwrap_or("")
.to_string();
let repo_kind = json
.get("repo_kind")
.and_then(|v| v.as_str())
.unwrap_or("")
.to_string();
(layers, op_modes, default_mode, repo_kind)
}
Err(_) => (vec![], vec![], String::new(), String::new()),
}
} else {
(vec![], vec![], String::new(), String::new())
};
let repos = git_remotes(&ctx_ref.root);
let showcase = detect_showcase(&ctx_ref.root, &base_url);
let generated = detect_generated(&ctx_ref.root, &base_url);
let mut ctx = Context::new();
ctx.insert("uptime_secs", &state.started_at.elapsed().as_secs());
ctx.insert("cache_entries", &ctx_ref.cache.len());
@ -899,6 +977,15 @@ pub async fn dashboard_mp(
ctx.insert("adr_count", &adr_count);
ctx.insert("mode_count", &mode_count);
ctx.insert("current_role", &auth_role_str(&auth));
ctx.insert("card", &card);
ctx.insert("description", &description);
ctx.insert("repo_kind", &repo_kind);
ctx.insert("repos", &repos);
ctx.insert("layers", &layers);
ctx.insert("op_modes", &op_modes);
ctx.insert("default_mode", &default_mode);
ctx.insert("showcase", &showcase);
ctx.insert("generated", &generated);
let file_versions: std::collections::BTreeMap<String, u64> = ctx_ref
.file_versions
@ -936,9 +1023,10 @@ pub async fn api_catalog_page_mp(
let ctx_ref = state.registry.get(&slug).ok_or(UiError::NotConfigured)?;
let base_url = format!("/ui/{slug}");
// The #[onto_api] catalog is the ontoref-daemon's own HTTP surface.
// Only expose it for the primary project (ontoref itself). Consumer
// projects have their own API surfaces not registered in this process.
// The #[onto_api] catalog is populated via inventory::submit! at link time.
// For the primary project (ontoref itself) we read from the live inventory.
// For consumer projects (separate binaries) we read api-catalog.json from
// their project root — generated by `just export-api-catalog` in that project.
let is_primary = slug == state.registry.primary_slug();
let routes: Vec<serde_json::Value> = if is_primary {
crate::api_catalog::catalog()
@ -969,7 +1057,7 @@ pub async fn api_catalog_page_mp(
})
.collect()
} else {
vec![]
read_project_api_catalog(&ctx_ref.root)
};
let catalog_json = serde_json::to_string(&routes).unwrap_or_else(|_| "[]".to_string());
@ -1460,6 +1548,30 @@ pub async fn serve_public_single(
serve_dir_from(&state.project_root.join("public"), &pub_path).await
}
/// Read `api-catalog.json` from a consumer project root.
///
/// Returns an empty vec (silently) when the file is absent — the UI will show
/// a "no catalog" state instead of an error. Returns an empty vec on parse
/// failure and logs a warning so misconfigured files are visible in daemon
/// logs.
fn read_project_api_catalog(project_root: &std::path::Path) -> Vec<serde_json::Value> {
    let path = project_root.join("api-catalog.json");
    // Absent file → empty catalog, no logging: this is the normal state for
    // projects that never ran `just export-api-catalog`.
    let Ok(bytes) = std::fs::read(&path) else {
        return Vec::new();
    };
    serde_json::from_slice::<Vec<serde_json::Value>>(&bytes).unwrap_or_else(|e| {
        tracing::warn!(
            "api-catalog.json at {} is invalid JSON: {e}",
            path.display()
        );
        Vec::new()
    })
}
/// Generic file server for an arbitrary base directory.
async fn serve_dir_from(base: &std::path::Path, rel: &str) -> Result<Response, UiError> {
let Ok(canonical_base) = base.canonicalize() else {

View File

@ -13,14 +13,22 @@
if(m==="icons")document.documentElement.classList.add("nav-icons");
else if(m==="names")document.documentElement.classList.add("nav-names");
})()</script>
<link href="https://cdn.jsdelivr.net/npm/daisyui@4/dist/full.min.css" rel="stylesheet">
<script src="https://cdn.tailwindcss.com"></script>
<link href="/public/css/ontoref.css" rel="stylesheet">
<style>
.badge-xs { height: 1rem; }
.badge-success, .badge-info, .badge-error, .badge-warning { color: #ffffff; }
html.nav-icons .nav-label { display: none !important; }
html.nav-icons .dropdown-content .nav-label { display: inline !important; }
html.nav-names .nav-icon { display: none !important; }
/* DaisyUI v3 .btn svg{width:1em;height:1em} has specificity 0,1,1.
These rules are 0,2,1 — override per-size without !important. */
.btn svg.w-3,.btn svg.h-3{width:.75rem;height:.75rem}
.btn svg.w-3\.5,.btn svg.h-3\.5{width:.875rem;height:.875rem}
.btn svg.w-4,.btn svg.h-4{width:1rem;height:1rem}
.btn svg.w-5,.btn svg.h-5{width:1.25rem;height:1.25rem}
.btn svg.w-9,.btn svg.h-9{width:2.25rem;height:2.25rem}
.btn svg.w-10,.btn svg.h-10{width:2.5rem;height:2.5rem}
.btn svg.w-12,.btn svg.h-12{width:3rem;height:3rem}
</style>
{% block head %}{% endblock head %}
</head>

View File

@ -6,13 +6,6 @@
{% block nav_group_dev %}active{% endblock nav_group_dev %}
{% block head %}
<style>
.status-accepted { @apply badge badge-success badge-xs font-mono; }
.status-proposed { @apply badge badge-warning badge-xs font-mono; }
.status-deprecated { @apply badge badge-ghost badge-xs font-mono; }
.status-superseded { @apply badge badge-error badge-xs font-mono; }
.status-error { @apply badge badge-error badge-xs font-mono; }
</style>
{% endblock head %}
{% block content %}
@ -92,13 +85,15 @@
<script>
const ADRS = {{ adrs_json | safe }};
function statusClass(s) {
const map = { Accepted: 'accepted', Proposed: 'proposed', Deprecated: 'deprecated', Superseded: 'superseded', Error: 'error' };
return `status-${map[s] || 'proposed'}`;
}
function statusBadge(s) {
return `<span class="${statusClass(s)}">${s}</span>`;
const cls = {
Accepted: 'badge badge-success badge-xs font-mono',
Proposed: 'badge badge-warning badge-xs font-mono',
Deprecated: 'badge badge-ghost badge-xs font-mono',
Superseded: 'badge badge-error badge-xs font-mono',
Error: 'badge badge-error badge-xs font-mono',
}[s] ?? 'badge badge-ghost badge-xs font-mono';
return `<span class="${cls}">${s}</span>`;
}
let visibleAdrs = ADRS;

View File

@ -7,9 +7,6 @@
{% block head %}
<style>
.auth-none { @apply badge badge-ghost badge-xs font-mono; }
.auth-viewer { @apply badge badge-info badge-xs font-mono; }
.auth-admin { @apply badge badge-error badge-xs font-mono; }
.method-get { color: #4ade80; }
.method-post { color: #60a5fa; }
.method-put { color: #f59e0b; }
@ -112,7 +109,12 @@ function methodClass(m) {
}
function authBadge(auth) {
return `<span class="auth-${auth}">${auth}</span>`;
const cls = {
none: 'badge badge-ghost badge-xs font-mono',
viewer: 'badge badge-info badge-xs font-mono',
admin: 'badge badge-error badge-xs font-mono',
}[auth] ?? 'badge badge-ghost badge-xs font-mono';
return `<span class="${cls}">${auth}</span>`;
}
function actorBadges(actors) {

View File

@ -4,15 +4,6 @@
{% block nav_group_dev %}active{% endblock nav_group_dev %}
{% block head %}
<style>
.status-ok { @apply badge badge-success badge-xs font-mono; }
.status-warning { @apply badge badge-warning badge-xs font-mono; }
.status-error { @apply badge badge-error badge-xs font-mono; }
.kind-ruststruct { @apply badge badge-ghost badge-xs font-mono text-orange-400; }
.kind-nuscript { @apply badge badge-ghost badge-xs font-mono text-cyan-400; }
.kind-cipipeline { @apply badge badge-ghost badge-xs font-mono text-purple-400; }
.kind-external { @apply badge badge-ghost badge-xs font-mono text-yellow-400; }
</style>
{% endblock head %}
{% block content %}
@ -49,7 +40,7 @@
<h2 class="font-bold font-mono text-lg">{{ section.id }}</h2>
{% if section.coherence %}
{% set coh = section.coherence %}
<span class="status-{{ coh.status | lower }}">{{ coh.status }}</span>
<span class="badge badge-xs font-mono {% if coh.status == 'Ok' %}badge-success{% elif coh.status == 'Warning' %}badge-warning{% else %}badge-error{% endif %}">{{ coh.status }}</span>
{% endif %}
{% if not section.mutable %}
<span class="badge badge-ghost badge-xs">read-only</span>
@ -77,7 +68,7 @@
{% if section.consumers %}
<div class="flex flex-wrap gap-1.5 mb-3">
{% for c in section.consumers %}
<span class="kind-{{ c.kind | lower | replace(from='ruststruct', to='ruststruct') }}" title="{{ c.ref }}">
<span class="badge badge-ghost badge-xs font-mono {% if c.kind == 'RustStruct' %}text-orange-400{% elif c.kind == 'NuScript' %}text-cyan-400{% elif c.kind == 'CiPipeline' %}text-purple-400{% else %}text-yellow-400{% endif %}" title="{{ c.ref }}">
{{ c.kind | replace(from='RustStruct', to='Rust') | replace(from='NuScript', to='Nu') | replace(from='CiPipeline', to='CI') | replace(from='External', to='Ext') }}:{{ c.id }}
</span>
{% endfor %}

View File

@ -6,8 +6,102 @@
{% block content %}
<div class="mb-6">
<h1 class="text-2xl font-bold">Dashboard</h1>
<p class="text-base-content/60 text-sm font-mono mt-1">{{ project_root }}</p>
<div class="flex flex-wrap items-start justify-between gap-2 mb-1">
<div class="flex flex-wrap items-center gap-2">
<h1 class="text-2xl font-bold font-mono">{{ slug }}</h1>
{% if repo_kind %}
<span class="badge badge-outline badge-sm text-base-content/50">{{ repo_kind }}</span>
{% endif %}
{% if card and card.version %}
<span class="badge badge-ghost badge-sm font-mono">v{{ card.version }}</span>
{% endif %}
{% if card and card.status %}
<span class="badge badge-sm
{% if card.status == 'active' or card.status == 'stable' %}badge-success
{% elif card.status == 'wip' or card.status == 'alpha' %}badge-warning
{% elif card.status == 'deprecated' %}badge-error
{% else %}badge-ghost{% endif %}">{{ card.status }}</span>
{% endif %}
</div>
{% if showcase or generated %}
<div class="flex flex-wrap gap-1">
{% for s in showcase %}
<a href="{{ s.url }}" target="_blank" rel="noopener" class="btn btn-xs btn-ghost gap-1 border border-base-content/10">
{% if s.id == "branding" %}🎨{% elif s.id == "web" %}🌐{% elif s.id == "presentation" %}📊{% endif %}
{{ s.label }}
</a>
{% endfor %}
{% for g in generated %}
<a href="{{ g.url }}" target="_blank" rel="noopener" class="btn btn-xs btn-ghost gap-1 border border-base-content/10 opacity-70">
📄 {{ g.label }}
</a>
{% endfor %}
</div>
{% endif %}
</div>
{% if card and card.tagline %}
<p class="text-base-content/60 text-sm italic mb-1">{{ card.tagline }}</p>
{% elif description %}
<p class="text-base-content/60 text-sm mb-1">{{ description }}</p>
{% endif %}
{% if card and card.description %}
<p class="text-base-content/50 text-xs leading-relaxed mb-2 max-w-2xl">{{ card.description }}</p>
{% endif %}
<div class="flex flex-wrap items-center gap-2 mt-1">
<p class="text-base-content/30 text-xs font-mono truncate" title="{{ project_root }}">{{ project_root }}</p>
{% if repos %}
{% for r in repos %}
<a href="{{ r.url }}" target="_blank" rel="noopener"
class="badge badge-xs badge-ghost font-mono gap-1 border border-base-content/10 hover:border-primary/40 hover:text-primary transition-colors"
title="{{ r.url }}">
<svg class="w-2.5 h-2.5 flex-shrink-0" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M10 20l4-16m4 4l4 4-4 4M6 16l-4-4 4-4"/>
</svg>
{{ r.name }}
</a>
{% endfor %}
{% endif %}
{% if card and card.tags %}
{% for tag in card.tags %}
<span class="badge badge-xs badge-ghost">{{ tag }}</span>
{% endfor %}
{% endif %}
</div>
{% if layers or op_modes %}
<details class="collapse collapse-arrow bg-base-200 rounded-lg mt-3 max-w-2xl">
<summary class="collapse-title text-xs font-semibold py-2 min-h-0 px-3 cursor-pointer">
Features &amp; Layers
</summary>
<div class="collapse-content px-3 pb-3">
{% if layers %}
<p class="text-xs font-medium text-base-content/40 uppercase tracking-wider mb-1.5">Layers</p>
<div class="space-y-1 mb-3">
{% for l in layers %}
<div class="flex gap-2">
<span class="badge badge-xs badge-ghost font-mono flex-shrink-0 mt-0.5">{{ l.id }}</span>
<span class="text-xs text-base-content/70">{{ l.description }}</span>
</div>
{% endfor %}
</div>
{% endif %}
{% if op_modes %}
<p class="text-xs font-medium text-base-content/40 uppercase tracking-wider mb-1.5">Operational Modes</p>
<div class="space-y-1">
{% for m in op_modes %}
<div class="flex gap-2">
<span class="badge badge-xs badge-primary font-mono flex-shrink-0 mt-0.5">{{ m.id }}</span>
<span class="text-xs text-base-content/70">{{ m.description }}</span>
</div>
{% endfor %}
</div>
{% endif %}
</div>
</details>
{% endif %}
</div>
<!-- Daemon stats -->

View File

@ -6,6 +6,7 @@
{% block head %}
<script src="https://cdn.jsdelivr.net/npm/cytoscape@3.30.2/dist/cytoscape.min.js"></script>
<script src="/assets/vendor/cytoscape-navigator.js"></script>
<style>
#graph-root {
display: flex;
@ -14,14 +15,88 @@
gap: 0;
user-select: none;
}
#graph-root:fullscreen,
#graph-root:-webkit-full-screen {
height: 100dvh;
background: oklch(var(--b1));
}
#cy-wrapper {
flex: 1 1 auto;
min-width: 220px;
overflow: hidden;
border-radius: 0.5rem;
position: relative;
}
#cy { width: 100%; height: 100%; }
/* Floating controls — bottom-right */
#cy-controls {
position: absolute;
bottom: 14px;
right: 14px;
z-index: 10;
display: flex;
flex-direction: column;
gap: 4px;
}
#cy-controls button {
width: 30px;
height: 30px;
border-radius: 6px;
border: 1px solid oklch(var(--bc) / 0.18);
background: oklch(var(--b2) / 0.90);
color: oklch(var(--bc));
font-size: 16px;
font-weight: 600;
line-height: 1;
cursor: pointer;
display: flex;
align-items: center;
justify-content: center;
transition: background 0.12s, border-color 0.12s, color 0.12s;
backdrop-filter: blur(6px);
-webkit-backdrop-filter: blur(6px);
}
#cy-controls button:hover {
background: oklch(var(--p) / 0.85);
border-color: oklch(var(--p));
color: oklch(var(--pc));
}
#cy-controls button svg {
width: 13px;
height: 13px;
flex-shrink: 0;
pointer-events: none;
}
.ctrl-divider {
height: 1px;
background: oklch(var(--bc) / 0.15);
margin: 2px 0;
}
/* Minimap — bottom-left */
#cy-nav {
position: absolute;
bottom: 14px;
left: 14px;
z-index: 10;
width: 160px;
height: 96px;
border-radius: 6px;
overflow: hidden;
border: 1px solid oklch(var(--bc) / 0.15);
backdrop-filter: blur(4px);
-webkit-backdrop-filter: blur(4px);
}
/* navigator internals */
#cy-nav canvas { display: block; }
.cy-navigator-view {
border: 2px solid oklch(var(--p) / 0.7) !important;
background: oklch(var(--p) / 0.12) !important;
border-radius: 2px !important;
}
#resize-handle {
flex: 0 0 6px;
cursor: col-resize;
@ -58,6 +133,140 @@
{% block content %}
<input type="hidden" id="graph-slug" value="{% if slug %}{{ slug }}{% endif %}">
<!-- Legend modal -->
<dialog id="legend-modal" class="modal">
<div class="modal-box w-11/12 max-w-2xl">
<div class="flex justify-between items-center mb-5">
<h3 class="font-bold text-lg">Graph Legend</h3>
<form method="dialog"><button class="btn btn-sm btn-circle btn-ghost"></button></form>
</div>
<div class="space-y-6 text-sm overflow-y-auto max-h-[70vh] pr-1">
<!-- Levels -->
<section>
<p class="text-xs font-semibold text-base-content/40 uppercase tracking-wider mb-3">Levels — abstraction of knowledge</p>
<div class="space-y-3">
<div class="flex items-start gap-3">
<span class="btn btn-xs btn-warning flex-shrink-0 pointer-events-none select-none" style="min-width:80px">◆ Axiom</span>
<div>
<p class="font-medium leading-snug">Fundamental invariant</p>
<p class="text-base-content/60 leading-relaxed mt-0.5">What the project holds to be unconditionally true. Axioms cannot change without a formal ADR. They anchor the rest of the graph — every Tension and Practice traces back to at least one Axiom.</p>
</div>
</div>
<div class="flex items-start gap-3">
<span class="btn btn-xs btn-error flex-shrink-0 pointer-events-none select-none" style="min-width:80px">● Tension</span>
<div>
<p class="font-medium leading-snug">Active contradiction</p>
<p class="text-base-content/60 leading-relaxed mt-0.5">A force the project holds without resolving. Tensions are not problems to eliminate — they are productive oppositions that generate direction. Each Practice exists because a Tension demands a response.</p>
</div>
</div>
<div class="flex items-start gap-3">
<span class="btn btn-xs btn-success flex-shrink-0 pointer-events-none select-none" style="min-width:80px">▪ Practice</span>
<div>
<p class="font-medium leading-snug">Concrete approach</p>
<p class="text-base-content/60 leading-relaxed mt-0.5">How the project operationalizes a Tension. Practices are the most mutable layer — they evolve as the project learns. They map directly to artifacts, ADRs, and code.</p>
</div>
</div>
</div>
</section>
<div class="divider my-0"></div>
<!-- Poles -->
<section>
<p class="text-xs font-semibold text-base-content/40 uppercase tracking-wider mb-3">Poles — which force drives the node</p>
<div class="space-y-3">
<div class="flex items-start gap-3">
<span class="btn btn-xs flex-shrink-0 pointer-events-none select-none" style="background:#f59e0b;color:#111;border-color:#f59e0b;min-width:64px">Yang</span>
<div>
<p class="font-medium leading-snug">Active · building · outward</p>
<p class="text-base-content/60 leading-relaxed mt-0.5">Nodes that assert, create, or push outward. Yang principles drive the definition of what a project <em>does</em> and what it makes available to others.</p>
</div>
</div>
<div class="flex items-start gap-3">
<span class="btn btn-xs flex-shrink-0 pointer-events-none select-none" style="background:#3b82f6;color:#fff;border-color:#3b82f6;min-width:64px">Yin</span>
<div>
<p class="font-medium leading-snug">Receptive · constraining · protective</p>
<p class="text-base-content/60 leading-relaxed mt-0.5">Nodes that receive, limit, or protect. Yin principles define what the project <em>refuses</em>, what boundaries it holds, and what it conserves.</p>
</div>
</div>
<div class="flex items-start gap-3">
<span class="btn btn-xs flex-shrink-0 pointer-events-none select-none" style="background:#8b5cf6;color:#fff;border-color:#8b5cf6;min-width:64px">Spiral</span>
<div>
<p class="font-medium leading-snug">Dialectical · both/and · evolving</p>
<p class="text-base-content/60 leading-relaxed mt-0.5">Nodes that hold both sides simultaneously and move through the tension. Spiral principles cannot be resolved into Yang or Yin — they are the engine of change itself.</p>
</div>
</div>
</div>
</section>
<div class="divider my-0"></div>
<!-- Layouts -->
<section>
<p class="text-xs font-semibold text-base-content/40 uppercase tracking-wider mb-3">Layouts — how the graph is arranged</p>
<div class="space-y-3">
<div class="flex items-start gap-3">
<div class="join flex-shrink-0">
<span class="join-item btn btn-xs btn-primary pointer-events-none select-none">Hierarchy</span>
</div>
<div>
<p class="font-medium leading-snug">Breadth-first top-down</p>
<p class="text-base-content/60 leading-relaxed mt-0.5">Axioms at the root, Tensions in the middle, Practices at the leaves. Use this to trace causality: which invariants drive which contradictions, and which contradictions demand which responses.</p>
</div>
</div>
<div class="flex items-start gap-3">
<div class="join flex-shrink-0">
<span class="join-item btn btn-xs btn-ghost pointer-events-none select-none">Force</span>
</div>
<div>
<p class="font-medium leading-snug">Physics simulation (COSE)</p>
<p class="text-base-content/60 leading-relaxed mt-0.5">Nodes are pulled together by edges and pushed apart by repulsion. Heavily connected nodes cluster at the center. Use this to find hubs — nodes that influence many others — and isolated areas that may be undertested or underdocumented.</p>
</div>
</div>
</div>
</section>
<div class="divider my-0"></div>
<!-- Visual cues -->
<section>
<p class="text-xs font-semibold text-base-content/40 uppercase tracking-wider mb-3">Visual cues</p>
<div class="space-y-2.5">
<div class="flex items-center gap-3">
<span class="inline-block w-4 h-4 rounded-sm flex-shrink-0" style="background:#f59e0b;outline:3px solid #f59e0b;outline-offset:1px"></span>
<p class="text-base-content/70"><span class="font-medium text-base-content">Gold border</span> — invariant node. Cannot be modified without creating a formal ADR.</p>
</div>
<div class="flex items-center gap-3">
<span class="inline-block w-4 h-4 rounded-sm flex-shrink-0" style="background:#6b7280;outline:2px solid #fff;outline-offset:1px"></span>
<p class="text-base-content/70"><span class="font-medium text-base-content">White border</span> — currently selected node.</p>
</div>
<div class="flex items-center gap-3">
<span class="inline-block w-4 h-4 rounded-sm flex-shrink-0 opacity-20" style="background:#6b7280"></span>
<p class="text-base-content/70"><span class="font-medium text-base-content">Dimmed nodes</span> — not connected to the selected node. Click the canvas to clear.</p>
</div>
</div>
</section>
<div class="divider my-0"></div>
<!-- Shortcuts -->
<section>
<p class="text-xs font-semibold text-base-content/40 uppercase tracking-wider mb-3">Keyboard shortcuts</p>
<div class="grid grid-cols-2 gap-x-6 gap-y-1.5 text-xs">
<div class="flex gap-2 items-center"><kbd class="kbd kbd-xs">f</kbd><span class="text-base-content/60">Fit all nodes in view</span></div>
<div class="flex gap-2 items-center"><kbd class="kbd kbd-xs">+</kbd><span class="text-base-content/60">Zoom in</span></div>
<div class="flex gap-2 items-center"><kbd class="kbd kbd-xs">g</kbd><span class="text-base-content/60">Toggle full screen</span></div>
<div class="flex gap-2 items-center"><kbd class="kbd kbd-xs"></kbd><span class="text-base-content/60">Zoom out</span></div>
<div class="flex gap-2 items-center"><kbd class="kbd kbd-xs">Esc</kbd><span class="text-base-content/60">Close detail panel</span></div>
</div>
</section>
</div>
</div>
<form method="dialog" class="modal-backdrop"><button>close</button></form>
</dialog>
<!-- ADR modal -->
<dialog id="adr-modal" class="modal">
<div class="modal-box w-11/12 max-w-2xl">
@ -92,6 +301,7 @@
<button id="btn-cose" class="join-item btn btn-xs btn-ghost">Force</button>
</div>
<button id="btn-reset" class="btn btn-xs btn-ghost">Reset</button>
<button id="btn-legend" class="btn btn-xs btn-ghost" title="Graph legend">?</button>
</div>
</div>
@ -99,6 +309,18 @@
<div id="graph-root">
<div id="cy-wrapper" class="bg-base-200">
<div id="cy"></div>
<!-- Minimap -->
<div id="cy-nav"></div>
<!-- Floating zoom + nav controls -->
<div id="cy-controls">
<button id="cy-zoom-in" title="Zoom in">+</button>
<button id="cy-zoom-out" title="Zoom out"></button>
<div class="ctrl-divider"></div>
<button id="cy-fit" title="Fit to view"></button>
<button id="cy-fullscreen" title="Full screen"></button>
</div>
</div>
<div id="resize-handle"></div>
@ -130,6 +352,15 @@
<script>
const GRAPH = {{ graph_json | safe }};
// ── Icons ─────────────────────────────────────────────────────
const SVG_FIT = `<svg viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2.5" stroke-linecap="round" stroke-linejoin="round"><path d="M8 3H5a2 2 0 0 0-2 2v3m18 0V5a2 2 0 0 0-2-2h-3m0 18h3a2 2 0 0 0 2-2v-3M3 16v3a2 2 0 0 0 2 2h3"/></svg>`;
const SVG_FS_ENTER = `<svg viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2.5" stroke-linecap="round" stroke-linejoin="round"><path d="M15 3h6v6M9 21H3v-6M21 3l-7 7M3 21l7-7"/></svg>`;
const SVG_FS_EXIT = `<svg viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2.5" stroke-linecap="round" stroke-linejoin="round"><path d="M8 3v3a2 2 0 0 1-2 2H3m18 0h-3a2 2 0 0 1-2-2V3m0 18v-3a2 2 0 0 1 2-2h3M3 16h3a2 2 0 0 1 2 2v3"/></svg>`;
document.getElementById('cy-fit').innerHTML = SVG_FIT;
document.getElementById('cy-fullscreen').innerHTML = SVG_FS_ENTER;
// ── Graph data ────────────────────────────────────────────────
const POLE_COLOR = { Yang: "#f59e0b", Yin: "#3b82f6", Spiral: "#8b5cf6" };
const LEVEL_SHAPE = {
Axiom: "diamond",
@ -183,8 +414,7 @@ const edges = (GRAPH.edges || []).map(e => {
};
});
// ── Cytoscape ────────────────────────────────────────────────
// ── Cytoscape ─────────────────────────────────────────────────
const cy = cytoscape({
container: document.getElementById("cy"),
elements: { nodes, edges },
@ -262,18 +492,36 @@ const cy = cytoscape({
}
},
],
layout: buildBfsLayout(),
layout: buildBfsLayout(false),
wheelSensitivity: 0.3,
boxSelectionEnabled: false,
minZoom: 0.1,
maxZoom: 4,
});
function buildBfsLayout() {
// ── Minimap (navigator) ───────────────────────────────────────
if (typeof cy.navigator === 'function') {
cy.navigator({
container: document.getElementById('cy-nav'),
viewLiveFramerate: 0,
thumbnailEventFramerate: 30,
thumbnailLiveFramerate: false,
dblClickDelay: 200,
removeCustomContainer: false,
rerenderDelay: 100,
});
} else {
document.getElementById('cy-nav').style.display = 'none';
}
// ── Layout builders ───────────────────────────────────────────
function buildBfsLayout(animate) {
return {
name: "breadthfirst",
directed: true,
animate: false,
animate: !!animate,
animationDuration: 500,
animationEasing: 'ease-in-out-cubic',
spacingFactor: 2.2,
padding: 48,
fit: true,
@ -303,9 +551,75 @@ function buildCoseLayout() {
};
}
// ── Filters ──────────────────────────────────────────────────
// ── Zoom at cursor ────────────────────────────────────────────
// Track last known mouse position over the canvas for cursor-centered zoom.
let lastCursorViewport = null;
// Track which levels/poles are hidden
document.getElementById('cy').addEventListener('mousemove', e => {
const rect = e.currentTarget.getBoundingClientRect();
lastCursorViewport = { x: e.clientX - rect.left, y: e.clientY - rect.top };
});
document.getElementById('cy').addEventListener('mouseleave', () => {
lastCursorViewport = null;
});
function zoomAt(factor) {
  // Animated zoom by `factor`, clamped to [minZoom, maxZoom]. When the
  // cursor is over the canvas, the pan is adjusted so the point under the
  // cursor stays fixed; otherwise zoom about the current view.
  const current = cy.zoom();
  const target = Math.max(cy.minZoom(), Math.min(cy.maxZoom(), current * factor));
  if (target === current) return;
  const opts = { duration: 180, easing: 'ease-in-out-quad' };
  if (!lastCursorViewport) {
    cy.animate({ zoom: target }, opts);
    return;
  }
  const pan = cy.pan();
  const ratio = target / current;
  cy.animate({
    zoom: target,
    pan: {
      x: lastCursorViewport.x - (lastCursorViewport.x - pan.x) * ratio,
      y: lastCursorViewport.y - (lastCursorViewport.y - pan.y) * ratio,
    },
  }, opts);
}
// ── Floating controls ─────────────────────────────────────────
document.getElementById('cy-zoom-in').addEventListener('click', () => zoomAt(1.35));
document.getElementById('cy-zoom-out').addEventListener('click', () => zoomAt(1 / 1.35));
document.getElementById('cy-fit').addEventListener('click', () => {
cy.animate({ fit: { eles: cy.elements(':visible'), padding: 48 } }, { duration: 350, easing: 'ease-in-out-cubic' });
});
// ── Full-screen ───────────────────────────────────────────────
const graphRoot = document.getElementById('graph-root');
const btnFullscreen = document.getElementById('cy-fullscreen');
function isFullscreen() {
  // True when the document is in (possibly webkit-prefixed) full-screen mode.
  const el = document.fullscreenElement || document.webkitFullscreenElement;
  return Boolean(el);
}
btnFullscreen.addEventListener('click', () => {
if (!isFullscreen()) {
const req = graphRoot.requestFullscreen || graphRoot.webkitRequestFullscreen;
if (req) req.call(graphRoot);
} else {
const exit = document.exitFullscreen || document.webkitExitFullscreen;
if (exit) exit.call(document);
}
});
function onFullscreenChange() {
  // Sync the toggle button's icon and tooltip with the actual full-screen
  // state, then resize + refit the graph on the next animation frame (so
  // the container has its final dimensions before cytoscape measures it).
  const fs = isFullscreen();
  btnFullscreen.innerHTML = fs ? SVG_FS_EXIT : SVG_FS_ENTER;
  btnFullscreen.title = fs ? 'Exit full screen' : 'Full screen';
  requestAnimationFrame(() => { cy.resize(); cy.fit(undefined, 48); });
}
document.addEventListener('fullscreenchange', onFullscreenChange);
document.addEventListener('webkitfullscreenchange', onFullscreenChange);
// ── Filters ───────────────────────────────────────────────────
const hiddenLevels = new Set();
const hiddenPoles = new Set();
@ -352,13 +666,12 @@ document.querySelectorAll(".filter-btn[data-pole]").forEach(btn => {
});
});
// ── Layout buttons ───────────────────────────────────────────
// ── Layout buttons ────────────────────────────────────────────
const btnBfs = document.getElementById("btn-bfs");
const btnCose = document.getElementById("btn-cose");
btnBfs.addEventListener("click", () => {
cy.layout(buildBfsLayout()).run();
cy.layout(buildBfsLayout(true)).run();
btnBfs.classList.add("btn-primary"); btnBfs.classList.remove("btn-ghost");
btnCose.classList.add("btn-ghost"); btnCose.classList.remove("btn-primary");
});
@ -369,14 +682,20 @@ btnCose.addEventListener("click", () => {
btnBfs.classList.add("btn-ghost"); btnBfs.classList.remove("btn-primary");
});
document.getElementById("btn-legend").addEventListener("click", () => {
document.getElementById("legend-modal").showModal();
});
document.getElementById("btn-reset").addEventListener("click", () => {
cy.elements().removeClass("faded highlighted");
cy.fit(undefined, 48);
cy.animate(
{ fit: { eles: cy.elements(':visible'), padding: 48 } },
{ duration: 400, easing: 'ease-in-out-cubic' }
);
closePanel();
});
// ── Node detail panel ────────────────────────────────────────
// ── Node detail panel ─────────────────────────────────────────
const panel = document.getElementById("detail-panel");
const dName = document.getElementById("d-name");
const dBadges = document.getElementById("d-badges");
@ -394,7 +713,9 @@ function closePanel() {
}
cy.on("tap", "node", evt => {
const d = evt.target.data();
const node = evt.target;
const d = node.data();
panel.classList.remove("hidden");
dName.textContent = d.label;
@ -424,7 +745,7 @@ cy.on("tap", "node", evt => {
dAdrs.classList.add("hidden");
}
const conn = evt.target.connectedEdges();
const conn = node.connectedEdges();
if (conn.length) {
dEdges.classList.remove("hidden");
dEdgeList.innerHTML = conn.map(e => {
@ -443,7 +764,13 @@ cy.on("tap", "node", evt => {
// Dim non-neighbours
cy.elements().addClass("faded").removeClass("highlighted");
evt.target.closedNeighborhood().removeClass("faded").addClass("highlighted");
node.closedNeighborhood().removeClass("faded").addClass("highlighted");
// Animate center + zoom to selected node
cy.animate(
{ center: { eles: node }, zoom: Math.max(cy.zoom(), 1.2) },
{ duration: 350, easing: 'ease-in-out-cubic' }
);
});
cy.on("tap", evt => {
@ -453,10 +780,8 @@ cy.on("tap", evt => {
document.getElementById("btn-close-panel").addEventListener("click", closePanel);
// ── Resizable split ───────────────────────────────────────────
const handle = document.getElementById("resize-handle");
const cyWrapper = document.getElementById("cy-wrapper");
const graphRoot = document.getElementById("graph-root");
let resizing = false;
@ -490,7 +815,6 @@ document.addEventListener("mouseup", () => {
});
// ── ADR modal ─────────────────────────────────────────────────
const adrModal = document.getElementById("adr-modal");
const adrModalTitle = document.getElementById("adr-modal-title");
const adrModalBody = document.getElementById("adr-modal-body");
@ -501,7 +825,7 @@ function renderAdrBody(data) {
return `<p class="text-error">${data.error}</p>`;
}
const rows = Object.entries(data)
.filter(([k]) => !["id"].includes(k))
.filter(([k]) => k !== "id")
.map(([k, v]) => {
const label = k.replace(/_/g, " ");
let val;
@ -543,5 +867,42 @@ document.addEventListener("click", e => {
const btn = e.target.closest(".adr-link");
if (btn) fetchAdr(btn.dataset.adr);
});
// ── Keyboard shortcuts ────────────────────────────────────────
// Global shortcuts: F = fit, +/- = zoom, G = full screen, Esc = close panel.
document.addEventListener('keydown', e => {
  // Skip when focus is inside an input, textarea, or dialog
  const tag = document.activeElement?.tagName;
  if (tag === 'INPUT' || tag === 'TEXTAREA' || tag === 'SELECT') return;
  if (document.querySelector('dialog[open]')) return;
  switch (e.key) {
    case 'f':
    case 'F':
      e.preventDefault();
      cy.animate({ fit: { eles: cy.elements(':visible'), padding: 48 } }, { duration: 350, easing: 'ease-in-out-cubic' });
      break;
    case '+':
    case '=': // '=' is the unshifted '+' key on most layouts
      e.preventDefault();
      zoomAt(1.35);
      break;
    case '-':
      e.preventDefault();
      zoomAt(1 / 1.35);
      break;
    case 'g':
    case 'G':
      e.preventDefault();
      // Reuse the button's toggle logic rather than duplicating it here.
      btnFullscreen.click();
      break;
    case 'Escape':
      // Only close panel; fullscreen exit is handled natively by the browser
      if (!isFullscreen() && !panel.classList.contains('hidden')) {
        e.preventDefault();
        closePanel();
      }
      break;
  }
});
</script>
{% endblock scripts %}

View File

@ -98,11 +98,13 @@ fn expand_onto_api(args: TokenStream, input: TokenStream) -> syn::Result<proc_ma
"path" => path = Some(val),
"description" => description = Some(val),
"auth" => match val.as_str() {
"none" | "viewer" | "admin" => auth = val,
"none" | "viewer" | "bearer" | "admin" => auth = val,
other => {
return Err(syn::Error::new_spanned(
&kv.value,
format!("unknown auth level '{other}'; expected none | viewer | admin"),
format!(
"unknown auth level '{other}'; expected none | viewer | bearer | admin"
),
))
}
},
@ -207,7 +209,7 @@ fn emit_onto_api(attr: OntoApiAttr, item: proc_macro2::TokenStream) -> proc_macr
let c = LitStr::new(&p.constraint, Span::call_site());
let d = LitStr::new(&p.description, Span::call_site());
quote! {
crate::api_catalog::ApiParam { name: #n, kind: #k, constraint: #c, description: #d }
::ontoref_ontology::api::ApiParam { name: #n, kind: #k, constraint: #c, description: #d }
}
})
.collect();
@ -225,7 +227,7 @@ fn emit_onto_api(attr: OntoApiAttr, item: proc_macro2::TokenStream) -> proc_macr
quote! {
::inventory::submit! {
crate::api_catalog::ApiRouteEntry {
::ontoref_ontology::api::ApiRouteEntry {
method: #method,
path: #path,
description: #desc,

View File

@ -0,0 +1,45 @@
/// A single query/path/body parameter declared on an API route.
///
/// All fields are `'static` string slices so values can live in the
/// link-time registry without allocation.
#[derive(serde::Serialize, Clone)]
pub struct ApiParam {
    /// Parameter name as it appears in the query string, path, or body.
    pub name: &'static str,
    /// Rust-like type hint: string | u32 | bool | i64 | json.
    pub kind: &'static str,
    /// "required" | "optional" | "default=<value>"
    pub constraint: &'static str,
    /// Human-readable description shown in the API catalog.
    pub description: &'static str,
}
/// Static metadata for an HTTP endpoint.
///
/// Registered at link time via [`inventory::submit!`] — generated by
/// `#[onto_api(...)]` proc-macro attribute on each handler function.
/// Collected via [`inventory::iter::<ApiRouteEntry>()`].
#[derive(serde::Serialize, Clone)]
pub struct ApiRouteEntry {
pub method: &'static str,
pub path: &'static str,
pub description: &'static str,
/// Authentication required: "none" | "viewer" | "bearer" | "admin"
pub auth: &'static str,
/// Which actors typically call this endpoint.
pub actors: &'static [&'static str],
pub params: &'static [ApiParam],
/// Semantic grouping tags (e.g. "graph", "federation", "describe").
pub tags: &'static [&'static str],
/// Non-empty when the endpoint is only compiled under a feature flag.
pub feature: &'static str,
}
inventory::collect!(ApiRouteEntry);
/// Serialize all statically-registered [`ApiRouteEntry`] items to a
/// pretty-printed JSON array, sorted by path then method.
///
/// Intended for daemon binaries that expose a `--dump-api-catalog` flag: write
/// the output to `api-catalog.json` in the project root so the ontoref UI can
/// display the API surface of consumer projects that run as separate processes.
pub fn dump_catalog_json() -> String {
    let mut entries: Vec<&'static ApiRouteEntry> = Vec::new();
    for entry in inventory::iter::<ApiRouteEntry>() {
        entries.push(entry);
    }
    // Deterministic ordering: primary key = path, secondary key = HTTP method.
    entries.sort_by(|lhs, rhs| (lhs.path, lhs.method).cmp(&(rhs.path, rhs.method)));
    match serde_json::to_string_pretty(&entries) {
        Ok(json) => json,
        // Serialization of plain string slices should not fail; degrade to an
        // empty array rather than panicking in a dump utility.
        Err(_) => "[]".to_string(),
    }
}

View File

@ -5,6 +5,11 @@ pub mod types;
#[cfg(feature = "derive")]
pub mod contrib;
#[cfg(feature = "derive")]
pub mod api;
#[cfg(feature = "derive")]
pub use api::{ApiParam, ApiRouteEntry};
#[cfg(feature = "derive")]
pub use contrib::{ConfigFieldsEntry, NodeContribution, TestCoverage};
pub use error::OntologyError;

View File

@ -1,4 +1,5 @@
import 'justfiles/ci.just'
import 'justfiles/assets.just'
default:
@just --list

31
justfiles/assets.just Normal file
View File

@ -0,0 +1,31 @@
# Frontend asset management
#
# Vendored JS libs live in assets/vendor/ and are served by the daemon
# at /assets/vendor/<file>. Pin versions explicitly; bump manually.
#
# Pattern guide: reflection/templates/vendor-frontend-assets-prompt.md
CYTOSCAPE_NAVIGATOR_VERSION := "2.0.1"
# Export this daemon's API catalog to api-catalog.json.
# Run after any #[onto_api] annotation is added or changed.
# The file is read by the ontoref UI when this project is registered as a
# non-primary slug — consumer projects that run as separate binaries use this
# to expose their API surface in the ontoref UI.
[doc("Export #[onto_api] routes to api-catalog.json")]
export-api-catalog:
    cargo run -p ontoref-daemon --no-default-features -- --dump-api-catalog > api-catalog.json
    @echo "exported $(python3 -c 'import sys,json; print(len(json.load(sys.stdin)))' < api-catalog.json) routes to api-catalog.json"
# Download/update all vendored frontend JS dependencies
[doc("Vendor all frontend JS dependencies")]
vendor-js: vendor-cytoscape-navigator
# cytoscape-navigator — minimap extension for Cytoscape.js
[doc("Vendor cytoscape-navigator (minimap)")]
vendor-cytoscape-navigator:
mkdir -p assets/vendor
curl -fsSL \
"https://cdn.jsdelivr.net/npm/cytoscape-navigator@{{CYTOSCAPE_NAVIGATOR_VERSION}}/cytoscape-navigator.js" \
-o assets/vendor/cytoscape-navigator.js
@echo "vendored cytoscape-navigator@{{CYTOSCAPE_NAVIGATOR_VERSION}}"

View File

@ -204,8 +204,16 @@ hooks-run-all:
# Install
# ==============================================================================
# Build UnoCSS bundle for the daemon UI
build-css:
cd assets/css && pnpm install && pnpm run build
# Build UnoCSS in watch mode (development)
watch-css:
cd assets/css && pnpm install && pnpm run watch
# Build ontoref-daemon and install binary, assets, CLI wrapper, and bootstrapper
install-daemon:
install-daemon: build-css
cargo build --release -p ontoref-daemon
nu install/install.nu

View File

@ -20,6 +20,10 @@ use ../modules/store.nu *
use ../modules/services.nu *
use ../modules/nats.nu *
use ../modules/opmode.nu *
use ../modules/run.nu *
use ../modules/graph.nu *
use ../modules/validate.nu *
use ../modules/migrate.nu *
use ../nulib/fmt.nu *
use ../nulib/shared.nu *
@ -258,6 +262,64 @@ def "main mode run" [id: string, --dry-run (-n), --yes (-y)] {
run-mode $id --dry-run=$dry_run --yes=$yes
}
# ── Run / Step ────────────────────────────────────────────────────────────────
def "main run start" [mode: string, --task (-t): string = "", --fmt (-f): string = ""] {
log-action $"run start ($mode)" "write"
run start $mode --task $task --fmt $fmt
}
def "main run status" [--run (-r): string = "", --fmt (-f): string = ""] {
log-action "run status" "read"
run status --run $run --fmt $fmt
}
def "main mode complete" [mode: string, --task (-t): string = "", --run (-r): string = "", --fmt (-f): string = ""] {
log-action $"mode complete ($mode)" "write"
mode complete $mode --task $task --run $run --fmt $fmt
}
def "main step" [action?: string] { missing-target "step" $action }
def "main step report" [
mode: string,
step_id: string,
--status (-s): string,
--exit-code (-e): int = 0,
--artifacts (-a): list<string> = [],
--warnings (-w): int = 0,
--run (-r): string = "",
--fmt (-f): string = "",
] {
log-action $"step report ($mode) ($step_id) ($status)" "write"
step report $mode $step_id --status $status --exit-code $exit_code --artifacts $artifacts --warnings $warnings --run $run --fmt $fmt
}
# ── Graph ─────────────────────────────────────────────────────────────────────
def "main graph" [type: string = "ontology", --fmt (-f): string = ""] {
log-action $"graph show ($type)" "read"
graph show $type --fmt $fmt
}
# ── Migrate ───────────────────────────────────────────────────────────────────
def "main migrate" [action?: string] { missing-target "migrate" $action }
def "main migrate list" [--fmt (-f): string = ""] { log-action "migrate list" "read"; migrate list --fmt $fmt }
def "main migrate pending" [--fmt (-f): string = ""] { log-action "migrate pending" "read"; migrate pending --fmt $fmt }
def "main migrate show" [id: string, --fmt (-f): string = ""] { log-action $"migrate show ($id)" "read"; migrate show $id --fmt $fmt }
def "main mg" [action?: string] { missing-target "migrate" $action }
def "main mg l" [--fmt (-f): string = ""] { log-action "migrate list" "read"; migrate list --fmt $fmt }
def "main mg p" [--fmt (-f): string = ""] { log-action "migrate pending" "read"; migrate pending --fmt $fmt }
# ── Validate ──────────────────────────────────────────────────────────────────
def "main validate" [target?: string] { missing-target "validate" $target }
def "main validate justfile" [--fmt (-f): string = ""] {
log-action "validate justfile" "read"
validate justfile --fmt $fmt
}
# ── ADR ───────────────────────────────────────────────────────────────────────
def "main adr" [action?: string] { missing-target "adr" $action }

View File

@ -0,0 +1,26 @@
{
id = "0001",
slug = "ontology-infrastructure",
description = "Add manifest.ncl and connections.ncl to .ontology/",
check = {
tag = "NuCmd",
cmd = "if ($\"($env.ONTOREF_PROJECT_ROOT)/.ontology/manifest.ncl\" | path exists) and ($\"($env.ONTOREF_PROJECT_ROOT)/.ontology/connections.ncl\" | path exists) { exit 0 } else { exit 1 }",
expect_exit = 0,
},
instructions = "
1. Source env vars if not already set:
. $(which ontoref) --env-only
2. Copy missing templates (additive — existing files are NOT overwritten):
test -f .ontology/manifest.ncl || sed 's/{{ project_name }}/{project_name}/g' \\
\"$ONTOREF_ROOT/templates/ontology/manifest.ncl\" > .ontology/manifest.ncl
test -f .ontology/connections.ncl || sed 's/{{ project_name }}/{project_name}/g' \\
\"$ONTOREF_ROOT/templates/ontology/connections.ncl\" > .ontology/connections.ncl
3. Validate both export cleanly:
nickel export --import-path \"$NICKEL_IMPORT_PATH\" .ontology/manifest.ncl > /dev/null && echo ok
nickel export --import-path \"$NICKEL_IMPORT_PATH\" .ontology/connections.ncl > /dev/null && echo ok
4. Open manifest.ncl and set repo_kind to the correct value for this project.
",
}

View File

@ -0,0 +1,28 @@
{
id = "0002",
slug = "adr-typed-checks",
description = "Migrate deprecated check_hint fields in ADRs to typed check variants",
check = {
tag = "NuCmd",
cmd = "let adrs = (glob $\"($env.ONTOREF_PROJECT_ROOT)/adrs/adr-[0-9][0-9][0-9]-*.ncl\"); if ($adrs | is-empty) { exit 0 }; let r = (do { ^rg -l check_hint ...$adrs } | complete); if $r.exit_code == 0 { exit 1 } else { exit 0 }",
expect_exit = 0,
},
instructions = "
1. Find ADRs with deprecated check_hint:
grep -rl 'check_hint' adrs/
2. For each file found, replace check_hint with a typed check variant:
'NuCmd — shell command: { tag = 'NuCmd, cmd = \"...\", expect_exit = 0 }
'Grep — pattern search: { tag = 'Grep, pattern = \"...\", paths = [\"...\"], must_be_empty = true }
'Cargo — dep check: { tag = 'Cargo, crate = \"...\", forbidden_deps = [\"...\"] }
'FileExists — file presence: { tag = 'FileExists, path = \"...\", present = true }
'ApiCall — HTTP check: { tag = 'ApiCall, endpoint = \"...\", json_path = \"...\", expected = \"...\" }
3. Validate each edited ADR:
nickel export --import-path \"$NICKEL_IMPORT_PATH\" adrs/adr-NNN-*.ncl > /dev/null
4. Run all ADR constraints:
ontoref adr validate
",
}

View File

@ -0,0 +1,51 @@
{
id = "0003",
slug = "manifest-self-interrogation",
description = "Populate capabilities[], requirements[], and critical_deps[] in manifest.ncl",
check = {
tag = "NuCmd",
cmd = "let f = $\"($env.ONTOREF_PROJECT_ROOT)/.ontology/manifest.ncl\"; if not ($f | path exists) { exit 1 }; let r = (do { ^nickel export --format json --import-path $env.NICKEL_IMPORT_PATH $f } | complete); if $r.exit_code != 0 { exit 1 }; let m = ($r.stdout | from json); if (($m.capabilities? | default [] | length) > 0) and (($m.requirements? | default [] | length) > 0) { exit 0 } else { exit 1 }",
expect_exit = 0,
},
instructions = "
Open .ontology/manifest.ncl and populate three arrays.
capabilities[] — what the project offers (2-6 entries, audience-facing):
m.make_capability {
id = \"kebab-id\",
name = \"Short Name\",
summary = \"One line: what this does.\",
rationale = \"Why it exists. What was rejected.\",
how = \"Key patterns, entry points, data flows.\",
artifacts = [\"crates/foo/\", \"GET /api/foo\"],
adrs = [], # ADR IDs that formalize decisions here
nodes = [], # node IDs from .ontology/core.ncl
}
requirements[] — prerequisites to run (one per tool/service/envvar):
m.make_requirement {
id = \"kebab-id\",
name = \"Human Name\",
env = 'Both, # 'Production | 'Development | 'Both
kind = 'Tool, # 'Tool | 'Service | 'EnvVar | 'Infrastructure
version = \"\",
required = true,
impact = \"What breaks if absent.\",
provision = \"How to install or set.\",
}
critical_deps[] — load-bearing external deps with documented blast radius:
m.make_critical_dep {
id = \"kebab-id\",
name = \"crate-or-service\",
ref = \"crates.io: foo\",
used_for = \"Which capabilities depend on this.\",
failure_impact = \"What breaks if this dep disappears.\",
mitigation = \"Feature flags, fallback builds, alternatives.\",
}
Verify:
nickel export --import-path \"$NICKEL_IMPORT_PATH\" .ontology/manifest.ncl \\
| jq '{capabilities: (.capabilities|length), requirements: (.requirements|length), critical_deps: (.critical_deps|length)}'
",
}

View File

@ -0,0 +1,37 @@
{
id = "0004",
slug = "just-convention",
description = "Justfile validates against the canonical module convention",
check = {
tag = "NuCmd",
cmd = "cd $env.ONTOREF_PROJECT_ROOT; $env.ONTOREF_ACTOR = \"agent\"; let r = (do { ^ontoref validate justfile --fmt json } | complete); if $r.exit_code != 0 { exit 1 }; let result = ($r.stdout | from json); if $result.ok { exit 0 } else { exit 1 }",
expect_exit = 0,
},
instructions = "
Run the validator to see what is missing:
ONTOREF_ACTOR=agent ontoref validate justfile
Fix each violation:
missing required modules (build, test, dev, ci):
Create justfiles/{name}.just with at minimum:
[doc(\"Show {name} help\")]
help:
@just --list
Add to root justfile:
import 'justfiles/{name}.just'
missing required recipes (default, help in root justfile):
default:
@just --list
[doc(\"Show available recipes\")]
help:
@just --list
missing required variables:
project_root := justfile_directory()
Verify:
ONTOREF_ACTOR=agent ontoref validate justfile
",
}

View File

@ -0,0 +1,37 @@
{
id = "0005",
slug = "mode-step-schema",
description = "All reflection mode steps have actor, on_error, and depends_on fields",
check = {
tag = "NuCmd",
cmd = "let modes_dir = $\"($env.ONTOREF_PROJECT_ROOT)/reflection/modes\"; if not ($modes_dir | path exists) { exit 0 }; let files = (glob $\"($modes_dir)/*.ncl\"); if ($files | is-empty) { exit 0 }; let ip = $env.NICKEL_IMPORT_PATH; let all_ok = ($files | all { |f| let r = (do { ^nickel export --format json --import-path $ip $f } | complete); if $r.exit_code != 0 { false } else { let m = ($r.stdout | from json); let steps = ($m.steps? | default []); ($steps | all { |s| ($s.actor? | is-not-empty) and ($s.on_error? | is-not-empty) }) } }); if $all_ok { exit 0 } else { exit 1 }",
expect_exit = 0,
},
instructions = "
For each file in reflection/modes/*.ncl, ensure every step has:
{
id = \"step-id\",
action = \"What this step does.\",
actor = 'Agent, # 'Human | 'Agent | 'Both
on_error = 'Stop, # 'Stop | 'Continue | 'Retry
depends_on = [], # [] or [{ step = \"id\", kind = 'OnSuccess }]
}
actor decision:
agent runs it autonomously (build, write, export) -> 'Agent
human must do it (review, approve, deploy) -> 'Human
either depending on context -> 'Both
on_error decision:
failure blocks subsequent steps -> 'Stop
failure is non-blocking -> 'Continue
transient, should retry -> 'Retry
Validate each mode exports cleanly:
nickel export --import-path \"$NICKEL_IMPORT_PATH\" reflection/modes/*.ncl > /dev/null
Verify graphs:
ONTOREF_ACTOR=agent ontoref graph <mode-id>
",
}

View File

@ -0,0 +1,47 @@
{
id = "0006",
slug = "claude-agent-entrypoint",
description = "Add Agent Entry-Point Protocol section to .claude/CLAUDE.md",
check = {
tag = "Grep",
pattern = "Agent Entry-Point Protocol",
paths = [".claude/CLAUDE.md"],
must_be_empty = false,
},
instructions = "
Add the following section to .claude/CLAUDE.md (insert before the last major section
or append at end). Create .claude/CLAUDE.md if it does not exist.
## Agent Entry-Point Protocol
When arriving at this project as an agent, execute these discovery steps in order
before any code changes:
# 1. Project capabilities — what this project has and can do
ONTOREF_ACTOR=agent ontoref describe capabilities
# 2. Available reflection modes — operational DAGs you can execute
ONTOREF_ACTOR=agent ontoref describe mode
# 3. Check migration status — apply any pending protocol migrations
ONTOREF_ACTOR=agent ontoref migrate list
# 4. Start a run before any structured work session
ONTOREF_ACTOR=agent ontoref run start <mode-id> --task \"description\"
# 5. Report each step as you complete it
ONTOREF_ACTOR=agent ontoref step report <mode-id> <step-id> --status pass|fail|skip
# 6. Verify mode completion
ONTOREF_ACTOR=agent ontoref mode complete <mode-id>
Graph output (Mermaid DSL, parseable):
ONTOREF_ACTOR=agent ontoref graph ontology
ONTOREF_ACTOR=agent ontoref graph deps
ONTOREF_ACTOR=agent ontoref graph flow
ONTOREF_ACTOR=agent ontoref graph <mode-id>
Justfile validation:
ONTOREF_ACTOR=agent ontoref validate justfile
",
}

View File

@ -17,7 +17,7 @@ def actor-default []: nothing -> string {
# Build NICKEL_IMPORT_PATH for a given project root.
# Includes project-local ontology, onref symlinked schemas, ADR defaults,
# and the existing NICKEL_IMPORT_PATH from the environment.
def nickel-import-path [root: string]: nothing -> string {
export def nickel-import-path [root: string]: nothing -> string {
let entries = [
$"($root)/.ontology"
$"($root)/adrs"
@ -99,7 +99,9 @@ export def "describe capabilities" [
let a = if ($actor | is-not-empty) { $actor } else { (actor-default) }
let f = if ($fmt | is-not-empty) { $fmt } else if $a == "agent" { "json" } else { "text" }
let project_flags = (scan-project-flags $root)
let just_modules = (scan-just-modules $root)
let just_recipes = (scan-just-recipes $root)
let ontoref_commands = (scan-ontoref-commands)
let modes = (scan-reflection-modes $root)
let claude = (scan-claude-capabilities $root)
@ -107,20 +109,144 @@ export def "describe capabilities" [
let manifest_modes = (scan-manifest-modes $root)
let manifest = (load-manifest-safe $root)
let manifest_capabilities = ($manifest.capabilities? | default [])
let backlog_pending = (count-backlog-pending $root)
let data = {
project_flags: $project_flags,
just_modules: $just_modules,
just_recipes: $just_recipes,
ontoref_commands: $ontoref_commands,
reflection_modes: $modes,
claude_capabilities: $claude,
ci_tools: $ci_tools,
manifest_modes: $manifest_modes,
manifest_capabilities: $manifest_capabilities,
backlog_pending: $backlog_pending,
}
emit-output $data $f { || render-capabilities-text $data $a $root }
}
# ── describe mode ────────────────────────────────────────────────────────────────
# "What steps does this mode define? In what order? What does each step do?"
export def "describe mode" [
name?: string, # Mode ID (without .ncl extension). Omit to list all.
--fmt: string = "", # Output format: text* | json | yaml | table
--actor: string = "", # Perspective: developer | agent | ci
--with-capabilities, # Annotate each step with applicable flag (requires capabilities scan)
]: nothing -> nothing {
let root = (project-root)
let a = if ($actor | is-not-empty) { $actor } else { (actor-default) }
let f = if ($fmt | is-not-empty) { $fmt } else if $a == "agent" { "json" } else { "text" }
# List mode — no name given
if ($name | is-empty) {
let modes = (scan-reflection-modes $root)
let data = { modes: $modes }
emit-output $data $f {||
print ""
print "AVAILABLE MODES"
print "══════════════════════════════════════════════════════════════════"
for m in $modes {
let src = if $m.source == "project" { " [project]" } else { "" }
print $" ($m.id)($src) — ($m.steps) steps"
if ($m.trigger | is-not-empty) { print $" ($m.trigger)" }
}
print ""
print $"Run: ontoref describe mode <name>"
}
return
}
# Locate mode file — project-local takes precedence over ontoref
let project_file = $"($root)/reflection/modes/($name).ncl"
let ontoref_file = $"($env.ONTOREF_ROOT)/reflection/modes/($name).ncl"
let mode_root = if ($project_file | path exists) { $root } else { $env.ONTOREF_ROOT }
let mode_file = if ($project_file | path exists) { $project_file } else { $ontoref_file }
if not ($mode_file | path exists) {
print $"(ansi red)Mode '($name)' not found.(ansi reset)"
print $" Searched: ($project_file)"
print $" ($ontoref_file)"
return
}
let ip = (nickel-import-path $mode_root)
let mode = (daemon-export-safe $mode_file --import-path $ip)
if $mode == null {
print $"(ansi red)Failed to export mode '($name)' — check NCL syntax.(ansi reset)"
return
}
# Optionally annotate steps with capability flags
let flags = if $with_capabilities { (scan-project-flags $root) } else { {} }
let steps = ($mode.steps? | default [] | each { |s|
if ($flags | is-not-empty) {
$s | insert "_applicable" true # placeholder — extended in T3 schema with `needs`
} else { $s }
})
let data = {
id: ($mode.id? | default $name),
trigger: ($mode.trigger? | default ""),
preconditions: ($mode.preconditions? | default []),
steps: $steps,
postconditions: ($mode.postconditions? | default []),
source: (if ($project_file | path exists) { "project" } else { "ontoref" }),
file: $mode_file,
}
emit-output $data $f {||
print ""
print $"MODE: ($data.id) [($data.source)]"
print "══════════════════════════════════════════════════════════════════"
if ($data.trigger | is-not-empty) {
print $" ($data.trigger)"
}
if ($data.preconditions | is-not-empty) {
print ""
print " PRECONDITIONS"
for p in $data.preconditions { print $" · ($p)" }
}
print ""
print " STEPS"
print " ──────────────────────────────────────────────────────────────"
for s in $data.steps {
let deps = if ($s.depends_on? | default [] | is-not-empty) {
let dep_ids = ($s.depends_on | each { |d|
let kind = ($d.kind? | default "Always")
if $kind != "Always" { $"($d.step)[($kind)]" } else { $d.step }
})
$" after: ($dep_ids | str join ', ')"
} else { "" }
let actor_tag = match ($s.actor? | default "Both") {
"Human" => " [human]",
"Agent" => " [agent]",
_ => "",
}
let err = ($s.on_error?.strategy? | default "Stop")
print $" ($s.id)($actor_tag) on_error=($err)($deps)"
print $" ($s.action? | default '')"
if ($s.cmd? | default "" | is-not-empty) {
print $" $ ($s.cmd)"
}
if ($s.verify? | default "" | is-not-empty) {
print $" verify: ($s.verify)"
}
}
if ($data.postconditions | is-not-empty) {
print ""
print " POSTCONDITIONS"
for p in $data.postconditions { print $" · ($p)" }
}
print ""
}
}
# ── describe requirements ────────────────────────────────────────────────────────
# "What does this project need to run? What are the prod/dev prerequisites?"
@ -1691,6 +1817,10 @@ def scan-just-modules [root: string]: nothing -> list<record> {
$modules
}
# Map a just recipe name onto a coarse category bucket for grouped display.
# Branch order matters: the first matching prefix wins (e.g. "ci" before "build").
def categorize-recipe [name: string]: nothing -> string {
    if ($name | str starts-with "ci") { return "ci" }
    if ($name | str starts-with "build") { return "build" }
    if ($name | str starts-with "test") or ($name == "test") { return "test" }
    if ($name | str starts-with "doc") { return "docs" }
    if ($name | str starts-with "deploy") { return "deploy" }
    if ($name | str starts-with "nickel") { return "nickel" }
    if ($name | str starts-with "install") or ($name | str starts-with "release") or ($name | str starts-with "package") or ($name | str starts-with "dist") { return "distro" }
    if ($name in ["fmt", "format", "lint", "watch", "dev", "setup", "setup-hooks", "clean"]) or ($name | str starts-with "fmt") or ($name | str starts-with "lint") or ($name | str starts-with "watch") { return "dev" }
    "other"
}
def scan-just-recipes [root: string]: nothing -> list<record> {
let result = do { ^just --list --unsorted --justfile $"($root)/justfile" } | complete
if $result.exit_code != 0 { return [] }
@ -1700,7 +1830,7 @@ def scan-just-recipes [root: string]: nothing -> list<record> {
let parts = ($trimmed | split row " # ")
let name = ($parts | first | str trim)
let desc = if ($parts | length) > 1 { $parts | skip 1 | str join " # " | str trim } else { "" }
{ name: $name, description: $desc }
{ name: $name, category: (categorize-recipe $name), description: $desc }
}
}
@ -1818,6 +1948,81 @@ def scan-dev-tools [root: string]: nothing -> list<record> {
$tools
}
# Derive the "owner/repo" slug from the project's `origin` git remote URL.
# Handles both SSH (git@host:owner/repo.git) and HTTPS forms.
# Returns "" when the remote is missing or `git` fails.
def git-remote-slug [root: string]: nothing -> string {
    let r = do { ^git -C $root remote get-url origin } | complete
    if $r.exit_code != 0 { return "" }
    let url = ($r.stdout | str trim)
    if ($url | str contains "@") {
        # git@host:owner/repo.git
        $url | split row ":" | last | str replace -r '\.git$' "" | str trim
    } else {
        # https://host/owner/repo.git
        let parts = ($url | split row "/" | last 2)
        ($parts | str join "/") | str replace -r '\.git$' ""
    }
}
# Detect coarse capability flags for a project by inspecting its file layout,
# Cargo workspace, ontoref config, and git remote. Returns a flat record of
# booleans plus the crate list, git slug, and open-PR count.
def scan-project-flags [root: string]: nothing -> record {
    let cargo_toml = $"($root)/Cargo.toml"
    let has_rust = ($cargo_toml | path exists)
    # Crate names: workspace members if declared, else the single package name.
    let crates = if $has_rust {
        let cargo = (open $cargo_toml)
        let members = ($cargo | get -o workspace.members | default [])
        if ($members | is-not-empty) {
            # Member entries may be globs — expand each to concrete Cargo.toml paths.
            $members | each { |m|
                glob $"($root)/($m)/Cargo.toml"
            } | flatten | each { |f|
                let c = (open $f)
                # Fall back to the directory name when package.name is absent.
                $c | get -o package.name | default ($f | path dirname | path basename)
            }
        } else {
            let name = ($cargo | get -o package.name | default ($root | path basename))
            [$name]
        }
    } else { [] }
    # NATS flag comes from the project's ontoref config, if it exports cleanly.
    let config_file = $"($root)/.ontoref/config.ncl"
    let has_nats = if ($config_file | path exists) {
        let ip = (nickel-import-path $root)
        let cfg = (daemon-export-safe $config_file --import-path $ip)
        if $cfg != null { $cfg.nats_events?.enabled? | default false } else { false }
    } else { false }
    let has_git = ($"($root)/.git" | path exists)
    let git_slug = if $has_git { git-remote-slug $root } else { "" }
    let has_git_remote = ($git_slug | is-not-empty)
    # Open-PR count requires both a remote slug and the `gh` CLI on PATH.
    let open_prs = if $has_git_remote and (which gh | is-not-empty) {
        let r = do { ^gh pr list --repo $git_slug --state open --json number --jq length } | complete
        if $r.exit_code == 0 { $r.stdout | str trim | into int } else { 0 }
    } else { 0 }
    {
        has_rust: $has_rust,
        has_ui: (($"($root)/templates" | path exists) or ($"($root)/assets" | path exists)),
        has_mdbook: ($"($root)/docs/SUMMARY.md" | path exists),
        has_nats: $has_nats,
        has_precommit: ($"($root)/.pre-commit-config.yaml" | path exists),
        has_backlog: ($"($root)/reflection/backlog.ncl" | path exists),
        has_git_remote: $has_git_remote,
        git_slug: $git_slug,
        open_prs: $open_prs,
        crates: $crates,
    }
}
# Count backlog items that are still actionable — i.e. whose status is neither
# "done" nor "graduated" (missing status defaults to "open"). Returns 0 when
# the backlog file is absent or fails to export.
def count-backlog-pending [root: string]: nothing -> int {
    let backlog_file = $"($root)/reflection/backlog.ncl"
    if not ($backlog_file | path exists) { return 0 }
    let import_path = (nickel-import-path $root)
    let exported = (daemon-export-safe $backlog_file --import-path $import_path)
    if $exported == null { return 0 }
    let items = ($exported.items? | default [])
    $items
    | where { |item| ($item.status? | default "open") not-in ["done", "graduated"] }
    | length
}
# ── Feature collectors ────────────────────────────────────────────────────────
def collect-cargo-features [root: string]: nothing -> list<record> {
@ -2098,6 +2303,55 @@ def render-capabilities-text [data: record, actor: string, root: string]: nothin
print "CAPABILITIES"
print "══════════════════════════════════════════════════════════════════"
let flags = ($data.project_flags? | default {})
if ($flags | is-not-empty) {
print ""
print "PROJECT FLAGS"
print "──────────────────────────────────────────────────────────────────"
let flag_pairs = [
["has_rust", "Rust"],
["has_ui", "UI (templates/assets)"],
["has_mdbook", "mdBook (docs/SUMMARY.md)"],
["has_nats", "NATS events"],
["has_precommit", "pre-commit hooks"],
["has_backlog", "backlog (reflection/backlog.ncl)"],
["has_git_remote", "git remote"],
]
for pair in $flag_pairs {
let key = ($pair | first)
let label = ($pair | last)
let val = ($flags | get -o $key | default false)
let mark = if $val { "✓" } else { "○" }
print $" ($mark) ($label)"
}
let crates = ($flags.crates? | default [])
if ($crates | is-not-empty) {
print $" Crates: ($crates | str join ', ')"
}
if ($data.backlog_pending? | default 0) > 0 {
print $" Backlog pending: ($data.backlog_pending)"
}
let open_prs = ($flags.open_prs? | default 0)
if $open_prs > 0 {
print $" Open PRs: ($open_prs) [($flags.git_slug? | default '')]"
}
}
if ($data.just_recipes? | default [] | is-not-empty) {
print ""
print "JUST RECIPES (by category)"
print "──────────────────────────────────────────────────────────────────"
let by_cat = ($data.just_recipes | group-by category)
for cat in ($by_cat | columns | sort) {
let recipes = ($by_cat | get $cat)
print $" [($cat)]"
for r in $recipes {
let desc = if ($r.description | is-not-empty) { $" — ($r.description)" } else { "" }
print $" ($r.name)($desc)"
}
}
}
if ($data.just_modules | is-not-empty) {
print ""
print "JUST MODULES"

270
reflection/modules/graph.nu Normal file
View File

@ -0,0 +1,270 @@
#!/usr/bin/env nu
# reflection/modules/graph.nu — actor-aware graph output.
#
# Agent → Mermaid DSL (stdout, parseable, diffeable)
# Human → URL to daemon UI (if running), else Mermaid
#
# Supported types:
# ontology — core.ncl nodes + edges as flowchart
# flow — last run steps as DAG
# deps — Rust crate dependency graph (if has_rust)
# mode — a reflection mode DAG
use ../modules/store.nu [daemon-export-safe]
use ../modules/describe.nu [nickel-import-path]
# Resolve the consumer project root: use ONTOREF_PROJECT_ROOT only when it is
# set and points somewhere other than the ontoref installation itself.
def project-root []: nothing -> string {
  let override = ($env.ONTOREF_PROJECT_ROOT? | default "")
  let base = $env.ONTOREF_ROOT
  if ($override | is-empty) or ($override == $base) { $base } else { $override }
}
# Actor identity used for output formatting; falls back to "developer".
def actor-default []: nothing -> string {
  $env | get -o ONTOREF_ACTOR | default "developer"
}
# Base URL of the ontoref daemon, overridable via ONTOREF_DAEMON_URL.
def daemon-url []: nothing -> string {
  $env | get -o ONTOREF_DAEMON_URL | default "http://127.0.0.1:7891"
}
# True when the daemon answers its /health endpoint (curl -sf exits 0).
def daemon-running []: nothing -> bool {
  let probe = (do { ^curl -sf $"(daemon-url)/health" } | complete)
  $probe.exit_code == 0
}
# Sanitize a string for use as a Mermaid node ID: hyphens, dots and spaces
# all become underscores (Mermaid IDs may not contain them).
def mermaid-id [s: string]: nothing -> string {
  $s | str replace --all --regex '[-. ]' "_"
}
# Emit Mermaid or UI URL based on actor and daemon availability.
# Routing: --fmt mermaid (or an agent actor) always prints raw Mermaid;
# --fmt url always prints the daemon URL (reachable or not); any other fmt
# prints the URL only when the daemon answers /health, else falls back to
# Mermaid on stdout.
def emit-graph [mermaid: string, ui_path: string, fmt: string, actor: string]: nothing -> nothing {
  if $fmt == "mermaid" or $actor == "agent" {
    print $mermaid
  } else if $fmt == "url" {
    print $"(daemon-url)($ui_path)"
  } else {
    # text/human: prefer URL if daemon running, else Mermaid
    if (daemon-running) {
      print $"Open in UI: (daemon-url)($ui_path)"
      # Recover the graph type from the UI path so the hint command matches
      # what the caller asked for.
      print $"Or run: ONTOREF_ACTOR=agent ontoref graph (if ($ui_path | str contains "ontology") { "ontology" } else if ($ui_path | str contains "flow") { "flow" } else { "deps" }) --fmt mermaid"
    } else {
      print $mermaid
    }
  }
}
# Render .ontology/core.ncl (nodes + edges) as a Mermaid `flowchart LR`.
# Each node gets a classDef style keyed on its lowercased ontology level
# (default level: Project); edges carry their `kind` as an arrow label when
# present. A missing file or failed export degrades to a one-note flowchart.
def ontology-to-mermaid [root: string]: nothing -> string {
  let ip = (nickel-import-path $root)
  let core_file = $"($root)/.ontology/core.ncl"
  if not ($core_file | path exists) { return "flowchart LR\n note[\"No .ontology/core.ncl found\"]" }
  let core = (daemon-export-safe $core_file --import-path $ip)
  if $core == null { return "flowchart LR\n note[\"Failed to export core.ncl\"]" }
  let nodes = ($core.nodes? | default [])
  let edges = ($core.edges? | default [])
  # One classDef per ontology level; node lines reference them via `:::class`.
  mut lines = ["flowchart LR"]
  $lines = ($lines | append " classDef axiom fill:#1a1a2e,stroke:#e94560,color:#fff,font-weight:bold")
  $lines = ($lines | append " classDef tension fill:#16213e,stroke:#f5a623,color:#fff")
  $lines = ($lines | append " classDef practice fill:#0f3460,stroke:#53c28b,color:#fff")
  $lines = ($lines | append " classDef project fill:#1b262c,stroke:#4fc3f7,color:#fff")
  $lines = ($lines | append " classDef moment fill:#2d2d2d,stroke:#aaa,color:#fff")
  $lines = ($lines | append "")
  for n in $nodes {
    let nid = (mermaid-id $n.id)
    let label = ($n.name? | default $n.id)
    let level = ($n.level? | default "Project" | str downcase)
    $lines = ($lines | append $" ($nid)[\"($label)\"]:::($level)")
  }
  if ($edges | is-not-empty) {
    $lines = ($lines | append "")
    for e in $edges {
      let fid = (mermaid-id $e.from)
      let tid = (mermaid-id $e.to)
      let kind = ($e.kind? | default "")
      # Edge label only when the edge declares a kind.
      let label = if ($kind | is-not-empty) { $" -->|\"($kind)\"| " } else { " --> " }
      $lines = ($lines | append $" ($fid)($label)($tid)")
    }
  }
  $lines | str join "\n"
}
# Render the actor's active run as a Mermaid `flowchart TD`: mode steps become
# nodes styled by reported status (pass/fail/skip/pending); edges come from
# each step's depends_on, with OnFailure dependencies drawn as dotted arrows.
# Degrades to a one-note flowchart when no run is active.
def run-flow-to-mermaid [root: string, actor: string]: nothing -> string {
  let runs_dir = $"($root)/.coder/($actor)/runs"
  let current_file = $"($runs_dir)/current.json"
  if not ($current_file | path exists) { return "flowchart TD\n note[\"No active run found\"]" }
  let current = (open $current_file)
  let run_id = ($current.run_id? | default "")
  let mode_id = ($current.mode? | default "")
  if ($run_id | is-empty) { return "flowchart TD\n note[\"No active run\"]" }
  let steps_file = $"($runs_dir)/($run_id)/steps.jsonl"
  # steps.jsonl: one JSON record per reported step; blank lines are skipped.
  let reported = if ($steps_file | path exists) {
    open $steps_file | lines | where { |l| $l | str trim | is-not-empty } | each { |l| $l | from json }
  } else { [] }
  # Load mode DAG for structure
  # Project copy of the mode wins over ontoref's copy.
  let ontoref_file = $"($env.ONTOREF_ROOT)/reflection/modes/($mode_id).ncl"
  let project_file = $"($root)/reflection/modes/($mode_id).ncl"
  let mode_file = if ($project_file | path exists) { $project_file } else if ($ontoref_file | path exists) { $ontoref_file } else { "" }
  let steps = if ($mode_file | is-not-empty) {
    # Import path must match the tree the mode file came from.
    let ip = (nickel-import-path (if ($project_file | path exists) { $root } else { $env.ONTOREF_ROOT }))
    let m = (daemon-export-safe $mode_file --import-path $ip)
    if $m != null { $m.steps? | default [] } else { [] }
  } else { [] }
  mut lines = [$"flowchart TD", $" classDef pass fill:#1a472a,stroke:#53c28b,color:#fff", $" classDef fail fill:#4a0e0e,stroke:#e94560,color:#fff", $" classDef skip fill:#2d2d2d,stroke:#888,color:#aaa", $" classDef pending fill:#1a1a2e,stroke:#555,color:#666", ""]
  for s in $steps {
    let sid = (mermaid-id $s.id)
    # First report for this step wins; unreported steps render as "pending".
    let rep_list = ($reported | where { |r| $r.step == $s.id })
    let rep = if ($rep_list | is-not-empty) { $rep_list | first } else { null }
    let status = if ($rep | is-not-empty) { $rep.status } else { "pending" }
    let warn = if ($rep | is-not-empty) and ($rep.warnings? | default 0) > 0 { $" ⚠($rep.warnings)" } else { "" }
    $lines = ($lines | append $" ($sid)[\"($s.id)\\n($status)($warn)\"]:::($status)")
  }
  $lines = ($lines | append "")
  for s in $steps {
    let sid = (mermaid-id $s.id)
    for dep in ($s.depends_on? | default []) {
      let did = (mermaid-id $dep.step)
      let kind = ($dep.kind? | default "Always")
      let arrow = if $kind == "OnFailure" { " -.->|\"OnFailure\"| " } else { " --> " }
      $lines = ($lines | append $" ($did)($arrow)($sid)")
    }
  }
  $lines | str join "\n"
}
# Render the Rust workspace's inter-crate dependency graph as a Mermaid
# `flowchart LR`. Only edges between workspace members are drawn; external
# crates are ignored. Includes dependencies, dev-dependencies and
# build-dependencies. Degrades to a one-note flowchart when there is no
# Cargo.toml or no workspace members.
def deps-to-mermaid [root: string]: nothing -> string {
  let cargo_toml = $"($root)/Cargo.toml"
  if not ($cargo_toml | path exists) { return "flowchart LR\n note[\"No Cargo.toml found\"]" }
  let cargo = (open $cargo_toml)
  let members = ($cargo | get -o workspace.members | default [])
  if ($members | is-empty) {
    return "flowchart LR\n note[\"Single-crate project — no inter-crate deps to show\"]"
  }
  # Collect crate names
  # Members may be globs ("crates/*"), so expand each before reading.
  # Falls back to the directory name when [package].name is absent.
  let crate_names = ($members | each { |m|
    glob $"($root)/($m)/Cargo.toml"
  } | flatten | each { |f|
    let c = (open $f)
    $c | get -o package.name | default ($f | path dirname | path basename)
  })
  # Collect inter-workspace dependencies
  mut edges = []
  for m in $members {
    let expanded = (glob $"($root)/($m)/Cargo.toml")
    for ct in $expanded {
      let c = (open $ct)
      let cname = ($c | get -o package.name | default ($ct | path dirname | path basename))
      let all_deps = (
        ($c | get -o dependencies | default {} | columns) ++
        ($c | get -o "dev-dependencies" | default {} | columns) ++
        ($c | get -o "build-dependencies" | default {} | columns)
      )
      for dep in $all_deps {
        # Keep only edges whose target is another workspace member.
        if $dep in $crate_names {
          $edges = ($edges | append { from: $cname, to: $dep })
        }
      }
    }
  }
  mut lines = ["flowchart LR"]
  for n in $crate_names {
    let nid = (mermaid-id $n)
    $lines = ($lines | append $" ($nid)[\"($n)\"]")
  }
  $lines = ($lines | append "")
  for e in $edges {
    let fid = (mermaid-id $e.from)
    let tid = (mermaid-id $e.to)
    $lines = ($lines | append $" ($fid) --> ($tid)")
  }
  $lines | str join "\n"
}
# Render a reflection mode's step DAG as a Mermaid `flowchart TD`.
# Nodes are styled by the step's actor (Human/Agent/Both, default Both) and
# tagged "[cmd]" when the step declares a command; edges come from depends_on.
# Prefers the project's copy of the mode file over ontoref's.
def mode-to-mermaid [mode_id: string]: nothing -> string {
  let root = (project-root)
  let project_file = $"($root)/reflection/modes/($mode_id).ncl"
  let ontoref_file = $"($env.ONTOREF_ROOT)/reflection/modes/($mode_id).ncl"
  let mode_file = if ($project_file | path exists) { $project_file } else if ($ontoref_file | path exists) { $ontoref_file } else { "" }
  if ($mode_file | is-empty) { return $"flowchart TD\n note[\"Mode '($mode_id)' not found\"]" }
  # Import path must match the tree the mode file lives in.
  let mode_root = if ($project_file | path exists) { $root } else { $env.ONTOREF_ROOT }
  let ip = (nickel-import-path $mode_root)
  let m = (daemon-export-safe $mode_file --import-path $ip)
  if $m == null { return $"flowchart TD\n note[\"Failed to export mode '($mode_id)'\"]" }
  let steps = ($m.steps? | default [])
  mut lines = [$"flowchart TD", $" classDef human fill:#16213e,stroke:#f5a623,color:#fff", $" classDef agent fill:#0f3460,stroke:#53c28b,color:#fff", $" classDef both fill:#1a1a2e,stroke:#4fc3f7,color:#fff", ""]
  for s in $steps {
    let sid = (mermaid-id $s.id)
    let actor_class = match ($s.actor? | default "Both") {
      "Human" => "human",
      "Agent" => "agent",
      _ => "both",
    }
    let cmd_hint = if ($s.cmd? | default "" | is-not-empty) { "\\n[cmd]" } else { "" }
    $lines = ($lines | append $" ($sid)[\"($s.id)($cmd_hint)\"]:::($actor_class)")
  }
  $lines = ($lines | append "")
  for s in $steps {
    let sid = (mermaid-id $s.id)
    for dep in ($s.depends_on? | default []) {
      let did = (mermaid-id $dep.step)
      $lines = ($lines | append $" ($did) --> ($sid)")
    }
  }
  $lines | str join "\n"
}
# Emit a graph. Type: ontology | flow | deps | mode:<id>
# Agent → Mermaid DSL. Human → daemon UI URL (if running) or Mermaid.
# Any type that is not ontology/flow/deps is treated as a mode id, with an
# optional "mode:" prefix. --fmt overrides the actor-derived default.
export def "graph show" [
  type: string = "ontology", # ontology | flow | deps | mode:<id>
  --fmt (-f): string = "", # mermaid | url | text (default: actor-aware)
  --actor: string = "",
]: nothing -> nothing {
  let root = (project-root)
  # Actor resolution: explicit flag > ONTOREF_ACTOR > "developer".
  let a = if ($actor | is-not-empty) { $actor } else { (actor-default) }
  let f = if ($fmt | is-not-empty) { $fmt } else if $a == "agent" { "mermaid" } else { "text" }
  match $type {
    "ontology" => {
      let mmd = (ontology-to-mermaid $root)
      emit-graph $mmd "/graph" $f $a
    }
    "flow" => {
      let mmd = (run-flow-to-mermaid $root $a)
      emit-graph $mmd "/graph" $f $a
    }
    "deps" => {
      let mmd = (deps-to-mermaid $root)
      emit-graph $mmd "/graph" $f $a
    }
    _ => {
      # mode:<id> or just the mode id
      let mode_id = if ($type | str starts-with "mode:") { $type | str replace "mode:" "" } else { $type }
      let mmd = (mode-to-mermaid $mode_id)
      emit-graph $mmd "/graph" $f $a
    }
  }
}

View File

@ -0,0 +1,202 @@
#!/usr/bin/env nu
# reflection/modules/migrate.nu — ontoref protocol migration runner.
#
# Migrations live in $ONTOREF_ROOT/reflection/migrations/NNN-slug.ncl
# Each has: id, slug, description, check, instructions
# "Applied" = check passes. No state file — checks are the source of truth.
#
# Commands:
# migrate list — all migrations with applied/pending status
# migrate pending — only pending migrations
# migrate show <id> — instructions for a specific migration
use env.nu *
use store.nu [daemon-export-safe]
use describe.nu [nickel-import-path]
# Consumer project root, defaulting to the ontoref installation root when
# ONTOREF_PROJECT_ROOT is unset.
def project-root []: nothing -> string {
  $env | get -o ONTOREF_PROJECT_ROOT | default $env.ONTOREF_ROOT
}
# Directory holding the NNN-slug.ncl migration files (always under
# the ontoref installation root, never the consumer project).
def migrations-dir []: nothing -> string {
  $env.ONTOREF_ROOT | path join "reflection" "migrations"
}
# Run a check record against the given project root. Returns { passed, detail }.
#
# Supported tags:
#   FileExists — $check.path (relative to root) must exist, or must be absent
#                when present: false
#   Grep       — $check.pattern must match in at least one of $check.paths,
#                or must match nowhere when must_be_empty: true
#   NuCmd      — $check.cmd runs via `nu -c`; exit code must equal
#                expect_exit (default 0)
# Any other tag fails with an "unknown check tag" detail.
def run-migration-check [check: record, root: string]: nothing -> record {
  match $check.tag {
    "FileExists" => {
      let p = ([$root, $check.path] | path join)
      let exists = ($p | path exists)
      # present defaults to true: the file is expected to exist.
      let want = ($check.present? | default true)
      {
        passed: ($exists == $want),
        detail: (if ($exists == $want) { "ok" } else if $want { $"missing: ($p)" } else { $"unexpected: ($p)" })
      }
    }
    "Grep" => {
      let paths = ($check.paths | each { |p| [$root, $p] | path join } | where { |p| $p | path exists })
      if ($paths | is-empty) {
        return { passed: false, detail: "no target paths exist" }
      }
      let r = do { ^rg --no-heading -l $check.pattern ...$paths } | complete
      # rg exit codes: 0 = matched, 1 = no matches, >1 = error (bad pattern,
      # I/O failure). An rg error must fail the check explicitly — previously
      # it was conflated with "no matches", which let must_be_empty checks
      # pass on a broken pattern.
      if $r.exit_code > 1 {
        return { passed: false, detail: $"rg error: ($r.stderr | str trim | str substring 0..120)" }
      }
      let has_matches = ($r.exit_code == 0)
      let must_empty = ($check.must_be_empty? | default false)
      if $must_empty {
        let files = ($r.stdout | str trim)
        { passed: (not $has_matches), detail: (if $has_matches { $"pattern found in: ($files)" } else { "ok" }) }
      } else {
        { passed: $has_matches, detail: (if $has_matches { "ok" } else { "required pattern absent" }) }
      }
    }
    "NuCmd" => {
      # Runs in a fresh `nu -c` subshell — the migration author must write
      # valid Nushell (no bash &&, $env.VAR not $VAR).
      let r = do { nu -c $check.cmd } | complete
      let expected = ($check.expect_exit? | default 0)
      {
        passed: ($r.exit_code == $expected),
        detail: (if ($r.exit_code == $expected) { "ok" } else { $"exit ($r.exit_code) ≠ ($expected): ($r.stderr | str trim | str substring 0..120)" })
      }
    }
    _ => { passed: false, detail: $"unknown check tag: ($check.tag)" }
  }
}
# Export every reflection/migrations/*.ncl file, sorted by filename so the
# NNN- prefix yields a stable, ordered sequence. A migration that fails to
# export (or exports null) is silently dropped via try/catch + empty list,
# so one broken file does not block the rest.
def load-all-migrations []: nothing -> list<record> {
  let dir = (migrations-dir)
  if not ($dir | path exists) { return [] }
  let ip = (nickel-import-path $env.ONTOREF_ROOT)
  glob $"($dir)/*.ncl"
  | sort
  | each { |f|
    try {
      let m = (daemon-export-safe $f --import-path $ip)
      # Wrap in a single-element list so flatten can drop the failures ([]).
      if $m != null { [$m] } else { [] }
    } catch { [] }
  }
  | flatten
}
# Evaluate each migration's check against root and return one status record
# per migration: { id, slug, description, applied, detail }.
# "applied" is simply "the check passes" — there is no separate state file.
def migration-status [migrations: list<record>, root: string]: nothing -> list<record> {
  $migrations | each { |mig|
    let outcome = (run-migration-check $mig.check $root)
    $mig
    | select id slug description
    | merge { applied: $outcome.passed, detail: $outcome.detail }
  }
}
# List all protocol migrations with applied/pending status.
# Agents (or --fmt json) receive the raw status records as JSON; humans get a
# ✓/○ checklist, with the failing-check detail shown under pending entries.
export def "migrate list" [
  --fmt (-f): string = "",
  --actor (-a): string = "",
]: nothing -> nothing {
  let root = (project-root)
  # Actor resolution: explicit flag > ONTOREF_ACTOR > "developer".
  let actor = if ($actor | is-not-empty) { $actor } else { $env.ONTOREF_ACTOR? | default "developer" }
  let fmt = if ($fmt | is-not-empty) { $fmt } else if $actor == "agent" { "json" } else { "text" }
  let statuses = (migration-status (load-all-migrations) $root)
  if $fmt == "json" {
    print ($statuses | to json)
    return
  }
  let n_applied = ($statuses | where applied == true | length)
  let n_total = ($statuses | length)
  print $" migrations: ($n_applied)/($n_total) applied"
  print ""
  for s in $statuses {
    let mark = if $s.applied { "✓" } else { "○" }
    print $" ($mark) ($s.id) ($s.slug)"
    print $" ($s.description)"
    # Show why the check failed, but only when it carries real detail.
    if not $s.applied and $s.detail != "ok" {
      print $" ($s.detail)"
    }
  }
}
# List only pending (not yet applied) migrations.
# "Pending" means the migration's check currently fails — there is no state
# file involved. Output is actor-aware like `migrate list`.
export def "migrate pending" [
  --fmt (-f): string = "",
  --actor (-a): string = "",
]: nothing -> nothing {
  let root = (project-root)
  let actor = if ($actor | is-not-empty) { $actor } else { $env.ONTOREF_ACTOR? | default "developer" }
  let fmt = if ($fmt | is-not-empty) { $fmt } else if $actor == "agent" { "json" } else { "text" }
  let pending = (
    migration-status (load-all-migrations) $root
    | where applied == false
  )
  if $fmt == "json" {
    print ($pending | to json)
    return
  }
  if ($pending | is-empty) {
    print " all migrations applied"
    return
  }
  print $" ($pending | length) pending:"
  for p in $pending {
    print $" ○ ($p.id) ($p.slug) — ($p.description)"
  }
}
# Show the instructions for a specific migration. Accepts id ("0001") or slug.
# Runs the migration's check live so the applied/pending status is current,
# and interpolates project placeholders into the instruction text before
# printing (or emitting JSON for agents).
export def "migrate show" [
  id: string,
  --fmt (-f): string = "",
  --actor (-a): string = "",
]: nothing -> nothing {
  let root = (project-root)
  let actor = if ($actor | is-not-empty) { $actor } else { $env.ONTOREF_ACTOR? | default "developer" }
  let fmt = if ($fmt | is-not-empty) { $fmt } else if $actor == "agent" { "json" } else { "text" }
  let migrations = (load-all-migrations)
  # Normalise zero-padded ids ("0001" → "1") so either spelling matches;
  # an all-zero id collapses to "0" rather than the empty string.
  let id_norm = ($id | str replace --regex '^0+' '')
  let id_norm = if $id_norm == "" { "0" } else { $id_norm }
  let matching = ($migrations | where { |m| $m.id == $id or $m.slug == $id or (($m.id | str replace --regex '^0+' '') == $id_norm) })
  if ($matching | is-empty) {
    error make { msg: $"migration '($id)' not found" }
  }
  let m = ($matching | first)
  let r = (run-migration-check $m.check $root)
  # Interpolate runtime values into instruction text
  let project_name = ($root | path basename)
  let instructions = ($m.instructions
    | str replace --all "{project_root}" $root
    | str replace --all "{project_name}" $project_name
    | str replace --all "PROJECT_NAME" $project_name
    | str replace --all "$ONTOREF_PROJECT_ROOT" $root
  )
  if $fmt == "json" {
    print ({
      id: $m.id,
      slug: $m.slug,
      description: $m.description,
      applied: $r.passed,
      project_root: $root,
      project_name: $project_name,
      instructions: $instructions,
    } | to json)
    return
  }
  let status = if $r.passed { "✓ applied" } else { "○ pending" }
  print $" ($m.id) ($m.slug) — ($status)"
  print $" ($m.description)"
  if not $r.passed and $r.detail != "ok" {
    print $" check: ($r.detail)"
  }
  print ""
  print $instructions
}

390
reflection/modules/run.nu Normal file
View File

@ -0,0 +1,390 @@
#!/usr/bin/env nu
# reflection/modules/run.nu — step execution tracking and run lifecycle.
# Provides: run start, step report, run status
#
# Storage: .coder/<actor>/runs/<run_id>/
# run.json — run metadata
# steps.jsonl — one JSON record per reported step
# current.json — pointer to active run (in .coder/<actor>/runs/)
use ../modules/store.nu [daemon-export-safe]
use ../modules/describe.nu [nickel-import-path]
# Consumer project root: honour ONTOREF_PROJECT_ROOT only when it is set and
# points somewhere other than the ontoref installation itself.
def project-root []: nothing -> string {
  let candidate = ($env.ONTOREF_PROJECT_ROOT? | default "")
  if ($candidate | is-empty) { return $env.ONTOREF_ROOT }
  if $candidate == $env.ONTOREF_ROOT { return $env.ONTOREF_ROOT }
  $candidate
}
# Per-actor run storage directory: <project>/.coder/<actor>/runs
def runs-dir []: nothing -> string {
  let actor = ($env | get -o ONTOREF_ACTOR | default "developer")
  $"(project-root)/.coder/($actor)/runs"
}
# Pointer file recording the active run for this actor.
def current-run-file []: nothing -> string {
  (runs-dir) | path join "current.json"
}
# Read current.json, or return an empty record when no run is active.
def load-current-run []: nothing -> record {
  let pointer = (current-run-file)
  if ($pointer | path exists) { open $pointer } else { {} }
}
# Parse a run's steps.jsonl into records, skipping blank lines.
# Returns [] when the run has no steps file yet.
def load-run-steps [run_id: string]: nothing -> list<record> {
  let steps_path = $"(runs-dir)/($run_id)/steps.jsonl"
  if not ($steps_path | path exists) { return [] }
  open $steps_path
  | lines
  | where { |line| $line | str trim | is-not-empty }
  | each { |line| $line | from json }
}
# Locate a mode definition, preferring the project's copy over ontoref's.
# Returns "" when the mode exists in neither tree.
def find-mode-file [mode: string]: nothing -> string {
  let candidates = [
    $"(project-root)/reflection/modes/($mode).ncl"
    $"($env.ONTOREF_ROOT)/reflection/modes/($mode).ncl"
  ]
  let found = ($candidates | where { |f| $f | path exists })
  if ($found | is-empty) { "" } else { $found | first }
}
# Export a mode's NCL definition as a record, resolving NCL imports relative
# to whichever tree (project or ontoref) the mode file came from.
# Errors loudly via `error make`: a missing mode or failed export is a caller
# problem, not a condition to degrade on.
def load-mode-dag [mode: string]: nothing -> record {
  let root = (project-root)
  let mode_file = (find-mode-file $mode)
  if ($mode_file | is-empty) {
    error make { msg: $"Mode '($mode)' not found in project or ontoref reflection/modes/" }
  }
  # The import path must match the tree the file lives in, or imports break.
  let mode_root = if ($mode_file | str starts-with $root) and ($root != $env.ONTOREF_ROOT) {
    $root
  } else {
    $env.ONTOREF_ROOT
  }
  let ip = (nickel-import-path $mode_root)
  let result = (daemon-export-safe $mode_file --import-path $ip)
  if $result == null {
    error make { msg: $"Failed to export mode '($mode)' — check NCL syntax and import paths" }
  }
  $result
}
# Current time as an ISO-8601 UTC timestamp, e.g. "2026-03-29T00:19:56Z".
# Converts to UTC before formatting: the trailing "Z" designates UTC, but the
# previous implementation formatted *local* time, producing misleading
# timestamps in run.json/steps.jsonl on any machine not running in UTC.
def now-iso []: nothing -> string {
  date now | date to-timezone UTC | format date "%Y-%m-%dT%H:%M:%SZ"
}
# Build a run identifier: "<mode>-<task>" when a task is supplied, otherwise
# "<mode>-<timestamp>" so ad-hoc runs get a unique, sortable id.
def run-id-for [mode: string, task: string]: nothing -> string {
  let suffix = if ($task | is-empty) {
    date now | format date "%Y%m%dT%H%M%S"
  } else {
    $task
  }
  $"($mode)-($suffix)"
}
# Start a new run for a mode. Establishes run context for subsequent `step report` calls.
# Side effects: creates .coder/<actor>/runs/<run_id>/ with run.json, seeds an
# empty steps.jsonl, and points current.json at this run. Starting again with
# the same mode+task overwrites that run's files (save --force).
export def "run start" [
  mode: string, # Mode ID (must exist in reflection/modes/)
  --task (-t): string = "", # Backlog task ID to associate with this run
  --fmt (-f): string = "",
]: nothing -> nothing {
  let actor = ($env.ONTOREF_ACTOR? | default "developer")
  let fmt = if ($fmt | is-not-empty) { $fmt } else if $actor == "agent" { "json" } else { "text" }
  # Fails fast (error make) when the mode doesn't exist or won't export.
  let mode_data = (load-mode-dag $mode)
  let run_id = (run-id-for $mode $task)
  let dir = $"(runs-dir)/($run_id)"
  mkdir $dir
  let step_count = ($mode_data.steps? | default [] | length)
  let state = {
    run_id: $run_id,
    mode: $mode,
    task: $task,
    actor: $actor,
    started_at: (now-iso),
    steps_total: $step_count,
  }
  $state | to json | save --force $"($dir)/run.json"
  $state | to json | save --force (current-run-file)
  # Seed empty steps.jsonl so append works consistently
  "" | save --force $"($dir)/steps.jsonl"
  if $fmt == "json" {
    print ($state | to json)
  } else {
    print $"Run started: ($run_id)"
    print $" Mode: ($mode) ($step_count) steps"
    if ($task | is-not-empty) { print $" Task: ($task)" }
    print $" Dir: ($dir)"
    print $" Next: ontoref step report ($mode) <step_id> --status pass|fail"
  }
}
# Report the result of a step. Validates step exists and dependencies are satisfied.
# Appends one JSON line to the run's steps.jsonl. Re-reporting a step appends a
# second record; readers elsewhere take the first match, so the first report
# wins. A fail on a Stop-strategy step prints a BLOCKED notice but still
# records the entry.
export def "step report" [
  mode: string, # Mode ID
  step_id: string, # Step ID to report
  --status (-s): string, # pass | fail | skip
  --exit-code (-e): int = 0,
  --artifacts (-a): list<string> = [],
  --warnings (-w): int = 0,
  --run (-r): string = "", # Override run ID (default: active run)
  --fmt (-f): string = "",
]: nothing -> nothing {
  let actor = ($env.ONTOREF_ACTOR? | default "developer")
  let fmt = if ($fmt | is-not-empty) { $fmt } else if $actor == "agent" { "json" } else { "text" }
  if not ($status in ["pass", "fail", "skip"]) {
    error make { msg: $"Invalid status '($status)'. Must be: pass | fail | skip" }
  }
  # Resolve run context
  # Explicit --run wins; otherwise the active run must belong to this mode.
  let current = (load-current-run)
  let run_id = if ($run | is-not-empty) {
    $run
  } else if ($current | is-not-empty) and ($current.mode? | default "") == $mode {
    $current.run_id? | default ""
  } else {
    error make { msg: $"No active run for mode '($mode)'. Start one with: ontoref run start ($mode)" }
  }
  let dir = $"(runs-dir)/($run_id)"
  if not ($dir | path exists) {
    error make { msg: $"Run directory not found: ($dir). Start with: ontoref run start ($mode)" }
  }
  let steps_file = $"($dir)/steps.jsonl"
  # Validate step exists in mode DAG
  let mode_data = (load-mode-dag $mode)
  let matching_steps = ($mode_data.steps? | default [] | where { |s| $s.id == $step_id })
  let step_def = if ($matching_steps | is-not-empty) { $matching_steps | first } else { null }
  if ($step_def | is-empty) {
    error make { msg: $"Step '($step_id)' not found in mode '($mode)'. Valid steps: ($mode_data.steps? | default [] | each { |s| $s.id } | str join ', ')" }
  }
  # Validate blocking dependencies are satisfied
  # Only Always/OnSuccess dependencies block; OnFailure deps do not.
  let completed = (load-run-steps $run_id)
  let blocking = ($step_def.depends_on? | default [] | where { |d|
    ($d.kind? | default "Always") in ["Always", "OnSuccess"]
  })
  for dep in $blocking {
    let prev_list = ($completed | where { |r| $r.step == $dep.step })
    let prev = if ($prev_list | is-not-empty) { $prev_list | first } else { null }
    if ($prev | is-empty) {
      error make { msg: $"Step '($step_id)' requires '($dep.step)' to be reported first" }
    }
    # OnSuccess additionally requires the dependency to have passed.
    if ($dep.kind? | default "Always") == "OnSuccess" and ($prev.status? | default "") != "pass" {
      error make { msg: $"Step '($step_id)' depends on '($dep.step)' with OnSuccess, but it ($prev.status? | default 'unknown')" }
    }
  }
  let entry = {
    run_id: $run_id,
    mode: $mode,
    step: $step_id,
    status: $status,
    exit_code: $exit_code,
    warnings: $warnings,
    artifacts: $artifacts,
    actor: $actor,
    ts: (now-iso),
  }
  # Leading \n keeps each record on its own line after the seeded empty file.
  $"\n($entry | to json -r)" | save --append $steps_file
  # Emit side-effect on fail+Stop
  let on_error_strategy = ($step_def.on_error?.strategy? | default "Stop")
  if $status == "fail" and $on_error_strategy == "Stop" {
    if $fmt != "json" {
      print $"(ansi red)BLOCKED(ansi reset): step '($step_id)' failed with on_error=Stop"
      print $" Run '($run_id)' is now blocked. Resolve and re-run this step."
    }
  }
  if $fmt == "json" {
    print ($entry | to json)
  } else {
    let mark = if $status == "pass" { $"(ansi green)✓(ansi reset)" } else if $status == "fail" { $"(ansi red)✗(ansi reset)" } else { $"(ansi yellow)-(ansi reset)" }
    print $" ($mark) ($step_id) [($mode) / ($run_id)]"
    if $warnings > 0 { print $" ($warnings) warning(s)" }
    if ($artifacts | is-not-empty) { print $" artifacts: ($artifacts | str join ', ')" }
  }
}
# Show current run progress.
# Merges the mode's declared step list with whatever has been reported so far:
# unreported steps show as "pending". With --run the given id is inspected;
# note the mode is still read from current.json, so --run is primarily useful
# for other runs of the active mode.
export def "run status" [
  --run (-r): string = "", # Specific run ID (default: active run)
  --fmt (-f): string = "",
]: nothing -> nothing {
  let actor = ($env.ONTOREF_ACTOR? | default "developer")
  let fmt = if ($fmt | is-not-empty) { $fmt } else if $actor == "agent" { "json" } else { "text" }
  let current = (load-current-run)
  let run_id = if ($run | is-not-empty) { $run } else if ($current | is-not-empty) { $current.run_id? | default "" } else { "" }
  if ($run_id | is-empty) {
    if $fmt == "json" { print "null" } else { print "No active run. Start with: ontoref run start <mode>" }
    return
  }
  let mode_id = ($current.mode? | default "")
  # Best-effort: a missing/broken mode file degrades to an empty DAG.
  let mode_data = if ($mode_id | is-not-empty) {
    try { load-mode-dag $mode_id } catch { {} }
  } else { {} }
  let all_step_ids = ($mode_data.steps? | default [] | each { |s| $s.id })
  let reported = (load-run-steps $run_id)
  # One view record per declared step; first report for a step wins.
  let steps_view = ($all_step_ids | each { |id|
    let r_list = ($reported | where { |s| $s.step == $id })
    let r = if ($r_list | is-not-empty) { $r_list | first } else { null }
    if ($r | is-not-empty) {
      { id: $id, status: $r.status, exit_code: ($r.exit_code? | default 0), ts: ($r.ts? | default ""), warnings: ($r.warnings? | default 0) }
    } else {
      { id: $id, status: "pending", exit_code: null, ts: "", warnings: 0 }
    }
  })
  let data = {
    run_id: $run_id,
    mode: $mode_id,
    task: ($current.task? | default ""),
    started_at: ($current.started_at? | default ""),
    steps_total: ($all_step_ids | length),
    steps_reported: ($reported | length),
    steps: $steps_view,
  }
  if $fmt == "json" {
    print ($data | to json)
  } else {
    print ""
    print $"RUN: ($data.run_id)"
    print "══════════════════════════════════════════════════════════════════"
    if ($data.task | is-not-empty) { print $" Task: ($data.task)" }
    print $" Mode: ($data.mode)"
    print $" Started: ($data.started_at)"
    print $" Progress: ($data.steps_reported)/($data.steps_total) steps reported"
    print ""
    for s in $data.steps {
      let mark = match $s.status {
        "pass" => $"(ansi green)✓(ansi reset)",
        "fail" => $"(ansi red)✗(ansi reset)",
        "skip" => $"(ansi yellow)-(ansi reset)",
        "pending" => $"(ansi dark_gray)·(ansi reset)",
        _ => " ",
      }
      let warn_tag = if ($s.warnings? | default 0) > 0 { $" ⚠ ($s.warnings) warnings" } else { "" }
      print $" ($mark) ($s.id)($warn_tag)"
    }
    print ""
  }
}
# Verify a run is complete and emit side effects (backlog close, artifacts summary).
# Fails with a specific message if any required step is missing or blocked.
# A step blocks completion only when its on_error strategy is Stop AND it is
# either unreported or reported as fail; non-Stop failures/omissions are
# listed as advisory warnings. On success: stamps completed_at/ok into
# run.json and clears current.json (no active run). The backlog item is not
# closed automatically — a close command is suggested in the text output.
export def "mode complete" [
  mode: string, # Mode ID
  --task (-t): string = "", # Backlog task ID to close on success
  --run (-r): string = "", # Override run ID (default: active run)
  --fmt (-f): string = "",
]: nothing -> nothing {
  let actor = ($env.ONTOREF_ACTOR? | default "developer")
  let fmt = if ($fmt | is-not-empty) { $fmt } else if $actor == "agent" { "json" } else { "text" }
  # Resolve run context
  let current = (load-current-run)
  let run_id = if ($run | is-not-empty) { $run } else if ($current | is-not-empty) and ($current.mode? | default "") == $mode { $current.run_id? | default "" } else { "" }
  if ($run_id | is-empty) {
    error make { msg: $"No active run for mode '($mode)'. Start one with: ontoref run start ($mode)" }
  }
  let dir = $"(runs-dir)/($run_id)"
  if not ($dir | path exists) {
    error make { msg: $"Run directory not found: ($dir)" }
  }
  # Load mode DAG and reported steps
  let mode_data = (load-mode-dag $mode)
  let all_steps = ($mode_data.steps? | default [])
  let reported = (load-run-steps $run_id)
  # Build verification results — one record per step
  let results = ($all_steps | each { |step_def|
    let rep_list = ($reported | where { |r| $r.step == $step_def.id })
    let rep = if ($rep_list | is-not-empty) { $rep_list | first } else { null }
    let strategy = ($step_def.on_error?.strategy? | default "Stop")
    {
      id: $step_def.id,
      reported: ($rep | is-not-empty),
      status: (if ($rep | is-not-empty) { $rep.status } else { "pending" }),
      strategy: $strategy,
      # A step blocks completion if: not reported AND strategy=Stop,
      # OR reported as fail AND strategy=Stop
      blocks: (
        (($rep | is-empty) and $strategy == "Stop") or
        (($rep | is-not-empty) and $rep.status == "fail" and $strategy == "Stop")
      ),
    }
  })
  let blockers = ($results | where blocks == true)
  # Non-blocking problems (fail/pending on non-Stop steps) are advisory only.
  let warnings = ($results | where { |r| not $r.blocks and $r.status in ["fail", "pending"] })
  let all_artifacts = ($reported | each { |r| $r.artifacts? | default [] } | flatten)
  let total_warnings = ($reported | each { |r| $r.warnings? | default 0 } | math sum)
  # Explicit --task wins over the task recorded at `run start`.
  let resolved_task = if ($task | is-not-empty) { $task } else { $current.task? | default "" }
  let ok = ($blockers | is-empty)
  let result = {
    run_id: $run_id,
    mode: $mode,
    task: $resolved_task,
    ok: $ok,
    steps_total: ($all_steps | length),
    steps_reported: ($reported | length),
    blockers: ($blockers | each { |b| { id: $b.id, status: $b.status, strategy: $b.strategy } }),
    warnings: ($warnings | each { |w| { id: $w.id, status: $w.status } }),
    artifacts: $all_artifacts,
    total_warnings: $total_warnings,
  }
  if $ok {
    # Mark run as completed in run.json
    let run_meta = (open $"($dir)/run.json")
    $run_meta | merge { completed_at: (now-iso), ok: true } | to json | save --force $"($dir)/run.json"
    # Clear current.json — no active run
    "{}" | save --force (current-run-file)
  }
  if $fmt == "json" {
    print ($result | to json)
  } else if $ok {
    print ""
    print $"(ansi green)✓ RUN COMPLETE(ansi reset): ($run_id)"
    print "══════════════════════════════════════════════════════════════════"
    print $" Mode: ($mode)"
    if ($resolved_task | is-not-empty) { print $" Task: ($resolved_task)" }
    print $" Steps: ($result.steps_reported)/($result.steps_total) reported"
    if $total_warnings > 0 { print $" Warnings: ($total_warnings) advisory (non-blocking)" }
    if ($all_artifacts | is-not-empty) {
      print ""
      print " Artifacts:"
      for a in $all_artifacts { print $" ($a)" }
    }
    if ($resolved_task | is-not-empty) {
      print ""
      print $" Close backlog item with: ontoref backlog done ($resolved_task)"
    }
    print ""
  } else {
    print ""
    print $"(ansi red)✗ RUN INCOMPLETE(ansi reset): ($run_id)"
    print "══════════════════════════════════════════════════════════════════"
    print $" ($blockers | length) blocker(s):"
    for b in $blockers {
      let reason = if $b.status == "pending" { "not reported" } else { $"($b.status) with on_error=Stop" }
      print $" ($b.id) — ($reason)"
    }
    print ""
  }
}

View File

@ -1,5 +1,5 @@
#!/usr/bin/env nu
# reflection/modules/validate.nu — ADR constraint validation runner.
# reflection/modules/validate.nu — ADR constraint validation + justfile convention checker.
#
# Interprets the typed constraint_check_type ADT exported by adrs/adr-schema.ncl.
# Each constraint.check record has a `tag` discriminant; this module dispatches
@ -9,6 +9,7 @@
# validate check-constraint <c> — run a single constraint record
# validate check-adr <id> — run all constraints for one ADR
# validate check-all — run all constraints across all accepted ADRs
# validate justfile — validate project justfile against convention schema
#
# Error handling: do { ... } | complete — never panics, always returns a result.
@ -279,3 +280,141 @@ export def "validate summary" []: nothing -> record {
soft_passing: ($soft | where passed == true | length),
}
}
# ── Justfile convention validator ────────────────────────────────────────────
# Detect which just module system the root justfile uses, from its top-level
# `import`/`mod` statements: Hybrid (both), Import, Mod, or Flat (neither,
# including a missing justfile).
def detect-just-module-system [justfile_path: string]: nothing -> string {
  if not ($justfile_path | path exists) { return "Flat" }
  let file_lines = (open --raw $justfile_path | lines)
  let uses_import = ($file_lines | any { |line| $line =~ '^import ' })
  let uses_mod = ($file_lines | any { |line| $line =~ '^mod ' })
  match [$uses_import, $uses_mod] {
    [true, true] => "Hybrid",
    [true, false] => "Import",
    [false, true] => "Mod",
    _ => "Flat",
  }
}
# Extract recipe names from a just file (lines starting with identifier at col 0, followed by whitespace or ':').
# Skips comments, indented recipe bodies, and attribute lines ("["); strips a
# leading '@' (just's quiet-recipe marker) and everything from the first
# space/colon onwards, then drops empties and de-duplicates.
def extract-just-recipes [file_path: string]: nothing -> list<string> {
  if not ($file_path | path exists) { return [] }
  open --raw $file_path
  | lines
  | where { |l| not ($l | str starts-with "#") and not ($l | str starts-with " ") and not ($l | str starts-with "\t") and not ($l | str starts-with "[") and ($l =~ '^[a-z_@][a-z0-9_@-]*[\s:]') }
  | each { |l| $l | str replace -r '[\s:].*' '' | str replace -r '^@' '' }
  | where { |n| ($n | str length) > 0 }
  | uniq
}
# Extract variable names from a just file: top-level (column 0), non-comment
# lines containing ':=' yield the trimmed text before the first ':='.
# Empty names are dropped and duplicates removed.
def extract-just-variables [file_path: string]: nothing -> list<string> {
  if not ($file_path | path exists) { return [] }
  let raw_lines = (open --raw $file_path | lines)
  $raw_lines
  | where { |line|
      ($line =~ ':=')
      and not ($line | str starts-with "#")
      and not ($line | str starts-with " ")
      and not ($line | str starts-with "\t")
    }
  | each { |line| $line | split row ':=' | first | str trim }
  | where { |name| ($name | str length) > 0 }
  | uniq
}
# Validate the project justfile tree against reflection/schemas/justfile-convention.ncl.
# Reports missing required modules, recipes, and variables.
#
# Flags:
#   --root  (-r)  project root to scan (falls back to ONTOREF_PROJECT_ROOT, then ONTOREF_ROOT)
#   --fmt   (-f)  output format "json" | "text"; default is actor-aware (agent -> json)
#   --actor (-a)  caller identity (falls back to ONTOREF_ACTOR, then "developer")
#
# Errors: raises via `error make` if the convention schema is missing or fails to export.
# Output: prints either the raw result record as JSON or a human-readable report.
export def "validate justfile" [
--root (-r): string = "", # Project root (default: ONTOREF_PROJECT_ROOT or ONTOREF_ROOT)
--fmt (-f): string = "", # json | text (default: actor-aware)
--actor (-a): string = "",
]: nothing -> nothing {
# Resolve project root: explicit flag wins, then env var fallbacks.
let project_root = if ($root | is-not-empty) { $root } else {
$env.ONTOREF_PROJECT_ROOT? | default $env.ONTOREF_ROOT
}
# Actor drives the default output format: agents get JSON, humans get text.
let actor = if ($actor | is-not-empty) { $actor } else { $env.ONTOREF_ACTOR? | default "developer" }
let fmt = if ($fmt | is-not-empty) { $fmt } else if $actor == "agent" { "json" } else { "text" }
# The convention schema lives under the ontoref install root, not the project root.
let schema_file = ([$env.ONTOREF_ROOT, "reflection", "schemas", "justfile-convention.ncl"] | path join)
if not ($schema_file | path exists) {
error make { msg: $"Convention schema not found: ($schema_file)" }
}
# daemon-export-safe returns null on export failure rather than raising.
let exported = (daemon-export-safe $schema_file --import-path $env.ONTOREF_ROOT)
if $exported == null {
error make { msg: "Failed to export justfile-convention.ncl" }
}
let conv = $exported.Convention
# Locate root justfile
let root_just = ([$project_root, "justfile"] | path join)
let detected_system = (detect-just-module-system $root_just)
# Collect all just files in the convention directory
let just_dir = ([$project_root, $conv.directory] | path join)
# Scan set = root justfile plus every {directory}/*{extension} file that exists.
let all_files = (
[[$root_just]] ++
(if ($just_dir | path exists) {
glob $"($just_dir)/*($conv.extension)"
} else { [] })
) | flatten | where { |f| $f | path exists }
# Extract recipes and variables across all files
let all_recipes = ($all_files | each { |f| extract-just-recipes $f } | flatten | uniq)
let all_variables = ($all_files | each { |f| extract-just-variables $f } | flatten | uniq)
# Check canonical modules: for each module, does {dir}/{name}{ext} exist?
let module_results = ($conv.canonical_modules | each { |m|
let mfile = ([$project_root, $conv.directory, $"($m.name)($conv.extension)"] | path join)
let present = ($mfile | path exists)
{
name: $m.name,
required: $m.required,
present: $present,
description: $m.description,
}
})
# Only required modules gate the overall verdict; optional gaps are informational.
let missing_required_modules = ($module_results | where required == true and present == false | get name)
let missing_optional_modules = ($module_results | where required == false and present == false | get name)
# Check required recipes
let missing_recipes = ($conv.required_recipes | where { |r| not ($r in $all_recipes) })
# Check required variables
let missing_variables = ($conv.required_variables | where { |v| not ($v in $all_variables) })
# ok = no missing required modules, recipes, or variables.
let ok = ($missing_required_modules | is-empty) and ($missing_recipes | is-empty) and ($missing_variables | is-empty)
let result = {
ok: $ok,
module_system_detected: $detected_system,
module_system_expected: ($conv.system | into string),
modules_present: ($module_results | where present == true | get name),
missing_required_modules: $missing_required_modules,
missing_optional_modules: $missing_optional_modules,
recipes_found: ($all_recipes | length),
missing_required_recipes: $missing_recipes,
variables_found: ($all_variables | length),
missing_required_variables: $missing_variables,
}
# Machine-readable path: emit the raw record as JSON and stop.
if $fmt == "json" {
print ($result | to json)
return
}
# Text output
let status_line = if $ok { "✓ justfile validates against convention" } else { "✗ justfile convention violations found" }
print $status_line
print $" module system: detected=($detected_system) expected=($conv.system)"
print $" modules present: ($result.modules_present | str join ', ')"
if ($missing_required_modules | is-not-empty) {
print $" MISSING required modules: ($missing_required_modules | str join ', ')"
}
if ($missing_optional_modules | is-not-empty) {
print $" optional modules absent: ($missing_optional_modules | str join ', ')"
}
if ($missing_recipes | is-not-empty) {
print $" MISSING required recipes: ($missing_recipes | str join ', ')"
}
if ($missing_variables | is-not-empty) {
print $" MISSING required variables: ($missing_variables | str join ', ')"
}
# Counts are only printed on success; failures already show the gaps above.
if $ok {
print $" recipes found: ($result.recipes_found) variables found: ($result.variables_found)"
}
}

View File

@ -443,10 +443,26 @@ export def help-group [group: string] {
fmt-section "Columns: ts | author | actor | level | action"
print ""
},
"migrate" => {
print ""
fmt-header "MIGRATE (protocol migrations)"
fmt-sep
fmt-info "Progressive, ordered protocol migrations. Applied = check passes. No state file."
print ""
fmt-cmd $"($cmd) migrate list" "all migrations with applied/pending status"
fmt-cmd $"($cmd) migrate pending" "only pending migrations"
fmt-cmd $"($cmd) migrate show <id>" "instructions for a specific migration"
print ""
fmt-aliases [
{ short: "mg", long: "migrate" },
{ short: "mg l", long: "migrate list" },
{ short: "mg p", long: "migrate pending" },
]
},
_ => {
print $" (ansi red)Unknown group: ($group)(ansi reset)"
print ""
fmt-info "Available groups: check | form | mode | adr | register | backlog | config | sync | coder | manifest | describe | log"
fmt-info "Available groups: check | form | mode | adr | register | backlog | config | sync | coder | manifest | describe | log | migrate"
print ""
},
}

View File

@ -73,6 +73,11 @@ export def group-command-info [group: string]: nothing -> list<record> {
{ name: accept, desc: "Proposed → Accepted", args: [{name: "id", prompt: "adr id (e.g. adr-001)", optional: false}] },
{ name: show, desc: "show ADR detail", args: [{name: "id", prompt: "adr id (empty=interactive)", optional: true}] },
],
"migrate" => [
{ name: list, desc: "all migrations with applied/pending status", args: [] },
{ name: pending, desc: "only pending migrations", args: [] },
{ name: show, desc: "instructions for a specific migration", args: [{name: "id", prompt: "migration id (e.g. 0001)", optional: false}] },
],
_ => [],
}
}
@ -207,6 +212,14 @@ export def run-group-command [group: string, sub: string, args: list<string>] {
_ => {},
}
},
"migrate" => {
match $sub {
"list" => { migrate list },
"pending" => { migrate pending },
"show" => { migrate show $a0 },
_ => {},
}
},
_ => {},
}
}

View File

@ -7,9 +7,6 @@ let module_type = {
} in
{
ModuleSystem = module_system_type,
Module = module_type,
Convention = {
system | module_system_type | default = 'Mod,
directory | String | default = "justfiles",

View File

@ -0,0 +1,330 @@
# Agent Protocol Upgrade Prompt
**Purpose:** Bring an existing ontoref-adopted project up to the agent-aware protocol v2.
Covers: just recipe convention compliance, reflection mode step schema, agent CLAUDE.md
entry-point, graph readiness, and a full agent simulation smoke test.
**Prerequisite:** Project already has `.ontology/`, `reflection/modes/`, and `ontoref`
installed globally (`~/.local/bin/ontoref`, via `just install-daemon` from the ontoref repo).
If the project hasn't adopted ontoref at all, run `project-full-adoption-prompt.md` first.
**Substitutions required before use:**
- `{project_name}` — kebab-case project identifier
- `{project_dir}` — absolute path to project root
---
## Phase 0 — Discovery: what is the current state
Run these before touching anything.
```sh
cd {project_dir}
# What does ontoref already see about this project?
ONTOREF_ACTOR=agent ontoref describe capabilities
# Just recipe categorization
ONTOREF_ACTOR=agent ontoref describe capabilities \
| from json | get just_recipes | group-by category | transpose key value \
| each { |r| { category: $r.key, count: ($r.value | length) } }
# Project flags: which features are detected?
ONTOREF_ACTOR=agent ontoref describe capabilities \
| from json | get project_flags
# Justfile convention violations
ONTOREF_ACTOR=agent ontoref validate justfile
# Existing reflection modes
ls reflection/modes/*.ncl 2>/dev/null || echo "no modes"
# Ontology graph — see nodes/edges and their level classification
ONTOREF_ACTOR=agent ontoref graph ontology
```
Identify:
- Which canonical just modules are missing (required: build, test, dev, ci)
- Whether `default` and `help` recipes exist in the root justfile
- Whether reflection modes have `actor` and `on_error` fields in their `steps[]`
- Whether `.ontology/core.ncl` nodes have `level` fields (for graph coloring)
- Whether `reflection/backlog.ncl` exists (for `has_backlog` flag)
---
## Phase 1 — Just recipe convention compliance
### 1a. Run the validator
```sh
ONTOREF_ACTOR=agent ontoref validate justfile --fmt json
```
Act on these fields in the output:
- `missing_required_modules` — canonical just files that must be created
- `missing_required_recipes` — `default` and/or `help` missing from root justfile
- `missing_required_variables` — `project_root` not declared anywhere
### 1b. Fix missing required modules
For each name in `missing_required_modules`, create `justfiles/{name}.just`:
```just
# {name} recipes for {project_name}
[doc("Show {name} help")]
help:
@just --list
```
Import it in the root `justfile`:
```just
import 'justfiles/{name}.just'
```
**Recipe → module mapping:**
| Module | What goes here |
|---------|----------------|
| `build` | Compilation, linking, binary generation, asset bundling |
| `test` | Unit, integration, property-based tests |
| `dev` | `fmt`, `lint`, `watch`, `clean`, local dev setup |
| `ci` | CI pipeline orchestration (`ci-full`, `ci-lint`, `ci-test`, `ci-audit`) |
| `distro`| Packaging, `install-*`, `release`, distribution targets |
| `docs` | Documentation generation and serving |
| `nickel`| Nickel typecheck, export, contract validation |
| `deploy`| Deployment to staging/production environments |
Move existing recipes to the correct module file. Group by concern, not by file size.
### 1c. Fix missing required recipes
If `default` is missing from root `justfile`:
```just
default:
@just --list
```
If `help` is missing:
```just
[doc("Show available recipes")]
help:
@just --list
```
### 1d. Fix missing required variables
```just
project_root := justfile_directory()
```
### 1e. Verify
```sh
ONTOREF_ACTOR=agent ontoref validate justfile
# Expected: "✓ justfile validates against convention"
```
---
## Phase 2 — Reflection mode step schema
For `run start` / `step report` / `graph <mode-id>` to work, each mode NCL file needs
steps with the v2 fields. Check what each mode currently has:
```sh
for f in reflection/modes/*.ncl; do
echo "=== $f ==="
nickel export --import-path $NICKEL_IMPORT_PATH "$f" 2>/dev/null \
| jq '[.steps[] | {id, actor: (.actor // "MISSING"), on_error: (.on_error // "MISSING")}]'
done
```
Or via the ontoref CLI:
```sh
ONTOREF_ACTOR=agent ontoref describe mode | from json \
| each { |m| { id: $m.id, steps: ($m.steps | length),
missing_fields: ($m.steps | where { |s| ($s.actor? | is-empty) or ($s.on_error? | is-empty) } | length) } }
```
### 2a. Required fields per step
Each step in `steps[]` must have:
```nickel
{
id = "kebab-step-id",
action = "What this step does (human-readable).",
actor = 'Agent, # 'Human | 'Agent | 'Both
on_error = 'Stop, # 'Stop | 'Continue | 'Retry
depends_on = [], # list of { step = "id", kind = 'Always } or { step = "id", kind = 'OnSuccess }
}
```
**`actor` decision:**
- Agent does it autonomously (builds, file writes, nickel export, API calls) → `'Agent`
- Human must do it (review, approve, deploy decision) → `'Human`
- Either depending on context → `'Both`
**`on_error` decision:**
- Failure invalidates subsequent steps (build fails → tests cannot run) → `'Stop`
- Step can fail without blocking others (optional lint, docs) → `'Continue`
- Transient failures should be retried → `'Retry`
### 2b. Verify graph output for each mode
```sh
ONTOREF_ACTOR=agent ontoref graph <mode-id>
```
Each mode should render a `flowchart TD` with nodes colored by actor class (human/agent/both)
and dependency edges. If you see `note["Failed to export mode"]`, the NCL export is broken.
---
## Phase 3 — CLAUDE.md agent entry-point
Add this block to `.claude/CLAUDE.md`. Insert before the last major section or append at
the end.
```markdown
## Agent Entry-Point Protocol
When arriving at this project as an agent, execute these discovery steps in order
before any code changes:
```sh
# 1. Project capabilities — what this project has and can do (JSON for parsing)
ONTOREF_ACTOR=agent ontoref describe capabilities
# 2. Available reflection modes — operational DAGs you can execute
ONTOREF_ACTOR=agent ontoref describe mode
# 3. Start a run before any structured work session
ONTOREF_ACTOR=agent ontoref run start <mode-id> --task "description"
# 4. Report each step as you complete it
ONTOREF_ACTOR=agent ontoref step report <mode-id> <step-id> --status pass|fail|skip
# 5. Verify mode completion (checks all required steps are reported)
ONTOREF_ACTOR=agent ontoref mode complete <mode-id>
```
Graph output (Mermaid DSL, parseable):
```sh
ONTOREF_ACTOR=agent ontoref graph ontology # ontology nodes + edges
ONTOREF_ACTOR=agent ontoref graph deps # Rust crate dependency graph
ONTOREF_ACTOR=agent ontoref graph flow # current run step statuses
ONTOREF_ACTOR=agent ontoref graph <mode-id> # mode DAG colored by actor
```
Justfile validation:
```sh
ONTOREF_ACTOR=agent ontoref validate justfile # check against convention schema
```
The `describe capabilities` JSON output contains: `project_flags` (has_rust, has_ui,
has_mdbook, has_nats, has_precommit, has_backlog, has_git_remote, open_prs, crates),
`just_recipes` (categorized by canonical module), `backlog_pending` count, and full
ontology/reflection metadata.
---
## Phase 4 — Graph readiness
### 4a. Ontology node levels
`graph ontology` colors nodes by the `level` field. Nodes without `level` fall back to
`project` class (dark neutral). Add `level` to nodes in `.ontology/core.ncl` where
the distinction matters:
```nickel
{ id = "my-node", name = "Name", level = 'Practice, ... }
# 'Axiom | 'Tension | 'Practice | 'Project | 'Moment
```
Validate:
```sh
ONTOREF_ACTOR=agent ontoref graph ontology
# classDef coloring should be visible in the Mermaid output
```
### 4b. Crate dependency graph (Rust projects only)
```sh
ONTOREF_ACTOR=agent ontoref graph deps
```
`Single-crate project — no inter-crate deps to show` is the correct output for
non-workspace projects. If workspace members are missing, check `Cargo.toml [workspace]`
has `members` listed.
---
## Phase 5 — Smoke test: full agent simulation
```sh
cd {project_dir}
# Discovery
ONTOREF_ACTOR=agent ontoref describe capabilities | from json \
| select project_flags backlog_pending
# List modes — pick one for the test
ONTOREF_ACTOR=agent ontoref describe mode | from json
# Start a run (replace <mode-id> and <first-step-id> with real values from above)
ONTOREF_ACTOR=agent ontoref run start <mode-id> \
--task "agent protocol upgrade smoke test"
# Pending steps
ONTOREF_ACTOR=agent ontoref run status
# Report one step
ONTOREF_ACTOR=agent ontoref step report <mode-id> <first-step-id> --status pass
# Flow graph — reported step should show as pass
ONTOREF_ACTOR=agent ontoref graph flow
# Complete the run
ONTOREF_ACTOR=agent ontoref mode complete <mode-id>
```
Expected:
- `describe capabilities` → parseable JSON, non-empty `project_flags` and `just_recipes`
- `run start` → `{ run_id, mode, task, started_at }`
- `run status` → all steps `pending` before any reports
- `graph flow` → reported step colored `pass`, rest `pending`
- `mode complete` → `ok: true` if all `Stop`-on-error steps were reported, else `blockers` list
---
## Checklist
### Just recipe convention
- [ ] `validate justfile` returns ok
- [ ] Required canonical modules present: `build`, `test`, `dev`, `ci`
- [ ] Root `justfile` has `default` and `help` recipes
- [ ] `project_root` variable declared
### Reflection mode step schema
- [ ] All modes export cleanly via `nickel export`
- [ ] Every step has `actor`, `on_error`, `depends_on` fields
- [ ] `graph <mode-id>` renders with actor-colored nodes for all modes
### CLAUDE.md agent entry-point
- [ ] `.claude/CLAUDE.md` has Agent Entry-Point Protocol section
### Graph readiness
- [ ] `graph ontology` renders without errors, node levels correct
- [ ] `graph deps` renders without errors
- [ ] `graph flow` renders after a run with at least one reported step
### Smoke test
- [ ] `describe capabilities` parses as JSON without errors
- [ ] `run start` → `step report` → `mode complete` cycle completes without errors
- [ ] `mode complete` returns `ok: true`
### Do NOT commit without developer review

View File

@ -0,0 +1,253 @@
# Manifest Self-Interrogation — Enrichment Prompt
**Purpose:** Populate `capabilities[]`, `requirements[]`, and `critical_deps[]` in
`{project_name}`'s `.ontology/manifest.ncl`. These fields answer the operational
self-knowledge questions that ontology Practice nodes don't: what does the project
do and why, what does it need to run, and what external dependencies carry blast
radius when they break.
**Substitutions required before use:**
- `{project_name}` — kebab-case project identifier
- `{project_dir}` — absolute path to project root
**Requires:** `ontoref` installed globally (`just install-daemon` from the ontoref repo).
---
## Bootstrap — source ontoref env vars
Before running any direct `nickel export` command, source the ontoref env into the current
shell:
```sh
cd {project_dir}
. $(which ontoref) --env-only
# NICKEL_IMPORT_PATH and ONTOREF_ROOT are now available in this shell session
```
---
## Context for the agent
You are enriching `.ontology/manifest.ncl` for project `{project_name}`. The three
new arrays are semantically distinct — do not conflate them:
- `capabilities[]` — operational, audience-facing. What the project offers, why it
was built, how it works at implementation level. Cross-references ontology node IDs
(`nodes[]`) and the ADRs that formalize design decisions (`adrs[]`). These are not
architectural invariants — they evolve freely.
- `requirements[]` — prerequisites to start. Classified by `env` (Production /
Development / Both) and `kind` (Tool / Service / EnvVar / Infrastructure). Focus
on `impact` (what breaks if absent) and `provision` (how to satisfy it).
- `critical_deps[]` — runtime load-bearing external dependencies. Not "you need this
to start" but "if this breaks or disappears, these capabilities degrade in these
ways". `failure_impact` is required. Include feature flags or fallback builds in
`mitigation`.
All entries must pass `nickel export` cleanly. Do not add entries you cannot
substantiate by reading the code.
---
## Phase 1 — Read the project
Before writing anything, understand the project. Read in order:
```sh
cd {project_dir}
# Architecture and purpose
cat README.md
cat .claude/CLAUDE.md 2>/dev/null || true
# Existing ontology — understand Practice nodes already declared
nickel export --import-path "$NICKEL_IMPORT_PATH" .ontology/core.ncl \
| jq '[.nodes[] | {id, name, level, description}]'
# Existing manifest — see what is already declared
nickel export --import-path "$NICKEL_IMPORT_PATH" .ontology/manifest.ncl \
| jq '{description, capabilities: (.capabilities | length), requirements: (.requirements | length), critical_deps: (.critical_deps | length)}'
# Cargo workspace — understand crate boundaries
cat Cargo.toml 2>/dev/null || true
ls crates/ 2>/dev/null || true
# External dependencies with most weight
cargo metadata --format-version 1 2>/dev/null \
| jq '[.packages[] | select(.source == null) | {name, dependencies: [.dependencies[] | .name]}]' \
2>/dev/null | head -60 || true
```
Identify:
1. What the project does for each audience (developer, agent, CI, end user)
2. Which Cargo features are optional vs default
3. What external services are consumed (databases, message buses, APIs)
4. What environment variables the project reads at startup
5. Which dependencies are foundational (the project cannot function if they break their contract)
---
## Phase 2 — Add `description` field
If `.ontology/manifest.ncl` has no `description` field, add it immediately after `repo_kind`:
```nickel
description = "One sentence: what is this project and for whom.",
```
Confirm with:
```sh
nickel export --import-path "$NICKEL_IMPORT_PATH" .ontology/manifest.ncl | jq .description
```
---
## Phase 3 — Populate `capabilities[]`
For each major capability the project offers, write one entry. Use the builders:
```nickel
capabilities = [
m.make_capability {
id = "kebab-id",
name = "Short Name",
summary = "One line: what does this capability do?",
rationale = "Why does this exist? What problem does it solve? What was consciously rejected?",
how = "Implementation level: key patterns, entry points, data flows.",
artifacts = [
"crates/foo/src/lib.rs",
"GET /api/endpoint",
"reflection/modes/foo.ncl",
],
adrs = [], # ADR IDs that formalize decisions in this capability
nodes = [], # ontology node IDs from core.ncl that this capability manifests
},
],
```
**Guidelines:**
- Aim for 2–6 capabilities. Do not list every feature — group by audience-facing concern.
- `nodes[]` must match IDs declared in `.ontology/core.ncl`. Verify:
```sh
nickel export --import-path "$NICKEL_IMPORT_PATH" .ontology/core.ncl | jq '[.nodes[].id]'
```
- `adrs[]` must match IDs in `adrs/adr-*.ncl`. Verify:
```sh
ls adrs/adr-*.ncl | sed -E 's|adrs/(adr-[0-9]+).*|\1|'
```
- `how` and `rationale` can be empty strings (`""`) if there is nothing substantive to say.
Do not pad with generic prose.
---
## Phase 4 — Populate `requirements[]`
For each prerequisite to run the project (prod or dev), write one entry:
```nickel
requirements = [
m.make_requirement {
id = "kebab-id",
name = "Human Name",
env = 'Both, # 'Production | 'Development | 'Both
kind = 'Tool, # 'Tool | 'Service | 'EnvVar | 'Infrastructure
version = "0.x", # empty string if no constraint
required = true,
impact = "What breaks or degrades if this is absent.",
provision = "How to install, set, or provision this.",
},
],
```
**Decision tree for `env`:**
- Only needed to build/test/lint → `'Development`
- Only needed when the service is deployed → `'Production`
- Needed in both contexts → `'Both`
**Decision tree for `kind`:**
- Binary on PATH (`nu`, `nickel`, `cargo`, `just`) → `'Tool`
- External running service (`postgres`, `redis`, `nats`) → `'Service`
- Environment variable that must be set (`DATABASE_URL`, `API_KEY`) → `'EnvVar`
- Filesystem layout, platform, or network topology → `'Infrastructure`
**What to include:**
- Runtime tools (nushell, nickel, just, docker)
- Feature-gated services (database, message bus) — mark `required = false` if optional
- Required env vars (especially secrets and URLs)
- Repo layout dependencies (sibling checkouts, path dependencies in Cargo.toml)
**What to exclude:**
- Standard Rust toolchain (assumed for all Rust projects)
- OS-level libraries already declared in Cargo dependencies
---
## Phase 5 — Populate `critical_deps[]`
List only external dependencies whose failure has a documented blast radius. This is not
a full dependency audit — focus on load-bearing external contracts:
```nickel
critical_deps = [
m.make_critical_dep {
id = "kebab-id",
name = "crate-or-service-name",
ref = "crates.io: foo / github.com/org/repo",
used_for = "Which capabilities or features depend on this.",
failure_impact = "What breaks if this dep disappears or breaks its API contract.",
mitigation = "Feature flags, fallback builds, or alternative paths. Empty if none.",
},
],
```
**Candidates to consider:**
- HTTP framework (axum, actix-web) — entire API surface goes down
- Serialization crates with unusual API stability guarantees
- `inventory` or other linker-section crates — catalog surfaces go silent
- Database driver — persistence layer failure
- Any proc-macro crate the project defines (breakage cascades to all users)
- External APIs called at runtime without a fallback
**Do not include:**
- `serde` / `tokio` / `tracing` — industry-stable, ubiquitous, low blast-radius
- Dev-only dependencies (test frameworks, criterion, cargo-nextest)
- Transitive dependencies the project has no direct contract with
---
## Phase 6 — Validate
```sh
cd {project_dir}
# Must export without errors
nickel export --import-path "$NICKEL_IMPORT_PATH" .ontology/manifest.ncl
# Confirm counts
nickel export --import-path "$NICKEL_IMPORT_PATH" .ontology/manifest.ncl \
| jq '{description, capabilities: (.capabilities | length), requirements: (.requirements | length), critical_deps: (.critical_deps | length)}'
# Confirm nodes[] cross-references are valid
nickel export --import-path "$NICKEL_IMPORT_PATH" .ontology/manifest.ncl \
| jq '[.capabilities[].nodes[]]' > /tmp/cap_nodes.json
nickel export --import-path "$NICKEL_IMPORT_PATH" .ontology/core.ncl \
| jq '[.nodes[].id]' > /tmp/core_ids.json
# All capability node refs must appear in core_ids — check manually if jq diff not available
# Smoke test describe output
ontoref --actor developer describe requirements
ONTOREF_ACTOR=agent ontoref describe capabilities | from json | get capabilities | length
```
---
## Checklist
- [ ] `description` field populated (non-empty)
- [ ] At least 1 capability entry with non-empty `summary`
- [ ] `nodes[]` entries verified against `.ontology/core.ncl` node IDs
- [ ] At least 1 requirement per environment context that applies (Production / Development)
- [ ] All `critical_deps` have non-empty `failure_impact`
- [ ] `nickel export` passes cleanly with no contract errors
- [ ] `describe requirements` renders without errors
- [ ] `ONTOREF_ACTOR=agent ontoref describe capabilities | from json | get capabilities | length` > 0

View File

@ -0,0 +1,625 @@
# Ontoref Full Adoption Prompt
**Purpose:** Complete onboarding of `{project_name}` into the ontoref protocol — or bring
an existing adoption up to the current version. Covers all adoption layers in dependency
order: protocol infrastructure → ontology enrichment → config surface → API surface →
manifest self-interrogation.
**Substitutions required before use:**
- `{project_name}` — kebab-case project identifier
- `{project_dir}` — absolute path to project root
- `{ontoref_source_dir}` — absolute path to the ontoref source checkout (only needed for
Cargo path dependencies in Phases 3c and 4a; not needed if ontoref crates are not used
as Rust dependencies)
**Run as:** `ontoref --actor developer` from `{project_dir}` (requires `ontoref` installed
globally via `just install-daemon` from the ontoref repo).
---
## Bootstrap — source ontoref env vars
Before running any direct `nickel export` command, source the ontoref env into the current
shell. This sets `NICKEL_IMPORT_PATH` and `ONTOREF_ROOT` without launching a full command:
```sh
cd {project_dir}
. $(which ontoref) --env-only
# NICKEL_IMPORT_PATH and ONTOREF_ROOT are now available in this shell session
```
All `nickel export` commands in this prompt assume these vars are set. Re-run the source
line if you open a new terminal.
---
## Phase 0 — Read the project first
Do not write anything until you have read and understood the project. This phase is not
optional — subsequent phases require accurate knowledge of what the project actually does.
```sh
cd {project_dir}
# Purpose, architecture, stack
cat README.md
cat .claude/CLAUDE.md 2>/dev/null || true
cat CLAUDE.md 2>/dev/null || true
# Existing ontology state
test -f .ontology/core.ncl && \
nickel export --import-path "$NICKEL_IMPORT_PATH" .ontology/core.ncl \
| jq '{nodes: [.nodes[] | {id, name, level}], edge_count: (.edges | length)}'
# Manifest if present
test -f .ontology/manifest.ncl && \
nickel export --import-path "$NICKEL_IMPORT_PATH" .ontology/manifest.ncl \
| jq '{repo_kind, description}'
# Rust crates and their purposes
cat Cargo.toml 2>/dev/null | grep -A2 '\[workspace\]' || cat Cargo.toml 2>/dev/null | head -20
ls crates/ 2>/dev/null || true
# Config loading pattern: does the project use nickel export + serde?
grep -rl 'nickel export\|DaemonNclConfig\|ConfigLoader\|config_from_ncl' \
crates/ src/ 2>/dev/null | head -10
# HTTP handlers: does the project expose an HTTP API?
grep -rl '#\[get\|#\[post\|#\[put\|Router::new\|axum\|actix' \
crates/ src/ 2>/dev/null | head -10
# External services: what does it connect to?
grep -rl 'SurrealDb\|nats\|postgres\|redis\|reqwest\|http::Client' \
crates/ src/ 2>/dev/null | head -10
```
Identify:
- What the project does for each audience (developer, agent, CI, end user)
- Whether it uses NCL schemas for configuration (Nickel-validated-overrides applies)
- Whether it exposes an HTTP API (`#[onto_api]` annotation applies)
- What external services it depends on (critical_deps candidates)
- What the existing ontology covers vs what is missing
---
## Phase 1 — Protocol infrastructure
Add missing v2 files. All steps are additive — nothing is overwritten.
```sh
cd {project_dir}
# Detect missing files
test -f .ontology/manifest.ncl && echo "manifest: present" || echo "manifest: MISSING"
test -f .ontology/connections.ncl && echo "connections: present" || echo "connections: MISSING"
# Add manifest.ncl if missing (template is installed with ontoref)
test -f .ontology/manifest.ncl || \
sed "s/{{ project_name }}/{project_name}/g" \
"$ONTOREF_ROOT/templates/ontology/manifest.ncl" > .ontology/manifest.ncl
# Add connections.ncl if missing
test -f .ontology/connections.ncl || \
sed "s/{{ project_name }}/{project_name}/g" \
"$ONTOREF_ROOT/templates/ontology/connections.ncl" > .ontology/connections.ncl
# Validate both parse
nickel export --import-path "$NICKEL_IMPORT_PATH" .ontology/manifest.ncl \
> /dev/null && echo "manifest: ok"
nickel export --import-path "$NICKEL_IMPORT_PATH" .ontology/connections.ncl \
> /dev/null && echo "connections: ok"
```
Check for deprecated `check_hint` fields in ADRs and migrate to typed `check` variants.
See `$ONTOREF_ROOT/reflection/templates/update-ontology-prompt.md` Phase 7 for the
migration mapping.
---
## Phase 2 — Ontology enrichment (core.ncl, state.ncl)
Follow `$ONTOREF_ROOT/reflection/templates/update-ontology-prompt.md` Phases 2–6 in full.
Key rules:
- Add nodes only for things that actually exist in code today — no aspirational nodes
- Advance dimension states only when transition conditions are demonstrably met
- Update `blocker` and `catalyst` to reflect current reality
- Every edit must pass `nickel export` before continuing to the next node
After completing ontology enrichment, run:
```sh
nickel export --import-path "$NICKEL_IMPORT_PATH" .ontology/core.ncl \
| jq '{nodes: (.nodes|length), edges: (.edges|length)}'
```
---
## Phase 3 — Config surface
**Skip this phase if the project has no structured NCL configuration system.**
This phase has three parts that build on each other: declare the config surface in the
manifest, apply the nickel-validated-overrides pattern to Rust services, and register
struct fields via `#[derive(ConfigFields)]`.
### 3a. Declare config_surface in manifest.ncl
Open `.ontology/manifest.ncl`. Identify:
- Where the project's NCL config files live (`config_root`)
- What the entry-point file is (`entry_point`, usually `config.ncl`)
- What sections exist (each top-level key in the merged export)
- Who reads each section (Rust structs, Nu scripts, CI pipelines)
```nickel
config_surface = m.make_config_surface {
config_root = "config/", # adjust to project layout
entry_point = "config.ncl",
kind = 'NclMerge, # 'NclMerge | 'SingleFile | 'TypeDialog
contracts_path = "nickel/contracts/", # where NCL contract files live
overrides_dir = "", # defaults to config_root
sections = [
m.make_config_section {
id = "server", # must match top-level NCL key
file = "config.ncl",
contract = "contracts.ncl -> ServerConfig",
description = "...",
rationale = "...",
mutable = true,
consumers = [
m.make_config_consumer {
id = "{project_name}-backend",
kind = 'RustStruct,
ref = "crates/{crate}/src/config.rs -> ServerConfig",
fields = [], # leave empty once #[derive(ConfigFields)] is in place
},
],
},
],
},
```
Validate:
```sh
nickel export --import-path "$NICKEL_IMPORT_PATH" .ontology/manifest.ncl | jq .config_surface
```
### 3b. Nickel-validated-overrides pattern
**Apply this if the project has Rust services that read NCL config AND accept env var or
CLI argument overrides.** Without this, env overrides bypass all NCL contract validation.
**The core insight:** JSON is valid Nickel syntax. Write env overrides as JSON to a temp
`.ncl` file and pass it as a second argument to `nickel export`. Nickel merges both files
and applies contracts to the merged result before any Rust struct is populated.
```text
OLD: load_from_ncl() → deserialize → apply_env_overrides(&mut self) ← bypasses Nickel
NEW: collect_env_overrides() → nickel export base.ncl overrides.ncl → deserialize
```
**Step 1 — Add `export_with_overrides` to the config loader:**
In the crate that calls `nickel export` (typically a platform-config or config-loader crate):
```rust
pub fn export_with_overrides(
ncl_path: &Path,
import_path: &str,
overrides: &serde_json::Value,
) -> Result<String, ConfigError> {
if overrides.is_null() || overrides == &serde_json::Value::Object(Default::default()) {
return plain_nickel_export(ncl_path, import_path);
}
let tmp = tempfile::NamedTempFile::with_suffix(".ncl")?;
serde_json::to_writer(&tmp, overrides)?;
let output = std::process::Command::new("nickel")
.args(["export", "--format", "json"])
.arg(ncl_path)
.arg(tmp.path())
.arg("--import-path").arg(import_path)
.output()?;
// tmp dropped here — cleaned up automatically
if output.status.success() {
Ok(String::from_utf8(output.stdout)?)
} else {
Err(ConfigError::NickelContract(String::from_utf8_lossy(&output.stderr).into()))
}
}
```
**Step 2 — Replace `apply_env_overrides(&mut self)` with `collect_env_overrides()`:**
```rust
// REMOVE: fn apply_env_overrides(&mut self) { self.port = env::var("PORT")... }
// ADD:
pub fn collect_env_overrides() -> serde_json::Value {
let mut overrides = serde_json::json!({});
// JSON shape must match NCL schema nesting exactly
if let Ok(port) = std::env::var("SERVER_PORT") {
if let Ok(n) = port.parse::<u16>() {
overrides["server"]["port"] = n.into();
}
}
// ... one block per env var, following the NCL schema nesting
overrides
}
```
**Step 3 — Wire through the load path:**
```rust
pub fn load() -> Result<Self, ConfigError> {
let overrides = Self::collect_env_overrides();
let json = export_with_overrides(&config_path, &import_path, &overrides)?;
Ok(serde_json::from_str(&json)?)
}
```
**Step 4 — Verify NCL schemas have real constraints, not bare type annotations:**
```sh
grep -n '| Number\b\|| String\b' {project_dir}/nickel/**/*.ncl 2>/dev/null || \
grep -rn '| Number\b\|| String\b' {project_dir}/config/ 2>/dev/null
```
Bare `| Number` passes any number. Constraints must use `from_validator` or `from_predicate`:
```nickel
# Weak — any number passes
port | Number = 8080
# Strong — contract rejects values outside 1024-65535
port | std.contract.from_validator (fun port =>
if port >= 1024 && port <= 65535 then 'Ok
  else 'Error { message = "port must be 1024-65535, got %{port}" }) = 8080
```
Without real constraints in NCL, the overrides pattern has no enforcement teeth.
**Validation tests:**
```sh
# Invalid override must produce a Nickel contract error, NOT start silently
SERVER_PORT=99999 cargo run -- 2>&1 | grep -i "nickel\|contract\|error"
# Valid override must start with the overridden value
SERVER_PORT=8090 cargo run -- 2>&1 | grep "port.*8090"
cargo clippy -- -D warnings
```
### 3c. `#[derive(ConfigFields)]` for compile-time field registration
Annotate every Rust struct that deserialises a config section. This gives the ontoref
daemon accurate field lists without hand-maintaining `fields[]` in the manifest.
**Add dependency** (in the crate containing config structs; path is relative to that crate,
assuming ontoref source is checked out as a sibling of this project):
```toml
[dependencies]
ontoref-ontology = { path = "../../{ontoref_source_dir}/crates/ontoref-ontology" }
```
**Annotate config structs:**
```rust
use ontoref_ontology::ConfigFields;
#[derive(Deserialize, ConfigFields)]
#[config_section(id = "server", ncl_file = "config/config.ncl")]
pub struct ServerConfig {
pub host: String,
pub port: u16,
// #[serde(rename = "tls_enabled")] is respected — renamed name is registered
pub tls: bool,
}
```
Rules:
- `id` must match the section `id` in `manifest.ncl → config_surface.sections[*].id`
- `ncl_file` is relative to the project root
- Only top-level fields of the annotated struct are registered; annotate nested structs
separately if their fields need to appear in the coherence report
**Verify registration:**
```rust
#[test]
fn config_fields_registered() {
for entry in inventory::iter::<ontoref_ontology::ConfigFieldsEntry> {
assert!(!entry.fields.is_empty(), "section {} has no fields", entry.section_id);
eprintln!("section={} struct={} fields={:?}",
entry.section_id, entry.struct_name, entry.fields);
}
}
```
**Coherence integration test:**
```rust
#[test]
fn ncl_rust_coherence() {
let root = std::path::Path::new(env!("CARGO_MANIFEST_DIR"))
.ancestors().find(|p| p.join(".ontology").exists())
.expect("project root not found");
for entry in inventory::iter::<ontoref_ontology::ConfigFieldsEntry> {
let ncl_path = root.join(entry.ncl_file);
let out = std::process::Command::new("nickel")
.args(["export", "--format", "json"])
.arg(&ncl_path).current_dir(root).output().unwrap();
assert!(out.status.success(), "nickel export failed: {}", entry.ncl_file);
let json: serde_json::Value = serde_json::from_slice(&out.stdout).unwrap();
let section = json.get(entry.section_id).or(Some(&json))
.and_then(|v| v.as_object())
.unwrap_or_else(|| panic!("section '{}' missing", entry.section_id));
let ncl_keys: std::collections::BTreeSet<&str> = section.keys().map(String::as_str).collect();
let rust_fields: std::collections::BTreeSet<&str> = entry.fields.iter().copied().collect();
let missing: Vec<_> = rust_fields.difference(&ncl_keys).collect();
assert!(missing.is_empty(),
"{} declares fields not in NCL: {:?}", entry.struct_name, missing);
}
}
```
Once `#[derive(ConfigFields)]` is in place, remove the `fields = [...]` lists from the
corresponding `config_consumer` entries in `manifest.ncl` — the derive macro is now
the source of truth.
---
## Phase 4 — API surface
**Skip this phase if the project exposes no HTTP API.**
If the project has an HTTP API served by axum or actix-web, annotate each handler with
`#[onto_api]` so the daemon can surface the full annotated catalog at `GET /api/catalog`.
### 4a. Add ontoref-derive dependency
Path is relative to the consuming crate, assuming ontoref source is a sibling project:
```toml
[dependencies]
ontoref-ontology = { path = "../../{ontoref_source_dir}/crates/ontoref-ontology" }
ontoref-derive = { path = "../../{ontoref_source_dir}/crates/ontoref-derive" }
```
### 4b. Annotate HTTP handlers
```rust
use ontoref_derive::onto_api;
#[onto_api(
method = "GET",
path = "/api/things",
description = "List all things with optional filter.",
auth = "bearer",
actors = ["developer", "agent"],
params = [("filter", "string", false, "optional substring filter")],
tags = ["things", "read"]
)]
async fn list_things(/* axum extractors */) -> impl IntoResponse { /* ... */ }
```
Fields:
- `method` — HTTP verb in caps: `"GET" | "POST" | "PUT" | "DELETE" | "PATCH"`
- `path` — route path as registered in the router (e.g. `"/projects/{slug}/things"`)
- `description` — one sentence, agent-readable
- `auth``"bearer" | "admin" | "none"`
- `actors` — list of actors allowed: `"developer" | "agent" | "ci" | "admin"`
- `params` — array of `(name, type, required, description)` tuples
- `tags` — grouping tags for catalog filtering
**Register inventory collection** in the crate's `lib.rs` or `main.rs`:
```rust
// In ontoref-ontology already: inventory::collect!(ApiRouteEntry);
// In your crate: this is automatic — inventory::submit! is emitted by the macro.
```
### 4c. Expose the catalog endpoint
Add the catalog route to the router (if not already provided by ontoref-daemon):
```rust
// If embedding ontoref-daemon routes:
// GET /api/catalog is already registered by ontoref-daemon's router.
// If building a standalone service with its own router, add:
use ontoref_ontology::ApiRouteEntry;
async fn api_catalog() -> axum::Json<serde_json::Value> {
    let routes: Vec<_> = inventory::iter::<ApiRouteEntry>.into_iter()
.map(|r| serde_json::json!({
"method": r.method, "path": r.path, "description": r.description,
"auth": r.auth, "actors": r.actors, "params": r.params, "tags": r.tags,
}))
.collect();
axum::Json(serde_json::json!({ "routes": routes }))
}
```
### 4d. Export the catalog for ontoref UI visibility
The ontoref daemon reads `#[onto_api]` entries from its own `inventory` at runtime.
Consumer projects run as separate binaries — their entries are never linked into the
ontoref-daemon process. To make the API surface visible in the ontoref UI, generate a
static `api-catalog.json` in the project root and commit it.
**Add `--dump-api-catalog` to the daemon binary's `Cli` struct** (in `main.rs`):
```rust
/// Print all #[onto_api] registered routes as JSON and exit.
/// Pipe to api-catalog.json so the ontoref UI can display this project's
/// API surface when registered as a non-primary slug.
#[arg(long)]
dump_api_catalog: bool,
```
Add an early-exit handler before the server starts (same pattern as `--hash-password`):
```rust
if cli.dump_api_catalog {
println!("{}", ontoref_ontology::api::dump_catalog_json());
return;
}
```
**Add the just recipe** — in `justfiles/assets.just` (create if absent, import in root `justfile`):
```just
# Export this daemon's API catalog to api-catalog.json.
# Run after any #[onto_api] annotation is added or changed.
# Read by the ontoref UI for non-primary project slugs.
[doc("Export #[onto_api] routes to api-catalog.json")]
export-api-catalog:
cargo run -p {daemon_crate} --no-default-features -- --dump-api-catalog > api-catalog.json
@echo "exported routes to api-catalog.json"
```
Replace `{daemon_crate}` with the crate name of the project's HTTP daemon binary.
**Run and commit:**
```sh
just export-api-catalog
git add api-catalog.json
```
Commit `api-catalog.json` alongside the `#[onto_api]` annotations — they change together.
### 4e. Verify
```sh
cargo check --all-targets
just export-api-catalog
# Confirm routes were captured:
cat api-catalog.json | jq '[.[] | {method, path, tags}]'
```
---
## Phase 5 — Manifest self-interrogation
Populate `capabilities[]`, `requirements[]`, and `critical_deps[]` in `.ontology/manifest.ncl`.
Follow `$ONTOREF_ROOT/reflection/templates/manifest-self-interrogation-prompt.md` in full.
**Quick reference for the three types:**
```nickel
capabilities = [
m.make_capability {
id = "kebab-id",
name = "Name",
summary = "One line: what does this capability do?",
rationale = "Why it exists. What was rejected.",
how = "Key patterns, entry points, data flows.",
artifacts = ["crates/foo/", "GET /api/foo"],
adrs = ["adr-001"], # IDs that formalize decisions here
nodes = ["practice-node-id"], # IDs from .ontology/core.ncl
},
],
requirements = [
m.make_requirement {
id = "id",
name = "Name",
env = 'Both, # 'Production | 'Development | 'Both
kind = 'Tool, # 'Tool | 'Service | 'EnvVar | 'Infrastructure
version = "",
required = true,
impact = "What breaks if absent.",
provision = "How to install/set/provision.",
},
],
critical_deps = [
m.make_critical_dep {
id = "id",
name = "crate-or-service",
ref = "crates.io: foo",
used_for = "Which capabilities depend on this.",
failure_impact = "What breaks if this dep disappears or breaks its contract.",
mitigation = "Feature flags, fallback builds, alternatives.",
},
],
```
---
## Phase 6 — Final validation
```sh
cd {project_dir}
# All .ontology/ files
for f in .ontology/core.ncl .ontology/state.ncl .ontology/gate.ncl \
.ontology/manifest.ncl .ontology/connections.ncl; do
test -f "$f" && \
nickel export --import-path "$NICKEL_IMPORT_PATH" "$f" > /dev/null \
&& echo "ok: $f" || echo "FAIL: $f"
done
# All ADRs
for f in adrs/adr-[0-9][0-9][0-9]-*.ncl; do
nickel export --import-path "$NICKEL_IMPORT_PATH" "$f" > /dev/null \
&& echo "ok: $f" || echo "FAIL: $f"
done
# Rust: build, lint, tests
cargo check --all-targets --all-features
cargo clippy --all-targets --all-features -- -D warnings
cargo test config_fields_registered -- --nocapture 2>/dev/null || true
cargo test ncl_rust_coherence -- --nocapture 2>/dev/null || true
# Describe output
ontoref --actor developer describe project
ontoref --actor developer describe requirements
ONTOREF_ACTOR=agent ontoref describe capabilities | from json | get capabilities | length
```
---
## Checklist
### Protocol layer
- [ ] `.ontology/manifest.ncl` present and exports cleanly
- [ ] `.ontology/connections.ncl` present and exports cleanly
- [ ] `core.ncl` nodes reflect current implementation (no aspirational nodes)
- [ ] `state.ncl` dimension states match current reality
- [ ] All `check_hint` fields migrated to typed `check` variants
### Config surface
- [ ] `config_surface` declared in `manifest.ncl` (if project uses NCL config)
- [ ] All sections have `id`, `file`, `consumers` with accurate kinds
- [ ] Nickel-validated-overrides: `collect_env_overrides()` implemented (if applicable)
- [ ] Nickel-validated-overrides: `apply_env_overrides(&mut self)` removed
- [ ] NCL schemas have real constraints (`from_validator`, not bare `| Number`)
- [ ] `#[derive(ConfigFields)]` on all config structs that read NCL sections
- [ ] `cargo test config_fields_registered` passes
- [ ] `cargo test ncl_rust_coherence` passes
- [ ] `fields = [...]` removed from manifest consumers once derive is in place
### API surface
- [ ] `#[onto_api]` on all HTTP handlers (if project has an HTTP API)
- [ ] `GET /api/catalog` returns non-empty routes list
- [ ] All routes have accurate `auth`, `actors`, `tags`
### Manifest self-interrogation
- [ ] `description` field populated (non-empty)
- [ ] At least 1 `capability` entry with non-empty `summary`
- [ ] `capabilities[].nodes[]` verified against `core.ncl` node IDs
- [ ] At least 1 `requirement` per relevant environment
- [ ] All `critical_deps` have non-empty `failure_impact`
### Delivery
- [ ] `describe project` returns complete picture
- [ ] `describe requirements` renders without errors
- [ ] No orphaned `describe diff` changes (everything committed or staged intentionally)
- [ ] Do NOT commit — developer reviews the diff first

View File

@ -7,7 +7,23 @@ session with ontoref available.
**Substitutions required before use:**
- `{project_name}` — kebab-case project identifier
- `{project_dir}` — absolute path to project root
- `{ontoref_dir}` — absolute path to the ontoref checkout
**Requires:** `ontoref` installed globally (`just install-daemon` from the ontoref repo).
---
## Bootstrap — source ontoref env vars
Before running any direct `nickel export` command, source the ontoref env into the current
shell. This sets `NICKEL_IMPORT_PATH` and `ONTOREF_ROOT` without launching a full command:
```sh
cd {project_dir}
. $(which ontoref) --env-only
# NICKEL_IMPORT_PATH and ONTOREF_ROOT are now available in this shell session
```
All `nickel export` commands in this prompt assume these vars are set.
---
@ -33,22 +49,22 @@ cd {project_dir}
test -f .ontology/manifest.ncl && echo "manifest: present" || echo "manifest: MISSING"
test -f .ontology/connections.ncl && echo "connections: present" || echo "connections: MISSING"
# Step 1b: add manifest.ncl if missing
# Step 1b: add manifest.ncl if missing (template installed with ontoref)
test -f .ontology/manifest.ncl || \
sed 's/{{ project_name }}/{project_name}/g' \
{ontoref_dir}/templates/ontology/manifest.ncl > .ontology/manifest.ncl
"$ONTOREF_ROOT/templates/ontology/manifest.ncl" > .ontology/manifest.ncl
# Step 1c: add connections.ncl if missing
test -f .ontology/connections.ncl || \
sed 's/{{ project_name }}/{project_name}/g' \
{ontoref_dir}/templates/ontology/connections.ncl > .ontology/connections.ncl
"$ONTOREF_ROOT/templates/ontology/connections.ncl" > .ontology/connections.ncl
# Step 1d: validate both files parse
nickel export --import-path {ontoref_dir}/ontology:{ontoref_dir}/ontology/schemas:{ontoref_dir}/ontology/defaults:{ontoref_dir} \
.ontology/manifest.ncl > /dev/null && echo "manifest: ok"
nickel export --import-path "$NICKEL_IMPORT_PATH" .ontology/manifest.ncl \
> /dev/null && echo "manifest: ok"
nickel export --import-path {ontoref_dir}/reflection/schemas:{ontoref_dir} \
.ontology/connections.ncl > /dev/null && echo "connections: ok"
nickel export --import-path "$NICKEL_IMPORT_PATH" .ontology/connections.ncl \
> /dev/null && echo "connections: ok"
```
If either validation fails, read the file, fix the import path or schema mismatch, and revalidate
@ -62,19 +78,19 @@ Run these commands and read the output before making any changes to core.ncl or
```sh
# Full project self-description (identity, axioms, practices, gate)
./scripts/ontoref describe project
ontoref describe project
# Semantic diff vs HEAD — shows what changed since last commit
./scripts/ontoref describe diff
ontoref describe diff
# What modes are available, what gates allow
./scripts/ontoref describe guides
ontoref describe guides
# Current gate state and dimension health
./scripts/ontoref describe gate
ontoref describe gate
# API surface available (requires daemon running)
./scripts/ontoref describe api
ontoref describe api
```
Read the output of each command. Note:
@ -94,11 +110,9 @@ Open `.ontology/core.ncl`. For each of the following, apply only what is actuall
For any practice or capability the project has implemented since the last ontology update,
add a node with:
- `id` — kebab-case, stable identifier
- `level``'Protocol | 'Integration | 'Application | 'Tooling`
- `pole``'Positive | 'Negative | 'Tension`
- `level``'Axiom | 'Tension | 'Practice | 'Project | 'Moment`
- `description` — one sentence, present tense, what it IS (not what it should be)
- `adrs` — list any ADR IDs that govern this node
- `practices` — list practice slugs if declared in `.ontology/state.ncl`
Do NOT add aspirational nodes. If a feature is not yet implemented, do not add it.
@ -114,12 +128,12 @@ Valid edge kinds: `'Implements | 'Depends | 'Extends | 'Supersedes | 'Tensions`
### 3c. Tensions — update descriptions
For tension nodes (pole = 'Tension), update the description to reflect the current root cause
For tension nodes (`level = 'Tension`), update the description to reflect the current root cause
if it has changed. Tensions describe real trade-offs the project has made, not theoretical ones.
After editing, validate:
```sh
nickel export --import-path {ontoref_dir}/ontology:{ontoref_dir} .ontology/core.ncl > /dev/null
nickel export --import-path "$NICKEL_IMPORT_PATH" .ontology/core.ncl > /dev/null
```
---
@ -138,8 +152,7 @@ demonstrably met (code exists, tests pass, ADR written — not "in progress").
After editing:
```sh
nickel export --import-path {ontoref_dir}/ontology:{ontoref_dir}/ontology/defaults:{ontoref_dir} \
.ontology/state.ncl > /dev/null
nickel export --import-path "$NICKEL_IMPORT_PATH" .ontology/state.ncl > /dev/null
```
---
@ -164,9 +177,7 @@ ls assets/ 2>/dev/null; ls docs/ 2>/dev/null; ls reflection/templates/ 2>/dev/nu
After editing:
```sh
nickel export \
--import-path {ontoref_dir}/ontology:{ontoref_dir}/ontology/schemas:{ontoref_dir}/ontology/defaults:{ontoref_dir} \
.ontology/manifest.ncl > /dev/null
nickel export --import-path "$NICKEL_IMPORT_PATH" .ontology/manifest.ncl > /dev/null
```
---
@ -187,8 +198,7 @@ Do NOT invent connections.
After editing:
```sh
nickel export --import-path {ontoref_dir}/reflection/schemas:{ontoref_dir} \
.ontology/connections.ncl > /dev/null
nickel export --import-path "$NICKEL_IMPORT_PATH" .ontology/connections.ncl > /dev/null
```
---
@ -210,7 +220,10 @@ If any files are found, for each ADR:
- File presence → `'FileExists { path = "...", present = true }`
- API response → `'ApiCall { endpoint = "...", json_path = "...", expected = ... }`
3. Replace `check_hint` with `check` using the typed variant
4. Validate: `nickel export --import-path {ontoref_dir}/adrs:{ontoref_dir} adrs/adr-NNN-*.ncl > /dev/null`
4. Validate:
```sh
nickel export --import-path "$NICKEL_IMPORT_PATH" adrs/adr-NNN-*.ncl > /dev/null
```
---
@ -222,24 +235,21 @@ Run all validations in sequence:
# All .ontology/ files
for f in .ontology/core.ncl .ontology/state.ncl .ontology/gate.ncl \
.ontology/manifest.ncl .ontology/connections.ncl; do
nickel export \
--import-path {ontoref_dir}/ontology:{ontoref_dir}/ontology/schemas:\
{ontoref_dir}/ontology/defaults:{ontoref_dir}/reflection/schemas:{ontoref_dir} \
"$f" > /dev/null && echo "ok: $f" || echo "FAIL: $f"
nickel export --import-path "$NICKEL_IMPORT_PATH" "$f" > /dev/null \
&& echo "ok: $f" || echo "FAIL: $f"
done
# All ADRs
for f in adrs/adr-*.ncl; do
nickel export --import-path {ontoref_dir}/adrs:{ontoref_dir} $f > /dev/null && echo "ok: $f" || echo "FAIL: $f"
nickel export --import-path "$NICKEL_IMPORT_PATH" "$f" > /dev/null \
&& echo "ok: $f" || echo "FAIL: $f"
done
# Re-run describe diff to confirm changes are coherent
./scripts/ontoref describe diff
```
ontoref describe diff
After all files pass, run the protocol update report:
```sh
./scripts/ontoref describe project
# Final project state
ontoref describe project
```
---

View File

@ -0,0 +1,143 @@
# Vendor Frontend Assets — Implementation Guide
**Purpose:** Establish the vendored-assets pattern in a project that serves static
frontend files through the ontoref daemon (or any Axum-based server with a static
file route). Covers the full setup: directory layout, just recipes, and template
references.
**Substitutions required before use:**
- `{project_dir}` — absolute path to project root
- `{daemon_crate}` — path to the Axum daemon crate (e.g. `crates/myproject-daemon`)
- `{assets_route}` — URL prefix the daemon serves assets from (e.g. `/assets`)
---
## Why vendor instead of CDN
CDN references in server-rendered templates create three failure modes that vendoring
eliminates:
1. **Version drift** — a CDN package can be yanked or silently updated at the minor
level depending on the version specifier used.
2. **Offline development** — daemon templates break without network access.
3. **Registration race** — some Cytoscape extensions auto-register against
`window.cytoscape` at load time; if the CDN is slow or returns a 404 for a
sub-resource (e.g. a CSS that does not exist in that package version), the
extension silently fails to register and `cy.pluginName(...)` throws at runtime
with no network error visible in the console.
The canonical signal that a CDN asset has a sub-resource problem: a plain
`curl -sL <url>` produces a body that starts with "Couldn't find the requested file"
instead of JavaScript. Always `curl -fsSL` (fail on HTTP errors) when downloading to catch this.
---
## Directory layout
```text
assets/
vendor/ ← all vendored JS; served at {assets_route}/vendor/<file>
<lib>@<ver>.js ← optional: keep version in filename for cache-busting
justfiles/
assets.just ← recipes for downloading/updating vendor assets
justfile ← root just file; imports assets.just
```
Vendor files are committed to the repository. They are not gitignored — the goal
is reproducible builds without network access.
---
## assets.just structure
```just
# Frontend asset management
#
# Vendored JS libs live in assets/vendor/ and are served by the daemon
# at {assets_route}/vendor/<file>. Pin versions explicitly; bump manually.
LIB_VERSION := "x.y.z"
# Download/update all vendored frontend JS dependencies
[doc("Vendor all frontend JS dependencies")]
vendor-js: vendor-<lib>
[doc("Vendor <lib>")]
vendor-<lib>:
mkdir -p assets/vendor
curl -fsSL \
"https://cdn.jsdelivr.net/npm/<lib>@{{LIB_VERSION}}/<lib>.js" \
-o assets/vendor/<lib>.js
@echo "vendored <lib>@{{LIB_VERSION}}"
```
Rules:
- One version variable per library, at the top of the file.
- `vendor-js` is the aggregate recipe — depends on all individual `vendor-*` recipes.
- Use `curl -fsSL` (not `-sL`): `-f` makes curl exit non-zero on HTTP errors, catching
404s before they write a garbage file to disk.
- Adding a new library: add a version variable, add a `vendor-<lib>` recipe, add it
as a dependency of `vendor-js`.
---
## Importing in the root justfile
```just
import 'justfiles/ci.just'
import 'justfiles/assets.just'
```
---
## Referencing vendor assets in Tera templates
In `{% block head %}`, load vendored scripts **after** their host library:
```html
<!-- Host library (CDN is acceptable for well-established, stable libs) -->
<script src="https://cdn.jsdelivr.net/npm/cytoscape@3.30.2/dist/cytoscape.min.js"></script>
<!-- Extension: vendored locally — loads after host, auto-registers on window.cytoscape -->
<script src="{assets_route}/vendor/cytoscape-navigator.js"></script>
```
**Do not use** `<link rel="stylesheet">` for a vendor asset unless you have verified
the CSS file actually exists in the npm package (`curl -fsSI <url>` and check Content-Type).
Many Cytoscape extensions ship JS-only and use inline styles; fetching a non-existent
CSS produces a silent 404 that can confuse debugging.
---
## Verifying a new CDN asset before vendoring
```sh
# 1. Check the package contents on npm
curl -s "https://registry.npmjs.org/<lib>/<version>" | jq '.dist.unpackedSize, .dist.fileCount'
# 2. Confirm the specific file exists (fail fast)
curl -fsSI "https://cdn.jsdelivr.net/npm/<lib>@<version>/<file>.js" | head -3
# 3. Check auto-registration pattern (for Cytoscape extensions)
curl -sL "https://cdn.jsdelivr.net/npm/<lib>@<version>/<file>.js" | tail -10
# Look for: if (typeof cytoscape !== 'undefined') { register(cytoscape); }
# If absent, the extension requires manual registration: cytoscape.use(require('<lib>'))
```
---
## Agent steps to apply this pattern
1. Read `justfiles/assets.just` if it exists; otherwise create it from the structure
above.
2. Add the library version variable and `vendor-<lib>` recipe.
3. Add the new recipe as a dependency of `vendor-js`.
4. Run `just vendor-<lib>` to download the file.
5. Verify the downloaded file is valid JS: `head -3 assets/vendor/<lib>.js` — if the
first line starts with "Couldn't" or "<!DOCTYPE", the download silently failed; re-run
with `curl -fsSL` (the `-f` flag was likely missing).
6. Update the Tera template: replace the CDN `<script src="...">` with
`<script src="{assets_route}/vendor/<lib>.js"></script>`.
7. Remove any `<link rel="stylesheet">` that referenced the same CDN package unless
you have verified the CSS exists (step 2 of the verification section above).
8. Commit `assets/vendor/<lib>.js` and `justfiles/assets.just` together.