feat: personal/career schemas, content modes, search bookmarks, Nu 0.111 compat (ADR-006), commit optimize

This commit is contained in:
Jesús Pérez 2026-03-16 01:48:17 +00:00
parent 9c171ffea2
commit a7ee8dee6f
Signed by: jesus
GPG Key ID: 9F243E355E0BC939
55 changed files with 3723 additions and 223 deletions

View File

@ -18,6 +18,13 @@ lto = false
panic = "unwind"
incremental = true
[profile.clippy]
# Lint-only profile: no debug info, no codegen — clippy only needs MIR/HIR.
# Used by pre-commit to avoid bloating target/debug with DWARF/dSYM artifacts.
inherits = "dev"
debug = 0
incremental = true
[profile.release]
# Release profile - slow compilation, optimized binary
opt-level = 3

2
.gitignore vendored
View File

@ -1,5 +1,7 @@
CLAUDE.md
.claude
logs
logs-archive
utils/save*sh
.fastembed_cache
presentaciones

View File

@ -68,7 +68,7 @@ let d = import "../ontology/defaults/core.ncl" in
name = "ADR Lifecycle",
pole = 'Yang,
level = 'Practice,
description = "Architectural decisions follow: Proposed → Accepted → Superseded. Superseded ADRs retain constraints for historical reconstruction. Active Hard constraints drive the constraint set.",
description = "Architectural decisions follow: Proposed → Accepted → Superseded. Superseded ADRs retain constraints for historical reconstruction. Active Hard constraints drive the constraint set. Nodes declare which ADRs validate them via the adrs field — surfaced by describe and the daemon graph UI.",
artifact_paths = [
"adrs/schema.ncl",
"adrs/reflection.ncl",
@ -78,8 +78,10 @@ let d = import "../ontology/defaults/core.ncl" in
"adrs/adr-003-qa-and-knowledge-persistence-as-ncl.ncl",
"adrs/adr-004-ncl-pipe-bootstrap-pattern.ncl",
"adrs/adr-005-unified-auth-session-model.ncl",
"adrs/adr-006-nushell-0111-string-interpolation-compat.ncl",
"CHANGELOG.md",
],
adrs = ["adr-001", "adr-002", "adr-003", "adr-004", "adr-005", "adr-006"],
},
d.make_node {
@ -105,7 +107,7 @@ let d = import "../ontology/defaults/core.ncl" in
name = "Describe Query Layer",
pole = 'Yang,
level = 'Practice,
description = "describe.nu aggregates all project sources and answers self-knowledge queries: what IS this, what can I DO, what can I NOT do, what tools exist, what is the impact of changing X.",
description = "describe.nu aggregates all project sources and answers self-knowledge queries: what IS this, what can I DO, what can I NOT do, what tools exist, what is the impact of changing X. Renders Validated by section when a node declares adrs — surfacing declared ADR constraints alongside source, examples, and connections.",
artifact_paths = ["reflection/modules/describe.nu"],
},
@ -114,8 +116,9 @@ let d = import "../ontology/defaults/core.ncl" in
name = "Ontoref Ontology Crate",
pole = 'Yang,
level = 'Practice,
description = "Rust implementation for loading and querying .ontology/ NCL files as typed structs. Provides the Core, Gate, and State types for ecosystem-level introspection.",
description = "Rust implementation for loading and querying .ontology/ NCL files as typed structs. Provides Core, Gate, and State types for ecosystem-level introspection. Node carries artifact_paths (Vec<String>) and adrs (Vec<String>) — both serde(default) for zero-migration backward compatibility.",
artifact_paths = ["crates/ontoref-ontology/"],
adrs = ["adr-001"],
},
d.make_node {
@ -146,6 +149,31 @@ let d = import "../ontology/defaults/core.ncl" in
],
},
d.make_node {
id = "ontology-three-file-split",
name = "Ontology Three-File Split",
pole = 'Yang,
level = 'Practice,
description = "The .ontology/ directory separates three orthogonal concerns into three files. core.ncl captures what the project IS — invariant axioms and structural tensions; touching invariant=true nodes requires a new ADR. state.ncl captures where it IS vs where it wants to BE — current and desired state per dimension. gate.ncl defines when it is READY to cross a boundary — active membranes protecting key conditions. reflection/ reads all three and answers self-knowledge queries. This separation lets an agent understand a project without reading code — only by consulting the declarative graph.",
invariant = false,
artifact_paths = [".ontology/core.ncl", ".ontology/state.ncl", ".ontology/gate.ncl"],
},
d.make_node {
id = "adr-node-linkage",
name = "ADRNode Declared Linkage",
pole = 'Yang,
level = 'Practice,
description = "Nodes declare which ADRs validate them via the adrs field (Array String). This makes the ADR→Node relationship explicit in the graph rather than implicit in prose. describe surfaces a Validated by section per node. The daemon graph UI renders each ADR as a clickable link opening the full ADR via GET /api/adr/{id}. Field is serde(default) and Nickel default=[] — zero migration cost for existing nodes.",
artifact_paths = [
"ontology/schemas/core.ncl",
"crates/ontoref-ontology/src/types.rs",
"reflection/modules/describe.nu",
"crates/ontoref-daemon/templates/pages/graph.html",
"crates/ontoref-daemon/src/api.rs",
],
},
d.make_node {
id = "web-presence",
name = "Web Presence",
@ -174,6 +202,7 @@ let d = import "../ontology/defaults/core.ncl" in
"crates/ontoref-daemon/src/session.rs",
"crates/ontoref-daemon/src/ui/auth.rs",
"crates/ontoref-daemon/src/ui/login.rs",
"crates/ontoref-daemon/src/ui/search_bookmarks_ncl.rs",
"justfiles/ci.just",
],
},
@ -257,6 +286,53 @@ let d = import "../ontology/defaults/core.ncl" in
],
},
d.make_node {
id = "personal-ontology-schemas",
name = "Personal Ontology Schemas",
pole = 'Yin,
level = 'Practice,
description = "Typed NCL schema layer for personal and career artifacts: career.ncl (Skills, WorkExperience, Talks, Positioning, CompanyTargets, PublicationCards), personal.ncl (Content and Opportunity lifecycle — BlogPost to CV to Application, Job to Conference to Grant), project-card.ncl (canonical display metadata for portfolio and cv_repo publication). All types carry linked_nodes referencing .ontology/core.ncl node IDs — bridging career artifacts into the DAG.",
invariant = false,
artifact_paths = [
"ontology/schemas/career.ncl",
"ontology/schemas/personal.ncl",
"ontology/schemas/project-card.ncl",
"ontology/defaults/career.ncl",
"ontology/defaults/personal.ncl",
"ontology/defaults/project-card.ncl",
],
},
d.make_node {
id = "content-modes",
name = "Content & Career Reflection Modes",
pole = 'Yang,
level = 'Practice,
description = "NCL DAG modes for personal content and career operations: draft-application (job/grant/collaboration application anchored in personal ontology — gate alignment check, node selection, career trajectory render), draft-email, generate-article, update-cv, write-cfp. Each mode queries personal.ncl and core.ncl nodes to ground output in declared project artifacts rather than free-form prose.",
invariant = false,
artifact_paths = [
"reflection/modes/draft-application.ncl",
"reflection/modes/draft-email.ncl",
"reflection/modes/generate-article.ncl",
"reflection/modes/update-cv.ncl",
"reflection/modes/write-cfp.ncl",
],
},
d.make_node {
id = "search-bookmarks",
name = "Search Bookmarks",
pole = 'Yin,
level = 'Practice,
description = "Persistent bookmark store for search results over the ontology graph. Entries typed as BookmarkEntry (id, node_id, kind, title, level, term, actor, created_at, tags) and persisted to reflection/search_bookmarks.ncl via line-level NCL surgery — same atomic-write pattern as qa_ncl.rs. IDs are sequential sb-NNN, zero-padded. Concurrency-safe via NclWriteLock. Supports add and remove; accessible from the daemon search UI.",
invariant = false,
artifact_paths = [
"reflection/search_bookmarks.ncl",
"reflection/schemas/search_bookmarks.ncl",
"crates/ontoref-daemon/src/ui/search_bookmarks_ncl.rs",
],
},
d.make_node {
id = "drift-observation",
name = "Passive Drift Observation",
@ -283,6 +359,8 @@ let d = import "../ontology/defaults/core.ncl" in
{ from = "no-enforcement", to = "formalization-vs-adoption", kind = 'Resolves, weight = 'Medium },
{ from = "protocol-not-runtime", to = "no-enforcement", kind = 'Implies, weight = 'High },
{ from = "adr-lifecycle", to = "reflection-modes", kind = 'Complements, weight = 'Medium },
{ from = "adr-node-linkage", to = "adr-lifecycle", kind = 'ManifestsIn, weight = 'High },
{ from = "adr-node-linkage", to = "describe-query-layer", kind = 'Complements, weight = 'High },
{ from = "describe-query-layer", to = "dag-formalized", kind = 'DependsOn, weight = 'High },
{ from = "coder-process-memory", to = "describe-query-layer", kind = 'Complements, weight = 'Medium },
{ from = "ontoref-daemon", to = "ontoref-ontology-crate", kind = 'Complements, weight = 'High },
@ -319,6 +397,19 @@ let d = import "../ontology/defaults/core.ncl" in
{ from = "drift-observation", to = "reflection-modes", kind = 'DependsOn, weight = 'High,
note = "Invokes sync-ontology mode steps (scan, diff) as read-only sub-processes." },
# Personal Ontology Schemas edges
{ from = "personal-ontology-schemas", to = "dag-formalized", kind = 'ManifestsIn, weight = 'High,
note = "Career and personal artifacts are typed NCL records with linked_nodes — DAG connections into the core ontology." },
{ from = "personal-ontology-schemas", to = "self-describing", kind = 'Complements, weight = 'Medium,
note = "Personal/career schemas let projects describe not just what they ARE but who built them and for what trajectory." },
{ from = "content-modes", to = "reflection-modes", kind = 'ManifestsIn, weight = 'High },
{ from = "content-modes", to = "personal-ontology-schemas", kind = 'DependsOn, weight = 'High,
note = "Content and career modes query personal.ncl and core.ncl to ground output in declared artifacts." },
{ from = "search-bookmarks", to = "qa-knowledge-store", kind = 'Complements, weight = 'High,
note = "Both are NCL persistence layers using the same atomic-write surgery pattern. Q&A is for accumulated knowledge; bookmarks are for search navigation state." },
{ from = "search-bookmarks", to = "ontoref-daemon", kind = 'ManifestsIn, weight = 'High },
{ from = "ontoref-daemon", to = "search-bookmarks", kind = 'Contains, weight = 'High },
# Unified Auth Model edges
{ from = "unified-auth-model", to = "ontoref-daemon", kind = 'ManifestsIn, weight = 'High },
{ from = "unified-auth-model", to = "no-enforcement", kind = 'Contradicts, weight = 'Low,

View File

@ -25,7 +25,7 @@ let d = import "../ontology/defaults/state.ncl" in
to = "protocol-stable",
condition = "ADR-001 accepted, ontoref.dev published, at least two external projects consuming the protocol.",
catalyst = "First external adoption.",
blocker = "ontoref.dev not yet published; no external consumers yet. Auth model complete (session exchange, CLI Bearer, key rotation invalidation). Install pipeline: config form roundtrip and NATS topology operational; check-config-sync CI guard present.",
blocker = "ontoref.dev not yet published; no external consumers yet. Auth model complete. Install pipeline complete. Personal/career schema layer present; content modes operational. Nu 0.111 compat fixed (ADR-006). Syntaxis syntaxis-ontology crate has pending ES→EN migration errors.",
horizon = 'Months,
},
],
@ -52,7 +52,7 @@ let d = import "../ontology/defaults/state.ncl" in
from = "modes-and-web-present",
to = "fully-self-described",
condition = "At least 3 ADRs accepted, reflection/backlog.ncl present, describe project returns complete picture.",
catalyst = "ADR-001–ADR-004 authored (4 ADRs present, 3+ threshold met). Auth model, project onboarding, and session management nodes added to core.ncl in session 2026-03-13.",
catalyst = "ADR-001–ADR-006 authored (6 ADRs present). Auth model, project onboarding, and session management nodes added in 2026-03-13. Personal/career/project-card schemas, 5 content modes, search bookmarks, and ADR-006 (Nu 0.111 compat) added in session 2026-03-15.",
blocker = "none",
horizon = 'Weeks,
},

View File

@ -66,4 +66,6 @@
actors = ["developer", "agent"],
},
],
card = import "../card.ncl",
}

View File

@ -3,6 +3,9 @@ let s = import "ontoref-project.ncl" in
s.make_project {
slug = "ontoref",
root = "/Users/Akasha/Development/ontoref",
nickel_import_paths = ["/Users/Akasha/Development/ontoref"],
nickel_import_paths = [
"/Users/Akasha/Development/ontoref",
"/Users/Akasha/Development/ontoref/ontology",
],
keys = [],
}

View File

@ -18,7 +18,7 @@ repos:
- id: rust-clippy
name: Rust linting (cargo clippy)
entry: bash -c 'cargo clippy --all-targets -- -D warnings'
entry: bash -c 'CARGO_TARGET_DIR=target cargo clippy --all-targets --no-deps --profile clippy -- -D warnings'
language: system
types: [rust]
pass_filenames: false

View File

@ -7,6 +7,121 @@ ADRs referenced below live in `adrs/` as typed Nickel records.
## [Unreleased]
### Personal Ontology Schemas & Content Modes
Three new typed NCL schema families added to `ontology/schemas/` and `ontology/defaults/`:
| Schema | Types exported |
| --- | --- |
| `career.ncl` | `Skill`, `WorkExperience`, `Talk`, `Positioning`, `CompanyTarget`, `PublicationCard`, `CareerConfig` |
| `personal.ncl` | `Content` (BlogPost / ConferenceProposal / CV / Application / Email / Thread), `Opportunity` (Job / Conference / Grant / Collaboration / Podcast), `PersonalConfig` |
| `project-card.ncl` | `ProjectCard` — canonical display metadata (name, tagline, status, tags, tools, features, sort_order) for portfolio and cv_repo publication |
All types carry `linked_nodes | Array String` referencing `.ontology/core.ncl` node IDs.
`PublicationCard` is a career overlay referencing a canonical `project_node` from the portfolio repo.
Five NCL DAG reflection modes added to `reflection/modes/`:
| Mode | Purpose |
| --- | --- |
| `draft-application` | Job/grant/collaboration application anchored in personal ontology — gate alignment check, node selection, career trajectory render, status update |
| `draft-email` | Context-grounded email composition using ontology nodes as evidence |
| `generate-article` | Blog post / thread generation from project nodes and tensions |
| `update-cv` | CV refresh loop querying current career.ncl and core.ncl state |
| `write-cfp` | Conference proposal from Practice/Project nodes with gate alignment check |
### Search Bookmarks
Bookmark persistence for search results over the ontology graph. Mirrors Q&A NCL pattern (ADR-003).
- `reflection/schemas/search_bookmarks.ncl` — `BookmarkEntry` (id, node_id, kind, title, level, term, actor, created_at, tags) and `BookmarkStore` contracts
- `reflection/search_bookmarks.ncl` — typed store file; conforms to `BookmarkStore` contract
- `crates/ontoref-daemon/src/ui/search_bookmarks_ncl.rs` — `add_entry` / `remove_entry` via
line-level NCL surgery; auto-incremented `sb-NNN` ids; concurrency-safe via `NclWriteLock`
Tests: `next_id_empty`, `next_id_increments`, `insert_into_empty_store`, `delete_first_entry`,
`delete_second_entry`, `delete_missing_id_errors`, `escape_quotes_and_backslashes`,
`concurrent_add_produces_unique_ids` (tokio, 6 concurrent tasks, asserts unique ids).
### Protocol
- ADR-006 accepted: Nushell 0.111 string interpolation compatibility fix. Four print statements in
`reflection/bin/ontoref.nu` used `(identifier: expr)` patterns inside `$"..."` — parsed as
command calls by Nu 0.111 parser. Fix: bare `identifier: (expr)` for label-value pairs; plain
strings (no `$`) for zero-interpolation prints. Hard constraint: no `(label: expr)` inside
`$"..."` in any `.nu` file. Soft constraint: zero-interpolation strings must not use `$"..."`.
([adr-006](adrs/adr-006-nushell-0111-string-interpolation-compat.ncl))
### Self-Description — on+re Update
`.ontology/core.ncl` — 3 new Practice nodes, updated `adr-lifecycle` and `ontoref-daemon` nodes:
| Change | Detail |
| --- | --- |
| New node `personal-ontology-schemas` | Yin — career/personal/project-card typed NCL schemas with linked_nodes DAG bridges |
| New node `content-modes` | Yang — 5 NCL DAG modes for personal content and career operations |
| New node `search-bookmarks` | Yin — bookmark persistence layer; NCL surgery via search_bookmarks_ncl.rs |
| `adr-lifecycle` | ADR-006 added to `artifact_paths` and `adrs` list |
| `ontoref-daemon` | `search_bookmarks_ncl.rs` added to `artifact_paths` |
New edges: `personal-ontology-schemas → dag-formalized` (ManifestsIn/High),
`personal-ontology-schemas → self-describing` (Complements/Medium),
`content-modes → reflection-modes` (ManifestsIn/High),
`content-modes → personal-ontology-schemas` (DependsOn/High),
`search-bookmarks → qa-knowledge-store` (Complements/High),
`search-bookmarks → ontoref-daemon` (ManifestsIn/High),
`ontoref-daemon → search-bookmarks` (Contains/High).
`.ontology/state.ncl` — `self-description-coverage` catalyst updated to include 2026-03-15 session
additions. `protocol-maturity` blocker updated to reflect Nu 0.111 fix and personal schema layer
completion.
Previous: 4 axioms, 2 tensions, 17 practices. Current: 4 axioms, 2 tensions, 20 practices.
---
### ADRNode Declared Linkage
- `Node` schema extended with `adrs | Array String | default = []` (Nickel `ontology/schemas/core.ncl`
and inline `CoreConfig` type).
- Rust `Node` struct gains `artifact_paths: Vec<String>` and `adrs: Vec<String>`, both
`#[serde(default)]` — zero migration cost for existing nodes that omit the fields.
- `describe.nu` `build-howto` populates `adrs` from the node record; `render-howto` (ANSI),
`render-howto-md`, and `howto-to-md-string` (clipboard) all emit a **Validated by** section
when `adrs` is non-empty.
- New `GET /api/adr/{id}?slug=<slug>` endpoint — reads `adrs/<stem>.ncl`, exports via NCL
cache, returns JSON. No auth required (read-only, loopback boundary).
- Graph UI (`graph.html`): `adrs` field passed into Cytoscape node data. Detail panel renders
"Validated by" section with clickable `◆ <adr-id>` buttons that open a DaisyUI modal
fetching full ADR content via the new endpoint.
- Fixed glob pattern error in `describe.nu:build-howto`: `glob $"($full)/*.rs"` replaced with
`glob ($full | path join "*.rs")` — eliminates `//` in pattern when path has trailing separator.
### Self-Description — on+re Update
`.ontology/core.ncl` — new node, updated nodes, new edges:
| Change | Detail |
| --- | --- |
| New node `adr-node-linkage` | Practice: declares `adrs` field pattern, lists all 5 modified artifacts |
| `adr-lifecycle` | Description updated; `adrs = ["adr-001"…"adr-005"]` declared |
| `describe-query-layer` | Description updated to mention Validated by rendering |
| `ontoref-ontology-crate` | Description updated to mention `artifact_paths` + `adrs` fields; `adrs = ["adr-001"]` |
| New edge `adr-node-linkage → adr-lifecycle` | ManifestsIn/High |
| New edge `adr-node-linkage → describe-query-layer` | Complements/High |
Previous: 4 axioms, 2 tensions, 16 practices. Current: 4 axioms, 2 tensions, 17 practices.
### Ontology Three-File Split
- New Practice node `ontology-three-file-split` in `.ontology/core.ncl`: documents the
`core.ncl` (what IS) / `state.ncl` (where we ARE vs want to BE) / `gate.ncl` (when READY
to cross a boundary) separation and the role of `reflection/` in answering self-knowledge
queries without reading code.
- `assets/presentation/slides.md` speaker note updated to English with reflection mention.
- `assets/web/src/index.html` "Scattered Project Knowledge" solution bullets updated (bilingual)
to express the three-file split and `reflection/` self-knowledge layer.
### Auth & Session Model (ADR-005)
Unified key-to-session token exchange across all surfaces. All work gated on `#[cfg(feature = "ui")]`.

View File

@ -34,9 +34,9 @@ crates/ Rust implementation — typed struct loaders and mode executo
| Crate | Purpose |
| --- | --- |
| `ontoref-ontology` | `.ontology/` NCL → typed Rust structs: Node, Edge, Dimension, Gate, Membrane. Graph traversal, invariant queries. Zero deps. |
| `ontoref-ontology` | `.ontology/` NCL → typed Rust structs: Node, Edge, Dimension, Gate, Membrane. `Node` carries `artifact_paths` and `adrs` (`Vec<String>`, both `serde(default)`). Graph traversal, invariant queries. Zero deps. |
| `ontoref-reflection` | NCL DAG contract executor: ADR lifecycle, step dep resolution, config seal. `stratum-graph` + `stratum-state` required. |
| `ontoref-daemon` | HTTP UI (10 pages), actor registry, notification barrier, MCP (19 tools), search engine, SurrealDB, NCL export cache. |
| `ontoref-daemon` | HTTP UI (10 pages), actor registry, notification barrier, MCP (21 tools), search engine, search bookmarks, SurrealDB, NCL export cache. |
`ontoref-daemon` caches `nickel export` results (keyed by path + mtime), reducing full sync
scans from ~2m42s to &lt;30s. The daemon is always optional — every module falls back to direct
@ -54,8 +54,8 @@ automatically.
**Q&A Knowledge Store** — accumulated Q&A entries persist to `reflection/qa.ncl` (typed NCL,
git-versioned). Not localStorage. Any actor — developer, agent, CI — reads the same store.
**MCP Server** — 19 tools over stdio and streamable-HTTP. Categories: nodes, ADRs, modes,
backlog, Q&A, sessions, search, notifications. Representative subset:
**MCP Server** — 21 tools over stdio and streamable-HTTP. Categories: nodes, ADRs, modes,
backlog, Q&A, sessions, search, bookmarks, notifications. Representative subset:
| Tool | What it does |
| --- | --- |
@ -68,6 +68,22 @@ backlog, Q&A, sessions, search, notifications. Representative subset:
| `ontoref_describe` | Describe project ontology and constraints |
| `ontoref_sync_scan` | Scan for ontology drift |
**Search Bookmarks** — search results persist to `reflection/search_bookmarks.ncl` (typed NCL,
`BookmarkEntry` schema). Same atomic-write pattern as Q&A. IDs are sequential `sb-NNN`.
Concurrency-safe via `NclWriteLock`. Add and remove from the daemon search UI.
**Personal Ontology Schemas** — `ontology/schemas/career.ncl`, `personal.ncl`, `project-card.ncl`
provide typed contract layers for career and content artifacts (Skills, WorkExperience, Talks,
Content lifecycle, Opportunities, PublicationCards). All types carry `linked_nodes` referencing
core ontology node IDs — bridging career artifacts into the DAG. Five content/career reflection
modes (`draft-application`, `draft-email`, `generate-article`, `update-cv`, `write-cfp`) query
these schemas to ground output in declared project artifacts rather than free-form prose.
**ADRNode Linkage** — nodes declare which ADRs validate them via `adrs: Array String`.
`describe` surfaces a **Validated by** section per node (CLI and `--fmt md`). The graph UI
renders each ADR as a clickable link that opens the full ADR content in a modal via
`GET /api/adr/{id}`.
**Passive Drift Observation** — background file watcher that detects divergence between Yang
code artifacts and Yin ontology. Watches `crates/`, `.ontology/`, `adrs/`, `reflection/modes/`.
After a 15s debounce runs `sync scan + sync diff`; emits an `ontology_drift` notification when

View File

@ -0,0 +1,76 @@
let d = import "adr-defaults.ncl" in
d.make_adr {
id = "adr-006",
title = "Nushell 0.111 String Interpolation Compatibility Fix",
status = 'Accepted,
date = "2026-03-14",
context = "Nushell 0.111 introduced a breaking change in string interpolation parsing: expressions inside `$\"...\"` that match the pattern `(identifier: expr)` are now parsed as command calls rather than as record literals or literal text. This broke four print statements in reflection/bin/ontoref.nu that used patterns like `(kind: ($kind))`, `(logo: ($logo_file))`, `(parents: ($parent_slugs))`, and `(POST /actors/register)`. The bug manifested when running `ontoref setup` and `ontoref hooks-install` on any consumer project using Nu 0.111+. The minimum Nu version gate (>= 0.110.0) did not catch 0.111 regressions since it only guards the lower bound.",
decision = "Fix all four affected print statements by removing the outer parentheses from label-value pairs inside string interpolations, or by removing the `$` prefix from strings that contain no variable interpolation. The fix is minimal and non-semantic: `(kind: ($kind))` becomes `kind: ($kind)` (literal label + variable), and `$\"(POST /actors/register)\"` becomes `\"(POST /actors/register)\"` (plain string). The fix is applied to both the dev repo (reflection/bin/ontoref.nu) and the installed copy (~/.local/bin/ontoref via just install-daemon). The minimum version gate remains >= 0.110.0 but 0.111 is now the tested floor.",
rationale = [
{
claim = "Minimal-diff fix over workarounds",
detail = "The broken patterns were purely cosmetic print statements. The fix removes one level of parens — no logic change. Alternatives that added escape sequences or string concatenation would obscure the intent.",
},
{
claim = "Plain string for zero-interpolation prints",
detail = "Strings with no variable interpolation (like the POST endpoint hint) should never use `$\"...\"`. Removing the `$` prefix makes them immune to any future interpolation parsing changes and is the correct Nushell idiom.",
},
{
claim = "just install-daemon as the sync mechanism",
detail = "The installed copy at ~/.local/bin/ontoref is managed via just install-daemon. Patching both the dev repo and the installed copy via install-daemon is the established update path and keeps them in sync.",
},
],
consequences = {
positive = [
"ontoref setup and hooks-install work correctly on Nushell 0.111+",
"All consumer projects (vapora, typedialog, evol-rustelo) can run setup without errors",
"Plain-string fix removes implicit fragility from zero-interpolation print statements",
],
negative = [
"The 0.111 regression was not caught by the version gate — the gate only guards >= 0.110.0 and does not test 0.111 compatibility proactively",
],
},
alternatives_considered = [
{
option = "Raise minimum Nu version to 0.111 and document the breaking change",
why_rejected = "Does not fix the broken syntax — just makes the breakage explicit. Consumer projects already on 0.111 would still fail until the print statements are fixed.",
},
{
option = "Use escape sequences or string concatenation to embed literal parens",
why_rejected = "Nushell has no escape for parens in string interpolation. String concatenation (e.g. `'(kind: ' + $kind + ')'`) works but is significantly less readable than bare `kind: ($kind)`.",
},
],
constraints = [
{
id = "no-label-value-parens-in-interpolation",
claim = "String interpolations in ontoref.nu must not use `(identifier: expr)` patterns — use bare `identifier: (expr)` instead",
scope = "ontoref (reflection/bin/ontoref.nu, all .nu files)",
severity = 'Hard,
check_hint = "rg '\\([a-z_]+: \\(' reflection/bin/ontoref.nu",
rationale = "Nushell 0.111 parses (identifier: expr) inside $\"...\" as a command call. The fix pattern (bare label + variable interpolation) is equivalent visually and immune to this parser behaviour.",
},
{
id = "plain-string-for-zero-interpolation",
claim = "Print statements with no variable interpolation must use plain strings, not `$\"...\"`",
scope = "ontoref (all .nu files)",
severity = 'Soft,
check_hint = "rg '\\$\"[^(]*\"' reflection/ | grep -v '\\$('",
rationale = "Zero-interpolation `$\"...\"` strings are fragile against future parser changes and mislead readers into expecting variable substitution.",
},
],
related_adrs = [],
ontology_check = {
decision_string = "Fix four Nu 0.111 string interpolation regressions in ontoref.nu; enforce no (label: expr) inside interpolations; use plain strings for zero-interpolation prints",
invariants_at_risk = [],
verdict = 'Safe,
},
}

View File

@ -743,10 +743,14 @@ Es un grafo consultable que el sistema y los agentes leen.
<Footer />
<!--
core.ncl = invariants (what cannot change)
state.ncl = current position (where we are in each dimension)
gate.ncl = active guards (what is protected right now)
All three are queried by stratum-session-start.sh to inject context into every Claude session.
The .ontology/ directory separates three orthogonal concerns in three files:
core.ncl — what the project IS: invariant axioms and structural tensions.
state.ncl — where it IS vs where it wants to BE.
gate.ncl — when it is READY to cross a boundary.
reflection/ reads all three and answers self-knowledge queries.
This separation allows an agent to understand the project without reading code —
only by consulting the declarative graph.
-->
---

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,181 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 860 780" font-family="JetBrains Mono, ui-monospace, monospace">
<defs>
<style>
text { font-family: inherit; }
.title { font-size:15px; font-weight:700; fill:#f1f5f9; }
.label { font-size:11px; fill:#94a3b8; }
.mono { font-size:10px; fill:#7dd3fc; }
.mono-sm { font-size:9px; fill:#7dd3fc; }
.note { font-size:10px; fill:#64748b; font-style:italic; }
.badge { font-size:9px; font-weight:600; }
.head { font-size:12px; font-weight:700; fill:#e2e8f0; }
.env { font-size:9px; fill:#fcd34d; }
.arrow { stroke:#475569; stroke-width:1.5; fill:none; marker-end:url(#arr); }
.arrow-g { stroke:#84cc16; stroke-width:1.5; fill:none; marker-end:url(#arr-g); }
.arrow-r { stroke:#f87171; stroke-width:1.5; fill:none; marker-end:url(#arr-r); }
.arrow-b { stroke:#60a5fa; stroke-width:1.5; fill:none; marker-end:url(#arr-b); }
</style>
<marker id="arr" markerWidth="8" markerHeight="6" refX="7" refY="3" orient="auto"><polygon points="0 0,8 3,0 6" fill="#475569"/></marker>
<marker id="arr-g" markerWidth="8" markerHeight="6" refX="7" refY="3" orient="auto"><polygon points="0 0,8 3,0 6" fill="#84cc16"/></marker>
<marker id="arr-r" markerWidth="8" markerHeight="6" refX="7" refY="3" orient="auto"><polygon points="0 0,8 3,0 6" fill="#f87171"/></marker>
<marker id="arr-b" markerWidth="8" markerHeight="6" refX="7" refY="3" orient="auto"><polygon points="0 0,8 3,0 6" fill="#60a5fa"/></marker>
</defs>
<!-- Background -->
<rect width="860" height="780" rx="12" fill="#0f172a"/>
<rect x="1" y="1" width="858" height="778" rx="11" fill="none" stroke="#1e293b" stroke-width="1"/>
<!-- ══ TITLE ═══════════════════════════════════════════════════════════════ -->
<text x="30" y="36" class="title">ontoref — key &amp; auth model</text>
<line x1="30" y1="45" x2="830" y2="45" stroke="#1e293b" stroke-width="1"/>
<!-- ══ SECTION 1 · KEY GENERATION ════════════════════════════════════════ -->
<text x="30" y="68" class="head">① Key generation</text>
<!-- Box: hash -->
<rect x="30" y="76" width="240" height="52" rx="6" fill="#1e293b" stroke="#334155"/>
<text x="42" y="93" class="label">generate PHC hash</text>
<text x="42" y="108" class="mono">ontoref-daemon --hash-password &lt;pw&gt;</text>
<text x="42" y="120" class="note">→ $argon2id$v=19$... (stdout)</text>
<!-- Box: roles -->
<rect x="290" y="76" width="300" height="52" rx="6" fill="#1e293b" stroke="#334155"/>
<text x="302" y="93" class="label">KeyEntry fields (in keys-overlay.json / config)</text>
<text x="302" y="108" class="mono">role: admin | viewer</text>
<text x="302" y="120" class="mono">hash: &lt;argon2id PHC string&gt; label: &lt;name&gt;</text>
<!-- Arrow -->
<line x1="270" y1="102" x2="288" y2="102" class="arrow"/>
<!-- ══ SECTION 2 · DAEMON STARTUP ════════════════════════════════════════ -->
<text x="30" y="158" class="head">② Daemon startup — load keys</text>
<!-- env vars -->
<rect x="30" y="166" width="380" height="76" rx="6" fill="#1e293b" stroke="#334155"/>
<text x="42" y="183" class="label">env vars (priority order)</text>
<text x="42" y="198" class="env">ONTOREF_ADMIN_TOKEN_FILE</text><text x="195" y="198" class="label"> path to file containing PHC hash</text>
<text x="42" y="213" class="env">ONTOREF_ADMIN_TOKEN</text><text x="172" y="213" class="label"> inline PHC hash (fallback)</text>
<text x="42" y="228" class="note"> → loads as admin key for primary project at boot</text>
<!-- keys-overlay.json -->
<rect x="430" y="166" width="240" height="76" rx="6" fill="#1e293b" stroke="#334155"/>
<text x="442" y="183" class="label">~/.config/ontoref/keys-overlay.json</text>
<text x="442" y="198" class="mono">{ "&lt;slug&gt;": [ KeyEntry, … ] }</text>
<text x="442" y="213" class="note">persisted by PUT /projects/{slug}/keys</text>
<text x="442" y="228" class="note">loaded on daemon start, merged into registry</text>
<!-- daemon box -->
<rect x="680" y="166" width="148" height="76" rx="6" fill="#172554" stroke="#3b82f6"/>
<text x="754" y="196" class="head" text-anchor="middle">daemon</text>
<text x="754" y="212" class="label" text-anchor="middle">ProjectRegistry</text>
<text x="754" y="226" class="label" text-anchor="middle">keys: RwLock&lt;Vec&lt;KeyEntry&gt;&gt;</text>
<line x1="410" y1="204" x2="428" y2="204" class="arrow"/>
<line x1="670" y1="204" x2="678" y2="204" class="arrow"/>
<!-- ══ SECTION 3 · REQUEST FLOW ══════════════════════════════════════════ -->
<text x="30" y="276" class="head">③ Request auth flow</text>
<!-- No keys -->
<rect x="30" y="284" width="180" height="42" rx="6" fill="#1e293b" stroke="#334155"/>
<text x="120" y="301" class="label" text-anchor="middle">no keys configured</text>
<text x="120" y="315" class="mono" text-anchor="middle">auth_enabled() → false</text>
<line x1="210" y1="305" x2="248" y2="305" class="arrow-g"/>
<rect x="250" y="284" width="90" height="42" rx="6" fill="#14532d" stroke="#84cc16"/>
<text x="295" y="305" class="badge" text-anchor="middle" fill="#84cc16">PASS</text>
<text x="295" y="318" class="label" text-anchor="middle">(all requests)</text>
<!-- With keys -->
<rect x="30" y="344" width="180" height="42" rx="6" fill="#1e293b" stroke="#334155"/>
<text x="120" y="361" class="label" text-anchor="middle">keys configured</text>
<text x="120" y="375" class="mono" text-anchor="middle">check_primary_auth()</text>
<!-- no bearer -->
<line x1="210" y1="365" x2="248" y2="345" class="arrow-r"/>
<rect x="250" y="330" width="120" height="32" rx="6" fill="#450a0a" stroke="#f87171"/>
<text x="310" y="345" class="badge" text-anchor="middle" fill="#f87171">401</text>
<text x="310" y="358" class="label" text-anchor="middle">missing Bearer</text>
<!-- with bearer → verify -->
<line x1="210" y1="365" x2="248" y2="380" class="arrow-b"/>
<rect x="250" y="367" width="140" height="32" rx="6" fill="#1e293b" stroke="#60a5fa"/>
<text x="320" y="382" class="mono" text-anchor="middle">argon2id verify</text>
<text x="320" y="394" class="label" text-anchor="middle">~100ms per attempt</text>
<!-- pass/fail -->
<line x1="390" y1="383" x2="428" y2="365" class="arrow-g"/>
<rect x="430" y="352" width="90" height="28" rx="6" fill="#14532d" stroke="#84cc16"/>
<text x="475" y="366" class="badge" text-anchor="middle" fill="#84cc16">PASS</text>
<text x="475" y="376" class="note" text-anchor="middle">role attached</text>
<line x1="390" y1="383" x2="428" y2="393" class="arrow-r"/>
<rect x="430" y="383" width="90" height="28" rx="6" fill="#450a0a" stroke="#f87171"/>
<text x="475" y="397" class="badge" text-anchor="middle" fill="#f87171">401</text>
<text x="475" y="407" class="note" text-anchor="middle">rate-limited</text>
<!-- session shortcut -->
<rect x="540" y="344" width="220" height="42" rx="6" fill="#1e293b" stroke="#7c3aed"/>
<text x="650" y="361" class="label" text-anchor="middle">session token shortcut</text>
<text x="650" y="375" class="mono" text-anchor="middle">UUID v4 → SessionStore O(1)</text>
<line x1="520" y1="360" x2="538" y2="360" class="arrow-b"/>
<!-- ══ SECTION 4 · PROTECTED vs PUBLIC ═══════════════════════════════════ -->
<text x="30" y="434" class="head">④ Endpoint protection</text>
<!-- Protected -->
<rect x="30" y="442" width="270" height="74" rx="6" fill="#1e293b" stroke="#f97316"/>
<text x="42" y="459" class="label" fill="#f97316">■ check_primary_auth required</text>
<text x="42" y="474" class="mono">POST /api/nickel/export</text>
<text x="42" y="488" class="mono">POST /api/cache/invalidate</text>
<text x="42" y="502" class="mono">PUT /api/projects/{slug}/keys (admin role)</text>
<!-- Public -->
<rect x="320" y="442" width="270" height="74" rx="6" fill="#1e293b" stroke="#334155"/>
<text x="332" y="459" class="label">■ public (loopback boundary)</text>
<text x="332" y="474" class="mono">GET /api/search</text>
<text x="332" y="488" class="mono">GET /api/describe/*</text>
<text x="332" y="502" class="mono">GET /api/adr/{id} GET /health</text>
<!-- Sessions -->
<rect x="608" y="442" width="220" height="74" rx="6" fill="#1e293b" stroke="#7c3aed"/>
<text x="620" y="459" class="label" fill="#a78bfa">■ session-gated (ui feature)</text>
<text x="620" y="474" class="mono">POST /api/sessions (create)</text>
<text x="620" y="488" class="mono">GET /api/sessions (list)</text>
<text x="620" y="502" class="mono">DELETE /api/sessions/{id} (revoke)</text>
<!-- ══ SECTION 5 · CLI TOKEN FLOW ════════════════════════════════════════ -->
<text x="30" y="546" class="head">⑤ CLI token flow (store.nu)</text>
<rect x="30" y="554" width="390" height="58" rx="6" fill="#1e293b" stroke="#334155"/>
<text x="42" y="571" class="env">ONTOREF_TOKEN</text><text x="140" y="571" class="label"> → bearer-args → curl -H "Authorization: Bearer …"</text>
<text x="42" y="586" class="label">daemon reachable? → HTTP (token sent if set)</text>
<text x="42" y="600" class="label">daemon down? → subprocess nickel (no token, no daemon)</text>
<!-- ══ QUICK REFERENCE ═══════════════════════════════════════════════════ -->
<line x1="30" y1="630" x2="830" y2="630" stroke="#1e293b" stroke-width="1"/>
<text x="30" y="648" class="head">Quick reference</text>
<!-- col 1 -->
<text x="30" y="666" class="label">Generate hash</text>
<text x="160" y="666" class="mono-sm">ontoref-daemon --hash-password &lt;pw&gt;</text>
<text x="30" y="681" class="label">Set keys (admin)</text>
<text x="160" y="681" class="mono-sm">PUT /api/projects/{slug}/keys body: {keys:[{role,hash,label}]}</text>
<text x="30" y="696" class="label">Create session</text>
<text x="160" y="696" class="mono-sm">POST /api/sessions body: {key:&lt;password&gt;, actor:&lt;type&gt;}</text>
<!-- col 2 -->
<text x="30" y="715" class="label">Export NCL</text>
<text x="160" y="715" class="mono-sm">POST /api/nickel/export body: {path, import_path?} Bearer required</text>
<text x="30" y="730" class="label">Get ADR</text>
<text x="160" y="730" class="mono-sm">GET /api/adr/{id}?slug=&lt;slug&gt;</text>
<text x="30" y="745" class="label">Search</text>
<text x="160" y="745" class="mono-sm">GET /api/search?q=&lt;term&gt;&amp;slug=&lt;slug&gt;</text>
<text x="30" y="760" class="label">Describe project</text>
<text x="160" y="760" class="mono-sm">GET /api/describe/project?slug=&lt;slug&gt;</text>
</svg>

After

Width:  |  Height:  |  Size: 11 KiB

View File

@ -6,6 +6,7 @@
<title
data-en="Ontoref — A Self-Describing Ontology &amp; Reflection Protocol"
data-es="Ontoref — Un Protocolo de Ontolog&iacute;a y Reflexi&oacute;n Auto-Descriptivo"
data-key="ontoref-page-title"
>
Ontoref
</title>
@ -1294,6 +1295,7 @@
class="lang-btn"
data-en="Architecture"
data-es="Arquitectura"
data-key="ontoref-architecture-title"
>Architecture</a
>
<button
@ -1313,6 +1315,7 @@
class="status-badge"
data-en="Protocol + Runtime · v0.1.0"
data-es="Protocolo + Runtime · v0.1.0"
data-key="ontoref-badge"
>Protocol + Runtime · v0.1.0</span
>
<div class="logo-container">
@ -1323,39 +1326,45 @@
</div>
<p
class="tagline"
data-en="Structure that remembers why."
data-es="Estructura que recuerda el porqu&eacute;."
data-en="Structure that remembers why"
data-es="Estructura que recuerda el porqu&eacute;"
data-key="ontoref-tagline"
>
Structure that remembers why.
Structure that remembers why
</p>
<h1
data-en="Self-Describing Protocol for<br>Evolving Codebases"
data-es="Protocolo Auto-Descriptivo para<br>Codebases Evolutivas"
data-en="Self-Describing Protocol for<br>Evolving Systems"
data-es="Protocolo Auto-Descriptivo para<br>Sistemas Evolutivos"
data-key="ontoref-page-subtitle"
>
Self-Describing Protocol for<br />Evolving Codebases
Self-Describing Protocol for<br />Evolving Systems
</h1>
<p class="hero-subtitle">
<span
class="highlight"
data-en="Ontology + Reflection + Daemon + MCP"
data-es="Ontolog&iacute;a + Reflexi&oacute;n + Daemon + MCP"
data-key="ontoref-hero-highlight"
>Ontology + Reflection + Daemon + MCP</span
><span
data-en=" &mdash; encode what your codebase IS (invariants, tensions, constraints) and what it DOES (operational modes, actor flows, config seals) in machine-queryable directed acyclic graphs. First-class web UI (12 pages), MCP server (19 tools), and live session sharing for AI agents. One protocol for developers, agents, and CI."
data-es=" &mdash; codifica lo que tu codebase ES (invariantes, tensiones, constraints) y lo que HACE (modos operacionales, flujos de actor, config selladas) en grafos ac&iacute;clicos dirigidos consultables por m&aacute;quina. UI web de primer nivel (12 p&aacute;ginas), servidor MCP (19 herramientas) y compartici&oacute;n de tareas en vivo para agentes IA. Un protocolo para desarrolladores, agentes y CI."
data-en=" &mdash; encode what a system IS (invariants, tensions, constraints) and where it IS GOING (state dimensions, transition conditions, membranes) in machine-queryable directed acyclic graphs. Software projects, personal operational systems, agent contexts — same three files, same protocol. First-class web UI (12 pages), MCP server (19 tools), live session sharing. One protocol for developers, agents, CI, and individuals."
data-es=" &mdash; codifica lo que un sistema ES (invariantes, tensiones, constraints) y hacia d&oacute;nde VA (dimensiones de estado, condiciones de transici&oacute;n, membranas) en grafos ac&iacute;clicos dirigidos consultables por m&aacute;quina. Proyectos de software, sistemas operacionales personales, contextos de agente &mdash; los mismos tres ficheros, el mismo protocolo. UI web de primer nivel (12 p&aacute;ginas), servidor MCP (19 herramientas), compartici&oacute;n de sesiones en vivo. Un protocolo para desarrolladores, agentes, CI e individuos."
data-key="ontoref-hero-desc"
>
&mdash; encode what your codebase IS (invariants, tensions,
constraints) and what it DOES (operational modes, actor flows,
config seals) in machine-queryable directed acyclic graphs.
First-class web UI (12 pages), MCP server (19 tools), and live
session sharing for AI agents. One protocol for developers, agents,
and CI.
&mdash; encode what a system IS (invariants, tensions, constraints)
and where it IS GOING (state dimensions, transition conditions,
membranes) in machine-queryable directed acyclic graphs. Software
projects, personal operational systems, agent contexts &mdash; same
three files, same protocol. First-class web UI (12 pages), MCP
server (19 tools), live session sharing. One protocol for
developers, agents, CI, and individuals.
</span>
<br />
<span>
<strong
data-en="Protocol + Runtime. Zero enforcement."
data-es="Protocolo + Runtime. Sin coacci&oacute;n."
data-key="ontoref-hero-coda"
>Protocol + Runtime. Zero enforcement.</strong
>
</span>
@ -1366,9 +1375,10 @@
<section class="section">
<h2 class="section-title">
<span
data-en="The 6 Problems It Solves"
data-es="Los 6 Problemas que Resuelve"
>The 6 Problems It Solves</span
data-en="The 7 Problems It Solves"
data-es="Los 7 Problemas que Resuelve"
data-key="ontoref-problems-title"
>The 7 Problems It Solves</span
>
</h2>
<div class="problems-grid">
@ -1377,12 +1387,14 @@
<h3
data-en="Decisions Without Memory"
data-es="Decisiones Sin Memoria"
data-key="ontoref-problem-1-title"
>
Decisions Without Memory
</h3>
<ul
data-en="<li>Architectural choices made in chat, forgotten after rotation</li><li>No machine-queryable source of why something exists</li><li>ADRs as typed Nickel: invariants, constraints, supersession chain</li><li>Hard constraints enforced at every operation</li>"
data-es="<li>Decisiones arquitectónicas en chat, olvidadas tras rotación</li><li>Sin fuente consultable por máquina de por qué algo existe</li><li>ADRs como Nickel tipado: invariantes, constraints, cadena de supersedencia</li><li>Constraints Hard aplicadas en cada operación</li>"
data-key="ontoref-problem-1-desc"
>
<li>
Architectural choices made in chat, forgotten after rotation
@ -1401,12 +1413,14 @@
<h3
data-en="Invisible Configuration Drift"
data-es="Drift de Configuraci&oacute;n Invisible"
data-key="ontoref-problem-2-title"
>
Invisible Configuration Drift
</h3>
<ul
data-en="<li>Configs change outside any review cycle</li><li>No audit trail linking change to PR or ADR</li><li>Rollback requires manual file archaeology</li><li>Sealed profiles: sha256 hash, full history, verified rollback</li>"
data-es="<li>Configs cambian fuera de cualquier ciclo de revisión</li><li>Sin trazabilidad que vincule cambio a PR o ADR</li><li>Rollback requiere arqueología manual de ficheros</li><li>Perfiles sellados: hash sha256, historia completa, rollback verificado</li>"
data-key="ontoref-problem-2-desc"
>
<li>Configs change outside any review cycle</li>
<li>No audit trail linking change to PR or ADR</li>
@ -1419,12 +1433,13 @@
<div class="problem-card">
<div class="problem-number">03</div>
<h3 data-en="Agents Without Context" data-es="Agentes Sin Contexto">
<h3 data-en="Agents Without Context" data-es="Agentes Sin Contexto" data-key="ontoref-problem-3-title">
Agents Without Context
</h3>
<ul
data-en="<li>LLMs start each session with zero project knowledge</li><li>Same mistakes, same questions, no accumulation across operations</li><li>Actor registry tracks each session token, type, current mode, last seen — persisted to disk</li><li>MCP tools give agents direct DAG read/write: nodes, ADRs, backlog, Q&amp;A</li><li>Composed tasks shared via daemon — multiple actors see the same operational context live</li>"
data-es="<li>Los LLMs empiezan cada sesión con cero conocimiento del proyecto</li><li>Mismos errores, mismas preguntas, sin acumulación entre operaciones</li><li>El registro de actores rastrea cada token de sesión, tipo, modo actual, último visto — persistido en disco</li><li>Las herramientas MCP dan a los agentes acceso DAG de lectura/escritura directo: nodos, ADRs, backlog, Q&amp;A</li><li>Tareas compuestas compartidas via daemon — múltiples actores ven el mismo contexto operacional en vivo</li>"
data-key="ontoref-problem-3-desc"
>
<li>LLMs start each session with zero project knowledge</li>
<li>
@ -1450,24 +1465,29 @@
<h3
data-en="Scattered Project Knowledge"
data-es="Conocimiento de Proyecto Disperso"
data-key="ontoref-problem-4-title"
>
Scattered Project Knowledge
</h3>
<ul
data-en="<li>Guidelines in wikis, patterns in docs, decisions in Slack</li><li>No single source queryable by humans, agents, and CI equally</li><li>.ontology/ as DAG: nodes, edges, invariants, tensions, gates</li><li>Same graph serves developer context, agent initialization, CI validation</li>"
data-es="<li>Gu&iacute;as en wikis, patrones en docs, decisiones en Slack</li><li>Sin fuente única consultable por humanos, agentes y CI por igual</li><li>.ontology/ como DAG: nodos, aristas, invariantes, tensiones, gates</li><li>El mismo grafo sirve contexto de desarrollador, inicialización de agente, validación de CI</li>"
data-en="<li>Guidelines in wikis, patterns in docs, decisions in Slack</li><li>No single source queryable by humans, agents, and CI equally</li><li><code>.ontology/</code> separates three orthogonal concerns: <code>core.ncl</code> (what IS) · <code>state.ncl</code> (where we ARE vs want to BE) · <code>gate.ncl</code> (when READY to cross a boundary)</li><li><code>reflection/</code> reads all three and answers self-knowledge queries — an agent understands the project without reading code, only by consulting the declarative graph</li>"
data-es="<li>Gu&iacute;as en wikis, patrones en docs, decisiones en Slack</li><li>Sin fuente única consultable por humanos, agentes y CI por igual</li><li><code>.ontology/</code> separa tres concerns ortogonales: <code>core.ncl</code> (lo que ES) · <code>state.ncl</code> (d&oacute;nde ESTAMOS vs queremos estar) · <code>gate.ncl</code> (cu&aacute;ndo LISTO para cruzar una frontera)</li><li><code>reflection/</code> lee los tres y responde consultas de autoconocimiento — un agente entiende el proyecto sin leer c&oacute;digo, solo consultando el grafo declarativo</li>"
data-key="ontoref-problem-4-desc"
>
<li>Guidelines in wikis, patterns in docs, decisions in Slack</li>
<li>
No single source queryable by humans, agents, and CI equally
</li>
<li>
<code>.ontology/</code> as DAG: nodes, edges, invariants,
tensions, gates
<code>.ontology/</code> separates three orthogonal concerns:
<code>core.ncl</code> (what IS) &middot;
<code>state.ncl</code> (where we ARE vs want to BE) &middot;
<code>gate.ncl</code> (when READY to cross a boundary)
</li>
<li>
Same graph serves developer context, agent initialization, CI
validation
<code>reflection/</code> reads all three and answers
self-knowledge queries &mdash; an agent understands the project
without reading code, only by consulting the declarative graph
</li>
</ul>
</div>
@ -1477,12 +1497,14 @@
<h3
data-en="Protocol Fragmentation"
data-es="Fragmentaci&oacute;n de Protocolo"
data-key="ontoref-problem-5-title"
>
Protocol Fragmentation
</h3>
<ul
data-en="<li>Each project re-invents its own conventions</li><li>No shared contract for how operations are defined and executed</li><li>Reflection modes: typed DAG contracts for any workflow</li><li>One protocol adopted per-project, without enforcing uniformity</li>"
data-es="<li>Cada proyecto reinventa sus propias convenciones</li><li>Sin contrato compartido para c&oacute;mo se definen y ejecutan las operaciones</li><li>Modos de reflexi&oacute;n: contratos DAG tipados para cualquier flujo</li><li>Un protocolo adoptado por proyecto, sin imponer uniformidad</li>"
data-key="ontoref-problem-5-desc"
>
<li>Each project re-invents its own conventions</li>
<li>
@ -1500,12 +1522,14 @@
<h3
data-en="Knowledge Lost Between Sessions"
data-es="Conocimiento Perdido Entre Sesiones"
data-key="ontoref-problem-6-title"
>
Knowledge Lost Between Sessions
</h3>
<ul
data-en="<li>Q&amp;A answered in one session forgotten by the next</li><li>Agent re-asks questions already answered in previous sessions</li><li>Q&amp;A Knowledge Store: typed NCL, git-versioned, persists across browser resets</li><li>Notification barrier surfaces drift to agents proactively — pre_commit, drift, ontology_drift signals block until acknowledged</li>"
data-es="<li>Q&amp;A respondido en una sesión olvidado en la siguiente</li><li>El agente repite preguntas ya respondidas en sesiones anteriores</li><li>Q&amp;A Knowledge Store: NCL tipado, versionado en git, persiste a través de resets del navegador</li><li>La barrera de notificaciones transmite drift a los agentes de forma proactiva — señales pre_commit, drift, ontology_drift bloquean hasta ser reconocidas</li>"
data-key="ontoref-problem-6-desc"
>
<li>Q&amp;A answered in one session forgotten by the next</li>
<li>
@ -1522,6 +1546,41 @@
</li>
</ul>
</div>
<div class="problem-card">
<div class="problem-number">07</div>
<h3
data-en="Decisions Without a Map"
data-es="Decisiones Sin Mapa"
data-key="ontoref-problem-7-title"
>
Decisions Without a Map
</h3>
<ul
data-en="<li>Personal and professional decisions made against implicit, unverifiable assumptions</li><li>No queryable model of what you never compromise</li><li>No structured way to ask: does this opportunity violate who I am?</li><li>ontoref as personal operational ontology — same core/state/gate files applied to life, career, and ecosystem dimensions</li><li><code>jpl validate &quot;accept offer&quot;</code> → invariants_at_risk, relevant edges, verdict</li>"
data-es="<li>Decisiones personales y profesionales tomadas contra supuestos implícitos e inverificables</li><li>Sin modelo consultable de lo que nunca comprometes</li><li>Sin forma estructurada de preguntar: ¿viola esta oportunidad quién soy?</li><li>ontoref como ontología operacional personal — los mismos ficheros core/state/gate aplicados a dimensiones de vida, carrera y ecosistema</li><li><code>jpl validate &quot;aceptar oferta&quot;</code> → invariants_at_risk, aristas relevantes, veredicto</li>"
data-key="ontoref-problem-7-desc"
>
<li>
Personal and professional decisions made against implicit,
unverifiable assumptions
</li>
<li>No queryable model of what you never compromise</li>
<li>
No structured way to ask: does this opportunity violate who I
am?
</li>
<li>
ontoref as personal operational ontology — same
<code>core/state/gate</code> files applied to life, career, and
ecosystem dimensions
</li>
<li>
<code>jpl validate &quot;accept offer&quot;</code> →
invariants_at_risk, relevant edges, verdict
</li>
</ul>
</div>
</div>
</section>
@ -1531,6 +1590,7 @@
<span
data-en="Ontology &amp; Reflection — Yin and Yang"
data-es="Ontolog&iacute;a y Reflexi&oacute;n — Yin y Yang"
data-key="ontoref-duality-title"
>Ontology &amp; Reflection — Yin and Yang</span
>
</h2>
@ -1540,6 +1600,7 @@
<h3
data-en="Yin — The Ontology Layer"
data-es="Yin — La Capa de Ontolog&iacute;a"
data-key="ontoref-yin-title"
>
Yin — The Ontology Layer
</h3>
@ -1547,12 +1608,14 @@
class="sub"
data-en="What must be true"
data-es="Lo que debe ser verdad"
data-key="ontoref-yin-sub"
>
What must be true
</p>
<ul
data-en="<li><strong>Invariants</strong> — axioms that cannot change without a new ADR</li><li><strong>Tensions</strong> — structural conflicts the project navigates, never resolves</li><li><strong>Practices</strong> — confirmed patterns with artifact paths to real files</li><li><strong>Gates</strong> — membranes controlling readiness thresholds</li><li><strong>Dimensions</strong> — current vs desired state, with transition conditions</li><li><strong>Q&amp;A Knowledge Store</strong> — accumulated Q&amp;A persisted to NCL, git-versioned, queryable by any actor</li>"
data-es="<li><strong>Invariantes</strong> — axiomas que no pueden cambiar sin un nuevo ADR</li><li><strong>Tensiones</strong> — conflictos estructurales que el proyecto navega, nunca resuelve</li><li><strong>Prácticas</strong> — patrones confirmados con rutas a archivos reales</li><li><strong>Gates</strong> — membranas que controlan umbrales de preparación</li><li><strong>Dimensiones</strong> — estado actual vs deseado, con condiciones de transición</li><li><strong>Q&amp;A Knowledge Store</strong> — Q&amp;A acumulado persistido en NCL, versionado en git, consultable por cualquier actor</li>"
data-en="<li><strong>Invariants</strong> — axioms that cannot change without a new ADR</li><li><strong>Tensions</strong> — structural conflicts the project navigates, never resolves</li><li><strong>Practices</strong> — confirmed patterns with artifact paths to real files and declared ADR validators</li><li><strong>Gates</strong> — membranes controlling readiness thresholds</li><li><strong>Dimensions</strong> — current vs desired state, with transition conditions</li><li><strong>Q&amp;A Knowledge Store</strong> — accumulated Q&amp;A persisted to NCL, git-versioned, queryable by any actor</li>"
data-es="<li><strong>Invariantes</strong> — axiomas que no pueden cambiar sin un nuevo ADR</li><li><strong>Tensiones</strong> — conflictos estructurales que el proyecto navega, nunca resuelve</li><li><strong>Prácticas</strong> — patrones confirmados con rutas a archivos reales y validadores ADR declarados</li><li><strong>Gates</strong> — membranas que controlan umbrales de preparación</li><li><strong>Dimensiones</strong> — estado actual vs deseado, con condiciones de transición</li><li><strong>Q&amp;A Knowledge Store</strong> — Q&amp;A acumulado persistido en NCL, versionado en git, consultable por cualquier actor</li>"
data-key="ontoref-yin-desc"
>
<li>
<strong>Invariants</strong> — axioms that cannot change without
@ -1564,7 +1627,7 @@
</li>
<li>
<strong>Practices</strong> — confirmed patterns with artifact
paths to real files
paths to real files and declared ADR validators
</li>
<li>
<strong>Gates</strong> — membranes controlling readiness
@ -1584,6 +1647,7 @@
<h3
data-en="Yang — The Reflection Layer"
data-es="Yang — La Capa de Reflexi&oacute;n"
data-key="ontoref-yang-title"
>
Yang — The Reflection Layer
</h3>
@ -1591,12 +1655,14 @@
class="sub"
data-en="How things move and change"
data-es="C&oacute;mo las cosas se mueven y cambian"
data-key="ontoref-yang-sub"
>
How things move and change
</p>
<ul
data-en="<li><strong>Modes</strong> — typed DAG workflow contracts (preconditions, steps, postconditions)</li><li><strong>Forms</strong> — parameter collection driving modes</li><li><strong>ADR lifecycle</strong> — Proposed → Accepted → Superseded, with constraint history</li><li><strong>Actors</strong> — developer / agent / CI, same protocol, different capabilities</li><li><strong>Config seals</strong> — sha256-sealed profiles, drift detection, rollback</li><li><strong>Quick Actions</strong> — runnable shortcuts over modes; configured in <code>.ontoref/config.ncl</code></li><li><strong>Passive Drift Observer</strong> — watches code changes, emits <code>ontology_drift</code> notifications with missing/stale/drift/broken counts</li>"
data-es="<li><strong>Modos</strong> — contratos DAG tipados de flujo (precondiciones, pasos, postcondiciones)</li><li><strong>Formularios</strong> — recolección de parámetros que conducen modos</li><li><strong>Ciclo de vida ADR</strong> — Proposed → Accepted → Superseded, con historial de constraints</li><li><strong>Actores</strong> — developer / agent / CI, mismo protocolo, distintas capacidades</li><li><strong>Config seals</strong> — perfiles sellados con sha256, drift detection, rollback</li><li><strong>Quick Actions</strong> — atajos ejecutables sobre modos; configurados en <code>.ontoref/config.ncl</code></li><li><strong>Observador de Drift Pasivo</strong> — observa cambios de código, emite notificaciones <code>ontology_drift</code> con conteos de missing/stale/drift/broken</li>"
data-key="ontoref-yang-desc"
>
<li>
<strong>Modes</strong> — typed DAG workflow contracts
@ -1634,12 +1700,14 @@
<span
data-en="Ontology without Reflection = correct but static. Perfect invariants with no operations = dead documentation."
data-es="Ontolog&iacute;a sin Reflexi&oacute;n = correcta pero est&aacute;tica. Invariantes perfectos sin operaciones = documentaci&oacute;n muerta."
data-key="ontoref-tension-1"
>Ontology without Reflection = correct but static. Perfect
invariants with no operations = dead documentation.</span
><br />
<span
data-en="Reflection without Ontology = fluid but unanchored. Workflows that forget what they protect."
data-es="Reflexi&oacute;n sin Ontolog&iacute;a = fluida pero sin ancla. Flujos que olvidan lo que protegen."
data-key="ontoref-tension-2"
>Reflection without Ontology = fluid but unanchored. Workflows that
forget what they protect.</span
>
@ -1647,6 +1715,7 @@
class="tension-thesis"
data-en="The protocol lives in coexistence."
data-es="El protocolo vive en la coexistencia."
data-key="ontoref-tension-thesis"
>
The protocol lives in coexistence.
</p>
@ -1659,6 +1728,7 @@
class="layer-label"
data-en="DECLARATIVE LAYER · Nickel"
data-es="CAPA DECLARATIVA · Nickel"
data-key="ontoref-layer-decl-label"
>
DECLARATIVE LAYER · Nickel
</div>
@ -1670,6 +1740,7 @@
class="layer-desc"
data-en="Strong types, contracts, enums. Fails at definition time, not at runtime."
data-es="Tipos fuertes, contratos, enums. Falla en definici&oacute;n, no en runtime."
data-key="ontoref-layer-decl-desc"
>
Strong types, contracts, enums. Fails at definition time, not at
runtime.
@ -1680,6 +1751,7 @@
class="layer-label"
data-en="OPERATIONAL LAYER · Nushell"
data-es="CAPA OPERACIONAL · Nushell"
data-key="ontoref-layer-op-label"
>
OPERATIONAL LAYER · Nushell
</div>
@ -1691,6 +1763,7 @@
class="layer-desc"
data-en="Typed pipelines over structured data. No text streams."
data-es="Pipelines tipadas sobre datos estructurados. No streams de texto."
data-key="ontoref-layer-op-desc"
>
Typed pipelines over structured data. No text streams.
</div>
@ -1700,6 +1773,7 @@
class="layer-label"
data-en="ENTRY POINT · Bash → Nu"
data-es="PUNTO DE ENTRADA · Bash → Nu"
data-key="ontoref-layer-entry-label"
>
ENTRY POINT · Bash → Nu
</div>
@ -1711,6 +1785,7 @@
class="layer-desc"
data-en="Single entry point per project. Detects actor (developer/agent/CI), acquires lock, dispatches to correct Nu module."
data-es="Un &uacute;nico entry point por proyecto. Detecta actor (developer/agent/CI), adquiere lock, despacha al m&oacute;dulo Nu correcto."
data-key="ontoref-layer-entry-desc"
>
Single entry point per project. Detects actor
(developer/agent/CI), acquires lock, dispatches to correct Nu
@ -1722,6 +1797,7 @@
class="layer-label"
data-en="KNOWLEDGE GRAPH · .ontology/"
data-es="GRAFO DE CONOCIMIENTO · .ontology/"
data-key="ontoref-layer-graph-label"
>
KNOWLEDGE GRAPH · .ontology/
</div>
@ -1732,6 +1808,7 @@
class="layer-desc"
data-en="The project knows what it knows. Actor-agnostic. Machine-queryable via nickel export."
data-es="El proyecto sabe qu&eacute; sabe. Actor-agnostic. Consultable por m&aacute;quina v&iacute;a nickel export."
data-key="ontoref-layer-graph-desc"
>
The project knows what it knows. Actor-agnostic. Machine-queryable
via <code>nickel export</code>.
@ -1742,6 +1819,7 @@
class="layer-label"
data-en="RUNTIME LAYER · Rust + axum"
data-es="CAPA RUNTIME · Rust + axum"
data-key="ontoref-layer-runtime-label"
>
RUNTIME LAYER · Rust + axum
</div>
@ -1755,6 +1833,7 @@
class="layer-desc"
data-en="Optional persistent daemon. NCL export cache, HTTP UI (12 pages), MCP server (19 tools), actor registry, notification store, search engine, SurrealDB persistence. Never a protocol requirement."
data-es="Daemon persistente opcional. Cach&eacute; de exports NCL, UI HTTP (12 p&aacute;ginas), servidor MCP (19 herramientas), registro de actores, almac&eacute;n de notificaciones, motor de b&uacute;squeda, persistencia SurrealDB. Nunca un requisito del protocolo."
data-key="ontoref-layer-runtime-desc"
>
Optional persistent daemon. NCL export cache, HTTP UI (12 pages),
MCP server (19 tools), actor registry, notification store, search
@ -1766,6 +1845,7 @@
class="layer-label"
data-en="ADOPTION LAYER · Per-project"
data-es="CAPA DE ADOPCI&Oacute;N · Por proyecto"
data-key="ontoref-layer-adopt-label"
>
ADOPTION LAYER · Per-project
</div>
@ -1777,6 +1857,7 @@
class="layer-desc"
data-en="Each project maintains its own .ontology/ data. Ontoref provides the schemas, modules, and migration scripts. Zero lock-in."
data-es="Cada proyecto mantiene sus propios datos de .ontology/. Ontoref provee los schemas, m&oacute;dulos y scripts de migraci&oacute;n. Cero vendor lock-in."
data-key="ontoref-layer-adopt-desc"
>
Each project maintains its own <code>.ontology/</code> data.
Ontoref provides the schemas, modules, and migration scripts. Zero
@ -1789,7 +1870,7 @@
<!-- ── CRATES & TOOLING ── -->
<section class="section">
<h2 class="section-title">
<span data-en="Crates &amp; Tooling" data-es="Crates y Herramientas"
<span data-en="Crates &amp; Tooling" data-es="Crates y Herramientas" data-key="ontoref-crates-title"
>Crates &amp; Tooling</span
>
</h2>
@ -1805,7 +1886,7 @@
Load and query <code>.ontology/</code> NCL files as typed Rust
structs
</li>
<li>Node, Edge, Dimension, Gate, Membrane types</li>
<li>Node, Edge, Dimension, Gate, Membrane types</li><li><code>Node</code> carries <code>artifact_paths</code> and <code>adrs</code>, both <code>serde(default)</code></li>
<li>Graph traversal: callers, callees, impact queries</li>
<li>Invariant extraction and constraint validation</li>
<li>
@ -1881,10 +1962,15 @@
</h3>
<ul class="feature-text">
<li>
HTTP UI (axum + Tera): <strong>12 pages</strong> — dashboard, D3
HTTP UI (axum + Tera): <strong>12 pages</strong> — dashboard,
graph, search, sessions, notifications, backlog, Q&amp;A,
actions, modes, compose, manage/login, manage/logout
</li>
<li>
Graph node detail panel: artifacts, connections, and
<strong>ADR validators</strong> — each ADR is a clickable link
that opens the full record via <code>GET /api/adr/{id}</code>
</li>
<li>
Actor registry (DashMap): token, type (developer / agent / CI),
registered_at, last_seen, current_mode — serializable snapshot
@ -1954,6 +2040,7 @@
class="adopt-title"
data-en="Adopt in Any Project"
data-es="Adoptar en Cualquier Proyecto"
data-key="ontoref-adoption-title"
>
Adopt in Any Project
</h3>
@ -1961,6 +2048,7 @@
class="adopt-subtitle"
data-en="ontoref setup wires up any new or existing project — idempotent scaffold with optional auth key bootstrap."
data-es="ontoref setup conecta cualquier proyecto nuevo o existente — scaffold idempotente con bootstrap de auth keys opcional."
data-key="ontoref-adoption-subtitle"
>
<code>ontoref setup</code> wires up any new or existing project —
idempotent scaffold with optional auth key bootstrap.
@ -2025,6 +2113,7 @@
<span
data-en="Daemon &amp; MCP — Runtime Intelligence Layer"
data-es="Daemon &amp; MCP — Capa de Inteligencia en Tiempo de Ejecuci&oacute;n"
data-key="ontoref-mcp-title"
>Daemon &amp; MCP — Runtime Intelligence Layer</span
>
</h2>
@ -2037,6 +2126,7 @@
"
data-en="ontoref-daemon is an optional persistent process. It caches NCL exports, serves 12 UI pages, exposes 19 MCP tools, maintains an actor registry, stores notifications, indexes everything for search, and optionally persists to SurrealDB. Auth is opt-in: all surfaces (CLI, UI, MCP) exchange a project key for a UUID v4 session token via <code>POST /sessions</code>; CLI injects <code>ONTOREF_TOKEN</code> as Bearer automatically. It never changes the protocol — it accelerates and shares access to it. Configured via <code>~/.config/ontoref/config.ncl</code> (Nickel, type-checked); edit interactively with <code>ontoref config-edit</code>. Started via NCL pipe bootstrap: <code>ontoref-daemon-boot</code>."
data-es="ontoref-daemon es un proceso persistente opcional. Cachea exports NCL, sirve 12 páginas de UI, expone 19 herramientas MCP, mantiene un registro de actores, almacena notificaciones, indexa todo para búsqueda y opcionalmente persiste en SurrealDB. Auth es opt-in: todas las superficies (CLI, UI, MCP) intercambian una project key por un token de sesión UUID v4 via <code>POST /sessions</code>; la CLI inyecta <code>ONTOREF_TOKEN</code> como Bearer automáticamente. Nunca cambia el protocolo — acelera y comparte el acceso a él. Configurado via <code>~/.config/ontoref/config.ncl</code> (Nickel, type-checked); edición interactiva con <code>ontoref config-edit</code>. Iniciado via NCL pipe bootstrap: <code>ontoref-daemon-boot</code>."
data-key="ontoref-mcp-core-desc"
>
<code>ontoref-daemon</code> is an optional persistent process. It
caches NCL exports, serves 12 UI pages, exposes 19 MCP tools,
@ -2058,6 +2148,7 @@
class="daemon-col-title"
data-en="The Web UI — 12 Pages"
data-es="La UI Web — 12 P&aacute;ginas"
data-key="ontoref-ui-dashboard-title"
>
The Web UI — 12 Pages
</div>
@ -2093,9 +2184,10 @@
<span class="window-page-route">/graph</span>
<span class="window-page-name">Graph</span>
<span class="window-page-desc"
>D3 force-directed ontology graph — nodes colored by pole
(Yang=orange, Yin=blue, Spiral=purple), clickable with
detail panel, edge labels</span
>Cytoscape.js ontology graph — nodes colored by pole
(Yang=orange, Yin=blue, Spiral=purple), clickable detail
panel with artifacts, connections, and ADR links that open
the full record in a modal</span
>
</div>
<div class="window-page-row">
@ -2174,6 +2266,7 @@
class="daemon-col-title"
data-en="The MCP Server — 19 Tools"
data-es="El Servidor MCP — 19 Herramientas"
data-key="ontoref-mcp-query-title"
>
The MCP Server — 19 Tools
</div>
@ -2181,8 +2274,8 @@
<table class="mcp-table">
<thead>
<tr>
<th data-en="Tool" data-es="Herramienta">Tool</th>
<th data-en="Description" data-es="Descripci&oacute;n">
<th data-en="Tool" data-es="Herramienta" data-key="ontoref-mcp-table-tool-header">Tool</th>
<th data-en="Description" data-es="Descripci&oacute;n" data-key="ontoref-mcp-table-desc-header">
Description
</th>
</tr>
@ -2193,6 +2286,7 @@
<td
data-en="List available tools and usage"
data-es="Lista herramientas disponibles y uso"
data-key="ontoref-mcp-tool-help-desc"
>
List available tools and usage
</td>
@ -2202,6 +2296,7 @@
<td
data-en="Enumerate all registered projects"
data-es="Enumerar todos los proyectos registrados"
data-key="ontoref-mcp-tool-list-projects-desc"
>
Enumerate all registered projects
</td>
@ -2211,6 +2306,7 @@
<td
data-en="Set session default project context"
data-es="Establecer contexto de proyecto por defecto"
data-key="ontoref-mcp-tool-set-project-desc"
>
Set session default project context
</td>
@ -2220,6 +2316,7 @@
<td
data-en="Full project dashboard — health, drift, actors"
data-es="Dashboard completo del proyecto — salud, drift, actores"
data-key="ontoref-mcp-tool-project-status-desc"
>
Full project dashboard — health, drift, actors
</td>
@ -2229,6 +2326,7 @@
<td
data-en="Architecture overview and self-description"
data-es="Resumen de arquitectura y auto-descripci&oacute;n"
data-key="ontoref-mcp-tool-describe-desc"
>
Architecture overview and self-description
</td>
@ -2238,6 +2336,7 @@
<td
data-en="Free-text search across nodes, ADRs, modes"
data-es="B&uacute;squeda de texto libre en nodos, ADRs, modos"
data-key="ontoref-mcp-tool-search-desc"
>
Free-text search across nodes, ADRs, modes
</td>
@ -2247,6 +2346,7 @@
<td
data-en="Fetch ontology node by id"
data-es="Obtener nodo de ontolog&iacute;a por id"
data-key="ontoref-mcp-tool-get-desc"
>
Fetch ontology node by id
</td>
@ -2256,6 +2356,7 @@
<td
data-en="Full ontology node with edges and constraints"
data-es="Nodo completo con aristas y constraints"
data-key="ontoref-mcp-tool-get-node-desc"
>
Full ontology node with edges and constraints
</td>
@ -2265,6 +2366,7 @@
<td
data-en="List ADRs filtered by status"
data-es="Listar ADRs filtrados por estado"
data-key="ontoref-mcp-tool-list-adrs-desc"
>
List ADRs filtered by status
</td>
@ -2274,6 +2376,7 @@
<td
data-en="Full ADR content with constraints"
data-es="Contenido completo de ADR con constraints"
data-key="ontoref-mcp-tool-get-adr-desc"
>
Full ADR content with constraints
</td>
@ -2283,6 +2386,7 @@
<td
data-en="List all reflection modes"
data-es="Listar todos los modos de reflexi&oacute;n"
data-key="ontoref-mcp-tool-list-modes-desc"
>
List all reflection modes
</td>
@ -2292,6 +2396,7 @@
<td
data-en="Mode DAG contract — steps, preconditions, postconditions"
data-es="Contrato DAG del modo — pasos, pre/postcondiciones"
data-key="ontoref-mcp-tool-get-mode-desc"
>
Mode DAG contract — steps, preconditions, postconditions
</td>
@ -2301,6 +2406,7 @@
<td
data-en="Backlog items filtered by status"
data-es="Elementos de backlog filtrados por estado"
data-key="ontoref-mcp-tool-get-backlog-desc"
>
Backlog items filtered by status
</td>
@ -2310,6 +2416,7 @@
<td
data-en="Add or update_status on a backlog item"
data-es="A&ntilde;adir o actualizar estado de elemento del backlog"
data-key="ontoref-mcp-tool-backlog-desc"
>
Add or update_status on a backlog item
</td>
@ -2319,6 +2426,7 @@
<td
data-en="All hard + soft architectural constraints"
data-es="Todos los constraints arquitect&oacute;nicos hard + soft"
data-key="ontoref-mcp-tool-constraints-desc"
>
All hard + soft architectural constraints
</td>
@ -2328,6 +2436,7 @@
<td
data-en="List Q&amp;A knowledge store with optional filter"
data-es="Listar almac&eacute;n Q&amp;A con filtro opcional"
data-key="ontoref-mcp-tool-qa-list-desc"
>
List Q&amp;A knowledge store with optional filter
</td>
@ -2337,6 +2446,7 @@
<td
data-en="Persist new Q&amp;A entry to reflection/qa.ncl"
data-es="Persistir nueva entrada Q&amp;A en reflection/qa.ncl"
data-key="ontoref-mcp-tool-qa-add-desc"
>
Persist new Q&amp;A entry to reflection/qa.ncl
</td>
@ -2346,6 +2456,7 @@
<td
data-en="Quick actions catalog from .ontoref/config.ncl"
data-es="Cat&aacute;logo de acciones r&aacute;pidas de .ontoref/config.ncl"
data-key="ontoref-mcp-tool-action-list-desc"
>
Quick actions catalog from .ontoref/config.ncl
</td>
@ -2355,6 +2466,7 @@
<td
data-en="Create reflection mode + register as quick action"
data-es="Crear modo de reflexi&oacute;n + registrar como acci&oacute;n r&aacute;pida"
data-key="ontoref-mcp-tool-action-add-desc"
>
Create reflection mode + register as quick action
</td>
@ -2371,12 +2483,14 @@
<h4
data-en="SurrealDB Persistence — Optional"
data-es="Persistencia SurrealDB — Opcional"
data-key="ontoref-mcp-knowledge-title"
>
SurrealDB Persistence — Optional
</h4>
<ul
data-en="<li>Enabled with <code>--db</code> feature flag and <code>--db-url ws://...</code></li><li>Connects via WebSocket at startup — 5s timeout, <strong>fail-open</strong> (daemon runs without it)</li><li>Seeds ontology tables from local NCL files on startup and on file changes</li><li>Persists: actor sessions, seeded ontology tables, search index, notification history</li><li>Without <code>--db</code>: DashMap-backed in-memory, process-lifetime only</li><li>Namespace configurable via <code>--db-namespace</code>; credentials via <code>--db-username/--db-password</code></li>"
data-es="<li>Habilitado con flag de feature <code>--db</code> y <code>--db-url ws://...</code></li><li>Conecta v&iacute;a WebSocket al inicio — 5s timeout, <strong>fail-open</strong> (el daemon funciona sin &eacute;l)</li><li>Siembra tablas de ontolog&iacute;a desde archivos NCL locales al inicio y en cambios de fichero</li><li>Persiste: sesiones de actores, tablas de ontolog&iacute;a sembradas, &iacute;ndice de b&uacute;squeda, historial de notificaciones</li><li>Sin <code>--db</code>: respaldado por DashMap en memoria, solo durante el proceso</li><li>Namespace configurable v&iacute;a <code>--db-namespace</code>; credenciales v&iacute;a <code>--db-username/--db-password</code></li>"
data-key="ontoref-mcp-knowledge-desc"
>
<li>
Enabled with <code>--db</code> feature flag and
@ -2409,12 +2523,14 @@
<h4
data-en="Notification Barrier"
data-es="Barrera de Notificaciones"
data-key="ontoref-mcp-backlog-title"
>
Notification Barrier
</h4>
<ul
data-en="<li><strong>pre_commit</strong> — pre-commit hook POLLs <code>GET /notifications/pending?token=X&amp;project=Y</code>; blocks git commit until all acked</li><li><strong>drift</strong> — schema drift detected between codebase and ontology</li><li><strong>ontology_drift</strong> — emitted by passive observer with missing/stale/drift/broken counts after 15s debounce</li><li>Fail-open: if daemon is unreachable, pre-commit hook passes — commits are never blocked by daemon downtime</li><li>Ack via UI or <code>POST /notifications/ack</code>; custom notifications via <code>POST /{slug}/notifications/emit</code></li><li>Action buttons in notifications can link to any dashboard page</li>"
data-es="<li><strong>pre_commit</strong> — el hook pre-commit hace POLL en <code>GET /notifications/pending?token=X&amp;project=Y</code>; bloquea el commit git hasta que todo es reconocido</li><li><strong>drift</strong> — drift de schema detectado entre codebase y ontolog&iacute;a</li><li><strong>ontology_drift</strong> — emitido por el observador pasivo con conteos missing/stale/drift/broken tras 15s debounce</li><li>Fail-open: si el daemon no est&aacute; disponible, el hook pre-commit pasa — los commits nunca son bloqueados por ca&iacute;da del daemon</li><li>Ack v&iacute;a UI o <code>POST /notifications/ack</code>; notificaciones custom v&iacute;a <code>POST /{slug}/notifications/emit</code></li><li>Los botones de acci&oacute;n en notificaciones pueden enlazar a cualquier p&aacute;gina del dashboard</li>"
data-key="ontoref-mcp-backlog-desc"
>
<li>
<strong>pre_commit</strong> — pre-commit hook polls
@ -2524,6 +2640,7 @@
<span
data-en="The UI in Action &middot; Graph View"
data-es="La UI en Acci&oacute;n &middot; Vista de Grafo"
data-key="ontoref-graph-title"
>The UI in Action &middot; Graph View</span
>
</h2>
@ -2539,6 +2656,7 @@
<span
data-en="Force-directed graph of the live ontology. Nodes are typed (Axiom · Tension · Practice) and polarized (Yang · Yin · Spiral). Click any node to open its detail panel — artifacts, connections, NCL source."
data-es="Grafo dirigido por fuerzas de la ontología en vivo. Los nodos son tipados (Axioma · Tensión · Práctica) y polarizados (Yang · Yin · Espiral). Haz clic en cualquier nodo para abrir su panel de detalles."
data-key="ontoref-graph-desc"
>Force-directed graph of the live ontology. Nodes are typed (Axiom ·
Tension · Practice) and polarized (Yang · Yin · Spiral). Click any
node to open its detail panel — artifacts, connections, NCL
@ -2595,7 +2713,7 @@
<!-- ── TECH STACK ── -->
<section class="section">
<h2 class="section-title">
<span data-en="Technology Stack" data-es="Stack Tecnol&oacute;gico"
<span data-en="Technology Stack" data-es="Stack Tecnol&oacute;gico" data-key="ontoref-tech-stack-title"
>Technology Stack</span
>
</h2>
@ -2625,6 +2743,7 @@
<span
data-en="Protocol Metrics"
data-es="M&eacute;tricas del Protocolo"
data-key="ontoref-metrics-title"
>Protocol Metrics</span
>
</h2>
@ -2679,6 +2798,7 @@
class="cta-title"
data-en="Structure That Remembers Why"
data-es="Estructura que Recuerda el Porqu&eacute;"
data-key="ontoref-cta-title"
>
Structure That Remembers Why
</h2>
@ -2686,6 +2806,7 @@
class="cta-subtitle"
data-en="Start with ontoref setup. Your project gains machine-queryable invariants, living ADRs, actor-aware operational modes, and a daemon that shares context across every actor in real time."
data-es="Empieza con ontoref setup. Tu proyecto gana invariantes consultables por m&aacute;quina, ADRs vivos, modos operacionales con actor-awareness y un daemon que comparte contexto entre todos los actores en tiempo real."
data-key="ontoref-cta-subtitle"
>
Start with <code>ontoref setup</code>. Your project gains
machine-queryable invariants, living ADRs, actor-aware operational
@ -2697,6 +2818,7 @@
class="cta-button"
data-en="Explore the Protocol"
data-es="Explorar el Protocolo"
data-key="ontoref-cta-explore"
>Explore the Protocol</a
>
</div>
@ -2710,6 +2832,7 @@
<p
data-en="Protocol + Runtime. Zero enforcement. One graph per project."
data-es="Protocolo + Runtime. Sin coacci&oacute;n. Un grafo por proyecto."
data-key="ontoref-footer-tagline"
>
Protocol + Runtime. Zero enforcement. One graph per project.
</p>

25
card.ncl Normal file
View File

@ -0,0 +1,25 @@
# Portfolio project card for Ontoref.
# Merged (&) with the shared ProjectCard contract so a missing or ill-typed
# field fails at `nickel export` time rather than at render time.
let d = import "schemas/project-card.ncl" in
d.ProjectCard & {
  id = "ontoref",
  name = "Ontoref",
  tagline = "Structure that remembers why.",
  description = "Self-describing project ontology protocol. Projects implement it via typed NCL schemas — axioms, tensions, practices, state, gates. A queryable structure for validating architectural decisions and auditing coherence.",
  version = "0.1.0",
  # Enum tags ('Active, 'Local) — validated by the ProjectCard contract.
  status = 'Active,
  source = 'Local,
  url = "https://ontoref.jesusperez.pro",
  started_at = "2025",
  tags = ["nickel", "ontology", "governance", "protocol", "architecture"],
  tools = ["Nickel", "Nushell"],
  features = [
    "Three-layer NCL pattern: schemas → defaults → config",
    "Reflection modes: structured agent/developer workflows",
    "DAG topology for architectural decisions",
    "Gate membranes for controlled external signal entry",
    "Protocol — never a runtime dependency",
  ],
  # Not featured on the landing grid; sort_order is the key within the list.
  featured = false,
  sort_order = 4,
  logo = "assets/logo.svg",
}

View File

@ -77,6 +77,7 @@ impl AuthRateLimiter {
/// Returns true if `s` has the format of a UUID v4 (36 chars, hyphens at
/// positions 8/13/18/23). Used to distinguish session tokens from raw passwords
/// in `check_primary_auth` without needing to attempt argon2 on token strings.
#[cfg(feature = "ui")]
fn is_uuid_v4(s: &str) -> bool {
if s.len() != 36 {
return false;
@ -306,6 +307,11 @@ pub fn router(state: AppState) -> axum::Router {
.route("/describe/capabilities", get(describe_capabilities))
.route("/describe/connections", get(describe_connections))
.route("/describe/actor-init", get(describe_actor_init))
// ADR read endpoint
.route("/adr/{id}", get(get_adr))
// Ontology extension endpoints
.route("/ontology", get(list_ontology_extensions))
.route("/ontology/{file}", get(get_ontology_extension))
// Backlog JSON endpoint
.route("/backlog-json", get(backlog_json))
// Q&A read endpoint
@ -336,7 +342,15 @@ pub fn router(state: AppState) -> axum::Router {
let app = app
.route("/qa/add", post(crate::ui::handlers::qa_add))
.route("/qa/delete", post(crate::ui::handlers::qa_delete))
.route("/qa/update", post(crate::ui::handlers::qa_update));
.route("/qa/update", post(crate::ui::handlers::qa_update))
.route(
"/search/bookmark/add",
post(crate::ui::handlers::search_bookmark_add),
)
.route(
"/search/bookmark/delete",
post(crate::ui::handlers::search_bookmark_delete),
);
let app = app.with_state(state.clone());
@ -1274,6 +1288,151 @@ async fn describe_actor_init(
}
}
// ── ADR read endpoint ────────────────────────────────────────────────────────

/// Query parameters for `GET /adr/{id}`.
#[derive(Deserialize)]
struct AdrQuery {
    /// Project slug. `None` selects the primary project.
    slug: Option<String>,
}

/// `GET /adr/{id}` — export a single ADR as JSON.
///
/// `id` is matched as a substring of the file stem under `<root>/adrs/`, so
/// both `adr-001` and a longer fragment of the file name resolve. When several
/// stems match, the lexicographically smallest one is served so the response
/// does not depend on platform-specific directory iteration order.
async fn get_adr(
    State(state): State<AppState>,
    Path(id): Path<String>,
    Query(q): Query<AdrQuery>,
) -> impl IntoResponse {
    state.touch_activity();
    let (root, cache, import_path) = resolve_project_ctx(&state, q.slug.as_deref());
    let adrs_dir = root.join("adrs");

    let entries = match std::fs::read_dir(&adrs_dir) {
        Ok(e) => e,
        Err(_) => {
            return (
                StatusCode::NOT_FOUND,
                Json(serde_json::json!({ "error": "adrs directory not found" })),
            );
        }
    };

    // Collect every matching .ncl file first: read_dir order is
    // platform-dependent, so sort before picking to stay deterministic.
    let mut matches: Vec<std::path::PathBuf> = entries
        .flatten()
        .map(|e| e.path())
        .filter(|p| p.extension().and_then(|x| x.to_str()) == Some("ncl"))
        .filter(|p| {
            p.file_stem()
                .and_then(|s| s.to_str())
                .is_some_and(|stem| stem.contains(id.as_str()))
        })
        .collect();
    matches.sort();

    if let Some(path) = matches.first() {
        return match cache.export(path, import_path.as_deref()).await {
            Ok((v, _)) => (StatusCode::OK, Json(v)),
            Err(e) => (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(serde_json::json!({ "error": e.to_string() })),
            ),
        };
    }

    (
        StatusCode::NOT_FOUND,
        Json(serde_json::json!({ "error": format!("ADR '{}' not found", id) })),
    )
}
// ── Ontology extension endpoints ─────────────────────────────────────────────

/// Core `.ontology/` files served by dedicated endpoints — excluded from the
/// extension listing and from `get_ontology_extension`.
const CORE_FILES: &[&str] = &["core.ncl", "state.ncl", "gate.ncl"];

/// Query parameters shared by the ontology-extension endpoints.
#[derive(Deserialize)]
struct OntologyQuery {
    /// Project slug. `None` selects the primary project.
    slug: Option<String>,
}

/// `GET /ontology` — list project-defined `.ontology/*.ncl` extension files
/// (everything except the core files), sorted by stem.
/// A missing `.ontology/` directory yields an empty list, not an error.
async fn list_ontology_extensions(
    State(state): State<AppState>,
    Query(q): Query<OntologyQuery>,
) -> impl IntoResponse {
    state.touch_activity();
    let (root, _, _) = resolve_project_ctx(&state, q.slug.as_deref());
    let ontology_dir = root.join(".ontology");

    let entries = match std::fs::read_dir(&ontology_dir) {
        Ok(e) => e,
        Err(_) => {
            return (
                StatusCode::OK,
                Json(serde_json::json!({ "extensions": [] })),
            );
        }
    };

    let mut extensions: Vec<serde_json::Value> = entries
        .flatten()
        .filter_map(|e| {
            let path = e.path();
            if path.extension().and_then(|x| x.to_str()) != Some("ncl") {
                return None;
            }
            let name = path.file_name()?.to_str()?.to_string();
            if CORE_FILES.contains(&name.as_str()) {
                return None;
            }
            let stem = path.file_stem()?.to_str()?.to_string();
            Some(serde_json::json!({ "file": name, "id": stem }))
        })
        .collect();
    // Compare the borrowed strs directly — no per-element String allocation
    // in the sort key.
    extensions.sort_by(|a, b| {
        a["id"]
            .as_str()
            .unwrap_or("")
            .cmp(b["id"].as_str().unwrap_or(""))
    });

    (
        StatusCode::OK,
        Json(serde_json::json!({ "extensions": extensions })),
    )
}
/// `GET /ontology/{file}` — export one ontology extension file as JSON.
///
/// `file` may be given with or without the `.ncl` suffix. The name is
/// normalized BEFORE validation so core files cannot be reached by dropping
/// the extension (previously `core` slipped past the guard that only knew
/// `core.ncl`). The MCP `get_ontology_extension` tool checks in this same
/// normalize-then-validate order.
async fn get_ontology_extension(
    State(state): State<AppState>,
    Path(file): Path<String>,
    Query(q): Query<OntologyQuery>,
) -> impl IntoResponse {
    state.touch_activity();
    let (root, cache, import_path) = resolve_project_ctx(&state, q.slug.as_deref());

    // Normalize first, then validate.
    let file = if file.ends_with(".ncl") {
        file
    } else {
        format!("{file}.ncl")
    };

    // Reject traversal attempts (either path separator) and core files —
    // the core files have dedicated endpoints.
    if file.contains('/')
        || file.contains('\\')
        || file.contains("..")
        || CORE_FILES.contains(&file.as_str())
    {
        return (
            StatusCode::BAD_REQUEST,
            Json(serde_json::json!({ "error": "invalid file name" })),
        );
    }

    let path = root.join(".ontology").join(&file);
    if !path.exists() {
        return (
            StatusCode::NOT_FOUND,
            Json(
                serde_json::json!({ "error": format!("ontology extension '{}' not found", file) }),
            ),
        );
    }

    match cache.export(&path, import_path.as_deref()).await {
        Ok((v, _)) => (StatusCode::OK, Json(v)),
        Err(e) => (
            StatusCode::INTERNAL_SERVER_ERROR,
            Json(serde_json::json!({ "error": e.to_string() })),
        ),
    }
}
async fn backlog_json(
State(state): State<AppState>,
Query(q): Query<DescribeQuery>,

View File

@ -108,6 +108,23 @@ fn apply_stdin_config(cli: &mut Cli) -> serde_json::Value {
json
}
/// Run `nickel export` on `config_path` with an optional `NICKEL_IMPORT_PATH`.
///
/// Returns the parsed JSON output, or `None` when the binary cannot be
/// spawned, exits non-zero, or prints something that is not valid JSON.
fn run_nickel_config(
    config_path: &std::path::Path,
    import_path: Option<&str>,
) -> Option<serde_json::Value> {
    let mut cmd = Command::new("nickel");
    cmd.arg("export").arg(config_path);
    if let Some(ip) = import_path {
        cmd.env("NICKEL_IMPORT_PATH", ip);
    }
    match cmd.output() {
        Ok(out) if out.status.success() => serde_json::from_slice(&out.stdout).ok(),
        _ => None,
    }
}
/// Load daemon config from .ontoref/config.ncl and override CLI defaults.
/// Returns (NICKEL_IMPORT_PATH, parsed config JSON) — both optional.
fn load_config_overrides(cli: &mut Cli) -> (Option<String>, Option<serde_json::Value>) {
@ -116,27 +133,23 @@ fn load_config_overrides(cli: &mut Cli) -> (Option<String>, Option<serde_json::V
return (None, None);
}
let output = match Command::new("nickel")
.arg("export")
.arg(&config_path)
.output()
{
Ok(o) => o,
Err(e) => {
warn!(error = %e, path = %config_path.display(), "failed to read config");
return (None, None);
}
};
// First attempt: no NICKEL_IMPORT_PATH (fast path, works for configs without
// imports). Second attempt: include project root and common sub-paths to
// resolve card/schema imports. Canonicalize here so the fallback paths are
// absolute even when project_root is ".".
let abs_root = cli
.project_root
.canonicalize()
.unwrap_or_else(|_| cli.project_root.clone());
let root = abs_root.display().to_string();
let fallback_ip = format!("{root}:{root}/ontology:{root}/.ontology:{root}/ontology/schemas");
let config_json = run_nickel_config(&config_path, None)
.or_else(|| run_nickel_config(&config_path, Some(&fallback_ip)));
if !output.status.success() {
warn!("nickel export failed for config");
return (None, None);
}
let config_json: serde_json::Value = match serde_json::from_slice(&output.stdout) {
Ok(v) => v,
Err(e) => {
warn!(error = %e, "failed to parse config JSON");
let config_json = match config_json {
Some(v) => v,
None => {
warn!("nickel export failed for config");
return (None, None);
}
};
@ -225,12 +238,23 @@ fn load_config_overrides(cli: &mut Cli) -> (Option<String>, Option<serde_json::V
info!("config loaded from {}", config_path.display());
// Resolve relative paths against the canonicalized project root so the
// resulting NICKEL_IMPORT_PATH is always absolute, regardless of the
// daemon's working directory.
let import_path = config_json
.get("nickel_import_paths")
.and_then(|v| v.as_array())
.map(|arr| {
arr.iter()
.filter_map(|v| v.as_str())
.map(|p| {
let candidate = std::path::Path::new(p);
if candidate.is_absolute() {
p.to_string()
} else {
abs_root.join(candidate).display().to_string()
}
})
.collect::<Vec<_>>()
.join(":")
})
@ -498,19 +522,27 @@ async fn main() {
}
// If templates/public dirs were not set by config or CLI, fall back to the
// XDG share location installed by `just install-daemon`.
// platform data dir installed by `just install-daemon`.
// install.nu uses ~/Library/Application Support/ontoref on macOS and
// ~/.local/share/ontoref on Linux — both without the `-daemon` suffix.
#[cfg(feature = "ui")]
{
let xdg_share = std::env::var_os("HOME")
.map(|home| std::path::PathBuf::from(home).join(".local/share/ontoref-daemon"));
let data_share = std::env::var_os("HOME").map(|home| {
let base = std::path::PathBuf::from(home);
#[cfg(target_os = "macos")]
let share = base.join("Library/Application Support/ontoref");
#[cfg(not(target_os = "macos"))]
let share = base.join(".local/share/ontoref");
share
});
if cli.templates_dir.is_none() {
let candidate = xdg_share.as_deref().map(|s| s.join("templates"));
let candidate = data_share.as_deref().map(|s| s.join("templates"));
if candidate.as_deref().is_some_and(|p| p.exists()) {
cli.templates_dir = candidate;
}
}
if cli.public_dir.is_none() {
let candidate = xdg_share.as_deref().map(|s| s.join("public"));
let candidate = data_share.as_deref().map(|s| s.join("public"));
if candidate.as_deref().is_some_and(|p| p.exists()) {
cli.public_dir = candidate;
}
@ -550,6 +582,39 @@ async fn main() {
.unwrap_or("default")
.to_string();
// In --config-stdin (service) mode, the global nickel_import_paths is always
// empty. Per-project import paths live in each project's project.ncl, which
// is already included in stdin_projects. The primary project's entry is
// skipped by the registry (slug collision), so we must extract its
// import_path from the matching stdin_projects entry here.
let nickel_import_path = if cli.config_stdin {
stdin_projects
.iter()
.find(|e| {
std::path::PathBuf::from(&e.root)
.canonicalize()
.ok()
.as_deref()
== Some(project_root.as_path())
})
.and_then(|e| {
let joined = e
.nickel_import_paths
.iter()
.map(|p| resolve_nickel_import_path(p, &project_root))
.collect::<Vec<_>>()
.join(":");
if joined.is_empty() {
None
} else {
Some(joined)
}
})
.or(nickel_import_path)
} else {
nickel_import_path
};
// Build primary ProjectContext up-front so its Arcs (cache, actors,
// notifications, seed_lock, ontology_version) can be aliased into AppState
// and reused by the watcher before the registry is assembled.
@ -1253,6 +1318,15 @@ async fn connect_db(cli: &Cli) -> Option<Arc<stratum_db::StratumDb>> {
}
#[cfg(feature = "ui")]
/// Resolve one `nickel_import_paths` entry against `project_root`.
///
/// Absolute entries pass through unchanged; relative ones are joined onto the
/// project root so the resulting import path is stable regardless of the
/// daemon's working directory.
fn resolve_nickel_import_path(p: &str, project_root: &std::path::Path) -> String {
    let candidate = std::path::Path::new(p);
    if candidate.is_absolute() {
        return p.to_string();
    }
    project_root.join(candidate).display().to_string()
}
fn resolve_asset_dir(project_root: &std::path::Path, config_dir: &str) -> std::path::PathBuf {
let from_root = project_root.join(config_dir);
if from_root.exists() {

View File

@ -148,6 +148,34 @@ struct QaAddInput {
project: Option<String>,
}
/// Input for the `bookmark_list` MCP tool — lists saved search bookmarks.
#[derive(Deserialize, JsonSchema, Default)]
struct BookmarkListInput {
    /// Project slug. Omit to use the default project.
    project: Option<String>,
    /// Optional substring filter on node_id or title.
    filter: Option<String>,
}
/// Input for the `bookmark_add` MCP tool — one search result to persist as a
/// bookmark.
#[derive(Deserialize, JsonSchema, Default)]
struct BookmarkAddInput {
    /// Ontology node id to bookmark (e.g. `"add-project"`).
    node_id: String,
    /// Kind of the result: `"node"`, `"adr"`, or `"mode"`.
    kind: Option<String>,
    /// Human-readable title of the bookmarked node.
    title: String,
    /// Ontology level: `Axiom`, `Tension`, `Practice`, `Project`. May be empty.
    level: Option<String>,
    /// Search term that produced this result.
    term: Option<String>,
    /// Actor saving the bookmark. Defaults to `"agent"`.
    actor: Option<String>,
    /// Optional tags for categorisation.
    tags: Option<Vec<String>>,
    /// Project slug. Omit to use the default project.
    project: Option<String>,
}
#[derive(Deserialize, JsonSchema, Default)]
struct ActionListInput {
/// Project slug. Omit to use the default project.
@ -201,6 +229,8 @@ impl OntoreServer {
.with_async_tool::<ProjectStatusTool>()
.with_async_tool::<ListAdrsTool>()
.with_async_tool::<GetAdrTool>()
.with_async_tool::<ListOntologyExtensionsTool>()
.with_async_tool::<GetOntologyExtensionTool>()
.with_async_tool::<ListModesTool>()
.with_async_tool::<GetModeTool>()
.with_async_tool::<GetNodeTool>()
@ -209,6 +239,8 @@ impl OntoreServer {
.with_async_tool::<GetConstraintsTool>()
.with_async_tool::<QaListTool>()
.with_async_tool::<QaAddTool>()
.with_async_tool::<BookmarkListTool>()
.with_async_tool::<BookmarkAddTool>()
.with_async_tool::<ActionListTool>()
.with_async_tool::<ActionAddTool>()
}
@ -544,6 +576,135 @@ impl AsyncTool<OntoreServer> for GetAdrTool {
}
}
// ── Tool: list_ontology_extensions
// ──────────────────────────────────────────────

/// MCP tool: enumerate project-defined `.ontology/*.ncl` extension files.
struct ListOntologyExtensionsTool;

impl ToolBase for ListOntologyExtensionsTool {
    type Parameter = ProjectParam;
    type Output = serde_json::Value;
    type Error = ToolError;

    // Tool identifier advertised to MCP clients.
    fn name() -> Cow<'static, str> {
        "ontoref_list_ontology_extensions".into()
    }

    fn description() -> Option<Cow<'static, str>> {
        Some(
            "List extra .ontology/*.ncl files beyond core.ncl, state.ncl, and gate.ncl. These are \
             project-defined domain extensions (e.g. career.ncl, personal.ncl)."
                .into(),
        )
    }

    // No explicit output schema — the tool returns free-form JSON.
    fn output_schema() -> Option<Arc<JsonObject>> {
        None
    }
}
impl AsyncTool<OntoreServer> for ListOntologyExtensionsTool {
    /// Scan `<project root>/.ontology/` and return every `.ncl` file that is
    /// not one of the core files, as `{ "file", "id" }` records sorted by id.
    /// A missing directory yields an empty list rather than an error.
    async fn invoke(
        service: &OntoreServer,
        param: ProjectParam,
    ) -> Result<serde_json::Value, ToolError> {
        debug!(tool = "list_ontology_extensions", project = ?param.project);
        let ctx = service.project_ctx(param.project.as_deref());
        let ontology_dir = ctx.root.join(".ontology");
        // Core files have dedicated tools; exclude them from the listing.
        const CORE: &[&str] = &["core.ncl", "state.ncl", "gate.ncl"];

        let Ok(entries) = std::fs::read_dir(&ontology_dir) else {
            return Ok(serde_json::json!({ "extensions": [] }));
        };

        let mut extensions: Vec<serde_json::Value> = entries
            .flatten()
            .filter_map(|e| {
                let path = e.path();
                if path.extension().and_then(|x| x.to_str()) != Some("ncl") {
                    return None;
                }
                let name = path.file_name()?.to_str()?.to_string();
                if CORE.contains(&name.as_str()) {
                    return None;
                }
                let stem = path.file_stem()?.to_str()?.to_string();
                Some(serde_json::json!({ "file": name, "id": stem }))
            })
            .collect();
        // Compare borrowed strs — avoids a String allocation per sort key
        // (matches the HTTP handler's sort semantics exactly).
        extensions.sort_by(|a, b| {
            a["id"]
                .as_str()
                .unwrap_or("")
                .cmp(b["id"].as_str().unwrap_or(""))
        });

        Ok(serde_json::json!({ "extensions": extensions }))
    }
}
// ── Tool: get_ontology_extension
// ────────────────────────────────────────────
struct GetOntologyExtensionTool;
impl ToolBase for GetOntologyExtensionTool {
type Parameter = GetItemInput;
type Output = serde_json::Value;
type Error = ToolError;
fn name() -> Cow<'static, str> {
"ontoref_get_ontology_extension".into()
}
fn description() -> Option<Cow<'static, str>> {
Some(
"Export a project-defined .ontology extension file by stem (e.g. \"career\", \
\"personal\"). Returns the full exported JSON. Use ontoref_list_ontology_extensions \
to discover available files."
.into(),
)
}
fn output_schema() -> Option<Arc<JsonObject>> {
None
}
}
impl AsyncTool<OntoreServer> for GetOntologyExtensionTool {
    /// Export `.ontology/<stem>.ncl` for the requested project.
    ///
    /// Rejects path traversal (`/`, `\`, `..`) and the three core files,
    /// which are served by dedicated tools.
    async fn invoke(
        service: &OntoreServer,
        param: GetItemInput,
    ) -> Result<serde_json::Value, ToolError> {
        debug!(tool = "get_ontology_extension", id = %param.id, project = ?param.project);
        let ctx = service.project_ctx(param.project.as_deref());
        const CORE: &[&str] = &["core.ncl", "state.ncl", "gate.ncl"];
        // Accept either a bare stem ("career") or a full name ("career.ncl").
        let file = if param.id.ends_with(".ncl") {
            param.id.clone()
        } else {
            format!("{}.ncl", param.id)
        };
        // Path safety: the id must name a file directly inside .ontology/.
        // Reject '\' too so Windows-style separators cannot escape the dir,
        // and report traversal attempts with an accurate message (the old
        // code mislabelled them as "core file").
        if file.contains('/') || file.contains('\\') || file.contains("..") {
            return Err(ToolError(format!(
                "'{}' is not a valid extension name — path separators and '..' are not allowed",
                param.id
            )));
        }
        if CORE.contains(&file.as_str()) {
            return Err(ToolError(format!(
                "'{}' is a core file — use dedicated tools for core/state/gate",
                param.id
            )));
        }
        let path = ctx.root.join(".ontology").join(&file);
        if !path.exists() {
            return Err(ToolError(format!(
                "ontology extension '{}' not found",
                param.id
            )));
        }
        // Export via the shared Nickel cache; drop the freshness metadata.
        ctx.cache
            .export(&path, ctx.import_path.as_deref())
            .await
            .map(|(v, _)| v)
            .map_err(|e| ToolError(e.to_string()))
    }
}
// ── Tool: list_modes
// ────────────────────────────────────────────────────────────
@ -919,6 +1080,10 @@ impl AsyncTool<OntoreServer> for HelpTool {
"params": [{"name": "project", "required": false}] },
{ "name": "ontoref_get_adr", "description": "Full ADR by id or partial stem (e.g. adr-001).",
"params": [{"name": "id", "required": true}, {"name": "project", "required": false}] },
{ "name": "ontoref_list_ontology_extensions", "description": "List extra .ontology/*.ncl files beyond core/state/gate.",
"params": [{"name": "project", "required": false}] },
{ "name": "ontoref_get_ontology_extension", "description": "Export a project-defined .ontology extension by stem (e.g. career, personal).",
"params": [{"name": "id", "required": true}, {"name": "project", "required": false}] },
{ "name": "ontoref_list_modes", "description": "List all reflection modes with id, trigger, step count.",
"params": [{"name": "project", "required": false}] },
{ "name": "ontoref_get_mode", "description": "Full reflection mode including all steps and preconditions.",
@ -1756,6 +1921,161 @@ impl ServerHandler for OntoreServer {
// ── Entry points
// ────────────────────────────────────────────────────────────────
// ── Tool: bookmark_list
// ─────────────────────────────────────────────────────────────────────────────
struct BookmarkListTool;
impl ToolBase for BookmarkListTool {
type Parameter = BookmarkListInput;
type Output = serde_json::Value;
type Error = ToolError;
fn name() -> Cow<'static, str> {
"ontoref_bookmark_list".into()
}
fn description() -> Option<Cow<'static, str>> {
Some(
"List search bookmarks stored in reflection/search_bookmarks.ncl. Optionally filter \
by node_id or title substring."
.into(),
)
}
fn output_schema() -> Option<Arc<JsonObject>> {
None
}
}
impl AsyncTool<OntoreServer> for BookmarkListTool {
    /// Export the bookmark store, optionally filter case-insensitively by
    /// `node_id`/`title` substring, and return `{ entries, count }`.
    async fn invoke(
        service: &OntoreServer,
        param: BookmarkListInput,
    ) -> Result<serde_json::Value, ToolError> {
        debug!(tool = "bookmark_list", project = ?param.project);
        let ctx = service.project_ctx(param.project.as_deref());
        let store_path = ctx.root.join("reflection").join("search_bookmarks.ncl");
        // A missing store is a valid empty state, not an error.
        if !store_path.exists() {
            return Ok(serde_json::json!({ "entries": [], "count": 0 }));
        }
        let (exported, _) = ctx
            .cache
            .export(&store_path, ctx.import_path.as_deref())
            .await
            .map_err(|e| ToolError(e.to_string()))?;
        let mut entries: Vec<serde_json::Value> = exported
            .get("entries")
            .and_then(|v| v.as_array())
            .cloned()
            .unwrap_or_default();
        // Keep an entry when either field contains the lowercased filter.
        if let Some(raw_filter) = param.filter.as_deref() {
            let needle = raw_filter.to_lowercase();
            let field_contains = |entry: &serde_json::Value, key: &str| {
                entry
                    .get(key)
                    .and_then(|v| v.as_str())
                    .map(|s| s.to_lowercase().contains(&needle))
                    .unwrap_or(false)
            };
            entries.retain(|e| field_contains(e, "node_id") || field_contains(e, "title"));
        }
        let count = entries.len();
        Ok(serde_json::json!({ "entries": entries, "count": count }))
    }
}
// ── Tool: bookmark_add
// ────────────────────────────────────────────────────────
struct BookmarkAddTool;
impl ToolBase for BookmarkAddTool {
type Parameter = BookmarkAddInput;
type Output = serde_json::Value;
type Error = ToolError;
fn name() -> Cow<'static, str> {
"ontoref_bookmark_add".into()
}
fn description() -> Option<Cow<'static, str>> {
Some(
concat!(
"Save a search result as a bookmark in reflection/search_bookmarks.ncl (persisted \
to disk, git-versioned). ",
"Use this when the user stars/bookmarks a search result in the CLI or UI. ",
"Required: node_id, title. Optional: kind, level, term, actor, tags.",
)
.into(),
)
}
fn output_schema() -> Option<Arc<JsonObject>> {
None
}
}
impl AsyncTool<OntoreServer> for BookmarkAddTool {
    /// Append a bookmark entry to reflection/search_bookmarks.ncl, then
    /// invalidate the cached export so subsequent reads see the new entry.
    async fn invoke(
        service: &OntoreServer,
        param: BookmarkAddInput,
    ) -> Result<serde_json::Value, ToolError> {
        debug!(tool = "bookmark_add", project = ?param.project, node_id = %param.node_id);
        let ctx = service.project_ctx(param.project.as_deref());
        let bm_path = ctx.root.join("reflection").join("search_bookmarks.ncl");
        // The store file is scaffolded by `ontoref setup`; refuse to create it here.
        if !bm_path.exists() {
            return Err(ToolError(format!(
                "search_bookmarks.ncl not found at {} — run ontoref setup first",
                bm_path.display()
            )));
        }
        // Defaults mirror the HTTP handler (search_bookmark_add), except
        // actor defaults to "agent" — calls here come from an AI client.
        let kind = param.kind.as_deref().unwrap_or("node");
        let level = param.level.as_deref().unwrap_or("");
        let term = param.term.as_deref().unwrap_or("");
        let actor = param.actor.as_deref().unwrap_or("agent");
        let tags = param.tags.as_deref().unwrap_or(&[]);
        let now = today_iso();
        // NOTE(review): unlike the HTTP handler, no ncl_write_lock is taken
        // here — confirm concurrent MCP adds cannot interleave with UI writes
        // (add_entry derives the next id from the file it just read).
        let id = crate::ui::search_bookmarks_ncl::add_entry(
            &bm_path,
            crate::ui::search_bookmarks_ncl::NewBookmark {
                node_id: &param.node_id,
                kind,
                title: &param.title,
                level,
                term,
                actor,
                created_at: &now,
                tags,
            },
        )
        .map_err(|e| ToolError(e.to_string()))?;
        // Drop any cached export of the store so the next read re-exports.
        ctx.cache.invalidate_file(&bm_path);
        Ok(serde_json::json!({
            "ok": true,
            "id": id,
            "created_at": now,
            "node_id": param.node_id,
            "title": param.title,
        }))
    }
}
/// Run the MCP server over stdin/stdout — for use as a `command`-mode MCP
/// server in Claude Desktop, Cursor, or any stdio-compatible AI client.
pub async fn serve_stdio(state: AppState) -> anyhow::Result<()> {

View File

@ -110,6 +110,39 @@ fn resolve_logo_url(raw: &str, base_url: &str) -> String {
}
}
/// Load and export `.ontoref/config.ncl`, returning the full JSON value.
///
/// Returns `None` when the file is absent or the Nickel export fails; a
/// failure is logged at warn level with the path and import path.
async fn load_config_json(
    root: &std::path::Path,
    cache: &Arc<crate::cache::NclCache>,
    import_path: Option<&str>,
) -> Option<serde_json::Value> {
    let config_path = root.join(".ontoref").join("config.ncl");
    if !config_path.exists() {
        return None;
    }
    let (json, _) = match cache.export(&config_path, import_path).await {
        Ok(exported) => exported,
        Err(e) => {
            tracing::warn!(
                path = %config_path.display(),
                import_path = ?import_path,
                error = %e,
                "config.ncl export failed"
            );
            return None;
        }
    };
    // Log card presence/tagline — useful when debugging the project picker.
    let tagline = json
        .get("card")
        .and_then(|c| c.get("tagline"))
        .and_then(|v| v.as_str())
        .unwrap_or("");
    tracing::info!(
        path = %config_path.display(),
        has_card = json.get("card").is_some(),
        card_tagline = tagline,
        "config.ncl loaded"
    );
    Some(json)
}
/// Load logo URLs from `.ontoref/config.ncl` ui section.
/// Returns `(logo_light_url, logo_dark_url)` — either may be `None`.
async fn load_logos(
@ -118,11 +151,7 @@ async fn load_logos(
import_path: Option<&str>,
base_url: &str,
) -> (Option<String>, Option<String>) {
let config_path = root.join(".ontoref").join("config.ncl");
if !config_path.exists() {
return (None, None);
}
let Ok((json, _)) = cache.export(&config_path, import_path).await else {
let Some(json) = load_config_json(root, cache, import_path).await else {
return (None, None);
};
let ui = json.get("ui");
@ -137,10 +166,27 @@ async fn load_logos(
(logo, logo_dark)
}
/// Extract card data from a config JSON (the `card` field of
/// `.ontoref/config.ncl`).
///
/// Returns `Value::Null` when no `card` field exists; otherwise a flat
/// object where missing string fields default to `""` and missing array
/// fields default to `[]`.
fn extract_card_from_config(json: &serde_json::Value) -> serde_json::Value {
    let card = match json.get("card") {
        Some(c) => c,
        None => return serde_json::Value::Null,
    };
    // Field accessors with the defaults described above.
    let text = |key: &str| card.get(key).and_then(|v| v.as_str()).unwrap_or("");
    let list = |key: &str| {
        card.get(key)
            .and_then(|v| v.as_array())
            .cloned()
            .unwrap_or_default()
    };
    serde_json::json!({
        "tagline": text("tagline"),
        "description": text("description"),
        "version": text("version"),
        "status": text("status"),
        "url": text("url"),
        "tags": list("tags"),
        "features": list("features"),
    })
}
/// Insert logo and MCP metadata into a Tera context.
/// Logos are loaded from `.ontoref/config.ncl`; MCP availability is
/// compile-time.
async fn insert_brand_ctx(
pub(crate) async fn insert_brand_ctx(
ctx: &mut Context,
root: &std::path::Path,
cache: &Arc<crate::cache::NclCache>,
@ -154,7 +200,7 @@ async fn insert_brand_ctx(
}
/// Insert MCP metadata and daemon version into a Tera context.
fn insert_mcp_ctx(ctx: &mut Context) {
pub(crate) fn insert_mcp_ctx(ctx: &mut Context) {
ctx.insert("daemon_version", env!("CARGO_PKG_VERSION"));
#[cfg(feature = "mcp")]
{
@ -369,9 +415,16 @@ pub async fn notifications_page(State(state): State<AppState>) -> Result<Html<St
pub async fn search_page(State(state): State<AppState>) -> Result<Html<String>, UiError> {
let tera = tera_ref(&state)?;
let bookmarks = load_bookmark_entries(
&state.cache,
&state.project_root,
state.nickel_import_path.as_deref(),
)
.await;
let mut ctx = Context::new();
ctx.insert("base_url", "/ui");
ctx.insert("slug", &Option::<String>::None);
ctx.insert("server_bookmarks", &bookmarks);
insert_brand_ctx(
&mut ctx,
&state.project_root,
@ -391,10 +444,17 @@ pub async fn search_page_mp(
let tera = tera_ref(&state)?;
let ctx_ref = state.registry.get(&slug).ok_or(UiError::NotConfigured)?;
let base_url = format!("/ui/{slug}");
let bookmarks = load_bookmark_entries(
&ctx_ref.cache,
&ctx_ref.root,
ctx_ref.import_path.as_deref(),
)
.await;
let mut ctx = Context::new();
ctx.insert("base_url", &base_url);
ctx.insert("slug", &slug);
ctx.insert("current_role", &auth_role_str(&auth));
ctx.insert("server_bookmarks", &bookmarks);
insert_brand_ctx(
&mut ctx,
&ctx_ref.root,
@ -597,8 +657,34 @@ pub async fn project_picker(State(state): State<AppState>) -> Result<Html<String
(vec![], vec![], String::new(), String::new())
};
// Description — first meaningful text line from README.md
let description = readme_description(&proj.root);
// card — loaded from `.ontoref/config.ncl` `card` field (which imports
// ../card.ncl)
let config_json =
load_config_json(&proj.root, &proj.cache, proj.import_path.as_deref()).await;
let card = config_json
.as_ref()
.map(extract_card_from_config)
.unwrap_or(serde_json::Value::Null);
tracing::debug!(
slug = %proj.slug,
import_path = ?proj.import_path,
config_loaded = config_json.is_some(),
card_tagline = card.get("tagline").and_then(|v| v.as_str()).unwrap_or(""),
"project card loaded"
);
// Description — first meaningful text line from README.md (fallback when no
// card)
let description = if card
.get("tagline")
.and_then(|v| v.as_str())
.unwrap_or("")
.is_empty()
{
readme_description(&proj.root)
} else {
String::new()
};
let proj_base = format!("/ui/{}", proj.slug);
let showcase = detect_showcase(&proj.root, &proj_base);
@ -612,6 +698,7 @@ pub async fn project_picker(State(state): State<AppState>) -> Result<Html<String
"slug": proj.slug,
"root": proj.root.display().to_string(),
"auth": proj.auth_enabled(),
"card": card,
"description": description,
"default_mode": default_mode,
"repo_kind": repo_kind,
@ -2576,3 +2663,141 @@ async fn run_action_by_id(
Err(e) => warn!(action_id, mode, error = %e, "actions_run: spawn failed"),
}
}
// ── Search bookmarks mutation
// ─────────────────────────────────────────────────
/// JSON body for the `…/search/bookmark/add` endpoint.
#[derive(Deserialize)]
pub struct BookmarkAddRequest {
    // Ontology node id the bookmark points at (required).
    pub node_id: String,
    // Result kind; defaults to "node" when omitted.
    pub kind: Option<String>,
    // Human-readable title shown in bookmark lists (required).
    pub title: String,
    // Ontology level label; defaults to "".
    pub level: Option<String>,
    // Search term that led to this result — presumably; verify against the
    // search-page caller. Defaults to "".
    pub term: Option<String>,
    // Who created the bookmark; defaults to "human" for this HTTP endpoint.
    pub actor: Option<String>,
    // Free-form tags; defaults to [].
    pub tags: Option<Vec<String>>,
    // Multi-project slug; omitted = default project (see resolve_bookmark_ctx).
    pub slug: Option<String>,
}
/// JSON body for the `…/search/bookmark/delete` endpoint.
#[derive(Deserialize)]
pub struct BookmarkDeleteRequest {
    // Bookmark entry id to remove (format `sb-NNN`).
    pub id: String,
    // Multi-project slug; omitted = default project (see resolve_bookmark_ctx).
    pub slug: Option<String>,
}
/// POST handler: append a bookmark to reflection/search_bookmarks.ncl for
/// the project selected by `body.slug` (default project when omitted).
///
/// Returns 200 with the generated id, 404 when the store file is missing,
/// or 500 when the in-place file mutation fails.
pub async fn search_bookmark_add(
    State(state): State<AppState>,
    Json(body): Json<BookmarkAddRequest>,
) -> impl IntoResponse {
    let (root, cache) = resolve_bookmark_ctx(&state, body.slug.as_deref());
    let bm_path = root.join("reflection").join("search_bookmarks.ncl");
    // Serialize writers on this file — add/delete do line-level surgery and
    // the generated id depends on the current file content.
    let _guard = state.ncl_write_lock.acquire(&bm_path).await;
    // The store file is scaffolded by `ontoref setup`; refuse to create it here.
    if !bm_path.exists() {
        return (
            StatusCode::NOT_FOUND,
            Json(serde_json::json!({
                "error": "search_bookmarks.ncl not found — run ontoref setup first"
            })),
        );
    }
    // Optional fields fall back to defaults; actor defaults to "human"
    // because requests here come from the web UI.
    let kind = body.kind.as_deref().unwrap_or("node");
    let level = body.level.as_deref().unwrap_or("");
    let term = body.term.as_deref().unwrap_or("");
    let actor = body.actor.as_deref().unwrap_or("human");
    let tags = body.tags.as_deref().unwrap_or(&[]);
    let now = now_iso();
    match super::search_bookmarks_ncl::add_entry(
        &bm_path,
        super::search_bookmarks_ncl::NewBookmark {
            node_id: &body.node_id,
            kind,
            title: &body.title,
            level,
            term,
            actor,
            created_at: &now,
            tags,
        },
    ) {
        Ok(id) => {
            // Drop the cached export so subsequent reads see the new entry.
            cache.invalidate_file(&bm_path);
            (
                StatusCode::OK,
                Json(serde_json::json!({
                    "ok": true,
                    "id": id,
                    "created_at": now,
                    "node_id": body.node_id,
                })),
            )
        }
        Err(e) => {
            warn!(error = %e, "search_bookmark_add failed");
            (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(serde_json::json!({ "error": e.to_string() })),
            )
        }
    }
}
/// POST handler: remove a bookmark entry (by `sb-NNN` id) from
/// reflection/search_bookmarks.ncl for the project selected by `body.slug`.
///
/// Returns 200 on success, 404 when the store file is missing, or 500 when
/// the entry is not found or the file mutation fails.
pub async fn search_bookmark_delete(
    State(state): State<AppState>,
    Json(body): Json<BookmarkDeleteRequest>,
) -> impl IntoResponse {
    let (root, cache) = resolve_bookmark_ctx(&state, body.slug.as_deref());
    let bm_path = root.join("reflection").join("search_bookmarks.ncl");
    // Serialize writers on this file — see search_bookmark_add.
    let _guard = state.ncl_write_lock.acquire(&bm_path).await;
    if !bm_path.exists() {
        return (
            StatusCode::NOT_FOUND,
            Json(serde_json::json!({ "error": "search_bookmarks.ncl not found" })),
        );
    }
    match super::search_bookmarks_ncl::remove_entry(&bm_path, &body.id) {
        Ok(()) => {
            // Drop the cached export so subsequent reads see the removal.
            cache.invalidate_file(&bm_path);
            (StatusCode::OK, Json(serde_json::json!({ "ok": true })))
        }
        Err(e) => {
            warn!(error = %e, "search_bookmark_delete failed");
            (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(serde_json::json!({ "error": e.to_string() })),
            )
        }
    }
}
/// Export reflection/search_bookmarks.ncl and return its `entries` array.
///
/// Any failure — missing file, export error, or missing/non-array
/// `entries` field — yields an empty vec (bookmarks are best-effort UI data).
pub(crate) async fn load_bookmark_entries(
    cache: &Arc<crate::cache::NclCache>,
    root: &std::path::Path,
    import_path: Option<&str>,
) -> Vec<serde_json::Value> {
    let store_path = root.join("reflection").join("search_bookmarks.ncl");
    if !store_path.exists() {
        return Vec::new();
    }
    let Ok((json, _)) = cache.export(&store_path, import_path).await else {
        return Vec::new();
    };
    json.get("entries")
        .and_then(|v| v.as_array())
        .cloned()
        .unwrap_or_default()
}
/// Resolve the project root and Nickel cache for a bookmark request.
///
/// A known `slug` selects that registry project; an unknown or absent slug
/// falls back to the daemon's default project.
fn resolve_bookmark_ctx(
    state: &crate::api::AppState,
    slug: Option<&str>,
) -> (std::path::PathBuf, Arc<crate::cache::NclCache>) {
    match slug.and_then(|s| state.registry.get(s)) {
        Some(ctx) => (ctx.root.clone(), ctx.cache.clone()),
        None => (state.project_root.clone(), state.cache.clone()),
    }
}

View File

@ -6,7 +6,7 @@ use axum::{
use serde::Deserialize;
use tera::Context;
use super::handlers::{render, UiError};
use super::handlers::{insert_brand_ctx, insert_mcp_ctx, render, UiError};
use crate::api::AppState;
use crate::session::{extract_cookie, COOKIE_NAME};
@ -15,10 +15,24 @@ pub async fn login_page(
Path(slug): Path<String>,
) -> Result<Html<String>, UiError> {
let tera = state.tera.as_ref().ok_or(UiError::NotConfigured)?;
let base_url = format!("/ui/{slug}");
let mut ctx = Context::new();
ctx.insert("slug", &slug);
ctx.insert("error", &false);
ctx.insert("base_url", &format!("/ui/{slug}"));
ctx.insert("base_url", &base_url);
ctx.insert("hide_project_nav", &true);
ctx.insert("current_role", "");
insert_mcp_ctx(&mut ctx);
if let Some(proj) = state.registry.get(&slug) {
insert_brand_ctx(
&mut ctx,
&proj.root,
&proj.cache,
proj.import_path.as_deref(),
&base_url,
)
.await;
}
render(tera, "pages/login.html", &ctx).await
}
@ -56,10 +70,22 @@ pub async fn login_submit(
Some(t) => t,
None => return StatusCode::INTERNAL_SERVER_ERROR.into_response(),
};
let base_url = format!("/ui/{slug}");
let mut tctx = Context::new();
tctx.insert("slug", &slug);
tctx.insert("error", &true);
tctx.insert("base_url", &format!("/ui/{slug}"));
tctx.insert("base_url", &base_url);
tctx.insert("hide_project_nav", &true);
tctx.insert("current_role", "");
insert_mcp_ctx(&mut tctx);
insert_brand_ctx(
&mut tctx,
&ctx.root,
&ctx.cache,
ctx.import_path.as_deref(),
&base_url,
)
.await;
match render(tera, "pages/login.html", &tctx).await {
Ok(html) => html.into_response(),
Err(_) => StatusCode::INTERNAL_SERVER_ERROR.into_response(),
@ -74,6 +100,9 @@ pub async fn manage_login_page(State(state): State<AppState>) -> Result<Html<Str
ctx.insert("error", &false);
ctx.insert("base_url", "/ui");
ctx.insert("daemon_admin_enabled", &state.daemon_admin_hash.is_some());
ctx.insert("hide_project_nav", &true);
ctx.insert("current_role", "");
insert_mcp_ctx(&mut ctx);
render(tera, "pages/manage_login.html", &ctx).await
}
@ -112,6 +141,9 @@ pub async fn manage_login_submit(
tctx.insert("error", &true);
tctx.insert("base_url", "/ui");
tctx.insert("daemon_admin_enabled", &true);
tctx.insert("hide_project_nav", &true);
tctx.insert("current_role", "");
insert_mcp_ctx(&mut tctx);
match render(tera, "pages/manage_login.html", &tctx).await {
Ok(html) => html.into_response(),
Err(_) => StatusCode::INTERNAL_SERVER_ERROR.into_response(),

View File

@ -5,6 +5,7 @@ pub mod handlers;
pub mod login;
pub mod ncl_write;
pub mod qa_ncl;
pub mod search_bookmarks_ncl;
pub mod watcher;
pub use drift_watcher::DriftWatcher;
@ -39,6 +40,11 @@ fn single_router(state: AppState) -> axum::Router {
.route("/qa", get(handlers::qa_page))
.route("/qa/delete", post(handlers::qa_delete))
.route("/qa/update", post(handlers::qa_update))
.route("/search/bookmark/add", post(handlers::search_bookmark_add))
.route(
"/search/bookmark/delete",
post(handlers::search_bookmark_delete),
)
.with_state(state)
}
@ -90,6 +96,14 @@ fn multi_router(state: AppState) -> axum::Router {
.route("/{slug}/qa", get(handlers::qa_page_mp))
.route("/{slug}/qa/delete", post(handlers::qa_delete))
.route("/{slug}/qa/update", post(handlers::qa_update))
.route(
"/{slug}/search/bookmark/add",
post(handlers::search_bookmark_add),
)
.route(
"/{slug}/search/bookmark/delete",
post(handlers::search_bookmark_delete),
)
// Login is public — no AuthUser extractor
.route(
"/{slug}/login",

View File

@ -0,0 +1,285 @@
//! In-place mutations of reflection/search_bookmarks.ncl.
//!
//! Mirrors qa_ncl.rs — line-level surgery on a predictable Nickel structure.
//! The bookmark store has a single `entries` array of `BookmarkEntry` records.
use std::path::Path;
/// Data for a new bookmark entry.
///
/// All fields are borrowed; `add_entry` escapes and serializes them into
/// the Nickel store, so callers keep ownership of the strings.
pub struct NewBookmark<'a> {
    // Ontology node id the bookmark points at.
    pub node_id: &'a str,
    // Result kind (callers in this crate pass "node").
    pub kind: &'a str,
    // Human-readable title.
    pub title: &'a str,
    // Ontology level label; may be empty.
    pub level: &'a str,
    // Search term that produced the result; may be empty.
    pub term: &'a str,
    // Who created the bookmark (callers pass "human", "agent", or "developer").
    pub actor: &'a str,
    // Creation date string (callers pass today_iso()/now_iso() output).
    pub created_at: &'a str,
    // Free-form tags; serialized as a Nickel string array.
    pub tags: &'a [String],
}
/// Append a new bookmark entry to reflection/search_bookmarks.ncl.
///
/// Returns the generated id (`sb-NNN`).
///
/// Callers must serialize concurrent writers (see `NclWriteLock` usage in
/// the handlers/tests): the id is derived from the file content read here,
/// so unsynchronized writers could mint duplicate ids.
pub fn add_entry(path: &Path, entry: NewBookmark<'_>) -> anyhow::Result<String> {
    let content = std::fs::read_to_string(path)?;
    // Next free `sb-NNN` based on the highest existing id in the file.
    let next_id = next_entry_id(&content);
    // Render the entry as a Nickel record block; every string value is
    // escaped so quotes/backslashes can't break the literal.
    let block = format!(
        r#" {{
 id = "{id}",
 node_id = "{node_id}",
 kind = "{kind}",
 title = "{title}",
 level = "{level}",
 term = "{term}",
 actor = "{actor}",
 created_at = "{created_at}",
 tags = {tags},
 }},
"#,
        id = next_id,
        node_id = escape_ncl(entry.node_id),
        kind = escape_ncl(entry.kind),
        title = escape_ncl(entry.title),
        level = escape_ncl(entry.level),
        term = escape_ncl(entry.term),
        actor = escape_ncl(entry.actor),
        created_at = escape_ncl(entry.created_at),
        tags = ncl_string_array(entry.tags),
    );
    // Splice just before the entries array's closing bracket, then replace
    // the file atomically (write-temp-then-rename semantics in ncl_write).
    let updated = insert_before_entries_close(&content, &block)?;
    super::ncl_write::atomic_write(path, &updated)?;
    Ok(next_id)
}
/// Remove the bookmark entry block with `id`.
///
/// Errors when the id is absent or the surrounding block delimiters cannot
/// be located; the file is rewritten atomically on success.
pub fn remove_entry(path: &Path, id: &str) -> anyhow::Result<()> {
    let current = std::fs::read_to_string(path)?;
    let pruned = delete_entry_block(&current, id)?;
    super::ncl_write::atomic_write(path, &pruned)?;
    Ok(())
}
// ── helpers ──────────────────────────────────────────────────────────────────
/// Find the highest `sb-NNN` id and return `sb-(NNN+1)` zero-padded to 3
/// digits.
///
/// Only lines whose first token is the `id` field itself are considered:
/// the previous version accepted any line *starting* with "id" (e.g. an
/// `idx = "sb-099"` field would have skewed the counter).
fn next_entry_id(content: &str) -> String {
    let max = content
        .lines()
        .filter_map(|line| {
            let trimmed = line.trim();
            let rest = trimmed.strip_prefix("id")?;
            // Require `=` right after the prefix (spaces allowed) so fields
            // like `idx` or `identifier` don't false-match.
            if !rest.trim_start().starts_with('=') {
                return None;
            }
            // First quoted token is the value: `id = "sb-NNN",`.
            let val = rest.split('"').nth(1)?;
            let num_str = val.strip_prefix("sb-")?;
            num_str.parse::<u32>().ok()
        })
        .max()
        .unwrap_or(0);
    format!("sb-{:03}", max + 1)
}
/// Insert `block` before the closing ` ],` of the entries array.
///
/// Errors when the closing marker cannot be found (store file not in the
/// scaffolded layout).
fn insert_before_entries_close(content: &str, block: &str) -> anyhow::Result<String> {
    const NEEDLE: &str = " ],";
    let Some(pos) = content.find(NEEDLE) else {
        return Err(anyhow::anyhow!(
            "could not locate entries array closing ` ],` in search_bookmarks.ncl"
        ));
    };
    // Split at the marker and rebuild with the new block spliced in.
    let (head, tail) = content.split_at(pos);
    Ok(format!("{head}{block}{tail}"))
}
/// Remove the block containing `id = "sb-NNN"`.
///
/// Scans for the line holding the quoted id, walks outward to the enclosing
/// `{` / `},` lines, and drops that whole entry.
///
/// Fix: `lines()` discards the file's trailing newline and `join("\n")`
/// never restores it, so every delete used to strip the final newline —
/// the newline is now re-appended when the input ended with one.
fn delete_entry_block(content: &str, id: &str) -> anyhow::Result<String> {
    let id_needle = format!("\"{}\"", id);
    let lines: Vec<&str> = content.lines().collect();
    let n = lines.len();
    // Locate the `id = "sb-NNN"` assignment line.
    let id_line = lines
        .iter()
        .position(|l| l.contains(&id_needle) && l.contains('='))
        .ok_or_else(|| anyhow::anyhow!("entry id {} not found in search_bookmarks.ncl", id))?;
    // Walk backwards to the entry's opening `{` …
    let block_start = (0..=id_line)
        .rev()
        .find(|&i| lines[i].trim() == "{")
        .ok_or_else(|| anyhow::anyhow!("could not find block open for bookmark entry {}", id))?;
    // … and forwards to its closing `},`.
    let block_end = (id_line..n)
        .find(|&i| lines[i].trim() == "},")
        .ok_or_else(|| anyhow::anyhow!("could not find block close for bookmark entry {}", id))?;
    let mut kept = Vec::with_capacity(n - (block_end - block_start + 1));
    for (i, line) in lines.iter().enumerate() {
        if i < block_start || i > block_end {
            kept.push(*line);
        }
    }
    let mut rebuilt = kept.join("\n");
    // Preserve the original trailing newline (see doc comment above).
    if content.ends_with('\n') {
        rebuilt.push('\n');
    }
    Ok(rebuilt)
}
/// Render a slice of strings as a Nickel array literal, escaping each item.
fn ncl_string_array(items: &[String]) -> String {
    let quoted: Vec<String> = items
        .iter()
        .map(|s| format!("\"{}\"", escape_ncl(s)))
        .collect();
    if quoted.is_empty() {
        "[]".to_string()
    } else {
        format!("[{}]", quoted.join(", "))
    }
}
/// Escape backslashes and double quotes so `s` can be embedded inside a
/// double-quoted Nickel string literal.
fn escape_ncl(s: &str) -> String {
    let mut out = String::with_capacity(s.len());
    for ch in s.chars() {
        match ch {
            '\\' => out.push_str("\\\\"),
            '"' => out.push_str("\\\""),
            other => out.push(other),
        }
    }
    out
}
#[cfg(test)]
mod tests {
    //! Unit tests for the line-level search_bookmarks.ncl mutations.
    use super::*;

    // A two-entry store in the scaffolded layout the mutators expect.
    const SAMPLE: &str = concat!(
        "let s = import \"search_bookmarks\" in\n",
        "{\n",
        " entries = [\n",
        " {\n",
        " id = \"sb-001\",\n",
        " node_id = \"add-project\",\n",
        " kind = \"node\",\n",
        " title = \"Add a project\",\n",
        " level = \"Practice\",\n",
        " term = \"add project\",\n",
        " actor = \"developer\",\n",
        " created_at = \"2026-03-14\",\n",
        " tags = [],\n",
        " },\n",
        " {\n",
        " id = \"sb-002\",\n",
        " node_id = \"ontology-axiom\",\n",
        " kind = \"node\",\n",
        " title = \"Ontology axiom\",\n",
        " level = \"Axiom\",\n",
        " term = \"axiom\",\n",
        " actor = \"developer\",\n",
        " created_at = \"2026-03-14\",\n",
        " tags = [],\n",
        " },\n",
        " ],\n",
        "} | s.BookmarkStore\n",
    );

    #[test]
    fn next_id_empty() {
        // An empty store starts numbering at sb-001.
        assert_eq!(next_entry_id(""), "sb-001");
    }

    #[test]
    fn next_id_increments() {
        let content = r#"id = "sb-007","#;
        assert_eq!(next_entry_id(content), "sb-008");
    }

    #[test]
    fn array_empty() {
        assert_eq!(ncl_string_array(&[]), "[]");
    }

    #[test]
    fn array_values() {
        let v = vec!["search".to_string(), "ontology".to_string()];
        assert_eq!(ncl_string_array(&v), r#"["search", "ontology"]"#);
    }

    #[test]
    fn insert_into_empty_store() {
        // Splicing must preserve the closing ` ],` marker for future inserts.
        let content =
            "let s = import \"search_bookmarks\" in\n{\n entries = [\n ],\n} | s.BookmarkStore\n";
        let block = " { id = \"sb-001\" },\n";
        let result = insert_before_entries_close(content, block).unwrap();
        assert!(result.contains("{ id = \"sb-001\" }"));
        assert!(result.contains(" ],"));
    }

    #[test]
    fn delete_first_entry() {
        let updated = delete_entry_block(SAMPLE, "sb-001").unwrap();
        assert!(!updated.contains("sb-001"), "sb-001 should be removed");
        assert!(updated.contains("sb-002"), "sb-002 should remain");
    }

    #[test]
    fn delete_second_entry() {
        let updated = delete_entry_block(SAMPLE, "sb-002").unwrap();
        assert!(updated.contains("sb-001"), "sb-001 should remain");
        assert!(!updated.contains("sb-002"), "sb-002 should be removed");
    }

    #[test]
    fn delete_missing_id_errors() {
        assert!(delete_entry_block(SAMPLE, "sb-999").is_err());
    }

    #[test]
    fn escape_quotes_and_backslashes() {
        assert_eq!(escape_ncl(r#"say "hi""#), r#"say \"hi\""#);
        assert_eq!(escape_ncl(r"path\to"), r"path\\to");
    }

    // Regression test: writers serialized by NclWriteLock must each see the
    // previous writer's file state, so ids never collide.
    #[tokio::test]
    async fn concurrent_add_produces_unique_ids() {
        use std::path::PathBuf;
        use std::sync::Arc;
        use tempfile::NamedTempFile;

        const MINIMAL: &str =
            "let s = import \"search_bookmarks\" in\n{\n entries = [\n ],\n} | s.BookmarkStore\n";
        const TASKS: usize = 6;

        let lock = Arc::new(super::super::ncl_write::NclWriteLock::new());
        let file = NamedTempFile::new().unwrap();
        std::fs::write(file.path(), MINIMAL).unwrap();
        let path: Arc<PathBuf> = Arc::new(file.path().to_path_buf());

        let handles: Vec<_> = (0..TASKS)
            .map(|i| {
                let lock = Arc::clone(&lock);
                let path = Arc::clone(&path);
                tokio::spawn(async move {
                    // Hold the per-file lock across the read-modify-write.
                    let _guard = lock.acquire(&path).await;
                    add_entry(
                        &path,
                        NewBookmark {
                            node_id: &format!("node-{i}"),
                            kind: "node",
                            title: &format!("Title {i}"),
                            level: "Practice",
                            term: "search term",
                            actor: "developer",
                            created_at: "2026-03-14",
                            tags: &[],
                        },
                    )
                })
            })
            .collect();

        let mut ids: Vec<String> = {
            let mut v = Vec::with_capacity(TASKS);
            for h in handles {
                v.push(h.await.unwrap().unwrap());
            }
            v
        };
        ids.sort();
        ids.dedup();
        assert_eq!(
            ids.len(),
            TASKS,
            "concurrent add_entry must produce unique IDs"
        );
    }
}

View File

@ -55,6 +55,22 @@
{% endblock head %}
{% block content %}
<input type="hidden" id="graph-slug" value="{% if slug %}{{ slug }}{% endif %}">
<!-- ADR modal -->
<dialog id="adr-modal" class="modal">
<div class="modal-box w-11/12 max-w-2xl">
<div class="flex justify-between items-center mb-4">
<h3 class="font-bold text-lg" id="adr-modal-title">ADR</h3>
<form method="dialog"><button class="btn btn-sm btn-circle btn-ghost"></button></form>
</div>
<div id="adr-modal-body" class="text-sm space-y-3 overflow-y-auto max-h-[60vh]">
<span class="loading loading-spinner loading-sm"></span>
</div>
</div>
<form method="dialog" class="modal-backdrop"><button>close</button></form>
</dialog>
<!-- Toolbar -->
<div class="mb-2 flex flex-wrap items-center justify-between gap-2 text-sm">
<h1 class="text-xl font-bold">Ontology Graph</h1>
@ -97,6 +113,10 @@
<p class="text-xs font-semibold text-base-content/40 uppercase tracking-wider mb-1">Artifacts</p>
<ul id="d-artifact-list" class="text-xs font-mono text-base-content/60 space-y-1 break-all"></ul>
</div>
<div id="d-adrs" class="hidden mb-3">
<p class="text-xs font-semibold text-base-content/40 uppercase tracking-wider mb-1">Validated by</p>
<ul id="d-adr-list" class="text-xs font-mono space-y-1"></ul>
</div>
<div id="d-edges" class="hidden">
<p class="text-xs font-semibold text-base-content/40 uppercase tracking-wider mb-1">Connections</p>
<ul id="d-edge-list" class="text-xs text-base-content/60 space-y-1"></ul>
@ -141,6 +161,7 @@ const nodes = (GRAPH.nodes || []).map(n => ({
description: n.description || "",
invariant: !!n.invariant,
artifact_paths: n.artifact_paths || [],
adrs: n.adrs || [],
color: POLE_COLOR[n.pole] || "#6b7280",
shape: LEVEL_SHAPE[n.level] || "ellipse",
}
@ -361,6 +382,8 @@ const dBadges = document.getElementById("d-badges");
const dDesc = document.getElementById("d-description");
const dArtifacts = document.getElementById("d-artifacts");
const dList = document.getElementById("d-artifact-list");
const dAdrs = document.getElementById("d-adrs");
const dAdrList = document.getElementById("d-adr-list");
const dEdges = document.getElementById("d-edges");
const dEdgeList = document.getElementById("d-edge-list");
@ -390,6 +413,16 @@ cy.on("tap", "node", evt => {
dArtifacts.classList.add("hidden");
}
if (d.adrs.length) {
dAdrs.classList.remove("hidden");
dAdrList.innerHTML = d.adrs.map(a =>
`<li><span class="text-success mr-1"></span>` +
`<button class="adr-link font-mono text-base-content/70 hover:text-primary underline-offset-2 hover:underline cursor-pointer bg-transparent border-none p-0" data-adr="${a}">${a}</button></li>`
).join("");
} else {
dAdrs.classList.add("hidden");
}
const conn = evt.target.connectedEdges();
if (conn.length) {
dEdges.classList.remove("hidden");
@ -454,5 +487,60 @@ document.addEventListener("mouseup", () => {
handle.classList.remove("dragging");
document.body.style.cursor = "";
});
// ── ADR modal ─────────────────────────────────────────────────
// Cached DOM handles for the ADR detail modal declared in this template.
const adrModal = document.getElementById("adr-modal");
const adrModalTitle = document.getElementById("adr-modal-title");
const adrModalBody = document.getElementById("adr-modal-body");
// Project slug for multi-project mode; empty value coerces to null.
const GRAPH_SLUG = document.getElementById("graph-slug").value || null;
// Render an ADR JSON payload as modal HTML: one labelled section per field
// (skipping `id` and empty arrays).
// Fix: field values are now HTML-escaped before being interpolated into
// innerHTML, so ADR text containing markup cannot inject into the page.
function renderAdrBody(data) {
  const esc = s => String(s).replace(/[&<>"']/g, c => ({
    "&": "&amp;", "<": "&lt;", ">": "&gt;", '"': "&quot;", "'": "&#39;"
  }[c]));
  if (data.error) {
    return `<p class="text-error">${esc(data.error)}</p>`;
  }
  const rows = Object.entries(data)
    .filter(([k]) => !["id"].includes(k))
    .map(([k, v]) => {
      const label = esc(k.replace(/_/g, " "));
      let val;
      if (Array.isArray(v)) {
        if (v.length === 0) return null;
        val = `<ul class="list-disc pl-4 space-y-0.5">${v.map(item =>
          typeof item === "object"
            ? `<li><pre class="text-xs whitespace-pre-wrap">${esc(JSON.stringify(item, null, 2))}</pre></li>`
            : `<li>${esc(item)}</li>`
        ).join("")}</ul>`;
      } else if (typeof v === "object" && v !== null) {
        val = `<pre class="text-xs whitespace-pre-wrap bg-base-300 p-2 rounded">${esc(JSON.stringify(v, null, 2))}</pre>`;
      } else {
        val = `<span class="text-base-content/80">${esc(v)}</span>`;
      }
      return `<div><p class="text-xs font-semibold text-base-content/40 uppercase tracking-wider mb-0.5">${label}</p>${val}</div>`;
    })
    .filter(Boolean)
    .join("");
  return rows || `<p class="text-base-content/50">No details available.</p>`;
}
// Open the ADR modal with a spinner, fetch the ADR JSON, and render it.
// Fix: the query string used to be built as `?${"&slug=..."}`, producing a
// malformed `?&slug=...` URL; the `?` now lives inside the conditional part.
async function fetchAdr(id) {
  adrModalTitle.textContent = id;
  adrModalBody.innerHTML = `<span class="loading loading-spinner loading-sm"></span>`;
  adrModal.showModal();
  const query = GRAPH_SLUG ? `?slug=${encodeURIComponent(GRAPH_SLUG)}` : "";
  try {
    const res = await fetch(`/api/adr/${encodeURIComponent(id)}${query}`);
    const data = await res.json();
    adrModalBody.innerHTML = renderAdrBody(data);
  } catch (err) {
    adrModalBody.innerHTML = `<p class="text-error">Failed to load ADR: ${err}</p>`;
  }
}

// Delegate clicks from any `.adr-link` button to the modal loader.
document.addEventListener("click", e => {
  const btn = e.target.closest(".adr-link");
  if (btn) fetchAdr(btn.dataset.adr);
});
</script>
{% endblock scripts %}

View File

@ -5,7 +5,14 @@
<div class="card bg-base-200 shadow-xl w-full max-w-sm">
<div class="card-body gap-4">
<div class="text-center">
{% if logo or logo_dark %}
<div class="flex justify-center mb-2">
{% if logo %}<img id="login-logo-light" src="{{ logo }}" alt="{{ slug }}" class="h-14 max-w-[12rem] object-contain">{% endif %}
{% if logo_dark %}<img id="login-logo-dark" src="{{ logo_dark }}" alt="{{ slug }}" class="h-14 max-w-[12rem] object-contain hidden">{% endif %}
</div>
{% else %}
<h1 class="text-2xl font-bold"><span style="color:#C0CCD8;">onto</span><span style="color:#E8A838;">ref</span></h1>
{% endif %}
<p class="text-base-content/60 text-sm mt-1 font-mono">{{ slug }}</p>
</div>
{% if error %}
@ -32,3 +39,27 @@
</div>
</div>
{% endblock content %}
{% block scripts %}
{% if logo or logo_dark %}
<script>
// Swap the light/dark login logo to match the current theme.
// Reads data-theme off <html> once at load, then watches it for changes
// so the logo flips without a page reload.
(function () {
  var logoLight = document.getElementById("login-logo-light");
  var logoDark = document.getElementById("login-logo-dark");
  var root = document.documentElement;

  function syncLogos(theme) {
    if (!logoLight && !logoDark) return;
    // Only a light variant configured: always show it.
    if (logoLight && !logoDark) {
      logoLight.classList.remove("hidden");
      return;
    }
    var darkActive = theme === "dark";
    if (logoLight) logoLight.classList.toggle("hidden", darkActive);
    if (logoDark) logoDark.classList.toggle("hidden", !darkActive);
  }

  syncLogos(root.getAttribute("data-theme") || "dark");

  // React to theme toggles made elsewhere in the UI.
  new MutationObserver(function (mutations) {
    for (var i = 0; i < mutations.length; i++) {
      if (mutations[i].attributeName === "data-theme") {
        syncLogos(root.getAttribute("data-theme"));
      }
    }
  }).observe(root, { attributes: true });
})();
</script>
{% endif %}
{% endblock scripts %}

View File

@ -77,6 +77,15 @@
</div>
<!-- Quick-access shortcut icons -->
<div class="flex items-center gap-0.5 flex-shrink-0">
{% if p.card %}
<button onclick="openCard('{{ p.slug }}')" title="Project card"
class="btn btn-ghost btn-xs btn-circle">
<svg class="w-3.5 h-3.5" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2"
d="M13 16h-1v-4h-1m1-4h.01M21 12a9 9 0 11-18 0 9 9 0 0118 0z"/>
</svg>
</button>
{% endif %}
<a href="/ui/{{ p.slug }}/search" title="Search"
class="btn btn-ghost btn-xs btn-circle">
<svg class="w-3.5 h-3.5" fill="none" stroke="currentColor" viewBox="0 0 24 24">
@ -100,7 +109,9 @@
</a>
</div>
</div>
{% if p.description %}
{% if p.card and p.card.tagline %}
<p class="text-sm text-base-content/60 italic leading-snug mb-1.5">{{ p.card.tagline }}</p>
{% elif p.description %}
<p class="text-sm text-base-content/70 leading-snug mb-1.5">{{ p.description }}</p>
{% endif %}
<p class="text-xs font-mono text-base-content/35 truncate mb-2" title="{{ p.root }}">{{ p.root }}</p>
@ -298,4 +309,74 @@
<a href="/ui/manage" class="btn btn-sm btn-ghost mt-3">Add a project</a>
</div>
{% endif %}
<!-- Card modal -->
<dialog id="card-modal" class="modal">
<div class="modal-box w-11/12 max-w-lg">
<div class="flex items-start justify-between mb-4">
<div>
<h3 class="font-bold text-lg font-mono" id="card-modal-slug"></h3>
<p class="text-sm text-base-content/50 italic mt-0.5" id="card-modal-tagline"></p>
</div>
<form method="dialog">
<button class="btn btn-sm btn-circle btn-ghost"></button>
</form>
</div>
<div class="space-y-3 text-sm" id="card-modal-body"></div>
</div>
<form method="dialog" class="modal-backdrop"><button>close</button></form>
</dialog>
<script>
(function () {
  // Project cards serialized server-side by Tera: slug → card metadata.
  // `json_encode | safe` emits each value as a valid JS literal
  // (string / array / null), so the object parses even for absent fields.
  var CARDS = {
    {% for p in projects %}{% if p.card %}
    "{{ p.slug }}": {
      tagline: {{ p.card.tagline | json_encode | safe }},
      description: {{ p.card.description | json_encode | safe }},
      version: {{ p.card.version | json_encode | safe }},
      status: {{ p.card.status | json_encode | safe }},
      url: {{ p.card.url | json_encode | safe }},
      tags: {{ p.card.tags | json_encode | safe }},
      features: {{ p.card.features | json_encode | safe }},
    },
    {% endif %}{% endfor %}
  };
  // Open the card modal for a project slug; no-op when no card exists.
  // NOTE(review): card fields are interpolated into innerHTML unescaped —
  // assumes card data is trusted (project-authored NCL); confirm upstream.
  window.openCard = function(slug) {
    var c = CARDS[slug];
    if (!c) return;
    document.getElementById("card-modal-slug").textContent = slug;
    document.getElementById("card-modal-tagline").textContent = c.tagline;
    var body = document.getElementById("card-modal-body");
    var html = "";
    if (c.description) {
      html += '<p class="text-base-content/80 leading-relaxed">' + c.description + '</p>';
    }
    // Version + status render together as one badge row when either is set.
    var meta = [];
    if (c.version) meta.push('<span class="badge badge-ghost badge-sm font-mono">v' + c.version + '</span>');
    if (c.status) meta.push('<span class="badge badge-outline badge-sm">' + c.status + '</span>');
    if (meta.length) html += '<div class="flex gap-2 flex-wrap">' + meta.join("") + '</div>';
    if (c.features && c.features.length) {
      html += '<div><p class="text-xs font-semibold text-base-content/40 uppercase tracking-wider mb-1.5">Features</p><ul class="space-y-1">';
      c.features.forEach(function(f) {
        html += '<li class="flex gap-2 text-xs text-base-content/70"><span class="text-primary flex-shrink-0"></span>' + f + '</li>';
      });
      html += '</ul></div>';
    }
    if (c.tags && c.tags.length) {
      html += '<div class="flex flex-wrap gap-1.5">';
      c.tags.forEach(function(t) {
        html += '<span class="badge badge-xs badge-ghost font-mono">' + t + '</span>';
      });
      html += '</div>';
    }
    if (c.url) {
      html += '<a href="' + c.url + '" target="_blank" rel="noopener" class="btn btn-xs btn-ghost gap-1 self-start border border-base-content/10">↗ ' + c.url + '</a>';
    }
    body.innerHTML = html;
    document.getElementById("card-modal").showModal();
  };
})();
</script>
{% endblock content %}

View File

@ -109,18 +109,19 @@ const resizeHandle = document.getElementById('search-resize');
const LEFT_PANEL = document.querySelector('#search-pane').closest('div.flex-col');
const CONTAINER = LEFT_PANEL.parentElement;
let results = [];
const BASE_URL = "{{ base_url }}";
let results = [];
let searchTimer = null;
let selectedItem = null;
// ── Tab switching ──────────────────────────────────────────────────────────
const tabSearch = document.getElementById('tab-search');
const tabBm = document.getElementById('tab-bookmarks');
const searchPane = document.getElementById('search-pane');
const tabSearch = document.getElementById('tab-search');
const tabBm = document.getElementById('tab-bookmarks');
const searchPane = document.getElementById('search-pane');
const bookmarksPane = document.getElementById('bookmarks-pane');
const TAB_KEY = 'ontoref-search-tab';
const TAB_KEY = 'ontoref-search-tab';
function setTab(tab) {
const isBm = tab === 'bookmarks';
@ -131,49 +132,85 @@ function setTab(tab) {
resetBtn.classList.toggle('hidden', isBm);
try { localStorage.setItem(TAB_KEY, tab); } catch (_) {}
if (isBm) renderBookmarks();
else { input.focus(); }
else input.focus();
}
tabSearch.addEventListener('click', () => setTab('search'));
tabBm.addEventListener('click', () => setTab('bookmarks'));
// ── Bookmarks ──────────────────────────────────────────────────────────────
// ── Bookmarks — server-backed ──────────────────────────────────────────────
//
// `bookmarks` is a Map<node_id, entry> kept in memory.
// Initialised from server-hydrated data; mutations go to HTTP endpoints.
// The NCL sb-NNN id is stored in entry.id so deletes don't need a lookup.
const BM_KEY = 'ontoref-bookmarks';
const bmList = document.getElementById('bookmarks-list');
const bmCount = document.getElementById('bm-count');
const bmEmpty = document.getElementById('bookmarks-empty');
const bmClearBtn = document.getElementById('btn-clear-bookmarks');
const PROJECT = slugInput.value || '__single__';
const SLUG = slugInput.value || null;
function loadBookmarks() {
try { return JSON.parse(localStorage.getItem(BM_KEY) || '[]'); } catch(_) { return []; }
// Hydrate from server — array injected by Tera at render time.
const SERVER_BOOKMARKS = {{ server_bookmarks | json_encode | safe }};
const bookmarks = new Map(); // node_id → { id, node_id, kind, title, level, term, ... }
for (const b of SERVER_BOOKMARKS) {
bookmarks.set(b.node_id, b);
}
function saveBookmarks(bms) {
try { localStorage.setItem(BM_KEY, JSON.stringify(bms)); } catch(_) {}
}
function bmKey(r) { return `${r.kind}:${r.id}:${PROJECT}`; }
function isBookmarked(r) { return loadBookmarks().some(b => b.key === bmKey(r)); }
function toggleBookmark(r) {
let bms = loadBookmarks();
const key = bmKey(r);
const idx = bms.findIndex(b => b.key === key);
if (idx >= 0) {
bms.splice(idx, 1);
function isBookmarked(r) { return bookmarks.has(r.id); }
async function toggleBookmark(r) {
if (isBookmarked(r)) {
const entry = bookmarks.get(r.id);
const url = `${BASE_URL}/search/bookmark/delete`;
const body = { id: entry.id };
if (SLUG) body.slug = SLUG;
try {
const res = await fetch(url, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(body),
});
if (res.ok) bookmarks.delete(r.id);
} catch (_) {}
} else {
bms.unshift({ key, kind: r.kind, id: r.id, title: r.title,
description: r.description, project: PROJECT,
pole: r.pole || null, level: r.level || null,
saved: Date.now() });
const url = `${BASE_URL}/search/bookmark/add`;
const body = {
node_id: r.id,
kind: r.kind || 'node',
title: r.title || r.id,
level: r.level || '',
term: input.value.trim(),
actor: 'human',
};
if (SLUG) body.slug = SLUG;
try {
const res = await fetch(url, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(body),
});
if (res.ok) {
const data = await res.json();
bookmarks.set(r.id, {
id: data.id, // sb-NNN — needed for delete
node_id: r.id,
kind: r.kind || 'node',
title: r.title || r.id,
level: r.level || '',
term: input.value.trim(),
created_at: data.created_at || '',
});
}
} catch (_) {}
}
saveBookmarks(bms);
renderBookmarks();
renderResults();
}
function renderBookmarks() {
const bms = loadBookmarks().filter(b => b.project === PROJECT);
const bms = [...bookmarks.values()];
if (bms.length > 0) {
bmCount.textContent = bms.length;
bmCount.classList.remove('hidden');
@ -189,11 +226,12 @@ function renderBookmarks() {
if (bmEmpty) bmEmpty.classList.add('hidden');
bmList.innerHTML = bms.map(b => `
<li class="bm-item cursor-pointer hover:bg-base-300 transition-colors" data-key="${esc(b.key)}">
<li class="bm-item cursor-pointer hover:bg-base-300 transition-colors" data-nid="${esc(b.node_id)}">
<div class="px-3 py-2 flex items-center gap-2">
<span class="badge badge-xs ${kindCls(b.kind)} flex-shrink-0">${b.kind}</span>
<span class="badge badge-xs ${kindCls(b.kind)} flex-shrink-0">${esc(b.kind)}</span>
<span class="text-xs font-medium truncate flex-1">${esc(b.title)}</span>
<button class="btn-unbm btn btn-ghost btn-xs btn-circle flex-shrink-0 opacity-40 hover:opacity-100 hover:text-error" data-key="${esc(b.key)}" title="Remove">
<button class="btn-unbm btn btn-ghost btn-xs btn-circle flex-shrink-0 opacity-40 hover:opacity-100 hover:text-error"
data-nid="${esc(b.node_id)}" title="Remove">
<svg class="w-3 h-3" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M6 18L18 6M6 6l12 12"/>
</svg>
@ -205,8 +243,7 @@ function renderBookmarks() {
bmList.querySelectorAll('.bm-item').forEach(el => {
el.addEventListener('click', e => {
if (e.target.closest('.btn-unbm')) return;
const key = el.dataset.key;
const bm = loadBookmarks().find(b => b.key === key);
const bm = bookmarks.get(el.dataset.nid);
if (!bm) return;
if (selectedItem) selectedItem.classList.remove('bg-base-200');
selectedItem = null;
@ -215,10 +252,22 @@ function renderBookmarks() {
});
bmList.querySelectorAll('.btn-unbm').forEach(el => {
el.addEventListener('click', e => {
el.addEventListener('click', async e => {
e.stopPropagation();
const key = el.dataset.key;
saveBookmarks(loadBookmarks().filter(b => b.key !== key));
const nid = el.dataset.nid;
const bm = bookmarks.get(nid);
if (!bm) return;
const url = `${BASE_URL}/search/bookmark/delete`;
const body = { id: bm.id };
if (SLUG) body.slug = SLUG;
try {
const res = await fetch(url, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(body),
});
if (res.ok) bookmarks.delete(nid);
} catch (_) {}
renderBookmarks();
renderResults();
});
@ -233,33 +282,48 @@ function showDetailBm(bm) {
<div class="flex-1 min-w-0">
<h2 class="font-bold text-base leading-tight">${esc(bm.title)}</h2>
<div class="flex flex-wrap gap-1 mt-1.5">
<span class="badge badge-xs ${kindCls(bm.kind)}">${bm.kind}</span>
<span class="badge badge-xs ${kindCls(bm.kind)}">${esc(bm.kind)}</span>
${bm.level ? `<span class="badge badge-xs badge-ghost">${esc(bm.level)}</span>` : ''}
${bm.pole ? `<span class="badge badge-xs" style="background:${poleColor(bm.pole)};color:#111;border:none">${esc(bm.pole)}</span>` : ''}
</div>
</div>
<button class="btn btn-ghost btn-xs btn-circle text-warning" title="Remove bookmark"
onclick="toggleBookmark(${JSON.stringify(bm).replace(/</g,'\\u003c')})">
<button id="bm-detail-star" class="btn btn-ghost btn-xs btn-circle text-warning"
title="Remove bookmark">
<svg class="w-4 h-4" fill="currentColor" viewBox="0 0 24 24">
<path d="M5 3a2 2 0 00-2 2v16l7-3 7 3V5a2 2 0 00-2-2H5z"/>
</svg>
</button>
</div>
<p class="text-xs text-base-content/40 mb-3">${esc(bm.description)}</p>
<p class="text-xs font-mono text-base-content/30">id: ${esc(bm.id)}</p>
${bm.term ? `<p class="text-xs text-base-content/35 mb-2">Search term: <span class="font-mono">${esc(bm.term)}</span></p>` : ''}
<p class="text-xs font-mono text-base-content/30">id: ${esc(bm.node_id)}</p>
${bm.created_at ? `<p class="text-xs text-base-content/25 mt-1">${esc(bm.created_at)}</p>` : ''}
`;
document.getElementById('bm-detail-star').addEventListener('click', async () => {
await toggleBookmark({ id: bm.node_id, kind: bm.kind, title: bm.title, level: bm.level });
detail.classList.add('hidden');
detailEmpty.classList.remove('hidden');
});
}
bmClearBtn.addEventListener('click', () => {
saveBookmarks(loadBookmarks().filter(b => b.project !== PROJECT));
// Clear-all bookmarks: fire one delete request per bookmark in parallel
// (best-effort — network failures are swallowed), then wipe the local map
// and refresh both the bookmarks pane and the results list.
bmClearBtn.addEventListener('click', async () => {
  const endpoint = `${BASE_URL}/search/bookmark/delete`;
  const requests = [];
  for (const entry of bookmarks.values()) {
    const payload = { id: entry.id };
    if (SLUG) payload.slug = SLUG;
    requests.push(
      fetch(endpoint, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify(payload),
      }).catch(() => {})
    );
  }
  await Promise.all(requests);
  bookmarks.clear();
  renderBookmarks();
  renderResults();
});
// ── Persistence ────────────────────────────────────────────────────────────
const STORAGE_KEY = 'ontoref-search:' + PROJECT;
// ── Query persistence (session only — not bookmark data) ───────────────────
const STORAGE_KEY = 'ontoref-search:' + (SLUG || '__single__');
function saveQuery(q) { try { sessionStorage.setItem(STORAGE_KEY, q); } catch (_) {} }
function loadQuery() { try { return sessionStorage.getItem(STORAGE_KEY) || ''; } catch (_) { return ''; } }
@ -281,7 +345,7 @@ async function doSearch() {
try {
const res = await fetch(url);
data = await res.json();
} catch (err) {
} catch (_) {
resultsCount.textContent = 'Search error';
resultsCount.classList.remove('hidden');
return;
@ -334,15 +398,31 @@ function renderResults() {
});
});
document.querySelectorAll('.btn-star').forEach(el => {
el.addEventListener('click', e => {
el.addEventListener('click', async e => {
e.stopPropagation();
toggleBookmark(results[parseInt(el.dataset.idx)]);
await toggleBookmark(results[parseInt(el.dataset.idx)]);
});
});
}
// Build a small markdown snippet for a search result and copy it to the
// clipboard; flashes a success checkmark on the triggering button for 1.4s.
// Silently does nothing if the Clipboard API is unavailable or denied.
async function copyResultToClipboard(r, btn) {
  const parts = [];
  parts.push(`# ${r.title} [${r.kind}${r.level ? ' · ' + r.level : ''}]`);
  if (r.description) {
    parts.push('');          // blank separator line before the body
    parts.push(r.description);
  }
  if (r.path) parts.push(`\nPath: ${r.path}`);
  if (r.id) parts.push(`ID: ${r.id}`);
  const text = parts.join('\n');
  try {
    await navigator.clipboard.writeText(text);
    // Temporarily swap the icon for a success check, then restore it.
    const prevIcon = btn.innerHTML;
    btn.innerHTML = `<svg class="w-4 h-4 text-success" fill="none" stroke="currentColor" viewBox="0 0 24 24"><path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M5 13l4 4L19 7"/></svg>`;
    setTimeout(() => { btn.innerHTML = prevIcon; }, 1400);
  } catch (_) {}
}
function showDetail(idx) {
const r = results[idx];
const r = results[idx];
const starred = isBookmarked(r);
detail.classList.remove('hidden');
detailEmpty.classList.add('hidden');
@ -356,17 +436,30 @@ function showDetail(idx) {
${r.pole ? `<span class="badge badge-xs" style="background:${poleColor(r.pole)};color:#111;border:none">${esc(r.pole)}</span>` : ''}
</div>
</div>
<button id="detail-star" class="btn btn-ghost btn-xs btn-circle ${starred ? 'text-warning' : 'text-base-content/25 hover:text-warning'}" title="${starred ? 'Remove bookmark' : 'Bookmark this'}">
<svg class="w-4 h-4" fill="${starred ? 'currentColor' : 'none'}" stroke="currentColor" viewBox="0 0 24 24">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M5 3a2 2 0 00-2 2v16l7-3 7 3V5a2 2 0 00-2-2H5z"/>
</svg>
</button>
<div class="flex items-center gap-1 flex-shrink-0 mt-0.5">
<button id="detail-copy" class="btn btn-ghost btn-xs btn-circle text-base-content/25 hover:text-base-content"
title="Copy to clipboard">
<svg class="w-4 h-4" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2"
d="M8 5H6a2 2 0 00-2 2v12a2 2 0 002 2h10a2 2 0 002-2v-1M8 5a2 2 0 002 2h2a2 2 0 002-2M8 5a2 2 0 012-2h2a2 2 0 012 2m0 0h2a2 2 0 012 2v3"/>
</svg>
</button>
<button id="detail-star" class="btn btn-ghost btn-xs btn-circle ${starred ? 'text-warning' : 'text-base-content/25 hover:text-warning'}"
title="${starred ? 'Remove bookmark' : 'Bookmark this'}">
<svg class="w-4 h-4" fill="${starred ? 'currentColor' : 'none'}" stroke="currentColor" viewBox="0 0 24 24">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M5 3a2 2 0 00-2 2v16l7-3 7 3V5a2 2 0 00-2-2H5z"/>
</svg>
</button>
</div>
</div>
<p class="text-xs font-mono text-base-content/30 mb-4 truncate">${esc(r.path)}</p>
<div class="space-y-1 text-sm">${r.detail_html}</div>
`;
document.getElementById('detail-star').addEventListener('click', () => {
toggleBookmark(r);
document.getElementById('detail-copy').addEventListener('click', async e => {
await copyResultToClipboard(r, e.currentTarget);
});
document.getElementById('detail-star').addEventListener('click', async () => {
await toggleBookmark(r);
showDetail(idx);
});
}
@ -407,9 +500,9 @@ document.addEventListener('mouseup', () => {
// ── Helpers ────────────────────────────────────────────────────────────────
function kindCls(kind) { return { node: 'badge-primary', adr: 'badge-secondary', mode: 'badge-accent' }[kind] || 'badge-neutral'; }
function poleColor(p) { return { Yang: '#f59e0b', Yin: '#3b82f6', Spiral: '#8b5cf6' }[p] || '#6b7280'; }
function esc(s) { return String(s ?? '').replace(/&/g,'&amp;').replace(/</g,'&lt;').replace(/>/g,'&gt;').replace(/"/g,'&quot;'); }
function kindCls(kind) { return { node: 'badge-primary', adr: 'badge-secondary', mode: 'badge-accent' }[kind] || 'badge-neutral'; }
function poleColor(p) { return { Yang: '#f59e0b', Yin: '#3b82f6', Spiral: '#8b5cf6' }[p] || '#6b7280'; }
function esc(s) { return String(s ?? '').replace(/&/g,'&amp;').replace(/</g,'&lt;').replace(/>/g,'&gt;').replace(/"/g,'&quot;'); }
// ── Init ───────────────────────────────────────────────────────────────────

View File

@ -45,6 +45,10 @@ pub struct Node {
pub description: String,
#[serde(default)]
pub invariant: bool,
#[serde(default)]
pub artifact_paths: Vec<String>,
#[serde(default)]
pub adrs: Vec<String>,
}
/// A directed edge between two nodes.

View File

@ -31,7 +31,9 @@ def main [
}
# ── Extract existing import paths as plain text (never call nickel here) ────
let content = open $projects_file
# open --raw: .ncl is unknown to Nushell; without --raw it may parse `[]` as
# an empty list instead of a string, which would silently lose all existing entries.
let content = open --raw $projects_file
let existing_paths = (
$content
| lines
@ -49,9 +51,9 @@ def main [
let ncl_path = $"($abs)/.ontoref/project.ncl"
let filtered = ($existing_paths | where { |p| $p != $ncl_path })
if ($filtered | length) == ($existing_paths | length) {
print $" (ansi yellow)not registered(ansi reset): ($ncl_path)"
print --stderr $" (ansi yellow)not registered(ansi reset): ($ncl_path)"
} else {
print $" (ansi green)removed(ansi reset): ($ncl_path)"
print --stderr $" (ansi green)removed(ansi reset): ($ncl_path)"
}
$filtered
} else {
@ -66,7 +68,7 @@ def main [
error make { msg: $"project.ncl not found: ($ncl_path)\nCopy templates/project.ncl to ($add)/.ontoref/project.ncl and fill in the fields." }
}
if ($after_remove | any { |p| $p == $ncl_path }) {
print $" (ansi yellow)already registered(ansi reset): ($ncl_path)"
print --stderr $" (ansi yellow)already registered(ansi reset): ($ncl_path)"
$after_remove
} else {
$after_remove | append $ncl_path
@ -81,9 +83,9 @@ def main [
if not ($p | path exists) {
let project_root = ($p | str replace --regex '(/\.ontoref/project\.ncl)$' '')
if not ($project_root | path exists) {
print $" (ansi yellow)WARN(ansi reset) removing missing project (root deleted): ($project_root)"
print --stderr $" (ansi yellow)WARN(ansi reset) removing missing project (root deleted): ($project_root)"
} else {
print $" (ansi yellow)WARN(ansi reset) removing invalid project (project.ncl missing): ($p)"
print --stderr $" (ansi yellow)WARN(ansi reset) removing invalid project (project.ncl missing): ($p)"
}
null
} else {
@ -95,7 +97,7 @@ def main [
let removed = ($after_add | length) - ($valid_paths | length)
if $removed > 0 {
print $" (ansi yellow)($removed) project(s) removed — path(s) no longer exist(ansi reset)"
print --stderr $" (ansi yellow)($removed) project(s) removed — path(s) no longer exist(ansi reset)"
}
# ── Generate projects.ncl ─────────────────────────────────────────────────────
@ -112,13 +114,13 @@ def main [
}
if $dry_run {
print "── projects.ncl (dry-run) ──────────────────────────────"
print --stderr "── projects.ncl (dry-run) ──────────────────────────────"
print $output
print "────────────────────────────────────────────────────────"
print --stderr "────────────────────────────────────────────────────────"
} else {
$output | save -f $projects_file
let n = ($valid_paths | length)
let label = if $n == 1 { "project" } else { "projects" }
print $" (ansi green)OK(ansi reset) ($n) local ($label) written to ($projects_file)"
print --stderr $" (ansi green)OK(ansi reset) ($n) local ($label) written to ($projects_file)"
}
}

View File

@ -66,9 +66,9 @@ def main [] {
cp $bin_src $bin_dest
chmod +x $bin_dest
if $is_mac {
do { ^xattr -d com.apple.quarantine $bin_dest } | ignore
}
#if $is_mac {
# do { ^xattr -d com.apple.quarantine $bin_dest } | ignore
#}
print $"✓ binary ($bin_dest)"
@ -135,6 +135,38 @@ def main [] {
}
print $"✓ reflection ($reflection_dest)/ updated=($refl_updated) unchanged=($refl_skipped)"
# ── 3c. CLI templates (project.ncl, ontoref-config.ncl, ontology/ stubs) ──
# `ontoref setup` reads from $ONTOREF_ROOT/templates/ — copy the repo-level
# templates/ tree so the installed CLI works without the source repo present.
let cli_templates_src = $"($repo_root)/templates"
let cli_templates_dest = $"($data_dir)/templates"
if ($cli_templates_src | path exists) {
mkdir $cli_templates_dest
mut tmpl_updated = 0
mut tmpl_skipped = 0
for src_file in (glob $"($cli_templates_src)/**/*" | where { |f| ($f | path type) == "file" }) {
let rel = ($src_file | str replace $"($cli_templates_src)/" "")
let dest_file = $"($cli_templates_dest)/($rel)"
let dest_parent = ($dest_file | path dirname)
mkdir $dest_parent
let needs_update = if ($dest_file | path exists) {
(open --raw $src_file | hash sha256) != (open --raw $dest_file | hash sha256)
} else {
true
}
if $needs_update {
cp $src_file $dest_file
$tmpl_updated = $tmpl_updated + 1
} else {
$tmpl_skipped = $tmpl_skipped + 1
}
}
print $"✓ cli-templates ($cli_templates_dest)/ updated=($tmpl_updated) unchanged=($tmpl_skipped)"
} else {
print $" (ansi yellow)warn(ansi reset) templates/ not found at ($cli_templates_src)"
}
# ── 4. UI assets (data dir) ────────────────────────────────────────────────
let templates_src = $"($repo_root)/crates/ontoref-daemon/templates"
let public_src = $"($repo_root)/crates/ontoref-daemon/public"
@ -239,15 +271,24 @@ def main [] {
}
}
# ── 6. Install scripts (gen-projects.nu, etc.) ────────────────────────────
# The bootstrapper (ontoref-daemon-boot) looks for these at $data_dir/install/
# to validate and regenerate projects.ncl before nickel export runs.
# ── 6. Install scripts (gen-projects.nu, etc.) + hooks ────────────────────
# The bootstrapper looks for *.nu at $data_dir/install/.
# `ontoref hooks-install` looks for install/hooks/{post-commit,post-merge}.
let install_dest = $"($data_dir)/install"
mkdir $install_dest
for f in (glob $"($repo_root)/install/*.nu") {
let dest_f = $"($install_dest)/(($f | path basename))"
install-if-changed $f $dest_f $"install/(($f | path basename))"
}
let hooks_src = $"($repo_root)/install/hooks"
let hooks_dest = $"($install_dest)/hooks"
if ($hooks_src | path exists) {
mkdir $hooks_dest
for f in (glob $"($hooks_src)/*" | where { |p| ($p | path type) == "file" }) {
let dest_f = $"($hooks_dest)/(($f | path basename))"
install-if-changed $f $dest_f $"install/hooks/(($f | path basename))"
}
}
# ── 7. Dev extras: ncl-bootstrap Nu helper ────────────────────────────────
if $is_dev {

View File

@ -58,7 +58,7 @@ if [[ "$(uname)" == "Darwin" ]]; then
else
_data_dir="$HOME/.local/share/ontoref"
fi
export NICKEL_IMPORT_PATH="${NICKEL_IMPORT_PATH:+${NICKEL_IMPORT_PATH}:}${_config_dir}:${_data_dir}/schemas:${_data_dir}"
export NICKEL_IMPORT_PATH="${NICKEL_IMPORT_PATH:+${NICKEL_IMPORT_PATH}:}${_config_dir}:${_config_dir}/schemas:${_data_dir}/schemas:${_data_dir}"
# Default NATS stream topology from config dir — project can override via streams_config in config.ncl
export NATS_STREAMS_CONFIG="${NATS_STREAMS_CONFIG:-${_config_dir}/streams.json}"

View File

@ -291,26 +291,96 @@ if [[ "${_has_help}" -eq 1 ]]; then
fi
fi
# ── Fix trailing flags that require a value ────────────────────────────────────
# ── Normalize --fmt/-f: extract from any position and append after subcommand ─
_fmt_val=""
_no_fmt_args=()
_fi=0
while [[ $_fi -lt ${#REMAINING_ARGS[@]} ]]; do
_a="${REMAINING_ARGS[$_fi]}"
case "${_a}" in
--fmt|-f|--format|-fmt)
_fi=$(( _fi + 1 ))
_fmt_val="${REMAINING_ARGS[$_fi]:-}"
;;
--fmt=*|--format=*)
_fmt_val="${_a#*=}"
;;
*) _no_fmt_args+=("${_a}") ;;
esac
_fi=$(( _fi + 1 ))
done
if [[ -n "${_fmt_val}" ]]; then
REMAINING_ARGS=("${_no_fmt_args[@]+"${_no_fmt_args[@]}"}" "--fmt" "${_fmt_val}")
fi
# ── Fix trailing flags that require a value ────────────────────────────────────
if [[ "${#REMAINING_ARGS[@]}" -gt 0 ]]; then
_last="${REMAINING_ARGS[${#REMAINING_ARGS[@]}-1]}"
# shellcheck disable=SC2249
case "${_last}" in
--fmt|--format|-fmt|-f|--actor|--context|--severity|--backend|--kind|--priority|--status)
--fmt|--format|-fmt|--actor|--context|--severity|--backend|--kind|--priority|--status)
REMAINING_ARGS+=("select")
;;
esac
fi
# ── Universal --clip: capture stdout, strip ANSI, copy to clipboard ───────────
_has_clip=0
_no_clip_args=()
for _a in "${REMAINING_ARGS[@]+"${REMAINING_ARGS[@]}"}"; do
case "${_a}" in
--clip|-c) _has_clip=1 ;;
*) _no_clip_args+=("${_a}") ;;
esac
done
_strip_ansi() { sed $'s/\033\\[[0-9;]*[mGKHFJABCDEFM]//g'; }
# Copy "$1" to the system clipboard using the first available tool
# (pbcopy on macOS, xclip or wl-copy on Linux). Prints a warning and
# returns 1 when none is installed; status messages go to stderr.
_copy_to_clipboard() {
  local _tool
  for _tool in pbcopy xclip wl-copy; do
    if command -v "${_tool}" &>/dev/null; then
      case "${_tool}" in
        xclip) printf '%s' "${1}" | xclip -selection clipboard ;;
        *)     printf '%s' "${1}" | "${_tool}" ;;
      esac
      echo " ✓ Copied to clipboard" >&2
      return 0
    fi
  done
  echo " No clipboard tool found (install pbcopy, xclip, or wl-copy)" >&2
  return 1
}
# ── Delegate to Nushell dispatcher ────────────────────────────────────────────
LOCK_RESOURCE="$(determine_lock)"
if [[ -n "${LOCK_RESOURCE}" ]]; then
acquire_lock "${LOCK_RESOURCE}" 30
trap 'release_lock' EXIT INT TERM
nu "${DISPATCHER}" "${REMAINING_ARGS[@]+"${REMAINING_ARGS[@]}"}"
# --clip strategy:
# Structured --fmt (json/yaml/toml/md): non-interactive subprocess capture via stdin redirect.
# Text (no --fmt or --fmt text): pass --clip to Nushell — it handles clipboard after selection.
_fmt_is_structured=0
case "${_fmt_val}" in
json|yaml|toml|md|j|y|t|m) _fmt_is_structured=1 ;;
esac
if [[ "${_has_clip}" -eq 1 ]] && [[ "${_fmt_is_structured}" -eq 1 ]]; then
if [[ -n "${LOCK_RESOURCE}" ]]; then
acquire_lock "${LOCK_RESOURCE}" 30
trap 'release_lock' EXIT INT TERM
fi
_captured="$(nu "${DISPATCHER}" "${_no_clip_args[@]+"${_no_clip_args[@]}"}" 2>&1 < /dev/null | _strip_ansi)"
printf '%s\n' "${_captured}"
_copy_to_clipboard "${_captured}"
elif [[ "${_has_clip}" -eq 1 ]]; then
if [[ -n "${LOCK_RESOURCE}" ]]; then
acquire_lock "${LOCK_RESOURCE}" 30
trap 'release_lock' EXIT INT TERM
fi
# Text mode: pass --clip through; Nushell copies after interactive selection.
nu "${DISPATCHER}" "${_no_clip_args[@]+"${_no_clip_args[@]}"}" "--clip"
else
if [[ -n "${LOCK_RESOURCE}" ]]; then
acquire_lock "${LOCK_RESOURCE}" 30
trap 'release_lock' EXIT INT TERM
fi
nu "${DISPATCHER}" "${REMAINING_ARGS[@]+"${REMAINING_ARGS[@]}"}"
fi

View File

@ -0,0 +1,23 @@
# Constructor helpers + re-exported contracts for career artifacts.
# `apply contract` curries Nickel's merge, so `make_skill data`
# evaluates to `s.Skill & data` exactly as before.
let s = import "../schemas/career.ncl" in
let apply = fun contract => fun data => contract & data in
{
  make_skill = apply s.Skill,
  make_experience = apply s.WorkExperience,
  make_talk = apply s.Talk,
  make_positioning = apply s.Positioning,
  make_company_target = apply s.CompanyTarget,
  make_publication_card = apply s.PublicationCard,

  Skill = s.Skill,
  WorkExperience = s.WorkExperience,
  Talk = s.Talk,
  Positioning = s.Positioning,
  CompanyTarget = s.CompanyTarget,
  PublicationCard = s.PublicationCard,
  CareerConfig = s.CareerConfig,
  ProficiencyTier = s.ProficiencyTier,
  TalkStatus = s.TalkStatus,
  CompanyStatus = s.CompanyStatus,
  ProjectPubStatus = s.ProjectPubStatus,
}

View File

@ -0,0 +1,14 @@
# Constructor helpers + re-exported contracts for personal-content artifacts.
# `apply contract` curries Nickel's merge, so `make_content data`
# evaluates to `s.Content & data` exactly as before.
let s = import "../schemas/personal.ncl" in
let apply = fun contract => fun data => contract & data in
{
  make_content = apply s.Content,
  make_opportunity = apply s.Opportunity,

  Content = s.Content,
  Opportunity = s.Opportunity,
  ContentKind = s.ContentKind,
  ContentStatus = s.ContentStatus,
  OpportunityKind = s.OpportunityKind,
  OpportunityStatus = s.OpportunityStatus,
  Audience = s.Audience,
}

View File

@ -0,0 +1,9 @@
# Constructor helper + re-exported contracts for project publication cards.
let s = import "../schemas/project-card.ncl" in
{
  # make_card fields ≡ s.ProjectCard & fields (contract applied via merge).
  make_card = fun fields => s.ProjectCard & fields,
  ProjectCard = s.ProjectCard,
  SourceType = s.SourceType,
  ProjectPubStatus = s.ProjectPubStatus,
}

121
ontology/schemas/career.ncl Normal file
View File

@ -0,0 +1,121 @@
# Career schema — typed artifacts for skills, work history, talks, positioning, and publication.
# All types include `linked_nodes` referencing IDs from .ontology/core.ncl.
# This creates the DAG connection between career artifacts and the ontology.
#
# Output: .ontology/career.ncl exports to JSON → Nu script generates YAML for cv_repo.
# ── Skill ────────────────────────────────────────────────────────────────────
let proficiency_tier_type = [| 'Expert, 'Advanced, 'Intermediate, 'Foundational |] in
let skill_type = {
id | String,
name | String,
tier | proficiency_tier_type,
proficiency | Number,
years | Number | default = 0,
linked_nodes | Array String | default = [],
evidence | Array String | default = [],
note | String | default = "",
} in
# ── Work Experience ───────────────────────────────────────────────────────────
let work_experience_type = {
id | String,
company | String,
company_url | String | default = "",
position | String,
date_start | String,
date_end | String | default = "present",
location | String | default = "",
description | String | default = "",
achievements | Array String | default = [],
tools | Array String | default = [],
linked_nodes | Array String | default = [],
} in
# ── Talk / Activity ───────────────────────────────────────────────────────────
let talk_status_type = [| 'Idea, 'Proposed, 'Accepted, 'Delivered, 'Archived |] in
let talk_type = {
id | String,
title | String,
event | String,
date | String | default = "",
location | String | default = "",
description | String | default = "",
slides_url | String | default = "",
video_url | String | default = "",
repository | String | default = "",
status | talk_status_type,
linked_nodes | Array String | default = [],
} in
# ── Positioning Strategy ──────────────────────────────────────────────────────
let positioning_type = {
id | String,
name | String,
core_message | String,
target | String,
linked_nodes | Array String | default = [],
note | String | default = "",
} in
# ── Company Target ─────────────────────────────────────────────────────────────
let company_status_type = [| 'Active, 'Watching, 'Inactive, 'Applied, 'Closed |] in
let company_target_type = {
id | String,
name | String,
url | String | default = "",
status | company_status_type,
fit_signals | Array String | default = [],
linked_nodes | Array String | default = [],
note | String | default = "",
} in
# ── Publication Card ──────────────────────────────────────────────────────────
# Project cards for blog grid, CV, and proposals.
# NOTE(review): the two comments below disagree about what project_node points
# to — a node ID in .ontology/core.ncl vs the canonical card in the portfolio
# repo. Confirm which is authoritative and delete the stale one.
# project_node references a node ID in .ontology/core.ncl.
let project_pub_status_type = [| 'Active, 'Beta, 'Maintenance, 'Archived, 'Stealth |] in
# Career overlay for a project card.
# project_node references the canonical card in the portfolio repo.
# Only career-specific fields live here — display metadata lives in portfolio/projects/{id}/card.ncl.
let publication_card_type = {
project_node | String,
featured | Bool | default = false,
sort_order | Number | default = 0,
# Optional overrides — when career context needs a different tagline than the portfolio card
tagline_override | String | default = "",
} in
# ── Root export ───────────────────────────────────────────────────────────────
# Public surface of this schema module: each type above is re-exported under a
# PascalCase name, and CareerConfig is the contract that consumer projects'
# career.ncl files must satisfy (every field defaults to [] so a minimal file
# can export an empty record).
{
ProficiencyTier = proficiency_tier_type,
TalkStatus = talk_status_type,
CompanyStatus = company_status_type,
ProjectPubStatus = project_pub_status_type,
Skill = skill_type,
WorkExperience = work_experience_type,
Talk = talk_type,
Positioning = positioning_type,
CompanyTarget = company_target_type,
PublicationCard = publication_card_type,
# Contract for the consumer-side career configuration file.
CareerConfig = {
skills | Array skill_type | default = [],
experiences | Array work_experience_type | default = [],
talks | Array talk_type | default = [],
positioning | Array positioning_type | default = [],
companies | Array company_target_type | default = [],
publications | Array publication_card_type | default = [],
},
}

View File

@ -25,6 +25,7 @@ let edge_type = [|
description | String,
invariant | Bool | default = false,
artifact_paths | Array String | default = [],
adrs | Array String | default = [],
},
Edge = {
@ -36,7 +37,7 @@ let edge_type = [|
},
CoreConfig = {
nodes | Array { id | String, name | String, pole | pole_type, level | level_type, description | String, invariant | Bool, artifact_paths | Array String },
nodes | Array { id | String, name | String, pole | pole_type, level | level_type, description | String, invariant | Bool, artifact_paths | Array String, adrs | Array String },
edges | Array { from | String, to | String, kind | edge_type, weight | Number, note | String },
},
}

View File

@ -5,6 +5,7 @@ let repo_kind_type = [|
'Library,
'AgentResource,
'Mixed,
'PersonalOntology,
|] in
let consumer_type = [|
@ -147,6 +148,9 @@ let manifest_type = {
claude | claude_baseline_type | default = {},
default_audit | audit_level_type | default = 'Standard,
default_mode | String | default = "dev",
# Node ID this project maps to in the ontology DAG.
# Used by portfolio tooling to cross-reference publication cards.
ontology_node | String | default = "",
} in
{

View File

@ -0,0 +1,85 @@
# Personal ontology schema — types for content artifacts and career opportunities.
# Used by PersonalOntology projects to track what to write, where to apply, and how to present work.
#
# Design decisions:
# - Content and Opportunity are independent types; linked_nodes connects them to core.ncl node IDs.
# - kind/status/audience use closed enums to force explicit categorization;
#   fit_signals is free-form Array String (not an enum).
# - PersonalConfig is the export contract for .ontology/personal.ncl in consumer projects.
# What a piece of content is: written artifact categories.
let content_kind_type = [|
'BlogPost,
'ConferenceProposal,
'CV,
'Application,
'Email,
'Thread,
|] in
# Editorial lifecycle of a content item.
let content_status_type = [|
'Idea,
'Draft,
'Review,
'Published,
'Rejected,
'Archived,
|] in
# What an opportunity is: venues and engagements worth pursuing.
let opportunity_kind_type = [|
'Conference,
'Job,
'Grant,
'Collaboration,
'Podcast,
|] in
# Pursuit lifecycle of an opportunity.
let opportunity_status_type = [|
'Watching,
'Evaluating,
'Active,
'Submitted,
'Closed,
|] in
# Intended readership; drives the writing register in content modes.
let audience_type = [|
'Technical,
'HiringManager,
'GeneralPublic,
'Community,
'Academic,
|] in
# A content artifact to produce. title may be empty while still at 'Idea.
let content_type = {
id | String,
kind | content_kind_type,
title | String | default = "",
status | content_status_type,
linked_nodes | Array String | default = [],
audience | audience_type,
note | String | default = "",
} in
# A career opportunity to evaluate. deadline is a free-form string ("" = none).
let opportunity_type = {
id | String,
kind | opportunity_kind_type,
name | String,
status | opportunity_status_type,
fit_signals | Array String | default = [],
linked_nodes | Array String | default = [],
deadline | String | default = "",
note | String | default = "",
} in
# Module export: enums, record types, and the consumer-side contract.
{
ContentKind = content_kind_type,
ContentStatus = content_status_type,
OpportunityKind = opportunity_kind_type,
OpportunityStatus = opportunity_status_type,
Audience = audience_type,
Content = content_type,
Opportunity = opportunity_type,
# Contract for .ontology/personal.ncl; both lists default to empty.
PersonalConfig = {
contents | Array content_type | default = [],
opportunities | Array opportunity_type | default = [],
},
}

View File

@ -0,0 +1,36 @@
# Project card schema — typed self-definition for any project.
# Source of truth for display metadata, web assets, and portfolio publication.
#
# Each project maintains card.ncl locally and publishes (copies) to the
# portfolio repo alongside its assets/. The portfolio is self-contained —
# it does not depend on the original project repo being alive.
# Where the card's project lives relative to the portfolio.
let source_type = [| 'Local, 'Remote, 'Historical |] in
# Publication lifecycle; 'Stealth keeps a card registered but unlisted.
# NOTE(review): duplicated in the career schema's project_pub_status_type —
# consider a single shared definition to avoid drift.
let project_pub_status_type = [| 'Active, 'Beta, 'Maintenance, 'Archived, 'Stealth |] in
# Display metadata for one project. All URL/date fields default to "" so a
# minimal card only needs id, name, tagline, description, and status.
let project_card_type = {
id | String, # matches ontology_node in jpl DAG
name | String,
tagline | String,
description | String,
version | String | default = "",
status | project_pub_status_type,
source | source_type | default = 'Local,
url | String | default = "",
repo | String | default = "",
docs | String | default = "",
logo | String | default = "",
started_at | String | default = "",
tags | Array String | default = [],
tools | Array String | default = [],
features | Array String | default = [],
featured | Bool | default = false,
sort_order | Number | default = 0,
} in
# Module export.
{
SourceType = source_type,
ProjectPubStatus = project_pub_status_type,
ProjectCard = project_card_type,
}

82
ontoref
View File

@ -277,23 +277,95 @@ if [[ "${_has_help}" -eq 1 ]]; then
fi
fi
# ── Normalize --fmt/-f: extract from any position and append after subcommand ─
# Allows: `ontoref -f json d s` → `ontoref d s --fmt json`
# Single pass over REMAINING_ARGS: both value-style (`--fmt json`) and
# equals-style (`--fmt=json`) spellings are consumed; all other args are
# preserved in their original order in _no_fmt_args.
_fmt_val=""
_no_fmt_args=()
_fi=0
while [[ $_fi -lt ${#REMAINING_ARGS[@]} ]]; do
_a="${REMAINING_ARGS[$_fi]}"
case "${_a}" in
# Flag whose value sits in the next slot; advance to consume it.
# NOTE(review): a trailing `--fmt` with no value leaves _fmt_val empty, so
# the flag is silently dropped from REMAINING_ARGS — confirm intended.
--fmt|-f|--format|-fmt)
_fi=$(( _fi + 1 ))
_fmt_val="${REMAINING_ARGS[$_fi]:-}"
;;
# Equals-style carries its value inline after the first `=`.
# NOTE(review): `-f=VALUE` is not matched here — verify no caller uses it.
--fmt=*|--format=*)
_fmt_val="${_a#*=}"
;;
*) _no_fmt_args+=("${_a}") ;;
esac
_fi=$(( _fi + 1 ))
done
# Re-append as canonical `--fmt VALUE` after the subcommand words. The
# `${arr[@]+...}` expansion is the nounset-safe idiom for possibly-empty arrays.
if [[ -n "${_fmt_val}" ]]; then
REMAINING_ARGS=("${_no_fmt_args[@]+"${_no_fmt_args[@]}"}" "--fmt" "${_fmt_val}")
fi
# ── Fix trailing flags that require a value ────────────────────────────────
if [[ "${#REMAINING_ARGS[@]}" -gt 0 ]]; then
_last="${REMAINING_ARGS[${#REMAINING_ARGS[@]}-1]}"
# shellcheck disable=SC2249
case "${_last}" in
--fmt|--format|-fmt|-f|--actor|--context|--severity|--backend|--kind|--priority|--status)
--fmt|--format|-fmt|--actor|--context|--severity|--backend|--kind|--priority|--status)
REMAINING_ARGS+=("select")
;;
esac
fi
# ── Universal --clip: capture stdout, strip ANSI, copy to clipboard ───────────
# Detect --clip/-c anywhere in the args; _no_clip_args keeps everything else so
# the dispatcher can be invoked without the flag when bash handles the capture.
_has_clip=0
_no_clip_args=()
for _a in "${REMAINING_ARGS[@]+"${REMAINING_ARGS[@]}"}"; do
case "${_a}" in
--clip|-c) _has_clip=1 ;;
*) _no_clip_args+=("${_a}") ;;
esac
done
# Filter ANSI CSI escape sequences (colors/cursor moves) from stdin.
# NOTE(review): the final-byte class [mGKHFJABCDEFM] is an approximation of the
# CSI terminators actually emitted — confirm it covers the dispatcher's output.
_strip_ansi() { sed $'s/\033\\[[0-9;]*[mGKHFJABCDEFM]//g'; }
# Copy a string to the system clipboard via the first available backend.
# $1 — payload to copy (sent verbatim via printf '%s'; no trailing newline).
# Tries pbcopy (macOS), then xclip (X11), then wl-copy (Wayland). On success a
# confirmation goes to stderr; with no backend found, a warning goes to stderr
# and the function returns 1.
_copy_to_clipboard() {
  local _payload="${1}"
  local _tool=""
  if command -v pbcopy &>/dev/null; then
    _tool="pbcopy"
  elif command -v xclip &>/dev/null; then
    _tool="xclip -selection clipboard"
  elif command -v wl-copy &>/dev/null; then
    _tool="wl-copy"
  else
    echo " No clipboard tool found (install pbcopy, xclip, or wl-copy)" >&2
    return 1
  fi
  # Intentionally unquoted: _tool may carry arguments (xclip -selection ...).
  printf '%s' "${_payload}" | ${_tool}
  echo " ✓ Copied to clipboard" >&2
}
LOCK_RESOURCE="$(determine_lock)"
if [[ -n "${LOCK_RESOURCE}" ]]; then
acquire_lock "${LOCK_RESOURCE}" 30
trap 'release_lock' EXIT INT TERM
nu "${DISPATCHER}" "${REMAINING_ARGS[@]+"${REMAINING_ARGS[@]}"}"
# --clip strategy:
# Structured --fmt (json/yaml/toml/md): non-interactive subprocess capture via stdin redirect.
# Text (no --fmt or --fmt text): pass --clip to Nushell — it handles clipboard after selection.
_fmt_is_structured=0
case "${_fmt_val}" in
json|yaml|toml|md|j|y|t|m) _fmt_is_structured=1 ;;
esac
if [[ "${_has_clip}" -eq 1 ]] && [[ "${_fmt_is_structured}" -eq 1 ]]; then
if [[ -n "${LOCK_RESOURCE}" ]]; then
acquire_lock "${LOCK_RESOURCE}" 30
trap 'release_lock' EXIT INT TERM
fi
_captured="$(nu "${DISPATCHER}" "${_no_clip_args[@]+"${_no_clip_args[@]}"}" 2>&1 < /dev/null | _strip_ansi)"
printf '%s\n' "${_captured}"
_copy_to_clipboard "${_captured}"
elif [[ "${_has_clip}" -eq 1 ]]; then
if [[ -n "${LOCK_RESOURCE}" ]]; then
acquire_lock "${LOCK_RESOURCE}" 30
trap 'release_lock' EXIT INT TERM
fi
# Text mode: pass --clip through; Nushell copies after interactive selection.
nu "${DISPATCHER}" "${_no_clip_args[@]+"${_no_clip_args[@]}"}" "--clip"
else
if [[ -n "${LOCK_RESOURCE}" ]]; then
acquire_lock "${LOCK_RESOURCE}" 30
trap 'release_lock' EXIT INT TERM
fi
nu "${DISPATCHER}" "${REMAINING_ARGS[@]+"${REMAINING_ARGS[@]}"}"
fi

View File

@ -64,7 +64,7 @@ def "main" [shortcut?: string] {
}
def show-usage-brief [] {
let caller = ($env.ONTOREF_CALLER? | default "onref")
let caller = ($env.ONTOREF_CALLER? | default "ontoref")
print $"\nUsage: ($caller) [command] [options]\n"
print $"Use '($caller) help' for available commands\n"
}
@ -76,7 +76,7 @@ def "main help" [group?: string] {
}
let actor = ($env.ONTOREF_ACTOR? | default "developer")
let cmd = ($env.ONTOREF_CALLER? | default "./onref")
let cmd = ($env.ONTOREF_CALLER? | default "ontoref")
let brief = adrs-brief
let adr_status = $"($brief.accepted)A/($brief.superseded)S/($brief.proposed)P"
@ -100,6 +100,8 @@ def "main help" [group?: string] {
fmt-cmd $"($cmd) help coder" ".coder/ process memory: record, log, triage, publish"
fmt-cmd $"($cmd) help manifest" "operational modes, publication services, layers"
fmt-cmd $"($cmd) help describe" "project self-knowledge: what, how, why, impact"
fmt-cmd $"($cmd) help search" "ontology search + bookmarks (NCL-persisted)"
fmt-cmd $"($cmd) help qa" "Q&A knowledge base: query, add, list"
fmt-cmd $"($cmd) help log" "action audit trail, follow, filter"
print ""
@ -107,7 +109,9 @@ def "main help" [group?: string] {
print ""
fmt-cmd $"($cmd) init" "run actor-configured init mode (from actor_init in config)"
fmt-cmd $"($cmd) run <mode-id>" "execute a mode (shortcut for mode run)"
fmt-cmd $"($cmd) find <term>" "search ontology: selector, detail, connections, usage"
fmt-cmd $"($cmd) s <term>" "search ontology nodes, ADRs, modes (--fmt <fmt> --clip)"
fmt-cmd $"($cmd) q <term>" "query QA entries (word-overlap score, ontology fallback) (--fmt --clip)"
fmt-cmd $"($cmd) qs <term>" "QA-first then ontology | sq: ontology-first then QA"
fmt-cmd $"($cmd) about" "project identity and summary"
fmt-cmd $"($cmd) diagram" "terminal box diagram of project architecture"
fmt-cmd $"($cmd) overview" "single-screen project snapshot: identity, crates, health"
@ -134,10 +138,11 @@ def "main help" [group?: string] {
print $" (ansi cyan)ad(ansi reset) → adr (ansi cyan)d(ansi reset) → describe (ansi cyan)ck(ansi reset) → check (ansi cyan)con(ansi reset) → constraint"
print $" (ansi cyan)rg(ansi reset) → register (ansi cyan)bkl(ansi reset) → backlog (ansi cyan)cfg(ansi reset) → config (ansi cyan)cod(ansi reset) → coder"
print $" (ansi cyan)mf(ansi reset) → manifest (ansi cyan)dg(ansi reset) → diagram (ansi cyan)md(ansi reset) → mode (ansi cyan)st(ansi reset) → status"
print $" (ansi cyan)fm(ansi reset) → form (ansi cyan)f(ansi reset) → find (ansi cyan)ru(ansi reset) → run \(mode\) (ansi cyan)sv(ansi reset) → services"
print $" (ansi cyan)nv(ansi reset) → nats"
print $" (ansi cyan)fm(ansi reset) → form (ansi cyan)s(ansi reset) → search (ansi cyan)ru(ansi reset) → run \(mode\) (ansi cyan)sv(ansi reset) → services"
print $" (ansi cyan)nv(ansi reset) → nats (ansi cyan)q(ansi reset) → qa query (ansi cyan)f(ansi reset) → search \(alias\)"
print ""
print $" (ansi dark_gray)Tip: any group accepts(ansi reset) (ansi cyan)h(ansi reset) (ansi dark_gray)for help,(ansi reset) (ansi cyan)?(ansi reset) (ansi dark_gray)for interactive selector, or bare for picker(ansi reset)"
print $" (ansi dark_gray)Any command:(ansi reset) (ansi cyan)--fmt|-f(ansi reset) (ansi dark_gray)text*|json|yaml|toml|md(ansi reset) · (ansi cyan)--clip(ansi reset) (ansi dark_gray)copy output to clipboard(ansi reset)"
print ""
}
@ -416,9 +421,14 @@ def "main describe why" [id: string, --fmt (-f): string = ""] {
log-action $"describe why ($id)" "read"
let f = (resolve-fmt $fmt [text table json yaml toml]); describe why $id --fmt $f
}
def "main describe find" [term: string, --level: string = "", --fmt (-f): string = ""] {
log-action $"describe find ($term)" "read"
describe find $term --level $level --fmt $fmt
# Search the ontology via the describe layer. Rest words are joined into one
# term so multi-word queries need no quoting; --fmt/--clip are forwarded as-is.
def "main describe search" [...words: string, --level: string = "", --fmt (-f): string = "", --clip] {
let term = ($words | str join ' ')
log-action $"describe search ($term)" "read"
describe search $term --level $level --fmt $fmt --clip=$clip
}
# Legacy spelling — delegates to `main describe search` unchanged.
def "main describe find" [...words: string, --level: string = "", --fmt (-f): string = "", --clip] {
let term = ($words | str join ' ')
main describe search $term --level $level --fmt $fmt --clip=$clip
}
def "main describe features" [id?: string, --fmt (-f): string = "", --actor: string = ""] {
@ -433,6 +443,12 @@ def "main describe connections" [--fmt (-f): string = "", --actor: string = ""]
describe connections --fmt $f --actor $actor
}
# Show project extensions. Unlike the search commands above, the format is
# validated here via resolve-fmt against the supported set (text/json/md)
# before delegating; --dump and --clip pass through untouched.
def "main describe extensions" [--fmt (-f): string = "", --actor: string = "", --dump: string = "", --clip] {
log-action "describe extensions" "read"
let f = (resolve-fmt $fmt [text json md])
describe extensions --fmt $f --actor $actor --dump $dump --clip=$clip
}
# ── Diagram ───────────────────────────────────────────────────────────────────
def "main diagram" [] {
@ -570,8 +586,10 @@ def "main d con" [--fmt (-f): string = "", --actor: string = ""] { main describe
def "main d tools" [--fmt (-f): string = "", --actor: string = ""] { main describe tools --fmt $fmt --actor $actor }
def "main d t" [--fmt (-f): string = "", --actor: string = ""] { main describe tools --fmt $fmt --actor $actor }
def "main d tls" [--fmt (-f): string = "", --actor: string = ""] { main describe tools --fmt $fmt --actor $actor }
def "main d find" [term: string, --level: string = "", --fmt (-f): string = ""] { main describe find $term --level $level --fmt $fmt }
def "main d fi" [term: string, --level: string = "", --fmt (-f): string = ""] { main describe find $term --level $level --fmt $fmt }
def "main d search" [...words: string, --level: string = "", --fmt (-f): string = "", --clip] { main describe search ...($words) --level $level --fmt $fmt --clip=$clip }
def "main d s" [...words: string, --level: string = "", --fmt (-f): string = "", --clip] { main describe search ...($words) --level $level --fmt $fmt --clip=$clip }
def "main d find" [...words: string, --level: string = "", --fmt (-f): string = "", --clip] { main describe search ...($words) --level $level --fmt $fmt --clip=$clip }
def "main d fi" [...words: string, --level: string = "", --fmt (-f): string = "", --clip] { main describe search ...($words) --level $level --fmt $fmt --clip=$clip }
def "main d features" [id?: string, --fmt (-f): string = "", --actor: string = ""] { main describe features $id --fmt $fmt --actor $actor }
def "main d fea" [id?: string, --fmt (-f): string = "", --actor: string = ""] { main describe features $id --fmt $fmt --actor $actor }
def "main d f" [id?: string, --fmt (-f): string = "", --actor: string = ""] { main describe features $id --fmt $fmt --actor $actor }
@ -582,6 +600,8 @@ def "main d why" [id: string, --fmt (-f): string = ""] { main describe why $id -
def "main d w" [id: string, --fmt (-f): string = ""] { main describe why $id --fmt $fmt }
def "main d connections" [--fmt (-f): string = "", --actor: string = ""] { main describe connections --fmt $fmt --actor $actor }
def "main d conn" [--fmt (-f): string = "", --actor: string = ""] { main describe connections --fmt $fmt --actor $actor }
def "main d extensions" [--fmt (-f): string = "", --actor: string = "", --dump: string = "", --clip] { main describe extensions --fmt $fmt --actor $actor --dump $dump --clip=$clip }
def "main d ext" [--fmt (-f): string = "", --actor: string = "", --dump: string = "", --clip] { main describe extensions --fmt $fmt --actor $actor --dump $dump --clip=$clip }
def "main bkl" [action?: string] { main backlog $action }
def "main bkl help" [] { help-group "backlog" }
@ -669,8 +689,75 @@ def "main run" [id?: string, --dry-run (-n), --yes (-y)] {
}
def "main ru" [id?: string, --dry-run (-n), --yes (-y)] { main run $id --dry-run=$dry_run --yes=$yes }
def "main find" [term: string, --level: string = "", --fmt (-f): string = ""] { main describe find $term --level $level --fmt $fmt }
def "main f" [term: string, --level: string = "", --fmt (-f): string = ""] { main describe find $term --level $level --fmt $fmt }
# Search ontology nodes, ADRs and modes. Interactive picker in TTY; list in non-TTY/pipe.
# Supports --fmt and --clip (handled by the bash wrapper for all commands universally).
# Top-level entry point: logs the query as a read action, then delegates to the
# describe layer. $fmt is forwarded raw — validation happens downstream.
def "main search" [
...words: string, # search term (multi-word, no quotes needed)
--level: string = "", # filter by level: Axiom | Tension | Practice | Project
--fmt (-f): string = "", # output format: text* | json (j) | yaml (y) | toml (t) | md (m)
--clip, # copy selected result to clipboard
] {
let term = ($words | str join ' ')
log-action $"search ($term)" "read"
describe search $term --level $level --fmt $fmt --clip=$clip
}
# Alias for search.
# All three aliases below forward every flag to `main search` unchanged; they
# exist so `s`, `find`, and `f` resolve at the CLI.
def "main s" [
...words: string, # search term (multi-word, no quotes needed)
--level: string = "", # filter by level: Axiom | Tension | Practice | Project
--fmt (-f): string = "", # output format: text* | json (j) | yaml (y) | toml (t) | md (m)
--clip, # copy selected result to clipboard
] { main search ...($words) --level $level --fmt $fmt --clip=$clip }
# Alias for search (legacy).
def "main find" [
...words: string, # search term (multi-word, no quotes needed)
--level: string = "", # filter by level: Axiom | Tension | Practice | Project
--fmt (-f): string = "", # output format: text* | json (j) | yaml (y) | toml (t) | md (m)
--clip, # copy selected result to clipboard
] { main search ...($words) --level $level --fmt $fmt --clip=$clip }
# Alias for search (legacy).
def "main f" [
...words: string, # search term (multi-word, no quotes needed)
--level: string = "", # filter by level: Axiom | Tension | Practice | Project
--fmt (-f): string = "", # output format: text* | json (j) | yaml (y) | toml (t) | md (m)
--clip, # copy selected result to clipboard
] { main search ...($words) --level $level --fmt $fmt --clip=$clip }
# Search QA entries with word-overlap scoring; falls back to ontology if no QA hit.
def "main q" [
...words: string, # query term (multi-word, no quotes needed)
--global (-g), # also search ONTOREF_ROOT global qa.ncl
--no-fallback, # QA only — skip ontology fallback when no QA hit
--fmt (-f): string = "", # output format: text* | json (j) | yaml (y) | toml (t) | md (m)
--clip, # copy output to clipboard
] {
let term = ($words | str join ' ')
log-action $"q ($term)" "read"
qa search $term --global=$global --no-fallback=$no_fallback --fmt $fmt --clip=$clip
}
# QA-first search with ontology fallback.
# Identical to `main q` except --no-fallback is not exposed, so the ontology
# fallback inside `qa search` always remains available.
def "main qs" [
...words: string, # query term (multi-word, no quotes needed)
--global (-g), # also search ONTOREF_ROOT global qa.ncl
--fmt (-f): string = "", # output format: text* | json (j) | yaml (y) | toml (t) | md (m)
--clip, # copy output to clipboard
] {
let term = ($words | str join ' ')
log-action $"qs ($term)" "read"
qa search $term --global=$global --fmt $fmt --clip=$clip
}
# Ontology search + QA results appended.
# NOTE(review): --clip is forwarded to BOTH commands below; if each copies its
# own output, the QA copy will overwrite the ontology copy — confirm intended.
def "main sq" [
...words: string, # query term (multi-word, no quotes needed)
--level: string = "", # filter ontology by level: Axiom | Tension | Practice | Project
--fmt (-f): string = "", # output format: text* | json (j) | yaml (y) | toml (t) | md (m)
--clip, # copy output to clipboard
] {
let term = ($words | str join ' ')
log-action $"sq ($term)" "read"
describe search $term --level $level --fmt $fmt --clip=$clip
qa search $term --no-fallback --fmt $fmt --clip=$clip
}
def "main dg" [] { main diagram }
def "main h" [group?: string] { main help $group }
@ -720,7 +807,7 @@ def "main setup" [
--gen-keys: list<string> = [], # generate auth keys; format: "role:label" e.g. ["admin:dev", "viewer:ci"]
] {
log-action "setup" "write"
let ontoref_root = (project-root)
let ontoref_root = $env.ONTOREF_ROOT # install data dir — templates and install/ scripts live here
let cwd = ($env.PWD | path expand)
let valid_kinds = ["Service" "Library" "DevWorkspace" "PublishedCrate" "AgentResource" "Mixed"]
@ -729,7 +816,7 @@ def "main setup" [
}
print ""
print $" (ansi white_bold)ontoref setup(ansi reset) ($cwd) (ansi dark_gray)(kind: ($kind))(ansi reset)"
print $" (ansi white_bold)ontoref setup(ansi reset) ($cwd) (ansi dark_gray)kind: ($kind)(ansi reset)"
if not ($parent | is-empty) {
print $" (ansi dark_gray)parents: ($parent | str join ', ')(ansi reset)"
}
@ -779,7 +866,7 @@ def "main setup" [
| str replace '{{ ui_section }}' $ui_section
| save -f $config_ncl
if ($logo_file | is-not-empty) {
print $" (ansi green)✓(ansi reset) config.ncl created (ansi dark_gray)(logo: ($logo_file))(ansi reset)"
print $" (ansi green)✓(ansi reset) config.ncl created (ansi dark_gray)logo: ($logo_file)(ansi reset)"
} else {
print $" (ansi green)✓(ansi reset) config.ncl created (ansi dark_gray)(no logo found in assets/)(ansi reset)"
}
@ -851,7 +938,7 @@ def "main setup" [
print $" (ansi green)✓(ansi reset) .ontology/manifest.ncl created"
} else {
let parent_slugs = ($resolved_parents | each { |p| $p.slug } | str join ", ")
print $" (ansi green)✓(ansi reset) .ontology/manifest.ncl created (ansi dark_gray)(parents: ($parent_slugs))(ansi reset)"
print $" (ansi green)✓(ansi reset) .ontology/manifest.ncl created (ansi dark_gray)parents: ($parent_slugs)(ansi reset)"
}
}
@ -881,6 +968,12 @@ def "main setup" [
| save -f $qa_dst
print $" (ansi green)✓(ansi reset) reflection/qa.ncl created"
}
let bm_dst = $"($refl_dir)/search_bookmarks.ncl"
if not ($bm_dst | path exists) {
"let s = import \"search_bookmarks\" in\n\n{\n entries = [],\n} | s.BookmarkStore\n"
| save -f $bm_dst
print $" (ansi green)✓(ansi reset) reflection/search_bookmarks.ncl created"
}
# ── 6. Registration in projects.ncl ─────────────────────────────────────────
let projects_file = $"($env.HOME)/.config/ontoref/projects.ncl"
@ -1141,7 +1234,7 @@ def "main project-add" [
project_path: string, # absolute path to the project root
] {
log-action $"project-add ($project_path)" "write"
let ontoref_root = (project-root)
let ontoref_root = $env.ONTOREF_ROOT
let project_ncl = $"($project_path)/.ontoref/project.ncl"
let template = $"($ontoref_root)/templates/project.ncl"
@ -1171,7 +1264,7 @@ def "main project-remove" [
project_path: string, # absolute path to the project root
] {
log-action $"project-remove ($project_path)" "write"
let ontoref_root = (project-root)
let ontoref_root = $env.ONTOREF_ROOT
# Read slug before gen-projects removes the entry — project.ncl may still exist on disk.
let ncl_path = $"($project_path)/.ontoref/project.ncl"
let slug = if ($ncl_path | path exists) {
@ -1193,7 +1286,7 @@ def "main project-add-remote" [
--check-git, # verify git remote is reachable before registering
] {
log-action $"project-add-remote ($slug)" "write"
let ontoref_root = (project-root)
let ontoref_root = $env.ONTOREF_ROOT
if $check_git {
let r = (do { ^git ls-remote --exit-code --heads $remote_url } | complete)
@ -1211,14 +1304,14 @@ def "main project-remove-remote" [
slug: string, # project slug to remove
] {
log-action $"project-remove-remote ($slug)" "write"
let ontoref_root = (project-root)
let ontoref_root = $env.ONTOREF_ROOT
^nu $"($ontoref_root)/install/gen-remote-projects.nu" --remove $slug
}
# List all registered projects (local and remote).
def "main project-list" [] {
log-action "project-list" "read"
let ontoref_root = (project-root)
let ontoref_root = $env.ONTOREF_ROOT
print $"(ansi white_bold)Local projects:(ansi reset)"
^nu $"($ontoref_root)/install/gen-projects.nu" --dry-run
print ""
@ -1235,7 +1328,7 @@ def "main hooks-install" [
project_path: string = ".", # absolute or relative path to the project root (default: current dir)
] {
log-action $"hooks-install ($project_path)" "write"
let ontoref_root = (project-root)
let ontoref_root = $env.ONTOREF_ROOT
let target = ($project_path | path expand)
let git_hooks_dir = $"($target)/.git/hooks"
@ -1263,7 +1356,7 @@ def "main hooks-install" [
print ""
print $" Set (ansi cyan)ONTOREF_TOKEN(ansi reset) in your shell to enable attribution."
print $" Your token is returned by the daemon when your actor session is registered"
print $" (POST /actors/register). Store it in your shell profile or .envrc."
print " (POST /actors/register). Store it in your shell profile or .envrc."
}
# ── Init ──────────────────────────────────────────────────────────────────────

View File

@ -0,0 +1,76 @@
# Mode: draft-application — guided workflow that turns ontology nodes into a
# job/grant/collaboration application. Steps form a DAG via depends_on; each
# step declares its actor ('Agent / 'Human / 'Both) and an on_error strategy.
let d = import "../defaults.ncl" in
d.make_mode String {
id = "draft-application",
trigger = "Draft a job, grant, or collaboration application anchored in the personal ontology — projects, practices, and active tensions as evidence of fit",
preconditions = [
".ontology/core.ncl and .ontology/personal.ncl export without errors",
"A target Opportunity node exists in personal.ncl with kind in ['Job, 'Grant, 'Collaboration] and status in ['Watching, 'Evaluating, 'Active]",
"Opportunity has at least one entry in linked_nodes or fit_signals",
],
steps = [
{
id = "resolve_opportunity",
action = "Load the target Opportunity node: kind, name, fit_signals, linked_nodes, deadline, note. The fit_signals declare what the opportunity cares about — they drive node selection in subsequent steps.",
cmd = "nickel export .ontology/personal.ncl | from json | get opportunities",
actor = 'Agent,
on_error = { strategy = 'Stop },
},
{
id = "check_gate_alignment",
action = "Compare the Opportunity's fit_signals against signals accepted by active membranes in gate.ncl. 'OpportunityAlignment and 'IdentityReinforcement are the canonical fit signals. If neither active membrane accepts them, flag: this opportunity may not be the right entry point.",
# Advisory only — on_error 'Continue keeps the workflow alive on mismatch.
cmd = "nickel export .ontology/gate.ncl | from json | get membranes | where { |m| $m.active }",
actor = 'Both,
depends_on = [{ step = "resolve_opportunity" }],
on_error = { strategy = 'Continue },
note = "Gate check is advisory. Proceeding despite mismatch is valid but should be explicit.",
},
{
id = "select_narrative_nodes",
action = "From core.ncl, select nodes that best answer the opportunity's implicit questions: (1) What have you built? → Project nodes with artifact_paths. (2) Why does it matter? → Tension nodes showing what problem you are navigating. (3) How do you work? → Practice nodes. (4) What do you believe? → Axiom nodes with invariant = true.",
actor = 'Both,
depends_on = [
{ step = "resolve_opportunity" },
{ step = "check_gate_alignment" },
],
on_error = { strategy = 'Stop },
},
{
id = "resolve_career_trajectory",
action = "From state.ncl career dimension, extract current_state → desired_state trajectory and its active blockers/catalysts. This becomes the 'why now' and 'where I am going' section of the application.",
# NOTE(review): `'career` inside this Nushell pipeline is Nickel enum
# syntax; Nushell reads a bare single quote as the start of a string
# literal — confirm this command actually runs as written.
cmd = "nickel export .ontology/state.ncl | from json | get dimensions | where { |d| $d.id == 'career }",
actor = 'Agent,
depends_on = [{ step = "select_narrative_nodes" }],
on_error = { strategy = 'Continue },
},
{
id = "render_draft",
action = "Write the application: opening (why this opportunity from gate alignment check), evidence section (project nodes + artifact_paths as proof), methodology (practices), trajectory (career state), closing (what changes if accepted). Keep each section traceable to a node.",
actor = 'Agent,
depends_on = [{ step = "resolve_career_trajectory" }],
on_error = { strategy = 'Stop },
},
{
id = "review",
action = "Human reviews for: honest representation (does each claim link to real work?), alignment (does it answer what the opportunity actually asks?), coherence (does the narrative arc hold from opening to closing?). Revise or reject.",
actor = 'Human,
depends_on = [{ step = "render_draft" }],
on_error = { strategy = 'Stop },
},
{
id = "update_status",
action = "Update Opportunity status in .ontology/personal.ncl: 'Active if submitting, 'Closed if rejecting. Add a note with the decision rationale — this becomes institutional memory for future fit evaluations.",
actor = 'Human,
depends_on = [{ step = "review" }],
on_error = { strategy = 'Continue },
},
],
postconditions = [
"Application draft exists and is traceable to ontology nodes",
"Gate alignment check is documented regardless of outcome",
"Opportunity status updated with decision rationale in note field",
],
}

View File

@ -0,0 +1,55 @@
# Mode: draft-email — workflow for drafting a professional email grounded in
# the ontology. Linear step chain: context → node selection → state check →
# draft → human review; each step names its actor and error strategy.
let d = import "../defaults.ncl" in
d.make_mode String {
id = "draft-email",
trigger = "Draft a professional email where the ontology provides context about who you are, what you are working on, and what you want — anchored rather than improvised",
preconditions = [
".ontology/core.ncl and .ontology/state.ncl export without errors",
"Recipient context is specified: who they are, what the relationship is, what the intent of the email is",
],
steps = [
{
id = "define_context",
action = "Establish: (1) recipient — who they are and what they care about. (2) relationship — first contact / existing / following up. (3) intent — inform / request / invite / respond / close. These three determine which nodes are relevant and what register to use.",
actor = 'Human,
on_error = { strategy = 'Stop },
},
{
id = "select_narrative",
action = "Based on intent, select the minimum set of ontology nodes that provide grounding: for a first contact email, use 1-2 Project nodes with artifact_paths; for a follow-up, use state.ncl active transitions to show movement; for a close, use relevant Practice or Axiom node descriptions as shared language.",
cmd = "nickel export .ontology/core.ncl | from json",
actor = 'Both,
depends_on = [{ step = "define_context" }],
on_error = { strategy = 'Stop },
},
{
id = "check_active_state",
action = "If the email references active work, export state.ncl and confirm which dimensions are in motion. Do not reference a transition as 'in progress' if the dimension shows it as blocked. The email should reflect actual state.",
# 'Continue: a failed state export degrades the check, not the draft.
cmd = "nickel export .ontology/state.ncl | from json | get dimensions",
actor = 'Agent,
depends_on = [{ step = "select_narrative" }],
on_error = { strategy = 'Continue },
},
{
id = "render_draft",
action = "Write the email: subject line that states the intent directly; opening that establishes context without over-explaining; body that delivers the single thing the email is for; closing that makes the next step explicit. Maximum 250 words unless the intent requires more.",
actor = 'Agent,
depends_on = [{ step = "check_active_state" }],
on_error = { strategy = 'Stop },
},
{
id = "review",
action = "Human reviews for: clarity (does the first sentence state the intent?), grounding (are any claims unsupported by actual project state?), tone (does it match the relationship type?), and ask (is the request or next step unambiguous?).",
actor = 'Human,
depends_on = [{ step = "render_draft" }],
on_error = { strategy = 'Stop },
},
],
postconditions = [
"Email draft exists with explicit intent, grounded claims, and clear next step",
"No project or work referenced that contradicts current state.ncl state",
],
}

View File

@ -0,0 +1,73 @@
# Content mode: generate-article.
# Produces a blog-post draft grounded in ontology nodes. Pipeline:
# resolve the Content item's linked nodes -> resolve their edges ->
# map audience to a writing register -> outline -> draft -> human
# review -> status bump in .ontology/personal.ncl.
let d = import "../defaults.ncl" in
d.make_mode String {
  id = "generate-article",
  trigger = "Produce a blog post draft rooted in one or more ontology nodes (tensions, practices, axioms)",
  # Preconditions are checked against exported NCL — nothing implicit.
  preconditions = [
    ".ontology/core.ncl exports without errors",
    ".ontology/personal.ncl has at least one Content item with kind = 'BlogPost and status = 'Idea or 'Draft",
    "Target Content item has at least one entry in linked_nodes",
  ],
  steps = [
    {
      id = "resolve_nodes",
      action = "Export .ontology/core.ncl and extract the node records referenced by the target Content item's linked_nodes. Include their descriptions and all edges connecting them.",
      # Nushell pipeline — requires `nickel` on PATH and $linked_nodes in scope.
      cmd = "nickel export .ontology/core.ncl | from json | get nodes | where { |n| $n.id in $linked_nodes }",
      actor = 'Agent,
      on_error = { strategy = 'Stop },
    },
    {
      id = "resolve_edges",
      action = "From the exported edges, find all edges where from or to is in linked_nodes. These reveal the narrative structure: what manifests in what, what tensions exist, what validates what.",
      actor = 'Agent,
      depends_on = [{ step = "resolve_nodes" }],
      on_error = { strategy = 'Stop },
    },
    {
      id = "identify_audience",
      action = "Read the target Content item's audience field. Map it to a writing register: Technical=implementation details + code; HiringManager=outcomes + credibility; Community=story + invitation; Academic=rigor + citations.",
      actor = 'Both,
      depends_on = [{ step = "resolve_nodes" }],
      on_error = { strategy = 'Stop },
    },
    {
      # Fan-in: outline needs both the edge structure and the audience register.
      id = "render_outline",
      action = "Produce a structured outline: opening tension (from node descriptions), concrete examples (from practices/projects linked), resolution or open question (from active tensions in state.ncl). Adapt register to audience.",
      actor = 'Agent,
      depends_on = [
        { step = "resolve_edges" },
        { step = "identify_audience" },
      ],
      on_error = { strategy = 'Stop },
    },
    {
      id = "draft",
      action = "Write the full article from the outline. Anchor every claim to a node or edge. Do not introduce content not represented in the ontology without flagging it as an extension.",
      actor = 'Agent,
      depends_on = [{ step = "render_outline" }],
      on_error = { strategy = 'Stop },
    },
    {
      id = "review",
      action = "Human reviews draft for accuracy (does it represent the actual tensions?), audience fit (does it land for the target?), and completeness (does it say what needs to be said and stop?).",
      actor = 'Human,
      depends_on = [{ step = "draft" }],
      on_error = { strategy = 'Stop },
    },
    {
      id = "update_status",
      action = "Update the Content item status in .ontology/personal.ncl from 'Idea to 'Draft or from 'Draft to 'Review based on outcome of review step.",
      actor = 'Human,
      depends_on = [{ step = "review" }],
      # Bookkeeping only — a failed status update must not void the draft.
      on_error = { strategy = 'Continue },
    },
  ],
  postconditions = [
    "A blog post draft exists rooted in the specified ontology nodes",
    "The draft does not contradict any invariant node (invariant = true)",
    "Content item status updated in .ontology/personal.ncl",
  ],
}

View File

@ -0,0 +1,68 @@
# Content mode: update-cv.
# Generates CV sections adapted to an opportunity context: select Project
# nodes, traverse their Practice edges for the skills section, and read
# the career dimension from state.ncl for the narrative arc.
let d = import "../defaults.ncl" in
d.make_mode String {
  id = "update-cv",
  trigger = "Generate CV sections adapted to a specific opportunity context (job, grant, collaboration) using projects and practices from the ontology",
  preconditions = [
    ".ontology/core.ncl exports without errors — at least one Project node and one Practice node exist",
    ".ontology/state.ncl exports without errors — career dimension is defined",
    "A target context is specified: either an Opportunity node from personal.ncl or a stated purpose (e.g., 'infrastructure engineering role', 'open source grant')",
  ],
  steps = [
    {
      id = "resolve_context",
      action = "Determine the target audience and framing. If an Opportunity node is given, read its kind, fit_signals, and note. If a stated purpose, classify it into an audience type: Technical / HiringManager / Academic / Community.",
      actor = 'Both,
      on_error = { strategy = 'Stop },
    },
    {
      id = "select_projects",
      action = "From core.ncl Project nodes, select those relevant to the target context. Relevance is determined by: (1) node pole alignment with context (Yang for engineering roles, Yin for research/creative), (2) artifact_paths showing real artifacts, (3) edges showing which practices they validate.",
      cmd = "nickel export .ontology/core.ncl | from json | get nodes | where { |n| $n.level == 'Project }",
      actor = 'Both,
      depends_on = [{ step = "resolve_context" }],
      on_error = { strategy = 'Stop },
    },
    {
      id = "resolve_practices",
      action = "For each selected Project node, traverse outgoing 'ValidatedBy and 'ManifestsIn edges to find linked Practice nodes. These become the skills and methodologies section of the CV.",
      actor = 'Agent,
      depends_on = [{ step = "select_projects" }],
      on_error = { strategy = 'Stop },
    },
    {
      id = "resolve_career_state",
      action = "Export state.ncl and read the career dimension: current_state, desired_state, and active transitions. This informs the CV narrative arc — what you are moving toward, not just what you have done.",
      cmd = "nickel export .ontology/state.ncl | from json | get dimensions | where { |d| $d.id == 'career }",
      actor = 'Agent,
      depends_on = [{ step = "resolve_context" }],
      # Optional input: the mode still renders without a career dimension.
      on_error = { strategy = 'Continue },
      note = "career dimension may not exist in all personal ontology implementations — step continues if absent.",
    },
    {
      # Fan-in: sections need both the practice graph and the career state.
      id = "render_sections",
      action = "Generate CV sections: (1) Summary — 3 sentences from career dimension narrative + key axioms. (2) Projects — one paragraph per selected Project, anchored to artifact_paths. (3) Practices — bullet list from resolved practices. (4) Trajectory — from career state transitions. Adapt register to context audience.",
      actor = 'Agent,
      depends_on = [
        { step = "resolve_practices" },
        { step = "resolve_career_state" },
      ],
      on_error = { strategy = 'Stop },
    },
    {
      id = "review",
      action = "Human reviews for completeness (does it show the work?), accuracy (does each claim link to a real artifact or decision?), and framing (does the summary reflect the desired_state, not just current_state?).",
      actor = 'Human,
      depends_on = [{ step = "render_sections" }],
      on_error = { strategy = 'Stop },
    },
  ],
  postconditions = [
    "CV sections generated and anchored to Project + Practice nodes",
    "Summary narrative consistent with career dimension desired_state",
    "No claims made that are not traceable to an ontology node or artifact_path",
  ],
}

View File

@ -0,0 +1,73 @@
# Content mode: write-cfp.
# Produces a conference proposal grounded in a Project/Practice node and
# matched against a Conference Opportunity from personal.ncl, including a
# non-blocking fit check against active gate.ncl signals.
let d = import "../defaults.ncl" in
d.make_mode String {
  id = "write-cfp",
  trigger = "Produce a conference proposal (CFP) grounded in a Project or Practice node and matched to a specific conference opportunity",
  preconditions = [
    ".ontology/core.ncl exports without errors",
    ".ontology/personal.ncl has at least one Opportunity with kind = 'Conference and status in ['Watching, 'Evaluating]",
    "Target conference opportunity has at least one entry in linked_nodes pointing to a Project or Practice node",
  ],
  steps = [
    {
      id = "resolve_talk_node",
      action = "Load the Project or Practice node(s) referenced in the conference Opportunity's linked_nodes. Extract id, name, description, and all edges. This is the core of what the talk is about.",
      cmd = "nickel export .ontology/core.ncl | from json",
      actor = 'Agent,
      on_error = { strategy = 'Stop },
    },
    {
      id = "resolve_conference",
      action = "Load the target Opportunity node from .ontology/personal.ncl. Note: name, deadline, fit_signals, and note field. The fit_signals should map to gate.ncl signal types that are currently active.",
      cmd = "nickel export .ontology/personal.ncl | from json | get opportunities | where { |o| $o.kind == 'Conference }",
      actor = 'Agent,
      depends_on = [{ step = "resolve_talk_node" }],
      on_error = { strategy = 'Stop },
    },
    {
      id = "extract_narrative",
      action = "From the linked nodes and their edges, build the narrative arc: what tension does this talk address, what practice does it validate, what axiom does it ground in. This becomes the CFP abstract structure.",
      actor = 'Both,
      depends_on = [{ step = "resolve_conference" }],
      on_error = { strategy = 'Stop },
    },
    {
      id = "check_fit",
      action = "Verify that the conference's fit_signals align with active signals in gate.ncl. If 'OpportunityAlignment or 'DepthDemonstrated are not in the active membrane, flag the mismatch before writing.",
      cmd = "nickel export .ontology/gate.ncl | from json | get membranes | where { |m| $m.active }",
      actor = 'Both,
      depends_on = [{ step = "extract_narrative" }],
      # Advisory check — 'Continue keeps the mode running on mismatch.
      on_error = { strategy = 'Continue },
      note = "Mismatch is a warning, not a blocker — the operator decides whether to proceed.",
    },
    {
      id = "render_cfp",
      action = "Write the CFP: title (from node name + tension framing), abstract (from narrative arc, 300-500 words), speaker bio anchored to the Project/Practice node's artifact_paths and ADRs, what the audience will take away.",
      actor = 'Agent,
      depends_on = [{ step = "check_fit" }],
      on_error = { strategy = 'Stop },
    },
    {
      id = "review",
      action = "Human reviews for accuracy (does the abstract represent what will actually be said?), fit (does it match the conference's expected depth and audience?), and tone (is it an invitation, not a lecture?).",
      actor = 'Human,
      depends_on = [{ step = "render_cfp" }],
      on_error = { strategy = 'Stop },
    },
    {
      id = "update_opportunity",
      action = "If proceeding with submission: update Opportunity status from 'Watching/'Evaluating to 'Active in .ontology/personal.ncl. If rejecting: set to 'Closed with a note explaining why.",
      actor = 'Human,
      depends_on = [{ step = "review" }],
      # Bookkeeping only — a failed status update must not void the CFP draft.
      on_error = { strategy = 'Continue },
    },
  ],
  postconditions = [
    "A CFP draft exists grounded in a specific Project or Practice node",
    "Conference Opportunity status updated to reflect decision",
    "Fit signal check documented — either confirmed or flagged",
  ],
}

View File

@ -209,7 +209,7 @@ export def "constraints" [
export def "adr help" [] {
let actor = ($env.ONTOREF_ACTOR? | default "developer")
let cmd = ($env.ONTOREF_CALLER? | default "./onref")
let cmd = ($env.ONTOREF_CALLER? | default "ontoref")
print ""
print "ADR commands:"
print $" ($cmd) adr list list all ADRs with status"

View File

@ -243,10 +243,11 @@ export def "describe why" [
# Extracts doc comments from Rust source, finds examples/tests, shows related nodes.
# Human: interactive selector loop. Agent: structured JSON.
export def "describe find" [
export def "describe search" [
term: string, # Search term (case-insensitive substring match)
--level: string = "", # Filter by level: Axiom | Tension | Practice | Project
--fmt: string = "",
--clip, # Copy selected result to clipboard after rendering
]: nothing -> nothing {
let root = (project-root)
let actor = (actor-default)
@ -287,7 +288,17 @@ export def "describe find" [
}
if $f == "json" or $f == "yaml" or $f == "toml" {
let results = ($matches | each { |n| build-howto $n $nodes $edges $root })
# Use $matches directly — no daemon/build-howto needed for structured output.
let results = ($matches | each { |n| {
id: $n.id,
name: ($n.name? | default ""),
level: ($n.level? | default ""),
description: ($n.description? | default ""),
pole: ($n.pole? | default ""),
invariant: ($n.invariant? | default false),
edges_from: ($edges | where from == $n.id | select kind to),
edges_to: ($edges | where to == $n.id | select kind from),
}})
let payload = { term: $term, count: ($results | length), results: $results }
match $f {
"json" => { print ($payload | to json) },
@ -304,11 +315,132 @@ export def "describe find" [
}
if ($matches | length) == 1 {
render-howto ($matches | first) $nodes $edges $root
let node = ($matches | first)
render-howto $node $nodes $edges $root
if $clip {
let h = (build-howto $node $nodes $edges $root)
clip-text (howto-to-md-string $h)
}
return
}
find-interactive-loop $matches $nodes $edges $root $term
# No TTY (subprocess, pipe, CI): print summary list without interactive selector.
let is_tty = (do { ^test -t 0 } | complete | get exit_code) == 0
if not $is_tty {
print ""
print $" (ansi white_bold)Search:(ansi reset) '($term)' ($matches | length) results"
print ""
for m in $matches {
let level_str = ($m.level? | default "" | fill -w 9)
let name_str = ($m.name? | default $m.id)
let desc_str = ($m.description? | default "")
print $" (ansi cyan)($level_str)(ansi reset) (ansi white_bold)($m.id)(ansi reset) ($name_str)"
if ($desc_str | is-not-empty) {
print $" (ansi dark_gray)($desc_str)(ansi reset)"
}
}
print ""
return
}
find-interactive-loop $matches $nodes $edges $root $term $clip
}
# Backward-compatible alias — delegates to describe search.
# Kept so existing scripts and habits using `describe find` keep working
# after the rename to `describe search` (flagged as "legacy alias" in help).
export def "describe find" [
  term: string,          # Search term (case-insensitive substring match)
  --level: string = "",  # Filter by level: Axiom | Tension | Practice | Project
  --fmt: string = "",    # Output format (text | json | yaml | toml | md)
  --clip,                # Copy selected result to clipboard after rendering
]: nothing -> nothing {
  describe search $term --level $level --fmt $fmt --clip=$clip
}
# Read and export a qa.ncl store, returning its `entries` list.
# A missing file and a failed `nickel export` both yield an empty list,
# so callers never have to special-case an absent or broken QA store.
def qa-load-entries [qa_path: string]: nothing -> list {
  if not ($qa_path | path exists) {
    return []
  }
  let export_result = (do { ^nickel export --format json $qa_path } | complete)
  if $export_result.exit_code == 0 {
    $export_result.stdout | from json | get entries? | default []
  } else {
    []
  }
}
# Word-overlap score: count of query words present (as case-insensitive
# substrings) in the combined question + answer + tags text of an entry.
#
# Fix: an empty `words` list now scores 0 instead of piping an empty list
# into `math sum`; `where | length` is also the idiomatic counting form.
def qa-score-entry [words: list, entry: record]: nothing -> int {
  let text = ($"($entry.question? | default '') ($entry.answer? | default '') ($entry.tags? | default [] | str join ' ')" | str downcase)
  $words | where { |w| $text | str contains $w } | length
}
# Search Q&A entries in reflection/qa.ncl with word-overlap scoring.
# Falls back to describe search when no QA hits are found.
#
# Scoring: each query word longer than 2 chars that appears in an entry's
# question/answer/tags counts 1 (see qa-score-entry); entries scoring 0
# are dropped and the rest sorted best-first.
#
# Fixes vs. previous revision:
#   - the "no QA hits" notice now goes to stderr, so `--fmt json` output
#     from the ontology fallback stays machine-parseable on stdout;
#   - $env.ONTOREF_ROOT is read with `?`, so --global no longer errors
#     when the variable is unset (falls back to the project root, a no-op).
export def "qa search" [
  term: string, # Natural-language query
  --global (-g), # Also search ONTOREF_ROOT qa.ncl
  --no-fallback, # Do not fall back to ontology search
  --fmt: string = "",
  --clip, # Copy output to clipboard after rendering
]: nothing -> nothing {
  let root = (project-root)
  let actor = (actor-default)
  # Agents default to JSON, humans to text; explicit --fmt always wins.
  let f = if ($fmt | is-not-empty) { $fmt } else if $actor == "agent" { "json" } else { "text" }
  # Drop words of <= 2 chars — too noisy for substring matching.
  let words = ($term | str downcase | split words | where { |w| ($w | str length) > 2 })
  let project_entries = (qa-load-entries $"($root)/reflection/qa.ncl")
    | each { |e| $e | insert scope "project" }
  mut entries = $project_entries
  if $global {
    # Optional access: ONTOREF_ROOT may be unset in minimal environments.
    let global_root = ($env.ONTOREF_ROOT? | default $root)
    if $global_root != $root {
      let global_entries = (qa-load-entries $"($global_root)/reflection/qa.ncl")
        | each { |e| $e | insert scope "global" }
      $entries = ($entries | append $global_entries)
    }
  }
  let scored = ($entries
    | each { |e| $e | insert _score (qa-score-entry $words $e) }
    | where { |e| $e._score > 0 }
    | sort-by _score --reverse
  )
  if ($scored | is-empty) {
    if not $no_fallback {
      # stderr: keep stdout clean for the fallback's structured output.
      print --stderr $" (ansi dark_gray)No QA entries matching '($term)' — searching ontology…(ansi reset)"
      describe search $term --fmt $fmt --clip=$clip
    } else {
      print $" No QA entries matching '($term)'."
    }
    return
  }
  if $f == "json" {
    # _score is an internal ranking field — never exposed to consumers.
    let out = ($scored | reject _score | to json)
    print $out
    if $clip { clip-text $out }
    return
  }
  # Text rendering; mirror printed lines into clip_lines (ANSI-free copy).
  mut clip_lines: list<string> = []
  for e in $scored {
    let scope_tag = $"(ansi dark_gray)[($e.scope)](ansi reset)"
    let id_tag = $"(ansi cyan)($e.id)(ansi reset)"
    print $"($scope_tag) ($id_tag) (ansi white_bold)($e.question)(ansi reset)"
    if ($e.answer? | default "" | is-not-empty) {
      print $" ($e.answer)"
    }
    print ""
    if $clip {
      $clip_lines = ($clip_lines | append $"[($e.scope)] ($e.id) ($e.question)")
      if ($e.answer? | default "" | is-not-empty) {
        $clip_lines = ($clip_lines | append $" ($e.answer)")
      }
      $clip_lines = ($clip_lines | append "")
    }
  }
  if $clip and ($clip_lines | is-not-empty) {
    clip-text ($clip_lines | str join "\n")
  }
}
# ── HOWTO builder ─────────────────────────────────────────────────────────────
@ -377,6 +509,100 @@ def find-tests [root: string, artifact_path: string]: nothing -> list<record> {
} | compact
}
# Copy text to the system clipboard.
# Probes tools in priority order — pbcopy (macOS), xclip (X11),
# wl-copy (Wayland) — and reports the outcome on stderr so stdout
# stays clean for piped output.
def clip-text [text: string]: nothing -> nothing {
  let tool = if (which pbcopy | is-not-empty) {
    "pbcopy"
  } else if (which xclip | is-not-empty) {
    "xclip"
  } else if (which "wl-copy" | is-not-empty) {
    "wl-copy"
  } else {
    ""
  }
  match $tool {
    "pbcopy" => { $text | ^pbcopy },
    "xclip" => { $text | ^xclip -selection clipboard },
    "wl-copy" => { $text | ^wl-copy },
    _ => {
      print --stderr " No clipboard tool found (install pbcopy, xclip, or wl-copy)"
      return
    },
  }
  print --stderr " ✓ Copied to clipboard"
}
# Build a plain markdown string from a howto record (mirrors render-howto-md).
# Sections are emitted in the same order as the on-screen renderer:
# title, What, Source, Examples, Tests, Related, Used by, Validated by.
# Used by the --clip paths so clipboard content matches the rendered detail.
def howto-to-md-string [h: record]: nothing -> string {
  # Accumulate lines and join once — avoids repeated string concatenation.
  mut lines: list<string> = []
  let inv = if $h.invariant { " **invariant**" } else { "" }
  $lines = ($lines | append $"# ($h.id)($inv)")
  $lines = ($lines | append "")
  $lines = ($lines | append $"**Level**: ($h.level) **Name**: ($h.name)")
  $lines = ($lines | append "")
  $lines = ($lines | append "## What")
  $lines = ($lines | append "")
  $lines = ($lines | append $h.what)
  if ($h.what_docs | is-not-empty) {
    $lines = ($lines | append "")
    $lines = ($lines | append $h.what_docs)
  }
  if ($h.source | is-not-empty) {
    $lines = ($lines | append "")
    $lines = ($lines | append "## Source")
    $lines = ($lines | append "")
    for s in $h.source {
      # Directory entries carry a `modules` list; single-file entries do not.
      if ($s.modules? | is-not-empty) {
        $lines = ($lines | append $"- `($s.path)/`")
        # NOTE(review): `str replace ".rs" ""` strips the FIRST ".rs"
        # occurrence, not strictly the suffix — fine for typical *.rs
        # basenames, but a name containing ".rs" mid-string is clipped early.
        let mods = ($s.modules | each { |m| $m | str replace ".rs" "" } | where { |m| $m != "mod" })
        if ($mods | is-not-empty) {
          let mod_str = ($mods | each { |m| $"`($m)`" } | str join ", ")
          $lines = ($lines | append $" Modules: ($mod_str)")
        }
      } else {
        $lines = ($lines | append $"- `($s.path)`")
      }
    }
  }
  if ($h.examples | is-not-empty) {
    $lines = ($lines | append "")
    $lines = ($lines | append "## Examples")
    $lines = ($lines | append "")
    for ex in $h.examples {
      # Fenced shell block per example, optional caption below it.
      $lines = ($lines | append "```sh")
      $lines = ($lines | append $ex.cmd)
      $lines = ($lines | append "```")
      if ($ex.description | is-not-empty) { $lines = ($lines | append $ex.description) }
      $lines = ($lines | append "")
    }
  }
  if ($h.tests | is-not-empty) {
    $lines = ($lines | append "")
    $lines = ($lines | append "## Tests")
    $lines = ($lines | append "")
    for t in $h.tests {
      $lines = ($lines | append "```sh")
      $lines = ($lines | append $t.cmd)
      $lines = ($lines | append "```")
      if ($t.description | is-not-empty) { $lines = ($lines | append $t.description) }
      $lines = ($lines | append "")
    }
  }
  if ($h.related_to | is-not-empty) {
    $lines = ($lines | append "")
    $lines = ($lines | append "## Related")
    $lines = ($lines | append "")
    for r in $h.related_to { $lines = ($lines | append $"- → `($r.id)` ($r.name)") }
  }
  if ($h.used_by | is-not-empty) {
    $lines = ($lines | append "")
    $lines = ($lines | append "## Used by")
    $lines = ($lines | append "")
    for u in $h.used_by { $lines = ($lines | append $"- ← `($u.id)` ($u.name)") }
  }
  if ($h.adrs | is-not-empty) {
    $lines = ($lines | append "")
    $lines = ($lines | append "## Validated by")
    $lines = ($lines | append "")
    for adr in $h.adrs { $lines = ($lines | append $"- `($adr)`") }
  }
  $lines = ($lines | append "")
  $lines | str join "\n"
}
# Build full HOWTO record for a node.
def build-howto [
n: record,
@ -403,7 +629,7 @@ def build-howto [
$source_files = ($source_files | append { path: $a, entry: ($entry | path basename) })
}
# List public source files in the directory.
let rs_files = (glob $"($full)/*.rs" | each { |f| $f | path basename } | sort)
let rs_files = (glob ($full | path join "*.rs") | each { |f| $f | path basename } | sort)
$source_files = ($source_files | append { path: $a, modules: $rs_files })
} else if ($full | str ends-with ".rs") {
let docs = (extract-rust-docs $full)
@ -452,6 +678,7 @@ def build-howto [
tests: $tests,
related_to: $related,
used_by: $used_by,
adrs: ($n.adrs? | default []),
}
}
@ -545,6 +772,13 @@ def render-howto [
print $" (ansi yellow)←(ansi reset) (ansi cyan)($u.id)(ansi reset) ($u.name)"
}
}
if ($h.adrs | is-not-empty) {
print ""
print $" (ansi white_bold)Validated by(ansi reset)"
for adr in $h.adrs {
print $" (ansi magenta)◆(ansi reset) (ansi cyan)($adr)(ansi reset)"
}
}
print ""
}
@ -617,6 +851,12 @@ def render-howto-md [h: record] {
print ""
for u in $h.used_by { print $"- ← `($u.id)` ($u.name)" }
}
if ($h.adrs | is-not-empty) {
print ""
print "## Validated by"
print ""
for adr in $h.adrs { print $"- `($adr)`" }
}
print ""
}
@ -628,6 +868,7 @@ def find-interactive-loop [
edges: list<record>,
root: string,
term: string,
clip: bool,
] {
let match_count = ($matches | length)
print ""
@ -652,10 +893,12 @@ def find-interactive-loop [
let node_matches = ($matches | where id == $picked_id)
if ($node_matches | is-empty) { continue }
render-howto ($node_matches | first) $all_nodes $edges $root
let selected_node = ($node_matches | first)
render-howto $selected_node $all_nodes $edges $root
# Offer to jump to a related node, back to results, or quit.
let h = (build-howto ($node_matches | first) $all_nodes $edges $root)
let h = (build-howto $selected_node $all_nodes $edges $root)
if $clip { clip-text (howto-to-md-string $h) }
let conn_ids = ($h.related_to | get id) | append ($h.used_by | get id) | uniq
if ($conn_ids | is-not-empty) {
let jump_items = ($conn_ids | append "← back" | append "← quit")
@ -665,7 +908,12 @@ def find-interactive-loop [
let jumped = ($all_nodes | where id == $jump)
if ($jumped | is-not-empty) {
render-howto ($jumped | first) $all_nodes $edges $root
let jumped_node = ($jumped | first)
render-howto $jumped_node $all_nodes $edges $root
if $clip {
let jh = (build-howto $jumped_node $all_nodes $edges $root)
clip-text (howto-to-md-string $jh)
}
}
}
}
@ -1175,6 +1423,23 @@ def load-all-adrs [root: string]: nothing -> list<record> {
} | compact
}
# List ontology extension stems beyond the three core files.
#
# Scans <root>/.ontology/*.ncl, drops core.ncl/state.ncl/gate.ncl, and
# returns the remaining stems (basename without the .ncl suffix), sorted.
# Returns an empty list when no extensions exist.
def list-ontology-extensions [root: string]: nothing -> list<string> {
  let dir = $"($root)/.ontology"
  # Always-present files that are not "extensions".
  let core = ["core.ncl", "state.ncl", "gate.ncl"]
  glob ($dir | path join "*.ncl")
  | each { |f| $f | path basename }
  | where { |f| $f not-in $core }
  # Anchored regex strips only the trailing extension — a plain
  # `str replace ".ncl" ""` would clip the first occurrence anywhere.
  | each { |f| $f | str replace -r '\.ncl$' '' }
  | sort
}
# Export a single ontology extension file: <root>/.ontology/<stem>.ncl.
# Returns null when the file is absent; otherwise delegates to
# daemon-export-safe with the project's nickel import path so imports
# inside the extension resolve.
def load-ontology-extension [root: string, stem: string]: nothing -> any {
  let file = $"($root)/.ontology/($stem).ncl"
  if not ($file | path exists) { return null }
  let ip = (nickel-import-path $root)
  daemon-export-safe $file --import-path $ip
}
# ── Impact tracer ───────────────────────────────────────────────────────────────
def trace-impacts [
@ -1809,3 +2074,161 @@ export def "describe connections" [
print ""
}
}
# Coerce any NCL value to a plain string safe for a GFM table cell.
# Uses `to json` throughout — accepts any input type including nothing.
# NOTE(review): recent Nushell pretty-prints `to json` over multiple lines;
# the `^\[|\]$` anchors assume a single-line rendering — confirm against
# the pinned Nu version (see ADR-006) or consider `to json -r`.
def md-cell []: any -> any {
  let value = $in
  let t = ($value | describe)
  if ($t | str starts-with "table") or ($t | str starts-with "list") {
    # Flatten list-ish values: "[a, b]" -> "a, b" with quotes removed.
    $value | to json | str replace -ar '^\[|\]$' '' | str replace -a '"' '' | str trim
  } else if ($t | str starts-with "record") {
    $value | to json
  } else if $t == "nothing" {
    ""
  } else {
    # Scalars: JSON-encode, then drop the quotes that strings acquire.
    $value | to json | str replace -ar '^"|"$' ''
  }
}
# Render one value as a markdown section body (no heading).
# Dispatches on runtime type: tables become per-row key/value blocks,
# lists become bullet items, records become a key/value bullet list,
# scalars are JSON-encoded with surrounding quotes stripped.
def render-val-md [val: any]: nothing -> any {
  if $val == null { return "" }
  let t = ($val | describe)
  if ($t | str starts-with "table") {
    # Render each record as vertical key: value block, separated by ---
    let cols = ($val | columns)
    $val | each { |row|
      $cols | each { |c|
        let v = ($row | get --optional $c)
        let cell = if $v == null { "" } else { $v | md-cell }
        $"**($c)**: ($cell) "
      } | str join "\n"
    } | str join "\n\n---\n\n"
  } else if ($t | str starts-with "list") {
    if ($val | is-empty) {
      "_empty_"
    } else {
      # split row returns list<string> which each can accept; avoids each on any-typed val
      # NOTE(review): splitting on ", " assumes `to json` yields single-line
      # comma-space-separated output — confirm against the pinned Nu version
      # (ADR-006); a pretty-printed JSON would not split on this separator.
      $val | to json | str replace -ar '^\[|\]$' '' | str replace -a '"' '' | str trim
      | split row ", " | each { |item| $"- ($item | str trim)" } | str join "\n"
    }
  } else if ($t | str starts-with "record") {
    # One bullet per field; nested values flattened via md-cell.
    $val | columns | each { |c|
      let raw = ($val | get $c)
      let v = if $raw == null { "" } else { $raw | md-cell }
      $"- **($c)**: ($v)"
    } | str join "\n"
  } else {
    $val | to json | str replace -ar '^"|"$' ''
  }
}
# Try to render a section via a Tera template at {root}/layouts/{stem}/{section}.tera.
# Returns the rendered string if the template exists, null otherwise.
# Lists/tables are wrapped as {items: …} so templates iterate a stable key;
# records are passed through as the template context directly.
def render-section-tera [root: string, stem: string, section: string, val: any]: nothing -> any {
  let tmpl = $"($root)/layouts/($stem)/($section).tera"
  if not ($tmpl | path exists) { return null }
  let t = ($val | describe)
  let ctx = if ($t | str starts-with "table") or ($t | str starts-with "list") {
    {items: $val}
  } else {
    $val
  }
  # tera-render — presumably a sibling module helper; confirm its module path.
  $ctx | tera-render $tmpl
}
# Render an arbitrary extension record as Markdown.
# Each top-level field becomes a "## field" section; a matching Tera
# template under layouts/{stem}/ wins, otherwise the generic value
# renderer is used. The document title is "# {stem}".
def render-extension-md [data: record, stem: string, root: string]: nothing -> any {
  mut parts: list<string> = [$"# ($stem)"]
  for key in ($data | columns) {
    let val = ($data | get $key)
    let templated = (render-section-tera $root $stem $key $val)
    let body = if $templated == null { render-val-md $val } else { $templated }
    $parts = ($parts | append $"\n## ($key)\n\n($body)\n")
  }
  $parts | str join "\n"
}
# List and optionally dump ontology extension files (.ontology/*.ncl beyond core/state/gate)
# Without --dump: prints/emits the list of extension stems.
# With --dump <stem>: exports that extension and renders it in the chosen
# format (md | json | yaml | default table/text via emit-output).
export def "describe extensions" [
  --fmt: string = "",
  --actor: string = "",
  --dump: string = "", # stem to dump (e.g. career, personal); omit to list
  --clip, # copy output to clipboard (dump only)
]: nothing -> nothing {
  let root = (project-root)
  let a = if ($actor | is-not-empty) { $actor } else { (actor-default) }
  # Agents default to JSON, humans to text; explicit --fmt always wins.
  let f = if ($fmt | is-not-empty) { $fmt } else if $a == "agent" { "json" } else { "text" }
  let exts = (list-ontology-extensions $root)
  if ($dump | is-not-empty) {
    let data = (load-ontology-extension $root $dump)
    if $data == null {
      # Keep the error shape format-appropriate (JSON record vs plain text).
      if $f == "json" {
        print ({"error": $"extension '($dump)' not found"} | to json)
      } else {
        print $"Extension '($dump).ncl' not found in .ontology/"
      }
      return
    }
    # Non-record exports (list/scalar) are wrapped so downstream renderers
    # can always treat the payload as a record.
    let is_rec = ($data | describe | str starts-with "record")
    let wrapped = if $is_rec { $data } else { {value: $data} }
    match $f {
      "md" => {
        let md = (render-extension-md $wrapped $dump $root)
        # NOTE(review): pipes into `clip` — this file defines `clip-text`;
        # confirm a `clip` command is in scope, else this should be
        # `clip-text $md`.
        if $clip { $md | clip } else { print $md }
      },
      "json" => { print ($wrapped | to json) },
      "yaml" => { print ($wrapped | to yaml) },
      _ => {
        emit-output $wrapped $f {||
          print ""
          print $"(ansi white_bold)EXTENSION: ($dump)(ansi reset)"
          print $"(ansi dark_gray)─────────────────────────────────(ansi reset)"
          for key in ($wrapped | columns) {
            let val = ($wrapped | get $key)
            let t = ($val | describe)
            print $"\n(ansi cyan_bold)($key)(ansi reset)"
            if ($t | str starts-with "list") {
              if ($val | is-empty) {
                print " (empty)"
              } else if (($val | first | describe) | str starts-with "record") {
                # Lists of records render best as a table.
                print ($val | table)
              } else {
                for item in $val { print $" · ($item)" }
              }
            } else if ($t | str starts-with "record") {
              print ($val | table)
            } else {
              print $" ($val)"
            }
          }
          print ""
        }
      }
    }
    return
  }
  # List mode: no --dump given.
  let payload = {extensions: $exts}
  emit-output $payload $f {||
    print ""
    print $"(ansi white_bold)ONTOLOGY EXTENSIONS(ansi reset)"
    print $"(ansi dark_gray)─────────────────────────────────(ansi reset)"
    if ($exts | is-empty) {
      print " (no extensions — only core/state/gate declared)"
    } else {
      for stem in $exts {
        print $" (ansi cyan)◆(ansi reset) ($stem).ncl"
      }
      print ""
      print $"(ansi dark_gray)Use --dump <stem> to inspect a specific extension(ansi reset)"
    }
    print ""
  }
}

View File

@ -5,6 +5,7 @@
export-env {
let root = (
$env.CURRENT_FILE
| path expand # canonicalize — resolves ".." from relative `use ../modules/env.nu` paths
| path dirname # reflection/modules
| path dirname # reflection
| path dirname # <root>

View File

@ -21,7 +21,7 @@ export def fmt-info [text: string] {
# verb_pos: which word (0-indexed from after caller) to highlight.
export def fmt-cmd [cmd: string, desc: string = "", --verb-pos (-v): int = 0] {
let parts = ($cmd | split row " ")
let caller = ($env.ONTOREF_CALLER? | default "./onref")
let caller = ($env.ONTOREF_CALLER? | default "ontoref")
let caller_parts = ($caller | split row " " | length)
let after = ($parts | skip $caller_parts)
let colored = if ($after | is-empty) {

View File

@ -6,7 +6,7 @@ use ../modules/store.nu [daemon-export-safe]
use ../modules/forms.nu ["forms list"]
export def help-group [group: string] {
let cmd = ($env.ONTOREF_CALLER? | default "./onref")
let cmd = ($env.ONTOREF_CALLER? | default "ontoref")
let actor = ($env.ONTOREF_ACTOR? | default "developer")
match $group {
@ -268,9 +268,11 @@ export def help-group [group: string] {
print ""
fmt-section "Search the ontology graph"
print ""
fmt-cmd $"($cmd) find <term>" "search + interactive selector with detail" -v 1
fmt-cmd $"($cmd) find <term> --level Project" "filter by level" -v 1
fmt-cmd $"($cmd) find <term> --fmt <fmt>" "fmt: text* | json | yaml | toml | md (short: j y t m)" -v 1
fmt-cmd $"($cmd) s <term>" "search + interactive selector with detail" -v 1
fmt-cmd $"($cmd) s <term> --level Project" "filter by level: Axiom | Tension | Practice | Project" -v 1
fmt-cmd $"($cmd) s <term> --fmt <fmt>" "fmt: text* | json (j) | yaml (y) | toml (t) | md (m)" -v 1
fmt-cmd $"($cmd) s <term> --clip" "copy output to clipboard, strips ANSI" -v 1
fmt-cmd $"($cmd) s <term> --fmt json --clip" "copy JSON to clipboard" -v 1
fmt-info "1 result → show detail directly. N results → pick, explore, jump, repeat."
fmt-info "Detail includes: description, artifacts, connections, usage examples."
print ""
@ -308,9 +310,15 @@ export def help-group [group: string] {
print ""
fmt-cmd $"($cmd) describe why <id>" "ontology node + ADR + edges" -v 1
print ""
fmt-section "Domain extensions"
print ""
fmt-cmd $"($cmd) describe extensions" "list .ontology/*.ncl extensions (career, personal, …)" -v 1
fmt-cmd $"($cmd) describe extensions --dump <stem>" "dump a specific extension (e.g. --dump career)" -v 1
print ""
fmt-aliases [
{ short: "d", long: "describe" },
{ short: "d fi", long: "describe find <term>" },
{ short: "d s", long: "describe search <term>" },
{ short: "d fi", long: "describe search <term> (legacy alias)" },
{ short: "d p", long: "describe project" },
{ short: "d cap", long: "describe capabilities" },
{ short: "d con", long: "describe constraints" },
@ -321,6 +329,75 @@ export def help-group [group: string] {
{ short: "d i", long: "describe impact <id>" },
{ short: "d imp", long: "describe impact <id>" },
{ short: "d w", long: "describe why <id>" },
{ short: "d ext", long: "describe extensions" },
]
},
"search" | "s" => {
print ""
fmt-header "SEARCH (ontology + bookmarks)"
fmt-sep
fmt-info "Search ontology nodes, ADRs and modes. Results are interactive (picker)"
fmt-info "or machine-readable. Bookmarks persist to reflection/search_bookmarks.ncl."
print ""
fmt-section "Search"
print ""
fmt-cmd $"($cmd) s <term>" "search + interactive selector in TTY, list in pipe/non-TTY" -v 1
fmt-cmd $"($cmd) s <term> --level Axiom" "filter: Axiom | Tension | Practice | Project" -v 1
fmt-cmd $"($cmd) s <term> --fmt <fmt>" "output format: text* | json (j) | yaml (y) | toml (t) | md (m)" -v 1
fmt-cmd $"($cmd) s <term> --clip" "copy output to clipboard — combinable with --fmt" -v 1
fmt-cmd $"($cmd) describe search <term>" "full form (same command)" -v 1
print ""
fmt-info "--fmt and --clip work on any ontoref command, not just search."
print ""
fmt-section "Combined search"
print ""
fmt-cmd $"($cmd) qs <term>" "QA-first → ontology fallback" -v 1
fmt-cmd $"($cmd) sq <term>" "ontology-first + QA results appended" -v 1
print ""
fmt-section "Bookmarks (saved to reflection/search_bookmarks.ncl)"
print ""
fmt-info "Star any result in the UI to bookmark it — persisted to NCL, git-versioned."
fmt-info "Bookmarks are shared between CLI and UI (same NCL file)."
print ""
fmt-aliases [
{ short: "s", long: "search <term>" },
{ short: "f", long: "search <term> (legacy alias)" },
{ short: "d s", long: "describe search <term>" },
{ short: "d fi", long: "describe search <term> (legacy alias)" },
]
},
"qa" | "q" => {
print ""
fmt-header "QA (questions & answers)"
fmt-sep
fmt-info "Curated Q&A pairs persisted to reflection/qa.ncl — git-versioned,"
fmt-info "MCP-accessible, shared between CLI and UI."
print ""
fmt-section "Query"
print ""
fmt-cmd $"($cmd) q <term>" "word-overlap search; falls back to ontology if no QA hit" -v 1
fmt-cmd $"($cmd) q <term> --global" "also search ONTOREF_ROOT global qa.ncl" -v 1
fmt-cmd $"($cmd) q <term> --no-fallback" "QA only, no ontology fallback" -v 1
fmt-cmd $"($cmd) q <term> --fmt <fmt>" "output format: text* | json (j) | yaml (y) | toml (t) | md (m)" -v 1
fmt-cmd $"($cmd) q <term> --clip" "copy output to clipboard — combinable with --fmt" -v 1
fmt-info "--fmt and --clip work on any ontoref command, not just q."
fmt-cmd $"($cmd) qs <term>" "QA-first → ontology fallback (shortcut)" -v 1
print ""
fmt-section "Add entries"
print ""
fmt-cmd $"($cmd) qa add \"<question>\" \"<answer>\"" "add to project qa.ncl (developer+)" -v 1
fmt-cmd $"($cmd) qa add --global \"<q>\" \"<a>\"" "add to global qa.ncl (admin only)" -v 1
print ""
fmt-section "List"
print ""
fmt-cmd $"($cmd) qa list" "list project QA entries" -v 1
fmt-cmd $"($cmd) qa list --global" "list global QA entries" -v 1
fmt-cmd $"($cmd) qa list --all" "merge project + global" -v 1
print ""
fmt-aliases [
{ short: "q", long: "qa search <term>" },
{ short: "qs", long: "qa search → ontology fallback" },
{ short: "sq", long: "search → qa results appended" },
]
},
"log" => {

View File

@ -221,6 +221,6 @@ export def missing-target [group: string, action?: string] {
run-interactive $group
return
}
let cmd = ($env.ONTOREF_CALLER? | default "./onref")
let cmd = ($env.ONTOREF_CALLER? | default "ontoref")
print $" (ansi yellow)($group)(ansi reset): unknown subcommand '($act)'. Run '(ansi green)($cmd) ($group) h(ansi reset)' for options."
}

View File

@ -176,7 +176,13 @@ def actor-can-run-step [step_actor: string]: nothing -> bool {
# Execute a single step's command. Returns { success: bool, output: string }.
def exec-step-cmd [cmd: string]: nothing -> record {
let result = do { ^bash -c $cmd } | complete
let nu_patterns = ["| from json", "| get ", "| where ", "| each ", "| select ", "| sort-by"]
let is_nu = ($nu_patterns | any { |p| $cmd | str contains $p })
let result = if $is_nu {
do { ^nu -c $cmd } | complete
} else {
do { ^bash -c $cmd } | complete
}
{
success: ($result.exit_code == 0),
output: (if $result.exit_code == 0 { $result.stdout } else { $result.stderr }),
@ -361,7 +367,8 @@ export def run-mode [id: string, --dry-run, --yes] {
if $fail_count == 0 {
print $" (ansi green_bold)COMPLETE(ansi reset) All steps executed successfully."
} else {
print $" (ansi yellow_bold)PARTIAL(ansi reset) ($fail_count) step(s) failed: ($failed_steps | str join ', ')"
let step_word = if $fail_count == 1 { "step" } else { "steps" }
print $" (ansi yellow_bold)PARTIAL(ansi reset) ($fail_count) ($step_word) failed: ($failed_steps | str join ', ')"
}
print ""
}

View File

@ -0,0 +1,20 @@
# Contracts for persisted search bookmarks, shared by the CLI and the UI
# (entries are written to reflection/search_bookmarks.ncl and git-versioned).
let entry_contract = {
  id | String,
  node_id | String,                      # id of the bookmarked ontology node, e.g. "add-project"
  kind | String,                         # one of "node" | "adr" | "mode"
  title | String,
  level | String | default = "",         # Axiom | Tension | Practice | Project
  term | String | default = "",          # the search term that surfaced this result
  actor | String | default = "human",
  created_at | String | default = "",
  tags | Array String | default = [],
} in
# A store is just the list of saved entries; empty by default.
let store_contract = {
  entries | Array entry_contract | default = [],
} in
{
  BookmarkEntry = entry_contract,
  BookmarkStore = store_contract,
}

View File

@ -0,0 +1,5 @@
let s = import "search_bookmarks" in
{
entries = [],
} | s.BookmarkStore