diff --git a/.cargo/config.toml b/.cargo/config.toml
index 8f53d51..43796db 100644
--- a/.cargo/config.toml
+++ b/.cargo/config.toml
@@ -18,6 +18,13 @@ lto = false
panic = "unwind"
incremental = true
+[profile.clippy]
+# Lint-only profile: no debug info, no codegen — clippy only needs MIR/HIR.
+# Used by pre-commit to avoid bloating target/debug with DWARF/dSYM artifacts.
+inherits = "dev"
+debug = 0
+incremental = true
+
[profile.release]
# Release profile - slow compilation, optimized binary
opt-level = 3
diff --git a/.gitignore b/.gitignore
index 96550bb..89c9375 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,5 +1,7 @@
CLAUDE.md
.claude
+logs
+logs-archive
utils/save*sh
.fastembed_cache
presentaciones
diff --git a/.ontology/core.ncl b/.ontology/core.ncl
index 8a43841..8aa492f 100644
--- a/.ontology/core.ncl
+++ b/.ontology/core.ncl
@@ -68,7 +68,7 @@ let d = import "../ontology/defaults/core.ncl" in
name = "ADR Lifecycle",
pole = 'Yang,
level = 'Practice,
- description = "Architectural decisions follow: Proposed → Accepted → Superseded. Superseded ADRs retain constraints for historical reconstruction. Active Hard constraints drive the constraint set.",
+ description = "Architectural decisions follow: Proposed → Accepted → Superseded. Superseded ADRs retain constraints for historical reconstruction. Active Hard constraints drive the constraint set. Nodes declare which ADRs validate them via the adrs field — surfaced by describe and the daemon graph UI.",
artifact_paths = [
"adrs/schema.ncl",
"adrs/reflection.ncl",
@@ -78,8 +78,10 @@ let d = import "../ontology/defaults/core.ncl" in
"adrs/adr-003-qa-and-knowledge-persistence-as-ncl.ncl",
"adrs/adr-004-ncl-pipe-bootstrap-pattern.ncl",
"adrs/adr-005-unified-auth-session-model.ncl",
+ "adrs/adr-006-nushell-0111-string-interpolation-compat.ncl",
"CHANGELOG.md",
],
+ adrs = ["adr-001", "adr-002", "adr-003", "adr-004", "adr-005", "adr-006"],
},
d.make_node {
@@ -105,7 +107,7 @@ let d = import "../ontology/defaults/core.ncl" in
name = "Describe Query Layer",
pole = 'Yang,
level = 'Practice,
- description = "describe.nu aggregates all project sources and answers self-knowledge queries: what IS this, what can I DO, what can I NOT do, what tools exist, what is the impact of changing X.",
+ description = "describe.nu aggregates all project sources and answers self-knowledge queries: what IS this, what can I DO, what can I NOT do, what tools exist, what is the impact of changing X. Renders Validated by section when a node declares adrs — surfacing declared ADR constraints alongside source, examples, and connections.",
artifact_paths = ["reflection/modules/describe.nu"],
},
@@ -114,8 +116,9 @@ let d = import "../ontology/defaults/core.ncl" in
name = "Ontoref Ontology Crate",
pole = 'Yang,
level = 'Practice,
- description = "Rust implementation for loading and querying .ontology/ NCL files as typed structs. Provides the Core, Gate, and State types for ecosystem-level introspection.",
+ description = "Rust implementation for loading and querying .ontology/ NCL files as typed structs. Provides Core, Gate, and State types for ecosystem-level introspection. Node carries artifact_paths (Vec) and adrs (Vec) — both serde(default) for zero-migration backward compatibility.",
artifact_paths = ["crates/ontoref-ontology/"],
+ adrs = ["adr-001"],
},
d.make_node {
@@ -146,6 +149,31 @@ let d = import "../ontology/defaults/core.ncl" in
],
},
+ d.make_node {
+ id = "ontology-three-file-split",
+ name = "Ontology Three-File Split",
+ pole = 'Yang,
+ level = 'Practice,
+ description = "The .ontology/ directory separates three orthogonal concerns into three files. core.ncl captures what the project IS — invariant axioms and structural tensions; touching invariant=true nodes requires a new ADR. state.ncl captures where it IS vs where it wants to BE — current and desired state per dimension. gate.ncl defines when it is READY to cross a boundary — active membranes protecting key conditions. reflection/ reads all three and answers self-knowledge queries. This separation lets an agent understand a project without reading code — only by consulting the declarative graph.",
+ invariant = false,
+ artifact_paths = [".ontology/core.ncl", ".ontology/state.ncl", ".ontology/gate.ncl"],
+ },
+
+ d.make_node {
+ id = "adr-node-linkage",
+ name = "ADR–Node Declared Linkage",
+ pole = 'Yang,
+ level = 'Practice,
+ description = "Nodes declare which ADRs validate them via the adrs field (Array String). This makes the ADR→Node relationship explicit in the graph rather than implicit in prose. describe surfaces a Validated by section per node. The daemon graph UI renders each ADR as a clickable link opening the full ADR via GET /api/adr/{id}. Field is serde(default) and Nickel default=[] — zero migration cost for existing nodes.",
+ artifact_paths = [
+ "ontology/schemas/core.ncl",
+ "crates/ontoref-ontology/src/types.rs",
+ "reflection/modules/describe.nu",
+ "crates/ontoref-daemon/templates/pages/graph.html",
+ "crates/ontoref-daemon/src/api.rs",
+ ],
+ },
+
d.make_node {
id = "web-presence",
name = "Web Presence",
@@ -174,6 +202,7 @@ let d = import "../ontology/defaults/core.ncl" in
"crates/ontoref-daemon/src/session.rs",
"crates/ontoref-daemon/src/ui/auth.rs",
"crates/ontoref-daemon/src/ui/login.rs",
+ "crates/ontoref-daemon/src/ui/search_bookmarks_ncl.rs",
"justfiles/ci.just",
],
},
@@ -257,6 +286,53 @@ let d = import "../ontology/defaults/core.ncl" in
],
},
+ d.make_node {
+ id = "personal-ontology-schemas",
+ name = "Personal Ontology Schemas",
+ pole = 'Yin,
+ level = 'Practice,
+ description = "Typed NCL schema layer for personal and career artifacts: career.ncl (Skills, WorkExperience, Talks, Positioning, CompanyTargets, PublicationCards), personal.ncl (Content and Opportunity lifecycle — BlogPost to CV to Application, Job to Conference to Grant), project-card.ncl (canonical display metadata for portfolio and cv_repo publication). All types carry linked_nodes referencing .ontology/core.ncl node IDs — bridging career artifacts into the DAG.",
+ invariant = false,
+ artifact_paths = [
+ "ontology/schemas/career.ncl",
+ "ontology/schemas/personal.ncl",
+ "ontology/schemas/project-card.ncl",
+ "ontology/defaults/career.ncl",
+ "ontology/defaults/personal.ncl",
+ "ontology/defaults/project-card.ncl",
+ ],
+ },
+
+ d.make_node {
+ id = "content-modes",
+ name = "Content & Career Reflection Modes",
+ pole = 'Yang,
+ level = 'Practice,
+ description = "NCL DAG modes for personal content and career operations: draft-application (job/grant/collaboration application anchored in personal ontology — gate alignment check, node selection, career trajectory render), draft-email, generate-article, update-cv, write-cfp. Each mode queries personal.ncl and core.ncl nodes to ground output in declared project artifacts rather than free-form prose.",
+ invariant = false,
+ artifact_paths = [
+ "reflection/modes/draft-application.ncl",
+ "reflection/modes/draft-email.ncl",
+ "reflection/modes/generate-article.ncl",
+ "reflection/modes/update-cv.ncl",
+ "reflection/modes/write-cfp.ncl",
+ ],
+ },
+
+ d.make_node {
+ id = "search-bookmarks",
+ name = "Search Bookmarks",
+ pole = 'Yin,
+ level = 'Practice,
+ description = "Persistent bookmark store for search results over the ontology graph. Entries typed as BookmarkEntry (id, node_id, kind, title, level, term, actor, created_at, tags) and persisted to reflection/search_bookmarks.ncl via line-level NCL surgery — same atomic-write pattern as qa_ncl.rs. IDs are sequential sb-NNN, zero-padded. Concurrency-safe via NclWriteLock. Supports add and remove; accessible from the daemon search UI.",
+ invariant = false,
+ artifact_paths = [
+ "reflection/search_bookmarks.ncl",
+ "reflection/schemas/search_bookmarks.ncl",
+ "crates/ontoref-daemon/src/ui/search_bookmarks_ncl.rs",
+ ],
+ },
+
d.make_node {
id = "drift-observation",
name = "Passive Drift Observation",
@@ -283,6 +359,8 @@ let d = import "../ontology/defaults/core.ncl" in
{ from = "no-enforcement", to = "formalization-vs-adoption", kind = 'Resolves, weight = 'Medium },
{ from = "protocol-not-runtime", to = "no-enforcement", kind = 'Implies, weight = 'High },
{ from = "adr-lifecycle", to = "reflection-modes", kind = 'Complements, weight = 'Medium },
+ { from = "adr-node-linkage", to = "adr-lifecycle", kind = 'ManifestsIn, weight = 'High },
+ { from = "adr-node-linkage", to = "describe-query-layer", kind = 'Complements, weight = 'High },
{ from = "describe-query-layer", to = "dag-formalized", kind = 'DependsOn, weight = 'High },
{ from = "coder-process-memory", to = "describe-query-layer", kind = 'Complements, weight = 'Medium },
{ from = "ontoref-daemon", to = "ontoref-ontology-crate", kind = 'Complements, weight = 'High },
@@ -319,6 +397,19 @@ let d = import "../ontology/defaults/core.ncl" in
{ from = "drift-observation", to = "reflection-modes", kind = 'DependsOn, weight = 'High,
note = "Invokes sync-ontology mode steps (scan, diff) as read-only sub-processes." },
+ # Personal Ontology Schemas edges
+ { from = "personal-ontology-schemas", to = "dag-formalized", kind = 'ManifestsIn, weight = 'High,
+ note = "Career and personal artifacts are typed NCL records with linked_nodes — DAG connections into the core ontology." },
+ { from = "personal-ontology-schemas", to = "self-describing", kind = 'Complements, weight = 'Medium,
+ note = "Personal/career schemas let projects describe not just what they ARE but who built them and for what trajectory." },
+ { from = "content-modes", to = "reflection-modes", kind = 'ManifestsIn, weight = 'High },
+ { from = "content-modes", to = "personal-ontology-schemas", kind = 'DependsOn, weight = 'High,
+ note = "Content and career modes query personal.ncl and core.ncl to ground output in declared artifacts." },
+ { from = "search-bookmarks", to = "qa-knowledge-store", kind = 'Complements, weight = 'High,
+ note = "Both are NCL persistence layers using the same atomic-write surgery pattern. Q&A is for accumulated knowledge; bookmarks are for search navigation state." },
+ { from = "search-bookmarks", to = "ontoref-daemon", kind = 'ManifestsIn, weight = 'High },
+ { from = "ontoref-daemon", to = "search-bookmarks", kind = 'Contains, weight = 'High },
+
# Unified Auth Model edges
{ from = "unified-auth-model", to = "ontoref-daemon", kind = 'ManifestsIn, weight = 'High },
{ from = "unified-auth-model", to = "no-enforcement", kind = 'Contradicts, weight = 'Low,
diff --git a/.ontology/state.ncl b/.ontology/state.ncl
index 4200cce..69c8d8f 100644
--- a/.ontology/state.ncl
+++ b/.ontology/state.ncl
@@ -25,7 +25,7 @@ let d = import "../ontology/defaults/state.ncl" in
to = "protocol-stable",
condition = "ADR-001 accepted, ontoref.dev published, at least two external projects consuming the protocol.",
catalyst = "First external adoption.",
- blocker = "ontoref.dev not yet published; no external consumers yet. Auth model complete (session exchange, CLI Bearer, key rotation invalidation). Install pipeline: config form roundtrip and NATS topology operational; check-config-sync CI guard present.",
+ blocker = "ontoref.dev not yet published; no external consumers yet. Auth model complete. Install pipeline complete. Personal/career schema layer present; content modes operational. Nu 0.111 compat fixed (ADR-006). Syntaxis syntaxis-ontology crate has pending ES→EN migration errors.",
horizon = 'Months,
},
],
@@ -52,7 +52,7 @@ let d = import "../ontology/defaults/state.ncl" in
from = "modes-and-web-present",
to = "fully-self-described",
condition = "At least 3 ADRs accepted, reflection/backlog.ncl present, describe project returns complete picture.",
- catalyst = "ADR-001–ADR-004 authored (4 ADRs present, 3+ threshold met). Auth model, project onboarding, and session management nodes added to core.ncl in session 2026-03-13.",
+ catalyst = "ADR-001–ADR-006 authored (6 ADRs present). Auth model, project onboarding, and session management nodes added in 2026-03-13. Personal/career/project-card schemas, 5 content modes, search bookmarks, and ADR-006 (Nu 0.111 compat) added in session 2026-03-15.",
blocker = "none",
horizon = 'Weeks,
},
diff --git a/.ontoref/config.ncl b/.ontoref/config.ncl
index 020b6cd..5b4cafc 100644
--- a/.ontoref/config.ncl
+++ b/.ontoref/config.ncl
@@ -66,4 +66,6 @@
actors = ["developer", "agent"],
},
],
+
+ card = import "../card.ncl",
}
diff --git a/.ontoref/project.ncl b/.ontoref/project.ncl
index 95e92f2..da36af7 100644
--- a/.ontoref/project.ncl
+++ b/.ontoref/project.ncl
@@ -3,6 +3,9 @@ let s = import "ontoref-project.ncl" in
s.make_project {
slug = "ontoref",
root = "/Users/Akasha/Development/ontoref",
- nickel_import_paths = ["/Users/Akasha/Development/ontoref"],
+ nickel_import_paths = [
+ "/Users/Akasha/Development/ontoref",
+ "/Users/Akasha/Development/ontoref/ontology",
+ ],
keys = [],
}
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index a1319c9..90cf45c 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -18,7 +18,7 @@ repos:
- id: rust-clippy
name: Rust linting (cargo clippy)
- entry: bash -c 'cargo clippy --all-targets -- -D warnings'
+ entry: bash -c 'CARGO_TARGET_DIR=target cargo clippy --all-targets --no-deps --profile clippy -- -D warnings'
language: system
types: [rust]
pass_filenames: false
diff --git a/CHANGELOG.md b/CHANGELOG.md
index ccb459c..363fbca 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -7,6 +7,121 @@ ADRs referenced below live in `adrs/` as typed Nickel records.
## [Unreleased]
+### Personal Ontology Schemas & Content Modes
+
+Three new typed NCL schema families added to `ontology/schemas/` and `ontology/defaults/`:
+
+| Schema | Types exported |
+| --- | --- |
+| `career.ncl` | `Skill`, `WorkExperience`, `Talk`, `Positioning`, `CompanyTarget`, `PublicationCard`, `CareerConfig` |
+| `personal.ncl` | `Content` (BlogPost / ConferenceProposal / CV / Application / Email / Thread), `Opportunity` (Job / Conference / Grant / Collaboration / Podcast), `PersonalConfig` |
+| `project-card.ncl` | `ProjectCard` — canonical display metadata (name, tagline, status, tags, tools, features, sort_order) for portfolio and cv_repo publication |
+
+All types carry `linked_nodes | Array String` referencing `.ontology/core.ncl` node IDs.
+`PublicationCard` is a career overlay referencing a canonical `project_node` from the portfolio repo.
+
+Five NCL DAG reflection modes added to `reflection/modes/`:
+
+| Mode | Purpose |
+| --- | --- |
+| `draft-application` | Job/grant/collaboration application anchored in personal ontology — gate alignment check, node selection, career trajectory render, status update |
+| `draft-email` | Context-grounded email composition using ontology nodes as evidence |
+| `generate-article` | Blog post / thread generation from project nodes and tensions |
+| `update-cv` | CV refresh loop querying current career.ncl and core.ncl state |
+| `write-cfp` | Conference proposal from Practice/Project nodes with gate alignment check |
+
+### Search Bookmarks
+
+Bookmark persistence for search results over the ontology graph. Mirrors Q&A NCL pattern (ADR-003).
+
+- `reflection/schemas/search_bookmarks.ncl` — `BookmarkEntry` (id, node_id, kind, title, level, term, actor, created_at, tags) and `BookmarkStore` contracts
+- `reflection/search_bookmarks.ncl` — typed store file; conforms to `BookmarkStore` contract
+- `crates/ontoref-daemon/src/ui/search_bookmarks_ncl.rs` — `add_entry` / `remove_entry` via
+ line-level NCL surgery; auto-incremented `sb-NNN` ids; concurrency-safe via `NclWriteLock`
+
+Tests: `next_id_empty`, `next_id_increments`, `insert_into_empty_store`, `delete_first_entry`,
+`delete_second_entry`, `delete_missing_id_errors`, `escape_quotes_and_backslashes`,
+`concurrent_add_produces_unique_ids` (tokio, 6 concurrent tasks, asserts unique ids).
+
+### Protocol
+
+- ADR-006 accepted: Nushell 0.111 string interpolation compatibility fix. Four print statements in
+ `reflection/bin/ontoref.nu` used `(identifier: expr)` patterns inside `$"..."` — parsed as
+ command calls by Nu 0.111 parser. Fix: bare `identifier: (expr)` for label-value pairs; plain
+ strings (no `$`) for zero-interpolation prints. Hard constraint: no `(label: expr)` inside
+ `$"..."` in any `.nu` file. Soft constraint: zero-interpolation strings must not use `$"..."`.
+ ([adr-006](adrs/adr-006-nushell-0111-string-interpolation-compat.ncl))
+
+### Self-Description — on+re Update
+
+`.ontology/core.ncl` — 3 new Practice nodes, updated `adr-lifecycle` and `ontoref-daemon` nodes:
+
+| Change | Detail |
+| --- | --- |
+| New node `personal-ontology-schemas` | Yin — career/personal/project-card typed NCL schemas with linked_nodes DAG bridges |
+| New node `content-modes` | Yang — 5 NCL DAG modes for personal content and career operations |
+| New node `search-bookmarks` | Yin — bookmark persistence layer; NCL surgery via search_bookmarks_ncl.rs |
+| `adr-lifecycle` | ADR-006 added to `artifact_paths` and `adrs` list |
+| `ontoref-daemon` | `search_bookmarks_ncl.rs` added to `artifact_paths` |
+
+New edges: `personal-ontology-schemas → dag-formalized` (ManifestsIn/High),
+`personal-ontology-schemas → self-describing` (Complements/Medium),
+`content-modes → reflection-modes` (ManifestsIn/High),
+`content-modes → personal-ontology-schemas` (DependsOn/High),
+`search-bookmarks → qa-knowledge-store` (Complements/High),
+`search-bookmarks → ontoref-daemon` (ManifestsIn/High),
+`ontoref-daemon → search-bookmarks` (Contains/High).
+
+`.ontology/state.ncl` — `self-description-coverage` catalyst updated to include 2026-03-15 session
+additions. `protocol-maturity` blocker updated to reflect Nu 0.111 fix and personal schema layer
+completion.
+
+Previous: 4 axioms, 2 tensions, 17 practices. Current: 4 axioms, 2 tensions, 20 practices.
+
+---
+
+### ADR–Node Declared Linkage
+
+- `Node` schema extended with `adrs | Array String | default = []` (Nickel `ontology/schemas/core.ncl`
+ and inline `CoreConfig` type).
+- Rust `Node` struct gains `artifact_paths: Vec<String>` and `adrs: Vec<String>`, both
+ `#[serde(default)]` — zero migration cost for existing nodes that omit the fields.
+- `describe.nu` `build-howto` populates `adrs` from the node record; `render-howto` (ANSI),
+ `render-howto-md`, and `howto-to-md-string` (clipboard) all emit a **Validated by** section
+ when `adrs` is non-empty.
+- New `GET /api/adr/{id}?slug=<slug>` endpoint — reads `adrs/<id>.ncl`, exports via NCL
+ cache, returns JSON. No auth required (read-only, loopback boundary).
+- Graph UI (`graph.html`): `adrs` field passed into Cytoscape node data. Detail panel renders
+  "Validated by" section with clickable `◆ <adr-id>` buttons that open a DaisyUI modal
+ fetching full ADR content via the new endpoint.
+- Fixed glob pattern error in `describe.nu:build-howto`: `glob $"($full)/*.rs"` replaced with
+ `glob ($full | path join "*.rs")` — eliminates `//` in pattern when path has trailing separator.
+
+### Self-Description — on+re Update
+
+`.ontology/core.ncl` — new node, updated nodes, new edges:
+
+| Change | Detail |
+| --- | --- |
+| New node `adr-node-linkage` | Practice: declares `adrs` field pattern, lists all 5 modified artifacts |
+| `adr-lifecycle` | Description updated; `adrs = ["adr-001"…"adr-005"]` declared |
+| `describe-query-layer` | Description updated to mention Validated by rendering |
+| `ontoref-ontology-crate` | Description updated to mention `artifact_paths` + `adrs` fields; `adrs = ["adr-001"]` |
+| New edge `adr-node-linkage → adr-lifecycle` | ManifestsIn/High |
+| New edge `adr-node-linkage → describe-query-layer` | Complements/High |
+
+Previous: 4 axioms, 2 tensions, 16 practices. Current: 4 axioms, 2 tensions, 17 practices.
+
+### Ontology Three-File Split
+
+- New Practice node `ontology-three-file-split` in `.ontology/core.ncl`: documents the
+ `core.ncl` (what IS) / `state.ncl` (where we ARE vs want to BE) / `gate.ncl` (when READY
+ to cross a boundary) separation and the role of `reflection/` in answering self-knowledge
+ queries without reading code.
+- `assets/presentation/slides.md` speaker note updated to English with reflection mention.
+- `assets/web/src/index.html` "Scattered Project Knowledge" solution bullets updated (bilingual)
+ to express the three-file split and `reflection/` self-knowledge layer.
+
### Auth & Session Model (ADR-005)
Unified key-to-session token exchange across all surfaces. All work gated on `#[cfg(feature = "ui")]`.
diff --git a/README.md b/README.md
index 9f90899..1004259 100644
--- a/README.md
+++ b/README.md
@@ -34,9 +34,9 @@ crates/ Rust implementation — typed struct loaders and mode executo
| Crate | Purpose |
| --- | --- |
-| `ontoref-ontology` | `.ontology/` NCL → typed Rust structs: Node, Edge, Dimension, Gate, Membrane. Graph traversal, invariant queries. Zero deps. |
+| `ontoref-ontology` | `.ontology/` NCL → typed Rust structs: Node, Edge, Dimension, Gate, Membrane. `Node` carries `artifact_paths` and `adrs` (`Vec<String>`, both `serde(default)`). Graph traversal, invariant queries. Zero deps. |
| `ontoref-reflection` | NCL DAG contract executor: ADR lifecycle, step dep resolution, config seal. `stratum-graph` + `stratum-state` required. |
-| `ontoref-daemon` | HTTP UI (10 pages), actor registry, notification barrier, MCP (19 tools), search engine, SurrealDB, NCL export cache. |
+| `ontoref-daemon` | HTTP UI (10 pages), actor registry, notification barrier, MCP (21 tools), search engine, search bookmarks, SurrealDB, NCL export cache. |
`ontoref-daemon` caches `nickel export` results (keyed by path + mtime), reducing full sync
scans from ~2m42s to <30s. The daemon is always optional — every module falls back to direct
@@ -54,8 +54,8 @@ automatically.
**Q&A Knowledge Store** — accumulated Q&A entries persist to `reflection/qa.ncl` (typed NCL,
git-versioned). Not localStorage. Any actor — developer, agent, CI — reads the same store.
-**MCP Server** — 19 tools over stdio and streamable-HTTP. Categories: nodes, ADRs, modes,
-backlog, Q&A, sessions, search, notifications. Representative subset:
+**MCP Server** — 21 tools over stdio and streamable-HTTP. Categories: nodes, ADRs, modes,
+backlog, Q&A, sessions, search, bookmarks, notifications. Representative subset:
| Tool | What it does |
| --- | --- |
@@ -68,6 +68,22 @@ backlog, Q&A, sessions, search, notifications. Representative subset:
| `ontoref_describe` | Describe project ontology and constraints |
| `ontoref_sync_scan` | Scan for ontology drift |
+**Search Bookmarks** — search results persist to `reflection/search_bookmarks.ncl` (typed NCL,
+`BookmarkEntry` schema). Same atomic-write pattern as Q&A. IDs are sequential `sb-NNN`.
+Concurrency-safe via `NclWriteLock`. Add and remove from the daemon search UI.
+
+**Personal Ontology Schemas** — `ontology/schemas/career.ncl`, `personal.ncl`, `project-card.ncl`
+provide typed contract layers for career and content artifacts (Skills, WorkExperience, Talks,
+Content lifecycle, Opportunities, PublicationCards). All types carry `linked_nodes` referencing
+core ontology node IDs — bridging career artifacts into the DAG. Five content/career reflection
+modes (`draft-application`, `draft-email`, `generate-article`, `update-cv`, `write-cfp`) query
+these schemas to ground output in declared project artifacts rather than free-form prose.
+
+**ADR–Node Linkage** — nodes declare which ADRs validate them via `adrs: Array String`.
+`describe` surfaces a **Validated by** section per node (CLI and `--fmt md`). The graph UI
+renders each ADR as a clickable link that opens the full ADR content in a modal via
+`GET /api/adr/{id}`.
+
**Passive Drift Observation** — background file watcher that detects divergence between Yang
code artifacts and Yin ontology. Watches `crates/`, `.ontology/`, `adrs/`, `reflection/modes/`.
After a 15s debounce runs `sync scan + sync diff`; emits an `ontology_drift` notification when
diff --git a/adrs/adr-006-nushell-0111-string-interpolation-compat.ncl b/adrs/adr-006-nushell-0111-string-interpolation-compat.ncl
new file mode 100644
index 0000000..0359aea
--- /dev/null
+++ b/adrs/adr-006-nushell-0111-string-interpolation-compat.ncl
@@ -0,0 +1,76 @@
+let d = import "adr-defaults.ncl" in
+
+d.make_adr {
+ id = "adr-006",
+ title = "Nushell 0.111 String Interpolation Compatibility Fix",
+ status = 'Accepted,
+ date = "2026-03-14",
+
+ context = "Nushell 0.111 introduced a breaking change in string interpolation parsing: expressions inside `$\"...\"` that match the pattern `(identifier: expr)` are now parsed as command calls rather than as record literals or literal text. This broke four print statements in reflection/bin/ontoref.nu that used patterns like `(kind: ($kind))`, `(logo: ($logo_file))`, `(parents: ($parent_slugs))`, and `(POST /actors/register)`. The bug manifested when running `ontoref setup` and `ontoref hooks-install` on any consumer project using Nu 0.111+. The minimum Nu version gate (>= 0.110.0) did not catch 0.111 regressions since it only guards the lower bound.",
+
+ decision = "Fix all four affected print statements by removing the outer parentheses from label-value pairs inside string interpolations, or by removing the `$` prefix from strings that contain no variable interpolation. The fix is minimal and non-semantic: `(kind: ($kind))` becomes `kind: ($kind)` (literal label + variable), and `$\"(POST /actors/register)\"` becomes `\"(POST /actors/register)\"` (plain string). The fix is applied to both the dev repo (reflection/bin/ontoref.nu) and the installed copy (~/.local/bin/ontoref via just install-daemon). The minimum version gate remains >= 0.110.0 but 0.111 is now the tested floor.",
+
+ rationale = [
+ {
+ claim = "Minimal-diff fix over workarounds",
+ detail = "The broken patterns were purely cosmetic print statements. The fix removes one level of parens — no logic change. Alternatives that added escape sequences or string concatenation would obscure the intent.",
+ },
+ {
+ claim = "Plain string for zero-interpolation prints",
+ detail = "Strings with no variable interpolation (like the POST endpoint hint) should never use `$\"...\"`. Removing the `$` prefix makes them immune to any future interpolation parsing changes and is the correct Nushell idiom.",
+ },
+ {
+ claim = "just install-daemon as the sync mechanism",
+ detail = "The installed copy at ~/.local/bin/ontoref is managed via just install-daemon. Patching both the dev repo and the installed copy via install-daemon is the established update path and keeps them in sync.",
+ },
+ ],
+
+ consequences = {
+ positive = [
+ "ontoref setup and hooks-install work correctly on Nushell 0.111+",
+ "All consumer projects (vapora, typedialog, evol-rustelo) can run setup without errors",
+ "Plain-string fix removes implicit fragility from zero-interpolation print statements",
+ ],
+ negative = [
+ "The 0.111 regression was not caught by the version gate — the gate only guards >= 0.110.0 and does not test 0.111 compatibility proactively",
+ ],
+ },
+
+ alternatives_considered = [
+ {
+ option = "Raise minimum Nu version to 0.111 and document the breaking change",
+ why_rejected = "Does not fix the broken syntax — just makes the breakage explicit. Consumer projects already on 0.111 would still fail until the print statements are fixed.",
+ },
+ {
+ option = "Use escape sequences or string concatenation to embed literal parens",
+ why_rejected = "Nushell has no escape for parens in string interpolation. String concatenation (e.g. `'(kind: ' + $kind + ')'`) works but is significantly less readable than bare `kind: ($kind)`.",
+ },
+ ],
+
+ constraints = [
+ {
+ id = "no-label-value-parens-in-interpolation",
+ claim = "String interpolations in ontoref.nu must not use `(identifier: expr)` patterns — use bare `identifier: (expr)` instead",
+ scope = "ontoref (reflection/bin/ontoref.nu, all .nu files)",
+ severity = 'Hard,
+ check_hint = "rg '\\([a-z_]+: \\(' reflection/bin/ontoref.nu",
+ rationale = "Nushell 0.111 parses (identifier: expr) inside $\"...\" as a command call. The fix pattern (bare label + variable interpolation) is equivalent visually and immune to this parser behaviour.",
+ },
+ {
+ id = "plain-string-for-zero-interpolation",
+ claim = "Print statements with no variable interpolation must use plain strings, not `$\"...\"`",
+ scope = "ontoref (all .nu files)",
+ severity = 'Soft,
+ check_hint = "rg '\\$\"[^(]*\"' reflection/ | grep -v '\\$('",
+ rationale = "Zero-interpolation `$\"...\"` strings are fragile against future parser changes and mislead readers into expecting variable substitution.",
+ },
+ ],
+
+ related_adrs = [],
+
+ ontology_check = {
+ decision_string = "Fix four Nu 0.111 string interpolation regressions in ontoref.nu; enforce no (label: expr) inside interpolations; use plain strings for zero-interpolation prints",
+ invariants_at_risk = [],
+ verdict = 'Safe,
+ },
+}
diff --git a/assets/presentation/slides.md b/assets/presentation/slides.md
index ba58823..35a8867 100644
--- a/assets/presentation/slides.md
+++ b/assets/presentation/slides.md
@@ -743,10 +743,14 @@ Es un grafo consultable que el sistema y los agentes leen.
---
diff --git a/assets/web/index.html b/assets/web/index.html
index 308ceab..2810c1d 100644
--- a/assets/web/index.html
+++ b/assets/web/index.html
@@ -1 +1 @@
- Ontoref
Ontology + Reflection + Daemon + MCP — encode what your codebase IS (invariants, tensions, constraints) and what it DOES (operational modes, actor flows, config seals) in machine-queryable directed acyclic graphs. First-class web UI (12 pages), MCP server (19 tools), and live session sharing for AI agents. One protocol for developers, agents, and CI. Protocol + Runtime. Zero enforcement.
The 6 Problems It Solves
01
Decisions Without Memory
Architectural choices made in chat, forgotten after rotation
No machine-queryable source of why something exists
ADRs as typed Nickel: invariants, constraints, supersession chain
Hard constraints enforced at every operation
02
Invisible Configuration Drift
Configs change outside any review cycle
No audit trail linking change to PR or ADR
Rollback requires manual file archaeology
Sealed profiles: sha256 hash, full history, verified rollback
03
Agents Without Context
LLMs start each session with zero project knowledge
Same mistakes, same questions, no accumulation across operations
Actor registry tracks each session token, type, current mode, last seen — persisted to disk
MCP tools give agents direct DAG read/write: nodes, ADRs, backlog, Q&A
Composed tasks shared via daemon — multiple actors see the same operational context live
04
Scattered Project Knowledge
Guidelines in wikis, patterns in docs, decisions in Slack
No single source queryable by humans, agents, and CI equally
.ontology/ as DAG: nodes, edges, invariants, tensions, gates
Same graph serves developer context, agent initialization, CI validation
05
Protocol Fragmentation
Each project re-invents its own conventions
No shared contract for how operations are defined and executed
Reflection modes: typed DAG contracts for any workflow
One protocol adopted per-project, without enforcing uniformity
06
Knowledge Lost Between Sessions
Q&A answered in one session forgotten by the next
Agent re-asks questions already answered in previous sessions
Q&A Knowledge Store: typed NCL, git-versioned, persists across browser resets
Notification barrier surfaces drift to agents proactively — pre_commit, drift, ontology_drift signals block until acknowledged
Ontology & Reflection — Yin and Yang
Yin — The Ontology Layer
What must be true
Invariants — axioms that cannot change without a new ADR
Tensions — structural conflicts the project navigates, never resolves
Practices — confirmed patterns with artifact paths to real files
Ontology without Reflection = correct but static. Perfect invariants with no operations = dead documentation. Reflection without Ontology = fluid but unanchored. Workflows that forget what they protect.
The protocol lives in coexistence.
DECLARATIVE LAYER · Nickel
.ontology/ · adrs/ · reflection/schemas/
Strong types, contracts, enums. Fails at definition time, not at runtime.
Search engine: full-text across nodes / ADRs / reflection modes — returns kind · id · title · snippet · score
SurrealDB persistence (optional --db): actor sessions, seeded ontology tables, search index, notification history — fail-open
NCL export cache: avoids repeated nickel export on unchanged files
db + nats feature flags — builds standalone with --no-default-features
Adopt in Any Project
ontoref setup wires up any new or existing project — idempotent scaffold with optional auth key bootstrap.
stratumiopsMaster orchestration repo
vaporaAI agent orchestration
kogralKnowledge graph + MCP
syntaxisProject orchestration
provisioningDeclarative IaC
your-projectAny codebase
# Onboard a new project (idempotent; kind: Service by default) ontoref setup ontoref setup --kind Library ontoref setup --gen-keys ["admin:dev" "viewer:ci"]# bootstrap auth keys once
# ADR lifecycle ontoref adr new --title "Adopt Nickel for configuration" ontoref adr list --status Accepted
Daemon & MCP — Runtime Intelligence Layer
ontoref-daemon is an optional persistent process. It caches NCL exports, serves 12 UI pages, exposes 19 MCP tools, maintains an actor registry, stores notifications, indexes everything for search, and optionally persists to SurrealDB. Auth is opt-in: all surfaces (CLI, UI, MCP) exchange a project key for a UUID v4 session token via POST /sessions; CLI injects ONTOREF_TOKEN as Bearer automatically. It never changes the protocol — it accelerates and shares access to it. Configured via ~/.config/ontoref/config.ncl (Nickel, type-checked); edit interactively with ontoref config-edit. Started via NCL pipe bootstrap: ontoref-daemon-boot.
/backlogBacklogitems with priority (Critical/High/Medium/Low) and status (Open/InProgress/Done/Cancelled); add/update
/qaQ&Aserver-hydrated from reflection/qa.ncl; add/edit/delete; persisted as typed NCL
/actionsActionsquick actions catalog from .ontoref/config.ncl; execute via POST /actions/run
/modesModesreflection mode list from reflection/modes/ — name, description, DAG contract
/composeComposeagent task composer — renders mode forms interactively; POST /compose/send dispatches to ./ontoref; live sharing for AI actors
The MCP Server — 19 Tools
Tool
Description
ontoref_help
List available tools and usage
ontoref_list_projects
Enumerate all registered projects
ontoref_set_project
Set session default project context
ontoref_project_status
Full project dashboard — health, drift, actors
ontoref_describe
Architecture overview and self-description
ontoref_search
Free-text search across nodes, ADRs, modes
ontoref_get
Fetch ontology node by id
ontoref_get_node
Full ontology node with edges and constraints
ontoref_list_adrs
List ADRs filtered by status
ontoref_get_adr
Full ADR content with constraints
ontoref_list_modes
List all reflection modes
ontoref_get_mode
Mode DAG contract — steps, preconditions, postconditions
ontoref_get_backlog
Backlog items filtered by status
ontoref_backlog
Add or update_status on a backlog item
ontoref_constraints
All hard + soft architectural constraints
ontoref_qa_list
List Q&A knowledge store with optional filter
ontoref_qa_add
Persist new Q&A entry to reflection/qa.ncl
ontoref_action_list
Quick actions catalog from .ontoref/config.ncl
ontoref_action_add
Create reflection mode + register as quick action
SurrealDB Persistence — Optional
Enabled with --db feature flag and --db-url ws://...
Connects via WebSocket at startup — 5s timeout, fail-open (daemon runs without it)
Seeds ontology tables from local NCL files on startup and on file changes
Persists: actor sessions, seeded ontology tables, search index, notification history
Without --db: DashMap-backed in-memory, process-lifetime only
Namespace configurable via --db-namespace; credentials via --db-username/--db-password
Notification Barrier
pre_commit — pre-commit hook polls GET /notifications/pending?token=X&project=Y; blocks git commit until all acked
drift — schema drift detected between codebase and ontology
ontology_drift — emitted by passive observer with missing/stale/drift/broken counts after 15s debounce
Fail-open: if daemon is unreachable, pre-commit hook passes — commits are never blocked by daemon downtime
Ack via UI or POST /notifications/ack; custom notifications via POST /{slug}/notifications/emit
Action buttons in notifications can link to any dashboard page
# Configure and start the daemon (optional — protocol works without it) ontoref config-edit # browser form → ~/.config/ontoref/config.ncl ontoref-daemon-boot # NCL pipe bootstrap: nickel export config.ncl | daemon --config-stdin ontoref-daemon-boot --dry-run # preview composed JSON without starting # With SOPS-encrypted secrets merged at boot ontoref-daemon-boot --sops secrets.enc.json
# Connect Claude Code via MCP (add to .claude/mcp.json) { "mcpServers": { "ontoref": {"type": "http", "url": "http://localhost:7421/mcp"} } }
# Search across ontology nodes, ADRs, and reflection modes ontoref_search({ q: "notification drift", project: "my-project" })
# Persist a Q&A entry (written to reflection/qa.ncl, git-versioned) ontoref_qa_add({ question: "Why does ontoref-ontology have zero stratumiops deps?", answer: "ADR-001: minimal adoption surface. Ontology crate must build standalone.", tags: ["adr-001", "architecture"] })
# Check live actor sessions curl http://localhost:7421/actors # {"sessions": [{"token": "abc123", "actor_type": "agent", "current_mode": "describe", ...}]}
The UI in Action · Graph View
Force-directed graph of the live ontology. Nodes are typed (Axiom · Tension · Practice) and polarized (Yang · Yin · Spiral). Click any node to open its detail panel — artifacts, connections, NCL source.
Start with ontoref setup. Your project gains machine-queryable invariants, living ADRs, actor-aware operational modes, and a daemon that shares context across every actor in real time.
Ontology + Reflection + Daemon + MCP — encode what a system IS (invariants, tensions, constraints) and where it IS GOING (state dimensions, transition conditions, membranes) in machine-queryable directed acyclic graphs. Software projects, personal operational systems, agent contexts — same three files, same protocol. First-class web UI (12 pages), MCP server (19 tools), live session sharing. One protocol for developers, agents, CI, and individuals. Protocol + Runtime. Zero enforcement.
The 7 Problems It Solves
01
Decisions Without Memory
Architectural choices made in chat, forgotten after rotation
No machine-queryable source of why something exists
ADRs as typed Nickel: invariants, constraints, supersession chain
Hard constraints enforced at every operation
02
Invisible Configuration Drift
Configs change outside any review cycle
No audit trail linking change to PR or ADR
Rollback requires manual file archaeology
Sealed profiles: sha256 hash, full history, verified rollback
03
Agents Without Context
LLMs start each session with zero project knowledge
Same mistakes, same questions, no accumulation across operations
Actor registry tracks each session token, type, current mode, last seen — persisted to disk
MCP tools give agents direct DAG read/write: nodes, ADRs, backlog, Q&A
Composed tasks shared via daemon — multiple actors see the same operational context live
04
Scattered Project Knowledge
Guidelines in wikis, patterns in docs, decisions in Slack
No single source queryable by humans, agents, and CI equally
.ontology/ separates three orthogonal concerns: core.ncl (what IS) · state.ncl (where we ARE vs want to BE) · gate.ncl (when READY to cross a boundary)
reflection/ reads all three and answers self-knowledge queries — an agent understands the project without reading code, only by consulting the declarative graph
05
Protocol Fragmentation
Each project re-invents its own conventions
No shared contract for how operations are defined and executed
Reflection modes: typed DAG contracts for any workflow
One protocol adopted per-project, without enforcing uniformity
06
Knowledge Lost Between Sessions
Q&A answered in one session forgotten by the next
Agent re-asks questions already answered in previous sessions
Q&A Knowledge Store: typed NCL, git-versioned, persists across browser resets
Notification barrier surfaces drift to agents proactively — pre_commit, drift, ontology_drift signals block until acknowledged
07
Decisions Without a Map
Personal and professional decisions made against implicit, unverifiable assumptions
No queryable model of what you never compromise
No structured way to ask: does this opportunity violate who I am?
ontoref as personal operational ontology — same core/state/gate files applied to life, career, and ecosystem dimensions
Ontology without Reflection = correct but static. Perfect invariants with no operations = dead documentation. Reflection without Ontology = fluid but unanchored. Workflows that forget what they protect.
The protocol lives in coexistence.
DECLARATIVE LAYER · Nickel
.ontology/ · adrs/ · reflection/schemas/
Strong types, contracts, enums. Fails at definition time, not at runtime.
Search engine: full-text across nodes / ADRs / reflection modes — returns kind · id · title · snippet · score
SurrealDB persistence (optional --db): actor sessions, seeded ontology tables, search index, notification history — fail-open
NCL export cache: avoids repeated nickel export on unchanged files
db + nats feature flags — builds standalone with --no-default-features
Adopt in Any Project
ontoref setup wires up any new or existing project — idempotent scaffold with optional auth key bootstrap.
stratumiopsMaster orchestration repo
vaporaAI agent orchestration
kogralKnowledge graph + MCP
syntaxisProject orchestration
provisioningDeclarative IaC
your-projectAny codebase
# Onboard a new project (idempotent; kind: Service by default) ontoref setup ontoref setup --kind Library ontoref setup --gen-keys ["admin:dev" "viewer:ci"]# bootstrap auth keys once
# ADR lifecycle ontoref adr new --title "Adopt Nickel for configuration" ontoref adr list --status Accepted
Daemon & MCP — Runtime Intelligence Layer
ontoref-daemon is an optional persistent process. It caches NCL exports, serves 12 UI pages, exposes 19 MCP tools, maintains an actor registry, stores notifications, indexes everything for search, and optionally persists to SurrealDB. Auth is opt-in: all surfaces (CLI, UI, MCP) exchange a project key for a UUID v4 session token via POST /sessions; CLI injects ONTOREF_TOKEN as Bearer automatically. It never changes the protocol — it accelerates and shares access to it. Configured via ~/.config/ontoref/config.ncl (Nickel, type-checked); edit interactively with ontoref config-edit. Started via NCL pipe bootstrap: ontoref-daemon-boot.
/Dashboardproject overview, actor count, cache stats, notification count, backlog summary
/graphGraphCytoscape.js ontology graph — nodes colored by pole (Yang=orange, Yin=blue, Spiral=purple), clickable detail panel with artifacts, connections, and ADR links that open the full record in a modal
/searchSearchfull-text search across nodes, ADRs, reflection modes — returns kind/id/title/snippet/score
/sessionsSessionslive actor registry — actor type, mode, last_seen; auth sessions (id, role, key_label, expires) for authed deployments
/backlogBacklogitems with priority (Critical/High/Medium/Low) and status (Open/InProgress/Done/Cancelled); add/update
/qaQ&Aserver-hydrated from reflection/qa.ncl; add/edit/delete; persisted as typed NCL
/actionsActionsquick actions catalog from .ontoref/config.ncl; execute via POST /actions/run
/modesModesreflection mode list from reflection/modes/ — name, description, DAG contract
/composeComposeagent task composer — renders mode forms interactively; POST /compose/send dispatches to ./ontoref; live sharing for AI actors
The MCP Server — 19 Tools
Tool
Description
ontoref_help
List available tools and usage
ontoref_list_projects
Enumerate all registered projects
ontoref_set_project
Set session default project context
ontoref_project_status
Full project dashboard — health, drift, actors
ontoref_describe
Architecture overview and self-description
ontoref_search
Free-text search across nodes, ADRs, modes
ontoref_get
Fetch ontology node by id
ontoref_get_node
Full ontology node with edges and constraints
ontoref_list_adrs
List ADRs filtered by status
ontoref_get_adr
Full ADR content with constraints
ontoref_list_modes
List all reflection modes
ontoref_get_mode
Mode DAG contract — steps, preconditions, postconditions
ontoref_get_backlog
Backlog items filtered by status
ontoref_backlog
Add or update_status on a backlog item
ontoref_constraints
All hard + soft architectural constraints
ontoref_qa_list
List Q&A knowledge store with optional filter
ontoref_qa_add
Persist new Q&A entry to reflection/qa.ncl
ontoref_action_list
Quick actions catalog from .ontoref/config.ncl
ontoref_action_add
Create reflection mode + register as quick action
SurrealDB Persistence — Optional
Enabled with --db feature flag and --db-url ws://...
Connects via WebSocket at startup — 5s timeout, fail-open (daemon runs without it)
Seeds ontology tables from local NCL files on startup and on file changes
Persists: actor sessions, seeded ontology tables, search index, notification history
Without --db: DashMap-backed in-memory, process-lifetime only
Namespace configurable via --db-namespace; credentials via --db-username/--db-password
Notification Barrier
pre_commit — pre-commit hook polls GET /notifications/pending?token=X&project=Y; blocks git commit until all acked
drift — schema drift detected between codebase and ontology
ontology_drift — emitted by passive observer with missing/stale/drift/broken counts after 15s debounce
Fail-open: if daemon is unreachable, pre-commit hook passes — commits are never blocked by daemon downtime
Ack via UI or POST /notifications/ack; custom notifications via POST /{slug}/notifications/emit
Action buttons in notifications can link to any dashboard page
# Configure and start the daemon (optional — protocol works without it) ontoref config-edit # browser form → ~/.config/ontoref/config.ncl ontoref-daemon-boot # NCL pipe bootstrap: nickel export config.ncl | daemon --config-stdin ontoref-daemon-boot --dry-run # preview composed JSON without starting # With SOPS-encrypted secrets merged at boot ontoref-daemon-boot --sops secrets.enc.json
# Connect Claude Code via MCP (add to .claude/mcp.json) { "mcpServers": { "ontoref": {"type": "http", "url": "http://localhost:7421/mcp"} } }
# Search across ontology nodes, ADRs, and reflection modes ontoref_search({ q: "notification drift", project: "my-project" })
# Persist a Q&A entry (written to reflection/qa.ncl, git-versioned) ontoref_qa_add({ question: "Why does ontoref-ontology have zero stratumiops deps?", answer: "ADR-001: minimal adoption surface. Ontology crate must build standalone.", tags: ["adr-001", "architecture"] })
# Check live actor sessions curl http://localhost:7421/actors # {"sessions": [{"token": "abc123", "actor_type": "agent", "current_mode": "describe", ...}]}
The UI in Action · Graph View
Force-directed graph of the live ontology. Nodes are typed (Axiom · Tension · Practice) and polarized (Yang · Yin · Spiral). Click any node to open its detail panel — artifacts, connections, NCL source.
Start with ontoref setup. Your project gains machine-queryable invariants, living ADRs, actor-aware operational modes, and a daemon that shares context across every actor in real time.
@@ -1591,12 +1655,14 @@
class="sub"
data-en="How things move and change"
data-es="Cómo las cosas se mueven y cambian"
+ data-key="ontoref-yang-sub"
>
How things move and change
Modes — typed DAG workflow contracts
@@ -1634,12 +1700,14 @@
Ontology without Reflection = correct but static. Perfect
invariants with no operations = dead documentation. Reflection without Ontology = fluid but unanchored. Workflows that
forget what they protect.
@@ -1647,6 +1715,7 @@
class="tension-thesis"
data-en="The protocol lives in coexistence."
data-es="El protocolo vive en la coexistencia."
+ data-key="ontoref-tension-thesis"
>
The protocol lives in coexistence.
@@ -1659,6 +1728,7 @@
class="layer-label"
data-en="DECLARATIVE LAYER · Nickel"
data-es="CAPA DECLARATIVA · Nickel"
+ data-key="ontoref-layer-decl-label"
>
DECLARATIVE LAYER · Nickel
@@ -1670,6 +1740,7 @@
class="layer-desc"
data-en="Strong types, contracts, enums. Fails at definition time, not at runtime."
data-es="Tipos fuertes, contratos, enums. Falla en definición, no en runtime."
+ data-key="ontoref-layer-decl-desc"
>
Strong types, contracts, enums. Fails at definition time, not at
runtime.
@@ -1680,6 +1751,7 @@
class="layer-label"
data-en="OPERATIONAL LAYER · Nushell"
data-es="CAPA OPERACIONAL · Nushell"
+ data-key="ontoref-layer-op-label"
>
OPERATIONAL LAYER · Nushell
@@ -1691,6 +1763,7 @@
class="layer-desc"
data-en="Typed pipelines over structured data. No text streams."
data-es="Pipelines tipadas sobre datos estructurados. No streams de texto."
+ data-key="ontoref-layer-op-desc"
>
Typed pipelines over structured data. No text streams.
@@ -1700,6 +1773,7 @@
class="layer-label"
data-en="ENTRY POINT · Bash → Nu"
data-es="PUNTO DE ENTRADA · Bash → Nu"
+ data-key="ontoref-layer-entry-label"
>
ENTRY POINT · Bash → Nu
@@ -1711,6 +1785,7 @@
class="layer-desc"
data-en="Single entry point per project. Detects actor (developer/agent/CI), acquires lock, dispatches to correct Nu module."
data-es="Un único entry point por proyecto. Detecta actor (developer/agent/CI), adquiere lock, despacha al módulo Nu correcto."
+ data-key="ontoref-layer-entry-desc"
>
Single entry point per project. Detects actor
(developer/agent/CI), acquires lock, dispatches to correct Nu
@@ -1722,6 +1797,7 @@
class="layer-label"
data-en="KNOWLEDGE GRAPH · .ontology/"
data-es="GRAFO DE CONOCIMIENTO · .ontology/"
+ data-key="ontoref-layer-graph-label"
>
KNOWLEDGE GRAPH · .ontology/
@@ -1732,6 +1808,7 @@
class="layer-desc"
data-en="The project knows what it knows. Actor-agnostic. Machine-queryable via nickel export."
data-es="El proyecto sabe qué sabe. Actor-agnostic. Consultable por máquina vía nickel export."
+ data-key="ontoref-layer-graph-desc"
>
The project knows what it knows. Actor-agnostic. Machine-queryable
via nickel export.
@@ -1742,6 +1819,7 @@
class="layer-label"
data-en="RUNTIME LAYER · Rust + axum"
data-es="CAPA RUNTIME · Rust + axum"
+ data-key="ontoref-layer-runtime-label"
>
RUNTIME LAYER · Rust + axum
@@ -1755,6 +1833,7 @@
class="layer-desc"
data-en="Optional persistent daemon. NCL export cache, HTTP UI (12 pages), MCP server (19 tools), actor registry, notification store, search engine, SurrealDB persistence. Never a protocol requirement."
data-es="Daemon persistente opcional. Caché de exports NCL, UI HTTP (12 páginas), servidor MCP (19 herramientas), registro de actores, almacén de notificaciones, motor de búsqueda, persistencia SurrealDB. Nunca un requisito del protocolo."
+ data-key="ontoref-layer-runtime-desc"
>
Optional persistent daemon. NCL export cache, HTTP UI (12 pages),
MCP server (19 tools), actor registry, notification store, search
@@ -1766,6 +1845,7 @@
class="layer-label"
data-en="ADOPTION LAYER · Per-project"
data-es="CAPA DE ADOPCIÓN · Por proyecto"
+ data-key="ontoref-layer-adopt-label"
>
ADOPTION LAYER · Per-project
@@ -1777,6 +1857,7 @@
class="layer-desc"
data-en="Each project maintains its own .ontology/ data. Ontoref provides the schemas, modules, and migration scripts. Zero lock-in."
data-es="Cada proyecto mantiene sus propios datos de .ontology/. Ontoref provee los schemas, módulos y scripts de migración. Cero vendor lock-in."
+ data-key="ontoref-layer-adopt-desc"
>
Each project maintains its own .ontology/ data.
Ontoref provides the schemas, modules, and migration scripts. Zero
@@ -1789,7 +1870,7 @@
- Crates & Tooling
@@ -1805,7 +1886,7 @@
Load and query .ontology/ NCL files as typed Rust
structs
-
Node, Edge, Dimension, Gate, Membrane types
+
Node, Edge, Dimension, Gate, Membrane types — Node carries artifact_paths and adrs, both serde(default)
+ Graph node detail panel: artifacts, connections, and
+ ADR validators — each ADR is a clickable link
+ that opens the full record via GET /api/adr/{id}
+
Actor registry (DashMap): token, type (developer / agent / CI),
registered_at, last_seen, current_mode — serializable snapshot
@@ -1954,6 +2040,7 @@
class="adopt-title"
data-en="Adopt in Any Project"
data-es="Adoptar en Cualquier Proyecto"
+ data-key="ontoref-adoption-title"
>
Adopt in Any Project
@@ -1961,6 +2048,7 @@
class="adopt-subtitle"
data-en="ontoref setup wires up any new or existing project — idempotent scaffold with optional auth key bootstrap."
data-es="ontoref setup conecta cualquier proyecto nuevo o existente — scaffold idempotente con bootstrap de auth keys opcional."
+ data-key="ontoref-adoption-subtitle"
>
ontoref setup wires up any new or existing project —
idempotent scaffold with optional auth key bootstrap.
@@ -2025,6 +2113,7 @@
Daemon & MCP — Runtime Intelligence Layer
@@ -2037,6 +2126,7 @@
"
data-en="ontoref-daemon is an optional persistent process. It caches NCL exports, serves 12 UI pages, exposes 19 MCP tools, maintains an actor registry, stores notifications, indexes everything for search, and optionally persists to SurrealDB. Auth is opt-in: all surfaces (CLI, UI, MCP) exchange a project key for a UUID v4 session token via POST /sessions; CLI injects ONTOREF_TOKEN as Bearer automatically. It never changes the protocol — it accelerates and shares access to it. Configured via ~/.config/ontoref/config.ncl (Nickel, type-checked); edit interactively with ontoref config-edit. Started via NCL pipe bootstrap: ontoref-daemon-boot."
data-es="ontoref-daemon es un proceso persistente opcional. Cachea exports NCL, sirve 12 páginas de UI, expone 19 herramientas MCP, mantiene un registro de actores, almacena notificaciones, indexa todo para búsqueda y opcionalmente persiste en SurrealDB. Auth es opt-in: todas las superficies (CLI, UI, MCP) intercambian una project key por un token de sesión UUID v4 via POST /sessions; la CLI inyecta ONTOREF_TOKEN como Bearer automáticamente. Nunca cambia el protocolo — acelera y comparte el acceso a él. Configurado via ~/.config/ontoref/config.ncl (Nickel, type-checked); edición interactiva con ontoref config-edit. Iniciado via NCL pipe bootstrap: ontoref-daemon-boot."
+ data-key="ontoref-mcp-core-desc"
>
ontoref-daemon is an optional persistent process. It
caches NCL exports, serves 12 UI pages, exposes 19 MCP tools,
@@ -2058,6 +2148,7 @@
class="daemon-col-title"
data-en="The Web UI — 12 Pages"
data-es="La UI Web — 12 Páginas"
+ data-key="ontoref-ui-dashboard-title"
>
The Web UI — 12 Pages
@@ -2093,9 +2184,10 @@
/graphGraphD3 force-directed ontology graph — nodes colored by pole
- (Yang=orange, Yin=blue, Spiral=purple), clickable with
- detail panel, edge labelsCytoscape.js ontology graph — nodes colored by pole
+ (Yang=orange, Yin=blue, Spiral=purple), clickable detail
+ panel with artifacts, connections, and ADR links that open
+ the full record in a modal
@@ -2174,6 +2266,7 @@
class="daemon-col-title"
data-en="The MCP Server — 19 Tools"
data-es="El Servidor MCP — 19 Herramientas"
+ data-key="ontoref-mcp-query-title"
>
The MCP Server — 19 Tools
@@ -2181,8 +2274,8 @@
-
Tool
-
+
Tool
+
Description
@@ -2193,6 +2286,7 @@
List available tools and usage
@@ -2202,6 +2296,7 @@
Enumerate all registered projects
@@ -2211,6 +2306,7 @@
Set session default project context
@@ -2220,6 +2316,7 @@
Full project dashboard — health, drift, actors
@@ -2229,6 +2326,7 @@
Architecture overview and self-description
@@ -2238,6 +2336,7 @@
Free-text search across nodes, ADRs, modes
@@ -2247,6 +2346,7 @@
Fetch ontology node by id
@@ -2256,6 +2356,7 @@
Full ontology node with edges and constraints
@@ -2265,6 +2366,7 @@
List ADRs filtered by status
@@ -2274,6 +2376,7 @@
Full ADR content with constraints
@@ -2283,6 +2386,7 @@
List all reflection modes
@@ -2292,6 +2396,7 @@
Mode DAG contract — steps, preconditions, postconditions
@@ -2301,6 +2406,7 @@
Backlog items filtered by status
@@ -2310,6 +2416,7 @@
Add or update_status on a backlog item
@@ -2319,6 +2426,7 @@
All hard + soft architectural constraints
@@ -2328,6 +2436,7 @@
List Q&A knowledge store with optional filter
@@ -2337,6 +2446,7 @@
Persist new Q&A entry to reflection/qa.ncl
@@ -2346,6 +2456,7 @@
Quick actions catalog from .ontoref/config.ncl
@@ -2355,6 +2466,7 @@
Create reflection mode + register as quick action
@@ -2371,12 +2483,14 @@
SurrealDB Persistence — Optional
Enabled with --db feature flag and
@@ -2409,12 +2523,14 @@
Notification Barrier
pre_commit — pre-commit hook polls
@@ -2524,6 +2640,7 @@
The UI in Action · Graph View
@@ -2539,6 +2656,7 @@
Force-directed graph of the live ontology. Nodes are typed (Axiom ·
Tension · Practice) and polarized (Yang · Yin · Spiral). Click any
node to open its detail panel — artifacts, connections, NCL
@@ -2595,7 +2713,7 @@
- Technology Stack
@@ -2625,6 +2743,7 @@
Protocol Metrics
@@ -2679,6 +2798,7 @@
class="cta-title"
data-en="Structure That Remembers Why"
data-es="Estructura que Recuerda el Porqué"
+ data-key="ontoref-cta-title"
>
Structure That Remembers Why
@@ -2686,6 +2806,7 @@
class="cta-subtitle"
data-en="Start with ontoref setup. Your project gains machine-queryable invariants, living ADRs, actor-aware operational modes, and a daemon that shares context across every actor in real time."
data-es="Empieza con ontoref setup. Tu proyecto gana invariantes consultables por máquina, ADRs vivos, modos operacionales con actor-awareness y un daemon que comparte contexto entre todos los actores en tiempo real."
+ data-key="ontoref-cta-subtitle"
>
Start with ontoref setup. Your project gains
machine-queryable invariants, living ADRs, actor-aware operational
@@ -2697,6 +2818,7 @@
class="cta-button"
data-en="Explore the Protocol"
data-es="Explorar el Protocolo"
+ data-key="ontoref-cta-explore"
>Explore the Protocol
@@ -2710,6 +2832,7 @@
Protocol + Runtime. Zero enforcement. One graph per project.
diff --git a/card.ncl b/card.ncl
new file mode 100644
index 0000000..5068b55
--- /dev/null
+++ b/card.ncl
@@ -0,0 +1,25 @@
+let d = import "schemas/project-card.ncl" in
+
+d.ProjectCard & {
+ id = "ontoref",
+ name = "Ontoref",
+ tagline = "Structure that remembers why.",
+ description = "Self-describing project ontology protocol. Projects implement it via typed NCL schemas — axioms, tensions, practices, state, gates. A queryable structure for validating architectural decisions and auditing coherence.",
+ version = "0.1.0",
+ status = 'Active,
+ source = 'Local,
+ url = "https://ontoref.jesusperez.pro",
+ started_at = "2025",
+ tags = ["nickel", "ontology", "governance", "protocol", "architecture"],
+ tools = ["Nickel", "Nushell"],
+ features = [
+ "Three-layer NCL pattern: schemas → defaults → config",
+ "Reflection modes: structured agent/developer workflows",
+ "DAG topology for architectural decisions",
+ "Gate membranes for controlled external signal entry",
+ "Protocol — never a runtime dependency",
+ ],
+ featured = false,
+ sort_order = 4,
+ logo = "assets/logo.svg",
+}
diff --git a/crates/ontoref-daemon/src/api.rs b/crates/ontoref-daemon/src/api.rs
index cf7ddc2..2e8d342 100644
--- a/crates/ontoref-daemon/src/api.rs
+++ b/crates/ontoref-daemon/src/api.rs
@@ -77,6 +77,7 @@ impl AuthRateLimiter {
/// Returns true if `s` has the format of a UUID v4 (36 chars, hyphens at
/// positions 8/13/18/23). Used to distinguish session tokens from raw passwords
/// in `check_primary_auth` without needing to attempt argon2 on token strings.
+#[cfg(feature = "ui")]
fn is_uuid_v4(s: &str) -> bool {
if s.len() != 36 {
return false;
@@ -306,6 +307,11 @@ pub fn router(state: AppState) -> axum::Router {
.route("/describe/capabilities", get(describe_capabilities))
.route("/describe/connections", get(describe_connections))
.route("/describe/actor-init", get(describe_actor_init))
+ // ADR read endpoint
+ .route("/adr/{id}", get(get_adr))
+ // Ontology extension endpoints
+ .route("/ontology", get(list_ontology_extensions))
+ .route("/ontology/{file}", get(get_ontology_extension))
// Backlog JSON endpoint
.route("/backlog-json", get(backlog_json))
// Q&A read endpoint
@@ -336,7 +342,15 @@ pub fn router(state: AppState) -> axum::Router {
let app = app
.route("/qa/add", post(crate::ui::handlers::qa_add))
.route("/qa/delete", post(crate::ui::handlers::qa_delete))
- .route("/qa/update", post(crate::ui::handlers::qa_update));
+ .route("/qa/update", post(crate::ui::handlers::qa_update))
+ .route(
+ "/search/bookmark/add",
+ post(crate::ui::handlers::search_bookmark_add),
+ )
+ .route(
+ "/search/bookmark/delete",
+ post(crate::ui::handlers::search_bookmark_delete),
+ );
let app = app.with_state(state.clone());
@@ -1274,6 +1288,151 @@ async fn describe_actor_init(
}
}
+// ── ADR read endpoint ────────────────────────────────────────────────────────
+
+#[derive(Deserialize)]
+struct AdrQuery {
+ slug: Option<String>,
+}
+
+async fn get_adr(
+ State(state): State<AppState>,
+ Path(id): Path<String>,
+ Query(q): Query<AdrQuery>,
+) -> impl IntoResponse {
+ state.touch_activity();
+ let (root, cache, import_path) = resolve_project_ctx(&state, q.slug.as_deref());
+ let adrs_dir = root.join("adrs");
+
+ let entries = match std::fs::read_dir(&adrs_dir) {
+ Ok(e) => e,
+ Err(_) => {
+ return (
+ StatusCode::NOT_FOUND,
+ Json(serde_json::json!({ "error": "adrs directory not found" })),
+ );
+ }
+ };
+
+ for entry in entries.flatten() {
+ let path = entry.path();
+ if path.extension().and_then(|e| e.to_str()) != Some("ncl") {
+ continue;
+ }
+ let stem = path
+ .file_stem()
+ .and_then(|s| s.to_str())
+ .unwrap_or("")
+ .to_string();
+ if !stem.contains(id.as_str()) {
+ continue;
+ }
+ return match cache.export(&path, import_path.as_deref()).await {
+ Ok((v, _)) => (StatusCode::OK, Json(v)),
+ Err(e) => (
+ StatusCode::INTERNAL_SERVER_ERROR,
+ Json(serde_json::json!({ "error": e.to_string() })),
+ ),
+ };
+ }
+
+ (
+ StatusCode::NOT_FOUND,
+ Json(serde_json::json!({ "error": format!("ADR '{}' not found", id) })),
+ )
+}
+
+// ── Ontology extension endpoints ─────────────────────────────────────────────
+
+const CORE_FILES: &[&str] = &["core.ncl", "state.ncl", "gate.ncl"];
+
+#[derive(Deserialize)]
+struct OntologyQuery {
+ slug: Option<String>,
+}
+
+async fn list_ontology_extensions(
+ State(state): State<AppState>,
+ Query(q): Query<OntologyQuery>,
+) -> impl IntoResponse {
+ state.touch_activity();
+ let (root, _, _) = resolve_project_ctx(&state, q.slug.as_deref());
+ let ontology_dir = root.join(".ontology");
+
+ let entries = match std::fs::read_dir(&ontology_dir) {
+ Ok(e) => e,
+ Err(_) => {
+ return (
+ StatusCode::OK,
+ Json(serde_json::json!({ "extensions": [] })),
+ );
+ }
+ };
+
+ let mut extensions: Vec<serde_json::Value> = entries
+ .flatten()
+ .filter_map(|e| {
+ let path = e.path();
+ if path.extension().and_then(|x| x.to_str()) != Some("ncl") {
+ return None;
+ }
+ let name = path.file_name()?.to_str()?.to_string();
+ if CORE_FILES.contains(&name.as_str()) {
+ return None;
+ }
+ let stem = path.file_stem()?.to_str()?.to_string();
+ Some(serde_json::json!({ "file": name, "id": stem }))
+ })
+ .collect();
+
+ extensions.sort_by_key(|v| v["id"].as_str().unwrap_or("").to_string());
+ (
+ StatusCode::OK,
+ Json(serde_json::json!({ "extensions": extensions })),
+ )
+}
+
+async fn get_ontology_extension(
+ State(state): State<AppState>,
+ Path(file): Path<String>,
+ Query(q): Query<OntologyQuery>,
+) -> impl IntoResponse {
+ state.touch_activity();
+ let (root, cache, import_path) = resolve_project_ctx(&state, q.slug.as_deref());
+
+ // Reject traversal attempts and core files — they have dedicated endpoints.
+ if file.contains('/') || file.contains("..") || CORE_FILES.contains(&file.as_str()) {
+ return (
+ StatusCode::BAD_REQUEST,
+ Json(serde_json::json!({ "error": "invalid file name" })),
+ );
+ }
+
+ let file = if file.ends_with(".ncl") {
+ file
+ } else {
+ format!("{file}.ncl")
+ };
+
+ let path = root.join(".ontology").join(&file);
+ if !path.exists() {
+ return (
+ StatusCode::NOT_FOUND,
+ Json(
+ serde_json::json!({ "error": format!("ontology extension '{}' not found", file) }),
+ ),
+ );
+ }
+
+ match cache.export(&path, import_path.as_deref()).await {
+ Ok((v, _)) => (StatusCode::OK, Json(v)),
+ Err(e) => (
+ StatusCode::INTERNAL_SERVER_ERROR,
+ Json(serde_json::json!({ "error": e.to_string() })),
+ ),
+ }
+}
+
async fn backlog_json(
State(state): State,
Query(q): Query,
diff --git a/crates/ontoref-daemon/src/main.rs b/crates/ontoref-daemon/src/main.rs
index 64eb7f6..2be76cf 100644
--- a/crates/ontoref-daemon/src/main.rs
+++ b/crates/ontoref-daemon/src/main.rs
@@ -108,6 +108,23 @@ fn apply_stdin_config(cli: &mut Cli) -> serde_json::Value {
json
}
+/// Run `nickel export` on `config_path` with an optional `NICKEL_IMPORT_PATH`.
+fn run_nickel_config(
+ config_path: &std::path::Path,
+ import_path: Option<&str>,
+) -> Option<serde_json::Value> {
+ let mut cmd = Command::new("nickel");
+ cmd.arg("export").arg(config_path);
+ if let Some(ip) = import_path {
+ cmd.env("NICKEL_IMPORT_PATH", ip);
+ }
+ let output = cmd.output().ok()?;
+ if !output.status.success() {
+ return None;
+ }
+ serde_json::from_slice(&output.stdout).ok()
+}
+
/// Load daemon config from .ontoref/config.ncl and override CLI defaults.
/// Returns (NICKEL_IMPORT_PATH, parsed config JSON) — both optional.
fn load_config_overrides(cli: &mut Cli) -> (Option, Option) {
@@ -116,27 +133,23 @@ fn load_config_overrides(cli: &mut Cli) -> (Option, Option o,
- Err(e) => {
- warn!(error = %e, path = %config_path.display(), "failed to read config");
- return (None, None);
- }
- };
+ // First attempt: no NICKEL_IMPORT_PATH (fast path, works for configs without
+ // imports). Second attempt: include project root and common sub-paths to
+ // resolve card/schema imports. Canonicalize here so the fallback paths are
+ // absolute even when project_root is ".".
+ let abs_root = cli
+ .project_root
+ .canonicalize()
+ .unwrap_or_else(|_| cli.project_root.clone());
+ let root = abs_root.display().to_string();
+ let fallback_ip = format!("{root}:{root}/ontology:{root}/.ontology:{root}/ontology/schemas");
+ let config_json = run_nickel_config(&config_path, None)
+ .or_else(|| run_nickel_config(&config_path, Some(&fallback_ip)));
- if !output.status.success() {
- warn!("nickel export failed for config");
- return (None, None);
- }
-
- let config_json: serde_json::Value = match serde_json::from_slice(&output.stdout) {
- Ok(v) => v,
- Err(e) => {
- warn!(error = %e, "failed to parse config JSON");
+ let config_json = match config_json {
+ Some(v) => v,
+ None => {
+ warn!("nickel export failed for config");
return (None, None);
}
};
@@ -225,12 +238,23 @@ fn load_config_overrides(cli: &mut Cli) -> (Option, Option>()
.join(":")
})
@@ -498,19 +522,27 @@ async fn main() {
}
// If templates/public dirs were not set by config or CLI, fall back to the
- // XDG share location installed by `just install-daemon`.
+ // platform data dir installed by `just install-daemon`.
+ // install.nu uses ~/Library/Application Support/ontoref on macOS and
+ // ~/.local/share/ontoref on Linux — both without the `-daemon` suffix.
#[cfg(feature = "ui")]
{
- let xdg_share = std::env::var_os("HOME")
- .map(|home| std::path::PathBuf::from(home).join(".local/share/ontoref-daemon"));
+ let data_share = std::env::var_os("HOME").map(|home| {
+ let base = std::path::PathBuf::from(home);
+ #[cfg(target_os = "macos")]
+ let share = base.join("Library/Application Support/ontoref");
+ #[cfg(not(target_os = "macos"))]
+ let share = base.join(".local/share/ontoref");
+ share
+ });
if cli.templates_dir.is_none() {
- let candidate = xdg_share.as_deref().map(|s| s.join("templates"));
+ let candidate = data_share.as_deref().map(|s| s.join("templates"));
if candidate.as_deref().is_some_and(|p| p.exists()) {
cli.templates_dir = candidate;
}
}
if cli.public_dir.is_none() {
- let candidate = xdg_share.as_deref().map(|s| s.join("public"));
+ let candidate = data_share.as_deref().map(|s| s.join("public"));
if candidate.as_deref().is_some_and(|p| p.exists()) {
cli.public_dir = candidate;
}
@@ -550,6 +582,39 @@ async fn main() {
.unwrap_or("default")
.to_string();
+ // In --config-stdin (service) mode, the global nickel_import_paths is always
+ // empty. Per-project import paths live in each project's project.ncl, which
+ // is already included in stdin_projects. The primary project's entry is
+ // skipped by the registry (slug collision), so we must extract its
+ // import_path from the matching stdin_projects entry here.
+ let nickel_import_path = if cli.config_stdin {
+ stdin_projects
+ .iter()
+ .find(|e| {
+ std::path::PathBuf::from(&e.root)
+ .canonicalize()
+ .ok()
+ .as_deref()
+ == Some(project_root.as_path())
+ })
+ .and_then(|e| {
+ let joined = e
+ .nickel_import_paths
+ .iter()
+ .map(|p| resolve_nickel_import_path(p, &project_root))
+ .collect::<Vec<_>>()
+ .join(":");
+ if joined.is_empty() {
+ None
+ } else {
+ Some(joined)
+ }
+ })
+ .or(nickel_import_path)
+ } else {
+ nickel_import_path
+ };
+
// Build primary ProjectContext up-front so its Arcs (cache, actors,
// notifications, seed_lock, ontology_version) can be aliased into AppState
// and reused by the watcher before the registry is assembled.
@@ -1253,6 +1318,15 @@ async fn connect_db(cli: &Cli) -> Option> {
}
#[cfg(feature = "ui")]
+fn resolve_nickel_import_path(p: &str, project_root: &std::path::Path) -> String {
+ let c = std::path::Path::new(p);
+ if c.is_absolute() {
+ p.to_owned()
+ } else {
+ project_root.join(c).display().to_string()
+ }
+}
+
fn resolve_asset_dir(project_root: &std::path::Path, config_dir: &str) -> std::path::PathBuf {
let from_root = project_root.join(config_dir);
if from_root.exists() {
diff --git a/crates/ontoref-daemon/src/mcp/mod.rs b/crates/ontoref-daemon/src/mcp/mod.rs
index a7c9e35..e7e1733 100644
--- a/crates/ontoref-daemon/src/mcp/mod.rs
+++ b/crates/ontoref-daemon/src/mcp/mod.rs
@@ -148,6 +148,34 @@ struct QaAddInput {
project: Option,
}
+#[derive(Deserialize, JsonSchema, Default)]
+struct BookmarkListInput {
+ /// Project slug. Omit to use the default project.
+ project: Option<String>,
+ /// Optional substring filter on node_id or title.
+ filter: Option<String>,
+}
+
+#[derive(Deserialize, JsonSchema, Default)]
+struct BookmarkAddInput {
+ /// Ontology node id to bookmark (e.g. `"add-project"`).
+ node_id: String,
+ /// Kind of the result: `"node"`, `"adr"`, or `"mode"`.
+ kind: Option<String>,
+ /// Human-readable title of the bookmarked node.
+ title: String,
+ /// Ontology level: `Axiom`, `Tension`, `Practice`, `Project`. May be empty.
+ level: Option<String>,
+ /// Search term that produced this result.
+ term: Option<String>,
+ /// Actor saving the bookmark. Defaults to `"agent"`.
+ actor: Option<String>,
+ /// Optional tags for categorisation.
+ tags: Option<Vec<String>>,
+ /// Project slug. Omit to use the default project.
+ project: Option<String>,
+}
+
#[derive(Deserialize, JsonSchema, Default)]
struct ActionListInput {
/// Project slug. Omit to use the default project.
@@ -201,6 +229,8 @@ impl OntoreServer {
.with_async_tool::()
.with_async_tool::()
.with_async_tool::()
+ .with_async_tool::()
+ .with_async_tool::()
.with_async_tool::()
.with_async_tool::()
.with_async_tool::()
@@ -209,6 +239,8 @@ impl OntoreServer {
.with_async_tool::()
.with_async_tool::()
.with_async_tool::()
+ .with_async_tool::()
+ .with_async_tool::()
.with_async_tool::()
.with_async_tool::()
}
@@ -544,6 +576,135 @@ impl AsyncTool for GetAdrTool {
}
}
+// ── Tool: list_ontology_extensions
+// ──────────────────────────────────────────────
+
+struct ListOntologyExtensionsTool;
+
+impl ToolBase for ListOntologyExtensionsTool {
+ type Parameter = ProjectParam;
+ type Output = serde_json::Value;
+ type Error = ToolError;
+
+ fn name() -> Cow<'static, str> {
+ "ontoref_list_ontology_extensions".into()
+ }
+
+ fn description() -> Option<Cow<'static, str>> {
+ Some(
+ "List extra .ontology/*.ncl files beyond core.ncl, state.ncl, and gate.ncl. These are \
+ project-defined domain extensions (e.g. career.ncl, personal.ncl)."
+ .into(),
+ )
+ }
+
+ fn output_schema() -> Option> {
+ None
+ }
+}
+
+impl AsyncTool for ListOntologyExtensionsTool {
+ async fn invoke(
+ service: &OntoreServer,
+ param: ProjectParam,
+ ) -> Result {
+ debug!(tool = "list_ontology_extensions", project = ?param.project);
+ let ctx = service.project_ctx(param.project.as_deref());
+ let ontology_dir = ctx.root.join(".ontology");
+
+ const CORE: &[&str] = &["core.ncl", "state.ncl", "gate.ncl"];
+
+ let Ok(entries) = std::fs::read_dir(&ontology_dir) else {
+ return Ok(serde_json::json!({ "extensions": [] }));
+ };
+
+ let mut extensions: Vec<serde_json::Value> = entries
+ .flatten()
+ .filter_map(|e| {
+ let path = e.path();
+ if path.extension().and_then(|x| x.to_str()) != Some("ncl") {
+ return None;
+ }
+ let name = path.file_name()?.to_str()?.to_string();
+ if CORE.contains(&name.as_str()) {
+ return None;
+ }
+ let stem = path.file_stem()?.to_str()?.to_string();
+ Some(serde_json::json!({ "file": name, "id": stem }))
+ })
+ .collect();
+
+ extensions.sort_by_key(|v| v["id"].as_str().unwrap_or("").to_string());
+ Ok(serde_json::json!({ "extensions": extensions }))
+ }
+}
+
+// ── Tool: get_ontology_extension
+// ────────────────────────────────────────────
+
+struct GetOntologyExtensionTool;
+
+impl ToolBase for GetOntologyExtensionTool {
+ type Parameter = GetItemInput;
+ type Output = serde_json::Value;
+ type Error = ToolError;
+
+ fn name() -> Cow<'static, str> {
+ "ontoref_get_ontology_extension".into()
+ }
+
+ fn description() -> Option<Cow<'static, str>> {
+ Some(
+ "Export a project-defined .ontology extension file by stem (e.g. \"career\", \
+ \"personal\"). Returns the full exported JSON. Use ontoref_list_ontology_extensions \
+ to discover available files."
+ .into(),
+ )
+ }
+
+ fn output_schema() -> Option> {
+ None
+ }
+}
+
+impl AsyncTool for GetOntologyExtensionTool {
+ async fn invoke(
+ service: &OntoreServer,
+ param: GetItemInput,
+ ) -> Result {
+ debug!(tool = "get_ontology_extension", id = %param.id, project = ?param.project);
+ let ctx = service.project_ctx(param.project.as_deref());
+
+ const CORE: &[&str] = &["core.ncl", "state.ncl", "gate.ncl"];
+ let file = if param.id.ends_with(".ncl") {
+ param.id.clone()
+ } else {
+ format!("{}.ncl", param.id)
+ };
+
+ if file.contains('/') || file.contains("..") || CORE.contains(&file.as_str()) {
+ return Err(ToolError(format!(
+ "'{}' is a core file — use dedicated tools for core/state/gate",
+ param.id
+ )));
+ }
+
+ let path = ctx.root.join(".ontology").join(&file);
+ if !path.exists() {
+ return Err(ToolError(format!(
+ "ontology extension '{}' not found",
+ param.id
+ )));
+ }
+
+ ctx.cache
+ .export(&path, ctx.import_path.as_deref())
+ .await
+ .map(|(v, _)| v)
+ .map_err(|e| ToolError(e.to_string()))
+ }
+}
+
// ── Tool: list_modes
// ────────────────────────────────────────────────────────────
@@ -919,6 +1080,10 @@ impl AsyncTool for HelpTool {
"params": [{"name": "project", "required": false}] },
{ "name": "ontoref_get_adr", "description": "Full ADR by id or partial stem (e.g. adr-001).",
"params": [{"name": "id", "required": true}, {"name": "project", "required": false}] },
+ { "name": "ontoref_list_ontology_extensions", "description": "List extra .ontology/*.ncl files beyond core/state/gate.",
+ "params": [{"name": "project", "required": false}] },
+ { "name": "ontoref_get_ontology_extension", "description": "Export a project-defined .ontology extension by stem (e.g. career, personal).",
+ "params": [{"name": "id", "required": true}, {"name": "project", "required": false}] },
{ "name": "ontoref_list_modes", "description": "List all reflection modes with id, trigger, step count.",
"params": [{"name": "project", "required": false}] },
{ "name": "ontoref_get_mode", "description": "Full reflection mode including all steps and preconditions.",
@@ -1756,6 +1921,161 @@ impl ServerHandler for OntoreServer {
// ── Entry points
// ────────────────────────────────────────────────────────────────
+// ── Tool: bookmark_list
+// ─────────────────────────────────────────────────────────────────────────────
+
+struct BookmarkListTool;
+
+impl ToolBase for BookmarkListTool {
+ type Parameter = BookmarkListInput;
+ type Output = serde_json::Value;
+ type Error = ToolError;
+
+ fn name() -> Cow<'static, str> {
+ "ontoref_bookmark_list".into()
+ }
+
+ fn description() -> Option<Cow<'static, str>> {
+ Some(
+ "List search bookmarks stored in reflection/search_bookmarks.ncl. Optionally filter \
+ by node_id or title substring."
+ .into(),
+ )
+ }
+
+ fn output_schema() -> Option> {
+ None
+ }
+}
+
+impl AsyncTool for BookmarkListTool {
+ async fn invoke(
+ service: &OntoreServer,
+ param: BookmarkListInput,
+ ) -> Result {
+ debug!(tool = "bookmark_list", project = ?param.project);
+ let ctx = service.project_ctx(param.project.as_deref());
+ let bm_path = ctx.root.join("reflection").join("search_bookmarks.ncl");
+
+ if !bm_path.exists() {
+ return Ok(serde_json::json!({ "entries": [], "count": 0 }));
+ }
+
+ let (json, _) = ctx
+ .cache
+ .export(&bm_path, ctx.import_path.as_deref())
+ .await
+ .map_err(|e| ToolError(e.to_string()))?;
+
+ let mut entries: Vec<serde_json::Value> = json
+ .get("entries")
+ .and_then(|v| v.as_array())
+ .cloned()
+ .unwrap_or_default();
+
+ if let Some(filter) = param.filter.as_deref() {
+ let lc = filter.to_lowercase();
+ entries.retain(|e| {
+ let id_match = e
+ .get("node_id")
+ .and_then(|v| v.as_str())
+ .map(|s| s.to_lowercase().contains(&lc))
+ .unwrap_or(false);
+ let title_match = e
+ .get("title")
+ .and_then(|v| v.as_str())
+ .map(|s| s.to_lowercase().contains(&lc))
+ .unwrap_or(false);
+ id_match || title_match
+ });
+ }
+
+ let count = entries.len();
+ Ok(serde_json::json!({ "entries": entries, "count": count }))
+ }
+}
+
+// ── Tool: bookmark_add
+// ────────────────────────────────────────────────────────
+
+struct BookmarkAddTool;
+
+impl ToolBase for BookmarkAddTool {
+ type Parameter = BookmarkAddInput;
+ type Output = serde_json::Value;
+ type Error = ToolError;
+
+ fn name() -> Cow<'static, str> {
+ "ontoref_bookmark_add".into()
+ }
+
+ fn description() -> Option<Cow<'static, str>> {
+ Some(
+ concat!(
+ "Save a search result as a bookmark in reflection/search_bookmarks.ncl (persisted \
+ to disk, git-versioned). ",
+ "Use this when the user stars/bookmarks a search result in the CLI or UI. ",
+ "Required: node_id, title. Optional: kind, level, term, actor, tags.",
+ )
+ .into(),
+ )
+ }
+
+ fn output_schema() -> Option> {
+ None
+ }
+}
+
+impl AsyncTool for BookmarkAddTool {
+ async fn invoke(
+ service: &OntoreServer,
+ param: BookmarkAddInput,
+ ) -> Result {
+ debug!(tool = "bookmark_add", project = ?param.project, node_id = %param.node_id);
+ let ctx = service.project_ctx(param.project.as_deref());
+ let bm_path = ctx.root.join("reflection").join("search_bookmarks.ncl");
+
+ if !bm_path.exists() {
+ return Err(ToolError(format!(
+ "search_bookmarks.ncl not found at {} — run ontoref setup first",
+ bm_path.display()
+ )));
+ }
+
+ let kind = param.kind.as_deref().unwrap_or("node");
+ let level = param.level.as_deref().unwrap_or("");
+ let term = param.term.as_deref().unwrap_or("");
+ let actor = param.actor.as_deref().unwrap_or("agent");
+ let tags = param.tags.as_deref().unwrap_or(&[]);
+ let now = today_iso();
+
+ let id = crate::ui::search_bookmarks_ncl::add_entry(
+ &bm_path,
+ crate::ui::search_bookmarks_ncl::NewBookmark {
+ node_id: ¶m.node_id,
+ kind,
+ title: ¶m.title,
+ level,
+ term,
+ actor,
+ created_at: &now,
+ tags,
+ },
+ )
+ .map_err(|e| ToolError(e.to_string()))?;
+
+ ctx.cache.invalidate_file(&bm_path);
+
+ Ok(serde_json::json!({
+ "ok": true,
+ "id": id,
+ "created_at": now,
+ "node_id": param.node_id,
+ "title": param.title,
+ }))
+ }
+}
+
/// Run the MCP server over stdin/stdout — for use as a `command`-mode MCP
/// server in Claude Desktop, Cursor, or any stdio-compatible AI client.
pub async fn serve_stdio(state: AppState) -> anyhow::Result<()> {
diff --git a/crates/ontoref-daemon/src/ui/handlers.rs b/crates/ontoref-daemon/src/ui/handlers.rs
index 425bed0..1a6afac 100644
--- a/crates/ontoref-daemon/src/ui/handlers.rs
+++ b/crates/ontoref-daemon/src/ui/handlers.rs
@@ -110,6 +110,39 @@ fn resolve_logo_url(raw: &str, base_url: &str) -> String {
}
}
+/// Load and export `.ontoref/config.ncl`, returning the full JSON value.
+/// Returns `None` if the file doesn't exist or Nickel export fails.
+async fn load_config_json(
+ root: &std::path::Path,
+ cache: &Arc,
+ import_path: Option<&str>,
+) -> Option {
+ let config_path = root.join(".ontoref").join("config.ncl");
+ if !config_path.exists() {
+ return None;
+ }
+ match cache.export(&config_path, import_path).await {
+ Ok((json, _)) => {
+ tracing::info!(
+ path = %config_path.display(),
+ has_card = json.get("card").is_some(),
+ card_tagline = json.get("card").and_then(|c| c.get("tagline")).and_then(|v| v.as_str()).unwrap_or(""),
+ "config.ncl loaded"
+ );
+ Some(json)
+ }
+ Err(e) => {
+ tracing::warn!(
+ path = %config_path.display(),
+ import_path = ?import_path,
+ error = %e,
+ "config.ncl export failed"
+ );
+ None
+ }
+ }
+}
+
/// Load logo URLs from `.ontoref/config.ncl` ui section.
/// Returns `(logo_light_url, logo_dark_url)` — either may be `None`.
async fn load_logos(
@@ -118,11 +151,7 @@ async fn load_logos(
import_path: Option<&str>,
base_url: &str,
) -> (Option, Option) {
- let config_path = root.join(".ontoref").join("config.ncl");
- if !config_path.exists() {
- return (None, None);
- }
- let Ok((json, _)) = cache.export(&config_path, import_path).await else {
+ let Some(json) = load_config_json(root, cache, import_path).await else {
return (None, None);
};
let ui = json.get("ui");
@@ -137,10 +166,27 @@ async fn load_logos(
(logo, logo_dark)
}
+/// Extract card data from a config JSON (from `.ontoref/config.ncl` `card`
+/// field).
+fn extract_card_from_config(json: &serde_json::Value) -> serde_json::Value {
+ let Some(card) = json.get("card") else {
+ return serde_json::Value::Null;
+ };
+ serde_json::json!({
+ "tagline": card.get("tagline").and_then(|v| v.as_str()).unwrap_or(""),
+ "description": card.get("description").and_then(|v| v.as_str()).unwrap_or(""),
+ "version": card.get("version").and_then(|v| v.as_str()).unwrap_or(""),
+ "status": card.get("status").and_then(|v| v.as_str()).unwrap_or(""),
+ "url": card.get("url").and_then(|v| v.as_str()).unwrap_or(""),
+ "tags": card.get("tags").and_then(|v| v.as_array()).cloned().unwrap_or_default(),
+ "features": card.get("features").and_then(|v| v.as_array()).cloned().unwrap_or_default(),
+ })
+}
+
/// Insert logo and MCP metadata into a Tera context.
/// Logos are loaded from `.ontoref/config.ncl`; MCP availability is
/// compile-time.
-async fn insert_brand_ctx(
+pub(crate) async fn insert_brand_ctx(
ctx: &mut Context,
root: &std::path::Path,
cache: &Arc,
@@ -154,7 +200,7 @@ async fn insert_brand_ctx(
}
/// Insert MCP metadata and daemon version into a Tera context.
-fn insert_mcp_ctx(ctx: &mut Context) {
+pub(crate) fn insert_mcp_ctx(ctx: &mut Context) {
ctx.insert("daemon_version", env!("CARGO_PKG_VERSION"));
#[cfg(feature = "mcp")]
{
@@ -369,9 +415,16 @@ pub async fn notifications_page(State(state): State) -> Result) -> Result, UiError> {
let tera = tera_ref(&state)?;
+ let bookmarks = load_bookmark_entries(
+ &state.cache,
+ &state.project_root,
+ state.nickel_import_path.as_deref(),
+ )
+ .await;
let mut ctx = Context::new();
ctx.insert("base_url", "/ui");
ctx.insert("slug", &Option::::None);
+ ctx.insert("server_bookmarks", &bookmarks);
insert_brand_ctx(
&mut ctx,
&state.project_root,
@@ -391,10 +444,17 @@ pub async fn search_page_mp(
let tera = tera_ref(&state)?;
let ctx_ref = state.registry.get(&slug).ok_or(UiError::NotConfigured)?;
let base_url = format!("/ui/{slug}");
+ let bookmarks = load_bookmark_entries(
+ &ctx_ref.cache,
+ &ctx_ref.root,
+ ctx_ref.import_path.as_deref(),
+ )
+ .await;
let mut ctx = Context::new();
ctx.insert("base_url", &base_url);
ctx.insert("slug", &slug);
ctx.insert("current_role", &auth_role_str(&auth));
+ ctx.insert("server_bookmarks", &bookmarks);
insert_brand_ctx(
&mut ctx,
&ctx_ref.root,
@@ -597,8 +657,34 @@ pub async fn project_picker(State(state): State) -> Result) -> Result warn!(action_id, mode, error = %e, "actions_run: spawn failed"),
}
}
+
+// ── Search bookmarks mutation
+// ─────────────────────────────────────────────────
+
+#[derive(Deserialize)]
+pub struct BookmarkAddRequest {
+ pub node_id: String,
+ pub kind: Option<String>,
+ pub title: String,
+ pub level: Option<String>,
+ pub term: Option<String>,
+ pub actor: Option<String>,
+ pub tags: Option<Vec<String>>,
+ pub slug: Option<String>,
+}
+
+#[derive(Deserialize)]
+pub struct BookmarkDeleteRequest {
+ pub id: String,
+ pub slug: Option<String>,
+}
+
+pub async fn search_bookmark_add(
+ State(state): State<crate::api::AppState>,
+ Json(body): Json<BookmarkAddRequest>,
+) -> impl IntoResponse {
+ let (root, cache) = resolve_bookmark_ctx(&state, body.slug.as_deref());
+ let bm_path = root.join("reflection").join("search_bookmarks.ncl");
+ let _guard = state.ncl_write_lock.acquire(&bm_path).await;
+ if !bm_path.exists() {
+ return (
+ StatusCode::NOT_FOUND,
+ Json(serde_json::json!({
+ "error": "search_bookmarks.ncl not found — run ontoref setup first"
+ })),
+ );
+ }
+
+ let kind = body.kind.as_deref().unwrap_or("node");
+ let level = body.level.as_deref().unwrap_or("");
+ let term = body.term.as_deref().unwrap_or("");
+ let actor = body.actor.as_deref().unwrap_or("human");
+ let tags = body.tags.as_deref().unwrap_or(&[]);
+ let now = now_iso();
+
+ match super::search_bookmarks_ncl::add_entry(
+ &bm_path,
+ super::search_bookmarks_ncl::NewBookmark {
+ node_id: &body.node_id,
+ kind,
+ title: &body.title,
+ level,
+ term,
+ actor,
+ created_at: &now,
+ tags,
+ },
+ ) {
+ Ok(id) => {
+ cache.invalidate_file(&bm_path);
+ (
+ StatusCode::OK,
+ Json(serde_json::json!({
+ "ok": true,
+ "id": id,
+ "created_at": now,
+ "node_id": body.node_id,
+ })),
+ )
+ }
+ Err(e) => {
+ warn!(error = %e, "search_bookmark_add failed");
+ (
+ StatusCode::INTERNAL_SERVER_ERROR,
+ Json(serde_json::json!({ "error": e.to_string() })),
+ )
+ }
+ }
+}
+
+pub async fn search_bookmark_delete(
+ State(state): State<crate::api::AppState>,
+ Json(body): Json<BookmarkDeleteRequest>,
+) -> impl IntoResponse {
+ let (root, cache) = resolve_bookmark_ctx(&state, body.slug.as_deref());
+ let bm_path = root.join("reflection").join("search_bookmarks.ncl");
+ let _guard = state.ncl_write_lock.acquire(&bm_path).await;
+ if !bm_path.exists() {
+ return (
+ StatusCode::NOT_FOUND,
+ Json(serde_json::json!({ "error": "search_bookmarks.ncl not found" })),
+ );
+ }
+ match super::search_bookmarks_ncl::remove_entry(&bm_path, &body.id) {
+ Ok(()) => {
+ cache.invalidate_file(&bm_path);
+ (StatusCode::OK, Json(serde_json::json!({ "ok": true })))
+ }
+ Err(e) => {
+ warn!(error = %e, "search_bookmark_delete failed");
+ (
+ StatusCode::INTERNAL_SERVER_ERROR,
+ Json(serde_json::json!({ "error": e.to_string() })),
+ )
+ }
+ }
+}
+
+pub(crate) async fn load_bookmark_entries(
+ cache: &Arc,
+ root: &std::path::Path,
+ import_path: Option<&str>,
+) -> Vec<serde_json::Value> {
+ let bm_path = root.join("reflection").join("search_bookmarks.ncl");
+ if !bm_path.exists() {
+ return vec![];
+ }
+ match cache.export(&bm_path, import_path).await {
+ Ok((json, _)) => json
+ .get("entries")
+ .and_then(|v| v.as_array())
+ .cloned()
+ .unwrap_or_default(),
+ Err(_) => vec![],
+ }
+}
+
+fn resolve_bookmark_ctx(
+ state: &crate::api::AppState,
+ slug: Option<&str>,
+) -> (std::path::PathBuf, Arc) {
+ if let Some(s) = slug {
+ if let Some(ctx) = state.registry.get(s) {
+ return (ctx.root.clone(), ctx.cache.clone());
+ }
+ }
+ (state.project_root.clone(), state.cache.clone())
+}
diff --git a/crates/ontoref-daemon/src/ui/login.rs b/crates/ontoref-daemon/src/ui/login.rs
index b708d25..c9bbfe6 100644
--- a/crates/ontoref-daemon/src/ui/login.rs
+++ b/crates/ontoref-daemon/src/ui/login.rs
@@ -6,7 +6,7 @@ use axum::{
use serde::Deserialize;
use tera::Context;
-use super::handlers::{render, UiError};
+use super::handlers::{insert_brand_ctx, insert_mcp_ctx, render, UiError};
use crate::api::AppState;
use crate::session::{extract_cookie, COOKIE_NAME};
@@ -15,10 +15,24 @@ pub async fn login_page(
Path(slug): Path,
) -> Result, UiError> {
let tera = state.tera.as_ref().ok_or(UiError::NotConfigured)?;
+ let base_url = format!("/ui/{slug}");
let mut ctx = Context::new();
ctx.insert("slug", &slug);
ctx.insert("error", &false);
- ctx.insert("base_url", &format!("/ui/{slug}"));
+ ctx.insert("base_url", &base_url);
+ ctx.insert("hide_project_nav", &true);
+ ctx.insert("current_role", "");
+ insert_mcp_ctx(&mut ctx);
+ if let Some(proj) = state.registry.get(&slug) {
+ insert_brand_ctx(
+ &mut ctx,
+ &proj.root,
+ &proj.cache,
+ proj.import_path.as_deref(),
+ &base_url,
+ )
+ .await;
+ }
render(tera, "pages/login.html", &ctx).await
}
@@ -56,10 +70,22 @@ pub async fn login_submit(
Some(t) => t,
None => return StatusCode::INTERNAL_SERVER_ERROR.into_response(),
};
+ let base_url = format!("/ui/{slug}");
let mut tctx = Context::new();
tctx.insert("slug", &slug);
tctx.insert("error", &true);
- tctx.insert("base_url", &format!("/ui/{slug}"));
+ tctx.insert("base_url", &base_url);
+ tctx.insert("hide_project_nav", &true);
+ tctx.insert("current_role", "");
+ insert_mcp_ctx(&mut tctx);
+ insert_brand_ctx(
+ &mut tctx,
+ &ctx.root,
+ &ctx.cache,
+ ctx.import_path.as_deref(),
+ &base_url,
+ )
+ .await;
match render(tera, "pages/login.html", &tctx).await {
Ok(html) => html.into_response(),
Err(_) => StatusCode::INTERNAL_SERVER_ERROR.into_response(),
@@ -74,6 +100,9 @@ pub async fn manage_login_page(State(state): State) -> Result html.into_response(),
Err(_) => StatusCode::INTERNAL_SERVER_ERROR.into_response(),
diff --git a/crates/ontoref-daemon/src/ui/mod.rs b/crates/ontoref-daemon/src/ui/mod.rs
index 1637025..ab91e1c 100644
--- a/crates/ontoref-daemon/src/ui/mod.rs
+++ b/crates/ontoref-daemon/src/ui/mod.rs
@@ -5,6 +5,7 @@ pub mod handlers;
pub mod login;
pub mod ncl_write;
pub mod qa_ncl;
+pub mod search_bookmarks_ncl;
pub mod watcher;
pub use drift_watcher::DriftWatcher;
@@ -39,6 +40,11 @@ fn single_router(state: AppState) -> axum::Router {
.route("/qa", get(handlers::qa_page))
.route("/qa/delete", post(handlers::qa_delete))
.route("/qa/update", post(handlers::qa_update))
+ .route("/search/bookmark/add", post(handlers::search_bookmark_add))
+ .route(
+ "/search/bookmark/delete",
+ post(handlers::search_bookmark_delete),
+ )
.with_state(state)
}
@@ -90,6 +96,14 @@ fn multi_router(state: AppState) -> axum::Router {
.route("/{slug}/qa", get(handlers::qa_page_mp))
.route("/{slug}/qa/delete", post(handlers::qa_delete))
.route("/{slug}/qa/update", post(handlers::qa_update))
+ .route(
+ "/{slug}/search/bookmark/add",
+ post(handlers::search_bookmark_add),
+ )
+ .route(
+ "/{slug}/search/bookmark/delete",
+ post(handlers::search_bookmark_delete),
+ )
// Login is public — no AuthUser extractor
.route(
"/{slug}/login",
diff --git a/crates/ontoref-daemon/src/ui/search_bookmarks_ncl.rs b/crates/ontoref-daemon/src/ui/search_bookmarks_ncl.rs
new file mode 100644
index 0000000..9f25d71
--- /dev/null
+++ b/crates/ontoref-daemon/src/ui/search_bookmarks_ncl.rs
@@ -0,0 +1,285 @@
+//! In-place mutations of reflection/search_bookmarks.ncl.
+//!
+//! Mirrors qa_ncl.rs — line-level surgery on a predictable Nickel structure.
+//! The bookmark store has a single `entries` array of `BookmarkEntry` records.
+
+use std::path::Path;
+
+/// Data for a new bookmark entry.
+pub struct NewBookmark<'a> {
+ pub node_id: &'a str,
+ pub kind: &'a str,
+ pub title: &'a str,
+ pub level: &'a str,
+ pub term: &'a str,
+ pub actor: &'a str,
+ pub created_at: &'a str,
+ pub tags: &'a [String],
+}
+
+/// Append a new bookmark entry to reflection/search_bookmarks.ncl.
+///
+/// Returns the generated id (`sb-NNN`).
+pub fn add_entry(path: &Path, entry: NewBookmark<'_>) -> anyhow::Result<String> {
+ let content = std::fs::read_to_string(path)?;
+ let next_id = next_entry_id(&content);
+
+ let block = format!(
+ r#" {{
+ id = "{id}",
+ node_id = "{node_id}",
+ kind = "{kind}",
+ title = "{title}",
+ level = "{level}",
+ term = "{term}",
+ actor = "{actor}",
+ created_at = "{created_at}",
+ tags = {tags},
+ }},
+"#,
+ id = next_id,
+ node_id = escape_ncl(entry.node_id),
+ kind = escape_ncl(entry.kind),
+ title = escape_ncl(entry.title),
+ level = escape_ncl(entry.level),
+ term = escape_ncl(entry.term),
+ actor = escape_ncl(entry.actor),
+ created_at = escape_ncl(entry.created_at),
+ tags = ncl_string_array(entry.tags),
+ );
+
+ let updated = insert_before_entries_close(&content, &block)?;
+ super::ncl_write::atomic_write(path, &updated)?;
+ Ok(next_id)
+}
+
+/// Remove the bookmark entry block with `id`.
+pub fn remove_entry(path: &Path, id: &str) -> anyhow::Result<()> {
+ let content = std::fs::read_to_string(path)?;
+ let updated = delete_entry_block(&content, id)?;
+ super::ncl_write::atomic_write(path, &updated)?;
+ Ok(())
+}
+
+// ── helpers ──────────────────────────────────────────────────────────────────
+
+/// Find the highest `sb-NNN` id and return `sb-(NNN+1)` zero-padded to 3
+/// digits.
+fn next_entry_id(content: &str) -> String {
+ let max = content
+ .lines()
+ .filter_map(|line| {
+ let t = line.trim();
+ let rest = t.strip_prefix("id")?;
+ let val = rest.split('"').nth(1)?;
+ let num_str = val.strip_prefix("sb-")?;
+ num_str.parse::<u32>().ok()
+ })
+ .max()
+ .unwrap_or(0);
+ format!("sb-{:03}", max + 1)
+}
+
+/// Insert `block` before the closing ` ],` of the entries array.
+fn insert_before_entries_close(content: &str, block: &str) -> anyhow::Result<String> {
+ let needle = " ],";
+ let pos = content.find(needle).ok_or_else(|| {
+ anyhow::anyhow!("could not locate entries array closing ` ],` in search_bookmarks.ncl")
+ })?;
+ let mut result = String::with_capacity(content.len() + block.len());
+ result.push_str(&content[..pos]);
+ result.push_str(block);
+ result.push_str(&content[pos..]);
+ Ok(result)
+}
+
+/// Remove the block containing `id = "sb-NNN"`.
+fn delete_entry_block(content: &str, id: &str) -> anyhow::Result<String> {
+ let id_needle = format!("\"{}\"", id);
+ let lines: Vec<&str> = content.lines().collect();
+ let n = lines.len();
+
+ let id_line = lines
+ .iter()
+ .position(|l| l.contains(&id_needle) && l.contains('='))
+ .ok_or_else(|| anyhow::anyhow!("entry id {} not found in search_bookmarks.ncl", id))?;
+
+ let block_start = (0..=id_line)
+ .rev()
+ .find(|&i| lines[i].trim() == "{")
+ .ok_or_else(|| anyhow::anyhow!("could not find block open for bookmark entry {}", id))?;
+
+ let block_end = (id_line..n)
+ .find(|&i| lines[i].trim() == "},")
+ .ok_or_else(|| anyhow::anyhow!("could not find block close for bookmark entry {}", id))?;
+
+ let mut result = Vec::with_capacity(n - (block_end - block_start + 1));
+ for (i, line) in lines.iter().enumerate() {
+ if i < block_start || i > block_end {
+ result.push(*line);
+ }
+ }
+ Ok(result.join("\n"))
+}
+
+fn ncl_string_array(items: &[String]) -> String {
+ if items.is_empty() {
+ return "[]".to_string();
+ }
+ let inner: Vec<String> = items
+ .iter()
+ .map(|s| format!("\"{}\"", escape_ncl(s)))
+ .collect();
+ format!("[{}]", inner.join(", "))
+}
+
+fn escape_ncl(s: &str) -> String {
+ s.replace('\\', "\\\\").replace('"', "\\\"")
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ const SAMPLE: &str = concat!(
+ "let s = import \"search_bookmarks\" in\n",
+ "{\n",
+ " entries = [\n",
+ " {\n",
+ " id = \"sb-001\",\n",
+ " node_id = \"add-project\",\n",
+ " kind = \"node\",\n",
+ " title = \"Add a project\",\n",
+ " level = \"Practice\",\n",
+ " term = \"add project\",\n",
+ " actor = \"developer\",\n",
+ " created_at = \"2026-03-14\",\n",
+ " tags = [],\n",
+ " },\n",
+ " {\n",
+ " id = \"sb-002\",\n",
+ " node_id = \"ontology-axiom\",\n",
+ " kind = \"node\",\n",
+ " title = \"Ontology axiom\",\n",
+ " level = \"Axiom\",\n",
+ " term = \"axiom\",\n",
+ " actor = \"developer\",\n",
+ " created_at = \"2026-03-14\",\n",
+ " tags = [],\n",
+ " },\n",
+ " ],\n",
+ "} | s.BookmarkStore\n",
+ );
+
+ #[test]
+ fn next_id_empty() {
+ assert_eq!(next_entry_id(""), "sb-001");
+ }
+
+ #[test]
+ fn next_id_increments() {
+ let content = r#"id = "sb-007","#;
+ assert_eq!(next_entry_id(content), "sb-008");
+ }
+
+ #[test]
+ fn array_empty() {
+ assert_eq!(ncl_string_array(&[]), "[]");
+ }
+
+ #[test]
+ fn array_values() {
+ let v = vec!["search".to_string(), "ontology".to_string()];
+ assert_eq!(ncl_string_array(&v), r#"["search", "ontology"]"#);
+ }
+
+ #[test]
+ fn insert_into_empty_store() {
+ let content =
+ "let s = import \"search_bookmarks\" in\n{\n entries = [\n ],\n} | s.BookmarkStore\n";
+ let block = " { id = \"sb-001\" },\n";
+ let result = insert_before_entries_close(content, block).unwrap();
+ assert!(result.contains("{ id = \"sb-001\" }"));
+ assert!(result.contains(" ],"));
+ }
+
+ #[test]
+ fn delete_first_entry() {
+ let updated = delete_entry_block(SAMPLE, "sb-001").unwrap();
+ assert!(!updated.contains("sb-001"), "sb-001 should be removed");
+ assert!(updated.contains("sb-002"), "sb-002 should remain");
+ }
+
+ #[test]
+ fn delete_second_entry() {
+ let updated = delete_entry_block(SAMPLE, "sb-002").unwrap();
+ assert!(updated.contains("sb-001"), "sb-001 should remain");
+ assert!(!updated.contains("sb-002"), "sb-002 should be removed");
+ }
+
+ #[test]
+ fn delete_missing_id_errors() {
+ assert!(delete_entry_block(SAMPLE, "sb-999").is_err());
+ }
+
+ #[test]
+ fn escape_quotes_and_backslashes() {
+ assert_eq!(escape_ncl(r#"say "hi""#), r#"say \"hi\""#);
+ assert_eq!(escape_ncl(r"path\to"), r"path\\to");
+ }
+
+ #[tokio::test]
+ async fn concurrent_add_produces_unique_ids() {
+ use std::path::PathBuf;
+ use std::sync::Arc;
+
+ use tempfile::NamedTempFile;
+
+ const MINIMAL: &str =
+ "let s = import \"search_bookmarks\" in\n{\n entries = [\n ],\n} | s.BookmarkStore\n";
+ const TASKS: usize = 6;
+
+ let lock = Arc::new(super::super::ncl_write::NclWriteLock::new());
+ let file = NamedTempFile::new().unwrap();
+ std::fs::write(file.path(), MINIMAL).unwrap();
+ let path: Arc<PathBuf> = Arc::new(file.path().to_path_buf());
+
+ let handles: Vec<_> = (0..TASKS)
+ .map(|i| {
+ let lock = Arc::clone(&lock);
+ let path = Arc::clone(&path);
+ tokio::spawn(async move {
+ let _guard = lock.acquire(&path).await;
+ add_entry(
+ &path,
+ NewBookmark {
+ node_id: &format!("node-{i}"),
+ kind: "node",
+ title: &format!("Title {i}"),
+ level: "Practice",
+ term: "search term",
+ actor: "developer",
+ created_at: "2026-03-14",
+ tags: &[],
+ },
+ )
+ })
+ })
+ .collect();
+
+ let mut ids: Vec<String> = {
+ let mut v = Vec::with_capacity(TASKS);
+ for h in handles {
+ v.push(h.await.unwrap().unwrap());
+ }
+ v
+ };
+ ids.sort();
+ ids.dedup();
+ assert_eq!(
+ ids.len(),
+ TASKS,
+ "concurrent add_entry must produce unique IDs"
+ );
+ }
+}
diff --git a/crates/ontoref-daemon/templates/pages/graph.html b/crates/ontoref-daemon/templates/pages/graph.html
index 814e7c8..98fbb2a 100644
--- a/crates/ontoref-daemon/templates/pages/graph.html
+++ b/crates/ontoref-daemon/templates/pages/graph.html
@@ -55,6 +55,22 @@
{% endblock head %}
{% block content %}
+
+
+
+
+