#!/usr/bin/env nu
# reflection/modules/sync.nu — ontology↔code synchronization.
# Detects drift between .ontology/ declarations and actual project artifacts.
# Scans: crates, scenarios, agents, CI, forms, modes, justfiles, .claude/.
# Operates on $env.ONTOREF_PROJECT_ROOT (set by onref wrapper).
|
use store.nu [daemon-export, daemon-export-safe]
|
|
|
|
# Resolve project root: ONTOREF_PROJECT_ROOT if set and different from ONTOREF_ROOT, else ONTOREF_ROOT.
def project-root []: nothing -> string {
  # An unset or empty override falls back to the ontoref root itself.
  let override = ($env.ONTOREF_PROJECT_ROOT? | default "")
  if ($override | is-not-empty) and ($override != $env.ONTOREF_ROOT) {
    $override
  } else {
    $env.ONTOREF_ROOT
  }
}
|
# ── scan ──────────────────────────────────────────────────────────────────────

# Analyze a project's real structure: crates, scenarios, agents, CI, forms, modes, justfiles, claude.
export def "sync scan" [
  --level: string = "auto", # Analysis level: structural | full | auto (detect nightly)
]: nothing -> record {
  let root = (project-root)

  # Crates are scanned first because api_level is derived from their pub_api.
  let crates = (scan-crates $root $level)
  let api_level = if ($crates | any { |c| ($c.pub_api | length) > 0 }) {
    "full"
  } else {
    "structural"
  }

  {
    crates: $crates,
    scenarios: (scan-scenarios $root),
    agents: (scan-agents $root),
    ci: (scan-ci $root),
    forms: (scan-forms $root),
    modes: (scan-modes $root),
    justfiles: (scan-justfiles $root),
    claude: (scan-claude $root),
    api_level: $api_level,
  }
}
|
# ── diff ──────────────────────────────────────────────────────────────────────

# Compare scan against ontology, producing drift report.
#
# Returns a table of { status, id, artifact_path, detail } rows where status is:
#   OK      — node's declared artifact exists (or node is abstract)
#   STALE   — node declares artifact_paths but none exist on disk
#   MISSING — a real artifact exists that no ontology node claims
#   BROKEN  — an edge references a node id that does not exist
export def "sync diff" [
  --quick, # Skip nickel exports; parse NCL text directly for speed
]: nothing -> table {
  let root = (project-root)
  # Structural scan is enough for diffing — API surface is not compared here.
  let scan = (sync scan --level structural)

  let ontology = if $quick {
    load-ontology-quick $root
  } else {
    load-ontology $root
  }

  if ($ontology | is-empty) {
    print " No .ontology/core.ncl found — nothing to diff."
    return []
  }

  let nodes = ($ontology.nodes? | default [])
  let edges = ($ontology.edges? | default [])
  # Ids are used for edge-integrity checks; drop nodes with no id.
  let node_ids = ($nodes | each { |n| $n | get -o id | default "" } | where { $in | is-not-empty })

  # Build set of all existing paths from scan
  let existing_paths = (collect-existing-paths $scan $root)

  mut report = []

  # Check each node: does at least one artifact_path exist on disk?
  for node in $nodes {
    let id = $node.id
    let paths = ($node.artifact_paths? | default [])
    if ($paths | is-empty) {
      # Tensions and abstract concepts have no artifact_paths — always OK
      $report = ($report | append { status: "OK", id: $id, artifact_path: "", detail: "No artifact_paths declared (abstract node)" })
    } else {
      # Check if at least one declared path exists in the project.
      # A node matches either by direct existence on disk, or by prefix
      # overlap with a scanned artifact path (in either direction, so a
      # declared directory matches scanned files inside it and vice versa).
      let found = ($paths | any { |p|
        let full = $"($root)/($p)"
        ($full | path exists) or ($existing_paths | any { |ep| $ep starts-with $p or $p starts-with $ep })
      })
      if $found {
        $report = ($report | append { status: "OK", id: $id, artifact_path: ($paths | first), detail: "" })
      } else {
        $report = ($report | append {
          status: "STALE",
          id: $id,
          artifact_path: ($paths | str join ", "),
          detail: $"Declared paths not found: ($paths | str join ', ')",
        })
      }
    }
  }

  # Check scan artifacts that no node claims via artifact_paths
  let all_node_paths = ($nodes | each { |n| $n.artifact_paths? | default [] } | flatten)
  let unclaimed = (find-unclaimed-artifacts $scan $root $all_node_paths)
  for art in $unclaimed {
    $report = ($report | append {
      status: "MISSING",
      id: $art.id,
      artifact_path: $art.path,
      detail: $"Artifact exists but no ontology node claims it: ($art.kind) at ($art.path)",
    })
  }

  # Check edge integrity: both endpoints must exist in node_ids. An edge with
  # two dangling endpoints produces two BROKEN rows with the same id.
  for edge in $edges {
    if not ($edge.from in $node_ids) {
      $report = ($report | append {
        status: "BROKEN",
        id: $"edge:($edge.from)->($edge.to)",
        artifact_path: "",
        detail: $"Edge source '($edge.from)' not found in nodes",
      })
    }
    if not ($edge.to in $node_ids) {
      $report = ($report | append {
        status: "BROKEN",
        id: $"edge:($edge.from)->($edge.to)",
        artifact_path: "",
        detail: $"Edge target '($edge.to)' not found in nodes",
      })
    }
  }

  $report | sort-by status id
}
|
|
|
# ── propose ───────────────────────────────────────────────────────────────────

# Generate NCL patches for drift items.
# Emits ready-to-paste node blocks for MISSING artifacts and REMOVE hints for
# stale nodes / broken edges, as one text document.
export def "sync propose" []: nothing -> string {
  let drift = (sync diff)
  let missing = ($drift | where status == "MISSING")
  let stale = ($drift | where status == "STALE")
  let broken = ($drift | where status == "BROKEN")

  mut out = []

  if ($missing | is-not-empty) {
    $out = ($out | append "# ── New nodes (MISSING artifacts found) ──" | append "")
    for item in $missing {
      # Each unclaimed artifact becomes a full generated node block.
      let kind = (detect-artifact-kind $item.artifact_path)
      $out = ($out | append (generate-node-ncl $item.id $item.artifact_path $kind) | append "")
    }
  }

  if ($stale | is-not-empty) {
    $out = ($out | append "# ── Stale nodes (no artifact found) — remove or update ──" | append "")
    for item in $stale {
      $out = ($out | append $"# REMOVE: ($item.id) — ($item.detail)")
    }
    $out = ($out | append "")
  }

  if ($broken | is-not-empty) {
    $out = ($out | append "# ── Broken edges — remove ──" | append "")
    for item in $broken {
      $out = ($out | append $"# REMOVE EDGE: ($item.id) — ($item.detail)")
    }
    $out = ($out | append "")
  }

  if ($out | is-empty) {
    "# No drift detected — ontology is in sync."
  } else {
    $out | str join "\n"
  }
}
|
|
# ── apply ─────────────────────────────────────────────────────────────────────

# Apply proposed changes: insert new nodes, remove stale nodes from core.ncl.
# Interactive: prints the plan, asks for confirmation, then edits the file
# line-by-line (text surgery, no NCL re-serialization) and writes it back.
export def "sync apply" []: nothing -> nothing {
  let diff = (sync diff)
  let missing = ($diff | where status == "MISSING")
  let stale = ($diff | where status == "STALE")
  let broken = ($diff | where status == "BROKEN")

  if ($missing | is-empty) and ($stale | is-empty) and ($broken | is-empty) {
    print " Ontology is in sync — nothing to apply."
    return
  }

  let root = (project-root)
  let core_ncl = $"($root)/.ontology/core.ncl"

  if not ($core_ncl | path exists) {
    print $" Error: ($core_ncl) not found."
    return
  }

  print ""
  print "Proposed changes:"
  print "──────────────────────────────────────────────────────────────────"

  if ($missing | is-not-empty) {
    print $" ADD: ($missing | length) new nodes"
    for item in $missing { print $" + ($item.id) ← ($item.artifact_path)" }
  }
  if ($stale | is-not-empty) {
    print $" REMOVE: ($stale | length) stale nodes"
    for item in $stale { print $" - ($item.id)" }
  }
  if ($broken | is-not-empty) {
    print $" FIX: ($broken | length) broken edges"
    for item in $broken { print $" ! ($item.id)" }
  }

  print ""
  let confirm = (input "Apply changes? [y/N] " | str trim | str downcase)
  if $confirm != "y" {
    print " Aborted."
    return
  }

  mut lines = (open $core_ncl --raw | lines | each { |l| $l })

  # Insert new nodes before the '],\n\n  edges' boundary
  if ($missing | is-not-empty) {
    # Find the line index of '  edges = ['
    let edges_match = ($lines | enumerate | where { |row| ($row.item | str trim) == "edges = [" })
    if ($edges_match | is-empty) {
      print " Cannot find 'edges = [' in core.ncl — file format not recognized. Skipping node insertion."
    } else {
      let edges_idx = ($edges_match | first | get index)
      # Find the top-level '],' closing the nodes array before edges.
      # Top-level = exactly 2 leading spaces (same indent as 'edges = [').
      mut insert_idx = $edges_idx - 1
      while $insert_idx > 0 {
        let line = ($lines | get $insert_idx)
        let trimmed = ($line | str trim)
        # Top-level array close: line is "  ]," — 2-space indent but not 3+
        # (deeper indent would be a nested array inside a node block).
        # FIX: the two starts-with literals were previously identical, making
        # the condition unsatisfiable; restored 2-space vs 3-space per the
        # stated indentation convention.
        if $trimmed == "]," and ($line | str starts-with "  ") and (not ($line | str starts-with "   ")) { break }
        $insert_idx = $insert_idx - 1
      }
      if $insert_idx == 0 {
        print " Cannot find nodes array closing bracket in core.ncl. Skipping node insertion."
      } else {
        # Render each missing artifact as a generated node block, preceded by
        # a blank separator line.
        mut new_lines = []
        for item in $missing {
          let kind = (detect-artifact-kind $item.artifact_path)
          let block = (generate-node-ncl $item.id $item.artifact_path $kind)
          $new_lines = ($new_lines | append "")
          $new_lines = ($new_lines | append ($block | lines))
        }

        # Splice the new blocks in just before the closing '],' of nodes.
        let before = ($lines | first $insert_idx)
        let after = ($lines | skip $insert_idx)
        $lines = ($before | append $new_lines | append $after)
        print $" Inserted ($missing | length) new nodes."
      } # insert_idx guard
    } # edges_match guard
  }

  # Remove stale nodes
  for item in $stale {
    let id = $item.id
    let id_pattern = $"\"($id)\""
    # Anchor on the line declaring this node's id (quoted id + 'id ' keyword).
    let id_matches = ($lines | enumerate | where { |row| ($row.item | str contains $id_pattern) and ($row.item | str contains "id ") })

    if ($id_matches | is-not-empty) {
      let id_line_idx = ($id_matches | first | get index)
      # Walk back to find 'd.make_node'
      mut start_idx = $id_line_idx
      while $start_idx > 0 {
        if ($lines | get $start_idx | str contains "d.make_node") { break }
        $start_idx = $start_idx - 1
      }
      # Walk forward to find '},' or lone '}' (last node in array has no trailing comma)
      mut end_idx = $id_line_idx
      let total = ($lines | length)
      while $end_idx < ($total - 1) {
        let trimmed_line = ($lines | get $end_idx | str trim)
        if ($trimmed_line | str starts-with "},") or ($trimmed_line == "}") { break }
        $end_idx = $end_idx + 1
      }
      # Guard: only remove if we actually found the closing brace
      let end_line_trimmed = ($lines | get $end_idx | str trim)
      if not (($end_line_trimmed | str starts-with "},") or ($end_line_trimmed == "}")) {
        print $" warn: could not find closing brace for node '($id)' — skipping removal"
        continue
      }
      # Remove lines [start_idx..end_idx] inclusive
      let before = ($lines | first $start_idx)
      let after = ($lines | skip ($end_idx + 1))
      # Skip leading blank line in after (keeps node separation tidy)
      let trimmed_after = if ($after | is-not-empty) and (($after | first) | str trim | is-empty) {
        $after | skip 1
      } else {
        $after
      }
      $lines = ($before | append $trimmed_after)
      print $" Removed node '($id)'."
    }
  }

  # Remove broken edges (handles both single-line and multi-line make_edge blocks)
  # Deduplicate: an edge with both from+to broken produces two BROKEN entries with the same id.
  let broken = ($broken | uniq-by id)
  for item in $broken {
    let edge_id = $item.id
    # Ids look like "edge:SRC->TGT" (built by sync diff).
    let parts = ($edge_id | str replace "edge:" "" | split row "->")
    if ($parts | length) == 2 {
      let src = ($parts | first)
      let tgt = ($parts | last)
      let from_pattern = $"from = \"($src)\""
      let to_pattern = $"to = \"($tgt)\""

      # Try single-line match first (from and to on same line)
      let single_matches = ($lines | enumerate | where { |row|
        ($row.item | str contains $from_pattern) and ($row.item | str contains $to_pattern)
      })

      if ($single_matches | is-not-empty) {
        let edge_idx = ($single_matches | first | get index)
        let before = ($lines | first $edge_idx)
        let after = ($lines | skip ($edge_idx + 1))
        $lines = ($before | append $after)
        print $" Removed edge ($src) → ($tgt)."
      } else {
        # Multi-line: find the make_edge block containing both from and to
        let from_lines = ($lines | enumerate | where { |row| $row.item | str contains $from_pattern })
        if ($from_lines | is-not-empty) {
          let from_idx = ($from_lines | first | get index)
          # Search for matching to within ±5 lines.
          # FIX: list elements with arithmetic must be parenthesized —
          # [$from_idx - 5, 0] parses as bare items, not a subtraction.
          let search_start = ([($from_idx - 5), 0] | math max)
          let search_end = ([($from_idx + 5), (($lines | length) - 1)] | math min)
          let nearby = ($lines | skip $search_start | first ($search_end - $search_start + 1))
          let has_to = ($nearby | any { |l| $l | str contains $to_pattern })
          if $has_to {
            # Walk back to find d.make_edge
            mut start = $from_idx
            while $start > 0 and (not ($lines | get $start | str contains "d.make_edge")) {
              $start = $start - 1
            }
            # Walk forward to find the closing } (must be after from_idx so a
            # brace on the from-line itself doesn't terminate early)
            mut end = $from_idx
            while $end < (($lines | length) - 1) {
              let l = ($lines | get $end | str trim)
              if ($l == "}," or $l == "}") and $end > $from_idx { break }
              $end = $end + 1
            }
            let before = ($lines | first $start)
            let after = ($lines | skip ($end + 1))
            $lines = ($before | append $after)
            print $" Removed multi-line edge ($src) → ($tgt)."
          }
        }
      }
    }
  }

  # Rejoin with a trailing newline (lines strips it on read).
  $lines | str join "\n" | $in + "\n" | save --force $core_ncl
  print ""
  print " Changes written to .ontology/core.ncl"
  print " Run 'sync diff' to verify."
}
|
|
# ── state ─────────────────────────────────────────────────────────────────────

# Compare state.ncl dimensions against project reality.
# One row per declared dimension: REACHED when current == desired, else ACTIVE.
export def "sync state" []: nothing -> table {
  let root = (project-root)
  let state_ncl = $"($root)/.ontology/state.ncl"

  if not ($state_ncl | path exists) {
    print " No .ontology/state.ncl found."
    return []
  }

  let state = (daemon-export-safe $state_ncl)
  if $state == null {
    print " Failed to export state.ncl"
    return []
  }

  $state.dimensions? | default [] | each { |dim|
    let current = ($dim.current_state? | default "unknown")
    let desired = ($dim.desired_state? | default "unknown")
    {
      id: $dim.id,
      current: $current,
      desired: $desired,
      status: (if $current == $desired { "REACHED" } else { "ACTIVE" }),
      horizon: ($dim.horizon? | default ""),
    }
  }
}
|
|
# ── audit ─────────────────────────────────────────────────────────────────────

# Full audit: scan + diff + ADR constraints + gate conditions + NCL integrity.
# Returns the full report record; also prints it per --fmt (table/json/silent).
# --strict raises an error when any MISSING/STALE/BROKEN is found (CI usage).
export def "sync audit" [
  --fmt: string = "", # Output format: table* | json | silent
  --strict, # Exit with error on MISSING/STALE/BROKEN
  --quick, # Skip expensive operations (no nickel exports for API surface)
]: nothing -> record {
  let root = (project-root)
  # Agents default to JSON output, humans to the table view; --fmt overrides.
  let actor = ($env.ONTOREF_ACTOR? | default "developer")
  let f = if ($fmt | is-not-empty) { $fmt } else if $actor == "agent" { "json" } else { "table" }

  # Node drift
  let diff = if $quick { sync diff --quick } else { sync diff }
  let ok_count = ($diff | where status == "OK" | length)
  let missing_count = ($diff | where status == "MISSING" | length)
  let stale_count = ($diff | where status == "STALE" | length)
  let broken_count = ($diff | where status == "BROKEN" | length)
  let total_nodes = ($ok_count + $missing_count + $stale_count + $broken_count)

  # ADR constraint checks
  let adr_results = if $quick { [] } else { audit-adr-constraints $root }

  # Gate status (pass diff so breach detection can check protected nodes)
  let gate_results = if $quick { [] } else { audit-gates $root $diff }

  # State dimensions
  let state_report = if $quick { [] } else { sync state }

  # Justfile + .claude + tools audits (manifest-driven when available)
  let scan = if $quick { { justfiles: { exists: false }, claude: { exists: false } } } else {
    sync scan --level structural
  }
  let manifest = (load-manifest-safe $root)
  let justfile_results = if $quick { [] } else { audit-justfiles ($scan.justfiles? | default { exists: false }) $manifest }
  let claude_results = if $quick { [] } else { audit-claude ($scan.claude? | default { exists: false }) $manifest }
  let tools_results = if $quick { [] } else { audit-tools $manifest }

  # Health score (0-100) — pooled weighted model
  #
  # Each check earns points proportional to its architectural weight:
  #   node OK       = 1pt (ontology completeness)
  #   ADR Hard PASS = 3pt (architectural guardrail — violation is severe)
  #   ADR Soft PASS = 1pt (advisory constraint)
  #   infra PASS    = 1pt (tooling / .claude / justfiles)
  # Penalties (subtracted from earned points):
  #   broken edge = -2pt (graph integrity)
  #   gate BREACH = -5pt (active membrana violated)
  #
  # health = earned / max_possible * 100, floored at 0

  let adr_pass = ($adr_results | where status == "PASS" | length)
  let adr_total = ($adr_results | length)
  let all_infra = ($justfile_results | append $claude_results | append $tools_results)
  let jc_pass = ($all_infra | where status == "PASS" | length)
  let jc_total = ($all_infra | length)

  let node_earned = ($ok_count | into float)
  let node_max = ($total_nodes | into float)

  # ADR severity weighting: Hard = 3pt, Soft = 1pt
  let adr_earned_list = ($adr_results | where status == "PASS" | each { |r|
    if ($r.severity? | default "Hard") == "Hard" { 3 } else { 1 }
  })
  # math sum errors on an empty list, hence the is-empty guards below.
  let adr_earned = if ($adr_earned_list | is-empty) { 0.0 } else { $adr_earned_list | math sum | into float }
  let adr_max_list = ($adr_results | each { |r|
    if ($r.severity? | default "Hard") == "Hard" { 3 } else { 1 }
  })
  let adr_max = if ($adr_max_list | is-empty) { 0.0 } else { $adr_max_list | math sum | into float }
  let infra_earned = ($jc_pass | into float)
  let infra_max = ($jc_total | into float)

  let breach_count = ($gate_results | where status == "BREACH" | length)
  let penalty = (($broken_count * 2 + $breach_count * 5) | into float)

  let max_possible = ($node_max + $adr_max + $infra_max)
  let earned = ($node_earned + $adr_earned + $infra_earned - $penalty)

  # Clamp to [0, 100]; an empty pool (nothing to check) reads as full health.
  let health = if $max_possible == 0.0 { 100.0 } else {
    let raw = ($earned / $max_possible * 100.0)
    if $raw < 0.0 { 0.0 } else if $raw > 100.0 { 100.0 } else { $raw }
  } | math round --precision 1

  let report = {
    nodes: {
      ok: $ok_count,
      missing: $missing_count,
      stale: $stale_count,
      broken_edges: $broken_count,
      details: $diff,
    },
    adrs: $adr_results,
    gates: $gate_results,
    state: $state_report,
    justfiles: $justfile_results,
    claude: $claude_results,
    tools: $tools_results,
    health: $health,
  }

  if $f == "json" {
    print ($report | to json)
  } else if $f == "silent" {
    # No output — caller uses return value only
  } else {
    # Human-readable table view: one summary line per audit family, each
    # printed only when that family produced results.
    print ""
    print $"Ontology Audit — ($root | path basename)"
    print "──────────────────────────────────────────────────────────────────"
    print $" Nodes: ($ok_count) OK / ($missing_count) MISSING / ($stale_count) STALE"
    print $" Edges: ($broken_count) broken"
    if ($adr_results | is-not-empty) {
      let adr_fail = ($adr_results | where status == "FAIL" | length)
      let hard_fail = ($adr_results | where status == "FAIL" and severity == "Hard" | length)
      let soft_fail = ($adr_results | where status == "FAIL" and severity == "Soft" | length)
      let severity_detail = if ($hard_fail + $soft_fail) > 0 {
        $" \(($hard_fail)H ($soft_fail)S\)"
      } else { "" }
      print $" ADR constraints: ($adr_pass) PASS / ($adr_fail) FAIL($severity_detail)"
    }
    if ($gate_results | is-not-empty) {
      let active_gates = ($gate_results | where status == "ACTIVE" | length)
      let breach_gates = ($gate_results | where status == "BREACH" | length)
      print $" Gates: ($active_gates) ACTIVE / ($breach_gates) BREACH"
    }
    if ($state_report | is-not-empty) {
      let reached = ($state_report | where status == "REACHED" | length)
      let active_dims = ($state_report | where status == "ACTIVE" | length)
      print $" State: ($reached) REACHED / ($active_dims) ACTIVE"
    }
    if ($justfile_results | is-not-empty) {
      let jf_pass = ($justfile_results | where status == "PASS" | length)
      let jf_miss = ($justfile_results | where status == "MISSING" | length)
      print $" Justfiles: ($jf_pass) PASS / ($jf_miss) MISSING"
    }
    if ($claude_results | is-not-empty) {
      let cl_pass = ($claude_results | where status == "PASS" | length)
      let cl_miss = ($claude_results | where status == "MISSING" | length)
      print $" .claude/: ($cl_pass) PASS / ($cl_miss) MISSING"
    }
    if ($tools_results | is-not-empty) {
      let tl_pass = ($tools_results | where status == "PASS" | length)
      let tl_miss = ($tools_results | where status == "MISSING" | length)
      let tl_opt = ($tools_results | where status == "OPTIONAL" | length)
      print $" Tools: ($tl_pass) PASS / ($tl_miss) MISSING / ($tl_opt) optional"
    }
    print $" Health: ($health)%"
    print ""

    # Show problem items
    let problems = ($diff | where status != "OK")
    if ($problems | is-not-empty) {
      print " Issues:"
      for p in $problems {
        print $" [($p.status)] ($p.id) ($p.detail)"
      }
      print ""
    }

    # Show justfile/claude/tools issues
    let jc_issues = ($justfile_results | append $claude_results | append $tools_results | where status == "MISSING")
    if ($jc_issues | is-not-empty) {
      print " Infrastructure gaps:"
      for p in $jc_issues {
        print $" [MISSING] ($p.check): ($p.detail)"
      }
      print ""
    }
  }

  # --strict turns any drift into a hard failure (useful under CI).
  if $strict and (($missing_count + $stale_count + $broken_count) > 0) {
    error make { msg: $"Audit failed: ($missing_count) MISSING, ($stale_count) STALE, ($broken_count) BROKEN" }
  }

  $report
}
|
|
# ── watch ─────────────────────────────────────────────────────────────────────

# Launch bacon in headless mode with ontology-watch job, monitor drift file.
export def "sync watch" []: nothing -> nothing {
  let root = (project-root)
  let bacon_toml = $"($root)/bacon.toml"

  # Guard chain: config file, then the required job, then the binary.
  if not ($bacon_toml | path exists) {
    print " No bacon.toml found — cannot watch."
    print " Run setup_reflection.nu to install bacon configuration."
    return
  }

  # Verify ontology-watch job exists in bacon.toml
  if not (open $bacon_toml --raw | str contains "[jobs.ontology-watch]") {
    print " bacon.toml found but missing [jobs.ontology-watch] job."
    print " Re-run setup_reflection.nu to enable the ontology-reflection layer."
    return
  }

  if (which bacon | is-empty) {
    print " bacon not found — install with: cargo install bacon"
    return
  }

  print " Starting ontology drift watch via bacon..."
  print " Press Ctrl+C to stop."
  ^bacon --headless -j ontology-watch
}
|
|
# ── Internal helpers ──────────────────────────────────────────────────────────
|
|
|
|
# Enumerate workspace member crates (or the root crate when there is no
# workspace) and parse each into a structural record.
def scan-crates [root: string, level: string]: nothing -> list {
  let cargo_toml = $"($root)/Cargo.toml"
  if not ($cargo_toml | path exists) { return [] }

  let members = (open $cargo_toml | get -o workspace.members | default [])
  let crate_paths = if ($members | is-empty) { ["."] } else { $members }

  # Members may be literal directories or glob patterns like "crates/*".
  $crate_paths | each { |member|
    let crate_dir = $"($root)/($member)"
    if ($"($crate_dir)/Cargo.toml" | path exists) {
      [(parse-single-crate $crate_dir $level $root)]
    } else {
      glob $"($root)/($member)/Cargo.toml" | each { |ct|
        parse-single-crate ($ct | path dirname) $level $root
      }
    }
  } | flatten
}
|
|
# Build the structural record for one crate: manifest metadata, source/test
# inventory, and (when enabled) the rustdoc-derived public API surface.
def parse-single-crate [crate_dir: string, level: string, root: string]: nothing -> record {
  let cargo = (open $"($crate_dir)/Cargo.toml")
  let name = ($cargo | get -o package.name | default ($crate_dir | path basename))

  let src_dir = $"($crate_dir)/src"
  let src_modules = if ($src_dir | path exists) {
    glob $"($src_dir)/**/*.rs" | each { |f| $f | path relative-to $crate_dir }
  } else { [] }

  let test_dir = $"($crate_dir)/tests"
  let test_count = if ($test_dir | path exists) {
    glob $"($test_dir)/**/*.rs" | length
  } else { 0 }

  # "auto" probes for a nightly toolchain — rustdoc JSON output needs nightly.
  let do_full = match $level {
    "full" => true,
    "structural" => false,
    _ => { (which "cargo" | is-not-empty) and ((do { ^cargo +nightly --version } | complete).exit_code == 0) },
  }

  {
    name: $name,
    path: ($crate_dir | path relative-to $root),
    features: ($cargo | get -o features | default {} | columns),
    deps_count: ($cargo | get -o dependencies | default {} | columns | length),
    src_modules: $src_modules,
    test_count: $test_count,
    pub_api: (if $do_full { extract-pub-api $crate_dir $name } else { [] }),
  }
}
|
|
# Extract the crate's public API from rustdoc's JSON output (nightly-only).
# Returns [] on any failure: build error, missing doc file.
def extract-pub-api [crate_dir: string, crate_name: string]: nothing -> list {
  let build = do {
    cd $crate_dir
    ^cargo +nightly rustdoc -p $crate_name -- -Z unstable-options --output-format json
  } | complete
  if $build.exit_code != 0 { return [] }

  # rustdoc writes target/doc/<name>.json with hyphens mapped to underscores.
  let doc_name = ($crate_name | str replace "-" "_")
  let doc_json = $"($crate_dir)/target/doc/($doc_name).json"
  if not ($doc_json | path exists) { return [] }

  open $doc_json
  | get -o index
  | default {}
  | transpose k v
  | where { |item| ($item.v | get -o visibility | default "") == "public" }
  | each { |item|
      {
        # The item kind is the single key of the "inner" record.
        kind: ($item.v | get -o inner | default {} | columns | first | default "unknown"),
        path: ($item.v | get -o name | default ""),
        # First 120 chars of the doc comment stand in for a signature.
        signature: ($item.v | get -o docs | default "" | str substring 0..120),
      }
    }
}
|
|
# Discover scenario directories. Prefers reflection/scenarios (with optional
# scenario.ncl metadata); falls back to bare examples/ directories.
def scan-scenarios [root: string]: nothing -> list {
  let scenarios_dir = $"($root)/reflection/scenarios"

  if not ($scenarios_dir | path exists) {
    # Fallback: examples/ directories carry no metadata.
    let examples_dir = $"($root)/examples"
    if not ($examples_dir | path exists) { return [] }
    return (ls $examples_dir | where type == "dir" | each { |d|
      {
        category: ($d.name | path basename),
        path: ($d.name | path relative-to $root),
        description: "",
        files: (glob $"($d.name)/*" | length),
        actor: "developer",
      }
    })
  }

  ls $scenarios_dir | where type == "dir" | each { |d|
    # A scenario.ncl file, when present, enriches the entry.
    let meta_file = $"($d.name)/scenario.ncl"
    let meta = if ($meta_file | path exists) {
      daemon-export-safe $meta_file | default {}
    } else { {} }
    {
      category: ($d.name | path basename),
      path: ($d.name | path relative-to $root),
      description: ($meta | get -o purpose | default ""),
      files: (glob $"($d.name)/*" | length),
      actor: ($meta | get -o actor | default "developer"),
      validates: ($meta | get -o validates | default []),
    }
  }
}
|
|
# Find *.agent.mdx agent definitions anywhere under the project root.
def scan-agents [root: string]: nothing -> list {
  glob $"($root)/**/*.agent.mdx" | each { |f|
    {
      name: ($f | path basename | str replace ".agent.mdx" ""),
      path: ($f | path relative-to $root),
      format: "mdx",
    }
  }
}
|
|
# Detect CI pipeline files per provider. Currently supported providers:
# GitHub Actions (.github/workflows) and Woodpecker (.woodpecker).
# Returns one { provider, path } record per pipeline file found.
def scan-ci [root: string]: nothing -> list {
  # Provider table replaces the previously duplicated per-provider branches;
  # adding a provider is now a one-line change.
  let providers = [
    { provider: "github-actions", dir: $"($root)/.github/workflows" },
    { provider: "woodpecker", dir: $"($root)/.woodpecker" },
  ]

  $providers | each { |p|
    if ($p.dir | path exists) {
      # Both .yml and .yaml extensions are accepted.
      glob $"($p.dir)/*.yml" | append (glob $"($p.dir)/*.yaml") | each { |f|
        { provider: $p.provider, path: ($f | path relative-to $root) }
      }
    } else { [] }
  } | flatten
}
|
|
# List declared forms: one record per reflection/forms/*.ncl file.
def scan-forms [root: string]: nothing -> list {
  let forms_dir = $"($root)/reflection/forms"
  if not ($forms_dir | path exists) { return [] }

  glob $"($forms_dir)/*.ncl" | each { |f|
    {
      name: ($f | path basename | str replace ".ncl" ""),
      path: ($f | path relative-to $root),
    }
  }
}
|
|
# Collect mode definitions from both ONTOREF_ROOT and the project root.
# Unlike the other scanners, paths stay absolute: modes may live outside
# the project tree (in the ontoref installation itself).
def scan-modes [root: string]: nothing -> list {
  let ontoref_modes = (glob $"($env.ONTOREF_ROOT)/reflection/modes/*.ncl")
  let project_modes = if ($root != $env.ONTOREF_ROOT) {
    glob $"($root)/reflection/modes/*.ncl"
  } else { [] }

  $ontoref_modes
  | append $project_modes
  | uniq
  | each { |f| { name: ($f | path basename | str replace ".ncl" ""), path: $f } }
}
|
|
# ── Justfile scanner ────────────────────────────────────────────────────────
# Detects justfile modules, recipes, module system (import/mod), and variables.
# Parsing is line-based and heuristic — it does not evaluate the justfile.

def scan-justfiles [root: string]: nothing -> record {
  let justfile_path = $"($root)/justfile"
  if not ($justfile_path | path exists) {
    # FIX: include module_files so the empty record has the same shape as the
    # populated one (callers can read every field without optional access).
    return { exists: false, system: "", modules: [], module_files: [], recipes: [], variables: [] }
  }

  let raw = (open $justfile_path --raw)
  let lines = ($raw | lines)

  # Detect module system: import vs mod (both present = "hybrid", neither = "flat")
  let has_import = ($lines | any { |l| ($l | str trim | str starts-with "import ") or ($l | str trim | str starts-with "import?") })
  let has_mod = ($lines | any { |l| ($l | str trim | str starts-with "mod ") or ($l | str trim | str starts-with "mod?") })
  let system = if $has_import and $has_mod { "hybrid" } else if $has_import { "import" } else if $has_mod { "mod" } else { "flat" }

  # Extract module declarations ("import"/"mod", optionally with '?' = optional)
  let mod_lines = ($lines | where { |l|
    let t = ($l | str trim)
    ($t | str starts-with "import ") or ($t | str starts-with "import? ") or ($t | str starts-with "mod ") or ($t | str starts-with "mod? ")
  })
  let modules = ($mod_lines | each { |l|
    let t = ($l | str trim)
    let optional = ($t | str starts-with "import?") or ($t | str starts-with "mod?")
    # The path may be single- or double-quoted; capture0/capture1 cover both.
    let path_match = ($t | parse --regex "'([^']+)'|\"([^\"]+)\"")
    let mod_path = if ($path_match | is-not-empty) {
      let m = ($path_match | first)
      let c0 = ($m | get -o capture0 | default "")
      let c1 = ($m | get -o capture1 | default "")
      if ($c0 | is-not-empty) { $c0 } else { $c1 }
    } else { "" }
    # 'mod name' carries an explicit name; imports derive it from the path.
    let name_match = ($t | parse --regex '(?:mod\??\s+)(\w+)')
    let name = if ($name_match | is-not-empty) {
      $name_match | first | get capture0
    } else {
      $mod_path | path basename | str replace ".just" ""
    }
    { name: $name, path: $mod_path, optional: $optional }
  })

  # Scan justfiles/ directory for .just files
  let justfiles_dir = $"($root)/justfiles"
  let just_files = if ($justfiles_dir | path exists) {
    glob $"($justfiles_dir)/*.just" | each { |f| $f | path relative-to $root }
  } else { [] }

  # Extract top-level variables (VAR := "value" or VAR := `command`)
  let variables = ($lines | where { |l|
    let t = ($l | str trim)
    ($t =~ '^\w+\s*:=') and (not ($t | str starts-with "#"))
  } | each { |l|
    let parts = ($l | str trim | split row ":=" | each { |p| $p | str trim })
    if ($parts | length) >= 2 { $parts | first } else { "" }
  } | where { |v| $v | is-not-empty })

  # Extract recipe names (lines matching "name:", "@name:", "name PARAM:", "name PARAM="val":");
  # the ':=' exclusion keeps variable assignments out.
  let recipes = ($lines | where { |l|
    let t = ($l | str trim)
    ($t =~ '^@?\w[\w-]*(\s.*)?:') and (not ($t | str starts-with "#")) and (not ($t =~ '^\w+\s*:='))
  } | each { |l|
    # The "@" quiet-prefix is just syntax, not part of the recipe name.
    let t = ($l | str trim | str replace "@" "")
    $t | split row " " | first | str replace ":" ""
  })

  {
    exists: true,
    system: $system,
    modules: $modules,
    module_files: $just_files,
    recipes: $recipes,
    variables: $variables,
  }
}
|
|
# ── .claude/ scanner ───────────────────────────────────────────────────────
|
|
# Inventories .claude/ capabilities: guidelines, commands, hooks, agents, skills.
|
|
|
|
def scan-claude [root: string]: nothing -> record {
    # Inventory .claude/ capabilities under `root`: CLAUDE.md, guideline
    # language directories, command files, hook events (from settings.json),
    # settings files, layout conventions, and the ontoref session hook.
    let claude_dir = $"($root)/.claude"
    if not ($claude_dir | path exists) {
        # Early return carries the same keys as the full record so callers
        # can read them without optional access.
        return {
            exists: false,
            has_claude_md: false,
            has_layout_conventions: false,
            has_session_hook: false,
            guidelines: [],
            commands: [],
            hooks: [],
            settings: [],
        }
    }

    let has_claude_md = ($"($claude_dir)/CLAUDE.md" | path exists)

    # Guidelines: one record per language subdirectory, listing its .md files.
    let guidelines_dir = $"($claude_dir)/guidelines"
    let guidelines = if ($guidelines_dir | path exists) {
        ls $guidelines_dir | where type == "dir" | each { |d|
            let lang = ($d.name | path basename)
            let files = (glob $"($d.name)/*.md" | each { |f| $f | path basename })
            { language: $lang, files: $files }
        }
    } else { [] }

    # Commands: every .md file in commands/. `path type` detects symlinks
    # natively (it does not follow the link) — portable replacement for
    # shelling out to `test -L`.
    let commands_dir = $"($claude_dir)/commands"
    let commands = if ($commands_dir | path exists) {
        glob $"($commands_dir)/*.md" | each { |f|
            let name = ($f | path basename | str replace ".md" "")
            let is_symlink = (($f | path type) == "symlink")
            { name: $name, symlink: $is_symlink }
        }
    } else { [] }

    # Hooks: event names declared under the `hooks` key of settings.json.
    let settings_path = $"($claude_dir)/settings.json"
    let hooks = if ($settings_path | path exists) {
        let settings = (open $settings_path)
        $settings | get -o hooks | default {} | columns
    } else { [] }

    # Settings files (settings.json, settings.local.json, ...)
    let settings = (glob $"($claude_dir)/settings*.json" | each { |f| $f | path basename })

    # Layout conventions and session hook presence
    let has_layout = ($"($claude_dir)/layout_conventions.md" | path exists)
    let has_session_hook = ($"($claude_dir)/ontoref-session-start.sh" | path exists)

    {
        exists: true,
        has_claude_md: $has_claude_md,
        has_layout_conventions: $has_layout,
        has_session_hook: $has_session_hook,
        guidelines: $guidelines,
        commands: $commands,
        hooks: $hooks,
        settings: $settings,
    }
}
|
|
|
|
# Load ontology via daemon-export (full fidelity, includes all fields).
# Returns an empty record when .ontology/core.ncl is absent or the daemon
# export fails, so callers can proceed with an empty ontology.
def load-ontology [root: string]: nothing -> record {
    let core_ncl = $"($root)/.ontology/core.ncl"
    if not ($core_ncl | path exists) { return {} }
    let data = (daemon-export-safe $core_ncl)
    if $data == null {
        # daemon-export-safe signals failure with null; warn but don't abort.
        print " Warning: failed to export core.ncl"
        return {}
    }
    $data
}
|
|
|
|
# Load ontology by parsing NCL text directly — no nickel binary needed.
# Extracts id and artifact_paths from node blocks, and from/to from edge blocks.
# Intentionally lossy: sufficient for drift detection in pre-commit/quick mode.
def load-ontology-quick [root: string]: nothing -> record {
    let core_ncl = $"($root)/.ontology/core.ncl"
    if not ($core_ncl | path exists) { return {} }

    let raw = (open $core_ncl --raw)
    let lines = ($raw | lines)

    # Line-oriented state machine over the raw NCL text.
    mut nodes = []               # accumulated { id, artifact_paths } records
    mut edges = []               # accumulated { from, to } records
    mut in_node = false          # inside a d.make_node { ... } block
    mut in_paths = false         # inside a multiline artifact_paths array
    mut in_edge_section = false  # past "edges = [" (vs. the nodes section)
    mut in_edge = false          # inside a multi-line d.make_edge block
    mut edge_from = ""
    mut edge_to = ""
    mut current_id = ""
    mut current_paths = []

    for line in $lines {
        let trimmed = ($line | str trim)

        # Detect section boundaries
        if ($trimmed | str starts-with "nodes = [") {
            $in_edge_section = false
        } else if ($trimmed | str starts-with "edges = [") {
            $in_edge_section = true
        }

        # Parse node blocks
        if not $in_edge_section {
            if ($trimmed | str contains "d.make_node") {
                # New node block: reset per-node accumulators.
                $in_node = true
                $in_paths = false
                $current_id = ""
                $current_paths = []
            } else if $in_node and ($trimmed | str starts-with "},") {
                # End of node block: record it only if an id was parsed.
                if ($current_id | is-not-empty) {
                    $nodes = ($nodes | append { id: $current_id, artifact_paths: $current_paths })
                }
                $in_node = false
                $in_paths = false
            } else if $in_node {
                # Extract id
                if ($trimmed | str starts-with "id ") {
                    let id_match = ($trimmed | parse --regex 'id\s+=\s+"([^"]+)"')
                    if ($id_match | is-not-empty) {
                        $current_id = ($id_match | first | get capture0)
                    }
                }
                # Extract artifact_paths (handles multiline arrays)
                if ($trimmed | str starts-with "artifact_paths") {
                    $in_paths = true
                    # Inline entries on the same line as the field name.
                    let paths_match = ($trimmed | parse --regex '"([^"]+)"')
                    if ($paths_match | is-not-empty) {
                        $current_paths = ($current_paths | append ($paths_match | get capture0))
                    }
                } else if $in_paths {
                    # Continuation lines of a multiline array, until "]" / "],".
                    if ($trimmed | str starts-with "],") or ($trimmed == "]") {
                        $in_paths = false
                    } else {
                        let paths_match = ($trimmed | parse --regex '"([^"]+)"')
                        if ($paths_match | is-not-empty) {
                            $current_paths = ($current_paths | append ($paths_match | get capture0))
                        }
                    }
                }
            }
        }

        # Parse edge blocks (single-line or multi-line d.make_edge)
        if $in_edge_section and ($trimmed | str contains "d.make_edge") {
            $in_edge = true
            $edge_from = ""
            $edge_to = ""
            # Check if single-line (from + to on same line as d.make_edge)
            let from_match = ($trimmed | parse --regex 'from\s*=\s*"([^"]+)"')
            let to_match = ($trimmed | parse --regex 'to\s*=\s*"([^"]+)"')
            if ($from_match | is-not-empty) { $edge_from = ($from_match | first | get capture0) }
            if ($to_match | is-not-empty) { $edge_to = ($to_match | first | get capture0) }
            if ($edge_from | is-not-empty) and ($edge_to | is-not-empty) {
                # Both endpoints on one line: the edge is complete.
                $edges = ($edges | append { from: $edge_from, to: $edge_to })
                $in_edge = false
            }
        } else if $in_edge {
            # Multi-line: accumulate from/to across lines
            let from_match = ($trimmed | parse --regex 'from\s*=\s*"([^"]+)"')
            let to_match = ($trimmed | parse --regex 'to\s*=\s*"([^"]+)"')
            if ($from_match | is-not-empty) { $edge_from = ($from_match | first | get capture0) }
            if ($to_match | is-not-empty) { $edge_to = ($to_match | first | get capture0) }
            if ($trimmed == "}," or $trimmed == "}") {
                # Block closed: keep the edge only if both endpoints parsed.
                if ($edge_from | is-not-empty) and ($edge_to | is-not-empty) {
                    $edges = ($edges | append { from: $edge_from, to: $edge_to })
                }
                $in_edge = false
            }
        }
    }

    { nodes: $nodes, edges: $edges }
}
|
|
|
|
# Collect all paths that exist in the scan output (relative to root).
|
|
def collect-existing-paths [scan: record, root: string]: nothing -> list<string> {
    # Collect every root-relative path reported by `sync scan`: crates,
    # scenarios, agents, CI configs, forms, modes, justfile modules, and the
    # `onref` entry point when it exists on disk.
    mut paths = []

    for c in $scan.crates { $paths = ($paths | append $c.path) }
    for s in $scan.scenarios { $paths = ($paths | append $s.path) }
    for a in $scan.agents { $paths = ($paths | append $a.path) }
    for ci in $scan.ci { $paths = ($paths | append $ci.path) }
    # Forms / modes are optional scan sections — absent in older scans.
    for f in ($scan.forms? | default []) { $paths = ($paths | append $f.path) }
    for m in ($scan.modes? | default []) { $paths = ($paths | append $m.path) }

    # Justfile module files (already root-relative strings)
    for jf in ($scan.justfiles?.module_files? | default []) {
        $paths = ($paths | append $jf)
    }

    # Entry point: the onref wrapper at the project root.
    # Bug fix: this check was duplicated verbatim, appending "onref" twice.
    if ($"($root)/onref" | path exists) { $paths = ($paths | append "onref") }

    $paths
}
|
|
|
|
# Find scan artifacts not claimed by any node's artifact_paths.
|
|
def find-unclaimed-artifacts [scan: record, root: string, node_paths: list<string>]: nothing -> list {
    # A path is "claimed" when it prefix-overlaps any declared node path in
    # either direction (node covers artifact, or artifact covers node).
    let is_claimed = { |rel|
        $node_paths | any { |np| $rel starts-with $np or $np starts-with $rel }
    }

    # Filter each artifact category down to its unclaimed members and shape
    # them into { id, path, kind } rows. Category order matches the report.
    let crates = ($scan.crates
        | where { |c| not (do $is_claimed $c.path) }
        | each { |c| { id: $c.name, path: $c.path, kind: "crate" } })

    let scenarios = ($scan.scenarios
        | where { |s| not (do $is_claimed $s.path) }
        | each { |s| { id: $"scenario-($s.category)", path: $s.path, kind: "scenario" } })

    let agents = ($scan.agents
        | where { |a| not (do $is_claimed $a.path) }
        | each { |a| { id: $"agent-($a.name)", path: $a.path, kind: "agent" } })

    let ci_rows = ($scan.ci
        | where { |c| not (do $is_claimed $c.path) }
        | each { |c| { id: $"ci-($c.provider)", path: $c.path, kind: "ci" } })

    # Forms / modes are optional scan sections.
    let forms = ($scan.forms? | default []
        | where { |f| not (do $is_claimed $f.path) }
        | each { |f| { id: $"form-($f.name)", path: $f.path, kind: "form" } })

    let modes = ($scan.modes? | default []
        | where { |m| not (do $is_claimed $m.path) }
        | each { |m| { id: $"mode-($m.name)", path: $m.path, kind: "mode" } })

    [$crates $scenarios $agents $ci_rows $forms $modes] | flatten
}
|
|
|
|
def detect-artifact-kind [path: string]: nothing -> string {
    # Classify a repo-relative path into an artifact kind using ordered
    # substring rules; the first matching rule wins, and anything unmatched
    # is "unknown".
    let rules = [
        { hit: ($path | str contains "crates/"), kind: "crate" }
        { hit: (($path | str contains "scenarios/") or ($path | str contains "examples/")), kind: "scenario" }
        { hit: ($path | str contains ".agent.mdx"), kind: "agent" }
        { hit: (($path | str contains ".github/") or ($path | str contains ".woodpecker/")), kind: "ci" }
        { hit: (($path | str contains "justfiles/") or ($path == "justfile")), kind: "justfile" }
        { hit: ($path | str contains ".claude/"), kind: "claude" }
        { hit: ($path | str contains "reflection/modules/"), kind: "module" }
        { hit: ($path | str contains "reflection/modes/"), kind: "mode" }
    ]
    $rules | where hit | get -o 0.kind | default "unknown"
}
|
|
|
|
# Emit a Nickel `d.make_node { ... }` snippet for an artifact found on disk
# but missing from the ontology; intended to be pasted into the `nodes` list
# of .ontology/core.ncl.
def generate-node-ncl [id: string, path: string, kind: string]: nothing -> string {
    # Map artifact kind to an ontology level; unrecognized kinds default to
    # 'Project.
    let level = match $kind {
        "crate" => "'Project",
        "scenario" => "'Moment",
        "agent" => "'Practice",
        _ => "'Project",
    }
    [
        $" d.make_node {",
        $" id = \"($id)\",",
        # Display name derived from the id: "my-crate" -> "My Crate".
        $" name = \"($id | str replace -a '-' ' ' | split words | each { |w| ($w | str capitalize) } | str join ' ')\",",
        $" pole = 'Yang,",
        $" level = ($level),",
        $" description = \"Detected from ($kind) at ($path)\",",
        $" invariant = false,",
        $" artifact_paths = [\"($path)\"],",
        $" },",
    ] | str join "\n"
}
|
|
|
|
# Run ADR constraint check_hints and report one PASS/FAIL/SKIP row per
# constraint. Only Accepted ADRs with Hard/Soft constraints are audited, and
# hints pass through a command whitelist plus a dangerous-flag blocklist so
# ontology data cannot trigger arbitrary command execution.
def audit-adr-constraints [root: string]: nothing -> list {
    let adr_files = (glob $"($root)/adrs/adr-*.ncl")
    mut results = []

    for f in $adr_files {
        let adr = (daemon-export-safe $f)
        # Unexportable ADRs and non-Accepted statuses are skipped silently.
        if $adr == null { continue }
        if ($adr.status? | default "") != "Accepted" { continue }

        let constraints = ($adr.constraints? | default [])
        for c in $constraints {
            let severity = ($c.severity? | default "")
            if $severity not-in ["Hard", "Soft"] { continue }
            let hint = ($c.check_hint? | default "")
            if ($hint | is-empty) { continue }

            # Guard: only execute check_hints that start with a known safe command prefix.
            # Prevents arbitrary shell injection from ADR constraint fields.
            # Only read-only commands that do NOT accept code execution flags.
            # Excluded: nu (accepts -c), cargo (accepts run), nickel (accepts eval),
            # cat/jq (can be piped into shell via bash -c).
            let hint_trimmed = ($hint | str trim)
            let parts = ($hint_trimmed | split row " ")
            let cmd = ($parts | first)
            let args = ($parts | skip 1)

            let safe_commands = ["rg", "grep", "test", "[", "ls", "wc", "file", "stat"]
            if $cmd not-in $safe_commands {
                # Non-whitelisted command: report SKIP rather than executing.
                $results = ($results | append {
                    adr: ($adr.id? | default ""),
                    constraint: ($c.id? | default ""),
                    severity: $severity,
                    status: "SKIP",
                    detail: $"check_hint command not in whitelist: ($cmd)",
                })
                continue
            }

            # Block flags that allow code execution via whitelisted commands
            # (e.g., rg --pre <script> executes an external command per file).
            let dangerous_flags = ["--pre", "--pre-glob", "--exec", "--exec-batch"]
            let has_dangerous = ($args | any { |a| $a in $dangerous_flags })
            if $has_dangerous {
                $results = ($results | append {
                    adr: ($adr.id? | default ""),
                    constraint: ($c.id? | default ""),
                    severity: $severity,
                    status: "SKIP",
                    detail: $"check_hint contains blocked flag: ($args | str join ' ')",
                })
                continue
            }

            # Execute the hint from the project root; the exit code alone
            # decides PASS vs FAIL.
            let check = do { cd $root; run-external $cmd ...$args } | complete
            let status = if $check.exit_code == 0 { "PASS" } else { "FAIL" }
            $results = ($results | append {
                adr: ($adr.id? | default ""),
                constraint: ($c.id? | default ""),
                severity: $severity,
                status: $status,
                detail: ($c.claim? | default ""),
            })
        }
    }

    $results
}
|
|
|
|
# Load manifest safely — returns empty record if not available.
|
|
def load-manifest-safe [root: string]: nothing -> record {
    # Export .ontology/manifest.ncl via the daemon; returns {} when the file
    # is absent or the export fails.
    let manifest_file = $"($root)/.ontology/manifest.ncl"
    if not ($manifest_file | path exists) { return {} }

    # Nickel import path: project ontology first, then the shared ontoref
    # ontology (when ONTOREF_ROOT is set), then any caller-supplied path.
    let shared_ontology = if ($env.ONTOREF_ROOT? | is-not-empty) {
        $"($env.ONTOREF_ROOT)/ontology"
    } else { "" }
    let segments = [
        $"($root)/.ontology",
        $shared_ontology,
        ($env.NICKEL_IMPORT_PATH? | default ""),
    ]
    let import_path = ($segments | where { $in | is-not-empty } | str join ":")

    daemon-export-safe $manifest_file --import-path $import_path | default {}
}
|
|
|
|
# Verify tools declared in manifest are available on PATH.
|
|
def audit-tools [manifest: record]: nothing -> list {
    # Probe PATH for each tool declared in the manifest. A found tool is
    # PASS; a missing one is MISSING when required, OPTIONAL otherwise.
    let declared = ($manifest.tools? | default [])
    if ($declared | is-empty) { return [] }

    $declared | each { |tool|
        let tool_name = $tool.name
        let is_required = ($tool.required? | default true)
        let on_path = (which $tool_name | is-not-empty)
        let verdict = if $on_path {
            "PASS"
        } else if $is_required {
            "MISSING"
        } else {
            "OPTIONAL"
        }
        let install = ($tool.install_method? | default "Builtin")
        { check: $"tool-($tool_name)", status: $verdict, detail: $"($tool_name) [($install)]" }
    }
}
|
|
|
|
# Check justfile completeness against manifest convention or canonical defaults.
|
|
def audit-justfiles [justfiles: record, manifest: record]: nothing -> list {
    # Compare the scanned justfile layout against the manifest's justfile
    # convention; canonical defaults apply when the manifest is silent.
    if not ($justfiles.exists? | default false) { return [] }

    let convention = ($manifest.justfile? | default {})
    let wanted_modules = ($convention.required_modules? | default ["build" "test" "dev" "ci"])
    let wanted_recipes = ($convention.required_recipes? | default ["default" "help"])

    # One row per required module, PASS when declared in the justfile.
    let have_modules = ($justfiles.modules? | default [] | get name)
    let module_rows = ($wanted_modules | each { |want|
        {
            check: $"justfile-module-($want)",
            status: (if ($want in $have_modules) { "PASS" } else { "MISSING" }),
            detail: $"Canonical module '($want)'",
        }
    })

    # One row per required recipe.
    let have_recipes = ($justfiles.recipes? | default [])
    let recipe_rows = ($wanted_recipes | each { |want|
        {
            check: $"justfile-recipe-($want)",
            status: (if ($want in $have_recipes) { "PASS" } else { "MISSING" }),
            detail: $"Required recipe '($want)'",
        }
    })

    $module_rows | append $recipe_rows
}
|
|
|
|
# Check .claude/ completeness against manifest baseline or defaults.
|
|
def audit-claude [claude: record, manifest: record]: nothing -> list {
    # Verify .claude/ completeness: the three core files, plus one guideline
    # directory per expected language (manifest baseline, or bash + nushell
    # by default).
    if not ($claude.exists? | default false) {
        return [{ check: "claude-dir", status: "MISSING", detail: ".claude/ directory not found" }]
    }

    # Presence rows for the core files.
    let file_rows = ([
        { check: "claude-md", ok: ($claude.has_claude_md? | default false), detail: "CLAUDE.md" }
        { check: "layout-conventions", ok: ($claude.has_layout_conventions? | default false), detail: "layout_conventions.md" }
        { check: "session-hook", ok: ($claude.has_session_hook? | default false), detail: "ontoref-session-start.sh" }
    ] | each { |row|
        {
            check: $row.check,
            status: (if $row.ok { "PASS" } else { "MISSING" }),
            detail: $row.detail,
        }
    })

    # One row per expected guideline language.
    let present_langs = ($claude.guidelines? | default [] | get language)
    let baseline = ($manifest.claude? | default {})
    let expected_langs = ($baseline.guidelines? | default ["bash" "nushell"])
    let lang_rows = ($expected_langs | each { |lang|
        {
            check: $"guideline-($lang)",
            status: (if ($lang in $present_langs) { "PASS" } else { "MISSING" }),
            detail: $"Guidelines for ($lang)",
        }
    })

    $file_rows | append $lang_rows
}
|
|
|
|
def audit-gates [root: string, diff: list]: nothing -> list {
    # Evaluate membrane gates from .ontology/gate.ncl against the drift
    # report `diff`: an active gate is in BREACH when any node it protects
    # has a non-OK drift status; inactive gates report INACTIVE.
    let gate_file = $"($root)/.ontology/gate.ncl"
    if not ($gate_file | path exists) { return [] }

    let exported = (daemon-export-safe $gate_file)
    if $exported == null { return [] }

    # Ids of nodes whose drift status is anything but OK (MISSING/STALE/BROKEN).
    let drifted = ($diff | where status != "OK" | get id)

    $exported.membranes? | default [] | each { |gate|
        let is_active = ($gate.active? | default false)
        let protected = ($gate.protects? | default [])

        # Protected nodes that actually drifted (only matters when active).
        let hit = if $is_active and ($protected | is-not-empty) {
            $protected | where { |p| $p in $drifted }
        } else { [] }

        let verdict = if not $is_active {
            "INACTIVE"
        } else if ($hit | is-not-empty) {
            "BREACH"
        } else {
            "ACTIVE"
        }

        {
            id: ($gate.id? | default ""),
            status: $verdict,
            protects: $protected,
            breached_nodes: $hit,
        }
    }
}
|