// NOTE(review): removed a duplicated file-listing artifact that preceded the
// source ("504 lines / 16 KiB / Rust", repeated twice) — not part of the code.
use std::{collections::HashMap, path::Path};
|
||
|
|
|
||
|
|
use anyhow::{anyhow, Context, Result};
|
||
|
|
use serde_json::Value;
|
||
|
|
use tracing::debug;
|
||
|
|
|
||
|
|
use crate::{
|
||
|
|
error::OntologyError,
|
||
|
|
types::{
|
||
|
|
AbstractionLevel, CoreConfig, Dimension, Edge, GateConfig, Membrane, Node, Permeability,
|
||
|
|
StateConfig, TensionLevel,
|
||
|
|
},
|
||
|
|
};
|
||
|
|
|
||
|
|
/// Full project ontology: core DAG + state FSM + gate membranes.
#[derive(Debug)]
pub struct Ontology {
    /// The core ontology DAG: nodes (axioms, tensions, practices) and edges.
    pub core: Core,
    /// The state FSM: tracked dimensions and their transition graphs.
    pub state: State,
    /// The gate: membranes that filter incoming signals.
    pub gate: Gate,
}
|
||
|
|
|
||
|
|
impl Ontology {
|
||
|
|
/// Load all three sections from `ontology_dir/` (core.ncl, state.ncl,
|
||
|
|
/// gate.ncl). Each file is exported via `nickel export --format json`.
|
||
|
|
///
|
||
|
|
/// Prefer constructing from pre-fetched JSON via [`Core::from_value`],
|
||
|
|
/// [`State::from_value`], [`Gate::from_value`] when a daemon or cache
|
||
|
|
/// is available.
|
||
|
|
#[deprecated(note = "use from_value() constructors with daemon-provided JSON instead")]
|
||
|
|
pub fn load(ontology_dir: &Path) -> Result<Self> {
|
||
|
|
#[allow(deprecated)]
|
||
|
|
{
|
||
|
|
let core = Core::load(&ontology_dir.join("core.ncl"))?;
|
||
|
|
let state = State::load(&ontology_dir.join("state.ncl"))?;
|
||
|
|
let gate = Gate::load(&ontology_dir.join("gate.ncl"))?;
|
||
|
|
Ok(Self { core, state, gate })
|
||
|
|
}
|
||
|
|
}
|
||
|
|
|
||
|
|
/// Construct from pre-fetched JSON values (from stratum-daemon, stratum-db,
|
||
|
|
/// or any other source that provides the NCL export output).
|
||
|
|
pub fn from_values(core_json: &Value, state_json: &Value, gate_json: &Value) -> Result<Self> {
|
||
|
|
Ok(Self {
|
||
|
|
core: Core::from_value(core_json)?,
|
||
|
|
state: State::from_value(state_json)?,
|
||
|
|
gate: Gate::from_value(gate_json)?,
|
||
|
|
})
|
||
|
|
}
|
||
|
|
|
||
|
|
/// Reload all sections from disk (re-runs nickel export).
|
||
|
|
#[deprecated(note = "use from_values() with daemon-provided JSON instead")]
|
||
|
|
pub fn reload(&mut self, ontology_dir: &Path) -> Result<()> {
|
||
|
|
#[allow(deprecated)]
|
||
|
|
{
|
||
|
|
self.core = Core::load(&ontology_dir.join("core.ncl"))?;
|
||
|
|
self.state = State::load(&ontology_dir.join("state.ncl"))?;
|
||
|
|
self.gate = Gate::load(&ontology_dir.join("gate.ncl"))?;
|
||
|
|
Ok(())
|
||
|
|
}
|
||
|
|
}
|
||
|
|
}
|
||
|
|
|
||
|
|
// ── Core ──────────────────────────────────────────────────────────────────────
|
||
|
|
|
||
|
|
/// The core ontology DAG: nodes (axioms, tensions, practices) and edges.
#[derive(Debug)]
pub struct Core {
    /// All nodes, in declaration order; positions are indexed by `by_id`.
    nodes: Vec<Node>,
    /// Directed edges between nodes, referencing nodes by their string ids.
    edges: Vec<Edge>,
    /// Node id -> index into `nodes`, for O(1) lookup in `node_by_id`.
    by_id: HashMap<String, usize>,
}
|
||
|
|
|
||
|
|
impl Core {
|
||
|
|
/// Construct from a pre-fetched JSON value (the output of `nickel export
|
||
|
|
/// core.ncl`).
|
||
|
|
pub fn from_value(value: &Value) -> Result<Self> {
|
||
|
|
let cfg: CoreConfig =
|
||
|
|
serde_json::from_value(value.clone()).map_err(|e| OntologyError::Parse {
|
||
|
|
section: "core",
|
||
|
|
source: e,
|
||
|
|
})?;
|
||
|
|
|
||
|
|
let by_id: HashMap<String, usize> = cfg
|
||
|
|
.nodes
|
||
|
|
.iter()
|
||
|
|
.enumerate()
|
||
|
|
.map(|(i, n)| (n.id.clone(), i))
|
||
|
|
.collect();
|
||
|
|
|
||
|
|
Ok(Self {
|
||
|
|
nodes: cfg.nodes,
|
||
|
|
edges: cfg.edges,
|
||
|
|
by_id,
|
||
|
|
})
|
||
|
|
}
|
||
|
|
|
||
|
|
#[deprecated(note = "use Core::from_value() with daemon-provided JSON instead")]
|
||
|
|
fn load(path: &Path) -> Result<Self> {
|
||
|
|
let raw = nickel_export(path, "core")?;
|
||
|
|
let cfg: CoreConfig = serde_json::from_slice(&raw).map_err(|e| OntologyError::Parse {
|
||
|
|
section: "core",
|
||
|
|
source: e,
|
||
|
|
})?;
|
||
|
|
|
||
|
|
let by_id: HashMap<String, usize> = cfg
|
||
|
|
.nodes
|
||
|
|
.iter()
|
||
|
|
.enumerate()
|
||
|
|
.map(|(i, n)| (n.id.clone(), i))
|
||
|
|
.collect();
|
||
|
|
|
||
|
|
Ok(Self {
|
||
|
|
nodes: cfg.nodes,
|
||
|
|
edges: cfg.edges,
|
||
|
|
by_id,
|
||
|
|
})
|
||
|
|
}
|
||
|
|
|
||
|
|
pub fn nodes(&self) -> &[Node] {
|
||
|
|
&self.nodes
|
||
|
|
}
|
||
|
|
|
||
|
|
pub fn edges(&self) -> &[Edge] {
|
||
|
|
&self.edges
|
||
|
|
}
|
||
|
|
|
||
|
|
pub fn node_by_id(&self, id: &str) -> Option<&Node> {
|
||
|
|
self.by_id.get(id).map(|&i| &self.nodes[i])
|
||
|
|
}
|
||
|
|
|
||
|
|
pub fn axioms(&self) -> impl Iterator<Item = &Node> {
|
||
|
|
self.nodes
|
||
|
|
.iter()
|
||
|
|
.filter(|n| n.level == AbstractionLevel::Axiom)
|
||
|
|
}
|
||
|
|
|
||
|
|
pub fn tensions(&self) -> impl Iterator<Item = &Node> {
|
||
|
|
self.nodes
|
||
|
|
.iter()
|
||
|
|
.filter(|n| n.level == AbstractionLevel::Tension)
|
||
|
|
}
|
||
|
|
|
||
|
|
pub fn practices(&self) -> impl Iterator<Item = &Node> {
|
||
|
|
self.nodes
|
||
|
|
.iter()
|
||
|
|
.filter(|n| n.level == AbstractionLevel::Practice)
|
||
|
|
}
|
||
|
|
|
||
|
|
/// Nodes with `invariant = true` — must never be violated.
|
||
|
|
pub fn invariants(&self) -> impl Iterator<Item = &Node> {
|
||
|
|
self.nodes.iter().filter(|n| n.invariant)
|
||
|
|
}
|
||
|
|
|
||
|
|
/// All edges originating from `node_id`.
|
||
|
|
pub fn edges_from(&self, node_id: &str) -> impl Iterator<Item = &Edge> {
|
||
|
|
let id = node_id.to_owned();
|
||
|
|
self.edges.iter().filter(move |e| e.from == id)
|
||
|
|
}
|
||
|
|
|
||
|
|
/// All edges pointing to `node_id`.
|
||
|
|
pub fn edges_to(&self, node_id: &str) -> impl Iterator<Item = &Edge> {
|
||
|
|
let id = node_id.to_owned();
|
||
|
|
self.edges.iter().filter(move |e| e.to == id)
|
||
|
|
}
|
||
|
|
}
|
||
|
|
|
||
|
|
// ── State ─────────────────────────────────────────────────────────────────────
|
||
|
|
|
||
|
|
/// The state FSM: tracked dimensions and their transition graphs.
#[derive(Debug)]
pub struct State {
    /// All tracked dimensions, in declaration order.
    dimensions: Vec<Dimension>,
    /// Dimension id -> index into `dimensions`, for O(1) lookup.
    by_id: HashMap<String, usize>,
}
|
||
|
|
|
||
|
|
impl State {
|
||
|
|
/// Construct from a pre-fetched JSON value (the output of `nickel export
|
||
|
|
/// state.ncl`).
|
||
|
|
pub fn from_value(value: &Value) -> Result<Self> {
|
||
|
|
let cfg: StateConfig =
|
||
|
|
serde_json::from_value(value.clone()).map_err(|e| OntologyError::Parse {
|
||
|
|
section: "state",
|
||
|
|
source: e,
|
||
|
|
})?;
|
||
|
|
|
||
|
|
let by_id: HashMap<String, usize> = cfg
|
||
|
|
.dimensions
|
||
|
|
.iter()
|
||
|
|
.enumerate()
|
||
|
|
.map(|(i, d)| (d.id.clone(), i))
|
||
|
|
.collect();
|
||
|
|
|
||
|
|
Ok(Self {
|
||
|
|
dimensions: cfg.dimensions,
|
||
|
|
by_id,
|
||
|
|
})
|
||
|
|
}
|
||
|
|
|
||
|
|
#[deprecated(note = "use State::from_value() with daemon-provided JSON instead")]
|
||
|
|
fn load(path: &Path) -> Result<Self> {
|
||
|
|
let raw = nickel_export(path, "state")?;
|
||
|
|
let cfg: StateConfig = serde_json::from_slice(&raw).map_err(|e| OntologyError::Parse {
|
||
|
|
section: "state",
|
||
|
|
source: e,
|
||
|
|
})?;
|
||
|
|
|
||
|
|
let by_id: HashMap<String, usize> = cfg
|
||
|
|
.dimensions
|
||
|
|
.iter()
|
||
|
|
.enumerate()
|
||
|
|
.map(|(i, d)| (d.id.clone(), i))
|
||
|
|
.collect();
|
||
|
|
|
||
|
|
Ok(Self {
|
||
|
|
dimensions: cfg.dimensions,
|
||
|
|
by_id,
|
||
|
|
})
|
||
|
|
}
|
||
|
|
|
||
|
|
pub fn dimensions(&self) -> &[Dimension] {
|
||
|
|
&self.dimensions
|
||
|
|
}
|
||
|
|
|
||
|
|
pub fn dimension_by_id(&self, id: &str) -> Option<&Dimension> {
|
||
|
|
self.by_id.get(id).map(|&i| &self.dimensions[i])
|
||
|
|
}
|
||
|
|
|
||
|
|
/// Dimensions with high tension in their current state.
|
||
|
|
pub fn high_tension_dimensions(&self) -> impl Iterator<Item = &Dimension> {
|
||
|
|
self.dimensions.iter().filter(|d| {
|
||
|
|
d.states
|
||
|
|
.iter()
|
||
|
|
.find(|e| e.id == d.current_state)
|
||
|
|
.is_some_and(|e| e.tension == TensionLevel::High)
|
||
|
|
})
|
||
|
|
}
|
||
|
|
|
||
|
|
/// Check if a transition from `current` to `target` is declared for
|
||
|
|
/// dimension `dim_id`. Returns `Ok(())` if valid, `Err` with the
|
||
|
|
/// declared blocker if not.
|
||
|
|
pub fn can_transition(&self, dim_id: &str, to: &str) -> Result<(), String> {
|
||
|
|
let dim = self
|
||
|
|
.dimension_by_id(dim_id)
|
||
|
|
.ok_or_else(|| format!("dimension '{dim_id}' not found"))?;
|
||
|
|
|
||
|
|
let transition = dim
|
||
|
|
.transitions
|
||
|
|
.iter()
|
||
|
|
.find(|t| t.from == dim.current_state && t.to == to);
|
||
|
|
|
||
|
|
match transition {
|
||
|
|
Some(t) if t.blocker.is_empty() => Ok(()),
|
||
|
|
Some(t) => Err(format!("transition blocked: {}", t.blocker)),
|
||
|
|
None => Err(format!(
|
||
|
|
"no declared transition from '{}' to '{to}' in dimension '{dim_id}'",
|
||
|
|
dim.current_state
|
||
|
|
)),
|
||
|
|
}
|
||
|
|
}
|
||
|
|
}
|
||
|
|
|
||
|
|
// ── Gate ──────────────────────────────────────────────────────────────────────
|
||
|
|
|
||
|
|
/// The gate: membranes that filter incoming signals.
#[derive(Debug)]
pub struct Gate {
    /// All membranes, in declaration order.
    membranes: Vec<Membrane>,
    /// Membrane id -> index into `membranes`, for O(1) lookup.
    by_id: HashMap<String, usize>,
}
|
||
|
|
|
||
|
|
impl Gate {
|
||
|
|
/// Construct from a pre-fetched JSON value (the output of `nickel export
|
||
|
|
/// gate.ncl`).
|
||
|
|
pub fn from_value(value: &Value) -> Result<Self> {
|
||
|
|
let cfg: GateConfig =
|
||
|
|
serde_json::from_value(value.clone()).map_err(|e| OntologyError::Parse {
|
||
|
|
section: "gate",
|
||
|
|
source: e,
|
||
|
|
})?;
|
||
|
|
|
||
|
|
let by_id: HashMap<String, usize> = cfg
|
||
|
|
.membranes
|
||
|
|
.iter()
|
||
|
|
.enumerate()
|
||
|
|
.map(|(i, m)| (m.id.clone(), i))
|
||
|
|
.collect();
|
||
|
|
|
||
|
|
Ok(Self {
|
||
|
|
membranes: cfg.membranes,
|
||
|
|
by_id,
|
||
|
|
})
|
||
|
|
}
|
||
|
|
|
||
|
|
#[deprecated(note = "use Gate::from_value() with daemon-provided JSON instead")]
|
||
|
|
fn load(path: &Path) -> Result<Self> {
|
||
|
|
let raw = nickel_export(path, "gate")?;
|
||
|
|
let cfg: GateConfig = serde_json::from_slice(&raw).map_err(|e| OntologyError::Parse {
|
||
|
|
section: "gate",
|
||
|
|
source: e,
|
||
|
|
})?;
|
||
|
|
|
||
|
|
let by_id: HashMap<String, usize> = cfg
|
||
|
|
.membranes
|
||
|
|
.iter()
|
||
|
|
.enumerate()
|
||
|
|
.map(|(i, m)| (m.id.clone(), i))
|
||
|
|
.collect();
|
||
|
|
|
||
|
|
Ok(Self {
|
||
|
|
membranes: cfg.membranes,
|
||
|
|
by_id,
|
||
|
|
})
|
||
|
|
}
|
||
|
|
|
||
|
|
pub fn membranes(&self) -> &[Membrane] {
|
||
|
|
&self.membranes
|
||
|
|
}
|
||
|
|
|
||
|
|
pub fn membrane_by_id(&self, id: &str) -> Option<&Membrane> {
|
||
|
|
self.by_id.get(id).map(|&i| &self.membranes[i])
|
||
|
|
}
|
||
|
|
|
||
|
|
/// Active membranes that are currently open.
|
||
|
|
pub fn active_membranes(&self) -> impl Iterator<Item = &Membrane> {
|
||
|
|
self.membranes.iter().filter(|m| m.active)
|
||
|
|
}
|
||
|
|
|
||
|
|
/// Membranes with `Closed` permeability — signals cannot enter.
|
||
|
|
pub fn closed_membranes(&self) -> impl Iterator<Item = &Membrane> {
|
||
|
|
self.membranes
|
||
|
|
.iter()
|
||
|
|
.filter(|m| m.permeability == Permeability::Closed)
|
||
|
|
}
|
||
|
|
|
||
|
|
/// Membranes that protect the node with the given id.
|
||
|
|
pub fn protecting(&self, node_id: &str) -> impl Iterator<Item = &Membrane> {
|
||
|
|
let id = node_id.to_owned();
|
||
|
|
self.membranes
|
||
|
|
.iter()
|
||
|
|
.filter(move |m| m.protects.iter().any(|p| p == &id))
|
||
|
|
}
|
||
|
|
}
|
||
|
|
|
||
|
|
// ── Shared ────────────────────────────────────────────────────────────────────
|
||
|
|
|
||
|
|
fn nickel_export(path: &Path, section: &'static str) -> Result<Vec<u8>> {
|
||
|
|
if !path.exists() {
|
||
|
|
return Err(OntologyError::MissingFile(
|
||
|
|
path.parent().unwrap_or(path).display().to_string(),
|
||
|
|
path.file_name()
|
||
|
|
.unwrap_or_default()
|
||
|
|
.to_string_lossy()
|
||
|
|
.into_owned(),
|
||
|
|
)
|
||
|
|
.into());
|
||
|
|
}
|
||
|
|
|
||
|
|
debug!(section, path = %path.display(), "running nickel export");
|
||
|
|
|
||
|
|
let output = std::process::Command::new("nickel")
|
||
|
|
.arg("export")
|
||
|
|
.arg("--format")
|
||
|
|
.arg("json")
|
||
|
|
.arg(path)
|
||
|
|
.output()
|
||
|
|
.with_context(|| format!("running nickel export on '{}'", path.display()))?;
|
||
|
|
|
||
|
|
if !output.status.success() {
|
||
|
|
let stderr = String::from_utf8_lossy(&output.stderr).trim().to_owned();
|
||
|
|
return Err(anyhow!(OntologyError::NickelExport {
|
||
|
|
path: path.display().to_string(),
|
||
|
|
stderr,
|
||
|
|
}));
|
||
|
|
}
|
||
|
|
|
||
|
|
Ok(output.stdout)
|
||
|
|
}
|
||
|
|
|
||
|
|
#[cfg(test)]
mod tests {
    use super::*;

    // A minimal but schema-complete core document: one invariant axiom node
    // and one self-referential edge.
    #[test]
    fn core_from_value_parses_valid_json() {
        let json = serde_json::json!({
            "nodes": [
                {
                    "id": "test-axiom",
                    "name": "Test Axiom",
                    "pole": "Yang",
                    "level": "Axiom",
                    "description": "A test axiom",
                    "invariant": true
                }
            ],
            "edges": [
                {
                    "from": "test-axiom",
                    "to": "test-axiom",
                    "kind": "Contains",
                    "weight": 1.0,
                    "note": ""
                }
            ]
        });

        let core = Core::from_value(&json).unwrap();
        assert_eq!(core.nodes().len(), 1);
        assert_eq!(core.edges().len(), 1);
        assert!(core.node_by_id("test-axiom").is_some());
        // The single node is both an axiom and an invariant, so each
        // filtered view yields exactly one node.
        assert_eq!(core.axioms().count(), 1);
        assert_eq!(core.invariants().count(), 1);
    }

    // One dimension currently in state "a" with a single unblocked
    // transition a -> b; "c" has no declared transition.
    #[test]
    fn state_from_value_parses_and_transitions() {
        let json = serde_json::json!({
            "dimensions": [
                {
                    "id": "test-dim",
                    "name": "Test",
                    "description": "",
                    "current_state": "a",
                    "desired_state": "b",
                    "horizon": "Weeks",
                    "states": [],
                    "transitions": [
                        {
                            "from": "a",
                            "to": "b",
                            "condition": "ready",
                            "catalyst": "",
                            "blocker": "",
                            "horizon": "Weeks"
                        }
                    ],
                    "coupled_with": []
                }
            ]
        });

        let state = State::from_value(&json).unwrap();
        assert_eq!(state.dimensions().len(), 1);
        // Declared and unblocked (empty blocker) -> allowed.
        assert!(state.can_transition("test-dim", "b").is_ok());
        // No declared transition a -> c -> rejected.
        assert!(state.can_transition("test-dim", "c").is_err());
    }

    // One active membrane protecting "test-axiom"; exercises all the
    // nested fields of the membrane schema (opening/closing conditions).
    #[test]
    fn gate_from_value_parses_membranes() {
        let json = serde_json::json!({
            "membranes": [
                {
                    "id": "test-gate",
                    "name": "Test Gate",
                    "description": "A test membrane",
                    "permeability": "High",
                    "accepts": ["HardBug"],
                    "protects": ["test-axiom"],
                    "opening_condition": {
                        "max_tension_dimensions": 2,
                        "pending_transitions": 1,
                        "core_stable": true,
                        "description": "test"
                    },
                    "closing_condition": "done",
                    "protocol": "Observe",
                    "max_duration": "Weeks",
                    "active": true
                }
            ]
        });

        let gate = Gate::from_value(&json).unwrap();
        assert_eq!(gate.membranes().len(), 1);
        assert_eq!(gate.active_membranes().count(), 1);
        assert_eq!(gate.protecting("test-axiom").count(), 1);
    }

    // from_values should assemble all three sections; state and gate are
    // valid-but-empty documents here.
    #[test]
    fn ontology_from_values_composes_all_three() {
        let core_json = serde_json::json!({
            "nodes": [{
                "id": "ax", "name": "Ax", "pole": "Yang",
                "level": "Axiom", "description": "d", "invariant": false
            }],
            "edges": []
        });
        let state_json = serde_json::json!({ "dimensions": [] });
        let gate_json = serde_json::json!({ "membranes": [] });

        let ont = Ontology::from_values(&core_json, &state_json, &gate_json).unwrap();
        assert_eq!(ont.core.nodes().len(), 1);
        assert!(ont.state.dimensions().is_empty());
        assert!(ont.gate.membranes().is_empty());
    }

    // Schema mismatch ("nodes" must be an array) surfaces as a parse error
    // rather than a panic.
    #[test]
    fn from_value_rejects_invalid_json() {
        let bad = serde_json::json!({"nodes": "not_an_array"});
        assert!(Core::from_value(&bad).is_err());
    }
}
|