#!/usr/bin/env bash
# install/ontoref-daemon-boot — NCL pipe bootstrap for ontoref-daemon.
#
# Implements ADR-004: config is validated by nickel export and piped directly
# to the daemon via stdin. No intermediate file is written to disk.
#
# Stages:
#   1. nickel export config.ncl      → validated JSON (structural, no secrets)
#   2. SOPS decrypt (optional)       → merge secrets into JSON stream
#   3. ontoref-daemon --config-stdin → reads JSON from stdin, closes stdin
#
# Usage:
#   ontoref-daemon-boot [options]
#   ontoref-daemon-boot --dry-run      # print composed JSON to stdout, don't start daemon
#   ontoref-daemon-boot --sops FILE    # enable SOPS stage with encrypted secrets file
#   ontoref-daemon-boot --vault PATH   # enable Vault stage with kv path
#
# Environment:
#   ONTOREF_CONFIG      path to config.ncl (default: $HOME/.config/ontoref/config.ncl)
#   ONTOREF_DAEMON      path to ontoref-daemon binary (default: ~/.local/bin/ontoref-daemon.bin)
#   NICKEL_IMPORT_PATH  (set automatically below; override to add extra import paths)

set -euo pipefail

CONFIG="${ONTOREF_CONFIG:-$HOME/.config/ontoref/config.ncl}"
DAEMON="${ONTOREF_DAEMON:-$HOME/.local/bin/ontoref-daemon.bin}"
DRY_RUN=0
SOPS_FILE=""
VAULT_PATH=""
PASS_ARGS=()

# require_arg OPTION [VALUE...] — abort with a clear message when an option
# that takes a value was given none (otherwise set -u dies on "$2" with an
# opaque "unbound variable" error, or the option silently eats the next flag).
require_arg() {
  if [[ $# -lt 2 ]]; then
    echo "error: $1 requires an argument" >&2
    exit 1
  fi
}

while [[ $# -gt 0 ]]; do
  case "$1" in
    --dry-run) DRY_RUN=1; shift ;;
    --sops)    require_arg "$@"; SOPS_FILE="$2";  shift 2 ;;
    --vault)   require_arg "$@"; VAULT_PATH="$2"; shift 2 ;;
    --config)  require_arg "$@"; CONFIG="$2";     shift 2 ;;
    *)         PASS_ARGS+=("$1"); shift ;;
  esac
done

if [[ ! -f "$CONFIG" ]]; then
  echo "error: config not found: $CONFIG" >&2
  echo " run: ontoref config-setup or cp ~/.config/ontoref/config.ncl.example ~/.config/ontoref/config.ncl" >&2
  exit 1
fi

if ! command -v nickel &>/dev/null; then
  echo "error: nickel not found in PATH" >&2
  exit 1
fi

# Resolve NICKEL_IMPORT_PATH: config dir + platform data dir schemas.
# Allows config.ncl to import contracts from the shared data dir without hardcoded paths.
_config_dir="$(dirname "$CONFIG")"
if [[ "$(uname)" == "Darwin" ]]; then
  _data_dir="$HOME/Library/Application Support/ontoref"
else
  _data_dir="$HOME/.local/share/ontoref"
fi
export NICKEL_IMPORT_PATH="${NICKEL_IMPORT_PATH:+${NICKEL_IMPORT_PATH}:}${_config_dir}:${_data_dir}/schemas:${_data_dir}"

# Default NATS stream topology from config dir — project can override via streams_config in config.ncl
export NATS_STREAMS_CONFIG="${NATS_STREAMS_CONFIG:-${_config_dir}/streams.json}"

# Stage 0 — validate project paths and regenerate projects.ncl before nickel sees it.
# Any import in projects.ncl that points to a missing path is removed (with warning).
# This must run before nickel export — Nickel has no filesystem access.
_gen_projects="${_data_dir}/install/gen-projects.nu"
if command -v nu &>/dev/null && [[ -f "$_gen_projects" ]]; then
  nu "$_gen_projects" --config-dir "$_config_dir" || {
    echo "warn: gen-projects.nu failed — proceeding with existing projects.ncl" >&2
  }
else
  echo "warn: gen-projects.nu not found or nu not in PATH — skipping project validation" >&2
fi

# Stage 1 — structural validation via nickel export.
# Emits the validated config as JSON on stdout; nickel's non-zero exit
# propagates under set -e / set -o pipefail.
stage1() {
  nickel export --format json "$CONFIG"
}

# merge_json STRUCT SECRETS — deep-merge two JSON documents with jq's
# recursive '*' operator; keys in SECRETS win on conflict.
# printf (not echo) so payloads starting with '-n'/'-e' aren't mangled, and
# each document sits on its own line for jq --slurp.
merge_json() {
  printf '%s\n%s\n' "$1" "$2" | jq -s '.[0] * .[1]'
}

# Stage 2 — optional secret injection.
# Wraps stage1 output; without --sops/--vault it is a pass-through.
stage2() {
  if [[ -n "$SOPS_FILE" ]]; then
    if ! command -v sops &>/dev/null; then
      echo "error: --sops requested but sops not found in PATH" >&2; exit 1
    fi
    if ! command -v jq &>/dev/null; then
      echo "error: --sops requires jq for JSON merge" >&2; exit 1
    fi
    local struct_json secrets_json
    # Assignments kept separate from 'local' so a failing command
    # substitution is not masked and aborts under set -e.
    struct_json="$(stage1)"
    secrets_json="$(sops --decrypt "$SOPS_FILE")"
    merge_json "$struct_json" "$secrets_json"
  elif [[ -n "$VAULT_PATH" ]]; then
    if ! command -v vault &>/dev/null; then
      echo "error: --vault requested but vault not found in PATH" >&2; exit 1
    fi
    if ! command -v jq &>/dev/null; then
      echo "error: --vault requires jq for JSON merge" >&2; exit 1
    fi
    local struct_json secrets_json
    struct_json="$(stage1)"
    secrets_json="$(vault kv get -format=json "$VAULT_PATH" | jq '.data.data')"
    merge_json "$struct_json" "$secrets_json"
  else
    stage1
  fi
}

if [[ $DRY_RUN -eq 1 ]]; then
  stage2
  exit 0
fi

if [[ ! -x "$DAEMON" ]]; then
  echo "error: daemon not found or not executable: $DAEMON" >&2
  echo " run: just install-daemon" >&2
  exit 1
fi

# Stage 3 — pipe composed JSON to daemon stdin. 'exec' replaces the pipeline's
# subshell with the daemon process; stdin reaches EOF once stage2 completes.
# The ${arr[@]+...} guard is required: under set -u, expanding an empty array
# as "${PASS_ARGS[@]}" errors on bash < 4.4 (e.g. macOS's /bin/bash 3.2).
stage2 | exec "$DAEMON" --config-stdin --config-dir "$_config_dir" ${PASS_ARGS[@]+"${PASS_ARGS[@]}"}