chore: add schemas and just recipes

parent 2b4d548aad
commit 9ea04852a8

justfile (new file, 94 lines)
@@ -0,0 +1,94 @@
# ╔══════════════════════════════════════════════════════════════════════╗
# ║ Knowledge Base - Justfile ║
# ║ Modular workspace orchestration ║
# ║ Features: CLI, MCP, Nickel, NuShell Scripts ║
# ╚══════════════════════════════════════════════════════════════════════╝

# Import feature-specific modules
mod build "justfiles/build.just"       # Build recipes (kogral-core, kogral-cli, kogral-mcp)
mod test "justfiles/test.just"         # Test suite (unit, integration, docs)
mod dev "justfiles/dev.just"           # Development tools (fmt, lint, watch, docs)
mod ci "justfiles/ci.just"             # CI/CD pipeline (validate, test, build)
mod nickel "justfiles/nickel.just"     # Nickel integration (schema validation, export)
mod docs "justfiles/docs.just"         # Documentation (mdBook build, serve)
mod scripts "justfiles/scripts.just"   # NuShell scripts management
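
# Module recipes are addressed as `module::recipe`; for example (illustrative):
#   just build::core                          # "core" recipe from justfiles/build.just
#   just nickel::validate config/defaults.ncl # parameterized recipe from nickel.just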

# === ORCHESTRATION RECIPES ===

# Default: show available commands
default:
    @just --list

# Full development workflow
[doc("Run check + fmt + lint + test")]
check-all:
    @just dev::fmt-check
    @just dev::lint-all
    @just test::all

# Full CI workflow (format + lint + test + build all variants)
[doc("Complete CI pipeline: format + lint + test + build")]
ci-full:
    @just ci::check
    @just ci::test-all
    @just build::all

# Quick start development environment
[doc("Quick dev setup: build default + start watching")]
dev-start:
    @just dev::build
    @just dev::watch

# Build documentation and serve locally
[doc("Build and serve mdBook documentation")]
docs-serve:
    @just docs::serve

# Validate Nickel schemas
[doc("Validate all Nickel configuration schemas")]
validate-schemas:
    @just nickel::validate-all

# === MODULAR HELP ===

# Show help by module: just help build, just help test, etc.
[doc("Show help for a specific module")]
help MODULE="":
    @if [ -z "{{ MODULE }}" ]; then \
        echo "KNOWLEDGE BASE - MODULAR JUSTFILE HELP"; \
        echo ""; \
        echo "Available modules:"; \
        echo "  just help build     Build commands"; \
        echo "  just help test      Test commands"; \
        echo "  just help dev       Development utilities"; \
        echo "  just help ci        CI/CD pipeline"; \
        echo "  just help nickel    Nickel schema tools"; \
        echo "  just help docs      Documentation tools"; \
        echo "  just help scripts   NuShell scripts"; \
        echo ""; \
        echo "Orchestration:"; \
        echo "  just check-all          Format check + lint + test"; \
        echo "  just ci-full            Full CI pipeline"; \
        echo "  just dev-start          Quick dev setup"; \
        echo "  just docs-serve         Build and serve docs"; \
        echo "  just validate-schemas   Validate Nickel schemas"; \
        echo ""; \
        echo "Use: just help <module> for details"; \
    elif [ "{{ MODULE }}" = "build" ]; then \
        just build::help; \
    elif [ "{{ MODULE }}" = "test" ]; then \
        just test::help; \
    elif [ "{{ MODULE }}" = "dev" ]; then \
        just dev::help; \
    elif [ "{{ MODULE }}" = "ci" ]; then \
        just ci::help; \
    elif [ "{{ MODULE }}" = "nickel" ]; then \
        just nickel::help; \
    elif [ "{{ MODULE }}" = "docs" ]; then \
        just docs::help; \
    elif [ "{{ MODULE }}" = "scripts" ]; then \
        just scripts::help; \
    else \
        echo "Unknown module: {{ MODULE }}"; \
        echo "Available: build, test, dev, ci, nickel, docs, scripts"; \
    fi

justfiles/.kogral/config.dev.json (new file, 0 lines)
justfiles/.kogral/config.prod.json (new file, 0 lines)
justfiles/.kogral/config.test.json (new file, 0 lines)
justfiles/49969output (new file, 0 lines)

justfiles/build.just (new file, 121 lines)
@@ -0,0 +1,121 @@
# ╔══════════════════════════════════════════════════════════════════════╗
# ║ BUILD RECIPES ║
# ║ Build workspace with different feature flags ║
# ╚══════════════════════════════════════════════════════════════════════╝

# === FEATURE FLAGS ===
FEATURES_CORE_DEFAULT := "filesystem"
FEATURES_CORE_FULL := "filesystem,surrealdb,fastembed,full"
FEATURES_CORE_STORAGE := "filesystem,surrealdb"
FEATURES_CORE_EMBEDDINGS := "filesystem,fastembed"

# Help for build module
help:
    @echo "BUILD MODULE"
    @echo ""
    @echo "Build default (filesystem backend):"
    @echo "  just build::default"
    @echo ""
    @echo "Build specific components:"
    @echo "  just build::core        Build kogral-core library"
    @echo "  just build::cli         Build kogral-cli binary"
    @echo "  just build::mcp         Build kogral-mcp server"
    @echo ""
    @echo "Build with features:"
    @echo "  just build::core-full   Build kogral-core with all features"
    @echo "  just build::core-db     Build kogral-core with SurrealDB"
    @echo "  just build::core-ai     Build kogral-core with fastembed"
    @echo ""
    @echo "Build combined:"
    @echo "  just build::all         Build all crates"
    @echo "  just build::workspace   Build entire workspace"
    @echo ""
    @echo "Build release:"
    @echo "  just build::release     Release build (all features)"
    @echo ""
    @echo "Check compilation:"
    @echo "  just build::check       Check without building"

# === DEFAULT BUILD ===

# Build workspace with default features
[doc("Build default: filesystem backend only")]
default:
    @echo "=== Building default features ==="
    cargo build --workspace

# === COMPONENT BUILDS ===

# Build kogral-core library
[doc("Build kogral-core library (default features)")]
core:
    @echo "=== Building kogral-core ==="
    cargo build --package kogral-core

# Build kogral-cli binary
[doc("Build kogral-cli command-line tool")]
cli:
    @echo "=== Building kogral-cli ==="
    cargo build --package kogral-cli

# Build kogral-mcp server
[doc("Build kogral-mcp MCP server")]
mcp:
    @echo "=== Building kogral-mcp ==="
    cargo build --package kogral-mcp

# === FEATURE-SPECIFIC BUILDS ===

# Build kogral-core with all features
[doc("Build kogral-core with all features (filesystem, surrealdb, fastembed)")]
core-full:
    @echo "=== Building kogral-core (all features) ==="
    cargo build --package kogral-core --features {{ FEATURES_CORE_FULL }}

# Build kogral-core with SurrealDB backend
[doc("Build kogral-core with SurrealDB support")]
core-db:
    @echo "=== Building kogral-core (SurrealDB) ==="
    cargo build --package kogral-core --features {{ FEATURES_CORE_STORAGE }}

# Build kogral-core with fastembed
[doc("Build kogral-core with local embeddings (fastembed)")]
core-ai:
    @echo "=== Building kogral-core (fastembed) ==="
    cargo build --package kogral-core --features {{ FEATURES_CORE_EMBEDDINGS }}

# === COMBINED BUILDS ===

# Build all crates
[doc("Build all crates (kogral-core, kogral-cli, kogral-mcp)")]
all:
    @echo "=== Building all crates ==="
    @just build::core
    @just build::cli
    @just build::mcp
    @echo "✓ All crates built"

# Build entire workspace
[doc("Build entire workspace with all features")]
workspace:
    @echo "=== Building workspace (all features) ==="
    cargo build --workspace --all-features

# === RELEASE BUILD ===

# Build release version with all features
[doc("Build release version (optimized, all features)")]
release:
    @echo "=== Building release (all features) ==="
    cargo build --workspace --all-features --release
    @echo "✓ Release build complete"
    @ls -lh target/release/kogral-cli target/release/kogral-mcp 2>/dev/null || true

# === VALIDATION ===

# Check compilation without building
[doc("Check code compiles without building artifacts")]
check:
    @echo "=== Checking compilation ==="
    cargo check --workspace --all-features
    @echo "✓ Compilation check passed"

justfiles/ci.just (new file, 83 lines)
@@ -0,0 +1,83 @@
# ╔══════════════════════════════════════════════════════════════════════╗
# ║ CI/CD PIPELINE ║
# ║ Validation and testing for CI ║
# ╚══════════════════════════════════════════════════════════════════════╝

# Help for CI module
help:
    @echo "CI/CD MODULE"
    @echo ""
    @echo "Full pipeline:"
    @echo "  just ci::pipeline        Run complete CI pipeline"
    @echo "  just ci::quick           Quick CI check (fmt + lint + test)"
    @echo ""
    @echo "Individual checks:"
    @echo "  just ci::check           Format + lint check"
    @echo "  just ci::test-all        Run all tests"
    @echo "  just ci::build-all       Build all features"
    @echo "  just ci::validate        Validate Nickel schemas"
    @echo ""
    @echo "Release checks:"
    @echo "  just ci::release-check   Validate release build"

# === FULL PIPELINE ===

# Run complete CI pipeline
[doc("Complete CI pipeline: format + lint + test + build + validate")]
pipeline:
    @echo "=== CI Pipeline ==="
    @just ci::check
    @just ci::validate
    @just ci::test-all
    @just ci::build-all
    @echo "✓ CI pipeline completed successfully"

# Quick CI check (for fast feedback)
[doc("Quick CI: format + lint + test (default features)")]
quick:
    @echo "=== Quick CI Check ==="
    @just ci::check
    @just test::all
    @echo "✓ Quick CI passed"

# === INDIVIDUAL CHECKS ===

# Format and lint check
[doc("Check code format and run linter")]
check:
    @echo "=== Checking format and linting ==="
    cargo fmt --all -- --check
    cargo clippy --workspace --all-features -- -D warnings
    @echo "✓ Format and lint check passed"

# Run all tests (all features)
[doc("Run all tests with all features")]
test-all:
    @echo "=== Running all tests ==="
    cargo test --workspace --all-features
    @echo "✓ All tests passed"

# Build all feature combinations
[doc("Build all feature combinations")]
build-all:
    @echo "=== Building all features ==="
    cargo build --workspace
    cargo build --workspace --all-features
    @echo "✓ All builds successful"

# Validate Nickel schemas
[doc("Validate all Nickel configuration schemas")]
validate:
    @echo "=== Validating Nickel schemas ==="
    @just nickel::validate-all
    @echo "✓ Nickel validation passed"

# === RELEASE CHECKS ===

# Validate release build
[doc("Validate release build (optimized)")]
release-check:
    @echo "=== Validating release build ==="
    cargo build --workspace --all-features --release
    cargo test --workspace --all-features --release
    @echo "✓ Release build validated"

justfiles/dev.just (new file, 123 lines)
@@ -0,0 +1,123 @@
# ╔══════════════════════════════════════════════════════════════════════╗
# ║ DEVELOPMENT UTILITIES ║
# ║ Watch, format, lint, docs ║
# ╚══════════════════════════════════════════════════════════════════════╝

# Help for dev module
help:
    @echo "DEVELOPMENT MODULE"
    @echo ""
    @echo "Code quality:"
    @echo "  just dev::fmt         Format code (Rust + TOML + Nickel)"
    @echo "  just dev::fmt-check   Check format (no changes)"
    @echo "  just dev::lint        Run clippy linter"
    @echo "  just dev::lint-all    Lint Rust + Nickel + NuShell"
    @echo "  just dev::audit       Audit dependencies"
    @echo ""
    @echo "Documentation:"
    @echo "  just dev::docs        Generate and open docs"
    @echo "  just dev::docs-gen    Generate docs only"
    @echo ""
    @echo "Common tasks:"
    @echo "  just dev::build       Build default features"
    @echo "  just dev::watch       Watch and rebuild on changes"
    @echo "  just dev::check       Check + fmt + lint"
    @echo ""
    @echo "Inspect:"
    @echo "  just dev::info        Show workspace info"
    @echo "  just dev::tree        Show dependency tree"

# === CODE FORMATTING ===

# Format all code (Rust + TOML + Nickel)
[doc("Format Rust, TOML, and Nickel code")]
fmt:
    @echo "=== Formatting code ==="
    cargo fmt --all
    @echo "✓ Rust code formatted"

# Check code formatting without modifying
[doc("Check code format (no changes)")]
fmt-check:
    @echo "=== Checking code format ==="
    cargo fmt --all -- --check
    @echo "✓ Code format check passed"

# === LINTING ===

# Run clippy linter on Rust code
[doc("Run clippy linter")]
lint:
    @echo "=== Running clippy ==="
    cargo clippy --workspace --all-features -- -D warnings
    @echo "✓ Clippy check passed"

# Lint all languages (Rust + Nickel + NuShell)
[doc("Lint Rust, Nickel, and NuShell code")]
lint-all:
    @echo "=== Linting all code ==="
    @just dev::lint
    @just nickel::lint
    @echo "✓ All linting passed"

# Audit dependencies for security vulnerabilities
[doc("Audit dependencies")]
audit:
    @echo "=== Auditing dependencies ==="
    cargo audit
    @echo "✓ Dependency audit passed"

# === DOCUMENTATION ===

# Generate and open documentation
[doc("Generate and open Rust docs")]
docs:
    @echo "=== Generating documentation ==="
    cargo doc --workspace --all-features --no-deps --open

# Generate documentation only (no open)
[doc("Generate Rust docs only")]
docs-gen:
    @echo "=== Generating documentation ==="
    cargo doc --workspace --all-features --no-deps

# === COMMON TASKS ===

# Build default features
[doc("Build workspace with default features")]
build:
    @cargo build --workspace

# Watch and rebuild on changes
[doc("Watch for changes and rebuild")]
watch:
    @echo "=== Watching for changes ==="
    cargo watch -x "build --workspace"

# Check + format + lint
[doc("Run check + fmt + lint")]
check:
    @just dev::fmt-check
    @just dev::lint
    @cargo check --workspace --all-features
    @echo "✓ All checks passed"

# === INSPECT ===

# Show workspace information
[doc("Show workspace information")]
info:
    @echo "=== Workspace Information ==="
    @cargo tree --workspace --depth 1
    @echo ""
    @echo "=== Crates ==="
    @ls -1 crates/
    @echo ""
    @echo "=== Features (kogral-core) ==="
    @cargo tree --package kogral-core --features full --depth 0

# Show dependency tree
[doc("Show complete dependency tree")]
tree:
    @echo "=== Dependency Tree ==="
    cargo tree --workspace

justfiles/docs.just (new file, 99 lines)
@@ -0,0 +1,99 @@
# ╔══════════════════════════════════════════════════════════════════════╗
# ║ DOCUMENTATION TOOLS ║
# ║ mdBook build, serve, and management ║
# ╚══════════════════════════════════════════════════════════════════════╝

# === PATHS ===
DOCS_DIR := "docs"
BOOK_DIR := "book"

# Help for docs module
help:
    @echo "DOCUMENTATION MODULE"
    @echo ""
    @echo "Build and serve:"
    @echo "  just docs::build         Build mdBook"
    @echo "  just docs::serve         Serve mdBook locally"
    @echo "  just docs::watch         Watch and rebuild on changes"
    @echo ""
    @echo "Validation:"
    @echo "  just docs::test          Test documentation examples"
    @echo "  just docs::check-links   Check for broken links"
    @echo ""
    @echo "Cleanup:"
    @echo "  just docs::clean         Clean build artifacts"

# === BUILD AND SERVE ===

# Build mdBook documentation
[doc("Build mdBook documentation")]
build:
    @echo "=== Building mdBook ==="
    @if command -v mdbook >/dev/null 2>&1; then \
        cd {{ DOCS_DIR }} && mdbook build; \
        echo "✓ mdBook built successfully"; \
    else \
        echo "Error: mdbook not installed"; \
        echo "Install with: cargo install mdbook"; \
        exit 1; \
    fi

# Serve mdBook locally
[doc("Serve mdBook locally (http://localhost:3000)")]
serve:
    @echo "=== Serving mdBook ==="
    @if command -v mdbook >/dev/null 2>&1; then \
        echo "Opening http://localhost:3000"; \
        cd {{ DOCS_DIR }} && mdbook serve --open; \
    else \
        echo "Error: mdbook not installed"; \
        echo "Install with: cargo install mdbook"; \
        exit 1; \
    fi

# Watch and rebuild on changes
[doc("Watch docs and rebuild on changes")]
watch:
    @echo "=== Watching documentation ==="
    @if command -v mdbook >/dev/null 2>&1; then \
        cd {{ DOCS_DIR }} && mdbook watch; \
    else \
        echo "Error: mdbook not installed"; \
        echo "Install with: cargo install mdbook"; \
        exit 1; \
    fi

# === VALIDATION ===

# Test documentation code examples
[doc("Test documentation code examples")]
test:
    @echo "=== Testing documentation examples ==="
    @if command -v mdbook >/dev/null 2>&1; then \
        cd {{ DOCS_DIR }} && mdbook test; \
        echo "✓ Documentation tests passed"; \
    else \
        echo "Error: mdbook not installed"; \
        exit 1; \
    fi

# Check for broken links
[doc("Check for broken links in documentation")]
check-links:
    @echo "=== Checking for broken links ==="
    @if command -v mdbook-linkcheck >/dev/null 2>&1; then \
        cd {{ DOCS_DIR }} && mdbook-linkcheck; \
        echo "✓ No broken links found"; \
    else \
        echo "Warning: mdbook-linkcheck not installed"; \
        echo "Install with: cargo install mdbook-linkcheck"; \
    fi

# === CLEANUP ===

# Clean build artifacts
[doc("Clean mdBook build artifacts")]
clean:
    @echo "=== Cleaning mdBook artifacts ==="
    @rm -rf {{ DOCS_DIR }}/{{ BOOK_DIR }}
    @echo "✓ Cleaned build artifacts"

justfiles/nickel.just (new file, 184 lines)
@@ -0,0 +1,184 @@
# ╔══════════════════════════════════════════════════════════════════════╗
# ║ NICKEL INTEGRATION ║
# ║ Schema validation and configuration export ║
# ╚══════════════════════════════════════════════════════════════════════╝

# === PATHS ===
SCHEMA_DIR := "schemas"
CONFIG_DIR := "config"
TYPEDIALOG_DIR := ".typedialog/kogral"
TYPEDIALOG_CORE := ".typedialog/kogral/core"
TYPEDIALOG_MODES := ".typedialog/kogral/modes"
TYPEDIALOG_SCRIPTS := ".typedialog/kogral/scripts"

# Help for Nickel module
help:
    @echo "NICKEL MODULE"
    @echo ""
    @echo "Validation:"
    @echo "  just nickel::validate-all      Validate all schemas (legacy)"
    @echo "  just nickel::validate-config   Validate TypeDialog config schemas"
    @echo "  just nickel::validate FILE     Validate specific schema"
    @echo "  just nickel::typecheck FILE    Typecheck Nickel file"
    @echo ""
    @echo "Export:"
    @echo "  just nickel::export FILE       Export config to JSON"
    @echo "  just nickel::export-all        Export all example configs (legacy)"
    @echo "  just nickel::export-modes      Export all TypeDialog modes to JSON"
    @echo "  just nickel::export-dev        Export dev mode configuration"
    @echo "  just nickel::export-prod       Export prod mode configuration"
    @echo "  just nickel::export-test       Export test mode configuration"
    @echo ""
    @echo "Sync & Generate:"
    @echo "  just nickel::sync-to-schemas   Sync .typedialog/kogral → schemas/kogral"
    @echo "  just nickel::generate-config   Generate .kogral/config.json (default: dev mode)"
    @echo ""
    @echo "Linting:"
    @echo "  just nickel::lint              Lint Nickel schemas"
    @echo "  just nickel::fmt FILE          Format Nickel file"
    @echo ""
    @echo "Testing:"
    @echo "  just nickel::test              Test schema examples"

# === VALIDATION ===

# Validate all Nickel schemas
[doc("Validate all Nickel schemas")]
validate-all:
    @echo "=== Validating Nickel schemas ==="
    @for file in {{ SCHEMA_DIR }}/*.ncl; do \
        echo "Validating $file..."; \
        nickel typecheck "$file" || exit 1; \
    done
    @echo "✓ All schemas valid"

# Validate specific Nickel file
[doc("Validate specific Nickel file")]
validate FILE:
    @echo "=== Validating {{ FILE }} ==="
    nickel typecheck "{{ FILE }}"
    @echo "✓ {{ FILE }} is valid"

# Typecheck Nickel file
[doc("Typecheck Nickel file")]
typecheck FILE:
    @nickel typecheck "{{ FILE }}"

# === EXPORT ===

# Export config file to JSON
[doc("Export Nickel config to JSON")]
export FILE:
    @echo "=== Exporting {{ FILE }} to JSON ==="
    @nickel export --format json "{{ FILE }}"

# Export all example configs
[doc("Export all example configs to JSON")]
export-all:
    @echo "=== Exporting all configs ==="
    @for file in {{ CONFIG_DIR }}/*.ncl; do \
        output="${file%.ncl}.json"; \
        echo "Exporting $file → $output"; \
        nickel export --format json "$file" > "$output"; \
    done
    @echo "✓ All configs exported"

# === LINTING ===

# Lint Nickel schemas (check formatting and style)
[doc("Lint Nickel schemas")]
lint:
    @echo "=== Linting Nickel files ==="
    @for file in {{ SCHEMA_DIR }}/*.ncl {{ CONFIG_DIR }}/*.ncl; do \
        echo "Checking $file..."; \
        nickel typecheck "$file" || exit 1; \
    done
    @echo "✓ Nickel lint passed"

# Format Nickel file
[doc("Format Nickel file")]
fmt FILE:
    @echo "=== Formatting {{ FILE }} ==="
    nickel format "{{ FILE }}"

# === TYPEDIALOG CONFIGURATION ===

# Validate TypeDialog config schemas
[doc("Validate TypeDialog config schemas (.typedialog/kogral/)")]
validate-config:
    @echo "=== Validating TypeDialog config schemas ==="
    nu {{ justfile_directory() }}/.typedialog/kogral/scripts/validate-config.nu

# Export all TypeDialog modes to JSON
[doc("Export all TypeDialog modes to JSON")]
export-modes: export-dev export-prod export-test
    @echo "✓ All modes exported"

# Export dev mode configuration
[doc("Export dev mode configuration")]
export-dev:
    @mkdir -p .kogral && \
    nickel export --format json .typedialog/kogral/modes/dev.ncl > .kogral/config.dev.json && \
    echo "  ✓ Exported: .kogral/config.dev.json"

# Export prod mode configuration
[doc("Export prod mode configuration")]
export-prod:
    @mkdir -p .kogral && \
    nickel export --format json .typedialog/kogral/modes/prod.ncl > .kogral/config.prod.json && \
    echo "  ✓ Exported: .kogral/config.prod.json"

# Export test mode configuration
[doc("Export test mode configuration")]
export-test:
    @mkdir -p .kogral && \
    nickel export --format json .typedialog/kogral/modes/test.ncl > .kogral/config.test.json && \
    echo "  ✓ Exported: .kogral/config.test.json"

# Sync .typedialog/kogral to schemas/kogral (source of truth)
[doc("Sync .typedialog/kogral → schemas/kogral")]
sync-to-schemas:
    @echo "=== Syncing TypeDialog config to schemas ==="
    @mkdir -p schemas/kogral
    @echo "  Copying core schemas..."
    @cp {{ TYPEDIALOG_CORE }}/contracts.ncl schemas/kogral/
    @cp {{ TYPEDIALOG_CORE }}/defaults.ncl schemas/kogral/
    @cp {{ TYPEDIALOG_CORE }}/helpers.ncl schemas/kogral/
    @mkdir -p schemas/kogral/modes
    @echo "  Copying mode overlays..."
    @cp {{ TYPEDIALOG_MODES }}/dev.ncl schemas/kogral/modes/
    @cp {{ TYPEDIALOG_MODES }}/prod.ncl schemas/kogral/modes/
    @cp {{ TYPEDIALOG_MODES }}/test.ncl schemas/kogral/modes/
    @echo "✓ Sync completed"

# Generate .kogral/config.json for CLI/MCP (default: dev mode)
[doc("Generate .kogral/config.json (default: dev mode)")]
generate-config MODE="dev":
    @echo "=== Generating config from {{ MODE }} mode ==="
    @if [ "{{ MODE }}" != "dev" ] && [ "{{ MODE }}" != "prod" ] && [ "{{ MODE }}" != "test" ]; then \
        echo "Error: Invalid mode. Must be dev, prod, or test"; \
        exit 1; \
    fi
    @mkdir -p .kogral
    nu {{ justfile_directory() }}/.typedialog/kogral/scripts/generate-configs.nu --mode {{ MODE }} --output .kogral
    @echo "✓ Configuration generated at .kogral/config.json"

# === TESTING ===

# Test schema examples
[doc("Test schema examples")]
test:
    @echo "=== Testing Nickel schemas ==="
    @just nickel::validate-all
    @just nickel::export-all
    @echo "✓ Schema tests passed"

# Test TypeDialog configuration pipeline
[doc("Test TypeDialog configuration pipeline")]
test-config:
    @echo "=== Testing TypeDialog config pipeline ==="
    @just nickel::validate-config
    @just nickel::export-modes
    @just nickel::sync-to-schemas
    @just nickel::generate-config dev
    @echo "✓ TypeDialog config pipeline tests passed"

justfiles/scripts.just (new file, 137 lines)
@@ -0,0 +1,137 @@
# ╔══════════════════════════════════════════════════════════════════════╗
# ║ NUSHELL SCRIPTS ║
# ║ Maintenance and automation scripts ║
# ╚══════════════════════════════════════════════════════════════════════╝

# === PATHS ===
SCRIPTS_DIR := "scripts"

# Help for scripts module
help:
    @echo "NUSHELL SCRIPTS MODULE"
    @echo ""
    @echo "Sync and backup:"
    @echo "  just scripts::sync                Sync filesystem with SurrealDB"
    @echo "  just scripts::backup              Backup knowledge base"
    @echo "  just scripts::reindex             Rebuild embeddings index"
    @echo ""
    @echo "Import/Export:"
    @echo "  just scripts::import-logseq DIR   Import from Logseq graph"
    @echo "  just scripts::export-logseq DIR   Export to Logseq format"
    @echo ""
    @echo "Statistics:"
    @echo "  just scripts::stats               Show KOGRAL statistics"
    @echo "  just scripts::stats-json          Show stats in JSON format"
    @echo ""
    @echo "Maintenance:"
    @echo "  just scripts::migrate             Run schema migrations"
    @echo "  just scripts::check-scripts       Validate NuShell scripts"

# === SYNC AND BACKUP ===

# Sync filesystem with SurrealDB
[doc("Sync filesystem with SurrealDB storage")]
sync DIRECTION="bidirectional":
    @echo "=== Syncing KOGRAL ({{ DIRECTION }}) ==="
    @if command -v nu >/dev/null 2>&1; then \
        nu {{ SCRIPTS_DIR }}/kogral-sync.nu --direction {{ DIRECTION }}; \
    else \
        echo "Error: nushell not installed"; \
        exit 1; \
    fi

# Backup knowledge base
[doc("Backup knowledge base to archive")]
backup:
    @echo "=== Backing up KOGRAL ==="
    @if command -v nu >/dev/null 2>&1; then \
        nu {{ SCRIPTS_DIR }}/kogral-backup.nu --format tar --compress; \
    else \
        echo "Error: nushell not installed"; \
        exit 1; \
    fi

# Rebuild embeddings index
[doc("Rebuild embeddings index")]
reindex PROVIDER="fastembed":
    @echo "=== Reindexing embeddings ({{ PROVIDER }}) ==="
    @if command -v nu >/dev/null 2>&1; then \
        nu {{ SCRIPTS_DIR }}/kogral-reindex.nu --provider {{ PROVIDER }}; \
    else \
        echo "Error: nushell not installed"; \
        exit 1; \
    fi

# === IMPORT/EXPORT ===

# Import from Logseq graph
[doc("Import from Logseq graph directory")]
import-logseq DIR:
    @echo "=== Importing from Logseq: {{ DIR }} ==="
    @if command -v nu >/dev/null 2>&1; then \
        nu {{ SCRIPTS_DIR }}/kogral-import-logseq.nu "{{ DIR }}"; \
    else \
        echo "Error: nushell not installed"; \
        exit 1; \
    fi

# Export to Logseq format
[doc("Export to Logseq format")]
export-logseq DIR:
    @echo "=== Exporting to Logseq: {{ DIR }} ==="
    @if command -v nu >/dev/null 2>&1; then \
        nu {{ SCRIPTS_DIR }}/kogral-export-logseq.nu "{{ DIR }}"; \
    else \
        echo "Error: nushell not installed"; \
        exit 1; \
    fi

# === STATISTICS ===

# Show KOGRAL statistics (summary format)
[doc("Show knowledge base statistics")]
stats:
    @if command -v nu >/dev/null 2>&1; then \
        nu {{ SCRIPTS_DIR }}/kogral-stats.nu --format summary --show-tags; \
    else \
        echo "Error: nushell not installed"; \
        exit 1; \
    fi

# Show KOGRAL statistics in JSON format
[doc("Show knowledge base statistics (JSON)")]
stats-json:
    @if command -v nu >/dev/null 2>&1; then \
        nu {{ SCRIPTS_DIR }}/kogral-stats.nu --format json; \
    else \
        echo "Error: nushell not installed"; \
        exit 1; \
    fi

# === MAINTENANCE ===

# Run schema migrations
[doc("Run database schema migrations")]
migrate TARGET="latest":
    @echo "=== Running migrations (target: {{ TARGET }}) ==="
    @if command -v nu >/dev/null 2>&1; then \
        nu {{ SCRIPTS_DIR }}/kogral-migrate.nu --target {{ TARGET }}; \
    else \
        echo "Error: nushell not installed"; \
        exit 1; \
    fi

# Validate NuShell scripts syntax
[doc("Validate NuShell scripts")]
check-scripts:
    @echo "=== Validating NuShell scripts ==="
    @if command -v nu >/dev/null 2>&1; then \
        for script in {{ SCRIPTS_DIR }}/*.nu; do \
            echo "Checking $script..."; \
            nu -c "source $script; help" >/dev/null 2>&1 || { echo "✗ $script has syntax errors"; exit 1; }; \
        done; \
        echo "✓ All scripts valid"; \
    else \
        echo "Error: nushell not installed"; \
        exit 1; \
    fi

justfiles/test.just (new file, 89 lines)
@@ -0,0 +1,89 @@
# ╔══════════════════════════════════════════════════════════════════════╗
# ║ TEST RECIPES ║
# ║ Test workspace with different feature flags ║
# ╚══════════════════════════════════════════════════════════════════════╝

# Help for test module
help:
    @echo "TEST MODULE"
    @echo ""
    @echo "Run all tests:"
    @echo "  just test::all            All tests (default features)"
    @echo "  just test::all-features   All tests with all features"
    @echo ""
    @echo "Test specific crates:"
    @echo "  just test::core           Test kogral-core library"
    @echo "  just test::cli            Test kogral-cli binary"
    @echo "  just test::mcp            Test kogral-mcp server"
    @echo ""
    @echo "Test specific features:"
    @echo "  just test::core-db        Test SurrealDB backend"
    @echo "  just test::core-ai        Test fastembed integration"
    @echo ""
    @echo "Integration & docs:"
    @echo "  just test::integration    Run integration tests only"
    @echo "  just test::doc            Test documentation examples"

# === FULL TEST SUITES ===

# Run all tests (default features)
[doc("Run all tests with default features")]
all:
    @echo "=== Testing workspace (default features) ==="
    cargo test --workspace --lib
    @echo "✓ All tests passed"

# Run all tests with all features enabled
[doc("Run all tests with all features")]
all-features:
    @echo "=== Testing workspace (all features) ==="
    cargo test --workspace --all-features
    @echo "✓ All tests passed (all features)"

# === CRATE-SPECIFIC TESTS ===

# Test kogral-core library
[doc("Test kogral-core library")]
core:
    @echo "=== Testing kogral-core ==="
    cargo test --package kogral-core --lib

# Test kogral-cli binary
[doc("Test kogral-cli binary")]
cli:
    @echo "=== Testing kogral-cli ==="
    cargo test --package kogral-cli

# Test kogral-mcp server
[doc("Test kogral-mcp server")]
mcp:
    @echo "=== Testing kogral-mcp ==="
    cargo test --package kogral-mcp --lib

# === FEATURE-SPECIFIC TESTS ===

# Test SurrealDB backend
[doc("Test kogral-core with SurrealDB backend")]
core-db:
    @echo "=== Testing kogral-core (SurrealDB) ==="
    cargo test --package kogral-core --features surrealdb

# Test fastembed integration
[doc("Test kogral-core with fastembed")]
core-ai:
    @echo "=== Testing kogral-core (fastembed) ==="
    cargo test --package kogral-core --features fastembed

# === INTEGRATION & DOCS ===

# Run integration tests only
[doc("Run integration tests")]
integration:
    @echo "=== Running integration tests ==="
    cargo test --workspace --test '*'

# Test documentation examples
[doc("Test doc examples and doctests")]
doc:
    @echo "=== Testing documentation examples ==="
    cargo test --workspace --doc

schemas/README.md (new file, 206 lines)
@@ -0,0 +1,206 @@
# Knowledge Base Nickel Schemas

This directory contains Nickel schema definitions for the knowledge base configuration system.

## Overview

KOGRAL uses a **config-driven architecture**, with Nickel providing type-safe configuration:

```text
.ncl files → nickel export --format json → JSON → serde → Rust structs
```

**Benefits:**

- Type safety at config definition time (Nickel type checker)
- Composition and reuse (Nickel imports)
- Runtime type-safe Rust structs (serde)
- Clear contracts and documentation

## Schema Files

| File | Purpose |
| --- | --- |
| `types.ncl` | Shared type definitions (enums, primitives) |
| `kogral-config.ncl` | Main configuration schema (KbConfig) |
| `frontmatter.ncl` | Document frontmatter schema (YAML in .md files) |

## Usage

### Export Schema to JSON

```bash
# Export a configuration file
nickel export --format json config/defaults.ncl > .kogral/config.json

# Export minimal config
nickel export --format json config/minimal.ncl > .kogral/config.json

# Export production config
nickel export --format json config/production.ncl > .kogral/config.json
```

### Create Custom Configuration

```nickel
# my-kogral-config.ncl
let Schema = import "schemas/kogral-config.ncl" in

{
  graph = {
    name = "my-project",
    version = "1.0.0",
  },

  embeddings = {
    provider = 'ollama,
    model = "llama2",
  },

  # Other fields use defaults
} | Schema.KbConfig
```

### Type Checking

Nickel validates your configuration against the schema:

```bash
# Check configuration without exporting
nickel typecheck my-kogral-config.ncl

# Export (also type-checks)
nickel export --format json my-kogral-config.ncl
```

## Schema Structure

### Main Config (`kogral-config.ncl`)

```nickel
KbConfig = {
  graph: GraphConfig,              # Graph metadata
  inheritance: InheritanceConfig,  # Guideline inheritance
  storage: StorageConfig,          # Storage backends
  embeddings: EmbeddingConfig,     # Embedding providers
  templates: TemplateConfig,       # Tera templates
  query: QueryConfig,              # Search behavior
  mcp: McpConfig,                  # MCP server settings
  sync: SyncConfig,                # Filesystem ↔ DB sync
}
```

### Frontmatter (`frontmatter.ncl`)

```nickel
Frontmatter = {
  id: String,                # UUID
  type: NodeType,            # note, decision, etc.
  title: String,             # Human-readable title
  created: Timestamp,        # ISO 8601
  modified: Timestamp,       # ISO 8601
  tags: Array String,        # Categorization
  status: NodeStatus,        # draft, active, etc.
  relates_to: Array String,  # Relationships
  # ... type-specific fields
}
```

## Examples

### Default Configuration

See `config/defaults.ncl` for a fully documented example with sensible defaults.

### Minimal Configuration

```nickel
{
  graph = { name = "my-kogral" },
} | Schema.KbConfig
```

All other fields will use schema defaults.

### Production Configuration

See `config/production.ncl` for a production setup with:

- SurrealDB backend enabled
- API-based embeddings (OpenAI)
- Optimized sync settings

## Field Defaults

All config fields have sensible defaults. Required fields:

- `graph.name` - Graph identifier

Optional fields (with defaults):

- `graph.version` → `"1.0.0"`
- `storage.primary` → `'filesystem`
- `embeddings.provider` → `'fastembed`
- `query.similarity_threshold` → `0.4`
- `mcp.server.transport` → `'stdio`
- ... (see schema files for complete list)
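
Overriding a default works like setting any other field; for example (illustrative values):

```nickel
{
  graph = { name = "my-kogral" },
  query = { similarity_threshold = 0.6, max_results = 25 },
} | Schema.KbConfig
```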

## Type Definitions

### Enums

Defined in `types.ncl`:

```nickel
NodeType = [| 'note, 'decision, 'guideline, 'pattern, 'journal, 'execution |]
NodeStatus = [| 'draft, 'active, 'superseded, 'archived |]
StorageType = [| 'filesystem, 'memory |]
EmbeddingProvider = [| 'openai, 'claude, 'ollama, 'fastembed |]
```

### Primitives

- `String` - Text values
- `Number` - Numeric values
- `Bool` - Boolean values
- `Array T` - Lists of type T
- `{ _ | T }` - Map/dictionary with values of type T
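
For instance, the schema's `templates.custom` field is a `{ _ | String }` map from template name to path (the entry shown is illustrative):

```nickel
custom = {
  "weekly-review" = "templates/custom/weekly-review.md.tera",
}
```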

## Validation

Nickel enforces:

- **Required fields**: Must be present
- **Type constraints**: Values must match declared types
- **Enum values**: Must be one of the allowed variants
- **Default values**: Applied automatically if the field is omitted

Example error:

```bash
$ nickel export invalid-config.ncl
error: type error
  ┌─ invalid-config.ncl:5:15
  │
5 │   provider = 'gpt4,
  │              ^^^^^ this expression has type [| 'gpt4 |]
  │
  = Expected an expression of type [| 'openai, 'claude, 'ollama, 'fastembed |]
```

## Integration with Rust

The Rust code loads the exported JSON via serde:

```rust
use kogral_core::config::loader::load_config;

// Load from .kogral/config.{ncl,toml,json}
let config = load_config(None, None)?;

// Access type-safe fields
println!("Graph: {}", config.graph.name);
println!("Provider: {:?}", config.embeddings.provider);
```
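
The exported JSON lands in plain serde structs. A minimal sketch of the receiving types, assuming `serde` derive (abbreviated; the full definitions live in `crates/kogral-core/src/config/schema.rs`):

```rust
use serde::Deserialize;

// Mirrors the Nickel default for graph.version.
fn default_version() -> String {
    "1.0.0".to_string()
}

#[derive(Debug, Deserialize)]
pub struct GraphConfig {
    pub name: String, // required: the schema gives it no default
    #[serde(default = "default_version")]
    pub version: String,
    #[serde(default)]
    pub description: String,
}

#[derive(Debug, Deserialize)]
pub struct KbConfig {
    pub graph: GraphConfig,
    // storage, embeddings, templates, query, mcp, sync elided here
}
```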

## References

- [Nickel Language](https://nickel-lang.org/)
- [Nickel Documentation](https://nickel-lang.org/user-manual/)
- Rust schema types: `crates/kogral-core/src/config/schema.rs`

schemas/frontmatter.ncl (new file, 180 lines)
@@ -0,0 +1,180 @@
# Document Frontmatter Schema
#
# Schema for YAML frontmatter in knowledge base documents.
# This frontmatter is embedded in markdown files and parsed by kogral-core.
#
# Format:
#   ---
#   <frontmatter fields>
#   ---
#
#   <markdown content>
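#
# Example with illustrative values (only id/type/title/created/modified are required):
#   ---
#   id: "550e8400-e29b-41d4-a716-446655440000"
#   type: note
#   title: "Example note"
#   created: "2024-01-15T09:00:00Z"
#   modified: "2024-01-15T09:00:00Z"
#   tags: [rust, testing]
#   status: draft
#   ---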

let Types = import "types.ncl" in

{
  # Frontmatter for knowledge base documents
  Frontmatter = {
    id
      | String
      | doc "Unique identifier (UUID)",

    type
      | Types.NodeType
      | doc "Node type (note, decision, guideline, etc.)",

    title
      | String
      | doc "Human-readable title",

    created
      | Types.Timestamp
      | doc "Creation timestamp (ISO 8601)",

    modified
      | Types.Timestamp
      | doc "Last modification timestamp (ISO 8601)",

    tags
      | Array String
      | doc "Tags for categorization"
      | default = [],

    status
      | Types.NodeStatus
      | doc "Current status (draft, active, etc.)"
      | default = 'draft,

    # Relationship fields (compatible with Logseq [[wikilinks]])
    relates_to
      | Array String
      | doc "Related node IDs"
      | default = [],

    depends_on
      | Array String
      | doc "Dependency node IDs (must exist/be read first)"
      | default = [],

    implements
      | Array String
      | doc "Implementation node IDs (this implements those patterns)"
      | default = [],

    extends
      | Array String
      | doc "Extension node IDs (this extends/overrides those)"
      | default = [],

    # Cross-project reference
    project
      | String
      | doc "Project identifier (for cross-project links)"
      | optional,

    # Decision-specific fields (ADR format)
    context
      | String
      | doc "Decision context (what problem are we solving?)"
      | optional,

    decision
      | String
      | doc "The decision made"
      | optional,

    consequences
      | Array String
      | doc "Consequences of the decision"
      | optional,

    # Guideline-specific fields
    language
      | String
      | doc "Programming language (rust, nushell, etc.)"
      | optional,

    category
      | String
      | doc "Category (error-handling, testing, etc.)"
      | optional,

    # Pattern-specific fields
    problem
      | String
      | doc "Problem statement"
      | optional,

    solution
      | String
      | doc "Solution description"
      | optional,

    forces
      | Array String
      | doc "Forces/constraints affecting the pattern"
      | optional,

    # Execution-specific fields (from Vapora KG)
    task_type
      | String
      | doc "Type of task executed"
      | optional,

    agent
      | String
      | doc "Agent that executed the task"
      | optional,

    outcome
      | String
      | doc "Execution outcome (success, failure, etc.)"
      | optional,

    duration_ms
      | Number
      | doc "Execution duration in milliseconds"
      | optional,
  },

  # Minimal frontmatter (required fields only)
  MinimalFrontmatter = {
    id | String,
    type | Types.NodeType,
    title | String,
    created | Types.Timestamp,
    modified | Types.Timestamp,
  },

  # Decision frontmatter (ADR)
  DecisionFrontmatter = Frontmatter & {
    type | default = 'decision,
    context | String,
    decision | String,
    consequences | Array String | default = [],
  },

  # Guideline frontmatter
  GuidelineFrontmatter = Frontmatter & {
    type | default = 'guideline,
    language | String,
    category | String | optional,
  },

  # Pattern frontmatter
  PatternFrontmatter = Frontmatter & {
    type | default = 'pattern,
    problem | String,
    solution | String,
    forces | Array String | default = [],
  },

  # Execution record frontmatter
  ExecutionFrontmatter = Frontmatter & {
    type | default = 'execution,
    task_type | String,
    agent | String | optional,
    outcome | String,
    duration_ms | Number | optional,
  },
}

schemas/kogral-config.ncl (new file, 387 lines)
@@ -0,0 +1,387 @@
# Knowledge Base Configuration Schema
#
# Main configuration schema for the knowledge base system.
# This schema is exported to JSON and loaded into Rust structs.
#
# Usage:
#   nickel export --format json kogral-config.ncl > config.json

let Types = import "types.ncl" in

{
  # Graph metadata configuration
  GraphConfig = {
    name
      | String
      | doc "Graph name/identifier",

    version
      | Types.Version
      | doc "Graph version (semver)"
      | default = "1.0.0",

    description
      | String
      | doc "Human-readable description"
      | default = "",
  },

  # Inheritance configuration for guidelines
  InheritanceConfig = {
    base
      | Types.Path
      | doc "Base path for shared KOGRAL (resolves $TOOLS_PATH at runtime, defaults to $HOME/Tools)"
      | default = "$TOOLS_PATH/.kogral-shared",

    guidelines
      | Array Types.Path
      | doc "Additional guideline paths to inherit"
      | default = [],

    priority
      | Types.PositiveInt
      | doc "Override priority (higher = wins)"
      | default = 100,
  },

  # Secondary storage configuration
  SecondaryStorageConfig = {
    enabled
      | Bool
      | doc "Whether secondary storage is enabled"
      | default = false,

    type
      | Types.SecondaryStorageType
      | doc "Secondary storage backend type"
      | default = 'surrealdb,

    url
      | Types.Url
      | doc "Connection URL for secondary storage"
      | default = "ws://localhost:8000",

    namespace
      | String
      | doc "Database namespace"
      | default = "kogral",

    database
      | String
      | doc "Database name"
      | default = "default",
  },

  # Storage backend configuration
  StorageConfig = {
    primary
      | Types.StorageType
      | doc "Primary storage backend type"
      | default = 'filesystem,

    secondary
      | SecondaryStorageConfig
      | doc "Optional secondary storage (for scaling/search)"
      | default = {},
  },

  # Embedding provider configuration
  EmbeddingConfig = {
    enabled
      | Bool
      | doc "Whether embeddings are enabled"
      | default = true,

    provider
      | Types.EmbeddingProvider
      | doc "Embedding provider selection"
      | default = 'fastembed,

    model
      | String
      | doc "Model name/identifier"
      | default = "BAAI/bge-small-en-v1.5",

    dimensions
      | Types.PositiveInt
      | doc "Vector dimensions"
      | default = 384,

    api_key_env
      | String
      | doc "Environment variable name for API key"
      | default = "OPENAI_API_KEY",
  },

  # Logseq blocks support configuration
  BlocksConfig = {
    enabled
      | Bool
      | doc "Enable Logseq content blocks parsing and queries"
      | default = false,

    parse_on_import
      | Bool
      | doc "Automatically parse blocks when importing Logseq pages"
      | default = true,

    serialize_on_export
      | Bool
      | doc "Serialize blocks to outliner format on export"
      | default = true,

    enable_mcp_tools
      | Bool
      | doc "Enable block-related MCP tools (kogral/find_blocks, kogral/find_todos, kogral/find_cards)"
      | default = true,
  },

  # Template mappings per node type
  TemplateMap = {
    note
      | String
      | doc "Note template filename"
      | default = "note.md.tera",

    decision
      | String
      | doc "Decision (ADR) template filename"
      | default = "decision.md.tera",

    guideline
      | String
      | doc "Guideline template filename"
      | default = "guideline.md.tera",

    pattern
      | String
      | doc "Pattern template filename"
      | default = "pattern.md.tera",

    journal
      | String
      | doc "Journal (daily notes) template filename"
      | default = "journal.md.tera",

    execution
      | String
      | doc "Execution record template filename"
      | default = "execution.md.tera",
  },

  # Export template mappings
  ExportTemplateMap = {
    logseq_page
      | String
      | doc "Logseq page export template"
      | default = "export/logseq-page.md.tera",

    logseq_journal
      | String
      | doc "Logseq journal export template"
      | default = "export/logseq-journal.md.tera",

    summary
      | String
      | doc "Summary report template"
      | default = "export/summary.md.tera",

    json
      | String
      | doc "JSON export template"
      | default = "export/graph.json.tera",
  },

  # Template engine configuration
  TemplateConfig = {
    templates_dir
      | Types.Path
      | doc "Template directory path (relative to project root)"
      | default = "templates",

    templates
      | TemplateMap
      | doc "Node type template mappings"
      | default = {},

    export
      | ExportTemplateMap
      | doc "Export template mappings"
      | default = {},

    custom
      | { _ | String }
      | doc "Custom template registry (name → path)"
      | default = {},
  },

  # Query behavior configuration
  QueryConfig = {
    similarity_threshold
      | Types.UnitFloat
      | doc "Minimum similarity threshold for matches (0.0 to 1.0)"
      | default = 0.4,

    max_results
      | Types.PositiveInt
      | doc "Maximum number of search results"
      | default = 10,

    recency_weight
      | Number
      | doc "Recency weight (higher = prefer more recent results)"
      | default = 3.0,

    cross_graph
      | Bool
      | doc "Whether cross-graph queries are enabled"
      | default = true,
  },

  # MCP server configuration
  McpServerConfig = {
    name
      | String
      | doc "MCP server name"
      | default = "kogral-mcp",

    version
      | Types.Version
      | doc "MCP server version"
      | default = "1.0.0",

    transport
      | Types.McpTransport
      | doc "Transport protocol (stdio or SSE)"
      | default = 'stdio,
  },

  # MCP tools configuration
  McpToolsConfig = {
    search
      | Bool
      | doc "Enable kogral/search tool"
      | default = true,

    add_note
      | Bool
      | doc "Enable kogral/add_note tool"
      | default = true,

    add_decision
      | Bool
      | doc "Enable kogral/add_decision tool"
      | default = true,

    link
      | Bool
      | doc "Enable kogral/link tool"
      | default = true,

    get_guidelines
      | Bool
      | doc "Enable kogral/get_guidelines tool"
      | default = true,

    export
      | Bool
      | doc "Enable kogral/export tool"
      | default = true,
  },

  # MCP resources configuration
  McpResourcesConfig = {
    expose_project
      | Bool
      | doc "Expose project resources (kogral://project/*)"
      | default = true,

    expose_shared
      | Bool
      | doc "Expose shared resources (kogral://shared/*)"
      | default = true,
  },

  # MCP configuration
  McpConfig = {
    server
      | McpServerConfig
      | doc "MCP server settings"
      | default = {},

    tools
      | McpToolsConfig
      | doc "MCP tool enablement"
      | default = {},

    resources
      | McpResourcesConfig
      | doc "MCP resource exposure"
      | default = {},
  },

  # Sync configuration
  SyncConfig = {
    auto_index
      | Bool
      | doc "Auto-sync filesystem to secondary storage"
      | default = true,

    watch_paths
      | Array String
      | doc "Paths to watch for changes"
      | default = ["notes", "decisions", "guidelines", "patterns", "journal"],

    debounce_ms
      | Types.PositiveInt
      | doc "Debounce time in milliseconds"
      | default = 500,
  },

  # Main configuration schema
  KbConfig = {
    graph
      | GraphConfig
      | doc "Graph metadata configuration",

    inheritance
      | InheritanceConfig
      | doc "Inheritance settings for guidelines"
      | default = {},

    storage
      | StorageConfig
      | doc "Storage backend configuration"
      | default = {},

    embeddings
      | EmbeddingConfig
      | doc "Embedding provider configuration"
      | default = {},

    blocks
      | BlocksConfig
      | doc "Logseq blocks support configuration"
      | default = {},

    templates
      | TemplateConfig
      | doc "Template engine configuration"
      | default = {},

    query
      | QueryConfig
      | doc "Query behavior configuration"
      | default = {},

    mcp
      | McpConfig
      | doc "MCP server configuration"
      | default = {},

    sync
      | SyncConfig
      | doc "Sync settings"
      | default = {},
  },
}

schemas/kogral/contracts.ncl (new file, 268 lines)
@@ -0,0 +1,268 @@
# Knowledge Base Configuration Contracts (Schema Definitions)
#
# Pattern: Pure schema definitions using Nickel contracts
# Follows: provisioning/schemas pattern
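#
# Example (illustrative): applying a contract from this file validates a record
# and fills in defaults, e.g. `{ name = "my-kb" } | GraphConfig`.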

{
  # === CORE TYPES ===

  GraphConfig = {
    name | String
      | doc "Graph name identifier",

    version | String
      | doc "Semantic version string"
      | default = "1.0.0",

    description | String
      | doc "Human-readable description"
      | default = "",
  },

  InheritanceConfig = {
    base | String
      | doc "Path to shared KOGRAL directory"
      | default = "/Users/Akasha/Tools/.kogral-shared",

    guidelines | Array String
      | doc "Additional guideline paths to inherit"
      | default = [],

    priority | Number
      | doc "Override priority (higher wins)"
      | default = 100,
  },

  # === STORAGE ===

  StorageType = [| 'filesystem, 'memory, 'surrealdb |],

  SecondaryStorageConfig = {
    enabled | Bool
      | doc "Enable secondary storage backend"
      | default = false,

    type | [| 'surrealdb, 'sqlite |]
      | doc "Secondary storage type"
      | default = 'surrealdb,

    url | String
      | doc "Connection URL"
      | default = "ws://localhost:8000",

    namespace | String
      | doc "SurrealDB namespace"
      | default = "kb",

    database | String
      | doc "SurrealDB database name"
      | default = "default",

    username | String
      | doc "Database username"
      | optional,

    password | String
      | doc "Database password"
      | optional,
  },

  StorageConfig = {
    primary | StorageType
      | doc "Primary storage backend"
      | default = 'filesystem,

    secondary | SecondaryStorageConfig
      | doc "Optional secondary storage"
      | default = { enabled = false },
  },

  # === EMBEDDINGS ===

  EmbeddingProviderType = [| 'openai, 'claude, 'ollama, 'fastembed |],

  EmbeddingConfig = {
    enabled | Bool
      | doc "Enable embedding generation"
      | default = true,

    provider | EmbeddingProviderType
      | doc "Embedding provider"
      | default = 'fastembed,

    model | String
      | doc "Model name/identifier"
      | default = "BAAI/bge-small-en-v1.5",

    dimensions | Number
      | doc "Embedding vector dimensions"
      | default = 384,

    api_key_env | String
      | doc "Environment variable for API key"
      | default = "OPENAI_API_KEY",
  },

  # === TEMPLATES ===

  DocumentTemplates = {
    note | String
      | default = "note.md.tera",

    decision | String
      | default = "decision.md.tera",

    guideline | String
      | default = "guideline.md.tera",

    pattern | String
      | default = "pattern.md.tera",

    journal | String
      | default = "journal.md.tera",

    execution | String
      | default = "execution.md.tera",
  },

  ExportTemplates = {
    logseq_page | String
      | default = "export/logseq-page.md.tera",

    logseq_journal | String
      | default = "export/logseq-journal.md.tera",

    summary | String
      | default = "export/summary.md.tera",

    json | String
      | default = "export/graph.json.tera",
  },

  TemplateConfig = {
    templates_dir | String
      | doc "Directory containing templates"
      | default = "templates",

    templates | DocumentTemplates
      | doc "Template files for each node type"
      | default = {},

    export | ExportTemplates
      | doc "Export format templates"
      | default = {},

    custom | { _ : String }
      | doc "Custom template registry (name → path)"
      | default = {},
  },

  # === QUERY ===

  QueryConfig = {
    similarity_threshold | Number
      | doc "Minimum similarity for semantic matches (0-1)"
      | default = 0.4,

    max_results | Number
      | doc "Maximum results to return"
      | default = 10,

    recency_weight | Number
      | doc "Weight factor for recent documents"
      | default = 3.0,

    cross_graph | Bool
      | doc "Enable cross-graph queries"
      | default = true,
  },

  # === MCP ===

  McpServerConfig = {
    name | String
      | default = "kogral-mcp",

    version | String
      | default = "1.0.0",

    transport | [| 'stdio, 'sse |]
      | default = 'stdio,
  },

  McpToolsConfig = {
    search | Bool | default = true,
    add_note | Bool | default = true,
    add_decision | Bool | default = true,
    link | Bool | default = true,
    get_guidelines | Bool | default = true,
    export | Bool | default = true,
  },

  McpResourcesConfig = {
    expose_project | Bool | default = true,
    expose_shared | Bool | default = true,
  },

  McpConfig = {
    server | McpServerConfig
      | default = {},

    tools | McpToolsConfig
      | default = {},

    resources | McpResourcesConfig
      | default = {},
  },

  # === SYNC ===

  SyncConfig = {
    auto_index | Bool
      | doc "Automatically sync filesystem to storage"
      | default = true,

    watch_paths | Array String
      | doc "Directories to watch for changes"
      | default = ["notes", "decisions", "guidelines", "patterns", "journal"],

    debounce_ms | Number
      | doc "Debounce delay for file system events"
      | default = 500,
  },

  # === MAIN CONFIG ===

  KbConfig = {
    graph | GraphConfig
      | doc "Graph metadata configuration",

    inheritance | InheritanceConfig
      | doc "Inheritance configuration"
      | default = {},

    storage | StorageConfig
      | doc "Storage backend configuration"
      | default = {},

    embeddings | EmbeddingConfig
      | doc "Embedding provider configuration"
      | default = {},

    templates | TemplateConfig
      | doc "Template system configuration"
      | default = {},

    query | QueryConfig
      | doc "Query engine configuration"
      | default = {},

    mcp | McpConfig
      | doc "MCP server configuration"
      | default = {},

    sync | SyncConfig
      | doc "Sync configuration"
      | default = {},
  },
}
97
schemas/kogral/defaults.ncl
Normal file
@ -0,0 +1,97 @@
# Knowledge Base Default Configuration Values
#
# Pattern: Default values for all configuration options
# These are base values that modes can override
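#
# Override sketch (illustrative): merging user values directly with `&` would
# conflict with the definite values below, so use the repo's own helpers:
#
#   let defaults = import "defaults.ncl" in
#   let helpers = import "helpers.ncl" in
#   helpers.compose_minimal defaults.base { graph = { name = "my-kb" } }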

let contracts = import "contracts.ncl" in

{
  # Base configuration with all defaults
  base = {
    graph = {
      name = "knowledge-base",
      version = "1.0.0",
      description = "Knowledge Base graph",
    },

    # Inheritance paths: set via TOOLS_PATH env var at export time
    # Default paths resolve: $TOOLS_PATH/.kogral-shared (or $HOME/Tools/.kogral-shared)
    inheritance = {
      # Paths with $TOOLS_PATH are resolved at runtime by Rust code
      base = "$TOOLS_PATH/.kogral-shared",
      guidelines = [],
      priority = 100,
    },

    storage = {
      primary = 'filesystem,
      secondary = {
        enabled = false,
        type = 'surrealdb,
        url = "ws://localhost:8000",
        namespace = "kogral",
        database = "default",
      },
    },

    embeddings = {
      enabled = true,
      provider = 'fastembed,
      model = "BAAI/bge-small-en-v1.5",
      dimensions = 384,
      api_key_env = "OPENAI_API_KEY",
    },

    templates = {
      templates_dir = "templates",
      templates = {
        note = "note.md.tera",
        decision = "decision.md.tera",
        guideline = "guideline.md.tera",
        pattern = "pattern.md.tera",
        journal = "journal.md.tera",
        execution = "execution.md.tera",
      },
      export = {
        logseq_page = "export/logseq-page.md.tera",
        logseq_journal = "export/logseq-journal.md.tera",
        summary = "export/summary.md.tera",
        json = "export/graph.json.tera",
      },
      custom = {},
    },

    query = {
      similarity_threshold = 0.4,
      max_results = 10,
      recency_weight = 3.0,
      cross_graph = true,
    },

    mcp = {
      server = {
        name = "kogral-mcp",
        version = "1.0.0",
        transport = 'stdio,
      },
      tools = {
        search = true,
        add_note = true,
        add_decision = true,
        link = true,
        get_guidelines = true,
        export = true,
      },
      resources = {
        expose_project = true,
        expose_shared = true,
      },
    },

    sync = {
      auto_index = true,
      watch_paths = ["notes", "decisions", "guidelines", "patterns", "journal"],
      debounce_ms = 500,
    },
  } | contracts.KbConfig,
}
111
schemas/kogral/helpers.ncl
Normal file
@ -0,0 +1,111 @@
# Knowledge Base Configuration Composition Helpers
#
# Provides utilities for merging configurations from multiple layers:
# 1. Schema (type contracts)
# 2. Defaults (base values)
# 3. Mode Overlay (mode-specific tuning: dev/prod/test)
# 4. User Customization (overrides)
#
# Pattern: Follows provisioning/schemas/platform/common/helpers.ncl
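#
# End-to-end sketch (illustrative) composing all three layers:
#
#   let defaults = (import "defaults.ncl").base in
#   let dev_mode = import "modes/dev.ncl" in
#   compose_config defaults dev_mode { graph = { name = "my-kb" } }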

{
  # Recursively merge two record configurations
  # Override values take precedence over base (shallow merge at each level)
  #
  # Example:
  #   let base = { storage = { primary = 'filesystem }, embeddings = { enabled = true } }
  #   let override = { storage = { primary = 'surrealdb } }
  #   merge_with_override base override
  #   # Result: { storage = { primary = 'surrealdb }, embeddings = { enabled = true } }
  merge_with_override | not_exported = fun base override =>
    if std.is_record base && std.is_record override then
      let base_fields = std.record.fields base in
      let override_fields = std.record.fields override in

      base_fields
      |> std.array.fold_right
        (fun key acc =>
          let base_value = base."%{key}" in

          if std.record.has_field key override then
            let override_value = override."%{key}" in

            if std.is_record base_value && std.is_record override_value then
              acc & { "%{key}" = merge_with_override base_value override_value }
            else
              # Override value takes precedence
              acc & { "%{key}" = override_value }
          else
            # Keep base value
            acc & { "%{key}" = base_value }
        )
        # Start from the fields that exist only in override
        (override_fields
         |> std.array.fold_right
           (fun key acc =>
             if !std.record.has_field key base then
               acc & { "%{key}" = override."%{key}" }
             else
               acc
           )
           {}
        )
    else
      # If either is not a record, override takes precedence
      if std.is_null override then base else override,

  # Compose configuration from multiple layers with proper merging
  #
  # Layer 1: defaults (base values)
  # Layer 2: mode_config (mode-specific overrides: dev/prod/test)
  # Layer 3: user_custom (user customizations)
  #
  # Example:
  #   let defaults = { embeddings = { provider = 'fastembed } }
  #   let mode = { embeddings = { provider = 'openai } }  # Production override
  #   let user = { graph = { name = "my-project" } }
  #   compose_config defaults mode user
  compose_config | not_exported = fun defaults mode_config user_custom =>
    let with_mode = merge_with_override defaults mode_config in
    merge_with_override with_mode user_custom,

  # Compose minimal config (defaults + user only, no mode)
  # Useful for simple cases where mode-specific tuning isn't needed
  compose_minimal | not_exported = fun defaults user_custom =>
    merge_with_override defaults user_custom,

  # Validate that required fields are present
  # Returns config if valid, fails with an error if invalid
  validate_required | not_exported = fun config required_fields =>
    required_fields
    |> std.array.fold_right
      (fun field acc =>
        if std.record.has_field field config then
          acc
        else
          std.fail_with "Required field missing: %{field}"
      )
      config,

  # Extract specific subsection from config
  # Example: extract_section config "storage"
  extract_section | not_exported = fun config section =>
    if std.record.has_field section config then
      config."%{section}"
    else
      std.fail_with "Section not found: %{section}",

  # Merge arrays (for things like watch_paths, tags, etc.)
  # Deduplicates and preserves order
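  #
  # Example:
  #   merge_arrays ["notes", "journal"] ["journal", "assets"]
  #   # => ["notes", "journal", "assets"]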
  merge_arrays | not_exported = fun base override =>
    let combined = base @ override in
    std.array.fold_right
      (fun item acc =>
        if std.array.elem item acc then
          acc
        else
          [item] @ acc
      )
      []
      combined,
}
48
schemas/kogral/modes/dev.ncl
Normal file
@ -0,0 +1,48 @@
# Development Mode Configuration Overlay
#
# Optimized for: Fast iteration, local development, debugging
# Storage: Filesystem only (git-tracked)
# Embeddings: Local fastembed (no API costs)
# Logging: Debug level
# Sync: Disabled (manual only)

{
  storage = {
    primary = 'filesystem,
    secondary = {
      enabled = false, # No database in dev mode
    },
  },

  embeddings = {
    enabled = true,
    provider = 'fastembed, # Local, no API costs
    model = "BAAI/bge-small-en-v1.5",
    dimensions = 384,
  },

  query = {
    similarity_threshold = 0.4, # Permissive for exploration
    max_results = 20, # More results for discovery
    cross_graph = true,
  },

  sync = {
    auto_index = false, # Manual sync in dev
    debounce_ms = 1000, # Longer debounce
  },

  mcp = {
    server = {
      transport = 'stdio,
    },
    tools = {
      search = true,
      add_note = true,
      add_decision = true,
      link = true,
      get_guidelines = true,
      export = true,
    },
  },
}
57
schemas/kogral/modes/prod.ncl
Normal file
@ -0,0 +1,57 @@
# Production Mode Configuration Overlay
#
# Optimized for: Scalability, performance, reliability
# Storage: Hybrid (filesystem + SurrealDB)
# Embeddings: Cloud providers (OpenAI/Claude via rig-core)
# Logging: Info level
# Sync: Auto-enabled with optimized debounce

{
  storage = {
    primary = 'filesystem,
    secondary = {
      enabled = true, # SurrealDB for scalable queries
      type = 'surrealdb,
      url = "ws://localhost:8000",
      namespace = "kogral",
      database = "production",
    },
  },

  embeddings = {
    enabled = true,
    provider = 'openai, # Cloud API for production quality
    model = "text-embedding-3-small",
    dimensions = 1536,
    api_key_env = "OPENAI_API_KEY",
  },

  query = {
    similarity_threshold = 0.6, # Stricter for quality results
    max_results = 10, # Conservative for performance
    cross_graph = true,
  },

  sync = {
    auto_index = true, # Auto-sync enabled
    debounce_ms = 300, # Faster response to changes
  },

  mcp = {
    server = {
      transport = 'stdio,
    },
    tools = {
      search = true,
      add_note = true,
      add_decision = true,
      link = true,
      get_guidelines = true,
      export = true,
    },
    resources = {
      expose_project = true,
      expose_shared = true,
    },
  },
}
52
schemas/kogral/modes/test.ncl
Normal file
@ -0,0 +1,52 @@
# Test Mode Configuration Overlay
#
# Optimized for: Fast tests, isolation, no side effects
# Storage: In-memory only (ephemeral)
# Embeddings: Disabled (tests don't need semantic search)
# Logging: Debug level
# Sync: Disabled (manual control in tests)

{
  storage = {
    primary = 'memory, # Ephemeral, fast, isolated
    secondary = {
      enabled = false, # No database in tests
    },
  },

  embeddings = {
    enabled = false, # Disable for test speed
    provider = 'fastembed, # Fallback if needed
    model = "BAAI/bge-small-en-v1.5",
    dimensions = 384,
  },

  query = {
    similarity_threshold = 0.3, # Permissive for test coverage
    max_results = 50, # More results for verification
    cross_graph = false, # Isolated tests
  },

  sync = {
    auto_index = false, # Manual control in tests
    debounce_ms = 0, # No debounce for deterministic tests
  },

  mcp = {
    server = {
      transport = 'stdio,
    },
    tools = {
      search = true,
      add_note = true,
      add_decision = true,
      link = true,
      get_guidelines = true,
      export = true,
    },
    resources = {
      expose_project = false, # Isolated tests
      expose_shared = false,
    },
  },
}
139
schemas/surrealdb/blocks.surql
Normal file
@ -0,0 +1,139 @@
-- SurrealDB Schema for Logseq Blocks Support
-- This schema extends the KOGRAL system to support block-level storage and querying

-- Define namespace and database
-- USE NS kogral;
-- USE DB default;

-- ============================================================================
-- BLOCK TABLE
-- ============================================================================

DEFINE TABLE block SCHEMAFULL;

-- Fields
DEFINE FIELD node_id ON block TYPE record(node) ASSERT $value != NONE;
DEFINE FIELD block_id ON block TYPE string ASSERT $value != NONE;
DEFINE FIELD content ON block TYPE string;
DEFINE FIELD parent_id ON block TYPE option<string>;
DEFINE FIELD created ON block TYPE datetime;
DEFINE FIELD modified ON block TYPE datetime;

-- Properties (flexible object for block metadata)
DEFINE FIELD properties ON block TYPE object;
DEFINE FIELD properties.tags ON block TYPE array;
DEFINE FIELD properties.status ON block TYPE option<string>;
DEFINE FIELD properties.custom ON block TYPE option<object>;
DEFINE FIELD properties.block_refs ON block TYPE array;
DEFINE FIELD properties.page_refs ON block TYPE array;

-- ============================================================================
-- INDEXES
-- ============================================================================

-- Index on node_id for fast lookup of all blocks in a node
DEFINE INDEX block_node_idx ON block COLUMNS node_id;

-- Index on block_id for direct block lookup
DEFINE INDEX block_id_idx ON block COLUMNS block_id UNIQUE;

-- Index on tags for tag-based queries (find all #card blocks)
DEFINE INDEX block_tags_idx ON block COLUMNS properties.tags;

-- Index on status for TODO/DONE queries
DEFINE INDEX block_status_idx ON block COLUMNS properties.status;

-- Index on parent_id for hierarchical queries
DEFINE INDEX block_parent_idx ON block COLUMNS parent_id;

-- Full-text search index on content
-- (assumes a "simple" analyzer has been defined beforehand, e.g.:
--  DEFINE ANALYZER simple TOKENIZERS class FILTERS lowercase;)
DEFINE INDEX block_content_search ON block COLUMNS content SEARCH ANALYZER simple BM25;
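
-- Example (illustrative; record id and field values are made up): creating a block
-- CREATE block CONTENT {
--     node_id: node:abc123,
--     block_id: "6579a2e1-0001",
--     content: "TODO Review the storage design #card",
--     created: time::now(),
--     modified: time::now(),
--     properties: {
--         tags: ["card"],
--         status: "TODO",
--         block_refs: [],
--         page_refs: []
--     }
-- };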

-- ============================================================================
-- QUERIES (Examples)
-- ============================================================================

-- Find all blocks with a specific tag
-- SELECT * FROM block WHERE $tag IN properties.tags;

-- Find all TODO blocks
-- SELECT * FROM block WHERE properties.status = "TODO";

-- Find all blocks in a specific node
-- SELECT * FROM block WHERE node_id = $node_record_id;

-- Find blocks with custom property
-- SELECT * FROM block WHERE properties.custom[$key] = $value;

-- Full-text search in block content
-- SELECT * FROM block WHERE content @@ $search_term;

-- Get block hierarchy (parent and all children)
-- SELECT *,
--     (SELECT * FROM block WHERE parent_id = $parent.block_id) AS children
-- FROM block WHERE block_id = $parent_block_id;

-- ============================================================================
-- RELATIONSHIPS
-- ============================================================================

-- Block belongs to a Node via node_id (defined above as record(node))

-- Example: Get all blocks for a node with their content
-- SELECT * FROM block WHERE node_id = $node_id ORDER BY created ASC;

-- Example: Count blocks by tag across all nodes
-- SELECT
--     array::flatten(properties.tags) AS tag,
--     count() AS count
-- FROM block
-- GROUP BY tag
-- ORDER BY count DESC;

-- Example: Get all flashcards (#card) across the knowledge base
-- SELECT
--     node_id.title AS node_title,
--     content,
--     properties
-- FROM block
-- WHERE "card" IN properties.tags;

-- ============================================================================
-- MIGRATION QUERIES
-- ============================================================================

-- Populate blocks table from existing nodes (run once)
-- This would be executed by the sync mechanism or migration script

-- Example migration pseudocode:
-- FOR node IN (SELECT * FROM node) {
--     LET parsed_blocks = parse_markdown(node.content);
--     FOR block IN parsed_blocks {
--         CREATE block CONTENT {
--             node_id: node.id,
--             block_id: block.id,
--             content: block.content,
--             parent_id: block.parent_id,
--             created: block.created,
--             modified: block.modified,
--             properties: {
--                 tags: block.properties.tags,
--                 status: block.properties.status,
--                 custom: block.properties.custom,
--                 block_refs: block.properties.block_refs,
--                 page_refs: block.properties.page_refs
--             }
--         };
--     }
-- }

-- ============================================================================
-- CLEANUP QUERIES
-- ============================================================================

-- Delete all blocks for a specific node
-- DELETE block WHERE node_id = $node_record_id;

-- Delete orphaned blocks (node no longer exists)
-- DELETE block WHERE node_id NOT IN (SELECT id FROM node);
47
schemas/types.ncl
Normal file
@ -0,0 +1,47 @@
# Shared types for Knowledge Base configuration
#
# This file defines common types used across KB configuration schemas.

{
  # Node types for knowledge base entries
  NodeType = [| 'note, 'decision, 'guideline, 'pattern, 'journal, 'execution |],

  # Node status values
  NodeStatus = [| 'draft, 'active, 'superseded, 'archived |],

  # Relationship edge types
  EdgeType = [| 'relates_to, 'depends_on, 'implements, 'extends, 'supersedes, 'explains |],

  # Storage backend types
  StorageType = [| 'filesystem, 'memory |],

  # Secondary storage backend types
  SecondaryStorageType = [| 'surrealdb, 'sqlite |],

  # Embedding provider types
  EmbeddingProvider = [| 'openai, 'claude, 'ollama, 'fastembed |],

  # MCP transport types
  McpTransport = [| 'stdio, 'sse |],

  # ISO 8601 timestamp string
  Timestamp = String,

  # Semantic version string (e.g., "1.0.0")
  Version = String,

  # File path string
  Path = String,

  # URL string
  Url = String,

  # Positive integer (loose alias; see the note below)
  PositiveInt = Number,

  # Float between 0.0 and 1.0 (loose alias; see the note below)
  UnitFloat = Number,
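
  # Note (sketch, not wired in): the numeric aliases above are intentionally
  # loose. Stricter variants could use predicate contracts, e.g.:
  #
  #   PositiveInt = std.contract.from_predicate (fun x =>
  #     std.is_number x && x > 0 && x == std.number.floor x),
  #   UnitFloat = std.contract.from_predicate (fun x =>
  #     std.is_number x && x >= 0.0 && x <= 1.0),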

  # Email address
  Email = String,
}
164
scripts/kogral-backup.nu
Normal file
@ -0,0 +1,164 @@
#!/usr/bin/env nu
# Backup KOGRAL graphs to archive
#
# Usage: nu kogral-backup.nu [--output <path>] [--format <tar|zip>] [--compress]
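#
# Example:
#   nu kogral-backup.nu --output my-backup --format tar --compress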

def main [
    --output: string = "kogral-backup"   # Output filename (without extension)
    --format: string = "tar"             # Archive format: tar or zip
    --compress                           # Compress the archive
    --kogral-dir: string = ".kogral"     # KOGRAL directory
    --include-metadata                   # Include .git and other metadata
] {
    print $"(ansi green_bold)KOGRAL Backup(ansi reset)"
    print $"KOGRAL Directory: ($kogral_dir)"
    print $"Format: ($format)"
    print $"Compress: ($compress)"

    # Check if .kogral directory exists
    if not ($kogral_dir | path exists) {
        print $"(ansi red)Error: KOGRAL directory not found: ($kogral_dir)(ansi reset)"
        exit 1
    }

    # Generate timestamp
    let timestamp = date now | format date "%Y%m%d_%H%M%S"
    let backup_name = $"($output)_($timestamp)"

    # Determine file extension
    let extension = if $compress {
        if $format == "tar" { ".tar.gz" } else { ".zip" }
    } else {
        if $format == "tar" { ".tar" } else { ".zip" }
    }

    let backup_file = $"($backup_name)($extension)"

    print $"\n(ansi cyan_bold)Preparing backup...(ansi reset)"
    print $"Output file: ($backup_file)"

    # Count files
    let stats = get_kogral_stats $kogral_dir

    print $"\n(ansi cyan_bold)Files to backup:(ansi reset)"
    print $"  Notes: ($stats.notes)"
    print $"  Decisions: ($stats.decisions)"
    print $"  Guidelines: ($stats.guidelines)"
    print $"  Patterns: ($stats.patterns)"
    print $"  Journal: ($stats.journal)"
    print $"  Config: ($stats.config)"
    print $"  Total: ($stats.total)"

    # Create backup
    print $"\n(ansi cyan_bold)Creating backup...(ansi reset)"

    match $format {
        "tar" => {
            create_tar_backup $kogral_dir $backup_file $compress $include_metadata
        },
        "zip" => {
            create_zip_backup $kogral_dir $backup_file $include_metadata
        },
        _ => {
            print $"(ansi red)Error: Invalid format. Use: tar or zip(ansi reset)"
            exit 1
        }
    }

    # Verify backup
    if ($backup_file | path exists) {
        let size = ls $backup_file | get size | first
        print $"\n(ansi green_bold)✓ Backup created successfully(ansi reset)"
        print $"File: ($backup_file)"
        print $"Size: ($size)"
    } else {
        print $"\n(ansi red)✗ Backup creation failed(ansi reset)"
        exit 1
    }

    # Generate manifest
    print $"\n(ansi cyan_bold)Generating manifest...(ansi reset)"
    let manifest = generate_manifest $kogral_dir $backup_file $stats $timestamp
    let manifest_file = $"($backup_name).manifest.json"
    $manifest | to json | save $manifest_file
    print $"Manifest saved: ($manifest_file)"

    print $"\n(ansi green_bold)✓ Backup completed(ansi reset)"
}

def get_kogral_stats [kogral_dir: string]: nothing -> record {
    let notes = (glob $"($kogral_dir)/notes/**/*.md" | length)
    let decisions = (glob $"($kogral_dir)/decisions/**/*.md" | length)
    let guidelines = (glob $"($kogral_dir)/guidelines/**/*.md" | length)
    let patterns = (glob $"($kogral_dir)/patterns/**/*.md" | length)
    let journal = (glob $"($kogral_dir)/journal/**/*.md" | length)
    let config = if ($"($kogral_dir)/config.toml" | path exists) { 1 } else { 0 }

    {
        notes: $notes,
        decisions: $decisions,
        guidelines: $guidelines,
        patterns: $patterns,
        journal: $journal,
        config: $config,
        total: ($notes + $decisions + $guidelines + $patterns + $journal + $config)
    }
}

def create_tar_backup [kogral_dir: string, output: string, compress: bool, include_metadata: bool] {
    let tar_flags = if $compress { "-czf" } else { "-cf" }
    let exclude_flags = if not $include_metadata {
        ["--exclude=.git", "--exclude=.DS_Store"]
    } else {
        []
    }

    print "  Creating tar archive..."

    # Shell out to tar; the exclude flags are joined into the command string
    let tar_cmd = $"tar ($tar_flags) ($output) ($exclude_flags | str join ' ') ($kogral_dir)"

    try {
        bash -c $tar_cmd
        print $"  (ansi green)✓ Tar archive created(ansi reset)"
    } catch {
        print $"  (ansi red)✗ Tar archive creation failed(ansi reset)"
    }
}

def create_zip_backup [kogral_dir: string, output: string, include_metadata: bool] {
    print "  Creating zip archive..."

    let exclude_pattern = if not $include_metadata { "*.git* */.DS_Store" } else { "" }

    # Only pass -x when there is something to exclude; a bare -x makes zip fail
    let zip_cmd = if ($exclude_pattern | is-empty) {
        $"zip -r ($output) ($kogral_dir)"
    } else {
        $"zip -r ($output) ($kogral_dir) -x ($exclude_pattern)"
    }

    try {
        bash -c $zip_cmd
        print $"  (ansi green)✓ Zip archive created(ansi reset)"
    } catch {
        print $"  (ansi red)✗ Zip archive creation failed(ansi reset)"
    }
}

def generate_manifest [kogral_dir: string, backup_file: string, stats: record, timestamp: string]: nothing -> record {
    let config_path = $"($kogral_dir)/config.toml"
    let config = if ($config_path | path exists) {
        open $config_path | from toml
    } else {
        { graph: { name: "unknown", version: "unknown" } }
    }

    {
        backup_timestamp: $timestamp,
        backup_file: $backup_file,
        kogral_directory: $kogral_dir,
        graph_name: $config.graph.name,
        graph_version: $config.graph.version,
        statistics: $stats,
        created_by: "kogral-backup.nu",
        version: "1.0.0"
    }
}
337
scripts/kogral-export-logseq.nu
Normal file
@ -0,0 +1,337 @@
#!/usr/bin/env nu
# Export KOGRAL to Logseq format
#
# Usage: nu kogral-export-logseq.nu <output-path> [--kogral-dir <path>] [--dry-run]
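#
# Example:
#   nu kogral-export-logseq.nu ~/logseq-graph --kogral-dir .kogral --dry-run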

def main [
    output_path: string                  # Path for Logseq graph output
    --kogral-dir: string = ".kogral"     # KOGRAL directory
    --dry-run                            # Show what would be exported without making changes
    --skip-journals                      # Skip exporting journal entries
] {
    print $"(ansi green_bold)Logseq Export(ansi reset)"
    print $"Source: ($kogral_dir)"
    print $"Target: ($output_path)"

    if $dry_run {
        print $"(ansi yellow)DRY RUN MODE - No changes will be made(ansi reset)"
    }

    # Check if .kogral directory exists
    if not ($kogral_dir | path exists) {
        print $"(ansi red)Error: KOGRAL directory not found: ($kogral_dir)(ansi reset)"
        exit 1
    }

    # Count files to export
    let stats = get_export_stats $kogral_dir $skip_journals

    print $"\n(ansi cyan_bold)Files to export:(ansi reset)"
    print $"  Notes: ($stats.notes)"
    print $"  Decisions: ($stats.decisions)"
    print $"  Guidelines: ($stats.guidelines)"
    print $"  Patterns: ($stats.patterns)"
    print $"  Journals: ($stats.journals)"
    print $"  Total: ($stats.total)"

    if $stats.total == 0 {
        print $"\n(ansi yellow)No files to export(ansi reset)"
        exit 0
    }

    if $dry_run {
        print $"\n(ansi yellow)[DRY RUN] Would export ($stats.total) files(ansi reset)"
        exit 0
    }

    # Create Logseq directory structure
    print $"\n(ansi cyan_bold)Creating Logseq directory structure...(ansi reset)"
    create_logseq_structure $output_path

    # Export files
    print $"\n(ansi cyan_bold)Exporting files...(ansi reset)"

    export_nodes $"($kogral_dir)/notes" $"($output_path)/pages" "note"
    export_nodes $"($kogral_dir)/decisions" $"($output_path)/pages" "decision"
    export_nodes $"($kogral_dir)/guidelines" $"($output_path)/pages" "guideline"
    export_nodes $"($kogral_dir)/patterns" $"($output_path)/pages" "pattern"

    if not $skip_journals {
        export_journals $"($kogral_dir)/journal" $"($output_path)/journals"
    }

    # Create Logseq config
    print $"\n(ansi cyan_bold)Creating Logseq configuration...(ansi reset)"
    create_logseq_config $output_path

    print $"\n(ansi green_bold)✓ Export completed(ansi reset)"
    print $"Exported ($stats.total) files to ($output_path)"
}

def get_export_stats [kogral_dir: string, skip_journals: bool] {
    let notes = if ($"($kogral_dir)/notes" | path exists) {
        glob $"($kogral_dir)/notes/**/*.md" | length
    } else { 0 }

    let decisions = if ($"($kogral_dir)/decisions" | path exists) {
        glob $"($kogral_dir)/decisions/**/*.md" | length
    } else { 0 }

    let guidelines = if ($"($kogral_dir)/guidelines" | path exists) {
        glob $"($kogral_dir)/guidelines/**/*.md" | length
    } else { 0 }

    let patterns = if ($"($kogral_dir)/patterns" | path exists) {
        glob $"($kogral_dir)/patterns/**/*.md" | length
    } else { 0 }

    let journals = if not $skip_journals and ($"($kogral_dir)/journal" | path exists) {
        glob $"($kogral_dir)/journal/**/*.md" | length
    } else { 0 }

    {
        notes: $notes,
        decisions: $decisions,
        guidelines: $guidelines,
        patterns: $patterns,
        journals: $journals,
        total: ($notes + $decisions + $guidelines + $patterns + $journals)
    }
}

def create_logseq_structure [output_path: string] {
    mkdir $output_path
    mkdir $"($output_path)/pages"
    mkdir $"($output_path)/journals"
    mkdir $"($output_path)/assets"
    mkdir $"($output_path)/logseq"

    print $"  (ansi green)✓ Directory structure created(ansi reset)"
}

def export_nodes [source_dir: string, target_dir: string, node_type: string] {
    if not ($source_dir | path exists) {
        return
    }

    let files = glob $"($source_dir)/**/*.md"
    if ($files | length) == 0 {
        return
    }

    print $"\n  Exporting ($node_type)s..."

    mut exported = 0
    let total = $files | length

    for file in $files {
        let filename = $file | path basename

        # Phase 1: Read KOGRAL markdown file
        let content = open $file

        # Phase 2: Convert to Logseq format
        let logseq_content = convert_kogral_to_logseq $content $node_type

        # Phase 3: Save to Logseq pages directory
        $logseq_content | save $"($target_dir)/($filename)"

        $exported = $exported + 1
    }

    print $"  (ansi green)✓ Exported ($exported)/($total) ($node_type)s(ansi reset)"
}

def export_journals [source_dir: string, target_dir: string] {
    if not ($source_dir | path exists) {
        return
    }

    let files = glob $"($source_dir)/**/*.md"
    if ($files | length) == 0 {
        return
    }

    print $"\n  Exporting journals..."

    mut exported = 0
    let total = $files | length

    for file in $files {
        let filename = $file | path basename

        # Phase 1: Read KOGRAL journal file
        let content = open $file

        # Phase 2: Convert to Logseq format
        let logseq_content = convert_kogral_to_logseq $content "journal"

        # Phase 3: Save to Logseq journals directory
        $logseq_content | save $"($target_dir)/($filename)"

        $exported = $exported + 1
    }

    print $"  (ansi green)✓ Exported ($exported)/($total) journals(ansi reset)"
}

def convert_kogral_to_logseq [content: string, node_type: string] {
    let lines = $content | lines

    # Phase 1: Check for and parse YAML frontmatter
    let has_frontmatter = ($lines | get 0? | default "" | str trim) == "---"

    if not $has_frontmatter {
        # No frontmatter, return content as-is with minimal properties
        return $"type:: ($node_type)\n\n($content)"
    }

    # Phase 2: Extract frontmatter and body
    let frontmatter_end = get_frontmatter_end_index $lines
    let frontmatter_lines = $lines | take $frontmatter_end
    let body_lines = $lines | skip $frontmatter_end

    # Phase 3: Parse YAML fields
    let fm = parse_yaml_frontmatter $frontmatter_lines

    # Phase 4: Convert to Logseq properties format
    mut logseq_props = ""

    # Add type property
    $logseq_props = $logseq_props + $"type:: ($node_type)\n"

    # Add title if present
    if not ($fm.title? | is-empty) {
        $logseq_props = $logseq_props + $"title:: ($fm.title?)\n"
    }

    # Add created date if present
    if not ($fm.created? | is-empty) {
        let created_date = convert_date_to_logseq $fm.created?
        $logseq_props = $logseq_props + $"created:: [[($created_date)]]\n"
    }

    # Add tags if present
    if not ($fm.tags? | is-empty) {
        $logseq_props = $logseq_props + "tags:: "
        let tags_list = $fm.tags? | str replace -a "[" "" | str replace -a "]" "" | split row ','
        for tag in $tags_list {
            let trimmed = $tag | str trim | str replace -a '"' ''
            $logseq_props = $logseq_props + $"[[($trimmed)]] "
        }
        $logseq_props = $logseq_props + "\n"
    }

    # Add status if present
    if not ($fm.status? | is-empty) {
        $logseq_props = $logseq_props + $"status:: ($fm.status?)\n"
    }

    # Add relationships if present
    if not ($fm.relates_to? | is-empty) {
        $logseq_props = $logseq_props + "relates-to:: "
        let refs = parse_yaml_list $fm.relates_to?
        let refs_formatted = $refs | each { |r| $'[[($r)]]' }
        $logseq_props = $logseq_props + ($refs_formatted | str join ", ") + "\n"
    }

    if not ($fm.depends_on? | is-empty) {
        $logseq_props = $logseq_props + "depends-on:: "
        let refs = parse_yaml_list $fm.depends_on?
        let refs_formatted = $refs | each { |r| $'[[($r)]]' }
        $logseq_props = $logseq_props + ($refs_formatted | str join ", ") + "\n"
    }

    # Phase 5: Build final output
    let body = $body_lines | str join "\n"
    $"($logseq_props)\n($body)"
}

def get_frontmatter_end_index [lines: list] {
    mut idx = 1  # Skip first "---"

    for line in ($lines | skip 1) {
        if ($line | str trim) == "---" {
            return ($idx + 1)
        }
        $idx = $idx + 1
    }

    $idx
}

def parse_yaml_frontmatter [lines: list] {
    mut fm = {}

    for line in $lines {
        if ($line | str trim) == "---" {
            continue
        }

        # Match YAML key: value format (regex replace needs the -r flag)
        if ($line =~ '^[\w]+:') {
            let key = $line | str replace -r '^(\w+):.*' '$1'
            let value = $line | str replace -r '^[\w]+:\s*' '' | str trim
            $fm = ($fm | insert $key $value)
        }
    }

    $fm
}

def convert_date_to_logseq [date_str: string] {
    # Convert ISO 8601 (2026-01-17T10:30:00Z) to Logseq format (Jan 17th, 2026)
    # For simplicity, extract the date part and format it
    # (str substring ranges are inclusive of both endpoints)
    let date_part = $date_str | str substring 0..9
    let year = $date_part | str substring 0..3
    let month = $date_part | str substring 5..6
    let day = $date_part | str substring 8..9 | str replace -r '^0+' ''

    let month_name = match $month {
        "01" => "Jan",
        "02" => "Feb",
        "03" => "Mar",
        "04" => "Apr",
        "05" => "May",
        "06" => "Jun",
        "07" => "Jul",
        "08" => "Aug",
        "09" => "Sep",
        "10" => "Oct",
        "11" => "Nov",
        "12" => "Dec",
        _ => "Unknown"
    }

    let day_suffix = match ($day | into int) {
        1 | 21 | 31 => "st",
        2 | 22 => "nd",
        3 | 23 => "rd",
        _ => "th"
    }

    $"($month_name) ($day)($day_suffix), ($year)"
}

def parse_yaml_list [yaml_str: string] {
    # Parse YAML list format: [item1, item2] or list format with dashes
    # For now, handle bracket format
    let cleaned = $yaml_str | str replace -a "[" "" | str replace -a "]" ""
    let items = $cleaned | split row ',' | each { |i| $i | str trim | str replace -a '"' '' }
    $items
}

def create_logseq_config [output_path: string] {
    let config = {
        "preferred-format": "markdown",
        "preferred-workflow": ":now",
        "hidden": [".git"],
        "journal/page-title-format": "yyyy-MM-dd",
        "start-of-week": 1,
        "feature/enable-block-timestamps": false,
        "feature/enable-search-remove-accents": true
    }

    # Note: config.edn is an EDN file; JSON is only an approximation that
    # Logseq may not fully accept. A proper EDN serializer would be safer.
    $config | to json | save $"($output_path)/logseq/config.edn"
    print $"  (ansi green)✓ Logseq configuration created(ansi reset)"
}
388
scripts/kogral-import-logseq.nu
Normal file
@ -0,0 +1,388 @@
#!/usr/bin/env nu
# Import from Logseq graph to KOGRAL
#
# Usage: nu kogral-import-logseq.nu <logseq-path> [--kogral-dir <path>] [--dry-run]
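#
# Example:
#   nu kogral-import-logseq.nu ~/logseq-graph --kogral-dir .kogral --dry-run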

def main [
    logseq_path: string                  # Path to Logseq graph directory
    --kogral-dir: string = ".kogral"     # KOGRAL directory
    --dry-run                            # Show what would be imported without making changes
    --skip-journals                      # Skip importing journal entries
    --skip-pages                         # Skip importing pages
] {
    print $"(ansi green_bold)Logseq Import(ansi reset)"
    print $"Source: ($logseq_path)"
    print $"Target: ($kogral_dir)"

    if $dry_run {
        print $"(ansi yellow)DRY RUN MODE - No changes will be made(ansi reset)"
    }

    # Validate Logseq directory
    if not ($logseq_path | path exists) {
        print $"(ansi red)Error: Logseq path not found: ($logseq_path)(ansi reset)"
        exit 1
    }

    let pages_dir = $"($logseq_path)/pages"
    let journals_dir = $"($logseq_path)/journals"

    if not ($pages_dir | path exists) and not ($journals_dir | path exists) {
        print $"(ansi red)Error: Not a valid Logseq graph \(missing pages or journals directory\)(ansi reset)"
        exit 1
    }

    # Validate KOGRAL directory
    if not ($kogral_dir | path exists) {
        print $"(ansi yellow)KOGRAL directory doesn't exist. Creating...(ansi reset)"
        if not $dry_run {
            mkdir $kogral_dir
            mkdir $"($kogral_dir)/notes"
            mkdir $"($kogral_dir)/journal"
        }
    }

    # Count files to import
    let pages_count = if ($pages_dir | path exists) and not $skip_pages {
        glob $"($pages_dir)/**/*.md" | length
    } else { 0 }

    let journals_count = if ($journals_dir | path exists) and not $skip_journals {
        glob $"($journals_dir)/**/*.md" | length
    } else { 0 }

    print $"\n(ansi cyan_bold)Files to import:(ansi reset)"
    print $"  Pages: ($pages_count)"
    print $"  Journals: ($journals_count)"
    print $"  Total: ($pages_count + $journals_count)"

    if ($pages_count + $journals_count) == 0 {
        print $"\n(ansi yellow)No files to import(ansi reset)"
        exit 0
    }

    if $dry_run {
        print $"\n(ansi yellow)[DRY RUN] Would import ($pages_count + $journals_count) files(ansi reset)"
        exit 0
    }

    # Import pages
    if not $skip_pages and $pages_count > 0 {
        print $"\n(ansi cyan_bold)Importing pages...(ansi reset)"
        import_pages $pages_dir $kogral_dir
    }

    # Import journals
    if not $skip_journals and $journals_count > 0 {
        print $"\n(ansi cyan_bold)Importing journals...(ansi reset)"
        import_journals $journals_dir $kogral_dir
    }

    print $"\n(ansi green_bold)✓ Import completed(ansi reset)"
    print $"Imported ($pages_count + $journals_count) files"
}

def import_pages [pages_dir: string, kogral_dir: string] {
    let files = glob $"($pages_dir)/**/*.md"
    let total = $files | length

    mut imported = 0
    mut decisions = 0
    mut guidelines = 0
    mut patterns = 0
    mut notes = 0

    for file in $files {
        let filename = $file | path basename
        print $"  Importing ($filename)..."

        # Phase 1: Read and parse Logseq format
        let content = open $file

        # Phase 2: Detect node type from properties/content
        let node_type = detect_node_type $content

        # Phase 3: Convert to KOGRAL format
        let kogral_content = convert_logseq_to_kogral $content $node_type

        # Phase 4: Determine target directory
        let target_dir = match $node_type {
            "decision" => {
                $decisions = $decisions + 1
                $"($kogral_dir)/decisions"
            },
            "guideline" => {
                $guidelines = $guidelines + 1
                $"($kogral_dir)/guidelines"
            },
            "pattern" => {
                $patterns = $patterns + 1
                $"($kogral_dir)/patterns"
            },
            _ => {
                $notes = $notes + 1
                $"($kogral_dir)/notes"
            }
        }

        # Phase 5: Save to KOGRAL
        mkdir $target_dir
        $kogral_content | save $"($target_dir)/($filename)"

        $imported = $imported + 1
        print $"    (ansi green)✓ Imported as ($node_type)(ansi reset)"
    }

    print $"\n(ansi green)Pages summary:(ansi reset)"
    print $"  Notes: ($notes)"
    print $"  Decisions: ($decisions)"
    print $"  Guidelines: ($guidelines)"
    print $"  Patterns: ($patterns)"
    print $"  Total: ($imported)/($total) imported"
}

def import_journals [journals_dir: string, kogral_dir: string] {
    let files = glob $"($journals_dir)/**/*.md"
    let total = $files | length

    mkdir $"($kogral_dir)/journal"

    mut imported = 0

    for file in $files {
        let filename = $file | path basename
        print $"  Importing ($filename)..."

        # Phase 1: Read Logseq journal format
        let content = open $file

        # Phase 2: Convert to KOGRAL journal format
        let kogral_content = convert_logseq_to_kogral $content "journal"

        # Phase 3: Save to journal directory
        $kogral_content | save $"($kogral_dir)/journal/($filename)"

        $imported = $imported + 1
        print $"    (ansi green)✓ Imported(ansi reset)"
    }

    print $"\n(ansi green)Journals imported: ($imported)/($total)(ansi reset)"
}

def detect_node_type [content: string] {
    # Check for type hints in properties or content
    if ($content | str contains "type:: decision") or ($content | str contains "# Decision") {
        "decision"
    } else if ($content | str contains "type:: guideline") or ($content | str contains "# Guideline") {
        "guideline"
    } else if ($content | str contains "type:: pattern") or ($content | str contains "# Pattern") {
        "pattern"
    } else {
        "note"
    }
}

def convert_logseq_to_kogral [content: string, node_type: string] {
    let lines = $content | lines

    # Phase 1: Parse Logseq properties (key:: value format)
    let props = parse_logseq_properties $lines
    let body_start = get_body_start_index $lines

    # Phase 2: Extract metadata from properties
    let title = $props.title? | default (extract_title_from_lines $lines)
    let created = $props.created? | default (date now | format date "%Y-%m-%dT%H:%M:%SZ")
    let modified = $props.modified? | default (date now | format date "%Y-%m-%dT%H:%M:%SZ")
    let status = match $node_type {
        "journal" => "draft",
        _ => "active"
    }

    # Phase 3: Extract tags and relationships from properties
    let tags = parse_tags_from_properties $props
    let relates_to = parse_references_from_property ($props.relates_to? | default "")
    let depends_on = parse_references_from_property ($props.depends_on? | default "")
    let implements = parse_references_from_property ($props.implements? | default "")
    let extends = parse_references_from_property ($props.extends? | default "")

    # Phase 4: Build YAML frontmatter
    let frontmatter = build_yaml_frontmatter {
        type: $node_type,
        title: $title,
        created: $created,
        modified: $modified,
        status: $status,
        tags: $tags,
        relates_to: $relates_to,
        depends_on: $depends_on,
        implements: $implements,
        extends: $extends
    }

    # Phase 5: Extract body and preserve wikilinks
    let body = if $body_start < ($lines | length) {
        $lines | skip $body_start | str join "\n"
    } else {
        ""
    }

    # Phase 6: Convert Logseq-specific syntax
    let converted_body = convert_logseq_syntax $body

    $"($frontmatter)\n($converted_body)"
}

def parse_logseq_properties [lines: list] {
    mut props = {}

    # Parse properties until blank line or content starts
    for line in $lines {
        if ($line | str trim | is-empty) {
            break
        }

        # Match pattern: key:: value (regex replace needs the -r flag);
        # normalize dashes so "relates-to::" is found as $props.relates_to?
        if ($line =~ '^[\w-]+::') {
            let key = $line | str replace -r '^(\w[\w-]*)::.*' '$1' | str replace -a "-" "_"
            let value = $line | str replace -r '^[\w-]+::\s*' '' | str trim
            $props = ($props | insert $key $value)
        }
    }

    $props
}

def get_body_start_index [lines: list] {
    # Find where properties end (first blank line or non-property line)
    mut idx = 0

    for line in $lines {
        if ($line | str trim | is-empty) {
            return ($idx + 1)
        }

        if not ($line =~ '^[\w-]+::') {
            return $idx
        }

        $idx = $idx + 1
    }

    $idx
}

def extract_title_from_lines [lines: list] {
    # Extract from first heading or property
    for line in $lines {
        if ($line =~ '^#+ ') {
            return ($line | str replace -r '^#+\s+' '')
        }

        if ($line =~ '^title::') {
            return ($line | str replace -r '^title::\s*' '')
        }
    }

    "Untitled"
}

def parse_tags_from_properties [props: record] {
    mut tags = []

    # Check tags property
    if ($props.tags? | is-empty) {
        return $tags
    }

    let tags_str = $props.tags?

    # Extract [[tag]] format using split
    if ($tags_str | str contains "[[") {
        let parts = $tags_str | split row "[[" | skip 1
        for part in $parts {
            let tag = $part | split row "]]" | get 0
            if ($tag | str length) > 0 {
                $tags = ($tags | append $tag)
            }
        }
    } else {
        # Extract comma-separated format
        $tags = ($tags_str | split row ',' | each { |t| $t | str trim })
    }

    $tags
}

def parse_references_from_property [prop: string] {
    if ($prop | str length) == 0 {
        return []
    }

    # Extract [[ref]] format using split
    mut refs = []
    if ($prop | str contains "[[") {
        let parts = $prop | split row "[[" | skip 1
        for part in $parts {
            let ref = $part | split row "]]" | get 0
            if ($ref | str length) > 0 {
                $refs = ($refs | append $ref)
            }
        }
    }
    $refs
}

def build_yaml_frontmatter [data: record] {
    mut fm = "---\n"
    $fm = $fm + $"type: ($data.type)\n"
    $fm = $fm + $"title: ($data.title)\n"
    $fm = $fm + $"created: ($data.created)\n"
    $fm = $fm + $"modified: ($data.modified)\n"
    $fm = $fm + $"status: ($data.status)\n"

    if ($data.tags | length) > 0 {
        let quoted_tags = $data.tags | each { |t| $'"($t)"' }
        let tags_str = $quoted_tags | str join ", "
        $fm = $fm + $"tags: [($tags_str)]\n"
    }

    if ($data.relates_to | length) > 0 {
        $fm = $fm + "relates_to:\n"
        for ref in $data.relates_to {
            $fm = $fm + $"  - ($ref)\n"
        }
    }

    if ($data.depends_on | length) > 0 {
        $fm = $fm + "depends_on:\n"
        for ref in $data.depends_on {
            $fm = $fm + $"  - ($ref)\n"
        }
    }

    if ($data.implements | length) > 0 {
        $fm = $fm + "implements:\n"
        for ref in $data.implements {
            $fm = $fm + $"  - ($ref)\n"
        }
    }

    if ($data.extends | length) > 0 {
        $fm = $fm + "extends:\n"
        for ref in $data.extends {
            $fm = $fm + $"  - ($ref)\n"
        }
    }

    $fm + "---"
}

def convert_logseq_syntax [body: string] {
    # Strip Logseq-specific task markers that have no standard markdown
    # equivalent (TODO/DONE are left untouched)
    mut converted = $body | str replace -a 'LATER ' ''
    $converted = $converted | str replace -a 'NOW ' ''
    $converted = $converted | str replace -a 'WAITING ' ''
    $converted = $converted | str replace -a 'CANCELLED ' ''

    $converted
}
218
scripts/kogral-migrate.nu
Normal file
@ -0,0 +1,218 @@
#!/usr/bin/env nu
# Run schema migrations for KOGRAL
#
# Usage: nu kogral-migrate.nu [--target <version>] [--dry-run]
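#
# Example:
#   nu kogral-migrate.nu --target 1.2.0 --dry-run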

def main [
    --target: string = "latest"          # Target migration version
    --dry-run                            # Show what would be migrated without making changes
    --kogral-dir: string = ".kogral"     # KOGRAL directory
] {
    print $"(ansi green_bold)KOGRAL Migration(ansi reset)"
    print $"Target version: ($target)"
    print $"KOGRAL Directory: ($kogral_dir)"

    if $dry_run {
        print $"(ansi yellow)DRY RUN MODE - No changes will be made(ansi reset)"
    }

    # Check if .kogral directory exists
    if not ($kogral_dir | path exists) {
        print $"(ansi red)Error: KOGRAL directory not found: ($kogral_dir)(ansi reset)"
        exit 1
    }

    # Load current version from config
    let config_path = $"($kogral_dir)/config.toml"
    if not ($config_path | path exists) {
        print $"(ansi red)Error: Config file not found: ($config_path)(ansi reset)"
        exit 1
    }

    let config = open $config_path | from toml
    let current_version = $config.graph.version

    print $"\n(ansi cyan_bold)Current schema version:(ansi reset) ($current_version)"

    # Define available migrations
    let migrations = [
        { version: "1.0.0", description: "Initial schema" },
        { version: "1.1.0", description: "Add metadata field to nodes" },
        { version: "1.2.0", description: "Add embedding support" },
    ]

    print $"\n(ansi cyan_bold)Available migrations:(ansi reset)"
    for migration in $migrations {
        let indicator = if $migration.version == $current_version {
            $"(ansi green)✓ [CURRENT](ansi reset)"
        } else {
            "  "
        }
        print $"($indicator) ($migration.version) - ($migration.description)"
    }

    # Determine migrations to run
    let target_version = if $target == "latest" {
        $migrations | last | get version
    } else {
        $target
    }

    print $"\n(ansi cyan_bold)Target version:(ansi reset) ($target_version)"

    if $current_version == $target_version {
        print $"\n(ansi green)Already at target version. No migrations needed.(ansi reset)"
        exit 0
    }

    # Find migrations to apply (lexicographic comparison is sufficient for
    # these single-digit semver strings)
    let to_apply = $migrations | where version > $current_version and version <= $target_version

    if ($to_apply | length) == 0 {
        print $"\n(ansi yellow)No migrations to apply(ansi reset)"
        exit 0
    }

    print $"\n(ansi cyan_bold)Migrations to apply:(ansi reset)"
    for migration in $to_apply {
        print $"  → ($migration.version): ($migration.description)"
    }

    if $dry_run {
        print $"\n(ansi yellow)[DRY RUN] Would apply ($to_apply | length) migration\(s\)(ansi reset)"
        exit 0
    }

    # Apply migrations
    print $"\n(ansi cyan_bold)Applying migrations...(ansi reset)"

    mut final_version = $current_version

    for migration in $to_apply {
        print $"\n(ansi blue)Migrating to ($migration.version)...(ansi reset)"
        apply_migration $migration $kogral_dir
        $final_version = $migration.version
    }

    # Phase: Update version in config
    print $"\n(ansi cyan_bold)Updating config version...(ansi reset)"
    update_config_version $config_path $final_version

    print $"\n(ansi green_bold)✓ Migration completed(ansi reset)"
    print $"Schema version: ($current_version) → ($final_version)"
}

def apply_migration [migration: record, kogral_dir: string] {
    match $migration.version {
        "1.0.0" => {
            print "  ✓ Initial schema (no action needed)"
        },
        "1.1.0" => {
            # Phase 1: Add metadata field to existing nodes
            print "  Adding metadata field support..."
            add_metadata_field $kogral_dir
            print "  ✓ Metadata field added"
        },
        "1.2.0" => {
            # Phase 2: Add embedding support
            print "  Adding embedding support..."
            add_embedding_support $kogral_dir
            print "  ✓ Embedding support added"
        },
        _ => {
            print $"  (ansi yellow)Unknown migration version: ($migration.version)(ansi reset)"
        }
    }
}

def update_config_version [config_path: string, new_version: string] {
    # Phase 1: Read current config
    let config = open $config_path | from toml

    # Phase 2: Update version (upsert follows the graph.version cell path;
    # insert with a quoted "graph.version" key would create a literal key)
    let updated = $config | upsert graph.version $new_version

    # Phase 3: Convert back to TOML and save
    $updated | to toml | save --force $config_path
    print "  ✓ Config version updated"
}

def add_metadata_field [kogral_dir: string] {
    # Phase 1: Find all markdown files
    let all_files = find_all_markdown_files $kogral_dir

    # Phase 2: Process each file
    mut updated = 0
    for file in $all_files {
        let content = open $file
        let lines = $content | lines

        # Phase 3: Check if a metadata field already exists in the frontmatter
        let has_metadata_field = $lines | any { |l| $l =~ '^metadata:' }

        if not $has_metadata_field {
            # Phase 4: Add empty metadata field before closing ---
            let updated_content = insert_metadata_field $content
            $updated_content | save --force $file
            $updated = $updated + 1
        }
    }

    print $"  Updated ($updated) files"
}

def add_embedding_support [kogral_dir: string] {
    # Embeddings are derived from content, so no file rewrite is needed here
    print "  Embedding vectors will be generated on next reindex"
    print "  Run 'kogral-reindex.nu' after migration to populate embeddings"
}

def find_all_markdown_files [kogral_dir: string] {
    # Phase 1: Collect from all node type directories
    mut all_files = []

    for dir_type in ["notes" "decisions" "guidelines" "patterns" "journal"] {
        let dir_path = $"($kogral_dir)/($dir_type)"
        if ($dir_path | path exists) {
            let files = glob $"($dir_path)/**/*.md"
            $all_files = ($all_files | append $files)
        }
    }

    $all_files
}

def insert_metadata_field [content: string] {
let lines = $content | lines
|
||||
|
||||
# Phase 1: Find the closing --- of frontmatter
|
||||
mut closing_idx = 0
|
||||
mut found_opening = false
|
||||
|
||||
for idx in (0..<($lines | length)) {
|
||||
if $idx == 0 and ($lines | get $idx | str trim) == "---" {
|
||||
$found_opening = true
|
||||
continue
|
||||
}
|
||||
|
||||
if $found_opening and ($lines | get $idx | str trim) == "---" {
|
||||
$closing_idx = $idx
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
# Phase 2: Insert metadata field before closing ---
|
||||
if $found_opening and $closing_idx > 0 {
|
||||
let before = $lines | take $closing_idx
|
||||
let after = $lines | skip $closing_idx
|
||||
|
||||
let updated = $before | append ["metadata: {}"] | append $after
|
||||
$updated | str join "\n"
|
||||
} else {
|
||||
# No frontmatter, return as-is
|
||||
$content
|
||||
}
|
||||
}
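
# Example (hypothetical input):
#   insert_metadata_field "---\ntitle: X\n---\nbody"
#   # => "---\ntitle: X\nmetadata: {}\n---\nbody"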
211
scripts/kogral-reindex.nu
Normal file
@ -0,0 +1,211 @@
#!/usr/bin/env nu
# Rebuild embeddings index for KOGRAL
#
# Usage: nu kogral-reindex.nu [--provider <openai|claude|ollama|fastembed>] [--batch-size <n>]

def main [
    --provider: string = "fastembed" # Embedding provider
    --batch-size: int = 10           # Number of nodes to process at once
    --dry-run                        # Show what would be indexed without making changes
    --kogral-dir: string = ".kogral" # KOGRAL directory
    --force                          # Force reindex even if embeddings exist
] {
    print $"(ansi green_bold)KOGRAL Reindexing(ansi reset)"
    print $"Provider: ($provider)"
    print $"Batch size: ($batch_size)"
    print $"KOGRAL Directory: ($kogral_dir)"

    if $dry_run {
        print $"(ansi yellow)DRY RUN MODE - No changes will be made(ansi reset)"
    }

    # Check if .kogral directory exists
    if not ($kogral_dir | path exists) {
        print $"(ansi red)Error: KOGRAL directory not found: ($kogral_dir)(ansi reset)"
        exit 1
    }

    # Load configuration
    let config_path = $"($kogral_dir)/config.toml"
    if not ($config_path | path exists) {
        print $"(ansi red)Error: Config file not found: ($config_path)(ansi reset)"
        exit 1
    }

    let config = open $config_path | from toml

    # Check if embeddings are enabled
    if not ($config.embeddings?.enabled? | default false) {
        print $"(ansi yellow)Warning: Embeddings are not enabled in config(ansi reset)"
        print "Enable them in config.toml:"
        print "[embeddings]"
        print "enabled = true"
        print $"provider = \"($provider)\""

        if not $force {
            print $"\nUse --force to reindex anyway"
            exit 1
        }
    }

    # Count markdown files
    print $"\n(ansi cyan_bold)Scanning files...(ansi reset)"
    let files = find_markdown_files $kogral_dir

    let total_files = $files | length
    print $"Found ($total_files) markdown files"

    if $total_files == 0 {
        print $"\n(ansi yellow)No files to index(ansi reset)"
        exit 0
    }

    # Group files by type
    let by_type = $files | group-by type | transpose type files

    print $"\n(ansi cyan_bold)Files by type:(ansi reset)"
    for group in $by_type {
        let count = $group.files | length
        print $"  ($group.type): ($count)"
    }

    # Calculate batches
    let num_batches = ($total_files / $batch_size | math ceil | into int)
    print $"\nWill process in ($num_batches) batches of ($batch_size)"

    if $dry_run {
        print $"\n(ansi yellow)[DRY RUN] Would process ($total_files) files(ansi reset)"
        exit 0
    }

    # Process embeddings
    print $"\n(ansi cyan_bold)Generating embeddings...(ansi reset)"

    # `chunks` splits into non-overlapping groups (`window` would produce sliding windows)
    let batches = $files | chunks $batch_size

    mut batch_num = 1
    for batch in $batches {
        print $"\nBatch ($batch_num)/($num_batches):"
        process_batch $batch $provider

        $batch_num = $batch_num + 1
    }

    print $"\n(ansi green_bold)✓ Reindexing completed(ansi reset)"
    print $"Processed ($total_files) files"
}

def find_markdown_files [kogral_dir: string] {
    let notes = (
        glob $"($kogral_dir)/notes/**/*.md"
        | each { |file| { path: $file, type: "note" } }
    )

    let decisions = (
        glob $"($kogral_dir)/decisions/**/*.md"
        | each { |file| { path: $file, type: "decision" } }
    )

    let guidelines = (
        glob $"($kogral_dir)/guidelines/**/*.md"
        | each { |file| { path: $file, type: "guideline" } }
    )

    let patterns = (
        glob $"($kogral_dir)/patterns/**/*.md"
        | each { |file| { path: $file, type: "pattern" } }
    )

    let journal = (
        glob $"($kogral_dir)/journal/**/*.md"
        | each { |file| { path: $file, type: "journal" } }
    )

    $notes | append $decisions | append $guidelines | append $patterns | append $journal
}

def process_batch [batch: list, provider: string] {
    mut processed = 0
    mut succeeded = 0

    for file in $batch {
        let filename = $file.path | path basename
        print $"  Processing ($filename) [($file.type)]..."

        # Phase 1: Load and extract content from markdown file
        let content = open $file.path
        let lines = $content | lines

        # Extract title from frontmatter
        let title = extract_title_from_lines $lines

        # Phase 2: Generate embedding via kogral CLI
        generate_embedding_for_file $file.path $title $provider

        $processed = $processed + 1
        $succeeded = $succeeded + 1

        # Rate limiting: short delay between provider calls
        sleep 50ms
    }

    print $"  (ansi green)✓ Batch completed: ($succeeded)/($processed) succeeded(ansi reset)"
}

def extract_title_from_lines [lines: list] {
    # Extract title from frontmatter
    # Format: title: Example Title
    for line in $lines {
        if ($line =~ '^title:') {
            # -r: treat the pattern as a regex (str replace is literal by default)
            let title = $line | str replace -r '^title:\s*' ''
            return ($title | str trim)
        }
    }
    "Unknown"
}

def generate_embedding_for_file [file_path: string, title: string, provider: string] {
    # Phase 1: Provider-specific embedding generation
    match $provider {
        "fastembed" => {
            # Use local fastembed model (no API calls needed)
            kogral search $title --limit 1 | ignore
        },
        "openai" => {
            # OpenAI API requires credentials
            if ($env.OPENAI_API_KEY? | is-empty) {
                print $"  (ansi yellow)⚠ OpenAI: OPENAI_API_KEY not set(ansi reset)"
                return
            }
            kogral search $title --limit 1 | ignore
        },
        "claude" => {
            # Claude API requires credentials
            if ($env.ANTHROPIC_API_KEY? | is-empty) {
                print $"  (ansi yellow)⚠ Claude: ANTHROPIC_API_KEY not set(ansi reset)"
                return
            }
            kogral search $title --limit 1 | ignore
        },
        "ollama" => {
            # Ollama local server
            if (not (check_ollama_available)) {
                print $"  (ansi yellow)⚠ Ollama: Server not available at localhost:11434(ansi reset)"
                return
            }
            kogral search $title --limit 1 | ignore
        },
        _ => {
            print $"  (ansi red)✗ Unknown provider: ($provider)(ansi reset)"
            return
        }
    }

    print $"  (ansi green)✓ Embedding generated via ($provider)(ansi reset)"
}

def check_ollama_available [] {
    # Probe the local Ollama endpoint; any successful response counts as available
    try {
        http get --max-time 2sec http://localhost:11434/api/tags | ignore
        true
    } catch {
        false
    }
}
234
scripts/kogral-stats.nu
Normal file
@ -0,0 +1,234 @@
#!/usr/bin/env nu
# Display graph statistics and health metrics
#
# Usage: nu kogral-stats.nu [--format <table|json|summary>] [--kogral-dir <path>]

def main [
    --format: string = "summary"     # Output format: table, json, or summary
    --kogral-dir: string = ".kogral" # KOGRAL directory
    --show-tags                      # Show top tags
    --show-orphans                   # Show orphaned nodes (no relationships)
] {
    print $"(ansi green_bold)KOGRAL Statistics(ansi reset)"
    print $"KOGRAL Directory: ($kogral_dir)\n"

    # Check if .kogral directory exists
    if not ($kogral_dir | path exists) {
        print $"(ansi red)Error: KOGRAL directory not found: ($kogral_dir)(ansi reset)"
        exit 1
    }

    # Collect statistics
    print $"(ansi cyan)Collecting statistics...(ansi reset)"

    let stats = collect_stats $kogral_dir

    # Display based on format
    match $format {
        "json" => {
            $stats | to json
        },
        "table" => {
            display_table $stats
        },
        "summary" => {
            display_summary $stats $show_tags $show_orphans
        },
        _ => {
            print $"(ansi red)Error: Invalid format. Use: table, json, or summary(ansi reset)"
            exit 1
        }
    }
}

def collect_stats [kogral_dir: string]: nothing -> record {
    # Count files by type
    let notes_files = glob $"($kogral_dir)/notes/**/*.md"
    let decisions_files = glob $"($kogral_dir)/decisions/**/*.md"
    let guidelines_files = glob $"($kogral_dir)/guidelines/**/*.md"
    let patterns_files = glob $"($kogral_dir)/patterns/**/*.md"
    let journal_files = glob $"($kogral_dir)/journal/**/*.md"

    let notes_count = $notes_files | length
    let decisions_count = $decisions_files | length
    let guidelines_count = $guidelines_files | length
    let patterns_count = $patterns_files | length
    let journal_count = $journal_files | length

    let total_files = $notes_count + $decisions_count + $guidelines_count + $patterns_count + $journal_count

    # Calculate sizes: spread each path list into `ls`; 0b keeps the filesize type
    let notes_size = if $notes_count > 0 { ls ...$notes_files | get size | math sum } else { 0b }
    let decisions_size = if $decisions_count > 0 { ls ...$decisions_files | get size | math sum } else { 0b }
    let guidelines_size = if $guidelines_count > 0 { ls ...$guidelines_files | get size | math sum } else { 0b }
    let patterns_size = if $patterns_count > 0 { ls ...$patterns_files | get size | math sum } else { 0b }
    let journal_size = if $journal_count > 0 { ls ...$journal_files | get size | math sum } else { 0b }

    let total_size = $notes_size + $decisions_size + $guidelines_size + $patterns_size + $journal_size

    # Collect tags
    let all_files = $notes_files | append $decisions_files | append $guidelines_files | append $patterns_files | append $journal_files
    let tags = collect_tags $all_files

    # Load config
    let config_path = $"($kogral_dir)/config.toml"
    let config = if ($config_path | path exists) {
        open $config_path | from toml
    } else {
        { graph: { name: "unknown", version: "unknown" } }
    }

    # Health metrics
    let health = calculate_health $total_files $tags

    {
        graph: {
            name: $config.graph.name,
            version: $config.graph.version
        },
        counts: {
            notes: $notes_count,
            decisions: $decisions_count,
            guidelines: $guidelines_count,
            patterns: $patterns_count,
            journal: $journal_count,
            total: $total_files
        },
        sizes: {
            notes: $notes_size,
            decisions: $decisions_size,
            guidelines: $guidelines_size,
            patterns: $patterns_size,
            journal: $journal_size,
            total: $total_size
        },
        tags: $tags,
        health: $health
    }
}

def collect_tags [files: list]: nothing -> record {
    mut tag_counts = {}

    for file in $files {
        try {
            let content = open $file
            let has_frontmatter = $content | str starts-with "---"

            if $has_frontmatter {
                # Extract tags from frontmatter: find the closing --- delimiter
                let frontmatter_end = $content | str index-of "\n---"
                if $frontmatter_end != -1 {
                    let frontmatter = $content | str substring 0..$frontmatter_end

                    # Look for tags line
                    let tags_line = $frontmatter | lines | where { |l| $l =~ '^tags:' } | get -i 0 | default ""

                    if ($tags_line | str length) > 0 {
                        # Parse tags array [tag1, tag2, ...]
                        let tags = $tags_line | str replace "tags:" "" | str trim | str replace -a "[" "" | str replace -a "]" "" | str replace -a "\"" "" | split row ","

                        for tag in $tags {
                            let tag_clean = $tag | str trim
                            if ($tag_clean | str length) > 0 {
                                let current = $tag_counts | get -i $tag_clean | default 0
                                $tag_counts = ($tag_counts | upsert $tag_clean ($current + 1))
                            }
                        }
                    }
                }
            }
        }
    }

    let total_tags = if ($tag_counts | is-empty) { 0 } else { $tag_counts | values | math sum }
    let unique_tags = $tag_counts | columns | length

    let top_tags = $tag_counts | transpose tag count | sort-by count --reverse | first 10

    {
        total: $total_tags,
        unique: $unique_tags,
        top: $top_tags
    }
}

def calculate_health [total_files: int, tags: record]: nothing -> record {
    # Health metrics
    let has_content = $total_files > 0
    let has_diversity = $total_files > 10
    let well_tagged = $tags.total > ($total_files * 0.5)

    let score = if $has_content and $has_diversity and $well_tagged {
        "Excellent"
    } else if $has_content and $has_diversity {
        "Good"
    } else if $has_content {
        "Fair"
    } else {
        "Poor"
    }

    {
        score: $score,
        has_content: $has_content,
        has_diversity: $has_diversity,
        well_tagged: $well_tagged
    }
}

def display_summary [stats: record, show_tags: bool, show_orphans: bool] {
    print $"(ansi cyan_bold)═══ Graph Information ═══(ansi reset)"
    print $"Name: ($stats.graph.name)"
    print $"Version: ($stats.graph.version)"

    print $"\n(ansi cyan_bold)═══ Node Counts ═══(ansi reset)"
    print $"Notes:      ($stats.counts.notes)"
    print $"Decisions:  ($stats.counts.decisions)"
    print $"Guidelines: ($stats.counts.guidelines)"
    print $"Patterns:   ($stats.counts.patterns)"
    print $"Journal:    ($stats.counts.journal)"
    print $"(ansi green_bold)Total:      ($stats.counts.total)(ansi reset)"

    print $"\n(ansi cyan_bold)═══ Storage ═══(ansi reset)"
    print $"Total size: ($stats.sizes.total)"

    print $"\n(ansi cyan_bold)═══ Tags ═══(ansi reset)"
    print $"Total tags:  ($stats.tags.total)"
    print $"Unique tags: ($stats.tags.unique)"

    if $show_tags and ($stats.tags.top | length) > 0 {
        print $"\nTop tags:"
        for tag in $stats.tags.top {
            print $"  ($tag.tag): ($tag.count)"
        }
    }

    print $"\n(ansi cyan_bold)═══ Health Score ═══(ansi reset)"
    let health_color = match $stats.health.score {
        "Excellent" => "green_bold",
        "Good" => "green",
        "Fair" => "yellow",
        "Poor" => "red",
        _ => "white"
    }
    print $"Overall: (ansi $health_color)($stats.health.score)(ansi reset)"
    print $"Has content:   ($stats.health.has_content)"
    print $"Has diversity: ($stats.health.has_diversity)"
    print $"Well tagged:   ($stats.health.well_tagged)"
}

def display_table [stats: record] {
    let table_data = [
        { metric: "Notes", value: $stats.counts.notes },
        { metric: "Decisions", value: $stats.counts.decisions },
        { metric: "Guidelines", value: $stats.counts.guidelines },
        { metric: "Patterns", value: $stats.counts.patterns },
        { metric: "Journal", value: $stats.counts.journal },
        { metric: "Total Files", value: $stats.counts.total },
        { metric: "Total Size", value: $stats.sizes.total },
        { metric: "Unique Tags", value: $stats.tags.unique },
        { metric: "Health", value: $stats.health.score }
    ]

    $table_data
}
100
scripts/kogral-sync.nu
Normal file
@ -0,0 +1,100 @@
#!/usr/bin/env nu
# Sync filesystem with SurrealDB storage backend (bidirectional)
#
# Usage: nu kogral-sync.nu [--direction <to-storage|from-storage|bidirectional>] [--dry-run]

def main [
    --direction: string = "bidirectional" # Sync direction
    --dry-run                             # Show what would be synced without making changes
    --kogral-dir: string = ".kogral"      # KOGRAL directory
] {
    print $"(ansi green_bold)KOGRAL Sync(ansi reset)"
    print $"Direction: ($direction)"
    print $"KOGRAL Directory: ($kogral_dir)"

    if $dry_run {
        print $"(ansi yellow)DRY RUN MODE - No changes will be made(ansi reset)"
    }

    # Check if .kogral directory exists
    if not ($kogral_dir | path exists) {
        print $"(ansi red)Error: KOGRAL directory not found: ($kogral_dir)(ansi reset)"
        exit 1
    }

    # Verify kogral CLI is available
    if (which kogral | is-empty) {
        print $"(ansi red)Error: 'kogral' CLI not found. Install with: cargo install --path crates/kogral-cli(ansi reset)"
        exit 1
    }

    # Count files to sync
    print $"\n(ansi cyan_bold)Scanning files...(ansi reset)"
    let notes_count = (glob $"($kogral_dir)/**/notes/**/*.md" | length)
    let decisions_count = (glob $"($kogral_dir)/**/decisions/**/*.md" | length)
    let guidelines_count = (glob $"($kogral_dir)/**/guidelines/**/*.md" | length)
    let patterns_count = (glob $"($kogral_dir)/**/patterns/**/*.md" | length)

    let total_files = $notes_count + $decisions_count + $guidelines_count + $patterns_count

    print $"  Notes:      ($notes_count)"
    print $"  Decisions:  ($decisions_count)"
    print $"  Guidelines: ($guidelines_count)"
    print $"  Patterns:   ($patterns_count)"
    print $"  Total:      ($total_files)"

    if $total_files == 0 {
        print $"\n(ansi yellow)No files to sync(ansi reset)"
        exit 0
    }

    # Perform sync based on direction
    print $"\n(ansi cyan_bold)Starting sync...(ansi reset)"

    match $direction {
        "to-storage" => {
            sync_to_storage $kogral_dir $dry_run
        },
        "from-storage" => {
            sync_from_storage $kogral_dir $dry_run
        },
        "bidirectional" => {
            print "Step 1: Syncing to storage..."
            sync_to_storage $kogral_dir $dry_run
            print "\nStep 2: Syncing from storage..."
            sync_from_storage $kogral_dir $dry_run
        },
        _ => {
            print $"(ansi red)Error: Invalid direction. Use: to-storage, from-storage, or bidirectional(ansi reset)"
            exit 1
        }
    }

    print $"\n(ansi green_bold)✓ Sync completed(ansi reset)"
}

def sync_to_storage [kogral_dir: string, dry_run: bool] {
    print "  → Uploading markdown files to storage backend..."

    if $dry_run {
        print $"  (ansi yellow)[DRY RUN](ansi reset) Would upload all markdown files"
        return
    }

    # Execute sync to storage (the kogral CLI performs the actual transfer)
    kogral --project . sync
    print $"  (ansi green)✓ Upload completed(ansi reset)"
}

def sync_from_storage [kogral_dir: string, dry_run: bool] {
    print "  ← Downloading nodes from storage backend..."

    if $dry_run {
        print $"  (ansi yellow)[DRY RUN](ansi reset) Would download all nodes"
        return
    }

    # Execute sync from storage (the kogral CLI performs the actual transfer)
    kogral --project . sync
    print $"  (ansi green)✓ Download completed(ansi reset)"
}
399
templates/README.md
Normal file
@ -0,0 +1,399 @@
# Knowledge Base Templates

This directory contains Tera templates for generating and exporting knowledge base documents.

## Overview

Templates are divided into two categories:

1. **Document Templates** - Generate new KOGRAL documents with proper frontmatter
2. **Export Templates** - Export KOGRAL data to various formats (Logseq, JSON, reports)

## Document Templates

Located in the root `templates/` directory. Used to create new knowledge base entries.

### Available Templates

| Template | Purpose | Node Type |
|----------|---------|-----------|
| `note.md.tera` | General notes and observations | note |
| `decision.md.tera` | Architectural Decision Records (ADR) | decision |
| `guideline.md.tera` | Code guidelines and best practices | guideline |
| `pattern.md.tera` | Design patterns and solutions | pattern |
| `journal.md.tera` | Daily notes and journal entries | journal |
| `execution.md.tera` | Agent execution records (from Vapora) | execution |

### Template Variables

All document templates receive these common variables:

```rust
{
    id: String,               // UUID
    title: String,            // Node title
    created: DateTime,        // ISO 8601 timestamp
    modified: DateTime,       // ISO 8601 timestamp
    tags: Vec<String>,        // Tags
    status: NodeStatus,       // draft, active, superseded, archived
    content: String,          // Markdown content
    relates_to: Vec<String>,  // Related node IDs
    depends_on: Vec<String>,  // Dependency node IDs
    implements: Vec<String>,  // Pattern/guideline node IDs
    extends: Vec<String>,     // Extension node IDs
    project: Option<String>,  // Project identifier
}
```

### Type-Specific Variables

**Decision (ADR):**
```rust
{
    context: String,            // Problem context
    decision: String,           // Decision made
    consequences: Vec<String>,  // Impacts
    alternatives: Vec<{         // Alternatives considered
        name: String,
        description: String,
        pros: Vec<String>,
        cons: Vec<String>,
    }>,
}
```

**Guideline:**
```rust
{
    language: String,        // Programming language
    category: String,        // Category (error-handling, testing, etc.)
    overview: String,        // Brief overview
    rules: Vec<{             // Guideline rules
        title: String,
        description: String,
        rationale: String,
    }>,
    examples: Vec<{          // Code examples
        title: String,
        good: String,        // Good practice
        bad: String,         // Bad practice
        explanation: String,
    }>,
    exceptions: Vec<String>,
}
```

**Pattern:**
```rust
{
    problem: String,        // Problem statement
    solution: String,       // Solution description
    forces: Vec<String>,    // Constraints/forces
    context: String,        // When to use
    structure: String,      // Pattern structure
    implementation: Vec<{   // Implementation steps
        title: String,
        description: String,
        code: String,
        language: String,
    }>,
    consequences: {
        benefits: Vec<String>,
        drawbacks: Vec<String>,
    },
}
```

**Journal:**
```rust
{
    date: String,             // Date (YYYY-MM-DD)
    tasks: Vec<{              // Tasks for the day
        description: String,
        completed: bool,
    }>,
    highlights: Vec<String>,  // Daily highlights
    learnings: Vec<String>,   // Things learned
    links: Vec<String>,       // Related node IDs
}
```

**Execution:**
```rust
{
    task_type: String,   // Type of task
    agent: String,       // Agent name
    outcome: String,     // success, failure, etc.
    duration_ms: u64,    // Execution time
    steps: Vec<{         // Execution steps
        description: String,
        duration_ms: u64,
        result: String,
    }>,
    errors: Vec<{        // Errors encountered
        type: String,
        message: String,
        details: String,
    }>,
    metrics: Vec<{       // Performance metrics
        name: String,
        value: f64,
        unit: String,
    }>,
}
```

## Export Templates

Located in `templates/export/`. Used to export KOGRAL data to external formats.

### Available Export Templates

| Template | Format | Purpose |
|----------|--------|---------|
| `logseq-page.md.tera` | Logseq Markdown | Export single node to Logseq page |
| `logseq-journal.md.tera` | Logseq Markdown | Export journal to Logseq daily note |
| `summary.md.tera` | Markdown Report | Generate KOGRAL summary report |
| `graph.json.tera` | JSON | Export entire graph to JSON |

### Export Template Variables

**Logseq Export:**
```rust
{
    node: Node,  // Full node object
}
```

**Summary Export:**
```rust
{
    graph: {
        name: String,
        version: String,
        description: String,
    },
    timestamp: DateTime,
    stats: {
        total_nodes: usize,
        total_edges: usize,
        nodes_by_type: HashMap<NodeType, usize>,
        nodes_by_status: HashMap<NodeStatus, usize>,
        top_tags: Vec<(String, usize)>,
    },
    nodes: Vec<Node>,
}
```

**JSON Export:**
```rust
{
    graph: Graph,
    nodes: Vec<Node>,
    edges: Vec<Edge>,
    stats: Statistics,
}
```

## Usage Examples

### Generate a New Note

```rust
use std::path::Path;

use kogral_core::export::tera::TeraEngine;
use kogral_core::models::{Node, NodeType};

let tera = TeraEngine::new(Path::new("templates"))?;
let mut node = Node::new(NodeType::Note, "My Note".to_string());
node.content = "This is my note content".to_string();
node.tags = vec!["rust".to_string(), "kogral".to_string()];

let markdown = tera.render_node(&node)?;
```

### Export to Logseq

```rust
let logseq_md = tera.export_logseq(&node)?;
std::fs::write(".logseq/pages/my-note.md", logseq_md)?;
```

### Generate Summary Report

```rust
use chrono::Utc;
use tera::Context;

let mut context = Context::new();
context.insert("graph", &graph);
context.insert("timestamp", &Utc::now());
context.insert("stats", &statistics);
context.insert("nodes", &nodes);

let summary = tera.render_custom("export/summary.md.tera", &context)?;
```

## Customization

### Override Default Templates

Copy a template and modify it:

```bash
cp templates/note.md.tera my-templates/custom-note.md.tera
# Edit my-templates/custom-note.md.tera
```

Update configuration:

```nickel
{
  templates = {
    templates_dir = "my-templates",
    templates = {
      note = "custom-note.md.tera",
    },
  },
}
```

### Create Custom Templates

Add to `templates/custom/`:

```jinja2
---
id: {{ id }}
title: {{ title }}
custom_field: {{ my_custom_field }}
---

# {{ title }}

Custom template content here.
```

Register in config (hyphenated field names must be quoted in Nickel):

```nickel
{
  templates = {
    custom = {
      "my-template" = "custom/my-template.md.tera",
    },
  },
}
```

## Template Syntax

Templates use Tera syntax (similar to Jinja2):

### Variables
```jinja2
{{ variable }}
{{ object.field }}
{{ array.0 }}
```

### Filters
```jinja2
{{ text | upper }}
{{ date | date(format="%Y-%m-%d") }}
{{ content | truncate(length=100) }}
{{ json_data | json_encode | safe }}
```

### Conditionals
```jinja2
{% if condition %}
...
{% elif other_condition %}
...
{% else %}
...
{% endif %}
```

### Loops
```jinja2
{% for item in items %}
{{ item }}
{% endfor %}

{% for key, value in map %}
{{ key }}: {{ value }}
{% endfor %}
```

### Comments
```jinja2
{# This is a comment #}
```

## YAML Frontmatter

All document templates generate YAML frontmatter compatible with:

- **Logseq** - Wikilinks, properties
- **kogral-core parser** - Full schema validation
- **Git** - Human-readable diffs

Example:

```yaml
---
id: abc-123
type: note
title: My Note
created: 2026-01-17T10:30:00Z
modified: 2026-01-17T10:30:00Z
tags: ["rust", "kogral"]
status: draft
relates_to:
  - other-note-id
---
```

## Best Practices

1. **Keep Templates Simple** - Focus on structure, not complex logic
2. **Use Defaults** - Provide sensible defaults with `| default(value="...")` (see the sketch after this list)
3. **Indent Consistently** - Use `| indent(width=2)` for nested content
4. **Escape User Content** - Use `| escape` for user-provided text in HTML/JSON
5. **Document Custom Fields** - Add comments explaining custom template variables
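
A minimal sketch combining practices 2-4; the `summary` variable is illustrative, not part of the standard context:

```jinja2
{# 2: default for a possibly-missing variable, 3: indent nested block content #}
summary: |
  {{ summary | default(value="") | indent(width=2) }}
{# 4: escape user-provided text before embedding it in HTML/JSON #}
title_escaped: {{ title | escape }}
```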

## Troubleshooting

### Template Not Found

Ensure `templates_dir` in config points to the correct directory:

```nickel
templates = {
  templates_dir = "templates", # Relative to project root
}
```

### Variable Not Found

Check that the variable is provided in the template context. Add a default:

```jinja2
{{ variable | default(value="") }}
```

### Rendering Errors

Enable debug mode to see detailed error messages:

```rust
use tera::Tera;

let mut tera = Tera::new("templates/**/*.tera")?;
tera.autoescape_on(vec![]); // Disable autoescaping for markdown
```

## References

- [Tera Documentation](https://keats.github.io/tera/)
- [Logseq Markdown Format](https://docs.logseq.com/)
- kogral-core models: `crates/kogral-core/src/models.rs`
- Template engine: `crates/kogral-core/src/export/tera.rs`
96
templates/decision.md.tera
Normal file
@ -0,0 +1,96 @@
---
id: {{ id }}
type: decision
title: {{ title }}
created: {{ created }}
modified: {{ modified }}
tags: [{% for tag in tags %}"{{ tag }}"{% if not loop.last %}, {% endif %}{% endfor %}]
status: {{ status | default(value="proposed") }}
{% if relates_to and relates_to | length > 0 -%}
relates_to:
{% for rel in relates_to %}  - {{ rel }}
{% endfor %}
{%- endif %}
{% if depends_on and depends_on | length > 0 -%}
depends_on:
{% for dep in depends_on %}  - {{ dep }}
{% endfor %}
{%- endif %}
{% if supersedes and supersedes | length > 0 -%}
supersedes:
{% for sup in supersedes %}  - {{ sup }}
{% endfor %}
{%- endif %}
{% if project -%}
project: {{ project }}
{% endif -%}
context: |
  {{ context | default(value="") | indent(width=2) }}
decision: |
  {{ decision | default(value="") | indent(width=2) }}
{% if consequences and consequences | length > 0 -%}
consequences:
{% for consequence in consequences %}  - {{ consequence }}
{% endfor %}
{%- endif %}
---

# {{ title }}

## Status

{{ status | default(value="proposed") | title }}

{% if supersedes and supersedes | length > 0 %}
Supersedes: {% for sup in supersedes %}[[{{ sup }}]]{% if not loop.last %}, {% endif %}{% endfor %}
{% endif %}

## Context

{{ context | default(value="TODO: Describe the context and problem statement") }}

## Decision

{{ decision | default(value="TODO: Describe the decision made") }}

## Consequences

{% if consequences and consequences | length > 0 %}
{% for consequence in consequences %}
- {{ consequence }}
{% endfor %}
{% else %}
TODO: List the consequences of this decision (positive, negative, neutral)

- **Positive:**
- **Negative:**
- **Neutral:**
{% endif %}

{% if alternatives and alternatives | length > 0 %}
## Alternatives Considered

{% for alt in alternatives %}
### {{ alt.name }}

{{ alt.description }}

**Pros:**
{% for pro in alt.pros %}
- {{ pro }}
{% endfor %}

**Cons:**
{% for con in alt.cons %}
- {{ con }}
{% endfor %}
{% endfor %}
{% endif %}

{% if references and references | length > 0 %}
## References

{% for ref in references %}
- [[{{ ref }}]]
{% endfor %}
{% endif %}
117
templates/examples/decision-example.md
Normal file
@ -0,0 +1,117 @@
---
id: 660e8400-e29b-41d4-a716-446655440001
type: decision
title: Use Nickel for Configuration
created: 2026-01-15T09:00:00Z
modified: 2026-01-15T09:30:00Z
tags: ["architecture", "configuration", "nickel"]
status: accepted
relates_to:
  - pattern-config-driven-design
supersedes:
  - decision-use-toml-only
project: knowledge-base
context: |
  We need a configuration system for the knowledge base that is:
  - Type-safe at definition time
  - Composable and reusable
  - Validated before reaching Rust code
  - Easy to understand and maintain
decision: |
  Use Nickel (.ncl) as the primary configuration format, with TOML and JSON as fallbacks.

  Pattern: Nickel → JSON → serde → Rust structs

  This provides double validation: Nickel type checker + Rust serde.
consequences:
  - Adds Nickel CLI as a dependency for config export
  - Provides compile-time type safety for configurations
  - Enables schema composition and inheritance
  - Clear error messages with line numbers
  - Users can still use TOML/JSON if Nickel not available
---

# Use Nickel for Configuration

## Status

Accepted

Supersedes: [[decision-use-toml-only]]

## Context

We need a configuration system for the knowledge base that is:

- Type-safe at definition time
- Composable and reusable
- Validated before reaching Rust code
- Easy to understand and maintain

The previous approach used TOML only, which lacked compile-time validation and composition features.

## Decision

Use Nickel (.ncl) as the primary configuration format, with TOML and JSON as fallbacks.

**Pattern:** Nickel → JSON → serde → Rust structs

This provides double validation: the Nickel type checker validates .ncl files, then Rust serde validates the exported JSON.

**Implementation:**

- Define schemas in `schemas/*.ncl`
- Export via `nickel export --format json`
- Load JSON in Rust via serde
- Fall back to TOML/JSON if Nickel CLI unavailable
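
A minimal sketch of the loading side, assuming illustrative struct and field names (the real types live in `crates/kogral-core/src/config/`):

```rust
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct GraphConfig {
    name: String,
    version: String,
}

#[derive(Debug, Deserialize)]
struct Config {
    graph: GraphConfig,
}

fn load_config(json: &str) -> Result<Config, serde_json::Error> {
    // The JSON was already type-checked by Nickel before export
    serde_json::from_str(json)
}
```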

## Consequences

**Positive:**

- Type safety at configuration definition time
- Compile errors show exact line numbers in config files
- Schema composition enables inheritance and overrides
- Documentation built into the schema (via `| doc "..."`)
- IDE support via the Nickel LSP
- Still supports TOML/JSON for users without Nickel

**Negative:**

- Adds the Nickel CLI as a build-time dependency
- Users need to learn Nickel syntax (though TOML/JSON still work)
- Extra build step (ncl → json) for Nickel users

**Neutral:**

- Config loading code needs to handle multiple formats
- Schemas must be maintained in Nickel (but double as documentation)

## Alternatives Considered

### Alternative 1: TOML Only

**Pros:**

- Simple, widely known format
- No additional dependencies
- Direct serde deserialization

**Cons:**

- No compile-time type checking
- No schema composition
- Errors surface only at runtime
- Limited validation

### Alternative 2: JSON Schema

**Pros:**

- Widely supported
- Validation before Rust code
- JSON Schema ecosystem

**Cons:**

- JSON Schema is verbose and complex
- Lacks the composition features of Nickel
- Error messages are not as clear
- Requires a separate validation step

## References

- [[pattern-config-driven-design]]
- [Nickel Language](https://nickel-lang.org/)
- Implementation: `crates/kogral-core/src/config/`
60
templates/examples/note-example.md
Normal file
@ -0,0 +1,60 @@
---
id: 550e8400-e29b-41d4-a716-446655440000
type: note
title: Example Note - Rust Error Handling
created: 2026-01-17T10:30:00Z
modified: 2026-01-17T10:35:00Z
tags: ["rust", "error-handling", "best-practices"]
status: active
relates_to:
  - guideline-rust-errors
  - pattern-result-type
depends_on:
  - guideline-rust-basics
project: knowledge-base
---

# Example Note - Rust Error Handling

This is an example of a note document generated from the `note.md.tera` template.

## Overview

Rust error handling uses the `Result<T, E>` type for recoverable errors and `panic!` for unrecoverable errors.

## Key Points

- Always use `Result<T>` for operations that can fail
- Use the `?` operator for error propagation
- Create custom error types with `thiserror`
- Provide context with error messages

## Best Practices

1. **Never use `unwrap()` in production code** (see the sketch after this list)
   - Use the `?` operator instead
   - Or use `unwrap_or()`, `unwrap_or_else()` with defaults

2. **Define clear error types**
   ```rust
   #[derive(Debug, thiserror::Error)]
   pub enum MyError {
       #[error("IO error: {0}")]
       Io(#[from] std::io::Error),

       #[error("Parse error: {0}")]
       Parse(String),
   }
   ```

3. **Provide helpful error messages**
   ```rust
   let config = load_config()
       .map_err(|e| format!("Failed to load config from {}: {}", path, e))?;
   ```
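
A minimal sketch for point 1, using a hypothetical `read_port` helper:

```rust
use std::num::ParseIntError;

fn read_port(raw: &str) -> Result<u16, ParseIntError> {
    raw.trim().parse::<u16>()
}

fn main() {
    // Instead of read_port(...).unwrap(), fall back to a default on error
    let port = read_port("8080").unwrap_or_else(|_| 8080);
    println!("listening on port {port}");
}
```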

## References

- [[guideline-rust-errors]]
- [[pattern-result-type]]
- [[decision-use-thiserror]]
102
templates/execution.md.tera
Normal file
@ -0,0 +1,102 @@
---
id: {{ id }}
type: execution
title: {{ title }}
created: {{ created }}
modified: {{ modified }}
tags: [{% for tag in tags %}"{{ tag }}"{% if not loop.last %}, {% endif %}{% endfor %}]
status: active
{% if task_type -%}
task_type: {{ task_type }}
{% endif -%}
{% if agent -%}
agent: {{ agent }}
{% endif -%}
{% if outcome -%}
outcome: {{ outcome }}
{% endif -%}
{% if duration_ms -%}
duration_ms: {{ duration_ms }}
{% endif -%}
{% if relates_to and relates_to | length > 0 -%}
relates_to:
{% for rel in relates_to %}  - {{ rel }}
{% endfor %}
{%- endif %}
{% if implements and implements | length > 0 -%}
implements:
{% for impl in implements %}  - {{ impl }}
{% endfor %}
{%- endif %}
{% if project -%}
project: {{ project }}
{% endif -%}
---

# {{ title }}

{% if task_type %}**Task Type:** {{ task_type }}{% endif %}
{% if agent %}**Agent:** {{ agent }}{% endif %}
{% if outcome %}**Outcome:** {{ outcome }}{% endif %}
{% if duration_ms %}**Duration:** {{ duration_ms }}ms{% endif %}

## Summary

{{ summary | default(value=content) | default(value="") }}

{% if steps and steps | length > 0 %}
## Execution Steps

{% for step in steps %}
{{ loop.index }}. {{ step.description }}
   {% if step.duration_ms %}*Duration: {{ step.duration_ms }}ms*{% endif %}
   {% if step.result %}
   **Result:** {{ step.result }}
   {% endif %}
{% endfor %}
{% endif %}

{% if errors and errors | length > 0 %}
## Errors

{% for error in errors %}
- **{{ error.type }}:** {{ error.message }}
  {% if error.details %}
  ```
  {{ error.details }}
  ```
  {% endif %}
{% endfor %}
{% endif %}

{% if metrics and metrics | length > 0 %}
## Metrics

{% for metric in metrics %}
- **{{ metric.name }}:** {{ metric.value }} {% if metric.unit %}{{ metric.unit }}{% endif %}
{% endfor %}
{% endif %}

{% if artifacts and artifacts | length > 0 %}
## Artifacts

{% for artifact in artifacts %}
- {{ artifact.name }}: `{{ artifact.path }}`
{% endfor %}
{% endif %}

{% if recommendations and recommendations | length > 0 %}
## Recommendations

{% for rec in recommendations %}
- {{ rec }}
{% endfor %}
{% endif %}

{% if references and references | length > 0 %}
## References

{% for ref in references %}
- [[{{ ref }}]]
{% endfor %}
{% endif %}
49
templates/export/graph.json.tera
Normal file
@ -0,0 +1,49 @@
{
  "graph": {
    "name": "{{ graph.name }}",
    "version": "{{ graph.version }}",
    "description": "{{ graph.description | escape }}",
    "created": "{{ graph.created }}",
    "modified": "{{ graph.modified }}",
    "metadata": {{ graph.metadata | json_encode | safe }}
  },
  "nodes": [
    {% for node in nodes -%}
    {
      "id": "{{ node.id }}",
      "type": "{{ node.type }}",
      "title": "{{ node.title | escape }}",
      "created": "{{ node.created }}",
      "modified": "{{ node.modified }}",
      "content": "{{ node.content | escape }}",
      "tags": [{% for tag in node.tags %}"{{ tag | escape }}"{% if not loop.last %}, {% endif %}{% endfor %}],
      "status": "{{ node.status }}",
      "relates_to": [{% for rel in node.relates_to %}"{{ rel }}"{% if not loop.last %}, {% endif %}{% endfor %}],
      "depends_on": [{% for dep in node.depends_on %}"{{ dep }}"{% if not loop.last %}, {% endif %}{% endfor %}],
      "implements": [{% for impl in node.implements %}"{{ impl }}"{% if not loop.last %}, {% endif %}{% endfor %}],
      "extends": [{% for ext in node.extends %}"{{ ext }}"{% if not loop.last %}, {% endif %}{% endfor %}]{% if node.project %},
      "project": "{{ node.project }}"{% endif %}{% if node.metadata %},
      "metadata": {{ node.metadata | json_encode | safe }}{% endif %}
    }{% if not loop.last %},{% endif %}
    {% endfor %}
  ],
  "edges": [
    {% for edge in edges -%}
    {
      "from": "{{ edge.from }}",
      "to": "{{ edge.to }}",
      "type": "{{ edge.edge_type }}",
      "strength": {{ edge.strength }},
      "created": "{{ edge.created }}"{% if edge.metadata %},
      "metadata": {{ edge.metadata | json_encode | safe }}{% endif %}
    }{% if not loop.last %},{% endif %}
    {% endfor %}
  ]{% if stats %},
  "stats": {
    "total_nodes": {{ stats.total_nodes }},
    "total_edges": {{ stats.total_edges }},
    "nodes_by_type": {{ stats.nodes_by_type | json_encode | safe }},
    "nodes_by_status": {{ stats.nodes_by_status | json_encode | safe }}{% if stats.top_tags %},
    "top_tags": {{ stats.top_tags | json_encode | safe }}{% endif %}
  }{% endif %}
}
34
templates/export/logseq-journal.md.tera
Normal file
@ -0,0 +1,34 @@
- {% if node.title %}**{{ node.title }}**{% else %}Journal Entry{% endif %}
  id:: {{ node.id }}
  {% if node.tags and node.tags | length > 0 -%}
  tags:: {% for tag in node.tags %}#[[{{ tag }}]]{% if not loop.last %}, {% endif %}{% endfor %}
  {% endif -%}
  {% if node.project -%}
  project:: [[{{ node.project }}]]
  {% endif -%}
-
  {{ node.content | replace(from="\n", to="\n  ") }}
{% if node.metadata.tasks and node.metadata.tasks | length > 0 %}
- ## Tasks
{% for task in node.metadata.tasks %}
  - {% if task.completed %}DONE{% else %}TODO{% endif %} {{ task.description }}
{% endfor %}
{% endif %}
{% if node.metadata.highlights and node.metadata.highlights | length > 0 %}
- ## Highlights
{% for highlight in node.metadata.highlights %}
  - {{ highlight }}
{% endfor %}
{% endif %}
{% if node.metadata.learnings and node.metadata.learnings | length > 0 %}
- ## Learnings
{% for learning in node.metadata.learnings %}
  - {{ learning }}
{% endfor %}
{% endif %}
{% if node.relates_to and node.relates_to | length > 0 %}
- ## Related
{% for rel in node.relates_to %}
  - [[{{ rel }}]]
{% endfor %}
{% endif %}
25
templates/export/logseq-page.md.tera
Normal file
@ -0,0 +1,25 @@
{% set node_type_label = node.type | title -%}
- {% if node.status == "draft" %}🚧{% elif node.status == "active" %}✅{% elif node.status == "superseded" %}📦{% elif node.status == "archived" %}🗄️{% endif %} **{{ node.title }}** #[[{{ node_type_label }}]]
  id:: {{ node.id }}
  created:: [[{{ node.created | date(format="%Y-%m-%d") }}]]
  modified:: [[{{ node.modified | date(format="%Y-%m-%d") }}]]
  {% if node.tags and node.tags | length > 0 -%}
  tags:: {% for tag in node.tags %}#[[{{ tag }}]]{% if not loop.last %}, {% endif %}{% endfor %}
  {% endif -%}
  {% if node.project -%}
  project:: [[{{ node.project }}]]
  {% endif -%}
  {% if node.relates_to and node.relates_to | length > 0 -%}
  relates-to:: {% for rel in node.relates_to %}[[{{ rel }}]]{% if not loop.last %}, {% endif %}{% endfor %}
  {% endif -%}
  {% if node.depends_on and node.depends_on | length > 0 -%}
  depends-on:: {% for dep in node.depends_on %}[[{{ dep }}]]{% if not loop.last %}, {% endif %}{% endfor %}
  {% endif -%}
  {% if node.implements and node.implements | length > 0 -%}
  implements:: {% for impl in node.implements %}[[{{ impl }}]]{% if not loop.last %}, {% endif %}{% endfor %}
  {% endif -%}
  {% if node.extends and node.extends | length > 0 -%}
  extends:: {% for ext in node.extends %}[[{{ ext }}]]{% if not loop.last %}, {% endif %}{% endfor %}
  {% endif -%}
-
  {{ node.content | replace(from="\n", to="\n  ") }}
59
templates/export/summary.md.tera
Normal file
@ -0,0 +1,59 @@
# {{ graph.name }} - Knowledge Base Summary

**Version:** {{ graph.version }}
{% if graph.description -%}
**Description:** {{ graph.description }}
{% endif -%}
**Generated:** {{ timestamp }}
**Total Nodes:** {{ stats.total_nodes }}
**Total Edges:** {{ stats.total_edges }}

## Statistics

### Nodes by Type
{% for type, count in stats.nodes_by_type %}
- **{{ type | title }}:** {{ count }}
{% endfor %}

### Nodes by Status
{% for status, count in stats.nodes_by_status %}
- **{{ status | title }}:** {{ count }}
{% endfor %}

{% if stats.top_tags and stats.top_tags | length > 0 %}
### Top Tags
{% for tag, count in stats.top_tags %}
- **{{ tag }}:** {{ count }} nodes
{% endfor %}
{% endif %}

{% if nodes and nodes | length > 0 %}
## Nodes

{% for node in nodes %}
### {{ node.title }}

**Type:** {{ node.type | title }}
**Status:** {{ node.status | title }}
**Created:** {{ node.created | date(format="%Y-%m-%d %H:%M") }}
**Modified:** {{ node.modified | date(format="%Y-%m-%d %H:%M") }}
{% if node.tags and node.tags | length > 0 -%}
**Tags:** {% for tag in node.tags %}{{ tag }}{% if not loop.last %}, {% endif %}{% endfor %}
{% endif -%}

{{ node.content | truncate(length=200) }}

{% if node.relates_to and node.relates_to | length > 0 -%}
**Related:** {% for rel in node.relates_to %}{{ rel }}{% if not loop.last %}, {% endif %}{% endfor %}
{% endif %}

---

{% endfor %}
{% endif %}

{% if graph.metadata and graph.metadata.notes %}
## Notes

{{ graph.metadata.notes }}
{% endif %}
114
templates/guideline.md.tera
Normal file
@ -0,0 +1,114 @@
---
id: {{ id }}
type: guideline
title: {{ title }}
created: {{ created }}
modified: {{ modified }}
tags: [{% for tag in tags %}"{{ tag }}"{% if not loop.last %}, {% endif %}{% endfor %}]
status: {{ status | default(value="active") }}
{% if language -%}
language: {{ language }}
{% endif -%}
{% if category -%}
category: {{ category }}
{% endif -%}
{% if relates_to and relates_to | length > 0 -%}
relates_to:
{% for rel in relates_to %} - {{ rel }}
{% endfor %}
{%- endif %}
{% if extends and extends | length > 0 -%}
extends:
{% for ext in extends %} - {{ ext }}
{% endfor %}
{%- endif %}
{% if project -%}
project: {{ project }}
{% endif -%}
---

# {{ title }}

{% if language %}**Language:** {{ language }}{% endif %}
{% if category %}**Category:** {{ category }}{% endif %}

## Overview

{{ overview | default(value="TODO: Brief overview of this guideline") }}

## Rules

{% if rules and rules | length > 0 %}
{% for rule in rules %}
{{ loop.index }}. **{{ rule.title }}**
   {{ rule.description }}
   {% if rule.rationale %}
   *Rationale:* {{ rule.rationale }}
   {% endif %}
{% endfor %}
{% else %}
TODO: List the specific rules or best practices

1. **Rule Name**
   Description of the rule

   *Rationale:* Why this rule exists
{% endif %}

## Examples

{% if examples and examples | length > 0 %}
{% for example in examples %}
### {{ example.title }}

{% if example.description %}{{ example.description }}{% endif %}

{% if example.good %}
**✅ Good:**
```{{ language | default(value="") }}
{{ example.good }}
```
{% endif %}

{% if example.bad %}
**❌ Bad:**
```{{ language | default(value="") }}
{{ example.bad }}
```
{% endif %}

{% if example.explanation %}
{{ example.explanation }}
{% endif %}
{% endfor %}
{% else %}
TODO: Provide examples of good and bad practices

### Example 1

**✅ Good:**
```{{ language | default(value="") }}
// Good example
```

**❌ Bad:**
```{{ language | default(value="") }}
// Bad example
```
{% endif %}

{% if exceptions and exceptions | length > 0 %}
## Exceptions

{% for exception in exceptions %}
- {{ exception }}
{% endfor %}
{% endif %}

{% if references and references | length > 0 %}
## References

{% for ref in references %}
- [[{{ ref }}]]
{% endfor %}
{% endif %}
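The front matter above only emits its optional keys (language, category, relates_to, extends, project) when they are set, so a caller can render the template with just the required fields. As a rough sketch of how that rendering might look from the Rust side — the `.tera` extension and the Rust crates in this workspace suggest the tera crate, though the actual call site is not part of this commit and the glob is an assumption:

```rust
// Minimal sketch, not part of this commit: render guideline.md.tera
// with the tera crate. Crate choice and loader glob are assumptions.
use tera::{Context, Tera};

fn main() -> Result<(), tera::Error> {
    // Tera parses every template matched by the glob up front;
    // template names are relative to the glob's directory.
    let tera = Tera::new("templates/*.tera")?;

    let mut ctx = Context::new();
    // Keys mirror the template's required front-matter variables.
    ctx.insert("id", "guideline-error-handling");
    ctx.insert("title", "Error Handling");
    ctx.insert("created", "2025-01-01T00:00:00Z");
    ctx.insert("modified", "2025-01-01T00:00:00Z");
    ctx.insert("tags", &["rust", "errors"]);
    // Optional: gates both the front-matter key and the body line.
    ctx.insert("language", "rust");

    let markdown = tera.render("guideline.md.tera", &ctx)?;
    println!("{markdown}");
    Ok(())
}
```

Anything omitted from the context simply skips its `{% if %}` block or falls back to its `default` filter, so id, title, created, modified, and tags appear to be the only strictly required keys.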
60
templates/journal.md.tera
Normal file
@@ -0,0 +1,60 @@
---
id: {{ id }}
type: journal
title: {{ title }}
created: {{ created }}
modified: {{ modified }}
tags: [{% for tag in tags %}"{{ tag }}"{% if not loop.last %}, {% endif %}{% endfor %}]
status: active
{% if date -%}
date: {{ date }}
{% endif -%}
{% if relates_to and relates_to | length > 0 -%}
relates_to:
{% for rel in relates_to %} - {{ rel }}
{% endfor %}
{%- endif %}
{% if project -%}
project: {{ project }}
{% endif -%}
---

# {{ title }}

{% if date %}**Date:** {{ date }}{% endif %}

## Notes

{{ content | default(value="") }}

{% if tasks and tasks | length > 0 %}
## Tasks

{% for task in tasks %}
- [{% if task.completed %}x{% else %} {% endif %}] {{ task.description }}
{% endfor %}
{% endif %}

{% if highlights and highlights | length > 0 %}
## Highlights

{% for highlight in highlights %}
- {{ highlight }}
{% endfor %}
{% endif %}

{% if learnings and learnings | length > 0 %}
## Learnings

{% for learning in learnings %}
- {{ learning }}
{% endfor %}
{% endif %}

{% if links and links | length > 0 %}
## Links

{% for link in links %}
- [[{{ link }}]]
{% endfor %}
{% endif %}
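The tasks loop renders GitHub-style checkboxes from objects carrying `completed` and `description` fields. One way a caller might feed it structured data is via serde_json values — a sketch under the same tera assumption as above, with the `json!` literal purely illustrative:

```rust
// Sketch only: structured tasks for journal.md.tera's checkbox loop.
// Assumes tera plus serde_json in the caller's Cargo.toml.
use tera::{Context, Tera};

fn render_journal(tera: &Tera) -> Result<String, tera::Error> {
    let mut ctx = Context::new();
    ctx.insert("id", "journal-2025-01-15");
    ctx.insert("title", "Daily log");
    ctx.insert("created", "2025-01-15T09:00:00Z");
    ctx.insert("modified", "2025-01-15T18:00:00Z");
    ctx.insert("tags", &["daily"]);
    // Each task needs `completed` and `description`, matching the
    // `- [{% if task.completed %}x{% else %} {% endif %}] ...` loop above.
    ctx.insert(
        "tasks",
        &serde_json::json!([
            { "completed": true,  "description": "Review schema PR" },
            { "completed": false, "description": "Wire up just recipes" }
        ]),
    );
    tera.render("journal.md.tera", &ctx)
}
```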
44
templates/note.md.tera
Normal file
@@ -0,0 +1,44 @@
---
id: {{ id }}
type: note
title: {{ title }}
created: {{ created }}
modified: {{ modified }}
tags: [{% for tag in tags %}"{{ tag }}"{% if not loop.last %}, {% endif %}{% endfor %}]
status: {{ status | default(value="draft") }}
{% if relates_to and relates_to | length > 0 -%}
relates_to:
{% for rel in relates_to %} - {{ rel }}
{% endfor %}
{%- endif %}
{% if depends_on and depends_on | length > 0 -%}
depends_on:
{% for dep in depends_on %} - {{ dep }}
{% endfor %}
{%- endif %}
{% if implements and implements | length > 0 -%}
implements:
{% for impl in implements %} - {{ impl }}
{% endfor %}
{%- endif %}
{% if extends and extends | length > 0 -%}
extends:
{% for ext in extends %} - {{ ext }}
{% endfor %}
{%- endif %}
{% if project -%}
project: {{ project }}
{% endif -%}
---

# {{ title }}

{{ content | default(value="") }}

{% if references and references | length > 0 %}
## References

{% for ref in references %}
- [[{{ ref }}]]
{% endfor %}
{% endif %}
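Since these templates are only exercised at render time, a cheap guard is to parse them all in a test: Tera compiles every file matched by the glob when the instance is built, so constructing it is enough to catch a syntax error in note.md.tera or any sibling. A hypothetical test along those lines — module name and location are illustrative, not part of this commit:

```rust
// Hypothetical guard, not in this commit: building the Tera instance
// parses every matched template, so a syntax error fails the test.
#[cfg(test)]
mod template_tests {
    use tera::Tera;

    #[test]
    fn all_templates_parse() {
        let tera = Tera::new("templates/*.tera").expect("templates should parse");
        // Spot-check that the glob actually picked the files up.
        assert!(tera.get_template_names().any(|name| name == "note.md.tera"));
    }
}
```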
119
templates/pattern.md.tera
Normal file
@@ -0,0 +1,119 @@
---
id: {{ id }}
type: pattern
title: {{ title }}
created: {{ created }}
modified: {{ modified }}
tags: [{% for tag in tags %}"{{ tag }}"{% if not loop.last %}, {% endif %}{% endfor %}]
status: {{ status | default(value="active") }}
{% if relates_to and relates_to | length > 0 -%}
relates_to:
{% for rel in relates_to %} - {{ rel }}
{% endfor %}
{%- endif %}
{% if depends_on and depends_on | length > 0 -%}
depends_on:
{% for dep in depends_on %} - {{ dep }}
{% endfor %}
{%- endif %}
{% if project -%}
project: {{ project }}
{% endif -%}
problem: |
  {{ problem | default(value="") | indent(width=2) }}
solution: |
  {{ solution | default(value="") | indent(width=2) }}
{% if forces and forces | length > 0 -%}
forces:
{% for force in forces %} - {{ force }}
{% endfor %}
{%- endif %}
---

# {{ title }}

## Problem

{{ problem | default(value="TODO: Describe the problem this pattern solves") }}

## Context

{{ context | default(value="TODO: Describe when this pattern should be used") }}

{% if forces and forces | length > 0 %}
## Forces

{% for force in forces %}
- {{ force }}
{% endfor %}
{% endif %}

## Solution

{{ solution | default(value="TODO: Describe the solution/implementation") }}

{% if structure %}
## Structure

{{ structure }}
{% endif %}

{% if implementation and implementation | length > 0 %}
## Implementation

{% for step in implementation %}
{{ loop.index }}. **{{ step.title }}**
   {{ step.description }}
{% if step.code %}
```{{ step.language | default(value="") }}
{{ step.code }}
```
{% endif %}
{% endfor %}
{% endif %}

{% if examples and examples | length > 0 %}
## Examples

{% for example in examples %}
### {{ example.title }}

{{ example.description }}

{% if example.code %}
```{{ example.language | default(value="") }}
{{ example.code }}
```
{% endif %}
{% endfor %}
{% endif %}

{% if consequences and consequences | length > 0 %}
## Consequences

### Benefits
{% for benefit in consequences.benefits %}
- {{ benefit }}
{% endfor %}

### Drawbacks
{% for drawback in consequences.drawbacks %}
- {{ drawback }}
{% endfor %}
{% endif %}

{% if related_patterns and related_patterns | length > 0 %}
## Related Patterns

{% for pattern in related_patterns %}
- [[{{ pattern }}]]
{% endfor %}
{% endif %}

{% if references and references | length > 0 %}
## References

{% for ref in references %}
- {{ ref }}
{% endfor %}
{% endif %}
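Unlike the other templates, pattern.md.tera duplicates `problem` and `solution` into the front matter as YAML block scalars (`problem: |`), piping them through the `indent` filter so multi-line values keep the indentation the block scalar requires. A sketch of a caller passing multi-line text, under the same tera assumptions as the earlier examples:

```rust
// Sketch: multi-line problem/solution text for pattern.md.tera.
// The template's `indent` filter re-indents continuation lines so
// they stay inside the YAML block scalars (`problem: |`).
use tera::{Context, Tera};

fn render_pattern(tera: &Tera) -> Result<String, tera::Error> {
    let mut ctx = Context::new();
    ctx.insert("id", "pattern-builder");
    ctx.insert("title", "Builder");
    ctx.insert("created", "2025-01-01T00:00:00Z");
    ctx.insert("modified", "2025-01-01T00:00:00Z");
    ctx.insert("tags", &["design", "rust"]);
    // Embedded newline exercises the block-scalar path in the front matter.
    ctx.insert(
        "problem",
        "Constructors with many optional parameters become unreadable\nand every new field breaks existing call sites.",
    );
    ctx.insert(
        "solution",
        "Expose a fluent builder that validates once in build().",
    );
    tera.render("pattern.md.tera", &ctx)
}
```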