Compare commits


No commits in common. "b6e4099ebc3b6319381d147c2a77f8810e79c960" and "5b0dbd30fd8a442b0f29caee9b8b7eeb5eb485cf" have entirely different histories.

155 changed files with 3665 additions and 9121 deletions

View File

@@ -4,21 +4,21 @@
[advisories]
# Ignore advisories for unmaintained crates that have no alternatives
ignore = [
# atty - unmaintained but widely used, replacement (is-terminal) requires code changes
"RUSTSEC-2021-0145",
"RUSTSEC-2024-0375",
# atomic-polyfill - unmaintained, comes from surrealdb dependency
"RUSTSEC-2023-0089",
# paste - unmaintained, comes from multiple dependencies (ratatui, nickel)
"RUSTSEC-2024-0436",
# rustls-pemfile - FIXED by updating reqwest to v0.12.28
# "RUSTSEC-2025-0134",
# yaml-rust - unmaintained, comes from nickel-lang-core
"RUSTSEC-2024-0320",
]
[database]

View File

@@ -1,6 +1,6 @@
[alias]
lint = "clippy --all-targets --all-features -- -D warnings"
fmt-all = "fmt --all"
[build]
rustflags = [

View File

@@ -1,49 +0,0 @@
# Taplo configuration for TOML formatting and linting
# https://taplo.tamasfe.dev/configuration/
[formatting]
# Indent tables with 2 spaces
indent_string = "  "
indent_tables = true
# Reorder keys alphabetically within tables
reorder_keys = true
# Reorder arrays to be more readable
reorder_arrays = false
# Align entries vertically in inline tables
align_entries = false
# Allow at most one consecutive blank line
allowed_blank_lines = 1
# Trailing newline
trailing_newline = true
# Column width for wrapping
column_width = 100
# Compact arrays
compact_arrays = true
# Compact inline tables
compact_inline_tables = false
# === INCLUDE/EXCLUDE PATTERNS ===
include = ["Cargo.toml", "*/Cargo.toml", "config/**/*.toml", "**/*.toml"]
exclude = ["target/**", "node_modules/**", ".git/**"]
# === SCHEMA VALIDATION ===
# Cargo.toml schema validation
[[rule]]
include = ["**/Cargo.toml"]
# Taplo includes built-in Cargo.toml schema
# Form definition TOML files (custom schema could be added)
[[rule]]
include = ["config/**/forms/*.toml", "tests/fixtures/**/*.toml"]
keys = ["name", "description", "fields", "items", "elements"]

View File

@@ -1,120 +1,121 @@
[workspace]
members = [
"crates/typedialog-core",
"crates/typedialog",
"crates/typedialog-tui",
"crates/typedialog-web",
"crates/typedialog-ai",
"crates/typedialog-prov-gen",
"crates/typedialog-agent/typedialog-ag-core",
"crates/typedialog-agent/typedialog-ag",
"crates/typedialog-core",
"crates/typedialog",
"crates/typedialog-tui",
"crates/typedialog-web",
"crates/typedialog-ai",
"crates/typedialog-prov-gen",
"crates/typedialog-agent/typedialog-ag-core",
"crates/typedialog-agent/typedialog-ag",
]
resolver = "2"
[workspace.package]
version = "0.1.0"
authors = ["Jesús Pérez <jpl@jesusperez.com>"]
edition = "2021"
rust-version = "1.75"
repository = "https://github.com/jesusperezlorenzo/typedialog"
license = "MIT"
keywords = ["forms", "cli", "tui", "web", "ai"]
categories = ["command-line-utilities", "web-programming"]
[workspace.dependencies]
# Internal crates
typedialog-ag-core = { path = "crates/typedialog-agent/typedialog-ag-core" }
typedialog-core = { path = "crates/typedialog-core" }
# Core serialization
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
serde_yaml = "0.9"
toml = "0.9"
# Utility
chrono = { version = "0.4", features = ["serde"] }
anyhow = "1.0"
thiserror = "2.0"
clap = { version = "4.5", features = ["derive", "cargo"] }
async-trait = "0.1"
# Async
tokio = { version = "1", features = ["full"] }
futures = "0.3"
# Templates
tera = "1.20"
# i18n
fluent = "0.17"
fluent-bundle = "0.16"
unic-langid = "0.9"
sys-locale = "0.3"
dirs = "6.0"
# Nushell integration
nu-protocol = "0.109.1"
nu-plugin = "0.109.1"
# CLI Backend (inquire)
inquire = { version = "0.9", features = ["editor", "date"] }
dialoguer = "0.12"
colored = "3"
rpassword = "7.4"
# TUI Backend (ratatui)
ratatui = "0.30"
crossterm = "0.29"
atty = "0.2"
# Web Backend (axum)
axum = { version = "0.8.8", features = ["multipart", "ws"] }
tower = "0.5.2"
tower-http = { version = "0.6.8", features = ["fs", "cors", "trace"] }
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
# AI/ML Backend
instant-distance = "0.6"
tantivy = "0.25"
# bincode cannot be updated yet: wait for tantivy/syntect to support bincode 2.0,
# then migrate the 8 call sites to the new API with explicit config
bincode = "=1.3"
serde_bytes = "0.11"
rand = "0.9"
petgraph = "0.8"
surrealdb = { version = "2.4", features = ["kv-mem"] }
# Misc
tempfile = "3.24"
# Testing & Benchmarking
criterion = { version = "0.8", features = ["html_reports"] }
proptest = "1.9"
# TypeAgent dependencies
nickel-lang-core = "0.16"
nom = "8"
lru = "0.16"
reqwest = { version = "0.12", features = ["json", "rustls-tls", "stream"] }
globset = "0.4"
ignore = "0.4"
notify = "8"
sha2 = "0.10"
hex = "0.4"
uuid = { version = "1.19", features = ["v4", "serde"] }
console = "0.16"
indicatif = "0.18"
cargo_toml = "0.22"
regex = "1.12"
strum = { version = "0.27", features = ["derive"] }
strum_macros = "0.27"
[workspace.lints.rust]
unsafe_code = "forbid"
[workspace.lints.clippy]
all = "warn"

View File

@@ -4,36 +4,36 @@
# Default LLM provider (claude, openai, gemini, ollama)
default_provider = "claude"
# Default model per provider
[agent.models]
claude = "claude-3-5-haiku-20241022"
openai = "gpt-4o-mini"
gemini = "gemini-2.0-flash-exp"
ollama = "llama2"
# Default settings
[agent.defaults]
max_tokens = 4096
temperature = 0.7
streaming = true
# Template settings
[agent.template]
engine = "tera" # Jinja2-compatible
strict_variables = false
# Validation settings
[agent.validation]
enabled = true
strict = false
# Output settings
[agent.output]
format = "markdown"
color = true
timestamp = false
# Logging
[agent.logging]
level = "info"
file = false
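For orientation, a minimal sketch of loading the [agent] tables above with the workspace's serde and toml crates. The struct names and the file path are hypothetical, not types from this repository; tables not modeled here ([agent.template], [agent.validation], ...) are ignored by serde's defaults:

use serde::Deserialize;
use std::collections::HashMap;

// Hypothetical mirror of the [agent] tables; field names match the TOML keys.
#[derive(Deserialize, Debug)]
struct Root {
    agent: Agent,
}

#[derive(Deserialize, Debug)]
struct Agent {
    default_provider: String,
    models: HashMap<String, String>,
    defaults: Defaults,
}

#[derive(Deserialize, Debug)]
struct Defaults {
    max_tokens: u32,
    temperature: f64,
    streaming: bool,
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let text = std::fs::read_to_string("agent.toml")?; // hypothetical path
    let cfg: Root = toml::from_str(&text)?;
    println!("{} -> {:?}", cfg.agent.default_provider,
        cfg.agent.models.get(&cfg.agent.default_provider));
    Ok(())
}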

View File

@@ -1,32 +1,32 @@
# TypeDialog Agent - Development Configuration
[agent]
default_provider = "ollama" # Use local for dev
[agent.models]
claude = "claude-3-5-haiku-20241022"
openai = "gpt-4o-mini"
gemini = "gemini-2.0-flash-exp"
ollama = "llama2"
[agent.defaults]
max_tokens = 2048 # Lower for dev
temperature = 0.7
streaming = true
[agent.template]
engine = "tera"
strict_variables = true # Catch template errors in dev
[agent.validation]
enabled = true
strict = true # Strict validation in dev
[agent.output]
format = "markdown"
color = true
timestamp = true
[agent.logging]
level = "debug" # Verbose in dev
file = true

View File

@@ -3,35 +3,35 @@
[agent]
default_provider = "claude"
[agent.models]
claude = "claude-3-5-sonnet-20241022" # Higher quality for production
openai = "gpt-4o"
gemini = "gemini-1.5-pro"
ollama = "llama2"
[agent.defaults]
max_tokens = 8192
temperature = 0.3 # More consistent
streaming = true
[agent.template]
engine = "tera"
strict_variables = true
[agent.validation]
enabled = true
strict = true
[agent.output]
format = "markdown"
color = false # No color in production logs
timestamp = true
[agent.logging]
level = "warn" # Less verbose in production
file = true
# Rate limiting (production)
[agent.rate_limit]
enabled = true
max_requests_per_minute = 60

View File

@@ -17,15 +17,15 @@ model = "gpt-3.5-turbo"
# Ollama: http://localhost:11434/api
api_endpoint = ""
[llm.generation]
# Temperature: 0.0-2.0, higher = more creative, lower = more focused
temperature = 0.7
# Maximum tokens in response
max_tokens = 2048
# Top-p (nucleus) sampling: 0.0-1.0
top_p = 0.9
[rag]
# Enable RAG (Retrieval-Augmented Generation) system
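The commented ranges above are easy to check before a request goes out. A small hedged sketch (hypothetical helper, not part of typedialog-ai):

// Range-checks [llm.generation] values against the bounds documented above.
fn validate_generation(temperature: f64, top_p: f64, max_tokens: u32) -> Result<(), String> {
    if !(0.0..=2.0).contains(&temperature) {
        return Err(format!("temperature {temperature} outside 0.0-2.0"));
    }
    if !(0.0..=1.0).contains(&top_p) {
        return Err(format!("top_p {top_p} outside 0.0-1.0"));
    }
    if max_tokens == 0 {
        return Err("max_tokens must be greater than zero".into());
    }
    Ok(())
}

fn main() {
    // The defaults above: temperature 0.7, max_tokens 2048, top_p 0.9.
    assert!(validate_generation(0.7, 0.9, 2048).is_ok());
    assert!(validate_generation(2.5, 0.9, 2048).is_err());
}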

View File

@@ -3,29 +3,29 @@
[llm]
# Use ollama for local development (requires local Ollama instance)
provider = "ollama"
model = "llama2" # Or whatever model you have installed locally
api_endpoint = "http://localhost:11434/api"
[llm.generation]
# Faster responses for iteration
temperature = 0.5
max_tokens = 1024
[rag]
# Enable RAG for development
enabled = true
index_path = "~/.config/typedialog/ai/rag-index-dev"
embedding_dims = 384
cache_size = 500
[microservice]
host = "127.0.0.1"
port = 3001
enable_cors = true # Allow localhost:3000, localhost:5173, etc.
enable_websocket = true
[appearance]
interaction_mode = "interactive"
show_suggestions = true
suggestion_confidence_threshold = 0.3 # Lower threshold for dev feedback

View File

@@ -3,32 +3,32 @@
[llm]
# Production uses high-quality, stable models
provider = "anthropic"
model = "claude-3-sonnet-20240229"
api_endpoint = "" # Uses provider defaults (api.anthropic.com)
[llm.generation]
# Conservative settings for production
temperature = 0.3 # More focused, less random
max_tokens = 1024 # Reasonable limit for cost control
top_p = 0.95
[rag]
# Production RAG system with larger cache
enabled = true
index_path = "/var/lib/typedialog/ai/rag-index" # System-wide index path
embedding_dims = 768 # Higher quality embeddings
cache_size = 10000 # Larger cache for frequently accessed data
[microservice]
# Listen on all interfaces for container deployments
host = "0.0.0.0"
port = 3001
enable_cors = false # Restrict CORS for security
enable_websocket = true
[appearance]
# Production uses validation mode
interaction_mode = "validate_only"
show_suggestions = false # Don't show raw LLM output to users
suggestion_confidence_threshold = 0.8 # Only very confident suggestions

View File

@@ -2,12 +2,12 @@
# Used for standard command-line form rendering
[form]
title = "CLI Form"
description = "Standard command-line interface form"
[form.validation]
validate_on_change = true
show_errors_inline = true
[output]
format = "json"

View File

@@ -2,31 +2,31 @@
# Extended configuration for development and testing
[form]
title = "CLI Form (Dev)"
description = "Development CLI form with debugging enabled"
[form.validation]
validate_on_change = true
show_errors_inline = true
strict_validation = true
[output]
format = "json"
pretty_print = true
debug_output = true
[terminal]
enable_mouse = true
use_color = true
use_raw_mode = true
[appearance]
show_help = true
show_placeholders = true
theme = "default"
show_field_types = true
[debug]
enabled = true
trace_execution = false
log_level = "info"

View File

@@ -2,13 +2,13 @@
# Optimized for production deployment
[form]
title = "Form"
description = ""
[form.validation]
validate_on_change = true
show_errors_inline = true
strict_validation = true
[output]
format = "json"
@@ -17,18 +17,18 @@ pretty_print = false
debug_output = false
[terminal]
enable_mouse = false
use_color = true
use_raw_mode = true
[appearance]
show_help = false
show_placeholders = false
theme = "default"
[logging]
level = "error"
file = "/var/log/typedialog/cli.log"
[timeout]
# Maximum form completion time (seconds)

View File

@@ -7,36 +7,36 @@ output_dir = "./provisioning"
# Default providers to include
default_providers = ["aws", "hetzner"]
# Generation settings
[provisioning.generation]
overwrite = false
dry_run = false
verbose = false
# Template settings
[provisioning.templates]
# Use local path in development; installed binaries use ~/.config/typedialog/prov-gen/templates
base_path = "crates/typedialog-prov-gen/templates"
# custom_path = "path/to/custom/templates" # Uncomment to override
# Infrastructure defaults
[provisioning.infrastructure]
environment = "development"
region = "us-east-1"
# Nickel integration
[provisioning.nickel]
validate_schemas = true
generate_defaults = true
use_constraints = true
# AI assistance
[provisioning.ai]
enabled = false
provider = "claude"
model = "claude-3-5-sonnet-20241022"
# Logging
[provisioning.logging]
level = "info"
file = false

View File

@@ -1,32 +1,32 @@
# TypeDialog Provisioning Generator - Development Configuration
[provisioning]
output_dir = "./provisioning"
default_providers = ["hetzner", "lxd"] # Cheaper for dev
[provisioning.generation]
overwrite = true # Allow overwrite in dev
dry_run = false
verbose = true # Verbose in dev
[provisioning.templates]
base_path = "templates"
custom_path = "./custom-templates"
[provisioning.infrastructure]
environment = "development"
region = "eu-central-1"
[provisioning.nickel]
validate_schemas = true
generate_defaults = true
use_constraints = true
[provisioning.ai]
enabled = true # Enable AI in dev
provider = "ollama" # Use local for dev
model = "llama2"
[provisioning.logging]
level = "debug"
file = true

View File

@@ -1,41 +1,41 @@
# TypeDialog Provisioning Generator - Production Configuration
[provisioning]
output_dir = "./provisioning"
default_providers = ["aws", "gcp"]
[provisioning.generation]
overwrite = false # Require explicit --force
dry_run = false
verbose = false
[provisioning.templates]
base_path = "templates"
# custom_path = "" # Optional: set custom templates path
[provisioning.infrastructure]
environment = "production"
region = "us-east-1"
[provisioning.nickel]
validate_schemas = true
generate_defaults = true
use_constraints = true
[provisioning.ai]
enabled = true
provider = "claude"
model = "claude-3-5-sonnet-20241022"
[provisioning.logging]
level = "warn"
file = true
# Production-specific settings
[provisioning.validation]
strict = true
require_tests = true
[provisioning.security]
require_encryption = true
scan_templates = true

View File

@@ -2,12 +2,12 @@
# Terminal User Interface rendering
[form]
title = "TUI Form"
description = "Interactive terminal user interface form"
[form.validation]
validate_on_change = true
show_errors_inline = true
[output]
format = "json"
@@ -15,9 +15,9 @@ pretty_print = true
[terminal]
# Full TUI features
enable_mouse = true
enable_scrolling = true
use_raw_mode = true
# Fixed height (-1 = auto)
height = -1
# Fixed width (-1 = auto)
@@ -34,8 +34,8 @@ highlight_on_hover = true
enable_animations = true
[appearance]
theme = "default"
border_style = "rounded"
# Color scheme: default, dark, light, high_contrast
color_scheme = "default"

View File

@@ -2,42 +2,42 @@
# Extended TUI features for development
[form]
title = "TUI Form (Dev)"
description = "Development TUI form with all features enabled"
[form.validation]
validate_on_change = true
show_errors_inline = true
strict_validation = true
[output]
format = "json"
pretty_print = true
debug_output = true
[terminal]
enable_mouse = true
enable_scrolling = true
height = -1
use_raw_mode = true
width = -1
[ui]
show_borders = true
show_focus = true
highlight_on_hover = true
enable_animations = true
# Show field indices for debugging
show_field_indices = true
[appearance]
border_style = "double"
color_scheme = "default"
theme = "default"
[keyboard]
vi_mode = false
emacs_mode = false
[debug]
enabled = true

View File

@@ -2,44 +2,44 @@
# Optimized TUI for production deployment
[form]
title = ""
description = ""
[form.validation]
validate_on_change = true
show_errors_inline = true
strict_validation = true
[output]
format = "json"
pretty_print = false
debug_output = false
[terminal]
enable_mouse = true
enable_scrolling = true
height = -1
use_raw_mode = true
width = -1
[ui]
show_borders = true
show_focus = true
highlight_on_hover = true
enable_animations = false
[appearance]
border_style = "rounded"
color_scheme = "default"
theme = "default"
[keyboard]
vi_mode = false
emacs_mode = false
[logging]
level = "error"
file = "/var/log/typedialog/tui.log"
[performance]
# Render throttle (milliseconds)

View File

@@ -9,13 +9,13 @@ cors_enabled = true
cors_origins = ["localhost", "127.0.0.1"]
[form]
title = "Web Form"
description = "Interactive web form"
[form.validation]
validate_on_change = true
show_errors_inline = true
client_validation = true
[output]
format = "json"

View File

@@ -10,30 +10,30 @@ hot_reload = true
debug = true
[form]
title = "Web Form (Dev)"
description = "Development web form"
[form.validation]
validate_on_change = true
show_errors_inline = true
client_validation = true
[output]
format = "json"
[html]
css_framework = "none"
inline_styles = true
responsive = true
dark_mode = true
# Show field metadata
show_field_metadata = true
[submission]
method = "post"
webhook_url = "http://localhost:8000/webhook"
redirect_on_success = false
log_submissions = true
[security]
csrf_enabled = true
@@ -41,10 +41,10 @@ rate_limit = 0
require_https = false
[logging]
level = "debug"
file = "/tmp/typedialog-web.log"
[api]
# API documentation enabled
enable_docs = true
docs_path = "/docs"

View File

@@ -5,35 +5,35 @@
host = "0.0.0.0"
port = 8080
# Disable development features
hot_reload = false
debug = false
# Worker threads
workers = 4
[form]
title = ""
description = ""
[form.validation]
validate_on_change = true
show_errors_inline = true
client_validation = true
[output]
format = "json"
[html]
css_framework = "none"
inline_styles = false
responsive = true
dark_mode = true
[submission]
method = "post"
# Required: webhook for production submissions
redirect_on_success = true
redirect_url = "https://example.com/thank-you"
webhook_url = "https://api.example.com/forms"
[security]
# Strict CSRF protection
@@ -46,8 +46,8 @@ require_https = true
add_security_headers = true
[logging]
level = "error"
file = "/var/log/typedialog/web.log"
[performance]
# Cache static assets
@@ -60,6 +60,6 @@ compression_threshold = 1024
[tls]
# Optional TLS configuration
enabled = false
cert_path = "/etc/typedialog/cert.pem"
key_path = "/etc/typedialog/key.pem"

View File

@@ -1,20 +1,20 @@
[package]
name = "typedialog-ag-core"
version.workspace = true
edition.workspace = true
rust-version.workspace = true
authors.workspace = true
license.workspace = true
repository.workspace = true
description = "Core library for type-safe AI agent execution with MDX → Nickel → MD pipeline"
keywords.workspace = true
categories.workspace = true
[dependencies]
# Async
tokio = { workspace = true }
futures = { workspace = true }
async-trait = { workspace = true }
# Nickel
nickel-lang-core = { workspace = true }
@@ -36,36 +36,36 @@ globset = { workspace = true }
ignore = { workspace = true }
# Cache
lru = { workspace = true }
sha2 = { workspace = true }
hex = { workspace = true }
bincode = { workspace = true }
# Parsing
nom = { workspace = true }
# Error handling
thiserror = { workspace = true }
anyhow = { workspace = true }
# Logging
tracing = { workspace = true }
# Utilities
chrono = { workspace = true }
dirs = { workspace = true }
uuid = { workspace = true }
[dev-dependencies]
proptest.workspace = true
criterion.workspace = true
[features]
default = ["markup", "nickel", "cache"]
markup = [] # MDX parsing + transpiler
nickel = [] # Nickel evaluation
markdown = [] # Legacy .agent.md support
cache = [] # Cache layer
[lib]
name = "typedialog_ag_core"

View File

@@ -3,81 +3,28 @@
//! TypeAgent Core Library
//!
//! Type-safe AI agent execution with 3-layer pipeline:
//! - Layer 1: MDX → AST (markup parsing)
//! - Layer 2: AST → Nickel (transpilation + type checking)
//! - Layer 3: Nickel → Output (execution + validation)
pub mod cache;
pub mod error;
pub mod executor;
pub mod formats;
pub mod llm;
mod loader;
pub mod nickel;
pub mod parser;
pub mod transpiler;
pub mod utils;
// Public API exports
pub use cache::{CacheManager, CacheStats, CacheStrategy};
pub use error::{Error, Result};
pub use executor::{AgentExecutor, ExecutionResult};
pub use llm::LlmClient;
pub use formats::{AgentFormat, FormatDetector};
pub use loader::AgentLoader;
pub use nickel::{AgentConfig, AgentDefinition, NickelEvaluator};
pub use parser::{AgentDirective, MarkupNode, MarkupParser};
pub use transpiler::NickelTranspiler;

View File

@@ -1,23 +1,4 @@
//! LLM provider abstraction and implementations
//!
//! This module provides both low-level provider traits and a high-level client facade.
//!
//! # Recommended Usage
//!
//! For most use cases, use [`LlmClient`] which provides a simplified API:
//!
//! ```no_run
//! use typedialog_ag_core::llm::LlmClient;
//!
//! # async fn example() -> Result<(), Box<dyn std::error::Error>> {
//! let client = LlmClient::from_model("claude-3-5-sonnet-20241022")?;
//! let response = client.ask("What is Rust?").await?;
//! println!("{}", response);
//! # Ok(())
//! # }
//! ```
//!
//! For advanced use cases (implementing custom providers), use [`LlmProvider`] trait.
pub mod claude;
pub mod gemini;
@@ -35,7 +16,6 @@ pub use provider::{
};
use crate::error::{Error, Result};
use std::sync::Arc;
/// Create an LLM provider based on model name
pub fn create_provider(model: &str) -> Result<Box<dyn LlmProvider>> {
@@ -88,259 +68,3 @@ fn is_ollama_model(model: &str) -> bool {
.iter()
.any(|prefix| model.starts_with(prefix))
}
// ============================================================================
// LLM CLIENT FACADE (Recommended High-Level API)
// ============================================================================
/// High-level LLM client facade providing a simplified API
///
/// `LlmClient` wraps the lower-level [`LlmProvider`] trait and provides
/// convenience methods for common use cases. This is the recommended way
/// to interact with LLMs for most applications.
///
/// # Examples
///
/// ## Simple question-answer
///
/// ```no_run
/// use typedialog_ag_core::llm::LlmClient;
///
/// # async fn example() -> Result<(), Box<dyn std::error::Error>> {
/// let client = LlmClient::from_model("claude-3-5-sonnet-20241022")?;
/// let answer = client.ask("Explain Rust ownership").await?;
/// println!("{}", answer);
/// # Ok(())
/// # }
/// ```
///
/// ## With custom parameters
///
/// ```no_run
/// use typedialog_ag_core::llm::LlmClient;
///
/// # async fn example() -> Result<(), Box<dyn std::error::Error>> {
/// let client = LlmClient::from_model("gpt-4")?
/// .with_temperature(0.7)
/// .with_max_tokens(2000);
///
/// let response = client.ask("Write a haiku about coding").await?;
/// println!("{}", response);
/// # Ok(())
/// # }
/// ```
///
/// ## Streaming responses
///
/// ```no_run
/// use typedialog_ag_core::llm::LlmClient;
/// use futures::StreamExt;
///
/// # async fn example() -> Result<(), Box<dyn std::error::Error>> {
/// let client = LlmClient::from_model("claude-3-5-sonnet-20241022")?;
/// let mut stream = client.ask_stream("Tell me a story").await?;
///
/// while let Some(chunk) = stream.next().await {
/// match chunk? {
/// typedialog_ag_core::llm::StreamChunk::Content(text) => print!("{}", text),
/// typedialog_ag_core::llm::StreamChunk::Done(_) => println!("\n[Done]"),
/// typedialog_ag_core::llm::StreamChunk::Error(e) => eprintln!("Error: {}", e),
/// }
/// }
/// # Ok(())
/// # }
/// ```
pub struct LlmClient {
provider: Arc<dyn LlmProvider>,
model: String,
temperature: Option<f64>,
max_tokens: Option<usize>,
system: Option<String>,
}
impl LlmClient {
/// Create a new client from a specific provider
///
/// For most use cases, prefer [`from_model`](Self::from_model) which auto-detects the provider.
pub fn new(provider: Arc<dyn LlmProvider>, model: impl Into<String>) -> Self {
Self {
provider,
model: model.into(),
temperature: None,
max_tokens: None,
system: None,
}
}
/// Create a new client by auto-detecting the provider from the model name
///
/// # Examples
///
/// ```no_run
/// use typedialog_ag_core::llm::LlmClient;
///
/// # fn example() -> Result<(), Box<dyn std::error::Error>> {
/// let claude = LlmClient::from_model("claude-3-5-sonnet-20241022")?;
/// let gpt = LlmClient::from_model("gpt-4")?;
/// let gemini = LlmClient::from_model("gemini-pro")?;
/// let ollama = LlmClient::from_model("llama2")?;
/// # Ok(())
/// # }
/// ```
pub fn from_model(model: impl Into<String>) -> Result<Self> {
let model_str = model.into();
let provider = create_provider(&model_str)?;
Ok(Self::new(Arc::from(provider), model_str))
}
/// Set the temperature parameter (0.0 to 1.0)
///
/// Higher values make output more random, lower values more deterministic.
pub fn with_temperature(mut self, temperature: f64) -> Self {
self.temperature = Some(temperature);
self
}
/// Set the maximum number of tokens to generate
pub fn with_max_tokens(mut self, max_tokens: usize) -> Self {
self.max_tokens = Some(max_tokens);
self
}
/// Set a system message to guide the model's behavior
pub fn with_system(mut self, system: impl Into<String>) -> Self {
self.system = Some(system.into());
self
}
/// Ask a simple question and get a text response
///
/// This is the simplest way to interact with an LLM.
///
/// # Examples
///
/// ```no_run
/// # use typedialog_ag_core::llm::LlmClient;
/// # async fn example() -> Result<(), Box<dyn std::error::Error>> {
/// let client = LlmClient::from_model("claude-3-5-sonnet-20241022")?;
/// let response = client.ask("What is 2+2?").await?;
/// assert!(response.contains("4"));
/// # Ok(())
/// # }
/// ```
pub async fn ask(&self, prompt: impl Into<String>) -> Result<String> {
let request = LlmRequest {
model: self.model.clone(),
messages: vec![LlmMessage {
role: MessageRole::User,
content: prompt.into(),
}],
max_tokens: self.max_tokens,
temperature: self.temperature,
system: self.system.clone(),
};
let response = self.provider.complete(request).await?;
Ok(response.content)
}
/// Ask a question and receive a streaming response
///
/// Use this for long responses or when you want to display progress.
pub async fn ask_stream(&self, prompt: impl Into<String>) -> Result<LlmStream> {
let request = LlmRequest {
model: self.model.clone(),
messages: vec![LlmMessage {
role: MessageRole::User,
content: prompt.into(),
}],
max_tokens: self.max_tokens,
temperature: self.temperature,
system: self.system.clone(),
};
self.provider.stream(request).await
}
/// Execute a full completion request with complete control
///
/// Use this for multi-turn conversations or advanced scenarios.
///
/// # Examples
///
/// ```no_run
/// # use typedialog_ag_core::llm::{LlmClient, LlmRequest, LlmMessage, MessageRole};
/// # async fn example() -> Result<(), Box<dyn std::error::Error>> {
/// let client = LlmClient::from_model("claude-3-5-sonnet-20241022")?;
///
/// let request = LlmRequest {
/// model: "claude-3-5-sonnet-20241022".to_string(),
/// messages: vec![
/// LlmMessage {
/// role: MessageRole::User,
/// content: "Hello!".to_string(),
/// },
/// LlmMessage {
/// role: MessageRole::Assistant,
/// content: "Hi! How can I help?".to_string(),
/// },
/// LlmMessage {
/// role: MessageRole::User,
/// content: "Tell me about Rust".to_string(),
/// },
/// ],
/// max_tokens: Some(1000),
/// temperature: Some(0.7),
/// system: None,
/// };
///
/// let response = client.complete(request).await?;
/// println!("{}", response.content);
/// # Ok(())
/// # }
/// ```
pub async fn complete(&self, request: LlmRequest) -> Result<LlmResponse> {
self.provider.complete(request).await
}
/// Stream a full completion request
pub async fn stream_complete(&self, request: LlmRequest) -> Result<LlmStream> {
self.provider.stream(request).await
}
/// Get the underlying provider name
pub fn provider_name(&self) -> &str {
self.provider.name()
}
/// Get the model name
pub fn model(&self) -> &str {
&self.model
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_client_builder_pattern() {
// This just tests the builder pattern compiles
let model = "claude-3-5-sonnet-20241022";
let _would_create = || -> Result<LlmClient> {
Ok(LlmClient::from_model(model)?
.with_temperature(0.7)
.with_max_tokens(2000)
.with_system("You are a helpful assistant"))
};
}
#[test]
fn test_is_ollama_model() {
assert!(is_ollama_model("llama2"));
assert!(is_ollama_model("mistral"));
assert!(is_ollama_model("phi"));
assert!(!is_ollama_model("claude-3"));
assert!(!is_ollama_model("gpt-4"));
}
}
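A short usage sketch of the provider auto-detection kept above; it only leans on items visible in this diff (create_provider and the provider's name() method), so treat it as illustrative rather than documented API:

use typedialog_ag_core::llm::create_provider;

fn main() -> typedialog_ag_core::Result<()> {
    // Model-name prefixes select the backend, as is_ollama_model suggests.
    for model in ["claude-3-5-sonnet-20241022", "gpt-4", "gemini-pro", "llama2"] {
        let provider = create_provider(model)?;
        println!("{model} -> {}", provider.name());
    }
    Ok(())
}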

View File

@@ -1,14 +1,14 @@
[package]
name = "typedialog-ag"
version.workspace = true
edition.workspace = true
rust-version.workspace = true
authors.workspace = true
license.workspace = true
repository.workspace = true
description = "CLI for executing type-safe AI agents"
keywords.workspace = true
categories.workspace = true
[dependencies]
# Internal
@@ -23,17 +23,17 @@ tower-http = { workspace = true }
# CLI
clap = { workspace = true }
console = { workspace = true }
indicatif = { workspace = true }
inquire = { workspace = true }
# Serialization
serde = { workspace = true }
serde_json = { workspace = true }
# Error handling
thiserror = { workspace = true }
anyhow = { workspace = true }
# Logging
tracing = { workspace = true }
@@ -47,6 +47,6 @@ toml = { workspace = true }
default = []
watch = ["dep:notify"]
[dependencies.notify]
workspace = true
optional = true
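The [dependencies.notify] block plus watch = ["dep:notify"] is Cargo's optional-dependency pattern: notify only compiles when the watch feature is enabled. A hedged sketch of a feature-gated watcher (the function is hypothetical; recommended_watcher, Watcher, and RecursiveMode are notify's actual API):

// Only built with `--features watch`, which activates the optional notify dependency.
#[cfg(feature = "watch")]
fn watch_agents(dir: &std::path::Path) -> notify::Result<()> {
    use notify::{RecursiveMode, Watcher};

    // Print every filesystem event under `dir`; a real CLI would re-run the agent.
    let mut watcher = notify::recommended_watcher(|event| println!("change: {event:?}"))?;
    watcher.watch(dir, RecursiveMode::Recursive)?;
    std::thread::sleep(std::time::Duration::from_secs(60)); // keep the watcher alive
    Ok(())
}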

View File

@@ -1,52 +1,52 @@
[package]
name = "typedialog-ai"
version.workspace = true
edition.workspace = true
authors.workspace = true
repository.workspace = true
license.workspace = true
description = "AI-powered configuration assistant backend and microservice for TypeDialog"
[dependencies]
# Internal
typedialog-core = { path = "../typedialog-core", features = ["ai_backend"] }
# Workspace dependencies (shared with other crates)
tokio = { workspace = true }
async-trait = { workspace = true }
futures = { workspace = true }
surrealdb = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
serde_yaml = { workspace = true }
toml = { workspace = true }
clap = { workspace = true }
dialoguer = { workspace = true }
colored = { workspace = true }
uuid = { workspace = true }
chrono = { workspace = true }
tracing = { workspace = true }
tracing-subscriber = { workspace = true }
thiserror = { workspace = true }
anyhow = { workspace = true }
dirs = { workspace = true }
# Web and HTTP dependencies (now aligned with workspace versions)
# Code migrated to support workspace versions:
# - axum: Upgraded from 0.7 to 0.8.8 (WebSocket Message::Text now uses Utf8Bytes)
# - reqwest: Using workspace 0.12 (streaming API compatible)
# - tower/tower-http: Aligned with axum 0.8.8
reqwest = { workspace = true, features = ["json", "stream"] }
axum = { workspace = true, features = ["ws"] }
tower = { workspace = true }
tower-http = { workspace = true, features = ["cors", "trace"] }
[features]
default = ["openai"]
openai = []
anthropic = []
ollama = []
all-providers = ["openai", "anthropic", "ollama"]
[lib]
name = "typedialog_ai"
@@ -56,7 +56,7 @@ path = "src/lib.rs"
name = "typedialog-ai"
path = "src/main.rs"
[package.metadata.binstall]
pkg-url = "{ repo }/releases/download/v{ version }/typedialog-{ target }.tar.gz"
bin-dir = "bin/{ bin }"
pkg-fmt = "tgz"

View File

@@ -1,11 +1,11 @@
[package]
name = "typedialog-core"
version.workspace = true
authors.workspace = true
edition.workspace = true
repository.workspace = true
license.workspace = true
description = "Core library for TypeDialog - form handling and multiple rendering backends"
[lib]
name = "typedialog_core"
@@ -13,81 +13,81 @@ path = "src/lib.rs"
[dependencies]
# Core dependencies
serde = { workspace = true }
serde_json.workspace = true
serde_yaml.workspace = true
toml.workspace = true
chrono.workspace = true
anyhow.workspace = true
thiserror.workspace = true
async-trait.workspace = true
tera = { workspace = true, optional = true }
tempfile.workspace = true
dirs.workspace = true # For config path resolution
tracing.workspace = true # Logging framework
# i18n (optional)
fluent = { workspace = true, optional = true }
fluent-bundle = { workspace = true, optional = true }
unic-langid = { workspace = true, optional = true }
sys-locale = { workspace = true, optional = true }
# Nushell integration (optional)
nu-protocol = { workspace = true, optional = true }
nu-plugin = { workspace = true, optional = true }
# CLI Backend (inquire) - optional
inquire = { workspace = true, optional = true }
dialoguer = { workspace = true, optional = true }
rpassword = { workspace = true, optional = true }
# TUI Backend (ratatui) - optional
ratatui = { workspace = true, optional = true }
crossterm = { workspace = true, optional = true }
atty = { workspace = true, optional = true }
# Web Backend (axum) - optional
axum = { workspace = true, optional = true }
tokio = { workspace = true, optional = true }
tower = { workspace = true, optional = true }
tower-http = { workspace = true, optional = true }
tracing-subscriber = { workspace = true, optional = true }
futures = { workspace = true, optional = true }
# Encryption - optional (prov-ecosystem integration)
encrypt = { path = "../../../prov-ecosystem/crates/encrypt", optional = true }
# AI Backend - optional
instant-distance = { workspace = true, optional = true }
tantivy = { workspace = true, optional = true }
bincode = { workspace = true, optional = true }
serde_bytes = { workspace = true, optional = true }
rand = { workspace = true, optional = true }
petgraph = { workspace = true, optional = true }
[dev-dependencies]
serde_json.workspace = true
tokio = { workspace = true, features = ["full"] }
age = "0.11"
proptest.workspace = true
criterion.workspace = true
[features]
default = ["cli", "i18n", "templates"]
cli = ["inquire", "dialoguer", "rpassword"]
tui = ["ratatui", "crossterm", "atty"]
web = ["axum", "tokio", "tower", "tower-http", "tracing-subscriber", "futures"]
i18n = ["fluent", "fluent-bundle", "unic-langid", "sys-locale"]
templates = ["tera"]
nushell = ["nu-protocol", "nu-plugin"]
encryption = ["encrypt"]
ai_backend = ["instant-distance", "tantivy", "bincode", "serde_bytes", "rand", "petgraph"]
all-backends = ["cli", "tui", "web"]
full = ["i18n", "templates", "nushell", "encryption", "ai_backend", "all-backends"]
[[bench]]
name = "parsing_benchmarks"
harness = false
[lints]
workspace = true

View File

@@ -1,59 +0,0 @@
//! Advanced API for extending typedialog
//!
//! This module provides types and traits for advanced use cases like:
//! - Implementing custom backends
//! - Creating custom autocompletion strategies
//! - Building form parsers and renderers
//! - Template context manipulation
//!
//! # Examples
//!
//! ```no_run
//! use typedialog_core::advanced::{FormBackend, RenderContext};
//! use typedialog_core::error::Result;
//!
//! // Implement a custom backend
//! struct MyCustomBackend;
//!
//! #[async_trait::async_trait]
//! impl FormBackend for MyCustomBackend {
//! async fn render_text(
//! &mut self,
//! prompt: &str,
//! default: Option<&str>,
//! placeholder: Option<&str>,
//! _context: &RenderContext,
//! ) -> Result<String> {
//! // Custom implementation
//! Ok(format!("{}: custom", prompt))
//! }
//!
//! // ... implement other required methods
//! # async fn render_confirm(&mut self, _: &str, _: Option<bool>, _: &RenderContext) -> Result<bool> { Ok(true) }
//! # async fn render_select(&mut self, _: &str, _: &[String], _: Option<usize>, _: Option<usize>, _: bool, _: &RenderContext) -> Result<String> { Ok("".into()) }
//! # async fn render_multiselect(&mut self, _: &str, _: &[String], _: &[String], _: Option<usize>, _: bool, _: &RenderContext) -> Result<Vec<String>> { Ok(vec![]) }
//! # async fn render_password(&mut self, _: &str, _: &RenderContext) -> Result<String> { Ok("".into()) }
//! # async fn render_custom(&mut self, _: &str, _: &str, _: &RenderContext) -> Result<String> { Ok("".into()) }
//! # async fn render_editor(&mut self, _: &str, _: Option<&str>, _: Option<&str>, _: &RenderContext) -> Result<String> { Ok("".into()) }
//! # async fn render_date(&mut self, _: &str, _: Option<chrono::NaiveDate>, _: Option<chrono::NaiveDate>, _: Option<chrono::NaiveDate>, _: chrono::Weekday, _: &RenderContext) -> Result<chrono::NaiveDate> { Ok(chrono::NaiveDate::from_ymd_opt(2024, 1, 1).unwrap()) }
//! }
//! ```
pub use crate::error::{Error, ErrorWrapper, Result};
#[cfg(any(feature = "cli", feature = "tui", feature = "web"))]
pub use crate::autocompletion::{FilterCompleter, HistoryCompleter, PatternCompleter};
#[cfg(any(feature = "cli", feature = "tui", feature = "web"))]
pub use crate::backends::{FormBackend, RenderContext};
#[cfg(any(feature = "cli", feature = "tui", feature = "web"))]
pub use crate::form_parser::{parse_toml, DisplayItem};
#[cfg(feature = "templates")]
pub use crate::templates::TemplateContextBuilder;
pub use crate::helpers::EncryptionContext;
#[cfg(feature = "i18n")]
pub use crate::i18n::LocaleLoader;

View File

@@ -482,7 +482,6 @@ impl From<chrono::ParseError> for ErrorWrapper {
}
}
#[cfg(feature = "cli")]
impl From<inquire::InquireError> for ErrorWrapper {
fn from(err: inquire::InquireError) -> Self {
match err {
@@ -511,66 +510,6 @@ pub type Result<T> = std::result::Result<T, ErrorWrapper>;
/// Error type alias for convenient use
pub type Error = ErrorWrapper;
// ============================================================================
// ERROR CONTEXT TRAITS (for cleaner error boundaries)
// ============================================================================
/// Trait for converting module-specific errors to ErrorWrapper at boundaries
pub trait IntoTypedialogError {
fn into_typedialog_error(self) -> ErrorWrapper;
}
/// Trait for operations that can produce typedialog errors with context
pub trait TypedialogErrorContext<T> {
/// Add context message to any error
fn context(self, msg: impl Into<String>) -> Result<T>;
/// Add field-specific context to validation errors
fn with_field(self, field: impl Into<String>) -> Result<T>;
/// Add I/O operation context
fn with_io_context(self, operation: impl Into<String>, path: Option<PathBuf>) -> Result<T>;
}
impl<T, E> TypedialogErrorContext<T> for std::result::Result<T, E>
where
E: std::error::Error + Send + Sync + 'static,
{
fn context(self, msg: impl Into<String>) -> Result<T> {
self.map_err(|e| {
ErrorWrapper::FormParse(FormParseError {
kind: FormParseErrorKind::InvalidToml { line: 0, column: 0 },
message: format!("{}: {}", msg.into(), e),
source: Some(Box::new(e)),
})
})
}
fn with_field(self, field: impl Into<String>) -> Result<T> {
self.map_err(|e| {
ErrorWrapper::Validation(ValidationError {
kind: ValidationErrorKind::TypeMismatch {
expected: "valid value".into(),
got: e.to_string(),
},
field: field.into(),
value: None,
message: e.to_string(),
})
})
}
fn with_io_context(self, operation: impl Into<String>, path: Option<PathBuf>) -> Result<T> {
self.map_err(|e| {
ErrorWrapper::Io(IoError {
operation: operation.into(),
path,
source: io::Error::other(e.to_string()),
})
})
}
}
// ============================================================================
// HELPER CONSTRUCTORS (for migration compatibility)
// ============================================================================
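For reference, call sites of the removed TypedialogErrorContext trait read like this; the sketch uses only the signatures shown in the deleted block, and the two functions are hypothetical:

use std::path::PathBuf;
use typedialog_core::error::{Result, TypedialogErrorContext};

// Any std Result whose error is Error + Send + Sync gains the three context methods.
fn load_form(path: &str) -> Result<String> {
    std::fs::read_to_string(path)
        .with_io_context("read form definition", Some(PathBuf::from(path)))
}

fn parse_port(raw: &str) -> Result<u16> {
    raw.parse::<u16>().with_field("port")
}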

View File

@@ -3,7 +3,6 @@
//! Provides conversion functions between JSON values and other formats
//! for serialization and display purposes.
use crate::error::TypedialogErrorContext;
use serde_json::{json, Value};
use std::collections::HashMap;
@@ -23,11 +22,15 @@
) -> crate::error::Result<String> {
match format {
"json" => {
let json_obj = serde_json::to_value(results).map_err(|e| {
crate::ErrorWrapper::new(format!("JSON serialization error: {}", e))
})?;
Ok(serde_json::to_string_pretty(&json_obj)?)
}
"yaml" => {
let yaml_string = serde_yaml::to_string(results).map_err(|e| {
crate::ErrorWrapper::new(format!("YAML serialization error: {}", e))
})?;
Ok(yaml_string)
}
"text" => {
@@ -37,7 +40,8 @@ }
}
Ok(output)
}
"toml" => toml::to_string_pretty(results).context("TOML serialization"),
"toml" => toml::to_string_pretty(results)
.map_err(|e| crate::ErrorWrapper::new(format!("TOML serialization error: {}", e))),
_ => Err(crate::ErrorWrapper::new(format!(
"Unknown output format: {}",
format
@@ -73,7 +77,8 @@ pub fn to_json_value(results: &HashMap<String, Value>) -> Value {
/// Convert results to JSON string
pub fn to_json_string(results: &HashMap<String, Value>) -> crate::error::Result<String> {
serde_json::to_string(&to_json_value(results))
.map_err(|e| crate::ErrorWrapper::new(format!("JSON error: {}", e)))
}
/// Encryption context controlling redaction/encryption behavior
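Taken together, the match in format_results accepts four output formats. A usage sketch, assuming the (results, format) parameter order implied by the calls in this file; the full signature sits outside this hunk:

use serde_json::{json, Value};
use std::collections::HashMap;
use typedialog_core::helpers::format_results;

fn main() {
    let mut results: HashMap<String, Value> = HashMap::new();
    results.insert("username".into(), json!("alice"));
    results.insert("age".into(), json!(30));

    // "json", "yaml", "text", and "toml" succeed; anything else is an error.
    println!("{}", format_results(&results, "json").unwrap());
    assert!(format_results(&results, "csv").is_err());
}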
@@ -131,11 +136,6 @@
/// 2. CLI/context config (default_backend, backend_config)
/// 3. Global config (typedialog_config defaults)
/// 4. Hard default ("age")
#[cfg(all(
feature = "encryption",
any(feature = "cli", feature = "tui", feature = "web"),
feature = "i18n"
))]
pub fn resolve_encryption_config(
field: &crate::form_parser::FieldDefinition,
context: &EncryptionContext,
@@ -187,11 +187,7 @@
/// # Returns
///
/// Transformed results with redacted or encrypted sensitive values
#[cfg(feature = "encryption")]
pub fn transform_results(
results: &HashMap<String, Value>,
fields: &[crate::form_parser::FieldDefinition],
@@ -227,11 +223,7 @@
/// Fallback version when encryption feature is not enabled
/// Still supports redaction by checking field sensitivity
#[cfg(not(feature = "encryption"))]
pub fn transform_results(
results: &HashMap<String, Value>,
fields: &[crate::form_parser::FieldDefinition],
@@ -272,11 +264,7 @@
}
/// Transform a single sensitive value based on context
#[cfg(feature = "encryption")]
fn transform_sensitive_value(
value: &Value,
field: &crate::form_parser::FieldDefinition,
@@ -319,11 +307,7 @@
/// # Returns
///
/// Formatted string with sensitive values redacted/encrypted
#[cfg(feature = "encryption")]
pub fn format_results_secure(
results: &HashMap<String, Value>,
fields: &[crate::form_parser::FieldDefinition],
@ -336,11 +320,7 @@ pub fn format_results_secure(
}
/// No-op when encryption feature disabled
#[cfg(all(
not(feature = "encryption"),
any(feature = "cli", feature = "tui", feature = "web"),
feature = "i18n"
))]
#[cfg(not(feature = "encryption"))]
pub fn format_results_secure(
results: &HashMap<String, Value>,
_fields: &[crate::form_parser::FieldDefinition],
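The hunks above drop the `TypedialogErrorContext` import and replace `.context(...)` with explicit `map_err` closures. A minimal standalone sketch of that pattern, with local stand-ins for the crate's `ErrorWrapper` and `Result` (the real types live in typedialog-core):

```rust
use std::collections::HashMap;

// Local stand-ins; the real ErrorWrapper/Result live in typedialog-core.
#[derive(Debug)]
struct ErrorWrapper(String);

impl ErrorWrapper {
    fn new(msg: String) -> Self {
        ErrorWrapper(msg)
    }
}

type Result<T> = std::result::Result<T, ErrorWrapper>;

// The map_err pattern: wrap the library error with a descriptive prefix.
fn to_json_string(results: &HashMap<String, serde_json::Value>) -> Result<String> {
    serde_json::to_string(results)
        .map_err(|e| ErrorWrapper::new(format!("JSON error: {}", e)))
}
```

The same closure shape covers the YAML and TOML arms, so each output format gets its own message without a blanket context trait.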

View File

@ -18,59 +18,30 @@
//!
//! # Quick Start as Library
//!
//! ## Recommended: Using the Prelude
//!
//! ```no_run
//! use typedialog_core::prelude::*;
//! use typedialog_core::prompts;
//!
//! # async fn example() -> Result<()> {
//! // Create a backend and execute forms
//! let mut backend = BackendFactory::create(BackendType::Cli)?;
//! let form = FormDefinition {
//! title: Some("User Registration".to_string()),
//! description: None,
//! locale: None,
//! fields: vec![],
//! display_items: vec![],
//! };
//! # Ok(())
//! # }
//! // Simple text prompt
//! let name = prompts::text("Enter your name", None, None)?;
//! println!("Hello, {}!", name);
//!
//! # Ok::<(), Box<dyn std::error::Error>>(())
//! ```
//!
//! ## Simple Prompts with prompt_api
//! # Quick Start with Backends
//!
//! ```no_run
//! use typedialog_core::prompt_api::{self, Result};
//! ```ignore
//! use typedialog_core::backends::{BackendFactory, BackendType};
//! use typedialog_core::form_parser;
//!
//! fn example() -> Result<()> {
//! let name = prompt_api::text("Enter your name", None, None)?;
//! let confirmed = prompt_api::confirm("Continue?", Some(true))?;
//! println!("Hello, {}!", name);
//! async fn example() -> Result<(), Box<dyn std::error::Error>> {
//! let mut backend = BackendFactory::create(BackendType::Cli)?;
//! let form = form_parser::parse_toml("[[fields]]\nname = \"username\"\ntype = \"text\"\n")?;
//! let results = form_parser::execute_with_backend(form, &mut backend).await?;
//! Ok(())
//! }
//! ```
//!
//! ## Advanced: Custom Backends
//!
//! ```ignore
//! use typedialog_core::advanced::{FormBackend, RenderContext};
//!
//! // Implement your custom backend
//! struct MyBackend;
//!
//! #[async_trait::async_trait]
//! impl FormBackend for MyBackend {
//! // ... implement required methods
//! }
//! ```
//!
//! # Module Organization
//!
//! - [`prelude`] - Common types for form execution (recommended starting point)
//! - [`prompt_api`] - Direct prompt functions without forms
//! - [`advanced`] - Advanced APIs for extending backends
//! - Individual modules: [`error`], [`backends`], [`form_parser`], [`helpers`], etc.
//!
//! # Quick Start as CLI
//!
//! ```bash
@ -88,55 +59,13 @@
//! typedialog form-to-nickel form.toml results.json -o output.ncl --validate
//! ```
// ============================================================================
// CORE MODULES (always compiled)
// ============================================================================
pub mod error;
pub mod helpers;
/// Common CLI patterns and help text
pub mod cli_common;
// ============================================================================
// FOCUSED API MODULES (recommended entry points)
// ============================================================================
/// Prelude module for common form execution use cases
///
/// Import with `use typedialog_core::prelude::*;` for quick access.
pub mod prelude;
/// Prompt-only API for direct interactive prompts without forms
#[cfg(any(feature = "cli", feature = "tui", feature = "web"))]
pub mod prompt_api;
/// Advanced API for extending backends and custom implementations
#[cfg(any(feature = "cli", feature = "tui", feature = "web"))]
pub mod advanced;
// ============================================================================
// BACKEND MODULES (feature-gated)
// ============================================================================
#[cfg(any(feature = "cli", feature = "tui", feature = "web"))]
pub mod backends;
#[cfg(any(feature = "cli", feature = "tui", feature = "web"))]
pub mod form_parser;
#[cfg(any(feature = "cli", feature = "tui", feature = "web"))]
pub mod prompts;
// ============================================================================
// OPTIONAL FEATURE MODULES
// ============================================================================
#[cfg(any(feature = "cli", feature = "tui", feature = "web"))]
pub mod autocompletion;
#[cfg(any(feature = "cli", feature = "tui", feature = "web"))]
pub mod backends;
pub mod error;
pub mod form_parser;
pub mod helpers;
pub mod nickel;
pub mod prompts;
#[cfg(feature = "ai_backend")]
pub mod ai;
@ -150,71 +79,29 @@ pub mod i18n;
#[cfg(feature = "templates")]
pub mod templates;
/// Common CLI patterns and help text
pub mod cli_common;
#[cfg(feature = "encryption")]
pub mod encryption_bridge;
#[cfg(feature = "encryption")]
pub use encrypt;
// ============================================================================
// RE-EXPORTS (backward compatibility - prefer prelude/prompt_api/advanced)
// ============================================================================
// Core exports (always available)
#[deprecated(
since = "0.2.0",
note = "Use `typedialog_core::prelude::*` or `error::Result` directly"
)]
// Re-export main types for convenient access
pub use autocompletion::{FilterCompleter, HistoryCompleter, PatternCompleter};
pub use backends::{BackendFactory, BackendType, FormBackend, RenderContext};
pub use error::{Error, ErrorWrapper, Result};
#[deprecated(
since = "0.2.0",
note = "Use `typedialog_core::prelude::*` or `helpers::format_results` directly"
)]
pub use form_parser::{DisplayItem, FieldDefinition, FieldType, FormDefinition};
pub use helpers::{format_results, to_json_string, to_json_value};
// Backend exports (require at least one backend)
#[cfg(any(feature = "cli", feature = "tui", feature = "web"))]
#[deprecated(
since = "0.2.0",
note = "Use `typedialog_core::advanced::*` or specific module imports"
)]
pub use autocompletion::{FilterCompleter, HistoryCompleter, PatternCompleter};
#[cfg(any(feature = "cli", feature = "tui", feature = "web"))]
#[deprecated(
since = "0.2.0",
note = "Use `typedialog_core::prelude::*` for BackendFactory/BackendType, or `advanced::*` for FormBackend/RenderContext"
)]
pub use backends::{BackendFactory, BackendType, FormBackend, RenderContext};
#[cfg(any(feature = "cli", feature = "tui", feature = "web"))]
#[deprecated(
since = "0.2.0",
note = "Use `typedialog_core::prelude::*` or `form_parser::*` directly"
)]
pub use form_parser::{DisplayItem, FieldDefinition, FieldType, FormDefinition};
// Optional feature exports
#[cfg(feature = "i18n")]
#[deprecated(
since = "0.2.0",
note = "Use `typedialog_core::prelude::TypeDialogConfig` or `config::TypeDialogConfig` directly"
)]
pub use config::TypeDialogConfig;
#[cfg(feature = "i18n")]
#[deprecated(
since = "0.2.0",
note = "Use `typedialog_core::prelude::*` or `i18n::*` directly"
)]
pub use i18n::{I18nBundle, LocaleResolver};
#[cfg(feature = "templates")]
#[deprecated(
since = "0.2.0",
note = "Use `typedialog_core::prelude::TemplateEngine` or `advanced::TemplateContextBuilder`"
)]
pub use templates::{TemplateContextBuilder, TemplateEngine};
/// Library version
@ -232,7 +119,6 @@ mod tests {
}
#[test]
#[cfg(any(feature = "cli", feature = "tui", feature = "web"))]
fn test_backend_factory_cli() {
let result = BackendFactory::create(BackendType::Cli);
#[cfg(feature = "cli")]

View File

@ -1,41 +0,0 @@
//! Prelude module for common typedialog use cases
//!
//! This module re-exports the most commonly used types for form execution.
//! Import with `use typedialog_core::prelude::*;` for quick access to core functionality.
//!
//! # Examples
//!
//! ```no_run
//! use typedialog_core::prelude::*;
//!
//! async fn example() -> Result<()> {
//! let mut backend = BackendFactory::create(BackendType::Cli)?;
//! let form = FormDefinition {
//! title: Some("Example Form".to_string()),
//! description: None,
//! locale: None,
//! fields: vec![],
//! display_items: vec![],
//! };
//! Ok(())
//! }
//! ```
#[cfg(any(feature = "cli", feature = "tui", feature = "web"))]
pub use crate::backends::{BackendFactory, BackendType, FormBackend};
pub use crate::error::{Error, ErrorWrapper, Result};
#[cfg(any(feature = "cli", feature = "tui", feature = "web"))]
pub use crate::form_parser::{FieldDefinition, FieldType, FormDefinition};
pub use crate::helpers::{format_results, to_json_string, to_json_value};
#[cfg(feature = "i18n")]
pub use crate::config::TypeDialogConfig;
#[cfg(feature = "i18n")]
pub use crate::i18n::{I18nBundle, LocaleResolver};
#[cfg(feature = "templates")]
pub use crate::templates::TemplateEngine;

View File

@ -1,26 +0,0 @@
//! Prompt-only API for direct interactive prompts
//!
//! This module provides a simplified API for using prompts directly without forms.
//! Use this when you need quick interactive prompts in your CLI applications.
//!
//! # Examples
//!
//! ```no_run
//! use typedialog_core::prompt_api::{self, Result};
//!
//! fn example() -> Result<()> {
//! let name = prompt_api::text("Enter your name", None, None)?;
//! let confirmed = prompt_api::confirm("Continue?", Some(true))?;
//! println!("Hello, {}!", name);
//! Ok(())
//! }
//! ```
pub use crate::error::{Error, ErrorWrapper, Result};
#[cfg(any(feature = "cli", feature = "tui", feature = "web"))]
pub use crate::prompts;
// Re-export commonly used prompt functions at module level
#[cfg(feature = "cli")]
pub use crate::prompts::{confirm, custom, date, editor, multi_select, password, select, text};

View File

@ -1,41 +1,41 @@
[package]
authors.workspace = true
edition.workspace = true
license.workspace = true
name = "typedialog-prov-gen"
repository.workspace = true
version.workspace = true
edition.workspace = true
authors.workspace = true
repository.workspace = true
license.workspace = true
[dependencies]
# Workspace dependencies
anyhow = { workspace = true }
async-trait = { workspace = true }
chrono = { workspace = true }
clap = { workspace = true }
dirs = { workspace = true }
futures = { workspace = true }
rand = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
serde_yaml = { workspace = true }
tempfile = { workspace = true }
tera = { workspace = true }
thiserror = { workspace = true }
tokio = { workspace = true }
toml = { workspace = true }
anyhow = { workspace = true }
thiserror = { workspace = true }
clap = { workspace = true }
async-trait = { workspace = true }
tokio = { workspace = true }
futures = { workspace = true }
tera = { workspace = true }
chrono = { workspace = true }
rand = { workspace = true }
tempfile = { workspace = true }
dirs = { workspace = true }
tracing = { workspace = true }
tracing-subscriber = { workspace = true }
# Internal dependencies (workspace path)
typedialog-ai = { path = "../typedialog-ai" }
typedialog-core = { path = "../typedialog-core", features = ["ai_backend"] }
typedialog-ai = { path = "../typedialog-ai" }
# Additional workspace dependencies
cargo_toml = { workspace = true }
uuid = { workspace = true }
regex = { workspace = true }
strum = { workspace = true }
strum_macros = { workspace = true }
uuid = { workspace = true }
[dev-dependencies]
tempfile = { workspace = true }
@ -44,10 +44,10 @@ tempfile = { workspace = true }
name = "typedialog-prov-gen"
path = "src/main.rs"
[package.metadata.binstall]
bin-dir = "bin/{ bin }"
pkg-fmt = "tgz"
pkg-url = "{ repo }/releases/download/v{ version }/typedialog-{ target }.tar.gz"
[package.metadata.binstall]
pkg-url = "{ repo }/releases/download/v{ version }/typedialog-{ target }.tar.gz"
bin-dir = "bin/{ bin }"
pkg-fmt = "tgz"
[lib]
name = "typedialog_prov_gen"

View File

@ -1,7 +1,9 @@
//! Mode C: Interactive AI-powered wizard for project configuration.
use crate::error::Result;
use crate::models::{ConfigField, DomainFeature, FieldType, ProjectSpec, ProjectType};
use crate::models::{
ConfigField, DomainFeature, FieldType, InfrastructureSpec, ProjectSpec, ProjectType,
};
/// Interactive wizard using typedialog-ai for conversational generation.
pub struct InteractiveWizard;
@ -15,8 +17,13 @@ impl InteractiveWizard {
let name = project_name.unwrap_or_else(|| "my-project".to_string());
// Simple defaults for wizard mode
let mut spec = ProjectSpec::new(name, ProjectType::WebService);
spec.domain_features = vec![DomainFeature::new("basic_config".to_string())];
let spec = ProjectSpec {
name,
project_type: ProjectType::WebService,
infrastructure: InfrastructureSpec::default(),
domain_features: vec![DomainFeature::new("basic_config".to_string())],
constraints: Vec::new(),
};
Ok(spec)
}

View File

@ -1,10 +1,8 @@
//! Generate command: orchestrates the provisioning generation pipeline.
use crate::config::Config;
use crate::error::Result;
use crate::input::{CargoIntrospector, ConfigLoader, NickelSchemaLoader};
use crate::models::ProjectSpec;
use crate::template::TemplateLoader;
use std::path::PathBuf;
use tracing::{debug, info};
@ -65,63 +63,39 @@ impl GenerateCommand {
info!("Generating provisioning structure to {}", output.display());
// Initialize template loader for generators that need it
let config = Config::default();
let template_loader = TemplateLoader::new(&config)?;
// Execute the complete generation pipeline in dependency order
// Execute the 7-layer generation pipeline in order
// Layer 1: Constraints (required by validators and fragments)
use crate::generator::{
ConstraintGenerator, DefaultsGenerator, DocumentationGenerator, FormComposerGenerator,
FragmentGenerator, IacTemplateGenerator, LocalesGenerator, SchemaGenerator,
ScriptGenerator, ValidatorGenerator, ValuesGenerator,
ConstraintGenerator, DefaultsGenerator, FragmentGenerator, SchemaGenerator,
ScriptGenerator, ValidatorGenerator,
};
// Layer 1: Constraints (foundation - needed by validators and fragments)
ConstraintGenerator::generate(&spec, &output)?;
debug!("✓ Constraints");
debug!("✓ Constraints layer");
// Layer 2: Schemas (domain types - needed by defaults and validators)
// Layer 2: Schemas (domain types)
SchemaGenerator::generate(&spec, &output)?;
debug!("✓ Schemas");
debug!("✓ Schemas layer");
// Layer 3: Validators (validation logic)
ValidatorGenerator::generate(&spec, &output)?;
debug!("✓ Validators");
debug!("✓ Validators layer");
// Layer 4: Defaults (sensible defaults - needed by values)
// Layer 4: Defaults (sensible defaults)
DefaultsGenerator::generate(&spec, &output)?;
debug!("✓ Defaults");
debug!("✓ Defaults layer");
// Layer 5: Fragments (form UI - needed by form composer)
// Layer 5: Fragments (form UI components)
FragmentGenerator::generate(&spec, &output)?;
debug!("✓ Fragments");
debug!("✓ Fragments layer");
// Layer 6: Scripts (orchestration)
ScriptGenerator::generate(&spec, &output, &template_loader)?;
debug!("✓ Scripts");
ScriptGenerator::generate(&spec, &output)?;
debug!("✓ Scripts layer");
// Layer 7: Values (runtime separation)
ValuesGenerator::generate(&spec, &output)?;
debug!("✓ Values");
// Layer 8: Locales (i18n support)
LocalesGenerator::generate(&spec, &output)?;
debug!("✓ Locales");
// Layer 9: Form Composer (master form configuration)
FormComposerGenerator::generate(&spec, &output)?;
debug!("✓ Form Composer");
// Layer 10: IaC Templates (infrastructure)
IacTemplateGenerator::generate(&spec, &output, &template_loader)?;
debug!("✓ IaC Templates");
// Layer 11: Documentation (comprehensive docs)
DocumentationGenerator::generate(&spec, &output, &template_loader)?;
debug!("✓ Documentation");
// TODO: Layer 7: JSON output generation
info!("Provisioning generation completed successfully!");
info!("Generated {} generators across 11 layers", 11);
info!("Generated structure at: {}", output.display());
Ok(())
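On both sides the pipeline is a fixed sequence of `Generator::generate` calls, each followed by a debug marker. A sketch of that ordered-layer shape reduced to a table-driven loop (illustrative only; the crate itself uses per-layer structs and `tracing`):

```rust
use std::path::Path;

type Result<T> = std::result::Result<T, String>;

// Placeholder layer bodies; the real work is elided.
fn constraints(_out: &Path) -> Result<()> { Ok(()) }
fn schemas(_out: &Path) -> Result<()> { Ok(()) }
fn validators(_out: &Path) -> Result<()> { Ok(()) }

fn run_pipeline(out: &Path) -> Result<()> {
    // Order matters: constraints feed validators and fragments.
    let layers: &[(&str, fn(&Path) -> Result<()>)] = &[
        ("Constraints", constraints),
        ("Schemas", schemas),
        ("Validators", validators),
    ];
    for (name, generate) in layers {
        generate(out)?;
        println!("✓ {} layer", name);
    }
    Ok(())
}

fn main() -> Result<()> {
    run_pipeline(Path::new("./out"))
}
```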

View File

@ -8,37 +8,28 @@ use std::path::Path;
pub struct ConstraintGenerator;
impl ConstraintGenerator {
/// Generate constraints.toml file and README.
/// Generate constraints.toml file.
pub fn generate(spec: &ProjectSpec, output_dir: impl AsRef<Path>) -> Result<()> {
let output_dir = output_dir.as_ref();
tracing::info!("Generating constraints for project: {}", spec.name);
let mut constraints_content = String::new();
// Add header with interpolation documentation
// Add header
constraints_content.push_str(&format!(
"# Constraint definitions for {}\n\
# Single source of truth for validation rules\n\
#\n\
# Constraints can be referenced in fragments using interpolation:\n\
# max_items = \"${{constraint.tracker.udp.max_items}}\"\n\
#\n\
# This enables centralized constraint management.\n\n",
"# Constraint definitions for {}\n# Single source of truth for validation rules\n\n",
spec.name
));
// Generate hierarchical constraint sections for each feature
// Generate constraint sections for each feature
for feature in &spec.domain_features {
// Add feature comment
if let Some(desc) = &feature.description {
constraints_content.push_str(&format!("# {}\n", desc));
}
constraints_content.push_str(&format!("[feature.{}]\n", feature.name));
constraints_content.push_str("# Field constraints\n\n");
// Generate hierarchical sections for fields with constraints
for field in &feature.fields {
if field.min.is_some() || field.max.is_some() {
// Use hierarchical path: [tracker.udp] instead of [feature.tracker.udp]
constraints_content.push_str(&format!("[{}.{}]\n", feature.name, field.name));
constraints_content
.push_str(&format!("[feature.{}.{}]\n", feature.name, field.name));
if let Some(min) = field.min {
constraints_content.push_str(&format!("min = {}\n", min));
@ -52,13 +43,12 @@ impl ConstraintGenerator {
}
}
// Add global array constraints with hierarchical paths
// Add global constraints from the spec
if !spec.constraints.is_empty() {
constraints_content.push_str("# Array constraints\n\n");
constraints_content.push_str("\n# Global constraints\n\n");
for constraint in &spec.constraints {
// Hierarchical path: [tracker.udp_trackers] instead of [constraint."tracker.udp_trackers"]
constraints_content.push_str(&format!("[{}]\n", constraint.path));
constraints_content.push_str(&format!("[constraint.\"{}\"]\n", constraint.path));
if let Some(min) = constraint.min_items {
constraints_content.push_str(&format!("min_items = {}\n", min));
@ -78,16 +68,7 @@ impl ConstraintGenerator {
}
}
// Create constraints directory
let constraints_dir = output_dir.join("constraints");
std::fs::create_dir_all(&constraints_dir).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to create constraints directory: {}",
e
))
})?;
// Write constraints.toml file
// Write constraints file
let constraints_file = output_dir.join("constraints.toml");
std::fs::write(&constraints_file, constraints_content).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
@ -97,103 +78,6 @@ impl ConstraintGenerator {
})?;
tracing::info!("Generated constraints file: {}", constraints_file.display());
// Generate README.md explaining the constraint system
let readme_content = Self::generate_readme(spec);
let readme_file = constraints_dir.join("README.md");
std::fs::write(&readme_file, readme_content).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write constraints README: {}",
e
))
})?;
tracing::info!("Generated constraints README: {}", readme_file.display());
Ok(())
}
/// Generate README.md documenting the constraint system.
fn generate_readme(spec: &ProjectSpec) -> String {
let mut content = String::new();
content.push_str(&format!("# Constraint System - {}\n\n", spec.name));
content.push_str(
"This directory contains documentation for the constraint system used in this project.\n\n\
## Overview\n\n\
Constraints define validation rules that apply to configuration values. They are centralized \
in `constraints.toml` and can be referenced throughout the provisioning system.\n\n\
## Usage\n\n\
### In Form Fragments\n\n\
Reference constraints using interpolation syntax:\n\n\
```toml\n\
[[elements]]\n\
type = \"repeatinggroup\"\n\
max_items = \"${constraint.tracker.udp_trackers.max_items}\"\n\
unique = true\n\
```\n\n\
### In Validators\n\n\
Validators read constraints from `constraints.toml` to enforce rules:\n\n\
```nickel\n\
let constraints = import \"../constraints.toml\" in\n\
std.contract.from_predicate (fun arr =>\n\
std.array.length arr <= constraints.tracker.udp_trackers.max_items\n\
)\n\
```\n\n\
## Constraint Types\n\n\
### Field Constraints\n\n\
- `min` - Minimum value for numbers\n\
- `max` - Maximum value for numbers\n\n\
### Array Constraints\n\n\
- `min_items` - Minimum array length\n\
- `max_items` - Maximum array length\n\
- `unique` - Items must be unique\n\
- `unique_key` - Field name to check for uniqueness\n\n\
## Example\n\n\
```toml\n\
[tracker.udp_trackers]\n\
min_items = 1\n\
max_items = 4\n\
unique = true\n\
unique_key = \"bind_address\"\n\
```\n\n\
This ensures:\n\
- At least 1 UDP tracker is configured\n\
- No more than 4 UDP trackers\n\
- Each tracker has a unique bind_address\n\n\
## Hierarchical Paths\n\n\
Constraints use hierarchical TOML paths for clarity:\n\n\
- `[tracker.udp]` - Hierarchical\n\
- `[feature.tracker.udp]` - Older `feature.`-prefixed style\n\
This matches the Nickel schema structure and makes interpolation cleaner.\n\n"
);
content.push_str("## Constraints in This Project\n\n");
if !spec.constraints.is_empty() {
for constraint in &spec.constraints {
content.push_str(&format!("### `{}`\n\n", constraint.path));
if let Some(min) = constraint.min_items {
content.push_str(&format!("- Min items: {}\n", min));
}
if let Some(max) = constraint.max_items {
content.push_str(&format!("- Max items: {}\n", max));
}
if constraint.unique {
content.push_str("- Uniqueness: Required\n");
if let Some(key) = &constraint.unique_key {
content.push_str(&format!("- Unique key: `{}`\n", key));
}
}
content.push('\n');
}
} else {
content.push_str("*No global constraints defined.*\n\n");
}
content
}
}
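For reference, a round trip over the kind of file this generator emits, using the `feature.`/`constraint.`-prefixed layout from one side of the diff (feature and field names invented, `toml` crate assumed):

```rust
fn main() -> Result<(), toml::de::Error> {
    let generated = r#"
[feature.tracker.port]
min = 1024
max = 65535

[constraint."tracker.udp_trackers"]
min_items = 1
max_items = 4
unique = true
unique_key = "bind_address"
"#;
    let value: toml::Value = generated.parse()?;
    // Quoted keys keep the dotted path as a single table name.
    assert_eq!(
        value["constraint"]["tracker.udp_trackers"]["max_items"].as_integer(),
        Some(4)
    );
    assert_eq!(value["feature"]["tracker"]["port"]["min"].as_integer(), Some(1024));
    Ok(())
}
```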

View File

@ -1,4 +1,4 @@
//! Defaults generator: produces default configuration values in Nickel with type contracts.
//! Defaults generator: produces default configuration values in Nickel.
use crate::error::Result;
use crate::models::{FieldType, ProjectSpec};
@ -27,55 +27,28 @@ impl DefaultsGenerator {
let mut defaults_content = String::new();
defaults_content.push_str(&format!(
"# Default configuration for {} feature\n# Generated for project: {}\n",
"# Default configuration for {} feature\n# Generated for project: {}\n\n",
feature.name, spec.name
));
if let Some(desc) = &feature.description {
defaults_content.push_str(&format!("# {}\n", desc));
}
defaults_content.push_str(&format!("let {} = {{\n", feature.name));
defaults_content.push('\n');
// Import schema if using type contracts
if feature.use_type_contracts {
defaults_content.push_str(&format!(
"let schemas = import \"../schemas/{}.ncl\" in\n\n",
feature.name
));
}
// Define defaults record
if feature.use_type_contracts {
let type_name = Self::capitalize_first(&feature.name);
defaults_content.push_str(&format!(
"# Default values with type contract\n{{\n {} | schemas.{} = {{\n",
feature.name, type_name
));
} else {
defaults_content.push_str(&format!("{{\n {} = {{\n", feature.name));
}
// Generate field defaults
for field in &feature.fields {
defaults_content.push_str(&format!(" # {}\n", field.prompt));
defaults_content.push_str(&format!(" # {}\n", field.prompt));
if let Some(default) = &field.default {
// Use provided default (same format regardless of type contracts)
defaults_content.push_str(&format!(" {} = {},\n", field.name, default));
defaults_content.push_str(&format!(" {} = {},\n", field.name, default));
} else {
// Generate sensible defaults based on field type
let default_val =
Self::generate_default_value(field, feature.use_type_contracts);
let default_val = Self::generate_default_value(field);
defaults_content.push_str(&format!(
" {} = {}, # Auto-generated default\n",
" {} = {}, # No default provided\n",
field.name, default_val
));
}
}
// Same closing format regardless of type contracts
defaults_content.push_str(" },\n}\n");
defaults_content.push_str("}\n\n");
// Write defaults file
let defaults_file = defaults_dir.join(format!("{}.ncl", feature.name));
@ -89,150 +62,22 @@ impl DefaultsGenerator {
tracing::debug!("Generated defaults for feature: {}", feature.name);
}
// Generate main defaults.ncl that imports all features
Self::generate_main_defaults(spec, &defaults_dir)?;
// Generate README
Self::generate_defaults_readme(spec, &defaults_dir)?;
tracing::info!("Successfully generated defaults");
Ok(())
}
/// Generate main defaults.ncl that imports all feature defaults.
fn generate_main_defaults(spec: &ProjectSpec, defaults_dir: &Path) -> Result<()> {
let mut content = String::new();
content.push_str(&format!(
"# Main defaults configuration for {}\n\
# Imports all feature defaults\n\n",
spec.name
));
// Import all feature defaults
for feature in &spec.domain_features {
content.push_str(&format!(
"let {} = import \"./{}.ncl\" in\n",
feature.name, feature.name
));
}
content.push_str("\n# Merge all defaults\nstd.record.merge_all [\n");
for feature in &spec.domain_features {
content.push_str(&format!(" {},\n", feature.name));
}
content.push_str("]\n");
let config_file = defaults_dir.join("config.ncl");
std::fs::write(&config_file, content).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write main defaults: {}",
e
))
})?;
Ok(())
}
/// Generate README.md for defaults directory.
fn generate_defaults_readme(spec: &ProjectSpec, defaults_dir: &Path) -> Result<()> {
let mut content = String::new();
content.push_str(&format!("# Default Configuration - {}\n\n", spec.name));
content.push_str(
"This directory contains default configuration values in Nickel format.\n\n\
## Overview\n\n\
Defaults provide sensible starting values for all configuration fields. \
Users can override these by merging their own values.\n\n\
## Files\n\n",
);
for feature in &spec.domain_features {
content.push_str(&format!("### `{}.ncl`\n\n", feature.name));
if let Some(desc) = &feature.description {
content.push_str(&format!("{}\n\n", desc));
}
if feature.use_type_contracts {
content.push_str("*Uses type contracts for validation.*\n\n");
}
}
content.push_str(
"## Usage\n\n\
### Merging with User Values\n\n\
```nickel\n\
let defaults = import \"./defaults/config.ncl\" in\n\
let user_values = {\n\
server.port = 9090,\n\
} in\n\
std.record.merge defaults user_values\n\
```\n\n\
### Type Contract Application\n\n\
When features use type contracts, defaults automatically enforce schemas:\n\n\
```nickel\n\
{\n\
tracker | schemas.Tracker = {\n\
port = 8080, # Type-checked\n\
},\n\
}\n\
```\n\n",
);
let readme_file = defaults_dir.join("README.md");
std::fs::write(&readme_file, content).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write defaults README: {}",
e
))
})?;
Ok(())
}
/// Generate a sensible default value for a field type.
fn generate_default_value(
field: &crate::models::ConfigField,
use_type_contract: bool,
) -> String {
fn generate_default_value(field: &crate::models::ConfigField) -> String {
match field.field_type {
FieldType::Text => "\"\"".to_string(),
FieldType::Number => {
if let Some(min) = field.min {
min.to_string()
} else {
"0".to_string()
}
}
FieldType::Number => "0".to_string(),
FieldType::Password => "\"\"".to_string(),
FieldType::Confirm => "false".to_string(),
FieldType::Select => {
if let Some(first_option) = field.options.first() {
format!("\"{}\"", first_option)
} else {
"\"\"".to_string()
}
}
FieldType::Select => "\"\"".to_string(),
FieldType::MultiSelect => "[]".to_string(),
FieldType::Editor => "\"\"".to_string(),
FieldType::Date => "\"\"".to_string(),
FieldType::RepeatingGroup => {
if use_type_contract {
// Generate array with one default item and type contract
"[\n # Add default items here\n ]".to_string()
} else {
"[]".to_string()
}
}
}
}
/// Capitalize first letter of a string.
fn capitalize_first(s: &str) -> String {
let mut chars = s.chars();
match chars.next() {
None => String::new(),
Some(first) => first.to_uppercase().collect::<String>() + chars.as_str(),
FieldType::RepeatingGroup => "[]".to_string(),
}
}
}
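The fallback logic reduces to a small type-to-literal map. A standalone sketch of the simpler branch, with a trimmed `FieldType` redeclared locally (string-like variants such as `Password`, `Editor`, and `Date` follow the `Text` arm):

```rust
#[derive(Debug)]
enum FieldType { Text, Number, Confirm, MultiSelect, RepeatingGroup }

// Emit a Nickel literal for a field with no explicit default.
fn default_value(ft: &FieldType) -> &'static str {
    match ft {
        FieldType::Text => "\"\"",
        FieldType::Number => "0",
        FieldType::Confirm => "false",
        FieldType::MultiSelect | FieldType::RepeatingGroup => "[]",
    }
}

fn main() {
    assert_eq!(default_value(&FieldType::Text), "\"\"");
    assert_eq!(default_value(&FieldType::Number), "0");
    assert_eq!(default_value(&FieldType::Confirm), "false");
    assert_eq!(default_value(&FieldType::MultiSelect), "[]");
    assert_eq!(default_value(&FieldType::RepeatingGroup), "[]");
}
```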

View File

@ -1,107 +0,0 @@
//! Documentation generator: creates comprehensive project documentation.
use crate::error::Result;
use crate::models::ProjectSpec;
use crate::template::TemplateLoader;
use std::path::Path;
use tera::Context;
/// Generates project documentation using templates.
pub struct DocumentationGenerator;
impl DocumentationGenerator {
/// Generate all project documentation.
pub fn generate(
spec: &ProjectSpec,
output_dir: impl AsRef<Path>,
template_loader: &TemplateLoader,
) -> Result<()> {
let output_dir = output_dir.as_ref();
tracing::info!("Generating documentation for project: {}", spec.name);
// Ensure docs directory exists
let docs_dir = output_dir.join("docs");
std::fs::create_dir_all(&docs_dir).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to create docs directory: {}",
e
))
})?;
// Prepare template context
let mut context = Context::new();
context.insert("project_name", &spec.name);
context.insert("project_type", &format!("{:?}", spec.project_type));
context.insert("features", &spec.domain_features);
context.insert("infrastructure", &spec.infrastructure);
// Generate main README
Self::generate_readme(spec, output_dir, template_loader, &context)?;
// Generate quickstart guide
Self::generate_quickstart(spec, &docs_dir, template_loader, &context)?;
// Generate nickel-roundtrip workflow documentation
Self::generate_nickel_roundtrip_guide(spec, &docs_dir, template_loader, &context)?;
tracing::info!("Successfully generated documentation");
Ok(())
}
/// Generate main project README.md.
fn generate_readme(
_spec: &ProjectSpec,
output_dir: &Path,
template_loader: &TemplateLoader,
context: &Context,
) -> Result<()> {
let readme = template_loader.render("docs/readme.md.tera", context)?;
let readme_file = output_dir.join("README.md");
std::fs::write(&readme_file, readme).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!("Failed to write README.md: {}", e))
})?;
tracing::debug!("Generated README.md");
Ok(())
}
/// Generate quickstart guide.
fn generate_quickstart(
_spec: &ProjectSpec,
docs_dir: &Path,
template_loader: &TemplateLoader,
context: &Context,
) -> Result<()> {
let quickstart = template_loader.render("docs/quickstart.md.tera", context)?;
let quickstart_file = docs_dir.join("quickstart.md");
std::fs::write(&quickstart_file, quickstart).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write quickstart.md: {}",
e
))
})?;
tracing::debug!("Generated quickstart.md");
Ok(())
}
/// Generate nickel-roundtrip workflow documentation.
fn generate_nickel_roundtrip_guide(
_spec: &ProjectSpec,
docs_dir: &Path,
template_loader: &TemplateLoader,
context: &Context,
) -> Result<()> {
let roundtrip = template_loader.render("docs/nickel-roundtrip.md.tera", context)?;
let roundtrip_file = docs_dir.join("nickel-roundtrip.md");
std::fs::write(&roundtrip_file, roundtrip).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write nickel-roundtrip.md: {}",
e
))
})?;
tracing::debug!("Generated nickel-roundtrip.md");
Ok(())
}
}
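The deleted generator delegates rendering to `TemplateLoader`, a wrapper over tera. A minimal tera round trip showing the shape of the `render` calls above (template registered inline; name and variable are illustrative):

```rust
fn main() -> tera::Result<()> {
    let mut tera = tera::Tera::default();
    tera.add_raw_template(
        "docs/readme.md.tera",
        "# {{ project_name }}\n\nGenerated documentation stub.",
    )?;

    let mut ctx = tera::Context::new();
    ctx.insert("project_name", "my-project");

    let readme = tera.render("docs/readme.md.tera", &ctx)?;
    assert!(readme.starts_with("# my-project"));
    Ok(())
}
```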

View File

@ -1,125 +0,0 @@
//! Form Composer generator: creates master form configuration from fragments.
use crate::error::Result;
use crate::models::ProjectSpec;
use std::path::Path;
/// Generates master form configuration that includes all fragments.
pub struct FormComposerGenerator;
impl FormComposerGenerator {
/// Generate master config-form.toml file.
pub fn generate(spec: &ProjectSpec, output_dir: impl AsRef<Path>) -> Result<()> {
let output_dir = output_dir.as_ref();
tracing::info!(
"Generating master form configuration for project: {}",
spec.name
);
let mut form_content = String::new();
// Generate form metadata
form_content.push_str(&Self::generate_form_metadata(spec)?);
// Generate includes for all feature fragments
form_content.push_str(&Self::generate_includes(spec)?);
// Write master config-form.toml
let form_file = output_dir.join("config-form.toml");
std::fs::write(&form_file, form_content).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write config-form.toml: {}",
e
))
})?;
tracing::info!("Successfully generated master form configuration");
Ok(())
}
/// Generate form metadata section.
fn generate_form_metadata(spec: &ProjectSpec) -> Result<String> {
let mut metadata = String::new();
metadata.push_str("# Master Form Configuration\n");
metadata.push_str(&format!("# Auto-generated for project: {}\n", spec.name));
metadata.push_str(
"# This form composes all feature fragments into a complete configuration workflow\n\n",
);
metadata.push_str("[form]\n");
metadata.push_str(&format!("name = \"{}-config\"\n", spec.name));
metadata.push_str(&format!(
"title = \"{} Configuration\"\n",
Self::capitalize_first(&spec.name)
));
metadata.push_str(&format!(
"description = \"Complete configuration wizard for {}\"\n",
spec.name
));
metadata.push_str("backend = \"cli\" # Use CLI backend by default (supports TUI, Web)\n");
metadata.push_str("multi_step = true # Enable multi-step form with sections\n");
metadata.push('\n');
Ok(metadata)
}
/// Generate includes section for all feature fragments.
fn generate_includes(spec: &ProjectSpec) -> Result<String> {
let mut includes = String::new();
includes.push_str("# Feature Fragment Includes\n");
includes.push_str(
"# Each feature fragment is conditionally included based on configuration\n\n",
);
for feature in &spec.domain_features {
includes.push_str(&Self::generate_feature_include(spec, feature)?);
includes.push('\n');
}
Ok(includes)
}
/// Generate include entry for a single feature fragment.
fn generate_feature_include(
_spec: &ProjectSpec,
feature: &crate::models::DomainFeature,
) -> Result<String> {
let mut include = String::new();
include.push_str("[[includes]]\n");
include.push_str(&format!(
"path = \"fragments/{}-section.toml\"\n",
feature.name
));
// Add description/label
if let Some(desc) = &feature.description {
include.push_str(&format!("label = \"{}\"\n", desc));
} else {
include.push_str(&format!(
"label = \"{} Configuration\"\n",
Self::capitalize_first(&feature.name)
));
}
// Add conditional logic if feature has optional flag
// For now, we'll include all features by default
// In the future, this could be based on a "required" field in DomainFeature
if !feature.fields.is_empty() {
include.push_str("required = true\n");
}
Ok(include)
}
/// Capitalize first letter of a string.
fn capitalize_first(s: &str) -> String {
let mut chars = s.chars();
match chars.next() {
None => String::new(),
Some(first) => first.to_uppercase().collect::<String>() + chars.as_str(),
}
}
}

View File

@ -24,319 +24,85 @@ impl FragmentGenerator {
// Generate fragments for each feature
for feature in &spec.domain_features {
Self::generate_feature_fragment(spec, feature, &fragments_dir)?;
}
let mut fragment_content = String::new();
// Generate README
Self::generate_fragments_readme(spec, &fragments_dir)?;
tracing::info!("Successfully generated form fragments");
Ok(())
}
/// Generate a fragment file for a single feature.
fn generate_feature_fragment(
spec: &ProjectSpec,
feature: &crate::models::DomainFeature,
fragments_dir: &Path,
) -> Result<()> {
let mut fragment_content = String::new();
fragment_content.push_str(&format!(
"# Form fragment for {} feature\n# Auto-generated for project: {}\n\n",
feature.name, spec.name
));
fragment_content.push_str(&format!("[section.{}]\n", feature.name));
if let Some(desc) = &feature.description {
fragment_content.push_str(&format!("description = \"{}\"\n", desc));
}
fragment_content.push('\n');
// Generate field definitions for this feature
for field in &feature.fields {
fragment_content.push_str(&Self::generate_field_definition(
feature,
field,
spec,
fragments_dir,
)?);
}
// Write fragment file
let fragment_file = fragments_dir.join(format!("{}-section.toml", feature.name));
std::fs::write(&fragment_file, fragment_content).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write fragment file: {}",
e
))
})?;
tracing::debug!("Generated fragment for feature: {}", feature.name);
Ok(())
}
/// Generate TOML definition for a single field.
fn generate_field_definition(
feature: &crate::models::DomainFeature,
field: &crate::models::ConfigField,
spec: &ProjectSpec,
fragments_dir: &Path,
) -> Result<String> {
let mut field_def = String::new();
field_def.push_str(&format!("[[section.{}.fields]]\n", feature.name));
field_def.push_str(&format!("name = \"{}\"\n", field.name));
field_def.push_str(&format!("prompt = \"{}\"\n", field.prompt));
field_def.push_str(&format!(
"type = \"{}\"\n",
Self::field_type_to_form_type(&field.field_type)
));
// Add nickel_path for hierarchical Nickel output
let nickel_path = format!("{}.{}", feature.name, field.name);
field_def.push_str(&format!("nickel_path = \"{}\"\n", nickel_path));
if let Some(help) = &field.help {
field_def.push_str(&format!("help = \"{}\"\n", help));
}
if let Some(placeholder) = &field.placeholder {
field_def.push_str(&format!("placeholder = \"{}\"\n", placeholder));
}
if !field.required {
field_def.push_str("required = false\n");
}
if field.sensitive {
field_def.push_str("sensitive = true\n");
if let Some(backend) = &field.encryption_backend {
field_def.push_str(&format!("encryption_backend = \"{}\"\n", backend));
}
}
if !field.options.is_empty() {
field_def.push_str("options = [\n");
for option in &field.options {
field_def.push_str(&format!(" \"{}\",\n", option));
}
field_def.push_str("]\n");
}
// Use constraint interpolation for min/max from constraints.toml
if field.min.is_some() || field.max.is_some() {
if field.min.is_some() {
// Reference min/max from constraints.toml via interpolation (single source of truth)
field_def.push_str(&format!(
"min = \"${{constraint.{}.{}.min}}\"\n",
feature.name, field.name
));
}
if field.max.is_some() {
field_def.push_str(&format!(
"max = \"${{constraint.{}.{}.max}}\"\n",
feature.name, field.name
));
}
}
// Handle repeating groups - add constraint interpolation for array bounds
if matches!(field.field_type, FieldType::RepeatingGroup) {
// Check if this field has array constraints in spec
let constraint_path = format!("{}.{}", feature.name, field.name);
if let Some(constraint) = spec.constraints.iter().find(|c| c.path == constraint_path) {
if constraint.min_items.is_some() {
field_def.push_str(&format!(
"min_items = \"${{constraint.{}.min_items}}\"\n",
constraint_path
));
}
if constraint.max_items.is_some() {
field_def.push_str(&format!(
"max_items = \"${{constraint.{}.max_items}}\"\n",
constraint_path
));
}
if constraint.unique {
field_def.push_str("unique = true\n");
if let Some(unique_key) = &constraint.unique_key {
field_def.push_str(&format!("unique_key = \"{}\"\n", unique_key));
}
}
}
// Reference item fragment if it exists
field_def.push_str(&format!(
"item_fragment = \"fragments/{}-item.toml\"\n",
field.name
fragment_content.push_str(&format!(
"# Form fragment for {} feature\n# Auto-generated for project: {}\n\n",
feature.name, spec.name
));
// Generate item fragment file for this repeating group
Self::generate_item_fragment(feature, field, fragments_dir)?;
}
fragment_content.push_str(&format!("[section.{}]\n", feature.name));
field_def.push('\n');
Ok(field_def)
}
/// Generate item fragment for repeating group fields.
fn generate_item_fragment(
feature: &crate::models::DomainFeature,
parent_field: &crate::models::ConfigField,
fragments_dir: &Path,
) -> Result<()> {
// For now, generate a minimal item fragment
// In a real implementation, this would extract nested fields from the repeating group
let mut item_content = String::new();
item_content.push_str(&format!(
"# Item fragment for {} repeating group\n# Auto-generated for feature: {}\n\n",
parent_field.name, feature.name
));
item_content.push_str(&format!("[item.{}]\n", parent_field.name));
item_content.push_str(&format!(
"description = \"Item definition for {}\"\n\n",
parent_field.prompt
));
// If the parent field has nested structure, we would generate fields here
// For now, we indicate this is a placeholder for future enhancement
item_content.push_str("# Fields for this item would be defined here\n");
item_content.push_str("# Example:\n");
item_content.push_str("# [[item.fields]]\n");
item_content.push_str("# name = \"field_name\"\n");
item_content.push_str("# type = \"text\"\n");
let item_file = fragments_dir.join(format!("{}-item.toml", parent_field.name));
std::fs::write(&item_file, item_content).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write item fragment: {}",
e
))
})?;
Ok(())
}
/// Generate README.md for fragments directory.
fn generate_fragments_readme(spec: &ProjectSpec, fragments_dir: &Path) -> Result<()> {
let mut content = String::new();
content.push_str(&format!("# Form Fragments - {}\n\n", spec.name));
content.push_str(
"This directory contains TypeDialog form fragments for configuration collection.\n\n\
## Overview\n\n\
Fragments are modular TOML files that define interactive forms for each domain feature. \
They use TypeDialog's declarative syntax to create CLI, TUI, or Web forms.\n\n\
## Files\n\n",
);
for feature in &spec.domain_features {
content.push_str(&format!("### `{}-section.toml`\n\n", feature.name));
if let Some(desc) = &feature.description {
content.push_str(&format!("{}\\n\n", desc));
fragment_content.push_str(&format!("description = \"{}\"\n", desc));
}
content.push_str(&format!("**Fields:** {}\\n\n", feature.fields.len()));
fragment_content.push('\n');
// List repeating groups if any
let repeating_fields: Vec<_> = feature
.fields
.iter()
.filter(|f| matches!(f.field_type, FieldType::RepeatingGroup))
.collect();
// Generate field definitions for this feature
for field in &feature.fields {
fragment_content.push_str(&format!("[[section.{}.fields]]\n", feature.name));
if !repeating_fields.is_empty() {
content.push_str("**Repeating Groups:**\\n");
for field in repeating_fields {
content.push_str(&format!("- `{}` - {}\\n", field.name, field.prompt));
fragment_content.push_str(&format!("name = \"{}\"\n", field.name));
fragment_content.push_str(&format!("prompt = \"{}\"\n", field.prompt));
fragment_content.push_str(&format!(
"type = \"{}\"\n",
Self::field_type_to_form_type(&field.field_type)
));
if let Some(help) = &field.help {
fragment_content.push_str(&format!("help = \"{}\"\n", help));
}
content.push('\n');
if let Some(placeholder) = &field.placeholder {
fragment_content.push_str(&format!("placeholder = \"{}\"\n", placeholder));
}
if !field.required {
fragment_content.push_str("required = false\n");
}
if field.sensitive {
fragment_content.push_str("sensitive = true\n");
if let Some(backend) = &field.encryption_backend {
fragment_content
.push_str(&format!("encryption_backend = \"{}\"\n", backend));
}
}
if !field.options.is_empty() {
fragment_content.push_str("options = [\n");
for option in &field.options {
fragment_content.push_str(&format!(" \"{}\",\n", option));
}
fragment_content.push_str("]\n");
}
if field.min.is_some() || field.max.is_some() {
if let Some(min) = field.min {
fragment_content.push_str(&format!("min = {}\n", min));
}
if let Some(max) = field.max {
fragment_content.push_str(&format!("max = {}\n", max));
}
}
fragment_content.push('\n');
}
// Write fragment file
let fragment_file = fragments_dir.join(format!("{}-section.toml", feature.name));
std::fs::write(&fragment_file, fragment_content).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write fragment file: {}",
e
))
})?;
tracing::debug!("Generated fragment for feature: {}", feature.name);
}
content.push_str(
"## Features\n\n\
### Hierarchical Nickel Paths\n\n\
Fields include `nickel_path` attribute for structured output:\n\n\
```toml\n\
[[section.tracker.fields]]\n\
name = \"bind_address\"\n\
nickel_path = \"tracker.bind_address\"\n\
```\n\n\
This generates Nickel output like:\n\n\
```nickel\n\
{\n\
tracker.bind_address = \"0.0.0.0:6969\",\n\
}\n\
```\n\n\
### Constraint Interpolation\n\n\
Min/max values reference centralized constraints:\n\n\
```toml\n\
min = \"${constraint.tracker.port.min}\"\n\
max = \"${constraint.tracker.port.max}\"\n\
```\n\n\
This pulls values from `constraints.toml` at runtime, ensuring single source of truth.\n\n\
### Repeating Groups\n\n\
Array fields with item fragments:\n\n\
```toml\n\
type = \"repeating_group\"\n\
min_items = \"${constraint.tracker.udp_trackers.min_items}\"\n\
max_items = \"${constraint.tracker.udp_trackers.max_items}\"\n\
unique = true\n\
unique_key = \"bind_address\"\n\
item_fragment = \"fragments/udp_trackers-item.toml\"\n\
```\n\n\
Item fragments define the structure of each array element.\n\n\
## Usage\n\n\
### CLI Backend\n\n\
```bash\n\
typedialog --form fragments/tracker-section.toml --output config.json\n\
```\n\n\
### TUI Backend\n\n\
```bash\n\
typedialog-tui --form fragments/tracker-section.toml\n\
```\n\n\
### Web Backend\n\n\
```bash\n\
typedialog-web --fragments-dir fragments/ --port 8080\n\
```\n\n\
### Composing Multiple Fragments\n\n\
Use a master form that includes fragments:\n\n\
```toml\n\
# config-form.toml\n\
[form]\n\
name = \"complete-config\"\n\
\n\
[[includes]]\n\
path = \"fragments/tracker-section.toml\"\n\
\n\
[[includes]]\n\
path = \"fragments/database-section.toml\"\n\
condition = \"${config.enable_database}\"\n\
```\n\n\
## Integration with Nickel\n\n\
Fragment output feeds into Nickel roundtrip workflow:\n\n\
1. **TypeDialog execution** - User fills form JSON output\n\
2. **json-to-nickel** - Convert JSON to Nickel syntax\n\
3. **Validation** - Apply schemas and validators\n\
4. **Export** - Generate final config files\n\n\
See `docs/nickel-roundtrip.md` for complete workflow.\n\n",
);
let readme_file = fragments_dir.join("README.md");
std::fs::write(&readme_file, content).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write fragments README: {}",
e
))
})?;
tracing::info!("Successfully generated form fragments");
Ok(())
}
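A quick well-formedness check for the fragment shape this generator writes: one `[section.<feature>]` table plus an array-of-tables of fields (names invented, `toml` crate assumed):

```rust
fn main() -> Result<(), toml::de::Error> {
    let fragment = r#"
[section.tracker]
description = "Tracker settings"

[[section.tracker.fields]]
name = "bind_address"
prompt = "Bind address"
type = "text"
required = false
"#;
    let parsed: toml::Value = fragment.parse()?;
    let fields = parsed["section"]["tracker"]["fields"]
        .as_array()
        .expect("fields should parse as an array of tables");
    assert_eq!(fields.len(), 1);
    Ok(())
}
```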

View File

@ -1,433 +0,0 @@
//! IaC Template generator: renders Terraform, Ansible, and Docker Compose templates.
use crate::error::Result;
use crate::models::ProjectSpec;
use crate::template::TemplateLoader;
use std::path::Path;
use tera::Context;
/// Generates Infrastructure as Code templates from domain features.
pub struct IacTemplateGenerator;
impl IacTemplateGenerator {
/// Generate IaC templates for all configured providers.
pub fn generate(
spec: &ProjectSpec,
output_dir: impl AsRef<Path>,
template_loader: &TemplateLoader,
) -> Result<()> {
let output_dir = output_dir.as_ref();
tracing::info!("Generating IaC templates for project: {}", spec.name);
// Ensure iac directory exists
let iac_dir = output_dir.join("iac");
std::fs::create_dir_all(&iac_dir).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to create iac directory: {}",
e
))
})?;
// Prepare template context
let mut context = Context::new();
context.insert("project_name", &spec.name);
context.insert("features", &spec.domain_features);
context.insert("infrastructure", &spec.infrastructure);
// Generate Terraform/OpenTofu templates
if spec.iac_templates.terraform_enabled {
Self::generate_terraform_templates(spec, &iac_dir, template_loader, &context)?;
}
// Generate Ansible templates
if spec.iac_templates.ansible_enabled {
Self::generate_ansible_templates(spec, &iac_dir, template_loader, &context)?;
}
// Generate Docker Compose templates
if spec.iac_templates.docker_compose_enabled {
Self::generate_docker_compose_templates(spec, &iac_dir, template_loader, &context)?;
}
// Generate README
Self::generate_iac_readme(spec, &iac_dir)?;
tracing::info!("Successfully generated IaC templates");
Ok(())
}
/// Generate Terraform/OpenTofu templates.
fn generate_terraform_templates(
spec: &ProjectSpec,
iac_dir: &Path,
template_loader: &TemplateLoader,
context: &Context,
) -> Result<()> {
let tofu_dir = iac_dir.join("tofu");
std::fs::create_dir_all(&tofu_dir).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to create tofu directory: {}",
e
))
})?;
// Generate common Terraform configuration
let common_tf = template_loader.render("iac/tofu/common/main.tf.tera", context)?;
let common_dir = tofu_dir.join("common");
std::fs::create_dir_all(&common_dir).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to create tofu/common directory: {}",
e
))
})?;
std::fs::write(common_dir.join("main.tf"), common_tf).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write common/main.tf: {}",
e
))
})?;
// Generate provider-specific configurations
let providers = vec!["hetzner", "lxd"];
for provider in providers {
let provider_dir = tofu_dir.join(provider);
std::fs::create_dir_all(&provider_dir).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to create tofu/{} directory: {}",
provider, e
))
})?;
// Render main.tf for this provider
let template_path = format!("iac/tofu/{}/main.tf.tera", provider);
let provider_tf = template_loader.render(&template_path, context)?;
std::fs::write(provider_dir.join("main.tf"), provider_tf).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write {}/main.tf: {}",
provider, e
))
})?;
// Generate variables.tf
Self::generate_terraform_variables(&provider_dir, spec)?;
// Generate outputs.tf
Self::generate_terraform_outputs(&provider_dir, spec)?;
}
tracing::debug!("Generated Terraform templates");
Ok(())
}
/// Generate Terraform variables.tf file.
fn generate_terraform_variables(provider_dir: &Path, spec: &ProjectSpec) -> Result<()> {
let mut variables = String::new();
variables.push_str("# Terraform variables\n\n");
// Generate variables for each domain feature
for feature in &spec.domain_features {
variables.push_str(&format!("# {} configuration\n", feature.name));
for field in &feature.fields {
variables.push_str(&format!(
"variable \"{}_{} \" {{\n description = \"{}\"\n type = string\n",
feature.name, field.name, field.prompt
));
if let Some(default) = &field.default {
variables.push_str(&format!(" default = {}\n", default));
}
variables.push_str("}\n\n");
}
}
std::fs::write(provider_dir.join("variables.tf"), variables).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write variables.tf: {}",
e
))
})?;
Ok(())
}
/// Generate Terraform outputs.tf file.
fn generate_terraform_outputs(provider_dir: &Path, spec: &ProjectSpec) -> Result<()> {
let mut outputs = String::new();
outputs.push_str("# Terraform outputs\n\n");
outputs.push_str(&format!(
"output \"{}_instance_ip\" {{\n description = \"IP address of the {} instance\"\n value = \"placeholder\"\n}}\n\n",
spec.name, spec.name
));
outputs.push_str(&format!(
"output \"{}_connection_string\" {{\n description = \"Connection string for {}\"\n value = \"placeholder\"\n sensitive = true\n}}\n",
spec.name, spec.name
));
std::fs::write(provider_dir.join("outputs.tf"), outputs).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!("Failed to write outputs.tf: {}", e))
})?;
Ok(())
}
/// Generate Ansible templates.
fn generate_ansible_templates(
spec: &ProjectSpec,
iac_dir: &Path,
template_loader: &TemplateLoader,
context: &Context,
) -> Result<()> {
let ansible_dir = iac_dir.join("ansible");
std::fs::create_dir_all(&ansible_dir).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to create ansible directory: {}",
e
))
})?;
// Generate playbook
let playbook = template_loader.render("iac/ansible/playbook.yml.tera", context)?;
std::fs::write(ansible_dir.join("playbook.yml"), playbook).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write playbook.yml: {}",
e
))
})?;
// Generate inventory
let inventory = template_loader.render("iac/ansible/inventory.ini.tera", context)?;
std::fs::write(ansible_dir.join("inventory.ini"), inventory).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write inventory.ini: {}",
e
))
})?;
// Generate roles directory with basic role structure
let roles_dir = ansible_dir.join("roles").join(&spec.name);
std::fs::create_dir_all(roles_dir.join("tasks")).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to create roles/{}/tasks directory: {}",
spec.name, e
))
})?;
// Generate tasks/main.yml
let tasks_main = format!(
"---\n# Tasks for {}\n\n- name: Deploy {} configuration\n template:\n src: config.j2\n dest: /etc/{}/config.ncl\n",
spec.name, spec.name, spec.name
);
std::fs::write(roles_dir.join("tasks/main.yml"), tasks_main).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write tasks/main.yml: {}",
e
))
})?;
tracing::debug!("Generated Ansible templates");
Ok(())
}
/// Generate Docker Compose templates.
fn generate_docker_compose_templates(
spec: &ProjectSpec,
iac_dir: &Path,
template_loader: &TemplateLoader,
context: &Context,
) -> Result<()> {
let docker_dir = iac_dir.join("docker");
std::fs::create_dir_all(&docker_dir).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to create docker directory: {}",
e
))
})?;
// Generate docker-compose.yml
let compose = template_loader.render("iac/docker-compose/services.yml.tera", context)?;
std::fs::write(docker_dir.join("docker-compose.yml"), compose).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write docker-compose.yml: {}",
e
))
})?;
// Generate monitoring stack if enabled
if !spec.infrastructure.monitoring.is_empty() {
let monitoring =
template_loader.render("iac/docker-compose/monitoring.yml.tera", context)?;
std::fs::write(docker_dir.join("docker-compose.monitoring.yml"), monitoring).map_err(
|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write docker-compose.monitoring.yml: {}",
e
))
},
)?;
}
// Generate Dockerfile
Self::generate_dockerfile(&docker_dir, spec)?;
tracing::debug!("Generated Docker Compose templates");
Ok(())
}
/// Generate Dockerfile for the application.
fn generate_dockerfile(docker_dir: &Path, spec: &ProjectSpec) -> Result<()> {
let dockerfile = format!(
"# Dockerfile for {}\n\
FROM rust:1.75 as builder\n\
\n\
WORKDIR /app\n\
COPY . .\n\
RUN cargo build --release\n\
\n\
FROM debian:bookworm-slim\n\
\n\
# Install Nickel\n\
RUN apt-get update && apt-get install -y curl \\\n\
&& curl -L https://github.com/tweag/nickel/releases/download/1.3.0/nickel-1.3.0-x86_64-unknown-linux-gnu.tar.gz | tar xz -C /usr/local/bin \\\n\
&& rm -rf /var/lib/apt/lists/*\n\
\n\
COPY --from=builder /app/target/release/{} /usr/local/bin/{}\n\
COPY config.ncl /etc/{}/config.ncl\n\
\n\
EXPOSE 8080\n\
\n\
CMD [\"{}\", \"--config\", \"/etc/{}/config.ncl\"]\n",
spec.name, spec.name, spec.name, spec.name, spec.name, spec.name
);
std::fs::write(docker_dir.join("Dockerfile"), dockerfile).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!("Failed to write Dockerfile: {}", e))
})?;
Ok(())
}
/// Generate README.md for iac directory.
fn generate_iac_readme(spec: &ProjectSpec, iac_dir: &Path) -> Result<()> {
let mut content = String::new();
content.push_str(&format!("# Infrastructure as Code - {}\n\n", spec.name));
content.push_str(
"This directory contains Infrastructure as Code templates for provisioning and deployment.\n\n\
## Overview\n\n\
IaC templates use Terraform/OpenTofu, Ansible, and Docker Compose to deploy the application.\n\n\
## Directory Structure\n\n",
);
if spec.iac_templates.terraform_enabled {
content.push_str(
"### `tofu/` - Terraform/OpenTofu\n\n\
Terraform configurations for infrastructure provisioning.\n\n\
- `common/` - Shared Terraform modules\n\
- `hetzner/` - Hetzner Cloud provider configuration\n\
- `lxd/` - LXD/Incus local containers\n\n\
**Usage:**\n\n\
```bash\n\
cd iac/tofu/hetzner\n\
terraform init\n\
terraform plan\n\
terraform apply\n\
```\n\n",
);
}
if spec.iac_templates.ansible_enabled {
content.push_str(
"### `ansible/` - Configuration Management\n\n\
Ansible playbooks for application deployment and configuration.\n\n\
- `playbook.yml` - Main deployment playbook\n\
- `inventory.ini` - Host inventory\n\
- `roles/` - Ansible roles\n\n\
**Usage:**\n\n\
```bash\n\
cd iac/ansible\n\
ansible-playbook -i inventory.ini playbook.yml\n\
```\n\n",
);
}
if spec.iac_templates.docker_compose_enabled {
content.push_str(
"### `docker/` - Container Orchestration\n\n\
Docker Compose configurations for containerized deployment.\n\n\
- `docker-compose.yml` - Main services\n\
- `docker-compose.monitoring.yml` - Monitoring stack (optional)\n\
- `Dockerfile` - Application container image\n\n\
**Usage:**\n\n\
```bash\n\
cd iac/docker\n\
docker-compose up -d\n\
docker-compose -f docker-compose.yml -f docker-compose.monitoring.yml up -d # With monitoring\n\
```\n\n",
);
}
content.push_str(
"## Integration with Nickel Configuration\n\n\
All IaC templates reference the Nickel configuration generated by typedialog:\n\n\
1. **TypeDialog** collects configuration `config.json`\n\
2. **json-to-nickel** converts to Nickel `config.ncl`\n\
3. **Validators** enforce constraints\n\
4. **IaC templates** reference validated config\n\n\
### Example: Terraform\n\n\
```hcl\n\
             # Use the exported Nickel config (nickel export --format json config.ncl > config.json)\n\
             locals {\n\
               config = jsondecode(file(\"../../config.json\"))\n\
}\n\
\n\
resource \"server\" \"main\" {\n\
name = local.config.server.name\n\
port = local.config.server.port\n\
}\n\
```\n\n\
### Example: Ansible\n\n\
```yaml\n\
- name: Deploy configuration\n\
template:\n\
src: config.ncl.j2\n\
dest: /etc/app/config.ncl\n\
vars:\n\
config_data: \"{{ lookup('file', '../../config.ncl') }}\"\n\
```\n\n\
## Prerequisites\n\n",
);
if spec.iac_templates.terraform_enabled {
content.push_str("- **terraform** or **tofu** - Infrastructure provisioning\n");
}
if spec.iac_templates.ansible_enabled {
content.push_str("- **ansible** - Configuration management\n");
}
if spec.iac_templates.docker_compose_enabled {
content.push_str("- **docker** and **docker-compose** - Container runtime\n");
}
content.push_str("- **nickel** - Configuration language runtime\n\n");
content.push_str(
"## Workflow\n\n\
1. Generate configuration: `./scripts/configure.sh`\n\
2. Provision infrastructure: `cd iac/tofu/hetzner && terraform apply`\n\
3. Deploy application: `cd iac/ansible && ansible-playbook playbook.yml`\n\
4. Verify deployment: Check monitoring dashboards\n\n",
);
let readme_file = iac_dir.join("README.md");
std::fs::write(&readme_file, content).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!("Failed to write iac README: {}", e))
})?;
Ok(())
}
}
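// The README assembly above is plain string concatenation gated on the
// `spec.iac_templates.*_enabled` flags; a small sketch with hypothetical
// booleans shows how only the enabled sections appear:
```rust
fn main() {
    // Hypothetical flags standing in for spec.iac_templates.*_enabled.
    let (terraform_enabled, ansible_enabled, docker_compose_enabled) = (true, false, true);
    let mut content = String::from("# Infrastructure as Code - demo\n\n");
    if terraform_enabled {
        content.push_str("### `tofu/` - Terraform/OpenTofu\n\n");
    }
    if ansible_enabled {
        content.push_str("### `ansible/` - Configuration Management\n\n");
    }
    if docker_compose_enabled {
        content.push_str("### `docker/` - Container Orchestration\n\n");
    }
    assert!(content.contains("tofu"));
    assert!(!content.contains("ansible"));
}
```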

View File

@ -1,205 +0,0 @@
//! Locales generator: creates Fluent translation files for i18n support.
use crate::error::Result;
use crate::models::ProjectSpec;
use std::path::Path;
/// Generates Fluent translation files for internationalization.
pub struct LocalesGenerator;
impl LocalesGenerator {
/// Generate locale files for all configured languages.
pub fn generate(spec: &ProjectSpec, output_dir: impl AsRef<Path>) -> Result<()> {
let output_dir = output_dir.as_ref();
tracing::info!("Generating locale files for project: {}", spec.name);
// Ensure locales directory exists
let locales_dir = output_dir.join("locales");
std::fs::create_dir_all(&locales_dir).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to create locales directory: {}",
e
))
})?;
// Generate English locale (default)
Self::generate_locale(spec, &locales_dir, "en-US")?;
// Generate additional configured locales
for locale_spec in &spec.locales {
Self::generate_locale(spec, &locales_dir, &locale_spec.language_code)?;
}
// Generate README
Self::generate_locales_readme(spec, &locales_dir)?;
tracing::info!("Successfully generated locale files");
Ok(())
}
/// Generate Fluent translation file for a specific locale.
fn generate_locale(spec: &ProjectSpec, locales_dir: &Path, locale_code: &str) -> Result<()> {
let mut fluent_content = String::new();
fluent_content.push_str(&format!(
"# Fluent translations for {} ({})\n",
spec.name, locale_code
));
fluent_content.push_str("# https://projectfluent.org/\n\n");
// App-level strings
fluent_content.push_str("## Application\n\n");
fluent_content.push_str(&format!("app-name = {}\n", spec.name));
fluent_content.push_str(&format!("app-title = {} Configuration\n", spec.name));
fluent_content.push_str("app-description = Interactive configuration wizard\n\n");
// Feature-specific strings
for feature in &spec.domain_features {
fluent_content.push_str(&format!("## {} Feature\n\n", feature.name));
// Feature metadata
fluent_content.push_str(&format!("{}-title = ", feature.name));
if let Some(desc) = &feature.description {
fluent_content.push_str(&format!("{}\n", desc));
} else {
fluent_content.push_str(&format!(
"{} Configuration\n",
Self::capitalize_first(&feature.name)
));
}
// Field labels and help text
for field in &feature.fields {
// Field prompt
fluent_content.push_str(&format!(
"{}-{}-prompt = {}\n",
feature.name, field.name, field.prompt
));
// Field help text
if let Some(help) = &field.help {
fluent_content.push_str(&format!(
"{}-{}-help = {}\n",
feature.name, field.name, help
));
}
// Placeholder
if let Some(placeholder) = &field.placeholder {
fluent_content.push_str(&format!(
"{}-{}-placeholder = {}\n",
feature.name, field.name, placeholder
));
}
}
fluent_content.push('\n');
}
// Common UI strings
fluent_content.push_str("## Common UI Strings\n\n");
fluent_content.push_str("common-continue = Continue\n");
fluent_content.push_str("common-back = Back\n");
fluent_content.push_str("common-cancel = Cancel\n");
fluent_content.push_str("common-finish = Finish\n");
fluent_content.push_str("common-save = Save\n");
fluent_content.push_str("common-required = Required\n");
fluent_content.push_str("common-optional = Optional\n\n");
// Validation messages
fluent_content.push_str("## Validation Messages\n\n");
fluent_content.push_str("validation-required = This field is required\n");
fluent_content.push_str("validation-invalid = Invalid value\n");
fluent_content.push_str("validation-too-short = Value is too short\n");
fluent_content.push_str("validation-too-long = Value is too long\n");
fluent_content.push_str("validation-out-of-range = Value out of range\n\n");
// Success/error messages
fluent_content.push_str("## Messages\n\n");
fluent_content.push_str("success-saved = Configuration saved successfully\n");
fluent_content.push_str("error-load-failed = Failed to load configuration\n");
fluent_content.push_str("error-save-failed = Failed to save configuration\n");
// Write locale file
let locale_file = locales_dir.join(format!("{}.ftl", locale_code));
std::fs::write(&locale_file, fluent_content).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write {} locale file: {}",
locale_code, e
))
})?;
tracing::debug!("Generated {} locale file", locale_code);
Ok(())
}
/// Generate README.md for locales directory.
fn generate_locales_readme(spec: &ProjectSpec, locales_dir: &Path) -> Result<()> {
let mut content = String::new();
content.push_str(&format!("# Localization - {}\n\n", spec.name));
content.push_str(
"This directory contains Fluent translation files for internationalization (i18n).\n\n\
## Overview\n\n\
Fluent is a modern localization system designed for natural-sounding translations. \
See https://projectfluent.org/ for more information.\n\n\
## Files\n\n\
- `en-US.ftl` - English (United States) translations (default)\n",
);
for locale_spec in &spec.locales {
content.push_str(&format!(
"- `{}.ftl` - Translations for {}\n",
locale_spec.language_code, locale_spec.language_code
));
}
content.push_str(
"\n\
## Adding New Locales\n\n\
1. Copy `en-US.ftl` to `<locale-code>.ftl` (e.g., `es-ES.ftl` for Spanish)\n\
2. Translate all strings while preserving the message IDs\n\
3. Update the form configuration to detect and use the new locale\n\n\
## Message Structure\n\n\
Fluent messages follow this pattern:\n\n\
```fluent\n\
# Comments\n\
message-id = Translation text\n\
             message-with-variable = Hello, { $name }!\n\
```\n\n\
## Usage in TypeDialog\n\n\
Enable i18n support in forms:\n\n\
```toml\n\
[form]\n\
name = \"config\"\n\
locale = \"en-US\" # Default locale\n\
locale_dir = \"locales\" # Path to .ftl files\n\
\n\
[[fields]]\n\
name = \"server_port\"\n\
prompt_i18n = \"server-port-prompt\" # References locale key\n\
```\n\n\
TypeDialog will load the appropriate translation file based on the user's locale.\n\n",
);
let readme_file = locales_dir.join("README.md");
std::fs::write(&readme_file, content).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write locales README: {}",
e
))
})?;
Ok(())
}
/// Capitalize first letter of a string.
fn capitalize_first(s: &str) -> String {
let mut chars = s.chars();
match chars.next() {
None => String::new(),
Some(first) => first.to_uppercase().collect::<String>() + chars.as_str(),
}
}
}
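// `capitalize_first` collects `to_uppercase()` into a `String` rather than a
// single `char` because Unicode uppercasing can expand to multiple characters;
// a quick sketch of the behavior:
```rust
fn capitalize_first(s: &str) -> String {
    let mut chars = s.chars();
    match chars.next() {
        None => String::new(),
        Some(first) => first.to_uppercase().collect::<String>() + chars.as_str(),
    }
}

fn main() {
    assert_eq!(capitalize_first("tracker"), "Tracker");
    assert_eq!(capitalize_first(""), "");
    // 'ß' uppercases to the two-character "SS", which a single char could not hold.
    assert_eq!(capitalize_first("ßeta"), "SSeta");
}
```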

View File

@ -2,24 +2,14 @@
pub mod constraint_generator;
pub mod defaults_generator;
pub mod documentation_generator;
pub mod form_composer_generator;
pub mod fragment_generator;
pub mod iac_template_generator;
pub mod locales_generator;
pub mod schema_generator;
pub mod script_generator;
pub mod validator_generator;
pub mod values_generator;
pub use constraint_generator::ConstraintGenerator;
pub use defaults_generator::DefaultsGenerator;
pub use documentation_generator::DocumentationGenerator;
pub use form_composer_generator::FormComposerGenerator;
pub use fragment_generator::FragmentGenerator;
pub use iac_template_generator::IacTemplateGenerator;
pub use locales_generator::LocalesGenerator;
pub use schema_generator::SchemaGenerator;
pub use script_generator::ScriptGenerator;
pub use validator_generator::ValidatorGenerator;
pub use values_generator::ValuesGenerator;

View File

@ -32,9 +32,6 @@ impl SchemaGenerator {
// Generate a main schema that imports all features
Self::generate_main_schema(spec, output_dir)?;
// Generate README.md explaining the schema system
Self::generate_schemas_readme(spec, &schemas_dir)?;
tracing::info!("Successfully generated schemas for project: {}", spec.name);
Ok(())
}
@ -49,53 +46,27 @@ impl SchemaGenerator {
let mut schema_content = String::new();
// Add file header
// Add file header and imports
schema_content.push_str(&format!(
"# Schema for {} feature\n# Generated for project: {}\n",
"# Schema for {} feature\n# Generated for project: {}\n\n",
feature.name, spec.name
));
if let Some(desc) = &feature.description {
schema_content.push_str(&format!("# {}\n", desc));
}
schema_content.push('\n');
// Add Nickel imports if feature has them
if !feature.nickel_imports.is_empty() {
schema_content.push_str("# Schema imports\n");
for import in &feature.nickel_imports {
schema_content.push_str(&format!(
"let {} = import \"{}\" in\n",
import.name, import.path
));
}
schema_content.push('\n');
}
// Define the feature type/schema
let type_name = Self::capitalize_first(&feature.name);
schema_content.push_str(&format!("# {} type schema\n", type_name));
if feature.use_type_contracts {
schema_content.push_str(&format!("{} = {{\n", type_name));
} else {
schema_content.push_str(&format!("let {} = {{\n", feature.name));
}
// Define the feature record
schema_content.push_str(&format!("let {} = {{\n", feature.name));
// Add fields to the record
for field in &feature.fields {
schema_content.push_str(&Self::generate_field_schema(field, feature)?);
schema_content.push_str(&Self::generate_field_schema(field)?);
}
if feature.use_type_contracts {
schema_content.push_str("}\n\n");
// Export the type
schema_content.push_str(&format!("{{ {} }}\n", type_name));
} else {
schema_content.push_str("}\n\n");
// Export the schema
schema_content.push_str(&format!("{{ {} }}\n", feature.name));
schema_content.push_str("}\n\n");
// Add validators for fields with constraints
if let Some(constraints) = &feature.constraints {
for path in constraints.keys() {
schema_content.push_str(&format!("# Constraint for {}\n", path));
}
}
// Write the schema file
@ -115,8 +86,8 @@ impl SchemaGenerator {
Ok(())
}
/// Generate Nickel schema syntax for a single field with type contracts.
fn generate_field_schema(field: &ConfigField, feature: &DomainFeature) -> Result<String> {
/// Generate Nickel schema syntax for a single field.
fn generate_field_schema(field: &ConfigField) -> Result<String> {
let mut field_def = String::new();
// Add field comment if help text exists
@ -124,33 +95,20 @@ impl SchemaGenerator {
field_def.push_str(&format!(" # {}\n", help));
}
// Field name
let optional_marker = if !field.required { " | optional" } else { "" };
// Field name and type
let nickel_type = Self::map_field_type_to_nickel(&field.field_type);
let required_marker = if field.required { "" } else { "?" };
// Check if this field references an imported schema
let nickel_type = if feature.use_type_contracts {
// Look for import that matches this field
let matching_import = feature
.nickel_imports
.iter()
.find(|imp| field.name.contains(&imp.name) || imp.name.contains(&field.name));
if let Some(import) = matching_import {
// Use imported type with contract: database | database_schema.Database
format!("{}.{}", import.name, Self::capitalize_first(&field.name))
} else {
Self::map_field_type_to_nickel(&field.field_type).to_string()
}
} else {
Self::map_field_type_to_nickel(&field.field_type).to_string()
};
// Generate field definition with type contract (same format regardless)
field_def.push_str(&format!(
" {} | {}{},\n",
field.name, nickel_type, optional_marker
" {}{} | {},\n",
field.name, required_marker, nickel_type
));
// Add default value comment if present
if let Some(default) = &field.default {
field_def.push_str(&format!(" # default: {}\n", default));
}
Ok(field_def)
}
@ -162,19 +120,10 @@ impl SchemaGenerator {
FieldType::Password => "String",
FieldType::Confirm => "Bool",
FieldType::Select => "String",
FieldType::MultiSelect => "Array String",
FieldType::MultiSelect => "[String]",
FieldType::Editor => "String",
FieldType::Date => "String",
FieldType::RepeatingGroup => "Array Dyn", // Will be refined with type contracts
}
}
/// Capitalize first letter of a string.
fn capitalize_first(s: &str) -> String {
let mut chars = s.chars();
match chars.next() {
None => String::new(),
Some(first) => first.to_uppercase().collect::<String>() + chars.as_str(),
FieldType::RepeatingGroup => "[_]",
}
}
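// The diff above changes both the type spellings (`Array String` vs `[String]`)
// and where optionality is marked: in the new form the `?` goes on the field
// name. A sketch of the resulting field lines, using hypothetical field data:
```rust
fn main() {
    // (name, required, nickel type) triples standing in for ConfigField values.
    let fields = [("port", true, "Number"), ("tags", false, "[String]")];
    for (name, required, nickel_type) in fields {
        let required_marker = if required { "" } else { "?" };
        // Mirrors generate_field_schema's `  {}{} | {},` line format.
        println!("  {}{} | {},", name, required_marker, nickel_type);
    }
    // Prints:
    //   port | Number,
    //   tags? | [String],
}
```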
@ -268,91 +217,4 @@ impl SchemaGenerator {
Ok(infra_schema)
}
/// Generate README.md documenting the schema system.
fn generate_schemas_readme(spec: &ProjectSpec, schemas_dir: &Path) -> Result<()> {
let mut content = String::new();
content.push_str(&format!("# Nickel Schemas - {}\n\n", spec.name));
content.push_str(
"This directory contains Nickel type schemas for configuration validation.\n\n\
## Overview\n\n\
Schemas define the structure and types of configuration values. They use Nickel's \
type contracts to enforce correctness at evaluation time.\n\n\
## Schema Files\n\n",
);
for feature in &spec.domain_features {
let type_name = Self::capitalize_first(&feature.name);
content.push_str(&format!(
"### `{}.ncl` - {} Schema\n\n",
feature.name, type_name
));
if let Some(desc) = &feature.description {
content.push_str(&format!("{}\n\n", desc));
}
if !feature.nickel_imports.is_empty() {
content.push_str("**Imports:**\n");
for import in &feature.nickel_imports {
content.push_str(&format!("- `{}` from `{}`\n", import.name, import.path));
}
content.push('\n');
}
content.push_str(&format!("**Fields:** {}\n\n", feature.fields.len()));
if feature.use_type_contracts {
content.push_str("*This schema uses type contracts for validation.*\n\n");
}
}
content.push_str(
"## Usage\n\n\
### Importing Schemas\n\n\
```nickel\n\
let schemas = import \"./schemas/config.ncl\" in\n\
let my_config = {...} in\n\
my_config | schemas.Config\n\
```\n\n\
### Type Contracts\n\n\
Type contracts ensure values match expected types:\n\n\
```nickel\n\
{\n\
server | schemas.Server = {\n\
port | Number = 8080,\n\
},\n\
}\n\
```\n\n\
### Nested Contracts\n\n\
Schemas can reference imported types:\n\n\
```nickel\n\
let database_schema = import \"./database.ncl\" in\n\
{\n\
database | database_schema.Database | optional,\n\
}\n\
```\n\n\
### Array Type Contracts\n\n\
Arrays can have typed elements:\n\n\
```nickel\n\
udp_trackers | Array TrackerUdp | optional,\n\
```\n\n\
## Validation\n\n\
Type-check your configuration:\n\n\
```bash\n\
nickel typecheck schemas/config.ncl\n\
```\n\n",
);
let readme_file = schemas_dir.join("README.md");
std::fs::write(&readme_file, content).map_err(|e| crate::error::SchemaGenerationError {
feature_name: "readme".to_string(),
reason: format!("Failed to write schemas README: {}", e),
})?;
tracing::debug!("Generated schemas README: {}", readme_file.display());
Ok(())
}
}

View File

@ -2,20 +2,14 @@
use crate::error::Result;
use crate::models::ProjectSpec;
use crate::template::TemplateLoader;
use std::path::Path;
use tera::Context;
/// Generates orchestration scripts for provisioning.
pub struct ScriptGenerator;
impl ScriptGenerator {
/// Generate bash and nushell scripts for provisioning orchestration.
pub fn generate(
spec: &ProjectSpec,
output_dir: impl AsRef<Path>,
template_loader: &TemplateLoader,
) -> Result<()> {
pub fn generate(spec: &ProjectSpec, output_dir: impl AsRef<Path>) -> Result<()> {
let output_dir = output_dir.as_ref();
tracing::info!(
"Generating orchestration scripts for project: {}",
@ -31,677 +25,126 @@ impl ScriptGenerator {
))
})?;
// Prepare template context
let mut context = Context::new();
context.insert("project_name", &spec.name);
context.insert("features", &spec.domain_features);
// Generate bash scripts
Self::generate_bash_scripts(spec, &scripts_dir)?;
// Generate main configuration scripts
Self::generate_configure_scripts(spec, &scripts_dir, template_loader, &context)?;
// Generate nickel conversion scripts
Self::generate_conversion_scripts(spec, &scripts_dir, template_loader, &context)?;
// Generate validation scripts
Self::generate_validation_scripts(spec, &scripts_dir, &context)?;
// Generate utility scripts
Self::generate_utility_scripts(spec, &scripts_dir, &context)?;
// Generate README
Self::generate_scripts_readme(spec, &scripts_dir)?;
// Generate nushell scripts
Self::generate_nushell_scripts(spec, &scripts_dir)?;
tracing::info!("Successfully generated orchestration scripts");
Ok(())
}
/// Generate main configure scripts (configure.sh / configure.nu).
fn generate_configure_scripts(
_spec: &ProjectSpec,
scripts_dir: &Path,
template_loader: &TemplateLoader,
context: &Context,
) -> Result<()> {
// Generate configure.sh from template
let configure_sh = template_loader.render("scripts/configure.sh.tera", context)?;
let configure_sh_path = scripts_dir.join("configure.sh");
std::fs::write(&configure_sh_path, configure_sh).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write configure.sh: {}",
e
))
})?;
Self::make_executable(&configure_sh_path);
// Generate configure.nu from template
let configure_nu = template_loader.render("scripts/configure.nu.tera", context)?;
let configure_nu_path = scripts_dir.join("configure.nu");
std::fs::write(&configure_nu_path, configure_nu).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write configure.nu: {}",
e
))
})?;
Self::make_executable(&configure_nu_path);
tracing::debug!("Generated configure scripts");
Ok(())
}
/// Generate nickel conversion scripts (nickel-to-X, X-to-nickel).
fn generate_conversion_scripts(
spec: &ProjectSpec,
scripts_dir: &Path,
template_loader: &TemplateLoader,
context: &Context,
) -> Result<()> {
let conversions = vec![
(
"nickel-to-json",
"scripts/nickel-to-json.sh.tera",
"scripts/nickel-to-json.nu.tera",
),
(
"nickel-to-yaml",
"scripts/nickel-to-yaml.sh.tera",
"scripts/nickel-to-yaml.nu.tera",
),
(
"json-to-nickel",
"scripts/json-to-nickel.sh.tera",
"scripts/json-to-nickel.nu.tera",
),
];
for (name, sh_template, nu_template) in conversions {
// Generate bash version
let sh_content = template_loader.render(sh_template, context)?;
let sh_path = scripts_dir.join(format!("{}.sh", name));
std::fs::write(&sh_path, sh_content).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write {}.sh: {}",
name, e
))
})?;
Self::make_executable(&sh_path);
// Generate nushell version
let nu_content = template_loader.render(nu_template, context)?;
let nu_path = scripts_dir.join(format!("{}.nu", name));
std::fs::write(&nu_path, nu_content).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write {}.nu: {}",
name, e
))
})?;
Self::make_executable(&nu_path);
}
// Generate additional converters (yaml-to-nickel, toml-to-nickel)
Self::generate_yaml_to_nickel(spec, scripts_dir)?;
Self::generate_toml_to_nickel(spec, scripts_dir)?;
Self::generate_nickel_to_toml(spec, scripts_dir)?;
tracing::debug!("Generated conversion scripts");
Ok(())
}
/// Generate validation scripts (validate-nickel.sh, apply-validators.sh).
fn generate_validation_scripts(
spec: &ProjectSpec,
scripts_dir: &Path,
_context: &Context,
) -> Result<()> {
// validate-nickel.sh
let validate_sh = format!(
/// Generate bash orchestration scripts.
fn generate_bash_scripts(spec: &ProjectSpec, scripts_dir: &Path) -> Result<()> {
// Config loading script
let config_script = format!(
"#!/bin/bash\n\
# Validate Nickel configuration for {}\n\
# Load and validate configuration for {}\n\
set -euo pipefail\n\n\
NICKEL_FILE=\"${{1:-config.ncl}}\"\n\
             CONFIG_DIR=\"${{CONFIG_DIR:-.}}\"\n\
\n\
if [[ ! -f \"$NICKEL_FILE\" ]]; then\n\
echo \"Error: Nickel file not found: $NICKEL_FILE\" >&2\n\
exit 1\n\
fi\n\
\n\
echo \"Validating $NICKEL_FILE...\"\n\
nickel typecheck \"$NICKEL_FILE\" || {{\n\
echo \"Error: Type checking failed\" >&2\n\
exit 1\n\
}}\n\
\n\
nickel eval --raw \"$NICKEL_FILE\" > /dev/null || {{\n\
echo \"Error: Evaluation failed\" >&2\n\
exit 1\n\
}}\n\
\n\
echo \"Validation successful\"\n",
spec.name
);
let validate_sh_path = scripts_dir.join("validate-nickel.sh");
std::fs::write(&validate_sh_path, validate_sh).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write validate-nickel.sh: {}",
e
))
})?;
Self::make_executable(&validate_sh_path);
// validate-nickel.nu
let validate_nu = format!(
"#!/usr/bin/env nu\n\
# Validate Nickel configuration for {} (nushell version)\n\n\
def main [nickel_file: path = \"config.ncl\"] {{\n\
if not ($nickel_file | path exists) {{\n\
error make {{\n\
msg: $\"Nickel file not found: ($nickel_file)\"\n\
}}\n\
}}\n\
\n\
print $\"Validating ($nickel_file)...\"\n\
\n\
# Type check\n\
let typecheck = (nickel typecheck $nickel_file | complete)\n\
if $typecheck.exit_code != 0 {{\n\
error make {{\n\
msg: \"Type checking failed\"\n\
}}\n\
}}\n\
\n\
# Evaluate\n\
let eval = (nickel eval --raw $nickel_file | complete)\n\
if $eval.exit_code != 0 {{\n\
error make {{\n\
msg: \"Evaluation failed\"\n\
}}\n\
}}\n\
\n\
print \"Validation successful\"\n\
}}\n",
spec.name
);
let validate_nu_path = scripts_dir.join("validate-nickel.nu");
std::fs::write(&validate_nu_path, validate_nu).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write validate-nickel.nu: {}",
e
))
})?;
Self::make_executable(&validate_nu_path);
// apply-validators.sh
let apply_validators_sh = format!(
"#!/bin/bash\n\
# Apply Nickel validators to configuration for {}\n\
set -euo pipefail\n\n\
CONFIG_FILE=\"${{1:-config.ncl}}\"\n\
VALIDATORS_DIR=\"${{VALIDATORS_DIR:-validators}}\"\n\
\n\
echo \"Applying validators from $VALIDATORS_DIR to $CONFIG_FILE...\"\n\
\n\
# Import validators and merge with config\n\
nickel eval --raw <<EOF\n\
let validators = import \"$VALIDATORS_DIR/config.ncl\" in\n\
let config = import \"$CONFIG_FILE\" in\n\
validators.validate config\n\
EOF\n\
\n\
echo \"Validators applied successfully\"\n",
spec.name
);
let apply_validators_sh_path = scripts_dir.join("apply-validators.sh");
std::fs::write(&apply_validators_sh_path, apply_validators_sh).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write apply-validators.sh: {}",
e
))
})?;
Self::make_executable(&apply_validators_sh_path);
// apply-validators.nu
let apply_validators_nu = format!(
"#!/usr/bin/env nu\n\
# Apply Nickel validators to configuration for {} (nushell version)\n\n\
def main [\n\
config_file: path = \"config.ncl\",\n\
--validators_dir: path = \"validators\"\n\
] {{\n\
print $\"Applying validators from ($validators_dir) to ($config_file)...\"\n\
\n\
let nickel_code = $\"let validators = import \\\"($validators_dir)/config.ncl\\\" in\\nlet config = import \\\"($config_file)\\\" in\\nvalidators.validate config\"\n\
\n\
let result = ($nickel_code | nickel eval --raw | complete)\n\
if $result.exit_code != 0 {{\n\
error make {{\n\
msg: $\"Validation failed: ($result.stderr)\"\n\
}}\n\
}}\n\
\n\
print \"Validators applied successfully\"\n\
}}\n",
spec.name
);
let apply_validators_nu_path = scripts_dir.join("apply-validators.nu");
std::fs::write(&apply_validators_nu_path, apply_validators_nu).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write apply-validators.nu: {}",
e
))
})?;
Self::make_executable(&apply_validators_nu_path);
tracing::debug!("Generated validation scripts");
Ok(())
}
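// A sketch of the same two-step check the generated validate-nickel scripts
// perform (type-check, then evaluate), driven from Rust via
// `std::process::Command`; it assumes a `nickel` binary on PATH and is not
// part of the generator itself:
```rust
use std::process::Command;

fn validate_nickel(file: &str) -> Result<(), String> {
    // Same order as the generated scripts: typecheck first, then evaluate.
    let steps: [&[&str]; 2] = [&["typecheck", file], &["eval", "--raw", file]];
    for args in steps {
        let status = Command::new("nickel")
            .args(args)
            .status()
            .map_err(|e| format!("failed to run nickel: {}", e))?;
        if !status.success() {
            return Err(format!("nickel {} failed for {}", args[0], file));
        }
    }
    Ok(())
}

fn main() {
    if let Err(e) = validate_nickel("config.ncl") {
        eprintln!("{}", e);
        std::process::exit(1);
    }
    println!("Validation successful");
}
```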
/// Generate utility scripts (merge-configs.sh, export-final.sh).
fn generate_utility_scripts(
spec: &ProjectSpec,
scripts_dir: &Path,
_context: &Context,
) -> Result<()> {
// merge-configs.sh
let merge_configs_sh = format!(
"#!/bin/bash\n\
# Merge multiple Nickel configurations for {}\n\
set -euo pipefail\n\n\
if [[ $# -lt 2 ]]; then\n\
echo \"Usage: $0 <base.ncl> <override.ncl> [output.ncl]\" >&2\n\
exit 1\n\
fi\n\
\n\
BASE_FILE=\"$1\"\n\
OVERRIDE_FILE=\"$2\"\n\
OUTPUT_FILE=\"${{3:-merged.ncl}}\"\n\
\n\
echo \"Merging $OVERRIDE_FILE into $BASE_FILE -> $OUTPUT_FILE\"\n\
\n\
nickel eval --raw <<EOF > \"$OUTPUT_FILE\"\n\
let base = import \"$BASE_FILE\" in\n\
let override = import \"$OVERRIDE_FILE\" in\n\
std.record.merge base override\n\
EOF\n\
\n\
echo \"Merge complete: $OUTPUT_FILE\"\n",
spec.name
);
let merge_configs_sh_path = scripts_dir.join("merge-configs.sh");
std::fs::write(&merge_configs_sh_path, merge_configs_sh).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write merge-configs.sh: {}",
e
))
})?;
Self::make_executable(&merge_configs_sh_path);
// merge-configs.nu
let merge_configs_nu = format!(
"#!/usr/bin/env nu\n\
# Merge multiple Nickel configurations for {} (nushell version)\n\n\
def main [\n\
base_file: path,\n\
override_file: path,\n\
output_file: path = \"merged.ncl\"\n\
] {{\n\
print $\"Merging ($override_file) into ($base_file) -> ($output_file)\"\n\
\n\
let nickel_code = $\"let base = import \\\"($base_file)\\\" in\\nlet override = import \\\"($override_file)\\\" in\\nstd.record.merge base override\"\n\
\n\
$nickel_code | nickel eval --raw | save -f $output_file\n\
\n\
print $\"Merge complete: ($output_file)\"\n\
}}\n",
spec.name
);
let merge_configs_nu_path = scripts_dir.join("merge-configs.nu");
std::fs::write(&merge_configs_nu_path, merge_configs_nu).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write merge-configs.nu: {}",
e
))
})?;
Self::make_executable(&merge_configs_nu_path);
// export-final.sh
let export_final_sh = format!(
"#!/bin/bash\n\
# Export final configuration for deployment for {}\n\
set -euo pipefail\n\n\
NICKEL_FILE=\"${{1:-config.ncl}}\"\n\
FORMAT=\"${{2:-json}}\"\n\
OUTPUT_FILE=\"${{3:-config.$FORMAT}}\"\n\
\n\
echo \"Exporting $NICKEL_FILE to $OUTPUT_FILE ($FORMAT format)\"\n\
\n\
case \"$FORMAT\" in\n\
json)\n\
nickel export --format json \"$NICKEL_FILE\" > \"$OUTPUT_FILE\"\n\
;;\n\
yaml)\n\
nickel export --format yaml \"$NICKEL_FILE\" > \"$OUTPUT_FILE\"\n\
;;\n\
toml)\n\
nickel export --format toml \"$NICKEL_FILE\" > \"$OUTPUT_FILE\"\n\
;;\n\
*)\n\
echo \"Error: Unsupported format: $FORMAT\" >&2\n\
echo \"Supported: json, yaml, toml\" >&2\n\
# Load configuration from JSON\n\
load_config() {{\n\
local config_file=\"$1\"\n\
if [[ ! -f \"$config_file\" ]]; then\n\
echo \"Error: Configuration file not found: $config_file\" >&2\n\
exit 1\n\
;;\n\
esac\n\
fi\n\
cat \"$config_file\"\n\
}}\n\
\n\
echo \"Export complete: $OUTPUT_FILE\"\n",
# Validate using Nickel\n\
validate_config() {{\n\
local config_file=\"$1\"\n\
nickel eval --raw \"$config_file\" > /dev/null 2>&1 || {{\n\
echo \"Error: Configuration validation failed for $config_file\" >&2\n\
exit 1\n\
}}\n\
}}\n\
\n\
# Main\n\
main() {{\n\
local config_file=\"${{CONFIG_DIR}}/config.json\"\n\
load_config \"$config_file\"\n\
validate_config \"$config_file\"\n\
echo \"Configuration loaded and validated successfully\"\n\
}}\n\
\n\
main \"$@\"\n",
spec.name
);
let export_final_sh_path = scripts_dir.join("export-final.sh");
std::fs::write(&export_final_sh_path, export_final_sh).map_err(|e| {
let config_script_path = scripts_dir.join("config.sh");
std::fs::write(&config_script_path, config_script).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write export-final.sh: {}",
"Failed to write config script: {}",
e
))
})?;
Self::make_executable(&export_final_sh_path);
// export-final.nu
let export_final_nu = format!(
// Make executable
#[cfg(unix)]
{
use std::os::unix::fs::PermissionsExt;
let perms = std::fs::Permissions::from_mode(0o755);
std::fs::set_permissions(&config_script_path, perms).ok();
}
tracing::debug!("Generated bash config script");
Ok(())
}
/// Generate nushell orchestration scripts.
fn generate_nushell_scripts(spec: &ProjectSpec, scripts_dir: &Path) -> Result<()> {
// Config loading script in nushell
let config_script = format!(
"#!/usr/bin/env nu\n\
# Export final configuration for deployment for {} (nushell version)\n\n\
def main [\n\
nickel_file: path = \"config.ncl\",\n\
format: string = \"json\",\n\
output_file?: path\n\
] {{\n\
let out = if ($output_file | is-empty) {{ $\"config.($format)\" }} else {{ $output_file }}\n\
\n\
print $\"Exporting ($nickel_file) to ($out) (($format) format)\"\n\
\n\
match $format {{\n\
\"json\" => {{ nickel export --format json $nickel_file | save -f $out }},\n\
\"yaml\" => {{ nickel export --format yaml $nickel_file | save -f $out }},\n\
\"toml\" => {{ nickel export --format toml $nickel_file | save -f $out }},\n\
_ => {{\n\
error make {{\n\
msg: $\"Unsupported format: ($format). Supported: json, yaml, toml\"\n\
}}\n\
# Load and validate configuration for {} (nushell version)\n\n\
def load_config [config_file: path] {{\n\
if ($config_file | path exists) {{\n\
open $config_file\n\
}} else {{\n\
error make {{\n\
msg: $\"Configuration file not found: ($config_file)\"\n\
}}\n\
}}\n\
\n\
print $\"Export complete: ($out)\"\n\
}}\n",
}}\n\
\n\
def validate_config [config_file: path] {{\n\
let config = (load_config $config_file)\n\
# TODO: Validate against Nickel schema\n\
$config\n\
}}\n\
\n\
def main [--config_dir: path = \".\"] {{\n\
let config_file = ($config_dir | path join config.json)\n\
let config = (validate_config $config_file)\n\
print $\"Configuration loaded: ($config_file)\"\n\
$config\n\
             }}\n",
spec.name
);
let export_final_nu_path = scripts_dir.join("export-final.nu");
std::fs::write(&export_final_nu_path, export_final_nu).map_err(|e| {
let config_script_path = scripts_dir.join("config.nu");
std::fs::write(&config_script_path, config_script).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write export-final.nu: {}",
e
))
})?;
Self::make_executable(&export_final_nu_path);
tracing::debug!("Generated utility scripts");
Ok(())
}
/// Generate yaml-to-nickel converter.
fn generate_yaml_to_nickel(_spec: &ProjectSpec, scripts_dir: &Path) -> Result<()> {
let yaml_to_nickel_sh = "#!/bin/bash\n\
# Convert YAML to Nickel\n\
set -euo pipefail\n\n\
YAML_FILE=\"${1}\"\n\
NICKEL_FILE=\"${2:-${YAML_FILE%.yaml}.ncl}\"\n\
\n\
if [[ ! -f \"$YAML_FILE\" ]]; then\n\
echo \"Error: YAML file not found: $YAML_FILE\" >&2\n\
exit 1\n\
fi\n\
\n\
# Convert via JSON intermediary\n\
TEMP_JSON=\"$(mktemp)\"\n\
yq -o json \"$YAML_FILE\" > \"$TEMP_JSON\"\n\
./json-to-nickel.sh \"$TEMP_JSON\" \"$NICKEL_FILE\"\n\
rm -f \"$TEMP_JSON\"\n";
let yaml_to_nickel_sh_path = scripts_dir.join("yaml-to-nickel.sh");
std::fs::write(&yaml_to_nickel_sh_path, yaml_to_nickel_sh).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write yaml-to-nickel.sh: {}",
e
))
})?;
Self::make_executable(&yaml_to_nickel_sh_path);
let yaml_to_nickel_nu = "#!/usr/bin/env nu\n\
# Convert YAML to Nickel (nushell version)\n\n\
             def main [yaml_file: path, nickel_file?: path] {\n\
             let out = if ($nickel_file | is-empty) { ($yaml_file | path parse | update extension ncl | path join) } else { $nickel_file }\n\
             \n\
             if not ($yaml_file | path exists) {\n\
             error make { msg: $\"YAML file not found: ($yaml_file)\" }\n\
             }\n\
             \n\
             # Convert via JSON intermediary\n\
             let json_content = (open $yaml_file | to json)\n\
             $json_content | save -f temp.json\n\
             nu json-to-nickel.nu temp.json $out\n\
             rm temp.json\n\
             }\n";
let yaml_to_nickel_nu_path = scripts_dir.join("yaml-to-nickel.nu");
std::fs::write(&yaml_to_nickel_nu_path, yaml_to_nickel_nu).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write yaml-to-nickel.nu: {}",
e
))
})?;
Self::make_executable(&yaml_to_nickel_nu_path);
Ok(())
}
/// Generate toml-to-nickel converter.
fn generate_toml_to_nickel(_spec: &ProjectSpec, scripts_dir: &Path) -> Result<()> {
let toml_to_nickel_sh = "#!/bin/bash\n\
# Convert TOML to Nickel\n\
set -euo pipefail\n\n\
TOML_FILE=\"${1}\"\n\
NICKEL_FILE=\"${2:-${TOML_FILE%.toml}.ncl}\"\n\
\n\
if [[ ! -f \"$TOML_FILE\" ]]; then\n\
echo \"Error: TOML file not found: $TOML_FILE\" >&2\n\
exit 1\n\
fi\n\
\n\
# Convert via JSON intermediary\n\
TEMP_JSON=\"$(mktemp)\"\n\
cat \"$TOML_FILE\" | toml2json > \"$TEMP_JSON\"\n\
./json-to-nickel.sh \"$TEMP_JSON\" \"$NICKEL_FILE\"\n\
rm -f \"$TEMP_JSON\"\n";
let toml_to_nickel_sh_path = scripts_dir.join("toml-to-nickel.sh");
std::fs::write(&toml_to_nickel_sh_path, toml_to_nickel_sh).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write toml-to-nickel.sh: {}",
e
))
})?;
Self::make_executable(&toml_to_nickel_sh_path);
let toml_to_nickel_nu = "#!/usr/bin/env nu\n\
# Convert TOML to Nickel (nushell version)\n\n\
             def main [toml_file: path, nickel_file?: path] {\n\
             let out = if ($nickel_file | is-empty) { ($toml_file | path parse | update extension ncl | path join) } else { $nickel_file }\n\
             \n\
             if not ($toml_file | path exists) {\n\
             error make { msg: $\"TOML file not found: ($toml_file)\" }\n\
             }\n\
             \n\
             # Convert via JSON intermediary\n\
             let json_content = (open $toml_file | to json)\n\
             $json_content | save -f temp.json\n\
             nu json-to-nickel.nu temp.json $out\n\
             rm temp.json\n\
             }\n";
let toml_to_nickel_nu_path = scripts_dir.join("toml-to-nickel.nu");
std::fs::write(&toml_to_nickel_nu_path, toml_to_nickel_nu).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write toml-to-nickel.nu: {}",
e
))
})?;
Self::make_executable(&toml_to_nickel_nu_path);
Ok(())
}
/// Generate nickel-to-toml converter.
fn generate_nickel_to_toml(_spec: &ProjectSpec, scripts_dir: &Path) -> Result<()> {
let nickel_to_toml_sh = "#!/bin/bash\n\
# Convert Nickel to TOML\n\
set -euo pipefail\n\n\
NICKEL_FILE=\"${1}\"\n\
TOML_FILE=\"${2:-${NICKEL_FILE%.ncl}.toml}\"\n\
\n\
if [[ ! -f \"$NICKEL_FILE\" ]]; then\n\
echo \"Error: Nickel file not found: $NICKEL_FILE\" >&2\n\
exit 1\n\
fi\n\
\n\
nickel export --format toml \"$NICKEL_FILE\" > \"$TOML_FILE\"\n\
echo \"Converted $NICKEL_FILE to $TOML_FILE\"\n";
let nickel_to_toml_sh_path = scripts_dir.join("nickel-to-toml.sh");
std::fs::write(&nickel_to_toml_sh_path, nickel_to_toml_sh).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write nickel-to-toml.sh: {}",
e
))
})?;
Self::make_executable(&nickel_to_toml_sh_path);
let nickel_to_toml_nu = "#!/usr/bin/env nu\n\
# Convert Nickel to TOML (nushell version)\n\n\
             def main [nickel_file: path, toml_file?: path] {\n\
             let out = if ($toml_file | is-empty) { ($nickel_file | path parse | update extension toml | path join) } else { $toml_file }\n\
             \n\
             if not ($nickel_file | path exists) {\n\
             error make { msg: $\"Nickel file not found: ($nickel_file)\" }\n\
             }\n\
             \n\
             nickel export --format toml $nickel_file | save -f $out\n\
             print $\"Converted ($nickel_file) to ($out)\"\n\
             }\n";
let nickel_to_toml_nu_path = scripts_dir.join("nickel-to-toml.nu");
std::fs::write(&nickel_to_toml_nu_path, nickel_to_toml_nu).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write nickel-to-toml.nu: {}",
e
))
})?;
Self::make_executable(&nickel_to_toml_nu_path);
Ok(())
}
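// The converter pair above shells out to `nickel export`; the same conversion
// expressed directly in Rust looks like this — a sketch assuming `nickel` on
// PATH, not the crate's API:
```rust
use std::process::Command;

fn nickel_to_toml(nickel_file: &str, toml_file: &str) -> std::io::Result<()> {
    // Equivalent of: nickel export --format toml "$NICKEL_FILE" > "$TOML_FILE"
    let output = Command::new("nickel")
        .args(["export", "--format", "toml", nickel_file])
        .output()?;
    if !output.status.success() {
        return Err(std::io::Error::other(
            String::from_utf8_lossy(&output.stderr).into_owned(),
        ));
    }
    std::fs::write(toml_file, &output.stdout)
}

fn main() -> std::io::Result<()> {
    nickel_to_toml("config.ncl", "config.toml")
}
```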
/// Generate README.md for scripts directory.
fn generate_scripts_readme(spec: &ProjectSpec, scripts_dir: &Path) -> Result<()> {
let mut content = String::new();
content.push_str(&format!("# Orchestration Scripts - {}\n\n", spec.name));
content.push_str(
"This directory contains bash and nushell scripts for provisioning orchestration.\n\n\
## Overview\n\n\
             Scripts implement the nickel-roundtrip workflow: TypeDialog → JSON → Nickel → Validation → Export.\n\n\
## Script Categories\n\n\
### Main Configuration\n\n\
- `configure.sh` / `configure.nu` - Main nickel-roundtrip orchestration script\n\n\
### Format Conversion\n\n\
**From Nickel:**\n\
- `nickel-to-json.sh` / `.nu` - Convert Nickel to JSON\n\
- `nickel-to-yaml.sh` / `.nu` - Convert Nickel to YAML\n\
- `nickel-to-toml.sh` / `.nu` - Convert Nickel to TOML\n\n\
**To Nickel:**\n\
- `json-to-nickel.sh` / `.nu` - Convert JSON to Nickel\n\
- `yaml-to-nickel.sh` / `.nu` - Convert YAML to Nickel\n\
- `toml-to-nickel.sh` / `.nu` - Convert TOML to Nickel\n\n\
### Validation\n\n\
- `validate-nickel.sh` / `.nu` - Type-check and evaluate Nickel files\n\
- `apply-validators.sh` / `.nu` - Apply custom validators to configuration\n\n\
### Utilities\n\n\
- `merge-configs.sh` / `.nu` - Merge multiple Nickel configurations\n\
- `export-final.sh` / `.nu` - Export final configuration for deployment\n\n\
## Nickel-Roundtrip Workflow\n\n\
Complete configuration workflow:\n\n\
```bash\n\
# 1. Collect configuration via TypeDialog\n\
typedialog --form fragments/complete.toml --output user-input.json\n\
\n\
# 2. Convert JSON to Nickel\n\
./scripts/json-to-nickel.sh user-input.json config.ncl\n\
\n\
# 3. Validate against schemas\n\
./scripts/validate-nickel.sh config.ncl\n\
\n\
# 4. Apply custom validators\n\
./scripts/apply-validators.sh config.ncl\n\
\n\
# 5. Merge with defaults\n\
./scripts/merge-configs.sh defaults/config.ncl config.ncl final.ncl\n\
\n\
# 6. Export to deployment format\n\
./scripts/export-final.sh final.ncl yaml deployment.yaml\n\
```\n\n\
Or use the main orchestration script:\n\n\
```bash\n\
./scripts/configure.sh\n\
```\n\n\
## Nushell Versions\n\n\
All scripts have `.nu` equivalents with better error handling:\n\n\
```bash\n\
nu scripts/configure.nu\n\
```\n\n\
## Requirements\n\n\
- **nickel** - Nickel language runtime\n\
- **typedialog** - For interactive forms (CLI backend)\n\
- **jq** - JSON processing (for bash scripts)\n\
- **yq** - YAML processing (for YAML conversions)\n\
- **toml2json** - TOML to JSON conversion\n\
- **nushell** - For `.nu` script versions (optional)\n\n\
## Error Handling\n\n\
All scripts use `set -euo pipefail` (bash) or structured error handling (nushell) to fail fast on errors.\n\n\
## Permissions\n\n\
Scripts are automatically marked executable. Manual override:\n\n\
```bash\n\
chmod +x scripts/*.sh scripts/*.nu\n\
```\n\n",
);
let readme_file = scripts_dir.join("README.md");
std::fs::write(&readme_file, content).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write scripts README: {}",
"Failed to write nushell config script: {}",
e
))
})?;
Ok(())
}
/// Make a script file executable on Unix systems.
#[cfg(unix)]
fn make_executable(path: &Path) {
use std::os::unix::fs::PermissionsExt;
if let Ok(metadata) = std::fs::metadata(path) {
let mut perms = metadata.permissions();
perms.set_mode(0o755);
std::fs::set_permissions(path, perms).ok();
// Make executable
#[cfg(unix)]
{
use std::os::unix::fs::PermissionsExt;
let perms = std::fs::Permissions::from_mode(0o755);
std::fs::set_permissions(&config_script_path, perms).ok();
}
}
/// No-op on non-Unix systems.
#[cfg(not(unix))]
fn make_executable(_path: &Path) {}
tracing::debug!("Generated nushell config script");
Ok(())
}
}
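// Every script writer above follows the same write-then-`make_executable`
// pattern; condensed into one helper it looks like this (a sketch, not the
// crate's API — the real generator ignores chmod failures with `.ok()`):
```rust
use std::io;
use std::path::Path;

fn write_script(path: &Path, body: &str) -> io::Result<()> {
    std::fs::write(path, body)?;
    // Mark executable on Unix; a no-op elsewhere, matching the generator's
    // cfg-gated make_executable.
    #[cfg(unix)]
    {
        use std::os::unix::fs::PermissionsExt;
        let mut perms = std::fs::metadata(path)?.permissions();
        perms.set_mode(0o755);
        std::fs::set_permissions(path, perms)?;
    }
    Ok(())
}

fn main() -> io::Result<()> {
    write_script(Path::new("demo.sh"), "#!/bin/bash\necho ok\n")
}
```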

View File

@ -1,7 +1,6 @@
//! Validator generator: produces Nickel validators using templates.
//! Validator generator: produces Nickel validators from constraints.
use crate::error::Result;
use crate::models::project_spec::ValidatorType;
use crate::models::{FieldType, ProjectSpec};
use std::path::Path;
@ -23,395 +22,133 @@ impl ValidatorGenerator {
))
})?;
// Always generate common.ncl with reusable validators
Self::generate_common_validators(&validators_dir)?;
// Generate feature-specific validators
// Generate validators for each feature
for feature in &spec.domain_features {
if !feature.custom_validators.is_empty() {
Self::generate_custom_validators(spec, feature, &validators_dir)?;
} else {
Self::generate_basic_validators(spec, feature, &validators_dir)?;
let mut validator_content = String::new();
validator_content.push_str(&format!(
"# Validators for {} feature\n# Generated for project: {}\n\n",
feature.name, spec.name
));
// Add field-specific validators
for field in &feature.fields {
validator_content.push_str(&Self::generate_field_validator(field)?);
}
// Write validator file
let validator_file = validators_dir.join(format!("{}.ncl", feature.name));
std::fs::write(&validator_file, validator_content).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write validator file: {}",
e
))
})?;
tracing::debug!("Generated validator for feature: {}", feature.name);
}
// Generate main validators config
Self::generate_main_validators(spec, &validators_dir)?;
// Generate README
Self::generate_validators_readme(spec, &validators_dir)?;
tracing::info!("Successfully generated validators");
Ok(())
}
/// Generate common.ncl with reusable validation contracts.
fn generate_common_validators(validators_dir: &Path) -> Result<()> {
let content = r#"# Common validators
# Reusable validation predicates and type contracts
/// Generate validator function for a single field.
fn generate_field_validator(field: &crate::models::ConfigField) -> Result<String> {
let mut validator = String::new();
{
# Port number validation (1-65535)
ValidPort = fun label =>
std.contract.from_predicate (fun port =>
std.is_number port &&
port >= 1 &&
port <= 65535
) {
label = label,
message = "must be a valid port number (1-65535)",
},
validator.push_str(&format!("# Validator for field: {}\n", field.name));
# Non-empty string validation
NonEmptyString = fun label =>
std.contract.from_predicate (fun s =>
std.is_string s &&
std.string.length s > 0
) {
label = label,
message = "must be a non-empty string",
},
# Valid bind address format (IP:PORT)
ValidBindAddress = fun label =>
std.contract.from_predicate (fun addr =>
std.is_string addr &&
std.string.contains ":" addr &&
let parts = std.string.split ":" addr in
std.array.length parts == 2
) {
label = label,
message = "must be a valid bind address (IP:PORT format)",
},
# Valid URL format
ValidUrl = fun label =>
std.contract.from_predicate (fun url =>
std.is_string url &&
(std.string.is_match "^https?://" url)
) {
label = label,
message = "must be a valid HTTP/HTTPS URL",
},
# Positive number validation
PositiveNumber = fun label =>
std.contract.from_predicate (fun n =>
std.is_number n && n > 0
) {
label = label,
message = "must be a positive number",
},
# Non-negative number validation
NonNegativeNumber = fun label =>
std.contract.from_predicate (fun n =>
std.is_number n && n >= 0
) {
label = label,
message = "must be a non-negative number",
},
# Range validation
Range = fun min => fun max => fun label =>
std.contract.from_predicate (fun n =>
std.is_number n &&
n >= min &&
n <= max
) {
label = label,
message = "must be between %{std.to_string min} and %{std.to_string max}",
},
# String pattern matching
MatchesPattern = fun pattern => fun label =>
std.contract.from_predicate (fun s =>
std.is_string s &&
std.string.is_match pattern s
) {
label = label,
message = "must match pattern: %{pattern}",
},
# Enum validation
OneOf = fun allowed => fun label =>
std.contract.from_predicate (fun value =>
std.array.any (fun v => v == value) allowed
) {
label = label,
message = "must be one of: %{std.serialize 'Json allowed}",
},
# Array length validation
ArrayLength = fun min => fun max => fun label =>
std.contract.from_predicate (fun arr =>
std.is_array arr &&
let len = std.array.length arr in
len >= min && len <= max
) {
label = label,
message = "array length must be between %{std.to_string min} and %{std.to_string max}",
},
}
"#;
let common_file = validators_dir.join("common.ncl");
std::fs::write(&common_file, content).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write common validators: {}",
e
))
})?;
tracing::debug!("Generated common validators");
Ok(())
}
/// Generate custom validators for features with custom validator specs.
fn generate_custom_validators(
spec: &ProjectSpec,
feature: &crate::models::DomainFeature,
validators_dir: &Path,
) -> Result<()> {
let mut content = String::new();
content.push_str(&format!(
"# Custom validators for {}\n# Generated for project: {}\n\n",
feature.name, spec.name
));
content.push_str("let common = import \"./common.ncl\" in\n\n{\n");
// Generate each custom validator
for validator in &feature.custom_validators {
content.push_str(&format!(
" # {}\n",
validator
.description
.as_ref()
.unwrap_or(&format!("Validator for {}", validator.name))
));
match validator.validator_type {
ValidatorType::Range => {
content.push_str(&format!(
" {} = common.Range 0 100 \"{}\",\n\n",
validator.name, validator.name
));
}
ValidatorType::Pattern => {
content.push_str(&format!(
" {} = common.MatchesPattern \".*\" \"{}\",\n\n",
validator.name, validator.name
));
}
ValidatorType::ArrayUniqueness => {
content.push_str(&format!(
" {} = fun label =>\n std.contract.from_predicate (fun arr =>\n std.is_array arr &&\n let values = std.array.map (fun item => item.id) arr in\n std.array.length values == std.array.length (std.array.sort values)\n ) {{\n label = label,\n message = \"array items must have unique values\",\n }},\n\n",
validator.name
));
}
ValidatorType::Composite => {
content.push_str(&format!(
" {} = fun label =>\n std.contract.from_predicate (fun value =>\n true # Composite validation\n ) {{\n label = label,\n message = \"composite validation failed\",\n }},\n\n",
validator.name
));
}
ValidatorType::CustomPredicate => {
content.push_str(&format!(
" {} = fun label =>\n std.contract.from_predicate (fun value =>\n true # Custom predicate\n ) {{\n label = label,\n message = \"{}\",\n }},\n\n",
validator.name,
validator.description.as_ref().unwrap_or(&"validation failed".to_string())
));
}
}
}
content.push_str(" # Master validation function\n");
content.push_str(" validate = fun config => config,\n");
content.push_str("}\n");
let validator_file = validators_dir.join(format!("{}.ncl", feature.name));
std::fs::write(&validator_file, content).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write custom validators: {}",
e
))
})?;
tracing::debug!("Generated custom validators for: {}", feature.name);
Ok(())
}
/// Generate basic validators for features without custom validators.
fn generate_basic_validators(
spec: &ProjectSpec,
feature: &crate::models::DomainFeature,
validators_dir: &Path,
) -> Result<()> {
let mut content = String::new();
content.push_str(&format!(
"# Validators for {} feature\n# Generated for project: {}\n\n",
feature.name, spec.name
));
content.push_str("let common = import \"./common.ncl\" in\n\n{\n");
// Generate field validators
for field in &feature.fields {
if let Some(validator_fn) = Self::get_common_validator_for_field(field) {
content.push_str(&format!(
" # {} validator\n validate_{} = {},\n\n",
field.name, field.name, validator_fn
));
}
}
content.push_str(" # Validation function\n");
content.push_str(" validate = fun config => config,\n");
content.push_str("}\n");
let validator_file = validators_dir.join(format!("{}.ncl", feature.name));
std::fs::write(&validator_file, content).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!("Failed to write validators: {}", e))
})?;
Ok(())
}
/// Get appropriate common validator for a field.
fn get_common_validator_for_field(field: &crate::models::ConfigField) -> Option<String> {
match field.field_type {
FieldType::Number => {
if field.min.is_some() && field.max.is_some() {
Some(format!(
"common.Range {} {} \"{}\"",
field.min.unwrap(),
field.max.unwrap(),
field.name
))
} else if field.min == Some(0) {
Some(format!("common.NonNegativeNumber \"{}\"", field.name))
} else if field.min == Some(1) {
Some(format!("common.PositiveNumber \"{}\"", field.name))
} else {
None
}
}
FieldType::Text => {
if field.name.contains("address") && field.name.contains("bind") {
Some(format!("common.ValidBindAddress \"{}\"", field.name))
} else if field.name.contains("url") {
Some(format!("common.ValidUrl \"{}\"", field.name))
} else if !field.required {
None
validator.push_str(&format!("let validate_{} = fun value => (\n", field.name));
validator.push_str(" (std.is_string value) &&\n");
if let Some(min) = field.min {
validator.push_str(&format!(" ((std.string.length value) >= {}) &&\n", min));
}
if let Some(max) = field.max {
validator.push_str(&format!(" ((std.string.length value) <= {})\n", max));
} else {
Some(format!("common.NonEmptyString \"{}\"", field.name))
validator.push_str(" true\n");
}
validator.push_str(")\n\n");
}
FieldType::Number => {
validator.push_str(&format!("let validate_{} = fun value => (\n", field.name));
validator.push_str(" (std.is_number value) &&\n");
if let Some(min) = field.min {
validator.push_str(&format!(" (value >= {}) &&\n", min));
}
if let Some(max) = field.max {
validator.push_str(&format!(" (value <= {})\n", max));
} else {
validator.push_str(" true\n");
}
validator.push_str(")\n\n");
}
FieldType::Password => {
validator.push_str(&format!("let validate_{} = fun value => (\n", field.name));
validator.push_str(" (std.is_string value) &&\n");
validator
.push_str(" ((std.string.length value) >= 8) # Minimum password length\n");
validator.push_str(")\n\n");
}
FieldType::Confirm => {
validator.push_str(&format!(
"let validate_{} = fun value => std.is_bool value\n\n",
field.name
));
}
FieldType::Select | FieldType::MultiSelect => {
if !field.options.is_empty() {
validator.push_str(&format!("let validate_{} = fun value => (\n", field.name));
validator.push_str(" let valid_options = [");
let options_str = field
.options
.iter()
.map(|opt| format!("\"{}\"", opt))
.collect::<Vec<_>>()
.join(", ");
validator.push_str(&options_str);
validator.push_str("] in\n");
validator.push_str(" std.arrays.elem value valid_options\n");
validator.push_str(")\n\n");
}
}
FieldType::Password => Some(format!("common.NonEmptyString \"{}\"", field.name)),
_ => None,
}
}
/// Generate main validators.ncl.
fn generate_main_validators(spec: &ProjectSpec, validators_dir: &Path) -> Result<()> {
let mut content = String::new();
FieldType::RepeatingGroup => {
validator.push_str(&format!("let validate_{} = fun value => (\n", field.name));
validator.push_str(" (std.is_array value) &&\n");
content.push_str(&format!(
"# Main validators configuration for {}\n\n",
spec.name
));
if let Some(min) = field.min {
validator.push_str(&format!(" ((std.array.length value) >= {}) &&\n", min));
}
if let Some(max) = field.max {
validator.push_str(&format!(" ((std.array.length value) <= {})\n", max));
} else {
validator.push_str(" true\n");
}
content.push_str("let common = import \"./common.ncl\" in\n");
validator.push_str(")\n\n");
}
for feature in &spec.domain_features {
content.push_str(&format!(
"let {} = import \"./{}.ncl\" in\n",
feature.name, feature.name
));
}
content.push_str("\n{\n common,\n");
for feature in &spec.domain_features {
content.push_str(&format!(" {},\n", feature.name));
}
content.push_str("}\n");
let config_file = validators_dir.join("config.ncl");
std::fs::write(&config_file, content).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write main validators: {}",
e
))
})?;
Ok(())
}
/// Generate README.md for validators directory.
fn generate_validators_readme(spec: &ProjectSpec, validators_dir: &Path) -> Result<()> {
let mut content = String::new();
content.push_str(&format!("# Validators - {}\n\n", spec.name));
content.push_str(
"This directory contains Nickel validation contracts.\n\n\
## Overview\n\n\
Validators enforce rules on configuration values using Nickel's contract system.\n\n\
## Files\n\n\
### `common.ncl` - Reusable Validators\n\n\
Common validation contracts used across features:\n\n\
- `ValidPort` - Port numbers (1-65535)\n\
- `NonEmptyString` - Non-empty strings\n\
- `ValidBindAddress` - IP:PORT format\n\
- `ValidUrl` - HTTP/HTTPS URLs\n\
- `PositiveNumber` - Numbers > 0\n\
- `NonNegativeNumber` - Numbers >= 0\n\
- `Range` - Number range validation\n\
- `MatchesPattern` - Regex pattern matching\n\
- `OneOf` - Enum validation\n\
- `ArrayLength` - Array size constraints\n\n",
);
for feature in &spec.domain_features {
content.push_str(&format!("### `{}.ncl`\n\n", feature.name));
if !feature.custom_validators.is_empty() {
content.push_str(&format!(
"Feature-specific validators ({} custom validators).\n\n",
feature.custom_validators.len()
_ => {
validator.push_str(&format!(
"let validate_{} = fun value => true # No specific validation\n\n",
field.name
));
} else {
content.push_str("Standard field validators.\n\n");
}
}
content.push_str(
"## Usage\n\n\
### Applying Validators\n\n\
```nickel\n\
let validators = import \"./validators/config.ncl\" in\n\
let config = {...} in\n\
validators.validate config\n\
```\n\n\
### Custom Validation\n\n\
```nickel\n\
let common = import \"./validators/common.ncl\" in\n\
{\n\
port | common.ValidPort \"config.port\" = 8080,\n\
}\n\
```\n\n",
);
let readme_file = validators_dir.join("README.md");
std::fs::write(&readme_file, content).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write validators README: {}",
e
))
})?;
Ok(())
Ok(validator)
}
}
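// For a concrete picture of what `generate_field_validator` emits, here is a
// sketch that builds the Number-field branch by hand with hypothetical field
// values; note the `else { true }` arm that keeps the generated Nickel
// expression well-formed when no upper bound exists:
```rust
fn main() {
    // Hypothetical Number field: name "port", min 1, no max.
    let (name, min, max): (&str, Option<i64>, Option<i64>) = ("port", Some(1), None);
    let mut validator = String::new();
    validator.push_str(&format!("let validate_{} = fun value => (\n", name));
    validator.push_str("  (std.is_number value) &&\n");
    if let Some(min) = min {
        validator.push_str(&format!("  (value >= {}) &&\n", min));
    }
    if let Some(max) = max {
        validator.push_str(&format!("  (value <= {})\n", max));
    } else {
        // Without this terminal `true`, the trailing `&&` would be dangling.
        validator.push_str("  true\n");
    }
    validator.push_str(")\n");
    print!("{}", validator);
}
```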

View File

@ -1,210 +0,0 @@
//! Values generator: creates runtime values configuration for deployment environments.
use crate::error::Result;
use crate::models::ProjectSpec;
use std::path::Path;
/// Generates runtime values files for environment-specific configuration.
pub struct ValuesGenerator;
impl ValuesGenerator {
/// Generate values configuration files.
pub fn generate(spec: &ProjectSpec, output_dir: impl AsRef<Path>) -> Result<()> {
let output_dir = output_dir.as_ref();
tracing::info!("Generating values configuration for project: {}", spec.name);
// Ensure values directory exists
let values_dir = output_dir.join("values");
std::fs::create_dir_all(&values_dir).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to create values directory: {}",
e
))
})?;
// Generate main values file
Self::generate_values_config(spec, &values_dir)?;
// Generate environment-specific values
Self::generate_environment_values(spec, &values_dir, "development")?;
Self::generate_environment_values(spec, &values_dir, "staging")?;
Self::generate_environment_values(spec, &values_dir, "production")?;
// Generate README
Self::generate_values_readme(spec, &values_dir)?;
tracing::info!("Successfully generated values configuration");
Ok(())
}
/// Generate main values/config.ncl file.
fn generate_values_config(spec: &ProjectSpec, values_dir: &Path) -> Result<()> {
let mut content = String::new();
content.push_str(&format!(
"# Runtime values configuration for {}\n\
# This file separates runtime values from configuration structure\n\n",
spec.name
));
content.push_str("# Import defaults\n");
content.push_str("let defaults = import \"../defaults/config.ncl\" in\n\n");
content.push_str("# Runtime values (override these per environment)\n");
content.push_str("{\n");
for feature in &spec.domain_features {
content.push_str(&format!(" {} = {{\n", feature.name));
for field in &feature.fields {
if field.sensitive {
content.push_str(&format!(
" {} = \"OVERRIDE_IN_ENVIRONMENT\", # Sensitive\n",
field.name
));
} else if let Some(default) = &field.default {
content.push_str(&format!(" {} = {},\n", field.name, default));
} else {
content.push_str(&format!(
" {} = defaults.{}.{},\n",
field.name, feature.name, field.name
));
}
}
content.push_str(" },\n");
}
content.push_str("}\n");
std::fs::write(values_dir.join("config.ncl"), content).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write values/config.ncl: {}",
e
))
})?;
Ok(())
}
/// Generate environment-specific values file.
fn generate_environment_values(
spec: &ProjectSpec,
values_dir: &Path,
environment: &str,
) -> Result<()> {
let mut content = String::new();
content.push_str(&format!(
"# {} environment values for {}\n\n",
Self::capitalize_first(environment),
spec.name
));
content.push_str("# Import base values\n");
content.push_str("let base = import \"./config.ncl\" in\n\n");
content.push_str(&format!(
"# {} overrides\n",
Self::capitalize_first(environment)
));
content.push_str("std.record.merge base {\n");
// Add environment-specific examples
match environment {
"development" => {
content.push_str(" # Development-specific values\n");
content.push_str(" # Example: lower resource limits, verbose logging\n");
}
"staging" => {
content.push_str(" # Staging-specific values\n");
content.push_str(" # Example: production-like but with test data\n");
}
"production" => {
content.push_str(" # Production-specific values\n");
content.push_str(" # Example: high availability, strict security\n");
}
_ => {}
}
content.push_str("}\n");
std::fs::write(values_dir.join(format!("{}.ncl", environment)), content).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write {}.ncl: {}",
environment, e
))
})?;
Ok(())
}
/// Generate README.md for values directory.
fn generate_values_readme(spec: &ProjectSpec, values_dir: &Path) -> Result<()> {
let mut content = String::new();
content.push_str(&format!("# Runtime Values - {}\n\n", spec.name));
content.push_str(
"This directory contains runtime values separated from configuration structure.\n\n\
## Overview\n\n\
Values files allow environment-specific overrides while keeping the core \
configuration structure unchanged.\n\n\
## Files\n\n\
- `config.ncl` - Base runtime values (references defaults)\n\
- `development.ncl` - Development environment overrides\n\
- `staging.ncl` - Staging environment overrides\n\
- `production.ncl` - Production environment overrides\n\n\
## Usage\n\n\
### Development\n\n\
```bash\n\
nickel export values/development.ncl > config.json\n\
```\n\n\
### Production\n\n\
```bash\n\
nickel export values/production.ncl > config.json\n\
```\n\n\
## Environment Variables\n\n\
Sensitive values should be provided via environment variables:\n\n\
```nickel\n\
{\n\
database = {\n\
password = std.env.DATABASE_PASSWORD,\n\
},\n\
}\n\
```\n\n\
## Merging Strategy\n\n\
1. **Defaults** (`defaults/config.ncl`) - Sensible defaults for all fields\n\
2. **Base values** (`values/config.ncl`) - Runtime values common to all environments\n\
3. **Environment values** (`values/{env}.ncl`) - Environment-specific overrides\n\n\
```nickel\n\
let defaults = import \"defaults/config.ncl\" in\n\
let values = import \"values/config.ncl\" in\n\
let env_values = import \"values/production.ncl\" in\n\
std.record.merge_all [defaults, values, env_values]\n\
```\n\n\
## Security\n\n\
- Never commit sensitive values (passwords, API keys) to version control\n\
- Use environment variables or secret management systems\n\
- Mark sensitive fields in `values/config.ncl` with `# Sensitive` comment\n\n",
);
std::fs::write(values_dir.join("README.md"), content).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write values README: {}",
e
))
})?;
Ok(())
}
/// Capitalize first letter of a string.
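/// For example, "development" becomes "Development".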
fn capitalize_first(s: &str) -> String {
let mut chars = s.chars();
match chars.next() {
None => String::new(),
Some(first) => first.to_uppercase().collect::<String>() + chars.as_str(),
}
}
}
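A minimal usage sketch for the generator above. `ProjectSpec`'s fields and the `generate` signature come from this changeset; the import paths for `ValuesGenerator`, `ProjectType`, and `InfrastructureSpec` are assumptions for illustration:

```rust
use std::path::Path;

use typedialog_prov_gen::error::Result;
// Assumed module paths; models::ProjectSpec matches the `use` above,
// the rest are not shown in this diff.
use typedialog_prov_gen::models::{InfrastructureSpec, ProjectSpec, ProjectType};
use typedialog_prov_gen::ValuesGenerator;

fn emit_values(project_type: ProjectType, out: &Path) -> Result<()> {
    // Struct-literal construction, matching the loader changes in this diff.
    let spec = ProjectSpec {
        name: "demo".to_string(),
        project_type,
        infrastructure: InfrastructureSpec::default(),
        domain_features: Vec::new(),
        constraints: Vec::new(),
    };
    // Writes values/config.ncl, values/{development,staging,production}.ncl,
    // and values/README.md under `out`.
    ValuesGenerator::generate(&spec, out)
}
```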

View File

@ -50,10 +50,13 @@ impl CargoIntrospector {
// Build constraints from inferred features
let constraints = Self::infer_constraints(&domain_features);
let mut spec = ProjectSpec::new(name, project_type);
spec.infrastructure = infrastructure;
spec.domain_features = domain_features;
spec.constraints = constraints;
let spec = ProjectSpec {
name,
project_type,
infrastructure,
domain_features,
constraints,
};
// Validate the spec
spec.validate().map_err(|errors| CargoIntrospectionError {

View File

@ -209,10 +209,13 @@ impl ConfigLoader {
// Convert constraints
let constraints = Self::convert_constraints(&config.constraints)?;
let mut spec = ProjectSpec::new(config.name, project_type);
spec.infrastructure = infrastructure;
spec.domain_features = domain_features;
spec.constraints = constraints;
let spec = ProjectSpec {
name: config.name,
project_type,
infrastructure,
domain_features,
constraints,
};
// Validate the spec
spec.validate().map_err(|errors| ConfigLoadingError {
@ -289,11 +292,12 @@ impl ConfigLoader {
.map(Self::convert_field)
.collect::<Result<Vec<_>>>()?;
let mut feature = DomainFeature::new(f.name.clone());
feature.description = f.description.clone();
feature.fields = fields;
Ok(feature)
Ok(DomainFeature {
name: f.name.clone(),
description: f.description.clone(),
fields,
constraints: None,
})
})
.collect()
}

View File

@ -62,10 +62,13 @@ impl NickelSchemaLoader {
// Extract constraints from field definitions
let constraints = Self::extract_constraints(content)?;
let mut spec = ProjectSpec::new(project_name.to_string(), project_type);
spec.infrastructure = infrastructure;
spec.domain_features = domain_features;
spec.constraints = constraints;
let spec = ProjectSpec {
name: project_name.to_string(),
project_type,
infrastructure,
domain_features,
constraints,
};
// Validate the spec
spec.validate().map_err(|errors| NickelSchemaLoadingError {

View File

@ -26,22 +26,6 @@ pub struct ProjectSpec {
/// Validation constraints (array sizes, uniqueness rules, etc.)
pub constraints: Vec<Constraint>,
/// Infrastructure-as-Code template configuration
#[serde(default)]
pub iac_templates: IacTemplateSpec,
/// Script generation configuration
#[serde(default)]
pub scripts: ScriptSpec,
/// Documentation generation configuration
#[serde(default)]
pub docs: DocsSpec,
/// Supported locales for i18n
#[serde(default)]
pub locales: Vec<LocaleSpec>,
}
impl ProjectSpec {
@ -53,10 +37,6 @@ impl ProjectSpec {
infrastructure: InfrastructureSpec::default(),
domain_features: Vec::new(),
constraints: Vec::new(),
iac_templates: IacTemplateSpec::default(),
scripts: ScriptSpec::default(),
docs: DocsSpec::default(),
locales: Vec::new(),
}
}
@ -229,18 +209,6 @@ pub struct DomainFeature {
/// Constraints specific to this feature (e.g., array bounds)
pub constraints: Option<HashMap<String, FeatureConstraint>>,
/// Nickel schema imports for this feature
#[serde(default)]
pub nickel_imports: Vec<NickelImport>,
/// Whether to use type contracts in generated schemas
#[serde(default)]
pub use_type_contracts: bool,
/// Custom validators for this feature
#[serde(default)]
pub custom_validators: Vec<ValidatorSpec>,
}
impl DomainFeature {
@ -251,9 +219,6 @@ impl DomainFeature {
description: None,
fields: Vec::new(),
constraints: None,
nickel_imports: Vec::new(),
use_type_contracts: false,
custom_validators: Vec::new(),
}
}
@ -518,129 +483,6 @@ pub struct FeatureConstraint {
pub unique: bool,
}
/// Infrastructure-as-Code template configuration.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct IacTemplateSpec {
/// Generate Terraform/OpenTofu templates
pub terraform_enabled: bool,
/// Generate Ansible playbooks
pub ansible_enabled: bool,
/// Generate docker-compose files
pub docker_compose_enabled: bool,
}
/// Script generation configuration.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct ScriptSpec {
/// Generate Bash scripts
pub bash_enabled: bool,
/// Generate Nushell scripts
pub nushell_enabled: bool,
/// Enable nickel-roundtrip integration
pub nickel_roundtrip: bool,
}
/// Documentation generation configuration.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct DocsSpec {
/// Generate main README.md
pub generate_readme: bool,
/// Generate quickstart guide
pub generate_quickstart: bool,
/// Generate nickel-roundtrip workflow guide
pub generate_nickel_roundtrip_guide: bool,
}
/// Locale/translation specification.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LocaleSpec {
/// Language code (e.g., "en-US", "es-ES")
pub language_code: String,
/// Whether this is the default locale
pub is_default: bool,
}
impl LocaleSpec {
/// Create a new locale specification.
pub fn new(language_code: impl Into<String>, is_default: bool) -> Self {
Self {
language_code: language_code.into(),
is_default,
}
}
}
/// Nickel schema import declaration.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct NickelImport {
/// Import name (e.g., "database_schema")
pub name: String,
/// Import path relative to schema file (e.g., "./database.ncl")
pub path: String,
}
impl NickelImport {
/// Create a new Nickel import.
pub fn new(name: impl Into<String>, path: impl Into<String>) -> Self {
Self {
name: name.into(),
path: path.into(),
}
}
}
/// Custom validator specification for advanced validation logic.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ValidatorSpec {
/// Validator name (e.g., "ValidPort", "UniqueBindAddresses")
pub name: String,
/// Validation logic description
pub description: Option<String>,
/// Fields this validator applies to
pub applies_to: Vec<String>,
/// Validator implementation type
pub validator_type: ValidatorType,
}
/// Type of validator implementation.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum ValidatorType {
/// Range validation (min/max)
Range,
/// Pattern matching (regex)
Pattern,
/// Custom Nickel predicate function
CustomPredicate,
/// Array uniqueness validator
ArrayUniqueness,
/// Composite validator (combines multiple validators)
Composite,
}
impl ValidatorSpec {
/// Create a new validator specification.
pub fn new(name: impl Into<String>, validator_type: ValidatorType) -> Self {
Self {
name: name.into(),
description: None,
applies_to: Vec::new(),
validator_type,
}
}
/// Add field that this validator applies to.
pub fn applies_to(mut self, field: impl Into<String>) -> Self {
self.applies_to.push(field.into());
self
}
/// Set description.
pub fn with_description(mut self, description: impl Into<String>) -> Self {
self.description = Some(description.into());
self
}
}
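For reference, the builder methods removed above chained like this; a sketch using only names shown in this hunk:

```rust
// Hypothetical composition of the (now removed) ValidatorSpec builder API.
fn unique_bind_addresses() -> ValidatorSpec {
    ValidatorSpec::new("UniqueBindAddresses", ValidatorType::ArrayUniqueness)
        .applies_to("trackers")
        .with_description("Tracker bind addresses must be unique")
}
```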
#[cfg(test)]
mod tests {
use super::*;

View File

@ -1,16 +1,14 @@
//! Template loader and renderer.
use crate::config::Config;
use crate::error::{Result, TemplateRenderError};
use crate::error::Result;
use std::collections::BTreeMap;
use std::fs;
use std::path::PathBuf;
use tera::Tera;
/// Loads and renders Tera templates for code generation.
pub struct TemplateLoader {
path: PathBuf,
tera: Tera,
}
/// Template category with its templates.
@ -25,16 +23,7 @@ impl TemplateLoader {
/// Load template library from configuration.
pub fn new(config: &Config) -> Result<Self> {
let path = config.templates_dir();
let glob_pattern = path.join("**/*.tera").to_string_lossy().to_string();
let mut tera = Tera::new(&glob_pattern).map_err(|e| TemplateRenderError {
template_name: glob_pattern.clone(),
reason: format!("Failed to initialize Tera engine: {}", e),
})?;
tera.autoescape_on(vec![]);
Ok(TemplateLoader { path, tera })
Ok(TemplateLoader { path })
}
/// Get the templates directory path.
@ -109,13 +98,8 @@ impl TemplateLoader {
}
/// Render a template with given context.
pub fn render(&self, template_name: &str, context: &tera::Context) -> Result<String> {
self.tera.render(template_name, context).map_err(|e| {
TemplateRenderError {
template_name: template_name.to_string(),
reason: e.to_string(),
}
.into()
})
pub fn render(&self, _template_name: &str, _context: &tera::Context) -> Result<String> {
// TODO: Implement template rendering
Ok(String::new())
}
}

View File

@ -1,359 +0,0 @@
# Nickel Roundtrip Workflow
Complete guide to the TypeDialog + Nickel configuration roundtrip for {{ project_name }}.
## Overview
The **Nickel Roundtrip** workflow integrates TypeDialog interactive forms with Nickel's powerful type system and validation:
```
TypeDialog Form → JSON → Nickel → Validation → Export (JSON/YAML/TOML)
↑ ↓
└────────────────── Edit & Re-import ──────────────────┘
```
This enables:
- ✅ User-friendly configuration via TypeDialog forms
- ✅ Type-safe configuration with Nickel contracts
- ✅ Validation against schemas and custom predicates
- ✅ Round-trip editing (export, edit, re-import)
## Workflow Steps
### 1. Collect User Input (TypeDialog)
Run the interactive form:
```bash
typedialog config-form.toml \
--output-format json \
--output generated/user-input.json
```
TypeDialog presents form fields, validates input, and outputs JSON.
### 2. Convert JSON to Nickel
Embed user values into Nickel configuration:
```bash
./scripts/json-to-nickel.sh generated/user-input.json generated/config.ncl
```
Generated `config.ncl`:
```nickel
let schemas = import "../schemas/config.ncl" in
let defaults = import "../defaults/config.ncl" in
let user_values = {
# JSON values embedded here
server = {
bind_address = "0.0.0.0:8080",
max_connections = 100,
},
} in
let final_config = std.record.merge defaults user_values in
final_config | schemas.Config
```
### 3. Validate with Nickel
Type-check the configuration:
```bash
nickel typecheck generated/config.ncl
```
Nickel verifies:
- All required fields present
- Type contracts satisfied (e.g., `ValidPort`)
- Custom validators pass
- Imports resolve correctly
### 4. Export to Target Format
Export validated configuration:
**JSON:**
```bash
nickel export --format json generated/config.ncl > generated/config.json
```
**YAML:**
```bash
nickel export --format yaml generated/config.ncl > generated/config.yaml
```
**TOML (via JSON):**
```bash
nickel export --format json generated/config.ncl | json2toml > generated/config.toml
```
### 5. Apply to Infrastructure
Use exported configuration with your deployment tool:
```bash
# Terraform
terraform apply -var-file="generated/config.tfvars"
# Ansible
ansible-playbook deploy.yml -e @generated/config.yaml
# Docker Compose
docker-compose -f service.yml --env-file generated/config.env up
```
## Round-Trip Editing
### Export → Edit → Re-import
**Step 1: Export current config**
```bash
nickel export --format json values/config.ncl > /tmp/config.json
```
**Step 2: Edit JSON**
```bash
# Edit with jq
jq '.server.max_connections = 200' /tmp/config.json > /tmp/config-modified.json
# Or edit manually
vim /tmp/config-modified.json
```
**Step 3: Re-import to Nickel**
```bash
./scripts/json-to-nickel.sh /tmp/config-modified.json values/config.ncl
```
**Step 4: Validate**
```bash
nickel typecheck values/config.ncl
```
If validation passes, your edited config is ready!
## Advanced Workflows
### Merging Multiple Configurations
Combine base config with environment-specific overrides:
```nickel
let base = import "./config.ncl" in
let production_overrides = {
server.max_connections = 500,
logging.level = "warn",
} in
std.record.merge base production_overrides
```
### Custom Validators
Add domain-specific validation in `validators/`:
```nickel
# validators/custom.ncl
{
  ValidTrackerArray = fun label trackers =>
    let bind_addresses = std.array.map (fun t => t.bind_address) trackers in
    # Sorting preserves array length, so comparing against a sorted copy can
    # never detect duplicates. Instead, count distinct addresses by folding
    # them into a record and comparing field count to array length.
    let distinct = std.array.fold_left
      (fun acc addr =>
        if std.record.has_field addr acc then acc else std.record.insert addr true acc)
      {}
      bind_addresses
    in
    if std.is_array trackers
      && std.array.length bind_addresses == std.array.length (std.record.fields distinct)
    then trackers
    else std.contract.blame_with_message "tracker bind addresses must be unique" label,
}
```
Use in schema:
```nickel
let validators = import "../validators/custom.ncl" in
{
Config = {
trackers | validators.ValidTrackerArray,
},
}
```
### Conditional Configuration
Use Nickel's `if-then-else` for environment-specific logic:
```nickel
let env = "production" in # or "development"
{
server = {
max_connections = if env == "production" then 500 else 50,
logging_level = if env == "production" then "warn" else "debug",
},
}
```
### Schema Imports and Composition
Organize schemas into modules:
```nickel
# schemas/server.ncl
{
Server = {
bind_address | String,
port | Number,
max_connections | Number,
},
}
# schemas/database.ncl
{
Database = {
url | String,
pool_size | Number,
},
}
# schemas/config.ncl
let server_schema = import "./server.ncl" in
let database_schema = import "./database.ncl" in
{
Config = {
server | server_schema.Server,
database | database_schema.Database | optional,
},
}
```
## Integration with CI/CD
### Pre-commit Hook
Validate Nickel config before commit:
```bash
#!/bin/bash
# .git/hooks/pre-commit
if ! nickel typecheck values/config.ncl; then
echo "❌ Nickel validation failed!"
exit 1
fi
echo "✅ Nickel validation passed"
```
### CI Pipeline (GitHub Actions)
```yaml
name: Validate Config
on: [push, pull_request]
jobs:
validate:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Install Nickel
run: cargo install nickel-lang-cli
- name: Validate configuration
run: nickel typecheck provisioning/values/config.ncl
- name: Export to JSON
run: nickel export provisioning/values/config.ncl > config.json
```
## Troubleshooting
### Error: "contract violated"
**Example:**
```
Error: contract violated by value
--> bind_address: "invalid"
expected: ValidBindAddress
```
**Fix:** The value doesn't match the validator. Check `validators/common.ncl` for the contract definition.
### Error: "infinite recursion detected"
**Cause:** Circular imports in Nickel files.
**Fix:** Ensure import graph is acyclic:
```
schemas/config.ncl
├→ schemas/server.ncl
└→ schemas/database.ncl
```
Never create cycles like:
```
server.ncl → database.ncl → server.ncl # ❌ BAD
```
### Error: "field not found"
**Cause:** Missing required field in configuration.
**Fix:** Add the field or mark it as `optional` in schema:
```nickel
{
Config = {
required_field | String,
optional_field | String | optional, # Won't error if missing
},
}
```
## Best Practices
1. **Always validate before exporting:**
```bash
nickel typecheck config.ncl && nickel export config.ncl
```
2. **Use defaults for common values:**
```nickel
let defaults = {max_connections = 100} in
std.record.merge defaults user_values
```
3. **Separate schemas from values:**
- `schemas/` → Type contracts (versioned, stable)
- `values/` → Runtime configuration (user-editable)
4. **Document contracts:**
```nickel
{
# Port number must be 1-65535
port | ValidPort "config.port",
}
```
5. **Test with `nickel query`:**
```bash
nickel query --field server.port config.ncl
```
## Resources
- [Nickel Language Documentation](https://nickel-lang.org/user-manual/)
- [TypeDialog Forms Guide](https://github.com/yourusername/typedialog)
- [Example: torrust-tracker-deployer](https://github.com/torrust/torrust-tracker-deployer)
---
**Generated by typedialog-prov-gen for {{ project_name }}**

View File

@ -1,201 +0,0 @@
# {{ project_name }} - Quick Start Guide
Get up and running with {{ project_name }} provisioning in 5 minutes.
## Prerequisites
Install required tools:
```bash
# TypeDialog (interactive forms)
cargo install typedialog
# Nickel (configuration language)
cargo install nickel-lang-cli
# Optional: Format converters
apt-get install jq # JSON processor
cargo install json2toml # TOML converter
```
## Step 1: Run Configuration Wizard
Choose your preferred shell:
**Bash:**
```bash
chmod +x scripts/configure.sh
./scripts/configure.sh
```
**Nushell:**
```bash
chmod +x scripts/configure.nu
./scripts/configure.nu
```
The wizard will:
1. Display an interactive TypeDialog form
2. Collect configuration values
3. Generate Nickel configuration
4. Validate with schema/type contracts
5. Export to JSON, YAML, TOML
## Step 2: Review Generated Configuration
Check the `generated/` directory:
```bash
ls -lh generated/
```
You should see:
- `config.ncl` - Nickel source
- `config.json` - JSON export
- `config.yaml` - YAML export
- `user-input.json` - Raw form data
## Step 3: Validate Configuration
Type-check with Nickel:
```bash
nickel typecheck generated/config.ncl
```
If validation passes, your configuration is correct!
## Step 4: Deploy Infrastructure
{% if iac_templates.terraform_enabled %}
### Option A: Terraform/OpenTofu
```bash
cd templates/tofu/{{ infrastructure.providers[0] | default(value="common") }}
# Initialize Terraform
terraform init
# Preview changes
terraform plan -var-file="../../../generated/config.tfvars"
# Apply infrastructure
terraform apply -var-file="../../../generated/config.tfvars"
```
{% endif %}
{% if iac_templates.ansible_enabled %}
### Option B: Ansible
```bash
# Run deployment playbook
ansible-playbook templates/ansible/deploy.yml \
-i templates/ansible/inventory.yml \
-e @generated/config.yaml
```
{% endif %}
{% if iac_templates.docker_compose_enabled %}
### Option C: Docker Compose
```bash
# Start services
docker-compose -f templates/docker-compose/service.yml up -d
# View logs
docker-compose logs -f
# Stop services
docker-compose down
```
{% endif %}
## Customizing Configuration
### Edit Nickel Values Directly
1. Open `values/config.ncl` in your editor
2. Modify values (type contracts ensure correctness)
3. Validate: `nickel typecheck values/config.ncl`
4. Export: `nickel export values/config.ncl > generated/config.json`
### Re-run the Form
Simply run `./scripts/configure.sh` again to update values.
## Common Tasks
### Export to Different Format
```bash
# Nickel → JSON
./scripts/nickel-to-json.sh values/config.ncl generated/config.json
# Nickel → YAML
./scripts/nickel-to-yaml.sh values/config.ncl generated/config.yaml
```
### Import Existing JSON
```bash
# JSON → Nickel
./scripts/json-to-nickel.sh existing-config.json values/config.ncl
```
### Validate Without Exporting
```bash
nickel typecheck values/config.ncl
```
## Troubleshooting
### Error: "Type checking failed"
**Cause:** Configuration values don't match schema contracts.
**Fix:** Check the error message for the failing field and correct the value.
Example:
```
Error: contract violated by value
--> port: 70000
expected: ValidPort (1-65535)
```
Fix by setting `port = 8080` in `values/config.ncl`.
### Error: "Form file not found"
**Cause:** Missing form fragments or master form.
**Fix:** Ensure all files in the `fragments/` directory exist:
```bash
ls -1 fragments/
```
### Error: "nickel command not found"
**Cause:** Nickel not installed.
**Fix:**
```bash
cargo install nickel-lang-cli
```
## Next Steps
- Read [NICKEL_ROUNDTRIP.md](NICKEL_ROUNDTRIP.md) for advanced workflows
- Explore `schemas/` to understand type contracts
- Customize `validators/` for custom validation logic
- Add locales in `locales/` for i18n support
## Support
For issues or questions:
- GitHub: {{ repository | default(value="https://github.com/yourusername/" ~ project_name) }}
- Documentation: `docs/`
---
**Generated by typedialog-prov-gen**

View File

@ -1,140 +0,0 @@
# {{ project_name }} - Provisioning Configuration
**Generated by typedialog-prov-gen**
This directory contains the complete provisioning configuration for {{ project_name }}, using TypeDialog forms with Nickel configuration validation.
## Quick Start
1. **Run the configuration wizard:**
```bash
./scripts/configure.sh
```
2. **Generated files will be in `generated/`:**
- `config.ncl` - Nickel source configuration
- `config.json` - JSON export
- `config.yaml` - YAML export
3. **Validate configuration:**
```bash
nickel typecheck generated/config.ncl
```
## Directory Structure
```
provisioning/
├── config.ncl # Main Nickel configuration entry point
├── config-form.toml # TypeDialog master form
├── constraints.toml # Validation constraints
├── defaults/ # Default values for all features
├── docs/ # Documentation guides
├── fragments/ # Form field fragments
├── generated/ # Generated configurations (git-ignored)
├── locales/ # i18n translations (en-US, es-ES)
├── schemas/ # Nickel type schemas
├── scripts/ # Configuration and deployment scripts
├── templates/ # Infrastructure templates (Terraform, Ansible, etc.)
├── validators/ # Validation predicates
└── values/ # User-editable runtime values
```
## Features
{% for feature in features %}
### {{ feature.name }}
{{ feature.description | default(value="No description available") }}
**Fields:** {{ feature.fields | length }} configuration fields
{% endfor %}
## Configuration Workflow
1. **Interactive Form** → Run `./scripts/configure.sh` to collect user input
2. **JSON Generation** → TypeDialog outputs user values as JSON
3. **Nickel Conversion** → JSON is embedded into Nickel configuration
4. **Schema Validation** → Nickel type contracts enforce correctness
5. **Export** → Final configuration exported to JSON/YAML/TOML
## Infrastructure as Code
{% if iac_templates.terraform_enabled %}
### Terraform/OpenTofu
Templates in `templates/tofu/`:
- `common/` - Shared Terraform modules
- `hetzner/` - Hetzner Cloud provider
- `lxd/` - LXD container provider
**Usage:**
```bash
cd templates/tofu/hetzner
terraform init
terraform plan -var-file="../../../generated/config.tfvars"
```
{% endif %}
{% if iac_templates.ansible_enabled %}
### Ansible
Playbooks in `templates/ansible/`:
- `deploy.yml` - Main deployment playbook
- `inventory.yml` - Dynamic inventory
**Usage:**
```bash
ansible-playbook templates/ansible/deploy.yml -i templates/ansible/inventory.yml
```
{% endif %}
{% if iac_templates.docker_compose_enabled %}
### Docker Compose
Compose files in `templates/docker-compose/`:
- `service.yml` - Main service definition
- `monitoring.yml` - Monitoring stack (Prometheus, Grafana)
**Usage:**
```bash
docker-compose -f templates/docker-compose/service.yml up -d
```
{% endif %}
## Scripts
- `configure.sh` / `configure.nu` - Main configuration wizard (Bash/Nushell)
- `nickel-to-json.sh` / `.nu` - Export Nickel → JSON
- `nickel-to-yaml.sh` / `.nu` - Export Nickel → YAML
- `json-to-nickel.sh` / `.nu` - Import JSON → Nickel
## Nickel Roundtrip Workflow
See [NICKEL_ROUNDTRIP.md](docs/NICKEL_ROUNDTRIP.md) for detailed workflow documentation.
**Summary:**
1. Edit values in `values/config.ncl`
2. Run `nickel typecheck values/config.ncl`
3. Export with `nickel export values/config.ncl > generated/config.json`
4. Apply to infrastructure
## Localization
Supported locales:
{% for locale in locales %}
- {{ locale.language_code }}{% if locale.is_default %} (default){% endif %}
{% endfor %}
Translation files in `locales/{lang}/forms.ftl` (Fluent format).
## Documentation
- [README.md](README.md) - This file
- [docs/QUICKSTART.md](docs/QUICKSTART.md) - Getting started guide
- [docs/NICKEL_ROUNDTRIP.md](docs/NICKEL_ROUNDTRIP.md) - Nickel roundtrip workflow
## License
{{ license | default(value="MIT") }}

View File

@ -1,61 +1,86 @@
name = "mysql_fragment"
[[elements]]
border_bottom = true
border_top = true
name = "mysql_header"
title = "💾 MySQL Database Configuration"
type = "section_header"
title = "💾 MySQL Database Configuration"
border_top = true
border_bottom = true
[[elements]]
default = "localhost"
help = "MySQL server hostname or IP address"
name = "mysql_host"
nickel_alias = "mysql_host"
nickel_path = ["tracker", "core", "database", "mysql_host"]
placeholder = "localhost"
type = "text"
prompt = "MySQL host"
placeholder = "localhost"
default = "localhost"
required = true
type = "text"
help = "MySQL server hostname or IP address"
nickel_path = [
"tracker",
"core",
"database",
"mysql_host",
]
nickel_alias = "mysql_host"
[[elements]]
default = "3306"
help = "MySQL server port (default 3306). Must be between 1-65535."
name = "mysql_port"
nickel_alias = "mysql_port"
nickel_path = ["tracker", "core", "database", "mysql_port"]
placeholder = "3306"
type = "text"
prompt = "MySQL port"
placeholder = "3306"
default = "3306"
required = true
type = "text"
help = "MySQL server port (default 3306). Must be between 1-65535."
nickel_path = [
"tracker",
"core",
"database",
"mysql_port",
]
nickel_alias = "mysql_port"
[[elements]]
default = "torrust_tracker"
help = "Name of the MySQL database"
name = "mysql_database_name"
nickel_alias = "mysql_database_name"
nickel_path = ["tracker", "core", "database", "database_name"]
placeholder = "torrust_tracker"
type = "text"
prompt = "Database name"
placeholder = "torrust_tracker"
default = "torrust_tracker"
required = true
type = "text"
help = "Name of the MySQL database"
nickel_path = [
"tracker",
"core",
"database",
"database_name",
]
nickel_alias = "mysql_database_name"
[[elements]]
default = "tracker_user"
help = "MySQL username for authentication"
name = "mysql_username"
nickel_alias = "mysql_username"
nickel_path = ["tracker", "core", "database", "mysql_username"]
placeholder = "tracker_user"
prompt = "Database username"
required = true
type = "text"
prompt = "Database username"
placeholder = "tracker_user"
default = "tracker_user"
required = true
help = "MySQL username for authentication"
nickel_path = [
"tracker",
"core",
"database",
"mysql_username",
]
nickel_alias = "mysql_username"
[[elements]]
help = "MySQL password for authentication"
name = "mysql_password"
nickel_alias = "mysql_password"
nickel_path = ["tracker", "core", "database", "mysql_password"]
type = "password"
prompt = "Database password"
required = true
type = "password"
help = "MySQL password for authentication"
nickel_path = [
"tracker",
"core",
"database",
"mysql_password",
]
nickel_alias = "mysql_password"

View File

@ -1,19 +1,24 @@
name = "sqlite_fragment"
[[elements]]
border_bottom = true
border_top = true
name = "sqlite_header"
title = "💾 SQLite Database Configuration"
type = "section_header"
title = "💾 SQLite Database Configuration"
border_top = true
border_bottom = true
[[elements]]
default = "tracker.db"
help = "Name of the SQLite database file (will be created in the tracker data directory)"
name = "sqlite_database_name"
nickel_alias = "sqlite_database_name"
nickel_path = ["tracker", "core", "database", "database_name"]
placeholder = "tracker.db"
prompt = "Database filename"
required = true
type = "text"
prompt = "Database filename"
placeholder = "tracker.db"
default = "tracker.db"
required = true
help = "Name of the SQLite database file (will be created in the tracker data directory)"
nickel_path = [
"tracker",
"core",
"database",
"database_name",
]
nickel_alias = "sqlite_database_name"

View File

@ -1,25 +1,31 @@
name = "environment_fragment"
[[elements]]
border_bottom = true
border_top = true
name = "environment_header"
title = "🏗️ Environment Identification"
type = "section_header"
title = "🏗️ Environment Identification"
border_top = true
border_bottom = true
[[elements]]
help = "Lowercase letters, numbers, dashes. Cannot start with number or dash. Examples: dev, staging, e2e-config"
name = "environment_name"
nickel_path = ["environment", "name"]
placeholder = "dev, staging, production, e2e-test"
prompt = "Environment name"
required = true
type = "text"
prompt = "Environment name"
placeholder = "dev, staging, production, e2e-test"
required = true
help = "Lowercase letters, numbers, dashes. Cannot start with number or dash. Examples: dev, staging, e2e-config"
nickel_path = [
"environment",
"name",
]
[[elements]]
help = "1-63 chars, ASCII letters/numbers/dashes, no leading digit/dash, no trailing dash. Will be auto-generated if omitted."
name = "instance_name"
nickel_path = ["environment", "instance_name"]
placeholder = "Leave empty for auto-generation: torrust-tracker-vm-{env-name}"
prompt = "Instance/VM name (optional)"
type = "text"
prompt = "Instance/VM name (optional)"
placeholder = "Leave empty for auto-generation: torrust-tracker-vm-{env-name}"
help = "1-63 chars, ASCII letters/numbers/dashes, no leading digit/dash, no trailing dash. Will be auto-generated if omitted."
nickel_path = [
"environment",
"instance_name",
]

View File

@ -1,25 +1,31 @@
name = "grafana_fragment"
[[elements]]
border_bottom = true
border_top = true
name = "grafana_header"
title = "📈 Grafana Configuration"
type = "section_header"
title = "📈 Grafana Configuration"
border_top = true
border_bottom = true
[[elements]]
name = "grafana_bind_address"
type = "text"
prompt = "Grafana bind address"
placeholder = "0.0.0.0:3000"
default = "0.0.0.0:3000"
help = "Address and port for Grafana. Format: IP:PORT (e.g., 0.0.0.0:3000)"
name = "grafana_bind_address"
nickel_path = ["grafana", "bind_address"]
placeholder = "0.0.0.0:3000"
prompt = "Grafana bind address"
type = "text"
nickel_path = [
"grafana",
"bind_address",
]
[[elements]]
help = "Admin password for Grafana access. Keep this secure!"
name = "grafana_admin_password"
nickel_path = ["grafana", "admin_password"]
type = "password"
prompt = "Grafana admin password"
required = true
type = "password"
help = "Admin password for Grafana access. Keep this secure!"
nickel_path = [
"grafana",
"admin_password",
]

View File

@ -1,26 +1,32 @@
name = "prometheus_fragment"
[[elements]]
border_bottom = true
border_top = true
name = "prometheus_header"
title = "📊 Prometheus Configuration"
type = "section_header"
title = "📊 Prometheus Configuration"
border_top = true
border_bottom = true
[[elements]]
name = "prometheus_bind_address"
type = "text"
prompt = "Prometheus bind address"
placeholder = "0.0.0.0:9090"
default = "0.0.0.0:9090"
help = "Address and port for Prometheus. Format: IP:PORT (e.g., 0.0.0.0:9090)"
name = "prometheus_bind_address"
nickel_path = ["prometheus", "bind_address"]
placeholder = "0.0.0.0:9090"
prompt = "Prometheus bind address"
type = "text"
nickel_path = [
"prometheus",
"bind_address",
]
[[elements]]
name = "prometheus_scrape_interval"
type = "text"
prompt = "Scrape interval (seconds)"
placeholder = "15"
default = "15"
help = "How often Prometheus should scrape metrics (in seconds). Default: 15 seconds."
name = "prometheus_scrape_interval"
nickel_path = ["prometheus", "scrape_interval"]
placeholder = "15"
prompt = "Scrape interval (seconds)"
type = "text"
nickel_path = [
"prometheus",
"scrape_interval",
]

View File

@ -1,121 +1,145 @@
name = "aws_provider_fragment"
[[elements]]
border_bottom = true
border_top = true
name = "aws_header"
title = "☁️ AWS Cloud Configuration"
type = "section_header"
title = "☁️ AWS Cloud Configuration"
border_top = true
border_bottom = true
[[elements]]
help = "Your AWS IAM Access Key ID for authentication"
name = "aws_access_key_id"
nickel_path = ["provider", "aws_access_key_id"]
type = "text"
prompt = "AWS Access Key ID"
required = true
type = "text"
help = "Your AWS IAM Access Key ID for authentication"
nickel_path = [
"provider",
"aws_access_key_id",
]
[[elements]]
help = "Your AWS IAM Secret Access Key (will be masked)"
name = "aws_secret_access_key"
nickel_path = ["provider", "aws_secret_access_key"]
type = "password"
prompt = "AWS Secret Access Key"
required = true
type = "password"
help = "Your AWS IAM Secret Access Key (will be masked)"
nickel_path = [
"provider",
"aws_secret_access_key",
]
[[elements]]
default = "us-east-1"
help = "AWS region where resources will be deployed"
name = "aws_region"
nickel_path = ["provider", "aws_region"]
options = [
{ value = "us-east-1", label = "US East (N. Virginia)" },
{ value = "us-east-2", label = "US East (Ohio)" },
{ value = "us-west-1", label = "US West (N. California)" },
{ value = "us-west-2", label = "US West (Oregon)" },
{ value = "eu-west-1", label = "Europe (Ireland)" },
{ value = "eu-west-2", label = "Europe (London)" },
{ value = "eu-west-3", label = "Europe (Paris)" },
{ value = "eu-central-1", label = "Europe (Frankfurt)" },
{ value = "eu-north-1", label = "Europe (Stockholm)" },
{ value = "ap-northeast-1", label = "Asia Pacific (Tokyo)" },
{ value = "ap-northeast-2", label = "Asia Pacific (Seoul)" },
{ value = "ap-northeast-3", label = "Asia Pacific (Osaka)" },
{ value = "ap-southeast-1", label = "Asia Pacific (Singapore)" },
{ value = "ap-southeast-2", label = "Asia Pacific (Sydney)" },
{ value = "ap-south-1", label = "Asia Pacific (Mumbai)" },
{ value = "sa-east-1", label = "South America (São Paulo)" },
{ value = "ca-central-1", label = "Canada (Central)" },
]
type = "select"
prompt = "AWS Region"
options = [
{ value = "us-east-1", label = "US East (N. Virginia)" },
{ value = "us-east-2", label = "US East (Ohio)" },
{ value = "us-west-1", label = "US West (N. California)" },
{ value = "us-west-2", label = "US West (Oregon)" },
{ value = "eu-west-1", label = "Europe (Ireland)" },
{ value = "eu-west-2", label = "Europe (London)" },
{ value = "eu-west-3", label = "Europe (Paris)" },
{ value = "eu-central-1", label = "Europe (Frankfurt)" },
{ value = "eu-north-1", label = "Europe (Stockholm)" },
{ value = "ap-northeast-1", label = "Asia Pacific (Tokyo)" },
{ value = "ap-northeast-2", label = "Asia Pacific (Seoul)" },
{ value = "ap-northeast-3", label = "Asia Pacific (Osaka)" },
{ value = "ap-southeast-1", label = "Asia Pacific (Singapore)" },
{ value = "ap-southeast-2", label = "Asia Pacific (Sydney)" },
{ value = "ap-south-1", label = "Asia Pacific (Mumbai)" },
{ value = "sa-east-1", label = "South America (São Paulo)" },
{ value = "ca-central-1", label = "Canada (Central)" },
]
default = "us-east-1"
required = true
type = "select"
help = "AWS region where resources will be deployed"
nickel_path = [
"provider",
"aws_region",
]
[[elements]]
default = "t3.medium"
help = "EC2 instance type (determines CPU, RAM, and pricing)"
name = "aws_instance_type"
nickel_path = ["provider", "aws_instance_type"]
options = [
{ value = "t3.micro", label = "t3.micro - 2 vCPU, 1 GB RAM (Free tier)" },
{ value = "t3.small", label = "t3.small - 2 vCPU, 2 GB RAM" },
{ value = "t3.medium", label = "t3.medium - 2 vCPU, 4 GB RAM" },
{ value = "t3.large", label = "t3.large - 2 vCPU, 8 GB RAM" },
{ value = "t3.xlarge", label = "t3.xlarge - 4 vCPU, 16 GB RAM" },
{ value = "t3.2xlarge", label = "t3.2xlarge - 8 vCPU, 32 GB RAM" },
{ value = "m5.large", label = "m5.large - 2 vCPU, 8 GB RAM" },
{ value = "m5.xlarge", label = "m5.xlarge - 4 vCPU, 16 GB RAM" },
{ value = "m5.2xlarge", label = "m5.2xlarge - 8 vCPU, 32 GB RAM" },
{ value = "m5.4xlarge", label = "m5.4xlarge - 16 vCPU, 64 GB RAM" },
{ value = "c5.large", label = "c5.large - 2 vCPU, 4 GB RAM (compute optimized)" },
{ value = "c5.xlarge", label = "c5.xlarge - 4 vCPU, 8 GB RAM (compute optimized)" },
{ value = "r5.large", label = "r5.large - 2 vCPU, 16 GB RAM (memory optimized)" },
{ value = "r5.xlarge", label = "r5.xlarge - 4 vCPU, 32 GB RAM (memory optimized)" },
]
type = "select"
prompt = "EC2 Instance Type"
required = true
type = "select"
[[elements]]
default = "ami-ubuntu-24-04"
help = "Operating system image for EC2 instances"
name = "aws_ami"
nickel_path = ["provider", "aws_ami"]
options = [
{ value = "ami-ubuntu-24-04", label = "Ubuntu 24.04 LTS (Latest)" },
{ value = "ami-ubuntu-22-04", label = "Ubuntu 22.04 LTS" },
{ value = "ami-ubuntu-20-04", label = "Ubuntu 20.04 LTS" },
{ value = "ami-debian-12", label = "Debian 12 (Bookworm)" },
{ value = "ami-debian-11", label = "Debian 11 (Bullseye)" },
{ value = "ami-amazon-linux-2", label = "Amazon Linux 2" },
{ value = "ami-amazon-linux-2023", label = "Amazon Linux 2023" },
{ value = "t3.micro", label = "t3.micro - 2 vCPU, 1 GB RAM (Free tier)" },
{ value = "t3.small", label = "t3.small - 2 vCPU, 2 GB RAM" },
{ value = "t3.medium", label = "t3.medium - 2 vCPU, 4 GB RAM" },
{ value = "t3.large", label = "t3.large - 2 vCPU, 8 GB RAM" },
{ value = "t3.xlarge", label = "t3.xlarge - 4 vCPU, 16 GB RAM" },
{ value = "t3.2xlarge", label = "t3.2xlarge - 8 vCPU, 32 GB RAM" },
{ value = "m5.large", label = "m5.large - 2 vCPU, 8 GB RAM" },
{ value = "m5.xlarge", label = "m5.xlarge - 4 vCPU, 16 GB RAM" },
{ value = "m5.2xlarge", label = "m5.2xlarge - 8 vCPU, 32 GB RAM" },
{ value = "m5.4xlarge", label = "m5.4xlarge - 16 vCPU, 64 GB RAM" },
{ value = "c5.large", label = "c5.large - 2 vCPU, 4 GB RAM (compute optimized)" },
{ value = "c5.xlarge", label = "c5.xlarge - 4 vCPU, 8 GB RAM (compute optimized)" },
{ value = "r5.large", label = "r5.large - 2 vCPU, 16 GB RAM (memory optimized)" },
{ value = "r5.xlarge", label = "r5.xlarge - 4 vCPU, 32 GB RAM (memory optimized)" },
]
prompt = "Amazon Machine Image (AMI)"
default = "t3.medium"
required = true
help = "EC2 instance type (determines CPU, RAM, and pricing)"
nickel_path = [
"provider",
"aws_instance_type",
]
[[elements]]
name = "aws_ami"
type = "select"
prompt = "Amazon Machine Image (AMI)"
options = [
{ value = "ami-ubuntu-24-04", label = "Ubuntu 24.04 LTS (Latest)" },
{ value = "ami-ubuntu-22-04", label = "Ubuntu 22.04 LTS" },
{ value = "ami-ubuntu-20-04", label = "Ubuntu 20.04 LTS" },
{ value = "ami-debian-12", label = "Debian 12 (Bookworm)" },
{ value = "ami-debian-11", label = "Debian 11 (Bullseye)" },
{ value = "ami-amazon-linux-2", label = "Amazon Linux 2" },
{ value = "ami-amazon-linux-2023", label = "Amazon Linux 2023" },
]
default = "ami-ubuntu-24-04"
required = true
help = "Operating system image for EC2 instances"
nickel_path = [
"provider",
"aws_ami",
]
[[elements]]
default = "10.0.0.0/16"
help = "CIDR block for the VPC (e.g., 10.0.0.0/16)"
name = "aws_vpc_cidr"
nickel_path = ["provider", "aws_vpc_cidr"]
type = "text"
prompt = "VPC CIDR Block"
default = "10.0.0.0/16"
required = true
type = "text"
help = "CIDR block for the VPC (e.g., 10.0.0.0/16)"
nickel_path = [
"provider",
"aws_vpc_cidr",
]
[[elements]]
default = "10.0.1.0/24"
help = "CIDR block for the subnet (e.g., 10.0.1.0/24)"
name = "aws_subnet_cidr"
nickel_path = ["provider", "aws_subnet_cidr"]
prompt = "Subnet CIDR Block"
required = true
type = "text"
prompt = "Subnet CIDR Block"
default = "10.0.1.0/24"
required = true
help = "CIDR block for the subnet (e.g., 10.0.1.0/24)"
nickel_path = [
"provider",
"aws_subnet_cidr",
]
[[elements]]
help = "Name of the EC2 SSH key pair for instance access"
name = "aws_ssh_key_name"
nickel_path = ["provider", "aws_ssh_key_name"]
type = "text"
prompt = "SSH Key Pair Name"
required = true
type = "text"
help = "Name of the EC2 SSH key pair for instance access"
nickel_path = [
"provider",
"aws_ssh_key_name",
]

View File

@ -1,248 +1,305 @@
name = "azure_provider_fragment"
[[elements]]
border_bottom = true
border_top = true
name = "azure_header"
title = "☁️ Microsoft Azure Configuration"
type = "section_header"
title = "☁️ Microsoft Azure Configuration"
border_top = true
border_bottom = true
[[elements]]
help = "Your Azure subscription ID (GUID format)"
name = "azure_subscription_id"
nickel_path = ["provider", "azure_subscription_id"]
type = "text"
prompt = "Azure Subscription ID"
required = true
type = "text"
help = "Your Azure subscription ID (GUID format)"
nickel_path = [
"provider",
"azure_subscription_id",
]
[[elements]]
help = "Your Azure Active Directory tenant ID"
name = "azure_tenant_id"
nickel_path = ["provider", "azure_tenant_id"]
type = "text"
prompt = "Azure Tenant ID"
required = true
type = "text"
help = "Your Azure Active Directory tenant ID"
nickel_path = [
"provider",
"azure_tenant_id",
]
[[elements]]
help = "Client ID of the service principal (app registration)"
name = "azure_client_id"
nickel_path = ["provider", "azure_client_id"]
type = "text"
prompt = "Service Principal Client ID"
required = true
type = "text"
help = "Client ID of the service principal (app registration)"
nickel_path = [
"provider",
"azure_client_id",
]
[[elements]]
help = "Client secret for authentication (will be masked)"
name = "azure_client_secret"
nickel_path = ["provider", "azure_client_secret"]
type = "password"
prompt = "Service Principal Client Secret"
required = true
type = "password"
[[elements]]
default = "westeurope"
help = "Azure region where resources will be deployed"
name = "azure_location"
nickel_path = ["provider", "azure_location"]
options = [
{ value = "eastus", label = "East US - Virginia, USA" },
{ value = "eastus2", label = "East US 2 - Virginia, USA" },
{ value = "westus", label = "West US - California, USA" },
{ value = "westus2", label = "West US 2 - Washington, USA" },
{ value = "westus3", label = "West US 3 - Arizona, USA" },
{ value = "centralus", label = "Central US - Iowa, USA" },
{ value = "northcentralus", label = "North Central US - Illinois, USA" },
{ value = "southcentralus", label = "South Central US - Texas, USA" },
{ value = "northeurope", label = "North Europe - Ireland" },
{ value = "westeurope", label = "West Europe - Netherlands" },
{ value = "francecentral", label = "France Central - Paris" },
{ value = "germanywestcentral", label = "Germany West Central - Frankfurt" },
{ value = "switzerlandnorth", label = "Switzerland North - Zurich" },
{ value = "uksouth", label = "UK South - London" },
{ value = "ukwest", label = "UK West - Cardiff" },
{ value = "norwayeast", label = "Norway East - Oslo" },
{ value = "swedencentral", label = "Sweden Central - Gävle" },
{ value = "eastasia", label = "East Asia - Hong Kong" },
{ value = "southeastasia", label = "Southeast Asia - Singapore" },
{ value = "japaneast", label = "Japan East - Tokyo" },
{ value = "japanwest", label = "Japan West - Osaka" },
{ value = "koreacentral", label = "Korea Central - Seoul" },
{ value = "australiaeast", label = "Australia East - Sydney" },
{ value = "australiasoutheast", label = "Australia Southeast - Melbourne" },
{ value = "canadacentral", label = "Canada Central - Toronto" },
{ value = "canadaeast", label = "Canada East - Quebec" },
{ value = "brazilsouth", label = "Brazil South - São Paulo" },
{ value = "southafricanorth", label = "South Africa North - Johannesburg" },
{ value = "uaenorth", label = "UAE North - Dubai" },
{ value = "centralindia", label = "Central India - Pune" },
{ value = "southindia", label = "South India - Chennai" },
help = "Client secret for authentication (will be masked)"
nickel_path = [
"provider",
"azure_client_secret",
]
[[elements]]
name = "azure_location"
type = "select"
prompt = "Azure Region"
options = [
{ value = "eastus", label = "East US - Virginia, USA" },
{ value = "eastus2", label = "East US 2 - Virginia, USA" },
{ value = "westus", label = "West US - California, USA" },
{ value = "westus2", label = "West US 2 - Washington, USA" },
{ value = "westus3", label = "West US 3 - Arizona, USA" },
{ value = "centralus", label = "Central US - Iowa, USA" },
{ value = "northcentralus", label = "North Central US - Illinois, USA" },
{ value = "southcentralus", label = "South Central US - Texas, USA" },
{ value = "northeurope", label = "North Europe - Ireland" },
{ value = "westeurope", label = "West Europe - Netherlands" },
{ value = "francecentral", label = "France Central - Paris" },
{ value = "germanywestcentral", label = "Germany West Central - Frankfurt" },
{ value = "switzerlandnorth", label = "Switzerland North - Zurich" },
{ value = "uksouth", label = "UK South - London" },
{ value = "ukwest", label = "UK West - Cardiff" },
{ value = "norwayeast", label = "Norway East - Oslo" },
{ value = "swedencentral", label = "Sweden Central - Gävle" },
{ value = "eastasia", label = "East Asia - Hong Kong" },
{ value = "southeastasia", label = "Southeast Asia - Singapore" },
{ value = "japaneast", label = "Japan East - Tokyo" },
{ value = "japanwest", label = "Japan West - Osaka" },
{ value = "koreacentral", label = "Korea Central - Seoul" },
{ value = "australiaeast", label = "Australia East - Sydney" },
{ value = "australiasoutheast", label = "Australia Southeast - Melbourne" },
{ value = "canadacentral", label = "Canada Central - Toronto" },
{ value = "canadaeast", label = "Canada East - Quebec" },
{ value = "brazilsouth", label = "Brazil South - São Paulo" },
{ value = "southafricanorth", label = "South Africa North - Johannesburg" },
{ value = "uaenorth", label = "UAE North - Dubai" },
{ value = "centralindia", label = "Central India - Pune" },
{ value = "southindia", label = "South India - Chennai" },
]
default = "westeurope"
required = true
help = "Azure region where resources will be deployed"
nickel_path = [
"provider",
"azure_location",
]
prompt = "Azure Region"
required = true
type = "select"
[[elements]]
help = "Name of the Azure Resource Group (will be created if doesn't exist)"
name = "azure_resource_group_name"
nickel_path = ["provider", "azure_resource_group_name"]
type = "text"
prompt = "Resource Group name"
required = true
type = "text"
help = "Name of the Azure Resource Group (will be created if doesn't exist)"
nickel_path = [
"provider",
"azure_resource_group_name",
]
[[elements]]
default = "Standard_B2s"
help = "Azure VM size (determines CPU, RAM, and pricing)"
name = "azure_vm_size"
nickel_path = ["provider", "azure_vm_size"]
options = [
{ value = "Standard_B1s", label = "Standard_B1s - 1 vCPU, 1 GB RAM (burstable)" },
{ value = "Standard_B1ms", label = "Standard_B1ms - 1 vCPU, 2 GB RAM (burstable)" },
{ value = "Standard_B2s", label = "Standard_B2s - 2 vCPU, 4 GB RAM (burstable)" },
{ value = "Standard_B2ms", label = "Standard_B2ms - 2 vCPU, 8 GB RAM (burstable)" },
{ value = "Standard_B4ms", label = "Standard_B4ms - 4 vCPU, 16 GB RAM (burstable)" },
{ value = "Standard_D2s_v3", label = "Standard_D2s_v3 - 2 vCPU, 8 GB RAM (general purpose)" },
{ value = "Standard_D4s_v3", label = "Standard_D4s_v3 - 4 vCPU, 16 GB RAM (general purpose)" },
{ value = "Standard_D8s_v3", label = "Standard_D8s_v3 - 8 vCPU, 32 GB RAM (general purpose)" },
{ value = "Standard_E2s_v3", label = "Standard_E2s_v3 - 2 vCPU, 16 GB RAM (memory optimized)" },
{ value = "Standard_E4s_v3", label = "Standard_E4s_v3 - 4 vCPU, 32 GB RAM (memory optimized)" },
{ value = "Standard_F2s_v2", label = "Standard_F2s_v2 - 2 vCPU, 4 GB RAM (compute optimized)" },
{ value = "Standard_F4s_v2", label = "Standard_F4s_v2 - 4 vCPU, 8 GB RAM (compute optimized)" },
]
type = "select"
prompt = "Virtual Machine Size"
options = [
{ value = "Standard_B1s", label = "Standard_B1s - 1 vCPU, 1 GB RAM (burstable)" },
{ value = "Standard_B1ms", label = "Standard_B1ms - 1 vCPU, 2 GB RAM (burstable)" },
{ value = "Standard_B2s", label = "Standard_B2s - 2 vCPU, 4 GB RAM (burstable)" },
{ value = "Standard_B2ms", label = "Standard_B2ms - 2 vCPU, 8 GB RAM (burstable)" },
{ value = "Standard_B4ms", label = "Standard_B4ms - 4 vCPU, 16 GB RAM (burstable)" },
{ value = "Standard_D2s_v3", label = "Standard_D2s_v3 - 2 vCPU, 8 GB RAM (general purpose)" },
{ value = "Standard_D4s_v3", label = "Standard_D4s_v3 - 4 vCPU, 16 GB RAM (general purpose)" },
{ value = "Standard_D8s_v3", label = "Standard_D8s_v3 - 8 vCPU, 32 GB RAM (general purpose)" },
{ value = "Standard_E2s_v3", label = "Standard_E2s_v3 - 2 vCPU, 16 GB RAM (memory optimized)" },
{ value = "Standard_E4s_v3", label = "Standard_E4s_v3 - 4 vCPU, 32 GB RAM (memory optimized)" },
{ value = "Standard_F2s_v2", label = "Standard_F2s_v2 - 2 vCPU, 4 GB RAM (compute optimized)" },
{ value = "Standard_F4s_v2", label = "Standard_F4s_v2 - 4 vCPU, 8 GB RAM (compute optimized)" },
]
default = "Standard_B2s"
required = true
type = "select"
help = "Azure VM size (determines CPU, RAM, and pricing)"
nickel_path = [
"provider",
"azure_vm_size",
]
[[elements]]
default = "Canonical"
help = "Publisher of the VM image"
name = "azure_image_publisher"
nickel_path = ["provider", "azure_image_publisher"]
options = [
{ value = "Canonical", label = "Canonical (Ubuntu)" },
{ value = "Debian", label = "Debian (Debian Linux)" },
{ value = "RedHat", label = "Red Hat (RHEL)" },
{ value = "OpenLogic", label = "OpenLogic (CentOS)" },
{ value = "AlmaLinux", label = "AlmaLinux Foundation" },
{ value = "MicrosoftWindowsServer", label = "Microsoft (Windows Server)" },
]
type = "select"
prompt = "Image Publisher"
options = [
{ value = "Canonical", label = "Canonical (Ubuntu)" },
{ value = "Debian", label = "Debian (Debian Linux)" },
{ value = "RedHat", label = "Red Hat (RHEL)" },
{ value = "OpenLogic", label = "OpenLogic (CentOS)" },
{ value = "AlmaLinux", label = "AlmaLinux Foundation" },
{ value = "MicrosoftWindowsServer", label = "Microsoft (Windows Server)" },
]
default = "Canonical"
required = true
type = "select"
help = "Publisher of the VM image"
nickel_path = [
"provider",
"azure_image_publisher",
]
[[elements]]
default = "0001-com-ubuntu-server-jammy"
help = "Specific offer from the publisher (must match publisher)"
name = "azure_image_offer"
nickel_path = ["provider", "azure_image_offer"]
options = [
{ value = "0001-com-ubuntu-server-jammy", label = "Ubuntu Server 22.04 LTS" },
{ value = "0001-com-ubuntu-server-focal", label = "Ubuntu Server 20.04 LTS" },
{ value = "debian-11", label = "Debian 11" },
{ value = "debian-12", label = "Debian 12" },
{ value = "RHEL", label = "Red Hat Enterprise Linux" },
{ value = "CentOS", label = "CentOS" },
{ value = "almalinux", label = "AlmaLinux" },
]
type = "select"
prompt = "Image Offer"
options = [
{ value = "0001-com-ubuntu-server-jammy", label = "Ubuntu Server 22.04 LTS" },
{ value = "0001-com-ubuntu-server-focal", label = "Ubuntu Server 20.04 LTS" },
{ value = "debian-11", label = "Debian 11" },
{ value = "debian-12", label = "Debian 12" },
{ value = "RHEL", label = "Red Hat Enterprise Linux" },
{ value = "CentOS", label = "CentOS" },
{ value = "almalinux", label = "AlmaLinux" },
]
default = "0001-com-ubuntu-server-jammy"
required = true
type = "select"
help = "Specific offer from the publisher (must match publisher)"
nickel_path = [
"provider",
"azure_image_offer",
]
[[elements]]
default = "22_04-lts-gen2"
help = "SKU (version) of the image (must match offer)"
name = "azure_image_sku"
nickel_path = ["provider", "azure_image_sku"]
options = [
{ value = "22_04-lts-gen2", label = "Ubuntu 22.04 LTS Gen2" },
{ value = "20_04-lts-gen2", label = "Ubuntu 20.04 LTS Gen2" },
{ value = "11-gen2", label = "Debian 11 Gen2" },
{ value = "12-gen2", label = "Debian 12 Gen2" },
{ value = "9-lvm-gen2", label = "RHEL 9 Gen2" },
{ value = "8-lvm-gen2", label = "RHEL 8 Gen2" },
]
type = "select"
prompt = "Image SKU"
required = true
type = "select"
[[elements]]
default = "30"
help = "Size of the OS disk in GB (minimum 30 GB)"
name = "azure_os_disk_size_gb"
nickel_path = ["provider", "azure_os_disk_size_gb"]
prompt = "OS Disk size (GB)"
required = true
type = "text"
[[elements]]
default = "StandardSSD_LRS"
help = "Type of managed disk for OS volume"
name = "azure_os_disk_type"
nickel_path = ["provider", "azure_os_disk_type"]
options = [
{ value = "Standard_LRS", label = "Standard_LRS - Standard HDD (locally redundant)" },
{ value = "StandardSSD_LRS", label = "StandardSSD_LRS - Standard SSD (locally redundant)" },
{ value = "Premium_LRS", label = "Premium_LRS - Premium SSD (locally redundant)" },
{ value = "UltraSSD_LRS", label = "UltraSSD_LRS - Ultra SSD (highest performance)" },
{ value = "22_04-lts-gen2", label = "Ubuntu 22.04 LTS Gen2" },
{ value = "20_04-lts-gen2", label = "Ubuntu 20.04 LTS Gen2" },
{ value = "11-gen2", label = "Debian 11 Gen2" },
{ value = "12-gen2", label = "Debian 12 Gen2" },
{ value = "9-lvm-gen2", label = "RHEL 9 Gen2" },
{ value = "8-lvm-gen2", label = "RHEL 8 Gen2" },
]
prompt = "OS Disk type"
default = "22_04-lts-gen2"
required = true
type = "select"
help = "SKU (version) of the image (must match offer)"
nickel_path = [
"provider",
"azure_image_sku",
]
[[elements]]
default = "azureuser"
help = "Administrator username for the VM"
name = "azure_admin_username"
nickel_path = ["provider", "azure_admin_username"]
prompt = "Admin username"
required = true
name = "azure_os_disk_size_gb"
type = "text"
prompt = "OS Disk size (GB)"
default = "30"
required = true
help = "Size of the OS disk in GB (minimum 30 GB)"
nickel_path = [
"provider",
"azure_os_disk_size_gb",
]
[[elements]]
name = "azure_os_disk_type"
type = "select"
prompt = "OS Disk type"
options = [
{ value = "Standard_LRS", label = "Standard_LRS - Standard HDD (locally redundant)" },
{ value = "StandardSSD_LRS", label = "StandardSSD_LRS - Standard SSD (locally redundant)" },
{ value = "Premium_LRS", label = "Premium_LRS - Premium SSD (locally redundant)" },
{ value = "UltraSSD_LRS", label = "UltraSSD_LRS - Ultra SSD (highest performance)" },
]
default = "StandardSSD_LRS"
required = true
help = "Type of managed disk for OS volume"
nickel_path = [
"provider",
"azure_os_disk_type",
]
[[elements]]
name = "azure_admin_username"
type = "text"
prompt = "Admin username"
default = "azureuser"
required = true
help = "Administrator username for the VM"
nickel_path = [
"provider",
"azure_admin_username",
]
[[elements]]
help = "SSH public key for authentication (e.g., ~/.ssh/id_rsa.pub content)"
name = "azure_ssh_public_key"
nickel_path = ["provider", "azure_ssh_public_key"]
type = "text"
prompt = "SSH public key"
required = true
type = "text"
help = "SSH public key for authentication (e.g., ~/.ssh/id_rsa.pub content)"
nickel_path = [
"provider",
"azure_ssh_public_key",
]
[[elements]]
default = "10.0.0.0/16"
help = "Address space for the Virtual Network (CIDR notation)"
name = "azure_vnet_address_space"
nickel_path = ["provider", "azure_vnet_address_space"]
type = "text"
prompt = "Virtual Network address space"
default = "10.0.0.0/16"
required = true
type = "text"
help = "Address space for the Virtual Network (CIDR notation)"
nickel_path = [
"provider",
"azure_vnet_address_space",
]
[[elements]]
default = "10.0.1.0/24"
help = "Address prefix for the subnet (CIDR notation)"
name = "azure_subnet_address_prefix"
nickel_path = ["provider", "azure_subnet_address_prefix"]
prompt = "Subnet address prefix"
required = true
type = "text"
prompt = "Subnet address prefix"
default = "10.0.1.0/24"
required = true
help = "Address prefix for the subnet (CIDR notation)"
nickel_path = [
"provider",
"azure_subnet_address_prefix",
]
[[elements]]
name = "azure_enable_public_ip"
type = "confirm"
prompt = "Assign public IP address?"
default = true
help = "Assign a public IP to the VM for external access"
name = "azure_enable_public_ip"
nickel_path = ["provider", "azure_enable_public_ip"]
prompt = "Assign public IP address?"
type = "confirm"
nickel_path = [
"provider",
"azure_enable_public_ip",
]
[[elements]]
name = "azure_enable_accelerated_networking"
type = "confirm"
prompt = "Enable accelerated networking?"
default = false
help = "Enable SR-IOV for better network performance (requires compatible VM size)"
name = "azure_enable_accelerated_networking"
nickel_path = ["provider", "azure_enable_accelerated_networking"]
prompt = "Enable accelerated networking?"
type = "confirm"
nickel_path = [
"provider",
"azure_enable_accelerated_networking",
]
[[elements]]
name = "azure_enable_boot_diagnostics"
type = "confirm"
prompt = "Enable boot diagnostics?"
default = true
help = "Enable boot diagnostics for troubleshooting"
nickel_path = [
"provider",
"azure_enable_boot_diagnostics",
]

@@ -1,211 +1,256 @@
name = "gcp_provider_fragment"
[[elements]]
name = "gcp_header"
type = "section_header"
title = "☁️ Google Cloud Platform (GCP) Configuration"
border_top = true
border_bottom = true
[[elements]]
name = "gcp_project_id"
type = "text"
prompt = "GCP Project ID"
required = true
help = "Your Google Cloud Project ID (e.g., my-project-123456)"
nickel_path = [
"provider",
"gcp_project_id",
]
[[elements]]
name = "gcp_credentials_file"
type = "text"
prompt = "Service Account credentials file path"
required = true
help = "Path to service account JSON key file (e.g., ~/.gcp/credentials.json)"
nickel_path = [
"provider",
"gcp_credentials_file",
]
[[elements]]
name = "gcp_region"
type = "select"
prompt = "GCP Region"
options = [
{ value = "us-central1", label = "US-CENTRAL1 - Iowa, USA" },
{ value = "us-east1", label = "US-EAST1 - South Carolina, USA" },
{ value = "us-east4", label = "US-EAST4 - Northern Virginia, USA" },
{ value = "us-west1", label = "US-WEST1 - Oregon, USA" },
{ value = "us-west2", label = "US-WEST2 - Los Angeles, USA" },
{ value = "us-west3", label = "US-WEST3 - Salt Lake City, USA" },
{ value = "us-west4", label = "US-WEST4 - Las Vegas, USA" },
{ value = "europe-west1", label = "EUROPE-WEST1 - Belgium" },
{ value = "europe-west2", label = "EUROPE-WEST2 - London, UK" },
{ value = "europe-west3", label = "EUROPE-WEST3 - Frankfurt, Germany" },
{ value = "europe-west4", label = "EUROPE-WEST4 - Netherlands" },
{ value = "europe-west6", label = "EUROPE-WEST6 - Zurich, Switzerland" },
{ value = "europe-north1", label = "EUROPE-NORTH1 - Finland" },
{ value = "asia-east1", label = "ASIA-EAST1 - Taiwan" },
{ value = "asia-east2", label = "ASIA-EAST2 - Hong Kong" },
{ value = "asia-northeast1", label = "ASIA-NORTHEAST1 - Tokyo, Japan" },
{ value = "asia-northeast2", label = "ASIA-NORTHEAST2 - Osaka, Japan" },
{ value = "asia-northeast3", label = "ASIA-NORTHEAST3 - Seoul, South Korea" },
{ value = "asia-south1", label = "ASIA-SOUTH1 - Mumbai, India" },
{ value = "asia-southeast1", label = "ASIA-SOUTHEAST1 - Singapore" },
{ value = "asia-southeast2", label = "ASIA-SOUTHEAST2 - Jakarta, Indonesia" },
{ value = "australia-southeast1", label = "AUSTRALIA-SOUTHEAST1 - Sydney, Australia" },
{ value = "southamerica-east1", label = "SOUTHAMERICA-EAST1 - São Paulo, Brazil" },
]
default = "europe-west3"
required = true
help = "GCP region where resources will be deployed"
nickel_path = [
"provider",
"gcp_region",
]
[[elements]]
name = "gcp_zone"
type = "select"
prompt = "Availability Zone"
options = [
{ value = "a", label = "Zone A (primary)" },
{ value = "b", label = "Zone B" },
{ value = "c", label = "Zone C" },
{ value = "d", label = "Zone D (if available)" },
]
default = "a"
required = true
help = "Zone within the selected region (e.g., 'a' for europe-west3-a)"
nickel_path = [
"provider",
"gcp_zone",
]
[[elements]]
name = "gcp_machine_type"
type = "select"
prompt = "Machine Type"
options = [
{ value = "e2-micro", label = "e2-micro - 2 vCPU (shared), 1 GB RAM (Free tier)" },
{ value = "e2-small", label = "e2-small - 2 vCPU (shared), 2 GB RAM" },
{ value = "e2-medium", label = "e2-medium - 2 vCPU (shared), 4 GB RAM" },
{ value = "e2-standard-2", label = "e2-standard-2 - 2 vCPU, 8 GB RAM" },
{ value = "e2-standard-4", label = "e2-standard-4 - 4 vCPU, 16 GB RAM" },
{ value = "e2-standard-8", label = "e2-standard-8 - 8 vCPU, 32 GB RAM" },
{ value = "n1-standard-1", label = "n1-standard-1 - 1 vCPU, 3.75 GB RAM" },
{ value = "n1-standard-2", label = "n1-standard-2 - 2 vCPU, 7.5 GB RAM" },
{ value = "n1-standard-4", label = "n1-standard-4 - 4 vCPU, 15 GB RAM" },
{ value = "n2-standard-2", label = "n2-standard-2 - 2 vCPU, 8 GB RAM (newer generation)" },
{ value = "n2-standard-4", label = "n2-standard-4 - 4 vCPU, 16 GB RAM (newer generation)" },
{ value = "n2-standard-8", label = "n2-standard-8 - 8 vCPU, 32 GB RAM (newer generation)" },
{ value = "n2-highmem-2", label = "n2-highmem-2 - 2 vCPU, 16 GB RAM (memory optimized)" },
{ value = "n2-highmem-4", label = "n2-highmem-4 - 4 vCPU, 32 GB RAM (memory optimized)" },
{ value = "c2-standard-4", label = "c2-standard-4 - 4 vCPU, 16 GB RAM (compute optimized)" },
{ value = "c2-standard-8", label = "c2-standard-8 - 8 vCPU, 32 GB RAM (compute optimized)" },
]
default = "e2-medium"
required = true
help = "GCP Compute Engine machine type (determines CPU, RAM, and pricing)"
nickel_path = [
"provider",
"gcp_machine_type",
]
[[elements]]
name = "gcp_image_family"
type = "select"
prompt = "Image Family"
options = [
{ value = "ubuntu-2404-lts", label = "Ubuntu 24.04 LTS (Latest)" },
{ value = "ubuntu-2204-lts", label = "Ubuntu 22.04 LTS" },
{ value = "ubuntu-2004-lts", label = "Ubuntu 20.04 LTS" },
{ value = "debian-12", label = "Debian 12 (Bookworm)" },
{ value = "debian-11", label = "Debian 11 (Bullseye)" },
{ value = "rocky-linux-9", label = "Rocky Linux 9" },
{ value = "rocky-linux-8", label = "Rocky Linux 8" },
{ value = "rhel-9", label = "Red Hat Enterprise Linux 9" },
{ value = "rhel-8", label = "Red Hat Enterprise Linux 8" },
{ value = "centos-stream-9", label = "CentOS Stream 9" },
]
default = "ubuntu-2404-lts"
required = true
help = "Operating system image family for the instance"
nickel_path = [
"provider",
"gcp_image_family",
]
[[elements]]
name = "gcp_image_project"
type = "select"
prompt = "Image Project"
options = [
{ value = "ubuntu-os-cloud", label = "ubuntu-os-cloud (Ubuntu images)" },
{ value = "debian-cloud", label = "debian-cloud (Debian images)" },
{ value = "rocky-linux-cloud", label = "rocky-linux-cloud (Rocky Linux)" },
{ value = "rhel-cloud", label = "rhel-cloud (Red Hat)" },
{ value = "centos-cloud", label = "centos-cloud (CentOS)" },
]
default = "ubuntu-os-cloud"
required = true
help = "GCP project that provides the image"
nickel_path = [
"provider",
"gcp_image_project",
]
[[elements]]
name = "gcp_disk_size_gb"
type = "text"
prompt = "Boot disk size (GB)"
default = "20"
required = true
help = "Boot disk size in GB (minimum 10 GB)"
nickel_path = [
"provider",
"gcp_disk_size_gb",
]
[[elements]]
name = "gcp_disk_type"
type = "select"
prompt = "Disk type"
options = [
{ value = "pd-standard", label = "pd-standard - Standard persistent disk (HDD)" },
{ value = "pd-balanced", label = "pd-balanced - Balanced persistent disk (SSD, recommended)" },
{ value = "pd-ssd", label = "pd-ssd - SSD persistent disk (high performance)" },
{ value = "pd-extreme", label = "pd-extreme - Extreme persistent disk (highest IOPS)" },
]
default = "pd-balanced"
required = true
help = "Type of persistent disk for boot volume"
nickel_path = [
"provider",
"gcp_disk_type",
]
[[elements]]
name = "gcp_network_name"
type = "text"
prompt = "VPC Network name"
default = "default"
required = true
help = "Name of the VPC network (use 'default' for default network)"
nickel_path = [
"provider",
"gcp_network_name",
]
[[elements]]
name = "gcp_subnetwork_name"
type = "text"
prompt = "Subnetwork name"
default = "default"
required = true
help = "Name of the subnetwork within the VPC"
nickel_path = [
"provider",
"gcp_subnetwork_name",
]
[[elements]]
name = "gcp_enable_external_ip"
type = "confirm"
prompt = "Enable external IP address?"
default = true
help = "Assign a public IP address to the instance"
nickel_path = [
"provider",
"gcp_enable_external_ip",
]
[[elements]]
name = "gcp_preemptible"
type = "confirm"
prompt = "Use preemptible instance?"
default = false
help = "Preemptible instances are cheaper but can be stopped by GCP (not for production)"
nickel_path = [
"provider",
"gcp_preemptible",
]
[[elements]]
name = "gcp_enable_deletion_protection"
type = "confirm"
prompt = "Enable deletion protection?"
default = false
help = "Prevent accidental deletion of the instance"
nickel_path = [
"provider",
"gcp_enable_deletion_protection",
]
[[elements]]
name = "gcp_ssh_keys"
type = "text"
prompt = "SSH public key (optional)"
required = false
help = "SSH public key for instance access (leave empty to skip)"
nickel_path = [
"provider",
"gcp_ssh_keys",
]

@@ -1,77 +1,89 @@
name = "hetzner_provider_fragment"
[[elements]]
name = "hetzner_header"
type = "section_header"
title = "☁️ Hetzner Cloud Configuration"
border_top = true
border_bottom = true
[[elements]]
name = "hetzner_api_token"
type = "password"
prompt = "Hetzner API token"
required = true
help = "Your Hetzner Cloud API token for authentication"
nickel_path = [
"provider",
"hetzner_api_token",
]
[[elements]]
name = "hetzner_server_type"
type = "select"
prompt = "Server type"
options = [
{ value = "cx11", label = "CX11 - 1 vCPU, 2 GB RAM" },
{ value = "cx21", label = "CX21 - 2 vCPU, 4 GB RAM" },
{ value = "cx31", label = "CX31 - 2 vCPU, 8 GB RAM" },
{ value = "cx41", label = "CX41 - 4 vCPU, 16 GB RAM" },
{ value = "cx51", label = "CX51 - 8 vCPU, 32 GB RAM" },
{ value = "cpx11", label = "CPX11 - 2 vCPU (dedicated), 2 GB RAM" },
{ value = "cpx21", label = "CPX21 - 3 vCPU (dedicated), 4 GB RAM" },
{ value = "cpx31", label = "CPX31 - 4 vCPU (dedicated), 8 GB RAM" },
{ value = "cx22", label = "CX22 - 2 vCPU, 4 GB RAM" },
{ value = "cx32", label = "CX32 - 4 vCPU, 8 GB RAM" },
{ value = "cx42", label = "CX42 - 8 vCPU, 16 GB RAM" },
{ value = "cx52", label = "CX52 - 16 vCPU, 32 GB RAM" },
]
default = "cx22"
required = true
help = "Hetzner Cloud server instance type"
nickel_path = [
"provider",
"hetzner_server_type",
]
[[elements]]
name = "hetzner_location"
type = "select"
prompt = "Datacenter location"
options = [
{ value = "fsn1", label = "FSN1 - Falkenstein, Germany" },
{ value = "fsn1-dc14", label = "FSN1-DC14 - Falkenstein 14, Germany" },
{ value = "nbg1", label = "NBG1 - Nuremberg, Germany" },
{ value = "nbg1-dc3", label = "NBG1-DC3 - Nuremberg 3, Germany" },
{ value = "hel1", label = "HEL1 - Helsinki, Finland" },
{ value = "hel1-dc8", label = "HEL1-DC8 - Helsinki 8, Finland" },
{ value = "ash", label = "ASH - Ashburn, Virginia USA" },
{ value = "ash-dc1", label = "ASH-DC1 - Ashburn 1, Virginia USA" },
{ value = "hil", label = "HIL - Hillsboro, Oregon USA" },
{ value = "hil-dc1", label = "HIL-DC1 - Hillsboro 1, Oregon USA" },
]
default = "nbg1"
required = true
help = "Hetzner datacenter location"
nickel_path = [
"provider",
"hetzner_location",
]
[[elements]]
name = "hetzner_image"
type = "select"
prompt = "Operating system image"
options = [
{ value = "ubuntu-24.04", label = "Ubuntu 24.04 LTS (Latest)" },
{ value = "ubuntu-22.04", label = "Ubuntu 22.04 LTS" },
{ value = "ubuntu-20.04", label = "Ubuntu 20.04 LTS" },
{ value = "debian-12", label = "Debian 12 (Bookworm)" },
{ value = "debian-11", label = "Debian 11 (Bullseye)" },
{ value = "debian-10", label = "Debian 10 (Buster)" },
]
default = "ubuntu-24.04"
required = true
help = "OS image to use for the server"
nickel_path = [
"provider",
"hetzner_image",
]

@@ -1,193 +1,247 @@
name = "lxd_provider_fragment"
[[elements]]
name = "lxd_header"
type = "section_header"
title = "🖥️ LXD Container/VM Configuration"
border_top = true
border_bottom = true
[[elements]]
name = "lxd_remote"
type = "select"
prompt = "LXD Remote"
options = [
{ value = "local", label = "local - Local LXD server" },
{ value = "remote", label = "remote - Remote LXD server" },
]
default = "local"
required = true
help = "Use local LXD or connect to remote LXD server"
nickel_path = [
"provider",
"lxd_remote",
]
[[elements]]
name = "lxd_remote_address"
type = "text"
prompt = "Remote server address (if remote)"
required = false
help = "Address of remote LXD server (e.g., https://lxd.example.com:8443)"
nickel_path = [
"provider",
"lxd_remote_address",
]
[[elements]]
name = "lxd_remote_password"
type = "password"
prompt = "Remote server password (if remote)"
required = false
help = "Trust password for remote LXD server (will be masked)"
nickel_path = [
"provider",
"lxd_remote_password",
]
[[elements]]
name = "lxd_instance_type"
type = "select"
prompt = "Instance Type"
options = [
{ value = "container", label = "Container - Lightweight, shared kernel" },
{ value = "virtual-machine", label = "Virtual Machine - Full VM with own kernel" },
]
default = "container"
required = true
help = "Run as container (fast, lightweight) or virtual machine (isolated)"
nickel_path = [
"provider",
"lxd_instance_type",
]
[[elements]]
name = "lxd_image"
type = "select"
prompt = "Base Image"
options = [
{ value = "ubuntu:24.04", label = "Ubuntu 24.04 LTS (Noble)" },
{ value = "ubuntu:22.04", label = "Ubuntu 22.04 LTS (Jammy)" },
{ value = "ubuntu:20.04", label = "Ubuntu 20.04 LTS (Focal)" },
{ value = "debian:12", label = "Debian 12 (Bookworm)" },
{ value = "debian:11", label = "Debian 11 (Bullseye)" },
{ value = "alpine:3.19", label = "Alpine Linux 3.19 (minimal)" },
{ value = "alpine:3.18", label = "Alpine Linux 3.18" },
{ value = "rockylinux:9", label = "Rocky Linux 9" },
{ value = "rockylinux:8", label = "Rocky Linux 8" },
{ value = "archlinux", label = "Arch Linux (rolling release)" },
]
default = "ubuntu:24.04"
required = true
help = "Operating system image for the instance"
nickel_path = [
"provider",
"lxd_image",
]
[[elements]]
name = "lxd_instance_name"
type = "text"
prompt = "Instance name"
required = true
help = "Unique name for the LXD instance"
nickel_path = [
"provider",
"lxd_instance_name",
]
[[elements]]
name = "lxd_cpu_limit"
type = "text"
prompt = "CPU limit (cores)"
default = "2"
required = false
help = "Number of CPU cores (e.g., 2 or leave empty for unlimited)"
nickel_path = [
"provider",
"lxd_cpu_limit",
]
[[elements]]
name = "lxd_memory_limit"
type = "text"
prompt = "Memory limit"
default = "2GB"
required = false
help = "Memory limit (e.g., 2GB, 4GB, or leave empty for unlimited)"
nickel_path = [
"provider",
"lxd_memory_limit",
]
[[elements]]
name = "lxd_disk_size"
type = "text"
prompt = "Root disk size"
default = "10GB"
required = false
help = "Root disk size (e.g., 10GB, 20GB, or leave empty for default)"
nickel_path = [
"provider",
"lxd_disk_size",
]
[[elements]]
name = "lxd_storage_pool"
type = "select"
prompt = "Storage pool"
options = [
{ value = "default", label = "default - Default storage pool" },
{ value = "dir", label = "dir - Directory-backed pool" },
{ value = "zfs", label = "zfs - ZFS pool (best performance)" },
{ value = "btrfs", label = "btrfs - Btrfs pool" },
{ value = "lvm", label = "lvm - LVM pool" },
]
default = "default"
required = true
help = "Storage pool for the instance root disk"
nickel_path = [
"provider",
"lxd_storage_pool",
]
[[elements]]
name = "lxd_network"
type = "select"
prompt = "Network"
options = [
{ value = "lxdbr0", label = "lxdbr0 - Default LXD bridge (NAT)" },
{ value = "host", label = "host - Direct host networking" },
{ value = "macvlan", label = "macvlan - MAC VLAN (bridge to physical)" },
]
default = "lxdbr0"
required = true
help = "Network configuration for the instance"
nickel_path = [
"provider",
"lxd_network",
]
[[elements]]
name = "lxd_ipv4_address"
type = "text"
prompt = "Static IPv4 address (optional)"
required = false
help = "Assign static IPv4 (e.g., 10.0.0.100) or leave empty for DHCP"
nickel_path = [
"provider",
"lxd_ipv4_address",
]
[[elements]]
name = "lxd_ipv6_address"
type = "text"
prompt = "Static IPv6 address (optional)"
required = false
help = "Assign static IPv6 or leave empty for auto"
nickel_path = [
"provider",
"lxd_ipv6_address",
]
[[elements]]
name = "lxd_profiles"
type = "text"
prompt = "Additional profiles (comma-separated)"
default = "default"
required = false
help = "LXD profiles to apply (e.g., default,docker)"
nickel_path = [
"provider",
"lxd_profiles",
]
[[elements]]
name = "lxd_enable_nesting"
type = "confirm"
prompt = "Enable nesting (for Docker/LXD inside)?"
default = false
help = "Allow running containers/VMs inside this instance"
nickel_path = [
"provider",
"lxd_enable_nesting",
]
[[elements]]
name = "lxd_privileged"
type = "confirm"
prompt = "Run as privileged container?"
default = false
help = "Run container without user namespace isolation (less secure)"
nickel_path = [
"provider",
"lxd_privileged",
]
[[elements]]
name = "lxd_autostart"
type = "confirm"
prompt = "Auto-start on boot?"
default = true
help = "Automatically start instance when LXD daemon starts"
nickel_path = [
"provider",
"lxd_autostart",
]
[[elements]]
name = "lxd_cloud_init"
type = "text"
prompt = "Cloud-init user data (optional)"
required = false
help = "Path to cloud-init configuration file (leave empty to skip)"
nickel_path = [
"provider",
"lxd_cloud_init",
]

@@ -1,122 +1,149 @@
name = "upcloud_provider_fragment"
[[elements]]
name = "upcloud_header"
type = "section_header"
title = "☁️ UpCloud Configuration"
border_top = true
border_bottom = true
[[elements]]
name = "upcloud_username"
type = "text"
prompt = "UpCloud username"
required = true
help = "Your UpCloud account username for API authentication"
nickel_path = [
"provider",
"upcloud_username",
]
[[elements]]
name = "upcloud_password"
type = "password"
prompt = "UpCloud password"
required = true
help = "Your UpCloud account password (will be masked)"
nickel_path = [
"provider",
"upcloud_password",
]
[[elements]]
name = "upcloud_zone"
type = "select"
prompt = "Availability zone"
options = [
{ value = "fi-hel1", label = "FI-HEL1 - Helsinki, Finland" },
{ value = "fi-hel2", label = "FI-HEL2 - Helsinki, Finland (Secondary)" },
{ value = "de-fra1", label = "DE-FRA1 - Frankfurt, Germany" },
{ value = "uk-lon1", label = "UK-LON1 - London, United Kingdom" },
{ value = "nl-ams1", label = "NL-AMS1 - Amsterdam, Netherlands" },
{ value = "us-chi1", label = "US-CHI1 - Chicago, USA" },
{ value = "us-nyc1", label = "US-NYC1 - New York, USA" },
{ value = "us-sjo1", label = "US-SJO1 - San Jose, USA" },
{ value = "sg-sin1", label = "SG-SIN1 - Singapore" },
{ value = "au-syd1", label = "AU-SYD1 - Sydney, Australia" },
{ value = "es-mad1", label = "ES-MAD1 - Madrid, Spain" },
{ value = "pl-waw1", label = "PL-WAW1 - Warsaw, Poland" },
]
default = "de-fra1"
required = true
help = "UpCloud zone where resources will be deployed"
nickel_path = [
"provider",
"upcloud_zone",
]
[[elements]]
name = "upcloud_plan"
type = "select"
prompt = "Server plan"
options = [
{ value = "1xCPU-1GB", label = "1xCPU-1GB - 1 vCPU, 1 GB RAM, 25 GB SSD" },
{ value = "1xCPU-2GB", label = "1xCPU-2GB - 1 vCPU, 2 GB RAM, 50 GB SSD" },
{ value = "2xCPU-4GB", label = "2xCPU-4GB - 2 vCPU, 4 GB RAM, 80 GB SSD" },
{ value = "4xCPU-8GB", label = "4xCPU-8GB - 4 vCPU, 8 GB RAM, 160 GB SSD" },
{ value = "6xCPU-16GB", label = "6xCPU-16GB - 6 vCPU, 16 GB RAM, 320 GB SSD" },
{ value = "8xCPU-32GB", label = "8xCPU-32GB - 8 vCPU, 32 GB RAM, 640 GB SSD" },
{ value = "12xCPU-48GB", label = "12xCPU-48GB - 12 vCPU, 48 GB RAM, 960 GB SSD" },
{ value = "16xCPU-64GB", label = "16xCPU-64GB - 16 vCPU, 64 GB RAM, 1280 GB SSD" },
{ value = "20xCPU-96GB", label = "20xCPU-96GB - 20 vCPU, 96 GB RAM, 1920 GB SSD" },
{ value = "20xCPU-128GB", label = "20xCPU-128GB - 20 vCPU, 128 GB RAM, 2048 GB SSD" },
]
default = "2xCPU-4GB"
required = true
help = "UpCloud server plan (determines CPU, RAM, and storage)"
nickel_path = [
"provider",
"upcloud_plan",
]
[[elements]]
name = "upcloud_template"
type = "select"
prompt = "Operating system template"
options = [
{ value = "Ubuntu Server 24.04 LTS (Noble Numbat)", label = "Ubuntu 24.04 LTS (Latest)" },
{ value = "Ubuntu Server 22.04 LTS (Jammy Jellyfish)", label = "Ubuntu 22.04 LTS" },
{ value = "Ubuntu Server 20.04 LTS (Focal Fossa)", label = "Ubuntu 20.04 LTS" },
{ value = "Debian 12 (Bookworm)", label = "Debian 12 (Bookworm)" },
{ value = "Debian 11 (Bullseye)", label = "Debian 11 (Bullseye)" },
{ value = "Debian 10 (Buster)", label = "Debian 10 (Buster)" },
{ value = "Rocky Linux 9", label = "Rocky Linux 9" },
{ value = "Rocky Linux 8", label = "Rocky Linux 8" },
{ value = "AlmaLinux 9", label = "AlmaLinux 9" },
{ value = "AlmaLinux 8", label = "AlmaLinux 8" },
]
default = "Ubuntu Server 24.04 LTS (Noble Numbat)"
required = true
help = "Operating system template for the server"
nickel_path = [
"provider",
"upcloud_template",
]
[[elements]]
name = "upcloud_hostname"
type = "text"
prompt = "Server hostname"
required = true
help = "Hostname for the UpCloud server"
nickel_path = [
"provider",
"upcloud_hostname",
]
[[elements]]
name = "upcloud_storage_size"
type = "text"
prompt = "Storage size (GB)"
default = "25"
required = true
help = "Additional storage size in GB (beyond plan default)"
nickel_path = [
"provider",
"upcloud_storage_size",
]
[[elements]]
name = "upcloud_private_networking"
type = "confirm"
prompt = "Enable private networking?"
default = true
help = "Enable UpCloud private networking (SDN) for this server"
nickel_path = [
"provider",
"upcloud_private_networking",
]
[[elements]]
name = "upcloud_backups"
type = "confirm"
prompt = "Enable automated backups?"
default = false
help = "Enable automated daily backups (additional cost)"
nickel_path = [
"provider",
"upcloud_backups",
]

@@ -1,44 +1,56 @@
name = "ssh_fragment"
[[elements]]
name = "ssh_header"
type = "section_header"
title = "🔐 SSH Credentials"
border_top = true
border_bottom = true
[[elements]]
name = "ssh_private_key_path"
type = "text"
prompt = "Private key path"
placeholder = "~/.ssh/id_rsa"
required = true
help = "Absolute or relative path to SSH private key file"
nickel_path = [
"ssh_credentials",
"private_key_path",
]
[[elements]]
name = "ssh_public_key_path"
type = "text"
prompt = "Public key path"
placeholder = "~/.ssh/id_rsa.pub"
required = true
help = "Absolute or relative path to SSH public key file"
nickel_path = [
"ssh_credentials",
"public_key_path",
]
[[elements]]
name = "ssh_username"
type = "text"
prompt = "SSH username"
placeholder = "torrust"
default = "torrust"
help = "Linux username for SSH access. Defaults to 'torrust'. Must be 1-32 characters and start with a letter or underscore."
nickel_path = [
"ssh_credentials",
"username",
]
[[elements]]
name = "ssh_port"
type = "text"
prompt = "SSH port"
placeholder = "22"
default = "22"
help = "SSH port number (default 22). Must be between 1 and 65535."
nickel_path = [
"ssh_credentials",
"port",
]
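# Each element's nickel_path names the record path its answer is written to in the
# generated Nickel config; e.g. ssh_port above lands at ssh_credentials.port.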

@@ -1,99 +0,0 @@
---
# {{ project_name }} - Ansible Deployment Playbook
# Generated by typedialog-prov-gen
- name: Deploy {{ project_name }}
hosts: app_servers
become: yes
vars:
project_name: "{{ project_name }}"
deploy_user: "{{ deploy_user | default(value="deploy") }}"
app_dir: "/opt/{{ project_name }}"
tasks:
- name: Update apt cache
apt:
update_cache: yes
cache_valid_time: 3600
- name: Install system dependencies
apt:
name:
- curl
- wget
- git
- build-essential
{% if infrastructure.database.db_type == "postgres" %}
- postgresql-client
{% elif infrastructure.database.db_type == "mysql" %}
- mysql-client
{% endif %}
state: present
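# Note: the {{"{{ ... }}"}} wrapping used below is generator-side escaping: the
# template engine prints the inner braces literally, so the rendered playbook
# keeps {{ deploy_user }} and {{ app_dir }} for Ansible to resolve at run time.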
- name: Create deployment user
user:
name: "{{"{{ deploy_user }}"}}"
shell: /bin/bash
createhome: yes
state: present
- name: Create application directory
file:
path: "{{"{{ app_dir }}"}}"
state: directory
owner: "{{"{{ deploy_user }}"}}"
group: "{{"{{ deploy_user }}"}}"
mode: '0755'
{% if infrastructure.database %}
- name: Configure database
include_tasks: tasks/database.yml
{% endif %}
- name: Deploy application
git:
repo: "{{ repository_url | default(value="https://github.com/youruser/" ~ project_name) }}"
dest: "{{"{{ app_dir }}"}}"
version: "{{ git_branch | default(value="main") }}"
become_user: "{{"{{ deploy_user }}"}}"
- name: Install application dependencies
shell: |
cd {{"{{ app_dir }}"}}
cargo build --release
become_user: "{{"{{ deploy_user }}"}}"
- name: Install systemd service
template:
src: templates/{{ project_name }}.service.j2
dest: /etc/systemd/system/{{ project_name }}.service
mode: '0644'
notify: Reload systemd
- name: Enable and start service
systemd:
name: "{{ project_name }}"
enabled: yes
state: started
{% if infrastructure.monitoring contains "prometheus" %}
- name: Install Prometheus node exporter
include_tasks: tasks/prometheus-exporter.yml
{% endif %}
handlers:
- name: Reload systemd
systemd:
daemon_reload: yes
- name: Verify deployment
hosts: app_servers
tasks:
- name: Check service status
systemd:
name: "{{ project_name }}"
register: service_status
- name: Display service status
debug:
msg: "Service {{ project_name }} is {{"{{ service_status.status.ActiveState }}"}}"

@@ -1,30 +0,0 @@
---
# {{ project_name }} - Ansible Inventory
# Generated by typedialog-prov-gen
all:
vars:
ansible_user: "{{ ansible_user | default(value="ubuntu") }}"
ansible_python_interpreter: /usr/bin/python3
children:
app_servers:
hosts:
{% for i in range(start=1, end=(server_count | default(value=1)) + 1) %}
app-{{ i }}:
ansible_host: "{{ hostvars[\"app-\" ~ i].address | default(value="192.168.1." ~ (100 + i)) }}"
{% endfor %}
{% if infrastructure.database %}
database_servers:
hosts:
db-1:
ansible_host: "{{ database_host | default(value="192.168.1.10") }}"
{% endif %}
{% if infrastructure.monitoring contains "prometheus" %}
monitoring_servers:
hosts:
monitoring-1:
ansible_host: "{{ monitoring_host | default(value="192.168.1.20") }}"
{% endif %}

@@ -1,37 +0,0 @@
{# {{ project_name }} - Nickel Configuration Template (Jinja2) #}
{# Generated by typedialog-prov-gen #}
{# This template is rendered by infrastructure tools (Ansible, Terraform) #}
{# to produce final Nickel configuration files #}
# {{ project_name }} Configuration
# Generated from template at: {{ "{{" }} template_generation_time {{ "}}" }}
# Environment: {{ "{{" }} environment {{ "}}" }}
let schemas = import "./schemas/config.ncl" in
let defaults = import "./defaults/config.ncl" in
{
{% for feature in features %}
# {{ feature.name }} configuration
{{ feature.name }} = {
{% for field in feature.fields %}
{% if field.field_type == "Text" or field.field_type == "Password" %}
{{ field.name }} = "{{ "{{" }} {{ feature.name }}_{{ field.name }} {{ "}}" }}",
{% elif field.field_type == "Number" %}
{{ field.name }} = {{ "{{" }} {{ feature.name }}_{{ field.name }} {{ "}}" }},
{% elif field.field_type == "Confirm" %}
{{ field.name }} = {{ "{{" }} {{ feature.name }}_{{ field.name }} | lower {{ "}}" }},
{% elif field.field_type == "Select" %}
{{ field.name }} = "{{ "{{" }} {{ feature.name }}_{{ field.name }} {{ "}}" }}",
{% elif field.field_type == "MultiSelect" %}
{{ field.name }} = {{ "{{" }} {{ feature.name }}_{{ field.name }} | to_json {{ "}}" }},
{% elif field.field_type == "RepeatingGroup" %}
{{ field.name }} = {{ "{{" }} {{ feature.name }}_{{ field.name }} | to_json {{ "}}" }},
{% else %}
{{ field.name }} = "{{ "{{" }} {{ feature.name }}_{{ field.name }} {{ "}}" }}",
{% endif %}
{% endfor %}
},
{% endfor %}
} | schemas.Config
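{# Illustrative render, assuming a feature named "database" with a Text field "host":
database = {
host = "{{ database_host }}",
},
The surviving {{ ... }} placeholders are later filled by the infrastructure tool. #}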

@@ -1,69 +0,0 @@
# {{ project_name }} - Docker Compose Monitoring Stack
# Generated by typedialog-prov-gen
version: '3.8'
services:
prometheus:
image: prom/prometheus:latest
container_name: "{{ project_name }}-prometheus"
restart: unless-stopped
ports:
- "9090:9090"
volumes:
- ./prometheus.yml:/etc/prometheus/prometheus.yml:ro
- prometheus_data:/prometheus
command:
- '--config.file=/etc/prometheus/prometheus.yml'
- '--storage.tsdb.path=/prometheus'
- '--web.console.libraries=/usr/share/prometheus/console_libraries'
- '--web.console.templates=/usr/share/prometheus/consoles'
grafana:
image: grafana/grafana:latest
container_name: "{{ project_name }}-grafana"
restart: unless-stopped
ports:
- "3000:3000"
environment:
- GF_SECURITY_ADMIN_PASSWORD=${GRAFANA_PASSWORD:-admin}
- GF_USERS_ALLOW_SIGN_UP=false
volumes:
- grafana_data:/var/lib/grafana
depends_on:
- prometheus
node_exporter:
image: prom/node-exporter:latest
container_name: "{{ project_name }}-node-exporter"
restart: unless-stopped
ports:
- "9100:9100"
command:
- '--path.procfs=/host/proc'
- '--path.sysfs=/host/sys'
- '--collector.filesystem.mount-points-exclude=^/(sys|proc|dev|host|etc)($$|/)'
volumes:
- /proc:/host/proc:ro
- /sys:/host/sys:ro
- /:/rootfs:ro
volumes:
prometheus_data:
driver: local
grafana_data:
driver: local
networks:
default:
name: {{ project_name }}_monitoring

@@ -1,86 +0,0 @@
# {{ project_name }} - Docker Compose Service Definition
# Generated by typedialog-prov-gen
version: '3.8'
services:
app:
image: "{{ docker_image | default(value=project_name ~ ":latest") }}"
container_name: "{{ project_name }}-app"
restart: unless-stopped
ports:
- "${APP_PORT:-8080}:8080"
environment:
- RUST_LOG=${RUST_LOG:-info}
- APP_ENV=${APP_ENV:-production}
{% if infrastructure.database %}
- DATABASE_URL=${DATABASE_URL}
{% endif %}
{% if infrastructure.database %}
database:
image: "{% if infrastructure.database.db_type == "postgres" %}postgres:15-alpine
{%- elif infrastructure.database.db_type == "mysql" %}mysql:8.0
{%- elif infrastructure.database.db_type == "redis" %}redis:7-alpine
{%- else %}sqlite:latest{% endif %}"
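# The {%- tags trim the template newlines above, so the rendered image: value
# collapses to a single line.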
container_name: "{{ project_name }}-db"
restart: unless-stopped
{% if infrastructure.database.db_type == "postgres" %}
environment:
- POSTGRES_DB={{ project_name }}
- POSTGRES_USER=${DB_USER:-{{ project_name }}}
- POSTGRES_PASSWORD=${DB_PASSWORD}
{% elif infrastructure.database.db_type == "mysql" %}
environment:
- MYSQL_DATABASE={{ project_name }}
- MYSQL_USER=${DB_USER:-{{ project_name }}}
- MYSQL_PASSWORD=${DB_PASSWORD}
- MYSQL_ROOT_PASSWORD=${DB_ROOT_PASSWORD}
{% endif %}
volumes:
- db_data:/var/lib/{% if infrastructure.database.db_type == "postgres" %}postgresql/data
{%- elif infrastructure.database.db_type == "mysql" %}mysql
{%- elif infrastructure.database.db_type == "redis" %}redis{% endif %}
healthcheck:
{% if infrastructure.database.db_type == "postgres" %}
test: ["CMD-SHELL", "pg_isready -U ${DB_USER:-{{ project_name }}}"]
{% elif infrastructure.database.db_type == "mysql" %}
test: ["CMD", "mysqladmin", "ping", "-h", "localhost"]
{% elif infrastructure.database.db_type == "redis" %}
test: ["CMD", "redis-cli", "ping"]
{% endif %}
interval: 10s
timeout: 5s
retries: 5
{% endif %}
nginx:
image: nginx:alpine
container_name: "{{ project_name }}-nginx"
restart: unless-stopped
ports:
- "80:80"
- "443:443"
volumes:
- ./nginx.conf:/etc/nginx/nginx.conf:ro
- ./certs:/etc/nginx/certs:ro
depends_on:
- app
volumes:
{% if infrastructure.database %}
db_data:
driver: local
{% endif %}
networks:
default:
name: {{ project_name }}_network

@@ -1,64 +0,0 @@
# {{ project_name }} - Prometheus Configuration
# Generated by typedialog-prov-gen
global:
scrape_interval: 15s
evaluation_interval: 15s
external_labels:
cluster: '{{ project_name }}'
environment: '${ENVIRONMENT:-production}'
# Alertmanager configuration
alerting:
alertmanagers:
- static_configs:
- targets:
- 'alertmanager:9093'
# Load rules once and periodically evaluate them
rule_files:
- '/etc/prometheus/rules/*.yml'
# Scrape configurations
scrape_configs:
# Prometheus itself
- job_name: 'prometheus'
static_configs:
- targets: ['localhost:9090']
# {{ project_name }} application metrics
- job_name: '{{ project_name }}'
static_configs:
- targets:
{% for i in range(start=1, end=(server_count | default(value=1)) + 1) %}
- 'app-{{ i }}:${METRICS_PORT:-9091}'
{% endfor %}
relabel_configs:
- source_labels: [__address__]
target_label: instance
- source_labels: [__address__]
target_label: __param_target
- source_labels: [__param_target]
target_label: instance
- target_label: __address__
replacement: '{{ project_name }}:${METRICS_PORT:-9091}'
# Node exporter for system metrics
- job_name: 'node-exporter'
static_configs:
- targets:
{% for i in range(start=1, end=(server_count | default(value=1)) + 1) %}
- 'app-{{ i }}:9100'
{% endfor %}
{% if infrastructure.database %}
# Database exporter
- job_name: 'database-exporter'
static_configs:
- targets:
- 'db-exporter:9187'
{% endif %}
# Remote write for long-term storage (optional)
# remote_write:
# - url: 'http://mimir:9009/api/v1/push'

@@ -1,95 +0,0 @@
# {{ project_name }} - Main Terraform Configuration
# Generated by typedialog-prov-gen
terraform {
required_version = ">= 1.0"
required_providers {
{% if infrastructure.providers contains "hetzner" %}
hcloud = {
source = "hetznercloud/hcloud"
version = "~> 1.44"
}
{% endif %}
{% if infrastructure.providers contains "lxd" %}
lxd = {
source = "terraform-lxd/lxd"
version = "~> 1.10"
}
{% endif %}
{% if infrastructure.providers contains "aws" %}
aws = {
source = "hashicorp/aws"
version = "~> 5.0"
}
{% endif %}
}
}
# Local variables from Nickel configuration
locals {
project_name = "{{ project_name }}"
environment = var.environment
common_tags = {
Project = local.project_name
Environment = local.environment
ManagedBy = "Terraform"
Generated = "typedialog-prov-gen"
}
}
{% if infrastructure.database %}
# Database resources
resource "{{ infrastructure.database.db_type }}_instance" "main" {
name = "${local.project_name}-${local.environment}-db"
size = var.database_size
region = var.region
tags = local.common_tags
}
{% endif %}
{% if infrastructure.monitoring contains "prometheus" %}
# Monitoring instance
resource "compute_instance" "monitoring" {
name = "${local.project_name}-${local.environment}-monitoring"
machine_type = var.monitoring_machine_type
zone = var.zone
boot_disk {
initialize_params {
image = var.monitoring_image
}
}
metadata_startup_script = file("${path.module}/../../scripts/install-prometheus.sh")
tags = merge(local.common_tags, {
Role = "monitoring"
})
}
{% endif %}
# Output values
output "infrastructure_id" {
description = "Infrastructure resource IDs"
value = {
{% if infrastructure.database %}
database_id = {{ infrastructure.database.db_type }}_instance.main.id
{% endif %}
{% if infrastructure.monitoring contains "prometheus" %}
monitoring_id = compute_instance.monitoring.id
{% endif %}
}
}
output "connection_strings" {
description = "Connection information"
sensitive = true
value = {
{% if infrastructure.database %}
database_url = {{ infrastructure.database.db_type }}_instance.main.connection_string
{% endif %}
}
}

@@ -1,77 +0,0 @@
# {{ project_name }} - Terraform Variables
# Generated by typedialog-prov-gen
variable "environment" {
description = "Deployment environment (development, staging, production)"
type = string
default = "development"
validation {
condition = contains(["development", "staging", "production"], var.environment)
error_message = "Environment must be development, staging, or production."
}
}
variable "region" {
description = "Cloud provider region"
type = string
default = "{{ infrastructure.default_region | default(value="us-west-2") }}"
}
variable "zone" {
description = "Cloud provider availability zone"
type = string
default = "{{ infrastructure.default_region | default(value="us-west-2") }}-a" # Terraform defaults cannot reference var.region, so the zone is derived at generation time
}
{% if infrastructure.database %}
variable "database_size" {
description = "Database instance size"
type = string
default = "{{ infrastructure.database.size | default(value="db-small") }}"
}
variable "database_backup_enabled" {
description = "Enable automated database backups"
type = bool
default = true
}
{% endif %}
{% if infrastructure.monitoring contains "prometheus" %}
variable "monitoring_machine_type" {
description = "Monitoring instance machine type"
type = string
default = "{{ infrastructure.monitoring_machine_type | default(value="e2-small") }}"
}
variable "monitoring_image" {
description = "Monitoring instance OS image"
type = string
default = "ubuntu-2204-lts"
}
{% endif %}
variable "ssh_keys" {
description = "SSH public keys for instance access"
type = list(string)
default = []
}
variable "network_cidr" {
description = "Network CIDR block"
type = string
default = "10.0.0.0/16"
}
variable "allowed_ips" {
description = "IP addresses allowed to access infrastructure"
type = list(string)
default = ["0.0.0.0/0"] # WARNING: Restrict in production
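# e.g. restrict to an admin network such as ["203.0.113.0/24"]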
}
variable "tags" {
description = "Additional tags for resources"
type = map(string)
default = {}
}

@@ -1,103 +0,0 @@
# {{ project_name }} - Hetzner Cloud Provider Configuration
# Generated by typedialog-prov-gen
terraform {
required_providers {
hcloud = {
source = "hetznercloud/hcloud"
version = "~> 1.44"
}
}
}
provider "hcloud" {
token = var.hetzner_token
}
# Hetzner-specific variables
variable "hetzner_token" {
description = "Hetzner Cloud API token"
type = string
sensitive = true
}
variable "hetzner_server_type" {
description = "Hetzner server type (cx11, cx21, cx31, etc.)"
type = string
default = "cx11" # 1 vCPU, 2GB RAM
}
variable "hetzner_location" {
description = "Hetzner datacenter location"
type = string
default = "{{ infrastructure.hetzner_location | default(value="nbg1") }}"
validation {
condition = contains(["nbg1", "fsn1", "hel1", "ash"], var.hetzner_location)
error_message = "Location must be nbg1 (Nuremberg), fsn1 (Falkenstein), hel1 (Helsinki), or ash (Ashburn)."
}
}
# SSH key for Hetzner instances
resource "hcloud_ssh_key" "default" {
name = "${local.project_name}-${local.environment}"
public_key = file(pathexpand("~/.ssh/id_ed25519.pub"))
}
# Hetzner server instance
resource "hcloud_server" "app" {
name = "${local.project_name}-${local.environment}-app"
server_type = var.hetzner_server_type
location = var.hetzner_location
image = "ubuntu-22.04"
ssh_keys = [hcloud_ssh_key.default.id]
labels = local.common_tags
user_data = templatefile("${path.module}/../../scripts/cloud-init.yml", {
project_name = local.project_name
environment = local.environment
})
}
# Hetzner firewall
resource "hcloud_firewall" "app" {
name = "${local.project_name}-${local.environment}-firewall"
rule {
direction = "in"
protocol = "tcp"
port = "22"
source_ips = var.allowed_ips
}
rule {
direction = "in"
protocol = "tcp"
port = "80"
source_ips = ["0.0.0.0/0", "::/0"]
}
rule {
direction = "in"
protocol = "tcp"
port = "443"
source_ips = ["0.0.0.0/0", "::/0"]
}
}
resource "hcloud_firewall_attachment" "app" {
firewall_id = hcloud_firewall.app.id
server_ids = [hcloud_server.app.id]
}
output "hetzner_server_ip" {
description = "Hetzner server public IP"
value = hcloud_server.app.ipv4_address
}
output "hetzner_server_id" {
description = "Hetzner server ID"
value = hcloud_server.app.id
}

@@ -1,115 +0,0 @@
# {{ project_name }} - LXD Provider Configuration
# Generated by typedialog-prov-gen
terraform {
required_providers {
lxd = {
source = "terraform-lxd/lxd"
version = "~> 1.10"
}
}
}
provider "lxd" {
generate_client_certificates = true
accept_remote_certificate = true
lxd_remote {
name = var.lxd_remote_name
scheme = "https"
address = var.lxd_remote_address
password = var.lxd_remote_password
default = true
}
}
# LXD-specific variables
variable "lxd_remote_name" {
description = "LXD remote name"
type = string
default = "local"
}
variable "lxd_remote_address" {
description = "LXD remote server address"
type = string
default = "{{ infrastructure.lxd_address | default(value="127.0.0.1:8443") }}"
}
variable "lxd_remote_password" {
description = "LXD remote server password"
type = string
sensitive = true
default = ""
}
variable "lxd_image" {
description = "LXD container image"
type = string
default = "ubuntu:22.04"
}
# LXD profile for containers
resource "lxd_profile" "app" {
name = "${local.project_name}-${local.environment}"
config = {
"boot.autostart" = "true"
"security.nesting" = "true"
"security.privileged" = "false"
}
device {
name = "root"
type = "disk"
properties = {
pool = "default"
path = "/"
}
}
device {
name = "eth0"
type = "nic"
properties = {
network = "lxdbr0"
name = "eth0"
}
}
}
# LXD container instance
resource "lxd_container" "app" {
name = "${local.project_name}-${local.environment}-app"
image = var.lxd_image
profiles = [lxd_profile.app.name]
config = {
"user.project" = local.project_name
"user.environment" = local.environment
}
limits = {
cpu = "2"
memory = "2GB"
}
provisioner "remote-exec" {
inline = [
"apt-get update",
"apt-get install -y curl wget git",
]
}
}
output "lxd_container_ip" {
description = "LXD container IP address"
value = lxd_container.app.ip_address
}
output "lxd_container_name" {
description = "LXD container name"
value = lxd_container.app.name
}
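
For the LXD variant, here is a sketch of a local dry run, assuming a default local remote and the lxc client on the PATH:

# Hypothetical local run of the rendered LXD module (remote defaults to "local").
terraform init
terraform apply -var lxd_image=ubuntu:22.04
lxc list                            # container shows up as <project>-<environment>-app
terraform output lxd_container_ip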

View File

@ -1,187 +0,0 @@
#!/usr/bin/env nu
# {{ project_name }} - TypeDialog Nickel Roundtrip Configuration Script (Nushell)
#
# This script integrates TypeDialog forms with Nickel configuration:
# 1. Runs TypeDialog form to collect user input (JSON output)
# 2. Converts JSON to Nickel configuration
# 3. Validates Nickel configuration against schemas
# 4. Merges with defaults and applies type contracts
# Configuration paths
const FORM_PATH = "{{ form_path }}"
const CONFIG_PATH = "{{ config_path }}"
const TEMPLATE_PATH = "{{ template_path }}"
const GENERATED_DIR = "generated"
# Color formatting
def log-info [message: string] {
print $"(ansi blue)[INFO](ansi reset) ($message)"
}
def log-success [message: string] {
print $"(ansi green)[SUCCESS](ansi reset) ($message)"
}
def log-warn [message: string] {
print $"(ansi yellow)[WARN](ansi reset) ($message)"
}
def log-error [message: string] {
print $"(ansi red)[ERROR](ansi reset) ($message)"
}
# Check if command exists
def command-exists [cmd: string]: nothing -> bool {
(which $cmd | length) > 0
}
# Check dependencies
def check-dependencies []: nothing -> nothing {
let missing = [
"typedialog",
"nickel"
] | filter {|cmd| not (command-exists $cmd)}
if ($missing | length) > 0 {
log-error $"Missing required dependencies: ($missing | str join ', ')"
log-info "Install with: cargo install typedialog nickel"
error make {msg: "Missing dependencies"}
}
}
# Step 1: Run TypeDialog form
def run-form []: nothing -> nothing {
log-info $"Running TypeDialog form: ($FORM_PATH)"
if not ($FORM_PATH | path exists) {
log-error $"Form file not found: ($FORM_PATH)"
error make {msg: "Form file not found"}
}
mkdir $GENERATED_DIR
try {
^typedialog $FORM_PATH --output-format json --output $"($GENERATED_DIR)/user-input.json"
log-success $"User input captured: ($GENERATED_DIR)/user-input.json"
} catch {|err|
log-error "TypeDialog form execution failed"
error make {msg: $"Form failed: ($err)"}
}
}
# Step 2: Convert JSON to Nickel
def json-to-nickel []: nothing -> nothing {
log-info "Converting JSON to Nickel configuration"
let json_file = $"($GENERATED_DIR)/user-input.json"
let nickel_file = $"($GENERATED_DIR)/config.ncl"
if not ($json_file | path exists) {
log-error $"JSON input file not found: ($json_file)"
error make {msg: "JSON input not found"}
}
let timestamp = (date now | date to-timezone "UTC" | format date "%Y-%m-%dT%H:%M:%SZ")
let user_json = (open $json_file)
# Generate Nickel configuration with embedded JSON
let nickel_content = $"# Generated Nickel configuration from TypeDialog form
# Generated at: ($timestamp)
let schemas = import \"../schemas/config.ncl\" in
let defaults = import \"../defaults/config.ncl\" in
let validators = import \"../validators/config.ncl\" in
# User-provided values \(from TypeDialog form\)
let user_values = ($user_json | to json) in
# Merge user values with defaults and apply type contracts
let final_config = defaults & user_values in
# Apply validators
let validated_config = validators.validate final_config in
validated_config | schemas.Config"
$nickel_content | save --force $nickel_file
log-success $"Nickel configuration generated: ($nickel_file)"
}
# Step 3: Validate Nickel configuration
def validate-nickel []: nothing -> nothing {
log-info "Validating Nickel configuration"
let nickel_file = $"($GENERATED_DIR)/config.ncl"
# Type checking
try {
^nickel typecheck $nickel_file
log-success "Nickel type checking passed"
} catch {|err|
log-error "Nickel type checking failed"
error make {msg: $"Type check failed: ($err)"}
}
# Test evaluation
try {
^nickel export $nickel_file | ignore
log-success "Nickel evaluation succeeded"
} catch {|err|
log-error "Nickel evaluation failed"
error make {msg: $"Evaluation failed: ($err)"}
}
}
# Step 4: Export to final formats
def export-config []: nothing -> nothing {
log-info "Exporting configuration to final formats"
let nickel_file = $"($GENERATED_DIR)/config.ncl"
# Export to JSON
try {
^nickel export --format json $nickel_file | save --force $"($GENERATED_DIR)/config.json"
log-success $"Exported: ($GENERATED_DIR)/config.json"
} catch {|err|
log-error $"JSON export failed: ($err)"
}
# Export to YAML
try {
^nickel export --format yaml $nickel_file | save --force $"($GENERATED_DIR)/config.yaml"
log-success $"Exported: ($GENERATED_DIR)/config.yaml"
} catch {|err|
log-error $"YAML export failed: ($err)"
}
# Export to TOML (convert from JSON)
if (command-exists "json2toml") {
try {
open $"($GENERATED_DIR)/config.json" | ^json2toml | save --force $"($GENERATED_DIR)/config.toml"
log-success $"Exported: ($GENERATED_DIR)/config.toml"
} catch {|err|
log-warn $"TOML export failed: ($err)"
}
} else {
log-warn "json2toml not found, skipping TOML export"
}
}
# Main execution
def main []: nothing -> nothing {
log-info "Starting {{ project_name }} configuration"
check-dependencies
run-form
json-to-nickel
validate-nickel
export-config
log-success "Configuration complete!"
log-info $"Generated files in: ($GENERATED_DIR)/"
log-info " - config.ncl (Nickel source)"
log-info " - config.json (JSON export)"
log-info " - config.yaml (YAML export)"
}
# Nushell invokes 'main' automatically when the script runs, so no explicit call is needed.
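
Running the generated script end to end should leave the artifacts its final log lines describe; the path below is an assumption about where the generator places the script.

# Assumed script location; adjust to wherever typedialog-prov-gen writes it.
nu scripts/configure.nu
ls generated/
# expected: user-input.json  config.ncl  config.json  config.yaml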

View File

@ -1,186 +0,0 @@
#!/usr/bin/env bash
# {{ project_name }} - TypeDialog Nickel Roundtrip Configuration Script
#
# This script integrates TypeDialog forms with Nickel configuration:
# 1. Runs TypeDialog form to collect user input (JSON output)
# 2. Converts JSON to Nickel configuration
# 3. Validates Nickel configuration against schemas
# 4. Merges with defaults and applies type contracts
#
# Usage: ./configure.sh
set -euo pipefail
# Configuration paths
FORM_PATH="{{ form_path }}"
CONFIG_PATH="{{ config_path }}"
TEMPLATE_PATH="{{ template_path }}"
GENERATED_DIR="generated"
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color
# Logging functions
log_info() {
echo -e "${BLUE}[INFO]${NC} $1"
}
log_success() {
echo -e "${GREEN}[SUCCESS]${NC} $1"
}
log_warn() {
echo -e "${YELLOW}[WARN]${NC} $1"
}
log_error() {
echo -e "${RED}[ERROR]${NC} $1"
}
# Check dependencies
check_dependencies() {
local missing_deps=()
if ! command -v typedialog &> /dev/null; then
missing_deps+=("typedialog")
fi
if ! command -v nickel &> /dev/null; then
missing_deps+=("nickel")
fi
if ! command -v jq &> /dev/null; then
missing_deps+=("jq")
fi
if [ ${#missing_deps[@]} -gt 0 ]; then
log_error "Missing required dependencies: ${missing_deps[*]}"
log_info "Install with: cargo install typedialog nickel && apt-get install jq"
exit 1
fi
}
# Step 1: Run TypeDialog form
run_form() {
log_info "Running TypeDialog form: ${FORM_PATH}"
if [ ! -f "${FORM_PATH}" ]; then
log_error "Form file not found: ${FORM_PATH}"
exit 1
fi
mkdir -p "${GENERATED_DIR}"
typedialog "${FORM_PATH}" \
--output-format json \
--output "${GENERATED_DIR}/user-input.json" || {
log_error "TypeDialog form execution failed"
exit 1
}
log_success "User input captured: ${GENERATED_DIR}/user-input.json"
}
# Step 2: Convert JSON to Nickel
json_to_nickel() {
log_info "Converting JSON to Nickel configuration"
local json_file="${GENERATED_DIR}/user-input.json"
local nickel_file="${GENERATED_DIR}/config.ncl"
if [ ! -f "${json_file}" ]; then
log_error "JSON input file not found: ${json_file}"
exit 1
fi
# Use Nickel's import capability to embed JSON
cat > "${nickel_file}" <<EOF
# Generated Nickel configuration from TypeDialog form
# Generated at: $(date -u +"%Y-%m-%dT%H:%M:%SZ")
let schemas = import "../schemas/config.ncl" in
let defaults = import "../defaults/config.ncl" in
let validators = import "../validators/config.ncl" in
# User-provided values (from TypeDialog form)
let user_values = $(cat "${json_file}") in
# Merge user values with defaults and apply type contracts
let final_config = defaults & user_values in
# Apply validators
let validated_config = validators.validate final_config in
validated_config | schemas.Config
EOF
log_success "Nickel configuration generated: ${nickel_file}"
}
# Step 3: Validate Nickel configuration
validate_nickel() {
log_info "Validating Nickel configuration"
local nickel_file="${GENERATED_DIR}/config.ncl"
if ! nickel typecheck "${nickel_file}"; then
log_error "Nickel type checking failed"
exit 1
fi
log_success "Nickel type checking passed"
# Test evaluation
if ! nickel export "${nickel_file}" > /dev/null; then
log_error "Nickel evaluation failed"
exit 1
fi
log_success "Nickel evaluation succeeded"
}
# Step 4: Export to final formats
export_config() {
log_info "Exporting configuration to final formats"
local nickel_file="${GENERATED_DIR}/config.ncl"
# Export to JSON
nickel export --format json "${nickel_file}" > "${GENERATED_DIR}/config.json"
log_success "Exported: ${GENERATED_DIR}/config.json"
# Export to YAML
nickel export --format yaml "${nickel_file}" > "${GENERATED_DIR}/config.yaml"
log_success "Exported: ${GENERATED_DIR}/config.yaml"
# Export to TOML (via JSON)
if command -v json2toml &> /dev/null; then
jq -r '.' "${GENERATED_DIR}/config.json" | json2toml > "${GENERATED_DIR}/config.toml"
log_success "Exported: ${GENERATED_DIR}/config.toml"
else
log_warn "json2toml not found, skipping TOML export"
fi
}
# Main execution
main() {
log_info "Starting {{ project_name }} configuration"
check_dependencies
run_form
json_to_nickel
validate_nickel
export_config
log_success "Configuration complete!"
log_info "Generated files in: ${GENERATED_DIR}/"
log_info " - config.ncl (Nickel source)"
log_info " - config.json (JSON export)"
log_info " - config.yaml (YAML export)"
}
main "$@"

View File

@ -1,40 +0,0 @@
#!/usr/bin/env nu
# Convert JSON to Nickel configuration with schema imports (Nushell)
# Usage: ./json-to-nickel.nu <input.json> [output.ncl]
def main [
input: path, # Input JSON file
output?: path # Output Nickel file (optional)
] {
let output_path = if ($output == null) {
$"($input | path parse | get stem).ncl"
} else {
$output
}
if not ($input | path exists) {
error make {msg: $"Input file not found: ($input)"}
}
let json_content = (open $input | to json)
let timestamp = (date now | date to-timezone "UTC" | format date "%Y-%m-%dT%H:%M:%SZ")
let nickel_content = $"# Generated Nickel configuration from JSON
# Source: ($input)
# Generated at: ($timestamp)
let schemas = import \"../schemas/config.ncl\" in
let defaults = import \"../defaults/config.ncl\" in
# User-provided values
let user_values = ($json_content) in
# Merge with defaults
let final_config = defaults & user_values in
final_config | schemas.Config"
$nickel_content | save --force $output_path
print $"Converted: ($output_path)"
print $"Validate with: nickel typecheck ($output_path)"
}

View File

@ -1,45 +0,0 @@
#!/usr/bin/env bash
# Convert JSON to Nickel configuration with schema imports
# Usage: ./json-to-nickel.sh <input.json> [output.ncl]
set -euo pipefail
if [ $# -lt 1 ]; then
echo "Usage: $0 <input.json> [output.ncl]"
exit 1
fi
INPUT="$1"
OUTPUT="${2:-${INPUT%.json}.ncl}"
if [ ! -f "${INPUT}" ]; then
echo "Error: Input file not found: ${INPUT}"
exit 1
fi
if ! command -v jq &> /dev/null; then
echo "Error: jq command not found"
echo "Install with: apt-get install jq"
exit 1
fi
# Generate Nickel file with embedded JSON
cat > "${OUTPUT}" <<EOF
# Generated Nickel configuration from JSON
# Source: ${INPUT}
# Generated at: $(date -u +"%Y-%m-%dT%H:%M:%SZ")
let schemas = import "../schemas/config.ncl" in
let defaults = import "../defaults/config.ncl" in
# User-provided values
let user_values = $(cat "${INPUT}") in
# Merge with defaults
let final_config = defaults & user_values in
final_config | schemas.Config
EOF
echo "Converted: ${OUTPUT}"
echo "Validate with: nickel typecheck ${OUTPUT}"

View File

@ -1,25 +0,0 @@
#!/usr/bin/env nu
# Convert Nickel configuration to JSON (Nushell)
# Usage: ./nickel-to-json.nu <input.ncl> [output.json]
def main [
input: path, # Input Nickel file
output?: path # Output JSON file (optional)
] {
let output_path = if ($output == null) {
$"($input | path parse | get stem).json"
} else {
$output
}
if not ($input | path exists) {
error make {msg: $"Input file not found: ($input)"}
}
if not ((which nickel | length) > 0) {
error make {msg: "nickel command not found. Install with: cargo install nickel"}
}
^nickel export --format json $input | save --force $output_path
print $"Exported: ($output_path)"
}

View File

@ -1,27 +0,0 @@
#!/usr/bin/env bash
# Convert Nickel configuration to JSON
# Usage: ./nickel-to-json.sh <input.ncl> [output.json]
set -euo pipefail
if [ $# -lt 1 ]; then
echo "Usage: $0 <input.ncl> [output.json]"
exit 1
fi
INPUT="$1"
OUTPUT="${2:-${INPUT%.ncl}.json}"
if [ ! -f "${INPUT}" ]; then
echo "Error: Input file not found: ${INPUT}"
exit 1
fi
if ! command -v nickel &> /dev/null; then
echo "Error: nickel command not found"
echo "Install with: cargo install nickel"
exit 1
fi
nickel export --format json "${INPUT}" > "${OUTPUT}"
echo "Exported: ${OUTPUT}"

View File

@ -1,25 +0,0 @@
#!/usr/bin/env nu
# Convert Nickel configuration to YAML (Nushell)
# Usage: ./nickel-to-yaml.nu <input.ncl> [output.yaml]
def main [
input: path, # Input Nickel file
output?: path # Output YAML file (optional)
] {
let output_path = if ($output == null) {
$"($input | path parse | get stem).yaml"
} else {
$output
}
if not ($input | path exists) {
error make {msg: $"Input file not found: ($input)"}
}
if not ((which nickel | length) > 0) {
error make {msg: "nickel command not found. Install with: cargo install nickel"}
}
^nickel export --format yaml $input | save --force $output_path
print $"Exported: ($output_path)"
}

View File

@ -1,27 +0,0 @@
#!/usr/bin/env bash
# Convert Nickel configuration to YAML
# Usage: ./nickel-to-yaml.sh <input.ncl> [output.yaml]
set -euo pipefail
if [ $# -lt 1 ]; then
echo "Usage: $0 <input.ncl> [output.yaml]"
exit 1
fi
INPUT="$1"
OUTPUT="${2:-${INPUT%.ncl}.yaml}"
if [ ! -f "${INPUT}" ]; then
echo "Error: Input file not found: ${INPUT}"
exit 1
fi
if ! command -v nickel &> /dev/null; then
echo "Error: nickel command not found"
echo "Install with: cargo install nickel"
exit 1
fi
nickel export --format yaml "${INPUT}" > "${OUTPUT}"
echo "Exported: ${OUTPUT}"

View File

@ -1,61 +0,0 @@
# Array validators for {{ feature_name }}
#
# Validators for repeating groups and array constraints.
let common = import "./common.ncl" in
let constraints = import "../constraints.toml" in
{
{% for array_field in array_fields %}
# Uniqueness validator for {{ array_field.name }}
Unique{{ array_field.name | capitalize }} = fun label =>
std.contract.from_predicate (fun arr =>
std.is_array arr &&
let values = std.array.map (fun item => item.{{ array_field.unique_key }}) arr in
let sorted = std.array.sort (fun a b => if a < b then 'Lesser else if a > b then 'Greater else 'Equal) values in  # std.array.sort requires an explicit comparator
let unique_count = std.array.fold_left
(fun acc => fun val =>
if acc.prev == val then
acc
else
{count = acc.count + 1, prev = val}
)
{count = 0, prev = null}
sorted
in
unique_count.count == std.array.length arr
) {
label = label,
message = "{{ array_field.name }} items must have unique {{ array_field.unique_key }} values",
},
# Length validator for {{ array_field.name }}
Valid{{ array_field.name | capitalize }}Length = fun label =>
std.contract.from_predicate (fun arr =>
std.is_array arr &&
let len = std.array.length arr in
{% if array_field.min_items %}len >= {{ array_field.min_items }} &&{% endif %}
{% if array_field.max_items %}len <= {{ array_field.max_items }} &&{% endif %}
true
) {
label = label,
message = "{{ array_field.name }} array length must be between {{ array_field.min_items | default(value="0") }} and {{ array_field.max_items | default(value="unlimited") }}",
},
# Composite validator for {{ array_field.name }}
Valid{{ array_field.name | capitalize }}Full = fun label =>
fun value =>
value
| Valid{{ array_field.name | capitalize }}Length label
{% if array_field.unique %}| Unique{{ array_field.name | capitalize }} label{% endif %}
,
{% endfor %}
# Master array validation
validate_arrays = fun config => {
{% for array_field in array_fields %}
{{ array_field.name }} = config.{{ array_field.name }} | Valid{{ array_field.name | capitalize }}Full "{{ feature_name }}.{{ array_field.name }}",
{% endfor %}
..config
},
}
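
To watch the uniqueness pattern above reject duplicates, here is a self-contained sketch with "id" standing in for the templated unique key and the label/message wrapper dropped for brevity:

# Writes and evaluates a throwaway Nickel file; expected to fail on the duplicate id.
cat > /tmp/unique-demo.ncl <<'EOF'
let UniqueIds = std.contract.from_predicate (fun arr =>
  let values = std.array.map (fun item => item.id) arr in
  let sorted = std.array.sort (fun a b => if a < b then 'Lesser else if a > b then 'Greater else 'Equal) values in
  let distinct = std.array.fold_left
    (fun acc => fun val => if acc.prev == val then acc else { count = acc.count + 1, prev = val })
    { count = 0, prev = null }
    sorted
  in
  distinct.count == std.array.length arr
) in
[{ id = 1 }, { id = 2 }, { id = 2 }] | UniqueIds
EOF
nickel export /tmp/unique-demo.ncl   # fails: id = 2 appears twice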

View File

@ -1,108 +0,0 @@
# Common validators for {{ project_name }}
#
# Reusable validation predicates and type contracts.
# Import with: let validators = import "validators/common.ncl" in
{
# Port number validation (1-65535)
ValidPort = fun label =>
std.contract.from_predicate (fun port =>
std.is_number port &&
port >= 1 &&
port <= 65535
) {
label = label,
message = "must be a valid port number (1-65535)",
},
# Non-empty string validation
NonEmptyString = fun label =>
std.contract.from_predicate (fun s =>
std.is_string s &&
std.string.length s > 0
) {
label = label,
message = "must be a non-empty string",
},
# Valid bind address format (IP:PORT)
ValidBindAddress = fun label =>
std.contract.from_predicate (fun addr =>
std.is_string addr &&
std.string.contains ":" addr &&
let parts = std.string.split ":" addr in
std.array.length parts == 2
) {
label = label,
message = "must be a valid bind address (IP:PORT format)",
},
# Valid URL format
ValidUrl = fun label =>
std.contract.from_predicate (fun url =>
std.is_string url &&
(std.string.is_match "^https?://" url)
) {
label = label,
message = "must be a valid HTTP/HTTPS URL",
},
# Positive number validation
PositiveNumber = fun label =>
std.contract.from_predicate (fun n =>
std.is_number n && n > 0
) {
label = label,
message = "must be a positive number",
},
# Non-negative number validation
NonNegativeNumber = fun label =>
std.contract.from_predicate (fun n =>
std.is_number n && n >= 0
) {
label = label,
message = "must be a non-negative number",
},
# Range validation
Range = fun min => fun max => fun label =>
std.contract.from_predicate (fun n =>
std.is_number n &&
n >= min &&
n <= max
) {
label = label,
message = "must be between %{std.to_string min} and %{std.to_string max}",
},
# String pattern matching (regex)
MatchesPattern = fun pattern => fun label =>
std.contract.from_predicate (fun s =>
std.is_string s &&
std.string.is_match pattern s
) {
label = label,
message = "must match pattern: %{pattern}",
},
# Enum validation (one of allowed values)
OneOf = fun allowed => fun label =>
std.contract.from_predicate (fun value =>
std.array.any (fun v => v == value) allowed
) {
label = label,
message = "must be one of: %{std.serialize 'Json allowed}",
},
# Array length validation
ArrayLength = fun min => fun max => fun label =>
std.contract.from_predicate (fun arr =>
std.is_array arr &&
let len = std.array.length arr in
len >= min && len <= max
) {
label = label,
message = "array length must be between %{std.to_string min} and %{std.to_string max}",
},
}
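
A minimal check of the port contract in isolation (simplified to a bare predicate, without the label/message wrapper the template attaches):

# Expected to fail on `bogus`: 70000 is outside 1-65535.
cat > /tmp/port-demo.ncl <<'EOF'
let ValidPort = std.contract.from_predicate (fun port =>
  std.is_number port && port >= 1 && port <= 65535
) in
{
  http | ValidPort = 8080,
  bogus | ValidPort = 70000,
}
EOF
nickel export /tmp/port-demo.ncl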

View File

@ -1,51 +0,0 @@
# Custom validators for {{ feature_name }}
#
# Feature-specific validation logic.
let common = import "./common.ncl" in
{
{% for validator in validators %}
# {{ validator.description | default(value="Custom validator for " ~ validator.name) }}
{{ validator.name }} = {% if validator.validator_type == "Range" %}common.Range {{ validator.min | default(value="0") }} {{ validator.max | default(value="100") }} "{{ validator.name }}"
{% elif validator.validator_type == "Pattern" %}common.MatchesPattern "{{ validator.pattern | default(value=".*") }}" "{{ validator.name }}"
{% elif validator.validator_type == "ArrayUniqueness" %}fun label =>
std.contract.from_predicate (fun arr =>
std.is_array arr &&
let values = std.array.map (fun item => item.{{ validator.unique_field | default(value="id") }}) arr in
# sorting alone never changes length, so count distinct values instead
let sorted = std.array.sort (fun a b => if a < b then 'Lesser else if a > b then 'Greater else 'Equal) values in
let distinct = std.array.fold_left
(fun acc => fun val => if acc.prev == val then acc else {count = acc.count + 1, prev = val})
{count = 0, prev = null}
sorted
in
distinct.count == std.array.length arr
) {
label = label,
message = "array items must have unique {{ validator.unique_field | default(value="id") }} values",
}
{% elif validator.validator_type == "Composite" %}fun label =>
std.contract.from_predicate (fun value =>
# Composite validation logic
{% for field in validator.applies_to %}
{{ field }}_valid value &&
{% endfor %}
true
) {
label = label,
message = "composite validation failed for {{ validator.name }}",
}
{% else %}fun label =>
std.contract.from_predicate (fun value =>
# Custom predicate logic
true # TODO: Implement custom validation
) {
label = label,
message = "{{ validator.description | default(value="validation failed") }}",
}
{% endif %},
{% endfor %}
# Master validation function for {{ feature_name }}
validate = fun config =>
config
# Apply all validators here
{% for validator in validators %}
# | {{ validator.name }} "{{ feature_name }}.{{ validator.name }}"
{% endfor %}
,
}

View File

@ -42,10 +42,6 @@ fn test_project_spec_validation() {
infrastructure: Default::default(),
domain_features: vec![DomainFeature::new("basic".to_string())],
constraints: vec![],
iac_templates: Default::default(),
scripts: Default::default(),
docs: Default::default(),
locales: vec![],
};
let result = spec.validate();

View File

@ -1,30 +1,30 @@
[package]
authors.workspace = true
description = "TypeDialog TUI tool for interactive forms using ratatui"
edition.workspace = true
license.workspace = true
name = "typedialog-tui"
repository.workspace = true
version.workspace = true
authors.workspace = true
edition.workspace = true
repository.workspace = true
license.workspace = true
description = "TypeDialog TUI tool for interactive forms using ratatui"
[[bin]]
name = "typedialog-tui"
path = "src/main.rs"
[package.metadata.binstall]
bin-dir = "bin/{ bin }"
pkg-fmt = "tgz"
pkg-url = "{ repo }/releases/download/v{ version }/typedialog-{ target }.tar.gz"
[package.metadata.binstall]
pkg-url = "{ repo }/releases/download/v{ version }/typedialog-{ target }.tar.gz"
bin-dir = "bin/{ bin }"
pkg-fmt = "tgz"
[dependencies]
anyhow = { workspace = true }
clap = { workspace = true }
serde_json = { workspace = true }
tokio = { workspace = true, features = ["rt-multi-thread"] }
toml = { workspace = true }
tracing-subscriber = { workspace = true }
typedialog-core = { path = "../typedialog-core", features = ["tui", "i18n", "encryption"] }
clap = { workspace = true }
anyhow = { workspace = true }
tokio = { workspace = true, features = ["rt-multi-thread"] }
serde_json = { workspace = true }
toml = { workspace = true }
unic-langid = { workspace = true }
tracing-subscriber = { workspace = true }
[lints]
workspace = true
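
The Cargo.toml hunks in this diff are pure key reordering. To keep manifests in that sorted form automatically, a TOML formatter with key reordering enabled can do it; taplo is one assumed option, not something the diff itself prescribes:

# Assumes taplo is installed and configured with reorder_keys = true.
taplo fmt crates/typedialog-tui/Cargo.toml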

View File

@ -3,11 +3,11 @@
use std::collections::HashMap;
use std::fs;
use std::path::PathBuf;
use typedialog_core::form_parser;
use typedialog_core::helpers;
use typedialog_core::i18n::LocaleLoader;
use typedialog_core::backends::{BackendFactory, BackendType};
use typedialog_core::config::TypeDialogConfig;
use typedialog_core::i18n::{I18nBundle, LocaleLoader, LocaleResolver};
use typedialog_core::nickel::NickelCli;
use typedialog_core::prelude::*;
use typedialog_core::{form_parser, helpers, Error, Result};
use unic_langid::LanguageIdentifier;
use super::helpers::{extract_nickel_defaults, flatten_json_object};

View File

@ -3,11 +3,11 @@
use std::collections::HashMap;
use std::fs;
use std::path::PathBuf;
use typedialog_core::form_parser;
use typedialog_core::backends::{BackendFactory, BackendType};
use typedialog_core::nickel::{
I18nExtractor, MetadataParser, NickelCli, TemplateEngine, TomlGenerator,
};
use typedialog_core::prelude::*;
use typedialog_core::{form_parser, Error, Result};
#[allow(clippy::too_many_arguments)]
pub fn nickel_to_form(

View File

@ -10,8 +10,8 @@ mod commands;
use clap::{Parser, Subcommand};
use std::path::PathBuf;
use typedialog_core::cli_common;
use typedialog_core::config::load_backend_config;
use typedialog_core::prelude::*;
use typedialog_core::config::{load_backend_config, TypeDialogConfig};
use typedialog_core::{Error, Result};
#[derive(Parser)]
#[command(

View File

@ -1,30 +1,30 @@
[package]
authors.workspace = true
description = "TypeDialog Web server for interactive forms using axum"
edition.workspace = true
license.workspace = true
name = "typedialog-web"
repository.workspace = true
version.workspace = true
authors.workspace = true
edition.workspace = true
repository.workspace = true
license.workspace = true
description = "TypeDialog Web server for interactive forms using axum"
[[bin]]
name = "typedialog-web"
path = "src/main.rs"
[package.metadata.binstall]
bin-dir = "bin/{ bin }"
pkg-fmt = "tgz"
pkg-url = "{ repo }/releases/download/v{ version }/typedialog-{ target }.tar.gz"
[package.metadata.binstall]
pkg-url = "{ repo }/releases/download/v{ version }/typedialog-{ target }.tar.gz"
bin-dir = "bin/{ bin }"
pkg-fmt = "tgz"
[dependencies]
anyhow = { workspace = true }
clap = { workspace = true }
serde_json = { workspace = true }
tokio = { workspace = true, features = ["rt-multi-thread"] }
toml = { workspace = true }
tracing-subscriber = { workspace = true }
typedialog-core = { path = "../typedialog-core", features = ["web", "i18n", "encryption"] }
clap = { workspace = true }
anyhow = { workspace = true }
tokio = { workspace = true, features = ["rt-multi-thread"] }
serde_json = { workspace = true }
toml = { workspace = true }
unic-langid = { workspace = true }
tracing-subscriber = { workspace = true }
[lints]
workspace = true

View File

@ -8,12 +8,11 @@
use clap::{Parser, Subcommand};
use std::fs;
use std::path::{Path, PathBuf};
use typedialog_core::backends::{BackendFactory, BackendType};
use typedialog_core::cli_common;
use typedialog_core::config::load_backend_config;
use typedialog_core::form_parser;
use typedialog_core::helpers;
use typedialog_core::i18n::LocaleLoader;
use typedialog_core::prelude::*;
use typedialog_core::config::{load_backend_config, TypeDialogConfig};
use typedialog_core::i18n::{I18nBundle, LocaleLoader, LocaleResolver};
use typedialog_core::{form_parser, helpers, Error, Result};
use unic_langid::LanguageIdentifier;
#[derive(Parser)]

View File

@ -1,30 +1,30 @@
[package]
authors.workspace = true
description = "TypeDialog CLI tool for interactive forms and prompts"
edition.workspace = true
license.workspace = true
name = "typedialog"
repository.workspace = true
version.workspace = true
authors.workspace = true
edition.workspace = true
repository.workspace = true
license.workspace = true
description = "TypeDialog CLI tool for interactive forms and prompts"
[[bin]]
name = "typedialog"
path = "src/main.rs"
[package.metadata.binstall]
bin-dir = "bin/{ bin }"
pkg-fmt = "tgz"
pkg-url = "{ repo }/releases/download/v{ version }/typedialog-{ target }.tar.gz"
[package.metadata.binstall]
pkg-url = "{ repo }/releases/download/v{ version }/typedialog-{ target }.tar.gz"
bin-dir = "bin/{ bin }"
pkg-fmt = "tgz"
[dependencies]
anyhow = { workspace = true }
clap = { workspace = true }
serde_json = { workspace = true }
tokio = { workspace = true, features = ["rt-multi-thread"] }
toml = { workspace = true }
tracing-subscriber = { workspace = true }
typedialog-core = { path = "../typedialog-core", features = ["cli", "i18n", "encryption"] }
clap = { workspace = true }
anyhow = { workspace = true }
tokio = { workspace = true, features = ["rt-multi-thread"] }
serde_json = { workspace = true }
toml = { workspace = true }
unic-langid = { workspace = true }
tracing-subscriber = { workspace = true }
[lints]
workspace = true

View File

@ -3,11 +3,11 @@
use std::collections::HashMap;
use std::fs;
use std::path::{Path, PathBuf};
use typedialog_core::form_parser;
use typedialog_core::helpers;
use typedialog_core::i18n::LocaleLoader;
use typedialog_core::backends::BackendFactory;
use typedialog_core::config::TypeDialogConfig;
use typedialog_core::i18n::{I18nBundle, LocaleLoader, LocaleResolver};
use typedialog_core::nickel::{NickelCli, TemplateEngine};
use typedialog_core::prelude::*;
use typedialog_core::{form_parser, helpers, Error, Result};
use unic_langid::LanguageIdentifier;
use super::helpers::{extract_nickel_defaults, flatten_json_object, print_results};

View File

@ -2,9 +2,7 @@
use std::collections::HashMap;
use std::path::PathBuf;
use typedialog_core::form_parser;
use typedialog_core::helpers;
use typedialog_core::prelude::*;
use typedialog_core::{form_parser, helpers, Error, Result};
/// Print results with encryption/redaction support
pub fn print_results(

Some files were not shown because too many files have changed in this diff.