chore: review, complete prov-gen, coupling review, prelude on crates

Jesús Pérez 2025-12-30 05:20:35 +00:00
parent f084937c70
commit b6e4099ebc
Signed by: jesus
GPG Key ID: 9F243E355E0BC939
66 changed files with 6122 additions and 465 deletions

View File

@ -3,28 +3,81 @@
//! TypeAgent Core Library
//!
//! Type-safe AI agent execution with a 3-layer pipeline architecture
//! (plus a Layer 0 foundation):
//!
//! ## Pipeline Layers
//!
//! ### Layer 0: Foundation
//! - **error** - Error types and result handling
//! - **utils** - Common utilities
//!
//! ### Layer 1: Parsing (MDX → AST)
//! - **parser** - Markup node parsing from MDX
//! - **formats** - Agent format detection
//!
//! ### Layer 2: Transpilation (AST → Nickel)
//! - **transpiler** - AST to Nickel code generation
//! - **nickel** - Nickel evaluation and type checking
//!
//! ### Layer 3: Execution (Nickel → Output)
//! - **executor** - Agent execution orchestration
//! - **cache** - Execution caching strategies
//! - **llm** - LLM provider abstraction
//! - **loader** - Agent definition loading
//!
//! ## Recommended Entry Points
//!
//! - [`AgentExecutor`] - Execute agents end-to-end
//! - [`LlmClient`] - Simplified LLM interactions
//! - [`AgentLoader`] - Load agent definitions from files
// ============================================================================
// LAYER 0: FOUNDATION
// ============================================================================
pub mod error;
pub mod utils;
// ============================================================================
// LAYER 1: PARSING (MDX → AST)
// ============================================================================
pub mod formats;
pub mod parser;
// ============================================================================
// LAYER 2: TRANSPILATION (AST → Nickel)
// ============================================================================
pub mod nickel;
pub mod transpiler;
// ============================================================================
// LAYER 3: EXECUTION (Nickel → Output)
// ============================================================================
pub mod cache;
pub mod executor;
pub mod llm;
mod loader;
// ============================================================================
// PUBLIC API EXPORTS (grouped by pipeline layer)
// ============================================================================
// Layer 0: Foundation
pub use error::{Error, Result};
// Layer 1: Parsing
pub use formats::{AgentFormat, FormatDetector};
pub use parser::{AgentDirective, MarkupNode, MarkupParser};
// Layer 2: Transpilation
pub use nickel::{AgentConfig, AgentDefinition, NickelEvaluator};
pub use transpiler::NickelTranspiler;
// Layer 3: Execution (recommended entry points)
pub use cache::{CacheManager, CacheStats, CacheStrategy};
pub use executor::{AgentExecutor, ExecutionResult};
pub use llm::LlmClient;
pub use loader::AgentLoader;
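For orientation, a minimal end-to-end sketch of how the recommended entry points compose. Only the type names (AgentLoader, AgentExecutor, ExecutionResult) come from the exports above; the constructor and method names are hypothetical placeholders, not the crate's confirmed API:

// Hypothetical usage sketch — constructors and methods are assumptions.
use typedialog_ag_core::{AgentExecutor, AgentLoader, Result};

async fn run_agent() -> Result<()> {
    // Layer 3: load an agent definition, then execute it end-to-end.
    let loader = AgentLoader::new("agents/hello.mdx"); // constructor assumed
    let definition = loader.load()?;                   // method assumed
    let executor = AgentExecutor::new();               // constructor assumed
    let result = executor.execute(&definition).await?; // method assumed
    println!("{result:?}");                            // Debug impl assumed
    Ok(())
}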

View File

@ -1,4 +1,23 @@
//! LLM provider abstraction and implementations
//!
//! This module provides both low-level provider traits and a high-level client facade.
//!
//! # Recommended Usage
//!
//! For most use cases, use [`LlmClient`] which provides a simplified API:
//!
//! ```no_run
//! use typedialog_ag_core::llm::LlmClient;
//!
//! # async fn example() -> Result<(), Box<dyn std::error::Error>> {
//! let client = LlmClient::from_model("claude-3-5-sonnet-20241022")?;
//! let response = client.ask("What is Rust?").await?;
//! println!("{}", response);
//! # Ok(())
//! # }
//! ```
//!
//! For advanced use cases (implementing custom providers), use [`LlmProvider`] trait.
pub mod claude;
pub mod gemini;
@ -16,6 +35,7 @@ pub use provider::{
};
use crate::error::{Error, Result};
use std::sync::Arc;
/// Create an LLM provider based on model name
pub fn create_provider(model: &str) -> Result<Box<dyn LlmProvider>> {
@ -68,3 +88,259 @@ fn is_ollama_model(model: &str) -> bool {
.iter()
.any(|prefix| model.starts_with(prefix))
}
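A small usage sketch of the dispatcher above, using only the signature shown (model names illustrative):

fn pick_providers() -> Result<()> {
    let cloud = create_provider("claude-3-5-sonnet-20241022")?; // Box<dyn LlmProvider>
    let local = create_provider("llama2")?; // routed to Ollama by name prefix
    let _ = (cloud, local);
    Ok(())
}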
// ============================================================================
// LLM CLIENT FACADE (Recommended High-Level API)
// ============================================================================
/// High-level LLM client facade providing a simplified API
///
/// `LlmClient` wraps the lower-level [`LlmProvider`] trait and provides
/// convenience methods for common use cases. This is the recommended way
/// to interact with LLMs for most applications.
///
/// # Examples
///
/// ## Simple question-answer
///
/// ```no_run
/// use typedialog_ag_core::llm::LlmClient;
///
/// # async fn example() -> Result<(), Box<dyn std::error::Error>> {
/// let client = LlmClient::from_model("claude-3-5-sonnet-20241022")?;
/// let answer = client.ask("Explain Rust ownership").await?;
/// println!("{}", answer);
/// # Ok(())
/// # }
/// ```
///
/// ## With custom parameters
///
/// ```no_run
/// use typedialog_ag_core::llm::LlmClient;
///
/// # async fn example() -> Result<(), Box<dyn std::error::Error>> {
/// let client = LlmClient::from_model("gpt-4")?
/// .with_temperature(0.7)
/// .with_max_tokens(2000);
///
/// let response = client.ask("Write a haiku about coding").await?;
/// println!("{}", response);
/// # Ok(())
/// # }
/// ```
///
/// ## Streaming responses
///
/// ```no_run
/// use typedialog_ag_core::llm::LlmClient;
/// use futures::StreamExt;
///
/// # async fn example() -> Result<(), Box<dyn std::error::Error>> {
/// let client = LlmClient::from_model("claude-3-5-sonnet-20241022")?;
/// let mut stream = client.ask_stream("Tell me a story").await?;
///
/// while let Some(chunk) = stream.next().await {
/// match chunk? {
/// typedialog_ag_core::llm::StreamChunk::Content(text) => print!("{}", text),
/// typedialog_ag_core::llm::StreamChunk::Done(_) => println!("\n[Done]"),
/// typedialog_ag_core::llm::StreamChunk::Error(e) => eprintln!("Error: {}", e),
/// }
/// }
/// # Ok(())
/// # }
/// ```
pub struct LlmClient {
provider: Arc<dyn LlmProvider>,
model: String,
temperature: Option<f64>,
max_tokens: Option<usize>,
system: Option<String>,
}
impl LlmClient {
/// Create a new client from a specific provider
///
/// For most use cases, prefer [`from_model`](Self::from_model) which auto-detects the provider.
pub fn new(provider: Arc<dyn LlmProvider>, model: impl Into<String>) -> Self {
Self {
provider,
model: model.into(),
temperature: None,
max_tokens: None,
system: None,
}
}
/// Create a new client by auto-detecting the provider from the model name
///
/// # Examples
///
/// ```no_run
/// use typedialog_ag_core::llm::LlmClient;
///
/// # fn example() -> Result<(), Box<dyn std::error::Error>> {
/// let claude = LlmClient::from_model("claude-3-5-sonnet-20241022")?;
/// let gpt = LlmClient::from_model("gpt-4")?;
/// let gemini = LlmClient::from_model("gemini-pro")?;
/// let ollama = LlmClient::from_model("llama2")?;
/// # Ok(())
/// # }
/// ```
pub fn from_model(model: impl Into<String>) -> Result<Self> {
let model_str = model.into();
let provider = create_provider(&model_str)?;
Ok(Self::new(Arc::from(provider), model_str))
}
/// Set the temperature parameter (0.0 to 1.0)
///
/// Higher values make output more random, lower values more deterministic.
pub fn with_temperature(mut self, temperature: f64) -> Self {
self.temperature = Some(temperature);
self
}
/// Set the maximum number of tokens to generate
pub fn with_max_tokens(mut self, max_tokens: usize) -> Self {
self.max_tokens = Some(max_tokens);
self
}
/// Set a system message to guide the model's behavior
pub fn with_system(mut self, system: impl Into<String>) -> Self {
self.system = Some(system.into());
self
}
/// Ask a simple question and get a text response
///
/// This is the simplest way to interact with an LLM.
///
/// # Examples
///
/// ```no_run
/// # use typedialog_ag_core::llm::LlmClient;
/// # async fn example() -> Result<(), Box<dyn std::error::Error>> {
/// let client = LlmClient::from_model("claude-3-5-sonnet-20241022")?;
/// let response = client.ask("What is 2+2?").await?;
/// assert!(response.contains("4"));
/// # Ok(())
/// # }
/// ```
pub async fn ask(&self, prompt: impl Into<String>) -> Result<String> {
let request = LlmRequest {
model: self.model.clone(),
messages: vec![LlmMessage {
role: MessageRole::User,
content: prompt.into(),
}],
max_tokens: self.max_tokens,
temperature: self.temperature,
system: self.system.clone(),
};
let response = self.provider.complete(request).await?;
Ok(response.content)
}
/// Ask a question and receive a streaming response
///
/// Use this for long responses or when you want to display progress.
pub async fn ask_stream(&self, prompt: impl Into<String>) -> Result<LlmStream> {
let request = LlmRequest {
model: self.model.clone(),
messages: vec![LlmMessage {
role: MessageRole::User,
content: prompt.into(),
}],
max_tokens: self.max_tokens,
temperature: self.temperature,
system: self.system.clone(),
};
self.provider.stream(request).await
}
/// Execute a full completion request with complete control
///
/// Use this for multi-turn conversations or advanced scenarios.
///
/// # Examples
///
/// ```no_run
/// # use typedialog_ag_core::llm::{LlmClient, LlmRequest, LlmMessage, MessageRole};
/// # async fn example() -> Result<(), Box<dyn std::error::Error>> {
/// let client = LlmClient::from_model("claude-3-5-sonnet-20241022")?;
///
/// let request = LlmRequest {
/// model: "claude-3-5-sonnet-20241022".to_string(),
/// messages: vec![
/// LlmMessage {
/// role: MessageRole::User,
/// content: "Hello!".to_string(),
/// },
/// LlmMessage {
/// role: MessageRole::Assistant,
/// content: "Hi! How can I help?".to_string(),
/// },
/// LlmMessage {
/// role: MessageRole::User,
/// content: "Tell me about Rust".to_string(),
/// },
/// ],
/// max_tokens: Some(1000),
/// temperature: Some(0.7),
/// system: None,
/// };
///
/// let response = client.complete(request).await?;
/// println!("{}", response.content);
/// # Ok(())
/// # }
/// ```
pub async fn complete(&self, request: LlmRequest) -> Result<LlmResponse> {
self.provider.complete(request).await
}
/// Stream a full completion request
pub async fn stream_complete(&self, request: LlmRequest) -> Result<LlmStream> {
self.provider.stream(request).await
}
/// Get the underlying provider name
pub fn provider_name(&self) -> &str {
self.provider.name()
}
/// Get the model name
pub fn model(&self) -> &str {
&self.model
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_client_builder_pattern() {
// This just tests the builder pattern compiles
let model = "claude-3-5-sonnet-20241022";
let _would_create = || -> Result<LlmClient> {
Ok(LlmClient::from_model(model)?
.with_temperature(0.7)
.with_max_tokens(2000)
.with_system("You are a helpful assistant"))
};
}
#[test]
fn test_is_ollama_model() {
assert!(is_ollama_model("llama2"));
assert!(is_ollama_model("mistral"));
assert!(is_ollama_model("phi"));
assert!(!is_ollama_model("claude-3"));
assert!(!is_ollama_model("gpt-4"));
}
}

View File

@ -0,0 +1,59 @@
//! Advanced API for extending typedialog
//!
//! This module provides types and traits for advanced use cases like:
//! - Implementing custom backends
//! - Creating custom autocompletion strategies
//! - Building form parsers and renderers
//! - Template context manipulation
//!
//! # Examples
//!
//! ```no_run
//! use typedialog_core::advanced::{FormBackend, RenderContext};
//! use typedialog_core::error::Result;
//!
//! // Implement a custom backend
//! struct MyCustomBackend;
//!
//! #[async_trait::async_trait]
//! impl FormBackend for MyCustomBackend {
//! async fn render_text(
//! &mut self,
//! prompt: &str,
//! default: Option<&str>,
//! placeholder: Option<&str>,
//! _context: &RenderContext,
//! ) -> Result<String> {
//! // Custom implementation
//! Ok(format!("{}: custom", prompt))
//! }
//!
//! // ... implement other required methods
//! # async fn render_confirm(&mut self, _: &str, _: Option<bool>, _: &RenderContext) -> Result<bool> { Ok(true) }
//! # async fn render_select(&mut self, _: &str, _: &[String], _: Option<usize>, _: Option<usize>, _: bool, _: &RenderContext) -> Result<String> { Ok("".into()) }
//! # async fn render_multiselect(&mut self, _: &str, _: &[String], _: &[String], _: Option<usize>, _: bool, _: &RenderContext) -> Result<Vec<String>> { Ok(vec![]) }
//! # async fn render_password(&mut self, _: &str, _: &RenderContext) -> Result<String> { Ok("".into()) }
//! # async fn render_custom(&mut self, _: &str, _: &str, _: &RenderContext) -> Result<String> { Ok("".into()) }
//! # async fn render_editor(&mut self, _: &str, _: Option<&str>, _: Option<&str>, _: &RenderContext) -> Result<String> { Ok("".into()) }
//! # async fn render_date(&mut self, _: &str, _: Option<chrono::NaiveDate>, _: Option<chrono::NaiveDate>, _: Option<chrono::NaiveDate>, _: chrono::Weekday, _: &RenderContext) -> Result<chrono::NaiveDate> { Ok(chrono::NaiveDate::from_ymd_opt(2024, 1, 1).unwrap()) }
//! }
//! ```
pub use crate::error::{Error, ErrorWrapper, Result};
#[cfg(any(feature = "cli", feature = "tui", feature = "web"))]
pub use crate::autocompletion::{FilterCompleter, HistoryCompleter, PatternCompleter};
#[cfg(any(feature = "cli", feature = "tui", feature = "web"))]
pub use crate::backends::{FormBackend, RenderContext};
#[cfg(any(feature = "cli", feature = "tui", feature = "web"))]
pub use crate::form_parser::{parse_toml, DisplayItem};
#[cfg(feature = "templates")]
pub use crate::templates::TemplateContextBuilder;
pub use crate::helpers::EncryptionContext;
#[cfg(feature = "i18n")]
pub use crate::i18n::LocaleLoader;

View File

@ -482,6 +482,7 @@ impl From<chrono::ParseError> for ErrorWrapper {
}
}
#[cfg(feature = "cli")]
impl From<inquire::InquireError> for ErrorWrapper {
fn from(err: inquire::InquireError) -> Self {
match err {
@ -510,6 +511,66 @@ pub type Result<T> = std::result::Result<T, ErrorWrapper>;
/// Error type alias for convenient use
pub type Error = ErrorWrapper;
// ============================================================================
// ERROR CONTEXT TRAITS (for cleaner error boundaries)
// ============================================================================
/// Trait for converting module-specific errors to ErrorWrapper at boundaries
pub trait IntoTypedialogError {
fn into_typedialog_error(self) -> ErrorWrapper;
}
/// Trait for operations that can produce typedialog errors with context
pub trait TypedialogErrorContext<T> {
/// Add context message to any error
fn context(self, msg: impl Into<String>) -> Result<T>;
/// Add field-specific context to validation errors
fn with_field(self, field: impl Into<String>) -> Result<T>;
/// Add I/O operation context
fn with_io_context(self, operation: impl Into<String>, path: Option<PathBuf>) -> Result<T>;
}
impl<T, E> TypedialogErrorContext<T> for std::result::Result<T, E>
where
E: std::error::Error + Send + Sync + 'static,
{
fn context(self, msg: impl Into<String>) -> Result<T> {
self.map_err(|e| {
ErrorWrapper::FormParse(FormParseError {
kind: FormParseErrorKind::InvalidToml { line: 0, column: 0 },
message: format!("{}: {}", msg.into(), e),
source: Some(Box::new(e)),
})
})
}
fn with_field(self, field: impl Into<String>) -> Result<T> {
self.map_err(|e| {
ErrorWrapper::Validation(ValidationError {
kind: ValidationErrorKind::TypeMismatch {
expected: "valid value".into(),
got: e.to_string(),
},
field: field.into(),
value: None,
message: e.to_string(),
})
})
}
fn with_io_context(self, operation: impl Into<String>, path: Option<PathBuf>) -> Result<T> {
self.map_err(|e| {
ErrorWrapper::Io(IoError {
operation: operation.into(),
path,
source: io::Error::other(e.to_string()),
})
})
}
}
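A usage sketch for the context traits above (the field name, message, and path are illustrative):

fn parse_port(raw: &str) -> Result<u16> {
    // Any std error gains field-specific validation context.
    raw.parse::<u16>().with_field("port")
}

fn read_config() -> Result<String> {
    // I/O errors gain an operation label and the offending path.
    std::fs::read_to_string("config.toml")
        .with_io_context("read config", Some(PathBuf::from("config.toml")))
}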
// ============================================================================
// HELPER CONSTRUCTORS (for migration compatibility)
// ============================================================================

View File

@ -3,6 +3,7 @@
//! Provides conversion functions between JSON values and other formats
//! for serialization and display purposes.
use crate::error::TypedialogErrorContext;
use serde_json::{json, Value};
use std::collections::HashMap;
@ -22,15 +23,11 @@ pub fn format_results(
) -> crate::error::Result<String> {
match format {
"json" => {
let json_obj = serde_json::to_value(results).context("JSON serialization")?;
Ok(serde_json::to_string_pretty(&json_obj)?)
}
"yaml" => {
let yaml_string = serde_yaml::to_string(results).context("YAML serialization")?;
Ok(yaml_string)
}
"text" => {
@ -40,8 +37,7 @@ pub fn format_results(
}
Ok(output)
}
"toml" => toml::to_string_pretty(results)
.map_err(|e| crate::ErrorWrapper::new(format!("TOML serialization error: {}", e))),
"toml" => toml::to_string_pretty(results).context("TOML serialization"),
_ => Err(crate::ErrorWrapper::new(format!(
"Unknown output format: {}",
format
@ -77,8 +73,7 @@ pub fn to_json_value(results: &HashMap<String, Value>) -> Value {
/// Convert results to JSON string
pub fn to_json_string(results: &HashMap<String, Value>) -> crate::error::Result<String> {
serde_json::to_string(&to_json_value(results)).context("JSON conversion")
}
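A quick sketch of the JSON helpers above in use:

fn demo() -> crate::error::Result<()> {
    use serde_json::json;
    let mut results = std::collections::HashMap::new();
    results.insert("name".to_string(), json!("alice"));
    let s = to_json_string(&results)?; // JSON string of the results map
    println!("{s}");
    Ok(())
}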
/// Encryption context controlling redaction/encryption behavior
@ -136,6 +131,11 @@ impl EncryptionContext {
/// 2. CLI/context config (default_backend, backend_config)
/// 3. Global config (typedialog_config defaults)
/// 4. Hard default ("age")
#[cfg(all(
feature = "encryption",
any(feature = "cli", feature = "tui", feature = "web"),
feature = "i18n"
))]
pub fn resolve_encryption_config(
field: &crate::form_parser::FieldDefinition,
context: &EncryptionContext,
@ -187,7 +187,11 @@ pub fn resolve_encryption_config(
/// # Returns
///
/// Transformed results with redacted or encrypted sensitive values
#[cfg(feature = "encryption")]
#[cfg(all(
feature = "encryption",
any(feature = "cli", feature = "tui", feature = "web"),
feature = "i18n"
))]
pub fn transform_results(
results: &HashMap<String, Value>,
fields: &[crate::form_parser::FieldDefinition],
@ -223,7 +227,11 @@ pub fn transform_results(
/// Fallback version when encryption feature is not enabled
/// Still supports redaction by checking field sensitivity
#[cfg(not(feature = "encryption"))]
#[cfg(all(
not(feature = "encryption"),
any(feature = "cli", feature = "tui", feature = "web"),
feature = "i18n"
))]
pub fn transform_results(
results: &HashMap<String, Value>,
fields: &[crate::form_parser::FieldDefinition],
@ -264,7 +272,11 @@ pub fn transform_results(
}
/// Transform a single sensitive value based on context
#[cfg(feature = "encryption")]
#[cfg(all(
feature = "encryption",
any(feature = "cli", feature = "tui", feature = "web"),
feature = "i18n"
))]
fn transform_sensitive_value(
value: &Value,
field: &crate::form_parser::FieldDefinition,
@ -307,7 +319,11 @@ fn transform_sensitive_value(
/// # Returns
///
/// Formatted string with sensitive values redacted/encrypted
#[cfg(feature = "encryption")]
#[cfg(all(
feature = "encryption",
any(feature = "cli", feature = "tui", feature = "web"),
feature = "i18n"
))]
pub fn format_results_secure(
results: &HashMap<String, Value>,
fields: &[crate::form_parser::FieldDefinition],
@ -320,7 +336,11 @@ pub fn format_results_secure(
}
/// No-op when encryption feature disabled
#[cfg(not(feature = "encryption"))]
#[cfg(all(
not(feature = "encryption"),
any(feature = "cli", feature = "tui", feature = "web"),
feature = "i18n"
))]
pub fn format_results_secure(
results: &HashMap<String, Value>,
_fields: &[crate::form_parser::FieldDefinition],

View File

@ -18,30 +18,59 @@
//!
//! # Quick Start as Library
//!
//! ## Recommended: Using the Prelude
//!
//! ```no_run
//! use typedialog_core::prelude::*;
//!
//! # async fn example() -> Result<()> {
//! // Create a backend and execute forms
//! let mut backend = BackendFactory::create(BackendType::Cli)?;
//! let form = FormDefinition {
//! title: Some("User Registration".to_string()),
//! description: None,
//! locale: None,
//! fields: vec![],
//! display_items: vec![],
//! };
//! # Ok(())
//! # }
//! ```
//!
//! ## Simple Prompts with prompt_api
//!
//! ```no_run
//! use typedialog_core::prompt_api::{self, Result};
//!
//! fn example() -> Result<()> {
//! let name = prompt_api::text("Enter your name", None, None)?;
//! let confirmed = prompt_api::confirm("Continue?", Some(true))?;
//! println!("Hello, {}!", name);
//! Ok(())
//! }
//! ```
//!
//! ## Advanced: Custom Backends
//!
//! ```ignore
//! use typedialog_core::advanced::{FormBackend, RenderContext};
//!
//! // Implement your custom backend
//! struct MyBackend;
//!
//! #[async_trait::async_trait]
//! impl FormBackend for MyBackend {
//! // ... implement required methods
//! }
//! ```
//!
//! # Module Organization
//!
//! - [`prelude`] - Common types for form execution (recommended starting point)
//! - [`prompt_api`] - Direct prompt functions without forms
//! - [`advanced`] - Advanced APIs for extending backends
//! - Individual modules: [`error`], [`backends`], [`form_parser`], [`helpers`], etc.
//!
//! # Quick Start as CLI
//!
//! ```bash
@ -59,14 +88,56 @@
//! typedialog form-to-nickel form.toml results.json -o output.ncl --validate
//! ```
// ============================================================================
// CORE MODULES (always compiled)
// ============================================================================
pub mod error;
pub mod helpers;
/// Common CLI patterns and help text
pub mod cli_common;
// ============================================================================
// FOCUSED API MODULES (recommended entry points)
// ============================================================================
/// Prelude module for common form execution use cases
///
/// Import with `use typedialog_core::prelude::*;` for quick access.
pub mod prelude;
/// Prompt-only API for direct interactive prompts without forms
#[cfg(any(feature = "cli", feature = "tui", feature = "web"))]
pub mod prompt_api;
/// Advanced API for extending backends and custom implementations
#[cfg(any(feature = "cli", feature = "tui", feature = "web"))]
pub mod advanced;
// ============================================================================
// BACKEND MODULES (feature-gated)
// ============================================================================
#[cfg(any(feature = "cli", feature = "tui", feature = "web"))]
pub mod backends;
#[cfg(any(feature = "cli", feature = "tui", feature = "web"))]
pub mod form_parser;
#[cfg(any(feature = "cli", feature = "tui", feature = "web"))]
pub mod prompts;
// ============================================================================
// OPTIONAL FEATURE MODULES
// ============================================================================
#[cfg(any(feature = "cli", feature = "tui", feature = "web"))]
pub mod autocompletion;
#[cfg(any(feature = "cli", feature = "tui", feature = "web"))]
pub mod nickel;
#[cfg(feature = "ai_backend")]
pub mod ai;
@ -79,29 +150,71 @@ pub mod i18n;
#[cfg(feature = "templates")]
pub mod templates;
#[cfg(feature = "encryption")]
pub mod encryption_bridge;
#[cfg(feature = "encryption")]
pub use encrypt;
// ============================================================================
// RE-EXPORTS (backward compatibility - prefer prelude/prompt_api/advanced)
// ============================================================================
// Core exports (always available)
#[deprecated(
since = "0.2.0",
note = "Use `typedialog_core::prelude::*` or `error::Result` directly"
)]
pub use error::{Error, ErrorWrapper, Result};
#[deprecated(
since = "0.2.0",
note = "Use `typedialog_core::prelude::*` or `helpers::format_results` directly"
)]
pub use helpers::{format_results, to_json_string, to_json_value};
// Backend exports (require at least one backend)
#[cfg(any(feature = "cli", feature = "tui", feature = "web"))]
#[deprecated(
since = "0.2.0",
note = "Use `typedialog_core::advanced::*` or specific module imports"
)]
pub use autocompletion::{FilterCompleter, HistoryCompleter, PatternCompleter};
#[cfg(any(feature = "cli", feature = "tui", feature = "web"))]
#[deprecated(
since = "0.2.0",
note = "Use `typedialog_core::prelude::*` for BackendFactory/BackendType, or `advanced::*` for FormBackend/RenderContext"
)]
pub use backends::{BackendFactory, BackendType, FormBackend, RenderContext};
#[cfg(any(feature = "cli", feature = "tui", feature = "web"))]
#[deprecated(
since = "0.2.0",
note = "Use `typedialog_core::prelude::*` or `form_parser::*` directly"
)]
pub use form_parser::{DisplayItem, FieldDefinition, FieldType, FormDefinition};
// Optional feature exports
#[cfg(feature = "i18n")]
#[deprecated(
since = "0.2.0",
note = "Use `typedialog_core::prelude::TypeDialogConfig` or `config::TypeDialogConfig` directly"
)]
pub use config::TypeDialogConfig;
#[cfg(feature = "i18n")]
#[deprecated(
since = "0.2.0",
note = "Use `typedialog_core::prelude::*` or `i18n::*` directly"
)]
pub use i18n::{I18nBundle, LocaleResolver};
#[cfg(feature = "templates")]
#[deprecated(
since = "0.2.0",
note = "Use `typedialog_core::prelude::TemplateEngine` or `advanced::TemplateContextBuilder`"
)]
pub use templates::{TemplateContextBuilder, TemplateEngine};
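A migration sketch for the deprecations above, using the focused modules introduced in this commit:

// Before: use typedialog_core::{BackendFactory, FormDefinition, Result};
// After:
use typedialog_core::prelude::*; // BackendFactory, BackendType, FormDefinition, Result
use typedialog_core::advanced::{FormBackend, RenderContext}; // extension points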
/// Library version
@ -119,6 +232,7 @@ mod tests {
}
#[test]
#[cfg(any(feature = "cli", feature = "tui", feature = "web"))]
fn test_backend_factory_cli() {
let result = BackendFactory::create(BackendType::Cli);
#[cfg(feature = "cli")]

View File

@ -0,0 +1,41 @@
//! Prelude module for common typedialog use cases
//!
//! This module re-exports the most commonly used types for form execution.
//! Import with `use typedialog_core::prelude::*;` for quick access to core functionality.
//!
//! # Examples
//!
//! ```no_run
//! use typedialog_core::prelude::*;
//!
//! async fn example() -> Result<()> {
//! let mut backend = BackendFactory::create(BackendType::Cli)?;
//! let form = FormDefinition {
//! title: Some("Example Form".to_string()),
//! description: None,
//! locale: None,
//! fields: vec![],
//! display_items: vec![],
//! };
//! Ok(())
//! }
//! ```
#[cfg(any(feature = "cli", feature = "tui", feature = "web"))]
pub use crate::backends::{BackendFactory, BackendType, FormBackend};
pub use crate::error::{Error, ErrorWrapper, Result};
#[cfg(any(feature = "cli", feature = "tui", feature = "web"))]
pub use crate::form_parser::{FieldDefinition, FieldType, FormDefinition};
pub use crate::helpers::{format_results, to_json_string, to_json_value};
#[cfg(feature = "i18n")]
pub use crate::config::TypeDialogConfig;
#[cfg(feature = "i18n")]
pub use crate::i18n::{I18nBundle, LocaleResolver};
#[cfg(feature = "templates")]
pub use crate::templates::TemplateEngine;

View File

@ -0,0 +1,26 @@
//! Prompt-only API for direct interactive prompts
//!
//! This module provides a simplified API for using prompts directly without forms.
//! Use this when you need quick interactive prompts in your CLI applications.
//!
//! # Examples
//!
//! ```no_run
//! use typedialog_core::prompt_api::{self, Result};
//!
//! fn example() -> Result<()> {
//! let name = prompt_api::text("Enter your name", None, None)?;
//! let confirmed = prompt_api::confirm("Continue?", Some(true))?;
//! println!("Hello, {}!", name);
//! Ok(())
//! }
//! ```
pub use crate::error::{Error, ErrorWrapper, Result};
#[cfg(any(feature = "cli", feature = "tui", feature = "web"))]
pub use crate::prompts;
// Re-export commonly used prompt functions at module level
#[cfg(feature = "cli")]
pub use crate::prompts::{confirm, custom, date, editor, multi_select, password, select, text};

View File

@ -1,9 +1,7 @@
//! Mode C: Interactive AI-powered wizard for project configuration.
use crate::error::Result;
use crate::models::{ConfigField, DomainFeature, FieldType, ProjectSpec, ProjectType};
/// Interactive wizard using typedialog-ai for conversational generation.
pub struct InteractiveWizard;
@ -17,13 +15,8 @@ impl InteractiveWizard {
let name = project_name.unwrap_or_else(|| "my-project".to_string());
// Simple defaults for wizard mode
let mut spec = ProjectSpec::new(name, ProjectType::WebService);
spec.domain_features = vec![DomainFeature::new("basic_config".to_string())];
Ok(spec)
}

View File

@ -1,8 +1,10 @@
//! Generate command: orchestrates the provisioning generation pipeline.
use crate::config::Config;
use crate::error::Result;
use crate::input::{CargoIntrospector, ConfigLoader, NickelSchemaLoader};
use crate::models::ProjectSpec;
use crate::template::TemplateLoader;
use std::path::PathBuf;
use tracing::{debug, info};
@ -63,39 +65,63 @@ impl GenerateCommand {
info!("Generating provisioning structure to {}", output.display());
// Initialize template loader for generators that need it
let config = Config::default();
let template_loader = TemplateLoader::new(&config)?;
// Execute the complete generation pipeline in dependency order
use crate::generator::{
ConstraintGenerator, DefaultsGenerator, DocumentationGenerator, FormComposerGenerator,
FragmentGenerator, IacTemplateGenerator, LocalesGenerator, SchemaGenerator,
ScriptGenerator, ValidatorGenerator, ValuesGenerator,
};
// Layer 1: Constraints (foundation - needed by validators and fragments)
ConstraintGenerator::generate(&spec, &output)?;
debug!("✓ Constraints");
// Layer 2: Schemas (domain types - needed by defaults and validators)
SchemaGenerator::generate(&spec, &output)?;
debug!("✓ Schemas");
// Layer 3: Validators (validation logic)
ValidatorGenerator::generate(&spec, &output)?;
debug!("✓ Validators");
// Layer 4: Defaults (sensible defaults - needed by values)
DefaultsGenerator::generate(&spec, &output)?;
debug!("✓ Defaults");
// Layer 5: Fragments (form UI - needed by form composer)
FragmentGenerator::generate(&spec, &output)?;
debug!("✓ Fragments");
// Layer 6: Scripts (orchestration)
ScriptGenerator::generate(&spec, &output, &template_loader)?;
debug!("✓ Scripts");
// Layer 7: Values (runtime separation)
ValuesGenerator::generate(&spec, &output)?;
debug!("✓ Values");
// Layer 8: Locales (i18n support)
LocalesGenerator::generate(&spec, &output)?;
debug!("✓ Locales");
// Layer 9: Form Composer (master form configuration)
FormComposerGenerator::generate(&spec, &output)?;
debug!("✓ Form Composer");
// Layer 10: IaC Templates (infrastructure)
IacTemplateGenerator::generate(&spec, &output, &template_loader)?;
debug!("✓ IaC Templates");
// Layer 11: Documentation (comprehensive docs)
DocumentationGenerator::generate(&spec, &output, &template_loader)?;
debug!("✓ Documentation");
info!("Provisioning generation completed successfully!");
info!("Executed all 11 generation layers");
info!("Generated structure at: {}", output.display());
Ok(())

View File

@ -8,28 +8,37 @@ use std::path::Path;
pub struct ConstraintGenerator;
impl ConstraintGenerator {
/// Generate constraints.toml file and README.
pub fn generate(spec: &ProjectSpec, output_dir: impl AsRef<Path>) -> Result<()> {
let output_dir = output_dir.as_ref();
tracing::info!("Generating constraints for project: {}", spec.name);
let mut constraints_content = String::new();
// Add header with interpolation documentation
constraints_content.push_str(&format!(
"# Constraint definitions for {}\n# Single source of truth for validation rules\n\n",
"# Constraint definitions for {}\n\
# Single source of truth for validation rules\n\
#\n\
# Constraints can be referenced in fragments using interpolation:\n\
# max_items = \"${{constraint.tracker.udp.max_items}}\"\n\
#\n\
# This enables centralized constraint management.\n\n",
spec.name
));
// Generate hierarchical constraint sections for each feature
for feature in &spec.domain_features {
constraints_content.push_str(&format!("[feature.{}]\n", feature.name));
constraints_content.push_str("# Field constraints\n\n");
// Add feature comment
if let Some(desc) = &feature.description {
constraints_content.push_str(&format!("# {}\n", desc));
}
// Generate hierarchical sections for fields with constraints
for field in &feature.fields {
if field.min.is_some() || field.max.is_some() {
// Use hierarchical path: [tracker.udp] instead of [feature.tracker.udp]
constraints_content.push_str(&format!("[{}.{}]\n", feature.name, field.name));
if let Some(min) = field.min {
constraints_content.push_str(&format!("min = {}\n", min));
@ -43,12 +52,13 @@ impl ConstraintGenerator {
}
}
// Add global array constraints with hierarchical paths
if !spec.constraints.is_empty() {
constraints_content.push_str("\n# Global constraints\n\n");
constraints_content.push_str("# Array constraints\n\n");
for constraint in &spec.constraints {
constraints_content.push_str(&format!("[constraint.\"{}\"]\n", constraint.path));
// Hierarchical path: [tracker.udp_trackers] instead of [constraint."tracker.udp_trackers"]
constraints_content.push_str(&format!("[{}]\n", constraint.path));
if let Some(min) = constraint.min_items {
constraints_content.push_str(&format!("min_items = {}\n", min));
@ -68,7 +78,16 @@ impl ConstraintGenerator {
}
}
// Create constraints directory
let constraints_dir = output_dir.join("constraints");
std::fs::create_dir_all(&constraints_dir).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to create constraints directory: {}",
e
))
})?;
// Write constraints.toml file
let constraints_file = output_dir.join("constraints.toml");
std::fs::write(&constraints_file, constraints_content).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
@ -78,6 +97,103 @@ impl ConstraintGenerator {
})?;
tracing::info!("Generated constraints file: {}", constraints_file.display());
// Generate README.md explaining the constraint system
let readme_content = Self::generate_readme(spec);
let readme_file = constraints_dir.join("README.md");
std::fs::write(&readme_file, readme_content).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write constraints README: {}",
e
))
})?;
tracing::info!("Generated constraints README: {}", readme_file.display());
Ok(())
}
/// Generate README.md documenting the constraint system.
fn generate_readme(spec: &ProjectSpec) -> String {
let mut content = String::new();
content.push_str(&format!("# Constraint System - {}\n\n", spec.name));
content.push_str(
"This directory contains documentation for the constraint system used in this project.\n\n\
## Overview\n\n\
Constraints define validation rules that apply to configuration values. They are centralized \
in `constraints.toml` and can be referenced throughout the provisioning system.\n\n\
## Usage\n\n\
### In Form Fragments\n\n\
Reference constraints using interpolation syntax:\n\n\
```toml\n\
[[elements]]\n\
type = \"repeatinggroup\"\n\
max_items = \"${constraint.tracker.udp_trackers.max_items}\"\n\
unique = true\n\
```\n\n\
### In Validators\n\n\
Validators read constraints from `constraints.toml` to enforce rules:\n\n\
```nickel\n\
let constraints = import \"../constraints.toml\" in\n\
std.contract.from_predicate (fun arr =>\n\
std.array.length arr <= constraints.tracker.udp_trackers.max_items\n\
)\n\
```\n\n\
## Constraint Types\n\n\
### Field Constraints\n\n\
- `min` - Minimum value for numbers\n\
- `max` - Maximum value for numbers\n\n\
### Array Constraints\n\n\
- `min_items` - Minimum array length\n\
- `max_items` - Maximum array length\n\
- `unique` - Items must be unique\n\
- `unique_key` - Field name to check for uniqueness\n\n\
## Example\n\n\
```toml\n\
[tracker.udp_trackers]\n\
min_items = 1\n\
max_items = 4\n\
unique = true\n\
unique_key = \"bind_address\"\n\
```\n\n\
This ensures:\n\
- At least 1 UDP tracker is configured\n\
- No more than 4 UDP trackers\n\
- Each tracker has a unique bind_address\n\n\
## Hierarchical Paths\n\n\
Constraints use hierarchical TOML paths for clarity:\n\n\
- `[tracker.udp]` - Hierarchical\n\
- `[feature.tracker.udp]` - Old flat style\n\n\
This matches the Nickel schema structure and makes interpolation cleaner.\n\n"
);
content.push_str("## Constraints in This Project\n\n");
if !spec.constraints.is_empty() {
for constraint in &spec.constraints {
content.push_str(&format!("### `{}`\n\n", constraint.path));
if let Some(min) = constraint.min_items {
content.push_str(&format!("- Min items: {}\n", min));
}
if let Some(max) = constraint.max_items {
content.push_str(&format!("- Max items: {}\n", max));
}
if constraint.unique {
content.push_str("- Uniqueness: Required\n");
if let Some(key) = &constraint.unique_key {
content.push_str(&format!("- Unique key: `{}`\n", key));
}
}
content.push('\n');
}
} else {
content.push_str("*No global constraints defined.*\n\n");
}
content
}
}
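A short sketch of invoking this generator; ProjectSpec::new and DomainFeature::new match the constructors used elsewhere in this commit, while the project name and output path are illustrative:

fn emit_constraints() -> crate::error::Result<()> {
    let mut spec = crate::models::ProjectSpec::new(
        "torrust".to_string(),
        crate::models::ProjectType::WebService,
    );
    spec.domain_features = vec![crate::models::DomainFeature::new("tracker".to_string())];
    ConstraintGenerator::generate(&spec, "out/")?;
    // Writes out/constraints.toml plus out/constraints/README.md
    Ok(())
}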

View File

@ -1,4 +1,4 @@
//! Defaults generator: produces default configuration values in Nickel with type contracts.
use crate::error::Result;
use crate::models::{FieldType, ProjectSpec};
@ -27,28 +27,55 @@ impl DefaultsGenerator {
let mut defaults_content = String::new();
defaults_content.push_str(&format!(
"# Default configuration for {} feature\n# Generated for project: {}\n\n",
"# Default configuration for {} feature\n# Generated for project: {}\n",
feature.name, spec.name
));
defaults_content.push_str(&format!("let {} = {{\n", feature.name));
if let Some(desc) = &feature.description {
defaults_content.push_str(&format!("# {}\n", desc));
}
defaults_content.push('\n');
// Import schema if using type contracts
if feature.use_type_contracts {
defaults_content.push_str(&format!(
"let schemas = import \"../schemas/{}.ncl\" in\n\n",
feature.name
));
}
// Define defaults record
if feature.use_type_contracts {
let type_name = Self::capitalize_first(&feature.name);
defaults_content.push_str(&format!(
"# Default values with type contract\n{{\n {} | schemas.{} = {{\n",
feature.name, type_name
));
} else {
defaults_content.push_str(&format!("{{\n {} = {{\n", feature.name));
}
// Generate field defaults
for field in &feature.fields {
defaults_content.push_str(&format!(" # {}\n", field.prompt));
defaults_content.push_str(&format!(" # {}\n", field.prompt));
if let Some(default) = &field.default {
defaults_content.push_str(&format!(" {} = {},\n", field.name, default));
// Use provided default (same format regardless of type contracts)
defaults_content.push_str(&format!(" {} = {},\n", field.name, default));
} else {
// Generate sensible defaults based on field type
let default_val =
Self::generate_default_value(field, feature.use_type_contracts);
defaults_content.push_str(&format!(
" {} = {}, # No default provided\n",
" {} = {}, # Auto-generated default\n",
field.name, default_val
));
}
}
defaults_content.push_str("}\n\n");
// Same closing format regardless of type contracts
defaults_content.push_str(" },\n}\n");
// Write defaults file
let defaults_file = defaults_dir.join(format!("{}.ncl", feature.name));
@ -62,22 +89,150 @@ impl DefaultsGenerator {
tracing::debug!("Generated defaults for feature: {}", feature.name);
}
// Generate main defaults.ncl that imports all features
Self::generate_main_defaults(spec, &defaults_dir)?;
// Generate README
Self::generate_defaults_readme(spec, &defaults_dir)?;
tracing::info!("Successfully generated defaults");
Ok(())
}
/// Generate main defaults.ncl that imports all feature defaults.
fn generate_main_defaults(spec: &ProjectSpec, defaults_dir: &Path) -> Result<()> {
let mut content = String::new();
content.push_str(&format!(
"# Main defaults configuration for {}\n\
# Imports all feature defaults\n\n",
spec.name
));
// Import all feature defaults
for feature in &spec.domain_features {
content.push_str(&format!(
"let {} = import \"./{}.ncl\" in\n",
feature.name, feature.name
));
}
content.push_str("\n# Merge all defaults\nstd.record.merge_all [\n");
for feature in &spec.domain_features {
content.push_str(&format!(" {},\n", feature.name));
}
content.push_str("]\n");
let config_file = defaults_dir.join("config.ncl");
std::fs::write(&config_file, content).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write main defaults: {}",
e
))
})?;
Ok(())
}
/// Generate README.md for defaults directory.
fn generate_defaults_readme(spec: &ProjectSpec, defaults_dir: &Path) -> Result<()> {
let mut content = String::new();
content.push_str(&format!("# Default Configuration - {}\n\n", spec.name));
content.push_str(
"This directory contains default configuration values in Nickel format.\n\n\
## Overview\n\n\
Defaults provide sensible starting values for all configuration fields. \
Users can override these by merging their own values.\n\n\
## Files\n\n",
);
for feature in &spec.domain_features {
content.push_str(&format!("### `{}.ncl`\n\n", feature.name));
if let Some(desc) = &feature.description {
content.push_str(&format!("{}\n\n", desc));
}
if feature.use_type_contracts {
content.push_str("*Uses type contracts for validation.*\n\n");
}
}
content.push_str(
"## Usage\n\n\
### Merging with User Values\n\n\
```nickel\n\
let defaults = import \"./defaults/config.ncl\" in\n\
let user_values = {\n\
server.port = 9090,\n\
} in\n\
std.record.merge defaults user_values\n\
```\n\n\
### Type Contract Application\n\n\
When features use type contracts, defaults automatically enforce schemas:\n\n\
```nickel\n\
{\n\
tracker | schemas.Tracker = {\n\
port = 8080, # Type-checked\n\
},\n\
}\n\
```\n\n",
);
let readme_file = defaults_dir.join("README.md");
std::fs::write(&readme_file, content).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write defaults README: {}",
e
))
})?;
Ok(())
}
/// Generate a sensible default value for a field type.
fn generate_default_value(
field: &crate::models::ConfigField,
use_type_contract: bool,
) -> String {
match field.field_type {
FieldType::Text => "\"\"".to_string(),
FieldType::Number => "0".to_string(),
FieldType::Number => {
if let Some(min) = field.min {
min.to_string()
} else {
"0".to_string()
}
}
FieldType::Password => "\"\"".to_string(),
FieldType::Confirm => "false".to_string(),
FieldType::Select => {
if let Some(first_option) = field.options.first() {
format!("\"{}\"", first_option)
} else {
"\"\"".to_string()
}
}
FieldType::MultiSelect => "[]".to_string(),
FieldType::Editor => "\"\"".to_string(),
FieldType::Date => "\"\"".to_string(),
FieldType::RepeatingGroup => "[]".to_string(),
FieldType::RepeatingGroup => {
if use_type_contract {
// Generate array with one default item and type contract
"[\n # Add default items here\n ]".to_string()
} else {
"[]".to_string()
}
}
}
}
/// Capitalize first letter of a string.
fn capitalize_first(s: &str) -> String {
let mut chars = s.chars();
match chars.next() {
None => String::new(),
Some(first) => first.to_uppercase().collect::<String>() + chars.as_str(),
}
}
}
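A small test sketch for the capitalization helper above; as a child module it can reach the private associated function:

#[cfg(test)]
mod defaults_tests {
    use super::*;

    #[test]
    fn capitalizes_first_letter() {
        assert_eq!(DefaultsGenerator::capitalize_first("tracker"), "Tracker");
        assert_eq!(DefaultsGenerator::capitalize_first(""), "");
    }
}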

View File

@ -0,0 +1,107 @@
//! Documentation generator: creates comprehensive project documentation.
use crate::error::Result;
use crate::models::ProjectSpec;
use crate::template::TemplateLoader;
use std::path::Path;
use tera::Context;
/// Generates project documentation using templates.
pub struct DocumentationGenerator;
impl DocumentationGenerator {
/// Generate all project documentation.
pub fn generate(
spec: &ProjectSpec,
output_dir: impl AsRef<Path>,
template_loader: &TemplateLoader,
) -> Result<()> {
let output_dir = output_dir.as_ref();
tracing::info!("Generating documentation for project: {}", spec.name);
// Ensure docs directory exists
let docs_dir = output_dir.join("docs");
std::fs::create_dir_all(&docs_dir).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to create docs directory: {}",
e
))
})?;
// Prepare template context
let mut context = Context::new();
context.insert("project_name", &spec.name);
context.insert("project_type", &format!("{:?}", spec.project_type));
context.insert("features", &spec.domain_features);
context.insert("infrastructure", &spec.infrastructure);
// Generate main README
Self::generate_readme(spec, output_dir, template_loader, &context)?;
// Generate quickstart guide
Self::generate_quickstart(spec, &docs_dir, template_loader, &context)?;
// Generate nickel-roundtrip workflow documentation
Self::generate_nickel_roundtrip_guide(spec, &docs_dir, template_loader, &context)?;
tracing::info!("Successfully generated documentation");
Ok(())
}
/// Generate main project README.md.
fn generate_readme(
_spec: &ProjectSpec,
output_dir: &Path,
template_loader: &TemplateLoader,
context: &Context,
) -> Result<()> {
let readme = template_loader.render("docs/readme.md.tera", context)?;
let readme_file = output_dir.join("README.md");
std::fs::write(&readme_file, readme).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!("Failed to write README.md: {}", e))
})?;
tracing::debug!("Generated README.md");
Ok(())
}
/// Generate quickstart guide.
fn generate_quickstart(
_spec: &ProjectSpec,
docs_dir: &Path,
template_loader: &TemplateLoader,
context: &Context,
) -> Result<()> {
let quickstart = template_loader.render("docs/quickstart.md.tera", context)?;
let quickstart_file = docs_dir.join("quickstart.md");
std::fs::write(&quickstart_file, quickstart).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write quickstart.md: {}",
e
))
})?;
tracing::debug!("Generated quickstart.md");
Ok(())
}
/// Generate nickel-roundtrip workflow documentation.
fn generate_nickel_roundtrip_guide(
_spec: &ProjectSpec,
docs_dir: &Path,
template_loader: &TemplateLoader,
context: &Context,
) -> Result<()> {
let roundtrip = template_loader.render("docs/nickel-roundtrip.md.tera", context)?;
let roundtrip_file = docs_dir.join("nickel-roundtrip.md");
std::fs::write(&roundtrip_file, roundtrip).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write nickel-roundtrip.md: {}",
e
))
})?;
tracing::debug!("Generated nickel-roundtrip.md");
Ok(())
}
}
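A sketch of wiring this generator, mirroring how generate.rs constructs the template loader earlier in this commit (the output path is illustrative):

fn emit_docs(spec: &ProjectSpec) -> Result<()> {
    let config = crate::config::Config::default();
    let template_loader = TemplateLoader::new(&config)?;
    DocumentationGenerator::generate(spec, "out/", &template_loader)?;
    // Produces out/README.md, out/docs/quickstart.md, out/docs/nickel-roundtrip.md
    Ok(())
}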

View File

@ -0,0 +1,125 @@
//! Form Composer generator: creates master form configuration from fragments.
use crate::error::Result;
use crate::models::ProjectSpec;
use std::path::Path;
/// Generates master form configuration that includes all fragments.
pub struct FormComposerGenerator;
impl FormComposerGenerator {
/// Generate master config-form.toml file.
pub fn generate(spec: &ProjectSpec, output_dir: impl AsRef<Path>) -> Result<()> {
let output_dir = output_dir.as_ref();
tracing::info!(
"Generating master form configuration for project: {}",
spec.name
);
let mut form_content = String::new();
// Generate form metadata
form_content.push_str(&Self::generate_form_metadata(spec)?);
// Generate includes for all feature fragments
form_content.push_str(&Self::generate_includes(spec)?);
// Write master config-form.toml
let form_file = output_dir.join("config-form.toml");
std::fs::write(&form_file, form_content).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write config-form.toml: {}",
e
))
})?;
tracing::info!("Successfully generated master form configuration");
Ok(())
}
/// Generate form metadata section.
fn generate_form_metadata(spec: &ProjectSpec) -> Result<String> {
let mut metadata = String::new();
metadata.push_str("# Master Form Configuration\n");
metadata.push_str(&format!("# Auto-generated for project: {}\n", spec.name));
metadata.push_str(
"# This form composes all feature fragments into a complete configuration workflow\n\n",
);
metadata.push_str("[form]\n");
metadata.push_str(&format!("name = \"{}-config\"\n", spec.name));
metadata.push_str(&format!(
"title = \"{} Configuration\"\n",
Self::capitalize_first(&spec.name)
));
metadata.push_str(&format!(
"description = \"Complete configuration wizard for {}\"\n",
spec.name
));
metadata.push_str("backend = \"cli\" # Use CLI backend by default (supports TUI, Web)\n");
metadata.push_str("multi_step = true # Enable multi-step form with sections\n");
metadata.push('\n');
Ok(metadata)
}
/// Generate includes section for all feature fragments.
fn generate_includes(spec: &ProjectSpec) -> Result<String> {
let mut includes = String::new();
includes.push_str("# Feature Fragment Includes\n");
includes.push_str(
"# Each feature fragment is conditionally included based on configuration\n\n",
);
for feature in &spec.domain_features {
includes.push_str(&Self::generate_feature_include(spec, feature)?);
includes.push('\n');
}
Ok(includes)
}
/// Generate include entry for a single feature fragment.
fn generate_feature_include(
_spec: &ProjectSpec,
feature: &crate::models::DomainFeature,
) -> Result<String> {
let mut include = String::new();
include.push_str("[[includes]]\n");
include.push_str(&format!(
"path = \"fragments/{}-section.toml\"\n",
feature.name
));
// Add description/label
if let Some(desc) = &feature.description {
include.push_str(&format!("label = \"{}\"\n", desc));
} else {
include.push_str(&format!(
"label = \"{} Configuration\"\n",
Self::capitalize_first(&feature.name)
));
}
// Add conditional logic if feature has optional flag
// For now, we'll include all features by default
// In the future, this could be based on a "required" field in DomainFeature
if !feature.fields.is_empty() {
include.push_str("required = true\n");
}
Ok(include)
}
/// Capitalize first letter of a string.
fn capitalize_first(s: &str) -> String {
let mut chars = s.chars();
match chars.next() {
None => String::new(),
Some(first) => first.to_uppercase().collect::<String>() + chars.as_str(),
}
}
}
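A usage sketch, assuming a spec built with the constructors shown in the wizard change above (the output path is illustrative):

fn emit_form(spec: &ProjectSpec) -> Result<()> {
    FormComposerGenerator::generate(spec, "out/")?;
    // Produces out/config-form.toml with [form] metadata and one
    // [[includes]] entry per domain feature.
    Ok(())
}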

View File

@ -24,88 +24,322 @@ impl FragmentGenerator {
// Generate fragments for each feature
for feature in &spec.domain_features {
Self::generate_feature_fragment(spec, feature, &fragments_dir)?;
}
// Generate README
Self::generate_fragments_readme(spec, &fragments_dir)?;
tracing::info!("Successfully generated form fragments");
Ok(())
}
/// Generate a fragment file for a single feature.
fn generate_feature_fragment(
spec: &ProjectSpec,
feature: &crate::models::DomainFeature,
fragments_dir: &Path,
) -> Result<()> {
let mut fragment_content = String::new();
fragment_content.push_str(&format!(
"# Form fragment for {} feature\n# Auto-generated for project: {}\n\n",
feature.name, spec.name
));
fragment_content.push_str(&format!("[section.{}]\n", feature.name));
if let Some(desc) = &feature.description {
fragment_content.push_str(&format!("description = \"{}\"\n", desc));
}
fragment_content.push('\n');
// Generate field definitions for this feature
for field in &feature.fields {
fragment_content.push_str(&Self::generate_field_definition(
feature,
field,
spec,
fragments_dir,
)?);
}
// Write fragment file
let fragment_file = fragments_dir.join(format!("{}-section.toml", feature.name));
std::fs::write(&fragment_file, fragment_content).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write fragment file: {}",
e
))
})?;
tracing::debug!("Generated fragment for feature: {}", feature.name);
Ok(())
}
/// Generate TOML definition for a single field.
fn generate_field_definition(
feature: &crate::models::DomainFeature,
field: &crate::models::ConfigField,
spec: &ProjectSpec,
fragments_dir: &Path,
) -> Result<String> {
let mut field_def = String::new();
field_def.push_str(&format!("[[section.{}.fields]]\n", feature.name));
field_def.push_str(&format!("name = \"{}\"\n", field.name));
field_def.push_str(&format!("prompt = \"{}\"\n", field.prompt));
field_def.push_str(&format!(
"type = \"{}\"\n",
Self::field_type_to_form_type(&field.field_type)
));
// Add nickel_path for hierarchical Nickel output
let nickel_path = format!("{}.{}", feature.name, field.name);
field_def.push_str(&format!("nickel_path = \"{}\"\n", nickel_path));
if let Some(help) = &field.help {
field_def.push_str(&format!("help = \"{}\"\n", help));
}
if let Some(placeholder) = &field.placeholder {
field_def.push_str(&format!("placeholder = \"{}\"\n", placeholder));
}
if !field.required {
field_def.push_str("required = false\n");
}
if field.sensitive {
field_def.push_str("sensitive = true\n");
if let Some(backend) = &field.encryption_backend {
field_def.push_str(&format!("encryption_backend = \"{}\"\n", backend));
}
}
if !field.options.is_empty() {
field_def.push_str("options = [\n");
for option in &field.options {
field_def.push_str(&format!(" \"{}\",\n", option));
}
field_def.push_str("]\n");
}
// Use constraint interpolation for min/max from constraints.toml
if field.min.is_some() || field.max.is_some() {
if field.min.is_some() {
// Reference the centralized constraint value from constraints.toml via interpolation
field_def.push_str(&format!(
"min = \"${{constraint.{}.{}.min}}\"\n",
feature.name, field.name
));
}
if field.max.is_some() {
field_def.push_str(&format!(
"max = \"${{constraint.{}.{}.max}}\"\n",
feature.name, field.name
));
}
}
// Handle repeating groups - add constraint interpolation for array bounds
if matches!(field.field_type, FieldType::RepeatingGroup) {
// Check if this field has array constraints in spec
let constraint_path = format!("{}.{}", feature.name, field.name);
if let Some(constraint) = spec.constraints.iter().find(|c| c.path == constraint_path) {
if constraint.min_items.is_some() {
field_def.push_str(&format!(
"min_items = \"${{constraint.{}.min_items}}\"\n",
constraint_path
));
}
if constraint.max_items.is_some() {
field_def.push_str(&format!(
"max_items = \"${{constraint.{}.max_items}}\"\n",
constraint_path
));
}
if constraint.unique {
field_def.push_str("unique = true\n");
if let Some(unique_key) = &constraint.unique_key {
field_def.push_str(&format!("unique_key = \"{}\"\n", unique_key));
}
}
}
// Reference item fragment if it exists
field_def.push_str(&format!(
"item_fragment = \"fragments/{}-item.toml\"\n",
field.name
));
// Generate item fragment file for this repeating group
Self::generate_item_fragment(feature, field, fragments_dir)?;
}
field_def.push('\n');
Ok(field_def)
}
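// Illustrative shape of the emitted TOML for a hypothetical numeric field
// `port` in a `tracker` feature (names and values are assumptions, not
// project output):
//
//   [[section.tracker.fields]]
//   name = "port"
//   prompt = "Tracker port"
//   type = "number"
//   nickel_path = "tracker.port"
//   min = "${constraint.tracker.port.min}"
//   max = "${constraint.tracker.port.max}"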
/// Generate item fragment for repeating group fields.
fn generate_item_fragment(
feature: &crate::models::DomainFeature,
parent_field: &crate::models::ConfigField,
fragments_dir: &Path,
) -> Result<()> {
// For now, generate a minimal item fragment
// In a real implementation, this would extract nested fields from the repeating group
let mut item_content = String::new();
item_content.push_str(&format!(
"# Item fragment for {} repeating group\n# Auto-generated for feature: {}\n\n",
parent_field.name, feature.name
));
item_content.push_str(&format!("[item.{}]\n", parent_field.name));
item_content.push_str(&format!(
"description = \"Item definition for {}\"\n\n",
parent_field.prompt
));
// If the parent field has nested structure, we would generate fields here
// For now, we indicate this is a placeholder for future enhancement
item_content.push_str("# Fields for this item would be defined here\n");
item_content.push_str("# Example:\n");
item_content.push_str("# [[item.fields]]\n");
item_content.push_str("# name = \"field_name\"\n");
item_content.push_str("# type = \"text\"\n");
let item_file = fragments_dir.join(format!("{}-item.toml", parent_field.name));
std::fs::write(&item_file, item_content).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write item fragment: {}",
e
))
})?;
Ok(())
}
/// Generate README.md for fragments directory.
fn generate_fragments_readme(spec: &ProjectSpec, fragments_dir: &Path) -> Result<()> {
let mut content = String::new();
content.push_str(&format!("# Form Fragments - {}\n\n", spec.name));
content.push_str(
"This directory contains TypeDialog form fragments for configuration collection.\n\n\
## Overview\n\n\
Fragments are modular TOML files that define interactive forms for each domain feature. \
They use TypeDialog's declarative syntax to create CLI, TUI, or Web forms.\n\n\
## Files\n\n",
);
for feature in &spec.domain_features {
content.push_str(&format!("### `{}-section.toml`\n\n", feature.name));
if let Some(desc) = &feature.description {
content.push_str(&format!("{}\\n\n", desc));
}
content.push_str(&format!("**Fields:** {}\\n\n", feature.fields.len()));
// List repeating groups if any
let repeating_fields: Vec<_> = feature
.fields
.iter()
.filter(|f| matches!(f.field_type, FieldType::RepeatingGroup))
.collect();
if !repeating_fields.is_empty() {
content.push_str("**Repeating Groups:**\\n");
for field in repeating_fields {
content.push_str(&format!("- `{}` - {}\\n", field.name, field.prompt));
}
content.push('\n');
}
}
content.push_str(
"## Features\n\n\
### Hierarchical Nickel Paths\n\n\
Fields include `nickel_path` attribute for structured output:\n\n\
```toml\n\
[[section.tracker.fields]]\n\
name = \"bind_address\"\n\
nickel_path = \"tracker.bind_address\"\n\
```\n\n\
This generates Nickel output like:\n\n\
```nickel\n\
{\n\
tracker.bind_address = \"0.0.0.0:6969\",\n\
}\n\
```\n\n\
### Constraint Interpolation\n\n\
Min/max values reference centralized constraints:\n\n\
```toml\n\
min = \"${constraint.tracker.port.min}\"\n\
max = \"${constraint.tracker.port.max}\"\n\
```\n\n\
This pulls values from `constraints.toml` at runtime, ensuring single source of truth.\n\n\
### Repeating Groups\n\n\
Array fields with item fragments:\n\n\
```toml\n\
type = \"repeating_group\"\n\
min_items = \"${constraint.tracker.udp_trackers.min_items}\"\n\
max_items = \"${constraint.tracker.udp_trackers.max_items}\"\n\
unique = true\n\
unique_key = \"bind_address\"\n\
item_fragment = \"fragments/udp_trackers-item.toml\"\n\
```\n\n\
Item fragments define the structure of each array element.\n\n\
## Usage\n\n\
### CLI Backend\n\n\
```bash\n\
typedialog --form fragments/tracker-section.toml --output config.json\n\
```\n\n\
### TUI Backend\n\n\
```bash\n\
typedialog-tui --form fragments/tracker-section.toml\n\
```\n\n\
### Web Backend\n\n\
```bash\n\
typedialog-web --fragments-dir fragments/ --port 8080\n\
```\n\n\
### Composing Multiple Fragments\n\n\
Use a master form that includes fragments:\n\n\
```toml\n\
# config-form.toml\n\
[form]\n\
name = \"complete-config\"\n\
\n\
[[includes]]\n\
path = \"fragments/tracker-section.toml\"\n\
\n\
[[includes]]\n\
path = \"fragments/database-section.toml\"\n\
condition = \"${config.enable_database}\"\n\
```\n\n\
## Integration with Nickel\n\n\
Fragment output feeds into the Nickel roundtrip workflow:\n\n\
1. **TypeDialog execution** - User fills form → JSON output\n\
2. **json-to-nickel** - Convert JSON to Nickel syntax\n\
3. **Validation** - Apply schemas and validators\n\
4. **Export** - Generate final config files\n\n\
See `docs/nickel-roundtrip.md` for complete workflow.\n\n",
);
let readme_file = fragments_dir.join("README.md");
std::fs::write(&readme_file, content).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write fragments README: {}",
e
))
})?;
Ok(())
}
/// Map ProjectSpec field types to TypeDialog form field types.
fn field_type_to_form_type(field_type: &FieldType) -> &'static str {
match field_type {
View File
@ -0,0 +1,433 @@
//! IaC Template generator: renders Terraform, Ansible, and Docker Compose templates.
use crate::error::Result;
use crate::models::ProjectSpec;
use crate::template::TemplateLoader;
use std::path::Path;
use tera::Context;
/// Generates Infrastructure as Code templates from domain features.
pub struct IacTemplateGenerator;
impl IacTemplateGenerator {
/// Generate IaC templates for all configured providers.
pub fn generate(
spec: &ProjectSpec,
output_dir: impl AsRef<Path>,
template_loader: &TemplateLoader,
) -> Result<()> {
let output_dir = output_dir.as_ref();
tracing::info!("Generating IaC templates for project: {}", spec.name);
// Ensure iac directory exists
let iac_dir = output_dir.join("iac");
std::fs::create_dir_all(&iac_dir).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to create iac directory: {}",
e
))
})?;
// Prepare template context
let mut context = Context::new();
context.insert("project_name", &spec.name);
context.insert("features", &spec.domain_features);
context.insert("infrastructure", &spec.infrastructure);
// Generate Terraform/OpenTofu templates
if spec.iac_templates.terraform_enabled {
Self::generate_terraform_templates(spec, &iac_dir, template_loader, &context)?;
}
// Generate Ansible templates
if spec.iac_templates.ansible_enabled {
Self::generate_ansible_templates(spec, &iac_dir, template_loader, &context)?;
}
// Generate Docker Compose templates
if spec.iac_templates.docker_compose_enabled {
Self::generate_docker_compose_templates(spec, &iac_dir, template_loader, &context)?;
}
// Generate README
Self::generate_iac_readme(spec, &iac_dir)?;
tracing::info!("Successfully generated IaC templates");
Ok(())
}
/// Generate Terraform/OpenTofu templates.
fn generate_terraform_templates(
spec: &ProjectSpec,
iac_dir: &Path,
template_loader: &TemplateLoader,
context: &Context,
) -> Result<()> {
let tofu_dir = iac_dir.join("tofu");
std::fs::create_dir_all(&tofu_dir).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to create tofu directory: {}",
e
))
})?;
// Generate common Terraform configuration
let common_tf = template_loader.render("iac/tofu/common/main.tf.tera", context)?;
let common_dir = tofu_dir.join("common");
std::fs::create_dir_all(&common_dir).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to create tofu/common directory: {}",
e
))
})?;
std::fs::write(common_dir.join("main.tf"), common_tf).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write common/main.tf: {}",
e
))
})?;
// Generate provider-specific configurations
let providers = vec!["hetzner", "lxd"];
for provider in providers {
let provider_dir = tofu_dir.join(provider);
std::fs::create_dir_all(&provider_dir).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to create tofu/{} directory: {}",
provider, e
))
})?;
// Render main.tf for this provider
let template_path = format!("iac/tofu/{}/main.tf.tera", provider);
let provider_tf = template_loader.render(&template_path, context)?;
std::fs::write(provider_dir.join("main.tf"), provider_tf).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write {}/main.tf: {}",
provider, e
))
})?;
// Generate variables.tf
Self::generate_terraform_variables(&provider_dir, spec)?;
// Generate outputs.tf
Self::generate_terraform_outputs(&provider_dir, spec)?;
}
tracing::debug!("Generated Terraform templates");
Ok(())
}
/// Generate Terraform variables.tf file.
fn generate_terraform_variables(provider_dir: &Path, spec: &ProjectSpec) -> Result<()> {
let mut variables = String::new();
variables.push_str("# Terraform variables\n\n");
// Generate variables for each domain feature
for feature in &spec.domain_features {
variables.push_str(&format!("# {} configuration\n", feature.name));
for field in &feature.fields {
variables.push_str(&format!(
"variable \"{}_{} \" {{\n description = \"{}\"\n type = string\n",
feature.name, field.name, field.prompt
));
if let Some(default) = &field.default {
variables.push_str(&format!(" default = {}\n", default));
}
variables.push_str("}\n\n");
}
}
std::fs::write(provider_dir.join("variables.tf"), variables).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write variables.tf: {}",
e
))
})?;
Ok(())
}
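// Rough shape of the emitted variables.tf for a hypothetical `tracker.port`
// field (feature, field, and default are assumptions):
//
//   # tracker configuration
//   variable "tracker_port" {
//     description = "Tracker port"
//     type = string
//     default = 6969
//   }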
/// Generate Terraform outputs.tf file.
fn generate_terraform_outputs(provider_dir: &Path, spec: &ProjectSpec) -> Result<()> {
let mut outputs = String::new();
outputs.push_str("# Terraform outputs\n\n");
outputs.push_str(&format!(
"output \"{}_instance_ip\" {{\n description = \"IP address of the {} instance\"\n value = \"placeholder\"\n}}\n\n",
spec.name, spec.name
));
outputs.push_str(&format!(
"output \"{}_connection_string\" {{\n description = \"Connection string for {}\"\n value = \"placeholder\"\n sensitive = true\n}}\n",
spec.name, spec.name
));
std::fs::write(provider_dir.join("outputs.tf"), outputs).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!("Failed to write outputs.tf: {}", e))
})?;
Ok(())
}
/// Generate Ansible templates.
fn generate_ansible_templates(
spec: &ProjectSpec,
iac_dir: &Path,
template_loader: &TemplateLoader,
context: &Context,
) -> Result<()> {
let ansible_dir = iac_dir.join("ansible");
std::fs::create_dir_all(&ansible_dir).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to create ansible directory: {}",
e
))
})?;
// Generate playbook
let playbook = template_loader.render("iac/ansible/playbook.yml.tera", context)?;
std::fs::write(ansible_dir.join("playbook.yml"), playbook).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write playbook.yml: {}",
e
))
})?;
// Generate inventory
let inventory = template_loader.render("iac/ansible/inventory.ini.tera", context)?;
std::fs::write(ansible_dir.join("inventory.ini"), inventory).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write inventory.ini: {}",
e
))
})?;
// Generate roles directory with basic role structure
let roles_dir = ansible_dir.join("roles").join(&spec.name);
std::fs::create_dir_all(roles_dir.join("tasks")).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to create roles/{}/tasks directory: {}",
spec.name, e
))
})?;
// Generate tasks/main.yml
let tasks_main = format!(
"---\n# Tasks for {}\n\n- name: Deploy {} configuration\n template:\n src: config.j2\n dest: /etc/{}/config.ncl\n",
spec.name, spec.name, spec.name
);
std::fs::write(roles_dir.join("tasks/main.yml"), tasks_main).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write tasks/main.yml: {}",
e
))
})?;
tracing::debug!("Generated Ansible templates");
Ok(())
}
/// Generate Docker Compose templates.
fn generate_docker_compose_templates(
spec: &ProjectSpec,
iac_dir: &Path,
template_loader: &TemplateLoader,
context: &Context,
) -> Result<()> {
let docker_dir = iac_dir.join("docker");
std::fs::create_dir_all(&docker_dir).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to create docker directory: {}",
e
))
})?;
// Generate docker-compose.yml
let compose = template_loader.render("iac/docker-compose/services.yml.tera", context)?;
std::fs::write(docker_dir.join("docker-compose.yml"), compose).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write docker-compose.yml: {}",
e
))
})?;
// Generate monitoring stack if enabled
if !spec.infrastructure.monitoring.is_empty() {
let monitoring =
template_loader.render("iac/docker-compose/monitoring.yml.tera", context)?;
std::fs::write(docker_dir.join("docker-compose.monitoring.yml"), monitoring).map_err(
|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write docker-compose.monitoring.yml: {}",
e
))
},
)?;
}
// Generate Dockerfile
Self::generate_dockerfile(&docker_dir, spec)?;
tracing::debug!("Generated Docker Compose templates");
Ok(())
}
/// Generate Dockerfile for the application.
fn generate_dockerfile(docker_dir: &Path, spec: &ProjectSpec) -> Result<()> {
let dockerfile = format!(
"# Dockerfile for {}\n\
FROM rust:1.75 as builder\n\
\n\
WORKDIR /app\n\
COPY . .\n\
RUN cargo build --release\n\
\n\
FROM debian:bookworm-slim\n\
\n\
# Install Nickel\n\
RUN apt-get update && apt-get install -y curl \\\n\
&& curl -L https://github.com/tweag/nickel/releases/download/1.3.0/nickel-1.3.0-x86_64-unknown-linux-gnu.tar.gz | tar xz -C /usr/local/bin \\\n\
&& rm -rf /var/lib/apt/lists/*\n\
\n\
COPY --from=builder /app/target/release/{} /usr/local/bin/{}\n\
COPY config.ncl /etc/{}/config.ncl\n\
\n\
EXPOSE 8080\n\
\n\
CMD [\"{}\", \"--config\", \"/etc/{}/config.ncl\"]\n",
spec.name, spec.name, spec.name, spec.name, spec.name, spec.name
);
std::fs::write(docker_dir.join("Dockerfile"), dockerfile).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!("Failed to write Dockerfile: {}", e))
})?;
Ok(())
}
/// Generate README.md for iac directory.
fn generate_iac_readme(spec: &ProjectSpec, iac_dir: &Path) -> Result<()> {
let mut content = String::new();
content.push_str(&format!("# Infrastructure as Code - {}\n\n", spec.name));
content.push_str(
"This directory contains Infrastructure as Code templates for provisioning and deployment.\n\n\
## Overview\n\n\
IaC templates use Terraform/OpenTofu, Ansible, and Docker Compose to deploy the application.\n\n\
## Directory Structure\n\n",
);
if spec.iac_templates.terraform_enabled {
content.push_str(
"### `tofu/` - Terraform/OpenTofu\n\n\
Terraform configurations for infrastructure provisioning.\n\n\
- `common/` - Shared Terraform modules\n\
- `hetzner/` - Hetzner Cloud provider configuration\n\
- `lxd/` - LXD/Incus local containers\n\n\
**Usage:**\n\n\
```bash\n\
cd iac/tofu/hetzner\n\
terraform init\n\
terraform plan\n\
terraform apply\n\
```\n\n",
);
}
if spec.iac_templates.ansible_enabled {
content.push_str(
"### `ansible/` - Configuration Management\n\n\
Ansible playbooks for application deployment and configuration.\n\n\
- `playbook.yml` - Main deployment playbook\n\
- `inventory.ini` - Host inventory\n\
- `roles/` - Ansible roles\n\n\
**Usage:**\n\n\
```bash\n\
cd iac/ansible\n\
ansible-playbook -i inventory.ini playbook.yml\n\
```\n\n",
);
}
if spec.iac_templates.docker_compose_enabled {
content.push_str(
"### `docker/` - Container Orchestration\n\n\
Docker Compose configurations for containerized deployment.\n\n\
- `docker-compose.yml` - Main services\n\
- `docker-compose.monitoring.yml` - Monitoring stack (optional)\n\
- `Dockerfile` - Application container image\n\n\
**Usage:**\n\n\
```bash\n\
cd iac/docker\n\
docker-compose up -d\n\
docker-compose -f docker-compose.yml -f docker-compose.monitoring.yml up -d # With monitoring\n\
```\n\n",
);
}
content.push_str(
"## Integration with Nickel Configuration\n\n\
All IaC templates reference the Nickel configuration generated by typedialog:\n\n\
1. **TypeDialog** collects configuration → `config.json`\n\
2. **json-to-nickel** converts to Nickel → `config.ncl`\n\
3. **Validators** enforce constraints\n\
4. **IaC templates** reference validated config\n\n\
### Example: Terraform\n\n\
```hcl\n\
# Load the exported Nickel config (produced via `nickel export --format json`)\n\
locals {\n\
config = jsondecode(file(\"../../config.json\"))\n\
}\n\
\n\
resource \"server\" \"main\" {\n\
name = local.config.server.name\n\
port = local.config.server.port\n\
}\n\
```\n\n\
### Example: Ansible\n\n\
```yaml\n\
- name: Deploy configuration\n\
template:\n\
src: config.ncl.j2\n\
dest: /etc/app/config.ncl\n\
vars:\n\
config_data: \"{{ lookup('file', '../../config.ncl') }}\"\n\
```\n\n\
## Prerequisites\n\n",
);
if spec.iac_templates.terraform_enabled {
content.push_str("- **terraform** or **tofu** - Infrastructure provisioning\n");
}
if spec.iac_templates.ansible_enabled {
content.push_str("- **ansible** - Configuration management\n");
}
if spec.iac_templates.docker_compose_enabled {
content.push_str("- **docker** and **docker-compose** - Container runtime\n");
}
content.push_str("- **nickel** - Configuration language runtime\n\n");
content.push_str(
"## Workflow\n\n\
1. Generate configuration: `./scripts/configure.sh`\n\
2. Provision infrastructure: `cd iac/tofu/hetzner && terraform apply`\n\
3. Deploy application: `cd iac/ansible && ansible-playbook playbook.yml`\n\
4. Verify deployment: Check monitoring dashboards\n\n",
);
let readme_file = iac_dir.join("README.md");
std::fs::write(&readme_file, content).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!("Failed to write iac README: {}", e))
})?;
Ok(())
}
}
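// Hypothetical call site for the generator above; the template directory and
// the TemplateLoader constructor shown here are assumptions, not a documented
// API:
//
//   let loader = TemplateLoader::new("templates")?;
//   IacTemplateGenerator::generate(&spec, "generated/my-project", &loader)?;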
View File
@ -0,0 +1,205 @@
//! Locales generator: creates Fluent translation files for i18n support.
use crate::error::Result;
use crate::models::ProjectSpec;
use std::path::Path;
/// Generates Fluent translation files for internationalization.
pub struct LocalesGenerator;
impl LocalesGenerator {
/// Generate locale files for all configured languages.
pub fn generate(spec: &ProjectSpec, output_dir: impl AsRef<Path>) -> Result<()> {
let output_dir = output_dir.as_ref();
tracing::info!("Generating locale files for project: {}", spec.name);
// Ensure locales directory exists
let locales_dir = output_dir.join("locales");
std::fs::create_dir_all(&locales_dir).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to create locales directory: {}",
e
))
})?;
// Generate English locale (default)
Self::generate_locale(spec, &locales_dir, "en-US")?;
// Generate additional configured locales
for locale_spec in &spec.locales {
Self::generate_locale(spec, &locales_dir, &locale_spec.language_code)?;
}
// Generate README
Self::generate_locales_readme(spec, &locales_dir)?;
tracing::info!("Successfully generated locale files");
Ok(())
}
/// Generate Fluent translation file for a specific locale.
fn generate_locale(spec: &ProjectSpec, locales_dir: &Path, locale_code: &str) -> Result<()> {
let mut fluent_content = String::new();
fluent_content.push_str(&format!(
"# Fluent translations for {} ({})\n",
spec.name, locale_code
));
fluent_content.push_str("# https://projectfluent.org/\n\n");
// App-level strings
fluent_content.push_str("## Application\n\n");
fluent_content.push_str(&format!("app-name = {}\n", spec.name));
fluent_content.push_str(&format!("app-title = {} Configuration\n", spec.name));
fluent_content.push_str("app-description = Interactive configuration wizard\n\n");
// Feature-specific strings
for feature in &spec.domain_features {
fluent_content.push_str(&format!("## {} Feature\n\n", feature.name));
// Feature metadata
fluent_content.push_str(&format!("{}-title = ", feature.name));
if let Some(desc) = &feature.description {
fluent_content.push_str(&format!("{}\n", desc));
} else {
fluent_content.push_str(&format!(
"{} Configuration\n",
Self::capitalize_first(&feature.name)
));
}
// Field labels and help text
for field in &feature.fields {
// Field prompt
fluent_content.push_str(&format!(
"{}-{}-prompt = {}\n",
feature.name, field.name, field.prompt
));
// Field help text
if let Some(help) = &field.help {
fluent_content.push_str(&format!(
"{}-{}-help = {}\n",
feature.name, field.name, help
));
}
// Placeholder
if let Some(placeholder) = &field.placeholder {
fluent_content.push_str(&format!(
"{}-{}-placeholder = {}\n",
feature.name, field.name, placeholder
));
}
}
fluent_content.push('\n');
}
// Common UI strings
fluent_content.push_str("## Common UI Strings\n\n");
fluent_content.push_str("common-continue = Continue\n");
fluent_content.push_str("common-back = Back\n");
fluent_content.push_str("common-cancel = Cancel\n");
fluent_content.push_str("common-finish = Finish\n");
fluent_content.push_str("common-save = Save\n");
fluent_content.push_str("common-required = Required\n");
fluent_content.push_str("common-optional = Optional\n\n");
// Validation messages
fluent_content.push_str("## Validation Messages\n\n");
fluent_content.push_str("validation-required = This field is required\n");
fluent_content.push_str("validation-invalid = Invalid value\n");
fluent_content.push_str("validation-too-short = Value is too short\n");
fluent_content.push_str("validation-too-long = Value is too long\n");
fluent_content.push_str("validation-out-of-range = Value out of range\n\n");
// Success/error messages
fluent_content.push_str("## Messages\n\n");
fluent_content.push_str("success-saved = Configuration saved successfully\n");
fluent_content.push_str("error-load-failed = Failed to load configuration\n");
fluent_content.push_str("error-save-failed = Failed to save configuration\n");
// Write locale file
let locale_file = locales_dir.join(format!("{}.ftl", locale_code));
std::fs::write(&locale_file, fluent_content).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write {} locale file: {}",
locale_code, e
))
})?;
tracing::debug!("Generated {} locale file", locale_code);
Ok(())
}
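// Excerpt of the emitted .ftl file for a hypothetical `tracker` feature with
// a `bind_address` field (message bodies are assumptions):
//
//   ## tracker Feature
//
//   tracker-title = Tracker Configuration
//   tracker-bind_address-prompt = Bind address
//   tracker-bind_address-help = Address the tracker listens on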
/// Generate README.md for locales directory.
fn generate_locales_readme(spec: &ProjectSpec, locales_dir: &Path) -> Result<()> {
let mut content = String::new();
content.push_str(&format!("# Localization - {}\n\n", spec.name));
content.push_str(
"This directory contains Fluent translation files for internationalization (i18n).\n\n\
## Overview\n\n\
Fluent is a modern localization system designed for natural-sounding translations. \
See https://projectfluent.org/ for more information.\n\n\
## Files\n\n\
- `en-US.ftl` - English (United States) translations (default)\n",
);
for locale_spec in &spec.locales {
content.push_str(&format!(
"- `{}.ftl` - Translations for {}\n",
locale_spec.language_code, locale_spec.language_code
));
}
content.push_str(
"\n\
## Adding New Locales\n\n\
1. Copy `en-US.ftl` to `<locale-code>.ftl` (e.g., `es-ES.ftl` for Spanish)\n\
2. Translate all strings while preserving the message IDs\n\
3. Update the form configuration to detect and use the new locale\n\n\
## Message Structure\n\n\
Fluent messages follow this pattern:\n\n\
```fluent\n\
# Comments\n\
message-id = Translation text\n\
message-with-variable = Hello, { $name }!\n\
```\n\n\
## Usage in TypeDialog\n\n\
Enable i18n support in forms:\n\n\
```toml\n\
[form]\n\
name = \"config\"\n\
locale = \"en-US\" # Default locale\n\
locale_dir = \"locales\" # Path to .ftl files\n\
\n\
[[fields]]\n\
name = \"server_port\"\n\
prompt_i18n = \"server-port-prompt\" # References locale key\n\
```\n\n\
TypeDialog will load the appropriate translation file based on the user's locale.\n\n",
);
let readme_file = locales_dir.join("README.md");
std::fs::write(&readme_file, content).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write locales README: {}",
e
))
})?;
Ok(())
}
/// Capitalize first letter of a string.
fn capitalize_first(s: &str) -> String {
let mut chars = s.chars();
match chars.next() {
None => String::new(),
Some(first) => first.to_uppercase().collect::<String>() + chars.as_str(),
}
}
}
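#[cfg(test)]
mod tests {
use super::*;

// Minimal sanity check for the capitalization helper above; a sketch only,
// the file-writing paths are better covered by integration tests.
#[test]
fn capitalize_first_handles_empty_and_nonempty_input() {
assert_eq!(LocalesGenerator::capitalize_first(""), "");
assert_eq!(LocalesGenerator::capitalize_first("tracker"), "Tracker");
assert_eq!(LocalesGenerator::capitalize_first("T"), "T");
}
}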
View File
@ -2,14 +2,24 @@
pub mod constraint_generator;
pub mod defaults_generator;
pub mod documentation_generator;
pub mod form_composer_generator;
pub mod fragment_generator;
pub mod iac_template_generator;
pub mod locales_generator;
pub mod schema_generator;
pub mod script_generator;
pub mod validator_generator;
pub mod values_generator;
pub use constraint_generator::ConstraintGenerator;
pub use defaults_generator::DefaultsGenerator;
pub use documentation_generator::DocumentationGenerator;
pub use form_composer_generator::FormComposerGenerator;
pub use fragment_generator::FragmentGenerator;
pub use iac_template_generator::IacTemplateGenerator;
pub use locales_generator::LocalesGenerator;
pub use schema_generator::SchemaGenerator;
pub use script_generator::ScriptGenerator;
pub use validator_generator::ValidatorGenerator;
pub use values_generator::ValuesGenerator;
View File
@ -32,6 +32,9 @@ impl SchemaGenerator {
// Generate a main schema that imports all features
Self::generate_main_schema(spec, output_dir)?;
// Generate README.md explaining the schema system
Self::generate_schemas_readme(spec, &schemas_dir)?;
tracing::info!("Successfully generated schemas for project: {}", spec.name);
Ok(())
}
@ -46,27 +49,53 @@ impl SchemaGenerator {
let mut schema_content = String::new();
// Add file header and imports
// Add file header
schema_content.push_str(&format!(
"# Schema for {} feature\n# Generated for project: {}\n\n",
"# Schema for {} feature\n# Generated for project: {}\n",
feature.name, spec.name
));
// Define the feature record
schema_content.push_str(&format!("let {} = {{\n", feature.name));
if let Some(desc) = &feature.description {
schema_content.push_str(&format!("# {}\n", desc));
}
schema_content.push('\n');
// Add Nickel imports if feature has them
if !feature.nickel_imports.is_empty() {
schema_content.push_str("# Schema imports\n");
for import in &feature.nickel_imports {
schema_content.push_str(&format!(
"let {} = import \"{}\" in\n",
import.name, import.path
));
}
schema_content.push('\n');
}
// Define the feature type/schema
let type_name = Self::capitalize_first(&feature.name);
schema_content.push_str(&format!("# {} type schema\n", type_name));
if feature.use_type_contracts {
schema_content.push_str(&format!("{} = {{\n", type_name));
} else {
schema_content.push_str(&format!("let {} = {{\n", feature.name));
}
// Add fields to the record
for field in &feature.fields {
schema_content.push_str(&Self::generate_field_schema(field)?);
schema_content.push_str(&Self::generate_field_schema(field, feature)?);
}
schema_content.push_str("}\n\n");
// Add validators for fields with constraints
if let Some(constraints) = &feature.constraints {
for path in constraints.keys() {
schema_content.push_str(&format!("# Constraint for {}\n", path));
}
if feature.use_type_contracts {
schema_content.push_str("}\n\n");
// Export the type
schema_content.push_str(&format!("{{ {} }}\n", type_name));
} else {
schema_content.push_str("}\n\n");
// Export the schema
schema_content.push_str(&format!("{{ {} }}\n", feature.name));
}
// Write the schema file
@ -86,8 +115,8 @@ impl SchemaGenerator {
Ok(())
}
/// Generate Nickel schema syntax for a single field.
fn generate_field_schema(field: &ConfigField) -> Result<String> {
/// Generate Nickel schema syntax for a single field with type contracts.
fn generate_field_schema(field: &ConfigField, feature: &DomainFeature) -> Result<String> {
let mut field_def = String::new();
// Add field comment if help text exists
@ -95,20 +124,33 @@ impl SchemaGenerator {
field_def.push_str(&format!(" # {}\n", help));
}
// Field name and type
let nickel_type = Self::map_field_type_to_nickel(&field.field_type);
let required_marker = if field.required { "" } else { "?" };
// Field name
let optional_marker = if !field.required { " | optional" } else { "" };
// Check if this field references an imported schema
let nickel_type = if feature.use_type_contracts {
// Look for import that matches this field
let matching_import = feature
.nickel_imports
.iter()
.find(|imp| field.name.contains(&imp.name) || imp.name.contains(&field.name));
if let Some(import) = matching_import {
// Use imported type with contract: database | database_schema.Database
format!("{}.{}", import.name, Self::capitalize_first(&field.name))
} else {
Self::map_field_type_to_nickel(&field.field_type).to_string()
}
} else {
Self::map_field_type_to_nickel(&field.field_type).to_string()
};
// Generate field definition with type contract (same format regardless)
field_def.push_str(&format!(
" {}{} | {},\n",
field.name, required_marker, nickel_type
" {} | {}{},\n",
field.name, nickel_type, optional_marker
));
// Add default value comment if present
if let Some(default) = &field.default {
field_def.push_str(&format!(" # default: {}\n", default));
}
Ok(field_def)
}
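// For illustration: with type contracts enabled and a matching
// `database_schema` import, an optional `database` field is emitted as
// (names are assumptions):
//
//   database | database_schema.Database | optional,
//
// Without a matching import the primitive mapping below is used instead,
// e.g. `bind_address | String | optional,`.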
@ -120,10 +162,19 @@ impl SchemaGenerator {
FieldType::Password => "String",
FieldType::Confirm => "Bool",
FieldType::Select => "String",
FieldType::MultiSelect => "[String]",
FieldType::MultiSelect => "Array String",
FieldType::Editor => "String",
FieldType::Date => "String",
FieldType::RepeatingGroup => "[_]",
FieldType::RepeatingGroup => "Array Dyn", // Will be refined with type contracts
}
}
/// Capitalize first letter of a string.
fn capitalize_first(s: &str) -> String {
let mut chars = s.chars();
match chars.next() {
None => String::new(),
Some(first) => first.to_uppercase().collect::<String>() + chars.as_str(),
}
}
@ -217,4 +268,91 @@ impl SchemaGenerator {
Ok(infra_schema)
}
/// Generate README.md documenting the schema system.
fn generate_schemas_readme(spec: &ProjectSpec, schemas_dir: &Path) -> Result<()> {
let mut content = String::new();
content.push_str(&format!("# Nickel Schemas - {}\n\n", spec.name));
content.push_str(
"This directory contains Nickel type schemas for configuration validation.\n\n\
## Overview\n\n\
Schemas define the structure and types of configuration values. They use Nickel's \
type contracts to enforce correctness at evaluation time.\n\n\
## Schema Files\n\n",
);
for feature in &spec.domain_features {
let type_name = Self::capitalize_first(&feature.name);
content.push_str(&format!(
"### `{}.ncl` - {} Schema\n\n",
feature.name, type_name
));
if let Some(desc) = &feature.description {
content.push_str(&format!("{}\n\n", desc));
}
if !feature.nickel_imports.is_empty() {
content.push_str("**Imports:**\n");
for import in &feature.nickel_imports {
content.push_str(&format!("- `{}` from `{}`\n", import.name, import.path));
}
content.push('\n');
}
content.push_str(&format!("**Fields:** {}\n\n", feature.fields.len()));
if feature.use_type_contracts {
content.push_str("*This schema uses type contracts for validation.*\n\n");
}
}
content.push_str(
"## Usage\n\n\
### Importing Schemas\n\n\
```nickel\n\
let schemas = import \"./schemas/config.ncl\" in\n\
let my_config = {...} in\n\
my_config | schemas.Config\n\
```\n\n\
### Type Contracts\n\n\
Type contracts ensure values match expected types:\n\n\
```nickel\n\
{\n\
server | schemas.Server = {\n\
port | Number = 8080,\n\
},\n\
}\n\
```\n\n\
### Nested Contracts\n\n\
Schemas can reference imported types:\n\n\
```nickel\n\
let database_schema = import \"./database.ncl\" in\n\
{\n\
database | database_schema.Database | optional,\n\
}\n\
```\n\n\
### Array Type Contracts\n\n\
Arrays can have typed elements:\n\n\
```nickel\n\
udp_trackers | Array TrackerUdp | optional,\n\
```\n\n\
## Validation\n\n\
Type-check your configuration:\n\n\
```bash\n\
nickel typecheck schemas/config.ncl\n\
```\n\n",
);
let readme_file = schemas_dir.join("README.md");
std::fs::write(&readme_file, content).map_err(|e| crate::error::SchemaGenerationError {
feature_name: "readme".to_string(),
reason: format!("Failed to write schemas README: {}", e),
})?;
tracing::debug!("Generated schemas README: {}", readme_file.display());
Ok(())
}
}
View File
@ -2,14 +2,20 @@
use crate::error::Result;
use crate::models::ProjectSpec;
use crate::template::TemplateLoader;
use std::path::Path;
use tera::Context;
/// Generates orchestration scripts for provisioning.
pub struct ScriptGenerator;
impl ScriptGenerator {
/// Generate bash and nushell scripts for provisioning orchestration.
pub fn generate(spec: &ProjectSpec, output_dir: impl AsRef<Path>) -> Result<()> {
pub fn generate(
spec: &ProjectSpec,
output_dir: impl AsRef<Path>,
template_loader: &TemplateLoader,
) -> Result<()> {
let output_dir = output_dir.as_ref();
tracing::info!(
"Generating orchestration scripts for project: {}",
@ -25,126 +31,677 @@ impl ScriptGenerator {
))
})?;
// Generate bash scripts
Self::generate_bash_scripts(spec, &scripts_dir)?;
// Prepare template context
let mut context = Context::new();
context.insert("project_name", &spec.name);
context.insert("features", &spec.domain_features);
// Generate nushell scripts
Self::generate_nushell_scripts(spec, &scripts_dir)?;
// Generate main configuration scripts
Self::generate_configure_scripts(spec, &scripts_dir, template_loader, &context)?;
// Generate nickel conversion scripts
Self::generate_conversion_scripts(spec, &scripts_dir, template_loader, &context)?;
// Generate validation scripts
Self::generate_validation_scripts(spec, &scripts_dir, &context)?;
// Generate utility scripts
Self::generate_utility_scripts(spec, &scripts_dir, &context)?;
// Generate README
Self::generate_scripts_readme(spec, &scripts_dir)?;
tracing::info!("Successfully generated orchestration scripts");
Ok(())
}
/// Generate bash orchestration scripts.
fn generate_bash_scripts(spec: &ProjectSpec, scripts_dir: &Path) -> Result<()> {
// Config loading script
let config_script = format!(
"#!/bin/bash\n\
# Load and validate configuration for {}\n\
set -euo pipefail\n\n\
CONFIG_DIR=\"{{CONFIG_DIR:-.}}\"\n\
\n\
# Load configuration from JSON\n\
load_config() {{\n\
local config_file=\"$1\"\n\
if [[ ! -f \"$config_file\" ]]; then\n\
echo \"Error: Configuration file not found: $config_file\" >&2\n\
exit 1\n\
fi\n\
cat \"$config_file\"\n\
}}\n\
\n\
# Validate using Nickel\n\
validate_config() {{\n\
local config_file=\"$1\"\n\
nickel eval --raw \"$config_file\" > /dev/null 2>&1 || {{\n\
echo \"Error: Configuration validation failed for $config_file\" >&2\n\
exit 1\n\
}}\n\
}}\n\
\n\
# Main\n\
main() {{\n\
local config_file=\"${{CONFIG_DIR}}/config.json\"\n\
load_config \"$config_file\"\n\
validate_config \"$config_file\"\n\
echo \"Configuration loaded and validated successfully\"\n\
}}\n\
\n\
main \"$@\"\n",
spec.name
);
let config_script_path = scripts_dir.join("config.sh");
std::fs::write(&config_script_path, config_script).map_err(|e| {
/// Generate main configure scripts (configure.sh / configure.nu).
fn generate_configure_scripts(
_spec: &ProjectSpec,
scripts_dir: &Path,
template_loader: &TemplateLoader,
context: &Context,
) -> Result<()> {
// Generate configure.sh from template
let configure_sh = template_loader.render("scripts/configure.sh.tera", context)?;
let configure_sh_path = scripts_dir.join("configure.sh");
std::fs::write(&configure_sh_path, configure_sh).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write config script: {}",
"Failed to write configure.sh: {}",
e
))
})?;
Self::make_executable(&configure_sh_path);
// Make executable
#[cfg(unix)]
{
use std::os::unix::fs::PermissionsExt;
let perms = std::fs::Permissions::from_mode(0o755);
std::fs::set_permissions(&config_script_path, perms).ok();
}
// Generate configure.nu from template
let configure_nu = template_loader.render("scripts/configure.nu.tera", context)?;
let configure_nu_path = scripts_dir.join("configure.nu");
std::fs::write(&configure_nu_path, configure_nu).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write configure.nu: {}",
e
))
})?;
Self::make_executable(&configure_nu_path);
tracing::debug!("Generated bash config script");
tracing::debug!("Generated configure scripts");
Ok(())
}
/// Generate nushell orchestration scripts.
fn generate_nushell_scripts(spec: &ProjectSpec, scripts_dir: &Path) -> Result<()> {
// Config loading script in nushell
let config_script = format!(
/// Generate nickel conversion scripts (nickel-to-X, X-to-nickel).
fn generate_conversion_scripts(
spec: &ProjectSpec,
scripts_dir: &Path,
template_loader: &TemplateLoader,
context: &Context,
) -> Result<()> {
let conversions = vec![
(
"nickel-to-json",
"scripts/nickel-to-json.sh.tera",
"scripts/nickel-to-json.nu.tera",
),
(
"nickel-to-yaml",
"scripts/nickel-to-yaml.sh.tera",
"scripts/nickel-to-yaml.nu.tera",
),
(
"json-to-nickel",
"scripts/json-to-nickel.sh.tera",
"scripts/json-to-nickel.nu.tera",
),
];
for (name, sh_template, nu_template) in conversions {
// Generate bash version
let sh_content = template_loader.render(sh_template, context)?;
let sh_path = scripts_dir.join(format!("{}.sh", name));
std::fs::write(&sh_path, sh_content).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write {}.sh: {}",
name, e
))
})?;
Self::make_executable(&sh_path);
// Generate nushell version
let nu_content = template_loader.render(nu_template, context)?;
let nu_path = scripts_dir.join(format!("{}.nu", name));
std::fs::write(&nu_path, nu_content).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write {}.nu: {}",
name, e
))
})?;
Self::make_executable(&nu_path);
}
// Generate additional converters (yaml-to-nickel, toml-to-nickel)
Self::generate_yaml_to_nickel(spec, scripts_dir)?;
Self::generate_toml_to_nickel(spec, scripts_dir)?;
Self::generate_nickel_to_toml(spec, scripts_dir)?;
tracing::debug!("Generated conversion scripts");
Ok(())
}
/// Generate validation scripts (validate-nickel.sh, apply-validators.sh).
fn generate_validation_scripts(
spec: &ProjectSpec,
scripts_dir: &Path,
_context: &Context,
) -> Result<()> {
// validate-nickel.sh
let validate_sh = format!(
"#!/bin/bash\n\
# Validate Nickel configuration for {}\n\
set -euo pipefail\n\n\
NICKEL_FILE=\"${{1:-config.ncl}}\"\n\
\n\
if [[ ! -f \"$NICKEL_FILE\" ]]; then\n\
echo \"Error: Nickel file not found: $NICKEL_FILE\" >&2\n\
exit 1\n\
fi\n\
\n\
echo \"Validating $NICKEL_FILE...\"\n\
nickel typecheck \"$NICKEL_FILE\" || {{\n\
echo \"Error: Type checking failed\" >&2\n\
exit 1\n\
}}\n\
\n\
nickel eval --raw \"$NICKEL_FILE\" > /dev/null || {{\n\
echo \"Error: Evaluation failed\" >&2\n\
exit 1\n\
}}\n\
\n\
echo \"Validation successful\"\n",
spec.name
);
let validate_sh_path = scripts_dir.join("validate-nickel.sh");
std::fs::write(&validate_sh_path, validate_sh).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write validate-nickel.sh: {}",
e
))
})?;
Self::make_executable(&validate_sh_path);
// validate-nickel.nu
let validate_nu = format!(
"#!/usr/bin/env nu\n\
# Load and validate configuration for {} (nushell version)\n\n\
def load_config [config_file: path] {{\n\
if ($config_file | path exists) {{\n\
open $config_file\n\
}} else {{\n\
# Validate Nickel configuration for {} (nushell version)\n\n\
def main [nickel_file: path = \"config.ncl\"] {{\n\
if not ($nickel_file | path exists) {{\n\
error make {{\n\
msg: $\"Configuration file not found: ($config_file)\"\n\
msg: $\"Nickel file not found: ($nickel_file)\"\n\
}}\n\
}}\n\
}}\n\
\n\
def validate_config [config_file: path] {{\n\
let config = (load_config $config_file)\n\
# TODO: Validate against Nickel schema\n\
$config\n\
}}\n\
\n\
def main [--config_dir: path = \".\"] {{\n\
let config_file = ($config_dir | path join config.json)\n\
let config = (validate_config $config_file)\n\
print $\"Configuration loaded: ($config_file)\"\n\
$config\n\
}}\n\
\n\
main $nu.env\n",
\n\
print $\"Validating ($nickel_file)...\"\n\
\n\
# Type check\n\
let typecheck = (nickel typecheck $nickel_file | complete)\n\
if $typecheck.exit_code != 0 {{\n\
error make {{\n\
msg: \"Type checking failed\"\n\
}}\n\
}}\n\
\n\
# Evaluate\n\
let eval = (nickel eval --raw $nickel_file | complete)\n\
if $eval.exit_code != 0 {{\n\
error make {{\n\
msg: \"Evaluation failed\"\n\
}}\n\
}}\n\
\n\
print \"Validation successful\"\n\
}}\n",
spec.name
);
let config_script_path = scripts_dir.join("config.nu");
std::fs::write(&config_script_path, config_script).map_err(|e| {
let validate_nu_path = scripts_dir.join("validate-nickel.nu");
std::fs::write(&validate_nu_path, validate_nu).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write nushell config script: {}",
"Failed to write validate-nickel.nu: {}",
e
))
})?;
Self::make_executable(&validate_nu_path);
// apply-validators.sh
let apply_validators_sh = format!(
"#!/bin/bash\n\
# Apply Nickel validators to configuration for {}\n\
set -euo pipefail\n\n\
CONFIG_FILE=\"${{1:-config.ncl}}\"\n\
VALIDATORS_DIR=\"${{VALIDATORS_DIR:-validators}}\"\n\
\n\
echo \"Applying validators from $VALIDATORS_DIR to $CONFIG_FILE...\"\n\
\n\
# Import validators and merge with config\n\
nickel eval --raw <<EOF\n\
let validators = import \"$VALIDATORS_DIR/config.ncl\" in\n\
let config = import \"$CONFIG_FILE\" in\n\
validators.validate config\n\
EOF\n\
\n\
echo \"Validators applied successfully\"\n",
spec.name
);
let apply_validators_sh_path = scripts_dir.join("apply-validators.sh");
std::fs::write(&apply_validators_sh_path, apply_validators_sh).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write apply-validators.sh: {}",
e
))
})?;
Self::make_executable(&apply_validators_sh_path);
// apply-validators.nu
let apply_validators_nu = format!(
"#!/usr/bin/env nu\n\
# Apply Nickel validators to configuration for {} (nushell version)\n\n\
def main [\n\
config_file: path = \"config.ncl\",\n\
--validators_dir: path = \"validators\"\n\
] {{\n\
print $\"Applying validators from ($validators_dir) to ($config_file)...\"\n\
\n\
let nickel_code = $\"let validators = import \\\"($validators_dir)/config.ncl\\\" in\\nlet config = import \\\"($config_file)\\\" in\\nvalidators.validate config\"\n\
\n\
let result = ($nickel_code | nickel eval --raw | complete)\n\
if $result.exit_code != 0 {{\n\
error make {{\n\
msg: $\"Validation failed: ($result.stderr)\"\n\
}}\n\
}}\n\
\n\
print \"Validators applied successfully\"\n\
}}\n",
spec.name
);
let apply_validators_nu_path = scripts_dir.join("apply-validators.nu");
std::fs::write(&apply_validators_nu_path, apply_validators_nu).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write apply-validators.nu: {}",
e
))
})?;
Self::make_executable(&apply_validators_nu_path);
tracing::debug!("Generated validation scripts");
Ok(())
}
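// Typical invocations of the emitted validation scripts (paths illustrative):
//
//   ./scripts/validate-nickel.sh config.ncl
//   nu scripts/validate-nickel.nu config.ncl
//   ./scripts/apply-validators.sh config.ncl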
/// Generate utility scripts (merge-configs.sh, export-final.sh).
fn generate_utility_scripts(
spec: &ProjectSpec,
scripts_dir: &Path,
_context: &Context,
) -> Result<()> {
// merge-configs.sh
let merge_configs_sh = format!(
"#!/bin/bash\n\
# Merge multiple Nickel configurations for {}\n\
set -euo pipefail\n\n\
if [[ $# -lt 2 ]]; then\n\
echo \"Usage: $0 <base.ncl> <override.ncl> [output.ncl]\" >&2\n\
exit 1\n\
fi\n\
\n\
BASE_FILE=\"$1\"\n\
OVERRIDE_FILE=\"$2\"\n\
OUTPUT_FILE=\"${{3:-merged.ncl}}\"\n\
\n\
echo \"Merging $OVERRIDE_FILE into $BASE_FILE -> $OUTPUT_FILE\"\n\
\n\
nickel eval --raw <<EOF > \"$OUTPUT_FILE\"\n\
let base = import \"$BASE_FILE\" in\n\
let override = import \"$OVERRIDE_FILE\" in\n\
std.record.merge base override\n\
EOF\n\
\n\
echo \"Merge complete: $OUTPUT_FILE\"\n",
spec.name
);
let merge_configs_sh_path = scripts_dir.join("merge-configs.sh");
std::fs::write(&merge_configs_sh_path, merge_configs_sh).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write merge-configs.sh: {}",
e
))
})?;
Self::make_executable(&merge_configs_sh_path);
// merge-configs.nu
let merge_configs_nu = format!(
"#!/usr/bin/env nu\n\
# Merge multiple Nickel configurations for {} (nushell version)\n\n\
def main [\n\
base_file: path,\n\
override_file: path,\n\
output_file: path = \"merged.ncl\"\n\
] {{\n\
print $\"Merging ($override_file) into ($base_file) -> ($output_file)\"\n\
\n\
let nickel_code = $\"let base = import \\\"($base_file)\\\" in\\nlet override = import \\\"($override_file)\\\" in\\nstd.record.merge base override\"\n\
\n\
$nickel_code | nickel eval --raw | save -f $output_file\n\
\n\
print $\"Merge complete: ($output_file)\"\n\
}}\n",
spec.name
);
let merge_configs_nu_path = scripts_dir.join("merge-configs.nu");
std::fs::write(&merge_configs_nu_path, merge_configs_nu).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write merge-configs.nu: {}",
e
))
})?;
Self::make_executable(&merge_configs_nu_path);
// export-final.sh
let export_final_sh = format!(
"#!/bin/bash\n\
# Export final configuration for deployment for {}\n\
set -euo pipefail\n\n\
NICKEL_FILE=\"${{1:-config.ncl}}\"\n\
FORMAT=\"${{2:-json}}\"\n\
OUTPUT_FILE=\"${{3:-config.$FORMAT}}\"\n\
\n\
echo \"Exporting $NICKEL_FILE to $OUTPUT_FILE ($FORMAT format)\"\n\
\n\
case \"$FORMAT\" in\n\
json)\n\
nickel export --format json \"$NICKEL_FILE\" > \"$OUTPUT_FILE\"\n\
;;\n\
yaml)\n\
nickel export --format yaml \"$NICKEL_FILE\" > \"$OUTPUT_FILE\"\n\
;;\n\
toml)\n\
nickel export --format toml \"$NICKEL_FILE\" > \"$OUTPUT_FILE\"\n\
;;\n\
*)\n\
echo \"Error: Unsupported format: $FORMAT\" >&2\n\
echo \"Supported: json, yaml, toml\" >&2\n\
exit 1\n\
;;\n\
esac\n\
\n\
echo \"Export complete: $OUTPUT_FILE\"\n",
spec.name
);
let export_final_sh_path = scripts_dir.join("export-final.sh");
std::fs::write(&export_final_sh_path, export_final_sh).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write export-final.sh: {}",
e
))
})?;
Self::make_executable(&export_final_sh_path);
// export-final.nu
let export_final_nu = format!(
"#!/usr/bin/env nu\n\
# Export final configuration for deployment for {} (nushell version)\n\n\
def main [\n\
nickel_file: path = \"config.ncl\",\n\
format: string = \"json\",\n\
output_file?: path\n\
] {{\n\
let out = if ($output_file | is-empty) {{ $\"config.($format)\" }} else {{ $output_file }}\n\
\n\
print $\"Exporting ($nickel_file) to ($out) (($format) format)\"\n\
\n\
match $format {{\n\
\"json\" => {{ nickel export --format json $nickel_file | save -f $out }},\n\
\"yaml\" => {{ nickel export --format yaml $nickel_file | save -f $out }},\n\
\"toml\" => {{ nickel export --format toml $nickel_file | save -f $out }},\n\
_ => {{\n\
error make {{\n\
msg: $\"Unsupported format: ($format). Supported: json, yaml, toml\"\n\
}}\n\
}}\n\
}}\n\
\n\
print $\"Export complete: ($out)\"\n\
}}\n",
spec.name
);
let export_final_nu_path = scripts_dir.join("export-final.nu");
std::fs::write(&export_final_nu_path, export_final_nu).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write export-final.nu: {}",
e
))
})?;
Self::make_executable(&export_final_nu_path);
tracing::debug!("Generated utility scripts");
Ok(())
}
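// Illustrative behaviour of the emitted merge-configs scripts, assuming
// `std.record.merge` prefers right-hand (override) values:
//
//   base.ncl:     { port = 8080, host = "0.0.0.0" }
//   override.ncl: { port = 9090 }
//   merged.ncl:   { host = "0.0.0.0", port = 9090 }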
/// Generate yaml-to-nickel converter.
fn generate_yaml_to_nickel(_spec: &ProjectSpec, scripts_dir: &Path) -> Result<()> {
let yaml_to_nickel_sh = "#!/bin/bash\n\
# Convert YAML to Nickel\n\
set -euo pipefail\n\n\
YAML_FILE=\"${1}\"\n\
NICKEL_FILE=\"${2:-${YAML_FILE%.yaml}.ncl}\"\n\
\n\
if [[ ! -f \"$YAML_FILE\" ]]; then\n\
echo \"Error: YAML file not found: $YAML_FILE\" >&2\n\
exit 1\n\
fi\n\
\n\
# Convert via JSON intermediary\n\
TEMP_JSON=\"$(mktemp)\"\n\
yq -o json \"$YAML_FILE\" > \"$TEMP_JSON\"\n\
./json-to-nickel.sh \"$TEMP_JSON\" \"$NICKEL_FILE\"\n\
rm -f \"$TEMP_JSON\"\n";
let yaml_to_nickel_sh_path = scripts_dir.join("yaml-to-nickel.sh");
std::fs::write(&yaml_to_nickel_sh_path, yaml_to_nickel_sh).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write yaml-to-nickel.sh: {}",
e
))
})?;
Self::make_executable(&yaml_to_nickel_sh_path);
let yaml_to_nickel_nu = "#!/usr/bin/env nu\n\
# Convert YAML to Nickel (nushell version)\n\n\
def main [yaml_file: path, nickel_file?: path] {\n\
let out = if ($nickel_file | is-empty) { ($yaml_file | path parse | update extension ncl | path join) } else { $nickel_file }\n\
\n\
if not ($yaml_file | path exists) {\n\
error make { msg: $\"YAML file not found: ($yaml_file)\" }\n\
}\n\
\n\
# Convert via JSON intermediary\n\
let json_content = (open $yaml_file | to json)\n\
$json_content | save -f temp.json\n\
nu json-to-nickel.nu temp.json $out\n\
rm temp.json\n\
}\n";
let yaml_to_nickel_nu_path = scripts_dir.join("yaml-to-nickel.nu");
std::fs::write(&yaml_to_nickel_nu_path, yaml_to_nickel_nu).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write yaml-to-nickel.nu: {}",
e
))
})?;
Self::make_executable(&yaml_to_nickel_nu_path);
Ok(())
}
/// Generate toml-to-nickel converter.
fn generate_toml_to_nickel(_spec: &ProjectSpec, scripts_dir: &Path) -> Result<()> {
let toml_to_nickel_sh = "#!/bin/bash\n\
# Convert TOML to Nickel\n\
set -euo pipefail\n\n\
TOML_FILE=\"${1}\"\n\
NICKEL_FILE=\"${2:-${TOML_FILE%.toml}.ncl}\"\n\
\n\
if [[ ! -f \"$TOML_FILE\" ]]; then\n\
echo \"Error: TOML file not found: $TOML_FILE\" >&2\n\
exit 1\n\
fi\n\
\n\
# Convert via JSON intermediary\n\
TEMP_JSON=\"$(mktemp)\"\n\
cat \"$TOML_FILE\" | toml2json > \"$TEMP_JSON\"\n\
./json-to-nickel.sh \"$TEMP_JSON\" \"$NICKEL_FILE\"\n\
rm -f \"$TEMP_JSON\"\n";
let toml_to_nickel_sh_path = scripts_dir.join("toml-to-nickel.sh");
std::fs::write(&toml_to_nickel_sh_path, toml_to_nickel_sh).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write toml-to-nickel.sh: {}",
e
))
})?;
Self::make_executable(&toml_to_nickel_sh_path);
let toml_to_nickel_nu = "#!/usr/bin/env nu\n\
# Convert TOML to Nickel (nushell version)\n\n\
def main [toml_file: path, nickel_file?: path] {\n\
let out = if ($nickel_file | is-empty) { ($toml_file | path parse | update extension ncl | path join) } else { $nickel_file }\n\
\n\
if not ($toml_file | path exists) {\n\
error make { msg: $\"TOML file not found: ($toml_file)\" }\n\
}\n\
\n\
# Convert via JSON intermediary\n\
let json_content = (open $toml_file | to json)\n\
$json_content | save -f temp.json\n\
nu json-to-nickel.nu temp.json $out\n\
rm temp.json\n\
}\n";
let toml_to_nickel_nu_path = scripts_dir.join("toml-to-nickel.nu");
std::fs::write(&toml_to_nickel_nu_path, toml_to_nickel_nu).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write toml-to-nickel.nu: {}",
e
))
})?;
Self::make_executable(&toml_to_nickel_nu_path);
Ok(())
}
/// Generate nickel-to-toml converter.
fn generate_nickel_to_toml(_spec: &ProjectSpec, scripts_dir: &Path) -> Result<()> {
let nickel_to_toml_sh = "#!/bin/bash\n\
# Convert Nickel to TOML\n\
set -euo pipefail\n\n\
NICKEL_FILE=\"${1}\"\n\
TOML_FILE=\"${2:-${NICKEL_FILE%.ncl}.toml}\"\n\
\n\
if [[ ! -f \"$NICKEL_FILE\" ]]; then\n\
echo \"Error: Nickel file not found: $NICKEL_FILE\" >&2\n\
exit 1\n\
fi\n\
\n\
nickel export --format toml \"$NICKEL_FILE\" > \"$TOML_FILE\"\n\
echo \"Converted $NICKEL_FILE to $TOML_FILE\"\n";
let nickel_to_toml_sh_path = scripts_dir.join("nickel-to-toml.sh");
std::fs::write(&nickel_to_toml_sh_path, nickel_to_toml_sh).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write nickel-to-toml.sh: {}",
e
))
})?;
Self::make_executable(&nickel_to_toml_sh_path);
let nickel_to_toml_nu = "#!/usr/bin/env nu\n\
# Convert Nickel to TOML (nushell version)\n\n\
def main [nickel_file: path, toml_file?: path] {\n\
let out = if ($toml_file | is-empty) { ($nickel_file | path parse | update extension toml | path join) } else { $toml_file }\n\
\n\
if not ($nickel_file | path exists) {\n\
error make { msg: $\"Nickel file not found: ($nickel_file)\" }\n\
}\n\
\n\
nickel export --format toml $nickel_file | save -f $out\n\
print $\"Converted ($nickel_file) to ($out)\"\n\
}\n";
let nickel_to_toml_nu_path = scripts_dir.join("nickel-to-toml.nu");
std::fs::write(&nickel_to_toml_nu_path, nickel_to_toml_nu).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write nickel-to-toml.nu: {}",
e
))
})?;
Self::make_executable(&nickel_to_toml_nu_path);
Ok(())
}
/// Generate README.md for scripts directory.
fn generate_scripts_readme(spec: &ProjectSpec, scripts_dir: &Path) -> Result<()> {
let mut content = String::new();
content.push_str(&format!("# Orchestration Scripts - {}\n\n", spec.name));
content.push_str(
"This directory contains bash and nushell scripts for provisioning orchestration.\n\n\
## Overview\n\n\
Scripts implement the nickel-roundtrip workflow: TypeDialog → JSON → Nickel → Validation → Export.\n\n\
## Script Categories\n\n\
### Main Configuration\n\n\
- `configure.sh` / `configure.nu` - Main nickel-roundtrip orchestration script\n\n\
### Format Conversion\n\n\
**From Nickel:**\n\
- `nickel-to-json.sh` / `.nu` - Convert Nickel to JSON\n\
- `nickel-to-yaml.sh` / `.nu` - Convert Nickel to YAML\n\
- `nickel-to-toml.sh` / `.nu` - Convert Nickel to TOML\n\n\
**To Nickel:**\n\
- `json-to-nickel.sh` / `.nu` - Convert JSON to Nickel\n\
- `yaml-to-nickel.sh` / `.nu` - Convert YAML to Nickel\n\
- `toml-to-nickel.sh` / `.nu` - Convert TOML to Nickel\n\n\
### Validation\n\n\
- `validate-nickel.sh` / `.nu` - Type-check and evaluate Nickel files\n\
- `apply-validators.sh` / `.nu` - Apply custom validators to configuration\n\n\
### Utilities\n\n\
- `merge-configs.sh` / `.nu` - Merge multiple Nickel configurations\n\
- `export-final.sh` / `.nu` - Export final configuration for deployment\n\n\
## Nickel-Roundtrip Workflow\n\n\
Complete configuration workflow:\n\n\
```bash\n\
# 1. Collect configuration via TypeDialog\n\
typedialog --form fragments/complete.toml --output user-input.json\n\
\n\
# 2. Convert JSON to Nickel\n\
./scripts/json-to-nickel.sh user-input.json config.ncl\n\
\n\
# 3. Validate against schemas\n\
./scripts/validate-nickel.sh config.ncl\n\
\n\
# 4. Apply custom validators\n\
./scripts/apply-validators.sh config.ncl\n\
\n\
# 5. Merge with defaults\n\
./scripts/merge-configs.sh defaults/config.ncl config.ncl final.ncl\n\
\n\
# 6. Export to deployment format\n\
./scripts/export-final.sh final.ncl yaml deployment.yaml\n\
```\n\n\
Or use the main orchestration script:\n\n\
```bash\n\
./scripts/configure.sh\n\
```\n\n\
## Nushell Versions\n\n\
All scripts have `.nu` equivalents with better error handling:\n\n\
```bash\n\
nu scripts/configure.nu\n\
```\n\n\
## Requirements\n\n\
- **nickel** - Nickel language runtime\n\
- **typedialog** - For interactive forms (CLI backend)\n\
- **jq** - JSON processing (for bash scripts)\n\
- **yq** - YAML processing (for YAML conversions)\n\
- **toml2json** - TOML to JSON conversion\n\
- **nushell** - For `.nu` script versions (optional)\n\n\
## Error Handling\n\n\
All scripts use `set -euo pipefail` (bash) or structured error handling (nushell) to fail fast on errors.\n\n\
## Permissions\n\n\
Scripts are automatically marked executable. Manual override:\n\n\
```bash\n\
chmod +x scripts/*.sh scripts/*.nu\n\
```\n\n",
);
let readme_file = scripts_dir.join("README.md");
std::fs::write(&readme_file, content).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write scripts README: {}",
e
))
})?;
tracing::debug!("Generated scripts README");
Ok(())
}
/// Make a script file executable on Unix systems.
#[cfg(unix)]
fn make_executable(path: &Path) {
use std::os::unix::fs::PermissionsExt;
if let Ok(metadata) = std::fs::metadata(path) {
let mut perms = metadata.permissions();
perms.set_mode(0o755);
std::fs::set_permissions(path, perms).ok();
}
}
/// No-op on non-Unix systems.
#[cfg(not(unix))]
fn make_executable(_path: &Path) {}
}


@ -1,6 +1,7 @@
//! Validator generator: produces Nickel validators from constraints.
//! Validator generator: produces Nickel validators using templates.
use crate::error::Result;
use crate::models::project_spec::ValidatorType;
use crate::models::{FieldType, ProjectSpec};
use std::path::Path;
@ -22,133 +23,395 @@ impl ValidatorGenerator {
))
})?;
// Generate validators for each feature
for feature in &spec.domain_features {
    let mut validator_content = String::new();
    validator_content.push_str(&format!(
        "# Validators for {} feature\n# Generated for project: {}\n\n",
        feature.name, spec.name
    ));
    // Add field-specific validators
    for field in &feature.fields {
        validator_content.push_str(&Self::generate_field_validator(field)?);
    }
    // Write validator file
    let validator_file = validators_dir.join(format!("{}.ncl", feature.name));
    std::fs::write(&validator_file, validator_content).map_err(|e| {
        crate::error::ProvisioningGenError::Other(format!(
            "Failed to write validator file: {}",
            e
        ))
    })?;
    tracing::debug!("Generated validator for feature: {}", feature.name);
}
// Always generate common.ncl with reusable validators
Self::generate_common_validators(&validators_dir)?;
// Generate feature-specific validators
for feature in &spec.domain_features {
    if !feature.custom_validators.is_empty() {
        Self::generate_custom_validators(spec, feature, &validators_dir)?;
    } else {
        Self::generate_basic_validators(spec, feature, &validators_dir)?;
    }
}
// Generate main validators config
Self::generate_main_validators(spec, &validators_dir)?;
// Generate README
Self::generate_validators_readme(spec, &validators_dir)?;
tracing::info!("Successfully generated validators");
Ok(())
}
/// Generate validator function for a single field.
fn generate_field_validator(field: &crate::models::ConfigField) -> Result<String> {
    let mut validator = String::new();
    validator.push_str(&format!("# Validator for field: {}\n", field.name));
    match field.field_type {
        FieldType::Text => {
            validator.push_str(&format!("let validate_{} = fun value => (\n", field.name));
            validator.push_str(" (std.is_string value) &&\n");
            if let Some(min) = field.min {
                validator.push_str(&format!(" ((std.string.length value) >= {}) &&\n", min));
            }
            if let Some(max) = field.max {
                validator.push_str(&format!(" ((std.string.length value) <= {})\n", max));
            } else {
                validator.push_str(" true\n");
            }
            validator.push_str(")\n\n");
        }
        FieldType::Number => {
            validator.push_str(&format!("let validate_{} = fun value => (\n", field.name));
            validator.push_str(" (std.is_number value) &&\n");
            if let Some(min) = field.min {
                validator.push_str(&format!(" (value >= {}) &&\n", min));
            }
            if let Some(max) = field.max {
                validator.push_str(&format!(" (value <= {})\n", max));
            } else {
                validator.push_str(" true\n");
            }
            validator.push_str(")\n\n");
        }
        FieldType::Password => {
            validator.push_str(&format!("let validate_{} = fun value => (\n", field.name));
            validator.push_str(" (std.is_string value) &&\n");
            validator.push_str(" ((std.string.length value) >= 8) # Minimum password length\n");
            validator.push_str(")\n\n");
        }
        FieldType::Confirm => {
            validator.push_str(&format!(
                "let validate_{} = fun value => std.is_bool value\n\n",
                field.name
            ));
        }
        FieldType::Select | FieldType::MultiSelect => {
            if !field.options.is_empty() {
                validator.push_str(&format!("let validate_{} = fun value => (\n", field.name));
                validator.push_str(" let valid_options = [");
                let options_str = field
                    .options
                    .iter()
                    .map(|opt| format!("\"{}\"", opt))
                    .collect::<Vec<_>>()
                    .join(", ");
                validator.push_str(&options_str);
                validator.push_str("] in\n");
                validator.push_str(" std.arrays.elem value valid_options\n");
                validator.push_str(")\n\n");
            }
        }
        FieldType::RepeatingGroup => {
            validator.push_str(&format!("let validate_{} = fun value => (\n", field.name));
            validator.push_str(" (std.is_array value) &&\n");
            if let Some(min) = field.min {
                validator.push_str(&format!(" ((std.array.length value) >= {}) &&\n", min));
            }
            if let Some(max) = field.max {
                validator.push_str(&format!(" ((std.array.length value) <= {})\n", max));
            } else {
                validator.push_str(" true\n");
            }
            validator.push_str(")\n\n");
        }
        _ => {
            validator.push_str(&format!(
                "let validate_{} = fun value => true # No specific validation\n\n",
                field.name
            ));
        }
    }
    Ok(validator)
}
/// Generate common.ncl with reusable validation contracts.
fn generate_common_validators(validators_dir: &Path) -> Result<()> {
    let content = r#"# Common validators
# Reusable validation predicates and type contracts
{
  # Port number validation (1-65535)
  ValidPort = fun label =>
    std.contract.from_predicate (fun port =>
      std.is_number port &&
      port >= 1 &&
      port <= 65535
    ) {
      label = label,
      message = "must be a valid port number (1-65535)",
    },
  # Non-empty string validation
  NonEmptyString = fun label =>
    std.contract.from_predicate (fun s =>
      std.is_string s &&
      std.string.length s > 0
    ) {
      label = label,
      message = "must be a non-empty string",
    },
  # Valid bind address format (IP:PORT)
  ValidBindAddress = fun label =>
    std.contract.from_predicate (fun addr =>
      std.is_string addr &&
      std.string.contains ":" addr &&
      let parts = std.string.split ":" addr in
      std.array.length parts == 2
    ) {
      label = label,
      message = "must be a valid bind address (IP:PORT format)",
    },
  # Valid URL format
  ValidUrl = fun label =>
    std.contract.from_predicate (fun url =>
      std.is_string url &&
      (std.string.is_match "^https?://" url)
    ) {
      label = label,
      message = "must be a valid HTTP/HTTPS URL",
    },
  # Positive number validation
  PositiveNumber = fun label =>
    std.contract.from_predicate (fun n =>
      std.is_number n && n > 0
    ) {
      label = label,
      message = "must be a positive number",
    },
  # Non-negative number validation
  NonNegativeNumber = fun label =>
    std.contract.from_predicate (fun n =>
      std.is_number n && n >= 0
    ) {
      label = label,
      message = "must be a non-negative number",
    },
  # Range validation
  Range = fun min => fun max => fun label =>
    std.contract.from_predicate (fun n =>
      std.is_number n &&
      n >= min &&
      n <= max
    ) {
      label = label,
      message = "must be between %{std.to_string min} and %{std.to_string max}",
    },
  # String pattern matching
  MatchesPattern = fun pattern => fun label =>
    std.contract.from_predicate (fun s =>
      std.is_string s &&
      std.string.is_match pattern s
    ) {
      label = label,
      message = "must match pattern: %{pattern}",
    },
  # Enum validation
  OneOf = fun allowed => fun label =>
    std.contract.from_predicate (fun value =>
      std.array.any (fun v => v == value) allowed
    ) {
      label = label,
      message = "must be one of: %{std.serialize 'Json allowed}",
    },
  # Array length validation
  ArrayLength = fun min => fun max => fun label =>
    std.contract.from_predicate (fun arr =>
      std.is_array arr &&
      let len = std.array.length arr in
      len >= min && len <= max
    ) {
      label = label,
      message = "array length must be between %{std.to_string min} and %{std.to_string max}",
    },
}
"#;
    let common_file = validators_dir.join("common.ncl");
    std::fs::write(&common_file, content).map_err(|e| {
        crate::error::ProvisioningGenError::Other(format!(
            "Failed to write common validators: {}",
            e
        ))
    })?;
    tracing::debug!("Generated common validators");
    Ok(())
}
/// Generate custom validators for features with custom validator specs.
fn generate_custom_validators(
    spec: &ProjectSpec,
    feature: &crate::models::DomainFeature,
    validators_dir: &Path,
) -> Result<()> {
    let mut content = String::new();
    content.push_str(&format!(
        "# Custom validators for {}\n# Generated for project: {}\n\n",
        feature.name, spec.name
    ));
    content.push_str("let common = import \"./common.ncl\" in\n\n{\n");
    // Generate each custom validator
    for validator in &feature.custom_validators {
        content.push_str(&format!(
            " # {}\n",
            validator
                .description
                .as_ref()
                .unwrap_or(&format!("Validator for {}", validator.name))
        ));
        match validator.validator_type {
            ValidatorType::Range => {
                content.push_str(&format!(
                    " {} = common.Range 0 100 \"{}\",\n\n",
                    validator.name, validator.name
                ));
            }
            ValidatorType::Pattern => {
                content.push_str(&format!(
                    " {} = common.MatchesPattern \".*\" \"{}\",\n\n",
                    validator.name, validator.name
                ));
            }
            ValidatorType::ArrayUniqueness => {
                content.push_str(&format!(
                    " {} = fun label =>\n std.contract.from_predicate (fun arr =>\n std.is_array arr &&\n let values = std.array.map (fun item => item.id) arr in\n std.array.length values == std.array.length (std.array.sort values)\n ) {{\n label = label,\n message = \"array items must have unique values\",\n }},\n\n",
                    validator.name
                ));
            }
            ValidatorType::Composite => {
                content.push_str(&format!(
                    " {} = fun label =>\n std.contract.from_predicate (fun value =>\n true # Composite validation\n ) {{\n label = label,\n message = \"composite validation failed\",\n }},\n\n",
                    validator.name
                ));
            }
            ValidatorType::CustomPredicate => {
                content.push_str(&format!(
                    " {} = fun label =>\n std.contract.from_predicate (fun value =>\n true # Custom predicate\n ) {{\n label = label,\n message = \"{}\",\n }},\n\n",
                    validator.name,
                    validator.description.as_ref().unwrap_or(&"validation failed".to_string())
                ));
            }
        }
    }
    content.push_str(" # Master validation function\n");
    content.push_str(" validate = fun config => config,\n");
    content.push_str("}\n");
    let validator_file = validators_dir.join(format!("{}.ncl", feature.name));
    std::fs::write(&validator_file, content).map_err(|e| {
        crate::error::ProvisioningGenError::Other(format!(
            "Failed to write custom validators: {}",
            e
        ))
    })?;
    tracing::debug!("Generated custom validators for: {}", feature.name);
    Ok(())
}
/// Generate basic validators for features without custom validators.
fn generate_basic_validators(
    spec: &ProjectSpec,
    feature: &crate::models::DomainFeature,
    validators_dir: &Path,
) -> Result<()> {
    let mut content = String::new();
    content.push_str(&format!(
        "# Validators for {} feature\n# Generated for project: {}\n\n",
        feature.name, spec.name
    ));
    content.push_str("let common = import \"./common.ncl\" in\n\n{\n");
    // Generate field validators
    for field in &feature.fields {
        if let Some(validator_fn) = Self::get_common_validator_for_field(field) {
            content.push_str(&format!(
                " # {} validator\n validate_{} = {},\n\n",
                field.name, field.name, validator_fn
            ));
        }
    }
    content.push_str(" # Validation function\n");
    content.push_str(" validate = fun config => config,\n");
    content.push_str("}\n");
    let validator_file = validators_dir.join(format!("{}.ncl", feature.name));
    std::fs::write(&validator_file, content).map_err(|e| {
        crate::error::ProvisioningGenError::Other(format!("Failed to write validators: {}", e))
    })?;
    Ok(())
}
/// Get appropriate common validator for a field.
fn get_common_validator_for_field(field: &crate::models::ConfigField) -> Option<String> {
match field.field_type {
FieldType::Number => {
if field.min.is_some() && field.max.is_some() {
Some(format!(
"common.Range {} {} \"{}\"",
field.min.unwrap(),
field.max.unwrap(),
field.name
))
} else if field.min == Some(0) {
Some(format!("common.NonNegativeNumber \"{}\"", field.name))
} else if field.min == Some(1) {
Some(format!("common.PositiveNumber \"{}\"", field.name))
} else {
None
}
}
FieldType::Text => {
if field.name.contains("address") && field.name.contains("bind") {
Some(format!("common.ValidBindAddress \"{}\"", field.name))
} else if field.name.contains("url") {
Some(format!("common.ValidUrl \"{}\"", field.name))
} else if !field.required {
None
} else {
Some(format!("common.NonEmptyString \"{}\"", field.name))
}
}
FieldType::Password => Some(format!("common.NonEmptyString \"{}\"", field.name)),
_ => None,
}
}
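A quick sketch of what this heuristic yields — not part of the diff, and it assumes `ConfigField` exposes public fields plus a `Default` impl (adjust the construction if the real model differs):

```rust
#[cfg(test)]
mod heuristic_sketch {
    use super::*;

    // Sketch only: ConfigField construction is assumed; the assertion
    // mirrors the Number arm of get_common_validator_for_field above.
    #[test]
    fn number_field_with_both_bounds_maps_to_range() {
        let field = crate::models::ConfigField {
            name: "max_connections".into(),
            field_type: FieldType::Number,
            min: Some(1),
            max: Some(1000),
            ..Default::default()
        };
        assert_eq!(
            ValidatorGenerator::get_common_validator_for_field(&field),
            Some("common.Range 1 1000 \"max_connections\"".to_string())
        );
    }
}
```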
/// Generate main validators.ncl.
fn generate_main_validators(spec: &ProjectSpec, validators_dir: &Path) -> Result<()> {
let mut content = String::new();
content.push_str(&format!(
"# Main validators configuration for {}\n\n",
spec.name
));
content.push_str("let common = import \"./common.ncl\" in\n");
for feature in &spec.domain_features {
content.push_str(&format!(
"let {} = import \"./{}.ncl\" in\n",
feature.name, feature.name
));
}
content.push_str("\n{\n common,\n");
for feature in &spec.domain_features {
content.push_str(&format!(" {},\n", feature.name));
}
content.push_str("}\n");
let config_file = validators_dir.join("config.ncl");
std::fs::write(&config_file, content).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write main validators: {}",
e
))
})?;
Ok(())
}
/// Generate README.md for validators directory.
fn generate_validators_readme(spec: &ProjectSpec, validators_dir: &Path) -> Result<()> {
let mut content = String::new();
content.push_str(&format!("# Validators - {}\n\n", spec.name));
content.push_str(
"This directory contains Nickel validation contracts.\n\n\
## Overview\n\n\
Validators enforce rules on configuration values using Nickel's contract system.\n\n\
## Files\n\n\
### `common.ncl` - Reusable Validators\n\n\
Common validation contracts used across features:\n\n\
- `ValidPort` - Port numbers (1-65535)\n\
- `NonEmptyString` - Non-empty strings\n\
- `ValidBindAddress` - IP:PORT format\n\
- `ValidUrl` - HTTP/HTTPS URLs\n\
- `PositiveNumber` - Numbers > 0\n\
- `NonNegativeNumber` - Numbers >= 0\n\
- `Range` - Number range validation\n\
- `MatchesPattern` - Regex pattern matching\n\
- `OneOf` - Enum validation\n\
- `ArrayLength` - Array size constraints\n\n",
);
for feature in &spec.domain_features {
content.push_str(&format!("### `{}.ncl`\n\n", feature.name));
if !feature.custom_validators.is_empty() {
content.push_str(&format!(
"Feature-specific validators ({} custom validators).\n\n",
feature.custom_validators.len()
));
} else {
content.push_str("Standard field validators.\n\n");
}
}
content.push_str(
"## Usage\n\n\
### Applying Validators\n\n\
```nickel\n\
let validators = import \"./validators/config.ncl\" in\n\
let config = {...} in\n\
validators.validate config\n\
```\n\n\
### Custom Validation\n\n\
```nickel\n\
let common = import \"./validators/common.ncl\" in\n\
{\n\
port | common.ValidPort \"config.port\" = 8080,\n\
}\n\
```\n\n",
);
let readme_file = validators_dir.join("README.md");
std::fs::write(&readme_file, content).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write validators README: {}",
e
))
})?;
Ok(())
}
}
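For review context, a minimal sketch of the new dispatch, using only APIs introduced elsewhere in this commit (`DomainFeature::new`, the `ValidatorSpec` builder); the output path is illustrative:

```rust
use crate::models::{DomainFeature, ValidatorSpec, ValidatorType};

// Sketch: a feature carrying custom validators takes the custom path;
// an empty custom_validators list falls back to the field heuristics.
fn dispatch_example(spec: &mut crate::models::ProjectSpec) {
    let mut feature = DomainFeature::new("tracker");
    feature.custom_validators.push(
        ValidatorSpec::new("UniqueBindAddresses", ValidatorType::ArrayUniqueness)
            .applies_to("trackers")
            .with_description("tracker bind addresses must be unique"),
    );
    spec.domain_features.push(feature);
    // ValidatorGenerator::generate(spec, "out/provisioning") would now emit
    // validators/tracker.ncl via generate_custom_validators; a feature with
    // no custom validators goes through generate_basic_validators instead.
}
```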


@ -0,0 +1,210 @@
//! Values generator: creates runtime values configuration for deployment environments.
use crate::error::Result;
use crate::models::ProjectSpec;
use std::path::Path;
/// Generates runtime values files for environment-specific configuration.
pub struct ValuesGenerator;
impl ValuesGenerator {
/// Generate values configuration files.
pub fn generate(spec: &ProjectSpec, output_dir: impl AsRef<Path>) -> Result<()> {
let output_dir = output_dir.as_ref();
tracing::info!("Generating values configuration for project: {}", spec.name);
// Ensure values directory exists
let values_dir = output_dir.join("values");
std::fs::create_dir_all(&values_dir).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to create values directory: {}",
e
))
})?;
// Generate main values file
Self::generate_values_config(spec, &values_dir)?;
// Generate environment-specific values
Self::generate_environment_values(spec, &values_dir, "development")?;
Self::generate_environment_values(spec, &values_dir, "staging")?;
Self::generate_environment_values(spec, &values_dir, "production")?;
// Generate README
Self::generate_values_readme(spec, &values_dir)?;
tracing::info!("Successfully generated values configuration");
Ok(())
}
/// Generate main values/config.ncl file.
fn generate_values_config(spec: &ProjectSpec, values_dir: &Path) -> Result<()> {
let mut content = String::new();
content.push_str(&format!(
"# Runtime values configuration for {}\n\
# This file separates runtime values from configuration structure\n\n",
spec.name
));
content.push_str("# Import defaults\n");
content.push_str("let defaults = import \"../defaults/config.ncl\" in\n\n");
content.push_str("# Runtime values (override these per environment)\n");
content.push_str("{\n");
for feature in &spec.domain_features {
content.push_str(&format!(" {} = {{\n", feature.name));
for field in &feature.fields {
if field.sensitive {
content.push_str(&format!(
" {} = \"OVERRIDE_IN_ENVIRONMENT\", # Sensitive\n",
field.name
));
} else if let Some(default) = &field.default {
content.push_str(&format!(" {} = {},\n", field.name, default));
} else {
content.push_str(&format!(
" {} = defaults.{}.{},\n",
field.name, feature.name, field.name
));
}
}
content.push_str(" },\n");
}
content.push_str("}\n");
std::fs::write(values_dir.join("config.ncl"), content).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write values/config.ncl: {}",
e
))
})?;
Ok(())
}
/// Generate environment-specific values file.
fn generate_environment_values(
spec: &ProjectSpec,
values_dir: &Path,
environment: &str,
) -> Result<()> {
let mut content = String::new();
content.push_str(&format!(
"# {} environment values for {}\n\n",
Self::capitalize_first(environment),
spec.name
));
content.push_str("# Import base values\n");
content.push_str("let base = import \"./config.ncl\" in\n\n");
content.push_str(&format!(
"# {} overrides\n",
Self::capitalize_first(environment)
));
content.push_str("std.record.merge base {\n");
// Add environment-specific examples
match environment {
"development" => {
content.push_str(" # Development-specific values\n");
content.push_str(" # Example: lower resource limits, verbose logging\n");
}
"staging" => {
content.push_str(" # Staging-specific values\n");
content.push_str(" # Example: production-like but with test data\n");
}
"production" => {
content.push_str(" # Production-specific values\n");
content.push_str(" # Example: high availability, strict security\n");
}
_ => {}
}
content.push_str("}\n");
std::fs::write(values_dir.join(format!("{}.ncl", environment)), content).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write {}.ncl: {}",
environment, e
))
})?;
Ok(())
}
/// Generate README.md for values directory.
fn generate_values_readme(spec: &ProjectSpec, values_dir: &Path) -> Result<()> {
let mut content = String::new();
content.push_str(&format!("# Runtime Values - {}\n\n", spec.name));
content.push_str(
"This directory contains runtime values separated from configuration structure.\n\n\
## Overview\n\n\
Values files allow environment-specific overrides while keeping the core \
configuration structure unchanged.\n\n\
## Files\n\n\
- `config.ncl` - Base runtime values (references defaults)\n\
- `development.ncl` - Development environment overrides\n\
- `staging.ncl` - Staging environment overrides\n\
- `production.ncl` - Production environment overrides\n\n\
## Usage\n\n\
### Development\n\n\
```bash\n\
nickel export values/development.ncl > config.json\n\
```\n\n\
### Production\n\n\
```bash\n\
nickel export values/production.ncl > config.json\n\
```\n\n\
## Environment Variables\n\n\
Sensitive values should be provided via environment variables:\n\n\
```nickel\n\
{\n\
database = {\n\
password = std.env.DATABASE_PASSWORD,\n\
},\n\
}\n\
```\n\n\
## Merging Strategy\n\n\
1. **Defaults** (`defaults/config.ncl`) - Sensible defaults for all fields\n\
2. **Base values** (`values/config.ncl`) - Runtime values common to all environments\n\
3. **Environment values** (`values/{env}.ncl`) - Environment-specific overrides\n\n\
```nickel\n\
let defaults = import \"defaults/config.ncl\" in\n\
let values = import \"values/config.ncl\" in\n\
let env_values = import \"values/production.ncl\" in\n\
std.record.merge_all [defaults, values, env_values]\n\
```\n\n\
## Security\n\n\
- Never commit sensitive values (passwords, API keys) to version control\n\
- Use environment variables or secret management systems\n\
- Mark sensitive fields in `values/config.ncl` with `# Sensitive` comment\n\n",
);
std::fs::write(values_dir.join("README.md"), content).map_err(|e| {
crate::error::ProvisioningGenError::Other(format!(
"Failed to write values README: {}",
e
))
})?;
Ok(())
}
/// Capitalize first letter of a string.
fn capitalize_first(s: &str) -> String {
let mut chars = s.chars();
match chars.next() {
None => String::new(),
Some(first) => first.to_uppercase().collect::<String>() + chars.as_str(),
}
}
}
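A usage sketch for this generator; `load_spec` is a hypothetical helper standing in for the loaders shown later in this commit (ConfigLoader, CargoIntrospector, NickelSchemaLoader):

```rust
fn emit_values() -> crate::error::Result<()> {
    // Hypothetical: obtain a ProjectSpec from one of the crate's loaders.
    let spec = load_spec()?;
    // Signature as defined above: generate(spec, output_dir).
    ValuesGenerator::generate(&spec, "out/provisioning")?;
    // Writes out/provisioning/values/{config,development,staging,production}.ncl
    // plus a README.md documenting the merge strategy.
    Ok(())
}
```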


@ -50,13 +50,10 @@ impl CargoIntrospector {
// Build constraints from inferred features
let constraints = Self::infer_constraints(&domain_features);
let spec = ProjectSpec {
name,
project_type,
infrastructure,
domain_features,
constraints,
};
let mut spec = ProjectSpec::new(name, project_type);
spec.infrastructure = infrastructure;
spec.domain_features = domain_features;
spec.constraints = constraints;
// Validate the spec
spec.validate().map_err(|errors| CargoIntrospectionError {


@ -209,13 +209,10 @@ impl ConfigLoader {
// Convert constraints
let constraints = Self::convert_constraints(&config.constraints)?;
let spec = ProjectSpec {
name: config.name,
project_type,
infrastructure,
domain_features,
constraints,
};
let mut spec = ProjectSpec::new(config.name, project_type);
spec.infrastructure = infrastructure;
spec.domain_features = domain_features;
spec.constraints = constraints;
// Validate the spec
spec.validate().map_err(|errors| ConfigLoadingError {
@ -292,12 +289,11 @@ impl ConfigLoader {
.map(Self::convert_field)
.collect::<Result<Vec<_>>>()?;
Ok(DomainFeature {
name: f.name.clone(),
description: f.description.clone(),
fields,
constraints: None,
})
let mut feature = DomainFeature::new(f.name.clone());
feature.description = f.description.clone();
feature.fields = fields;
Ok(feature)
})
.collect()
}


@ -62,13 +62,10 @@ impl NickelSchemaLoader {
// Extract constraints from field definitions
let constraints = Self::extract_constraints(content)?;
let spec = ProjectSpec {
name: project_name.to_string(),
project_type,
infrastructure,
domain_features,
constraints,
};
let mut spec = ProjectSpec::new(project_name.to_string(), project_type);
spec.infrastructure = infrastructure;
spec.domain_features = domain_features;
spec.constraints = constraints;
// Validate the spec
spec.validate().map_err(|errors| NickelSchemaLoadingError {


@ -26,6 +26,22 @@ pub struct ProjectSpec {
/// Validation constraints (array sizes, uniqueness rules, etc.)
pub constraints: Vec<Constraint>,
/// Infrastructure-as-Code template configuration
#[serde(default)]
pub iac_templates: IacTemplateSpec,
/// Script generation configuration
#[serde(default)]
pub scripts: ScriptSpec,
/// Documentation generation configuration
#[serde(default)]
pub docs: DocsSpec,
/// Supported locales for i18n
#[serde(default)]
pub locales: Vec<LocaleSpec>,
}
impl ProjectSpec {
@ -37,6 +53,10 @@ impl ProjectSpec {
infrastructure: InfrastructureSpec::default(),
domain_features: Vec::new(),
constraints: Vec::new(),
iac_templates: IacTemplateSpec::default(),
scripts: ScriptSpec::default(),
docs: DocsSpec::default(),
locales: Vec::new(),
}
}
@ -209,6 +229,18 @@ pub struct DomainFeature {
/// Constraints specific to this feature (e.g., array bounds)
pub constraints: Option<HashMap<String, FeatureConstraint>>,
/// Nickel schema imports for this feature
#[serde(default)]
pub nickel_imports: Vec<NickelImport>,
/// Whether to use type contracts in generated schemas
#[serde(default)]
pub use_type_contracts: bool,
/// Custom validators for this feature
#[serde(default)]
pub custom_validators: Vec<ValidatorSpec>,
}
impl DomainFeature {
@ -219,6 +251,9 @@ impl DomainFeature {
description: None,
fields: Vec::new(),
constraints: None,
nickel_imports: Vec::new(),
use_type_contracts: false,
custom_validators: Vec::new(),
}
}
@ -483,6 +518,129 @@ pub struct FeatureConstraint {
pub unique: bool,
}
/// Infrastructure-as-Code template configuration.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct IacTemplateSpec {
/// Generate Terraform/OpenTofu templates
pub terraform_enabled: bool,
/// Generate Ansible playbooks
pub ansible_enabled: bool,
/// Generate docker-compose files
pub docker_compose_enabled: bool,
}
/// Script generation configuration.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct ScriptSpec {
/// Generate Bash scripts
pub bash_enabled: bool,
/// Generate Nushell scripts
pub nushell_enabled: bool,
/// Enable nickel-roundtrip integration
pub nickel_roundtrip: bool,
}
/// Documentation generation configuration.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct DocsSpec {
/// Generate main README.md
pub generate_readme: bool,
/// Generate quickstart guide
pub generate_quickstart: bool,
/// Generate nickel-roundtrip workflow guide
pub generate_nickel_roundtrip_guide: bool,
}
/// Locale/translation specification.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LocaleSpec {
/// Language code (e.g., "en-US", "es-ES")
pub language_code: String,
/// Whether this is the default locale
pub is_default: bool,
}
impl LocaleSpec {
/// Create a new locale specification.
pub fn new(language_code: impl Into<String>, is_default: bool) -> Self {
Self {
language_code: language_code.into(),
is_default,
}
}
}
/// Nickel schema import declaration.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct NickelImport {
/// Import name (e.g., "database_schema")
pub name: String,
/// Import path relative to schema file (e.g., "./database.ncl")
pub path: String,
}
impl NickelImport {
/// Create a new Nickel import.
pub fn new(name: impl Into<String>, path: impl Into<String>) -> Self {
Self {
name: name.into(),
path: path.into(),
}
}
}
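Presumably these declarations render into schema headers the same way `generate_main_validators` emits its imports; a small sketch (the exact emitting code is not in this hunk):

```rust
let import = NickelImport::new("database_schema", "./database.ncl");
// Pattern matches the import lines emitted elsewhere in this commit.
let line = format!("let {} = import \"{}\" in\n", import.name, import.path);
assert_eq!(line, "let database_schema = import \"./database.ncl\" in\n");
```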
/// Custom validator specification for advanced validation logic.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ValidatorSpec {
/// Validator name (e.g., "ValidPort", "UniqueBindAddresses")
pub name: String,
/// Validation logic description
pub description: Option<String>,
/// Fields this validator applies to
pub applies_to: Vec<String>,
/// Validator implementation type
pub validator_type: ValidatorType,
}
/// Type of validator implementation.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum ValidatorType {
/// Range validation (min/max)
Range,
/// Pattern matching (regex)
Pattern,
/// Custom Nickel predicate function
CustomPredicate,
/// Array uniqueness validator
ArrayUniqueness,
/// Composite validator (combines multiple validators)
Composite,
}
impl ValidatorSpec {
/// Create a new validator specification.
pub fn new(name: impl Into<String>, validator_type: ValidatorType) -> Self {
Self {
name: name.into(),
description: None,
applies_to: Vec::new(),
validator_type,
}
}
/// Add field that this validator applies to.
pub fn applies_to(mut self, field: impl Into<String>) -> Self {
self.applies_to.push(field.into());
self
}
/// Set description.
pub fn with_description(mut self, description: impl Into<String>) -> Self {
self.description = Some(description.into());
self
}
}
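The builder composes naturally at call sites; this example uses only the methods defined above:

```rust
let validator = ValidatorSpec::new("ValidPortRange", ValidatorType::Range)
    .applies_to("port")
    .with_description("port must fall within the configured range");

assert_eq!(validator.applies_to, vec!["port".to_string()]);
assert_eq!(validator.validator_type, ValidatorType::Range);
assert!(validator.description.is_some());
```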
#[cfg(test)]
mod tests {
use super::*;


@ -1,14 +1,16 @@
//! Template loader and renderer.
use crate::config::Config;
use crate::error::Result;
use crate::error::{Result, TemplateRenderError};
use std::collections::BTreeMap;
use std::fs;
use std::path::PathBuf;
use tera::Tera;
/// Loads and renders Tera templates for code generation.
pub struct TemplateLoader {
path: PathBuf,
tera: Tera,
}
/// Template category with its templates.
@ -23,7 +25,16 @@ impl TemplateLoader {
/// Load template library from configuration.
pub fn new(config: &Config) -> Result<Self> {
let path = config.templates_dir();
Ok(TemplateLoader { path })
let glob_pattern = path.join("**/*.tera").to_string_lossy().to_string();
let mut tera = Tera::new(&glob_pattern).map_err(|e| TemplateRenderError {
template_name: glob_pattern.clone(),
reason: format!("Failed to initialize Tera engine: {}", e),
})?;
tera.autoescape_on(vec![]);
Ok(TemplateLoader { path, tera })
}
/// Get the templates directory path.
@ -98,8 +109,13 @@ impl TemplateLoader {
}
/// Render a template with given context.
pub fn render(&self, _template_name: &str, _context: &tera::Context) -> Result<String> {
// TODO: Implement template rendering
Ok(String::new())
pub fn render(&self, template_name: &str, context: &tera::Context) -> Result<String> {
self.tera.render(template_name, context).map_err(|e| {
TemplateRenderError {
template_name: template_name.to_string(),
reason: e.to_string(),
}
.into()
})
}
}
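A usage sketch for the now-functional renderer; the template name and context key are hypothetical, and `config` is whatever `Config` value the caller already holds:

```rust
// Hypothetical template at <templates_dir>/docs/README.md.tera.
let loader = TemplateLoader::new(&config)?;
let mut ctx = tera::Context::new();
ctx.insert("project_name", "demo-project");
let rendered = loader.render("docs/README.md.tera", &ctx)?;
std::fs::write("out/README.md", rendered)?;
```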


@ -0,0 +1,359 @@
# Nickel Roundtrip Workflow
Complete guide to the TypeDialog + Nickel configuration roundtrip for {{ project_name }}.
## Overview
The **Nickel Roundtrip** workflow integrates TypeDialog interactive forms with Nickel's powerful type system and validation:
```
TypeDialog Form → JSON → Nickel → Validation → Export (JSON/YAML/TOML)
       ↑                                               ↓
       └────────────── Edit & Re-import ───────────────┘
```
This enables:
- ✅ User-friendly configuration via TypeDialog forms
- ✅ Type-safe configuration with Nickel contracts
- ✅ Validation against schemas and custom predicates
- ✅ Round-trip editing (export, edit, re-import)
## Workflow Steps
### 1. Collect User Input (TypeDialog)
Run the interactive form:
```bash
typedialog config-form.toml \
--output-format json \
--output generated/user-input.json
```
TypeDialog presents form fields, validates input, and outputs JSON.
### 2. Convert JSON to Nickel
Embed user values into Nickel configuration:
```bash
./scripts/json-to-nickel.sh generated/user-input.json generated/config.ncl
```
Generated `config.ncl`:
```nickel
let schemas = import "../schemas/config.ncl" in
let defaults = import "../defaults/config.ncl" in
let user_values = {
# JSON values embedded here
server = {
bind_address = "0.0.0.0:8080",
max_connections = 100,
},
} in
let final_config = std.record.merge defaults user_values in
final_config | schemas.Config
```
### 3. Validate with Nickel
Type-check the configuration:
```bash
nickel typecheck generated/config.ncl
```
Nickel verifies:
- All required fields present
- Type contracts satisfied (e.g., `ValidPort`)
- Custom validators pass
- Imports resolve correctly
### 4. Export to Target Format
Export validated configuration:
**JSON:**
```bash
nickel export --format json generated/config.ncl > generated/config.json
```
**YAML:**
```bash
nickel export --format yaml generated/config.ncl > generated/config.yaml
```
**TOML (via JSON):**
```bash
nickel export --format json generated/config.ncl | json2toml > generated/config.toml
```
### 5. Apply to Infrastructure
Use exported configuration with your deployment tool:
```bash
# Terraform
terraform apply -var-file="generated/config.tfvars"
# Ansible
ansible-playbook deploy.yml -e @generated/config.yaml
# Docker Compose
docker-compose -f service.yml --env-file generated/config.env up
```
## Round-Trip Editing
### Export → Edit → Re-import
**Step 1: Export current config**
```bash
nickel export --format json values/config.ncl > /tmp/config.json
```
**Step 2: Edit JSON**
```bash
# Edit with jq
jq '.server.max_connections = 200' /tmp/config.json > /tmp/config-modified.json
# Or edit manually
vim /tmp/config-modified.json
```
**Step 3: Re-import to Nickel**
```bash
./scripts/json-to-nickel.sh /tmp/config-modified.json values/config.ncl
```
**Step 4: Validate**
```bash
nickel typecheck values/config.ncl
```
If validation passes, your edited config is ready!
## Advanced Workflows
### Merging Multiple Configurations
Combine base config with environment-specific overrides:
```nickel
let base = import "./config.ncl" in
let production_overrides = {
server.max_connections = 500,
logging.level = "warn",
} in
std.record.merge base production_overrides
```
### Custom Validators
Add domain-specific validation in `validators/`:
```nickel
# validators/custom.ncl
{
ValidTrackerArray = fun label =>
std.contract.from_predicate (fun trackers =>
std.is_array trackers &&
let bind_addresses = std.array.map (fun t => t.bind_address) trackers in
# Ensure all bind addresses are unique
std.array.length bind_addresses ==
std.array.length (std.array.sort bind_addresses)
) {
label = label,
message = "tracker bind addresses must be unique",
},
}
```
Use in schema:
```nickel
let validators = import "../validators/custom.ncl" in
{
Config = {
trackers | validators.ValidTrackerArray "config.trackers",
},
}
```
### Conditional Configuration
Use Nickel's `if-then-else` for environment-specific logic:
```nickel
let env = "production" in # or "development"
{
server = {
max_connections = if env == "production" then 500 else 50,
logging_level = if env == "production" then "warn" else "debug",
},
}
```
### Schema Imports and Composition
Organize schemas into modules:
```nickel
# schemas/server.ncl
{
Server = {
bind_address | String,
port | Number,
max_connections | Number,
},
}
# schemas/database.ncl
{
Database = {
url | String,
pool_size | Number,
},
}
# schemas/config.ncl
let server_schema = import "./server.ncl" in
let database_schema = import "./database.ncl" in
{
Config = {
server | server_schema.Server,
database | database_schema.Database | optional,
},
}
```
## Integration with CI/CD
### Pre-commit Hook
Validate Nickel config before commit:
```bash
#!/bin/bash
# .git/hooks/pre-commit
if ! nickel typecheck values/config.ncl; then
echo "❌ Nickel validation failed!"
exit 1
fi
echo "✅ Nickel validation passed"
```
### CI Pipeline (GitHub Actions)
```yaml
name: Validate Config
on: [push, pull_request]
jobs:
validate:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Install Nickel
run: cargo install nickel-lang-cli
- name: Validate configuration
run: nickel typecheck provisioning/values/config.ncl
- name: Export to JSON
run: nickel export provisioning/values/config.ncl > config.json
```
## Troubleshooting
### Error: "contract violated"
**Example:**
```
Error: contract violated by value
--> bind_address: "invalid"
expected: ValidBindAddress
```
**Fix:** The value doesn't match the validator. Check `validators/common.ncl` for the contract definition.
### Error: "infinite recursion detected"
**Cause:** Circular imports in Nickel files.
**Fix:** Ensure import graph is acyclic:
```
schemas/config.ncl
├→ schemas/server.ncl
└→ schemas/database.ncl
```
Never create cycles like:
```
server.ncl → database.ncl → server.ncl # ❌ BAD
```
### Error: "field not found"
**Cause:** Missing required field in configuration.
**Fix:** Add the field or mark it as `optional` in schema:
```nickel
{
Config = {
required_field | String,
optional_field | String | optional, # Won't error if missing
},
}
```
## Best Practices
1. **Always validate before exporting:**
```bash
nickel typecheck config.ncl && nickel export config.ncl
```
2. **Use defaults for common values:**
```nickel
let defaults = {max_connections = 100} in
std.record.merge defaults user_values
```
3. **Separate schemas from values:**
- `schemas/` → Type contracts (versioned, stable)
- `values/` → Runtime configuration (user-editable)
4. **Document contracts:**
```nickel
{
# Port number must be 1-65535
port | ValidPort "config.port",
}
```
5. **Test with `nickel query`:**
```bash
nickel query --field server.port config.ncl
```
## Resources
- [Nickel Language Documentation](https://nickel-lang.org/user-manual/)
- [TypeDialog Forms Guide](https://github.com/yourusername/typedialog)
- [Example: torrust-tracker-deployer](https://github.com/torrust/torrust-tracker-deployer)
---
**Generated by typedialog-prov-gen for {{ project_name }}**


@ -0,0 +1,201 @@
# {{ project_name }} - Quick Start Guide
Get up and running with {{ project_name }} provisioning in 5 minutes.
## Prerequisites
Install required tools:
```bash
# TypeDialog (interactive forms)
cargo install typedialog
# Nickel (configuration language)
cargo install nickel-lang-cli
# Optional: Format converters
apt-get install jq # JSON processor
cargo install json2toml # TOML converter
```
## Step 1: Run Configuration Wizard
Choose your preferred shell:
**Bash:**
```bash
chmod +x scripts/configure.sh
./scripts/configure.sh
```
**Nushell:**
```bash
chmod +x scripts/configure.nu
./scripts/configure.nu
```
The wizard will:
1. Display an interactive TypeDialog form
2. Collect configuration values
3. Generate Nickel configuration
4. Validate with schema/type contracts
5. Export to JSON, YAML, TOML
## Step 2: Review Generated Configuration
Check the `generated/` directory:
```bash
ls -lh generated/
```
You should see:
- `config.ncl` - Nickel source
- `config.json` - JSON export
- `config.yaml` - YAML export
- `user-input.json` - Raw form data
## Step 3: Validate Configuration
Type-check with Nickel:
```bash
nickel typecheck generated/config.ncl
```
If validation passes, your configuration satisfies all schema contracts.
## Step 4: Deploy Infrastructure
{% if iac_templates.terraform_enabled %}
### Option A: Terraform/OpenTofu
```bash
cd templates/tofu/{{ infrastructure.providers[0] | default(value="common") }}
# Initialize Terraform
terraform init
# Preview changes
terraform plan -var-file="../../../generated/config.tfvars"
# Apply infrastructure
terraform apply -var-file="../../../generated/config.tfvars"
```
{% endif %}
{% if iac_templates.ansible_enabled %}
### Option B: Ansible
```bash
# Run deployment playbook
ansible-playbook templates/ansible/deploy.yml \
-i templates/ansible/inventory.yml \
-e @generated/config.yaml
```
{% endif %}
{% if iac_templates.docker_compose_enabled %}
### Option C: Docker Compose
```bash
# Start services
docker-compose -f templates/docker-compose/service.yml up -d
# View logs
docker-compose logs -f
# Stop services
docker-compose down
```
{% endif %}
## Customizing Configuration
### Edit Nickel Values Directly
1. Open `values/config.ncl` in your editor
2. Modify values (type contracts ensure correctness)
3. Validate: `nickel typecheck values/config.ncl`
4. Export: `nickel export values/config.ncl > generated/config.json`
### Re-run the Form
Simply run `./scripts/configure.sh` again to update values.
## Common Tasks
### Export to Different Format
```bash
# Nickel → JSON
./scripts/nickel-to-json.sh values/config.ncl generated/config.json
# Nickel → YAML
./scripts/nickel-to-yaml.sh values/config.ncl generated/config.yaml
```
### Import Existing JSON
```bash
# JSON → Nickel
./scripts/json-to-nickel.sh existing-config.json values/config.ncl
```
### Validate Without Exporting
```bash
nickel typecheck values/config.ncl
```
## Troubleshooting
### Error: "Type checking failed"
**Cause:** Configuration values don't match schema contracts.
**Fix:** Check the error message for the failing field and correct the value.
Example:
```
Error: contract violated by value
--> port: 70000
expected: ValidPort (1-65535)
```
Fix by setting `port = 8080` in `values/config.ncl`.
### Error: "Form file not found"
**Cause:** Missing form fragments or master form.
**Fix:** Ensure all files in `fragments/` directory exist:
```bash
ls -1 fragments/
```
### Error: "nickel command not found"
**Cause:** Nickel not installed.
**Fix:**
```bash
cargo install nickel-lang-cli
```
## Next Steps
- Read [NICKEL_ROUNDTRIP.md](NICKEL_ROUNDTRIP.md) for advanced workflows
- Explore `schemas/` to understand type contracts
- Customize `validators/` for custom validation logic
- Add locales in `locales/` for i18n support
## Support
For issues or questions:
- GitHub: {{ repository | default(value="https://github.com/yourusername/" ~ project_name) }}
- Documentation: `docs/`
---
**Generated by typedialog-prov-gen**


@ -0,0 +1,140 @@
# {{ project_name }} - Provisioning Configuration
**Generated by typedialog-prov-gen**
This directory contains the complete provisioning configuration for {{ project_name }}, using TypeDialog forms with Nickel configuration validation.
## Quick Start
1. **Run the configuration wizard:**
```bash
./scripts/configure.sh
```
2. **Generated files will be in `generated/`:**
- `config.ncl` - Nickel source configuration
- `config.json` - JSON export
- `config.yaml` - YAML export
3. **Validate configuration:**
```bash
nickel typecheck generated/config.ncl
```
## Directory Structure
```
provisioning/
├── config.ncl # Main Nickel configuration entry point
├── config-form.toml # TypeDialog master form
├── constraints.toml # Validation constraints
├── defaults/ # Default values for all features
├── docs/ # Documentation guides
├── fragments/ # Form field fragments
├── generated/ # Generated configurations (git-ignored)
├── locales/ # i18n translations (en-US, es-ES)
├── schemas/ # Nickel type schemas
├── scripts/ # Configuration and deployment scripts
├── templates/ # Infrastructure templates (Terraform, Ansible, etc.)
├── validators/ # Validation predicates
└── values/ # User-editable runtime values
```
## Features
{% for feature in features %}
### {{ feature.name }}
{{ feature.description | default(value="No description available") }}
**Fields:** {{ feature.fields | length }} configuration fields
{% endfor %}
## Configuration Workflow
1. **Interactive Form** → Run `./scripts/configure.sh` to collect user input
2. **JSON Generation** → TypeDialog outputs user values as JSON
3. **Nickel Conversion** → JSON is embedded into Nickel configuration
4. **Schema Validation** → Nickel type contracts enforce correctness
5. **Export** → Final configuration exported to JSON/YAML/TOML
## Infrastructure as Code
{% if iac_templates.terraform_enabled %}
### Terraform/OpenTofu
Templates in `templates/tofu/`:
- `common/` - Shared Terraform modules
- `hetzner/` - Hetzner Cloud provider
- `lxd/` - LXD container provider
**Usage:**
```bash
cd templates/tofu/hetzner
terraform init
terraform plan -var-file="../../../generated/config.tfvars"
```
{% endif %}
{% if iac_templates.ansible_enabled %}
### Ansible
Playbooks in `templates/ansible/`:
- `deploy.yml` - Main deployment playbook
- `inventory.yml` - Dynamic inventory
**Usage:**
```bash
ansible-playbook templates/ansible/deploy.yml -i templates/ansible/inventory.yml
```
{% endif %}
{% if iac_templates.docker_compose_enabled %}
### Docker Compose
Compose files in `templates/docker-compose/`:
- `service.yml` - Main service definition
- `monitoring.yml` - Monitoring stack (Prometheus, Grafana)
**Usage:**
```bash
docker-compose -f templates/docker-compose/service.yml up -d
```
{% endif %}
## Scripts
- `configure.sh` / `configure.nu` - Main configuration wizard (Bash/Nushell)
- `nickel-to-json.sh` / `.nu` - Export Nickel → JSON
- `nickel-to-yaml.sh` / `.nu` - Export Nickel → YAML
- `json-to-nickel.sh` / `.nu` - Import JSON → Nickel
## Nickel Roundtrip Workflow
See [NICKEL_ROUNDTRIP.md](docs/NICKEL_ROUNDTRIP.md) for detailed workflow documentation.
**Summary:**
1. Edit values in `values/config.ncl`
2. Run `nickel typecheck values/config.ncl`
3. Export with `nickel export values/config.ncl > generated/config.json`
4. Apply to infrastructure
## Localization
Supported locales:
{% for locale in locales %}
- {{ locale.language_code }}{% if locale.is_default %} (default){% endif %}
{% endfor %}
Translation files in `locales/{lang}/forms.ftl` (Fluent format).
## Documentation
- [README.md](README.md) - This file
- [docs/QUICKSTART.md](docs/QUICKSTART.md) - Getting started guide
- [docs/NICKEL_ROUNDTRIP.md](docs/NICKEL_ROUNDTRIP.md) - Nickel roundtrip workflow
## License
{{ license | default(value="MIT") }}


@ -0,0 +1,99 @@
---
# {{ project_name }} - Ansible Deployment Playbook
# Generated by typedialog-prov-gen
- name: Deploy {{ project_name }}
hosts: app_servers
become: yes
vars:
project_name: "{{ project_name }}"
deploy_user: "{{ deploy_user | default(value="deploy") }}"
app_dir: "/opt/{{ project_name }}"
tasks:
- name: Update apt cache
apt:
update_cache: yes
cache_valid_time: 3600
- name: Install system dependencies
apt:
name:
- curl
- wget
- git
- build-essential
{% if infrastructure.database.db_type == "postgres" %}
- postgresql-client
{% elif infrastructure.database.db_type == "mysql" %}
- mysql-client
{% endif %}
state: present
- name: Create deployment user
user:
name: "{{"{{ deploy_user }}"}}"
shell: /bin/bash
create_home: yes
state: present
- name: Create application directory
file:
path: "{{"{{ app_dir }}"}}"
state: directory
owner: "{{"{{ deploy_user }}"}}"
group: "{{"{{ deploy_user }}"}}"
mode: '0755'
{% if infrastructure.database %}
- name: Configure database
include_tasks: tasks/database.yml
{% endif %}
- name: Deploy application
git:
repo: "{{ repository_url | default(value="https://github.com/youruser/" ~ project_name) }}"
dest: "{{"{{ app_dir }}"}}"
version: "{{ git_branch | default(value="main") }}"
become_user: "{{"{{ deploy_user }}"}}"
- name: Install application dependencies
shell: |
cd {{"{{ app_dir }}"}}
cargo build --release
become_user: "{{"{{ deploy_user }}"}}"
- name: Install systemd service
template:
src: templates/{{ project_name }}.service.j2
dest: /etc/systemd/system/{{ project_name }}.service
mode: '0644'
notify: Reload systemd
- name: Enable and start service
systemd:
name: "{{ project_name }}"
enabled: yes
state: started
{% if "prometheus" in infrastructure.monitoring %}
- name: Install Prometheus node exporter
include_tasks: tasks/prometheus-exporter.yml
{% endif %}
handlers:
- name: Reload systemd
systemd:
daemon_reload: yes
- name: Verify deployment
hosts: app_servers
tasks:
- name: Check service status
systemd:
name: "{{ project_name }}"
register: service_status
- name: Display service status
debug:
msg: "Service {{ project_name }} is {{"{{ service_status.status.ActiveState }}"}}"


@ -0,0 +1,30 @@
---
# {{ project_name }} - Ansible Inventory
# Generated by typedialog-prov-gen
all:
vars:
ansible_user: "{{ ansible_user | default(value="ubuntu") }}"
ansible_python_interpreter: /usr/bin/python3
children:
app_servers:
hosts:
{% for i in range(start=1, end=(server_count | default(value=1)) + 1) %}
app-{{ i }}:
ansible_host: "{{ hostvars[\"app-\" ~ i].address | default(value="192.168.1." ~ (100 + i)) }}"
{% endfor %}
{% if infrastructure.database %}
database_servers:
hosts:
db-1:
ansible_host: "{{ database_host | default(value="192.168.1.10") }}"
{% endif %}
{% if "prometheus" in infrastructure.monitoring %}
monitoring_servers:
hosts:
monitoring-1:
ansible_host: "{{ monitoring_host | default(value="192.168.1.20") }}"
{% endif %}


@ -0,0 +1,37 @@
{# {{ project_name }} - Nickel Configuration Template (Jinja2) #}
{# Generated by typedialog-prov-gen #}
{# This template is rendered by infrastructure tools (Ansible, Terraform) #}
{# to produce final Nickel configuration files #}
# {{ project_name }} Configuration
# Generated from template at: {{ "{{" }} template_generation_time {{ "}}" }}
# Environment: {{ "{{" }} environment {{ "}}" }}
let schemas = import "./schemas/config.ncl" in
let defaults = import "./defaults/config.ncl" in
{
{% for feature in features %}
# {{ feature.name }} configuration
{{ feature.name }} = {
{% for field in feature.fields %}
{% if field.field_type == "Text" or field.field_type == "Password" %}
{{ field.name }} = "{{ "{{" }} {{ feature.name }}_{{ field.name }} {{ "}}" }}",
{% elif field.field_type == "Number" %}
{{ field.name }} = {{ "{{" }} {{ feature.name }}_{{ field.name }} {{ "}}" }},
{% elif field.field_type == "Confirm" %}
{{ field.name }} = {{ "{{" }} {{ feature.name }}_{{ field.name }} | lower {{ "}}" }},
{% elif field.field_type == "Select" %}
{{ field.name }} = "{{ "{{" }} {{ feature.name }}_{{ field.name }} {{ "}}" }}",
{% elif field.field_type == "MultiSelect" %}
{{ field.name }} = {{ "{{" }} {{ feature.name }}_{{ field.name }} | to_json {{ "}}" }},
{% elif field.field_type == "RepeatingGroup" %}
{{ field.name }} = {{ "{{" }} {{ feature.name }}_{{ field.name }} | to_json {{ "}}" }},
{% else %}
{{ field.name }} = "{{ "{{" }} {{ feature.name }}_{{ field.name }} {{ "}}" }}",
{% endif %}
{% endfor %}
},
{% endfor %}
} | schemas.Config


@ -0,0 +1,69 @@
# {{ project_name }} - Docker Compose Monitoring Stack
# Generated by typedialog-prov-gen
version: '3.8'
services:
prometheus:
image: prom/prometheus:latest
container_name: "{{ project_name }}-prometheus"
restart: unless-stopped
ports:
- "9090:9090"
volumes:
- ./prometheus.yml:/etc/prometheus/prometheus.yml:ro
- prometheus_data:/prometheus
command:
- '--config.file=/etc/prometheus/prometheus.yml'
- '--storage.tsdb.path=/prometheus'
- '--web.console.libraries=/usr/share/prometheus/console_libraries'
- '--web.console.templates=/usr/share/prometheus/consoles'
grafana:
image: grafana/grafana:latest
container_name: "{{ project_name }}-grafana"
restart: unless-stopped
ports:
- "3000:3000"
environment:
- GF_SECURITY_ADMIN_PASSWORD=${GRAFANA_PASSWORD:-admin}
- GF_USERS_ALLOW_SIGN_UP=false
volumes:
- grafana_data:/var/lib/grafana
depends_on:
- prometheus
node_exporter:
image: prom/node-exporter:latest
container_name: "{{ project_name }}-node-exporter"
restart: unless-stopped
ports:
- "9100:9100"
command:
- '--path.procfs=/host/proc'
- '--path.sysfs=/host/sys'
- '--collector.filesystem.mount-points-exclude=^/(sys|proc|dev|host|etc)($$|/)'
volumes:
- /proc:/host/proc:ro
- /sys:/host/sys:ro
- /:/rootfs:ro
volumes:
prometheus_data:
driver: local
grafana_data:
driver: local
networks:
default:
name: {{ project_name }}_monitoring


@ -0,0 +1,86 @@
# {{ project_name }} - Docker Compose Service Definition
# Generated by typedialog-prov-gen
version: '3.8'
services:
app:
image: "{{ docker_image | default(value=project_name ~ ":latest") }}"
container_name: "{{ project_name }}-app"
restart: unless-stopped
ports:
- "${APP_PORT:-8080}:8080"
environment:
- RUST_LOG=${RUST_LOG:-info}
- APP_ENV=${APP_ENV:-production}
{% if infrastructure.database %}
- DATABASE_URL=${DATABASE_URL}
{% endif %}
{% if infrastructure.database %}
database:
image: "{% if infrastructure.database.db_type == "postgres" %}postgres:15-alpine
{%- elif infrastructure.database.db_type == "mysql" %}mysql:8.0
{%- elif infrastructure.database.db_type == "redis" %}redis:7-alpine
{%- else %}sqlite:latest{% endif %}"
container_name: "{{ project_name }}-db"
restart: unless-stopped
{% if infrastructure.database.db_type == "postgres" %}
environment:
- POSTGRES_DB={{ project_name }}
- POSTGRES_USER=${DB_USER:-{{ project_name }}}
- POSTGRES_PASSWORD=${DB_PASSWORD}
{% elif infrastructure.database.db_type == "mysql" %}
environment:
- MYSQL_DATABASE={{ project_name }}
- MYSQL_USER=${DB_USER:-{{ project_name }}}
- MYSQL_PASSWORD=${DB_PASSWORD}
- MYSQL_ROOT_PASSWORD=${DB_ROOT_PASSWORD}
{% endif %}
volumes:
- db_data:/var/lib/{% if infrastructure.database.db_type == "postgres" %}postgresql/data
{%- elif infrastructure.database.db_type == "mysql" %}mysql
{%- elif infrastructure.database.db_type == "redis" %}redis{% endif %}
healthcheck:
{% if infrastructure.database.db_type == "postgres" %}
test: ["CMD-SHELL", "pg_isready -U ${DB_USER:-{{ project_name }}}"]
{% elif infrastructure.database.db_type == "mysql" %}
test: ["CMD", "mysqladmin", "ping", "-h", "localhost"]
{% elif infrastructure.database.db_type == "redis" %}
test: ["CMD", "redis-cli", "ping"]
{% endif %}
interval: 10s
timeout: 5s
retries: 5
{% endif %}
nginx:
image: nginx:alpine
container_name: "{{ project_name }}-nginx"
restart: unless-stopped
ports:
- "80:80"
- "443:443"
volumes:
- ./nginx.conf:/etc/nginx/nginx.conf:ro
- ./certs:/etc/nginx/certs:ro
depends_on:
- app
volumes:
{% if infrastructure.database %}
db_data:
driver: local
{% endif %}
networks:
default:
name: {{ project_name }}_network


@ -0,0 +1,64 @@
# {{ project_name }} - Prometheus Configuration
# Generated by typedialog-prov-gen
global:
scrape_interval: 15s
evaluation_interval: 15s
external_labels:
cluster: '{{ project_name }}'
environment: '${ENVIRONMENT:-production}'
# Alertmanager configuration
alerting:
alertmanagers:
- static_configs:
- targets:
- 'alertmanager:9093'
# Load rules once and periodically evaluate them
rule_files:
- '/etc/prometheus/rules/*.yml'
# Scrape configurations
scrape_configs:
# Prometheus itself
- job_name: 'prometheus'
static_configs:
- targets: ['localhost:9090']
# {{ project_name }} application metrics
- job_name: '{{ project_name }}'
static_configs:
- targets:
{% for i in range(start=1, end=(server_count | default(value=1)) + 1) %}
- 'app-{{ i }}:${METRICS_PORT:-9091}'
{% endfor %}
relabel_configs:
- source_labels: [__address__]
target_label: instance
- source_labels: [__address__]
target_label: __param_target
- source_labels: [__param_target]
target_label: instance
- target_label: __address__
replacement: '{{ project_name }}:${METRICS_PORT:-9091}'
# Node exporter for system metrics
- job_name: 'node-exporter'
static_configs:
- targets:
{% for i in range(start=1, end=(server_count | default(value=1)) + 1) %}
- 'app-{{ i }}:9100'
{% endfor %}
{% if infrastructure.database %}
# Database exporter
- job_name: 'database-exporter'
static_configs:
- targets:
- 'db-exporter:9187'
{% endif %}
# Remote write for long-term storage (optional)
# remote_write:
# - url: 'http://mimir:9009/api/v1/push'


@ -0,0 +1,95 @@
# {{ project_name }} - Main Terraform Configuration
# Generated by typedialog-prov-gen
terraform {
required_version = ">= 1.0"
required_providers {
{% if "hetzner" in infrastructure.providers %}
hetzner = {
source = "hetznercloud/hetzner"
version = "~> 1.44"
}
{% endif %}
{% if "lxd" in infrastructure.providers %}
lxd = {
source = "terraform-lxd/lxd"
version = "~> 1.10"
}
{% endif %}
{% if "aws" in infrastructure.providers %}
aws = {
source = "hashicorp/aws"
version = "~> 5.0"
}
{% endif %}
}
}
# Local variables from Nickel configuration
locals {
project_name = "{{ project_name }}"
environment = var.environment
common_tags = {
Project = local.project_name
Environment = local.environment
ManagedBy = "Terraform"
Generated = "typedialog-prov-gen"
}
}
{% if infrastructure.database %}
# Database resources
resource "{{ infrastructure.database.db_type }}_instance" "main" {
name = "${local.project_name}-${local.environment}-db"
size = var.database_size
region = var.region
tags = local.common_tags
}
{% endif %}
{% if "prometheus" in infrastructure.monitoring %}
# Monitoring instance
resource "compute_instance" "monitoring" {
name = "${local.project_name}-${local.environment}-monitoring"
machine_type = var.monitoring_machine_type
zone = var.zone
boot_disk {
initialize_params {
image = var.monitoring_image
}
}
metadata_startup_script = file("${path.module}/../../scripts/install-prometheus.sh")
tags = merge(local.common_tags, {
Role = "monitoring"
})
}
{% endif %}
# Output values
output "infrastructure_id" {
description = "Infrastructure resource IDs"
value = {
{% if infrastructure.database %}
database_id = {{ infrastructure.database.db_type }}_instance.main.id
{% endif %}
{% if "prometheus" in infrastructure.monitoring %}
monitoring_id = compute_instance.monitoring.id
{% endif %}
}
}
output "connection_strings" {
description = "Connection information"
sensitive = true
value = {
{% if infrastructure.database %}
database_url = {{ infrastructure.database.db_type }}_instance.main.connection_string
{% endif %}
}
}


@ -0,0 +1,77 @@
# {{ project_name }} - Terraform Variables
# Generated by typedialog-prov-gen
variable "environment" {
description = "Deployment environment (development, staging, production)"
type = string
default = "development"
validation {
condition = contains(["development", "staging", "production"], var.environment)
error_message = "Environment must be development, staging, or production."
}
}
variable "region" {
description = "Cloud provider region"
type = string
default = "{{ infrastructure.default_region | default(value="us-west-2") }}"
}
variable "zone" {
description = "Cloud provider availability zone"
type = string
default = "${var.region}-a"
}
{% if infrastructure.database %}
variable "database_size" {
description = "Database instance size"
type = string
default = "{{ infrastructure.database.size | default(value="db-small") }}"
}
variable "database_backup_enabled" {
description = "Enable automated database backups"
type = bool
default = true
}
{% endif %}
{% if "prometheus" in infrastructure.monitoring %}
variable "monitoring_machine_type" {
description = "Monitoring instance machine type"
type = string
default = "{{ infrastructure.monitoring_machine_type | default(value="e2-small") }}"
}
variable "monitoring_image" {
description = "Monitoring instance OS image"
type = string
default = "ubuntu-2204-lts"
}
{% endif %}
variable "ssh_keys" {
description = "SSH public keys for instance access"
type = list(string)
default = []
}
variable "network_cidr" {
description = "Network CIDR block"
type = string
default = "10.0.0.0/16"
}
variable "allowed_ips" {
description = "IP addresses allowed to access infrastructure"
type = list(string)
default = ["0.0.0.0/0"] # WARNING: Restrict in production
}
variable "tags" {
description = "Additional tags for resources"
type = map(string)
default = {}
}
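All of these can be overridden at plan time; in particular, allowed_ips should be tightened outside development (the CIDR below is illustrative):

    terraform plan \
      -var 'environment=production' \
      -var 'allowed_ips=["203.0.113.0/24"]'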

View File

@ -0,0 +1,103 @@
# {{ project_name }} - Hetzner Cloud Provider Configuration
# Generated by typedialog-prov-gen
terraform {
required_providers {
hcloud = {
source = "hetznercloud/hcloud"
version = "~> 1.44"
}
}
}
provider "hcloud" {
token = var.hetzner_token
}
# Hetzner-specific variables
variable "hetzner_token" {
description = "Hetzner Cloud API token"
type = string
sensitive = true
}
variable "hetzner_server_type" {
description = "Hetzner server type (cx11, cx21, cx31, etc.)"
type = string
default = "cx11" # 1 vCPU, 2GB RAM
}
variable "hetzner_location" {
description = "Hetzner datacenter location"
type = string
default = "{{ infrastructure.hetzner_location | default(value="nbg1") }}"
validation {
condition = contains(["nbg1", "fsn1", "hel1", "ash"], var.hetzner_location)
error_message = "Location must be nbg1 (Nuremberg), fsn1 (Falkenstein), hel1 (Helsinki), or ash (Ashburn)."
}
}
# SSH key for Hetzner instances
resource "hcloud_ssh_key" "default" {
name = "${local.project_name}-${local.environment}"
public_key = file(pathexpand("~/.ssh/id_ed25519.pub")) # file() does not expand ~ on its own
}
# Hetzner server instance
resource "hcloud_server" "app" {
name = "${local.project_name}-${local.environment}-app"
server_type = var.hetzner_server_type
location = var.hetzner_location
image = "ubuntu-22.04"
ssh_keys = [hcloud_ssh_key.default.id]
labels = local.common_tags
user_data = templatefile("${path.module}/../../scripts/cloud-init.yml", {
project_name = local.project_name
environment = local.environment
})
}
# Hetzner firewall
resource "hcloud_firewall" "app" {
name = "${local.project_name}-${local.environment}-firewall"
rule {
direction = "in"
protocol = "tcp"
port = "22"
source_ips = var.allowed_ips
}
rule {
direction = "in"
protocol = "tcp"
port = "80"
source_ips = ["0.0.0.0/0", "::/0"]
}
rule {
direction = "in"
protocol = "tcp"
port = "443"
source_ips = ["0.0.0.0/0", "::/0"]
}
}
resource "hcloud_firewall_attachment" "app" {
firewall_id = hcloud_firewall.app.id
server_ids = [hcloud_server.app.id]
}
output "hetzner_server_ip" {
description = "Hetzner server public IP"
value = hcloud_server.app.ipv4_address
}
output "hetzner_server_id" {
description = "Hetzner server ID"
value = hcloud_server.app.id
}
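Since hetzner_token is sensitive and has no default, supply it at runtime; the TF_VAR_ environment convention keeps it out of files (the value below is a placeholder):

    export TF_VAR_hetzner_token="<hcloud-api-token>"
    terraform apply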

View File

@ -0,0 +1,115 @@
# {{ project_name }} - LXD Provider Configuration
# Generated by typedialog-prov-gen
terraform {
required_providers {
lxd = {
source = "terraform-lxd/lxd"
version = "~> 1.10"
}
}
}
provider "lxd" {
generate_client_certificates = true
accept_remote_certificate = true
lxd_remote {
name = var.lxd_remote_name
scheme = "https"
address = var.lxd_remote_address
password = var.lxd_remote_password
default = true
}
}
# LXD-specific variables
variable "lxd_remote_name" {
description = "LXD remote name"
type = string
default = "local"
}
variable "lxd_remote_address" {
description = "LXD remote server address"
type = string
default = "{{ infrastructure.lxd_address | default(value="127.0.0.1:8443") }}"
}
variable "lxd_remote_password" {
description = "LXD remote server password"
type = string
sensitive = true
default = ""
}
variable "lxd_image" {
description = "LXD container image"
type = string
default = "ubuntu:22.04"
}
# LXD profile for containers
resource "lxd_profile" "app" {
name = "${local.project_name}-${local.environment}"
config = {
"boot.autostart" = "true"
"security.nesting" = "true"
"security.privileged" = "false"
}
device {
name = "root"
type = "disk"
properties = {
pool = "default"
path = "/"
}
}
device {
name = "eth0"
type = "nic"
properties = {
network = "lxdbr0"
name = "eth0"
}
}
}
# LXD container instance
resource "lxd_container" "app" {
name = "${local.project_name}-${local.environment}-app"
image = var.lxd_image
profiles = [lxd_profile.app.name]
config = {
"user.project" = local.project_name
"user.environment" = local.environment
}
limits = {
cpu = "2"
memory = "2GB"
}
# NOTE: remote-exec needs a connection block (e.g. SSH) to reach the container; add one before relying on this step
provisioner "remote-exec" {
inline = [
"apt-get update",
"apt-get install -y curl wget git",
]
}
}
output "lxd_container_ip" {
description = "LXD container IP address"
value = lxd_container.app.ip_address
}
output "lxd_container_name" {
description = "LXD container name"
value = lxd_container.app.name
}
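A minimal local smoke test, assuming an LXD daemon on the same host (the address matches the provider default above):

    lxc config set core.https_address "[::]:8443"   # expose the HTTPS endpoint the provider dials
    terraform apply
    lxc list                                        # the new container and its IP should appear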

View File

@ -0,0 +1,187 @@
#!/usr/bin/env nu
# {{ project_name }} - TypeDialog Nickel Roundtrip Configuration Script (Nushell)
#
# This script integrates TypeDialog forms with Nickel configuration:
# 1. Runs TypeDialog form to collect user input (JSON output)
# 2. Converts JSON to Nickel configuration
# 3. Validates Nickel configuration against schemas
# 4. Merges with defaults and applies type contracts
# Configuration paths
const FORM_PATH = "{{ form_path }}"
const CONFIG_PATH = "{{ config_path }}"
const TEMPLATE_PATH = "{{ template_path }}"
const GENERATED_DIR = "generated"
# Color formatting
def log-info [message: string] {
print $"(ansi blue)[INFO](ansi reset) ($message)"
}
def log-success [message: string] {
print $"(ansi green)[SUCCESS](ansi reset) ($message)"
}
def log-warn [message: string] {
print $"(ansi yellow)[WARN](ansi reset) ($message)"
}
def log-error [message: string] {
print $"(ansi red)[ERROR](ansi reset) ($message)"
}
# Check if command exists
def command-exists [cmd: string]: nothing -> bool {
(which $cmd | length) > 0
}
# Check dependencies
def check-dependencies []: nothing -> nothing {
let missing = [
"typedialog",
"nickel"
] | filter {|cmd| not (command-exists $cmd)}
if ($missing | length) > 0 {
log-error $"Missing required dependencies: ($missing | str join ', ')"
log-info "Install with: cargo install typedialog nickel"
error make {msg: "Missing dependencies"}
}
}
# Step 1: Run TypeDialog form
def run-form []: nothing -> nothing {
log-info $"Running TypeDialog form: ($FORM_PATH)"
if not ($FORM_PATH | path exists) {
log-error $"Form file not found: ($FORM_PATH)"
error make {msg: "Form file not found"}
}
mkdir $GENERATED_DIR
try {
^typedialog $FORM_PATH --output-format json --output $"($GENERATED_DIR)/user-input.json"
log-success $"User input captured: ($GENERATED_DIR)/user-input.json"
} catch {|err|
log-error "TypeDialog form execution failed"
error make {msg: $"Form failed: ($err)"}
}
}
# Step 2: Convert JSON to Nickel
def json-to-nickel []: nothing -> nothing {
log-info "Converting JSON to Nickel configuration"
let json_file = $"($GENERATED_DIR)/user-input.json"
let nickel_file = $"($GENERATED_DIR)/config.ncl"
if not ($json_file | path exists) {
log-error $"JSON input file not found: ($json_file)"
error make {msg: "JSON input not found"}
}
let timestamp = (date now | format date "%Y-%m-%dT%H:%M:%SZ")
let user_json = (open $json_file)
# Generate Nickel configuration with embedded JSON
let nickel_content = $"# Generated Nickel configuration from TypeDialog form
# Generated at: ($timestamp)
let schemas = import \"../schemas/config.ncl\" in
let defaults = import \"../defaults/config.ncl\" in
let validators = import \"../validators/config.ncl\" in
# User-provided values \(from TypeDialog form\)
let user_values = ($user_json | to json) in
# Merge user values with defaults and apply type contracts
let final_config = std.record.merge defaults user_values in
# Apply validators
let validated_config = validators.validate final_config in
validated_config | schemas.Config"
$nickel_content | save --force $nickel_file
log-success $"Nickel configuration generated: ($nickel_file)"
}
# Step 3: Validate Nickel configuration
def validate-nickel []: nothing -> nothing {
log-info "Validating Nickel configuration"
let nickel_file = $"($GENERATED_DIR)/config.ncl"
# Type checking
try {
^nickel typecheck $nickel_file
log-success "Nickel type checking passed"
} catch {|err|
log-error "Nickel type checking failed"
error make {msg: $"Type check failed: ($err)"}
}
# Test evaluation
try {
^nickel export $nickel_file | ignore
log-success "Nickel evaluation succeeded"
} catch {|err|
log-error "Nickel evaluation failed"
error make {msg: $"Evaluation failed: ($err)"}
}
}
# Step 4: Export to final formats
def export-config []: nothing -> nothing {
log-info "Exporting configuration to final formats"
let nickel_file = $"($GENERATED_DIR)/config.ncl"
# Export to JSON
try {
^nickel export --format json $nickel_file | save --force $"($GENERATED_DIR)/config.json"
log-success $"Exported: ($GENERATED_DIR)/config.json"
} catch {|err|
log-error $"JSON export failed: ($err)"
}
# Export to YAML
try {
^nickel export --format yaml $nickel_file | save --force $"($GENERATED_DIR)/config.yaml"
log-success $"Exported: ($GENERATED_DIR)/config.yaml"
} catch {|err|
log-error $"YAML export failed: ($err)"
}
# Export to TOML (convert from JSON)
if (command-exists "json2toml") {
try {
open $"($GENERATED_DIR)/config.json" | ^json2toml | save --force $"($GENERATED_DIR)/config.toml"
log-success $"Exported: ($GENERATED_DIR)/config.toml"
} catch {|err|
log-warn $"TOML export failed: ($err)"
}
} else {
log-warn "json2toml not found, skipping TOML export"
}
}
# Main execution
def main []: nothing -> nothing {
log-info "Starting {{ project_name }} configuration"
check-dependencies
run-form
json-to-nickel
validate-nickel
export-config
log-success "Configuration complete!"
log-info $"Generated files in: ($GENERATED_DIR)/"
log-info " - config.ncl (Nickel source)"
log-info " - config.json (JSON export)"
log-info " - config.yaml (YAML export)"
}
main
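With the shebang above, the Nushell variant can be invoked directly once nu is installed (it runs the same four steps as the bash script that follows):

    chmod +x configure.nu
    ./configure.nu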

View File

@ -0,0 +1,186 @@
#!/usr/bin/env bash
# {{ project_name }} - TypeDialog Nickel Roundtrip Configuration Script
#
# This script integrates TypeDialog forms with Nickel configuration:
# 1. Runs TypeDialog form to collect user input (JSON output)
# 2. Converts JSON to Nickel configuration
# 3. Validates Nickel configuration against schemas
# 4. Merges with defaults and applies type contracts
#
# Usage: ./configure.sh
set -euo pipefail
# Configuration paths
FORM_PATH="{{ form_path }}"
CONFIG_PATH="{{ config_path }}"
TEMPLATE_PATH="{{ template_path }}"
GENERATED_DIR="generated"
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color
# Logging functions
log_info() {
echo -e "${BLUE}[INFO]${NC} $1"
}
log_success() {
echo -e "${GREEN}[SUCCESS]${NC} $1"
}
log_warn() {
echo -e "${YELLOW}[WARN]${NC} $1"
}
log_error() {
echo -e "${RED}[ERROR]${NC} $1"
}
# Check dependencies
check_dependencies() {
local missing_deps=()
if ! command -v typedialog &> /dev/null; then
missing_deps+=("typedialog")
fi
if ! command -v nickel &> /dev/null; then
missing_deps+=("nickel")
fi
if ! command -v jq &> /dev/null; then
missing_deps+=("jq")
fi
if [ ${#missing_deps[@]} -gt 0 ]; then
log_error "Missing required dependencies: ${missing_deps[*]}"
log_info "Install with: cargo install typedialog nickel && apt-get install jq"
exit 1
fi
}
# Step 1: Run TypeDialog form
run_form() {
log_info "Running TypeDialog form: ${FORM_PATH}"
if [ ! -f "${FORM_PATH}" ]; then
log_error "Form file not found: ${FORM_PATH}"
exit 1
fi
mkdir -p "${GENERATED_DIR}"
typedialog "${FORM_PATH}" \
--output-format json \
--output "${GENERATED_DIR}/user-input.json" || {
log_error "TypeDialog form execution failed"
exit 1
}
log_success "User input captured: ${GENERATED_DIR}/user-input.json"
}
# Step 2: Convert JSON to Nickel
json_to_nickel() {
log_info "Converting JSON to Nickel configuration"
local json_file="${GENERATED_DIR}/user-input.json"
local nickel_file="${GENERATED_DIR}/config.ncl"
if [ ! -f "${json_file}" ]; then
log_error "JSON input file not found: ${json_file}"
exit 1
fi
# Use Nickel's import capability to embed JSON
cat > "${nickel_file}" <<EOF
# Generated Nickel configuration from TypeDialog form
# Generated at: $(date -u +"%Y-%m-%dT%H:%M:%SZ")
let schemas = import "../schemas/config.ncl" in
let defaults = import "../defaults/config.ncl" in
let validators = import "../validators/config.ncl" in
# User-provided values (from TypeDialog form)
let user_values = $(cat "${json_file}") in
# Merge user values with defaults and apply type contracts
let final_config = std.record.merge defaults user_values in
# Apply validators
let validated_config = validators.validate final_config in
validated_config | schemas.Config
EOF
log_success "Nickel configuration generated: ${nickel_file}"
}
# Step 3: Validate Nickel configuration
validate_nickel() {
log_info "Validating Nickel configuration"
local nickel_file="${GENERATED_DIR}/config.ncl"
if ! nickel typecheck "${nickel_file}"; then
log_error "Nickel type checking failed"
exit 1
fi
log_success "Nickel type checking passed"
# Test evaluation
if ! nickel export "${nickel_file}" > /dev/null; then
log_error "Nickel evaluation failed"
exit 1
fi
log_success "Nickel evaluation succeeded"
}
# Step 4: Export to final formats
export_config() {
log_info "Exporting configuration to final formats"
local nickel_file="${GENERATED_DIR}/config.ncl"
# Export to JSON
nickel export --format json "${nickel_file}" > "${GENERATED_DIR}/config.json"
log_success "Exported: ${GENERATED_DIR}/config.json"
# Export to YAML
nickel export --format yaml "${nickel_file}" > "${GENERATED_DIR}/config.yaml"
log_success "Exported: ${GENERATED_DIR}/config.yaml"
# Export to TOML (via JSON)
if command -v json2toml &> /dev/null; then
jq -r '.' "${GENERATED_DIR}/config.json" | json2toml > "${GENERATED_DIR}/config.toml"
log_success "Exported: ${GENERATED_DIR}/config.toml"
else
log_warn "json2toml not found, skipping TOML export"
fi
}
# Main execution
main() {
log_info "Starting {{ project_name }} configuration"
check_dependencies
run_form
json_to_nickel
validate_nickel
export_config
log_success "Configuration complete!"
log_info "Generated files in: ${GENERATED_DIR}/"
log_info " - config.ncl (Nickel source)"
log_info " - config.json (JSON export)"
log_info " - config.yaml (YAML export)"
}
main "$@"

View File

@ -0,0 +1,40 @@
#!/usr/bin/env nu
# Convert JSON to Nickel configuration with schema imports (Nushell)
# Usage: ./json-to-nickel.nu <input.json> [output.ncl]
def main [
input: path, # Input JSON file
output?: path # Output Nickel file (optional)
] {
let output_path = if ($output == null) {
# keep the input's directory ("path parse | get stem" alone would drop it)
let parsed = ($input | path parse)
$parsed.parent | path join $"($parsed.stem).ncl"
} else {
$output
}
if not ($input | path exists) {
error make {msg: $"Input file not found: ($input)"}
}
let json_content = (open $input | to json)
let timestamp = (date now | format date "%Y-%m-%dT%H:%M:%SZ")
let nickel_content = $"# Generated Nickel configuration from JSON
# Source: ($input)
# Generated at: ($timestamp)
let schemas = import \"../schemas/config.ncl\" in
let defaults = import \"../defaults/config.ncl\" in
# User-provided values
let user_values = ($json_content) in
# Merge with defaults
let final_config = std.record.merge defaults user_values in
final_config | schemas.Config"
$nickel_content | save --force $output_path
print $"Converted: ($output_path)"
print $"Validate with: nickel typecheck ($output_path)"
}

View File

@ -0,0 +1,45 @@
#!/usr/bin/env bash
# Convert JSON to Nickel configuration with schema imports
# Usage: ./json-to-nickel.sh <input.json> [output.ncl]
set -euo pipefail
if [ $# -lt 1 ]; then
echo "Usage: $0 <input.json> [output.ncl]"
exit 1
fi
INPUT="$1"
OUTPUT="${2:-${INPUT%.json}.ncl}"
if [ ! -f "${INPUT}" ]; then
echo "Error: Input file not found: ${INPUT}"
exit 1
fi
if ! command -v jq &> /dev/null; then
echo "Error: jq command not found"
echo "Install with: apt-get install jq"
exit 1
fi
# Generate Nickel file with embedded JSON
cat > "${OUTPUT}" <<EOF
# Generated Nickel configuration from JSON
# Source: ${INPUT}
# Generated at: $(date -u +"%Y-%m-%dT%H:%M:%SZ")
let schemas = import "../schemas/config.ncl" in
let defaults = import "../defaults/config.ncl" in
# User-provided values
let user_values = $(cat "${INPUT}") in
# Merge with defaults
let final_config = std.record.merge defaults user_values in
final_config | schemas.Config
EOF
echo "Converted: ${OUTPUT}"
echo "Validate with: nickel typecheck ${OUTPUT}"

View File

@ -0,0 +1,25 @@
#!/usr/bin/env nu
# Convert Nickel configuration to JSON (Nushell)
# Usage: ./nickel-to-json.nu <input.ncl> [output.json]
def main [
input: path, # Input Nickel file
output?: path # Output JSON file (optional)
] {
let output_path = if ($output == null) {
# keep the input's directory ("path parse | get stem" alone would drop it)
let parsed = ($input | path parse)
$parsed.parent | path join $"($parsed.stem).json"
} else {
$output
}
if not ($input | path exists) {
error make {msg: $"Input file not found: ($input)"}
}
if not ((which nickel | length) > 0) {
error make {msg: "nickel command not found. Install with: cargo install nickel"}
}
^nickel export --format json $input | save --force $output_path
print $"Exported: ($output_path)"
}

View File

@ -0,0 +1,27 @@
#!/usr/bin/env bash
# Convert Nickel configuration to JSON
# Usage: ./nickel-to-json.sh <input.ncl> [output.json]
set -euo pipefail
if [ $# -lt 1 ]; then
echo "Usage: $0 <input.ncl> [output.json]"
exit 1
fi
INPUT="$1"
OUTPUT="${2:-${INPUT%.ncl}.json}"
if [ ! -f "${INPUT}" ]; then
echo "Error: Input file not found: ${INPUT}"
exit 1
fi
if ! command -v nickel &> /dev/null; then
echo "Error: nickel command not found"
echo "Install with: cargo install nickel"
exit 1
fi
nickel export --format json "${INPUT}" > "${OUTPUT}"
echo "Exported: ${OUTPUT}"

View File

@ -0,0 +1,25 @@
#!/usr/bin/env nu
# Convert Nickel configuration to YAML (Nushell)
# Usage: ./nickel-to-yaml.nu <input.ncl> [output.yaml]
def main [
input: path, # Input Nickel file
output?: path # Output YAML file (optional)
] {
let output_path = if ($output == null) {
# keep the input's directory ("path parse | get stem" alone would drop it)
let parsed = ($input | path parse)
$parsed.parent | path join $"($parsed.stem).yaml"
} else {
$output
}
if not ($input | path exists) {
error make {msg: $"Input file not found: ($input)"}
}
if not ((which nickel | length) > 0) {
error make {msg: "nickel command not found. Install with: cargo install nickel"}
}
^nickel export --format yaml $input | save --force $output_path
print $"Exported: ($output_path)"
}

View File

@ -0,0 +1,27 @@
#!/usr/bin/env bash
# Convert Nickel configuration to YAML
# Usage: ./nickel-to-yaml.sh <input.ncl> [output.yaml]
set -euo pipefail
if [ $# -lt 1 ]; then
echo "Usage: $0 <input.ncl> [output.yaml]"
exit 1
fi
INPUT="$1"
OUTPUT="${2:-${INPUT%.ncl}.yaml}"
if [ ! -f "${INPUT}" ]; then
echo "Error: Input file not found: ${INPUT}"
exit 1
fi
if ! command -v nickel &> /dev/null; then
echo "Error: nickel command not found"
echo "Install with: cargo install nickel"
exit 1
fi
nickel export --format yaml "${INPUT}" > "${OUTPUT}"
echo "Exported: ${OUTPUT}"

View File

@ -0,0 +1,61 @@
# Array validators for {{ feature_name }}
#
# Validators for repeating groups and array constraints.
let common = import "./common.ncl" in
let constraints = import "../constraints.toml" in
{
{% for array_field in array_fields %}
# Uniqueness validator for {{ array_field.name }}
Unique{{ array_field.name | capitalize }} = fun label =>
std.contract.from_predicate (fun arr =>
std.is_array arr &&
let values = std.array.map (fun item => item.{{ array_field.unique_key }}) arr in
# count distinct values (std.array.sort needs a comparator, and sorting alone would not dedupe)
let distinct = std.array.fold_left
(fun acc => fun v => if std.array.elem v acc then acc else acc @ [v])
[]
values
in
std.array.length distinct == std.array.length arr
) {
label = label,
message = "{{ array_field.name }} items must have unique {{ array_field.unique_key }} values",
},
# Length validator for {{ array_field.name }}
Valid{{ array_field.name | capitalize }}Length = fun label =>
std.contract.from_predicate (fun arr =>
std.is_array arr &&
let len = std.array.length arr in
{% if array_field.min_items %}len >= {{ array_field.min_items }} &&{% endif %}
{% if array_field.max_items %}len <= {{ array_field.max_items }} &&{% endif %}
true
) {
label = label,
message = "{{ array_field.name }} array length must be between {{ array_field.min_items | default(value="0") }} and {{ array_field.max_items | default(value="unlimited") }}",
},
# Composite validator for {{ array_field.name }}
Valid{{ array_field.name | capitalize }}Full = fun label =>
fun value =>
value
| Valid{{ array_field.name | capitalize }}Length label
{% if array_field.unique %}| Unique{{ array_field.name | capitalize }} label{% endif %}
,
{% endfor %}
# Master array validation (merge validated fields back over the input; Nickel has no ".." record spread)
validate_arrays = fun config =>
config & {
{% for array_field in array_fields %}
{{ array_field.name }} | force = config.{{ array_field.name }} | Valid{{ array_field.name | capitalize }}Full "{{ feature_name }}.{{ array_field.name }}",
{% endfor %}
},
}

View File

@ -0,0 +1,108 @@
# Common validators for {{ project_name }}
#
# Reusable validation predicates and type contracts.
# Import with: let validators = import "validators/common.ncl" in
{
# Port number validation (1-65535)
ValidPort = fun label =>
std.contract.from_predicate (fun port =>
std.is_number port &&
port >= 1 &&
port <= 65535
) {
label = label,
message = "must be a valid port number (1-65535)",
},
# Non-empty string validation
NonEmptyString = fun label =>
std.contract.from_predicate (fun s =>
std.is_string s &&
std.string.length s > 0
) {
label = label,
message = "must be a non-empty string",
},
# Valid bind address format (IP:PORT)
ValidBindAddress = fun label =>
std.contract.from_predicate (fun addr =>
std.is_string addr &&
std.string.contains ":" addr &&
let parts = std.string.split ":" addr in
std.array.length parts == 2
) {
label = label,
message = "must be a valid bind address (IP:PORT format)",
},
# Valid URL format
ValidUrl = fun label =>
std.contract.from_predicate (fun url =>
std.is_string url &&
(std.string.is_match "^https?://" url)
) {
label = label,
message = "must be a valid HTTP/HTTPS URL",
},
# Positive number validation
PositiveNumber = fun label =>
std.contract.from_predicate (fun n =>
std.is_number n && n > 0
) {
label = label,
message = "must be a positive number",
},
# Non-negative number validation
NonNegativeNumber = fun label =>
std.contract.from_predicate (fun n =>
std.is_number n && n >= 0
) {
label = label,
message = "must be a non-negative number",
},
# Range validation
Range = fun min => fun max => fun label =>
std.contract.from_predicate (fun n =>
std.is_number n &&
n >= min &&
n <= max
) {
label = label,
message = "must be between %{std.to_string min} and %{std.to_string max}",
},
# String pattern matching (regex)
MatchesPattern = fun pattern => fun label =>
std.contract.from_predicate (fun s =>
std.is_string s &&
std.string.is_match pattern s
) {
label = label,
message = "must match pattern: %{pattern}",
},
# Enum validation (one of allowed values)
OneOf = fun allowed => fun label =>
std.contract.from_predicate (fun value =>
std.array.any (fun v => v == value) allowed
) {
label = label,
message = "must be one of: %{std.serialize 'Json allowed}",
},
# Array length validation
ArrayLength = fun min => fun max => fun label =>
std.contract.from_predicate (fun arr =>
std.is_array arr &&
let len = std.array.length arr in
len >= min && len <= max
) {
label = label,
message = "array length must be between %{std.to_string min} and %{std.to_string max}",
},
}
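A minimal smoke test for one contract, run from the directory containing common.ncl (the layout is an assumption about the generated project):

    cat > port-check.ncl <<'EOF'
    let v = import "./common.ncl" in
    8080 | v.ValidPort "server.port"
    EOF
    nickel eval port-check.ncl   # expected to print 8080 when the contract passes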

View File

@ -0,0 +1,51 @@
# Custom validators for {{ feature_name }}
#
# Feature-specific validation logic.
let common = import "./common.ncl" in
{
{% for validator in validators %}
# {{ validator.description | default(value="Custom validator for " ~ validator.name) }}
{{ validator.name }} = {% if validator.validator_type == "Range" %}common.Range {{ validator.min | default(value="0") }} {{ validator.max | default(value="100") }} "{{ validator.name }}"
{% elif validator.validator_type == "Pattern" %}common.MatchesPattern "{{ validator.pattern | default(value=".*") }}" "{{ validator.name }}"
{% elif validator.validator_type == "ArrayUniqueness" %}fun label =>
std.contract.from_predicate (fun arr =>
std.is_array arr &&
let values = std.array.map (fun item => item.{{ validator.unique_field | default(value="id") }}) arr in
# sorting never changes length; compare against a deduplicated copy instead
let distinct = std.array.fold_left
(fun acc => fun v => if std.array.elem v acc then acc else acc @ [v])
[]
values
in
std.array.length values == std.array.length distinct
) {
label = label,
message = "array items must have unique {{ validator.unique_field | default(value="id") }} values",
}
{% elif validator.validator_type == "Composite" %}fun label =>
std.contract.from_predicate (fun value =>
# Composite validation logic
{% for field in validator.applies_to %}
{{ field }}_valid value &&
{% endfor %}
true
) {
label = label,
message = "composite validation failed for {{ validator.name }}",
}
{% else %}fun label =>
std.contract.from_predicate (fun value =>
# Custom predicate logic
true # TODO: Implement custom validation
) {
label = label,
message = "{{ validator.description | default(value="validation failed") }}",
}
{% endif %},
{% endfor %}
# Master validation function for {{ feature_name }}
validate = fun config =>
config
# Apply all validators here
{% for validator in validators %}
# | {{ validator.name }} "{{ feature_name }}.{{ validator.name }}"
{% endfor %}
,
}

View File

@ -42,6 +42,10 @@ fn test_project_spec_validation() {
infrastructure: Default::default(),
domain_features: vec![DomainFeature::new("basic".to_string())],
constraints: vec![],
iac_templates: Default::default(),
scripts: Default::default(),
docs: Default::default(),
locales: vec![],
};
let result = spec.validate();

View File

@ -3,11 +3,11 @@
use std::collections::HashMap;
use std::fs;
use std::path::PathBuf;
use typedialog_core::backends::{BackendFactory, BackendType};
use typedialog_core::config::TypeDialogConfig;
use typedialog_core::i18n::{I18nBundle, LocaleLoader, LocaleResolver};
use typedialog_core::form_parser;
use typedialog_core::helpers;
use typedialog_core::i18n::LocaleLoader;
use typedialog_core::nickel::NickelCli;
use typedialog_core::{form_parser, helpers, Error, Result};
use typedialog_core::prelude::*;
use unic_langid::LanguageIdentifier;
use super::helpers::{extract_nickel_defaults, flatten_json_object};

View File

@ -3,11 +3,11 @@
use std::collections::HashMap;
use std::fs;
use std::path::PathBuf;
use typedialog_core::backends::{BackendFactory, BackendType};
use typedialog_core::form_parser;
use typedialog_core::nickel::{
I18nExtractor, MetadataParser, NickelCli, TemplateEngine, TomlGenerator,
};
use typedialog_core::{form_parser, Error, Result};
use typedialog_core::prelude::*;
#[allow(clippy::too_many_arguments)]
pub fn nickel_to_form(

View File

@ -10,8 +10,8 @@ mod commands;
use clap::{Parser, Subcommand};
use std::path::PathBuf;
use typedialog_core::cli_common;
use typedialog_core::config::{load_backend_config, TypeDialogConfig};
use typedialog_core::{Error, Result};
use typedialog_core::config::load_backend_config;
use typedialog_core::prelude::*;
#[derive(Parser)]
#[command(

View File

@ -8,11 +8,12 @@
use clap::{Parser, Subcommand};
use std::fs;
use std::path::{Path, PathBuf};
use typedialog_core::backends::{BackendFactory, BackendType};
use typedialog_core::cli_common;
use typedialog_core::config::{load_backend_config, TypeDialogConfig};
use typedialog_core::i18n::{I18nBundle, LocaleLoader, LocaleResolver};
use typedialog_core::{form_parser, helpers, Error, Result};
use typedialog_core::config::load_backend_config;
use typedialog_core::form_parser;
use typedialog_core::helpers;
use typedialog_core::i18n::LocaleLoader;
use typedialog_core::prelude::*;
use unic_langid::LanguageIdentifier;
#[derive(Parser)]

View File

@ -3,11 +3,11 @@
use std::collections::HashMap;
use std::fs;
use std::path::{Path, PathBuf};
use typedialog_core::backends::BackendFactory;
use typedialog_core::config::TypeDialogConfig;
use typedialog_core::i18n::{I18nBundle, LocaleLoader, LocaleResolver};
use typedialog_core::form_parser;
use typedialog_core::helpers;
use typedialog_core::i18n::LocaleLoader;
use typedialog_core::nickel::{NickelCli, TemplateEngine};
use typedialog_core::{form_parser, helpers, Error, Result};
use typedialog_core::prelude::*;
use unic_langid::LanguageIdentifier;
use super::helpers::{extract_nickel_defaults, flatten_json_object, print_results};

View File

@ -2,7 +2,9 @@
use std::collections::HashMap;
use std::path::PathBuf;
use typedialog_core::{form_parser, helpers, Error, Result};
use typedialog_core::form_parser;
use typedialog_core::helpers;
use typedialog_core::prelude::*;
/// Print results with encryption/redaction support
pub fn print_results(

View File

@ -3,10 +3,11 @@
use std::collections::HashMap;
use std::fs;
use std::path::PathBuf;
use typedialog_core::form_parser;
use typedialog_core::nickel::{
I18nExtractor, MetadataParser, NickelCli, TemplateEngine, TomlGenerator,
};
use typedialog_core::{form_parser, Error, Result};
use typedialog_core::prelude::*;
#[allow(clippy::too_many_arguments)]
pub fn nickel_to_form(

View File

@ -2,7 +2,7 @@
use std::collections::HashMap;
use std::path::PathBuf;
use typedialog_core::{Error, Result};
use typedialog_core::prelude::*;
/// Print a single result value in the requested format
pub fn print_result(

View File

@ -12,8 +12,9 @@ use clap::{Parser, Subcommand};
use serde_json::json;
use std::path::PathBuf;
use typedialog_core::cli_common;
use typedialog_core::config::{load_backend_config, TypeDialogConfig};
use typedialog_core::{prompts, Result};
use typedialog_core::config::load_backend_config;
use typedialog_core::prelude::*;
use typedialog_core::prompts;
#[derive(Parser)]
#[command(

View File

@ -7,9 +7,10 @@
//!
//! Run with: cargo run --example conditional_required_demo
use typedialog::form_parser;
use typedialog_core::form_parser;
use typedialog_core::prelude::*;
fn main() -> typedialog::Result<()> {
fn main() -> Result<()> {
println!("╔════════════════════════════════════════════════════════════════╗");
println!("║ Conditional & Required Fields Demonstration ║");
println!("╚════════════════════════════════════════════════════════════════╝\n");

View File

@ -7,7 +7,7 @@
//!
//! Run with: cargo run --example autocompletion_demo
use typedialog::autocompletion::{FilterCompleter, HistoryCompleter, PatternCompleter, PatternType};
use typedialog_core::autocompletion::{FilterCompleter, HistoryCompleter, PatternCompleter, PatternType};
fn main() {
println!("=== typedialog Autocompletion Demo ===\n");

View File

@ -9,10 +9,11 @@
//!
//! Run with: cargo run --example form_with_autocompletion
use typedialog::autocompletion::{FilterCompleter, HistoryCompleter, PatternCompleter, PatternType};
use typedialog::prompts;
use typedialog_core::autocompletion::{FilterCompleter, HistoryCompleter, PatternCompleter, PatternType};
use typedialog_core::prelude::*;
use typedialog_core::prompts;
fn main() -> typedialog::Result<()> {
fn main() -> Result<()> {
println!("╔════════════════════════════════════════════════════════════════╗");
println!("║ User Registration with Autocompletion ║");
println!("╚════════════════════════════════════════════════════════════════╝\n");

View File

@ -2,9 +2,11 @@
//!
//! Run with: cargo run --example library_example
use typedialog::{prompts, form_parser};
use typedialog_core::form_parser;
use typedialog_core::prelude::*;
use typedialog_core::prompts;
fn main() -> typedialog::Result<()> {
fn main() -> Result<()> {
println!("\n=== typedialog Library Examples ===\n");
// Example 1: Simple text input

View File

@ -13,6 +13,7 @@
#[cfg(feature = "ai_backend")]
fn main() -> Result<(), Box<dyn std::error::Error>> {
use std::time::Instant;
use typedialog_core::prelude::*;
use typedialog_core::ai::rag::{RagConfig, RagSystem};
println!("\n╔════════════════════════════════════════════════════════╗");