chore: decouple code after analysis and refactoring

parent 813abc057e
commit 24d715523f
@@ -11,7 +11,7 @@ verbose = false
[provisioning.templates]
base_path = "templates"
custom_path = null
# custom_path = "" # Optional: set custom templates path

[provisioning.infrastructure]
environment = "production"
@@ -13,6 +13,7 @@ pub mod error;
pub mod executor;
pub mod formats;
pub mod llm;
mod loader;
pub mod nickel;
pub mod parser;
pub mod transpiler;
@@ -23,161 +24,7 @@ pub use cache::{CacheManager, CacheStats, CacheStrategy};
pub use error::{Error, Result};
pub use executor::{AgentExecutor, ExecutionResult};
pub use formats::{AgentFormat, FormatDetector};
pub use loader::AgentLoader;
pub use nickel::{AgentConfig, AgentDefinition, NickelEvaluator};
pub use parser::{AgentDirective, MarkupNode, MarkupParser};
pub use transpiler::NickelTranspiler;
|
||||
/// Agent loader - main entry point
|
||||
pub struct AgentLoader {
|
||||
parser: MarkupParser,
|
||||
transpiler: NickelTranspiler,
|
||||
evaluator: NickelEvaluator,
|
||||
executor: AgentExecutor,
|
||||
cache: Option<std::sync::Arc<std::sync::Mutex<CacheManager>>>,
|
||||
}
|
||||
|
||||
impl AgentLoader {
|
||||
/// Create new agent loader
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
parser: MarkupParser::new(),
|
||||
transpiler: NickelTranspiler::new(),
|
||||
evaluator: NickelEvaluator::new(),
|
||||
executor: AgentExecutor::new(),
|
||||
cache: Some(std::sync::Arc::new(std::sync::Mutex::new(
|
||||
CacheManager::default(),
|
||||
))),
|
||||
}
|
||||
}
|
||||
|
||||
/// Create without cache
|
||||
pub fn without_cache() -> Self {
|
||||
Self {
|
||||
parser: MarkupParser::new(),
|
||||
transpiler: NickelTranspiler::new(),
|
||||
evaluator: NickelEvaluator::new(),
|
||||
executor: AgentExecutor::new(),
|
||||
cache: None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Load agent from file
|
||||
///
|
||||
/// Executes the 3-layer pipeline:
|
||||
/// 1. Parse MDX → AST
|
||||
/// 2. Transpile AST → Nickel
|
||||
/// 3. Evaluate Nickel → AgentDefinition
|
||||
pub async fn load(&self, path: &std::path::Path) -> Result<AgentDefinition> {
|
||||
// Read file content
|
||||
let content = std::fs::read_to_string(path).map_err(|e| {
|
||||
Error::io(
|
||||
format!("Failed to read agent file: {:?}", path),
|
||||
e.to_string(),
|
||||
)
|
||||
})?;
|
||||
|
||||
// Check cache for transpiled Nickel code
|
||||
let file_mtime = std::fs::metadata(path)
|
||||
.ok()
|
||||
.and_then(|m| m.modified().ok())
|
||||
.and_then(|t| t.duration_since(std::time::UNIX_EPOCH).ok())
|
||||
.map(|d| d.as_secs())
|
||||
.unwrap_or(0);
|
||||
|
||||
let path_str = path.to_string_lossy().to_string();
|
||||
|
||||
// Try to get from cache
|
||||
let nickel_code = if let Some(cache_arc) = &self.cache {
|
||||
// Try to get from cache first
|
||||
let mut cache = cache_arc.lock().unwrap();
|
||||
if let Some(cached) = cache.get_transpiled(&path_str, file_mtime) {
|
||||
cached
|
||||
} else {
|
||||
// Not in cache, do full transpilation
|
||||
drop(cache); // Release lock before parsing
|
||||
|
||||
let ast = self.parser.parse(&content)?;
|
||||
let nickel = self.transpiler.transpile(&ast)?;
|
||||
|
||||
// Store in cache
|
||||
let mut cache_mut = cache_arc.lock().unwrap();
|
||||
// Ignore cache errors - we still have the nickel code to use
|
||||
cache_mut
|
||||
.put_transpiled(&path_str, file_mtime, &nickel)
|
||||
.ok();
|
||||
nickel
|
||||
}
|
||||
} else {
|
||||
// No cache, do full transpilation
|
||||
let ast = self.parser.parse(&content)?;
|
||||
self.transpiler.transpile(&ast)?
|
||||
};
|
||||
|
||||
// Evaluate Nickel code to get AgentDefinition
|
||||
self.evaluator.evaluate(&nickel_code)
|
||||
}
|
||||
|
||||
/// Execute agent
|
||||
///
|
||||
/// Delegates to AgentExecutor for actual execution with LLM.
|
||||
/// Returns ExecutionResult with output, validation status, and metadata.
|
||||
pub async fn execute(
|
||||
&self,
|
||||
agent: &AgentDefinition,
|
||||
inputs: std::collections::HashMap<String, serde_json::Value>,
|
||||
) -> Result<ExecutionResult> {
|
||||
self.executor.execute(agent, inputs).await
|
||||
}
|
||||
|
||||
/// Execute agent with streaming output
|
||||
///
|
||||
/// The callback is invoked for each chunk of output as it arrives from the LLM.
|
||||
/// Useful for real-time display in CLI or web interfaces.
|
||||
pub async fn execute_streaming<F>(
|
||||
&self,
|
||||
agent: &AgentDefinition,
|
||||
inputs: std::collections::HashMap<String, serde_json::Value>,
|
||||
on_chunk: F,
|
||||
) -> Result<ExecutionResult>
|
||||
where
|
||||
F: FnMut(&str),
|
||||
{
|
||||
self.executor
|
||||
.execute_streaming(agent, inputs, on_chunk)
|
||||
.await
|
||||
}
|
||||
|
||||
/// Load and execute in one call
|
||||
///
|
||||
/// Convenience method that combines load() and execute().
|
||||
pub async fn load_and_execute(
|
||||
&self,
|
||||
path: &std::path::Path,
|
||||
inputs: std::collections::HashMap<String, serde_json::Value>,
|
||||
) -> Result<ExecutionResult> {
|
||||
let agent = self.load(path).await?;
|
||||
self.execute(&agent, inputs).await
|
||||
}
|
||||
|
||||
/// Load and execute with streaming
|
||||
///
|
||||
/// Convenience method that combines load() and execute_streaming().
|
||||
pub async fn load_and_execute_streaming<F>(
|
||||
&self,
|
||||
path: &std::path::Path,
|
||||
inputs: std::collections::HashMap<String, serde_json::Value>,
|
||||
on_chunk: F,
|
||||
) -> Result<ExecutionResult>
|
||||
where
|
||||
F: FnMut(&str),
|
||||
{
|
||||
let agent = self.load(path).await?;
|
||||
self.execute_streaming(&agent, inputs, on_chunk).await
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for AgentLoader {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
164 crates/typedialog-agent/typedialog-ag-core/src/loader.rs Normal file
@@ -0,0 +1,164 @@
|
||||
//! Agent loader - main entry point for loading and executing agents
|
||||
|
||||
use crate::cache::CacheManager;
|
||||
use crate::error::{Error, Result};
|
||||
use crate::executor::{AgentExecutor, ExecutionResult};
|
||||
use crate::nickel::{AgentDefinition, NickelEvaluator};
|
||||
use crate::parser::MarkupParser;
|
||||
use crate::transpiler::NickelTranspiler;
|
||||
use std::collections::HashMap;
|
||||
use std::path::Path;
|
||||
use std::sync::{Arc, Mutex};
|
||||
|
||||
/// Agent loader - main entry point for 3-layer pipeline execution
|
||||
pub struct AgentLoader {
|
||||
parser: MarkupParser,
|
||||
transpiler: NickelTranspiler,
|
||||
evaluator: NickelEvaluator,
|
||||
executor: AgentExecutor,
|
||||
cache: Option<Arc<Mutex<CacheManager>>>,
|
||||
}
|
||||
|
||||
impl AgentLoader {
|
||||
/// Create new agent loader with cache enabled
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
parser: MarkupParser::new(),
|
||||
transpiler: NickelTranspiler::new(),
|
||||
evaluator: NickelEvaluator::new(),
|
||||
executor: AgentExecutor::new(),
|
||||
cache: Some(Arc::new(Mutex::new(CacheManager::default()))),
|
||||
}
|
||||
}
|
||||
|
||||
/// Create without cache
|
||||
pub fn without_cache() -> Self {
|
||||
Self {
|
||||
parser: MarkupParser::new(),
|
||||
transpiler: NickelTranspiler::new(),
|
||||
evaluator: NickelEvaluator::new(),
|
||||
executor: AgentExecutor::new(),
|
||||
cache: None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Load agent from file
|
||||
///
|
||||
/// Executes the 3-layer pipeline:
|
||||
/// 1. Parse MDX → AST
|
||||
/// 2. Transpile AST → Nickel
|
||||
/// 3. Evaluate Nickel → AgentDefinition
|
||||
pub async fn load(&self, path: &Path) -> Result<AgentDefinition> {
|
||||
// Read file content
|
||||
let content = std::fs::read_to_string(path).map_err(|e| {
|
||||
Error::io(
|
||||
format!("Failed to read agent file: {:?}", path),
|
||||
e.to_string(),
|
||||
)
|
||||
})?;
|
||||
|
||||
// Check cache for transpiled Nickel code
|
||||
let file_mtime = std::fs::metadata(path)
|
||||
.ok()
|
||||
.and_then(|m| m.modified().ok())
|
||||
.and_then(|t| t.duration_since(std::time::UNIX_EPOCH).ok())
|
||||
.map(|d| d.as_secs())
|
||||
.unwrap_or(0);
|
||||
|
||||
let path_str = path.to_string_lossy().to_string();
|
||||
|
||||
// Try to get from cache
|
||||
let nickel_code = if let Some(cache_arc) = &self.cache {
|
||||
// Try to get from cache first
|
||||
let mut cache = cache_arc.lock().unwrap();
|
||||
if let Some(cached) = cache.get_transpiled(&path_str, file_mtime) {
|
||||
cached
|
||||
} else {
|
||||
// Not in cache, do full transpilation
|
||||
drop(cache); // Release lock before parsing
|
||||
|
||||
let ast = self.parser.parse(&content)?;
|
||||
let nickel = self.transpiler.transpile(&ast)?;
|
||||
|
||||
// Store in cache
|
||||
let mut cache_mut = cache_arc.lock().unwrap();
|
||||
// Ignore cache errors - we still have the nickel code to use
|
||||
cache_mut
|
||||
.put_transpiled(&path_str, file_mtime, &nickel)
|
||||
.ok();
|
||||
nickel
|
||||
}
|
||||
} else {
|
||||
// No cache, do full transpilation
|
||||
let ast = self.parser.parse(&content)?;
|
||||
self.transpiler.transpile(&ast)?
|
||||
};
|
||||
|
||||
// Evaluate Nickel code to get AgentDefinition
|
||||
self.evaluator.evaluate(&nickel_code)
|
||||
}
|
||||
|
||||
/// Execute agent
|
||||
///
|
||||
/// Delegates to AgentExecutor for actual execution with LLM.
|
||||
/// Returns ExecutionResult with output, validation status, and metadata.
|
||||
pub async fn execute(
|
||||
&self,
|
||||
agent: &AgentDefinition,
|
||||
inputs: HashMap<String, serde_json::Value>,
|
||||
) -> Result<ExecutionResult> {
|
||||
self.executor.execute(agent, inputs).await
|
||||
}
|
||||
|
||||
/// Execute agent with streaming output
|
||||
///
|
||||
/// The callback is invoked for each chunk of output as it arrives from the LLM.
|
||||
/// Useful for real-time display in CLI or web interfaces.
|
||||
pub async fn execute_streaming<F>(
|
||||
&self,
|
||||
agent: &AgentDefinition,
|
||||
inputs: HashMap<String, serde_json::Value>,
|
||||
on_chunk: F,
|
||||
) -> Result<ExecutionResult>
|
||||
where
|
||||
F: FnMut(&str),
|
||||
{
|
||||
self.executor
|
||||
.execute_streaming(agent, inputs, on_chunk)
|
||||
.await
|
||||
}
|
||||
|
||||
/// Load and execute in one call
|
||||
///
|
||||
/// Convenience method that combines load() and execute().
|
||||
pub async fn load_and_execute(
|
||||
&self,
|
||||
path: &Path,
|
||||
inputs: HashMap<String, serde_json::Value>,
|
||||
) -> Result<ExecutionResult> {
|
||||
let agent = self.load(path).await?;
|
||||
self.execute(&agent, inputs).await
|
||||
}
|
||||
|
||||
/// Load and execute with streaming
|
||||
///
|
||||
/// Convenience method that combines load() and execute_streaming().
|
||||
pub async fn load_and_execute_streaming<F>(
|
||||
&self,
|
||||
path: &Path,
|
||||
inputs: HashMap<String, serde_json::Value>,
|
||||
on_chunk: F,
|
||||
) -> Result<ExecutionResult>
|
||||
where
|
||||
F: FnMut(&str),
|
||||
{
|
||||
let agent = self.load(path).await?;
|
||||
self.execute_streaming(&agent, inputs, on_chunk).await
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for AgentLoader {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
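The extracted loader.rs keeps the same public surface that lib.rs re-exports, so callers of the crate are unaffected by the move. A minimal usage sketch follows; the crate identifier typedialog_ag_core, the tokio runtime, and the agents/example.mdx path are illustrative assumptions, not taken from this diff.

use std::collections::HashMap;
use std::path::Path;

use typedialog_ag_core::{AgentLoader, Result};

#[tokio::main]
async fn main() -> Result<()> {
    // The default constructor enables the transpilation cache;
    // AgentLoader::without_cache() skips it.
    let loader = AgentLoader::new();

    // Runs the 3-layer pipeline: parse MDX -> transpile to Nickel -> evaluate.
    let agent = loader.load(Path::new("agents/example.mdx")).await?;

    // Delegates to AgentExecutor; the ExecutionResult fields are not inspected
    // here because their exact shape is not shown in this diff.
    let inputs: HashMap<String, serde_json::Value> = HashMap::new();
    let _result = loader.execute(&agent, inputs).await?;
    Ok(())
}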
@@ -1 +1 @@
// Markdown parser module - placeholder
@@ -10,6 +10,8 @@ pub mod websocket;
#[allow(unused_imports)]
pub use error::ApiError;
#[allow(unused_imports)]
pub use rest::{create_router, AppState};
pub use rest::create_router;
#[allow(unused_imports)]
pub use types::AppState;
#[allow(unused_imports)]
pub use websocket::{WsMessage, WsResponse};
@@ -7,24 +7,9 @@ use axum::{
    Json, Router,
};
use chrono::Utc;
use std::sync::{Arc, Mutex};

use super::{error::ApiError, types::*, websocket};
use crate::assistant::ConfigAssistant;
use crate::storage::SurrealDbClient;

/// Shared application state
#[derive(Clone)]
pub struct AppState {
    /// Database client
    pub db: Arc<SurrealDbClient>,

    /// Currently active assistants by conversation ID
    pub assistants: Arc<Mutex<std::collections::HashMap<String, ConfigAssistant>>>,

    /// Server start time for uptime tracking
    pub start_time: std::time::Instant,
}
use super::{error::ApiError, types::*};
use crate::api::websocket;

/// Create Axum router with all routes
pub fn create_router(state: AppState) -> Router {
@@ -263,6 +248,8 @@ pub async fn get_suggestions(
#[cfg(test)]
mod tests {
    use super::*;
    use crate::storage::SurrealDbClient;
    use std::sync::{Arc, Mutex};

    async fn create_test_state() -> Result<AppState, Box<dyn std::error::Error>> {
        let db = Arc::new(SurrealDbClient::new("memory://", "default", "test").await?);
@@ -1,7 +1,9 @@
//! API request and response types

use crate::assistant::{AssistantResponse, FieldSuggestion, GeneratedConfig};
use crate::assistant::{AssistantResponse, ConfigAssistant, FieldSuggestion, GeneratedConfig};
use crate::storage::SurrealDbClient;
use serde::{Deserialize, Serialize};
use std::sync::{Arc, Mutex};

/// Request to start a new conversation
#[derive(Debug, Clone, Serialize, Deserialize)]
@@ -140,6 +142,19 @@ pub struct HealthResponse {
    pub uptime: u64,
}

/// Shared application state
#[derive(Clone)]
pub struct AppState {
    /// Database client
    pub db: Arc<SurrealDbClient>,

    /// Currently active assistants by conversation ID
    pub assistants: Arc<Mutex<std::collections::HashMap<String, ConfigAssistant>>>,

    /// Server start time for uptime tracking
    pub start_time: std::time::Instant,
}

/// Error response
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ErrorResponse {
@@ -12,7 +12,7 @@ use axum::{
use serde::{Deserialize, Serialize};
use serde_json::json;

use super::rest::AppState;
use super::types::AppState;
use crate::assistant::ConfigAssistant;

/// WebSocket message from client
@@ -428,7 +428,7 @@ impl FormBackend for InquireBackend {
    }

    async fn render_display_item(&self, item: &DisplayItem, context: &RenderContext) -> Result<()> {
        item.render(&context.results);
        crate::form_parser::render_display_item(item, &context.results);
        Ok(())
    }
File diff suppressed because it is too large
308 crates/typedialog-core/src/form_parser/conditions.rs Normal file
@@ -0,0 +1,308 @@
|
||||
//! Conditional expression evaluation for form fields
|
||||
//!
|
||||
//! Provides logic for evaluating `when` conditions on form elements.
|
||||
|
||||
use std::collections::HashMap;
|
||||
|
||||
use super::types::FormDefinition;
|
||||
|
||||
/// Extract field name from a condition string
|
||||
///
|
||||
/// Examples:
|
||||
/// - "enable_prometheus == true" → Some("enable_prometheus")
|
||||
/// - "provider == lxd" → Some("provider")
|
||||
/// - "grafana_port >= 3000" → Some("grafana_port")
|
||||
pub(super) fn extract_field_from_condition(condition: &str) -> Option<String> {
|
||||
let condition = condition.trim();
|
||||
|
||||
// String operators: contains, startswith, endswith
|
||||
let string_operators = ["contains", "startswith", "endswith"];
|
||||
for op_str in &string_operators {
|
||||
if let Some(pos) = condition.find(op_str) {
|
||||
let before_ok = pos == 0
|
||||
|| !condition[..pos]
|
||||
.chars()
|
||||
.last()
|
||||
.unwrap_or(' ')
|
||||
.is_alphanumeric();
|
||||
let after_ok = pos + op_str.len() >= condition.len()
|
||||
|| !condition[pos + op_str.len()..]
|
||||
.chars()
|
||||
.next()
|
||||
.unwrap_or(' ')
|
||||
.is_alphanumeric();
|
||||
|
||||
if before_ok && after_ok {
|
||||
let field_name = condition[..pos].trim();
|
||||
return Some(field_name.to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Comparison operators: ==, !=, <=, >=, <, >
|
||||
let operators = ["<=", ">=", "==", "!=", ">", "<"];
|
||||
for op_str in &operators {
|
||||
if let Some(pos) = condition.find(op_str) {
|
||||
let field_name = condition[..pos].trim();
|
||||
return Some(field_name.to_string());
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
/// Identify all selector fields in the form
|
||||
///
|
||||
/// Scans all group conditionals and returns field names that are used in `when` expressions.
|
||||
/// These are the fields that determine which fragments to load.
|
||||
pub fn identify_selector_fields(form: &FormDefinition) -> Vec<String> {
|
||||
let mut selectors = std::collections::HashSet::new();
|
||||
let mut selector_list: Vec<String> = Vec::new();
|
||||
|
||||
// Scan all elements for items with conditional visibility
|
||||
// Preserve insertion order (order of appearance in elements)
|
||||
for element in &form.elements {
|
||||
if let Some(condition) = element.when() {
|
||||
if let Some(field_name) = extract_field_from_condition(condition) {
|
||||
// Only add if not already seen (maintain first occurrence order)
|
||||
if !selectors.contains(&field_name) {
|
||||
selectors.insert(field_name.clone());
|
||||
selector_list.push(field_name);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Also scan legacy items array for backward compatibility
|
||||
for item in &form.items {
|
||||
if let Some(ref condition) = item.when {
|
||||
if let Some(field_name) = extract_field_from_condition(condition) {
|
||||
if !selectors.contains(&field_name) {
|
||||
selectors.insert(field_name.clone());
|
||||
selector_list.push(field_name);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Return in order of first appearance, not alphabetically sorted
|
||||
selector_list
|
||||
}
|
||||
|
||||
/// Check if a fragment should be loaded based on its condition
|
||||
///
|
||||
/// Returns true if:
|
||||
/// - No condition is specified (fragment always loads)
|
||||
/// - Condition evaluates to true with current results
|
||||
///
|
||||
/// Returns false if:
|
||||
/// - Condition evaluates to false with current results
|
||||
pub fn should_load_fragment(
|
||||
condition: Option<&str>,
|
||||
results: &HashMap<String, serde_json::Value>,
|
||||
) -> bool {
|
||||
match condition {
|
||||
None => true, // No condition = always load
|
||||
Some(cond) => evaluate_condition(cond, results),
|
||||
}
|
||||
}
|
||||
|
||||
/// Evaluate a conditional expression against previous results
|
||||
/// Supports formats like:
|
||||
/// - "field_name == value"
|
||||
/// - "field_name != value"
|
||||
/// - "field_name contains value"
|
||||
/// - "field_name startswith value"
|
||||
pub fn evaluate_condition(condition: &str, results: &HashMap<String, serde_json::Value>) -> bool {
|
||||
let condition = condition.trim();
|
||||
|
||||
// Check string operators first (word boundaries)
|
||||
let string_operators = ["contains", "startswith", "endswith"];
|
||||
for op_str in &string_operators {
|
||||
if let Some(pos) = condition.find(op_str) {
|
||||
// Make sure it's word-bounded (not part of another word)
|
||||
let before_ok = pos == 0
|
||||
|| !condition[..pos]
|
||||
.chars()
|
||||
.last()
|
||||
.unwrap_or(' ')
|
||||
.is_alphanumeric();
|
||||
let after_ok = pos + op_str.len() >= condition.len()
|
||||
|| !condition[pos + op_str.len()..]
|
||||
.chars()
|
||||
.next()
|
||||
.unwrap_or(' ')
|
||||
.is_alphanumeric();
|
||||
|
||||
if before_ok && after_ok {
|
||||
let left = condition[..pos].trim();
|
||||
let right = condition[pos + op_str.len()..].trim();
|
||||
|
||||
let field_value = results
|
||||
.get(left)
|
||||
.cloned()
|
||||
.unwrap_or(serde_json::Value::Null);
|
||||
let field_str = value_to_string(&field_value);
|
||||
let expected = parse_condition_value(right);
|
||||
let expected_str = value_to_string(&expected);
|
||||
|
||||
match *op_str {
|
||||
"contains" => return field_str.contains(&expected_str),
|
||||
"startswith" => return field_str.starts_with(&expected_str),
|
||||
"endswith" => return field_str.ends_with(&expected_str),
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Parse numeric/comparison operators in order of precedence (longest first to avoid partial matches)
|
||||
let operators = [
|
||||
("<=", "le"),
|
||||
(">=", "ge"),
|
||||
("==", "eq"),
|
||||
("!=", "ne"),
|
||||
(">", "gt"),
|
||||
("<", "lt"),
|
||||
];
|
||||
|
||||
for (op_str, _op_name) in &operators {
|
||||
if let Some(pos) = condition.find(op_str) {
|
||||
let left = condition[..pos].trim();
|
||||
let right = condition[pos + op_str.len()..].trim();
|
||||
|
||||
// Get the field value from results
|
||||
let field_value = results
|
||||
.get(left)
|
||||
.cloned()
|
||||
.unwrap_or(serde_json::Value::Null);
|
||||
|
||||
// Parse the right side as value (handle quoted strings and raw values)
|
||||
let expected = parse_condition_value(right);
|
||||
|
||||
// Perform comparison
|
||||
match *op_str {
|
||||
"==" => return values_equal(&field_value, &expected),
|
||||
"!=" => return !values_equal(&field_value, &expected),
|
||||
">" => {
|
||||
return compare_values(&field_value, &expected) == std::cmp::Ordering::Greater
|
||||
}
|
||||
"<" => return compare_values(&field_value, &expected) == std::cmp::Ordering::Less,
|
||||
">=" => {
|
||||
let cmp = compare_values(&field_value, &expected);
|
||||
return cmp == std::cmp::Ordering::Greater || cmp == std::cmp::Ordering::Equal;
|
||||
}
|
||||
"<=" => {
|
||||
let cmp = compare_values(&field_value, &expected);
|
||||
return cmp == std::cmp::Ordering::Less || cmp == std::cmp::Ordering::Equal;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If no valid condition found, default to true
|
||||
true
|
||||
}
|
||||
|
||||
/// Parse a value from condition right-hand side
|
||||
fn parse_condition_value(s: &str) -> serde_json::Value {
|
||||
let s = s.trim();
|
||||
|
||||
// Remove quotes if present
|
||||
if (s.starts_with('"') && s.ends_with('"')) || (s.starts_with('\'') && s.ends_with('\'')) {
|
||||
return serde_json::json!(s[1..s.len() - 1].to_string());
|
||||
}
|
||||
|
||||
// Try to parse as number
|
||||
if let Ok(n) = s.parse::<i64>() {
|
||||
return serde_json::json!(n);
|
||||
}
|
||||
if let Ok(n) = s.parse::<f64>() {
|
||||
return serde_json::json!(n);
|
||||
}
|
||||
|
||||
// Parse as boolean
|
||||
match s.to_lowercase().as_str() {
|
||||
"true" | "yes" | "1" => serde_json::json!(true),
|
||||
"false" | "no" | "0" => serde_json::json!(false),
|
||||
_ => serde_json::json!(s.to_string()), // Default to string
|
||||
}
|
||||
}
|
||||
|
||||
/// Compare two values for equality, handling different types
|
||||
fn values_equal(a: &serde_json::Value, b: &serde_json::Value) -> bool {
|
||||
match (a, b) {
|
||||
(serde_json::Value::String(s1), serde_json::Value::String(s2)) => s1 == s2,
|
||||
(serde_json::Value::Number(n1), serde_json::Value::Number(n2)) => n1 == n2,
|
||||
(serde_json::Value::Bool(b1), serde_json::Value::Bool(b2)) => b1 == b2,
|
||||
(serde_json::Value::Null, serde_json::Value::Null) => true,
|
||||
// Try numeric comparison if one is string and other is number
|
||||
(serde_json::Value::String(s), serde_json::Value::Number(n))
|
||||
| (serde_json::Value::Number(n), serde_json::Value::String(s)) => {
|
||||
if let Ok(parsed) = s.parse::<f64>() {
|
||||
if let Some(num_val) = n.as_f64() {
|
||||
return (parsed - num_val).abs() < 1e-10;
|
||||
}
|
||||
}
|
||||
false
|
||||
}
|
||||
// String to bool comparison
|
||||
(serde_json::Value::String(s), serde_json::Value::Bool(b))
|
||||
| (serde_json::Value::Bool(b), serde_json::Value::String(s)) => {
|
||||
matches!(
|
||||
(s.to_lowercase().as_str(), b),
|
||||
("true" | "yes" | "1", true) | ("false" | "no" | "0", false)
|
||||
)
|
||||
}
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Compare two values numerically
|
||||
fn compare_values(a: &serde_json::Value, b: &serde_json::Value) -> std::cmp::Ordering {
|
||||
use std::cmp::Ordering;
|
||||
|
||||
// Extract numeric values
|
||||
let a_num = extract_numeric(a);
|
||||
let b_num = extract_numeric(b);
|
||||
|
||||
match (a_num, b_num) {
|
||||
(Some(an), Some(bn)) => {
|
||||
if (an - bn).abs() < 1e-10 {
|
||||
Ordering::Equal
|
||||
} else if an > bn {
|
||||
Ordering::Greater
|
||||
} else {
|
||||
Ordering::Less
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
// Fall back to string comparison
|
||||
let a_str = value_to_string(a);
|
||||
let b_str = value_to_string(b);
|
||||
a_str.cmp(&b_str)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Extract numeric value from JSON value
|
||||
fn extract_numeric(v: &serde_json::Value) -> Option<f64> {
|
||||
match v {
|
||||
serde_json::Value::Number(n) => n.as_f64(),
|
||||
serde_json::Value::String(s) => s.parse::<f64>().ok(),
|
||||
serde_json::Value::Bool(b) => Some(if *b { 1.0 } else { 0.0 }),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Convert JSON value to string for comparison
|
||||
pub(super) fn value_to_string(v: &serde_json::Value) -> String {
|
||||
match v {
|
||||
serde_json::Value::String(s) => s.clone(),
|
||||
serde_json::Value::Number(n) => n.to_string(),
|
||||
serde_json::Value::Bool(b) => b.to_string(),
|
||||
serde_json::Value::Null => String::new(),
|
||||
other => other.to_string(),
|
||||
}
|
||||
}
|
||||
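A short sketch of how the when grammar above behaves, assuming evaluate_condition and should_load_fragment are reachable at typedialog_core::form_parser::conditions (crate and module paths are inferred from the file location, not stated in this diff).

use std::collections::HashMap;

use typedialog_core::form_parser::conditions::{evaluate_condition, should_load_fragment};

fn main() {
    let mut results: HashMap<String, serde_json::Value> = HashMap::new();
    results.insert("provider".into(), serde_json::json!("lxd"));
    results.insert("grafana_port".into(), serde_json::json!(3000));
    results.insert("enable_prometheus".into(), serde_json::json!(true));

    // Right-hand side "true"/"yes"/"1" coerces to a boolean before comparison.
    assert!(evaluate_condition("enable_prometheus == true", &results));
    // Numeric right-hand sides are parsed as i64/f64 and compared numerically.
    assert!(evaluate_condition("grafana_port >= 3000", &results));
    // String operators require word boundaries around the operator keyword.
    assert!(evaluate_condition("provider contains lx", &results));
    // No condition means a fragment always loads; a false one keeps it out.
    assert!(should_load_fragment(None, &results));
    assert!(!should_load_fragment(Some("provider == aws"), &results));
}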
928 crates/typedialog-core/src/form_parser/executor.rs Normal file
@@ -0,0 +1,928 @@
|
||||
//! Form execution logic
|
||||
//!
|
||||
//! Handles form execution with various backends and execution modes.
|
||||
|
||||
use crate::error::Result;
|
||||
use crate::prompts;
|
||||
use std::collections::{BTreeMap, HashMap};
|
||||
use std::path::Path;
|
||||
|
||||
use super::conditions::evaluate_condition;
|
||||
use super::parser::{load_elements_from_file, load_fields_from_file, load_items_from_file};
|
||||
use super::translation::{translate_display_item, translate_field_definition};
|
||||
use super::types::{
|
||||
DisplayItem, DisplayMode, FieldDefinition, FieldType, FormDefinition, FormElement,
|
||||
};
|
||||
|
||||
#[cfg(feature = "i18n")]
|
||||
use crate::i18n::I18nBundle;
|
||||
|
||||
#[cfg(not(feature = "i18n"))]
|
||||
type I18nBundle = ();
|
||||
|
||||
/// Render a display item with formatting, respecting conditionals
|
||||
pub fn render_display_item(item: &DisplayItem, results: &HashMap<String, serde_json::Value>) {
|
||||
// Check if item should be shown based on conditional
|
||||
if let Some(condition) = &item.when {
|
||||
if !evaluate_condition(condition, results) {
|
||||
// Item condition not met, skip it
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if !item.should_display() {
|
||||
return;
|
||||
}
|
||||
|
||||
let default_margin = item.margin_left.unwrap_or(0);
|
||||
let border_margin = item.border_margin_left.unwrap_or(default_margin);
|
||||
let content_margin = item.content_margin_left.unwrap_or(default_margin);
|
||||
|
||||
let border_margin_str = " ".repeat(border_margin);
|
||||
let content_margin_str = " ".repeat(content_margin);
|
||||
|
||||
// Top border line
|
||||
if item.border_top.unwrap_or(false) {
|
||||
let top_l = item.border_top_l.as_deref().unwrap_or("");
|
||||
let top_char = item.border_top_char.as_deref().unwrap_or("═");
|
||||
let top_len = item.border_top_len.unwrap_or(60);
|
||||
let top_r = item.border_top_r.as_deref().unwrap_or("");
|
||||
let top_border = top_char.repeat(top_len);
|
||||
println!("{}{}{}{}", border_margin_str, top_l, top_border, top_r);
|
||||
}
|
||||
|
||||
// Title
|
||||
if let Some(title) = &item.title {
|
||||
if !title.is_empty() {
|
||||
println!("{}{}", content_margin_str, title);
|
||||
}
|
||||
}
|
||||
|
||||
// Content
|
||||
if let Some(content) = &item.content {
|
||||
if !content.is_empty() {
|
||||
for line in content.lines() {
|
||||
println!("{}{}", content_margin_str, line);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Bottom border line
|
||||
if item.border_bottom.unwrap_or(false) {
|
||||
let bottom_l = item.border_bottom_l.as_deref().unwrap_or("");
|
||||
let bottom_char = item.border_bottom_char.as_deref().unwrap_or("═");
|
||||
let bottom_len = item.border_bottom_len.unwrap_or(60);
|
||||
let bottom_r = item.border_bottom_r.as_deref().unwrap_or("");
|
||||
let bottom_border = bottom_char.repeat(bottom_len);
|
||||
println!(
|
||||
"{}{}{}{}",
|
||||
border_margin_str, bottom_l, bottom_border, bottom_r
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn execute_with_base_dir(
|
||||
form: FormDefinition,
|
||||
base_dir: &Path,
|
||||
) -> Result<HashMap<String, serde_json::Value>> {
|
||||
let mut results = HashMap::new();
|
||||
|
||||
// Print form header
|
||||
if let Some(desc) = &form.description {
|
||||
println!("\n{}\n{}\n", form.name, desc);
|
||||
} else {
|
||||
println!("\n{}\n", form.name);
|
||||
}
|
||||
|
||||
// Expand groups with includes and build ordered element map
|
||||
let mut element_map: BTreeMap<usize, FormElement> = BTreeMap::new();
|
||||
let mut order_counter = 0;
|
||||
|
||||
// Process items (expand groups and assign order if not specified)
|
||||
for item in form.items.iter() {
|
||||
let mut item_clone = item.clone();
|
||||
|
||||
// Handle group type with includes
|
||||
if item.item_type == "group" {
|
||||
let group_order = item.order;
|
||||
let group_condition = item.when.clone(); // Capture group's when condition
|
||||
if let Some(includes) = &item.includes {
|
||||
// Load items and fields from included files
|
||||
// Use group_order * 100 + relative_order to avoid collisions
|
||||
let mut group_item_counter = 1;
|
||||
|
||||
for include_path in includes {
|
||||
// Try loading items first
|
||||
match load_items_from_file(include_path, base_dir) {
|
||||
Ok(loaded_items) => {
|
||||
for mut loaded_item in loaded_items {
|
||||
// Propagate group's when condition to loaded items if group has a condition
|
||||
if let Some(ref condition) = group_condition {
|
||||
if loaded_item.when.is_none() {
|
||||
loaded_item.when = Some(condition.clone());
|
||||
}
|
||||
}
|
||||
// Adjust order: use group_order as base (multiplied by 100)
|
||||
// plus item's relative order from fragment
|
||||
let relative_order = if loaded_item.order > 0 {
|
||||
loaded_item.order
|
||||
} else {
|
||||
group_item_counter
|
||||
};
|
||||
loaded_item.order = group_order * 100 + relative_order;
|
||||
group_item_counter += 1;
|
||||
element_map
|
||||
.insert(loaded_item.order, FormElement::Item(loaded_item));
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
println!("❌ ERROR: Failed to load include '{}': {}", include_path, e);
|
||||
return Err(e);
|
||||
}
|
||||
}
|
||||
// Try loading fields
|
||||
match load_fields_from_file(include_path, base_dir) {
|
||||
Ok(loaded_fields) => {
|
||||
for mut loaded_field in loaded_fields {
|
||||
// Propagate group's when condition to loaded fields if group has a condition
|
||||
if let Some(ref condition) = group_condition {
|
||||
if loaded_field.when.is_none() {
|
||||
loaded_field.when = Some(condition.clone());
|
||||
}
|
||||
}
|
||||
// Same approach for fields
|
||||
let relative_order = if loaded_field.order > 0 {
|
||||
loaded_field.order
|
||||
} else {
|
||||
group_item_counter
|
||||
};
|
||||
loaded_field.order = group_order * 100 + relative_order;
|
||||
group_item_counter += 1;
|
||||
element_map
|
||||
.insert(loaded_field.order, FormElement::Field(loaded_field));
|
||||
}
|
||||
}
|
||||
Err(_e) => {
|
||||
// Fields might not exist in this file, that's ok
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// Don't add group item itself to the map
|
||||
} else {
|
||||
// Regular item
|
||||
if item_clone.order == 0 {
|
||||
item_clone.order = order_counter;
|
||||
order_counter += 1;
|
||||
}
|
||||
element_map.insert(item_clone.order, FormElement::Item(item_clone));
|
||||
}
|
||||
}
|
||||
|
||||
// Add form fields to the element map
|
||||
for field in form.fields.clone() {
|
||||
let mut field_clone = field.clone();
|
||||
if field_clone.order == 0 {
|
||||
field_clone.order = order_counter;
|
||||
order_counter += 1;
|
||||
}
|
||||
element_map.insert(field_clone.order, FormElement::Field(field_clone));
|
||||
}
|
||||
|
||||
// Process elements in order
|
||||
for (_, element) in element_map.iter() {
|
||||
match element {
|
||||
FormElement::Item(item) => {
|
||||
render_display_item(item, &results);
|
||||
}
|
||||
FormElement::Field(field) => {
|
||||
// Check if field should be shown based on conditional
|
||||
if let Some(condition) = &field.when {
|
||||
if !evaluate_condition(condition, &results) {
|
||||
// Field condition not met, skip it
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
let value = execute_field(field, &results)?;
|
||||
results.insert(field.name.clone(), value.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(results)
|
||||
}
|
||||
|
||||
/// Execute a form and collect results (no path resolution - for backwards compatibility)
|
||||
pub fn execute(form: FormDefinition) -> Result<HashMap<String, serde_json::Value>> {
|
||||
execute_with_base_dir(form, Path::new("."))
|
||||
}
|
||||
|
||||
/// Load form from TOML file and execute with proper path resolution
|
||||
pub fn load_and_execute_from_file(
|
||||
path: impl AsRef<Path>,
|
||||
) -> Result<HashMap<String, serde_json::Value>> {
|
||||
use super::parser::{parse_toml, resolve_constraints_in_content};
|
||||
|
||||
let path_ref = path.as_ref();
|
||||
let content = std::fs::read_to_string(path_ref)?;
|
||||
|
||||
// Get the directory of the current file for relative path resolution
|
||||
let base_dir = path_ref.parent().unwrap_or_else(|| Path::new("."));
|
||||
|
||||
// Resolve constraint interpolations before parsing
|
||||
let resolved_content = resolve_constraints_in_content(&content, base_dir)?;
|
||||
let form = parse_toml(&resolved_content)?;
|
||||
|
||||
execute_with_base_dir(form, base_dir)
|
||||
}
|
||||
|
||||
/// Execute a single field
|
||||
fn execute_field(
|
||||
field: &FieldDefinition,
|
||||
_previous_results: &HashMap<String, serde_json::Value>,
|
||||
) -> Result<serde_json::Value> {
|
||||
let is_required = field.required.unwrap_or(false);
|
||||
let required_marker = if is_required { " *" } else { " (optional)" };
|
||||
|
||||
match field.field_type {
|
||||
FieldType::Text => {
|
||||
let prompt_with_marker = format!("{}{}", field.prompt, required_marker);
|
||||
let result = prompts::text(
|
||||
&prompt_with_marker,
|
||||
field.default.as_deref(),
|
||||
field.placeholder.as_deref(),
|
||||
)?;
|
||||
|
||||
if is_required && result.is_empty() {
|
||||
eprintln!("⚠ This field is required. Please enter a value.");
|
||||
return execute_field(field, _previous_results); // Retry
|
||||
}
|
||||
Ok(serde_json::json!(result))
|
||||
}
|
||||
|
||||
FieldType::Confirm => {
|
||||
let prompt_with_marker = format!("{}{}", field.prompt, required_marker);
|
||||
let default_bool =
|
||||
field
|
||||
.default
|
||||
.as_deref()
|
||||
.and_then(|s| match s.to_lowercase().as_str() {
|
||||
"true" | "yes" => Some(true),
|
||||
"false" | "no" => Some(false),
|
||||
_ => None,
|
||||
});
|
||||
let result = prompts::confirm(&prompt_with_marker, default_bool, None)?;
|
||||
Ok(serde_json::json!(result))
|
||||
}
|
||||
|
||||
FieldType::Password => {
|
||||
let prompt_with_marker = format!("{}{}", field.prompt, required_marker);
|
||||
let with_toggle = field.placeholder.as_deref() == Some("toggle");
|
||||
let result = prompts::password(&prompt_with_marker, with_toggle)?;
|
||||
|
||||
if is_required && result.is_empty() {
|
||||
eprintln!("⚠ This field is required. Please enter a value.");
|
||||
return execute_field(field, _previous_results); // Retry
|
||||
}
|
||||
Ok(serde_json::json!(result))
|
||||
}
|
||||
|
||||
FieldType::Select => {
|
||||
if field.options.is_empty() {
|
||||
return Err(crate::ErrorWrapper::form_parse_failed(
|
||||
"Select field requires 'options'",
|
||||
));
|
||||
}
|
||||
let prompt_with_marker = format!("{}{}", field.prompt, required_marker);
|
||||
let options = field
|
||||
.options
|
||||
.iter()
|
||||
.map(|opt| opt.as_string())
|
||||
.collect::<Vec<_>>();
|
||||
let result = prompts::select(
|
||||
&prompt_with_marker,
|
||||
options,
|
||||
field.page_size,
|
||||
field.vim_mode.unwrap_or(false),
|
||||
)?;
|
||||
Ok(serde_json::json!(result))
|
||||
}
|
||||
|
||||
FieldType::MultiSelect => {
|
||||
if field.options.is_empty() {
|
||||
return Err(crate::ErrorWrapper::form_parse_failed(
|
||||
"MultiSelect field requires 'options'",
|
||||
));
|
||||
}
|
||||
let prompt_with_marker = format!("{}{}", field.prompt, required_marker);
|
||||
let options = field
|
||||
.options
|
||||
.iter()
|
||||
.map(|opt| opt.as_string())
|
||||
.collect::<Vec<_>>();
|
||||
let results = prompts::multi_select(
|
||||
&prompt_with_marker,
|
||||
options,
|
||||
field.page_size,
|
||||
field.vim_mode.unwrap_or(false),
|
||||
)?;
|
||||
|
||||
if is_required && results.is_empty() {
|
||||
eprintln!("⚠ This field is required. Please select at least one option.");
|
||||
return execute_field(field, _previous_results); // Retry
|
||||
}
|
||||
Ok(serde_json::json!(results))
|
||||
}
|
||||
|
||||
FieldType::Editor => {
|
||||
let prompt_with_marker = format!("{}{}", field.prompt, required_marker);
|
||||
let result = prompts::editor(
|
||||
&prompt_with_marker,
|
||||
field.file_extension.as_deref(),
|
||||
field.prefix_text.as_deref(),
|
||||
)?;
|
||||
|
||||
if is_required && result.is_empty() {
|
||||
eprintln!("⚠ This field is required. Please enter a value.");
|
||||
return execute_field(field, _previous_results); // Retry
|
||||
}
|
||||
Ok(serde_json::json!(result))
|
||||
}
|
||||
|
||||
FieldType::Date => {
|
||||
let prompt_with_marker = format!("{}{}", field.prompt, required_marker);
|
||||
let week_start = field.week_start.as_deref().unwrap_or("Mon");
|
||||
let result = prompts::date(
|
||||
&prompt_with_marker,
|
||||
field.default.as_deref(),
|
||||
field.min_date.as_deref(),
|
||||
field.max_date.as_deref(),
|
||||
week_start,
|
||||
)?;
|
||||
Ok(serde_json::json!(result))
|
||||
}
|
||||
|
||||
FieldType::Custom => {
|
||||
let prompt_with_marker = format!("{}{}", field.prompt, required_marker);
|
||||
let type_name = field.custom_type.as_ref().ok_or_else(|| {
|
||||
crate::ErrorWrapper::form_parse_failed("Custom field requires 'custom_type'")
|
||||
})?;
|
||||
let result = prompts::custom(&prompt_with_marker, type_name, field.default.as_deref())?;
|
||||
|
||||
if is_required && result.is_empty() {
|
||||
eprintln!("⚠ This field is required. Please enter a value.");
|
||||
return execute_field(field, _previous_results); // Retry
|
||||
}
|
||||
Ok(serde_json::json!(result))
|
||||
}
|
||||
|
||||
FieldType::RepeatingGroup => {
|
||||
// Temporary stub - will be implemented in Phase 4
|
||||
Err(crate::ErrorWrapper::form_parse_failed(
|
||||
"RepeatingGroup not yet implemented - use CLI backend (FASE 4)",
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Build element list from form definition with lazy loading of fragments
|
||||
fn build_element_list(
|
||||
form: &FormDefinition,
|
||||
base_dir: &Path,
|
||||
_results: &HashMap<String, serde_json::Value>,
|
||||
) -> Result<Vec<(usize, FormElement)>> {
|
||||
let mut element_list: Vec<(usize, FormElement)> = Vec::new();
|
||||
let mut order_counter = 0;
|
||||
|
||||
// Process unified elements (expand groups and maintain insertion order)
|
||||
for element in form.elements.iter() {
|
||||
match element {
|
||||
FormElement::Item(item) => {
|
||||
let mut item_clone = item.clone();
|
||||
|
||||
// Handle group type with includes
|
||||
if item.item_type == "group" {
|
||||
let group_condition = item.when.clone();
|
||||
if let Some(includes) = &item.includes {
|
||||
for include_path in includes {
|
||||
// Load elements from fragment (unified format)
|
||||
// Note: We load ALL fragments regardless of condition
|
||||
// Phase 3 filtering will hide/show based on conditions
|
||||
match load_elements_from_file(include_path, base_dir) {
|
||||
Ok(loaded_elements) => {
|
||||
for mut loaded_element in loaded_elements {
|
||||
// Apply group condition to loaded elements if they don't have one
|
||||
if let Some(ref condition) = group_condition {
|
||||
match &mut loaded_element {
|
||||
FormElement::Item(ref mut loaded_item) => {
|
||||
if loaded_item.when.is_none() {
|
||||
loaded_item.when = Some(condition.clone());
|
||||
}
|
||||
}
|
||||
FormElement::Field(ref mut loaded_field) => {
|
||||
if loaded_field.when.is_none() {
|
||||
loaded_field.when = Some(condition.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// Assign order based on position counter (insertion order)
|
||||
match &mut loaded_element {
|
||||
FormElement::Item(ref mut loaded_item) => {
|
||||
loaded_item.order = order_counter;
|
||||
}
|
||||
FormElement::Field(ref mut loaded_field) => {
|
||||
loaded_field.order = order_counter;
|
||||
}
|
||||
}
|
||||
order_counter += 1;
|
||||
element_list.push((order_counter - 1, loaded_element));
|
||||
}
|
||||
}
|
||||
Err(_) => {
|
||||
// Fragment failed to load, skip silently
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Non-group items get order from position counter (insertion order)
|
||||
item_clone.order = order_counter;
|
||||
order_counter += 1;
|
||||
element_list.push((item_clone.order, FormElement::Item(item_clone)));
|
||||
}
|
||||
}
|
||||
FormElement::Field(field) => {
|
||||
let mut field_clone = field.clone();
|
||||
// Assign order based on position counter (insertion order)
|
||||
field_clone.order = order_counter;
|
||||
order_counter += 1;
|
||||
element_list.push((field_clone.order, FormElement::Field(field_clone)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// No need to sort - elements are already in insertion order from the counter
|
||||
// element_list is already sorted by construction
|
||||
|
||||
Ok(element_list)
|
||||
}
|
||||
|
||||
/// Recompute visible elements based on current results
|
||||
///
|
||||
/// This function rebuilds the element list with lazy loading based on current results,
|
||||
/// filtering out elements whose conditions don't match. Used for reactive rendering
|
||||
/// in TUI and Web backends when user input changes selections that affect conditionals.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `form` - The form definition
|
||||
/// * `base_dir` - Base directory for resolving relative paths
|
||||
/// * `results` - Current accumulated results (used for condition evaluation)
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A tuple of (filtered items, filtered fields) that should be displayed
|
||||
/// based on current conditions and lazy loading rules
|
||||
pub fn recompute_visible_elements(
|
||||
form: &FormDefinition,
|
||||
base_dir: &Path,
|
||||
results: &HashMap<String, serde_json::Value>,
|
||||
) -> Result<(Vec<DisplayItem>, Vec<FieldDefinition>)> {
|
||||
// Clone and migrate form to ensure elements are populated
|
||||
let mut form_clone = form.clone();
|
||||
form_clone.migrate_to_elements();
|
||||
|
||||
// Build complete element list with lazy loading
|
||||
let element_list = build_element_list(&form_clone, base_dir, results)?;
|
||||
|
||||
// Separate and filter items and fields based on conditions
|
||||
let mut visible_items = Vec::new();
|
||||
let mut visible_fields = Vec::new();
|
||||
|
||||
for (_, element) in element_list {
|
||||
match element {
|
||||
FormElement::Item(item) => {
|
||||
// Include item if it has no condition, or condition is true
|
||||
let should_show = item
|
||||
.when
|
||||
.as_ref()
|
||||
.is_none_or(|cond| evaluate_condition(cond, results));
|
||||
|
||||
if should_show {
|
||||
visible_items.push(item);
|
||||
}
|
||||
}
|
||||
FormElement::Field(field) => {
|
||||
// Include field if it has no condition, or condition is true
|
||||
let should_show = field
|
||||
.when
|
||||
.as_ref()
|
||||
.is_none_or(|cond| evaluate_condition(cond, results));
|
||||
|
||||
if should_show {
|
||||
visible_fields.push(field);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok((visible_items, visible_fields))
|
||||
}
|
||||
|
||||
/// Execute a form using two-phase execution (for sequential CLI-like backends)
|
||||
///
|
||||
/// This execution strategy is designed for backends that execute fields sequentially
|
||||
/// and cannot "go back" to modify previous selections. It uses a two-phase approach:
|
||||
///
|
||||
/// **Phase 1:** Identify and execute selector fields (fields that control conditionals)
|
||||
/// **Phase 2:** With selector values known, rebuild element list and execute remaining fields
|
||||
///
|
||||
/// This ensures fragments are only loaded after their controlling conditions are determined.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `form` - The parsed form definition
|
||||
/// * `backend` - A mutable reference to the form backend implementation
|
||||
/// * `i18n_bundle` - Optional I18n bundle for translating form content
|
||||
/// * `base_dir` - Base directory for resolving relative paths in includes
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A HashMap containing all field results (name -> value)
|
||||
/// Unified form execution with all options (primary implementation)
|
||||
/// Handles: two-phase execution, display modes, i18n, initial values, base_dir resolution
|
||||
pub async fn execute_with_backend_complete(
|
||||
form: FormDefinition,
|
||||
backend: &mut dyn crate::backends::FormBackend,
|
||||
i18n_bundle: Option<&I18nBundle>,
|
||||
base_dir: &Path,
|
||||
initial_values: Option<HashMap<String, serde_json::Value>>,
|
||||
) -> Result<HashMap<String, serde_json::Value>> {
|
||||
use crate::backends::RenderContext;
|
||||
|
||||
// Store initial values for later merging before unwrap_or_default
|
||||
let initial_backup = initial_values.clone();
|
||||
let mut results = initial_values.unwrap_or_default();
|
||||
|
||||
// Initialize backend
|
||||
backend.initialize().await?;
|
||||
|
||||
// PHASE 1: Identify selector fields and execute them first
|
||||
let selector_field_names = super::conditions::identify_selector_fields(&form);
|
||||
|
||||
for field_name in &selector_field_names {
|
||||
if let Some(field) = form.fields.iter().find(|f| &f.name == field_name) {
|
||||
let translated_field = translate_field_definition(field, i18n_bundle);
|
||||
let context = RenderContext {
|
||||
results: results.clone(),
|
||||
locale: None,
|
||||
};
|
||||
let value = backend.execute_field(&translated_field, &context).await?;
|
||||
results.insert(field.name.clone(), value);
|
||||
}
|
||||
}
|
||||
|
||||
// PHASE 2: Build element list with lazy loading based on Phase 1 results
|
||||
let element_list = build_element_list(&form, base_dir, &results)?;
|
||||
|
||||
// PHASE 3: Execute remaining fields based on display mode
|
||||
if form.display_mode == DisplayMode::Complete {
|
||||
// Complete mode: pass all fields to backend for complete form display
|
||||
let items: Vec<&DisplayItem> = element_list
|
||||
.iter()
|
||||
.filter_map(|(_, e)| match e {
|
||||
FormElement::Item(item) => Some(item),
|
||||
_ => None,
|
||||
})
|
||||
.collect();
|
||||
|
||||
let fields: Vec<&FieldDefinition> = element_list
|
||||
.iter()
|
||||
.filter_map(|(_, e)| match e {
|
||||
FormElement::Field(field) => {
|
||||
// Skip selector fields (already executed in Phase 1)
|
||||
if selector_field_names.contains(&field.name) {
|
||||
return None;
|
||||
}
|
||||
Some(field)
|
||||
}
|
||||
_ => None,
|
||||
})
|
||||
.collect();
|
||||
|
||||
let items_owned: Vec<DisplayItem> = items
|
||||
.iter()
|
||||
.map(|i| translate_display_item(i, i18n_bundle))
|
||||
.collect();
|
||||
let fields_owned: Vec<FieldDefinition> = fields
|
||||
.iter()
|
||||
.map(|f| translate_field_definition(f, i18n_bundle))
|
||||
.collect();
|
||||
|
||||
results = backend
|
||||
.execute_form_complete(&form, base_dir, items_owned, fields_owned, initial_backup)
|
||||
.await?;
|
||||
} else {
|
||||
// Field-by-field mode: iterate through element list
|
||||
let mut context = RenderContext {
|
||||
results: results.clone(),
|
||||
locale: None,
|
||||
};
|
||||
|
||||
for (_, element) in element_list.iter() {
|
||||
match element {
|
||||
FormElement::Item(item) => {
|
||||
if let Some(condition) = &item.when {
|
||||
if !evaluate_condition(condition, &results) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
context.results = results.clone();
|
||||
let translated_item = translate_display_item(item, i18n_bundle);
|
||||
backend
|
||||
.render_display_item(&translated_item, &context)
|
||||
.await?;
|
||||
}
|
||||
FormElement::Field(field) => {
|
||||
if selector_field_names.contains(&field.name) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if let Some(condition) = &field.when {
|
||||
if !evaluate_condition(condition, &results) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
context.results = results.clone();
|
||||
let translated_field = translate_field_definition(field, i18n_bundle);
|
||||
let value = backend.execute_field(&translated_field, &context).await?;
|
||||
results.insert(field.name.clone(), value);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Shutdown backend
|
||||
backend.shutdown().await?;
|
||||
|
||||
Ok(results)
|
||||
}
|
||||
|
||||
/// Two-phase execution: selectors first, then dynamic fragments, then remaining fields
|
||||
pub async fn execute_with_backend_two_phase(
|
||||
form: FormDefinition,
|
||||
backend: &mut dyn crate::backends::FormBackend,
|
||||
i18n_bundle: Option<&I18nBundle>,
|
||||
base_dir: &Path,
|
||||
) -> Result<HashMap<String, serde_json::Value>> {
|
||||
execute_with_backend_two_phase_with_defaults(form, backend, i18n_bundle, base_dir, None).await
|
||||
}
|
||||
|
||||
/// Two-phase execution with initial values
|
||||
pub async fn execute_with_backend_two_phase_with_defaults(
|
||||
mut form: FormDefinition,
|
||||
backend: &mut dyn crate::backends::FormBackend,
|
||||
i18n_bundle: Option<&I18nBundle>,
|
||||
base_dir: &Path,
|
||||
initial_values: Option<HashMap<String, serde_json::Value>>,
|
||||
) -> Result<HashMap<String, serde_json::Value>> {
|
||||
use crate::backends::RenderContext;
|
||||
|
||||
let initial_backup = initial_values.clone();
|
||||
let mut results = initial_values.unwrap_or_default();
|
||||
|
||||
// Migrate legacy format (items/fields) to new unified elements format
|
||||
form.migrate_to_elements();
|
||||
|
||||
// Initialize backend
|
||||
backend.initialize().await?;
|
||||
|
||||
// PHASE 1: Identify selector fields and execute them first
|
||||
let selector_field_names = super::conditions::identify_selector_fields(&form);
|
||||
|
||||
for field_name in &selector_field_names {
|
||||
let field_option = form.elements.iter().find_map(|e| {
|
||||
if let FormElement::Field(field) = e {
|
||||
if &field.name == field_name {
|
||||
Some(field)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
});
|
||||
|
||||
if let Some(field) = field_option {
|
||||
let translated_field = translate_field_definition(field, i18n_bundle);
|
||||
let context = RenderContext {
|
||||
results: results.clone(),
|
||||
locale: None,
|
||||
};
|
||||
let value = backend.execute_field(&translated_field, &context).await?;
|
||||
results.insert(field.name.clone(), value);
|
||||
}
|
||||
}
|
||||
|
||||
// PHASE 2: Build element list with lazy loading based on Phase 1 results
|
||||
let element_list = build_element_list(&form, base_dir, &results)?;
|
||||
|
||||
// PHASE 3: Execute remaining fields (non-selectors)
|
||||
let mut context = RenderContext {
|
||||
results: results.clone(),
|
||||
locale: None,
|
||||
};
|
||||
|
||||
for (_, element) in element_list.iter() {
|
||||
match element {
|
||||
FormElement::Item(item) => {
|
||||
if let Some(condition) = &item.when {
|
||||
if !evaluate_condition(condition, &results) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
context.results = results.clone();
|
||||
let translated_item = translate_display_item(item, i18n_bundle);
|
||||
backend
|
||||
.render_display_item(&translated_item, &context)
|
||||
.await?;
|
||||
}
|
||||
FormElement::Field(field) => {
|
||||
if selector_field_names.contains(&field.name) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if let Some(condition) = &field.when {
|
||||
if !evaluate_condition(condition, &results) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
context.results = results.clone();
|
||||
let translated_field = translate_field_definition(field, i18n_bundle);
|
||||
let value = backend.execute_field(&translated_field, &context).await?;
|
||||
results.insert(field.name.clone(), value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Merge back initial values for fields not provided by user
|
||||
if let Some(init_vals) = &initial_backup {
|
||||
for (k, v) in init_vals.iter() {
|
||||
if !results.contains_key(k) {
|
||||
results.insert(k.clone(), v.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Shutdown backend
|
||||
backend.shutdown().await?;
|
||||
|
||||
Ok(results)
|
||||
}
|
||||
|
||||
/// Execute a form using a specific backend
|
||||
///
|
||||
/// This is the primary async form execution function that integrates
|
||||
/// with the FormBackend trait abstraction, enabling support for multiple
|
||||
/// rendering backends (CLI, TUI, Web).
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `form` - The parsed form definition
|
||||
/// * `backend` - A mutable reference to the form backend implementation
|
||||
/// * `i18n_bundle` - Optional I18n bundle for translating form content
|
||||
/// * `base_dir` - Base directory for resolving relative paths in includes
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A HashMap containing all field results (name -> value)
|
||||
/// Execute form with i18n support (single-phase - this was the variant that worked)
|
||||
pub async fn execute_with_backend_i18n(
|
||||
form: FormDefinition,
|
||||
backend: &mut dyn crate::backends::FormBackend,
|
||||
i18n_bundle: Option<&I18nBundle>,
|
||||
base_dir: &Path,
|
||||
) -> Result<HashMap<String, serde_json::Value>> {
|
||||
execute_with_backend_i18n_with_defaults(form, backend, i18n_bundle, base_dir, None).await
|
||||
}
|
||||
|
||||
/// Execute form with i18n support and optional initial values
|
||||
pub async fn execute_with_backend_i18n_with_defaults(
|
||||
form: FormDefinition,
|
||||
backend: &mut dyn crate::backends::FormBackend,
|
||||
i18n_bundle: Option<&I18nBundle>,
|
||||
base_dir: &Path,
|
||||
initial_values: Option<HashMap<String, serde_json::Value>>,
|
||||
) -> Result<HashMap<String, serde_json::Value>> {
|
||||
use crate::backends::RenderContext;
|
||||
|
||||
// Store initial values for later merging before unwrap_or_default
|
||||
let initial_backup = initial_values.clone();
|
||||
let mut results = initial_values.unwrap_or_default();
|
||||
|
||||
// Initialize backend
|
||||
backend.initialize().await?;
|
||||
|
||||
// Build element list directly (no two-phase)
|
||||
let element_list = build_element_list(&form, base_dir, &results)?;
|
||||
|
||||
// Check display mode and execute accordingly
|
||||
if form.display_mode == DisplayMode::Complete {
|
||||
// Complete mode: show all fields at once
|
||||
let items: Vec<&DisplayItem> = element_list
|
||||
.iter()
|
||||
.filter_map(|(_, e)| match e {
|
||||
FormElement::Item(item) => Some(item),
|
||||
_ => None,
|
||||
})
|
||||
.collect();
|
||||
|
||||
let fields: Vec<&FieldDefinition> = element_list
|
||||
.iter()
|
||||
.filter_map(|(_, e)| match e {
|
||||
FormElement::Field(field) => Some(field),
|
||||
_ => None,
|
||||
})
|
||||
.collect();
|
||||
|
||||
let items_owned: Vec<DisplayItem> = items
|
||||
.iter()
|
||||
.map(|i| translate_display_item(i, i18n_bundle))
|
||||
.collect();
|
||||
let fields_owned: Vec<FieldDefinition> = fields
|
||||
.iter()
|
||||
.map(|f| translate_field_definition(f, i18n_bundle))
|
||||
.collect();
|
||||
|
||||
results = backend
|
||||
.execute_form_complete(&form, base_dir, items_owned, fields_owned, initial_backup)
|
||||
.await?;
|
||||
} else {
|
||||
// Field-by-field mode
|
||||
let mut context = RenderContext {
|
||||
results: results.clone(),
|
||||
locale: None,
|
||||
};
|
||||
|
||||
for (_, element) in element_list.iter() {
|
||||
match element {
|
||||
FormElement::Item(item) => {
|
||||
context.results = results.clone();
|
||||
let translated_item = translate_display_item(item, i18n_bundle);
|
||||
backend
|
||||
.render_display_item(&translated_item, &context)
|
||||
.await?;
|
||||
}
|
||||
FormElement::Field(field) => {
|
||||
if let Some(condition) = &field.when {
|
||||
if !evaluate_condition(condition, &results) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
context.results = results.clone();
|
||||
let translated_field = translate_field_definition(field, i18n_bundle);
|
||||
let value = backend.execute_field(&translated_field, &context).await?;
|
||||
results.insert(field.name.clone(), value);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Shutdown backend
|
||||
backend.shutdown().await?;
|
||||
|
||||
Ok(results)
|
||||
}
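// Illustrative usage sketch (not part of the diff): driving the executor above
// with pre-seeded defaults. Assumes typedialog-core and serde_json are available
// as dependencies; the file name "form.toml" and the field name "environment"
// are hypothetical.
use std::collections::HashMap;
use std::path::Path;
use typedialog_core::backends::{BackendFactory, BackendType};
use typedialog_core::{form_parser, Result};

async fn run_with_seeded_defaults() -> Result<HashMap<String, serde_json::Value>> {
    let form = form_parser::load_from_file("form.toml")?;
    let mut backend = BackendFactory::create(BackendType::Tui)?;

    // Seeded values pre-populate prompts; fields the user never reaches
    // (e.g. skipped by a `when` condition) are merged back into the results.
    let mut seed = HashMap::new();
    seed.insert("environment".to_string(), serde_json::json!("production"));

    form_parser::execute_with_backend_i18n_with_defaults(
        form,
        backend.as_mut(),
        None,           // no i18n bundle
        Path::new("."), // base dir for resolving includes
        Some(seed),
    )
    .await
}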
|
||||
|
||||
/// Execute a form using a specific backend (backward compatible wrapper)
|
||||
///
|
||||
/// This is a convenience wrapper around `execute_with_backend_i18n` that
|
||||
/// doesn't use i18n translation. For i18n support, use `execute_with_backend_i18n`.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `form` - The parsed form definition
|
||||
/// * `backend` - A mutable reference to the form backend implementation
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A HashMap containing all field results (name -> value)
|
||||
pub async fn execute_with_backend(
|
||||
form: FormDefinition,
|
||||
backend: &mut dyn crate::backends::FormBackend,
|
||||
) -> Result<HashMap<String, serde_json::Value>> {
|
||||
execute_with_backend_i18n(form, backend, None, Path::new(".")).await
|
||||
}
|
||||
|
||||
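/// Execute a form without i18n, resolving relative include paths against `base_dir`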
pub async fn execute_with_backend_from_dir(
|
||||
form: FormDefinition,
|
||||
backend: &mut dyn crate::backends::FormBackend,
|
||||
base_dir: &Path,
|
||||
) -> Result<HashMap<String, serde_json::Value>> {
|
||||
execute_with_backend_i18n(form, backend, None, base_dir).await
|
||||
}
|
||||
101
crates/typedialog-core/src/form_parser/fragments.rs
Normal file
@ -0,0 +1,101 @@
|
||||
//! Fragment and include handling
|
||||
//!
|
||||
//! Provides logic for loading form fragments and expanding group includes.
|
||||
|
||||
use crate::error::Result;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
use super::parser::{load_elements_from_file, resolve_constraints_in_content};
|
||||
use super::types::{FormDefinition, FormElement};
|
||||
|
||||
/// Load a fragment form from file path (public API for backends)
|
||||
///
|
||||
/// Loads a TOML file containing a FormDefinition and migrates it to the unified format.
|
||||
/// This is the public API used by backends (e.g., CLI RepeatingGroup) to load fragment forms.
|
||||
///
|
||||
/// # Arguments
|
||||
/// * `path` - File path (relative or absolute)
|
||||
///
|
||||
/// # Returns
|
||||
/// A FormDefinition with migrated elements
|
||||
pub fn load_fragment_form(path: &str) -> Result<FormDefinition> {
|
||||
let content = std::fs::read_to_string(path)?;
|
||||
|
||||
// Get the directory of the fragment file for constraint resolution
|
||||
let fragment_dir = PathBuf::from(path)
|
||||
.parent()
|
||||
.map(|p| p.to_path_buf())
|
||||
.unwrap_or_else(|| PathBuf::from("."));
|
||||
|
||||
// Resolve constraint interpolations before parsing
|
||||
let resolved_content = resolve_constraints_in_content(&content, &fragment_dir)?;
|
||||
let mut form: FormDefinition = toml::from_str(&resolved_content)?;
|
||||
form.migrate_to_elements();
|
||||
Ok(form)
|
||||
}
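// Illustrative sketch (not part of the diff): loading a fragment the way a
// backend's RepeatingGroup would. The path "fragments/tracker.toml" is
// hypothetical; `load_fragment_form` is re-exported through `form_parser`.
use typedialog_core::{form_parser, Result};

fn inspect_tracker_fragment() -> Result<()> {
    let fragment = form_parser::load_fragment_form("fragments/tracker.toml")?;
    // After migration, fields and items live in the unified `elements` array.
    for element in &fragment.elements {
        if let Some(field) = element.as_field() {
            println!("fragment field: {}", field.name);
        }
    }
    Ok(())
}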
|
||||
|
||||
/// Expand groups with includes in a FormDefinition (for non-interactive use)
|
||||
pub fn expand_includes(mut form: FormDefinition, base_dir: &Path) -> Result<FormDefinition> {
|
||||
let mut expanded_elements = Vec::new();
|
||||
|
||||
// First migrate legacy format if necessary
|
||||
if form.elements.is_empty() && (!form.items.is_empty() || !form.fields.is_empty()) {
|
||||
form.migrate_to_elements();
|
||||
}
|
||||
|
||||
// Process elements and expand any groups with includes
|
||||
for element in form.elements.iter() {
|
||||
if let FormElement::Item(item) = element {
|
||||
// Handle group type with includes
|
||||
if item.item_type == "group" {
|
||||
if let Some(includes) = &item.includes {
|
||||
let group_condition = item.when.clone(); // Capture group's when condition
|
||||
|
||||
// Load elements from included files
|
||||
for include_path in includes {
|
||||
match load_elements_from_file(include_path, base_dir) {
|
||||
Ok(mut loaded_elements) => {
|
||||
// Propagate group's when condition to loaded elements if group has one
|
||||
for element in loaded_elements.iter_mut() {
|
||||
if let Some(ref condition) = group_condition {
|
||||
if element.when().is_none() {
|
||||
match element {
|
||||
FormElement::Item(ref mut item) => {
|
||||
item.when = Some(condition.clone())
|
||||
}
|
||||
FormElement::Field(ref mut field) => {
|
||||
field.when = Some(condition.clone())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
expanded_elements.extend(loaded_elements);
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!(
|
||||
"⚠ Warning: Failed to load fragment '{}': {}",
|
||||
include_path, e
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Non-group items are included as-is, maintaining their position and order
|
||||
expanded_elements.push(element.clone());
|
||||
}
|
||||
} else {
|
||||
// Fields and other elements pass through unchanged
|
||||
expanded_elements.push(element.clone());
|
||||
}
|
||||
}
|
||||
|
||||
// Update form with expanded elements
|
||||
form.elements = expanded_elements;
|
||||
// Clear legacy arrays since we've expanded into elements
|
||||
form.items.clear();
|
||||
form.fields.clear();
|
||||
|
||||
Ok(form)
|
||||
}
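// Illustrative sketch (not part of the diff): expanding group includes
// non-interactively before inspecting a form. File names are hypothetical.
use std::path::Path;
use typedialog_core::{form_parser, Result};

fn expanded_field_names() -> Result<Vec<String>> {
    let form = form_parser::load_from_file("form.toml")?;
    let expanded = form_parser::expand_includes(form, Path::new("."))?;
    Ok(expanded
        .elements
        .iter()
        .filter_map(|e| e.as_field().map(|f| f.name.clone()))
        .collect())
}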
|
||||
41
crates/typedialog-core/src/form_parser/mod.rs
Normal file
@ -0,0 +1,41 @@
|
||||
//! TOML form parser and executor
|
||||
//!
|
||||
//! Parses form definitions from TOML files and executes them interactively.
|
||||
//!
|
||||
//! This module is organized into focused submodules:
|
||||
//! - `types`: Form data structures (FormDefinition, FieldDefinition, DisplayItem, etc.)
|
||||
//! - `parser`: TOML parsing and file loading
|
||||
//! - `executor`: Form execution logic
|
||||
//! - `conditions`: Conditional expression evaluation
|
||||
//! - `fragments`: Fragment and include handling
|
||||
//! - `translation`: i18n translation support
|
||||
|
||||
mod conditions;
|
||||
mod executor;
|
||||
mod fragments;
|
||||
mod parser;
|
||||
mod translation;
|
||||
mod types;
|
||||
|
||||
// Re-export public types
|
||||
pub use types::{
|
||||
DisplayItem, DisplayMode, FieldDefinition, FieldType, FormDefinition, FormElement, SelectOption,
|
||||
};
|
||||
|
||||
// Re-export public functions - parser
|
||||
pub use parser::{load_from_file, parse_toml};
|
||||
|
||||
// Re-export public functions - executor
|
||||
pub use executor::{
|
||||
execute, execute_with_backend, execute_with_backend_complete, execute_with_backend_from_dir,
|
||||
execute_with_backend_i18n, execute_with_backend_i18n_with_defaults,
|
||||
execute_with_backend_two_phase, execute_with_backend_two_phase_with_defaults,
|
||||
execute_with_base_dir, load_and_execute_from_file, recompute_visible_elements,
|
||||
render_display_item,
|
||||
};
|
||||
|
||||
// Re-export public functions - conditions
|
||||
pub use conditions::{evaluate_condition, identify_selector_fields, should_load_fragment};
|
||||
|
||||
// Re-export public functions - fragments
|
||||
pub use fragments::{expand_includes, load_fragment_form};
|
||||
161
crates/typedialog-core/src/form_parser/parser.rs
Normal file
@ -0,0 +1,161 @@
|
||||
//! TOML form parsing and loading
|
||||
//!
|
||||
//! Handles parsing form definitions from TOML format and loading from files.
|
||||
|
||||
use crate::error::Result;
|
||||
use std::path::Path;
|
||||
|
||||
use super::types::FormDefinition;
|
||||
|
||||
/// Resolve constraint interpolations in TOML content
|
||||
/// Replaces "${constraint.path.to.value}" (with quotes) with actual values from constraints.toml
|
||||
/// The quotes are removed as part of the replacement, so the value becomes a bare number
|
||||
pub(super) fn resolve_constraints_in_content(content: &str, base_dir: &Path) -> Result<String> {
|
||||
let constraints_path = base_dir.join("constraints.toml");
|
||||
|
||||
// If constraints.toml doesn't exist, return content unchanged
|
||||
if !constraints_path.exists() {
|
||||
return Ok(content.to_string());
|
||||
}
|
||||
|
||||
let constraints_content = std::fs::read_to_string(&constraints_path)?;
|
||||
let constraints_table: toml::Table = toml::from_str(&constraints_content).map_err(|e| {
|
||||
crate::error::ErrorWrapper::validation_failed(format!(
|
||||
"Failed to parse constraints.toml: {}",
|
||||
e
|
||||
))
|
||||
})?;
|
||||
|
||||
let mut result = content.to_string();
|
||||
|
||||
// Find all "${constraint.*}" patterns (with quotes) by searching
|
||||
// Pattern format: max_items = "${constraint.tracker.udp.max_items}"
|
||||
while let Some(start_pos) = result.find("\"${constraint.") {
|
||||
// Find the closing brace followed by quote sequence: }"
|
||||
let search_start = start_pos + 2; // Start searching just past the opening quote and '$'
|
||||
if let Some(close_brace_pos) = result[search_start..].find("}\"") {
|
||||
let close_brace_abs = search_start + close_brace_pos;
|
||||
let end_pos = close_brace_abs + 1; // Position of the closing quote
|
||||
|
||||
let pattern = &result[start_pos..=end_pos];
|
||||
// Extract path between "${constraint. and }"
|
||||
// pattern looks like: "${constraint.tracker.udp.max_items}"
|
||||
// We skip the first 14 chars ("${constraint.) and last 2 chars (}")
|
||||
let constraint_path = &pattern[14..pattern.len() - 2];
|
||||
|
||||
// Navigate through the table following the path
|
||||
let path_parts: Vec<&str> = constraint_path.split('.').collect();
|
||||
let mut current: &toml::Value = &toml::Value::Table(constraints_table.clone());
|
||||
let mut found = true;
|
||||
|
||||
for part in path_parts {
|
||||
if let toml::Value::Table(table) = current {
|
||||
if let Some(next) = table.get(part) {
|
||||
current = next;
|
||||
} else {
|
||||
found = false;
|
||||
break;
|
||||
}
|
||||
} else {
|
||||
found = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if found {
|
||||
if let toml::Value::Integer(n) = current {
|
||||
// Replace the quoted interpolation with just the number (unquoted)
|
||||
// This allows TOML to parse it as a number, not a string
|
||||
let replacement = n.to_string();
|
||||
result.replace_range(start_pos..=end_pos, &replacement);
|
||||
// Continue searching from the current position
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
// If not found or not an integer, break to avoid infinite loop
|
||||
break;
|
||||
} else {
|
||||
break; // No closing sequence found
|
||||
}
|
||||
}
|
||||
|
||||
Ok(result)
|
||||
}
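// Sketch of an in-module test exercising the helper above (illustrative only;
// it would live in this module because the function is pub(super), and it
// assumes a writable temp directory and a Debug-implementing error type).
#[cfg(test)]
mod constraint_interpolation_sketch {
    use super::resolve_constraints_in_content;

    #[test]
    fn quoted_constraint_becomes_bare_integer() {
        let dir = std::env::temp_dir().join("typedialog_constraints_demo");
        std::fs::create_dir_all(&dir).unwrap();
        // Hypothetical constraints.toml providing tracker.udp.max_items = 8.
        std::fs::write(dir.join("constraints.toml"), "[tracker.udp]\nmax_items = 8\n").unwrap();

        let input = r#"max_items = "${constraint.tracker.udp.max_items}""#;
        let output = resolve_constraints_in_content(input, &dir).unwrap();

        // The quoted interpolation is replaced by a bare integer literal.
        assert_eq!(output, "max_items = 8");
    }
}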
|
||||
|
||||
/// Parse TOML string into a FormDefinition
|
||||
pub fn parse_toml(content: &str) -> Result<FormDefinition> {
|
||||
toml::from_str(content).map_err(|e| e.into())
|
||||
}
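// Illustrative sketch (not part of the diff): the minimal TOML a form needs —
// a form `name` plus one field with `name`, `type`, and `prompt`; everything
// else is optional. Written from a consumer's perspective; names are hypothetical.
use typedialog_core::{form_parser, Result};

fn parse_minimal_form() -> Result<form_parser::FormDefinition> {
    form_parser::parse_toml(
        r#"
        name = "demo"

        [[fields]]
        name = "username"
        type = "text"
        prompt = "User name"
        "#,
    )
}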
|
||||
|
||||
/// Load form from TOML file (returns FormDefinition, doesn't execute)
|
||||
pub fn load_from_file(path: impl AsRef<Path>) -> Result<FormDefinition> {
|
||||
let path_ref = path.as_ref();
|
||||
let content = std::fs::read_to_string(path_ref)?;
|
||||
|
||||
// Get the directory of the current file for relative path resolution
|
||||
let base_dir = path_ref.parent().unwrap_or_else(|| Path::new("."));
|
||||
|
||||
// Resolve constraint interpolations before parsing
|
||||
let resolved_content = resolve_constraints_in_content(&content, base_dir)?;
|
||||
parse_toml(&resolved_content)
|
||||
}
|
||||
|
||||
/// Load unified elements from a TOML file with proper path resolution
|
||||
///
|
||||
/// Automatically migrates legacy items/fields to the unified elements format
|
||||
pub(super) fn load_elements_from_file(
|
||||
path: &str,
|
||||
base_dir: &Path,
|
||||
) -> Result<Vec<super::types::FormElement>> {
|
||||
let resolved_path = if Path::new(path).is_absolute() {
|
||||
Path::new(path).to_path_buf()
|
||||
} else {
|
||||
base_dir.join(path)
|
||||
};
|
||||
let content = std::fs::read_to_string(&resolved_path)?;
|
||||
|
||||
// Resolve constraint interpolations before parsing
|
||||
let resolved_content = resolve_constraints_in_content(&content, base_dir)?;
|
||||
let mut form: FormDefinition = toml::from_str(&resolved_content)?;
|
||||
form.migrate_to_elements();
|
||||
Ok(form.elements)
|
||||
}
|
||||
|
||||
/// Load items from a TOML file with proper path resolution
|
||||
/// (For backward compatibility - prefer load_elements_from_file for new code)
|
||||
pub(super) fn load_items_from_file(
|
||||
path: &str,
|
||||
base_dir: &Path,
|
||||
) -> Result<Vec<super::types::DisplayItem>> {
|
||||
let resolved_path = if Path::new(path).is_absolute() {
|
||||
Path::new(path).to_path_buf()
|
||||
} else {
|
||||
base_dir.join(path)
|
||||
};
|
||||
let content = std::fs::read_to_string(&resolved_path)?;
|
||||
|
||||
// Resolve constraint interpolations before parsing
|
||||
let resolved_content = resolve_constraints_in_content(&content, base_dir)?;
|
||||
let form: FormDefinition = toml::from_str(&resolved_content)?;
|
||||
Ok(form.items)
|
||||
}
|
||||
|
||||
/// Load fields from a TOML file with proper path resolution
|
||||
/// (For backward compatibility - prefer load_elements_from_file for new code)
|
||||
pub(super) fn load_fields_from_file(
|
||||
path: &str,
|
||||
base_dir: &Path,
|
||||
) -> Result<Vec<super::types::FieldDefinition>> {
|
||||
let resolved_path = if Path::new(path).is_absolute() {
|
||||
Path::new(path).to_path_buf()
|
||||
} else {
|
||||
base_dir.join(path)
|
||||
};
|
||||
let content = std::fs::read_to_string(&resolved_path)?;
|
||||
|
||||
// Resolve constraint interpolations before parsing
|
||||
let resolved_content = resolve_constraints_in_content(&content, base_dir)?;
|
||||
let form: FormDefinition = toml::from_str(&resolved_content)?;
|
||||
Ok(form.fields)
|
||||
}
|
||||
58
crates/typedialog-core/src/form_parser/translation.rs
Normal file
@ -0,0 +1,58 @@
|
||||
//! i18n translation support for form elements
|
||||
//!
|
||||
//! Provides functions to translate display items and field definitions.
|
||||
|
||||
use crate::i18n::I18nBundle;
|
||||
|
||||
use super::types::{DisplayItem, FieldDefinition, SelectOption};
|
||||
|
||||
/// Translate a DisplayItem if i18n is enabled
|
||||
pub(super) fn translate_display_item(
|
||||
item: &DisplayItem,
|
||||
bundle: Option<&I18nBundle>,
|
||||
) -> DisplayItem {
|
||||
if item.i18n.unwrap_or(false) {
|
||||
if let Some(bundle) = bundle {
|
||||
let mut translated = item.clone();
|
||||
if let Some(content) = &item.content {
|
||||
translated.content = Some(bundle.translate_if_key(content, None));
|
||||
}
|
||||
if let Some(title) = &item.title {
|
||||
translated.title = Some(bundle.translate_if_key(title, None));
|
||||
}
|
||||
return translated;
|
||||
}
|
||||
}
|
||||
item.clone()
|
||||
}
|
||||
|
||||
/// Translate a FieldDefinition if i18n is enabled
|
||||
pub(super) fn translate_field_definition(
|
||||
field: &FieldDefinition,
|
||||
bundle: Option<&I18nBundle>,
|
||||
) -> FieldDefinition {
|
||||
if field.i18n.unwrap_or(false) {
|
||||
if let Some(bundle) = bundle {
|
||||
let mut translated = field.clone();
|
||||
translated.prompt = bundle.translate_if_key(&field.prompt, None);
|
||||
if let Some(placeholder) = &field.placeholder {
|
||||
translated.placeholder = Some(bundle.translate_if_key(placeholder, None));
|
||||
}
|
||||
if !field.options.is_empty() {
|
||||
translated.options = field
|
||||
.options
|
||||
.iter()
|
||||
.map(|opt| SelectOption {
|
||||
value: opt.value.clone(),
|
||||
label: Some(
|
||||
bundle
|
||||
.translate_if_key(opt.label.as_deref().unwrap_or(&opt.value), None),
|
||||
),
|
||||
})
|
||||
.collect();
|
||||
}
|
||||
return translated;
|
||||
}
|
||||
}
|
||||
field.clone()
|
||||
}
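// Illustrative sketch (not part of the diff): the authoring side of i18n. A
// field flagged `i18n = true` stores message keys instead of literal text;
// the executor routes it through `translate_field_definition` when an
// I18nBundle is supplied. Keys and names below are hypothetical.
use typedialog_core::{form_parser, Result};

fn parse_i18n_field() -> Result<()> {
    let form = form_parser::parse_toml(
        r#"
        name = "demo"
        i18n_prefix = "forms.demo"

        [[fields]]
        name = "role"
        type = "select"
        prompt = "forms-demo-role-prompt"  # resolved via the bundle at run time
        options = ["admin", "user"]
        i18n = true
        "#,
    )?;
    assert!(form.fields[0].i18n.unwrap_or(false));
    Ok(())
}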
|
||||
574
crates/typedialog-core/src/form_parser/types.rs
Normal file
@ -0,0 +1,574 @@
|
||||
//! Form parser type definitions
|
||||
//!
|
||||
//! Defines the data structures for form definitions, fields, and display items.
|
||||
|
||||
use serde::{de, Deserialize, Deserializer, Serialize};
|
||||
|
||||
/// Default order for form elements (0; actual positions are assigned from array order during migration)
|
||||
pub(super) fn default_order() -> usize {
|
||||
0
|
||||
}
|
||||
|
||||
/// Deserialize `default` field accepting both string and boolean TOML values
|
||||
pub(super) fn deserialize_default<'de, D>(
|
||||
deserializer: D,
|
||||
) -> std::result::Result<Option<String>, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
use serde_json::Value;
|
||||
let value: Option<Value> = Option::deserialize(deserializer)?;
|
||||
Ok(value.and_then(|v| match v {
|
||||
Value::String(s) => Some(s),
|
||||
Value::Bool(b) => Some(b.to_string()),
|
||||
Value::Number(n) => Some(n.to_string()),
|
||||
_ => None,
|
||||
}))
|
||||
}
|
||||
|
||||
/// Form element (can be a display item or a field)
|
||||
/// Public enum for unified form structure
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum FormElement {
|
||||
Item(DisplayItem),
|
||||
Field(FieldDefinition),
|
||||
}
|
||||
|
||||
impl FormElement {
|
||||
/// Get as DisplayItem if this is an Item variant
|
||||
pub fn as_item(&self) -> Option<&DisplayItem> {
|
||||
match self {
|
||||
FormElement::Item(item) => Some(item),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Get mutable reference as DisplayItem if this is an Item variant
|
||||
pub fn as_item_mut(&mut self) -> Option<&mut DisplayItem> {
|
||||
match self {
|
||||
FormElement::Item(item) => Some(item),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Get as FieldDefinition if this is a Field variant
|
||||
pub fn as_field(&self) -> Option<&FieldDefinition> {
|
||||
match self {
|
||||
FormElement::Field(field) => Some(field),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Get mutable reference as FieldDefinition if this is a Field variant
|
||||
pub fn as_field_mut(&mut self) -> Option<&mut FieldDefinition> {
|
||||
match self {
|
||||
FormElement::Field(field) => Some(field),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Check if this is an Item variant
|
||||
pub fn is_item(&self) -> bool {
|
||||
matches!(self, FormElement::Item(_))
|
||||
}
|
||||
|
||||
/// Check if this is a Field variant
|
||||
pub fn is_field(&self) -> bool {
|
||||
matches!(self, FormElement::Field(_))
|
||||
}
|
||||
|
||||
/// Get order value (for sorting)
|
||||
pub fn order(&self) -> usize {
|
||||
match self {
|
||||
FormElement::Item(item) => item.order,
|
||||
FormElement::Field(field) => field.order,
|
||||
}
|
||||
}
|
||||
|
||||
/// Get when condition if present
|
||||
pub fn when(&self) -> Option<&str> {
|
||||
match self {
|
||||
FormElement::Item(item) => item.when.as_deref(),
|
||||
FormElement::Field(field) => field.when.as_deref(),
|
||||
}
|
||||
}
|
||||
}
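// Illustrative sketch (not part of the diff): walking a form's unified
// elements with the accessors above. Assumes the form has already been
// migrated to the `elements` layout.
use typedialog_core::form_parser::{FormDefinition, FormElement};

fn describe(form: &FormDefinition) {
    for element in &form.elements {
        match element {
            FormElement::Field(f) => println!("field {} (order {})", f.name, f.order),
            FormElement::Item(i) => println!("item  {} (type {})", i.name, i.item_type),
        }
        if let Some(condition) = element.when() {
            println!("  shown only when: {}", condition);
        }
    }
}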
|
||||
|
||||
impl<'de> Deserialize<'de> for FormElement {
|
||||
fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
use serde::de::MapAccess;
|
||||
|
||||
struct ElementVisitor;
|
||||
|
||||
impl<'de> de::Visitor<'de> for ElementVisitor {
|
||||
type Value = FormElement;
|
||||
|
||||
fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
formatter.write_str("a FormElement with a type field")
|
||||
}
|
||||
|
||||
fn visit_map<M>(self, mut map: M) -> std::result::Result<FormElement, M::Error>
|
||||
where
|
||||
M: MapAccess<'de>,
|
||||
{
|
||||
let mut fields_map = serde_json::Map::new();
|
||||
let mut element_type: Option<String> = None;
|
||||
|
||||
while let Some(key) = map.next_key::<String>()? {
|
||||
if key == "type" {
|
||||
element_type = Some(map.next_value()?);
|
||||
} else {
|
||||
let value: serde_json::Value = map.next_value()?;
|
||||
fields_map.insert(key, value);
|
||||
}
|
||||
}
|
||||
|
||||
let element_type = element_type.ok_or_else(|| de::Error::missing_field("type"))?;
|
||||
|
||||
// Add type back to map for deserialization
|
||||
fields_map.insert("type".to_string(), serde_json::json!(&element_type));
|
||||
|
||||
// Display item types
|
||||
let item_types = [
|
||||
"header",
|
||||
"section",
|
||||
"section_header",
|
||||
"footer",
|
||||
"cta",
|
||||
"group",
|
||||
];
|
||||
|
||||
// Field types
|
||||
let field_types = [
|
||||
"text",
|
||||
"confirm",
|
||||
"select",
|
||||
"multiselect",
|
||||
"password",
|
||||
"custom",
|
||||
"editor",
|
||||
"date",
|
||||
"repeatinggroup",
|
||||
];
|
||||
|
||||
if item_types.contains(&element_type.as_str()) {
|
||||
let item: DisplayItem =
|
||||
serde_json::from_value(serde_json::Value::Object(fields_map))
|
||||
.map_err(de::Error::custom)?;
|
||||
Ok(FormElement::Item(item))
|
||||
} else if field_types.contains(&element_type.as_str()) {
|
||||
let field: FieldDefinition =
|
||||
serde_json::from_value(serde_json::Value::Object(fields_map))
|
||||
.map_err(de::Error::custom)?;
|
||||
Ok(FormElement::Field(field))
|
||||
} else {
|
||||
Err(de::Error::custom(format!(
|
||||
"Unknown element type '{}'. Item types: {}. Field types: {}",
|
||||
element_type,
|
||||
item_types.join(", "),
|
||||
field_types.join(", ")
|
||||
)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
deserializer.deserialize_map(ElementVisitor)
|
||||
}
|
||||
}
|
||||
|
||||
impl Serialize for FormElement {
|
||||
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
|
||||
where
|
||||
S: serde::Serializer,
|
||||
{
|
||||
match self {
|
||||
FormElement::Item(item) => item.serialize(serializer),
|
||||
FormElement::Field(field) => field.serialize(serializer),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A display item (header, section, CTA, footer, etc.)
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct DisplayItem {
|
||||
/// Item name/identifier (not displayed)
|
||||
pub name: String,
|
||||
/// Item type/purpose
|
||||
#[serde(rename = "type")]
|
||||
pub item_type: String,
|
||||
/// Content to display (can be literal text or i18n key)
|
||||
pub content: Option<String>,
|
||||
/// Optional title (can be literal text or i18n key)
|
||||
pub title: Option<String>,
|
||||
/// Optional template expression (alternative to content, e.g., "Welcome {{ env.USER }}!")
|
||||
pub template: Option<String>,
|
||||
/// Show border on top
|
||||
pub border_top: Option<bool>,
|
||||
/// Show border on bottom
|
||||
pub border_bottom: Option<bool>,
|
||||
/// Left margin (number of spaces) - applies to all content
|
||||
pub margin_left: Option<usize>,
|
||||
/// Left margin for border lines (overrides margin_left for borders)
|
||||
pub border_margin_left: Option<usize>,
|
||||
/// Left margin for content/title (overrides margin_left for content)
|
||||
pub content_margin_left: Option<usize>,
|
||||
/// Optional alignment (left, center, right)
|
||||
pub align: Option<String>,
|
||||
/// Optional conditional display (e.g., "role == admin")
|
||||
pub when: Option<String>,
|
||||
/// Optional group name (items in same group are associated/grouped together)
|
||||
pub group: Option<String>,
|
||||
/// Optional array of file paths to include (for type="group")
|
||||
pub includes: Option<Vec<String>>,
|
||||
/// Display order (position in form flow)
|
||||
#[serde(default = "default_order")]
|
||||
pub order: usize,
|
||||
/// Character to use for top border (default: "═")
|
||||
pub border_top_char: Option<String>,
|
||||
/// Length of top border line (default: 60)
|
||||
pub border_top_len: Option<usize>,
|
||||
/// Character for top-left corner (default: none)
|
||||
pub border_top_l: Option<String>,
|
||||
/// Character for top-right corner (default: none)
|
||||
pub border_top_r: Option<String>,
|
||||
/// Character to use for bottom border (default: "═")
|
||||
pub border_bottom_char: Option<String>,
|
||||
/// Length of bottom border line (default: 60)
|
||||
pub border_bottom_len: Option<usize>,
|
||||
/// Character for bottom-left corner (default: none)
|
||||
pub border_bottom_l: Option<String>,
|
||||
/// Character for bottom-right corner (default: none)
|
||||
pub border_bottom_r: Option<String>,
|
||||
/// Optional flag indicating if content/title are i18n keys
|
||||
pub i18n: Option<bool>,
|
||||
}
|
||||
|
||||
impl DisplayItem {
|
||||
/// Check if this item should be displayed (any non-empty visible attribute)
|
||||
pub(crate) fn should_display(&self) -> bool {
|
||||
self.content.as_deref().is_some_and(|c| !c.is_empty())
|
||||
|| self.title.as_deref().is_some_and(|t| !t.is_empty())
|
||||
|| self.border_top.unwrap_or(false)
|
||||
|| self.border_bottom.unwrap_or(false)
|
||||
}
|
||||
}
|
||||
|
||||
/// A complete form definition loaded from TOML
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct FormDefinition {
|
||||
/// Form name/identifier
|
||||
pub name: String,
|
||||
/// Optional form description
|
||||
pub description: Option<String>,
|
||||
/// Optional locale override for this form (e.g., "es-ES", "en-US")
|
||||
pub locale: Option<String>,
|
||||
/// Optional template for pre-processing form (generates prompts dynamically)
|
||||
pub template: Option<String>,
|
||||
/// Optional path to template for post-processing results
|
||||
pub output_template: Option<String>,
|
||||
/// Optional i18n prefix for message keys (e.g., "forms.registration")
|
||||
pub i18n_prefix: Option<String>,
|
||||
/// Display mode: Complete (all fields at once) or FieldByField (one at a time)
|
||||
#[serde(default)]
|
||||
pub display_mode: DisplayMode,
|
||||
/// Unified array of form elements (NEW - preferred format)
|
||||
#[serde(default)]
|
||||
pub elements: Vec<FormElement>,
|
||||
/// Array of display items (headers, sections, CTAs, etc.) - DEPRECATED for serialization
|
||||
#[serde(default, skip_serializing)]
|
||||
pub items: Vec<DisplayItem>,
|
||||
/// Array of form fields - DEPRECATED for serialization
|
||||
#[serde(default, skip_serializing)]
|
||||
pub fields: Vec<FieldDefinition>,
|
||||
}
|
||||
|
||||
impl FormDefinition {
|
||||
/// Migrate legacy items/fields arrays to unified elements array
|
||||
/// If elements is already populated, does nothing (already using new format)
|
||||
pub fn migrate_to_elements(&mut self) {
|
||||
if !self.elements.is_empty() {
|
||||
return; // Already using new format
|
||||
}
|
||||
|
||||
// Build unified list with order preservation (items before fields to maintain TOML order)
|
||||
let mut element_list: Vec<FormElement> = Vec::new();
|
||||
|
||||
// Add items first, preserving insertion order
|
||||
for mut item in self.items.drain(..) {
|
||||
// Assign order based on position to preserve insertion order
|
||||
item.order = element_list.len();
|
||||
element_list.push(FormElement::Item(item));
|
||||
}
|
||||
|
||||
// Add fields, preserving insertion order after items
|
||||
for mut field in self.fields.drain(..) {
|
||||
// Assign order based on position to preserve insertion order
|
||||
field.order = element_list.len();
|
||||
element_list.push(FormElement::Field(field));
|
||||
}
|
||||
|
||||
// Assign to elements (already in correct insertion order)
|
||||
self.elements = element_list;
|
||||
}
|
||||
|
||||
/// Migrate from unified elements back to separate fields and items
|
||||
/// Used by backends that still expect separate fields/items arrays
|
||||
pub fn migrate_from_elements(&mut self) {
|
||||
if self.elements.is_empty() {
|
||||
return; // Already using legacy format or empty
|
||||
}
|
||||
|
||||
// Separate elements into fields and items
|
||||
for element in self.elements.drain(..) {
|
||||
match element {
|
||||
FormElement::Field(field) => {
|
||||
self.fields.push(field);
|
||||
}
|
||||
FormElement::Item(item) => {
|
||||
self.items.push(item);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Clear elements after migration
|
||||
self.elements.clear();
|
||||
}
|
||||
}
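// Illustrative sketch (not part of the diff): round-tripping between the
// legacy items/fields layout and the unified elements layout.
use typedialog_core::{form_parser, Result};

fn migrate_roundtrip() -> Result<()> {
    let mut form = form_parser::parse_toml(
        r#"
        name = "demo"

        [[items]]
        name = "header"
        type = "header"
        title = "Demo"

        [[fields]]
        name = "username"
        type = "text"
        prompt = "User name"
        "#,
    )?;

    form.migrate_to_elements(); // legacy items/fields -> unified elements
    assert_eq!(form.elements.len(), 2);
    assert!(form.items.is_empty() && form.fields.is_empty());

    form.migrate_from_elements(); // and back, for backends that expect the split arrays
    assert_eq!(form.items.len(), 1);
    assert_eq!(form.fields.len(), 1);
    Ok(())
}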
|
||||
|
||||
/// Option with value and optional label for select/multiselect fields
|
||||
/// Supports both simple string format and object format with value/label
|
||||
#[derive(Debug, Clone, Serialize)]
|
||||
pub struct SelectOption {
|
||||
/// The value to store when selected
|
||||
pub value: String,
|
||||
/// Display label (if None, use value)
|
||||
pub label: Option<String>,
|
||||
}
|
||||
|
||||
impl SelectOption {
|
||||
/// Get the display label, fallback to value if label not provided
|
||||
pub fn display_label(&self) -> &str {
|
||||
self.label.as_deref().unwrap_or(&self.value)
|
||||
}
|
||||
|
||||
/// Convert to simple string for backward compatibility
|
||||
pub fn as_string(&self) -> String {
|
||||
self.value.clone()
|
||||
}
|
||||
}
|
||||
|
||||
/// Custom deserializer for SelectOption that supports both formats:
|
||||
/// - "simple string" → SelectOption { value: "simple string", label: None }
|
||||
/// - { value: "x", label: "Display Label" } → SelectOption { value: "x", label: Some("Display Label") }
|
||||
impl<'de> Deserialize<'de> for SelectOption {
|
||||
fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
let value = serde_json::Value::deserialize(deserializer)?;
|
||||
|
||||
match value {
|
||||
// Simple string format: "cx11" → {value: "cx11", label: None}
|
||||
serde_json::Value::String(s) => Ok(SelectOption {
|
||||
value: s,
|
||||
label: None,
|
||||
}),
|
||||
// Object format: {value: "cx11", label: "Shared CPU"} or {value: "cx11"}
|
||||
serde_json::Value::Object(obj) => {
|
||||
let val = obj
|
||||
.get("value")
|
||||
.and_then(|v| v.as_str())
|
||||
.map(|s| s.to_string())
|
||||
.ok_or_else(|| de::Error::missing_field("value"))?;
|
||||
|
||||
let lbl = obj
|
||||
.get("label")
|
||||
.and_then(|v| v.as_str())
|
||||
.map(|s| s.to_string());
|
||||
|
||||
Ok(SelectOption {
|
||||
value: val,
|
||||
label: lbl,
|
||||
})
|
||||
}
|
||||
_ => Err(de::Error::custom(
|
||||
"SelectOption must be a string or object with 'value' field",
|
||||
)),
|
||||
}
|
||||
}
|
||||
}
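// Illustrative sketch (not part of the diff): both option syntaxes accepted by
// the deserializer above, parsed from hypothetical TOML.
use typedialog_core::{form_parser, Result};

fn parse_option_formats() -> Result<()> {
    let form = form_parser::parse_toml(
        r#"
        name = "demo"

        [[fields]]
        name = "server_type"
        type = "select"
        prompt = "Server type"
        options = [
            "cx11",                                   # bare value
            { value = "cx21", label = "Shared CPU" }, # value with display label
        ]
        "#,
    )?;

    let field = &form.fields[0];
    assert_eq!(field.options[0].display_label(), "cx11");
    assert_eq!(field.options[1].display_label(), "Shared CPU");
    Ok(())
}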
|
||||
|
||||
/// Support conversion from simple string to SelectOption
|
||||
impl From<String> for SelectOption {
|
||||
fn from(value: String) -> Self {
|
||||
SelectOption { value, label: None }
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&str> for SelectOption {
|
||||
fn from(value: &str) -> Self {
|
||||
SelectOption {
|
||||
value: value.to_string(),
|
||||
label: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A single field in a form
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct FieldDefinition {
|
||||
/// Field name (becomes the result key)
|
||||
pub name: String,
|
||||
/// Field input type
|
||||
#[serde(rename = "type")]
|
||||
pub field_type: FieldType,
|
||||
/// Prompt message (can be literal text or i18n key)
|
||||
pub prompt: String,
|
||||
/// Optional default value (can contain template expressions like {{ env.USER }})
|
||||
/// Accepts both string ("false", "true") and boolean (false, true) TOML values
|
||||
#[serde(default, deserialize_with = "deserialize_default")]
|
||||
pub default: Option<String>,
|
||||
/// Optional placeholder text (can be literal text or i18n key)
|
||||
pub placeholder: Option<String>,
|
||||
/// Optional options list with value/label (can contain literal text or i18n keys)
|
||||
#[serde(default)]
|
||||
pub options: Vec<SelectOption>,
|
||||
/// Optional field requirement flag
|
||||
pub required: Option<bool>,
|
||||
/// Optional file extension (for editor)
|
||||
pub file_extension: Option<String>,
|
||||
/// Optional prefix text (for editor)
|
||||
pub prefix_text: Option<String>,
|
||||
/// Optional page size (for select/multiselect)
|
||||
pub page_size: Option<usize>,
|
||||
/// Optional vim mode flag (for select/multiselect)
|
||||
pub vim_mode: Option<bool>,
|
||||
/// Optional display mode for multiselect: "list" (default), "grid", "dropdown", "tags"
|
||||
#[serde(default)]
|
||||
pub display_mode: Option<String>,
|
||||
/// Optional searchable flag for multiselect/dropdown (enables filtering)
|
||||
#[serde(default)]
|
||||
pub searchable: Option<bool>,
|
||||
/// Optional minimum selected items (for multiselect)
|
||||
#[serde(default)]
|
||||
pub min_selected: Option<usize>,
|
||||
/// Optional maximum selected items (for multiselect)
|
||||
#[serde(default)]
|
||||
pub max_selected: Option<usize>,
|
||||
/// Optional custom type name (for custom)
|
||||
pub custom_type: Option<String>,
|
||||
/// Optional min date (for date)
|
||||
pub min_date: Option<String>,
|
||||
/// Optional max date (for date)
|
||||
pub max_date: Option<String>,
|
||||
/// Optional week start day (for date, default: Mon)
|
||||
pub week_start: Option<String>,
|
||||
/// Display order (position in form flow)
|
||||
#[serde(default = "default_order")]
|
||||
pub order: usize,
|
||||
/// Optional conditional display (e.g., "role == admin", "country != US")
|
||||
pub when: Option<String>,
|
||||
/// Optional flag indicating if prompt/placeholder/options are i18n keys
|
||||
pub i18n: Option<bool>,
|
||||
/// Optional semantic grouping for form organization
|
||||
#[serde(default)]
|
||||
pub group: Option<String>,
|
||||
/// Nickel contract/predicate (e.g., "String | std.string.NonEmpty")
|
||||
#[serde(default)]
|
||||
pub nickel_contract: Option<String>,
|
||||
/// Original Nickel field path (e.g., ["user", "name"])
|
||||
#[serde(default)]
|
||||
pub nickel_path: Option<Vec<String>>,
|
||||
/// Original Nickel documentation
|
||||
#[serde(default)]
|
||||
pub nickel_doc: Option<String>,
|
||||
/// Semantic alias for field name (short, human-readable)
|
||||
#[serde(default)]
|
||||
pub nickel_alias: Option<String>,
|
||||
/// Fragment path for RepeatingGroup (defines array element fields)
|
||||
#[serde(default)]
|
||||
pub fragment: Option<String>,
|
||||
/// Minimum items for array (0 = optional)
|
||||
#[serde(default)]
|
||||
pub min_items: Option<usize>,
|
||||
/// Maximum items limit
|
||||
#[serde(default)]
|
||||
pub max_items: Option<usize>,
|
||||
/// Initial number of items to display
|
||||
#[serde(default)]
|
||||
pub default_items: Option<usize>,
|
||||
/// Mark repeating group items as unique (all fields must be different)
|
||||
#[serde(default)]
|
||||
pub unique: Option<bool>,
|
||||
/// Mark this field as the unique key for repeating group (only this field must be different)
|
||||
#[serde(default)]
|
||||
pub unique_key: Option<bool>,
|
||||
/// Mark field value as sensitive (encrypt or redact output)
|
||||
#[serde(default)]
|
||||
pub sensitive: Option<bool>,
|
||||
/// Encryption backend (age, rustyvault, sops)
|
||||
#[serde(default)]
|
||||
pub encryption_backend: Option<String>,
|
||||
/// Encryption config (backend-specific settings: key_file, vault_addr, etc)
|
||||
#[serde(default)]
|
||||
pub encryption_config: Option<std::collections::HashMap<String, String>>,
|
||||
}
|
||||
|
||||
impl FieldDefinition {
|
||||
/// Auto-detect sensitivity: true when `sensitive = true`, or when the field type is `password` and `sensitive` is not explicitly false
|
||||
pub fn is_sensitive(&self) -> bool {
|
||||
match self.sensitive {
|
||||
Some(true) => true,
|
||||
Some(false) => false,
|
||||
None => self.field_type == FieldType::Password,
|
||||
}
|
||||
}
|
||||
|
||||
/// Get effective encryption backend (field-level > default > "age")
|
||||
pub fn effective_encryption_backend(&self, cli_default: Option<&str>) -> String {
|
||||
self.encryption_backend
|
||||
.clone()
|
||||
.or_else(|| cli_default.map(String::from))
|
||||
.unwrap_or_else(|| "age".to_string())
|
||||
}
|
||||
}
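// Illustrative sketch (not part of the diff): how the two helpers above
// combine for a password field parsed from hypothetical TOML.
use typedialog_core::{form_parser, Result};

fn sensitive_defaults() -> Result<()> {
    let form = form_parser::parse_toml(
        r#"
        name = "demo"

        [[fields]]
        name = "api_token"
        type = "password"
        prompt = "API token"
        "#,
    )?;

    let field = &form.fields[0];
    assert!(field.is_sensitive()); // password implies sensitive unless overridden
    // No field-level backend and no CLI default -> falls back to "age".
    assert_eq!(field.effective_encryption_backend(None), "age");
    assert_eq!(field.effective_encryption_backend(Some("sops")), "sops");
    Ok(())
}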
|
||||
|
||||
/// Supported field input types
|
||||
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
pub enum FieldType {
|
||||
/// Single-line text input
|
||||
Text,
|
||||
/// Yes/no confirmation
|
||||
Confirm,
|
||||
/// Single selection from list
|
||||
Select,
|
||||
/// Multiple selection from list
|
||||
MultiSelect,
|
||||
/// Secure password input
|
||||
Password,
|
||||
/// Custom type parsing
|
||||
Custom,
|
||||
/// External editor
|
||||
Editor,
|
||||
/// Date selection
|
||||
Date,
|
||||
/// Repeating group - dynamic add/remove instances from fragment
|
||||
RepeatingGroup,
|
||||
}
|
||||
|
||||
/// Form display mode - how fields are presented to user
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
pub enum DisplayMode {
|
||||
/// Show all fields at once (complete form) - default for TUI and Web
|
||||
#[default]
|
||||
#[serde(alias = "complete", alias = "all")]
|
||||
Complete,
|
||||
/// Show one field at a time (step by step)
|
||||
#[serde(alias = "step")]
|
||||
FieldByField,
|
||||
}
|
||||
154
crates/typedialog-tui/src/commands/form.rs
Normal file
@ -0,0 +1,154 @@
|
||||
//! Form execution command implementation for TUI backend
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::fs;
|
||||
use std::path::PathBuf;
|
||||
use typedialog_core::backends::{BackendFactory, BackendType};
|
||||
use typedialog_core::config::TypeDialogConfig;
|
||||
use typedialog_core::i18n::{I18nBundle, LocaleLoader, LocaleResolver};
|
||||
use typedialog_core::nickel::NickelCli;
|
||||
use typedialog_core::{form_parser, helpers, Error, Result};
|
||||
use unic_langid::LanguageIdentifier;
|
||||
|
||||
use super::helpers::{extract_nickel_defaults, flatten_json_object};
|
||||
|
||||
pub async fn execute_form(
|
||||
config: PathBuf,
|
||||
defaults: Option<PathBuf>,
|
||||
format: &str,
|
||||
output_file: &Option<PathBuf>,
|
||||
cli_locale: &Option<String>,
|
||||
) -> Result<()> {
|
||||
let toml_content = fs::read_to_string(&config).map_err(Error::io)?;
|
||||
let mut form = form_parser::parse_toml(&toml_content)?;
|
||||
|
||||
// TUI backend uses unified elements array internally, migrate if using legacy format
|
||||
form.migrate_to_elements();
|
||||
|
||||
// Extract base directory for resolving relative paths in includes
|
||||
let base_dir = config.parent().unwrap_or_else(|| std::path::Path::new("."));
|
||||
|
||||
// Load default values from JSON or .ncl file if provided
|
||||
let initial_values = if let Some(defaults_path) = defaults {
|
||||
NickelCli::verify()?;
|
||||
let is_ncl = defaults_path.extension().and_then(|s| s.to_str()) == Some("ncl");
|
||||
|
||||
let defaults_json: HashMap<String, serde_json::Value> = if is_ncl {
|
||||
let value = NickelCli::export(&defaults_path)?;
|
||||
match value {
|
||||
serde_json::Value::Object(map) => {
|
||||
let extracted = extract_nickel_defaults(&map, &form.fields);
|
||||
let flattened = flatten_json_object(&map);
|
||||
let mut combined = extracted;
|
||||
for (k, v) in flattened {
|
||||
combined.entry(k).or_insert(v);
|
||||
}
|
||||
combined
|
||||
}
|
||||
_ => {
|
||||
return Err(Error::validation_failed(
|
||||
"Defaults .ncl must export to a JSON object".to_string(),
|
||||
))
|
||||
}
|
||||
}
|
||||
} else {
|
||||
let defaults_content = fs::read_to_string(&defaults_path).map_err(|e| {
|
||||
Error::validation_failed(format!("Failed to read defaults file: {}", e))
|
||||
})?;
|
||||
let parsed: serde_json::Value =
|
||||
serde_json::from_str(&defaults_content).map_err(|e| {
|
||||
Error::validation_failed(format!("Failed to parse defaults JSON: {}", e))
|
||||
})?;
|
||||
match parsed {
|
||||
serde_json::Value::Object(map) => {
|
||||
let extracted = extract_nickel_defaults(&map, &form.fields);
|
||||
let flattened = flatten_json_object(&map);
|
||||
let mut combined = extracted;
|
||||
for (k, v) in flattened {
|
||||
combined.entry(k).or_insert(v);
|
||||
}
|
||||
combined
|
||||
}
|
||||
_ => {
|
||||
return Err(Error::validation_failed(
|
||||
"Defaults must be a JSON object".to_string(),
|
||||
))
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
if !defaults_json.is_empty() {
|
||||
eprintln!(
|
||||
"[DEBUG] Loaded {} default field values",
|
||||
defaults_json.len()
|
||||
);
|
||||
for key in defaults_json.keys().take(5) {
|
||||
eprintln!("[DEBUG] - {}", key);
|
||||
}
|
||||
if defaults_json.len() > 5 {
|
||||
eprintln!("[DEBUG] ... and {} more", defaults_json.len() - 5);
|
||||
}
|
||||
}
|
||||
Some(defaults_json)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
// Load I18nBundle if needed
|
||||
let i18n_bundle = if form.locale.is_some() || cli_locale.is_some() {
|
||||
let config = TypeDialogConfig::default();
|
||||
let resolver = LocaleResolver::new(config.clone());
|
||||
let form_locale = form.locale.as_deref();
|
||||
let locale = resolver.resolve(cli_locale.as_deref(), form_locale);
|
||||
let fallback_locale: LanguageIdentifier = "en-US"
|
||||
.parse()
|
||||
.map_err(|_| Error::validation_failed("Invalid fallback locale".to_string()))?;
|
||||
let loader = LocaleLoader::new(config.locales_path);
|
||||
Some(I18nBundle::new(locale, fallback_locale, &loader)?)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
// Create TUI backend
|
||||
let mut backend = BackendFactory::create(BackendType::Tui)?;
|
||||
|
||||
// Execute form
|
||||
let results = if let Some(ref bundle) = i18n_bundle {
|
||||
form_parser::execute_with_backend_i18n_with_defaults(
|
||||
form,
|
||||
backend.as_mut(),
|
||||
Some(bundle),
|
||||
base_dir,
|
||||
initial_values,
|
||||
)
|
||||
.await?
|
||||
} else {
|
||||
form_parser::execute_with_backend_i18n_with_defaults(
|
||||
form,
|
||||
backend.as_mut(),
|
||||
None,
|
||||
base_dir,
|
||||
initial_values,
|
||||
)
|
||||
.await?
|
||||
};
|
||||
|
||||
print_results(&results, format, output_file)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn print_results(
|
||||
results: &HashMap<String, serde_json::Value>,
|
||||
format: &str,
|
||||
output_file: &Option<PathBuf>,
|
||||
) -> Result<()> {
|
||||
let output = helpers::format_results(results, format)?;
|
||||
|
||||
if let Some(path) = output_file {
|
||||
fs::write(path, &output).map_err(Error::io)?;
|
||||
} else {
|
||||
println!("{}", output);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
83
crates/typedialog-tui/src/commands/helpers.rs
Normal file
@ -0,0 +1,83 @@
|
||||
//! Helper functions for command implementations
|
||||
|
||||
use std::collections::HashMap;
|
||||
use typedialog_core::form_parser;
|
||||
|
||||
/// Recursively flatten nested JSON objects into a single-level map
|
||||
/// Converts {"a": {"b": {"c": "value"}}} to {"a_b_c": "value"}
|
||||
pub fn flatten_json_object(
|
||||
obj: &serde_json::Map<String, serde_json::Value>,
|
||||
) -> HashMap<String, serde_json::Value> {
|
||||
let mut result = HashMap::new();
|
||||
flatten_recursive(obj, "", &mut result);
|
||||
result
|
||||
}
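// Sketch of the flattening behaviour (illustrative only; written as an
// in-crate test because the `commands` module is private to this binary).
#[cfg(test)]
mod flatten_sketch {
    use super::flatten_json_object;

    #[test]
    fn nested_objects_become_underscore_keys() {
        let value = serde_json::json!({
            "server": { "network": { "port": 8080 } },
            "tags": ["a", "b"]
        });
        let flat = flatten_json_object(value.as_object().unwrap());

        assert_eq!(flat["server_network_port"], serde_json::json!(8080));
        // Arrays are kept as-is under their (possibly prefixed) key.
        assert_eq!(flat["tags"], serde_json::json!(["a", "b"]));
    }
}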
|
||||
|
||||
fn flatten_recursive(
|
||||
obj: &serde_json::Map<String, serde_json::Value>,
|
||||
prefix: &str,
|
||||
result: &mut HashMap<String, serde_json::Value>,
|
||||
) {
|
||||
for (key, value) in obj.iter() {
|
||||
let new_key = if prefix.is_empty() {
|
||||
key.clone()
|
||||
} else {
|
||||
format!("{}_{}", prefix, key)
|
||||
};
|
||||
|
||||
match value {
|
||||
serde_json::Value::Object(nested) => {
|
||||
flatten_recursive(nested, &new_key, result);
|
||||
}
|
||||
serde_json::Value::Array(arr) => {
|
||||
result.insert(new_key, serde_json::Value::Array(arr.clone()));
|
||||
}
|
||||
_ => {
|
||||
result.insert(new_key, value.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Extract defaults from Nickel export using schema-driven approach
|
||||
pub fn extract_nickel_defaults(
|
||||
obj: &serde_json::Map<String, serde_json::Value>,
|
||||
form_fields: &[form_parser::FieldDefinition],
|
||||
) -> HashMap<String, serde_json::Value> {
|
||||
use typedialog_core::nickel::{DefaultsExtractor, FieldMapper, NickelFieldIR, NickelSchemaIR};
|
||||
|
||||
let mut schema_fields = Vec::new();
|
||||
for field in form_fields {
|
||||
if let Some(nickel_path) = &field.nickel_path {
|
||||
schema_fields.push(NickelFieldIR {
|
||||
path: nickel_path.clone(),
|
||||
flat_name: nickel_path.join("-"),
|
||||
alias: field.nickel_alias.clone(),
|
||||
nickel_type: typedialog_core::nickel::NickelType::String,
|
||||
doc: None,
|
||||
default: None,
|
||||
optional: false,
|
||||
contract: None,
|
||||
contract_call: None,
|
||||
group: None,
|
||||
fragment_marker: None,
|
||||
is_array_of_records: false,
|
||||
array_element_fields: None,
|
||||
encryption_metadata: None,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if !schema_fields.is_empty() {
|
||||
let schema = NickelSchemaIR {
|
||||
name: "form".to_string(),
|
||||
description: None,
|
||||
fields: schema_fields,
|
||||
};
|
||||
if let Ok(mapper) = FieldMapper::from_schema(&schema) {
|
||||
return DefaultsExtractor::extract(&serde_json::Value::Object(obj.clone()), &mapper);
|
||||
}
|
||||
}
|
||||
|
||||
flatten_json_object(obj)
|
||||
}
|
||||
14
crates/typedialog-tui/src/commands/mod.rs
Normal file
@ -0,0 +1,14 @@
|
||||
//! Command implementations for typedialog-tui CLI
|
||||
//!
|
||||
//! Separates business logic from CLI dispatch to reduce coupling.
|
||||
|
||||
pub mod form;
|
||||
pub mod helpers;
|
||||
pub mod nickel;
|
||||
|
||||
// Re-export command functions
|
||||
pub use form::execute_form;
|
||||
pub use nickel::{
|
||||
form_to_nickel as form_to_nickel_cmd, nickel_roundtrip as nickel_roundtrip_cmd,
|
||||
nickel_template as nickel_template_cmd, nickel_to_form as nickel_to_form_cmd,
|
||||
};
|
||||
208
crates/typedialog-tui/src/commands/nickel.rs
Normal file
@ -0,0 +1,208 @@
|
||||
//! Nickel-related command implementations
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::fs;
|
||||
use std::path::PathBuf;
|
||||
use typedialog_core::backends::{BackendFactory, BackendType};
|
||||
use typedialog_core::nickel::{
|
||||
I18nExtractor, MetadataParser, NickelCli, TemplateEngine, TomlGenerator,
|
||||
};
|
||||
use typedialog_core::{form_parser, Error, Result};
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn nickel_to_form(
|
||||
schema: PathBuf,
|
||||
_current_data: Option<PathBuf>,
|
||||
flatten: bool,
|
||||
groups: bool,
|
||||
fragments: bool,
|
||||
_conditionals: bool,
|
||||
i18n: bool,
|
||||
output_dir: Option<PathBuf>,
|
||||
) -> Result<()> {
|
||||
NickelCli::verify()?;
|
||||
let metadata = NickelCli::query(schema.as_path(), Some("inputs"))?;
|
||||
let mut schema_ir = MetadataParser::parse(metadata)?;
|
||||
|
||||
if fragments {
|
||||
let markers = MetadataParser::extract_fragment_markers_from_source(schema.as_path())?;
|
||||
MetadataParser::apply_fragment_markers(&mut schema_ir, &markers);
|
||||
}
|
||||
|
||||
let forms_output = if fragments && schema_ir.fields.iter().any(|f| f.fragment_marker.is_some())
|
||||
{
|
||||
TomlGenerator::generate_with_fragments(&schema_ir, flatten, groups)?
|
||||
} else {
|
||||
let form_def = TomlGenerator::generate(&schema_ir, flatten, groups)?;
|
||||
let mut single_output = HashMap::new();
|
||||
single_output.insert("form.toml".to_string(), form_def);
|
||||
single_output
|
||||
};
|
||||
|
||||
let output_path = output_dir.unwrap_or_else(|| {
|
||||
if fragments && forms_output.len() > 1 {
|
||||
PathBuf::from("generated")
|
||||
} else {
|
||||
PathBuf::from(".")
|
||||
}
|
||||
});
|
||||
|
||||
if forms_output.len() == 1 && output_path.as_path() == std::path::Path::new(".") {
|
||||
if let Some((_, form_def)) = forms_output.iter().next() {
|
||||
let toml_output = ::toml::to_string_pretty(form_def)
|
||||
.map_err(|e| Error::validation_failed(e.to_string()))?;
|
||||
println!("{}", toml_output);
|
||||
}
|
||||
} else {
|
||||
fs::create_dir_all(&output_path).map_err(Error::io)?;
|
||||
|
||||
for (filename, form_def) in forms_output {
|
||||
let file_path = if filename.starts_with("fragments/") {
|
||||
output_path
|
||||
.join("fragments")
|
||||
.join(filename.strip_prefix("fragments/").unwrap())
|
||||
} else {
|
||||
output_path.join(&filename)
|
||||
};
|
||||
|
||||
fs::create_dir_all(file_path.parent().unwrap()).map_err(Error::io)?;
|
||||
|
||||
let toml_output = ::toml::to_string_pretty(&form_def)
|
||||
.map_err(|e| Error::validation_failed(e.to_string()))?;
|
||||
|
||||
fs::write(&file_path, &toml_output).map_err(Error::io)?;
|
||||
eprintln!(" Generated: {}", file_path.display());
|
||||
}
|
||||
|
||||
println!("✓ Forms generated in {}/", output_path.display());
|
||||
}
|
||||
|
||||
if i18n {
|
||||
let i18n_output_dir = output_path.join("locales");
|
||||
let _i18n_mapping = I18nExtractor::extract_and_generate(&schema_ir, &i18n_output_dir)?;
|
||||
|
||||
eprintln!(
|
||||
"✓ i18n translations generated in {}/",
|
||||
i18n_output_dir.display()
|
||||
);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn form_to_nickel(
|
||||
form: PathBuf,
|
||||
input: PathBuf,
|
||||
output: &Option<PathBuf>,
|
||||
_validate: bool,
|
||||
) -> Result<()> {
|
||||
let form_content = fs::read_to_string(&form).map_err(Error::io)?;
|
||||
let _form_def = form_parser::parse_toml(&form_content)?;
|
||||
|
||||
// Path::extension() only yields the final segment ("j2"), so match the full
// ".ncl.j2" suffix against the file name instead.
let file_name = input.file_name().and_then(|s| s.to_str()).unwrap_or("");
let results: HashMap<String, serde_json::Value> = if file_name.ends_with(".ncl.j2") {
|
||||
return Err(Error::validation_failed(
|
||||
"Template-based form-to-nickel requires interactive execution. Use .json input instead."
|
||||
));
|
||||
} else if input.extension().and_then(|s| s.to_str()) == Some("json") {
|
||||
let json_content = fs::read_to_string(&input).map_err(Error::io)?;
|
||||
serde_json::from_str(&json_content).map_err(|e| Error::validation_failed(e.to_string()))?
|
||||
} else {
|
||||
return Err(Error::validation_failed(
|
||||
"Input file must be .json or .ncl.j2",
|
||||
));
|
||||
};
|
||||
|
||||
let nickel_output = format!(
|
||||
"# Form results (JSON format for now)\n{}",
|
||||
serde_json::to_string_pretty(&results)
|
||||
.map_err(|e| Error::validation_failed(e.to_string()))?
|
||||
);
|
||||
|
||||
if let Some(path) = output {
|
||||
fs::write(path, &nickel_output).map_err(Error::io)?;
|
||||
println!("Nickel output written to {}", path.display());
|
||||
} else {
|
||||
println!("{}", nickel_output);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn nickel_template(
|
||||
template: PathBuf,
|
||||
results: PathBuf,
|
||||
output: &Option<PathBuf>,
|
||||
) -> Result<()> {
|
||||
let json_content = fs::read_to_string(&results).map_err(Error::io)?;
|
||||
let values: HashMap<String, serde_json::Value> =
|
||||
serde_json::from_str(&json_content).map_err(|e| Error::validation_failed(e.to_string()))?;
|
||||
|
||||
let mut engine = TemplateEngine::new();
|
||||
let nickel_output = engine.render_file(template.as_path(), &values, None)?;
|
||||
|
||||
if let Some(path) = output {
|
||||
fs::write(path, &nickel_output).map_err(Error::io)?;
|
||||
println!("Template rendered to {}", path.display());
|
||||
} else {
|
||||
println!("{}", nickel_output);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn nickel_roundtrip(
|
||||
input: PathBuf,
|
||||
form_path: PathBuf,
|
||||
output: PathBuf,
|
||||
ncl_template: Option<PathBuf>,
|
||||
validate: bool,
|
||||
verbose: bool,
|
||||
) -> Result<()> {
|
||||
use typedialog_core::nickel::RoundtripConfig;
|
||||
|
||||
if verbose {
|
||||
eprintln!("Starting Nickel roundtrip workflow with TUI backend");
|
||||
}
|
||||
|
||||
// Create TUI backend
|
||||
let mut backend = BackendFactory::create(BackendType::Tui)?;
|
||||
|
||||
// Create roundtrip config
|
||||
let mut config = RoundtripConfig::with_template(input, form_path, output, ncl_template);
|
||||
config.validate = validate;
|
||||
config.verbose = verbose;
|
||||
|
||||
// Execute roundtrip with TUI backend
|
||||
let result = config.execute_with_backend(backend.as_mut()).await?;
|
||||
|
||||
if verbose {
|
||||
eprintln!("[roundtrip] Generated {} bytes", result.output_nickel.len());
|
||||
}
|
||||
|
||||
// Print summary
|
||||
println!("✓ Roundtrip completed successfully (TUI backend)");
|
||||
println!(" Input fields: {}", result.form_results.len());
|
||||
println!(
|
||||
" Imports preserved: {}",
|
||||
result.input_contracts.imports.len()
|
||||
);
|
||||
println!(
|
||||
" Contracts preserved: {}",
|
||||
result.input_contracts.field_contracts.len()
|
||||
);
|
||||
|
||||
if let Some(passed) = result.validation_passed {
|
||||
if passed {
|
||||
println!(" ✓ Validation: PASSED");
|
||||
} else {
|
||||
println!(" ✗ Validation: FAILED");
|
||||
return Err(Error::validation_failed(
|
||||
"Nickel typecheck failed on output",
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@ -5,21 +5,13 @@
|
||||
//! A terminal UI (TUI) tool for creating interactive forms with enhanced visual presentation.
|
||||
//! Uses ratatui for advanced terminal rendering capabilities.
|
||||
|
||||
mod commands;
|
||||
|
||||
use clap::{Parser, Subcommand};
|
||||
use std::collections::HashMap;
|
||||
use std::fs;
|
||||
use std::path::PathBuf;
|
||||
use typedialog_core::backends::{BackendFactory, BackendType};
|
||||
use typedialog_core::cli_common;
|
||||
use typedialog_core::config::{load_backend_config, TypeDialogConfig};
|
||||
use typedialog_core::helpers;
|
||||
use typedialog_core::i18n::{I18nBundle, LocaleLoader, LocaleResolver};
|
||||
use typedialog_core::nickel::{
|
||||
DefaultsExtractor, FieldMapper, I18nExtractor, MetadataParser, NickelCli, NickelFieldIR,
|
||||
NickelSchemaIR, TemplateEngine, TomlGenerator,
|
||||
};
|
||||
use typedialog_core::{form_parser, Error, Result};
|
||||
use unic_langid::LanguageIdentifier;
|
||||
use typedialog_core::{Error, Result};
|
||||
|
||||
#[derive(Parser)]
|
||||
#[command(
|
||||
@ -155,89 +147,6 @@ enum Commands {
|
||||
},
|
||||
}
|
||||
|
||||
/// Recursively flatten nested JSON objects into a single-level map
|
||||
/// Converts {"a": {"b": {"c": "value"}}} to {"a_b_c": "value"}
|
||||
fn flatten_json_object(
|
||||
obj: &serde_json::Map<String, serde_json::Value>,
|
||||
) -> HashMap<String, serde_json::Value> {
|
||||
let mut result = HashMap::new();
|
||||
flatten_recursive(obj, "", &mut result);
|
||||
result
|
||||
}
|
||||
|
||||
fn flatten_recursive(
|
||||
obj: &serde_json::Map<String, serde_json::Value>,
|
||||
prefix: &str,
|
||||
result: &mut HashMap<String, serde_json::Value>,
|
||||
) {
|
||||
for (key, value) in obj.iter() {
|
||||
let new_key = if prefix.is_empty() {
|
||||
key.clone()
|
||||
} else {
|
||||
format!("{}_{}", prefix, key)
|
||||
};
|
||||
|
||||
match value {
|
||||
serde_json::Value::Object(nested) => {
|
||||
// Recursively flatten nested objects
|
||||
flatten_recursive(nested, &new_key, result);
|
||||
}
|
||||
serde_json::Value::Array(arr) => {
|
||||
// For arrays, just store them as-is with their key
|
||||
result.insert(new_key, serde_json::Value::Array(arr.clone()));
|
||||
}
|
||||
_ => {
|
||||
// Keep primitive values (string, number, bool, null)
|
||||
result.insert(new_key, value.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Extract defaults from Nickel export using schema-driven approach
|
||||
fn extract_nickel_defaults(
|
||||
obj: &serde_json::Map<String, serde_json::Value>,
|
||||
form_fields: &[form_parser::FieldDefinition],
|
||||
) -> HashMap<String, serde_json::Value> {
|
||||
// Build a minimal schema from form fields that have nickel_path
|
||||
let mut schema_fields = Vec::new();
|
||||
for field in form_fields {
|
||||
if let Some(nickel_path) = &field.nickel_path {
|
||||
schema_fields.push(NickelFieldIR {
|
||||
path: nickel_path.clone(),
|
||||
flat_name: nickel_path.join("-"),
|
||||
alias: field.nickel_alias.clone(),
|
||||
nickel_type: typedialog_core::nickel::NickelType::String,
|
||||
doc: None,
|
||||
default: None,
|
||||
optional: false,
|
||||
contract: None,
|
||||
contract_call: None,
|
||||
group: None,
|
||||
fragment_marker: None,
|
||||
is_array_of_records: false,
|
||||
array_element_fields: None,
|
||||
encryption_metadata: None,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// If we have schema fields, use DefaultsExtractor
|
||||
if !schema_fields.is_empty() {
|
||||
let schema = NickelSchemaIR {
|
||||
name: "form".to_string(),
|
||||
description: None,
|
||||
fields: schema_fields,
|
||||
};
|
||||
if let Ok(mapper) = FieldMapper::from_schema(&schema) {
|
||||
return DefaultsExtractor::extract(&serde_json::Value::Object(obj.clone()), &mapper);
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback: flatten everything if schema-driven extraction fails
|
||||
flatten_json_object(obj)
|
||||
}
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> Result<()> {
|
||||
let args = Args::parse();
|
||||
@ -253,7 +162,7 @@ async fn main() -> Result<()> {
|
||||
|
||||
match args.command {
|
||||
Some(Commands::Form { config, defaults }) => {
|
||||
execute_form(config, defaults, &args.format, &args.out, &args.locale).await?;
|
||||
commands::execute_form(config, defaults, &args.format, &args.out, &args.locale).await?;
|
||||
}
|
||||
Some(Commands::NickelToForm {
|
||||
schema,
|
||||
@ -265,7 +174,7 @@ async fn main() -> Result<()> {
|
||||
i18n,
|
||||
output,
|
||||
}) => {
|
||||
nickel_to_form_cmd(
|
||||
commands::nickel_to_form_cmd(
|
||||
schema,
|
||||
current_data,
|
||||
flatten,
|
||||
@ -281,10 +190,10 @@ async fn main() -> Result<()> {
|
||||
input,
|
||||
validate,
|
||||
}) => {
|
||||
form_to_nickel_cmd(form, input, &args.out, validate)?;
|
||||
commands::form_to_nickel_cmd(form, input, &args.out, validate)?;
|
||||
}
|
||||
Some(Commands::NickelTemplate { template, results }) => {
|
||||
nickel_template_cmd(template, results, &args.out)?;
|
||||
commands::nickel_template_cmd(template, results, &args.out)?;
|
||||
}
|
||||
Some(Commands::NickelRoundtrip {
|
||||
input,
|
||||
@ -294,381 +203,21 @@ async fn main() -> Result<()> {
|
||||
no_validate,
|
||||
verbose,
|
||||
}) => {
|
||||
nickel_roundtrip_cmd(input, form, output, ncl_template, !no_validate, verbose).await?;
|
||||
commands::nickel_roundtrip_cmd(
|
||||
input,
|
||||
form,
|
||||
output,
|
||||
ncl_template,
|
||||
!no_validate,
|
||||
verbose,
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
None => {
|
||||
let config = args.config.ok_or_else(|| {
|
||||
Error::validation_failed("Please provide a form configuration file")
|
||||
})?;
|
||||
execute_form(config, None, &args.format, &args.out, &args.locale).await?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn execute_form(
|
||||
config: PathBuf,
|
||||
defaults: Option<PathBuf>,
|
||||
format: &str,
|
||||
output_file: &Option<PathBuf>,
|
||||
cli_locale: &Option<String>,
|
||||
) -> Result<()> {
|
||||
let toml_content = fs::read_to_string(&config).map_err(Error::io)?;
|
||||
|
||||
let mut form = form_parser::parse_toml(&toml_content)?;
|
||||
|
||||
// TUI backend uses unified elements array internally, migrate if using legacy format
|
||||
form.migrate_to_elements();
|
||||
|
||||
// Extract base directory for resolving relative paths in includes
|
||||
let base_dir = config.parent().unwrap_or_else(|| std::path::Path::new("."));
|
||||
|
||||
// Note: expand_includes() is handled internally by build_element_list()
|
||||
|
||||
// Load default values from JSON or .ncl file if provided
|
||||
let initial_values = if let Some(defaults_path) = defaults {
|
||||
use typedialog_core::nickel::NickelCli;
|
||||
|
||||
let is_ncl = defaults_path.extension().and_then(|s| s.to_str()) == Some("ncl");
|
||||
|
||||
let defaults_json: HashMap<String, serde_json::Value> = if is_ncl {
|
||||
// Convert .ncl to JSON using nickel export
|
||||
NickelCli::verify()?;
|
||||
let value = NickelCli::export(&defaults_path)?;
|
||||
match value {
|
||||
serde_json::Value::Object(map) => {
|
||||
// Use schema-driven extraction with form fields, fallback to flattening
|
||||
let extracted = extract_nickel_defaults(&map, &form.fields);
|
||||
let flattened = flatten_json_object(&map);
|
||||
let mut combined = extracted;
|
||||
// Flattened values fill in gaps not covered by extraction
|
||||
for (k, v) in flattened {
|
||||
combined.entry(k).or_insert(v);
|
||||
}
|
||||
combined
|
||||
}
|
||||
_ => {
|
||||
return Err(Error::validation_failed(
|
||||
"Defaults .ncl must export to a JSON object".to_string(),
|
||||
))
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Read JSON directly - combine extraction and flatten
|
||||
let defaults_content = fs::read_to_string(&defaults_path).map_err(|e| {
|
||||
Error::validation_failed(format!("Failed to read defaults file: {}", e))
|
||||
})?;
|
||||
let parsed: serde_json::Value =
|
||||
serde_json::from_str(&defaults_content).map_err(|e| {
|
||||
Error::validation_failed(format!("Failed to parse defaults JSON: {}", e))
|
||||
})?;
|
||||
match parsed {
|
||||
serde_json::Value::Object(map) => {
|
||||
let extracted = extract_nickel_defaults(&map, &form.fields);
|
||||
let flattened = flatten_json_object(&map);
|
||||
let mut combined = extracted;
|
||||
for (k, v) in flattened {
|
||||
combined.entry(k).or_insert(v);
|
||||
}
|
||||
combined
|
||||
}
|
||||
_ => {
|
||||
return Err(Error::validation_failed(
|
||||
"Defaults must be a JSON object".to_string(),
|
||||
))
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
if !defaults_json.is_empty() {
|
||||
eprintln!(
|
||||
"[DEBUG] Loaded {} default field values",
|
||||
defaults_json.len()
|
||||
);
|
||||
for key in defaults_json.keys().take(5) {
|
||||
eprintln!("[DEBUG] - {}", key);
|
||||
}
|
||||
if defaults_json.len() > 5 {
|
||||
eprintln!("[DEBUG] ... and {} more", defaults_json.len() - 5);
|
||||
}
|
||||
}
|
||||
Some(defaults_json)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
// Load I18nBundle if needed
|
||||
let i18n_bundle = if form.locale.is_some() || cli_locale.is_some() {
|
||||
let config = TypeDialogConfig::default();
|
||||
let resolver = LocaleResolver::new(config.clone());
|
||||
let form_locale = form.locale.as_deref();
|
||||
let locale = resolver.resolve(cli_locale.as_deref(), form_locale);
|
||||
let fallback_locale: LanguageIdentifier = "en-US"
|
||||
.parse()
|
||||
.map_err(|_| Error::validation_failed("Invalid fallback locale".to_string()))?;
|
||||
let loader = LocaleLoader::new(config.locales_path);
|
||||
Some(I18nBundle::new(locale, fallback_locale, &loader)?)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let mut backend = BackendFactory::create(BackendType::Tui)?;
|
||||
let results = if let Some(ref bundle) = i18n_bundle {
|
||||
form_parser::execute_with_backend_i18n_with_defaults(
|
||||
form,
|
||||
backend.as_mut(),
|
||||
Some(bundle),
|
||||
base_dir,
|
||||
initial_values,
|
||||
)
|
||||
.await?
|
||||
} else {
|
||||
form_parser::execute_with_backend_i18n_with_defaults(
|
||||
form,
|
||||
backend.as_mut(),
|
||||
None,
|
||||
base_dir,
|
||||
initial_values,
|
||||
)
|
||||
.await?
|
||||
};
|
||||
|
||||
print_results(&results, format, output_file)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn print_results(
|
||||
results: &HashMap<String, serde_json::Value>,
|
||||
format: &str,
|
||||
output_file: &Option<PathBuf>,
|
||||
) -> Result<()> {
|
||||
let output = helpers::format_results(results, format)?;
|
||||
|
||||
if let Some(path) = output_file {
|
||||
fs::write(path, &output).map_err(Error::io)?;
|
||||
} else {
|
||||
println!("{}", output);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn nickel_to_form_cmd(
|
||||
schema: PathBuf,
|
||||
_current_data: Option<PathBuf>,
|
||||
flatten: bool,
|
||||
groups: bool,
|
||||
fragments: bool,
|
||||
_conditionals: bool, // Conditionals are auto-generated by ContractAnalyzer
|
||||
i18n: bool,
|
||||
output_dir: Option<PathBuf>,
|
||||
) -> Result<()> {
|
||||
// Verify nickel CLI is available
|
||||
NickelCli::verify()?;
|
||||
|
||||
// Extract metadata from schema
|
||||
let metadata = NickelCli::query(schema.as_path(), Some("inputs"))?;
|
||||
|
||||
// Parse into intermediate representation
|
||||
let mut schema_ir = MetadataParser::parse(metadata)?;
|
||||
|
||||
// Step 1: Extract fragment markers from schema source file (if enabled)
|
||||
if fragments {
|
||||
let markers = MetadataParser::extract_fragment_markers_from_source(schema.as_path())?;
|
||||
MetadataParser::apply_fragment_markers(&mut schema_ir, &markers);
|
||||
}
|
||||
|
||||
// Step 2: Generate TOML form(s)
|
||||
let forms_output = if fragments && schema_ir.fields.iter().any(|f| f.fragment_marker.is_some())
|
||||
{
|
||||
// Multi-file output: main form + fragments
|
||||
TomlGenerator::generate_with_fragments(&schema_ir, flatten, groups)?
|
||||
} else {
|
||||
// Single file output
|
||||
let form_def = TomlGenerator::generate(&schema_ir, flatten, groups)?;
|
||||
let mut single_output = HashMap::new();
|
||||
single_output.insert("form.toml".to_string(), form_def);
|
||||
single_output
|
||||
};
|
||||
|
||||
// Determine output directory
|
||||
let output_path = output_dir.unwrap_or_else(|| {
|
||||
if fragments && forms_output.len() > 1 {
|
||||
PathBuf::from("generated")
|
||||
} else {
|
||||
PathBuf::from(".")
|
||||
}
|
||||
});
|
||||
|
||||
// Step 3: Write form files
|
||||
if forms_output.len() == 1 && output_path.as_path() == std::path::Path::new(".") {
|
||||
// Single file to stdout or specified path
|
||||
if let Some((_, form_def)) = forms_output.iter().next() {
|
||||
let toml_output = ::toml::to_string_pretty(form_def)
|
||||
.map_err(|e| Error::validation_failed(e.to_string()))?;
|
||||
println!("{}", toml_output);
|
||||
}
|
||||
} else {
|
||||
// Write multiple files or to directory
|
||||
fs::create_dir_all(&output_path).map_err(Error::io)?;
|
||||
|
||||
for (filename, form_def) in forms_output {
|
||||
let file_path = if filename.starts_with("fragments/") {
|
||||
output_path
|
||||
.join("fragments")
|
||||
.join(filename.strip_prefix("fragments/").unwrap())
|
||||
} else {
|
||||
output_path.join(&filename)
|
||||
};
|
||||
|
||||
fs::create_dir_all(file_path.parent().unwrap()).map_err(Error::io)?;
|
||||
|
||||
let toml_output = ::toml::to_string_pretty(&form_def)
|
||||
.map_err(|e| Error::validation_failed(e.to_string()))?;
|
||||
|
||||
fs::write(&file_path, &toml_output).map_err(Error::io)?;
|
||||
eprintln!(" Generated: {}", file_path.display());
|
||||
}
|
||||
|
||||
println!("✓ Forms generated in {}/", output_path.display());
|
||||
}
|
||||
|
||||
// Step 4: Extract i18n translations (if enabled)
|
||||
if i18n {
|
||||
let i18n_output_dir = output_path.join("locales");
|
||||
let _i18n_mapping = I18nExtractor::extract_and_generate(&schema_ir, &i18n_output_dir)?;
|
||||
|
||||
eprintln!(
|
||||
"✓ i18n translations generated in {}/",
|
||||
i18n_output_dir.display()
|
||||
);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn form_to_nickel_cmd(
|
||||
form: PathBuf,
|
||||
input: PathBuf,
|
||||
output: &Option<PathBuf>,
|
||||
_validate: bool,
|
||||
) -> Result<()> {
|
||||
let form_content = fs::read_to_string(&form).map_err(Error::io)?;
|
||||
let _form_def = form_parser::parse_toml(&form_content)?;
|
||||
|
||||
// Determine input type based on extension
|
||||
let results: HashMap<String, serde_json::Value> = if input.extension().and_then(|s| s.to_str())
|
||||
== Some("ncl.j2")
|
||||
{
|
||||
// Template: would require executing form and rendering template
|
||||
// For now, return error as this requires interactive execution
|
||||
return Err(Error::validation_failed(
|
||||
"Template-based form-to-nickel requires interactive execution. Use .json input instead."
|
||||
));
|
||||
} else if input.extension().and_then(|s| s.to_str()) == Some("json") {
|
||||
// Load pre-computed results from JSON
|
||||
let json_content = fs::read_to_string(&input).map_err(Error::io)?;
|
||||
serde_json::from_str(&json_content).map_err(|e| Error::validation_failed(e.to_string()))?
|
||||
} else {
|
||||
return Err(Error::validation_failed(
|
||||
"Input file must be .json or .ncl.j2",
|
||||
));
|
||||
};
|
||||
|
||||
// For now, provide a placeholder message as full Nickel serialization requires schema
|
||||
let nickel_output = format!(
|
||||
"# Form results (JSON format for now)\n{}",
|
||||
serde_json::to_string_pretty(&results)
|
||||
.map_err(|e| Error::validation_failed(e.to_string()))?
|
||||
);
|
||||
|
||||
// Write output
|
||||
if let Some(path) = output {
|
||||
fs::write(path, &nickel_output).map_err(Error::io)?;
|
||||
println!("Nickel output written to {}", path.display());
|
||||
} else {
|
||||
println!("{}", nickel_output);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn nickel_template_cmd(
|
||||
template: PathBuf,
|
||||
results: PathBuf,
|
||||
output: &Option<PathBuf>,
|
||||
) -> Result<()> {
|
||||
// Load results JSON file
|
||||
let json_content = fs::read_to_string(&results).map_err(Error::io)?;
|
||||
let values: HashMap<String, serde_json::Value> =
|
||||
serde_json::from_str(&json_content).map_err(|e| Error::validation_failed(e.to_string()))?;
|
||||
|
||||
// Load and render template
|
||||
let mut engine = TemplateEngine::new();
|
||||
let nickel_output = engine.render_file(template.as_path(), &values, None)?;
|
||||
|
||||
// Write output
|
||||
if let Some(path) = output {
|
||||
fs::write(path, &nickel_output).map_err(Error::io)?;
|
||||
println!("Template rendered to {}", path.display());
|
||||
} else {
|
||||
println!("{}", nickel_output);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn nickel_roundtrip_cmd(
|
||||
input: PathBuf,
|
||||
form_path: PathBuf,
|
||||
output: PathBuf,
|
||||
ncl_template: Option<PathBuf>,
|
||||
validate: bool,
|
||||
verbose: bool,
|
||||
) -> Result<()> {
|
||||
use typedialog_core::nickel::RoundtripConfig;
|
||||
|
||||
if verbose {
|
||||
eprintln!("Starting Nickel roundtrip workflow with TUI backend");
|
||||
}
|
||||
|
||||
// Create TUI backend
|
||||
let mut backend = BackendFactory::create(BackendType::Tui)?;
|
||||
|
||||
// Create roundtrip config
|
||||
let mut config = RoundtripConfig::with_template(input, form_path, output, ncl_template);
|
||||
config.validate = validate;
|
||||
config.verbose = verbose;
|
||||
|
||||
// Execute roundtrip with TUI backend
|
||||
let result = config.execute_with_backend(backend.as_mut()).await?;
|
||||
|
||||
if verbose {
|
||||
eprintln!("[roundtrip] Generated {} bytes", result.output_nickel.len());
|
||||
}
|
||||
|
||||
// Print summary
|
||||
println!("✓ Roundtrip completed successfully (TUI backend)");
|
||||
println!(" Input fields: {}", result.form_results.len());
|
||||
println!(
|
||||
" Imports preserved: {}",
|
||||
result.input_contracts.imports.len()
|
||||
);
|
||||
println!(
|
||||
" Contracts preserved: {}",
|
||||
result.input_contracts.field_contracts.len()
|
||||
);
|
||||
|
||||
if let Some(passed) = result.validation_passed {
|
||||
if passed {
|
||||
println!(" ✓ Validation: PASSED");
|
||||
} else {
|
||||
println!(" ✗ Validation: FAILED");
|
||||
return Err(Error::validation_failed(
|
||||
"Nickel typecheck failed on output",
|
||||
));
|
||||
commands::execute_form(config, None, &args.format, &args.out, &args.locale).await?;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
crates/typedialog/src/commands/form.rs (Normal file, 186 lines)
@ -0,0 +1,186 @@
|
||||
//! Form execution command implementation
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::fs;
|
||||
use std::path::PathBuf;
|
||||
use typedialog_core::backends::BackendFactory;
|
||||
use typedialog_core::config::TypeDialogConfig;
|
||||
use typedialog_core::i18n::{I18nBundle, LocaleLoader, LocaleResolver};
|
||||
use typedialog_core::nickel::{NickelCli, TemplateEngine};
|
||||
use typedialog_core::{form_parser, helpers, Error, Result};
|
||||
use unic_langid::LanguageIdentifier;
|
||||
|
||||
use super::helpers::{extract_nickel_defaults, flatten_json_object, print_results};
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub async fn execute_form(
|
||||
config: PathBuf,
|
||||
template: Option<PathBuf>,
|
||||
defaults: Option<PathBuf>,
|
||||
format: &str,
|
||||
output_file: &Option<PathBuf>,
|
||||
cli_locale: &Option<String>,
|
||||
redact: bool,
|
||||
encrypt: bool,
|
||||
encryption_backend: String,
|
||||
key_file: Option<PathBuf>,
|
||||
vault_addr: Option<String>,
|
||||
vault_token: Option<String>,
|
||||
vault_key_path: Option<String>,
|
||||
) -> Result<()> {
|
||||
let toml_content = fs::read_to_string(&config).map_err(Error::io)?;
|
||||
let form = form_parser::parse_toml(&toml_content)?;
|
||||
let base_dir = config.parent().unwrap_or_else(|| std::path::Path::new("."));
|
||||
|
||||
// Load default values from JSON or .ncl file if provided
|
||||
let initial_values = if let Some(defaults_path) = defaults {
|
||||
NickelCli::verify()?;
|
||||
let is_ncl = defaults_path.extension().and_then(|s| s.to_str()) == Some("ncl");
|
||||
|
||||
let defaults_json: HashMap<String, serde_json::Value> = if is_ncl {
|
||||
let value = NickelCli::export(&defaults_path)?;
|
||||
match value {
|
||||
serde_json::Value::Object(map) => {
|
||||
let extracted = extract_nickel_defaults(&map, &form.fields);
|
||||
let flattened = flatten_json_object(&map);
|
||||
let mut combined = extracted;
|
||||
for (k, v) in flattened {
|
||||
combined.entry(k).or_insert(v);
|
||||
}
|
||||
combined
|
||||
}
|
||||
_ => {
|
||||
return Err(Error::validation_failed(
|
||||
"Defaults .ncl must export to a JSON object".to_string(),
|
||||
))
|
||||
}
|
||||
}
|
||||
} else {
|
||||
let defaults_content = fs::read_to_string(&defaults_path).map_err(|e| {
|
||||
Error::validation_failed(format!("Failed to read defaults file: {}", e))
|
||||
})?;
|
||||
let parsed: serde_json::Value =
|
||||
serde_json::from_str(&defaults_content).map_err(|e| {
|
||||
Error::validation_failed(format!("Failed to parse defaults JSON: {}", e))
|
||||
})?;
|
||||
match parsed {
|
||||
serde_json::Value::Object(map) => {
|
||||
let extracted = extract_nickel_defaults(&map, &form.fields);
|
||||
let flattened = flatten_json_object(&map);
|
||||
let mut combined = extracted;
|
||||
for (k, v) in flattened {
|
||||
combined.entry(k).or_insert(v);
|
||||
}
|
||||
combined
|
||||
}
|
||||
_ => {
|
||||
return Err(Error::validation_failed(
|
||||
"Defaults must be a JSON object".to_string(),
|
||||
))
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
if !defaults_json.is_empty() {
|
||||
eprintln!(
|
||||
"[DEBUG] Loaded {} default field values",
|
||||
defaults_json.len()
|
||||
);
|
||||
for key in defaults_json.keys().take(5) {
|
||||
eprintln!("[DEBUG] - {}", key);
|
||||
}
|
||||
if defaults_json.len() > 5 {
|
||||
eprintln!("[DEBUG] ... and {} more", defaults_json.len() - 5);
|
||||
}
|
||||
}
|
||||
Some(defaults_json)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
// Load I18nBundle if needed
|
||||
let i18n_bundle = if form.locale.is_some() || cli_locale.is_some() {
|
||||
let config = TypeDialogConfig::default();
|
||||
let resolver = LocaleResolver::new(config.clone());
|
||||
let form_locale = form.locale.as_deref();
|
||||
let locale = resolver.resolve(cli_locale.as_deref(), form_locale);
|
||||
let fallback_locale: LanguageIdentifier = "en-US"
|
||||
.parse()
|
||||
.map_err(|_| Error::validation_failed("Invalid fallback locale".to_string()))?;
|
||||
let loader = LocaleLoader::new(config.locales_path);
|
||||
Some(I18nBundle::new(locale, fallback_locale, &loader)?)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
// Auto-detect backend
|
||||
let backend_type = BackendFactory::auto_detect();
|
||||
let mut backend = BackendFactory::create(backend_type)?;
|
||||
let form_fields = form.fields.clone();
|
||||
|
||||
// Execute form
|
||||
let results = if let Some(ref bundle) = i18n_bundle {
|
||||
form_parser::execute_with_backend_two_phase_with_defaults(
|
||||
form,
|
||||
backend.as_mut(),
|
||||
Some(bundle),
|
||||
base_dir,
|
||||
initial_values,
|
||||
)
|
||||
.await?
|
||||
} else {
|
||||
form_parser::execute_with_backend_two_phase_with_defaults(
|
||||
form,
|
||||
backend.as_mut(),
|
||||
None,
|
||||
base_dir,
|
||||
initial_values,
|
||||
)
|
||||
.await?
|
||||
};
|
||||
|
||||
// Generate output
|
||||
if let Some(template_path) = template {
|
||||
let mut engine = TemplateEngine::new();
|
||||
let nickel_output = engine.render_file(template_path.as_path(), &results, None)?;
|
||||
|
||||
if let Some(path) = output_file {
|
||||
fs::write(path, &nickel_output).map_err(Error::io)?;
|
||||
} else {
|
||||
println!("{}", nickel_output);
|
||||
}
|
||||
} else {
|
||||
let encryption_context = if redact {
|
||||
helpers::EncryptionContext::redact_only()
|
||||
} else if encrypt {
|
||||
let mut backend_config = std::collections::HashMap::new();
|
||||
if let Some(key) = key_file {
|
||||
backend_config.insert("key_file".to_string(), key.to_string_lossy().to_string());
|
||||
}
|
||||
if let Some(addr) = vault_addr {
|
||||
backend_config.insert("vault_addr".to_string(), addr);
|
||||
}
|
||||
if let Some(token) = vault_token {
|
||||
backend_config.insert("vault_token".to_string(), token);
|
||||
}
|
||||
if let Some(path) = vault_key_path {
|
||||
backend_config.insert("vault_key_path".to_string(), path);
|
||||
}
|
||||
helpers::EncryptionContext::encrypt_with(&encryption_backend, backend_config)
|
||||
} else {
|
||||
helpers::EncryptionContext::noop()
|
||||
};
|
||||
|
||||
let config = TypeDialogConfig::default();
|
||||
print_results(
|
||||
&results,
|
||||
format,
|
||||
output_file,
|
||||
&form_fields,
|
||||
&encryption_context,
|
||||
config.encryption.as_ref(),
|
||||
)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
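Illustrative sketch (not part of this commit): the flag precedence used above when post-processing results, with --redact taking priority over --encrypt and a no-op context otherwise; the backend name and key path mentioned in the comments are placeholders.
// Sketch only: mirrors the precedence above (--redact > --encrypt > plain output).
#[allow(dead_code)]
fn encryption_context_sketch(
    redact: bool,
    encrypt: bool,
    encryption_backend: String,
    key_file: Option<PathBuf>,
) -> helpers::EncryptionContext {
    if redact {
        helpers::EncryptionContext::redact_only()
    } else if encrypt {
        let mut backend_config = HashMap::new();
        if let Some(key) = key_file {
            // e.g. --encryption-backend age --key-file keys.txt (hypothetical values)
            backend_config.insert("key_file".to_string(), key.to_string_lossy().to_string());
        }
        helpers::EncryptionContext::encrypt_with(&encryption_backend, backend_config)
    } else {
        helpers::EncryptionContext::noop()
    }
}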
||||
crates/typedialog/src/commands/helpers.rs (Normal file, 105 lines)
@ -0,0 +1,105 @@
|
||||
//! Helper functions for command implementations
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::path::PathBuf;
|
||||
use typedialog_core::{form_parser, helpers, Error, Result};
|
||||
|
||||
/// Print results with encryption/redaction support
|
||||
pub fn print_results(
|
||||
results: &HashMap<String, serde_json::Value>,
|
||||
format: &str,
|
||||
output_file: &Option<PathBuf>,
|
||||
fields: &[form_parser::FieldDefinition],
|
||||
encryption_context: &helpers::EncryptionContext,
|
||||
global_config: Option<&typedialog_core::config::EncryptionDefaults>,
|
||||
) -> Result<()> {
|
||||
let output =
|
||||
helpers::format_results_secure(results, fields, format, encryption_context, global_config)?;
|
||||
|
||||
if let Some(path) = output_file {
|
||||
std::fs::write(path, &output).map_err(Error::io)?;
|
||||
} else {
|
||||
println!("{}", output);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Recursively flatten nested JSON objects into a single-level map
|
||||
/// Converts {"a": {"b": {"c": "value"}}} to {"a_b_c": "value"}
|
||||
pub fn flatten_json_object(
|
||||
obj: &serde_json::Map<String, serde_json::Value>,
|
||||
) -> HashMap<String, serde_json::Value> {
|
||||
let mut result = HashMap::new();
|
||||
flatten_recursive(obj, "", &mut result);
|
||||
result
|
||||
}
|
||||
|
||||
fn flatten_recursive(
|
||||
obj: &serde_json::Map<String, serde_json::Value>,
|
||||
prefix: &str,
|
||||
result: &mut HashMap<String, serde_json::Value>,
|
||||
) {
|
||||
for (key, value) in obj.iter() {
|
||||
let new_key = if prefix.is_empty() {
|
||||
key.clone()
|
||||
} else {
|
||||
format!("{}_{}", prefix, key)
|
||||
};
|
||||
|
||||
match value {
|
||||
serde_json::Value::Object(nested) => {
|
||||
flatten_recursive(nested, &new_key, result);
|
||||
}
|
||||
serde_json::Value::Array(arr) => {
|
||||
result.insert(new_key, serde_json::Value::Array(arr.clone()));
|
||||
}
|
||||
_ => {
|
||||
result.insert(new_key, value.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
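A minimal test-style sketch (not part of this commit) of the flattening behaviour documented above; the nested keys are made up for illustration.
#[cfg(test)]
mod flatten_sketch_tests {
    use super::*;

    #[test]
    fn nested_object_flattens_with_underscore_keys() {
        // Hypothetical input resembling a Nickel export: {"a": {"b": {"c": "value"}}, "tags": ["x"]}
        let value = serde_json::json!({
            "a": { "b": { "c": "value" } },
            "tags": ["x"]
        });
        let obj = value.as_object().expect("sample is an object");

        let flat = flatten_json_object(obj);

        // Nested scalars are keyed by the underscore-joined path...
        assert_eq!(flat.get("a_b_c"), Some(&serde_json::json!("value")));
        // ...while arrays are kept as-is under their (possibly prefixed) key.
        assert_eq!(flat.get("tags"), Some(&serde_json::json!(["x"])));
    }
}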
||||
|
||||
/// Extract defaults from Nickel export using schema-driven approach
|
||||
pub fn extract_nickel_defaults(
|
||||
obj: &serde_json::Map<String, serde_json::Value>,
|
||||
form_fields: &[form_parser::FieldDefinition],
|
||||
) -> HashMap<String, serde_json::Value> {
|
||||
use typedialog_core::nickel::{DefaultsExtractor, FieldMapper, NickelFieldIR, NickelSchemaIR};
|
||||
|
||||
let mut schema_fields = Vec::new();
|
||||
for field in form_fields {
|
||||
if let Some(nickel_path) = &field.nickel_path {
|
||||
schema_fields.push(NickelFieldIR {
|
||||
path: nickel_path.clone(),
|
||||
flat_name: nickel_path.join("-"),
|
||||
alias: field.nickel_alias.clone(),
|
||||
nickel_type: typedialog_core::nickel::NickelType::String,
|
||||
doc: None,
|
||||
default: None,
|
||||
optional: false,
|
||||
contract: None,
|
||||
contract_call: None,
|
||||
group: None,
|
||||
fragment_marker: None,
|
||||
is_array_of_records: false,
|
||||
array_element_fields: None,
|
||||
encryption_metadata: None,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if !schema_fields.is_empty() {
|
||||
let schema = NickelSchemaIR {
|
||||
name: "form".to_string(),
|
||||
description: None,
|
||||
fields: schema_fields,
|
||||
};
|
||||
if let Ok(mapper) = FieldMapper::from_schema(&schema) {
|
||||
return DefaultsExtractor::extract(&serde_json::Value::Object(obj.clone()), &mapper);
|
||||
}
|
||||
}
|
||||
|
||||
flatten_json_object(obj)
|
||||
}
|
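Another small sketch (not part of this commit): with an empty field slice there is no nickel_path to map, so the extractor is expected to fall back to plain flattening; the sample export is hypothetical.
#[cfg(test)]
mod defaults_fallback_sketch_tests {
    use super::*;

    #[test]
    fn falls_back_to_flattening_without_nickel_paths() {
        // No form fields carry a nickel_path, so schema-driven extraction has
        // nothing to map and the flattened keys should come back instead.
        let value = serde_json::json!({ "server": { "host": "localhost" } });
        let obj = value.as_object().expect("sample is an object");

        let defaults = extract_nickel_defaults(obj, &[]);

        assert_eq!(defaults.get("server_host"), Some(&serde_json::json!("localhost")));
    }
}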
||||
crates/typedialog/src/commands/mod.rs (Normal file, 16 lines)
@ -0,0 +1,16 @@
|
||||
//! Command implementations for typedialog CLI
|
||||
//!
|
||||
//! Separates business logic from CLI dispatch to reduce coupling.
|
||||
|
||||
pub mod form;
|
||||
pub mod helpers;
|
||||
pub mod nickel;
|
||||
pub mod prompts;
|
||||
|
||||
// Re-export command functions
|
||||
pub use form::execute_form;
|
||||
pub use nickel::{
|
||||
form_to_nickel as form_to_nickel_cmd, nickel_roundtrip as nickel_roundtrip_cmd,
|
||||
nickel_template as nickel_template_cmd, nickel_to_form as nickel_to_form_cmd,
|
||||
};
|
||||
pub use prompts::print_result;
|
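Illustrative sketch (not part of this commit) of the dispatch pattern this split enables; `SketchCommand` is a placeholder enum, not the real clap `Commands` from main.rs.
// Sketch only: the CLI layer destructures arguments and forwards them here,
// keeping business logic out of the dispatch match.
#[allow(dead_code)]
enum SketchCommand {
    NickelTemplate {
        template: std::path::PathBuf,
        results: std::path::PathBuf,
    },
}

#[allow(dead_code)]
fn dispatch_sketch(
    cmd: SketchCommand,
    out: Option<std::path::PathBuf>,
) -> typedialog_core::Result<()> {
    match cmd {
        SketchCommand::NickelTemplate { template, results } => {
            nickel_template_cmd(template, results, &out)
        }
    }
}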
||||
crates/typedialog/src/commands/nickel.rs (Normal file, 201 lines)
@ -0,0 +1,201 @@
|
||||
//! Nickel-related command implementations
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::fs;
|
||||
use std::path::PathBuf;
|
||||
use typedialog_core::nickel::{
|
||||
I18nExtractor, MetadataParser, NickelCli, TemplateEngine, TomlGenerator,
|
||||
};
|
||||
use typedialog_core::{form_parser, Error, Result};
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn nickel_to_form(
|
||||
schema: PathBuf,
|
||||
_current_data: Option<PathBuf>,
|
||||
flatten: bool,
|
||||
groups: bool,
|
||||
fragments: bool,
|
||||
_conditionals: bool,
|
||||
i18n: bool,
|
||||
output_dir: Option<PathBuf>,
|
||||
) -> Result<()> {
|
||||
NickelCli::verify()?;
|
||||
let metadata = NickelCli::query(schema.as_path(), Some("inputs"))?;
|
||||
let mut schema_ir = MetadataParser::parse(metadata)?;
|
||||
|
||||
if fragments {
|
||||
let markers = MetadataParser::extract_fragment_markers_from_source(schema.as_path())?;
|
||||
MetadataParser::apply_fragment_markers(&mut schema_ir, &markers);
|
||||
}
|
||||
|
||||
let forms_output = if fragments && schema_ir.fields.iter().any(|f| f.fragment_marker.is_some())
|
||||
{
|
||||
TomlGenerator::generate_with_fragments(&schema_ir, flatten, groups)?
|
||||
} else {
|
||||
let form_def = TomlGenerator::generate(&schema_ir, flatten, groups)?;
|
||||
let mut single_output = HashMap::new();
|
||||
single_output.insert("form.toml".to_string(), form_def);
|
||||
single_output
|
||||
};
|
||||
|
||||
let output_path = output_dir.unwrap_or_else(|| {
|
||||
if fragments && forms_output.len() > 1 {
|
||||
PathBuf::from("generated")
|
||||
} else {
|
||||
PathBuf::from(".")
|
||||
}
|
||||
});
|
||||
|
||||
if forms_output.len() == 1 && output_path.as_path() == std::path::Path::new(".") {
|
||||
if let Some((_, form_def)) = forms_output.iter().next() {
|
||||
let toml_output = ::toml::to_string_pretty(form_def)
|
||||
.map_err(|e| Error::validation_failed(e.to_string()))?;
|
||||
println!("{}", toml_output);
|
||||
}
|
||||
} else {
|
||||
fs::create_dir_all(&output_path).map_err(Error::io)?;
|
||||
|
||||
for (filename, form_def) in forms_output {
|
||||
let file_path = if filename.starts_with("fragments/") {
|
||||
output_path
|
||||
.join("fragments")
|
||||
.join(filename.strip_prefix("fragments/").unwrap())
|
||||
} else {
|
||||
output_path.join(&filename)
|
||||
};
|
||||
|
||||
fs::create_dir_all(file_path.parent().unwrap()).map_err(Error::io)?;
|
||||
|
||||
let toml_output = ::toml::to_string_pretty(&form_def)
|
||||
.map_err(|e| Error::validation_failed(e.to_string()))?;
|
||||
|
||||
fs::write(&file_path, &toml_output).map_err(Error::io)?;
|
||||
eprintln!(" Generated: {}", file_path.display());
|
||||
}
|
||||
|
||||
println!("✓ Forms generated in {}/", output_path.display());
|
||||
}
|
||||
|
||||
if i18n {
|
||||
let i18n_output_dir = output_path.join("locales");
|
||||
let _i18n_mapping = I18nExtractor::extract_and_generate(&schema_ir, &i18n_output_dir)?;
|
||||
|
||||
eprintln!(
|
||||
"✓ i18n translations generated in {}/",
|
||||
i18n_output_dir.display()
|
||||
);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
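Illustrative usage sketch (not part of this commit); `schema.ncl` and `generated/` are placeholder paths.
#[allow(dead_code)]
fn nickel_to_form_usage_sketch() -> Result<()> {
    // Query `inputs` metadata from schema.ncl and write the generated form(s),
    // fragments and locales under ./generated (hypothetical paths).
    nickel_to_form(
        PathBuf::from("schema.ncl"),
        None,  // no current-data overlay
        false, // keep hierarchical field names (no flattening)
        true,  // group fields in the generated TOML
        true,  // honour fragment markers if the schema defines any
        false, // conditionals are auto-generated by ContractAnalyzer
        true,  // also extract i18n translations into locales/
        Some(PathBuf::from("generated")),
    )
}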
||||
|
||||
pub fn form_to_nickel(
|
||||
form: PathBuf,
|
||||
input: PathBuf,
|
||||
output: &Option<PathBuf>,
|
||||
_validate: bool,
|
||||
) -> Result<()> {
|
||||
let form_content = fs::read_to_string(&form).map_err(Error::io)?;
|
||||
let _form_def = form_parser::parse_toml(&form_content)?;
|
||||
|
||||
let results: HashMap<String, serde_json::Value> = if input.extension().and_then(|s| s.to_str())
|
||||
== Some("ncl.j2")
|
||||
{
|
||||
return Err(Error::validation_failed(
|
||||
"Template-based form-to-nickel requires interactive execution. Use .json input instead."
|
||||
));
|
||||
} else if input.extension().and_then(|s| s.to_str()) == Some("json") {
|
||||
let json_content = fs::read_to_string(&input).map_err(Error::io)?;
|
||||
serde_json::from_str(&json_content).map_err(|e| Error::validation_failed(e.to_string()))?
|
||||
} else {
|
||||
return Err(Error::validation_failed(
|
||||
"Input file must be .json or .ncl.j2",
|
||||
));
|
||||
};
|
||||
|
||||
let nickel_output = format!(
|
||||
"# Form results (JSON format for now)\n{}",
|
||||
serde_json::to_string_pretty(&results)
|
||||
.map_err(|e| Error::validation_failed(e.to_string()))?
|
||||
);
|
||||
|
||||
if let Some(path) = output {
|
||||
fs::write(path, &nickel_output).map_err(Error::io)?;
|
||||
println!("Nickel output written to {}", path.display());
|
||||
} else {
|
||||
println!("{}", nickel_output);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn nickel_template(
|
||||
template: PathBuf,
|
||||
results: PathBuf,
|
||||
output: &Option<PathBuf>,
|
||||
) -> Result<()> {
|
||||
let json_content = fs::read_to_string(&results).map_err(Error::io)?;
|
||||
let values: HashMap<String, serde_json::Value> =
|
||||
serde_json::from_str(&json_content).map_err(|e| Error::validation_failed(e.to_string()))?;
|
||||
|
||||
let mut engine = TemplateEngine::new();
|
||||
let nickel_output = engine.render_file(template.as_path(), &values, None)?;
|
||||
|
||||
if let Some(path) = output {
|
||||
fs::write(path, &nickel_output).map_err(Error::io)?;
|
||||
println!("Template rendered to {}", path.display());
|
||||
} else {
|
||||
println!("{}", nickel_output);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn nickel_roundtrip(
|
||||
input: PathBuf,
|
||||
form: PathBuf,
|
||||
output: PathBuf,
|
||||
ncl_template: Option<PathBuf>,
|
||||
validate: bool,
|
||||
verbose: bool,
|
||||
) -> Result<()> {
|
||||
use typedialog_core::nickel::RoundtripConfig;
|
||||
|
||||
if verbose {
|
||||
eprintln!("Starting Nickel roundtrip workflow");
|
||||
}
|
||||
|
||||
let mut config = RoundtripConfig::with_template(input, form, output, ncl_template);
|
||||
config.validate = validate;
|
||||
config.verbose = verbose;
|
||||
|
||||
let result = config.execute()?;
|
||||
|
||||
if verbose {
|
||||
eprintln!("[roundtrip] Generated {} bytes", result.output_nickel.len());
|
||||
}
|
||||
|
||||
println!("✓ Roundtrip completed successfully");
|
||||
println!(" Input fields: {}", result.form_results.len());
|
||||
println!(
|
||||
" Imports preserved: {}",
|
||||
result.input_contracts.imports.len()
|
||||
);
|
||||
println!(
|
||||
" Contracts preserved: {}",
|
||||
result.input_contracts.field_contracts.len()
|
||||
);
|
||||
|
||||
if let Some(passed) = result.validation_passed {
|
||||
if passed {
|
||||
println!(" ✓ Validation: PASSED");
|
||||
} else {
|
||||
println!(" ✗ Validation: FAILED");
|
||||
return Err(Error::validation_failed(
|
||||
"Nickel typecheck failed on output",
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
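Illustrative usage sketch (not part of this commit); all four paths are placeholders.
#[allow(dead_code)]
fn nickel_roundtrip_usage_sketch() -> Result<()> {
    nickel_roundtrip(
        PathBuf::from("config.ncl"),          // existing Nickel file to re-edit
        PathBuf::from("form.toml"),           // form used to collect new values
        PathBuf::from("config.new.ncl"),      // regenerated output
        Some(PathBuf::from("config.ncl.j2")), // optional template for rendering
        true,                                 // run Nickel typecheck on the output
        false,                                // quiet
    )
}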
||||
crates/typedialog/src/commands/prompts.rs (Normal file, 36 lines)
@ -0,0 +1,36 @@
|
||||
//! Simple prompt command implementations
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::path::PathBuf;
|
||||
use typedialog_core::{Error, Result};
|
||||
|
||||
/// Print a single result value in the requested format
|
||||
pub fn print_result(
|
||||
key: &str,
|
||||
value: &str,
|
||||
format: &str,
|
||||
output_file: &Option<PathBuf>,
|
||||
) -> Result<()> {
|
||||
let output = match format {
|
||||
"json" => {
|
||||
let mut map = HashMap::new();
|
||||
map.insert(key, value);
|
||||
serde_json::to_string_pretty(&map).unwrap_or_default()
|
||||
}
|
||||
"yaml" => {
|
||||
format!("{}: {}", key, value)
|
||||
}
|
||||
"toml" => {
|
||||
format!("{} = \"{}\"", key, value.escape_default())
|
||||
}
|
||||
_ => value.to_string(),
|
||||
};
|
||||
|
||||
if let Some(path) = output_file {
|
||||
std::fs::write(path, &output).map_err(Error::io)?;
|
||||
} else {
|
||||
println!("{}", output);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
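Illustrative usage sketch (not part of this commit) exercising each format branch above with a hypothetical key/value pair; output goes to stdout because no output file is given.
#[allow(dead_code)]
fn print_result_usage_sketch() -> Result<()> {
    print_result("value", "hello", "json", &None)?; // pretty-printed one-entry JSON map
    print_result("value", "hello", "yaml", &None)?; // value: hello
    print_result("value", "hello", "toml", &None)?; // value = "hello"
    print_result("value", "hello", "text", &None)?; // raw value (any other format string)
    Ok(())
}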
||||
@ -6,22 +6,14 @@
|
||||
//! A powerful CLI tool for creating interactive forms and prompts using multiple backends.
|
||||
//! Works with piped input for batch processing and scripts.
|
||||
|
||||
mod commands;
|
||||
|
||||
use clap::{Parser, Subcommand};
|
||||
use serde_json::json;
|
||||
use std::collections::HashMap;
|
||||
use std::fs;
|
||||
use std::path::PathBuf;
|
||||
use typedialog_core::backends::BackendFactory;
|
||||
use typedialog_core::cli_common;
|
||||
use typedialog_core::config::{load_backend_config, TypeDialogConfig};
|
||||
use typedialog_core::helpers;
|
||||
use typedialog_core::i18n::{I18nBundle, LocaleLoader, LocaleResolver};
|
||||
use typedialog_core::nickel::{
|
||||
DefaultsExtractor, FieldMapper, I18nExtractor, MetadataParser, NickelCli, NickelFieldIR,
|
||||
NickelSchemaIR, TemplateEngine, TomlGenerator,
|
||||
};
|
||||
use typedialog_core::{form_parser, prompts, Error, Result};
|
||||
use unic_langid::LanguageIdentifier;
|
||||
use typedialog_core::{prompts, Result};
|
||||
|
||||
#[derive(Parser)]
|
||||
#[command(
|
||||
@ -326,12 +318,12 @@ async fn main() -> Result<()> {
|
||||
placeholder,
|
||||
} => {
|
||||
let result = prompts::text(&prompt, default.as_deref(), placeholder.as_deref())?;
|
||||
print_result("value", &result, &cli.format, &cli.out)?;
|
||||
commands::print_result("value", &result, &cli.format, &cli.out)?;
|
||||
}
|
||||
|
||||
Commands::Confirm { prompt, default } => {
|
||||
let result = prompts::confirm(&prompt, default, None)?;
|
||||
print_result("value", &result.to_string(), &cli.format, &cli.out)?;
|
||||
commands::print_result("value", &result.to_string(), &cli.format, &cli.out)?;
|
||||
}
|
||||
|
||||
Commands::Select {
|
||||
@ -341,7 +333,7 @@ async fn main() -> Result<()> {
|
||||
vim_mode,
|
||||
} => {
|
||||
let result = prompts::select(&prompt, options, page_size, vim_mode)?;
|
||||
print_result("value", &result, &cli.format, &cli.out)?;
|
||||
commands::print_result("value", &result, &cli.format, &cli.out)?;
|
||||
}
|
||||
|
||||
Commands::MultiSelect {
|
||||
@ -352,7 +344,7 @@ async fn main() -> Result<()> {
|
||||
} => {
|
||||
let results = prompts::multi_select(&prompt, options, page_size, vim_mode)?;
|
||||
let output = json!(results);
|
||||
print_result("values", &output.to_string(), &cli.format, &cli.out)?;
|
||||
commands::print_result("values", &output.to_string(), &cli.format, &cli.out)?;
|
||||
}
|
||||
|
||||
Commands::Password {
|
||||
@ -360,7 +352,7 @@ async fn main() -> Result<()> {
|
||||
with_toggle,
|
||||
} => {
|
||||
let result = prompts::password(&prompt, with_toggle)?;
|
||||
print_result("value", &result, &cli.format, &cli.out)?;
|
||||
commands::print_result("value", &result, &cli.format, &cli.out)?;
|
||||
}
|
||||
|
||||
Commands::Custom {
|
||||
@ -369,7 +361,7 @@ async fn main() -> Result<()> {
|
||||
default,
|
||||
} => {
|
||||
let result = prompts::custom(&prompt, &type_name, default.as_deref())?;
|
||||
print_result("value", &result, &cli.format, &cli.out)?;
|
||||
commands::print_result("value", &result, &cli.format, &cli.out)?;
|
||||
}
|
||||
|
||||
Commands::Editor {
|
||||
@ -378,7 +370,7 @@ async fn main() -> Result<()> {
|
||||
default,
|
||||
} => {
|
||||
let result = prompts::editor(&prompt, file_extension.as_deref(), default.as_deref())?;
|
||||
print_result("value", &result, &cli.format, &cli.out)?;
|
||||
commands::print_result("value", &result, &cli.format, &cli.out)?;
|
||||
}
|
||||
|
||||
Commands::Date {
|
||||
@ -395,7 +387,7 @@ async fn main() -> Result<()> {
|
||||
max_date.as_deref(),
|
||||
&week_start,
|
||||
)?;
|
||||
print_result("value", &result, &cli.format, &cli.out)?;
|
||||
commands::print_result("value", &result, &cli.format, &cli.out)?;
|
||||
}
|
||||
|
||||
Commands::Form {
|
||||
@ -410,7 +402,7 @@ async fn main() -> Result<()> {
|
||||
vault_token,
|
||||
vault_key_path,
|
||||
} => {
|
||||
execute_form(
|
||||
commands::execute_form(
|
||||
config,
|
||||
template,
|
||||
defaults,
|
||||
@ -438,7 +430,7 @@ async fn main() -> Result<()> {
|
||||
i18n,
|
||||
output,
|
||||
} => {
|
||||
nickel_to_form_cmd(
|
||||
commands::nickel_to_form_cmd(
|
||||
schema,
|
||||
current_data,
|
||||
flatten,
|
||||
@ -455,11 +447,11 @@ async fn main() -> Result<()> {
|
||||
input,
|
||||
validate,
|
||||
} => {
|
||||
form_to_nickel_cmd(form, input, &cli.out, validate)?;
|
||||
commands::form_to_nickel_cmd(form, input, &cli.out, validate)?;
|
||||
}
|
||||
|
||||
Commands::NickelTemplate { template, results } => {
|
||||
nickel_template_cmd(template, results, &cli.out)?;
|
||||
commands::nickel_template_cmd(template, results, &cli.out)?;
|
||||
}
|
||||
|
||||
Commands::NickelRoundtrip {
|
||||
@ -470,552 +462,14 @@ async fn main() -> Result<()> {
|
||||
no_validate,
|
||||
verbose,
|
||||
} => {
|
||||
nickel_roundtrip_cmd(input, form, output, ncl_template, !no_validate, verbose)?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Recursively flatten nested JSON objects into a single-level map
|
||||
/// Converts {"a": {"b": {"c": "value"}}} to {"a_b_c": "value"}
|
||||
fn flatten_json_object(
|
||||
obj: &serde_json::Map<String, serde_json::Value>,
|
||||
) -> HashMap<String, serde_json::Value> {
|
||||
let mut result = HashMap::new();
|
||||
flatten_recursive(obj, "", &mut result);
|
||||
result
|
||||
}
|
||||
|
||||
fn flatten_recursive(
|
||||
obj: &serde_json::Map<String, serde_json::Value>,
|
||||
prefix: &str,
|
||||
result: &mut HashMap<String, serde_json::Value>,
|
||||
) {
|
||||
for (key, value) in obj.iter() {
|
||||
let new_key = if prefix.is_empty() {
|
||||
key.clone()
|
||||
} else {
|
||||
format!("{}_{}", prefix, key)
|
||||
};
|
||||
|
||||
match value {
|
||||
serde_json::Value::Object(nested) => {
|
||||
// Recursively flatten nested objects
|
||||
flatten_recursive(nested, &new_key, result);
|
||||
}
|
||||
serde_json::Value::Array(arr) => {
|
||||
// For arrays, just store them as-is with their key
|
||||
result.insert(new_key, serde_json::Value::Array(arr.clone()));
|
||||
}
|
||||
_ => {
|
||||
// Keep primitive values (string, number, bool, null)
|
||||
result.insert(new_key, value.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Extract defaults from Nickel export using schema-driven approach
|
||||
///
|
||||
/// Builds a minimal schema from the form fields to enable DefaultsExtractor,
|
||||
/// which provides generic, maintainable default extraction.
|
||||
fn extract_nickel_defaults(
|
||||
obj: &serde_json::Map<String, serde_json::Value>,
|
||||
form_fields: &[form_parser::FieldDefinition],
|
||||
) -> HashMap<String, serde_json::Value> {
|
||||
// Build a minimal schema from form fields that have nickel_path
|
||||
let mut schema_fields = Vec::new();
|
||||
for field in form_fields {
|
||||
if let Some(nickel_path) = &field.nickel_path {
|
||||
schema_fields.push(NickelFieldIR {
|
||||
path: nickel_path.clone(),
|
||||
flat_name: nickel_path.join("-"),
|
||||
alias: field.nickel_alias.clone(),
|
||||
nickel_type: typedialog_core::nickel::NickelType::String, // Type doesn't matter for extraction
|
||||
doc: None,
|
||||
default: None,
|
||||
optional: false,
|
||||
contract: None,
|
||||
contract_call: None,
|
||||
group: None,
|
||||
fragment_marker: None,
|
||||
is_array_of_records: false,
|
||||
array_element_fields: None,
|
||||
encryption_metadata: None,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// If we have schema fields, use DefaultsExtractor
|
||||
if !schema_fields.is_empty() {
|
||||
let schema = NickelSchemaIR {
|
||||
name: "form".to_string(),
|
||||
description: None,
|
||||
fields: schema_fields,
|
||||
};
|
||||
if let Ok(mapper) = FieldMapper::from_schema(&schema) {
|
||||
return DefaultsExtractor::extract(&serde_json::Value::Object(obj.clone()), &mapper);
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback: flatten everything if schema-driven extraction fails
|
||||
flatten_json_object(obj)
|
||||
}
|
||||
|
||||
async fn execute_form(
|
||||
config: PathBuf,
|
||||
template: Option<PathBuf>,
|
||||
defaults: Option<PathBuf>,
|
||||
format: &str,
|
||||
output_file: &Option<PathBuf>,
|
||||
cli_locale: &Option<String>,
|
||||
redact: bool,
|
||||
encrypt: bool,
|
||||
encryption_backend: String,
|
||||
key_file: Option<PathBuf>,
|
||||
vault_addr: Option<String>,
|
||||
vault_token: Option<String>,
|
||||
vault_key_path: Option<String>,
|
||||
) -> Result<()> {
|
||||
let toml_content = fs::read_to_string(&config).map_err(Error::io)?;
|
||||
|
||||
let form = form_parser::parse_toml(&toml_content)?;
|
||||
|
||||
// Extract base directory for resolving relative paths in includes
|
||||
let base_dir = config.parent().unwrap_or_else(|| std::path::Path::new("."));
|
||||
|
||||
// Note: migrate_to_elements() and expand_includes() are handled internally
|
||||
// by execute_with_backend_two_phase_with_defaults()
|
||||
|
||||
// Load default values from JSON or .ncl file if provided
|
||||
let initial_values = if let Some(defaults_path) = defaults {
|
||||
use typedialog_core::nickel::NickelCli;
|
||||
|
||||
let is_ncl = defaults_path.extension().and_then(|s| s.to_str()) == Some("ncl");
|
||||
|
||||
let defaults_json: HashMap<String, serde_json::Value> = if is_ncl {
|
||||
// Convert .ncl to JSON using nickel export
|
||||
NickelCli::verify()?;
|
||||
let value = NickelCli::export(&defaults_path)?;
|
||||
match value {
|
||||
serde_json::Value::Object(map) => {
|
||||
// Use schema-driven extraction with form fields, fallback to flattening
|
||||
let extracted = extract_nickel_defaults(&map, &form.fields);
|
||||
// Also flatten to catch any fields not in the form definition
|
||||
let flattened = flatten_json_object(&map);
|
||||
// Merge: extracted values + flattened fill gaps
|
||||
let mut combined = extracted;
|
||||
for (k, v) in flattened {
|
||||
combined.entry(k).or_insert(v);
|
||||
}
|
||||
combined
|
||||
}
|
||||
_ => {
|
||||
return Err(Error::validation_failed(
|
||||
"Defaults .ncl must export to a JSON object".to_string(),
|
||||
))
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Read JSON directly - combine extraction and flatten
|
||||
let defaults_content = fs::read_to_string(&defaults_path).map_err(|e| {
|
||||
Error::validation_failed(format!("Failed to read defaults file: {}", e))
|
||||
})?;
|
||||
let parsed: serde_json::Value =
|
||||
serde_json::from_str(&defaults_content).map_err(|e| {
|
||||
Error::validation_failed(format!("Failed to parse defaults JSON: {}", e))
|
||||
})?;
|
||||
match parsed {
|
||||
serde_json::Value::Object(map) => {
|
||||
let extracted = extract_nickel_defaults(&map, &form.fields);
|
||||
let flattened = flatten_json_object(&map);
|
||||
let mut combined = extracted;
|
||||
for (k, v) in flattened {
|
||||
combined.entry(k).or_insert(v);
|
||||
}
|
||||
combined
|
||||
}
|
||||
_ => {
|
||||
return Err(Error::validation_failed(
|
||||
"Defaults must be a JSON object".to_string(),
|
||||
))
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
if !defaults_json.is_empty() {
|
||||
eprintln!(
|
||||
"[DEBUG] Loaded {} default field values",
|
||||
defaults_json.len()
|
||||
);
|
||||
for key in defaults_json.keys().take(5) {
|
||||
eprintln!("[DEBUG] - {}", key);
|
||||
}
|
||||
if defaults_json.len() > 5 {
|
||||
eprintln!("[DEBUG] ... and {} more", defaults_json.len() - 5);
|
||||
}
|
||||
}
|
||||
Some(defaults_json)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
// Load I18nBundle if needed
|
||||
let i18n_bundle = if form.locale.is_some() || cli_locale.is_some() {
|
||||
// Resolve locale: CLI flag > form locale > env var > default
|
||||
let config = TypeDialogConfig::default();
|
||||
let resolver = LocaleResolver::new(config.clone());
|
||||
let form_locale = form.locale.as_deref();
|
||||
|
||||
// resolve() already returns a LanguageIdentifier
|
||||
let locale = resolver.resolve(cli_locale.as_deref(), form_locale);
|
||||
let fallback_locale: LanguageIdentifier = "en-US"
|
||||
.parse()
|
||||
.map_err(|_| Error::validation_failed("Invalid fallback locale".to_string()))?;
|
||||
|
||||
// Load translations
|
||||
let loader = LocaleLoader::new(config.locales_path);
|
||||
Some(I18nBundle::new(locale, fallback_locale, &loader)?)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
// Auto-detect backend from TYPEDIALOG_BACKEND env var (tui/web/cli, default cli)
|
||||
let backend_type = BackendFactory::auto_detect();
|
||||
let mut backend = BackendFactory::create(backend_type)?;
|
||||
|
||||
// Save form fields before form is consumed (needed for encryption context later)
|
||||
let form_fields = form.fields.clone();
|
||||
|
||||
// Execute form using two-phase execution (selector fields -> dynamic loading -> remaining fields)
|
||||
let results = if let Some(ref bundle) = i18n_bundle {
|
||||
form_parser::execute_with_backend_two_phase_with_defaults(
|
||||
form,
|
||||
backend.as_mut(),
|
||||
Some(bundle),
|
||||
base_dir,
|
||||
initial_values,
|
||||
)
|
||||
.await?
|
||||
} else {
|
||||
form_parser::execute_with_backend_two_phase_with_defaults(
|
||||
form,
|
||||
backend.as_mut(),
|
||||
None,
|
||||
base_dir,
|
||||
initial_values,
|
||||
)
|
||||
.await?
|
||||
};
|
||||
|
||||
// If template provided, generate Nickel output directly
|
||||
if let Some(template_path) = template {
|
||||
// Load and render template with form results
|
||||
let mut engine = TemplateEngine::new();
|
||||
let nickel_output = engine.render_file(template_path.as_path(), &results, None)?;
|
||||
|
||||
// Write output
|
||||
if let Some(path) = output_file {
|
||||
fs::write(path, &nickel_output).map_err(Error::io)?;
|
||||
} else {
|
||||
println!("{}", nickel_output);
|
||||
}
|
||||
} else {
|
||||
// No template: return results in requested format (json, yaml, text)
|
||||
// Build encryption context from CLI flags
|
||||
let encryption_context = if redact {
|
||||
helpers::EncryptionContext::redact_only()
|
||||
} else if encrypt {
|
||||
let mut backend_config = std::collections::HashMap::new();
|
||||
if let Some(key) = key_file {
|
||||
backend_config.insert("key_file".to_string(), key.to_string_lossy().to_string());
|
||||
}
|
||||
if let Some(addr) = vault_addr {
|
||||
backend_config.insert("vault_addr".to_string(), addr);
|
||||
}
|
||||
if let Some(token) = vault_token {
|
||||
backend_config.insert("vault_token".to_string(), token);
|
||||
}
|
||||
if let Some(path) = vault_key_path {
|
||||
backend_config.insert("vault_key_path".to_string(), path);
|
||||
}
|
||||
helpers::EncryptionContext::encrypt_with(&encryption_backend, backend_config)
|
||||
} else {
|
||||
helpers::EncryptionContext::noop()
|
||||
};
|
||||
|
||||
let config = TypeDialogConfig::default();
|
||||
print_results(
|
||||
&results,
|
||||
format,
|
||||
output_file,
|
||||
&form_fields,
|
||||
&encryption_context,
|
||||
config.encryption.as_ref(),
|
||||
)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn print_result(key: &str, value: &str, format: &str, output_file: &Option<PathBuf>) -> Result<()> {
|
||||
let output = match format {
|
||||
"json" => {
|
||||
let mut map = HashMap::new();
|
||||
map.insert(key, value);
|
||||
serde_json::to_string_pretty(&map).unwrap_or_default()
|
||||
}
|
||||
"yaml" => {
|
||||
format!("{}: {}", key, value)
|
||||
}
|
||||
"toml" => {
|
||||
format!("{} = \"{}\"", key, value.escape_default())
|
||||
}
|
||||
_ => value.to_string(),
|
||||
};
|
||||
|
||||
if let Some(path) = output_file {
|
||||
fs::write(path, &output).map_err(Error::io)?;
|
||||
} else {
|
||||
println!("{}", output);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn print_results(
|
||||
results: &HashMap<String, serde_json::Value>,
|
||||
format: &str,
|
||||
output_file: &Option<PathBuf>,
|
||||
fields: &[form_parser::FieldDefinition],
|
||||
encryption_context: &helpers::EncryptionContext,
|
||||
global_config: Option<&typedialog_core::config::EncryptionDefaults>,
|
||||
) -> Result<()> {
|
||||
let output =
|
||||
helpers::format_results_secure(results, fields, format, encryption_context, global_config)?;
|
||||
|
||||
if let Some(path) = output_file {
|
||||
fs::write(path, &output).map_err(Error::io)?;
|
||||
} else {
|
||||
println!("{}", output);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn nickel_to_form_cmd(
|
||||
schema: PathBuf,
|
||||
_current_data: Option<PathBuf>,
|
||||
flatten: bool,
|
||||
groups: bool,
|
||||
fragments: bool,
|
||||
_conditionals: bool, // Conditionals are auto-generated by ContractAnalyzer in TomlGenerator
|
||||
i18n: bool,
|
||||
output_dir: Option<PathBuf>,
|
||||
) -> Result<()> {
|
||||
// Verify nickel CLI is available
|
||||
NickelCli::verify()?;
|
||||
|
||||
// Extract metadata from schema
|
||||
let metadata = NickelCli::query(schema.as_path(), Some("inputs"))?;
|
||||
|
||||
// Parse into intermediate representation
|
||||
let mut schema_ir = MetadataParser::parse(metadata)?;
|
||||
|
||||
// Step 1: Extract fragment markers from schema source file (if enabled)
|
||||
if fragments {
|
||||
let markers = MetadataParser::extract_fragment_markers_from_source(schema.as_path())?;
|
||||
MetadataParser::apply_fragment_markers(&mut schema_ir, &markers);
|
||||
}
|
||||
|
||||
// Step 2: Generate TOML form(s)
|
||||
let forms_output = if fragments && schema_ir.fields.iter().any(|f| f.fragment_marker.is_some())
|
||||
{
|
||||
// Multi-file output: main form + fragments
|
||||
TomlGenerator::generate_with_fragments(&schema_ir, flatten, groups)?
|
||||
} else {
|
||||
// Single file output
|
||||
let form_def = TomlGenerator::generate(&schema_ir, flatten, groups)?;
|
||||
let mut single_output = HashMap::new();
|
||||
single_output.insert("form.toml".to_string(), form_def);
|
||||
single_output
|
||||
};
|
||||
|
||||
// Determine output directory
|
||||
let output_path = output_dir.unwrap_or_else(|| {
|
||||
if fragments && forms_output.len() > 1 {
|
||||
PathBuf::from("generated")
|
||||
} else {
|
||||
PathBuf::from(".")
|
||||
}
|
||||
});
|
||||
|
||||
// Step 3: Write form files
|
||||
if forms_output.len() == 1 && output_path.as_path() == std::path::Path::new(".") {
|
||||
// Single file to stdout or specified path
|
||||
if let Some((_, form_def)) = forms_output.iter().next() {
|
||||
let toml_output = ::toml::to_string_pretty(form_def)
|
||||
.map_err(|e| Error::validation_failed(e.to_string()))?;
|
||||
println!("{}", toml_output);
|
||||
}
|
||||
} else {
|
||||
// Write multiple files or to directory
|
||||
fs::create_dir_all(&output_path).map_err(Error::io)?;
|
||||
|
||||
for (filename, form_def) in forms_output {
|
||||
let file_path = if filename.starts_with("fragments/") {
|
||||
output_path
|
||||
.join("fragments")
|
||||
.join(filename.strip_prefix("fragments/").unwrap())
|
||||
} else {
|
||||
output_path.join(&filename)
|
||||
};
|
||||
|
||||
fs::create_dir_all(file_path.parent().unwrap()).map_err(Error::io)?;
|
||||
|
||||
let toml_output = ::toml::to_string_pretty(&form_def)
|
||||
.map_err(|e| Error::validation_failed(e.to_string()))?;
|
||||
|
||||
fs::write(&file_path, &toml_output).map_err(Error::io)?;
|
||||
eprintln!(" Generated: {}", file_path.display());
|
||||
}
|
||||
|
||||
println!("✓ Forms generated in {}/", output_path.display());
|
||||
}
|
||||
|
||||
// Step 4: Extract i18n translations (if enabled)
|
||||
if i18n {
|
||||
let i18n_output_dir = output_path.join("locales");
|
||||
let _i18n_mapping = I18nExtractor::extract_and_generate(&schema_ir, &i18n_output_dir)?;
|
||||
|
||||
eprintln!(
|
||||
"✓ i18n translations generated in {}/",
|
||||
i18n_output_dir.display()
|
||||
);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn form_to_nickel_cmd(
|
||||
form: PathBuf,
|
||||
input: PathBuf,
|
||||
output: &Option<PathBuf>,
|
||||
_validate: bool,
|
||||
) -> Result<()> {
|
||||
let form_content = fs::read_to_string(&form).map_err(Error::io)?;
|
||||
let _form_def = form_parser::parse_toml(&form_content)?;
|
||||
|
||||
// Determine input type based on extension
|
||||
let results: HashMap<String, serde_json::Value> = if input.extension().and_then(|s| s.to_str())
|
||||
== Some("ncl.j2")
|
||||
{
|
||||
// Template: would require executing form and rendering template
|
||||
// For now, return error as this requires interactive execution
|
||||
return Err(Error::validation_failed(
|
||||
"Template-based form-to-nickel requires interactive execution. Use .json input instead."
|
||||
));
|
||||
} else if input.extension().and_then(|s| s.to_str()) == Some("json") {
|
||||
// Load pre-computed results from JSON
|
||||
let json_content = fs::read_to_string(&input).map_err(Error::io)?;
|
||||
serde_json::from_str(&json_content).map_err(|e| Error::validation_failed(e.to_string()))?
|
||||
} else {
|
||||
return Err(Error::validation_failed(
|
||||
"Input file must be .json or .ncl.j2",
|
||||
));
|
||||
};
|
||||
|
||||
// For now, provide a placeholder message as full Nickel serialization requires schema
|
||||
let nickel_output = format!(
|
||||
"# Form results (JSON format for now)\n{}",
|
||||
serde_json::to_string_pretty(&results)
|
||||
.map_err(|e| Error::validation_failed(e.to_string()))?
|
||||
);
|
||||
|
||||
// Write output
|
||||
if let Some(path) = output {
|
||||
fs::write(path, &nickel_output).map_err(Error::io)?;
|
||||
println!("Nickel output written to {}", path.display());
|
||||
} else {
|
||||
println!("{}", nickel_output);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn nickel_template_cmd(
|
||||
template: PathBuf,
|
||||
results: PathBuf,
|
||||
output: &Option<PathBuf>,
|
||||
) -> Result<()> {
|
||||
// Load results JSON file
|
||||
let json_content = fs::read_to_string(&results).map_err(Error::io)?;
|
||||
let values: HashMap<String, serde_json::Value> =
|
||||
serde_json::from_str(&json_content).map_err(|e| Error::validation_failed(e.to_string()))?;
|
||||
|
||||
// Load and render template
|
||||
let mut engine = TemplateEngine::new();
|
||||
let nickel_output = engine.render_file(template.as_path(), &values, None)?;
|
||||
|
||||
// Write output
|
||||
if let Some(path) = output {
|
||||
fs::write(path, &nickel_output).map_err(Error::io)?;
|
||||
println!("Template rendered to {}", path.display());
|
||||
} else {
|
||||
println!("{}", nickel_output);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn nickel_roundtrip_cmd(
|
||||
input: PathBuf,
|
||||
form: PathBuf,
|
||||
output: PathBuf,
|
||||
ncl_template: Option<PathBuf>,
|
||||
validate: bool,
|
||||
verbose: bool,
|
||||
) -> Result<()> {
|
||||
use typedialog_core::nickel::RoundtripConfig;
|
||||
|
||||
if verbose {
|
||||
eprintln!("Starting Nickel roundtrip workflow");
|
||||
}
|
||||
|
||||
// Create roundtrip config
|
||||
let mut config = RoundtripConfig::with_template(input, form, output, ncl_template);
|
||||
config.validate = validate;
|
||||
config.verbose = verbose;
|
||||
|
||||
// Execute roundtrip
|
||||
let result = config.execute()?;
|
||||
|
||||
if verbose {
|
||||
eprintln!("[roundtrip] Generated {} bytes", result.output_nickel.len());
|
||||
}
|
||||
|
||||
// Print summary
|
||||
println!("✓ Roundtrip completed successfully");
|
||||
println!(" Input fields: {}", result.form_results.len());
|
||||
println!(
|
||||
" Imports preserved: {}",
|
||||
result.input_contracts.imports.len()
|
||||
);
|
||||
println!(
|
||||
" Contracts preserved: {}",
|
||||
result.input_contracts.field_contracts.len()
|
||||
);
|
||||
|
||||
if let Some(passed) = result.validation_passed {
|
||||
if passed {
|
||||
println!(" ✓ Validation: PASSED");
|
||||
} else {
|
||||
println!(" ✗ Validation: FAILED");
|
||||
return Err(Error::validation_failed(
|
||||
"Nickel typecheck failed on output",
|
||||
));
|
||||
commands::nickel_roundtrip_cmd(
|
||||
input,
|
||||
form,
|
||||
output,
|
||||
ncl_template,
|
||||
!no_validate,
|
||||
verbose,
|
||||
)?;
|
||||
}
|
||||
}
|