prvng_kcl/workflows.k

# Info: KCL batch workflow schemas for provisioning (Provisioning)
# Author: JesusPerezLorenzo
# Release: 0.0.1
# Date: 25-09-2025
# Description: Core batch workflow schemas following PAP principles
import .settings

schema DependencyDef:
    """
    Dependency definition between batch operations
    Supports both sequential and conditional dependencies
    """
    # Target operation ID that this dependency points to
    target_operation_id: str
    # Dependency type: 'sequential' waits for completion, 'conditional' waits for specific conditions
    dependency_type: "sequential" | "conditional" = "sequential"
    # For conditional dependencies, specify required conditions
    conditions?: [str] = []
    # Timeout in seconds to wait for dependency resolution
    timeout: int = 300
    # Whether failure of dependency should fail this operation
    fail_on_dependency_error: bool = True

    check:
        len(target_operation_id) > 0, "Target operation ID cannot be empty"
        timeout > 0, "Timeout must be positive"
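
# Illustrative sketch (not part of the schema set): a conditional dependency on a
# hypothetical "create-server" operation; the condition names are placeholders,
# not a fixed vocabulary defined by this module.
_example_dependency = DependencyDef {
    target_operation_id = "create-server"
    dependency_type = "conditional"
    conditions = ["server_ready", "ssh_available"]
    timeout = 600
}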

schema RetryPolicy:
    """
    Retry policy configuration for batch operations
    Supports exponential backoff and custom retry conditions
    """
    # Maximum number of retry attempts (0 = no retries)
    max_attempts: int = 3
    # Initial delay between retries in seconds
    initial_delay: int = 5
    # Maximum delay between retries in seconds
    max_delay: int = 300
    # Backoff multiplier (1.0 = linear, >1.0 = exponential)
    backoff_multiplier: float = 2.0
    # Specific error codes/conditions that should trigger retries
    retry_on_errors?: [str] = ["connection_error", "timeout", "rate_limit"]
    # Whether to retry on any error (if retry_on_errors is empty)
    retry_on_any_error: bool = False

    check:
        max_attempts >= 0, "Max attempts cannot be negative"
        initial_delay > 0, "Initial delay must be positive"
        max_delay >= initial_delay, "Max delay must be >= initial delay"
        backoff_multiplier >= 1.0, "Backoff multiplier must be >= 1.0"
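
# Illustrative sketch: an exponential backoff policy (roughly 5s, 10s, 20s, ...
# capped at 120s, assuming the runtime multiplies delays by backoff_multiplier);
# the error codes are placeholders, not an exhaustive list.
_example_retry = RetryPolicy {
    max_attempts = 5
    initial_delay = 5
    max_delay = 120
    backoff_multiplier = 2.0
    retry_on_errors = ["connection_error", "timeout"]
}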

schema RollbackStrategy:
    """
    Rollback strategy configuration for failed batch operations
    Supports different rollback approaches based on operation type
    """
    # Whether rollback is enabled
    enabled: bool = True
    # Rollback strategy: 'none', 'immediate', 'batch_end', 'manual'
    strategy: "none" | "immediate" | "batch_end" | "manual" = "immediate"
    # Whether to preserve partial state for manual recovery
    preserve_partial_state: bool = False
    # Custom rollback commands/operations
    custom_rollback_operations?: [str] = []
    # Timeout for rollback operations
    rollback_timeout: int = 600

    check:
        rollback_timeout > 0, "Rollback timeout must be positive"
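
# Illustrative sketch: defer rollback until the whole batch finishes and keep
# partial state around for inspection; the custom operation name is hypothetical.
_example_rollback = RollbackStrategy {
    strategy = "batch_end"
    preserve_partial_state = True
    custom_rollback_operations = ["cleanup_dns_records"]
    rollback_timeout = 900
}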

schema MonitoringConfig:
    """
    Monitoring and observability configuration for batch workflows
    Integrates with various monitoring backends
    """
    # Whether monitoring is enabled
    enabled: bool = True
    # Monitoring backend: 'prometheus', 'grafana', 'datadog', 'custom'
    backend: "prometheus" | "grafana" | "datadog" | "custom" = "prometheus"
    # Metrics endpoint URL (for custom backends)
    endpoint?: str
    # Metric collection interval in seconds
    collection_interval: int = 30
    # Whether to enable detailed operation tracing
    enable_tracing: bool = True
    # Log level for batch operations
    log_level: "debug" | "info" | "warn" | "error" = "info"
    # Whether to send notifications on workflow completion/failure
    enable_notifications: bool = False
    # Notification channels (webhooks, slack, email, etc.)
    notification_channels?: [str] = []

    check:
        collection_interval > 0, "Collection interval must be positive"
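
# Illustrative sketch: Prometheus metrics with notifications enabled; the channel
# string format is a placeholder and depends on how the runtime interprets it.
_example_monitoring = MonitoringConfig {
    backend = "prometheus"
    collection_interval = 15
    log_level = "debug"
    enable_notifications = True
    notification_channels = ["slack://#provisioning-alerts"]
}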

schema StorageConfig:
    """
    Storage backend configuration for batch workflow state and results
    Supports multiple storage backends including SurrealDB and filesystem
    """
    # Storage backend type
    backend: "surrealdb" | "filesystem" | "redis" | "postgresql" = "filesystem"
    # Connection configuration for database backends
    connection_config?: {str:str} = {}
    # Base path for filesystem backend
    base_path: str = "./batch_workflows"
    # Whether to enable state persistence
    enable_persistence: bool = True
    # State retention period in hours (0 = keep forever, default is 1 week)
    retention_hours: int = 168
    # Whether to compress stored data
    enable_compression: bool = False
    # Encryption settings for stored data
    encryption?: settings.SecretProvider

    check:
        len(base_path) > 0, "Base path cannot be empty"
        retention_hours >= 0, "Retention hours cannot be negative"
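
# Illustrative sketch: SurrealDB-backed state with compression; the connection
# keys and values are placeholders, not keys the runtime is known to require.
_example_storage = StorageConfig {
    backend = "surrealdb"
    connection_config = {
        "url": "ws://localhost:8000"
        "namespace": "provisioning"
        "database": "workflows"
    }
    retention_hours = 720
    enable_compression = True
}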

schema BatchOperation:
    """
    Individual operation definition within a batch workflow
    Supports various operation types with provider-agnostic configuration
    """
    # Unique operation identifier within workflow
    operation_id: str
    # Human-readable operation name/description
    name: str
    # Operation type: server, taskserv, cluster, custom
    operation_type: "server" | "taskserv" | "cluster" | "custom" = "server"
    # Target provider (upcloud, aws, mixed, etc.)
    provider?: str
    # Specific action: create, delete, update, scale, etc.
    action: "create" | "delete" | "update" | "scale" | "configure" = "create"
    # Operation-specific parameters (flexible configuration)
    parameters: {str:str} = {}
    # Dependencies on other operations
    dependencies?: [DependencyDef] = []
    # Retry policy for this operation
    retry_policy: RetryPolicy = RetryPolicy {}
    # Rollback strategy for this operation
    rollback_strategy: RollbackStrategy = RollbackStrategy {}
    # Operation execution settings
    # Timeout in seconds (30 minutes default)
    timeout: int = 1800
    # Whether operation can run in parallel with others
    allow_parallel: bool = True
    # Priority for operation scheduling (higher = earlier execution)
    priority: int = 0
    # Validation rules for operation parameters
    validation_rules?: [str] = []
    # Expected outcomes/conditions for success
    success_conditions?: [str] = []

    check:
        len(operation_id) > 0, "Operation ID cannot be empty"
        len(name) > 0, "Operation name cannot be empty"
        timeout > 0, "Timeout must be positive"
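
# Illustrative sketch: create a server on the upcloud provider with a tighter
# retry policy; the parameter keys ("plan", "zone") are placeholders for
# whatever provider-specific settings the runtime expects.
_example_operation = BatchOperation {
    operation_id = "create-server"
    name = "Create web server"
    operation_type = "server"
    provider = "upcloud"
    action = "create"
    parameters = {
        "plan": "2xCPU-4GB"
        "zone": "es-mad1"
    }
    retry_policy = RetryPolicy {max_attempts = 2}
    timeout = 900
    priority = 10
}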

schema BatchWorkflow:
    """
    Main batch workflow definition supporting mixed provider operations
    Follows PAP principles with configuration-driven architecture
    """
    # Unique workflow identifier
    workflow_id: str
    # Human-readable workflow name
    name: str
    # Workflow description
    description?: str = ""
    # Workflow metadata
    version: str = "1.0.0"
    created_at?: str
    modified_at?: str
    # List of operations in this workflow
    operations: [BatchOperation]
    # Global workflow settings
    # Maximum parallel operations (0 = unlimited)
    max_parallel_operations: int = 5
    # Global timeout for entire workflow in seconds (2 hours default)
    global_timeout: int = 7200
    # Whether to stop workflow on first failure
    fail_fast: bool = False
    # Storage backend configuration
    storage: StorageConfig = StorageConfig {}
    # Monitoring configuration
    monitoring: MonitoringConfig = MonitoringConfig {}
    # Global retry policy (can be overridden per operation)
    default_retry_policy: RetryPolicy = RetryPolicy {}
    # Global rollback strategy
    default_rollback_strategy: RollbackStrategy = RollbackStrategy {}
    # Workflow execution context
    execution_context: {str:str} = {}
    # Pre and post workflow hooks
    pre_workflow_hooks?: [str] = []
    post_workflow_hooks?: [str] = []
    # Notification settings
    notifications?: MonitoringConfig

    check:
        len(workflow_id) > 0, "Workflow ID cannot be empty"
        len(name) > 0, "Workflow name cannot be empty"
        len(operations) > 0, "Workflow must contain at least one operation"
        max_parallel_operations >= 0, "Max parallel operations cannot be negative"
        global_timeout > 0, "Global timeout must be positive"
        # Validate that operation IDs are unique within the workflow
        isunique([op.operation_id for op in operations]), "Operation IDs must be unique"
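
# Illustrative sketch: a two-operation workflow where the task service install
# waits for the server create to finish; IDs, names, and the provider value are
# placeholders chosen for the example.
_example_workflow = BatchWorkflow {
    workflow_id = "wf-web-stack"
    name = "Provision web stack"
    operations = [
        BatchOperation {
            operation_id = "create-server"
            name = "Create web server"
            provider = "upcloud"
        }
        BatchOperation {
            operation_id = "install-taskserv"
            name = "Install task service"
            operation_type = "taskserv"
            dependencies = [DependencyDef {target_operation_id = "create-server"}]
        }
    ]
    max_parallel_operations = 2
    fail_fast = True
}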

schema WorkflowExecution:
    """
    Runtime execution state for batch workflows
    Tracks progress, results, and state changes
    """
    # Reference to the workflow being executed
    workflow_id: str
    # Unique execution instance identifier
    execution_id: str
    # Current execution status
    status: "pending" | "running" | "paused" | "completed" | "failed" | "cancelled" = "pending"
    # Execution timing
    started_at?: str
    completed_at?: str
    # Duration in seconds
    duration?: int
    # Operation execution states
    operation_states: {str:{str:str}} = {}
    # Execution results and outputs
    results: {str:str} = {}
    # Error information
    errors: [str] = []
    # Resource usage tracking
    resource_usage?: {str:str} = {}
    # Rollback history
    rollback_history: [str] = []

    check:
        len(workflow_id) > 0, "Workflow ID cannot be empty"
        len(execution_id) > 0, "Execution ID cannot be empty"
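
# Illustrative sketch: a snapshot of a run in progress; the execution ID format,
# timestamps, and per-operation state keys are placeholders with no convention
# defined by this module.
_example_execution = WorkflowExecution {
    workflow_id = "wf-web-stack"
    execution_id = "exec-20250925-001"
    status = "running"
    started_at = "2025-09-25T10:00:00Z"
    operation_states = {
        "create-server": {"status": "completed"}
        "install-taskserv": {"status": "running"}
    }
}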

schema WorkflowTemplate:
    """
    Reusable workflow templates for common batch operations
    Supports parameterization and customization
    """
    # Template identifier
    template_id: str
    # Template name and description
    name: str
    description?: str = ""
    # Template category
    category: "infrastructure" | "deployment" | "maintenance" | "testing" | "custom" = "infrastructure"
    # Base workflow definition
    workflow_template: BatchWorkflow
    # Template parameters that can be customized
    parameters: {str:str} = {}
    # Required parameters that must be provided
    required_parameters: [str] = []
    # Template versioning
    version: str = "1.0.0"
    # Compatibility information
    min_provisioning_version?: str
    # Usage examples and documentation
    examples?: [str] = []
    documentation_url?: str

    check:
        len(template_id) > 0, "Template ID cannot be empty"
        len(name) > 0, "Template name cannot be empty"
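
# Illustrative sketch: wrap the example workflow above as a reusable template;
# the parameter name "zone" is a placeholder for a value a caller would substitute.
_example_template = WorkflowTemplate {
    template_id = "tpl-web-stack"
    name = "Web stack provisioning template"
    category = "infrastructure"
    workflow_template = _example_workflow
    parameters = {"zone": "es-mad1"}
    required_parameters = ["zone"]
}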