//! Configuration management for the Provisioning MCP Server
use anyhow::{Context, Result};
use serde::{Deserialize, Serialize};
use std::path::PathBuf;
use std::env;
/// Top-level configuration for the provisioning MCP server.
///
/// Assembled by `Config::load` with layered precedence:
/// built-in defaults, then an optional TOML file, then environment
/// variables, then command-line arguments.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Config {
    /// Path to the provisioning system
    pub provisioning_path: PathBuf,
    /// AI provider configuration
    pub ai: AIConfig,
    /// Server configuration
    pub server: ServerConfig,
    /// Debug mode
    pub debug: bool,
}
/// AI provider settings.
///
/// The API key is never read from the config file; it is resolved from a
/// provider-specific environment variable in `Config::load_from_env`
/// (OPENAI_API_KEY / ANTHROPIC_API_KEY / LLM_API_KEY).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AIConfig {
    /// Enable AI capabilities
    pub enabled: bool,
    /// AI provider (openai, claude, generic)
    pub provider: String,
    /// API endpoint URL
    pub api_endpoint: Option<String>,
    /// API key (loaded from environment)
    pub api_key: Option<String>,
    /// Model name
    pub model: Option<String>,
    /// Maximum tokens for responses
    pub max_tokens: u32,
    /// Temperature for creativity (0.0-1.0); validated by `Config::validate`
    pub temperature: f32,
    /// Request timeout in seconds
    pub timeout: u64,
}
/// MCP server identity and capability flags.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ServerConfig {
    /// Server name/identifier
    pub name: String,
    /// Server version (defaults to the crate version at build time)
    pub version: String,
    /// Enable resource capabilities
    pub enable_resources: bool,
    /// Enable tool change notifications
    pub enable_tool_notifications: bool,
}
impl Default for Config {
fn default() -> Self {
Self {
provisioning_path: PathBuf::from("/usr/local/provisioning"),
ai: AIConfig::default(),
server: ServerConfig::default(),
debug: false,
}
}
}
impl Default for AIConfig {
fn default() -> Self {
Self {
enabled: true,
provider: "openai".to_string(),
api_endpoint: None,
api_key: None,
model: Some("gpt-4".to_string()),
max_tokens: 2048,
temperature: 0.3,
timeout: 30,
}
}
}
impl Default for ServerConfig {
fn default() -> Self {
Self {
name: "provisioning-server-rust".to_string(),
version: env!("CARGO_PKG_VERSION").to_string(),
enable_resources: true,
enable_tool_notifications: true,
}
}
}
impl Config {
    /// Load configuration with layered precedence:
    /// defaults < config file < environment variables < CLI arguments.
    ///
    /// # Errors
    /// Fails if the config file cannot be read or parsed, or if the
    /// resulting configuration does not pass [`Config::validate`].
    pub fn load(
        config_path: Option<PathBuf>,
        provisioning_path: Option<PathBuf>,
        debug: bool,
    ) -> Result<Self> {
        // Start from the file if one was given, otherwise from defaults.
        let mut config = match config_path {
            Some(path) => {
                let config_content = std::fs::read_to_string(&path)
                    .with_context(|| format!("Failed to read config file: {}", path.display()))?;
                toml::from_str::<Config>(&config_content)
                    .with_context(|| format!("Failed to parse config file: {}", path.display()))?
            }
            None => Config::default(),
        };
        // Environment variables override file values.
        config.load_from_env()?;
        // CLI arguments override everything.
        if let Some(path) = provisioning_path {
            config.provisioning_path = path;
        }
        config.debug = debug;
        config.validate()?;
        Ok(config)
    }

    /// Overlay environment variables onto the current configuration.
    ///
    /// A malformed value leaves the current setting untouched (previously a
    /// bad value silently reset the field to a hard-coded default, clobbering
    /// file-provided settings).
    fn load_from_env(&mut self) -> Result<()> {
        // Provisioning path
        if let Ok(path) = env::var("PROVISIONING_PATH") {
            self.provisioning_path = PathBuf::from(path);
        }
        // AI configuration — on parse failure, keep the existing value.
        if let Ok(enabled) = env::var("PROVISIONING_AI_ENABLED") {
            self.ai.enabled = enabled.parse().unwrap_or(self.ai.enabled);
        }
        if let Ok(provider) = env::var("PROVISIONING_AI_PROVIDER") {
            self.ai.provider = provider;
        }
        if let Ok(endpoint) = env::var("PROVISIONING_AI_ENDPOINT") {
            self.ai.api_endpoint = Some(endpoint);
        }
        // API key is resolved from the env var matching the (possibly just
        // overridden) provider. NOTE(review): this unconditionally replaces
        // any previously set key — intentional per AIConfig's contract that
        // keys come only from the environment.
        self.ai.api_key = match self.ai.provider.as_str() {
            "openai" => env::var("OPENAI_API_KEY").ok(),
            "claude" => env::var("ANTHROPIC_API_KEY").ok(),
            "generic" => env::var("LLM_API_KEY").ok(),
            _ => None,
        };
        if let Ok(model) = env::var("PROVISIONING_AI_MODEL") {
            self.ai.model = Some(model);
        }
        if let Ok(max_tokens) = env::var("PROVISIONING_AI_MAX_TOKENS") {
            self.ai.max_tokens = max_tokens.parse().unwrap_or(self.ai.max_tokens);
        }
        if let Ok(temperature) = env::var("PROVISIONING_AI_TEMPERATURE") {
            self.ai.temperature = temperature.parse().unwrap_or(self.ai.temperature);
        }
        if let Ok(timeout) = env::var("PROVISIONING_AI_TIMEOUT") {
            self.ai.timeout = timeout.parse().unwrap_or(self.ai.timeout);
        }
        // Debug mode
        if let Ok(debug) = env::var("PROVISIONING_DEBUG") {
            self.debug = debug.parse().unwrap_or(self.debug);
        }
        Ok(())
    }

    /// Validate the configuration.
    ///
    /// # Errors
    /// Fails when the provisioning path or its main script is missing, or
    /// when AI is enabled with a temperature outside 0.0..=1.0. A missing
    /// API key only produces a warning so the server can still start.
    fn validate(&self) -> Result<()> {
        // Validate provisioning path exists
        if !self.provisioning_path.exists() {
            return Err(anyhow::anyhow!(
                "Provisioning path does not exist: {}",
                self.provisioning_path.display()
            ));
        }
        // Check if the main provisioning script exists (single source of
        // truth for the script location: provisioning_command()).
        let provisioning_script = self.provisioning_command();
        if !provisioning_script.exists() {
            return Err(anyhow::anyhow!(
                "Provisioning script not found: {}",
                provisioning_script.display()
            ));
        }
        // Validate AI configuration if enabled
        if self.ai.enabled {
            if self.ai.api_key.is_none() {
                tracing::warn!(
                    "AI is enabled but no API key found for provider: {}",
                    self.ai.provider
                );
            }
            if !(0.0..=1.0).contains(&self.ai.temperature) {
                return Err(anyhow::anyhow!(
                    "AI temperature must be between 0.0 and 1.0, got: {}",
                    self.ai.temperature
                ));
            }
        }
        Ok(())
    }

    /// Get the provisioning command path (main provisioning script).
    pub fn provisioning_command(&self) -> PathBuf {
        self.provisioning_path.join("core/nulib/provisioning")
    }

    /// Check if AI is available (enabled and an API key is present).
    pub fn is_ai_available(&self) -> bool {
        self.ai.enabled && self.ai.api_key.is_some()
    }
}