Jesús Pérez 09a97ac8f5
chore: update platform submodule to monorepo crates structure
Platform restructured into crates/, added AI service and detector,
       migrated control-center-ui to Leptos 0.8
2026-01-08 21:32:59 +00:00

290 lines
8.4 KiB
Rust

//! Configuration management for the Provisioning MCP Server
use std::env;
use std::path::{Path, PathBuf};
use anyhow::{Context, Result};
use platform_config::ConfigLoader;
use serde::{Deserialize, Serialize};
/// Top-level configuration for the Provisioning MCP Server.
///
/// Combines the provisioning-system location, AI-provider settings, and
/// server identity. Built via hierarchical loading (explicit path, then
/// platform-config hierarchy, then built-in defaults) with environment
/// overrides applied afterwards, and validated before use.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Config {
/// Path to the provisioning system root (must exist; validated at load)
pub provisioning_path: PathBuf,
/// AI provider configuration
pub ai: AIConfig,
/// Server configuration
pub server: ServerConfig,
/// Debug mode (set from CLI flag or PROVISIONING_DEBUG)
pub debug: bool,
}
/// Settings for the optional AI integration.
///
/// The provider string selects which environment variable supplies the API
/// key (openai -> OPENAI_API_KEY, claude -> ANTHROPIC_API_KEY,
/// generic -> LLM_API_KEY).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AIConfig {
/// Enable AI capabilities
pub enabled: bool,
/// AI provider (openai, claude, generic)
pub provider: String,
/// API endpoint URL (None uses the provider's default endpoint)
pub api_endpoint: Option<String>,
/// API key (loaded from environment)
pub api_key: Option<String>,
/// Model name
pub model: Option<String>,
/// Maximum tokens for responses
pub max_tokens: u32,
/// Temperature for creativity (0.0-1.0, enforced by validate())
pub temperature: f32,
/// Request timeout in seconds
pub timeout: u64,
}
/// Identity and capability flags for the MCP server itself.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ServerConfig {
/// Server name/identifier
pub name: String,
/// Server version (defaults to the crate version via CARGO_PKG_VERSION)
pub version: String,
/// Enable resource capabilities
pub enable_resources: bool,
/// Enable tool change notifications
pub enable_tool_notifications: bool,
}
impl Default for Config {
fn default() -> Self {
Self {
provisioning_path: PathBuf::from("/usr/local/provisioning"),
ai: AIConfig::default(),
server: ServerConfig::default(),
debug: false,
}
}
}
impl Default for AIConfig {
fn default() -> Self {
Self {
enabled: true,
provider: "openai".to_string(),
api_endpoint: None,
api_key: None,
model: Some("gpt-4".to_string()),
max_tokens: 2048,
temperature: 0.3,
timeout: 30,
}
}
}
impl Default for ServerConfig {
fn default() -> Self {
Self {
name: "provisioning-server-rust".to_string(),
version: env!("CARGO_PKG_VERSION").to_string(),
enable_resources: true,
enable_tool_notifications: true,
}
}
}
impl ConfigLoader for Config {
    /// Service identifier used by platform-config when resolving paths.
    fn service_name() -> &'static str {
        "mcp-server"
    }

    /// Resolve a config file through platform-config's hierarchy; when no
    /// file is found, fall back to the built-in defaults.
    fn load_from_hierarchy() -> std::result::Result<Self, Box<dyn std::error::Error + Send + Sync>>
    {
        match platform_config::resolve_config_path(Self::service_name()) {
            Some(path) => Self::from_path(&path),
            None => Ok(Self::default()),
        }
    }

    /// Bridge the anyhow-based env loader into the trait's boxed-error shape.
    fn apply_env_overrides(
        &mut self,
    ) -> std::result::Result<(), Box<dyn std::error::Error + Send + Sync>> {
        match self.load_from_env() {
            Ok(()) => Ok(()),
            // anyhow::Error is not directly boxable here; carry its message
            // in an io::Error instead.
            Err(e) => {
                let boxed: Box<dyn std::error::Error + Send + Sync> =
                    Box::new(std::io::Error::other(e.to_string()));
                Err(boxed)
            }
        }
    }

    /// Parse a config file (via platform-config's format loader) and
    /// deserialize the resulting JSON value into a Config.
    fn from_path<P: AsRef<Path>>(
        path: P,
    ) -> std::result::Result<Self, Box<dyn std::error::Error + Send + Sync>> {
        let path = path.as_ref();
        let raw = platform_config::format::load_config(path)
            .map_err(|e| Box::new(e) as Box<dyn std::error::Error + Send + Sync>)?;
        serde_json::from_value(raw).map_err(|e| {
            let err_msg = format!("Failed to deserialize config from {:?}: {}", path, e);
            Box::new(std::io::Error::new(
                std::io::ErrorKind::InvalidData,
                err_msg,
            )) as Box<dyn std::error::Error + Send + Sync>
        })
    }
}
impl Config {
    /// Load configuration with hierarchical fallback logic:
    /// 1. Explicit config path (parameter or MCP_SERVER_CONFIG env var)
    /// 2. Mode-specific config:
    ///    provisioning/platform/config/mcp-server.{mode}.ncl or .toml
    /// 3. Built-in defaults
    ///
    /// Then environment variables override specific fields.
    ///
    /// # Errors
    /// Returns an error when an explicit path fails to load/parse, or when
    /// the resulting configuration fails `validate()`.
    pub fn load(
        config_path: Option<PathBuf>,
        provisioning_path: Option<PathBuf>,
        debug: bool,
    ) -> Result<Self> {
        let mut config = if let Some(path) = config_path {
            Self::from_path(&path)
                .map_err(|e| anyhow::anyhow!("Failed to load from path: {}", e))?
        } else {
            <Self as ConfigLoader>::load()
                .map_err(|e| anyhow::anyhow!("Failed to load config: {}", e))?
        };
        // Command-line arguments take precedence over file/env values.
        if let Some(path) = provisioning_path {
            config.provisioning_path = path;
        }
        config.debug = debug;
        // Fail fast on an unusable configuration.
        config.validate()?;
        Ok(config)
    }

    /// Load configuration from file (legacy wrapper for compatibility)
    fn from_file<P: AsRef<Path>>(path: P) -> Result<Self> {
        Self::from_path(&path).map_err(|e| anyhow::anyhow!("Failed to load from file: {}", e))
    }

    /// Apply environment-variable overrides on top of the current values.
    ///
    /// Overrides are applied only when the corresponding variable is set;
    /// otherwise the value already loaded from file/defaults is kept.
    ///
    /// FIX: previously the provider API key was assigned unconditionally
    /// from `env::var(...).ok()`, so a key loaded from the config file was
    /// clobbered to `None` whenever the env var was unset. Numeric overrides
    /// also reset to hard-coded defaults on parse failure; they now keep the
    /// currently-configured value instead.
    fn load_from_env(&mut self) -> Result<()> {
        // Provisioning path
        if let Ok(path) = env::var("PROVISIONING_PATH") {
            self.provisioning_path = PathBuf::from(path);
        }
        // AI configuration
        if let Ok(enabled) = env::var("PROVISIONING_AI_ENABLED") {
            self.ai.enabled = enabled.parse().unwrap_or(true);
        }
        if let Ok(provider) = env::var("PROVISIONING_AI_PROVIDER") {
            self.ai.provider = provider;
        }
        if let Ok(endpoint) = env::var("PROVISIONING_AI_ENDPOINT") {
            self.ai.api_endpoint = Some(endpoint);
        }
        // Provider-specific API key: override only when the env var exists,
        // so a key from the config file survives when it doesn't.
        let key_var = match self.ai.provider.as_str() {
            "openai" => Some("OPENAI_API_KEY"),
            "claude" => Some("ANTHROPIC_API_KEY"),
            "generic" => Some("LLM_API_KEY"),
            _ => None,
        };
        if let Some(var) = key_var {
            if let Ok(key) = env::var(var) {
                self.ai.api_key = Some(key);
            }
        }
        if let Ok(model) = env::var("PROVISIONING_AI_MODEL") {
            self.ai.model = Some(model);
        }
        if let Ok(max_tokens) = env::var("PROVISIONING_AI_MAX_TOKENS") {
            // Keep the configured value when the env value doesn't parse.
            self.ai.max_tokens = max_tokens.parse().unwrap_or(self.ai.max_tokens);
        }
        if let Ok(temperature) = env::var("PROVISIONING_AI_TEMPERATURE") {
            self.ai.temperature = temperature.parse().unwrap_or(self.ai.temperature);
        }
        if let Ok(timeout) = env::var("PROVISIONING_AI_TIMEOUT") {
            self.ai.timeout = timeout.parse().unwrap_or(self.ai.timeout);
        }
        // Debug mode
        if let Ok(debug) = env::var("PROVISIONING_DEBUG") {
            self.debug = debug.parse().unwrap_or(false);
        }
        Ok(())
    }

    /// Validate the configuration.
    ///
    /// # Errors
    /// Fails when the provisioning path or its main script is missing, or
    /// when the AI temperature is outside 0.0..=1.0. A missing API key with
    /// AI enabled only logs a warning — callers may supply it later.
    fn validate(&self) -> Result<()> {
        // Validate provisioning path exists
        if !self.provisioning_path.exists() {
            return Err(anyhow::anyhow!(
                "Provisioning path does not exist: {}",
                self.provisioning_path.display()
            ));
        }
        // Check if the main provisioning script exists
        let provisioning_script = self.provisioning_path.join("core/nulib/provisioning");
        if !provisioning_script.exists() {
            return Err(anyhow::anyhow!(
                "Provisioning script not found: {}",
                provisioning_script.display()
            ));
        }
        // Validate AI configuration if enabled
        if self.ai.enabled {
            if self.ai.api_key.is_none() {
                tracing::warn!(
                    "AI is enabled but no API key found for provider: {}",
                    self.ai.provider
                );
            }
            // FIX: range containment also rejects NaN; the old
            // `< 0.0 || > 1.0` comparison let NaN pass validation.
            if !(0.0..=1.0).contains(&self.ai.temperature) {
                return Err(anyhow::anyhow!(
                    "AI temperature must be between 0.0 and 1.0, got: {}",
                    self.ai.temperature
                ));
            }
        }
        Ok(())
    }

    /// Get the provisioning command path (main script under the root).
    pub fn provisioning_command(&self) -> PathBuf {
        self.provisioning_path.join("core/nulib/provisioning")
    }

    /// Check if AI is available: enabled AND an API key is present.
    pub fn is_ai_available(&self) -> bool {
        self.ai.enabled && self.ai.api_key.is_some()
    }
}