2025-10-07 10:59:52 +01:00

59 lines
1.9 KiB
Rust

use criterion::{black_box, criterion_group, criterion_main, Criterion, BenchmarkId};
use provisioning_mcp_server::{Config, ProvisioningTools, ProvisioningEngine};
/// Benchmarks natural-language server-description parsing across several
/// representative inputs of varying length and complexity.
fn bench_server_parsing(c: &mut Criterion) {
    let config = Config::default();
    let tools = ProvisioningTools::new(&config);
    // Representative descriptions: simple request through multi-constraint ones.
    let test_cases = vec![
        "Create 1 server for web hosting",
        "Create 3 servers with 8 CPU cores for kubernetes cluster on AWS",
        "Deploy 5 t3.large instances in us-west-2 for database cluster",
        "Setup 2 medium servers with 4 CPU cores for local development",
    ];
    let mut group = c.benchmark_group("server_parsing");
    for case in &test_cases {
        // Use the description text itself as the benchmark parameter.
        // Keying on `case.len()` (as before) risks duplicate BenchmarkIds —
        // criterion panics on ID collision — if two cases ever share a length.
        group.bench_with_input(
            BenchmarkId::new("parse_description", case),
            case,
            |b, case| {
                b.iter(|| tools.parse_server_description(black_box(case)));
            },
        );
    }
    group.finish();
}
/// Benchmarks the AI-status query against a default configuration.
fn bench_ai_status(c: &mut Criterion) {
    let config = Config::default();
    let tools = ProvisioningTools::new(&config);
    c.bench_function("ai_status", |b| b.iter(|| tools.get_ai_status()));
}
/// Benchmarks infrastructure-status retrieval. Silently skipped when the
/// provisioning engine cannot be constructed (best-effort benchmark).
fn bench_infrastructure_status(c: &mut Criterion) {
    let mut config = Config::default();
    // Point at the current working directory to avoid path issues in tests.
    config.provisioning_path = std::env::current_dir().unwrap();
    // No engine, no benchmark — same skip-on-failure behavior as before.
    let Ok(engine) = ProvisioningEngine::new(&config) else {
        return;
    };
    c.bench_function("infrastructure_status", |b| {
        b.iter(|| {
            // The call may well fail; we only measure how long it takes.
            let _ = engine.get_status(None, false);
        });
    });
}
// Register every benchmark function above into one group and let criterion
// generate the binary's `main` entry point for it.
criterion_group!(benches, bench_server_parsing, bench_ai_status, bench_infrastructure_status);
criterion_main!(benches);