- Exclude problematic markdown files from linting (existing legacy issues)
- Make clippy check less aggressive (warnings only, not -D warnings)
- Move cargo test to a manual stage (too slow for pre-commit)
- Exclude SVG files from end-of-file-fixer and trailing-whitespace
- Add markdown linting exclusions for existing documentation

This allows pre-commit hooks to run successfully on new code without blocking commits due to existing issues in legacy documentation files.
143 lines · 4.5 KiB · Rust
//! Criterion benchmarks for `vapora_telemetry::MetricsCollector`: recording
//! tasks and provider calls, reading aggregated metrics, and cloning the
//! collector handle.

use criterion::{black_box, criterion_group, criterion_main, Criterion};
use vapora_telemetry::MetricsCollector;

/// Record a task start followed by a success on a fresh collector.
fn metrics_record_task(c: &mut Criterion) {
    c.bench_function("record_task_success", |b| {
        b.iter(|| {
            let collector = MetricsCollector::new();
            black_box(collector.record_task_start());
            black_box(collector.record_task_success(black_box(1000)));
        });
    });
}

/// Record a single provider call on a fresh collector.
fn metrics_record_provider_call(c: &mut Criterion) {
    c.bench_function("record_provider_call", |b| {
        b.iter(|| {
            let collector = MetricsCollector::new();
            black_box(collector.record_provider_call(
                black_box("claude"),
                black_box(1000),
                black_box(500),
                black_box(0.05),
            ));
        });
    });
}

/// Read aggregated task metrics from a collector pre-filled with 1000 task
/// records (roughly 1% failures).
fn metrics_get_task_metrics(c: &mut Criterion) {
    c.bench_function("get_task_metrics_1000_records", |b| {
        b.iter_batched(
            || {
                let collector = MetricsCollector::new();
                for i in 0..1000 {
                    collector.record_task_start();
                    if i % 100 != 0 {
                        collector.record_task_success(1000 + (i as u64 * 10) % 5000);
                    } else {
                        collector.record_task_failure(5000, "timeout");
                    }
                }
                collector
            },
            |collector| black_box(collector.get_task_metrics()),
            criterion::BatchSize::SmallInput,
        );
    });
}

/// Read aggregated provider metrics from a collector pre-filled with 500 calls
/// spread across three providers.
fn metrics_get_provider_metrics(c: &mut Criterion) {
    c.bench_function("get_provider_metrics_500_calls", |b| {
        b.iter_batched(
            || {
                let collector = MetricsCollector::new();
                for i in 0..500 {
                    let provider = match i % 3 {
                        0 => "claude",
                        1 => "openai",
                        _ => "gemini",
                    };
                    collector.record_provider_call(
                        provider,
                        100 + (i as u64 * 10),
                        200 + (i as u64 * 20),
                        0.01 + (i as f64 * 0.001),
                    );
                }
                collector
            },
            |collector| black_box(collector.get_provider_metrics()),
            criterion::BatchSize::SmallInput,
        );
    });
}

/// Read system-wide metrics after a mixed workload: 200 tasks, 100 provider
/// calls across five providers, 50 heartbeats, and 10 coalitions.
fn metrics_get_system_metrics(c: &mut Criterion) {
    c.bench_function("get_system_metrics_200_tasks_10_providers", |b| {
        b.iter_batched(
            || {
                let collector = MetricsCollector::new();

                // Record tasks (5% failures)
                for i in 0..200 {
                    collector.record_task_start();
                    if i % 20 != 0 {
                        collector.record_task_success(1000 + (i as u64 * 100));
                    } else {
                        collector.record_task_failure(5000, "execution_error");
                    }
                }

                // Record provider calls
                for i in 0..100 {
                    let provider = match i % 5 {
                        0 => "claude",
                        1 => "openai",
                        2 => "gemini",
                        3 => "ollama",
                        _ => "anthropic",
                    };
                    collector.record_provider_call(
                        provider,
                        100 + (i as u64 * 20),
                        200 + (i as u64 * 40),
                        0.01 + (i as f64 * 0.002),
                    );
                }

                // Record heartbeats and coalitions
                for _ in 0..50 {
                    collector.record_heartbeat();
                }
                for _ in 0..10 {
                    collector.record_coalition();
                }

                collector
            },
            |collector| black_box(collector.get_system_metrics()),
            criterion::BatchSize::SmallInput,
        );
    });
}

/// Measure the cost of cloning a freshly created collector handle.
fn metrics_clone_overhead(c: &mut Criterion) {
    c.bench_function("clone_metrics_collector", |b| {
        b.iter(|| {
            let collector = MetricsCollector::new();
            black_box(collector.clone())
        });
    });
}

criterion_group!(
    benches,
    metrics_record_task,
    metrics_record_provider_call,
    metrics_get_task_metrics,
    metrics_get_provider_metrics,
    metrics_get_system_metrics,
    metrics_clone_overhead
);
criterion_main!(benches);
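The `criterion_group!` call above uses Criterion's default settings. If the sub-microsecond `record_*` benchmarks prove noisy, the same group can be declared with an explicit configuration instead. The sketch below relies only on standard Criterion builder methods, but the sample size and measurement time are illustrative assumptions, not values taken from this repository:

```rust
use std::time::Duration;

use criterion::{criterion_group, criterion_main, Criterion};

// Illustrative alternative to the plain `criterion_group!` call above: the same
// targets, but with a larger sample size and a longer measurement window to
// reduce noise on very fast benchmarks. The numbers are assumptions.
criterion_group! {
    name = benches;
    config = Criterion::default()
        .sample_size(200)
        .measurement_time(Duration::from_secs(10));
    targets = metrics_record_task,
        metrics_record_provider_call,
        metrics_get_task_metrics,
        metrics_get_provider_metrics,
        metrics_get_system_metrics,
        metrics_clone_overhead
}
criterion_main!(benches);
```

Either form is run with `cargo bench`; a single benchmark can be selected by passing its name as a filter, for example `cargo bench record_task_success`.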