// Vapora/crates/vapora-backend/tests/rlm_api_test.rs

// RLM API Integration Tests
//
// These tests require a running SurrealDB instance, e.g.:
//   docker run -p 8000:8000 surrealdb/surrealdb:latest start --bind 0.0.0.0:8000
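//
// All tests below are marked #[ignore]; run them explicitly with:
//   cargo test --test rlm_api_test -- --ignored
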
use axum::{
    body::Body,
    http::{Request, StatusCode},
    Router,
};
use serde_json::{json, Value};
use surrealdb::engine::remote::ws::Ws;
use surrealdb::opt::auth::Root;
use surrealdb::Surreal;
use tower::ServiceExt;
use vapora_backend::api::AppState;
use vapora_backend::services::{
    AgentService, ProjectService, ProposalService, ProviderAnalyticsService, TaskService,
};

// Builds a router exposing only the RLM endpoints, backed by the shared
// `test_rlm_api` namespace/database in the local SurrealDB instance.
async fn setup_test_app() -> Router {
    // Connect to SurrealDB
    let db = Surreal::new::<Ws>("127.0.0.1:8000")
        .await
        .expect("Failed to connect to SurrealDB");
    db.signin(Root {
        username: "root",
        password: "root",
    })
    .await
    .expect("Failed to sign in");
    db.use_ns("test_rlm_api")
        .use_db("test_rlm_api")
        .await
        .expect("Failed to use namespace");

    // Initialize services
    let project_service = ProjectService::new(db.clone());
    let task_service = TaskService::new(db.clone());
    let agent_service = AgentService::new(db.clone());
    let proposal_service = ProposalService::new(db.clone());
    let provider_analytics_service = ProviderAnalyticsService::new(db.clone());

    // Create RLM engine
    let rlm_storage = vapora_rlm::storage::SurrealDBStorage::new(db.clone());
    let rlm_bm25_index = std::sync::Arc::new(vapora_rlm::search::bm25::BM25Index::new().unwrap());
    let rlm_engine = std::sync::Arc::new(
        vapora_rlm::RLMEngine::new(std::sync::Arc::new(rlm_storage), rlm_bm25_index).unwrap(),
    );

    // Create application state
    let app_state = AppState::new(
        project_service,
        task_service,
        agent_service,
        proposal_service,
        provider_analytics_service,
    )
    .with_rlm_engine(rlm_engine);

    // Build router with RLM endpoints
    Router::new()
        .route(
            "/api/v1/rlm/documents",
            axum::routing::post(vapora_backend::api::rlm::load_document),
        )
        .route(
            "/api/v1/rlm/query",
            axum::routing::post(vapora_backend::api::rlm::query_document),
        )
        .route(
            "/api/v1/rlm/analyze",
            axum::routing::post(vapora_backend::api::rlm::analyze_document),
        )
        .with_state(app_state)
}
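
// Optional convenience for building the JSON POST requests used in these
// tests. `json_post` is a local helper sketch (not part of the vapora_backend
// API); it mirrors the `Request::builder` pattern the tests use inline.
#[allow(dead_code)]
fn json_post(uri: &str, body: Value) -> Request<Body> {
    Request::builder()
        .method("POST")
        .uri(uri)
        .header("content-type", "application/json")
        .body(Body::from(body.to_string()))
        .expect("request construction should not fail")
}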

// POST /api/v1/rlm/documents should chunk and index the document, returning
// 201 Created with the doc id, strategy, and chunk count.
#[tokio::test]
#[ignore] // Requires SurrealDB
async fn test_load_document_endpoint() {
    let app = setup_test_app().await;

    let request = Request::builder()
        .method("POST")
        .uri("/api/v1/rlm/documents")
        .header("content-type", "application/json")
        .body(Body::from(
            json!({
                "doc_id": "test-doc-1",
                "content": "Rust is a systems programming language. It provides memory safety without garbage collection. Rust uses ownership and borrowing.",
                "strategy": "semantic"
            })
            .to_string(),
        ))
        .unwrap();

    let response = app.oneshot(request).await.unwrap();
    assert_eq!(response.status(), StatusCode::CREATED);

    let body = axum::body::to_bytes(response.into_body(), usize::MAX)
        .await
        .unwrap();
    let json: Value = serde_json::from_slice(&body).unwrap();
    assert_eq!(json["doc_id"], "test-doc-1");
    assert_eq!(json["strategy"], "semantic");
    assert!(json["chunk_count"].as_u64().unwrap() > 0);
}

// Exercises the load-then-query flow: a document is chunked via
// POST /api/v1/rlm/documents and then retrieved via POST /api/v1/rlm/query.
#[tokio::test]
#[ignore] // Requires SurrealDB
async fn test_query_document_endpoint() {
    // Use a single app instance so the load and the query hit the same
    // RLM engine rather than two freshly built ones.
    let app = setup_test_app().await;

    // First, load a document
    let load_request = Request::builder()
        .method("POST")
        .uri("/api/v1/rlm/documents")
        .header("content-type", "application/json")
        .body(Body::from(
            json!({
                "doc_id": "test-doc-2",
                "content": "Rust ownership system ensures memory safety. \
                            The borrow checker validates references at compile time. \
                            Lifetimes track how long references are valid.",
                "strategy": "semantic"
            })
            .to_string(),
        ))
        .unwrap();

    let load_response = app.clone().oneshot(load_request).await.unwrap();
    assert_eq!(load_response.status(), StatusCode::CREATED);

    // Small delay to give indexing time to complete
    tokio::time::sleep(tokio::time::Duration::from_millis(100)).await;

    // Query the document
    let query_request = Request::builder()
        .method("POST")
        .uri("/api/v1/rlm/query")
        .header("content-type", "application/json")
        .body(Body::from(
            json!({
                "doc_id": "test-doc-2",
                "query": "How does Rust ensure memory safety?",
                "limit": 3
            })
            .to_string(),
        ))
        .unwrap();

    let response = app.oneshot(query_request).await.unwrap();
    assert_eq!(response.status(), StatusCode::OK);

    let body = axum::body::to_bytes(response.into_body(), usize::MAX)
        .await
        .unwrap();
    let json: Value = serde_json::from_slice(&body).unwrap();
    assert_eq!(json["query"], "How does Rust ensure memory safety?");
    assert!(json["result_count"].as_u64().unwrap() > 0);
    assert!(json["chunks"].is_array());
}

#[tokio::test]
#[ignore] // Requires SurrealDB and an LLM provider
async fn test_analyze_document_endpoint() {
    // Use a single app instance so the load and the analysis hit the same
    // RLM engine rather than two freshly built ones.
    let app = setup_test_app().await;

    // First, load a document
    let load_request = Request::builder()
        .method("POST")
        .uri("/api/v1/rlm/documents")
        .header("content-type", "application/json")
        .body(Body::from(
            json!({
                "doc_id": "test-doc-3",
                "content": "Rust programming language features: \
                            1. Memory safety without garbage collection. \
                            2. Zero-cost abstractions. \
                            3. Fearless concurrency. \
                            4. Trait-based generics.",
                "strategy": "semantic"
            })
            .to_string(),
        ))
        .unwrap();

    let load_response = app.clone().oneshot(load_request).await.unwrap();
    assert_eq!(load_response.status(), StatusCode::CREATED);

    // Small delay to give indexing time to complete
    tokio::time::sleep(tokio::time::Duration::from_millis(100)).await;

    // Analyze the document (requires a configured LLM provider)
    let analyze_request = Request::builder()
        .method("POST")
        .uri("/api/v1/rlm/analyze")
        .header("content-type", "application/json")
        .body(Body::from(
            json!({
                "doc_id": "test-doc-3",
                "query": "Summarize the key features of Rust",
                "limit": 5
            })
            .to_string(),
        ))
        .unwrap();

    let response = app.oneshot(analyze_request).await.unwrap();

    // Analysis may fail if no LLM provider is configured, so accept either a
    // successful response or an expected error status.
    if response.status() == StatusCode::OK {
        let body = axum::body::to_bytes(response.into_body(), usize::MAX)
            .await
            .unwrap();
        let json: Value = serde_json::from_slice(&body).unwrap();
        assert_eq!(json["query"], "Summarize the key features of Rust");
        assert!(json["result"].is_string());
        assert!(json["chunks_used"].as_u64().unwrap() > 0);
    } else {
        // Expected when no LLM provider is configured
        assert!(
            response.status().is_client_error() || response.status().is_server_error(),
            "Expected error status due to missing LLM provider"
        );
    }
}

#[tokio::test]
#[ignore] // Requires SurrealDB
async fn test_load_document_validation() {
    let app = setup_test_app().await;

    // Test with missing doc_id
    let request = Request::builder()
        .method("POST")
        .uri("/api/v1/rlm/documents")
        .header("content-type", "application/json")
        .body(Body::from(
            json!({
                "content": "Some content"
            })
            .to_string(),
        ))
        .unwrap();

    let response = app.oneshot(request).await.unwrap();
    assert_eq!(response.status(), StatusCode::UNPROCESSABLE_ENTITY);
}

#[tokio::test]
#[ignore] // Requires SurrealDB
async fn test_query_nonexistent_document() {
    let app = setup_test_app().await;

    let request = Request::builder()
        .method("POST")
        .uri("/api/v1/rlm/query")
        .header("content-type", "application/json")
        .body(Body::from(
            json!({
                "doc_id": "nonexistent-doc",
                "query": "test query",
                "limit": 5
            })
            .to_string(),
        ))
        .unwrap();

    let response = app.oneshot(request).await.unwrap();

    // Should return OK with empty results
    assert_eq!(response.status(), StatusCode::OK);
    let body = axum::body::to_bytes(response.into_body(), usize::MAX)
        .await
        .unwrap();
    let json: Value = serde_json::from_slice(&body).unwrap();
    assert_eq!(json["result_count"], 0);
    assert_eq!(json["chunks"].as_array().unwrap().len(), 0);
}