[package]
name = "vapora-llm-router"
version.workspace = true
authors.workspace = true
edition.workspace = true
license.workspace = true
repository.workspace = true
rust-version.workspace = true

[lib]
crate-type = ["rlib"]

[dependencies]
# Internal crates
vapora-shared = { workspace = true }

# Embeddings
# Note: update the STRATUM_EMBEDDINGS_PATH environment variable or adjust
# this path relative to your workspace checkout layout.
stratum-embeddings = { path = "../../../stratumiops/crates/stratum-embeddings", features = ["vapora"] }

# Secrets management
secretumvault = { workspace = true }

# LLM integration
typedialog-ai = { path = "../../../typedialog/crates/typedialog-ai", features = ["anthropic", "openai", "ollama"] }

# Async runtime
async-trait = { workspace = true }
futures = { workspace = true }
tokio = { workspace = true }

# Serialization
serde = { workspace = true }
serde_json = { workspace = true }
toml = { workspace = true }

# Error handling
anyhow = { workspace = true }
thiserror = { workspace = true }

# HTTP client
reqwest = { workspace = true }

# LLM agent framework
# RAG & embeddings: use provider embedding APIs (Claude, OpenAI, Gemini, Ollama).
rig-core = { workspace = true }

# Utilities
chrono = { workspace = true }
once_cell = { workspace = true }
uuid = { workspace = true }

# Logging
tracing = { workspace = true }

# Monitoring
prometheus = { workspace = true }

[dev-dependencies]
mockall = { workspace = true }
tempfile = { workspace = true }
wiremock = { workspace = true }

[features]
# Per-provider feature flags; all three backends are enabled by default.
# The flags carry no optional dependencies here — NOTE(review): typedialog-ai's
# matching features are enabled unconditionally in [dependencies], so these
# presumably only gate code within this crate; confirm with cfg usage.
default = ["anthropic", "openai", "ollama"]
anthropic = []
openai = []
ollama = []