Jesús Pérez b9e2cee9f7
Some checks failed
Documentation Lint & Validation / Markdown Linting (push) Has been cancelled
Documentation Lint & Validation / Validate mdBook Configuration (push) Has been cancelled
Documentation Lint & Validation / Content & Structure Validation (push) Has been cancelled
mdBook Build & Deploy / Build mdBook (push) Has been cancelled
Rust CI / Security Audit (push) Has been cancelled
Rust CI / Check + Test + Lint (nightly) (push) Has been cancelled
Rust CI / Check + Test + Lint (stable) (push) Has been cancelled
Documentation Lint & Validation / Lint & Validation Summary (push) Has been cancelled
mdBook Build & Deploy / Documentation Quality Check (push) Has been cancelled
mdBook Build & Deploy / Deploy to GitHub Pages (push) Has been cancelled
mdBook Build & Deploy / Notification (push) Has been cancelled
feat(workflow-engine): add saga, persistence, auth, and NATS-integrated orchestrator hardening
Key changes driving this: new saga.rs, persistence.rs, auth.rs in workflow-engine; SurrealDB migration 009_workflow_state.surql; backend
  services refactored; frontend dist built; ADR-0033 documenting the hardening decision.
2026-02-22 21:44:42 +00:00

68 lines
1.4 KiB
TOML

[package]
name = "vapora-llm-router"
version.workspace = true
authors.workspace = true
edition.workspace = true
license.workspace = true
repository.workspace = true
rust-version.workspace = true

[lib]
crate-type = ["rlib"]

[dependencies]
# Internal crates
vapora-shared = { workspace = true }

# Embeddings
stratum-embeddings = { workspace = true }

# Secrets management
secretumvault = { workspace = true }

# LLM integration. Provider backends are optional in typedialog-ai and are
# enabled through this crate's [features] section below (all on by default),
# instead of being hardcoded here — otherwise our own feature flags would be
# inert.
typedialog-ai = { path = "../../../typedialog/crates/typedialog-ai" }

# Async runtime
tokio = { workspace = true }
futures = { workspace = true }
async-trait = { workspace = true }

# Serialization
serde = { workspace = true }
serde_json = { workspace = true }
toml = { workspace = true }

# Error handling
anyhow = { workspace = true }
thiserror = { workspace = true }

# HTTP client
reqwest = { workspace = true }

# LLM Agent Framework
rig-core = { workspace = true }

# RAG & Embeddings: provider embedding APIs are used directly
# (Claude, OpenAI, Gemini, Ollama); no separate vector-store dependency.

# Utilities
uuid = { workspace = true }
once_cell = { workspace = true }
chrono = { workspace = true }

# Logging
tracing = { workspace = true }

# Monitoring
prometheus = { workspace = true }

[dev-dependencies]
mockall = { workspace = true }
wiremock = { workspace = true }
tempfile = { workspace = true }

[features]
# Default builds enable every provider, matching the previous behavior where
# the typedialog-ai features were hardcoded on the dependency line.
default = ["anthropic", "openai", "ollama"]
# Each flag forwards to the corresponding typedialog-ai provider feature so
# that disabling it here genuinely removes the provider backend.
anthropic = ["typedialog-ai/anthropic"]
openai = ["typedialog-ai/openai"]
ollama = ["typedialog-ai/ollama"]