chore: use +nightly for cargo fmt and fix pre-commit and just recipes
Some checks failed
Rust CI / Security Audit (push) Has been cancelled
Rust CI / Check + Test + Lint (nightly) (push) Has been cancelled
Rust CI / Check + Test + Lint (stable) (push) Has been cancelled

Jesús Pérez 2025-12-29 05:04:53 +00:00
parent 13af9fa932
commit 2cc472b0bf
Signed by: jesus
GPG Key ID: 9F243E355E0BC939
41 changed files with 301 additions and 282 deletions

.clippy.toml Normal file
View File

@ -0,0 +1,17 @@
# Generated by dev-system/ci
# Clippy configuration for Rust linting
# Lint level thresholds
cognitive-complexity-threshold = 25
type-complexity-threshold = 500
excessive-nesting-threshold = 5
# Allowed patterns (prevent lints on specific code)
# allow-expect-in-tests = true
# allow-unwrap-in-tests = true
# Single-character variable name threshold
single-char-binding-names-threshold = 4
# Note: Lint configurations belong in Cargo.toml under [lints.clippy] or [workspace.lints.clippy]
# This file only contains clippy configuration parameters, not lint levels
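To make the note above concrete: lint levels such as the ones removed from the old clippy config further down in this commit would live in the workspace Cargo.toml rather than in .clippy.toml. A minimal sketch, assuming the workspace uses Cargo's [workspace.lints] table (the repository's actual Cargo.toml is not part of this diff, and the lint names are illustrative, taken from the removed file):
# Sketch only -- lint levels belong in the workspace Cargo.toml, not .clippy.toml
[workspace.lints.clippy]
todo = "warn"                                  # flag leftover todo!() macros
panic = "warn"                                 # flag explicit panic!() calls
pedantic = { level = "allow", priority = -1 }  # group lint; lower priority so specific lints win
Member crates would then opt in with [lints] workspace = true in their own Cargo.toml.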

View File

@ -9,8 +9,8 @@ repos:
- repo: local
hooks:
- id: rust-fmt
name: Rust formatting (cargo fmt)
entry: bash -c 'cargo fmt --all -- --check'
name: Rust formatting (cargo +nightly fmt)
entry: bash -c 'cargo +nightly fmt --all -- --check'
language: system
types: [rust]
pass_filenames: false

View File

@ -1,5 +1,6 @@
# Generated by dev-system/ci
# Rustfmt configuration for consistent Rust code formatting
# Configured for cargo +nightly fmt with advanced features enabled
# Basic formatting options
edition = "2021"
@ -8,50 +9,45 @@ hard_tabs = false
tab_spaces = 4
newline_style = "Unix"
# Comment formatting
comment_width = 80
wrap_comments = true
# Code structure
use_small_heuristics = "Default"
# Spaces and indentation
fn_single_line = false
fn_args_layout = "Tall"
where_single_line = false
# Match expressions
match_block_trailing_comma = false
# Imports
reorder_imports = true
reorder_modules = true
remove_nested_parens = true
group_imports = "StdExternalCrate"
# Match expressions
match_block_trailing_comma = false
# Chains
chain_width = 60
chain_indent = "Block"
# Formatting
format_strings = true
format_code_in_doc_comments = false
# Comment formatting (nightly)
comment_width = 80
wrap_comments = true
normalize_comments = true
normalize_doc_attributes = true
# Line breaks
match_arm_blocks = true
overflow_delimited_expressions = false
blank_lines_lower_bound = 0
blank_lines_upper_bound = 1
# Spaces and indentation (nightly)
fn_single_line = false
fn_params_layout = "Tall"
where_single_line = false
# Performance
condense_wildcard_imports = false
# Formatting (nightly)
format_strings = true
format_code_in_doc_comments = false
# Spaces
# Spaces (nightly)
space_before_colon = false
space_after_colon = true
spaces_around_ranges = false
# Stability
unstable_features = false
# Line breaks (nightly)
match_arm_blocks = true
blank_lines_lower_bound = 0
blank_lines_upper_bound = 1
# Enable nightly features
unstable_features = true

View File

@ -1,46 +0,0 @@
# Generated by dev-system/ci
# Clippy configuration for Rust linting
# Lint level thresholds
cognitive-complexity-threshold = 25
type-complexity-threshold = 500
excessive-nesting-threshold = 5
# Allowed patterns (prevent lints on specific code)
# allow-expect-in-tests = true
# allow-unwrap-in-tests = true
# Single-letter lifetime parameters
single-char-lifetime-names-threshold = 4
# Lint configuration
[clippy]
# Additional lints to enable by default
enable = []
# Specific lint configurations
[lints]
# Warn on panics in tests (but allow expect)
"clippy::panic" = "warn"
# Warn on todo! and unimplemented! macros
"clippy::todo" = "warn"
# Warn on large copies
"clippy::large-include-file" = "warn"
# These are good practices but not strict requirements
"clippy::missing-docs-in-crate-items" = "allow"
"clippy::missing-errors-doc" = "allow"
# Performance lints
"clippy::perf" = "warn"
"clippy::single-match" = "warn"
"clippy::match-bool" = "warn"
# Style lints
"clippy::style" = "warn"
"clippy::all" = "warn"
# Pedantic is too strict for production code, so warn only on important ones
"clippy::pedantic" = "allow"
"clippy::match-wild-err-arm" = "warn"
"clippy::or-patterns" = "warn"

View File

@ -158,12 +158,12 @@ ci-full:
# Format all code
[doc("Format Rust code")]
fmt:
cargo fmt --all
@just dev::fmt
# Check formatting
[doc("Check formatting without modifying")]
fmt-check:
cargo fmt --all -- --check
@just dev::fmt-check
# Run clippy linter
[doc("Run clippy with all warnings denied")]

View File

@ -20,12 +20,12 @@ help:
# Format all code
[doc("Format all Rust code")]
fmt:
cargo fmt --all
cargo +nightly fmt --all
# Check formatting without modifying
[doc("Check formatting")]
fmt-check:
cargo fmt --all -- --check
cargo +nightly fmt --all -- --check
# Lint with clippy
[doc("Run clippy linter (all targets, all features)")]

View File

@ -1,3 +1,5 @@
use std::sync::Arc;
#[cfg(feature = "server")]
use axum::{
extract::{Path, State},
@ -6,7 +8,6 @@ use axum::{
Json,
};
use serde_json::{json, Value};
use std::sync::Arc;
use super::ApiResponse;
use crate::core::VaultCore;

View File

@ -1,3 +1,5 @@
use std::sync::Arc;
/// API middleware for authentication and authorization
use axum::{
extract::{Request, State},
@ -5,7 +7,6 @@ use axum::{
middleware::Next,
response::Response,
};
use std::sync::Arc;
use tracing::{error, warn};
use crate::auth::extract_bearer_token;

View File

@ -9,10 +9,9 @@ pub mod middleware;
#[cfg(feature = "server")]
pub mod tls;
pub use server::build_router;
use serde::{Deserialize, Serialize};
use serde_json::Value;
pub use server::build_router;
/// Standard API response envelope
#[derive(Debug, Clone, Serialize, Deserialize)]

View File

@ -1,3 +1,5 @@
use std::sync::Arc;
#[cfg(feature = "server")]
use axum::{
extract::State,
@ -6,7 +8,6 @@ use axum::{
routing::{get, post},
Json, Router,
};
use std::sync::Arc;
use super::handlers;
use super::{ApiResponse, HealthResponse, SealRequest, SealStatus};
@ -181,9 +182,10 @@ pub fn build_router(_vault: Arc<VaultCore>) -> Router<()> {
#[cfg(test)]
mod tests {
use super::*;
use serde_json::json;
use super::*;
#[test]
fn test_api_response_success() {
let response = ApiResponse::success(json!({"key": "value"}));

View File

@ -1,4 +1,3 @@
use crate::error::{Result, VaultError};
#[cfg(feature = "server")]
use std::path::PathBuf;
@ -7,6 +6,8 @@ use rustls::ServerConfig;
#[cfg(feature = "server")]
use tokio_rustls::TlsAcceptor;
use crate::error::{Result, VaultError};
/// TLS/mTLS configuration from vault config
#[derive(Debug, Clone)]
pub struct TlsConfig {
@ -57,10 +58,11 @@ impl TlsConfig {
/// Create a rustls ServerConfig from certificate and key files
#[cfg(feature = "server")]
pub fn load_server_config(tls: &TlsConfig) -> Result<ServerConfig> {
use rustls::pki_types::CertificateDer;
use std::fs::File;
use std::io::BufReader;
use rustls::pki_types::CertificateDer;
// Validate paths first
tls.validate()?;
@ -100,11 +102,12 @@ pub fn load_server_config(tls: &TlsConfig) -> Result<ServerConfig> {
/// Create a rustls ServerConfig with mTLS (client certificate verification)
#[cfg(feature = "server")]
pub fn load_server_config_with_mtls(tls: &TlsConfig) -> Result<ServerConfig> {
use rustls::pki_types::CertificateDer;
use rustls::server::WebPkiClientVerifier;
use std::fs::File;
use std::io::BufReader;
use rustls::pki_types::CertificateDer;
use rustls::server::WebPkiClientVerifier;
// Validate paths first
tls.validate()?;
@ -187,10 +190,12 @@ pub fn create_tls_acceptor(tls: &TlsConfig) -> Result<TlsAcceptor> {
#[cfg(test)]
mod tests {
use super::*;
use std::fs;
use tempfile::TempDir;
use super::*;
fn create_test_cert_and_key(temp_dir: &TempDir) -> (PathBuf, PathBuf) {
// Create a self-signed certificate for testing
// Using openssl would require it as a dependency for tests,
@ -198,7 +203,8 @@ mod tests {
let cert_path = temp_dir.path().join("cert.pem");
let key_path = temp_dir.path().join("key.pem");
// Minimal self-signed cert (created with: openssl req -x509 -newkey rsa:2048 -keyout key.pem -out cert.pem -days 365 -nodes)
// Minimal self-signed cert (created with: openssl req -x509 -newkey rsa:2048
// -keyout key.pem -out cert.pem -days 365 -nodes)
let cert_content = r#"-----BEGIN CERTIFICATE-----
MIIDazCCAlOgAwIBAgIUfEYF3nU/nfKYZcKgkX9vZj0VqAAwDQYJKoZIhvcNAQEL
BQAwRTELMAkGA1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoM

View File

@ -1,14 +1,14 @@
use std::collections::HashMap;
use std::path::PathBuf;
use crate::error::{AuthError, AuthResult};
#[cfg(feature = "cedar")]
use {
cedar_policy::{Authorizer, Entities, PolicySet},
std::sync::{Arc, RwLock},
};
use crate::error::{AuthError, AuthResult};
/// Authorization decision result
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum AuthDecision {
@ -45,35 +45,14 @@ impl CedarEvaluator {
}
}
/// Load policies from the configured directory
pub fn load_policies(&self) -> AuthResult<()> {
if let Some(dir) = &self.policies_dir {
if !dir.exists() {
return Err(AuthError::CedarPolicy(format!(
"Policies directory not found: {}",
dir.display()
)));
/// Helper function to read and validate a single Cedar policy file
fn read_cedar_policy_file(path: &std::path::Path) -> AuthResult<Option<(String, String)>> {
let is_cedar = path.extension().and_then(|ext| ext.to_str()) == Some("cedar");
if !is_cedar {
return Ok(None);
}
let entries = std::fs::read_dir(dir).map_err(|e| {
AuthError::CedarPolicy(format!("Failed to read policies dir: {}", e))
})?;
#[cfg(feature = "cedar")]
{
use std::str::FromStr;
let mut all_policies = Vec::new();
let mut policy_count = 0;
for entry in entries {
let entry = entry.map_err(|e| {
AuthError::CedarPolicy(format!("Failed to read policy entry: {}", e))
})?;
let path = entry.path();
if path.extension().and_then(|ext| ext.to_str()) == Some("cedar") {
let policy_content = std::fs::read_to_string(&path).map_err(|e| {
let policy_content = std::fs::read_to_string(path).map_err(|e| {
AuthError::CedarPolicy(format!(
"Failed to read policy file {}: {}",
path.display(),
@ -81,25 +60,53 @@ impl CedarEvaluator {
))
})?;
all_policies.push((path.display().to_string(), policy_content));
policy_count += 1;
}
Ok(Some((path.display().to_string(), policy_content)))
}
if policy_count == 0 {
/// Load policies from the configured directory
pub fn load_policies(&self) -> AuthResult<()> {
let dir = match &self.policies_dir {
Some(d) => d,
None => return Ok(()),
};
if !dir.exists() {
return Err(AuthError::CedarPolicy(format!(
"Policies directory not found: {}",
dir.display()
)));
}
let entries = std::fs::read_dir(dir)
.map_err(|e| AuthError::CedarPolicy(format!("Failed to read policies dir: {}", e)))?;
#[cfg(feature = "cedar")]
{
use std::str::FromStr;
let all_policies: Result<Vec<_>, AuthError> = entries
.map(|entry| {
let entry = entry.map_err(|e| {
AuthError::CedarPolicy(format!("Failed to read policy entry: {}", e))
})?;
Self::read_cedar_policy_file(&entry.path())
})
.collect();
let all_policies: Vec<_> = all_policies?.into_iter().flatten().collect();
if all_policies.is_empty() {
return Err(AuthError::CedarPolicy(
"No Cedar policies found in configured directory".to_string(),
));
}
// Combine all policy files
let combined = all_policies
.iter()
.map(|(_, content)| content.as_str())
.collect::<Vec<_>>()
.join("\n");
// Parse policies from Cedar syntax
let policy_set = PolicySet::from_str(&combined).map_err(|e| {
AuthError::CedarPolicy(format!("Failed to parse Cedar policies: {}", e))
})?;
@ -109,34 +116,25 @@ impl CedarEvaluator {
#[cfg(not(feature = "cedar"))]
{
let mut policy_count = 0;
for entry in entries {
let policy_count: Result<usize, AuthError> = entries
.map(|entry| {
let entry = entry.map_err(|e| {
AuthError::CedarPolicy(format!("Failed to read policy entry: {}", e))
})?;
Ok(Self::read_cedar_policy_file(&entry.path())?.is_some() as usize)
})
.sum();
let path = entry.path();
if path.extension().and_then(|ext| ext.to_str()) == Some("cedar") {
let _policy_content = std::fs::read_to_string(&path).map_err(|e| {
AuthError::CedarPolicy(format!(
"Failed to read policy file {}: {}",
path.display(),
e
))
})?;
policy_count += 1;
}
}
if policy_count == 0 {
if policy_count? == 0 {
return Err(AuthError::CedarPolicy(
"No Cedar policies found in configured directory".to_string(),
));
}
// Without cedar feature, we can only validate files exist
tracing::warn!("Cedar feature not enabled - policy evaluation will not work. Compile with --features cedar");
}
tracing::warn!(
"Cedar feature not enabled - policy evaluation will not work. Compile with \
--features cedar"
);
}
Ok(())
@ -195,7 +193,8 @@ impl CedarEvaluator {
/// - principal: entity making the request (e.g., "user::alice")
/// - action: action being requested (e.g., "Action::read")
/// - resource: resource being accessed (e.g., "Secret::database_password")
/// - context: additional context for decision (e.g., IP address, MFA status)
/// - context: additional context for decision (e.g., IP address, MFA
/// status)
pub fn evaluate(
&self,
principal: &str,
@ -203,7 +202,8 @@ impl CedarEvaluator {
resource: &str,
context: Option<&HashMap<String, String>>,
) -> AuthResult<AuthDecision> {
// Note: principal, action, resource, context are used in cedar feature, unused without
// Note: principal, action, resource, context are used in cedar feature, unused
// without
#[allow(unused_variables)]
let _ = (principal, action, resource, context);
#[cfg(feature = "cedar")]
@ -291,10 +291,12 @@ impl CedarEvaluator {
#[cfg(test)]
mod tests {
use super::*;
use std::fs;
use tempfile::TempDir;
use super::*;
#[test]
fn test_cedar_evaluator_creation() {
let evaluator = CedarEvaluator::new(None, None);

View File

@ -1,3 +1,5 @@
use std::sync::Arc;
#[cfg(feature = "server")]
use axum::{
extract::Request,
@ -5,7 +7,6 @@ use axum::{
middleware::Next,
response::{IntoResponse, Response},
};
use std::sync::Arc;
#[cfg(feature = "server")]
use crate::core::VaultCore;
@ -59,7 +60,8 @@ impl IntoResponse for TokenValidationError {
}
#[cfg(feature = "server")]
/// Middleware for token validation (optional - checks if token is valid when present)
/// Middleware for token validation (optional - checks if token is valid when
/// present)
pub async fn optional_token_validation(
headers: HeaderMap,
vault: Arc<VaultCore>,
@ -81,7 +83,8 @@ pub async fn optional_token_validation(
}
#[cfg(feature = "server")]
/// Middleware for mandatory token validation (rejects requests without valid token)
/// Middleware for mandatory token validation (rejects requests without valid
/// token)
pub async fn required_token_validation(
headers: HeaderMap,
vault: Arc<VaultCore>,

View File

@ -5,7 +5,6 @@ pub mod token;
pub mod middleware;
pub use cedar::{AuthDecision, CedarEvaluator};
pub use token::{Token, TokenManager, TokenMetadata};
#[cfg(feature = "server")]
pub use middleware::{extract_bearer_token, TokenValidationError};
pub use token::{Token, TokenManager, TokenMetadata};

View File

@ -1,6 +1,7 @@
use std::sync::Arc;
use chrono::{DateTime, Duration, Utc};
use serde::{Deserialize, Serialize};
use std::sync::Arc;
use uuid::Uuid;
use crate::crypto::CryptoBackend;
@ -206,16 +207,19 @@ impl TokenManager {
let mut tokens = Vec::new();
for token_id in token_ids {
// Extract token ID from storage key
let parts: Vec<&str> = token_id.split('/').collect();
if let Some(id) = parts.last() {
if let Ok(Some(token)) = self.lookup(id).await {
let Some(id) = parts.last() else {
continue;
};
let Ok(Some(token)) = self.lookup(id).await else {
continue;
};
if token.metadata.client_id == client_id {
tokens.push(token);
}
}
}
}
Ok(tokens)
}
@ -223,11 +227,12 @@ impl TokenManager {
#[cfg(test)]
mod tests {
use tempfile::TempDir;
use super::*;
use crate::config::{FilesystemStorageConfig, StorageConfig};
use crate::crypto::CryptoRegistry;
use crate::storage::StorageRegistry;
use tempfile::TempDir;
async fn setup_token_manager() -> Result<(TokenManager, TempDir)> {
let temp_dir = TempDir::new().map_err(|e| VaultError::storage(e.to_string()))?;

View File

@ -1,15 +1,15 @@
use chrono::Utc;
use std::collections::VecDeque;
use std::sync::Arc;
use std::time::Duration;
use chrono::Utc;
use tokio::sync::RwLock;
use tokio::task::JoinHandle;
use crate::error::Result;
use crate::storage::{Lease, StorageBackend};
#[cfg(test)]
use crate::error::VaultError;
use crate::storage::{Lease, StorageBackend};
/// Configuration for lease revocation worker
#[derive(Debug, Clone)]
@ -250,10 +250,11 @@ impl LeaseRevocationWorker {
#[cfg(test)]
mod tests {
use tempfile::TempDir;
use super::*;
use crate::config::{FilesystemStorageConfig, StorageConfig};
use crate::storage::StorageRegistry;
use tempfile::TempDir;
async fn setup_worker() -> Result<(LeaseRevocationWorker, TempDir)> {
let temp_dir = TempDir::new().map_err(|e| VaultError::storage(e.to_string()))?;

View File

@ -1,10 +1,11 @@
#[cfg(feature = "cli")]
use crate::error::{Result, VaultError};
#[cfg(feature = "cli")]
use reqwest::{Client, Response, StatusCode};
#[cfg(feature = "cli")]
use serde_json::{json, Value};
#[cfg(feature = "cli")]
use crate::error::{Result, VaultError};
#[cfg(feature = "cli")]
pub struct VaultClient {
client: Client,

View File

@ -4,11 +4,12 @@ pub mod commands;
#[cfg(feature = "cli")]
pub mod client;
#[cfg(feature = "cli")]
use clap::{Parser, Subcommand};
#[cfg(feature = "cli")]
use std::path::PathBuf;
#[cfg(feature = "cli")]
use clap::{Parser, Subcommand};
#[cfg(feature = "cli")]
/// SecretumVault CLI - Post-quantum secrets management
#[derive(Parser)]

View File

@ -1,6 +1,7 @@
use serde::{Deserialize, Serialize};
use std::path::PathBuf;
use serde::{Deserialize, Serialize};
/// Authentication configuration
#[derive(Debug, Clone, Deserialize, Serialize, Default)]
pub struct AuthConfig {

View File

@ -1,6 +1,7 @@
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use serde::{Deserialize, Serialize};
/// Secrets engines configuration
#[derive(Debug, Clone, Deserialize, Serialize, Default)]
pub struct EnginesConfig {

View File

@ -1,6 +1,7 @@
use serde::{Deserialize, Serialize};
use std::path::PathBuf;
use serde::{Deserialize, Serialize};
/// Logging configuration
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct LoggingConfig {

View File

@ -10,6 +10,8 @@ mod telemetry;
mod vault;
// Re-export all public types
use std::path::Path;
pub use auth::{AuthConfig, CedarAuthConfig, TokenAuthConfig};
pub use crypto::{AwsLcCryptoConfig, CryptoConfig, OpenSSLCryptoConfig, RustCryptoCryptoConfig};
pub use engines::{EngineConfig, EnginesConfig};
@ -18,14 +20,12 @@ pub use logging::LoggingConfig;
pub use seal::{AutoUnsealConfig, SealConfig, ShamirSealConfig};
pub use server::ServerSection;
pub use storage::{
EtcdStorageConfig, FilesystemStorageConfig, PostgreSQLStorageConfig,
StorageConfig, SurrealDBStorageConfig,
EtcdStorageConfig, FilesystemStorageConfig, PostgreSQLStorageConfig, StorageConfig,
SurrealDBStorageConfig,
};
pub use telemetry::TelemetryConfig;
pub use vault::VaultSection;
use std::path::Path;
/// Main vault configuration
#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)]
pub struct VaultConfig {

View File

@ -1,6 +1,7 @@
use serde::{Deserialize, Serialize};
use std::path::PathBuf;
use serde::{Deserialize, Serialize};
/// Server configuration
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct ServerSection {

View File

@ -1,6 +1,7 @@
use serde::{Deserialize, Serialize};
use std::path::PathBuf;
use serde::{Deserialize, Serialize};
/// Storage configuration
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct StorageConfig {

View File

@ -2,6 +2,8 @@ use std::collections::HashMap;
use std::sync::Arc;
use crate::auth::TokenManager;
#[cfg(feature = "server")]
use crate::background::LeaseRevocationWorker;
use crate::config::VaultConfig;
use crate::crypto::CryptoBackend;
use crate::engines::{DatabaseEngine, Engine, KVEngine, PkiEngine, TransitEngine};
@ -9,9 +11,6 @@ use crate::error::Result;
use crate::storage::StorageBackend;
use crate::telemetry::Metrics;
#[cfg(feature = "server")]
use crate::background::LeaseRevocationWorker;
/// Vault core - manages engines, crypto backend, and storage
pub struct VaultCore {
/// Mounted secrets engines (mount_path -> engine)
@ -78,21 +77,21 @@ impl VaultCore {
/// Find engine by path prefix
pub fn route_to_engine(&self, path: &str) -> Option<&dyn Engine> {
// Find the longest matching mount path
let mut best_match: Option<(&str, &dyn Engine)> = None;
for (mount_path, engine) in &self.engines {
if path.starts_with(mount_path) {
match best_match {
None => best_match = Some((mount_path, engine.as_ref())),
Some((best_path, _)) => {
if mount_path.len() > best_path.len() {
if !path.starts_with(mount_path) {
continue;
}
let should_update = best_match
.map(|(best_path, _)| mount_path.len() > best_path.len())
.unwrap_or(true);
if should_update {
best_match = Some((mount_path, engine.as_ref()));
}
}
}
}
}
best_match.map(|(_, engine)| engine)
}
@ -102,17 +101,18 @@ impl VaultCore {
let mut best_match: Option<(&str, &str)> = None;
for mount_path in self.engines.keys() {
if path.starts_with(mount_path) {
match best_match {
None => best_match = Some((mount_path, path)),
Some((best_path, _)) => {
if mount_path.len() > best_path.len() {
if !path.starts_with(mount_path) {
continue;
}
let should_update = best_match
.map(|(best_path, _)| mount_path.len() > best_path.len())
.unwrap_or(true);
if should_update {
best_match = Some((mount_path, path));
}
}
}
}
}
best_match.map(|(mount_path, path)| {
let relative = if path.len() > mount_path.len() {
@ -193,11 +193,12 @@ impl EngineRegistry {
#[cfg(test)]
mod tests {
use tempfile::TempDir;
use super::*;
use crate::config::{
EngineConfig, FilesystemStorageConfig, SealConfig, ShamirSealConfig, StorageConfig,
};
use tempfile::TempDir;
fn create_test_vault_config(temp_dir: &TempDir) -> VaultConfig {
VaultConfig {

View File

@ -1,6 +1,7 @@
use std::sync::Arc;
use async_trait::async_trait;
use serde::{Deserialize, Serialize};
use std::sync::Arc;
use super::openssl_backend::OpenSSLBackend;
use crate::config::CryptoConfig;
@ -95,7 +96,8 @@ pub struct KeyPair {
pub public_key: PublicKey,
}
/// Crypto backend trait - abstraction over different cryptographic implementations
/// Crypto backend trait - abstraction over different cryptographic
/// implementations
#[async_trait]
pub trait CryptoBackend: Send + Sync + std::fmt::Debug {
/// Generate a keypair for the given algorithm

View File

@ -5,11 +5,10 @@ pub mod rustcrypto_backend;
#[cfg(feature = "aws-lc")]
pub mod aws_lc;
#[cfg(feature = "aws-lc")]
pub use aws_lc::AwsLcBackend;
pub use backend::{
CryptoBackend, CryptoRegistry, KeyAlgorithm, KeyPair, PrivateKey, PublicKey, SymmetricAlgorithm,
};
pub use openssl_backend::OpenSSLBackend;
pub use rustcrypto_backend::RustCryptoBackend;
#[cfg(feature = "aws-lc")]
pub use aws_lc::AwsLcBackend;

View File

@ -6,9 +6,10 @@
//! - Symmetric: AES-256-GCM, ChaCha20-Poly1305
//! - Hashing: SHA-256, SHA-512
use std::fmt;
use async_trait::async_trait;
use rand::RngCore;
use std::fmt;
use crate::crypto::backend::{
CryptoBackend, KeyAlgorithm, KeyPair, PrivateKey, PublicKey, SymmetricAlgorithm,

View File

@ -1,10 +1,11 @@
use std::collections::HashMap;
use std::sync::Arc;
use async_trait::async_trait;
use chrono::{Duration, Utc};
use rand::Rng;
use serde::{Deserialize, Serialize};
use serde_json::{json, Value};
use std::collections::HashMap;
use std::sync::Arc;
use tokio::sync::RwLock;
use super::Engine as SecretEngine;
@ -426,11 +427,12 @@ impl SecretEngine for DatabaseEngine {
#[cfg(test)]
mod tests {
use tempfile::TempDir;
use super::*;
use crate::config::{FilesystemStorageConfig, SealConfig, ShamirSealConfig, StorageConfig};
use crate::crypto::CryptoRegistry;
use crate::storage::StorageRegistry;
use tempfile::TempDir;
async fn setup_engine() -> Result<(DatabaseEngine, TempDir)> {
let temp_dir = TempDir::new().map_err(|e| VaultError::storage(e.to_string()))?;

View File

@ -1,8 +1,9 @@
use std::sync::Arc;
use async_trait::async_trait;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::sync::Arc;
use super::Engine;
use crate::core::SealMechanism;
@ -268,12 +269,13 @@ impl Engine for KVEngine {
#[cfg(test)]
mod tests {
use serde_json::json;
use tempfile::TempDir;
use super::*;
use crate::config::{FilesystemStorageConfig, SealConfig, ShamirSealConfig, StorageConfig};
use crate::crypto::CryptoRegistry;
use crate::storage::StorageRegistry;
use serde_json::json;
use tempfile::TempDir;
async fn setup_engine() -> Result<(KVEngine, TempDir, Arc<dyn CryptoBackend>)> {
let temp_dir = TempDir::new().map_err(|e| VaultError::storage(e.to_string()))?;

View File

@ -3,13 +3,12 @@ pub mod kv;
pub mod pki;
pub mod transit;
use async_trait::async_trait;
pub use database::DatabaseEngine;
pub use kv::KVEngine;
pub use pki::PkiEngine;
pub use transit::TransitEngine;
use async_trait::async_trait;
use serde_json::Value;
pub use transit::TransitEngine;
use crate::error::Result;

View File

@ -1,8 +1,9 @@
use std::sync::Arc;
use async_trait::async_trait;
use chrono::{Duration, Utc};
use serde::{Deserialize, Serialize};
use serde_json::{json, Value};
use std::sync::Arc;
use super::Engine as SecretEngine;
use crate::core::SealMechanism;
@ -556,11 +557,12 @@ impl SecretEngine for PkiEngine {
#[cfg(test)]
mod tests {
use tempfile::TempDir;
use super::*;
use crate::config::{FilesystemStorageConfig, SealConfig, ShamirSealConfig, StorageConfig};
use crate::crypto::CryptoRegistry;
use crate::storage::StorageRegistry;
use tempfile::TempDir;
async fn setup_engine() -> Result<(PkiEngine, TempDir)> {
let temp_dir = TempDir::new().map_err(|e| VaultError::storage(e.to_string()))?;

View File

@ -1,9 +1,10 @@
use std::collections::HashMap;
use std::sync::Arc;
use async_trait::async_trait;
use base64::engine::general_purpose::STANDARD as BASE64;
use base64::Engine as _;
use serde_json::{json, Value};
use std::collections::HashMap;
use std::sync::Arc;
use super::Engine;
use crate::core::SealMechanism;
@ -114,7 +115,8 @@ impl TransitEngine {
let current_version = key.current_version;
drop(keys);
// Encrypt plaintext using the current key version (lock is dropped before await)
// Encrypt plaintext using the current key version (lock is dropped before
// await)
let ciphertext = self
.crypto
.encrypt_symmetric(&key_material, plaintext, SymmetricAlgorithm::Aes256Gcm)
@ -214,7 +216,8 @@ impl Engine for TransitEngine {
.ok_or_else(|| VaultError::storage("Missing 'plaintext' in request".to_string()))?;
let _ciphertext = self.encrypt(key_name, plaintext.as_bytes()).await?;
// Note: In a full implementation, this would return the ciphertext in the response
// Note: In a full implementation, this would return the ciphertext
// in the response
} else if let Some(key_name) = path.strip_prefix("decrypt/") {
let ciphertext = data
.get("ciphertext")
@ -224,7 +227,8 @@ impl Engine for TransitEngine {
})?;
let _plaintext = self.decrypt(key_name, ciphertext).await?;
// Note: In a full implementation, this would return the plaintext in the response
// Note: In a full implementation, this would return the plaintext
// in the response
} else if let Some(key_name) = path.strip_prefix("rewrap/") {
let ciphertext = data
.get("ciphertext")
@ -234,7 +238,8 @@ impl Engine for TransitEngine {
})?;
let _new_ciphertext = self.rewrap(key_name, ciphertext).await?;
// Note: In a full implementation, this would return the new ciphertext in the response
// Note: In a full implementation, this would return the new
// ciphertext in the response
}
Ok(())
@ -279,11 +284,12 @@ impl Engine for TransitEngine {
#[cfg(test)]
mod tests {
use tempfile::TempDir;
use super::*;
use crate::config::{FilesystemStorageConfig, SealConfig, ShamirSealConfig, StorageConfig};
use crate::crypto::CryptoRegistry;
use crate::storage::StorageRegistry;
use tempfile::TempDir;
async fn setup_engine() -> Result<(TransitEngine, TempDir)> {
let temp_dir = TempDir::new().map_err(|e| VaultError::storage(e.to_string()))?;

View File

@ -1,5 +1,6 @@
use std::backtrace::Backtrace;
use std::fmt;
use thiserror::Error;
/// Main vault error type

View File

@ -1,16 +1,16 @@
#[cfg(feature = "cli")]
use std::path::PathBuf;
#[cfg(feature = "cli")]
use std::sync::Arc;
#[cfg(feature = "cli")]
use clap::Parser;
#[cfg(feature = "cli")]
use secretumvault::cli::{Cli, Command, OperatorCommand, SecretCommand};
#[cfg(feature = "cli")]
use secretumvault::config::VaultConfig;
#[cfg(feature = "cli")]
use secretumvault::core::VaultCore;
#[cfg(feature = "cli")]
use std::path::PathBuf;
#[cfg(feature = "cli")]
use std::sync::Arc;
#[tokio::main]
#[cfg(feature = "cli")]
@ -53,7 +53,10 @@ async fn server_command(
#[cfg(feature = "server")]
{
eprintln!("Note: Server mode via CLI is limited. Use library API with --features server for full functionality including TLS.");
eprintln!(
"Note: Server mode via CLI is limited. Use library API with --features server for \
full functionality including TLS."
);
eprintln!("Server feature not fully implemented in CLI mode.");
std::process::exit(1);
}

View File

@ -13,13 +13,15 @@
//! ```
//!
//! For development, run etcd with:
//! - Docker: `docker run -d --name etcd -p 2379:2379 quay.io/coreos/etcd:v3.5.0`
//! - Docker: `docker run -d --name etcd -p 2379:2379
//! quay.io/coreos/etcd:v3.5.0`
//! - Local: `etcd` (requires etcd binary installed)
use std::sync::Arc;
use async_trait::async_trait;
use chrono::{DateTime, Utc};
use serde_json::{json, Value};
use std::sync::Arc;
use crate::config::EtcdStorageConfig;
use crate::error::{StorageError, StorageResult};

View File

@ -1,8 +1,9 @@
use async_trait::async_trait;
use chrono::{DateTime, Utc};
use std::fs;
use std::path::{Path, PathBuf};
use async_trait::async_trait;
use chrono::{DateTime, Utc};
use crate::config::FilesystemStorageConfig;
use crate::error::StorageError;
use crate::storage::{
@ -314,9 +315,10 @@ impl StorageBackend for FilesystemBackend {
#[cfg(test)]
mod tests {
use super::*;
use tempfile::TempDir;
use super::*;
fn create_test_backend() -> (FilesystemBackend, TempDir) {
let temp_dir = TempDir::new().expect("Failed to create temp dir");
let config = FilesystemStorageConfig {

View File

@ -1,8 +1,9 @@
use std::collections::HashMap;
use std::sync::Arc;
use async_trait::async_trait;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::sync::Arc;
pub mod filesystem;
pub mod postgresql;
@ -13,17 +14,15 @@ pub mod surrealdb;
#[cfg(feature = "etcd-storage")]
pub mod etcd;
use crate::config::StorageConfig;
use crate::error::{Result, StorageResult};
#[cfg(feature = "etcd-storage")]
pub use etcd::EtcdBackend;
pub use filesystem::FilesystemBackend;
pub use postgresql::PostgreSQLBackend;
#[cfg(feature = "surrealdb-storage")]
pub use surrealdb::SurrealDBBackend;
#[cfg(feature = "etcd-storage")]
pub use etcd::EtcdBackend;
use crate::config::StorageConfig;
use crate::error::{Result, StorageResult};
/// Encrypted data stored in backend
#[derive(Debug, Clone, Serialize, Deserialize)]
@ -160,7 +159,8 @@ impl StorageRegistry {
}
if config.backend == "postgresql" && cfg!(not(feature = "postgresql-storage")) {
return Err(crate::VaultError::config(
"PostgreSQL backend not enabled. Compile with --features postgresql-storage"
"PostgreSQL backend not enabled. Compile with --features \
postgresql-storage",
));
}
Err(crate::VaultError::config(format!(

View File

@ -1,12 +1,14 @@
//! PostgreSQL storage backend for SecretumVault
//!
//! Provides persistent secret storage using PostgreSQL as the backend.
//! This implementation uses an in-memory store (production would use sqlx + real DB).
//! This implementation uses an in-memory store (production would use sqlx +
//! real DB).
use std::collections::HashMap;
use std::sync::Arc;
use async_trait::async_trait;
use chrono::{DateTime, Utc};
use std::collections::HashMap;
use std::sync::Arc;
use tokio::sync::RwLock;
use crate::config::PostgreSQLStorageConfig;

View File

@ -12,19 +12,20 @@
//! url = "ws://localhost:8000" # For future real SurrealDB connections
//! ```
use std::collections::HashMap;
use std::sync::Arc;
use async_trait::async_trait;
use chrono::{DateTime, Utc};
use serde_json::{json, Value};
use std::collections::HashMap;
use std::sync::Arc;
use tokio::sync::RwLock;
use crate::config::SurrealDBStorageConfig;
use crate::error::{StorageError, StorageResult};
use crate::storage::{EncryptedData, Lease, StorageBackend, StoredKey, StoredPolicy};
/// SurrealDB storage backend - in-memory implementation with SurrealDB semantics
/// Tables are organized as HashMap<table_name, HashMap<id, record>>
/// SurrealDB storage backend - in-memory implementation with SurrealDB
/// semantics Tables are organized as HashMap<table_name, HashMap<id, record>>
pub struct SurrealDBBackend {
store: Arc<RwLock<HashMap<String, HashMap<String, Value>>>>,
}

View File

@ -6,10 +6,11 @@
//! - Metrics collection and reporting
//! - Performance monitoring
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use std::sync::atomic::{AtomicU64, Ordering};
use std::sync::Arc;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use tokio::sync::RwLock;
use tracing::{debug, error, info, span, warn, Level};