chore: use +nightly for cargo fmt and fix pre-commit and just recipes
This commit is contained in:
parent 13af9fa932
commit 2cc472b0bf
.clippy.toml (new file, +17)
@@ -0,0 +1,17 @@
+# Generated by dev-system/ci
+# Clippy configuration for Rust linting
+
+# Lint level thresholds
+cognitive-complexity-threshold = 25
+type-complexity-threshold = 500
+excessive-nesting-threshold = 5
+
+# Allowed patterns (prevent lints on specific code)
+# allow-expect-in-tests = true
+# allow-unwrap-in-tests = true
+
+# Single-character variable name threshold
+single-char-binding-names-threshold = 4
+
+# Note: Lint configurations belong in Cargo.toml under [lints.clippy] or [workspace.lints.clippy]
+# This file only contains clippy configuration parameters, not lint levels
@@ -9,8 +9,8 @@ repos:
   - repo: local
     hooks:
       - id: rust-fmt
-        name: Rust formatting (cargo fmt)
-        entry: bash -c 'cargo fmt --all -- --check'
+        name: Rust formatting (cargo +nightly fmt)
+        entry: bash -c 'cargo +nightly fmt --all -- --check'
         language: system
        types: [rust]
         pass_filenames: false
@@ -1,5 +1,6 @@
 # Generated by dev-system/ci
 # Rustfmt configuration for consistent Rust code formatting
+# Configured for cargo +nightly fmt with advanced features enabled
 
 # Basic formatting options
 edition = "2021"
@@ -8,50 +9,45 @@ hard_tabs = false
 tab_spaces = 4
 newline_style = "Unix"
 
-# Comment formatting
-comment_width = 80
-wrap_comments = true
-
 # Code structure
 use_small_heuristics = "Default"
 
-# Spaces and indentation
-fn_single_line = false
-fn_args_layout = "Tall"
-where_single_line = false
-
-# Match expressions
-match_block_trailing_comma = false
-
 # Imports
 reorder_imports = true
 reorder_modules = true
 remove_nested_parens = true
 group_imports = "StdExternalCrate"
 
+# Match expressions
+match_block_trailing_comma = false
+
 # Chains
 chain_width = 60
-chain_indent = "Block"
 
-# Formatting
-format_strings = true
-format_code_in_doc_comments = false
+# Comment formatting (nightly)
+comment_width = 80
+wrap_comments = true
 normalize_comments = true
 normalize_doc_attributes = true
 
-# Line breaks
-match_arm_blocks = true
-overflow_delimited_expressions = false
-blank_lines_lower_bound = 0
-blank_lines_upper_bound = 1
+# Spaces and indentation (nightly)
+fn_single_line = false
+fn_params_layout = "Tall"
+where_single_line = false
 
-# Performance
-condense_wildcard_imports = false
+# Formatting (nightly)
+format_strings = true
+format_code_in_doc_comments = false
 
-# Spaces
+# Spaces (nightly)
 space_before_colon = false
 space_after_colon = true
 spaces_around_ranges = false
 
-# Stability
-unstable_features = false
+# Line breaks (nightly)
+match_arm_blocks = true
+blank_lines_lower_bound = 0
+blank_lines_upper_bound = 1
+
+# Enable nightly features
+unstable_features = true
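Context for the many `use`-statement hunks that follow: with `group_imports = "StdExternalCrate"` and `unstable_features = true`, `cargo +nightly fmt` regroups imports into three blocks — standard library, external crates, then `crate::`/`super::`/`self::` paths — separated by blank lines. A minimal, self-contained sketch of the resulting order (illustrative module and names, assuming a `serde_json` dependency, which this crate already uses):

```rust
use std::collections::HashMap; // group 1: standard library

use serde_json::json; // group 2: external crates

use self::demo::Label; // group 3: crate-local (`crate::`, `super::`, `self::`)

mod demo {
    pub struct Label(pub &'static str);
}

fn main() {
    // The grouping is purely cosmetic; behaviour is unchanged.
    let mut map = HashMap::new();
    map.insert("formatter", json!(Label("cargo +nightly fmt").0));
    println!("{map:?}");
}
```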
clippy.toml (deleted, 46 lines)
@@ -1,46 +0,0 @@
-# Generated by dev-system/ci
-# Clippy configuration for Rust linting
-
-# Lint level thresholds
-cognitive-complexity-threshold = 25
-type-complexity-threshold = 500
-excessive-nesting-threshold = 5
-
-# Allowed patterns (prevent lints on specific code)
-# allow-expect-in-tests = true
-# allow-unwrap-in-tests = true
-
-# Single-letter lifetime parameters
-single-char-lifetime-names-threshold = 4
-
-# Lint configuration
-[clippy]
-# Additional lints to enable by default
-enable = []
-
-# Specific lint configurations
-[lints]
-# Warn on panics in tests (but allow expect)
-"clippy::panic" = "warn"
-# Warn on todo! and unimplemented! macros
-"clippy::todo" = "warn"
-# Warn on large copies
-"clippy::large-include-file" = "warn"
-
-# These are good practices but not strict requirements
-"clippy::missing-docs-in-crate-items" = "allow"
-"clippy::missing-errors-doc" = "allow"
-
-# Performance lints
-"clippy::perf" = "warn"
-"clippy::single-match" = "warn"
-"clippy::match-bool" = "warn"
-
-# Style lints
-"clippy::style" = "warn"
-"clippy::all" = "warn"
-
-# Pedantic is too strict for production code, so warn only on important ones
-"clippy::pedantic" = "allow"
-"clippy::match-wild-err-arm" = "warn"
-"clippy::or-patterns" = "warn"
justfile (4 lines changed)
@@ -158,12 +158,12 @@ ci-full:
 # Format all code
 [doc("Format Rust code")]
 fmt:
-    cargo fmt --all
+    @just dev::fmt
 
 # Check formatting
 [doc("Check formatting without modifying")]
 fmt-check:
-    cargo fmt --all -- --check
+    @just dev::fmt-check
 
 # Run clippy linter
 [doc("Run clippy with all warnings denied")]
@@ -20,12 +20,12 @@ help:
 # Format all code
 [doc("Format all Rust code")]
 fmt:
-    cargo fmt --all
+    cargo +nightly fmt --all
 
 # Check formatting without modifying
 [doc("Check formatting")]
 fmt-check:
-    cargo fmt --all -- --check
+    cargo +nightly fmt --all -- --check
 
 # Lint with clippy
 [doc("Run clippy linter (all targets, all features)")]
@@ -1,3 +1,5 @@
+use std::sync::Arc;
+
 #[cfg(feature = "server")]
 use axum::{
     extract::{Path, State},
@@ -6,7 +8,6 @@ use axum::{
     Json,
 };
 use serde_json::{json, Value};
-use std::sync::Arc;
 
 use super::ApiResponse;
 use crate::core::VaultCore;
@@ -1,3 +1,5 @@
+use std::sync::Arc;
+
 /// API middleware for authentication and authorization
 use axum::{
     extract::{Request, State},
@@ -5,7 +7,6 @@ use axum::{
     middleware::Next,
     response::Response,
 };
-use std::sync::Arc;
 use tracing::{error, warn};
 
 use crate::auth::extract_bearer_token;
@@ -9,10 +9,9 @@ pub mod middleware;
 #[cfg(feature = "server")]
 pub mod tls;
 
-pub use server::build_router;
-
 use serde::{Deserialize, Serialize};
 use serde_json::Value;
+pub use server::build_router;
 
 /// Standard API response envelope
 #[derive(Debug, Clone, Serialize, Deserialize)]
@@ -1,3 +1,5 @@
+use std::sync::Arc;
+
 #[cfg(feature = "server")]
 use axum::{
     extract::State,
@@ -6,7 +8,6 @@ use axum::{
     routing::{get, post},
     Json, Router,
 };
-use std::sync::Arc;
 
 use super::handlers;
 use super::{ApiResponse, HealthResponse, SealRequest, SealStatus};
@@ -181,9 +182,10 @@ pub fn build_router(_vault: Arc<VaultCore>) -> Router<()> {
 
 #[cfg(test)]
 mod tests {
-    use super::*;
     use serde_json::json;
 
+    use super::*;
+
     #[test]
     fn test_api_response_success() {
         let response = ApiResponse::success(json!({"key": "value"}));
@@ -1,4 +1,3 @@
-use crate::error::{Result, VaultError};
 #[cfg(feature = "server")]
 use std::path::PathBuf;
 
@@ -7,6 +6,8 @@ use rustls::ServerConfig;
 #[cfg(feature = "server")]
 use tokio_rustls::TlsAcceptor;
 
+use crate::error::{Result, VaultError};
+
 /// TLS/mTLS configuration from vault config
 #[derive(Debug, Clone)]
 pub struct TlsConfig {
@@ -57,10 +58,11 @@ impl TlsConfig {
 /// Create a rustls ServerConfig from certificate and key files
 #[cfg(feature = "server")]
 pub fn load_server_config(tls: &TlsConfig) -> Result<ServerConfig> {
-    use rustls::pki_types::CertificateDer;
     use std::fs::File;
     use std::io::BufReader;
 
+    use rustls::pki_types::CertificateDer;
+
     // Validate paths first
     tls.validate()?;
 
@@ -100,11 +102,12 @@ pub fn load_server_config(tls: &TlsConfig) -> Result<ServerConfig> {
 /// Create a rustls ServerConfig with mTLS (client certificate verification)
 #[cfg(feature = "server")]
 pub fn load_server_config_with_mtls(tls: &TlsConfig) -> Result<ServerConfig> {
-    use rustls::pki_types::CertificateDer;
-    use rustls::server::WebPkiClientVerifier;
     use std::fs::File;
     use std::io::BufReader;
 
+    use rustls::pki_types::CertificateDer;
+    use rustls::server::WebPkiClientVerifier;
+
     // Validate paths first
     tls.validate()?;
 
@@ -187,10 +190,12 @@ pub fn create_tls_acceptor(tls: &TlsConfig) -> Result<TlsAcceptor> {
 
 #[cfg(test)]
 mod tests {
-    use super::*;
     use std::fs;
 
     use tempfile::TempDir;
 
+    use super::*;
+
     fn create_test_cert_and_key(temp_dir: &TempDir) -> (PathBuf, PathBuf) {
         // Create a self-signed certificate for testing
         // Using openssl would require it as a dependency for tests,
@@ -198,7 +203,8 @@ mod tests {
         let cert_path = temp_dir.path().join("cert.pem");
         let key_path = temp_dir.path().join("key.pem");
 
-        // Minimal self-signed cert (created with: openssl req -x509 -newkey rsa:2048 -keyout key.pem -out cert.pem -days 365 -nodes)
+        // Minimal self-signed cert (created with: openssl req -x509 -newkey rsa:2048
+        // -keyout key.pem -out cert.pem -days 365 -nodes)
         let cert_content = r#"-----BEGIN CERTIFICATE-----
 MIIDazCCAlOgAwIBAgIUfEYF3nU/nfKYZcKgkX9vZj0VqAAwDQYJKoZIhvcNAQEL
 BQAwRTELMAkGA1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoM
@@ -1,14 +1,14 @@
 use std::collections::HashMap;
 use std::path::PathBuf;
 
-use crate::error::{AuthError, AuthResult};
-
 #[cfg(feature = "cedar")]
 use {
     cedar_policy::{Authorizer, Entities, PolicySet},
     std::sync::{Arc, RwLock},
 };
 
+use crate::error::{AuthError, AuthResult};
+
 /// Authorization decision result
 #[derive(Debug, Clone, Copy, PartialEq, Eq)]
 pub enum AuthDecision {
@@ -45,35 +45,14 @@ impl CedarEvaluator {
         }
     }
 
-    /// Load policies from the configured directory
-    pub fn load_policies(&self) -> AuthResult<()> {
-        if let Some(dir) = &self.policies_dir {
-            if !dir.exists() {
-                return Err(AuthError::CedarPolicy(format!(
-                    "Policies directory not found: {}",
-                    dir.display()
-                )));
+    /// Helper function to read and validate a single Cedar policy file
+    fn read_cedar_policy_file(path: &std::path::Path) -> AuthResult<Option<(String, String)>> {
+        let is_cedar = path.extension().and_then(|ext| ext.to_str()) == Some("cedar");
+        if !is_cedar {
+            return Ok(None);
         }
 
-            let entries = std::fs::read_dir(dir).map_err(|e| {
-                AuthError::CedarPolicy(format!("Failed to read policies dir: {}", e))
-            })?;
-
-            #[cfg(feature = "cedar")]
-            {
-                use std::str::FromStr;
-
-                let mut all_policies = Vec::new();
-                let mut policy_count = 0;
-
-                for entry in entries {
-                    let entry = entry.map_err(|e| {
-                        AuthError::CedarPolicy(format!("Failed to read policy entry: {}", e))
-                    })?;
-
-                    let path = entry.path();
-                    if path.extension().and_then(|ext| ext.to_str()) == Some("cedar") {
-                        let policy_content = std::fs::read_to_string(&path).map_err(|e| {
+        let policy_content = std::fs::read_to_string(path).map_err(|e| {
             AuthError::CedarPolicy(format!(
                 "Failed to read policy file {}: {}",
                 path.display(),
@@ -81,25 +60,53 @@ impl CedarEvaluator {
                 ))
             })?;
 
-                        all_policies.push((path.display().to_string(), policy_content));
-                        policy_count += 1;
-                    }
+        Ok(Some((path.display().to_string(), policy_content)))
     }
 
-            if policy_count == 0 {
+    /// Load policies from the configured directory
+    pub fn load_policies(&self) -> AuthResult<()> {
+        let dir = match &self.policies_dir {
+            Some(d) => d,
+            None => return Ok(()),
+        };
+
+        if !dir.exists() {
+            return Err(AuthError::CedarPolicy(format!(
+                "Policies directory not found: {}",
+                dir.display()
+            )));
+        }
+
+        let entries = std::fs::read_dir(dir)
+            .map_err(|e| AuthError::CedarPolicy(format!("Failed to read policies dir: {}", e)))?;
+
+        #[cfg(feature = "cedar")]
+        {
+            use std::str::FromStr;
+
+            let all_policies: Result<Vec<_>, AuthError> = entries
+                .map(|entry| {
+                    let entry = entry.map_err(|e| {
+                        AuthError::CedarPolicy(format!("Failed to read policy entry: {}", e))
+                    })?;
+                    Self::read_cedar_policy_file(&entry.path())
+                })
+                .collect();
+
+            let all_policies: Vec<_> = all_policies?.into_iter().flatten().collect();
+
+            if all_policies.is_empty() {
                 return Err(AuthError::CedarPolicy(
                     "No Cedar policies found in configured directory".to_string(),
                 ));
             }
 
-            // Combine all policy files
             let combined = all_policies
                 .iter()
                 .map(|(_, content)| content.as_str())
                 .collect::<Vec<_>>()
                 .join("\n");
 
-            // Parse policies from Cedar syntax
             let policy_set = PolicySet::from_str(&combined).map_err(|e| {
                 AuthError::CedarPolicy(format!("Failed to parse Cedar policies: {}", e))
             })?;
@@ -109,34 +116,25 @@ impl CedarEvaluator {
 
         #[cfg(not(feature = "cedar"))]
         {
-            let mut policy_count = 0;
-            for entry in entries {
+            let policy_count: Result<usize, AuthError> = entries
+                .map(|entry| {
                     let entry = entry.map_err(|e| {
                         AuthError::CedarPolicy(format!("Failed to read policy entry: {}", e))
                     })?;
+                    Ok(Self::read_cedar_policy_file(&entry.path())?.is_some() as usize)
+                })
+                .sum();
 
-                let path = entry.path();
-                if path.extension().and_then(|ext| ext.to_str()) == Some("cedar") {
-                    let _policy_content = std::fs::read_to_string(&path).map_err(|e| {
-                        AuthError::CedarPolicy(format!(
-                            "Failed to read policy file {}: {}",
-                            path.display(),
-                            e
-                        ))
-                    })?;
-                    policy_count += 1;
-                }
-            }
-
-            if policy_count == 0 {
+            if policy_count? == 0 {
                 return Err(AuthError::CedarPolicy(
                     "No Cedar policies found in configured directory".to_string(),
                 ));
             }
 
-            // Without cedar feature, we can only validate files exist
-            tracing::warn!("Cedar feature not enabled - policy evaluation will not work. Compile with --features cedar");
-        }
+            tracing::warn!(
+                "Cedar feature not enabled - policy evaluation will not work. Compile with \
+                 --features cedar"
+            );
         }
 
         Ok(())
@@ -195,7 +193,8 @@ impl CedarEvaluator {
     /// - principal: entity making the request (e.g., "user::alice")
     /// - action: action being requested (e.g., "Action::read")
    /// - resource: resource being accessed (e.g., "Secret::database_password")
-    /// - context: additional context for decision (e.g., IP address, MFA status)
+    /// - context: additional context for decision (e.g., IP address, MFA
+    /// status)
     pub fn evaluate(
         &self,
         principal: &str,
@@ -203,7 +202,8 @@ impl CedarEvaluator {
         resource: &str,
         context: Option<&HashMap<String, String>>,
     ) -> AuthResult<AuthDecision> {
-        // Note: principal, action, resource, context are used in cedar feature, unused without
+        // Note: principal, action, resource, context are used in cedar feature, unused
+        // without
         #[allow(unused_variables)]
         let _ = (principal, action, resource, context);
         #[cfg(feature = "cedar")]
@@ -291,10 +291,12 @@ impl CedarEvaluator {
 
 #[cfg(test)]
 mod tests {
-    use super::*;
     use std::fs;
 
     use tempfile::TempDir;
 
+    use super::*;
+
     #[test]
     fn test_cedar_evaluator_creation() {
         let evaluator = CedarEvaluator::new(None, None);
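The `load_policies` refactor above extracts `read_cedar_policy_file` and replaces the manual loop-and-counter with an iterator collected into a `Result`. A standalone sketch of that collect-into-`Result` pattern (names here are illustrative, not this crate's API):

```rust
use std::num::ParseIntError;

fn parse_all(inputs: &[&str]) -> Result<Vec<i32>, ParseIntError> {
    // Each element maps to a Result; collecting into Result<Vec<_>, _>
    // short-circuits on the first Err, which is how the refactored code
    // propagates a bad policy file instead of tracking a mutable counter.
    inputs.iter().map(|s| s.parse::<i32>()).collect()
}

fn main() {
    assert_eq!(parse_all(&["1", "2", "3"]), Ok(vec![1, 2, 3]));
    assert!(parse_all(&["1", "oops"]).is_err());
}
```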
@@ -1,3 +1,5 @@
+use std::sync::Arc;
+
 #[cfg(feature = "server")]
 use axum::{
     extract::Request,
@@ -5,7 +7,6 @@ use axum::{
     middleware::Next,
     response::{IntoResponse, Response},
 };
-use std::sync::Arc;
 
 #[cfg(feature = "server")]
 use crate::core::VaultCore;
@@ -59,7 +60,8 @@ impl IntoResponse for TokenValidationError {
 }
 
 #[cfg(feature = "server")]
-/// Middleware for token validation (optional - checks if token is valid when present)
+/// Middleware for token validation (optional - checks if token is valid when
+/// present)
 pub async fn optional_token_validation(
     headers: HeaderMap,
     vault: Arc<VaultCore>,
@@ -81,7 +83,8 @@ pub async fn optional_token_validation(
 }
 
 #[cfg(feature = "server")]
-/// Middleware for mandatory token validation (rejects requests without valid token)
+/// Middleware for mandatory token validation (rejects requests without valid
+/// token)
 pub async fn required_token_validation(
     headers: HeaderMap,
     vault: Arc<VaultCore>,
@@ -5,7 +5,6 @@ pub mod token;
 pub mod middleware;
 
 pub use cedar::{AuthDecision, CedarEvaluator};
-pub use token::{Token, TokenManager, TokenMetadata};
 
 #[cfg(feature = "server")]
 pub use middleware::{extract_bearer_token, TokenValidationError};
+pub use token::{Token, TokenManager, TokenMetadata};
@@ -1,6 +1,7 @@
+use std::sync::Arc;
+
 use chrono::{DateTime, Duration, Utc};
 use serde::{Deserialize, Serialize};
-use std::sync::Arc;
 use uuid::Uuid;
 
 use crate::crypto::CryptoBackend;
@@ -206,16 +207,19 @@ impl TokenManager {
 
         let mut tokens = Vec::new();
         for token_id in token_ids {
-            // Extract token ID from storage key
             let parts: Vec<&str> = token_id.split('/').collect();
-            if let Some(id) = parts.last() {
-                if let Ok(Some(token)) = self.lookup(id).await {
+            let Some(id) = parts.last() else {
+                continue;
+            };
+
+            let Ok(Some(token)) = self.lookup(id).await else {
+                continue;
+            };
+
             if token.metadata.client_id == client_id {
                 tokens.push(token);
             }
         }
-            }
-        }
 
         Ok(tokens)
     }
@@ -223,11 +227,12 @@ impl TokenManager {
 
 #[cfg(test)]
 mod tests {
+    use tempfile::TempDir;
+
     use super::*;
     use crate::config::{FilesystemStorageConfig, StorageConfig};
     use crate::crypto::CryptoRegistry;
     use crate::storage::StorageRegistry;
-    use tempfile::TempDir;
 
     async fn setup_token_manager() -> Result<(TokenManager, TempDir)> {
         let temp_dir = TempDir::new().map_err(|e| VaultError::storage(e.to_string()))?;
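The `list_tokens` change above swaps nested `if let` blocks for `let ... else` guards so the happy path stays at one indentation level. A small sketch of the same guard pattern on illustrative data:

```rust
fn token_id(storage_key: &str) -> Option<String> {
    let parts: Vec<&str> = storage_key.split('/').collect();
    // `let ... else` binds on the success pattern and must diverge
    // (return/continue/break) in the else arm.
    let Some(&id) = parts.last() else {
        return None;
    };
    if id.is_empty() {
        return None;
    }
    Some(id.to_string())
}

fn main() {
    assert_eq!(token_id("auth/token/abc123").as_deref(), Some("abc123"));
    assert_eq!(token_id("auth/token/"), None);
}
```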
@@ -1,15 +1,15 @@
-use chrono::Utc;
 use std::collections::VecDeque;
 use std::sync::Arc;
 use std::time::Duration;
 
+use chrono::Utc;
 use tokio::sync::RwLock;
 use tokio::task::JoinHandle;
 
 use crate::error::Result;
-use crate::storage::{Lease, StorageBackend};
 
 #[cfg(test)]
 use crate::error::VaultError;
+use crate::storage::{Lease, StorageBackend};
 
 /// Configuration for lease revocation worker
 #[derive(Debug, Clone)]
@@ -250,10 +250,11 @@ impl LeaseRevocationWorker {
 
 #[cfg(test)]
 mod tests {
+    use tempfile::TempDir;
+
     use super::*;
     use crate::config::{FilesystemStorageConfig, StorageConfig};
     use crate::storage::StorageRegistry;
-    use tempfile::TempDir;
 
     async fn setup_worker() -> Result<(LeaseRevocationWorker, TempDir)> {
         let temp_dir = TempDir::new().map_err(|e| VaultError::storage(e.to_string()))?;
@@ -1,10 +1,11 @@
 #[cfg(feature = "cli")]
-use crate::error::{Result, VaultError};
-#[cfg(feature = "cli")]
 use reqwest::{Client, Response, StatusCode};
 #[cfg(feature = "cli")]
 use serde_json::{json, Value};
 
+#[cfg(feature = "cli")]
+use crate::error::{Result, VaultError};
+
 #[cfg(feature = "cli")]
 pub struct VaultClient {
     client: Client,
@@ -4,11 +4,12 @@ pub mod commands;
 #[cfg(feature = "cli")]
 pub mod client;
 
-#[cfg(feature = "cli")]
-use clap::{Parser, Subcommand};
 #[cfg(feature = "cli")]
 use std::path::PathBuf;
 
+#[cfg(feature = "cli")]
+use clap::{Parser, Subcommand};
+
 #[cfg(feature = "cli")]
 /// SecretumVault CLI - Post-quantum secrets management
 #[derive(Parser)]
@@ -1,6 +1,7 @@
-use serde::{Deserialize, Serialize};
 use std::path::PathBuf;
 
+use serde::{Deserialize, Serialize};
+
 /// Authentication configuration
 #[derive(Debug, Clone, Deserialize, Serialize, Default)]
 pub struct AuthConfig {
@@ -1,6 +1,7 @@
-use serde::{Deserialize, Serialize};
 use std::collections::HashMap;
 
+use serde::{Deserialize, Serialize};
+
 /// Secrets engines configuration
 #[derive(Debug, Clone, Deserialize, Serialize, Default)]
 pub struct EnginesConfig {
@@ -1,6 +1,7 @@
-use serde::{Deserialize, Serialize};
 use std::path::PathBuf;
 
+use serde::{Deserialize, Serialize};
+
 /// Logging configuration
 #[derive(Debug, Clone, Deserialize, Serialize)]
 pub struct LoggingConfig {
@@ -10,6 +10,8 @@ mod telemetry;
 mod vault;
 
 // Re-export all public types
+use std::path::Path;
+
 pub use auth::{AuthConfig, CedarAuthConfig, TokenAuthConfig};
 pub use crypto::{AwsLcCryptoConfig, CryptoConfig, OpenSSLCryptoConfig, RustCryptoCryptoConfig};
 pub use engines::{EngineConfig, EnginesConfig};
@@ -18,14 +20,12 @@ pub use logging::LoggingConfig;
 pub use seal::{AutoUnsealConfig, SealConfig, ShamirSealConfig};
 pub use server::ServerSection;
 pub use storage::{
-    EtcdStorageConfig, FilesystemStorageConfig, PostgreSQLStorageConfig,
-    StorageConfig, SurrealDBStorageConfig,
+    EtcdStorageConfig, FilesystemStorageConfig, PostgreSQLStorageConfig, StorageConfig,
+    SurrealDBStorageConfig,
 };
 pub use telemetry::TelemetryConfig;
 pub use vault::VaultSection;
 
-use std::path::Path;
-
 /// Main vault configuration
 #[derive(Debug, Clone, serde::Deserialize, serde::Serialize)]
 pub struct VaultConfig {
@@ -1,6 +1,7 @@
-use serde::{Deserialize, Serialize};
 use std::path::PathBuf;
 
+use serde::{Deserialize, Serialize};
+
 /// Server configuration
 #[derive(Debug, Clone, Deserialize, Serialize)]
 pub struct ServerSection {
@@ -1,6 +1,7 @@
-use serde::{Deserialize, Serialize};
 use std::path::PathBuf;
 
+use serde::{Deserialize, Serialize};
+
 /// Storage configuration
 #[derive(Debug, Clone, Deserialize, Serialize)]
 pub struct StorageConfig {
@@ -2,6 +2,8 @@ use std::collections::HashMap;
 use std::sync::Arc;
 
 use crate::auth::TokenManager;
+#[cfg(feature = "server")]
+use crate::background::LeaseRevocationWorker;
 use crate::config::VaultConfig;
 use crate::crypto::CryptoBackend;
 use crate::engines::{DatabaseEngine, Engine, KVEngine, PkiEngine, TransitEngine};
@@ -9,9 +11,6 @@ use crate::error::Result;
 use crate::storage::StorageBackend;
 use crate::telemetry::Metrics;
 
-#[cfg(feature = "server")]
-use crate::background::LeaseRevocationWorker;
-
 /// Vault core - manages engines, crypto backend, and storage
 pub struct VaultCore {
     /// Mounted secrets engines (mount_path -> engine)
@@ -78,21 +77,21 @@ impl VaultCore {
 
     /// Find engine by path prefix
     pub fn route_to_engine(&self, path: &str) -> Option<&dyn Engine> {
-        // Find the longest matching mount path
         let mut best_match: Option<(&str, &dyn Engine)> = None;
 
         for (mount_path, engine) in &self.engines {
-            if path.starts_with(mount_path) {
-                match best_match {
-                    None => best_match = Some((mount_path, engine.as_ref())),
-                    Some((best_path, _)) => {
-                        if mount_path.len() > best_path.len() {
+            if !path.starts_with(mount_path) {
+                continue;
+            }
+
+            let should_update = best_match
+                .map(|(best_path, _)| mount_path.len() > best_path.len())
+                .unwrap_or(true);
+
+            if should_update {
                 best_match = Some((mount_path, engine.as_ref()));
             }
         }
-                    }
-                }
-            }
-        }
 
         best_match.map(|(_, engine)| engine)
     }
@@ -102,17 +101,18 @@ impl VaultCore {
         let mut best_match: Option<(&str, &str)> = None;
 
         for mount_path in self.engines.keys() {
-            if path.starts_with(mount_path) {
-                match best_match {
-                    None => best_match = Some((mount_path, path)),
-                    Some((best_path, _)) => {
-                        if mount_path.len() > best_path.len() {
+            if !path.starts_with(mount_path) {
+                continue;
+            }
+
+            let should_update = best_match
+                .map(|(best_path, _)| mount_path.len() > best_path.len())
+                .unwrap_or(true);
+
+            if should_update {
                 best_match = Some((mount_path, path));
             }
         }
-                    }
-                }
-            }
-        }
 
         best_match.map(|(mount_path, path)| {
             let relative = if path.len() > mount_path.len() {
@@ -193,11 +193,12 @@ impl EngineRegistry {
 
 #[cfg(test)]
 mod tests {
+    use tempfile::TempDir;
+
     use super::*;
     use crate::config::{
         EngineConfig, FilesystemStorageConfig, SealConfig, ShamirSealConfig, StorageConfig,
     };
-    use tempfile::TempDir;
 
     fn create_test_vault_config(temp_dir: &TempDir) -> VaultConfig {
         VaultConfig {
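The `route_to_engine` and path-resolution hunks above flatten the nested `match` by continuing early on non-matching mounts and deciding the update with an `Option` combinator. A standalone sketch of the same longest-prefix selection (the mount paths here are made up):

```rust
fn longest_mount<'a>(mounts: &[&'a str], path: &str) -> Option<&'a str> {
    let mut best: Option<&str> = None;
    for &mount in mounts {
        if !path.starts_with(mount) {
            continue; // early exit keeps the loop body flat
        }
        // Take this mount if there is no best yet, or if it is a longer prefix.
        let should_update = best.map(|b| mount.len() > b.len()).unwrap_or(true);
        if should_update {
            best = Some(mount);
        }
    }
    best
}

fn main() {
    let mounts = ["secret/", "secret/app/", "transit/"];
    assert_eq!(longest_mount(&mounts, "secret/app/db"), Some("secret/app/"));
    assert_eq!(longest_mount(&mounts, "pki/issue"), None);
}
```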
@@ -1,6 +1,7 @@
+use std::sync::Arc;
+
 use async_trait::async_trait;
 use serde::{Deserialize, Serialize};
-use std::sync::Arc;
 
 use super::openssl_backend::OpenSSLBackend;
 use crate::config::CryptoConfig;
@@ -95,7 +96,8 @@ pub struct KeyPair {
     pub public_key: PublicKey,
 }
 
-/// Crypto backend trait - abstraction over different cryptographic implementations
+/// Crypto backend trait - abstraction over different cryptographic
+/// implementations
 #[async_trait]
 pub trait CryptoBackend: Send + Sync + std::fmt::Debug {
     /// Generate a keypair for the given algorithm
@@ -5,11 +5,10 @@ pub mod rustcrypto_backend;
 #[cfg(feature = "aws-lc")]
 pub mod aws_lc;
 
+#[cfg(feature = "aws-lc")]
+pub use aws_lc::AwsLcBackend;
 pub use backend::{
     CryptoBackend, CryptoRegistry, KeyAlgorithm, KeyPair, PrivateKey, PublicKey, SymmetricAlgorithm,
 };
 pub use openssl_backend::OpenSSLBackend;
 pub use rustcrypto_backend::RustCryptoBackend;
-
-#[cfg(feature = "aws-lc")]
-pub use aws_lc::AwsLcBackend;
@@ -6,9 +6,10 @@
 //! - Symmetric: AES-256-GCM, ChaCha20-Poly1305
 //! - Hashing: SHA-256, SHA-512
 
+use std::fmt;
+
 use async_trait::async_trait;
 use rand::RngCore;
-use std::fmt;
 
 use crate::crypto::backend::{
     CryptoBackend, KeyAlgorithm, KeyPair, PrivateKey, PublicKey, SymmetricAlgorithm,
@@ -1,10 +1,11 @@
+use std::collections::HashMap;
+use std::sync::Arc;
+
 use async_trait::async_trait;
 use chrono::{Duration, Utc};
 use rand::Rng;
 use serde::{Deserialize, Serialize};
 use serde_json::{json, Value};
-use std::collections::HashMap;
-use std::sync::Arc;
 use tokio::sync::RwLock;
 
 use super::Engine as SecretEngine;
@@ -426,11 +427,12 @@ impl SecretEngine for DatabaseEngine {
 
 #[cfg(test)]
 mod tests {
+    use tempfile::TempDir;
+
     use super::*;
     use crate::config::{FilesystemStorageConfig, SealConfig, ShamirSealConfig, StorageConfig};
     use crate::crypto::CryptoRegistry;
     use crate::storage::StorageRegistry;
-    use tempfile::TempDir;
 
     async fn setup_engine() -> Result<(DatabaseEngine, TempDir)> {
         let temp_dir = TempDir::new().map_err(|e| VaultError::storage(e.to_string()))?;
@@ -1,8 +1,9 @@
+use std::sync::Arc;
+
 use async_trait::async_trait;
 use chrono::{DateTime, Utc};
 use serde::{Deserialize, Serialize};
 use serde_json::Value;
-use std::sync::Arc;
 
 use super::Engine;
 use crate::core::SealMechanism;
@@ -268,12 +269,13 @@ impl Engine for KVEngine {
 
 #[cfg(test)]
 mod tests {
+    use serde_json::json;
+    use tempfile::TempDir;
+
     use super::*;
     use crate::config::{FilesystemStorageConfig, SealConfig, ShamirSealConfig, StorageConfig};
     use crate::crypto::CryptoRegistry;
     use crate::storage::StorageRegistry;
-    use serde_json::json;
-    use tempfile::TempDir;
 
     async fn setup_engine() -> Result<(KVEngine, TempDir, Arc<dyn CryptoBackend>)> {
         let temp_dir = TempDir::new().map_err(|e| VaultError::storage(e.to_string()))?;
@@ -3,13 +3,12 @@ pub mod kv;
 pub mod pki;
 pub mod transit;
 
+use async_trait::async_trait;
 pub use database::DatabaseEngine;
 pub use kv::KVEngine;
 pub use pki::PkiEngine;
-pub use transit::TransitEngine;
-
-use async_trait::async_trait;
 use serde_json::Value;
+pub use transit::TransitEngine;
 
 use crate::error::Result;
 
@@ -1,8 +1,9 @@
+use std::sync::Arc;
+
 use async_trait::async_trait;
 use chrono::{Duration, Utc};
 use serde::{Deserialize, Serialize};
 use serde_json::{json, Value};
-use std::sync::Arc;
 
 use super::Engine as SecretEngine;
 use crate::core::SealMechanism;
@@ -556,11 +557,12 @@ impl SecretEngine for PkiEngine {
 
 #[cfg(test)]
 mod tests {
+    use tempfile::TempDir;
+
     use super::*;
     use crate::config::{FilesystemStorageConfig, SealConfig, ShamirSealConfig, StorageConfig};
     use crate::crypto::CryptoRegistry;
     use crate::storage::StorageRegistry;
-    use tempfile::TempDir;
 
     async fn setup_engine() -> Result<(PkiEngine, TempDir)> {
         let temp_dir = TempDir::new().map_err(|e| VaultError::storage(e.to_string()))?;
@@ -1,9 +1,10 @@
+use std::collections::HashMap;
+use std::sync::Arc;
+
 use async_trait::async_trait;
 use base64::engine::general_purpose::STANDARD as BASE64;
 use base64::Engine as _;
 use serde_json::{json, Value};
-use std::collections::HashMap;
-use std::sync::Arc;
 
 use super::Engine;
 use crate::core::SealMechanism;
@@ -114,7 +115,8 @@ impl TransitEngine {
         let current_version = key.current_version;
         drop(keys);
 
-        // Encrypt plaintext using the current key version (lock is dropped before await)
+        // Encrypt plaintext using the current key version (lock is dropped before
+        // await)
         let ciphertext = self
             .crypto
             .encrypt_symmetric(&key_material, plaintext, SymmetricAlgorithm::Aes256Gcm)
@@ -214,7 +216,8 @@ impl Engine for TransitEngine {
                 .ok_or_else(|| VaultError::storage("Missing 'plaintext' in request".to_string()))?;
 
             let _ciphertext = self.encrypt(key_name, plaintext.as_bytes()).await?;
-            // Note: In a full implementation, this would return the ciphertext in the response
+            // Note: In a full implementation, this would return the ciphertext
+            // in the response
         } else if let Some(key_name) = path.strip_prefix("decrypt/") {
             let ciphertext = data
                 .get("ciphertext")
@@ -224,7 +227,8 @@ impl Engine for TransitEngine {
                 })?;
 
             let _plaintext = self.decrypt(key_name, ciphertext).await?;
-            // Note: In a full implementation, this would return the plaintext in the response
+            // Note: In a full implementation, this would return the plaintext
+            // in the response
         } else if let Some(key_name) = path.strip_prefix("rewrap/") {
             let ciphertext = data
                 .get("ciphertext")
@@ -234,7 +238,8 @@ impl Engine for TransitEngine {
                 })?;
 
             let _new_ciphertext = self.rewrap(key_name, ciphertext).await?;
-            // Note: In a full implementation, this would return the new ciphertext in the response
+            // Note: In a full implementation, this would return the new
+            // ciphertext in the response
         }
 
         Ok(())
@@ -279,11 +284,12 @@ impl Engine for TransitEngine {
 
 #[cfg(test)]
 mod tests {
+    use tempfile::TempDir;
+
     use super::*;
     use crate::config::{FilesystemStorageConfig, SealConfig, ShamirSealConfig, StorageConfig};
     use crate::crypto::CryptoRegistry;
     use crate::storage::StorageRegistry;
-    use tempfile::TempDir;
 
     async fn setup_engine() -> Result<(TransitEngine, TempDir)> {
         let temp_dir = TempDir::new().map_err(|e| VaultError::storage(e.to_string()))?;
@@ -1,5 +1,6 @@
 use std::backtrace::Backtrace;
 use std::fmt;
+
 use thiserror::Error;
 
 /// Main vault error type
src/main.rs (15 lines changed)
@@ -1,16 +1,16 @@
+#[cfg(feature = "cli")]
+use std::path::PathBuf;
+#[cfg(feature = "cli")]
+use std::sync::Arc;
+
 #[cfg(feature = "cli")]
 use clap::Parser;
 
 #[cfg(feature = "cli")]
 use secretumvault::cli::{Cli, Command, OperatorCommand, SecretCommand};
 #[cfg(feature = "cli")]
 use secretumvault::config::VaultConfig;
 #[cfg(feature = "cli")]
 use secretumvault::core::VaultCore;
-#[cfg(feature = "cli")]
-use std::path::PathBuf;
-#[cfg(feature = "cli")]
-use std::sync::Arc;
 
 #[tokio::main]
 #[cfg(feature = "cli")]
@@ -53,7 +53,10 @@ async fn server_command(
 
     #[cfg(feature = "server")]
     {
-        eprintln!("Note: Server mode via CLI is limited. Use library API with --features server for full functionality including TLS.");
+        eprintln!(
+            "Note: Server mode via CLI is limited. Use library API with --features server for \
+             full functionality including TLS."
+        );
         eprintln!("Server feature not fully implemented in CLI mode.");
         std::process::exit(1);
     }
|||||||
@ -13,13 +13,15 @@
|
|||||||
//! ```
|
//! ```
|
||||||
//!
|
//!
|
||||||
//! For development, run etcd with:
|
//! For development, run etcd with:
|
||||||
//! - Docker: `docker run -d --name etcd -p 2379:2379 quay.io/coreos/etcd:v3.5.0`
|
//! - Docker: `docker run -d --name etcd -p 2379:2379
|
||||||
|
//! quay.io/coreos/etcd:v3.5.0`
|
||||||
//! - Local: `etcd` (requires etcd binary installed)
|
//! - Local: `etcd` (requires etcd binary installed)
|
||||||
|
|
||||||
|
use std::sync::Arc;
|
||||||
|
|
||||||
use async_trait::async_trait;
|
use async_trait::async_trait;
|
||||||
use chrono::{DateTime, Utc};
|
use chrono::{DateTime, Utc};
|
||||||
use serde_json::{json, Value};
|
use serde_json::{json, Value};
|
||||||
use std::sync::Arc;
|
|
||||||
|
|
||||||
use crate::config::EtcdStorageConfig;
|
use crate::config::EtcdStorageConfig;
|
||||||
use crate::error::{StorageError, StorageResult};
|
use crate::error::{StorageError, StorageResult};
|
||||||
|
|||||||
@@ -1,8 +1,9 @@
-use async_trait::async_trait;
-use chrono::{DateTime, Utc};
 use std::fs;
 use std::path::{Path, PathBuf};
 
+use async_trait::async_trait;
+use chrono::{DateTime, Utc};
+
 use crate::config::FilesystemStorageConfig;
 use crate::error::StorageError;
 use crate::storage::{
@@ -314,9 +315,10 @@ impl StorageBackend for FilesystemBackend {
 
 #[cfg(test)]
 mod tests {
-    use super::*;
     use tempfile::TempDir;
 
+    use super::*;
+
     fn create_test_backend() -> (FilesystemBackend, TempDir) {
         let temp_dir = TempDir::new().expect("Failed to create temp dir");
         let config = FilesystemStorageConfig {
@@ -1,8 +1,9 @@
+use std::collections::HashMap;
+use std::sync::Arc;
+
 use async_trait::async_trait;
 use chrono::{DateTime, Utc};
 use serde::{Deserialize, Serialize};
-use std::collections::HashMap;
-use std::sync::Arc;
 
 pub mod filesystem;
 pub mod postgresql;
@@ -13,17 +14,15 @@ pub mod surrealdb;
 #[cfg(feature = "etcd-storage")]
 pub mod etcd;
 
-use crate::config::StorageConfig;
-use crate::error::{Result, StorageResult};
+#[cfg(feature = "etcd-storage")]
+pub use etcd::EtcdBackend;
 
 pub use filesystem::FilesystemBackend;
 pub use postgresql::PostgreSQLBackend;
 
 #[cfg(feature = "surrealdb-storage")]
 pub use surrealdb::SurrealDBBackend;
 
-#[cfg(feature = "etcd-storage")]
-pub use etcd::EtcdBackend;
+use crate::config::StorageConfig;
+use crate::error::{Result, StorageResult};
 
 /// Encrypted data stored in backend
 #[derive(Debug, Clone, Serialize, Deserialize)]
@@ -160,7 +159,8 @@ impl StorageRegistry {
         }
         if config.backend == "postgresql" && cfg!(not(feature = "postgresql-storage")) {
             return Err(crate::VaultError::config(
-                "PostgreSQL backend not enabled. Compile with --features postgresql-storage"
+                "PostgreSQL backend not enabled. Compile with --features \
+                 postgresql-storage",
             ));
         }
         Err(crate::VaultError::config(format!(
@@ -1,12 +1,14 @@
 //! PostgreSQL storage backend for SecretumVault
 //!
 //! Provides persistent secret storage using PostgreSQL as the backend.
-//! This implementation uses an in-memory store (production would use sqlx + real DB).
+//! This implementation uses an in-memory store (production would use sqlx +
+//! real DB).
 
+use std::collections::HashMap;
+use std::sync::Arc;
+
 use async_trait::async_trait;
 use chrono::{DateTime, Utc};
-use std::collections::HashMap;
-use std::sync::Arc;
 use tokio::sync::RwLock;
 
 use crate::config::PostgreSQLStorageConfig;
@@ -12,19 +12,20 @@
 //! url = "ws://localhost:8000" # For future real SurrealDB connections
 //! ```
 
+use std::collections::HashMap;
+use std::sync::Arc;
+
 use async_trait::async_trait;
 use chrono::{DateTime, Utc};
 use serde_json::{json, Value};
-use std::collections::HashMap;
-use std::sync::Arc;
 use tokio::sync::RwLock;
 
 use crate::config::SurrealDBStorageConfig;
 use crate::error::{StorageError, StorageResult};
 use crate::storage::{EncryptedData, Lease, StorageBackend, StoredKey, StoredPolicy};
 
-/// SurrealDB storage backend - in-memory implementation with SurrealDB semantics
-/// Tables are organized as HashMap<table_name, HashMap<id, record>>
+/// SurrealDB storage backend - in-memory implementation with SurrealDB
+/// semantics Tables are organized as HashMap<table_name, HashMap<id, record>>
 pub struct SurrealDBBackend {
     store: Arc<RwLock<HashMap<String, HashMap<String, Value>>>>,
 }
@@ -6,10 +6,11 @@
 //! - Metrics collection and reporting
 //! - Performance monitoring
 
-use chrono::{DateTime, Utc};
-use serde::{Deserialize, Serialize};
 use std::sync::atomic::{AtomicU64, Ordering};
 use std::sync::Arc;
 
+use chrono::{DateTime, Utc};
+use serde::{Deserialize, Serialize};
 use tokio::sync::RwLock;
 use tracing::{debug, error, info, span, warn, Level};
 