Vapora/scripts/orchestrate-backup-recovery.nu

#!/usr/bin/env nu
# VAPORA Backup & Recovery Orchestrator
# Coordinates S3 + Restic backups and recovery procedures
# Follows NUSHELL_GUIDELINES.md strictly (0.109.0+)
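#
# Example invocations (flag values are illustrative, not defaults):
#   nu Vapora/scripts/orchestrate-backup-recovery.nu --operation backup --surreal-pass $env.SURREAL_PASS --s3-bucket my-backups --encryption-key /etc/vapora/backup.key --restic-repo /srv/restic-repo --restic-password $env.RESTIC_PASS
#   nu Vapora/scripts/orchestrate-backup-recovery.nu --operation recovery --surreal-pass $env.SURREAL_PASS --encryption-key /etc/vapora/backup.key --s3-location s3://my-backups/backups/database/database-20260101-000000.sql.gz.enc
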
# Get timestamp
def get-timestamp []: nothing -> string {
    date now | format date "%Y%m%d-%H%M%S"
}
# Export SurrealDB database
def export-surrealdb [
    surreal_url: string
    surreal_user: string
    surreal_pass: string
    output_dir: string
]: nothing -> record {
    print $"Exporting SurrealDB from ($surreal_url)..."
    let backup_file = $"($output_dir)/vapora-db-(get-timestamp).sql"
    let result = do {
        ^mkdir -p $output_dir
        ^surreal export --conn $surreal_url --user $surreal_user --pass $surreal_pass --output $backup_file
    } | complete
    if ($result.exit_code == 0) {
        let size_check = (do { ^ls -lh $backup_file } | complete)
        let size = if ($size_check.exit_code == 0) {
            $size_check.stdout | str trim
        } else {
            "unknown"
        }
        {
            success: true
            file: $backup_file
            size: $size
            error: null
        }
    } else {
        {
            success: false
            file: null
            error: ($result.stderr | str trim)
        }
    }
}
# Export Kubernetes configuration
def export-k8s-config [
    namespace: string
    output_dir: string
]: nothing -> record {
    print $"Exporting Kubernetes config from namespace ($namespace)..."
    let config_file = $"($output_dir)/k8s-config-(get-timestamp).yaml"
    let result = do {
        ^mkdir -p $output_dir
        ^kubectl get configmaps,secrets,services,ingresses,deployments,statefulsets -n $namespace -o yaml o> $config_file
    } | complete
    if ($result.exit_code == 0) {
        let kind_check = (do { ^grep "^kind:" $config_file } | complete)
        let resource_count = if ($kind_check.exit_code == 0) {
            $kind_check.stdout | lines | length
        } else {
            0
        }
        {
            success: true
            file: $config_file
            resource_count: $resource_count
            error: null
        }
    } else {
        {
            success: false
            file: null
            error: ($result.stderr | str trim)
        }
    }
}
# Run S3 direct backup
def run-s3-backup [
    database_export: record
    s3_bucket: string
    s3_prefix: string
    encryption_key: string
]: nothing -> record {
    print "Running S3 direct backup..."
    if (not $database_export.success) {
        return {
            success: false
            method: "s3-direct"
            location: null
            error: "Database export failed"
        }
    }
    # Compress (keep the original export so the Restic backup can still reference it)
    let compress = do {
        ^gzip --force --keep $database_export.file
    } | complete
    if ($compress.exit_code != 0) {
        return {
            success: false
            method: "s3-direct"
            location: null
            error: "Compression failed"
        }
    }
    let compressed = $"($database_export.file).gz"
    # Encrypt
    let encrypted = $"($compressed).enc"
    let encrypt = do {
        ^openssl enc -aes-256-cbc -in $compressed -out $encrypted -pass $"file:($encryption_key)"
    } | complete
    if ($encrypt.exit_code != 0) {
        return {
            success: false
            method: "s3-direct"
            location: null
            error: "Encryption failed"
        }
    }
    # Upload
    let s3_key = $"($s3_prefix)/database-(get-timestamp).sql.gz.enc"
    let upload = do {
        ^aws s3 cp $encrypted $"s3://($s3_bucket)/($s3_key)" --sse AES256
    } | complete
    if ($upload.exit_code == 0) {
        {
            success: true
            method: "s3-direct"
            location: $"s3://($s3_bucket)/($s3_key)"
            error: null
        }
    } else {
        {
            success: false
            method: "s3-direct"
            location: $"s3://($s3_bucket)/($s3_key)"
            error: ($upload.stderr | str trim)
        }
    }
}
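# The uploaded object can also be restored by hand with the inverse pipeline
# used by the recovery branch of this script (bucket, key, and key-file paths are illustrative):
#   aws s3 cp s3://<bucket>/<key> backup.sql.gz.enc
#   openssl enc -d -aes-256-cbc -in backup.sql.gz.enc -out backup.sql.gz -pass file:<key-file>
#   gunzip backup.sql.gz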
# Run Restic backup
def run-restic-backup [
    database_export: record
    k8s_export: record
    restic_repo: string
    restic_password: string
    iac_dir: string
]: nothing -> record {
    print "Running Restic backup..."
    let timestamp = (get-timestamp)
    # Build backup paths
    let backup_paths = if ($database_export.success and $k8s_export.success) {
        [$database_export.file $k8s_export.file $iac_dir]
    } else if $database_export.success {
        [$database_export.file $iac_dir]
    } else if $k8s_export.success {
        [$k8s_export.file $iac_dir]
    } else {
        [$iac_dir]
    }
    let result = do {
        with-env { RESTIC_PASSWORD: $restic_password } {
            ^restic -r $restic_repo backup ...$backup_paths --tag $timestamp --tag automated
        }
    } | complete
    if ($result.exit_code == 0) {
        {
            success: true
            method: "restic"
            repo: $restic_repo
            timestamp: $timestamp
            error: null
        }
    } else {
        {
            success: false
            method: "restic"
            repo: $restic_repo
            timestamp: $timestamp
            error: ($result.stderr | str trim)
        }
    }
}
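# Snapshots created above carry the timestamp and "automated" tags, so they can be
# inspected or restored later, e.g. (illustrative, run against the same repo):
#   with-env { RESTIC_PASSWORD: $restic_password } { ^restic -r $restic_repo snapshots --tag automated }
#   with-env { RESTIC_PASSWORD: $restic_password } { ^restic -r $restic_repo restore latest --target /tmp/vapora-restore }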
# Collect backup results
def collect-results [items: list]: nothing -> list {
    $items | reduce --fold [] {|item, acc|
        $acc | append $item
    }
}
# Cleanup files
def cleanup-files [paths: list]: nothing -> record {
    print "Cleaning up temporary files..."
    let results = $paths | each {|path|
        do {
            ^rm -rf $path
        } | complete
    }
    let failures = ($results | where {|r| $r.exit_code != 0})
    if (($failures | length) > 0) {
        {
            success: false
            cleaned: (($paths | length) - ($failures | length))
            failed: ($failures | length)
            error: "Some files failed to clean"
        }
    } else {
        {
            success: true
            cleaned: ($paths | length)
            failed: 0
            error: null
        }
    }
}
# Main orchestration
def main [
    --operation: string = "backup"  # backup | recovery
    --mode: string = "full"  # full | database-only
    --surreal-url: string = "ws://localhost:8000"
    --surreal-user: string = "root"
    --surreal-pass: string = ""
    --namespace: string = "vapora"
    --s3-bucket: string = ""
    --s3-prefix: string = "backups/database"
    --encryption-key: string = ""
    --restic-repo: string = ""
    --restic-password: string = ""
    --iac-dir: string = "provisioning"
    --s3-location: string = ""
    --work-dir: string = "/tmp/vapora-backup-recovery"
    --no-cleanup  # preserve work files instead of deleting them
]: nothing -> nothing {
    print "=== VAPORA Backup & Recovery Orchestrator ==="
    print $"Operation: ($operation)"
    print $"Mode: ($mode)"
    print ""
    if ($operation == "backup") {
        # Backup mode
        if ($surreal_pass == "") {
            print "ERROR: --surreal-pass required"
            exit 1
        }
        if ($s3_bucket == "") {
            print "ERROR: --s3-bucket required"
            exit 1
        }
        print "Starting backup sequence..."
        print ""
        # Create work directory
        let work_path = $"($work_dir)/(get-timestamp)"
        let create = do {
            ^mkdir -p $work_path
        } | complete
        if ($create.exit_code != 0) {
            print "ERROR: Failed to create work directory"
            exit 1
        }
        # Export database
        let db_export = (export-surrealdb $surreal_url $surreal_user $surreal_pass $work_path)
        if (not $db_export.success) {
            print $"ERROR: Database export failed: ($db_export.error)"
            exit 1
        }
        print "✓ Database exported"
        # Export Kubernetes config
        let k8s_export = (export-k8s-config $namespace $work_path)
        if (not $k8s_export.success) {
            print $"WARNING: Kubernetes export failed: ($k8s_export.error)"
        } else {
            print $"✓ Kubernetes config exported: ($k8s_export.resource_count) resources"
        }
        # Run backups
        let s3_result = (run-s3-backup $db_export $s3_bucket $s3_prefix $encryption_key)
        let restic_result = (run-restic-backup $db_export $k8s_export $restic_repo $restic_password $iac_dir)
        let backup_results = (collect-results [$s3_result, $restic_result])
        print ""
        print "Backup Results:"
        print $"S3: ($s3_result.location)"
        print $"Restic: ($restic_result.repo), tag: ($restic_result.timestamp)"
        # Cleanup
        if (not $no_cleanup) {
            cleanup-files [$work_path] | ignore
        } else {
            print $"Work files preserved at: ($work_path)"
        }
        print ""
        print "=== Backup Complete ==="
        print $"Timestamp: (get-timestamp)"
    } else if ($operation == "recovery") {
        # Recovery mode
        if ($surreal_pass == "") {
            print "ERROR: --surreal-pass required"
            exit 1
        }
        if ($s3_location == "") {
            print "ERROR: --s3-location required (s3://bucket/path/backup.sql.gz.enc)"
            exit 1
        }
        if ($encryption_key == "") {
            print "ERROR: --encryption-key required"
            exit 1
        }
        print "Starting recovery sequence..."
        print ""
        # Create work directory
        let work_path = $"($work_dir)/(get-timestamp)"
        let create = do {
            ^mkdir -p $work_path
        } | complete
        if ($create.exit_code != 0) {
            print "ERROR: Failed to create work directory"
            exit 1
        }
        # Download backup
        let encrypted_file = $"($work_path)/backup.sql.gz.enc"
        let download = do {
            ^aws s3 cp $s3_location $encrypted_file
        } | complete
        if ($download.exit_code != 0) {
            print "ERROR: S3 download failed"
            exit 1
        }
        print "✓ Backup downloaded"
        # Decrypt
        let compressed_file = $"($work_path)/backup.sql.gz"
        let decrypt = do {
            ^openssl enc -d -aes-256-cbc -in $encrypted_file -out $compressed_file -pass $"file:($encryption_key)"
        } | complete
        if ($decrypt.exit_code != 0) {
            print "ERROR: Decryption failed"
            exit 1
        }
        print "✓ Backup decrypted"
        # Decompress
        let backup_file = $"($work_path)/backup.sql"
        let decompress = do {
            ^gunzip --force $compressed_file
        } | complete
        if ($decompress.exit_code != 0) {
            print "ERROR: Decompression failed"
            exit 1
        }
        print "✓ Backup decompressed"
        # Import to database
        let import = do {
            ^surreal import --conn $surreal_url --user $surreal_user --pass $surreal_pass --input $backup_file
        } | complete
        if ($import.exit_code != 0) {
            print "ERROR: Database import failed"
            exit 1
        }
        print "✓ Backup imported"
        # Cleanup
        if (not $no_cleanup) {
            cleanup-files [$work_path] | ignore
        } else {
            print $"Work files preserved at: ($work_path)"
        }
        print ""
        print "=== Recovery Complete ==="
        print $"Database: ($surreal_url)"
        print $"Timestamp: (get-timestamp)"
    } else {
        print $"ERROR: Unknown operation: ($operation)"
        exit 1
    }
}