
Some checks failed
CI/CD Pipeline / Test Suite (push) Has been cancelled
CI/CD Pipeline / Security Audit (push) Has been cancelled
CI/CD Pipeline / Build Docker Image (push) Has been cancelled
CI/CD Pipeline / Deploy to Staging (push) Has been cancelled
CI/CD Pipeline / Deploy to Production (push) Has been cancelled
CI/CD Pipeline / Performance Benchmarks (push) Has been cancelled
CI/CD Pipeline / Cleanup (push) Has been cancelled
539 lines
14 KiB
Bash
Executable File
539 lines
14 KiB
Bash
Executable File
#!/bin/bash

# Database Backup and Restore Script
# Provides convenient commands for database backup and restore operations

set -e

# ANSI color codes used by the logging helpers.
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

# Resolve the directory this script lives in and its parent (the project
# root), then run everything from the project root so relative paths
# like "backups" and ".env" are stable regardless of invocation dir.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(dirname "$SCRIPT_DIR")"
cd "$PROJECT_ROOT"

# Defaults for backup location and timestamp format.
BACKUP_DIR="backups"
DATE_FORMAT="%Y%m%d_%H%M%S"
# Logging helpers: colorized, tagged status lines on stdout.

# Print an informational message (green [INFO] tag).
log() {
    printf '%b\n' "${GREEN}[INFO]${NC} $1"
}
# Print a warning message (yellow [WARN] tag).
log_warn() {
    printf '%b\n' "${YELLOW}[WARN]${NC} $1"
}
# Print an error message (red [ERROR] tag).
# NOTE(review): this writes to stdout, not stderr — callers that capture
# output will also capture errors; confirm before redirecting to >&2.
log_error() {
    printf '%b\n' "${RED}[ERROR]${NC} $1"
}
# Print a blue "=== section ===" banner.
print_header() {
    printf '%b\n' "${BLUE}=== $1 ===${NC}"
}
# Print full usage/help text for the script to stdout.
print_usage() {
    cat <<EOF
Database Backup and Restore Script

Usage: $0 <command> [options]

Commands:
  backup            Create database backup
  restore           Restore database from backup
  list              List available backups
  clean             Clean old backups
  export            Export data to JSON/CSV
  import            Import data from JSON/CSV
  clone             Clone database to different name
  compare           Compare two databases

Options:
  --env ENV         Environment (dev/prod) [default: dev]
  --backup-dir DIR  Backup directory [default: backups]
  --file FILE       Backup file path
  --format FORMAT   Backup format (sql/custom/tar) [default: sql]
  --compress        Compress backup file
  --schema-only     Backup schema only (no data)
  --data-only       Backup data only (no schema)
  --tables TABLES   Comma-separated list of tables to backup
  --keep-days DAYS  Keep backups for N days [default: 30]
  --force           Skip confirmations
  --quiet           Suppress verbose output

Examples:
  $0 backup                        # Create full backup
  $0 backup --compress             # Create compressed backup
  $0 backup --schema-only          # Backup schema only
  $0 backup --tables users,content # Backup specific tables
  $0 restore --file backup.sql     # Restore from backup
  $0 list                          # List backups
  $0 clean --keep-days 7           # Clean old backups
  $0 export --format json          # Export to JSON
  $0 clone --env prod              # Clone to prod database
EOF
}
# Load and export environment variables from ./.env.
# Exits with an error and setup instructions if the file is missing.
load_env() {
    if [ ! -f ".env" ]; then
        log_error ".env file not found"
        echo "Please run the database setup script first:"
        echo "  ./scripts/db-setup.sh setup"
        exit 1
    fi

    # Fix: the previous `export $(grep -v '^#' .env | xargs)` broke on
    # quoted values and values containing spaces. Sourcing the file with
    # allexport enabled exports every assignment and handles quoting,
    # blank lines, and comments correctly.
    set -a
    # shellcheck disable=SC1091
    . ./.env
    set +a
}
# Parse $DATABASE_URL into its components.
# Sets DB_TYPE, plus for PostgreSQL: DB_HOST/DB_PORT/DB_NAME/DB_USER/DB_PASS
# (expected shape: scheme://user:pass@host:port/dbname[?params]),
# or for SQLite: DB_FILE. Exits on an unrecognized scheme.
parse_database_url() {
    if [[ "$DATABASE_URL" == postgresql://* ]] || [[ "$DATABASE_URL" == postgres://* ]]; then
        DB_TYPE="postgresql"
        # Fix: the URL was previously passed through an unquoted `echo $URL`,
        # which word-splits and glob-expands it. printf '%s' with quoting
        # feeds the URL to sed byte-for-byte.
        DB_HOST=$(printf '%s' "$DATABASE_URL" | sed -n 's/.*@\([^:]*\):.*/\1/p')
        DB_PORT=$(printf '%s' "$DATABASE_URL" | sed -n 's/.*:\([0-9]*\)\/.*/\1/p')
        DB_NAME=$(printf '%s' "$DATABASE_URL" | sed -n 's/.*\/\([^?]*\).*/\1/p')
        DB_USER=$(printf '%s' "$DATABASE_URL" | sed -n 's/.*\/\/\([^:]*\):.*/\1/p')
        DB_PASS=$(printf '%s' "$DATABASE_URL" | sed -n 's/.*:\/\/[^:]*:\([^@]*\)@.*/\1/p')
    elif [[ "$DATABASE_URL" == sqlite://* ]]; then
        DB_TYPE="sqlite"
        # Strip the scheme prefix with parameter expansion (no subshell, no sed).
        DB_FILE=${DATABASE_URL#sqlite://}
    else
        log_error "Unsupported database URL format: $DATABASE_URL"
        exit 1
    fi
}
# Ensure $BACKUP_DIR exists, creating it (with a log line) on first use.
setup_backup_dir() {
    if [ -d "$BACKUP_DIR" ]; then
        return 0
    fi
    log "Creating backup directory: $BACKUP_DIR"
    mkdir -p "$BACKUP_DIR"
}
# Compose a timestamped backup path from the current settings:
#   <BACKUP_DIR>/<DB_NAME>_<timestamp>[_<env>]<format-ext>[.gz]
# The "_<env>" tag is only added for non-dev environments.
generate_backup_filename() {
    local stamp env_tag ext gz
    stamp=$(date +"$DATE_FORMAT")

    env_tag=""
    if [ "$ENVIRONMENT" != "dev" ]; then
        env_tag="_${ENVIRONMENT}"
    fi

    case "$FORMAT" in
        sql)    ext=".sql" ;;
        custom) ext=".dump" ;;
        tar)    ext=".tar" ;;
        *)      ext="" ;;
    esac

    gz=""
    if [ "$COMPRESS" = "true" ]; then
        gz=".gz"
    fi

    echo "${BACKUP_DIR}/${DB_NAME}_${stamp}${env_tag}${ext}${gz}"
}
# Dump the PostgreSQL database to $1 with pg_dump.
# Honors FORMAT (plain/custom/tar), SCHEMA_ONLY/DATA_ONLY, TABLES,
# COMPRESS, and QUIET. Credentials come from the parsed DATABASE_URL;
# the password is passed via PGPASSWORD and cleared afterwards.
backup_postgresql() {
    local backup_file="$1"
    local pg_dump_args=()

    # Connection parameters parsed from DATABASE_URL.
    pg_dump_args+=("-h" "$DB_HOST")
    pg_dump_args+=("-p" "$DB_PORT")
    pg_dump_args+=("-U" "$DB_USER")
    pg_dump_args+=("-d" "$DB_NAME")

    # Output format.
    case "$FORMAT" in
        "sql")    pg_dump_args+=("--format=plain") ;;
        "custom") pg_dump_args+=("--format=custom") ;;
        "tar")    pg_dump_args+=("--format=tar") ;;
    esac

    # Schema/data selection (mutually exclusive; schema wins if both set).
    if [ "$SCHEMA_ONLY" = "true" ]; then
        pg_dump_args+=("--schema-only")
    elif [ "$DATA_ONLY" = "true" ]; then
        pg_dump_args+=("--data-only")
    fi

    # Optional per-table selection from a comma-separated list.
    if [ -n "$TABLES" ]; then
        IFS=',' read -ra TABLE_ARRAY <<< "$TABLES"
        for table in "${TABLE_ARRAY[@]}"; do
            pg_dump_args+=("--table=$table")
        done
    fi

    # Fix: --verbose was added unconditionally even though the script
    # accepts a --quiet flag; honor it here.
    if [ "$QUIET" != "true" ]; then
        pg_dump_args+=("--verbose")
    fi
    pg_dump_args+=("--no-password")

    export PGPASSWORD="$DB_PASS"

    log "Creating PostgreSQL backup: $backup_file"

    if [ "$COMPRESS" = "true" ]; then
        # Fix: without pipefail, a pg_dump failure was masked by gzip's
        # exit status and a truncated backup was silently kept. Check
        # pg_dump's status via PIPESTATUS and clean up on failure.
        pg_dump "${pg_dump_args[@]}" | gzip > "$backup_file"
        if [ "${PIPESTATUS[0]}" -ne 0 ]; then
            unset PGPASSWORD
            log_error "pg_dump failed; removing incomplete backup"
            rm -f -- "$backup_file"
            exit 1
        fi
    else
        pg_dump "${pg_dump_args[@]}" > "$backup_file"
    fi

    unset PGPASSWORD
}
# Dump the SQLite database file ($DB_FILE) to $1 via `sqlite3 .dump`.
# Exits if the database file does not exist. Honors COMPRESS.
backup_sqlite() {
    local backup_file="$1"

    if [ ! -f "$DB_FILE" ]; then
        log_error "SQLite database file not found: $DB_FILE"
        exit 1
    fi

    log "Creating SQLite backup: $backup_file"

    if [ "$COMPRESS" = "true" ]; then
        # Fix: without pipefail, gzip's exit status masked sqlite3
        # failures, leaving a truncated backup behind. Check PIPESTATUS.
        sqlite3 "$DB_FILE" ".dump" | gzip > "$backup_file"
        if [ "${PIPESTATUS[0]}" -ne 0 ]; then
            log_error "sqlite3 dump failed; removing incomplete backup"
            rm -f -- "$backup_file"
            exit 1
        fi
    else
        sqlite3 "$DB_FILE" ".dump" > "$backup_file"
    fi
}
# Restore the PostgreSQL database from a (possibly gzipped) SQL backup.
# Prompts for confirmation unless FORCE=true; exits 0 on decline.
restore_postgresql() {
    local src="$1"

    if [ ! -f "$src" ]; then
        log_error "Backup file not found: $src"
        exit 1
    fi

    # Interactive confirmation: anything other than a single y/Y cancels.
    if [ "$FORCE" != "true" ]; then
        echo -n "This will restore the database '$DB_NAME'. Continue? (y/N): "
        read -r confirm
        case "$confirm" in
            [Yy]) ;;
            *)
                log "Restore cancelled"
                exit 0
                ;;
        esac
    fi

    export PGPASSWORD="$DB_PASS"

    log "Restoring PostgreSQL backup: $src"

    # Gzipped backups are streamed through gunzip; plain SQL is fed directly.
    case "$src" in
        *.gz)
            gunzip -c "$src" | psql -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" -d "$DB_NAME"
            ;;
        *)
            psql -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" -d "$DB_NAME" < "$src"
            ;;
    esac

    unset PGPASSWORD
}
# Restore the SQLite database from a (possibly gzipped) dump.
# Prompts for confirmation unless FORCE=true, and saves a timestamped
# copy of the current database file before overwriting it.
restore_sqlite() {
    local src="$1"

    if [ ! -f "$src" ]; then
        log_error "Backup file not found: $src"
        exit 1
    fi

    # Interactive confirmation: anything other than a single y/Y cancels.
    if [ "$FORCE" != "true" ]; then
        echo -n "This will restore the database '$DB_FILE'. Continue? (y/N): "
        read -r confirm
        case "$confirm" in
            [Yy]) ;;
            *)
                log "Restore cancelled"
                exit 0
                ;;
        esac
    fi

    log "Restoring SQLite backup: $src"

    # Safety net: keep a copy of the existing database alongside it.
    if [ -f "$DB_FILE" ]; then
        local safety_copy
        safety_copy="${DB_FILE}.backup.$(date +"$DATE_FORMAT")"
        cp "$DB_FILE" "$safety_copy"
        log "Created backup of existing database: $safety_copy"
    fi

    case "$src" in
        *.gz) gunzip -c "$src" | sqlite3 "$DB_FILE" ;;
        *)    sqlite3 "$DB_FILE" < "$src" ;;
    esac
}
# Print a "filename | size | date" table of the files in BACKUP_DIR.
# Warns (and returns) when the directory is missing or empty.
list_backups() {
    print_header "Available Backups"

    if [ ! -d "$BACKUP_DIR" ]; then
        log_warn "No backup directory found: $BACKUP_DIR"
        return
    fi

    if [ -z "$(ls -A "$BACKUP_DIR")" ]; then
        log_warn "No backups found in $BACKUP_DIR"
        return
    fi

    echo "Format: filename | size | date"
    echo "----------------------------------------"

    local entry name size mtime
    for entry in "$BACKUP_DIR"/*; do
        [ -f "$entry" ] || continue
        name=$(basename "$entry")
        size=$(du -h "$entry" | cut -f1)
        # NOTE(review): GNU `date -r FILE` prints the file's mtime, but on
        # BSD/macOS -r takes an epoch — confirm if this must run there.
        mtime=$(date -r "$entry" '+%Y-%m-%d %H:%M:%S')
        echo "$name | $size | $mtime"
    done
}
# Delete backup files (*.sql*, *.dump*, *.tar*) in BACKUP_DIR that are
# older than KEEP_DAYS days, logging each deletion and a final count.
clean_backups() {
    print_header "Cleaning Old Backups"

    if [ ! -d "$BACKUP_DIR" ]; then
        log_warn "No backup directory found: $BACKUP_DIR"
        return
    fi

    log "Removing backups older than $KEEP_DAYS days..."

    # Fix: the original find expression mixed -o with the implicit -a
    # without parentheses, so -type, -mtime and -print0 only applied to
    # the "*.tar*" branch — old .sql/.dump backups were never deleted.
    local deleted=0
    while IFS= read -r -d '' backup; do
        if [ -f "$backup" ]; then
            local filename
            filename=$(basename "$backup")
            rm -- "$backup"
            log "Deleted: $filename"
            # Fix: ((deleted++)) returns status 1 when deleted is 0,
            # which aborts the script under `set -e` after the first
            # deletion; plain arithmetic assignment always succeeds.
            deleted=$((deleted + 1))
        fi
    done < <(find "$BACKUP_DIR" -type f \( -name "*.sql*" -o -name "*.dump*" -o -name "*.tar*" \) -mtime +"$KEEP_DAYS" -print0)

    log "Deleted $deleted old backup files"
}
# Export data to JSON/CSV — currently a placeholder that only reports
# that the feature is not implemented for either backend.
export_data() {
    print_header "Exporting Data"

    # Reserved for the eventual implementation's output path.
    # shellcheck disable=SC2034
    local export_file="${BACKUP_DIR}/export_$(date +"$DATE_FORMAT").json"

    case "$DB_TYPE" in
        postgresql)
            log "Exporting PostgreSQL data to JSON..."
            log_warn "JSON export for PostgreSQL not yet implemented"
            log "Consider using pg_dump with --data-only and custom processing"
            ;;
        sqlite)
            log "Exporting SQLite data to JSON..."
            log_warn "JSON export for SQLite not yet implemented"
            log "Consider using sqlite3 with custom queries"
            ;;
    esac
}
# Clone the current database via a temporary plain-SQL backup.
# NOTE: only the dump half is implemented — restoring into the clone
# target still requires manual steps (see the warning below).
clone_database() {
    print_header "Cloning Database"

    local stamp temp_backup
    stamp=$(date +"$DATE_FORMAT")
    temp_backup="${BACKUP_DIR}/temp_clone_${stamp}.sql"

    log "Creating temporary backup for cloning..."
    # Force an uncompressed plain-SQL dump as the clone source.
    COMPRESS="false"
    FORMAT="sql"

    case "$DB_TYPE" in
        postgresql) backup_postgresql "$temp_backup" ;;
        sqlite)     backup_sqlite "$temp_backup" ;;
    esac

    # TODO: Implement actual cloning logic
    # (create the target database, then restore the backup into it).
    log_warn "Database cloning not yet fully implemented"
    log "Temporary backup created: $temp_backup"
    log "Manual steps required to complete cloning"
}
# ---------------------------------------------------------------------------
# Command-line parsing.
# The first bare (non-flag) argument is the command; a second bare
# argument is an error. Value-taking options require an argument.
# ---------------------------------------------------------------------------
COMMAND=""
ENVIRONMENT="dev"
FORMAT="sql"
COMPRESS="false"
SCHEMA_ONLY="false"
DATA_ONLY="false"
TABLES=""
BACKUP_FILE=""
KEEP_DAYS=30
FORCE="false"
QUIET="false"

while [[ $# -gt 0 ]]; do
    case "$1" in
        # Fix: a missing option argument used to make `shift 2` fail with
        # a bare shell error under `set -e`; guard each value option.
        --env)
            [ $# -ge 2 ] || { log_error "Option --env requires an argument"; exit 1; }
            ENVIRONMENT="$2"
            shift 2
            ;;
        --backup-dir)
            [ $# -ge 2 ] || { log_error "Option --backup-dir requires an argument"; exit 1; }
            BACKUP_DIR="$2"
            shift 2
            ;;
        --file)
            [ $# -ge 2 ] || { log_error "Option --file requires an argument"; exit 1; }
            BACKUP_FILE="$2"
            shift 2
            ;;
        --format)
            [ $# -ge 2 ] || { log_error "Option --format requires an argument"; exit 1; }
            FORMAT="$2"
            shift 2
            ;;
        --tables)
            [ $# -ge 2 ] || { log_error "Option --tables requires an argument"; exit 1; }
            TABLES="$2"
            shift 2
            ;;
        --keep-days)
            [ $# -ge 2 ] || { log_error "Option --keep-days requires an argument"; exit 1; }
            KEEP_DAYS="$2"
            shift 2
            ;;
        --compress)    COMPRESS="true";    shift ;;
        --schema-only) SCHEMA_ONLY="true"; shift ;;
        --data-only)   DATA_ONLY="true";   shift ;;
        --force)       FORCE="true";       shift ;;
        --quiet)       QUIET="true";       shift ;;
        -h|--help)
            print_usage
            exit 0
            ;;
        *)
            if [ -z "$COMMAND" ]; then
                COMMAND="$1"
            else
                log_error "Unknown option: $1"
                print_usage
                exit 1
            fi
            shift
            ;;
    esac
done
# Export the chosen environment for any child processes.
export ENVIRONMENT="$ENVIRONMENT"

# A command is mandatory; bail out with usage otherwise.
if [ -z "$COMMAND" ]; then
    print_usage
    exit 1
fi

# Sanity check: this script must run from the project root
# (identified by the presence of Cargo.toml).
if [ ! -f "Cargo.toml" ]; then
    log_error "Please run this script from the project root directory"
    exit 1
fi

# Load .env, derive connection details, and ensure the backup dir exists.
load_env
parse_database_url
setup_backup_dir
# ---------------------------------------------------------------------------
# Command dispatch.
# ---------------------------------------------------------------------------
case "$COMMAND" in
    "backup")
        print_header "Creating Database Backup"

        if [ -z "$BACKUP_FILE" ]; then
            BACKUP_FILE=$(generate_backup_filename)
        fi

        if [ "$DB_TYPE" = "postgresql" ]; then
            backup_postgresql "$BACKUP_FILE"
        elif [ "$DB_TYPE" = "sqlite" ]; then
            backup_sqlite "$BACKUP_FILE"
        fi

        # Fix: `local file_size=...` is invalid outside a function; under
        # `set -e` it aborted the script right after a successful backup,
        # before the success message was printed.
        file_size=$(du -h "$BACKUP_FILE" | cut -f1)
        log "Backup created successfully: $BACKUP_FILE ($file_size)"
        ;;
    "restore")
        print_header "Restoring Database"

        if [ -z "$BACKUP_FILE" ]; then
            log_error "Please specify backup file with --file option"
            exit 1
        fi

        if [ "$DB_TYPE" = "postgresql" ]; then
            restore_postgresql "$BACKUP_FILE"
        elif [ "$DB_TYPE" = "sqlite" ]; then
            restore_sqlite "$BACKUP_FILE"
        fi

        log "Database restored successfully"
        ;;
    "list")
        list_backups
        ;;
    "clean")
        clean_backups
        ;;
    "export")
        export_data
        ;;
    "import")
        log_warn "Import functionality not yet implemented"
        ;;
    "clone")
        clone_database
        ;;
    "compare")
        log_warn "Database comparison not yet implemented"
        ;;
    *)
        log_error "Unknown command: $COMMAND"
        print_usage
        exit 1
        ;;
esac

log "Operation completed successfully"