#!/usr/bin/env bash
#
# Deploy mdBook documentation to a custom server.
#
# Usage: deploy-docs.sh [environment]
# Environments: staging, production, custom
#
# Expects a config file ".deploy-config.<environment>" next to this script
# defining at least DEPLOY_HOST, DEPLOY_USER, DEPLOY_PATH and DEPLOY_METHOD.

# Strict mode: -e abort on error, -u treat unset vars as errors, pipefail
# makes a pipeline fail if any stage fails. -E (errtrace) makes the ERR trap
# declared at the bottom of this file fire inside functions as well — without
# it the trap only covers top-level commands.
set -Eeuo pipefail
# ============================================================================
# Configuration
# ============================================================================

# Absolute directory containing this script (resolved even when invoked via a
# relative path or through a symlinked $PWD).
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Repository root is assumed to be the parent of the script directory.
REPO_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
DOCS_DIR="$REPO_ROOT/docs"
# mdBook output directory; this script deploys it but does not build it.
BUILD_DIR="$DOCS_DIR/book"
# Run identifier used to tag remote backups, Docker image tags and the log.
TIMESTAMP=$(date +%Y%m%d_%H%M%S)
# NOTE(review): predictable name in /tmp — consider mktemp if this ever runs
# on a shared multi-user host.
LOG_FILE="/tmp/docs-deploy-$TIMESTAMP.log"

# Default environment
ENVIRONMENT="${1:-production}"
# ============================================================================
# Logging
# ============================================================================

# Print a timestamped message to stdout and append it to $LOG_FILE.
log() {
  local ts
  ts=$(date +'%Y-%m-%d %H:%M:%S')
  printf '%s\n' "[$ts] $*" | tee -a "$LOG_FILE"
}
# Report a fatal error on stderr (also appended to $LOG_FILE) and abort the
# whole script with status 1.
error() {
  printf '%s\n' "[ERROR] $*" | tee -a "$LOG_FILE" >&2
  exit 1
}
# Print a checkmark-prefixed status line, mirrored into $LOG_FILE.
success() {
  printf '%s\n' "[✓] $*" | tee -a "$LOG_FILE"
}
# ============================================================================
# Environment Configuration
# ============================================================================

# Source ".deploy-config.<env>" from the script directory and verify that the
# deployment variables every method needs are set and non-empty.
load_config() {
  local target_env=$1
  local cfg="$SCRIPT_DIR/.deploy-config.$target_env"

  [[ -f "$cfg" ]] || error "Configuration file not found: $cfg"

  log "Loading configuration for environment: $target_env"
  # shellcheck disable=SC1090 — path is computed at runtime
  source "$cfg"

  # Fail fast on any missing required setting (indirect expansion on the name).
  local key
  for key in DEPLOY_HOST DEPLOY_USER DEPLOY_PATH DEPLOY_METHOD; do
    [[ -n "${!key:-}" ]] || error "Missing required configuration: $key"
  done

  success "Configuration loaded"
}
# ============================================================================
# Pre-Flight Checks
# ============================================================================

# Verify the mdBook build output exists and is complete, then probe the
# deployment target where the method allows it. SSH reachability is fatal;
# the HTTP health probe is advisory only.
preflight_checks() {
  log "Running preflight checks..."

  [[ -d "$BUILD_DIR" ]] || error "Build directory not found: $BUILD_DIR"
  [[ -f "$BUILD_DIR/index.html" ]] \
    || error "Built documentation not found: $BUILD_DIR/index.html"

  # Spot-check the assets mdBook always emits.
  local asset
  for asset in index.html print.html css/general.css js/book.js; do
    [[ -f "$BUILD_DIR/$asset" ]] || error "Missing essential file: $asset"
  done

  # Connectivity probe, keyed on the configured method.
  case "$DEPLOY_METHOD" in
    ssh|sftp)
      ssh -o ConnectTimeout=5 "$DEPLOY_USER@$DEPLOY_HOST" "echo ok" > /dev/null 2>&1 \
        || error "Cannot connect to $DEPLOY_USER@$DEPLOY_HOST via SSH"
      ;;
    http)
      curl -sf "$DEPLOY_ENDPOINT/health" > /dev/null 2>&1 \
        || log "⚠ Deployment endpoint health check failed (non-blocking)"
      ;;
  esac

  success "Preflight checks passed"
}
# ============================================================================
# Deployment Methods
# ============================================================================

# Deploy over SSH: back up the previous release, rsync the new build into
# place, then atomically flip the served symlink.
deploy_ssh() {
  log "Deploying via SSH to $DEPLOY_USER@$DEPLOY_HOST:$DEPLOY_PATH"

  # Back up the previous release on the remote host. An explicit remote 'if'
  # replaces the original 'test && mv || true', which silently swallowed a
  # FAILED mv (e.g. permissions, disk full) — now a failed mv aborts the run.
  ssh "$DEPLOY_USER@$DEPLOY_HOST" \
    "mkdir -p $DEPLOY_PATH/backups && \
     if [ -d $DEPLOY_PATH/current ]; then \
       mv $DEPLOY_PATH/current $DEPLOY_PATH/backups/backup_$TIMESTAMP; \
     fi"

  # Upload the freshly built book; --delete prunes pages removed upstream.
  rsync -avz \
    --delete \
    --exclude '.gitignore' \
    --exclude 'CNAME' \
    "$BUILD_DIR/" \
    "$DEPLOY_USER@$DEPLOY_HOST:$DEPLOY_PATH/current/"

  # Atomically repoint the served 'docs' symlink at the new release.
  ssh "$DEPLOY_USER@$DEPLOY_HOST" \
    "ln -sfT $DEPLOY_PATH/current $DEPLOY_PATH/docs && \
     chmod -R 755 $DEPLOY_PATH/current"

  success "SSH deployment completed"
}
# Deploy over SFTP using a generated batch file.
deploy_sftp() {
  log "Deploying via SFTP to $DEPLOY_USER@$DEPLOY_HOST:$DEPLOY_PATH"

  # Build the batch file. A leading '-' tells sftp to ignore errors for that
  # command (the directories may already exist). OpenSSH sftp has no 'mput';
  # 'put -r' is its recursive upload command.
  local sftp_batch
  sftp_batch=$(mktemp)
  cat > "$sftp_batch" << SFTP_CMDS
cd $DEPLOY_PATH
-mkdir backups
-mkdir current
put -r $BUILD_DIR/* current/
quit
SFTP_CMDS

  # Run the batch; capture the status so the temp file is removed even on
  # failure (under 'set -e' the original leaked it).
  local rc=0
  sftp -b "$sftp_batch" "$DEPLOY_USER@$DEPLOY_HOST" || rc=$?
  rm -f -- "$sftp_batch"
  if [ "$rc" -ne 0 ]; then
    error "SFTP deployment failed (exit $rc)"
  fi

  success "SFTP deployment completed"
}
# Deploy by POSTing a tarball of the build to $DEPLOY_ENDPOINT/deploy,
# authenticated with $DEPLOY_TOKEN. Accepts HTTP 200 or 201.
deploy_http() {
  log "Deploying via HTTP to $DEPLOY_ENDPOINT"

  # Stage the tarball inside a private temp dir. (The original used
  # 'mktemp --suffix', a GNU-only flag that breaks on macOS/BSD.)
  local staging
  staging=$(mktemp -d)
  local tar_file="$staging/docs.tar.gz"
  tar -czf "$tar_file" -C "$BUILD_DIR" .

  # -w appends the status code on its own line after the response body.
  local response
  response=$(curl -s -w "\n%{http_code}" \
    -X POST \
    -H "Authorization: Bearer $DEPLOY_TOKEN" \
    -F "archive=@$tar_file" \
    "$DEPLOY_ENDPOINT/deploy")

  # Split on the LAST newline ('head -n-1' is GNU-only; pure parameter
  # expansion is portable and forkless).
  local http_code="${response##*$'\n'}"
  local body="${response%$'\n'*}"

  rm -rf -- "$staging"

  if [ "$http_code" != "200" ] && [ "$http_code" != "201" ]; then
    error "HTTP deployment failed (HTTP $http_code): $body"
  fi

  success "HTTP deployment completed (HTTP $http_code)"
}
# Build an nginx image containing the built docs and push it (plus a
# "<env>-latest" tag) to $DEPLOY_REGISTRY.
deploy_docker() {
  log "Deploying via Docker to $DEPLOY_REGISTRY"

  # NOTE: 'image' is deliberately a GLOBAL. verify_deployment (called later
  # from main) logs it after this function has returned; the original's
  # 'local image' had vanished by then and tripped 'set -u'.
  image="$DEPLOY_REGISTRY/vapora-docs:$ENVIRONMENT-$TIMESTAMP"

  local dockerfile
  dockerfile=$(mktemp)

  # Minimal static-site Dockerfile; quoted delimiter = no expansion.
  cat > "$dockerfile" << 'EOF'
FROM nginx:alpine
COPY . /usr/share/nginx/html
EXPOSE 80
CMD ["nginx", "-g", "daemon off;"]
EOF

  # Build and tag.
  log "Building Docker image: $image"
  docker build -f "$dockerfile" -t "$image" "$BUILD_DIR"
  docker tag "$image" "$DEPLOY_REGISTRY/vapora-docs:$ENVIRONMENT-latest"

  # Push both the immutable and the rolling tag.
  log "Pushing image to registry"
  docker push "$image"
  docker push "$DEPLOY_REGISTRY/vapora-docs:$ENVIRONMENT-latest"

  rm -f -- "$dockerfile"

  log "Container image available: $image"
  success "Docker deployment completed"
}
# Sync the build to S3 and, when configured, invalidate CloudFront.
deploy_s3() {
  log "Deploying to S3: s3://$AWS_BUCKET/$DEPLOY_PATH"

  local region="${AWS_REGION:-us-east-1}"

  # --delete mirrors the bucket prefix to the local build exactly.
  aws s3 sync "$BUILD_DIR/" "s3://$AWS_BUCKET/$DEPLOY_PATH/" \
    --delete \
    --region "$region" \
    --cache-control "public, max-age=300"

  if [ -n "${CLOUDFRONT_DISTRIBUTION:-}" ]; then
    log "Invalidating CloudFront distribution"
    aws cloudfront create-invalidation \
      --distribution-id "$CLOUDFRONT_DISTRIBUTION" \
      --paths "/$DEPLOY_PATH/*" \
      --region "$region"
  fi

  success "S3 deployment completed"
}
# Mirror the build into a GCS bucket prefix and stamp cache headers.
deploy_gcs() {
  log "Deploying to Google Cloud Storage: gs://$GCS_BUCKET/$DEPLOY_PATH"

  local dest="gs://$GCS_BUCKET/$DEPLOY_PATH"

  # -d deletes remote objects that no longer exist locally.
  gsutil -m rsync -r -d "$BUILD_DIR/" "$dest/"

  # Apply cache policy to everything just uploaded.
  gsutil -m setmeta -h "Cache-Control:public, max-age=300" "$dest/**"

  success "GCS deployment completed"
}
# ============================================================================
# Post-Deployment Verification
# ============================================================================

# Best-effort check that the deployed docs are actually reachable, keyed on
# the deployment method just used.
verify_deployment() {
  log "Verifying deployment..."

  case "$DEPLOY_METHOD" in
    ssh|sftp)
      # Confirm the entry page landed on the remote host.
      ssh "$DEPLOY_USER@$DEPLOY_HOST" \
        "[ -f $DEPLOY_PATH/docs/index.html ] && echo 'Files verified' || exit 1"
      ;;
    http)
      # Advisory only — the endpoint may publish asynchronously.
      if ! curl -sf "$DEPLOY_ENDPOINT/health" > /dev/null; then
        log "⚠ Health check failed (deployment may still be in progress)"
      fi
      ;;
    docker)
      # 'image' is a global set by deploy_docker. Default defensively so a
      # missing value does not abort the script under 'set -u' (the original
      # referenced a 'local' that had already gone out of scope).
      log "Docker image deployed: ${image:-unknown}"
      ;;
    s3)
      # Verify the entry page object exists in the bucket.
      aws s3 ls "s3://$AWS_BUCKET/$DEPLOY_PATH/index.html" \
        --region "${AWS_REGION:-us-east-1}" > /dev/null
      ;;
    gcs)
      # Verify the entry page object exists in the bucket.
      gsutil stat "gs://$GCS_BUCKET/$DEPLOY_PATH/index.html" > /dev/null
      ;;
  esac

  success "Deployment verified"
}
# ============================================================================
# Rollback
# ============================================================================

# Repoint the served symlink at this run's backup. SSH-only.
# NOTE(review): nothing in this file calls rollback automatically — it appears
# to be a manual helper; confirm intended invocation.
rollback() {
  log "Rolling back deployment..."

  case "$DEPLOY_METHOD" in
    ssh)
      # Check for the backup on the REMOTE host. The original ran this test
      # against the LOCAL filesystem, so it could never find a remote backup.
      if ssh "$DEPLOY_USER@$DEPLOY_HOST" \
           "[ -d $DEPLOY_PATH/backups/backup_$TIMESTAMP ]"; then
        ssh "$DEPLOY_USER@$DEPLOY_HOST" \
          "ln -sfT $DEPLOY_PATH/backups/backup_$TIMESTAMP $DEPLOY_PATH/docs"
        success "Rollback completed"
      else
        error "No backup available for rollback"
      fi
      ;;
    *)
      error "Rollback not implemented for deployment method: $DEPLOY_METHOD"
      ;;
  esac
}
# ============================================================================
# Main
# ============================================================================

# Orchestrate the whole run: load config, preflight, dispatch to the
# configured deployment method, verify, and print a summary.
main() {
  log "Starting documentation deployment"
  log "Environment: $ENVIRONMENT"
  log "Repository: $REPO_ROOT"
  log "Build directory: $BUILD_DIR"
  log "Log file: $LOG_FILE"

  load_config "$ENVIRONMENT"
  preflight_checks

  # Dispatch on the method declared by the environment's config file.
  case "$DEPLOY_METHOD" in
    ssh)    deploy_ssh ;;
    sftp)   deploy_sftp ;;
    http)   deploy_http ;;
    docker) deploy_docker ;;
    s3)     deploy_s3 ;;
    gcs)    deploy_gcs ;;
    *)      error "Unknown deployment method: $DEPLOY_METHOD" ;;
  esac

  verify_deployment

  success "Documentation deployment completed successfully"
  log "Summary:"
  log "  Environment: $ENVIRONMENT"
  log "  Method: $DEPLOY_METHOD"
  log "  Build size: $(du -sh "$BUILD_DIR" | cut -f1)"
  log "  Timestamp: $TIMESTAMP"
  log "  Log: $LOG_FILE"
}
# Handle errors
# -E (errtrace) makes the ERR trap inherited by functions and subshells;
# without it the trap only fires for failures at the top level, so errors
# inside deploy_* functions would exit (via set -e) with no message.
set -E
trap 'error "Deployment failed at line $LINENO"' ERR

# Run main function
main "$@"