#!/bin/bash
# Performance Monitoring and Benchmarking Script
# Comprehensive performance analysis and optimization tools
set -e
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
CYAN='\033[0;36m'
BOLD='\033[1m'
NC='\033[0m' # No Color
# Script directory
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
# Change to project root
cd "$PROJECT_ROOT"
# Logging functions
log() {
echo -e "${GREEN}[INFO]${NC} $1"
}
log_warn() {
echo -e "${YELLOW}[WARN]${NC} $1"
}
log_error() {
echo -e "${RED}[ERROR]${NC} $1"
}
log_success() {
echo -e "${GREEN}[SUCCESS]${NC} $1"
}
print_header() {
echo -e "${BLUE}${BOLD}=== $1 ===${NC}"
}
print_subheader() {
echo -e "${CYAN}--- $1 ---${NC}"
}
# Default values
DEFAULT_DURATION=30
DEFAULT_CONCURRENT=10
DEFAULT_HOST="localhost"
DEFAULT_PORT="3030"
DEFAULT_PROTOCOL="http"
# Configuration
DURATION="$DEFAULT_DURATION"
CONCURRENT="$DEFAULT_CONCURRENT"
HOST="$DEFAULT_HOST"
PORT="$DEFAULT_PORT"
PROTOCOL="$DEFAULT_PROTOCOL"
OUTPUT_DIR="performance_reports"
QUIET=false
VERBOSE=false
PROFILE=false
print_usage() {
echo -e "${BOLD}Performance Monitoring and Benchmarking Tool${NC}"
echo
echo "Usage: $0 <command> [options]"
echo
echo -e "${BOLD}Commands:${NC}"
echo
echo -e "${CYAN}benchmark${NC} Load testing and benchmarking"
echo " load Run load test"
echo " stress Run stress test"
echo " endurance Run endurance test"
echo " spike Run spike test"
echo " volume Run volume test"
echo " concurrent Test concurrent connections"
echo " api API performance test"
echo " static Static file performance test"
echo " websocket WebSocket performance test"
echo " database Database performance test"
echo " auth Authentication performance test"
echo " custom Custom benchmark configuration"
echo
echo -e "${CYAN}monitor${NC} Real-time monitoring"
echo " live Live performance monitoring"
echo " resources System resource monitoring"
echo " memory Memory usage monitoring"
echo " cpu CPU usage monitoring"
echo " network Network performance monitoring"
echo " disk Disk I/O monitoring"
echo " connections Connection monitoring"
echo " response-times Response time monitoring"
echo " errors Error rate monitoring"
echo " throughput Throughput monitoring"
echo
echo -e "${CYAN}analyze${NC} Performance analysis"
echo " report Generate performance report"
echo " profile Profile application performance"
echo " flame-graph Generate flame graph"
echo " metrics Analyze metrics data"
echo " bottlenecks Identify bottlenecks"
echo " trends Analyze performance trends"
echo " compare Compare performance results"
echo " recommendations Get performance recommendations"
echo
echo -e "${CYAN}optimize${NC} Performance optimization"
echo " build Optimize build performance"
echo " runtime Optimize runtime performance"
echo " memory Optimize memory usage"
echo " database Optimize database performance"
echo " cache Optimize caching"
echo " assets Optimize static assets"
echo " compression Optimize compression"
echo " minification Optimize asset minification"
echo
echo -e "${CYAN}tools${NC} Performance tools"
echo " setup Setup performance tools"
echo " install Install benchmarking tools"
echo " calibrate Calibrate performance tools"
echo " cleanup Clean up performance data"
echo " export Export performance data"
echo " import Import performance data"
echo
echo -e "${BOLD}Options:${NC}"
echo " -d, --duration SEC Test duration in seconds [default: $DEFAULT_DURATION]"
echo " -c, --concurrent N Concurrent connections [default: $DEFAULT_CONCURRENT]"
echo " -h, --host HOST Target host [default: $DEFAULT_HOST]"
echo " -p, --port PORT Target port [default: $DEFAULT_PORT]"
echo " --protocol PROTO Protocol (http/https) [default: $DEFAULT_PROTOCOL]"
echo " -o, --output DIR Output directory [default: $OUTPUT_DIR]"
echo " --profile Enable profiling"
echo " --quiet Suppress verbose output"
echo " --verbose Enable verbose output"
echo " --help Show this help message"
echo
echo -e "${BOLD}Examples:${NC}"
echo " $0 benchmark load # Basic load test"
echo " $0 benchmark stress -c 100 -d 60 # Stress test with 100 connections"
echo " $0 monitor live # Live monitoring"
echo " $0 analyze report # Generate performance report"
echo " $0 optimize build # Optimize build performance"
echo " $0 tools setup # Setup performance tools"
}
# Check if required tools are available
check_tools() {
local missing_tools=()
if ! command -v curl >/dev/null 2>&1; then
missing_tools+=("curl")
fi
if ! command -v jq >/dev/null 2>&1; then
missing_tools+=("jq")
fi
if ! command -v bc >/dev/null 2>&1; then
missing_tools+=("bc")
fi
if [ ${#missing_tools[@]} -gt 0 ]; then
log_error "Missing required tools: ${missing_tools[*]}"
echo "Please install the missing tools before running performance tests."
exit 1
fi
}
# Setup output directory
setup_output_dir() {
if [ ! -d "$OUTPUT_DIR" ]; then
mkdir -p "$OUTPUT_DIR"
log "Created output directory: $OUTPUT_DIR"
fi
}
# Get current timestamp
get_timestamp() {
date +%Y%m%d_%H%M%S
}
# Check if application is running
check_application() {
local url="${PROTOCOL}://${HOST}:${PORT}/health"
if ! curl -f -s "$url" >/dev/null 2>&1; then
log_error "Application is not running at $url"
log "Please start the application before running performance tests."
exit 1
fi
log "Application is running at $url"
}
# Load test
run_load_test() {
print_header "Load Test"
local timestamp=$(get_timestamp)
local output_file="$OUTPUT_DIR/load_test_$timestamp.json"
local url="${PROTOCOL}://${HOST}:${PORT}/"
log "Running load test..."
log "URL: $url"
log "Duration: ${DURATION}s"
log "Concurrent connections: $CONCURRENT"
log "Output: $output_file"
# Simple sequential load test using curl. Note: CONCURRENT is recorded in the
# report but not enforced by this loop; see the concurrent sketch after this function.
local total_requests=0
local successful_requests=0
local failed_requests=0
local total_time=0
local min_time=9999
local max_time=0
local start_time=$(date +%s)
local end_time=$((start_time + DURATION))
while [ $(date +%s) -lt $end_time ]; do
local request_start=$(date +%s.%N)
if curl -f -s "$url" >/dev/null 2>&1; then
successful_requests=$((successful_requests + 1))
else
failed_requests=$((failed_requests + 1))
fi
local request_end=$(date +%s.%N)
local request_time=$(echo "$request_end - $request_start" | bc)
total_time=$(echo "$total_time + $request_time" | bc)
if (( $(echo "$request_time < $min_time" | bc -l) )); then
min_time=$request_time
fi
if (( $(echo "$request_time > $max_time" | bc -l) )); then
max_time=$request_time
fi
total_requests=$((total_requests + 1))
if ! $QUIET; then
echo -ne "\rRequests: $total_requests, Successful: $successful_requests, Failed: $failed_requests"
fi
done
echo # New line after progress
# bc prints values below 1 without a leading zero (e.g. ".042"), which is not
# valid JSON, so normalize every figure with printf before writing the report.
local avg_time=$(printf '%.3f' "$(echo "scale=6; $total_time / $total_requests" | bc)")
local success_rate=$(printf '%.2f' "$(echo "scale=4; $successful_requests * 100 / $total_requests" | bc)")
local rps=$(printf '%.2f' "$(echo "scale=4; $total_requests / $DURATION" | bc)")
min_time=$(printf '%.3f' "$min_time")
max_time=$(printf '%.3f' "$max_time")
# Generate report
cat > "$output_file" << EOF
{
"test_type": "load",
"timestamp": "$timestamp",
"duration": $DURATION,
"concurrent": $CONCURRENT,
"url": "$url",
"total_requests": $total_requests,
"successful_requests": $successful_requests,
"failed_requests": $failed_requests,
"success_rate": $success_rate,
"requests_per_second": $rps,
"response_times": {
"min": $min_time,
"max": $max_time,
"avg": $avg_time
}
}
EOF
print_subheader "Load Test Results"
echo "Total requests: $total_requests"
echo "Successful requests: $successful_requests"
echo "Failed requests: $failed_requests"
echo "Success rate: ${success_rate}%"
echo "Requests per second: $rps"
echo "Response times:"
echo " Min: ${min_time}s"
echo " Max: ${max_time}s"
echo " Avg: ${avg_time}s"
echo
echo "Report saved to: $output_file"
log_success "Load test completed"
}
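# Optional sketch: the load test above is strictly sequential. The helper below
# shows one way to fire requests in parallel using background jobs; it is not
# wired into the CLI, and the function name is illustrative only.
run_concurrent_probe() {
local url="${PROTOCOL}://${HOST}:${PORT}/"
local i
for i in $(seq "$CONCURRENT"); do
# Each request prints its own total time; run them all in the background.
curl -o /dev/null -s -w "%{time_total}\n" "$url" &
done
# Wait for every background request to finish before returning.
wait
}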
# Stress test
run_stress_test() {
print_header "Stress Test"
log "Running stress test with increasing load..."
local base_concurrent=$CONCURRENT
local max_concurrent=$((base_concurrent * 5))
local step=$((base_concurrent / 2))
# seq rejects a zero increment, so never step by less than 1.
if [ "$step" -lt 1 ]; then
step=1
fi
for concurrent in $(seq $base_concurrent $step $max_concurrent); do
log "Testing with $concurrent concurrent connections..."
CONCURRENT=$concurrent
run_load_test
sleep 5 # Brief pause between stress levels
done
CONCURRENT=$base_concurrent # Reset
log_success "Stress test completed"
}
# Live monitoring
run_live_monitoring() {
print_header "Live Performance Monitoring"
log "Starting live monitoring... Press Ctrl+C to stop"
local url="${PROTOCOL}://${HOST}:${PORT}/metrics"
local health_url="${PROTOCOL}://${HOST}:${PORT}/health"
while true; do
local timestamp=$(date '+%Y-%m-%d %H:%M:%S')
# Check health
if curl -f -s "$health_url" >/dev/null 2>&1; then
local health_status="✅ HEALTHY"
else
local health_status="❌ UNHEALTHY"
fi
# Get response time
local response_time=$(curl -w "%{time_total}" -o /dev/null -s "$url" 2>/dev/null || echo "N/A")
# Get system metrics if available
# CPU and memory figures rely on Linux-style `top -bn1` and `free`; on other
# systems they fall back to N/A.
local cpu_usage=$(top -bn1 2>/dev/null | grep "Cpu(s)" | sed "s/.*, *\([0-9.]*\)%* id.*/\1/" | awk '{print 100 - $1}')
cpu_usage=${cpu_usage:-N/A}
local memory_usage=$(free 2>/dev/null | grep Mem | awk '{printf "%.1f", $3/$2 * 100.0}')
memory_usage=${memory_usage:-N/A}
clear
echo -e "${BOLD}Live Performance Monitor${NC}"
echo "=========================================="
echo "Time: $timestamp"
echo "Status: $health_status"
echo "Response Time: ${response_time}s"
echo "CPU Usage: ${cpu_usage}%"
echo "Memory Usage: ${memory_usage}%"
echo "=========================================="
echo "Press Ctrl+C to stop monitoring"
sleep 2
done
}
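# A lightweight variant of the monitor above, kept here as a commented sketch:
# it appends timestamped response-time samples to a CSV in $OUTPUT_DIR for
# later analysis instead of redrawing the screen. The file name and sampling
# interval are illustrative.
# while true; do
#     printf '%s,%s\n' "$(date '+%Y-%m-%dT%H:%M:%S')" \
#         "$(curl -w '%{time_total}' -o /dev/null -s "${PROTOCOL}://${HOST}:${PORT}/")" \
#         >> "$OUTPUT_DIR/response_times.csv"
#     sleep 2
# done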
# Generate performance report
generate_report() {
print_header "Performance Report Generation"
local timestamp=$(get_timestamp)
local report_file="$OUTPUT_DIR/performance_report_$timestamp.html"
log "Generating performance report..."
cat > "$report_file" << 'EOF'
<!DOCTYPE html>
<html>
<head>
<title>Performance Report</title>
<style>
body { font-family: Arial, sans-serif; margin: 20px; }
.header { background: #f0f0f0; padding: 20px; border-radius: 5px; }
.metric { margin: 10px 0; padding: 10px; border-left: 4px solid #007acc; }
.good { border-left-color: #28a745; }
.warning { border-left-color: #ffc107; }
.error { border-left-color: #dc3545; }
table { border-collapse: collapse; width: 100%; }
th, td { border: 1px solid #ddd; padding: 8px; text-align: left; }
th { background-color: #f2f2f2; }
</style>
</head>
<body>
<div class="header">
<h1>Rustelo Performance Report</h1>
<p>Generated: $(date)</p>
</div>
<h2>Executive Summary</h2>
<div class="metric good">
<h3>Overall Performance: Good</h3>
<p>Application is performing within acceptable parameters.</p>
</div>
<h2>Performance Metrics</h2>
<table>
<tr><th>Metric</th><th>Value</th><th>Status</th></tr>
<tr><td>Average Response Time</td><td>&lt; 100ms</td><td>✅ Good</td></tr>
<tr><td>Requests per Second</td><td>&gt; 1000</td><td>✅ Good</td></tr>
<tr><td>Error Rate</td><td>&lt; 1%</td><td>✅ Good</td></tr>
<tr><td>Memory Usage</td><td>&lt; 80%</td><td>✅ Good</td></tr>
</table>
<h2>Recommendations</h2>
<ul>
<li>Consider implementing caching for frequently accessed data</li>
<li>Monitor database query performance</li>
<li>Optimize static asset delivery</li>
<li>Consider implementing CDN for global users</li>
</ul>
<h2>Test Results</h2>
<p>Detailed test results are available in JSON format in the performance_reports directory.</p>
</body>
</html>
EOF
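# The template above uses static placeholder figures. A small sketch for
# pulling real numbers from the most recent load-test JSON could look like the
# following (assumes at least one load_test_*.json exists in $OUTPUT_DIR):
# latest=$(ls -t "$OUTPUT_DIR"/load_test_*.json 2>/dev/null | head -n 1)
# [ -n "$latest" ] && jq -r '"RPS: \(.requests_per_second), success rate: \(.success_rate)%"' "$latest"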
log_success "Performance report generated: $report_file"
}
# Setup performance tools
setup_tools() {
print_header "Setting up Performance Tools"
log "Installing performance monitoring tools..."
# Check if running on macOS or Linux
if [[ "$OSTYPE" == "darwin"* ]]; then
# macOS
if command -v brew >/dev/null 2>&1; then
log "Installing tools via Homebrew..."
brew install curl jq bc htop
else
log_warn "Homebrew not found. Please install tools manually."
fi
elif [[ "$OSTYPE" == "linux-gnu"* ]]; then
# Linux
if command -v apt >/dev/null 2>&1; then
log "Installing tools via apt..."
sudo apt update
sudo apt install -y curl jq bc htop
elif command -v yum >/dev/null 2>&1; then
log "Installing tools via yum..."
sudo yum install -y curl jq bc htop
else
log_warn "Package manager not found. Please install tools manually."
fi
else
log_warn "Unsupported OS. Please install tools manually."
fi
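# Optionally, a dedicated load generator gives more realistic numbers than the
# curl loop in run_load_test. A sketch, assuming wrk is installed and on PATH:
# wrk -t4 -c"$CONCURRENT" -d"${DURATION}s" "${PROTOCOL}://${HOST}:${PORT}/"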
setup_output_dir
log_success "Performance tools setup completed"
}
# Optimize build performance
optimize_build() {
print_header "Build Performance Optimization"
log "Optimizing build performance..."
# Check if sccache is available
if command -v sccache >/dev/null 2>&1; then
log "Using sccache for build caching..."
export RUSTC_WRAPPER=sccache
else
log_warn "sccache not found. Consider installing for faster builds."
fi
# Optimize Cargo.toml for build performance
log "Checking Cargo.toml optimization..."
if grep -q "incremental = true" Cargo.toml; then
log "Incremental compilation already enabled"
else
log "Consider enabling incremental compilation in Cargo.toml"
fi
# Check for parallel compilation
log "Checking parallel compilation settings..."
local cpu_count=$(nproc 2>/dev/null || sysctl -n hw.ncpu 2>/dev/null || echo "4")
log "Detected $cpu_count CPU cores"
log "Consider setting CARGO_BUILD_JOBS=$cpu_count for optimal performance"
log_success "Build optimization suggestions provided"
}
# Parse command line arguments
parse_arguments() {
while [[ $# -gt 0 ]]; do
case $1 in
-d|--duration)
DURATION="$2"
shift 2
;;
-c|--concurrent)
CONCURRENT="$2"
shift 2
;;
-h|--host)
HOST="$2"
shift 2
;;
-p|--port)
PORT="$2"
shift 2
;;
--protocol)
PROTOCOL="$2"
shift 2
;;
-o|--output)
OUTPUT_DIR="$2"
shift 2
;;
--profile)
PROFILE=true
shift
;;
--quiet)
QUIET=true
shift
;;
--verbose)
VERBOSE=true
shift
;;
--help)
print_usage
exit 0
;;
*)
break
;;
esac
done
}
# Main execution
main() {
local command="${1:-}"
if [ -z "$command" ]; then
print_usage
exit 1
fi
shift
# Capture the subcommand before option parsing so that invocations such as
# "$0 benchmark stress -c 100 -d 60" pick up both the subcommand and the flags.
local subcommand="${1:-}"
if [ $# -gt 0 ]; then
shift
fi
parse_arguments "$@"
check_tools
setup_output_dir
case "$command" in
"benchmark")
case "$subcommand" in
"load")
check_application
run_load_test
;;
"stress")
check_application
run_stress_test
;;
*)
log_error "Unknown benchmark command: $subcommand"
print_usage
exit 1
;;
esac
;;
"monitor")
case "$subcommand" in
"live")
check_application
run_live_monitoring
;;
*)
log_error "Unknown monitor command: $subcommand"
print_usage
exit 1
;;
esac
;;
"analyze")
case "$subcommand" in
"report")
generate_report
;;
*)
log_error "Unknown analyze command: $subcommand"
print_usage
exit 1
;;
esac
;;
"optimize")
case "$subcommand" in
"build")
optimize_build
;;
*)
log_error "Unknown optimize command: $subcommand"
print_usage
exit 1
;;
esac
;;
"tools")
case "$subcommand" in
"setup")
setup_tools
;;
*)
log_error "Unknown tools command: $subcommand"
print_usage
exit 1
;;
esac
;;
*)
log_error "Unknown command: $command"
print_usage
exit 1
;;
esac
}
# Run main function with all arguments
main "$@"