# Integration Test Runner # Orchestrates execution of all integration tests with parallel support and reporting use std log use test_helpers.nu * use orbstack_helpers.nu * # Main test runner export def main [ --filter: string = "" # Filter pattern (regex) to run subset of tests --parallel: int = 1 # Number of parallel test workers --mode: string = "" # Test specific mode (solo, multiuser, cicd, enterprise) --verbose: bool = false # Detailed output --report: string = "" # Generate HTML report at path --skip-setup: bool = false # Skip environment setup --skip-teardown: bool = false # Skip environment teardown ] { log info "Integration Test Runner" log info "=======================" let test_config = (load-test-config) # Discover all test files let test_files = discover-test-files $filter log info $"Found ($test_files | length) test files" if $verbose { log info "Test files:" $test_files | each { |f| log info $" - ($f)" } } mut all_results = [] # Determine which modes to test let modes_to_test = if ($mode | is-not-empty) { [$mode] } else { ["solo", "multiuser", "cicd", "enterprise"] } for mode in $modes_to_test { log info $"Testing mode: ($mode)" # Setup environment for this mode if not $skip_setup { setup-test-environment $mode $test_config } # Run tests for this mode let mode_results = run-tests-for-mode $mode $test_files $parallel $verbose $all_results = ($all_results | append $mode_results) # Teardown environment if not $skip_teardown { teardown-test-environment $test_config } } # Generate reports generate-junit-report $all_results $test_config if ($report | is-not-empty) { generate-html-report $all_results $report } # Print summary print-test-summary $all_results # Exit with appropriate code let failed_count = ($all_results | where status == "failed" | length) if $failed_count > 0 { exit 1 } else { exit 0 } } # Discover all test files matching pattern def discover-test-files [filter: string] -> list { let test_root = $"($env.PWD)/provisioning/tests/integration" let all_tests = ( ls $"($test_root)/**/*test*.nu" | get name | where ($it | path basename) starts-with "test_" ) if ($filter | is-empty) { $all_tests } else { $all_tests | where ($it =~ $filter) } } # Setup test environment for mode def setup-test-environment [mode: string, test_config: record] { log info $"Setting up test environment for mode: ($mode)" nu provisioning/tests/integration/setup_test_environment.nu --mode $mode log info "Test environment ready" } # Teardown test environment def teardown-test-environment [test_config: record] { log info "Tearing down test environment..." nu provisioning/tests/integration/teardown_test_environment.nu --force log info "Test environment cleaned up" } # Run tests for a specific mode def run-tests-for-mode [ mode: string test_files: list parallel: int verbose: bool ] -> list { log info $"Running tests for mode: ($mode)" # Filter test files relevant to this mode let mode_tests = $test_files | where ($it =~ $"modes/test_($mode)_mode.nu" or not ($it =~ "modes/")) if $parallel > 1 { run-tests-parallel $mode_tests $parallel $verbose } else { run-tests-sequential $mode_tests $verbose } } # Run tests sequentially def run-tests-sequential [test_files: list, verbose: bool] -> list { mut results = [] for test_file in $test_files { log info $"Running test file: ($test_file | path basename)" let test_result = execute-test-file $test_file $verbose $results = ($results | append $test_result) } $results } # Run tests in parallel def run-tests-parallel [ test_files: list workers: int verbose: bool ] -> list { log info $"Running tests in parallel with ($workers) workers" # Split test files into chunks let chunk_size = (($test_files | length) / $workers | into int) + 1 let chunks = ( $test_files | enumerate | group-by { |x| ($x.index / $chunk_size | into int) } | values | each { |chunk| $chunk | get item } ) # Run each chunk in parallel let results = ( $chunks | par-each { |chunk| $chunk | each { |test_file| execute-test-file $test_file $verbose } } | flatten ) $results } # Execute a single test file def execute-test-file [test_file: string, verbose: bool] -> record { let start_time = (date now) try { # Run the test file let output = (nu $test_file | complete) let duration = ((date now) - $start_time | into int) / 1000000 if $output.exit_code == 0 { if $verbose { log info $"✓ ($test_file | path basename) passed \(($duration)ms\)" } { test_file: $test_file test_name: ($test_file | path basename | str replace ".nu" "") status: "passed" duration_ms: $duration error_message: "" stdout: $output.stdout stderr: $output.stderr timestamp: (date now | format date "%Y-%m-%dT%H:%M:%S%.3fZ") } } else { log error $"✗ ($test_file | path basename) failed \(($duration)ms\)" { test_file: $test_file test_name: ($test_file | path basename | str replace ".nu" "") status: "failed" duration_ms: $duration error_message: $output.stderr stdout: $output.stdout stderr: $output.stderr timestamp: (date now | format date "%Y-%m-%dT%H:%M:%S%.3fZ") } } } catch { |err| let duration = ((date now) - $start_time | into int) / 1000000 log error $"✗ ($test_file | path basename) crashed \(($duration)ms\): ($err.msg)" { test_file: $test_file test_name: ($test_file | path basename | str replace ".nu" "") status: "failed" duration_ms: $duration error_message: $err.msg stdout: "" stderr: $err.msg timestamp: (date now | format date "%Y-%m-%dT%H:%M:%S%.3fZ") } } } # Generate JUnit XML report def generate-junit-report [results: list, test_config: record] { log info "Generating JUnit report..." let report_dir = $test_config.reporting.output_dir mkdir $report_dir let junit_file = $"($report_dir)/($test_config.reporting.junit.filename)" let total = ($results | length) let failures = ($results | where status == "failed" | length) let total_time = ($results | get duration_ms | math sum) / 1000.0 let xml = $" ($results | each { |test| let status_tag = if $test.status == "failed" { $" " } else { "" } $" ($status_tag) " } | str join "\n") " $xml | save -f $junit_file log info $"JUnit report saved: ($junit_file)" } # Generate HTML report def generate-html-report [results: list, output_path: string] { log info "Generating HTML report..." let total = ($results | length) let passed = ($results | where status == "passed" | length) let failed = ($results | where status == "failed" | length) let pass_rate = (($passed / $total) * 100 | into int) let html = $" Integration Test Report

Integration Test Report

Summary

Total Tests: ($total)
Passed: ($passed)
Failed: ($failed)
Pass Rate: ($pass_rate)%

Test Results

($results | each { |test| let status_class = if $test.status == "passed" { "status-passed" } else { "status-failed" } $" " } | str join "\n")
Test Name Status Duration (ms) Error Message
($test.test_name) ($test.status | str upcase) ($test.duration_ms) ($test.error_message)

Generated: (date now | format date "%Y-%m-%d %H:%M:%S")

" $html | save -f $output_path log info $"HTML report saved: ($output_path)" } # Print test summary def print-test-summary [results: list] { let total = ($results | length) let passed = ($results | where status == "passed" | length) let failed = ($results | where status == "failed" | length) let total_time = ($results | get duration_ms | math sum) print "" print "=========================================" print "Integration Test Summary" print "=========================================" print $"Total Tests: ($total)" print $"Passed: ($passed)" print $"Failed: ($failed)" print $"Total Time: ($total_time)ms" print "=========================================" if $failed > 0 { print "" print "Failed Tests:" $results | where status == "failed" | each { |test| print $" ✗ ($test.test_name)" print $" Error: ($test.error_message)" } } }