Mirror of https://github.com/acedanger/shell.git, synced 2025-12-06 03:20:12 -08:00
Refactor variable assignments and improve script readability in validate-plex-backups.sh and validate-plex-recovery.sh
- Changed inline variable assignments to separate declaration and assignment for clarity (a short sketch of why this matters follows this list).
- Updated condition checks and log messages for better readability and consistency.
- Added a backup of validate-plex-recovery.sh for safety.
- Introduced a new script run-docker-tests.sh for testing setup in Docker containers.
- Enhanced ssh-login.sh to improve condition checks and logging functionality.
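The motivation for splitting the assignments is the pattern ShellCheck reports as SC2155: when `local` and a command substitution share one line, the exit status of `local` itself (almost always 0) overwrites the substituted command's status, so failures go unnoticed. A minimal standalone sketch, not taken from this repository, illustrating the difference:

#!/usr/bin/env bash
# Hypothetical demo script -- not part of this commit.

demo_inline() {
    # `local` runs last here, so $? reports local's status (0)
    # and the failure of the command substitution is masked.
    local output=$(false)
    echo "inline assignment: \$? = $?"   # prints 0
}

demo_split() {
    # Declaring first and assigning separately keeps the
    # substituted command's exit status visible in $?.
    local output
    output=$(false)
    echo "split assignment:  \$? = $?"   # prints 1
}

demo_inline
demo_split

Run under bash, the first call prints 0 and the second prints 1, which is why the inline form is flagged and why the diff below converts every `local var=$(...)` into a declaration followed by an assignment.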
@@ -62,7 +62,6 @@ CYAN='\033[0;36m'
 NC='\033[0m' # No Color
-
 # Test configuration
 SCRIPT_DIR="$(dirname "$(readlink -f "$0")")"
 TEST_DIR="/tmp/plex-backup-test-$(date +%s)"
 TEST_BACKUP_ROOT="$TEST_DIR/backups"
 TEST_LOG_ROOT="$TEST_DIR/logs"
@@ -76,30 +75,35 @@ declare -a FAILED_TESTS=()
 
 # Logging functions
 log_test() {
-    local timestamp=$(date '+%Y-%m-%d %H:%M:%S')
+    local timestamp
+    timestamp=$(date '+%Y-%m-%d %H:%M:%S')
     echo -e "${CYAN}[TEST ${timestamp}]${NC} $1"
 }
 
 log_pass() {
-    local timestamp=$(date '+%Y-%m-%d %H:%M:%S')
+    local timestamp
+    timestamp=$(date '+%Y-%m-%d %H:%M:%S')
     echo -e "${GREEN}[PASS ${timestamp}]${NC} $1"
     TESTS_PASSED=$((TESTS_PASSED + 1))
 }
 
 log_fail() {
-    local timestamp=$(date '+%Y-%m-%d %H:%M:%S')
+    local timestamp
+    timestamp=$(date '+%Y-%m-%d %H:%M:%S')
     echo -e "${RED}[FAIL ${timestamp}]${NC} $1"
     TESTS_FAILED=$((TESTS_FAILED + 1))
     FAILED_TESTS+=("$1")
 }
 
 log_info() {
-    local timestamp=$(date '+%Y-%m-%d %H:%M:%S')
+    local timestamp
+    timestamp=$(date '+%Y-%m-%d %H:%M:%S')
     echo -e "${BLUE}[INFO ${timestamp}]${NC} $1"
 }
 
 log_warn() {
-    local timestamp=$(date '+%Y-%m-%d %H:%M:%S')
+    local timestamp
+    timestamp=$(date '+%Y-%m-%d %H:%M:%S')
     echo -e "${YELLOW}[WARN ${timestamp}]${NC} $1"
 }
 
@@ -124,14 +128,16 @@ record_test_result() {
     local test_name="$1"
     local status="$2"
     local error_message="$3"
-    local timestamp=$(date -Iseconds)
+    local timestamp
+    timestamp=$(date -Iseconds)
 
     # Initialize results file if it doesn't exist
     if [ ! -f "$TEST_RESULTS_FILE" ]; then
         echo "[]" > "$TEST_RESULTS_FILE"
     fi
 
-    local result=$(jq -n \
+    local result
+    result=$(jq -n \
         --arg test_name "$test_name" \
         --arg status "$status" \
         --arg error_message "$error_message" \
@@ -186,7 +192,7 @@ mock_manage_plex_service() {
 
 mock_calculate_checksum() {
     local file="$1"
-    echo "$(echo "$file" | md5sum | cut -d' ' -f1)"
+    echo "$file" | md5sum | cut -d' ' -f1
     return 0
 }
 
@@ -226,10 +232,12 @@ test_performance_tracking() {
     track_performance_test() {
         local operation="$1"
         local start_time="$2"
-        local end_time=$(date +%s)
+        local end_time
+        end_time=$(date +%s)
         local duration=$((end_time - start_time))
 
-        local entry=$(jq -n \
+        local entry
+        entry=$(jq -n \
            --arg operation "$operation" \
            --arg duration "$duration" \
            --arg timestamp "$(date -Iseconds)" \
@@ -244,12 +252,14 @@ test_performance_tracking() {
     }
 
     # Test tracking
-    local start_time=$(date +%s)
+    local start_time
+    start_time=$(date +%s)
     sleep 1 # Simulate work
     track_performance_test "test_operation" "$start_time"
 
     # Verify entry was added
-    local entry_count=$(jq length "$test_perf_log")
+    local entry_count
+    entry_count=$(jq length "$test_perf_log")
     if [ "$entry_count" -eq 1 ]; then
         return 0
     else
@@ -297,11 +307,13 @@ test_checksum_caching() {
     calculate_checksum_test() {
         local file="$1"
         local cache_file="${file}.md5"
-        local file_mtime=$(stat -c %Y "$file" 2>/dev/null || echo "0")
+        local file_mtime
+        file_mtime=$(stat -c %Y "$file" 2>/dev/null || echo "0")
 
         # Check cache
         if [ -f "$cache_file" ]; then
-            local cache_mtime=$(stat -c %Y "$cache_file" 2>/dev/null || echo "0")
+            local cache_mtime
+            cache_mtime=$(stat -c %Y "$cache_file" 2>/dev/null || echo "0")
             if [ "$cache_mtime" -gt "$file_mtime" ]; then
                 cat "$cache_file"
                 return 0
@@ -309,16 +321,19 @@ test_checksum_caching() {
         fi
 
         # Calculate and cache
-        local checksum=$(md5sum "$file" | cut -d' ' -f1)
+        local checksum
+        checksum=$(md5sum "$file" | cut -d' ' -f1)
         echo "$checksum" > "$cache_file"
         echo "$checksum"
     }
 
     # First calculation (should create cache)
-    local checksum1=$(calculate_checksum_test "$test_file")
+    local checksum1
+    checksum1=$(calculate_checksum_test "$test_file")
 
     # Second calculation (should use cache)
-    local checksum2=$(calculate_checksum_test "$test_file")
+    local checksum2
+    checksum2=$(calculate_checksum_test "$test_file")
 
     # Verify checksums match and cache file exists
     if [ "$checksum1" = "$checksum2" ] && [ -f "$cache_file" ]; then
@@ -342,8 +357,10 @@ test_backup_verification() {
         local src="$1"
         local dest="$2"
 
-        local src_checksum=$(md5sum "$src" | cut -d' ' -f1)
-        local dest_checksum=$(md5sum "$dest" | cut -d' ' -f1)
+        local src_checksum
+        src_checksum=$(md5sum "$src" | cut -d' ' -f1)
+        local dest_checksum
+        dest_checksum=$(md5sum "$dest" | cut -d' ' -f1)
 
         if [ "$src_checksum" = "$dest_checksum" ]; then
             return 0
@@ -362,16 +379,17 @@ test_backup_verification() {
 
 # Test: Parallel processing framework
 test_parallel_processing() {
-    local temp_dir=$(mktemp -d)
+    local temp_dir
+    temp_dir=$(mktemp -d)
     local -a pids=()
     local total_jobs=5
     local completed_jobs=0
 
     # Simulate parallel jobs
-    for i in $(seq 1 $total_jobs); do
+    for i in $(seq 1 "$total_jobs"); do
         (
             # Simulate work
-            sleep 0.$i
+            sleep 0."$i"
             echo "$i" > "$temp_dir/job_$i.result"
         ) &
         pids+=($!)
@@ -385,7 +403,8 @@ test_parallel_processing() {
     done
 
     # Verify all jobs completed
-    local result_files=$(find "$temp_dir" -name "job_*.result" | wc -l)
+    local result_files
+    result_files=$(find "$temp_dir" -name "job_*.result" | wc -l)
 
     # Cleanup
     rm -rf "$temp_dir"
@@ -410,7 +429,8 @@ test_database_integrity() {
         local db_file="$1"
 
         # Use sqlite3 instead of Plex SQLite for testing
-        local result=$(sqlite3 "$db_file" "PRAGMA integrity_check;" 2>/dev/null)
+        local result
+        result=$(sqlite3 "$db_file" "PRAGMA integrity_check;" 2>/dev/null)
 
         if echo "$result" | grep -q "ok"; then
             return 0
@@ -449,7 +469,8 @@ test_configuration_parsing() {
     }
 
     # Test parsing
-    local result=$(parse_args_test --auto-repair --webhook=http://example.com)
+    local result
+    result=$(parse_args_test --auto-repair --webhook=http://example.com)
 
     if echo "$result" | grep -q "true true http://example.com"; then
         return 0
@@ -523,19 +544,22 @@ run_integration_tests() {
 run_performance_tests() {
     log_info "Starting performance benchmarks"
 
-    local start_time=$(date +%s)
+    local start_time
+    start_time=$(date +%s)
 
     # Test file operations
     local test_file="$TEST_DIR/perf_test.dat"
     dd if=/dev/zero of="$test_file" bs=1M count=10 2>/dev/null
 
     # Benchmark checksum calculation
-    local checksum_start=$(date +%s)
+    local checksum_start
+    checksum_start=$(date +%s)
     md5sum "$test_file" > /dev/null
     local checksum_time=$(($(date +%s) - checksum_start))
 
     # Benchmark compression
-    local compress_start=$(date +%s)
+    local compress_start
+    compress_start=$(date +%s)
     tar -czf "$TEST_DIR/perf_test.tar.gz" -C "$TEST_DIR" "perf_test.dat"
     local compress_time=$(($(date +%s) - compress_start))
 
@@ -547,7 +571,8 @@ run_performance_tests() {
     log_info " Total benchmark time: ${total_time}s"
 
     # Record performance data
-    local perf_entry=$(jq -n \
+    local perf_entry
+    perf_entry=$(jq -n \
         --arg checksum_time "$checksum_time" \
         --arg compress_time "$compress_time" \
         --arg total_time "$total_time" \
@@ -565,7 +590,8 @@ run_performance_tests() {
 
 # Generate comprehensive test report
 generate_test_report() {
-    local timestamp=$(date '+%Y-%m-%d %H:%M:%S')
+    local timestamp
+    timestamp=$(date '+%Y-%m-%d %H:%M:%S')
 
     echo
     echo "=============================================="
@@ -601,7 +627,8 @@ generate_test_report() {
 
     # Save detailed results
     if [ -f "$TEST_RESULTS_FILE" ]; then
-        local report_file="$TEST_DIR/test_report_$(date +%Y%m%d_%H%M%S).json"
+        local report_file
+        report_file="$TEST_DIR/test_report_$(date +%Y%m%d_%H%M%S).json"
         jq -n \
             --arg timestamp "$timestamp" \
             --arg tests_run "$TESTS_RUN" \
@@ -645,22 +672,27 @@ run_integration_tests() {
 run_performance_tests() {
     log_info "Running performance benchmarks..."
 
-    local start_time=$(date +%s)
+    local start_time
+    start_time=$(date +%s)
 
     # Create large test files
     local large_file="$TEST_DIR/large_test.db"
     dd if=/dev/zero of="$large_file" bs=1M count=100 2>/dev/null
 
     # Benchmark checksum calculation
-    local checksum_start=$(date +%s)
+    local checksum_start
+    checksum_start=$(date +%s)
     md5sum "$large_file" > /dev/null
-    local checksum_end=$(date +%s)
+    local checksum_end
+    checksum_end=$(date +%s)
     local checksum_time=$((checksum_end - checksum_start))
 
     # Benchmark compression
-    local compress_start=$(date +%s)
+    local compress_start
+    compress_start=$(date +%s)
     tar -czf "$TEST_DIR/large_test.tar.gz" -C "$TEST_DIR" "large_test.db"
-    local compress_end=$(date +%s)
+    local compress_end
+    compress_end=$(date +%s)
     local compress_time=$((compress_end - compress_start))
 
     local total_time=$(($(date +%s) - start_time))