Mirror of https://github.com/acedanger/shell.git (synced 2025-12-06 00:00:13 -08:00)
Refactor variable assignments and improve script readability in validate-plex-backups.sh and validate-plex-recovery.sh
- Changed inline variable assignments to separate declaration and assignment for clarity.
- Updated condition checks and log messages for better readability and consistency.
- Added a backup of validate-plex-recovery.sh for safety.
- Introduced a new script, run-docker-tests.sh, for testing setup in Docker containers.
- Enhanced ssh-login.sh to improve condition checks and logging functionality.
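The first change listed is the one the diff below illustrates: splitting "local var=$(command)" into a declaration followed by a separate assignment. The sketch below is not from the repository; it is a minimal bash illustration of why the split matters: "local" returns its own (successful) exit status, so a failure inside the command substitution is hidden, which is what ShellCheck flags as SC2155.

#!/usr/bin/env bash
# Illustrative only; these function names are made up for this example.

combined() {
    local output=$(false)     # exit status of `local` (0) masks the failure of `false`
    echo "combined:  \$? = $?"    # prints 0; the failure is invisible
}

separated() {
    local output
    output=$(false)           # exit status now comes from the command substitution
    echo "separated: \$? = $?"    # prints 1; the failure can be detected and handled
}

combined
separated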
@@ -75,30 +75,35 @@ declare -a FAILED_INTEGRATION_TESTS=()
 # Logging functions
 log_test() {
-    local timestamp=$(date '+%Y-%m-%d %H:%M:%S')
+    local timestamp
+    timestamp=$(date '+%Y-%m-%d %H:%M:%S')
     echo -e "${CYAN}[INTEGRATION ${timestamp}]${NC} $1"
 }

 log_pass() {
-    local timestamp=$(date '+%Y-%m-%d %H:%M:%S')
+    local timestamp
+    timestamp=$(date '+%Y-%m-%d %H:%M:%S')
     echo -e "${GREEN}[PASS ${timestamp}]${NC} $1"
     INTEGRATION_ASSERTIONS_PASSED=$((INTEGRATION_ASSERTIONS_PASSED + 1))
 }

 log_fail() {
-    local timestamp=$(date '+%Y-%m-%d %H:%M:%S')
+    local timestamp
+    timestamp=$(date '+%Y-%m-%d %H:%M:%S')
     echo -e "${RED}[FAIL ${timestamp}]${NC} $1"
     INTEGRATION_ASSERTIONS_FAILED=$((INTEGRATION_ASSERTIONS_FAILED + 1))
     FAILED_INTEGRATION_TESTS+=("$1")
 }

 log_info() {
-    local timestamp=$(date '+%Y-%m-%d %H:%M:%S')
+    local timestamp
+    timestamp=$(date '+%Y-%m-%d %H:%M:%S')
     echo -e "${BLUE}[INFO ${timestamp}]${NC} $1"
 }

 log_warn() {
-    local timestamp=$(date '+%Y-%m-%d %H:%M:%S')
+    local timestamp
+    timestamp=$(date '+%Y-%m-%d %H:%M:%S')
     echo -e "${YELLOW}[WARN ${timestamp}]${NC} $1"
 }

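The hunk above applies the same declare-then-assign split to all five logging helpers; ShellCheck reports the original form as SC2155 ("Declare and assign separately to avoid masking return values"). Assuming shellcheck is installed, the refactored scripts named in the commit title can be re-checked with:

shellcheck validate-plex-backups.sh validate-plex-recovery.sh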
@@ -209,13 +214,16 @@ test_performance_monitoring() {
     echo "[]" > "$test_perf_log"

     # Simulate performance tracking
-    local start_time=$(date +%s)
+    local start_time
+    start_time=$(date +%s)
     sleep 1
-    local end_time=$(date +%s)
+    local end_time
+    end_time=$(date +%s)
     local duration=$((end_time - start_time))

     # Create performance entry
-    local entry=$(jq -n \
+    local entry
+    entry=$(jq -n \
         --arg operation "integration_test" \
         --arg duration "$duration" \
         --arg timestamp "$(date -Iseconds)" \
@@ -230,7 +238,8 @@ test_performance_monitoring() {
     mv "${test_perf_log}.tmp" "$test_perf_log"

     # Verify entry was added
-    local entry_count=$(jq length "$test_perf_log")
+    local entry_count
+    entry_count=$(jq length "$test_perf_log")
     if [ "$entry_count" -eq 1 ]; then
         log_pass "Performance monitoring integration works"
     else
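For context, the two performance-monitoring hunks above are fragments of a jq append-to-JSON-log flow. The standalone sketch below shows the complete round trip the test appears to verify; the log file name perf.json, the stand-in duration value, and the exact entry fields are assumptions for illustration, not taken from the repository.

#!/usr/bin/env bash
perf_log="perf.json"                             # stand-in log file name
[ -s "$perf_log" ] || echo "[]" > "$perf_log"    # start from an empty JSON array

duration=2                                       # stand-in value for a measured duration

# Build one entry as a JSON object.
entry=$(jq -n \
    --arg operation "integration_test" \
    --arg duration "$duration" \
    --arg timestamp "$(date -Iseconds)" \
    '{operation: $operation, duration: $duration, timestamp: $timestamp}')

# Append it to the array and replace the log via a temp file.
jq --argjson entry "$entry" '. += [$entry]' "$perf_log" > "${perf_log}.tmp"
mv "${perf_log}.tmp" "$perf_log"

jq length "$perf_log"                            # prints the number of recorded entries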
@@ -338,7 +347,8 @@ test_parallel_processing() {
     INTEGRATION_TEST_FUNCTIONS=$((INTEGRATION_TEST_FUNCTIONS + 1))
     log_test "Parallel Processing Capabilities"

-    local temp_dir=$(mktemp -d)
+    local temp_dir
+    temp_dir=$(mktemp -d)
     local -a pids=()
     local total_jobs=3
     local completed_jobs=0
@@ -347,7 +357,7 @@ test_parallel_processing() {
     for i in $(seq 1 $total_jobs); do
         (
             # Simulate parallel work
-            sleep 0.$i
+            sleep "0.$i"
             echo "Job $i completed" > "$temp_dir/job_$i.result"
         ) &
         pids+=($!)
@@ -361,7 +371,8 @@ test_parallel_processing() {
     done

     # Verify results
-    local result_files=$(find "$temp_dir" -name "job_*.result" | wc -l)
+    local result_files
+    result_files=$(find "$temp_dir" -name "job_*.result" | wc -l)

     # Cleanup
     rm -rf "$temp_dir"
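The three parallel-processing hunks only show fragments of the job-spawning loop. The self-contained sketch below reconstructs the pattern being tested; the wait loop and the final count are assumed from the visible fragments, not copied from the repository.

#!/usr/bin/env bash
# Launch background jobs, record their PIDs, wait on each, then count results.
temp_dir=$(mktemp -d)
declare -a pids=()
total_jobs=3

for i in $(seq 1 "$total_jobs"); do
    (
        sleep "0.$i"                                      # simulate staggered work
        echo "Job $i completed" > "$temp_dir/job_$i.result"
    ) &
    pids+=("$!")
done

for pid in "${pids[@]}"; do
    wait "$pid"                                           # block until each job exits
done

result_files=$(find "$temp_dir" -name "job_*.result" | wc -l)
echo "Completed jobs: $result_files of $total_jobs"
rm -rf "$temp_dir"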
@@ -386,15 +397,19 @@ test_checksum_caching() {
     echo "checksum test content" > "$test_file"

     # First checksum calculation (should create cache)
-    local checksum1=$(md5sum "$test_file" | cut -d' ' -f1)
+    local checksum1
+    checksum1=$(md5sum "$test_file" | cut -d' ' -f1)
     echo "$checksum1" > "$cache_file"

     # Simulate cache check
-    local file_mtime=$(stat -c %Y "$test_file")
-    local cache_mtime=$(stat -c %Y "$cache_file")
+    local file_mtime
+    file_mtime=$(stat -c %Y "$test_file")
+    local cache_mtime
+    cache_mtime=$(stat -c %Y "$cache_file")

     if [ "$cache_mtime" -ge "$file_mtime" ]; then
-        local cached_checksum=$(cat "$cache_file")
+        local cached_checksum
+        cached_checksum=$(cat "$cache_file")
         if [ "$cached_checksum" = "$checksum1" ]; then
             log_pass "Checksum caching system works correctly"
         else
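The caching logic in the last hunk compares modification times to decide whether a stored checksum can be reused. Below is a compact sketch of the same idea as a reusable helper; the function name cached_md5 and the .md5cache suffix are invented for this example and do not come from the repository.

#!/usr/bin/env bash
# Recompute the md5 only when the file is newer than its cache file.
cached_md5() {
    local file="$1"
    local cache_file="${file}.md5cache"
    if [ -f "$cache_file" ] && [ "$(stat -c %Y "$cache_file")" -ge "$(stat -c %Y "$file")" ]; then
        cat "$cache_file"                                  # cache is fresh; reuse it
    else
        md5sum "$file" | cut -d' ' -f1 | tee "$cache_file" # recompute and refresh the cache
    fi
}

# Usage: the second call reuses the cached value instead of rehashing.
echo "demo content" > /tmp/demo.txt
cached_md5 /tmp/demo.txt
cached_md5 /tmp/demo.txt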