Mirror of https://github.com/acedanger/shell.git
Refactor variable assignments and improve script readability in validate-plex-backups.sh and validate-plex-recovery.sh
- Changed inline variable assignments to separate declaration and assignment for clarity.
- Updated condition checks and log messages for better readability and consistency.
- Added a backup of validate-plex-recovery.sh for safety.
- Introduced a new script run-docker-tests.sh for testing setup in Docker containers.
- Enhanced ssh-login.sh to improve condition checks and logging functionality.
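The core pattern throughout this diff is the ShellCheck SC2155 fix: in `local var=$(cmd)`, the exit status of `cmd` is swallowed by `local`, which returns 0. A minimal sketch of the difference, with illustrative names only, not code from this repo:

```bash
#!/usr/bin/env bash
# Illustrative sketch of SC2155; the variable names are made up.

masked() {
    local out=$(false)          # `local` exits 0, hiding the failure
    echo "masked: status=$?"    # prints 0
}

visible() {
    local out
    out=$(false)                # status is now the command's own
    echo "visible: status=$?"   # prints 1
}

masked
visible
```

Under `set -e`, the masked form also keeps the script from aborting on a failed command substitution, which is why every such assignment below is split in two.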
@@ -110,7 +110,8 @@ log_info() {
 
 # Log synchronization functions
 sync_logs_to_shared() {
-    local sync_start_time=$(date +%s)
+    local sync_start_time
+    sync_start_time=$(date +%s)
     log_info "Starting log synchronization to shared location"
 
     # Ensure shared log directory exists
@@ -131,7 +132,8 @@ sync_logs_to_shared() {
 
     for log_file in "$LOCAL_LOG_ROOT"/*.log; do
         if [ -f "$log_file" ]; then
-            local filename=$(basename "$log_file")
+            local filename
+            filename=$(basename "$log_file")
             local shared_file="$SHARED_LOG_ROOT/$filename"
 
             # Only copy if file doesn't exist in shared location or local is newer
@@ -147,7 +149,8 @@ sync_logs_to_shared() {
         fi
     done
 
-    local sync_end_time=$(date +%s)
+    local sync_end_time
+    sync_end_time=$(date +%s)
     local sync_duration=$((sync_end_time - sync_start_time))
 
     if [ $error_count -eq 0 ]; then
@@ -161,7 +164,8 @@ sync_logs_to_shared() {
 
 # Cleanup old local logs (30 day retention)
 cleanup_old_local_logs() {
-    local cleanup_start_time=$(date +%s)
+    local cleanup_start_time
+    cleanup_start_time=$(date +%s)
     log_info "Starting cleanup of old local logs (30+ days)"
 
     if [ ! -d "$LOCAL_LOG_ROOT" ]; then
@@ -174,7 +178,8 @@ cleanup_old_local_logs() {
 
     # Find and remove log files older than 30 days
     while IFS= read -r -d '' old_file; do
-        local filename=$(basename "$old_file")
+        local filename
+        filename=$(basename "$old_file")
         if rm "$old_file" 2>/dev/null; then
            ((cleanup_count++))
            log_info "Removed old log: $filename"
@@ -184,7 +189,8 @@ cleanup_old_local_logs() {
         fi
     done < <(find "$LOCAL_LOG_ROOT" -name "*.log" -mtime +30 -print0 2>/dev/null)
 
-    local cleanup_end_time=$(date +%s)
+    local cleanup_end_time
+    cleanup_end_time=$(date +%s)
     local cleanup_duration=$((cleanup_end_time - cleanup_start_time))
 
     if [ $cleanup_count -gt 0 ]; then
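The cleanup loop above feeds `while` from `find ... -print0` through `read -r -d ''`, so entries are NUL-delimited and filenames containing spaces or newlines survive intact. A small self-contained sketch of that pattern (the temporary directory and filenames are made up for the demo):

```bash
#!/usr/bin/env bash
# Demo of the null-delimited find/read pattern used in the cleanup loop.
dir=$(mktemp -d)
touch "$dir/plain.log" "$dir/name with spaces.log"

# IFS= preserves leading whitespace; -d '' reads up to each NUL byte.
while IFS= read -r -d '' f; do
    echo "found: $f"
done < <(find "$dir" -name "*.log" -print0)

rm -rf "$dir"
```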
@@ -235,7 +241,8 @@ validate_backup_structure() {
         return 1
     fi
 
-    local backup_count=$(find "$BACKUP_ROOT" -maxdepth 1 -type f -name "plex-backup-*.tar.gz" | wc -l)
+    local backup_count
+    backup_count=$(find "$BACKUP_ROOT" -maxdepth 1 -type f -name "plex-backup-*.tar.gz" | wc -l)
     log_info "Found $backup_count backup files"
 
     if [ "$backup_count" -eq 0 ]; then
@@ -249,7 +256,8 @@ validate_backup_structure() {
 # Validate individual backup
 validate_backup() {
     local backup_file="$1"
-    local backup_name=$(basename "$backup_file")
+    local backup_name
+    backup_name=$(basename "$backup_file")
     local errors=0
 
     log_info "Validating backup: $backup_name"
@@ -268,7 +276,8 @@ validate_backup() {
     log_success "Archive integrity check passed: $backup_name"
 
     # Check for expected files in archive
-    local archive_contents=$(tar -tzf "$backup_file" 2>/dev/null)
+    local archive_contents
+    archive_contents=$(tar -tzf "$backup_file" 2>/dev/null)
 
     # Check if this is a legacy backup with dated subdirectory
     local has_dated_subdir=false
@@ -300,8 +309,11 @@ validate_backup() {
             log_success " Found: $file"
         else
             # Check if this is an optional file that might not exist in older backups
-            local backup_name=$(basename "$backup_file")
-            local backup_datetime=$(echo "$backup_name" | sed 's/plex-backup-\([0-9]\{8\}_[0-9]\{6\}\)\.tar\.gz/\1/')
+            local backup_name
+            backup_name=$(basename "$backup_file")
+            local backup_datetime
+            backup_datetime=${backup_name#plex-backup-}
+            backup_datetime=${backup_datetime%.tar.gz}
 
             if [[ -n "${OPTIONAL_FILES[$file]}" ]] && [[ "$backup_datetime" < "${OPTIONAL_FILES[$file]}" ]]; then
                 log_warning " Missing file (expected for backup date): $file"
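This hunk also swaps `sed` for bash parameter expansion when pulling the timestamp out of the filename, and relies on `[[ a < b ]]` being a lexicographic comparison, which happens to order fixed-width `YYYYMMDD_HHMMSS` strings chronologically. A hedged sketch with a made-up filename:

```bash
#!/usr/bin/env bash
# Demo values only; the real script derives these from backup files.
name="plex-backup-20250526_144500.tar.gz"

dt=${name#plex-backup-}   # drop fixed prefix -> "20250526_144500.tar.gz"
dt=${dt%.tar.gz}          # drop fixed suffix -> "20250526_144500"
echo "$dt"

# Inside [[ ]], < compares strings, not numbers; for this fixed-width
# date format that coincides with chronological order.
if [[ "$dt" < "20250601_000000" ]]; then
    echo "predates the cutoff"
fi
```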
@@ -327,7 +339,7 @@ validate_backup() {
             # Legacy format: extract filename from dated subdirectory
             filename="${BASH_REMATCH[1]}"
             # Remove timestamp suffix if present
-            filename=$(echo "$filename" | sed 's/\.[0-9]\{8\}_[0-9]\{6\}$//')
+            filename=${filename%.*_*}
         elif [[ "$line" =~ ^\./(.+)$ ]]; then
             # New format: extract filename from ./ prefix
             filename="${BASH_REMATCH[1]}"
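Note that `${filename%.*_*}` deletes the shortest trailing `.<anything>_<anything>`, not the exact `\.[0-9]\{8\}_[0-9]\{6\}$` the old `sed` matched. A quick sketch of the behavior, including the looser-match caveat (both example strings are hypothetical):

```bash
#!/usr/bin/env bash
# Demo of the ${var%pattern} trim used above; example names are made up.
f="com.plexapp.plugins.library.db.20250526_144500"

# %.*_* removes the shortest trailing ".<anything>_<anything>",
# here the timestamp suffix:
echo "${f%.*_*}"     # com.plexapp.plugins.library.db

# Caveat: the glob is looser than the old sed anchor, so any trailing
# dot-suffix containing "_" would be trimmed too:
g="notes.v2_draft"
echo "${g%.*_*}"     # notes
```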
@@ -365,16 +377,20 @@ validate_backup() {
 check_backup_freshness() {
     log_info "Checking backup freshness..."
 
-    local latest_backup=$(find "$BACKUP_ROOT" -maxdepth 1 -type f -name "plex-backup-*.tar.gz" 2>/dev/null | sort | tail -1)
+    local latest_backup
+    latest_backup=$(find "$BACKUP_ROOT" -maxdepth 1 -type f -name "plex-backup-*.tar.gz" 2>/dev/null | sort | tail -1)
 
     if [ -z "$latest_backup" ]; then
         log_error "No backups found"
         return 1
     fi
 
-    local backup_filename=$(basename "$latest_backup")
+    local backup_filename
+    backup_filename=$(basename "$latest_backup")
     # Extract date from filename: plex-backup-YYYYMMDD_HHMMSS.tar.gz
-    local backup_datetime=$(echo "$backup_filename" | sed 's/plex-backup-\([0-9]\{8\}_[0-9]\{6\}\)\.tar\.gz/\1/')
+    local backup_datetime
+    backup_datetime=${backup_filename#plex-backup-}
+    backup_datetime=${backup_datetime%.tar.gz}
 
     # Validate that we extracted a valid datetime
     if [[ ! "$backup_datetime" =~ ^[0-9]{8}_[0-9]{6}$ ]]; then
@@ -390,7 +406,8 @@ check_backup_freshness() {
         return 1
     fi
 
-    local current_timestamp=$(date +%s)
+    local current_timestamp
+    current_timestamp=$(date +%s)
     local age_days=$(( (current_timestamp - backup_timestamp) / 86400 ))
 
     log_info "Latest backup: $backup_datetime ($age_days days old)"
@@ -427,7 +444,8 @@ check_backup_sizes() {
     while IFS= read -r backup_file; do
         if [ -f "$backup_file" ] && [ -r "$backup_file" ]; then
             backup_files+=("$backup_file")
-            local size=$(stat -f%z "$backup_file" 2>/dev/null || stat -c%s "$backup_file" 2>/dev/null || echo "0")
+            local size
+            size=$(stat -f%z "$backup_file" 2>/dev/null || stat -c%s "$backup_file" 2>/dev/null || echo "0")
             backup_sizes+=("$size")
             total_size=$((total_size + size))
         fi
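The size lookup chains BSD and GNU `stat` flags so the script works on both macOS (`stat -f%z`) and Linux coreutils (`stat -c%s`), falling back to `0` so later arithmetic never sees an empty string. A minimal standalone sketch (the path is just an example):

```bash
#!/usr/bin/env bash
# Portable file-size helper mirroring the fallback chain in the diff.
file_size() {
    local f="$1"
    # BSD/macOS stat first, then GNU coreutils stat, then a safe default.
    stat -f%z "$f" 2>/dev/null || stat -c%s "$f" 2>/dev/null || echo "0"
}

size=$(file_size /etc/hosts)
echo "size: $size bytes"
```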
@@ -440,8 +458,10 @@ check_backup_sizes() {
 
     # Calculate average size
     local avg_size=$((total_size / ${#backup_files[@]}))
-    local human_total=$(numfmt --to=iec "$total_size" 2>/dev/null || echo "${total_size} bytes")
-    local human_avg=$(numfmt --to=iec "$avg_size" 2>/dev/null || echo "${avg_size} bytes")
+    local human_total
+    human_total=$(numfmt --to=iec "$total_size" 2>/dev/null || echo "${total_size} bytes")
+    local human_avg
+    human_avg=$(numfmt --to=iec "$avg_size" 2>/dev/null || echo "${avg_size} bytes")
 
     log_info "Total backup size: $human_total"
     log_info "Average backup size: $human_avg"
@@ -453,13 +473,17 @@ check_backup_sizes() {
     for i in "${!backup_files[@]}"; do
         local file="${backup_files[$i]}"
         local size="${backup_sizes[$i]}"
-        local filename=$(basename "$file")
+        local filename
+        filename=$(basename "$file")
 
         if [ "$size" -lt "$min_size" ] && [ "$size" -gt 0 ]; then
-            local human_size=$(numfmt --to=iec "$size" 2>/dev/null || echo "${size} bytes")
+            local human_size
+            human_size=$(numfmt --to=iec "$size" 2>/dev/null || echo "${size} bytes")
 
             # Extract backup datetime to check if it's a pre-blobs backup
-            local backup_datetime=$(echo "$filename" | sed 's/plex-backup-\([0-9]\{8\}_[0-9]\{6\}\)\.tar\.gz/\1/')
+            local backup_datetime
+            backup_datetime=${filename#plex-backup-}
+            backup_datetime=${backup_datetime%.tar.gz}
             if [[ "$backup_datetime" =~ ^[0-9]{8}_[0-9]{6}$ ]] && [[ "$backup_datetime" < "20250526_144500" ]]; then
                 log_info "Small backup (pre-blobs DB): $filename ($human_size)"
                 log_info " This backup predates the blobs database introduction, size is expected"
@@ -484,9 +508,12 @@ check_backup_sizes() {
 check_disk_space() {
     log_info "Checking disk space..."
 
-    local backup_disk_usage=$(du -sh "$BACKUP_ROOT" | cut -f1)
-    local available_space=$(df -h "$BACKUP_ROOT" | awk 'NR==2 {print $4}')
-    local used_percentage=$(df "$BACKUP_ROOT" | awk 'NR==2 {print $5}' | sed 's/%//')
+    local backup_disk_usage
+    backup_disk_usage=$(du -sh "$BACKUP_ROOT" | cut -f1)
+    local available_space
+    available_space=$(df -h "$BACKUP_ROOT" | awk 'NR==2 {print $4}')
+    local used_percentage
+    used_percentage=$(df "$BACKUP_ROOT" | awk 'NR==2 {print $5}' | sed 's/%//')
 
     log_info "Backup disk usage: $backup_disk_usage"
     log_info "Available space: $available_space"
@@ -513,10 +540,12 @@ generate_report() {
     local total_errors=0
 
     # Header
-    echo "==================================" >> "$REPORT_FILE"
-    echo "Plex Backup Validation Report" >> "$REPORT_FILE"
-    echo "Generated: $(date)" >> "$REPORT_FILE"
-    echo "==================================" >> "$REPORT_FILE"
+    {
+        echo "=================================="
+        echo "Plex Backup Validation Report"
+        echo "Generated: $(date)"
+        echo "=================================="
+    } >> "$REPORT_FILE"
 
     # Use process substitution to avoid subshell variable scope issues
     while IFS= read -r backup_file; do
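Grouping the `echo`s as `{ ...; } >> "$REPORT_FILE"` opens the report once instead of once per line and keeps the redirection target in a single place. A standalone sketch using a temp file:

```bash
#!/usr/bin/env bash
# Sketch of the grouped-redirection refactor shown in this hunk.
report=$(mktemp)

{
    echo "=================================="
    echo "Plex Backup Validation Report"
    echo "Generated: $(date)"
    echo "=================================="
} >> "$report"

cat "$report"
```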
@@ -532,11 +561,13 @@ generate_report() {
     done < <(find "$BACKUP_ROOT" -maxdepth 1 -type f -name "plex-backup-*.tar.gz" | sort)
 
     # Summary
-    echo >> "$REPORT_FILE"
-    echo "Summary:" >> "$REPORT_FILE"
-    echo " Total backups: $total_backups" >> "$REPORT_FILE"
-    echo " Valid backups: $valid_backups" >> "$REPORT_FILE"
-    echo " Total errors: $total_errors" >> "$REPORT_FILE"
+    {
+        echo
+        echo "Summary:"
+        echo " Total backups: $total_backups"
+        echo " Valid backups: $valid_backups"
+        echo " Total errors: $total_errors"
+    } >> "$REPORT_FILE"
 
     log_success "Report generated: $REPORT_FILE"
 }
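The "avoid subshell variable scope issues" comment in this function points at a classic pitfall: piping `find` into `while read` runs the loop body in a subshell, so counters updated inside it vanish. Reading from process substitution keeps the loop in the current shell. A tiny demonstration:

```bash
#!/usr/bin/env bash
# Why the script reads from < <(...) instead of piping into while.
count=0

# A pipeline runs the loop in a subshell; this increment is lost:
printf 'a\nb\n' | while read -r _; do ((count++)); done
echo "after pipe: $count"        # still 0

# Process substitution keeps the loop in the current shell:
while read -r _; do ((count++)); done < <(printf 'a\nb\n')
echo "after < <(...): $count"    # 2
```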
@@ -546,7 +577,8 @@ fix_issues() {
     log_info "Attempting to fix common issues..."
 
     # Create corrupted backups directory
-    local corrupted_dir="$(dirname "$REPORT_FILE")/corrupted-backups"
+    local corrupted_dir
+    corrupted_dir="$(dirname "$REPORT_FILE")/corrupted-backups"
     mkdir -p "$corrupted_dir"
 
     # Check for and move corrupted backup files using process substitution
@@ -554,7 +586,8 @@ fix_issues() {
     while IFS= read -r backup_file; do
         if ! tar -tzf "$backup_file" >/dev/null 2>&1; then
             log_warning "Found corrupted backup: $(basename "$backup_file")"
-            local backup_name=$(basename "$backup_file")
+            local backup_name
+            backup_name=$(basename "$backup_file")
             local corrupted_backup="$corrupted_dir/$backup_name"
 
             if mv "$backup_file" "$corrupted_backup"; then
@@ -650,9 +683,9 @@ main() {
 
     # Check backup freshness
     if ! check_backup_freshness; then
-        local freshness_result=$?
         # Check if this is a "no backups found" error vs "old backup" warning
-        local backup_count=$(find "$BACKUP_ROOT" -maxdepth 1 -type f -name "plex-backup-*.tar.gz" 2>/dev/null | wc -l)
+        local backup_count
+        backup_count=$(find "$BACKUP_ROOT" -maxdepth 1 -type f -name "plex-backup-*.tar.gz" 2>/dev/null | wc -l)
         if [ "$backup_count" -eq 0 ]; then
             # No backups found - critical error
             critical_errors=$((critical_errors + 1))