feat: Enhance Plex backup validation script with optional file handling and dependency checks
@@ -24,6 +24,12 @@ EXPECTED_FILES=(
     "Preferences.xml"
 )
 
+# Files that may not exist in older backups (with approximate introduction dates)
+# This helps provide better context for missing files
+declare -A OPTIONAL_FILES=(
+    ["com.plexapp.plugins.library.blobs.db"]="20250526_144500"  # Blobs DB introduced between 07:55 and 14:45 on 5/26
+)
+
 log_message() {
     local message="$1"
     local clean_message="$2"
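Note: the OPTIONAL_FILES lookup leans on two Bash features: associative arrays
(declare -A) and lexicographic string comparison with < inside [[ ]], which
orders zero-padded YYYYMMDD_HHMMSS stamps chronologically. A minimal sketch
with hypothetical values (not taken from the script):

    declare -A introduced=( ["blobs.db"]="20250526_144500" )
    backup_datetime="20250525_081500"   # hypothetical backup older than the cutoff

    # [[ a < b ]] compares byte by byte; fixed-width zero-padded stamps
    # therefore sort in date order.
    if [[ -n "${introduced[blobs.db]}" ]] && [[ "$backup_datetime" < "${introduced[blobs.db]}" ]]; then
        echo "backup predates blobs.db; a missing file is expected"
    fi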
@@ -56,6 +62,40 @@ log_info() {
     log_message "${BLUE}INFO: $1${NC}" "INFO: $1"
 }
 
+# Check dependencies
+check_dependencies() {
+    local missing_deps=()
+
+    # Check for required commands
+    if ! command -v tar >/dev/null 2>&1; then
+        missing_deps+=("tar")
+    fi
+
+    if ! command -v jq >/dev/null 2>&1; then
+        missing_deps+=("jq")
+    fi
+
+    if ! command -v find >/dev/null 2>&1; then
+        missing_deps+=("find")
+    fi
+
+    if ! command -v df >/dev/null 2>&1; then
+        missing_deps+=("df")
+    fi
+
+    if ! command -v du >/dev/null 2>&1; then
+        missing_deps+=("du")
+    fi
+
+    if [ ${#missing_deps[@]} -gt 0 ]; then
+        log_error "Missing required dependencies: ${missing_deps[*]}"
+        log_info "Please install missing dependencies before running this script"
+        return 1
+    fi
+
+    return 0
+}
+
 # Check backup directory structure
 validate_backup_structure() {
     log_info "Validating backup directory structure..."
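Note: command -v is the portable way to probe for a binary; it prints the
resolved path and exits non-zero when the command is absent, so discarding the
output and testing the exit status is all the check needs:

    if command -v tar >/dev/null 2>&1; then
        echo "tar available at: $(command -v tar)"
    else
        echo "tar missing" >&2
    fi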
@@ -100,21 +140,92 @@ validate_backup() {
     # Check for expected files in archive
     local archive_contents=$(tar -tzf "$backup_file" 2>/dev/null)
+
+    # Check if this is a legacy backup with dated subdirectory
+    local has_dated_subdir=false
+    if echo "$archive_contents" | grep -q "^\./[0-9]\{8\}/" || echo "$archive_contents" | grep -q "^[0-9]\{8\}/"; then
+        has_dated_subdir=true
+        log_info "  Detected legacy backup format with dated subdirectory"
+    fi
+
     for file in "${EXPECTED_FILES[@]}"; do
-        if echo "$archive_contents" | grep -q "^$file$"; then
+        local file_found=false
+
+        if [ "$has_dated_subdir" = true ]; then
+            # For legacy backups, look for files in dated subdirectory (with or without timestamps)
+            if echo "$archive_contents" | grep -q "^\./[0-9]\{8\}/$file" || \
+               echo "$archive_contents" | grep -q "^\./[0-9]\{8\}/$file\.[0-9]\{8\}_[0-9]\{6\}$" || \
+               echo "$archive_contents" | grep -q "^[0-9]\{8\}/$file" || \
+               echo "$archive_contents" | grep -q "^[0-9]\{8\}/$file\.[0-9]\{8\}_[0-9]\{6\}$"; then
+                file_found=true
+            fi
+        else
+            # For new backups, look for files at root level
+            if echo "$archive_contents" | grep -q "^\./$file$" || \
+               echo "$archive_contents" | grep -q "^$file$"; then
+                file_found=true
+            fi
+        fi
+
+        if [ "$file_found" = true ]; then
             log_success "  Found: $file"
         else
-            log_error "  Missing file: $file"
-            errors=$((errors + 1))
+            # Check if this is an optional file that might not exist in older backups
+            local backup_name=$(basename "$backup_file")
+            local backup_datetime=$(echo "$backup_name" | sed 's/plex-backup-\([0-9]\{8\}_[0-9]\{6\}\)\.tar\.gz/\1/')
+
+            if [[ -n "${OPTIONAL_FILES[$file]}" ]] && [[ "$backup_datetime" < "${OPTIONAL_FILES[$file]}" ]]; then
+                log_warning "  Missing file (expected for backup date): $file"
+                log_info "  Note: $file was introduced around ${OPTIONAL_FILES[$file]}, this backup is from $backup_datetime"
+            else
+                log_error "  Missing file: $file"
+                errors=$((errors + 1))
+            fi
         fi
     done
 
-    # Check for unexpected files
-    echo "$archive_contents" | while IFS= read -r line; do
-        if [[ ! " ${EXPECTED_FILES[@]} " =~ " ${line} " ]]; then
-            log_warning "  Unexpected file: $line"
-        fi
-    done
+    # Check for unexpected files (more lenient for legacy backups)
+    local unexpected_files=()
+    while IFS= read -r line; do
+        # Skip directory entries and current directory marker
+        if [[ "$line" == "./" ]] || [[ "$line" == */ ]] || [[ -z "$line" ]]; then
+            continue
+        fi
+
+        # Extract filename from path (handle both legacy and new formats)
+        local filename=""
+        if [[ "$line" =~ ^\./[0-9]{8}/(.+)$ ]] || [[ "$line" =~ ^[0-9]{8}/(.+)$ ]]; then
+            # Legacy format: extract filename from dated subdirectory
+            filename="${BASH_REMATCH[1]}"
+            # Remove timestamp suffix if present
+            filename=$(echo "$filename" | sed 's/\.[0-9]\{8\}_[0-9]\{6\}$//')
+        elif [[ "$line" =~ ^\./(.+)$ ]]; then
+            # New format: extract filename from ./ prefix
+            filename="${BASH_REMATCH[1]}"
+        else
+            # Direct filename
+            filename="$line"
+        fi
+
+        # Check if this is an expected file
+        local is_expected=false
+        for expected_file in "${EXPECTED_FILES[@]}"; do
+            if [[ "$filename" == "$expected_file" ]]; then
+                is_expected=true
+                break
+            fi
+        done
+
+        if [ "$is_expected" = false ]; then
+            unexpected_files+=("$line")
+        fi
+    done <<< "$archive_contents"
+
+    # Report unexpected files if any found
+    if [ ${#unexpected_files[@]} -gt 0 ]; then
+        for unexpected_file in "${unexpected_files[@]}"; do
+            log_warning "  Unexpected file: $unexpected_file"
+        done
+    fi
     fi
 
     return $errors
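Note: the rewritten loop reads the archive listing from a here-string
(done <<< "$archive_contents") instead of a pipe, so unexpected_files+=()
mutates the array in the current shell, and [[ =~ ]] fills BASH_REMATCH with
its capture groups. A small sketch of the filename extraction, using a
hypothetical archive entry:

    line="20250101/Preferences.xml.20250101_120000"   # hypothetical legacy entry
    if [[ "$line" =~ ^[0-9]{8}/(.+)$ ]]; then
        filename="${BASH_REMATCH[1]}"                  # Preferences.xml.20250101_120000
        # strip the trailing timestamp suffix, as the script does
        filename=$(echo "$filename" | sed 's/\.[0-9]\{8\}_[0-9]\{6\}$//')
        echo "$filename"                               # Preferences.xml
    fi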
@@ -124,7 +235,7 @@ validate_backup() {
 check_backup_freshness() {
     log_info "Checking backup freshness..."
 
-    local latest_backup=$(find "$BACKUP_ROOT" -maxdepth 1 -type f -name "plex-backup-*.tar.gz" | sort | tail -1)
+    local latest_backup=$(find "$BACKUP_ROOT" -maxdepth 1 -type f -name "plex-backup-*.tar.gz" 2>/dev/null | sort | tail -1)
 
     if [ -z "$latest_backup" ]; then
         log_error "No backups found"
@@ -133,12 +244,26 @@ check_backup_freshness() {
 
     local backup_filename=$(basename "$latest_backup")
     # Extract date from filename: plex-backup-YYYYMMDD_HHMMSS.tar.gz
-    local backup_date=$(echo "$backup_filename" | sed 's/plex-backup-//' | sed 's/_.*$//')
-    local backup_timestamp=$(date -d "${backup_date:0:4}-${backup_date:4:2}-${backup_date:6:2}" +%s)
+    local backup_datetime=$(echo "$backup_filename" | sed 's/plex-backup-\([0-9]\{8\}_[0-9]\{6\}\)\.tar\.gz/\1/')
+
+    # Validate that we extracted a valid datetime
+    if [[ ! "$backup_datetime" =~ ^[0-9]{8}_[0-9]{6}$ ]]; then
+        log_error "Could not parse backup date from filename: $backup_filename"
+        return 1
+    fi
+
+    local backup_date="${backup_datetime%_*}"  # Remove time part
+
+    # Validate date format and convert to timestamp
+    if ! backup_timestamp=$(date -d "${backup_date:0:4}-${backup_date:4:2}-${backup_date:6:2}" +%s 2>/dev/null); then
+        log_error "Invalid backup date format: $backup_date"
+        return 1
+    fi
+
     local current_timestamp=$(date +%s)
     local age_days=$(( (current_timestamp - backup_timestamp) / 86400 ))
 
-    log_info "Latest backup: $backup_date ($age_days days old)"
+    log_info "Latest backup: $backup_datetime ($age_days days old)"
 
     if [ "$age_days" -gt 7 ]; then
         log_warning "Latest backup is older than 7 days"
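Note: wrapping the date conversion in an if ! assignment catches both a failed
extraction and an impossible calendar date, because GNU date exits non-zero on
input it cannot parse (the -d flag is GNU-specific; BSD date spells this
differently). A quick sketch with a deliberately invalid value:

    backup_date="20250230"   # hypothetical: February 30th does not exist
    if ! ts=$(date -d "${backup_date:0:4}-${backup_date:4:2}-${backup_date:6:2}" +%s 2>/dev/null); then
        echo "invalid backup date: $backup_date" >&2
    fi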
@@ -161,13 +286,91 @@ validate_json_log() {
         return 1
     fi
 
-    if ! jq empty "$JSON_LOG_FILE" 2>/dev/null; then
-        log_error "JSON log file is invalid"
+    # Check if jq is available (should be caught by dependency check)
+    if ! command -v jq >/dev/null 2>&1; then
+        log_error "jq command not found - cannot validate JSON"
+        return 1
+    fi
+
+    # Validate JSON syntax
+    local jq_output
+    if ! jq_output=$(jq empty "$JSON_LOG_FILE" 2>&1); then
+        log_error "JSON log file is invalid: $jq_output"
+        return 1
+    fi
+
+    # Get entry count safely
+    local entry_count
+    if ! entry_count=$(jq 'length' "$JSON_LOG_FILE" 2>/dev/null); then
+        log_error "Could not count entries in JSON log file"
         return 1
     fi
 
-    local entry_count=$(jq 'length' "$JSON_LOG_FILE")
-
     log_success "JSON log file is valid ($entry_count entries)"
+    return 0
+}
+
+# Check backup file sizes for anomalies
+check_backup_sizes() {
+    log_info "Checking backup file sizes..."
+
+    local backup_files=()
+    local backup_sizes=()
+    local total_size=0
+
+    # Collect backup files and their sizes
+    while IFS= read -r backup_file; do
+        if [ -f "$backup_file" ] && [ -r "$backup_file" ]; then
+            backup_files+=("$backup_file")
+            local size=$(stat -f%z "$backup_file" 2>/dev/null || stat -c%s "$backup_file" 2>/dev/null || echo "0")
+            backup_sizes+=("$size")
+            total_size=$((total_size + size))
+        fi
+    done < <(find "$BACKUP_ROOT" -maxdepth 1 -type f -name "plex-backup-*.tar.gz" 2>/dev/null | sort)
+
+    if [ ${#backup_files[@]} -eq 0 ]; then
+        log_warning "No backup files found for size analysis"
+        return 1
+    fi
+
+    # Calculate average size
+    local avg_size=$((total_size / ${#backup_files[@]}))
+    local human_total=$(numfmt --to=iec "$total_size" 2>/dev/null || echo "${total_size} bytes")
+    local human_avg=$(numfmt --to=iec "$avg_size" 2>/dev/null || echo "${avg_size} bytes")
+
+    log_info "Total backup size: $human_total"
+    log_info "Average backup size: $human_avg"
+
+    # Check for suspiciously small backups (less than 50% of average)
+    local min_size=$((avg_size / 2))
+    local suspicious_count=0
+
+    for i in "${!backup_files[@]}"; do
+        local file="${backup_files[$i]}"
+        local size="${backup_sizes[$i]}"
+        local filename=$(basename "$file")
+
+        if [ "$size" -lt "$min_size" ] && [ "$size" -gt 0 ]; then
+            local human_size=$(numfmt --to=iec "$size" 2>/dev/null || echo "${size} bytes")
+
+            # Extract backup datetime to check if it's a pre-blobs backup
+            local backup_datetime=$(echo "$filename" | sed 's/plex-backup-\([0-9]\{8\}_[0-9]\{6\}\)\.tar\.gz/\1/')
+            if [[ "$backup_datetime" =~ ^[0-9]{8}_[0-9]{6}$ ]] && [[ "$backup_datetime" < "20250526_144500" ]]; then
+                log_info "Small backup (pre-blobs DB): $filename ($human_size)"
+                log_info "  This backup predates the blobs database introduction, size is expected"
+            else
+                log_warning "Suspiciously small backup: $filename ($human_size)"
+                suspicious_count=$((suspicious_count + 1))
+            fi
+        fi
+    done
+
+    if [ "$suspicious_count" -gt 0 ]; then
+        log_warning "Found $suspicious_count backup(s) that may be incomplete"
+        return 1
+    else
+        log_success "All backup sizes appear normal"
+    fi
+
     return 0
 }
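Note: the size collection chains stat -f%z (BSD/macOS) with stat -c%s (GNU)
so the same line works on either platform, and numfmt --to=iec falls back to
a raw byte count where coreutils' numfmt is unavailable:

    f="/etc/hostname"   # any readable file will do
    size=$(stat -f%z "$f" 2>/dev/null || stat -c%s "$f" 2>/dev/null || echo "0")
    echo "$(numfmt --to=iec "$size" 2>/dev/null || echo "${size} bytes")"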
@@ -210,8 +413,8 @@ generate_report() {
     echo "Generated: $(date)" >> "$REPORT_FILE"
     echo "==================================" >> "$REPORT_FILE"
 
-    # Validate each backup
-    find "$BACKUP_ROOT" -maxdepth 1 -type f -name "plex-backup-*.tar.gz" | sort | while read backup_file; do
+    # Use process substitution to avoid subshell variable scope issues
+    while IFS= read -r backup_file; do
         total_backups=$((total_backups + 1))
         validate_backup "$backup_file"
         local backup_errors=$?
@@ -221,7 +424,7 @@ generate_report() {
     else
         total_errors=$((total_errors + backup_errors))
     fi
-    done
+    done < <(find "$BACKUP_ROOT" -maxdepth 1 -type f -name "plex-backup-*.tar.gz" | sort)
 
     # Summary
     echo >> "$REPORT_FILE"
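Note: this change fixes a classic Bash pitfall. Piping find into while read
runs the loop body in a subshell, so total_backups and total_errors were
incremented in a copy that vanished when the pipeline ended. Process
substitution keeps the loop in the current shell:

    count=0
    printf 'a\nb\n' | while read -r _; do count=$((count + 1)); done
    echo "$count"   # prints 0: the loop ran in a subshell

    count=0
    while read -r _; do count=$((count + 1)); done < <(printf 'a\nb\n')
    echo "$count"   # prints 2: the loop ran in the current shell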
@@ -245,13 +448,38 @@ fix_issues() {
         log_success "JSON log file created/fixed"
     fi
 
+    # Create corrupted backups directory
+    local corrupted_dir="$(dirname "$REPORT_FILE")/corrupted-backups"
+    mkdir -p "$corrupted_dir"
+
+    # Check for and move corrupted backup files using process substitution
+    local corrupted_count=0
+    while IFS= read -r backup_file; do
+        if ! tar -tzf "$backup_file" >/dev/null 2>&1; then
+            log_warning "Found corrupted backup: $(basename "$backup_file")"
+            local backup_name=$(basename "$backup_file")
+            local corrupted_backup="$corrupted_dir/$backup_name"
+
+            if mv "$backup_file" "$corrupted_backup"; then
+                log_success "Moved corrupted backup to: $corrupted_backup"
+                corrupted_count=$((corrupted_count + 1))
+            else
+                log_error "Failed to move corrupted backup: $backup_name"
+            fi
+        fi
+    done < <(find "$BACKUP_ROOT" -maxdepth 1 -type f -name "plex-backup-*.tar.gz" 2>/dev/null || true)
+
+    if [ "$corrupted_count" -gt 0 ]; then
+        log_info "Moved $corrupted_count corrupted backup(s) to $corrupted_dir"
+    fi
+
     # Clean up any remaining dated directories from old backup structure
     find "$BACKUP_ROOT" -maxdepth 1 -type d -name "????????" -exec rm -rf {} \; 2>/dev/null || true
 
     # Fix permissions if needed
     if [ -d "$BACKUP_ROOT" ]; then
-        chmod 755 "$BACKUP_ROOT"
-        find "$BACKUP_ROOT" -type f -name "plex-backup-*.tar.gz" -exec chmod 644 {} \; 2>/dev/null || true
+        chmod 755 "$BACKUP_ROOT" 2>/dev/null || log_warning "Could not fix backup root permissions"
+        find "$BACKUP_ROOT" -type f -name "plex-backup-*.tar.gz" -exec chmod 644 {} \; 2>/dev/null || log_warning "Could not fix backup file permissions"
         log_success "Fixed backup permissions"
     fi
 }
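Note: tar -tzf lists the archive without extracting and exits non-zero when
the gzip stream or tar structure is damaged, which makes it a cheap integrity
probe. Sketch, with a hypothetical path:

    f="/backups/plex-backup-20250101_120000.tar.gz"   # hypothetical
    if tar -tzf "$f" >/dev/null 2>&1; then
        echo "archive readable"
    else
        echo "archive corrupted or unreadable" >&2
    fi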
@@ -260,6 +488,7 @@ fix_issues() {
 main() {
     local fix_mode=false
     local report_mode=false
+    local verbose_mode=false
 
     # Parse arguments
     while [[ $# -gt 0 ]]; do
@@ -272,10 +501,27 @@ main() {
             report_mode=true
             shift
             ;;
+        --verbose|-v)
+            verbose_mode=true
+            shift
+            ;;
+        --help|-h)
+            echo "Usage: $0 [OPTIONS]"
+            echo "Validate Plex backup files and system health"
+            echo ""
+            echo "OPTIONS:"
+            echo "  --fix      Attempt to fix common issues"
+            echo "  --report   Generate detailed backup report"
+            echo "  --verbose  Enable verbose output"
+            echo "  --help     Show this help message"
+            exit 0
+            ;;
         *)
-            echo "Usage: $0 [--fix] [--report]"
+            echo "Usage: $0 [--fix] [--report] [--verbose]"
             echo "  --fix     Attempt to fix common issues"
             echo "  --report  Generate detailed backup report"
+            echo "  --verbose Enable verbose output"
+            echo "Use --help for more information"
             exit 1
             ;;
     esac
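Note: the parser consumes one flag per iteration and shift drops $1, so the
while [[ $# -gt 0 ]] loop ends once every argument is handled. Example
invocations of the new flags (the script filename here is hypothetical):

    ./validate-plex-backups.sh --help             # print usage, exit 0
    ./validate-plex-backups.sh --verbose --report
    ./validate-plex-backups.sh --fix -v           # -v is the short form of --verbose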
@@ -283,10 +529,17 @@ main() {
 
     log_info "Starting Plex backup validation..."
 
+    # Check dependencies first
+    if ! check_dependencies; then
+        exit 1
+    fi
+
     # Create logs directory if needed
     mkdir -p "$(dirname "$REPORT_FILE")"
 
     local overall_status=0
+    local critical_errors=0
+    local warnings=0
 
     # Fix issues if requested
     if [ "$fix_mode" = true ]; then
@@ -295,22 +548,38 @@ main() {
 
     # Validate backup structure
     if ! validate_backup_structure; then
-        overall_status=1
+        critical_errors=$((critical_errors + 1))
     fi
 
     # Check backup freshness
     if ! check_backup_freshness; then
-        overall_status=1
+        local freshness_result=$?
+        # Check if this is a "no backups found" error vs "old backup" warning
+        local backup_count=$(find "$BACKUP_ROOT" -maxdepth 1 -type f -name "plex-backup-*.tar.gz" 2>/dev/null | wc -l)
+        if [ "$backup_count" -eq 0 ]; then
+            # No backups found - critical error
+            critical_errors=$((critical_errors + 1))
+        else
+            # Old backup - warning only
+            warnings=$((warnings + 1))
+        fi
     fi
 
     # Validate JSON log
     if ! validate_json_log; then
-        overall_status=1
+        critical_errors=$((critical_errors + 1))
     fi
 
     # Check disk space
     if ! check_disk_space; then
-        overall_status=1
+        warnings=$((warnings + 1))
+    fi
+
+    # Check backup file sizes
+    if [ "$verbose_mode" = true ] || [ "$report_mode" = true ]; then
+        if ! check_backup_sizes; then
+            warnings=$((warnings + 1))
+        fi
     fi
 
     # Generate detailed report if requested
@@ -320,10 +589,16 @@ main() {
 
     # Final summary
     echo
-    if [ "$overall_status" -eq 0 ]; then
+    if [ "$critical_errors" -eq 0 ] && [ "$warnings" -eq 0 ]; then
         log_success "All validation checks passed"
+        overall_status=0
+    elif [ "$critical_errors" -eq 0 ]; then
+        log_warning "Validation completed with $warnings warning(s) but no critical errors"
+        log_info "System appears healthy despite warnings"
+        overall_status=0
     else
-        log_error "Some validation checks failed"
+        log_error "Validation failed with $critical_errors critical error(s) and $warnings warning(s)"
+        overall_status=1
         echo
         echo "Consider running with --fix to attempt automatic repairs"
         echo "Use --report for a detailed backup analysis"