#!/bin/bash
# Plex Backup Validation and Monitoring Script
# Usage: ./validate-plex-backups.sh [--fix] [--report]
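#
# Examples (illustrative; adjust paths to your installation):
#   ./validate-plex-backups.sh                  # quick health check
#   ./validate-plex-backups.sh --report         # full per-backup report
#   ./validate-plex-backups.sh --fix --verbose  # repair common issues, extra output
# A weekly cron entry could look like this (a sketch; path and schedule are assumptions):
#   0 6 * * 1 /opt/scripts/validate-plex-backups.sh --report >/dev/null 2>&1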
set -e
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'
# Configuration
SCRIPT_DIR="$(dirname "$(readlink -f "$0")")"
BACKUP_ROOT="/mnt/share/media/backups/plex"
JSON_LOG_FILE="$SCRIPT_DIR/logs/plex-backup.json"
REPORT_FILE="$SCRIPT_DIR/logs/backup-validation-$(date +%Y%m%d_%H%M%S).log"
# Expected files in backup
EXPECTED_FILES=(
    "com.plexapp.plugins.library.db"
    "com.plexapp.plugins.library.blobs.db"
    "Preferences.xml"
)
# Files that may not exist in older backups (with approximate introduction dates).
# This helps provide better context for missing files.
declare -A OPTIONAL_FILES=(
    ["com.plexapp.plugins.library.blobs.db"]="20250526_144500" # Blobs DB introduced between 07:55 and 14:45 on 2025-05-26
)
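# Expected archive naming (as assumed by the checks below): each backup is a single
# tarball named plex-backup-YYYYMMDD_HHMMSS.tar.gz stored directly in $BACKUP_ROOT,
# e.g. plex-backup-20250526_144500.tar.gz.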
log_message() {
    local message="$1"
    local clean_message="$2"
    # Display colored message to terminal
    echo -e "$(date '+%H:%M:%S') $message"
    # Strip ANSI codes and log clean version to file
    if [ -n "$clean_message" ]; then
        echo "$(date '+%H:%M:%S') $clean_message" >> "$REPORT_FILE"
    else
        # Strip ANSI escape codes for file logging
        echo "$(date '+%H:%M:%S') $message" | sed 's/\x1b\[[0-9;]*m//g' >> "$REPORT_FILE"
    fi
}
log_error() {
    log_message "${RED}ERROR: $1${NC}" "ERROR: $1"
}
log_success() {
    log_message "${GREEN}SUCCESS: $1${NC}" "SUCCESS: $1"
}
log_warning() {
    log_message "${YELLOW}WARNING: $1${NC}" "WARNING: $1"
}
log_info() {
    log_message "${BLUE}INFO: $1${NC}" "INFO: $1"
}
# Check dependencies
check_dependencies() {
    local missing_deps=()
    local cmd
    # Check for required commands
    for cmd in tar jq find df du; do
        if ! command -v "$cmd" >/dev/null 2>&1; then
            missing_deps+=("$cmd")
        fi
    done
    if [ ${#missing_deps[@]} -gt 0 ]; then
        log_error "Missing required dependencies: ${missing_deps[*]}"
        log_info "Please install missing dependencies before running this script"
        return 1
    fi
    return 0
}
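# Note: find, df, du, and tar are present on virtually every Linux system; jq is
# usually the only dependency that needs installing (e.g. "sudo apt-get install jq"
# on Debian/Ubuntu, "dnf install jq" on Fedora).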
# Check backup directory structure
validate_backup_structure() {
    log_info "Validating backup directory structure..."
    if [ ! -d "$BACKUP_ROOT" ]; then
        log_error "Backup root directory not found: $BACKUP_ROOT"
        return 1
    fi
    local backup_count=$(find "$BACKUP_ROOT" -maxdepth 1 -type f -name "plex-backup-*.tar.gz" | wc -l)
    log_info "Found $backup_count backup files"
    if [ "$backup_count" -eq 0 ]; then
        log_warning "No backup files found"
        return 1
    fi
    return 0
}
# Validate individual backup
validate_backup() {
    local backup_file="$1"
    local backup_name=$(basename "$backup_file")
    local errors=0
    log_info "Validating backup: $backup_name"
    # Check if file exists and is readable
    if [ ! -f "$backup_file" ] || [ ! -r "$backup_file" ]; then
        log_error "Backup file not accessible: $backup_file"
        return 1
    fi
    # Test archive integrity
    if ! tar -tzf "$backup_file" >/dev/null 2>&1; then
        log_error "Archive integrity check failed: $backup_name"
        errors=$((errors + 1))
    else
        log_success "Archive integrity check passed: $backup_name"
        # Check for expected files in archive
        local archive_contents=$(tar -tzf "$backup_file" 2>/dev/null)
        # Check if this is a legacy backup with a dated subdirectory
        local has_dated_subdir=false
        if echo "$archive_contents" | grep -q "^\./[0-9]\{8\}/" || echo "$archive_contents" | grep -q "^[0-9]\{8\}/"; then
            has_dated_subdir=true
            log_info " Detected legacy backup format with dated subdirectory"
        fi
        for file in "${EXPECTED_FILES[@]}"; do
            local file_found=false
            if [ "$has_dated_subdir" = true ]; then
                # For legacy backups, look for files in the dated subdirectory (with or without timestamp suffixes)
                if echo "$archive_contents" | grep -q "^\./[0-9]\{8\}/$file" || \
                   echo "$archive_contents" | grep -q "^\./[0-9]\{8\}/$file\.[0-9]\{8\}_[0-9]\{6\}$" || \
                   echo "$archive_contents" | grep -q "^[0-9]\{8\}/$file" || \
                   echo "$archive_contents" | grep -q "^[0-9]\{8\}/$file\.[0-9]\{8\}_[0-9]\{6\}$"; then
                    file_found=true
                fi
            else
                # For new backups, look for files at the archive root
                if echo "$archive_contents" | grep -q "^\./$file$" || \
                   echo "$archive_contents" | grep -q "^$file$"; then
                    file_found=true
                fi
            fi
            if [ "$file_found" = true ]; then
                log_success " Found: $file"
            else
                # Check whether this is an optional file that may not exist in older backups
                local backup_datetime=$(echo "$backup_name" | sed 's/plex-backup-\([0-9]\{8\}_[0-9]\{6\}\)\.tar\.gz/\1/')
                if [[ -n "${OPTIONAL_FILES[$file]}" ]] && [[ "$backup_datetime" < "${OPTIONAL_FILES[$file]}" ]]; then
                    log_warning " Missing file (expected for backup date): $file"
                    log_info " Note: $file was introduced around ${OPTIONAL_FILES[$file]}; this backup is from $backup_datetime"
                else
                    log_error " Missing file: $file"
                    errors=$((errors + 1))
                fi
            fi
        done
        # Check for unexpected files (more lenient for legacy backups)
        local unexpected_files=()
        while IFS= read -r line; do
            # Skip directory entries and the current-directory marker
            if [[ "$line" == "./" ]] || [[ "$line" == */ ]] || [[ -z "$line" ]]; then
                continue
            fi
            # Extract the filename from the path (handle both legacy and new formats)
            local filename=""
            if [[ "$line" =~ ^\./[0-9]{8}/(.+)$ ]] || [[ "$line" =~ ^[0-9]{8}/(.+)$ ]]; then
                # Legacy format: extract the filename from the dated subdirectory
                filename="${BASH_REMATCH[1]}"
                # Remove the timestamp suffix if present
                filename=$(echo "$filename" | sed 's/\.[0-9]\{8\}_[0-9]\{6\}$//')
            elif [[ "$line" =~ ^\./(.+)$ ]]; then
                # New format: strip the ./ prefix
                filename="${BASH_REMATCH[1]}"
            else
                # Direct filename
                filename="$line"
            fi
            # Check whether this is an expected file
            local is_expected=false
            for expected_file in "${EXPECTED_FILES[@]}"; do
                if [[ "$filename" == "$expected_file" ]]; then
                    is_expected=true
                    break
                fi
            done
            if [ "$is_expected" = false ]; then
                unexpected_files+=("$line")
            fi
        done <<< "$archive_contents"
        # Report unexpected files if any were found
        if [ ${#unexpected_files[@]} -gt 0 ]; then
            for unexpected_file in "${unexpected_files[@]}"; do
                log_warning " Unexpected file: $unexpected_file"
            done
        fi
    fi
    return $errors
}
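# Archive layouts accepted above, for reference:
#   Legacy format (dated subdirectory, optional timestamp suffix):
#     ./20250526/com.plexapp.plugins.library.db.20250526_144500
#   Current format (files at the archive root):
#     ./com.plexapp.plugins.library.db
#     ./Preferences.xml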
# Check backup freshness
check_backup_freshness() {
    log_info "Checking backup freshness..."
    local latest_backup=$(find "$BACKUP_ROOT" -maxdepth 1 -type f -name "plex-backup-*.tar.gz" 2>/dev/null | sort | tail -1)
    if [ -z "$latest_backup" ]; then
        log_error "No backups found"
        return 1
    fi
    local backup_filename=$(basename "$latest_backup")
    # Extract date from filename: plex-backup-YYYYMMDD_HHMMSS.tar.gz
    local backup_datetime=$(echo "$backup_filename" | sed 's/plex-backup-\([0-9]\{8\}_[0-9]\{6\}\)\.tar\.gz/\1/')
    # Validate that we extracted a valid datetime
    if [[ ! "$backup_datetime" =~ ^[0-9]{8}_[0-9]{6}$ ]]; then
        log_error "Could not parse backup date from filename: $backup_filename"
        return 1
    fi
    local backup_date="${backup_datetime%_*}" # Remove the time part
    # Validate the date format and convert it to a timestamp (GNU date)
    local backup_timestamp
    if ! backup_timestamp=$(date -d "${backup_date:0:4}-${backup_date:4:2}-${backup_date:6:2}" +%s 2>/dev/null); then
        log_error "Invalid backup date format: $backup_date"
        return 1
    fi
    local current_timestamp=$(date +%s)
    local age_days=$(( (current_timestamp - backup_timestamp) / 86400 ))
    log_info "Latest backup: $backup_datetime ($age_days days old)"
    if [ "$age_days" -gt 7 ]; then
        log_warning "Latest backup is older than 7 days"
        return 1
    elif [ "$age_days" -gt 3 ]; then
        log_warning "Latest backup is older than 3 days"
    else
        log_success "Latest backup is recent"
    fi
    return 0
}
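# Example: plex-backup-20250526_144500.tar.gz -> date 20250526 -> age in whole days
# versus now; older than 3 days logs a warning, older than 7 days is treated as a
# failure (return 1).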
# Validate JSON log file
validate_json_log() {
    log_info "Validating JSON log file..."
    if [ ! -f "$JSON_LOG_FILE" ]; then
        log_error "JSON log file not found: $JSON_LOG_FILE"
        return 1
    fi
    # Check if jq is available (should already be caught by the dependency check)
    if ! command -v jq >/dev/null 2>&1; then
        log_error "jq command not found - cannot validate JSON"
        return 1
    fi
    # Validate JSON syntax
    local jq_output
    if ! jq_output=$(jq empty "$JSON_LOG_FILE" 2>&1); then
        log_error "JSON log file is invalid: $jq_output"
        return 1
    fi
    # Get the entry count safely
    local entry_count
    if ! entry_count=$(jq 'length' "$JSON_LOG_FILE" 2>/dev/null); then
        log_error "Could not count entries in JSON log file"
        return 1
    fi
    log_success "JSON log file is valid ($entry_count entries)"
    return 0
}
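# Note: `jq length` reports the top-level entry count whether the log is a JSON
# array or an object; no particular schema for the backup job's log entries is
# assumed here.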
# Check backup file sizes for anomalies
check_backup_sizes() {
    log_info "Checking backup file sizes..."
    local backup_files=()
    local backup_sizes=()
    local total_size=0
    # Collect backup files and their sizes
    while IFS= read -r backup_file; do
        if [ -f "$backup_file" ] && [ -r "$backup_file" ]; then
            backup_files+=("$backup_file")
            local size=$(stat -f%z "$backup_file" 2>/dev/null || stat -c%s "$backup_file" 2>/dev/null || echo "0")
            backup_sizes+=("$size")
            total_size=$((total_size + size))
        fi
    done < <(find "$BACKUP_ROOT" -maxdepth 1 -type f -name "plex-backup-*.tar.gz" 2>/dev/null | sort)
    if [ ${#backup_files[@]} -eq 0 ]; then
        log_warning "No backup files found for size analysis"
        return 1
    fi
    # Calculate the average size
    local avg_size=$((total_size / ${#backup_files[@]}))
    local human_total=$(numfmt --to=iec "$total_size" 2>/dev/null || echo "${total_size} bytes")
    local human_avg=$(numfmt --to=iec "$avg_size" 2>/dev/null || echo "${avg_size} bytes")
    log_info "Total backup size: $human_total"
    log_info "Average backup size: $human_avg"
    # Check for suspiciously small backups (less than 50% of average)
    local min_size=$((avg_size / 2))
    local suspicious_count=0
    for i in "${!backup_files[@]}"; do
        local file="${backup_files[$i]}"
        local size="${backup_sizes[$i]}"
        local filename=$(basename "$file")
        if [ "$size" -lt "$min_size" ] && [ "$size" -gt 0 ]; then
            local human_size=$(numfmt --to=iec "$size" 2>/dev/null || echo "${size} bytes")
            # Extract the backup datetime to check whether it is a pre-blobs backup
            local backup_datetime=$(echo "$filename" | sed 's/plex-backup-\([0-9]\{8\}_[0-9]\{6\}\)\.tar\.gz/\1/')
            if [[ "$backup_datetime" =~ ^[0-9]{8}_[0-9]{6}$ ]] && [[ "$backup_datetime" < "20250526_144500" ]]; then
                log_info "Small backup (pre-blobs DB): $filename ($human_size)"
                log_info " This backup predates the blobs database introduction, size is expected"
            else
                log_warning "Suspiciously small backup: $filename ($human_size)"
                suspicious_count=$((suspicious_count + 1))
            fi
        fi
    done
    if [ "$suspicious_count" -gt 0 ]; then
        log_warning "Found $suspicious_count backup(s) that may be incomplete"
        return 1
    else
        log_success "All backup sizes appear normal"
    fi
    return 0
}
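# Example of the size heuristic: with an average backup size of 400 MiB, anything
# under 200 MiB (50% of the average) is flagged unless it predates the blobs database.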
# Check disk space
check_disk_space() {
    log_info "Checking disk space..."
    local backup_disk_usage=$(du -sh "$BACKUP_ROOT" | cut -f1)
    local available_space=$(df -h "$BACKUP_ROOT" | awk 'NR==2 {print $4}')
    local used_percentage=$(df "$BACKUP_ROOT" | awk 'NR==2 {print $5}' | sed 's/%//')
    log_info "Backup disk usage: $backup_disk_usage"
    log_info "Available space: $available_space"
    log_info "Disk usage: $used_percentage%"
    if [ "$used_percentage" -gt 90 ]; then
        log_error "Disk usage is above 90%"
        return 1
    elif [ "$used_percentage" -gt 80 ]; then
        log_warning "Disk usage is above 80%"
    else
        log_success "Disk usage is acceptable"
    fi
    return 0
}
# Generate backup report
generate_report() {
    log_info "Generating backup report..."
    local total_backups=0
    local valid_backups=0
    local total_errors=0
    # Header
    echo "==================================" >> "$REPORT_FILE"
    echo "Plex Backup Validation Report" >> "$REPORT_FILE"
    echo "Generated: $(date)" >> "$REPORT_FILE"
    echo "==================================" >> "$REPORT_FILE"
    # Use process substitution to avoid subshell variable scope issues
    while IFS= read -r backup_file; do
        total_backups=$((total_backups + 1))
        # Capture the error count without tripping `set -e` when validation fails
        local backup_errors=0
        validate_backup "$backup_file" || backup_errors=$?
        if [ "$backup_errors" -eq 0 ]; then
            valid_backups=$((valid_backups + 1))
        else
            total_errors=$((total_errors + backup_errors))
        fi
    done < <(find "$BACKUP_ROOT" -maxdepth 1 -type f -name "plex-backup-*.tar.gz" | sort)
    # Summary
    echo >> "$REPORT_FILE"
    echo "Summary:" >> "$REPORT_FILE"
    echo "  Total backups: $total_backups" >> "$REPORT_FILE"
    echo "  Valid backups: $valid_backups" >> "$REPORT_FILE"
    echo "  Total errors: $total_errors" >> "$REPORT_FILE"
    log_success "Report generated: $REPORT_FILE"
}
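# Reports land next to this script under logs/, timestamped per run via $REPORT_FILE,
# e.g. logs/backup-validation-20250526_144500.log (filename shown for illustration).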
# Fix common issues
fix_issues() {
    log_info "Attempting to fix common issues..."
    # Fix the JSON log file
    if [ ! -f "$JSON_LOG_FILE" ] || ! jq empty "$JSON_LOG_FILE" 2>/dev/null; then
        log_info "Fixing JSON log file..."
        mkdir -p "$(dirname "$JSON_LOG_FILE")"
        echo "{}" > "$JSON_LOG_FILE"
        log_success "JSON log file created/fixed"
    fi
    # Create the corrupted-backups directory
    local corrupted_dir="$(dirname "$REPORT_FILE")/corrupted-backups"
    mkdir -p "$corrupted_dir"
    # Check for and move corrupted backup files using process substitution
    local corrupted_count=0
    while IFS= read -r backup_file; do
        if ! tar -tzf "$backup_file" >/dev/null 2>&1; then
            log_warning "Found corrupted backup: $(basename "$backup_file")"
            local backup_name=$(basename "$backup_file")
            local corrupted_backup="$corrupted_dir/$backup_name"
            if mv "$backup_file" "$corrupted_backup"; then
                log_success "Moved corrupted backup to: $corrupted_backup"
                corrupted_count=$((corrupted_count + 1))
            else
                log_error "Failed to move corrupted backup: $backup_name"
            fi
        fi
    done < <(find "$BACKUP_ROOT" -maxdepth 1 -type f -name "plex-backup-*.tar.gz" 2>/dev/null || true)
    if [ "$corrupted_count" -gt 0 ]; then
        log_info "Moved $corrupted_count corrupted backup(s) to $corrupted_dir"
    fi
    # Clean up any remaining dated directories from the old backup structure
    find "$BACKUP_ROOT" -maxdepth 1 -type d -name "????????" -exec rm -rf {} \; 2>/dev/null || true
    # Fix permissions if needed
    if [ -d "$BACKUP_ROOT" ]; then
        chmod 755 "$BACKUP_ROOT" 2>/dev/null || log_warning "Could not fix backup root permissions"
        find "$BACKUP_ROOT" -type f -name "plex-backup-*.tar.gz" -exec chmod 644 {} \; 2>/dev/null || log_warning "Could not fix backup file permissions"
        log_success "Fixed backup permissions"
    fi
}
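# Summary of what --fix touches: recreates an empty JSON log if missing or invalid,
# quarantines unreadable archives into logs/corrupted-backups/, removes leftover
# dated directories from the old backup layout, and normalizes permissions
# (755 on the backup root, 644 on the tarballs).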
# Main function
main() {
    local fix_mode=false
    local report_mode=false
    local verbose_mode=false
    # Parse arguments
    while [[ $# -gt 0 ]]; do
        case $1 in
            --fix)
                fix_mode=true
                shift
                ;;
            --report)
                report_mode=true
                shift
                ;;
            --verbose|-v)
                verbose_mode=true
                shift
                ;;
            --help|-h)
                echo "Usage: $0 [OPTIONS]"
                echo "Validate Plex backup files and system health"
                echo ""
                echo "OPTIONS:"
                echo "  --fix      Attempt to fix common issues"
                echo "  --report   Generate detailed backup report"
                echo "  --verbose  Enable verbose output"
                echo "  --help     Show this help message"
                exit 0
                ;;
            *)
                echo "Usage: $0 [--fix] [--report] [--verbose]"
                echo "  --fix      Attempt to fix common issues"
                echo "  --report   Generate detailed backup report"
                echo "  --verbose  Enable verbose output"
                echo "Use --help for more information"
                exit 1
                ;;
        esac
    done
    log_info "Starting Plex backup validation..."
    # Check dependencies first
    if ! check_dependencies; then
        exit 1
    fi
    # Create the logs directory if needed
    mkdir -p "$(dirname "$REPORT_FILE")"
    local overall_status=0
    local critical_errors=0
    local warnings=0
    # Fix issues if requested
    if [ "$fix_mode" = true ]; then
        fix_issues
    fi
    # Validate backup structure
    if ! validate_backup_structure; then
        critical_errors=$((critical_errors + 1))
    fi
    # Check backup freshness
    if ! check_backup_freshness; then
        # Distinguish "no backups found" (critical error) from "latest backup is old" (warning)
        local backup_count=$(find "$BACKUP_ROOT" -maxdepth 1 -type f -name "plex-backup-*.tar.gz" 2>/dev/null | wc -l)
        if [ "$backup_count" -eq 0 ]; then
            critical_errors=$((critical_errors + 1))
        else
            warnings=$((warnings + 1))
        fi
    fi
    # Validate JSON log
    if ! validate_json_log; then
        critical_errors=$((critical_errors + 1))
    fi
    # Check disk space
    if ! check_disk_space; then
        warnings=$((warnings + 1))
    fi
    # Check backup file sizes
    if [ "$verbose_mode" = true ] || [ "$report_mode" = true ]; then
        if ! check_backup_sizes; then
            warnings=$((warnings + 1))
        fi
    fi
    # Generate a detailed report if requested
    if [ "$report_mode" = true ]; then
        generate_report
    fi
    # Final summary
    echo
    if [ "$critical_errors" -eq 0 ] && [ "$warnings" -eq 0 ]; then
        log_success "All validation checks passed"
        overall_status=0
    elif [ "$critical_errors" -eq 0 ]; then
        log_warning "Validation completed with $warnings warning(s) but no critical errors"
        log_info "System appears healthy despite warnings"
        overall_status=0
    else
        log_error "Validation failed with $critical_errors critical error(s) and $warnings warning(s)"
        overall_status=1
        echo
        echo "Consider running with --fix to attempt automatic repairs"
        echo "Use --report for a detailed backup analysis"
    fi
    exit $overall_status
}
main "$@"
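# Exit status: 0 when all checks pass or only warnings were raised; 1 when a
# critical error was found (no backups, inaccessible backup root, or an
# invalid/missing JSON log), when dependencies are missing, or on invalid options.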