mirror of
https://github.com/acedanger/shell.git
synced 2025-12-06 08:50:12 -08:00
336 lines
9.2 KiB
Bash
Executable File
336 lines
9.2 KiB
Bash
Executable File
#!/bin/bash
#
# Plex Backup Validation and Monitoring Script
#
# Validates Plex backup archives (integrity + expected contents), checks
# backup freshness, the JSON run log, and disk space. Can optionally fix
# common issues and write a detailed report.
#
# Usage: ./validate-plex-backups.sh [--fix] [--report]

# Fail fast: abort on command errors, unset variables, and failed
# pipeline stages (stronger than the original bare `set -e`).
set -euo pipefail

# ANSI color codes for terminal output (stripped when logging to file).
readonly RED='\033[0;31m'
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[1;33m'
readonly BLUE='\033[0;34m'
readonly NC='\033[0m'

# Configuration
readonly SCRIPT_DIR="$(dirname "$(readlink -f "$0")")"
readonly BACKUP_ROOT="/mnt/share/media/backups/plex"
readonly JSON_LOG_FILE="$SCRIPT_DIR/logs/plex-backup.json"
readonly REPORT_FILE="$SCRIPT_DIR/logs/backup-validation-$(date +%Y%m%d_%H%M%S).log"

# Files every valid backup archive must contain.
readonly EXPECTED_FILES=(
    "com.plexapp.plugins.library.db"
    "com.plexapp.plugins.library.blobs.db"
    "Preferences.xml"
)
# log_message MESSAGE CLEAN_MESSAGE
# Prints a timestamped (possibly colored) MESSAGE to the terminal and
# appends a color-free line to $REPORT_FILE. When CLEAN_MESSAGE is
# non-empty it is logged verbatim; otherwise ANSI escape sequences are
# stripped from MESSAGE before logging.
log_message() {
    local colored="$1"
    local plain="$2"
    local stamp
    stamp="$(date '+%H:%M:%S')"

    # Colored output for the interactive terminal.
    echo -e "$stamp $colored"

    if [ -n "$plain" ]; then
        echo "$stamp $plain" >> "$REPORT_FILE"
    else
        # No clean variant supplied: strip ANSI escape codes so the
        # report file stays plain text.
        echo "$stamp $colored" | sed 's/\x1b\[[0-9;]*m//g' >> "$REPORT_FILE"
    fi
}
# _log_with_level COLOR LEVEL MESSAGE
# Shared implementation for the leveled log helpers below: emits the
# colored form to the terminal and the plain "LEVEL: message" form to
# the report file via log_message.
_log_with_level() {
    local color="$1"
    local level="$2"
    local msg="$3"
    log_message "${color}${level}: ${msg}${NC}" "${level}: ${msg}"
}

# Leveled logging helpers — same names and output as before, now
# delegating to _log_with_level.
log_error()   { _log_with_level "$RED" "ERROR" "$1"; }

log_success() { _log_with_level "$GREEN" "SUCCESS" "$1"; }

log_warning() { _log_with_level "$YELLOW" "WARNING" "$1"; }

log_info()    { _log_with_level "$BLUE" "INFO" "$1"; }
# Check backup directory structure
# validate_backup_structure
# Verifies that $BACKUP_ROOT exists and contains at least one
# plex-backup-*.tar.gz archive.
# Returns: 0 when backups are present, 1 otherwise.
validate_backup_structure() {
    log_info "Validating backup directory structure..."

    if [ ! -d "$BACKUP_ROOT" ]; then
        log_error "Backup root directory not found: $BACKUP_ROOT"
        return 1
    fi

    # Declaration split from assignment so a failing `find` is not
    # masked by `local`'s own (successful) exit status under set -e.
    local backup_count
    backup_count=$(find "$BACKUP_ROOT" -maxdepth 1 -type f -name "plex-backup-*.tar.gz" | wc -l)
    log_info "Found $backup_count backup files"

    if [ "$backup_count" -eq 0 ]; then
        log_warning "No backup files found"
        return 1
    fi

    return 0
}
# Validate individual backup
# validate_backup BACKUP_FILE
# Checks one archive: readability, tar/gzip integrity, presence of all
# EXPECTED_FILES, and warns about unexpected entries.
# Returns: the number of errors found (0 = valid), or 1 if the file is
# not accessible at all.
validate_backup() {
    local backup_file="$1"
    local backup_name
    backup_name=$(basename "$backup_file")
    local errors=0

    log_info "Validating backup: $backup_name"

    # Check if file exists and is readable
    if [ ! -f "$backup_file" ] || [ ! -r "$backup_file" ]; then
        log_error "Backup file not accessible: $backup_file"
        return 1
    fi

    # Test archive integrity
    if ! tar -tzf "$backup_file" >/dev/null 2>&1; then
        log_error "Archive integrity check failed: $backup_name"
        errors=$((errors + 1))
    else
        log_success "Archive integrity check passed: $backup_name"

        local archive_contents
        archive_contents=$(tar -tzf "$backup_file" 2>/dev/null)

        # -F -x: match the literal filename exactly. The original
        # `grep -q "^$file$"` treated the dots in the expected names as
        # regex wildcards, allowing false-positive matches.
        local file
        for file in "${EXPECTED_FILES[@]}"; do
            if echo "$archive_contents" | grep -Fxq -- "$file"; then
                log_success "  Found: $file"
            else
                log_error "  Missing file: $file"
                errors=$((errors + 1))
            fi
        done

        # Warn about entries not in the expected list. Exact per-element
        # comparison replaces the original substring match against the
        # joined array, which was fragile for names containing spaces.
        local line expected found
        while IFS= read -r line; do
            found=0
            for expected in "${EXPECTED_FILES[@]}"; do
                if [ "$line" = "$expected" ]; then
                    found=1
                    break
                fi
            done
            if [ "$found" -eq 0 ]; then
                log_warning "  Unexpected file: $line"
            fi
        done <<< "$archive_contents"
    fi

    return $errors
}
# Check backup freshness
# check_backup_freshness
# Finds the newest plex-backup-YYYYMMDD_HHMMSS.tar.gz (the timestamped
# names sort chronologically) and evaluates its age.
# Returns: 0 when fresh enough (warns above 3 days), 1 when no backups
# exist or the latest is older than 7 days.
check_backup_freshness() {
    log_info "Checking backup freshness..."

    # Declarations split from assignments so a failing command is not
    # masked by `local`'s exit status under set -e.
    local latest_backup
    latest_backup=$(find "$BACKUP_ROOT" -maxdepth 1 -type f -name "plex-backup-*.tar.gz" | sort | tail -1)

    if [ -z "$latest_backup" ]; then
        log_error "No backups found"
        return 1
    fi

    local backup_filename
    backup_filename=$(basename "$latest_backup")
    # Extract date from filename: plex-backup-YYYYMMDD_HHMMSS.tar.gz
    local backup_date
    backup_date=$(echo "$backup_filename" | sed 's/plex-backup-//' | sed 's/_.*$//')

    # NOTE(review): `date -d` is GNU-specific — this script assumes a
    # Linux host; confirm if BSD/macOS support is ever needed.
    local backup_timestamp current_timestamp age_days
    backup_timestamp=$(date -d "${backup_date:0:4}-${backup_date:4:2}-${backup_date:6:2}" +%s)
    current_timestamp=$(date +%s)
    age_days=$(( (current_timestamp - backup_timestamp) / 86400 ))

    log_info "Latest backup: $backup_date ($age_days days old)"

    if [ "$age_days" -gt 7 ]; then
        log_warning "Latest backup is older than 7 days"
        return 1
    elif [ "$age_days" -gt 3 ]; then
        log_warning "Latest backup is older than 3 days"
    else
        log_success "Latest backup is recent"
    fi

    return 0
}
# Validate JSON log file
# validate_json_log
# Ensures the backup JSON run log exists and parses cleanly with jq.
# Returns: 0 when valid, 1 when missing or malformed.
validate_json_log() {
    log_info "Validating JSON log file..."

    if [ ! -f "$JSON_LOG_FILE" ]; then
        log_error "JSON log file not found: $JSON_LOG_FILE"
        return 1
    fi

    # `jq empty` parses without producing output; non-zero means the
    # file is not valid JSON.
    if ! jq empty "$JSON_LOG_FILE" 2>/dev/null; then
        log_error "JSON log file is invalid"
        return 1
    fi

    # Declaration split from assignment so a jq failure is not masked
    # by `local`'s exit status under set -e.
    local entry_count
    entry_count=$(jq 'length' "$JSON_LOG_FILE")
    log_success "JSON log file is valid ($entry_count entries)"

    return 0
}
# Check disk space
# check_disk_space
# Reports the backup directory's size and the filesystem's free space /
# usage percentage.
# Returns: 1 when usage exceeds 90%, 0 otherwise (warns above 80%).
check_disk_space() {
    log_info "Checking disk space..."

    local usage_human free_human pct_used
    usage_human=$(du -sh "$BACKUP_ROOT" | cut -f1)
    free_human=$(df -h "$BACKUP_ROOT" | awk 'NR==2 {print $4}')
    # Numeric percentage (the `%` suffix stripped) for the comparisons.
    pct_used=$(df "$BACKUP_ROOT" | awk 'NR==2 {print $5}' | sed 's/%//')

    log_info "Backup disk usage: $usage_human"
    log_info "Available space: $free_human"
    log_info "Disk usage: $pct_used%"

    if [ "$pct_used" -gt 90 ]; then
        log_error "Disk usage is above 90%"
        return 1
    fi

    if [ "$pct_used" -gt 80 ]; then
        log_warning "Disk usage is above 80%"
    else
        log_success "Disk usage is acceptable"
    fi

    return 0
}
# Generate backup report
# generate_report
# Validates every archive under $BACKUP_ROOT and appends a header plus
# per-backup results and a summary to $REPORT_FILE.
generate_report() {
    log_info "Generating backup report..."

    local total_backups=0
    local valid_backups=0
    local total_errors=0
    local backup_file backup_errors

    # Header
    {
        echo "=================================="
        echo "Plex Backup Validation Report"
        echo "Generated: $(date)"
        echo "=================================="
    } >> "$REPORT_FILE"

    # Process substitution keeps this loop in the current shell. The
    # original `find | while` ran the loop in a pipeline subshell, so
    # every counter was lost and the summary always reported zeros.
    while IFS= read -r backup_file; do
        total_backups=$((total_backups + 1))

        # Capture the error count without tripping set -e on a
        # non-zero return from validate_backup.
        backup_errors=0
        validate_backup "$backup_file" || backup_errors=$?

        if [ "$backup_errors" -eq 0 ]; then
            valid_backups=$((valid_backups + 1))
        else
            total_errors=$((total_errors + backup_errors))
        fi
    done < <(find "$BACKUP_ROOT" -maxdepth 1 -type f -name "plex-backup-*.tar.gz" | sort)

    # Summary
    {
        echo
        echo "Summary:"
        echo "  Total backups: $total_backups"
        echo "  Valid backups: $valid_backups"
        echo "  Total errors: $total_errors"
    } >> "$REPORT_FILE"

    log_success "Report generated: $REPORT_FILE"
}
# Fix common issues
# _fix_json_log
# Recreates the JSON log when it is missing or fails to parse.
_fix_json_log() {
    if [ -f "$JSON_LOG_FILE" ] && jq empty "$JSON_LOG_FILE" 2>/dev/null; then
        return 0
    fi
    log_info "Fixing JSON log file..."
    mkdir -p "$(dirname "$JSON_LOG_FILE")"
    # NOTE(review): initialized as an empty object; confirm the backup
    # writer does not expect a top-level array instead.
    echo "{}" > "$JSON_LOG_FILE"
    log_success "JSON log file created/fixed"
}

# _fix_backup_tree
# Removes leftover YYYYMMDD directories from the old backup layout and
# normalizes permissions on the backup root and archives.
_fix_backup_tree() {
    # Eight-character directory names, e.g. 20240101.
    find "$BACKUP_ROOT" -maxdepth 1 -type d -name "????????" -exec rm -rf {} \; 2>/dev/null || true

    if [ -d "$BACKUP_ROOT" ]; then
        chmod 755 "$BACKUP_ROOT"
        find "$BACKUP_ROOT" -type f -name "plex-backup-*.tar.gz" -exec chmod 644 {} \; 2>/dev/null || true
        log_success "Fixed backup permissions"
    fi
}

# fix_issues
# Best-effort repair of the common problems the validators detect.
fix_issues() {
    log_info "Attempting to fix common issues..."
    _fix_json_log
    _fix_backup_tree
}
# Main function
# main [--fix] [--report]
# Orchestrates all validation checks. Optionally runs repairs first
# (--fix) and/or writes a detailed report (--report).
# Exits: 0 when every check passes, 1 when any check fails or on
# unrecognized arguments.
main() {
    local do_fix=false
    local do_report=false

    # Parse arguments
    while [[ $# -gt 0 ]]; do
        case $1 in
            --fix)
                do_fix=true
                shift
                ;;
            --report)
                do_report=true
                shift
                ;;
            *)
                echo "Usage: $0 [--fix] [--report]"
                echo "  --fix     Attempt to fix common issues"
                echo "  --report  Generate detailed backup report"
                exit 1
                ;;
        esac
    done

    log_info "Starting Plex backup validation..."

    # Create logs directory if needed
    mkdir -p "$(dirname "$REPORT_FILE")"

    local overall_status=0

    # Repairs happen first so the checks below see the fixed state.
    if [ "$do_fix" = true ]; then
        fix_issues
    fi

    # Run each validation step in order; remember whether any failed.
    local check
    for check in validate_backup_structure check_backup_freshness validate_json_log check_disk_space; do
        if ! "$check"; then
            overall_status=1
        fi
    done

    # Generate detailed report if requested
    if [ "$do_report" = true ]; then
        generate_report
    fi

    # Final summary
    echo
    if [ "$overall_status" -eq 0 ]; then
        log_success "All validation checks passed"
    else
        log_error "Some validation checks failed"
        echo
        echo "Consider running with --fix to attempt automatic repairs"
        echo "Use --report for a detailed backup analysis"
    fi

    exit $overall_status
}
# Script entry point: forward all command-line arguments to main.
main "$@"