feat: Refactor logging system to use local log directory and add synchronization and cleanup functions

This commit is contained in:
Peter Wood
2025-06-01 13:22:06 -04:00
parent 6811d2ba5e
commit 209b855977
4 changed files with 275 additions and 81 deletions

View File

@@ -152,8 +152,7 @@ The enhanced backup system implements:
├── plex-backup-20250123_143008.tar.gz # Older backup
└── logs/
├── backup_log_20250125_143022.md
├── plex-backup-performance.json
└── plex-backup.json
└── plex-backup-performance.json
```
## Enhanced Features

View File

@@ -21,9 +21,10 @@ SERVICE_START_TIME=""
MAX_BACKUP_AGE_DAYS=30
MAX_BACKUPS_TO_KEEP=10
BACKUP_ROOT="/mnt/share/media/backups/plex"
LOG_ROOT="/mnt/share/media/backups/logs"
SHARED_LOG_ROOT="/mnt/share/media/backups/logs"
SCRIPT_DIR="$(dirname "$(readlink -f "$0")")"
PERFORMANCE_LOG_FILE="${SCRIPT_DIR}/logs/plex-backup-performance.json"
LOCAL_LOG_ROOT="${SCRIPT_DIR}/logs"
PERFORMANCE_LOG_FILE="${LOCAL_LOG_ROOT}/plex-backup-performance.json"
# Backup strategy configuration - Always perform full backups
@@ -107,35 +108,40 @@ log_message() {
local message="$1"
local timestamp=$(date '+%Y-%m-%d %H:%M:%S')
echo -e "${CYAN}[${timestamp}]${NC} ${message}"
echo "[${timestamp}] $message" >> "${LOG_ROOT}/plex-backup-$(date '+%Y-%m-%d').log" 2>/dev/null || true
mkdir -p "$LOCAL_LOG_ROOT"
echo "[${timestamp}] $message" >> "${LOCAL_LOG_ROOT}/plex-backup-$(date '+%Y-%m-%d').log" 2>/dev/null || true
}
log_error() {
  # Print a timestamped error message to the console (red) and append it to
  # today's local log file. The file write is best-effort: failures are
  # suppressed so logging can never abort the backup itself.
  local message="$1"
  local timestamp
  timestamp=$(date '+%Y-%m-%d %H:%M:%S')
  echo -e "${RED}[${timestamp}] ERROR:${NC} ${message}"
  # Removed the leftover write to the legacy shared ${LOG_ROOT} path; this
  # refactor logs locally first and syncs to the shared location later.
  mkdir -p "$LOCAL_LOG_ROOT"
  echo "[${timestamp}] ERROR: $message" >> "${LOCAL_LOG_ROOT}/plex-backup-$(date '+%Y-%m-%d').log" 2>/dev/null || true
}
log_success() {
  # Print a timestamped success message to the console (green) and append it
  # to today's local log file. The file write is best-effort: failures are
  # suppressed so logging can never abort the backup itself.
  local message="$1"
  local timestamp
  timestamp=$(date '+%Y-%m-%d %H:%M:%S')
  echo -e "${GREEN}[${timestamp}] SUCCESS:${NC} ${message}"
  # Removed the leftover write to the legacy shared ${LOG_ROOT} path; this
  # refactor logs locally first and syncs to the shared location later.
  mkdir -p "$LOCAL_LOG_ROOT"
  echo "[${timestamp}] SUCCESS: $message" >> "${LOCAL_LOG_ROOT}/plex-backup-$(date '+%Y-%m-%d').log" 2>/dev/null || true
}
log_warning() {
  # Print a timestamped warning message to the console (yellow) and append it
  # to today's local log file. The file write is best-effort: failures are
  # suppressed so logging can never abort the backup itself.
  local message="$1"
  local timestamp
  timestamp=$(date '+%Y-%m-%d %H:%M:%S')
  echo -e "${YELLOW}[${timestamp}] WARNING:${NC} ${message}"
  # Removed the leftover write to the legacy shared ${LOG_ROOT} path; this
  # refactor logs locally first and syncs to the shared location later.
  mkdir -p "$LOCAL_LOG_ROOT"
  echo "[${timestamp}] WARNING: $message" >> "${LOCAL_LOG_ROOT}/plex-backup-$(date '+%Y-%m-%d').log" 2>/dev/null || true
}
log_info() {
  # Print a timestamped informational message to the console (blue) and
  # append it to today's local log file. The file write is best-effort:
  # failures are suppressed so logging can never abort the backup itself.
  local message="$1"
  local timestamp
  timestamp=$(date '+%Y-%m-%d %H:%M:%S')
  echo -e "${BLUE}[${timestamp}] INFO:${NC} ${message}"
  # Removed the leftover write to the legacy shared ${LOG_ROOT} path; this
  # refactor logs locally first and syncs to the shared location later.
  mkdir -p "$LOCAL_LOG_ROOT"
  echo "[${timestamp}] INFO: $message" >> "${LOCAL_LOG_ROOT}/plex-backup-$(date '+%Y-%m-%d').log" 2>/dev/null || true
}
# Performance tracking functions
@@ -181,6 +187,118 @@ initialize_logs() {
fi
}
# Log synchronization functions
#######################################
# One-way sync of local log/JSON files to the shared log directory.
# Files are copied only when missing on the shared side or when the local
# copy is newer (-nt). Individual copy failures are logged, not fatal.
# Globals:  LOCAL_LOG_ROOT (read), SHARED_LOG_ROOT (read/created)
# Returns:  number of failed copies (0 = success); 1 if the shared
#           directory cannot be created or is not writable.
#######################################
sync_logs_to_shared() {
  local sync_start_time
  sync_start_time=$(date +%s)
  log_info "Starting log synchronization to shared location"

  # Ensure shared log directory exists.
  if ! mkdir -p "$SHARED_LOG_ROOT" 2>/dev/null; then
    log_warning "Could not create shared log directory: $SHARED_LOG_ROOT"
    return 1
  fi

  # Check if shared location is writable before attempting any copies.
  if [ ! -w "$SHARED_LOG_ROOT" ]; then
    log_warning "Shared log directory is not writable: $SHARED_LOG_ROOT"
    return 1
  fi

  local sync_count=0
  local error_count=0
  local log_file filename shared_file
  for log_file in "$LOCAL_LOG_ROOT"/*.log "$LOCAL_LOG_ROOT"/*.json; do
    # Unmatched globs expand to the literal pattern; -f filters them out.
    [ -f "$log_file" ] || continue
    filename=$(basename "$log_file")
    shared_file="$SHARED_LOG_ROOT/$filename"
    # Only copy if the shared copy is missing or older than the local one.
    if [ ! -f "$shared_file" ] || [ "$log_file" -nt "$shared_file" ]; then
      if cp "$log_file" "$shared_file" 2>/dev/null; then
        # x=$((x+1)) instead of ((x++)): the latter returns status 1 when
        # the pre-increment value is 0, which would trip 'set -e'.
        sync_count=$((sync_count + 1))
        log_info "Synced: $filename"
      else
        error_count=$((error_count + 1))
        log_warning "Failed to sync: $filename"
      fi
    fi
  done

  local sync_end_time sync_duration
  sync_end_time=$(date +%s)
  sync_duration=$((sync_end_time - sync_start_time))
  if [ "$error_count" -eq 0 ]; then
    log_success "Log sync completed: $sync_count files synced in ${sync_duration}s"
  else
    log_warning "Log sync completed with errors: $sync_count synced, $error_count failed in ${sync_duration}s"
  fi
  return "$error_count"
}
# Cleanup old local logs (30 day retention)
#######################################
# 30-day retention for local logs: deletes *.log files older than 30 days
# and prunes performance-log JSON entries older than 30 days (the JSON
# file itself and its array structure are preserved).
# Globals:  LOCAL_LOG_ROOT (read), PERFORMANCE_LOG_FILE (read/rewritten)
# Returns:  number of errors encountered (0 = success).
#######################################
cleanup_old_local_logs() {
  local cleanup_start_time
  cleanup_start_time=$(date +%s)
  log_info "Starting cleanup of old local logs (30+ days)"

  if [ ! -d "$LOCAL_LOG_ROOT" ]; then
    log_info "Local log directory does not exist, nothing to clean up"
    return 0
  fi

  local cleanup_count=0
  local error_count=0

  # Remove log files older than 30 days. NUL-delimited find output keeps
  # this safe for any filename; process substitution keeps the counters in
  # the current shell (a pipe would lose them in a subshell).
  local old_file filename
  while IFS= read -r -d '' old_file; do
    filename=$(basename "$old_file")
    if rm "$old_file" 2>/dev/null; then
      # x=$((x+1)) instead of ((x++)): the latter returns status 1 when the
      # pre-increment value is 0, which would trip 'set -e'.
      cleanup_count=$((cleanup_count + 1))
      log_info "Removed old log: $filename"
    else
      error_count=$((error_count + 1))
      log_warning "Failed to remove old log: $filename"
    fi
  done < <(find "$LOCAL_LOG_ROOT" -name "*.log" -mtime +30 -print0 2>/dev/null)

  # Prune old performance-log entries by timestamp.
  # NOTE(review): 'date -d "30 days ago"' is GNU-specific — fine on Linux,
  # not portable to BSD/macOS date.
  if [ -f "$PERFORMANCE_LOG_FILE" ]; then
    local thirty_days_ago temp_perf_file
    thirty_days_ago=$(date -d '30 days ago' -Iseconds)
    temp_perf_file="${PERFORMANCE_LOG_FILE}.cleanup.tmp"
    if jq --arg cutoff "$thirty_days_ago" '[.[] | select(.timestamp >= $cutoff)]' "$PERFORMANCE_LOG_FILE" > "$temp_perf_file" 2>/dev/null; then
      # Declaration split from assignment so a jq failure isn't masked by
      # 'local' always returning 0.
      local old_count new_count removed_count
      old_count=$(jq length "$PERFORMANCE_LOG_FILE" 2>/dev/null || echo "0")
      new_count=$(jq length "$temp_perf_file" 2>/dev/null || echo "0")
      removed_count=$((old_count - new_count))
      if [ "$removed_count" -gt 0 ]; then
        mv "$temp_perf_file" "$PERFORMANCE_LOG_FILE"
        log_info "Cleaned up $removed_count old performance entries"
        cleanup_count=$((cleanup_count + removed_count))
      else
        rm -f "$temp_perf_file"
      fi
    else
      rm -f "$temp_perf_file"
      log_warning "Failed to clean up old performance log entries"
      error_count=$((error_count + 1))
    fi
  fi

  local cleanup_end_time cleanup_duration
  cleanup_end_time=$(date +%s)
  cleanup_duration=$((cleanup_end_time - cleanup_start_time))
  if [ "$cleanup_count" -gt 0 ]; then
    log_success "Cleanup completed: $cleanup_count items removed in ${cleanup_duration}s"
  else
    log_info "Cleanup completed: no old items found to remove in ${cleanup_duration}s"
  fi
  return "$error_count"
}
# Enhanced notification system
send_notification() {
local title="$1"
@@ -1170,6 +1288,11 @@ main() {
log_message "Files backed up: $files_backed_up"
log_message "Errors encountered: $backup_errors"
# Sync logs to shared location and cleanup old local logs
log_info "Post-backup: synchronizing logs and cleaning up old files"
sync_logs_to_shared
cleanup_old_local_logs
if [ "$backup_errors" -gt 0 ]; then
log_error "Backup completed with errors"
send_notification "Backup Error" "Backup completed with $backup_errors errors" "error"

View File

@@ -17,9 +17,9 @@ NC='\033[0m' # No Color
# Configuration
SCRIPT_DIR="$(dirname "$(readlink -f "$0")")"
BACKUP_ROOT="/mnt/share/media/backups/plex"
LOG_ROOT="/mnt/share/media/backups/logs"
JSON_LOG_FILE="$SCRIPT_DIR/logs/plex-backup.json"
PERFORMANCE_LOG_FILE="$SCRIPT_DIR/logs/plex-backup-performance.json"
SHARED_LOG_ROOT="/mnt/share/media/backups/logs"
LOCAL_LOG_ROOT="$SCRIPT_DIR/logs"
PERFORMANCE_LOG_FILE="$LOCAL_LOG_ROOT/plex-backup-performance.json"
# Display mode
WATCH_MODE=false
@@ -71,6 +71,24 @@ clear_screen() {
fi
}
# Helper function to find most recent log from local or shared location
# Locate the newest log file matching the glob in $1, preferring the local
# log directory and falling back to the shared one. Prints the full path
# on stdout, or nothing when no file matches either location.
find_most_recent_log() {
  local pattern="$1"
  local candidate=""

  # Local logs take priority.
  if [ -d "$LOCAL_LOG_ROOT" ]; then
    candidate=$(find "$LOCAL_LOG_ROOT" -name "$pattern" -type f 2>/dev/null | sort | tail -1)
  fi

  # Fall back to the shared location only when nothing local matched.
  if [ -z "$candidate" ] && [ -d "$SHARED_LOG_ROOT" ]; then
    candidate=$(find "$SHARED_LOG_ROOT" -name "$pattern" -type f 2>/dev/null | sort | tail -1)
  fi

  echo "$candidate"
}
# Header display
show_header() {
echo -e "${CYAN}╔══════════════════════════════════════════════════════════════════════════════╗${NC}"
@@ -106,10 +124,17 @@ check_system_status() {
log_status "ERROR" "Backup directory missing: $BACKUP_ROOT"
fi
if [ -d "$LOG_ROOT" ]; then
log_status "OK" "Log directory exists"
# Check log directories (prioritize local, show shared as secondary)
if [ -d "$LOCAL_LOG_ROOT" ]; then
log_status "OK" "Local log directory exists"
else
log_status "WARN" "Log directory missing: $LOG_ROOT"
log_status "WARN" "Local log directory missing: $LOCAL_LOG_ROOT"
fi
if [ -d "$SHARED_LOG_ROOT" ]; then
log_status "INFO" "Shared log directory accessible"
else
log_status "WARN" "Shared log directory missing: $SHARED_LOG_ROOT"
fi
# Check dependencies
@@ -238,31 +263,21 @@ show_recent_activity() {
echo -e "${BLUE}📋 RECENT ACTIVITY${NC}"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
# Check JSON log for last backup times
if [ -f "$JSON_LOG_FILE" ]; then
log_status "OK" "Backup tracking log found"
local file_count=$(jq 'length' "$JSON_LOG_FILE" 2>/dev/null || echo "0")
log_status "INFO" "Tracked files: $file_count"
if [ "$file_count" -gt 0 ]; then
echo -e "${YELLOW}Last Backup Times:${NC}"
jq -r 'to_entries | .[] | " \(.key | split("/") | .[-1]): \(.value | strftime("%Y-%m-%d %H:%M:%S"))"' "$JSON_LOG_FILE" 2>/dev/null | head -5
fi
else
log_status "WARN" "Backup tracking log not found"
fi
# Check recent log files
if [ -d "$LOG_ROOT" ]; then
local recent_log=$(find "$LOG_ROOT" -name "plex-backup-*.log" -type f 2>/dev/null | sort | tail -1)
if [ -n "$recent_log" ]; then
local log_date=$(basename "$recent_log" | sed 's/plex-backup-//' | sed 's/.log//')
log_status "INFO" "Most recent log: $log_date"
# Check for errors in recent log
local error_count=$(grep -c "ERROR:" "$recent_log" 2>/dev/null || echo "0")
local warning_count=$(grep -c "WARNING:" "$recent_log" 2>/dev/null || echo "0")
local recent_log=$(find_most_recent_log "plex-backup-*.log")
if [ -n "$recent_log" ]; then
local log_date=$(basename "$recent_log" | sed 's/plex-backup-//' | sed 's/.log//')
local log_location=""
if [[ "$recent_log" == "$LOCAL_LOG_ROOT"* ]]; then
log_location=" (local)"
else
log_location=" (shared)"
fi
log_status "INFO" "Most recent log: $log_date$log_location"
# Check for errors in recent log
local error_count=$(grep -c "ERROR:" "$recent_log" 2>/dev/null || echo "0")
local warning_count=$(grep -c "WARNING:" "$recent_log" 2>/dev/null || echo "0")
if [ "$error_count" -eq 0 ] && [ "$warning_count" -eq 0 ]; then
log_status "OK" "No errors or warnings in recent log"

View File

@@ -14,8 +14,9 @@ NC='\033[0m'
# Configuration
SCRIPT_DIR="$(dirname "$(readlink -f "$0")")"
BACKUP_ROOT="/mnt/share/media/backups/plex"
JSON_LOG_FILE="$SCRIPT_DIR/logs/plex-backup.json"
REPORT_FILE="$SCRIPT_DIR/logs/backup-validation-$(date +%Y%m%d_%H%M%S).log"
SHARED_LOG_ROOT="/mnt/share/media/backups/logs"
LOCAL_LOG_ROOT="$SCRIPT_DIR/logs"
REPORT_FILE="$LOCAL_LOG_ROOT/backup-validation-$(date +%Y%m%d_%H%M%S).log"
# Expected files in backup
EXPECTED_FILES=(
@@ -62,6 +63,94 @@ log_info() {
log_message "${BLUE}INFO: $1${NC}" "INFO: $1"
}
# Log synchronization functions
#######################################
# One-way sync of local *.log files to the shared log directory.
# Files are copied only when missing on the shared side or when the local
# copy is newer (-nt). Individual copy failures are logged, not fatal.
# Globals:  LOCAL_LOG_ROOT (read), SHARED_LOG_ROOT (read/created)
# Returns:  number of failed copies (0 = success); 1 if the shared
#           directory cannot be created or is not writable.
#######################################
sync_logs_to_shared() {
  local sync_start_time
  sync_start_time=$(date +%s)
  log_info "Starting log synchronization to shared location"

  # Ensure shared log directory exists.
  if ! mkdir -p "$SHARED_LOG_ROOT" 2>/dev/null; then
    log_warning "Could not create shared log directory: $SHARED_LOG_ROOT"
    return 1
  fi

  # Check if shared location is writable before attempting any copies.
  if [ ! -w "$SHARED_LOG_ROOT" ]; then
    log_warning "Shared log directory is not writable: $SHARED_LOG_ROOT"
    return 1
  fi

  local sync_count=0
  local error_count=0
  local log_file filename shared_file
  for log_file in "$LOCAL_LOG_ROOT"/*.log; do
    # An unmatched glob expands to the literal pattern; -f filters it out.
    [ -f "$log_file" ] || continue
    filename=$(basename "$log_file")
    shared_file="$SHARED_LOG_ROOT/$filename"
    # Only copy if the shared copy is missing or older than the local one.
    if [ ! -f "$shared_file" ] || [ "$log_file" -nt "$shared_file" ]; then
      if cp "$log_file" "$shared_file" 2>/dev/null; then
        # x=$((x+1)) instead of ((x++)): the latter returns status 1 when
        # the pre-increment value is 0, which would trip 'set -e'.
        sync_count=$((sync_count + 1))
        log_info "Synced: $filename"
      else
        error_count=$((error_count + 1))
        log_warning "Failed to sync: $filename"
      fi
    fi
  done

  local sync_end_time sync_duration
  sync_end_time=$(date +%s)
  sync_duration=$((sync_end_time - sync_start_time))
  if [ "$error_count" -eq 0 ]; then
    log_success "Log sync completed: $sync_count files synced in ${sync_duration}s"
  else
    log_warning "Log sync completed with errors: $sync_count synced, $error_count failed in ${sync_duration}s"
  fi
  return "$error_count"
}
# Cleanup old local logs (30 day retention)
#######################################
# 30-day retention for local logs: deletes local *.log files whose
# modification time is more than 30 days old.
# Globals:  LOCAL_LOG_ROOT (read)
# Returns:  number of errors encountered (0 = success).
#######################################
cleanup_old_local_logs() {
  local cleanup_start_time
  cleanup_start_time=$(date +%s)
  log_info "Starting cleanup of old local logs (30+ days)"

  if [ ! -d "$LOCAL_LOG_ROOT" ]; then
    log_info "Local log directory does not exist, nothing to clean up"
    return 0
  fi

  local cleanup_count=0
  local error_count=0

  # Remove log files older than 30 days. NUL-delimited find output keeps
  # this safe for any filename; process substitution keeps the counters in
  # the current shell (a pipe would lose them in a subshell).
  local old_file filename
  while IFS= read -r -d '' old_file; do
    filename=$(basename "$old_file")
    if rm "$old_file" 2>/dev/null; then
      # x=$((x+1)) instead of ((x++)): the latter returns status 1 when the
      # pre-increment value is 0, which would trip 'set -e'.
      cleanup_count=$((cleanup_count + 1))
      log_info "Removed old log: $filename"
    else
      error_count=$((error_count + 1))
      log_warning "Failed to remove old log: $filename"
    fi
  done < <(find "$LOCAL_LOG_ROOT" -name "*.log" -mtime +30 -print0 2>/dev/null)

  local cleanup_end_time cleanup_duration
  cleanup_end_time=$(date +%s)
  cleanup_duration=$((cleanup_end_time - cleanup_start_time))
  if [ "$cleanup_count" -gt 0 ]; then
    log_success "Cleanup completed: $cleanup_count items removed in ${cleanup_duration}s"
  else
    log_info "Cleanup completed: no old items found to remove in ${cleanup_duration}s"
  fi
  return "$error_count"
}
# Check dependencies
check_dependencies() {
local missing_deps=()
@@ -71,10 +160,6 @@ check_dependencies() {
missing_deps+=("tar")
fi
if ! command -v jq >/dev/null 2>&1; then
missing_deps+=("jq")
fi
if ! command -v find >/dev/null 2>&1; then
missing_deps+=("find")
fi
@@ -279,34 +364,9 @@ check_backup_freshness() {
# Validate JSON log file
# Stubbed: the JSON tracking log was removed from the backup system, so
# there is nothing left to validate. Kept as a no-op for callers that
# still invoke it from the validation flow.
# NOTE(review): the source span interleaved the removed pre-refactor body
# (JSON_LOG_FILE existence/syntax checks via jq) with this stub — the old
# checks referenced a file the system no longer writes and are dropped.
validate_json_log() {
  log_info "Validating JSON log file..."
  # This function has been removed as the JSON log file is no longer used
  # The plex backup system only uses the performance log file
  log_info "JSON log validation skipped (feature removed)"
  return 0
}
@@ -440,14 +500,6 @@ generate_report() {
fix_issues() {
log_info "Attempting to fix common issues..."
# Fix JSON log file
if [ ! -f "$JSON_LOG_FILE" ] || ! jq empty "$JSON_LOG_FILE" 2>/dev/null; then
log_info "Fixing JSON log file..."
mkdir -p "$(dirname "$JSON_LOG_FILE")"
echo "{}" > "$JSON_LOG_FILE"
log_success "JSON log file created/fixed"
fi
# Create corrupted backups directory
local corrupted_dir="$(dirname "$REPORT_FILE")/corrupted-backups"
mkdir -p "$corrupted_dir"
@@ -604,6 +656,11 @@ main() {
echo "Use --report for a detailed backup analysis"
fi
# Sync logs to shared location and cleanup old local logs
log_info "Post-validation: synchronizing logs and cleaning up old files"
sync_logs_to_shared
cleanup_old_local_logs
exit $overall_status
}