#!/bin/bash

################################################################################
# Backup JSON Logger Library
################################################################################
#
# Author: Peter Wood <peter@peterwood.dev>
# Description: Reusable JSON logging system for backup scripts to generate
#              real-time metrics and status updates during backup operations.
#
# Features:
#   - Real-time JSON metrics generation during backup operations
#   - Standardized JSON structure across all backup services
#   - Runtime metrics tracking (start time, duration, status, etc.)
#   - Progress tracking with file-by-file updates
#   - Error handling and recovery state tracking
#   - Web application compatible JSON format
#
# Usage:
#   source /home/acedanger/shell/lib/backup-json-logger.sh
#
#   # Initialize backup session
#   json_backup_init "plex" "/mnt/share/media/backups/plex"
#
#   # Update status during backup
#   json_backup_start
#   json_backup_add_file "/path/to/file" "success" "1024" "abc123"
#   json_backup_complete "success"
#
################################################################################

# Global configuration
JSON_METRICS_ROOT="${BACKUP_ROOT:-/mnt/share/media/backups}/metrics"
JSON_LOGGER_DEBUG="${JSON_LOGGER_DEBUG:-false}"

# JSON logger internal variables
declare -g JSON_BACKUP_SERVICE=""
declare -g JSON_BACKUP_PATH=""
declare -g JSON_BACKUP_SESSION_ID=""
declare -g JSON_BACKUP_START_TIME=""
declare -g JSON_BACKUP_LOG_FILE=""
declare -g JSON_BACKUP_METRICS_FILE=""
declare -g JSON_BACKUP_TEMP_DIR=""
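
# Example (illustrative): both variables above can be overridden from the
# environment before the library is sourced. The /srv/backups path is a
# made-up value for demonstration.
#   BACKUP_ROOT=/srv/backups JSON_LOGGER_DEBUG=true source ./backup-json-logger.sh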

# Logging function for debug messages
json_log_debug() {
    if [ "$JSON_LOGGER_DEBUG" = "true" ]; then
        echo "[JSON-LOGGER] $1" >&2
    fi
}

# Initialize JSON logging for a backup session
json_backup_init() {
    local service_name="$1"
    local backup_path="$2"
    local custom_session_id="$3"

    if [ -z "$service_name" ] || [ -z "$backup_path" ]; then
        echo "Error: json_backup_init requires service_name and backup_path" >&2
        return 1
    fi

    # Set global variables
    JSON_BACKUP_SERVICE="$service_name"
    JSON_BACKUP_PATH="$backup_path"
    JSON_BACKUP_SESSION_ID="${custom_session_id:-$(date +%Y%m%d_%H%M%S)}"
    JSON_BACKUP_START_TIME=$(date +%s)

    # Create metrics directory structure
    local service_metrics_dir="$JSON_METRICS_ROOT/$service_name"
    mkdir -p "$service_metrics_dir"

    # Create temporary directory for this session
    JSON_BACKUP_TEMP_DIR="$service_metrics_dir/.tmp_${JSON_BACKUP_SESSION_ID}"
    mkdir -p "$JSON_BACKUP_TEMP_DIR"

    # Set file paths
    JSON_BACKUP_LOG_FILE="$JSON_BACKUP_TEMP_DIR/backup_session.json"
    JSON_BACKUP_METRICS_FILE="$service_metrics_dir/metrics.json"

    json_log_debug "Initialized JSON logging for $service_name (session: $JSON_BACKUP_SESSION_ID)"

    # Create initial session file
    json_create_initial_session

    return 0
}
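
# Example (illustrative): pass an externally generated session id instead of
# letting the library derive one from the timestamp. "nightly_001" is a
# made-up value.
#   json_backup_init "plex" "/mnt/share/media/backups/plex" "nightly_001"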

# Create initial backup session JSON structure
json_create_initial_session() {
    local session_data
    session_data=$(jq -n \
        --arg service "$JSON_BACKUP_SERVICE" \
        --arg session_id "$JSON_BACKUP_SESSION_ID" \
        --arg backup_path "$JSON_BACKUP_PATH" \
        --argjson start_time "$JSON_BACKUP_START_TIME" \
        --arg start_iso "$(date -d "@$JSON_BACKUP_START_TIME" --iso-8601=seconds)" \
        --arg status "initialized" \
        --arg hostname "$(hostname)" \
        '{
            service_name: $service,
            session_id: $session_id,
            backup_path: $backup_path,
            hostname: $hostname,
            status: $status,
            start_time: {
                epoch: $start_time,
                iso: $start_iso
            },
            end_time: null,
            duration_seconds: null,
            files: [],
            summary: {
                total_files: 0,
                successful_files: 0,
                failed_files: 0,
                total_size_bytes: 0,
                errors: []
            },
            performance: {
                backup_phase_duration: null,
                verification_phase_duration: null,
                compression_phase_duration: null,
                cleanup_phase_duration: null
            },
            metadata: {
                script_version: "1.0",
                json_logger_version: "1.0",
                last_updated: $start_iso
            }
        }')

    echo "$session_data" > "$JSON_BACKUP_LOG_FILE"
    json_log_debug "Created initial session file: $JSON_BACKUP_LOG_FILE"
}

# Update backup status
json_backup_update_status() {
    local new_status="$1"
    local error_message="$2"

    if [ ! -f "$JSON_BACKUP_LOG_FILE" ]; then
        json_log_debug "Warning: Session file not found, cannot update status"
        return 1
    fi

    local updated_session
    local current_iso
    current_iso=$(date --iso-8601=seconds)

    # Build the jq filter based on whether we have an error message
    if [ -n "$error_message" ]; then
        updated_session=$(jq \
            --arg status "$new_status" \
            --arg error "$error_message" \
            --arg updated "$current_iso" \
            '.status = $status | .summary.errors += [$error] | .metadata.last_updated = $updated' \
            "$JSON_BACKUP_LOG_FILE")
    else
        updated_session=$(jq \
            --arg status "$new_status" \
            --arg updated "$current_iso" \
            '.status = $status | .metadata.last_updated = $updated' \
            "$JSON_BACKUP_LOG_FILE")
    fi

    echo "$updated_session" > "$JSON_BACKUP_LOG_FILE"
    json_log_debug "Updated status to: $new_status"

    # Update the main metrics file
    json_update_main_metrics
}
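
# Example (illustrative): record a recoverable problem without failing the
# run. The message is a made-up value; it lands in .summary.errors while
# the status stays "running".
#   json_backup_update_status "running" "rsync retried after timeout"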

# Mark backup as started
json_backup_start() {
    json_backup_update_status "running"
}

# Add a file to the backup session
json_backup_add_file() {
    local file_path="$1"
    local status="$2"        # "success", "failed", "skipped"
    local size_bytes="$3"    # File size in bytes
    local checksum="$4"      # Optional checksum
    local error_message="$5" # Optional error message

    if [ ! -f "$JSON_BACKUP_LOG_FILE" ]; then
        json_log_debug "Warning: Session file not found, cannot add file"
        return 1
    fi

    # Get file metadata
    local filename
    filename=$(basename "$file_path")
    local modified_time=""
    local modified_iso=""

    if [ -f "$file_path" ]; then
        modified_time=$(stat -c%Y "$file_path" 2>/dev/null || echo "0")
        modified_iso=$(date -d "@$modified_time" --iso-8601=seconds 2>/dev/null || echo "")
    fi

    # Create file entry
    local file_entry
    file_entry=$(jq -n \
        --arg path "$file_path" \
        --arg filename "$filename" \
        --arg status "$status" \
        --argjson size_bytes "${size_bytes:-0}" \
        --arg checksum "${checksum:-}" \
        --argjson modified_time "${modified_time:-0}" \
        --arg modified_iso "$modified_iso" \
        --arg processed_at "$(date --iso-8601=seconds)" \
        --arg error_message "${error_message:-}" \
        '{
            path: $path,
            filename: $filename,
            status: $status,
            size_bytes: $size_bytes,
            size_human: (if $size_bytes > 0 then (($size_bytes / 1048576 | tostring) + "MB") else "0B" end),
            checksum: $checksum,
            modified_time: {
                epoch: $modified_time,
                iso: $modified_iso
            },
            processed_at: $processed_at,
            error_message: (if $error_message != "" then $error_message else null end)
        }')

    # Add file to session and update summary
    local updated_session
    updated_session=$(jq \
        --argjson file_entry "$file_entry" \
        --arg current_time "$(date --iso-8601=seconds)" \
        '
        .files += [$file_entry] |
        .summary.total_files += 1 |
        (if $file_entry.status == "success" then .summary.successful_files += 1 else . end) |
        (if $file_entry.status == "failed" then .summary.failed_files += 1 else . end) |
        .summary.total_size_bytes += $file_entry.size_bytes |
        .metadata.last_updated = $current_time
        ' \
        "$JSON_BACKUP_LOG_FILE")

    echo "$updated_session" > "$JSON_BACKUP_LOG_FILE"
    json_log_debug "Added file: $filename ($status)"

    # Update the main metrics file
    json_update_main_metrics
}
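
# Example (illustrative): record a file after copying it. The path is made
# up; stat and md5sum simply show one way a caller might obtain the size
# and checksum arguments.
#   size=$(stat -c%s "/backups/plex/db.tar.gz")
#   sum=$(md5sum "/backups/plex/db.tar.gz" | awk '{print $1}')
#   json_backup_add_file "/backups/plex/db.tar.gz" "success" "$size" "$sum"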

# Record performance phase timing
json_backup_record_phase() {
    local phase_name="$1"        # "backup", "verification", "compression", "cleanup"
    local duration_seconds="$2"  # Duration in seconds

    if [ ! -f "$JSON_BACKUP_LOG_FILE" ]; then
        json_log_debug "Warning: Session file not found, cannot record phase"
        return 1
    fi

    local phase_field="${phase_name}_phase_duration"

    local updated_session
    updated_session=$(jq \
        --arg phase "$phase_field" \
        --argjson duration "$duration_seconds" \
        --arg updated "$(date --iso-8601=seconds)" \
        '.performance[$phase] = $duration | .metadata.last_updated = $updated' \
        "$JSON_BACKUP_LOG_FILE")

    echo "$updated_session" > "$JSON_BACKUP_LOG_FILE"
    json_log_debug "Recorded $phase_name phase: ${duration_seconds}s"
}
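
# Example (illustrative): record a known phase duration directly, here 42
# seconds for the compression phase.
#   json_backup_record_phase "compression" 42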

# Complete the backup session
json_backup_complete() {
    local final_status="$1"   # "success", "failed", "partial"
    local final_message="$2"  # Optional completion message

    if [ ! -f "$JSON_BACKUP_LOG_FILE" ]; then
        json_log_debug "Warning: Session file not found, cannot complete"
        return 1
    fi

    local end_time
    end_time=$(date +%s)
    local end_iso
    end_iso=$(date --iso-8601=seconds)
    local duration
    duration=$((end_time - JSON_BACKUP_START_TIME))

    # Complete the session
    local completed_session
    if [ -n "$final_message" ]; then
        completed_session=$(jq \
            --arg status "$final_status" \
            --argjson end_time "$end_time" \
            --arg end_iso "$end_iso" \
            --argjson duration "$duration" \
            --arg message "$final_message" \
            --arg updated "$end_iso" \
            '
            .status = $status |
            .end_time = {epoch: $end_time, iso: $end_iso} |
            .duration_seconds = $duration |
            .completion_message = $message |
            .metadata.last_updated = $updated
            ' \
            "$JSON_BACKUP_LOG_FILE")
    else
        completed_session=$(jq \
            --arg status "$final_status" \
            --argjson end_time "$end_time" \
            --arg end_iso "$end_iso" \
            --argjson duration "$duration" \
            --arg updated "$end_iso" \
            '
            .status = $status |
            .end_time = {epoch: $end_time, iso: $end_iso} |
            .duration_seconds = $duration |
            .metadata.last_updated = $updated
            ' \
            "$JSON_BACKUP_LOG_FILE")
    fi

    echo "$completed_session" > "$JSON_BACKUP_LOG_FILE"
    json_log_debug "Completed backup session: $final_status (${duration}s)"

    # Final update to main metrics
    json_update_main_metrics

    # Archive session to history
    json_archive_session

    # Cleanup temporary directory
    json_cleanup_session
}
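
# Example (illustrative): finish a run in which some files failed. The
# message text is a made-up value.
#   json_backup_complete "partial" "2 of 14 files failed verification"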

# Update the main metrics.json file
json_update_main_metrics() {
    if [ ! -f "$JSON_BACKUP_LOG_FILE" ]; then
        return 1
    fi

    # Read current session data
    local session_data
    session_data=$(cat "$JSON_BACKUP_LOG_FILE")

    # Get latest backup info (most recently processed successful file)
    local latest_backup
    latest_backup=$(echo "$session_data" | jq '
        .files |
        map(select(.status == "success")) |
        sort_by(.processed_at) |
        last // {}
    ')

    # Create current metrics
    local current_metrics
    current_metrics=$(echo "$session_data" | jq \
        --argjson latest_backup "$latest_backup" \
        '{
            service_name: .service_name,
            backup_path: .backup_path,
            current_session: {
                session_id: .session_id,
                status: .status,
                start_time: .start_time,
                end_time: .end_time,
                duration_seconds: .duration_seconds,
                files_processed: .summary.total_files,
                files_successful: .summary.successful_files,
                files_failed: .summary.failed_files,
                total_size_bytes: .summary.total_size_bytes,
                total_size_human: (if .summary.total_size_bytes > 0 then ((.summary.total_size_bytes / 1048576 | tostring) + "MB") else "0B" end),
                errors: .summary.errors,
                performance: .performance
            },
            latest_backup: $latest_backup,
            generated_at: .metadata.last_updated
        }')

    # Write to main metrics file
    echo "$current_metrics" > "$JSON_BACKUP_METRICS_FILE"
    json_log_debug "Updated main metrics file"
}

# Archive completed session to history
json_archive_session() {
    if [ ! -f "$JSON_BACKUP_LOG_FILE" ]; then
        return 1
    fi

    local service_metrics_dir
    service_metrics_dir=$(dirname "$JSON_BACKUP_METRICS_FILE")
    local history_file="$service_metrics_dir/history.json"

    # Read current session
    local session_data
    session_data=$(cat "$JSON_BACKUP_LOG_FILE")

    # Initialize history file if it doesn't exist (build it with jq so the
    # service name is properly JSON-escaped)
    if [ ! -f "$history_file" ]; then
        jq -n --arg service "$JSON_BACKUP_SERVICE" \
            '{service_name: $service, sessions: []}' > "$history_file"
    fi

    # Add session to history, newest first
    local updated_history
    updated_history=$(jq \
        --argjson session "$session_data" \
        '.sessions += [$session] | .sessions |= sort_by(.start_time.epoch) | .sessions |= reverse' \
        "$history_file")

    echo "$updated_history" > "$history_file"
    json_log_debug "Archived session to history"
}

# Cleanup session temporary files
json_cleanup_session() {
    if [ -d "$JSON_BACKUP_TEMP_DIR" ]; then
        rm -rf "$JSON_BACKUP_TEMP_DIR"
        json_log_debug "Cleaned up temporary session directory"
    fi
}

# Get current backup status (for external monitoring)
json_get_current_status() {
    local service_name="$1"

    if [ -z "$service_name" ]; then
        echo "Error: Service name required" >&2
        return 1
    fi

    local metrics_file="$JSON_METRICS_ROOT/$service_name/metrics.json"

    if [ -f "$metrics_file" ]; then
        cat "$metrics_file"
    else
        # Build the error object with jq so the service name is properly escaped
        jq -n --arg service "$service_name" \
            '{error: ("No metrics found for service: " + $service)}'
    fi
}
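
# Example (illustrative): poll a service's status from a monitoring script.
# .current_session.status is the field written by json_update_main_metrics.
#   json_get_current_status "plex" | jq -r '.current_session.status'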

# Helper function to track phase timing
json_backup_time_phase() {
    local phase_name="$1"
    local start_time="$2"

    if [ -z "$start_time" ]; then
        echo "Error: Start time required for phase timing" >&2
        return 1
    fi

    local end_time
    end_time=$(date +%s)
    local duration
    duration=$((end_time - start_time))

    json_backup_record_phase "$phase_name" "$duration"
}
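
# Example (illustrative): capture the epoch before a phase, then let the
# helper compute and record the elapsed time. do_compression stands in for
# the caller's own work.
#   phase_start=$(date +%s)
#   do_compression
#   json_backup_time_phase "compression" "$phase_start"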

# Convenience function for error handling
json_backup_error() {
    local error_message="$1"
    local file_path="$2"

    if [ -n "$file_path" ]; then
        json_backup_add_file "$file_path" "failed" "0" "" "$error_message"
    else
        json_backup_update_status "failed" "$error_message"
    fi
}
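
# Example (illustrative): report a per-file failure versus a session-level
# one. The path and messages are made-up values.
#   json_backup_error "checksum mismatch" "/backups/plex/db.tar.gz"  # file-level
#   json_backup_error "destination unreachable"                      # session-level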

# Export all functions for use in other scripts
export -f json_backup_init
export -f json_backup_start
export -f json_backup_add_file
export -f json_backup_record_phase
export -f json_backup_complete
export -f json_backup_update_status
export -f json_backup_error
export -f json_backup_time_phase
export -f json_get_current_status
export -f json_log_debug

json_log_debug "Backup JSON Logger library loaded"