feat: Enhance backup script documentation and workflow for shared storage integration

Peter Wood
2025-06-03 12:35:08 -04:00
parent 4ebf1ca87d
commit 48e51fa094
2 changed files with 254 additions and 59 deletions


@@ -63,7 +63,7 @@ USAGE:
$(basename "$0") [OPTIONS]
OPTIONS:
--help, -h Show this help message and exit
--dry-run Show what would be backed up without performing actual backup
--no-upload Skip B2 upload (local backup only)
--verbose Enable verbose logging
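For illustration, assuming the script is saved as backup.sh (the actual filename may differ), typical invocations of the options above would look like:

    ./backup.sh --dry-run     # preview the full workflow without creating or modifying files
    ./backup.sh --no-upload   # real backup, but skip the B2 upload
    ./backup.sh --verbose     # full backup with verbose logging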
@@ -71,7 +71,7 @@ OPTIONS:
CONFIGURATION:
This script requires a .env file in the parent directory with:
- DB_USERNAME=<database_username>
- DB_DATABASE_NAME=<database_name>
- UPLOAD_LOCATION=<path_to_upload_directory>
OPTIONAL B2 CONFIGURATION:
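A minimal .env sketch with placeholder values (illustrative only; the optional variable names match the ones referenced later in this script, and it is assumed here that WEBHOOK_URL is read from the same file):

    DB_USERNAME=immich
    DB_DATABASE_NAME=immich
    UPLOAD_LOCATION=/path/to/immich/upload
    # Optional: Backblaze B2 upload (leave unset to skip)
    B2_APPLICATION_KEY_ID=<key_id>
    B2_APPLICATION_KEY=<application_key>
    B2_BUCKET_NAME=<bucket_name>
    # Optional: webhook notifications
    WEBHOOK_URL=<webhook_url>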
@@ -134,19 +134,19 @@ send_notification() {
local message="$2"
local status="${3:-info}" # success, error, warning, info
local hostname=$(hostname)
# Console notification
log_message "$title: $message"
# Webhook notification
if [ -n "$WEBHOOK_URL" ]; then
local tags="backup,immich,${hostname}"
[ "$status" == "error" ] && tags="${tags},errors"
[ "$status" == "warning" ] && tags="${tags},warnings"
# Clean message without newlines or timestamps for webhook
local webhook_message="$message"
curl -s \
-H "tags:${tags}" \
-d "$webhook_message" \
@@ -158,27 +158,27 @@ send_notification() {
upload_to_b2() {
local file_path="$1"
local filename=$(basename "$file_path")
# Check if B2 is configured
if [ -z "$B2_APPLICATION_KEY_ID" ] || [ -z "$B2_APPLICATION_KEY" ] || [ -z "$B2_BUCKET_NAME" ]; then
log_message "B2 upload skipped: B2 credentials not configured in .env file"
return 0
fi
# Check if B2 CLI exists
if [ ! -f "$B2_CLI" ]; then
log_message "Error: B2 CLI not found at $B2_CLI"
return 1
fi
log_message "Uploading $filename to B2 bucket: $B2_BUCKET_NAME"
# Authorize B2 account
if ! "$B2_CLI" authorize-account "$B2_APPLICATION_KEY_ID" "$B2_APPLICATION_KEY" 2>/dev/null; then
log_message "Error: Failed to authorize B2 account"
return 1
fi
# Upload file to B2
if "$B2_CLI" upload-file "$B2_BUCKET_NAME" "$file_path" "immich-backups/$filename" 2>/dev/null; then
log_message "✅ Successfully uploaded $filename to B2"
@@ -233,7 +233,13 @@ if [ "$DRY_RUN" = true ]; then
echo " - Database backup: ${DB_BACKUP_PATH}.gz"
echo " - Upload backup: ${UPLOAD_BACKUP_PATH}"
echo ""
echo "Workflow:"
echo " 1. Create local backups"
echo " 2. Copy to shared storage: /mnt/share/media/backups/immich/"
echo " 3. Upload to B2 (if configured)"
echo " 4. Delete local backups (keep shared copies)"
echo ""
# Check container status in dry-run
echo "Container Status Check:"
if docker ps -q --filter "name=immich_server" | grep -q .; then
@@ -241,14 +247,14 @@ if [ "$DRY_RUN" = true ]; then
else
echo " ! immich_server: Not running or not found"
fi
if docker ps -q --filter "name=immich_postgres" | grep -q .; then
echo " ✓ immich_postgres: Running"
else
echo " ✗ immich_postgres: Not running - backup would fail!"
exit 1
fi
# Check upload directory
if [ -d "${UPLOAD_LOCATION}" ]; then
UPLOAD_SIZE=$(du -sh "${UPLOAD_LOCATION}" 2>/dev/null | cut -f1 || echo "unknown")
@@ -257,7 +263,7 @@ if [ "$DRY_RUN" = true ]; then
echo " ✗ Upload directory: ${UPLOAD_LOCATION} does not exist - backup would fail!"
exit 1
fi
# Check B2 configuration
echo ""
echo "B2 Upload Configuration:"
@@ -273,7 +279,22 @@ if [ "$DRY_RUN" = true ]; then
else
echo " ! B2 not configured - would skip upload"
fi
# Check shared storage directory
echo ""
echo "Shared Storage Check:"
if [ -d "/mnt/share/media/backups" ]; then
echo " ✓ Shared storage accessible: /mnt/share/media/backups"
if [ -w "/mnt/share/media/backups" ]; then
echo " ✓ Shared storage writable - would copy backups before B2 upload"
else
echo " ⚠ Shared storage not writable - backups would remain in ${BACKUP_DIR}"
fi
else
echo " ⚠ Shared storage not accessible: /mnt/share/media/backups"
echo " Backups would remain in ${BACKUP_DIR}"
fi
echo ""
echo "=== DRY RUN COMPLETE - No files were created or modified ==="
exit 0
@@ -319,20 +340,6 @@ if ! docker ps -q --filter "name=immich_postgres" | grep -q .; then
exit 1
fi
# Check if the Immich server container exists and is running
log_status "Checking immich_server container status..."
if docker ps -q --filter "name=immich_server" | grep -q .; then
log_message "Pausing immich_server container to minimize database writes during backup..."
if ! docker pause immich_server; then
log_message "Failed to pause immich_server container."
# Continue with backup instead of exiting
fi
else
log_message "Note: immich_server container not found or not running. Continuing with backup anyway."
# Set a flag so we don't try to unpause it later
IMMICH_SERVER_RUNNING=false
fi
echo ""
echo "=== PHASE 1: DATABASE BACKUP ==="
log_message "Taking database backup using pg_dumpall as recommended by Immich documentation..."
@@ -393,23 +400,65 @@ if [ "${IMMICH_SERVER_RUNNING:-true}" = true ]; then
fi
fi
# Resume the Immich server only if it was running and we paused it
if [ "${IMMICH_SERVER_RUNNING:-true}" = true ]; then
log_status "Resuming immich_server container..."
if ! docker unpause immich_server 2>/dev/null; then
log_message "Note: No need to unpause immich_server container."
echo ""
echo "=== COPYING BACKUPS TO SHARED STORAGE ==="
SHARED_BACKUP_DIR="/mnt/share/media/backups/immich"
# Initialize COPY_SUCCESS before use
COPY_SUCCESS=false
# Create shared backup directory if it doesn't exist
if ! mkdir -p "$SHARED_BACKUP_DIR"; then
log_message "Warning: Failed to create shared backup directory: $SHARED_BACKUP_DIR"
log_message "Backup files remain only in: $BACKUP_DIR"
COPY_SUCCESS=false
else
log_message "Copying backup files to shared storage: $SHARED_BACKUP_DIR"
COPY_SUCCESS=true
# Copy database backup
if [ -f "${DB_BACKUP_PATH}.gz" ]; then
if cp "${DB_BACKUP_PATH}.gz" "$SHARED_BACKUP_DIR/"; then
log_message "✅ Copied database backup to shared storage"
else
log_message "❌ Failed to copy database backup to shared storage"
COPY_SUCCESS=false
fi
fi
# Copy uploads backup
if [ -f "${UPLOAD_BACKUP_PATH}" ]; then
if cp "${UPLOAD_BACKUP_PATH}" "$SHARED_BACKUP_DIR/"; then
log_message "✅ Copied uploads backup to shared storage"
else
log_message "❌ Failed to copy uploads backup to shared storage"
COPY_SUCCESS=false
fi
fi
if [ "$COPY_SUCCESS" = true ]; then
log_message "All backup files successfully copied to shared storage"
else
log_message "Some backup files failed to copy to shared storage"
fi
fi
echo ""
echo "=== BACKUP COMPLETED SUCCESSFULLY! ==="
echo "Database backup saved to: ${DB_BACKUP_PATH}.gz"
echo "Upload directory backup saved to: ${UPLOAD_BACKUP_PATH}"
# Calculate backup sizes
DB_BACKUP_SIZE=$(du -h "${DB_BACKUP_PATH}.gz" 2>/dev/null | cut -f1 || echo "Unknown")
UPLOAD_BACKUP_SIZE=$(du -h "${UPLOAD_BACKUP_PATH}" 2>/dev/null | cut -f1 || echo "Unknown")
echo ""
echo "=== BACKUP COMPLETED SUCCESSFULLY! ==="
if [ "$COPY_SUCCESS" = true ]; then
echo "Database backup saved to: ${SHARED_BACKUP_DIR}/$(basename "${DB_BACKUP_PATH}.gz")"
echo "Upload directory backup saved to: ${SHARED_BACKUP_DIR}/$(basename "${UPLOAD_BACKUP_PATH}")"
echo "(Local backup files have been cleaned up)"
else
echo "Database backup saved to: ${DB_BACKUP_PATH}.gz"
echo "Upload directory backup saved to: ${UPLOAD_BACKUP_PATH}"
echo "(Local backup files retained due to copy failure)"
fi
echo ""
echo "=== BACKUP SUMMARY ==="
echo "Database backup size: ${DB_BACKUP_SIZE}"
@@ -425,23 +474,70 @@ else
echo "=== UPLOADING TO BACKBLAZE B2 ==="
B2_UPLOAD_SUCCESS=true
# Upload database backup
# Upload database backup from local location
if ! upload_to_b2 "${DB_BACKUP_PATH}.gz"; then
B2_UPLOAD_SUCCESS=false
fi
# Upload uploads backup
# Upload uploads backup from local location
if ! upload_to_b2 "${UPLOAD_BACKUP_PATH}"; then
B2_UPLOAD_SUCCESS=false
fi
fi
# Clean up local backup files after successful copy and B2 upload
echo ""
echo "=== CLEANING UP LOCAL BACKUPS ==="
if [ "$COPY_SUCCESS" = true ]; then
log_message "Removing local backup files (copies exist in shared storage)..."
# Remove local database backup
if [ -f "${DB_BACKUP_PATH}.gz" ]; then
if rm "${DB_BACKUP_PATH}.gz"; then
log_message "✅ Removed local database backup"
else
log_message "⚠ Failed to remove local database backup: ${DB_BACKUP_PATH}.gz"
fi
fi
# Remove local uploads backup
if [ -f "${UPLOAD_BACKUP_PATH}" ]; then
if rm "${UPLOAD_BACKUP_PATH}"; then
log_message "✅ Removed local uploads backup"
else
log_message "⚠ Failed to remove local uploads backup: ${UPLOAD_BACKUP_PATH}"
fi
fi
log_message "Local cleanup completed - backups are now in shared storage"
# Store original paths for health checks before updating
ORIGINAL_DB_PATH="${DB_BACKUP_PATH}.gz"
ORIGINAL_UPLOAD_PATH="${UPLOAD_BACKUP_PATH}"
else
log_message "Skipping local cleanup - copy to shared storage failed"
log_message "Local backup files retained in: $BACKUP_DIR"
# Keep original paths for health checks
ORIGINAL_DB_PATH="${DB_BACKUP_PATH}.gz"
ORIGINAL_UPLOAD_PATH="${UPLOAD_BACKUP_PATH}"
fi
# Prepare notification message
DB_FILENAME=$(basename "${DB_BACKUP_PATH}.gz")
DB_FILENAME=$(basename "${DB_BACKUP_PATH}" .gz)
UPLOAD_FILENAME=$(basename "${UPLOAD_BACKUP_PATH}")
NOTIFICATION_MESSAGE="📦 Database: ${DB_FILENAME} (${DB_BACKUP_SIZE})
📁 Uploads: ${UPLOAD_FILENAME} (${UPLOAD_BACKUP_SIZE})"
# Add storage location info to notification
if [ "$COPY_SUCCESS" = true ]; then
NOTIFICATION_MESSAGE="${NOTIFICATION_MESSAGE}
💾 Stored in: ${SHARED_BACKUP_DIR} (local files cleaned up)"
else
NOTIFICATION_MESSAGE="${NOTIFICATION_MESSAGE}
💾 Located in: ${BACKUP_DIR} (copy to shared storage failed)"
fi
if [ "$B2_UPLOAD_SUCCESS" = "skipped" ]; then
NOTIFICATION_MESSAGE="${NOTIFICATION_MESSAGE}
💾 Local backup only (B2 upload skipped)"
@@ -461,17 +557,30 @@ fi
# Show backup information
echo ""
echo "=== BACKUP INVENTORY ==="
find "${BACKUP_DIR}" -name "*.gz" | wc -l | xargs echo "Total number of backup files:"
du -sh "${BACKUP_DIR}" | cut -f1 | xargs echo "Total backup directory size:"
if [ "$COPY_SUCCESS" = true ]; then
INVENTORY_DIR="$SHARED_BACKUP_DIR"
echo "Backup location: $SHARED_BACKUP_DIR"
else
INVENTORY_DIR="$BACKUP_DIR"
echo "Backup location: $BACKUP_DIR"
fi
find "${INVENTORY_DIR}" -name "*.gz" -o -name "*.tar.gz" | wc -l | xargs echo "Total number of backup files:"
du -sh "${INVENTORY_DIR}" | cut -f1 | xargs echo "Total backup directory size:"
# List recent backups
echo ""
echo "Recent backups:"
find "${BACKUP_DIR}" -name "*.gz" -mtime -7 | sort
find "${INVENTORY_DIR}" -name "*.gz" -o -name "*.tar.gz" -mtime -7 | sort
# Health check: Verify backup file sizes
DB_BACKUP_SIZE_KB=$(du -k "${DB_BACKUP_PATH}.gz" 2>/dev/null | cut -f1 || echo "0")
UPLOAD_BACKUP_SIZE_KB=$(du -k "${UPLOAD_BACKUP_PATH}" 2>/dev/null | cut -f1 || echo "0")
if [ "$COPY_SUCCESS" = true ]; then
DB_BACKUP_SIZE_KB=$(du -k "${SHARED_BACKUP_DIR}/$(basename "${ORIGINAL_DB_PATH}")" 2>/dev/null | cut -f1 || echo "0")
UPLOAD_BACKUP_SIZE_KB=$(du -k "${SHARED_BACKUP_DIR}/$(basename "${ORIGINAL_UPLOAD_PATH}")" 2>/dev/null | cut -f1 || echo "0")
else
DB_BACKUP_SIZE_KB=$(du -k "${ORIGINAL_DB_PATH}" 2>/dev/null | cut -f1 || echo "0")
UPLOAD_BACKUP_SIZE_KB=$(du -k "${ORIGINAL_UPLOAD_PATH}" 2>/dev/null | cut -f1 || echo "0")
fi
echo ""
echo "=== BACKUP VALIDATION ==="
@@ -490,21 +599,29 @@ fi
# Optional: Remove old backups (older than 30 days)
echo ""
echo "=== CLEANUP ==="
OLD_BACKUPS=$(find "${BACKUP_DIR}" -name "*.gz" -mtime +30 | wc -l)
OLD_BACKUPS=$(find "${INVENTORY_DIR}" -name "*.gz" -o -name "*.tar.gz" -mtime +30 | wc -l)
if [ "${OLD_BACKUPS}" -gt 0 ]; then
echo "Found ${OLD_BACKUPS} backup files older than 30 days."
echo "Found ${OLD_BACKUPS} backup files older than 30 days in ${INVENTORY_DIR}."
echo "To remove them automatically, uncomment the cleanup line in this script."
# Uncomment the next line to automatically remove old backups
# find "${BACKUP_DIR}" -name "*.gz" -mtime +30 -delete
# find "${INVENTORY_DIR}" -name "*.gz" -o -name "*.tar.gz" -mtime +30 -delete
else
echo "No old backup files found (older than 30 days)."
echo "No old backup files found (older than 30 days) in ${INVENTORY_DIR}."
fi
echo ""
echo "=== RESTORE INSTRUCTIONS ==="
echo "To restore from this backup:"
echo "1. Database restore instructions: https://immich.app/docs/administration/backup-and-restore/#database"
echo "2. Upload directory: Extract ${UPLOAD_BACKUP_PATH} to your UPLOAD_LOCATION"
if [ "$COPY_SUCCESS" = true ]; then
echo "2. Upload directory: Extract ${SHARED_BACKUP_DIR}/$(basename "${UPLOAD_BACKUP_PATH}") to your UPLOAD_LOCATION"
echo ""
echo "Backup files are located in: ${SHARED_BACKUP_DIR}"
else
echo "2. Upload directory: Extract ${UPLOAD_BACKUP_PATH} to your UPLOAD_LOCATION"
echo ""
echo "Backup files are located in: ${BACKUP_DIR}"
fi
echo ""
echo "IMPORTANT: For a complete restore, you need BOTH the database backup AND the upload directory backup."
echo "The database contains metadata, while the upload directory contains your actual photos and videos."