#!/bin/bash

# Log ANSI Color Code Cleanup Utility
# This script removes ANSI color codes from log files to improve readability
# and reduce file sizes when viewing logs in editors or processing them with tools.
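# For example, a line stored as "\033[0;32mSUCCESS\033[0m: done" (either as
# literal text or as real escape bytes) is rewritten to plain "SUCCESS: done".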

set -e

# Color codes for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
CYAN='\033[0;36m'
NC='\033[0m' # No Color

SCRIPT_DIR="$(dirname "$(readlink -f "$0")")"
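# Backup copies are written next to the original file as "<file>.ansi-backup"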
BACKUP_SUFFIX=".ansi-backup"

# Function to display usage
show_usage() {
    echo -e "${CYAN}Log ANSI Cleanup Utility${NC}"
    echo
    echo "Usage: $0 [OPTIONS] [FILE|DIRECTORY]..."
    echo
    echo "Options:"
    echo "  -h, --help             Show this help message"
    echo "  -r, --recursive        Process directories recursively"
    echo "  -b, --backup           Create backup files before cleaning (recommended)"
    echo "  -n, --no-backup        Don't create backup files (use with caution)"
    echo "  -v, --verbose          Show detailed output"
    echo "  -d, --dry-run          Show what would be done without making changes"
    echo "  -f, --filter PATTERN   Only process files matching pattern (e.g., '*.log')"
    echo "  -a, --auto-discover    Find common log files automatically"
    echo
    echo "Examples:"
    echo "  $0 --backup /var/log/app.log"
    echo "  $0 --recursive --backup /var/log/"
    echo "  $0 --auto-discover --backup"
    echo "  $0 --filter '*.log' --recursive /home/user/logs/"
    echo "  $0 --dry-run --auto-discover"
    echo
    echo "Common log locations checked:"
    echo "  - ~/shell/logs/"
    echo "  - ~/shell/crontab/logs/"
    echo "  - ~/shell/plex/logs/"
    echo "  - ~/shell/jellyfin/logs/"
    echo "  - ~/shell/*/logs/ (any subdirectory with a logs folder, excluding .git)"
}

# Function to log messages
log_message() {
    local level="$1"
    local message="$2"
    local color=""

    case "$level" in
        "INFO") color="$BLUE" ;;
        "SUCCESS") color="$GREEN" ;;
        "WARNING") color="$YELLOW" ;;
        "ERROR") color="$RED" ;;
        *) color="$NC" ;;
    esac

    # INFO messages are shown only in verbose mode; everything else always prints
    if [[ "$VERBOSE" == "true" || "$level" != "INFO" ]]; then
        echo -e "${color}${level}: ${message}${NC}"
    fi
}

# Function to check if file contains ANSI codes
has_ansi_codes() {
    local file="$1"
    # Check for both literal "\033[" text and real escape sequences.
    # $'\033' expands (in bash) to the actual ESC byte so grep can match it;
    # a plain \x1b escape is not reliably interpreted inside a basic regex.
    grep -q -e '\\033\[[0-9;]*m' -e $'\033\[[0-9;]*m' "$file" 2>/dev/null
}
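
# Example of what the patterns above match: a literal "\033[0;31m" written as
# plain text, or the real two-byte sequence ESC + "[0;31m" emitted by 'echo -e'.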

# Function to get file size in human readable format
get_file_size() {
    local file="$1"
    if [[ -f "$file" ]]; then
        du -h "$file" | cut -f1
    else
        echo "0B"
    fi
}
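
# Overview of clean_file's safety steps (it is the core of this script):
#   1. Skip files that do not exist, cannot be read, or contain no ANSI codes.
#   2. In dry-run mode, only report what would be cleaned.
#   3. Optionally copy the original to "<file>.ansi-backup" first.
#   4. Write the cleaned output to a temporary file, refuse to replace a
#      non-empty original with an empty result, then move it into place.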

# Function to clean ANSI codes from a file
clean_file() {
    local file="$1"
    local backup_file="${file}${BACKUP_SUFFIX}"

    if [[ ! -f "$file" ]]; then
        log_message "ERROR" "File not found: $file"
        return 1
    fi

    if [[ ! -r "$file" ]]; then
        log_message "ERROR" "Cannot read file: $file"
        return 1
    fi

    # Check if file has ANSI codes
    if ! has_ansi_codes "$file"; then
        log_message "INFO" "No ANSI codes found in: $file"
        return 0
    fi

    local original_size
    original_size=$(get_file_size "$file")

    if [[ "$DRY_RUN" == "true" ]]; then
        log_message "INFO" "Would clean ANSI codes from: $file (${original_size})"
        return 0
    fi

    # Create backup if requested
    if [[ "$CREATE_BACKUP" == "true" ]]; then
        if ! cp "$file" "$backup_file"; then
            log_message "ERROR" "Failed to create backup: $backup_file"
            return 1
        fi
        log_message "INFO" "Backup created: $backup_file"
    fi

    # Create the temp file only once the file will actually be cleaned, so the
    # early returns above do not leave stray mktemp files behind
    local temp_file
    temp_file=$(mktemp)

    # Clean ANSI codes: strip literal "\033[...m" text as well as real escape
    # sequences ($'\033' is the actual ESC byte, embedded into the sed pattern)
    local esc=$'\033'
    if sed -e 's/\\033\[[0-9;]*m//g' \
           -e "s/${esc}\[[0-9;]*m//g" \
           "$file" > "$temp_file"; then

        # Verify the temp file was created successfully
        if [[ -s "$temp_file" || ! -s "$file" ]]; then
            if mv "$temp_file" "$file"; then
                local new_size
                new_size=$(get_file_size "$file")
                log_message "SUCCESS" "Cleaned: $file (${original_size} → ${new_size})"
                # Plain assignment here: ((FILES_CLEANED++)) would return a
                # non-zero status on the first increment and trip 'set -e'
                FILES_CLEANED=$((FILES_CLEANED + 1))
            else
                log_message "ERROR" "Failed to replace original file: $file"
                rm -f "$temp_file"
                return 1
            fi
        else
            log_message "ERROR" "Cleaning resulted in empty file: $file"
            rm -f "$temp_file"
            return 1
        fi
    else
        log_message "ERROR" "Failed to clean ANSI codes from: $file"
        rm -f "$temp_file"
        return 1
    fi
}
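
# auto_discover_logs prints one matching log path per line on stdout and
# returns non-zero when nothing is found. find ... -print0 together with
# 'read -r -d ""' is used so paths containing spaces are handled safely.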

# Function to find common log files automatically
auto_discover_logs() {
    local shell_dir="$HOME/shell"
    local search_paths=(
        "$shell_dir/logs"
        "$shell_dir/crontab/logs"
        "$shell_dir/plex/logs"
        "$shell_dir/jellyfin/logs"
    )

    # Add any other log directories found in ~/shell (excluding .git/logs)
    while IFS= read -r -d '' dir; do
        # Skip .git/logs and already included directories
        if [[ "$dir" != *"/.git/logs" ]] &&
           [[ "$dir" != "$shell_dir/logs" ]] &&
           [[ "$dir" != "$shell_dir/crontab/logs" ]] &&
           [[ "$dir" != "$shell_dir/plex/logs" ]] &&
           [[ "$dir" != "$shell_dir/jellyfin/logs" ]]; then
            search_paths+=("$dir")
        fi
    done < <(find "$shell_dir" -type d -name "logs" -print0 2>/dev/null)

    local found_files=()

    for path in "${search_paths[@]}"; do
        if [[ -d "$path" && -r "$path" ]]; then
            # Find log files with common extensions
            while IFS= read -r -d '' file; do
                if [[ -f "$file" && -r "$file" ]]; then
                    found_files+=("$file")
                fi
            done < <(find "$path" -maxdepth 2 \( -name "*.log" -o -name "*.out" -o -name "*.err" \) -type f -print0 2>/dev/null)
        fi
    done

    if [[ ${#found_files[@]} -eq 0 ]]; then
        return 1
    fi

    for file in "${found_files[@]}"; do
        echo "$file"
    done
}
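
# process_directory assembles a find command from the options, roughly:
#   find "$dir" [-maxdepth 1] -type f [-name "$FILE_PATTERN"] -print0
# (-maxdepth 1 is added only when --recursive was NOT given).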

# Function to process a directory
process_directory() {
    local dir="$1"
    local files_found=0

    if [[ ! -d "$dir" ]]; then
        log_message "ERROR" "Directory not found: $dir"
        return 1
    fi

    local find_args=("$dir")

    if [[ "$RECURSIVE" != "true" ]]; then
        find_args+=("-maxdepth" "1")
    fi

    find_args+=("-type" "f")

    if [[ -n "$FILE_PATTERN" ]]; then
        find_args+=("-name" "$FILE_PATTERN")
    fi

    while IFS= read -r -d '' file; do
        # '|| true' keeps one unreadable file from aborting the whole run under
        # 'set -e'; the plain assignment avoids ((...)) returning non-zero status
        clean_file "$file" || true
        files_found=$((files_found + 1))
    done < <(find "${find_args[@]}" -print0 2>/dev/null)

    if [[ $files_found -eq 0 ]]; then
        log_message "WARNING" "No files found in directory: $dir"
    fi
}

# Initialize variables
RECURSIVE=false
CREATE_BACKUP=true
VERBOSE=false
DRY_RUN=false
FILE_PATTERN=""
AUTO_DISCOVER=false
FILES_CLEANED=0
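
# Defaults: backups are created unless --no-backup is given, and --dry-run
# also enables verbose output so every file that is checked gets reported.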

# Parse command line arguments
while [[ $# -gt 0 ]]; do
    case $1 in
        -h|--help)
            show_usage
            exit 0
            ;;
        -r|--recursive)
            RECURSIVE=true
            shift
            ;;
        -b|--backup)
            CREATE_BACKUP=true
            shift
            ;;
        -n|--no-backup)
            CREATE_BACKUP=false
            shift
            ;;
        -v|--verbose)
            VERBOSE=true
            shift
            ;;
        -d|--dry-run)
            DRY_RUN=true
            VERBOSE=true
            shift
            ;;
        -f|--filter)
            # --filter needs a pattern argument; without this check, 'shift 2'
            # would fail and 'set -e' would end the script with no explanation
            if [[ -z "${2:-}" ]]; then
                echo "Option $1 requires a PATTERN argument" >&2
                show_usage
                exit 1
            fi
            FILE_PATTERN="$2"
            shift 2
            ;;
        -a|--auto-discover)
            AUTO_DISCOVER=true
            shift
            ;;
        -*)
            echo "Unknown option: $1" >&2
            show_usage
            exit 1
            ;;
        *)
            break
            ;;
    esac
done
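
# Any remaining (non-option) arguments are treated as files or directories to
# clean; they are handled in the main execution section below.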

# Main execution
log_message "INFO" "Starting ANSI cleanup utility"

if [[ "$DRY_RUN" == "true" ]]; then
    log_message "WARNING" "DRY RUN MODE - No files will be modified"
fi

if [[ "$AUTO_DISCOVER" == "true" ]]; then
    log_message "INFO" "Auto-discovering log files..."

    discovered_files=()
    while IFS= read -r file; do
        if [[ -f "$file" ]]; then
            discovered_files+=("$file")
        fi
    done < <(auto_discover_logs)

    if [[ ${#discovered_files[@]} -eq 0 ]]; then
        log_message "WARNING" "No log files discovered in common locations"
        log_message "INFO" "Try specifying files or directories manually"
        exit 0
    fi

    log_message "INFO" "Found ${#discovered_files[@]} log files"
    for file in "${discovered_files[@]}"; do
        # Keep going even if one file fails; the error has already been logged
        clean_file "$file" || true
    done
elif [[ $# -eq 0 ]]; then
    log_message "ERROR" "No files or directories specified"
    show_usage
    exit 1
else
    # Process specified files/directories; a failure on one target should not
    # stop the remaining ones ('set -e' is active)
    for target in "$@"; do
        if [[ -f "$target" ]]; then
            clean_file "$target" || true
        elif [[ -d "$target" ]]; then
            process_directory "$target" || true
        else
            log_message "ERROR" "File or directory not found: $target"
        fi
    done
fi

# Summary
if [[ "$DRY_RUN" != "true" ]]; then
    if [[ $FILES_CLEANED -gt 0 ]]; then
        log_message "SUCCESS" "Cleanup completed. $FILES_CLEANED file(s) cleaned."

        if [[ "$CREATE_BACKUP" == "true" ]]; then
            log_message "INFO" "Backup files created with suffix: $BACKUP_SUFFIX"
            log_message "INFO" "Remove backup files when satisfied: rm -f *$BACKUP_SUFFIX"
        fi
    else
        log_message "INFO" "No files needed cleaning."
    fi
else
    log_message "INFO" "Dry run completed. Use without --dry-run to make changes."
fi
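
# Tip: a cleaned file can be restored from its backup at any time, e.g.
#   mv app.log.ansi-backup app.log      # illustrative filename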