Mirror of https://github.com/acedanger/shell.git, synced 2025-12-05 22:50:18 -08:00
feat: Add base HTML template and implement dashboard, logs, and service views
- Created a base HTML template for consistent layout across pages.
- Developed a dashboard page to display backup service metrics and statuses.
- Implemented a log viewer for detailed log file inspection.
- Added an error handling page for a better user experience during failures.
- Introduced a service detail page to show specific service metrics and actions.
- Enhanced log filtering and viewing capabilities.
- Integrated auto-refresh functionality for real-time updates on metrics.
- Created integration and unit test scripts for backup metrics functionality.
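The dashboard, log, and service views themselves are not reproduced on this page; only the integration test below is. As a rough, non-authoritative sketch of how the dashboard and its auto-refresh could be wired up (assuming a Flask app; the routes, template name, and the collect_metrics() helper here are hypothetical and simply mirror the status-file reading done in the test script), an endpoint pair might look like this:

#!/usr/bin/env python3
# Illustrative sketch only -- not part of this commit. Assumes Flask is
# installed; route paths and template names are hypothetical.
import json
import os

from flask import Flask, jsonify, render_template

METRICS_DIR = '/home/acedanger/shell/metrics'

app = Flask(__name__)


def collect_metrics():
    """Read every <service>_status.json in METRICS_DIR, as the test script below does."""
    services = {}
    if os.path.exists(METRICS_DIR):
        for filename in os.listdir(METRICS_DIR):
            if filename.endswith('_status.json'):
                try:
                    with open(os.path.join(METRICS_DIR, filename)) as f:
                        services[filename.replace('_status.json', '')] = json.load(f)
                except Exception:
                    pass  # skip unreadable status files
    return {'services': services, 'total_services': len(services)}


@app.route('/api/metrics')
def api_metrics():
    # Small JSON endpoint the dashboard's auto-refresh could poll.
    return jsonify(collect_metrics())


@app.route('/')
def dashboard():
    # Server-rendered dashboard page extending the base template.
    return render_template('dashboard.html', metrics=collect_metrics())


if __name__ == '__main__':
    app.run(debug=True)

Polling a small JSON endpoint keeps the page responsive and lets the template stay purely presentational.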
test-web-integration.py (new file, 88 lines added)
@@ -0,0 +1,88 @@
#!/usr/bin/env python3

import os
import json
import sys

# Set environment
os.environ['BACKUP_ROOT'] = '/home/acedanger/shell'
METRICS_DIR = '/home/acedanger/shell/metrics'


def load_json_file(filepath):
    """Safely load JSON file with error handling"""
    try:
        if os.path.exists(filepath):
            with open(filepath, 'r') as f:
                return json.load(f)
    except Exception as e:
        print(f"Error loading JSON file {filepath}: {e}")
    return None


def get_service_metrics(service_name):
    """Get metrics for a specific service"""
    # Simple status file approach
    status_file = os.path.join(METRICS_DIR, f'{service_name}_status.json')

    status = load_json_file(status_file)

    return {
        'status': status,
        'last_run': status.get('end_time') if status else None,
        'current_status': status.get('status', 'unknown') if status else 'never_run',
        'files_processed': status.get('files_processed', 0) if status else 0,
        'total_size': status.get('total_size_bytes', 0) if status else 0,
        'duration': status.get('duration_seconds', 0) if status else 0
    }


def get_consolidated_metrics():
    """Get consolidated metrics across all services"""
    # With simplified approach, we consolidate by reading all status files
    services = {}

    if os.path.exists(METRICS_DIR):
        for filename in os.listdir(METRICS_DIR):
            if filename.endswith('_status.json'):
                service_name = filename.replace('_status.json', '')
                status_file = os.path.join(METRICS_DIR, filename)
                status = load_json_file(status_file)
                if status:
                    services[service_name] = status

    return {
        'services': services,
        'total_services': len(services),
        'last_updated': '2025-06-18T05:15:00-04:00'
    }


if __name__ == "__main__":
    print('=== Testing Simplified Metrics Web Integration ===')

    # Test individual service metrics
    print('\n1. Individual Service Metrics:')
    for service in ['plex', 'immich', 'media-services']:
        try:
            metrics = get_service_metrics(service)
            status = metrics['current_status']
            files = metrics['files_processed']
            duration = metrics['duration']
            print(f' {service}: {status} ({files} files, {duration}s)')
        except Exception as e:
            print(f' {service}: Error - {e}')

    # Test consolidated metrics
    print('\n2. Consolidated Metrics:')
    try:
        consolidated = get_consolidated_metrics()
        services = consolidated['services']
        print(f' Total services: {len(services)}')
        for name, status in services.items():
            message = status.get('message', 'N/A')
            print(f' {name}: {status["status"]} - {message}')
    except Exception as e:
        print(f' Error: {e}')

    print('\n✅ Web integration test completed successfully!')
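The per-service status files are not included in this diff; their schema is only implied by the keys the test reads (status, message, end_time, files_processed, total_size_bytes, duration_seconds). A minimal, made-up fixture for exercising the script could look like this (the values are invented; only the key names come from the test itself):

#!/usr/bin/env python3
# Illustrative fixture only -- not part of the commit. Writes one sample
# <service>_status.json with the keys the test script reads; point METRICS_DIR
# at a scratch directory if you do not want to touch the real metrics path.
import json
import os

METRICS_DIR = '/home/acedanger/shell/metrics'  # same path the test script uses

sample_status = {
    'status': 'success',                      # read via status.get('status')
    'message': 'Backup completed',            # read via status.get('message', 'N/A')
    'end_time': '2025-06-18T05:10:00-04:00',  # read via status.get('end_time')
    'files_processed': 1342,                  # read via status.get('files_processed', 0)
    'total_size_bytes': 52428800,             # read via status.get('total_size_bytes', 0)
    'duration_seconds': 212,                  # read via status.get('duration_seconds', 0)
}

os.makedirs(METRICS_DIR, exist_ok=True)
with open(os.path.join(METRICS_DIR, 'plex_status.json'), 'w') as f:
    json.dump(sample_status, f, indent=2)

print('Wrote sample status file; now run: python3 test-web-integration.py')

With that fixture in place, the test's first section should report plex as success with 1342 files, while immich and media-services fall back to never_run because no status file exists for them.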