Mirror of https://github.com/acedanger/shell.git (synced 2025-12-05 21:40:12 -08:00)
- Created a base HTML template for consistent layout across pages.
- Developed a dashboard page to display backup service metrics and statuses.
- Implemented a log viewer for detailed log file inspection.
- Added error handling page for better user experience during failures.
- Introduced service detail page to show specific service metrics and actions.
- Enhanced log filtering and viewing capabilities.
- Integrated auto-refresh functionality for real-time updates on metrics.
- Created integration and unit test scripts for backup metrics functionality.
97 lines
3.5 KiB
Bash
Executable File
#!/bin/bash

# Convenience script to run setup without Ollama installation
# This script sets SKIP_OLLAMA=true and runs the main setup script
#
# Usage: ./setup-no-ollama.sh [setup script options]
# Author: acedanger
# Description: Runs setup while skipping Ollama and configures Fabric for external AI providers
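#
# Example usage; extra arguments are passed straight through to setup.sh.
# The second form is only a rough sketch of what this wrapper does, since it
# skips the Fabric .env handling that is added further below:
#   ./setup-no-ollama.sh
#   SKIP_OLLAMA=true ./setup.sh
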
set -e

# Define colors for output
GREEN='\033[0;32m'
YELLOW='\033[0;33m'
BLUE='\033[0;34m'
RED='\033[0;31m'
NC='\033[0m' # No Color

echo -e "${GREEN}=== Shell Setup (Without Ollama) ===${NC}"
echo -e "${YELLOW}This will install all packages and configurations except the Ollama Docker setup${NC}"
echo -e "${YELLOW}Fabric will be installed but configured for external AI providers${NC}"

# Get the directory of this script
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"

# Set SKIP_OLLAMA environment variable and run setup
export SKIP_OLLAMA=true

echo -e "\n${YELLOW}Running setup with SKIP_OLLAMA=true...${NC}"

# Run the main setup script
"$SCRIPT_DIR/setup.sh" "$@"

# Configure Fabric after main setup completes
echo -e "\n${BLUE}Configuring Fabric with external AI providers...${NC}"

# Create Fabric config directory if it doesn't exist
mkdir -p ~/.config/fabric

# Download the pre-configured .env file from git repository
echo -e "${YELLOW}Downloading Fabric .env configuration from git repository...${NC}"
GIST_URL="https://git.ptrwd.com/peterwood/config/raw/branch/main/fabric/.env"

# -f makes curl treat HTTP errors (such as a 404) as failures so the fallback
# template below is used; -sS stays quiet on success but still reports errors
if curl -fsS "$GIST_URL" -o ~/.config/fabric/.env; then
    chmod 600 ~/.config/fabric/.env
    echo -e "${GREEN}✓ Fabric .env file configured successfully${NC}"

    # Verify the file was downloaded correctly
    if [ -s ~/.config/fabric/.env ]; then
        FILE_SIZE=$(stat -c%s ~/.config/fabric/.env 2>/dev/null || echo "unknown")
        echo -e "${GREEN}✓ Configuration file downloaded: ${FILE_SIZE} bytes${NC}"
    else
        echo -e "${RED}⚠ Downloaded file appears to be empty${NC}"
    fi
else
    echo -e "${RED}⚠ Could not download .env file from git repository. Creating basic template...${NC}"

    # Create a basic .env template as fallback
    cat > ~/.config/fabric/.env << 'EOF'
# Fabric AI Provider Configuration
# Add your API keys below and uncomment the lines you want to use

# OpenAI Configuration
#OPENAI_API_KEY=your_openai_api_key_here
#OPENAI_API_BASE_URL=https://api.openai.com/v1

# Anthropic Configuration
#ANTHROPIC_API_KEY=your_anthropic_api_key_here

# Google Gemini Configuration
#GOOGLE_API_KEY=your_google_api_key_here

# Groq Configuration
#GROQ_API_KEY=your_groq_api_key_here

# Set your preferred default model
DEFAULT_MODEL=gpt-4o-mini

# For complete provider list, see:
# https://git.ptrwd.com/peterwood/config/raw/branch/main/fabric/.env
EOF
    chmod 600 ~/.config/fabric/.env
    echo -e "${YELLOW}✓ Basic .env template created${NC}"
fi

echo -e "\n${GREEN}=== Setup completed without Ollama ===${NC}"
echo -e "${BLUE}Next steps for Fabric configuration:${NC}"
echo -e "${YELLOW}1. Edit ~/.config/fabric/.env and add your API keys${NC}"
echo -e "${YELLOW}2. Uncomment your preferred AI provider section${NC}"
echo -e "${YELLOW}3. Set DEFAULT_MODEL to your preferred model${NC}"
echo -e "${YELLOW}4. Test configuration with: fabric --list-patterns${NC}"
echo -e "\n${BLUE}Supported AI providers:${NC}"
echo -e "${YELLOW}- OpenAI (GPT-4, GPT-4o, GPT-3.5-turbo)${NC}"
echo -e "${YELLOW}- Anthropic (Claude-3.5-sonnet, Claude-3-haiku)${NC}"
echo -e "${YELLOW}- Google (Gemini-pro, Gemini-1.5-pro)${NC}"
echo -e "${YELLOW}- Groq (Fast inference with Llama, Mixtral)${NC}"
echo -e "${YELLOW}- And many more providers...${NC}"
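
# A minimal follow-up sketch of the next steps printed above (the editor and key
# values are placeholders, not real credentials):
#   ${EDITOR:-nano} ~/.config/fabric/.env   # uncomment a provider line and add your API key
#   fabric --list-patterns                  # quick smoke test once a provider is configured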