mirror of https://github.com/acedanger/shell.git
synced 2025-12-06 02:20:11 -08:00

feat: Remove Ollama integration and update documentation for Fabric setup

setup/setup.sh (264 changed lines)
@@ -185,11 +185,7 @@ for pkg in "${pkgs[@]}"; do
        continue
    fi

-   # Handle ollama Docker installation
-   if [ "$pkg" = "ollama" ]; then
-       special_installs+=("$pkg")
-       continue
-   fi
-
    # Handle lazygit - available in COPR for Fedora, special install for Debian/Ubuntu
    if [ "$pkg" = "lazygit" ] && [ "$OS_NAME" != "fedora" ]; then
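The loop above collects anything that needs a custom installer and defers it to a second pass over special_installs. A minimal sketch of that two-pass pattern, with an illustrative package list and echo placeholders instead of the script's real install commands:

pkgs=(git fabric lazygit)   # illustrative list, not the script's real array
special_installs=()

for pkg in "${pkgs[@]}"; do
    case "$pkg" in
        fabric|lazygit)
            special_installs+=("$pkg")   # defer to the second pass
            continue
            ;;
    esac
    echo "would install $pkg with the system package manager"
done

for pkg in "${special_installs[@]}"; do
    echo "would run the custom installer for $pkg"
done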
@@ -289,66 +285,38 @@ for pkg in "${special_installs[@]}"; do
            # Download and install the latest Fabric binary for Linux AMD64
            curl -L https://github.com/danielmiessler/fabric/releases/latest/download/fabric-linux-amd64 -o /tmp/fabric
            chmod +x /tmp/fabric
            sudo mv /tmp/fabric /usr/local/bin/fabric
            echo -e "${GREEN}Fabric binary installed successfully!${NC}"
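As written, curl -L does not fail on an HTTP error, so a bad download could still be moved into /usr/local/bin. A minimal, more defensive sketch using the same release URL (not part of this commit):

if curl -fsSL https://github.com/danielmiessler/fabric/releases/latest/download/fabric-linux-amd64 -o /tmp/fabric; then
    chmod +x /tmp/fabric
    sudo mv /tmp/fabric /usr/local/bin/fabric
else
    echo "Fabric download failed, skipping install" >&2
fi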

            # Verify installation
            if fabric --version; then
                echo -e "${GREEN}Fabric installation verified!${NC}"
                echo -e "${YELLOW}Running Fabric setup...${NC}"

                # Create fabric config directory
                mkdir -p "$HOME/.config/fabric"

                # Run fabric setup with proper configuration
                echo -e "${YELLOW}Setting up Fabric patterns and configuration...${NC}"

                # Initialize fabric with default patterns
                fabric --setup || echo -e "${YELLOW}Initial fabric setup completed${NC}"

                # Update patterns to get the latest
                echo -e "${YELLOW}Updating Fabric patterns...${NC}"
                fabric --updatepatterns || echo -e "${YELLOW}Pattern update completed${NC}"

-               # Configure Ollama as the default model provider
-               echo -e "${YELLOW}Configuring Fabric to use Ollama...${NC}"
-
-               # Create or update fabric config to use Ollama
-               cat > "$HOME/.config/fabric/.env" << 'FABRIC_EOF'
-# Fabric Configuration for Ollama
-DEFAULT_MODEL=phi3:mini
-OLLAMA_API_BASE=http://localhost:11434
-FABRIC_EOF
-
                echo -e "${GREEN}Fabric setup completed successfully!${NC}"
-               echo -e "${YELLOW}Fabric is configured to use Ollama at http://localhost:11434${NC}"
-               echo -e "${YELLOW}Default model: phi3:mini${NC}"
                echo -e "${YELLOW}You can test fabric with: fabric --list-patterns${NC}"
            else
                echo -e "${RED}Fabric installation verification failed${NC}"
            fi
        else
            echo -e "${GREEN}Fabric is already installed${NC}"
-           # Still try to update patterns and ensure Ollama configuration
+           # Still try to update patterns
            echo -e "${YELLOW}Updating Fabric patterns...${NC}"
            fabric --updatepatterns || echo -e "${YELLOW}Pattern update completed${NC}"
-
-           # Ensure Ollama configuration exists
-           mkdir -p "$HOME/.config/fabric"
-           if [ ! -f "$HOME/.config/fabric/.env" ]; then
-               echo -e "${YELLOW}Creating Ollama configuration for existing Fabric installation...${NC}"
-               cat > "$HOME/.config/fabric/.env" << 'FABRIC_EOF'
-# Fabric Configuration for Ollama
-DEFAULT_MODEL=phi3:mini
-OLLAMA_API_BASE=http://localhost:11434
-FABRIC_EOF
-           fi
        fi
        ;;
-   "ollama")
-       # Ollama installation is handled in the main Ollama Docker setup section below
-       echo -e "${YELLOW}Ollama Docker installation will be handled in dedicated section...${NC}"
-       ;;
    "lazygit")
        if ! command -v lazygit &> /dev/null; then
            echo -e "${YELLOW}Installing Lazygit from GitHub releases...${NC}"
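With this change the script no longer writes Ollama defaults for Fabric. A user who still runs a local Ollama could recreate the removed file by hand, reusing the exact keys the old block wrote; whether current Fabric builds still read this .env is an assumption:

mkdir -p "$HOME/.config/fabric"
cat > "$HOME/.config/fabric/.env" << 'FABRIC_EOF'
DEFAULT_MODEL=phi3:mini
OLLAMA_API_BASE=http://localhost:11434
FABRIC_EOF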
@@ -369,159 +337,6 @@ FABRIC_EOF
    esac
done

-# Setup Ollama with Docker for local AI (required for Fabric)
-if [ "${SKIP_OLLAMA:-false}" = "true" ]; then
-    echo -e "${YELLOW}Skipping Ollama installation (SKIP_OLLAMA=true)${NC}"
-else
-    # Setup Ollama with Docker for local AI (required for Fabric)
-    echo -e "${YELLOW}Setting up Ollama with Docker for local AI support...${NC}"
-
-    # Check if user can run docker commands without sudo
-    if docker ps >/dev/null 2>&1; then
-        DOCKER_CMD="docker"
-        echo -e "${GREEN}Docker access confirmed without sudo${NC}"
-    else
-        echo -e "${YELLOW}Docker requires sudo access (group membership may need session refresh)${NC}"
-        DOCKER_CMD="sudo docker"
-    fi
-
-    # Check if Ollama Docker container is already running
-    if ! $DOCKER_CMD ps | grep -q ollama; then
-        echo -e "${YELLOW}Setting up Ollama Docker container...${NC}"
-
-        # Pull the Ollama Docker image
-        $DOCKER_CMD pull ollama/ollama:latest
-
-        # Create a Docker volume for Ollama data
-        $DOCKER_CMD volume create ollama-data 2>/dev/null || true
-
-        # Remove any existing ollama container
-        $DOCKER_CMD rm -f ollama 2>/dev/null || true
-
-        # Start Ollama container with GPU support (if available) or CPU-only
-        if command -v nvidia-docker &> /dev/null || $DOCKER_CMD info 2>/dev/null | grep -q nvidia; then
-            echo -e "${YELLOW}Starting Ollama with GPU support...${NC}"
-            $DOCKER_CMD run -d \
-                --name ollama \
-                --restart unless-stopped \
-                --gpus all \
-                -v ollama-data:/root/.ollama \
-                -p 11434:11434 \
-                ollama/ollama
-        else
-            echo -e "${YELLOW}Starting Ollama in CPU-only mode...${NC}"
-            $DOCKER_CMD run -d \
-                --name ollama \
-                --restart unless-stopped \
-                -v ollama-data:/root/.ollama \
-                -p 11434:11434 \
-                ollama/ollama
-        fi
-
-        # Wait for the container to be ready
-        echo -e "${YELLOW}Waiting for Ollama to start...${NC}"
-        sleep 10
-
-        # Install a lightweight model for basic functionality
-        echo -e "${YELLOW}Installing a basic AI model (phi3:mini)...${NC}"
-        $DOCKER_CMD exec ollama ollama pull phi3:mini
-
-        echo -e "${GREEN}Ollama Docker setup completed with phi3:mini model!${NC}"
-        echo -e "${YELLOW}Ollama is accessible at http://localhost:11434${NC}"
-    else
-        echo -e "${GREEN}Ollama Docker container is already running${NC}"
-    fi
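The block above waits a fixed 10 seconds before pulling the first model. A sketch that instead polls the API the same way the configure_fabric_for_ollama function further down does, assuming the same container name and port:

DOCKER_CMD="docker"   # or "sudo docker", as detected earlier in the script
for attempt in $(seq 1 30); do
    if curl -s http://localhost:11434/api/tags >/dev/null 2>&1; then
        break   # Ollama API is up
    fi
    sleep 2
done
$DOCKER_CMD exec ollama ollama pull phi3:mini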
-
-    # Add helper aliases for Ollama Docker management
-    OLLAMA_ALIASES_FILE="$HOME/.oh-my-zsh/custom/ollama-aliases.zsh"
-    echo -e "${YELLOW}Setting up Ollama Docker aliases...${NC}"
-    cat > "$OLLAMA_ALIASES_FILE" << 'EOF'
-# Ollama Docker Management Aliases
-alias ollama-start='docker start ollama'
-alias ollama-stop='docker stop ollama'
-alias ollama-restart='docker restart ollama'
-alias ollama-logs='docker logs -f ollama'
-alias ollama-shell='docker exec -it ollama /bin/bash'
-alias ollama-pull='docker exec ollama ollama pull'
-alias ollama-list='docker exec ollama ollama list'
-alias ollama-run='docker exec ollama ollama run'
-alias ollama-status='docker ps | grep ollama'
-
-# Function to run ollama commands in Docker
-ollama() {
-    if [ "$1" = "serve" ]; then
-        echo "Ollama is running in Docker. Use 'ollama-start' to start the container."
-        return 0
-    fi
-
-    # Check if user can run docker without sudo
-    if docker ps >/dev/null 2>&1; then
-        docker exec ollama ollama "$@"
-    else
-        sudo docker exec ollama ollama "$@"
-    fi
-}
-EOF
-
-    echo -e "${GREEN}Ollama Docker aliases created in $OLLAMA_ALIASES_FILE${NC}"
-    echo -e "${YELLOW}You can install additional models with: ollama pull <model-name>${NC}"
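For reference, once zsh has sourced the removed aliases file, plain ollama commands are proxied into the container; a short usage sketch:

source "$HOME/.oh-my-zsh/custom/ollama-aliases.zsh"
ollama list          # runs: docker exec ollama ollama list
ollama pull llama2   # runs: docker exec ollama ollama pull llama2
ollama-logs          # follows the container logs with docker logs -f ollama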
-
-    # Function to finalize Fabric configuration after Ollama is running
-    configure_fabric_for_ollama() {
-        echo -e "${YELLOW}Finalizing Fabric configuration for Ollama...${NC}"
-
-        # Ensure Ollama is accessible before configuring Fabric
-        local max_attempts=30
-        local attempt=0
-
-        while [ $attempt -lt $max_attempts ]; do
-            if curl -s http://localhost:11434/api/tags >/dev/null 2>&1; then
-                echo -e "${GREEN}Ollama API is accessible, configuring Fabric...${NC}"
-                break
-            fi
-            echo -e "${YELLOW}Waiting for Ollama to be ready... (attempt $((attempt + 1))/$max_attempts)${NC}"
-            sleep 2
-            attempt=$((attempt + 1))
-        done
-
-        if [ $attempt -eq $max_attempts ]; then
-            echo -e "${YELLOW}Warning: Ollama API not accessible, Fabric configuration may need manual setup${NC}"
-            return
-        fi
-
-        # Create a comprehensive Fabric configuration
-        mkdir -p "$HOME/.config/fabric"
-
-        # Create the main configuration file
-        cat > "$HOME/.config/fabric/config.yaml" << 'FABRIC_CONFIG_EOF'
-# Fabric Configuration for Ollama Integration
-model:
-  default: "phi3:mini"
-
-providers:
-  ollama:
-    base_url: "http://localhost:11434"
-    api_key: "" # Ollama doesn't require an API key for local access
-
-# Default provider
-default_provider: "ollama"
-
-# Pattern settings
-patterns:
-  auto_update: true
-  directory: "~/.config/fabric/patterns"
-FABRIC_CONFIG_EOF
-
-        echo -e "${GREEN}Fabric configuration file created${NC}"
-    }
-
-    # Call the configuration function if Ollama container is running
-    if docker ps | grep -q ollama; then
-        configure_fabric_for_ollama
-    fi
-
-fi # End SKIP_OLLAMA check
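A small verification sketch for anyone keeping this setup: print the config the removed function writes and check the Ollama API for the default model. It assumes jq is installed, which the script itself never requires:

cat "$HOME/.config/fabric/config.yaml"
if curl -s http://localhost:11434/api/tags | jq -r '.models[].name' | grep -qx 'phi3:mini'; then
    echo "phi3:mini is available"
else
    echo "pull it with: docker exec ollama ollama pull phi3:mini"
fi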

# Install Zsh if not already installed
echo -e "${YELLOW}Installing Zsh...${NC}"
if ! command -v zsh &> /dev/null; then

@@ -770,27 +585,6 @@ echo -e "${GREEN}OS: $OS_NAME $OS_VERSION${NC}"
echo -e "${GREEN}Package Manager: $PKG_MANAGER${NC}"
echo -e "${GREEN}Shell: $(basename "$SHELL") → zsh${NC}"

-# Test Ollama and Fabric integration
-echo -e "\n${GREEN}=== Testing Ollama and Fabric Integration ===${NC}"
-echo -e "${YELLOW}Testing Ollama Docker container...${NC}"
-if docker ps | grep -q ollama; then
-    echo -e "${GREEN}✓ Ollama Docker container is running${NC}"
-
-    # Test if Ollama API is responding
-    echo -e "${YELLOW}Testing Ollama API...${NC}"
-    if curl -s http://localhost:11434/api/tags >/dev/null 2>&1; then
-        echo -e "${GREEN}✓ Ollama API is responding${NC}"
-
-        # List available models
-        echo -e "${YELLOW}Available Ollama models:${NC}"
-        docker exec ollama ollama list || echo -e "${YELLOW}No models listed or command failed${NC}"
-    else
-        echo -e "${YELLOW}⚠ Ollama API not responding yet (may need more time to start)${NC}"
-    fi
-else
-    echo -e "${RED}✗ Ollama Docker container is not running${NC}"
-fi
-
echo -e "\n${YELLOW}Testing Fabric installation...${NC}"
if command -v fabric &> /dev/null; then
    echo -e "${GREEN}✓ Fabric is installed${NC}"
@@ -803,28 +597,16 @@ if command -v fabric &> /dev/null; then
    else
        echo -e "${YELLOW}⚠ Fabric patterns may need to be updated${NC}"
    fi

-    # Check fabric configuration
-    if [ -f "$HOME/.config/fabric/.env" ]; then
-        echo -e "${GREEN}✓ Fabric Ollama configuration found${NC}"
-    else
-        echo -e "${YELLOW}⚠ Fabric Ollama configuration not found${NC}"
-    fi
else
    echo -e "${RED}✗ Fabric is not installed${NC}"
fi

echo -e "\n${GREEN}=== Post-Installation Instructions ===${NC}"
echo -e "${YELLOW}1. Restart your shell or run: source ~/.zshrc${NC}"
-echo -e "${YELLOW}2. Test Ollama: ollama list${NC}"
-echo -e "${YELLOW}3. Test Fabric: fabric --list-patterns${NC}"
-echo -e "${YELLOW}4. Try a Fabric pattern: echo 'Hello world' | fabric --pattern summarize${NC}"
-echo -e "${YELLOW}5. Install more models: ollama pull llama2${NC}"
-echo -e "${YELLOW}6. Manage Ollama container: ollama-start, ollama-stop, ollama-logs${NC}"
+echo -e "${YELLOW}2. Test Fabric: fabric --list-patterns${NC}"
+echo -e "${YELLOW}3. Try a Fabric pattern: echo 'Hello world' | fabric --pattern summarize${NC}"

echo -e "\n${GREEN}=== Useful Commands ===${NC}"
echo -e "${YELLOW}• View running containers: docker ps${NC}"
-echo -e "${YELLOW}• Ollama logs: docker logs -f ollama${NC}"
echo -e "${YELLOW}• Fabric help: fabric --help${NC}"
echo -e "${YELLOW}• Update patterns: fabric --updatepatterns${NC}"
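The remaining instructions, run as a short session in a new zsh shell once the script has finished (assumes Fabric installed cleanly):

source ~/.zshrc
fabric --version
fabric --list-patterns | head
echo 'Hello world' | fabric --pattern summarize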