AUTOMATED SETUP SCRIPT
This script automates the Claude Code + Local Ollama setup. Save it as setup-claude-local.sh, make it executable, and run it.
Usage:
chmod +x setup-claude-local.sh
./setup-claude-local.sh
The script will:
- Check prerequisites (Node.js, npm, Ollama 0.14.0+)
- Ask for your Ollama server IP
- Ask for context size based on your RAM
- Create the Modelfile and configured model
- Generate ~/.claude/settings.json
- Install Claude Code if needed
Copy everything below the separator line and save it as setup-claude-local.sh.
========================================================================
#!/bin/bash
# Claude Code + Local Ollama Setup Script
# Techalicious Academy - February 2026
# https://techalicious.academy

# Abort on the first unhandled command failure.
set -e

echo ""
echo "========================================"
echo " Claude Code + Local Ollama Setup"
echo "========================================"
echo ""

# ------------------------------------------------------------------------------
# Colors for output
# ------------------------------------------------------------------------------
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

# Logging helpers: colored status tag + message on stdout.
# fail() additionally aborts the whole script with exit code 1.
ok()   { echo -e "${GREEN}[OK]${NC} $1"; }
warn() { echo -e "${YELLOW}[WARN]${NC} $1"; }
fail() { echo -e "${RED}[FAIL]${NC} $1"; exit 1; }
info() { echo -e "${BLUE}[INFO]${NC} $1"; }
# ------------------------------------------------------------------------------
# Check Prerequisites
# ------------------------------------------------------------------------------
echo "Checking prerequisites..."
echo ""

# Check Node.js
if command -v node &> /dev/null; then
  NODE_VERSION=$(node --version)
  ok "Node.js installed: $NODE_VERSION"
else
  fail "Node.js not found. Install it first: https://nodejs.org"
fi

# Check npm
if command -v npm &> /dev/null; then
  NPM_VERSION=$(npm --version)
  ok "npm installed: $NPM_VERSION"
else
  fail "npm not found. Install Node.js with npm."
fi

# Check Ollama
if command -v ollama &> /dev/null; then
  # Extract the first x.y.z version string from `ollama --version`.
  OLLAMA_VERSION=$(ollama --version 2>&1 | grep -oE '[0-9]+\.[0-9]+\.[0-9]+' | head -1)
  ok "Ollama installed: $OLLAMA_VERSION"
  # Check version is 0.14.0+ (compare major/minor numerically).
  MAJOR=$(echo "$OLLAMA_VERSION" | cut -d. -f1)
  MINOR=$(echo "$OLLAMA_VERSION" | cut -d. -f2)
  if [ "$MAJOR" -eq 0 ] && [ "$MINOR" -lt 14 ]; then
    fail "Ollama version must be 0.14.0 or higher (you have $OLLAMA_VERSION)"
  fi
else
  fail "Ollama not found. Install it first: https://ollama.com"
fi

echo ""
# ------------------------------------------------------------------------------
# Get Ollama Server Location
# ------------------------------------------------------------------------------
echo "Where is your Ollama server running?"
echo ""
echo " 1) This machine (localhost)"
echo " 2) Another machine on my network"
echo ""
read -p "Enter choice [1]: " SERVER_CHOICE
SERVER_CHOICE=${SERVER_CHOICE:-1}   # empty answer defaults to localhost

if [ "$SERVER_CHOICE" = "1" ]; then
  OLLAMA_IP="localhost"
  info "Using localhost"
else
  read -p "Enter Ollama server IP (e.g., 10.0.0.79): " OLLAMA_IP
  if [ -z "$OLLAMA_IP" ]; then
    fail "IP address required for remote server"
  fi
  info "Using remote server: $OLLAMA_IP"
fi

echo ""
# ------------------------------------------------------------------------------
# Test Ollama Connection
# ------------------------------------------------------------------------------
info "Testing connection to Ollama at $OLLAMA_IP:11434..."

# A 5-second connect timeout keeps the script from hanging on a bad IP.
if curl -s --connect-timeout 5 "http://$OLLAMA_IP:11434/api/version" > /dev/null; then
  ok "Ollama is reachable"
else
  fail "Cannot connect to Ollama at http://$OLLAMA_IP:11434
If remote: Add 'export OLLAMA_HOST=\"0.0.0.0:11434\"' to ~/.zshrc, then source and restart ollama.
If local: Is Ollama running? Try: ollama serve"
fi

echo ""
# ------------------------------------------------------------------------------
# Select Base Model
# ------------------------------------------------------------------------------
echo "Which base model do you want to use?"
echo ""
echo " 1) qwen3-coder-next (recommended, ~18GB)"
echo " 2) qwen2.5-coder:14b (~9GB)"
echo " 3) qwen2.5-coder:7b (~4.5GB)"
echo " 4) Custom (enter your own)"
echo ""
read -p "Enter choice [1]: " MODEL_CHOICE
MODEL_CHOICE=${MODEL_CHOICE:-1}

# Map the menu choice to an Ollama model tag.
case $MODEL_CHOICE in
  1) BASE_MODEL="qwen3-coder-next:latest" ;;
  2) BASE_MODEL="qwen2.5-coder:14b" ;;
  3) BASE_MODEL="qwen2.5-coder:7b" ;;
  4)
    read -p "Enter model name (as shown in ollama list): " BASE_MODEL
    if [ -z "$BASE_MODEL" ]; then
      fail "Model name required"
    fi
    ;;
  *) BASE_MODEL="qwen3-coder-next:latest" ;;   # any other input falls back to the default
esac

info "Using base model: $BASE_MODEL"
echo ""
# ------------------------------------------------------------------------------
# Check if model exists, offer to pull
# ------------------------------------------------------------------------------
# Only possible when Ollama runs on this machine; the remote case is handled
# later by printing the commands for the user to run on the server.
if [ "$OLLAMA_IP" = "localhost" ]; then
  # Match on the model name with its tag stripped (e.g. "qwen2.5-coder").
  if ! ollama list | grep -q "${BASE_MODEL%%:*}"; then
    warn "Model $BASE_MODEL not found locally"
    read -p "Pull it now? [Y/n]: " PULL_MODEL
    PULL_MODEL=${PULL_MODEL:-Y}
    if [[ "$PULL_MODEL" =~ ^[Yy] ]]; then
      info "Pulling $BASE_MODEL (this may take a while)..."
      ollama pull "$BASE_MODEL"
      ok "Model pulled successfully"
    else
      warn "Skipping pull. Make sure the model exists before continuing."
    fi
  else
    ok "Base model found"
  fi
fi

echo ""
# ------------------------------------------------------------------------------
# Select Context Size
# ------------------------------------------------------------------------------
echo "How much unified memory (RAM) does your AI machine have?"
echo ""
echo " 1) 16GB -> 8K context"
echo " 2) 32GB -> 32K context (recommended)"
echo " 3) 64GB+ -> 64K context"
echo " 4) Custom"
echo ""
read -p "Enter choice [2]: " RAM_CHOICE
RAM_CHOICE=${RAM_CHOICE:-2}

# Map the menu choice to a num_ctx token count plus a short label that will
# become part of the configured model's name.
case $RAM_CHOICE in
  1) CONTEXT_SIZE=8192; CTX_LABEL="8k" ;;
  2) CONTEXT_SIZE=32768; CTX_LABEL="32k" ;;
  3) CONTEXT_SIZE=65536; CTX_LABEL="64k" ;;
  4)
    read -p "Enter context size (e.g., 16384): " CONTEXT_SIZE
    CTX_LABEL="${CONTEXT_SIZE}"
    ;;
  *) CONTEXT_SIZE=32768; CTX_LABEL="32k" ;;   # any other input falls back to 32K
esac
# Generate configured model name: strip the tag and replace dots so the result
# is a valid Ollama model name, e.g. "qwen2.5-coder:14b" -> "qwen2-5-coder-32k".
MODEL_BASE_NAME=$(echo "$BASE_MODEL" | cut -d: -f1 | sed 's/\./-/g')
CONFIGURED_MODEL="${MODEL_BASE_NAME}-${CTX_LABEL}"

info "Will create model: $CONFIGURED_MODEL with $CONTEXT_SIZE context"
echo ""

# ------------------------------------------------------------------------------
# Create Modelfile
# ------------------------------------------------------------------------------
MODELFILE_PATH="$HOME/Modelfile-claude-local"

info "Creating Modelfile at $MODELFILE_PATH..."

# Unquoted heredoc delimiter so $BASE_MODEL and $CONTEXT_SIZE expand.
cat > "$MODELFILE_PATH" << EOF
FROM $BASE_MODEL
PARAMETER num_ctx $CONTEXT_SIZE
EOF

ok "Modelfile created"
echo ""
# ------------------------------------------------------------------------------
# Create Configured Model
# ------------------------------------------------------------------------------
if [ "$OLLAMA_IP" = "localhost" ]; then
  info "Creating configured model: $CONFIGURED_MODEL..."
  if ollama list | grep -q "$CONFIGURED_MODEL"; then
    warn "Model $CONFIGURED_MODEL already exists"
    read -p "Recreate it? [y/N]: " RECREATE
    RECREATE=${RECREATE:-N}
    if [[ "$RECREATE" =~ ^[Yy] ]]; then
      ollama create "$CONFIGURED_MODEL" -f "$MODELFILE_PATH"
      ok "Model recreated"
    else
      info "Keeping existing model"
    fi
  else
    ollama create "$CONFIGURED_MODEL" -f "$MODELFILE_PATH"
    ok "Model created: $CONFIGURED_MODEL"
  fi
else
  # Remote server: we cannot create the model from here, so print the exact
  # commands the user must run on the Ollama host, then wait for confirmation.
  warn "Remote server detected."
  echo ""
  echo "Run these commands on your Ollama server ($OLLAMA_IP):"
  echo ""
  echo " # Create Modelfile"
  echo " cat > ~/Modelfile-claude-local << 'EOF'"
  echo " FROM $BASE_MODEL"
  echo " PARAMETER num_ctx $CONTEXT_SIZE"
  echo " EOF"
  echo ""
  echo " # Create configured model"
  echo " ollama create $CONFIGURED_MODEL -f ~/Modelfile-claude-local"
  echo ""
  read -p "Press Enter when done..."
fi

echo ""
# ------------------------------------------------------------------------------
# Create settings.json
# ------------------------------------------------------------------------------
CLAUDE_DIR="$HOME/.claude"
SETTINGS_FILE="$CLAUDE_DIR/settings.json"

info "Creating Claude Code settings..."

mkdir -p "$CLAUDE_DIR"

# Initialize so the overwrite test below never reads an unset variable.
OVERWRITE=N

# Check if settings.json exists; never clobber it without asking.
if [ -f "$SETTINGS_FILE" ]; then
  warn "Settings file already exists at $SETTINGS_FILE"
  read -p "Overwrite? [y/N]: " OVERWRITE
  OVERWRITE=${OVERWRITE:-N}
  if [[ ! "$OVERWRITE" =~ ^[Yy] ]]; then
    info "Keeping existing settings"
    echo ""
    echo "Your model name is: $CONFIGURED_MODEL"
    echo "Update ANTHROPIC_MODEL in $SETTINGS_FILE manually if needed."
    echo ""
  fi
fi

# Write the file when it is missing, or the user approved overwriting it.
if [ ! -f "$SETTINGS_FILE" ] || [[ "$OVERWRITE" =~ ^[Yy] ]]; then
  cat > "$SETTINGS_FILE" << EOF
{
  "env": {
    "ANTHROPIC_BASE_URL": "http://$OLLAMA_IP:11434",
    "ANTHROPIC_AUTH_TOKEN": "ollama",
    "ANTHROPIC_MODEL": "$CONFIGURED_MODEL",
    "ANTHROPIC_SMALL_FAST_MODEL": "$CONFIGURED_MODEL",
    "CLAUDE_CODE_DISABLE_NONESSENTIAL_TRAFFIC": "1",
    "API_TIMEOUT_MS": "600000"
  }
}
EOF
  ok "Settings created at $SETTINGS_FILE"
fi

echo ""
# ------------------------------------------------------------------------------
# Add Environment Variables to ~/.zshrc
# ------------------------------------------------------------------------------
ZSHRC="$HOME/.zshrc"
MARKER="# Claude Code + Local Ollama"

info "Adding environment variables to ~/.zshrc..."

# Initialize so the append test below never reads an unset variable.
REPLACE_ENV=N

if grep -q "$MARKER" "$ZSHRC" 2>/dev/null; then
  warn "Environment variables already in ~/.zshrc"
  read -p "Replace them? [y/N]: " REPLACE_ENV
  REPLACE_ENV=${REPLACE_ENV:-N}
  if [[ "$REPLACE_ENV" =~ ^[Yy] ]]; then
    # Remove the old block: marker line through the next blank line.
    # `-i.bak` works on both GNU and BSD sed; the bare BSD-only `-i ''`
    # form would fail on Linux.
    sed -i.bak "/$MARKER/,/^$/d" "$ZSHRC" && rm -f "$ZSHRC.bak"
  fi
fi

# Append when the block is absent, or the user asked to replace it.
if ! grep -q "$MARKER" "$ZSHRC" 2>/dev/null || [[ "$REPLACE_ENV" =~ ^[Yy] ]]; then
  cat >> "$ZSHRC" << EOF

$MARKER
export ANTHROPIC_BASE_URL="http://$OLLAMA_IP:11434"
export ANTHROPIC_AUTH_TOKEN="ollama"
export ANTHROPIC_MODEL="$CONFIGURED_MODEL"
export ANTHROPIC_SMALL_FAST_MODEL="$CONFIGURED_MODEL"
export CLAUDE_CODE_DISABLE_NONESSENTIAL_TRAFFIC="1"
export API_TIMEOUT_MS="600000"
EOF
  ok "Environment variables added to ~/.zshrc"
fi

echo ""
# ------------------------------------------------------------------------------
# Install Claude Code
# ------------------------------------------------------------------------------
if command -v claude &> /dev/null; then
  CLAUDE_VERSION=$(claude --version 2>&1 | head -1)
  ok "Claude Code already installed: $CLAUDE_VERSION"
else
  info "Installing Claude Code..."
  npm install -g @anthropic-ai/claude-code
  # npm's global bin dir may not be on PATH yet in this shell.
  if command -v claude &> /dev/null; then
    ok "Claude Code installed"
  else
    warn "Claude Code installed but not in PATH"
    echo "You may need to restart your terminal or add npm bin to PATH"
  fi
fi
echo ""

# ------------------------------------------------------------------------------
# Summary
# ------------------------------------------------------------------------------
echo "========================================"
echo " Setup Complete!"
echo "========================================"
echo ""
echo "Configuration:"
echo " Ollama Server: http://$OLLAMA_IP:11434"
echo " Model: $CONFIGURED_MODEL"
echo " Context Size: $CONTEXT_SIZE tokens"
echo " Settings File: $SETTINGS_FILE"
echo " Environment: ~/.zshrc"
echo ""
echo "Next steps:"
echo ""
echo " 1. source ~/.zshrc"
echo " 2. cd into a project directory"
echo " 3. Run: claude"
echo " 4. Select option 3 (3rd-party platform)"
echo " 5. Start coding!"
echo ""
echo "Verification commands:"
echo ""
echo " # Test Ollama connection"
echo " curl http://$OLLAMA_IP:11434/api/version"
echo ""
echo " # Verify model context (run model first)"
echo " ollama run $CONFIGURED_MODEL"
echo " # In another terminal:"
echo " ollama ps"
echo ""
echo " # Should show CONTEXT: $CONTEXT_SIZE"
echo ""